From 364df36ac40b28f172f2bb0b821befeefd306246 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Mon, 8 Jul 2024 22:37:20 +0800 Subject: [PATCH 001/325] feat: plugin call dify --- api/.env.example | 3 + api/configs/feature/__init__.py | 14 +++++ api/controllers/inner_api/__init__.py | 2 +- api/controllers/inner_api/plugin/__init__.py | 1 + api/controllers/inner_api/plugin/plugin.py | 59 +++++++++++++++++++ api/controllers/inner_api/plugin/wraps.py | 47 +++++++++++++++ .../inner_api/workspace/workspace.py | 4 +- api/controllers/inner_api/wraps.py | 20 ++++++- api/services/plugin/plugin_invoke_service.py | 16 +++++ 9 files changed, 161 insertions(+), 5 deletions(-) create mode 100644 api/controllers/inner_api/plugin/__init__.py create mode 100644 api/controllers/inner_api/plugin/plugin.py create mode 100644 api/controllers/inner_api/plugin/wraps.py create mode 100644 api/services/plugin/plugin_invoke_service.py diff --git a/api/.env.example b/api/.env.example index 573c8bf90c11a8..bdea20c5ffd9b6 100644 --- a/api/.env.example +++ b/api/.env.example @@ -238,3 +238,6 @@ WORKFLOW_CALL_MAX_DEPTH=5 # App configuration APP_MAX_EXECUTION_TIME=1200 +# Plugin configuration +PLUGIN_INNER_API_URL=http://127.0.0.1:5002 +PLUGIN_INNER_API_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index bd0ef983c4c5bf..2012b56c6251d8 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -47,6 +47,19 @@ class CodeExecutionSandboxConfig(BaseSettings): default='dify-sandbox', ) +class PluginConfig(BaseSettings): + """ + Plugin configs + """ + PLUGIN_INNER_API_URL: str = Field( + description='Plugin inner API URL', + default='http://plugin:8194', + ) + + PLUGIN_INNER_API_KEY: str = Field( + description='Plugin inner API key', + default='dify-inner-api-key', + ) class EndpointConfig(BaseSettings): """ @@ -431,6 +444,7 @@ class FeatureConfig( AppExecutionConfig, BillingConfig, 
CodeExecutionSandboxConfig, + PluginConfig, DataSetConfig, EndpointConfig, FileAccessConfig, diff --git a/api/controllers/inner_api/__init__.py b/api/controllers/inner_api/__init__.py index ad49a649caab66..32770aa0de6851 100644 --- a/api/controllers/inner_api/__init__.py +++ b/api/controllers/inner_api/__init__.py @@ -5,5 +5,5 @@ bp = Blueprint('inner_api', __name__, url_prefix='/inner/api') api = ExternalApi(bp) +from .plugin import plugin from .workspace import workspace - diff --git a/api/controllers/inner_api/plugin/__init__.py b/api/controllers/inner_api/plugin/__init__.py new file mode 100644 index 00000000000000..fd1918e6b26fee --- /dev/null +++ b/api/controllers/inner_api/plugin/__init__.py @@ -0,0 +1 @@ +from .plugin import * \ No newline at end of file diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py new file mode 100644 index 00000000000000..6ce860b877498a --- /dev/null +++ b/api/controllers/inner_api/plugin/plugin.py @@ -0,0 +1,59 @@ + +from flask_restful import Resource, reqparse + +from controllers.console.setup import setup_required +from controllers.inner_api import api +from controllers.inner_api.plugin.wraps import get_tenant +from controllers.inner_api.wraps import plugin_inner_api_only +from libs.helper import compact_generate_response +from models.account import Tenant +from services.plugin.plugin_invoke_service import PluginInvokeService + + +class PluginInvokeModelApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + def post(self, user_id: str, tenant_model: Tenant): + parser = reqparse.RequestParser() + parser.add_argument('provider', type=dict, required=True, location='json') + parser.add_argument('model', type=dict, required=True, location='json') + parser.add_argument('parameters', type=dict, required=True, location='json') + + args = parser.parse_args() + + +class PluginInvokeToolApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + def 
post(self, user_id: str, tenant_model: Tenant): + parser = reqparse.RequestParser() + parser.add_argument('provider', type=dict, required=True, location='json') + parser.add_argument('tool', type=dict, required=True, location='json') + parser.add_argument('parameters', type=dict, required=True, location='json') + + args = parser.parse_args() + + response = PluginInvokeService.invoke_tool(user_id, tenant_model, + args['provider'], args['tool'], + args['parameters']) + return compact_generate_response(response) + + +class PluginInvokeNodeApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + def post(self, user_id: str, tenant_model: Tenant): + parser = reqparse.RequestParser() + args = parser.parse_args() + + return { + 'message': 'success' + } + + +api.add_resource(PluginInvokeModelApi, '/invoke/model') +api.add_resource(PluginInvokeToolApi, '/invoke/tool') +api.add_resource(PluginInvokeNodeApi, '/invoke/node') diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py new file mode 100644 index 00000000000000..d604e84a9dcc5e --- /dev/null +++ b/api/controllers/inner_api/plugin/wraps.py @@ -0,0 +1,47 @@ +from collections.abc import Callable +from functools import wraps +from typing import Optional + +from flask_restful import reqparse + +from extensions.ext_database import db +from models.account import Tenant + + +def get_tenant(view: Optional[Callable] = None): + def decorator(view_func): + @wraps(view_func) + def decorated_view(*args, **kwargs): + # fetch json body + parser = reqparse.RequestParser() + parser.add_argument('tenant_id', type=str, required=True, location='json') + parser.add_argument('user_id', type=str, required=True, location='json') + + kwargs = parser.parse_args() + + user_id = kwargs.get('user_id') + tenant_id = kwargs.get('tenant_id') + + del kwargs['tenant_id'] + del kwargs['user_id'] + + try: + tenant_model = db.session.query(Tenant).filter( + Tenant.id == tenant_id, + ).first() + 
except Exception: + raise ValueError('tenant not found') + + if not tenant_model: + raise ValueError('tenant not found') + + kwargs['tenant_model'] = tenant_model + kwargs['user_id'] = user_id + + return view_func(*args, **kwargs) + return decorated_view + + if view is None: + return decorator + else: + return decorator(view) diff --git a/api/controllers/inner_api/workspace/workspace.py b/api/controllers/inner_api/workspace/workspace.py index 06610d89330837..791d37f8686ef2 100644 --- a/api/controllers/inner_api/workspace/workspace.py +++ b/api/controllers/inner_api/workspace/workspace.py @@ -2,7 +2,7 @@ from controllers.console.setup import setup_required from controllers.inner_api import api -from controllers.inner_api.wraps import inner_api_only +from controllers.inner_api.wraps import enterprise_inner_api_only from events.tenant_event import tenant_was_created from models.account import Account from services.account_service import TenantService @@ -11,7 +11,7 @@ class EnterpriseWorkspace(Resource): @setup_required - @inner_api_only + @enterprise_inner_api_only def post(self): parser = reqparse.RequestParser() parser.add_argument('name', type=str, required=True, location='json') diff --git a/api/controllers/inner_api/wraps.py b/api/controllers/inner_api/wraps.py index 07cd38bc8561e9..0793a67eec01a5 100644 --- a/api/controllers/inner_api/wraps.py +++ b/api/controllers/inner_api/wraps.py @@ -5,11 +5,12 @@ from flask import abort, current_app, request +from configs import dify_config from extensions.ext_database import db from models.model import EndUser -def inner_api_only(view): +def enterprise_inner_api_only(view): @wraps(view) def decorated(*args, **kwargs): if not current_app.config['INNER_API']: @@ -25,7 +26,7 @@ def decorated(*args, **kwargs): return decorated -def inner_api_user_auth(view): +def enterprise_inner_api_user_auth(view): @wraps(view) def decorated(*args, **kwargs): if not current_app.config['INNER_API']: @@ -59,3 +60,18 @@ def decorated(*args, 
**kwargs): return view(*args, **kwargs) return decorated + +def plugin_inner_api_only(view): + @wraps(view) + def decorated(*args, **kwargs): + if not dify_config.PLUGIN_INNER_API_KEY: + abort(404) + + # get header 'X-Inner-Api-Key' + inner_api_key = request.headers.get('X-Inner-Api-Key') + if not inner_api_key or inner_api_key != dify_config.PLUGIN_INNER_API_KEY: + abort(404) + + return view(*args, **kwargs) + + return decorated \ No newline at end of file diff --git a/api/services/plugin/plugin_invoke_service.py b/api/services/plugin/plugin_invoke_service.py new file mode 100644 index 00000000000000..131de1ec1a5a0a --- /dev/null +++ b/api/services/plugin/plugin_invoke_service.py @@ -0,0 +1,16 @@ +from collections.abc import Generator +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from models.account import Tenant + + +class PluginInvokeService: + @classmethod + def invoke_tool(cls, user_id: str, tenant: Tenant, + tool_provider: str, tool_name: str, + tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage]: + """ + Invokes a tool with the given user ID and tool parameters. 
+ """ + \ No newline at end of file From 563d81277b74fb6e87bbcdf5f81b6e3832112fdd Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Tue, 9 Jul 2024 15:37:56 +0800 Subject: [PATCH 002/325] refactor: tool response to generator --- api/controllers/inner_api/plugin/plugin.py | 2 + api/core/agent/entities.py | 6 +- .../plugin_tool_callback_handler.py | 5 ++ api/core/tools/tool/api_tool.py | 5 +- api/core/tools/tool/dataset_retriever_tool.py | 5 +- api/core/tools/tool/tool.py | 8 +-- api/core/tools/tool/workflow_tool.py | 12 ++-- api/core/tools/tool_engine.py | 66 +++++++++++++++---- api/core/tools/tool_manager.py | 50 ++++---------- api/core/tools/utils/configuration.py | 7 +- api/core/tools/utils/message_transformer.py | 41 ++++++------ api/core/workflow/nodes/tool/entities.py | 3 +- api/core/workflow/nodes/tool/tool_node.py | 2 +- api/services/plugin/plugin_invoke_service.py | 43 ++++++++++-- api/services/tools/tools_transform_service.py | 32 ++++++--- 15 files changed, 177 insertions(+), 110 deletions(-) create mode 100644 api/core/callback_handler/plugin_tool_callback_handler.py diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index 6ce860b877498a..95500ea9f375e9 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -21,6 +21,8 @@ def post(self, user_id: str, tenant_model: Tenant): parser.add_argument('parameters', type=dict, required=True, location='json') args = parser.parse_args() + + class PluginInvokeToolApi(Resource): diff --git a/api/core/agent/entities.py b/api/core/agent/entities.py index 5274224de5772c..189c925d828d1b 100644 --- a/api/core/agent/entities.py +++ b/api/core/agent/entities.py @@ -1,14 +1,16 @@ from enum import Enum -from typing import Any, Literal, Optional, Union +from typing import Any, Optional, Union from pydantic import BaseModel +from core.tools.entities.tool_entities import ToolProviderType + class AgentToolEntity(BaseModel): """ Agent 
Tool Entity. """ - provider_type: Literal["builtin", "api", "workflow"] + provider_type: ToolProviderType provider_id: str tool_name: str tool_parameters: dict[str, Any] = {} diff --git a/api/core/callback_handler/plugin_tool_callback_handler.py b/api/core/callback_handler/plugin_tool_callback_handler.py new file mode 100644 index 00000000000000..e9b9784014fe35 --- /dev/null +++ b/api/core/callback_handler/plugin_tool_callback_handler.py @@ -0,0 +1,5 @@ +from core.callback_handler.agent_tool_callback_handler import DifyAgentCallbackHandler + + +class DifyPluginCallbackHandler(DifyAgentCallbackHandler): + """Callback Handler that prints to std out.""" \ No newline at end of file diff --git a/api/core/tools/tool/api_tool.py b/api/core/tools/tool/api_tool.py index c8b683f9ef14ac..1d600d5efc26e7 100644 --- a/api/core/tools/tool/api_tool.py +++ b/api/core/tools/tool/api_tool.py @@ -1,4 +1,5 @@ import json +from collections.abc import Generator from os import getenv from typing import Any from urllib.parse import urlencode @@ -269,7 +270,7 @@ def _convert_body_property_type(self, property: dict[str, Any], value: Any) -> A except ValueError as e: return value - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage, None, None]: """ invoke http request """ @@ -283,4 +284,4 @@ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMe response = self.validate_and_parse_response(response) # assemble invoke message - return self.create_text_message(response) + yield self.create_text_message(response) diff --git a/api/core/tools/tool/dataset_retriever_tool.py b/api/core/tools/tool/dataset_retriever_tool.py index 1170e1b7a5f065..3e81d84c92a479 100644 --- a/api/core/tools/tool/dataset_retriever_tool.py +++ b/api/core/tools/tool/dataset_retriever_tool.py @@ -1,3 +1,4 @@ +from collections.abc import 
Generator from typing import Any from core.app.app_config.entities import DatasetRetrieveConfigEntity @@ -86,7 +87,7 @@ def get_runtime_parameters(self) -> list[ToolParameter]: def tool_provider_type(self) -> ToolProviderType: return ToolProviderType.DATASET_RETRIEVAL - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage, None, None]: """ invoke dataset retriever tool """ @@ -97,7 +98,7 @@ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMe # invoke dataset retriever tool result = self.retrival_tool._run(query=query) - return self.create_text_message(text=result) + yield self.create_text_message(text=result) def validate_credentials(self, credentials: dict[str, Any], parameters: dict[str, Any]) -> None: """ diff --git a/api/core/tools/tool/tool.py b/api/core/tools/tool/tool.py index 04c09c7f5b8238..291cac5ee3f848 100644 --- a/api/core/tools/tool/tool.py +++ b/api/core/tools/tool/tool.py @@ -1,4 +1,5 @@ from abc import ABC, abstractmethod +from collections.abc import Generator from copy import deepcopy from enum import Enum from typing import Any, Optional, Union @@ -190,7 +191,7 @@ def list_default_image_variables(self) -> list[ToolRuntimeVariable]: return result - def invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> list[ToolInvokeMessage]: + def invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage]: # update tool_parameters if self.runtime.runtime_parameters: tool_parameters.update(self.runtime.runtime_parameters) @@ -203,9 +204,6 @@ def invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> list[ToolInvo tool_parameters=tool_parameters, ) - if not isinstance(result, list): - result = [result] - return result def _transform_tool_parameters_type(self, tool_parameters: dict[str, Any]) -> dict[str, Any]: @@ -221,7 +219,7 
@@ def _transform_tool_parameters_type(self, tool_parameters: dict[str, Any]) -> di return result @abstractmethod - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage, None, None]: pass def validate_credentials(self, credentials: dict[str, Any], parameters: dict[str, Any]) -> None: diff --git a/api/core/tools/tool/workflow_tool.py b/api/core/tools/tool/workflow_tool.py index 071081303c3b2a..d0a9df64799776 100644 --- a/api/core/tools/tool/workflow_tool.py +++ b/api/core/tools/tool/workflow_tool.py @@ -1,5 +1,6 @@ import json import logging +from collections.abc import Generator from copy import deepcopy from typing import Any, Union @@ -34,7 +35,7 @@ def tool_provider_type(self) -> ToolProviderType: def _invoke( self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: + ) -> Generator[ToolInvokeMessage, None, None]: """ invoke the tool """ @@ -46,6 +47,7 @@ def _invoke( from core.app.apps.workflow.app_generator import WorkflowAppGenerator generator = WorkflowAppGenerator() + result = generator.generate( app_model=app, workflow=workflow, @@ -64,16 +66,12 @@ def _invoke( if data.get('error'): raise Exception(data.get('error')) - result = [] - outputs = data.get('outputs', {}) outputs, files = self._extract_files(outputs) for file in files: - result.append(self.create_file_var_message(file)) + yield self.create_file_var_message(file) - result.append(self.create_text_message(json.dumps(outputs, ensure_ascii=False))) - - return result + yield self.create_text_message(json.dumps(outputs, ensure_ascii=False)) def _get_user(self, user_id: str) -> Union[EndUser, Account]: """ diff --git a/api/core/tools/tool_engine.py b/api/core/tools/tool_engine.py index 7615368934bfe5..7d94eedc5fd9b4 100644 --- a/api/core/tools/tool_engine.py +++ 
b/api/core/tools/tool_engine.py @@ -1,4 +1,5 @@ import json +from collections.abc import Generator from copy import deepcopy from datetime import datetime, timezone from mimetypes import guess_type @@ -8,6 +9,7 @@ from core.app.entities.app_invoke_entities import InvokeFrom from core.callback_handler.agent_tool_callback_handler import DifyAgentCallbackHandler +from core.callback_handler.plugin_tool_callback_handler import DifyPluginCallbackHandler from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler from core.file.file_obj import FileTransferMethod from core.ops.ops_trace_manager import TraceQueueManager @@ -64,16 +66,25 @@ def agent_invoke( tool_inputs=tool_parameters ) - meta, response = ToolEngine._invoke(tool, tool_parameters, user_id) - response = ToolFileMessageTransformer.transform_tool_invoke_messages( - messages=response, - user_id=user_id, - tenant_id=tenant_id, + messages = ToolEngine._invoke(tool, tool_parameters, user_id) + invocation_meta_dict = {'meta': None} + + def message_callback(invocation_meta_dict: dict, messages: Generator[ToolInvokeMessage, None, None]): + for message in messages: + if isinstance(message, ToolInvokeMeta): + invocation_meta_dict['meta'] = message + else: + yield message + + messages = ToolFileMessageTransformer.transform_tool_invoke_messages( + messages=message_callback(invocation_meta_dict, messages), + user_id=user_id, + tenant_id=tenant_id, conversation_id=message.conversation_id ) # extract binary data from tool invoke message - binary_files = ToolEngine._extract_tool_response_binary(response) + binary_files = ToolEngine._extract_tool_response_binary(messages) # create message file message_files = ToolEngine._create_message_files( tool_messages=binary_files, @@ -82,7 +93,9 @@ def agent_invoke( user_id=user_id ) - plain_text = ToolEngine._convert_tool_response_to_str(response) + plain_text = ToolEngine._convert_tool_response_to_str(messages) + + meta = invocation_meta_dict['meta'] # 
hit the callback handler agent_tool_callback.on_tool_end( @@ -127,7 +140,7 @@ def workflow_invoke(tool: Tool, tool_parameters: dict, user_id: str, workflow_id: str, workflow_tool_callback: DifyWorkflowCallbackHandler, workflow_call_depth: int, - ) -> list[ToolInvokeMessage]: + ) -> Generator[ToolInvokeMessage, None, None]: """ Workflow invokes the tool with the given arguments. """ @@ -154,10 +167,38 @@ def workflow_invoke(tool: Tool, tool_parameters: dict, except Exception as e: workflow_tool_callback.on_tool_error(e) raise e - + + @staticmethod + def plugin_invoke(tool: Tool, tool_parameters: dict, user_id: str, + callback: DifyPluginCallbackHandler + ) -> Generator[ToolInvokeMessage, None, None]: + """ + Plugin invokes the tool with the given arguments. + """ + try: + # hit the callback handler + callback.on_tool_start( + tool_name=tool.identity.name, + tool_inputs=tool_parameters + ) + + response = tool.invoke(user_id, tool_parameters) + + # hit the callback handler + callback.on_tool_end( + tool_name=tool.identity.name, + tool_inputs=tool_parameters, + tool_outputs=response, + ) + + return response + except Exception as e: + callback.on_tool_error(e) + raise e + @staticmethod def _invoke(tool: Tool, tool_parameters: dict, user_id: str) \ - -> tuple[ToolInvokeMeta, list[ToolInvokeMessage]]: + -> Generator[ToolInvokeMessage | ToolInvokeMeta, None, None]: """ Invoke the tool with the given arguments. 
""" @@ -170,16 +211,15 @@ def _invoke(tool: Tool, tool_parameters: dict, user_id: str) \ 'tool_icon': tool.identity.icon }) try: - response = tool.invoke(user_id, tool_parameters) + yield from tool.invoke(user_id, tool_parameters) except Exception as e: meta.error = str(e) raise ToolEngineInvokeError(meta) finally: ended_at = datetime.now(timezone.utc) meta.time_cost = (ended_at - started_at).total_seconds() + yield meta - return meta, response - @staticmethod def _convert_tool_response_to_str(tool_response: list[ToolInvokeMessage]) -> str: """ diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index e30a905cbcf309..5822841db747c9 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -18,6 +18,7 @@ ApiProviderAuthType, ToolInvokeFrom, ToolParameter, + ToolProviderType, ) from core.tools.errors import ToolProviderNotFoundError from core.tools.provider.api_tool_provider import ApiToolProviderController @@ -26,6 +27,7 @@ from core.tools.tool.api_tool import ApiTool from core.tools.tool.builtin_tool import BuiltinTool from core.tools.tool.tool import Tool +from core.tools.tool.workflow_tool import WorkflowTool from core.tools.tool_label_manager import ToolLabelManager from core.tools.utils.configuration import ( ToolConfigurationManager, @@ -78,37 +80,13 @@ def get_builtin_tool(cls, provider: str, tool_name: str) -> BuiltinTool: return tool @classmethod - def get_tool(cls, provider_type: str, provider_id: str, tool_name: str, tenant_id: str = None) \ - -> Union[BuiltinTool, ApiTool]: - """ - get the tool - - :param provider_type: the type of the provider - :param provider_name: the name of the provider - :param tool_name: the name of the tool - - :return: the tool - """ - if provider_type == 'builtin': - return cls.get_builtin_tool(provider_id, tool_name) - elif provider_type == 'api': - if tenant_id is None: - raise ValueError('tenant id is required for api provider') - api_provider, _ = 
cls.get_api_provider_controller(tenant_id, provider_id) - return api_provider.get_tool(tool_name) - elif provider_type == 'app': - raise NotImplementedError('app provider not implemented') - else: - raise ToolProviderNotFoundError(f'provider type {provider_type} not found') - - @classmethod - def get_tool_runtime(cls, provider_type: str, + def get_tool_runtime(cls, provider_type: ToolProviderType, provider_id: str, tool_name: str, tenant_id: str, invoke_from: InvokeFrom = InvokeFrom.DEBUGGER, tool_invoke_from: ToolInvokeFrom = ToolInvokeFrom.AGENT) \ - -> Union[BuiltinTool, ApiTool]: + -> Union[BuiltinTool, ApiTool, WorkflowTool]: """ get the tool runtime @@ -118,7 +96,7 @@ def get_tool_runtime(cls, provider_type: str, :return: the tool """ - if provider_type == 'builtin': + if provider_type == ToolProviderType.BUILT_IN: builtin_tool = cls.get_builtin_tool(provider_id, tool_name) # check if the builtin tool need credentials @@ -155,7 +133,7 @@ def get_tool_runtime(cls, provider_type: str, 'tool_invoke_from': tool_invoke_from, }) - elif provider_type == 'api': + elif provider_type == ToolProviderType.API: if tenant_id is None: raise ValueError('tenant id is required for api provider') @@ -171,7 +149,7 @@ def get_tool_runtime(cls, provider_type: str, 'invoke_from': invoke_from, 'tool_invoke_from': tool_invoke_from, }) - elif provider_type == 'workflow': + elif provider_type == ToolProviderType.WORKFLOW: workflow_provider = db.session.query(WorkflowToolProvider).filter( WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == provider_id @@ -190,10 +168,10 @@ def get_tool_runtime(cls, provider_type: str, 'invoke_from': invoke_from, 'tool_invoke_from': tool_invoke_from, }) - elif provider_type == 'app': + elif provider_type == ToolProviderType.APP: raise NotImplementedError('app provider not implemented') else: - raise ToolProviderNotFoundError(f'provider type {provider_type} not found') + raise ToolProviderNotFoundError(f'provider type 
{provider_type.value} not found') @classmethod def _init_runtime_parameter(cls, parameter_rule: ToolParameter, parameters: dict) -> Union[str, int, float, bool]: @@ -554,7 +532,7 @@ def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict: }) @classmethod - def get_tool_icon(cls, tenant_id: str, provider_type: str, provider_id: str) -> Union[str, dict]: + def get_tool_icon(cls, tenant_id: str, provider_type: ToolProviderType, provider_id: str) -> Union[str, dict]: """ get the tool icon @@ -563,14 +541,12 @@ def get_tool_icon(cls, tenant_id: str, provider_type: str, provider_id: str) -> :param provider_id: the id of the provider :return: """ - provider_type = provider_type - provider_id = provider_id - if provider_type == 'builtin': + if provider_type == ToolProviderType.BUILT_IN: return (current_app.config.get("CONSOLE_API_URL") + "/console/api/workspaces/current/tool-provider/builtin/" + provider_id + "/icon") - elif provider_type == 'api': + elif provider_type == ToolProviderType.API: try: provider: ApiToolProvider = db.session.query(ApiToolProvider).filter( ApiToolProvider.tenant_id == tenant_id, @@ -582,7 +558,7 @@ def get_tool_icon(cls, tenant_id: str, provider_type: str, provider_id: str) -> "background": "#252525", "content": "\ud83d\ude01" } - elif provider_type == 'workflow': + elif provider_type == ToolProviderType.WORKFLOW: provider: WorkflowToolProvider = db.session.query(WorkflowToolProvider).filter( WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == provider_id diff --git a/api/core/tools/utils/configuration.py b/api/core/tools/utils/configuration.py index b213879e960b14..68b0cea24f97eb 100644 --- a/api/core/tools/utils/configuration.py +++ b/api/core/tools/utils/configuration.py @@ -9,6 +9,7 @@ from core.tools.entities.tool_entities import ( ToolParameter, ToolProviderCredentials, + ToolProviderType, ) from core.tools.provider.tool_provider import ToolProviderController from core.tools.tool.tool import Tool @@ -108,7 
+109,7 @@ class ToolParameterConfigurationManager(BaseModel): tenant_id: str tool_runtime: Tool provider_name: str - provider_type: str + provider_type: ToolProviderType identity_id: str def _deep_copy(self, parameters: dict[str, Any]) -> dict[str, Any]: @@ -191,7 +192,7 @@ def decrypt_tool_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]: """ cache = ToolParameterCache( tenant_id=self.tenant_id, - provider=f'{self.provider_type}.{self.provider_name}', + provider=f'{self.provider_type.value}.{self.provider_name}', tool_name=self.tool_runtime.identity.name, cache_type=ToolParameterCacheType.PARAMETER, identity_id=self.identity_id @@ -221,7 +222,7 @@ def decrypt_tool_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]: def delete_tool_parameters_cache(self): cache = ToolParameterCache( tenant_id=self.tenant_id, - provider=f'{self.provider_type}.{self.provider_name}', + provider=f'{self.provider_type.value}.{self.provider_name}', tool_name=self.tool_runtime.identity.name, cache_type=ToolParameterCacheType.PARAMETER, identity_id=self.identity_id diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py index ef9e5b67ae2ab6..770abc683c581c 100644 --- a/api/core/tools/utils/message_transformer.py +++ b/api/core/tools/utils/message_transformer.py @@ -1,4 +1,5 @@ import logging +from collections.abc import Generator from mimetypes import guess_extension from core.file.file_obj import FileTransferMethod, FileType, FileVar @@ -9,20 +10,18 @@ class ToolFileMessageTransformer: @classmethod - def transform_tool_invoke_messages(cls, messages: list[ToolInvokeMessage], + def transform_tool_invoke_messages(cls, messages: Generator[ToolInvokeMessage, None, None], user_id: str, tenant_id: str, - conversation_id: str) -> list[ToolInvokeMessage]: + conversation_id: str) -> Generator[ToolInvokeMessage, None, None]: """ Transform tool message and handle file download """ - result = [] - for message in messages: if 
message.type == ToolInvokeMessage.MessageType.TEXT: - result.append(message) + yield message elif message.type == ToolInvokeMessage.MessageType.LINK: - result.append(message) + yield message elif message.type == ToolInvokeMessage.MessageType.IMAGE: # try to download image try: @@ -35,20 +34,20 @@ def transform_tool_invoke_messages(cls, messages: list[ToolInvokeMessage], url = f'/files/tools/{file.id}{guess_extension(file.mimetype) or ".png"}' - result.append(ToolInvokeMessage( + yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.IMAGE_LINK, message=url, save_as=message.save_as, meta=message.meta.copy() if message.meta is not None else {}, - )) + ) except Exception as e: logger.exception(e) - result.append(ToolInvokeMessage( + yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.TEXT, message=f"Failed to download image: {message.message}, you can try to download it yourself.", meta=message.meta.copy() if message.meta is not None else {}, save_as=message.save_as, - )) + ) elif message.type == ToolInvokeMessage.MessageType.BLOB: # get mime type and save blob to storage mimetype = message.meta.get('mime_type', 'octet/stream') @@ -67,43 +66,41 @@ def transform_tool_invoke_messages(cls, messages: list[ToolInvokeMessage], # check if file is image if 'image' in mimetype: - result.append(ToolInvokeMessage( + yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.IMAGE_LINK, message=url, save_as=message.save_as, meta=message.meta.copy() if message.meta is not None else {}, - )) + ) else: - result.append(ToolInvokeMessage( + yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.LINK, message=url, save_as=message.save_as, meta=message.meta.copy() if message.meta is not None else {}, - )) + ) elif message.type == ToolInvokeMessage.MessageType.FILE_VAR: file_var: FileVar = message.meta.get('file_var') if file_var: if file_var.transfer_method == FileTransferMethod.TOOL_FILE: url = cls.get_tool_file_url(file_var.related_id, file_var.extension) if 
file_var.type == FileType.IMAGE: - result.append(ToolInvokeMessage( + yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.IMAGE_LINK, message=url, save_as=message.save_as, meta=message.meta.copy() if message.meta is not None else {}, - )) + ) else: - result.append(ToolInvokeMessage( + yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.LINK, message=url, save_as=message.save_as, meta=message.meta.copy() if message.meta is not None else {}, - )) + ) else: - result.append(message) - - return result + yield message @classmethod def get_tool_file_url(cls, tool_file_id: str, extension: str) -> str: - return f'/files/tools/{tool_file_id}{extension or ".bin"}' \ No newline at end of file + return f'/files/tools/{tool_file_id}{extension or ".bin"}' diff --git a/api/core/workflow/nodes/tool/entities.py b/api/core/workflow/nodes/tool/entities.py index 2e4743c483229f..bc9bfde4db4a0d 100644 --- a/api/core/workflow/nodes/tool/entities.py +++ b/api/core/workflow/nodes/tool/entities.py @@ -3,12 +3,13 @@ from pydantic import BaseModel, field_validator from pydantic_core.core_schema import ValidationInfo +from core.tools.entities.tool_entities import ToolProviderType from core.workflow.entities.base_node_data_entities import BaseNodeData class ToolEntity(BaseModel): provider_id: str - provider_type: Literal['builtin', 'api', 'workflow'] + provider_type: ToolProviderType provider_name: str # redundancy tool_name: str tool_label: str # redundancy diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index cddea03bf862ad..f77ccd9bd677b6 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -32,7 +32,7 @@ def _run(self, variable_pool: VariablePool) -> NodeRunResult: # fetch tool icon tool_info = { - 'provider_type': node_data.provider_type, + 'provider_type': node_data.provider_type.value, 'provider_id': node_data.provider_id } diff --git 
a/api/services/plugin/plugin_invoke_service.py b/api/services/plugin/plugin_invoke_service.py index 131de1ec1a5a0a..317b42a6e16f75 100644 --- a/api/services/plugin/plugin_invoke_service.py +++ b/api/services/plugin/plugin_invoke_service.py @@ -1,16 +1,49 @@ from collections.abc import Generator -from typing import Any +from typing import Any, Union -from core.tools.entities.tool_entities import ToolInvokeMessage +from core.app.entities.app_invoke_entities import InvokeFrom +from core.callback_handler.plugin_tool_callback_handler import DifyPluginCallbackHandler +from core.model_runtime.entities.model_entities import ModelType +from core.tools.entities.tool_entities import ToolInvokeMessage, ToolProviderType +from core.tools.tool_engine import ToolEngine +from core.tools.tool_manager import ToolManager +from core.tools.utils.message_transformer import ToolFileMessageTransformer +from core.workflow.entities.node_entities import NodeType from models.account import Tenant +from services.tools.tools_transform_service import ToolTransformService class PluginInvokeService: @classmethod - def invoke_tool(cls, user_id: str, tenant: Tenant, - tool_provider: str, tool_name: str, + def invoke_tool(cls, user_id: str, invoke_from: InvokeFrom, tenant: Tenant, + tool_provider_type: ToolProviderType, tool_provider: str, tool_name: str, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage]: """ Invokes a tool with the given user ID and tool parameters. 
""" - \ No newline at end of file + tool_runtime = ToolManager.get_tool_runtime(tool_provider_type, provider_id=tool_provider, + tool_name=tool_name, tenant_id=tenant.id, + invoke_from=invoke_from) + + response = ToolEngine.plugin_invoke(tool_runtime, + tool_parameters, + user_id, + callback=DifyPluginCallbackHandler()) + response = ToolFileMessageTransformer.transform_tool_invoke_messages(response) + return ToolTransformService.transform_messages_to_dict(response) + + @classmethod + def invoke_model(cls, user_id: str, tenant: Tenant, + model_provider: str, model_name: str, model_type: ModelType, + model_parameters: dict[str, Any]) -> Union[dict, Generator[ToolInvokeMessage]]: + """ + Invokes a model with the given user ID and model parameters. + """ + + @classmethod + def invoke_workflow_node(cls, user_id: str, tenant: Tenant, + node_type: NodeType, node_data: dict[str, Any], + inputs: dict[str, Any]) -> Generator[ToolInvokeMessage]: + """ + Invokes a workflow node with the given user ID and node parameters. 
+ """ \ No newline at end of file diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 5c777324689341..08023a4a92dfda 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -1,5 +1,6 @@ import json import logging +from collections.abc import Generator from typing import Optional, Union from flask import current_app @@ -9,6 +10,7 @@ from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ( ApiProviderAuthType, + ToolInvokeMessage, ToolParameter, ToolProviderCredentials, ToolProviderType, @@ -24,8 +26,8 @@ logger = logging.getLogger(__name__) class ToolTransformService: - @staticmethod - def get_tool_provider_icon_url(provider_type: str, provider_name: str, icon: str) -> Union[str, dict]: + @classmethod + def get_tool_provider_icon_url(cls, provider_type: str, provider_name: str, icon: str) -> Union[str, dict]: """ get tool provider icon url """ @@ -45,8 +47,8 @@ def get_tool_provider_icon_url(provider_type: str, provider_name: str, icon: str return '' - @staticmethod - def repack_provider(provider: Union[dict, UserToolProvider]): + @classmethod + def repack_provider(cls, provider: Union[dict, UserToolProvider]): """ repack provider @@ -65,8 +67,9 @@ def repack_provider(provider: Union[dict, UserToolProvider]): icon=provider.icon ) - @staticmethod + @classmethod def builtin_provider_to_user_provider( + cls, provider_controller: BuiltinToolProviderController, db_provider: Optional[BuiltinToolProvider], decrypt_credentials: bool = True, @@ -126,8 +129,9 @@ def builtin_provider_to_user_provider( return result - @staticmethod + @classmethod def api_provider_to_controller( + cls, db_provider: ApiToolProvider, ) -> ApiToolProviderController: """ @@ -142,8 +146,9 @@ def api_provider_to_controller( return controller - @staticmethod + @classmethod def workflow_provider_to_controller( + cls, db_provider: 
WorkflowToolProvider ) -> WorkflowToolProviderController: """ @@ -179,8 +184,9 @@ def workflow_provider_to_user_provider( labels=labels or [] ) - @staticmethod + @classmethod def api_provider_to_user_provider( + cls, provider_controller: ApiToolProviderController, db_provider: ApiToolProvider, decrypt_credentials: bool = True, @@ -231,8 +237,9 @@ def api_provider_to_user_provider( return result - @staticmethod + @classmethod def tool_to_user_tool( + cls, tool: Union[ApiToolBundle, WorkflowTool, Tool], credentials: dict = None, tenant_id: str = None, @@ -287,4 +294,9 @@ def tool_to_user_tool( ), parameters=tool.parameters, labels=labels - ) \ No newline at end of file + ) + + @classmethod + def transform_messages_to_dict(cls, responses: Generator[ToolInvokeMessage, None, None]): + for response in responses: + yield response.model_dump() \ No newline at end of file From e1db77eec2fae8542c96e009647ae24d29ee74e7 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Mon, 15 Jul 2024 16:00:11 +0800 Subject: [PATCH 003/325] fix --- api/core/agent/entities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/core/agent/entities.py b/api/core/agent/entities.py index 189c925d828d1b..776e40e9fee753 100644 --- a/api/core/agent/entities.py +++ b/api/core/agent/entities.py @@ -76,5 +76,5 @@ class Strategy(Enum): model: str strategy: Strategy prompt: Optional[AgentPromptEntity] = None - tools: list[AgentToolEntity] = None + tools: Optional[list[AgentToolEntity]] = None max_iteration: int = 5 From f29b44acd807734eba64dd8c6a6ffbcf4e2150c4 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Mon, 29 Jul 2024 16:40:04 +0800 Subject: [PATCH 004/325] feat: support plugin inner api --- api/controllers/inner_api/plugin/plugin.py | 73 ++++++--- api/controllers/inner_api/plugin/wraps.py | 25 +++ api/core/plugin/entities/request.py | 42 +++++ api/core/tools/entities/tool_entities.py | 13 +- api/core/tools/tool/tool.py | 175 ++++++++++----------- 5 files changed, 210 insertions(+), 118 
deletions(-) create mode 100644 api/core/plugin/entities/request.py diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index 95500ea9f375e9..cc2a49a41fb050 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -1,34 +1,68 @@ - from flask_restful import Resource, reqparse from controllers.console.setup import setup_required from controllers.inner_api import api -from controllers.inner_api.plugin.wraps import get_tenant +from controllers.inner_api.plugin.wraps import get_tenant, plugin_data from controllers.inner_api.wraps import plugin_inner_api_only +from core.plugin.entities.request import RequestInvokeLLM, RequestInvokeModeration, RequestInvokeRerank, RequestInvokeSpeech2Text, RequestInvokeTTS, RequestInvokeTextEmbedding, RequestInvokeTool from libs.helper import compact_generate_response from models.account import Tenant from services.plugin.plugin_invoke_service import PluginInvokeService -class PluginInvokeModelApi(Resource): +class PluginInvokeLLMApi(Resource): @setup_required @plugin_inner_api_only @get_tenant - def post(self, user_id: str, tenant_model: Tenant): - parser = reqparse.RequestParser() - parser.add_argument('provider', type=dict, required=True, location='json') - parser.add_argument('model', type=dict, required=True, location='json') - parser.add_argument('parameters', type=dict, required=True, location='json') + @plugin_data(payload_type=RequestInvokeLLM) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeLLM): + pass - args = parser.parse_args() +class PluginInvokeTextEmbeddingApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + @plugin_data(payload_type=RequestInvokeTextEmbedding) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeTextEmbedding): + pass + +class PluginInvokeRerankApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + 
@plugin_data(payload_type=RequestInvokeRerank) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeRerank): + pass + +class PluginInvokeTTSApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + @plugin_data(payload_type=RequestInvokeTTS) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeTTS): + pass + +class PluginInvokeSpeech2TextApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + @plugin_data(payload_type=RequestInvokeSpeech2Text) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeSpeech2Text): + pass - - +class PluginInvokeModerationApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + @plugin_data(payload_type=RequestInvokeModeration) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeModeration): + pass class PluginInvokeToolApi(Resource): @setup_required @plugin_inner_api_only @get_tenant + @plugin_data(payload_type=RequestInvokeTool) def post(self, user_id: str, tenant_model: Tenant): parser = reqparse.RequestParser() parser.add_argument('provider', type=dict, required=True, location='json') @@ -37,9 +71,9 @@ def post(self, user_id: str, tenant_model: Tenant): args = parser.parse_args() - response = PluginInvokeService.invoke_tool(user_id, tenant_model, - args['provider'], args['tool'], - args['parameters']) + response = PluginInvokeService.invoke_tool( + user_id, tenant_model, args['provider'], args['tool'], args['parameters'] + ) return compact_generate_response(response) @@ -51,11 +85,14 @@ def post(self, user_id: str, tenant_model: Tenant): parser = reqparse.RequestParser() args = parser.parse_args() - return { - 'message': 'success' - } + return {'message': 'success'} -api.add_resource(PluginInvokeModelApi, '/invoke/model') +api.add_resource(PluginInvokeLLMApi, '/invoke/llm') +api.add_resource(PluginInvokeTextEmbeddingApi, '/invoke/text-embedding') +api.add_resource(PluginInvokeRerankApi, 
'/invoke/rerank') +api.add_resource(PluginInvokeTTSApi, '/invoke/tts') +api.add_resource(PluginInvokeSpeech2TextApi, '/invoke/speech2text') +api.add_resource(PluginInvokeModerationApi, '/invoke/moderation') api.add_resource(PluginInvokeToolApi, '/invoke/tool') api.add_resource(PluginInvokeNodeApi, '/invoke/node') diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py index d604e84a9dcc5e..eb753218ff0b0b 100644 --- a/api/controllers/inner_api/plugin/wraps.py +++ b/api/controllers/inner_api/plugin/wraps.py @@ -2,7 +2,9 @@ from functools import wraps from typing import Optional +from flask import request from flask_restful import reqparse +from pydantic import BaseModel from extensions.ext_database import db from models.account import Tenant @@ -45,3 +47,26 @@ def decorated_view(*args, **kwargs): return decorator else: return decorator(view) + +def plugin_data(view: Optional[Callable] = None, *, payload_type: type[BaseModel]): + def decorator(view_func): + def decorated_view(*args, **kwargs): + try: + data = request.get_json() + except Exception: + raise ValueError('invalid json') + + try: + payload = payload_type(**data) + except Exception as e: + raise ValueError(f'invalid payload: {str(e)}') + + kwargs['payload'] = payload + return view_func(*args, **kwargs) + + return decorated_view + + if view is None: + return decorator + else: + return decorator(view) diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py new file mode 100644 index 00000000000000..62db6396a88e25 --- /dev/null +++ b/api/core/plugin/entities/request.py @@ -0,0 +1,42 @@ +from pydantic import BaseModel + + +class RequestInvokeTool(BaseModel): + """ + Request to invoke a tool + """ + +class RequestInvokeLLM(BaseModel): + """ + Request to invoke LLM + """ + +class RequestInvokeTextEmbedding(BaseModel): + """ + Request to invoke text embedding + """ + +class RequestInvokeRerank(BaseModel): + """ + Request to invoke rerank + 
""" + +class RequestInvokeTTS(BaseModel): + """ + Request to invoke TTS + """ + +class RequestInvokeSpeech2Text(BaseModel): + """ + Request to invoke speech2text + """ + +class RequestInvokeModeration(BaseModel): + """ + Request to invoke moderation + """ + +class RequestInvokeNode(BaseModel): + """ + Request to invoke node + """ \ No newline at end of file diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 569a1d3238f87f..5213f35a4b41c9 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -103,8 +103,8 @@ class MessageType(Enum): """ plain text, image url or link url """ - message: Union[str, bytes, dict] = None - meta: dict[str, Any] = None + message: Optional[Union[str, bytes, dict]] = None + meta: Optional[dict[str, Any]] = None save_as: str = '' class ToolInvokeMessageBinary(BaseModel): @@ -168,16 +168,19 @@ def get_simple_instance(cls, """ # convert options to ToolParameterOption if options: - options = [ToolParameterOption(value=option, label=I18nObject(en_US=option, zh_Hans=option)) for option in options] + option_objs = [ToolParameterOption(value=option, label=I18nObject(en_US=option, zh_Hans=option)) for option in options] + else: + option_objs = None return cls( name=name, label=I18nObject(en_US='', zh_Hans=''), + placeholder=None, human_description=I18nObject(en_US='', zh_Hans=''), type=type, form=cls.ToolParameterForm.LLM, llm_description=llm_description, required=required, - options=options, + options=option_objs, ) class ToolProviderIdentity(BaseModel): @@ -245,7 +248,7 @@ def to_dict(self) -> dict: 'default': self.default, 'options': self.options, 'help': self.help.to_dict() if self.help else None, - 'label': self.label.to_dict(), + 'label': self.label.to_dict() if self.label else None, 'url': self.url, 'placeholder': self.placeholder.to_dict() if self.placeholder else None, } diff --git a/api/core/tools/tool/tool.py b/api/core/tools/tool/tool.py 
index e679434bdebb5b..134016b28d17dc 100644 --- a/api/core/tools/tool/tool.py +++ b/api/core/tools/tool/tool.py @@ -40,8 +40,9 @@ def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParamete class Runtime(BaseModel): """ - Meta data of a tool call processing + Meta data of a tool call processing """ + def __init__(self, **data: Any): super().__init__(**data) if not self.runtime_parameters: @@ -65,10 +66,10 @@ class VARIABLE_KEY(Enum): def fork_tool_runtime(self, runtime: dict[str, Any]) -> 'Tool': """ - fork a new tool with meta data + fork a new tool with meta data - :param meta: the meta data of a tool call processing, tenant_id is required - :return: the new tool + :param meta: the meta data of a tool call processing, tenant_id is required + :return: the new tool """ return self.__class__( identity=self.identity.model_copy() if self.identity else None, @@ -76,82 +77,82 @@ def fork_tool_runtime(self, runtime: dict[str, Any]) -> 'Tool': description=self.description.model_copy() if self.description else None, runtime=Tool.Runtime(**runtime), ) - + @abstractmethod def tool_provider_type(self) -> ToolProviderType: """ - get the tool provider type + get the tool provider type - :return: the tool provider type + :return: the tool provider type """ - + def load_variables(self, variables: ToolRuntimeVariablePool): """ - load variables from database + load variables from database - :param conversation_id: the conversation id + :param conversation_id: the conversation id """ self.variables = variables def set_image_variable(self, variable_name: str, image_key: str) -> None: """ - set an image variable + set an image variable """ if not self.variables: return - + self.variables.set_file(self.identity.name, variable_name, image_key) def set_text_variable(self, variable_name: str, text: str) -> None: """ - set a text variable + set a text variable """ if not self.variables: return - + self.variables.set_text(self.identity.name, variable_name, text) - + def 
get_variable(self, name: Union[str, Enum]) -> Optional[ToolRuntimeVariable]: """ - get a variable + get a variable - :param name: the name of the variable - :return: the variable + :param name: the name of the variable + :return: the variable """ if not self.variables: return None - + if isinstance(name, Enum): name = name.value - + for variable in self.variables.pool: if variable.name == name: return variable - + return None def get_default_image_variable(self) -> Optional[ToolRuntimeVariable]: """ - get the default image variable + get the default image variable - :return: the image variable + :return: the image variable """ if not self.variables: return None - + return self.get_variable(self.VARIABLE_KEY.IMAGE) - + def get_variable_file(self, name: Union[str, Enum]) -> Optional[bytes]: """ - get a variable file + get a variable file - :param name: the name of the variable - :return: the variable file + :param name: the name of the variable + :return: the variable file """ variable = self.get_variable(name) if not variable: return None - + if not isinstance(variable, ToolRuntimeImageVariable): return None @@ -160,31 +161,31 @@ def get_variable_file(self, name: Union[str, Enum]) -> Optional[bytes]: file_binary = ToolFileManager.get_file_binary_by_message_file_id(message_file_id) if not file_binary: return None - + return file_binary[0] - + def list_variables(self) -> list[ToolRuntimeVariable]: """ - list all variables + list all variables - :return: the variables + :return: the variables """ if not self.variables: return [] - + return self.variables.pool - + def list_default_image_variables(self) -> list[ToolRuntimeVariable]: """ - list all image variables + list all image variables - :return: the image variables + :return: the image variables """ if not self.variables: return [] - + result = [] - + for variable in self.variables.pool: if variable.name.startswith(self.VARIABLE_KEY.IMAGE.value): result.append(variable) @@ -215,38 +216,40 @@ def 
_transform_tool_parameters_type(self, tool_parameters: Mapping[str, Any]) -> result = deepcopy(tool_parameters) for parameter in self.parameters or []: if parameter.name in tool_parameters: - result[parameter.name] = ToolParameterConverter.cast_parameter_by_type(tool_parameters[parameter.name], parameter.type) + result[parameter.name] = ToolParameterConverter.cast_parameter_by_type( + tool_parameters[parameter.name], parameter.type + ) return result @abstractmethod def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage, None, None]: pass - + def validate_credentials(self, credentials: dict[str, Any], parameters: dict[str, Any]) -> None: """ - validate the credentials + validate the credentials - :param credentials: the credentials - :param parameters: the parameters + :param credentials: the credentials + :param parameters: the parameters """ pass def get_runtime_parameters(self) -> list[ToolParameter]: """ - get the runtime parameters + get the runtime parameters - interface for developer to dynamic change the parameters of a tool depends on the variables pool + interface for developer to dynamic change the parameters of a tool depends on the variables pool - :return: the runtime parameters + :return: the runtime parameters """ return self.parameters or [] - + def get_all_runtime_parameters(self) -> list[ToolParameter]: """ - get all runtime parameters + get all runtime parameters - :return: all runtime parameters + :return: all runtime parameters """ parameters = self.parameters or [] parameters = parameters.copy() @@ -275,68 +278,50 @@ def get_all_runtime_parameters(self) -> list[ToolParameter]: parameters.append(parameter) return parameters - + def create_image_message(self, image: str, save_as: str = '') -> ToolInvokeMessage: """ - create an image message + create an image message - :param image: the url of the image - :return: the image message + :param image: the url of the image + :return: the image message """ - 
return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.IMAGE, - message=image, - save_as=save_as) - + return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.IMAGE, message=image, save_as=save_as) + def create_file_var_message(self, file_var: FileVar) -> ToolInvokeMessage: - return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.FILE_VAR, - message='', - meta={ - 'file_var': file_var - }, - save_as='') - + return ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.FILE_VAR, message='', meta={'file_var': file_var}, save_as='' + ) + def create_link_message(self, link: str, save_as: str = '') -> ToolInvokeMessage: """ - create a link message + create a link message - :param link: the url of the link - :return: the link message + :param link: the url of the link + :return: the link message """ - return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.LINK, - message=link, - save_as=save_as) - + return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.LINK, message=link, save_as=save_as) + def create_text_message(self, text: str, save_as: str = '') -> ToolInvokeMessage: """ - create a text message + create a text message - :param text: the text - :return: the text message + :param text: the text + :return: the text message """ - return ToolInvokeMessage( - type=ToolInvokeMessage.MessageType.TEXT, - message=text, - save_as=save_as - ) - - def create_blob_message(self, blob: bytes, meta: dict = None, save_as: str = '') -> ToolInvokeMessage: + return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.TEXT, message=text, save_as=save_as) + + def create_blob_message(self, blob: bytes, meta: Optional[dict] = None, save_as: str = '') -> ToolInvokeMessage: """ - create a blob message + create a blob message - :param blob: the blob - :return: the blob message + :param blob: the blob + :return: the blob message """ - return ToolInvokeMessage( - type=ToolInvokeMessage.MessageType.BLOB, - message=blob, meta=meta, - save_as=save_as - ) + return 
ToolInvokeMessage(type=ToolInvokeMessage.MessageType.BLOB, message=blob, meta=meta, save_as=save_as) def create_json_message(self, object: dict) -> ToolInvokeMessage: """ - create a json message + create a json message """ - return ToolInvokeMessage( - type=ToolInvokeMessage.MessageType.JSON, - message=object - ) + return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.JSON, message=object) From d52476c1c9fc22f15ded6dc52b8217b3a5491c12 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Mon, 29 Jul 2024 18:57:34 +0800 Subject: [PATCH 005/325] feat: support backwards invocation --- api/controllers/inner_api/plugin/plugin.py | 45 ++++++++---- .../app/features/rate_limiting/rate_limit.py | 4 +- api/core/tools/entities/tool_entities.py | 8 ++- api/libs/helper.py | 72 +++++++++---------- 4 files changed, 77 insertions(+), 52 deletions(-) diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index cc2a49a41fb050..b3ebf81bf6f7a0 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -1,10 +1,20 @@ +import time from flask_restful import Resource, reqparse from controllers.console.setup import setup_required from controllers.inner_api import api from controllers.inner_api.plugin.wraps import get_tenant, plugin_data from controllers.inner_api.wraps import plugin_inner_api_only -from core.plugin.entities.request import RequestInvokeLLM, RequestInvokeModeration, RequestInvokeRerank, RequestInvokeSpeech2Text, RequestInvokeTTS, RequestInvokeTextEmbedding, RequestInvokeTool +from core.plugin.entities.request import ( + RequestInvokeLLM, + RequestInvokeModeration, + RequestInvokeRerank, + RequestInvokeSpeech2Text, + RequestInvokeTextEmbedding, + RequestInvokeTool, + RequestInvokeTTS, +) +from core.tools.entities.tool_entities import ToolInvokeMessage from libs.helper import compact_generate_response from models.account import Tenant from services.plugin.plugin_invoke_service import 
PluginInvokeService @@ -18,6 +28,7 @@ class PluginInvokeLLMApi(Resource): def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeLLM): pass + class PluginInvokeTextEmbeddingApi(Resource): @setup_required @plugin_inner_api_only @@ -26,6 +37,7 @@ class PluginInvokeTextEmbeddingApi(Resource): def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeTextEmbedding): pass + class PluginInvokeRerankApi(Resource): @setup_required @plugin_inner_api_only @@ -34,6 +46,7 @@ class PluginInvokeRerankApi(Resource): def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeRerank): pass + class PluginInvokeTTSApi(Resource): @setup_required @plugin_inner_api_only @@ -42,6 +55,7 @@ class PluginInvokeTTSApi(Resource): def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeTTS): pass + class PluginInvokeSpeech2TextApi(Resource): @setup_required @plugin_inner_api_only @@ -50,6 +64,7 @@ class PluginInvokeSpeech2TextApi(Resource): def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeSpeech2Text): pass + class PluginInvokeModerationApi(Resource): @setup_required @plugin_inner_api_only @@ -58,23 +73,27 @@ class PluginInvokeModerationApi(Resource): def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeModeration): pass + class PluginInvokeToolApi(Resource): @setup_required @plugin_inner_api_only @get_tenant @plugin_data(payload_type=RequestInvokeTool) - def post(self, user_id: str, tenant_model: Tenant): - parser = reqparse.RequestParser() - parser.add_argument('provider', type=dict, required=True, location='json') - parser.add_argument('tool', type=dict, required=True, location='json') - parser.add_argument('parameters', type=dict, required=True, location='json') - - args = parser.parse_args() - - response = PluginInvokeService.invoke_tool( - user_id, tenant_model, args['provider'], args['tool'], args['parameters'] - ) - return compact_generate_response(response) + def post(self, 
user_id: str, tenant_model: Tenant, payload: RequestInvokeTool): + def generator(): + for i in range(10): + time.sleep(0.1) + yield ( + ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.TEXT, + message=ToolInvokeMessage.TextMessage(text='helloworld'), + ) + .model_dump_json() + .encode() + + b'\n\n' + ) + + return compact_generate_response(generator()) class PluginInvokeNodeApi(Resource): diff --git a/api/core/app/features/rate_limiting/rate_limit.py b/api/core/app/features/rate_limiting/rate_limit.py index f11e8021f0b1cc..570e3c003f3273 100644 --- a/api/core/app/features/rate_limiting/rate_limit.py +++ b/api/core/app/features/rate_limiting/rate_limit.py @@ -1,7 +1,7 @@ import logging import time import uuid -from collections.abc import Generator +from collections.abc import Callable, Generator from datetime import timedelta from typing import Optional, Union @@ -91,7 +91,7 @@ def generate(self, generator: Union[Generator, callable, dict], request_id: str) class RateLimitGenerator: - def __init__(self, rate_limit: RateLimit, generator: Union[Generator, callable], request_id: str): + def __init__(self, rate_limit: RateLimit, generator: Union[Generator, Callable[[], Generator]], request_id: str): self.rate_limit = rate_limit if callable(generator): self.generator = generator() diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 5213f35a4b41c9..1d39e7fb005d8d 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -90,6 +90,12 @@ def value_of(cls, value: str) -> 'ApiProviderAuthType': raise ValueError(f'invalid mode value {value}') class ToolInvokeMessage(BaseModel): + class TextMessage(BaseModel): + text: str + + class JsonMessage(BaseModel): + json_object: dict + class MessageType(Enum): TEXT = "text" IMAGE = "image" @@ -103,7 +109,7 @@ class MessageType(Enum): """ plain text, image url or link url """ - message: Optional[Union[str, bytes, dict]] = None + 
message: JsonMessage | TextMessage meta: Optional[dict[str, Any]] = None save_as: str = '' diff --git a/api/libs/helper.py b/api/libs/helper.py index 15cd65dd6a38ba..c169d6ba1744b6 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -36,8 +36,7 @@ def email(email): if re.match(pattern, email) is not None: return email - error = ('{email} is not a valid email.' - .format(email=email)) + error = '{email} is not a valid email.'.format(email=email) raise ValueError(error) @@ -49,10 +48,10 @@ def uuid_value(value): uuid_obj = uuid.UUID(value) return str(uuid_obj) except ValueError: - error = ('{value} is not a valid uuid.' - .format(value=value)) + error = '{value} is not a valid uuid.'.format(value=value) raise ValueError(error) + def alphanumeric(value: str): # check if the value is alphanumeric and underlined if re.match(r'^[a-zA-Z0-9_]+$', value): @@ -60,6 +59,7 @@ def alphanumeric(value: str): raise ValueError(f'{value} is not a valid alphanumeric value') + def timestamp_value(timestamp): try: int_timestamp = int(timestamp) @@ -67,13 +67,12 @@ def timestamp_value(timestamp): raise ValueError return int_timestamp except ValueError: - error = ('{timestamp} is not a valid timestamp.' - .format(timestamp=timestamp)) + error = '{timestamp} is not a valid timestamp.'.format(timestamp=timestamp) raise ValueError(error) class str_len: - """ Restrict input to an integer in a range (inclusive) """ + """Restrict input to an integer in a range (inclusive)""" def __init__(self, max_length, argument='argument'): self.max_length = max_length @@ -82,15 +81,17 @@ def __init__(self, max_length, argument='argument'): def __call__(self, value): length = len(value) if length > self.max_length: - error = ('Invalid {arg}: {val}. {arg} cannot exceed length {length}' - .format(arg=self.argument, val=value, length=self.max_length)) + error = 'Invalid {arg}: {val}. 
{arg} cannot exceed length {length}'.format( + arg=self.argument, val=value, length=self.max_length + ) raise ValueError(error) return value class float_range: - """ Restrict input to an float in a range (inclusive) """ + """Restrict input to an float in a range (inclusive)""" + def __init__(self, low, high, argument='argument'): self.low = low self.high = high @@ -99,8 +100,9 @@ def __init__(self, low, high, argument='argument'): def __call__(self, value): value = _get_float(value) if value < self.low or value > self.high: - error = ('Invalid {arg}: {val}. {arg} must be within the range {lo} - {hi}' - .format(arg=self.argument, val=value, lo=self.low, hi=self.high)) + error = 'Invalid {arg}: {val}. {arg} must be within the range {lo} - {hi}'.format( + arg=self.argument, val=value, lo=self.low, hi=self.high + ) raise ValueError(error) return value @@ -115,8 +117,9 @@ def __call__(self, value): try: datetime.strptime(value, self.format) except ValueError: - error = ('Invalid {arg}: {val}. {arg} must be conform to the format {format}' - .format(arg=self.argument, val=value, format=self.format)) + error = 'Invalid {arg}: {val}. {arg} must be conform to the format {format}'.format( + arg=self.argument, val=value, format=self.format + ) raise ValueError(error) return value @@ -128,18 +131,18 @@ def _get_float(value): except (TypeError, ValueError): raise ValueError('{} is not a valid float'.format(value)) + def timezone(timezone_string): if timezone_string and timezone_string in available_timezones(): return timezone_string - error = ('{timezone_string} is not a valid timezone.' 
- .format(timezone_string=timezone_string)) + error = '{timezone_string} is not a valid timezone.'.format(timezone_string=timezone_string) raise ValueError(error) def generate_string(n): letters_digits = string.ascii_letters + string.digits - result = "" + result = '' for i in range(n): result += random.choice(letters_digits) @@ -149,8 +152,8 @@ def generate_string(n): def get_remote_ip(request) -> str: if request.headers.get('CF-Connecting-IP'): return request.headers.get('Cf-Connecting-Ip') - elif request.headers.getlist("X-Forwarded-For"): - return request.headers.getlist("X-Forwarded-For")[0] + elif request.headers.getlist('X-Forwarded-For'): + return request.headers.getlist('X-Forwarded-For')[0] else: return request.remote_addr @@ -160,19 +163,24 @@ def generate_text_hash(text: str) -> str: return sha256(hash_text.encode()).hexdigest() -def compact_generate_response(response: Union[dict, RateLimitGenerator]) -> Response: +def compact_generate_response(response: Union[dict, Generator, RateLimitGenerator]) -> Response: if isinstance(response, dict): return Response(response=json.dumps(response), status=200, mimetype='application/json') else: + def generate() -> Generator: - yield from response + for data in response: + if isinstance(data, dict): + yield json.dumps(data).encode() + if isinstance(data, str): + yield data.encode() + else: + yield data - return Response(stream_with_context(generate()), status=200, - mimetype='text/event-stream') + return Response(stream_with_context(generate()), status=200, mimetype='text/event-stream') class TokenManager: - @classmethod def generate_token(cls, account: Account, token_type: str, additional_data: dict = None) -> str: old_token = cls._get_current_token_for_account(account.id, token_type) @@ -182,21 +190,13 @@ def generate_token(cls, account: Account, token_type: str, additional_data: dict cls.revoke_token(old_token, token_type) token = str(uuid.uuid4()) - token_data = { - 'account_id': account.id, - 'email': 
account.email, - 'token_type': token_type - } + token_data = {'account_id': account.id, 'email': account.email, 'token_type': token_type} if additional_data: token_data.update(additional_data) expiry_hours = current_app.config[f'{token_type.upper()}_TOKEN_EXPIRY_HOURS'] token_key = cls._get_token_key(token, token_type) - redis_client.setex( - token_key, - expiry_hours * 60 * 60, - json.dumps(token_data) - ) + redis_client.setex(token_key, expiry_hours * 60 * 60, json.dumps(token_data)) cls._set_current_token_for_account(account.id, token, token_type, expiry_hours) return token @@ -215,7 +215,7 @@ def get_token_data(cls, token: str, token_type: str) -> Optional[dict[str, Any]] key = cls._get_token_key(token, token_type) token_data_json = redis_client.get(key) if token_data_json is None: - logging.warning(f"{token_type} token {token} not found with key {key}") + logging.warning(f'{token_type} token {token} not found with key {key}') return None token_data = json.loads(token_data_json) return token_data @@ -243,7 +243,7 @@ def __init__(self, prefix: str, max_attempts: int, time_window: int): self.time_window = time_window def _get_key(self, email: str) -> str: - return f"{self.prefix}:{email}" + return f'{self.prefix}:{email}' def is_rate_limited(self, email: str) -> bool: key = self._get_key(email) From 31e8b134d14f492dbf256067db184f83e4eef24e Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Mon, 29 Jul 2024 22:08:14 +0800 Subject: [PATCH 006/325] feat: backwards invoke llm --- api/controllers/inner_api/plugin/plugin.py | 14 ++++- api/core/model_manager.py | 4 +- api/core/plugin/backwards_invocation/model.py | 49 +++++++++++++++ api/core/plugin/entities/request.py | 59 ++++++++++++++++++- 4 files changed, 119 insertions(+), 7 deletions(-) create mode 100644 api/core/plugin/backwards_invocation/model.py diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index b3ebf81bf6f7a0..3a76e007677d7e 100644 --- 
a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -1,10 +1,13 @@ import time +from collections.abc import Generator + from flask_restful import Resource, reqparse from controllers.console.setup import setup_required from controllers.inner_api import api from controllers.inner_api.plugin.wraps import get_tenant, plugin_data from controllers.inner_api.wraps import plugin_inner_api_only +from core.plugin.backwards_invocation.model import PluginBackwardsInvocation from core.plugin.entities.request import ( RequestInvokeLLM, RequestInvokeModeration, @@ -17,7 +20,6 @@ from core.tools.entities.tool_entities import ToolInvokeMessage from libs.helper import compact_generate_response from models.account import Tenant -from services.plugin.plugin_invoke_service import PluginInvokeService class PluginInvokeLLMApi(Resource): @@ -26,7 +28,15 @@ class PluginInvokeLLMApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeLLM) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeLLM): - pass + def generator(): + response = PluginBackwardsInvocation.invoke_llm(user_id, tenant_model, payload) + if isinstance(response, Generator): + for chunk in response: + yield chunk.model_dump_json().encode() + b'\n\n' + else: + yield response.model_dump_json().encode() + b'\n\n' + + return compact_generate_response(generator()) class PluginInvokeTextEmbeddingApi(Resource): diff --git a/api/core/model_manager.py b/api/core/model_manager.py index 8e99ad3dec040b..e46d1d35ee402e 100644 --- a/api/core/model_manager.py +++ b/api/core/model_manager.py @@ -7,7 +7,7 @@ from core.entities.provider_entities import ModelLoadBalancingConfiguration from core.errors.error import ProviderTokenNotInitError from core.model_runtime.callbacks.base_callback import Callback -from core.model_runtime.entities.llm_entities import LLMResult +from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk from 
core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.entities.rerank_entities import RerankResult @@ -103,7 +103,7 @@ def _get_load_balancing_manager(self, configuration: ProviderConfiguration, def invoke_llm(self, prompt_messages: list[PromptMessage], model_parameters: Optional[dict] = None, tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None, stream: bool = True, user: Optional[str] = None, callbacks: Optional[list[Callback]] = None) \ - -> Union[LLMResult, Generator]: + -> Union[LLMResult, Generator[LLMResultChunk, None, None]]: """ Invoke large language model diff --git a/api/core/plugin/backwards_invocation/model.py b/api/core/plugin/backwards_invocation/model.py new file mode 100644 index 00000000000000..b6da133119e2b1 --- /dev/null +++ b/api/core/plugin/backwards_invocation/model.py @@ -0,0 +1,49 @@ +from collections.abc import Generator + +from core.model_manager import ModelManager +from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk +from core.plugin.entities.request import RequestInvokeLLM +from core.workflow.nodes.llm.llm_node import LLMNode +from models.account import Tenant + + +class PluginBackwardsInvocation: + @classmethod + def invoke_llm( + cls, user_id: str, tenant: Tenant, payload: RequestInvokeLLM + ) -> Generator[LLMResultChunk, None, None] | LLMResult: + """ + invoke llm + """ + model_instance = ModelManager().get_model_instance( + tenant_id=tenant.id, + provider=payload.provider, + model_type=payload.model_type, + model=payload.model, + ) + + # invoke model + response = model_instance.invoke_llm( + prompt_messages=payload.prompt_messages, + model_parameters=payload.model_parameters, + tools=payload.tools, + stop=payload.stop, + stream=payload.stream or True, + user=user_id, + ) + + if isinstance(response, Generator): + + def handle() -> Generator[LLMResultChunk, 
None, None]: + for chunk in response: + if chunk.delta.usage: + LLMNode.deduct_llm_quota( + tenant_id=tenant.id, model_instance=model_instance, usage=chunk.delta.usage + ) + yield chunk + + return handle() + else: + if response.usage: + LLMNode.deduct_llm_quota(tenant_id=tenant.id, model_instance=model_instance, usage=response.usage) + return response diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 62db6396a88e25..bb08facf751b44 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -1,4 +1,17 @@ -from pydantic import BaseModel +from typing import Any, Optional + +from pydantic import BaseModel, Field, field_validator + +from core.model_runtime.entities.message_entities import ( + AssistantPromptMessage, + PromptMessage, + PromptMessageRole, + PromptMessageTool, + SystemPromptMessage, + ToolPromptMessage, + UserPromptMessage, +) +from core.model_runtime.entities.model_entities import ModelType class RequestInvokeTool(BaseModel): @@ -6,37 +19,77 @@ class RequestInvokeTool(BaseModel): Request to invoke a tool """ -class RequestInvokeLLM(BaseModel): + +class BaseRequestInvokeModel(BaseModel): + provider: str + model: str + model_type: ModelType + + +class RequestInvokeLLM(BaseRequestInvokeModel): """ Request to invoke LLM """ + model_type: ModelType = ModelType.LLM + mode: str + model_parameters: dict[str, Any] = Field(default_factory=dict) + prompt_messages: list[PromptMessage] + tools: Optional[list[PromptMessageTool]] = Field(default_factory=list) + stop: Optional[list[str]] = Field(default_factory=list) + stream: Optional[bool] = False + + @field_validator('prompt_messages', mode='before') + def convert_prompt_messages(cls, v): + if not isinstance(v, list): + raise ValueError('prompt_messages must be a list') + + for i in range(len(v)): + if v[i]['role'] == PromptMessageRole.USER.value: + v[i] = UserPromptMessage(**v[i]) + elif v[i]['role'] == PromptMessageRole.ASSISTANT.value: + 
v[i] = AssistantPromptMessage(**v[i]) + elif v[i]['role'] == PromptMessageRole.SYSTEM.value: + v[i] = SystemPromptMessage(**v[i]) + elif v[i]['role'] == PromptMessageRole.TOOL.value: + v[i] = ToolPromptMessage(**v[i]) + else: + v[i] = PromptMessage(**v[i]) + + return v + + class RequestInvokeTextEmbedding(BaseModel): """ Request to invoke text embedding """ + class RequestInvokeRerank(BaseModel): """ Request to invoke rerank """ + class RequestInvokeTTS(BaseModel): """ Request to invoke TTS """ + class RequestInvokeSpeech2Text(BaseModel): """ Request to invoke speech2text """ + class RequestInvokeModeration(BaseModel): """ Request to invoke moderation """ + class RequestInvokeNode(BaseModel): """ Request to invoke node - """ \ No newline at end of file + """ From 25b8a512bfba2934cdce9aa14de2011511d86666 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 12:55:00 +0800 Subject: [PATCH 007/325] feat: invoke app --- api/controllers/inner_api/plugin/plugin.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index 3a76e007677d7e..67a51fdb2ecd56 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -116,6 +116,15 @@ def post(self, user_id: str, tenant_model: Tenant): return {'message': 'success'} +class PluginInvokeAppApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + def post(self, user_id: str, tenant_model: Tenant): + parser = reqparse.RequestParser() + args = parser.parse_args() + + return {'message': 'success'} api.add_resource(PluginInvokeLLMApi, '/invoke/llm') api.add_resource(PluginInvokeTextEmbeddingApi, '/invoke/text-embedding') @@ -125,3 +134,4 @@ def post(self, user_id: str, tenant_model: Tenant): api.add_resource(PluginInvokeModerationApi, '/invoke/moderation') api.add_resource(PluginInvokeToolApi, '/invoke/tool') api.add_resource(PluginInvokeNodeApi, '/invoke/node') 
+api.add_resource(PluginInvokeAppApi, '/invoke/app') From c28998a6f0de33260837e1fbc4ec922f11e5abb3 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 13:42:31 +0800 Subject: [PATCH 008/325] refactor: tool message transformer --- api/core/tools/entities/tool_entities.py | 7 +++- api/core/tools/tool_file_manager.py | 10 ++--- api/core/tools/utils/message_transformer.py | 44 ++++++++++++++------- api/core/workflow/nodes/tool/tool_node.py | 4 +- api/models/tools.py | 18 +++++---- 5 files changed, 52 insertions(+), 31 deletions(-) diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 7d96db865237a2..c266db1cdbd1d8 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -96,6 +96,9 @@ class TextMessage(BaseModel): class JsonMessage(BaseModel): json_object: dict + class BlobMessage(BaseModel): + blob: bytes + class MessageType(Enum): TEXT = "text" IMAGE = "image" @@ -109,7 +112,7 @@ class MessageType(Enum): """ plain text, image url or link url """ - message: JsonMessage | TextMessage | None + message: JsonMessage | TextMessage | BlobMessage | None meta: dict[str, Any] | None = None save_as: str = '' @@ -321,7 +324,7 @@ def set_text(self, tool_name: str, name: str, value: str) -> None: self.pool.append(variable) - def set_file(self, tool_name: str, value: str, name: str = None) -> None: + def set_file(self, tool_name: str, value: str, name: Optional[str] = None) -> None: """ set an image variable diff --git a/api/core/tools/tool_file_manager.py b/api/core/tools/tool_file_manager.py index f9f7c7d78a7f28..078c58c662a5b7 100644 --- a/api/core/tools/tool_file_manager.py +++ b/api/core/tools/tool_file_manager.py @@ -80,8 +80,8 @@ def create_file_by_raw( def create_file_by_url( user_id: str, tenant_id: str, - conversation_id: str, file_url: str, + conversation_id: Optional[str] = None, ) -> ToolFile: """ create file @@ -131,7 +131,7 @@ def get_file_binary(id: str) -> 
Union[tuple[bytes, str], None]: :return: the binary of the file, mime type """ - tool_file: ToolFile = ( + tool_file: ToolFile | None = ( db.session.query(ToolFile) .filter( ToolFile.id == id, @@ -155,7 +155,7 @@ def get_file_binary_by_message_file_id(id: str) -> Union[tuple[bytes, str], None :return: the binary of the file, mime type """ - message_file: MessageFile = ( + message_file: MessageFile | None = ( db.session.query(MessageFile) .filter( MessageFile.id == id, @@ -173,7 +173,7 @@ def get_file_binary_by_message_file_id(id: str) -> Union[tuple[bytes, str], None tool_file_id = None - tool_file: ToolFile = ( + tool_file: ToolFile | None = ( db.session.query(ToolFile) .filter( ToolFile.id == tool_file_id, @@ -197,7 +197,7 @@ def get_file_generator_by_tool_file_id(tool_file_id: str) -> Union[tuple[Generat :return: the binary of the file, mime type """ - tool_file: ToolFile = ( + tool_file: ToolFile | None = ( db.session.query(ToolFile) .filter( ToolFile.id == tool_file_id, diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py index d22b88e58cdf41..41a93a4f9541ad 100644 --- a/api/core/tools/utils/message_transformer.py +++ b/api/core/tools/utils/message_transformer.py @@ -1,8 +1,9 @@ import logging from collections.abc import Generator from mimetypes import guess_extension +from typing import Optional -from core.file.file_obj import FileTransferMethod, FileType +from core.file.file_obj import FileTransferMethod, FileType, FileVar from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool_file_manager import ToolFileManager @@ -13,7 +14,7 @@ class ToolFileMessageTransformer: def transform_tool_invoke_messages(cls, messages: Generator[ToolInvokeMessage, None, None], user_id: str, tenant_id: str, - conversation_id: str) -> Generator[ToolInvokeMessage, None, None]: + conversation_id: Optional[str] = None) -> Generator[ToolInvokeMessage, None, None]: """ Transform tool message and handle file 
download """ @@ -25,18 +26,23 @@ def transform_tool_invoke_messages(cls, messages: Generator[ToolInvokeMessage, N elif message.type == ToolInvokeMessage.MessageType.IMAGE: # try to download image try: + if not conversation_id: + raise + + assert isinstance(message.message, ToolInvokeMessage.TextMessage) + file = ToolFileManager.create_file_by_url( user_id=user_id, tenant_id=tenant_id, + file_url=message.message.text, conversation_id=conversation_id, - file_url=message.message ) url = f'/files/tools/{file.id}{guess_extension(file.mimetype) or ".png"}' yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.IMAGE_LINK, - message=url, + message=ToolInvokeMessage.TextMessage(text=url), save_as=message.save_as, meta=message.meta.copy() if message.meta is not None else {}, ) @@ -44,57 +50,67 @@ def transform_tool_invoke_messages(cls, messages: Generator[ToolInvokeMessage, N logger.exception(e) yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.TEXT, - message=f"Failed to download image: {message.message}, you can try to download it yourself.", + message=ToolInvokeMessage.TextMessage( + text=f"Failed to download image: {message.message}, you can try to download it yourself." 
+ ), meta=message.meta.copy() if message.meta is not None else {}, save_as=message.save_as, ) elif message.type == ToolInvokeMessage.MessageType.BLOB: # get mime type and save blob to storage + assert message.meta + mimetype = message.meta.get('mime_type', 'octet/stream') # if message is str, encode it to bytes - if isinstance(message.message, str): - message.message = message.message.encode('utf-8') + + if not isinstance(message.message, ToolInvokeMessage.BlobMessage): + raise ValueError("unexpected message type") file = ToolFileManager.create_file_by_raw( user_id=user_id, tenant_id=tenant_id, conversation_id=conversation_id, - file_binary=message.message, + file_binary=message.message.blob, mimetype=mimetype ) - url = cls.get_tool_file_url(file.id, guess_extension(file.mimetype)) + extension = guess_extension(file.mimetype) or ".bin" + url = cls.get_tool_file_url(file.id, extension) # check if file is image if 'image' in mimetype: yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.IMAGE_LINK, - message=url, + message=ToolInvokeMessage.TextMessage(text=url), save_as=message.save_as, meta=message.meta.copy() if message.meta is not None else {}, ) else: yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.LINK, - message=url, + message=ToolInvokeMessage.TextMessage(text=url), save_as=message.save_as, meta=message.meta.copy() if message.meta is not None else {}, ) elif message.type == ToolInvokeMessage.MessageType.FILE_VAR: - file_var = message.meta.get('file_var') + assert message.meta + + file_var: FileVar | None = message.meta.get('file_var') if file_var: if file_var.transfer_method == FileTransferMethod.TOOL_FILE: + assert file_var.related_id and file_var.extension + url = cls.get_tool_file_url(file_var.related_id, file_var.extension) if file_var.type == FileType.IMAGE: yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.IMAGE_LINK, - message=url, + message=ToolInvokeMessage.TextMessage(text=url), save_as=message.save_as, 
meta=message.meta.copy() if message.meta is not None else {}, ) else: yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.LINK, - message=url, + message=ToolInvokeMessage.TextMessage(text=url), save_as=message.save_as, meta=message.meta.copy() if message.meta is not None else {}, ) diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 52a4a216e0eaf5..0774fc4f3d12b2 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -1,4 +1,4 @@ -from collections.abc import Mapping, Sequence +from collections.abc import Generator, Mapping, Sequence from os import path from typing import Any, cast @@ -145,7 +145,7 @@ def _fetch_files(self, variable_pool: VariablePool) -> list[FileVar]: assert isinstance(variable, ArrayAnyVariable | ArrayAnySegment) return list(variable.value) if variable else [] - def _convert_tool_messages(self, messages: list[ToolInvokeMessage]): + def _convert_tool_messages(self, messages: Generator[ToolInvokeMessage, None, None]): """ Convert ToolInvokeMessages into tuple[plain_text, files] """ diff --git a/api/models/tools.py b/api/models/tools.py index 069dc5bad083c8..3ee246eeb33de5 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -1,6 +1,7 @@ import json from sqlalchemy import ForeignKey +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle @@ -277,7 +278,7 @@ class ToolConversationVariables(db.Model): def variables(self) -> dict: return json.loads(self.variables_str) -class ToolFile(db.Model): +class ToolFile(DeclarativeBase): """ store the file created by agent """ @@ -288,16 +289,17 @@ class ToolFile(db.Model): db.Index('tool_file_conversation_id_idx', 'conversation_id'), ) - id = db.Column(StringUUID, server_default=db.text('uuid_generate_v4()')) + id: Mapped[str] = mapped_column(StringUUID, 
default=db.text('uuid_generate_v4()')) # conversation user id - user_id = db.Column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID) # tenant id - tenant_id = db.Column(StringUUID, nullable=False) + tenant_id: Mapped[StringUUID] = mapped_column(StringUUID) # conversation id - conversation_id = db.Column(StringUUID, nullable=True) + conversation_id: Mapped[StringUUID] = mapped_column(nullable=True) # file key - file_key = db.Column(db.String(255), nullable=False) + file_key: Mapped[str] = mapped_column(db.String(255), nullable=False) # mime type - mimetype = db.Column(db.String(255), nullable=False) + mimetype: Mapped[str] = mapped_column(db.String(255), nullable=False) # original url - original_url = db.Column(db.String(2048), nullable=True) \ No newline at end of file + original_url: Mapped[str] = mapped_column(db.String(2048), nullable=True) + \ No newline at end of file From 531ffaec4fd94817f73f4bd172b4abbd95d9211f Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 13:56:48 +0800 Subject: [PATCH 009/325] fix: tool node --- api/core/workflow/nodes/tool/tool_node.py | 45 ++++++++++++++++------- 1 file changed, 32 insertions(+), 13 deletions(-) diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 0774fc4f3d12b2..47498f4f5fde1f 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -128,8 +128,10 @@ def _generate_parameters( else: tool_input = node_data.tool_parameters[parameter_name] if tool_input.type == 'variable': - # TODO: check if the variable exists in the variable pool - parameter_value = variable_pool.get(tool_input.value).value + parameter_value_segment = variable_pool.get(tool_input.value) + if not parameter_value_segment: + raise Exception("input variable dose not exists") + parameter_value = parameter_value_segment.value else: segment_group = parser.convert_template( template=str(tool_input.value), @@ -163,7 
+165,7 @@ def _convert_tool_messages(self, messages: Generator[ToolInvokeMessage, None, No return plain_text, files, json - def _extract_tool_response_binary(self, tool_response: list[ToolInvokeMessage]) -> list[FileVar]: + def _extract_tool_response_binary(self, tool_response: Generator[ToolInvokeMessage, None, None]) -> list[FileVar]: """ Extract tool response binary """ @@ -172,7 +174,10 @@ def _extract_tool_response_binary(self, tool_response: list[ToolInvokeMessage]) for response in tool_response: if response.type == ToolInvokeMessage.MessageType.IMAGE_LINK or \ response.type == ToolInvokeMessage.MessageType.IMAGE: - url = response.message + assert isinstance(response.message, ToolInvokeMessage.TextMessage) + assert response.meta + + url = response.message.text ext = path.splitext(url)[1] mimetype = response.meta.get('mime_type', 'image/jpeg') filename = response.save_as or url.split('/')[-1] @@ -192,7 +197,10 @@ def _extract_tool_response_binary(self, tool_response: list[ToolInvokeMessage]) )) elif response.type == ToolInvokeMessage.MessageType.BLOB: # get tool file id - tool_file_id = response.message.split('/')[-1].split('.')[0] + assert isinstance(response.message, ToolInvokeMessage.TextMessage) + assert response.meta + + tool_file_id = response.message.text.split('/')[-1].split('.')[0] result.append(FileVar( tenant_id=self.tenant_id, type=FileType.IMAGE, @@ -207,18 +215,28 @@ def _extract_tool_response_binary(self, tool_response: list[ToolInvokeMessage]) return result - def _extract_tool_response_text(self, tool_response: list[ToolInvokeMessage]) -> str: + def _extract_tool_response_text(self, tool_response: Generator[ToolInvokeMessage]) -> str: """ Extract tool response text """ - return '\n'.join([ - f'{message.message}' if message.type == ToolInvokeMessage.MessageType.TEXT else - f'Link: {message.message}' if message.type == ToolInvokeMessage.MessageType.LINK else '' - for message in tool_response - ]) + result: list[str] = [] + for message in 
tool_response: + if message.type == ToolInvokeMessage.MessageType.TEXT: + assert isinstance(message.message, ToolInvokeMessage.TextMessage) + result.append(message.message.text) + elif message.type == ToolInvokeMessage.MessageType.LINK: + assert isinstance(message.message, ToolInvokeMessage.TextMessage) + result.append(f'Link: {message.message.text}') - def _extract_tool_response_json(self, tool_response: list[ToolInvokeMessage]) -> list[dict]: - return [message.message for message in tool_response if message.type == ToolInvokeMessage.MessageType.JSON] + return '\n'.join(result) + + def _extract_tool_response_json(self, tool_response: Generator[ToolInvokeMessage]) -> list[dict]: + result: list[dict] = [] + for message in tool_response: + if message.type == ToolInvokeMessage.MessageType.JSON: + assert isinstance(message, ToolInvokeMessage.JsonMessage) + result.append(message.json_object) + return result @classmethod def _extract_variable_selector_to_variable_mapping(cls, node_data: ToolNodeData) -> dict[str, list[str]]: @@ -231,6 +249,7 @@ def _extract_variable_selector_to_variable_mapping(cls, node_data: ToolNodeData) for parameter_name in node_data.tool_parameters: input = node_data.tool_parameters[parameter_name] if input.type == 'mixed': + assert isinstance(input.value, str) selectors = VariableTemplateParser(input.value).extract_variable_selectors() for selector in selectors: result[selector.variable] = selector.value_selector From c8b0160ea9afb9c93061b33ba74cbe269a09debc Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 14:06:10 +0800 Subject: [PATCH 010/325] fix: tool type --- api/core/tools/tool/tool.py | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/api/core/tools/tool/tool.py b/api/core/tools/tool/tool.py index ae346759e2cb06..52513c13f9461a 100644 --- a/api/core/tools/tool/tool.py +++ b/api/core/tools/tool/tool.py @@ -4,7 +4,7 @@ from enum import Enum from typing import TYPE_CHECKING, Any, 
Optional, Union -from pydantic import BaseModel, ConfigDict, field_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator from pydantic_core.core_schema import ValidationInfo from core.app.entities.app_invoke_entities import InvokeFrom @@ -27,8 +27,8 @@ class Tool(BaseModel, ABC): - identity: Optional[ToolIdentity] = None - parameters: Optional[list[ToolParameter]] = None + identity: ToolIdentity + parameters: list[ToolParameter] = Field(default_factory=list) description: Optional[ToolDescription] = None is_team_authorization: bool = False @@ -194,10 +194,8 @@ def list_default_image_variables(self) -> list[ToolRuntimeVariable]: return result - def invoke(self, user_id: str, tool_parameters: Mapping[str, Any]) -> Generator[ToolInvokeMessage]: - # update tool_parameters - # TODO: Fix type error. - if self.runtime.runtime_parameters: + def invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage]: + if self.runtime and self.runtime.runtime_parameters: tool_parameters.update(self.runtime.runtime_parameters) # try parse tool parameters into the correct type @@ -210,7 +208,7 @@ def invoke(self, user_id: str, tool_parameters: Mapping[str, Any]) -> Generator[ return result - def _transform_tool_parameters_type(self, tool_parameters: Mapping[str, Any]) -> dict[str, Any]: + def _transform_tool_parameters_type(self, tool_parameters: dict[str, Any]) -> dict[str, Any]: """ Transform tool parameters type """ @@ -289,7 +287,7 @@ def create_image_message(self, image: str, save_as: str = '') -> ToolInvokeMessa :return: the image message """ return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.IMAGE, - message=image, + message=ToolInvokeMessage.TextMessage(text=image), save_as=save_as) def create_file_var_message(self, file_var: "FileVar") -> ToolInvokeMessage: @@ -308,7 +306,7 @@ def create_link_message(self, link: str, save_as: str = '') -> ToolInvokeMessage :return: the link message """ return 
ToolInvokeMessage(type=ToolInvokeMessage.MessageType.LINK, - message=link, + message=ToolInvokeMessage.TextMessage(text=link), save_as=save_as) def create_text_message(self, text: str, save_as: str = '') -> ToolInvokeMessage: @@ -320,7 +318,7 @@ def create_text_message(self, text: str, save_as: str = '') -> ToolInvokeMessage """ return ToolInvokeMessage( type=ToolInvokeMessage.MessageType.TEXT, - message=text, + message=ToolInvokeMessage.TextMessage(text=text), save_as=save_as ) @@ -331,10 +329,18 @@ def create_blob_message(self, blob: bytes, meta: Optional[dict] = None, save_as: :param blob: the blob :return: the blob message """ - return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.BLOB, message=blob, meta=meta, save_as=save_as) + return ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB, + message=ToolInvokeMessage.BlobMessage(blob=blob), + meta=meta, + save_as=save_as + ) def create_json_message(self, object: dict) -> ToolInvokeMessage: """ create a json message """ - return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.JSON, message=object) + return ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.JSON, + message=ToolInvokeMessage.JsonMessage(json_object=object) + ) From 4a8d3c54ca54b3e792ea86be0a3d148a8388bc9e Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 14:09:47 +0800 Subject: [PATCH 011/325] fix: workflow as tool type --- api/core/app/apps/workflow/app_generator.py | 24 ++++++++++++++++++++- api/core/tools/tool/tool.py | 2 +- api/core/tools/tool/workflow_tool.py | 4 +++- 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index df40aec154a856..c5bf35edb607f1 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -4,7 +4,7 @@ import threading import uuid from collections.abc import Generator -from typing import Union +from typing import Literal, Union, 
overload from flask import Flask, current_app from pydantic import ValidationError @@ -32,6 +32,28 @@ class WorkflowAppGenerator(BaseAppGenerator): + @overload + def generate( + self, app_model: App, + workflow: Workflow, + user: Union[Account, EndUser], + args: dict, + invoke_from: InvokeFrom, + stream: Literal[True] = True, + call_depth: int = 0, + ) -> Generator[dict, None, None]: ... + + @overload + def generate( + self, app_model: App, + workflow: Workflow, + user: Union[Account, EndUser], + args: dict, + invoke_from: InvokeFrom, + stream: Literal[False] = False, + call_depth: int = 0, + ) -> dict: ... + def generate( self, app_model: App, workflow: Workflow, diff --git a/api/core/tools/tool/tool.py b/api/core/tools/tool/tool.py index 52513c13f9461a..68db0d5b2eaf4b 100644 --- a/api/core/tools/tool/tool.py +++ b/api/core/tools/tool/tool.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from collections.abc import Generator, Mapping +from collections.abc import Generator from copy import deepcopy from enum import Enum from typing import TYPE_CHECKING, Any, Optional, Union diff --git a/api/core/tools/tool/workflow_tool.py b/api/core/tools/tool/workflow_tool.py index 4724c992d26e05..7a669a4fe4a465 100644 --- a/api/core/tools/tool/workflow_tool.py +++ b/api/core/tools/tool/workflow_tool.py @@ -48,6 +48,8 @@ def _invoke( from core.app.apps.workflow.app_generator import WorkflowAppGenerator generator = WorkflowAppGenerator() + assert self.runtime and self.runtime.invoke_from + result = generator.generate( app_model=app, workflow=workflow, @@ -154,7 +156,7 @@ def _transform_args(self, tool_parameters: dict) -> tuple[dict, list[dict]]: try: file_var_list = [FileVar(**f) for f in file] for file_var in file_var_list: - file_dict = { + file_dict: dict[str, Any] = { 'transfer_method': file_var.transfer_method.value, 'type': file_var.type.value, } From f47712beaedd264042d5ee06f1998c0ad0748420 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 14:18:00 +0800 
Subject: [PATCH 012/325] feat: add type annotation --- .../app/apps/advanced_chat/app_generator.py | 22 ++++++++++++- api/core/app/apps/agent_chat/app_generator.py | 20 ++++++++++- api/core/app/apps/chat/app_generator.py | 20 ++++++++++- api/core/app/apps/completion/app_generator.py | 33 +++++++++++++++---- 4 files changed, 85 insertions(+), 10 deletions(-) diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 5a1e5973cd46ea..80d2694fc9f482 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -4,7 +4,7 @@ import threading import uuid from collections.abc import Generator -from typing import Union +from typing import Any, Literal, Union, overload from flask import Flask, current_app from pydantic import ValidationError @@ -39,6 +39,26 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): + @overload + def generate( + self, app_model: App, + workflow: Workflow, + user: Union[Account, EndUser], + args: dict, + invoke_from: InvokeFrom, + stream: Literal[True] = True, + ) -> Generator[str, None, None]: ... + + @overload + def generate( + self, app_model: App, + workflow: Workflow, + user: Union[Account, EndUser], + args: dict, + invoke_from: InvokeFrom, + stream: Literal[False] = False, + ) -> dict: ...
+ def generate( self, app_model: App, workflow: Workflow, diff --git a/api/core/app/apps/agent_chat/app_generator.py b/api/core/app/apps/agent_chat/app_generator.py index 53780bdfb003b2..cf6d8292567afd 100644 --- a/api/core/app/apps/agent_chat/app_generator.py +++ b/api/core/app/apps/agent_chat/app_generator.py @@ -3,7 +3,7 @@ import threading import uuid from collections.abc import Generator -from typing import Any, Union +from typing import Any, Literal, Union, overload from flask import Flask, current_app from pydantic import ValidationError @@ -28,6 +28,24 @@ class AgentChatAppGenerator(MessageBasedAppGenerator): + @overload + def generate( + self, app_model: App, + user: Union[Account, EndUser], + args: Any, + invoke_from: InvokeFrom, + stream: Literal[True] = True, + ) -> Generator[dict, None, None]: ... + + @overload + def generate( + self, app_model: App, + user: Union[Account, EndUser], + args: Any, + invoke_from: InvokeFrom, + stream: Literal[False] = False, + ) -> dict: ... + def generate(self, app_model: App, user: Union[Account, EndUser], args: Any, diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py index 5b896e28455340..bf4e6083b6bd3a 100644 --- a/api/core/app/apps/chat/app_generator.py +++ b/api/core/app/apps/chat/app_generator.py @@ -3,7 +3,7 @@ import threading import uuid from collections.abc import Generator -from typing import Any, Union +from typing import Any, Literal, Union, overload from flask import Flask, current_app from pydantic import ValidationError @@ -28,6 +28,24 @@ class ChatAppGenerator(MessageBasedAppGenerator): + @overload + def generate( + self, app_model: App, + user: Union[Account, EndUser], + args: Any, + invoke_from: InvokeFrom, + stream: Literal[True] = True, + ) -> Generator[dict, None, None]: ... + + @overload + def generate( + self, app_model: App, + user: Union[Account, EndUser], + args: Any, + invoke_from: InvokeFrom, + stream: Literal[False] = False, + ) -> dict: ... 
+ def generate( self, app_model: App, user: Union[Account, EndUser], diff --git a/api/core/app/apps/completion/app_generator.py b/api/core/app/apps/completion/app_generator.py index c4e1caf65a9679..b14568480f1d6f 100644 --- a/api/core/app/apps/completion/app_generator.py +++ b/api/core/app/apps/completion/app_generator.py @@ -3,7 +3,7 @@ import threading import uuid from collections.abc import Generator -from typing import Any, Union +from typing import Any, Literal, Union, overload from flask import Flask, current_app from pydantic import ValidationError @@ -30,12 +30,31 @@ class CompletionAppGenerator(MessageBasedAppGenerator): - def generate(self, app_model: App, - user: Union[Account, EndUser], - args: Any, - invoke_from: InvokeFrom, - stream: bool = True) \ - -> Union[dict, Generator[dict, None, None]]: + @overload + def generate( + self, app_model: App, + user: Union[Account, EndUser], + args: Any, + invoke_from: InvokeFrom, + stream: Literal[True] = True, + ) -> Generator[dict, None, None]: ... + + @overload + def generate( + self, app_model: App, + user: Union[Account, EndUser], + args: Any, + invoke_from: InvokeFrom, + stream: Literal[False] = False, + ) -> dict: ... + + def generate( + self, app_model: App, + user: Union[Account, EndUser], + args: Any, + invoke_from: InvokeFrom, + stream: bool = True + ) -> Union[dict, Generator[dict, None, None]]: """ Generate App response. 
From 6ce02b07d3111fe63ebb0f7fcf1bc2392c3333c3 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 14:23:19 +0800 Subject: [PATCH 013/325] feat: add type annatation --- api/core/app/apps/advanced_chat/app_generator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 80d2694fc9f482..e7c9ebe09749de 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -4,7 +4,7 @@ import threading import uuid from collections.abc import Generator -from typing import Any, Literal, Union, overload +from typing import Literal, Union, overload from flask import Flask, current_app from pydantic import ValidationError From ec711d094d002bdc3068f73002899b8a2422193c Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 19:49:57 +0800 Subject: [PATCH 014/325] refactor: enforce return object in app generator --- .../app/apps/advanced_chat/app_generator.py | 18 +++++-- .../generate_response_converter.py | 9 ++-- api/core/app/apps/agent_chat/app_generator.py | 13 ++++- .../agent_chat/generate_response_converter.py | 9 ++-- .../base_app_generate_response_converter.py | 18 ++----- api/core/app/apps/base_app_generator.py | 22 +++++++- api/core/app/apps/base_app_queue_manager.py | 4 +- api/core/app/apps/chat/app_generator.py | 13 ++++- .../apps/chat/generate_response_converter.py | 9 ++-- api/core/app/apps/completion/app_generator.py | 9 ++++ .../completion/generate_response_converter.py | 9 ++-- api/core/app/apps/workflow/app_generator.py | 19 +++++-- .../workflow/generate_response_converter.py | 9 ++-- api/services/app_generate_service.py | 50 ++++++++++++------- 14 files changed, 140 insertions(+), 71 deletions(-) diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index e7c9ebe09749de..6d654c2dc6aebb 100644 --- 
a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -4,7 +4,7 @@ import threading import uuid from collections.abc import Generator -from typing import Literal, Union, overload +from typing import Any, Literal, Union, overload from flask import Flask, current_app from pydantic import ValidationError @@ -47,7 +47,7 @@ def generate( args: dict, invoke_from: InvokeFrom, stream: Literal[True] = True, - ) -> Generator[str, None, None]: ... + ) -> Generator[dict | str, None, None]: ... @overload def generate( @@ -59,6 +59,16 @@ def generate( stream: Literal[False] = False, ) -> dict: ... + @overload + def generate( + self, app_model: App, + workflow: Workflow, + user: Union[Account, EndUser], + args: dict, + invoke_from: InvokeFrom, + stream: bool = True, + ) -> Union[dict[str, Any], Generator[dict | str, Any, None]]: ... + def generate( self, app_model: App, workflow: Workflow, @@ -152,7 +162,7 @@ def generate( def single_iteration_generate(self, app_model: App, workflow: Workflow, node_id: str, - user: Account, + user: Account | EndUser, args: dict, stream: bool = True): """ @@ -325,7 +335,7 @@ def _generate(self, *, user=user, stream=stream, ) - + return AdvancedChatAppGenerateResponseConverter.convert( response=response, invoke_from=invoke_from diff --git a/api/core/app/apps/advanced_chat/generate_response_converter.py b/api/core/app/apps/advanced_chat/generate_response_converter.py index ef579827b47c7e..2ddbd816e25cc0 100644 --- a/api/core/app/apps/advanced_chat/generate_response_converter.py +++ b/api/core/app/apps/advanced_chat/generate_response_converter.py @@ -1,4 +1,3 @@ -import json from collections.abc import Generator from typing import Any, cast @@ -56,7 +55,7 @@ def convert_blocking_simple_response(cls, blocking_response: AppBlockingResponse return response @classmethod - def convert_stream_full_response(cls, stream_response: Generator[AppStreamResponse, None, None]) -> Generator[str, Any, None]: + 
def convert_stream_full_response(cls, stream_response: Generator[AppStreamResponse, None, None]) -> Generator[dict | str, Any, None]: """ Convert stream full response. :param stream_response: stream response @@ -82,10 +81,10 @@ def convert_stream_full_response(cls, stream_response: Generator[AppStreamRespon response_chunk.update(data) else: response_chunk.update(sub_stream_response.to_dict()) - yield json.dumps(response_chunk) + yield response_chunk @classmethod - def convert_stream_simple_response(cls, stream_response: Generator[AppStreamResponse, None, None]) -> Generator[str, Any, None]: + def convert_stream_simple_response(cls, stream_response: Generator[AppStreamResponse, None, None]) -> Generator[dict | str, Any, None]: """ Convert stream simple response. :param stream_response: stream response @@ -119,4 +118,4 @@ def convert_stream_simple_response(cls, stream_response: Generator[AppStreamResp else: response_chunk.update(sub_stream_response.to_dict()) - yield json.dumps(response_chunk) + yield response_chunk diff --git a/api/core/app/apps/agent_chat/app_generator.py b/api/core/app/apps/agent_chat/app_generator.py index 29c7447290cd08..726e7ca65c542e 100644 --- a/api/core/app/apps/agent_chat/app_generator.py +++ b/api/core/app/apps/agent_chat/app_generator.py @@ -35,7 +35,7 @@ def generate( args: dict, invoke_from: InvokeFrom, stream: Literal[True] = True, - ) -> Generator[str, None, None]: ... + ) -> Generator[dict | str, None, None]: ... @overload def generate( @@ -46,12 +46,21 @@ def generate( stream: Literal[False] = False, ) -> dict: ... + @overload + def generate( + self, app_model: App, + user: Union[Account, EndUser], + args: dict, + invoke_from: InvokeFrom, + stream: bool = False, + ) -> dict | Generator[dict | str, None, None]: ... 
+ def generate(self, app_model: App, user: Union[Account, EndUser], args: Any, invoke_from: InvokeFrom, stream: bool = True) \ - -> Union[dict, Generator[str, None, None]]: + -> Union[dict, Generator[dict | str, None, None]]: """ Generate App response. diff --git a/api/core/app/apps/agent_chat/generate_response_converter.py b/api/core/app/apps/agent_chat/generate_response_converter.py index 118d82c495f1fe..02aec27e393588 100644 --- a/api/core/app/apps/agent_chat/generate_response_converter.py +++ b/api/core/app/apps/agent_chat/generate_response_converter.py @@ -1,4 +1,3 @@ -import json from collections.abc import Generator from typing import cast @@ -52,7 +51,7 @@ def convert_blocking_simple_response(cls, blocking_response: ChatbotAppBlockingR @classmethod def convert_stream_full_response(cls, stream_response: Generator[ChatbotAppStreamResponse, None, None]) \ - -> Generator[str, None, None]: + -> Generator[dict | str, None, None]: """ Convert stream full response. :param stream_response: stream response @@ -78,11 +77,11 @@ def convert_stream_full_response(cls, stream_response: Generator[ChatbotAppStrea response_chunk.update(data) else: response_chunk.update(sub_stream_response.to_dict()) - yield json.dumps(response_chunk) + yield response_chunk @classmethod def convert_stream_simple_response(cls, stream_response: Generator[ChatbotAppStreamResponse, None, None]) \ - -> Generator[str, None, None]: + -> Generator[dict | str, None, None]: """ Convert stream simple response. 
:param stream_response: stream response @@ -114,4 +113,4 @@ def convert_stream_simple_response(cls, stream_response: Generator[ChatbotAppStr else: response_chunk.update(sub_stream_response.to_dict()) - yield json.dumps(response_chunk) + yield response_chunk diff --git a/api/core/app/apps/base_app_generate_response_converter.py b/api/core/app/apps/base_app_generate_response_converter.py index 1165314a7f2bd8..bd0d08cb7bf7fa 100644 --- a/api/core/app/apps/base_app_generate_response_converter.py +++ b/api/core/app/apps/base_app_generate_response_converter.py @@ -21,24 +21,16 @@ def convert(cls, response: Union[ if isinstance(response, AppBlockingResponse): return cls.convert_blocking_full_response(response) else: - def _generate_full_response() -> Generator[str, Any, None]: - for chunk in cls.convert_stream_full_response(response): - if chunk == 'ping': - yield f'event: {chunk}\n\n' - else: - yield f'data: {chunk}\n\n' + def _generate_full_response() -> Generator[dict | str, Any, None]: + yield from cls.convert_stream_simple_response(response) return _generate_full_response() else: if isinstance(response, AppBlockingResponse): return cls.convert_blocking_simple_response(response) else: - def _generate_simple_response() -> Generator[str, Any, None]: - for chunk in cls.convert_stream_simple_response(response): - if chunk == 'ping': - yield f'event: {chunk}\n\n' - else: - yield f'data: {chunk}\n\n' + def _generate_simple_response() -> Generator[dict | str, Any, None]: + yield from cls.convert_stream_simple_response(response) return _generate_simple_response() @@ -55,7 +47,7 @@ def convert_blocking_simple_response(cls, blocking_response: AppBlockingResponse @classmethod @abstractmethod def convert_stream_full_response(cls, stream_response: Generator[AppStreamResponse, None, None]) \ - -> Generator[str, None, None]: + -> Generator[dict | str, None, None]: raise NotImplementedError @classmethod diff --git a/api/core/app/apps/base_app_generator.py 
b/api/core/app/apps/base_app_generator.py index 9e331dff4d64e2..7727519aef5a08 100644 --- a/api/core/app/apps/base_app_generator.py +++ b/api/core/app/apps/base_app_generator.py @@ -1,5 +1,6 @@ -from collections.abc import Mapping -from typing import Any, Optional +from collections.abc import Generator, Mapping +import json +from typing import Any, Optional, Union from core.app.app_config.entities import AppConfig, VariableEntity, VariableEntityType @@ -54,3 +55,20 @@ def _sanitize_value(self, value: Any) -> Any: if isinstance(value, str): return value.replace('\x00', '') return value + + @classmethod + def convert_to_event_stream(cls, generator: Union[dict, Generator[dict| str, None, None]]): + """ + Convert messages into event stream + """ + if isinstance(generator, dict): + return generator + else: + def gen(): + for message in generator: + if isinstance(message, dict): + yield f'data: {json.dumps(message)}\n\n' + else: + yield f'event: {message}\n\n' + + return gen() \ No newline at end of file diff --git a/api/core/app/apps/base_app_queue_manager.py b/api/core/app/apps/base_app_queue_manager.py index f929a979f129de..b45f57e9b6372d 100644 --- a/api/core/app/apps/base_app_queue_manager.py +++ b/api/core/app/apps/base_app_queue_manager.py @@ -3,7 +3,7 @@ from abc import abstractmethod from collections.abc import Generator from enum import Enum -from typing import Any +from typing import Any, Optional from sqlalchemy.orm import DeclarativeMeta @@ -118,7 +118,7 @@ def set_stop_flag(cls, task_id: str, invoke_from: InvokeFrom, user_id: str) -> N Set task stop flag :return: """ - result = redis_client.get(cls._generate_task_belong_cache_key(task_id)) + result: Optional[Any] = redis_client.get(cls._generate_task_belong_cache_key(task_id)) if result is None: return diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py index ab15928b744b61..b784f42e7e319f 100644 --- a/api/core/app/apps/chat/app_generator.py +++ 
b/api/core/app/apps/chat/app_generator.py @@ -35,7 +35,7 @@ def generate( args: Any, invoke_from: InvokeFrom, stream: Literal[True] = True, - ) -> Generator[str, None, None]: ... + ) -> Generator[dict | str, None, None]: ... @overload def generate( @@ -46,13 +46,22 @@ def generate( stream: Literal[False] = False, ) -> dict: ... + @overload + def generate( + self, app_model: App, + user: Union[Account, EndUser], + args: Any, + invoke_from: InvokeFrom, + stream: bool = False, + ) -> Union[dict, Generator[dict | str, None, None]]: ... + def generate( self, app_model: App, user: Union[Account, EndUser], args: Any, invoke_from: InvokeFrom, stream: bool = True, - ) -> Union[dict, Generator[str, None, None]]: + ) -> Union[dict, Generator[dict | str, None, None]]: """ Generate App response. diff --git a/api/core/app/apps/chat/generate_response_converter.py b/api/core/app/apps/chat/generate_response_converter.py index 625e14c9c39712..0ae9926bb8b10a 100644 --- a/api/core/app/apps/chat/generate_response_converter.py +++ b/api/core/app/apps/chat/generate_response_converter.py @@ -1,4 +1,3 @@ -import json from collections.abc import Generator from typing import cast @@ -52,7 +51,7 @@ def convert_blocking_simple_response(cls, blocking_response: ChatbotAppBlockingR @classmethod def convert_stream_full_response(cls, stream_response: Generator[ChatbotAppStreamResponse, None, None]) \ - -> Generator[str, None, None]: + -> Generator[dict | str, None, None]: """ Convert stream full response. 
:param stream_response: stream response @@ -78,11 +77,11 @@ def convert_stream_full_response(cls, stream_response: Generator[ChatbotAppStrea response_chunk.update(data) else: response_chunk.update(sub_stream_response.to_dict()) - yield json.dumps(response_chunk) + yield response_chunk @classmethod def convert_stream_simple_response(cls, stream_response: Generator[ChatbotAppStreamResponse, None, None]) \ - -> Generator[str, None, None]: + -> Generator[dict | str, None, None]: """ Convert stream simple response. :param stream_response: stream response @@ -114,4 +113,4 @@ def convert_stream_simple_response(cls, stream_response: Generator[ChatbotAppStr else: response_chunk.update(sub_stream_response.to_dict()) - yield json.dumps(response_chunk) + yield response_chunk diff --git a/api/core/app/apps/completion/app_generator.py b/api/core/app/apps/completion/app_generator.py index c0b13b40fdb0d6..3ce4d3ccaa0630 100644 --- a/api/core/app/apps/completion/app_generator.py +++ b/api/core/app/apps/completion/app_generator.py @@ -48,6 +48,15 @@ def generate( stream: Literal[False] = False, ) -> dict: ... + @overload + def generate( + self, app_model: App, + user: Union[Account, EndUser], + args: dict, + invoke_from: InvokeFrom, + stream: bool = False, + ) -> dict | Generator[str, None, None]: ... 
+ def generate(self, app_model: App, user: Union[Account, EndUser], args: Any, diff --git a/api/core/app/apps/completion/generate_response_converter.py b/api/core/app/apps/completion/generate_response_converter.py index 14db74dbd04b95..61bb03952fbec4 100644 --- a/api/core/app/apps/completion/generate_response_converter.py +++ b/api/core/app/apps/completion/generate_response_converter.py @@ -1,4 +1,3 @@ -import json from collections.abc import Generator from typing import cast @@ -51,7 +50,7 @@ def convert_blocking_simple_response(cls, blocking_response: CompletionAppBlocki @classmethod def convert_stream_full_response(cls, stream_response: Generator[CompletionAppStreamResponse, None, None]) \ - -> Generator[str, None, None]: + -> Generator[dict | str, None, None]: """ Convert stream full response. :param stream_response: stream response @@ -76,11 +75,11 @@ def convert_stream_full_response(cls, stream_response: Generator[CompletionAppSt response_chunk.update(data) else: response_chunk.update(sub_stream_response.to_dict()) - yield json.dumps(response_chunk) + yield response_chunk @classmethod def convert_stream_simple_response(cls, stream_response: Generator[CompletionAppStreamResponse, None, None]) \ - -> Generator[str, None, None]: + -> Generator[dict | str, None, None]: """ Convert stream simple response. :param stream_response: stream response @@ -111,4 +110,4 @@ def convert_stream_simple_response(cls, stream_response: Generator[CompletionApp else: response_chunk.update(sub_stream_response.to_dict()) - yield json.dumps(response_chunk) + yield response_chunk diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 26bb6c0f4f70b8..f1e79ff3e36d37 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -40,7 +40,8 @@ def generate( args: dict, invoke_from: InvokeFrom, stream: Literal[True] = True, - ) -> Generator[str, None, None]: ... 
+ call_depth: int = 0, + ) -> Generator[dict | str, None, None]: ... @overload def generate( @@ -50,8 +51,20 @@ def generate( args: dict, invoke_from: InvokeFrom, stream: Literal[False] = False, + call_depth: int = 0, ) -> dict: ... + @overload + def generate( + self, app_model: App, + workflow: Workflow, + user: Union[Account, EndUser], + args: dict, + invoke_from: InvokeFrom, + stream: bool = False, + call_depth: int = 0, + ) -> dict | Generator[dict | str, None, None]: ... + def generate( self, app_model: App, workflow: Workflow, @@ -127,7 +140,7 @@ def _generate( application_generate_entity: WorkflowAppGenerateEntity, invoke_from: InvokeFrom, stream: bool = True, - ) -> Union[dict, Generator[str, None, None]]: + ) -> Union[dict, Generator[str | dict, None, None]]: """ Generate App response. @@ -173,7 +186,7 @@ def _generate( def single_iteration_generate(self, app_model: App, workflow: Workflow, node_id: str, - user: Account, + user: Account | EndUser, args: dict, stream: bool = True): """ diff --git a/api/core/app/apps/workflow/generate_response_converter.py b/api/core/app/apps/workflow/generate_response_converter.py index 88bde58ba049ba..48f20d8dc1db90 100644 --- a/api/core/app/apps/workflow/generate_response_converter.py +++ b/api/core/app/apps/workflow/generate_response_converter.py @@ -1,4 +1,3 @@ -import json from collections.abc import Generator from typing import cast @@ -36,7 +35,7 @@ def convert_blocking_simple_response(cls, blocking_response: WorkflowAppBlocking @classmethod def convert_stream_full_response(cls, stream_response: Generator[WorkflowAppStreamResponse, None, None]) \ - -> Generator[str, None, None]: + -> Generator[dict | str, None, None]: """ Convert stream full response. 
:param stream_response: stream response @@ -60,11 +59,11 @@ def convert_stream_full_response(cls, stream_response: Generator[WorkflowAppStre response_chunk.update(data) else: response_chunk.update(sub_stream_response.to_dict()) - yield json.dumps(response_chunk) + yield response_chunk @classmethod def convert_stream_simple_response(cls, stream_response: Generator[WorkflowAppStreamResponse, None, None]) \ - -> Generator[str, None, None]: + -> Generator[dict | str, None, None]: """ Convert stream simple response. :param stream_response: stream response @@ -90,4 +89,4 @@ def convert_stream_simple_response(cls, stream_response: Generator[WorkflowAppSt response_chunk.update(sub_stream_response.to_ignore_detail_dict()) else: response_chunk.update(sub_stream_response.to_dict()) - yield json.dumps(response_chunk) + yield response_chunk diff --git a/api/services/app_generate_service.py b/api/services/app_generate_service.py index 747505977fe282..fd7eeea48fa8be 100644 --- a/api/services/app_generate_service.py +++ b/api/services/app_generate_service.py @@ -42,48 +42,58 @@ def generate( request_id = rate_limit.enter(request_id) if app_model.mode == AppMode.COMPLETION.value: return rate_limit.generate( - CompletionAppGenerator().generate( - app_model=app_model, user=user, args=args, invoke_from=invoke_from, stream=streaming + CompletionAppGenerator.convert_to_event_stream( + CompletionAppGenerator().generate( + app_model=app_model, user=user, args=args, invoke_from=invoke_from, stream=streaming + ), ), request_id, ) elif app_model.mode == AppMode.AGENT_CHAT.value or app_model.is_agent: return rate_limit.generate( - AgentChatAppGenerator().generate( - app_model=app_model, user=user, args=args, invoke_from=invoke_from, stream=streaming + AgentChatAppGenerator.convert_to_event_stream( + AgentChatAppGenerator().generate( + app_model=app_model, user=user, args=args, invoke_from=invoke_from, stream=streaming + ), ), request_id, ) elif app_model.mode == AppMode.CHAT.value: return 
rate_limit.generate( - ChatAppGenerator().generate( - app_model=app_model, user=user, args=args, invoke_from=invoke_from, stream=streaming + ChatAppGenerator.convert_to_event_stream( + ChatAppGenerator().generate( + app_model=app_model, user=user, args=args, invoke_from=invoke_from, stream=streaming + ), ), request_id, ) elif app_model.mode == AppMode.ADVANCED_CHAT.value: workflow = cls._get_workflow(app_model, invoke_from) return rate_limit.generate( - AdvancedChatAppGenerator().generate( + AdvancedChatAppGenerator.convert_to_event_stream( + AdvancedChatAppGenerator().generate( app_model=app_model, workflow=workflow, user=user, args=args, invoke_from=invoke_from, stream=streaming, + ), ), request_id, ) elif app_model.mode == AppMode.WORKFLOW.value: workflow = cls._get_workflow(app_model, invoke_from) return rate_limit.generate( - WorkflowAppGenerator().generate( - app_model=app_model, - workflow=workflow, - user=user, - args=args, - invoke_from=invoke_from, - stream=streaming, + WorkflowAppGenerator.convert_to_event_stream( + WorkflowAppGenerator().generate( + app_model=app_model, + workflow=workflow, + user=user, + args=args, + invoke_from=invoke_from, + stream=streaming, + ), ), request_id, ) @@ -108,13 +118,17 @@ def generate_single_iteration( ): if app_model.mode == AppMode.ADVANCED_CHAT.value: workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER) - return AdvancedChatAppGenerator().single_iteration_generate( - app_model=app_model, workflow=workflow, node_id=node_id, user=user, args=args, stream=streaming + return AdvancedChatAppGenerator.convert_to_event_stream( + AdvancedChatAppGenerator().single_iteration_generate( + app_model=app_model, workflow=workflow, node_id=node_id, user=user, args=args, stream=streaming + ) ) elif app_model.mode == AppMode.WORKFLOW.value: workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER) - return WorkflowAppGenerator().single_iteration_generate( - app_model=app_model, workflow=workflow, node_id=node_id, user=user, 
args=args, stream=streaming + return AdvancedChatAppGenerator.convert_to_event_stream( + WorkflowAppGenerator().single_iteration_generate( + app_model=app_model, workflow=workflow, node_id=node_id, user=user, args=args, stream=streaming + ) ) else: raise ValueError(f"Invalid app mode {app_model.mode}") From 113ff27d07d6e35393bcb641f58bcb1acfafc51f Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 20:06:14 +0800 Subject: [PATCH 015/325] fix: types --- api/controllers/console/app/workflow.py | 43 ++++++++++++++++++++-- api/controllers/console/app/wraps.py | 2 +- api/controllers/inner_api/plugin/plugin.py | 4 +- api/core/app/apps/base_app_generator.py | 2 +- 4 files changed, 44 insertions(+), 7 deletions(-) diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index e44820f6345c48..20ec3ef021a507 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -20,6 +20,7 @@ from libs import helper from libs.helper import TimestampField, uuid_value from libs.login import current_user, login_required +from models.account import Account from models.model import App, AppMode from services.app_dsl_service import AppDslService from services.app_generate_service import AppGenerateService @@ -97,6 +98,9 @@ def post(self, app_model: App): else: abort(415) + if not isinstance(current_user, Account): + raise Forbidden() + workflow_service = WorkflowService() try: @@ -136,6 +140,9 @@ def post(self, app_model: App): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() + + if not isinstance(current_user, Account): + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("data", type=str, required=True, nullable=False, location="json") @@ -160,6 +167,9 @@ def post(self, app_model: App): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise 
Forbidden() + + if not isinstance(current_user, Account): + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") @@ -197,6 +207,9 @@ def post(self, app_model: App, node_id: str): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() + + if not isinstance(current_user, Account): + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") @@ -231,6 +244,9 @@ def post(self, app_model: App, node_id: str): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() + + if not isinstance(current_user, Account): + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") @@ -265,6 +281,9 @@ def post(self, app_model: App): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() + + if not isinstance(current_user, Account): + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") @@ -315,14 +334,21 @@ def post(self, app_model: App, node_id: str): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() + + if not isinstance(current_user, Account): + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") args = parser.parse_args() + inputs = args.get("inputs") + if inputs == None: + raise ValueError("missing inputs") + workflow_service = WorkflowService() workflow_node_execution = workflow_service.run_draft_workflow_node( - app_model=app_model, node_id=node_id, user_inputs=args.get("inputs"), account=current_user + app_model=app_model, node_id=node_id, 
user_inputs=inputs, account=current_user ) return workflow_node_execution @@ -360,6 +386,9 @@ def post(self, app_model: App): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() + + if not isinstance(current_user, Account): + raise Forbidden() workflow_service = WorkflowService() workflow = workflow_service.publish_workflow(app_model=app_model, account=current_user) @@ -397,15 +426,20 @@ def get(self, app_model: App, block_type: str): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() + + if not isinstance(current_user, Account): + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("q", type=str, location="args") args = parser.parse_args() + q = args.get("q") + filters = None - if args.get("q"): + if q: try: - filters = json.loads(args.get("q")) + filters = json.loads(q) except json.JSONDecodeError: raise ValueError("Invalid filters") @@ -428,6 +462,9 @@ def post(self, app_model: App): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() + + if not isinstance(current_user, Account): + raise Forbidden() if request.data: parser = reqparse.RequestParser() diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index 5e0a4bc814633a..8a743d6be9f7da 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -8,7 +8,7 @@ from models.model import App, AppMode -def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode]] = None): +def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode], None] = None): def decorator(view_func): @wraps(view_func) def decorated_view(*args, **kwargs): diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index 
67a51fdb2ecd56..d9b58e1e932657 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -7,7 +7,7 @@ from controllers.inner_api import api from controllers.inner_api.plugin.wraps import get_tenant, plugin_data from controllers.inner_api.wraps import plugin_inner_api_only -from core.plugin.backwards_invocation.model import PluginBackwardsInvocation +from core.plugin.backwards_invocation.model import PluginModelBackwardsInvocation from core.plugin.entities.request import ( RequestInvokeLLM, RequestInvokeModeration, @@ -29,7 +29,7 @@ class PluginInvokeLLMApi(Resource): @plugin_data(payload_type=RequestInvokeLLM) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeLLM): def generator(): - response = PluginBackwardsInvocation.invoke_llm(user_id, tenant_model, payload) + response = PluginModelBackwardsInvocation.invoke_llm(user_id, tenant_model, payload) if isinstance(response, Generator): for chunk in response: yield chunk.model_dump_json().encode() + b'\n\n' diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py index 7727519aef5a08..4159670de4bbbf 100644 --- a/api/core/app/apps/base_app_generator.py +++ b/api/core/app/apps/base_app_generator.py @@ -1,5 +1,5 @@ -from collections.abc import Generator, Mapping import json +from collections.abc import Generator, Mapping from typing import Any, Optional, Union from core.app.app_config.entities import AppConfig, VariableEntity, VariableEntityType From 41ed2e0cc291543ee7338b14bf416c5bcfab7255 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 20:17:17 +0800 Subject: [PATCH 016/325] feat: backwards invoke app --- api/controllers/inner_api/plugin/plugin.py | 8 +- api/core/plugin/backwards_invocation/app.py | 141 ++++++++++++++++++ api/core/plugin/backwards_invocation/base.py | 20 +++ api/core/plugin/backwards_invocation/model.py | 5 +- api/core/plugin/entities/request.py | 14 +- api/libs/login.py | 3 +- 
api/models/tools.py | 9 +- 7 files changed, 187 insertions(+), 13 deletions(-) create mode 100644 api/core/plugin/backwards_invocation/app.py create mode 100644 api/core/plugin/backwards_invocation/base.py diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index d9b58e1e932657..dfe02b76351d23 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -1,5 +1,4 @@ import time -from collections.abc import Generator from flask_restful import Resource, reqparse @@ -30,15 +29,10 @@ class PluginInvokeLLMApi(Resource): def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeLLM): def generator(): response = PluginModelBackwardsInvocation.invoke_llm(user_id, tenant_model, payload) - if isinstance(response, Generator): - for chunk in response: - yield chunk.model_dump_json().encode() + b'\n\n' - else: - yield response.model_dump_json().encode() + b'\n\n' + return PluginModelBackwardsInvocation.convert_to_event_stream(response) return compact_generate_response(generator()) - class PluginInvokeTextEmbeddingApi(Resource): @setup_required @plugin_inner_api_only diff --git a/api/core/plugin/backwards_invocation/app.py b/api/core/plugin/backwards_invocation/app.py new file mode 100644 index 00000000000000..7304c637faf0f9 --- /dev/null +++ b/api/core/plugin/backwards_invocation/app.py @@ -0,0 +1,141 @@ +from collections.abc import Generator, Mapping +from typing import Literal, Union + +from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator +from core.app.apps.workflow.app_generator import WorkflowAppGenerator +from core.app.entities.app_invoke_entities import InvokeFrom +from core.plugin.backwards_invocation.base import BaseBackwardsInvocation +from extensions.ext_database import db +from models.account import Account +from models.model import App, AppMode, EndUser + + +class PluginAppBackwardsInvocation(BaseBackwardsInvocation): + 
@classmethod + def invoke_app( + cls, app_id: str, + user_id: str, + tenant_id: str, + query: str, + inputs: Mapping, + files: list[dict], + ) -> Generator[dict, None, None] | dict: + """ + invoke app + """ + app = cls._get_app(app_id, tenant_id) + if app.mode in [AppMode.ADVANCED_CHAT.value, AppMode.AGENT_CHAT.value, AppMode.CHAT.value]: + return cls.invoke_chat_app(app, user_id, tenant_id, query, inputs, files) + elif app.mode in [AppMode.WORKFLOW.value]: + return cls.invoke_workflow_app(app, user_id, tenant_id, inputs, files) + elif app.mode in [AppMode.COMPLETION]: + return cls.invoke_completion_app(app, user_id, tenant_id, inputs, files) + + raise ValueError("unexpected app type") + + @classmethod + def invoke_chat_app( + cls, + app: App, + user: Account | EndUser, + tenant_id: str, + conversation_id: str, + query: str, + stream: bool, + inputs: Mapping, + files: list[dict], + ) -> Generator[dict, None, None] | dict: + """ + invoke chat app + """ + if app.mode == AppMode.ADVANCED_CHAT.value: + workflow = app.workflow + if not workflow: + raise ValueError("unexpected app type") + + generator = AdvancedChatAppGenerator() + response = generator.generate( + app_model=app, + workflow=workflow, + user=user, + args={ + }, + invoke_from=InvokeFrom.SERVICE_API, + stream=stream + ) + + + + @classmethod + def invoke_workflow_app( + cls, + app: App, + user_id: str, + tenant_id: str, + inputs: Mapping, + files: list[dict], + ): + """ + invoke workflow app + """ + workflow = app.workflow + if not workflow: + raise ValueError("") + + generator = WorkflowAppGenerator() + + result = generator.generate( + app_model=app, + workflow=workflow, + user=cls._get_user(user_id), + args={ + 'inputs': tool_parameters, + 'files': files + }, + invoke_from=self.runtime.invoke_from, + stream=False, + call_depth=self.workflow_call_depth + 1, + ) + + @classmethod + def invoke_completion_app( + cls, + app: App, + user_id: str, + tenant_id: str, + inputs: Mapping, + files: list[dict], + ): + """ 
+ invoke completion app + """ + + @classmethod + def _get_user(cls, user_id: str) -> Union[EndUser, Account]: + """ + get the user by user id + """ + + user = db.session.query(EndUser).filter(EndUser.id == user_id).first() + if not user: + user = db.session.query(Account).filter(Account.id == user_id).first() + + if not user: + raise ValueError('user not found') + + return user + + @classmethod + def _get_app(cls, app_id: str, tenant_id: str) -> App: + """ + get app + """ + app = db.session.query(App). \ + filter(App.id == app_id). \ + filter(App.tenant_id == tenant_id). \ + first() + + if not app: + raise ValueError("app not found") + + return app \ No newline at end of file diff --git a/api/core/plugin/backwards_invocation/base.py b/api/core/plugin/backwards_invocation/base.py new file mode 100644 index 00000000000000..28b691d9905398 --- /dev/null +++ b/api/core/plugin/backwards_invocation/base.py @@ -0,0 +1,20 @@ +import json +from collections.abc import Generator + +from pydantic import BaseModel + + +class BaseBackwardsInvocation: + @classmethod + def convert_to_event_stream(cls, response: Generator[BaseModel | dict, None, None] | BaseModel | dict): + if isinstance(response, Generator): + for chunk in response: + if isinstance(chunk, BaseModel): + yield chunk.model_dump_json().encode() + b'\n\n' + else: + yield json.dumps(chunk).encode() + b'\n\n' + else: + if isinstance(response, BaseModel): + yield response.model_dump_json().encode() + b'\n\n' + else: + yield json.dumps(response).encode() + b'\n\n' \ No newline at end of file diff --git a/api/core/plugin/backwards_invocation/model.py b/api/core/plugin/backwards_invocation/model.py index b6da133119e2b1..7904fd62340ccf 100644 --- a/api/core/plugin/backwards_invocation/model.py +++ b/api/core/plugin/backwards_invocation/model.py @@ -2,12 +2,13 @@ from core.model_manager import ModelManager from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk +from core.plugin.backwards_invocation.base 
import BaseBackwardsInvocation from core.plugin.entities.request import RequestInvokeLLM from core.workflow.nodes.llm.llm_node import LLMNode from models.account import Tenant -class PluginBackwardsInvocation: +class PluginModelBackwardsInvocation(BaseBackwardsInvocation): @classmethod def invoke_llm( cls, user_id: str, tenant: Tenant, payload: RequestInvokeLLM @@ -47,3 +48,5 @@ def handle() -> Generator[LLMResultChunk, None, None]: if response.usage: LLMNode.deduct_llm_quota(tenant_id=tenant.id, model_instance=model_instance, usage=response.usage) return response + + \ No newline at end of file diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index bb08facf751b44..d7781ba37558f0 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -1,4 +1,4 @@ -from typing import Any, Optional +from typing import Any, Literal, Optional from pydantic import BaseModel, Field, field_validator @@ -93,3 +93,15 @@ class RequestInvokeNode(BaseModel): """ Request to invoke node """ + +class RequestInvokeApp(BaseModel): + """ + Request to invoke app + """ + app_id: str + inputs: dict[str, Any] + query: Optional[str] = None + response_mode: Literal["blocking", "streaming"] + conversation_id: Optional[str] = None + user: Optional[str] = None + files: list[dict] = Field(default_factory=list) diff --git a/api/libs/login.py b/api/libs/login.py index 7f05eb8404a0ba..8431d967bd1ee9 100644 --- a/api/libs/login.py +++ b/api/libs/login.py @@ -9,6 +9,7 @@ from extensions.ext_database import db from models.account import Account, Tenant, TenantAccountJoin +from models.model import EndUser #: A proxy for the current user. 
If no user is logged in, this will be an #: anonymous user @@ -96,7 +97,7 @@ def decorated_view(*args, **kwargs): return decorated_view -def _get_user(): +def _get_user() -> EndUser | Account | None: if has_request_context(): if "_login_user" not in g: current_app.login_manager._load_user() diff --git a/api/models/tools.py b/api/models/tools.py index 3ee246eeb33de5..937481583ab799 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -278,7 +278,10 @@ class ToolConversationVariables(db.Model): def variables(self) -> dict: return json.loads(self.variables_str) -class ToolFile(DeclarativeBase): +class Base(DeclarativeBase): + pass + +class ToolFile(Base): """ store the file created by agent """ @@ -293,9 +296,9 @@ class ToolFile(DeclarativeBase): # conversation user id user_id: Mapped[str] = mapped_column(StringUUID) # tenant id - tenant_id: Mapped[StringUUID] = mapped_column(StringUUID) + tenant_id: Mapped[str] = mapped_column(StringUUID) # conversation id - conversation_id: Mapped[StringUUID] = mapped_column(nullable=True) + conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=True) # file key file_key: Mapped[str] = mapped_column(db.String(255), nullable=False) # mime type From 12ea085e224cf2b487c66b30c7183885f9e5fe7d Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 20:50:36 +0800 Subject: [PATCH 017/325] feat: implement invoke app args --- api/controllers/inner_api/plugin/plugin.py | 29 ++++-- api/core/plugin/backwards_invocation/app.py | 96 +++++++++++++++----- api/core/plugin/backwards_invocation/base.py | 6 +- api/core/plugin/entities/request.py | 1 + api/services/plugin/plugin_invoke_service.py | 49 ---------- 5 files changed, 99 insertions(+), 82 deletions(-) delete mode 100644 api/services/plugin/plugin_invoke_service.py diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index dfe02b76351d23..c7886b5773397e 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ 
b/api/controllers/inner_api/plugin/plugin.py @@ -6,10 +6,13 @@ from controllers.inner_api import api from controllers.inner_api.plugin.wraps import get_tenant, plugin_data from controllers.inner_api.wraps import plugin_inner_api_only +from core.plugin.backwards_invocation.app import PluginAppBackwardsInvocation from core.plugin.backwards_invocation.model import PluginModelBackwardsInvocation from core.plugin.entities.request import ( + RequestInvokeApp, RequestInvokeLLM, RequestInvokeModeration, + RequestInvokeNode, RequestInvokeRerank, RequestInvokeSpeech2Text, RequestInvokeTextEmbedding, @@ -104,21 +107,33 @@ class PluginInvokeNodeApi(Resource): @setup_required @plugin_inner_api_only @get_tenant - def post(self, user_id: str, tenant_model: Tenant): - parser = reqparse.RequestParser() - args = parser.parse_args() - - return {'message': 'success'} + @plugin_data(payload_type=RequestInvokeNode) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeNode): + pass class PluginInvokeAppApi(Resource): @setup_required @plugin_inner_api_only @get_tenant - def post(self, user_id: str, tenant_model: Tenant): + @plugin_data(payload_type=RequestInvokeApp) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeApp): parser = reqparse.RequestParser() args = parser.parse_args() - return {'message': 'success'} + response = PluginAppBackwardsInvocation.invoke_app( + app_id=payload.app_id, + user_id=user_id, + tenant_id=tenant_model.id, + conversation_id=payload.conversation_id, + query=payload.query, + stream=payload.stream, + inputs=payload.inputs, + files=payload.files + ) + + return compact_generate_response( + PluginAppBackwardsInvocation.convert_to_event_stream(response) + ) api.add_resource(PluginInvokeLLMApi, '/invoke/llm') api.add_resource(PluginInvokeTextEmbeddingApi, '/invoke/text-embedding') diff --git a/api/core/plugin/backwards_invocation/app.py b/api/core/plugin/backwards_invocation/app.py index 7304c637faf0f9..d20e0535dc7a40 
100644 --- a/api/core/plugin/backwards_invocation/app.py +++ b/api/core/plugin/backwards_invocation/app.py @@ -1,7 +1,10 @@ from collections.abc import Generator, Mapping -from typing import Literal, Union +from typing import Optional, Union from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator +from core.app.apps.agent_chat.app_generator import AgentChatAppGenerator +from core.app.apps.chat.app_generator import ChatAppGenerator +from core.app.apps.completion.app_generator import CompletionAppGenerator from core.app.apps.workflow.app_generator import WorkflowAppGenerator from core.app.entities.app_invoke_entities import InvokeFrom from core.plugin.backwards_invocation.base import BaseBackwardsInvocation @@ -16,20 +19,29 @@ def invoke_app( cls, app_id: str, user_id: str, tenant_id: str, - query: str, + conversation_id: Optional[str], + query: Optional[str], + stream: bool, inputs: Mapping, files: list[dict], - ) -> Generator[dict, None, None] | dict: + ) -> Generator[dict | str, None, None] | dict: """ invoke app """ app = cls._get_app(app_id, tenant_id) + user = cls._get_user(user_id) + + conversation_id = conversation_id or "" + if app.mode in [AppMode.ADVANCED_CHAT.value, AppMode.AGENT_CHAT.value, AppMode.CHAT.value]: - return cls.invoke_chat_app(app, user_id, tenant_id, query, inputs, files) + if not query: + raise ValueError("missing query") + + return cls.invoke_chat_app(app, user, conversation_id, query, stream, inputs, files) elif app.mode in [AppMode.WORKFLOW.value]: - return cls.invoke_workflow_app(app, user_id, tenant_id, inputs, files) + return cls.invoke_workflow_app(app, user, stream, inputs, files) elif app.mode in [AppMode.COMPLETION]: - return cls.invoke_completion_app(app, user_id, tenant_id, inputs, files) + return cls.invoke_completion_app(app, user, stream, inputs, files) raise ValueError("unexpected app type") @@ -38,13 +50,12 @@ def invoke_chat_app( cls, app: App, user: Account | EndUser, - tenant_id: str, 
conversation_id: str, query: str, stream: bool, inputs: Mapping, files: list[dict], - ) -> Generator[dict, None, None] | dict: + ) -> Generator[dict | str, None, None] | dict: """ invoke chat app """ @@ -53,25 +64,54 @@ def invoke_chat_app( if not workflow: raise ValueError("unexpected app type") - generator = AdvancedChatAppGenerator() - response = generator.generate( + return AdvancedChatAppGenerator().generate( app_model=app, workflow=workflow, user=user, args={ + "inputs": inputs, + "query": query, + "files": files, + "conversation_id": conversation_id, + }, + invoke_from=InvokeFrom.SERVICE_API, + stream=stream + ) + elif app.mode == AppMode.AGENT_CHAT.value: + return AgentChatAppGenerator().generate( + app_model=app, + user=user, + args={ + "inputs": inputs, + "query": query, + "files": files, + "conversation_id": conversation_id, + }, + invoke_from=InvokeFrom.SERVICE_API, + stream=stream + ) + elif app.mode == AppMode.CHAT.value: + return ChatAppGenerator().generate( + app_model=app, + user=user, + args={ + "inputs": inputs, + "query": query, + "files": files, + "conversation_id": conversation_id, }, invoke_from=InvokeFrom.SERVICE_API, stream=stream ) + else: + raise ValueError("unexpected app type") - - @classmethod def invoke_workflow_app( cls, app: App, - user_id: str, - tenant_id: str, + user: EndUser | Account, + stream: bool, inputs: Mapping, files: list[dict], ): @@ -82,33 +122,41 @@ def invoke_workflow_app( if not workflow: raise ValueError("") - generator = WorkflowAppGenerator() - - result = generator.generate( + return WorkflowAppGenerator().generate( app_model=app, workflow=workflow, - user=cls._get_user(user_id), + user=user, args={ - 'inputs': tool_parameters, + 'inputs': inputs, 'files': files }, - invoke_from=self.runtime.invoke_from, - stream=False, - call_depth=self.workflow_call_depth + 1, + invoke_from=InvokeFrom.SERVICE_API, + stream=stream, + call_depth=1, ) @classmethod def invoke_completion_app( cls, app: App, - user_id: str, - 
tenant_id: str, + user: EndUser | Account, + stream: bool, inputs: Mapping, files: list[dict], ): """ invoke completion app """ + return CompletionAppGenerator().generate( + app_model=app, + user=user, + args={ + 'inputs': inputs, + 'files': files + }, + invoke_from=InvokeFrom.SERVICE_API, + stream=stream, + ) @classmethod def _get_user(cls, user_id: str) -> Union[EndUser, Account]: diff --git a/api/core/plugin/backwards_invocation/base.py b/api/core/plugin/backwards_invocation/base.py index 28b691d9905398..065e7135949c2c 100644 --- a/api/core/plugin/backwards_invocation/base.py +++ b/api/core/plugin/backwards_invocation/base.py @@ -6,15 +6,17 @@ class BaseBackwardsInvocation: @classmethod - def convert_to_event_stream(cls, response: Generator[BaseModel | dict, None, None] | BaseModel | dict): + def convert_to_event_stream(cls, response: Generator[BaseModel | dict | str, None, None] | BaseModel | dict): if isinstance(response, Generator): for chunk in response: if isinstance(chunk, BaseModel): yield chunk.model_dump_json().encode() + b'\n\n' + if isinstance(chunk, str): + yield f"event: {chunk}\n\n".encode() else: yield json.dumps(chunk).encode() + b'\n\n' else: if isinstance(response, BaseModel): yield response.model_dump_json().encode() + b'\n\n' else: - yield json.dumps(response).encode() + b'\n\n' \ No newline at end of file + yield json.dumps(response).encode() + b'\n\n' diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index d7781ba37558f0..2a24beb0c80af1 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -105,3 +105,4 @@ class RequestInvokeApp(BaseModel): conversation_id: Optional[str] = None user: Optional[str] = None files: list[dict] = Field(default_factory=list) + stream: bool = Field(default=False) \ No newline at end of file diff --git a/api/services/plugin/plugin_invoke_service.py b/api/services/plugin/plugin_invoke_service.py deleted file mode 100644 index 
317b42a6e16f75..00000000000000 --- a/api/services/plugin/plugin_invoke_service.py +++ /dev/null @@ -1,49 +0,0 @@ -from collections.abc import Generator -from typing import Any, Union - -from core.app.entities.app_invoke_entities import InvokeFrom -from core.callback_handler.plugin_tool_callback_handler import DifyPluginCallbackHandler -from core.model_runtime.entities.model_entities import ModelType -from core.tools.entities.tool_entities import ToolInvokeMessage, ToolProviderType -from core.tools.tool_engine import ToolEngine -from core.tools.tool_manager import ToolManager -from core.tools.utils.message_transformer import ToolFileMessageTransformer -from core.workflow.entities.node_entities import NodeType -from models.account import Tenant -from services.tools.tools_transform_service import ToolTransformService - - -class PluginInvokeService: - @classmethod - def invoke_tool(cls, user_id: str, invoke_from: InvokeFrom, tenant: Tenant, - tool_provider_type: ToolProviderType, tool_provider: str, tool_name: str, - tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage]: - """ - Invokes a tool with the given user ID and tool parameters. - """ - tool_runtime = ToolManager.get_tool_runtime(tool_provider_type, provider_id=tool_provider, - tool_name=tool_name, tenant_id=tenant.id, - invoke_from=invoke_from) - - response = ToolEngine.plugin_invoke(tool_runtime, - tool_parameters, - user_id, - callback=DifyPluginCallbackHandler()) - response = ToolFileMessageTransformer.transform_tool_invoke_messages(response) - return ToolTransformService.transform_messages_to_dict(response) - - @classmethod - def invoke_model(cls, user_id: str, tenant: Tenant, - model_provider: str, model_name: str, model_type: ModelType, - model_parameters: dict[str, Any]) -> Union[dict, Generator[ToolInvokeMessage]]: - """ - Invokes a model with the given user ID and model parameters. 
- """ - - @classmethod - def invoke_workflow_node(cls, user_id: str, tenant: Tenant, - node_type: NodeType, node_data: dict[str, Any], - inputs: dict[str, Any]) -> Generator[ToolInvokeMessage]: - """ - Invokes a workflow node with the given user ID and node parameters. - """ \ No newline at end of file From ece82b87bfa9a9b555af73c49884c9be870129b1 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 21:14:23 +0800 Subject: [PATCH 018/325] feat: invoke app --- api/controllers/inner_api/plugin/plugin.py | 7 ++----- api/core/plugin/backwards_invocation/base.py | 2 +- api/core/plugin/entities/request.py | 2 +- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index c7886b5773397e..1586d39b4178d7 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -1,6 +1,6 @@ import time -from flask_restful import Resource, reqparse +from flask_restful import Resource from controllers.console.setup import setup_required from controllers.inner_api import api @@ -117,16 +117,13 @@ class PluginInvokeAppApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeApp) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeApp): - parser = reqparse.RequestParser() - args = parser.parse_args() - response = PluginAppBackwardsInvocation.invoke_app( app_id=payload.app_id, user_id=user_id, tenant_id=tenant_model.id, conversation_id=payload.conversation_id, query=payload.query, - stream=payload.stream, + stream=payload.response_mode == 'streaming', inputs=payload.inputs, files=payload.files ) diff --git a/api/core/plugin/backwards_invocation/base.py b/api/core/plugin/backwards_invocation/base.py index 065e7135949c2c..7b699b4d6784f5 100644 --- a/api/core/plugin/backwards_invocation/base.py +++ b/api/core/plugin/backwards_invocation/base.py @@ -11,7 +11,7 @@ def convert_to_event_stream(cls, response: 
Generator[BaseModel | dict | str, Non for chunk in response: if isinstance(chunk, BaseModel): yield chunk.model_dump_json().encode() + b'\n\n' - if isinstance(chunk, str): + elif isinstance(chunk, str): yield f"event: {chunk}\n\n".encode() else: yield json.dumps(chunk).encode() + b'\n\n' diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 2a24beb0c80af1..fd22d1f057ebc7 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -105,4 +105,4 @@ class RequestInvokeApp(BaseModel): conversation_id: Optional[str] = None user: Optional[str] = None files: list[dict] = Field(default_factory=list) - stream: bool = Field(default=False) \ No newline at end of file + \ No newline at end of file From 50a5cfe56a1fd583e682964468a5779d934a23ab Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 29 Aug 2024 21:48:20 +0800 Subject: [PATCH 019/325] fix: endpoint using default user --- api/core/plugin/backwards_invocation/app.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/api/core/plugin/backwards_invocation/app.py b/api/core/plugin/backwards_invocation/app.py index d20e0535dc7a40..b1e1065affce80 100644 --- a/api/core/plugin/backwards_invocation/app.py +++ b/api/core/plugin/backwards_invocation/app.py @@ -1,6 +1,7 @@ from collections.abc import Generator, Mapping from typing import Optional, Union +from controllers.service_api.wraps import create_or_update_end_user_for_user_id from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator from core.app.apps.agent_chat.app_generator import AgentChatAppGenerator from core.app.apps.chat.app_generator import ChatAppGenerator @@ -29,7 +30,10 @@ def invoke_app( invoke app """ app = cls._get_app(app_id, tenant_id) - user = cls._get_user(user_id) + if not user_id: + user = create_or_update_end_user_for_user_id(app) + else: + user = cls._get_user(user_id) conversation_id = conversation_id or "" @@ -178,10 +182,13 @@ 
def _get_app(cls, app_id: str, tenant_id: str) -> App: """ get app """ - app = db.session.query(App). \ - filter(App.id == app_id). \ - filter(App.tenant_id == tenant_id). \ - first() + try: + app = db.session.query(App). \ + filter(App.id == app_id). \ + filter(App.tenant_id == tenant_id). \ + first() + except Exception: + raise ValueError("app not found") if not app: raise ValueError("app not found") From 1fa3b9cfd8834e2f80ed2104498098acd189e059 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 30 Aug 2024 14:23:14 +0800 Subject: [PATCH 020/325] refactor tools --- api/controllers/inner_api/plugin/plugin.py | 9 ++ api/controllers/inner_api/wraps.py | 2 + api/core/app/entities/queue_entities.py | 2 +- api/core/entities/parameter_entities.py | 30 +++++ api/core/entities/provider_entities.py | 55 ++++++++- api/core/file/tool_file_parser.py | 7 +- api/core/plugin/entities/request.py | 14 ++- api/core/tools/entities/api_entities.py | 4 +- api/core/tools/entities/tool_entities.py | 71 ++--------- api/core/tools/provider/api_tool_provider.py | 18 +-- api/core/tools/provider/app_tool_provider.py | 115 ------------------ .../tools/provider/builtin_tool_provider.py | 92 ++------------ api/core/tools/provider/tool_provider.py | 113 +++-------------- .../tools/provider/workflow_tool_provider.py | 19 +-- api/core/tools/tool/tool.py | 2 +- api/core/tools/tool_manager.py | 89 ++++++++------ api/core/tools/utils/configuration.py | 8 +- .../utils/workflow_configuration_sync.py | 6 +- .../tools/api_tools_manage_service.py | 14 +-- api/services/tools/tools_transform_service.py | 4 +- 20 files changed, 239 insertions(+), 435 deletions(-) create mode 100644 api/core/entities/parameter_entities.py delete mode 100644 api/core/tools/provider/app_tool_provider.py diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index 1586d39b4178d7..c3911f31aeb56a 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ 
b/api/controllers/inner_api/plugin/plugin.py @@ -10,6 +10,7 @@ from core.plugin.backwards_invocation.model import PluginModelBackwardsInvocation from core.plugin.entities.request import ( RequestInvokeApp, + RequestInvokeEncrypt, RequestInvokeLLM, RequestInvokeModeration, RequestInvokeNode, @@ -132,6 +133,14 @@ def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeApp): PluginAppBackwardsInvocation.convert_to_event_stream(response) ) +class PluginInvokeEncryptApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + @plugin_data(payload_type=RequestInvokeEncrypt) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeEncrypt): + """""" + api.add_resource(PluginInvokeLLMApi, '/invoke/llm') api.add_resource(PluginInvokeTextEmbeddingApi, '/invoke/text-embedding') api.add_resource(PluginInvokeRerankApi, '/invoke/rerank') diff --git a/api/controllers/inner_api/wraps.py b/api/controllers/inner_api/wraps.py index 754b792d6193c1..235359a9bb0bb6 100644 --- a/api/controllers/inner_api/wraps.py +++ b/api/controllers/inner_api/wraps.py @@ -46,6 +46,8 @@ def decorated(*args, **kwargs): user_id = user_id.split(" ")[1] inner_api_key = request.headers.get("X-Inner-Api-Key") + if not inner_api_key: + raise ValueError("inner api key not found") data_to_sign = f"DIFY {user_id}" diff --git a/api/core/app/entities/queue_entities.py b/api/core/app/entities/queue_entities.py index 15348251f2de35..73476bef58a302 100644 --- a/api/core/app/entities/queue_entities.py +++ b/api/core/app/entities/queue_entities.py @@ -60,7 +60,7 @@ class QueueIterationStartEvent(AppQueueEvent): node_data: BaseNodeData node_run_index: int - inputs: dict = None + inputs: Optional[dict] = None predecessor_node_id: Optional[str] = None metadata: Optional[dict] = None diff --git a/api/core/entities/parameter_entities.py b/api/core/entities/parameter_entities.py new file mode 100644 index 00000000000000..cc402de7736a55 --- /dev/null +++ 
b/api/core/entities/parameter_entities.py @@ -0,0 +1,30 @@ +from enum import Enum + + +class CommonParameterType(Enum): + SECRET_INPUT = "secret-input" + TEXT_INPUT = "text-input" + SELECT = "select" + STRING = "string" + NUMBER = "number" + FILE = "file" + BOOLEAN = "boolean" + APP_SELECTOR = "app-selector" + MODEL_CONFIG = "model-config" + + +class AppSelectorScope(Enum): + ALL = "all" + CHAT = "chat" + WORKFLOW = "workflow" + COMPLETION = "completion" + + +class ModelConfigScope(Enum): + LLM = "llm" + TEXT_EMBEDDING = "text-embedding" + RERANK = "rerank" + TTS = "tts" + SPEECH2TEXT = "speech2text" + MODERATION = "moderation" + VISION = "vision" \ No newline at end of file diff --git a/api/core/entities/provider_entities.py b/api/core/entities/provider_entities.py index 0d5b0a1b2c6ba6..ae78d9ecf93b27 100644 --- a/api/core/entities/provider_entities.py +++ b/api/core/entities/provider_entities.py @@ -1,8 +1,10 @@ from enum import Enum -from typing import Optional +from typing import Optional, Union -from pydantic import BaseModel, ConfigDict +from pydantic import BaseModel, ConfigDict, Field +from core.entities.parameter_entities import AppSelectorScope, CommonParameterType, ModelConfigScope +from core.model_runtime.entities.common_entities import I18nObject from core.model_runtime.entities.model_entities import ModelType from models.provider import ProviderQuotaType @@ -100,3 +102,52 @@ class ModelSettings(BaseModel): # pydantic configs model_config = ConfigDict(protected_namespaces=()) + +class BasicProviderConfig(BaseModel): + """ + Base model class for common provider settings like credentials + """ + class Type(Enum): + SECRET_INPUT = CommonParameterType.SECRET_INPUT.value + TEXT_INPUT = CommonParameterType.TEXT_INPUT.value + SELECT = CommonParameterType.SELECT.value + BOOLEAN = CommonParameterType.BOOLEAN.value + APP_SELECTOR = CommonParameterType.APP_SELECTOR.value + MODEL_CONFIG = CommonParameterType.MODEL_CONFIG.value + + @classmethod + def value_of(cls, 
value: str) -> "ProviderConfig.Type": + """ + Get value of given mode. + + :param value: mode value + :return: mode + """ + for mode in cls: + if mode.value == value: + return mode + raise ValueError(f'invalid mode value {value}') + + @staticmethod + def default(value: str) -> str: + return "" + + type: Type = Field(..., description="The type of the credentials") + name: str = Field(..., description="The name of the credentials") + +class ProviderConfig(BasicProviderConfig): + """ + Model class for common provider settings like credentials + """ + class Option(BaseModel): + value: str = Field(..., description="The value of the option") + label: I18nObject = Field(..., description="The label of the option") + + scope: AppSelectorScope | ModelConfigScope | None + required: bool = False + default: Optional[Union[int, str]] = None + options: Optional[list[Option]] = None + label: Optional[I18nObject] = None + help: Optional[I18nObject] = None + url: Optional[str] = None + placeholder: Optional[I18nObject] = None diff --git a/api/core/file/tool_file_parser.py b/api/core/file/tool_file_parser.py index ea8605ac577e3a..98226e89c057f4 100644 --- a/api/core/file/tool_file_parser.py +++ b/api/core/file/tool_file_parser.py @@ -1,4 +1,9 @@ -tool_file_manager = { +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from core.tools.tool_file_manager import ToolFileManager + +tool_file_manager: dict[str, Any] = { 'manager': None } diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index fd22d1f057ebc7..0533746815f0a0 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -1,7 +1,9 @@ +from collections.abc import Mapping from typing import Any, Literal, Optional from pydantic import BaseModel, Field, field_validator +from core.entities.provider_entities import BasicProviderConfig from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, PromptMessage, @@ -30,11 +32,10 @@ 
class RequestInvokeLLM(BaseRequestInvokeModel): """ Request to invoke LLM """ - model_type: ModelType = ModelType.LLM mode: str model_parameters: dict[str, Any] = Field(default_factory=dict) - prompt_messages: list[PromptMessage] + prompt_messages: list[PromptMessage] = Field(default_factory=list) tools: Optional[list[PromptMessageTool]] = Field(default_factory=list) stop: Optional[list[str]] = Field(default_factory=list) stream: Optional[bool] = False @@ -105,4 +106,11 @@ class RequestInvokeApp(BaseModel): conversation_id: Optional[str] = None user: Optional[str] = None files: list[dict] = Field(default_factory=list) - \ No newline at end of file + +class RequestInvokeEncrypt(BaseModel): + """ + Request to encryption + """ + opt: Literal["encrypt", "decrypt"] + data: dict = Field(default_factory=dict) + config: Mapping[str, BasicProviderConfig] = Field(default_factory=Mapping) diff --git a/api/core/tools/entities/api_entities.py b/api/core/tools/entities/api_entities.py index 2b01b8fd8e89c9..71db8d8b2dfc77 100644 --- a/api/core/tools/entities/api_entities.py +++ b/api/core/tools/entities/api_entities.py @@ -4,7 +4,7 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import ToolProviderCredentials, ToolProviderType +from core.tools.entities.tool_entities import ProviderConfig, ToolProviderType from core.tools.tool.tool import ToolParameter @@ -62,4 +62,4 @@ def to_dict(self) -> dict: } class UserToolProviderCredentials(BaseModel): - credentials: dict[str, ToolProviderCredentials] \ No newline at end of file + credentials: dict[str, ProviderConfig] \ No newline at end of file diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index c266db1cdbd1d8..98efb92a0d9d73 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -3,6 +3,7 @@ from pydantic import BaseModel, 
Field, field_validator +from core.entities.parameter_entities import AppSelectorScope, CommonParameterType, ModelConfigScope from core.tools.entities.common_entities import I18nObject @@ -137,12 +138,12 @@ def transform_id_to_str(cls, value) -> str: class ToolParameter(BaseModel): class ToolParameterType(str, Enum): - STRING = "string" - NUMBER = "number" - BOOLEAN = "boolean" - SELECT = "select" - SECRET_INPUT = "secret-input" - FILE = "file" + STRING = CommonParameterType.STRING.value + NUMBER = CommonParameterType.NUMBER.value + BOOLEAN = CommonParameterType.BOOLEAN.value + SELECT = CommonParameterType.SELECT.value + SECRET_INPUT = CommonParameterType.SECRET_INPUT.value + FILE = CommonParameterType.FILE.value class ToolParameterForm(Enum): SCHEMA = "schema" # should be set while adding tool @@ -151,16 +152,17 @@ class ToolParameterForm(Enum): name: str = Field(..., description="The name of the parameter") label: I18nObject = Field(..., description="The label presented to the user") - human_description: Optional[I18nObject] = Field(None, description="The description presented to the user") - placeholder: Optional[I18nObject] = Field(None, description="The placeholder presented to the user") + human_description: Optional[I18nObject] = Field(default=None, description="The description presented to the user") + placeholder: Optional[I18nObject] = Field(default=None, description="The placeholder presented to the user") type: ToolParameterType = Field(..., description="The type of the parameter") + scope: AppSelectorScope | ModelConfigScope | None = None form: ToolParameterForm = Field(..., description="The form of the parameter, schema/form/llm") llm_description: Optional[str] = None required: Optional[bool] = False default: Optional[Union[float, int, str]] = None min: Optional[Union[float, int]] = None max: Optional[Union[float, int]] = None - options: Optional[list[ToolParameterOption]] = None + options: list[ToolParameterOption] = Field(default_factory=list) 
@classmethod def get_simple_instance(cls, @@ -211,57 +213,6 @@ class ToolIdentity(BaseModel): provider: str = Field(..., description="The provider of the tool") icon: Optional[str] = None -class ToolCredentialsOption(BaseModel): - value: str = Field(..., description="The value of the option") - label: I18nObject = Field(..., description="The label of the option") - -class ToolProviderCredentials(BaseModel): - class CredentialsType(Enum): - SECRET_INPUT = "secret-input" - TEXT_INPUT = "text-input" - SELECT = "select" - BOOLEAN = "boolean" - - @classmethod - def value_of(cls, value: str) -> "ToolProviderCredentials.CredentialsType": - """ - Get value of given mode. - - :param value: mode value - :return: mode - """ - for mode in cls: - if mode.value == value: - return mode - raise ValueError(f'invalid mode value {value}') - - @staticmethod - def default(value: str) -> str: - return "" - - name: str = Field(..., description="The name of the credentials") - type: CredentialsType = Field(..., description="The type of the credentials") - required: bool = False - default: Optional[Union[int, str]] = None - options: Optional[list[ToolCredentialsOption]] = None - label: Optional[I18nObject] = None - help: Optional[I18nObject] = None - url: Optional[str] = None - placeholder: Optional[I18nObject] = None - - def to_dict(self) -> dict: - return { - 'name': self.name, - 'type': self.type.value, - 'required': self.required, - 'default': self.default, - 'options': self.options, - 'help': self.help.to_dict() if self.help else None, - 'label': self.label.to_dict() if self.label else None, - 'url': self.url, - 'placeholder': self.placeholder.to_dict() if self.placeholder else None, - } - class ToolRuntimeVariableType(Enum): TEXT = "text" IMAGE = "image" diff --git a/api/core/tools/provider/api_tool_provider.py b/api/core/tools/provider/api_tool_provider.py index ae80ad2114cce0..fc7fcb675a7ad8 100644 --- a/api/core/tools/provider/api_tool_provider.py +++ 
b/api/core/tools/provider/api_tool_provider.py @@ -3,8 +3,8 @@ from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ( ApiProviderAuthType, + ProviderConfig, ToolCredentialsOption, - ToolProviderCredentials, ToolProviderType, ) from core.tools.provider.tool_provider import ToolProviderController @@ -20,10 +20,10 @@ class ApiToolProviderController(ToolProviderController): @staticmethod def from_db(db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> 'ApiToolProviderController': credentials_schema = { - 'auth_type': ToolProviderCredentials( + 'auth_type': ProviderConfig( name='auth_type', required=True, - type=ToolProviderCredentials.CredentialsType.SELECT, + type=ProviderConfig.Type.SELECT, options=[ ToolCredentialsOption(value='none', label=I18nObject(en_US='None', zh_Hans='无')), ToolCredentialsOption(value='api_key', label=I18nObject(en_US='api_key', zh_Hans='api_key')) @@ -38,30 +38,30 @@ def from_db(db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> 'Ap if auth_type == ApiProviderAuthType.API_KEY: credentials_schema = { **credentials_schema, - 'api_key_header': ToolProviderCredentials( + 'api_key_header': ProviderConfig( name='api_key_header', required=False, default='api_key', - type=ToolProviderCredentials.CredentialsType.TEXT_INPUT, + type=ProviderConfig.Type.TEXT_INPUT, help=I18nObject( en_US='The header name of the api key', zh_Hans='携带 api key 的 header 名称' ) ), - 'api_key_value': ToolProviderCredentials( + 'api_key_value': ProviderConfig( name='api_key_value', required=True, - type=ToolProviderCredentials.CredentialsType.SECRET_INPUT, + type=ProviderConfig.Type.SECRET_INPUT, help=I18nObject( en_US='The api key', zh_Hans='api key的值' ) ), - 'api_key_header_prefix': ToolProviderCredentials( + 'api_key_header_prefix': ProviderConfig( name='api_key_header_prefix', required=False, default='basic', - type=ToolProviderCredentials.CredentialsType.SELECT, + type=ProviderConfig.Type.SELECT, 
help=I18nObject( en_US='The prefix of the api key header', zh_Hans='api key header 的前缀' diff --git a/api/core/tools/provider/app_tool_provider.py b/api/core/tools/provider/app_tool_provider.py deleted file mode 100644 index 2d472e0a93c866..00000000000000 --- a/api/core/tools/provider/app_tool_provider.py +++ /dev/null @@ -1,115 +0,0 @@ -import logging -from typing import Any - -from core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import ToolParameter, ToolParameterOption, ToolProviderType -from core.tools.provider.tool_provider import ToolProviderController -from core.tools.tool.tool import Tool -from extensions.ext_database import db -from models.model import App, AppModelConfig -from models.tools import PublishedAppTool - -logger = logging.getLogger(__name__) - -class AppToolProviderEntity(ToolProviderController): - @property - def provider_type(self) -> ToolProviderType: - return ToolProviderType.APP - - def _validate_credentials(self, tool_name: str, credentials: dict[str, Any]) -> None: - pass - - def validate_parameters(self, tool_name: str, tool_parameters: dict[str, Any]) -> None: - pass - - def get_tools(self, user_id: str) -> list[Tool]: - db_tools: list[PublishedAppTool] = db.session.query(PublishedAppTool).filter( - PublishedAppTool.user_id == user_id, - ).all() - - if not db_tools or len(db_tools) == 0: - return [] - - tools: list[Tool] = [] - - for db_tool in db_tools: - tool = { - 'identity': { - 'author': db_tool.author, - 'name': db_tool.tool_name, - 'label': { - 'en_US': db_tool.tool_name, - 'zh_Hans': db_tool.tool_name - }, - 'icon': '' - }, - 'description': { - 'human': { - 'en_US': db_tool.description_i18n.en_US, - 'zh_Hans': db_tool.description_i18n.zh_Hans - }, - 'llm': db_tool.llm_description - }, - 'parameters': [] - } - # get app from db - app: App = db_tool.app - - if not app: - logger.error(f"app {db_tool.app_id} not found") - continue - - app_model_config: AppModelConfig = 
app.app_model_config - user_input_form_list = app_model_config.user_input_form_list - for input_form in user_input_form_list: - # get type - form_type = input_form.keys()[0] - default = input_form[form_type]['default'] - required = input_form[form_type]['required'] - label = input_form[form_type]['label'] - variable_name = input_form[form_type]['variable_name'] - options = input_form[form_type].get('options', []) - if form_type == 'paragraph' or form_type == 'text-input': - tool['parameters'].append(ToolParameter( - name=variable_name, - label=I18nObject( - en_US=label, - zh_Hans=label - ), - human_description=I18nObject( - en_US=label, - zh_Hans=label - ), - llm_description=label, - form=ToolParameter.ToolParameterForm.FORM, - type=ToolParameter.ToolParameterType.STRING, - required=required, - default=default - )) - elif form_type == 'select': - tool['parameters'].append(ToolParameter( - name=variable_name, - label=I18nObject( - en_US=label, - zh_Hans=label - ), - human_description=I18nObject( - en_US=label, - zh_Hans=label - ), - llm_description=label, - form=ToolParameter.ToolParameterForm.FORM, - type=ToolParameter.ToolParameterType.SELECT, - required=required, - default=default, - options=[ToolParameterOption( - value=option, - label=I18nObject( - en_US=option, - zh_Hans=option - ) - ) for option in options] - )) - - tools.append(Tool(**tool)) - return tools \ No newline at end of file diff --git a/api/core/tools/provider/builtin_tool_provider.py b/api/core/tools/provider/builtin_tool_provider.py index bcf41c90edbfcd..7ad8a5468b131d 100644 --- a/api/core/tools/provider/builtin_tool_provider.py +++ b/api/core/tools/provider/builtin_tool_provider.py @@ -2,22 +2,23 @@ from os import listdir, path from typing import Any +from pydantic import Field + +from core.entities.provider_entities import ProviderConfig from core.helper.module_import_helper import load_single_subclass_from_source -from core.tools.entities.tool_entities import ToolParameter, 
ToolProviderCredentials, ToolProviderType +from core.tools.entities.tool_entities import ToolProviderType from core.tools.entities.values import ToolLabelEnum, default_tool_label_dict from core.tools.errors import ( - ToolNotFoundError, - ToolParameterValidationError, ToolProviderNotFoundError, ) from core.tools.provider.tool_provider import ToolProviderController from core.tools.tool.builtin_tool import BuiltinTool -from core.tools.tool.tool import Tool -from core.tools.utils.tool_parameter_converter import ToolParameterConverter from core.tools.utils.yaml_utils import load_yaml_file class BuiltinToolProviderController(ToolProviderController): + tools: list[BuiltinTool] = Field(default_factory=list) + def __init__(self, **data: Any) -> None: if self.provider_type == ToolProviderType.API or self.provider_type == ToolProviderType.APP: super().__init__(**data) @@ -41,7 +42,7 @@ def __init__(self, **data: Any) -> None: 'credentials_schema': provider_yaml.get('credentials_for_provider', None), }) - def _get_builtin_tools(self) -> list[Tool]: + def _get_builtin_tools(self) -> list[BuiltinTool]: """ returns a list of tools that the provider can provide @@ -72,7 +73,7 @@ def _get_builtin_tools(self) -> list[Tool]: self.tools = tools return tools - def get_credentials_schema(self) -> dict[str, ToolProviderCredentials]: + def get_credentials_schema(self) -> dict[str, ProviderConfig]: """ returns the credentials schema of the provider @@ -83,7 +84,7 @@ def get_credentials_schema(self) -> dict[str, ToolProviderCredentials]: return self.credentials_schema.copy() - def get_tools(self) -> list[Tool]: + def get_tools(self) -> list[BuiltinTool]: """ returns a list of tools that the provider can provide @@ -91,24 +92,12 @@ def get_tools(self) -> list[Tool]: """ return self._get_builtin_tools() - def get_tool(self, tool_name: str) -> Tool: + def get_tool(self, tool_name: str) -> BuiltinTool | None: """ returns the tool that the provider can provide """ return next(filter(lambda x: 
x.identity.name == tool_name, self.get_tools()), None) - def get_parameters(self, tool_name: str) -> list[ToolParameter]: - """ - returns the parameters of the tool - - :param tool_name: the name of the tool, defined in `get_tools` - :return: list of parameters - """ - tool = next(filter(lambda x: x.identity.name == tool_name, self.get_tools()), None) - if tool is None: - raise ToolNotFoundError(f'tool {tool_name} not found') - return tool.parameters - @property def need_credentials(self) -> bool: """ @@ -143,67 +132,6 @@ def _get_tool_labels(self) -> list[ToolLabelEnum]: returns the labels of the provider """ return self.identity.tags or [] - - def validate_parameters(self, tool_id: int, tool_name: str, tool_parameters: dict[str, Any]) -> None: - """ - validate the parameters of the tool and set the default value if needed - - :param tool_name: the name of the tool, defined in `get_tools` - :param tool_parameters: the parameters of the tool - """ - tool_parameters_schema = self.get_parameters(tool_name) - - tool_parameters_need_to_validate: dict[str, ToolParameter] = {} - for parameter in tool_parameters_schema: - tool_parameters_need_to_validate[parameter.name] = parameter - - for parameter in tool_parameters: - if parameter not in tool_parameters_need_to_validate: - raise ToolParameterValidationError(f'parameter {parameter} not found in tool {tool_name}') - - # check type - parameter_schema = tool_parameters_need_to_validate[parameter] - if parameter_schema.type == ToolParameter.ToolParameterType.STRING: - if not isinstance(tool_parameters[parameter], str): - raise ToolParameterValidationError(f'parameter {parameter} should be string') - - elif parameter_schema.type == ToolParameter.ToolParameterType.NUMBER: - if not isinstance(tool_parameters[parameter], int | float): - raise ToolParameterValidationError(f'parameter {parameter} should be number') - - if parameter_schema.min is not None and tool_parameters[parameter] < parameter_schema.min: - raise 
ToolParameterValidationError(f'parameter {parameter} should be greater than {parameter_schema.min}') - - if parameter_schema.max is not None and tool_parameters[parameter] > parameter_schema.max: - raise ToolParameterValidationError(f'parameter {parameter} should be less than {parameter_schema.max}') - - elif parameter_schema.type == ToolParameter.ToolParameterType.BOOLEAN: - if not isinstance(tool_parameters[parameter], bool): - raise ToolParameterValidationError(f'parameter {parameter} should be boolean') - - elif parameter_schema.type == ToolParameter.ToolParameterType.SELECT: - if not isinstance(tool_parameters[parameter], str): - raise ToolParameterValidationError(f'parameter {parameter} should be string') - - options = parameter_schema.options - if not isinstance(options, list): - raise ToolParameterValidationError(f'parameter {parameter} options should be list') - - if tool_parameters[parameter] not in [x.value for x in options]: - raise ToolParameterValidationError(f'parameter {parameter} should be one of {options}') - - tool_parameters_need_to_validate.pop(parameter) - - for parameter in tool_parameters_need_to_validate: - parameter_schema = tool_parameters_need_to_validate[parameter] - if parameter_schema.required: - raise ToolParameterValidationError(f'parameter {parameter} is required') - - # the parameter is not set currently, set the default value if needed - if parameter_schema.default is not None: - default_value = ToolParameterConverter.cast_parameter_by_type(parameter_schema.default, - parameter_schema.type) - tool_parameters[parameter] = default_value def validate_credentials(self, credentials: dict[str, Any]) -> None: """ diff --git a/api/core/tools/provider/tool_provider.py b/api/core/tools/provider/tool_provider.py index ef1ace9c7c31e7..ac770a2a60125c 100644 --- a/api/core/tools/provider/tool_provider.py +++ b/api/core/tools/provider/tool_provider.py @@ -1,25 +1,23 @@ from abc import ABC, abstractmethod -from typing import Any, Optional +from 
typing import Any -from pydantic import BaseModel +from pydantic import BaseModel, Field +from core.entities.provider_entities import ProviderConfig from core.tools.entities.tool_entities import ( - ToolParameter, - ToolProviderCredentials, ToolProviderIdentity, ToolProviderType, ) -from core.tools.errors import ToolNotFoundError, ToolParameterValidationError, ToolProviderCredentialValidationError +from core.tools.errors import ToolProviderCredentialValidationError from core.tools.tool.tool import Tool -from core.tools.utils.tool_parameter_converter import ToolParameterConverter class ToolProviderController(BaseModel, ABC): - identity: Optional[ToolProviderIdentity] = None - tools: Optional[list[Tool]] = None - credentials_schema: Optional[dict[str, ToolProviderCredentials]] = None + identity: ToolProviderIdentity + tools: list[Tool] = Field(default_factory=list) + credentials_schema: dict[str, ProviderConfig] = Field(default_factory=dict) - def get_credentials_schema(self) -> dict[str, ToolProviderCredentials]: + def get_credentials_schema(self) -> dict[str, ProviderConfig]: """ returns the credentials schema of the provider @@ -27,15 +25,6 @@ def get_credentials_schema(self) -> dict[str, ToolProviderCredentials]: """ return self.credentials_schema.copy() - @abstractmethod - def get_tools(self) -> list[Tool]: - """ - returns a list of tools that the provider can provide - - :return: list of tools - """ - pass - @abstractmethod def get_tool(self, tool_name: str) -> Tool: """ @@ -45,18 +34,6 @@ def get_tool(self, tool_name: str) -> Tool: """ pass - def get_parameters(self, tool_name: str) -> list[ToolParameter]: - """ - returns the parameters of the tool - - :param tool_name: the name of the tool, defined in `get_tools` - :return: list of parameters - """ - tool = next(filter(lambda x: x.identity.name == tool_name, self.get_tools()), None) - if tool is None: - raise ToolNotFoundError(f'tool {tool_name} not found') - return tool.parameters - @property def 
provider_type(self) -> ToolProviderType: """ @@ -66,66 +43,6 @@ def provider_type(self) -> ToolProviderType: """ return ToolProviderType.BUILT_IN - def validate_parameters(self, tool_id: int, tool_name: str, tool_parameters: dict[str, Any]) -> None: - """ - validate the parameters of the tool and set the default value if needed - - :param tool_name: the name of the tool, defined in `get_tools` - :param tool_parameters: the parameters of the tool - """ - tool_parameters_schema = self.get_parameters(tool_name) - - tool_parameters_need_to_validate: dict[str, ToolParameter] = {} - for parameter in tool_parameters_schema: - tool_parameters_need_to_validate[parameter.name] = parameter - - for parameter in tool_parameters: - if parameter not in tool_parameters_need_to_validate: - raise ToolParameterValidationError(f'parameter {parameter} not found in tool {tool_name}') - - # check type - parameter_schema = tool_parameters_need_to_validate[parameter] - if parameter_schema.type == ToolParameter.ToolParameterType.STRING: - if not isinstance(tool_parameters[parameter], str): - raise ToolParameterValidationError(f'parameter {parameter} should be string') - - elif parameter_schema.type == ToolParameter.ToolParameterType.NUMBER: - if not isinstance(tool_parameters[parameter], int | float): - raise ToolParameterValidationError(f'parameter {parameter} should be number') - - if parameter_schema.min is not None and tool_parameters[parameter] < parameter_schema.min: - raise ToolParameterValidationError(f'parameter {parameter} should be greater than {parameter_schema.min}') - - if parameter_schema.max is not None and tool_parameters[parameter] > parameter_schema.max: - raise ToolParameterValidationError(f'parameter {parameter} should be less than {parameter_schema.max}') - - elif parameter_schema.type == ToolParameter.ToolParameterType.BOOLEAN: - if not isinstance(tool_parameters[parameter], bool): - raise ToolParameterValidationError(f'parameter {parameter} should be boolean') - - 
elif parameter_schema.type == ToolParameter.ToolParameterType.SELECT: - if not isinstance(tool_parameters[parameter], str): - raise ToolParameterValidationError(f'parameter {parameter} should be string') - - options = parameter_schema.options - if not isinstance(options, list): - raise ToolParameterValidationError(f'parameter {parameter} options should be list') - - if tool_parameters[parameter] not in [x.value for x in options]: - raise ToolParameterValidationError(f'parameter {parameter} should be one of {options}') - - tool_parameters_need_to_validate.pop(parameter) - - for parameter in tool_parameters_need_to_validate: - parameter_schema = tool_parameters_need_to_validate[parameter] - if parameter_schema.required: - raise ToolParameterValidationError(f'parameter {parameter} is required') - - # the parameter is not set currently, set the default value if needed - if parameter_schema.default is not None: - tool_parameters[parameter] = ToolParameterConverter.cast_parameter_by_type(parameter_schema.default, - parameter_schema.type) - def validate_credentials_format(self, credentials: dict[str, Any]) -> None: """ validate the format of the credentials of the provider and set the default value if needed @@ -136,7 +53,7 @@ def validate_credentials_format(self, credentials: dict[str, Any]) -> None: if credentials_schema is None: return - credentials_need_to_validate: dict[str, ToolProviderCredentials] = {} + credentials_need_to_validate: dict[str, ProviderConfig] = {} for credential_name in credentials_schema: credentials_need_to_validate[credential_name] = credentials_schema[credential_name] @@ -146,12 +63,12 @@ def validate_credentials_format(self, credentials: dict[str, Any]) -> None: # check type credential_schema = credentials_need_to_validate[credential_name] - if credential_schema == ToolProviderCredentials.CredentialsType.SECRET_INPUT or \ - credential_schema == ToolProviderCredentials.CredentialsType.TEXT_INPUT: + if credential_schema == 
ProviderConfig.Type.SECRET_INPUT or \ + credential_schema == ProviderConfig.Type.TEXT_INPUT: if not isinstance(credentials[credential_name], str): raise ToolProviderCredentialValidationError(f'credential {credential_name} should be string') - elif credential_schema.type == ToolProviderCredentials.CredentialsType.SELECT: + elif credential_schema.type == ProviderConfig.Type.SELECT: if not isinstance(credentials[credential_name], str): raise ToolProviderCredentialValidationError(f'credential {credential_name} should be string') @@ -173,9 +90,9 @@ def validate_credentials_format(self, credentials: dict[str, Any]) -> None: if credential_schema.default is not None: default_value = credential_schema.default # parse default value into the correct type - if credential_schema.type == ToolProviderCredentials.CredentialsType.SECRET_INPUT or \ - credential_schema.type == ToolProviderCredentials.CredentialsType.TEXT_INPUT or \ - credential_schema.type == ToolProviderCredentials.CredentialsType.SELECT: + if credential_schema.type == ProviderConfig.Type.SECRET_INPUT or \ + credential_schema.type == ProviderConfig.Type.TEXT_INPUT or \ + credential_schema.type == ProviderConfig.Type.SELECT: default_value = str(default_value) credentials[credential_name] = default_value diff --git a/api/core/tools/provider/workflow_tool_provider.py b/api/core/tools/provider/workflow_tool_provider.py index f14abac76777da..a84b7a36ed9e47 100644 --- a/api/core/tools/provider/workflow_tool_provider.py +++ b/api/core/tools/provider/workflow_tool_provider.py @@ -1,5 +1,8 @@ +from collections.abc import Mapping from typing import Optional +from pydantic import Field + from core.app.app_config.entities import VariableEntity, VariableEntityType from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager from core.tools.entities.common_entities import I18nObject @@ -28,6 +31,7 @@ class WorkflowToolProviderController(ToolProviderController): provider_id: str + tools: list[WorkflowTool] = 
Field(default_factory=list) @classmethod def from_db(cls, db_provider: WorkflowToolProvider) -> 'WorkflowToolProviderController': @@ -71,16 +75,17 @@ def _get_db_provider_tool(self, db_provider: WorkflowToolProvider, app: App) -> :param app: the app :return: the tool """ - workflow: Workflow = db.session.query(Workflow).filter( + workflow: Workflow | None = db.session.query(Workflow).filter( Workflow.app_id == db_provider.app_id, Workflow.version == db_provider.version ).first() + if not workflow: raise ValueError('workflow not found') # fetch start node - graph: dict = workflow.graph_dict - features_dict: dict = workflow.features_dict + graph: Mapping = workflow.graph_dict + features_dict: Mapping = workflow.features_dict features = WorkflowAppConfigManager.convert_features( config_dict=features_dict, app_mode=AppMode.WORKFLOW @@ -89,7 +94,7 @@ def _get_db_provider_tool(self, db_provider: WorkflowToolProvider, app: App) -> parameters = db_provider.parameter_configurations variables = WorkflowToolConfigurationUtils.get_workflow_graph_variables(graph) - def fetch_workflow_variable(variable_name: str) -> VariableEntity: + def fetch_workflow_variable(variable_name: str) -> VariableEntity | None: return next(filter(lambda x: x.variable == variable_name, variables), None) user = db_provider.user @@ -99,7 +104,7 @@ def fetch_workflow_variable(variable_name: str) -> VariableEntity: variable = fetch_workflow_variable(parameter.name) if variable: parameter_type = None - options = None + options = [] if variable.type not in VARIABLE_TO_PARAMETER_TYPE_MAPPING: raise ValueError(f'unsupported variable type {variable.type}') parameter_type = VARIABLE_TO_PARAMETER_TYPE_MAPPING[variable.type] @@ -185,7 +190,7 @@ def fetch_workflow_variable(variable_name: str) -> VariableEntity: label=db_provider.label ) - def get_tools(self, user_id: str, tenant_id: str) -> list[WorkflowTool]: + def get_tools(self, tenant_id: str) -> list[WorkflowTool]: """ fetch tools from database @@ -196,7 
+201,7 @@ def get_tools(self, user_id: str, tenant_id: str) -> list[WorkflowTool]: if self.tools is not None: return self.tools - db_providers: WorkflowToolProvider = db.session.query(WorkflowToolProvider).filter( + db_providers: WorkflowToolProvider | None = db.session.query(WorkflowToolProvider).filter( WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.app_id == self.provider_id, ).first() diff --git a/api/core/tools/tool/tool.py b/api/core/tools/tool/tool.py index 68db0d5b2eaf4b..6005297118516a 100644 --- a/api/core/tools/tool/tool.py +++ b/api/core/tools/tool/tool.py @@ -55,7 +55,7 @@ def __init__(self, **data: Any): invoke_from: Optional[InvokeFrom] = None tool_invoke_from: Optional[ToolInvokeFrom] = None credentials: Optional[dict[str, Any]] = None - runtime_parameters: Optional[dict[str, Any]] = None + runtime_parameters: dict[str, Any] = Field(default_factory=dict) runtime: Optional[Runtime] = None variables: Optional[ToolRuntimeVariablePool] = None diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 06b6bb9f5285af..efc28020161603 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -4,7 +4,7 @@ from collections.abc import Generator from os import listdir, path from threading import Lock -from typing import Any, Union +from typing import Any, Union, cast from configs import dify_config from core.agent.entities import AgentToolEntity @@ -22,6 +22,7 @@ from core.tools.tool.api_tool import ApiTool from core.tools.tool.builtin_tool import BuiltinTool from core.tools.tool.tool import Tool +from core.tools.tool.workflow_tool import WorkflowTool from core.tools.tool_label_manager import ToolLabelManager from core.tools.utils.configuration import ToolConfigurationManager, ToolParameterConfigurationManager from core.tools.utils.tool_parameter_converter import ToolParameterConverter @@ -57,7 +58,7 @@ def get_builtin_provider(cls, provider: str) -> BuiltinToolProviderController: return 
cls._builtin_providers[provider] @classmethod - def get_builtin_tool(cls, provider: str, tool_name: str) -> BuiltinTool: + def get_builtin_tool(cls, provider: str, tool_name: str) -> BuiltinTool | None: """ get the builtin tool @@ -78,7 +79,7 @@ def get_tool_runtime(cls, provider_type: ToolProviderType, tenant_id: str, invoke_from: InvokeFrom = InvokeFrom.DEBUGGER, tool_invoke_from: ToolInvokeFrom = ToolInvokeFrom.AGENT) \ - -> Union[BuiltinTool, ApiTool]: + -> Union[BuiltinTool, ApiTool, WorkflowTool]: """ get the tool runtime @@ -90,19 +91,21 @@ def get_tool_runtime(cls, provider_type: ToolProviderType, """ if provider_type == ToolProviderType.BUILT_IN: builtin_tool = cls.get_builtin_tool(provider_id, tool_name) + if not builtin_tool: + raise ValueError(f"tool {tool_name} not found") # check if the builtin tool need credentials provider_controller = cls.get_builtin_provider(provider_id) if not provider_controller.need_credentials: - return builtin_tool.fork_tool_runtime(runtime={ + return cast(BuiltinTool, builtin_tool.fork_tool_runtime(runtime={ 'tenant_id': tenant_id, 'credentials': {}, 'invoke_from': invoke_from, 'tool_invoke_from': tool_invoke_from, - }) + })) # get credentials - builtin_provider: BuiltinToolProvider = db.session.query(BuiltinToolProvider).filter( + builtin_provider: BuiltinToolProvider | None = db.session.query(BuiltinToolProvider).filter( BuiltinToolProvider.tenant_id == tenant_id, BuiltinToolProvider.provider == provider_id, ).first() @@ -117,13 +120,13 @@ def get_tool_runtime(cls, provider_type: ToolProviderType, decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials) - return builtin_tool.fork_tool_runtime(runtime={ + return cast(BuiltinTool, builtin_tool.fork_tool_runtime(runtime={ 'tenant_id': tenant_id, 'credentials': decrypted_credentials, 'runtime_parameters': {}, 'invoke_from': invoke_from, 'tool_invoke_from': tool_invoke_from, - }) + })) elif provider_type == ToolProviderType.API: if tenant_id is None: @@ 
-135,12 +138,12 @@ def get_tool_runtime(cls, provider_type: ToolProviderType, tool_configuration = ToolConfigurationManager(tenant_id=tenant_id, provider_controller=api_provider) decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials) - return api_provider.get_tool(tool_name).fork_tool_runtime(runtime={ + return cast(ApiTool, api_provider.get_tool(tool_name).fork_tool_runtime(runtime={ 'tenant_id': tenant_id, 'credentials': decrypted_credentials, 'invoke_from': invoke_from, 'tool_invoke_from': tool_invoke_from, - }) + })) elif provider_type == ToolProviderType.WORKFLOW: workflow_provider = db.session.query(WorkflowToolProvider).filter( WorkflowToolProvider.tenant_id == tenant_id, @@ -154,12 +157,12 @@ def get_tool_runtime(cls, provider_type: ToolProviderType, db_provider=workflow_provider ) - return controller.get_tools(user_id=None, tenant_id=workflow_provider.tenant_id)[0].fork_tool_runtime(runtime={ + return cast(WorkflowTool, controller.get_tools(tenant_id=workflow_provider.tenant_id)[0].fork_tool_runtime(runtime={ 'tenant_id': tenant_id, 'credentials': {}, 'invoke_from': invoke_from, 'tool_invoke_from': tool_invoke_from, - }) + })) elif provider_type == ToolProviderType.APP: raise NotImplementedError('app provider not implemented') else: @@ -220,7 +223,10 @@ def get_agent_tool_runtime(cls, tenant_id: str, app_id: str, agent_tool: AgentTo identity_id=f'AGENT.{app_id}' ) runtime_parameters = encryption_manager.decrypt_tool_parameters(runtime_parameters) - + + if not tool_entity.runtime: + raise Exception("tool missing runtime") + tool_entity.runtime.runtime_parameters.update(runtime_parameters) return tool_entity @@ -258,6 +264,9 @@ def get_workflow_tool_runtime(cls, tenant_id: str, app_id: str, node_id: str, wo if runtime_parameters: runtime_parameters = encryption_manager.decrypt_tool_parameters(runtime_parameters) + if not tool_entity.runtime: + raise Exception("tool missing runtime") + 
tool_entity.runtime.runtime_parameters.update(runtime_parameters) return tool_entity @@ -304,20 +313,20 @@ def _list_builtin_providers(cls) -> Generator[BuiltinToolProviderController, Non """ list all the builtin providers """ - for provider in listdir(path.join(path.dirname(path.realpath(__file__)), 'provider', 'builtin')): - if provider.startswith('__'): + for provider_path in listdir(path.join(path.dirname(path.realpath(__file__)), 'provider', 'builtin')): + if provider_path.startswith('__'): continue - if path.isdir(path.join(path.dirname(path.realpath(__file__)), 'provider', 'builtin', provider)): - if provider.startswith('__'): + if path.isdir(path.join(path.dirname(path.realpath(__file__)), 'provider', 'builtin', provider_path)): + if provider_path.startswith('__'): continue # init provider try: provider_class = load_single_subclass_from_source( - module_name=f'core.tools.provider.builtin.{provider}.{provider}', + module_name=f'core.tools.provider.builtin.{provider_path}.{provider_path}', script_path=path.join(path.dirname(path.realpath(__file__)), - 'provider', 'builtin', provider, f'{provider}.py'), + 'provider', 'builtin', provider_path, f'{provider_path}.py'), parent_type=BuiltinToolProviderController) provider: BuiltinToolProviderController = provider_class() cls._builtin_providers[provider.identity.name] = provider @@ -387,8 +396,8 @@ def user_list_providers(cls, user_id: str, tenant_id: str, typ: UserToolProvider for provider in builtin_providers: # handle include, exclude if is_filtered( - include_set=dify_config.POSITION_TOOL_INCLUDES_SET, - exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET, + include_set=dify_config.POSITION_TOOL_INCLUDES_SET, # type: ignore + exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET, # type: ignore data=provider, name_func=lambda x: x.identity.name ): @@ -461,7 +470,7 @@ def get_api_provider_controller(cls, tenant_id: str, provider_id: str) -> tuple[ :return: the provider controller, the credentials """ - provider: 
ApiToolProvider = db.session.query(ApiToolProvider).filter( + provider: ApiToolProvider | None = db.session.query(ApiToolProvider).filter( ApiToolProvider.id == provider_id, ApiToolProvider.tenant_id == tenant_id, ).first() @@ -486,22 +495,22 @@ def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict: """ get tool provider """ - provider: ApiToolProvider = db.session.query(ApiToolProvider).filter( + provider_obj: ApiToolProvider| None = db.session.query(ApiToolProvider).filter( ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == provider, ).first() - if provider is None: + if provider_obj is None: raise ValueError(f'you have not added provider {provider}') try: - credentials = json.loads(provider.credentials_str) or {} + credentials = json.loads(provider_obj.credentials_str) or {} except: credentials = {} # package tool provider controller controller = ApiToolProviderController.from_db( - provider, ApiProviderAuthType.API_KEY if credentials['auth_type'] == 'api_key' else ApiProviderAuthType.NONE + provider_obj, ApiProviderAuthType.API_KEY if credentials['auth_type'] == 'api_key' else ApiProviderAuthType.NONE ) # init tool configuration tool_configuration = ToolConfigurationManager(tenant_id=tenant_id, provider_controller=controller) @@ -510,7 +519,7 @@ def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict: masked_credentials = tool_configuration.mask_tool_credentials(decrypted_credentials) try: - icon = json.loads(provider.icon) + icon = json.loads(provider_obj.icon) except: icon = { "background": "#252525", @@ -521,14 +530,14 @@ def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict: labels = ToolLabelManager.get_tool_labels(controller) return jsonable_encoder({ - 'schema_type': provider.schema_type, - 'schema': provider.schema, - 'tools': provider.tools, + 'schema_type': provider_obj.schema_type, + 'schema': provider_obj.schema, + 'tools': provider_obj.tools, 'icon': icon, - 'description': 
provider.description, + 'description': provider_obj.description, 'credentials': masked_credentials, - 'privacy_policy': provider.privacy_policy, - 'custom_disclaimer': provider.custom_disclaimer, + 'privacy_policy': provider_obj.privacy_policy, + 'custom_disclaimer': provider_obj.custom_disclaimer, 'labels': labels, }) @@ -551,25 +560,29 @@ def get_tool_icon(cls, tenant_id: str, provider_type: ToolProviderType, provider + "/icon") elif provider_type == ToolProviderType.API: try: - provider: ApiToolProvider = db.session.query(ApiToolProvider).filter( + api_provider: ApiToolProvider | None = db.session.query(ApiToolProvider).filter( ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.id == provider_id ).first() - return json.loads(provider.icon) + if not api_provider: + raise ValueError("api tool not found") + + return json.loads(api_provider.icon) except: return { "background": "#252525", "content": "\ud83d\ude01" } elif provider_type == ToolProviderType.WORKFLOW: - provider: WorkflowToolProvider = db.session.query(WorkflowToolProvider).filter( + workflow_provider: WorkflowToolProvider | None = db.session.query(WorkflowToolProvider).filter( WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == provider_id ).first() - if provider is None: + + if workflow_provider is None: raise ToolProviderNotFoundError(f'workflow provider {provider_id} not found') - return json.loads(provider.icon) + return json.loads(workflow_provider.icon) else: raise ValueError(f"provider type {provider_type} not found") diff --git a/api/core/tools/utils/configuration.py b/api/core/tools/utils/configuration.py index 68b0cea24f97eb..2fc0ba3bcd3192 100644 --- a/api/core/tools/utils/configuration.py +++ b/api/core/tools/utils/configuration.py @@ -7,8 +7,8 @@ from core.helper.tool_parameter_cache import ToolParameterCache, ToolParameterCacheType from core.helper.tool_provider_cache import ToolProviderCredentialsCache, ToolProviderCredentialsCacheType from 
core.tools.entities.tool_entities import ( + ProviderConfig, ToolParameter, - ToolProviderCredentials, ToolProviderType, ) from core.tools.provider.tool_provider import ToolProviderController @@ -36,7 +36,7 @@ def encrypt_tool_credentials(self, credentials: dict[str, str]) -> dict[str, str # get fields need to be decrypted fields = self.provider_controller.get_credentials_schema() for field_name, field in fields.items(): - if field.type == ToolProviderCredentials.CredentialsType.SECRET_INPUT: + if field.type == ProviderConfig.Type.SECRET_INPUT: if field_name in credentials: encrypted = encrypter.encrypt_token(self.tenant_id, credentials[field_name]) credentials[field_name] = encrypted @@ -54,7 +54,7 @@ def mask_tool_credentials(self, credentials: dict[str, Any]) -> dict[str, Any]: # get fields need to be decrypted fields = self.provider_controller.get_credentials_schema() for field_name, field in fields.items(): - if field.type == ToolProviderCredentials.CredentialsType.SECRET_INPUT: + if field.type == ProviderConfig.Type.SECRET_INPUT: if field_name in credentials: if len(credentials[field_name]) > 6: credentials[field_name] = \ @@ -84,7 +84,7 @@ def decrypt_tool_credentials(self, credentials: dict[str, str]) -> dict[str, str # get fields need to be decrypted fields = self.provider_controller.get_credentials_schema() for field_name, field in fields.items(): - if field.type == ToolProviderCredentials.CredentialsType.SECRET_INPUT: + if field.type == ProviderConfig.Type.SECRET_INPUT: if field_name in credentials: try: credentials[field_name] = encrypter.decrypt_token(self.tenant_id, credentials[field_name]) diff --git a/api/core/tools/utils/workflow_configuration_sync.py b/api/core/tools/utils/workflow_configuration_sync.py index ff5505bbbfb9f5..b8237fd043e277 100644 --- a/api/core/tools/utils/workflow_configuration_sync.py +++ b/api/core/tools/utils/workflow_configuration_sync.py @@ -1,3 +1,5 @@ +from collections.abc import Mapping + from core.app.app_config.entities 
import VariableEntity from core.tools.entities.tool_entities import WorkflowToolParameterConfiguration @@ -13,7 +15,7 @@ def check_parameter_configurations(cls, configurations: list[dict]): raise ValueError('invalid parameter configuration') @classmethod - def get_workflow_graph_variables(cls, graph: dict) -> list[VariableEntity]: + def get_workflow_graph_variables(cls, graph: Mapping) -> list[VariableEntity]: """ get workflow graph variables """ @@ -44,5 +46,3 @@ def check_is_synced(cls, for parameter in tool_configurations: if parameter.name not in variable_names: raise ValueError('parameter configuration mismatch, please republish the tool to update') - - return True \ No newline at end of file diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index 3ded9c0989b53c..d7538bd812878b 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -10,8 +10,8 @@ from core.tools.entities.tool_entities import ( ApiProviderAuthType, ApiProviderSchemaType, + ProviderConfig, ToolCredentialsOption, - ToolProviderCredentials, ) from core.tools.provider.api_tool_provider import ApiToolProviderController from core.tools.tool_label_manager import ToolLabelManager @@ -39,9 +39,9 @@ def parser_api_schema(schema: str) -> list[ApiToolBundle]: raise ValueError(f"invalid schema: {str(e)}") credentials_schema = [ - ToolProviderCredentials( + ProviderConfig( name="auth_type", - type=ToolProviderCredentials.CredentialsType.SELECT, + type=ProviderConfig.Type.SELECT, required=True, default="none", options=[ @@ -50,17 +50,17 @@ def parser_api_schema(schema: str) -> list[ApiToolBundle]: ], placeholder=I18nObject(en_US="Select auth type", zh_Hans="选择认证方式"), ), - ToolProviderCredentials( + ProviderConfig( name="api_key_header", - type=ToolProviderCredentials.CredentialsType.TEXT_INPUT, + type=ProviderConfig.Type.TEXT_INPUT, required=False, placeholder=I18nObject(en_US="Enter api key 
header", zh_Hans="输入 api key header,如:X-API-KEY"), default="api_key", help=I18nObject(en_US="HTTP header name for api key", zh_Hans="HTTP 头部字段名,用于传递 api key"), ), - ToolProviderCredentials( + ProviderConfig( name="api_key_value", - type=ToolProviderCredentials.CredentialsType.TEXT_INPUT, + type=ProviderConfig.Type.TEXT_INPUT, required=False, placeholder=I18nObject(en_US="Enter api key", zh_Hans="输入 api key"), default="", diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index d072203fbaf387..1848fb2a1343ce 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -8,8 +8,8 @@ from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ( ApiProviderAuthType, + ProviderConfig, ToolParameter, - ToolProviderCredentials, ToolProviderType, ) from core.tools.provider.api_tool_provider import ApiToolProviderController @@ -92,7 +92,7 @@ def builtin_provider_to_user_provider( # get credentials schema schema = provider_controller.get_credentials_schema() for name, value in schema.items(): - result.masked_credentials[name] = ToolProviderCredentials.CredentialsType.default(value.type) + result.masked_credentials[name] = ProviderConfig.Type.default(value.type) # check if the provider need credentials if not provider_controller.need_credentials: From cf4e9f317e207a4f1d6fef71e4329d6c74134054 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 30 Aug 2024 15:55:10 +0800 Subject: [PATCH 021/325] refactor: tool models --- .../agent_tool_callback_handler.py | 4 +- api/core/tools/tool_engine.py | 59 ++++++++++--------- api/core/workflow/nodes/tool/tool_node.py | 6 +- api/models/base.py | 5 ++ api/models/model.py | 28 +++++---- api/models/tools.py | 6 +- 6 files changed, 60 insertions(+), 48 deletions(-) create mode 100644 api/models/base.py diff --git a/api/core/callback_handler/agent_tool_callback_handler.py 
b/api/core/callback_handler/agent_tool_callback_handler.py index 578996574739a8..4c246b230de5a1 100644 --- a/api/core/callback_handler/agent_tool_callback_handler.py +++ b/api/core/callback_handler/agent_tool_callback_handler.py @@ -1,5 +1,5 @@ import os -from collections.abc import Mapping, Sequence +from collections.abc import Iterable, Mapping from typing import Any, Optional, TextIO, Union from pydantic import BaseModel @@ -55,7 +55,7 @@ def on_tool_end( self, tool_name: str, tool_inputs: Mapping[str, Any], - tool_outputs: Sequence[ToolInvokeMessage], + tool_outputs: Iterable[ToolInvokeMessage] | str, message_id: Optional[str] = None, timer: Optional[Any] = None, trace_manager: Optional[TraceQueueManager] = None diff --git a/api/core/tools/tool_engine.py b/api/core/tools/tool_engine.py index 9397f22494880d..0fb44704983b72 100644 --- a/api/core/tools/tool_engine.py +++ b/api/core/tools/tool_engine.py @@ -1,9 +1,9 @@ import json -from collections.abc import Generator, Mapping +from collections.abc import Generator, Iterable from copy import deepcopy from datetime import datetime, timezone from mimetypes import guess_type -from typing import Any, Optional, Union +from typing import Any, Optional, Union, cast from yarl import URL @@ -40,7 +40,7 @@ def agent_invoke( user_id: str, tenant_id: str, message: Message, invoke_from: InvokeFrom, agent_tool_callback: DifyAgentCallbackHandler, trace_manager: Optional[TraceQueueManager] = None - ) -> tuple[str, list[tuple[MessageFile, bool]], ToolInvokeMeta]: + ) -> tuple[str, list[tuple[MessageFile, str]], ToolInvokeMeta]: """ Agent invokes the tool with the given arguments. 
""" @@ -67,9 +67,9 @@ def agent_invoke( ) messages = ToolEngine._invoke(tool, tool_parameters, user_id) - invocation_meta_dict = {'meta': None} + invocation_meta_dict: dict[str, ToolInvokeMeta] = {} - def message_callback(invocation_meta_dict: dict, messages: Generator[ToolInvokeMessage, None, None]): + def message_callback(invocation_meta_dict: dict, messages: Generator[ToolInvokeMessage | ToolInvokeMeta, None, None]): for message in messages: if isinstance(message, ToolInvokeMeta): invocation_meta_dict['meta'] = message @@ -136,7 +136,7 @@ def message_callback(invocation_meta_dict: dict, messages: Generator[ToolInvokeM return error_response, [], ToolInvokeMeta.error_instance(error_response) @staticmethod - def workflow_invoke(tool: Tool, tool_parameters: Mapping[str, Any], + def workflow_invoke(tool: Tool, tool_parameters: dict[str, Any], user_id: str, workflow_tool_callback: DifyWorkflowCallbackHandler, workflow_call_depth: int, @@ -156,6 +156,7 @@ def workflow_invoke(tool: Tool, tool_parameters: Mapping[str, Any], if tool.runtime and tool.runtime.runtime_parameters: tool_parameters = {**tool.runtime.runtime_parameters, **tool_parameters} + response = tool.invoke(user_id=user_id, tool_parameters=tool_parameters) # hit the callback handler @@ -204,6 +205,9 @@ def _invoke(tool: Tool, tool_parameters: dict, user_id: str) \ """ Invoke the tool with the given arguments. 
""" + if not tool.runtime: + raise ValueError("missing runtime in tool") + started_at = datetime.now(timezone.utc) meta = ToolInvokeMeta(time_cost=0.0, error=None, tool_config={ 'tool_name': tool.identity.name, @@ -223,42 +227,42 @@ def _invoke(tool: Tool, tool_parameters: dict, user_id: str) \ yield meta @staticmethod - def _convert_tool_response_to_str(tool_response: list[ToolInvokeMessage]) -> str: + def _convert_tool_response_to_str(tool_response: Generator[ToolInvokeMessage, None, None]) -> str: """ Handle tool response """ result = '' for response in tool_response: if response.type == ToolInvokeMessage.MessageType.TEXT: - result += response.message + result += cast(ToolInvokeMessage.TextMessage, response.message).text elif response.type == ToolInvokeMessage.MessageType.LINK: - result += f"result link: {response.message}. please tell user to check it." + result += f"result link: {cast(ToolInvokeMessage.TextMessage, response.message).text}. please tell user to check it." elif response.type == ToolInvokeMessage.MessageType.IMAGE_LINK or \ response.type == ToolInvokeMessage.MessageType.IMAGE: result += "image has been created and sent to user already, you do not need to create it, just tell the user to check it now." elif response.type == ToolInvokeMessage.MessageType.JSON: - result += f"tool response: {json.dumps(response.message, ensure_ascii=False)}." + result += f"tool response: {json.dumps(cast(ToolInvokeMessage.JsonMessage, response.message).json_object, ensure_ascii=False)}." else: result += f"tool response: {response.message}." 
return result @staticmethod - def _extract_tool_response_binary(tool_response: list[ToolInvokeMessage]) -> list[ToolInvokeMessageBinary]: + def _extract_tool_response_binary(tool_response: Generator[ToolInvokeMessage, None, None]) -> Generator[ToolInvokeMessageBinary, None, None]: """ Extract tool response binary """ - result = [] - for response in tool_response: if response.type == ToolInvokeMessage.MessageType.IMAGE_LINK or \ response.type == ToolInvokeMessage.MessageType.IMAGE: mimetype = None + if not response.meta: + raise ValueError("missing meta data") if response.meta.get('mime_type'): mimetype = response.meta.get('mime_type') else: try: - url = URL(response.message) + url = URL(cast(ToolInvokeMessage.TextMessage, response.message).text) extension = url.suffix guess_type_result, _ = guess_type(f'a{extension}') if guess_type_result: @@ -269,35 +273,36 @@ def _extract_tool_response_binary(tool_response: list[ToolInvokeMessage]) -> lis if not mimetype: mimetype = 'image/jpeg' - result.append(ToolInvokeMessageBinary( + yield ToolInvokeMessageBinary( mimetype=response.meta.get('mime_type', 'image/jpeg'), - url=response.message, + url=cast(ToolInvokeMessage.TextMessage, response.message).text, save_as=response.save_as, - )) + ) elif response.type == ToolInvokeMessage.MessageType.BLOB: - result.append(ToolInvokeMessageBinary( + if not response.meta: + raise ValueError("missing meta data") + + yield ToolInvokeMessageBinary( mimetype=response.meta.get('mime_type', 'octet/stream'), - url=response.message, + url=cast(ToolInvokeMessage.TextMessage, response.message).text, save_as=response.save_as, - )) + ) elif response.type == ToolInvokeMessage.MessageType.LINK: # check if there is a mime type in meta if response.meta and 'mime_type' in response.meta: - result.append(ToolInvokeMessageBinary( + yield ToolInvokeMessageBinary( mimetype=response.meta.get('mime_type', 'octet/stream') if response.meta else 'octet/stream', - url=response.message, + 
url=cast(ToolInvokeMessage.TextMessage, response.message).text, save_as=response.save_as, - )) - - return result + ) @staticmethod def _create_message_files( - tool_messages: list[ToolInvokeMessageBinary], + tool_messages: Iterable[ToolInvokeMessageBinary], agent_message: Message, invoke_from: InvokeFrom, user_id: str - ) -> list[tuple[Any, str]]: + ) -> list[tuple[MessageFile, str]]: """ Create message file diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 47498f4f5fde1f..6ba7e7e09b01c4 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -1,4 +1,4 @@ -from collections.abc import Generator, Mapping, Sequence +from collections.abc import Generator, Sequence from os import path from typing import Any, cast @@ -100,7 +100,7 @@ def _generate_parameters( variable_pool: VariablePool, node_data: ToolNodeData, for_log: bool = False, - ) -> Mapping[str, Any]: + ) -> dict[str, Any]: """ Generate parameters based on the given tool parameters, variable pool, and node data. @@ -110,7 +110,7 @@ def _generate_parameters( node_data (ToolNodeData): The data associated with the tool node. Returns: - Mapping[str, Any]: A dictionary containing the generated parameters. + dict[str, Any]: A dictionary containing the generated parameters. 
""" tool_parameters_dictionary = {parameter.name: parameter for parameter in tool_parameters} diff --git a/api/models/base.py b/api/models/base.py new file mode 100644 index 00000000000000..1c2dcc40b94f8a --- /dev/null +++ b/api/models/base.py @@ -0,0 +1,5 @@ +from sqlalchemy.orm import DeclarativeBase + + +class Base(DeclarativeBase): + pass \ No newline at end of file diff --git a/api/models/model.py b/api/models/model.py index e2d1fcfc237a4d..298bfbda120e90 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -14,6 +14,7 @@ from core.file.upload_file_parser import UploadFileParser from extensions.ext_database import db from libs.helper import generate_string +from models.base import Base from .account import Account, Tenant from .types import StringUUID @@ -211,7 +212,7 @@ def tags(self): return tags if tags else [] -class AppModelConfig(db.Model): +class AppModelConfig(Base): __tablename__ = 'app_model_configs' __table_args__ = ( db.PrimaryKeyConstraint('id', name='app_model_config_pkey'), @@ -550,6 +551,9 @@ def model_config(self): else: app_model_config = db.session.query(AppModelConfig).filter( AppModelConfig.id == self.app_model_config_id).first() + + if not app_model_config: + raise ValueError("app config not found") model_config = app_model_config.to_dict() @@ -640,7 +644,7 @@ def in_debug_mode(self): return self.override_model_configs is not None -class Message(db.Model): +class Message(Base): __tablename__ = 'messages' __table_args__ = ( db.PrimaryKeyConstraint('id', name='message_pkey'), @@ -932,7 +936,7 @@ def from_account(self): return account -class MessageFile(db.Model): +class MessageFile(Base): __tablename__ = 'message_files' __table_args__ = ( db.PrimaryKeyConstraint('id', name='message_file_pkey'), @@ -940,15 +944,15 @@ class MessageFile(db.Model): db.Index('message_file_created_by_idx', 'created_by') ) - id = db.Column(StringUUID, server_default=db.text('uuid_generate_v4()')) - message_id = db.Column(StringUUID, nullable=False) - type 
= db.Column(db.String(255), nullable=False) - transfer_method = db.Column(db.String(255), nullable=False) - url = db.Column(db.Text, nullable=True) - belongs_to = db.Column(db.String(255), nullable=True) - upload_file_id = db.Column(StringUUID, nullable=True) - created_by_role = db.Column(db.String(255), nullable=False) - created_by = db.Column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, default=db.text('uuid_generate_v4()')) + message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + type: Mapped[str] = mapped_column(db.String(255), nullable=False) + transfer_method: Mapped[str] = mapped_column(db.String(255), nullable=False) + url: Mapped[str] = mapped_column(db.Text, nullable=True) + belongs_to: Mapped[str] = mapped_column(db.String(255), nullable=True) + upload_file_id: Mapped[str] = mapped_column(StringUUID, nullable=True) + created_by_role: Mapped[str] = mapped_column(db.String(255), nullable=False) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) diff --git a/api/models/tools.py b/api/models/tools.py index 937481583ab799..1e7421622aae08 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -1,12 +1,13 @@ import json from sqlalchemy import ForeignKey -from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column +from sqlalchemy.orm import Mapped, mapped_column from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration from extensions.ext_database import db +from models.base import Base from .model import Account, App, Tenant from .types import StringUUID @@ -277,9 +278,6 @@ class ToolConversationVariables(db.Model): @property def variables(self) -> dict: return json.loads(self.variables_str) - -class Base(DeclarativeBase): - 
pass class ToolFile(Base): """ From 886a1601152b5fd887f6e5777efd0bd37b88f84c Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 30 Aug 2024 18:11:38 +0800 Subject: [PATCH 022/325] fix: invoke tool streamingly --- api/core/entities/provider_entities.py | 4 +- api/core/helper/tool_provider_cache.py | 1 + api/core/tools/entities/api_entities.py | 11 ++-- api/core/tools/entities/tool_entities.py | 4 +- api/core/tools/provider/api_tool_provider.py | 25 +++++---- .../tools/provider/builtin_tool_provider.py | 2 +- api/core/tools/provider/tool_provider.py | 4 +- api/core/tools/tool/tool.py | 13 ++++- api/core/tools/tool_manager.py | 21 ++++++-- api/core/tools/utils/configuration.py | 24 +++++---- api/core/tools/utils/parser.py | 11 ++-- api/core/workflow/nodes/tool/tool_node.py | 17 +++--- api/models/model.py | 24 ++++----- api/models/tools.py | 6 +-- .../tools/api_tools_manage_service.py | 52 ++++++++++++------- api/services/tools/tools_transform_service.py | 22 +++++--- 16 files changed, 149 insertions(+), 92 deletions(-) diff --git a/api/core/entities/provider_entities.py b/api/core/entities/provider_entities.py index ae78d9ecf93b27..e0d1de151f143d 100644 --- a/api/core/entities/provider_entities.py +++ b/api/core/entities/provider_entities.py @@ -4,8 +4,8 @@ from pydantic import BaseModel, ConfigDict, Field from core.entities.parameter_entities import AppSelectorScope, CommonParameterType, ModelConfigScope -from core.model_runtime.entities.common_entities import I18nObject from core.model_runtime.entities.model_entities import ModelType +from core.tools.entities.common_entities import I18nObject from models.provider import ProviderQuotaType @@ -143,7 +143,7 @@ class Option(BaseModel): value: str = Field(..., description="The value of the option") label: I18nObject = Field(..., description="The label of the option") - scope: AppSelectorScope | ModelConfigScope | None + scope: AppSelectorScope | ModelConfigScope | None = None required: bool = False default: 
Optional[Union[int, str]] = None options: Optional[list[Option]] = None diff --git a/api/core/helper/tool_provider_cache.py b/api/core/helper/tool_provider_cache.py index 6c5d3b8fb6880c..2777367963bdae 100644 --- a/api/core/helper/tool_provider_cache.py +++ b/api/core/helper/tool_provider_cache.py @@ -8,6 +8,7 @@ class ToolProviderCredentialsCacheType(Enum): PROVIDER = "tool_provider" + ENDPOINT = "endpoint" class ToolProviderCredentialsCache: def __init__(self, tenant_id: str, identity_id: str, cache_type: ToolProviderCredentialsCacheType): diff --git a/api/core/tools/entities/api_entities.py b/api/core/tools/entities/api_entities.py index 71db8d8b2dfc77..2aaca3506009b2 100644 --- a/api/core/tools/entities/api_entities.py +++ b/api/core/tools/entities/api_entities.py @@ -1,10 +1,11 @@ from typing import Literal, Optional -from pydantic import BaseModel +from pydantic import BaseModel, Field +from core.entities.provider_entities import ProviderConfig from core.model_runtime.utils.encoders import jsonable_encoder from core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import ProviderConfig, ToolProviderType +from core.tools.entities.tool_entities import ToolProviderType from core.tools.tool.tool import ToolParameter @@ -14,7 +15,7 @@ class UserTool(BaseModel): label: I18nObject # label description: I18nObject parameters: Optional[list[ToolParameter]] = None - labels: list[str] = None + labels: list[str] = Field(default_factory=list) UserToolProviderTypeLiteral = Optional[Literal[ 'builtin', 'api', 'workflow' @@ -32,8 +33,8 @@ class UserToolProvider(BaseModel): original_credentials: Optional[dict] = None is_team_authorization: bool = False allow_delete: bool = True - tools: list[UserTool] = None - labels: list[str] = None + tools: list[UserTool] = Field(default_factory=list) + labels: list[str] = Field(default_factory=list) def to_dict(self) -> dict: # ------------- diff --git a/api/core/tools/entities/tool_entities.py 
b/api/core/tools/entities/tool_entities.py index 98efb92a0d9d73..4b0961fb0999c9 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -25,7 +25,7 @@ class ToolLabelEnum(Enum): UTILITIES = 'utilities' OTHER = 'other' -class ToolProviderType(Enum): +class ToolProviderType(str, Enum): """ Enum class for tool provider """ @@ -181,7 +181,7 @@ def get_simple_instance(cls, if options: option_objs = [ToolParameterOption(value=option, label=I18nObject(en_US=option, zh_Hans=option)) for option in options] else: - option_objs = None + option_objs = [] return cls( name=name, label=I18nObject(en_US='', zh_Hans=''), diff --git a/api/core/tools/provider/api_tool_provider.py b/api/core/tools/provider/api_tool_provider.py index fc7fcb675a7ad8..880ddc4955b917 100644 --- a/api/core/tools/provider/api_tool_provider.py +++ b/api/core/tools/provider/api_tool_provider.py @@ -1,21 +1,23 @@ +from pydantic import Field + +from core.entities.provider_entities import ProviderConfig from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ( ApiProviderAuthType, - ProviderConfig, - ToolCredentialsOption, ToolProviderType, ) from core.tools.provider.tool_provider import ToolProviderController from core.tools.tool.api_tool import ApiTool -from core.tools.tool.tool import Tool from extensions.ext_database import db from models.tools import ApiToolProvider class ApiToolProviderController(ToolProviderController): provider_id: str + tenant_id: str + tools: list[ApiTool] = Field(default_factory=list) @staticmethod def from_db(db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> 'ApiToolProviderController': @@ -25,8 +27,8 @@ def from_db(db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> 'Ap required=True, type=ProviderConfig.Type.SELECT, options=[ - ToolCredentialsOption(value='none', label=I18nObject(en_US='None', 
zh_Hans='无')), - ToolCredentialsOption(value='api_key', label=I18nObject(en_US='api_key', zh_Hans='api_key')) + ProviderConfig.Option(value='none', label=I18nObject(en_US='None', zh_Hans='无')), + ProviderConfig.Option(value='api_key', label=I18nObject(en_US='api_key', zh_Hans='api_key')) ], default='none', help=I18nObject( @@ -67,9 +69,9 @@ def from_db(db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> 'Ap zh_Hans='api key header 的前缀' ), options=[ - ToolCredentialsOption(value='basic', label=I18nObject(en_US='Basic', zh_Hans='Basic')), - ToolCredentialsOption(value='bearer', label=I18nObject(en_US='Bearer', zh_Hans='Bearer')), - ToolCredentialsOption(value='custom', label=I18nObject(en_US='Custom', zh_Hans='Custom')) + ProviderConfig.Option(value='basic', label=I18nObject(en_US='Basic', zh_Hans='Basic')), + ProviderConfig.Option(value='bearer', label=I18nObject(en_US='Bearer', zh_Hans='Bearer')), + ProviderConfig.Option(value='custom', label=I18nObject(en_US='Custom', zh_Hans='Custom')) ] ) } @@ -96,6 +98,7 @@ def from_db(db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> 'Ap }, 'credentials_schema': credentials_schema, 'provider_id': db_provider.id or '', + 'tenant_id': db_provider.tenant_id or '', }) @property @@ -142,7 +145,7 @@ def load_bundled_tools(self, tools: list[ApiToolBundle]) -> list[ApiTool]: return self.tools - def get_tools(self, user_id: str, tenant_id: str) -> list[ApiTool]: + def get_tools(self, tenant_id: str) -> list[ApiTool]: """ fetch tools from database @@ -153,7 +156,7 @@ def get_tools(self, user_id: str, tenant_id: str) -> list[ApiTool]: if self.tools is not None: return self.tools - tools: list[Tool] = [] + tools: list[ApiTool] = [] # get tenant api providers db_providers: list[ApiToolProvider] = db.session.query(ApiToolProvider).filter( @@ -179,7 +182,7 @@ def get_tool(self, tool_name: str) -> ApiTool: :return: the tool """ if self.tools is None: - self.get_tools() + self.get_tools(self.tenant_id) for tool in 
self.tools: if tool.identity.name == tool_name: diff --git a/api/core/tools/provider/builtin_tool_provider.py b/api/core/tools/provider/builtin_tool_provider.py index 7ad8a5468b131d..8dd543b00a539e 100644 --- a/api/core/tools/provider/builtin_tool_provider.py +++ b/api/core/tools/provider/builtin_tool_provider.py @@ -39,7 +39,7 @@ def __init__(self, **data: Any) -> None: super().__init__(**{ 'identity': provider_yaml['identity'], - 'credentials_schema': provider_yaml.get('credentials_for_provider', None), + 'credentials_schema': provider_yaml.get('credentials_for_provider', {}) or {}, }) def _get_builtin_tools(self) -> list[BuiltinTool]: diff --git a/api/core/tools/provider/tool_provider.py b/api/core/tools/provider/tool_provider.py index ac770a2a60125c..057f3060ed0f34 100644 --- a/api/core/tools/provider/tool_provider.py +++ b/api/core/tools/provider/tool_provider.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from typing import Any -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from core.entities.provider_entities import ProviderConfig from core.tools.entities.tool_entities import ( @@ -17,6 +17,8 @@ class ToolProviderController(BaseModel, ABC): tools: list[Tool] = Field(default_factory=list) credentials_schema: dict[str, ProviderConfig] = Field(default_factory=dict) + model_config = ConfigDict(validate_assignment=True) + def get_credentials_schema(self) -> dict[str, ProviderConfig]: """ returns the credentials schema of the provider diff --git a/api/core/tools/tool/tool.py b/api/core/tools/tool/tool.py index 6005297118516a..6f21afdb357fd5 100644 --- a/api/core/tools/tool/tool.py +++ b/api/core/tools/tool/tool.py @@ -206,7 +206,16 @@ def invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[Too tool_parameters=tool_parameters, ) - return result + if isinstance(result, ToolInvokeMessage): + def single_generator(): + yield result + return single_generator() + elif isinstance(result, list): + def 
generator(): + yield from result + return generator() + else: + return result def _transform_tool_parameters_type(self, tool_parameters: dict[str, Any]) -> dict[str, Any]: """ @@ -223,7 +232,7 @@ def _transform_tool_parameters_type(self, tool_parameters: dict[str, Any]) -> di return result @abstractmethod - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage, None, None]: + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage] | Generator[ToolInvokeMessage, None, None]: pass def validate_credentials(self, credentials: dict[str, Any], parameters: dict[str, Any]) -> None: diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index efc28020161603..56e97252f9478a 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -116,7 +116,12 @@ def get_tool_runtime(cls, provider_type: ToolProviderType, # decrypt the credentials credentials = builtin_provider.credentials controller = cls.get_builtin_provider(provider_id) - tool_configuration = ToolConfigurationManager(tenant_id=tenant_id, provider_controller=controller) + tool_configuration = ToolConfigurationManager( + tenant_id=tenant_id, + config=controller.get_credentials_schema(), + provider_type=controller.provider_type.value, + provider_identity=controller.identity.name + ) decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials) @@ -135,7 +140,12 @@ def get_tool_runtime(cls, provider_type: ToolProviderType, api_provider, credentials = cls.get_api_provider_controller(tenant_id, provider_id) # decrypt the credentials - tool_configuration = ToolConfigurationManager(tenant_id=tenant_id, provider_controller=api_provider) + tool_configuration = ToolConfigurationManager( + tenant_id=tenant_id, + config=api_provider.get_credentials_schema(), + provider_type=api_provider.provider_type.value, + provider_identity=api_provider.identity.name + ) 
decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials) return cast(ApiTool, api_provider.get_tool(tool_name).fork_tool_runtime(runtime={ @@ -513,7 +523,12 @@ def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict: provider_obj, ApiProviderAuthType.API_KEY if credentials['auth_type'] == 'api_key' else ApiProviderAuthType.NONE ) # init tool configuration - tool_configuration = ToolConfigurationManager(tenant_id=tenant_id, provider_controller=controller) + tool_configuration = ToolConfigurationManager( + tenant_id=tenant_id, + config=controller.get_credentials_schema(), + provider_type=controller.provider_type.value, + provider_identity=controller.identity.name + ) decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials) masked_credentials = tool_configuration.mask_tool_credentials(decrypted_credentials) diff --git a/api/core/tools/utils/configuration.py b/api/core/tools/utils/configuration.py index 2fc0ba3bcd3192..5b65ce443f0fcc 100644 --- a/api/core/tools/utils/configuration.py +++ b/api/core/tools/utils/configuration.py @@ -1,23 +1,25 @@ +from collections.abc import Mapping from copy import deepcopy from typing import Any from pydantic import BaseModel +from core.entities.provider_entities import BasicProviderConfig from core.helper import encrypter from core.helper.tool_parameter_cache import ToolParameterCache, ToolParameterCacheType from core.helper.tool_provider_cache import ToolProviderCredentialsCache, ToolProviderCredentialsCacheType from core.tools.entities.tool_entities import ( - ProviderConfig, ToolParameter, ToolProviderType, ) -from core.tools.provider.tool_provider import ToolProviderController from core.tools.tool.tool import Tool class ToolConfigurationManager(BaseModel): tenant_id: str - provider_controller: ToolProviderController + config: Mapping[str, BasicProviderConfig] + provider_type: str + provider_identity: str def _deep_copy(self, credentials: dict[str, str]) -> dict[str, 
str]: """ @@ -34,9 +36,9 @@ def encrypt_tool_credentials(self, credentials: dict[str, str]) -> dict[str, str credentials = self._deep_copy(credentials) # get fields need to be decrypted - fields = self.provider_controller.get_credentials_schema() + fields = self.config for field_name, field in fields.items(): - if field.type == ProviderConfig.Type.SECRET_INPUT: + if field.type == BasicProviderConfig.Type.SECRET_INPUT: if field_name in credentials: encrypted = encrypter.encrypt_token(self.tenant_id, credentials[field_name]) credentials[field_name] = encrypted @@ -52,9 +54,9 @@ def mask_tool_credentials(self, credentials: dict[str, Any]) -> dict[str, Any]: credentials = self._deep_copy(credentials) # get fields need to be decrypted - fields = self.provider_controller.get_credentials_schema() + fields = self.config for field_name, field in fields.items(): - if field.type == ProviderConfig.Type.SECRET_INPUT: + if field.type == BasicProviderConfig.Type.SECRET_INPUT: if field_name in credentials: if len(credentials[field_name]) > 6: credentials[field_name] = \ @@ -74,7 +76,7 @@ def decrypt_tool_credentials(self, credentials: dict[str, str]) -> dict[str, str """ cache = ToolProviderCredentialsCache( tenant_id=self.tenant_id, - identity_id=f'{self.provider_controller.provider_type.value}.{self.provider_controller.identity.name}', + identity_id=f'{self.provider_type}.{self.provider_identity}', cache_type=ToolProviderCredentialsCacheType.PROVIDER ) cached_credentials = cache.get() @@ -82,9 +84,9 @@ def decrypt_tool_credentials(self, credentials: dict[str, str]) -> dict[str, str return cached_credentials credentials = self._deep_copy(credentials) # get fields need to be decrypted - fields = self.provider_controller.get_credentials_schema() + fields = self.config for field_name, field in fields.items(): - if field.type == ProviderConfig.Type.SECRET_INPUT: + if field.type == BasicProviderConfig.Type.SECRET_INPUT: if field_name in credentials: try: credentials[field_name] = 
encrypter.decrypt_token(self.tenant_id, credentials[field_name]) @@ -97,7 +99,7 @@ def decrypt_tool_credentials(self, credentials: dict[str, str]) -> dict[str, str def delete_tool_credentials_cache(self): cache = ToolProviderCredentialsCache( tenant_id=self.tenant_id, - identity_id=f'{self.provider_controller.provider_type.value}.{self.provider_controller.identity.name}', + identity_id=f'{self.provider_type}.{self.provider_identity}', cache_type=ToolProviderCredentialsCacheType.PROVIDER ) cache.delete() diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py index f711f7c9f3c2e8..882e276afe37a7 100644 --- a/api/core/tools/utils/parser.py +++ b/api/core/tools/utils/parser.py @@ -16,7 +16,7 @@ class ApiBasedToolSchemaParser: @staticmethod - def parse_openapi_to_tool_bundle(openapi: dict, extra_info: dict = None, warning: dict = None) -> list[ApiToolBundle]: + def parse_openapi_to_tool_bundle(openapi: dict, extra_info: dict | None = None, warning: dict | None = None) -> list[ApiToolBundle]: warning = warning if warning is not None else {} extra_info = extra_info if extra_info is not None else {} @@ -173,7 +173,7 @@ def _get_tool_parameter_type(parameter: dict) -> ToolParameter.ToolParameterType return ToolParameter.ToolParameterType.STRING @staticmethod - def parse_openapi_yaml_to_tool_bundle(yaml: str, extra_info: dict = None, warning: dict = None) -> list[ApiToolBundle]: + def parse_openapi_yaml_to_tool_bundle(yaml: str, extra_info: dict | None = None, warning: dict | None = None) -> list[ApiToolBundle]: """ parse openapi yaml to tool bundle @@ -189,7 +189,8 @@ def parse_openapi_yaml_to_tool_bundle(yaml: str, extra_info: dict = None, warnin return ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle(openapi, extra_info=extra_info, warning=warning) @staticmethod - def parse_swagger_to_openapi(swagger: dict, extra_info: dict = None, warning: dict = None) -> dict: + def parse_swagger_to_openapi(swagger: dict, extra_info: dict | None = None, 
warning: dict | None = None) -> dict: + warning = warning or {} """ parse swagger to openapi @@ -255,7 +256,7 @@ def parse_swagger_to_openapi(swagger: dict, extra_info: dict = None, warning: di return openapi @staticmethod - def parse_openai_plugin_json_to_tool_bundle(json: str, extra_info: dict = None, warning: dict = None) -> list[ApiToolBundle]: + def parse_openai_plugin_json_to_tool_bundle(json: str, extra_info: dict | None = None, warning: dict | None = None) -> list[ApiToolBundle]: """ parse openapi plugin yaml to tool bundle @@ -287,7 +288,7 @@ def parse_openai_plugin_json_to_tool_bundle(json: str, extra_info: dict = None, return ApiBasedToolSchemaParser.parse_openapi_yaml_to_tool_bundle(response.text, extra_info=extra_info, warning=warning) @staticmethod - def auto_parse_to_tool_bundle(content: str, extra_info: dict = None, warning: dict = None) -> tuple[list[ApiToolBundle], str]: + def auto_parse_to_tool_bundle(content: str, extra_info: dict | None = None, warning: dict | None = None) -> tuple[list[ApiToolBundle], str]: """ auto parse to tool bundle diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 6ba7e7e09b01c4..2d01bad1f468ac 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -1,6 +1,6 @@ from collections.abc import Generator, Sequence from os import path -from typing import Any, cast +from typing import Any, Iterable, cast from core.app.segments import ArrayAnySegment, ArrayAnyVariable, parser from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler @@ -158,14 +158,17 @@ def _convert_tool_messages(self, messages: Generator[ToolInvokeMessage, None, No tenant_id=self.tenant_id, conversation_id=None, ) + + result = list(messages) + # extract plain text and files - files = self._extract_tool_response_binary(messages) - plain_text = self._extract_tool_response_text(messages) - json = 
self._extract_tool_response_json(messages) + files = self._extract_tool_response_binary(result) + plain_text = self._extract_tool_response_text(result) + json = self._extract_tool_response_json(result) return plain_text, files, json - def _extract_tool_response_binary(self, tool_response: Generator[ToolInvokeMessage, None, None]) -> list[FileVar]: + def _extract_tool_response_binary(self, tool_response: Iterable[ToolInvokeMessage]) -> list[FileVar]: """ Extract tool response binary """ @@ -215,7 +218,7 @@ def _extract_tool_response_binary(self, tool_response: Generator[ToolInvokeMessa return result - def _extract_tool_response_text(self, tool_response: Generator[ToolInvokeMessage]) -> str: + def _extract_tool_response_text(self, tool_response: Iterable[ToolInvokeMessage]) -> str: """ Extract tool response text """ @@ -230,7 +233,7 @@ def _extract_tool_response_text(self, tool_response: Generator[ToolInvokeMessage return '\n'.join(result) - def _extract_tool_response_json(self, tool_response: Generator[ToolInvokeMessage]) -> list[dict]: + def _extract_tool_response_json(self, tool_response: Iterable[ToolInvokeMessage]) -> list[dict]: result: list[dict] = [] for message in tool_response: if message.type == ToolInvokeMessage.MessageType.JSON: diff --git a/api/models/model.py b/api/models/model.py index 298bfbda120e90..74ba4a7fd56ba7 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -7,7 +7,7 @@ from flask import request from flask_login import UserMixin from sqlalchemy import Float, func, text -from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy.orm import Mapped, mapped_column, relationship from configs import dify_config from core.file.tool_file_parser import ToolFileParser @@ -495,14 +495,14 @@ def tenant(self): return tenant -class Conversation(db.Model): +class Conversation(Base): __tablename__ = 'conversations' __table_args__ = ( db.PrimaryKeyConstraint('id', name='conversation_pkey'), db.Index('conversation_app_from_user_idx', 
'app_id', 'from_source', 'from_end_user_id') ) - id = db.Column(StringUUID, server_default=db.text('uuid_generate_v4()')) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text('uuid_generate_v4()')) app_id = db.Column(StringUUID, nullable=False) app_model_config_id = db.Column(StringUUID, nullable=True) model_provider = db.Column(db.String(255), nullable=True) @@ -526,8 +526,8 @@ class Conversation(db.Model): created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) - messages = db.relationship("Message", backref="conversation", lazy='select', passive_deletes="all") - message_annotations = db.relationship("MessageAnnotation", backref="conversation", lazy='select', passive_deletes="all") + messages: Mapped[list["Message"]] = relationship("Message", backref="conversation", lazy='select', passive_deletes="all") + message_annotations: Mapped[list["MessageAnnotation"]] = relationship("MessageAnnotation", backref="conversation", lazy='select', passive_deletes="all") is_deleted = db.Column(db.Boolean, nullable=False, server_default=db.text('false')) @@ -660,10 +660,10 @@ class Message(Base): model_provider = db.Column(db.String(255), nullable=True) model_id = db.Column(db.String(255), nullable=True) override_model_configs = db.Column(db.Text) - conversation_id = db.Column(StringUUID, db.ForeignKey('conversations.id'), nullable=False) - inputs = db.Column(db.JSON) - query = db.Column(db.Text, nullable=False) - message = db.Column(db.JSON, nullable=False) + conversation_id: Mapped[str] = mapped_column(StringUUID, db.ForeignKey('conversations.id'), nullable=False) + inputs: Mapped[str] = mapped_column(db.JSON) + query: Mapped[str] = mapped_column(db.Text, nullable=False) + message: Mapped[str] = mapped_column(db.JSON, nullable=False) message_tokens = db.Column(db.Integer, nullable=False, server_default=db.text('0')) 
message_unit_price = db.Column(db.Numeric(10, 4), nullable=False) message_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text('0.001')) @@ -944,7 +944,7 @@ class MessageFile(Base): db.Index('message_file_created_by_idx', 'created_by') ) - id: Mapped[str] = mapped_column(StringUUID, default=db.text('uuid_generate_v4()')) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text('uuid_generate_v4()')) message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) type: Mapped[str] = mapped_column(db.String(255), nullable=False) transfer_method: Mapped[str] = mapped_column(db.String(255), nullable=False) @@ -956,7 +956,7 @@ class MessageFile(Base): created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) -class MessageAnnotation(db.Model): +class MessageAnnotation(Base): __tablename__ = 'message_annotations' __table_args__ = ( db.PrimaryKeyConstraint('id', name='message_annotation_pkey'), @@ -967,7 +967,7 @@ class MessageAnnotation(db.Model): id = db.Column(StringUUID, server_default=db.text('uuid_generate_v4()')) app_id = db.Column(StringUUID, nullable=False) - conversation_id = db.Column(StringUUID, db.ForeignKey('conversations.id'), nullable=True) + conversation_id: Mapped[str] = mapped_column(StringUUID, db.ForeignKey('conversations.id'), nullable=True) message_id = db.Column(StringUUID, nullable=True) question = db.Column(db.Text, nullable=True) content = db.Column(db.Text, nullable=False) diff --git a/api/models/tools.py b/api/models/tools.py index 1e7421622aae08..a87dfad0797934 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -77,10 +77,10 @@ def description_i18n(self) -> I18nObject: return I18nObject(**json.loads(self.description)) @property - def app(self) -> App: + def app(self) -> App | None: return db.session.query(App).filter(App.id == self.app_id).first() -class ApiToolProvider(db.Model): +class ApiToolProvider(Base): """ The table stores the api 
providers. """ @@ -290,7 +290,7 @@ class ToolFile(Base): db.Index('tool_file_conversation_id_idx', 'conversation_id'), ) - id: Mapped[str] = mapped_column(StringUUID, default=db.text('uuid_generate_v4()')) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text('uuid_generate_v4()')) # conversation user id user_id: Mapped[str] = mapped_column(StringUUID) # tenant id diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index d7538bd812878b..bfb9827ce268b9 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -3,6 +3,7 @@ from httpx import get +from core.entities.provider_entities import ProviderConfig from core.model_runtime.utils.encoders import jsonable_encoder from core.tools.entities.api_entities import UserTool, UserToolProvider from core.tools.entities.common_entities import I18nObject @@ -10,8 +11,6 @@ from core.tools.entities.tool_entities import ( ApiProviderAuthType, ApiProviderSchemaType, - ProviderConfig, - ToolCredentialsOption, ) from core.tools.provider.api_tool_provider import ApiToolProviderController from core.tools.tool_label_manager import ToolLabelManager @@ -45,8 +44,8 @@ def parser_api_schema(schema: str) -> list[ApiToolBundle]: required=True, default="none", options=[ - ToolCredentialsOption(value="none", label=I18nObject(en_US="None", zh_Hans="无")), - ToolCredentialsOption(value="api_key", label=I18nObject(en_US="Api Key", zh_Hans="Api Key")), + ProviderConfig.Option(value="none", label=I18nObject(en_US="None", zh_Hans="无")), + ProviderConfig.Option(value="api_key", label=I18nObject(en_US="Api Key", zh_Hans="Api Key")), ], placeholder=I18nObject(en_US="Select auth type", zh_Hans="选择认证方式"), ), @@ -79,15 +78,14 @@ def parser_api_schema(schema: str) -> list[ApiToolBundle]: raise ValueError(f"invalid schema: {str(e)}") @staticmethod - def convert_schema_to_tool_bundles(schema: str, extra_info: dict = None) -> 
list[ApiToolBundle]: + def convert_schema_to_tool_bundles(schema: str, extra_info: dict | None = None) -> tuple[list[ApiToolBundle], str]: """ convert schema to tool bundles :return: the list of tool bundles, description """ try: - tool_bundles = ApiBasedToolSchemaParser.auto_parse_to_tool_bundle(schema, extra_info=extra_info) - return tool_bundles + return ApiBasedToolSchemaParser.auto_parse_to_tool_bundle(schema, extra_info=extra_info) except Exception as e: raise ValueError(f"invalid schema: {str(e)}") @@ -111,7 +109,7 @@ def create_api_tool_provider( raise ValueError(f"invalid schema type {schema}") # check if the provider exists - provider: ApiToolProvider = ( + provider: ApiToolProvider | None = ( db.session.query(ApiToolProvider) .filter( ApiToolProvider.tenant_id == tenant_id, @@ -158,7 +156,13 @@ def create_api_tool_provider( provider_controller.load_bundled_tools(tool_bundles) # encrypt credentials - tool_configuration = ToolConfigurationManager(tenant_id=tenant_id, provider_controller=provider_controller) + tool_configuration = ToolConfigurationManager( + tenant_id=tenant_id, + config=provider_controller.get_credentials_schema(), + provider_type=provider_controller.provider_type.value, + provider_identity=provider_controller.identity.name + ) + encrypted_credentials = tool_configuration.encrypt_tool_credentials(credentials) db_provider.credentials_str = json.dumps(encrypted_credentials) @@ -195,21 +199,21 @@ def get_api_tool_provider_remote_schema(user_id: str, tenant_id: str, url: str): return {"schema": schema} @staticmethod - def list_api_tool_provider_tools(user_id: str, tenant_id: str, provider: str) -> list[UserTool]: + def list_api_tool_provider_tools(user_id: str, tenant_id: str, provider_name: str) -> list[UserTool]: """ list api tool provider tools """ - provider: ApiToolProvider = ( + provider: ApiToolProvider | None = ( db.session.query(ApiToolProvider) .filter( ApiToolProvider.tenant_id == tenant_id, - ApiToolProvider.name == provider, + 
ApiToolProvider.name == provider_name, ) .first() ) if provider is None: - raise ValueError(f"you have not added provider {provider}") + raise ValueError(f"you have not added provider {provider_name}") controller = ToolTransformService.api_provider_to_controller(db_provider=provider) labels = ToolLabelManager.get_tool_labels(controller) @@ -243,7 +247,7 @@ def update_api_tool_provider( raise ValueError(f"invalid schema type {schema}") # check if the provider exists - provider: ApiToolProvider = ( + provider: ApiToolProvider | None = ( db.session.query(ApiToolProvider) .filter( ApiToolProvider.tenant_id == tenant_id, @@ -282,7 +286,12 @@ def update_api_tool_provider( provider_controller.load_bundled_tools(tool_bundles) # get original credentials if exists - tool_configuration = ToolConfigurationManager(tenant_id=tenant_id, provider_controller=provider_controller) + tool_configuration = ToolConfigurationManager( + tenant_id=tenant_id, + config=provider_controller.get_credentials_schema(), + provider_type=provider_controller.provider_type.value, + provider_identity=provider_controller.identity.name + ) original_credentials = tool_configuration.decrypt_tool_credentials(provider.credentials) masked_credentials = tool_configuration.mask_tool_credentials(original_credentials) @@ -310,7 +319,7 @@ def delete_api_tool_provider(user_id: str, tenant_id: str, provider_name: str): """ delete tool provider """ - provider: ApiToolProvider = ( + provider: ApiToolProvider | None = ( db.session.query(ApiToolProvider) .filter( ApiToolProvider.tenant_id == tenant_id, @@ -360,7 +369,7 @@ def test_api_tool_preview( if tool_bundle is None: raise ValueError(f"invalid tool name {tool_name}") - db_provider: ApiToolProvider = ( + db_provider: ApiToolProvider | None = ( db.session.query(ApiToolProvider) .filter( ApiToolProvider.tenant_id == tenant_id, @@ -396,7 +405,12 @@ def test_api_tool_preview( # decrypt credentials if db_provider.id: - tool_configuration = 
ToolConfigurationManager(tenant_id=tenant_id, provider_controller=provider_controller) + tool_configuration = ToolConfigurationManager( + tenant_id=tenant_id, + config=provider_controller.get_credentials_schema(), + provider_type=provider_controller.provider_type.value, + provider_identity=provider_controller.identity.name + ) decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials) # check if the credential has changed, save the original credential masked_credentials = tool_configuration.mask_tool_credentials(decrypted_credentials) @@ -444,7 +458,7 @@ def list_api_tools(user_id: str, tenant_id: str) -> list[UserToolProvider]: # add icon ToolTransformService.repack_provider(user_provider) - tools = provider_controller.get_tools(user_id=user_id, tenant_id=tenant_id) + tools = provider_controller.get_tools(tenant_id=tenant_id) for tool in tools: user_provider.tools.append( diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 1848fb2a1343ce..513a4219660b75 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -3,12 +3,12 @@ from typing import Optional, Union from configs import dify_config +from core.entities.provider_entities import ProviderConfig from core.tools.entities.api_entities import UserTool, UserToolProvider from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ( ApiProviderAuthType, - ProviderConfig, ToolParameter, ToolProviderType, ) @@ -106,7 +106,10 @@ def builtin_provider_to_user_provider( # init tool configuration tool_configuration = ToolConfigurationManager( - tenant_id=db_provider.tenant_id, provider_controller=provider_controller + tenant_id=db_provider.tenant_id, + config=provider_controller.get_credentials_schema(), + provider_type=provider_controller.provider_type.value, + 
provider_identity=provider_controller.identity.name ) # decrypt the credentials and mask the credentials decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials=credentials) @@ -143,7 +146,7 @@ def workflow_provider_to_controller(db_provider: WorkflowToolProvider) -> Workfl @staticmethod def workflow_provider_to_user_provider( - provider_controller: WorkflowToolProviderController, labels: list[str] = None + provider_controller: WorkflowToolProviderController, labels: list[str] | None = None ): """ convert provider controller to user provider @@ -174,7 +177,7 @@ def api_provider_to_user_provider( provider_controller: ApiToolProviderController, db_provider: ApiToolProvider, decrypt_credentials: bool = True, - labels: list[str] = None, + labels: list[str] | None = None, ) -> UserToolProvider: """ convert provider controller to user provider @@ -209,7 +212,10 @@ def api_provider_to_user_provider( if decrypt_credentials: # init tool configuration tool_configuration = ToolConfigurationManager( - tenant_id=db_provider.tenant_id, provider_controller=provider_controller + tenant_id=db_provider.tenant_id, + config=provider_controller.get_credentials_schema(), + provider_type=provider_controller.provider_type.value, + provider_identity=provider_controller.identity.name ) # decrypt the credentials and mask the credentials @@ -223,9 +229,9 @@ def api_provider_to_user_provider( @staticmethod def tool_to_user_tool( tool: Union[ApiToolBundle, WorkflowTool, Tool], - credentials: dict = None, - tenant_id: str = None, - labels: list[str] = None, + credentials: dict | None = None, + tenant_id: str | None = None, + labels: list[str] | None = None, ) -> UserTool: """ convert tool to user tool From 46ba16fe901bd826d5de7d1a2c85056d99e67bf1 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 30 Aug 2024 18:21:03 +0800 Subject: [PATCH 023/325] fix: reformatter --- api/core/workflow/nodes/tool/tool_node.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 2d01bad1f468ac..ba0f67e1e2bed0 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -1,6 +1,6 @@ -from collections.abc import Generator, Sequence +from collections.abc import Generator, Iterable, Sequence from os import path -from typing import Any, Iterable, cast +from typing import Any, cast from core.app.segments import ArrayAnySegment, ArrayAnyVariable, parser from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler From 279dee485db13767da34d1f0d552eb8b388541fb Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 30 Aug 2024 21:10:19 +0800 Subject: [PATCH 024/325] feat: type --- api/core/entities/provider_entities.py | 4 - api/core/tools/entities/api_entities.py | 2 +- .../tools/provider/workflow_tool_provider.py | 6 +- api/models/tools.py | 108 ++++++------------ .../tools/builtin_tools_manage_service.py | 49 +++++--- api/services/tools/tools_transform_service.py | 24 ++-- .../tools/workflow_tools_manage_service.py | 75 ++++++------ 7 files changed, 121 insertions(+), 147 deletions(-) diff --git a/api/core/entities/provider_entities.py b/api/core/entities/provider_entities.py index e0d1de151f143d..6c616d1aadfa8d 100644 --- a/api/core/entities/provider_entities.py +++ b/api/core/entities/provider_entities.py @@ -128,10 +128,6 @@ def value_of(cls, value: str) -> "ProviderConfig.Type": return mode raise ValueError(f'invalid mode value {value}') - @staticmethod - def default(value: str) -> str: - return "" - type: Type = Field(..., description="The type of the credentials") name: str = Field(..., description="The name of the credentials") diff --git a/api/core/tools/entities/api_entities.py b/api/core/tools/entities/api_entities.py index 2aaca3506009b2..3f0e7285af1667 100644 --- a/api/core/tools/entities/api_entities.py +++ b/api/core/tools/entities/api_entities.py @@ -26,7 +26,7 @@ 
class UserToolProvider(BaseModel): author: str name: str # identifier description: I18nObject - icon: str + icon: str | dict label: I18nObject # label type: ToolProviderType masked_credentials: Optional[dict] = None diff --git a/api/core/tools/provider/workflow_tool_provider.py b/api/core/tools/provider/workflow_tool_provider.py index a84b7a36ed9e47..19bf3d9a86adf4 100644 --- a/api/core/tools/provider/workflow_tool_provider.py +++ b/api/core/tools/provider/workflow_tool_provider.py @@ -208,8 +208,12 @@ def get_tools(self, tenant_id: str) -> list[WorkflowTool]: if not db_providers: return [] + + app = db_providers.app + if not app: + raise ValueError("can not read app of workflow") - self.tools = [self._get_db_provider_tool(db_providers, db_providers.app)] + self.tools = [self._get_db_provider_tool(db_providers, app)] return self.tools diff --git a/api/models/tools.py b/api/models/tools.py index a87dfad0797934..47b89bfe0afef3 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -1,4 +1,5 @@ import json +from datetime import datetime from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column @@ -13,7 +14,7 @@ from .types import StringUUID -class BuiltinToolProvider(db.Model): +class BuiltinToolProvider(Base): """ This table stores the tool provider information for built-in tools for each tenant. 
""" @@ -25,61 +26,22 @@ class BuiltinToolProvider(db.Model): ) # id of the tool provider - id = db.Column(StringUUID, server_default=db.text('uuid_generate_v4()')) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text('uuid_generate_v4()')) # id of the tenant - tenant_id = db.Column(StringUUID, nullable=True) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=True) # who created this tool provider - user_id = db.Column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # name of the tool provider - provider = db.Column(db.String(40), nullable=False) + provider: Mapped[str] = mapped_column(db.String(40), nullable=False) # credential of the tool provider - encrypted_credentials = db.Column(db.Text, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) - updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) + encrypted_credentials: Mapped[str] = mapped_column(db.Text, nullable=True) + created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) + updated_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) @property def credentials(self) -> dict: return json.loads(self.encrypted_credentials) -class PublishedAppTool(db.Model): - """ - The table stores the apps published as a tool for each person. 
- """ - __tablename__ = 'tool_published_apps' - __table_args__ = ( - db.PrimaryKeyConstraint('id', name='published_app_tool_pkey'), - db.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool') - ) - - # id of the tool provider - id = db.Column(StringUUID, server_default=db.text('uuid_generate_v4()')) - # id of the app - app_id = db.Column(StringUUID, ForeignKey('apps.id'), nullable=False) - # who published this tool - user_id = db.Column(StringUUID, nullable=False) - # description of the tool, stored in i18n format, for human - description = db.Column(db.Text, nullable=False) - # llm_description of the tool, for LLM - llm_description = db.Column(db.Text, nullable=False) - # query description, query will be seem as a parameter of the tool, to describe this parameter to llm, we need this field - query_description = db.Column(db.Text, nullable=False) - # query name, the name of the query parameter - query_name = db.Column(db.String(40), nullable=False) - # name of the tool provider - tool_name = db.Column(db.String(40), nullable=False) - # author - author = db.Column(db.String(40), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) - updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) - - @property - def description_i18n(self) -> I18nObject: - return I18nObject(**json.loads(self.description)) - - @property - def app(self) -> App | None: - return db.session.query(App).filter(App.id == self.app_id).first() - class ApiToolProvider(Base): """ The table stores the api providers. 
@@ -129,14 +91,14 @@ def credentials(self) -> dict: return json.loads(self.credentials_str) @property - def user(self) -> Account: + def user(self) -> Account | None: return db.session.query(Account).filter(Account.id == self.user_id).first() @property - def tenant(self) -> Tenant: + def tenant(self) -> Tenant | None: return db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first() -class ToolLabelBinding(db.Model): +class ToolLabelBinding(Base): """ The table stores the labels for tools. """ @@ -146,15 +108,15 @@ class ToolLabelBinding(db.Model): db.UniqueConstraint('tool_id', 'label_name', name='unique_tool_label_bind'), ) - id = db.Column(StringUUID, server_default=db.text('uuid_generate_v4()')) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text('uuid_generate_v4()')) # tool id - tool_id = db.Column(db.String(64), nullable=False) + tool_id: Mapped[str] = mapped_column(db.String(64), nullable=False) # tool type - tool_type = db.Column(db.String(40), nullable=False) + tool_type: Mapped[str] = mapped_column(db.String(40), nullable=False) # label name - label_name = db.Column(db.String(40), nullable=False) + label_name: Mapped[str] = mapped_column(db.String(40), nullable=False) -class WorkflowToolProvider(db.Model): +class WorkflowToolProvider(Base): """ The table stores the workflow providers. 
""" @@ -165,41 +127,37 @@ class WorkflowToolProvider(db.Model): db.UniqueConstraint('tenant_id', 'app_id', name='unique_workflow_tool_provider_app_id'), ) - id = db.Column(StringUUID, server_default=db.text('uuid_generate_v4()')) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text('uuid_generate_v4()')) # name of the workflow provider - name = db.Column(db.String(40), nullable=False) + name: Mapped[str] = mapped_column(db.String(40), nullable=False) # label of the workflow provider - label = db.Column(db.String(255), nullable=False, server_default='') + label: Mapped[str] = mapped_column(db.String(255), nullable=False, server_default='') # icon - icon = db.Column(db.String(255), nullable=False) + icon: Mapped[str] = mapped_column(db.String(255), nullable=False) # app id of the workflow provider - app_id = db.Column(StringUUID, nullable=False) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # version of the workflow provider - version = db.Column(db.String(255), nullable=False, server_default='') + version: Mapped[str] = mapped_column(db.String(255), nullable=False, server_default='') # who created this tool - user_id = db.Column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id - tenant_id = db.Column(StringUUID, nullable=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # description of the provider - description = db.Column(db.Text, nullable=False) + description: Mapped[str] = mapped_column(db.Text, nullable=False) # parameter configuration - parameter_configuration = db.Column(db.Text, nullable=False, server_default='[]') + parameter_configuration: Mapped[str] = mapped_column(db.Text, nullable=False, server_default='[]') # privacy policy - privacy_policy = db.Column(db.String(255), nullable=True, server_default='') + privacy_policy: Mapped[str] = mapped_column(db.String(255), nullable=True, server_default='') - created_at = db.Column(db.DateTime, 
nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) - updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) + created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) + updated_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)')) @property - def schema_type(self) -> ApiProviderSchemaType: - return ApiProviderSchemaType.value_of(self.schema_type_str) - - @property - def user(self) -> Account: + def user(self) -> Account | None: return db.session.query(Account).filter(Account.id == self.user_id).first() @property - def tenant(self) -> Tenant: + def tenant(self) -> Tenant | None: return db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first() @property @@ -210,7 +168,7 @@ def parameter_configurations(self) -> list[WorkflowToolParameterConfiguration]: ] @property - def app(self) -> App: + def app(self) -> App | None: return db.session.query(App).filter(App.id == self.app_id).first() class ToolModelInvoke(db.Model): diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index dc8cebb587a2d8..8bc93e7ea3be0f 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -28,10 +28,13 @@ def list_builtin_tool_provider_tools(user_id: str, tenant_id: str, provider: str tools = provider_controller.get_tools() tool_provider_configurations = ToolConfigurationManager( - tenant_id=tenant_id, provider_controller=provider_controller + tenant_id=tenant_id, + config=provider_controller.get_credentials_schema(), + provider_type=provider_controller.provider_type.value, + provider_identity=provider_controller.identity.name, ) # check if user has added the provider - builtin_provider: BuiltinToolProvider = ( + builtin_provider: BuiltinToolProvider | None = ( 
db.session.query(BuiltinToolProvider) .filter( BuiltinToolProvider.tenant_id == tenant_id, @@ -75,7 +78,7 @@ def update_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st update builtin tool provider """ # get if the provider exists - provider: BuiltinToolProvider = ( + provider: BuiltinToolProvider | None = ( db.session.query(BuiltinToolProvider) .filter( BuiltinToolProvider.tenant_id == tenant_id, @@ -89,7 +92,13 @@ def update_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st provider_controller = ToolManager.get_builtin_provider(provider_name) if not provider_controller.need_credentials: raise ValueError(f"provider {provider_name} does not need credentials") - tool_configuration = ToolConfigurationManager(tenant_id=tenant_id, provider_controller=provider_controller) + tool_configuration = ToolConfigurationManager( + tenant_id=tenant_id, + config=provider_controller.get_credentials_schema(), + provider_type=provider_controller.provider_type.value, + provider_identity=provider_controller.identity.name, + ) + # get original credentials if exists if provider is not None: original_credentials = tool_configuration.decrypt_tool_credentials(provider.credentials) @@ -132,7 +141,7 @@ def get_builtin_tool_provider_credentials(user_id: str, tenant_id: str, provider """ get builtin tool provider credentials """ - provider: BuiltinToolProvider = ( + provider_obj: BuiltinToolProvider | None = ( db.session.query(BuiltinToolProvider) .filter( BuiltinToolProvider.tenant_id == tenant_id, @@ -141,12 +150,17 @@ def get_builtin_tool_provider_credentials(user_id: str, tenant_id: str, provider .first() ) - if provider is None: + if provider_obj is None: return {} - provider_controller = ToolManager.get_builtin_provider(provider.provider) - tool_configuration = ToolConfigurationManager(tenant_id=tenant_id, provider_controller=provider_controller) - credentials = tool_configuration.decrypt_tool_credentials(provider.credentials) + provider_controller 
= ToolManager.get_builtin_provider(provider_obj.provider) + tool_configuration = ToolConfigurationManager( + tenant_id=tenant_id, + config=provider_controller.get_credentials_schema(), + provider_type=provider_controller.provider_type.value, + provider_identity=provider_controller.identity.name, + ) + credentials = tool_configuration.decrypt_tool_credentials(provider_obj.credentials) credentials = tool_configuration.mask_tool_credentials(credentials) return credentials @@ -155,7 +169,7 @@ def delete_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st """ delete tool provider """ - provider: BuiltinToolProvider = ( + provider_obj: BuiltinToolProvider | None = ( db.session.query(BuiltinToolProvider) .filter( BuiltinToolProvider.tenant_id == tenant_id, @@ -164,15 +178,20 @@ def delete_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st .first() ) - if provider is None: + if provider_obj is None: raise ValueError(f"you have not added provider {provider_name}") - db.session.delete(provider) + db.session.delete(provider_obj) db.session.commit() # delete cache provider_controller = ToolManager.get_builtin_provider(provider_name) - tool_configuration = ToolConfigurationManager(tenant_id=tenant_id, provider_controller=provider_controller) + tool_configuration = ToolConfigurationManager( + tenant_id=tenant_id, + config=provider_controller.get_credentials_schema(), + provider_type=provider_controller.provider_type.value, + provider_identity=provider_controller.identity.name, + ) tool_configuration.delete_tool_credentials_cache() return {"result": "success"} @@ -212,8 +231,8 @@ def list_builtin_tools(user_id: str, tenant_id: str) -> list[UserToolProvider]: try: # handle include, exclude if is_filtered( - include_set=dify_config.POSITION_TOOL_INCLUDES_SET, - exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET, + include_set=dify_config.POSITION_TOOL_INCLUDES_SET, # type: ignore + exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET, # type: 
ignore data=provider_controller, name_func=lambda x: x.identity.name, ): diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 513a4219660b75..8b4076a418a102 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -1,6 +1,6 @@ import json import logging -from typing import Optional, Union +from typing import Literal, Optional, Union, overload from configs import dify_config from core.entities.provider_entities import ProviderConfig @@ -25,7 +25,7 @@ class ToolTransformService: @classmethod - def get_tool_provider_icon_url(cls, provider_type: str, provider_name: str, icon: str) -> Union[str, dict]: + def get_tool_provider_icon_url(cls, provider_type: str, provider_name: str, icon: str | dict) -> Union[str, dict]: """ get tool provider icon url """ @@ -35,7 +35,9 @@ def get_tool_provider_icon_url(cls, provider_type: str, provider_name: str, icon return url_prefix + "builtin/" + provider_name + "/icon" elif provider_type in [ToolProviderType.API.value, ToolProviderType.WORKFLOW.value]: try: - return json.loads(icon) + if isinstance(icon, str): + return json.loads(icon) + return icon except: return {"background": "#252525", "content": "\ud83d\ude01"} @@ -92,7 +94,8 @@ def builtin_provider_to_user_provider( # get credentials schema schema = provider_controller.get_credentials_schema() for name, value in schema.items(): - result.masked_credentials[name] = ProviderConfig.Type.default(value.type) + if result.masked_credentials: + result.masked_credentials[name] = "" # check if the provider need credentials if not provider_controller.need_credentials: @@ -184,9 +187,14 @@ def api_provider_to_user_provider( """ username = "Anonymous" try: - username = db_provider.user.name + user = db_provider.user + if not user: + raise ValueError("user not found") + + username = user.name except Exception as e: logger.error(f"failed to get user name for api provider 
{db_provider.id}: {str(e)}") + # add provider into providers credentials = db_provider.credentials result = UserToolProvider( @@ -266,9 +274,9 @@ def tool_to_user_tool( author=tool.identity.author, name=tool.identity.name, label=tool.identity.label, - description=tool.description.human, + description=tool.description.human if tool.description else I18nObject(en_US=''), parameters=current_parameters, - labels=labels, + labels=labels or [], ) if isinstance(tool, ApiToolBundle): return UserTool( @@ -277,5 +285,5 @@ def tool_to_user_tool( label=I18nObject(en_US=tool.operation_id, zh_Hans=tool.operation_id), description=I18nObject(en_US=tool.summary or "", zh_Hans=tool.summary or ""), parameters=tool.parameters, - labels=labels, + labels=labels or [], ) diff --git a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index 3830e75339eebc..9586e743d08a2d 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -4,7 +4,7 @@ from sqlalchemy import or_ from core.model_runtime.utils.encoders import jsonable_encoder -from core.tools.entities.api_entities import UserToolProvider +from core.tools.entities.api_entities import UserTool, UserToolProvider from core.tools.provider.workflow_tool_provider import WorkflowToolProviderController from core.tools.tool_label_manager import ToolLabelManager from core.tools.utils.workflow_configuration_sync import WorkflowToolConfigurationUtils @@ -32,7 +32,7 @@ def create_workflow_tool( description: str, parameters: list[dict], privacy_policy: str = "", - labels: list[str] = None, + labels: list[str] | None = None, ) -> dict: """ Create a workflow tool. 
@@ -62,12 +62,12 @@ def create_workflow_tool( if existing_workflow_tool_provider is not None: raise ValueError(f"Tool with name {name} or app_id {workflow_app_id} already exists") - app: App = db.session.query(App).filter(App.id == workflow_app_id, App.tenant_id == tenant_id).first() + app: App | None = db.session.query(App).filter(App.id == workflow_app_id, App.tenant_id == tenant_id).first() if app is None: raise ValueError(f"App {workflow_app_id} not found") - workflow: Workflow = app.workflow + workflow: Workflow | None = app.workflow if workflow is None: raise ValueError(f"Workflow not found for app {workflow_app_id}") @@ -106,7 +106,7 @@ def update_workflow_tool( description: str, parameters: list[dict], privacy_policy: str = "", - labels: list[str] = None, + labels: list[str] | None = None, ) -> dict: """ Update a workflow tool. @@ -138,7 +138,7 @@ def update_workflow_tool( if existing_workflow_tool_provider is not None: raise ValueError(f"Tool with name {name} already exists") - workflow_tool_provider: WorkflowToolProvider = ( + workflow_tool_provider: WorkflowToolProvider | None = ( db.session.query(WorkflowToolProvider) .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) .first() @@ -147,14 +147,14 @@ def update_workflow_tool( if workflow_tool_provider is None: raise ValueError(f"Tool {workflow_tool_id} not found") - app: App = ( + app: App | None = ( db.session.query(App).filter(App.id == workflow_tool_provider.app_id, App.tenant_id == tenant_id).first() ) if app is None: raise ValueError(f"App {workflow_tool_provider.app_id} not found") - workflow: Workflow = app.workflow + workflow: Workflow | None = app.workflow if workflow is None: raise ValueError(f"Workflow not found for app {workflow_tool_provider.app_id}") @@ -243,36 +243,12 @@ def get_workflow_tool_by_tool_id(cls, user_id: str, tenant_id: str, workflow_too :param workflow_app_id: the workflow app id :return: the tool """ - db_tool: 
WorkflowToolProvider = ( + db_tool: WorkflowToolProvider | None = ( db.session.query(WorkflowToolProvider) .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) .first() ) - - if db_tool is None: - raise ValueError(f"Tool {workflow_tool_id} not found") - - workflow_app: App = db.session.query(App).filter(App.id == db_tool.app_id, App.tenant_id == tenant_id).first() - - if workflow_app is None: - raise ValueError(f"App {db_tool.app_id} not found") - - tool = ToolTransformService.workflow_provider_to_controller(db_tool) - - return { - "name": db_tool.name, - "label": db_tool.label, - "workflow_tool_id": db_tool.id, - "workflow_app_id": db_tool.app_id, - "icon": json.loads(db_tool.icon), - "description": db_tool.description, - "parameters": jsonable_encoder(db_tool.parameter_configurations), - "tool": ToolTransformService.tool_to_user_tool( - tool.get_tools(user_id, tenant_id)[0], labels=ToolLabelManager.get_tool_labels(tool) - ), - "synced": workflow_app.workflow.version == db_tool.version, - "privacy_policy": db_tool.privacy_policy, - } + return cls._get_workflow_tool(db_tool) @classmethod def get_workflow_tool_by_app_id(cls, user_id: str, tenant_id: str, workflow_app_id: str) -> dict: @@ -283,19 +259,31 @@ def get_workflow_tool_by_app_id(cls, user_id: str, tenant_id: str, workflow_app_ :param workflow_app_id: the workflow app id :return: the tool """ - db_tool: WorkflowToolProvider = ( + db_tool: WorkflowToolProvider | None = ( db.session.query(WorkflowToolProvider) .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.app_id == workflow_app_id) .first() ) - + return cls._get_workflow_tool(db_tool) + + @classmethod + def _get_workflow_tool(cls, db_tool: WorkflowToolProvider | None): + """ + Get a workflow tool. 
+ :db_tool: the database tool + :return: the tool + """ if db_tool is None: - raise ValueError(f"Tool {workflow_app_id} not found") + raise ValueError("Tool not found") - workflow_app: App = db.session.query(App).filter(App.id == db_tool.app_id, App.tenant_id == tenant_id).first() + workflow_app: App | None = db.session.query(App).filter(App.id == db_tool.app_id, App.tenant_id == db_tool.tenant_id).first() if workflow_app is None: raise ValueError(f"App {db_tool.app_id} not found") + + workflow = workflow_app.workflow + if not workflow: + raise ValueError("Workflow not found") tool = ToolTransformService.workflow_provider_to_controller(db_tool) @@ -308,14 +296,14 @@ def get_workflow_tool_by_app_id(cls, user_id: str, tenant_id: str, workflow_app_ "description": db_tool.description, "parameters": jsonable_encoder(db_tool.parameter_configurations), "tool": ToolTransformService.tool_to_user_tool( - tool.get_tools(user_id, tenant_id)[0], labels=ToolLabelManager.get_tool_labels(tool) + tool.get_tools(db_tool.tenant_id)[0], labels=ToolLabelManager.get_tool_labels(tool) ), - "synced": workflow_app.workflow.version == db_tool.version, + "synced": workflow.version == db_tool.version, "privacy_policy": db_tool.privacy_policy, } @classmethod - def list_single_workflow_tools(cls, user_id: str, tenant_id: str, workflow_tool_id: str) -> list[dict]: + def list_single_workflow_tools(cls, user_id: str, tenant_id: str, workflow_tool_id: str) -> list[UserTool]: """ List workflow tool provider tools. 
:param user_id: the user id @@ -323,7 +311,7 @@ def list_single_workflow_tools(cls, user_id: str, tenant_id: str, workflow_tool_ :param workflow_app_id: the workflow app id :return: the list of tools """ - db_tool: WorkflowToolProvider = ( + db_tool: WorkflowToolProvider | None = ( db.session.query(WorkflowToolProvider) .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) .first() @@ -336,6 +324,7 @@ def list_single_workflow_tools(cls, user_id: str, tenant_id: str, workflow_tool_ return [ ToolTransformService.tool_to_user_tool( - tool.get_tools(user_id, tenant_id)[0], labels=ToolLabelManager.get_tool_labels(tool) + tool=tool.get_tools(db_tool.tenant_id)[0], + labels=ToolLabelManager.get_tool_labels(tool) ) ] From 60e75dc748c1173e1ae65ef03fc70faa380b9208 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 30 Aug 2024 21:11:39 +0800 Subject: [PATCH 025/325] fix: linter --- api/models/tools.py | 2 -- api/services/tools/tools_transform_service.py | 3 +-- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/api/models/tools.py b/api/models/tools.py index 47b89bfe0afef3..27d1001740c1a6 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -1,10 +1,8 @@ import json from datetime import datetime -from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column -from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration from extensions.ext_database import db diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 8b4076a418a102..56e579a27b8eb6 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -1,9 +1,8 @@ import json import logging -from typing import Literal, Optional, Union, overload +from typing import Optional, Union 
from configs import dify_config -from core.entities.provider_entities import ProviderConfig from core.tools.entities.api_entities import UserTool, UserToolProvider from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle From de01ca8d550c3daa8107131a2d354fc317d0dc89 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 30 Aug 2024 21:25:58 +0800 Subject: [PATCH 026/325] feat: inner api encrypt --- api/controllers/inner_api/plugin/plugin.py | 7 ++- api/core/plugin/encrypt/__init__.py | 22 ++++++++ api/core/plugin/entities/request.py | 4 +- api/core/tools/tool_manager.py | 14 ++--- api/core/tools/utils/configuration.py | 52 +++++++++---------- .../tools/api_tools_manage_service.py | 16 +++--- .../tools/builtin_tools_manage_service.py | 18 +++---- api/services/tools/tools_transform_service.py | 14 ++--- 8 files changed, 88 insertions(+), 59 deletions(-) create mode 100644 api/core/plugin/encrypt/__init__.py diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index c3911f31aeb56a..70a74e73e7868f 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -8,6 +8,7 @@ from controllers.inner_api.wraps import plugin_inner_api_only from core.plugin.backwards_invocation.app import PluginAppBackwardsInvocation from core.plugin.backwards_invocation.model import PluginModelBackwardsInvocation +from core.plugin.encrypt import PluginEncrypter from core.plugin.entities.request import ( RequestInvokeApp, RequestInvokeEncrypt, @@ -139,7 +140,10 @@ class PluginInvokeEncryptApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeEncrypt) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeEncrypt): - """""" + """ + encrypt or decrypt data + """ + return PluginEncrypter.invoke_encrypt(tenant_model, payload) api.add_resource(PluginInvokeLLMApi, '/invoke/llm') api.add_resource(PluginInvokeTextEmbeddingApi, 
'/invoke/text-embedding') @@ -150,3 +154,4 @@ def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeEncrypt api.add_resource(PluginInvokeToolApi, '/invoke/tool') api.add_resource(PluginInvokeNodeApi, '/invoke/node') api.add_resource(PluginInvokeAppApi, '/invoke/app') +api.add_resource(PluginInvokeEncryptApi, '/invoke/encrypt') diff --git a/api/core/plugin/encrypt/__init__.py b/api/core/plugin/encrypt/__init__.py new file mode 100644 index 00000000000000..5315780fce1222 --- /dev/null +++ b/api/core/plugin/encrypt/__init__.py @@ -0,0 +1,22 @@ +from collections.abc import Mapping +from typing import Any + +from core.plugin.entities.request import RequestInvokeEncrypt +from core.tools.utils.configuration import ProviderConfigEncrypter +from models.account import Tenant + + +class PluginEncrypter: + @classmethod + def invoke_encrypt(cls, tenant: Tenant, payload: RequestInvokeEncrypt) -> Mapping[str, Any]: + encrypter = ProviderConfigEncrypter( + tenant_id=tenant.id, + config=payload.data, + provider_type=payload.type, + provider_identity=payload.identity, + ) + + if payload.opt == "encrypt": + return encrypter.encrypt(payload.data) + else: + return encrypter.decrypt(payload.data) \ No newline at end of file diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 0533746815f0a0..9911ec9d85b4b5 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -112,5 +112,7 @@ class RequestInvokeEncrypt(BaseModel): Request to encryption """ opt: Literal["encrypt", "decrypt"] + type: Literal["endpoint"] + identity: str data: dict = Field(default_factory=dict) - config: Mapping[str, BasicProviderConfig] = Field(default_factory=Mapping) + config: Mapping[str, BasicProviderConfig] = Field(default_factory=Mapping) \ No newline at end of file diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 56e97252f9478a..26b8c755802871 100644 --- 
a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -24,7 +24,7 @@ from core.tools.tool.tool import Tool from core.tools.tool.workflow_tool import WorkflowTool from core.tools.tool_label_manager import ToolLabelManager -from core.tools.utils.configuration import ToolConfigurationManager, ToolParameterConfigurationManager +from core.tools.utils.configuration import ProviderConfigEncrypter, ToolParameterConfigurationManager from core.tools.utils.tool_parameter_converter import ToolParameterConverter from core.workflow.nodes.tool.entities import ToolEntity from extensions.ext_database import db @@ -116,14 +116,14 @@ def get_tool_runtime(cls, provider_type: ToolProviderType, # decrypt the credentials credentials = builtin_provider.credentials controller = cls.get_builtin_provider(provider_id) - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=controller.get_credentials_schema(), provider_type=controller.provider_type.value, provider_identity=controller.identity.name ) - decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials) + decrypted_credentials = tool_configuration.decrypt(credentials) return cast(BuiltinTool, builtin_tool.fork_tool_runtime(runtime={ 'tenant_id': tenant_id, @@ -140,13 +140,13 @@ def get_tool_runtime(cls, provider_type: ToolProviderType, api_provider, credentials = cls.get_api_provider_controller(tenant_id, provider_id) # decrypt the credentials - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=api_provider.get_credentials_schema(), provider_type=api_provider.provider_type.value, provider_identity=api_provider.identity.name ) - decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials) + decrypted_credentials = tool_configuration.decrypt(credentials) return cast(ApiTool, api_provider.get_tool(tool_name).fork_tool_runtime(runtime={ 
'tenant_id': tenant_id, @@ -523,14 +523,14 @@ def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict: provider_obj, ApiProviderAuthType.API_KEY if credentials['auth_type'] == 'api_key' else ApiProviderAuthType.NONE ) # init tool configuration - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=controller.get_credentials_schema(), provider_type=controller.provider_type.value, provider_identity=controller.identity.name ) - decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials) + decrypted_credentials = tool_configuration.decrypt(credentials) masked_credentials = tool_configuration.mask_tool_credentials(decrypted_credentials) try: diff --git a/api/core/tools/utils/configuration.py b/api/core/tools/utils/configuration.py index 5b65ce443f0fcc..2f4414c114b91e 100644 --- a/api/core/tools/utils/configuration.py +++ b/api/core/tools/utils/configuration.py @@ -15,60 +15,60 @@ from core.tools.tool.tool import Tool -class ToolConfigurationManager(BaseModel): +class ProviderConfigEncrypter(BaseModel): tenant_id: str config: Mapping[str, BasicProviderConfig] provider_type: str provider_identity: str - def _deep_copy(self, credentials: dict[str, str]) -> dict[str, str]: + def _deep_copy(self, data: dict[str, str]) -> dict[str, str]: """ - deep copy credentials + deep copy data """ - return deepcopy(credentials) + return deepcopy(data) - def encrypt_tool_credentials(self, credentials: dict[str, str]) -> dict[str, str]: + def encrypt(self, data: dict[str, str]) -> Mapping[str, str]: """ encrypt tool credentials with tenant id return a deep copy of credentials with encrypted values """ - credentials = self._deep_copy(credentials) + data = self._deep_copy(data) # get fields need to be decrypted fields = self.config for field_name, field in fields.items(): if field.type == BasicProviderConfig.Type.SECRET_INPUT: - if field_name in credentials: - encrypted = 
encrypter.encrypt_token(self.tenant_id, credentials[field_name]) - credentials[field_name] = encrypted + if field_name in data: + encrypted = encrypter.encrypt_token(self.tenant_id, data[field_name]) + data[field_name] = encrypted - return credentials + return data - def mask_tool_credentials(self, credentials: dict[str, Any]) -> dict[str, Any]: + def mask_tool_credentials(self, data: dict[str, Any]) -> Mapping[str, Any]: """ mask tool credentials return a deep copy of credentials with masked values """ - credentials = self._deep_copy(credentials) + data = self._deep_copy(data) # get fields need to be decrypted fields = self.config for field_name, field in fields.items(): if field.type == BasicProviderConfig.Type.SECRET_INPUT: - if field_name in credentials: - if len(credentials[field_name]) > 6: - credentials[field_name] = \ - credentials[field_name][:2] + \ - '*' * (len(credentials[field_name]) - 4) + \ - credentials[field_name][-2:] + if field_name in data: + if len(data[field_name]) > 6: + data[field_name] = \ + data[field_name][:2] + \ + '*' * (len(data[field_name]) - 4) + \ + data[field_name][-2:] else: - credentials[field_name] = '*' * len(credentials[field_name]) + data[field_name] = '*' * len(data[field_name]) - return credentials + return data - def decrypt_tool_credentials(self, credentials: dict[str, str]) -> dict[str, str]: + def decrypt(self, data: dict[str, str]) -> Mapping[str, str]: """ decrypt tool credentials with tenant id @@ -82,19 +82,19 @@ def decrypt_tool_credentials(self, credentials: dict[str, str]) -> dict[str, str cached_credentials = cache.get() if cached_credentials: return cached_credentials - credentials = self._deep_copy(credentials) + data = self._deep_copy(data) # get fields need to be decrypted fields = self.config for field_name, field in fields.items(): if field.type == BasicProviderConfig.Type.SECRET_INPUT: - if field_name in credentials: + if field_name in data: try: - credentials[field_name] = 
encrypter.decrypt_token(self.tenant_id, credentials[field_name]) + data[field_name] = encrypter.decrypt_token(self.tenant_id, data[field_name]) except: pass - cache.set(credentials) - return credentials + cache.set(data) + return data def delete_tool_credentials_cache(self): cache = ToolProviderCredentialsCache( diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index bfb9827ce268b9..3e6e62a415c5ef 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -15,7 +15,7 @@ from core.tools.provider.api_tool_provider import ApiToolProviderController from core.tools.tool_label_manager import ToolLabelManager from core.tools.tool_manager import ToolManager -from core.tools.utils.configuration import ToolConfigurationManager +from core.tools.utils.configuration import ProviderConfigEncrypter from core.tools.utils.parser import ApiBasedToolSchemaParser from extensions.ext_database import db from models.tools import ApiToolProvider @@ -156,14 +156,14 @@ def create_api_tool_provider( provider_controller.load_bundled_tools(tool_bundles) # encrypt credentials - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, provider_identity=provider_controller.identity.name ) - encrypted_credentials = tool_configuration.encrypt_tool_credentials(credentials) + encrypted_credentials = tool_configuration.encrypt(credentials) db_provider.credentials_str = json.dumps(encrypted_credentials) db.session.add(db_provider) @@ -286,21 +286,21 @@ def update_api_tool_provider( provider_controller.load_bundled_tools(tool_bundles) # get original credentials if exists - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, 
config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, provider_identity=provider_controller.identity.name ) - original_credentials = tool_configuration.decrypt_tool_credentials(provider.credentials) + original_credentials = tool_configuration.decrypt(provider.credentials) masked_credentials = tool_configuration.mask_tool_credentials(original_credentials) # check if the credential has changed, save the original credential for name, value in credentials.items(): if name in masked_credentials and value == masked_credentials[name]: credentials[name] = original_credentials[name] - credentials = tool_configuration.encrypt_tool_credentials(credentials) + credentials = tool_configuration.encrypt(credentials) provider.credentials_str = json.dumps(credentials) db.session.add(provider) @@ -405,13 +405,13 @@ def test_api_tool_preview( # decrypt credentials if db_provider.id: - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, provider_identity=provider_controller.identity.name ) - decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials) + decrypted_credentials = tool_configuration.decrypt(credentials) # check if the credential has changed, save the original credential masked_credentials = tool_configuration.mask_tool_credentials(decrypted_credentials) for name, value in credentials.items(): diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index 8bc93e7ea3be0f..e202eeb15b896e 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -10,7 +10,7 @@ from core.tools.provider.tool_provider import ToolProviderController from core.tools.tool_label_manager import ToolLabelManager from 
core.tools.tool_manager import ToolManager -from core.tools.utils.configuration import ToolConfigurationManager +from core.tools.utils.configuration import ProviderConfigEncrypter from extensions.ext_database import db from models.tools import BuiltinToolProvider from services.tools.tools_transform_service import ToolTransformService @@ -27,7 +27,7 @@ def list_builtin_tool_provider_tools(user_id: str, tenant_id: str, provider: str provider_controller: ToolProviderController = ToolManager.get_builtin_provider(provider) tools = provider_controller.get_tools() - tool_provider_configurations = ToolConfigurationManager( + tool_provider_configurations = ProviderConfigEncrypter( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, @@ -47,7 +47,7 @@ def list_builtin_tool_provider_tools(user_id: str, tenant_id: str, provider: str if builtin_provider is not None: # get credentials credentials = builtin_provider.credentials - credentials = tool_provider_configurations.decrypt_tool_credentials(credentials) + credentials = tool_provider_configurations.decrypt(credentials) result = [] for tool in tools: @@ -92,7 +92,7 @@ def update_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st provider_controller = ToolManager.get_builtin_provider(provider_name) if not provider_controller.need_credentials: raise ValueError(f"provider {provider_name} does not need credentials") - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, @@ -101,7 +101,7 @@ def update_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st # get original credentials if exists if provider is not None: - original_credentials = tool_configuration.decrypt_tool_credentials(provider.credentials) + original_credentials = 
tool_configuration.decrypt(provider.credentials) masked_credentials = tool_configuration.mask_tool_credentials(original_credentials) # check if the credential has changed, save the original credential for name, value in credentials.items(): @@ -110,7 +110,7 @@ def update_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st # validate credentials provider_controller.validate_credentials(credentials) # encrypt credentials - credentials = tool_configuration.encrypt_tool_credentials(credentials) + credentials = tool_configuration.encrypt(credentials) except (ToolProviderNotFoundError, ToolNotFoundError, ToolProviderCredentialValidationError) as e: raise ValueError(str(e)) @@ -154,13 +154,13 @@ def get_builtin_tool_provider_credentials(user_id: str, tenant_id: str, provider return {} provider_controller = ToolManager.get_builtin_provider(provider_obj.provider) - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, provider_identity=provider_controller.identity.name, ) - credentials = tool_configuration.decrypt_tool_credentials(provider_obj.credentials) + credentials = tool_configuration.decrypt(provider_obj.credentials) credentials = tool_configuration.mask_tool_credentials(credentials) return credentials @@ -186,7 +186,7 @@ def delete_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st # delete cache provider_controller = ToolManager.get_builtin_provider(provider_name) - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 56e579a27b8eb6..1e069c704bd1b6 100644 --- 
a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -16,7 +16,7 @@ from core.tools.provider.workflow_tool_provider import WorkflowToolProviderController from core.tools.tool.tool import Tool from core.tools.tool.workflow_tool import WorkflowTool -from core.tools.utils.configuration import ToolConfigurationManager +from core.tools.utils.configuration import ProviderConfigEncrypter from models.tools import ApiToolProvider, BuiltinToolProvider, WorkflowToolProvider logger = logging.getLogger(__name__) @@ -107,15 +107,15 @@ def builtin_provider_to_user_provider( credentials = db_provider.credentials # init tool configuration - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=db_provider.tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, provider_identity=provider_controller.identity.name ) # decrypt the credentials and mask the credentials - decrypted_credentials = tool_configuration.decrypt_tool_credentials(credentials=credentials) - masked_credentials = tool_configuration.mask_tool_credentials(credentials=decrypted_credentials) + decrypted_credentials = tool_configuration.decrypt(data=credentials) + masked_credentials = tool_configuration.mask_tool_credentials(data=decrypted_credentials) result.masked_credentials = masked_credentials result.original_credentials = decrypted_credentials @@ -218,7 +218,7 @@ def api_provider_to_user_provider( if decrypt_credentials: # init tool configuration - tool_configuration = ToolConfigurationManager( + tool_configuration = ProviderConfigEncrypter( tenant_id=db_provider.tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, @@ -226,8 +226,8 @@ def api_provider_to_user_provider( ) # decrypt the credentials and mask the credentials - decrypted_credentials = 
tool_configuration.decrypt_tool_credentials(credentials=credentials) - masked_credentials = tool_configuration.mask_tool_credentials(credentials=decrypted_credentials) + decrypted_credentials = tool_configuration.decrypt(data=credentials) + masked_credentials = tool_configuration.mask_tool_credentials(data=decrypted_credentials) result.masked_credentials = masked_credentials From b7c5abc5dd8871e38b42843cb749c70af899a59e Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 30 Aug 2024 23:29:04 +0800 Subject: [PATCH 027/325] reformatter --- api/core/plugin/encrypt/__init__.py | 2 +- api/core/plugin/entities/request.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/core/plugin/encrypt/__init__.py b/api/core/plugin/encrypt/__init__.py index 5315780fce1222..614ce81d20c196 100644 --- a/api/core/plugin/encrypt/__init__.py +++ b/api/core/plugin/encrypt/__init__.py @@ -12,7 +12,7 @@ def invoke_encrypt(cls, tenant: Tenant, payload: RequestInvokeEncrypt) -> Mappin encrypter = ProviderConfigEncrypter( tenant_id=tenant.id, config=payload.data, - provider_type=payload.type, + provider_type=payload.namespace, provider_identity=payload.identity, ) diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 9911ec9d85b4b5..78414e2aa59bc8 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -112,7 +112,7 @@ class RequestInvokeEncrypt(BaseModel): Request to encryption """ opt: Literal["encrypt", "decrypt"] - type: Literal["endpoint"] + namespace: Literal["endpoint"] identity: str data: dict = Field(default_factory=dict) config: Mapping[str, BasicProviderConfig] = Field(default_factory=Mapping) \ No newline at end of file From cf73374c1bc1509e1bbb75d8354a45f81a255620 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Tue, 10 Sep 2024 17:16:55 +0800 Subject: [PATCH 028/325] refactor: stream output --- api/core/workflow/nodes/tool/tool_node.py | 199 ++++++++++++---------- 1 file changed, 107 
insertions(+), 92 deletions(-) diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index bb4649eaa4cb8f..3865695c71c448 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -1,6 +1,7 @@ -from collections.abc import Generator, Iterable, Sequence +from collections.abc import Generator, Iterable, Mapping, Sequence from os import path -from typing import Any, Mapping, cast +from typing import Any, cast +from urllib import response from core.app.segments import ArrayAnySegment, ArrayAnyVariable, parser from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler @@ -13,6 +14,7 @@ from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import SystemVariableKey from core.workflow.nodes.base_node import BaseNode +from core.workflow.nodes.event import RunCompletedEvent, RunEvent, RunStreamChunkEvent from core.workflow.nodes.tool.entities import ToolNodeData from core.workflow.utils.variable_template_parser import VariableTemplateParser from models import WorkflowNodeExecutionStatus @@ -26,7 +28,7 @@ class ToolNode(BaseNode): _node_data_cls = ToolNodeData _node_type = NodeType.TOOL - def _run(self) -> NodeRunResult: + def _run(self) -> Generator[RunEvent]: """ Run the tool node """ @@ -45,22 +47,34 @@ def _run(self) -> NodeRunResult: self.tenant_id, self.app_id, self.node_id, node_data, self.invoke_from ) except Exception as e: - return NodeRunResult( - status=WorkflowNodeExecutionStatus.FAILED, - inputs={}, - metadata={ - NodeRunMetadataKey.TOOL_INFO: tool_info - }, - error=f'Failed to get tool runtime: {str(e)}' + yield RunCompletedEvent( + run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs={}, + metadata={ + NodeRunMetadataKey.TOOL_INFO: tool_info + }, + error=f'Failed to get tool runtime: {str(e)}' + ) ) + return # get parameters tool_parameters = 
tool_runtime.get_runtime_parameters() or [] - parameters = self._generate_parameters(tool_parameters=tool_parameters, variable_pool=self.graph_runtime_state.variable_pool, node_data=node_data) - parameters_for_log = self._generate_parameters(tool_parameters=tool_parameters, variable_pool=self.graph_runtime_state.variable_pool, node_data=node_data, for_log=True) + parameters = self._generate_parameters( + tool_parameters=tool_parameters, + variable_pool=self.graph_runtime_state.variable_pool, + node_data=node_data + ) + parameters_for_log = self._generate_parameters( + tool_parameters=tool_parameters, + variable_pool=self.graph_runtime_state.variable_pool, + node_data=node_data, + for_log=True + ) try: - messages = ToolEngine.workflow_invoke( + message_stream = ToolEngine.workflow_invoke( tool=tool_runtime, tool_parameters=parameters, user_id=self.user_id, @@ -69,30 +83,33 @@ def _run(self) -> NodeRunResult: thread_pool_id=self.thread_pool_id, ) except Exception as e: - return NodeRunResult( - status=WorkflowNodeExecutionStatus.FAILED, - inputs=parameters_for_log, - metadata={ - NodeRunMetadataKey.TOOL_INFO: tool_info - }, - error=f'Failed to invoke tool: {str(e)}', + yield RunCompletedEvent( + run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs=parameters_for_log, + metadata={ + NodeRunMetadataKey.TOOL_INFO: tool_info + }, + error=f'Failed to invoke tool: {str(e)}', + ) ) + return # convert tool messages - plain_text, files, json = self._convert_tool_messages(messages) - - return NodeRunResult( - status=WorkflowNodeExecutionStatus.SUCCEEDED, - outputs={ - 'text': plain_text, - 'files': files, - 'json': json - }, - metadata={ - NodeRunMetadataKey.TOOL_INFO: tool_info - }, - inputs=parameters_for_log - ) + yield from self._transform_message(message_stream, tool_info, parameters_for_log) + + # return NodeRunResult( + # status=WorkflowNodeExecutionStatus.SUCCEEDED, + # outputs={ + # 'text': plain_text, + # 'files': files, + # 'json': json + 
# }, + # metadata={ + # NodeRunMetadataKey.TOOL_INFO: tool_info + # }, + # inputs=parameters_for_log + # ) def _generate_parameters( self, @@ -148,48 +165,40 @@ def _fetch_files(self, variable_pool: VariablePool) -> list[FileVar]: assert isinstance(variable, ArrayAnyVariable | ArrayAnySegment) return list(variable.value) if variable else [] - def _convert_tool_messages(self, messages: Generator[ToolInvokeMessage, None, None]): + def _transform_message(self, + messages: Generator[ToolInvokeMessage, None, None], + tool_info: Mapping[str, Any], + parameters_for_log: dict[str, Any]) -> Generator[RunEvent, None, None]: """ Convert ToolInvokeMessages into tuple[plain_text, files] """ # transform message and handle file storage - messages = ToolFileMessageTransformer.transform_tool_invoke_messages( + message_stream = ToolFileMessageTransformer.transform_tool_invoke_messages( messages=messages, user_id=self.user_id, tenant_id=self.tenant_id, conversation_id=None, ) - result = list(messages) - - # extract plain text and files - files = self._extract_tool_response_binary(result) - plain_text = self._extract_tool_response_text(result) - json = self._extract_tool_response_json(result) - - return plain_text, files, json - - def _extract_tool_response_binary(self, tool_response: Iterable[ToolInvokeMessage]) -> list[FileVar]: - """ - Extract tool response binary - """ - result = [] + files: list[FileVar] = [] + text = "" + json: list[dict] = [] - for response in tool_response: - if response.type == ToolInvokeMessage.MessageType.IMAGE_LINK or \ - response.type == ToolInvokeMessage.MessageType.IMAGE: - assert isinstance(response.message, ToolInvokeMessage.TextMessage) - assert response.meta + for message in message_stream: + if message.type == ToolInvokeMessage.MessageType.IMAGE_LINK or \ + message.type == ToolInvokeMessage.MessageType.IMAGE: + assert isinstance(message.message, ToolInvokeMessage.TextMessage) + assert message.meta - url = response.message.text + url = 
message.message.text ext = path.splitext(url)[1] - mimetype = response.meta.get('mime_type', 'image/jpeg') - filename = response.save_as or url.split('/')[-1] - transfer_method = response.meta.get('transfer_method', FileTransferMethod.TOOL_FILE) + mimetype = message.meta.get('mime_type', 'image/jpeg') + filename = message.save_as or url.split('/')[-1] + transfer_method = message.meta.get('transfer_method', FileTransferMethod.TOOL_FILE) # get tool file id tool_file_id = url.split('/')[-1].split('.')[0] - result.append(FileVar( + files.append(FileVar( tenant_id=self.tenant_id, type=FileType.IMAGE, transfer_method=transfer_method, @@ -199,48 +208,54 @@ def _extract_tool_response_binary(self, tool_response: Iterable[ToolInvokeMessag extension=ext, mime_type=mimetype, )) - elif response.type == ToolInvokeMessage.MessageType.BLOB: + elif message.type == ToolInvokeMessage.MessageType.BLOB: # get tool file id - assert isinstance(response.message, ToolInvokeMessage.TextMessage) - assert response.meta + assert isinstance(message.message, ToolInvokeMessage.TextMessage) + assert message.meta - tool_file_id = response.message.text.split('/')[-1].split('.')[0] - result.append(FileVar( + tool_file_id = message.message.text.split('/')[-1].split('.')[0] + files.append(FileVar( tenant_id=self.tenant_id, type=FileType.IMAGE, transfer_method=FileTransferMethod.TOOL_FILE, related_id=tool_file_id, - filename=response.save_as, - extension=path.splitext(response.save_as)[1], - mime_type=response.meta.get('mime_type', 'application/octet-stream'), + filename=message.save_as, + extension=path.splitext(message.save_as)[1], + mime_type=message.meta.get('mime_type', 'application/octet-stream'), )) - elif response.type == ToolInvokeMessage.MessageType.LINK: - pass # TODO: - - return result - - def _extract_tool_response_text(self, tool_response: Iterable[ToolInvokeMessage]) -> str: - """ - Extract tool response text - """ - result: list[str] = [] - for message in tool_response: - if message.type 
== ToolInvokeMessage.MessageType.TEXT: + elif message.type == ToolInvokeMessage.MessageType.TEXT: assert isinstance(message.message, ToolInvokeMessage.TextMessage) - result.append(message.message.text) + text += message.message.text + '\n' + yield RunStreamChunkEvent( + chunk_content=message.message.text, + from_variable_selector=[self.node_id, 'text'] + ) + elif message.type == ToolInvokeMessage.MessageType.JSON: + assert isinstance(message, ToolInvokeMessage.JsonMessage) + json.append(message.json_object) elif message.type == ToolInvokeMessage.MessageType.LINK: assert isinstance(message.message, ToolInvokeMessage.TextMessage) - result.append(f'Link: {message.message.text}') - - return '\n'.join(result) - - def _extract_tool_response_json(self, tool_response: Iterable[ToolInvokeMessage]) -> list[dict]: - result: list[dict] = [] - for message in tool_response: - if message.type == ToolInvokeMessage.MessageType.JSON: - assert isinstance(message, ToolInvokeMessage.JsonMessage) - result.append(message.json_object) - return result + stream_text = f'Link: {message.message.text}\n' + text += stream_text + yield RunStreamChunkEvent( + chunk_content=stream_text, + from_variable_selector=[self.node_id, 'text'] + ) + + yield RunCompletedEvent( + run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.SUCCEEDED, + outputs={ + 'text': text, + 'files': files, + 'json': json + }, + metadata={ + NodeRunMetadataKey.TOOL_INFO: tool_info + }, + inputs=parameters_for_log + ) + ) @classmethod def _extract_variable_selector_to_variable_mapping( From 70c001436e4513c38523537e334820d8089d0e7b Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Tue, 10 Sep 2024 18:13:33 +0800 Subject: [PATCH 029/325] support variable --- api/core/tools/entities/tool_entities.py | 44 +++++++++++++++++++++-- api/core/workflow/nodes/tool/tool_node.py | 38 +++++++++++--------- 2 files changed, 64 insertions(+), 18 deletions(-) diff --git a/api/core/tools/entities/tool_entities.py 
b/api/core/tools/entities/tool_entities.py index 4b0961fb0999c9..ef96207fa73ee0 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -1,7 +1,8 @@ +import base64 from enum import Enum from typing import Any, Optional, Union, cast -from pydantic import BaseModel, Field, field_validator +from pydantic import BaseModel, Field, field_serializer, field_validator from core.entities.parameter_entities import AppSelectorScope, CommonParameterType, ModelConfigScope from core.tools.entities.common_entities import I18nObject @@ -100,6 +101,26 @@ class JsonMessage(BaseModel): class BlobMessage(BaseModel): blob: bytes + class VariableMessage(BaseModel): + variable_name: str = Field(..., description="The name of the variable") + variable_value: str = Field(..., description="The value of the variable") + stream: bool = Field(default=False, description="Whether the variable is streamed") + + @field_validator("variable_value", mode="before") + def transform_variable_value(cls, value, values) -> Any: + """ + Only basic types and lists are allowed. 
+ """ + if not isinstance(value, dict | list | str | int | float | bool): + raise ValueError("Only basic types and lists are allowed.") + + # if stream is true, the value must be a string + if values.get('stream'): + if not isinstance(value, str): + raise ValueError("When 'stream' is True, 'variable_value' must be a string.") + + return value + class MessageType(Enum): TEXT = "text" IMAGE = "image" @@ -108,15 +129,34 @@ class MessageType(Enum): JSON = "json" IMAGE_LINK = "image_link" FILE_VAR = "file_var" + VARIABLE = "variable" type: MessageType = MessageType.TEXT """ plain text, image url or link url """ - message: JsonMessage | TextMessage | BlobMessage | None + message: JsonMessage | TextMessage | BlobMessage | VariableMessage | None meta: dict[str, Any] | None = None save_as: str = '' + @field_validator('message', mode='before') + @classmethod + def decode_blob_message(cls, v): + if isinstance(v, dict) and 'blob' in v: + try: + v['blob'] = base64.b64decode(v['blob']) + except Exception: + pass + return v + + @field_serializer('message') + def serialize_message(self, v): + if isinstance(v, self.BlobMessage): + return { + 'blob': base64.b64encode(v.blob).decode('utf-8') + } + return v + class ToolInvokeMessageBinary(BaseModel): mimetype: str = Field(..., description="The mimetype of the binary") url: str = Field(..., description="The url of the binary") diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 3865695c71c448..1f32c7b8bd10c1 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -1,7 +1,6 @@ -from collections.abc import Generator, Iterable, Mapping, Sequence +from collections.abc import Generator, Mapping, Sequence from os import path from typing import Any, cast -from urllib import response from core.app.segments import ArrayAnySegment, ArrayAnyVariable, parser from core.callback_handler.workflow_tool_callback_handler import 
DifyWorkflowCallbackHandler @@ -98,19 +97,6 @@ def _run(self) -> Generator[RunEvent]: # convert tool messages yield from self._transform_message(message_stream, tool_info, parameters_for_log) - # return NodeRunResult( - # status=WorkflowNodeExecutionStatus.SUCCEEDED, - # outputs={ - # 'text': plain_text, - # 'files': files, - # 'json': json - # }, - # metadata={ - # NodeRunMetadataKey.TOOL_INFO: tool_info - # }, - # inputs=parameters_for_log - # ) - def _generate_parameters( self, *, @@ -183,6 +169,8 @@ def _transform_message(self, files: list[FileVar] = [] text = "" json: list[dict] = [] + + variables: dict[str, Any] = {} for message in message_stream: if message.type == ToolInvokeMessage.MessageType.IMAGE_LINK or \ @@ -241,6 +229,23 @@ def _transform_message(self, chunk_content=stream_text, from_variable_selector=[self.node_id, 'text'] ) + elif message.type == ToolInvokeMessage.MessageType.VARIABLE: + assert isinstance(message.message, ToolInvokeMessage.VariableMessage) + variable_name = message.message.variable_name + variable_value = message.message.variable_value + if message.message.stream: + if not isinstance(variable_value, str): + raise ValueError("When 'stream' is True, 'variable_value' must be a string.") + if variable_name not in variables: + variables[variable_name] = "" + variables[variable_name] += variable_value + + yield RunStreamChunkEvent( + chunk_content=variable_value, + from_variable_selector=[self.node_id, variable_name] + ) + else: + variables[variable_name] = variable_value yield RunCompletedEvent( run_result=NodeRunResult( @@ -248,7 +253,8 @@ def _transform_message(self, outputs={ 'text': text, 'files': files, - 'json': json + 'json': json, + **variables }, metadata={ NodeRunMetadataKey.TOOL_INFO: tool_info From 87c746f6bbc66b2a7ae12d117c77d52b9838d305 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Sat, 14 Sep 2024 01:26:22 +0800 Subject: [PATCH 030/325] tmp --- api/core/tools/entities/tool_entities.py | 9 +++++++++ 1 file changed, 9 
insertions(+) diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index ef96207fa73ee0..2f25898f4e94eb 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -120,6 +120,15 @@ def transform_variable_value(cls, value, values) -> Any: raise ValueError("When 'stream' is True, 'variable_value' must be a string.") return value + + @field_validator("variable_name", mode="before") + def transform_variable_name(cls, value) -> str: + """ + The variable name must be a string. + """ + if value in ["json", "text", "files"]: + raise ValueError(f"The variable name '{value}' is reserved.") + return value class MessageType(Enum): TEXT = "text" From c472ea6c67e43e1d4001b5aef0edab85d8d2af55 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 19 Sep 2024 18:02:24 +0800 Subject: [PATCH 031/325] fix: pydantic --- api/core/plugin/entities/request.py | 3 ++- api/core/tools/entities/tool_entities.py | 6 ++++-- api/core/tools/tool_engine.py | 5 ++--- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 627b3352251493..2e87b76636ca2d 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -42,7 +42,8 @@ class RequestInvokeLLM(BaseRequestInvokeModel): stream: Optional[bool] = False @field_validator("prompt_messages", mode="before") - def convert_prompt_messages(self, v): + @classmethod + def convert_prompt_messages(cls, v): if not isinstance(v, list): raise ValueError("prompt_messages must be a list") diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index b764ac62ecaf7b..d8383539bbaed7 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -114,7 +114,8 @@ class VariableMessage(BaseModel): stream: bool = Field(default=False, description="Whether the variable is streamed") 
@field_validator("variable_value", mode="before") - def transform_variable_value(self, value, values) -> Any: + @classmethod + def transform_variable_value(cls, value, values) -> Any: """ Only basic types and lists are allowed. """ @@ -129,7 +130,8 @@ def transform_variable_value(self, value, values) -> Any: return value @field_validator("variable_name", mode="before") - def transform_variable_name(self, value) -> str: + @classmethod + def transform_variable_name(cls, value) -> str: """ The variable name must be a string. """ diff --git a/api/core/tools/tool_engine.py b/api/core/tools/tool_engine.py index 8a4be51d285cfb..469eea67a6920d 100644 --- a/api/core/tools/tool_engine.py +++ b/api/core/tools/tool_engine.py @@ -245,9 +245,8 @@ def _convert_tool_response_to_str(tool_response: Generator[ToolInvokeMessage, No + "you do not need to create it, just tell the user to check it now." ) elif response.type == ToolInvokeMessage.MessageType.JSON: - result += f"tool response: { - json.dumps(cast(ToolInvokeMessage.JsonMessage, response.message).json_object, ensure_ascii=False) - }." + text = json.dumps(cast(ToolInvokeMessage.JsonMessage, response.message).json_object, ensure_ascii=False) + result += f"tool response: {text}." else: result += f"tool response: {response.message}." 
From 661392eaef8cec4171258dedcbe884be80f02db8 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 20 Sep 2024 02:25:14 +0800 Subject: [PATCH 032/325] refactor: tool --- api/core/agent/base_agent_runner.py | 4 +- api/core/agent/cot_agent_runner.py | 2 +- api/core/rag/retrieval/dataset_retrieval.py | 10 +- api/core/tools/{tool => __base}/tool.py | 0 .../{provider => __base}/tool_provider.py | 2 +- api/core/tools/builtin_tool/_position.yaml | 3 + .../provider.py} | 13 +- .../providers}/__init__.py | 0 .../providers}/_positions.py | 0 .../providers}/code/_assets/icon.svg | 0 .../providers}/code/code.py | 2 +- .../providers}/code/code.yaml | 0 .../providers}/code/tools/simple_code.py | 2 +- .../providers}/code/tools/simple_code.yaml | 0 .../providers}/qrcode/_assets/icon.svg | 0 .../providers}/qrcode/qrcode.py | 4 +- .../providers}/qrcode/qrcode.yaml | 0 .../qrcode/tools/qrcode_generator.py | 2 +- .../qrcode/tools/qrcode_generator.yaml | 0 .../providers}/time/_assets/icon.svg | 0 .../providers}/time/time.py | 4 +- .../providers}/time/time.yaml | 0 .../providers}/time/tools/current_time.py | 2 +- .../providers}/time/tools/current_time.yaml | 0 .../providers}/time/tools/weekday.py | 4 +- .../providers}/time/tools/weekday.yaml | 0 .../builtin_tool.py => builtin_tool/tool.py} | 6 +- .../provider.py} | 7 +- .../{tool/api_tool.py => custom_tool/tool.py} | 15 +- api/core/tools/entities/api_entities.py | 2 +- api/core/tools/entities/tool_entities.py | 1 + .../tools/plugin_tool/plugin_tool_provider.py | 30 + api/core/tools/provider/_position.yaml | 38 - api/core/tools/provider/app_tool_provider.py | 103 - .../provider/builtin/aippt/_assets/icon.png | Bin 1961 -> 0 bytes .../tools/provider/builtin/aippt/aippt.py | 11 - .../tools/provider/builtin/aippt/aippt.yaml | 45 - .../provider/builtin/aippt/tools/aippt.py | 498 ---- .../provider/builtin/aippt/tools/aippt.yaml | 54 - .../builtin/alphavantage/_assets/icon.svg | 7 - .../builtin/alphavantage/alphavantage.py | 22 - 
.../builtin/alphavantage/alphavantage.yaml | 31 - .../builtin/alphavantage/tools/query_stock.py | 48 - .../alphavantage/tools/query_stock.yaml | 27 - .../provider/builtin/arxiv/_assets/icon.svg | 1 - .../tools/provider/builtin/arxiv/arxiv.py | 20 - .../tools/provider/builtin/arxiv/arxiv.yaml | 12 - .../builtin/arxiv/tools/arxiv_search.py | 119 - .../builtin/arxiv/tools/arxiv_search.yaml | 23 - .../provider/builtin/aws/_assets/icon.svg | 9 - api/core/tools/provider/builtin/aws/aws.py | 24 - api/core/tools/provider/builtin/aws/aws.yaml | 15 - .../builtin/aws/tools/apply_guardrail.py | 90 - .../builtin/aws/tools/apply_guardrail.yaml | 67 - .../aws/tools/lambda_translate_utils.py | 91 - .../aws/tools/lambda_translate_utils.yaml | 134 - .../builtin/aws/tools/lambda_yaml_to_json.py | 70 - .../aws/tools/lambda_yaml_to_json.yaml | 53 - .../aws/tools/sagemaker_text_rerank.py | 81 - .../aws/tools/sagemaker_text_rerank.yaml | 82 - .../builtin/aws/tools/sagemaker_tts.py | 101 - .../builtin/aws/tools/sagemaker_tts.yaml | 149 - .../provider/builtin/azuredalle/__init__.py | 0 .../builtin/azuredalle/_assets/icon.png | Bin 50703 -> 0 bytes .../provider/builtin/azuredalle/azuredalle.py | 20 - .../builtin/azuredalle/azuredalle.yaml | 76 - .../builtin/azuredalle/tools/dalle3.py | 83 - .../builtin/azuredalle/tools/dalle3.yaml | 136 - .../provider/builtin/bing/_assets/icon.svg | 40 - api/core/tools/provider/builtin/bing/bing.py | 23 - .../tools/provider/builtin/bing/bing.yaml | 107 - .../builtin/bing/tools/bing_web_search.py | 202 -- .../builtin/bing/tools/bing_web_search.yaml | 584 ---- .../provider/builtin/brave/_assets/icon.svg | 1 - .../tools/provider/builtin/brave/brave.py | 22 - .../tools/provider/builtin/brave/brave.yaml | 39 - .../builtin/brave/tools/brave_search.py | 138 - .../builtin/brave/tools/brave_search.yaml | 53 - .../provider/builtin/chart/_assets/icon.png | Bin 1363 -> 0 bytes .../tools/provider/builtin/chart/chart.py | 77 - .../tools/provider/builtin/chart/chart.yaml 
| 17 - .../tools/provider/builtin/chart/tools/bar.py | 48 - .../provider/builtin/chart/tools/bar.yaml | 41 - .../provider/builtin/chart/tools/line.py | 50 - .../provider/builtin/chart/tools/line.yaml | 41 - .../tools/provider/builtin/chart/tools/pie.py | 48 - .../provider/builtin/chart/tools/pie.yaml | 41 - .../provider/builtin/cogview/__init__.py | 0 .../provider/builtin/cogview/_assets/icon.png | Bin 22062 -> 0 bytes .../tools/provider/builtin/cogview/cogview.py | 28 - .../provider/builtin/cogview/cogview.yaml | 61 - .../builtin/cogview/tools/cogview3.py | 72 - .../builtin/cogview/tools/cogview3.yaml | 123 - .../provider/builtin/comfyui/_assets/icon.png | Bin 213986 -> 0 bytes .../tools/provider/builtin/comfyui/comfyui.py | 17 - .../provider/builtin/comfyui/comfyui.yaml | 42 - .../comfyui/tools/comfyui_stable_diffusion.py | 475 ---- .../tools/comfyui_stable_diffusion.yaml | 212 -- .../builtin/comfyui/tools/txt2img.json | 107 - .../builtin/crossref/_assets/icon.svg | 49 - .../provider/builtin/crossref/crossref.py | 20 - .../provider/builtin/crossref/crossref.yaml | 29 - .../builtin/crossref/tools/query_doi.py | 28 - .../builtin/crossref/tools/query_doi.yaml | 23 - .../builtin/crossref/tools/query_title.py | 143 - .../builtin/crossref/tools/query_title.yaml | 105 - .../tools/provider/builtin/dalle/__init__.py | 0 .../provider/builtin/dalle/_assets/icon.png | Bin 156474 -> 0 bytes .../tools/provider/builtin/dalle/dalle.py | 20 - .../tools/provider/builtin/dalle/dalle.yaml | 61 - .../provider/builtin/dalle/tools/dalle2.py | 66 - .../provider/builtin/dalle/tools/dalle2.yaml | 74 - .../provider/builtin/dalle/tools/dalle3.py | 115 - .../provider/builtin/dalle/tools/dalle3.yaml | 123 - .../provider/builtin/devdocs/_assets/icon.svg | 4 - .../tools/provider/builtin/devdocs/devdocs.py | 21 - .../provider/builtin/devdocs/devdocs.yaml | 13 - .../builtin/devdocs/tools/searchDevDocs.py | 47 - .../builtin/devdocs/tools/searchDevDocs.yaml | 34 - 
.../provider/builtin/did/_assets/icon.svg | 14 - api/core/tools/provider/builtin/did/did.py | 18 - api/core/tools/provider/builtin/did/did.yaml | 28 - .../tools/provider/builtin/did/did_appx.py | 87 - .../provider/builtin/did/tools/animations.py | 49 - .../builtin/did/tools/animations.yaml | 86 - .../tools/provider/builtin/did/tools/talks.py | 65 - .../provider/builtin/did/tools/talks.yaml | 126 - .../builtin/dingtalk/_assets/icon.svg | 7 - .../provider/builtin/dingtalk/dingtalk.py | 8 - .../provider/builtin/dingtalk/dingtalk.yaml | 16 - .../dingtalk/tools/dingtalk_group_bot.py | 89 - .../dingtalk/tools/dingtalk_group_bot.yaml | 52 - .../builtin/duckduckgo/_assets/icon.svg | 1 - .../provider/builtin/duckduckgo/duckduckgo.py | 20 - .../builtin/duckduckgo/duckduckgo.yaml | 12 - .../builtin/duckduckgo/tools/ddgo_ai.py | 20 - .../builtin/duckduckgo/tools/ddgo_ai.yaml | 47 - .../builtin/duckduckgo/tools/ddgo_img.py | 30 - .../builtin/duckduckgo/tools/ddgo_img.yaml | 88 - .../builtin/duckduckgo/tools/ddgo_search.py | 45 - .../builtin/duckduckgo/tools/ddgo_search.yaml | 41 - .../duckduckgo/tools/ddgo_translate.py | 20 - .../duckduckgo/tools/ddgo_translate.yaml | 51 - .../provider/builtin/feishu/_assets/icon.svg | 1 - .../tools/provider/builtin/feishu/feishu.py | 7 - .../tools/provider/builtin/feishu/feishu.yaml | 16 - .../builtin/feishu/tools/feishu_group_bot.py | 51 - .../feishu/tools/feishu_group_bot.yaml | 40 - .../builtin/feishu_base/_assets/icon.svg | 47 - .../builtin/feishu_base/feishu_base.py | 8 - .../builtin/feishu_base/feishu_base.yaml | 14 - .../feishu_base/tools/add_base_record.py | 56 - .../feishu_base/tools/add_base_record.yaml | 66 - .../builtin/feishu_base/tools/create_base.py | 41 - .../feishu_base/tools/create_base.yaml | 47 - .../feishu_base/tools/create_base_table.py | 48 - .../feishu_base/tools/create_base_table.yaml | 106 - .../feishu_base/tools/delete_base_records.py | 56 - .../tools/delete_base_records.yaml | 60 - 
.../feishu_base/tools/delete_base_tables.py | 46 - .../feishu_base/tools/delete_base_tables.yaml | 48 - .../feishu_base/tools/get_base_info.py | 39 - .../feishu_base/tools/get_base_info.yaml | 54 - .../tools/get_tenant_access_token.py | 48 - .../tools/get_tenant_access_token.yaml | 39 - .../feishu_base/tools/list_base_records.py | 65 - .../feishu_base/tools/list_base_records.yaml | 108 - .../feishu_base/tools/list_base_tables.py | 47 - .../feishu_base/tools/list_base_tables.yaml | 65 - .../feishu_base/tools/read_base_record.py | 49 - .../feishu_base/tools/read_base_record.yaml | 60 - .../feishu_base/tools/update_base_record.py | 60 - .../feishu_base/tools/update_base_record.yaml | 78 - .../builtin/feishu_document/_assets/icon.svg | 9 - .../feishu_document/feishu_document.py | 15 - .../feishu_document/feishu_document.yaml | 34 - .../feishu_document/tools/create_document.py | 19 - .../tools/create_document.yaml | 47 - .../tools/get_document_content.py | 19 - .../tools/get_document_content.yaml | 49 - .../tools/list_document_blocks.py | 19 - .../tools/list_document_blocks.yaml | 74 - .../feishu_document/tools/write_document.py | 19 - .../feishu_document/tools/write_document.yaml | 59 - .../builtin/feishu_message/_assets/icon.svg | 19 - .../builtin/feishu_message/feishu_message.py | 15 - .../feishu_message/feishu_message.yaml | 34 - .../feishu_message/tools/send_bot_message.py | 20 - .../tools/send_bot_message.yaml | 91 - .../tools/send_webhook_message.py | 19 - .../tools/send_webhook_message.yaml | 58 - .../builtin/firecrawl/_assets/icon.svg | 3 - .../provider/builtin/firecrawl/firecrawl.py | 14 - .../provider/builtin/firecrawl/firecrawl.yaml | 35 - .../builtin/firecrawl/firecrawl_appx.py | 122 - .../provider/builtin/firecrawl/tools/crawl.py | 45 - .../builtin/firecrawl/tools/crawl.yaml | 200 -- .../builtin/firecrawl/tools/crawl_job.py | 21 - .../builtin/firecrawl/tools/crawl_job.yaml | 37 - .../provider/builtin/firecrawl/tools/map.py | 25 - 
.../provider/builtin/firecrawl/tools/map.yaml | 59 - .../builtin/firecrawl/tools/scrape.py | 39 - .../builtin/firecrawl/tools/scrape.yaml | 152 - .../provider/builtin/gaode/_assets/icon.svg | 1 - .../tools/provider/builtin/gaode/gaode.py | 28 - .../tools/provider/builtin/gaode/gaode.yaml | 34 - .../builtin/gaode/tools/gaode_weather.py | 64 - .../builtin/gaode/tools/gaode_weather.yaml | 28 - .../builtin/getimgai/_assets/icon.svg | 1 - .../provider/builtin/getimgai/getimgai.py | 19 - .../provider/builtin/getimgai/getimgai.yaml | 29 - .../builtin/getimgai/getimgai_appx.py | 55 - .../builtin/getimgai/tools/text2image.py | 39 - .../builtin/getimgai/tools/text2image.yaml | 167 -- .../provider/builtin/github/_assets/icon.svg | 17 - .../tools/provider/builtin/github/github.py | 32 - .../tools/provider/builtin/github/github.yaml | 48 - .../github/tools/github_repositories.py | 70 - .../github/tools/github_repositories.yaml | 42 - .../builtin/gitlab/_assets/gitlab.svg | 2 - .../tools/provider/builtin/gitlab/gitlab.py | 32 - .../tools/provider/builtin/gitlab/gitlab.yaml | 38 - .../builtin/gitlab/tools/gitlab_commits.py | 142 - .../builtin/gitlab/tools/gitlab_commits.yaml | 88 - .../builtin/gitlab/tools/gitlab_files.py | 103 - .../builtin/gitlab/tools/gitlab_files.yaml | 56 - .../provider/builtin/google/_assets/icon.svg | 6 - .../tools/provider/builtin/google/google.py | 20 - .../tools/provider/builtin/google/google.yaml | 31 - .../builtin/google/tools/google_search.py | 40 - .../builtin/google/tools/google_search.yaml | 27 - .../builtin/google_translate/_assets/icon.svg | 18 - .../google_translate/google_translate.py | 13 - .../google_translate/google_translate.yaml | 12 - .../google_translate/tools/translate.py | 47 - .../google_translate/tools/translate.yaml | 215 -- .../provider/builtin/hap/_assets/icon.svg | 16 - api/core/tools/provider/builtin/hap/hap.py | 8 - api/core/tools/provider/builtin/hap/hap.yaml | 15 - .../builtin/hap/tools/add_worksheet_record.py | 52 - 
.../hap/tools/add_worksheet_record.yaml | 78 - .../hap/tools/delete_worksheet_record.py | 48 - .../hap/tools/delete_worksheet_record.yaml | 71 - .../builtin/hap/tools/get_worksheet_fields.py | 152 - .../hap/tools/get_worksheet_fields.yaml | 80 - .../hap/tools/get_worksheet_pivot_data.py | 137 - .../hap/tools/get_worksheet_pivot_data.yaml | 248 -- .../hap/tools/list_worksheet_records.py | 231 -- .../hap/tools/list_worksheet_records.yaml | 226 -- .../builtin/hap/tools/list_worksheets.py | 83 - .../builtin/hap/tools/list_worksheets.yaml | 68 - .../hap/tools/update_worksheet_record.py | 55 - .../hap/tools/update_worksheet_record.yaml | 90 - .../provider/builtin/jina/_assets/icon.svg | 4 - api/core/tools/provider/builtin/jina/jina.py | 38 - .../tools/provider/builtin/jina/jina.yaml | 32 - .../builtin/jina/tools/jina_reader.py | 74 - .../builtin/jina/tools/jina_reader.yaml | 166 -- .../builtin/jina/tools/jina_search.py | 46 - .../builtin/jina/tools/jina_search.yaml | 107 - .../builtin/jina/tools/jina_tokenizer.py | 39 - .../builtin/jina/tools/jina_tokenizer.yaml | 70 - .../builtin/json_process/_assets/icon.svg | 358 --- .../builtin/json_process/json_process.py | 16 - .../builtin/json_process/json_process.yaml | 14 - .../builtin/json_process/tools/delete.py | 61 - .../builtin/json_process/tools/delete.yaml | 52 - .../builtin/json_process/tools/insert.py | 105 - .../builtin/json_process/tools/insert.yaml | 101 - .../builtin/json_process/tools/parse.py | 53 - .../builtin/json_process/tools/parse.yaml | 52 - .../builtin/json_process/tools/replace.py | 129 - .../builtin/json_process/tools/replace.yaml | 119 - .../builtin/judge0ce/_assets/icon.svg | 21 - .../provider/builtin/judge0ce/judge0ce.py | 23 - .../provider/builtin/judge0ce/judge0ce.yaml | 32 - .../builtin/judge0ce/tools/executeCode.py | 61 - .../builtin/judge0ce/tools/executeCode.yaml | 67 - .../provider/builtin/maths/_assets/icon.svg | 10 - .../tools/provider/builtin/maths/maths.py | 18 - 
.../tools/provider/builtin/maths/maths.yaml | 15 - .../builtin/maths/tools/eval_expression.py | 30 - .../builtin/maths/tools/eval_expression.yaml | 26 - .../builtin/nominatim/_assets/icon.svg | 277 -- .../provider/builtin/nominatim/nominatim.py | 27 - .../provider/builtin/nominatim/nominatim.yaml | 43 - .../nominatim/tools/nominatim_lookup.py | 40 - .../nominatim/tools/nominatim_lookup.yaml | 31 - .../nominatim/tools/nominatim_reverse.py | 41 - .../nominatim/tools/nominatim_reverse.yaml | 47 - .../nominatim/tools/nominatim_search.py | 41 - .../nominatim/tools/nominatim_search.yaml | 51 - .../builtin/novitaai/_assets/icon.ico | Bin 9366 -> 0 bytes .../builtin/novitaai/_novita_tool_base.py | 69 - .../provider/builtin/novitaai/novitaai.py | 34 - .../provider/builtin/novitaai/novitaai.yaml | 32 - .../novitaai/tools/novitaai_createtile.py | 54 - .../novitaai/tools/novitaai_createtile.yaml | 80 - .../novitaai/tools/novitaai_modelquery.py | 148 - .../novitaai/tools/novitaai_modelquery.yaml | 175 -- .../novitaai/tools/novitaai_txt2img.py | 90 - .../novitaai/tools/novitaai_txt2img.yaml | 341 --- .../provider/builtin/onebot/_assets/icon.ico | Bin 38078 -> 0 bytes .../tools/provider/builtin/onebot/onebot.py | 10 - .../tools/provider/builtin/onebot/onebot.yaml | 35 - .../provider/builtin/onebot/tools/__init__.py | 0 .../builtin/onebot/tools/send_group_msg.py | 39 - .../builtin/onebot/tools/send_group_msg.yaml | 46 - .../builtin/onebot/tools/send_private_msg.py | 39 - .../onebot/tools/send_private_msg.yaml | 46 - .../builtin/openweather/_assets/icon.svg | 12 - .../builtin/openweather/openweather.py | 29 - .../builtin/openweather/openweather.yaml | 31 - .../builtin/openweather/tools/weather.py | 52 - .../builtin/openweather/tools/weather.yaml | 80 - .../builtin/perplexity/_assets/icon.svg | 3 - .../provider/builtin/perplexity/perplexity.py | 38 - .../builtin/perplexity/perplexity.yaml | 26 - .../perplexity/tools/perplexity_search.py | 67 - 
.../perplexity/tools/perplexity_search.yaml | 178 -- .../provider/builtin/pubmed/_assets/icon.svg | 1 - .../tools/provider/builtin/pubmed/pubmed.py | 20 - .../tools/provider/builtin/pubmed/pubmed.yaml | 13 - .../builtin/pubmed/tools/pubmed_search.py | 191 -- .../builtin/pubmed/tools/pubmed_search.yaml | 23 - .../provider/builtin/regex/_assets/icon.svg | 1 - .../tools/provider/builtin/regex/regex.py | 19 - .../tools/provider/builtin/regex/regex.yaml | 15 - .../builtin/regex/tools/regex_extract.py | 28 - .../builtin/regex/tools/regex_extract.yaml | 38 - .../builtin/searchapi/_assets/icon.svg | 1 - .../provider/builtin/searchapi/searchapi.py | 20 - .../provider/builtin/searchapi/searchapi.yaml | 34 - .../builtin/searchapi/tools/google.py | 112 - .../builtin/searchapi/tools/google.yaml | 481 ---- .../builtin/searchapi/tools/google_jobs.py | 102 - .../builtin/searchapi/tools/google_jobs.yaml | 478 ---- .../builtin/searchapi/tools/google_news.py | 97 - .../builtin/searchapi/tools/google_news.yaml | 482 ---- .../searchapi/tools/youtube_transcripts.py | 75 - .../searchapi/tools/youtube_transcripts.yaml | 34 - .../provider/builtin/searxng/_assets/icon.svg | 56 - .../builtin/searxng/docker/settings.yml | 2501 ----------------- .../provider/builtin/searxng/docker/uwsgi.ini | 54 - .../tools/provider/builtin/searxng/searxng.py | 20 - .../provider/builtin/searxng/searxng.yaml | 24 - .../builtin/searxng/tools/searxng_search.py | 46 - .../builtin/searxng/tools/searxng_search.yaml | 69 - .../provider/builtin/serper/_assets/icon.svg | 12 - .../tools/provider/builtin/serper/serper.py | 20 - .../tools/provider/builtin/serper/serper.yaml | 31 - .../builtin/serper/tools/serper_search.py | 34 - .../builtin/serper/tools/serper_search.yaml | 27 - .../builtin/siliconflow/_assets/icon.svg | 1 - .../builtin/siliconflow/siliconflow.py | 17 - .../builtin/siliconflow/siliconflow.yaml | 21 - .../builtin/siliconflow/tools/flux.py | 43 - .../builtin/siliconflow/tools/flux.yaml | 88 - 
.../siliconflow/tools/stable_diffusion.py | 45 - .../siliconflow/tools/stable_diffusion.yaml | 121 - .../provider/builtin/slack/_assets/icon.svg | 22 - .../tools/provider/builtin/slack/slack.py | 8 - .../tools/provider/builtin/slack/slack.yaml | 16 - .../builtin/slack/tools/slack_webhook.py | 46 - .../builtin/slack/tools/slack_webhook.yaml | 40 - .../tools/provider/builtin/spark/__init__.py | 0 .../provider/builtin/spark/_assets/icon.svg | 5 - .../tools/provider/builtin/spark/spark.py | 36 - .../tools/provider/builtin/spark/spark.yaml | 61 - .../spark/tools/spark_img_generation.py | 139 - .../spark/tools/spark_img_generation.yaml | 36 - .../provider/builtin/spider/_assets/icon.svg | 1 - .../tools/provider/builtin/spider/spider.py | 20 - .../tools/provider/builtin/spider/spider.yaml | 27 - .../provider/builtin/spider/spiderApp.py | 221 -- .../builtin/spider/tools/scraper_crawler.py | 49 - .../builtin/spider/tools/scraper_crawler.yaml | 102 - .../builtin/stability/_assets/icon.svg | 10 - .../provider/builtin/stability/stability.py | 16 - .../provider/builtin/stability/stability.yaml | 31 - .../provider/builtin/stability/tools/base.py | 31 - .../builtin/stability/tools/text2image.py | 56 - .../builtin/stability/tools/text2image.yaml | 142 - .../builtin/stablediffusion/_assets/icon.png | Bin 16324 -> 0 bytes .../stablediffusion/stablediffusion.py | 17 - .../stablediffusion/stablediffusion.yaml | 42 - .../stablediffusion/tools/stable_diffusion.py | 390 --- .../tools/stable_diffusion.yaml | 104 - .../builtin/stackexchange/_assets/icon.svg | 1 - .../builtin/stackexchange/stackexchange.py | 25 - .../builtin/stackexchange/stackexchange.yaml | 13 - .../tools/fetchAnsByStackExQuesID.py | 39 - .../tools/fetchAnsByStackExQuesID.yaml | 107 - .../tools/searchStackExQuestions.py | 45 - .../tools/searchStackExQuestions.yaml | 121 - .../provider/builtin/stepfun/__init__.py | 0 .../provider/builtin/stepfun/_assets/icon.png | Bin 1991 -> 0 bytes 
.../tools/provider/builtin/stepfun/stepfun.py | 24 - .../provider/builtin/stepfun/stepfun.yaml | 46 - .../provider/builtin/stepfun/tools/image.py | 75 - .../provider/builtin/stepfun/tools/image.yaml | 158 -- .../provider/builtin/tavily/_assets/icon.png | Bin 2070 -> 0 bytes .../tools/provider/builtin/tavily/tavily.py | 29 - .../tools/provider/builtin/tavily/tavily.yaml | 31 - .../builtin/tavily/tools/tavily_search.py | 124 - .../builtin/tavily/tools/tavily_search.yaml | 162 -- .../builtin/tianditu/_assets/icon.svg | 21 - .../provider/builtin/tianditu/tianditu.py | 23 - .../provider/builtin/tianditu/tianditu.yaml | 32 - .../builtin/tianditu/tools/geocoder.py | 33 - .../builtin/tianditu/tools/geocoder.yaml | 26 - .../builtin/tianditu/tools/poisearch.py | 58 - .../builtin/tianditu/tools/poisearch.yaml | 38 - .../builtin/tianditu/tools/staticmap.py | 49 - .../builtin/tianditu/tools/staticmap.yaml | 26 - .../provider/builtin/trello/_assets/icon.svg | 1 - .../builtin/trello/tools/create_board.py | 44 - .../builtin/trello/tools/create_board.yaml | 27 - .../trello/tools/create_list_on_board.py | 46 - .../trello/tools/create_list_on_board.yaml | 40 - .../trello/tools/create_new_card_on_board.py | 45 - .../tools/create_new_card_on_board.yaml | 145 - .../builtin/trello/tools/delete_board.py | 41 - .../builtin/trello/tools/delete_board.yaml | 27 - .../builtin/trello/tools/delete_card.py | 41 - .../builtin/trello/tools/delete_card.yaml | 27 - .../builtin/trello/tools/fetch_all_boards.py | 50 - .../trello/tools/fetch_all_boards.yaml | 28 - .../builtin/trello/tools/get_board_actions.py | 45 - .../trello/tools/get_board_actions.yaml | 27 - .../builtin/trello/tools/get_board_by_id.py | 66 - .../builtin/trello/tools/get_board_by_id.yaml | 27 - .../builtin/trello/tools/get_board_cards.py | 43 - .../builtin/trello/tools/get_board_cards.yaml | 27 - .../trello/tools/get_filterd_board_cards.py | 46 - .../trello/tools/get_filterd_board_cards.yaml | 40 - 
.../trello/tools/get_lists_on_board.py | 43 - .../trello/tools/get_lists_on_board.yaml | 27 - .../builtin/trello/tools/update_board.py | 47 - .../builtin/trello/tools/update_board.yaml | 157 -- .../builtin/trello/tools/update_card.py | 45 - .../builtin/trello/tools/update_card.yaml | 81 - .../tools/provider/builtin/trello/trello.py | 34 - .../tools/provider/builtin/trello/trello.yaml | 47 - .../provider/builtin/twilio/_assets/icon.svg | 1 - .../builtin/twilio/tools/send_message.py | 97 - .../builtin/twilio/tools/send_message.yaml | 40 - .../tools/provider/builtin/twilio/twilio.py | 29 - .../tools/provider/builtin/twilio/twilio.yaml | 48 - .../provider/builtin/vanna/_assets/icon.png | Bin 4612 -> 0 bytes .../provider/builtin/vanna/tools/vanna.py | 129 - .../provider/builtin/vanna/tools/vanna.yaml | 213 -- .../tools/provider/builtin/vanna/vanna.py | 25 - .../tools/provider/builtin/vanna/vanna.yaml | 25 - .../builtin/vectorizer/_assets/icon.png | Bin 1875 -> 0 bytes .../builtin/vectorizer/tools/test_data.py | 1 - .../builtin/vectorizer/tools/vectorizer.py | 69 - .../builtin/vectorizer/tools/vectorizer.yaml | 38 - .../provider/builtin/vectorizer/vectorizer.py | 20 - .../builtin/vectorizer/vectorizer.yaml | 47 - .../builtin/webscraper/_assets/icon.svg | 3 - .../builtin/webscraper/tools/webscraper.py | 33 - .../builtin/webscraper/tools/webscraper.yaml | 60 - .../provider/builtin/webscraper/webscraper.py | 23 - .../builtin/webscraper/webscraper.yaml | 15 - .../builtin/websearch/_assets/icon.svg | 23 - .../builtin/websearch/tools/get_markdown.py | 51 - .../builtin/websearch/tools/get_markdown.yaml | 96 - .../builtin/websearch/tools/job_search.py | 88 - .../builtin/websearch/tools/job_search.yaml | 41 - .../builtin/websearch/tools/news_search.py | 90 - .../builtin/websearch/tools/news_search.yaml | 501 ---- .../builtin/websearch/tools/scholar_search.py | 93 - .../websearch/tools/scholar_search.yaml | 501 ---- .../builtin/websearch/tools/web_search.py | 90 - 
.../builtin/websearch/tools/web_search.yaml | 376 --- .../provider/builtin/websearch/websearch.py | 21 - .../provider/builtin/websearch/websearch.yaml | 34 - .../provider/builtin/wecom/_assets/icon.png | Bin 262939 -> 0 bytes .../builtin/wecom/tools/wecom_group_bot.py | 57 - .../builtin/wecom/tools/wecom_group_bot.yaml | 64 - .../tools/provider/builtin/wecom/wecom.py | 7 - .../tools/provider/builtin/wecom/wecom.yaml | 15 - .../builtin/wikipedia/_assets/icon.svg | 3 - .../wikipedia/tools/wikipedia_search.py | 105 - .../wikipedia/tools/wikipedia_search.yaml | 101 - .../provider/builtin/wikipedia/wikipedia.py | 20 - .../provider/builtin/wikipedia/wikipedia.yaml | 15 - .../builtin/wolframalpha/_assets/icon.svg | 23 - .../wolframalpha/tools/wolframalpha.py | 72 - .../wolframalpha/tools/wolframalpha.yaml | 27 - .../builtin/wolframalpha/wolframalpha.py | 22 - .../builtin/wolframalpha/wolframalpha.yaml | 32 - .../provider/builtin/yahoo/_assets/icon.png | Bin 7647 -> 0 bytes .../provider/builtin/yahoo/tools/analytics.py | 70 - .../builtin/yahoo/tools/analytics.yaml | 54 - .../provider/builtin/yahoo/tools/news.py | 46 - .../provider/builtin/yahoo/tools/news.yaml | 28 - .../provider/builtin/yahoo/tools/ticker.py | 27 - .../provider/builtin/yahoo/tools/ticker.yaml | 28 - .../tools/provider/builtin/yahoo/yahoo.py | 20 - .../tools/provider/builtin/yahoo/yahoo.yaml | 16 - .../provider/builtin/youtube/_assets/icon.svg | 11 - .../provider/builtin/youtube/tools/videos.py | 74 - .../builtin/youtube/tools/videos.yaml | 54 - .../tools/provider/builtin/youtube/youtube.py | 22 - .../provider/builtin/youtube/youtube.yaml | 31 - api/core/tools/tool_engine.py | 4 +- api/core/tools/tool_label_manager.py | 15 +- api/core/tools/tool_manager.py | 36 +- api/core/tools/utils/configuration.py | 8 +- .../dataset_multi_retriever_tool.py | 2 +- .../dataset_retriever_base_tool.py | 0 .../dataset_retriever_tool.py | 0 .../{tool => utils}/dataset_retriever_tool.py | 4 +- 
api/core/tools/utils/web_reader_tool.py | 2 +- api/core/tools/workflow_as_tool/provider.py | 207 ++ .../tool.py} | 2 +- .../workflow_tool_provider.py | 4 +- .../tools/api_tools_manage_service.py | 2 +- .../tools/builtin_tools_manage_service.py | 4 +- api/services/tools/tools_transform_service.py | 10 +- .../tools/workflow_tools_manage_service.py | 2 +- .../tools/api_tool/test_api_tool.py | 4 +- 524 files changed, 338 insertions(+), 31279 deletions(-) rename api/core/tools/{tool => __base}/tool.py (100%) rename api/core/tools/{provider => __base}/tool_provider.py (99%) create mode 100644 api/core/tools/builtin_tool/_position.yaml rename api/core/tools/{provider/builtin_tool_provider.py => builtin_tool/provider.py} (92%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/__init__.py (100%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/_positions.py (100%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/code/_assets/icon.svg (100%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/code/code.py (66%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/code/code.yaml (100%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/code/tools/simple_code.py (93%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/code/tools/simple_code.yaml (100%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/qrcode/_assets/icon.svg (100%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/qrcode/qrcode.py (71%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/qrcode/qrcode.yaml (100%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/qrcode/tools/qrcode_generator.py (97%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/qrcode/tools/qrcode_generator.yaml (100%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/time/_assets/icon.svg (100%) rename 
api/core/tools/{provider/builtin => builtin_tool/providers}/time/time.py (73%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/time/time.yaml (100%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/time/tools/current_time.py (94%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/time/tools/current_time.yaml (100%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/time/tools/weekday.py (91%) rename api/core/tools/{provider/builtin => builtin_tool/providers}/time/tools/weekday.yaml (100%) rename api/core/tools/{tool/builtin_tool.py => builtin_tool/tool.py} (96%) rename api/core/tools/{provider/api_tool_provider.py => custom_tool/provider.py} (97%) rename api/core/tools/{tool/api_tool.py => custom_tool/tool.py} (96%) create mode 100644 api/core/tools/plugin_tool/plugin_tool_provider.py delete mode 100644 api/core/tools/provider/_position.yaml delete mode 100644 api/core/tools/provider/app_tool_provider.py delete mode 100644 api/core/tools/provider/builtin/aippt/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/aippt/aippt.py delete mode 100644 api/core/tools/provider/builtin/aippt/aippt.yaml delete mode 100644 api/core/tools/provider/builtin/aippt/tools/aippt.py delete mode 100644 api/core/tools/provider/builtin/aippt/tools/aippt.yaml delete mode 100644 api/core/tools/provider/builtin/alphavantage/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/alphavantage/alphavantage.py delete mode 100644 api/core/tools/provider/builtin/alphavantage/alphavantage.yaml delete mode 100644 api/core/tools/provider/builtin/alphavantage/tools/query_stock.py delete mode 100644 api/core/tools/provider/builtin/alphavantage/tools/query_stock.yaml delete mode 100644 api/core/tools/provider/builtin/arxiv/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/arxiv/arxiv.py delete mode 100644 api/core/tools/provider/builtin/arxiv/arxiv.yaml delete mode 100644 
api/core/tools/provider/builtin/arxiv/tools/arxiv_search.py delete mode 100644 api/core/tools/provider/builtin/arxiv/tools/arxiv_search.yaml delete mode 100644 api/core/tools/provider/builtin/aws/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/aws/aws.py delete mode 100644 api/core/tools/provider/builtin/aws/aws.yaml delete mode 100644 api/core/tools/provider/builtin/aws/tools/apply_guardrail.py delete mode 100644 api/core/tools/provider/builtin/aws/tools/apply_guardrail.yaml delete mode 100644 api/core/tools/provider/builtin/aws/tools/lambda_translate_utils.py delete mode 100644 api/core/tools/provider/builtin/aws/tools/lambda_translate_utils.yaml delete mode 100644 api/core/tools/provider/builtin/aws/tools/lambda_yaml_to_json.py delete mode 100644 api/core/tools/provider/builtin/aws/tools/lambda_yaml_to_json.yaml delete mode 100644 api/core/tools/provider/builtin/aws/tools/sagemaker_text_rerank.py delete mode 100644 api/core/tools/provider/builtin/aws/tools/sagemaker_text_rerank.yaml delete mode 100644 api/core/tools/provider/builtin/aws/tools/sagemaker_tts.py delete mode 100644 api/core/tools/provider/builtin/aws/tools/sagemaker_tts.yaml delete mode 100644 api/core/tools/provider/builtin/azuredalle/__init__.py delete mode 100644 api/core/tools/provider/builtin/azuredalle/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/azuredalle/azuredalle.py delete mode 100644 api/core/tools/provider/builtin/azuredalle/azuredalle.yaml delete mode 100644 api/core/tools/provider/builtin/azuredalle/tools/dalle3.py delete mode 100644 api/core/tools/provider/builtin/azuredalle/tools/dalle3.yaml delete mode 100644 api/core/tools/provider/builtin/bing/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/bing/bing.py delete mode 100644 api/core/tools/provider/builtin/bing/bing.yaml delete mode 100644 api/core/tools/provider/builtin/bing/tools/bing_web_search.py delete mode 100644 
api/core/tools/provider/builtin/bing/tools/bing_web_search.yaml delete mode 100644 api/core/tools/provider/builtin/brave/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/brave/brave.py delete mode 100644 api/core/tools/provider/builtin/brave/brave.yaml delete mode 100644 api/core/tools/provider/builtin/brave/tools/brave_search.py delete mode 100644 api/core/tools/provider/builtin/brave/tools/brave_search.yaml delete mode 100644 api/core/tools/provider/builtin/chart/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/chart/chart.py delete mode 100644 api/core/tools/provider/builtin/chart/chart.yaml delete mode 100644 api/core/tools/provider/builtin/chart/tools/bar.py delete mode 100644 api/core/tools/provider/builtin/chart/tools/bar.yaml delete mode 100644 api/core/tools/provider/builtin/chart/tools/line.py delete mode 100644 api/core/tools/provider/builtin/chart/tools/line.yaml delete mode 100644 api/core/tools/provider/builtin/chart/tools/pie.py delete mode 100644 api/core/tools/provider/builtin/chart/tools/pie.yaml delete mode 100644 api/core/tools/provider/builtin/cogview/__init__.py delete mode 100644 api/core/tools/provider/builtin/cogview/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/cogview/cogview.py delete mode 100644 api/core/tools/provider/builtin/cogview/cogview.yaml delete mode 100644 api/core/tools/provider/builtin/cogview/tools/cogview3.py delete mode 100644 api/core/tools/provider/builtin/cogview/tools/cogview3.yaml delete mode 100644 api/core/tools/provider/builtin/comfyui/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/comfyui/comfyui.py delete mode 100644 api/core/tools/provider/builtin/comfyui/comfyui.yaml delete mode 100644 api/core/tools/provider/builtin/comfyui/tools/comfyui_stable_diffusion.py delete mode 100644 api/core/tools/provider/builtin/comfyui/tools/comfyui_stable_diffusion.yaml delete mode 100644 api/core/tools/provider/builtin/comfyui/tools/txt2img.json 
delete mode 100644 api/core/tools/provider/builtin/crossref/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/crossref/crossref.py delete mode 100644 api/core/tools/provider/builtin/crossref/crossref.yaml delete mode 100644 api/core/tools/provider/builtin/crossref/tools/query_doi.py delete mode 100644 api/core/tools/provider/builtin/crossref/tools/query_doi.yaml delete mode 100644 api/core/tools/provider/builtin/crossref/tools/query_title.py delete mode 100644 api/core/tools/provider/builtin/crossref/tools/query_title.yaml delete mode 100644 api/core/tools/provider/builtin/dalle/__init__.py delete mode 100644 api/core/tools/provider/builtin/dalle/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/dalle/dalle.py delete mode 100644 api/core/tools/provider/builtin/dalle/dalle.yaml delete mode 100644 api/core/tools/provider/builtin/dalle/tools/dalle2.py delete mode 100644 api/core/tools/provider/builtin/dalle/tools/dalle2.yaml delete mode 100644 api/core/tools/provider/builtin/dalle/tools/dalle3.py delete mode 100644 api/core/tools/provider/builtin/dalle/tools/dalle3.yaml delete mode 100644 api/core/tools/provider/builtin/devdocs/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/devdocs/devdocs.py delete mode 100644 api/core/tools/provider/builtin/devdocs/devdocs.yaml delete mode 100644 api/core/tools/provider/builtin/devdocs/tools/searchDevDocs.py delete mode 100644 api/core/tools/provider/builtin/devdocs/tools/searchDevDocs.yaml delete mode 100644 api/core/tools/provider/builtin/did/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/did/did.py delete mode 100644 api/core/tools/provider/builtin/did/did.yaml delete mode 100644 api/core/tools/provider/builtin/did/did_appx.py delete mode 100644 api/core/tools/provider/builtin/did/tools/animations.py delete mode 100644 api/core/tools/provider/builtin/did/tools/animations.yaml delete mode 100644 api/core/tools/provider/builtin/did/tools/talks.py delete 
mode 100644 api/core/tools/provider/builtin/did/tools/talks.yaml delete mode 100644 api/core/tools/provider/builtin/dingtalk/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/dingtalk/dingtalk.py delete mode 100644 api/core/tools/provider/builtin/dingtalk/dingtalk.yaml delete mode 100644 api/core/tools/provider/builtin/dingtalk/tools/dingtalk_group_bot.py delete mode 100644 api/core/tools/provider/builtin/dingtalk/tools/dingtalk_group_bot.yaml delete mode 100644 api/core/tools/provider/builtin/duckduckgo/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/duckduckgo/duckduckgo.py delete mode 100644 api/core/tools/provider/builtin/duckduckgo/duckduckgo.yaml delete mode 100644 api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.py delete mode 100644 api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.yaml delete mode 100644 api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.py delete mode 100644 api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.yaml delete mode 100644 api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.py delete mode 100644 api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.yaml delete mode 100644 api/core/tools/provider/builtin/duckduckgo/tools/ddgo_translate.py delete mode 100644 api/core/tools/provider/builtin/duckduckgo/tools/ddgo_translate.yaml delete mode 100644 api/core/tools/provider/builtin/feishu/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/feishu/feishu.py delete mode 100644 api/core/tools/provider/builtin/feishu/feishu.yaml delete mode 100644 api/core/tools/provider/builtin/feishu/tools/feishu_group_bot.py delete mode 100644 api/core/tools/provider/builtin/feishu/tools/feishu_group_bot.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/feishu_base/feishu_base.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/feishu_base.yaml delete mode 
100644 api/core/tools/provider/builtin/feishu_base/tools/add_base_record.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/add_base_record.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/create_base.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/create_base.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/create_base_table.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/create_base_table.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/get_base_info.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/get_base_info.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/list_base_records.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/list_base_records.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/read_base_record.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/read_base_record.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/update_base_record.py delete mode 100644 api/core/tools/provider/builtin/feishu_base/tools/update_base_record.yaml delete mode 
100644 api/core/tools/provider/builtin/feishu_document/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/feishu_document/feishu_document.py delete mode 100644 api/core/tools/provider/builtin/feishu_document/feishu_document.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_document/tools/create_document.py delete mode 100644 api/core/tools/provider/builtin/feishu_document/tools/create_document.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_document/tools/get_document_content.py delete mode 100644 api/core/tools/provider/builtin/feishu_document/tools/get_document_content.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.py delete mode 100644 api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_document/tools/write_document.py delete mode 100644 api/core/tools/provider/builtin/feishu_document/tools/write_document.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_message/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/feishu_message/feishu_message.py delete mode 100644 api/core/tools/provider/builtin/feishu_message/feishu_message.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.py delete mode 100644 api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.yaml delete mode 100644 api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.py delete mode 100644 api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.yaml delete mode 100644 api/core/tools/provider/builtin/firecrawl/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/firecrawl/firecrawl.py delete mode 100644 api/core/tools/provider/builtin/firecrawl/firecrawl.yaml delete mode 100644 api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py delete mode 100644 
api/core/tools/provider/builtin/firecrawl/tools/crawl.py delete mode 100644 api/core/tools/provider/builtin/firecrawl/tools/crawl.yaml delete mode 100644 api/core/tools/provider/builtin/firecrawl/tools/crawl_job.py delete mode 100644 api/core/tools/provider/builtin/firecrawl/tools/crawl_job.yaml delete mode 100644 api/core/tools/provider/builtin/firecrawl/tools/map.py delete mode 100644 api/core/tools/provider/builtin/firecrawl/tools/map.yaml delete mode 100644 api/core/tools/provider/builtin/firecrawl/tools/scrape.py delete mode 100644 api/core/tools/provider/builtin/firecrawl/tools/scrape.yaml delete mode 100644 api/core/tools/provider/builtin/gaode/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/gaode/gaode.py delete mode 100644 api/core/tools/provider/builtin/gaode/gaode.yaml delete mode 100644 api/core/tools/provider/builtin/gaode/tools/gaode_weather.py delete mode 100644 api/core/tools/provider/builtin/gaode/tools/gaode_weather.yaml delete mode 100644 api/core/tools/provider/builtin/getimgai/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/getimgai/getimgai.py delete mode 100644 api/core/tools/provider/builtin/getimgai/getimgai.yaml delete mode 100644 api/core/tools/provider/builtin/getimgai/getimgai_appx.py delete mode 100644 api/core/tools/provider/builtin/getimgai/tools/text2image.py delete mode 100644 api/core/tools/provider/builtin/getimgai/tools/text2image.yaml delete mode 100644 api/core/tools/provider/builtin/github/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/github/github.py delete mode 100644 api/core/tools/provider/builtin/github/github.yaml delete mode 100644 api/core/tools/provider/builtin/github/tools/github_repositories.py delete mode 100644 api/core/tools/provider/builtin/github/tools/github_repositories.yaml delete mode 100644 api/core/tools/provider/builtin/gitlab/_assets/gitlab.svg delete mode 100644 api/core/tools/provider/builtin/gitlab/gitlab.py delete mode 100644 
api/core/tools/provider/builtin/gitlab/gitlab.yaml delete mode 100644 api/core/tools/provider/builtin/gitlab/tools/gitlab_commits.py delete mode 100644 api/core/tools/provider/builtin/gitlab/tools/gitlab_commits.yaml delete mode 100644 api/core/tools/provider/builtin/gitlab/tools/gitlab_files.py delete mode 100644 api/core/tools/provider/builtin/gitlab/tools/gitlab_files.yaml delete mode 100644 api/core/tools/provider/builtin/google/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/google/google.py delete mode 100644 api/core/tools/provider/builtin/google/google.yaml delete mode 100644 api/core/tools/provider/builtin/google/tools/google_search.py delete mode 100644 api/core/tools/provider/builtin/google/tools/google_search.yaml delete mode 100644 api/core/tools/provider/builtin/google_translate/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/google_translate/google_translate.py delete mode 100644 api/core/tools/provider/builtin/google_translate/google_translate.yaml delete mode 100644 api/core/tools/provider/builtin/google_translate/tools/translate.py delete mode 100644 api/core/tools/provider/builtin/google_translate/tools/translate.yaml delete mode 100644 api/core/tools/provider/builtin/hap/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/hap/hap.py delete mode 100644 api/core/tools/provider/builtin/hap/hap.yaml delete mode 100644 api/core/tools/provider/builtin/hap/tools/add_worksheet_record.py delete mode 100644 api/core/tools/provider/builtin/hap/tools/add_worksheet_record.yaml delete mode 100644 api/core/tools/provider/builtin/hap/tools/delete_worksheet_record.py delete mode 100644 api/core/tools/provider/builtin/hap/tools/delete_worksheet_record.yaml delete mode 100644 api/core/tools/provider/builtin/hap/tools/get_worksheet_fields.py delete mode 100644 api/core/tools/provider/builtin/hap/tools/get_worksheet_fields.yaml delete mode 100644 
api/core/tools/provider/builtin/hap/tools/get_worksheet_pivot_data.py delete mode 100644 api/core/tools/provider/builtin/hap/tools/get_worksheet_pivot_data.yaml delete mode 100644 api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py delete mode 100644 api/core/tools/provider/builtin/hap/tools/list_worksheet_records.yaml delete mode 100644 api/core/tools/provider/builtin/hap/tools/list_worksheets.py delete mode 100644 api/core/tools/provider/builtin/hap/tools/list_worksheets.yaml delete mode 100644 api/core/tools/provider/builtin/hap/tools/update_worksheet_record.py delete mode 100644 api/core/tools/provider/builtin/hap/tools/update_worksheet_record.yaml delete mode 100644 api/core/tools/provider/builtin/jina/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/jina/jina.py delete mode 100644 api/core/tools/provider/builtin/jina/jina.yaml delete mode 100644 api/core/tools/provider/builtin/jina/tools/jina_reader.py delete mode 100644 api/core/tools/provider/builtin/jina/tools/jina_reader.yaml delete mode 100644 api/core/tools/provider/builtin/jina/tools/jina_search.py delete mode 100644 api/core/tools/provider/builtin/jina/tools/jina_search.yaml delete mode 100644 api/core/tools/provider/builtin/jina/tools/jina_tokenizer.py delete mode 100644 api/core/tools/provider/builtin/jina/tools/jina_tokenizer.yaml delete mode 100644 api/core/tools/provider/builtin/json_process/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/json_process/json_process.py delete mode 100644 api/core/tools/provider/builtin/json_process/json_process.yaml delete mode 100644 api/core/tools/provider/builtin/json_process/tools/delete.py delete mode 100644 api/core/tools/provider/builtin/json_process/tools/delete.yaml delete mode 100644 api/core/tools/provider/builtin/json_process/tools/insert.py delete mode 100644 api/core/tools/provider/builtin/json_process/tools/insert.yaml delete mode 100644 api/core/tools/provider/builtin/json_process/tools/parse.py 
delete mode 100644 api/core/tools/provider/builtin/json_process/tools/parse.yaml delete mode 100644 api/core/tools/provider/builtin/json_process/tools/replace.py delete mode 100644 api/core/tools/provider/builtin/json_process/tools/replace.yaml delete mode 100644 api/core/tools/provider/builtin/judge0ce/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/judge0ce/judge0ce.py delete mode 100644 api/core/tools/provider/builtin/judge0ce/judge0ce.yaml delete mode 100644 api/core/tools/provider/builtin/judge0ce/tools/executeCode.py delete mode 100644 api/core/tools/provider/builtin/judge0ce/tools/executeCode.yaml delete mode 100644 api/core/tools/provider/builtin/maths/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/maths/maths.py delete mode 100644 api/core/tools/provider/builtin/maths/maths.yaml delete mode 100644 api/core/tools/provider/builtin/maths/tools/eval_expression.py delete mode 100644 api/core/tools/provider/builtin/maths/tools/eval_expression.yaml delete mode 100644 api/core/tools/provider/builtin/nominatim/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/nominatim/nominatim.py delete mode 100644 api/core/tools/provider/builtin/nominatim/nominatim.yaml delete mode 100644 api/core/tools/provider/builtin/nominatim/tools/nominatim_lookup.py delete mode 100644 api/core/tools/provider/builtin/nominatim/tools/nominatim_lookup.yaml delete mode 100644 api/core/tools/provider/builtin/nominatim/tools/nominatim_reverse.py delete mode 100644 api/core/tools/provider/builtin/nominatim/tools/nominatim_reverse.yaml delete mode 100644 api/core/tools/provider/builtin/nominatim/tools/nominatim_search.py delete mode 100644 api/core/tools/provider/builtin/nominatim/tools/nominatim_search.yaml delete mode 100644 api/core/tools/provider/builtin/novitaai/_assets/icon.ico delete mode 100644 api/core/tools/provider/builtin/novitaai/_novita_tool_base.py delete mode 100644 api/core/tools/provider/builtin/novitaai/novitaai.py 
delete mode 100644 api/core/tools/provider/builtin/novitaai/novitaai.yaml delete mode 100644 api/core/tools/provider/builtin/novitaai/tools/novitaai_createtile.py delete mode 100644 api/core/tools/provider/builtin/novitaai/tools/novitaai_createtile.yaml delete mode 100644 api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.py delete mode 100644 api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.yaml delete mode 100644 api/core/tools/provider/builtin/novitaai/tools/novitaai_txt2img.py delete mode 100644 api/core/tools/provider/builtin/novitaai/tools/novitaai_txt2img.yaml delete mode 100644 api/core/tools/provider/builtin/onebot/_assets/icon.ico delete mode 100644 api/core/tools/provider/builtin/onebot/onebot.py delete mode 100644 api/core/tools/provider/builtin/onebot/onebot.yaml delete mode 100644 api/core/tools/provider/builtin/onebot/tools/__init__.py delete mode 100644 api/core/tools/provider/builtin/onebot/tools/send_group_msg.py delete mode 100644 api/core/tools/provider/builtin/onebot/tools/send_group_msg.yaml delete mode 100644 api/core/tools/provider/builtin/onebot/tools/send_private_msg.py delete mode 100644 api/core/tools/provider/builtin/onebot/tools/send_private_msg.yaml delete mode 100644 api/core/tools/provider/builtin/openweather/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/openweather/openweather.py delete mode 100644 api/core/tools/provider/builtin/openweather/openweather.yaml delete mode 100644 api/core/tools/provider/builtin/openweather/tools/weather.py delete mode 100644 api/core/tools/provider/builtin/openweather/tools/weather.yaml delete mode 100644 api/core/tools/provider/builtin/perplexity/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/perplexity/perplexity.py delete mode 100644 api/core/tools/provider/builtin/perplexity/perplexity.yaml delete mode 100644 api/core/tools/provider/builtin/perplexity/tools/perplexity_search.py delete mode 100644 
api/core/tools/provider/builtin/perplexity/tools/perplexity_search.yaml delete mode 100644 api/core/tools/provider/builtin/pubmed/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/pubmed/pubmed.py delete mode 100644 api/core/tools/provider/builtin/pubmed/pubmed.yaml delete mode 100644 api/core/tools/provider/builtin/pubmed/tools/pubmed_search.py delete mode 100644 api/core/tools/provider/builtin/pubmed/tools/pubmed_search.yaml delete mode 100644 api/core/tools/provider/builtin/regex/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/regex/regex.py delete mode 100644 api/core/tools/provider/builtin/regex/regex.yaml delete mode 100644 api/core/tools/provider/builtin/regex/tools/regex_extract.py delete mode 100644 api/core/tools/provider/builtin/regex/tools/regex_extract.yaml delete mode 100644 api/core/tools/provider/builtin/searchapi/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/searchapi/searchapi.py delete mode 100644 api/core/tools/provider/builtin/searchapi/searchapi.yaml delete mode 100644 api/core/tools/provider/builtin/searchapi/tools/google.py delete mode 100644 api/core/tools/provider/builtin/searchapi/tools/google.yaml delete mode 100644 api/core/tools/provider/builtin/searchapi/tools/google_jobs.py delete mode 100644 api/core/tools/provider/builtin/searchapi/tools/google_jobs.yaml delete mode 100644 api/core/tools/provider/builtin/searchapi/tools/google_news.py delete mode 100644 api/core/tools/provider/builtin/searchapi/tools/google_news.yaml delete mode 100644 api/core/tools/provider/builtin/searchapi/tools/youtube_transcripts.py delete mode 100644 api/core/tools/provider/builtin/searchapi/tools/youtube_transcripts.yaml delete mode 100644 api/core/tools/provider/builtin/searxng/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/searxng/docker/settings.yml delete mode 100644 api/core/tools/provider/builtin/searxng/docker/uwsgi.ini delete mode 100644 
api/core/tools/provider/builtin/searxng/searxng.py delete mode 100644 api/core/tools/provider/builtin/searxng/searxng.yaml delete mode 100644 api/core/tools/provider/builtin/searxng/tools/searxng_search.py delete mode 100644 api/core/tools/provider/builtin/searxng/tools/searxng_search.yaml delete mode 100644 api/core/tools/provider/builtin/serper/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/serper/serper.py delete mode 100644 api/core/tools/provider/builtin/serper/serper.yaml delete mode 100644 api/core/tools/provider/builtin/serper/tools/serper_search.py delete mode 100644 api/core/tools/provider/builtin/serper/tools/serper_search.yaml delete mode 100644 api/core/tools/provider/builtin/siliconflow/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/siliconflow/siliconflow.py delete mode 100644 api/core/tools/provider/builtin/siliconflow/siliconflow.yaml delete mode 100644 api/core/tools/provider/builtin/siliconflow/tools/flux.py delete mode 100644 api/core/tools/provider/builtin/siliconflow/tools/flux.yaml delete mode 100644 api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.py delete mode 100644 api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.yaml delete mode 100644 api/core/tools/provider/builtin/slack/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/slack/slack.py delete mode 100644 api/core/tools/provider/builtin/slack/slack.yaml delete mode 100644 api/core/tools/provider/builtin/slack/tools/slack_webhook.py delete mode 100644 api/core/tools/provider/builtin/slack/tools/slack_webhook.yaml delete mode 100644 api/core/tools/provider/builtin/spark/__init__.py delete mode 100644 api/core/tools/provider/builtin/spark/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/spark/spark.py delete mode 100644 api/core/tools/provider/builtin/spark/spark.yaml delete mode 100644 api/core/tools/provider/builtin/spark/tools/spark_img_generation.py delete mode 100644 
api/core/tools/provider/builtin/spark/tools/spark_img_generation.yaml delete mode 100644 api/core/tools/provider/builtin/spider/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/spider/spider.py delete mode 100644 api/core/tools/provider/builtin/spider/spider.yaml delete mode 100644 api/core/tools/provider/builtin/spider/spiderApp.py delete mode 100644 api/core/tools/provider/builtin/spider/tools/scraper_crawler.py delete mode 100644 api/core/tools/provider/builtin/spider/tools/scraper_crawler.yaml delete mode 100644 api/core/tools/provider/builtin/stability/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/stability/stability.py delete mode 100644 api/core/tools/provider/builtin/stability/stability.yaml delete mode 100644 api/core/tools/provider/builtin/stability/tools/base.py delete mode 100644 api/core/tools/provider/builtin/stability/tools/text2image.py delete mode 100644 api/core/tools/provider/builtin/stability/tools/text2image.yaml delete mode 100644 api/core/tools/provider/builtin/stablediffusion/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/stablediffusion/stablediffusion.py delete mode 100644 api/core/tools/provider/builtin/stablediffusion/stablediffusion.yaml delete mode 100644 api/core/tools/provider/builtin/stablediffusion/tools/stable_diffusion.py delete mode 100644 api/core/tools/provider/builtin/stablediffusion/tools/stable_diffusion.yaml delete mode 100644 api/core/tools/provider/builtin/stackexchange/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/stackexchange/stackexchange.py delete mode 100644 api/core/tools/provider/builtin/stackexchange/stackexchange.yaml delete mode 100644 api/core/tools/provider/builtin/stackexchange/tools/fetchAnsByStackExQuesID.py delete mode 100644 api/core/tools/provider/builtin/stackexchange/tools/fetchAnsByStackExQuesID.yaml delete mode 100644 api/core/tools/provider/builtin/stackexchange/tools/searchStackExQuestions.py delete mode 100644 
api/core/tools/provider/builtin/stackexchange/tools/searchStackExQuestions.yaml delete mode 100644 api/core/tools/provider/builtin/stepfun/__init__.py delete mode 100644 api/core/tools/provider/builtin/stepfun/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/stepfun/stepfun.py delete mode 100644 api/core/tools/provider/builtin/stepfun/stepfun.yaml delete mode 100644 api/core/tools/provider/builtin/stepfun/tools/image.py delete mode 100644 api/core/tools/provider/builtin/stepfun/tools/image.yaml delete mode 100644 api/core/tools/provider/builtin/tavily/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/tavily/tavily.py delete mode 100644 api/core/tools/provider/builtin/tavily/tavily.yaml delete mode 100644 api/core/tools/provider/builtin/tavily/tools/tavily_search.py delete mode 100644 api/core/tools/provider/builtin/tavily/tools/tavily_search.yaml delete mode 100644 api/core/tools/provider/builtin/tianditu/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/tianditu/tianditu.py delete mode 100644 api/core/tools/provider/builtin/tianditu/tianditu.yaml delete mode 100644 api/core/tools/provider/builtin/tianditu/tools/geocoder.py delete mode 100644 api/core/tools/provider/builtin/tianditu/tools/geocoder.yaml delete mode 100644 api/core/tools/provider/builtin/tianditu/tools/poisearch.py delete mode 100644 api/core/tools/provider/builtin/tianditu/tools/poisearch.yaml delete mode 100644 api/core/tools/provider/builtin/tianditu/tools/staticmap.py delete mode 100644 api/core/tools/provider/builtin/tianditu/tools/staticmap.yaml delete mode 100644 api/core/tools/provider/builtin/trello/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/trello/tools/create_board.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/create_board.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/create_list_on_board.py delete mode 100644 
api/core/tools/provider/builtin/trello/tools/create_list_on_board.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/create_new_card_on_board.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/create_new_card_on_board.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/delete_board.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/delete_board.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/delete_card.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/delete_card.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/fetch_all_boards.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/fetch_all_boards.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/get_board_actions.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/get_board_actions.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/get_board_by_id.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/get_board_by_id.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/get_board_cards.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/get_board_cards.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/get_filterd_board_cards.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/get_filterd_board_cards.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/get_lists_on_board.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/get_lists_on_board.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/update_board.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/update_board.yaml delete mode 100644 api/core/tools/provider/builtin/trello/tools/update_card.py delete mode 100644 api/core/tools/provider/builtin/trello/tools/update_card.yaml delete mode 100644 
api/core/tools/provider/builtin/trello/trello.py delete mode 100644 api/core/tools/provider/builtin/trello/trello.yaml delete mode 100644 api/core/tools/provider/builtin/twilio/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/twilio/tools/send_message.py delete mode 100644 api/core/tools/provider/builtin/twilio/tools/send_message.yaml delete mode 100644 api/core/tools/provider/builtin/twilio/twilio.py delete mode 100644 api/core/tools/provider/builtin/twilio/twilio.yaml delete mode 100644 api/core/tools/provider/builtin/vanna/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/vanna/tools/vanna.py delete mode 100644 api/core/tools/provider/builtin/vanna/tools/vanna.yaml delete mode 100644 api/core/tools/provider/builtin/vanna/vanna.py delete mode 100644 api/core/tools/provider/builtin/vanna/vanna.yaml delete mode 100644 api/core/tools/provider/builtin/vectorizer/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/vectorizer/tools/test_data.py delete mode 100644 api/core/tools/provider/builtin/vectorizer/tools/vectorizer.py delete mode 100644 api/core/tools/provider/builtin/vectorizer/tools/vectorizer.yaml delete mode 100644 api/core/tools/provider/builtin/vectorizer/vectorizer.py delete mode 100644 api/core/tools/provider/builtin/vectorizer/vectorizer.yaml delete mode 100644 api/core/tools/provider/builtin/webscraper/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/webscraper/tools/webscraper.py delete mode 100644 api/core/tools/provider/builtin/webscraper/tools/webscraper.yaml delete mode 100644 api/core/tools/provider/builtin/webscraper/webscraper.py delete mode 100644 api/core/tools/provider/builtin/webscraper/webscraper.yaml delete mode 100644 api/core/tools/provider/builtin/websearch/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/websearch/tools/get_markdown.py delete mode 100644 api/core/tools/provider/builtin/websearch/tools/get_markdown.yaml delete mode 100644 
api/core/tools/provider/builtin/websearch/tools/job_search.py delete mode 100644 api/core/tools/provider/builtin/websearch/tools/job_search.yaml delete mode 100644 api/core/tools/provider/builtin/websearch/tools/news_search.py delete mode 100644 api/core/tools/provider/builtin/websearch/tools/news_search.yaml delete mode 100644 api/core/tools/provider/builtin/websearch/tools/scholar_search.py delete mode 100644 api/core/tools/provider/builtin/websearch/tools/scholar_search.yaml delete mode 100644 api/core/tools/provider/builtin/websearch/tools/web_search.py delete mode 100644 api/core/tools/provider/builtin/websearch/tools/web_search.yaml delete mode 100644 api/core/tools/provider/builtin/websearch/websearch.py delete mode 100644 api/core/tools/provider/builtin/websearch/websearch.yaml delete mode 100644 api/core/tools/provider/builtin/wecom/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/wecom/tools/wecom_group_bot.py delete mode 100644 api/core/tools/provider/builtin/wecom/tools/wecom_group_bot.yaml delete mode 100644 api/core/tools/provider/builtin/wecom/wecom.py delete mode 100644 api/core/tools/provider/builtin/wecom/wecom.yaml delete mode 100644 api/core/tools/provider/builtin/wikipedia/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/wikipedia/tools/wikipedia_search.py delete mode 100644 api/core/tools/provider/builtin/wikipedia/tools/wikipedia_search.yaml delete mode 100644 api/core/tools/provider/builtin/wikipedia/wikipedia.py delete mode 100644 api/core/tools/provider/builtin/wikipedia/wikipedia.yaml delete mode 100644 api/core/tools/provider/builtin/wolframalpha/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/wolframalpha/tools/wolframalpha.py delete mode 100644 api/core/tools/provider/builtin/wolframalpha/tools/wolframalpha.yaml delete mode 100644 api/core/tools/provider/builtin/wolframalpha/wolframalpha.py delete mode 100644 api/core/tools/provider/builtin/wolframalpha/wolframalpha.yaml delete 
mode 100644 api/core/tools/provider/builtin/yahoo/_assets/icon.png delete mode 100644 api/core/tools/provider/builtin/yahoo/tools/analytics.py delete mode 100644 api/core/tools/provider/builtin/yahoo/tools/analytics.yaml delete mode 100644 api/core/tools/provider/builtin/yahoo/tools/news.py delete mode 100644 api/core/tools/provider/builtin/yahoo/tools/news.yaml delete mode 100644 api/core/tools/provider/builtin/yahoo/tools/ticker.py delete mode 100644 api/core/tools/provider/builtin/yahoo/tools/ticker.yaml delete mode 100644 api/core/tools/provider/builtin/yahoo/yahoo.py delete mode 100644 api/core/tools/provider/builtin/yahoo/yahoo.yaml delete mode 100644 api/core/tools/provider/builtin/youtube/_assets/icon.svg delete mode 100644 api/core/tools/provider/builtin/youtube/tools/videos.py delete mode 100644 api/core/tools/provider/builtin/youtube/tools/videos.yaml delete mode 100644 api/core/tools/provider/builtin/youtube/youtube.py delete mode 100644 api/core/tools/provider/builtin/youtube/youtube.yaml rename api/core/tools/{tool => utils}/dataset_retriever/dataset_multi_retriever_tool.py (98%) rename api/core/tools/{tool => utils}/dataset_retriever/dataset_retriever_base_tool.py (100%) rename api/core/tools/{tool => utils}/dataset_retriever/dataset_retriever_tool.py (100%) rename api/core/tools/{tool => utils}/dataset_retriever_tool.py (96%) create mode 100644 api/core/tools/workflow_as_tool/provider.py rename api/core/tools/{tool/workflow_tool.py => workflow_as_tool/tool.py} (99%) rename api/core/tools/{provider => workflow_as_tool}/workflow_tool_provider.py (98%) diff --git a/api/core/agent/base_agent_runner.py b/api/core/agent/base_agent_runner.py index d09a9956a4a591..55fd8825de6909 100644 --- a/api/core/agent/base_agent_runner.py +++ b/api/core/agent/base_agent_runner.py @@ -32,13 +32,13 @@ from core.model_runtime.entities.model_entities import ModelFeature from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from 
core.model_runtime.utils.encoders import jsonable_encoder +from core.tools.__base.tool import Tool from core.tools.entities.tool_entities import ( ToolParameter, ToolRuntimeVariablePool, ) -from core.tools.tool.dataset_retriever_tool import DatasetRetrieverTool -from core.tools.tool.tool import Tool from core.tools.tool_manager import ToolManager +from core.tools.utils.dataset_retriever_tool import DatasetRetrieverTool from core.tools.utils.tool_parameter_converter import ToolParameterConverter from extensions.ext_database import db from models.model import Conversation, Message, MessageAgentThought diff --git a/api/core/agent/cot_agent_runner.py b/api/core/agent/cot_agent_runner.py index ebe04bf26000f1..0d74b1e5ebe933 100644 --- a/api/core/agent/cot_agent_runner.py +++ b/api/core/agent/cot_agent_runner.py @@ -17,8 +17,8 @@ ) from core.ops.ops_trace_manager import TraceQueueManager from core.prompt.agent_history_prompt_transform import AgentHistoryPromptTransform +from core.tools.__base.tool import Tool from core.tools.entities.tool_entities import ToolInvokeMeta -from core.tools.tool.tool import Tool from core.tools.tool_engine import ToolEngine from models.model import Message diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index 286ecd4c036841..05f6fe6e269c8e 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -24,9 +24,7 @@ from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.rag.retrieval.router.multi_dataset_function_call_router import FunctionCallMultiDatasetRouter from core.rag.retrieval.router.multi_dataset_react_route import ReactMultiDatasetRouter -from core.tools.tool.dataset_retriever.dataset_multi_retriever_tool import DatasetMultiRetrieverTool -from core.tools.tool.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool -from core.tools.tool.dataset_retriever.dataset_retriever_tool import 
DatasetRetrieverTool +from core.tools.utils.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool from extensions.ext_database import db from models.dataset import Dataset, DatasetQuery, DocumentSegment from models.dataset import Document as DatasetDocument @@ -371,7 +369,7 @@ def _on_retrieval_end( db.session.commit() # get tracing instance - trace_manager: TraceQueueManager = ( + trace_manager: TraceQueueManager | None = ( self.application_generate_entity.trace_manager if self.application_generate_entity else None ) if trace_manager: @@ -494,7 +492,8 @@ def to_dataset_retriever_tool( score_threshold_enabled = retrieval_model_config.get("score_threshold_enabled") if score_threshold_enabled: score_threshold = retrieval_model_config.get("score_threshold") - + + from core.tools.utils.dataset_retriever.dataset_retriever_tool import DatasetRetrieverTool tool = DatasetRetrieverTool.from_dataset( dataset=dataset, top_k=top_k, @@ -506,6 +505,7 @@ def to_dataset_retriever_tool( tools.append(tool) elif retrieve_config.retrieve_strategy == DatasetRetrieveConfigEntity.RetrieveStrategy.MULTIPLE: + from core.tools.utils.dataset_retriever.dataset_multi_retriever_tool import DatasetMultiRetrieverTool tool = DatasetMultiRetrieverTool.from_dataset( dataset_ids=[dataset.id for dataset in available_datasets], tenant_id=tenant_id, diff --git a/api/core/tools/tool/tool.py b/api/core/tools/__base/tool.py similarity index 100% rename from api/core/tools/tool/tool.py rename to api/core/tools/__base/tool.py diff --git a/api/core/tools/provider/tool_provider.py b/api/core/tools/__base/tool_provider.py similarity index 99% rename from api/core/tools/provider/tool_provider.py rename to api/core/tools/__base/tool_provider.py index f07af649ad055d..7960ed5f84d5dc 100644 --- a/api/core/tools/provider/tool_provider.py +++ b/api/core/tools/__base/tool_provider.py @@ -4,12 +4,12 @@ from pydantic import BaseModel, ConfigDict, Field from core.entities.provider_entities import 
ProviderConfig +from core.tools.__base.tool import Tool from core.tools.entities.tool_entities import ( ToolProviderIdentity, ToolProviderType, ) from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.tool.tool import Tool class ToolProviderController(BaseModel, ABC): diff --git a/api/core/tools/builtin_tool/_position.yaml b/api/core/tools/builtin_tool/_position.yaml new file mode 100644 index 00000000000000..b5875e20759163 --- /dev/null +++ b/api/core/tools/builtin_tool/_position.yaml @@ -0,0 +1,3 @@ +- code +- time +- qrcode diff --git a/api/core/tools/provider/builtin_tool_provider.py b/api/core/tools/builtin_tool/provider.py similarity index 92% rename from api/core/tools/provider/builtin_tool_provider.py rename to api/core/tools/builtin_tool/provider.py index 2a966d3999adde..7d1775b7f561fa 100644 --- a/api/core/tools/provider/builtin_tool_provider.py +++ b/api/core/tools/builtin_tool/provider.py @@ -6,13 +6,13 @@ from core.entities.provider_entities import ProviderConfig from core.helper.module_import_helper import load_single_subclass_from_source +from core.tools.__base.tool_provider import ToolProviderController +from core.tools.builtin_tool.tool import BuiltinTool from core.tools.entities.tool_entities import ToolProviderType from core.tools.entities.values import ToolLabelEnum, default_tool_label_dict from core.tools.errors import ( ToolProviderNotFoundError, ) -from core.tools.provider.tool_provider import ToolProviderController -from core.tools.tool.builtin_tool import BuiltinTool from core.tools.utils.yaml_utils import load_yaml_file @@ -26,7 +26,7 @@ def __init__(self, **data: Any) -> None: # load provider yaml provider = self.__class__.__module__.split(".")[-1] - yaml_path = path.join(path.dirname(path.realpath(__file__)), "builtin", provider, f"{provider}.yaml") + yaml_path = path.join(path.dirname(path.realpath(__file__)), "providers", provider, f"{provider}.yaml") try: provider_yaml = load_yaml_file(yaml_path, 
ignore_error=False) except Exception as e: @@ -52,7 +52,7 @@ def _get_builtin_tools(self) -> list[BuiltinTool]: return self.tools provider = self.identity.name - tool_path = path.join(path.dirname(path.realpath(__file__)), "builtin", provider, "tools") + tool_path = path.join(path.dirname(path.realpath(__file__)), "providers", provider, "tools") # get all the yaml files in the tool path tool_files = list(filter(lambda x: x.endswith(".yaml") and not x.startswith("__"), listdir(tool_path))) tools = [] @@ -63,9 +63,10 @@ def _get_builtin_tools(self) -> list[BuiltinTool]: # get tool class, import the module assistant_tool_class = load_single_subclass_from_source( - module_name=f"core.tools.provider.builtin.{provider}.tools.{tool_name}", + module_name=f"core.tools.builtin_tool.providers.{provider}.tools.{tool_name}", script_path=path.join( - path.dirname(path.realpath(__file__)), "builtin", provider, "tools", f"{tool_name}.py" + path.dirname(path.realpath(__file__)), + "builtin_tool", "providers", provider, "tools", f"{tool_name}.py" ), parent_type=BuiltinTool, ) diff --git a/api/core/tools/provider/builtin/__init__.py b/api/core/tools/builtin_tool/providers/__init__.py similarity index 100% rename from api/core/tools/provider/builtin/__init__.py rename to api/core/tools/builtin_tool/providers/__init__.py diff --git a/api/core/tools/provider/builtin/_positions.py b/api/core/tools/builtin_tool/providers/_positions.py similarity index 100% rename from api/core/tools/provider/builtin/_positions.py rename to api/core/tools/builtin_tool/providers/_positions.py diff --git a/api/core/tools/provider/builtin/code/_assets/icon.svg b/api/core/tools/builtin_tool/providers/code/_assets/icon.svg similarity index 100% rename from api/core/tools/provider/builtin/code/_assets/icon.svg rename to api/core/tools/builtin_tool/providers/code/_assets/icon.svg diff --git a/api/core/tools/provider/builtin/code/code.py b/api/core/tools/builtin_tool/providers/code/code.py similarity index 66% 
rename from api/core/tools/provider/builtin/code/code.py rename to api/core/tools/builtin_tool/providers/code/code.py index 211417c9a431ed..53210e9c439cb6 100644 --- a/api/core/tools/provider/builtin/code/code.py +++ b/api/core/tools/builtin_tool/providers/code/code.py @@ -1,6 +1,6 @@ from typing import Any -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController +from core.tools.builtin_tool.provider import BuiltinToolProviderController class CodeToolProvider(BuiltinToolProviderController): diff --git a/api/core/tools/provider/builtin/code/code.yaml b/api/core/tools/builtin_tool/providers/code/code.yaml similarity index 100% rename from api/core/tools/provider/builtin/code/code.yaml rename to api/core/tools/builtin_tool/providers/code/code.yaml diff --git a/api/core/tools/provider/builtin/code/tools/simple_code.py b/api/core/tools/builtin_tool/providers/code/tools/simple_code.py similarity index 93% rename from api/core/tools/provider/builtin/code/tools/simple_code.py rename to api/core/tools/builtin_tool/providers/code/tools/simple_code.py index 632c9fc7f1451b..14ff1969ff2b4d 100644 --- a/api/core/tools/provider/builtin/code/tools/simple_code.py +++ b/api/core/tools/builtin_tool/providers/code/tools/simple_code.py @@ -1,8 +1,8 @@ from typing import Any from core.helper.code_executor.code_executor import CodeExecutor, CodeLanguage +from core.tools.builtin_tool.tool import BuiltinTool from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool class SimpleCode(BuiltinTool): diff --git a/api/core/tools/provider/builtin/code/tools/simple_code.yaml b/api/core/tools/builtin_tool/providers/code/tools/simple_code.yaml similarity index 100% rename from api/core/tools/provider/builtin/code/tools/simple_code.yaml rename to api/core/tools/builtin_tool/providers/code/tools/simple_code.yaml diff --git a/api/core/tools/provider/builtin/qrcode/_assets/icon.svg 
b/api/core/tools/builtin_tool/providers/qrcode/_assets/icon.svg similarity index 100% rename from api/core/tools/provider/builtin/qrcode/_assets/icon.svg rename to api/core/tools/builtin_tool/providers/qrcode/_assets/icon.svg diff --git a/api/core/tools/provider/builtin/qrcode/qrcode.py b/api/core/tools/builtin_tool/providers/qrcode/qrcode.py similarity index 71% rename from api/core/tools/provider/builtin/qrcode/qrcode.py rename to api/core/tools/builtin_tool/providers/qrcode/qrcode.py index 8466b9a26b42b6..542ee7b63e9e09 100644 --- a/api/core/tools/provider/builtin/qrcode/qrcode.py +++ b/api/core/tools/builtin_tool/providers/qrcode/qrcode.py @@ -1,8 +1,8 @@ from typing import Any +from core.tools.builtin_tool.provider import BuiltinToolProviderController +from core.tools.builtin_tool.providers.qrcode.tools.qrcode_generator import QRCodeGeneratorTool from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.qrcode.tools.qrcode_generator import QRCodeGeneratorTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController class QRCodeProvider(BuiltinToolProviderController): diff --git a/api/core/tools/provider/builtin/qrcode/qrcode.yaml b/api/core/tools/builtin_tool/providers/qrcode/qrcode.yaml similarity index 100% rename from api/core/tools/provider/builtin/qrcode/qrcode.yaml rename to api/core/tools/builtin_tool/providers/qrcode/qrcode.yaml diff --git a/api/core/tools/provider/builtin/qrcode/tools/qrcode_generator.py b/api/core/tools/builtin_tool/providers/qrcode/tools/qrcode_generator.py similarity index 97% rename from api/core/tools/provider/builtin/qrcode/tools/qrcode_generator.py rename to api/core/tools/builtin_tool/providers/qrcode/tools/qrcode_generator.py index d8ca20bde6ffc9..081ad30231f88e 100644 --- a/api/core/tools/provider/builtin/qrcode/tools/qrcode_generator.py +++ b/api/core/tools/builtin_tool/providers/qrcode/tools/qrcode_generator.py @@ -7,8 +7,8 @@ from qrcode.image.pure 
import PyPNGImage from qrcode.main import QRCode +from core.tools.builtin_tool.tool import BuiltinTool from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool class QRCodeGeneratorTool(BuiltinTool): diff --git a/api/core/tools/provider/builtin/qrcode/tools/qrcode_generator.yaml b/api/core/tools/builtin_tool/providers/qrcode/tools/qrcode_generator.yaml similarity index 100% rename from api/core/tools/provider/builtin/qrcode/tools/qrcode_generator.yaml rename to api/core/tools/builtin_tool/providers/qrcode/tools/qrcode_generator.yaml diff --git a/api/core/tools/provider/builtin/time/_assets/icon.svg b/api/core/tools/builtin_tool/providers/time/_assets/icon.svg similarity index 100% rename from api/core/tools/provider/builtin/time/_assets/icon.svg rename to api/core/tools/builtin_tool/providers/time/_assets/icon.svg diff --git a/api/core/tools/provider/builtin/time/time.py b/api/core/tools/builtin_tool/providers/time/time.py similarity index 73% rename from api/core/tools/provider/builtin/time/time.py rename to api/core/tools/builtin_tool/providers/time/time.py index e4df8d616cba38..234ca9d9d6389b 100644 --- a/api/core/tools/provider/builtin/time/time.py +++ b/api/core/tools/builtin_tool/providers/time/time.py @@ -1,8 +1,8 @@ from typing import Any +from core.tools.builtin_tool.provider import BuiltinToolProviderController +from core.tools.builtin_tool.providers.time.tools.current_time import CurrentTimeTool from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.time.tools.current_time import CurrentTimeTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController class WikiPediaProvider(BuiltinToolProviderController): diff --git a/api/core/tools/provider/builtin/time/time.yaml b/api/core/tools/builtin_tool/providers/time/time.yaml similarity index 100% rename from api/core/tools/provider/builtin/time/time.yaml rename to 
api/core/tools/builtin_tool/providers/time/time.yaml diff --git a/api/core/tools/provider/builtin/time/tools/current_time.py b/api/core/tools/builtin_tool/providers/time/tools/current_time.py similarity index 94% rename from api/core/tools/provider/builtin/time/tools/current_time.py rename to api/core/tools/builtin_tool/providers/time/tools/current_time.py index cc38739c16f04b..a403c4929651e0 100644 --- a/api/core/tools/provider/builtin/time/tools/current_time.py +++ b/api/core/tools/builtin_tool/providers/time/tools/current_time.py @@ -3,8 +3,8 @@ from pytz import timezone as pytz_timezone +from core.tools.builtin_tool.tool import BuiltinTool from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool class CurrentTimeTool(BuiltinTool): diff --git a/api/core/tools/provider/builtin/time/tools/current_time.yaml b/api/core/tools/builtin_tool/providers/time/tools/current_time.yaml similarity index 100% rename from api/core/tools/provider/builtin/time/tools/current_time.yaml rename to api/core/tools/builtin_tool/providers/time/tools/current_time.yaml diff --git a/api/core/tools/provider/builtin/time/tools/weekday.py b/api/core/tools/builtin_tool/providers/time/tools/weekday.py similarity index 91% rename from api/core/tools/provider/builtin/time/tools/weekday.py rename to api/core/tools/builtin_tool/providers/time/tools/weekday.py index b327e54e171048..570ab624133d2d 100644 --- a/api/core/tools/provider/builtin/time/tools/weekday.py +++ b/api/core/tools/builtin_tool/providers/time/tools/weekday.py @@ -2,8 +2,8 @@ from datetime import datetime from typing import Any, Union +from core.tools.builtin_tool.tool import BuiltinTool from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool class WeekdayTool(BuiltinTool): @@ -17,6 +17,8 @@ def _invoke( """ year = tool_parameters.get("year") month = tool_parameters.get("month") + if month is None: + raise 
ValueError("Month is required") day = tool_parameters.get("day") date_obj = self.convert_datetime(year, month, day) diff --git a/api/core/tools/provider/builtin/time/tools/weekday.yaml b/api/core/tools/builtin_tool/providers/time/tools/weekday.yaml similarity index 100% rename from api/core/tools/provider/builtin/time/tools/weekday.yaml rename to api/core/tools/builtin_tool/providers/time/tools/weekday.yaml diff --git a/api/core/tools/tool/builtin_tool.py b/api/core/tools/builtin_tool/tool.py similarity index 96% rename from api/core/tools/tool/builtin_tool.py rename to api/core/tools/builtin_tool/tool.py index 8edaf7c0e6a5f5..243d99dee3f36c 100644 --- a/api/core/tools/tool/builtin_tool.py +++ b/api/core/tools/builtin_tool/tool.py @@ -1,7 +1,7 @@ from core.model_runtime.entities.llm_entities import LLMResult from core.model_runtime.entities.message_entities import PromptMessage, SystemPromptMessage, UserPromptMessage +from core.tools.__base.tool import Tool from core.tools.entities.tool_entities import ToolProviderType -from core.tools.tool.tool import Tool from core.tools.utils.model_invocation_utils import ModelInvocationUtils from core.tools.utils.web_reader_tool import get_url @@ -32,7 +32,7 @@ def invoke_model(self, user_id: str, prompt_messages: list[PromptMessage], stop: # invoke model return ModelInvocationUtils.invoke( user_id=user_id, - tenant_id=self.runtime.tenant_id, + tenant_id=self.runtime.tenant_id or "", tool_type="builtin", tool_name=self.identity.name, prompt_messages=prompt_messages, @@ -124,7 +124,7 @@ def summarize(content: str) -> str: return result - def get_url(self, url: str, user_agent: str = None) -> str: + def get_url(self, url: str, user_agent: str | None = None) -> str: """ get url """ diff --git a/api/core/tools/provider/api_tool_provider.py b/api/core/tools/custom_tool/provider.py similarity index 97% rename from api/core/tools/provider/api_tool_provider.py rename to api/core/tools/custom_tool/provider.py index 
307cc0a0d957e6..7ebaa6c5c6fc29 100644 --- a/api/core/tools/provider/api_tool_provider.py +++ b/api/core/tools/custom_tool/provider.py @@ -1,14 +1,14 @@ from pydantic import Field from core.entities.provider_entities import ProviderConfig +from core.tools.__base.tool_provider import ToolProviderController +from core.tools.custom_tool.tool import ApiTool from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ( ApiProviderAuthType, ToolProviderType, ) -from core.tools.provider.tool_provider import ToolProviderController -from core.tools.tool.api_tool import ApiTool from extensions.ext_database import db from models.tools import ApiToolProvider @@ -67,7 +67,8 @@ def from_db(db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> "Ap else: raise ValueError(f"invalid auth type {auth_type}") - user_name = db_provider.user.name if db_provider.user_id else "" + user = db_provider.user + user_name = user.name if user else "" return ApiToolProviderController( **{ diff --git a/api/core/tools/tool/api_tool.py b/api/core/tools/custom_tool/tool.py similarity index 96% rename from api/core/tools/tool/api_tool.py rename to api/core/tools/custom_tool/tool.py index 87f2514ce2a514..9a728bb684b664 100644 --- a/api/core/tools/tool/api_tool.py +++ b/api/core/tools/custom_tool/tool.py @@ -7,10 +7,10 @@ import httpx from core.helper import ssrf_proxy +from core.tools.__base.tool import Tool from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ToolInvokeMessage, ToolProviderType from core.tools.errors import ToolInvokeError, ToolParameterValidationError, ToolProviderCredentialValidationError -from core.tools.tool.tool import Tool API_TOOL_DEFAULT_TIMEOUT = ( int(getenv("API_TOOL_DEFAULT_CONNECT_TIMEOUT", "10")), @@ -33,10 +33,10 @@ def fork_tool_runtime(self, runtime: dict[str, Any]) -> "Tool": :return: the new tool """ 
return self.__class__( - identity=self.identity.model_copy() if self.identity else None, - parameters=self.parameters.copy() if self.parameters else None, + identity=self.identity.model_copy(), + parameters=self.parameters.copy() if self.parameters else [], description=self.description.model_copy() if self.description else None, - api_bundle=self.api_bundle.model_copy() if self.api_bundle else None, + api_bundle=self.api_bundle.model_copy(), runtime=Tool.Runtime(**runtime), ) @@ -60,6 +60,9 @@ def tool_provider_type(self) -> ToolProviderType: return ToolProviderType.API def assembling_request(self, parameters: dict[str, Any]) -> dict[str, Any]: + if self.runtime == None: + raise ToolProviderCredentialValidationError("runtime not initialized") + headers = {} credentials = self.runtime.credentials or {} @@ -88,7 +91,7 @@ def assembling_request(self, parameters: dict[str, Any]) -> dict[str, Any]: headers[api_key_header] = credentials["api_key_value"] - needed_parameters = [parameter for parameter in self.api_bundle.parameters if parameter.required] + needed_parameters = [parameter for parameter in (self.api_bundle.parameters or []) if parameter.required] for parameter in needed_parameters: if parameter.required and parameter.name not in parameters: raise ToolParameterValidationError(f"Missing required parameter {parameter.name}") @@ -204,7 +207,7 @@ def do_http_request( ) return response else: - raise ValueError(f"Invalid http method {self.method}") + raise ValueError(f"Invalid http method {method}") def _convert_body_property_any_of( self, property: dict[str, Any], value: Any, any_of: list[dict[str, Any]], max_recursive=10 diff --git a/api/core/tools/entities/api_entities.py b/api/core/tools/entities/api_entities.py index ae84338cdc6958..b3a98b4a6dcf79 100644 --- a/api/core/tools/entities/api_entities.py +++ b/api/core/tools/entities/api_entities.py @@ -4,9 +4,9 @@ from core.entities.provider_entities import ProviderConfig from core.model_runtime.utils.encoders 
import jsonable_encoder +from core.tools.__base.tool import ToolParameter from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ToolProviderType -from core.tools.tool.tool import ToolParameter class UserTool(BaseModel): diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index d8383539bbaed7..cc84f6eaada186 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -32,6 +32,7 @@ class ToolProviderType(str, Enum): Enum class for tool provider """ + PLUGIN = "plugin" BUILT_IN = "builtin" WORKFLOW = "workflow" API = "api" diff --git a/api/core/tools/plugin_tool/plugin_tool_provider.py b/api/core/tools/plugin_tool/plugin_tool_provider.py new file mode 100644 index 00000000000000..47a78ee3183fda --- /dev/null +++ b/api/core/tools/plugin_tool/plugin_tool_provider.py @@ -0,0 +1,30 @@ + + +from core.entities.provider_entities import ProviderConfig +from core.tools.__base.tool import Tool +from core.tools.__base.tool_provider import ToolProviderController +from core.tools.entities.tool_entities import ToolProviderType + + +class PluginToolProvider(ToolProviderController): + @property + def provider_type(self) -> ToolProviderType: + """ + returns the type of the provider + + :return: type of the provider + """ + return ToolProviderType.PLUGIN + + def get_tool(self, tool_name: str) -> Tool: + """ + return tool with given name + """ + return super().get_tool(tool_name) + + def get_credentials_schema(self) -> dict[str, ProviderConfig]: + """ + get credentials schema + """ + return super().get_credentials_schema() + \ No newline at end of file diff --git a/api/core/tools/provider/_position.yaml b/api/core/tools/provider/_position.yaml deleted file mode 100644 index 40c3356116770b..00000000000000 --- a/api/core/tools/provider/_position.yaml +++ /dev/null @@ -1,38 +0,0 @@ -- google -- bing -- perplexity -- duckduckgo -- searchapi -- serper 
-- searxng -- dalle -- azuredalle -- stability -- wikipedia -- nominatim -- yahoo -- alphavantage -- arxiv -- pubmed -- stablediffusion -- webscraper -- jina -- aippt -- youtube -- code -- wolframalpha -- maths -- github -- chart -- time -- vectorizer -- gaode -- wecom -- qrcode -- dingtalk -- feishu -- feishu_base -- feishu_document -- feishu_message -- slack -- tianditu diff --git a/api/core/tools/provider/app_tool_provider.py b/api/core/tools/provider/app_tool_provider.py deleted file mode 100644 index 09f328cd1fe65f..00000000000000 --- a/api/core/tools/provider/app_tool_provider.py +++ /dev/null @@ -1,103 +0,0 @@ -import logging -from typing import Any - -from core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import ToolParameter, ToolParameterOption, ToolProviderType -from core.tools.provider.tool_provider import ToolProviderController -from core.tools.tool.tool import Tool -from extensions.ext_database import db -from models.model import App, AppModelConfig -from models.tools import PublishedAppTool - -logger = logging.getLogger(__name__) - - -class AppToolProviderEntity(ToolProviderController): - @property - def provider_type(self) -> ToolProviderType: - return ToolProviderType.APP - - def _validate_credentials(self, tool_name: str, credentials: dict[str, Any]) -> None: - pass - - def validate_parameters(self, tool_name: str, tool_parameters: dict[str, Any]) -> None: - pass - - def get_tools(self, user_id: str) -> list[Tool]: - db_tools: list[PublishedAppTool] = ( - db.session.query(PublishedAppTool) - .filter( - PublishedAppTool.user_id == user_id, - ) - .all() - ) - - if not db_tools or len(db_tools) == 0: - return [] - - tools: list[Tool] = [] - - for db_tool in db_tools: - tool = { - "identity": { - "author": db_tool.author, - "name": db_tool.tool_name, - "label": {"en_US": db_tool.tool_name, "zh_Hans": db_tool.tool_name}, - "icon": "", - }, - "description": { - "human": {"en_US": 
db_tool.description_i18n.en_US, "zh_Hans": db_tool.description_i18n.zh_Hans}, - "llm": db_tool.llm_description, - }, - "parameters": [], - } - # get app from db - app: App = db_tool.app - - if not app: - logger.error(f"app {db_tool.app_id} not found") - continue - - app_model_config: AppModelConfig = app.app_model_config - user_input_form_list = app_model_config.user_input_form_list - for input_form in user_input_form_list: - # get type - form_type = input_form.keys()[0] - default = input_form[form_type]["default"] - required = input_form[form_type]["required"] - label = input_form[form_type]["label"] - variable_name = input_form[form_type]["variable_name"] - options = input_form[form_type].get("options", []) - if form_type in {"paragraph", "text-input"}: - tool["parameters"].append( - ToolParameter( - name=variable_name, - label=I18nObject(en_US=label, zh_Hans=label), - human_description=I18nObject(en_US=label, zh_Hans=label), - llm_description=label, - form=ToolParameter.ToolParameterForm.FORM, - type=ToolParameter.ToolParameterType.STRING, - required=required, - default=default, - ) - ) - elif form_type == "select": - tool["parameters"].append( - ToolParameter( - name=variable_name, - label=I18nObject(en_US=label, zh_Hans=label), - human_description=I18nObject(en_US=label, zh_Hans=label), - llm_description=label, - form=ToolParameter.ToolParameterForm.FORM, - type=ToolParameter.ToolParameterType.SELECT, - required=required, - default=default, - options=[ - ToolParameterOption(value=option, label=I18nObject(en_US=option, zh_Hans=option)) - for option in options - ], - ) - ) - - tools.append(Tool(**tool)) - return tools diff --git a/api/core/tools/provider/builtin/aippt/_assets/icon.png b/api/core/tools/provider/builtin/aippt/_assets/icon.png deleted file mode 100644 index b70618b4878984f88dd1991a64cc69bc0afeaaa8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1961 
zcmV;a2UhrrP)}be?ilJh^tl7C04E7Km3nmuE)r9#}WA}rRl?ISYbedqU*T|WL)~Yw( zP?|Jh;?dp!(3&jVCkLS8&FUWaO>-bs*(NZtmjFC017KdFu(8U=ed`$sqx}y-6ABrr zGa%WqqYeovwib-*3jm<3Sy=1}AX(UyH4hD|O7k_sW`b#b2>_H$wBI7k-Ykyv)Bg@RW?`53 zId_Ix99W&^*D=E&>{#Uh$ZKx=*zkGdPc-Ec?Ia`?YN2WsA!HC$`4%v&A^=eCiYRQE zoUwc-%#sj55az4wR$5L9E#_ z=Ibgu5R0WoKwAXhQ@Y+9rz$%xH=AKC&=vvsHs8KV7_&`o+_#rQTLchHs$qqc`HJ;@ zG@7{*0R*53vF?$a@%*LNr{d7Id;rZ61Rz9d^C&~*2c!THJ{#KViZxv)a9`M34I#F_ z?tSLO-hnn?f%O7vay9W=mEA)(Qx~hYZDf zQ7+YBZHdR@L!lzVAZ#maLjYw;C-!tnV~%SYGfHU+`)SyQ0Lm00sWDV#hoz2w!XGbP zGnyxhzfU6oKLu5Ue=23Z!q#=ADfljeS`#C$$Oa@;=}4yP4pivWCz5Uz?H{ZnqrX*~jM5rA*=ZyO|b@=M91e-*Sv009Vc z>vE|Ew!<(cUyA?&F+Vj`rl5)v))`eT8`>fOpPEpYxrVB2x8%{k5ZWSu07QxPw%mN5 zy$=Od2p|CIVf!)3YPQ2M!c}OC0DMZ*(&J?x8;y{33$#T5!Q5DXloGz$Ym}-Qf|ohk z93uK()AM(**TzL^T&C|<^JK2oY+vmZ*<}0LIq;M`3WY+UP$(3tFEwTi8ywYmV@$V` zJq4xB3BBIdP1E%JqASFE9o%7hLP z+bOYLq2crtxBew7EFjJpg#Qld4_&op5t6$4!i!4Y7;v@4(LMn{RUQlA%YSUBDboSK zCw}^-D%(d1dn%vL_dl;}G8rTQ;lrUz834qw2;uk20FbOX-58Hg9f|X&Hb3+@lR%HCYJ&5LDq$)DnO;gLDsm(Bu7s#U210?cVpfvHqkNz@}*q3H%~V zF2aj&EH-KEe_u@yy}UGQvX%-M&ab9X*e)0<+yLk*dm(h5pu|!1{XkEp?eH0>NzycS zgFe=!&? 
z**##J+ZyAN0szunUdl@A+01eQfMCXB5Vp&q|F7H&4$>K??*%UysTu%qbKg*T!J%$+ zkskmcL*Y(O=p(MAdTR>V3(<#S?cV^_vh5f00e*l5C8%|00;m9AOHkVbpT)`ONq7G2(#DDi&Z96xcUFTgl9p2=SKvx vniW!boJF1XNl~T@3!zXb6bgkxp?v5cJQIugpWMps00000NkvXXu0mjf0Q!Xz diff --git a/api/core/tools/provider/builtin/aippt/aippt.py b/api/core/tools/provider/builtin/aippt/aippt.py deleted file mode 100644 index e0cbbd2992a515..00000000000000 --- a/api/core/tools/provider/builtin/aippt/aippt.py +++ /dev/null @@ -1,11 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.aippt.tools.aippt import AIPPTGenerateTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class AIPPTProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - AIPPTGenerateTool._get_api_token(credentials, user_id="__dify_system__") - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/aippt/aippt.yaml b/api/core/tools/provider/builtin/aippt/aippt.yaml deleted file mode 100644 index 9b1b45d0f21a73..00000000000000 --- a/api/core/tools/provider/builtin/aippt/aippt.yaml +++ /dev/null @@ -1,45 +0,0 @@ -identity: - author: Dify - name: aippt - label: - en_US: AIPPT - zh_Hans: AIPPT - description: - en_US: AI-generated PPT with one click, input your content topic, and let AI serve you one-stop - zh_Hans: AI一键生成PPT,输入你的内容主题,让AI为你一站式服务到底 - icon: icon.png - tags: - - productivity - - design -credentials_for_provider: - aippt_access_key: - type: secret-input - required: true - label: - en_US: AIPPT API key - zh_Hans: AIPPT API key - pt_BR: AIPPT API key - help: - en_US: Please input your AIPPT API key - zh_Hans: 请输入你的 AIPPT API key - pt_BR: Please input your AIPPT API key - placeholder: - en_US: Please input your AIPPT API key - zh_Hans: 请输入你的 AIPPT API key - pt_BR: Please input your AIPPT API key - url: https://www.aippt.cn - aippt_secret_key: 
- type: secret-input - required: true - label: - en_US: AIPPT Secret key - zh_Hans: AIPPT Secret key - pt_BR: AIPPT Secret key - help: - en_US: Please input your AIPPT Secret key - zh_Hans: 请输入你的 AIPPT Secret key - pt_BR: Please input your AIPPT Secret key - placeholder: - en_US: Please input your AIPPT Secret key - zh_Hans: 请输入你的 AIPPT Secret key - pt_BR: Please input your AIPPT Secret key diff --git a/api/core/tools/provider/builtin/aippt/tools/aippt.py b/api/core/tools/provider/builtin/aippt/tools/aippt.py deleted file mode 100644 index dd9371f70d63f5..00000000000000 --- a/api/core/tools/provider/builtin/aippt/tools/aippt.py +++ /dev/null @@ -1,498 +0,0 @@ -from base64 import b64encode -from hashlib import sha1 -from hmac import new as hmac_new -from json import loads as json_loads -from threading import Lock -from time import sleep, time -from typing import Any, Optional - -from httpx import get, post -from requests import get as requests_get -from yarl import URL - -from core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter, ToolParameterOption -from core.tools.tool.builtin_tool import BuiltinTool - - -class AIPPTGenerateTool(BuiltinTool): - """ - A tool for generating a ppt - """ - - _api_base_url = URL("https://co.aippt.cn/api") - _api_token_cache = {} - _api_token_cache_lock: Optional[Lock] = None - _style_cache = {} - _style_cache_lock: Optional[Lock] = None - - _task = {} - _task_type_map = { - "auto": 1, - "markdown": 7, - } - - def __init__(self, **kwargs: Any): - super().__init__(**kwargs) - self._api_token_cache_lock = Lock() - self._style_cache_lock = Lock() - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: - """ - Invokes the AIPPT generate tool with the given user ID and tool parameters. - - Args: - user_id (str): The ID of the user invoking the tool. 
- tool_parameters (dict[str, Any]): The parameters for the tool - - Returns: - ToolInvokeMessage | list[ToolInvokeMessage]: The result of the tool invocation, - which can be a single message or a list of messages. - """ - title = tool_parameters.get("title", "") - if not title: - return self.create_text_message("Please provide a title for the ppt") - - model = tool_parameters.get("model", "aippt") - if not model: - return self.create_text_message("Please provide a model for the ppt") - - outline = tool_parameters.get("outline", "") - - # create task - task_id = self._create_task( - type=self._task_type_map["auto" if not outline else "markdown"], - title=title, - content=outline, - user_id=user_id, - ) - - # get suit - color = tool_parameters.get("color") - style = tool_parameters.get("style") - - if color == "__default__": - color_id = "" - else: - color_id = int(color.split("-")[1]) - - if style == "__default__": - style_id = "" - else: - style_id = int(style.split("-")[1]) - - suit_id = self._get_suit(style_id=style_id, colour_id=color_id) - - # generate outline - if not outline: - self._generate_outline(task_id=task_id, model=model, user_id=user_id) - - # generate content - self._generate_content(task_id=task_id, model=model, user_id=user_id) - - # generate ppt - _, ppt_url = self._generate_ppt(task_id=task_id, suit_id=suit_id, user_id=user_id) - - return self.create_text_message( - """the ppt has been created successfully,""" - f"""the ppt url is {ppt_url}""" - """please give the ppt url to user and direct user to download it.""" - ) - - def _create_task(self, type: int, title: str, content: str, user_id: str) -> str: - """ - Create a task - - :param type: the task type - :param title: the task title - :param content: the task content - - :return: the task ID - """ - headers = { - "x-channel": "", - "x-api-key": self.runtime.credentials["aippt_access_key"], - "x-token": self._get_api_token(credentials=self.runtime.credentials, user_id=user_id), - } - response = 
post( - str(self._api_base_url / "ai" / "chat" / "v2" / "task"), - headers=headers, - files={"type": ("", str(type)), "title": ("", title), "content": ("", content)}, - ) - - if response.status_code != 200: - raise Exception(f"Failed to connect to aippt: {response.text}") - - response = response.json() - if response.get("code") != 0: - raise Exception(f'Failed to create task: {response.get("msg")}') - - return response.get("data", {}).get("id") - - def _generate_outline(self, task_id: str, model: str, user_id: str) -> str: - api_url = ( - self._api_base_url / "ai" / "chat" / "outline" - if model == "aippt" - else self._api_base_url / "ai" / "chat" / "wx" / "outline" - ) - api_url %= {"task_id": task_id} - - headers = { - "x-channel": "", - "x-api-key": self.runtime.credentials["aippt_access_key"], - "x-token": self._get_api_token(credentials=self.runtime.credentials, user_id=user_id), - } - - response = requests_get(url=api_url, headers=headers, stream=True, timeout=(10, 60)) - - if response.status_code != 200: - raise Exception(f"Failed to connect to aippt: {response.text}") - - outline = "" - for chunk in response.iter_lines(delimiter=b"\n\n"): - if not chunk: - continue - - event = "" - lines = chunk.decode("utf-8").split("\n") - for line in lines: - if line.startswith("event:"): - event = line[6:] - elif line.startswith("data:"): - data = line[5:] - if event == "message": - try: - data = json_loads(data) - outline += data.get("content", "") - except Exception as e: - pass - elif event == "close": - break - elif event in {"error", "filter"}: - raise Exception(f"Failed to generate outline: {data}") - - return outline - - def _generate_content(self, task_id: str, model: str, user_id: str) -> str: - api_url = ( - self._api_base_url / "ai" / "chat" / "content" - if model == "aippt" - else self._api_base_url / "ai" / "chat" / "wx" / "content" - ) - api_url %= {"task_id": task_id} - - headers = { - "x-channel": "", - "x-api-key": 
self.runtime.credentials["aippt_access_key"], - "x-token": self._get_api_token(credentials=self.runtime.credentials, user_id=user_id), - } - - response = requests_get(url=api_url, headers=headers, stream=True, timeout=(10, 60)) - - if response.status_code != 200: - raise Exception(f"Failed to connect to aippt: {response.text}") - - if model == "aippt": - content = "" - for chunk in response.iter_lines(delimiter=b"\n\n"): - if not chunk: - continue - - event = "" - lines = chunk.decode("utf-8").split("\n") - for line in lines: - if line.startswith("event:"): - event = line[6:] - elif line.startswith("data:"): - data = line[5:] - if event == "message": - try: - data = json_loads(data) - content += data.get("content", "") - except Exception as e: - pass - elif event == "close": - break - elif event in {"error", "filter"}: - raise Exception(f"Failed to generate content: {data}") - - return content - elif model == "wenxin": - response = response.json() - if response.get("code") != 0: - raise Exception(f'Failed to generate content: {response.get("msg")}') - - return response.get("data", "") - - return "" - - def _generate_ppt(self, task_id: str, suit_id: int, user_id) -> tuple[str, str]: - """ - Generate a ppt - - :param task_id: the task ID - :param suit_id: the suit ID - :return: the cover url of the ppt and the ppt url - """ - headers = { - "x-channel": "", - "x-api-key": self.runtime.credentials["aippt_access_key"], - "x-token": self._get_api_token(credentials=self.runtime.credentials, user_id=user_id), - } - - response = post( - str(self._api_base_url / "design" / "v2" / "save"), - headers=headers, - data={"task_id": task_id, "template_id": suit_id}, - ) - - if response.status_code != 200: - raise Exception(f"Failed to connect to aippt: {response.text}") - - response = response.json() - if response.get("code") != 0: - raise Exception(f'Failed to generate ppt: {response.get("msg")}') - - id = response.get("data", {}).get("id") - cover_url = response.get("data", 
{}).get("cover_url") - - response = post( - str(self._api_base_url / "download" / "export" / "file"), - headers=headers, - data={"id": id, "format": "ppt", "files_to_zip": False, "edit": True}, - ) - - if response.status_code != 200: - raise Exception(f"Failed to connect to aippt: {response.text}") - - response = response.json() - if response.get("code") != 0: - raise Exception(f'Failed to generate ppt: {response.get("msg")}') - - export_code = response.get("data") - if not export_code: - raise Exception("Failed to generate ppt, the export code is empty") - - current_iteration = 0 - while current_iteration < 50: - # get ppt url - response = post( - str(self._api_base_url / "download" / "export" / "file" / "result"), - headers=headers, - data={"task_key": export_code}, - ) - - if response.status_code != 200: - raise Exception(f"Failed to connect to aippt: {response.text}") - - response = response.json() - if response.get("code") != 0: - raise Exception(f'Failed to generate ppt: {response.get("msg")}') - - if response.get("msg") == "导出中": - current_iteration += 1 - sleep(2) - continue - - ppt_url = response.get("data", []) - if len(ppt_url) == 0: - raise Exception("Failed to generate ppt, the ppt url is empty") - - return cover_url, ppt_url[0] - - raise Exception("Failed to generate ppt, the export is timeout") - - @classmethod - def _get_api_token(cls, credentials: dict[str, str], user_id: str) -> str: - """ - Get API token - - :param credentials: the credentials - :return: the API token - """ - access_key = credentials["aippt_access_key"] - secret_key = credentials["aippt_secret_key"] - - cache_key = f"{access_key}#@#{user_id}" - - with cls._api_token_cache_lock: - # clear expired tokens - now = time() - for key in list(cls._api_token_cache.keys()): - if cls._api_token_cache[key]["expire"] < now: - del cls._api_token_cache[key] - - if cache_key in cls._api_token_cache: - return cls._api_token_cache[cache_key]["token"] - - # get token - headers = { - "x-api-key": 
access_key, - "x-timestamp": str(int(now)), - "x-signature": cls._calculate_sign(access_key, secret_key, int(now)), - } - - param = {"uid": user_id, "channel": ""} - - response = get(str(cls._api_base_url / "grant" / "token"), params=param, headers=headers) - - if response.status_code != 200: - raise Exception(f"Failed to connect to aippt: {response.text}") - response = response.json() - if response.get("code") != 0: - raise Exception(f'Failed to connect to aippt: {response.get("msg")}') - - token = response.get("data", {}).get("token") - expire = response.get("data", {}).get("time_expire") - - with cls._api_token_cache_lock: - cls._api_token_cache[cache_key] = {"token": token, "expire": now + expire} - - return token - - @classmethod - def _calculate_sign(cls, access_key: str, secret_key: str, timestamp: int) -> str: - return b64encode( - hmac_new( - key=secret_key.encode("utf-8"), msg=f"GET@/api/grant/token/@{timestamp}".encode(), digestmod=sha1 - ).digest() - ).decode("utf-8") - - @classmethod - def _get_styles(cls, credentials: dict[str, str], user_id: str) -> tuple[list[dict], list[dict]]: - """ - Get styles - """ - - # check cache - with cls._style_cache_lock: - # clear expired styles - now = time() - for key in list(cls._style_cache.keys()): - if cls._style_cache[key]["expire"] < now: - del cls._style_cache[key] - - key = f'{credentials["aippt_access_key"]}#@#{user_id}' - if key in cls._style_cache: - return cls._style_cache[key]["colors"], cls._style_cache[key]["styles"] - - headers = { - "x-channel": "", - "x-api-key": credentials["aippt_access_key"], - "x-token": cls._get_api_token(credentials=credentials, user_id=user_id), - } - response = get(str(cls._api_base_url / "template_component" / "suit" / "select"), headers=headers) - - if response.status_code != 200: - raise Exception(f"Failed to connect to aippt: {response.text}") - - response = response.json() - - if response.get("code") != 0: - raise Exception(f'Failed to connect to aippt: 
{response.get("msg")}') - - colors = [ - { - "id": f'id-{item.get("id")}', - "name": item.get("name"), - "en_name": item.get("en_name", item.get("name")), - } - for item in response.get("data", {}).get("colour") or [] - ] - styles = [ - { - "id": f'id-{item.get("id")}', - "name": item.get("title"), - } - for item in response.get("data", {}).get("suit_style") or [] - ] - - with cls._style_cache_lock: - cls._style_cache[key] = {"colors": colors, "styles": styles, "expire": now + 60 * 60} - - return colors, styles - - def get_styles(self, user_id: str) -> tuple[list[dict], list[dict]]: - """ - Get styles - - :param credentials: the credentials - :return: Tuple[list[dict[id, color]], list[dict[id, style]] - """ - if not self.runtime.credentials.get("aippt_access_key") or not self.runtime.credentials.get("aippt_secret_key"): - raise Exception("Please provide aippt credentials") - - return self._get_styles(credentials=self.runtime.credentials, user_id=user_id) - - def _get_suit(self, style_id: int, colour_id: int) -> int: - """ - Get suit - """ - headers = { - "x-channel": "", - "x-api-key": self.runtime.credentials["aippt_access_key"], - "x-token": self._get_api_token(credentials=self.runtime.credentials, user_id="__dify_system__"), - } - response = get( - str(self._api_base_url / "template_component" / "suit" / "search"), - headers=headers, - params={"style_id": style_id, "colour_id": colour_id, "page": 1, "page_size": 1}, - ) - - if response.status_code != 200: - raise Exception(f"Failed to connect to aippt: {response.text}") - - response = response.json() - - if response.get("code") != 0: - raise Exception(f'Failed to connect to aippt: {response.get("msg")}') - - if len(response.get("data", {}).get("list") or []) > 0: - return response.get("data", {}).get("list")[0].get("id") - - raise Exception("Failed to get suit, the suit does not exist, please check the style and color") - - def get_runtime_parameters(self) -> list[ToolParameter]: - """ - Get runtime parameters - 
- Override this method to add runtime parameters to the tool. - """ - try: - colors, styles = self.get_styles(user_id="__dify_system__") - except Exception as e: - colors, styles = ( - [{"id": "-1", "name": "__default__", "en_name": "__default__"}], - [{"id": "-1", "name": "__default__", "en_name": "__default__"}], - ) - - return [ - ToolParameter( - name="color", - label=I18nObject(zh_Hans="颜色", en_US="Color"), - human_description=I18nObject(zh_Hans="颜色", en_US="Color"), - type=ToolParameter.ToolParameterType.SELECT, - form=ToolParameter.ToolParameterForm.FORM, - required=False, - default=colors[0]["id"], - options=[ - ToolParameterOption( - value=color["id"], label=I18nObject(zh_Hans=color["name"], en_US=color["en_name"]) - ) - for color in colors - ], - ), - ToolParameter( - name="style", - label=I18nObject(zh_Hans="风格", en_US="Style"), - human_description=I18nObject(zh_Hans="风格", en_US="Style"), - type=ToolParameter.ToolParameterType.SELECT, - form=ToolParameter.ToolParameterForm.FORM, - required=False, - default=styles[0]["id"], - options=[ - ToolParameterOption(value=style["id"], label=I18nObject(zh_Hans=style["name"], en_US=style["name"])) - for style in styles - ], - ), - ] diff --git a/api/core/tools/provider/builtin/aippt/tools/aippt.yaml b/api/core/tools/provider/builtin/aippt/tools/aippt.yaml deleted file mode 100644 index d35798ad66106e..00000000000000 --- a/api/core/tools/provider/builtin/aippt/tools/aippt.yaml +++ /dev/null @@ -1,54 +0,0 @@ -identity: - name: aippt - author: Dify - label: - en_US: AIPPT - zh_Hans: AIPPT -description: - human: - en_US: AI-generated PPT with one click, input your content topic, and let AI serve you one-stop - zh_Hans: AI一键生成PPT,输入你的内容主题,让AI为你一站式服务到底 - llm: A tool used to generate PPT with AI, input your content topic, and let AI generate PPT for you. -parameters: - - name: title - type: string - required: true - label: - en_US: Title - zh_Hans: 标题 - human_description: - en_US: The title of the PPT. 
- zh_Hans: PPT的标题。 - llm_description: The title of the PPT, which will be used to generate the PPT outline. - form: llm - - name: outline - type: string - required: false - label: - en_US: Outline - zh_Hans: 大纲 - human_description: - en_US: The outline of the PPT - zh_Hans: PPT的大纲 - llm_description: The outline of the PPT, which will be used to generate the PPT content. provide it if you have. - form: llm - - name: llm - type: select - required: true - label: - en_US: LLM model - zh_Hans: 生成大纲的LLM - options: - - value: aippt - label: - en_US: AIPPT default model - zh_Hans: AIPPT默认模型 - - value: wenxin - label: - en_US: Wenxin ErnieBot - zh_Hans: 文心一言 - default: aippt - human_description: - en_US: The LLM model used for generating PPT outline. - zh_Hans: 用于生成PPT大纲的LLM模型。 - form: form diff --git a/api/core/tools/provider/builtin/alphavantage/_assets/icon.svg b/api/core/tools/provider/builtin/alphavantage/_assets/icon.svg deleted file mode 100644 index 785432943bc148..00000000000000 --- a/api/core/tools/provider/builtin/alphavantage/_assets/icon.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - 形状结合 - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/alphavantage/alphavantage.py b/api/core/tools/provider/builtin/alphavantage/alphavantage.py deleted file mode 100644 index a84630e5aa990a..00000000000000 --- a/api/core/tools/provider/builtin/alphavantage/alphavantage.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.alphavantage.tools.query_stock import QueryStockTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class AlphaVantageProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - QueryStockTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "code": "AAPL", # 
Apple Inc. - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/alphavantage/alphavantage.yaml b/api/core/tools/provider/builtin/alphavantage/alphavantage.yaml deleted file mode 100644 index 710510cfd8ed4a..00000000000000 --- a/api/core/tools/provider/builtin/alphavantage/alphavantage.yaml +++ /dev/null @@ -1,31 +0,0 @@ -identity: - author: zhuhao - name: alphavantage - label: - en_US: AlphaVantage - zh_Hans: AlphaVantage - pt_BR: AlphaVantage - description: - en_US: AlphaVantage is an online platform that provides financial market data and APIs, making it convenient for individual investors and developers to access stock quotes, technical indicators, and stock analysis. - zh_Hans: AlphaVantage是一个在线平台,它提供金融市场数据和API,便于个人投资者和开发者获取股票报价、技术指标和股票分析。 - pt_BR: AlphaVantage is an online platform that provides financial market data and APIs, making it convenient for individual investors and developers to access stock quotes, technical indicators, and stock analysis. 
- icon: icon.svg - tags: - - finance -credentials_for_provider: - api_key: - type: secret-input - required: true - label: - en_US: AlphaVantage API key - zh_Hans: AlphaVantage API key - pt_BR: AlphaVantage API key - placeholder: - en_US: Please input your AlphaVantage API key - zh_Hans: 请输入你的 AlphaVantage API key - pt_BR: Please input your AlphaVantage API key - help: - en_US: Get your AlphaVantage API key from AlphaVantage - zh_Hans: 从 AlphaVantage 获取您的 AlphaVantage API key - pt_BR: Get your AlphaVantage API key from AlphaVantage - url: https://www.alphavantage.co/support/#api-key diff --git a/api/core/tools/provider/builtin/alphavantage/tools/query_stock.py b/api/core/tools/provider/builtin/alphavantage/tools/query_stock.py deleted file mode 100644 index d06611acd05d1d..00000000000000 --- a/api/core/tools/provider/builtin/alphavantage/tools/query_stock.py +++ /dev/null @@ -1,48 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -ALPHAVANTAGE_API_URL = "https://www.alphavantage.co/query" - - -class QueryStockTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - stock_code = tool_parameters.get("code", "") - if not stock_code: - return self.create_text_message("Please tell me your stock code") - - if "api_key" not in self.runtime.credentials or not self.runtime.credentials.get("api_key"): - return self.create_text_message("Alpha Vantage API key is required.") - - params = { - "function": "TIME_SERIES_DAILY", - "symbol": stock_code, - "outputsize": "compact", - "datatype": "json", - "apikey": self.runtime.credentials["api_key"], - } - response = requests.get(url=ALPHAVANTAGE_API_URL, params=params) - response.raise_for_status() - result = self._handle_response(response.json()) - return self.create_json_message(result) - - def 
_handle_response(self, response: dict[str, Any]) -> dict[str, Any]: - result = response.get("Time Series (Daily)", {}) - if not result: - return {} - stock_result = {} - for k, v in result.items(): - stock_result[k] = {} - stock_result[k]["open"] = v.get("1. open") - stock_result[k]["high"] = v.get("2. high") - stock_result[k]["low"] = v.get("3. low") - stock_result[k]["close"] = v.get("4. close") - stock_result[k]["volume"] = v.get("5. volume") - return stock_result diff --git a/api/core/tools/provider/builtin/alphavantage/tools/query_stock.yaml b/api/core/tools/provider/builtin/alphavantage/tools/query_stock.yaml deleted file mode 100644 index d89f34e373f9fa..00000000000000 --- a/api/core/tools/provider/builtin/alphavantage/tools/query_stock.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: query_stock - author: zhuhao - label: - en_US: query_stock - zh_Hans: query_stock - pt_BR: query_stock -description: - human: - en_US: Retrieve information such as daily opening price, daily highest price, daily lowest price, daily closing price, and daily trading volume for a specified stock symbol. 
- zh_Hans: 获取指定股票代码的每日开盘价、每日最高价、每日最低价、每日收盘价和每日交易量等信息。 - pt_BR: Retrieve information such as daily opening price, daily highest price, daily lowest price, daily closing price, and daily trading volume for a specified stock symbol - llm: Retrieve information such as daily opening price, daily highest price, daily lowest price, daily closing price, and daily trading volume for a specified stock symbol -parameters: - - name: code - type: string - required: true - label: - en_US: stock code - zh_Hans: 股票代码 - pt_BR: stock code - human_description: - en_US: stock code - zh_Hans: 股票代码 - pt_BR: stock code - llm_description: stock code for query from alphavantage - form: llm diff --git a/api/core/tools/provider/builtin/arxiv/_assets/icon.svg b/api/core/tools/provider/builtin/arxiv/_assets/icon.svg deleted file mode 100644 index 0e60f635739993..00000000000000 --- a/api/core/tools/provider/builtin/arxiv/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/arxiv/arxiv.py b/api/core/tools/provider/builtin/arxiv/arxiv.py deleted file mode 100644 index ebb2d1a8c47be9..00000000000000 --- a/api/core/tools/provider/builtin/arxiv/arxiv.py +++ /dev/null @@ -1,20 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.arxiv.tools.arxiv_search import ArxivSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class ArxivProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - ArxivSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "query": "John Doe", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/arxiv/arxiv.yaml b/api/core/tools/provider/builtin/arxiv/arxiv.yaml deleted file mode 100644 index 
d26993b3364ea1..00000000000000 --- a/api/core/tools/provider/builtin/arxiv/arxiv.yaml +++ /dev/null @@ -1,12 +0,0 @@ -identity: - author: Yash Parmar - name: arxiv - label: - en_US: ArXiv - zh_Hans: ArXiv - description: - en_US: Access to a vast repository of scientific papers and articles in various fields of research. - zh_Hans: 访问各个研究领域大量科学论文和文章的存储库。 - icon: icon.svg - tags: - - search diff --git a/api/core/tools/provider/builtin/arxiv/tools/arxiv_search.py b/api/core/tools/provider/builtin/arxiv/tools/arxiv_search.py deleted file mode 100644 index 2d65ba2d6f4389..00000000000000 --- a/api/core/tools/provider/builtin/arxiv/tools/arxiv_search.py +++ /dev/null @@ -1,119 +0,0 @@ -import logging -from typing import Any, Optional - -import arxiv -from pydantic import BaseModel, Field - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -logger = logging.getLogger(__name__) - - -class ArxivAPIWrapper(BaseModel): - """Wrapper around ArxivAPI. - - To use, you should have the ``arxiv`` python package installed. - https://lukasschwab.me/arxiv.py/index.html - This wrapper will use the Arxiv API to conduct searches and - fetch document summaries. By default, it will return the document summaries - of the top-k results. - It limits the Document content by doc_content_chars_max. - Set doc_content_chars_max=None if you don't want to limit the content size. - - Args: - top_k_results: number of the top-scored document used for the arxiv tool - ARXIV_MAX_QUERY_LENGTH: the cut limit on the query used for the arxiv tool. - load_max_docs: a limit to the number of loaded documents - load_all_available_meta: - if True: the `metadata` of the loaded Documents contains all available - meta info (see https://lukasschwab.me/arxiv.py/index.html#Result), - if False: the `metadata` contains only the published date, title, - authors and summary. 
- doc_content_chars_max: an optional cut limit for the length of a document's - content - - Example: - .. code-block:: python - - arxiv = ArxivAPIWrapper( - top_k_results = 3, - ARXIV_MAX_QUERY_LENGTH = 300, - load_max_docs = 3, - load_all_available_meta = False, - doc_content_chars_max = 40000 - ) - arxiv.run("tree of thought llm) - """ - - arxiv_search: type[arxiv.Search] = arxiv.Search #: :meta private: - arxiv_http_error: tuple[type[Exception]] = (arxiv.ArxivError, arxiv.UnexpectedEmptyPageError, arxiv.HTTPError) - top_k_results: int = 3 - ARXIV_MAX_QUERY_LENGTH: int = 300 - load_max_docs: int = 100 - load_all_available_meta: bool = False - doc_content_chars_max: Optional[int] = 4000 - - def run(self, query: str) -> str: - """ - Performs an arxiv search and A single string - with the publish date, title, authors, and summary - for each article separated by two newlines. - - If an error occurs or no documents found, error text - is returned instead. Wrapper for - https://lukasschwab.me/arxiv.py/index.html#Search - - Args: - query: a plaintext search query - """ - try: - results = self.arxiv_search( # type: ignore - query[: self.ARXIV_MAX_QUERY_LENGTH], max_results=self.top_k_results - ).results() - except arxiv_http_error as ex: - return f"Arxiv exception: {ex}" - docs = [ - f"Published: {result.updated.date()}\n" - f"Title: {result.title}\n" - f"Authors: {', '.join(a.name for a in result.authors)}\n" - f"Summary: {result.summary}" - for result in results - ] - if docs: - return "\n\n".join(docs)[: self.doc_content_chars_max] - else: - return "No good Arxiv Result was found" - - -class ArxivSearchInput(BaseModel): - query: str = Field(..., description="Search query.") - - -class ArxivSearchTool(BuiltinTool): - """ - A tool for searching articles on Arxiv. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: - """ - Invokes the Arxiv search tool with the given user ID and tool parameters. 
- - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Any]): The parameters for the tool, including the 'query' parameter. - - Returns: - ToolInvokeMessage | list[ToolInvokeMessage]: The result of the tool invocation, - which can be a single message or a list of messages. - """ - query = tool_parameters.get("query", "") - - if not query: - return self.create_text_message("Please input query") - - arxiv = ArxivAPIWrapper() - - response = arxiv.run(query) - - return self.create_text_message(self.summary(user_id=user_id, content=response)) diff --git a/api/core/tools/provider/builtin/arxiv/tools/arxiv_search.yaml b/api/core/tools/provider/builtin/arxiv/tools/arxiv_search.yaml deleted file mode 100644 index 7439a48658c6a0..00000000000000 --- a/api/core/tools/provider/builtin/arxiv/tools/arxiv_search.yaml +++ /dev/null @@ -1,23 +0,0 @@ -identity: - name: arxiv_search - author: Yash Parmar - label: - en_US: Arxiv Search - zh_Hans: Arxiv 搜索 -description: - human: - en_US: A tool for searching scientific papers and articles from the Arxiv repository. Input can be an Arxiv ID or an author's name. - zh_Hans: 一个用于从Arxiv存储库搜索科学论文和文章的工具。 输入可以是Arxiv ID或作者姓名。 - llm: A tool for searching scientific papers and articles from the Arxiv repository. Input can be an Arxiv ID or an author's name. -parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询字符串 - human_description: - en_US: The Arxiv ID or author's name used for searching. - zh_Hans: 用于搜索的Arxiv ID或作者姓名。 - llm_description: The Arxiv ID or author's name used for searching. 
- form: llm diff --git a/api/core/tools/provider/builtin/aws/_assets/icon.svg b/api/core/tools/provider/builtin/aws/_assets/icon.svg deleted file mode 100644 index ecfcfc08d4eeff..00000000000000 --- a/api/core/tools/provider/builtin/aws/_assets/icon.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/aws/aws.py b/api/core/tools/provider/builtin/aws/aws.py deleted file mode 100644 index f81b5dbd27d17c..00000000000000 --- a/api/core/tools/provider/builtin/aws/aws.py +++ /dev/null @@ -1,24 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.aws.tools.sagemaker_text_rerank import SageMakerReRankTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class SageMakerProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - SageMakerReRankTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "sagemaker_endpoint": "", - "query": "misaka mikoto", - "candidate_texts": "hello$$$hello world", - "topk": 5, - "aws_region": "", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/aws/aws.yaml b/api/core/tools/provider/builtin/aws/aws.yaml deleted file mode 100644 index 847c6824a53df6..00000000000000 --- a/api/core/tools/provider/builtin/aws/aws.yaml +++ /dev/null @@ -1,15 +0,0 @@ -identity: - author: AWS - name: aws - label: - en_US: AWS - zh_Hans: 亚马逊云科技 - pt_BR: AWS - description: - en_US: Services on AWS. - zh_Hans: 亚马逊云科技的各类服务 - pt_BR: Services on AWS. 
- icon: icon.svg - tags: - - search -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/aws/tools/apply_guardrail.py b/api/core/tools/provider/builtin/aws/tools/apply_guardrail.py deleted file mode 100644 index a04f5c0fe9f1af..00000000000000 --- a/api/core/tools/provider/builtin/aws/tools/apply_guardrail.py +++ /dev/null @@ -1,90 +0,0 @@ -import json -import logging -from typing import Any, Union - -import boto3 -from botocore.exceptions import BotoCoreError -from pydantic import BaseModel, Field - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -class GuardrailParameters(BaseModel): - guardrail_id: str = Field(..., description="The identifier of the guardrail") - guardrail_version: str = Field(..., description="The version of the guardrail") - source: str = Field(..., description="The source of the content") - text: str = Field(..., description="The text to apply the guardrail to") - aws_region: str = Field(..., description="AWS region for the Bedrock client") - - -class ApplyGuardrailTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the ApplyGuardrail tool - """ - try: - # Validate and parse input parameters - params = GuardrailParameters(**tool_parameters) - - # Initialize AWS client - bedrock_client = boto3.client("bedrock-runtime", region_name=params.aws_region) - - # Apply guardrail - response = bedrock_client.apply_guardrail( - guardrailIdentifier=params.guardrail_id, - guardrailVersion=params.guardrail_version, - source=params.source, - content=[{"text": {"text": params.text}}], - ) - - logger.info(f"Raw response from AWS: {json.dumps(response, indent=2)}") - - # Check for empty response - if not response: - return self.create_text_message(text="Received empty response from 
AWS Bedrock.") - - # Process the result - action = response.get("action", "No action specified") - outputs = response.get("outputs", []) - output = outputs[0].get("text", "No output received") if outputs else "No output received" - assessments = response.get("assessments", []) - - # Format assessments - formatted_assessments = [] - for assessment in assessments: - for policy_type, policy_data in assessment.items(): - if isinstance(policy_data, dict) and "topics" in policy_data: - for topic in policy_data["topics"]: - formatted_assessments.append( - f"Policy: {policy_type}, Topic: {topic['name']}, Type: {topic['type']}," - f" Action: {topic['action']}" - ) - else: - formatted_assessments.append(f"Policy: {policy_type}, Data: {policy_data}") - - result = f"Action: {action}\n " - result += f"Output: {output}\n " - if formatted_assessments: - result += "Assessments:\n " + "\n ".join(formatted_assessments) + "\n " - # result += f"Full response: {json.dumps(response, indent=2, ensure_ascii=False)}" - - return self.create_text_message(text=result) - - except BotoCoreError as e: - error_message = f"AWS service error: {str(e)}" - logger.error(error_message, exc_info=True) - return self.create_text_message(text=error_message) - except json.JSONDecodeError as e: - error_message = f"JSON parsing error: {str(e)}" - logger.error(error_message, exc_info=True) - return self.create_text_message(text=error_message) - except Exception as e: - error_message = f"An unexpected error occurred: {str(e)}" - logger.error(error_message, exc_info=True) - return self.create_text_message(text=error_message) diff --git a/api/core/tools/provider/builtin/aws/tools/apply_guardrail.yaml b/api/core/tools/provider/builtin/aws/tools/apply_guardrail.yaml deleted file mode 100644 index 66044e4ea84fe1..00000000000000 --- a/api/core/tools/provider/builtin/aws/tools/apply_guardrail.yaml +++ /dev/null @@ -1,67 +0,0 @@ -identity: - name: apply_guardrail - author: AWS - label: - en_US: Content Moderation 
Guardrails - zh_Hans: 内容审查护栏 -description: - human: - en_US: Content Moderation Guardrails utilizes the ApplyGuardrail API, a feature of Guardrails for Amazon Bedrock. This API is capable of evaluating input prompts and model responses for all Foundation Models (FMs), including those on Amazon Bedrock, custom FMs, and third-party FMs. By implementing this functionality, organizations can achieve centralized governance across all their generative AI applications, thereby enhancing control and consistency in content moderation. - zh_Hans: 内容审查护栏采用 Guardrails for Amazon Bedrock 功能中的 ApplyGuardrail API 。ApplyGuardrail 可以评估所有基础模型(FMs)的输入提示和模型响应,包括 Amazon Bedrock 上的 FMs、自定义 FMs 和第三方 FMs。通过实施这一功能, 组织可以在所有生成式 AI 应用程序中实现集中化的治理,从而增强内容审核的控制力和一致性。 - llm: Content Moderation Guardrails utilizes the ApplyGuardrail API, a feature of Guardrails for Amazon Bedrock. This API is capable of evaluating input prompts and model responses for all Foundation Models (FMs), including those on Amazon Bedrock, custom FMs, and third-party FMs. By implementing this functionality, organizations can achieve centralized governance across all their generative AI applications, thereby enhancing control and consistency in content moderation. -parameters: - - name: guardrail_id - type: string - required: true - label: - en_US: Guardrail ID - zh_Hans: Guardrail ID - human_description: - en_US: Please enter the ID of the Guardrail that has already been created on Amazon Bedrock, for example 'qk5nk0e4b77b'. - zh_Hans: 请输入已经在 Amazon Bedrock 上创建好的 Guardrail ID, 例如 'qk5nk0e4b77b'. - llm_description: Please enter the ID of the Guardrail that has already been created on Amazon Bedrock, for example 'qk5nk0e4b77b'. - form: form - - name: guardrail_version - type: string - required: true - label: - en_US: Guardrail Version Number - zh_Hans: Guardrail 版本号码 - human_description: - en_US: Please enter the published version of the Guardrail ID that has already been created on Amazon Bedrock. 
This is typically a version number, such as 2. - zh_Hans: 请输入已经在Amazon Bedrock 上创建好的Guardrail ID发布的版本, 通常使用版本号, 例如2. - llm_description: Please enter the published version of the Guardrail ID that has already been created on Amazon Bedrock. This is typically a version number, such as 2. - form: form - - name: source - type: string - required: true - label: - en_US: Content Source (INPUT or OUTPUT) - zh_Hans: 内容来源 (INPUT or OUTPUT) - human_description: - en_US: The source of data used in the request to apply the guardrail. Valid Values "INPUT | OUTPUT" - zh_Hans: 用于应用护栏的请求中所使用的数据来源。有效值为 "INPUT | OUTPUT" - llm_description: The source of data used in the request to apply the guardrail. Valid Values "INPUT | OUTPUT" - form: form - - name: text - type: string - required: true - label: - en_US: Content to be reviewed - zh_Hans: 待审查内容 - human_description: - en_US: The content used for requesting guardrail review, which can be either user input or LLM output. - zh_Hans: 用于请求护栏审查的内容,可以是用户输入或 LLM 输出。 - llm_description: The content used for requesting guardrail review, which can be either user input or LLM output. - form: llm - - name: aws_region - type: string - required: true - label: - en_US: AWS Region - zh_Hans: AWS 区域 - human_description: - en_US: Please enter the AWS region for the Bedrock client, for example 'us-east-1'. - zh_Hans: 请输入 Bedrock 客户端的 AWS 区域,例如 'us-east-1'。 - llm_description: Please enter the AWS region for the Bedrock client, for example 'us-east-1'. 
- form: form diff --git a/api/core/tools/provider/builtin/aws/tools/lambda_translate_utils.py b/api/core/tools/provider/builtin/aws/tools/lambda_translate_utils.py deleted file mode 100644 index 48755753ace7c1..00000000000000 --- a/api/core/tools/provider/builtin/aws/tools/lambda_translate_utils.py +++ /dev/null @@ -1,91 +0,0 @@ -import json -from typing import Any, Union - -import boto3 - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class LambdaTranslateUtilsTool(BuiltinTool): - lambda_client: Any = None - - def _invoke_lambda(self, text_content, src_lang, dest_lang, model_id, dictionary_name, request_type, lambda_name): - msg = { - "src_content": text_content, - "src_lang": src_lang, - "dest_lang": dest_lang, - "dictionary_id": dictionary_name, - "request_type": request_type, - "model_id": model_id, - } - - invoke_response = self.lambda_client.invoke( - FunctionName=lambda_name, InvocationType="RequestResponse", Payload=json.dumps(msg) - ) - response_body = invoke_response["Payload"] - - response_str = response_body.read().decode("unicode_escape") - - return response_str - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - line = 0 - try: - if not self.lambda_client: - aws_region = tool_parameters.get("aws_region") - if aws_region: - self.lambda_client = boto3.client("lambda", region_name=aws_region) - else: - self.lambda_client = boto3.client("lambda") - - line = 1 - text_content = tool_parameters.get("text_content", "") - if not text_content: - return self.create_text_message("Please input text_content") - - line = 2 - src_lang = tool_parameters.get("src_lang", "") - if not src_lang: - return self.create_text_message("Please input src_lang") - - line = 3 - dest_lang = tool_parameters.get("dest_lang", "") - if not dest_lang: - return self.create_text_message("Please input 
dest_lang") - - line = 4 - lambda_name = tool_parameters.get("lambda_name", "") - if not lambda_name: - return self.create_text_message("Please input lambda_name") - - line = 5 - request_type = tool_parameters.get("request_type", "") - if not request_type: - return self.create_text_message("Please input request_type") - - line = 6 - model_id = tool_parameters.get("model_id", "") - if not model_id: - return self.create_text_message("Please input model_id") - - line = 7 - dictionary_name = tool_parameters.get("dictionary_name", "") - if not dictionary_name: - return self.create_text_message("Please input dictionary_name") - - result = self._invoke_lambda( - text_content, src_lang, dest_lang, model_id, dictionary_name, request_type, lambda_name - ) - - return self.create_text_message(text=result) - - except Exception as e: - return self.create_text_message(f"Exception {str(e)}, line : {line}") diff --git a/api/core/tools/provider/builtin/aws/tools/lambda_translate_utils.yaml b/api/core/tools/provider/builtin/aws/tools/lambda_translate_utils.yaml deleted file mode 100644 index 3bb133c7ec8d16..00000000000000 --- a/api/core/tools/provider/builtin/aws/tools/lambda_translate_utils.yaml +++ /dev/null @@ -1,134 +0,0 @@ -identity: - name: lambda_translate_utils - author: AWS - label: - en_US: TranslateTool - zh_Hans: 翻译工具 - pt_BR: TranslateTool - icon: icon.svg -description: - human: - en_US: A util tools for LLM translation, extra deployment is needed on AWS. Please refer Github Repo - https://github.com/ybalbert001/dynamodb-rag - zh_Hans: 大语言模型翻译工具(专词映射获取),需要在AWS上进行额外部署,可参考Github Repo - https://github.com/ybalbert001/dynamodb-rag - pt_BR: A util tools for LLM translation, specific Lambda Function deployment is needed on AWS. Please refer Github Repo - https://github.com/ybalbert001/dynamodb-rag - llm: A util tools for translation. 
-parameters: - - name: text_content - type: string - required: true - label: - en_US: source content for translation - zh_Hans: 待翻译原文 - pt_BR: source content for translation - human_description: - en_US: source content for translation - zh_Hans: 待翻译原文 - pt_BR: source content for translation - llm_description: source content for translation - form: llm - - name: src_lang - type: string - required: true - label: - en_US: source language code - zh_Hans: 原文语言代号 - pt_BR: source language code - human_description: - en_US: source language code - zh_Hans: 原文语言代号 - pt_BR: source language code - llm_description: source language code - form: llm - - name: dest_lang - type: string - required: true - label: - en_US: target language code - zh_Hans: 目标语言代号 - pt_BR: target language code - human_description: - en_US: target language code - zh_Hans: 目标语言代号 - pt_BR: target language code - llm_description: target language code - form: llm - - name: aws_region - type: string - required: false - label: - en_US: region of Lambda - zh_Hans: Lambda 所在的region - pt_BR: region of Lambda - human_description: - en_US: region of Lambda - zh_Hans: Lambda 所在的region - pt_BR: region of Lambda - llm_description: region of Lambda - form: form - - name: model_id - type: string - required: false - default: anthropic.claude-3-sonnet-20240229-v1:0 - label: - en_US: LLM model_id in bedrock - zh_Hans: bedrock上的大语言模型model_id - pt_BR: LLM model_id in bedrock - human_description: - en_US: LLM model_id in bedrock - zh_Hans: bedrock上的大语言模型model_id - pt_BR: LLM model_id in bedrock - llm_description: LLM model_id in bedrock - form: form - - name: dictionary_name - type: string - required: false - label: - en_US: dictionary name for term mapping - zh_Hans: 专词映射表名称 - pt_BR: dictionary name for term mapping - human_description: - en_US: dictionary name for term mapping - zh_Hans: 专词映射表名称 - pt_BR: dictionary name for term mapping - llm_description: dictionary name for term mapping - form: form - - name: request_type - 
type: select - required: false - label: - en_US: request type - zh_Hans: 请求类型 - pt_BR: request type - human_description: - en_US: request type - zh_Hans: 请求类型 - pt_BR: request type - default: term_mapping - options: - - value: term_mapping - label: - en_US: term_mapping - zh_Hans: 专词映射 - - value: segment_only - label: - en_US: segment_only - zh_Hans: 仅切词 - - value: translate - label: - en_US: translate - zh_Hans: 翻译内容 - form: form - - name: lambda_name - type: string - default: "translate_tool" - required: true - label: - en_US: AWS Lambda for term mapping retrieval - zh_Hans: 专词召回映射 - AWS Lambda - pt_BR: lambda name for term mapping retrieval - human_description: - en_US: AWS Lambda for term mapping retrieval - zh_Hans: 专词召回映射 - AWS Lambda - pt_BR: AWS Lambda for term mapping retrieval - llm_description: AWS Lambda for term mapping retrieval - form: form diff --git a/api/core/tools/provider/builtin/aws/tools/lambda_yaml_to_json.py b/api/core/tools/provider/builtin/aws/tools/lambda_yaml_to_json.py deleted file mode 100644 index f43f3b6fe05694..00000000000000 --- a/api/core/tools/provider/builtin/aws/tools/lambda_yaml_to_json.py +++ /dev/null @@ -1,70 +0,0 @@ -import json -import logging -from typing import Any, Union - -import boto3 - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - -console_handler = logging.StreamHandler() -logger.addHandler(console_handler) - - -class LambdaYamlToJsonTool(BuiltinTool): - lambda_client: Any = None - - def _invoke_lambda(self, lambda_name: str, yaml_content: str) -> str: - msg = {"body": yaml_content} - logger.info(json.dumps(msg)) - - invoke_response = self.lambda_client.invoke( - FunctionName=lambda_name, InvocationType="RequestResponse", Payload=json.dumps(msg) - ) - response_body = invoke_response["Payload"] - - response_str = response_body.read().decode("utf-8") - resp_json = 
json.loads(response_str) - - logger.info(resp_json) - if resp_json["statusCode"] != 200: - raise Exception(f"Invalid status code: {response_str}") - - return resp_json["body"] - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - try: - if not self.lambda_client: - aws_region = tool_parameters.get("aws_region") # todo: move aws_region out, and update client region - if aws_region: - self.lambda_client = boto3.client("lambda", region_name=aws_region) - else: - self.lambda_client = boto3.client("lambda") - - yaml_content = tool_parameters.get("yaml_content", "") - if not yaml_content: - return self.create_text_message("Please input yaml_content") - - lambda_name = tool_parameters.get("lambda_name", "") - if not lambda_name: - return self.create_text_message("Please input lambda_name") - logger.debug(f"{json.dumps(tool_parameters, indent=2, ensure_ascii=False)}") - - result = self._invoke_lambda(lambda_name, yaml_content) - logger.debug(result) - - return self.create_text_message(result) - except Exception as e: - return self.create_text_message(f"Exception: {str(e)}") - - console_handler.flush() diff --git a/api/core/tools/provider/builtin/aws/tools/lambda_yaml_to_json.yaml b/api/core/tools/provider/builtin/aws/tools/lambda_yaml_to_json.yaml deleted file mode 100644 index 919c285348df83..00000000000000 --- a/api/core/tools/provider/builtin/aws/tools/lambda_yaml_to_json.yaml +++ /dev/null @@ -1,53 +0,0 @@ -identity: - name: lambda_yaml_to_json - author: AWS - label: - en_US: LambdaYamlToJson - zh_Hans: LambdaYamlToJson - pt_BR: LambdaYamlToJson - icon: icon.svg -description: - human: - en_US: A tool to convert yaml to json using AWS Lambda. - zh_Hans: 将 YAML 转为 JSON 的工具(通过AWS Lambda)。 - pt_BR: A tool to convert yaml to json using AWS Lambda. - llm: A tool to convert yaml to json. 
-parameters: - - name: yaml_content - type: string - required: true - label: - en_US: YAML content to convert for - zh_Hans: YAML 内容 - pt_BR: YAML content to convert for - human_description: - en_US: YAML content to convert for - zh_Hans: YAML 内容 - pt_BR: YAML content to convert for - llm_description: YAML content to convert for - form: llm - - name: aws_region - type: string - required: false - label: - en_US: region of lambda - zh_Hans: Lambda 所在的region - pt_BR: region of lambda - human_description: - en_US: region of lambda - zh_Hans: Lambda 所在的region - pt_BR: region of lambda - llm_description: region of lambda - form: form - - name: lambda_name - type: string - required: false - label: - en_US: name of lambda - zh_Hans: Lambda 名称 - pt_BR: name of lambda - human_description: - en_US: name of lambda - zh_Hans: Lambda 名称 - pt_BR: name of lambda - form: form diff --git a/api/core/tools/provider/builtin/aws/tools/sagemaker_text_rerank.py b/api/core/tools/provider/builtin/aws/tools/sagemaker_text_rerank.py deleted file mode 100644 index bffcd058b509bf..00000000000000 --- a/api/core/tools/provider/builtin/aws/tools/sagemaker_text_rerank.py +++ /dev/null @@ -1,81 +0,0 @@ -import json -import operator -from typing import Any, Union - -import boto3 - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class SageMakerReRankTool(BuiltinTool): - sagemaker_client: Any = None - sagemaker_endpoint: str = None - topk: int = None - - def _sagemaker_rerank(self, query_input: str, docs: list[str], rerank_endpoint: str): - inputs = [query_input] * len(docs) - response_model = self.sagemaker_client.invoke_endpoint( - EndpointName=rerank_endpoint, - Body=json.dumps({"inputs": inputs, "docs": docs}), - ContentType="application/json", - ) - json_str = response_model["Body"].read().decode("utf8") - json_obj = json.loads(json_str) - scores = json_obj["scores"] - return scores if isinstance(scores, list) else 
[scores] - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - line = 0 - try: - if not self.sagemaker_client: - aws_region = tool_parameters.get("aws_region") - if aws_region: - self.sagemaker_client = boto3.client("sagemaker-runtime", region_name=aws_region) - else: - self.sagemaker_client = boto3.client("sagemaker-runtime") - - line = 1 - if not self.sagemaker_endpoint: - self.sagemaker_endpoint = tool_parameters.get("sagemaker_endpoint") - - line = 2 - if not self.topk: - self.topk = tool_parameters.get("topk", 5) - - line = 3 - query = tool_parameters.get("query", "") - if not query: - return self.create_text_message("Please input query") - - line = 4 - candidate_texts = tool_parameters.get("candidate_texts") - if not candidate_texts: - return self.create_text_message("Please input candidate_texts") - - line = 5 - candidate_docs = json.loads(candidate_texts) - docs = [item.get("content") for item in candidate_docs] - - line = 6 - scores = self._sagemaker_rerank(query_input=query, docs=docs, rerank_endpoint=self.sagemaker_endpoint) - - line = 7 - for idx in range(len(candidate_docs)): - candidate_docs[idx]["score"] = scores[idx] - - line = 8 - sorted_candidate_docs = sorted(candidate_docs, key=operator.itemgetter("score"), reverse=True) - - line = 9 - return [self.create_json_message(res) for res in sorted_candidate_docs[: self.topk]] - - except Exception as e: - return self.create_text_message(f"Exception {str(e)}, line : {line}") diff --git a/api/core/tools/provider/builtin/aws/tools/sagemaker_text_rerank.yaml b/api/core/tools/provider/builtin/aws/tools/sagemaker_text_rerank.yaml deleted file mode 100644 index d1dfdb9f84a858..00000000000000 --- a/api/core/tools/provider/builtin/aws/tools/sagemaker_text_rerank.yaml +++ /dev/null @@ -1,82 +0,0 @@ -identity: - name: sagemaker_text_rerank - author: AWS - label: - en_US: SagemakerRerank - zh_Hans: 
Sagemaker重排序 - pt_BR: SagemakerRerank - icon: icon.svg -description: - human: - en_US: A tool for performing text similarity ranking. You can find deploy notebook on Github Repo - https://github.com/aws-samples/dify-aws-tool - zh_Hans: Sagemaker重排序工具, 请参考 Github Repo - https://github.com/aws-samples/dify-aws-tool上的部署脚本 - pt_BR: A tool for performing text similarity ranking. - llm: A tool for performing text similarity ranking. You can find deploy notebook on Github Repo - https://github.com/aws-samples/dify-aws-tool -parameters: - - name: sagemaker_endpoint - type: string - required: true - label: - en_US: sagemaker endpoint for reranking - zh_Hans: 重排序的SageMaker 端点 - pt_BR: sagemaker endpoint for reranking - human_description: - en_US: sagemaker endpoint for reranking - zh_Hans: 重排序的SageMaker 端点 - pt_BR: sagemaker endpoint for reranking - llm_description: sagemaker endpoint for reranking - form: form - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询语句 - pt_BR: Query string - human_description: - en_US: key words for searching - zh_Hans: 查询关键词 - pt_BR: key words for searching - llm_description: key words for searching - form: llm - - name: candidate_texts - type: string - required: true - label: - en_US: text candidates - zh_Hans: 候选文本 - pt_BR: text candidates - human_description: - en_US: searched candidates by query - zh_Hans: 查询文本搜到候选文本 - pt_BR: searched candidates by query - llm_description: searched candidates by query - form: llm - - name: topk - type: number - required: false - form: form - label: - en_US: Limit for results count - zh_Hans: 返回个数限制 - pt_BR: Limit for results count - human_description: - en_US: Limit for results count - zh_Hans: 返回个数限制 - pt_BR: Limit for results count - min: 1 - max: 10 - default: 5 - - name: aws_region - type: string - required: false - label: - en_US: region of sagemaker endpoint - zh_Hans: SageMaker 端点所在的region - pt_BR: region of sagemaker endpoint - human_description: - 
en_US: region of sagemaker endpoint - zh_Hans: SageMaker 端点所在的region - pt_BR: region of sagemaker endpoint - llm_description: region of sagemaker endpoint - form: form diff --git a/api/core/tools/provider/builtin/aws/tools/sagemaker_tts.py b/api/core/tools/provider/builtin/aws/tools/sagemaker_tts.py deleted file mode 100644 index bceeaab7453d94..00000000000000 --- a/api/core/tools/provider/builtin/aws/tools/sagemaker_tts.py +++ /dev/null @@ -1,101 +0,0 @@ -import json -from enum import Enum -from typing import Any, Union - -import boto3 - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class TTSModelType(Enum): - PresetVoice = "PresetVoice" - CloneVoice = "CloneVoice" - CloneVoice_CrossLingual = "CloneVoice_CrossLingual" - InstructVoice = "InstructVoice" - - -class SageMakerTTSTool(BuiltinTool): - sagemaker_client: Any = None - sagemaker_endpoint: str = None - s3_client: Any = None - comprehend_client: Any = None - - def _detect_lang_code(self, content: str, map_dict: dict = None): - map_dict = {"zh": "<|zh|>", "en": "<|en|>", "ja": "<|jp|>", "zh-TW": "<|yue|>", "ko": "<|ko|>"} - - response = self.comprehend_client.detect_dominant_language(Text=content) - language_code = response["Languages"][0]["LanguageCode"] - return map_dict.get(language_code, "<|zh|>") - - def _build_tts_payload( - self, - model_type: str, - content_text: str, - model_role: str, - prompt_text: str, - prompt_audio: str, - instruct_text: str, - ): - if model_type == TTSModelType.PresetVoice.value and model_role: - return {"tts_text": content_text, "role": model_role} - if model_type == TTSModelType.CloneVoice.value and prompt_text and prompt_audio: - return {"tts_text": content_text, "prompt_text": prompt_text, "prompt_audio": prompt_audio} - if model_type == TTSModelType.CloneVoice_CrossLingual.value and prompt_audio: - lang_tag = self._detect_lang_code(content_text) - return {"tts_text": f"{content_text}", 
"prompt_audio": prompt_audio, "lang_tag": lang_tag} - if model_type == TTSModelType.InstructVoice.value and instruct_text and model_role: - return {"tts_text": content_text, "role": model_role, "instruct_text": instruct_text} - - raise RuntimeError(f"Invalid params for {model_type}") - - def _invoke_sagemaker(self, payload: dict, endpoint: str): - response_model = self.sagemaker_client.invoke_endpoint( - EndpointName=endpoint, - Body=json.dumps(payload), - ContentType="application/json", - ) - json_str = response_model["Body"].read().decode("utf8") - json_obj = json.loads(json_str) - return json_obj - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - try: - if not self.sagemaker_client: - aws_region = tool_parameters.get("aws_region") - if aws_region: - self.sagemaker_client = boto3.client("sagemaker-runtime", region_name=aws_region) - self.s3_client = boto3.client("s3", region_name=aws_region) - self.comprehend_client = boto3.client("comprehend", region_name=aws_region) - else: - self.sagemaker_client = boto3.client("sagemaker-runtime") - self.s3_client = boto3.client("s3") - self.comprehend_client = boto3.client("comprehend") - - if not self.sagemaker_endpoint: - self.sagemaker_endpoint = tool_parameters.get("sagemaker_endpoint") - - tts_text = tool_parameters.get("tts_text") - tts_infer_type = tool_parameters.get("tts_infer_type") - - voice = tool_parameters.get("voice") - mock_voice_audio = tool_parameters.get("mock_voice_audio") - mock_voice_text = tool_parameters.get("mock_voice_text") - voice_instruct_prompt = tool_parameters.get("voice_instruct_prompt") - payload = self._build_tts_payload( - tts_infer_type, tts_text, voice, mock_voice_text, mock_voice_audio, voice_instruct_prompt - ) - - result = self._invoke_sagemaker(payload, self.sagemaker_endpoint) - - return self.create_text_message(text=result["s3_presign_url"]) - - except Exception as e: - 
return self.create_text_message(f"Exception {str(e)}") diff --git a/api/core/tools/provider/builtin/aws/tools/sagemaker_tts.yaml b/api/core/tools/provider/builtin/aws/tools/sagemaker_tts.yaml deleted file mode 100644 index a6a61dd4aa519a..00000000000000 --- a/api/core/tools/provider/builtin/aws/tools/sagemaker_tts.yaml +++ /dev/null @@ -1,149 +0,0 @@ -identity: - name: sagemaker_tts - author: AWS - label: - en_US: SagemakerTTS - zh_Hans: Sagemaker语音合成 - pt_BR: SagemakerTTS - icon: icon.svg -description: - human: - en_US: A tool for Speech synthesis - https://github.com/aws-samples/dify-aws-tool - zh_Hans: Sagemaker语音合成工具, 请参考 Github Repo - https://github.com/aws-samples/dify-aws-tool上的部署脚本 - pt_BR: A tool for Speech synthesis. - llm: A tool for Speech synthesis. You can find deploy notebook on Github Repo - https://github.com/aws-samples/dify-aws-tool -parameters: - - name: sagemaker_endpoint - type: string - required: true - label: - en_US: sagemaker endpoint for tts - zh_Hans: 语音生成的SageMaker端点 - pt_BR: sagemaker endpoint for tts - human_description: - en_US: sagemaker endpoint for tts - zh_Hans: 语音生成的SageMaker端点 - pt_BR: sagemaker endpoint for tts - llm_description: sagemaker endpoint for tts - form: form - - name: tts_text - type: string - required: true - label: - en_US: tts text - zh_Hans: 语音合成原文 - pt_BR: tts text - human_description: - en_US: tts text - zh_Hans: 语音合成原文 - pt_BR: tts text - llm_description: tts text - form: llm - - name: tts_infer_type - type: select - required: false - label: - en_US: tts infer type - zh_Hans: 合成方式 - pt_BR: tts infer type - human_description: - en_US: tts infer type - zh_Hans: 合成方式 - pt_BR: tts infer type - llm_description: tts infer type - options: - - value: PresetVoice - label: - en_US: preset voice - zh_Hans: 预置音色 - - value: CloneVoice - label: - en_US: clone voice - zh_Hans: 克隆音色 - - value: CloneVoice_CrossLingual - label: - en_US: clone crossLingual voice - zh_Hans: 克隆音色(跨语言) - - value: InstructVoice - label: - 
en_US: instruct voice - zh_Hans: 指令音色 - form: form - - name: voice - type: select - required: false - label: - en_US: preset voice - zh_Hans: 预置音色 - pt_BR: preset voice - human_description: - en_US: preset voice - zh_Hans: 预置音色 - pt_BR: preset voice - llm_description: preset voice - options: - - value: 中文男 - label: - en_US: zh-cn male - zh_Hans: 中文男 - - value: 中文女 - label: - en_US: zh-cn female - zh_Hans: 中文女 - - value: 粤语女 - label: - en_US: zh-TW female - zh_Hans: 粤语女 - form: form - - name: mock_voice_audio - type: string - required: false - label: - en_US: clone voice link - zh_Hans: 克隆音频链接 - pt_BR: clone voice link - human_description: - en_US: clone voice link - zh_Hans: 克隆音频链接 - pt_BR: clone voice link - llm_description: clone voice link - form: llm - - name: mock_voice_text - type: string - required: false - label: - en_US: text of clone voice - zh_Hans: 克隆音频对应文本 - pt_BR: text of clone voice - human_description: - en_US: text of clone voice - zh_Hans: 克隆音频对应文本 - pt_BR: text of clone voice - llm_description: text of clone voice - form: llm - - name: voice_instruct_prompt - type: string - required: false - label: - en_US: instruct prompt for voice - zh_Hans: 音色指令文本 - pt_BR: instruct prompt for voice - human_description: - en_US: instruct prompt for voice - zh_Hans: 音色指令文本 - pt_BR: instruct prompt for voice - llm_description: instruct prompt for voice - form: llm - - name: aws_region - type: string - required: false - label: - en_US: region of sagemaker endpoint - zh_Hans: SageMaker 端点所在的region - pt_BR: region of sagemaker endpoint - human_description: - en_US: region of sagemaker endpoint - zh_Hans: SageMaker 端点所在的region - pt_BR: region of sagemaker endpoint - llm_description: region of sagemaker endpoint - form: form diff --git a/api/core/tools/provider/builtin/azuredalle/__init__.py b/api/core/tools/provider/builtin/azuredalle/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git 
a/api/core/tools/provider/builtin/azuredalle/_assets/icon.png b/api/core/tools/provider/builtin/azuredalle/_assets/icon.png deleted file mode 100644 index 7083a3f638e9a18e2d9c09616bd1b9b5e36f53cb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 50703 zcmeFZWmuG57dCv+2ny1Ilt@U6NT&j#(wzd5($dX{PPt#WQsIZ>ij-73>--8%VynAdRBxKaJWV=Xl zfjXG^Biwh>P$!ABL(4}8O+ugc;r)lAYeH*$N=gxs`D3?fgW91NGX6Zr;s4|KA^M#W3hR{NU+?T6FdKF_>uPEcNf- z!H-Y|T8t!N{PP};=+gebKWjmqs$&1I@BQ}g?@e`ZH($B_N1)c>FbmCo;O`fSdHnlL z3>eOTM6Vb6yXfDqWB>P&1X6W`F#aCNdFj8-`~Ul`Kng*tvJs_g7{%Dgda{L!O9;C&0YBD z-gLsuwT=IJsHOO(579~cbtOfe+ITfG=GAGuOiZ*|4cFxhl%uxsvRhnvw9>ct_`dwC z@H-6s!$Q7$)N?}6^MjLboaJ;+w2kMM;uuUv2gr0N28e}H={3=(y@V1AT0kkka@C9c zHg$D_vLHP_s6^XZ3OEY^E%S89D#{G!AM|&Fdu+r(8TkI3`D)@jab{QF(pZbWHfO@YFsZi_gT+TZ8zF#i> zAMa@lpMM`Dl=N84>&25r$m$l;_y*14Pjo+wZyaQ3k{N+VtT8w@xfTf?%Amx2;r_=5 zT`;+|p$ddwEq*9_f3L=@`<0Av1y{22c@t-mQJqx|Bx-DE?OCgtb1_4ObJjEAhkJxadB-< zLLiCRJHo^YfllaNzu((UdF8ajP<#r9x5~i&w>QszngWrOOZnu0w;1%XE9uP|p~q9h z_W)dS2aCi+)MasrRyzLH_(Rw}&`O2{7(NU56pbd?-}u=I4Pe8KVKz;t25kIsNLJ89 z@rfKD{Cp@CH2sYp3tD-RQ@62zt(+SicQE`&NgqK; z4GRD6s8W(e)P(Ry_nUZhDfbgsBl!?|JW$lSrkX8Z5Ty4J8mc@*Su4BT=wvP@lC1yp8`?P_NdU!h+$q zJ)-mzn)XGe?UM(V$VG!b<2;eUUUDWmhVa83vEo9YN6e5J{kaT5-RFuWKRJc3D8ps?i#%xBZ*NV6 zzQ+`uj`m8T0^oe1rF?+)9(xalq?!L(h)ws<*4y@ERkeMTMFjuH`40wZ#qAVU8ioxb z(OQSzJ<`Ph)NkUV?)XdV+cV6xONp;<{c`d%PIlz4b`<*0j^FXgoVYS=5RD2jx9nMe#W*@b6sy`HyoL zfXE=~15jbit2Xzql)3uX!}vPEWrkn~>DyKs_=qOC@g!dpCTsRa%d?ymP4eSl0fRg_ zpq}ss<(E!>y*Y7d0j-svabC{?JDCx~Z`iPJj1AGnf8CU1F~Sk;zXn66npB7vpvi~A^9w5zEmc;D)6Ws+(;&LwoSAkuQ(zNG?{*qt5vzY ze+@pC9ZVgPi((LH=X11H`{-J#b~>NS8Rec&WM~edu_su#Bu=h_GpH!Q@^>pR3MlP9 zWt<)I-Al{B1|R{1gZB`ond#Y!pRoLiW#VEh$bemq`U{kmbp6eRJ!EXl^+XpZvrUE1 zKvno7Lykwl(~X`rZeNez{_0N7wkrNO`So5ZwJ>TWU(cVFr3vivy&KOs=DZyDJ}yR3 zVVE)9_zA9TzSXNai5<_nm@J)qlZPdy9~A`p{ssZ-RlE}yc&QCCH8f?qe4llKT+j_B zUA!1!Gj|g|R@Bd~?!eaX()k!=U~zwwf|i=Duwmh4Rit7utx zlmo9}5Jp0D@$|~8JLl%7`|UlYSYVD%3PbmMQJ%^E9Y{CcW7zJD8tuadlCUdcs?ivr 
z{n^rnz{^tY-)h0R|92rRE#Mva&dX;%{Q)E59??xEOoZ@fi%PUsEwt`nzoEqvkAvgZ zmLC%+&(rx^OVISWQwghz*JFY`B zzXPOoI0eU|A|X=&+Od$r4WNPqL+?5;>fT~= z9Q(d=+mjms=H1UN`a&dxnaVtF%CQ1L#*$hGwVB$}d}lTpYZ< zieebJVFBss&VBX~e7~EJ44E>KB`om^d-`s4e4$zAD_GL;r?e=|^cX%ry70;inhKJ> zSf9+BG6j^&S#Dgs%!<{7vDoIbs4*i#ngOxUU=4srASE27p~=56i1KY9{xK)xqy&qS zS~te2N0C(JVnj1yTDFc}IXO=*wUJv={KX{}?fyg9-z4`wFqCho&)#i256=pC{Yp2@ z9Q0kJ9AxE<UE|!#H{S4Nj~ohTq+&67ZmS z1kIWo*W(vw>v-g$b#jZ{=Eh*ZV0tmsm$M4q08mn(Jc5O5BoGKgNmLFKRHD-M+Y`GLK)eGann_ZOF~Ck zQWlkn%>M<=-v0&|rREE%ET0H*-dTnBimeLxzVUtK41+9YyHWD)p%&O`$n@!y4eYIvg&PcCWjQFz|Vm8{dd zJ&M;ySW!4YGf2Xc7pf>t3((?ne7+Kx`EQXeeDFF7ffal-ncvJbVl2r*9x=uWoZ7`_ zXYkTn@;ug|m5dK^dh=VQcb+u-E~qJkDL7W`WQwy& zg4u-3DK+py81YH*DnK~?4G_VBp-9mTDkCFEHkqH=)Y>K^BF~`J%0BKfe8^R@KqQk# zzSole4RFWp=9Ok9z5gs7eowLaJt5?s@Q;%e-Vp#;!GH^P=g7chtJ?=2-$8_U0>*2^ z9%25@!e^*hwvo~ByHw6)C+r)HQVtBQ6UavcDhyeE*`N|8-WO$JNCvH6t#zvRZTg5S zISc-Vmk;gu6vw-F_vXj6n?-i8_<&PhcqTdMc0RA;nc{MY0Y<#y{*N9&w0l4Db12FL z*!~nqq9(AMx+Bx?BBw9www894G5mV9OUc{QI4ByY)#@0GDsYx4`@K2v(LK+8w4)q0 z_D^U^CGtpl`WYc+>v@9@=+e2b&jKFqCB0lRck6YB0kwl&Kg)+G;HW; zqn?@~5vl)31uSzX6-A%kz4(zyMph!od>d3VwR^ z%AADrM8psV+r+amXV$e>Cz)&N~5=elBBK{h5g+M^i=8O zA?fO7mJ@yrc+@3NbS_E!Fmj>l*>n^|PG+ttEeW*)p z@WYyXci96cD2Yw^;fuKgCay2&( zz>ns|-F%jII$NCO><>difMm5cR2g(OkOsh-;0IU|2vF8v!2LIqlQq6|k zY3jnt(@cGT1q>Z4v{sua+wvm%n`^)G-8_gFWqm&XVbO}0-v`yPr~qZI^-9;OZYj6# zs;9KmrI3#k8GU9%yb>|;$2sJmu&3AWJ<56uS~BfX`=8pS>Lcjp@DOyrUrLxV1-AYW z+9?I_K910vDXsDu5vHp2;ukF(TllRyPp<@KYyAU}G!Gs_r~96!Tj2cS*0=!>C_oHZ zU@V+3pz5HpRYJo%SIqn?*HMz@{w;h4xoRP6>3-gjeA2R?LCbDFP(sZalcXi*W4*Z!OgK-`f0DF8vQsMQQ>f`hbav(XCZeRY>pEy8!!#xFbAC{-- zZqEWgsbTnaH-)}dsR7qU6(6Z6VJ7drbO^rw0Q5@2-t9BA0qMUx4*63sFQ&>aHo6v+ zs`AjQQ|{+FC<=#4R1{MYqp67r3xI@|n@W?vkwfXzD?NHtlLGOH{{%`xZgr^G8+5-I z9ymk6Y33}@68LKj+;nI3Bd(y@NM+Eg>x_%uCN?PB99TsM)SQ3k%3llbgKG^l0Q%XM zxn~8S@2N5T(18a_OTsNwHK254I3Z*%LR@>--Dk#8{;+QShe-Lg0G2`Mpue9sdZmK? 
zUO^nV&Vk6{B;$>N(#7JWZoPB~w-FQ7>D{nY93XTz{}105$gU1ec+9x&;kc*TPN%b* zajh6MtgYo>zpPV|)edi3hDFhs6|~nKlqeo0!r-4S`YVSS?2}&nc*45qEw)9whdC%e@cD<6f3mDVAuJM&)*7WzX&1eyWP`IEKeh=N zDW$(pJiQwo0*2Wzf5qEZ8`1vR=&%Fd^OAxB$xI4(w-~ANu_B}&)7wdeSN{a<)Aei3 zH;!^sGMFqoy3_m7+?69BQ5lWF1eLfbk-uqaSrUVwAhl3*Kkm$cH0JiSFLTTeF65Ak zSA)bnYG8O$Y<=w3@lBbihUbGI!TPqnJnB{s|6!xQA7fq1oV9rvY{mBNpY^JoN0vVb`=yu35T5B|H_XX07 z>wnf6#0T->UvsL}D(xvill*#+58mIe%ePc)=)@iXZINyTk8*9$meqePt75m;Z5cz* z4%8TTUgM~jZKx0rA}^a}!qE05rWkz%N9M{L#t+qUMjj;eFo||_+@p2kwew;U9rbNH zEs|2DJ&broG41~0&FXbzT_$wk`wa_3IB#>*e!LehS6UT0a?cnp{%i={oCC4%=b zku{A+y3N89x<9u|(z7>>1(84wbR0yYnYCE)qTmBj^$&$1Q1k3xyDl zllGyM(V4-zY!UL~0F!jrhHQVYl1;Kt8Wo_vt!P;Vdg8QJ6W{#`+;S6f2X3MhmU@VR zp9SI{lw)wQ&ww+2DMC)+mhZ=Ri=tRLbo-)-ME*SEPvk5;q{FVXN{>o~a4J)zeA(2_rs~C2A zzoyg=6EqZ_x}1i+n$6$Vp-T}aKJZ;w*w|Y|KKLrMcHy8n;U(+bH_4YFw>m1B)H`rl z#2e?XNBiQQDP7uDKSvnJ{lfh<)leMKh$EPQeD57}za1h(ycMu7`+5#MZAH_gL=v<$ z{0WL>S+sJ?)amA&UuPJpXMcKblcl&yNdguJ952jaI>QWSMX|)RTLbsRw;V-!-z8;W z6-`b&?g@FHS}oIa*|1_qxe$MObn4~^A>93x#$9 z5++*fE^12){lPC=TgE~J6eZ5*kOS0F?q8n6qb%hxr^`~uvvMX_O8*i(rb$}1y}{E{_XY1N(-CP zxxj z7oykR!q<4do=R9~{!1`TAm94s8>InHQXuQY>HZxJxQbb85j0&hxFu(_b%gxu0)t{g zBkw$y)43?dfb(yAJYhRvEI~R+8q@WeaC~hS$Dp{lC^pQo5Pkl-D)0G8dEcDy57xD@ z4IiZ#MtAjT?QN~OoP>IU(K6$ku5MAc3Y8q%H`gE^eh%z}{_H&2yjjlwaIa^mg456J z+ng3yOK|Ql1{ae9`xhhPcC=0xcs+?m zBip`N9Qu@dU2A%EcQjz0hMF56_=tU?31&bn>$|#~2x*3NIynV+R3Fh~DwD`f#gk22 zd%QG@`2>pJMM`g&K&*vHaA>uJxRl|$@Zp7H^-i(#9Br-9Y*PD%H@2C{PK1FFb^WTr zt?raq|FE4p`MGUcG4V>4yME=(2>h@?`61_ zHjx)P--#PProt>XnKlqXp4?nzv;X#3KgS-d*XFqo+o^s`mj!rSjzDKgp_4ZoKfbp3 z(Az&SM_oz^_8A=HKY>2_fb?M=$i?wWD?%^LJ)FNO&!7k6JFyG51>@bPTspkos1*vb z9GL1G3x8$-z@B%&>gRV&_b65aRYXUry>lSaZ6@NDtd9tb(HTRE6hF&fm{elN=*!Wm zTXczehF$Y4d{Y*XJ*zEM08qdQbxFO|Zdyz2bIAy?o38{8=duNZ6~ExaohD5j;8nX3 zPd4BEDw#6rkyu>795+Q>g108yyW{*7Eq*NCt|=RYu9f$nJW++g{XON9++j6!V48V> z*X~PvdZ&qKJa;wQEG*wi(V@}Wqk$&dI{S1^XEXFC3x1i1(}nTLq7XLGSpBWFPK!3L zqEgW4%yqN3re0*Z>@tsYtZ`J3R)|ngH$Xu 
zO%U(a%BU3xMS#L49jnENYAn^J9jawAG#>BO(&B;zCUrZV#RFF@VT2DbApe{A9>X?> zv9|E?p1%?7`VtX>e;&PBamXaPdMN!_V?rxaXo7`gx_?@a=2zSNHP6I~LoKZCibIs| zw4YeHITVLEc*aQ->rH4$**y3tv)!mIewd-&6Bn4X7d^lo(&?Q%-yvRQ6{)alzf`|$ zkhtQWLa?ZGu;?u$4U~!Wg6JoTCh7JdD0A@JBy1EunNU^2|4<}eYn`=}U^W{k3$t)h z8s?Rmk^>`f%{_6iWo7@Rq}5+1*rKSmJUB(bcwRd$yZF|Y;CZ(-mx#n@?29JhJg?pI zAT*|<9t>KlhmT5y`n$hsOVx|GN^HD!<;;)@d09Mi2BTYQpjA5h()%^-tB+&0$yfT_ zY=!86Pu_HC+8ObfgDo*j2Mn{|z&T%Lz3lu52VXb~?XA!Ba(ere?G^@M%as z^NQ#BSkBd95sT&fI?XN<#F>px|FKSve#$!6IpCuYl@ZD<3n5zyypS2V*9XeF?2w z=jSZG-LV5_Xq4AP8;H9r?Gu;!*)GpW8ZwU@6bFQL!PWqpC7kEq zL!hVJR(IJKcq_gHi@!u)ZIWp|X7R<}Fm$(#nrkBX-amF+Pv)yF#YWd4u}1SYWWgnP zDTNQC_<@7JM}RalUmkMe+xIjrb=zJK-KHv_P&Xlr3=0xr=3!^8`AZMPqCT4gw!0J_ ztT~3I)cS+8kpmKOodstrY8TrLuzm#^5E>u*vQ44K?;*o=f!9`E#)a)^qQbS(S#^B- zi2%;TGk}}Rl|}L5LWHMkD&LvA(8OK&_$lW?OsBVhqIeZ4Zu~b8BToL)Hq~{%=Oqz0 zr{jm;`{b{R&)aId0J!TgMG_D+CH_*GYtf2kvOA~TjW@>lG?EU>@bwPLD|;edo-H+heV{L}!3ZxVf|v$+Y?a&kM3+_hWbF3s zo~VSABE=$GqU%bEqSq{r^LF!wFHrdtd-tVY@AnPh~Hyg;?1He z&WIQg{&f9|M0oi6j1Jkak->?xJ{WBgUSzO18+7=Q*L;mhGHs5RuIb$crTuNst^GGu zu@(X={C8jyHCz$etM+00HJb2e7apx_HYRD9ZJI~d8kjE!o28=wFh0W1qPc`<96aZx zab@px<()=Y-Q1sAHb7aI-sQs@Y(|*{lD2p@D@2H6hbjH0083BmQKA5|^T(K(numPxpBxFKNAZ)q% z{KSi8_6(~#A&9K(Vb$B_dt~?D%5UsZxYJyfVJFWIALEQ`%N&fuR76)jY-KzaXS6Af z!?$9cY=>H>0($$$-plqL4^NLAABjANm&$pKwByD9e5zlgcu};x-LXxPb()swl=rg| zQ618<$p%#1@L4QhJ_-EP=lD{bK6K2m9D?ML>~ycq7_R? 
zxkCnyBlqJaGO}MPkew~=Pap)A=7e->Mv$qWBUY=R4V7(l4bh7MGf9UfU(HL3%f+OI zh$P74h{sDedpk@9p2B3k5S%nyydV!hH74=m8T^@?=Ujsy{IUZ#UMq5ta`TMo0<0FB z<2jV^fe_8W4ODsVZmozZPr^$^h8v-<_C+pS%0v5PQ)qWPnkt-NH+*O84AOzPz@qi} zjc0SW-dCJC3=u*t;6hYAu!DH+AKo^22PUUxww4KcHDJ(aZ}b1zGT_KP$&YyZhD=H* z8{dbfzn^GAUtzzjnR3hA!_e-8_}!?$s)t*?!SbGVv+qcQ%*9&|na)Mg0eu6i*C+kN zbVa_ifzna5^)U@itS(lO``T{V=%Ezt?2y_=Arjrkva;|GU~|#;)V&TIyeL&sra$Yt zlXiXIm9X;r{gFdG3H@sHGS3)KPQPe9l3yT1)x*wbLH`)`W|bg@Mq>JqHFw65+OAz# zfQH&OO@VWfalpUsCa)h%MHYF99^w1DRvlh>+!jG-l=k%O1729wsmF(auy52(T)Ahn zgKjy#Rtv58h6;#Uqpz*Iv8rftLmdK&uPy|c*1r|9i{JL$J!E}JCKsT@sz9vB8gg+u z{6=s#vQ4|$&7o1RWf*1`{!>-_g6GJmeShN9r!#GHBwvY~fwg&20GI{p#qRD@40`Da z+}UT;fEPLpA9=TYhSD|dQhfRecF=9{IUpeMq0?~3ptXp9>uQpsy35n*PR(f)#?uZ1-02am zUVQE;`~Ft(F`c5>+2qK+nKVZyJI+J$fiZGQgc+h8KW_<1FwME{P*>sc#Bn9{ zRD#=w#?Z%p$-+f}wWOx(>Ba5wEM!B7!g|(3hgzAVAIkLVOVSp5|FMa}&x0@zC z&sCv`>@?GHYi_S9IDnwz1rY2`P_O4+>qo`gIf6=XKA=nU%mpl+ag~nVNbbNq@*`Y4 zn^{|hrB+PkNf0i{*TQgGtz~}?Yw)>!ZeAbqeiD_VyUtUxEjOp=y9Z4nLsL4M$ZUz( zbaUD1atp~m2yw#?^!7H>TaGKZbmEejmbjp;HrJ+!8*Cs?IvB9l#WK`@ceeOms|h7- zco(+iobN#8`ki}?9jyE3zKPpaokmwb?NTOzn94PB1Ot*) z^vYg1p|UKlB18)LwfFV}=WL&LQiKz!Q^d2_rocur_EN3C&$Lyr12QY2J<;*u*A9e(b^Nl zA2)Hn&?x6t zFhxptf`vq>XkP8S^dBC~mwYZ8w{?zD)sPb}SguFcp}V~78xUsCo^ zIAgl_U8nb(RO|NhmR%JLKVCWAS@io~K+bBytO|7YGQ~(Q%wZ7 zlWZXtAcQXqM$3Dz#^-x}+(W$5qn~L3DGM9g{3)WL>QgVfClUC0b8nvaENnPa68c%V z|F*Ty_VVu=MO&a!SJwvrU z7rHk*!K#seeAoW73Ov{oICvxJgp==v{Sf+GRv~NYNE>Ynk7?YO{EksW((oy|=+0G| z%4xqF{tHGKo%v!UDRKVo(2Q7GQ?#3t$9!~kau1};_0@1(Io-{*&NX+T(YfW1Q zvZLD>ksyZ}wQFilqxtKHpD_UR+xByO?|cU{-|24zIhko0ggX}Tn$}HeU~uvd@glt6 z80K36%7RCJDyXT0EyCfd5Qx>>LI?uLTcnv_9lXiT5{hotc~GtBszD4{yf+`G-2|m$ zBG{!d&Ib<@;^$i(QXFWl`YlAlg$Y2yf2!bNzqsM%QpR%x!*b`m!d(djN^I^IO|NKq zmDIuz8fY_)C0xf1!?ZFUu8P}f5m1+AK26Z%Q)@K!tVR;io!4nbj{Guw&kb^?UdQk39qp;8_X1R z>k(!Z@0p?WS|=t(mRgLx{AxdT>jm7b4xk`gR7yljv=oIdmv1e*U?e`5qE@{97@Yd; z+g4Gam6IpjbUWtY!yW$^u@W{pwEzxk*(23&@FK@^dFX`Mi^+Wiy2XbFuSG)?qp$!K 
zgQgL%z)$;pd4%c^95pBEVcgZDotMT0dfwD}B7KiwHT)fC&pxnkncd~2WyO_$(0f8GksXlHp^GnAi?JZ8m5a$s>Jux#+CJ>gezqPWw zgJ9&|Kaz++2TU_L%*j3uSbUE3X}bxwt?f;ABBENNf8Dt(=pLhKmcm9`6Cd~%j=i(4 z3`5SCaaQLK*f7BiXIS7l%XbU$HrLF9Yud}h1vynnJ5-W;xGx!0IDxJYun{@#2YL9hk-3;uQiizLW$id^5x!i^*J+%zHdPG2m2|T<#?z4Uh6U`ucdY z2G1j{UhQ&a{AqlzBs3lsk_gej~!-> ztz$mYDb%ulkEbyZDU$g9?i~c{%XnQtxy(DnU{ML7Ul0cXOgPV1u>5_Kx;B(yvd5RV z8j3!l0ZVAzQ=g|!(HmVCQ4O<9EJC_Qq1trP0)qKqhR_N**5Xk7QD*|V2rn4=+7$3L}BIGB{2I?24Ho{uKNxCM=nd_KM3ED>oPtRs3qC?QdP9^Nx87aV^PDxlVL{OZod; zcVvTY-m~XtzK5L zSCoHDkT4dKay6zbN%}>Ml<=^o+glhAu1jVSWuq}t%c)nDUJWYxRn=90XGg{2IymxS zm1o1;YzYUdz(Gm3kXbNBq!6lE17$y2M8LUqTIhx{Haq5FPJHN3fD*R{*hvdc5=7{;yy?d|?2cJu*-w%5e<50o@Jr7}CPbrPn zFE=Q7J8Vk&ZZ*Po_ui1X`7Ura9g1-uO`^@dk!x_TRJ2=;=-2sr(xu-D5fJHbFNt|2 zQ|Pb2CBl7JEvhS4+1VbxUn4Zz&r<;yecuE; ztEzW`aFvuO$#L^ddDl32*y~Ufjc!4j3O#}ln2w1T6yvVPd4ZAbTR6z4i7+3tux;7m zg%yIMYSywN!%x7Oh(NdQ;ak85;||t>8Kxx+2M__rYSAshPjiQ=JBh`#+hdxs`V!;f z`r^N=jPdumSi~VtgJ*{Hh7@mf>}w4VAVnXM1HG`VVjkes=llZY{9=zbgZ)fDwLauY zs^!K59CMt6)j^(bze;%-ExmO%xr2S46{8m%o_7)C?#QlG`m!lcl=hV-0jBbq3zfnn zn)@R?V~9J~(Pvu}Zvf3HPIO7f0mq6E8dk*mT%dFJ;4MIY=cqhmqObomKc(|csV~=& zyj=aEO`PE!2MH8YQt+V%yyC+H=hxN@LS7eHa>J~YczUnJrC`P{d~auS&1DUiU?XrA z?3Ut&$p-jpMuVWoE4`88Yi>Osw$;E|%}D3Lcn{!L{#758)zT03tQoJF0gJa$gjxz^ zM|}lXYSNyL2?tDeuBA?N|Ix#=BkHEK8HY2z5qhKG6I&ZK5*A^l_~Pmc#o_R z{TizwZrIf?`?y7{>l_!4;ULGEy`IaZ-mL3n%%HIwH+=EwEX*Ye#3bDq7X3HC9D*M0`3C5PRVSJV$kK*wI6q(7hMbG|kXAyz{8D7$@h0;S zFfkZK7i=^t`glrW>rbIDs0Us%%pAqt7Q!3y$uKwb)+jiQM%BW1xgM;*@Z5LYQDb|Q zaTHUfhsZ3^jx;v8mN3JxRa1c?%1KjaSSNA{>|(LCc_uSyvG50QaORYtd-LoN@So9* zePBj(1-J?qFe6MFyc4B-N<7r+$p@cZH`GWSkp)pR3ry{Ir|TCOlag@CAA&=zc$%lW zEgZO{pQrdGy-xK+o2H&^Rx6T5#M%@rk?l{yP{V+{9Ndn;H}bTzPX)Ut76Vku8GG_g zV(rMUo03H@XK_OOuakw}nOi&VYRb)f)Vh|rS$++|8-5zGF1z3&p??GnI`Rq{1*~Wh>(Vf%)$h{n$N>Y?hpKY?m3&8d8;|3 z@y6Y*>ekBGmzP z+ogWFw^0qkRLlLW#(u8T?Jow;vZl2utbTag4L*i{B82V^>}ESiHn2I=Q?eLovWGMK zo?CuL)Cr`ew?~hsvlq+j01O_8nQo0Sv;b&uU>@Vql-P6P?a4$;SOb{y>)BrU% zJF8@d*80ZJ4J<fc7>-IXGYth4Wg$>K0_E0=fH=KW|?x 
zGbb}SN;K3+jk45lO!!Jr&qGEo4guDPq4dO!h{=;5kKq*89AIOdLIvWZ8yk3ZJ@T8a z{Pu%hKc^U6Q_lHe$Gl%S%h(sW;zkGwF%h$f#fb3B8(t=3vB6?ujv4yeZW>C;9LE)1 z%`{8D{C15`*$%=31=$M@$m~)xQ3XCY{-y_3oVUICv@oTsq0yHIw~Sw3TzDxgIPd5!Q#b3y!iW;;*wKq7kuPOkb>RG^LTL z_sR>NX`Gs1eJG?PsizhoL&XpcB5rULwq5OK0a~i|Lq#wj&mqAsCO|pKPgUft%WsYp|D$SKUM>C`gYDN`RT844^Z4QUdujAz#5&J6w7}Xy1B-^ zx%K)cvPi7~&f0%=s1x>#A+*R|c0xjOF#^H<+1Vnd_P&fW4QSH8F=O%S_lWbE41${* z;?*p#?jknMIWM*zXlwSZJNW8F9uX~$r^j^&oIk986s%D>qmx^WHEF8;PDnlBElPP0 zFXF1O5ZWec^a`?Qdc#bw>sX|`tmpklLpmgL%^h`D>_NJN?^j*;DvzDzxoBr?g`zE; zu@~Mo2}?tQav_<^I`?L>r0{?1E#5&s9oB_`o`U_^f^>@=CWH(7TiTRViT%f} z$!qy(=6E5^3F7HSV_Agw)e*>aqc!jsSg9N&;f`SR>~+~cvjDL)ECqS;w9ZGP-RV!- zQ<26hfx+JtEC;-FNE)c;#G&U;>sdr_()dzh@#2q!gOD^2zZoun?|xF+moQQVlz-qf zXtfrxpLzvy2tM(jJ-c6p4QxEd<{wrg2jIfN!x=57Cu!7F7N2;Py0{`$ z%@5f0l-EWHs{1ePKKPJmz#*-i8qof9%xgcLssE*Tq7|RB-sllDgFFV6@URbHDy6tjrd1qg=o=5!X|Dfm3N#=m3aTzwHNL9?zU?*w`8lacC&ea zu1I&(S*p)0C9e5}7^-?`KrQC z!paIiGb5jSd~mK01CyRCidJaUJXAKJ`#FiIoZ)2KE5Y^7i#XL{ym&Evt~1w$Nw}31 zG7+FvQ+S8OCsdXrhYWhPT#z<4~#BC)JSQS9q1M_#8eU5TgFQQwfr z9@o_CN7U>>dBAg&`*$Ce2SBEhs*RJGzuB60S24wDLj>M%BYJIA5>K%77K@ zapN7(9V%x06P3^-!`{T#{N+1cy>$FMld0 z9nh&ur?DXqmrB4+TAukHw zQblxxDE6OhJZHSsUaR|%`2&U4d8v+u@h3f}(3_qL0niqBLSmB+g4$~dYP&rb*l&y! 
zR}Lz#zI0a;wAHt(P3o)r#E#Q@&WScETcW0k)hlD;Co5r~bJ|nl$08n~iVqxoQv3nJ zGMZcF`5N%xe!sUn!2mELVe&?5qQE~cq1?OU$UIu>tarG_hm(-zmYLf{r;;f{sREqP z*ZAb&*Vx$7w#Y#n6n6Z5kPwy?-X(R!{GPt{yv8sUY#{hVR7{UCDSH6s(n!+cu(ClY zXSLUut$=Em!!MpoxSg{eKj~m%xOsZwh84fG=t4%)qYRxCny=#S5T%imCmLuV~et?ub+LnTWE6I~5Lsotk0nF)qAy z9cB^)tsBMej7J<~7dA}(zJ+g5sY*?$41!dV!9?q#jChn$ss8OzNKNdHRSKxyIdOLk1_Yhg{gUudwahz}!v_ZU@gHr0mz#-Yr zq2n<11^Z`k0UYhLAXM`clZa&z6S*ay3!-7{E7D*%IPmx=iw*Lt1sf7sT6ns~7^Ku$ zt1}xxRF?Q7I+HCo+1}mMLlI|TObgEX#O4My2g4gbfnz#*QiTf+up#NmCP!^5e(qsY zL#;+}dwwWdU`{CxXMaUq0HpUWFyQ<7Z&7vfFnt_IpU!cnhQSG_{c3fidyZB64C<{(SyLx0wcUfWUWO5ERf#lhwyWI5S zs;dt{max)MWH8*hmimI~$KmsmbN!C{Ddv|u^awnWGu(?4C6;msBq^c^ra-$|wtUD- zPz>boNL0!Kk@dC9f2D^+!gmqriuV9;|$T98)TEUOo&) zhF`nHA9B2C{On_wrTL=eTf+$6kR3NH*XCLy%!h~wps$=QYjMS%+obZ~LN=%zC**Kk z$+X4-PVdE}yP*h1L)CIndj>TuU5^V-@5&ukSTY>mY^MBKrLovz5@f595$PkJ@xx!W zdTyYRk*o}R*a38v&a2r^m+OR=QL^j;S#@5PpHKK+K#^M;Tv?}MIr<*^Pqj^?VJsf) zC*=aoJzAJ(aY@Y$RP82X-*KweSaO78;tQ_K&5*fU6JU&~ZdLC|kkALQ5j)P@jheRi z29>8IXnsN2Po<|H^nR9sA?-d~9v;25RfINF;171kd$vFCxa-_6ff^j6>13aGxsC$x zpj?X*)n)3ehf~frqkhCMpc1^g&Ukd9iNuXGyV=R5O(LsQH=t?D%cI2Lz!!$HfuGZX zW4qAmEwru8;#axYYNVijMFdX5*e22K6F_MBN+#W?{b5r(h|~2qzoWe1StDv@2c5bq z1PRvW6yzbC;fs}*NmT5b2e*`7FU$oMKz_LVM@7_x6v(~2A>^7AwUaXu(pnCE4_dD? 
z8%AD6H5|%r51nbdZ%!r#cJ>7jqPc=Jel{^kS&ytVy|+aX@flYta;gO94+4jo!H!(_ zHNiTjYK1FONDg&dzoMAXnF+S@8R2suzBJWCrygJsdiEr-YK3WbejGCO+XVrGZ~&_0 zD*XmUs%1N-+ig45)=Jod?U%7F*%Q{sr8@OiFU_9Q1k`QR|C)QWsI)&w^vELxFhx$e zoR+-nw4ujoLQS74D4^o*Z1F7%8MRc5A^)pJfxYnY(o^duus_mNmGH6k-I+7ftkgRS zoM1#)Y;#ta9_l_@I3>rQ$*{+aBNu*@Z zCJFkNPkqD3)9B-^cd1}pO~3^o+nu*OO76u7a;&mp+%n3;gOH}VXth}seX6}L@h}?< zO*B2I2V21=Sxo0?s_?~}c)dk+N!8_v{AHP6{3UmiTrZ+?v*N@a6XZgl_%^8mgSKZK zyjcUR#;zr-A*gt+CoWA66vfhEsaJhlpxXkcPoK>%Y%=-BhJAjD`N6poYznCp-VGUm z-%&N<;h)alYEigyodeXLDx!?bt^buaZ>}O^yEmn3lIC$RiP&I@9a915tt`*xo-Qhl zZ{&gxtS8Mai$?F?iui5|{I<=Lm0JWvKo(|GA&5;vPJZfvoXBI=YenD9Ly}FW0NR=R z*CLJ>LOMmMzndr$I#gEPN6`}CUkCzevy%fnWDsob741^uj~__=1&r!_ z`pqP3)6y0YND93Oo?uVtR2D1HB}X&N46>9@42ZRpWUkR9ntl%w;|_3lE*A&{W$#!( zG0$Ee3vXHiz>5NBm1VVHvYN3E)`>l5-hB26o{*@BsKNps-uypIU4>tiQL|neq(LN9 zx*HK`77?VoyIbk5ML}9xNl1VOqRX?AH?cJI4>-~HYD5A1u+nR8~IdFGi} z2qWma!Gr^Z>)j;z>Nv?P%B{_1e?v*$ta54(k?-y!$`#C0yJ3>4%kJwrnVhca0ptHLxX6~|Io6!RZM7RLILundItk@mhW{d7Z##U@ z%<+;3#BTI-2DNH`jkD;`8mm@dghF^!vwFYZ)qR3!oPJjU9uTAb<@`}ZN9o59E3Arg zUwure8K6~5(INQeY|Gk%G!N3noAk?7xIw+ z#A;q#bzP1PLeA$vK@@z9cIoJkq9Ed@8YB%9tE?D#h2&U;Koi6QR&B)dzT251w$@2a zh|9R!=d(StL#C33qmd1v(bkfp9>Qqrc7fd09KT&kt{|S;uQw$$d2c8zl;`LHCY-E^ zPJR)^LUd4<-7tgZ+1U*TgCgvZz?hoLLkgFDz{@#w0XN*tGPiB1N(N5;;gM}6AjD38 zM97M5&WN)%6Z`rhh`a5hhXq>y;A~N!!B-P@Eqse-9+QK5!wArOy{KY=@;~6HoY#1@@p0R3~wN> z#h^FI>7D}Apu;CFpB44p3&l@*r(A3W&}UfApIBdy3Oe|k>rc4GUUSVm-1Q4#@YH?v zulAEMbzco$vMq4S4mZH+`CIkfW@@^bHB-j@5j=+z2 z5fATtzAaZ`>d)XktL_1Ro>hFlP|S$z2=p>aJuPHre4_r z;4k&)Ks>8Hyh^&_nWeytpjJF%gF6EJP8h*~$x9~az|lu*eLN_{*>$GQ?cD{I7Bdjs zVSib_d+kp5i%2R1>Su;$XIK?ytq}{^Z!7;v3EEEj=Y$LWjO;YZR+hynCJTO&j8}2b zp$T;dC>d%7{@@uDgB+0oSk+%ig_2+z9}!Yg+(ub!)E<=*xUK@U@2+SAW>~gi67Zgr#`@{>&@+cJW7(^dvvlI z7)=1u;O9pekzY1`&p{((N4Ta*F&m*1!@b;i8)$DS-z|uvWrZ0u-PHmwiCpSz{Pj4I zP=+Y~Fyf_g?`;>=9p&BYg=rv?Tpp%@7AJYo^gbA4R6c0JGNK3;8ZCNadq?+V875he zMSdnZXBcCnFKmY=Ry1oielo%^BVV%7DLpvGxH{gUX(}+R*;T9q1dv-fD`02sL^+th 
zSrOlUkz8=6yv1<4%$)SYlAr+uy_x!{@;o=Oy#e?91KF1(5SuG~93fWXL$jBMEkBHe zitKkxUj74-YF%+g1a;!BvOop;uDy0*x9-auAbm3A7W*Mb=`Mt|(o}G?UFS6OVP*{8 z`PA09X=wG4uwyg6gA6CT{)^EuyLrB>dqT(4;wnzh0FTU#H-O zJIWLR24kfV-x@vx@n-zrrc!1Bad~PYFlOexPVx1M6ET#H9M}2L-D><&TU) z3hCM1CLRU<%g&Wm&d4^p2Vy&Th6BHWxh@~_UM$F|!Y5+7@*RU008$uWtby?~wJnT` zRvs*ja|e?6baU2X!SIwF5WoN5L>3piQUf$@;{8sfQ~2O8AqoYDzT_?j?)>!#$nlUS1H?4uCEtXqR9E9Qv?JhfC_n2j zR9+N~nk)eoyDvJpD>w3Fm~m^JfRRroy-yvY0CiCn_P=@tLL#X8HhPk*e$VUcU#q}a z@K9^p;7Pwp%@IXk$AN{~^2TUI>=Zr7kct-fyfw-aDU^8a*(s_+y>n!czkngBIy%;k zb1CmduK(jqV+M?XYq-tUyM~*7e6$XU8isFd1j(Cao}jx-2+#=}5`F-e zX8ioG5++EnJz!N22;K8%5%s7fWf8}j4^PD?Bg0cU7bnpMwYi;tTEk*6-*7`+d+LDB zVci({Vk(sqsw8jw?n)cyioKX9xpY&Kmv?yZhk_mf^V>@v0{rHs@QbqK94d#{>rP$h zM&F(IB^~kQK~5DXoQ(FY``v_(q1Quw?X_(cclM;=wBPEEWJmx{4FK@vG0gMy!?isx ziT>wV@r@H}1!M;@W}{ACA+-sYw*c0Y)rdAJdzjrQ(a8cd;grfdc0RWCMIMkgHlL2% zr{gRNE*~D)e7bH<81^X^2IORhh)Kn_C{+*8S*eU(&Wt|Xh$g#~C$@2gCUN#}$c6J` zV!F5;%@3!iaS1Vmz(7PHpnmAZ{|vv{jeA=_ zku{-d-}FPj)`;PlW#-s9>ayry6h#7CJ)r;^f#V+{;t_WW`xgsfH`3g&&IEfHMHFRh z`<%Q+?b`ABi_y3gL=evZ|u+>gEs9`#Xm0z8U?V9JSSn%|B zKEmd@o0Kd30<7BPZXM;*J$E*i{Xa3OPmPtdYdyyKxT{(|1zoIr!p=dKO#gr}W@vG}6WseRi(gZi@yo zG8^baN&$K4_^tiK(TQ~$*9Z-y(>8T{&=!W&Tn-7&GheTFDV!RAsssXtRM6IW-G+XD zbB>50JUCOIN+OOvX&huIetv;@y43?KVE+tndbJIfoGfZx%Dx~gq8Jl=f$BYnE&)n@BX{&%k; z*6&4Ki);3ZZ_t=q>f(=R?L4Kj1{|c75C3M+nj)o8oDMS?sjd_sz&?wYot%k3ALA{~ zTssTM==#oeasMHvnF8qff2L*tWRurS_2wwVlJ>H~4g=(+L@(RhqEwIl$TR&nK+Did z{%c1)-$Zp`KJBw#(XVg4sAd;#ye@|wb3cwUjdBB;o$7c+jqfWHxM4Q711s^E}G(){a=I#uQ0V!B8CSm_HkkblyqoVvt-h%xIgjip5;<5jlQ1mf`&Ouyy z*jl0}>Y{EWNv-K3I0Q$2(bka+GNr+pJWgA3i*SP?-#I*ZI8~*ijT*u^59E`Vx}ldT zAe*XCpm!aO6cp7=jVgu6o1>yu*{L0LuAoHlplYBqMHv`KqL;UCnT)E2elKWu4eD3t z_*wVoAwnvuSDLcODuGrg#A)QL02?&*mRn+OrDP9vP$L>3{*iLuh9VHXZ+8?bg519n z%ESEf;sG$MLXvkCyP1x5EXeeZT=4J&!>?!nl~qyDw!rz!^;~~s?*xXaZKzj(M(=(z z_nqo)1Cp0!cJ=E(8kv;v&NN<*1b=v;f#BnDIT5KhBWL2zIu70nL47SH)0=&Lf1e|M z$}U;>Vv?Xfhv7@_Ye{s8`<*V$c}+=&um6D@O~^o-adqC}t1`d%v39tGq33H-xw 
z#M?d)`-~4tUSQ?wUD16f{P~=k;VTuK|eIQDP2n5O`eqTJoIcLNp=T{sL&baV>{Jv&&(e&9roxViKNVZtj zl9?ahHj9^`bq(><{Cl>6&rl%Z=6E!+`C52r1z(=WwSXxEXkNttEMi^M#;$KiC6s?W zB9MLOm3324^;nPSg`+@_u0OzhtmtB*@#b`X+gu%pBJHh(2mq4UfNHwsc*nc5)~83` zd^}k8Unx_*J#j6vui~`D#a#{p!2ToRpF;y%A7-r_~SwjqXo{L`P(1Zz0)ZEB~ z>YGL72-f`)czNy_StVZ2y6>rTdI5=YH<@}0WsiQ6KII?sSYKNymOfohi=%=+tC2cg zm!Ag76%?Tfp0<+hG}CpZ?YZXfx$XpPPdWwi0jX%=(LFOu`=uCfb3)>&%ST-90^qBR z*Tqmt)G8VY?S!}x>>^PQMR!KfL1WPP+u;AS#KvWwef1yxkv@MmT7>-+vyG{b?mqBU zR{2mYP=PIv_6i_nvXIh`qEGPWQ5oHTJKCA>&8({ZLnDh0`MF(SHLZEH#~Wd&eD-qB z+Y=4)H-+f^5j!i!K(K#+=XwhaVbcX3MBvR?t~!Dj zuY!#gGv3EAvZ5`;lIW3jdsiYYmG0^4A+;KKEV?QG{q>C=%&yXM7eoA@Vl z>!sQ%%Z+yn{f_Ssn(>qCS7iyyOW1+eu~gw~Ohn0- z_y5ku5to6TBGJL0waSL*)Iqh_zhxrce!6n_p29d0Yxq(LJ=LA|=(0G_7v3xcrrQgC zlW9e3O?R2A?=S*^(jR=K+}Hs_5_Wxbou(^G;t1Ss^YT{(RJs<3`#y`*&n7IuBHdX- z63uQ-Xe{rn+%<}0t-!lnN$4~98=CKZ!XHQROBvI!{Pi&&^=(UDcRn*Nt2!F*b^eLT zdvCN6Ku=5PU++Zl%Rd?I(VxK{Q(IK$4*MF<+GU0N$7^Ui3IRmqI56OyKw|Qt#QDk+zXPVri^?&$h)VV9k&C_ys~f z4c0+j_IEMjxuYbw*SBU)dTAet4KQwXI#sJOK-)Tj&D4q5v-qon1f=#Yl1i#_)@5cY zvhscVj(G6P>d8Y7MtZtCCT1x7!`6!=D>|otUit~6f=@i44Y073N9yh;m_WL_kzWy%QLQY2as08i6lk;3Z z3UGZ!LJ>O>0!_=~TWWh38~L1NL!CB#{KJFz(5y)P@IHk5r5u!28|c^|H+Or!APDV%~=dL(%fgO^u_qofJhQ> zn{q;q=%vX{l5!I)M8B-Quu2`lUkgco|NHIg)UdgFcyQyDuIy>3xX{pGg3jM6l5qmR ze|OFA<7E~RNA7@*hAQHEoDctNY8y$_jo+y=WwAGB=E(@V0Q2FG7w?@0X-P7rMkm8E z!p)VlRcBU$z+5@Z^w`Q)gLy-^u55vfXmd#-P+=KFx?X?BCxYV}=ZmMJ@|;=X2U|al ztIVj9I~Z=r8N_btUfz(C(u&?W{lb-=K3tQc%lOeLBO$mpQ6}(wH(H@5c^>hD9Hhzv zfeR_4b8ztWI-9MmGTKk?aXkHY6Ef2Wm3XbIy`8r-~6)5HY{iGUhblMow-im$y$MN+yKtcACEh;Zl82tfN`<|}Qb>TvHN zx{%=R@!A0L(5S;caMBJ%^XLCt&`#e2#DU6j)J*(e#=baaCR*u>adXu(jdUAP@bB}0mdh@UCN~~|VqU&PHj)FNn zv`8CB>vu4-Gmo6F%&%Jr$*UpZc+#*_w5FYei{@zc2Ly)~ZUghK$YvAVuA*Yq6fjh< zX24}O6Er&Sx5neBWr3M<3+{C`%vB_g_q#ZgUGk|LeEPe?(d8N7)F5v=x(>HK_(kdK zw~>mM4<<-XwPJc?ZVQ+k;8_M;hHMCcI+)#N{(jy?a8`!HB5O9Qibm*O@B9+J0ArJQ znCbG7+r0g%jg0i;84%p-CLI^a9V}QT^FVjwu(J@I-Xeb0_&ahkM|6Y1%-T;5Z_L6l 
z^pnn&01n3JS?!&qN!QQqPOFO;@$l>D>s#AD+v@N)TPv;yYM=G*rlE)QB953SB5|E`ztx_XN#Xc-}&jv$5_Mc7Y0`#zkK@ps*G%|<=q5|beuXt)dV#i zc=4H=e#6$n#}3RO%9R%haLX;YmXijeT20!|=$2>0z4r-2AiXKdZq)zm)_>M((w|^M z-N8-u^=lT~T`#vvZTHT`On-W8;&SpKpPlGL1ap3vlVZL6O~3j;YOsHhOyB-U$0vVZ zld@-sYHb#&J7NXDOEiUR@>p{zjp!#B=o8Vo! z-K9v5O_<-*R(p~c#^UQaialjLmfSRpXuk&{u%3i{W{bX*qLI8P|47%rpTK6^4%0LC z4cqTUJyoX78FKwkRRY|41rh4mIGa*8YR!r7VFU7@YRu5T+$Z6pdRq@FWAO~_KYAoV zTHboh=Lg~qhkanIMtFk;Lnc~`F0mOAdpOdttylE^CQmma>E8byp$jcFmeLp_LRNX@ zrrT{dZ==n~CG2=>{#JYuC2zWtlJPx8BHykZEE`0k8rf4dez(19u@HULJ!O3o=2#9c ziNi%06o~xFqW}Ceo$=SU?GEqb-U8l}#Z1#wMX49qbe6U%bOeu{h^^Y(^b7`SVEy#T zon=Dqdvvdd-66EJNlyc{FMtq2iwYrQZ3e`#_FK{NI;qHVTZXkW^`&pk*OCHIJ|YhB zTW(!%qbx$7jvO=(Gp|zp+y244>&#{xy3oDivJ{ZSiy-<0vJjJ^V?Td_?P=;9B~>2O zEtD{Lc5@J&ql?exN0r@^EaFUM;CA|^U|U^wbiz1bn>gkDpdy2iF-9Hd!6Hp_;VtT@ zXe()6+0)MWOR%tkbW)H7OBQl5c^DlDL1SA|pKwn4xYQ~5Ksd3o^vDmf6C1*M2-zq) zOT{inEwa$Q^KfG2O$A+Y@MAmU>j=Brml95AyoJPQ_-y_(+zHwI^K>>xk~1Zkpbpl( zN$YmlKsq{WFQkeWcWYqJF$VDfI+}_?V^Nd?wi2wq^g7ANX&cKAbBnLBPHvkHGYtJV zR3C{LW@EkRq()25gDhFW^b;`Qm+x-fL&Z9-n>=#uTRFd;{USG_>`c>mMHDsb?a!sZ24CV&e~4Re^L%7R)+ zWLyp9M0&~LvTPwuldENV>oD=q1FKJzvNw7cx!u~>3Asb&3tn!xR=}e(uFg7z$U)pi zhxh563j<%1MO`A(R!jgZR(Zg@PTx&C_$jbVc9;D+Bde0Wa!oV!- z`S1ja!smYtA)bSIQKIS2DWM$lH6n0Ve%tZ8G6z0<(Im{nC(|8KPX9cic={9E+M*lk zm4}WBSNpQk&a%?0`|rZfhzVRsp9qu;oCuMlRsGHfJn*9}RJ`5p^XEq}u8J;=lk6`O zH8*S_MZg9e&|1ytwu%~Yqyc;kDeI0s7`*<5>}K-}jaDE0Kx3qnwI~Fd|12$4VmV=- zvH0r~lB%QA4)rS>|8)lG4aY(-DV5Nd+lj6dr7&fjy5onBdGRFrkw!qcH~i6b1s-ij zeSQP=`JL!yf{zXTv$C`lMaf_r8!Vsx$eE_&TwT0vVm5f??PvRE09FXtUZsJtS-qZu z*le}$lyd(&?Z=KC^nA~$%!La0NENcTaB9`fXIs5UUiTg=-;#&6tgn)ufn@FtzVECc z8O-pZfU#)+e~bT-M=@b+PuJ?fP0uWZd^arp<*z)c>IZ7qVX#$gDyOLhL|Q7uJ2;fs zn>@7OB&^h6^bBqgOa%GpRq^e|zm0~dL9wycUp;?~8HHED$Sta%NFKJdSml*Q)SuH_ z07hdsxF{DxBdKQn9Eb{`y#>0~4x{Am)Jh)c)wbx$P7iKgsnu2ow5+}}whAhxGuh3% zdT;GR=;H~$Y)^(kKDI<8GBzD`%CWM&32Qex8I>}SH|gk_a?Cc`K^B`G?fdo 
z4u-txU@XN@5!~W&oOt|NPPf*YM&kJzeDvL3X`7vizcRwW9h$szNB)&+gkV79?Fk)@V?~=5;ZG>@?-rHq@~q_h&R6%wN`+%r^ZsNN^6e#?W$lo*)Uyyn(!t& z6NHL%R_gaBi0bIY+v1gXr!T$IIXzP`HW~<5M*LpPvD2W*!ij*dr4(Vk`!JigsVLo> zI|7(5$!ylS5j%~LZsj1@^B74|^#KJ{Kl)pJKgML(gC**H0>^nF)jZ~?pZ@qS|p|KGm2(KH-=L7hc-Wk%Y-kCLf#!LLMa0;K54LK~{WQD}BPEkQi0QS>1kZ+Xz^EhtmeHwu$pXE5u{ z7|KxGms)q>Z(h(;8VrQ_UGk|kVW3b#0wdL@q@VdVLGz2yhSA9aj2J&ooPDfvfJ>)J1x&?eunQJ&Q`PW0nn69FD zKf@2dW-+g+IA8vYa{oTQ8!gGLR1dgIoey)S{?4d4=I81RC+$jFO_}xtsVVJ-)$gXI zCW7NK?Jq_k!}`b>14ZcTjW@6{VClQ}6tlYCc24s1_OIUH*J0_^VyZ_hX(*abiUsJt zXRCW=?7(wx<#+ef)m|*EJvl=@@F3pnDm^<_bO(vYHN*kTMQPQe-Jh!Br?SsgLN2~!e@u7WzMqzFF-(i)1us`L*-F`d7 zfgZx{h6Fx!TDlAS_-r3>FxajV(6TS?0D~?%t8CrT(+$73oIrysnOu z>np#zlnCc@oSL7!^j>GAA5GDFisL%UzT>Qo96NkRT^a(^rIFb5cR!9>L_N0vKT4EM z`x~k6m7y|Xb^^4-|sOo|nQcNAIS)PLa7+x-~-N9P2mY&uYTrlmO5^U27KA z`cBgND0VGJTxz%X4_O86fmsNLOThf9VX4yeS2+;3naNWt0nlbYTGuAp?*=Z=(IN)L zBM+*}2F-}Raka+nysP>k@{_R=Mz%jUGF*G@Vw|tEZs6NvL{oo>8RgENH{uehtF>zy zlSE05n%w;#;6-*zZOPYfC+OpB8gj-MDI)l3c?h%=oW#a@I$7C=6uSVYp~?xN%3s}A z2Ssf|7$q{4*s06Bci!e)QTnL9Aa*s6YOnfqDz;6uJ=jS;&dE1!T%v_f_gV%W^ePg) z%WC~6Tcp&@h;xS;q6H`Z1x4)bGoj1b!5!lv+SiUV5npa^T2l_OHY6&i@4MUb+l7FI znqO>_d!u*R7k+j|x$B;P8P7GCF~LXq{1eUxckzkVTA?j<*|9`kKY^V0j`!t*DQK3q zT@>T~tSOya(TH^~UH$Ba*^d(B8)*Ct75pqnNcsD(!XbGJ| zn98{q&fYgYQEmBwSKzmOI27(}MmNQ`25Id@ZuToPeyH?SRECSA?SYisdz`zf6PukSV(%4QP5q@P)@I9cNFr#nPF*dfh=^l{d% z&$?=NmjE~3c9-WPb1>Gq76ucU%mF{Sg^0{5I_{4>VWkI0i0aW&9cZ+P4HmH*JFqCj zYPl%``6=d3NKQJxxn#NIk1rVAjPM^tF^RnDHp5u60|9nm! 
zDu9@EA_M7CjaLGk3a~RJ)XwbMM0g8y^5B+FGA4RTG(&gF9&5B&VP?>+MWtg!6rg#(>y0)v8z0lXb|3X1HazIn(kzoj=`Zd`T`;eScK9Np&t0oI5L5<5_QN z89`i^I+uu>H*su@|A^p-)m*GleZF6zDmh0Ht#DDUqwq7+?a+*hSK2nV@knwTR9yR` zQsR(VTnxp#8M$IsQ3d3q=`kB&${QrrOd**WIgkZ6KNPN0v9_X%Q%nKpYM$$66=THgu83ZJBf%YSBW>tW-pI`HxTU0wDMTi1;p4KM{OLMyp*=GIaA7VOz-od3F+MV=0$hmGin~`JA3@ zx=5W(hZe!|QjwNI#DEz+ubl|J1>%i?r!M7`2;X#t7_f{X{#J^7F)j5WWB$q2C$J50 zq}*?hEKfgsJ{jqgz+OR&GfIpocPN)U0=FG>K7Sb6PPbMIxaky8zM1=1Ny1y3R$9-B z+C(a~6_x9o^4qLa#1zWYZ!S`~hG63(1-&`oOyAiB8n}fm7SW<{*$83JWY5J%^b9Ma zeprhUp0rA{3p)QdWKLFLs4i2mDbg8rVTA;N3+sp>QDNPwhN!m3JF?kAvRUB0^YT|H z9|2(3PYMQ$ySR|T-dwaT1;WFxMmbLmTD-^4hcTKq%i3?#>=t)TPQuxPpbxq+W<_rA2+HGy%#6RTvqdJyao zD=Qpo&4%Os+acTLYiKKi85Cc0KK!F)TRq%kpmog#4j$Q%;GP;I3;vRZ6ibGi&HduVxdIp8Q2_Y4;E zZYFhBSX*pE@IXJF+91EKKhFvlxvFpB-p-14J)Bgk27=vJoJ(Qze}Y#PT^|I|ekGUv z!2}|MUdf{K%Xekk{y&Sho%^obL}rX=Bubr@@fBb4{z>FYC=E!X`}XV;ykYYgE`8Il z*si3FdFe-O`HF(@AZxAUshj9>uE0@jZm*YShR5FGW{c@VD_RV7y^wjKVi_K!_)rG? zttumg5n+Vny_UK%t#+G>+*k5-yIE7NWd0&FM#p`LQFT%^bOyx1TuBZ#2xCa8KY?4{ zLYdEe5zecZ6ad9VOgO0uN+uWDA|64H1g-&wk ziz>H0lG7JSuUZP7`e7j%%hcwvdDL@qrOi2dT*wpF&nAd1O;84?D=AcG9WtGOA@$hF z7+BqjVTGU+9kblp7sEZ`;|+Pun;t(kB@lBk&y+W1NW%9O{=un}&TY?E=kf7!c6N?~ zd2_TPDt|9=R6y0TU#JdVuLz-#U{(@<1nU`cqnz^vFhU^B_PX7_d9{A!pl8MiE64GX zE@i>k{A$tj4TtGo$NzqicYSr=U%E}>i;R52&H$m(HE+lNzN^cIreC|F-3KkBqwBVT z?&1y%&xk;Le~ND^@4+H|3=5Hkc1aHdY5eN9=un^-xj!vTT#%!3JBQbQ^pKAJvT^z| zNOpJYU=S#wzP>ggt}4YL**=&o6aq-tf*1eN+lVt+lYSpq_zwbbWbz*ffN0AphVt@g-+zKM~$%8s!@khZF4f+Ap6yFHqxH$BM6@l20~MtTx^ z9aJtVZ~BiA{)vZR^)z>p7^Gh(EP#mibb;oq%)cY8aqi2PDLg z-F^Y^8kbV^=Gfd@DiLr=P>)w{(Lr=ry-@D0-oTv@9ZEUnJn$jawMQ>=NBa&_Xc7|# z7lew>i7d3>h+OX z1MS7Xj9KU-<-yS!O?gA=@t_Iac1tfPam~?C@tB!RlBaxy_oqwSb++G+EGiL7+NS$Bv*kEV?jd{uWV^dS*kg9iCvrt z#%C!eMm@aIH8en=SUE-DKG?q{5}kIU zO;r?4k)}SA>WN@5}v2k@Vd0jU5EXL+xtm*~g zPPFf_0_M}_QX^PN4D`Z5CH=S-JG;hRKLaVmHuTdm5zN=(lM*1X=Lx``IR0amvLY!D z#zYEDDc58sEkG%ua@{$F3yNJy^OCkYpE||x25F>$A$OsD7Pz7kV2T0_Hc*q+JEv87 
z)c3_>owpOzG0&?t@LL+jR*Sn|TwAAL(iv-Hrxf;X1Akq*wYMidW(9D0{A0e=sRKz{ zd55-!Xjg*#eRn6iwT~4Z4@_SZ6sw)02eQ_MD$KRKKs&9$Xc0CK1*-ZaEmy|vc+#?m zZO;3jKSYs>3s45Wc>#vg@p? zz4sPAx4*{XwI z(^ZQ&W%MsYJY<-8k?-X}y-!+7A(-@#ZPQ8IEZz$)kyKb{B=R6j|WfbrO*sxMFva1=sqKerjy-Ws_o4byed9!o@xx1ZLdSB=M} zHp8L8AT2Zz3&6SdG#hI0Kt!nB4zeMobnGbIMJcbphLU-2weUqg7w=u0w-K3WmC%wG%AqrOu!O#6v5S-guj*I)@1{s&!~It0e2=7U~W|&Z$>xA>2Jj|{+ew(4Dz@r2qmvUFKjN_k!ta5 z&0RB9l)EV(!Q!i;#ru*4XV~R)7xp9ibSm*ZORC!Bg1nSw?WztmcTcP|*y(W>xGx64 zLoB+|a)l6~vS3T+yiSy>nNFF(Bq}SWR(xlqwgh?o{apr&tTO&H0IR&* z-TYko)V3%Y**68sae~a}zv_MmC87t*oE?K5^`eXr{K>ph&qRnpDaa83U&m0t3|7Vb$ z=%vNrN=ch*Z_KI(V_2bb?=Qp8wzil<6K914n%>92^v^6Bx<*5QM+=9LK63Mw{K|dW zE}8#{MzQNdcR)($3^ye_z`6lmW!s6rAqm!-trxCu3U1J)A>xM*l7D+( ziINU>jjfkxf|^@|^5E+x82Kdd=RZ3o7CSw$ZfdCFWcfy5cS%KoJ4o&pVFl`C^G(4g z;0(f!tQCyTyYURtl9~@bWe#Z(a7&vdKU+Uci~eh_ZV`ELvNX_VzDq55Wi2P2Uo)g)~0Xu#Qg|62ZS+B1>Gp{qDwg`g)>**lvnTl z@~@Qzu~(j+Ut9-3)w|}b0JV?k(VfQYf_EVuDsWrqmgQXb&#st@%B?#13c}%mg}CBn zYOg=?@r>qF8H!QGKTyehKVrO6PMPR?XwV6_%x9n_4qfj-%Mc}h*#DYi{MTnK>;qDV z@#xa;aSr;|N*ix#McG`FMJy0H?|_llbmG+vqd4iveO%R>*qcj>^^0OLysr0o-y-Be z$y%-@+1JGpy>(JBIt+>lqQk>I3r#`SwW?&c7+{W1 zO$P80nkX&6?UVX4i92VHr(hL~==e?og~Nk&(35%;DaaYx^k4-`Sw6Ug0n+@G|I^qi zG@3{&dPeC{o8PpC+H}oejcvjt3oHiyf%9VWCF3JrOjsPj7bPWhX>M$K2DH%n=b+j; zC=CV+K4nRWa(t=8!xy#OY#FA7zu2Vc@ITD-b&emE@o$I!`DqicRtOep>trmuZ9&GD zF2D2B(Epg!8dn(W4bbkh(mWu8C+u%-ir8e)N&4+gpLs(*__3m%TU0TEe8k$uQQg6a zY*P2ZHZdJemna$zqkl$yhL~bkGQ(5b$!&(I6!1+$wbd8Bc?<1X!fT<)8nTp(fZ+SD zP|5ASbQYxxGc@1HQ1~G|{pv-Y9v3AXR(I6}4@C|Q&QgxS`IOv>;^5Qhy&=UYDt=Rv zO6)7EO%C(n7nA30>f4ms_ai=$5vjBvsq}pUj(cm#lQyAq0)i!P?lG%s;g&8Q2CmX{ zk!@ObL-gg0%?!z<=~*&2yOCIX^}>8sRJ_RhJTFjb3Kk7a{mBUyjBfCP6@(8-Irs#w zAfdY57Vysq)zwYGRA6GFKFLH$j>u7E%HBU~eIA+VgLt4gRqcQ@(iC*dCoz{g3Vtd% z;#CA4-Z$lPFF84f&3_qeRjo)5PQe(_k15n28MLJfNzrKdGZ_~0KJsEh8&t;+4N`A? 
z^a)i*bU@h8z*siKp`s*Yh%x=^WYU1rjO0Vl?hbeM)FSG86+Bw?{ZIH0Fzaiy zpY5V*|5Rm|Kr)UEeeRd%_9K{Nz!p)tY?IZW2X*_^bg@iI`AJz?x(MgiSo z;!7WcnwE$Cw_GsElIG>rMt*7vIP>boB7U2yg{G1jBsQnJG3f|@Q-a$&&Ftp@hO@o< z2ww$_`Q^ER7e$cDmn0GgEdSYV*a62{mA1fe`QxHg*BBTFa5*P3N8XM6j5Q?MWzA5U zi7E!Ku(pJHYrgW@%9H5EIm|tb8-c8bt6d|9UcgewQw{h zecZT|Y$1UAjZpb9eeTQLxgM(@jZfCr&e4lrDqVgUb-^|bh;f|%lcrYor1(91cEN%szjNcjjOO;K zE7wh3l$H8J2cEw9_`ac*3?Q9sOWuC*0L>kDE;;;8u?nP zGDbyg2!VIS7o|W_b-|E$xA#n7&ZC3R;vb1G)%VNNkg!1pvDv1MUBp%^@-Xik_IAR+ zOp1rWgA4_9yFgLF(iw9Gvx9$fFJM_w_B9#_HmNvFk`&?a@tKDNVhoqswzm!Gdnu~e zt3Ng@XXsg`0UCE+#+6>xO52F2nri$VfO~+tAV-_CYZQF_$IY9LPj40#T7NED#&7qJ zRTsw)EYh3|SMYKgTpHZn^5Q)uAX4$t8G!_clSI{Df)qZ5)9@fA4v)cS{aigTwg>1p zt)=p4j970NC;)olzpBTg_2`yI&gZI^i{!@)Y+LG>aO6UbO8{G1_nhdw+ymzeZB@kn zEH^%OZQM#~Iz}c`J}w1)rO!Nj$H2qN17{DKB_i0MJ#+zJU$^&*i+;=A@Ezu!#(r_Z zh5{8#ws%fL5LGYSj{dwvWTOT#J*k)z++>oOVX#x*r5;eAncUQ=0oAo9$HAueRH3Up z@n!cE8x2qX)oA}fH?#Tc;$|`6; zn;XgJWDjYHTiAmd*|*+bK8_`;h>%sguaGocTOK}mikloZmo-8ac?rUI?QlqhTlVI) zm>0vLl4y47Z88fq*UFq%z>|IUFE3LWA#W9Y1k@e9A~G%ex&)a4Duq`7#7(`>x8~6G zL(}wI6(zB*rYeG_0Bx_Goe8J6K==fx@ype+70BFqqRXSt#QiOsN<>z4(6eL|a6l~C z5(C$Fu}A3omtKLVI3RFRX{Y{=3#6dS1f7#Alm-@6OwY^((!r?Mi{DF(RQ*Anm$jaX zz_i2qVn`01RR%Cs?OI99>t8rrRsY=0C=V=HF?xKH(8X$!48vd1_z=c*;)a-TM8vACQtNcI5d@RTFU!kDYK~DV z^lmN_TJ#mWK&|gCpp8u7lTW>d4RJbHTAP}glyzPM4miIok%qziH@>qbnVXJS9==Z< zQsGd%P~p`nE{H^9b_H`4Chmppg+L_WavmvRBjl?WIe4N z6JQ0R6GIh!+uYwqMJJw2m(?0?s9p3!q?HkmUpg z9lV4=Z*|2|KpuL!ZZ;dO`cCHNPYeAV z%6lC(mz-~Js-A3wvJ9mCfUR~=r-Mn2&*RYlr?T%1YqAO24IQL}DhLPy0wPsW0Vx4# zB2APk5ITq;y|>Vj4kAUVf+D?2FQG~&iqdPS(gR2j$@hTzzVCO=k8_>pH*9uiXJ_u6 zxo7qVC+z6#Mr+Qu3$?7E0P*a#?wv~BruaEMCC=kQyi-4=j94`#llwgYq5n&R)F)?8Ai+G< za;jfv_o{d!E9uCD@2ai7z`XSco<9POEQ9C)VTwi^V+}mOi^fdAhMylV6ym>rZ6Buq zim02yVDEIDYezSbf1)ha^KfLj$GoA2scKde7#_|21@t6;qK{ep803%{u;EYP%7xoA zla$EKevXl>pGqcN&3=c7YSVcK!MZ@XFK7er&nUY65m%Mq7S@ z!;Hj-sO+%A@#-vyQ)L35LMr}ZwkMuEvN_WYiVXO1F# zPn~LXz6djpNh;W1n10m!jRO`P9{3F!p#t*`+Q&nweM`s(jH&8@j!ap^rQOlFVqzTm 
zI;lBTC-=%EdLXiL=mDjd56ILie}7mO+?RLalk#%>y*jghHPCF&6=W@?-3bs?nC@>% z_%%)e#w*R6wB@zpejnrzNUbI4pgg2o#Re z@@GR8L!jgd9Y){*D0+QED;3+)Q3{Z~gxGB4Yz_opg)PQvNyF1UGI z)r^I2$EgScQGl{h`E{}gcm0rV&|%hFwZz(0iK~IOGv13&0Ixp+2{dU1q^pzn!H}_~ z5#R5*g;Cwink?4JYF%e=2uSzRh)ygTk&^EZEaV(VFNw5eyd`3V@LC4igJTU zBeKm$H9oZozib0BiMWgY4d7+`mWiLP+fK=g7CcxlOZ6*70uKp}6FfO1@%(N5Ab?-h z(1rpbp_u`(^qX@E%C3^l@3zhKKD(=UbJD`4jsXzQGg1?;&4+kK72d;o$z4Ncq_}<& zhD>Bm!|GFLLkbeH=%=GFdgbNxXyUa~XVJ{!BIP6U*RaiIkUIO(#?*7RPqOC{wZa9$ zPC*{UZIL@=5geC@R{IZDZY45tyCg#&QWOZ2_{WmQMB*2GY)=Q+zlp@;#Vp-de>5+e z{_?-Q>h$8JM6kVN+B(4po{~55fOKOH86)?e-tT?V#1P7Lm~VBs%`n{@K?y%s4`+~d zbCQ?)x2U}`4@w@FnMd9COiqZKNI<}ma+~(ky#DC50Ws^3J$JK^S%zbq=1?uVFIM)E zmn$0HIRjTjS9qLMv`nM+!od7}y+{eZk}GT1X`1+X4?^m77+*s?_gnHkV4xN1>mK^6 zKU7C+E}1Nqsa2$g&m?m4@_hCJ#ayB#-rtgV9&_7P`GUa@lyJ0}_)TjGet5WJ{^s2N z$y^2|3MVe-?dmZ4$PYWRr>dE@LhaEa`!46#AjyB8)e0QjLtGC+%qPl>%EW!;4J+=^ z&db=!uNlYO)dcQYH&_k zN1LA9A>4VE&b@q#%Ou>tmBjB#+W}`-wgg@1Vri4tob+{d3LOdtMv|{79wDw{yrFKd z4HG@I8x2_=qz~iu1^V-K;jz*QiCn3sn;~@+aWKygx-&dkQB)GojgFZOwVa%zDvWQ>E0}I1pD5P zXVXs%AO{I=t{rSgJ{S`mjYev|%$&Pat>%DJ*Ioc;T}Kd#14#X8rQxt6;}<@iGW!Bj zUCDQNuW`kW5QE_58#*!b5|6I*V_1@#)!1A7UY#{(EKdRSYXN^?|0JI7%+T!cjf9iG zkqL?C`JB+oAF}=$BBrK{)>{8k-}}0ek38nm^8vQU{1j_9PPqzTy7+bVjg zY4^|=oix*xq=YFmCXrUG&~-PeKcMZ!&NFcy+0F(K#mk)e722do*-@^HmgfEvJrpWO zWFB@jwbme~TgSyp?w8va@IGYhCyXDP#vHu64(i4B+fHYOC_Q*aNhD{A9ySx5`$?AE zHf189Q*M@QEuqjto%9$--*y=QD)DSw;-d54BIsAFUWQC>8Wbsyylp$Ez>9}Q)GwA^ zRW$RVQx0=ad{m}ZT{4WfI4F`}|FqgWqHv=|B_KN(mxdU*EGtjy8*nE_FpjHSRiM4P z&70SakX~qYg{1h-2m6N@YOumEJFKv+X@U7oXoq??Z1m?rFDgGOHKx}!6Z5Mx-YhmJjI6N~D<)ZZ^VZiTB9+D7C}a8%iDXjgj4%aSD++vggjdks zd%N1Nh#%b;e78@T!aoRHv4FfO+p_Mv3^`sC(#UaBFnl(Xc{|W-M`oWIqllxrnaA)7>Dys{58^ zjLNOP_vF6wZcbntNF1mKwvz|n4%vO3$f0^afYM05tQ*{2K?t(G-DSJl_T7p88tGUS-n zOJ1WYda{V9YjXEOkPh)3im1OPnuB=r6vW1$d)N@jK*b&39XWr7T}Ze*l$fXkJn#R# z(twN$B*&bxjJrZHw?`0T1_398k}rtXwBv7>7SB35d!2}HSDjn@hnVwXK_zUgLWgO( ziD9~68egr&Bg7DC_@FO(R 
z_z|%?dbr0)aJR=_d)4G-Z(@W*tu}Vf<23u-14%}jv#Vvw=S9Rbz2nyEK4BO1KeeZ|zi}UY19p;5?is9=r6Yu32 zFwS3BMSU*1bC2rU;E91!Cx9Ied z5iBs=aCJ?$*~aBG+m;RbdUUSe*LRnGFC~!xqwOj+IeKNQYySH+n_4Zm#8&g9zFHtU zAD?q8GxB+Jp=+(btKn%3?3k=|pp8ShPm6v8uk5z2>_$WGKCX3Zi9$~%^AMlAgEv&r zU@h!Z11^=D6;}0XEKc{%{vCQDoo>QNMTZ|sEyyb+AsV;`9ML3!K5ltDr}P~*5*NAc z>dwd|JvBN{U9?taN+pq4ch=YF*YFH6eC`GOUe|@GN4N3aI8DL1GwBm+OqY|4OZ#Xi zl6+%030AUClO=md9#i*G=)c58?ovKZ;l{VGtet!LLLGH4z(Tvi?&h{gZ5ycYLq7l#o*MwO&>|E%lUl_ zAs<|psvqpXpTdW|@xCw9eIzoo7g0)75SfWn#_>#X%SdI7Bq?@l<{FTsGYXr9WekB0 zh+Xk`6@XX4cpd|UH!*x8HHTJCxNCN#7cN^GJte0(q*xVK*Qv!9o ztrUDlXRe>JWkyn(kR1@~+P$ljd#X!k#<#i!JZQVa$cx1@yQ=B1z+&nmvwa%U7uV%9 z$=^9Mr~ttO(^NFxGI`Xqo4LMyQjbEtrmjLGV3%47U1PrdLdh10N7BX7!T%yfJ;(+gM&U0puG?HFzB#q?4oV8BAVNorSMRCpO?W_FIa)k8euUYD@^!8OUE5qLc zK~a}%*`2L#o00b@s`w2q8*9DcH|~AXn6h+UO}v)@pLn&Rr)hbjd%2j|)=)*Dr_i2s z23T>}F@(H2p3C!=^7_e{X&sqGl=z;D1U$%B7Q(9GLqSBd$h%1b)0Z>z4@C>wfsDhY zOKU|`f09(IZH)u*7matF1mNqwQLiDlHsD^LNhyiST$8A{@SW`9w1wY<95;yGse%ao zBHDSEc@eZc-ZQq4@p+Kw==6A*&?Rm&WgI?Zp2g-&JU2Qw7{UZ`UN>T}U_X=f)}a zz~;&n!Vf1??_KzGh#3x6K=sfv>ALsrz68lDbm19MHOUlrHd|7U3EMfB1zM_zgp$AGbrQ+D98i~KZR7Aww#Iz1 zQ@nXFQ04%TuVf@nQ0zX%>2eaw0vm(MmTzo5RJ4-dK^u`hlFC&&)OX}CuAMu&%}!nY zesng#=^78Z-%SmKd={eeZj3&0Z*xJ0*Q&@ja#!+QrSWaf)Iy#Z>yO98Z_iXgd0tA<+0q)AfBiQ?3ttCqLG&$Y z=bp@0v*p*bq>gFIpNkTGAaZt5ptuZPG2m1uj-qwC|F{VzBY}4jhN}>r6(LC>%D6?5Bj76j>HEFSFR-FH;|7|4xd5kxs$Xq@80&6RF~@|C zsgk^g?@%cPsc7A{5*gxlx&173TV(%?d8;o&F3HcpI2OVlEO5<47xLKAvN#uwD8^ZI z#ZEX%Re-yhwnmC}xnGB8ntG4&!WuuLXmt#B6|J88o&De&by7jwhu0In#~r!kykKoThJ-vNzgdyN2j;(5F=<7tLzE&RyPg;PRG5^1EldU9-k8O*xj_XZ;_GR_YP zt5buN*6N$%1H8=h9_?507>d2-pJYYyh`bOp5tdXC3U#N5G&5;RY*45vJ$Ye%+RM+e zc@wDk;GGuguf|u?4Jv28!_MbstUTNTKsebb{*UyvY+3LyjO@0 zzoFOCnM^zaP|J7~S7BrhF7!3b#+4)fSpU#7Qe z8g#IC1HxQfzS2zufo^JUI2yX_P7#_k{wcDQD~M)QMgmcV985^_#Vjeo-iEY@X74m2 zGmgvPg%{P|(@R9SSv2|9bJN58 z{6uX+OV9|Qv3Gx=&d9usz+8#BbYyqtT zx0F4_eN2eo(}qI?Bx=WYeQ>eBphH1VeO9nN}JL()dZCe-nX ztp_$RS3*HlYI1K$4c`gCle)yAb*e9x03#hdfY-6%sDrkAGKx|`%=*vY6eY*3Q4Grcvc7fPp4Yn+ 
zXx~mweGxHbhXN{#K}g9b zWLg8mcDwAY3J2GZeCkHNC2OD5p{drZA07N~4Cu-P)40RDXzj0g{$?m|@EE;G317F&=Vn(=$eoRUpCN{SW6d^gx`qxwYt9LoH z1hO#$#k4epj(*pM@pZ?)?kR?0I*ZOLOo5gb93Ir*$N=3%v)6S|o$I11%lvmzp+H6S zYwpvDq=7>l!Jt_BB=YT>hT?p-tgatqsz{IM~(Br_7N%S}bqB>$@7KSueru zi<(|+ryM-06=0?4v2WWN5g{v2EGDTApaDl-CJ!9O@{uBoylZbtw^il^D`_bgE4C^h^CL`*j$*`(=U7U3;lvRghaJBj@`1 z-bsKw$MiEzfGlbhEhKTN%7ftoq(n1UmtEAL?*$qm!u8_L&w zGnrAY4v_|ddQZn5%7=V>H8$;A_sV96K^)6%>Y1cO1sxgG^o2_g^1)d~9Q2LYacaxG zwqzgdb~#%vJSn;lhl=2gUAB8uL@GSpY5<|g>whVBJSuVt#0h(Te~K**Pi-xqsSH-o zM>8}7is%M?!oFSex|ZPxIuDOgmJ)d0!sDM%Ce&4-ah&uGC0Q!~G93>VO-#yK$mAn; zA4hgsXqsfhA`ye`R%vZpYsscPBa|Li_IBIJ<31w}4>0k#2TYyEmBCjE-(j2Fnw zx^|}-^Ej;0uU4L25BIpdE_8+E%ZKBujeDKfj}10VG=h~XaN!*HkY8k78x?q#pJ~pE z-*dWRUsfs^0vEq5GGBYy%B-%x)O3n}Z=UdW8*(8b1rt-P*~G%%NxD)gKs_&>F1bWs zv6#7O_zHHmnh|oJ#DA@CT%qrzgu*?CCl!J8h z62MD9`ahj~S9{N?&)^+Obu6n#81uq$x!H!aa;C2Py2&?*zWm2!W!k>Xv1|bx6dn(d z-|WJ_1x)+4kJ=~}g>}ARU@*(&WcRXtM4ac}9Mlr3V5J88-rTwrrGuv?xdYr9 zzk4B5@v1(fk?WX;YoFY!u@kqLo4j!~?HLyj#8(Z_s$uKOI{z6D`T;H+83$GIU+3^$ z@gE1%2npczYOxH z!x=a`8A|+p-(q8#{}wJjhbM$FdOuW@F- zK8+%ZjCPi7*_jfn0tx^*@d`NG2$fDAxj-B|+_oJ96*@AGBNos??8jh}Kj=b7k0VBYseR&Xi2&Ti2@H zu4amRQTB0pOJW9bO!I`j&d;|^gA!E6@^l%T<>7i_pe^c-GSR6uhJfj_Pdx}TR7 zs@*V0bd@Ln% z0C(MFGRye$76OG?RY1EuwqxKz4=6+xBlvO?+iQPFbMH}uz%fk@-syL)pwX~oP1vRf zG{@Xm&<|A0JY3TWga^J%WxV(XdGwQ%g_|6d8SVTHP(jkr5iSsw&jIyQ1Mo$_@LUya z=ver?mHB+drC_^)u05Fs>_^WJR35wd$nTG8lK~zA$tClVuoq863nOGVP^b#RJX$4U z;i3Rda;+u92B0`P;nDHzM@37ywJaBD!1L2OdQ;-Y{rM6R`@qp+Km}MxYAbFNzDbAT zwKNAS^f2~f&GV3kxUjC^XH72ehe z0vN+Aj+Ht`4YcF*4=ZbHzX=_C$1gSC=(riykipr&ptLNgISg; z?9HV^XivX?@LM~<6MRYpw2X?1G9J|0uXxDD8z3;6?Dp?cgBvdpCw?d<@vwFb@}`{E2ua`;o1@Zv)H0Pk5c>cs|;~zoRY46YJV2RnE0mSx~|6>H>i5 zWg1W<+g{3=GR}m)xjH*%J`V)h9V}y*{JPXpb{z-paM++UE)a6|lm09jH9Ii+V-#=a z9TGgu6m&knSBeBsP5%T7{7gf+heExGCC}s4;Y4zv3S@5`qdEb8qRl1{~Yu zZgBF4^TOdg$DV)^b+o!fU(aHr!!!EW5Y&jYBln6z2{jAeKrFAJjKz$P4lN`}S^{`V zVwFf*j5Gn@_UWSP>cU$793(xoP-k}|38vz4@jO{ZWe*Vo-Oqf(Q+zzh1?b7-Gbp>Q 
zznTTN^#`~LT>T*eG~x&sfKYFe)0Npj-t@W#<-Z72T{t#&!6*cjUhNUCT#)xvV!DCe zcwQ6k9CM%~^);iP<8w}}duUNFSZ=vD_@_QWnNx6|I}&gURbdgpUKgA<{5&_bY%Fx1 zG`>yPt6avJ09jT^_7tt@YL}kfy34&x>64DLcq~PJbDK%zRZuMcoiH@=aK9Lo0tt2o zST86sPt8UEminIu5ihK^MzBcdY~qz(t68>1;;dSj)UVqh?J$N!&bXL(&T8drT#;@WIvt6Bj&HbQM=L0 zoX3G(s5IznKeGMk^`?EYCu40iXJGG~y$iS=S&4|fIX@gE(2*7R5wG0Y!*h<<23)|D zxVDB9uN&cWX|a#Urq?T<6_lC_D$~JjDbd})J_s5bY@_iHPz8=&a{d_rd1?q}#KxD{ ztTTdWZPpr=9V93C{?Q0w8fYGCpO!eukxTU~xr1ZQBk#7^y)%Mq79G;4gE1#SMC6*D z^#lPg+EF*8y~;>Evbb@c_X`BTxc0vT4RP+8iH;7FfHHl1(n=IlFX-?W=E|~zbboV% zoMuTFnEPce9Isvi)`yvw?sY{MlrWW9s0D?Pb7nFx;`*sy$im~=Y7S8HsV6I{6HXH2 zpl|Nn!16NYA0>Hv1hz#av(z1aJMuWsKdBTxqxVvE$|YZW-e7sy`Z=Jo?UmlGU1*cERL73gq*zgy`jKS zDq2Vg#s}6k!4i3QkOKu^x-g0(VOQe>0O7*Kvcp2r>H{vtZCfXO>@O5SUa9N*&xQYW z94ekBfhFnf1XapEjcvoPxCI#q==!8A?ht7_QE{K z|Fb!Rsr)YM)Cw#lbCrqoyH;mU^qHUIJ!BH2WbS*bB}s1_o&^QQe0QI+NsPJ&q~oI5)TA{sDQ>@6A-@_nmwFbL<$wIst51FH z>-Z==d*XZr5<|>Qsm7oD08mB2RQ#Dl%N_24%4i)1Cb0E4(T1_BMB34tGr)nWWtn~| zi!oAn_k5OGRF>%6)7%b7A_iJ!a7qY`7wO~34H0Zq$i*2VuAt(x1ic*@0s!W zm{)1~zU?4ZN!I(y=m38P1oJ=FAr-q{k1sDQbbADBpL6H&$l`X#I_IVKZe^|E`6cHRBYA?pB!Vq zbM#9_2hnU!&ZE_muTSi^+l)V*5Mu#8sO?NGblE;lc$53f{@E!~C+St(EG5%r!p?lU zkHs0N!)c$*I#LQsM*JBV(U{Ft4wTW-Y8gb z!ddH<2k;3T3RGK;Gi}HQ=7M`?b|wP-^EMK=)JhCp2K}*!B7^cvyl7AJx*sR>W|Ln z(rnz`An-YHCZF6!pS+F0jm`63E~#xdsy^gW$fS4pawwO4habnZue)y_V@Ky!1S(=5ZVZ%#j#Zgx?c`C2+w0BY3@ zaZtYyJI{QgrNx=h;xNU1B2X2n@&W~Hd@(X8ZB|4Qz0zZnk>2`c9QzG9>e|3JUcw4p zWUc#jzUz&&yNh+FgE%d{JvALyFpq#?d#bZ6ldMXuFMl;M~}5s}E9Ts=PG7A9LBp_T@;11wxw!SwK@r8!Of;)Y8Jp6G4y?Z9?y_r?^81GKU+OMx9N(q0{%wJ|_{n*Njxm1twV z`1@=W6K<(fW3)PRT86g_l67WFC!Ge0?~#j{f>|9wB-<)SZ=FJ@cHNIdBD{8pVdv^0 zv5xrJD3Ck>EQVf{5X2vX-SHeSBD+5M?$%r6OMj<>Z8F$6*RNaElfJq^F=1N6k)v)I zL%(%%{xDE#Q(rs{JlOs6E_!JL6KO5RPXS5-_fdX&vD?99VXc9L*N;hjJtBOt>C!TK zJbH7z(}qdidgc6ldhB@PlpgrCgN(fpG{w^gSlbuXM=uoilFzzVu!xwon+Muf3c$7| zT-txx`h^~g)^G)cv+zeaf_|m_pEjTS;D6mKbUrtlsY_Spf2L&&NKHQZW@ua7P0jQ! 
z7+YE6E%l}n`}k}F-Z*zt^?wf_IQRW<49~y4(g|+IgNCG!JZ*&B(nnhqaibZsR#%k6 z%wIjTahM~6d2$>31Yi)MhJ?_wZ*qC`r7tCP=z@R~k{khT`LtO>1mb?N8&h)!lvlhV z0A)$fJgtfOT(G~qlD;RaVvsp<`0|~2GesQa~lnq*!a|^w6xrK<3QU59t+0}x6 z#F0`qFb#^o)4(bHk?B`=^~bPplp#MkXr^T2=2o)|{raQ+P=5ad$JW#bZ|9p+qw-KN z$ImOyed05KmPgQhgf_aN{vHjF0I1>aMpnk;?9|X5Gt8`H6Eqo0mCWWCx!c+yat<8flQ(a-y%(^@#_p|~ z4m^xKTFgjuINHuk<4>A>5lRGdJ1S)|W4XHW9pYMaQp!Kjdw<2T+^)`#XY;q40}LLn z-_shr(-PdxtKw*PZCBE;dRsWF?JJ&9CL|4JG19L&ZKj=(_Ct69u3=H*bP;T+)YJ0G z`|{#j=&Dj)^p{fVVrb*NgFhQ^$#bL#%Od`n^~$@QP@iUa9~B0ffc9E99jAP1Te`v3 z{C4=MEuS*Y-U*YbhQ@XDQa1eUnaWBMypk<*M34$Nm}Y-1Rvi5=#)7g65QxB2Nlr%7 zdV8$3$=&n-o+gY%1vbm%)_!LMH9niCVpScy3?u&#sc3hrT^x5LQO@s^rN72HG0by* zHc+mZzBKypHn3+npU#THG2ae)O8`@ee)#mwsq0SJ-P7bs%)t97R3M~|#aPSH`~YjL z@xugC(d1RK0?__CZHWHP>6O(0MrWe6DX)7k2|zXamELtt_K!@=a|OZ2@!n^=#VDBV z^Xq7Oaw6KAx z?3FOF@5x9(Y{@Q@siTI+Fl>6mPu1@9^z*bu(|1AjAAlMl{$QLL_r0`AEWjn(97Ms6 zE_0x@rO*Et`vcY85Bnx9TXc0n>~^#Cr0UTE&-QOCE*=V>t^LyM%KB@K8_KeIJOvzu z7WGirXd(_ zH-AK(#w_)CuT=z51iLWy7>{hH?M7_Mw^3pUp{}mEkMR50@}?eBh?s}22GnCImbwlQ z!TvuYup&5oAl}mFJJIUOnZe;y1t;^qg86>B3M}0GBTT45e6Gy-z0uh{xLGUoOP)Dk z?0o+M#?EB|^s{*3{$#S-2Yj_XWz8|curvO%dgDbh*f*Ci_V#PG6%BCR*y0C1XPhV>G$C9qx6C9iTc@wc5>*Y85$XQ|m zLbLl~y|3xZ+!bMg6KDlstQ`LsE9X=luT3oo@8PO!kr2$F`M9(SmF!M}3q9+M(QZGza1tD^~URw_}>9N{yIVrkE?$L1_rA~fBJj@+yLKv_wW9rP(M8W`NyY!ec<^Y z|K}I~Kvv)vX9&E0+fu-s{gfSU2&RlktAWKR0)x6wP7E;9BUs{+3G;onJ6VJ_f>hsWl||9hMM725xjz?DB}_FMuS zz4!lY+0(T@=mQKcUHE$}kVx0Y|F13k?;rn$r2m)9u|yi-xN978M1mmTQ<7JcD|uiX F_&;rM#gPC2 diff --git a/api/core/tools/provider/builtin/azuredalle/azuredalle.py b/api/core/tools/provider/builtin/azuredalle/azuredalle.py deleted file mode 100644 index 1fab0d03a28ff3..00000000000000 --- a/api/core/tools/provider/builtin/azuredalle/azuredalle.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.azuredalle.tools.dalle3 import DallE3Tool -from core.tools.provider.builtin_tool_provider 
import BuiltinToolProviderController - - -class AzureDALLEProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - DallE3Tool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={"prompt": "cute girl, blue eyes, white hair, anime style", "size": "square", "n": 1}, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/azuredalle/azuredalle.yaml b/api/core/tools/provider/builtin/azuredalle/azuredalle.yaml deleted file mode 100644 index 4353e0c4862f61..00000000000000 --- a/api/core/tools/provider/builtin/azuredalle/azuredalle.yaml +++ /dev/null @@ -1,76 +0,0 @@ -identity: - author: Leslie - name: azuredalle - label: - en_US: Azure DALL-E - zh_Hans: Azure DALL-E 绘画 - pt_BR: Azure DALL-E - description: - en_US: Azure DALL-E art - zh_Hans: Azure DALL-E 绘画 - pt_BR: Azure DALL-E art - icon: icon.png - tags: - - image - - productivity -credentials_for_provider: - azure_openai_api_key: - type: secret-input - required: true - label: - en_US: API key - zh_Hans: 密钥 - pt_BR: API key - help: - en_US: Please input your Azure OpenAI API key - zh_Hans: 请输入你的 Azure OpenAI API key - pt_BR: Introduza a sua chave de API OpenAI do Azure - placeholder: - en_US: Please input your Azure OpenAI API key - zh_Hans: 请输入你的 Azure OpenAI API key - pt_BR: Introduza a sua chave de API OpenAI do Azure - azure_openai_api_model_name: - type: text-input - required: true - label: - en_US: Deployment Name - zh_Hans: 部署名称 - pt_BR: Nome da Implantação - help: - en_US: Please input the name of your Azure Openai DALL-E API deployment - zh_Hans: 请输入你的 Azure Openai DALL-E API 部署名称 - pt_BR: Insira o nome da implantação da API DALL-E do Azure Openai - placeholder: - en_US: Please input the name of your Azure Openai DALL-E API deployment - zh_Hans: 请输入你的 Azure Openai DALL-E API 部署名称 - pt_BR: Insira o nome da implantação 
da API DALL-E do Azure Openai - azure_openai_base_url: - type: text-input - required: true - label: - en_US: API Endpoint URL - zh_Hans: API 域名 - pt_BR: API Endpoint URL - help: - en_US: Please input your Azure OpenAI Endpoint URL, e.g. https://xxx.openai.azure.com/ - zh_Hans: 请输入你的 Azure OpenAI API域名,例如:https://xxx.openai.azure.com/ - pt_BR: Introduza a URL do Azure OpenAI Endpoint, e.g. https://xxx.openai.azure.com/ - placeholder: - en_US: Please input your Azure OpenAI Endpoint URL, e.g. https://xxx.openai.azure.com/ - zh_Hans: 请输入你的 Azure OpenAI API域名,例如:https://xxx.openai.azure.com/ - pt_BR: Introduza a URL do Azure OpenAI Endpoint, e.g. https://xxx.openai.azure.com/ - azure_openai_api_version: - type: text-input - required: true - label: - en_US: API Version - zh_Hans: API 版本 - pt_BR: API Version - help: - en_US: Please input your Azure OpenAI API Version,e.g. 2023-12-01-preview - zh_Hans: 请输入你的 Azure OpenAI API 版本,例如:2023-12-01-preview - pt_BR: Introduza a versão da API OpenAI do Azure,e.g. 2023-12-01-preview - placeholder: - en_US: Please input your Azure OpenAI API Version,e.g. 2023-12-01-preview - zh_Hans: 请输入你的 Azure OpenAI API 版本,例如:2023-12-01-preview - pt_BR: Introduza a versão da API OpenAI do Azure,e.g. 
2023-12-01-preview diff --git a/api/core/tools/provider/builtin/azuredalle/tools/dalle3.py b/api/core/tools/provider/builtin/azuredalle/tools/dalle3.py deleted file mode 100644 index cfa3cfb092803a..00000000000000 --- a/api/core/tools/provider/builtin/azuredalle/tools/dalle3.py +++ /dev/null @@ -1,83 +0,0 @@ -import random -from base64 import b64decode -from typing import Any, Union - -from openai import AzureOpenAI - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DallE3Tool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - client = AzureOpenAI( - api_version=self.runtime.credentials["azure_openai_api_version"], - azure_endpoint=self.runtime.credentials["azure_openai_base_url"], - api_key=self.runtime.credentials["azure_openai_api_key"], - ) - - SIZE_MAPPING = { - "square": "1024x1024", - "vertical": "1024x1792", - "horizontal": "1792x1024", - } - - # prompt - prompt = tool_parameters.get("prompt", "") - if not prompt: - return self.create_text_message("Please input prompt") - # get size - size = SIZE_MAPPING[tool_parameters.get("size", "square")] - # get n - n = tool_parameters.get("n", 1) - # get quality - quality = tool_parameters.get("quality", "standard") - if quality not in {"standard", "hd"}: - return self.create_text_message("Invalid quality") - # get style - style = tool_parameters.get("style", "vivid") - if style not in {"natural", "vivid"}: - return self.create_text_message("Invalid style") - # set extra body - seed_id = tool_parameters.get("seed_id", self._generate_random_id(8)) - extra_body = {"seed": seed_id} - - # call openapi dalle3 - model = self.runtime.credentials["azure_openai_api_model_name"] - response = client.images.generate( - prompt=prompt, - model=model, - size=size, - n=n, - extra_body=extra_body, - style=style, - quality=quality, - 
response_format="b64_json", - ) - - result = [] - - for image in response.data: - result.append( - self.create_blob_message( - blob=b64decode(image.b64_json), - meta={"mime_type": "image/png"}, - save_as=self.VariableKey.IMAGE.value, - ) - ) - result.append(self.create_text_message(f"\nGenerate image source to Seed ID: {seed_id}")) - - return result - - @staticmethod - def _generate_random_id(length=8): - characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - random_id = "".join(random.choices(characters, k=length)) - return random_id diff --git a/api/core/tools/provider/builtin/azuredalle/tools/dalle3.yaml b/api/core/tools/provider/builtin/azuredalle/tools/dalle3.yaml deleted file mode 100644 index e256748e8f7188..00000000000000 --- a/api/core/tools/provider/builtin/azuredalle/tools/dalle3.yaml +++ /dev/null @@ -1,136 +0,0 @@ -identity: - name: azure_dalle3 - author: Leslie - label: - en_US: Azure DALL-E 3 - zh_Hans: Azure DALL-E 3 绘画 - pt_BR: Azure DALL-E 3 - description: - en_US: DALL-E 3 is a powerful drawing tool that can draw the image you want based on your prompt, compared to DallE 2, DallE 3 has stronger drawing ability, but it will consume more resources - zh_Hans: DALL-E 3 是一个强大的绘画工具,它可以根据您的提示词绘制出您想要的图像,相比于DallE 2, DallE 3拥有更强的绘画能力,但会消耗更多的资源 - pt_BR: DALL-E 3 é uma poderosa ferramenta de desenho que pode desenhar a imagem que você deseja com base em seu prompt, em comparação com DallE 2, DallE 3 tem uma capacidade de desenho mais forte, mas consumirá mais recursos -description: - human: - en_US: DALL-E is a text to image tool - zh_Hans: DALL-E 是一个文本到图像的工具 - pt_BR: DALL-E é uma ferramenta de texto para imagem - llm: DALL-E is a tool used to generate images from text -parameters: - - name: prompt - type: string - required: true - label: - en_US: Prompt - zh_Hans: 提示词 - pt_BR: Prompt - human_description: - en_US: Image prompt, you can check the official documentation of DallE 3 - zh_Hans: 图像提示词,您可以查看 DallE 3 的官方文档 - pt_BR: Imagem 
prompt, você pode verificar a documentação oficial do DallE 3 - llm_description: Image prompt of DallE 3, you should describe the image you want to generate as a list of words as possible as detailed - form: llm - - name: seed_id - type: string - required: false - label: - en_US: Seed ID - zh_Hans: 种子ID - pt_BR: ID da semente - human_description: - en_US: Image generation seed ID to ensure consistency of series generated images - zh_Hans: 图像生成种子ID,确保系列生成图像的一致性 - pt_BR: ID de semente de geração de imagem para garantir a consistência das imagens geradas em série - llm_description: If the user requests image consistency, extract the seed ID from the user's question or context.The seed id consists of an 8-bit string containing uppercase and lowercase letters and numbers - form: llm - - name: size - type: select - required: true - human_description: - en_US: selecting the image size - zh_Hans: 选择图像大小 - pt_BR: seleccionar o tamanho da imagem - label: - en_US: Image size - zh_Hans: 图像大小 - pt_BR: Tamanho da imagem - form: form - options: - - value: square - label: - en_US: Squre(1024x1024) - zh_Hans: 方(1024x1024) - pt_BR: Squire(1024x1024) - - value: vertical - label: - en_US: Vertical(1024x1792) - zh_Hans: 竖屏(1024x1792) - pt_BR: Vertical(1024x1792) - - value: horizontal - label: - en_US: Horizontal(1792x1024) - zh_Hans: 横屏(1792x1024) - pt_BR: Horizontal(1792x1024) - default: square - - name: n - type: number - required: true - human_description: - en_US: selecting the number of images - zh_Hans: 选择图像数量 - pt_BR: seleccionar o número de imagens - label: - en_US: Number of images - zh_Hans: 图像数量 - pt_BR: Número de imagens - form: form - min: 1 - max: 1 - default: 1 - - name: quality - type: select - required: true - human_description: - en_US: selecting the image quality - zh_Hans: 选择图像质量 - pt_BR: seleccionar a qualidade da imagem - label: - en_US: Image quality - zh_Hans: 图像质量 - pt_BR: Qualidade da imagem - form: form - options: - - value: standard - label: - en_US: 
Standard - zh_Hans: 标准 - pt_BR: Normal - - value: hd - label: - en_US: HD - zh_Hans: 高清 - pt_BR: HD - default: standard - - name: style - type: select - required: true - human_description: - en_US: selecting the image style - zh_Hans: 选择图像风格 - pt_BR: seleccionar o estilo da imagem - label: - en_US: Image style - zh_Hans: 图像风格 - pt_BR: Estilo da imagem - form: form - options: - - value: vivid - label: - en_US: Vivid - zh_Hans: 生动 - pt_BR: Vívido - - value: natural - label: - en_US: Natural - zh_Hans: 自然 - pt_BR: Natural - default: vivid diff --git a/api/core/tools/provider/builtin/bing/_assets/icon.svg b/api/core/tools/provider/builtin/bing/_assets/icon.svg deleted file mode 100644 index a94de7971d35b7..00000000000000 --- a/api/core/tools/provider/builtin/bing/_assets/icon.svg +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/bing/bing.py b/api/core/tools/provider/builtin/bing/bing.py deleted file mode 100644 index c71128be4a784f..00000000000000 --- a/api/core/tools/provider/builtin/bing/bing.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.bing.tools.bing_web_search import BingSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class BingProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - BingSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).validate_credentials( - credentials=credentials, - tool_parameters={ - "query": "test", - "result_type": "link", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/bing/bing.yaml b/api/core/tools/provider/builtin/bing/bing.yaml deleted file mode 100644 index 
1ab17d5294b37c..00000000000000 --- a/api/core/tools/provider/builtin/bing/bing.yaml +++ /dev/null @@ -1,107 +0,0 @@ -identity: - author: Dify - name: bing - label: - en_US: Bing - zh_Hans: Bing - pt_BR: Bing - description: - en_US: Bing Search - zh_Hans: Bing 搜索 - pt_BR: Bing Search - icon: icon.svg - tags: - - search -credentials_for_provider: - subscription_key: - type: secret-input - required: true - label: - en_US: Bing subscription key - zh_Hans: Bing subscription key - pt_BR: Bing subscription key - placeholder: - en_US: Please input your Bing subscription key - zh_Hans: 请输入你的 Bing subscription key - pt_BR: Please input your Bing subscription key - help: - en_US: Get your Bing subscription key from Bing - zh_Hans: 从 Bing 获取您的 Bing subscription key - pt_BR: Get your Bing subscription key from Bing - url: https://www.microsoft.com/cognitive-services/en-us/bing-web-search-api - server_url: - type: text-input - required: false - label: - en_US: Bing endpoint - zh_Hans: Bing endpoint - pt_BR: Bing endpoint - placeholder: - en_US: Please input your Bing endpoint - zh_Hans: 请输入你的 Bing 端点 - pt_BR: Please input your Bing endpoint - help: - en_US: An endpoint is like "https://api.bing.microsoft.com/v7.0/search" - zh_Hans: 例如 "https://api.bing.microsoft.com/v7.0/search" - pt_BR: An endpoint is like "https://api.bing.microsoft.com/v7.0/search" - default: https://api.bing.microsoft.com/v7.0/search - allow_entities: - type: boolean - required: false - label: - en_US: Allow Entities Search - zh_Hans: 支持实体搜索 - pt_BR: Allow Entities Search - help: - en_US: Does your subscription plan allow entity search - zh_Hans: 您的订阅计划是否支持实体搜索 - pt_BR: Does your subscription plan allow entity search - default: true - allow_web_pages: - type: boolean - required: false - label: - en_US: Allow Web Pages Search - zh_Hans: 支持网页搜索 - pt_BR: Allow Web Pages Search - help: - en_US: Does your subscription plan allow web pages search - zh_Hans: 您的订阅计划是否支持网页搜索 - pt_BR: Does your subscription plan allow 
web pages search - default: true - allow_computation: - type: boolean - required: false - label: - en_US: Allow Computation Search - zh_Hans: 支持计算搜索 - pt_BR: Allow Computation Search - help: - en_US: Does your subscription plan allow computation search - zh_Hans: 您的订阅计划是否支持计算搜索 - pt_BR: Does your subscription plan allow computation search - default: false - allow_news: - type: boolean - required: false - label: - en_US: Allow News Search - zh_Hans: 支持新闻搜索 - pt_BR: Allow News Search - help: - en_US: Does your subscription plan allow news search - zh_Hans: 您的订阅计划是否支持新闻搜索 - pt_BR: Does your subscription plan allow news search - default: false - allow_related_searches: - type: boolean - required: false - label: - en_US: Allow Related Searches - zh_Hans: 支持相关搜索 - pt_BR: Allow Related Searches - help: - en_US: Does your subscription plan allow related searches - zh_Hans: 您的订阅计划是否支持相关搜索 - pt_BR: Does your subscription plan allow related searches - default: false diff --git a/api/core/tools/provider/builtin/bing/tools/bing_web_search.py b/api/core/tools/provider/builtin/bing/tools/bing_web_search.py deleted file mode 100644 index 8bed2c556cf879..00000000000000 --- a/api/core/tools/provider/builtin/bing/tools/bing_web_search.py +++ /dev/null @@ -1,202 +0,0 @@ -from typing import Any, Union -from urllib.parse import quote - -from requests import get - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class BingSearchTool(BuiltinTool): - url: str = "https://api.bing.microsoft.com/v7.0/search" - - def _invoke_bing( - self, - user_id: str, - server_url: str, - subscription_key: str, - query: str, - limit: int, - result_type: str, - market: str, - lang: str, - filters: list[str], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke bing search - """ - market_code = f"{lang}-{market}" - accept_language = f"{lang},{market_code};q=0.9" - headers = {"Ocp-Apim-Subscription-Key": 
subscription_key, "Accept-Language": accept_language} - - query = quote(query) - server_url = f'{server_url}?q={query}&mkt={market_code}&count={limit}&responseFilter={",".join(filters)}' - response = get(server_url, headers=headers) - - if response.status_code != 200: - raise Exception(f"Error {response.status_code}: {response.text}") - - response = response.json() - search_results = response["webPages"]["value"][:limit] if "webPages" in response else [] - related_searches = response["relatedSearches"]["value"] if "relatedSearches" in response else [] - entities = response["entities"]["value"] if "entities" in response else [] - news = response["news"]["value"] if "news" in response else [] - computation = response["computation"]["value"] if "computation" in response else None - - if result_type == "link": - results = [] - if search_results: - for result in search_results: - url = f': {result["url"]}' if "url" in result else "" - results.append(self.create_text_message(text=f'{result["name"]}{url}')) - - if entities: - for entity in entities: - url = f': {entity["url"]}' if "url" in entity else "" - results.append(self.create_text_message(text=f'{entity.get("name", "")}{url}')) - - if news: - for news_item in news: - url = f': {news_item["url"]}' if "url" in news_item else "" - results.append(self.create_text_message(text=f'{news_item.get("name", "")}{url}')) - - if related_searches: - for related in related_searches: - url = f': {related["displayText"]}' if "displayText" in related else "" - results.append(self.create_text_message(text=f'{related.get("displayText", "")}{url}')) - - return results - else: - # construct text - text = "" - if search_results: - for i, result in enumerate(search_results): - text += f'{i + 1}: {result.get("name", "")} - {result.get("snippet", "")}\n' - - if computation and "expression" in computation and "value" in computation: - text += "\nComputation:\n" - text += f'{computation["expression"]} = {computation["value"]}\n' - - if 
entities: - text += "\nEntities:\n" - for entity in entities: - url = f'- {entity["url"]}' if "url" in entity else "" - text += f'{entity.get("name", "")}{url}\n' - - if news: - text += "\nNews:\n" - for news_item in news: - url = f'- {news_item["url"]}' if "url" in news_item else "" - text += f'{news_item.get("name", "")}{url}\n' - - if related_searches: - text += "\n\nRelated Searches:\n" - for related in related_searches: - url = f'- {related["webSearchUrl"]}' if "webSearchUrl" in related else "" - text += f'{related.get("displayText", "")}{url}\n' - - return self.create_text_message(text=self.summary(user_id=user_id, content=text)) - - def validate_credentials(self, credentials: dict[str, Any], tool_parameters: dict[str, Any]) -> None: - key = credentials.get("subscription_key") - if not key: - raise Exception("subscription_key is required") - - server_url = credentials.get("server_url") - if not server_url: - server_url = self.url - - query = tool_parameters.get("query") - if not query: - raise Exception("query is required") - - limit = min(tool_parameters.get("limit", 5), 10) - result_type = tool_parameters.get("result_type", "text") or "text" - - market = tool_parameters.get("market", "US") - lang = tool_parameters.get("language", "en") - filter = [] - - if credentials.get("allow_entities", False): - filter.append("Entities") - - if credentials.get("allow_computation", False): - filter.append("Computation") - - if credentials.get("allow_news", False): - filter.append("News") - - if credentials.get("allow_related_searches", False): - filter.append("RelatedSearches") - - if credentials.get("allow_web_pages", False): - filter.append("WebPages") - - if not filter: - raise Exception("At least one filter is required") - - self._invoke_bing( - user_id="test", - server_url=server_url, - subscription_key=key, - query=query, - limit=limit, - result_type=result_type, - market=market, - lang=lang, - filters=filter, - ) - - def _invoke( - self, - user_id: str, - 
tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - - key = self.runtime.credentials.get("subscription_key", None) - if not key: - raise Exception("subscription_key is required") - - server_url = self.runtime.credentials.get("server_url", None) - if not server_url: - server_url = self.url - - query = tool_parameters.get("query") - if not query: - raise Exception("query is required") - - limit = min(tool_parameters.get("limit", 5), 10) - result_type = tool_parameters.get("result_type", "text") or "text" - - market = tool_parameters.get("market", "US") - lang = tool_parameters.get("language", "en") - filter = [] - - if tool_parameters.get("enable_computation", False): - filter.append("Computation") - if tool_parameters.get("enable_entities", False): - filter.append("Entities") - if tool_parameters.get("enable_news", False): - filter.append("News") - if tool_parameters.get("enable_related_search", False): - filter.append("RelatedSearches") - if tool_parameters.get("enable_webpages", False): - filter.append("WebPages") - - if not filter: - raise Exception("At least one filter is required") - - return self._invoke_bing( - user_id=user_id, - server_url=server_url, - subscription_key=key, - query=query, - limit=limit, - result_type=result_type, - market=market, - lang=lang, - filters=filter, - ) diff --git a/api/core/tools/provider/builtin/bing/tools/bing_web_search.yaml b/api/core/tools/provider/builtin/bing/tools/bing_web_search.yaml deleted file mode 100644 index a3f60bb09b6509..00000000000000 --- a/api/core/tools/provider/builtin/bing/tools/bing_web_search.yaml +++ /dev/null @@ -1,584 +0,0 @@ -identity: - name: bing_web_search - author: Dify - label: - en_US: BingWebSearch - zh_Hans: 必应网页搜索 - pt_BR: BingWebSearch -description: - human: - en_US: A tool for performing a Bing SERP search and extracting snippets and webpages.Input should be a search query. 
- zh_Hans: 一个用于执行 Bing SERP 搜索并提取片段和网页的工具。输入应该是一个搜索查询。 - pt_BR: A tool for performing a Bing SERP search and extracting snippets and webpages.Input should be a search query. - llm: A tool for performing a Bing SERP search and extracting snippets and webpages.Input should be a search query. -parameters: - - name: query - type: string - required: true - form: llm - label: - en_US: Query string - zh_Hans: 查询语句 - pt_BR: Query string - human_description: - en_US: used for searching - zh_Hans: 用于搜索网页内容 - pt_BR: used for searching - llm_description: key words for searching - - name: enable_computation - type: boolean - required: false - form: form - label: - en_US: Enable computation - zh_Hans: 启用计算 - pt_BR: Enable computation - human_description: - en_US: enable computation - zh_Hans: 启用计算 - pt_BR: enable computation - default: false - - name: enable_entities - type: boolean - required: false - form: form - label: - en_US: Enable entities - zh_Hans: 启用实体搜索 - pt_BR: Enable entities - human_description: - en_US: enable entities - zh_Hans: 启用实体搜索 - pt_BR: enable entities - default: true - - name: enable_news - type: boolean - required: false - form: form - label: - en_US: Enable news - zh_Hans: 启用新闻搜索 - pt_BR: Enable news - human_description: - en_US: enable news - zh_Hans: 启用新闻搜索 - pt_BR: enable news - default: false - - name: enable_related_search - type: boolean - required: false - form: form - label: - en_US: Enable related search - zh_Hans: 启用相关搜索 - pt_BR: Enable related search - human_description: - en_US: enable related search - zh_Hans: 启用相关搜索 - pt_BR: enable related search - default: false - - name: enable_webpages - type: boolean - required: false - form: form - label: - en_US: Enable webpages search - zh_Hans: 启用网页搜索 - pt_BR: Enable webpages search - human_description: - en_US: enable webpages search - zh_Hans: 启用网页搜索 - pt_BR: enable webpages search - default: true - - name: limit - type: number - required: true - form: form - label: - en_US: Limit for results 
length - zh_Hans: 返回长度限制 - pt_BR: Limit for results length - human_description: - en_US: limit the number of results - zh_Hans: 限制返回结果的数量 - pt_BR: limit the number of results - min: 1 - max: 10 - default: 5 - - name: result_type - type: select - required: true - label: - en_US: result type - zh_Hans: 结果类型 - pt_BR: result type - human_description: - en_US: return a list of links or texts - zh_Hans: 返回一个连接列表还是纯文本内容 - pt_BR: return a list of links or texts - default: text - options: - - value: link - label: - en_US: Link - zh_Hans: 链接 - pt_BR: Link - - value: text - label: - en_US: Text - zh_Hans: 文本 - pt_BR: Text - form: form - - name: market - type: select - label: - en_US: Market - zh_Hans: 市场 - pt_BR: Market - human_description: - en_US: market takes responsibility for the region - zh_Hans: 市场决定了搜索结果的地区 - pt_BR: market takes responsibility for the region - required: false - form: form - default: US - options: - - value: AR - label: - en_US: Argentina - zh_Hans: 阿根廷 - pt_BR: Argentina - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: AT - label: - en_US: Austria - zh_Hans: 奥地利 - pt_BR: Austria - - value: BE - label: - en_US: Belgium - zh_Hans: 比利时 - pt_BR: Belgium - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: CL - label: - en_US: Chile - zh_Hans: 智利 - pt_BR: Chile - - value: CO - label: - en_US: Colombia - zh_Hans: 哥伦比亚 - pt_BR: Colombia - - value: CN - label: - en_US: China - zh_Hans: 中国 - pt_BR: China - - value: CZ - label: - en_US: Czech Republic - zh_Hans: 捷克共和国 - pt_BR: Czech Republic - - value: DK - label: - en_US: Denmark - zh_Hans: 丹麦 - pt_BR: Denmark - - value: FI - label: - en_US: Finland - zh_Hans: 芬兰 - pt_BR: Finland - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: DE - label: - en_US: Germany - zh_Hans: 德国 - pt_BR: Germany - - value: HK - label: - en_US: Hong Kong - zh_Hans: 香港 - 
pt_BR: Hong Kong - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: ID - label: - en_US: Indonesia - zh_Hans: 印度尼西亚 - pt_BR: Indonesia - - value: IT - label: - en_US: Italy - zh_Hans: 意大利 - pt_BR: Italy - - value: JP - label: - en_US: Japan - zh_Hans: 日本 - pt_BR: Japan - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: MY - label: - en_US: Malaysia - zh_Hans: 马来西亚 - pt_BR: Malaysia - - value: MX - label: - en_US: Mexico - zh_Hans: 墨西哥 - pt_BR: Mexico - - value: NL - label: - en_US: Netherlands - zh_Hans: 荷兰 - pt_BR: Netherlands - - value: NZ - label: - en_US: New Zealand - zh_Hans: 新西兰 - pt_BR: New Zealand - - value: 'NO' - label: - en_US: Norway - zh_Hans: 挪威 - pt_BR: Norway - - value: PH - label: - en_US: Philippines - zh_Hans: 菲律宾 - pt_BR: Philippines - - value: PL - label: - en_US: Poland - zh_Hans: 波兰 - pt_BR: Poland - - value: PT - label: - en_US: Portugal - zh_Hans: 葡萄牙 - pt_BR: Portugal - - value: RU - label: - en_US: Russia - zh_Hans: 俄罗斯 - pt_BR: Russia - - value: SA - label: - en_US: Saudi Arabia - zh_Hans: 沙特阿拉伯 - pt_BR: Saudi Arabia - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: ZA - label: - en_US: South Africa - zh_Hans: 南非 - pt_BR: South Africa - - value: ES - label: - en_US: Spain - zh_Hans: 西班牙 - pt_BR: Spain - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden - - value: CH - label: - en_US: Switzerland - zh_Hans: 瑞士 - pt_BR: Switzerland - - value: TW - label: - en_US: Taiwan - zh_Hans: 台湾 - pt_BR: Taiwan - - value: TH - label: - en_US: Thailand - zh_Hans: 泰国 - pt_BR: Thailand - - value: TR - label: - en_US: Turkey - zh_Hans: 土耳其 - pt_BR: Turkey - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - name: language - type: select - label: - en_US: Language - zh_Hans: 语言 - pt_BR: Language - human_description: - en_US: 
language takes responsibility for the language of the search result - zh_Hans: 语言决定了搜索结果的语言 - pt_BR: language takes responsibility for the language of the search result - required: false - default: en - form: form - options: - - value: ar - label: - en_US: Arabic - zh_Hans: 阿拉伯语 - pt_BR: Arabic - - value: bg - label: - en_US: Bulgarian - zh_Hans: 保加利亚语 - pt_BR: Bulgarian - - value: ca - label: - en_US: Catalan - zh_Hans: 加泰罗尼亚语 - pt_BR: Catalan - - value: zh-hans - label: - en_US: Chinese (Simplified) - zh_Hans: 中文(简体) - pt_BR: Chinese (Simplified) - - value: zh-hant - label: - en_US: Chinese (Traditional) - zh_Hans: 中文(繁体) - pt_BR: Chinese (Traditional) - - value: cs - label: - en_US: Czech - zh_Hans: 捷克语 - pt_BR: Czech - - value: da - label: - en_US: Danish - zh_Hans: 丹麦语 - pt_BR: Danish - - value: nl - label: - en_US: Dutch - zh_Hans: 荷兰语 - pt_BR: Dutch - - value: en - label: - en_US: English - zh_Hans: 英语 - pt_BR: English - - value: et - label: - en_US: Estonian - zh_Hans: 爱沙尼亚语 - pt_BR: Estonian - - value: fi - label: - en_US: Finnish - zh_Hans: 芬兰语 - pt_BR: Finnish - - value: fr - label: - en_US: French - zh_Hans: 法语 - pt_BR: French - - value: de - label: - en_US: German - zh_Hans: 德语 - pt_BR: German - - value: el - label: - en_US: Greek - zh_Hans: 希腊语 - pt_BR: Greek - - value: he - label: - en_US: Hebrew - zh_Hans: 希伯来语 - pt_BR: Hebrew - - value: hi - label: - en_US: Hindi - zh_Hans: 印地语 - pt_BR: Hindi - - value: hu - label: - en_US: Hungarian - zh_Hans: 匈牙利语 - pt_BR: Hungarian - - value: id - label: - en_US: Indonesian - zh_Hans: 印尼语 - pt_BR: Indonesian - - value: it - label: - en_US: Italian - zh_Hans: 意大利语 - pt_BR: Italian - - value: jp - label: - en_US: Japanese - zh_Hans: 日语 - pt_BR: Japanese - - value: kn - label: - en_US: Kannada - zh_Hans: 卡纳达语 - pt_BR: Kannada - - value: ko - label: - en_US: Korean - zh_Hans: 韩语 - pt_BR: Korean - - value: lv - label: - en_US: Latvian - zh_Hans: 拉脱维亚语 - pt_BR: Latvian - - value: lt - label: - en_US: Lithuanian - 
zh_Hans: 立陶宛语 - pt_BR: Lithuanian - - value: ms - label: - en_US: Malay - zh_Hans: 马来语 - pt_BR: Malay - - value: ml - label: - en_US: Malayalam - zh_Hans: 马拉雅拉姆语 - pt_BR: Malayalam - - value: mr - label: - en_US: Marathi - zh_Hans: 马拉地语 - pt_BR: Marathi - - value: nb - label: - en_US: Norwegian - zh_Hans: 挪威语 - pt_BR: Norwegian - - value: pl - label: - en_US: Polish - zh_Hans: 波兰语 - pt_BR: Polish - - value: pt-br - label: - en_US: Portuguese (Brazil) - zh_Hans: 葡萄牙语(巴西) - pt_BR: Portuguese (Brazil) - - value: pt-pt - label: - en_US: Portuguese (Portugal) - zh_Hans: 葡萄牙语(葡萄牙) - pt_BR: Portuguese (Portugal) - - value: pa - label: - en_US: Punjabi - zh_Hans: 旁遮普语 - pt_BR: Punjabi - - value: ro - label: - en_US: Romanian - zh_Hans: 罗马尼亚语 - pt_BR: Romanian - - value: ru - label: - en_US: Russian - zh_Hans: 俄语 - pt_BR: Russian - - value: sr - label: - en_US: Serbian - zh_Hans: 塞尔维亚语 - pt_BR: Serbian - - value: sk - label: - en_US: Slovak - zh_Hans: 斯洛伐克语 - pt_BR: Slovak - - value: sl - label: - en_US: Slovenian - zh_Hans: 斯洛文尼亚语 - pt_BR: Slovenian - - value: es - label: - en_US: Spanish - zh_Hans: 西班牙语 - pt_BR: Spanish - - value: sv - label: - en_US: Swedish - zh_Hans: 瑞典语 - pt_BR: Swedish - - value: ta - label: - en_US: Tamil - zh_Hans: 泰米尔语 - pt_BR: Tamil - - value: te - label: - en_US: Telugu - zh_Hans: 泰卢固语 - pt_BR: Telugu - - value: th - label: - en_US: Thai - zh_Hans: 泰语 - pt_BR: Thai - - value: tr - label: - en_US: Turkish - zh_Hans: 土耳其语 - pt_BR: Turkish - - value: uk - label: - en_US: Ukrainian - zh_Hans: 乌克兰语 - pt_BR: Ukrainian - - value: vi - label: - en_US: Vietnamese - zh_Hans: 越南语 - pt_BR: Vietnamese diff --git a/api/core/tools/provider/builtin/brave/_assets/icon.svg b/api/core/tools/provider/builtin/brave/_assets/icon.svg deleted file mode 100644 index d059f7c5161e98..00000000000000 --- a/api/core/tools/provider/builtin/brave/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git 
a/api/core/tools/provider/builtin/brave/brave.py b/api/core/tools/provider/builtin/brave/brave.py deleted file mode 100644 index c24ee67334083b..00000000000000 --- a/api/core/tools/provider/builtin/brave/brave.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.brave.tools.brave_search import BraveSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class BraveProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - BraveSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "query": "Sachin Tendulkar", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/brave/brave.yaml b/api/core/tools/provider/builtin/brave/brave.yaml deleted file mode 100644 index 2b0dcc0188caf8..00000000000000 --- a/api/core/tools/provider/builtin/brave/brave.yaml +++ /dev/null @@ -1,39 +0,0 @@ -identity: - author: Yash Parmar - name: brave - label: - en_US: Brave - zh_Hans: Brave - pt_BR: Brave - description: - en_US: Brave - zh_Hans: Brave - pt_BR: Brave - icon: icon.svg - tags: - - search -credentials_for_provider: - brave_search_api_key: - type: secret-input - required: true - label: - en_US: Brave Search API key - zh_Hans: Brave Search API key - pt_BR: Brave Search API key - placeholder: - en_US: Please input your Brave Search API key - zh_Hans: 请输入你的 Brave Search API key - pt_BR: Please input your Brave Search API key - help: - en_US: Get your Brave Search API key from Brave - zh_Hans: 从 Brave 获取您的 Brave Search API key - pt_BR: Get your Brave Search API key from Brave - url: https://brave.com/search/api/ - base_url: - type: text-input - required: false - label: - en_US: Brave server's Base URL - zh_Hans: Brave服务器的API 
URL - placeholder: - en_US: https://api.search.brave.com/res/v1/web/search diff --git a/api/core/tools/provider/builtin/brave/tools/brave_search.py b/api/core/tools/provider/builtin/brave/tools/brave_search.py deleted file mode 100644 index c34362ae52ecac..00000000000000 --- a/api/core/tools/provider/builtin/brave/tools/brave_search.py +++ /dev/null @@ -1,138 +0,0 @@ -import json -from typing import Any, Optional - -import requests -from pydantic import BaseModel, Field - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -BRAVE_BASE_URL = "https://api.search.brave.com/res/v1/web/search" - - -class BraveSearchWrapper(BaseModel): - """Wrapper around the Brave search engine.""" - - api_key: str - """The API key to use for the Brave search engine.""" - search_kwargs: dict = Field(default_factory=dict) - """Additional keyword arguments to pass to the search request.""" - base_url: str = BRAVE_BASE_URL - """The base URL for the Brave search engine.""" - ensure_ascii: bool = True - """Ensure the JSON output is ASCII encoded.""" - - def run(self, query: str) -> str: - """Query the Brave search engine and return the results as a JSON string. - - Args: - query: The query to search for. - - Returns: The results as a JSON string. 
- - """ - web_search_results = self._search_request(query=query) - final_results = [ - { - "title": item.get("title"), - "link": item.get("url"), - "snippet": item.get("description"), - } - for item in web_search_results - ] - return json.dumps(final_results, ensure_ascii=self.ensure_ascii) - - def _search_request(self, query: str) -> list[dict]: - headers = { - "X-Subscription-Token": self.api_key, - "Accept": "application/json", - } - req = requests.PreparedRequest() - params = {**self.search_kwargs, **{"q": query}} - req.prepare_url(self.base_url, params) - if req.url is None: - raise ValueError("prepared url is None, this should not happen") - - response = requests.get(req.url, headers=headers) - if not response.ok: - raise Exception(f"HTTP error {response.status_code}") - - return response.json().get("web", {}).get("results", []) - - -class BraveSearch(BaseModel): - """Tool that queries the BraveSearch.""" - - name: str = "brave_search" - description: str = ( - "a search engine. " - "useful for when you need to answer questions about current events." - " input should be a search query." - ) - search_wrapper: BraveSearchWrapper - - @classmethod - def from_api_key( - cls, api_key: str, base_url: str, search_kwargs: Optional[dict] = None, ensure_ascii: bool = True, **kwargs: Any - ) -> "BraveSearch": - """Create a tool from an api key. - - Args: - api_key: The api key to use. - search_kwargs: Any additional kwargs to pass to the search wrapper. - **kwargs: Any additional kwargs to pass to the tool. - - Returns: - A tool. - """ - wrapper = BraveSearchWrapper( - api_key=api_key, base_url=base_url, search_kwargs=search_kwargs or {}, ensure_ascii=ensure_ascii - ) - return cls(search_wrapper=wrapper, **kwargs) - - def _run( - self, - query: str, - ) -> str: - """Use the tool.""" - return self.search_wrapper.run(query) - - -class BraveSearchTool(BuiltinTool): - """ - Tool for performing a search using Brave search engine. 
- """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: - """ - Invoke the Brave search tool. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Any]): The parameters for the tool invocation. - - Returns: - ToolInvokeMessage | list[ToolInvokeMessage]: The result of the tool invocation. - """ - query = tool_parameters.get("query", "") - count = tool_parameters.get("count", 3) - api_key = self.runtime.credentials["brave_search_api_key"] - base_url = self.runtime.credentials.get("base_url", BRAVE_BASE_URL) - ensure_ascii = tool_parameters.get("ensure_ascii", True) - - if len(base_url) == 0: - base_url = BRAVE_BASE_URL - - if not query: - return self.create_text_message("Please input query") - - tool = BraveSearch.from_api_key( - api_key=api_key, base_url=base_url, search_kwargs={"count": count}, ensure_ascii=ensure_ascii - ) - - results = tool._run(query) - - if not results: - return self.create_text_message(f"No results found for '{query}' in Tavily") - else: - return self.create_text_message(text=results) diff --git a/api/core/tools/provider/builtin/brave/tools/brave_search.yaml b/api/core/tools/provider/builtin/brave/tools/brave_search.yaml deleted file mode 100644 index 5222a375f84cee..00000000000000 --- a/api/core/tools/provider/builtin/brave/tools/brave_search.yaml +++ /dev/null @@ -1,53 +0,0 @@ -identity: - name: brave_search - author: Yash Parmar - label: - en_US: BraveSearch - zh_Hans: BraveSearch - pt_BR: BraveSearch -description: - human: - en_US: BraveSearch is a privacy-focused search engine that leverages its own index to deliver unbiased, independent, and fast search results. It's designed to respect user privacy by not tracking searches or personal information, making it a secure choice for those concerned about online privacy. 
- zh_Hans: BraveSearch 是一个注重隐私的搜索引擎,它利用自己的索引来提供公正、独立和快速的搜索结果。它旨在通过不跟踪搜索或个人信息来尊重用户隐私,为那些关注在线隐私的用户提供了一个安全的选择。 - pt_BR: BraveSearch é um mecanismo de busca focado na privacidade que utiliza seu próprio índice para entregar resultados de busca imparciais, independentes e rápidos. Ele é projetado para respeitar a privacidade do usuário, não rastreando buscas ou informações pessoais, tornando-se uma escolha segura para aqueles preocupados com a privacidade online. - llm: BraveSearch is a privacy-centric search engine utilizing its unique index to offer unbiased, independent, and swift search results. It aims to protect user privacy by avoiding the tracking of search activities or personal data, presenting a secure option for users mindful of their online privacy. -parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询语句 - pt_BR: Query string - human_description: - en_US: The text input used for initiating searches on the web, focusing on delivering relevant and accurate results without compromising user privacy. - zh_Hans: 用于在网上启动搜索的文本输入,专注于提供相关且准确的结果,同时不妨碍用户隐私。 - pt_BR: A entrada de texto usada para iniciar pesquisas na web, focada em entregar resultados relevantes e precisos sem comprometer a privacidade do usuário. - llm_description: Keywords or phrases entered to perform searches, aimed at providing relevant and precise results while ensuring the privacy of the user is maintained. - form: llm - - name: count - type: number - required: false - default: 3 - label: - en_US: Result count - zh_Hans: 结果数量 - pt_BR: Contagem de resultados - human_description: - en_US: The number of search results to return, allowing users to control the breadth of their search output. - zh_Hans: 要返回的搜索结果数量,允许用户控制他们搜索输出的广度。 - pt_BR: O número de resultados de pesquisa a serem retornados, permitindo que os usuários controlem a amplitude de sua saída de pesquisa. 
- llm_description: Specifies the amount of search results to be displayed, offering users the ability to adjust the scope of their search findings. - form: llm - - name: ensure_ascii - type: boolean - default: true - label: - en_US: Ensure ASCII - zh_Hans: 确保 ASCII - pt_BR: Ensure ASCII - human_description: - en_US: Ensure the JSON output is ASCII encoded - zh_Hans: 确保输出的 JSON 是 ASCII 编码 - pt_BR: Ensure the JSON output is ASCII encoded - form: form diff --git a/api/core/tools/provider/builtin/chart/_assets/icon.png b/api/core/tools/provider/builtin/chart/_assets/icon.png deleted file mode 100644 index 878e56a0512c31735cc94480cef9b8fa5dfcc7fd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1363 zcmV-Z1+4msP)@~0drDELIAGL9O(c600d`2O+f$vv5yPZ%b}MY?$!^0(ngoeR2q9r8kU$C`L_-DtfaD*9CP;uHKKjH23JN5$ zO@wH&siIUi)k2y$o9xEPdgkUHCytc)wtdgs8}0i@QMBXn%<6sS+!@~)?;!#J0I*+C zG$-9||Ip68MmGxRXie)it!i!3`Vj30lo~1>4b1ym5?c5a+MP{ zE+hQKUK))%WlB{&I^(BTW)yh0pLXB!!n1Fb!iGKEuKAGJZ{Je`(>f_^a3UNDA)+ z$CYCQE!I$tE=K{)v zarWn1)+G28KI2qC*)aH@4GF%5&o~n>YZ!}4@G*SGiGX>-m{)?Y;WL#BSTu}TCHNdZ zQ>g%3VU(5Nd-zPEUf+CYOXN3_2xGF%kIgopPv0ivTljzezWP|^l~jP#kyum-J|Fp0 z#05wX19=i8_GcSW0acB`WhD3*J|iZe>Jwm|1Yg5v!~`69Wx4V(I88!-u;*>~jMyEq zm6hbCt2h#Q+h@MpesnWO@@|$5#46zGwa1?%sCpRS;C&CD5vzddqa1uB4u=t4`sZ$` z@LSCq9eHgz^Yz<1nlAl4B3o3wzc`{d z9PjQ4x0=P%%FUL&d%{)kFAg3}bPNPI1_B%d0gizH$3TE%AiyyY;1~#S3S3V5l#RJ_iM#q0RGKU-i3ats7G z1_B%d0gizH$3TE%AiyyY;1~#SjM!Afx!-n37d%jQ{=&`^`#xJ#_9Z>=04HD41KB`; zV^aQfv6`zP87M$ujHpOaXi@;6{{?FhA2~o_W26bEhIEWfKJNLBi}z zGBeo~^%O=!X927>bY^%%d% None: - try: - LinearChartTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "data": "1,3,5,7,9,2,4,6,8,10", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/chart/chart.yaml b/api/core/tools/provider/builtin/chart/chart.yaml deleted file mode 100644 index 
ad0d9a6cd688cf..00000000000000 --- a/api/core/tools/provider/builtin/chart/chart.yaml +++ /dev/null @@ -1,17 +0,0 @@ -identity: - author: Dify - name: chart - label: - en_US: ChartGenerator - zh_Hans: 图表生成 - pt_BR: Gerador de gráficos - description: - en_US: Chart Generator is a tool for generating statistical charts like bar chart, line chart, pie chart, etc. - zh_Hans: 图表生成是一个用于生成可视化图表的工具,你可以通过它来生成柱状图、折线图、饼图等各类图表 - pt_BR: O Gerador de gráficos é uma ferramenta para gerar gráficos estatísticos como gráfico de barras, gráfico de linhas, gráfico de pizza, etc. - icon: icon.png - tags: - - design - - productivity - - utilities -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/chart/tools/bar.py b/api/core/tools/provider/builtin/chart/tools/bar.py deleted file mode 100644 index 3a47c0cfc0d47f..00000000000000 --- a/api/core/tools/provider/builtin/chart/tools/bar.py +++ /dev/null @@ -1,48 +0,0 @@ -import io -from typing import Any, Union - -import matplotlib.pyplot as plt - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class BarChartTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - data = tool_parameters.get("data", "") - if not data: - return self.create_text_message("Please input data") - data = data.split(";") - - # if all data is int, convert to int - if all(i.isdigit() for i in data): - data = [int(i) for i in data] - else: - data = [float(i) for i in data] - - axis = tool_parameters.get("x_axis") or None - if axis: - axis = axis.split(";") - if len(axis) != len(data): - axis = None - - flg, ax = plt.subplots(figsize=(10, 8)) - - if axis: - axis = [label[:10] + "..." 
if len(label) > 10 else label for label in axis] - ax.set_xticklabels(axis, rotation=45, ha="right") - ax.bar(axis, data) - else: - ax.bar(range(len(data)), data) - - buf = io.BytesIO() - flg.savefig(buf, format="png") - buf.seek(0) - plt.close(flg) - - return [ - self.create_text_message("the bar chart is saved as an image."), - self.create_blob_message(blob=buf.read(), meta={"mime_type": "image/png"}), - ] diff --git a/api/core/tools/provider/builtin/chart/tools/bar.yaml b/api/core/tools/provider/builtin/chart/tools/bar.yaml deleted file mode 100644 index ee7405f6810efa..00000000000000 --- a/api/core/tools/provider/builtin/chart/tools/bar.yaml +++ /dev/null @@ -1,41 +0,0 @@ -identity: - name: bar_chart - author: Dify - label: - en_US: Bar Chart - zh_Hans: 柱状图 - pt_BR: Gráfico de barras - icon: icon.svg -description: - human: - en_US: Bar chart - zh_Hans: 柱状图 - pt_BR: Gráfico de barras - llm: generate a bar chart with input data -parameters: - - name: data - type: string - required: true - label: - en_US: data - zh_Hans: 数据 - pt_BR: dados - human_description: - en_US: data for generating chart, each number should be separated by ";" - zh_Hans: 用于生成柱状图的数据,每个数字之间用 ";" 分隔 - pt_BR: dados para gerar gráfico de barras, cada número deve ser separado por ";" - llm_description: data for generating bar chart, data should be a string contains a list of numbers like "1;2;3;4;5" - form: llm - - name: x_axis - type: string - required: false - label: - en_US: X Axis - zh_Hans: x 轴 - pt_BR: Eixo X - human_description: - en_US: X axis for chart, each text should be separated by ";" - zh_Hans: 柱状图的 x 轴,每个文本之间用 ";" 分隔 - pt_BR: Eixo X para gráfico de barras, cada texto deve ser separado por ";" - llm_description: x axis for bar chart, x axis should be a string contains a list of texts like "a;b;c;1;2" in order to match the data - form: llm diff --git a/api/core/tools/provider/builtin/chart/tools/line.py b/api/core/tools/provider/builtin/chart/tools/line.py deleted file mode 100644 
index 39e8caac7ef609..00000000000000 --- a/api/core/tools/provider/builtin/chart/tools/line.py +++ /dev/null @@ -1,50 +0,0 @@ -import io -from typing import Any, Union - -import matplotlib.pyplot as plt - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class LinearChartTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - data = tool_parameters.get("data", "") - if not data: - return self.create_text_message("Please input data") - data = data.split(";") - - axis = tool_parameters.get("x_axis") or None - if axis: - axis = axis.split(";") - if len(axis) != len(data): - axis = None - - # if all data is int, convert to int - if all(i.isdigit() for i in data): - data = [int(i) for i in data] - else: - data = [float(i) for i in data] - - flg, ax = plt.subplots(figsize=(10, 8)) - - if axis: - axis = [label[:10] + "..." if len(label) > 10 else label for label in axis] - ax.set_xticklabels(axis, rotation=45, ha="right") - ax.plot(axis, data) - else: - ax.plot(data) - - buf = io.BytesIO() - flg.savefig(buf, format="png") - buf.seek(0) - plt.close(flg) - - return [ - self.create_text_message("the linear chart is saved as an image."), - self.create_blob_message(blob=buf.read(), meta={"mime_type": "image/png"}), - ] diff --git a/api/core/tools/provider/builtin/chart/tools/line.yaml b/api/core/tools/provider/builtin/chart/tools/line.yaml deleted file mode 100644 index 35ebe3b68bddb3..00000000000000 --- a/api/core/tools/provider/builtin/chart/tools/line.yaml +++ /dev/null @@ -1,41 +0,0 @@ -identity: - name: line_chart - author: Dify - label: - en_US: Linear Chart - zh_Hans: 线性图表 - pt_BR: Gráfico linear - icon: icon.svg -description: - human: - en_US: linear chart - zh_Hans: 线性图表 - pt_BR: Gráfico linear - llm: generate a linear chart with input data -parameters: - - name: data - type: string - required: true 
- label: - en_US: data - zh_Hans: 数据 - pt_BR: dados - human_description: - en_US: data for generating chart, each number should be separated by ";" - zh_Hans: 用于生成线性图表的数据,每个数字之间用 ";" 分隔 - pt_BR: dados para gerar gráfico linear, cada número deve ser separado por ";" - llm_description: data for generating linear chart, data should be a string contains a list of numbers like "1;2;3;4;5" - form: llm - - name: x_axis - type: string - required: false - label: - en_US: X Axis - zh_Hans: x 轴 - pt_BR: Eixo X - human_description: - en_US: X axis for chart, each text should be separated by ";" - zh_Hans: 线性图表的 x 轴,每个文本之间用 ";" 分隔 - pt_BR: Eixo X para gráfico linear, cada texto deve ser separado por ";" - llm_description: x axis for linear chart, x axis should be a string contains a list of texts like "a;b;c;1;2" in order to match the data - form: llm diff --git a/api/core/tools/provider/builtin/chart/tools/pie.py b/api/core/tools/provider/builtin/chart/tools/pie.py deleted file mode 100644 index 2c3b8a733eac9a..00000000000000 --- a/api/core/tools/provider/builtin/chart/tools/pie.py +++ /dev/null @@ -1,48 +0,0 @@ -import io -from typing import Any, Union - -import matplotlib.pyplot as plt - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class PieChartTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - data = tool_parameters.get("data", "") - if not data: - return self.create_text_message("Please input data") - data = data.split(";") - categories = tool_parameters.get("categories") or None - - # if all data is int, convert to int - if all(i.isdigit() for i in data): - data = [int(i) for i in data] - else: - data = [float(i) for i in data] - - flg, ax = plt.subplots() - - if categories: - categories = categories.split(";") - if len(categories) != len(data): - categories = None - - if categories: - 
ax.pie(data, labels=categories) - else: - ax.pie(data) - - buf = io.BytesIO() - flg.savefig(buf, format="png") - buf.seek(0) - plt.close(flg) - - return [ - self.create_text_message("the pie chart is saved as an image."), - self.create_blob_message(blob=buf.read(), meta={"mime_type": "image/png"}), - ] diff --git a/api/core/tools/provider/builtin/chart/tools/pie.yaml b/api/core/tools/provider/builtin/chart/tools/pie.yaml deleted file mode 100644 index 541715cb7d86dd..00000000000000 --- a/api/core/tools/provider/builtin/chart/tools/pie.yaml +++ /dev/null @@ -1,41 +0,0 @@ -identity: - name: pie_chart - author: Dify - label: - en_US: Pie Chart - zh_Hans: 饼图 - pt_BR: Gráfico de pizza - icon: icon.svg -description: - human: - en_US: Pie chart - zh_Hans: 饼图 - pt_BR: Gráfico de pizza - llm: generate a pie chart with input data -parameters: - - name: data - type: string - required: true - label: - en_US: data - zh_Hans: 数据 - pt_BR: dados - human_description: - en_US: data for generating chart, each number should be separated by ";" - zh_Hans: 用于生成饼图的数据,每个数字之间用 ";" 分隔 - pt_BR: dados para gerar gráfico de pizza, cada número deve ser separado por ";" - llm_description: data for generating pie chart, data should be a string contains a list of numbers like "1;2;3;4;5" - form: llm - - name: categories - type: string - required: true - label: - en_US: Categories - zh_Hans: 分类 - pt_BR: Categorias - human_description: - en_US: Categories for chart, each category should be separated by ";" - zh_Hans: 饼图的分类,每个分类之间用 ";" 分隔 - pt_BR: Categorias para gráfico de pizza, cada categoria deve ser separada por ";" - llm_description: categories for pie chart, categories should be a string contains a list of texts like "a;b;c;1;2" in order to match the data, each category should be split by ";" - form: llm diff --git a/api/core/tools/provider/builtin/cogview/__init__.py b/api/core/tools/provider/builtin/cogview/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git 
a/api/core/tools/provider/builtin/cogview/_assets/icon.png b/api/core/tools/provider/builtin/cogview/_assets/icon.png deleted file mode 100644 index f0c1c24a02fc838655e47d58dd00a25a41620e78..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 22062 zcmeEuV~}NCvt}7xwr$(C?XIdWoU(1(?6T2i+qP}nHmBe3e)ry)8!E1657o zp8oB`8EZvXNf&4uo|62zBD`NwJ|4R=9qz;VpU-BrKV=}Zf6Nf~xi|hc&~}nqjzB;# ze@6g=0t}R%iTQUpFy^Y7PMUJE+yGl^dIKX{Lt}b3YrB7}fOy@w|BBYeP6mW-)>bx- z+-`it|3Yy8mH$yQ5EK3j;$+E3tSP5RC}QhiOvpyhO3z5l|AUZ_kk`S;gj-ou{NLt( zdwj%ZPEK~*3=FQWuJo=f^tKMB3`|^HTnvoN49v`Qe-U(!?lw*aZge(|B>&;$KmCXr zI|3Zc?VQYQZ3zGIYhY;W?8HY*{7<0&I{xE2oy<-CJCcp#zlZgAfDHe9!@xw($namj z|C;jtqvcj`FgN}i`5*uMOuYX>{vW#kj`JUQMKfC`+rKP0m;{-NuC zqwha*^Dph+7-toG9`}$QrBrlj)5GfZ*5sVT96*vM&%bi8eK}gb)+p{6~|qHp@d>8LRFk*X$)c2QF}yCRW0=C z^f$AN=qU{a@;svpuL}FuoWKPIq4H5b2}6TQkFQLF0O+&anx>~38#&{~jNJ>5Kgn{1 zWSgTz^iahD8I#MuYRh6s{+(E`sK9Z;HME9Uz+ak0Dlk!4`t+LWvl2TYOEi#c7?l!8 zn6glcCWefg)@|y48OgN;`=BZ!x@~M5px-AQPstRsGVyOep}na)NUdS+xaavS{7V5m zk)Sm>yn#@xyfPT$5arX`FxSx0kvYU%jp>8AWojFlbxFkpuT;vycf&0%ylsod585h> z)!dXt7{^DBtC1*`*uby6^Kyuvsunx%ze*p{YQdvq?4;UQ(SlP6?(Cggrf_R=^kwf6 zU1v2U9(cv{5s3wTF#~Ukax7$q7^>}qo76r75Y9D_Ex9(yeR_^ezb4&}E1p!$E736T zO$8oI@-}8J)ay3P+2t|A*8|-{^0hQmHbYe|c~+n73a}FQ`x<8Hp0rKa0}HM zUKlFCStuWlpXcp7-|_PGx-{ll!;d+2MUJ$uOs_w#d<`I+pdxytg8!kmY+a0EJs7Ct z;?{;XUcR;n#$qEw$Ihc_m;xV(RIlYYvM}GVl9dZ?VJ+sB7JJ$J3M`VER zO4b2nONFe}G$+a85)$=Xx;J>HkKj)~_g}|DMz)XF%Py$Azp%gC#%XD20M`vs49Mr+ zwz)0**in?Ki`L%khtthpzaOFryr-~CH}@NpXY5#M0rOM!j!&o<^O~r77_Mty2crK> z@ndhChWa5(msVl43}z+K>7_aqhakZb0QDMW5uXE|@}f=@7P^dA5JA>`ODRjFu4zn_ zIGI>{A`+wGc3AKBrzbIn8oh+Q7w`Ztrg^G(vL9zMlYQU3Ul<leeJx`N7u137+&t0ZDr??!eL%U5mg<_{CZa8USbGuKty+2Y-ugZgV7&evMtBUp{dq6g@_(~VZSw5*rImtgi2?4`s3`MGd9M$A{$FiR1{5> zRLk$D>ahnkT2Yd5NU;4gbd+OYhtpW)m6!vsoRI`n?{J_fs0g)8o5RmE)Cwg&`HB1B1EQYs`m|8hdG~^X1^X8Z$ zR~7g%Xc5F5RCNwHVdENnkZJdiKAiXU6Xd<%qU=lKOyiyey3qNHGd5JvLhSnTUlNzD#LL#O7lIOcVCZ#t(-hp ze{qtx(_|&Df{c=t0S%2ZAc{K7Gl 
zS|FbQM>QhkK~XaGmj;^JiA9zj73ATLKXX?BpAXZEM*U<(F%*38hnQ0>jfiyb+2#z&7= zjujd_re*B?a?&2nt8`xlMpUlK#`P;;_*9^UIo&;3Oo&#(bDjJZtg&GA=-p%=U{sF~ z$|#EB?Fj<71zcaie0v`*{W14_Oird}mM6trAEUvIS8M+cU7p~ntvVLkDXgxHkYywi z^jfXqHRufhFYM+&xd{&%*mX4L;Ow14Y6IddC!y~#k_B@i6Pd_$Ao+5e#&((w12kbs zrEyON@GP}!f}eJ*e+_)*FUZS-j@{6$Pe+vWp)^2aevNOVNiN>kKa$)1zEb z1xmy1zz|}7S5c9J@?khBS<<>Y(&*VO4a-O;_k|0kr{;&HA^}lfRPrE5d8xtPeg4_? z8C5Kok(-~IUd2;EQ%N${ebPzckxXoNM)G&8D&P3u%j#j{)Gva`nYPjabZYk(BKHx1 zLrh1&mCq=%g_pVor^ykGD{smnUczBrv6~wUA^=u*d7t>ldf=>gAEK#uXZpI+nt}vx_uPN~sbSf{C|Q_Ru3k7Aw$?DHX`?M& zb)n?b>JcNAGon6ee)>LM!+rEk@pht$4ULEPvt`Lc(#)MYC z@ZQMYvV5w90Z(nlU;aW_!)xK2w2=WV=(KaVp;nC=;anHI?j&j0>CB6;i80*Np+Ap3 zXr}heSi|?~HhE>cZJW!V`?g@FfZYO)kKVmzQo7VZ!L0d8POo@Bkuge)leEjUXrud{ zyo9!0?%*8f`+ds}|GLFb?K~C)mabsveuE2`P zM4H~%HJaEiw^UcDIW!ZBb1ov@{mtq{7yvI-z;&?pt3Cqn?=te=3 zU3`~G#bQsrz4^Vj7DeZI*UNjyPJ*2SEpCo(v^&k}c52LyYObG|%vc7qP@ZZ&#_Q0;o5X0wJKK>GJFLHVHdh~m_`!3P)Lo~?6JCdJj%%cTv()2 zrW>LkDUCLK6Z+nN)0IzMnQaA*+3;6bPj|%(KgVo-Fj67XfYbMtJsKbhrh>wQm?L%u z1OM<;+{DSPI_@DutpF85+%CXFaApf%uZo@YVf}1c6_s{$NNM05Q`Pi9b%NYT#S#8CrYOl01q6e8sFI@sphZT4+G6M#IV+$Tua8vT13s!H*RXs2nMtDHOIPz@5(5+XRu@JLq}tgHDZF7?;vjU8-eLOrfHZobUhf2pr z7{-+=e4=43X!ckGS!FH%z4!<1=;XmQm9ffDb81NQMr&=aQ=^&pZAkB3+LN zv=?~g9Mrw9Qeu`K)ItV&5sK=p0slqZHx3ochxGhMFef%IDk%w7K<3J%}^2u+_?cSZKomOrTiam<`}4ohP24rbUdj7(ocmJJ88G>L-BDU>-2TBJKL1e;k)B*r|+T z7N(M5iF1m~F3d(F(5+ogP;Q)NYFvOTyA(#-Q!sz}a3|YlR+PS#Y*~>BRvHkOw+Kl! 
zcAD*95LkCm8T(2Zgir9!SFTsSZ0A(K*{zVxhb>8-11^43a@*$wo=|&jy@-~AP zf`mH~O3mH&JqsW#o5^Uhxg>_ayYvxNK$Lh`-EAeq2&LEO*T+tE#XhvIzZb?;7I|xn zR^+Y?;?Y-E^0)eEVfBA8-x+#L%;3>SO+ZnKHt}(HRDzMU(ot}5_Ax;Zq0#eF4C>Tx zPh$QI5J7gll)n5pUynfKE`;Y5CfFVzn?HiBFS`9xbS@^lK^cN*&sE{inQbh-Tf%I* z*B==MC%@0Z#C~~yAhI)3@j2Rz&%OVR0JV@(UFV>!ZObS+R732us_rdf3ElBvNn=5m zoTQgkL_Ki|GQ46aS(TWXc77+K$4-GG=A)uwGl zeWfE!tRtO{UEyV9H?O~{h3m|J9Y?V+&1IPDyB=*tU0bODRA6Y^YUajK6HRk9@ z-8e>ymD-3E(<_wFxXvtt+;<@#)`BWa!5w+6C1?)8mTkv$%+hK=(&ebUG=p-SX6h}8 zjs3dKf?k_?+oc>&G=~7F(AF7TidL$m5c>TTs?87CaZdcwVf6M$k;I#(ItEEhbA0i* zc0}y405iybqXrqrY8&x6YkS;NR^`2ML+SOUmoe@t^PJ3MJeFXoG?w2D84gSTd3;Vd$_v~ZyalBjpdlWFcpNR9%{q4r{_4oIMKhUDUU2mGKPHSqT z{q-?*Rb@%V`&Gs3^C1>(Q;rSh8mF!VHq9xU^xI5y|1nzV4lplupC>B}IX=0p_YJ<( zZ~pu7U1u?4Jq79jnhdMx$mJLa&>kiTb_cFiMbX#-a(uYAhoKWx+=$|EEps)zQvbt2 zbrA|`U$}u zEhW?8fy)nxi`Ccr%|9tlNV`)^p6>(KRdylW8}7hM5BEg+OHT4ER#82sJ)8Eubnt|7 z&sZQh^I|q|drHCgnMmWai)5sVD}FGHNQQR3<0yV+*^o_a$1qCXU`JXUm={ODNPLA6 z+%KYjKEGnMgN(Dw$v=T~IVUrC5s4h8;E(tZ;I_-L+GdtTz>u+TkdmJ2`{%k%+n?9i zM!g@LWl6#9eY|Q{4?I!0o65|et~m+X4BSn<9r(tgxF@X;ya%YnLx5eVJ1Cm~#g9Jk z0FI~gWuZNdjmnr2X4y|=C~#jgQ^tZGH}+5_ScPMlRsGtjI$=CaFk7M0DTl0jIvJ|= zHieFGKAV2MY?<2uWpYM2>tZaW#gK|j+M_?#;S$0b$^==i6aMXjWr4)Lo(zfR&)a_M zfM>$V5qpRw)q6AXsSHCw0_JroMw(#C&t$` zQ;-NjjK4aYdmdGrdXKeR%fRVA*0;G~FV}ppJq`1xtAeBN4?7D>Wua~95Ig4AXmLD9 zu{Z0>v^mL!LXm>L8f*B1CtSL6r9AF)0^E$%@Iue~y9JdlT?Lzb071gWQ+N}IcMb$2 zgpOq_x|)bT{0W^u(WOL^5AN!uXjWN|`1DENSPA9u`@i15&HFQ7;sR2cWr{whL5)fG zs=c^bI}D#=iKYjv$*qu|qf%Yxd8Rg@Xye2omY1O1f_!@%Y)nQ!;3^3rb#Smdd=r^uy(MFuOf1_WOY;YOB zd$boG6vPaCd(Ou$_j4aXr%C5`n>VOA%C9;mk#h64M7Uw!QR+9N5Dc#nIWOHNdtWG{ zpZKc0OW<9!>w#@5FP#><-~Mhk2qm86_C&ArE;-j4Eg$n8UqjFvpVdfb*+-s!ZdZP04{jjEN+L)59wvsfZz`a^wxtE{kvk#Q%g2vk{j4Vd$iB zJ(a)B>A3+5kGIeR2l0azCv|D{Ervg0{7k8xG@yu}srTaobBA7Qc{#pKK2R4GONl=; z+jT1n0rZ-q(hDc62i{A1^M7YuW;hlV)rGjwl%L(pYT*cB0^jWTlh$n7GG!_-sue+B zRYv?GJvqXoyy#3L^*g^PKfe$ElV21`C^vWQXO>B}!6lg*dbS5nTE2un2KWL*1`r_`z3bC(x&i%`Bo$+4+jP@P@01+^QWA13u@zKK@42e 
zoN6dcd$LVA1JW>2QBzaWWfdpm>*33XEEnxQ70;TGq*|S?a_Idoz0rJIATp?%--dy5 zo57Ur51kuF)aDZOJMS@v;5O*mAq7Az6>*zvndd;alI!;H9VQyGj87+G{*0BULR@Dt zuK()~k%Vb&YNLpB3HSWeYk3Y`02{6JRnan_Gp~QDBps64RY?S_2|4cEpgKMAt95_r zin`zz57$uQz&P25@gkfK;T@U-q~0rs|eHAwl?5&lLuHw;2j{u%v#;uki|s9MytY2G8NB z#T2^`rck5ni_2c%U0|5J#$mf?x)lGBF9M0WXTnrbOQaI$wW4zJm^!Xif~5TG9-X!n zIGiQ0;ru9Weex+_p&#s8s7F7TNADL+OX)m&_evj848K9+%nE@(S4}q$U~-i~Eq}5j z1cZNtS;9H{wVj~7RO0oKq1W0W`@$ETx&K56R7Jz7(64FjiOPiZwvoPSKDHmP{?*`k ztN5!eRVx->K&SF3J&hn%bzwQLM1WMEzHl{YY>rEHa292__sv_dy}0)Wysqmul%S?X zuhms5@RR{RS5`IE%i!!EkEzeaL%d)Fn^#G(7f?d_9lqY|)Ac3IpW08og%KFPPE5j9 zo@w8@m0^2JUnUl&jcpq+t*aQUXsR80##-&|r41`##IYO7WwCJy$BLLszv{Wsb?_5qI|EhtrXOPP1McX#7b` z7X2TikRDo2oAw&E!`SAo7!@moG-3@%WHg?;VVg+WD*onX``ZNFgf-j`qT&+{!~PoN zJ!sLm-HT+l^x?Z2PX6ckh3>7d2&Z+ib&fa7Kt%sYFk~6j8q8i!_|xuKotw6ubeBbI zck~OHr}_c?#$}L=xKYh$!3)nV-!5lSDd}=r?HUdbG+V>=p#t&ar2Vdw%JI@OMHLR& zv_b>;hv>{F^BV$aLo1G&5s*|1(1n;x5L;C^XV*q({3e!|m?kkzIrIu9hoO@~nNVS5= zO^4_q1j(T9_GhY-?T5AoiU*?`;>r0khZ|xp4W9?t&Sr<@Esq%C$pi-Hcv}r$XxLtC z7f>zsh%?huQ1k?3d(gT|V@(%4B2IG2~jC6L#O9(I&^Q+cg zZs2m#sh%{XQ03KGLQa5Rhg&}O`{!&!m#Q;>&k_mz-iDf6eHhIztJ{LZ$TFDVw(6DFf~w(13`AXfP{P)s4`)hZ zA#8bdO5pFTM|^}es1Y;-@xj%n0*2oA=aL#gKdqR5EyYP4ws+58+UcbGSQ?v{x-UbX zhn-vj=X1_JdxBO#U(quI1jcrA)UF2?G63r_LeiW)+MOY9#xs$g&gJ!|6GF1%(}xUZ zQs2QDnpm9YMLw9-kO(f~27W?whCD8RHTGE#d}5?`DB_spdqy5V((mD$EeDf9FAksS zD$uS@x}=&)r*d@~n~ulr5f6vZkwq>Gq<&q`Tx;7j1qu;j^FQ|?c6cEuZTUcyevg+e z%oOo0@0M_5H6?6^Tr6Csj&o33i>Tr210s5w zS;A~t*AAa{m*uU6#{LK${W?NKbE|sq5xGiTZ8-00)^=(iWx3z|sr7s=@sZqsD-1uf z*C3edTqbZh<0f&jbHX4_^AwE=IHOhMi>qujH{scq&tb_5!oP0*O;||d>;3$w>3Jh> zq~A_>PL_98#57~4YYO;Y&)_Y|zbn=c1d9Rce^4F;$K+BsYN0I{bu|Yc(u;pj@#?Is*dQ7iYwnTUS7szN zjw(9ZbI|E(YLI}B*%`x2C@EjnRzQ7z1sGY&+x5}{DsQ3uF99Ua|MDuZ1%*37XPbx?XpWdo5V)=$w4gP`9z zX+p2eTqW|TqLyt{C-y8wmj*wS1!6(~*UXaI>|dqVK1^@b%T^M{@Q@O;}Hw_BEe zkhsg8nHwwSn*tkvs{L6D?J$6@bgxl-L>7Z3mrPC|+exXR6mc#V#Z5am65|Q`3xh9b zc++46qx9`Q4dvI6kCd62p9pK71#y>+2c)XVFf=K~DbLK!e200F<7i*8Z$Kz{5|aBx 
z7nK>|4~a@%UFag$lXWOwTd=!;b~W^^@=t+$U`x6haSpG`vKc?nXixPQ+~rMWVHsK3 zG&34NSA)AxPh-G0g7UsCIUhq;4OS6$SDB>~B@<>sNbMMj5?h0|yjFm7E@9&1K19k*2v_++C&&Ts%gA2$c!#8a&}Lho ziLM71T0yKIbN&+v-`pT@hU72g&t4K$N;m)oy`7^|UR{0NvtFQrsQDeqRD`)fA?_!c z2GDf2&CqpgdkuEhhF$4DMcRzmFDd~CbYMj}=rux!-QN{!O<5QZz-E9|xv-?QWW&9^ z1it$sii1uHGh>eyb8G2w)^Iht$?86?9D8rN zODz)FIHs4DAJ`C1r?3=}hUqP6wtk9#zc@bo@TJ2LDPw|xk}%9(VP#Y3;VqL4DYvyZ zU*<<$?+4#g36mj-8KmEC-qG_7&NJ50whaHc+_xkY4;0}a!2;ejm{5JcYtW{zy0-Sv zQpbSD`oUCo(pJF?TC6re#^v;_onygXnF*I6AQm$9IEE!8$iPV0nQp#P??F`MWxds_ zuPBq#qVl0Wo|a-?$E_xj8&wmgr~2{WSvi5X>^w>Fs?X~M4S-sK%co}s?0D>87+yB* zUsrVVJjxPbj2AqxXUb0rdFyMmuuJ+q|K>Lt8K>fKja)_oIw~Mb<@SP%Ib|=jZ1!IP z-P81+r5+#3)rBLeB^b8uQ|@Xdink4NOA#!2hR}WNqB*@g)h7Vy>%0=eoG5N-O2sKkW+i=!AS`cCboe{woM65+5A`nbJfs!6ge4|{U% zrs8o^B6k~t=bsJW3xyt_B?Utv)$hG%^D+i6qVvImS6Yo3L^?LWb?0xlUrsbeu0P@M z6Zdg+GP~8#03l;CaGJ2=llgN2WEuH)@l9bRp2kwpqc?pLnNED;I9TvXlF$zAD%4<+ zl(#kS$gNHpK*-?jRc0u5I?eIT1jCLhQDF{pC1g9hnH}Vsv~f7dzejDT;M@75V=day znVfXv;aEsTxnNWv?~K5Wj+#K7;KWI*eZdT^U~JLAY|JhSEWR3~nxWCqnwygz5W8Kq zbp-T!U<$YtmzgKV1`(77eAYCFjvyLlvem;5QXwux^YLCK;KYmNCegNo`fc*RecEtl z4h4040{ZUKq==oCj)PwQNGOrhyVP;2=LlaxBbAG|9U}FE$rx(mdU$&CAK0hDPe?1r z63$M`X~#S5-H0f6o=1c6SxQh8*h_1Fd{sbCJGM8RT1{~7BYL`+m4Jb>JvbHcJr$Ju zXw;kH_g4ftQsz#Li03_B_Zhk+4PPqDuO%qQ;p1AkzaSNxC&*7Q>>31XLy~TvB&A`V z-b)z-azz5-O9HWl4&1bwWpbLLn!Zd`UB!u+B=0Xu(T5GWAX$jQbl`00Mi^T_Yx%q4 zusE-RSMNAIRmek29#^28FTpmS#_Fu!#tx4Al}j<_LLyXT(hZE7qR|TQ6p*UV}#u;Ct%u`&PU1azu zgF%7l3h4fT6w74*`^`OMJw)8R(_)V%Nv5MjuWarbW_sy!ch8Hf;WmljYt2M|1Ot6u z?UOu=Y`VIGC>Htm9tW02oo*-7AgM-wFcYF$f-3UB=_rn{r-|N%oOGA6#6KuKl);|- zC;&zug|{IoL*_tnBI> z(#GR)?6YTcGrN)Zxi(%f2Za^aTfkw>eJ@{^-x8wiJ)lx%!&mbUK81@9_vaWNG&yl8-l_k< zo*A`v%kXJUmtXV>=Ey@@K8m}PGBDR~D60uoC`BJK#!r~EVhk-%=i7QtF)oHxUhoZ9 zaK?$O9(O=KZ{sW( z9tBumv{2|sh8SY)3@8udglHpEx?v_!ZP9#mBO>u7rtH9i zXoWt?FYAEPA1+b=E+r1p9R|+Ghzl1v7>~srAGLaLFAv_|hLmYa1(NcM77khfNrNJK zj|=ucT$JT_6IqBuT0-)-bVjCMe7mt4Y<@vRwY%JN%*Fg}YM048-}e|x)KVD#iju?+SVE&Kj4YxZ$?emz;~S*jWy1PaP+K^$p5f8eHfZJ 
zN+A?9ZVjorHjVrgbrqXCZ$}y2q9)+wprYYHH_Q&!-6Hb+3E8-q z$IBZB)^YGJ_?mY&grD;MHTEE3w8{ion#RY68JFW4!Ef8h?!cb?X6E} zfgm2IU#i#1@x1DCA=to)ew9)l=7V=UnvNbO@U#pkDnABZW`@KnTCu*gOYAWxMSJaE z4$bztCRrS|OQ`rYWWlMc(B(hF5Jc>3k4Q{lBsZDYyu^=PzCm?|nFW0bh|<sWtwp3vg7>fs3ZUI4g zeUrG9BWbI3T^Jk0^KLfWov$GE`I7T8n)XPw>+>6VHVt)`HWfW?o{dRH+tT9`ck*BP zYrP{(B_RUU0k)3HA6n0Iw0PBz3UQh8xwMKoV_=x~tvR|;x4+(FvF%wq6jX5W62IJ- z@&-1y=8cA=H8sFejBeX6!tuDdmTmogTDCpa&)(O3p_1bWz2)5?81uvViWu(jLPoWr zy&tZ!S8R{H)u4JQKk~ScgM@Ek4kfnvQDW2|7R&CvUYe_82ZgS6h_@se`DfOFX^AF4 zFuU_5wlYDYq)Zb(4ffnV+-<%c22* zGjMT+-n@|yebV^w(iD-hH-jRRzRX^3p@U|an3ub+q8Y@;0#x+_LSnOgYF?8bjo|i3 z5O|jWaOetLIq(Jco)#wk$zBe+Zo2N)iqe3boU3t|3#iQ4!yVahgGjbXNdVb6cTc%+ zlk_+aL8pNXvEvEf=Ok1%?#tzFD~vYJT#%{1BUZ22trqvlTS6o-w$KfB+Q`Cd7ekFK z4A?z)#s*6tDp`3j6nBw9k$oLMj`f;S<(&>5&e9YFl@uH^ksSvesy{vytQ+KF0p2Hf z7-H(p{rSgAcs3j6l@Q|eS8u|h5BME%u4a}1J8xI>thZErtSB|FWM(W6bL_Mhija#X z4nqyiM~+bg{x~O9A`EC%sg|DKduGb?=m9#drd?Pnex|*Jy@lNz3rAJf(*w>mB*++Y zFHx14M~8okphSUraVU0UxhO`L!>4uR)VB9_T|An;S!#zSl|Ys;-_!4S0L2kdEqBHD zLEGqX(|LqODy|A>@(bkPTQ~++BuQ;rFP^n&9Bta@WA|0*1E_Y3(Hmf;@)ojZ z4qYd$)(WzC@lI{$HB?C_IX&Zr=dcT=?cQ>24hJInSBNbY)R|Ww0!w`yq;=I#i=rI3 znrzb$rO-1Dj4rj|1e(|O9M5WeuU^c@adt*r69FfJ)_i1QBBe2#f)!FI`UJJkYrg5B zTcua6oE__)-Kfj>J#F?}l3{CdDf{MZ^IzT4{xviAE73_383DL_EC{q*%u6^fyI3RF zbK`5ftIWMls|4-waHGwJBGVwQzY%#6gbY99b;4w*L>FmUdHMtbF!2QZ_MOB9Ei>;; zeG7UQ5{?C974X44yv;Lt=ZjfBZ3+D%kn^t<`~-cPY`6K9bmp^>R&aPVplMjxHP$tD z>LPxjc;QvIZVu{DuKo}XS;CKKG;N@m&nUn)91a<@7`ibxfDzWJcQm+MbAI)cI}d*Ff7P6LoU$mqvFdo2(~Uj4-qt#)6HJ{ zou8tb2j9()p;dRQD5=-x7fL`Z_74T1l+-XHc3uDZPQ~*dz2R<71V^rAIj|i;`M7QE zO*{3p;|gl#msidiLG0eMk=GseB&jFba4?jvfdLN{cvKD|xhQP^>f(GTZf>%dU&mkH zQlw=kS~qEOjY{@?imh!6q4&o{Wa*8GyEeiMD+WGTdMQh{?`3zx*ZfIO3`;PZ>x4$! 
zyW_;GYH`uR_i(L%@384a)Z0`Jl>k-&X4KBfNzfO8cTmxGhC2y-d)1@B@aqajG^g3@ zgu4w2PYPaL(@|FYIZYXn~9(Z#q|Ip0@0L_y5v>!J*TmS67P~?Cvrg0 zCUl{W-$VuA0_Xe+rC`WTY<}23bclb8{DXwnE~hA)$qR*=4?&c7_?N>NeXPWluTSj} zYDgr~k$fCw8@yy3{1bF6IwM0cAqm!xC!Ajwamj*V*PJ+P-H_FA=J$SvWiiaG=OTlc zJGTwV)AanNF#q`6Vqpb`QzB!c_=&m!N5uf~P`Z z)Y?pu)b%5vG$W93YfHMkLhGaEITU* z$D4<>(VIfETr{nbAJT0$s&ODHUY|4_Aqp%_*Kp5>0p{s%`EcQLp8>q2a1j2Vs}709*WUgwnmHJh|ar5=nxOXLO}MD#}AL zGHb^nrKYjYwcof>P17uctP#2IMtgDN$mFJ`kDp&6d4Xdeubr;-``K6lA-d{G!d-BM zllJmCq>iG)Y$-TGbF<~gg-{z`kkOQWQa%95Pz4qK6UW9j7-~BtNo_*eF009fnE(t& zpH+~g?<`a7r}^KVPBzDFCHXLQ0#qncmgPXhUZ?nBNdHW2@y7er+d++7`6%B3OLE4*qU6k<11jExm(A zm>rdj>yq?T5q$U0(Uigr7yb5e$1qywMH~po>e)gj_*EEfznLDtoLuSN-@Kr{$HTqjz>Mw9q)U|+Tm(5AMY}Y@hcm4o! zmU%D84?PW8sHOjjq;2~My#*4}#G*d_zSEeTHT-bL{F>A5S=p#=6EG4sj~qx9JFVUk7eppZfQQ$`hY6mCQ$+^q>t zoA%l|r^_`X3&j-^t91Gt-GJ_3>~xlE#n7$(uT_FE>)-|ABm#$1fe(N0nB zciSKSO!v~Qe?Q8%o28d1e-+cm@!Q+6`gcBGDzQR+!W5+zaONrc+9T zqLROS?Ob!Ckp@(Kz@QqtO-4if`1gJ6XP4xB37S$HWy%da%LD3Z9)9r0?DUOsrI|hE zpMh$>_zX!8uwo{EC0{jk4N2-W+z%>1F$Muo2e%-)Qu5&VcrH^E&_{lsPUf)?Vrk{* zf*8Z_cDiv^J%1*IQwjvKPHLO$&Q9s(KI z70nVF*fYYvkgBJA(?;A^sGHF9z&O^ci$4n9le7rF8+m=#n6z}cww_nFMB{g7RV&$!{vr3h! 
z?LNXiKKt2k;Ovn((p(JWPu5#(P^Iqx(y(T8Wj1=DRC!*$&P-pSb;Gmc(#C@HCjklq zR`Mx3iHIOPobVQ}LxFabpgn_O5u}z`(&0`UH_FGiKLpNu`#Z3J_{Pd!djf66z&D^# z(lgPTxCcQScz5h#$n+(it-b?b-Piis793EJ5hG~;qooaW5Uf>x~J;)g1)bi|eJ-B5TFC|OX3jB~+J9aTeVru5@$~;W!v^&)y%cBl zQ9F)7FUct?Zq=J1^+Bt3=ln@1=`EZIOVh%@oi$C+P%c$bcH5;C9rthnN2nPpMIi}J zEUaKOxd4X%CPU50#+OytDxK^WC&vwGpNov=GwDkx+m4*W{^31u?U{}CB6-)}A6Q;w zj{jzHcz2NG@hqB_FKm?@Tmt+J> zxKQ3MRxbPaiJ5Bdo#bc=+Ii_u4yu1P)Wi7Zla*k%3YZ5mPnt~c_?6vaLa0q*+ zhi@6|P$&Omz^ zrK?kD1vqV84;Z!t3{m$KIen0d%glrH<0~?9Q;%0#xX~)WDKwERdQBR(bky1*M?*B` zWU)9Q;I&CP2B1<0hLYwDhBHT&q{1L)z>zZ}4pdHl+P-xzTOsi4d_gvmy%0u>C59gu z5GFf^fBD7hUoKIzY~N#0%ob228A2&--oET|s4Oe==3R>dI$t_3<69HE>Zh zGk>&Fkpw4~41#TgpJ&jc6j1J3aPU37+FD~sF7Ln3+Sfx6+Y5tn~XpgCHCT%#-nCR=yfb8 z{p%9lml3dMX~oKqotk|u?7*SE-|9S9v+yUVRQ8Gl$BK?Pdwy%Mh(8#?QbVZcCXBo; zXWw9zM(}Iy@q)BDZx|Ig;O|p)CysVHh?H+qQec=7(wh-Z&}d1cCuQJzXzMLj#>BQSUwyB;nZ)2rdHZ&5VL|~jpSBrD zgTxjKQ-;syIag1B6>@S6PgB`;6bim%P-S3)m8Qatc8B<5Uel_~7J;a9qa=VLz7r48 zwBhJj>Bb?-2CbkH#<4ihTCC?0eH!ELXppl6e@-T^D5y^9Y6_4q-)2^&oBEGO>AFf+ zw;xYcU601awY7BQZ(+bP0QT0vq3qc>2X6_U5g78WazBCjMygdz4C4fVh8UEIK_x8zHSu(wJJ+n<^2Pa(>l_kfoZ5=UX8p(;o>Gs!HtaH&!~C1Sd{>vbK)E~cS-pB{voHp&U5;LXFAmUZ;asy;0?0~%+;!}_(XB6;m68B#j1 zTz|$&Ma;DpN$XHiIzH?5{E+W0+(slYXv7pU{5bEFY7VCHj6HppJ@RqnF6Nc*R%$D4T4QT?BfP=$72^PO8CBCaPeB?I-Z(Vf}&#l zejeJ^R~s787o$|* z5&Z_$4{$yF-7He=1->iPl0z6aPlOqVpE1!H${GN*1)byI=g-Ah4`}Pniaa}KAsalv zMLBCU((G87uL4A|W zXhGwllfg=RU->+qDqM+l+L} zH>2)JukmW>ah<$ju_0C;ZgL0)eIzEB11Gq2ed=p))u%3z)Ooyo$P!R|JUAF1Gr3<+ zvc<3G2dXJPT-W2&$@k{SRw7JG=+#EJXRbv5dyKjgCihjToRm8>>dQf+iE>3*E=I5O zC2g>C{DFstV6gZ+-F%B#r|HC0LB1?%xhd0tjm|W21qJaB6M?W$tY90wMHCp`)tR*t z{NMLSprHQqE&w)vgkhQy>MO7$$t;sY3#`bNh5HkPE-;V(RmxetMZtYY#;1{wf4IAwe$Y0vd1ym zV*E9sGE#34znv`SCbF>5r@WbFA?36XFwv2!MWh?1pttp7Mv--LG2{*4Y`17n05{sf zjPO3W)P=Jmd0XlE?%$mpbe%m|k0+C6YIQb+Xo>Mddmob1VM0-}E~U zq0{*5cXY}?6p6>}7*7TB`;yKuD#YMWAnj^wXm9v}`L}UI$1$Ek5*nN~G|EKTl`%H& z4CgNa6LW2vAn~j)Lobe`?2ncK(&sSie}?daCs{4O#)h!GJj-MAv049@OGBym$MUm1 
z8wyTRN35_mtZ~6V1l)PoP&h?=^ZMjHdqCS2Ii==Mf`~6{dl5*jyto2>YW3UVZQkDO zrDP4#_~Xv6WuK~$RVVaS7B`Yf>I-5cnP5P2@f6pYAJVV?)*6Fucwl zz1zO5{0%gH&7PQ0OM5Y^mxq2Mi9Nn&S1m!>CT+-Xddr=~Sj?yvS+TXRXC(E5bPw39 zW3#aa=~Q1Fh8PNGC3TUtaUodm!}pYr0}J5NFSf61Utw85*TM3P1z7(OdvK)6UBY!N zTeb9B@1fsqwEzAnH^e=1s`hr8Mv#WE4lTUa29% zP1eV~^74bDf*7b**);kR4!rmIH`zXqjC>$v98C%X|7EoLaLv=84!%9 z>mW7U>y-~X3Un97TrBfE&}s6RkACwsG6DwU_S8>LNpCLQ*BPu+RcZygy5mf9l;}(P z#$;-BGh3xReAvB;OP=VN!?=?*>?+v!Dl0g3-qR%Kwc8`blS)gZ*)$QguWN@~c;i>^ zWitI8mlNdh=7_WJhT5%1BzWGEU7n}?kmf(Fk9D?Zf0`W_T7hhe(`;fZ(gwx+oyA3^ z50%k6M9jXYG0k+^0vts~ofT8i4eea2L$?>nm==<)S5WBm2$T-mb8enu*@UMEz;FSd$ptEEN+v9 z;n-$VK1>R!h?P2cO6yEppO<&2`_QKZNdRguQgTFeGN$8g%}j0;1*naR0*><=cywON zFe_wNI!1gxq?eJSS*OjjCEXTAbK>~1Aaj=7aCJA&wGP-^EpqJlzOSw~5_F?E=pHRN zRu)UOIPi1(D$LTd8*FJ=_aoQ}H@{EL(V-*L1J-g;AVA$PF)$qquAdvo(LXv@gMQ@K z%zK~&W-#`8!-e6fSVBN@K7wKZKH_B%F_x?Ts75&Rk~M`-@Cy<{=sW&jKuZ3nhN2W< zOMC=mv41Ho>8cj~bvC6v5gHL~3r$B3VX@;v;^r5h>}n+v=}$4*XPKl|C> zfvXd(V>{*-3KejSzm`(##=x-0D)J3-py%4OXa@T$H%Y3D3qy>YZ0z|MHpkLV{Ka#q zU8tI_Uv{!^pRe|3w?}BJeJkljJ4Jcb+#4u}Y9toZ9o~w`-9bF;S45$aynE%z7wWYm zddMo{L0(l(tIs;w9dZ7l9vF~uXULKw$79M6W#tg5Sy*U+J{Xxqw?=@!i`-IZ-7Qzb zma4t8Ro9C14FPr2QmH~%;-e&6U51ICrq{*%`1lsPcmo6`s^?OCSc;ze0`M5=A8~I{ z?QJUp$j)&3H64z;o6k9k{7dpZS#|na^%1aZ)PDZ%L;8@ZSFEx}i*}1uMS1>28CJ7i zKP7R^>2Pd|oKwinFXGj+a?eBDj+ljSIMYF6&VnNHrH!b~jXpd6en-h9)f$na^q15@ zT2QK=o9N!pOF)#JYpeon_MogMpZI@4U!w8W^ai9RcH{GHs+pItP!P@W=Q)v!rWINpyD z7^51Dfs{luFs_j)+EIggwHI1PTXe*72>_vwbD5yQZqfv9I6uIP2Q58_`nA^3Jc(5+ z-^!CCw%pL4uN(NIagVTp4hR3PX+h6OM@qL-(p?9Up39xsihw4 z|4O8xlU8%z|Hzx#YrZ|Rb78?P#B{O`87&~a@P`&bOvp_8$l;|DIEubmGpvDZYkwi> zLXO3bQbC;kdM;YR#v5(wu{|eHiKWKRfo_&mFum-`vHVioQ6g0+B;=8S6^%;fH{?xD zCCd?8=!RaIn4Wv^|8O#ZvvA_*o~UmKW-7@S56gJQI@X;GTCEqF)TXOj-=OxoXc5=B zqb+PreWXzg(oPX#mE*6Dq4i;44MqPE3d@!!gi}JFc$pJ$$Xe${8+0i751Gnwos|anF$afUC+yL zxM%A0CqhqjB3~vC{}_M@udZsNfftNd`ozja#gTa-^p}9e&H07i;fbT*lEmU&D+b*M;nAR2GP) z1My%c4K3ZO18!EqWf=6p6Cy>=I_j{Wypcpjm)v)}Xq$X`@ecBzPxcg2jpn>U2mi=l 
zWxfiX4qxbTAwWhYMm2-ZlexYybibylZS}K)RfQ z8(@fh7^YL-SvgzC-!@t(;c3iYny*wR5rIOtk|p0IDWxdGtGe~0eCVQ-skY!ox4@|E zg$$(1@v_x$F8_Z zybb%Lr8D{-!C3ocxFXsnZ@x8fPd@iN*5Gmy0+o}l! z!ru;v4mmXC@Fj%=R-E!0MrJfg#$y5T`Fjx_H^ls6cIJ^zpF*i_=1g?gygmQ+p$lSB z#z4~p{}L^>X=L`79|F!r49>8aTiF7hy!8$FjZxnk?2f!&AEZ>MZK~o7nmKCi6?s9@ zoxsilZwRUjIG>5Mph>sMYYbgDAuB?Uox42y*PfoMk2OlqgT*cbAMk=~D}tlXcq+P+ zk9aaKr*;+wZ(gtJkDVnWX^6HNJ~h*L12+`*yKbg2USo+Je{3+_q_%cwZrV8AM}<%l z@<#H~KPIUS$Hi|N1T}quGQ`{pk*D7&?5S}r1$aBXKsGM@2J9iu7=7U zj_z*ogoYWaZm-AIY<<`(W*^K(SFQ2p>8wx~E{t#M{YDPrWF^B?=$M776dRY@$+9^?K{;vchJ0<^0cjrVyAFFMR6~Z=?Vg_U`$voE0g^#MJJ1hatru?8 zz*z^&*uD!=SJ;~QXUDfJ{5s!w%mlU*(=%1aXvkVxd(!uOX708Zmsvbt=g&x#l-kJl zNE@cep>$Nw9$poDfp<8Qbu?1ti}5S?MWahXi{w=pyYPCZ~5X~*s?I3iF}l;^W#;S2q<5t$_Y1^rdTN{ zkoKN;$M2)-qv2-DsU6EVK;zKb&Coxeps}p`aFslvrUfaDx~lhUTeV-i-HYQg2I1Aa zirJG_Ga=sE{_`^*TXYPa+%p%3R5;^IlSB@D`=Sn6Fv{ngXp~lQmOzPD_?3cGZ+e_? zML}w>WH)9yB&`V-z#6~E4hTI+eYe{L)K6%aU`*(k!kr9NQXbT;)_&sEJxS9kX1woK6mxO@hljx9RpYPv+LUN#Zjdhc%E}s( zjeSRTpf&R^?ZX}s%CB;xCXMR3jxcfTP+tE}1kzXPl)Td|tlcA|EPc{@w(vsK=N>I$ z(}OuP(T~u^3Y&U`A_>jlhlPQ;KuK-NNw z(R$t2-?O`yGzk}l79m&w%Y?ViO2pkgx~J!txV7rZGn;B=lFR&Pimm)pylpWkeQ`DB zeM6RUZnoIX_|$Efl!RFv$tr4Eq;}QkZ=H&YQ-qxiR}(TFshJK&?TsspK(;yM6iPc@ zW{u4&et0%9#SxW(urr%|*pRaDgYYv>i(;WoeK|%%bUN)P?wVO{Fhe&z z!6kmSp3-kHH&63qCT4%HIzz#uOC@8!s-th}UV_7Cp+W4NP?s5F%C7DKI`E*RCEFtej_BUSDQxsVF1}D=|)`!mTs$0Azqi z@GSijd#lomQdbyBvqkj_ZG2~we@?Mnf<=-0x)p>~{jy<%7^@WlH)2eHNop}!G(_~& zDuia?QP02lw@oPI?d07F`5jkC3c_iw7L}%M*NW?#hBh7II*ip};s2Wll^}$aD36Wr z6?~{gn_m@m?=rQ-&%>ucTqKbom2J?jj&02d<2y5B1(RY^`&wpzv{ None: - try: - CogView3Tool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "prompt": "一个城市在水晶瓶中欢快生活的场景,水彩画风格,展现出微观与珠宝般的美丽。", - "size": "square", - "n": 1, - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) from e diff --git a/api/core/tools/provider/builtin/cogview/cogview.yaml b/api/core/tools/provider/builtin/cogview/cogview.yaml 
deleted file mode 100644 index 374b0e98d9122c..00000000000000 --- a/api/core/tools/provider/builtin/cogview/cogview.yaml +++ /dev/null @@ -1,61 +0,0 @@ -identity: - author: Waffle - name: cogview - label: - en_US: CogView - zh_Hans: CogView 绘画 - pt_BR: CogView - description: - en_US: CogView art - zh_Hans: CogView 绘画 - pt_BR: CogView art - icon: icon.png - tags: - - image - - productivity -credentials_for_provider: - zhipuai_api_key: - type: secret-input - required: true - label: - en_US: ZhipuAI API key - zh_Hans: ZhipuAI API key - pt_BR: ZhipuAI API key - help: - en_US: Please input your ZhipuAI API key - zh_Hans: 请输入你的 ZhipuAI API key - pt_BR: Please input your ZhipuAI API key - placeholder: - en_US: Please input your ZhipuAI API key - zh_Hans: 请输入你的 ZhipuAI API key - pt_BR: Please input your ZhipuAI API key - zhipuai_organizaion_id: - type: text-input - required: false - label: - en_US: ZhipuAI organization ID - zh_Hans: ZhipuAI organization ID - pt_BR: ZhipuAI organization ID - help: - en_US: Please input your ZhipuAI organization ID - zh_Hans: 请输入你的 ZhipuAI organization ID - pt_BR: Please input your ZhipuAI organization ID - placeholder: - en_US: Please input your ZhipuAI organization ID - zh_Hans: 请输入你的 ZhipuAI organization ID - pt_BR: Please input your ZhipuAI organization ID - zhipuai_base_url: - type: text-input - required: false - label: - en_US: ZhipuAI base URL - zh_Hans: ZhipuAI base URL - pt_BR: ZhipuAI base URL - help: - en_US: Please input your ZhipuAI base URL - zh_Hans: 请输入你的 ZhipuAI base URL - pt_BR: Please input your ZhipuAI base URL - placeholder: - en_US: Please input your ZhipuAI base URL - zh_Hans: 请输入你的 ZhipuAI base URL - pt_BR: Please input your ZhipuAI base URL diff --git a/api/core/tools/provider/builtin/cogview/tools/cogview3.py b/api/core/tools/provider/builtin/cogview/tools/cogview3.py deleted file mode 100644 index 9039708588df16..00000000000000 --- a/api/core/tools/provider/builtin/cogview/tools/cogview3.py +++ /dev/null @@ -1,72 
+0,0 @@ -import random -from typing import Any, Union - -from core.model_runtime.model_providers.zhipuai.zhipuai_sdk._client import ZhipuAI -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class CogView3Tool(BuiltinTool): - """CogView3 Tool""" - - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke CogView3 tool - """ - client = ZhipuAI( - base_url=self.runtime.credentials["zhipuai_base_url"], - api_key=self.runtime.credentials["zhipuai_api_key"], - ) - size_mapping = { - "square": "1024x1024", - "vertical": "1024x1792", - "horizontal": "1792x1024", - } - # prompt - prompt = tool_parameters.get("prompt", "") - if not prompt: - return self.create_text_message("Please input prompt") - # get size - size = size_mapping[tool_parameters.get("size", "square")] - # get n - n = tool_parameters.get("n", 1) - # get quality - quality = tool_parameters.get("quality", "standard") - if quality not in {"standard", "hd"}: - return self.create_text_message("Invalid quality") - # get style - style = tool_parameters.get("style", "vivid") - if style not in {"natural", "vivid"}: - return self.create_text_message("Invalid style") - # set extra body - seed_id = tool_parameters.get("seed_id", self._generate_random_id(8)) - extra_body = {"seed": seed_id} - response = client.images.generations( - prompt=prompt, - model="cogview-3", - size=size, - n=n, - extra_body=extra_body, - style=style, - quality=quality, - response_format="b64_json", - ) - result = [] - for image in response.data: - result.append(self.create_image_message(image=image.url)) - result.append( - self.create_json_message( - { - "url": image.url, - } - ) - ) - return result - - @staticmethod - def _generate_random_id(length=8): - characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - random_id = "".join(random.choices(characters, k=length)) 
- return random_id diff --git a/api/core/tools/provider/builtin/cogview/tools/cogview3.yaml b/api/core/tools/provider/builtin/cogview/tools/cogview3.yaml deleted file mode 100644 index 1de3f599b6ac02..00000000000000 --- a/api/core/tools/provider/builtin/cogview/tools/cogview3.yaml +++ /dev/null @@ -1,123 +0,0 @@ -identity: - name: cogview3 - author: Waffle - label: - en_US: CogView 3 - zh_Hans: CogView 3 绘画 - pt_BR: CogView 3 - description: - en_US: CogView 3 is a powerful drawing tool that can draw the image you want based on your prompt - zh_Hans: CogView 3 是一个强大的绘画工具,它可以根据您的提示词绘制出您想要的图像 - pt_BR: CogView 3 is a powerful drawing tool that can draw the image you want based on your prompt -description: - human: - en_US: CogView 3 is a text to image tool - zh_Hans: CogView 3 是一个文本到图像的工具 - pt_BR: CogView 3 is a text to image tool - llm: CogView 3 is a tool used to generate images from text -parameters: - - name: prompt - type: string - required: true - label: - en_US: Prompt - zh_Hans: 提示词 - pt_BR: Prompt - human_description: - en_US: Image prompt, you can check the official documentation of CogView 3 - zh_Hans: 图像提示词,您可以查看 CogView 3 的官方文档 - pt_BR: Image prompt, you can check the official documentation of CogView 3 - llm_description: Image prompt of CogView 3, you should describe the image you want to generate as a list of words as possible as detailed - form: llm - - name: size - type: select - required: true - human_description: - en_US: selecting the image size - zh_Hans: 选择图像大小 - pt_BR: selecting the image size - label: - en_US: Image size - zh_Hans: 图像大小 - pt_BR: Image size - form: form - options: - - value: square - label: - en_US: Squre(1024x1024) - zh_Hans: 方(1024x1024) - pt_BR: Squre(1024x1024) - - value: vertical - label: - en_US: Vertical(1024x1792) - zh_Hans: 竖屏(1024x1792) - pt_BR: Vertical(1024x1792) - - value: horizontal - label: - en_US: Horizontal(1792x1024) - zh_Hans: 横屏(1792x1024) - pt_BR: Horizontal(1792x1024) - default: square - - name: n - type: 
number - required: true - human_description: - en_US: selecting the number of images - zh_Hans: 选择图像数量 - pt_BR: selecting the number of images - label: - en_US: Number of images - zh_Hans: 图像数量 - pt_BR: Number of images - form: form - min: 1 - max: 1 - default: 1 - - name: quality - type: select - required: true - human_description: - en_US: selecting the image quality - zh_Hans: 选择图像质量 - pt_BR: selecting the image quality - label: - en_US: Image quality - zh_Hans: 图像质量 - pt_BR: Image quality - form: form - options: - - value: standard - label: - en_US: Standard - zh_Hans: 标准 - pt_BR: Standard - - value: hd - label: - en_US: HD - zh_Hans: 高清 - pt_BR: HD - default: standard - - name: style - type: select - required: true - human_description: - en_US: selecting the image style - zh_Hans: 选择图像风格 - pt_BR: selecting the image style - label: - en_US: Image style - zh_Hans: 图像风格 - pt_BR: Image style - form: form - options: - - value: vivid - label: - en_US: Vivid - zh_Hans: 生动 - pt_BR: Vivid - - value: natural - label: - en_US: Natural - zh_Hans: 自然 - pt_BR: Natural - default: vivid diff --git a/api/core/tools/provider/builtin/comfyui/_assets/icon.png b/api/core/tools/provider/builtin/comfyui/_assets/icon.png deleted file mode 100644 index 958ec5c5cfe296860581e17b0987d7d131414f12..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 213986 zcmeFYRYP1&7c@G!yAvR|yUq|iSO{*x-2wy{+})kvZb3tWySqEV-Q8VgKA!hG=l=YF zbG7wiuU>0aRqw8jP*#+|KqW;5000}ZxM&1rJK-w0 zs4n60UvSY7R*2qHRqZ{+AxSs>Rmid7v$lx>tClVqkIoAdJ zKZE_LA8!qu#&E2%;(VV;(ttGMo%=i({%~kf-mg2kW?*oHlK1KCs>4~#s>gvodKg{r z*5z*2?fYN>{=iKY^!p%w!*3e@0J=l@2A*#8##`ZN4ga&^R>eX@#@Sb}k3;yK1*ZfP z8J-%JQLZRZZAePFejyT>IgsXO8D#-}WV?Y_;P#BbT9iL>;C5*QBXyut3eC_Q6;}a{ z6X0jbG){LE2uVcFk@5x&tp?xjzC#NpkNVJ42Ko7H>Kc`0Ifn0K{b}&V^I} zN7i3M;PpVU`S;Vser@hE*#)#q6#RB4;CO45$@gAqHc}^mIx8?o*B(jsc;=Gp)>GEY z;^0*kgGlI-(?s4m3dvOe%pQ#qL1|Eb_c}-d-8rj&ff`$d%mk-7Euo=QmkI<3_h+V- 
z`x_Y0fWT|ty`#Xy~U{lXJuh-vl)utTn1xN$4nH(5XX!r#8W1;5m+?`1RfXSd;_uC zE^?dp4Miumr=`kUq-Q+dfvR7)@IN#HqB!XC*Pi_R3fifTqy6;2+8}x^H<5itt<~Zy z=u@dCn%Ux&xc^`sT4{Ix>8`eHHtwU^ibxComy!jdj3I9n>XV0F^}g)U=V#H`6@_+P zl1{^PAnl+nLs|6ZKqZi{fg@^>0Bx16cDg&^_l!!K3yDE8WX`aj%!WZsI*T3>2S_voX`@?0v) zdYUWQ1tVTN;qiJmPH7Z{mx0WJ5SBf{ zg6%m+bZz-nn21gIY1swy-EAuA7R93h*?i6mZvL&qVy8t1EPloILby?u)fzXB&g+xP zo!q}qv}j*{-z%=5i${e4|JQ*#RFg-!Zu}*}cVA9@Pc`%&n^?Nj1CXN5k+cLwt!RVA zuz8$dq^u8jm(sF1REFlFOCzxngc8VP4B@}T2Gp_&>5o;RoRA08ZxcnB$w%8Fqy#GY zz3eIg-&FTFz96_ekzf>fTib#!M zn)X^H(-S%(8UkC7N2yz-r3oe}MXaC0awIUtS+K0Qv04Lp)OWiKg@*a+DR;%uawIg( z$Xs()>fAViZl%OQ>xJP!RF^Rz)z_!D)qi<%3m(tgS4SU2_F?6HcG~3&+&(8B+jGO+ z%V&8e$O~@>&9pU8;VMR=oXoz>t_Z|g4wF>v95p3l!{?$`|N1GdMi>y`uM)G3)554x z5y3SB&iM-rPmoBa=&4}zA^VVR86__pGMMpbQ`s3{=sW(weKJgRqUYLLm)Ytn@Bd;L zE+zG2y2#T2#+dEK$i!E6qn*FxwxAbz#pYAo*J6x5-#Bg7qK}+Nc>~-?_obv~6dJjeAk!x05DLqc~aa%ULGq7c9v_TO&m8XSepS9eJZk0g}+WU%mSrDg&

Hb^TS~?mM7h98jk+^CP9Gp<6go@&V zbzmB@Wxc{Ot3CoQ8>=+OC1G5!Z(fKo~G4;uOjRU>x!8LhZ6h z>{s?p>Qt4Kq=pQ2$+E8G>~F`HZ6JnbT|LnzxDaWfO6=VnIQEgNpE*mh$>er3l3$$3 z0H+k~N>D+w<5QB>%KAYMKkQS-hEUpDkYciIRAWZG+NZ>ejXv5KQ3+^pq!^LyKY))T zIQM>pmNYts#+QaQ;HoV!a}e464xINSDNtXKd@%e58U0&&v?%c6iJ5ZKNOt>jp1uf; z>mW!T&1y{|j;IY(%G_&q@x;&m?^&?p`i%L71C znQU(1p-G(4F`J5j6b`S#xP`GA$Sw$+Rg->S1SEMVhddKkcoewfP4}?*j)RnHeBKsiU=K z0*?B!T|c)`a```@v53hiPHdUF#9>Wkmc$Z<3ZaqAaa9B7Q8}0{a1E?H`0=Y-xCE%? zh{<@I9raXlt14+klIcM^G2zW5iQpZ+7holld|q}+PzF+R&_=TkmJWq$0yz;y#ccn4 z>rn<2+rFPNv@AY*HO56(bBrFp`8MJ$-$zU>(VLM^>Ht&fEt?JwDa`abl8oXfwz;b_ z-+{(Ck|dCTiaG4j-DF(Dme(9)bAaQ&@#`3;KXn|f zwCnM&vcTwo+BSle+?61KI%nNn^Kj>DWdaggym*?n+T?gcpxESWH*~6m5f#vTCK=N*1fjph47>0HcEo!0gR8bNajN6O->k^ylHuimf=%NtQ5B?FgmeB zJbCQumM#T;$M?{V*AQoNCjciC5Qtta` zQ>67KZlp0^nAis{If#*!)9*#wmqz8xPw$02-$9~9`h7blVs52*RI`&`$VNwlLHA=80zxb8&uwT|2gI^m1;* zOA!5D#cS`qlh@vRCvSe>KJNSQC-}n$Kg$=t@+8MkpJj3B<);plS@noY1){F&O$sp{^p&nywfEuOMNY%i~N=Go?I>t^Y zD#v1**nV-5cl10vdT`s$Id1iyU6>}p2so;VFI-sS{pU{csbYrd*&$lz*>7P~>!}C} zC_NEIuHe+THIu{&GH$|{0A6jQ?rq!p zqTc@(ij7YI@Nmnw(5kmSh&Ym(N>wQa?(4Fn4jfoA0RT|jTSkY+uElt$7|z%T_^ zXP^+V(SQhMYZci(D4BogVYK6cUTL@rr={h`dA@D)EkQD+!)Lk4sA_HPs-)OF%|bAW zT!?_Qq~J|~%XKD35Pb=;HC9AScnx?Th%`iLAPVX#(&#iV#k^d6h2ONFIt)y!KX@dj z$vh9|5DADk10VZYXZ9gBB1oWK=sC~0YZ^GibF@Au1gSv|?7{Dbr+)UQ*ZnxwJLgM0Vz6F>Tcy!&16;KLvPEWh$Q@8{#6edJ}A1dJd3JYi*-Se5kZ zhE`xE*~}g~qLAj8T?JR1ShVq9#HUX=6)8U{Qr48Ep^&?^gNkr6{#H zhJ%95LwNexi~RYSK0n+m*;hK|3XigCASA$>aQx8LS-yX|&rh9N;5`=-)y#Aj6;MhC zHHt(ioiYj~&xmjeIQXW!U>ML=aZwQGvM#{t#@>p} z?;*h>4-Jh;;-iwJywLjgkBw2&p&3 zRTXI+)hstW0T2<2L7$3FGkFy-0w(p?g_BhjX;=Ao7Qhp{08oprd7h4YYJWTKcmhc+ zYp4oZpnmx3b(Y2Th;9wGV6s%rngfP5olGlyiT3)|K2P0;OL4*4{B6_bH^ejrR6I^R zxP;a%;<9KF=IYlbhc12=ZC)C&AUE7ix%a!NBZw;?*#%!vAWoR*Fry?fu%Ayc%l_I8 zpmBm1Qy43O|1ux@%)@->6QAY$g^L@##4C!laEXOaeu8b^_D5J5x1fod z)k+g!gfgVtgfe1X^)ftFRH3qQHm!oz<_K7b0sDMp()zD}kp%m`@C*Yy@&`}wmuG|@ z+q;!rD-CnOPtf>&A{1b+i*vlOD*35fworWOB>!cpVX$K}^$4UV)ZT#H3rWBWqZl~@ 
z+_HO?uxUtpas~0_WHPKUxyMFgHw(y^qhy7lb0kf$OTR{_)v~;hB_o5{=I?aP=AS*QYph#PPLc+5XbM>$Amvt8rS?05hoWG3 z9#+2m<*Qz|lrto?D%hN-yD5_)-c>wMKqv`CiKc{`9w380IEZC{GJtb74-XCv!GJ3v zSd$_M-Z{#$M7<}*Xe~JxsCp%g&!H~?B~T>bx82RMdO}g*B!Dj{e4s4?sR$(RN#cmv z7uzGmU>~o>In(v`Hpde0Xg^PdxpRD_Fyc7}J$dMDLIClIL zF<7QgrfO6L(L zw7Jt-B}Frkd8n|c(5T{7Y1PI6U|RjvWqc}U9n88VWZYGeJp!V@ve7gW$vVZV zHa4ZHIh_(R=(A_w9Uo+Z<7!~gwu2(^x((s@B~1`q5TRe znE*&q2kKjswzXa#{EGe^Jbw1cr!tm;F53DT2K zT=m*rISt9ce@TUiN5yB#qE+uEJM3r@;DoSwmbwaf?{QU$FA4~r)_Eu^QkIP@%MyZV z<;UQt9r)5iRZuujBv3geEj_mm@4{DypW21){a#X0qQNA5T~X4y65Q%>;Y`!N>jrq| z@kN#8ehmaDA)wyI7wQY}-b6WK!tb1B`VCtdpBu6G(GR{P$MRCd;?gpQj~(aI!oo%` zXfiK603LoY1sd^|!eo z>MOMBC{!slE2t(ICld(jO`at&##H}6S!t6g28x)#k2mCc*(#%CYy;_)JolAL@OuyP z6K_4pJ<|h-V^RjnwSu+d?In#~o4cz9|Bf7}0)Ot{P4L)L>{ifLshddM>Bi&&@h40Smn&%6$ssOmw*|t?<0+bP40e+o9#~YpiSS4-;nxYklC2K+a9d$2D z17~rF&M2rPZ}`q!lcnHN;CBC1nDxH|Bwnk$qI1ukmAP>k=q(F)SoLPNFN%lDJAjRzCZ5MvwoGEsB!R5b63MnMQYj&`2 z3mp5oU%V;1BqX>c5-FGK{>BoPVpZUB0Gynac{SofJ`}XTr zQBcpHL`O9%DHl~qC$s*V+y>9l2vlo+=C!-OIjTvmE-bh08aZLLS$0yR1=1qD0Ig%? 
z1D|1E+wiVC_p`9HOezYJ%lIw~JEbbC_j~AERp>~C-@AK`Ke2p?CqDlqTb*Z|BD9Gr zd&El!oT{xOddsbQ^2TeOUr}8n*B%jdd31aEPM`AVLBG!KC?YsXlqlK?1qCeCoPPEq zC9tJ(rjnQdq5&cEd33s3^>_a-<}GW?YBFFVLiwy_MaIxuO~ckBHXH+hE2w6AEM|Gi zRDy_2OBkMi)6(4bUVs+7L)Y^;kQ6b(2Kw zv%7kSu}wB0ED;IDZ)Q3sXc7cRP|a$1i6x1YB2+k59zO$5KgC_QZe{0ek9Phd+V9iK zDm4%6*3z}!J8qRDkB0@e0PntUAMg1O50m!qqudgxn-;B1f>@i1^BJGe-!ui~0Bvg| zm;sw~9Dj8niIj}Z1SSF`BB*zyCffaM(`|761-ip2BF=$xjHLuxNO1VEOPqV%Ze|1Q zCUGoF*oTx@@s3hq#d4^L=v~*H7>iZ=-9T5>_7DUXGcdB8USIXm4Nm~1=u>Q(W(~<7 zr%K4J@NRBjt~oK}hq|?2Yi=E3n{6$YJhTXXg(a|pG{1&P)Cw~NEPm-)0DuL-6`A^y z+Yu?@ynT>F8#Kl|TTt3Th}=6iJB>dSv^YCEQ8zF zlDxo61pf1z57O-YR^0N4S!Yp;tbTCCW%*DC!I4DpDbaeX{F8Tf9yH<9 zktz?Kr24d>6Xf7?D%3rT$*>#mV+&#x&NEZ^lJ zg&M%H>+9~{J_C#AIe+9NYZuT>paJW| z^z6m+Ia!TXA!bD-r%?2VSNcw0QEd1KK*Uj0HbZu@dKp+-C|C0;lX-tFWaK7vqkz@T z-{dH}pOKV)f|ZOwH(`dZwcwzDfrq819=rN=`#x>*gbZGBYVyDK{3)-^(hUPD1j3-S z&sp#==o!)!qoR~mh41&wN1!OI6CmI;<(9eGZKfiC2o)pJ=+uii^Iz-09DaJ6DHE0@ zA(Tdo^_K0n4#v(5Eu7Dw83CN5apvNf1lkO4beZbsqA5KlpYU4>F(h=jIs!TfJ~o zX&L}^Ff@OgyV8AwH84VHy%2K|*|q%54bT9UP2N(XfSX!h48VqW0JN-FagMsp=RG6% zL(H4K%0!MbGtL0p(AOsp-Z6(8Od|0&l~obnY7TmHdF<2E^9k zvtnN-_+xsLwqXWmKGPZkZO>HndSaT`MO8A29@l~Q_8v)fCN%ZNCN3q zR65USbY|Vz|9-fW>dx<_UT&eNXdrX^6;+`SXGTn-Mg^o3I#SBAph+2|86(L#+F%l! zZ96s|?DkC*vjxqrz|DJL;lp1cojkG5NAb;xa~I}0eDnl$(@+-0H~xbg@v4cAI(W^{EA#Gc}|VMwb}3HD@j*78WAqw8sN04TO{^)SUmoCWJ0yr`iHm zq7X-s?L*I|eqaY!$BPQRYIicok@@Jg)_v#9srOO8BqhJ^>VGgrbEfg{OAEHR1k{*$5miAt@? 
zKy$Z4S5*PKv4G|Rt5l;7`pt9edM#{t2LQ3^-uX!v3dnQ*H5vdDdd397*{KA+z+05N z0NwL->Vb9FA;@!;xd7!d=*l$+dPo7TvWqEO60cP-^v*VWZ6X>?04VhS6x;VA(0P$LN8v!-lVd z7sYw|r#eyo(>!{Rjyl+lcb|U_HPOVDy1VB|)-VVqi)U9@I3MxAw7^dBj8lhATaDJ) zn54~gOR}2nh|8G(Jpc#G3i0d86J335d;);AHj;EFM<=$3nF&IN3$XUGnBfm55|r|I zfHi;X)E@69IDwASnR%?vs{dYq#m_%-P3Uux0u_&BZ-JD@0730*jakn>^Wt3*2*UwU zl&H!`a)qCwU3|_%@HF`t)O&p4NkyRXLM=+`Y?dBeAl8@odim+Kgt!8?<8E34IB!1L zLS`xWOp~?uKr{GK%Yr0=bB=(J#M1_EiHk2tsw9bo9h+d!cF+ddTrk*CaPg`0tbFMU z>v{~|?6`Pof#;5%q;A$9jCmtoIWadq#karhjof+g`T&eZfePDrqRIq~4jrbcqy44H z&o0NG%Vb{NhY5&!ISUB|O^oIj@aGpHION1<;jxeM-+u9zIMu>ZavVCo$RB>@5TgXH ztY}g}(nkLLx zHOr^lI#oMPnH7_o)mc?ReD~vw<#NtOwV5IDRpC57lz49=SyMfX24J||L3Ojhk_?$^DJKssWj8s*#GVVp!bE?(dIn|RQ1#*n&g0Z(?V;@9ipshJu0RGu+VT7~t^4an zZ(Ef*L(i&s=kUHjJE0)^DyJC+MM@m8x0}SEhD?7q(fJZg=Wdqj2r{+;PW2_Rn8wf z$}>+s&8bC2r?zus(eWD}J;tFk2|s|rjHfqK;AR}B8s&qJoaLb>8rpIPmneB|34ZN^ zk5HdEkN2KtWjV_cx{3Y@>hM6V$v^~75Y4henm`oil8q!&4YZaPWOY{Y##XYeL!L(~ z&k;)6q9CrJewSTxF;1L5I?s8);h7fL7L*l(DO}bC2+7+1cCM{}G)91RTtWs0Hr<$D z`|fqU7B(CMAmS_=5^}cXL13V;oYj+z4PcM&GAJh*sv<(C9_extkT9#rmYmII1iMoM zRESR)VBHgrUY!BZ>bir0Qt~>w4n-HqIf|65yP=BqDr)EK3UHQjHx9fwdF(%p+2_cwQfo6o`=+ZT<`|Cxq`4$8Hh*kPzqp%NX_=$h;3miFhfx*lUE-o(d{!bodcBo8m>ER2{(kOB6 zQe=6Yn48;5QejK4;OKK_`49i}Cm}}4nJI#E)OFrBz%~l+vh;qkE~eUE9|6S3g{nSl z#YI$=lnlq^NTycWjRl;%U;{vk9<2-crWlCX$Hi+Pk~n*2nPW?d14BnIDVu_Ds!6os zO#8nzbGm?S`Yo`GF&-L$0k-*%1bPDGafqxl)@Z{!04hS6nQ~c{VRb`a0}3cf&l3pZ zvd69k2bLX=GN}|?jh=t>|Q7$csdZHy?%6#2GGJ&4hMK| zu72HYBZ{HxM;@X#-Fyj-N=%Ddf~oI`Wb5Kk;0sTsHPq|OEfz<${m%SBu#>qe{9)r1 z$?NVs8B%5fIB|F%C_*54N0A~?Yl>|%uxk?%lr}b~E;88VIOyQWzx)4@nwNjoys)&y z)6X4cX?bO%mvSSn^AHiLa^u0G-WA6CPr617D0ff+P%11bSdvwcb(^W#V0Fxrz*3AX zXhKSF#Z}kIMQJ1hgj=QDe(U7*4}bn4{_wZ{6a7t_IKPr`{XTAXGe^c#eC|lWC!caW zdeSi}W*BbY&Wf~XFt46}{QvrKQrjR!55IYig&`1rB-@?ZOfljxU(+iNLgjnV|T#%S4pEDs2`* zg4KOvEzZB|U@ygli{PvEy?!>l1EAh#!-+f-$7C2gbtuDY*qd5AASY!(t=$ypx&ktJ ze8(CnCvU9LD{6R}3WSX$#wk$%zD=aj7`(?h zM~cbD1bMTyLrI2lQnj5TB@=cNpj}1b(}DDIqTpvX(+q}C*U&IwZbdK!*ETVK{1W5a zW*Hz11$KzAoRp 
zsbdO6bJt6`ZRNx^`;t}pU?;0!!8RjPYiE=~mqmpr0B3TG>6&ibj^O&lWLSfa;^R~i zRin(gH6#X&crg!cTcqIxx&!y(sFTguoOHIjGO^EfB53wGpbWH!K4Kq^bqpEagoN3g z5oQ!_&X$e8FOww4Wnkcn6PuoiV=Ri(WcZ3LGx*(etcU?{oJ0tTuzQBR13^Fe1bXD9 z@i6#iN7J?(J8_0H=PqvaQf|a`9x)~k9l5rV<9bGFYamhx^jH=50VT*P-+y%`o#)iu z+&_;2R#JpeP;MF0_+%J>B$VQ)n--j7yk!z6|?0>DZf8Ez0l-Wz*6(ID2OetkFi4B1S#Jh_ z-?Wz`6;YPx+Gx%h_%=2|Q6HT2;ptdBhFg0Lm+bp$@K!b|~K?7~k(Azek z2R!@Fe|BBhzR_{!!Uc|=I7Qn=Lh#@C4{pS(E>cPyK6adkzx0)jUJAwZ49lG~eWtoK zsuUutbTovXB8nG)F4CmwI;?Yf6_ z=2^5|?Sef0;0Jm33!h@&>%Ntp_q>+Dwr%X#a~ID&ahMCwJjz!-{eI3IdY({Qe+&K% z@8p~`_@bAU@)8xo7!T5zrSaJdaPpKnzU6Qj4O~$(2}**JtA52CTvKhM+`^Miedz_y zVJN2vz5+J}-&lJTv;Vc$aG!H$850AW3uDOl3sx*$v>F+>4 zDQCT~MIlVBuM*&fCjg!4AGw4pwj!Dl}{Mo<==|Sb{L65FL3m z=@~n_(Qd!FhgONDO8{#AD^Y2xf@UtH57KFi9MPH~bOGJ04)<5|nwf5Evs=P8d5!A2@U546KZhUf($VS^veQ zl-*1P!J`r&*7-gg%?jNcMLWGfp(WyRTXr!@KH~(uxwUGpGLr%h#s#O3FR^LsEZ#xW zDw}$qeT{Nd0GtT{MdKVSb&9n>kqjhWWNcX}!pieH1?v$TJ_4A8t^xn)DiXCz1SV>L zYpVU4RZyVW0^9koU48>p3CiS6EYw;=5!mEleC+5oFINjI_N!bvfBzQ?<7{DF%Tw)kAN8 zCq98Z_c`L}ry*WpF(8g`;BCzQ;NRwAD4|O9W&)$WrvOBfb$wNstvqhra`f@14UmTk z{zx9Xig?5oq_)kY0K0{Y6H?n)QKN~JUTF7+u=5~nokK4z8T%`ER#XYmBP0VuM}@j& zbSW~fjlmF-;9BSh$83z8XZH33x%y9-tg=(4Fx)_~&N9*)o&ex{=E|=E z|0Y_W!k~$;>BxRv=R}#D@a;{CcJY- zHPK`;l8|L^9e7Xl4j*e1*2zhNt;kxH zuc`l$R%w>b=711sa#l?~rV>ZgWM)Q#lHT??q!OS-oLPdZTFUBlg<{6DYp3V*@$=M& z53TRouY*v9BPULB=IjPncQ;}~!W89&(D2mVtzP5aWplrBXY=M<`rYlgg69 zz2K?aNJ9j56oo^tG;6-X@H-ANpUi73Ia6b3$n!FB899?k*$8NqqA@B4o~d1%DP}50 z?K0NXiq_3>0UsM`sqx$A*a3X{7ykXauKRikRpr##^EVQJS80svnj^Kvj zJpS~vJaOn*o;!AmbMu#&zqGK?OPqw?w3VjUrx`5}CO2G{H9xHNAM^dG51_CjNY{hNA4S(-K%!mK@s6|1UV0$~r8yC_RL@ z&LHFKAOXozm%b#NM*vZ#o;ubhzTq-fb;VR4;>y4ESm-TN|j$3W6;M*g??23x^If>u?i zlA^9<#VHa3QI!6c!0iW(GQ5%!nqXR6>Z}=1X5E%cB#J!$Ps=SXwZtU!W-8jEq+M#L zmmJ*v{q!Y38dKz6Wa}JSR74Fd_Z(D$3K+3|<^cno&6~>}enKt6ZQHKC+OO#}yLCIQG(RqB#c2+)o z4S>wGg@is~*L|$0OlX*$(qDuuZ)|O{B#MhIGH{f;wo(rZQnK9DtKm5ea}jBMBt-C2 zC7U)w^#_ll=U>?%x#MTfaP-6}_TRFbB3u_AtsC(oBc(*$G>qzowr)6f@-$C8^Bhk+ 
z{VZR6;z_>z_|qIdc9O-V^#$g>6d}$tcgJfuI~r%ea&`gC;Ig1FPsU;i^pY8ug$O+Y z%j25<_8xcdpJNal&o+t0-1-k1ZIe^0P!~~3XA^{K=<(Z!jAOKReIZZ^#lRy|1GE-a z#u3I9D~PcXT!LOf=^zX}p)aHvPd!r-dX7RHM4%}HQVCV*sC_bxm=+d3_fV$eWqV;U zTXyF_+lYuWMZrvK7S)MPjOI;8Gc|B>{>0PYe>bg277NP5Wf8pE2%v?0f9^IVVR3QH zSS?QxadnkL6UEHNYZrL}wdh&y+ zAt$?yzk4Ire4k(oN$sxHtC^RI}Fwon+iDIgvj-48zFhVJ`qny9AK+`nTbwjhxbJ>?F zWXlew?tddEMq`>h=Le-l31Xk&9xze^QIA|CNTf;yZ7QI0+;hu-{WG2mt@7+jLQ{a$ z(n_MF+x%Cf7Nzz=IbBe-LzZQUW+^i84Na4Xr9g6o=%7~GQKAt?g)*)R>HtLmH&@Y2 zm3WtEwWjp}s(|zgw6NC!*jH7Y`sl;Ng*=9ElLW^u={u_w^X(&d>Agkz+h?-<}{sL#uUm(WyZNq-EBHZ~}&POH0M981%RK|MJZK4gi=<=xh0C9u2>Gh z#U++6)s(Fu(UY1)97pH{r0)qLj7f&rt~^cUaY2bPwtG?Oq30<3o~BeJ*n$(n)&dUw z`~M1!jU^QW_oZyEreLmrnp^#vNN1k_47lG_?XsowE?Beqq(mCkl(Qu=8bR_@$umkL zqXSBSwzdi%K)tcHF0Ifk*QA*qtqPLtJPASwLccI|z&c63pW02e(_|PV%`{8Ui?6T* zAU0!M=02LltZnW=jQhB@s$Z)Ukk*U)B-r7fM)H(ASk~~)nRO0OW7>Qz=e;2Kg6|8K*lAZPH><8d6P>x5bO2>HN^fj;f z1&^Z|GA=5Zd`KJ~Qzry5=3m>=3Y1|CvsTviEeVaGH#&kE<> z^GVXm^*Iw=&p3Vl0#6=#j`n3z5x&}^Z6k|IODrrcb7B4xhmW4*nP;El@u#2VOHVw- zM0w!(Awz8PMBAkYUM zA16+YP3pH)Gt^m3C8ZdUxV)?=5K96atn>t_JX-ZAru)?ME3nd1HI7hKth9+?ge6h( zu8!+h6ooK$t$DZ%JWN-XER>0<0wj1AQ=;GZjL$_bJp6fkX;q4?w-Ut|38JREqRHgs zvw~aRCobPO^xRX(mV>MX0P4y-hkxoX(SPH+xcQI&c`i85%KR89D`Hzi+Y-c-X!GC! 
zM2Mq?(Q?Ze6AO~uJ&q`)7iPR?rAZ9)d0WEXnkI;W0M!hZ0#MJdF9U4j6M)6@EWf}= zyA_lD?M#6#L;X+M`u2a<{_p63?V8^?SsfRY0HQ1h=oi72yHns$^(jWTW%$va$9QspR1r7eU(e_%c+m8Yp3Wuep| zA>OEbMH`v*p3&z%Pda}FaUM;P=Im1xTW_N|Yd!)KgW0dz^uIdv+|!poMiaVxk>!v4 z8YiE7lC=8(s6bc04$8Ox5V7i0JBKfNwC%`>wxswJ0SArJE-9k~MfL$uvHNUrHsXN9 z5=I9v!JGnBvj>CyT>zTp^<|W7cmkmFCoJQPtNQ=2Mi)S#R5fr}_g)B;DYvNXSxk>U z&#n;@4KlBQ9NsVmUWC$Pj(h3aTX~ui(RIL61#z?~;zbNRV#@*&MM^Ylq$n$3jJ=_g z>OXI&-5+`%K*^r8mO(DZT8ynhW z-qpReYgOyk7#h}50;7SW+Uz-dafQW)KE7TnzOG|sH0IdJGhCd%#Ln$EB!RD|Xxf%j zXU_5DGgjdrK6--3o_>aBo;}Kig+=BU7Fk?ceno8dFBEfc`z{(+Sf?@-S+GoKB@*)H z-iqXEByq z0#hYuP^2uW`+-5-vaCuSBZXv1;<8|*4S2^~&oeF^QIt*D)Tbs=YoJV#+QA%f_`m*& zwf5IQ!Mn{L_}Y8E$n%nduX7z}qCWHpJogx!J4*TP{|COD!xtrOvf{~OiBa@`dg`$d zlL5!G6SJw;M(8Bn#}T%%d7nE>y}j$ zASXh|E`O6`K?J9(NdRit3_F3wv8Qv{OVCR;<7TpnU1dzJ4f}h|Ewc{!@AAQf4EUo1 z(uokonNk#)W~T@^W9 zq1ZBu^c{<_p~q@cHR_Hrp94`vf~V&kmqZD3%FZF2{pAmn#;*>mT+;(M$FoOIaB$zv zl*Pt-zrR{zWi)1fVSzK}E^zGR8J;@y9M2p+%9Do<^UQO{IC1727uOk3b-jk0o}+lv zcW`=S=$>Vb41kdnf(FnAyig_UMy5*Gv7_MI@0(*!2~RbVFRvs{<4pn8j9VZjmUD}| zNG*kPsG5F#SGfYrs7VPAu;M3*7`DJcQyF}~4FXdM3dy8!2d(!6Ef6OxpbUu*0hg(inOx8%v*nmm75>1AEyislxzpD zj?qd(XCEU8S`~b>duY!IWmUkaWhN;r;%G1og(fk!fkaAywD<~30JI)c8Db){lz#00r&H%^g9KI-sO~MyK>=m@l_-n4|sw4E#;WROG zOJfI&arUp+GK)-Cj8kpdJa)kZS=DIhMi^5FWA7Ou_z-Eg4=f}9<=XYMd}qrI(fKTB#sKmK0J?x%a@30|zRmOL(SHKD0Dqp>~M(Mq5tF zQ1@A6$HJ=i0hqHOGQnt}Ou7tXu~A__X;9|GMi4z`v)&QO#t`DDvQz3vV5pj(*Ivsy z38tR6fUgyS}Kvx?08dnwCKD-sjTK#e$m~tj!#V0^gm+4e)@|@1Au~N> zSVC&h6zKtrV)CF-K%C7G$BKalouPMI=V0Jq)=`Z>zWBm+ny=JY8P^;?b%y!H#TyC0 zHz}4^Mm+uO5x(^J(>(UnAs&75X$~DZ&bbToEG#XvyfUJ$*Ox#2atRqsvGv>j7|$(5 zg0yz;TyxtWrDXR`jW9LfbqD*r^`PSv@X_VS5g}+|B_p|utfSJZ0){mLDAoF^9yc*C5)T(e*iiumA6QNfl2Fs_56b-fBFoE4H83!uC(7$^|4-wXy`FGDbsyo;~24nSG=C zh;Z^7$yxx{*!I7C)p#&9K#f4h9ZhRk2qq~Bj--w(8Mt7e@|&*?8Gn(su>I^MDddu znz)wOb8H>LiQoP>ae1A63$F8MnuceO9cO<2)md$RGveaHBBxHD<@m`n96oxSrwsc1it5$6#X}UC+BzUN5_%iT2M#rtWq}tGY`!Of0N^#clEhx zo8z&u@|C5?NljD|D7^vMN=)#_Zb7D8+C`&Gs`E>t)(MytIo+eIayJc>M5~@QL6A<9 
z0Bilzv5O2*-%F0Ayf26dO9I234kSrz>Is*A>KD*U7q0sJh&T@J-^ZW+6MvN3_wQxv z*3C3Aa_0Owe&_c-#Dfogf#avn5}R)X>{Z%lzrgqle*pKs3mf}(KyQcEi5R=>EoF9> z(q`QlpM<_RE@kl|bx^g?69b|yY9cm{=F%%J0jO!(5il?fx2%i~D3+~~sPyuxz574} zRnx^5JK(XXZT9&wlQ+kC$Wp`78(#ampBJets6+1IE5yoM_1>?*C8jbGHFne>wTpTz zs~wCWX2BW6KFE<4hCS{B9(wQl)@`jXm#CYTXP!OEnG5q9y_8o(v~4sczR`$_3yT~* zev&7id5%Y)c!tNGdWNSDALH4h$6q#LJm1XlGh5jF&OgEF)=`LUm=&HT!m<wwrX|?@Hd;Y8Fb!eKOb=WXVVGrq8-*oJYz0)h~U0CsS=}79ajSdQ+Pj-SP%vr|F_57PQkNzq zdMH8dYhwXGB{p|&$$9{4Vic{wK!s(^G!ijCX;4yFPheRCp&`p-LFt(43FA@_mrNvt z#0bwYV;G_s2B0-uW%e~lv{@QcoKj6!ECS0NANN%hF}55!di;hl&^m;wa_r=3o;iG! zFFy7p4}a+^JbCB{&mKL_rNt$hHqyqH^fDFZd8uQ|_x)*_9kPF#(SnK1`48e71Shb z8?|b_RhR-s`IbBC3bL*clGU!}1iL577#lBKat0(Efw2xmsJ)4??XKX|&-`<=zH*?k z_=11_r zRbtaptD_Pjs36IJ;Hc1eh$;{hb#_ygKnAL($p}2HCkBk`AOQdym4j{alMz7vo$G!M zs0hWH%Koy^KsOGU00yB>*0yqDXuy=|dLtl`&}*LiSXSDgYbFXE1|Xs0a!1_Zv%O@; zt?#tAA@l#&Bm`bpGvFFEt$;~mLus8e!=ZHwQ8kcCArKr>gFq}JLCjY>s4~K;|L(F2 zlNp^={3iY-*_%N~UfDGZC(ezDCr_>0T3;>^W8&O}i<~-rmXypI`i3#kmB-05XE}W2 zIL{qB#bZxC!=q0>!--R8IDPiQ#AN42d;c3p*mDr~#_!<#N=+dZl{yyUGEmdo+2>6M z`t04)XHnooml~d4aZI_A$~l7rq=woComkaRZuy(sccC`2Z1QX$pipSklmy#k{$y86U_EXdQU7d|11CXiqA{Y8}jkr{|(-_ zH&^{@?#C|_YiXg|W~X@klfT2>*S+iOzD9fGE6g@4EYLUVUvk8{VO&Rg2(_x22PKGj z#y~SxhE<@~wi)LY2%0@_G9y6h2<`e|z~DMa08|w>ZebH>YZp2Vp`QPnwImmBmqh?svJT>a?`>t(zMzGfE0=9D3eUI zp7*)cL>Z)2ncB6|tKImvRcxm3 zphX!0Q#oN6VVk$lW^dj7Zc&seT{R*KYVH4iR@NKk4r=RcGbDKCrr_zn`?sy5aRm{I zl1D!Ee(u;jWI00Lf8D*`&SfVArGO~6Y~9R5A9*jY|JHYZ&A-w4Q;Z+}UHt3*B(aSo z2@s?0bHFI&4!6r5IFe|yDANFE_t~5@8E6wK596kxS?ABgh9>~15>FnX+Rg(wK%k~*KXPjBh5}aMumuYwTVrfMfJCLc_7qRfKj-eOkhXEN1 zBdzJ>jCX`(;z3taS zTmY@ns^zX*hU~k2j+r5xTp9DZd03!8d_`rJrdb|X=!%(ETc)y#Uy(bHGkI=b6wraAHz2Cog&DdE(U z8f7$Um=j1UEJr4`$=&@lMyR4=+1t9xHDm0;L)#{Fon0q4J^?_fk32>1-ZxU`4Y8YY zcboPI)7OVJJbE>eAIN4o_EeK3z)<*W`#aLT96`Yq;YEx9pg#9W*!hFRnC$h$A>He1 zGGQWw#Gb#O6M#0e1WZ*hAwDqGz>|OdZ(Z?udENc@@K=B6 zU7To@qO1PCYX4VOX96#f9H?aep70-k{%6>C|F?h5*Nc}<8$K)w)Dg6%UKueGqp6di 
ziPnL2lCfcG62heu7M+bnN^BI16WaMpq;JSZxVpFw5&+N(X9yl7+Q!#NE~B04Pd9n) zT;}RNB&BvH43remH3>p~Oc$ayHc2jjlZ0h>k(;Oi-%=exV$KeujVf}sC@7j7q7H2m zUKPoeeup7w^Lz)_BB3GT5lTpcxR}+fa<9mh^nHOF%zc&Z@ioVAGj+euvMxf~Qs5>I zV4WdIcf4blCiDMp=3-pklAyuBf`t-GN3jB&Id|AfHGiNz^{N#&~ns`}9-839K zahfyd&U5_KSss7t5RW~5h~uZu@buxM96o;XMy0>b;kNH#_9y-pXI!7&jPk(28Mf`J z7z1CtqSTz$endT%HVc(Y5$H zmmW@4f>|3uZrYqKZvZ31M+51uISr| z&5`O1Udcj4lMAE^yNnb;>Xx!@X-YR~ zBiYQn+eEvzTR_!kb$|qg5f zg=Sf0@;r_Sn6XfyO^%T_Z||xH&G}2{Xnk8%FSj^x=3J(yHF*E(um)CDM&p_@=g#xR z$DZcX4?n^eAAgFcpLvd_j~u&E;lG3-Q!{M)tACGETc^48rXKqb_6Y@?Sc!aLAyKOb z3Km-+dkH_Y@L{ld}Y z;?bkHe2hBT`m^d@cW(3Vk{AsV5&_S<-~CZE0&X6wbk;$;#l$*zdGBN70)IDf^*e5G++Bmni-X1fjW5?&Tw3Dj$x~dqv`BB;t0MuZ zn}&~n`eFX_Z@iBOKlepWpTEF(TvOK#^*XmMZ^UXyD7fjb{v!r&em!^YYM36DoNXE& zyd=;hL>*H&7QqC`Cbz=N?}4oqsf3hJ5zs`DAH107^kBSzZg!8ZVuJ#5*`~ji^B=`P z(=jIron3A>8i<+wwaR+x)}#dISXO0IzoP%>aUT6^fBmY?&rLhFGX@sNLg9r$X`@!0 zS@L%GKbPrfo!iRV1R&+Y$jFX{?%I9u&ae5JstGr)iJcZQpvx^WGrN~nnO1XB?G84P z_a*09qQhSqM!+e^;`%4h*F^%5E}f$Ac42m{m+s^TZx?}LsP*P~m3B8Sm(~3}SFx)n zw#Z4eh@(|&H%(;=%C+Zjgcu+PvZJ9%{!RglAtFJ{ zwXh$!X$l_y%+~Z-TN*oED02I zo4(GgDlRsp)Zj%(N$^cq+}bF*&S>%IRza0{x;&w}8z|B!;S0ytpH_WcBml0hjTWJK zGwja3Tcw1w^fhtjjojlzP3p1GLURMwQ2#dMwL)W zn(cGOmJL>}$5;*7Z^&2yUs4sy)cLQl{E^>d{`0>_T3Y{E&sRlk(5D_@?)KLchf_2o zAhwJ&P8|M1p3U_IMye#|aL!Q&hr=kk^FtLLV7*E>)-zzuX*chsDVdDkx!gcYOhGL zwz;Y@4WP#{W}G=yg$*1(AUIr6o&cUwwAwd*5wAy?rgzFtRnoqt{R3aT+uCgPdLX|b; zTs|)y{~uXB2R(%o$FBa^zfoKl34qqi^pZ&)DytXUDg{I)f;(NY-EH>yJ5;((fVBh! 
z!v%Phk(yD36J50%mhNr7#x6j+h$k|}fHF=D^)LBNb*M1{Btp{=$1QyvJ{f8y7<^yy#w=4||H;@t25M=pNx(Odi4nay-+MHEpzzDF_vF*_bWO9SX^Ava7a)yaLg zm+am<=wNYcHumm*;MH3Gx(!I2B_0LBMakYNs+3;X zKjoPJsei@zy&t%yb9Mgwd6pVji9$=k$)%RzY{@_xQ`|#v2+p~TJCq4Sp=)A@Hypr< z!s&6OEj*{bf!b%AQ;8-iPHmU>|9|%0JI;=)y#Iec=giD)+th8za+5nYy@fy^Bq8tx zLP!q@q=)a9Z~Bkqn@S*skOl!lASATVLrf>8nc{APdzW0)wUV~)-ZnGmJikBAnY$}v zEXi8QlH=#~TIt@scjwN`ojFhYJfDw3VFFoFMH`=bC=3zszE~0I5JoD?2#W9Tc$5YJ zZefyQp-n#2irIY(g{l%$(4q)WqX$1=RFDuk7BQq0S>zB<4P-y1N`>EGULZ@eBl7?1 z3yd!@A~^M^YNP@ta2sH7#v>+&!jaB*NlU?YOI)Kyt69fxzLd#J-i}Uv7bNL1Ft_)9 zra$mTvg^Oe=*xeN{)SD=cY7c!$q5R9nXb~P8TQVWP#4Z$ozc&8Qfo;}VC4}U)(A#zP%(_#FeU^7+k48kj%W3# zW%olkWD$#(CaE_RuEhHibpgVaS(uYZh*aS&Gz0+)1k$BMqhON=i5HR**t7d#pM&pa z$3+03`2zcACowBpSRXk5Q-X7^%2E;^SHl1Wi;pEnO-su*i>!4l_O9YPY86w@5kLU& zeZ+NfGC}}{#9wRBclNO_ps}Q--2zuYeQX6I-3cZ?@_O2vzqYKPz9X^xwkw&q=VsRZ z-0w5=y+6WuyF<^nNli&2X$*rwtfeH75?O0Q#Q;Z{8CDI$5n+e?qsTHqsbj0UH! zG1G4wXsLoXq#`JeCXmDxD~X~z72vm~5%9-i1r}oFu8Ubb1mu6F8Ufcta<(9SZX?jo zX^?HVQAN}VsWl73CNXv{GEmJhT0&Hr#j=WTrFE;@afFFhoV zP6@Z)KTl@HICU(e+xKxEUr3T9jO^h-4H_N42X?jL&iy4~Qc8vU@80~~KU54xEd4wW z2txHZEkl1_l@Sf=sf*{a424%x6MkPV!C-YzT7|&$@>htD3uKb|5+?7hG!w#zUj$yr zlIJN8&Lyq!_3Ce`pX&9LFm9KG+B|sy8hP3u_C9M#cl=B73@&abFlQ5mf&^TG&>){L zSWzmgciqI^_xuL=-CsR!oPTB7TVVXXZ=n65H*-cNNMgf{X+kNhVqQ!{uwqHo;X3u|!>} z(u(Wm17}y3miTKNINNVk2R}>ARvZWFq9EIO{0t%Hzf?Q~&x= zb{?Vg2PnE-Hs7|Hq?Iz47c@tjOm&n`UUn~ACLO(oVId6^@qGt<6}sx_qVz5F4BR_Y za{bmmlRX=%3Da=XRabx4&r~tk)Z$$kz8_VvLRTW7(nlgV4PeRx#w9g~1tl8BHFNQ_ zN<|lzUt;{Yr~rVH;BIhm`#T$olqCZh-erLsd@?wHf9ngM)>ph-up=I{c&a9c_wG)*!L(^2Hdl6 zf@^QQm3z1Cd`yn_D42+_@mGJ5=e_Q?SOHwqhp*2SXaQ0y0SOFeLFgn@OMRr6RR%g9 zxw;_aqn;+v6HiM8CyNP464zEV^yf4Z|0)&R`a-XPiDi)lw2S+P?QP1942N8_RbXSomR4DMlCZ?gB^E`vv2#>@BV>F zijg!(xJQyWm&V4mkJ<)a2E$fK3=B#ieo@3WOD2RMY0nA9o!IHpaYG}*#2sE+@VePEN(Z3RK64*&y zmBou-36kGcHI?tOJoX7efUD;I(Li7Wfn*413{cyF$i|rfkg_a_pY@hhPv_OKYPi9v z7Vi9o-(}%z-#Co3b#O4(4d3_*v%CJ3(bXeNOymp?Wt_2T9oy`)KJJ%6RB393C#M0xht}qB2 z@Fu7_rM5@0l(_dMmvJn1P<;N%rU0Q4NpPtCJ!+crLb 
z$(Q)X)z@?5otr6+fNnM4b{9MT_BEXT`)}o*v2{@Pz}G0_%%-D^U?FHpsJN7m1po(F zuh(b$?s3NVPq68ZtBfUG1DwAe|b6@?-Jp1>4l{%!8FguwU$9=ST=3d<;k=l)94vKZ=5}U|@Ri97aT#C`&RCPF-1N zbhgKhKmAI|L%pV|Il@C|rK)=V=2w52zx>R9vTsV*H8soF@G$F6TFLi~JcB)xv+UkC z#jUs8izJ4%V=EY|H&{D5#LAHo#CtkL!I>x3xcl18+;QDQix6ciPNiFxh$eA*!sD#P zme90;X;nQ`NW*#c6e=_n8XCC*tdOf?*jlFUK02e-$4vkrE~f#^0OA89U12~xB(c0+ z#Oyv*f?hxb@`;4kDs`vC$B~kF{Us{&B$U+5d>6lvplegI3!X~%2m%1rKDNwpHp5Gp zg!39lK7o2#(BJmn==>jE^wz9ryzB>`!ymr(|MK`vXME>A#Gi(R6>Ek$bPcj0@?v4K){!QvNw+K_2VdB87aT5LPvyzKt2-) zePj?6IEiC?RX`vi7_b_^eOcfHB{w1KFI5)7e$iiNqF@aZz?!tdX$=^^_!4e^!yEC3 z444h$d#pSkoxFoXNv`(ZzlFd5h5y5E{rfw)ZEMQ@nHgFQOD(H&<{2wE^|Tdq`yQjv zvWB`1v-xF_^=a||UI<^TZ9BNHW43&P8FzP}OlzC|R5YRaZ zpvi*Z_&e6e?=)aLG7B?XwjJVmIXpWqK!5;R_DG9<@aeH{B$dP;u__>o&b}g&7zCz< zMHI4)zqjG1XTz9a1!`$86!-+>2*V$7JreCkD`N%u}Vc`5dA=AzYk+_RLJ_3N$6H%;?rYS%0>}T<{i$B2lomcSFFM591 z@54lEcQ)U|u6O+paRQAPJmhhE)lW`P zu`3cokyxCylr|x;HF`;%P6QGMP@<5R!rErSsub?|%|B+xYybE#oPXGDU3ESxidk_E z-?d-y0NZloHT>Nxex3{0H`#dd7{+G-?OfL7zW}*r^2GghE0a#74e^bZ`jaK|3-gjR88o{3C#H z90dT(?PcNit4R%{UY3j$g(x!h0xrx%rYeXF)syU4WtcjIkbdc}3jFC52U))T<&y0tv}6CdIE zANVJ38y@1)i5{2lXtUpCKtiV3B7yV7ViZzy#SJh$s9{%ZPUd0Fh^tA^ab(L(G`pFlO_sbv@ ztC0jq6qkC+gs5W^&P6T(#9oV&97XiVeT7;gjMgkg3B7&ehk2BTX2(qnKz*P7-up=8 zc~mY$Rl^9MEYbllRjd;a-D*0(Egnqi!U;|zb$g&)i%OFi5?D zIzyI6+H+$-l*|M`S+3B;gchkP&1!8Fq3#1ASE69mL?^G*Vw=WE9MvNz_a(AayeF(K zwYY-xV{f%KSW&_{RoPN<$IBC;4n0gBZtd{J(`wniq z?8{8=JoNH=?eu5i*FF_-E@CsIqM|jBC4oD^2g0AB0cu?76D!+<7I$WV#2|H8*ww|) z%pCSH9*P||0RVt>ro*_2B~mbjC^s=eH}=&-aG-;Je~{h_7he%+Ez`&rSQrH#0*E@$ z^BQX9u{tok{$!4HSynes$kDTak~cZMxm)q&^kHr17hmx*-t)Jw#{ly#P+&GB&e+2N zx8dT4KX7Lr_=fPj3R+F!DWee5Gai2~Gn=pF2Y&2TTzSK7hqRAc=Ir?A*Rk<${)2ma zimiq6ArfX;YYwEy4?7}%v|FC%>>A(8zWtLYyfDy}-@cizTz*~1C?B)Kw$>9a;3*&e z7sf_M+0lX9Cwc)mSF3>-GL-?QZB$_<-qX4Borna2tM5a%u>&;pkhI}LpgjWbU_;(gY9 z3WwkdMbwZ_T|qf}c!0ZqdDEZqzQ27vePFh9B(ij{MFv^?;HR?Rq`d@tC=ReHO+6bTB-RH)A1-T|jR;T6- z1G#yk6D3gSrp`0!49(Kg^g^MY27173e)YK2(BHSQq@QE3V)0@ISvn+GNk}9#6P<=k 
z-%0@Sg~x^(CgKoGB=Ip|QuBT9MmQDy-XOJs5=6hzXGx9 z;vG0v3Cu3(SZ0X8`^$Bx~5Id{_;)N2pqSUdi0W_FG*eD#~$cIRVbe}`i- zHq4o?{og$97hb_0;F4JnvwbpZaf6DZ#@2w~MJdIQNMKGa)dVd7qm)(< zQLK0p3{EXGnI*Hr=__H#!?urK!hLUk8-Dl0>|u)_E6xj!Khmp(5Xv9AZ`pLz65r7rOQL_ z>*4Owqu3S}+U(ptPH}>g=Ik2Z$5*eomhBIBA^#s{GBm`Qzx!%V{q>&*;LEd0zvIZ9 zC$lvQF=Rg6-Yq4N_{1A00}AzwstoW@Lr5ICjXd!h*1;rl?)R0L8HGN6$s#9zkI)V< zD)dAtWl@E%0`<8l%7aMcT?v0H67=f(5-58U6-0eVO;e%3)3KgR3`42#gc|gxl|3K+ zDEGYY!<0LZ1ic=dzYga817APJ#x{Tt9$~Ruddy8mHexq@Kl%Fa1uYR9Mx(`M*fhgi z15%QXG#P27O#7gpkjr9~rxLbn6Q(VOD@=v~<6!pwqbC-3Tm=BynL^raY}UexA@{+N zODq{BB9vaj4IrUXAR0IUQX=|4UMxi%R|&sUN_^l3D52FA_gTV6XBw&iR%GCKoQY$3Vh}MHRdQpfMU0B8M1925i@}Cin4=_1 z!VFSsEv5lyCQwe6?0NlL*!P*w;pdN#w_c)1ud%2EOhZK+Xb_zDh^sIF$36lOlVR3f zfUG_X);}$LzD-amA!{LIc=2EiUJSmKg$&;*V8~c_8z?v z;^Qg+pzPvz+)eAeCo`8rK&S-tYsFm=*#9EKA3?7^9aILQ=#R*NF%UvOth9=tl#+sG z&;zN19UeL2-S+uDoK8}94{a{GRMuEl#sbjfsq zz%`ft7mxqmmmc)b)s;;D_eVJSSN@1SK7kUn>lYD+kJ$k>JKyHsZ9AA>IPrKAuDkhm zKKq7@Bqq>@D=5fD+ZYZ1Jci_(mrXC+hu^2h*aP3ZL_hFm;}i7i$W zf6}-fe+HX^y~QL2oOautIo%p6!D4?gW@qF zls>Sm;v8ZHt3I+A9xD^f=qc3Bcm{gXnN5r_|WP zilZz`bJlXn1(8U2O)F5L0&1c3cw?xIuHZ;duapb4cYYX(Lqq<*_04}r4VdgYl0*Zh zrP{}(?Y#$u^)HVdux0V_vEiaqPvN=GdOF{{>biq&qkH9-IrW!*hdolGTXMpdw~vJt zMZw<5X=dk694G3%Coc-Fzv*_ax#2d-#{ixWYErLr*6;onC%@)r*$><~?V#hqS%y+W z5kYaIYAB<^Th;e1V*4^xXlga?tQ4X&;GBW5PHhFl~gBYr=*G^tvTG-*+*${MV-_@4R~% zPo}`C^YMY%4IjsttPxx!Tf>(miQrsGKKbxIJ88CX{XXgry<%xvtEr#*Bu2dilAoRgieIO3W)-OVR!%BZ>=HDEi*tlNf@yl; z2GpVEckaYtnx~+czYjfxx&N~^oxzX4^f^oyisI22lb{cD^%dk_SZVMJt*;mpwz!88cAMK{9OiazNeb-)|cwwJL{rCq0 z8e_QT`dhjDTQ@vb`u{FA{G4ZT)<66eW9wINeFx?y9GNi`Vo4*QtEob&Y0$lo<@jpJ zs+3MEsq-#EA)XQM2T4vzNDdNW6+nZ-E(o&RPL!OApFhm`o9Z)+3`Y1eW0^Bb9YSv! 
zm@}aQ;i@qr;}Pp96xu0B0+I=%32XvpC;M#w(>JpJlCK>ebKgT+mGua$K0o$BC9a8r zdWS~>vz{uV9`6*L+`M=q55v|x^O;=q8-K!8@BS#owtJyHiC@@@7QHY{rXIwG35aY6 zyYh6BRi~k?Rph5WEsVe#kJw5tF#}_<)>2}DK#WA9crj4ZF#K;|I3}%$2D;@={GL)1 zjyER|0QCe`Cvg9j53M(PpANd?FNup zr1lLgymSOoF@X;tRtZuYCI(UqHI%;fl-5wDhV`|;pE5k%VfrJVq`gQ&%NSRXxYVD!`uG$bv*8wFFfeBiu3BwK202o3 zg-5yNWyzj>6HL#{aq`;LkM!<5r1kThn{MCCl{ehN^vrUE$d3*)qa!@wO>e;dujkP= z@a3+jKhYyAGTKHFD|CDS@8v?|*auai8MFOj$t_*eP`1~Y1y|k6QiGFN5TOUuV~Nlc zoN_4)(EbpdKLpjj>b2lVghZ5{XaE`tbS3i6S2w%ylyyT#g_R~@qz26{OkI8@yZ`G` zbT7REJp^Fzp_v@;AEW1lOxl>>N@d{wQ5&#aqCNl>eW;}QhvEFQlh*MwpLrks^(XVy zPuxklYaX%{%mZjj;SHufjCD%k3UDQfNhsBYKC~DTW8(Q%#2UOx&}xY76*R$!p&utJ zJV0WpB?c0UdQXBQQ_F&SGAoNy7ZL%Pl_lV36?YT?fbnCFGjl!i9XC=x=@j~e@)ZjB z*xN#Is*!0@mM~RCX-jl~aMvhf8PKyqQ$R&YtflPsC>Jv9)1P+C4(+#o>(>G3m0*no zKIkr(3<5}0SURdN@n4Nje|ZrB;U*Ix_d>t#NSl@QoMShf8= z<}W&n+PafM0QMLV@IkF~o^88!GdX=|SdmB4_U@nLzrS`lJ02VR8`$WNzl@D{gZW3JuP;6MKCP7pQvydKyady=vXkis~Fd7}8Mo?}<=H z!}|!gSp_GJnlW)+U#LYKKpC^=l2Cq5O#EA;%$P6@kU3?zX=pZ}>7c)Lh8=JJXL^@@ z6}Mc0p9p5;?BLjKvZUf4KnF$yZv=H7j2(>jRrrN4_83M0)%n8){h$4#FX9z%ejy}NN_k~F|kE#$tfr?7)o5&N0Wf}p;}w9c$xs7OuSN9i;W|c7K`^D zrygSsK5LNIm3ov=pVctsks3oq`Is1`j4OK)D>kcBn7i>NT<7S5hL67h04VRfp6n?B ziLMe<{BmJ2JC>+ltpOBR4qW7-+3188VM|I40%{WvUeOYgQ%^>Wd3e;6hhZYZkN(Ju z*-L0Id7#%-`h8Dk43f#9^uM^r@o$$rUalZO2>aj(=V&2!SOblw8Jx?PzwCv)_x&Gx z$nB^rF!$d_`oyzPn;!1H`9I3kd+yz~lkt7Ww`8(&o^RiL2cP}&H<^4a`=|BHCv(zY z{V8kCJ%hap+xC^v6*5gojUja~Bh_5DlB+W64Bg19lg1l6^!dZR6)1Zbfj{+lX9sLk z=c-$_`yxqGXzJZi%rAe{GqgfiQ;KSa5mhX3*ynlu&Qp@P!sy|uP+dDpCQ3U7MH_)Vx7@nS z-@|v>aTx%x`)<~jp6%X{*jV23p{&)9LK6v#+|*J912%|MSXIQt>OkywiAadJG*onq zOW5Cs_3J>=6zw0~{`LBk*0P~)xUcK+CPeFVeMh%clD?wNMI^%jq^IDk8-<7O_^Y5{ z5Kw3si3Cs)`VQtw#g~pImImR%`0d~R^@9cg0QudwQn(k;*|X;{=l>Aqyl2uc$y|~ z_oq(7NMFo8LWWOt(xD^|mAF|L3+z+zmB^AC0JK0$zkM;>ucLG%{NEP_BZ3W}tky&i zrICmL6!13m_!9wNFr6ewGhuyZSYN>WwYPE4M?TKXS1vsYqh?F4?N#TY-eYWnR++qj zm%t>5NbIBw-g^?jYskLi%cu{P#juVYe3cnJ|6G3M6aPjsI?Q|bcDVJX36MIhXwfee 
z%912c5SJhxvXV%NY-tw-l$Oj0d6`3QsV9OHLyzU~cGxl>p9TMJSRGm5Fx&vDp{2=P43{Hkxph1AwPL{fS|J zN{3{Yg`Q`jgj^CjuAqqXp%9$E#D$<@2dsjCrB#7LvFiO*eIsQ4GH5J#pj#@P(o^3U@Z#e>qKPG z3h4T9k2Hu`zD6Lkdg_Gv@x%x{gr+J9fxS-zris8o8~*QV01BoE4P=p47D%lylM2H% z7;9QK3h2Hz`#=5(Zu;286g!riVQy*vQjpc>MSv4vL#f}1Me#8q3$EN48L;_@b6`!# z`qiOc@R0<_dyh^Z1dwVi&;0HG%S(Uvm$)DJ=)H5a$ANSuH9UzcDJVgN!U%;}D3!j8 zC$)?$hZ)c-NE#VslF%!PP`Z^+?ssL0HxeewtO+rf7YrU}LrcTi2)bydq*lNyEirWP zWFkx$L#9e5Vbn6S;j=Rf?FCQ&e`*E57jsP9bs57?dpXmEM~tFk=;NskWG;!N|8Vn^ z5TY^(#2D(JLe&H{tbnQop=N|$0V6qqZgK58+|1GK%Rcp~PYLwJS%Mm2uHB{Q(=dlC zhAvvN&J} z*l_YXwrzjtg`zvLpOqebqkc@)8Sif#9)*e3rfcKt>$!WfJ<+a># z@770pcODVTHf`eZ?|BPrHf`jNHtgBnBP~->EMfwPuNoXK6dLhHtVZXI;~i{VyN)Y>&u*TB{SJH!n;6g@C5F<}ND?9Sp3*tI z^%#?23r8squ_@LWat*kQT9(pJ6DU2_E6&DM*N0@vsD@xbgu;8UHcYOi1`OCnMry-% z-x-uNlrgQaAW9~LQECx}1XfQ?v1{+dQCC(uE&~8i^3B(hJoDvH7*u@RQ8SRe49NZf z2rNYh{Qp&tt>Uvz5K1wWF1S=HCQxY>LeUk9F4WIDi}vQDJL+`N<1Y+>a3-`%=xP7} zq0lacIfZ5lg$Idc|E18s2VVC}%lZ#?Pb__XsYFk;G?`d}c0jN60SIFg7M!Kwf+)p$ zfr}o0(Lnh^Q25B3A6t(_Tx-$@SnGJpNbKP! 
zk*1v3?{7rlawD!`JyBX3TAQWFKqI~I*gYG)Hu}wu3#cTko9y%3@0~* zRkJYpwhwdXhd)d?zUL^Nx#;*;oK+P&!v9svHLNURi)vw}bU;7$%10Pv{n|Ab|ZAn@!$~?yv1z^EK)0;pQUn)r% zD#N8}fEOg8((H0aI@CaOM_!bnX6ix$8HATSHZfS|a7hwmoN+;}*#^g-Gz8bMkQhpV zLJLw=3LlO;QBTJhnkX|?sY&z+7Wy~dv^=be2eIQc006gt3%&=E1S-nP@+DW9BUEbg z$XQ*D#Mm$*fYadM5?|dQ0Wg?EsBL#Hl+M;Y>8afJ-=9CK$FuR2_2fX`L8pM+8%hZg zvdcY_9bwI=AvFQDn_{biMnJ^!oxS`A{tpLM1l)tXUOn$cs)dd#neX->%_uOm3uV{^ zrUA9E;fzxcesHmUGVhD;7}tEvzSE{>=h?MqKV|92()8eeaExrvz6n0})o-$MeD5Q@ zE62wTE1kEQzr4S>Grywj-O zmjERa8UJ*#Jr+mxDoj9IAR+LTOpDTte6$QBSm^~I5&TEMH+78A_XQ(EDQ6EE z?1ZxCeV^dA_rD*1)ExhkbGTv?s*15G1RXqVngC&1OEpw7tBhZ)S<)#C=gexzroF^M zuE+q4sG|7kyLm9@zv@{};}_od7S3w6xCyv&U(W3d2Fe7IG~gp@LSigsA=JIlb&kXZ zs@V$g>`RZK42^u7hL6h2fXlEFGc$v;hRpkr-Fqb$gLMMV2g&6`u*TprOOhpMCZwq$ zQBO`uOYLCaCkR1YnED9YLWD73&zHWo++Wj!+Ho5IXm=9RZc|&;qKu=M;ex8_V1kMp zumgsXkw};k%V78=Hyk0dk_aFy9hmt7${LN+4#CMCfu%`8quIa!MG12Y1$|dgnk0;t zh_H9wapth0Zo*8k5-$9zgkL%HDuZ9@=&QK6Ix|bl{&3)JQ^w~m_T6?(b(m`BWFr}_ zbZB4c`3wv6ER3!?xK@G`1ziV?MiZ&k(axa>BK=Rb@rg<9-@fxGpsgRiEwnpacjIkb z`K_ByNWFrGW3`JedcsrE&qKglsY=IQBwxz#C8~V4~2OLTLjZ|J6>(2|-YjBG%&6FX{J(tXYf+%d%AQkPcb` zVdg(p00^(i{yQG@lN*~(*=NEUz^Qa6<1 zX=(UAn+Tn_LT75ojZzb(M*u=LfZ6MBI$B@a0e1Wb0QoG%EuX?Z@8^SvLWTb${BH&D zfr+UkWcyV+wE|Y@0BtLXg9=C3FCD1GcOAV6D9&64FbGqHW7nLcK4viy)8I9rggv2LZwU@uWD#3a+0qh!WvUvYXBe<2E(RAcTf(+^ z=qE$?uE*fWvxJTVY|yQ43=bdl_|#f54`p)*vJ4&z03OmNr)Sx^V>i8CpH{Q+Nbe2+ zx81dcPk-ra?B2Wok=~V~Zsw$uIP>q{#PIW;z;qwBZkva`A@wPRwKP=e5zzUCC?h_g zX=+;WlLHz^e24%%3)FQHKjw*JehS2_y%PbQKMtcDB9k?aW{Itz(D38QT;VB4beRLh-|xI3njM%RKbIB?&w_ zqKMeQ=Wi^cWo!XRsPt=KeHX2!yF4;}M?m~936$-+H9X^Yew~;8{7=vWzELV$r`z1y zRdO0gRwGqM?n+vTB`-W3uQZG$aSfZ8y;LKTkVSGux_ML@4{K1WkJejy~L+R;$rJgQ}1CfDpoK643977@T*c4%(|* zaKC@UGKPS!Z9||WVHeHp=?%V`yx^HQy-A& zYharUJQj-;17H9k^h>~m4ufLo16%gBneGF1i}wXq6|bJ2dJ=^mhTI|xkFrLg?+D0g9!m3uv>Kx{;x{;m#_HMe@UUQ62;G$KO z<#>(%jJiyX+VyndyU-{Gg zz@Pjk!@w5crZ#L@D4Fjmq%}wqS`HS9u!DhECsN zJq1u^2}!O8s+YD1FR#xfRz#zm#a 
zAOSod=d;!%mWeN4c})8MaGVDK`M!G?$qSMs4Uwbhps#OZmLHs8M4KuDF(5k#8cpjbS|Bv6ab_)qwQhOq^A$$V(5CM??HT&;gE#gUAT3(=cyYE z%{UvY8v60#A&$ZhvV39jM%4J5rBwmd2qRRFCoox68sXb!bGA*k(WD7pDb-*EY8ZM% z?sA}89D)Lz1kj6Vf{d+z@!iXG<{q8qoM-p=1QU}pY&vb@BMJm8OUGUJ-p_Tn+|KOW z{BhNmSWa8@BQIk8oBsl83E!H^=}$wmOemy5(+fBTnKv5c#^b3-B%tX z<37RpTakgxYb8rwNW=uha(|&pVhsJ*PDpJK;%Pvw;Ehm|nO5E$m~vAFCOFH{>RO0(~P5^V|ls z1}+mi31ku^gF719A)^|q2{RG`fI32MU_@cZCoVoVUn9qP0HE!0@_TNB^PU6{MQdcy z`Kv>i1cRu@kx&Zk z?R#$BF;9;=nXQu-ibz6|D8>r882X|t08{OQ_k*S0LK5gTz^++?drUj#pf)i*&Gub; zxaj<|7#?apV*9bs?ec}MT*kFG-u9?;{$~A2T=0Rna?*L{aAOzdHg_WnTO&B#P;Cx9 zcZFum%m9id8E9Pt-FRh=N^p!pPv6BGG!dbX(u)pX?;pOYF^lv1E|8e`*zZ?$c-5E~7>uz22Qv`0jNy?2D*U`e@OhK4x%XJ5_>fA9aYcBsZ~;J*vFcc!G9J93noO~JrWBte={ z$i&k!K}-n|a-%d=>8KBDuSWE<^K?z9Mx?%^KnJq(r6za>>KtWaNPJ*Uqy+0duJSL5 zi5Xd_EVxvVEPSDoC|XZwG-{MaDKwA;rD7>Wn3cd8F@%t7_z`1}E)=&vIPT$S*l`^I zpaq@#?qJpVPhzhEYOrR}`7iBwS7raD&jZr7Qoaf$7AwJZItg<#Zq_ls0IQz%3}%ng z4ExO7JQLH7JhhY>qSU?$MF}-qqu+P*HK!&i)?{42bsnF#xNv<9^97cefrY|RPZGv=9Xy+t z#t2;xz7Dl@YxtivK=$yg-R*JTwjE5*&OKrQKv_Dr+`E?;*^)al;{7!YuHdv*#lfZ z>)AeEurCvG5wbKTGogKwtI&)ATqX*}ka{SLhWARGug3&RW-84AL+Lz;F%+QyL<^;D zLZ!x(F1+s3&3^PAObTi4Re* zF_ekr@;fHU-3U+KID|Ur7mB2jLt5kbN(`7|TPf;gOJGG7@OX0XF(xIm!rhYv*WNM5 zWRcMrsnd0e#bXVq3B*GU!UEQHp@g>k-un)Ee0F#&%q9b}{^Vspe~(7Xi;{b`?OMnGGUsgHf3C&|B=rqm($k$+)r`p-~18N0$1*JFjdf`4p}__@f@Tkl-N~5 z23L1V4@cq+wwf&R0jWP0>F4VQRMmn730@h{lGli+ej!dI=|GJ%i>2p4>Z7C@k#9he zK+C|i^PDnlSPks_)YrNDZErp_c8d4SN4 z^6vpq>B)VlrgTCHCd~gQL_sFN`B2#rp>2XVQfUk%MEYTa_I==fuo9R8lN5&{v6A3a zN$P^DTl$IO@fhQwE_AGtiX&4c_a1Bz{7E%@|7{aS0V$Q15xD8*V@jlU+y?+Uc@xa; zCmC4_-s1@5KN8if9N}ka^|JJf2Udo}9~E#W%<>ZHH%kQRIp`(y>rgxYe99{>KPty@ z&;7S>_W2u`X*J$+IsUqg4+~hPeio zL#q`EC$<6xbH*m1iVdV#MzBl*w{D%~`rBri?PaVyWd*7wrG}BiLKKxEfRap#DwJu$ z46yZ%gV*zA?RsVA)R28kB1|V{9 z`a~}m?n5hq*F8t686PxDkdToYrKgaJQiu-@DQ1Jss&AbH9NXD-LwW6;-0`P>LcZ<( zV|)f5!n6_zi+#vA*&rgJ{Rpw_AJ%7!;KalJyQnC~b!o;Tg4F7?F1m=vz2@gR_XnRt zLZCYSR#~!trpw;kf=OxB5+sgt=L$nEfHRvu22 
zl9o`5gbr;rWF~M2gzF%jvn*mmM4{1#@o!#sjK0ud$A19eyUg73c^d0~nO<~m_|Q|T z#&nmK|0};3gf^C{Zpk1>NQi-f3R)=jo@cUWD8KJ{aFhaot1r8ZO)q~j-8oM!gRE&V zS)tvJB z|HtSL{~*%^?6|KDxdrMJVriljsuVy|6x2`Z0=}tSL(?iw}Gb5%?Du z^Fm-qB(P!PNknj!Fi)iE&EkYW(kBZwi#L)CE$T4HcB(!3PBb7OxhI+bj zZ*5W1O_G4Ya$$zbdruaDxw$GP%c%7V)OlQ*Q09R{zSPK2J355a6R-s&V7w3X$5}#Q z4Y}G-p^%`}P^i+hhOP*CRl%(Z&8aTsmDe8m*LoOs+y?+q;`eOE&w;c6RWbzU1EM|p z_9dwQMduGXK;sgTxJmWiSk18xiur=X)foE07n1+YUmX=N)Xi64%d2ddK@|;%`E1xS zHQOh(3GKYV7LG&&9|ep|W@)V+VcSHTonPC=$<30-Z918gPfi&b9-`5-0ndWPcqqFt z-^-a@DA~Sifz8`?vv*#Q#u~;}ufjKLbgaWB2BU%@jQtrks0cO{@(dOPTEO++yyBq8 zC20e5?z00MN&!P_o0w)3-+qh>;GxVr&*bbJdme$gdsXG~>u%!O8*e$`o&UO@_z^C6 z%Nw{8xOE5UTn@GtGJ3B-80o4O-;R%NDkA-@R!V&w;QW``1Mx*me~&0mLgk}`sByV^g-6T2YEg?FZ!J?v8c@_ zUNKcWU~+SOPk2>EM;N{E0?zxnpX8JuelA&NF~C+2GYg(sCrp$|E*YcN(zDPii2_QY zq~Br*c7h-gpK181!jI;l3E@Mz9|7+`Oz7`>ND~X{pio?D11n&sk54Ve2{i%D3v%zt zOa>lG=fQiDB%zF;Mnox_b+Tq72)T)nOBq_ysikg{#Ry3wsMHuRi&D>-D1E}HWdEvb z(ay0Y;W_~V0Fdcz6#J(b8(PVHsW>Bp-hL&*F?e1%_sF$e6QS<|uXN%URRR;QfyJp1 znCwAWBOPsGPdk%x>%Gf<4856IcHX&%;Y}wqy`W@P@O8-N3c5MeY8l20y?z)MamG;g z7a$W_Cy&zU)Y&}Q=8h|8sM{XHjWmFZtwtII!4*oUv?L;l<R5;fp_iMNo}Q|2U`w^dlK`1m1=qYSMfua9A83S6GjZ9ukR1HH|gm;D?* znoL!hUszzzz6oaL7Z@8JI%c1}(C+Z{E3W0+x88ZoK6AOP{)ESI(TCp&qr-fCR|n=i zsSE{5YlYN7$M{(8heAU@(1^SiiNrNSl~%>970t?G+22My4G#3fz01FOrGcbZ zQd$!zDXkgYtb?7`#T&Ye*Fz#g zN#y>I0Ye$1(WIKGO8{f3FMyqgtO0Ak_u1?|3IPDX7ysp7c-6cAlD(bKO>2y$r1LfA zCrc!i&|maQ&46`|(k6H#!em?7e$y=nJ*H&kDcH0{Z@Pef z$_^lZ$`j$S0N|kJoM-2radz$5d&~lWUcb*Z*Wb$J-@b{t`4g{7V_FT)``9~K_rxb~ ztAmLxy}Je%W^1d$6?{wL^Z7(0Z_)~oT(#yH$zHJ9qiZnRS_NF2@UD?KcAW6;ut%)Im%7m%M?I*PocZY_N*)&(4cYc@!Sn~^-q(Z^#HTB7t4 zOIAxL#8W8bMravBT2a{oeIwL}GRQ`74mvm7bQs_0^4JLx07yBD-TFm(>sK>!+A8KE z4{iyI7vECgUwo~Gp^s8V9`1wy-Y6Xef}j9s>E*yQbl1Sp1?L^L z>4LG2GL10yf)_IN(f>SZ=kd{GyT=Kg;HK{S$hW<;N>qvuZW3 z|NYnS^FQ};4zB7E+8B*ygD0MUI#2x5-{l|w^miGbYxDPS|5yIwdr1fHd!IBu;TIkZMBBdcoUJ|`EY@0G4Aaals{mAvqf)`D|4 
zVXnU?eWW3nrbTN|QX-k431AdcPiPG_!o_PmQmMmWKD+7dOZEoApMk?d5?3j;_*)mV4uhApf)i*%l?Uj04@$zlMxOLUo_)*ZtorLe=Gtk`_Pe^UPeJM-^N#{OsfM2s7g_)|{*Mp; zSD;`BrTyBH)HQp+XF6H@fk^1XVvpYn^b?RoA~kHN3`@N1yYc@O2F|O)s?Fov^>aVZ zjyL`JF>(G$t;YNR^=<6C=_|Zu;QV9eq<-HrG(;AY~ zPd{qM@vRSj81})C5lW{dH5gf0BX3xG-ccg7dpRA2qL$E;f}Za&*Po}%K$`}a8A{_Y zH9?0`%t#&Es^Jo$P)F_yaw%zyHj&iQD+;>(oPJ(VI;ESJbe*GL$X57_!M zFQVLXY^Zym{`99av-ui+;eR~`0M%+(8bW{G5JbTQ6+z>3AAeU7j0KXhz@1XYbN%XH zdL`pGUdl6`aN%L?ld9*QKYtzExRW(WkR30bqwYfhVU^gY4>$pc^6g91U#oI&5nBR< zItuk<>O%D=ax0V?0-BLnS;$1mtI@ldR3)0yFb$B<@UhM_oMxofQuKTD62OC~rG}wy zg<`IQ>Gm*dR?yA_jAEt0Nhs-Clq5BbHfn+0G5V=25)xrTi>R7Nm={G;OVRaAUHk3B zI-kpBCrkjK{e9TZJTlp)>@yf&83dp{^z6`z4md=wsK!yi@LH*!evBBMLf<4H34Y4M zf@2|p^r9ynwd3%;E}#74*D*Wz89;0;B; zq>*McS}oe$oMNGeaY~*SI3tiH^i=7To_4>a=N!57^jsK?aW-@nz}w z>Sfn*>6O^dHGRrMTzDmMZci*mXd_7n9vG}EuEq$!m%ed zP97+!wMeRHF-LjtT}YPTMr)8n@~6a5PXhnHOD$x{V8%a{>N*XR3|X8ADU?AQfYeam zJ4Lzm{=++$%VZ}`0N}gy$1j0SA2Zn_GajbUg-lcBMU62+v# zsuq0U7hZixN>rvXLgU>jCH%?)!=pqyoQH|1Zt_I}*zLc1iia5J$%-lO~k=F+mNka#(FE%p}k-!UY2E zV_)FTm%o%f?|lb)2soPMvX8#+UA*BpUWo^KzFHWCN(Vil^bb674_xIcy732^fu(HY z0Q~l^{3Q2W`$e)WeUQI+aP#+VVedcu3u77NHe}y*0&j`>u%DrF6ZgU^nZg8~T>ap? 
zT40e;FIZqn4I_5Y!PCLfBnbN8m86?P4vDFUim>sN_&7Nx^wfuGjHt}$71TSLHADZZ zFTMFcBa?s+kP>HGh1}5#u0a1nc>P?tlFhU*JbNRJlA0Gib zVFCaEX78Y`2EVU|?FKABMFxZjebGUwzaNPqY6GxOL;xu`6J$J_DvN(#fFD<60a_QH zgI%@ys2m>b9so_Y zM!h~nYErxd;|;2YX1z(4q^Jh$gGz$4HkSRNFtH9AG)O*PT_*Deugz4cni06;Lba8m~^BzBh@f|Rm9RRUKvrqM{4YtMdFn?RSLK0 z@Lg1iE31r)3LCVT`6o;I{Rd!)B0@WrKqyn`w-*x7>K$MbWh{a7M&Q&g-1jTL!ql7o z6u0Nt()YgS-`~d1{NOWK&~Se0Mkq+|l7uXuN~Bi@#qs35ad#4zx1)!!FWHL z)~{o3^EEVT4@6OXcboa>f8cN3!f4$_-GxBtQ>u^S+acR0P97vNdv0RFYvKd7Zd}(3 zb%9~gR8s>&lQ@2<0wlD1eY&J1X^XO=0T#bV8YcqQ(#KPCig%tYNf_O84>NChEyd^F z%IF!V&^W0@*Ld({_FFH-$h_?B`2a0z{-ete73E_0^&f$viw*-y|Owl6jcQz&z#Y zNB3Ad^St}VegxYF+XSbcn)R$%BaE!6(=nE=3Q$m#JsM*pU@fInybn&>Ye{J(miWt1 z;?EkC24;&MMe1=r2jeJ=qhIv#rlc`y7+qta2J>af*jU1ztGDvaH@^PRcHU+rPkafb z))1!yOh)MyohzB@L-Vu|%!Z9e?*u-IY-(nXd$#VN+wC3v%@4!w-L`}O`ofpFcgL|u z86V6fN!akGzt2Tie~y_`Pv!o*J1k7KBS6hixVVUWOVfv$@j{7xb_tojuUY^SfnYLN z69OKB;R4Mp3rLtxd5Uvkus zr#HTXfB&(cz)qL6S_Wr@B!zV=gfq?>W^7fBg>ILw%jxBPyn(cqP~a&LN|R8U(5KAB zVACd@exG)~A2bpKa`kv)F;+26&&pGptlE%KCZXK9ZgqkgpX0+n`;&`d{Xr}{m(P3Yx28>+d51kI74=C>FSh*6p&8Bb_H{?G5{ju(6{)0ch@ zb;rJp`|RgFn|J-uZ!(1^S4S#>m~f&7W9NhP-Ed$ofA}U*^I$174`8I(;O@&ld&t}A zU2`q&j?H9^I@7U2vk;y0NF2wjK!90XmvRkS0HF$?RDm1DlZyl6fRa=_xl{52w3kES zz+{VDFtL4%F_fuCjHB)ad`Qc zvLG=X=0`P*(GiiXm8#bOBM|99tH8jj(urS84p4e*ThS7lPk9Q|j?OsXQtRxvmv_J5 zh3vm+2la;F4A?C2gP(S0jWugCvX-UeI<&j<6j91d#Z%yzX}2+{C8^gaoC6;SDMUTa zD_!+;e3#~`gbk++GqlRmM^S|{hr!)5&O4v=1BbkLiAhM$dKS($1LwIYwTR+XE6{c@ zUvg_7PJPAC9_6$5=&(+&&+hSkbbHHR0l0h1eSGm7S1>(0zwFz7xR#ub5@l$>92^1zQ#3X>A9ul|slgBkxqw{aXZ7vNG3SBJgcc@tT7R&pCo`1~p z+afes0wy!iGmzTA^WQbXym;DLFqR3Y)r8uAf0bKb_(Jyl>s!&@v88XVY-nVJOFr~= zrh&pMX7@!>Qh`iL zYKu%0?7QR(%k~{S96R9x0G+v&G~Wvt%JC_R>3(4U@{4!?7g?h!eE=H^S}rO9TCY)A zFisA{iU2B@-91*dp;(=goqcSO?8VF^pLoSja^L5#pg9Dt4j3h=c}_WZh%?R|<)o9F z)S3yMew&5fJOu?wqSRBN@I8v6jVn7y5}5z6C9ScPwdl|8(LO4XB2jR z;_H0i`Ty&%G8eP%Jc{#wn533QRxD2+=t&qElAchXQs{s?`wV2&s-t$^9xc}CblJLd z53_U2=cHAYUcb-f*WAE2ue~uMoF7?}Bw_s<{)qFh{yekipUY);x3N3=G<*i6B+k+x 
z4H=XKFje^g$KH(hYY~56>f-b2cd0_xho7Y&v84e(B4K7$u29il(`6;3zh(u4wh6cxxzv$_YZ!er4qj{@1gXxdj*}2qtWveYQfaN z)WKvSC^GSj_YT`H0>IQI)Orh)pZ<3y-}LL0+iyEyKby~a33Ap3y2jJNlUgXngLn#Y z%!wnBk`e0>qX8peg7k12#cIR|g$TJ-a&Zi2LUV5if78uJe&Ub7PP_o1oS}RFCoz&i zSLsj9W5V>tVhLMK0Q<#0A%y=d8rlN`E`XrMw>TVQ@ca67=D;^$^aU?EX2<1=K419V z-{eCt{#EqeX-1mD(6B*kP#=MfXEiwI!Vyk8XNWcH>x`|=$cB}4#M4+|NQNDaQKh-c zuxi6FYfc;GKxWc$@e;H1QV^q+O>pHM&UPxbp!V0Ov#s@dJCn%*XW{YZojE8tvU`cM9b%`bfs-CJ%vny2cJ z?D^mKJf8QsjqGn17}aVLI<9t0VSPu}oO5{Z1I#L3x);CaZHM%?esUkZi!Y&RAQeNZ z5@>rRg8CwmV+Al2k7rQ+H-aQ4=opkGY2TwgLtZ3w2>c9SNi_|vA?qO#8mizNrSrHX zWjIMW)DU`|RB>lul}KZv6=P^M;Z9aUTf z6`N!gTw;RaYL-HI@u%P@wEYgpPDq%xpWZ_4?3BLq$kaUj(Hgb2!)OT(gIhxO1B*yt z2Q*aaVitWCgvijjKnl^T)1NGuDpFSb#E&BX^moTpsAJ;R>v{kG`e82k(VyTM|NEC& zarSClO_=UNwo+L&ma?K35dWoDT*NrmYJpd}kO*k8LSdDDQBqG+*0mt(!;Sy)VXl7L zJ8=&~tieDi8jvA4Z4ZM z1t>V^ir%~70#jP05^e0Gs zn=(mpg`?QLk8EY=NmpFn_y7VbC4&4k^!}>_!Y&;F1YDd+b^RLI{u1q^_>DEp#*Mgb z+m7k+sVlhs;*WCsCqKrTr#**f|HiMf{@E8{tdLkJZHQU=E*wLzAViu;IA#$cvrr;5 z8Y%tBE|yb(aEeHYN*c(AjD&H+I4GQ8<$z3Z=E&QtWLvu>}??(zLh&(0lj08o@Ax8HpaU%Konc8%{_rae47lPqKP|My!o zU-NV9X~WzdCCn>{3{zS|H)izVW|e_6PY6OfU5WIrRcU^S`wDZ`dH^9%XkvwOYl9dZ zm>>jG#%RAw+#X8LcrXFbhbn~^vQ#J%1uvX73fV-T&A<6-iaU;GnRaO=a%lFdLb5tyU-{ljR#}s1??Ou*k7^|C*5Cb22+uyL^$uBtQ7UnnK z#@K8Z#%h#6+nXTqD^Xl&P}$W1vcC@W@TfQBeUG+F3Q0ji>!8p;2-G3c1IGz59IS1E z-@1*7kNi8G>n?jpo`z)8;~4#^Ut%X+lA1sQVhpKNf@?XR-`y36>~t1hFqdM#&t z&vV%HtncBB=RccdD5c&qm;`JZP|f`u=z*Cpn7s2Y?)vJb+C{o?4xNO24rUv$@eRMvmY4j@a-Ee&t!>}4hrRnJ z*|`3sBihdH@%?<}OW)wuyYE}Jmcrp!_Tw*P)8GCzC5A2c<}g=~xH?IiP`X&wUjpU= zp^MVc#d~t(fSxdP2&6AXWN2$HmhT2S05zaMnOj2A6GqIG18IJ%#o3TJp|l<-XnW=4 z;W}$NF#g83v;E=^9vvc|SnV(?&IAm8`W_A^2>6v({}NrGS9lD9FFdI+h}c0mQ8fXM zUfnG?Wru(X9QV0bY!W5})~{K^d1r0pu6qtbJT)^(XY-w8-}7WB9AhGjz-$q~hba$& z7C39wQ)p(;*t(1PcfN=2rC-1o2jSeX39Ekf zwT#yc2?jBt!BRwO#30eO*!s*h44Z|Xf2hJVy=T@6YK2r>7}FHP+BBi}>CdCb!Box( z832HyGdGiDC1uwln&S7*(HU-HM`HHm!BuKsEXsUZjT+1Z0g#F%dmsmz3U)F__8R}8Pc1`wIkKzZ?$MOzKH*%poxO>C%h8cp 
zeze$*U3=KQ_sGl4ciwY9|NZqV=o}?pf$wI?*=KR?+x`i6;Rf!U26lLoo={4i6hlK` zDhX3r4Z`&jM)#D)63%z4i}U$uRM@M^sSwBUFjS;G!IT1Mgn{?L@2kxgm1C$teV74d zYiSL&dX2L(D86zvxBvZL;r1@KTB00)Q!*>g3PDKKxIe_@rPgfmgU^2w6E1*m`(Ehv zm7%7ohMkBUXiMQw2LuJnV`0ZFpe5uQWPXheW0eW;zkmHz{LUZz^+Rq&y{Fi+h3x51 z0*I|3C6iFa0mHOahN#5cgnqxEpF8r>fKMpIg4*De)l-N83uCFU%0a$)E938bD|0tq zg&rc=R`Z2FgFNnuVV@$!FrqXu6oX(iTvtuPG}$5*QaIZ)62O>45~YX%jAfOD`#$pV zLp;w%*G|v?0JO6U*?To+?ECQEAUTwK=P`{2Bqr*1E@28(iPgr?Gppw0VNn7Ui4>rD zo&GLZaTc`CJOxfa6Yf8LoR=>OC<+#KY-3@^(Q8kLt&^PoeH7>XC`mR%M{Ha{9BKpk zg=k;_BnAQD{`HU~%I*#y5XsB`&+Jh(N7$;eLc}lRoT0Ln#rk|BihcyoFc4TC z3_#Vl32Bo^LRmU`PAM}XbxJJ>C;p``{-J{g008Z)uV&*f{4`sA!jKQnzZ#nZSfK{> zB{OPi<&bw2NJHhJif)VpwX|x6;To7euQ^f9MRD_TtlCnniFz7Ov`=3&YyU-hk<_{wG1(C_yT{^n({Rlodmta|-xX#+QI z>B54cK}OSt1+fc^#GYvYXf6F~IpA?s5N!|wgfcFkgLd6RSeM1cY)--i0W8h<7p+jG zz?0Q2=QUyBKflbqZ~05Kv%ry=JmA03gEyf`aXe4(s^1lW_F(qH7yMvw zG%?`r=8j&e^m0SlSRCupH~|%k=wW@UmbW7d0;(;>>ItAKl%=4F1}a{l05*(_vTEhn zL4#V|wv{nJ6T?F88B-kuZUAizsk3OOV4(!aNHvWIR8kmWEVHCFa1$N6ANmLjU--J_(l1~b+otAi_IwjZj60r*#4{XQ%# z8H?m1m01++dED*-U7fM+Wk1I32mbwk43gcXBbW_O$6xScxOxr7TI7i#(E?DMHI&go zRaygy2nB+8P-DX4XGU1q+GlJ%%&deb{oC8P`sqLTh#uSVZ1W3kcJA5B{6d?N;i1Et z;Z{{Ir`3D?#09-9TuB;ii4dHVN7PkQb7tm3mY2r4tq$A4ENXn*~g>T)- zeShkr1uO5@67=pTIXb~&khv}Ka=>fd>#V=t&LGW~P z&x}+0Doo9HIC-=cf+q>%&VGrjSEA65EUBa{f7Ia>X?Vp}K9);krF)bxUTTw6QW&c=E#1?>T%?k{TOk`x8rHAj@DS z38VWanEC3LnEcvTa9i#|^M@b&rZ4?@%rnj-En;S9!uL~%P^e*4p<{y9KnDUCRV*n% z2@+K@8-Unipg$OoRxog8*K+^oKmV}zZ@KNn4gh+4@jJeRYV9rVv$B6xGPMC=pSO3FALT(Afmp@40)W;i)ID zV&w~eh^a3gJ9*R-&Gt=Av43isX0t(h7-CADZjY;PxRo1jyX&ZC|I%!7+F$=UYkuMd zT;BnILm%5SG)+Bt7li~HGSnC_4~sC*caCvYK>!GecoH6bjIT*psLJTJmd;2t9`L>* z0{(Ih3ven`lU*nbY#d1$E(>=5`9CoG`A?#L`7*JPX$z91lw|>~#3V^bD7-G_&8mn5 z2ddMXIaKH08Xji-=_7P=@UEm^lyqEzwHXUVkB;-ytqqQUiN_XV<&(uK037(KOREGA zW=n$vRT;E0x+N^+9%l_wLN8a6Ofb>mJ^sS;4jKSxzlZB~$eW{qOu!3ysb~*0_oStv za271At6N$Ll=B7cuY8$_uYHNVH{F1Cj$BSV+4MNp{n~G`!$WG6YN$VrShAGh{L>{s z#X=V~0J4Z%DMUdWf~OYo0&OFZS^O1OqQ?RHzp@iL0070!ZiO=y3dIlrByQgvI?{l# 
z1Q)OTOamye8-oHOXk)1hc7rh4fNzlRcen{ySOIH(@zreqv)3-$p4;qXyf4vW2Gygh z1%D_eCc~_}0Ke|>bk{!zsW*_!;2SLxu>nwN47DT)s%cf$A^~OHi{L;JhggrAl!k+z zw@lyLWA)lP_pLCT`If(8`pcIdQ*MP5$?o5_i)*gGl~XsId{}?Cd+&ZO{=!$dtHqKr>br(nQ$MO2{wy5_kUN-=cHVM}9&?Fssf9 zkJX^6IIp3ln5K~=M@bTc_aO+-D5V!BW)(e*?0@6NQ%NK^UFV_n23Lk)Vy>`EbYN^4 zyn#gJfYVrgipqm;yDIE2wFhe#RNJWZ*X9aOCoeFy8i@#{hkmKlZE!88o_YE~k4y6c z?G=a-`eLE&qjqjWW}#V!L@k+z;@Yd(ckxGQ-@H6dzcNWuR{h~$u{X@|MIs^?1dpl| z5hPN27GF3T?chjeCKU=bWJ2dM$RNzc?M?V4^Y&{@U;}1f*)G}^&$Em5JgO- zq812T%D9NRIM#$JSc}?l7^R2A8j1;n*_mTj)Ts?+obi8tiTmF1o@08PCyL#-a~EIu z+GRZD@#k~SnWtmz!Lb1CPM0gLy^*h6el2tJ%O6b2$!pp4-nY`Y@I3C`?#cIez}09Z zX&6VNa_~wo{6|SuwqNStLN-*!`z>V=3|L5*1n9<0tFl7F`zlWqO8LR@&&BJm;`yH= z{NI!)0bVGbrs1SvVfe0X-2b{ipuBe(%KZ?6loe+$X4|TW4gFv5!*fMQL~*4}a_A5Thf*0BRRs6Y&GlMnIf zgMi@yzptJL*&UczD9F`dN*@S;1ky$_7~Na9?hr;=Rms)iOAqY?ykWyo7;CQIyM^-l zZ_&Q_Rwi%x7CJq7$mi>DZ1g985zae3_yrRvr;Er;Sb<5Kcsd9jgysOJptOx)EGh$v z(Qy44B@;)EMXWGn3_}Z!@#BmS<%ACaw3tD+ej0n~FXDX2Krs$Kvw-ezAnS)A$~8h@ z0(26l0K(XY40HtCfF@XRiQr2?CLP*={wRz*>-(7f##fhTzqOCL4uz|stsloCXy>!2 zo5Yv19Q8mz4ANMG89k5m>{rmoL#sxTWTdI!Mn>_YE3pDKnF zg>D42B}<%kDAR{04=lKcHb8j2%i?%o5{ae4^F_DSgovzO6!mE?@m5U&Lt7!jDXGxR zVbAOSit#Ugnn5}6;o6c>MOK{y#)Phy0BI1KvDStXc^ShT6GV8O^B5zjQ&c2&%c|o# zv>+hOLTKcT;7TaHPUOwy2F@I-k!YC4G1e?;J%l!>=z(29cf*TtNhW7S7(>*9jFfPdCs`Cs*Q3Y0_w3xWm-qhX zXPEDF_|YH!e$Lr+I<02oJKno<&t7i0^)BB3iO+D=^|w-#%h%5ye)>~5{X_2{Y1FuT zyJumdq~U5L0`tZ(>@0I2S20K{S= zun%-lY7d<=!5G0g7i7n$HZO7hhi>~PCMW=H;2;RPEv2 zKCliU#j7WGC8qv7j7b6%Vg9Rlod?%NeH-Y!O%EIgDI z=}wiMFG}+kEcWDKsl$)Ik7d)!2I4&j1Ow5T$I3!AU)_m7t3oRG6w0$BD$Hr%_&=p- z8QVR{eQ*3TI=5bZgl8jWzY;`ebm_0fAVz~j2TE6>Vj{xdV^l#sUKH^z_WnI8QM`i# z&%>cu={z~maSkJv0wK?p#6uw#leFk*!EM`SdCI0yS`IV`=gXnge4x)Y7&I)BxX70x zfSRbJ>Z>(09=}Eqp#^-H`nhkq!=5>h)KbJO;;)Re^d)R-0yy_^ho%g;^+W%5Sm$C{ zOoXA=yq2jmPeFS=5(yDGBSKWv0&){tO_@d-VuivAnM6V$<7ujpi(|pUkeG1ISVOL! zl{HJ}(_g|*F9Ysmne9;#0H|x@c7B%Zl%FRr!-ZX%5WD(?F1=m*$WB>Ho|=F%jU3Hx zDIz9juz@WQa&aLv2VjNCBd#_|TFlEi``! 
zq(n?fk|+s|V)Bycd_grQ5OQkVdxJ;@l3_-khqo!B2Jb9x)jIOls?e*eWndGM#8TF3 z;d-xGT+Nag!9}@CL?}%)(M&8~#)V56p#A$2%3-B?u$DwzSp3xl<~9Sy*C}@{P+Olc zev;v$x4wbPzi}CUdS= zJQ)7AY5`P>EODf->cN(=kdP5du0bKtGnb^aYH&`eZ2#wfV&^A60`AC3kYwdKD2g#s z@rn&9UFtkZmIjCDtI~e}`57Y^D|qk2eWPK=MNG_W554Ta)C%tc=0XhCK}Vr2O5Fnp zrL`1ZnVRe~)a>!J4RuNl%!?qD-hu~{ge6e*0aZK=?^A-3YT^MEyos7BCDc>Pmf0S+ zjOU0Q8B`Svp2Qlg3GJ8C!?1y?t~;J1NZ*>`yROieF%G01OT9v6W1|v`cISlW)P|| zf=amQ{(tt~J6^Y}DD(f;T6^y|oN&+0`QFZ%rpZYOB48qjIU)v7aGW2`s5oYwF%77I zqGLjW=r{y12S73nG_jk`@#b{SJ?Tw*@3rdpN3H$7=eBg@cBHF6x6e6m+;LUaQ%^m` z$rb9E4#nnml)`uva5FTh5dvx|gw`hL2;%S_!fJ^>nz6Jw=ZtT79!GxtS5I3lM|DV8 z{0H3hw-NCO8A^>fQ`@q&J)*&;8U{RW?u7{6bl#m3i8yt*D5$t`|F4N2MVw2h$MQ+y zj*U!h-bhacih}4ficXv09nqVP=P1^+Tt@8`NduOAIxd!HrcywX16@0Hc`R`ztWj5z zu^LZh>hk6z2nY=4q(E`|K){?`Q|lkBsVz^m0PL|26E(>k7^KCx zS!gpgr(dGv;;4vh%yQPZVevzk^M%*_+G&*V6+|4~JG@O`d7lFQ47O9xV<5h?OGFLFjm5$wN2a?IYG0Oc=+VL(`SQF?A(T%TaRnEaoskh zb7+=BmR{>~LS_v9=pwlYr4wTE>o_|$;?~suRS4dq;((5a1-WK%)TVXNp(`Ua7kZmL zJ7>FOzQc)+U4Gh*=OcFa%x&Jpu0Q)dE_l_;I2dwnxVc77fU9x}pAprZU8&Z?9tEP< zxz(fvkz?mIhjCk92O&r3rAd5s3J?aE^sNO9&2yIZ7cR{P5TQ!F*3MbKc@%?Ny%@|N zT}>2W&ql{=FLLyMyqc45em&Y-ISofekd5czrZ!uhJnd(r{HBnIedAIvNa`F)js#N6 zpB2p^+B!sxFkHCTTULg^SAE+L(C(T5Pfvt>ODmKyqtuKlDx)9_MVXJ0&+VHhbHbkW zElYY7wE0i0q!Bg!v-pGnXf!rJ5F0VBKsb`RV@Xp-gbWh`>WCmBtU8Bx z*2)>Zy|0m<9$}e(P=!#lbzRP}pZaCup@R?fZhuL8*aQFop(^R@deoQnnp zTHws#H)0g(oUKFSzSD@{6T>I7DBee+OrR*lTM;k?1>y^IwMWP^y1REXa*kX)&N)J; zEC3J>-WeG&t#UzJa`e%DDzrrmCKM9lU|C56a5|^vFlZKWOrF#`L^DQ917hY_-RQaO z$(OOb?=UOZ-1iE0j~Gb1#im!ijPw5db(Fhz@`W1$qkT0~BTwx-wF2G*PMj)5a^f!C z6LrFV8ZGo1#;tw!x7CSTNiRKT^1i7iyT^3=y+AJoiIv!QPK!+fEK>kk>qHm65@5!2 zwiBwq{J-4xGe64c*3%V?TVKC8M|7k>*B-a)JN?Ag6w*BaVqH8iDaRzHJt)lIXpVn) zlG_viT)WFR|8a0*hnR$4Vp=atog_3v zl#VoIO=zVIeo+3|4GNUL+Ohb4jv)ZC4A`uYp>(vyK4U6h|i3YJ@bX14I#W6@rtoKuZ)` zQ`I}DGpdC?()HBq9A|vpvpDuo@1vZ*_ZiY7b{8hX%r`!V%l`JQ%zf1p`ONXa$s32T z#wc(dzt(WFoV*QN$-CtjTS&;hF}gq zan2itdhMX=t=^4?y;HF6wxb;Q@2_C-U)}|$YwG3Z^N{w27E 
zO~h44=^WcT!ott~67|iuKCnA}C-yMm0GO$csE=JvcjvcLilAyxdUUnj*a)0hq-wY6 z%yk){))FW}*}!!eY949@oDB;?p~&%vONx0oIR_hm?3MIi@k6I?+v6&(Ucftt6D4TC zVGbAZO$)ARd5cFg#fcK5;9MjVPfW|Fia}c{g3D}Cb}3)OdvtX~b@i=eGjn9?yAW&2 zOvhp_wZ!qvggVt>6(z%%{+be_7N)OYQkcq=T7+E1M22z34;ZL{r+JDJ>@a(D)TMvr zkX!N&^>)uQ-~0b~@6*4T;VrkGw&VJUnROrkSkC^_-(}OrSw6iC%U|rniUa9DE(}p> z#U?w}&BBnNd(knhZ&J?y?i3Q=gu!n=Yq4y-QT*$iQVK}5e@(T02Y~?2h6QCo88kCx z)937+hI%D%=r?|w@?YM0kEXlbwWVt^%MeYR7Nxn_QX*mSeoUnai9ovJG(D$Htt;KE zasHYV5>1?Xd;KQ9;lI9&Gr#lOxN#ZY`^kgsziuDxd=pZ1(BYjAB>MFG{+Vz3rI%9T z>7Yz?bEH?Z5(3fHw5v$r^0Wz#=wWvIE^a$|jLScCjEk?HXXjMltZmykbH@gzGV6GP zNF5y~7i(@ka-5Y>VE;*>CsR1rVs>_lI!*a=Vp7k&MrE~D)Cq%@XE6u75jp?;?e92^ z=jh8b5yz$<|0Vo4KbIrJB=i%7A~V&pG8%NRQOQs;psz|HntGv>IfP1d+;-Y)6c$`+ z4~#+{Eyz^|*{uf|edJ>g+)m#OdsqbkU@d|DL+`}z{&qs`Kq`#-YE$ZT7zl@tBU8KS zv^)c~Or$xN-ldViV@iNpYm7L;Qk!sV$?E5+}%PH-9&10wb`=2^@<4gU$>~q8WL!>P>T&fNgbaOU#fq%c8L$CRHboq2& z7Xq8k155|xlmuLpvS@lx(mkiD<}B2Bu7VJw1qWlGSM?Of?`PNXx3$v|#WKz-bk*T5}L`N+S^#z?)i|KVUOi#}+EC;Cs zX<|n~A=ZXZN{neZaHP}98RVt_x(dAio$onK=jh8c5!S!#)wI9s`RuP*Lr#ukl4~!4 zS2^b3yFei_e%(4vMSe)g)x?r+5Tgv^dr08icFvLC{e84Poq_7GUCGor%QB!Qx7(Dr#9Bucz%79CYXe^P=7-J9Li)I?Ua~eT%-M;V|;*y zE6@RkqE=gVLgpM%g_gpK2(b|MPI3?wY|Nb#Lx(`&;L|}Ro;v9{9Z^QN% zcj|x~ktVpd4jd6d0>%IoAm<-Ti)3wJ>lQzg*i@n8EGhC@LL7}hoOD?{q&5SQa}k9bcAFp9wl!$eeT zx>M7vTR%sfZZpc;l)lTzO%sdtlzxh0dXCu*>uJqQk>@#etnrzr)yhEvQ57d1#nkX> zQ3hI*O=n~q9F>QbgJb)S@}W2W>FGaT_rhjg{4%C~_$3@n(ODspN@_Y-TdT*W!lI@m zDQefIPYGZTVLA~}HI;})iHkLr+WuM?c|2smbiulV$2j&&|MNjx`)}-F7ytm(D&kxC z>2s+h0MYJdHOJ)eRJ{Qu0cNI9XSGPRA`oj>fCN>FG(#YIkB$Pxw6HSm*z%|gSbXO@ z(f;XMm^Ac_uwU%4+Pj-Xo&&0mRi6m}7;sKD_DV@o`YI@zs8CM{>L?*-d0Zl0* z7l^91IBACVJ%h!-V4bjGHfQ%szKbJ&{kO!^cx=B?Y{$2M8|VN1U(mkvd_K8UvV2RQ z;c-tpbjc*A7NKke;DG-EBGs2b}mj3XMIry6YK{$LH_qKIzVw3M;!`Vn@Gk$6l(%qOw zw>JEm6!@o37nh!!pjzJA|C~#G|B*O<9oj!(TbTOk{PYaZ_`#R*R{=8^;SZEns{ao~m=i~d1*$8~O2*Mg)LD5bcp)gXBoeZUb<-`De*LvnS6=VwcBiS2H!3<9;-*`)r&@$EjynPZlT4;rb{wEykWr)@ zR7{7)}fM9DA?K%0r|YlaQ^nM$?)8lg<;Xqhzadrb!b<1#>= 
z$bn_L2I@8ZfTs2@Vv1{J(iVu4CIe=|rVbRJzm7vc`NQ--@m@G>wf;2$z|;nu7^JO& z4e^PZZgv@8*Gg(zT=Uim2qMVQtD!$Xar^>qW`-yJ$oKQZfAxo4_VmZ|p?>5|H_dbP z@c}Ww#?}lj2n;G*SsOWd5UFcncIR1)KKDMT?|htd91V%>^<4O@OXw}Dp?10FWFM;F zsp^UtBDu@)nPXIra3QG?<{7SNQHaM0WVu7SHf&jPN1i*PGchAyWVE|&YmduFgTO#) zD|wV!Q~F3}rb9LZBM-9~^iB-;r(gc{({$eMy~+6(v*|T&WT~+Ara%#D+8PP!2si>l zM@++ih%%L~-)4!eBvlNs>!3k1eL^&Yv+}TdA@db^;n;et&+(uC6?hPJKirW$OalOr zYRN9x*~`}4*lye+7t&3yRUqq5A(^LATabdJ-d9!Yi%mbXT4<@KDoa|Mb5^E>y_a0f z$-jIXTAdy?G5}!}uOn!kM?{H|+ZEa90;I~LU6f5n@}x6a#3N~c)1If7+Uv#oR?@p1 zakWS3JlTc~)R_mTsRSX2Msw&+K$Ms=C2=+e*f~d?5CNNhCS*78mC|bw9GYSTWd|F;~FZKlmN2>^nmLkru%_wCv&wx#(~Hob!J8yScvI=7!s9 z`ZuhiOAaR;)V1)@)EtLm!X_<*A(r`D*LNbqlGx!*FrZc*C`p|FYyLICV3bMCzft*{ zf;7y>$^eakRN<`SM#UKZb%?mav1hBWW2NTsFZ_2-{>5vEt4pWx2+1kGXH#2Hjj40S z6#KLp(JKjQ!)AhnF{)(&b<+mR(W}>F{0g(PJmDvQg75i@-{GRKdLma0Bkwq{$fpi2 zGb&qjXwxCX*TSHzk-9c`I0_+{bg2(D^(~)ASMR!Se&44*#l_G07P=cdgnmu8n^DG^ z`Cdgk&rmf%o=Cts@G68*r!FByGKY8p4R~+Lg;^^nYh|=rIa%S!y!{)6z+gB+T~4je z)a|3&RLIJ-q3Af1Kq9xbI!gK9Ajh_-2lGJt4J4R!(W7RM@$;tY3)= zZ8aFbR_jzkuo9V=5@PNY&2-%I=bca`lBnWniK(H^&2>5X>R+I``ZP_QypQc+8vvj+ z5(3>#&!F;I!{5L>Ve&%W!Rm;5RMMK8vA{sq)TtqPt=JUTZ84~8La2zl3OXAgUs|F6 zxzC-(jkgxYZ0a&#NKbYhmH{k=LB7? z=r09U)@O9vaN*ZIld8RrlOOr$X*|XceJjq~%Q?UMzc~9BUq!WH9iKQk!d*L}KH`bm zrIi=N295`8;FYtU@3lTLImoC&F-Cb5hrys3rcY_DdDdAo<^=#GM8HIW?DIq>fkD&u zHzPvNN6G}F7MU>HcI;}w;2-{xTYl!{)VH4QKyNMde;(4^lmcQ+$W#i1Q%kUlX@A%j z+v}R7Uq#U)S3~~Hm0F3h z4%Bs^)QFFfb`51M5S5k&7_JZw-C)@MotVbJXa4oy`I;a49=vmiMvAVdRAo2{3>Bic zVBdnZ5?orY)qO1a$%Ml#vJf)9REoFK~hho;| ze{9W!tN!v|x#BHvKF#Owo|!-Me73#rk2$bmicqHx6A2)ZN+NBX1-*&%Hx#91XEwaEt`n?Si?Lius@Ud74R?4|aR_1^@tcpXhVk?1eM~ zFvt8HNnJk9K@|)cnCsGzL>;!Ejc-G-NH2;a2w^!gy*X!oJ#77|C$RJ_$PX6Tyy~!}3U5*oy?0;S7_AsU=2H>mW#LZ*x3VQn>&_B{Gy!Ob{lDlBarf za1K^$M)MW@8DVkCamf=N#fEQr5r^LS59sL5A<~Dt6=$BoId6Oo7yR;1GJn<{_AkT! 
z8+))1vbswr^OP>qNYS)X)>4TZ_t#syr#XF#=O)sxCDz}cLHgTKqfCh#EBCR6@ooxf zsWgf|Jy5Be9zmVJe}uN{_`W94c4V6HW3fC z;I=7|H9?Z3z`$eL7ak=ZyB2zl1FH5J+M>Sq$`5X$3{9iYh=}Mzm6*npW@3_C1WQ`y@KNYgNP0Ut#4~#iL*NBt|2l zl?g>#=npGKwWF>a&S!*J6SN|4<;2>YzVmi3`#d>&FcC4e$YPJTeCzj|#&dYj&7E;ByWjFxtWI?p4gv*|q~3*~!jwW8 zNQ6JL99w@%NW^A)tV-|AQ*zQ5hZm)baRCYdXC*aq$KGkj(og>qgKHl;XywB@0MH5- z2IQNcN$nZ{)2u1&d1r8X91d{pf>yh*7QmQV>tIZL!wKGr)ed`(%2iD5&0w9QecKWG z*IaX2H(%nA_PWOr$q`rJC8LlubR-Sx(Uj#YF2R?n9FP!zq}OZDyDZ^bNfua)@ox?~ zBaSMnUZ3{H4QMN;@>c%v*z2T08`1(xk+f@QmQa>nn!mWzJ% zC(*q-_{56R-w)+2HM;C+XDu6(8%@+DR}0Ln5!M*@R?afCrkxbu3M{nCSi{Yb2|2KF z>aBDhO%>33Q99{*7&XBl2K$$yv_!@ubV)6?fk?ZZab^dW-tYzv{n}3xj-M|5{yQ=Y z2Bf{g(txO`)ipKhMM)#BHQht@E>%H~Uq|dOq`xh6&GwwhlV1PZJn!dzl>Y1#A3Rp^ z;r%^MjsjEp6yA05;&IV3dnBP;QK?iZ>PU=G1V>pz9TXoTbyZT=ieElLIC{(7+OtDn z{46uu9>e)h*hOy{+PP!HY=Mff)DKjnVRB%mct=zuDxIQ5=6&KqSow{h*8L=o7(yz0 zr0yaY!AD9eLRXmE)nR76qpy+mnWGiqb&r28arD4-?)dXAWb2>2jm55KSf-gS0(o>q zVoFi~wK7;i;;Cp_mz<$vxg>G3CV_D<61$fWp)aOrX+l5_G9k=nY`XOX2Y>DtAF%zu zH}>!j006C5ajnfTeWu;+f+^ai47DLci$>yL0PPMg_bAr?uVYA5#=l2}+&MyTfZv+2 zxY2RWCyIQzl>y zn<+_{ir^(f`vYo)=`HIS<&(_LyQGRJ4Ww^afD`@w4M$32?! zU-#?m_~riwXYS4$)?1CiR$hDxbN zk&KFX@+b_J@78`^`~H7n)7ejD&!t;f89<&#inhlKjQTz5(SUZ|CI+RDoZRP$;ST}Z z-!dp!mQmI=fUIb@kRaq)Miom&^r>5se9kewbBfM7lg{1f9bJVtJn`$O79Ke54tK`c z?ET}naBQ|dfgQ4%ZO- zqzMXSELvhelcbugMFrMy2b`tpjoM#aUd44@jiV<^Ckkz0XqDo!}Av&s~O1U(mnsyxSI<}p$o3mc_eH81~ zaq?rIg7DxL+_*P3^Ry>&_FI0R-LLooR(EgbR)r%6O1ORi2Vq*;xXf7FsE()3)go&I znIr`ORsyRq{qiia|6#)9Z^!*u4>z}>0mDu{37^Dr&Ic-n^MMUW(2z8Y1ofg7| zE^Jpg{M*05p+EkgXm9C0K6-)<7f*etz1|%9=;VKX`QRjUHp0RU(4U9F2@L9rxK@|e zS3ir3{_;=R`1McXmSYtkJyvpJr3T4UCypmNn|hhYz^adUjC>YRD+NSTIn=7Ajb~7n zHqjG^qY_sM86KV9PBvO49KGr81xOnC!au#6K{dst&wVugVB%ow)_JC;TL_*(e@IYA z?sK9DAzFu!lML?+2?z>B(IRLh)PXt#qK=3~No##hXKR;Ya|!>|!ee9#y@X8-ybodaX8*Y8b2Le(QOn>9oviC3F$kY#fCvn3Z*VM3jLK)sRgrfn5 zZCbvKiclGRLoP|XA)zTlwV$XBcX3#@*Gd`RP=z{mzBO$AHWui%A&r0*Mp$2byD1S! zQ%GxqyX53Wsa-|%!PF@;2Qkvk99w3=efB!`zv?9nufCiwzo-9R+HldF{~hV=51Kg? 
z_PjWuIO|+C{oK#7{uMvOg3G!2P|fkxm_!Z==EO$v768%8E>#M;tb@nlq9c#i0u$;; z2S*i^5S6xOP=X7B4+3>fRaInL_u}Ud5SEVI?cLk|sgHBusuNuLf~TWhp&UWC?U-s8 zwC7rcTo@4vD79+h#W@J3$sr+7))ko-q6V^Q$Tt>rwsq-lY|)-|b}i+?4lnHg@E3XW zH@ujzdOD^s-aC^EFJ{~S`EwTAo=^w8lu6g$6VN2~6RFf%Ut@zkzEbdveTLw z4Z@NYXKH~)TDCU8%Gr#Q;A%8iwoYXndgV_Mj~#o!j>EmRuZ#eIfJ4YPKABRISar&X zohDX2YOFx#k^YEKLpIYTILkzx7%4^*G-SQdg07advx@fa7R4qOB`;lUbK#1!DglZ$2RinT9U9X@GX$RwL@(3dGO=iOxcNJ4B# zXnp4kxn&jlBemGxIdGmSHTL%~4Uef3&{84f>hR+55ps15G@^CD1+{LXs)Q)GAjA+U zs>m?ZNQh)1g;imM&L2dp!JTjC8k@iQ8b0>ccXRfIPh$6Z8<8AF5nSQuPGwBZ=1k9J zv}YaJlta2oJ}cz29zWyAW}&??V`fW>nH>dggQs%Bu&|*_+dNE{@Q?rH7x<6g_)YXQ z4P&}DmOtsK?0(}LIaw6w06Y=zlp&5Otj=F0WU8CG8P>+0wn8W^%)vLj7D)+SCB_>N zCj_TVIbk`6Ru-sT1TSozY7^h_URK`rw)=bcPm2k=E_-P7D}32j&^_zB8SeZJVjU4l zGtCkSMhJ*lH-N8|S_0Y5Eo7T#DQiVCi_$Bsb)-s}lq+k<`n11oivD??UA@Sm%brX; zow@*kNC&t6se}zLw59+Qo33lx22s;Q0GFJ6VmnXt0T`#`qH9TgiQJSXpJ`L?^bUz@8J zcD?*1^q%}^wgT4zivyTG5m`Mk#P=Zda=g!=FrI~1$i-^+4P1{)4k-N|bOPl!j}23t z+dMubk)%>aJVeqQ3{9`G6WEO6T@jcOPBAM9z_h@LbWe3M&(0iHKK>Do{o!w+$4}q9 zj61US)D+Kp>XUil3%-fDb?aHTVTvrvxb@b9T=~VTdH?%A!1XuYOnC}Pt=XA7SpxHd z?Qi%qRvvo^tAjuul}c^WWs&#|5Z6YyR4R0_Uf8m#sa5FIk#bN&9moR4wF%HF6vht%OTlk-$8x-$06MF^<>*)pU8{<*RQbil5H#j^8-+)L}$rN%|u94 zL(=GQMxIkK3E>z$>oZ{!(5vBdfBt{@*lS-;cz_i;r=I;hw!HQ?ITRop1S(qu!PzyK zt6kTX+K{t8%KF6W8=Z2#?e(T?YJz9Y8k#5KcP){gQwk5IuT3Y#3C_3KTRCof&NmQ` zKk%xD_r|`00swGRXHh)nHyFh!EY~smr=%QJB_NFh43U@#tvy?jxek>BY9$2oMvVXp z6|gv@c!p9xr^EJXsDAU0IQ9qs=d^FX`1Q!v=ixd}8Mh~9B1xDd-M}g9ZrFR0?&?N@ z*Q3^EXEP41rO9Uy6Us4`coCq6x=()oZl)i5F2fECVnm{*03x+iHBMTRL-OG3G#n_c zv9+6AW~vDmP$72{k5WsTzb#moL*fC9M;T*WgYjo7s8?u@V2d)nE2F(7XS+#v5rLKK z_Otp2f5hs?KSg=u_~||+_t@N2m(B&}l7H`avGK*vqwCBe-vA6(BJp@dc{0bZDD6_| zdBKaP9qqV`oMrstB10TL9o7aB@G|**Z2-`;H&rqT4$Kv5`aMMl4&`3%~K%tP{D@*B%`P z{_xNLlGpzJpR&5L7PfBo{Y#rW>pXV8`LEb7LJ^=0(VUBz;873zS*lWq(8H!05|ye7 z5ItE?vO&$Tj`%9lE^De$MKuce{)oB`&>O+1!j+L&)=&>=5eOfCEA_t5+{+^*?0(`` z^Y~Z(Am@I~qe;T(c5jfhh@9Q=8I$r)Gw7=Wto9?H`1LpN`FFg7(7W?tMfZnIzvx?; 
z`L&;8p|;M^JV362N*NFLfduB)Yjv$7LufCil19$-8XjWpW)c;;Ks*(HM)BGZx+`+k8JOcQHG2x^(=>!EsqEP zjoI~BR;||S;Mbr7wLf(b8$Idp!ZkGT6P7bx(dA-RFD_8=n0%wmCQo z90!gB;H1Ja;A9EI8ZQMwOk1y&KBqp#w<#fs`ZRX?0HCpPzU$UH{Zml@3j$8fhMNwJ z2EDQGQ1V0s1STqf8rU%H+3sQS-G9fSx4!{hzH2qRFUN{jhu8nsZ}9SOc?Mu5iI{?1 zf$_Cd2P4ippN)^@NZ}WL_l>;f_x{v^(YrDcHox-!BmB_!v)YGj5D{0?ONt7O^Y0?` zfl8EI)4fp#vfvoh&>q%kSu=_-6+nk0s?`y-Q~YX4-5=r02xTA)OVEHDjF4;IhaSF) zc>K$2eRV~P4dvYeS8c?EMX`vH#DYXdPyw~kZ_vkJ=ziRz}dOQOT&3$fW1&gUk5e52z)?=0I*%@vWw7##AN*K}`g~dOVOl5e=K|Pv0Zs*roZ7 zNc7%H3RQ^|N}Q?5Hs;K1DOj2nWE!59j>k>Fo$AW)12Y#4w)R=@Xt8B#BCC%6QdE z6(Wv)z`hob`AA%;8$-m4RIAG-qUj0_4Kvn5!cXN|gU zsc*R9v~Rz-S^UPYAx=FV&GR)7`PvuB_@@N|IvH}Jsm7lMSxXWZG9q9Dzq@h9tqD?R z*r&MoOr|c|$%->alS!mX1j6R#%GPuSShTHU_&Ha`9D&+YmJ%u3RL1IHZDtjiZaMb3 z6{g4CSn!h@x~bXKl1OW6_3@|yCp9t+vO#HWEC^d1?T*2p0GDD@z&x-YSUS8&`?~Al z=GzFzj=<_NakWQS9S~wfRFEL}Zijs1dUWGD*s>8WyO{cd^VpC%HUlTq=QZCmH-Ocp z8oyN0Ulv-c1=UE2-lnxR)*Lf~+07=7<&Le%`m03Z3{A}^wQx>eXD=L=2u%KN&114A z_^=W{;}W{gVDDOmI#g`v=FAmv{N3+j^-Zru%lqDWY~Q|%Yd`*X%zB{*2--Bc;-cc3 zaYiz^-lOWc%-5KTy|Ond+41=2v3xfMPHpkVS*GdyYw0g+kpZtu-|`NWle6E3Bl?$iW72 zj7u?%oGpn^IQim;;#?rc%IXDOPc|jgGlERlOiu~Dt|u149IzY9=o&$ryTiW@n8R-h zK1Tq0AXUZGim$0Lk69Q{NlS(Z^Had|(V~hURoQ z%l<2oME;vh1{{KQ3#7np&;$!k*xH6-|1l1{=4YsG{MO;(Cu#{wJ z8y(CXoAl|v1P@@cNGo=4J5)SD4f!zkb@WiAT65Yf6u%RoICF;yvbtyH5(tE{O;HDy`Cu*8jOs3DdWelU-G z;qM4Xu0_j-F6?|yta!%Pu;Djga*%2tzlCfM#d;mqGro_Zh z#Y_!h%(O{V;<&ulOgqd(7^a^C2VK{o0~x)RWBU{wefEo~Z@T4yIVh*mzCzjnZ)d@l zYQOEtgnAMgj!kUV^LT`AiXvTqgtRlVse%xiF{yT8MG>54=+cPp&Ya$iW8JP<+`f|x zuKnU^-vx~;Xx(RK(-RnJ8W@zI`=*V>fB_lzdTUxRW5X4IF=5ar=PfCCu*rR?p}$E` z#d?PI%>gsE~CKMylk_q&i$of3l%4p z0{ch8Ezxl#z`hy|2H3A~p!pZ!K!5`^92mgiMP+qS7%W7V<|{a;_JV^I<+jA)tD z(%c4-I7cl8T&Oluyy?)1g<&oQMKL+%rYO=lxLhpcYkq19lIldTtaKvqcmACvE$A0f;3CH)JhO7DR*}whk-*W!u85Ty0 zdMjA$$+Rn35R!9&sm!+}2!Qw2GSOpsy>>W-# z+1B%5xJ)eja1Yu8A8IDk>)7yR2Xd`6+P9ysm*|C_x7ik>EMrR>>bJay)p!5x19W6guYF|( z0D$(7;Mz0j^tqrm+ICV07o^D~lY_*0w2m;W$l5JJk&UHLeKfLMk!4hsP!B6M@6I`t z!|vyOH4A_CHaN{~fdJGesp}JTW-pyntrie+n> 
zW(nBlSa8zxw}+IChIB(mg^*B=LZEb}?xnWQTc?2~eik4&2p$}cOfyCyBMcn67zy){ zI4=~(gm4V>Xh}R;qDO1mM*{Vcn$D2`ha>T*!cm0-B^<2?#{zo1LQXhj*`cL}SO7Wd zJuT;w0-`aIqf7=(1tvwPWNe(^?byo;>Pb!KQkaMl!A=l@nZ5o9bvHG2~(z{iP~#Clh0vgjb>+lJ!TCWgQET~h(i?XVQGPV=qWldZr zXcdJt^FVMC3D}I6=wK+0T%hHIfk5FLrT36ITu0cxRCD0vFNd`sX|7ScuJO_jR#8gX_tD|Jy4ew?U`xnJY~93D2N1abGPf0C2S~Y?G(sh^K^XcOx=k^&gy;&9FrB&Q&!3?3@bk-*SX3^R1-(V?dr zdSV}3&!Izy8#M}wE|D;^@i~x`ks8&)9F0mdMwvC<%EU-QLNclp+@!e0o=ZO`LF&Y8 z+Flb3Se+i3Wcf}Sr9eXfTDnI1S9I+BhjzI_%8nF6A$;!GIRk@t{~hyh_*wM8=^c7? zS2i=do`3j@*KlwEgHfcN3+>#E+5RG9X|KBo0FpITqI8K=*s*!m^PbM{{lS~=v=mS; zEwk>~-^xninL-)5G!Nl+){c=;jC9pAl1Ltfn1%y|sE}o3IEJH=EY-9%DuhU$1jkwl zrN~97Dx1-01H1~_Th7LptB6*_{?a`?qz_tazxZ3&{>ImFbW@wrvJLDRh@*7E$eEP2 zBuVxbCOYMn#B&k5N0Km;tO>G>ZnRbw8^DO6xf4d>5=qq10C7U@9LwIa2K9~|Z8-5; zzeoA0D;|I&bKlrkZU6vixqu+#8!jQLIZ783Ax>g~;ZmlG7&$HmBt+`ECU3V#Q|fB!brB)QCFs}QW1xnu>P9kDG@4_Dh*LKXL^7a^ z01mH?I)y(LxLB~!3{+A~eX+)Y%6NjBAgOGT1~G{n2Q)ZIL6wdb7SnWw)TLqj?r6#s z&GEHbOretIPuO$KYceL%y;V9Nwc5XeR35=3e{JZUWu)!&J%R-o3bb|dIWaAs4FaU9 zv|Em8C(K@Z9mn4Izv=z+n;sC@`PD!56FlzmkK*K_;vGzP9GP9_X%(6 zlY5?kT&O^)MiSeQ<$Utm+qmI|8}D#iv>Y+@n8%at+0IImmh%Xe2pzFwT_{x1#JM`a zKplA<({&lFq!MaE3}n$ksA)$-+DfNLRikC3REPMC=pcrYnB^3CoBW0kQeXWq)CaF5 zEIOTUZ#HTb@*BQY9@IFo3omZ~AH(eABQYV*rP7>U*W4FMD~ zai-Xu&QFm_0TCsLQivmnGLo3ej1lKd$#mqQa*j}VG;_EdwqAJ)M}Phon@r_lVPBCX zz_sRDJLp{WWAr;uOb$N7QK8c0K$9#ry3{Hd4gQn4DLPv>GHQA9X!Y)p>b7pQioYml z=D95v*24uSORjsu)6mm6EpV+#wh_PW1=KT71+5`QT+9&{!9|OTUBj1dN(GJRkHKkC z(x9Yq&W_DAj|LIP%n?HkVgfXx(Z+@|&qNu0(f5wIpWSaU@?X@pLU0E9EiTW6IBN+xr?>r-xQ zdu`O_IHaJ6wcduwV`vI)Qt!SKV}-9lT%0zqH%NDcy>`;$KY6`%H~i=G+_e0GwK17; z->I)@uT=$1@45YF4S{KG2v9QCaj3+&k~2B@MP=z9-^TEt-i)q3FvDIt)6?8=&4*c8 zfze8!)e3CeSQvT~O|!i@e2rfRJJt;lH0D>Vd0oSqtpxf3MoyU)Vc+};7hd-CJKd(~ zm%Nhp%YT>?r9w>ivorZw0HV^1(5=lmmFno$N;w!(sSv7~7NGqhS!LXvS}SxQR3%6V zXs-{FS(F+orlmx$_*X^;t|1(`>ES~H6ruIJuVw3R{1&|#VPO$8*nKh4vk3H}ov^8B z0o$w!%Tm*gY39ZF=C}KJEdkk&?}ZewrLv(V1~p!kknZoyJ663?-= 
z4d2J;%B${&gL3~_cGoX?cv~DDVs!8W$muz;;DPQgu6U9&>ASJ?RYg2hb*TYl}=IQp|cd!Ouw*7NAXN6Dq8>O74s6yO5t z3D;B4b)$(A zm6BeU-dkcrT*D}&PAz|u`*uEh8IwYq|j=z3bjZ|${6Xf zm%eVKNu4oq9hzSMkxQEX+DMnmShIX2&9EMG{w+8qm>8z#z!uR=CLIdx^cg-X5XnRt zVDm7fu;bI}L-pQ|a^ih|gWmWN?#q$AcV#bn!Sk607HSyOrA2|H)0I5?xWt9sf{FSu zE(xsVNsN>KG3UhQ9F&--(e8~itXnt7($eA`ZqwlNpFtC>m9M$}GF6sDEDKG+Y=wfF zNKNiNSshtLAX>}Wlyme#MXgH9d0MpS#R0U1>7s@7282)iJHs13OE`E{66bn&nA^OW zvtR#O`cHWj$NHf2O2H(sZvyC2g>{qop%hqX zTHS?6sa7v^-nOSesiT5(3}YN?1(Z?2DVaEGH4Z>bIB}CH7MPPaw(z8VvOx=CLJ}(T zp$sr5TVWj!E~!N#sUC@pllxqu*F<7zzcaN~QiJ^0Sey**XjLl?_oYmkP-bBwK^(yuYC9AQ;`%P5a5+6OosRr)&8`3IHy?nb zb>G_~0RW(J$Z-F^kZt)oG~_r;>)v4HCRY9|`L9G3cwI8Jxx7IXZXK=f9Q zn|Fjyt*~|5I#%0o{@eeYtDf@A`>c*suY4Zd+wi--pL$qRC;ZoLE0=DHL~=~RC03$N z1V2bowz(q}4=#rojG^BYS<|>vM^dR%8;&7i$l&T*2-S$$C!Eiym9bIgb?f;i=@F=P1z2MW$Sb-jR`KNR6;!Ve6J1+qNfE6 zyyKf*{LOb705BZVQJ7B@GwY62Qv=i{gLU*>WHwmFT45@6Lnl-rg13+)tF0DRw>ot4 z$lT&_hF5)_#eaVX;n=MY{d#kr(b>M0&F7uVIp6SX_PpThInnZbxP(I^$W|lMwHrrq zCKjgS0adw1YPI0ooIZ4#C^#hwO&g}&4{9i1y0pC5+AtnNj^z|VjNFVUrArzBVg%j7 zIVuk^2zwQd{kK=&H(gv00(&F?Xl$^LxbJObd%mB#^fu52TksoHT$96W`q2ZVW^N%Y z_E^>9w0CV`*cOWvRH5CW_r)dHTj8IWlkc2m!z+H2g+KVi`)EfZ_0kp4T1RW^H!~^& z;v$U}nkNM>jpt2n+$IC2+(`r>OHMctqIC*{rg6P-N}96abhAZUg&{i1>yL19Wkl!F zbD7@Up(mQiI-GK|lOp|EAdlG4B_|M~5!_Nq2E@phnc$qzs$m!-d2T4pT5D?g&bgGm z+v6n`9GGIDIds5Cl4+3va}dKM5Kd3FNF{>8C{twJd+!LMXQIwZ@yN)M9nc4lqarR%EUC53J9O&$qh& zB;|F-ahJancjOA<@~!tH3oDC@?B3KSFFYC|E*doKTudc^ zgdQ=bkR;$ln(9^~ zO{8HRU*Fa#B#Gc>k9oC@7q3aY4G0n$dTf>erialLN1b~{0VKfS@83uN!|%J_4&(i8 zj|2c?Q}p1!z|>CMhHoUVN}LEGK}OAZ%<+g?%o$p*Jm4~PsZYoj$hWUY3ZagXC>e~V zS^4M+n|IH0%l3@DuYMWJ?|u*Up+oo4zSPvm-jCCqY|GbEYi%%Nbf^k0&HPhY>pXRw zJvD#%1~E{b1Pz9{Pb@h~RFD`^U*NJFt(CaH$Lhkh8VhlJ{3;w0 zFv(4UOpQR8tqC@4@?WaYRgu~WWX)na5j`Rrq+W{2eAcoy=VMHIbLo9nPB3AixMuQ@ zIbZ7)j>*9f5IQ>Pv3Cs;k{0;Xm_`vTxQXewR4JP;L?-(Es124%iMk^JGA_aUDIjQ& zl6nn{-5>!u7D&|21WriKJ|jV*Y1i9cshz>2-3;6c4ECKQ+_<0m@FMQoe7bd?rR%9jTI~B2cY>MiT)`&VLP_ zI7vFv`??|wT?8leGTYkO=qSW^8LK_eeqcv$kt479pZDKEeE{r{0AP)2=n)RSpM2BP 
zseA{kF54mUsC!-c*#w|BZ}DvGoO%v80yTn8oADsX16d#=Yue6D1JvlBL;44fNj}ETI`D zWPhjsMe}0f1kF0gprZQ1Z7d&OVfKPu#O(z`gu2!=URK7$z~;8CuwVi)@s72qLGxv% z1>h)C54DK3y@^8UEQ>GKK)z8={Yl#0I4-(R2!YMuqUQZhnk{PxenwWBWd3$wqD(e3 zHUU9%T(FwFh{2f6T!ay7Bg|duQC3o*wKmFP-1iKVY=4l5ObvG+lV*&ou$lsfPG+or z7eH4c@#dqf-gJ!m=raEJXUR@pLv{F*oLV`*uk6kt|Jz%Lk%PU6D;z5UO3hguX54aO zl^q*9)V)ZSnPjzr|1w6*C(HkYD-gvxVtj;9IjWAZZ$&w}EW{Q}w{5iba5VhV8`PNh zjLHbbg&P9gJo|nu5yj&ex3oFzKS@C331)H-G@xveB*1!1+!b8CEyH<;JkGYgB zU;PBOecLy&d;4}q7`b#2xNQgv`(W|JDr{2D*xjYbtlfCAA2?n{RC6Rc2ui6iHSUyY z>isbVu1X#w?L_u#j%$_{Am6B5jspS(@f2!-Zo}yJ2~{w7KVn>0p&uh9FeRSpR>n%5 zkh0RTI*d@(Y;ENn|M{OL-U%7>;b4yh0Jk$;xsI^!?dX|5f{rpAWy<~m6&#L8w0?0J zm&23U0)bkK=|;pkh=DwBQ3pkDJi^L}MT$$# z#P40tRNJ!@qkVnT$R84Mt*JC5kJ@wrG=pfGKK?543JPb+Yt3MgLWHsj+-T+mSmfVd zv)K)Ipdr~zTMv8-+ku~5Jn@ezvUo1&fm>q$f<&3S-X7nnq%zUxq)46PdQYwOvi11x1){N zb=gQMdnKwfj)y4yTKK}@AuE+*x-`^_4g}x2!)*ePEN4*HkY)6oIe&!2M`?tZ)RY3F zl?mz)=w!mSOc{Ra!yNgWH&b78$q%h7oU3;qcRuztMY2XCs-{-j_imAt7(MGZa%?h9HIxj(|a6b)dHJL}BAh zj{d`Ys6PLh`}v69|Mo}#aC_5=c=#iDg@if~ zVro)TO#EjY5oq$fAw>deeg7^Wzjn+47?UH-#)E(oBl4d78T**19ICyMw0t!WrBYM&wz>uFeeoD=z22TmOja>d)Wp{lCAh*s+84FZf2b zzT%~9U*8#L_=hSO-Vzud9uW2iI9lOrPpAWwJ(z=CPuRqxAKgORa9s_Hl`vmg&`_kl z%R(7*T6$t-lN^{zu)mF-HivKJGEW)9xGTpd_}M+xcGk%glY?)GBp!fG1C`XNlM94A zW1x@+EIA1f2Z7#jXzet0Zk&z|OkH^;`+w)x9+2aIf7>Ggz?U#Bk5TP^7x^WRq8_zD zJBS9nUn4i$WSqK2gQDq6PFbIyU!krdx_bk$r6v#7Yt#SmGDmlB=8QQw=kMOebzk-M zFdBX7jX53r6HDTe_u{oCYx?We=r8J#Rdu6bLq)RaGicn0nb`Az+p1J z3c*Y2ad(J!fJYB4v9fR#*;(7@p0kyDrbXy@BpAe=Ya-S;j5DxiRhI@0z>aeVn_u6+ z7tT2bQ7Kd@#U%kAJ*B)+tD(2`?fg*0GD%#jyX&O>U!?Dt21e67^Gu2SaWzAYchX7{@XsfIR+y!-sOykx`Ni>0gE>sAsk+SxYu|EBlY`2!rh=6W9YqG!?5z>-s9@MLov_>UWIqcc0l_dI1I zZG?V5Qg|z9r7M~kBT+!bp&2aoVN^x@tmiXF0-wBoo~gNM29mKD6p4Xu=DG3u>+gJ{ zGLJ^6JQnS$5df#KK(2^FRh#qLye^}2%>fSn(Q6rg<|7d98zHbS$NcOJ>%aR&Z2h6{ zVs6`Z0&q0I&HHOsj|lGQ3VL*iE(ugws+-LaLIrV!vo9z(`)S*m+HMM<$AWOYR+iIz z?ow-m;Kfs5C|sVLe-qqyMN+2(kWk8|rL`+*HjA2~rN`EUTEyg(6Q8J1Xf#a3+4>ArNx7`O~FWp@y6{gt1wflOZk_DBG52Nvc(2e?_Y 
z_+xJQu9w`W`=dIdI{E>!7+~XzsVR)+sx_i{a`24+5KLhHQ~y79QRBLJToVi=7(kk~ zo;6ThliG{JOPkmqQeJnE>hN)7?`EdT(Ehn#}BlbV35v9 zR5~II#GH9dP~zBPBojx*sqlVRt^Kbl10)?^OqSQ+-I)G%G7&KvjE}>_bvMC7^D&JA z-~eky%4O2FcZ$o8jAt4+1yre!q*P{OgfmR}aOk1qguFIVpB(McKXjbc14nTSE1(W( z&mi55sZ1H&@K=lud>pM;zSQM$KiNHV{s7l}>Z5$q@a42y1+5y6Ec9@5Gq}MP{^-3| zvb1^*-}yDW*f{Md`$`NtzBHYKsE(je_G+p;W4Z15^!2NJR?L- zAN|*VyVGrI?>@u6Kr5PSt5lH8YF9m)Wo;|8)_c=w9Q=ns1BikL%_a% zV6jq`U5401bqW=_(YO(-FotPzZ3KU;yw}wvoRt18OQ2BevYJjZn?n(X&P2F0x-`ea zk>?gbEQVZPB%XNq=q}uqJrV%i zk!d6z{C8yCg=qH~M0Lbe3ILKKptGLvra!(R)B%~ZmE3tDo-nZ>w{;V_gCJA%KRF;@ zH{$TKTTFk|rEK_-A7YByyu+ zuaj~>Q!*HnAQZ&o#Kvv9FsvaQ z>Je^PChS`z96Am)xKBycC+|-cv)kAH@4fB6?n(~aaDw*UO}GK% zg=eX%=x*A`CfdB=iv8StbcL^f%nmNtwT=zb7GU|10&`$c!_igGZAW^1YX2%9KT@-9 z^BiYvoWn_^Eyh`CbqbCgEZKL>9gpu8=jfh$9z88Vpc?~y9EHm$RoT<=%w%xv&2Q)C zH~lGb>EvlS4yVhC9Xr_bgD+wI%f6jG0-pyydRS>)zryI|p5gr~f|f1tUC1ia0qNwp zSP}>GL_cKif(~ardpldVWE=yQD&_LhZn`$gOfb={DOGyWN@NAl)r6uo-BXyJ2Sh4S zrg5x_wP?<&rjCp}b@SBZP6!SbQRax$s^rcybOO21axkhRWf{?7L{|eVLsDCgp)z=n zcuzwY$UGD|gil_<(m%cPfjBJ>n0>|10=O&l8=j0@@(a|%M5QJq(wq&@F`!7(5FM*Y zH;{V6t8RpC{LB>XZ5tQ}#J)nO&(`mmLe7B;fscRFOBjCs3-`lb32x()kex3f^ab&P zic1-Kg91!`igteOdY zjl{5%$$LsPbq^RPpdg6?C6b_t4B6*(T+-W5ie{rk{A;XH?rXXOTF7|aTIZmZL0kqs zvdHp*d8z{^VY$X>Bx=O@t^qPRv^#C|`nNMY@O}vQ{Qw^~b5B^wFRXGC%wIb;yC(%NKWPFz{QZ;kAE#%9(%dX{zu$AOhTEDa+gZ*wR>=vKz? 
z&sC{HlhscWi%eZAr|Bu27r>M{RmWX6eP@Arrjs_>QlOB;2=W$D}&fILH)cs@kZ}**4Y2_0Y;;(1){zZ2#U3?3#gUx#9~?d?vBizhCw&pL-mB z{gWBC9&JwESsm2OoU;-^@>GYP=&T918yuuUGiP_~1QHp}#CRq!xph>eukqF@L=5IE zLj?qwohIKnjo-1J&Ytz?G%Th=+RSBuNpLmhCprrNaxr3`y{HLHCT)OCAkeG}4HD2i zR;Qm)CV|cPebT&V%|6M5E6@ZQ_PC7a516P>3Jj!~ApjkxnMoa06I9 z(W5weg8uv}_2GG_6mixI?^2KwB1rCOpSYa*(1$5cUVV2igD=zUx{!5eH@s0h=l3uG zaIFq;bjN|pe}CscF>}UtP8^1XQaQRbBnDwv3jNx%xZ2~^Yp;S>g9fIvz?SJQM|#kA z9hlp|_8mK!oAFFfIkrs0mO|-JlXX1Z8OMfdz)wHrsdv7e=UmF}*ZwKTWW>O?pqn$7 z!G>FI<;d%Qi~bdt-=qD$FHJ;fJ>#i7?)AUQj`bV36*zbs9JywRc=HnOSP5lD)B^H0 zj?9Q8P7FqOP`Zj(_Mtss{bd_D^NE|uwg?NrqADkbC6qa$R(ID}Q`BNDx(YW zd`>vWG*GT-Md~8sdcOtaF=_o_(&-k)7a2 znx7dY;3X1-6ae_dHV$l@${75?@6vzwec6%q;Iu~qfV*LmO;eovTv*&p0J5}BI95H(qTft*+}1r;-ZOj zL@=^csG#uVTc#=YZi4M|#HpODZBTln#jg!#5kmk9QXAmf5q~o=dNSs|5~`aod}6h8V4`qQo)2LFo{62#B|QohR0;NiS>*C9YQ?bLyj*} z9bTY5z5;{NIx<{lh!cXS$K#3K(l_hDjne?ZTIpF4Z3m0^zvap~@p{oMMk zJmyDU#fckkqpU-yqv!F z*wL9{Dk#VQ;*VMSzkh+2_jOmr-M7{gAIF8Se=Xa0uHy*s*_%eVE9Z$f1l)YYg$P-T z+GTdXI-dq;d78eNs6(h0Y3=TE_SbG={rO!=;E))vL$o2eKpNivUAO4bjAsSd8)rP^OoiISa@F}GFODKMy=Cf(OUFhNfzGQ< z-*S?ie-gk^wa+$<1&8;AiGC~%sg5pD9XO7jT!m5UENEx=mJ#QiOk7TEGJCP!1Se_Q zp-kBHjr6V)7^cT{t@BSGp)OjSBQl=A=aM>POvfP;A=1ncuo|?rA*SuQo%4^W8^Alk zdnj_-fiMV^NBVHf3e}YdC_i^I^?`Zf@(60d8#m3^k zj0~%a%RSq+<$Uyy|B^5K)8F6eCcCL=cK)|t~8tHY2wMkDQEF{5(0fvm_ zB1C7cfFdyE(tY8qEpRHcx|mxEtS;3IP7HDLfz_pe3_u5_a;nau33jU;C= z9ZH5yWC-NKrW{VZ`p3|fr3d14J>d2T3Gi-OT)K+-(7%zLbq;k1;MxXvsuCMEfTY^L z7@;gC0TSoTK?sVk1y4pb=)=*P*7hyb#WcN7uClNvLoV{1^R3UOy85LY{F67`Kl@f4 zVRZ0s$$K}`IpfC}^eZY?r6|Cm>TQOARG56PLqu>fq%p#&^Fb$pK-7p6u^Tf+%Q6y7 zUZ>!DJWPl*pTLdVwsDDGr>f@oREaA!VR?bD?<92E6k9fuZ=WOISXdij$1%tSZNv_P zNj5d4yhh|Fs@k+bCXbgAzzao`Q=omPX($wHh<*vKH!guDwL*JOoy3CdbqOI!!-!hN zI3l1`L${}lRz}3-RfZ=P8691Pl@XM667KilGg1^|5R#8LwQNUyhUz*db6@`CwN$qJ|kg=Lo+&~724K$Z>BcA(X2(djr^Vk40U!4Y%M z=5=t@-@KoHd);g9a<8-JzKFPcD`%}n4!z>X7=HFn1_s^F=6CO4_iKNhJx_Tw#|QAa z&kR_(rib3L2un3CpCM)$NJvGyXrgA$8BtY@uanh7V(i0q&sope&8~}EtN=G*e3v7t 
zDd-nUuT0mTRV;JYAXF|X{fU5AM3t^aYG8;`XrMruDokm$s+CR}yPE}A3d+n#7?esJ zj992DD7{Uz3K1WLiVQ?-Hc1xoUI|h0#MEsx84wh(xetvxAZq&m;MrWj>d*cddicQo zeYzer_DBif9+*gr*111Gx$%X>kl`c}#o%%$fqE<}t78-Zz&STo&h=4Hb<{qB2Bgy> z-?Wj^1vqQK*)N`Ec8g;h@Szv|0KHFt{{GxUeD^}KO;2Ss`xKDUIDxT?6GxCCXoZN? z)yyTj@q57{@22#T(s0%~1k12p8b@qplCi2{Di1^_mfbd{iq~l9gDQaABnKo#^BOc1m9OiOMT+vkkN9F>cj%{M}}#qxxX$rqW9yVp;>I3<15aiRiJ2l z~pu1GwMs-o>MT=hx{x`BB`sFLLzLN8xyvV!np?1+rV- zNB^^b1=aGMe$J&Y_;$YI7k`$;B^cC7KSuhanpozHq9R#Ms0*eXY~7%gCG^yxu10IX znTq_jE`0b6ujh()-F2YtHgDrmZ~q4lefqPU_}@Q|&Ob;3>$hhdCDKP(Qs^V( zBCHY#b;Q-mbe*vptyNZo@dol}q}@_T>VAn=aEYI*cq0fHLB% z8tO=hfEL!aCu!Vo(|#<1_d-a-?CC@jtWu(3nk|yb0CxYhoig;$?RdKX{%@Rk?a$oL zC+xvxj|2es(4^QzasI1e>amQdL26tSa3N^`BrZT~h-sQy28|IO8YkmYSs(;_s~~Qg zLt{z%(aIU$H^auv(9lnO)e8vw_usGk=vsTpw?2z{?kR*INNgF#;VROY!}06@k0$nf zMo1KSH|7fn;1U|(#u>jxH-_VbYX8-;H%shf1P|>Nj4~nf=FHmw&XSBqWvo_cIa4dk)Y#1qJkprqa%0T7Obci05xq5 zq(hmGQ<1lci=QE$_!QyTC(t;VS9t&1slnYHnd@{o=hZ*S*+2NLT(=50e_|1i^>D3e zH0J2OCD?z0PS#=g;on6Me(WwkbHmx^@x!lw6SG^pL||bdG8{zuRb&_gS*t}Jg)R}I z(sN4I27HcoK}PV8|Mf>XdetZH)}CJY%2#pXz&%N=qbVZE!CToVH20%4zgyt!Md{+HyhB{1o^otEafAaeeGyvTU#QuG(;f)` z?uGf;3z@p~w^*&ZI2}?u;{imO2mzVnXCk+o$X=mgO^Zw*?^uzyTIk$n>b%FMXE`=~ zW0x5q>qRbq+H=tPh5L0MMGCU@k0;DM32j|sfO%9hZ-Gl7TFqVz#27)nIH>J6=@IvQG!#0=2I_m^UA0b5}ykpkq?2wBP|nri7kh3 z7v$Z7eEp1#2=z+V@kr(f-cvXm8yaN76$q+$4VHxjK}&%ck!nP*4pe1{teB&BRZZxZ zXozT4n*yY&&4Bkl?VC*gMSye}#v^M%;L2wwEH+PNpce(-BE1V2QuFVw)HLW2G`VJjN`emJFeh9wG z2u96F_m-s5)?-Kwuo1srCIcR_bWe;h!mz>pHdeV&1U2C-pE>~&FUfgo51j&bmVIvi z(QhRl`^swh`%?Bu0B|oY+wt|t-XFt-7EvOq0h}YOfq@%>yh|7KDGg;Vm52pIBE$f# z4u0JL~ft37CECiL?8kAIXa-}lcP`ur7?4;J|TUf78L%J88h}HZNoO+@JVm-ud7;)ZPk*YG`j%+wYOHa@ih%*#VWkMW| z>7G$T(MV`w1NB>nj!Yg0kY*T`rjDgx+8JVV0zhcHNf0qam{DhXA__ZlSp2pBO8=7| zx_?jHgVY`g0Pd~%4l`$d4{^(PFbFzs3CtQcxMK|xkl6EU>fBA=y7kuw)Fs8dA~$Hb zHR9~=U(e>pwip3&+ajNN)^kl7?|$FcDctO%DCRC8PCcC}2%7e{(dlpGZlj0|+n@<} zn%2McIx%_Hv85YzOaz46AprKdKI%A2R*l5mlcxYcQ)XjM+OmPD#w!?4-Sdny?=ecC 
zF>wk~(ck+uXhOpDQ($AhUlJ7GgmT^pXAN=PIXp4V1c+((Or$46#=<8qf~d!-qe_dR zBcuSqr;S#o3Z**yAL#s*R7=-j(z6e46X)3a-QUh*e&c6Y20nUQ1vmF#kb!SmW*Y1~ z5o=_1fIB(B9Ul=!0WB+LVqo}%w^H8v5eWAjqV;)!pIx8kS_NV`APgQ{QoQ%n^6lGr z*1x}p%%+a-Dsd@0NYxe|Dv62 zJ=-x442e`IB{jfUPW|R!w~8!$A@$u$A*aK$8C5mjh4A5 zIrTP5rCB1=Cbi3^^fjTq7%GrjYMRgTV}g}U2W+R7TMHgCa;Z}3!861`0o%JU`hRa_ z;ZJ_&{ycRLYI`IA_%ck2txTQw6ZB@EjOvI?YodCbdTJMKAVovoYT(?I$%@FhRRu6O zS*#U~R*T$wsvQINeBXLDp5I~ySo-|+eEyrh<9^@I#$2(LV(YW0)_*NQD?pLxaO%m_ zYIDKaxJ`(uuef31Drhii;sOrE`B`2ykQ+rNRPICG0iZKNf2rxKGY}! zI*2}Pqr*w0b`^wxb3#^3JIZEV`Qo?9Z^_Ju%wyn@gjJ+I2~-N?BIqgNmvbC>95l%LB50g_bp0EUCZv*TjRLKR|46<*%;tq7!&Kz<}Mb3CZe7=e7Z-2D6Mo8j8Y4&AKx;~ z1V5ogWR#w>a)AA8BjW*DL2Mwi^Gi0ntMyCy(&tJjGUwCO`M_ zc#5!*Xe8j$wuGc`Xf_T?L~*IyQH#NxP9{KseNBS+A(bI=oR9c}|42A_6}oiu{n*y` z<(W9g_8)sGkAL-#vj}|RNMv-=2v&^m;MAN^ra)A(P@6bX1h=rta9G-KgOvf}a0FHYlDEiep-QD95gnJA z05a#sPE{qz;HGQc`I_jK$uFJdoNqpZ=_xo$MIav!s5%*8x`wzESvgU|DB$~I(Loe~ z=4;MY4g@7hLo{<$pt!~G)^^q)=9>LhV> zA6b|u_Z79@3X)^354I>Z`$x*wecJEXXa<16_9Ej_piv3a!4OG~U)FrHIZ~INZz8cYn>X)fONBv#VM8=pr(3Mx6vRv4XaNsAu8(lbdU!S&zoIMf%oCb@lH=}VCxA}>Ls=#TUApSTWOyd#?AmWLb zw5}pa)WzCf8mHt^4Ow!J`VJf!v3zI|+0`X%@T@!UEd1OimOt>}2X3czMC{)J{Udl; zr8RpFQk99{(viyDnL z1`90QxHU=fOIJ|u`+MTS_o4m$4;i>0@7egB-@;Sg`6m=Rw({u}IC14N;UKhVL6)Hr zS_m16#uS8!ov|7apW$kIE)vmNX^Tsf4}%ST@&%#0lYIN*k!lFl;9*@75E0J)&EI72 zFTIlM-n+n!Z~Z)aAVacgoO7q3Y=YW&@}}V?p;wX!wnkp{6nTpvL$p3l{*=w^dGQ{K zB650nT$Kq6q@*%C($)Y!F>5g7tg&FjQe1fv3(vT4e*@Jl~Q zb=~Lg)AROFwMPPg(`0ewHVW}%bC0D8E=>na5FIXMgH48O*`P=>R7b(3XM`kasfa7U z7l`EOQlxiqM82cTaHDX>V=f}!y@!Q=eg6Zx-x`*QtJk4RpTx@w+3cf;;?2>DORbO- z=bDg7d#_>v(V8sUr{I7@oNU;V_C7b(SeP6?(qzCjA3R16nvG10W@^P#Z3KwcZqH<~ zQUw$!h-gsDWFutJ z6eg=tg%3^ZWttG_AQ14|&c|sD(y^|PFyeNFm4~#+>>L;TzrSJX36JG7e{wU+A2@)7 z86@w34@3{qxv_3_gJEeDBgz4JLdmTD@4>Z@7>Qw#nWt@K`wRCXEm#Fwc!tZ4{Wn(( z4%WmKAY?dcP5ABaQ;b*jvDA z`W1#6xP;5OCaA{9g6%}K&cs&Knn>v^NvNCLTb;KVrz>Nv{8W$AHPW|@Q%qa{5dq09!<7iqhQ*rI14GvB?BX}V z){DwH?w8twf? 
zzku|ehA1eY&OlJ7xoNEvkyxWrfGfZ&-PKQ{N8d-d`5lz=pF~F|ct|-g5w?8ScXG+w z{*Z0kHt+$3V_zIl-8O`(3we9|QU@vUG8CZ3#y%S^5{)|%Dl`W2x+bV6ch1D5LZJ3y z=ORW!dNPmJ6=CKK{D#NiHeE(WiTExWQ%A}JbXZP@WxLMgv48i^jNCe|ddv0bXO1C$ z7Uv7Rj!ai0CO(TAVkN3%!N{|k;@g;#35%+8P$ja!%+q%;_f0#XV-qOrRam%f$o&4A z)=3AoLtRcAPlW}sEo`WkV?wYEY8;wOCl0^i=GeRsd!4`C{)JTa%F?D4Vv2nkDf1V` z#J~a=$pjG?tAzq>v7oXk$W_kz{+$yl1dpR7j$+o~KKwzJfB)C-+Y|Q?w?{~Tr`uXP zDb9Z>^}1)EAyu^ntbe_>I(1CMow3NQJwNUOz?iz@B-T09IdXF9vX7qQxZnpj(mzw# zfpGjC|He(PdN4^^3a+(-Y~$mR_I70bV;F`Ogr;XMm_wHcu1sx#SrRBQXWnEnr*sXh zAqF;?Mhxb>W0stL;!(u0lHersh-r9CQ(hP=6pEExVDfkqeT>D6nsLUO1XTp9)2Ge> z3s?%!EcF=HWPV64P#^yoVd+L#yykwI(s#P7se6Fmw2=${^!4n1+(q0999@Vk++4v@ zh8IsJu$HpeLf4E~HUgLzYyE?YOoeI`h*dyFkycOe>d~^IzqACwk&gmyR1roMT1P@% zWW(V`f$5JlyFF5qs~|f2_UZoSP-7 zvZPW~R%N_;O84fu=fsH<5hwOq?|Rp}4*%ADaID3zG$gT(6eo*XIw&D`7r7h_N2f@p z!5T@llEBrd)ZnJ<`{Ml^`1)&bW7F|%9!s1(x`kg?BD?VmE)38m^DBBK{|>>QDqc;H zH$rl|-LlBfM12bgqw};IbEka?pllUZq{8u`Mq-M`p zIC$bX5B$(yf%(NVq48qGzyLhmh@0rfVVXVH62{j6Y98cbJ?945@t=qZc~Um85Xq`t zarV{pf{PF}Ax#5kBc~2;viqtL?d6_>FTI78S9~64{_F2QTc7#~(6~w5yqC0bFH9bz zCNSD{12J_rc2hFjArAXr)&d}SEdn|gG(P0Oz@BI1AJoor4NWOi?9RN#myWe&GC?z& z;A9~zDzgn2&z8P|KrS)#|JEe|VCmsp?imyX@4yE}+aJN5em~uVzd<;g*d<%MK-)YMsIYWRYP0s%bA2G>8c5 zY=ka2GmEJzG@gSS6W4vmb@*Fh51@~&bJrJtIqBIj3wjcWG`Qvf^`4ty^(MN@zM9Ag zQj3#F3dt6XY!KG3*h^V|&usP^rPm0!MDDp?Fb38YkpJCZ`7`zzeS7kkvoJ%px<@B097*0$@~K8L8T z_#&Dczm-XS8&Kg=19>02!>fo0`8Z!xYOeog7eE^_Y_oR7<$s#aF z%UNB&h-XbeDHBkNEozGYQe2U;u-ijEtEaOOpWTQv+mLGz_8On4eT$Qh`1qfYrl-)^ z3DVYKG&~Rbj77NmyS|Mpf9l5=1NTRmK00OlaEG6diB%0Ej1rmZ&E_xpR4f(v6kZf?KlMviOT+aple$BorsYkn8;>>RY_7S6nQ-_#N3{O%X{{ zfUbXxZu%&?@ephuLA%r0diD8%>V|81#jpMh`);^_2Z7D?#Pq?I>6wK0<2+X#(2kLM z=7kke`XJzm0f33QJWBMDQBp#T_%51J(k!5DLSv%ZZtqkwKhRQvT1QKLYr_g z5i}u5Z7_`_nv)+Up1P0jq4%O;`YayBPmkSS`PaDlM}Lfa-u)=^KffP0UjerQZbb44 zNge7_Pwf|!VAt#;9JM+^HpXz8t#p~;u2|)o?|C5yZ}lACR!-dAkWJzy zD#IJ*21YgIZ8eaoMH zA5*^Oe0B!RW?w#+NgeF!Kqnpi`cJd{N5A@6pPx$!LjdqOhoiaaTWD_j0k&JL#vgO* 
zU)3T}A(>%HsZAFLLBvLDy0pmp)#tjoIBT;aB4L6ajO_o${mkFE$}PZL;lXeI0m27A z^sIjRrv^c!Lh4<}>Q%VrGIV?;?((lBx;4ZpNt6^__Vdqoj(#MtE)ge6@#8OoDN{%X zd!AN+2Qjbnsuq%WxEOP-A>q>|`uOi6VVy8}oHRQPafUuudi9?I+-S_fAN)RU{n78G z1wKAk{E5WYktu99c;ECwKU*XsB*#1lrF;^x7?Kx#EtHC61}s^}Mo6TjxB#hB>Z#cP z`z|s{u(35~MMlIf(uP3e1Wih}VGi{v&T$&4t1;D)4=}yw?Szx}JhKPxDPr{cFXx&c z_}@8v_gUhH?}pCdvH&(@%?~bjqU2TzuL9onvog$EMDh6SRvA(6fEHFK?EmYp=DOGJ z;jwj?-nYqgOCYTPjuD9TL}oeTk1E*)SXpt7t&uY}AwiWF{rOf$J#T3~_&q=_l>sNSYWE%mzr6E?G$-oxYuRwn3Vn0RkE}(Kv;6os=d#r&{$s12nIB z4X^(B|AWzGt2_=oa5|z7PGDQ`j3Cwh4ZojxB6_aK9Z3kTTh!Dctv>VjB)uP;4>j*EQu|My>Eu98QZv>!IiZO3pmC3L~J`4&pGf{U(D{m ze2^nEaCgqpNg%4jYhw$2lC0nd`@Uq_-s)v$!7DK8J2w^!cVP&Y1Avip*>czF^Pb!h zAd<6qD=`?;P|8KpEB;3a;>=mFR%HhGJhUr{uuC0Vnizw1Lxn`QJw>-#^z4KxHmn}~ zI9tE*&q%Y2`}|*w7y^K22qG)2zT`&;*L*qiPN<^fTHc)GBQ7=zR;^_F&O~vNzYyx4 zs985R-E6aw63$OZU$DmIU$e^UL17PY^5_05$A9uCpNB&*j4`lGekl{tBAzTh0BUr9 zUWi0(e6te!$980{mkLHBZu#*aK3{_|hV08wqe-cz9c_q6gleW}+cI(ub(@%WEj|h%rhegA ziNZK2Tkrf?W{3WWi`_bT<>h~g%f98Ga`Mno<`15PSR>5}8rxnWUmX6dUxI#kR7B8f z>3i>79*-?~oOBEG8P#99k;}jDdb$oye|#OzRCwPIlet#LVxslQE_J897%<2pBZnu7 z{(>$A17g=jndn#A|N3@7u~{KNfBnEBu!vc&GtQ6C5Tp2)Sc%v{v(?d^+e9Zbm`6wv z>JeAYH#qrA|1;^-b7M^Q#fBjOcqUQrXYZ?ioXyoYl0t;a#=xrN5;U5J1m&B6H31tP z=CTQhTMQUdG>_i3Z;SdGLV{|W>Sc*5-g1D+O^&O9>0>jF{P*8PJo@;v_xT50G+6n< zH*)iT{8wDJdzCv9oIO0HeH@rK_-F%~0Ol_D?_-p1SgQBiV|jqf4)r!zhlG`3JmhYH z{5WcXSF)dL>Y28QaT}Pn37tj8T|kpiNky2?@d_aVlR2Z+J=E(bncV(vW*_;FJh2J! 
z;)1{SRvKF*rG07M+ReVXxW_jB#{ zeIDDF3#adz6CVYr5NA_55z}Olly2#cpRAo=M;AvQ$jK88eSn2H@Vs5E&ISoC zz%z+7qdWN*G`kNH_TFY{caJUV$;RY(xA2lLlkN}@=X%>9&!SbXHxKfXH3Mp2o?}xs zADmE+$IP#DT(+lT_xF4UlZTHJ?zy<@qys*8@K;{JZNK)Pxb27k2IodK_in(($ES2h zT6`R#u0oN#uv_<86e~+bwQF<4!q45TTD7-&E#<2+kZG~_mofl-KRfjZufkj-bx`K* zoE2YFRkbPnH8HDg)T~oeyzgqpbdTK6=KKGIaOfkDjvEUkJ}-_JS3XnizKrX?>nAyN zc8&JlqcHbKwE`$rGQa*Zr>iU{%zxNfaT~qPk z12fW*h=wteduj#AcG-!pm&wSEmReiWt@l5O&`~DrE%)#*+WLwIfWxNmx#hY(3xl7C z`35+*@bm{3J^vzj@9>o)dC&g2Fgv|Pdu)UF=qWh0VYSQi1e8NE{_k!Xh3xW z5)o+;C%_7_3G>5S)Y>q)(X-`Y@7H`8`>%fyo9}uT8ZYk5X~5G6U$OVAzk(P4-hbw* zt1jo=Ells)W_q6zH#`j$Nz8w}Xe*;ErZ48pb7A_D7uqrwHp+Pc05;bl?pT!bpqNbU zT1em^Mq+OHj7cF0BkyqD7lUZl>Je*Q%l4sr*#5J3!r2qBcQ~oSc{63Dtg8iOg(Itk^18#%MzUfx>ymlXV9qr&y zi)S@6io!~MS%sR~z8v?Z?F)IbAB*XG4Mpy;GtF07O!X@?z7q&}>^EfHO%^b!4Ryqk zz5VmZUewXxS;0f6=*9#&eva+KXGy2F@f$OoI@C3}f|RvTQn7n_lJ>p7Oncw6I|t?Y zfT0BNEWz)(jkQ<)JvLWffPKlvxa7<)T^w~9ffs`7EMv;G>RKF~?9326`Ag_r9;@qY z?!7o<)}gB{yI${E`N}<Xlp z3M+WFLWi3Lq^XIsj(QrArY1zeZ_TNin&#L^X77D3^9SxnQbOwrk%ZTlboza0+`ecH ze~^m$iZ7(P^}CoB9|}=iG)w7H#_KbF9LeQjRax;5f^!x?sbmd6w@&kgS90zD@KR1s zVdM50Zo@+*BuTj3N}HUm`c8IgfYMS_F(U0K4J@8xCwRnCE1xs!v6KhQGT5G1Wm6tp zv~oZ;)ba{A4w(}dG^bJ@X{Gik0uCEB12FNpUfuU5Nr3p-kzW%_0D_mJC#tQIJka40(S z46ysg7+9fwc#Fw{8~EKL&R*qVwc+}2`+CB4w=)0p_d(k}i$`O?=K|7<*#F}{!pna4 zf1}yA!u@9w8}|h^?+f@a!qpx{dhzKBHh#5CU~@SuEYse~?}he{QYOBc>p~-B79q0& zXr8EO>XtyZhZP$RkI1M(Ng+)d?IK6+4Lu2+bE;08!5rvf+4VLLU7{x zYX?R)lanMV>U z2WX)QT}3Tsh*Q;`#0ZX0;GqqYc$%vkj@59*%WmeXzy3XJ-E|k?;lt1LAsFyE!rE)! 
zz^netuW`edy_!dXyN(2=hc*eP1Y`wg#dwx3D{9D#_96hVUWsx;&a>L{!lZqDc;O4L zS*wztVVQ0}BYtVJo7&I0!YD>iho;3y>}E7(*G9|s2kvD0-uJ`yCVq7lhtCReMNYpL z!e$-|{Fy^^LBsctJwc53SwaIkJ&DfFFxvleQgnC^^xs)K5sC!?64|x$)6${SP&T-1j&L}OK^t@^xc!a!=3#9>8 zsIyaGLk{xF;SuN%#;U}uV_NebqL7?tx38$?4!1rho!Dmc;p6CIXGv#UXpMoY#F4~C zrnlJxp^P1NK&Tw6TaTglyo2e7f9L5vFPAum62L_Of8bS&U;U4ma1|<0i{hlBmE7j1 zWhAiOAT=-gIB#`UV9_E3wN!MH>xfxEt(lpEgkY90VipGa&z+7V7hNFFN3npU}GtIRUW^;Mvj zg`dBJfVX zV{HEEN6|xvVI<%KZr?~_va3xp%-OuA_txRuUhjnmn49WL3{5Ibc*3|m!&4(;^VLSml%L8?} zJ_Kqp1D-P3qLQrMU%D>9P9@H<%R9Obs!o|~%+S+Q+{rVfdB8~xS@37}#*ida{*u5Y zywxjA4^r2xuGHwe{sYqw{N`tM><2s%LjZ7*z#V)ot1tOGY>i(?Edi3b{mq?Nz0cVg zUBUMOfOjqr3bBlhq(m=>tIREnh}J&4V@1RXQELcdf~kFTnlBhJ{?awJ_rO75cZ3uF z=I7Y>cmDw$$^y?B_-k+Az~BG7T=T7enFGL`Q#gHi28TOn6<4nSHAzCgVLPJQ4Y(HR z>%U3L4PCRdn>}8%0R8*r)<53|5cbFDT&@kR;{uem6we^(W8$hbvxPz}FUiDXCG|ChI6sO7B@|K2RmFbgC z6UjSLn8B`=Yk%Yoth{oS`|qCO4$T<5F%vH|2raH3oaMBXc1i${s0F(G_Yvjy)DN~S z+5jg)UB;H@z^zhcI|qhfrawUe=h$64rU6D<9UCVnaAq4eTZr0sj^OIv=m)Zf#N{Dp zB+L~_v+b|I>KeOl{}s00{qsFbGT`aM5CB|6a0g${u2=m%Hk+Fu#oQibUp%MDCWNIL zT%hyTIiN19hXD|%O3RSTHlxKc!F|C#c&TGLf@>9apN%~H z6F~khR~P|ZWPN12-%?B1(f+4>pkU(~ZDFfK2w6B*X3np=201%t6YoH$$I$fDsQ6K8e$HrMDwzo0LIt_JO`fGaIQ!D7yU%PdbV!qizu z5?KT~&O0=7X0sVJ;l)vV@Ey4INIJQ}^wbv2S`rRNZI_u_lnuNp9X8mk4*-N*j|zC_ zi0W8bsqr8AHMZaV3+P2|^z>ZCpaHmupp(Z4+ecZu;+4!9LsgM0`*y?Cno28LPd3+Z zc$epcn^^$jd~Xs&-;M+7JY*^_IE5;j6Iog%98IJn+tk|uSsmfmV7kk3#n-%vwQu}7 zCLj0+>Ey*fSm;uMyW%RY{+{n;@6Y`!UjF)*@?e6^y7;A-})|gukGdSfA;53X@5v_NG;WsuP5O^ z6Qr#C7scf%ib)j3jyLl;T;aIlufKwJU&XnPPjF`hsTvS5$3TQtlop$nQ8WeR^rX^J zq{X0jQRjCeRB8c)VCTt;_WVxD0*#~<5!ma5)ImDF#q7b8goif?r)Q)tS^HjoXB878 zsMvA0RzMSkI{W=rupac5^L+ z8KVTPcecnYz`_c2`F`bmuCEGo4paTO+(y*j;K1nJ1XOm@iHTGP)Xm}Anq9AL=w3b| z?6a)!8enqAeH{Ife}o=-_!&ML1C~)=c^!NH_7AfAJKw_o5o`k6XCfyb>DZ2LZ!+J-om|2m#BaH9cS6!6dRnh)7sY-gMN?;#$usfk8OiM77>qrnAc6RNMiiEDr8X0{(vPTjMOi`9;87TUjL{l(a){0+1pD_o3#obrTn zPycsk-5Nc%PA@WN? 
z7KoKH^WtxdVtBb`B>TN*@n4zlCyTytV*qTP{9F<^^PM-uEK0Q+v^WUbQu&J7kCCH+ z$)Dc?_dH^Yhp5R3+s>GT*$nCCc5R15b@FY*?ME({`ghLp8^84{e9h}##0aeLl_E&P zCpWp}f&+m5uxYaIVcyz`lC!4gOfb5a`6Ub9zg0k5)Ua+8Z(pRAfnO|2lV~|H91+ay`pJqnh*aX+aLI~ zi@de-`GFw-xQJ0*_B!^w?#DPI*MLTAk5W%7vMZmEbf4^@C-+CLj z{O!Mm|He15-@^oWYy-|d+R+_V*i4|po@&I5yjY6~vnr9D*&IuYzrqSIb>;7x0|I$M zz-UG>C6L%T8@^2aqtEh^m8P-)(2Jv2m!U-G*-ZsU)|tNNZo;u`s;VJ%6U%s0#J4RP zJJNhcr8X-gi6V!8^C@Nj?|jD}^2N7ZMGLF~HZUkymP}2aw|m|}zrc<6)qs+rF1Ga* z-|=^N`=4HTNub*QD&*$xp-cI@m#Yr_`3Q<^ZBW1ECa(IsuV8W{aO#d34I^3?bCAC{ zw@~W(g#}m+0u?iS5*jD{B)>`)``-d<<=Szu($we-HjYhk$2ZV3TabiQ*VIe3{CR2h z%$k9y%gh7RF1r!h$D&5EFc5GRYfh05{|Z|l{N;i6e}*t<04`#r>EpB~KF0p*Uq>7r zfRN~%vmRlkA0FZ)w*qD}pwdh&SYLMERV3yME>2J{B=saJ^BW+L^7vL)bue=1M9{~D z?%r+O=?->{nXD>}7k1rtBUgXVx3lXjzMSsR19V4^4=UZK3DS&Nd*d6q@@M`v*Z-sM zLtl0iXC0jAVEw_CQy=T_ha+j`X{v@;SIpJy^W&2L(mH#TTL>!b^09kt?CR|fdR4S`s3;3#GPw<4lc#O%rKT0|~0at@0 zQnE4GO;9uj%l?v0Yg8w2{2eek3L3Z&{Lvr%SHASc*D?p{94D(dI`O0|Pm|@lEb#lP2QWEMb0sWX#fb@yC05JxLZUi~7Bo0=38?U^Y& zyiRxc3^Iw<-%&ZJ^1Y}8jgk=2%t9dfzVq!+qqL z!oUDL2jF+z$i6rH6E@dgOWSpbhJFmNW&4q0%wlZ;(yOtHFiVi!x|0Y_Kzvq*Yp>Qe zI3Sv}FoKhs5a#ySUQct2WA7I>oVwX_9k2&e2Y1IO9QmF9!nt4i^??=mj3D^yZ)D$h zeJ59b%h$1XSTk9n9)Z)xrm&tMRy3|5NUpUzv+XU}Uy~{1pxMq>>1};eUly{& zyb?hi*%Kf?E?E`c&!9IA$w-8GwrCrPGS5ll2+f(KWXj(@PbYb9yeQ+kVs$$)|IlH! 
zKl%_3hkBuoffNHll}ZFsLT3|Vm_zLF3YB+s5B)M}cI<+V&n7*p`Mj6x-n{xjN#26JabkzvGkBzO%uzG) zL68$OwjSStb6e1X^L1~@zvxa7NDeQND9Qeg!{(7G-Qxd!LiA;I zS}>8{V&w`x)2>D4o!9b8bgA|)eW_ptVcUb#)EmHzD5F}727ECc&V1cVDWC)hibtru zFcUQjR}}9mYH`#jrc6J0FX^FmNFJ>%&=ie!{k3x{Cy*j(Iw8dtQlL&kT?^BDein_} zPkKCG^Xgag{@?w1=D=KG7s@Ex+R73`wL|UyGzI`b4l>}>xpl65<(v8zyH5*0qIt>R zr}g`wD&+}*)PdjPzz@8EwJ*DhBOf}AJ6(ZW#WWUyq*l&r5Zc~cNBcmaQhF1gl>0YV zqZqT}Y@Bn5fi$XF^COx`v#SF>BsFniz6(cSa5&;GyWa zry#Ioc9L)4R-X%fCjb-;fl{X4Vs ztrZ*O;<5m(1OQI56%~D5-%0|DdTsPB2c#Wi2e=X3f0o$?9zagd(Wb_$qAA(phK#FGl6bKR#sdDAkT)+FQAvXv+Ael>eU*B^sf_5aQ_`Q2!l9=5Y zncX*s+aqf)s_AYW(OwUadf49x`@ZuVxbi!`0lzup)LY-m*3bVM>AnX@-G$Y1pD*yM zyYRQYh%3JPJK6Q7FCrYM*bkfq?grLlV)|%+M=`y{1e6+WR*lk6LYvpwRD!y^T9qRi z)V|QO+)rMD0+tqq(*GSurAxGL8Pk-gd)go4aB~m$2+5iOb}X0+lb}2vRuvyXR!8V| zi{AMl-6xJA+YZmjSOX14^@~G9(Rm;%{fG(jP+~;YSc>iAPrCR6{ICD~f960VtOMFX z8v>(haf^G}{rx`)G}=e;xzq5AUiDILxb_+jKlWr6!b+MPWrZoKt7zL2ok!=J?0(5D zG*`j)$7g6eLTfv*P839`oH0R;EDXv*Kj*a90ywPAFQ|~j?1fRGsysN4oY-Ld=o$3X z6pbVB)&yF%nJCwr4fdAf9;B##_Ezx?8Dmi?F&PBXeqRvVC19E-Eu3OirZ}h}{P^`Yk!1cgT6m1f;X`HYrc|$U+`L1)?gi5^f$0YDxFp4k9D}?6VlW{ z7$fQ#0g!gEtj`@>erQ8aCw5#3ZU96#~Xy!r}J!aZ3s+3 zB`_;3YD-UyYRZ2E_Tmr+>D-LjT@TY9I*OasRCtnjf+!-IvvFgS(riYhju0Y*+^U!I zTmYRgzxQX+_~}bY&1l43_rKp{FHs58hJ+J$5CiTrm zt>s)$^9@$Y7oSWRirolnb+kfH<)tOwGKtI1WF_uT9Mb#;yG-B0tP7je_qz6DprFtGlk^LAD2z}tCZW~REugq;^0Lqg zEJIB+re+;H%8Nh{HElu^yfdf2jYrn$K6(f_J4ezADpp2NuShgz%GE-ewbr@d9U-Lr zK};wnVnJ&6_}ia!@dx>MjDv07Pgy7mVPKBW)9Q$h)1mT3{! 
zOD~E`*XOwQ<&wrUTf0AP{;2FWd06fPbt2eUyjsFbY|4Jt9G@E-Qpo3JQ zQtimrOgN}C>gZfTa&~C+ozGN%R_-Jd2~v4NvQ@^3vtg-oQR!FLnbNV>g`11E-n()^8I(@=@l84w25CyQqQ!X~r}M zuVnR_tKp{W8GXr@aQU0Qkd-}FdoDM_4%hfNxo zY?jwL;aqD=-`=L~pZ5L#=%4eSe&x5Hd~>cwMhB3qzKJ;7X7$^@h`rza`JA{TkREHP z#|^DB?L?EH#oB<{HOW=DYDArsIvpk4^;^t8^1FFB*?@}yLjdqRfE!)S z-q-#Z?X_RVcB>#Qc6(1CH5Ahy#aaR-t9J!d?914T0A(Q>tZ)PpPQdvr0?dlv{GwbQ z$Tx%%IAkm=K*4E8k``7S>VJ1sjs_8-Yv zi(hf(gC$0QrLur5wf-%=wv-XT&SL{+BvXtcvK{QQd5}@emg&}|x7?9i0Id$Zy9R$M zvHh`!m_2ebzxfD4r1p-G6jib4qY_-#L1?KeM?0TGnB%;dE+hv4q>hmjy8Hfb(){?R zJy-kp@8{!pzLNkvwyvyC6QMO@tLqPVZrCfVQkHvTBTQwmGYI&s6#|)odGO4H#(OT` z(=g6{91on_;)SpI;tSf0R4eG^UyDC*IamLKZ)Wzg1Dv{j8=2RrcQ^@{ibHTXDp^Dz zed}lb?o~Ea0jipMRMVacbPu0~vlB|7w-63yO20o4RsyKNT89xTl$1rE38l&z|)MO1n@jSnjhut2mTqWuU=>EmTzEtKC+y= zj*ucQW?ya*Dujr&aCy-_TXkB#{L?%Ckz^I1i$~7*(^3{xZb#E_v zF2SBQF`6Z+?Kz_q(Dik;AAKA*o6xSWljbd;#4Z3ziiz5L%n#0lZ7PC}8ph)_WZynk z5A0=p@N!sdn5~Z3Ua8n_YIb4$RtH1Kg*$X^8hwV;T?;g45j)oFGZ3$&1}!2uRXx=bzmNVh80y}F2^+a%>%{&*bdpiCsffCX?=zBV@92@{pdM(>>O;hG|d>V zFeeeRWpLla&%!672u(?vt(qlwj#wEWi0&9ykW@fCUOcX8I52yV^*{Ytx+Awg>E|Bs z%wq@uo-c^!n0)x3)11ABy|4N~rfD}REc*Hd>(dUsUx3|604Qo+vDTuh|B_4pcPn@1 zl}5|$(_X)9s^WCHRA6=8lKw9zEL3H4)b+Z$Y zWFvJeRVoM5{+KB>jxHv~&LO_RPle3~PasFn0TtdibRpU?$zS7?TY>u$P)|E)Zv9h2 z`QB&7pacj>iWPJOsYp0v#W6l{2dCftf1+m|ynxR<;Mu?s06d?df%bu4Bb~d4)i3x7 zqFzh#73u<#YxAfq-P*EDPy>rwpC@VTMMilMfL?;IfH8$_3T z(~*?B1{^T~&f&co6}4N5ZY#nmZzF3Z(AI%b4YPd?b_=r=p;<|+t=7a9ka41_g}p0| z>DcylRNJJyb65se#>bRQaFc9y14MeV1Ch7q76z0=FJ@rhsV128Fsa09D zQ)K2r1W|{Vno24gii2Y%C1yyTqi&hGiqZjflpWcBSW@31Olw$L#F=5rtprBvq zm(ERjYz#~?GhhWF%?j`w7_tZ&>^y1z`rZJ@xy(e^K$n#$j2nTI+noK_!=%U8ku*l7 zHo>S9y4(uru?12@PzOFGVpYMs#W~4=MlKb_L`rIDXiD}UEu!hXW%!A>;ri>?NLCJ6 zZxidY7EL2WJnP%%*lInaQDdWpCC{KoN#||Ho%c?%6e46UtAC9w#Rma;R%v6dOj=l< z+ck1@3+A;cg1AXn;$i<-0DC~QS@;)uk)J<*%iQ18H0nYsEyffZEw&R*NtRf>*@@(`ao@+fi|y zOm^h_bW_rpsl(W1V zwf*vrT>|wC=J#%}dDs2Ob_ZHzX-P6sc(U#j8&_Q}Ej#})W%tTlhwoxUWB!D?^k2Rv zyh~ijP?+79A7CrMR)Fa|G3x~HVHP821Du;X_KwQJW;%V#dg~$P<3+^%iOY 
zhhl4UivInGT2ZZ*-38m_+jx+ncCJ)CFtmg|_zLADbW6(J=H6t(#K zTX{wE`BD{B9pW5S9N~^n*?9Od+%!-%tAvzGf9PW70J10x#bY!T+0$kNg%F;@vRd*~Jh5JWrvs$2j+a|Ak$j_xEux_$y3O z1$ac04G(g0L1&6XNT@_qe5UcsDAHUu5XtG2xdb`Mr2t{a*v~oS#nx__rdKBG2Nop< zm%Qay*=Y=*4GP}c$)ZAAG=*X{Yt%s;fom2i*UYH!mPA)pOJ`1YULDa%&ieBrBvy); z6>*!IJ7QTdOB|A1LnA_qO~RXd@YxQn%sKbjTxr3lfM$n1yGe@$962kllcnl~8khdH z($l)?a=F0S^Mis`Sqzx$5cDEIwJ&goVrS4$+QL>vdJ9;WpBof*32aIJyehp@;F4ud zuH_1IH4!MzJPXd;{W$vIF?{gEdJJBXS?(6e;zD&0y`UlC5~v4a#5<3M76qzkS$-(l zzAP3X3R9F-`%i{yZH=uKHs{bq-`iUxO>D)ABj*xV>?tQGA<6vuWvSOu^8e?Lx&ys^ z^M7337jCRcW23R1#l)YetnXq=lKSJY%}_5~N`c;1xgm6Y8&V-7ndjjX*1 zsQ`Z42t83*bC4A$qv6N{-1;DqIThGqISq4a6ZC@ebp-3{>Q38Q9)co$M5(S3RBNEyk)iB9d!US|(Ur375lIIAdJG9o^>*le;< zdJc-rDq2ufAChn|n6pBim)R(= z48)@Y__gHbQ)#2NIovAy`yFQyEUQY03Qi?jR>KvMHZmzI$od5r^3=L5+W9W@ya=S` zD)Q;(?D5GR`w+&*m&Q5}vLULel2X(q16(I=_1{6sJHO0l)Wh+SnI`p}-28Kv*~`Sr z3~T+fB5XEZXU)97?G+=Gi|6@1ih>yytE%=uF}NM;Z)#_#0Dstd%B&~Y&b{VN56 zO_n45-C|_2e+3k0zf+>2E7>J#w@yQ{Y@-%l9P)mTqn-u@lIj!zJ@H|tJY(NoBDHBGRs371odR1Wc4F6!_#<72Pg>BP4#xNN8*y*pqAPFN-w3(PLr0*x=z;t$F`Qz+t04bWZ6^HGKNV zYAmbJUF2DXVj6{xX~Q)76|jRiQe5TnVKvtOHzK1o1dhf+S^5?zb`gQI<;X3>h_n@v z88NEhpz2zHk>R@gzz_FD|BU48MBjUi4;xGqNt~K;vhAobnFifD!)RoE+gZX$%sZwa z3%`b1E3}7Dq9s7)AXM{Lptwq$ML@!VeU91XyMbN&b|*8{+V`9Galx*{hTO=ZpF?sG z$rbnvh~1{FGp%p!wBRkM>R%?~e&6#GD?T_Uz6{+M!94wZgqiW5z2{ zr|*C&8)L_Kf-l)ofjAmjx*$BVr%jm=+W+V_BH%Z{MiX;8v8~cTDz1+f!;R9TcYy zT;-$WaFdzg7h(mD1^i*9SUj+AZ5a!~YlM#@tp{-a<%wy;%B%-ei%ZA-J5{%X)iW)Q zxe1({!K%D6CPvG8H^BL<_=#~=ss^hVxnxKrD>%l#bbzY1eGO39{*3&y`$;s^2cai6 z(=>Jm%B9ZgNz|`>53`A`rG$y)QPZg`se+Z!#@16QhlMI;Y0Je<^y(CKdZ-n=@7<}V zS}w&sIWB+~nIB+Ypkr%9HjwjL3^csgV)Et95N_U(f=QriYy#epO#F=Mr{IbJ-=Tfwdf_)F}E!5)xN+ zDo#HZ@I4f z|M(_r*k~(YTPjjS7GTeV`@>_Jcx3fG)Vj@Pab5|N?DAAqy0V^I{xaP&qbn=&4W4EY zsRc{udK50~`{IuDVA{<2=y)5sd#9bbC=Ya0?Z#ud@F<1^(M8J-_VpIhZe%F}28C?s zd(9>AKckvqwbCWnM#q1c>Tz7B=rej5s7M`lK>(ywceLL{TS_`*4AbN3 z-&vopSkS-r-UbSR4GR+*+5xe^>x|f@>pjde#uqsJt5ECL0K7t(lzEU~0tF@NnCaTu zIP_Qn>bGR0niFRS3Yo}d9tjfy1|Kq&iu)^(4ji9 
z4`;6@1dJ_t3{mKG`>Kv86>jidb6=riJhog6JGEqO|1iWPmdWm8fbo0P>Hglc+t`M$ za%JW48?U!(7J4(XUcte#@q#d($J%_yO? zax!$U@(4A$jI1ra{c;d0ja(m!_V2UQ zKjo(MbalAJmt37qtV3FTcX%?<@zxbym-_V-TQNA%flUM!L%widCKJ=iiV?`PUh|x< z+~F?;o2K$dFl;LjkZcLlER74XTME$_Mi%}Q_MM{|kV4lrgf$b||K0FxP%tRaS1_~t*A3Sf?54uhXzwE1l>CjemiKhZ z{s+!&1p+2^MUl?|VD+Ar<>5i7eR8IE2?vD4fVwiZ@F1!Fa*k7&d4uoMlmAkT5!QT< zSg7D7-or+7&QFW8UgsoYL>_@w2Lnwmk1R4i;z7e}Y}{Au+Hdf-B0N$upn+t~8HG7W zfk}NSJa|?kZP-}o)Vv7nE=*KyfW*$!_b(x$T>VX2oNW)!IA7IWiXm!TH%$l9f-yyx zF|9~Pt6r)?nT^;Qewp9-Me>H`Y{O>HU`ie@dz97`Z;M|qI)s%^9NYG9Fzcr~c;ZW+$T-dxdKkNe5Rg|3yB8za-Po6-&;os4qqs*WvcL z@zh#j>`sGXWPAS}qQZ6;O8MogGHw4y*Iw25tT405t>?lkFp@;c<*Ucs*#@jn3M4K! zxNDvYSuu0+;#IQaTWsA!M6amZm4?D1S(Rz#j{kARrm^cAMyUa^-^)V}229rIraFn`0>nh+`w&SFI($iDIBp| zjyF$E-WfWj?A|G>auQ|lAbbmdhUtBi`g0XU9rO`7VrC=gO~mV&!V$T{Ac5JR=`dRY z7x6wB#$K?xmDfC(0$hni5!y}q%L;NQ6iOWwBowWH3$f3%K(h2 ze0_VOvA`@MFdM56k6Vk0cMb-9129pn4liRnUixl5uFze)PsQH6;QkyY6uQ(gU}dMS z5SM}EFr_O=%k8W9_NPp7=L)CmGw1h(w@K!;1@4cF?TN|X7VCc(3K37JF*w-7)0eQ# zyXVL^UO!L;kIfuW7b!tcdn{~XixRN%UTW?B)DTKf3e z9;IaX&2Tj=RjexOwv$aR;*~_Y@x3_fZjN#tFaGobbG)tsh5q;-!i7Q@LR>F&uD&(K zs%^ehw=Me$mnU{TF<8LTaewL9U94&GkKDPU#&L{=2R;@T98YdDO83K6b#>8p~b{T|>qGJ^mmw*U3_{z^(K^B$*6HSa1Q8hK;oJ4TaOxjHyohK8dlEdu%H){3Z@#ixhK{&1_7oLw%ib*a} z>v{STXa*Z43|WSbRV>B2WBNIwidnk{BE0LB`jMwaVPO#eg}@kY*BQHk&pc#JyA3Y< z$p&@CC;xIkv^iJ%qxwWP>NIZ`v&{)`lfkRaW*Mo`E~3&$Sed4b-5#6!MuC7w?hnU4Of5{fd1HD`Hn8_f8V1J|$ZKgrH zlP=jwBp}RJG^)W6YJ58)k0r_19Rqu{x;m|wk*Q82@Tgy&yi8a5&_h?_K>^qU@xHCj^zg9><8RAa`DM!C0>R8bQf6tjknc(_k@UH^~*iVq#=ik8%+b zX3SBeh|cj(Cm>|xlRfBfUb*2uQB!QT@oW6Q!NgM;L8()f+Dh62nbZbl1qFVzY&IxF zw8L;ZQgRO+BWBbjYEMV^!z1G+l#n!;WG#T@oIsZhkSvWMn+HI?7$ku(3**Tq(g;|? z%)OW^UfpXOwXI`g{$4xmLqabMfo9m2_3jwbuPPO!) 
z$$j~jhYH8?{rN`G2Se)1>1>`Lq4Ek5$L)Y}#r++y1-2zFV)ys2#Z!zIE*|Ozv<#VYIgp0kQwFI9wKZ-N z4CGe1eyxX8goRN-c@jU^y;C3Q_7nU~Rev=TwlyF~4O^|V&|jz)(BkyI{}b`)iI3Iw zHe!-tttfD&+No0&zub~yhUQsW?%(^%v_v8l!QTNX8t=)eJHU6#ju4?~Z0G#mUJtvT zdZWnf1H+;mY^D;vwGw&bVXUL7eUK2lxC)le+S~^94k8==G59Wfy^#;G*EGBRI06X5Q*?!!oE*z{=d*nbeLg zQ#W|IE-wJ7Kh_J3@O?Shy6-+!xhcpsR1CJA{6ZaP>C6e8zYC=mzWTuofI`x(uR=$7 z{#`gf*)r*$7&M5qc;yj{vNoMFm~hyOlw zhd14$WeC0FZAx$5+Lgq*o^@9p=n-D0dd%?}wd5}>8mJ;D1pQg83{Gp%{4)>N*wFpf z5Ja@%hd)14Z~Z%}Hp6$FnW~F=m$B{`ilLC8+`6QCx3jMe!yI+&w8n5WC)JZT9?V6 zJhzfktWXRnRs~a3kN}D*8dQ76_ST=xYEM0u_RGFHf@ikygBNIJbAd#D!jcKb^c!CN ze6Zzs{1AX2j*uaBk&ECApkBU*l{P)=Yd}XH(wL+@7xQIatyMed;>F=SxmCe=D=VL_ zOSSyCeSJ&);zKYcD&b2qaZv5nn-Icz`5bNb{!bn;VNpeY8#>rM2I5!x#UTBO)P};HUmq znb<6mBB*+24FSX=JUm%uaSOKaF(jw_ACWg8Xts}_<^M`m4ETO#+73v10ZO*Ju_>txo{Q3Xu9z4D3( zJn}7G%voNv%T>;=6emEkS=b}7I+UpKZ&p^CQVeF68$4cJmdXORv{m2vx%5r;Q_vl8 zZJ=KRgrD+z{X}0;mGvJJZ1QbXDy#}>jJnWQ`0iNwvdMo-;An4l9QjEV8~}-TWts1oSO*7#_H z+n#5Q`(=_F-|)>CIh>0%CbqgJV5GJY;~vd<5okxof+MXO@%Z|5A@a>JumApYLH#P; z%CD#K?}RO&t&mv3S>8O{Y1}Cxfn0knDq)4DauS*Uv(dB5+q9RX{^WHAL*6PwNu`n@+v)pMmL}6YVd^8 znldMao`X#YO6`E$JenA(RY3)V9*$?pnfOjufoM2c-tXv$xu=LUaz~-khZruc?r!-w zsV`nN_f>k3hgdamLN6dx$CH_mmw$lQpEsULr>g$cn&wtuJD*9E?(FAZt=|-`I-5xK9_f80$oc;DPi=w%>PFuTcJq?;XNJ%*39Lp35^nj5IkB;D5lb&{=?Z?+tjgBIj zI_qI#eVQ#VyoA6MwP&)#AHPl*2YjpZ9Ix zgJa(6VqZ6}A!*p254V!ofyN zb%MdzxDBy`K|F#=oHnABqdDXNF}hU7^}iC%eTll*iDtGUMyP|sA95wn`j*L)hm1pS zX}cU`TSTzc_KKdD{>JPQ4eil%-vBx*5qoh~%b1`^8gaKb$F18~zX(F9l5!b}73Y5h zmP1er9W^s?78wzY=6_&~Bsu*5EJ?lxY~GqP}<(R|8Nya9?}$umpl(aKEQ&r`NB@0P*A*750P6l>{Q z7qeck8z~UbiR84Bt-0Qp#Zjy0 z6+DBR!1JPp2UbpD_K%j?A-Yq5*h^t)}a7_YOgJP9C-$Lj}5nEkr&o(@OPmqHyWi)FI+I53LA^(d308` z*d_wQtJI*!hz$TN}y{V+w9z| zM!@^>!dZ8FphjBzAd~yY5zQylq)TfT+^dQ=sV*kA}@L&g|ce6Zyb7A>qX z#_Sg&scew~1u=N?*=#84j}#5|WEqBh@BlluqvCCNHWL2~0oAl2@@)3|i4dSQ&nMt zL2|HL_oa*2&_1xK?~rI=v2rJq_p(H2m>C#F7W3zEN9()`x4ws|XV^W*Js^har=3V z)~3=&@b7)6_gkY@y~Gvq`Wfk2+;W89(|;a7e;$}KTL^mr&Ce@8$xf|PDcBkSBX29( 
z9nwnVHRAz}bo!?4xmuw=%kQ$;$;b2h9B z`IjQv?MqeY_I!QZwJ)fC+_^Ev>u!Jdpi_g!)YpDu+MLjxd5haaZ)sqK5xebebnC$b zC_@1WI`LcNU;N4MzG3Ev$T+kg?GIVK3`xkS*`ZG|_BRM#-JGVBdpOD)KuxH3C$Y76 ztoV`?)%|&c|54@L>}QW&_)>|)lbA-#AC|RMAXty0A6(Hd+nhHoPXxO&wI`YK%_%8q zU|_b6m6n<06K1VdSwdcvf1X84%11p# z2nxyVOdfN_zC+EJwS}S-nvU+I7-+}#xvfW5_T~lz6hJWh>mqK0Qy)P4 zJ0ztek0MX)lc$8+({~xvuH-hhl_!*y@c&}BCXqk9yWyzuF}`Wn?%Z~liWa8a%rt?K z?IGXwN^bgjCB1&PKG53)FbXyj>Qk)-YE4COp`u9C6Y3{yd}Fy|cZp5L%t~p8aJ_X) zPbgj3PDH#*pxxT<;235s@`YqN3no*Mh+)LKS|Qh3fgI!LaaO$x0Xl0#`(q1v;}<}K z0KSY-(jvi&(GuP+i4Q%PE@3g4F&raokif^tX{ih#vBZKm?b zO+%ces7eKJ4cGxn*)k%@v3BIAy;mdJVkqDf|bz z+uKbD{zq19a^_F?T2FwAk-!h?u-Ji@mf4pl@0rGURX->Q+4t)h=FHz@PmV$SAHHk+ z9~6J{TML5nr~SVN{wI^WKbHf<-o#V3+oMM(ETmj!0huty*B+4L$Qyj-V|1smL@j78 zY0tH83GKQQ!;k5_l{Ku@Y(ruKQMaS-^kIHdoI-^R${rydn{3vbIDLq+3GbWOc3VfzwM)HURWdf< zEFi_Wa?#J}yiCdkmFLLu49f*)gb?zyfe5#6DW=yf*$}gbNDMEqW|i5;#ncdePUf{| zr(;n`(k6B7=-Mr9ZBjb+ig+IQ!)Ue{lc?yutuy)W zpU0O84?o-uiPI!4ehuWS8^44S{amBnSp!*943nRa1cv>iY4x}iel{1l3$yo=dvz4r z823eayRvtv;T8F9H-7e4`tJtTc2!yol#Qm?*CfjJAo1)=mOW4Pe@fd*-f!>0BSH|H zA(m8ddTamtihIdKuiturBlWnw$An?886SqI2X4*6sys(D3Jcbfyh>L%2u?1{zQb}C z6^yY|e?H=H_*=*oB3*avmSpk7V^y1~Oh!W;$t9Mm7hYWcHC>Y%`hXhWj zVV$Cy3C56w4qoQU3#!MDkyWB-wh{SkHi$Jc0dZoUjZ-ixPyeF4CLf`jF$k6nU`zVOP`4+-KFs^OpHkcyRT&{D&Dq0jq~GhSh>}5Z&5^2HI1~yMcrfSylPg$F z3o^ZO?972nhRE&*J8u7$?S?M>u+KkOpe+qeapxA)m9D%r%Umy|=m8J~B8N^6d1)Sc zAg4$c!FEA3CwT)9s1Qrakd28IYRl4|ri*cAeKkJrn>9_6?{iNFOZ)Wp{vLxH|Gt~nDw_Pn2Oym$JImUmv12!g3lJ;A)9e{g-d z?|;0-2ER_T_kJR%b4E8p>QvLJ(j-Xqsi@4$N;1VnFwOnlrXqTVa6G-5@AwYQ{N+EN zVos!b9;)9uQ?(KL)f=$MQ6Ot*3M+-pe$%#y?kG^O)))~*Sm4HC_qPpxC^y&FSchXX zuN^W)lKt~c8V;A(KdopAw+8pP6xA~&=M=ZXkA)Q%PZo4UJP94_L0K_ZG^m47ax0>> zE!A35Wa)1irbL6&tS$(G8k@2beyhy@0a|9KR7(4X_epz#fJa$w1$foxKhHL=x5 z6Iv66R*YRkS4#rxTANf057O~T4qp{hk6rp9-fJp$?j* z=FFg>6OuIfYPf{Hj`Q22A(&bny2yVJBqSbz77dFAD;vKj3aBb86tvQMzJYc7Cv>(_Nh56vgZb zF67L(<1MybBM@r@mxwM)aa8^~Eq)BN?^5~X#k6J6Syqd*9fE6L35BPc|KXgeKR1`t=5mW`p36yfyFIoi`C0CWA;{B 
z#7r8E_NxixU(%2-!b?#zh^{tB!cR1AQ7zNB>_cXUM8d_e}4$K2J#QpKVo=GYv=^j_|bQX0Vfb z?Yo>20=vlQuw1U;Uw!^$5O`N-;+TB=a@_pG2QSj`$)}bN8xh-&*iA+fQ7GPG-a!dA z7&|RfVc(&O@P{@*NpJ1TdN-4vzXyHya;oi^_Lkhq5+-X!@+tNU+P*Z>%`aYkZBX6t zUYyD#<>l0Y^xW@OdDm|9j|1{>Ms6nj#c`%Q_km|bpiMk-6~X!B8uwE{@t-88bVhga z50yw>ryWBM0y1}C$c5?)K&qnfv3KXHtK{m!PB`Rt^;rXf-y)3`B_c|ofyopW>@5Bp zWO-xpk6IuT%QnFhLCqO*i!5<*we2FfpIpjR|o4e4iAp~^Fsf)(d*Lt>9$(? zb$fYXLd-@VI2Kx;4pmdTh>1}+Xcm&`0HVGeB67fr5KK6S?+V|hlr?~G zMpHIB<&$>eiVID1Fpjj{{+suJr&H$lJ$2$!X^Kcnf2-tPjG!?t4@cTTPHriv7Cf@) z1VI@d(t#0Hq0G?q1nUuh)>yuB*!rge4Jv`ih%cli2#pZ}J@q{F)~Ob~xTy_M7CKoI zAx1sjG1#OUD$@@LioI1|hhX7)T4Y#heh^aOg2~Z|#L)@p@IpL#f&16)iP=~u#MMgBRn_ya^#WB^uuYokuY0w6&$d)X zt;Q5l70y$gkS)G8+~xhDdfbNdDpr0Z*xVt{38v81Kz&D%MeMXWF85ty@hJnx1eTt;jT#e_Yl*nmRZFIm~y#rOrEr#sZ*8q#TT!qyHSA z)VqNbhTK87!$byV1DC`G%Inw)^&V7)_ttCd8UCT*AO}-%s$dU3`t5LQqcu8+O zbjse2DBB9}H{a^@k2kZBgG!HH;2Nx>OAsCQ9@BH4(Ei@(3oJutLE;MkPa#mKregcw zc$_YjHZFAW&VA60J~NNSqRXZMGAEKQedLcUQ2cUB)$ar*h~2!^LkZX{rn<~5N-3M` zzkJ?>mLKF}yO%dYAXaT-x6a_kh~c!;&3)gl1;%^N%(r^8bog)D^qMC($`eKg#XpEs z6|s;Q;#KVcll!pXdIJs_HPatA7{&O7;xj&6bV8~icdDY`TWkYNGP(l9>jS~ug(FXU zMDMSLNpW1eC+N+%W`|2geDLV>Khf31jE`b}@#<2i*Y!&%+42>VS`;r8&!p2OC~PTZ z2q_@p+QE9W6>i;Pa4jeEZAp6Wh`W>uNo8ak;w^mJNgtSjOE@TXk6POjxj(F5I0KHY}zl5+`E z`k38?H2s^} zHDfgkVS;X;d5o(G0ZwTx-oY}nWYMv>ThbY>Ja6rtZGV^%!^}m#oz+Y1cQMV+x znoNgv98YO8Hg45AKuCh^;`y(neD4VztFk{d+~)Sk04ah$-($whK~07(9+*O-DCV zg9&p3k8w=~j;_;2_TnS0I^M_z$c&G`al`ZJSvU}0N|m~yca|>u*!TH^hl^QvJi-Vp zYeZbP<+@!lFZwbqf@sR~gv^9Jbtxr-(@a@&Kt<-p3IRtAm_MJX$mk_no!MGI9bI@V zO};HF5r6(o3X}4q@o^gM7G$#kU?J~As0>G1JhL9P>fsD7^sZ>LlMFue4AAshQ4(Gt zBqi8a^W`D^bzjgxjH&UHc!6 zVQ~;ph<(MZ`X+jyf4xyB!#PLZu$|zbbm%91zq77pzr`s|Gi?iBRzUKMwYwGDy3+Xzh6*@T^;89V{ zkfXDG-!u@CHCyaw#Y_Xq+9|F`HBsC+T27iiq^(^5(_8%^Y{~>`f;lOWo*rE>f+x(4 zlMdW{>a0x8=Jf_(rgpIw)7h5jgwa6fW|mrvIKqVTcXFqN6lqEbIWnl5?|v@EfBT={|D*v%tGUnlp{##@57Rr`72O^te4x<5_~BusILFdMImDl@ zHg(s=)Fg{zS^XP6>0>})JyMXnm{DQSl&wH;qzQJF4XrAKh2cft{w^^HzCMJTDI86$ 
z9?f8mY{g3#exQJm6hDN8IW~o}O#o@7176|j@P?F(M&lz&^Q8}wtmVm)RI&8PCNxeO z!Q3Yh7Go84rwd0Sxp=!V3~FozM_R0jA}+4lZQv!_!rOQ84Ia5|<-g5mW^>o;M%8X7 z=skt4>e?Yp`yJzfcj)Qy)jX1U_JmZv^2hc?>TK%@lLr!`*Xw%kScbpN-Mre}oz-W^ zNFOF_z+PWanyU-h9aMjg-uLbu(Eiz$R+vQk0em+8^_~(Wx|6@5Pd~N1u1)PV3dW=v zlLF0XRb##|=;8Z~?nAXtG0|=n$le!5%W%eztGOTW>{F3sL5aro;cxfvp9RsuDk|Pp z`!LvA+k(k}+W>2BxX?EX19po7{0V$7F<|Qch zOmqh|28&c#022*#kvW{LM%_JlL~p(Pm{i*EhlCcQ_%B|8%*6szq5NI4D>CRt^4tG` zS?L4QD>tgw8i}sTn(G(9ZZkUQAz#_iVGzIl9daQ_7vWeC$@u$Yqs6^i6#FQE9w`}5 z@iI~6L4OH84(mJl0mr%=L=6jM8zUuS(1Qew>UfZELz&}OhkA-;HIs&H#bW{L6U;$= z)d(JQQPeRV$N(V-nf>+&t}*;_wU5~!+Ub&n>2(OJR0_4ZBOlg^abY{Q2*;?>zjuX-N38hd^@6l8*grV|0#+0 zQlJiBdZ|l9#ru37`zXT*55*9Wl&uIzo$oR8IBIlK|GFBmAGnIR%dt0gG_R$kSJEA>XVy(3t@u=4*2g#@dS z?tdvp=o}UI!Px%GVU5?gS;!(@AR%#4`&;CZL|GR1ny~4C7pNpl;9$Ho zNO#a624n@em!g>s9>Zbis0%1F4PQV}>0U-hHrS96QZ*iZ?c@78n6D#%aF)D;uKe<@ zP^G1TQjRHV?(lX#dWT@?YLfQ~_PV40(`G#^2Vf;S_wcgUj2Vmit7VNRK<85Gl(V8j zijRys6KZ9cjx!*Q+_K|~1Z76I9orU*u@ENJLC>F!2y=qq5N6R0avs`Yp^1RN%j|an zI$7Uxt>Zo_0myifwKwM`!3aBHg%d73_76yCW~Kk>C@vFBk=S6+0x5P|yycf7F>iE& zB@XzW6%L5Gnc2KuL{-SI8eygN=XZ6r-6sV%aV~6VWy5P2iDFbJ*u``He?qZ}1Qg0K zfR>r7W5R)TF|OvbtfId^kVz{4+fv;=7%JQtPA~9(QvUCU z9gM;M^zVN+rjq{8GlLf(x550^E%<-$qymBu`u|%1%`E!=|Iwc;qEZu9;U(X}UB(a) NvXV*?HDZQ={|9+=nRoyI diff --git a/api/core/tools/provider/builtin/comfyui/comfyui.py b/api/core/tools/provider/builtin/comfyui/comfyui.py deleted file mode 100644 index 7013a0b93c8dcb..00000000000000 --- a/api/core/tools/provider/builtin/comfyui/comfyui.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.comfyui.tools.comfyui_stable_diffusion import ComfyuiStableDiffusionTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class ComfyUIProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - 
ComfyuiStableDiffusionTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).validate_models() - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/comfyui/comfyui.yaml b/api/core/tools/provider/builtin/comfyui/comfyui.yaml deleted file mode 100644 index 066fd853082817..00000000000000 --- a/api/core/tools/provider/builtin/comfyui/comfyui.yaml +++ /dev/null @@ -1,42 +0,0 @@ -identity: - author: Qun - name: comfyui - label: - en_US: ComfyUI - zh_Hans: ComfyUI - pt_BR: ComfyUI - description: - en_US: ComfyUI is a tool for generating images which can be deployed locally. - zh_Hans: ComfyUI 是一个可以在本地部署的图片生成的工具。 - pt_BR: ComfyUI is a tool for generating images which can be deployed locally. - icon: icon.png - tags: - - image -credentials_for_provider: - base_url: - type: text-input - required: true - label: - en_US: Base URL - zh_Hans: ComfyUI服务器的Base URL - pt_BR: Base URL - placeholder: - en_US: Please input your ComfyUI server's Base URL - zh_Hans: 请输入你的 ComfyUI 服务器的 Base URL - pt_BR: Please input your ComfyUI server's Base URL - model: - type: text-input - required: true - label: - en_US: Model with suffix - zh_Hans: 模型, 需要带后缀 - pt_BR: Model with suffix - placeholder: - en_US: Please input your model - zh_Hans: 请输入你的模型名称 - pt_BR: Please input your model - help: - en_US: The checkpoint name of the ComfyUI server, e.g. xxx.safetensors - zh_Hans: ComfyUI服务器的模型名称, 比如 xxx.safetensors - pt_BR: The checkpoint name of the ComfyUI server, e.g. 
xxx.safetensors - url: https://docs.dify.ai/tutorials/tool-configuration/comfyui diff --git a/api/core/tools/provider/builtin/comfyui/tools/comfyui_stable_diffusion.py b/api/core/tools/provider/builtin/comfyui/tools/comfyui_stable_diffusion.py deleted file mode 100644 index 81fc8cc9851a7e..00000000000000 --- a/api/core/tools/provider/builtin/comfyui/tools/comfyui_stable_diffusion.py +++ /dev/null @@ -1,475 +0,0 @@ -import json -import os -import random -import uuid -from copy import deepcopy -from enum import Enum -from typing import Any, Union - -import websocket -from httpx import get, post -from yarl import URL - -from core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter, ToolParameterOption -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.tool.builtin_tool import BuiltinTool - -SD_TXT2IMG_OPTIONS = {} -LORA_NODE = { - "inputs": {"lora_name": "", "strength_model": 1, "strength_clip": 1, "model": ["11", 0], "clip": ["11", 1]}, - "class_type": "LoraLoader", - "_meta": {"title": "Load LoRA"}, -} -FluxGuidanceNode = { - "inputs": {"guidance": 3.5, "conditioning": ["6", 0]}, - "class_type": "FluxGuidance", - "_meta": {"title": "FluxGuidance"}, -} - - -class ModelType(Enum): - SD15 = 1 - SDXL = 2 - SD3 = 3 - FLUX = 4 - - -class ComfyuiStableDiffusionTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - # base url - base_url = self.runtime.credentials.get("base_url", "") - if not base_url: - return self.create_text_message("Please input base_url") - - if tool_parameters.get("model"): - self.runtime.credentials["model"] = tool_parameters["model"] - - model = self.runtime.credentials.get("model", None) - if not model: - return self.create_text_message("Please input model") - - # prompt - prompt = tool_parameters.get("prompt", "") - if 
not prompt: - return self.create_text_message("Please input prompt") - - # get negative prompt - negative_prompt = tool_parameters.get("negative_prompt", "") - - # get size - width = tool_parameters.get("width", 1024) - height = tool_parameters.get("height", 1024) - - # get steps - steps = tool_parameters.get("steps", 1) - - # get sampler_name - sampler_name = tool_parameters.get("sampler_name", "euler") - - # scheduler - scheduler = tool_parameters.get("scheduler", "normal") - - # get cfg - cfg = tool_parameters.get("cfg", 7.0) - - # get model type - model_type = tool_parameters.get("model_type", ModelType.SD15.name) - - # get lora - # supports up to 3 loras - lora_list = [] - lora_strength_list = [] - if tool_parameters.get("lora_1"): - lora_list.append(tool_parameters["lora_1"]) - lora_strength_list.append(tool_parameters.get("lora_strength_1", 1)) - if tool_parameters.get("lora_2"): - lora_list.append(tool_parameters["lora_2"]) - lora_strength_list.append(tool_parameters.get("lora_strength_2", 1)) - if tool_parameters.get("lora_3"): - lora_list.append(tool_parameters["lora_3"]) - lora_strength_list.append(tool_parameters.get("lora_strength_3", 1)) - - return self.text2img( - base_url=base_url, - model=model, - model_type=model_type, - prompt=prompt, - negative_prompt=negative_prompt, - width=width, - height=height, - steps=steps, - sampler_name=sampler_name, - scheduler=scheduler, - cfg=cfg, - lora_list=lora_list, - lora_strength_list=lora_strength_list, - ) - - def get_checkpoints(self) -> list[str]: - """ - get checkpoints - """ - try: - base_url = self.runtime.credentials.get("base_url", None) - if not base_url: - return [] - api_url = str(URL(base_url) / "models" / "checkpoints") - response = get(url=api_url, timeout=(2, 10)) - if response.status_code != 200: - return [] - else: - return response.json() - except Exception as e: - return [] - - def get_loras(self) -> list[str]: - """ - get loras - """ - try: - base_url = 
self.runtime.credentials.get("base_url", None) - if not base_url: - return [] - api_url = str(URL(base_url) / "models" / "loras") - response = get(url=api_url, timeout=(2, 10)) - if response.status_code != 200: - return [] - else: - return response.json() - except Exception as e: - return [] - - def get_sample_methods(self) -> tuple[list[str], list[str]]: - """ - get sample method - """ - try: - base_url = self.runtime.credentials.get("base_url", None) - if not base_url: - return [], [] - api_url = str(URL(base_url) / "object_info" / "KSampler") - response = get(url=api_url, timeout=(2, 10)) - if response.status_code != 200: - return [], [] - else: - data = response.json()["KSampler"]["input"]["required"] - return data["sampler_name"][0], data["scheduler"][0] - except Exception as e: - return [], [] - - def validate_models(self) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - validate models - """ - try: - base_url = self.runtime.credentials.get("base_url", None) - if not base_url: - raise ToolProviderCredentialValidationError("Please input base_url") - model = self.runtime.credentials.get("model", None) - if not model: - raise ToolProviderCredentialValidationError("Please input model") - - api_url = str(URL(base_url) / "models" / "checkpoints") - response = get(url=api_url, timeout=(2, 10)) - if response.status_code != 200: - raise ToolProviderCredentialValidationError("Failed to get models") - else: - models = response.json() - if len([d for d in models if d == model]) > 0: - return self.create_text_message(json.dumps(models)) - else: - raise ToolProviderCredentialValidationError(f"model {model} does not exist") - except Exception as e: - raise ToolProviderCredentialValidationError(f"Failed to get models, {e}") - - def get_history(self, base_url, prompt_id): - """ - get history - """ - url = str(URL(base_url) / "history") - respond = get(url, params={"prompt_id": prompt_id}, timeout=(2, 10)) - return respond.json() - - def download_image(self, 
base_url, filename, subfolder, folder_type): - """ - download image - """ - url = str(URL(base_url) / "view") - response = get(url, params={"filename": filename, "subfolder": subfolder, "type": folder_type}, timeout=(2, 10)) - return response.content - - def queue_prompt_image(self, base_url, client_id, prompt): - """ - send prompt task and rotate - """ - # initiate task execution - url = str(URL(base_url) / "prompt") - respond = post(url, data=json.dumps({"client_id": client_id, "prompt": prompt}), timeout=(2, 10)) - prompt_id = respond.json()["prompt_id"] - - ws = websocket.WebSocket() - if "https" in base_url: - ws_url = base_url.replace("https", "ws") - else: - ws_url = base_url.replace("http", "ws") - ws.connect(str(URL(f"{ws_url}") / "ws") + f"?clientId={client_id}", timeout=120) - - # websocket rotate execution status - output_images = {} - while True: - out = ws.recv() - if isinstance(out, str): - message = json.loads(out) - if message["type"] == "executing": - data = message["data"] - if data["node"] is None and data["prompt_id"] == prompt_id: - break # Execution is done - elif message["type"] == "status": - data = message["data"] - if data["status"]["exec_info"]["queue_remaining"] == 0 and data.get("sid"): - break # Execution is done - else: - continue # previews are binary data - - # download image when execution finished - history = self.get_history(base_url, prompt_id)[prompt_id] - for o in history["outputs"]: - for node_id in history["outputs"]: - node_output = history["outputs"][node_id] - if "images" in node_output: - images_output = [] - for image in node_output["images"]: - image_data = self.download_image(base_url, image["filename"], image["subfolder"], image["type"]) - images_output.append(image_data) - output_images[node_id] = images_output - - ws.close() - - return output_images - - def text2img( - self, - base_url: str, - model: str, - model_type: str, - prompt: str, - negative_prompt: str, - width: int, - height: int, - steps: int, - 
sampler_name: str, - scheduler: str, - cfg: float, - lora_list: list, - lora_strength_list: list, - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - generate image - """ - if not SD_TXT2IMG_OPTIONS: - current_dir = os.path.dirname(os.path.realpath(__file__)) - with open(os.path.join(current_dir, "txt2img.json")) as file: - SD_TXT2IMG_OPTIONS.update(json.load(file)) - - draw_options = deepcopy(SD_TXT2IMG_OPTIONS) - draw_options["3"]["inputs"]["steps"] = steps - draw_options["3"]["inputs"]["sampler_name"] = sampler_name - draw_options["3"]["inputs"]["scheduler"] = scheduler - draw_options["3"]["inputs"]["cfg"] = cfg - # generate different image when using same prompt next time - draw_options["3"]["inputs"]["seed"] = random.randint(0, 100000000) - draw_options["4"]["inputs"]["ckpt_name"] = model - draw_options["5"]["inputs"]["width"] = width - draw_options["5"]["inputs"]["height"] = height - draw_options["6"]["inputs"]["text"] = prompt - draw_options["7"]["inputs"]["text"] = negative_prompt - # if the model is SD3 or FLUX series, the Latent class should be corresponding to SD3 Latent - if model_type in {ModelType.SD3.name, ModelType.FLUX.name}: - draw_options["5"]["class_type"] = "EmptySD3LatentImage" - - if lora_list: - # last Lora node link to KSampler node - draw_options["3"]["inputs"]["model"][0] = "10" - # last Lora node link to positive and negative Clip node - draw_options["6"]["inputs"]["clip"][0] = "10" - draw_options["7"]["inputs"]["clip"][0] = "10" - # every Lora node link to next Lora node, and Checkpoints node link to first Lora node - for i, (lora, strength) in enumerate(zip(lora_list, lora_strength_list), 10): - if i - 10 == len(lora_list) - 1: - next_node_id = "4" - else: - next_node_id = str(i + 1) - lora_node = deepcopy(LORA_NODE) - lora_node["inputs"]["lora_name"] = lora - lora_node["inputs"]["strength_model"] = strength - lora_node["inputs"]["strength_clip"] = strength - lora_node["inputs"]["model"][0] = next_node_id - 
lora_node["inputs"]["clip"][0] = next_node_id - draw_options[str(i)] = lora_node - - # FLUX need to add FluxGuidance Node - if model_type == ModelType.FLUX.name: - last_node_id = str(10 + len(lora_list)) - draw_options[last_node_id] = deepcopy(FluxGuidanceNode) - draw_options[last_node_id]["inputs"]["conditioning"][0] = "6" - draw_options["3"]["inputs"]["positive"][0] = last_node_id - - try: - client_id = str(uuid.uuid4()) - result = self.queue_prompt_image(base_url, client_id, prompt=draw_options) - - # get first image - image = b"" - for node in result: - for img in result[node]: - if img: - image = img - break - - return self.create_blob_message( - blob=image, meta={"mime_type": "image/png"}, save_as=self.VARIABLE_KEY.IMAGE.value - ) - - except Exception as e: - return self.create_text_message(f"Failed to generate image: {str(e)}") - - def get_runtime_parameters(self) -> list[ToolParameter]: - parameters = [ - ToolParameter( - name="prompt", - label=I18nObject(en_US="Prompt", zh_Hans="Prompt"), - human_description=I18nObject( - en_US="Image prompt, you can check the official documentation of Stable Diffusion", - zh_Hans="图像提示词,您可以查看 Stable Diffusion 的官方文档", - ), - type=ToolParameter.ToolParameterType.STRING, - form=ToolParameter.ToolParameterForm.LLM, - llm_description="Image prompt of Stable Diffusion, you should describe the image " - "you want to generate as a list of words as possible as detailed, " - "the prompt must be written in English.", - required=True, - ), - ] - if self.runtime.credentials: - try: - models = self.get_checkpoints() - if len(models) != 0: - parameters.append( - ToolParameter( - name="model", - label=I18nObject(en_US="Model", zh_Hans="Model"), - human_description=I18nObject( - en_US="Model of Stable Diffusion or FLUX, " - "you can check the official documentation of Stable Diffusion or FLUX", - zh_Hans="Stable Diffusion 或者 FLUX 的模型,您可以查看 Stable Diffusion 的官方文档", - ), - type=ToolParameter.ToolParameterType.SELECT, - 
form=ToolParameter.ToolParameterForm.FORM, - llm_description="Model of Stable Diffusion or FLUX, " - "you can check the official documentation of Stable Diffusion or FLUX", - required=True, - default=models[0], - options=[ - ToolParameterOption(value=i, label=I18nObject(en_US=i, zh_Hans=i)) for i in models - ], - ) - ) - loras = self.get_loras() - if len(loras) != 0: - for n in range(1, 4): - parameters.append( - ToolParameter( - name=f"lora_{n}", - label=I18nObject(en_US=f"Lora {n}", zh_Hans=f"Lora {n}"), - human_description=I18nObject( - en_US="Lora of Stable Diffusion, " - "you can check the official documentation of Stable Diffusion", - zh_Hans="Stable Diffusion 的 Lora 模型,您可以查看 Stable Diffusion 的官方文档", - ), - type=ToolParameter.ToolParameterType.SELECT, - form=ToolParameter.ToolParameterForm.FORM, - llm_description="Lora of Stable Diffusion, " - "you can check the official documentation of " - "Stable Diffusion", - required=False, - options=[ - ToolParameterOption(value=i, label=I18nObject(en_US=i, zh_Hans=i)) for i in loras - ], - ) - ) - sample_methods, schedulers = self.get_sample_methods() - if len(sample_methods) != 0: - parameters.append( - ToolParameter( - name="sampler_name", - label=I18nObject(en_US="Sampling method", zh_Hans="Sampling method"), - human_description=I18nObject( - en_US="Sampling method of Stable Diffusion, " - "you can check the official documentation of Stable Diffusion", - zh_Hans="Stable Diffusion 的Sampling method,您可以查看 Stable Diffusion 的官方文档", - ), - type=ToolParameter.ToolParameterType.SELECT, - form=ToolParameter.ToolParameterForm.FORM, - llm_description="Sampling method of Stable Diffusion, " - "you can check the official documentation of Stable Diffusion", - required=True, - default=sample_methods[0], - options=[ - ToolParameterOption(value=i, label=I18nObject(en_US=i, zh_Hans=i)) - for i in sample_methods - ], - ) - ) - if len(schedulers) != 0: - parameters.append( - ToolParameter( - name="scheduler", - 
label=I18nObject(en_US="Scheduler", zh_Hans="Scheduler"), - human_description=I18nObject( - en_US="Scheduler of Stable Diffusion, " - "you can check the official documentation of Stable Diffusion", - zh_Hans="Stable Diffusion 的Scheduler,您可以查看 Stable Diffusion 的官方文档", - ), - type=ToolParameter.ToolParameterType.SELECT, - form=ToolParameter.ToolParameterForm.FORM, - llm_description="Scheduler of Stable Diffusion, " - "you can check the official documentation of Stable Diffusion", - required=True, - default=schedulers[0], - options=[ - ToolParameterOption(value=i, label=I18nObject(en_US=i, zh_Hans=i)) for i in schedulers - ], - ) - ) - parameters.append( - ToolParameter( - name="model_type", - label=I18nObject(en_US="Model Type", zh_Hans="Model Type"), - human_description=I18nObject( - en_US="Model Type of Stable Diffusion or Flux, " - "you can check the official documentation of Stable Diffusion or Flux", - zh_Hans="Stable Diffusion 或 FLUX 的模型类型," - "您可以查看 Stable Diffusion 或 Flux 的官方文档", - ), - type=ToolParameter.ToolParameterType.SELECT, - form=ToolParameter.ToolParameterForm.FORM, - llm_description="Model Type of Stable Diffusion or Flux, " - "you can check the official documentation of Stable Diffusion or Flux", - required=True, - default=ModelType.SD15.name, - options=[ - ToolParameterOption(value=i, label=I18nObject(en_US=i, zh_Hans=i)) - for i in ModelType.__members__ - ], - ) - ) - except: - pass - - return parameters diff --git a/api/core/tools/provider/builtin/comfyui/tools/comfyui_stable_diffusion.yaml b/api/core/tools/provider/builtin/comfyui/tools/comfyui_stable_diffusion.yaml deleted file mode 100644 index 4f4a6942b3bac4..00000000000000 --- a/api/core/tools/provider/builtin/comfyui/tools/comfyui_stable_diffusion.yaml +++ /dev/null @@ -1,212 +0,0 @@ -identity: - name: txt2img workflow - author: Qun - label: - en_US: Txt2Img Workflow - zh_Hans: Txt2Img Workflow - pt_BR: Txt2Img Workflow -description: - human: - en_US: a pre-defined comfyui workflow that 
can use one model and up to 3 loras to generate images. Support SD1.5, SDXL, SD3 and FLUX which contain text encoders/clip, but does not support models that requires a triple clip loader. - zh_Hans: 一个预定义的 ComfyUI 工作流,可以使用一个模型和最多3个loras来生成图像。支持包含文本编码器/clip的SD1.5、SDXL、SD3和FLUX,但不支持需要clip加载器的模型。 - pt_BR: a pre-defined comfyui workflow that can use one model and up to 3 loras to generate images. Support SD1.5, SDXL, SD3 and FLUX which contain text encoders/clip, but does not support models that requires a triple clip loader. - llm: draw the image you want based on your prompt. -parameters: - - name: prompt - type: string - required: true - label: - en_US: Prompt - zh_Hans: 提示词 - pt_BR: Prompt - human_description: - en_US: Image prompt, you can check the official documentation of Stable Diffusion or FLUX - zh_Hans: 图像提示词,您可以查看 Stable Diffusion 或者 FLUX 的官方文档 - pt_BR: Image prompt, you can check the official documentation of Stable Diffusion or FLUX - llm_description: Image prompt of Stable Diffusion, you should describe the image you want to generate as a list of words as possible as detailed, the prompt must be written in English. 
- form: llm - - name: model - type: string - required: true - label: - en_US: Model Name - zh_Hans: 模型名称 - pt_BR: Model Name - human_description: - en_US: Model Name - zh_Hans: 模型名称 - pt_BR: Model Name - form: form - - name: model_type - type: string - required: true - label: - en_US: Model Type - zh_Hans: 模型类型 - pt_BR: Model Type - human_description: - en_US: Model Type - zh_Hans: 模型类型 - pt_BR: Model Type - form: form - - name: lora_1 - type: string - required: false - label: - en_US: Lora 1 - zh_Hans: Lora 1 - pt_BR: Lora 1 - human_description: - en_US: Lora 1 - zh_Hans: Lora 1 - pt_BR: Lora 1 - form: form - - name: lora_strength_1 - type: number - required: false - label: - en_US: Lora Strength 1 - zh_Hans: Lora Strength 1 - pt_BR: Lora Strength 1 - human_description: - en_US: Lora Strength 1 - zh_Hans: Lora模型的权重 - pt_BR: Lora Strength 1 - form: form - - name: steps - type: number - required: false - label: - en_US: Steps - zh_Hans: Steps - pt_BR: Steps - human_description: - en_US: Steps - zh_Hans: Steps - pt_BR: Steps - form: form - default: 20 - - name: width - type: number - required: false - label: - en_US: Width - zh_Hans: Width - pt_BR: Width - human_description: - en_US: Width - zh_Hans: Width - pt_BR: Width - form: form - default: 1024 - - name: height - type: number - required: false - label: - en_US: Height - zh_Hans: Height - pt_BR: Height - human_description: - en_US: Height - zh_Hans: Height - pt_BR: Height - form: form - default: 1024 - - name: negative_prompt - type: string - required: false - label: - en_US: Negative prompt - zh_Hans: Negative prompt - pt_BR: Negative prompt - human_description: - en_US: Negative prompt - zh_Hans: Negative prompt - pt_BR: Negative prompt - form: form - default: bad art, ugly, deformed, watermark, duplicated, discontinuous lines - - name: cfg - type: number - required: false - label: - en_US: CFG Scale - zh_Hans: CFG Scale - pt_BR: CFG Scale - human_description: - en_US: CFG Scale - zh_Hans: 提示词相关性(CFG Scale) - 
pt_BR: CFG Scale - form: form - default: 7.0 - - name: sampler_name - type: string - required: false - label: - en_US: Sampling method - zh_Hans: Sampling method - pt_BR: Sampling method - human_description: - en_US: Sampling method - zh_Hans: Sampling method - pt_BR: Sampling method - form: form - - name: scheduler - type: string - required: false - label: - en_US: Scheduler - zh_Hans: Scheduler - pt_BR: Scheduler - human_description: - en_US: Scheduler - zh_Hans: Scheduler - pt_BR: Scheduler - form: form - - name: lora_2 - type: string - required: false - label: - en_US: Lora 2 - zh_Hans: Lora 2 - pt_BR: Lora 2 - human_description: - en_US: Lora 2 - zh_Hans: Lora 2 - pt_BR: Lora 2 - form: form - - name: lora_strength_2 - type: number - required: false - label: - en_US: Lora Strength 2 - zh_Hans: Lora Strength 2 - pt_BR: Lora Strength 2 - human_description: - en_US: Lora Strength 2 - zh_Hans: Lora模型的权重 - pt_BR: Lora Strength 2 - form: form - - name: lora_3 - type: string - required: false - label: - en_US: Lora 3 - zh_Hans: Lora 3 - pt_BR: Lora 3 - human_description: - en_US: Lora 3 - zh_Hans: Lora 3 - pt_BR: Lora 3 - form: form - - name: lora_strength_3 - type: number - required: false - label: - en_US: Lora Strength 3 - zh_Hans: Lora Strength 3 - pt_BR: Lora Strength 3 - human_description: - en_US: Lora Strength 3 - zh_Hans: Lora模型的权重 - pt_BR: Lora Strength 3 - form: form diff --git a/api/core/tools/provider/builtin/comfyui/tools/txt2img.json b/api/core/tools/provider/builtin/comfyui/tools/txt2img.json deleted file mode 100644 index 8ea869ff106c38..00000000000000 --- a/api/core/tools/provider/builtin/comfyui/tools/txt2img.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "3": { - "inputs": { - "seed": 156680208700286, - "steps": 20, - "cfg": 8, - "sampler_name": "euler", - "scheduler": "normal", - "denoise": 1, - "model": [ - "4", - 0 - ], - "positive": [ - "6", - 0 - ], - "negative": [ - "7", - 0 - ], - "latent_image": [ - "5", - 0 - ] - }, - "class_type": "KSampler", 
- "_meta": { - "title": "KSampler" - } - }, - "4": { - "inputs": { - "ckpt_name": "3dAnimationDiffusion_v10.safetensors" - }, - "class_type": "CheckpointLoaderSimple", - "_meta": { - "title": "Load Checkpoint" - } - }, - "5": { - "inputs": { - "width": 512, - "height": 512, - "batch_size": 1 - }, - "class_type": "EmptyLatentImage", - "_meta": { - "title": "Empty Latent Image" - } - }, - "6": { - "inputs": { - "text": "beautiful scenery nature glass bottle landscape, , purple galaxy bottle,", - "clip": [ - "4", - 1 - ] - }, - "class_type": "CLIPTextEncode", - "_meta": { - "title": "CLIP Text Encode (Prompt)" - } - }, - "7": { - "inputs": { - "text": "text, watermark", - "clip": [ - "4", - 1 - ] - }, - "class_type": "CLIPTextEncode", - "_meta": { - "title": "CLIP Text Encode (Prompt)" - } - }, - "8": { - "inputs": { - "samples": [ - "3", - 0 - ], - "vae": [ - "4", - 2 - ] - }, - "class_type": "VAEDecode", - "_meta": { - "title": "VAE Decode" - } - }, - "9": { - "inputs": { - "filename_prefix": "ComfyUI", - "images": [ - "8", - 0 - ] - }, - "class_type": "SaveImage", - "_meta": { - "title": "Save Image" - } - } -} \ No newline at end of file diff --git a/api/core/tools/provider/builtin/crossref/_assets/icon.svg b/api/core/tools/provider/builtin/crossref/_assets/icon.svg deleted file mode 100644 index aa629de7cb1660..00000000000000 --- a/api/core/tools/provider/builtin/crossref/_assets/icon.svg +++ /dev/null @@ -1,49 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/api/core/tools/provider/builtin/crossref/crossref.py b/api/core/tools/provider/builtin/crossref/crossref.py deleted file mode 100644 index 8ba3c1b48ae6d7..00000000000000 --- a/api/core/tools/provider/builtin/crossref/crossref.py +++ /dev/null @@ -1,20 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.crossref.tools.query_doi import CrossRefQueryDOITool -from core.tools.provider.builtin_tool_provider import 
BuiltinToolProviderController - - -class CrossRefProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - CrossRefQueryDOITool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "doi": "10.1007/s00894-022-05373-8", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/crossref/crossref.yaml b/api/core/tools/provider/builtin/crossref/crossref.yaml deleted file mode 100644 index da67fbec3a480b..00000000000000 --- a/api/core/tools/provider/builtin/crossref/crossref.yaml +++ /dev/null @@ -1,29 +0,0 @@ -identity: - author: Sakura4036 - name: crossref - label: - en_US: CrossRef - zh_Hans: CrossRef - description: - en_US: Crossref is a cross-publisher reference linking registration query system using DOI technology created in 2000. Crossref establishes cross-database links between the reference list and citation full text of papers, making it very convenient for readers to access the full text of papers. 
- zh_Hans: Crossref是于2000年创建的使用DOI技术的跨出版商参考文献链接注册查询系统。Crossref建立了在论文的参考文献列表和引文全文之间的跨数据库链接,使得读者能够非常便捷地获取文献全文。 - icon: icon.svg - tags: - - search -credentials_for_provider: - mailto: - type: text-input - required: true - label: - en_US: email address - zh_Hans: email地址 - pt_BR: email address - placeholder: - en_US: Please input your email address - zh_Hans: 请输入你的email地址 - pt_BR: Please input your email address - help: - en_US: According to the requirements of Crossref, an email address is required - zh_Hans: 根据Crossref的要求,需要提供一个邮箱地址 - pt_BR: According to the requirements of Crossref, an email address is required - url: https://api.crossref.org/swagger-ui/index.html diff --git a/api/core/tools/provider/builtin/crossref/tools/query_doi.py b/api/core/tools/provider/builtin/crossref/tools/query_doi.py deleted file mode 100644 index 746139dd69d27b..00000000000000 --- a/api/core/tools/provider/builtin/crossref/tools/query_doi.py +++ /dev/null @@ -1,28 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.errors import ToolParameterValidationError -from core.tools.tool.builtin_tool import BuiltinTool - - -class CrossRefQueryDOITool(BuiltinTool): - """ - Tool for querying the metadata of a publication using its DOI. 
- """ - - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - doi = tool_parameters.get("doi") - if not doi: - raise ToolParameterValidationError("doi is required.") - # doc: https://github.com/CrossRef/rest-api-doc - url = f"https://api.crossref.org/works/{doi}" - response = requests.get(url) - response.raise_for_status() - response = response.json() - message = response.get("message", {}) - - return self.create_json_message(message) diff --git a/api/core/tools/provider/builtin/crossref/tools/query_doi.yaml b/api/core/tools/provider/builtin/crossref/tools/query_doi.yaml deleted file mode 100644 index 9c16da25edf2b3..00000000000000 --- a/api/core/tools/provider/builtin/crossref/tools/query_doi.yaml +++ /dev/null @@ -1,23 +0,0 @@ -identity: - name: crossref_query_doi - author: Sakura4036 - label: - en_US: CrossRef Query DOI - zh_Hans: CrossRef DOI 查询 - pt_BR: CrossRef Query DOI -description: - human: - en_US: A tool for searching literature information using CrossRef by DOI. - zh_Hans: 一个使用CrossRef通过DOI获取文献信息的工具。 - pt_BR: A tool for searching literature information using CrossRef by DOI. - llm: A tool for searching literature information using CrossRef by DOI. 
-parameters: - - name: doi - type: string - required: true - label: - en_US: DOI - zh_Hans: DOI - pt_BR: DOI - llm_description: DOI for searching in CrossRef - form: llm diff --git a/api/core/tools/provider/builtin/crossref/tools/query_title.py b/api/core/tools/provider/builtin/crossref/tools/query_title.py deleted file mode 100644 index e2452381832938..00000000000000 --- a/api/core/tools/provider/builtin/crossref/tools/query_title.py +++ /dev/null @@ -1,143 +0,0 @@ -import time -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -def convert_time_str_to_seconds(time_str: str) -> int: - """ - Convert a time string to seconds. - example: 1s -> 1, 1m30s -> 90, 1h30m -> 5400, 1h30m30s -> 5430 - """ - time_str = time_str.lower().strip().replace(" ", "") - seconds = 0 - if "h" in time_str: - hours, time_str = time_str.split("h") - seconds += int(hours) * 3600 - if "m" in time_str: - minutes, time_str = time_str.split("m") - seconds += int(minutes) * 60 - if "s" in time_str: - seconds += int(time_str.replace("s", "")) - return seconds - - -class CrossRefQueryTitleAPI: - """ - Tool for querying the metadata of a publication using its title. - Crossref API doc: https://github.com/CrossRef/rest-api-doc - """ - - query_url_template: str = "https://api.crossref.org/works?query.bibliographic={query}&rows={rows}&offset={offset}&sort={sort}&order={order}&mailto={mailto}" - rate_limit: int = 50 - rate_interval: float = 1 - max_limit: int = 1000 - - def __init__(self, mailto: str): - self.mailto = mailto - - def _query( - self, - query: str, - rows: int = 5, - offset: int = 0, - sort: str = "relevance", - order: str = "desc", - fuzzy_query: bool = False, - ) -> list[dict]: - """ - Query the metadata of a publication using its title. 
- :param query: the title of the publication - :param rows: the number of results to return - :param sort: the sort field - :param order: the sort order - :param fuzzy_query: whether to return all items that match the query - """ - url = self.query_url_template.format( - query=query, rows=rows, offset=offset, sort=sort, order=order, mailto=self.mailto - ) - response = requests.get(url) - response.raise_for_status() - rate_limit = int(response.headers["x-ratelimit-limit"]) - # convert time string to seconds - rate_interval = convert_time_str_to_seconds(response.headers["x-ratelimit-interval"]) - - self.rate_limit = rate_limit - self.rate_interval = rate_interval - - response = response.json() - if response["status"] != "ok": - return [] - - message = response["message"] - if fuzzy_query: - # fuzzy query return all items - return message["items"] - else: - for paper in message["items"]: - title = paper["title"][0] - if title.lower() != query.lower(): - continue - return [paper] - return [] - - def query( - self, query: str, rows: int = 5, sort: str = "relevance", order: str = "desc", fuzzy_query: bool = False - ) -> list[dict]: - """ - Query the metadata of a publication using its title. 
- :param query: the title of the publication - :param rows: the number of results to return - :param sort: the sort field - :param order: the sort order - :param fuzzy_query: whether to return all items that match the query - """ - rows = min(rows, self.max_limit) - if rows > self.rate_limit: - # query multiple times - query_times = rows // self.rate_limit + 1 - results = [] - - for i in range(query_times): - result = self._query( - query, - rows=self.rate_limit, - offset=i * self.rate_limit, - sort=sort, - order=order, - fuzzy_query=fuzzy_query, - ) - if fuzzy_query: - results.extend(result) - else: - # fuzzy_query=False, only one result - if result: - return result - time.sleep(self.rate_interval) - return results - else: - # query once - return self._query(query, rows, sort=sort, order=order, fuzzy_query=fuzzy_query) - - -class CrossRefQueryTitleTool(BuiltinTool): - """ - Tool for querying the metadata of a publication using its title. - """ - - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - query = tool_parameters.get("query") - fuzzy_query = tool_parameters.get("fuzzy_query", False) - rows = tool_parameters.get("rows", 3) - sort = tool_parameters.get("sort", "relevance") - order = tool_parameters.get("order", "desc") - mailto = self.runtime.credentials["mailto"] - - result = CrossRefQueryTitleAPI(mailto).query(query, rows, sort, order, fuzzy_query) - - return [self.create_json_message(r) for r in result] diff --git a/api/core/tools/provider/builtin/crossref/tools/query_title.yaml b/api/core/tools/provider/builtin/crossref/tools/query_title.yaml deleted file mode 100644 index 5579c77f5293d3..00000000000000 --- a/api/core/tools/provider/builtin/crossref/tools/query_title.yaml +++ /dev/null @@ -1,105 +0,0 @@ -identity: - name: crossref_query_title - author: Sakura4036 - label: - en_US: CrossRef Title Query - zh_Hans: CrossRef 标题查询 - pt_BR: CrossRef Title Query -description: - human: - 
en_US: A tool for querying literature information using CrossRef by title. - zh_Hans: 一个使用CrossRef通过标题搜索文献信息的工具。 - pt_BR: A tool for querying literature information using CrossRef by title. - llm: A tool for querying literature information using CrossRef by title. -parameters: - - name: query - type: string - required: true - label: - en_US: 标题 - zh_Hans: 查询语句 - pt_BR: 标题 - human_description: - en_US: Query bibliographic information, useful for citation look up. Includes titles, authors, ISSNs and publication years - zh_Hans: 用于搜索文献信息,有助于查找引用。包括标题,作者,ISSN和出版年份 - pt_BR: Query bibliographic information, useful for citation look up. Includes titles, authors, ISSNs and publication years - llm_description: key words for querying in Web of Science - form: llm - - name: fuzzy_query - type: boolean - default: false - label: - en_US: Whether to fuzzy search - zh_Hans: 是否模糊搜索 - pt_BR: Whether to fuzzy search - human_description: - en_US: used for selecting the query type, fuzzy query returns more results, precise query returns 1 or none - zh_Hans: 用于选择搜索类型,模糊搜索返回更多结果,精确搜索返回1条结果或无 - pt_BR: used for selecting the query type, fuzzy query returns more results, precise query returns 1 or none - form: form - - name: limit - type: number - required: false - label: - en_US: max query number - zh_Hans: 最大搜索数 - pt_BR: max query number - human_description: - en_US: max query number(fuzzy search returns the maximum number of results or precise search the maximum number of matches) - zh_Hans: 最大搜索数(模糊搜索返回的最大结果数或精确搜索最大匹配数) - pt_BR: max query number(fuzzy search returns the maximum number of results or precise search the maximum number of matches) - form: llm - default: 50 - - name: sort - type: select - required: true - options: - - value: relevance - label: - en_US: relevance - zh_Hans: 相关性 - pt_BR: relevance - - value: published - label: - en_US: publication date - zh_Hans: 出版日期 - pt_BR: publication date - - value: references-count - label: - en_US: references-count - zh_Hans: 引用次数 - 
pt_BR: references-count - default: relevance - label: - en_US: sorting field - zh_Hans: 排序字段 - pt_BR: sorting field - human_description: - en_US: Sorting of query results - zh_Hans: 检索结果的排序字段 - pt_BR: Sorting of query results - form: form - - name: order - type: select - required: true - options: - - value: desc - label: - en_US: descending - zh_Hans: 降序 - pt_BR: descending - - value: asc - label: - en_US: ascending - zh_Hans: 升序 - pt_BR: ascending - default: desc - label: - en_US: Order - zh_Hans: 排序 - pt_BR: Order - human_description: - en_US: Order of query results - zh_Hans: 检索结果的排序方式 - pt_BR: Order of query results - form: form diff --git a/api/core/tools/provider/builtin/dalle/__init__.py b/api/core/tools/provider/builtin/dalle/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/tools/provider/builtin/dalle/_assets/icon.png b/api/core/tools/provider/builtin/dalle/_assets/icon.png deleted file mode 100644 index 5155a73059ed5590099f285324a68a6437b0458e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 156474 zcmXtQ?Z8siG*B_O9D{9K{J*&-@ zqE!VUNYp3$&BnNdT)W4a?!%%S^5hi43jt>>Gv`#zmwo3)HqB8|8)-AIw~r6PnHRQ$ zdQ0U-uRI;MgRTxnN;(hyg9E=yW%eE^1_zr4(rwent$Oz^k5mTq&d-&)w@x}mQss5;;E$Y{+FTdY(hq+g@Ir<%_Cm6yBvXos< z${apr%f?-f%50_u?Kc|j9X4nSH3wfX{$6*3x)#ptrsZEKPH9%;HGSLJahATQp)J|3 z-n{W`gv?Sx*KAZg_U7`}uI@xr3ERWVj0~-#c$pfu%Jx33x|WfLB#i0M_4P6DN^9v= zu64RnMsvpVIg(3WQ(pqS54S(Kisjy!$T}IE&;%b@Od=&9H3*dag(h%+ zy=$d;?gmw?38F&DFx4P1E?V1gq)&EUe!G#GH_U>e^6zo?Q8PPed04{^2K3oV*SNs- z=dc6v3blqsEr=DvQQ2*8B2@ln1~kPrObQpV{-v5w2hc$j{Gt<1AMcgv*`GNyH=Qmb07;uh)Hj}e z!C*RG6hnWx&zG)uWP~#c3LTiBSvclts_BP-+s5Yi%2)Tkp~&90ba?C=+}4c?D?T%e zyD_w9?m&lO&yqUZv-!cnuH-%f@=Zx+6iARz`^Py62A!CaFGQuGjeb1_AB%a1s!fUT zyw!PLG^xMnswx&1+y1Us&tFG$MQKX@R9_D^(e2ik6-;AMY^uuR7UXjX=z4VO(Yo|)Th$j>9@weT|hVD^RkzpZdjVhfGawvOiFKfji{^n3{F&M(ul zl5k50xBnc$u(*wIa#Rt`=u99C<5qp@i_o z<#z}SG!6$>(D_~dIAzSNZ(#)L9y;a 
zOzT6o@^-Nc$kofZ;0tZl_ib9l=etH(!E$^$;w6(as=({(Xnc4w0`skwwzoahp6DdH zPn56=Mo+Yj)rX^6_T{4_Ccq{xbqSTeoecH~#bXq@l_Eq`yN8bT&$iq_lNS;AnF4Y2 z37vu{N(pUZYfm9}rQ{YACi!@n8j=XvDy# znVY)w(|dQkf0T>^6uE9G6%MS@qQ=Fmzr=Sxm6^k3CIPufpi`Islua6qt%Zl{Qh-k_Wr3!V$i!GKPFH1+>#z++vDEhF0`hYvdah^cL{nLn4N~40 zQe2>00xr_M_#M%Pt$WDJPjINvv7kW2^5?NRY-O3bPlJ%BpefK9W)b4*l{yk}59@8k zi=0GF6}UX(KJkqJ8mD~21N?n;2qZygmCm{r#34jG$9tO^IV^BC!rRdjb@2WH^Bb6E zHzfV=G?x`f#Z{?Krqo^x+k@7W;uYOTfGFR}yK%OI#vF>;7`M6JCHe5vFY-zWbl{d9c~5xVu+u zs?NJ}kS?RDjqVr4p=R2AiP-fyEl9SDU&fdHauM+w^Ev|Xu4B8Az@kW2P1%zdym=?S z`_Ep-HT*E>SeIrGpLu z-Xz_8D#CLUlJ9m=2Lp$@bTsR#?*yR>?qIlx)6}PElId7z*M$YpKe-1vP-G?RHY-reY~Zia z{&v6bC(#hxA-Q?J?BL(#94Sua&@%C(DNW~n8ih>E{x8qp!=}w|NCP+^@vU#I6__@L z6@RTq!Dct}n(g4IQG(Ib$5uok+JuzC(&N1x!0uTdw@f3B2~uYJn|?tM2>J}p?-s|o zJsFnGk=;`H%b=dz=_YJQTi_wq)`fl?(rKV905#F6x!^AinHgkGJUDd6_@u<8Eo)WV zl$x=Flr%keng;_)0NP+#PeLKQ}U_%aO=Bsse z7+QaSXgGID1AvNOC{xnEOaU3-zQz1mQK{f~TCX=0{WMcf2)@oRUHus5gMTjf z8u0X*`{8Qnn#C-a1tP?M6A9PlxA`NG#z%cO-3G`Q_naEO0Gj0rMY`v&a#TdTlyZ#k z<6}&{p{NDzAbGd9V77hi24)mwR-!n&!>5!NP3rgDZA8rc~+8sDW^e9~0 zsVMNw0FGiSDo(C~`SpXm#H??m{0n zV}h_-<(@cmJIQgw7$!Y1?7V!#$=;(k6>vC_#<^S_JNo0YUhB!P#jyZxri~?H>nEqd zyXTgf8yC)U7ejIrbs16+&XkY$Fr_iU+R%7NILmG70*;^y7tg2nfjj2{oZ#s zK6vV!CQ_h;*kXLp>5KuzLf!x|T#KNi|~i$E#26~hISs;B<_J)6SAW#-ufuUbL8hnmvU%#f8Y z3`=4SkARCKF%w~DRUrfuk;aBF?S5VA%-o|1qO)oxCbSm!ctN#o%;(G72;g7g4a0tV zeZ@ z*JK^Lds1s~%c$R;V>R;2U#DWcM}JokYz@QTHQqQgOV2`I(0eof`Vo%4Q0DYsd&Qec zjP#8GSlXr`%0$$>Ad2o`QQ{bU<=jZ%b+6l{r3K+yhHl9DsgGCfT1G=TI|ELGE0^o9b3g`5HzsmKs4RL z@FGm|=8ClS2m<5vO}_Bt<=7JnbtTl2?)4Lrf}Tt0m+nm3{Bq?_(5MI8YE(=_R`796 zYGCPXnh|m=4b_pKZi-%6*64d|N@N)1LIQd=tMVOx7=ur!MWeWb`%Cx=8IbJ;=}oI_ zYl&W&`?s{ubk=E~nj!d{*F;UK3ff@f!MT*ptdH^dunZ4;e%}gZYPIuIh00GEhnC%a zSFcjJKDczkl(4?%Fb2HmfsP?bSY+AC#p$BB*ueO3iZeV_*zaW<7LI@x$Z?zd7{4&?5D^uA+~YR{HYqY$r6?L9f%lRKspWG$F^e0ZL| z5PV@djQ^OW^mM;wnd13v6*IwQd4u^GnvcAVi!+ZVe-g`dX)#y*Jj92*G>Uy$i@HDk z)p{0F5m?1sOBZ3P3PEx_Z?i>fmr|lM?*~LSC-O7yt;1LpzN;M-nL1th!O-j=Zid;I 
zu%7e7yP~)Zt%!3R95p`DM15(@28jFXbos>7(GuL}zv zv9ZUJ4t^n3_Ov65Xu&a`u46G454ym3Xh2Iv*5r zekrJ{iUAHfd+`2rcP^co_ggU;hJ=z{BNXQut>=T}An5OP+oI`LCP(qP2+hjDfyoyB zRUj1#BElA&mqYiW5rGPkM2AQd%2UW(2mr!>$oGq6Cs%;2emyk6@XeRVuk=9|q z+YXm!1;In);5p-e4j!pBW-^9fRyed0tKFju4?M-bH8Ip@2_u}dieo^G7r@KPZC=Ps z!&rQ!wQs@Rn4Dfeo&n>G{x5}|*v-ESNOzI*^PORWq^L;eo-SybH}*Wfcn`gH{)FTx zW=1F`YUe+Le|$j!O6WqQS8Sw~w_YIsJ|CCbVnFeJB#Q(tPt#`7Cr)9|KI%4JWJ?o6 z3hC}wws)`W`KNF>`XCDI|IjZCjs&_R-aimg`t{OVaLl?G(7URFfnWHGSmv8+RSHO( zx3`4SKO9f6VHT_w_5ayC^G}ji4g7kR76;+QYsI(I4S;X+k57c}xSyi%BYL0sdbT%o zyi#GU!hbGA4QY!tmVU`4y?;xsRAy(V>O>Mh{}u$P^z?Q(o^3PzDfF(ta-g#y_AA?; z#H(QIOo1ZW7oP}7t6RfYZ-%5wof6QsZ~%Bs`&>)+m&&{BDHEHwxP;yio=xT_|4pmMBt*F}X&T^R zM!g#rccgBM$y~`u)pG*=kw;Dwa0p#vfjgiyKy4+Osiu0m*S6)Y`6iX*FO$9XyCair zrmuodjU$owxV3JjS>boKT6+1rC-H=@?ag+)%`4{VmX26w`U(7Y`Fn6Mj47AuHe2s@ zH^U8Pu*PSR{7m{R=-+^^Q|*@aVn|6k?`T8zfT6c)R&8{;D!t8@1kkUM=FS@Zb{y6j z@5nP$FjOt9p0jp|v|X1Tlngz*@G7R-Jm+$&S?Uk~i8?ve>Ry_~kEUxwdnF#+)p_-* z)6W)!F&JJU7Q&0BR{GScOhMeo+j;m)S97?0pP+9lbyURq*tK)UMAVLR%}5LlL)Fsu zuu#GLzE(h~|9}r!wEm8_zYXK1vzH$S6p8Pt5m1xGmGuIKzU#?v|4Cgb^l9!}fTRDN ztq$!27Le~htonTn8RO*MHzY1K-dp>5+D==o`1t*1Y*bE>b3JbYgvD+0lKsAYVJSN?Qa6`y>_ ztxdx*)^}r*$HMnz()Lc9EFQ|;`hJz1lRqEpvwPT%^pYM31-`gj2KPI4NL%N&GA{j# zowT7Y$lt(z*tAqhc^^JO@tG;2_cW2fQ;=H7%T1YaZr)UPn2D6$k{83R;d*rYeT#@~ z=g$HBuNrn@`vbCDM~q zmDW!729JoamMZ;g4GIFss4$t8SlQ5ayR91xngWgoX7CSghH>*gVE}C-0(m-8*68@> z6-BYF9w6(z=Ud?;V(~|**PTm*Spg*Ggv|Zaggv*-=C-=q64=iv-q&ss?t-RbH!M56zvRxE3;Jxl7F?Xw6ErH$Ugt299Ks@j5Z(j{Lw* zF3J2PitS9vR~bBG`r-0T_f!gsG$;O^|C8vSko;v88<<`EVw-Gt`#scA;d?V7H2x#$ zpZ3i@Rp{jOH>-FPLH%}I=&dJ2U)~tMv7gR>KyXyd2-ypWW{h^_X(9h*;+?E{N zGEUkKrh|}s-g+eM<-4r6p&ZDER4<_9!hPQJ74J6@9q#X19SSNOvDimaP<*`G^l+87 z`rj~xtVAZ>U8Z;A>l2a%7-IS zB}e)+afobCOXRvOJD5sKQnb;$={7s)4aGlk;^rHlb}i?^6HnrR{eeb26!3qEozj^! 
z=b|budL#R^S)0WXW)c0+!&6-mcK3zv&_#*w6wNOyca7k0zN0er6G1-XV=mBurXUV5 zbz@9tHjvc^XYjI>fX8~eUqhVesaC6B7z``U(vid0XND3v_+A>RIsKk`{0S7Tsy=nu zf+;sHVm#r1S{6d(Ue%*i0vy_M9}8`b*-UjZ|N1)t_;&ujAg%2>jBZ$Q-6k`wtGj`P zTyjY%(q^)+)H~1q3}S@{Yd$;0))VyH-JIRrULaj)vnnuj7GKm5~gv^cVa_)?h?m_#WHPsZO- zdOuoK%6o{%IylKe-V7r2atY%Nq%>-#NMDvWK5q{2?KK5y(M(zk#&w06Qxn~ztCe_r zu{xU`294XB?ai0T5`W6q6`Vb~kY7J9NoYdR0Y~6-)xnRK17n{>h**dZp8uFdm~um| zd)%BJ27G;a@0BVW$Rh2%Zar$hxAram^CCgrMktVX4Z!Y|}pcWSv zecL^d`yNO*IdHavY8)MW2L z&yFwQ7{*&Pk#*yXRXLUcY|wa&KD}{HI%zcdp`qFIlU_ZB$Mh|v^fz%O!ysk37-_#> zzY~_gL4&i0%YRDNQ(jv*S6G;%giF|n{f)2tNs+72W|856D)^JwoL4?lFFQ6qIqj~( z`t-3K;VzT7Q#<=@zq1`@QkcbS?}66?5`#(8Uk+ZeFT0C z5ty}BJey|~g}@C0BUrLOB+fiR05#`rh}W4>9;>hgkmQ64FFTS~&E8RJo>m6pgb;xz zt8LMspk>;}Nq+dWPWXlN!3G`+0D~tnQc&{Nw_la7ozO#v^%vbyLI^1);iQ6{Qk#Q% zLMKI*Vrl=Y^Lt5$V?5y>ZObrijieAt7&v$x)gYS$up)^p#NtS$gG^Q+Kpyn~AavH- z`C!=D@ASAJQHai)Z=PP~_uuC*`;dk}Z?e%#q1)^$v|V0PWEOgo5s8+PqdrQD0x#&* zEAN9~eQ*26-)bAr&qX1}Eg@iIbV8I~250l)myP~gk;G#Cm1r(*ldBI*9x8-NC z(6E~*VXd`?3NBRFE#>imG3s4S3I&`nv`sPh$YdwGnH6fG1&Q5Rux=*GCQadJPY3X+ z4Qvhjt!+-wu`Wz|kGo`V=S)qRusOD8k^&``!9d+gHGeM5eG-0T*S21Ew&mZCi z7#1*j&%jVG*y?gE--haMLf&SmuY7Ero=P2VZlp#AIDQ%EPW7L2k;oj8E8N_MW0)W{ zK%=?+dUWkKE#y!C2knrloOhAEf zX)OV1h2RZ(owo(QqW@-jQGmCr&cVk6&Ev#(%lK~>VW=7->^=RH*X#|+t0ipgZ${V! 
z+$OLeIz7tYaM(ISR6KE*BUDk%ZCVedI14q>OG6 z>uYL%mIb?Jf*J#@Y7!TQw;7o}G$=EU0yf#{*Vh$v{QC{eIw|hQU|h)82CZS&E?{LH z4r!p2Pw|F)f}^;$w%GyVy&7cHjv^B5VA=2^oCcSl-@a1CJ{}oy?vQ$Ham5eed83S~ zhYFp?s1gZ|F&*>isG&9#$shbk$Xc^iwjf9eby(Kx-pp9cpBet1^{r!5_UX_d&EX2u%576V$wNPe&j-IR13HKQ85bJU zx+{03E1*e)WK2HAae;q7mF+bIhkv!D8*E_hU}zcm%f^AWZnq6K(BLUtSg<<4F0Hu4 zx%qv}fAiQ1qNT%0uzUTsOIblmPLz`7^Mi*vYV&r}Kc|CEyp7a)n)<2D5;PMkNnoeq zDwtDmu9@er|Av8lYhV0xnp7PFdnq%>We49D92RNqokHL7ZKXA>I`VT_n}jhWEc9Ruz==Rsjy z2vP?^tii(KXN9H3zehw^!ABV~E=tb^{WE<8G3mo2v)Hd0=cAPLXjY2d6{f#l9v4u^ z3UG;D{&oEBYjiLdQyq>}>CuKvgV+b_=e9#U{QSGW(n-(Gz44PCeh+jC=)h*cSd_A- z)xmBy+O<{%a17j+^Pi5IL1id|H`L|?A7plL_m?$+UJQ&ja_$7`z9bt5G%`#VsFpuz zOI;^B;x)NTVZbKw2@J~xIJM73eQ2Byxqx;jx#m6OFIvn;F|#V>H1nIALMe?y-O(5Y>%UsHTLVzqd0%{%{M_NqADBgw+ij`lO&b zUB^t2_&?&NSE)sq2DiaD{8_7LKxqj6heC44xeuaJ^b=R8+ z6u$p(-t0ki9jd7WJg1X?^HTuo`d`CXfal%+xyc27#JitZcoQL&KD*wiXe-?g1&`I} zGQGR`4E3cmB4ks`rEErbwR?XyzYScz+}eVVHPFdbW56Rr0hd7!dx*xa$VhRDkNp=x zR}IGxW2wSMO7(hwPMR1XIJNEsk2`7zGnOjzJkis$9609d@2&h*KSfV+i6}oGD|9wc z2)ZQ$@5dDnf;W>-f}|*z%Sl{}@#2TK_QXK`*5T1%^~1~w$fkb>m&IOpr3c8W9vIq7;Uou^vW1Em8OI7`8wv?Pb!22LZDX##*TPGG z4}aAgGU1`t>TArG`=v)bpQfjLYj_UuxjK-QsiZVVlu(PW1-`ke@>EQil zM>zI*n8NCZKR5~3DO5A3O#Kx68&MYV`I3sw#EFhIH zT6Jo>zNHA}9^Op?MM6*?*AxL(0$Tii(m9xdwv0aou_?vHC`r=KCQO$N!B4&mE}{vp z@=Wi$E;%!AbVrN{4M|X&6;GLJp!sPqg&E>Lw9x8Tt?aa_;i3CgSOTk-%&7y-ao3Np z@FJ&DiDaGApEnjK)k<>DrQY>)TeBncnozy@&D%X5K70MsI7`a|YD9j>HrAg}?J@ zS+D=-sAhG#i0VB%Tj;fkSL|%NpZwx>YYitj=eNrZ$%7KHn1qx)?=GqYgRDwTDQ?W6 zpy-f!Vz`6ch#BXhyfIM-LjwNoD>`=>jvou%ra`g5V&ToNS%NtP1%F4&>FaDz?RMa9 z_bW&a5)s6$J6qMsTjoLD7YnTB%@N4)DmZ#NBv>8L4{BD<7++kv029yU03IQuoB!tI zZ^Ef~0;!L=$BKRpv6#oZ$}oK#{7bnU-lbJgS;16c-&Md2)i5S!`?CT^`kAGSw0q+3 zMv|B7N+;mWtoElI6uwcTq-WC)=-oPpnDHHNaLy?O4NQ1KEgo$mp(YeZ=J`^N1^4i> zVf3{&oenv(8awBIzQ|t36iuQ5YX~i<)iP+I;ux zKd!H+G^cPvC4WgawCHFT{v)pnQfWE}<^h?E(@&KpY2S`7A0uV80W_ON1?C6J3PrUe z0ZK4pGcF(Rx>KgI)LAOxn4wWk%rR`9dIp>9~@#zPiiO07^Qb zaFdq>*(2>p399cNn?V*Pwbr(|eJ}e{(l;@10CF(U_Yqw}~-RIVHs^T?y4OfQqj?CX`f 
zJZ9ieqIjh+kQZ>839g%!c!%1QhN>U5?{zRa>PPZSoUlF5^E9S=WlKut0XDzJPR$)8 zj;LY^4I<6QBUGPp6f_zMzp}_SDhQb$^dhUz(?_sR?LL5vu9|#UTqXw*YMksuZw8|0 z^kJB`T-Ba41W*J;1<_#%Z&6=Sju&_1>CGd^el`e@N?(aO{-(NwI#}CuzY8tM;sB!E zIp*5AfEuEM(vMyZd&)E4)tnP#?qevgjtz7D)O^S9eE!uU0a-D!7Jj%l zd!0^ef>v>4!hadKiQ)ekIi-Z_v$Iq`vXY@IANr(gYDsi1yz_wG&R7QY`jWS)QN^jL zvnlqAx_JN7`Se&Bb%+=88gZ)-ZFVC9jXR_~-8@g*D+wuD#h>X*TV&OkrO*LDRwfZe}8Ml_Qa9z)Sp!?tz+1a1YU@BQ=F(DG+n+>;7lgk{5^(O z*?4Eq2v2x=$BqOQSH4CaEDc|z$cu|$KyA${$xa?&-Aw?GI z?OYSut0%>Vsj=Ub;yQztXZl^j@2s;S8%qBWGicf`Dcoj0SRAt*c-l_8R7+bL?JuBS zs=tX$b6<@+v$;1eUM#5LU#0Pn28#Y9C~0Ul{k@V_(kpZ3`9Q1ipfq(W?n`GRr(>4N z?WsVf2b$$a3EOw`x~)gXy5|FWCl1^}S!X2is3*Y%3FZgh*bU_C1j+&*GWx?HH&GR1 zRuku%S~3o9V@a@a{zhcZAjJ7G3*P5x_bfe@5*=Ka6>ezKreDVka{TRH8F}RTvcQ{L zMSLgdJfe!rR=^LbXka1_(^=*focpXa{{!d5F8!##jqa?Wr7~`euV9@eDfcN=Q}Ij3 z#WWA$whM7DL6mFw9>hV~a>b$@f1?h86*#N45o@|R{1FWPC`1vg`~5rXwKJ8EdyAv6|#$Y-)XTw>_#g(~kfydd!5R z_nAL%uaT}6pTk=o5S1UP^|Bh8JJ%^tIN)SDA$uj*%tP&5oT>UKFj4JvT6g zkv`^c;Gx93&y~JtbPlW3HQQIC_cZ-zXJdvM1af$fG}-cl+~U zzgvcKAm-eRgPJu4=j9KzXuE{~J$o=&?v`kabpwF1=A6QSxo=6Z8B%2(16}KPX4WIWcQgpvQ+*<|l?7!u6WE0Medjx-rlf;i zA=G?Q^tqHZe9D~phdN<^49hxkWKqX~8CBlt_}HlrK5+C+J~QYLRkND*$r&3qIj4MDm*{$`cgYD?Gz~500GY=}1EduF+ zAz!$z0&EoL6W;OwF3P#eKIaQnAMAa8DwhC0=>rBSA*v3loA-A2CO7~irSP2z|82>r z3ogJ#riJL5afYH!cQ2H$=bxD0VWiQ3-<%KXLIoaF7=>Kp%duk6=GSm!dnW(nD-N`+ z5rNBzHor$b`}N%NkLVc}D5JZ5zlKO^SuJ4Hz$Xuc{Z-{9!vz%U4DydKL0B@bTZ1O> zdwP`yvq)~qklC^a`6BP#YeccmLim__BEwipmHRfil4LL?4pb3$JZ<&A)VR5<3(SMwqW67`~taqk!*20rziVGQfkPS9;kezE^$_L zYe#@{QPduk1d$$dgulm5On)LNBM6_mp>@WQzE--}Wv&Ss#06^*n^T!be!UZHvvtde z{;2|#B>@!V(+zVMNFjM*s0U14jJkJQPHSGD;{H*|$^?KGiq>j^bnH+xCQX!m-;X9QUotB4L5+MnlcQ zef)8fX0W6txDFNQ`9V7Q!%{*C6%;qhB|bwaW!^?^+kE5Cz#ifg;bo~gj~mNgi(u8u zsOqO8F2YE`oT?|fUdkSuX82bFdoPu}Y6EFJXJ-bRjV-=BZ-DZrf2m@6X+0r2lV*iq zXXzNW)$M;Wv)ZKJ{O#4Q>?hYpUkB?E$nZ=41elx4&L92r)PyBBNiP= z{~#^bvVurDKXImd-dBG%<7NSZ{i9g#nZwlFheynPshS|)ebIIN;o`8*SROICweA`1 z12J0zHwWW=1MLPEk+uypkKJb@k*&p1qO{D$e%!HiOR>us&23AG_HadT`{EC?rP`jx 
zoxEOq9rqUO3^@w_@>Os%)!izS8gF{ft4>NFjx}4=o;WEX2TxKA-~gjk8%A*NN&mny zDw&7oSRMClq2~7551{(J5Ul`B9VLno5tI65|BT`TAMEGGhlJMHd~^AhF-zIszTVN z_%DvUlPy7Q`hldMG7*mmxB_}xGt56m(Y_F7r;nuh=q0;%Jkvg;p|<9w;W|~M&qi(^ zZO(yjVMX|Edd>FT=qP*-B4^a{c<{A#tRLMb8RFD?!^l8go3bFrzpJMHzLBj(dzeVr zs;`*{H3ZX1d`_l2aDdr?>K=PuVh^_9MFs$uYKn(+*hZho8n$Rdi0z?MclTs8bR zx=x_e~a9X`@L zwmkau9-P_7F)GLh@!X8F_v@a9>&o!+i`m_j!%ytAdqU8Iq!7_Qy2G`q_^4Yy`o_DD zBQ(*fH=r~Gj0@$srtxTEwYy|DVR`4OhaBm>vV<&D)OSbbxy4c$p*o~O&soBI#lIdo zZ~8oD@3dj~m1>sqNT_pf3WkYJea6oX1uREIkQM2I9l-~1c)j;q@VM5RymbX1@AZg7 zyau@n4Se}%@XOyF=}qrtTWWH?ilMP*+_wN1??FF$dWVH2Wj({xT45lxxuAk-=G=hG zc#?~i=U`P{OL|a1``N9tH-Rxgf|=W(3id%-&O#G1Pr0XdmWD%VXi^&#TY1 z5GdVFD#W#b-cG&VGC>4CUsJ)JcE#ShPO!0$AK+h$_a%Fd5mRoD7w8TYKyz4O8lU>NR7y~<;)IT)AC&}L;b^E=#IGdnx7WGx(6F3 zkN<8f%i??4U8M3owQ3|#jRf;&D{go%wC94^n?@HzbeIBuylC^bA948*my^_60}_-= z^rj!#XKvYIw)Mb;YEoWtoc8eb)HB7sZwOdu`M%ZDzxA;hzs#c}`d)s@o$n0JWS%O8 zVv8+1GWql+O&(lVWQB=!e}{ssNPyr-l@|*{BJc(};X~2&Ew)J1PNvJMC^=lTBSChs zrU$pI*X(7GDxrub^GqLxYUG{BGjq_2&WlaFc=Jxv>F?D*N>#0g#85`#^o#lVsi%Zx z&FLi!K@P+3J0EuqlxK!6&2P5LT^O6E0D9o@oC08~tA5`+vt|DEt`y=lzO49U-0vd^ zhhg=)&~GDoRiQN_&C=#Jo9X=0X2sN@l=&9n5^^1u+#5vy`rCp$IRy9S@81o&B1Mr! zRm<)|L)UeDdz9>JkCWPMugol7RapGXRdcRDl8kj^&7OaRql`G-+Y*m1vC6u={)Kf! 
zY0!^IMO}7_-*t|tqoyo|q2wQhQW<&;G?!bHt>GLR`Jmqq`*d=f=1EC~ii)JjiG0R_ zXz#R1hkO$m#YYJS;e^1Tr2`V zAY*#8uwHiJz_1tMqfLPoEzN2s=|l9#1)o;Sg#&7GrKAwNjkGITHc&!D8qmZPSM=G} z(!G4-uC)tqv<<^|N=U@vx^jfwOtjsEtRCL_*prn3FFH{DiW(fHvLgT*XDR83ZV7ry zcnup&JsGSH9WLG1Bs{#0v8Efp}^!n46CZ-mD)ioN2ISaIFtu7x+-b;Rf8ZvBkmdek)5ByKoV*ynWE+R zh`UQSCBWk5&0QtoT!@jHc-U}UvfN1eh+I~Fd%uaB-LEju^b9t)Y2mHx3IB)1iJJz-abeMEY)duSj0+WN56`YmX10tLoD&F<(2;mPmm9 zoQTDPVNj#dh7b+{PkR#z6qTjX`pg6djta>E#SZ)I3d13CjB~W3i2G6s*xj%nQCNHI z3-xOzf~Z0E0}zJHSKhX8dNLD_q<-Y1bwA$C z$jFE?U@p#Gdqw+XEYG~)fIj=*3>73rPYl~T|7kmMy1jJ0+iJy}6})VCzubcsKebYZ zL|v+uo*|%pjHgFbC?;22#qGndUDTr{CA>{+g-M2y9GZg5G!UI>H#YKL`*$SW7A%&Z zIQKHI^yHJb;&n?>hBNKxsQhY;vV4xZnzhIg@bePi6XkmIIyZ#!;^aD%idSSM8;^>d zX!`MA077~CRE8iAy!@y5bNbU5C1jKwbpU&_Ua9rHfPt*^spwapnJ8XVAH)Gk0BqFNxU86pz>(dBZ4|*>CgLU9H2)&TeW*!^n)5s4Uvo5m{+amS|a`&5i zxe;a1+p@(Cmh3hpCvJgAXHyE)&BNF$+`R_S@SY$d1Ai|w8G)pVEPWfN0mV&*Fnms# zc>T>`R~+d&6$QFc`WZ2Fzf21uDW_{oQ{f+wd1J%0Y-GaaI_FgcXsI5rA>a}i+HnlL z9vkex!)E`%<;h+6Gb6z<4dLkz-Y;sLkze4BnvXwjkt*KBxfXt?w|X*K+szl&NkuPl zy5NNU>Rg3~C4!KP0dd_vCrw{rR->>ZY6$9^zgV2-veI{|deUoDK;L=ZKBRVtP`)fJ zg1Qkt(qm7h7x_AZ*nRzEJAf46IYU`Q56f;?L{P~dhqB}}JYt-x*>=3HsX7-~8o%u9 zFKO!&I9{3XLw0E_tQGASw0kRYk2BmX;|KD@6EI}bTBqP8uRV|Z(Gyv zbRg^U!kZG{n=Bh1`cKoW7{srQ-Ap7~3!fM)33l)(7g`^`_K`dsEbbpq2>iFdx-nfb zZ3_`~qMovW`X(F9!>_XG%jh&PIXDgD5pXB?&0kq7R-ABz*P8JJZCE!@;49m@al-*` zfeka5tpqdRp!6s@=yV|1<R&sHwUP(tsHVwH@ z4b|}*rT{4x<@xt~jUwJg%>7`ZF!B+=cliJ~-kNYwura;^P~Ui7lpmU;N0AYN+i6Au zgJenwa%<{V$vn#&|BQAIq#7FGoOgH3pimhGASq|@eXYa+7*U}OQKSKP+>Z=z0M|{o$qD&J$ zH)2_~8w7J@zjHvoJk%rO5y)A(;DnZ)5bw~Q%O}4tDi@NKD5U__4L&&sIO#%{(p^#| z!3WON-_eej8P^+#8KD*goR}?EclKSep+xJWryi1vXnnv05!T+FeX@?vb*=tops$R} zC#X?>el}m*-+s3^nSFnAm=6_-ik$eXKNafp*v1E2WtQla&V3me8X~pvkSy#%6}5Uo zf@nZs(t4>>Ph{aBL)MVkQyX-#Z^^^60^fpL5lE4>I5~Xv|7QUvdrd|0+;<*T_b4ve zD}w&~VV|#09Uk_uG{@Lpjo_|$*YL1ozQtzjIx@Q&vSr05M`IE`PD<<1K7SrZ2^HC2 zL+&g3vhj_N<|Bt4#42!D!`tX?mhVGIY8l)wa8X6=85tJIb zLy)ck1nD#YX%JDm8%9t{=?3X;knVS$|8>3J&!=5)=$n0` 
z0eWCxTpNf)kbeIM?Wo7WPuHb!BVh>Yy&o_j7gHAa zf(BecaPhDIsvz2z=r_ba2@@SqZ|Ja=DETdq0jI!-#QB5SRx%I`cBh%bY)v1I%~04} zXXCt|7hKhm(YIt!5tPLNSRST5XT(L9Y<>^>F^Bc$^a036>V12_Ce%Eot?&46!!wpI zBuNAq(nYkExqsf}bBdD}Xv#1A`MZ8PWnKEAU%IrS7``B3t_UoOLy!cn5ItX+e1dIv%50A`E)t5e!O1EkC8J2;AlK6mdVDtyYn_u7GH#(u&(0YEa`mJuoSClJ>FoF|QL}B+Y{t1V{7NA$kxdEcESYiz}|(5s9TUq}NEGfLVL zI^?AcWUYknU&u|Jl#mEHh00B&SHC7+Mlup65oMdnskU4^C9gpEL84v@G^s$^q6L>I_0%V6}&}|YVHrGqU29@4{IjE7GkhGVKxrj%P)9X0UUUo zOsOysKszjk?!3D)4``KeYAZl7rP8ToJpm#XjKGyMXXh@fVF>fZg%EA^h8FFH6dR(% zs7bhfM?rOfh7>3KjWAX`OW&^rg{Rq0_LdxLEo^jotc)IazI$s#TrXD`q`b$U?C*@} zbluErO%#3IO3S`Kdk8|Mou-*b9Z4@YoeFBkvXK9J-`GXC5**F=hLUrT1ULYLQff7o z96c@UjO|Y9loB<_n%o)^pW!8SpOyQ5WzNMGY~j=e9Q9YG@g4tW;mfXHc*x}I${s<8WblJ2w%Lwm&XtG9Z50c`kfzx=K)1UzAOLK zeIN<0c(|oDmhkn4YjM0>!V%dwM|i&Nku@nH(!KMLKF0v-Zu?Qn)R8I93?7Z$dx{)( zon)h(LQdluEFE6-`VUdyj&Mn5QD#rOM@xU!yFEAjIU+BQgZb;lCS_Q;ret3piBRA8 zim*wu*IB9RS!V*gLUB(f2#htrjJLJXLCl;x;+c{;()%~lQ%x8DxE)^y zrll|wpZqib>mKVi*BbDJoJ}_2@7u?pJO47Z7LQyNUn>~Nt)4%?n*YtlqH2JZ@PST^p;Bp?vc>up-vS>D zP;_|A_86wh$?RJ0c3MIBwG`0C}ezHivLY43QLl2zVlGX;~$-=ub6w%03)opbkKL zSk-jj_S!wMEnLeWDe}92BA=S*I{omf^p*;hNcTC759SK%5V`aR52cxNWSe9+HO`Wu z6g1UXc5JzBm-aT=TWLUUB+sFX4|{CGXm76xz(X}_rncho3ydI`65&WLHDxkc-=%X~ ztT^HO#IODtPsy324Vm{S-Lt*dWX2XIq-ivdJZEG-$CXh&Gl>HX+Xa`+$zoq?TY8WK zpZ7iko0-4@9!T{WHnt03P(9#<{hdXO;3SBc*ZvzZhGyzIiXlVE-t7gulj1f$3M77i z`H*J)3-|30UfY=};-uXO%Pd!VX8+t;&)>`Qt7ioi~;t`9Ikh0#XD5?45r zThJfV~$Zn4g%A4&LffRn!T&Lt}?7isQiAix@YNSqA^;wlADdhg!< zR#16^CAX5#0!$o6Krd9R(xq3w9|-OrFk>ZdKonknh^$p~RtLZ*$J76g#EdpKC{X=@ z9I3U4#K?#*!0PkzP5GHh^UKNUc~LORInOph3FQsNbfkZgwP9o;N;jjYL&#w=e zj70B>G_WHd#jarwns--EJHDtYao6y@O=hJQdC)sUgH6uQ_@pnNs?nKeIhl=>zqVhx3)S#v$+< zvDjeJZ9nNkAV=v_7(*7-%hoSWA~<0{tcIV7#4GFyD=dTFR9s9Yj-z$t zkI&mcw$?*!SA+gRob>6Vmfd$a1Nl4&>7~bw;b3Zl*vLfwu;%L z&zpE>FC6$ZNSL?r;!x+4LoQ#MSmEggz2j3#KZhy zWc5Om+=|3#;g5j=po=I$NsM*svUaAr?I>1Df@yG*m?ga?)lBg z?0>(JV-quaC`6^T3B25*-Y(d*OU)>^pyuhNaF*x%o{oVwM8oua`geN)kT 
zb#8kNTn#sx7!yEehtzcxbT=bP$+i<$?>N3YVfpE?w57RgsS{P{xBM)qIoKvv_Jn;v z8o>~H8K_9C3uQh>NwWw4W+C*rq5uIDPp^0!fIsSnnsgN!V7Sc;3||=t(4_UvNua;< zZ{r0CK1$J5QBbOW7LWl!cIqNg?T@GAplInpu(`46Rx3AK7BvNgCG@drwQsup+pTXe zW#bh&#sx|rQ@zEKi@z4d9i`Q16ex7j64YLdSZt?4V+g>ZL$h~{F*O!?Fxx7rQxq`3>L&t%B+xvGGl*5rWM2ou1Fzy;0nz= zCk5PT^l;pTGU0kAsyBEbLoZ1!&H6(BkuF=T77c)^l6z!6l$nYj4=k8ft9SGyw6R1| zM2c{e<&`+dF!?1XXhaTy!JIme*&m!|k^me=TDmB%DfguwMV2w`^_$0;Nlrf^eo7>o z0?RQz8}-cFT#@OC6JG%@_&uh*lzqR1PYQ55gHsp;zZkWPfD&D~t|X86_u6aN^&IZt0bM2l>FJQD6%5rT3O-+B%B36kvnM^$M8K^O${!#d#q z^P?OY5Pyeasytp+Plq@!aFV0S%;v*QD8h9$xcxBjB;AXVgN$GMwU|`4Oa@S6 zZXHtDS(c56dUrif0T}#D5zDhF={>xuUi+b(nkA(tN${?t2)FhefJA}MqNcT8;thNi#1ObzXwr|i&3ijp$$DE!VTbFj?P2KeF+r*mO8^mYt5 zfXp`!Kb5!O9#W7@F}OG!`^-%Uno z7mo)XpuVttq^_4uduZUA%JSxOo>H*Ecax%8itp*Bz5^y)S*2`YI+z;2hbdcS0XrmX zN7hRc*bTJ4i=@vf!AJTdbpHq+&YGN<&%D4IyVvPPb!N z7Js?-1cX$3_|1Lu6%gip<2WRkLuMlpM_d0>j;X5fOxM9M6B=aPYRN_Qr;sAVa{dSY z;KJzBK#_9skcrRw#g=1`juWVx^SApfUlN^xa}Po^y+7)q-?b=$STkPZ^t=34`tFZXt2{IAf+y4_GZok} zX5Og#2Pok^ZS;DXyO#VZ@e*j$E&{0nbv!o^q&I6s`;P@uL7(R5AZb4pOxN;Qyi043 zQzw}N+gT^Rc<&0OryIk)G0uSdx)iCM@ka|h=SfHms@i#K=EJulZ7<0fV*IF`Wg>N+ z$7<%xnm6SV1^(mgw*=fV010(3ek#-zJHTOObp@+BEm5{zf{D)8PQOY9cfl81_qT%r z|5)4NRFcZOIh7=N=L5O2>{-hcxnBL$_yk=B;r$^G2Un zn|&}5V*Xgma$#6n!|22xB#fInBdZX!a=>xW<4N3BL$X!C@DM*pVsoWN+d~+d=&gZ~ z9o3p&3-EK~z2z)eniAA`e$i6XqrPCCt5vgV(O;R=O603&7IdXzfDt2IG?S zRdDv5tZ(%3!X_Wj3;y-*K1cA$uFmtLNMaH!^zr%vG0IqGg%d%Ai-tp{4>N?AqW}b`4tsL%IZ45Zi6Y`F_jqZ&|N} z7m8tM${}kLggni9j8hyqI6YcEnnqF1w2yy)a%M3SUfFk5e9m%F|4(b9f9SKfD0vH^ z8zk&(qW(X_gn40Zfopa*Wk0rX1^s!XYouB*S3LN|Zsj*UMH-Bg&T#4eae|H0r}?ED!e`c&P8$G)tGe%1 z+>!=bGYP=qeRNgLgIO&>J5H`!#Gs94Tp+e`wa36{T~nB*=&s5{HA~`)0F6}hc{b|X z=-(F8-jJ8ySDc^tP1b+&A8{4rv{7ITemHo!s>#_-()1{DhFMAQZy~lK`7k+nCxb6Y zD+!f5nK#E%us*!6M~Y%vpaYSxN=KMRR82ASzDjx&(jX7$K*RxDW#0M#@VGY_YE6HK z?=>(rK}c-@7#Mx60C@^5N=DpwUKpi5-t+FdIZ1({J!!i{9s^X<6*rC)7QYrI-`Qg5 zgA)VyQFx->4SW*kFhPWvapOt~gm$9$e>1&fs{D!Iz?K{!stm>kA&Dj8=rl&t!Ne;& 
zZQ&W1?wW>Ojxz+u8cz%Hs#od>ZHl-ID;&TDp#39_F6!?s3z9(`J+;D?dfSJC;ML#GkkvFb6dHXUx}?x8QEMqmNnCFJU~Uo@1-^$8n6ZN4 zQvaoq2=1_F7<6)|?`ff^R{Ik#$x$Eh@8|wHPT#SEtAWunWu! zZEf3uB45zSNglv~A6Q9a(8xj4w4JKE6JPmyud=DnK#_0F!}|cfx`a?;J<#40+#&hA zK@yY@!*S%!W6pQc7v%_mk^8NhQ(kbkJn)v92Nz{KkjL@$@4JHVJoaV^1cN2ECM&pH z5uhS7_~j^dPkwQofo@rpnqJ6e2d2vEiiO@172L*a^52bn8;vn1i6n1RYl3c;tigct zXOX9x^278GA64%>!PJE>H)-Svw$?hxrac|2)pwCcAAa^t#sIc5_R<)5%a8)Xmy3kHCKhTsV~lu6B(;d?o@3IM4aidCWdPi+fyl(!z~kaX6ihGQS;&ayQ-q zx4we*REgb^wqMiJ_>w-cYhw4@eSKdniphiD450t*Pan=)5?1nX4)(2hpObKdNe21% z;Az^d7h(}V8He{5xCgb+yAhrsbpwdtC+Ii;Oc~K-7}Sf34J4yH06euR-*-CDqFsHQ zpPl)h`dPu|#2-PSYWmdDGzVnkTN?a_jAE}d2SxTjtMcx|k*9K#aXE9l-Fce$jkiJU z=bxhtNJ!E`3%@i$kaP!SFp`b39#XZhG?LqeWc@+)G?{WZ0)lhC-?0~ogYN{Muik+zU81>156 zJpl{+B4^jt(l|s-vJy1Y({hRwnEwLyIAX>aLg=F3AlQDbik)Iqu;hR`t|Am>#S24G zgUZ%MLqh1JpJncMy=Q?EMOf)%V*dNeel^5*yko>U%>^DTKS>i&gm0d<$TsvZj>YXe zBPsRAcek_YFJ_l%XvW*?S1gVvr`q`a;6FZ}?{~V(<$cfk^*E`Sm>+!4PvOcDu2x+> zG4U+J8xuw%eaI2WB`>=3FFD3Y?He`&akg)Du%)#-{>ss0qQFUOCP&X6~G6{D;`jOag_;U*bsjK z9@;auV+D$|Q$r<4QSasL@L=R1L(JM2@vGcEV5>NmQ5r1x3Dtb9I;GIF_W#gY(zrJa4VG3)bC!D#)q^n^Sz7&r{T^NPb8lrW9F601 zYFu>irBr&{1WiGnM^C^BG?Gub0rhiXWlsZu8oK}GWy4p}ku-Nma=UYuWpH{_xTJCr zL!A^O`b@=}7Jfs;__vYx*!1O)?mCI}Hx^*ZrH5x(}Z z@!6+0-9dh$h3vJt5eXNyaq6*yRzIORsCz#zz~($DFM$P++M%LcQTWbcBKH0Elh7Ir zJtQfC>o#Cp@{AzgXofwg838Enx5vQPI-LefxfKx^dJ&`r#$jmXx|;66P*rc{nM7vB z1rukcu;#GJV#6{}1Nd%K()fw?Bl9>NQb7G{hLnN5o)$Fc^;sE8sxLwpA_1t;Fv$}YL@A4I0lL~aB$wz0A7&h z9OfhnzFB2vVkrQCAJ_(U-$UE@k&t0b4RLvWY$Y-w`wncz0`$}TcJqS5!yZvHuDd-> z`J~nPD3T}*H%};T8iN&l0E&d9t{%o;@R6bt*Z}PG;|9|-UIwc*Ru(`SR8$Rq27zdj z_BK8RZY0kJdZ|EDJ~m{#puaeV9U^(=`=}Zc4Dog`NoU3WGR7&lDFHUWZIlQ2do@!p&&g0$g~ER> z+M|sHNa|apub#-zvUz)y%%9La!`t|JRR;>1ib{Bb&r z*(L+_PoZ`kFZjExTqms+?ZbPczTQ|Hm6HKW>0u%odjNRe57|7lQrffZ`OgyeVMm6h51jKWis+61@`O1x0+pB3kkz%5Sc31-N6}rnCDcZ++=LAL0yk3aCf# z>b1D`G5k=w4cvB_QX>fY>ef&d2KG_;4E$7m^E}uK`PW2!@!sWGFD3XoP*CA(KU051J&Z zF|Xx~(T$JQU-4-|tz~bgq!c+sy&NRIIn{J$M$ZMz#8M#ewMF=*OJy 
zhOf{Gz}e8r6DD-WE|05;)Q;K03+t>}@uW`vn>rXBAmDToWG zRa3v{W0w;aq$FE%2Z5W%Mti9NUr^WUeqCwb*qpOVsUSbIBvz0kHm|W+9iIJA|Fr-) z@V}LzsG&9$qXs7somW%sm$}e1`9=n(iNZ@NJfFuGsWAK_M$fv_*#v;MgIP z+d}+>W^{}zBjLIoq;JQ|qqtRl3iS^c;KcMs%bT}~#_J*@8;RlOkMy`Vro4gY;KM}c zo?YsxI@()23>O`6eMA9$3RpiI_F=p1+_oUvD~0#&3yHaoABPDV3vsj}&F|XIC@75$ozBQC_bY zdAt>s@6z(iXn&F$E#QY`nSsRsi*#Iuf1rMcUsJc0e>p}Z;r(!d+hS$ zPDm*SV!Kwfj|&zSug{_Yibo&1kxYp1NpC%wR#_XhD}AVYMR=ZdTqIPI7FklZfC?aM z`d~QU^i%t>Ib04SsQ^xL@|~|rn8ax6Vo^=^iHy=yGE&%W#-aigN&i7Gn9jd9WhsLc z5HC6LK69C~1LN{QQ2=)&$7)+I$wN3|PEn_}=pA@@M7Ew`+Sqd#0ad_-retbKMlHB< zK0;2CT5(VkAm#JE?5iAzg`7~r*&j8sf(&qh^jQ~|zP=;7*Cu*T>krE?J$%C!E0A&b zYlC-*?DO5NXEJ-b#9l5jHaJF8R}5sp*`2>WAvou_Na$t1q}Vh30r8Ef^FR@n1847f zDDo+bs?``=WAqcS%K@nU3Ju^^n9huaeM?u0H{}2{2d>n~0qnyEN`g3+$*(wt)lW;> zyAtTFdf9qb?8ki}1VrCAF8BzCQY292SFP20{^3R{R~|q@e~dMBhi4 z;EzOZNU{dT8v+u151^hc1(=;D2KA8=-i9!WZ!=os(p4WjIk#}zIe1d`GX`=noE>03 z3@h_L6HH8Zh)V0(#*xQN$APUyMZqXw{TB21@Hnuq#j%5)6!jtfVbF1e5YE7_ z*F+JH)HtC0di?P0kzy4V@Kp+8v!ZmMd-{Tb)Erg%Vam+OfO0cxxxp&@lDub!C1O~+ z$^eLamfpRvGUbu``?uHZ2=6M0aoR*U*)r><%w0N7F+jF2W+~d6+jCIOP+>>qkx%H! zo2q?SlF{3)x&phD(DMG&5BeBW)Sv+_s&ro%S3=VdNXO29B^Bme7R9}2ey|;L^hiMG zVQuWmCW_|kUT0>aacxIfAwc9iHvjZrH&b1 zhBjhRIodI+ad=g?(g@eI$3_Mk7m{(68P`7+f}6z2-f`#2*b|qNLb$*gO@jFHa|%I! 
zRc04=(|?0{$Ku~ojn@T@N-Va#aJ{XS7^3cxW^~=X2Fi6S`9Hr0h4zru;g$-@XR;6@|jn(b)@Znnv>Drma8hl~C zp`+7yqblaK%W2T(*f7->Qwu1mMEbl#9sOzz~{#l8ukIK1>beQ9f?A>L%WKO8GOZ~T5=e5s2 zh(Wd6mH^zP2vbYW7&jA)47JCed<%c={_@(4QKhZly=2M-YmXp;F73c`7;HlDOyBb{@N zCPzK^c?!t|iHpQ{hapMn$2|p*uJ(D$YI2K`C!~R61sw8|a%3DrxiTf2avT5%x6@6k zZCl0BQCUw%+`@RPx?K|r-~vIS1UOCYOW7@&d}0%r~Ju zI0n<7r8M@4vx32v=@y=)B-uapEX5V5uMTE}^9-cqXx?qHNLnyVeQCRzI`bcPrt=1i zfC#?^xid|dcm{fzIHIYTbJ(fqa_aa*qgR9*eZm`@;0}9Kx)RMH{0%j%xJqr9eT|MJ zNFV6=Af-sBbwI-k=-a3gR0U#a)eaf6?}g6iyCUiI6A9l>6IIT9L~&JD1Y)Q;gKJ}$ zjVS_03Y6{Rp{T1!##(X!nt$mHQA@Gv#LGo4}z$A)F zD33$GN-;VAxig-|ZrE3FOoZ9>w2meuzr%6EFo0NjJ0;QaC@2VVB}90(TK^iYP4kqM zIzlo1ZxIEG6!;6a3uZdXT&tyCk7mf>*~s+0(3;?8%%kg>w^WOn!{Z{ z5`S`BK%`^Z%3!Tk^ZLw($_{A5e2UlFt5fbhWc!A}qOA7-E~5auX*1g^b>J`%W95kO zatVdMWSk*pZIb1F{lHkH=`onZx#DK|!Uo`@1Rd(MKV5`_Qtfdyf#-y1HsFPb&G(2c z(roc-)#$m^;IgBa?IC|?KoX?nvp*iq_IZTPexv|ZG!Ab(bkYUz|1yb!wE*OO6Z!VQ zwd3(6>9-0I8uGVz@vnzM{$-5%a#*Fw(1znJS=7errOk3V~zJ zjVe%`=#~!(nmtzoB96L*Plv-N7{v*V3NLIyIly$qejbegm4_HOAlY>PhbAC_m57Vz zZG<4PeFXkNN%4c#U$$U=L}l?pEp$B=^7ZeIKbUrP)E-4z#e3Cguyw~r2S!CGT!kZ4 zl|c`{WVhImY=eQ+d~S`*Mdj@8tbTypBH`~>S#c45)X7A`e8nalLSKlH(aJv^wz^gZ z2d+IAS5zM+g;>Q=40(Nh+99UO=~A2>Hl&L9-JTz%iRd4khb{_N1}6_42!REUkqLcl z?}=aCWI>m$k^jA%tfvx=QEoiX|)v0Ae z0f`^sXU;9(_$^r2z+tEUp}1U)_-yC}FGwr{uyf0fb%=$Fz+76|V z;5f)e`OwIpFk`M_lJnMI#wo@?)QOZ*E8E29F;iA3OfQr%aWByp}_{zr(#t}(X>lzqg z&7VKBHz_;?N~52v-|JH_Cx~vGLVTVNsI5m%?qq3$Qa>qE%y737*ej-1H~pL-r-1|Z z4Lrclsk1`ZkVP2VO&}rgN{!N2P%y)+Q$cVez^B8s=ffvkyLSo$2^$uT8jLR(@RXUA zQ-k7ud=2g8@|mgVM1z7o1Jr@UImEW5Tg0X8H-vB@;0-RxuZTkyc>vgU8iEW6btcGz zMXCLl3t$poZ7ftO&ly*^??MNDw|6WI%>vwLhUSvX5LILGL_+*h=>gcU>qkvAgo(Zw zQ8W83aQwa-JJ9sjT3+xmk+LV863qr-ff-00$}>vLD0xj^%gwhx1tEKwG&}U&QBOM0 zjC;9ihf|d(+4~OmkG9et0PG2)#~$_SqeuMxd8qvkSIkWqm+c2e4+U~4Uqufr>qp0N za5=XdQ#FW$i!acM`kVJmd}x^w4;#QaDx%w&5EaR(1bQt>WgI^KO~U*84E?y3K1%Q~ zO8n^k-g_lbO5FF)gFqP|!-fNTBcbvO@hta&Y4%mFDq+ovCnNsNFss*XFPzS39^U=i z$|mG3eZ&GG8H<-SGEG;2!J?tKF^nq# 
zZ2HJO=hm7E^gt)V+ojooWd#+1aez5@P+SMtQ-^8~#t7i};h|2xRm}&!uS-KXEO~62r<&X4kovR;Su96V zS$OB8|0bsjXIR-*aT`~vpXfAq7S~?=o8kPGr&meqSMsmv8bR-+aZ~>t3mr{k6I__C2+k5#V3PW($^_Pj z+QJWy2mPMr7lCL#-0fJQJxNGHOX()fCdCjs{qn#Afni+0)NGD1r)T!O1RI@4_C{Lu z`RNT)xl#q={T&v{uz=|>jp`z|efti)ujp5Xk`1-1<8I1pA>k7epEuyZbxG-k6v75$ zJG$)OeG5`R|0G&;E^F4**sCt!=^M+QVQ^2Bs3{p-dC&@t3J-DI-u15KCa3~&?@V(j zlwwFV@C6i*dbLCMD#9gQ^M!`Qt6ClB$%9GnuFB`1Aw}zl?Bu}5dI=nK=X&+=%ivhJ zVvy{?CtEVCS~i_s+vEKu`vciAfR}Wd{^NlNaMWuwI#h(6o5p6Dg(c|eGLDHe0$s6| zbZ_m9{hjo8C#>Uu0##sjkl`ZO(CB`X4ecbd0*6DVpo@;P@Ccb(hV^m$L|rIS5h`^{ zlFey~S81h^`+CAtAf#vmNHzU5bZZls+bTZ|Cz&{At3%ERk7oOmNc1L7XL< z9yjJ)BNyXT+o^04FwWYRC^MD&DVc75@Hlzi+F9Ii$z?De5E=aLt9Con>zHBr$jEj| zV{rkwt!27B#yE_rXs{d#s5-L61;dzbDKsAjM)>9o0~GSFn&>n(~iTbkK4?G zd_3GGSi#c@-gttS(c_tj_uG!7fS&alE*)aE2=@x1Vg5)5Yi3yktT#%3;^HDB9Pw z*a!J8s`&l}b!Ru)7UCb2&Ns>j!>lp4o=gqfvI1tIfD~IIPyg3n!I{7q1GVACkEJ(o zB4FxuMM@IhYP1`gc>P{)+g4_Si7-?*J`G1{R6>uP+Z}bxi@E#nta5MA>NHj086}r-wtgG|9WSLnRTg2nQ#V$>7dJ18Q zKR*9hV3)*VdiRe6B^k1qC=fnKn4{Hb%?4oA5Y1PxV2#_D?>CXf(M!ebz> zzjsCvMLPv~|IC8~XN)d923;`WHA-5x|7+RSi~iMRvcZD(6?iK0F;KlhXD{HDCdF7mC@?u@)FD;ibvqxu9X^KG1P6)-e!dAs+PMPQQ?*{XJgdL`_e=ZPo0sa$ zpIg*6;k^tC81VN4Bh4K?rerz_u@VZ;kdUG(tIhG;ZlYF2yy^^R&|G!v{(h7UJzoh-dWwlRfr$;WF;gn3 zb_LQLuvlGys!xS>stl4P(NpjMXu<@_12y(T57~@tEF-R+zZDA6bepnS7s<5roVsqc z`Atl)O$t}csVNp8Eqm8RXB(W?86y4_O^6;wC4o=%;_dqf<5O!2a4;7mRjxdcoc(vp z;3|8+MhQj=%z3;ERPhH3QYD|$!O*YFM{RiX?KpEmfxEas)r0{IpteaB%FN_@%jvBD z+pI1I?>wUwu@Y||_ka&G-eQ!lPUz1V`vR^sn56~-2T$W0Lnu%iS5+q8qXerErp_z@ zio1`@9asST5?cYqBa6NjphB{d#A=nVZY!?S&Z%#rp77DuOEZa#%K3oQBsMY`K=fSplph8J^V*qqy?#c4f}sy79{>o`kKv#%aYjX%*iYAZI=ws_9uU~Ia%nU@%vws8emmHd@ zul@wO)u?+3wQsT&r+nz%`R`Q*|IPQguIseqRQA4@QsjLFG0Arl-fjA`zG*@Mgz82I z<+H3p!nXT05K;ped)lg467=WCOu@E^GDz`zSFCgQd!odp%ww^Oj(Ii7i`LK1sNRZb z7Ey>3y~k#4h-=kivG>x^08^p{3Z#(}aZveQ1v9jxI5xpfm;3n$p|xWJb-jJraasIo zGLCs+BTaD;_BWmspvh+g^h{ZUcOT_8nrv`zfk~PT6!DNKw;pSsYs-s65L^Pr)qibC 
z2aje%{E+fy1##%Ifnj!cl1>MmTphqJVcOw)Qku=xaIsvU^Pyam>>y)xYmeqnD_LY@@!H&2I`_@_i&s0u@j%N{KuooYwnf%61bBO-f?pk5hI8jgiu9pw zOi-E@SLiV{sS!9HX=wNe60D#d_Bgca_ran0rX>h6#5=lZ1n$$?8|&XR>9@>|@LBUS zE@W~NOZ#6$pCDlY&mKbc&3bjz7cG?q#k3Ia?%r7LTe)sADpUTU1Qj^4rGD5BHx>&G zZC4Cez#_3F-ZJ*jFJm-_w?B&Z4m3!fWHUeK&+{CiOlm94$QIt#u>Wyc%1u&M`_9>5@MX4E zMGlF>{cI9|^f~FLkK%t5);|DQ&-IQHB8Opl!`Oi*OqC^xuXtQfzQTRG}1e==2 zrlZIk$Mg-4#ThdU!Os48W9iGKu(q>uN6a$8VP`aj8`RN(vFBox^UVXF^ZCk$Og){( zQkqNoVZv|kA^j{f$QRR=nvc87Foyltj@0V}tdwmti9>LZ;oye{sK*?Xm4d#4AasfV zX5&4@qXHQ=U8Uk-4yX@CN%86^fG=o*U8ZNiO{^e<@W!nrqfMc56EErvm z$Eyu1HVgUiFfy*(#rTjU!eHhFM-#SX#M``c0!{JnbBM;gTT7Jl4+`|$3GOGQs?1a{ z+^F5L8KP$9&_U0_tvm5gk!a-RO<{a+eMngO&>1Gw6tXc^hFFn2@WmL*v_dIQ-1#Zh zz?Dt4L;XDK&PL{m`qzXYL3kt0U46J{oWh;zQ%MKE^N+w`U%1c;u2=er;d97~X+0=1 zImI48n1;!h!w^rN`ovv~!qD6AeA?M7v5|!}N{K)`+ZYS_Gk{^rpNF?#!V^wylwd(= zg;Akazci;7g1h{*S8J}Nd5qQ6cid1o>8;e6Gfmqx*7Ax(>fJz+M!_-%ARpbn1p`1t z0EwR<#|N&DLI<9{6V>u1mjdL(#P@mUK0>5cSSF*t?df8Jq|^a7(UNmpWS!fwiXu?B z%L>*RC3(ixgVEie4Z?v01LDU1B5b8^HtG)H1Uaq$nnh4F4oo z_$$Ha_)1Qr92vU)l@xHAum6sf6KIU-bt1??G=alPDAHH>6%)0LUC18_OcT`6c&*sL z&I5NB=`YU4A6QP2&)ujte~IICd0-3_?TVYt0DmDcM6MOQk@*D5zu*T2MyafS#c<)I zfHtMB$_|9xQrvZ6Qf`qFBgRQr{r{=i#jla?4wgO1NtfV&%vU{0Nbueu{N zQ(c%GL4POw{OQOGw0gb)N_%U@5gf5Cu!V~=UD37Js4dz{6APhz< zR3v0SgP;%&?|?XAs1;XRL^^3rCCMRXwC7RPQ|5a&W>ZD@xg7Mwki(tsSNOLgw{O}< zyXg<0yoVhmPrzPuiz_hHYyHY=^qA?XdYnM`acSlNY4{G_$$K3BeC~>qH<+`xg;YAv z<9BX@69J*dA6lQU;vGnu>v#t;hw*}2pf9!OPcZkU-fd{di0G`AEI(t+Ph|nv8pS7q zs4-p>cA0TY*unHh_xUevh3)RcD~U&qQ{zTZ-$h@;y;lr~5keOF-ea54PP1iv`6aA) zhnRw|G|PceFh3X36ED+*>idlb4@J0&Xq8i^Ui3-W;3>(sBtEUQu}+kfMGX&_qUGR- zL&Tvv(#=MBObng4w9Vw}aYovMe^T3&O4K!vFbgsz4U! 
z%{iz=ul<$4!gASo568#^TJ4>zq=|5Kb2)J{@+7-qZ7(7C(o7}}*fq~At~j34-^hDl zOjXGJ)YGCIBSKnT8o)9Ku}8{5Y{q6osRWe)^jXA~+M_9xw5^tI|9dFpdd4*gZ?Z!; z%Y6h|@^)6eb6v@Ud7%b)K_Tw{_VyLd5nB!+cYix5hXr&Pcv5Kz3YpBRt<&ef{WHz` z|13ao;j_YS6K`v@m*#f0iksm_Dos`Q8WT`MCaJ0%i+Ek2_MhtFZ%KJi+A*{!3Hnfi z&MfpWTvi4K1HIN7-zKNEkWs1tzLjFp=u*r~*oDbvgCz_7r|Ke^dcwuVk>nS{_MVqw1hNOAip9y^_@>+bfOap zEpuq^4=Jk;_1^nQ=mnVC{AprOIGmjIi3R@wP0Ve3(q{=)q}J~n6P39{VD<8r04z94 z>Ug~{gB@wMCp`EU7d54D*PxQgjENRmW1BvN)X~``fe&+Xzu?;$El_dAmLpHlZ_*Kh zAq4w2CRk_!d4fBFp%k#q;8<5_WLR82J3!-U4!q-1s&c@OY!-6jSX4)-LtArk+g|2O(?k~y6JX)0>w`E ziRz6kT;fYtiGzks?~>wv7lWR+mN!j>y5Cf3XO`lh4`6k7KY6*xg0a)FtXx$z>lXU_0 zXtCb)1Xp!KZ?j-o_04!yfJ4BOPHbE&#T|}2nbnWZZ+Hl?k#BUN*fD^N9dpDJ_u+RQ zk=@u?c)RYWkP3N8tC#N$YMfd6@={eG2Ja1!1mHcAGuCL_4`M5cg|+gkLvCQ#hIPpc zih=CiJ7Yw8v#vFv0Zh9*&jmNr&(^gk_{$DdwE9yzU3_Qk%TddlyQMBr><0KJ7rUyl zw@vO{h9Hg5} z`nXl_`7N4%$_TVJN2!TKO9WiU12e7b+=C%oQ?a~Rr?DZ- zIdt#X{n!P|1IFYY%x1f~BB0|vi6TlRP@o6TFWkFN6sw_HqKK7J*I!@3j{)(nPA#<> zj|?|X_Noy(HM4&bxb&kzwp$Ytx<6Q1{E_1cnb(J;u3@AtGYJ9oaf`1J>v1|}q`EyzNDT#LFJdSwhMMbHZlrCf^ddW%l@l#0gTYhgL z&&p|Fx*J>Cw^HzHfCI_c{(%DMogYqrjld?w{k2eMS-E5~RZ3Fi5)w)AsIY?t&-#2s#8;IUNw?qBm$zh0vQHA5`-+fUTNzbu$yhRdzla(3jiA&I+x zFIj&5J(NJhD`DL%kGu!|{Mu#!0kiJ}c^GgibizZh!P@8aTN+p?+IAO8cgAvgMbJV+ zP~0<{5@v;#uusun4BT9@sIagVlN7z60$c6ZqTs}QVPB&GP7V*t5c|MfWK|rnd^a3D z7&QZ>4`U$H_Akqb?Wt;h4q9__)rJJ&pX3w`d`e!572UA>V1D4LSWi*YQ^=v=QPBpu zTMQaWMN{M?jWL&?q6(=2P(nH%Nic_oBXhXBNMzH4M*)BP*|ko1>X#zgKP=5&4&&a) zq;iFsO%go~F}t&8+xgo5?RxvP6StWfrR?`tu#yT@DjCe0@7rr9mhSzNA1tzu;>vG< zwV=yYtD94DpE%0)KVK|f!#euu6RUt>B_>A$nkD@ujNsS*Qr*~y!YDvjXO7I0hAo#b z+uYJbhQs&^g=)lD`*te<0kz+EzFmLBJoyTNy7s(b|4x?p5{g=h5_<2|ONb_-J};t$6$)`WFU%4zo(A9r zYv$@EL;N+GgYEhBEWK?tQ?>mIp>}Wjcs9Z%`q@Rn3I)`bM<&bN(Eb2z)`LE~SO4RF z#%rE~a70iG(mHEAjOlBvsK26a@3NqedU<9>x3I?SNH2@RYVqx&O;_atMz_byPcy{T$J6e;;i{DG#8beh9GMhchce3E`kZVjob5VCOZ1dA#9 z{p7^}(qof)52Ijr*m^}`POeMv)xfQX^54?k&l>Z<%6Wv{h&B{sdqplNx&9gc%?Pe- 
zw&nTfbB+$3@2+i~)7yo9?d-rnp9t}%cZl=mm3uKJu+(_Ot1590$G2CtW!{r1%&!>tOrp%_pv^8?QgPjgMe&-JlM5rh)E zJivGqBL#Lr1}qg4O!o)H@6vFd|?<6~Aw*|T1cD*lnyMeDfOKc4vy&%6C z|6%=pXtUzZOQ(Lho@uzeO7HmQnq=CBeel$?s$AV571!U%N6$br`G*lPkrcbB;+XwIO=Pqf7HuLzjw^kcE2zYkBUST9$?fTLrG5^z8Q8;SegWZ&6z z_Cv8H1%V4qR|WKF$Nq@e8>tjD)BMq(fGhn^Xij&#bPNUB&Z3~We4^Y2wF`wH10NjCPo>azcd>lbXqonO0@L!_MeH~ zds`Byq};*TZS;@fvId<0hDT8B3n{VEEaLip3@~!#4#nAz_+IqiFO5PjI2KG1ruam% ziyMt_n=H|WT&R*!|MIYy1C89(3be=4?)qMyGedKz(AZTLhvO$eXp6IrhE*K!P*5n+ z(IHqAR8`@SXW&Av9D1%IFF?-$_}zIh|0J(tdaufw3&?2fWJeUP5Ke&YIkm8+d&9>o_r7_y8G{o|GN2UJJS^5GKw!XmeGsu;agU z4)gu*x>AiCNU6x-Sc2Ve%9L8HZM5J-YVkNQx+Og%ivjbNjB)B%6bExv=>G`*!z1;$vU~0XU9YvH`hovcOdqK+Pl6`a;y(;k=F8Bw&!4?Mf$n zmYc@AIHqQvM=rS!!Zr9W@4Y zykn;iDU5Zi%krkWgF(uIlC30*L5QdnCJ1lqt7PU6lROAqeB(!SYjY4DX#_=6s)Bek z%iJpFgMu}|d*k@$>r81ZYb=TShWVS;+AIgn>oytP^08a*Ie|2)gjY!~1 zEqvh&x($Nwi4nEj@n84s-Kph;=-DyW(4LNI*p=_rjAL4+Nh6RYzp#P&9Bmm7Knc@* z{rY`XuG?F>-_{LNU^Ncua??jamyHJZw?2|iy%g^GD^iyU`D52sqT_zC_vN|9VRWyK9JU*Qu5apg6QxAxjks=>iDZgC%%}@2LmY~efQ7SC2(BOj&Y7PRng9MDn&dkCSQdc@&|Uryg1M^W03JEyB+$k5Oa_DB{Rww6>$vXp4~$a@ zgwK&Cu(Qf%LD8^C!@Gm>P3!`S54F>!aP* zwApg!rF%0JUn?wbnFoIo1`n3)RQ3Ozs5rTlKmB%nbbD<4xMuV>Ef>sO0$z5Rbg6Q_Jdu$Ep~FR;hx2F7N;5$Do)3|ETxF%&z;j)k2)K8D^9h+z&ufTrEz0p>;}nOVSFN!^Z;-$ zgR@vbF+Zd?FZ-`;NpVrBWq-+r8ILzNKqI0c$ywcX#zlU`Mi*K0HU0S~Tjc#aG(5XNRO^cJ1bc{H zB}&|sX=GHzk{9>6{$@|;i}GEi|8j;R{NR3cuU&tJ!CpQs%z#DaHP?Zktc3`49wbgS zl5RE;Vb)<~;CZx#tJ{&+W$~RaTE3OWR#U=M$`fYz>JJ|StTNc|%I5}63lb_Yr3jdr4uCDmt0{#0J{eSOG z?}T!oM|RS?a05R+pXM@=&E)pJ26v8-b522D+xa~Ld|_*fVW?%<1n5)Se|LU0;biB` zeg<9i-0BiXR!Y9gi;Dc0IQ+!jP=264QRabOr?nt%^Pl-Cf*-H^>MFcgOd=3{&CK(8 zTS4@9LNP5!s=MFEl7tg-#BuVeG$-E?F3@zbkWTH_lrSwiYdXyzC75XTSoqi({M;%98pZ>MEBN|x7p%qS~Tm~T_c&uB0Gw% z+9|+jy#fu*1wwa+ovFY_vY$QuMfhEObx)Z|a+Sos?n>{P`Tss#GccL`EavVCWHrjx z1v`BK^oD+pdRRA6;`tjiFM{nT!97NrWWA*t0eY`v7^~f$JoPfv1aAK$+l{TwOEx)V zvZl|O(%4a_Rg+Q39X!#!zn5|9=!J6Oe7nZPBN7lSaigA-`)XH`054B$@9EFN2Jmdz z_KAjKb0!*`zDn^Fy(Ot4EpvKLx4#?WDQ{^5>TQ2?9o&aHJ!VNKJQ3zdB(>VTw8wBJ 
zgI_%_a&wyXw8;wv{R{?P76=3Ee|)l1cqs@wOEX?%{gnE8&yM?nZNciZxdyU2($1%r zBu<&Jn7ztp(2QFU2ziTd&b9ZD!@O;Ob`w&8GNt=R>%3VLK=0miSBugD$C{Ot4(0zT zIU%7MD4Pr^N`SGOpAbj@fvq_K3067i7Nyg!dztcFWIW)mNO@`o7vmo?C{|3{6ZOV= z}GbXF~)0Qrk|Gn>GJTEWe%;w&il;W z$C!v~qj{-i+77zf^sD&eF5e#`f*&>}XIPYAzOOU@HDdR+WJeV6=V6$;uEU=JZGh8V zr{E)Nwm68+h51lAsRbq+taM_i3rx&;63taDTNuPX)%x;?(5K@u<5luNH0xgnPxo&y zv_|ATO5zH(e=tH?50Gk(q#o@^^ganS;`bsQsUD2bEW&0-u_G?{4$cvZ#zoIp>_FH- zH_p}|Pv_u=TeiZ0!k%8%oFQ&v>G7&P(Z2jRYOGAjLKHU{9c45O|I^tS?`DXJec2YOzmAEV7U zX|S)fbH0o(z|(4I`$l2`y@3E6tr?K^u!#;_F%4vPz1RTbrABP@l#K0~fvw>*Ks3Ck zzyw^R1)RlL#Y*??Kv4PdghHV1M=Cle>knoB!zN>UkcGN=6dKx9Krc%)T0Q*jlrc@m zfAevHS$!^VlSYyd+#1x4hT}1|*=N5^nc`QGIo!veV@Es)Xwk|vRoV2`f4i+?OyD*9 z)vkl)iw6mD2TABIVFN-Yf&1}*Yxl;zs&t-i1xMfwclIc(M8JpUh;_p+rygzNL!GI;xQ!Z%5N*06P*@jg-1K)gsp~vp79wM|}RVWTXP=VAcJ+yzoiC0i%U@)&k z|8@mTcXpHi=ZW|}bGF#Qhj0B;-N^I> z$+!&{OL<&7b!8sV+pfWA1xn$tRhp{A(s7L;DqnZnjaC)Dw5Cd1W$JP6>zLA3*lHeZ zcNQ>O$_`qsWnKShpZhjuK?1Y+Rf&~EO{ZDXoU7Vo@40yM$)ee#9f`yfD0a>NrYY>Z zo$8l1jiu_{@h*`m47Eki|K98VAT3vbH+!Ie<{C0p+#zaq8=uP;6lXrKEGD zDe0L4?C=({zmlT>reFuhL4m4dZ%6`h|9Nm$@HtDs&OQ_iA)h6;zHTlJpZq9TP0+IW zLP6=0R2?}ff)uwStLvY#C+UvMS@n5-WTlH+(g40FW?ZVPes^<8Wkr!8|I5>O8A!6L zF>BCp*)VSa*N_TU#z#+NI0A%N>bO2xXqh7PM(Mx5&HSos+vCTPiW>9Z!rtmF_s6k8 zQ7)gqzW-RZ_~*Zy!9S7;*WcJaKP1qJ_z!tO22)#L59N)Izlz&^?b#VFK{UDCQ`^U$ zUQJvu@}qvtp6ZAb00~!lopRQeV|_+Yedw2@dh2u zwZVk6tk6e%QYPndXtBMbG`u~A!L=hN0jYp4tr0M_9Kn8r})z;6?f zru41gaLQh!9B>u%K1zM4>s6{?C_5ae7YO$o5^Cx2Z=!&6@})3><~1O`QFanL9d64e z*tSVq8f+}01TnNg9!_>)1@%_10Ba0l9gi}7?`jfi5+)p@2_R}|aZnTSGkj$@tT$B}pX~iY=Ei6bJN`W2;`urhlbddkc`#B^vUNyExT!X% z*!;cv_cEES+$!S2@4+SmEzN&s%+VjhD4(7EVv2tlTp<`}LnXo$t@JeVtW)8T{$=hl zpS(XMHeqJgxANNT_5zUcrWSJe6XrJ>oSh9W(WXpbJ>Ywio8WLwC(vtKXb2?^deKP{ zhM3%1LPm`~h(AlJ*CmV`A<|og3n8mo3F_@pO@7zUVYgIE@RB0^G(oAj=cFkbiVggiB|xVy#EzqkEhjKBV3|OQHz=c`hAxmg=OjU%)=Ah#ES@#N!FqhS zV~SY-14W|g!VsFRH&|a6a_6&A$1K3KET8|qBpED=8y#zx%Q63R4rw1-t9mrgAVJ=e zKEbwBfpaHN-A-44qkr483uN0Au~vkTk_Ja;s`skJ>e5?(cOBaC4Ef*BcxgllAm#2# 
z4Q5mk-k3s>u{w$a`lz97TI*-c-_y&#EKOw7cSsxv0gn(leRIloKAl8va;tecEL$Ie zL{pDvNRp;SVs?zx;f(c9uj%S=K#F~KqjFlwa0p1CHTX3Vj9(f1-=(6|hDwZU|B%~NWwd-%{E01RBSqu%J+h~2w^NetwF)%pOM4CeQzk~+4PJ@re%YyF< zod2V3dF~-wr2v2YF77F{H2;Ax8{G$ zalBfLY_4NlWOEBMev2ky5QbMUvjZ#2o0lyt|I+8dVLw_NaC*JoAiKf4jJF$jnncj& zx83QcNNh9=y$B{`tKqOw5V0~U>jstsd?+!hAqs8rz=Zc99oDbN>XE$$3Dzxu4P@?Z zc0eb^dlSH6EXAVW!ZuXsbbE059M4_CTeXkTtK{K1<2S;bno<4nfQi!r_y`PVCd6t4 zlbs0d-^I|1|9$umI(9(4H;jIyp~c;5gx{M6T#X%Rz|hf)29__R1vZ2e(T)1jvd#<2 zO#y~r=TsM4?o7l`+?T+Mq2U{h@xsIWd2u^9u?wm9{1gEs zRtRZ0z=zENCjGgGBPnkytt?c4GV}Q~upWUq-~`eGCsGOT`bS>OuLgKp&x5fqm*>o> zfEbOQ?8M*X8d%Q5r)Va@%l(cDs2LT_C&KK$FjLaC2i2;2X3&eL0zonYfLqzT2n}s~ zf+SiSo^@^Y8^y}hD{MUCJq|K;naSw>0+5!NNZ2xqEh*qprFqqSwTR!f1kAIAq0$LcJ?2UST z80$f-Ige9M;L>(zrm>s0tUVc31Wi0>bjoiUzOQ?`07gO4<kC(tYZc2G@rbB;$>5)9~5zcf2wpYa(L{*lcB9syrx{CiMT-vGRj22fJN6=LgO zs*xr791m`rL8!Ffx%b&lN7)mTf}rD%9>1IyYXrW}`p*)lA4nK&($Jr%wMSZCecjK9 zKG6X$ABf9A&RF-S{byG?ql3QG38+T6!B!Zdm=CT3IWIK<8(j2T+V<5q`1dj)h7-A$ zmZ1&xmRljkvaG2>4-#TS+ORb|KtWdm zA&~#!hwn896UPhNjf4k8^hw518=S!B2njFQE?~1-$x^}Qvk_~MCsZ)hv8eAAC|n$= zKX}DJxk`u|qKXH_t17FFd^`C-1&m%LAqH{5G_mGQ(cD}S`n=NnH>c#g>vHl)Xft} zqTeeUU`-9LiOnQu4{m~?A3?F=l}Y{>u2K7NR5YjaQkpAKN)as?B-@X3}#1-CmJbLlx}j=R>v>pF1$FT=(Id# zmOxC;XdUq^<-2w?(f76K-zhy-wa6eakN?}2XJ!i)s_0ZphQmTG=?M22wq3gf45^Of zAc2xX>J^Zkhd(J&zZ0dB&JTry&PKphQZOMI5b|>BiG>9KF6wC4L}2}rese30hR^QK z@e#SSOLm9&EKHd2S?UTnm+tds^Vp@%IMm%2z#;$X+yfl6C9GNx|7k0R@rQ#tpYrO6 zz$uaU!Fg|zHhEdsi8p-+t#ZPJfus8nKU#nEc1H`~sl`SKL@Oe#WZ=`Y`)vvoi3Jl} zsCW6#&;af#u4fqeNl>bC-lv>Ot9PV> zna!knQ{JWTH+a}U3ekxsQfqHh9A>H98=?YZ*DBGEplDB+h6(PeP&ep4B-#5eTz#ro zQD4b+$acvDa0b}F6&Hg5^RAD(!6^>q(#I5HBj9BO5K;{~(-yQ7tDXqA*b^ygD^Vugt^=3w?G)C z$5(XP_DTRUdq#hv7@(dKca^;&P zSp=O-iQI86Vhmw(7moURYaODZ#6!Gsbhpxru5%B4mI|ol2}jR*2(b?)PCEtE=2hI^ zdj@Hwu1=9P(i(vE<`;Ru$v25pKU@srU-Ct-isQXg+a4xRP@S*v-5eDmOAafd=&`YTo;mJtSfjR)yE1+!>()EGh{>Ic0%r0Izm7 zfjrM3Rc^T<1O;?f&TNqPy~2~8LA{hfpI6P7WA zOJO67jt0Jg=r5vJ|LuRMV1|s&4=EYT(Mvpyi(HwAKN4!sT$-x_eDJfNpWZg`wdo-5 
zvM~9Pv(vUzeBiY|y%{|oWfL7l{&ronRFrlyZqZWFeh7V-e<^!b>j z!KHFriO@3>L{1fPl2`~*?hOn;VDDQV?I$46PK=GHk!)7pEKr`7A65Sk$0Y$!ARnf` zp|VWd7Ggk)Ej8~~xove?C0V&s^#T5TxDG9EQ)G8wHzJAo#5yMY{RbFjh&{<;lCAT| zuVEEi6%PJ!A%H%*#g5CHf zFyP;Auc;~xU)Ht31l*~5&7WHoFe2}M53fkRr9W$^eiszl+pm!S*7;o|^Io>|G)I6m zHdL1jFj1^F8QZA)G>o`$V`=r}0ws2zx05YXL~tP1Zc7lr>HO)e?ry(b0r$Vr&ruj! zC$qy}FJ&Pc+W)2C`CSu8ssi`2_`pd@t1&Zx9YM^PtYLDc;L1j@tPGs_W<ENzC6)zISJ~|lQ(8f9Pui}s0od|*uACDB2^J&0RCs!q;C&b3=UI2G zc#o^`$xiHxPcx|YBue0q845BMjJWBu0W?yW5+K|{2Om<}bwdQQ?xVvTw>Ub17ac!_ zWrnh3Swx{`DNglmK`naRPO(t!_ctJBT^ad+ z1baw^e9}ih#@5dqHo>{b8V1*?zY?TkueNx*$}_^ouUPg!P=|rNQoJkIiBL*)Ya*UZl&j_Dze=RA2rm`)1HRC=zx9M^1N%*+G#I0Xd-yp^xHc?0Um_vW|mqTNz;>Q^uMIB_ZLJ6g5|Y&kMta zZUIY1vfmvT0%N-Jhf0l3G6;WCMf=brWe5nl4lTPhf|mHe9?jwEk{QI{PU4`FrJ^oI zw!JsT-IR6y_fuBx3?@`+21=C(eFaLC`2=O*ysd#8pjSu81e&o76DllCop-g@0rbj; zL}(2J4ABUZdFH@#1VMq|G5qdRo|u*U7}yrWVr@K$(ES{bUR;TwsMqPG@Yg|*ljt_< zC1W%WLGodcnwUc14<64B+{UIpl}!b$UApn?k$Yp9tT?rZKAwU5U9gBi^3KUxFh@-L zOh^QMD~FYmP(^RY5j9TBW6@sdYT?g^?AyyBb|XwACK(CtNo#jX% zQ4)*4A?!u~n_A72;jBFApk^yQzey;!Cv)$`oKCe9Fv`^W5Y{Rpt4owDabS z8>1fwD(pEY@oUA)zm8s-u%l`KV|fiQCt{R`E(1@W%km7eq`a(Je=ZH;Lnh?ycG)Zs zEt1aQlWOL^QAy)aVY}RAcMJyR#ip(v;Wq z(tSBjGiuy5y00taz>k)$TvGr5Nb6IiY6UoVn4ttMCt@zY`EuOz9fuEkV}aiLi>tcd zxECee>vL-Sad$eoLKU_}=LOUnP=A?*5CAqn9g6!e^L}JNK(7@G`@fJxjiVM2Q>>(B z1hQnKF!DDY(!<;NpcPcb(q3A0Rxp~hW^G~6P{+Dxbq|*S`l`bn`=}iVc!ikD30}V<$c*U!*v+hy%R)zxRY9QYuU9}rS@FyPZO0nP7^#h)*lwzLg> zLw_(*M-CnwHN7Dqqyf0@4`t|QpZ*CL;zB$4$-z*oMXL-{pqDfN3P_HP8oDig45$-g zA0{Ar-&MR9pU;P)h-QC?Sq_~iF6!C<(2pG0kqviAw8pJAl;C%~372ZU_j08+W-Sy# zDjw~oS3|84!Ps+V!G6QAW5*eUW^(vF5M$)NA|f=-v17vb<^)EGZLFl&axh@XRuM^m zH~o40x%A6G9|w)ZlV5a2G+5OtdDzuF!^E*P3V4>QkO>aZB*YT_ta#Jtoc;6m0U>J` zK=OHk8$c{5Bt}zIl%hk)ui448W$>-k3LZd$8?2|H{86i%V*6WP67-%NM&DeinN~-} zd4dB@h}~ApGX+$mG)d48X*%ue=g;j(4>~uSZhd4iu2w8i4F~^VBCNS2wI~X&`X*hT zvIe7`hTntZ7D(+#6U`wa72=Im3WqqmLQ{$>VO%yfsJD!eeV&9G1xF#e|7&)83{t{k zEE8x#?j!W3wCc(2?{(hG-=&kL167*dO~y*92Y~6(zLooYgbbwrGE;f9=qRfnepq4y 
z)Wt18%@vFKe)}IC3=ThGnnTG2+Bj5y{U&*5Xqy~Vxx&dKFLPY_-6(T)x2+S3nFtdc zliyo_V$iOX-Pt)(A*#ktJzxB&jD-GuqV~(1o)C@BKf%umvh<4tz#==~PVbGE)Hx4s zv*vuH*^n@16FoDh_a!?AviQbu^&7_5p35og+dnQ#>siW)|2xIE;b0;*>?kh_jb=9D z95`%Bq4u`zyw^8bYEKhZ`SsozT}~}Px5r2bfKk2xL-ZLtA@=DC9aO57&Izdk(1O$L zIqe1?CoO*oK61$~`lJm5|JRHs{bA9-g2MQ=h6fA60pU(B#Zl$`E`h|nkyN;drokL? zp!6|>ntpyJr6ObjMvP9iu_SY4PXLfCDTBDSClXb_R_~DQkcTdQ9v9xM-q#ytUIzz~ zGIN%x{2VHSRKYm;J~aWG!Cw^t`ka_ZfCMdn^pvN@OYQHLu}0-2AKF|}jLF@%;+TA6s7e6!pN|EAWJW&X zT|~}#&Z+Qgl~vkACXW%TN5;w~%-WgmC|r(I0LXpD;DV~A^5A;u8^rjqj*9iB zmvjUtiJckbe6){hiDvgEe-}Y9V(Y4+_E3bLQ}u;&%rM=laz)RDdNuTKvG!lR0}nbC zvX&|ogL*sbhJ3wpe@5JK$U8w+d3XH3WdBS4(z`*5#`K|rxZ66}!{D`r#FjSyh0=!oK!pb^dmGna|P+7Hn zpDv{7_zlrc8EJ@IPBU)XWXO7K06BWqMTA|yPIcN7QQu}RC&)~k2poN*%So!j#T~=Q zw3>eDRclFG=ZpTnnqR%`k*Z$h6&0?UM>qf3#)rzNVym>})y4V--p#DD=Oe>z;t}Me ziPL>3EAYq*r<~(wv-`f1V^7obHA~{VE<_ytgB6}0&y{ZP(4JuW&!?+Py#IZU@lUH$ zZVYBGMBS+ZF=&$qeOfv)JmJlaK)4=$(N0GzuoEsHb3Y?@Or9n~Fo_a+K*s_oQ|)!Z z=Lq`${C91rFeJYo$~;}DUfirtyaYJOu~^01XRmC@z-zXi5>Hl&miL`{;KFx=)@YoJ z6trWkJ@}2g+usp2)&k*JX$jnmVqiHfr>G33V-D13P7>VvAOI}Gakk+$%o<;UYbj^k zuBV%LKEY?q(sK9&xNv20bb7G7nD@N3TZ;%XLUf`uKh05L+0g!UdurwGUI&%n!%Bshl)p~P!3Fol!VN)*jp-57Z!r#+u0fycuTshw1L zP9K9?IQ#BIZJ2iP^Ka5xK-COAonXdhxT`Cl=jSpq(5KA%3cu5hy@SIt6$QmUxky5+ zO~cbA$17h>#|PN2w4LC#m8_n~0^%Zp-m~OQtpTsCj~{LLV7h7l+y%B>v)G}XR#}?L z4E&*R4l1IFvTsSB+Uk`&k090Kufj?-b@rv?_^iXBL{@LATeJH?^Sssl0B4k&9oCkf8<8v;JUQO5Au&TPmVm43G2161_7L}`}G3gI5 z>dIs3OHBgBNcqXPtzvviKY zH&4Ha-keh?S`-*cw zjT^X1bPeoO>R5a72B)ck4s0nO0ntBYeKR$!NqvnDyvSub^dOdrp2<87^J5GD6kIPZ zt4ec;C`Q+tHgEC-PtQHzpa5n3-hWPyF|i~%ty;Du)OPWeT6=|-qDu<9jN1Z8!!uN=^elVKj2BD0wzD289iJT zJZQlBp}-V4;Y`79aA&^nrm|Boagx*HjzdtZzxk88Cnm%1GN%AcQADp8|Q(xXK!ugm6}_af_^_G{YDQ#2%YIWYzbR9I~UmdgEr9HL0#c_<9>Q!d_#ul#ytjj1j6vGoSc68DRL$T&cSAyZ>a$6`ll!U8i4W` z3Y2p{N+?b?t-0MPV;c&DesI3wphZW)HlrR8$ZZ`)7G{Q|IQi>472lHKvOj~I?`fY< ze^pE_PY+QnpS^jZR$uE)h@Reo{rQ8HiV|i6__(q`SKbSFTUIu@ji>_b7OX6zaTCs> 
zmPdaniULdrK`tHU%Hs`+N?_zroArFtD8$Oue^5vF%)5?-GiReejbwZbzuY}oNrex5Yq$0Etz2v*?~JzUK1ixZA`5aY_bL5XZCS7!{f!_z7= z7qdKoC_j*~>5@6>xND)-cyq6);1R((BYNAa_jTE~64)@%7ru7|1%a z-i?$l1(WqcqWF9cSoP<(1q9_Cs<-bgon8bPRMw*RPGvLN`Cm;q9%jiwlWn>ci z`WW9W=C3sK9QkY9O>*U7448k7y7=}h|NaJw#Kv_dCmn_!TxoKrPN~9TtRfscK@$t* z{si3#C@;>+*@Wk&j9-Wb)QPcmnVB6I#tQ0VDhI#Xn=^@Xh-g_M7G^Ic*D2$P44-0%61_VxVr$>Ebjb zR_r@JC6-;I&uZOPckp~vK(U4zLxP>6ZTQ0eqr349`xLKERqhmOCUhv zG$}-jWs;}P!}b8WTdTzYdi3VGlpy-BOV=C_2_bzrLSD%rXW}M=7>+0R#AO53aB{!W zFb+U&@zG|7sdEAN;fk@59}zHAW6kgVvKRl%tRqt$V*pCvw9fslo;X-PgRhbOChydT zvXptjR_d<2WKT&(+y(UH%2lHRbXPzk_Z}6jX>mHf?DjU?)*7{Us_wG@_`?~J%9z$J14kF`uNw zaWyUqq%kUrDPq-orRUj@&{BX!@F?lBE@-cInc%sBZCw|1(|0kax<7fO-MQYo$OlJ60Zl(BzKB?*oup$givR|V=#8l}-X+`~dSi}DGLxO)Y zWH~)>f9%II^PGROYj$y*N@R+0K-#DTHUB)9n{G-937K#{(>Ih^5$%pI0f!LzYiq)H zrN7?Ttpd3Ksr?h}lk0x4PzE7+!MR!t#cqYb1{lCFe+{ZS7ET;){kMp|p84vEeYW#( zh8QUHU|S5}z-!cP=W3FmjI}dXV|IkRd%nIE70xpLx=KFEd}m`Uq-&yH-|BuP>HPi) zL-DZm#-Ml*g9U=Wv*$@8U&!hGr3LF*z{vy1v7F>J9Iv4dC&3u?-3%DI1}q_hq5))x zc#8p%xEd3HQvv8zsRaPu;WCh_$TBr>TaHgs{1#uYKExWc9?udSJa`;Pki$EA)~Qx> z-k+>ufU?g|XMJD9fqaZf&jv?5gKnuP81-GlIDUyGS!o}%&#&?UZ^HjE{7P>LTUK<& z3kHBrqvNMjkqN2-YEKkK!ByJ8ccD+xUv%$&=pj>ek6MggN+^PX{8OI1;-Ky8?v8DG z5b)p|!aw|AC) zd95)7e}5W3IWg>2T0pS=6~^j0pOJ2+hnQBmmjIgiO@p$*Od~Jl0Q}0-qs*bY{fZ=t zX6-FKh{^f?E&u{eEMmtGp-fx{J3+A)6gJF>QNF_8L_w&%h5*2h6iJNE9#MBN7dIcg znKrnHIbVvWc%|ekEQ+E6NaV^2!eSX7;=m=nelZH$0)Sp`1en`YS?wz%nd~PO^B(Eo z75&}KUi6l31q`{JWlR}~q0tymOhs+HEKr{c!cQoUS-N)#_od8fT96xds%P&CoH>6~ z+QUU%`X9^4nNBbH9Tpp*DBl(dI%H0wLXNb9iOpg*&VCa4>|J*C%322xMX7-kx6<^h zEdVx!-tDZx?oFnOq3%ul6CEDYX2NQw*l#$ck$Jl2B6}S$>tq7!-)G!;TTF^^FPZP| zYtFg$)WZ&z%_%8+BUjlEGhjN(CIK%-&2-lMTjv^nUsh&^_=D<#Va{AK0S7tn%1_xQ zQL5X#E~m*)^g=Hh$sQeQ_mH%MM;^($527y@43OiEA>AL501=#o1{mr7=lu*;+Q(`E zQv;ajMEBh@nw)PeFDq5i?W|Inu?flKu^ia8{NGATt9Rd@&hP$56ER#Uf8fep7xAAR zPQkZTA$|(9#KaK!RoMt1q19=as@?9qI)z3}eicGS>|Byo|6)1UfTFlCeG9tf5Xj>5 zm^Y<22?*RIuYBd+(fsq9thv#U3bY0C@cT(o839b@~!}&0W91L 
zSxN5I1vPd`hXBSX6m0bgKd#*AG;z9ZNPO!F37C^q!5Ba>X3LV|6oO&44<(?T%6}hw zQrDe2fl6*~jD3Ony0efcxb360bS7Lo4B1E9CX~v;xJ`F{>-BxxOp5*gXgbTdCf~P> z-x~u)C@`gKbax{$6zN9k`hhf((yF$Q@+5g4!a&OPi zb?rW{^ZXviA;9Nd!zQ!WbT~O?P9~tk(>QZIMPP)_5(p@^o~h8eiI&YHfq%R2X9{M>`X@2`=w99LZV2!X zD!pE4N|m1iSZv1v4M49tiYnDH9d8Z{Sy(qW29NxK-oVi`bkmWnz1W$Et5H*VDIOTkV>g9Z*3{dRJx!0Q5`cbbEA4KYjueO1gmeEatD8NC#MkQ9h9zuPBX;?<{d@|Pk5gQu5bc)DgAl|D zFdby8{HreD5x(By1x`es4-AR`f`F_J_dbzWg_O-&$Q|sKs}|4zM!+Mzh=T0rI7ISD zUk_dY2EKJTt)Jx)Oy=%S&2yoB$G-~?6hwHphLeBTGr!s-SVBrQ;x~zm<96D-V-a4) zW(+ZvO0}A(ziFMof-mDJ(0XfVqA_MRI-rIs?3wl#3qxWV{ ze?QRCweQ-Tkr`d}hdwq6<5YJXwqL0c`%)q5->!Ifo};+9?vV9bw!PJmCa*Z%PWtE` zoga4V^LT4~7(mW$WdB3Fn{tJCp|T~8F8ZMv0|X9}en{wGGXq=l;ET2l1`sb21Aqa?l+m~hjXvUnF}fr~JjFLgB;ekx`s zWDf5(BHHqSIyRIO$OeqCxtDQw=A*fjbfy4 zr%5$C5U=n zLCsG?Tg<0HgrJIV+_y%iFAi^oP{4q7Re6@|=JO$6c(yF9+VcvBi%?lk=wz{iCaQ>w zCn!yb5I!@}ekg}GGV++Bes|L08}QR9#5=7<6M<%~$4)BaUD9n%8Qh413hkn)tPu#Z z$=3tTYhYo`a8n@L&q~7c94p0#G691@O_TywI`}9d>u_ln;QbST2-Qd)P-BEh-Ffp@ z7LiVYr}&C+{7g_3UtMX3KJr$05dmN=%+F{A>fOVP1?%63W`Nvob!OvH-&g!O#rHqH zPEjDLDOF~Zi#8YD-)VSjUvR%}onkpNRx+zvlBqB95#l?36ydDa*_%Kl!Ku2sIr!K- zlrE`(ZAcDx96P3I?Wm6wgNj0#@ULP5V@PZ&&e<$y0{k3Owg*?C$9x8&`3*)mlXK=B z@wNda-y$ND6}kB5^-Y*H4%AMaW+!3%fS$Z#GIQp-CIhKYyi#&T=)_CWl9Q zJ?sc8E79ch2E$+cvXGc09N>0R1&W`mgx-9%U!g&$Ak_LE;oQosw)+6l`KjYsc`&&n zVm17LU}Tn%M)4JvKCN_U~2m}#Wgo72UJcpvPHq5 zG5Xh}FEVR~!60ry&{D}~_DFSv7SS$(VNqQPI;Zo8(CX_;#>g}`0u0ofZ|~8B zp0CWBAc_=ixD?&MCz6#E&O5gCaDB|@*xwt`tpMue7YgH4eHNmnjJ1&8Sz&bwyk-o8 zq)O?P%YTh;GkJKrdVw1eLcmD1tj!9UvXor4u#cTRWyPT;e^U)~#I`uuA*=ex?r$EfdP|G#t=t zP~shkrCF;SMC9Zsa~;U4Eiip5QZ4Q`RH)HByiVr*Q|cuA@>i;QXP>_Axre*$*`o_UoqDR_ zVWl>i5K37N778Q<&bv1$%_2|x(wcYA$S8iq$+v}6`Iywl=mj4mGlga?qf&$_*t717 zhLs=rE)Rf7^)I<8^XQOw06FDpI50h$U3_4(xcv{-u>ghO!zGoD1DU#>H)#UrE>z?N zcBSMMzCHk;K+fd0Tdg) zb{&Z_D}I@uC&=rtc89zeEprHQt(YYHN$P>a^F~nFoiZd~_CrD#+K9R~UJqrX&91Dt z=hlJ46Zyy$I}{lafxxsqIvpIBxWpC|4$RrI7Sa)-sDL4y`fSGnm3MW#!UK%=^PCiR 
zB&h{7Gjgn&MCy~nvwqFlD`>Dfd_sXBI7#tLrqz!pJLM0MHL72a9T4N;VrDe|;iQf++S5Y1E7Ncf2_M~d6B zM?6`wLJPIh9*%D_GM6<}9B$Lr?IZ4I>B$l=Vjgqbhvx&5&VW9E^ZZ60K_jk%=3ZBKJ|m4`Jkylsqd1CfZ4;WeTNluwD^_Z==&g#c=lINJ7mEbC1ghd)8vN z_7Jo;!AB~8fh7uc6BtZUEM#ow^ z@QpYV9}vJFSNN`oN<59_D|Eh1C8i-l)Pu*B5dvySmY`DxGcf#HA>_!?NiOQ)^l0lL zzsu5N_9aaBC)BJ`81)Dtq({)gTzw|rE{Fq_+sw?_po z+>;sMQU!}Tq`u;HB{O4p2yLahoxKga*|Xk@_u5ej_C&kf`v0Z;_Mgi{>!SxF!FmS; zGkd>dJP6=l^L6RIeSQG)m?(R0dKJJ!q}I;#0LXd{QBo8%q(+ha+i7&?bJhT8xJw3_ z{xO1}d(R>=DFDtVsd#9LlsOru1s|0ct>QTi{}Vx|8OGCHM*-y|C=UpG9O@$xl_o;`PV)!6 zfT-qNwGSAsHN$1uNt2-Fx|W9Sz^w+QfYF^40ks1=P%s2}`SA1DKocB6tt2=5i6TlY zBoq8^Q+xykaF1}+f|4Ok$dmkV#<9OgdImp_X25Gh@TN57xzBuNiUhXL`n@4Qw=GAT zed}to>8xFg7o&p@o4o_7LxMnkMRzgmrg-9V+m-lTktDX6QZ+G<<>l+qMx$p!gDa>? zf&yZOacY;b7?nmmw#(`_4rgs1N5b5e-CJ+(U#v_uI`g-Zud=Ja$pJ^f1INMwyP)nm z&;F#1cOH*pRvAJRzI_-SYy4xxJImY^cc4iQe}{j9Z_=~0lDM>JRr9@-5+K$r%kOLf z)n;MT?Y)vKrz5cF?NU8f&yQa%mfivJXw z(;;sLWS=9$_!~1w7ViPH;4J9U^Uk13J$JnERJ4K&nHfQ^9cy3uKW)@)9@Lg<{YB8m zEna8D(tF^@KNDsmamHx#VuW$uuZ_;kn`O0hJ>E(B=)rZ;9&rcP?E|%_hy1IFL`pGd zFKk5jg9|{T17`xC+6Ke3Cy>z7{8)gymHn+J;ou`d6f~Ki$G`6S$sMy+*Ve2@SZlQH zNY*)cY9Id1)t6MO>+_GzrItLbzX!!%L)8Fd{zvaort@m|dVD8$j3|tcW8?Ot*+TOh z1Np^iHpL~p?;5wc=Nyob{>Hc4L!q~}*Q(S&y1*w75owVmvgkP?nH+|bIJCAMa80`U z??5TPyF@@eZN@^eh3Sn?Otw7XNo+?u$MJG;(0jpY15Of<%~y2KY0SmpUu5%c^zfOX zTQuVl8!UVj^AIpm!oG?KhRh5_Jxk==rX@$UY{P60xikUisWkRye-LGmpZ9FoKcrQN zfKrKsU9Qyt@z*t|LQQ(&WCud?0 z?<|S4%jP~2;5x~lwJX93q|c&I3S59`UaO!w%->a4)WTp=k++d?EK)N<=N)kQLOz?y z1gE_?R+>TbZP+^At+B%UFsiCItC`Gc??A{s2;xrW=%`DBX(rRB;c?%Q5a$KK@hY~u zz(aI*naKSbEccwr|2kPg@2+9q#p3%s8c4;@2WN^dv9 z9A#C);mTjvpbz)MMtLIM8P4nrzo#Fm7TVznGsZIIp#pdhcg(PRIIWOuYNlj18e@P{ zMIHCS&$vOorGW+q$0bQ;In&{QGuB;@PKCU6ham^6WyaPY*oukIco)Mi z|G_g9E`$(lZvjjOgMiwnaCs29=#o1-Br>k9L?^gPHL?jlt&93gIe&!k49^lkL?HSiXLk^vIG?g33gon^~lqzpOA(oW%e&tjFIUw zc8yVhM`XoF!MCBA$@L!>q@v2JUl~7?nBi5aZ;%~hFbqM@mS|L6> zP|yX=5~CO{ECbbc{b9FLfHD|(deV^E`uJfl3|$5t$4?>$)b}c86d6g_k3o>|L^1;= 
zl-c4F9wHEVP3b1a2DBsxtP<3$Aid>qK^}%*fLOK~2Fz(zGoI6ij3y8neD=3Ou|r#8 zfdDE9G@eRX0!a|H7*NE2F4u&5IBp27?x7FuCSi(7@U)s{8^?gBlNe zDil8>7ym&QJr*u za4NoA5|oKsQ*H<)0Gp`$j_$$_@k$+a4}e-7>+%`ZpdIu|GX*9KIQHq(UudD4jVKJgovnJn6@RY0;{{)n+cl#+wv1hc{9eaCm3UTVs5v82cKR8@mxdz2130mz?Q{HFk&4Bw}mSs3_z+d^3WsjE5ZAK zIzk#Bp%kG`3bf^~x#a=*^UEGdGc|2jum z+H_95)n)+smpx{u0(exf1D=T|qBT7o(OLKZvzjVZS9BVPT-z$_kRO|KwKnpJVt0E= ze&09j@Nmlj8@5(oWm1-K8_Yi|0#U zWEX2)wD-jIT~b(L!wWYd%p;ZAn}fO1Xqk%@g-`x;yR`hkZ*7*G2Q$TsaRQGyA8lE0 zE1+eT!tf4r@KM`F4b^4r%E~N6y+o)M1~mhTigS<7WN~i=96fgF$_l@~%a#EUV)D3+ zP=gq64^%kz6Fy`z>TzmR3>5u(2z#3I`^qpC*#@KO#GdmzAT`Nfs6u$C3d5PRZ|&R= zwxAfa&XTshIw^_(^xetNj7@_R2G;9{>jA_JFQK_?AT?t#Gk_}jmNAoHbo5&ENlPqM z?J>!t(WF-|h@`X9lp5ZpHa>9k2b#P8oFZ8IilwQZ5T!K%w1@8p8%hGdB$93ruN^Z0rAZ4qWe;G;eUwENxu-3G^)gx@h505 zzU<{F&{;^f8vPbGX(I4QDNW~C?@Ga={b8d^86?7BXK-tIac6}DeC32aMOP|+Wo-L* zkmC1Eo40kCi;7M~OmD@^sX;#QK}WP73iL7ji7IdyQ$&t3qr%7Ne_!&nEBJ_c&WD;~ z>P?6Btqu18_fBQP$y>nea6NajI^<%;F}Gr>ehmkRm+};+6}-!dxXHv}KnONG?b(fT zq%TPlJ^(To_w@JmOi#2HBzfBzq6MZ4=&9=zWxS2w#bbwQ5P!Hru6EqlY*{o`k^CM? 
zNNE3Y;qO8&(fOxXid3zCDvQvjISq$<2+QK* zZV0^V%^@wBa70^*ZINg0jVl@{c+#jcDF5nu@@ClhPl(Cq_TGx&6eaNj($6%r%_jSV zh!KjDCmCr9)+T|$l``L8svNT$fm@rwzu&x)zc;4P{Ei&e@N(Sr3b8m#{NQll77Oh^ z!JmryuZ({Dym@z!kBP- zDE|D%O}sajh1QTd?xu3+!?sHYt0lqGo<+Y&|b zW@clAe*Tc-(0ln5XsDv=sr3koa=*ZU48Xe`6R!i0SL>IsB`g3G<0UXgKwcyRX2N|k zVhx+$f#+;VH@%qJds0Y}$~^`plSw$3_%p8}d`cW=6RDp(j+J4Rd!*p^Yi7b&c{-k2 zB_`e?tEDK0vQBQH7XL$0)j z3@=7+XBWoJDZvDPp-T=Mp;C(TM$lUc)b3R7PJw!a)4Ct>$(CK)YBbkU`PCm~zW{G8;dL&JT zbF)^$On)BxhrZP}SpGgNMH&=$PfqYmP1NJE1}#(epKnbeI}G|nc^28e+!B%al`q=_)H6xf|dbaN?ISN5$yO`u*Lnc;72`uHDJ4&qkwekIObFmN8UBLN(J*_T>ok%}XoxZ?kP0@> z%GNo2H)KmgbFiiDJjMEe@jdM)(Xi4GMvu1CszISGVYHrIUtLR?he9X*$fVgHI`NW3`W83{2JhxG(4h@TD*4kWB}B5i zZ2rAjwTj9`>Jr0FLHhS1WbtMx;X2D6t+6vOKZ_`yr4%z@qpN+fNzaUDA<*E0xsU{M zPqGWS1w%RMwfWYwp8vNVo6=DX^rxNun5kD+Rwo-I;q=Xd9--j~Ak0ZHp=E?z7pN2( zr)?!c`~yDfJ_c5QFxArNJz~EiYsO=u1HlJKduZwk6%uZZNq|t59*jb&DhntLu-H_y zt*0X2d_Y9b3eN!gJSH%-n>p54{wavhAc_-{Y=MQ5$JBxxx~EnG7#``B4WY89i|`Mg z5A@r0p0=jY4`ff@)v;UF8d{qh0F%)D*UFk|V z2&zm(A$N4jrOx+yFQ39Kl3z2DC`p#_U36-My+c{f?7RR-dx?Mg10hD;IC{N(i@`!0 za9HPX3SF4Sztr?3QZ9;FFc!)zYm|>~92WkxB#cOia?fsWc$v@F+3{SE3gs3Y^Jxk% ztDv0y!_&XE3t!~H!sho8E_#|q^7Jc&aIm}UYa)=^SNU#fIb$hxy$vSMXNksm*GG9T zN<>O8fy+f`9e90<4A#-@F%DGK~3cpg-3n`&;d#nXbCul%n-R}v#6fJm{ zqP?9Vr~`T@vvxq%^;Qe&e)$to56CIBd!(a!H0yz+^+hOP4nzTUCm8R0hio@@w7Wb0 zToV|9`v@J9q-N%jvk%S1l4M(fzbp|Q&Of(8V;O4Q8CF#cex z(Lm)Sn;XqRFl1f=l>q$MEB}5QP|&@eaJSBVcvuNpYE-abhDbkrZywnJ2lY!kKd-25 zJhMVGtD)liUp!cnsFeW;7yHggl|=o0=xOdUVt!muT99X|zEPL{|11EYl6pollU7MO z?^*F&DlG)!c%P+3(;uPXG2tr@s#)dZj;%eh%}RP>Giws*?=%c2v#iTHcGa&UV_QDR zDRNCQ;wb=ou112dVt@dF0zr)*xTepMqm=;?!-LwQfkWsUzV3auCO)lLvYN-gfZR$z zYba1;L;uq$Y>PE+Xg0f2pIA2`CPD^li55+5Y zMN7Ew@_RTiKHt6qR@0#=#9D8QZ~d2&qyC}d;#rOK3r(kLW+ zD|f`q9zs}zGG1-BBm zmn7bGY#($x&M-q_4_7L-Gwr8fv);AduV0yXf-wr^`0x<#YCXjOKXq+I#PgVYG9}tb zNn4k!-ly@L8T#+UClyA&qyeY)J?{eKAH4uC*|PtHFoq*N`ow2GLim~|8@`tgY zR$>!eVj@ZrFKA-gl6=9soP zP`!5TcAhNdB1d^q%Yk~SDbPLZU4r4aS1;TU#RQy@ACsp 
zG+%j+mZr|^XbXe~h-zB+*`PVPbAVa?U+yvO#~t^BwCMqbs_qAX`IAdwGm%3BcvvZA zr|Ppc54R20gBuGw5D8#z$emrVkLR?+KLys*vth&a(FmLz?XV~=t@%04$h%G_L7r=|QoHJ29%f3eQgj&~I^n@>7Js#9d3WZls^$c4u)rVcO&$Be%SWT1{+{QrpW< zS_weIlxL2%cwZ%Pl@v-b4YyIlM@)hD>(_oSv@{4vONu?`O7AG6lF!I(xAo$z&JEkS zo1OP_tUhrJ;}-tqa#Lz}!V>f-hr@F9loeF*oH;ITQL*pmIvH#|G=b9lW*v{GeVGEq zd!Xq!*lMtnbeu~Z$!efmC+n=z-feNmwJ45Nn zP3l8r!PEbvIRwdG-BxE~(*SacLQP~4ctjNQ9ILnpZJS+}3|46dVhdTR-f?S82cHsY0r|KX$pZKMsT$o-ADNOLM ze^_`f-_H-72}xHjPiIy~rOt-sIDtQEyn1-$7rpTV*7Y4$TSjTAdY>k$Y_N%1zgUpr zXm)rtQDZ35uAVJ{frl;?J4TGLR_?$lKs{gMqgnQwvYZR^Y=w!oSxg#+G!`{{yI#*_ zb`#Gn&!JU0fY9nRa_2WqZd-|+?-#hrOnOuZactupRsfoW?)VPcUQ_{?`iHV`7A~sQm^BiV@=i5-xE9vi8kI*?UIwtM zBG_)vWF-2cQ3@=eD%bQ~)w`?K-po^ z+_00b7J0W#4Mx|2UMew|&B$Uwqe>A!x*6A+@-^@S#joU76!b5iYOos?s{*PskTY4X zr>-`Pgk0`g7mvrzIRpds>Yyyi_s zNjDsEHb21IND|IZ01VzTu$z3t%T`}J2Gm#!{fNkwukC8!C3kCaVQ-zW>)>(`APs1B zEd$(tU%a|K(1)U%Tg%yTc(~^-4)=P^4@$b8TBJr~5ZDU>B5M^6f?fbjnA@xahdbDz zSQ=5SW2Jly@`JV@3)FIVSk~pTqX%2SF!;WzeL!=TT#300|JSpe8pRd*eVI$|rYdut z#M=%I=fCvFr#!6B0+AW4PHYbUX?%2`lvAReWlYG!?4?Us_$*#SAvQw^(%-LQ@NK+* zvI-_%JuWHpfrk-;Hd&cp4SsqSJI(ZRf!5}D-KPw6UYh^ng@<$BqE)ygvg2v6vHf11 z<}I(63PpKJ{ZZ}0dWoT z!G9m&i0{j~f|tpdPfV>?4lGkZly_`;S<=Y3W|k1 z6ul9p9sUTHRL) z`%P95()i8C=)H|eH|vt;bEz-7Ax8e_pVS>C+?KJtd-Y1F6Wn3?M@6?`oCo_VqHCrd z8P?%byy*8IAycb&_Wf~0V*WW4{m`H+&B0_8##kdGN9X}^K48zxvaiXdG-P!B$7?3~ zYijwq#Ih?{+M&|xxo4utkSwgi;P&9XwM0q4jif)x5E zY`uhhC>S_*{dGF_p--NhAR?l?!?1FtF`zb6JL+ByCyQfK^_IWu7#*RRz8F2F|+9=`^SP`x9}Gh zL)-okty7Sx$T|$ToHG*rK0!ywu0Dw z0_Be?ehVWGS2Hr^indnr&Eu7o0KSW~} z4K@EK8|ePA-NpTj+fPo=G_8wkrxl^Mo+*0TQ_;_w{hbqd+Rwn+DA@7d>8Y*Tr{^>k zrCiJTms>B^Jch0R{SgJXc61V1;dL=dAL;2r(BD0h;qo>J;=EfOdx8rtfBxy9 zgIU5NQ6_LC+kga|_*Xa#&}H_l>v+V=4MJ%FK&hPx zTbx70BQnzB-Q3E8tCyi*qK5|0frd;QUt>n}@ik^V%qmoxV#JtLPXZI&b9~$$fi1cF z0x5crcU@E*L6b?$5ThgRsbfgIneqT|xaJ~3z#+h&vUJN4Yxt_sI}`e)yz@|7iWrp- z$!F^6+wnkBtWugwSj^|C9XA(sCXER7% zW|UHOBm29f;Xn3yAL{&Pax4HJoWcLeBayZQ0~#!K)67#@IQmzX$B!4{#}y)d>i3?r 
z%q;e79`qXPoEcOEE6)6u`&-*kd`!@Z9N!3R2?hf#lO^`+$4`TH?2@I`m>ql&dn%3Z zep^dH`a&uaC#P6^5LCdG&M`o~X`UxSjj}x8gCO{wUM(t}-F5IK-rHPE@A1(Fh48HL z>mR2*Y;{)57}!{|P~giL+gS4sGTav=TT3C6T00BA4h+;?5;DV9-1Ho8ciB02@X+Jy{+$ z-omvC_q_iwvX?gQL_GR(pkM}krDQX3Qvn?6-0Fy*!aq1)%~+9cGcOq0aFPAXaczi| z0x`|~R~7s|)gXwL53y= zkXVq`gDcmstD=x&%rmPFp*<<*TguHo#>3;Tr=7TH)X!H^un`Lo=~?*Zz_iPWbkZ<_ z4t8lH6=9^}j>iI(T*?QgTNBg}#B`ZU#7r?u1=&d)tYI}prQ4C76RwTew1d8BsK}`P ztY>Y%_xwGM{q7sO(PiR3)$z&B06m94Gr!&-DEL14tUcUq^Fv?P7%$y^8f)BN)Gg&Y z?!M#wG%|V^S4jVKz2f+65p~Y=w zEw2}_`dzz?hNw}LD@q$Q!At(8dWKsAt)$0ZuY#7_R%>4EHlg}#`!z3S0xi}`MR^#` z{gwowL$=8S)}W976fv+>balc!aASbm7+&%bIk}@6ODr-Wxr#!Gl(#|!^@~Eo7i%dO ziEDfZ8dv?nqF2Tb6q~`Di)R}Xcdu2xnW8nbEF@ApUz>N98xJE-QEuiNEvSxQZEnSV3- z!1#hbtG{!*Y-xC>Z#9Y%6Xnk2_8+!sQ<+twz+&kG&f->)6pB7wZwdVLeQ4RO71?!L zWGob4p`s#QI(n!;2%?Ep1P|7(uW;M5NlKN({SDLrTq3`JV1qI_O8{7Im}Bs}p@tWc z5ea6!j*{z_y`QZ!t6Aje>n-oi6d(IS$2du3ZL>D%J`4+Zk(=uQw;YCWM!eLsTZ9ox z18l~CF3BA}i?#rj(Ovq7ja33?{>>p~Ov44~-dB~aApzFy*ZZ|0Yj-6fXYyCqe1~JY z_&BGWGY?q~y~MkTR^Y3(3$xo%4EX~RfR>LpH_i-x!wkMOGdA(AS+`!nbC*7EjYDi~ zaB#b8fCy)c~?l%z3~ zJ9hE$JZ3uh-#0}{2>SHea9gK3PU*M#u)tCl8ovxXUlGtM zL9aUrI|*7o3Du(l2vT3mJRhlsEG?gS%T5&x_ix=g-`pLy!%&1687Xu%E*TLaPiHa@ znh@|rIvauTklrA8?+$72P^Qy@2jVy07-~WFt~kiK^sfsDu8H`qeZuCYA2)0~Vp7b= zNT2F04i#3SBxRLr_bxc^{=C1Vl(~8)VaD_Omv39n!2GC=)h8{>!)@Pp!RL)LG~d)Z z^4<5Oc8I7@l%W<%kBELx7~WX^4TY%z2{pX$&l2?){nXG-GY(Va)w4RBV_AfMiBGD9 zNRwA6K#AOA81#PNJcLeN7bkt0o;9-CnFpS;xvlO(vRdC z*qT*J8q%8bv<5T&p#tD^8>%#1#J?SGf5co4vJW^hyj%Ta&5ZZ)HAK#yako8~15{H~ z9De*H7Rx|3LQLb=;<+jl7c*Sue!p~wEn;OsBdXC!LDfuqIAP2awb2VQ5TADsgp7ZHf3CFYF|8yg)ieMjws^dV=AIw9gFqQ+&Wu$KuVPEYk zJ{;S@!1I#5kdwrP=neFC=P&j<_IF~&v7S0&2zR5*`$O@uxMPGlIsS(kJ3c-xoomPL zc;Ea@>^D|G8e=9~$krSe#6acn;aJp9Mm+&Q0_S>5QW=Z#B`@bYyLLLbg^bCue0gQ`mAc>m3we=>8dhsjF=5SG4ZHjtf!1&5HNQN(Z;~-R{Y8 zBR%L+&uZ!!+LIdKgnSPz-;WCNE7-^PbUYQoBol%X|NhNa)K@W;xgVR16}C$i8MOtj z^JMRXQ?~}v#feNF5lrW6s~z^|Dx?T&?j5l~{Tj^)xD{sfFE2BJ)&2CmFPHKl4>yWL zl{KojKeY1b8$Bg;WAxhw>qQeeZDweNin{nFT+1pa#B|w!xP{>ktMH!BxJ{MEMpZ3o 
z)-eU_3)&iihrE$I@996k7N%AatozJ&xB^237N}1v*wd1&M^ID!RerzMtiR@YvP10^ zs^eR%Li3&SgJ{Y^8sc;AD}8IaRp;los%@AiXB^!z0w6S~wWe3+Qe({CHnA+W{}E zPQR5c3Sx0~9iEY|0?aTmTwuylfOutewknpL2;FO*7CE|4Y?jniYWlM9sOGx}En8Bq zGr>xb^U8*OnIpD7v`C9f2~6|0XcqMDCy8Eo*s1jN-ZLC=nzGW!2i8kDPI9mcJ>4+m zxv<3zd(){iF4oSrp4@~6U95OtY+0xGFyA>h-`rOBPc4Q{qOq>XyQwvgj;q$ze*>u1 znMJC->I=M)A-A(2tLCd=-;X(}5ADvSnuqTKin=*oxNx4Et1bV)C(l{&ck%>@zhA@B zYeuT|c+vR7aJ{GKA~ z33M@9sUQ0;qTu)Wry6+gnONw$1{m?ff3R@#roakZLw6Apc>5EINNJ4JLg9sf-7X6C zFxysJf*CPSi<0;%i(~&%_I1fFtNbGJeQBB8!X45vsXQ!CmiXW{#fAm?ld7hDc0^x6 z6$YQ_xLx`!5)QJtr$dXJ0GY-dpP6sSDuXZx_NUW?-NylH!{FSd@F3T3#2+^OO_f)&OSq~ zXdkDr1I6iz&Q!0A6K}K-txWY#bPq>`Z8Mvj%7)VF2Vz{AjJA}x39)PuAHyXkg z@ryNh9%p%(^`Zn|;^acPo4vu1w)Yp_hQFmKZ*|v-FbdCc&)Sf^a|^2}5|jc~Kk)7f z_XoTfYUYmF)*ZM}ylgwZ7Q)os?#@cBZ7fiLoRq?<Yj zxYQIK$-d2VRuo#Y-c@2h<>$F?BxZcKocd?;fqPgPf8-E=O*_)X;$j+^bJu6P1oAk* zf2b3nV`UdX_kWD?ApXskbD;2fB4PE4W}xdE7Yy-oMSY zIOR)y&cMN*et+iFB2e?`dk_WTt@?dn2!G^u@tINR&ic_CHS~(M59rxpwQ^xM(E7bq z$hs|%yPBB~bm?`qebczMfjd4&|0~4J;#HpaO%Ku{?C4f6409L!6?kU+_|txrPjj`D=gOZ~`P*um0k_vdiKl|W>AfYt0J-|shb0E`MR|7m7xQnlWT)$D zEmrNS-vR8e-T5!rv}NwjyP};B92|_m=|?K2_huOwsN(#dUwS?as^v>GOBU114Q+H8 z*D8FsI6)U(>IF$)8}k_6kC|zIQRjPo{>ZESm&l}@<5?V)|JI%-Q2_HI;12?grL z`vRkVwH?Z%c=0dgcwO<<^a_i~K$|3F8VX=AREhSYdH^gue{5wPXDciSt8!Ff5*je-qstf zG<2M;jhhaCsr-;d>%}f(RPrF~9>zSjct=()i8u7W z6szQDtHs+kUQ@R#_?G{Ayu z6&t$!Qv=5R1(?IcfsWfU?1$E?J4K$3>tm0!f|gd^>Vf9g)@C6g52pT#S=`uU-NI^e zSch!wsf?5>qF!9w6_u~N6jmRWxf6lTcbELwK$0x-&-PrBtXJa$6_Nj&2vL!O2HNDk zKm6RN^`9E47uvB;mmY0pD||2bn1qjxGqF<7%11k_xE_9wbHus$rW54pSRV!R1uJjP zly@`6%ER7$hkp@RHFF=8J@ZnLWdSa_vL{RVX=xQ73k19^Vxg?vrR(KNT+Dmc?mjh& zAFz7WIxJFpV-hP0>wC7TS{(F?QVi2BP`1UY@X^R=D!o*R1IUTH+6=ro>R*#Q&zsbJ zBZr4z(fW|!sR3-x&b|MWNfjwxOc=?PmA^+A3uiifcv!upBQknAR(CkooSGBFjL;^! 
z)!WyQrwS1CPkbJUd!0PNLtZxv6j+3n-rcq^-v(Y$FJ4iv^n| zY#eW&hX(rId}(dJ(Np-89yx~l^<7A4Ae0R_qgygNc=_@P8}r8b4SVSFqEs3=^sd*yXL->wwK3w#G~NJ|l{T zp}qtW>>0^erD&<%ZhDFD(B~C?l+N((8t5{&{(lx=rj6mPOUI7Clrq4S1tgVX^!5ce zj*pLuOaTQM`MQGY+pw0BzIg!LOAuXbm5zMHJ6_k5<%{9GzProT#tup8Eu z?boAKMa@Ilp`$oW5yG$&^y=enW#eDcQuuCb4eeu>{KHQ zW1}`7kKBKQ(khSFS|JSyhmeEFQ^X0+k7~6iPAjl&I0+3N1JlI&qZs@`ShxeIeEmAw z+xQNb{S@LW-V@<8O@}dhKCXvunPA?1vq} z##Y9BKqPXA?eO!Q)Rqf#`tkj7AQJUnjJqPV^jG<*zCE6O+wbZ}#~a4jD62P?4MOE#SO9(*??Ufj6-kTT zN3ep}wZQe%g@b=feb3od+x5A)m$#c$en^0UQC{8se-1dfyvkQ_=R%^5)>wf9=(kVt zt4q8L&j<8oRkf=eh8swuL;q7zBp(U(ItVTE`c(bK&oOe?r8hc+ASa;CMf|_}?X88* zE0zNn!<%w4hE&A*`Ms1M$kypyyI9>q?VZN5RA_5~7{Gcw`dW zf@}15Lz?a%my&zmB-001Ur3fAANYK8MOJk4(|O?`6MCyrf3^09@DW#+Iq6l9r8x^= zf*1}dGzAM@tgWqt2F{~9nD9gNtrRgKfxeilW7S2h*VV@2)#5c9870$-R7E~^XQh~U%cqpW%+?G?KW{rq6eL%Uf{AzCCZn4d2K)9LcxZ@C`A%8n(IGg|X|D6aDds8)HQ?o5atr#`iQqv^7gwEo#)>D@DyJEwx9i+G5X0{`vl|>-XS(mIwEh`@GLN?{i+`H8iL- z{=d=#k=g8*5DYEhu+~EtD7=oEnfH3%8hdv61}z7cd5;YUn5<`-Z+L1kCCtpo3!zrN zTXH$AcWgOF!8|-Sl|Mcb<96#7A`pU5rFivMeZon%AmvY&-BRO}4EHqW{@Q%obv=p% zq@SSGHLBk~TAGh3g&UtU-=x4ieqLbweg2Ln8KNFsYtyU`?gnGY{@rLy&(BAZXqm7* zuihg_cL_P#b3y)?A6;^8Zr^4BN(5Rxu}HBBL+QcNLFu>V8TJy$qpJ=n;kQF9fiVks#=QkL;a+J}^QG0241vhKRVqjep!%u3&^p^h?%La1EzLi)Z=wI&Y*rmwDSWRmov~@wY;|FG{^`}~J93-z zX{v=!V{O%(j0Qx1e;WS;Ko*=)p2Q83d^BR-mY-m?L-&kJ>&5W0)2yTyuIY^|s54j+ zWHiJp$0-Pu@#xvX0A#Ai_gUG6b$$U?!>?yq$}7?P!j3n2G1KwG2_-UO{Kq%ikdggwDzxUaI-pwtDUcMoi1zTQm`j5n}sGV`zq& zIr!LG42mowbz)~9n?QEsqrl$|WW-}T+nA=-yF}yjE*#Q~FcnG|KENUOEYDZ)2CM0Z z(r+`0n5}_CNG%6yloAezz)4ubR`H^ohA+9BxCCP|;#BozB9{7MIe@~7>7&UPm3pEiwlOAy0c8f@!g{y6^r+8X15qq)aa&^*^g=02h@Ss& z{ZQiAw+LsiIiQqqFIRTHuip#^!WfHuuavZ2+;_I@G)a0I zIuxq++(*GeZdPxfXpa5{eAPKwxv!=**dP!@b@JI`)CZ{rluUh?1@PQtK5bNual{8- zA=v09mB&6;(M_dRkxGL|F=@|-s8NyC)pPUdVcr81UE9%9UwU%}&(r`=u037eFHIVZL*ldCyT zp9eJ{0mT_Xl#&M%E(@cgoQ8A_SL8?EFJ29d5gv3C#Qc-=8wxPp7_=1levu^J?(LkWWkvO{BS!KI+CUk~P zyV?{2j>pD(J^;J>a-`lJ6+KDlHLtUU zsQr!^s$KZ>k9FJdL5}bqI}=;g8zOx|9&RH{)RLUDbkhaWZ5xWx`40hh!}7} 
z9K~&h6k~{mv!?w1CR=XQ2qE$N4-nBBqkIg(Hu)bM<Io_a;!9gLLRGY)raYahYOLOentPd7HtYbh#8^W*|qq>@jfT zUucSp!|YB#S?>z>F0Y&%-Du1Ow`tREJ%3f+!I@v3FJFYk7j{!aC!-(z4XbTtHsB zw_kR@O(bKaepRh)LN58YGD)Wue)M7c zStZ+daE?y)>3<88-zWa-!X}N=<cJr!wCRAf7gc>+|y~t?<*AKMrnLI*$g$ zB6G5@ff#6m|Mveqz0!RRLv^*)Vnsso8PBHSA2drBR9@x;#OU)=Rs8qC`4ua7R2D|2 z+^Stgdq>*~A|Epz!xFNC^Btw;+Xs&?kqPU(w^(-(e{v6CfH$uuSUnMMjiXW)Qun#B zCp0q5Pb{5kZ31NBskMf3m!;$=Oh*&IGxgF=57kJSePSqxjw9F}UYYM9;b5crk>L%= zwG-)~P~xYky4epz^`^)(!vWqXTsuB=;p)RdnY=)4tAJE{%7ybh54EW)W^w! zF@mUN0jm?loyRjtGS+W@N7I2_Pwce(lA%hvj8Qg3uo6 z{)<}(6N7sVcU~b<6}hL`>+NUnj6Wg15&-=oH_E^5w5-=&r>5ADzFRD;TJqRAkP2R} zm@YbQ$I;Nwb5gwsg(V6~njUB^w!fV-Xp#*F5@!=%?C5WOaLpzB?jx@RA;cND<$-JN zgt2TMZ&?|(A@S5{72hl737v#1kS^nuTv8%N9V5i#g-8^1`O+l%yrBm3t0hQL-~XU3htvT{{H zu|UNz!-r(LzESm@BjM*Ii_|c<{oOf?TQ>^B@(?PUf%r=h2cefqB~w+pXp^EiThV@X z28tP)7~JWS9~dG%9q03!=S2(W$Uz)w*h=_&5XQ%jpaxnCyitKv=qKQ)C`0V9%csKY z?QQ1>2==J`2cPLqdy^!`NX}yoLupg$dv%<;PVg#V#P0 zCmbX2L;Ljxw6HI4n+2sj=nKtqwxN2_Q=O0J6z1pu{-TNHHrLKmENWqrXKRIZUZy|A z13+5>c-n+EtWkg6T{x6ice0eu&i2hSj<~vg^}M6boQdHMF)J-*Ej= zD0b|81A<6GR6jk1rL4$}Iwl|2<+co4TgAnb?S2bR5vE#r?|M`7r}0 z?dNxF)yLW#!0&?y2klx7Gxt9D}1UI&4n{!P!v)z);69b|DRz^P)l2 zfK6|qn=TDkcaM$(g9yig&7?7jF(C&>C_QLV3$vG6vlmOr=rG@X3yhfnxGca(3Z)Np z<4N=g`67MNF(Q7PEhb?t3=Xe?M)EBHt69toVEv0axEo?2*?`EYF8NV2taNroyRs!t zarQR5vE6l-+^VB>KG-9f+5w_G#-?PQ&!`?EqD387C~&f=9bXl3D(RxAFW2b|Nf|7n

ixkfWh*^)sU9muCFl0|R?H*|IqoidnB2fkaR-1f6&8 zN2+}jeMC;my>GL3hH?$*ay_%`y&&jWWC6aC2?zkmg7->ScZ1FC=H?pmGa8P0fJU12 zszPufx;>%*|NkL#ed&W>(fTZW5R2O^QFJw&o$I=-%9BUIbhCfTi z>z0GG+2%Gt+(@faPfG`Cgka`P|CBS7Jv$h_@@M(#Ed*H~I@fvR)vRWmPmXnvOunj39Xk5=B3d8ND&Tp-K64LXX)25)hH&#xUKTv#XJ8@d&=fyvn+9sKm= zeF%v&hOb})z(zxqBeP(Z0OUj*QT6T2`_hg)Vi5$fgX^WIDSIoncXISD@Gh}v^$iT8 zMrztG{RTGMa<@Fir9d#))3y3_ZI$0D&Oj1%R9u#4Yda1MHCqU^Gwy%!-e4g|8dDPXv7avUk z2gcN^PDN2@?G|xVFy}2pF%`sg$UZoMtfx2(MZa>(qTpxut4n(Dxv`?RPpwZr870YY zCu%(=5v5oSN%QiexSxcHr)3k05?=28FNK+5T8V6GLipCtnYX&zeTWFxd%(Ew-vC(c zYahuAGfA&`oL2e2j#)TRz*HxegPMAGdn1_@#+<3jE23cGn($852hR93a_>3qz;Wp# z#U{Hy%;qBFO!-@j8C8YcFCp&_B9uw$Y|b~nx=Og^MH;hQx&K?%`Q&t+ zw!-&bAbQk5J~R$sufz$wZrl3A&QS2#?UsgUnAHb{+K`fKIE!R+2Imgs;i^|m)Is>* z8+pAG0l~Pua#!X@ZJyB!31|8_j!p#_c&1qY>HHiAhs0bG7)p{yY?t<~wP8mB0|R>{v1DB8?U8 zVPxf3;s>tND(vMgw;sgcNA&tyxH&T^Yf03 zNXlZYF<-y@=6^ZJz@C}f4^cGAFm#-Z=NuH(M7B+#W{_AAgFFtU#=c=CRk}%XgcJze z#Tk|m4z`LXJ|Lg=B#?q#N?LmDV98jTtRx#rP8z5ywn?R|*-K}TU_e^HB-ls~Gq>?L zJluL9xy9IS-i*ngZ>K|uqvRU}7}>8hA_45Is0)EUZ=^4D{F5j~5BD-(2f>*}9)kLY zo~QAGNcx{j^eY;CRf$j#n$_p?@UE@nR^uxUgRwat&Z=Elv9td>PzFbCg2?5vEU&r;nY%#d1e!AcHCrsu_gC2|f z_mL8*_Tc^TPYJ+sSjXqh+kz~H#o=5`(k^UxaW+g*{+@)|#%)i{0Y?jtI{PaN3yjN> zlmm!q!>C@uhwWw%_mlk^Yuw6DrP#Z!-G>l_6Gf&h){JGIy&nBspy+f*qv|C22s%SjWSb7H$a+io zPzsjO_h)1Y@Ra>*1yy!G7x4YP;sf}V{;{EZ2Piafrok+pjy&K% z$&dkh#3oVuK@!x|^j6?BedpR{oN~^OAH~u=eislAv~|@4pKVZBlhVeUJ}!|$gtkOK zre>*F{JtKRg;&R&d1AlU%PnWAV~V809ES6Ht=S621;%_3x~99wc&(X|c6nkh+vr{= zwO|z8cY7iaL%vzXrPM}}frWs7FYRwh(mJZg)`-XWuW#GGH)AI>$dnR{f7@D_T`UKG zncly+)G_YX#2z0s!zN zG4caIcCYRK_pMSSIc_b$&P?Oaed6)-Jk_KY@QZ#ybb&arc3@95?=XoudwNQX-x}W8 zknb;|}PawfMNL7NhG!F{(Su)DP&gZ_Vfzrn$RGTUy%-3Be}?4;W5HN5CGJMgd07ToK{$OZ2hj#ry6XeP7ulcuY8TLao*A zP71W{Gt|3Idng)b4J@1fHdyISrHSJ5f4AAgaZ58pIvb#Q4T{PA*z{@4kwGWZ{pJU# z2#4tt>pnv;fm(hjCRsUivyn-`SO%fqpg5UAXj?z$z|RUaU0y2+wthv zX@^<+X$9{<=!_VOel_@7=f$yuhYb)Hvk{7IYGY*fbgyaw{m+) zvV$xN(5Dmn3`&pZntUpM-@(5zzX+G1MsbnL-b0jwBpdgB>QE>$)bQD~1zsyRF6_VL 
zNGpCes4URHm1>;9!+FA-&ZZ-UiHMDLR@CagaMBRxNbt?qmwFjb!uOkxVGV{Y z-U-6QTYuHnmJjx>`8*mui)>OJ$}Sdkpupd+j`!Z7J}%AgRxPL6c=!@wT=KWY@QQGh zf?IGstdj`saU|J8MLTfzXL-NyI9Fj%%2Zb71kcy@{agOir5~LG0cRA@%ql5G*k?i6 zw!I}`5LSxw4IzkYb)me#%L0s*;aG7;x!T+YL~PjW)>hrhnnipoN>N4**U-k3Fj z^yV$QDUJ4_xtk{1nS(Syk-a{>68tkM2`4< zaxAP$)h$5kw2180z}RpOD_Ye=5?BzWe+|rLj4oHaGln6;ZSC;H?KZEBC;b83A*B%r zrq4t&avESWVc?Uu!F%s?Sr2Q23n?&OG7aj00`07xUS9DRwv@Ks;t$#8e)DE+e)Aea z@QFWdY4dK=n#;fbmu=2EE-NkUhlGy0C87by;?#y{S+QX9Mz~&AsMF) zg%Vy8Tl5_iX@;w)G2aT_ra%x2>ezq?1D^YP6U%Sp=6Nf)Kaa-@gi|X-C~g5~_#S*P zxrmvD3=uVux)pk9Y<@34N7RhP>(Rf&HQ{X~np;W1@QK43sQZ6Am6A;*$9Z{GccnZp zKaX{|%PCWR5)${PX5wS8aZ2Gr<&}2aaUSG-!EQW0^Ia-*>&wT?>MKSpFYc`U$t97z;5>0Cc83 z3?5NkAR}trw%XJp7y7Xp^}~8;T6SIuG~)Di9z5_74(K~>f*s#njVsmzccJmi-DiZ+>#Kj)SewUcYSQ=x-gtp`IftIpxA*@{zPSgIa32 z#U`tP+n~zcKBsnlCJD&L)`HityLS#-a`}#^Mt{+`i3nA;RyIanKWLksDK{FM@rwuW zYJS?j45+MVl>zOC|5yh-oRrevEWj|2sr_wS74OtI9X<_z<+hifcE9Cw#lrL0{eT}X z@9b>0?~I7Oo2&F*116&?d$_x>la72vr3uW*jL50toWsc-!s7NA!jjQKBdq;o*OT+5M6w11CMd%D$7+KevV7)&LQO{HFD zj`DNtOivn*Dyo0&U8LET-*D+2Bu{P(EReX)y6e0)Rlj9ob3Sk?^x4(E=u~7`kF<_p zUB4EY`S=m}1oNI6Rrtijl!T&9Adg2q?&hb2VCsfOiRD?|sH^UDxyTD8-gX|4f7u(@ znXc0IL6*pqrU?pZouH_coBixp#tY7oZ`_m|H}}r2wF>X`W)wBHBkQkcMLDFWhgga` z=KK8oBr3zCI^Oi3*o)6tx!j`j`QD3&tDrHiiDxul=bY*lABUk3IM#^hdEy$(%>k=+g{+-CT>x<{d8)9UX~B z1>zx1_J&Fhrhct+&&m2(S3Z0QDWz@r9DUfdGGpT$Pn9>1`(U?!R3tlyA{CGXF$%Yec~@*=stU;>=J}ui%!NVut|4#F@AofI-Izo$fDkK5EW7 zz1dZ)osPrG28B8&x>1Z2G|%ya0uocM^^yYH>74)aI=0G_TO9izIbwM+?`FQ`etktu z_7}mVAl?wIy+JL-gq;EG zC1g{rFP$h+BA2#eKrvb6=Jl}1E1USl^IpS>Xd`;HYNG*8HB<05j^_oY1}y%Ni$;h< z)@9`HtJC_SGjz@dHVRknBBOd8y?YjNZ!RP=sEFZyn(jZLU~oir+$(^^I`O1(>_iNZ zd8X(s@xlp1xc2N#6xhu6vr1agXy)pEHyKD#a_;3ToOm@;ASj}$uBI!P{;}|N@i|~G z`#d#dal*Bxj^whWG`*T<<00%Z_&4=LtFQ7&VjJ!8>Sl#HP|KXqAVPNhp6%hEgY(O` zEn9L=VU(H;|LjuJ_m?TZBfZy;L!b5z*?Oa!pDgoGJom~N{)G$q!=m}f`s>Qa^z>%L zUVE?>HFgs}bV5gk(GI~cshmht)8(RP=TI-9g<>)A6-eG5_O`1 
zT=GgL(DGX?0u3wTQt7X%#a~8H*#XZ)OX!d8g=djhbOa$v_%IaP8iaLaO_)A$0dOnf!_>Cn7ZD1j|z14`;=#@bkWwf!V-6ptDw8ItrkeI`$#5Vb^A%_&7Ql2p^VT29Zvagni@(qR zNb{&OY)Kg=X8l=sbup||$O5dZa865$j z!{Zu(Z7oCKdB^d^d=MEJTh8y$bN(pFsPyV1q;i}Rg->~!`pH+rl>F!nX#&XN8G8jT zTar$pObjKp<1TyY#1ODd_-dh<-m$$XaPPt}5X0bPUhVzx4yr;QLr`>TF$EsxUG=~I zb7vpT&j+%wrd;}y&f%kX*;#n)W{^Jhi_aSNgomb4Wf^|&OqDpJo+w?VQI{;9Q;xF! zaE;>LQ;WsdRsGi6|{A%l|#YfDL!8oG+xO^{2FY%c(_!BC-Y9It-f^qi0_wDv) zpi{=L)(zr9QXiu7eTIV=N~?&CJVhXFA%ry9%j!naY%=)E=F>YEx(f=z$>rsYPyb;d z7zIR~o)E4EKo)!`WH_mO47wAsC;O`{4Kv9ipbN8()GPapQ=)1E$SMvUe*OqNNe6pz zpV&vgH%jEVVd2@yFZzv(#D}q`JqvK+dvVqMyZ1|2ns>4+GgH6YuCe0Y{38Hv9?De7 zjM~d|zPL~-ariS3ROLio7hCMgD|sShPzXM zTg}_(#b>Iw8YiwIOgp8SZGD?k){k17X}>a~l3lP4x!Y^U{7==reMXQyv=P%gjbv=I`$I7`rc>Ln>?wcU# zpW{i~nyum*Gx%yRri&#WLheCXIUOc%$~JbxC1`rVc@aWhoHX?{)BU&|{y_cnAAz5$ z!PlCfrZpJPyddD%Jl4tB-bw{!PJKswNOhslLW^i7K6<&SrGFoZy9yO7phPivjD)Pyv; z(-5}jm(L47^!jl)z2`+ZbH- z6QTsjz00@*@a2DdQd~4l$!bSIUpTQ~;>_sXlT-2Z%M9ba@{E}s>xzaMH|ebp&rgZI zZ(>=&XHuGgI$-Cg))WNxNJX?4QKZRkXX>&2d>*x=C+)zS~T`{aQC(**sH*33xg? 
zG_Z9E24>n%=<&A29FLXTN_q^=G@r&g^QN`1NPq2_jTL?sbBH}4Y%y;9=Y$U)Zipm= zw?`|{^yvY^#iz=E7>o;oVFAYIM#36dhhXq1t6N0-dEeoV*15cx@JKf-G5B(P@NlQ* z@8^3A@9`a`C?miOiU|Z_j@Al+)^;iIUYj5ggBjKrjt65G0cu((Xpby*Nx?yJXIK-6 zA-U?UB^-o79Y9i^!rj9(AYDE<86gSij;g#YlV`=%#seB4Orz@QCj(U-jyMmLbU;)N z_00`tQCpr$cEe{5!oXu6_aHAQ+1M9pua9Mm=_+{>)zNiI`vGEq6<4kto&|j=y}iAB zb4u?flSjjrmGsD4%`me!r|Hu(swt5!lJ4o64j0 zJHcgvKWyEvFJ<*|d8y1WEFMt4%m2Ce82Egl%EKoW&7zQH`lnm`UtITB)P9Df@A}X2 zUylryul*_cdF@!)+mlieen{?cdgjhSyS?E@S?NusX(huZ`H_W7`v&frAa@+(RhW=y z_)HX$L(OF?@-jLBUi2QCsxsp(BqTh)cP*y znXu>%U7((DzKF0?XS|0R2*Bjw8+-g&sjP^Mwb8r(NTS|}m5JMsN*PTq3KyIT6rKJp zY+;}7aN(xm*0O~yheX_d!7W#n>#_z#J+Lm_EQ{xFl5p;XkkBn8=24aBz?Q^ zc1ieY?4N9N8)%>m^$+Mqe%krH@yl^%)GSQAusnHA`~BkV%L`D>pIY zUkHJ8UGEsm=j+~xjt+tVS?A_WFlFVn`LOO4gf{WyThV@V9)nBHhX;0%L@qz#13kJ; z#Y!WF$b4%1e*@Y8&8&vT->`~qKNY&NNG zV4x#ep3-PQKbmj3fFacY4~aY`yz+Ku!q!v6Hvz5I*=qa)kcA*tn)XO7l62^{j?Yha zmz&)gQS9cRybhl#!YOGGvy&`sX9GGruMr1$`5~$A2vPE}j3_wR;6jHR06NkEX^r@F zg7Vj4m?4{5wID*}jPjTo!I{g2PGr&$ir`3Hh3fuQyn*0Z4o&rP13?%q_<(%oySwdD z3>tm2Y5b?mN71hs8#j20@E z9ccwM-VS;f%t;S19n+zBFM5H(X_3H&)54NI=aySU;@wiOUe0H7xn>qSp3~okW_xlK zz>^+ZO$)`XUpIJiURKZ=kq1^UqoKFv&&NWpM15*+o?V?1FW&$*v(IKuOa^Wh00i`u9;FDn{zt{Ih^6Pl3X=4|p#7j*9F)7G|MOziLLq&bxPl0*O+( zK!xPQ%H?s%3rF)?LVXVeB+04is6p;RW_W%=6kT=HTi(Vs{&D@c!OJ%*a8tX*Cl6B5<;|6{<>bw zZuuYqIYqG!L8Jp&lwfe`mrvA*4P^uinxBfreBhsaow51aq6Tq}LmHedzng)p_1k&s zm$8xEoHtH&xR4F>?I(=`RjfYs;-4}(*q1yZ1CvInuan6r`_EIQVo&K*PFCjN;2qkXl|0@rrmSCXfR8PzJZ| zh&q|22=7QCiwJ24wn1~&65-7y+IiQnzd-&gsE=3sNKdjJ%9bH3l^SlzF!|`iO3}Pi z!5$~`%$4NdGH)WHM74L!D$G74)nNIQ`ZM=C8UOu^_;#i)bB1sG(O`vo@5ia3Pg{Ea z69ME1rD^_d*wMo}?zhN8bXFz;P^SgC(4rc8W+TaBtYpKIS|sGTO2Q zXb){^!xxI+?rE#RFveizbNV`9cFTTKljOelHc>C8)aNJ2pP`Hj#Z19K`J7UQLbQm> z8uExkZ1pm%gF@pYFv;?S5iSgBpG5=itIe}rucN3q9Flfsl$n%@Fn+lEg(_+CL7NFn z=z&M?mYS#?C<692vuNB~N)*k&7#T`#RE%0`INwHw<(|GU0zh2-=k`=oz>%^T`l7tb zUIabe1L>&pOYNjr3J%a;!5jqtlx;{0pbF<4X=FfCRwW!Q;? 
zJ6>(zxS;@N$d6CTol2LD0jkjH|`HizSYL(5+deGG`OLou%cD~GRS@O~t}A;~gI zfpa1e#g($v?eyk3Ii^Ea#x998LZHS^;F5t^@9Su06g{u12nb4!%?27z{S<*m6k%V1 zWZ;aBYz3?UayTe>4%o2jf*tbOnE^Ep=zjpeY+S+Hog(wcHZFa1G;vmMpQR!`Sx{os z=W!|es60vrEpa`qVqM+!`x?)8o>y!CsRV4E@bo$r>)HIr^Hjn#a0HV!Ixv9}6u1~I z!P}%AUWD#g2^Kz|;Q35q@9U5|)NAVDot4L|6!>M0$-b&-0)JJ%)VZG-8lJh(55Kn? z+K>8M*Y(7QbL>wPC3)AUXdmT}yHC%ZFve%Emn4E7x@xRAdhW3P%ffDwVg1U_g2Dcj zE;7QSqRNvB?I)u~c4SYG`>f;7J*p(pABx?L4Wc7N$TVlHguW53#ObOMC^j?4!sK_A zAD~S)KP*5P2hIPmd{C5DPk${OF4a>{TXi)mxC2+NN`w>1QmH`*I#l$%;~Z@uFo*Zj z<+N%=>m8%@tlH8J;4MnAYnjhNDRk{<M$Q1@rI;#M>3e< z3reyBqf0tL8d*^E7cf>I!^z78Y=iyxdZ&PJuF6=|w*c4EG6+V+&+vU3N6fzjEe{u3 zkUNtD5~p#**L6WhV`%{H@e094i;F6VrI*Bo*vC~q3ys%JHFavghuj8zv)c$~J+?Gj zSi?DSv+~=#e97b~c+B)u>hK+>!U3MGj5f}AzutK$bS8!>Iu0@!xP8){jm(GSLV=zG zT?VQ}W-pc3j$6NS$KBqkKeIMX683p(9Q6!P2W(dLMqH)VN_gnvd1Lg>aZ>1>{WUAHG^}j;j1onJ&u!w4M{9$JtRL z8$O>GjjLnZP5zZ!4l{=AMS4uxZ$mJ2q%MdgeFrz(Qbkfm^>nF7?aqG_>YjRp5vO;E z-7_cEpb8l2en?r(fH@#&g3ivGcEFVkn3R~POT`e71ryfqCWHY^YUX~#+8xMZ>c~f! 
zX*6M^Ur2rlF;)4gg_#6rCdGuPgo0(188G&`2o|-t1Yd|~tJU?VlP&r20dr65fbF1u z5U?nI!-ylrvM1vhw5Jpgo$$&0QO?ue6AF8K&r)AR@ZZ#DAik=ig*l@AVSdeM%~rN_ zQapRlfLiv8S6_s4Zvfjj$N%`&UQT~AXP`Ot`!kM%e?(>#j7~iKJ zP6yH<@G83bC>SlKuPRuK(v-dNAJnvK9Rc8!Ub;m1U)BjS{`M6z5d~PHW$D@Ue7<~^ zJjJ(YROT&<%zH$(mY|ub28f&#z?46@vicz3%yPc&DTW0#LkqzW93T#205hmq5!f12 zGY9-dM+QK^Spp{`$$t{HI;3<{y+ef(9s<^i0bqbKas}Kih0=Xn%b=bLh!|GU%B)=O zdN{Kqdai`7jr4>I0DUchLeC{z;LRWAx8u!PY5D3L3t-5tAXMOJ%P8Z5p2~ zBk#vP!9U{4k3)liUoI55qu;eUHQXkOYm^`NH*jr4`xNN=bWhux_e1hq4=1h`A6IfG_dBn-y#(fcdH&5l zjf40cU(G?a5r?Fzmpjn~c;1`$kx`$-$T=LRJX@mC*eXJ=wDt};B z$8QS>d=%7zVw}epzdNTy!a(jIcw8Fm1q_T`1H!n03YtWhAICwAsMRYa(3u1j(}{lr zkU~DxhaE$20$SHY*|jD>;4vvqs^WSh%G+O5Taz_3PvpavsgDG9^Q}PvOZH=ghSb;$ zdhLUPQsBl_0mF!=A;}pyNIghyeCv-7Y88$qZ6)S&yg@XbKPFpOm;@6kjpQ$T52u`^ z010q$G8m4qS8quNvMp07j}LBlv$*zrV|<3${ZM%LIc+Dzr-3&;+uSGctA{a$H`grgclnqFipoH=eJ8WUvJ{~B83S)` z>l*Wu(>}kTsn%yU?)go$458%%j>_K&(2)w4=Tk(PcG(wfNMfxzj>sNhz4V(NO2Q3}^8L;(p%_g?2_?{pmd z)L9`Af9eNuV70f@Lwy5URf+XG;IqxEP(OS8_UcI&FX8NDHN&u;wsIGoc$e@Df|cQw zKJh*U;?B;!_weP!uc=vP)B9MpBaqCy#1LF^f5CO9C|2A zXz|wvUkTnVz-G2Xee^LQYJQ?=PF(LGDozoCIIxBRts~VBbChkM8bK_ZR!ROa@9Z+h zyQ)se6~(Ozi;1Q$4G!PRENv@nP7-vl$guQszMUGE&GIfsnQGx8*{18tPNsE#=K>d` zDNOa=s8Zm9(Q*QlH#P>(&n7ec42k2H#MQ*kXGiaK2A486gU1W+I@>+sqpNFox2BX6RWIiP}4&W93^w`<~MkrmxZKM#5@UpG?^Gd8ErDV)sT)Oqnp zA>5LNPaOnLOs@=_+KLiM@;AQ!DVHx{@A>2KCwg`~3${KXE3!~qu`N$NnZ;#fkl~eJh`7ZFxm+u$z$9423aJ|<)WDteYM~?-}A6_~? 
zAl()Jik_tET5gYfaNmBeb?dh)Km^k*em^z&XUq!y4`JsF8_i_}1RI}KGw?_sGCcf( z`T&H1(Z~!()I9(nHh&K=V?wV25Lg%t7j?CN{0f5U1hkcNX%yTGV$@(L?sn%DplI#y zL2nsF<1ZPgbcZ0Q#|**6>b;MQS5dBYPz1gvmLb&?VTny-tKS!#y@}$_P~%)_jmWg` zj|G4&7ehus% z_7Lw_UHCoVmxFUkG@RW*M9fww;w>QR(W0XiHe7_f)BvGn+YFDQ4{+3uz7hB94al)} zVORn>w<3(%t9$7!f%jKj{!4W~&DW`NjxR<}*+r;>1{+51MGAum#X*wgKUGAJX*Vxj zW5Dd*^j(N4G_3CeY`FMIfblLJB`uWRk%$pyM zd*Q;H`%0EaIxD~P1X7DmenbyQp=S9xh9#b%z}PpCuwpW-9KZnHi#}g~fc)tf+M>e< zUgGGl^wjwLu1k4r~a^Ue3DZ|9b#!lu#duxGXA+?mdXtr5@kV+Jxu+Sjg8((CQF{4fK{`stid zY!LIe3HN!%#&=O!GeOiFN>)b)vD4>~MrFDB4+@ENIpMLPGn4h94R3NHDvFS6FMyZB zkBzbu)yd1ni&g7)wseW7+R6owE=NV~)Qemk@Adw2>Wz8e&2Z2-x8!NFrB$5lXhu2t zeC*!gWB>1_M)M-GW5RsbpBxxM++G+pB)na4t z9wltj;D=}2r-;-nWnl`-HR|CC(ze&`CQ3IHRJ^viA9(5?blqW!s!tGM_}Q8XFMkg( z%1nS@SW!TP03Zk|gq9g0ax+k>$ASnUfJe$7ym(B;V6|5e)kc9>0s0&fduKQ6LlhXG z2p8X)i+oWK8yF6%tQyCsOnx+a42OXnq}7BzTNwWL3K8H8MW;QEe;X*t@`cJJ#H<1u z0#GV2R{6+nC#HAm4`kh^yp3KdnBldZ1g~=*Y|k)LGNFByJ|x0#nqyMq^?M!fY-^+F z?(qH^zEAZH6-Nc=UNtoxtNxu=8PRJJ+%FOenQx))(`fs4ij#>!5}nvFVVboEJcpCO zgUAT>FSz6L-Ae`Ld}?xs?R4-PraT_XGK|Galhch~56u0S9z)C?oQ!1HeZF8jZ19`m ze58+71Vy}VcAdekydEBF`GFHM;1%a!?Vx=Aip?V+-__JJH$SiWdM7i;|5#K!?FrjH zn#ui6Q~%4oo{6@sESxr)iA;Kz>0sq{53}~)-d`_{wN~$*S_EjI<3Sp9=6jUTsqgZ? 
z%`=FM6sdS{#>0RPQ;L!=vA(iGYmG^}MrYw%^pvaMDI zHk-$B*uKi>=$7ytlACIjwDdw{f<6GP1GWh}PtgC;C{>QA|N6_)zUeY4;Vkmo|D)+F z(GRJxUrlCIGpASuF+8liMc*Jv8@x0vI z?Q`F|uJe1G$G7dB>asjILW1YSZzSP+Q?lHKlbn?LAp4wd$IME;ZKshk;1pkZau;;{ zo)3DZ`vDUvGwEr$(R`=eq>^mQ0>YL*`>B&v6HQMYtx10d&;6D@jiRbO#Q~VA*=zNuGc;+%S(Le#s{y0>iPG zTPUP!wf``N+et&Ys$`j`5Gy)5nqwrHxvu8UugvsT-iQXl?h_)d1g!iN9KLQVm8nXr zvo1*n#PF&lAH=Ahd5JH<=%4w*0Wla?P$~oRY!17?4jY{WSaG2->2E)1RuFK+a#Tht z4V{eNwi2VkN5fQ&6>{N|Z7`fFw!m(khWn)i)|7o4MkGIJtP4oxj5Wm^(kX}S@H0OW zOtpQag4{{762fjZb#^465W1(8=dUNt<3I8ZGKHert(fqBHDiYZfPY?>h5o4ykUY}( z@Ote$tplR5q4+k5cS>r2K$hfv>AxX5LG}>oC6l=7Sr7HJWb>ho#;@%Ir ztg;ELZy#tl*=m)>3lgp)r%(qj2G2`mil%JMKqJ~wdGT3aa3{RB4ZO>0BH)c#pCUxt z3S!SnT~mH-){JBUhdtHJG3Pwh$|!$^}da3`$nqdU1%g8Iy|;&P{N>0-fS5X zmG!UHNvQ#5k}lFEO42ajR21@nfB+$TcB{@uoCM{msqa{4?_;Bx$8RPEo8NE8;2$ig zDwdjcDc4osSCc%x9B*9ao`07JjTJv6eqpXi|s`F1DgDjQX zK*7Iv%RfA#e|ALW+nFcE;KwJ~kO>>9pL`wP8MIw<%jqdSM8V>rgv zM;m2;Q})iUKqEzmC&(KF7l6o<9nLn8w81oC$P4BUFTa30wIKDLjW|rK==}%<>UgkI zB(PFMlmPpCE%?YqJ&m;*%{~{J7J0N3khy|xW7D1vU0IVR8l3`{~+k9fzUW6xo3&{_w1AO!=U~NX$U#7^fb$;rjw2kL` zPbex>tbz6VMW!dDh^>)}UjLP*UTAC63aE@K2I+AbUvJ!~&jlXYQ*5i{%^=sjwMR2M zjhV1MXT6|6zI!W3IbAjQuypru`!vjn0#Rt^> zOo$Ae@Z)}q%la~ZGZt+?fM@P%`-^1Zd(Zfv>N5DGWRM7D0mmg{1Geg z8=?_Cyl3^47M~${ZuK)9bX(jjtnC^_!?T*-eDPWw0){0^Z2Z@;sFG%Gy;bKU!n7FQ zfiwdV)ZryqFx?HTDM&a=@*qcAbq7{z`U8T!c^-x3De8d$jCvz@id=$}*gVp(H7C=~ z2KklDoD7)3xg6()oSrBE@1+}gKxn*xVt{`z0QB#A_#^&=Kyclb#+tbNxpWNB*-aw+ z8-n-)85#Ctg*SjUhyhw74L7TGdFYvU(0O|Ql}yFQNawW(kp~G8O)uM&EcgUfQ5#-6 zBZ6-k**G9=C!Ptp^q0wyV1{q-jH1=lk_%{8_gU)f>9c@K#<%$;S@S8zbu#aYh872J zQd?K+>l06DaI6}d=TcG2AY zDCK3nZ^@Ad)J>Qq)=1;5$Ub$JI|H|re)UJqa^M;VKQpqZ-R#ZbeuDydYxMK#>AG*V zzqN(6z_f9QJ7PKT(W#**lW*0M-CHcj`RGlCU?B9< zY9g_U8Y3D4REg2{uEYaDZZ~rt7k~k1Uhsl>cqx%S9Bs)!h!k;ba#g7eK1+FxD>2JdFhi+R@nCvdzCR6+^YAOJ)%o@ z@MC^XiwZK8*vsdsRWzO%r69DnG)Um+rC`O)1xpUuPtJl^6W z<)uZDBRos;-TsW^XsE$kGaxj{+l^*c|Iti1v_kL4t?R`_pH{YUl25vB9B>_XCPaQW zI-KepF=4x5B$@9k7TQ`4Xy|!CQ&w|n#K4f_^T2miMZV`21SXW{0thKDX+$>Ctj03z 
z^-~#bLf{3HDpKYJ)dIWcA9dE`RZ_)-WxyPA0qv{RX*0MWpvn*mFs5);kq;XKP9$CDEyW`rz@9qtH6`Zk+bj?up1IdQ z&y7Wi1f7Fu(G61vCUrf&LY@G-ny6?%o)rEIOOgN+cWhB1NY+nb2<6*fB`yQF2GQPJ zoBu2uA1$MHPP#W^AA|zqg$j0+-Rm#Z!b9f zo;)V5%gd-HSCUtmXutDpJQ2meehUjjDD@`Tt#LLOt=jW@etrD6K)&EWLA5PZd77EmTy0$u>Di)}>ZrGR^2=jIm8eM(tFIG!w%K17H<~p{eu{OF@)LtGFC@kBoBw< z=QiPNC}V@F?$)2A6P5t|C^E8VhWG3g|14_gFfSJ^G?~}N475?b9`xp=gc1UVq-^+9 z9wTOj#ga0`1QD=oiE^Kx|FxfDs%NB?8illDuWQxTxR3z$b=zObv5iyTI|1 z^{eT!NT+yZNL48up{ojF%T+JA>fBMJiKN7Tjf<<@LJsHwtlJvOIbi@Vt$|Oy>OFho z6(gumcQ52lCp`o#jQk9@tT>wMQc?CxKEae8Hdr)G?j|zM#keMA-bF)t77M;9E+&pY zKG93eh_%C(_%=UvU{UtxSWTS!&*YOiGE}@l(o?H`uBF`e{2K~nd4{Xtq8yvxy5Dx| zlUMDK&hC?iR{*}@7C8w@+WUCPxo}S)9(l!OVx<-qG?|_Y-X5X;g`3XK$gsnj#6!MV zXXxKVQV9Z08+qV7obt-Ar?&^aAV9yA+rXwuaflwjA+$se%Y*CudKY~z@rbnbAyvEn z?0wjHxHY}g+_9Qi29G~X48+Njo~q{#Ur(Ae74^-S#*y+QK75TWvrVyb_IZ-Bpf99R z%7U!{35#`pa@rl^wN2mGz5$eV+z8-#|MJ&38_avw06xgu6&s$z;c@jl zCO)ls;!i&@vQB}d?p6ka{0vwjLt%zFtTPqtQ#RV>Na1Y0UZgfDY1Mcr(vdI(*jjkV z0!|J6xPf7~V?bf#J`8Qw%<14y^)Z$6x7K2oop~>JJU2uQ>+Ab*6Q69hF1&U03!kL&0=caC1Ra2)q#87?0P+K9v{%v>V zePKFX>_rhd??&gyk$7nE1$pwt(MI5JzLVRxV@S9D?Jc9$KjIM7;gD3`3n6In0J=wA9){G+Pp~WMv{paQh!S(3dNjkj!^DRl6qB$B?4gDU==D z?ZU@oma2uKVg4hA$7^Z7Ulru5u=9#U29=zKNdg} zqylD^@Swo@n0#GBjDS&!k3Bo4YGwtB8IPZ_AFYH}9yosnBy|>*Nf#Mc7Sg1%?Yoc8 z0`45ju>3trb5g;nWPNq}Sf0qOqbd{_IEidyf)!eWuGqld)-hjJ=&a7z zM{A?K{W~k7aK@@2eRwC^cjgHk(x9JDdhy`2hhaax)gu}hdO$AhbarGwjTSDFOSHP6 zR^?!MjrOVDH?n|luKF30FWf_@#T%c+67+wIP_}LTSMSte-^Mj1e(@l3)b#y|h~*?i z?_sO-;fNd;Ni+3{^Xec5z$8Uz?wE!9!B-CfeERN$Y}Qb7X;=mB@mRWAFrFbP^bigO=-AJ14@)S1PV$H*NKBm#^fnxs*DAbGOrCr;Bo6|L zYpPu=JDYr1LF@qDHR$DYjfA&bi-8uR*bzqf7VxlU#HUzA~7nTfcHNGav%)ifR7Nub~KtD1Ki(RLIEYTDh0U4 z9$5|>!3SmHBG<`jE@fP!Ufk)Ilhi#p;G&O#r`W&~kav#=aw-`JCFI2Q8_UnahBX1| z;K_GL+)AZ)3rgQ&jHfi^{+<6mo52j&<%E^sA3SpRA%K=#`tqnzY*i8;z36;}i)d2O z6+}(NfGpvh7gyFK=!g)H;O$$7?;7blV+iqYpRT!@zhyl?P@et%8b33!D9t?TjGx_y zjjUz^YJO^8vUe4?%K)nIhZAR=`q1$i_B>YCb+fs)Z7;-w@_LWEZq-wd^_Br|{nS;4 
zB`NdfRu^ynbv#g{FxDp-hnii9TJ;uU0+4|2HVy@wYyAtA6FORKVoj%h!THwmEKx|E z%?oGs;p{%4^otBiz@*Cmv(4z@Z_rZSZ#<|~eS4|Mc^+)!@+}A2{u1|BmNi9iXm1f%7` zsu)L<6xc!fSQ`pRUq++?cydxw2auBIS<-_pDN45o9IclO_G8o!Km;fSSQ16-N<=YY zfu^T%29A4+AVv7dfrccCO(v;NL1&COaFD4yo~bV~TIDC4j5b>~3wt;YGo}39!aWO| zEf5x3dt;+aCb1>W`dhLzlVDM-sSw~fC{su|7M6XumST8c@ynvznjuiF-olhGcvvE= zHOqHz;--HU|8hp!GWgkI!So*>FrunkU1>U@L7Or!&Zg(Kd^5@;Ds(<8B)?q9V#D@= zW@72mUaXU8o(GL5cwiwDa4yv+rT!E@#?j`eK7um5@zidmTaF(3`D)tWjr|m@bEmr^kI)Bx9dlTH zp*{zWDysObD2Inb$-CDa55}cDG2K;B7KHVy1czU7ftKo3>g5^9>on%&KHRXK%T2Zg=%c?D&)}$pBoj&e z{L8mjvBojwKUZoXTD~m(x7~yKNtUiSSMSsIsgK0PZnd*gfm}JhKQid5*O{os1g*+u za?VXuUauy9D?rcK4u7(x?GJjuykdY~8yxC;S-}$IqI9bzUYD32IqWWz_VG=d%tncN z@`!(D#TVh5Zp(GlBL=Vy2Ep-7)7Wysr$)lE@IDEoT6<}7wcz7{&niPM0X+yKTy&?w zK`~=7f#}!Enw&-Da4b7mED$tjB(2H{H~n(nDHYjSLxZ`)omWdRpWWUa0OoxmwXhL= z;znO<7_SU)y9k;r90zh=47^9)sxv@$$^6T)05H;FBJ8#V53R#6z1n|vy^NsEj`9Z~ z#qe=$Xk@GdLqH36d*^~AyRYxQ;+tEezU(}ylpa7R?G>XSc&?9-D#$#EwK=?X&SU7u zTjmQ-xPHjLx9RSBc=j;Lp;3K4*3V&Sp*%lh6tme;;WZR>UFnBSpL&Gri^(WBSdFLf z5#L#=yztNW5tnDT`cnB~!V-KS7KyqP)W7Z66Gvz1)@CTZ? zn+_pfo4wDt2(k(6*rt)_K`ETpgAb{}eJbKcLfLuj!rPC3p3rZ*9(4Dv{RS=QEj2@2pksHzASC_^HK-E{5;AL@fN+Du0d~5`s zj%J}*HMlBiTeFpm-B;d0|5RiB#d64$${!_lXK}g_y;_?svmyIQQ_5@j%!nseB&0%kbxXcWmUVJc3 z_0K=M0xu4pvwjc1fw*iAMNH~zy{Qu9duQk8HotKhR#S(geE)4nym`|BJuS~)ymy4l;{O>O@N%DwyW#FY1BCo=q~9C5s!GY0)UL<5P}O$QT3IcYBz7j)9{f7GZlK%SHtDrW@yXaHA#rJ!UQO-*3q#-K@?Oc*Pdc!x zbeQeo`|(E4!~PpCSSY~$u3A~`32q;weAuQYIuVwU#{wWskbDE*Drif{x?8oc%&3Am zM#Yg=?%8p%>LE_Psh1@@kFx-nzhnUkJ(+_ITJ1YuvasT>!9rC@!m@s)PTG>-|?$xk)d`6C)E3 zB--APwb_%?y^hl)Z42UvYF0JZ21?qr5-u1de(e;Tr{9ciCN`L53;a-R$kZd+st5I! z;~G3B*`Xj$d@pQXaSUow_G4ags z_Sh)#WMMoz9FKQ%xOh=TD*JxvF378+cNVoHhmji-K|YCHnkV-);0d{3j~v8=h|aPC zr7i5h?VgMh_BdbO-;w)4gZcmXD&P!RQWa`9x zMqY;4nLq(TJxu?(*2iHkY`7a-c*gxhuiN=1IMeAZYsP}I7Y$4o680+RrekVz-=z6b zNK$nP_>gE*Ie;5d0Kv3+!a-JXBK1~boN2Y`|IY#teb-m0c?l#yxN))|XcF*?ES$6! 
zAnAXw2pqtKA`CbbT&Lj$d&)?bJ1$UExo0%%7IN;)gv<0rO zZ8=B>E>kj zURou6l){d7{PJB(j(z(!a=z29%Ig-d#Z?1@a>{MehxeHU#BKe34$7+lG1}dmQUVQD z)ap8A*iMVjonz(w*#U6L$I8dLxmi%|4vGdL7p?bRzVCmBmkduG>@G@^BDW>`UtkbZ(RNcJVLZh5e$&_v3oP&&SrHc@rHn*( zO3d$7JiK{N&}AZ;GZG3KU~}osH?nek4iF$04e_eTkj^(22sps`b>}Gp6Otc>LgWKO z!T{rY%n}{cMFS$5pR)%geX$3QN(N0`P+ukJke9z;UQlNL!V)}^5+xxiVo&&e%gk5y zeEv*m=3{Z;V|*S1sGJ^e7$Jg6btNwW=(`A zb%pNyQs|DyX>Ysa)|PduMaYzd1}8}!EAfC$D(Az~r{Iy!s7FH?xCp{sy>F}O>yqc} zaqsp{{t=cudCAWXUwwDx9OxX~&p1;?j=Wilzqu$-4bq0*2Uu)bF30NS_xhBtA`rda zdU)A;CGtvtDP5go@xGMx@wITV7^Q`sjs&7iTqzuo(0&H8+*kJ2bonlDGU0D}1oy@tUOnSRc3U|me_p|&B;E#7>0ESPTeTN(X83qKtZmTQkz`oZ67}he2 zj;y<;pIETtTw%9^b+P??UzQWXVz;P~Mp2Lcv}Ye+4hGoSth!+r8Q4**yDSbIUPVQl zLhMgFrKrU_r#3T@9Rc#gL8_Yb3J#I}&clgG;i8 z<{9G2fg>iYQz^y*kLBS|#PDNQD>__Y60TOkU&1Xrn~{ zGY1zdkQ3o5rXsd+_(r;2lJH>I3(%}lP<{>dp}FFhC*M2v2S5+Phm)K9bKTe1hsY<% zIj>jm83!g!RT{~G8JWZHZ&r^VepbqW4y&Lt(gX+SpYh~`Q;W-Yp}-$2FAJzr$cHaz zZHY#DI4&~IAv5#sLQpF;JGIKa44EJsxtJn+Bkm5)@@!_}o_QdX`h}YUz?54_R2D7q zxF7+G2vqexVjoxYbVib&{|*VRW7|vi7&1%96ENNYXjp$d7barP zmi8i>O&+;`>Ou_;Fj9-Zce#}sL6;MuEJ#ku9ob7r6tVN5+1Rt2j`XkRcE7S4ZNGZH zd~NfTAM$D-O{lP$bcHe7&o8j}PF$sL?V?*VRm>8|sa%Zyvz#>`8{)|Ix`}68aeOz4 zWru3~CAGJ&Xp?u7At}M;<=CY?Wo_{%;3ZEq&VWYG6_*AMVkZJeoRAnF$HtHx6w`j^idrdXh8 zJOtUW@VE9!y}N?)sz3!&Rvm>{nhMVoI19MN+s{Z$U{YSJ`ta2KQ@+5SJ9paKhRSic zp$#?W?A-H*uf;Hh@s*_7Btt z8H~;loG!OVfAQeyw<-Yt1q*^I9LcKN#Q=P=>sxecBg99uJqNsS(R#SR(yArahDR7D zKFc?-x%q!}iUm=`lPUYWReq<|jllo~C4!npm07LDi zs}#W=g`N3@K}KwYV|De;+Q*f@uK4eLYcC9LZrt{)~)HPr?Sm>9P2unDFoLVd*Y4sJ1;d$ zdHrYmtVcZ*yx|>xly%G0`XmW{wlIh5m&d4t`htby{nO`A37?>9=(41`9UD2ly)4C4 z5Y$sZ1Uo77^N#q;F7XO@!w5KI>`JqsC2_E64%KC4C{k+Sy+%jC&Ee6>?9GKba4U-5 zoQ*VqAwNT+$U4s0(HyH&1ucgwpCN0F6Jipcq;R|{{nsC^yV|a0fC7ptyW`S-r0$gN z0&;-&VhyFR93YVc@_YpP=?3^$JT7Fprf2`pU3P+%WdQD~RSWo#z8T<*F5Vz2rY3Qw zJ7*shb<+C#@ulNuSoetmb~hz;nO&?|*%vr*3m66$$i+8%hd*X1C)0EC@~-*ctr~uA zNlbOpqh0Ns^7F!aJkOaO&P?Pnq-q(`g;Ot+F3WV1WqYGW_*Z)Be*YkE*~kJOe^LGY zfn*T_qy4A*P|5*x&>dD16tvg)^YAF|@$-RXsezX#EZ`-9w0i@PHrL4q5A2859breq 
zQ-9uAq~IHD4HnQWE3}c!T8C!rs=CiQE|Q(>YnHl3R=_JTrkaFeRk6}G$bfQAX65Zj zZ-xrek`*=t2F#88YM8|1KbJoTK-cM5NR@rRqz&&YHEO|_0`inG6UF;>Hv$y(-$XjxqZY;?w!+<9OLILSt#Ru)b)sCt zGC7ov4Hr}(_JAfJvao~b#SfdOTgbstU2f zlADpjfVWR5){Qi)BXX5LL?V z;W>3yPx~3uOI8_jHx@Ld1Q(%|FEJ~rdQV}`-jBykX#3^-H)lVI!hNBly4#>4>fF82 zgSvBr1KbNjFu?O<^-dS+O@+XAihr0qS>EP&=`V)(#bIOCKIZ{*yVK8%z(**H`HMo} z5R4u}6(WyoPKo6kP$Mhxm24?_3grE8FW!*zh7m~#_V&8Dybhf;VD1R=3y5<<7PWt= zyJbW^h9c_aSP92guO$e87LYZ)xzM))3zG6w?ytzU0oZ_CkGHjQZf0e$$&AP{3!o2< zeO~gl40AM_W$AP^iPVA4+ZxiaZN%rOOyeSY?p!S`-s8nyWt@;B8S%`FA7Dt18O_OW z@1gRqvKQy4$aQ%+K+1@&wbe@h}A zaunYyetx)WADX0@O5lGt>}AB*S>V>K__ujA155f}QYPxvew)Y2kh{3+RriH67x1nz z-e_STt3fT{h-l7E;|p#Fh`UlhVUd=Wt7RgjKW`^y4FxMosoPYzOg@MN)T1BYbgdRl zD$v^-&`&-3lg?6kH?i4QpeWJK*jDvyQhzzUI$VJf3|&YR7)%%D*^F z;HBHti_FAkJ;eQbQ$lSuEr;9BRBYp)X17XsD4=g z4`fm;_1y~xvwk6d(x9BVThcKHJ6DQPt>sHNZ_nwg72s{M60~d}3irOWi;dZXt53GX z_ex==Ja#u@?m=ixHVIT}x@D7yiS$Cx~XE(k!?6p87 z5*qS?0`WmnY=JUQB@#&yO+a7e2!e$P~k`5 z^Wtf4=FQ%z7%%mN(ULw&x@*DWU=OMN(k|U0{(bynKN(=$Iihvuq&`b$e8|4v>V6JI z#Tvc-5IFApc!J9JZln8c{9AuB$;Lo0;XW?1<>)u!@;7J1D^Mgeq2g=EXq6Ud!29x* zOk>e#OF&I#6y9QZD>d+zI8~}4@#C_*q#(tdPPELe+CKs7n=_h)x%ZR=Z#>zGNb`@- z499jN5zG%%Apqb(&)EVg=#cS{#Y6o7xeOhsD4~8H@&5@8dsdE(wD%MiJDY#u-Myq} zX*39eX(t--ryCZgPkBzOBmct^&}cAvsG{l#2{6TQZ)8jK7`w>@k+2Tf96#y_p=(e% zbZK=tlBT@9)>9_ik_LjpEI@(bir8M*=)q9mfo7=$X@t;%0}Mjb??X~*^4z%|M!`VF zGoO*mI871v)j(XP#@PKOCpIeQe)BmS`^bwJ$BK_E-&AC7`Y;#b%pJY;AiYD;=fpum z@-G?`Sj}E4xBrFJks9wP4vWhHmX-B?1^lV-IrLhDoPuXyZu#;X*29%9kf%1ywhc=j zg6|g-o7vx$k_a2Pngh;PI=!K3Bwt;<{?=_wMgwaBPb;6^qb`_e^^IL#r)&!JED_rf zWI$$0Uw!bO;JN-SFnuqrr2U>f1(5)oyScg7zH}K@l4LAy5@K~)mWVc*Q+umMNpCxr z+an{B)>lSPD-yPRpau53I`bB;(C`l)g7m!$c^@D(o-)J|Jf6H)*$XCN$ z{%ro#%W1L77X7okzP>1!xkj3xc|bk*(9=!1hp#@VtyqE|h+!4c;uta-?2&%bh2u@; zaK5t=W!#hJzSt58ARwt=b8*0p8a<1L%xUjm6Xyn~{zYM$g~80kik7q(F4T7=1}Hmt z!ywgg#}hoJ5(HN*0k%x$gtKEJetbBJ&0VW>pJCD)+PF9T-)*JVa0g#}n3~M0Mbf3^P(N zQozOcVs;_-3v&*@#bs3^+G&46zC}ueOLi=0Y5=iqD)`x{&#r|4HRZ==xF4aw<#=AB 
zR(IzAY$>2jlmbBp8z%8-+b=AnL(T4%*sW3Fu#FMf9Xv)eFXTSZ54)PHR0-w+zF`BP zqE^EO8k{19#56)KdE<^qG2jh91;yE+7DDmI2F`6Hc*zJO-ov=NuNgc5smrJ69E=z4 zGvxB*Glcs&K(%n07REt|#9r~{N+K|4H;+qna<~2+aDr|)?p+>K#Z^d#x~8~nyy$s{ zeCoWr5JRQ5Q20kJL5I4U9K=GYF_a$w-%i^5mU`L0B6&BD5@7!!Vy4CHG}+JT`?4T=+~-N2=27@e1;%+Ei6C2Cr$QUF^;Cy^GGozZA$?%S`q) zB-TWYkMtJAKFtSbYvxJF-8Szfd>37m>$Dw6g*+X@KAYXb= zqrC?s#KzMu+vBnT_OZVs3oA3WF5mIjFDw3R0w3_r48Z~5?Mv&0xd=0C<5djL z4?=Ca@oqURzV?h=5sx=2E6Gdhnpm5(LwbFfTm0fjJb0f*ufUp#CyALtmysbQY- z21n~4*hoUet`#SEv5DFXj9UFo+|z$;3rO5Trq? z%CnNTiZ1@K19og>e&KYfOK$|=crDZ zv*X0RwBSC2qPl;Njt#p|#0CEDHB49cI8BIB{^h5#EEV8SD1bLo4>Xf4 zDIikn#-if7`fA78>G^=!;?3euX$>l+V1=`MxIOjG{RhBhB!d<4Wtv$MoO)i*hxh zS^CqQez-O9@B~MFzD?Qy*>Q=+(*=f3?M`oNZJxD6!4~z{|aV z30oL-jmfM0So;z92H$%w!K(s-1~tms5`~NH>@sy7W_77s2}x`nysS5LwA0lGFBOT3 zhMKHw3Zp%AkLs0xX@f8(;Qq7YP|LA2o9l2Kj)E2JzJYh^xb%?k|NQ z>7OZWOKmL}JV@c8$M@!cmg)gL`iN4>pq$ccO~ACb7MGyh z=>H(zsr{0b!7buT%Gv>?o`~qQwpCdGrOpyd;SNI=-bcAB@o3KN#gP%UI=}AF+TCij z?YYrlO(Ztp0I?q!Lir;Z$r%lqVk#1m!9~A_RT|iS2U6vYNztz^TXL~pRZLT#8nlb& z2cK?clf3c&LEH@gDLWZ&>R)^J`=n2Lo=N&+)D-*DwzLE{(0F%`is9l?wY8*%2H79# z2{G+kG>Nq7JW^%UmpY%V5(w5Xz$RU`(&5^R&gk@?-8*c!9L0zKqpUX!k zGbAL+bp13LmpK`#fsFV->wVN+_v+`r5KPbCDw2(UQI;An>Q~Z~Krja+p^ikK8}K_T zPI?YH^vQyB#yXwctI}G~c<-+$ok5$pX8O+5tIOkE&tAmSd^}3!cZ(7< za0i{JX30H>j}Fra#?|X|Zac!<)Em9Os+@X}uUb5%#`@b?|HUlQN^kFvq;>bEu7Hj4 zC&!h)iGkOv$zXJ#a*OBi=`USRlFw5sk7vM7=eJ%Z3if?0Yp7)k89V+{nBg_nb+69f zAf!F#nqp_^VJ7nIAsgch+qmETtGK6J&qA$s&(U8-0KdD(JI`m;?w9BI5WqtU;F^I0 z^0X|$3uQn-uu{A$2`xKILRgMJm)c5xq<&m9C%8f^kP3Tjt)*Tsz6p>rTk_GnE@g@$ z19XoGD*4r!!q_XMF;dZZ5jWV1o%O7h)ZH~MauoU9|)wE?z1&Ls-W8jC3EdE+b-4pfrF7>atDL*zR!5~bWF>sM~Vt|X}_DYi` zl)+{(Y;exiRP8`wk)u>y%G|3X_~d;O{p|Bh+cV~pR4LKdtXEIxFW)fJ;TjB^?CF(J zPjj&>=6|8fML?QL;#|c|FJ;_g{r$PG5n$O~ZkGSPj85f?oXxff(j2 z^eXTZkYE1t`XD<5iS%Zk`bMpVhrBDTa_5+Cp%7E{wh1Bx;F$~jX%VD6{gvsQUOXH#M*VkI- zlbaNXks#loGJT}?JNyV?)9lJ6k)vjtPiqPPGu1HR6`?fCdmt@d`A9s$nE!h3`LxGpjm0v&CQn*AAvO>*GsC)f24LX;3)XfO1PG100 z*-!hl#iH#%yM7C=%b$lQ#{dkrswuQ5NeLcS^oMtN~>=hp%fA~OX 
zu~)001j@QKHF=bH=xA=;2>xY$$A29Y35Ais*twedD8=lZzTD-7tK-7JEOXGM(5KT} z-1g_4Vj}bMD$m(0U`d=|&b`CCcJxMLM9r(65a+c; z#_h4j=bt6VfJWF})(#Qcy=x`)>83j!NU7LO|7GTt&h;$km&=FynF>HRge8nX9FQG5 zbLo`?BiQg$8Rev{HO4zbJ|>!K4*snO_hq0tYh1EYQJ$1?>83$4FAjVAytnyN1(sM> zg5T7^kR6P6pj%INkUi0T(YW*^2=>}cj!Fv;ri@eoMI{9LJ^<=eJH>V&nxe>m%joL9 zi2fc!ZwpH98lRWg$O680=92_==Gk+KtvvJO2uKKxwgs8C;5+7QNECaQJ1)`!YDR0g zNH;n7gq+~#rWUrXv6Z{QMtMbD>q}YD^II1(C4@&nQ<+Q)u6XUNRngn_jjm4a_f4~F z4DQO^C3e1q_4nE!mnG@vCG58LKluzx<}1%jLwOl_h8Qj5#A)g{Zl_-PiaL&shq`*BMkH%GJjjDo8&(HonABT*7nGLF(+Tok)#yXmJ zf&vFuV%4-SZ?nOgD@Wopma`s~o^D_^ee%&mee<{K3sr(+qWEN$nX*h=;H5rZ>mK3e z+yI4r8E<};XeT#2jPk>KEAVdqGWTKBiW}66h-8V$)gcR&lJmVdN)QNgqXPOqF1#`8 z5>CUFXljVvL%z|fI;cYF-qV0Edt%$#0Y+p?8t3O85B+$S z>WE-~)g5w*5ePd9-8|5PkT#ps09k`{WyL7JbKaK}7YJrV5P!?^Wf zZ^?9u3!opa@c!y#ttxij=8>tWsb^qClE!vdG~|3hEpV68;1wq${+>`t_x|*>pZUJl z0gOsrUzlGxi9|lyFYVdpRetY2Q!9{Fyc`-wM-~*3sK+TrPAx#`$CM)L@6q~WBa~cZ zJ7C!(ZH+hJi^A6ykelYzg*aauZ{-XoY@yG6WFrW#FV!Fh{8bG@NO{A(K1hHBmnoyE z^$ERbMnqZ32kbKIH?)=*Ih5b^Cd{F*ii`s>hXo-l{s@hTT%3$09tZv$h+N#DB-ngG z@EPeBrSsU4o=zF6M;CEKCk0q$xIxXUq~Kl#7&kU*I_w zOvYRe;=wbUJo^)%!7p_xzn?%*o9c%xz)cpUQClnpRNs&MKbp=mDyr}O+h>NMap;s7 zx|EQRZj^4MJC*K+p}SjBK)O@OL6DME=>~x>-CfW8*0cUE&%0T(*SXKW_x{}1^({tJ z)G8(NyvUQV)xWulQU3d4E|q0%YT3(erUu|!73u+&WoImJ>NLl(4|=*cD=VG)Zj0Im zxp_p@XW`(=B=}ghij3B3GI&1#@ni{BGqZa8#5Jn3Tx>}Mf3tD{If+!E*~KAQ!C65j z2u3`*oO}|4|AJC?T`Z2J|<|S9W+|yX|y&=!X!|Eu#>ypx-D05KpBe`p!cGEv2=dh{Co^h+FO+^AJ#y^ ziA}c~&s0mYLP5-h>nOKaGf_H{(_b*VNIbaD+f%tbS0`jY_J>ovBcO!{o+$pUONI(Q z3Yt{#RY%H7K$Mt(EA(O5ynf5W=xItaM z^SUp!1l<+Qj7+GFclVZCmM{2-ucNORo!=7Un^xF8Ynb7sRPpOLR4h0N#CC5#61MdS z9h;*3$Qa_`_y}kK44Ay$fG9T&Ui^8$RTe=s+N_ZuC_?nkd}pVC%f}{6cvs$( z6Md(j=U0;vb$G*2{<{Sfcs-PFu=wTs9%FXT=?0ZTcB5_G3$e`Z7_)LT7+f4P)Y7i4 zM%t&x!HN4t#w9I)%rGarHsYy<@oz}LB8?2wDoj<#!>Zom7D*v!R(C);p<1a#At!wwi@loP_evV*>$ zZ>c@KCl-%uk-wS)v(N(9QM;G4X{MEE<8J}IM!Yae^iB;0o^h&v1tLIED(#d_81(N# z=ASe0K>JL;U*OQ+X>h&lfFr@YlS%dZ&>}=eRU3#IWZ5H<5vo@9J|hIl)ytn0)|@0^ 
zN5#zH(ZX6bXiMxE@7q1i7(M-(7rj|9H41(l&_!#qK~llau!o9U!{`A`I2=)xCN8mU$@$t zn`Jw%znJ}wjcZaWFY<@>({t{lt;>kzB`+KB#yyC^_2)P3VU0}>d5#Th-lUTUd|{*4 z*UtyH`qHrNNy=FC_05#C<3a8&!ArK@)6Mtf;1kb}XC|xC30JZod?fklI z8-dy`@?0-a5ebkCtgxAJ|2Dyfng^#0aI1Ne2r`(4xIbR$zM21ZSn&8CGqSh#_hJI- z_H~y5n>cB??Jc?0SNWkg+(2Fj@Tn`5tO>))w9_q9!uM$iRRW4oc#!JM31i>!V2Izq zqdS0{Y(~?IaIxVfMW+8f>2jAhkaiWiRy&+>1)5LPF|AHTU#drflk(q6f0&jV4s-rsl{PGYu4jacY+HE<_xd>F4XYq28Ft@ z=2L7sz?H@w%yb?NMGLAw)*|Qj{Lg8rPfF=d?}1|teZzaI1*RgrpgsEW(9zAj$Fpbk zj}e$F2{V93cn%BCT&bvRmOIdalDuMKGCTN)QyX+}qVd!As4qC{`g@T{UaUu>y6bjKl>zPRIXW=)jUiRsX zzcyNllVW`zerBMU=F=Ye)_GT8t^Oy^na zeK&8(VlOg-^_VY8@K6O%V#*?*W8wu~OT9M(pZxsa%8;i3nMlACHVAH8NaRcapzvCL zizFx-b{kUU`uUXrm{BSGjB-r&yBdY;FTnwLl0Het0R$J51LA8Uftml@uwfGrUDRl{ zWLZp$a=Z$lnG3bkn#|G8qAI!fpXdP_M$zXoloW(-tXb8CDMuJq0Y{R}4<- zH<7l)=f_64nsa_CpgVrYzcz1i@KRFer}83M8LGZtsAZ;4&0 z%7{$IZ8~8LllpX0{T6)D@AeXuZkD!IO(Q6D$VdAw*GADv5+}eG(TwwgsWf z$x&<^n>sQPZL*@^733U*4){~YtcL;sXr)O$@=8SC|5}%Vgn38qz0eOXq5ylx0nI`{ z-$%G2=w_EQ-w5P=D<%zS#CYLzB*BqVy)Eo|_uov~rwI_CB<5Z0iic=qZku zf6T)`bdfSE1$o~=I{y@DW_NM`$%%yCU3vNp1F>Z{{i01ajM}Ex8Z7Una6P6Z6n6G= ziGWvPPN**|c3+y;(#>wSphcyU4$KgNlgtt;PqMT=Gla-E3X?l7KD9Njf8`f4U--aa zsRk6)YzeHns#2Pa2=>doI^LP32RPeiMWEhWzzMGe0h~q)Y--$0BW}OtU#wt`+U)D@ z$i|G03J+uT1Yl0|UWSM&6ShQl&OkD}RuFjY;~IIbOJTi&kM-Q)=_se)z{8zdVx$;G*8vGINSbY3xyUZtqa^P zxNamaVJrDY5Xyy3ZP(+DVU%{wr7W4!7ygr{h8q-0F~Cti1+h)730-*>&?a4lUKMz{ zHF{909y8`Kr}OP!SuY?xAva#~0I&U`!Kj$sYH8r*vh_~4jrcVEs{J`%XAiQCXOUwb zxMxo#(s14ap|h~EoLoP-dZ?wHPDT|(jAithNwKo$Vj{A$4}tDB39G`pT~_yoM8P)> z9`++rZzj~uyf8ovn3FCw7B&;RdQAjDtK`>`m&x`raUT;7TOVt~!Efj_MO{ui*MfhV z=1VDAuWfvxc#cHZk$&4%h{a%c%eq3Z4SL(8QVo#vjl$1&D_(Kc6?Urm zwl_86&<7tW;##SrL`Fg6P)(5!TO0sh0%|`a1a|*KMQQc?K!pk)As#1yD?pOqJDqnl zYffwuMCxn+XMX9JPiFHauS729q!$S9!6<%+0^nbj{ednxCi-yU?a{?tp;FY!4p1MN zGKwk;dN|f*TWlpvM<*jM3m7M7Y0@wmBz+w)eD7g2m8}w(s32zz!d6?&dJyL#-vAv$g>{plkLNjbI3b5fbv zb|MIZidwISLW4cpw^~2SnC;`Zk4ToO_G$fOmkGCurfvNqm0qlF-?zwX>|qZ>_4b@i 
zOT9J}-$F3Ww4KjvyJle$U^2ggTHrJzwwt;@tCLx@E!Ek`LlDL0i<|xHoali#O`CSL zX7f+b#X9LV^`GwW;nkG2tx8Os-T@|(qP22Bzob?=WT-#vk(^?+*QE7b#`a&!wiSon z>&c9+cc_mI{nfg!*eEF_+%V!V{J9)MKktlNGIQT=Sw`^Dw^ zyK$tJ~$x=Tfk}-)&XfK|U2GsT6fPBXmWBpsqg5`hkt1PMvL5 z4nM`Huluft>L0#Xp)Y)2_-61Sg%T*ISmH_a`1yzDG7seKKy66{jAAbHv#z`XR9%bl zu3QNI#Cdmax#a;pZ>8uSo>YL}{7mG!nEL2E&Z_zWtc_ z8B*R+5aUoC)`){?ek^-dJ2O5*`$Oq&%U+g}HUdZy4X1VyZd5tvM1i;Z5V#N>Pu7{v&EFGSkWGOl$4JW-g${=TT3qHRZtL+v+Xx!N>U~?!c2Jy zmZBX(lW*)^#edLz^Ityw}1-Z z|H{xY^WPAVq6q~6!PV~pnQjmTTt5lmS~@@%E6Z}EE8w|p_1DI8PRLp`JfEh1Te3z% z?v{Vso#ka?&-7!d`dMSJDl)FXY=gf?BfZrL@{bj|Gao*W9GZ;R#kM zy$&^%=eozL9X8k6A(L{-&>qgG-%z~Dn{tneg&Uq9cXLWNgV$n4n#VU61)a}l)Dzzv z`%XTdMx6g#O-0-mwKMAqN6Rm!zP&`Vx;MnbM3^5;? zu8c67ql#dgWSViO76}i3TOKQ8MGnagI-(p`mUL-C`iypd3d4WiVeyTg6N?Y!cUo)A zc;h7zWGn9XH8=WRNwr=gTW-Z;IVim0!Zp9?2D-LMZJ`9SfpgCcn*9fXV;I|BQ*C~? z9^?N+cOsYbpHA!Gz?}3??51F*!^QTniM_Dcuxz?2yfDF!M1N`4-`xM(+z%raUfucp z-3&yzFsWMz{lT{$&HvLO-Eb>}KyT@2F)E__UJry-V5Esm-fm3sOoT4I}wGtP@MYAX~|$8Vye_iC*>fN5yF2$lH5$f2QtsmQ%=D_ z!9hi|$7YiJu21_}V+Nt6H&@G(_}Q7_02kx0qXtXFq}bK+6RvcLEsOb`7O+#EWP$5j zK}SexFfhac(2G?7avS+W&*dnE=&&t6K0Fd!Zk}sFbiAXrBXpmHWYo7uOT#Xa0GVWx z;0FRjzGy{Pz&>}e=Ww2T@uUeKEa8{eX1J6uK^+(34X;1aY}Znn*3VM3|E-sgiBh#ax#A+&^(~uAfFhplI&wiH<#b-q`I~nl`##@R8c!2g=yh zB2o+wdr7Dgw`K*VoJ)R8ll>e{PvLEka+4u>Uy6a?h3@!8)=9tCH~uFL0rubb&PQNl z>tp2Y7-y-_aJPqc}4|PG;kP!YF(Kj>}vCz_(XU8B@xzGwwO^ z#~54^@tFIf`$?s@B*pXr`eVR3ISl(nnjJ>rHWp!0<#^K~P3Emv%PSO_;H>WnhYnMR**s`A?wZz0 zdL-;OK_)ZCY)tfLSOyTcjNRa^F{U7_H@p50u*AG`Gl=Uccl=UbK`?btgJKL1U{Od1 znz0INmSLOg7X4+Rzh}+wnpHavEeiTMf}P_bh2}(UGRG=K3qO^gB1d>DzGQoWt0GT+ z;g|BLqU!v$VJCs(h(@*ah=H9a){f!jzeh12{C`7G-a!J&b(593kd?72njB(Mcgi+eItUmyl`d(#Jg`f;5|NAz&;RWV?UCGVtC8hmJELI zA3ux~_xTU}F~1UE@P3O5>w1P58%bj9g8gY7JQj4m?(|NdIUQ%J|4TBxf5ZytAoDcgQ%n=k!o=XD zw6T%5=2Ay$wg3EtQlsy}hs`LTJWVN`hCp-Vf4@h2q+DZqg!I1N{ih>=bTg2oXO&%h z-TL&eW5!)V*(#K=he*FE-lY{U2yFBB+;LnuQ@#==Hedt&XJAn|TR-^h@e-fJ&1pyM z6u-D7oOZr3o#@^3-*YwTNY1jQ!PENiqQx5-b46A8l(64~-->|}5EarmBBjHixJCUh 
z<>TQ;c*YGfw@5%JYi>^}ap1!F&IkwGYdH>bMp<+N{IzSCKZEgn0QLiAR8ts$ zE}9r`{jX?EfOJ*OgQ-!;RKanL-JZNhpBG2F%SH$_ACbbNN-)9N!}5Kr#M;<7k0?pW9W6k)9HPI zcsU;>%P)9Z%O=;t=Phxc-m}~KRZ(Hu8d2PO2JQGwucrr`GyQGP&vC-L^YAA;@H*Wt z7MR)iN}EU1fiAMm9M_`9ot}dE`|8)EP|1x-238!VGk{LBL+L-|-+j9BQmJ?)@8y8l z`><2?MatrdY(A*QCi@~ttMaq#D?Ee{iC@U$giPxR+Ic-SIG^NpKm_G5?Wma}ep}7>Y4WHHgvlui=BzpLBo^9ScV*#jACi;2HEv@{DT&%up^=AMt-# zfD)+0JQ(Ba4^QQ!FotMtc)~aF1jY|*Z`WdOHyOPw;;)Q=AzHjQy&>wnXJX=&bZx;O zx6!9w=H(Gp_bd#WvnN0#FnLJujG`dVX3Loj2nH9K?XH?p+4`3J{gwrS&YZlzlN?7G z&G-PV^*G7j@n{Jh9RA_=LIhKFmM;x7x;B0!V4AY=tf`)?P)T-0_%I6wO?5x<%$Rr8 ziXInq>LCv%9sDPu3pb@UU3bs%H=PYN=UsO&JR(m{@qh&3eDCCprq}Jx_m(xBO1%^( zn}}`Ay(JrN6|R!2xe+>cKQsZI#My$K?O*Xq0UXa?(Hc^jsl0qt$O#i-M-YLS6~?~X zr)w%%?U`^tb*9;!>Mn?t zsx0U#by2lDml(eIrxNm?gK;>V5~D;w#rqb(5YNwY*dL@Z=uo8W221!h2!8ki?v%Q; zg}?x^h=m^_-yKWP04y#CEQvT8DK^IHFIe$JDOjp6j~O>{G5_L|414_@S0ob|4%Djl z`-8<$PgjkqR`SU8P)l8&S82yz#p_55zZuWc4831}Y%=;dVW6#&6CHv{XjW06xY9uO zzJPPOp`3PPIORt^cW2SnSvOd15Kn=)fQa7}28xlLHVgfIj)D3O98`pk{oM@OR)7od z=_+#y1LpFCbWMjMFbNBJQRspV=BuDbPYdbuyE4lK4r0^lAcWN9jiN>R*1FIFRoN``&IbT!F?csP^Jn0{HD+S(f(Ii>ewF(t&===<_JJ!bV|Hz}({t zuFv0qz=)9*4JmMZO<-DqInm>Gf3tqRsFMu2L9WE+TLK^?Cbb#%F4-#aA^qYXBRXP2 z1Fl4M&QuEPQlb&1%nic>wj(mzUpVPvAntMqlUl~hDGG5KmU;5K7KsJ%Sk5VgKtMJl zEVy2a3wobAC??G+36*Jit(4%o5XD!-q%FCOY1twV8>SG0xsWWu10|E-QbggZ=K zru<91;^-Au7LO9>HaQeOL<{~7-VTPOwUpngGoPU2L9Yh7H5|?{Q{2R$4Q@YYvJDSl zfh{kL{oW*hUHS7_t!E-!MZvG&*ZGPHFfNM<%Vah9Y@C`SSK&R)p;2rc90Ya`sDK3J zIbU9_5`iXvawTV+wona4Mo=H&!y9F@s{Wea1n4PYwRBO;f{Z343o0+CWZ2#u>NQW{5+|nKIE% z$@@}&sK)mR{^wZgWs0EffljB2wu?qZpGD zZtY2vOBhcahHH|(2FwTyCwaV!E({O!T+F#LbohgIX5JJ-UdrI6)J z=83)Lx<2>=dgE8Z7SdYbuyy>Wv(noe--pC;b3*25!gAtiniVjZ`NDrpE%!F!&l^c> zrG*+^>9v)vpk>Hf|CP|UnOQGB5Qf5yN_xjd)Fz~oi)puJ2kdw#$4+1>kTMw^uX9pX z&B6e1ub!NP`v1lFf8#kz7ahB2%jTYW5z6%2#5nbFGr4n5`y!XXo$xmciCv@J7!zrV zw)xi}2PuFQVj2$E%Xh697d*$q0Au8^I^YUCZQ}!u+JFB5+(Do~CH*C&c*OE>O~C^n z&Z&2j^u$R7H>_*;t5@3^F~pl@y)Wg-S@REjTKar`okLcPiFsp#Avp|XW~Ou;7V1&x 
z#+r}v+);woBKdF5hj*9=@c;o#E}>F3cz2zmYyI_fuLP+%JoQoDZEzL*q&wL=-eG7p zaXuOPBkwsK$2*~+p$5r~lfnUVRBk4B5H6>q=uGU~R!((E`K=BbLteUdcwDdBr08aFKhjTe7wkzd|f{ zKX+510`tpJl)267tiK0Ai~@5rtqWgUW~aupWT(U?s^1TR{nqJOr^I5fQ{~~mo->hC zh&GC7$no8zK+pf3A8?=oi+~T=bRb)jGM# zqScNcI_%ztFw+e1Bh1P4(CJPY7#wKamPP9qXcvJtG9h9(qyum!P&&FiwlfzM_d>-$ zu9Q@CA1efJny2rb7I<>DxHi&oyN1G)aX~35t;Tyx&(`9bNjS{8Cg!WkB_NUaNh&at z;a3kjBZ^n=Qb8wy4)Ns*vHU;bk5xpPGflb8^Uu@^sPHSa267^Jf2?(?mzDfI@7fMD z)@1;7b;nu$h*D6zZbw1M;-&PN>gw~s!|sK9^0%t&9`wJL*4A@OT+NHQ{@6Gq|wHiy7E|qI&q+U-^%ItwoT_6I{4h@gbvUB2odEafI)nVGdS`awJdVO{N zIv~n1Y0VGEZtRwBX>4q?Z(^)^ukr{@Yi+i=a@21k`e)zR(h_~hxJht?MB%M5(Q(%+ zn5ti9jknbd4EGi#LC4W%Nu4+rU>+QPxcdwbq=#t%sO$h;F5gw)e?AW)gr>99I6FDE zpV$VN6UkS+r=z}5mC6+USSaZ-t--bpZB94(!%TGho3|*>NVSOkDwi6P6#-Vff%@wV zp$_;5tzt=%$M;QS^vF!sYxf=e2_~nqoSI?KbDefA@VfMgA-WN)HQv7dPf(k$;#fb1 z@?Vw3-|3zPXl;P)U@}D=KNH#JpK(Vba3Y@)yBO?N5V9JYX*GCUL8iirgZZc+C8n2ww{a!D8oug8ik49 z!h{?$C2va~KW8jVt-G)qOGwlP*VxebNNK~)I$SXbVw?Yx;C~EGtDfOyEDnCYyFyEn z1VtTd;AdgLN51ooYdaN2_UB?D0^5Y9HI(H5x|DH|6Km~tTPb&ZR9NvdF4ElvE7Svd zXD@Paqcw=eCJ*>NwkNzj;KxwTMI(bq6?-SVEDRgLmIwZ(+%!$R#I%C;uKdDf-kTEV z_^%=q-VRnYfbEW5(wq-B0Dp<2M9>Pm7aubR^S7wh@qA!(raGG= zN^vak^B$>fp(Y=}p3x>@%4ou;`vQSwKIPx1janEs!?|WHl0yxH4}xew{|O z2#0<)2gC{Ybw$l^E7K+gshy3Bjmw#_vq)Tmiof;xIP*Zdf7ikrn`QgJJ;7sv+3n=2Ny1e z2A-=$t@8=G-TAMdd?Wt)viR!bi1z6@M**(W(Iv^}TJnWM)&#NEJ)8(xO%*ci$Vz3n zR62 zKy>#P$6v@1z+1k2GEzhxiNP#t+o98_94ovhP7ww_WG{fYzlEF~zXv{T zKp5g&jJo)H_s4DKdp_XU45U2E&AvZLEKp5k#wc*u?B8Qx72h+gAl=*jomC)C3|2u6 z4A`)D(F$U8`t&=%&#PhJJ14aEbAc9$PTvPUi5G8Qh*>Sre_CYUfb2riKtz_l_tzP1 zIyUK37R0a2plNW^Ye^Y=GlLcMIRj##l!sQr?lr=vHb5}d7(p+CKD`bIiz}aK;=yZy zLNwE^k3PHF#>KMmInxyS;$V;Jx~uW`VJ4ltkau}wMJ#Erz-sQN9u67BJf zgDOF3n~dAMT>GqSkZQ!Ln?qs$(N`5y=GpI+k@^?cQQM$u;F@nW`kMR#W-{*L8Zr_k zP{3%}Wl!L+I1*N5f-dYkE06=zf_r-@1PhtSzEat;4Kl>&KuZRNcflw}bnd8eelnHW z+g!BGI^yw&ObP~E=YI|c%0sq}HG8UVc-A6roF6}QLa`*I>l;qFKY$t;z=!jzpOVelV8{W}TRfyC+h%DYvVf%%iF7%{!!_x^)LY?eAR=tir2%{;l4Uw~LZ%@3x@0u}h0mHe@8_6=VeL 
z-%AD9pUULw7h0Iv^U?*Ps2}@QCzUbuBvrbMNSa9-+=CAz`%F{=62N$F9t40GrG@(7 z*33>eS|Q=zA)26BA_#hl3wK=CAIpbzm zrheVU1aa{dZK6J8Bmpl3B8Yy3Nit;b)g=Y!uA1)E*droJ(F>NH?2`CQwOYR8!~5w5 zlT!ibgCGoW{mv-;`aKTaODnWDD3_2Q)~`^<<6(%xgh4v}3)#3tLHKoV2U;NdZ~5B9 zx8A*p6i(fWRxb3KOB@y!8jWUGb^;|RRd%nwg-NbYK=e$A{>%w=)RdCk$JGQfe@j)L zJ=V9ZtH^Yg8daq8wP~iq|3dtF7`%vTiWJz%;f!%_vrzH6wMMah2CMbUU?w}R+|}2nJsG8=3q5JK3*-!>E5m%!Cb|W_`gGk^tyGhFg7N zC6nLz>UQ*vm{JO#1( z+C9ZYeh~fh##q}dkKg6%Uk;{c`PMJ_)mPtx)QX~z$a#X@v0gTk@A1wDcPZ#PHN-@46hVu&}oYR;ii+9f*c3RQ&Lt$=JjGo&JzdM=izpQYsET0uApgS|FMoW1Be`fk%r?}fQc~6T zCfcm3_=l)=F;U~Foj?P`>=m?OKO5ZWX!4~2CfM}*+hgH^tlOVy*j^HybR-1;)!*)QIxpe!C{h){g(aA(To1LD-~ zF0NV1DOFx~Z(k(v2A5K;jhel{YW~ipoM6#>cNw z6a*PEO0LVCc1wbE3*9NLWn6bRKOc{`eq!{MrRoEzK&0a|L-dcLS2t5i9E~ne{K|j9 zt1+BuYVd_osk_^|0`5JG(cC*8&t`kN0y5D-kD9`m&cu{QY)@L8bb~p^3kaNWrhDBl zhH+F_eeUcu#X&loAqD*t&MTt5yz(W0%^VvMpu8wy6jdDO;{=%iA^iUC?pHW9fhT9A zUYD-m9>Jz8f*V@6GBOsthpx2RL>Si!%Kns1cu2*%9Gr=7hXS07a*Xi8L*MqJFla(# z%JfN{p)T5!6(@mKRL38WKgHfU#!;+C=ji}~NpxC-5K!lBLHkiTgr?WS)Ef$~YKSTD zdibxv__%lM>rSe~iNFUkJCU}{9CLYp#nU@^urtk4$5M^y1lkx%(V|*JLQxTKJX&a` zn5xik@(&bD17C&kV&hC??ipg_(EB7m!*!X|UZ%R;6IJB|;=(n_NACQ{;wgl(yC}%s z5yiT-1JQ^|!HVdym{dUqzX_Xg&C)L&Q*Hibhp|`Jboz1M5#ZEtCJ%h;$@$a&-tH`k zOB3v>FiVzWb*rGRA=xl3V@18ZZ$TU*eK5Y10g1(5p7d=L~a|b>L)-^L$=S%ZjC)ABg*0^5?Zm6RyguGy;ujWtsIU;B>nQU zZ-eK8Ga^?m0G8+1mZunCXWHNORi$@bnPFF$Y|`iSE!YyhS1auq8^)*e6iY-m(Lw)7 z=776W)=m~LnR$Qg6hV)Sj);RYd^ycpR3#}^Jotc^x!>EIKmiZRd zw*f|KkRTI7D_mue^L)Gmlv!cThPT*KqL&z!EzW*&9A272AaIu89nsU_36h98x?cy4 zmk-Wep<+T+3`+co%$0RS>?E~Pf5iiZPe7{(vSyL`(67b5es-#wC%l+GJnX1*ax$)? 
zfG-|q_S_;LJE7jJKw_JCR}CRR?|bP0|LXhio=kZw3ls0CNJ1V2qnYp#osiTW`%`Sv z;6Pwn$yW_)^}VQ>R~py3U`!A1(hQ!pCdE(Uc~PXgRExQ|AH$gEQj0B0)v(Qx9$OPP zlnwq*j=j}@=A1WN0E?(;MX#=WKpfL8MN3ij3;*^-o@{doq&w>D?wf!hSm1K(g-UB+ z_6UnCr$eTU+?#5t<_Fy)LahL5A(lOr2s^|vg!ThhuRvq6n954ZA7+y;8=5{o=AeKm zF>J9JluZ91{v{Ny%c}O5Z1-O2;QFXxrU2{LJ8*By8$)nmgmpF6I~x@Zj1ZNit@LFK zXN5yPP!{~@Q!);k7q`fDZ77aS29DuXxrJgn1x&Z+y(xr~fF$n4+uyliPOa9`R>GaMDzc z5vm~rEKdk%`0D$q^Cz`5NR2#;p_fvX{hg(;Fz_OIr!iDuS3K|oE8HL#O!giW3cRX& zXj)wKT9v_CL!wM{+MFEgRvsU&p03J#Y>^{6$cZ`;M}V>S9s6K5wATgU5_jUfJATAl zLeO{=273C+x(?4}Ya%sIv-wa{4!Qn398y9))U^#aa}QO`q28JGV*#l zU%V11J2zZC72^{Y&}i^~=jx&f7FH2VI%|Vvy;B>p@!-98hnSXq(vTGml`qXv0VkcO zwFodnZ*ntKpLLu9{Ya;*@*|eqX)qqI85sA038BbF?jiTV89Ye)rGS;--r8F??n^%= zKrtD_DXF8bxV z-Tf5=w$@yTTx3_131_c`KvDd&r$wU2tM=^&U(%?FT1Amf9x39NK`C&hjtKo<8dC!Z zd_=xKLH^&m^xHszPGDH;Yz)Bep^~gsFx+T%4NfW z!fv3XJyhWL>p))Tl;#Yun)77pllM#0Qb#O2(Imj-HSurH>hi?wEl2DmEP*$uzZ0ih zdEZwzq!YND#a+mIzsYwoZJ%wzB%Jhmf&@N^7u0!2J{HZ(dQ+5tul zyBCgBD}C8wZtayK-EC!modet^9=Wg1+%iVYCBR6q>4>pA4H{%3Wa~RIxN?&nF%2iO z>KMD30R8z#R$_QL>Kq?3rPL`m(BaoE-Hp7}+=ut5fbnFJ!%oQirZqIKpR^LEWd@Gb zUDK-R)(sA6UVQy3fJ`!5T_n~TDAu#36cWZb&ezi<1Ye>bE-1?E!TUY@ z+G8wS@mlyupZCUA<8_VS+4ekg5fqvRLeF@J;MhKc(b3Q}fVfC%*%z56KhloL$Chsc|cx64RCh(Tb^4#L*{ z)NY1rZ6NjQvTF}U)*2&WNmEv0ElJgf$h+9-r&qTO->0yGj1(;xR0@ju^yx{4?zVG# zy|?OV6eqq0IUdege`L$Q85F(3I@n93PYHi7tgVL!ho&h|w@(xY+<8Cs1$tsg$-8gELm7f4OvY*kSwa=8KK+! 
zzJe8n32r)5Dfw&@nW;b*eSM-o!PS;?jIp?8=R|v`k zCs!5cZ9?@>hjJrV%*I4~dF4m|WH0ggA`lL#yTEL1b{kcTuI0y(vJ-ghMfTN;ma3XupV3)nt_=qZx-_PWfH;S5^!9krQA6tsO;nEVs_@Edn# z;q@8uYAjgc4!-B~bfE-_x_I>t`DfYjkYu;jz!io-ST?62;&p@nmjyU=EdZar7ovU4 zg77kc`lHW=w_qp2)h+{O?JCnf)dd!$&_R=cPX8vk5t$Dc!pPK~>jF6CU-v%9?d!-a zxe{GG|II4htdzj>+1RsR__`U|mBH*&dC%y=j~ykuHyaB$oope6+vP;r6*6RgFK-N*K#mV5&50 zbF50<$_08Tw8flcSRW)r)fjlFDFm%wO3_%%M%h)X#Q-Y%aGg2Jc3ZXZtp%s1k3POM zZLRxDC^+sFMC#|QYGCwe^#cA%>eS#uho%mwrhtmeJseVbyD*dhg(@zi!=g_ylGM#0ismgkPnYnx%ac${0Q5aV>&OK@Wz0X4Sd4s6rrv9LgqG}Vgy0DOE z@xF-ZV<|JHM9U`R?4Gjl{S-8Ai{*W1Si^+Rd@WT^f}j@_0!Vxs`+ z|9;H_!k~F(*Z4 zIrJZ(lLWxUqb1(fiK>EexUc2X!w#Ut?>VcZ1(mFD_@lr*FHD!Z+X|vrYs7z9e~ofS zCEK#po`$hj>L%!p=6+<5_8*Bf4abW!`(lZ+>Xq!*w7V`DSH&GogK4LVbz!&sibiW=Zt=b%v zh4pju20X6U-5Mv=ocr}-)*Pz6Vg;-14If^eWI8<+_;pq0*;1`EgLv@&+nId}Uh`AK zVeu+nw~Tz4=#sz2RA#a?s|dY*q?34Cu7ZZK<`p?v+1JG{%3O1{`^NfpWWrA{=7}10 zvy@Kcf@>PH{1yJ{zz|CUfBUgr4%B&P34!GschE3_0ksSVIO>dFCl^iZHwLau@FWJ@_1aRK z%&)66y6kp~AL05RCfr4k4j1y~R-}#n!+a zmRHu~y~5x1g9&j9YST8Hl9`Em)Jrvon8`PyW&tB&20|LKhZLZwOT{(ehj0>1g@@87 zh5>D&OiCJ*5|V27`dH8ML5J?u8M7x9UEznQJ5EMSG3lp+s?}Tle`NTIS&gEhLh59S zK?3)d6#{WNwN;lO_iJju(6|z=+@o60?0qW@A7JY(W}Rg2-S2M?ZyyQ!VX9S!4Ju;#v|UbZn}Ytqy5;XN3yC7dl0?$&%hJVLmr5z};)&`$w>q5W z@;hqA!F9$S_cd$x^8cghEW?_9-!^`4jBcbPM|TU-3~7*%RzN^dkd$s1AtllRA|>6O zA}|CAgBFk+$dB%Bp8a1uulHidv3E1bJ~BX&?Y z!e0?8A=V)DkD2u%jb#hYjzp^!lda@=_ee@GoZ~j$zqjT74J01Mw4rWcuT|BT8_uaT z%pR}Zq}K(qLgCZTR40z~uZdcItxsDm?b~foa)pC+t^Yn@T)TJ>vb9Dk&Z-c!`L~NP zi6?q#NsGFfpwb!n7~({+FYf@1yOv#_V7=i53dB&oEt^FJWPYcizrVjGRZ#*v_~1YO z;?YE~;Z00AC7j}yfZF#!$rU)Cq_${_kR~y*=<>8PgqI0USCBC6!F9q(^+AS(%TEtA zDlyvij6AP03eIXiP?A6LmGKg8e_05vwftrnMkekudWTa!xlTnfG|$b>-O3fRSHEOAK8*!<+(tV3e>n$nK5aXt`Zd0* zb0GlJFB`eL#@Rw@$I~j}#AjzbSycXK^0#VSYNjMmEtZR&H&^|VceSt&%=kpbuRHt(ON1}e#xNM$B?7dhXTE%mI=*cM_8;f-4?%~WFE^_vVb7u5EpbN z8_SGe8&FU&$sXq^#xXk8!Kb)s@{E75&K)N;B;&Q!)x^E~+Ql@xqO?_56`ny7@fP3g zhAH1|O?eO_Nw$K7b8-KRE4=Yg3+?;vEld08ey@6OPS5sv@P`urtq@ae#EgK7#$&O^ zFcdD@1|Pwn6$}%?CuFmgq_{hp?(-J= 
zDB0Rs1+;e6cM8x#@}T=Wi)$LlD?y~f-D@&rue5UgcpH93$({# z=daQAb0IPP8mYAnHEb)7qUXAb|K0T7Z}#H^$dJ50WOz_mE+kUNNqG3Un`S>(=O_kM zm&EdG>f-S)V&^_bdW3&Z2%r2n0*zQ%q`c#rOH0(<`N^H!(`QW;M-?Uc7T32uRA9~B zHmgnF=N;`}o3tARNH3zAOCXkWsjdK~{ks6E3DX`F`Cnt|I0DM4BTj;QYB!|5~^^4T)6A;=UD!M`j)` z#&U5%$$WltNe)OIdVa%n+h|Mj!UnKlKOuG&k%;s zcl5#9T#tM<$a42w5{sS(!ZEl8npc;PK4-elK zF9RCIYJ$MKWRN00xe`Y!1j zug>U2$JtzesI*(5Q1j&Ak|q`L3;qwi-xVK?uBJ@W4Qho^Vnz8ZEq{KDZNEZtQZ^vA z5;9{n_+Qbj)e9Z$yl}lTYJJiXe0HY~c~wad3fld&J)ytTA;k=rJThmzJq6pQ#b>;vW#y=!JCe=q1l6M)uSrz+NVj@)m2icNtM5eVp z5^ou_*9PB`h{X)={-uq&`uaA3`2#fjS&MWHOy`5pRlxDQN;5liwEUZ26zk(G*u`(2 zXK$-7&W?8WcTD{~W|h~#$?y8gDzT!=#YGq7$G*BqXy&0dX24-|EbgLfxQ6Up8vf-- zgD!EVh4`BAAe|x8*xS-!U1y4hfYC?0*c1T=hj_Kes0oy@(ay?%9R>N|5K8a?owM`}FY40F=b^jC_Au zb@l*f;^^@3!LzwUvznwcC|UDMOx-H}$J-zkkT%CHb?vb% zVmwvYfA;+ho6en!Mgl=-lW(hIz#`KuzVnp}MdtBc)VtsbF3Z8mI5({!c#9 z@etrm6X@;?2}4b4~NHR$)J?|(_V|*lBPVogA;8J;~6S*c>Lv;5XD}g zd}w9aWdDHHzc~MxUU~?E0L=tJ2_h4?K%=hYQ)x4j<3iAjAG9Ub%H%6Yh#insck)#m z9-G(3#p~xwPi2HnM&$CJnPUnMn7tCh_T%R?FUDeJDZfX3u7{N;uvPYnHeF|R#{W;F zUNW4*DDhKVnow*Hfx1~Y%M{T79Tl>AJrtfL&s;*k>4PuItP9r11yp~k<=S>NZR=%b zdq!wVl;B~c;M+fl=g_zu9bnzfM|>x$`7k6P3PyRZbtTiuq5L6;_?*UZnRW8#>7?{` z(HmN*m1wj`3m@70(SN??SWEaHvfDU5<>1(dtD8VPl~a9t7x6U}RG=uGU{R zc9NbAF88Jp9fk~0qG~Yv#H*`x=28&I1O?2Bh|dIM4|B_K54=e94@6+dQE_z% zyurtyF^G{LLv%FkU@MBS`DY+VO1HO%Lx{8C3zc*%zPw_G6F>do!kEugNx;6$iZshJ0)xtv(;Hh6FQ4uJ!8Xa1B~ zwM6%ax@`Q3n&ED83xkA-jU-GCn&4)V9DqvdW;d?g*WkvsmM~TV}!909or!pV`s%l=5EB;v*{Be+Pc}(Ly$4Tt0JGWzMDi0h1g{iFQIaM~ z`hhA%HiigeEgrT;$J66uhyM3O`KJTTj1vb7f&j6$JWv!eNeQl|uV@O&o3YEp`;ST$ zP-{1^K2{f_kMc=ItLyU41XkqWmPQcn4N+!9DBlU4nFo0Vbi@J^VuTAO(2G{%1ouCb zHH=9kfqB|Z)XjnYt9fmt77~LaQoV&0deU3{Eh_{=_F|onUxI!j!Z$iFG$!*dNz1L+ zW*8fHIN{TmA>J;~o;EQ9>(#rZd+pX0V?eWXbZ$mO=%acJRGm8SC1}3Km>r(nc_dc( zcU%d^DZR&q7nuj0S#NayTz`Q5_N-i=EyIl)PgH_-4NHgQflXNOY ziR8jVkSJn@hMe?Dp!{!={+&ghsEmfpCd?J#DD|cK2QFV^wU@Pty7&GV&VjbF0>)dJ zQOPT;+WrW9hm(OKP2M_<1O&58L(;?kWm52v_9MC{&FT>#YiLFN3>n&!YK6Db~QDg@za@3Gh 
z6nrM`kH%OsFP8gZdnb|itY88i72qfWzF~p5>*lZBBPx-2-g1U<;)#A`0aEFX>gqA@Fj;75Y|&6=%OENhTNeon!ZQjPcU2Dz@JNkfL%bPj1hS*M&WB)Y1+Xu{mrRS@#OgRqVo6lTU~dN zcrqjMF$oVAmH4}#H8f4{YwQ%FE4G<#_H1};mq-p;2{M{+Y|8t&7hksLNJAvml7dI9 z&QC*OdH0{=F85$c&TqCu(xozHe-^paV>heF?M_}hV5fX2VK3&Or7Fp5i z!gFiPb~l*U1alqh*&Dl1Ks&qJ9)Bw-!+s-fo z+{qH>-*(Z?wQ1N%(PTg>gFE$s34p8qbI4BL!`L`58?{<}&~>v;iJ+!(n$^QZM=*S^ zVJJ!wVN~a8vRj-)HT|7o-T)wOGq8pjDF`mvAOqs33{d2Su2E?D%Y=RVIX&=gTg2y) zVEHMgG>zd;$J8CRX>y~3NTy_PAyB^Te@el?=XDEDbmAHoTHo^32d4A%4)|9;$bkIA zs}DS+*b1d$(*!e%vR46tnv`@JT<>})Ty;OtF78SN8ii6WEZsALu;fLi6RWd)JofcL zQC6J&*drOPJ>YIKa-}n(U!Mg*54b4y_f1@HhY8g)QFS*?Zz2f1aqj9BAisEV?(={1 z!GE)Sg(mKMI<%PW=1ORFmruF>g3l`8E{Wg?PC1D8F883pdQHD7=QTPaN|@^T7;gff ze^u7#$;M8)Ly5b-!GZ?sE0%|yt$XCi?FuSW?{GsdN}3YxP%&)u^nB=E>}I$jU{|yF zbiZaoCUzeWpN#7Z{*(Vbh3N4!v4kA4*7_>t?0MA^5seQh8_#OmNmU1rOkeI z5~!^%#WICi{dZ^Sq9QN^@jcnaSYH3E{#k4d%JY4BDv*Dtj+F>1C4LUZQpJjVz@l$0 z0i~A)?0G!E?Z!E7)a`ezq=Dp>`>~f-H81#ba`N@KcNCw4DXKvzpnP7@O^eG zc08Bd@M1KB2MIK<>y4di-!?o>iXCEV&~tD8aT~W97p^@XWW8QGtAl2SjQc^@y8}(s zkAJpj8MY5sMcUPXUQRlv^SkRzfaZQN0qJe^n>ELl#7C85ou}xkfLO}V5!-HSzY$tE z8~9kFKm{t(bvP{`(NYQtB~^6@%4gz$?n7ULICfs!%Q$L$M3xmEvQ8lKslAc+#I`&h z$alECZ})4Rj^>k=4B|nYK$pYXF>RFu!J^y**0jJF>w1Xg&tn!dNQR5+chl64pjz0m$wyoA+w5XKn3y^PR5qJ zvZd9ln|-v|DPDWH?M$SQs{A)Io4IJ`cX&x-)$(qa|51p!L8VLz`Y;B$aPjVjvI;U1 zkCBDneyM$x=E;10adTh&Z0|`toIUbi5Om(7TVX+txMbj{Kcl$Pb;$Y|q9M@4N-8VD zZfDXE$saqv5F2UB^kL&UH;`5+=^%g%Tno+=*l3qju)}ApVOEegvtRzuP}JzB$!2A) zb}uL%rh>u(b7FcbWUIwqqnP9v=Ck#^vXab`;XyJ!aioLG4%lfy7+~;wj+%gJa#jU$ zPK23im`t)Bu(}1VK7;7h>a@>B2`-(RA&E9=ybJTxC`{+_bXtF&)=kpkC#Kv;`@IPx z`c$!Bo9}92GWVlolWO69XXKVKH2UzJ=GZ-3)-MA%sB|9uH}xue*tMIl32{ttmpa3x z%Gc{LPAoFDmENHsv$M0hzKj0OgJGh<_iEA@}`Tm0fJC^AE0+*a)Oi!DHIA>IfcFa+<1Y_>H(+PKY0h_KEaG3b}&153TwsK+n! 
z_$|}>Y!?*p*c0x@dxjx{WaKCil#AmjpyQ3mcG+;HzG` zUb;E&Jc5+D)rWnlwWx#?(l|IWQhOKUf{Z27Pt4o(p43mK<{BT#-vpD4RiD68PhXq?)Im?}W9M{^VsQDvDpRJ`3o27Kj^YOPJP# zzf_^9AAsOiSnU2eSU9)Gae95Sb$3J8v{zrZrSY-Tn;Tf=`+Z9%haHA-Vz2~W(2o_6 z^`xSSH;woYUmOtaE6|WI(;jIpvy2P}ty3(OyKTpGO@A{WKniQkr=>1+JdJSndHr4Q zrsILJh-pQ{pPY42?8HfmJQ{a9=R6 z)tU}ip1ZuMlrz=?roT$ODMY>Kqo6rl`rtL2i2P%}UfTItY%&;H#1-~c4ag5lpkly; zam{u+4_FKb(Vp@Dvc)fzcnew0KIHqfPBcS!$th>|V0i@Vo1$OR9#m4b}YeFWm?E{HmBUf|fp%yF($@G+b-?wjzW>pT9x7l2EsM*d}Tsv7(| zcd>Tme_@|b6cq~BV}dP|@0PYx+|rZ2D4O2`37I4+MU{~@COC;92Qv{z9oUm6cwW*z zs4f`u6R4+Fit>Hx3)jPJLJk^p6A1sOC-t3KbdjAg<7MCf4l23a5bh_O@j}44e*Etr zXZ7jwuXgkq(ALs@*#iA0nM#CV%2G*5u8E1QFN}r0#+=Hp{wK|MX~~`^h_&z#61>}D}*%`}5P6=HApc?Uan17pgRF=CdY&l9BnW;_qW zm05|=?C~s3413*Z=B&WGY;D87b@7X>e2<4KS&!qgIy%X8N!vnXd?visB)wmT+&UK> zo;BeO9U&aA#zR~XX|$22Qj#qt91Yu8aQoHlKg4BKnERt;gzJ=Z*r5tLym;!lu?FxY z*GEVt-G&See@_>zDw& z!AY5cF)$FV2Hsv|$+sT5ddkZhFMxi<=DgpR%Op$%H7W6(eDu9~S;Hs6v}}$9NS*!a zI{DL|-`om&=CpsCr+uA{=8J5@LzYwAgA6SG`^!nH$@^c%9m5ra>xbqIxS_+uCw#xd zjIaAlhguvPL6~_vEMztU~C8Z(FTOPeC{+{PXTYT zBtA0By*X0n4dj4tDncqevy3$+hwUPw>kbHu zsD*WS0L4%*|H_sMysHGuqE}FSdyI)x05tmYxKDZPt(;ti? 
zJ?5z(8j^<8aE>x9;jD6sbS zlj8VC97wd>MTXstM=bK+e5+JK2rkkE8*YR=pS|4v0_;_8g36BM0ld$CK4VUKR>jr@CIBLqp2mIw&x-Yahnx zb*qt~!V58#T)GWAQ5&-ar4&+vQ~KoZDz`L?2;^gb5C<9G=yc9@)ZV)t>?*0LIF)!| z&{-Q|G8m5hTvR8#cFsQlp>A1DpAmXLL3MBE>aRh(7X^~Fb*EupK|*aeMGm{n#V)Z6NY_+Pc zJl^$^a#e!hr!*poUjdR*eFZZ$4#on7CtyK0p{}5S68Bw`^G4Py$Aj;R#6M@x$Nmo4 z>?FmN$UlNa+dttOQv~2{OKj`<4i02C`^TnL$zkXdX@YZ~mf4!%1{bWrfmx8N1s*V0 zj|CdX?%?O*A#|~9L&P~u*oD<~xiKWy@s`PNJY2Jr4rD;P?;WZRCiQ{*P z>Sneg2-p^9(W7nS^d+=Rq)0yVjr{R1b>mN@eR%)-U_FKML|5MCo`7Q6cI71_SGUWP z3cA8-CMUC>sg(GL?;L8{CS>Y2a*OXNr^#ki~8VsXSx$QC&3UbC=*~bXB80a7I3n=HG9>sFpuiG zGmGu9DJ2m~!L!mTT&>qyZo<>RM^@7C!2&fQ2y!@PMEF@*S3~)z#b$fSF6na^+n6)8 z^@~|}?AkV(rkspjuJ*gr`#CFr74YCt$WP`<&+&KuQYbvF{4=lZj2(|9*C47@yWBw~ z{uBG}+w3|aLn5RH(qcPaF;*mo6)&J@0+w2vaXOEm@~nStYi65C<}H(f(T}@5DW|y4 z(7sn%Qr;gPeR%d>z3P+~e@L4=^c!!-2VL;xGLX2zFya+hJW8zbW>&uI8J%d`4wk>V zn{vIKT4T8sD-8`^c@0I^ypQ>TC!RRi{Kk*HBpl7}YX~NG`G?1)y-FFzBDJBz*n`}s z3~;+9`};F8p%lTZ?mlskYXql)`JN8MX#b=+%M}gGjKYcJtdUAA@5vS~i-b`QRbc-_v1DAZ^`Q0ZLQ9ipoac zPUY^o!hz5tF8OrFEKeyMxS9}Hv19z$+tOi%;7~Xh4)nk}dZTui8}JD9XNgMlbL@y> zqs%UN+f85y_FLx{7Dhkmke)w^!FJ81)L~jwiwCfx_g_VX?djapY{DPYq*gf<_{tvS zD0?a35379F#B#&(q(s;4aMbXfg!d-jNnRnAt(o&T4cle-IxRlP$;uX??)~pl&DyEs z7-pR8>3V;EazKOHEp50?SmDO8$mi;)_BX}tLuXq)IED*?iqp$E{9eQ>9ugAz1*v#4 z8(&f8GI2&>U^YHnKUgyq0AG^GMF8JY+C&sZx|irjUYQL`G3E6-dlrSS+7P z@DW2blM3cxUCOZ^akcIby1PX8)zIMQCwqK&+b|$MQT^@;mghut*|6bDFvQg(X>qM zgomCQT+() zk%|x+jlW5?j2I29;0u|Z5~{Oc5wM+T5o|(84l6aaL6*OQJs*T582(;0+##TVRLbd| zH}hWOv$r_3zRmLKz4EA}Gos_3lw?@d6eEG-^zYlzAeC;Ge{*ORiApK%XQyFRC=rtE15-hnueuoR?Ypt zkpj(j_acfaGU3aoU`~QMee*#{#`huv0xLeCOOCV*3Aoy2R)_$VXKJ1%W^4vgD*3!w z9@AFYC=;svWMBIx)zq-;j<7MHlIa^kR7kubj)_kK_C#N|kiM zt9%~)HGZy)0%q}MZ8|LRVp*zxFnf99fvnGei;Es5uMf85{e>_W+)d1JLmrwelp+_xPnEYIh}NXO7y63G7^AXzcXybRXK zXe1*`r5fbnzC!b6Tl!yf`vJdk(#wDo+3R)R2#AtsCP2C=~Y0+kEiJ}^IQBSi562#eGus6YL`Wzi- z`dDauwfUL)N*_>5Al2AQ6`c0P-TS<)c0;t#V52TN z%Qz=oL#8x(Mbz0X7p%|Ya^dI2y&LFJNMSQE81&=5%8bq-2Nvk;z?CkaMcNy+2SgfX 
z1z8wmMDWQLH#88lzs&srRktcL4_2-F&&$dr^{pE|QzJpk|$W^T2dw+_Mc zuFw}d56JFD$H55aSvTi)v*4Y0o`KhC|S;9XrvVa zpd%>j0c*b2U6NN7`yNcrGATeBt1W6Kt43F!_Cox6D&RZn!4Day*YrdT-u1oh<{O#6 zlwq>>nEBJ_joSvCP3%r0LfLL0c$zSgevek+vll3pRf zZv#~xyhZfu#~GVWc_y8_%%%XRk&VCivv&YfztZS}zbCu@ zI&9>^fgk+ajeqCSA$z^~d+iB`=iOPrw^k*%^<;5BS4z?2w9`x8*3AY){H%qQ=6M0~ z0rAypMd!a%C~x0_Ldwf11bA@iT;EuzOag_UU`@f$)+w00QuQNLjf*01r$=)FjK!4s z?)n*o8w#IoU#U&sdBFElj2!$5*X-vZZHe_Ou1|!n^N=E)eRemgZuRgH)gEN(2QU9) z=EMfdGlyO5RNSv%ip11>UAi;g45Ee+CjACoxU(t*z+;3m%#8-9hj1~$FAN}J?;%X&WVQ4%eFSp+Y9U^JWeQ%Gb=81d$P7a_zc2@u9)M4M7%GEh#h> z+V+nh*$xBZf*LLovlcod}!@ zn}gOuYo8Ph9b~;(sD@WG3asK?88XpUArfQ_<&GUP<3kXTP0G4gB5aCT zy=gFbAImP(R}g|;dlVbuK!Y3#4?F57LpFL~z=H-Kv+FbS-aXnsk|o3auOjcKk0|7R z5j)0BwGa05M;k*G zUyb|640h^e^b$CJOw?An9wt26}|p{8l(N(@0BLN8`|%hTJby`~b9*kmrRe0D%ezJUOq&P%BLdqkdE zpF3#?5V~vCj2~laY+{wqHulLFab)B zH~Gt@*$CCA#vY(d0mI{+UsOL22T%*6xMhV&9%z~S`(BG&Q-lkZ9ma9_k=@_A-lX;{ z|9<=A)QyakIsD=q+sCM8j+DJ)H)19EY|4=8uJIo6^J-JWLzgE}*MCVOt{9`l(5)pSI+9tS-nP9qlioh>->QY@zA6i3~a!juIKTwhJ7jhmi`V(4L z4(hST{wnTi`H*I}gk?YMg{24m>wHAYYC1f$M9oT~ANOY#UmAQE^{8}C#;x?y7K>aw zq(mM!IUUZNS?v9n%X@IK>j;I1CjD->nmCM^ZQ1gbR{=U!FmA&qr$yh`yOjlc44#BO z!d55`kv`qG)R{xjg=SyWYZcw!9LxG4C4#X}-;IlldXah`xMFjP`{PP6;SRoQ$ZvlM zV@x8()Q2CJW&h6Jj?ZI*11sqBO?GJFq1{vveMaHxp^&@r5t+khLrYtWp|9s1yD`j! z#^<0Hb<{0FW z&^4#Skf{2Z9|B&A>}0g1VIM~CZuy(n6tf5K3I6_zg|1BxZ20|{6agI)H;jcXblW>7 zY`Fv+^WY(PfgXSTZ)+PsuZIj`{|9Mge`qe$=ok_*xcmtC$6FTsxOJMgJeOTG{&m~r zW%hbNCGl9bD_e| zQ9>QH$XR`wh)?9p&Pk1hV(L=p{%06MpDEJ^JB`uTr(N!P^&T10w^ML=_u5^w#lRFd zT{7fG7%Hqi5X742L7rrN9r3OrygZziIOLLH77PDQvwpcWD+qni`~@LZJr()5to-s~ z5D#I~q_*-qrLT5mYuMJ?oBd$-bd3W95J^MG`YLtQLz&4_RN2>LdGUi{!8yZ~NBc~n z4&e%W_j5;TRVb1UkRg5Q2(9Mv&gEMm&&c7;kZKQ5OPDtW_C@nvrt1MH!wC@>_-`z? 
z9cFbxOn!quM)jIYZ`%gB75yB!1${;wYNZQz!Ga7Neqz;2f$G8FGqfjkn{l%nKDqBf zqWSh|=J@37jykODhnwA>AShh^RR?_PYkIzZpY5Z!HwJ@ zU4usm9oLpc?g7A4x&@06Ab*Q+}v zQn-2aSzXK&pD93VZ&u~Ixs9iIhqyWb=T&L%=UBiojPuy$cte?;;VtbZ4U}w)!R9=L z18g)l*MR*HUT7i@@+*m{5SIVh?x)#LQk`I-BCGGI-N+GjM1Y?+0%71q4A#@3(ylJ`#=v- z7ygZB5KdtP&0xjYZtN2kA2EIRy;q0bIz%o)$h?M#v#IH#kr9Fl!&JY-n?0!R?KeOL zN_d=E>0`yv!oO49Q){SR{o9TyAWVfJdMd=cszP6G%hQf@-Qo2dxGIoC^uo8l`xBVA znn^v7cHa2ac$jpM))>9X7y)%B289 z$}O(l4<7P(h#&jj%C3{Tlur*?nT$-sZv1kZ5%vjO`UDubANg zbn^*HpcXDJo#}rRTyk_P-urUA8;tw#b8^4W};e)Auue;5o1o1_a51pcJM_~v` zBrCvdwu~{zM)V>P44$w=3`Mabih(>A1kNo+V7G(iZRZ(POdbqTro;M!mFT2=_Xuo$ zy%wi2AbSj{fsj-Bh`9b_6@hxb%@bo!B2NWrYS3N{O8km5pmJ z&fkmVr8jn?#sX;&+JAC06j+DOB%TbDB+73%qH>ZsoqAEsk&Kn41P!g|#*QjH!arlL zQ{n?{s*DjrO2G!jzQ9D4JZNG=}=2mgf-4 zDUH;h4ja?C2b2Hr1@Lt_4hemu+#|u%u2&VXox?Y|?kvtJq@V1!LnE-`WBBJGKG>wt zYcwv|93A*h{5GOG2@_$0Uffi5de`s5bAwjy#PWB%9{i`E%yI0M=GbCLj$2i$(HNlr zBItL-1=jVGPe~qnZ-dvFgNH-Tg0HIaFWV2h)I^xpx(0@yQyhP~oIiZ6_N-~KYuQS~ zwM4Zl=5*?+kJWbQ0GAxJ+Y~jGZ5Ko6ub=-NxW2j_j|w?Ez1o9vdkm;N4_~C)B9ZS< zuGIZ|H^j-b?I1nl)pae8KceFpuT03XQ7Y5=oB=T%M?6ZGuKMu&O;D1r-yZTl)^2FXD`rn_IYGlO61u6AR7iV=_ ze*e+hXInxweVm?M7bLxHUYeNas9Cy_WqN;u90>e7hXzzKJvX?GU zKDB+T9@IGVG9h|AvZ_<71$Cy^Fl8(h^j+J~6_pRMpDu}oz zU2?P_mp{bZa@%)oz;vztFH?`^{lt4Eo{ea+KfBjwQM#A%FvRwQ#j(HtF81iquWgUg zEtupLhu?eLZ6(99uuwbno3)oOvZVk1fnGCi!XHtkJnNPw2i;#!;NG+TYG zVky|9VksT%xPptwQKv0XlNJ+q`^C~1PH0X=78Y6!p^h2*>;9Pelp585gZAJ7n!x=Q z!{%D+!CZ&*{w~}?#j&qsaS=nWr_nX>Mp#k$ar;b*)RN3FO6T`}CKu*Z;O|BWMeDsGeI{l%g5?|K!>Vb*lTuSf^)D*_ zRJbh1Stvc!IV&h)p#LH}*FA7U> zx9v|hA4X8^GG8hGOZl-WjJg@Voqx^XENb}U@oV=NuT9yh?zKcK?c3u0RbY~g5g8&&0UOJ*z9S&;9@V8XcX<1tycp7Q%y8Bs$d+>|n zWiCs6Atf?Aw1Q$4tbekL%;nxo@qzKTznjb6AM8m_W!twg_L7i7pZSpFMuAFJ)L9h{ z9Mkg%-EH3f+Egv2(@FtMB+hIxFHsXZfNO=i3P3t0Hz!XzGjZPjn&5x?PethVHw5Do@`0 z_O04N!>Fq!;t&$(a3ZAXTzh^68QV50S9R!1&_xnGz+#2&1wc z!bBKDHct`m6*Gv%U088!Lth7Q`3@Eu(ZRFR-G37kMz?Jl$ivx8_NL~GAt?|&vhb$#vd@4_f77#Rc?j&j4Y)>!saT2>NJPn$cQ6rW4q)_DNXik2@Kuq|KY+rKg&U#pTq4mNcF;tK(uKm7 
zMr9!0vwQ?T`<8dC8(yr$i~aT=t@;koF=dQI!>GY<9PDNIYl1XG#s8z}tRI@}7C3%3 zMoS|g(j8I~(hQ^o1*8$AL}{F~>}XxBPsLi5%NJ%b=Fj7#PNp}oc{c}7jdmFB zzei;FaDmOXB&_h*BOgZo$4uO1BC--b-s%QK4aXw|S?GYN&lEZG;mYt~xZp$EZ2<%& zY$0jEr2Xlk&MWskl2$WQypb>4TrwjT)>KG6EHZ{9pJdri0;9-{Nj#)tlGv|eYIinp znlZTY2aFJ0V@c~yb*UJ8*=dI@^UnP6Z(GEeyCvTK{t4uS$tpXN7wyRvain&) zpna8|)J69Y@xh+)NG*+KeQ8TQHECJsvwr54)Zbez2yt8ac$$1W)8K3Z?xVo~)^$-D zE1}*-7GQP#A#p#c;c;2NYyr-z&@pe;1s2;pIn!r=K>#GTaH1MMV z%y~`7!dqIOA$@(&>4}s^Leas+d0{l}*&^W-gA6(PJ_hu+p?l9x!^pE_VEQS}N?}xH ztdUMXP!(B4#sBZ(r6!3A^xrgit7>WgL|%f$^5z~C9uchGZz%(Nx%_GahHM~jL_XMr z>w$MBxg($NZ`mTS2$3edEwd#Jr&>?c7Arp1lu|He?eRONk1( z`r>&)aWdmE2M6$xdxrVj={Q+O9=Ej7oL&-YeW`Ahv#h`^OnIu4q;*w(5E&lq7V~%4 zRhK2cUda_YZVS~|HdIjL_;mm|@EJmQ`&1G0_+o6+S1F9y0ji!CAtW_bY2QzI?LNwlw09jf)0!c2^9sWRxi|*6i-$ zlXUxXzv(IXyDMq$92e`gQ~tgB(lC~IyN}kn7Nf;$gA-b&2Gzz~C_qyy$oFe&-`^c{ z?3^s~p_dzDK zNCZpvli9OWACM7IKAK$eFMG;8b+b)gQu$o{@n8+jIWwkzis+hq(2eNlVa~g*%wR24)EfPg(nwAB;>Wi`#;c z$^4!n(Xupp7xli`<@nSURo0nik#a zWG%HnV`hDBK=5fLtVV#ouJ+=_dG2=3_<4YW0lu<6)f8gPbbaZV8tK8uq6hKL@QtiY zhfhR&{aGvB9Vu%1VE~W((Zr$Ug974^f_8=9Fa5qz`dvtvHfXXRpe6k%sE-khK^Z=S z!Jq_bd94Ilew?WQ=p0JvTk4uT?y5Ne#K+dF7OHW2dlQasX%J+dy(0LK=}0|~&mjUY zFNt%&-ubADzn^(Ptr$rz9cBRM8#0p5q(j4>@w?ne@I2m5SKlCai&=rjO~4s z|A}!Xo^|9V+Kr9+bz)&-)O#p8#8D>QytT)Qi$=Do23-4=spA%gOA4WE*Q7WmGzG-M zLTbEqbsI`aW&XMb|J+%9Rg+MBwR6OPo@K9SyZ%TTdH4YXl8@x3J&yGC7bJVqo$-+x zKBYS_FVH_1#AY%ItfntIwt~Nj*Xoc>g@YOb8PWS%5u;%2jjgw%ojb z)!v9`CzyQjRl2vUlg77M&`0E;tjOyMt&MQ#-2P=J3yk}|+tAzUZ}8FEB>eqY-UT|9 z{{7{pz6-;^JW*jFl!-D-95xyJeeVb364BuG>y(FL0q`_Azfc~#->S72Rx1Tm=j{tT zcM(2y@w%7{bh1!%$7n#M-+cruh@c)puhA5Qh9;lu%hS!9!FT{*vcNd-dBfg8FqF6* zSL9^I9{Dfu11&;g?Bn3m^k9m*3orHF<e7>YO zwn`!Y7<<$T&FrN4I@hGwQa0K&auLV4N~6-IZXF$~On=DTI6K(jtLVfdFIba@i%Ou}`&hXXg^cbYg%f9j zg`1=EF7-8z!~mCEvIKzB6IcqDf+a{+5IPl+XI`?;p6v3hRLRSqwI$ZLR5@lG2zwqDu^;I+j@OAa{y&8_Z{sw*& z1)+mkJE;%|T)=c-9D5te0oC{ur~J(${rXM!oi1zw2R^~V`l@7yBI#fKr^;DeWR0si zW6nx+qARnJcW~@Yvx3vOjC22C821Q3!TY%R22h{)b}M=awJ-JQ05qY!AcQt)>SA) 
z`H<2ne(0pKSmPk!yNi_sgyEb%_EeU)$~$41htD^!ak%w6-)@P}`Y+d90{%zaKdeNE z`X>l3MKMvdy1YIZPQ31sh2NPL>_`Lq{P640sd8T47}(dRih7FVF}`W|?%@b>cd~1q z5+j&7!d+HTQK6*vqiO5~`h|frTM=7H^g)87Q`K^M5E3sC=bz@@tQMpP~FdeQb>f z7rws^mx_NjUGlFSB`XN!y(umU-#(o-Y*zTU zuk90fqf%Q6oE9VwxV0qSG9eelIb>O`L(U+1BPTI0QUXw37X2vYL!<==}pPkya2fC)PC zNceqh9+B+fq_l?=O!qYE#(n@|o|7~j)AV8xN|1kasvHLAS0aZld4#7H;HRq+K1c^S z5!fnn*O8G!5tNm8G6t9ndvcJU=;`9koBI%V67p9e9?%NAznH%PT zY`!EcKFSZrSB4|WyJs=?Z30ak-G10kV6?iiftckFxG(4LqRH@>|59iDb*V`E4)Rkc z8(!=jk}nwKwco~RoOSc{JQ=Go;&}Qu*3^ofBd%)i^#0__wN$)L1!}AfL`ecFAQtiz`iwhXBXN(Xbdr_wRBfu8Bc zH91rA#DPC9(4QWaDSJ%;>*k;Q1?iU@>{Av+P+Q4{RXoO^j)oF~PL^T231JBGDbvv& zyl~}tn$N0xIbs^>3Ps!xie1THq+CRE9#YeRLesF*&v#R{~i$wQP2 zu5L5|92oD@Af-M&DA2}3x`qj~w7d>S8`wJIrj0Eb7=V~I&43+8&e!X?j|H)K*v86e=}teT=OFkN7prt&{F^h^Z9bb3^g{{OGiQxhx&p zTUpXN-oLza@MjD6a&I;zE>+t^uPut{7gY$Q2DsD=R;gP1cdQC-53%Dj%9h%&zG?t9 z$~gAN!EdAyo48Qqahq(YJmDA@N$qO$&WNvJ!QkaV0dT)O!iDbd?ag8itvEs3tFEyW z#p%My;t?taH%WRGJ{iT=sp*`2{aZ9;d5#BkJ0}USY~R?6yhSU6h)^nh@Haw5O~vtX z0~0w^Uy6xOGvSQ}5_|fKNTjJ~OcHeEdiLOXwiZuvk-cc7P+-V*3&n>XyzR@R@80bN z%7k}~)D#h}bMLgkVJbObx{KsbRX)+8XN9Kx)wz%sZFuT0oVYksuz$^jA}WRe2o*V! 
z_I1Ugoomu^H_CDTzy+rC0*(=+k56DkEm1+Z=a$ii^Hfi#)vr*Hr!gLX zh{o^wC$epBt8I6|v7DGJ^pcV8MByVUgC-M7u*OnP{2l#twbhea&uT2&~C$Z3cqwEY)C4lp0 zW8=|_g6lFEoVVTJzgx;9WltmiDO_;aMg9H5T)MOXk0vnk6{C14S|%ff3+I_tiwg4E zh^>fwg@DGON=ZC8(X1H?Q~(X)EDXIx^2P~oAsuS!=oKr?2vJIY>eZU){XJXr5OtFJ zrZR%-q1EUans@E>00)fy+h~~izIl!C%(`;C9NZ#@5BSCM_*-MTsQ_}~yjv#simEMK zKVfyUB)Vh&g@N3NS{0!+}qnD8#tU6}t<=UA%8^sOXPZ$~c)}me_zHwBE zT0sA!6K!{pf3Z~|Q%OA3In4XvLY@Zt`!Obh(gq?B zjcJ0bN0^zL95WjUP>mIGS8pW$N4h@df_Q5;{{W!5p!qk^drW}dMMll z&Igst2(U0FMoLmCJ^Pf~vU$^0Gqq)2fAffYB&GG+bxA>>ve!X@doACkRN3pvLUAs%2LmoDl?}5W zP5L3->UzJ++sSq9h71l`)&H1#`Nm}cBL=$#9X*%vJetq=JVNW-)DmF&{;Pxt{rSJZ z(w7r>BZQbJ$B|2ExB4lww3M5vo+)R(it@IiR>`!VC;RTfx<2X4`KA+FbcFj~vZ`m* zlVo{5|1#e_S_t9)?Q}Ylb#Q%CP13c*l4PcxGB0+Rli=AWP#g5n`yFp}i`0Y!v+9kv z+cj6)SXPDP7A+JZN}i}0ge>H#f<702%moU$5}tkuKu%-~%7R-1-9mO;Ds)S{dWMia zZ~er`o)eHkkebS43~LD9_#}$7 z@gl|G?#Win3ez;3U3StN`&4|dWc2P*q zY}dgKLDpq!Y>tP(`}X65hkhvZ4CGje%1K(QrsPpL;6SOre4s@}caVEom6BdK2d(t3s@eLPFA}#8K5Yo~vg9>p_M$@s|B}+f*^Rc0 z4!_Xl(nt^1{vkU2B3Ji}8pM86^%;a0E?*mHZ}{EsDxL1q@f|kEd&Yr#E(dJv6y`3o zfY(c4(|Y?Y2jrs5kI458{mZ;S2e3h7IuKpFNN!Bv2IHTtl)mVC3WoL)?qoP2aB{i= zZXURfrEjj3kVo1kHcN5&0Q*lZQ&FYpm7mX<*ROp{peSW`$p@vb#0x7OWc!&V-mjp@ zWZV}RFYu7vJIR&Ksc$vAMPE0PHS>NR0?8oAvbi)Sny!oIDj=v_$(miX`jCGf| z`|5YM>-ft;gVn-kCG)Trx(lY6&5kcEA5PXZv2-pzwFTEwT+AW213~UHHs7ZSLT0IE zxc<&Dx)5iSV4<8ebuoqt-HLU=#l+(ZHVGe70;G&$BHtpn|z>1{Y97?~MJz zm1)6ZV17J%cJoabB${N{fCZDR3>}Q2Mw_n#+8FS=6=Cp5!^MxVh2!-6T3Kx`QsmBT)Uu_auLOLT@>wzh#u1a81h&tzcFl#0?4#3l2+l@5 zU7n-d*DU0@9r}GncAs3-k^rHaQM;3aT%x3RSQOGfA3 z+ie0n!m03+>&ibvTfd?!Mc=SNVFiR1mJaMx=2dSV6kTgke?js4{rhuPry1|6<{C+v z`pE74c~f4rrL@AmS(e7+QtN2~g71~8=r&O^^n+L|bu$9gr%RBGSnf*eCCX5GPZeIP z`H2!-lnn0&sM$mw@g@A3@<$^}S6;Y6O(CNVtCa-(>=L6(<<^ngl9yvq`q>VBG^)x` z;!_7_6PQCds23#|QJfQ8sGUX2+S}q-^@jHNf&CMHc z7a>g?Sp6CRY6gmqC}=`748`3bnbFoto>`(wh;D%>SQ=B*aviCW670{lf%LcEYZh*6r)cJ6cR%7 zJKR_}xFGwXI)bBb(3cfzkZ_Q9QNnEG)vcC zKmDtYSq9qQWy|K`{>^sXn?0W2=AB{XEe$PXHzCokErs6OzDg|yTCL@cD3bG@yQOeI 
zQ`~TuIiTF5yXK&|ne4Ntw(nJg(PK4Kk=TKyX{|Yx*f4VI@Szu4A#PBwG5Bm5nt}#i zaQa1d&a}Qbvb@=IMsB?)^ug3uXl5bBI>*^R;VGLvnD}Xsb8qe8AL6cXk-+i5;Rb!w z1-@GXi`!5ywE?VJ z?m~9;42=EL)7zR`&sw5y7xB|geaPnKCZD4dHqbKl1tPUuguE-D$82QUKa&MxFHsTq zv=f4?iV7E3+a-H^2i8Cg%+7z}9*;5@fP`wdUz*h3$F{#|n#za|2rsi}yzgwbtO#By zNU6bTY9ap!6JCW-;BWiPco1Sz*GyY`O@_kq7+24Bex ziw>d%EqAuA(XmtWUz_Hy4W%X=55Y7db^YWy;`Czn#fYQfinE-V3jVPaYiV`U|FZz$ zS_!+eZn(Pnq}45j4bL5C6s<<8P#aI3o3G~k&PI?iREWr4+OC;dW-#_3Y_B<(!dm3X zc#y8*FYV{m!pn~&lficseu*To!+?aw{y8ha0AId38NhVjUwxcVaJm;lgB2qG{ymox zZ%&`G82w%d?dyyU)GZh8z8tk%&JFU1qE-Jh^f70XI<-Y&!`?|Z0IGCe1ua9*Ya?Ag z4vIIQy`97PIUgeo(6sWMJ^J@|$8_wF+oMo9w+^>d*iV$N6cHZWF3C_mXnT;`|4rDR zuoI^PS^Y(J^!B-7B*sUL2vGO6(G??tbnI2V$3-+TpvkF}=~{dFT6xKJyLMd6#*#jg z6L}%j0!%kTQZY2-Z^hC5;J>Jh#z;b|K4Yx9H(%c0HCXFjNgU1Zm*r_SsRHq~`SEJwR$q4N!+~w?Nnvpr zu!OjGaH%ay!w&3>EkCWvEvI@kIG3A4*ifXUUkQDk@^Ky=J~Ej&i2@ zl>6N4`gic+HiMWs4(L~47+JJZw}c{ujcz7H z5&xDV@?EHv;(jGRWHpqkCr98JqRwOzfIMWO@61}k00X_Lv4zfhp;a()fFA6DoDWOB zVPqaiL~CGW=Hrq@UM#?X)khDqPPnz%7!14h10x>7(3m%TPMH->AWwPaq7FAf4_|Lb zHs-CrjI21sSsVw)bOMQ2q@hLf&duZg$-(Oh;Iy=xDbZuikYR%-OW_)Rbi+t%Qmpfu zk6lQ#;H56#PQ`xSm0vU?XfB!H@~z?iBd9#s6}Ba7K^QBpB3J-1_~vi9*-i^B8~2)( z6K^Z?ughp?J)@wDz>pExboE3Z4W>9fFyEya4J|;iTZq9qXEbp)s%_DW5rD%%b-+Y` z-~hT9`#QwPCo(NJyAs4`tXIaIFM8#DchF()eHw7aT#$i5iP?!g!h_|JXRy?AEkCo5 zhU_DQ%s?eB!NTLRC?H*BC$U#qlMp(hWq6(HzrQj+V2XAZ3p+!IL9V^B+M$%rc+3+L zT|Y;GrRQX=ZK;0QSKk#SBl6QW>TiM-dGCiyfrGV9y*(1-^)(rLqGLMSuSA(?lH!5B zBXl+MG6;OVccTE=mrPa~!``^a0zXNQvboVo3VF1jT-NxC3m9PR_o4L+Peu@(1Vvf9 zf58J5ch`c$kZG~oZ`|OD(7#f#?LKmJ(8Vl5)asdlk^O3eIv|xCWBBaUb;P=7rQjWs3JW;8F~ie7}N4{lV&fP8H~}M@1i>ul$hX$aQ~(%9;$ZphVeVv z@zn#Tocvk;h2cwE?*F%r76I1Z!V7>YC44TB((u-U07d@{Z{ZnqXzT5IR(56?J#)Xy zi4a6%aw2=9odY5b2} zeF0%3sr=Th7Sef?dmT;@UG}us-$}Eigz4RC%p}*^#by)bAB2u~ZI3m)eq13~$)WSH z$cf$x6`w_|`8kyvwRunfPHK?5^W$ARz*@0&5fE4KL$B**yDv`_(vDE*%cE(io*xON z+1lw7a?ErpmizU2IY@i+h81_n@w4@HmGo0equ;8}0+Z!_eO(>Q`EB$@eH=*6`GX(7 z8g#18*ttErUZT(!&xc~@jd!9X=vN%{`{~Z9fK-M&hh?f}I1(a7e?v=LDUs3RE(Gwm 
z^D4P7rUzWUxt2H`Z&M=!03xkJym^iJReJNYDi(0q-{`1sANYpFHuG}av9DV374Iw# z$abF+sTD|X5N>BjFMnY@ewlnXu7y>}z;@g1QCg5iH>&^VObr}c8j_Yjp`7HJwmGtJ z(cE3$Z*~fSnv^R!F#wh4Rmuiy_k41&wa;?Az3wEr<@FSo?%B2elTS?E-s88d8AvY$ zqEz1W`$Qonly1j_TxRPe*6d`Lp2{je8!l-?9XcZD`b|?a1$9Ekm?f6CYqD}!*m-c_ zkmM2p+d}BCXX*c-@ZUsB)W(t{8M4bPWl=wTYtk!BV z(iu7GroHM)F;DB1RH~Er^f7Zzmi?DsPDa1%fxYXpX=dE_SFp5(iS^i)D^ckF*EP_n z4$Txszh5|8Yc}tS%KW{wD&o3~`l*>w$Y8cbg zD3`ZI5Vrql5=opz>Sv5tc`c$UtB*sP6zz)EMi^J}H@JD=~>X-k@?Z{r!^ zg1w{b?Ho1RV3AtXcHw(`hr!WM1ni9{PW1Ku9|62tlJ)Idf0&x)$0GKjVMpb!`%f^x zP5xXL#bVOhrc+?gW1g5VY}&LIs<<+TjtF87t)1A_3VQbF&?z8USf;UZNUuzLb;I@s zs%TCe@Nk1-ku^kL7S#Q-qLTDl-t3PX^X zlmBm;LQ0)~$}Es|Jk%7{#DD)B_ew&j3Y1tF5TO^#_4XVo!QmbEeN$nuL}Xsh=Eo7uAuD2cx@w@k6V0*q#xprwmovCx~6N_2XbBK zbL|Die86F|S>_@sYOE!H&)hLCK-gHDhh{$`m^B|;n{Mh=;QMZ|wC-m#{U-%D$gAvl z*wR_$9$%yi(-UD^nrvgIKRC&>RipKn`)Fr7B~Qr%OgU;??x>O?p!x!f5w*i3??Bzf6LdFT=z4W z1w}7r*;t95hG+7dbmbhD$x$t>H)TPn9}Gl2-dV)ZLf7Kmz4S4W?;WD!pc8rLO}^zR zWro!$WUvKca%45ZKAJa!bON6vTL#FxodCi;HY$SSK|`(MK%nZ}&Ik(qk$KU|O92U; zzBjWw)}mX;Y=;}6-Li+h3pY;31$=^qlAag1N$RtOPZ}$?>;HON34T9Sd#)TRxB7MP z1!93pJ#X5!CjuO7YihKY*h@Z+YgPwVegDQ;im4ihn0L}$orUE?jeft$*pj0zhs1pz zT`QF0h)c&zZIgX%#?PQ|HlVFMojm&`HQ3|Ne1+ezYtrUV9L?z-oZ})&eGV$Btx`^9 zIa2$kAJNho;JnZN6<(EdtgGmdFZ)qE#?#H(3u^kaJc->{242L5{Q{$?N?gg@=!^He zvHtEdddb1bJ|85j?N;Gi}MHUrOs_lk=^i50du$K${-)QkaT`@ zusF*G@UN?}Ov!)|8W*Pp-9IbBLC|8NHPw?6?*DWzC+nbC8B)kThD9>oklGBo@Re_7 zW1Y?vsvu7}VN4R=9OC88J52R2RUj&|6Jqtw#Ecw@%wva2lI31(Z#HzF%i_Usp8Ua+ z*s&Gx`qI3}Vsn3ioI>=0Q%7J<_|5FVc2;zKuSAc;7Tq?i{pJG_&%3ZhNe}FMrAAIj zrR15emmZ`aC%tG*9U;0D%uv_8dTy+PFe__83$%B_BzB6T7X?Jk&~Xc)GrHQ}8}*$_ zyVgpJTBG{GuAiEyao}He-j>g@gB#5L7nk}L?be2NGy2XnzM@d%WF41Pa@N(b7&!w` z{0mDVu^$$9T}}ASNb=)1seuU-H4^Hh`E3Os1L^sd;_(ZkRujqLbmOW@bxkQL8KcCR z-WUB$AVxWlM|zMEa3!WEVEpFdYW-LTj!YD8PD~4M%c|2ssOq>Y)y{?0y((Z7hXlZs^ed%*f4dPcr zWE+7m#$Q!v=0%hHdnv+&4DR9;l;`V$`SR0DUz(5R*zIePYa_TW%JeDF7-nq(`IJg- z6pO(+))@;#!}m|8$}l)FifocMrA|D*GTL}4Qj)@u3XC{_Bd<&o{n{~ 
zzc0X2jTh`NZby0jx0zRe_c(v*^AA2#@x8(pbl7-@cMdNv4&J1^>GNKTK4A?C?M6aZ zie5WyIv+m#>`(Qcf2A+qg9FQ&ILjyYs9@PHy!SLn*%O&B`U-YZw!?!hYFr@X`5Zm$ zJ(rjWJug+vPZbzUN`X3H2j}q*KlJcP&YYpy|1RA5DMwK6dnoeLXOa#d*h&wL2M`HPX;NAR)b{z{1MMa ze#^tqTmm^?Cr1(^_3yTBRG^nZQgp2HcWY$p=G&LI&a_37HDz=8&(FVpr6rL{jmd`_&8>9+;nDus;^X`uJO(S!Y=HkV}sB z+rRqHe#O+pW=zSpPcnwb{jri}OIu3^E_|VX8KU*dT$QS;g`(q%AHCaxRnA zYY6eMH)OxQ{hofbOsk~rAhS&xSrRIESGvO>(RaoXhQH^TP&~VZ-E53{y%|)hsk=}Z zyE?xrktd*b{ew*4Z6dB{#ZE9>&HDS4lMV-SX7Pa1+RyH>mnFUBC(2;~w3x_6uh_xy z4ii{r7O7ak zn0Zu0NyJl|yg{e=?7ep>T}<$oa=lOJH^ljZOITm;>^x!Vy&{s?^Mm0%-jF$;-Nz*G zJzS0Eho`?a6UP9<51vm@ej7rB#9+obDE-Ni`fb(T@D)E^pp zm!+(~cBUQZduSSLjbSYc8{IbNz@HBi^k#!_3?p9_?-k3T+!R3;j)3jzVi}r)s(06X zb3p)GaJ$?8xq8P0QW98k^-1k_pv2~*Qv>#nD$VZlkGkk3AnRrPifT2#M5L2}WJ%Ko zp^EVM&ibp{)T@KqM?BNm4dJWEwPwENh>S4r5#0CGi>Vhj^U@Tq@3w|&P7i0RS1U$; z()5mLOlR5juD>B9Uh_PYbXnkZ6dd*7#1+-5`19s+iJKIV=2;Ao!@|BEJf9y(@>yco zrgG|eO0Ao(uH&`jF=x-kz9>_A+~MafmRl3?Oj?LE)1kVbQ;)_{GDl+ARJwcWH^(1luGirDm^d2#tt8g zNzDu>#DtUSiw@mN>@dS$!Zjj^d{-so7y`4$F`o$bkxdXM+RRV8Eh6n1R~l3x2q+vr zaG;`O7t0t@Me>{xn}~!m^$nKRm_Qqys~F-RdKSzi?BgG4lgQVFREzl<%bwyP&bE>$ZHM=Aq&u#WDOkfLpw~MYzhUN!q5ockveJQHd|^T{O$pg%6_+5!0vrb!yl3Fd*hx{Nw2;tAKXsz z{%n6g?Lkr|F;m-I0&IXlJeLuo-zX&JZ(3L z(el|Yhq*9I-c@RSW=t5YaMO_k$jsjTI~)!P14Ck9*J?Bhmt~_{|MZ>K$yopUK}2FDibrJNQ@W(C2ae!JyB!cpUwIJMBGSKS=Y!{oAVZR-k)hCzka|zjt29P5St)npibPFq^;fAeE`}WX25Z%PIN!2lG6evT0qB%V=-P z)lsf0nnqT+p)!XqTa-N%yT*_L=9R}6ipa(MAs!}RJJaM!U`1LGHT;4ViJ?wB18w8j zTCvL|S4Rd3SVFaE;za2-?`rVqg2OWG{ams?Qb35~^6&+bJ+ekfI_?C=k0fb-Xu^8Q z)$VsaUtVqeD^yY8c6m^vrQQ36T8=8y3;Qz2IS{%}4mI85??Mf*ZR>&n}? 
zCsl-5w%24}vDnaOB7J+aI(U`~XkZH=T2PJqfS6@hf(<4NHqM(Zpk)5jHznAuBd+2Yxp%!%s0PSZU*-nLtk0~0>7h4$olUX%^;}*LCjNllgmKH z@e63J6D3ln9=bI0D!c z6677VduZ?cl~u^YaqM4qAHA+8yNUV>cq`IYO4JI-fMvu(tW21xLoL4}lft=M^&1)Pk7@ zB>G$X-C?C-0?_?DB@(0usj1#SyFcgnfnSby3kEHY zZnGN$kmVO=oW)f75m)h<-PX05vxln1;Gdm_5=ld-`rD5N(E2HMy?mGz*TF)SHC4GZQl8eapfXD&s!{KMmYB6Dhc;~;zKEq5)joKT@ zpYFmkBgd5isnGEiuDHIX1nDUUCIf}NF2e z=!mfV-RGm*-NP@;n25XSzCBLNeX|tk0uvlR$=;#9ZFRnt_ln1fb?3l|3@OZZLIa3- zqa=I29K1@mu~H$&dMIdE66YfR_F;=G1Q9?Qv|ULGYOK%o_1J^!Lk)4@OkPC)D$JXI z`4vN6bZ}DT{t}v44kZYDu!9isX)&==u!!`1XhDP-h(X8DZEm;XZl(B5+t9VuN%|>n zc!6G7=_H>{YW>w3(cMMQeIymlyAl@%oubV7^tJ3iAET~wjY7-#;tU_nJELJi7$zY$ z{O>MsZdf%zSHqK>TrQB4K5}dIE&5+Fxum_7{Ss~D5`8YsGyx1XegFA*7*)WwM~C^4 zw!8M~--kFTc|_(Tg^Y0fx4c#2k0TZQxl$uy?iqiFxjrCHMW?=_V!GNpxrf1;FhIXg z<@S5atH0aYptsRK71wq}iM#hNcMiH6_IQ1Llrb0RB{2~a5QJe73?-!+hq2;d&ul(b zN(#!O5jD!zUg^}xl6mTDbZe0=y`5XS>Jl~+f+gF>vO9-b9E|Oh!T9P(f<H#B<2L^Fs!R zBsST$(=4VfS01vZ^h$EKE-I{62?vf9B|<5+|IEaTG_#79<7D>s>}>?Csv5q?&9Yzi zT?Hd>|1P&mNMpln#gJ={3?}puZTI=BH_z6ij2Qi7FZGU%Xa?H}B?!sS?B&_YF~vGa z&-j4xt_GJnSEQEbQ3_3vccNeaU3jtLh=Q8Ee30wNzfq=?!{;7EErRn+VwJ6wNRpCf zw2{%HtZK2&&3cuNKS1+9J8+k3rV+>aUkCmWB_htqv-a#{*Od@V)6I44{QCOuE#GF~ zf%gt`5!2?3i;H>R9OU*rFFj9slfq^0HhKm+)oiwpF0yn{?&6nB!L zx;T2wvMKBI1B}6*E=oxDg?C4NN6d@p?hbk2{NqFM2huHuQ1lE>P?>nlMzR{Opd3B} zhctJOL;xBGM9~sO&)xq^uhd0c>70r%n* z&|8HQWm15O5X-@c_=Rfn`NsQN^p7>~B2}b2v02XgyI6ci>$;zIFAjP|Q?gSlotxXQ zRpsv!s#>hlRK6z3q-FG!ze7jbm*3!^Uc$~7pL`)&=0zq$(do+u=ADHavS4)S2u^du zN{Yh!ql1aG6u%!#^~BLtoe@0G&r_y8wWyiIO^=Byqn}p8{q+T86ckr!neg4t9^+cP z*@{k12ARA7dxPZH(?GH3gPV09Da3UTPnf68Mjhb z&JQ8`mS%HmT0zbKUNM@~ykU`(7oYIqm!T07whLEc_nYDRL!c?}r2yC-~BkVuL%? 
zJF=4<4#>W5xF0j!G@=@6rzrN}wdAO-<+b~m z44Pn&0T9_mS53374MnSc)&dZ&hNg{S5%>w~Z^iT%AP(qZ(`~g6lCQIXD4n;aC4BlY z$X`7qW*+?`1sG=xZr9K4)E5C-r}URGNbn)q#Dg;^*~bNWCC8gn{2 zYQsk9?q*H7lrO@u$vo463zIDFaJ#3)o`!bD- zGSriHE}9Y$LHnbt@4{ZNgDy+2qO88Qe}RjWqFhaKfW?1#uOZ%B4iDs{F#lDIkp(bi z;m6wS_{cpadUZ~Yl9x6uL@7L5{C{~;PdTb7%32fwX@UbYkka^T&?kkVw1^%CL+&F&$VjU+kmRQovshnm&$lPvcw~_v=VT(Q zmrRJPm`C%`%)oL$l~OqrEFINc{!C$VReiD5B*r=t=`cu5+t*7@djWdh>8R#O;fWkC)rUO*GLLUr`(s z5hWk=7%lIGx1-*z{-vKYnlsjiAQNme?^Hl~A>fxSvwW0_IOVbq(fy;=yJo2c;^ea{ zyuu&JghBZ!-lg36z!$WXp(6ndcXG-zd^ell_?^S&wzLi7 z2iHQav;j=6T&C|ch;mhEz&3UN2L>NG>dSV)Z~5MASbUx9u9M+_-MT)tXb?k`96XaL z7<{0BD5v|R%5QR=a%-gKhu&Etgj_aUR=T55@E$&hyvH4ro&4J}~j z!@wUIawIeEKH}Pq8IoC?anpQij6sZz6Gl5BNh4zV<e;z_SlvO?}G z;P~+4U$``v{`eUgiXh6eOGtvMa5(uat$xwekS5yF^9;7Gk7!@D4h2em-UW$e9R}V|C^8_H3VzQy6(;{7xz=0wTS?@m zey)ACnA%!YYew}nChDr7fJZ6i6PR#8^^pcs0}g-&zkbF-FR4<-%3@}E0Lv&yDpotk zvBY)=uZd%|p)8MmD=)Nn$I)i^U4;0Ftm#uD7Pk6S$>|J<8%UX>+nMsk6Go!0K~>?+^Wowq63zZebOGGQ0l-I^5Anhb3o0X(%F z{nJrT>$}O;2)>G>=YB2o<-&u-7VG!P25%(qW#xXoNhEu8V<4sre%kFFQg`vj#rB_l z)D`Z;HLxR?B}V2-?j!d;tetCP_et^RMKdnR8Rk#8RT7c*hw%=lUN%Ok<9^RO`}nJk zMRV>d=&pr`IcKhBRg|BlB)AdcUYo^1L}b4;_~%$NBs1cajaxEQT(u4=vN8cNH+hjK;2mDjJ zzx2vQZKVS=q-OYe9%7RG<|bRbw-=)Y1ub zXY=jzQBxC_hH4(^;`Qiqhn3?-dIF3*Cb+Jpta$frtj}gMQFHYki#r+_`@f2=f-TCX z3(vFc!h+Hu-3<~d-MrEwDWQUt3W_w+v6M8@C8czOAV@DDC5?pi(kKEeo$~EB|6t~t zYv!DDSDRm=Pi6;)lJAL+C8i1vwtJQSB!h|Y*n`k&rlBVpsh*zwDI04FuUn&DkADh{vtXBfYZIm@UAC}aI zP1OJiaL&;w+*HOfp+kSKr~qMr5!oG%Tq;7XXC_2tziIQf%=6=xE(5{NYe%ORLi zGQQo&mifkBuWNkwmu1-`tR;u3Wq?z6kF?(TBF3URv5>l3?dTL|ysO15x*{LMTLBeY z^cWuKZ~+6Af6$fMVXUY|A~HG&9<pat#0unIchis#2 zZeVzth_UL>uucAL8wGW&?Sj!jnA~EP7gF zkwowU!l7(epT|?TZ1elKjYhWBDqDAbf2zEf`?RiZY5>A9Z2-ry;sd+ z3D~E0jv6dWwKh$*w(GMlE2cn}FQ6PvVEG;Cm|7bkR|fak(U=fYH7AFqE%-uS~? 
zJ2l?H3 zioac+JxnwkH@bUIGsHvrHm%rMNXf58IBjEG-#BOZ##L6#Kg({G8b98;%_fc2^gEzL zg6dbC|8P#n_{j4AQm-sneY7eXG%h~Z(Ee6aB(p^ZC(6YDDz(MPb@ym7h?%6~KPRpz zxrQ`yW0(#!M}FDguajcLfJBu+fym>w>6-r00e zgS&i{4wg9BvAA`;NHT}D`8K5{X2ETsI(3+}T%PSkf|@1yli&f#hJj2efrZHaoh=m< z(NWV&tAcVEx+(^&nf;{;b*{O^?kt1s6B_To?jV?$2!U8{`J+-u5Jw?1L8H+vkgoRJ~R+-`V9dz*+b>K#Ftdp?-z^s|(|&7TWcLB}Lga2P&nlv1 z>5n8jK|5obX-H_qAX287ZvPbgcD5ssI-{ME8@Ok&qj*z%;X*9O&thb(8yVK4VEAI| zU~JPo3+ua@_n=@gwLjO|vYi*I7JOS)EVMr{eVdDrkE$%)T$>1FSHQKY!NpDVQg(&F zH!4(48#98n5;1(|aqB*Zc}3N&WE<(i;b6_b#I&EBVjy^~B&cy5z$1+KB;m4ULi77R zg;R9qta}rj+b`fk$SmF$r`&2Od$JkEU9pYY=0kzA$3hC{FBnvvEq+uF&$YRmZK@5N zN5oWbH>z@u3mOxm&Z1m?Cl-Wv7i~OQtC=}#6r0|y+wSYccl{!T>@_mc-p0OoDDVDB zd*~Z)@P|);v;C<18VSR2*77%gav{@R^b5FVas7Jtyp@gq>2*HOH#66CeZwX%ACFH> zqwsPhC%q4UOe2oF9G8VoJ^0274CY1_8G3NKHdJT+XGZgUbx+Nr@(2aXz0HIKRXLx$ z3?AnrJCFI7_8!pr91@+BC+71fROwH3Dbu|^e}33#FI+q~#Kyt+%c?mEggxUpyJmgC zQ8F8t(9A(q`NEqBj&dC2M7clqrIxQ0`%$*y)7ElM*p+nCA(ykU;yd+>*WFJAD%b@Q z^b`SrRP);BW?MPrv{=ISVqa%pe0(mtqF4JY=+!GW6-spj6`9%04&}B^cd1!Tw6USt$MaP zY&cUyj!N&Q-I5Ci6OSwY@JM^ds93@z-~V9omx0o57OO4c_}ddN`54lOuD*u^+bg$G zkxx|Xwn@R_8qOYScX{<|=t zTNR)Ra}wn^8Dt4~0JDhAWZ&uQ+O(PSz;m;J|8u?K4Lc(vQ=Vn7|96M{C(T+R6)9@< z^raV9WOG`M7a~)uQR4Fd@47L2ORjsn)T0E#|!VvNjV!uWr>-nXn;H z-J|O94_DT$+3IIHr13W8AaxfDYf|<+rI3X8SGY$zU?!}2Z?8S4Z?ebBz4we0MrPw% zyQW+vJL3{a_=4lL%Si66;g>hvyzPd=?I!(hg!IgSugCX4rUj2_xoTUjOv>U4 zsl!e`q-fR4*y2TX`G5aeuTpz)^SU`>Xwv|^Az!Wg+HbPG_~hT9ki8}A#W;~-L$Jx# z^WIWGONJhAYhp(eLu3REDCp$wAdzC_`sY~la4>Nc^caGwm$qwq)mT!cbtAFLkV(4` zW{(K{t1^@I9RIQuj?R$L$}&1B@U#_c0hT@f9T&|YcZfkLca4bhc83<$xl8~=PCv6bWi<;U8R{Y1{i zjJRgmZ{pAv!HPt~Q^bPy&~HP1my3{J z#O3>krk8z*4=>)dBVZyjVGFkmJ`}YIUi-pxd4U5vyIRUADJRBm4nO=xp!A(#ul~z+ z>X%d3q3rveMCke<^0D*sa?=({06lp_>E;w-+8&!!;I{C0n@#W*VqKB&JiVmNw_dqG zpSV1yKaqQeXZ2p8ObAf4t~@3b5rnUCvN#;HbU=G^|D-W7O(N>+TJI!df+cEo`sBG> zrE~<1J5`!1XBkUlKA4fhS#K_0i4Tv{2FrPKXULY4GSTd{wO^kwwbGBqD@d=KPu5tm z&aJe5KI$^5Y%2?Nkj*3z4SdTDzSt^1P1(s>j49E_sy2%(&dIAe_rRO#K>i-TSvPGa 
zKk%FNJ`5j0k^9iAr5Pj-X`joz*b?depquIXk}0n0-o+!(#s$-TXP;tHS&5xV3(bcw95%H`yfy zIL!3Lf^zc=S`N5HPNb@9BU-P184#8dn&bvH`uD`J9+7opzE_x>Se> z@k+j4k`Pz^{3M765t3<~(v~UbiJHt`MTFWJ)*Pq@m)os!)Pv&2dYky6cbd!TKg21Q z+kWO4YYL9P#3P%Qw@0GBq`d~2f1ZD7ZFf|W)dY&SG7jGDJsrAB!0_DrKCsHtZZ|i` zkO?g{r@2UdB2Qka>i;)q} zf1?kxi2b)ck7w1FfA^0iNlqmRouFY{+|%x^i@5`ysKmv>dR4Cms@DuCFsnh=BkN%} zz=>ryATbrfof$JC6V$2tCC&dX@gyW!4o~e3@@sAl^o1*;J4zMq*o2W=-c297Jr%9l zlGwSXjCS;pSobUP|8nbsj9~ zQ*G>VSm4^JcP`rDUR%JXK<}MjiAh+knC1C6edqgghh82ybkp>GTuh z!~wl&y3JFyPNrVCqdw5(?(Ah<=-5-(pxDmvPuNE*-XVW}=0}EVx{uC@>AT0S$P}f! z(8qQMIqUZO+;oqp{jX%_H|y-c&ckek=SNObdvNC&6jmxoAxW|`y+Hq4CAv7-8;PG& z=-@h+dD^qN>8>V6(9dK2oBQBlXRYhZ*g4tV$)%@$>kB6**V1{Cw4G~auL9o>OK`3; zK~Cb{80}rA0i@zOIs1ueLFn`TthQG#oIVOa67lyj-2qc&tW9PHRk(s#)9wAXs zRJ)H~10{x*LGz&RNv2{G)s#OGEG~JUFiu0(w@zFP#~rWV+gBY)KYgB|6&&sVr+&nM zmruFWu7W({#^@}_RKE@dB7ByetUgD1tO*;2;r0kY9xfIn-^T{;Y1MglK~j%_%Ad*ImLO%<>^wX`CR(;c*!>EV)KZzHf5 zZ<(+G=8M8G?Djy|g*gq5g0eTnPU1KngIWv4JfhTawWQD+6)f(W<%W@sNd(FZ06Q~j zvhozIMm-)q!f#IJIYv+%yCS>8vh^qX0(@4I6039xLy|{xuJ$^J%X=`?i5eNXX0TeV zQfY48>1mRt9~RYQqzV&3Y2y`mlJ!|9s2kS5gjsNlkpZ@c%7FLDW{X0R{xCI44(hwJ z!DLtRJpFmcTjIB7#-H_sE|#094&SHv^}2qxCzEo$*_Cm7@87@G_CZ^J#G~Wg$ph0C z-MKJ3l1hJzK93)r?*!HJ7o}x91}>@hi_V^jAId=(_gi!o(Z<2FdCoAA4=!PN_-zLxcrKgs7CBJ!YL6P@$4rDyh`Dogd0{=ah%PG8}BCz&f35 zxa{#O8iHL@=ybK$BXy}&0)e@@xIp}4k-v$%ocxPj9~cNhh18lR7mGgwr073C#jg@+ zO7LUN#p9>2BFOZi+RO+sg?3tyy2-#8V7_htaw#AwzDWDTtXs!4wbEuyV!#C4huH4jyqmm;|6?s5<&>mcI#8fgGCZWx3=}82M#BDk?2TK|_bMbgnywb7E zqC`EprhY1NL(65^eXxjX-Txz;m8&H}RaOc5N`9H1HQn^(l~QIr<8XjSedb*o=B+#A zD7Q4f8ft{a%)MGI7taP>P$S$(zZ4?|IPy40`F(E$F7F-&j+oBGyYEY*N><1t%tQpR7+U)_dz#KByT0H z|BQ)!41dC#VEbs}9tWh3NKX+zm|0^d?4a$^j;s}+o+t87RJudSN%4r=nC92K-HU!a z^{60pNZFxt@xUkos|if#N^8dgyEkb$-@h<;&o9!}n4Cxxb2Y>MF-He7si1}8GimT( zbMZBeKi}sZhlsLm_GWp0ua{t0XDotiEFSlK=6@y?MA6q(m&HHZ*ls8sAuxMbLI_Ht zn3;G+e+%J)N9W(rGFI{fDzEL+lzE9l^T>q$>*RBoky)oix78R*=RM3Q^w$e&?^tbI zU?+_1|6BkdO4jjyZh=VuhMrq+xj0{b-*m&(Q#mAWa#;0b3Oik2E`e=5bC@_W7K`H~ 
z;sv<5h?yQc?#~emHmsrWZTK%o{S~#FYPy8ziHKnf);Kfa$JN2urg#bT!lYtwsjqPUyeo`J*R>BENI*Yc%riImjZ$;!m zDG^9az_CKO)?zbVV?$`qwq@OxSs(pC7k zP7bo4J`A>v7bkeT88{)-OUl?#((je!TU{)O9g~Pd*-)Yc!+joozPh@+A%*46gTy0= z?h0!GZj=JOHy*Ub^MLCIWxKl6=jQ|R_%HO&s+!qiR0^w};=NLiiFrbe@*+tBT3&*K z;{uSn&lfMu{5~##^0si**qpfc@(2P;R+3^W%LlkBdaS*)0rm~UUh)&g34ZG7Q@<>p zXFi~%^?srKf(u<-g+$q=u1e6-(@&>F`&9F9&rXzam`m~V8zV(t>eMOrsZ*CJV1i0J z_5{VNTCD^dhQww Z*92R|Jvv!ULn#2aG*q;eEB~_y`X9!4)eHat diff --git a/api/core/tools/provider/builtin/dalle/dalle.py b/api/core/tools/provider/builtin/dalle/dalle.py deleted file mode 100644 index 5bd16e49e85e29..00000000000000 --- a/api/core/tools/provider/builtin/dalle/dalle.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.dalle.tools.dalle2 import DallE2Tool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class DALLEProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - DallE2Tool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={"prompt": "cute girl, blue eyes, white hair, anime style", "size": "small", "n": 1}, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/dalle/dalle.yaml b/api/core/tools/provider/builtin/dalle/dalle.yaml deleted file mode 100644 index 37cf93c28aae58..00000000000000 --- a/api/core/tools/provider/builtin/dalle/dalle.yaml +++ /dev/null @@ -1,61 +0,0 @@ -identity: - author: Dify - name: dalle - label: - en_US: DALL-E - zh_Hans: DALL-E 绘画 - pt_BR: DALL-E - description: - en_US: DALL-E art - zh_Hans: DALL-E 绘画 - pt_BR: DALL-E art - icon: icon.png - tags: - - image - - productivity -credentials_for_provider: - openai_api_key: - type: secret-input - required: true - label: - en_US: OpenAI API key - 
zh_Hans: OpenAI API key - pt_BR: OpenAI API key - help: - en_US: Please input your OpenAI API key - zh_Hans: 请输入你的 OpenAI API key - pt_BR: Please input your OpenAI API key - placeholder: - en_US: Please input your OpenAI API key - zh_Hans: 请输入你的 OpenAI API key - pt_BR: Please input your OpenAI API key - openai_organization_id: - type: text-input - required: false - label: - en_US: OpenAI organization ID - zh_Hans: OpenAI organization ID - pt_BR: OpenAI organization ID - help: - en_US: Please input your OpenAI organization ID - zh_Hans: 请输入你的 OpenAI organization ID - pt_BR: Please input your OpenAI organization ID - placeholder: - en_US: Please input your OpenAI organization ID - zh_Hans: 请输入你的 OpenAI organization ID - pt_BR: Please input your OpenAI organization ID - openai_base_url: - type: text-input - required: false - label: - en_US: OpenAI base URL - zh_Hans: OpenAI base URL - pt_BR: OpenAI base URL - help: - en_US: Please input your OpenAI base URL - zh_Hans: 请输入你的 OpenAI base URL - pt_BR: Please input your OpenAI base URL - placeholder: - en_US: Please input your OpenAI base URL - zh_Hans: 请输入你的 OpenAI base URL - pt_BR: Please input your OpenAI base URL diff --git a/api/core/tools/provider/builtin/dalle/tools/dalle2.py b/api/core/tools/provider/builtin/dalle/tools/dalle2.py deleted file mode 100644 index fbd7397292155e..00000000000000 --- a/api/core/tools/provider/builtin/dalle/tools/dalle2.py +++ /dev/null @@ -1,66 +0,0 @@ -from base64 import b64decode -from typing import Any, Union - -from openai import OpenAI -from yarl import URL - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DallE2Tool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - openai_organization = self.runtime.credentials.get("openai_organization_id", None) - if not openai_organization: - 
openai_organization = None - openai_base_url = self.runtime.credentials.get("openai_base_url", None) - if not openai_base_url: - openai_base_url = None - else: - openai_base_url = str(URL(openai_base_url) / "v1") - - client = OpenAI( - api_key=self.runtime.credentials["openai_api_key"], - base_url=openai_base_url, - organization=openai_organization, - ) - - SIZE_MAPPING = { - "small": "256x256", - "medium": "512x512", - "large": "1024x1024", - } - - # prompt - prompt = tool_parameters.get("prompt", "") - if not prompt: - return self.create_text_message("Please input prompt") - - # get size - size = SIZE_MAPPING[tool_parameters.get("size", "large")] - - # get n - n = tool_parameters.get("n", 1) - - # call openapi dalle2 - response = client.images.generate(prompt=prompt, model="dall-e-2", size=size, n=n, response_format="b64_json") - - result = [] - - for image in response.data: - result.append( - self.create_blob_message( - blob=b64decode(image.b64_json), - meta={"mime_type": "image/png"}, - save_as=self.VariableKey.IMAGE.value, - ) - ) - - return result diff --git a/api/core/tools/provider/builtin/dalle/tools/dalle2.yaml b/api/core/tools/provider/builtin/dalle/tools/dalle2.yaml deleted file mode 100644 index e43e5df8cddd9b..00000000000000 --- a/api/core/tools/provider/builtin/dalle/tools/dalle2.yaml +++ /dev/null @@ -1,74 +0,0 @@ -identity: - name: dalle2 - author: Dify - label: - en_US: DALL-E 2 - zh_Hans: DALL-E 2 绘画 - description: - en_US: DALL-E 2 is a powerful drawing tool that can draw the image you want based on your prompt - zh_Hans: DALL-E 2 是一个强大的绘画工具,它可以根据您的提示词绘制出您想要的图像 - pt_BR: DALL-E 2 is a powerful drawing tool that can draw the image you want based on your prompt -description: - human: - en_US: DALL-E is a text to image tool - zh_Hans: DALL-E 是一个文本到图像的工具 - pt_BR: DALL-E is a text to image tool - llm: DALL-E is a tool used to generate images from text -parameters: - - name: prompt - type: string - required: true - label: - en_US: Prompt - zh_Hans: 提示词 
- pt_BR: Prompt - human_description: - en_US: Image prompt, you can check the official documentation of DallE 2 - zh_Hans: 图像提示词,您可以查看 DallE 2 的官方文档 - pt_BR: Image prompt, you can check the official documentation of DallE 2 - llm_description: Image prompt of DallE 2, you should describe the image you want to generate as a list of words as possible as detailed - form: llm - - name: size - type: select - required: true - human_description: - en_US: used for selecting the image size - zh_Hans: 用于选择图像大小 - pt_BR: used for selecting the image size - label: - en_US: Image size - zh_Hans: 图像大小 - pt_BR: Image size - form: form - options: - - value: small - label: - en_US: Small(256x256) - zh_Hans: 小(256x256) - pt_BR: Small(256x256) - - value: medium - label: - en_US: Medium(512x512) - zh_Hans: 中(512x512) - pt_BR: Medium(512x512) - - value: large - label: - en_US: Large(1024x1024) - zh_Hans: 大(1024x1024) - pt_BR: Large(1024x1024) - default: large - - name: n - type: number - required: true - human_description: - en_US: used for selecting the number of images - zh_Hans: 用于选择图像数量 - pt_BR: used for selecting the number of images - label: - en_US: Number of images - zh_Hans: 图像数量 - pt_BR: Number of images - form: form - default: 1 - min: 1 - max: 10 diff --git a/api/core/tools/provider/builtin/dalle/tools/dalle3.py b/api/core/tools/provider/builtin/dalle/tools/dalle3.py deleted file mode 100644 index a8c647d71e69e0..00000000000000 --- a/api/core/tools/provider/builtin/dalle/tools/dalle3.py +++ /dev/null @@ -1,115 +0,0 @@ -import base64 -import random -from typing import Any, Union - -from openai import OpenAI -from yarl import URL - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DallE3Tool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - openai_organization = 
self.runtime.credentials.get("openai_organization_id", None) - if not openai_organization: - openai_organization = None - openai_base_url = self.runtime.credentials.get("openai_base_url", None) - if not openai_base_url: - openai_base_url = None - else: - openai_base_url = str(URL(openai_base_url) / "v1") - - client = OpenAI( - api_key=self.runtime.credentials["openai_api_key"], - base_url=openai_base_url, - organization=openai_organization, - ) - - SIZE_MAPPING = { - "square": "1024x1024", - "vertical": "1024x1792", - "horizontal": "1792x1024", - } - - # prompt - prompt = tool_parameters.get("prompt", "") - if not prompt: - return self.create_text_message("Please input prompt") - # get size - size = SIZE_MAPPING[tool_parameters.get("size", "square")] - # get n - n = tool_parameters.get("n", 1) - # get quality - quality = tool_parameters.get("quality", "standard") - if quality not in {"standard", "hd"}: - return self.create_text_message("Invalid quality") - # get style - style = tool_parameters.get("style", "vivid") - if style not in {"natural", "vivid"}: - return self.create_text_message("Invalid style") - - # call openapi dalle3 - response = client.images.generate( - prompt=prompt, model="dall-e-3", size=size, n=n, style=style, quality=quality, response_format="b64_json" - ) - - result = [] - - for image in response.data: - mime_type, blob_image = DallE3Tool._decode_image(image.b64_json) - blob_message = self.create_blob_message( - blob=blob_image, meta={"mime_type": mime_type}, save_as=self.VariableKey.IMAGE.value - ) - result.append(blob_message) - return result - - @staticmethod - def _decode_image(base64_image: str) -> tuple[str, bytes]: - """ - Decode a base64 encoded image. If the image is not prefixed with a MIME type, - it assumes 'image/png' as the default. 
- - :param base64_image: Base64 encoded image string - :return: A tuple containing the MIME type and the decoded image bytes - """ - if DallE3Tool._is_plain_base64(base64_image): - return "image/png", base64.b64decode(base64_image) - else: - return DallE3Tool._extract_mime_and_data(base64_image) - - @staticmethod - def _is_plain_base64(encoded_str: str) -> bool: - """ - Check if the given encoded string is plain base64 without a MIME type prefix. - - :param encoded_str: Base64 encoded image string - :return: True if the string is plain base64, False otherwise - """ - return not encoded_str.startswith("data:image") - - @staticmethod - def _extract_mime_and_data(encoded_str: str) -> tuple[str, bytes]: - """ - Extract MIME type and image data from a base64 encoded string with a MIME type prefix. - - :param encoded_str: Base64 encoded image string with MIME type prefix - :return: A tuple containing the MIME type and the decoded image bytes - """ - mime_type = encoded_str.split(";")[0].split(":")[1] - image_data_base64 = encoded_str.split(",")[1] - decoded_data = base64.b64decode(image_data_base64) - return mime_type, decoded_data - - @staticmethod - def _generate_random_id(length=8): - characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - random_id = "".join(random.choices(characters, k=length)) - return random_id diff --git a/api/core/tools/provider/builtin/dalle/tools/dalle3.yaml b/api/core/tools/provider/builtin/dalle/tools/dalle3.yaml deleted file mode 100644 index 0cea8af761e1e5..00000000000000 --- a/api/core/tools/provider/builtin/dalle/tools/dalle3.yaml +++ /dev/null @@ -1,123 +0,0 @@ -identity: - name: dalle3 - author: Dify - label: - en_US: DALL-E 3 - zh_Hans: DALL-E 3 绘画 - pt_BR: DALL-E 3 - description: - en_US: DALL-E 3 is a powerful drawing tool that can draw the image you want based on your prompt, compared to DallE 2, DallE 3 has stronger drawing ability, but it will consume more resources - zh_Hans: DALL-E 3 
是一个强大的绘画工具,它可以根据您的提示词绘制出您想要的图像,相比于DallE 2, DallE 3拥有更强的绘画能力,但会消耗更多的资源 - pt_BR: DALL-E 3 is a powerful drawing tool that can draw the image you want based on your prompt, compared to DallE 2, DallE 3 has stronger drawing ability, but it will consume more resources -description: - human: - en_US: DALL-E is a text to image tool - zh_Hans: DALL-E 是一个文本到图像的工具 - pt_BR: DALL-E is a text to image tool - llm: DALL-E is a tool used to generate images from text -parameters: - - name: prompt - type: string - required: true - label: - en_US: Prompt - zh_Hans: 提示词 - pt_BR: Prompt - human_description: - en_US: Image prompt, you can check the official documentation of DallE 3 - zh_Hans: 图像提示词,您可以查看 DallE 3 的官方文档 - pt_BR: Image prompt, you can check the official documentation of DallE 3 - llm_description: Image prompt of DallE 3, you should describe the image you want to generate as a list of words as possible as detailed - form: llm - - name: size - type: select - required: true - human_description: - en_US: selecting the image size - zh_Hans: 选择图像大小 - pt_BR: selecting the image size - label: - en_US: Image size - zh_Hans: 图像大小 - pt_BR: Image size - form: form - options: - - value: square - label: - en_US: Squre(1024x1024) - zh_Hans: 方(1024x1024) - pt_BR: Squre(1024x1024) - - value: vertical - label: - en_US: Vertical(1024x1792) - zh_Hans: 竖屏(1024x1792) - pt_BR: Vertical(1024x1792) - - value: horizontal - label: - en_US: Horizontal(1792x1024) - zh_Hans: 横屏(1792x1024) - pt_BR: Horizontal(1792x1024) - default: square - - name: n - type: number - required: true - human_description: - en_US: selecting the number of images - zh_Hans: 选择图像数量 - pt_BR: selecting the number of images - label: - en_US: Number of images - zh_Hans: 图像数量 - pt_BR: Number of images - form: form - min: 1 - max: 1 - default: 1 - - name: quality - type: select - required: true - human_description: - en_US: selecting the image quality - zh_Hans: 选择图像质量 - pt_BR: selecting the image quality - label: - en_US: Image 
quality - zh_Hans: 图像质量 - pt_BR: Image quality - form: form - options: - - value: standard - label: - en_US: Standard - zh_Hans: 标准 - pt_BR: Standard - - value: hd - label: - en_US: HD - zh_Hans: 高清 - pt_BR: HD - default: standard - - name: style - type: select - required: true - human_description: - en_US: selecting the image style - zh_Hans: 选择图像风格 - pt_BR: selecting the image style - label: - en_US: Image style - zh_Hans: 图像风格 - pt_BR: Image style - form: form - options: - - value: vivid - label: - en_US: Vivid - zh_Hans: 生动 - pt_BR: Vivid - - value: natural - label: - en_US: Natural - zh_Hans: 自然 - pt_BR: Natural - default: vivid diff --git a/api/core/tools/provider/builtin/devdocs/_assets/icon.svg b/api/core/tools/provider/builtin/devdocs/_assets/icon.svg deleted file mode 100644 index c7a19fabfb18bf..00000000000000 --- a/api/core/tools/provider/builtin/devdocs/_assets/icon.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/devdocs/devdocs.py b/api/core/tools/provider/builtin/devdocs/devdocs.py deleted file mode 100644 index 446c1e548935c0..00000000000000 --- a/api/core/tools/provider/builtin/devdocs/devdocs.py +++ /dev/null @@ -1,21 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.devdocs.tools.searchDevDocs import SearchDevDocsTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class DevDocsProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - SearchDevDocsTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "doc": "python~3.12", - "topic": "library/code", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/devdocs/devdocs.yaml b/api/core/tools/provider/builtin/devdocs/devdocs.yaml deleted 
file mode 100644 index 7552f5a4973f7e..00000000000000 --- a/api/core/tools/provider/builtin/devdocs/devdocs.yaml +++ /dev/null @@ -1,13 +0,0 @@ -identity: - author: Richards Tu - name: devdocs - label: - en_US: DevDocs - zh_Hans: DevDocs - description: - en_US: Get official developer documentations on DevDocs. - zh_Hans: 从DevDocs获取官方开发者文档。 - icon: icon.svg - tags: - - search - - productivity diff --git a/api/core/tools/provider/builtin/devdocs/tools/searchDevDocs.py b/api/core/tools/provider/builtin/devdocs/tools/searchDevDocs.py deleted file mode 100644 index 57cf6d7a308dba..00000000000000 --- a/api/core/tools/provider/builtin/devdocs/tools/searchDevDocs.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Any, Union - -import requests -from pydantic import BaseModel, Field - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class SearchDevDocsInput(BaseModel): - doc: str = Field(..., description="The name of the documentation.") - topic: str = Field(..., description="The path of the section/topic.") - - -class SearchDevDocsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invokes the DevDocs search tool with the given user ID and tool parameters. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Any]): The parameters for the tool, including 'doc' and 'topic'. - - Returns: - ToolInvokeMessage | list[ToolInvokeMessage]: The result of the tool invocation, - which can be a single message or a list of messages. 
- """ - doc = tool_parameters.get("doc", "") - topic = tool_parameters.get("topic", "") - - if not doc: - return self.create_text_message("Please provide the documentation name.") - if not topic: - return self.create_text_message("Please provide the topic path.") - - url = f"https://documents.devdocs.io/{doc}/{topic}.html" - response = requests.get(url) - - if response.status_code == 200: - content = response.text - return self.create_text_message(self.summary(user_id=user_id, content=content)) - else: - return self.create_text_message( - f"Failed to retrieve the documentation. Status code: {response.status_code}" - ) diff --git a/api/core/tools/provider/builtin/devdocs/tools/searchDevDocs.yaml b/api/core/tools/provider/builtin/devdocs/tools/searchDevDocs.yaml deleted file mode 100644 index 2476db9da42d60..00000000000000 --- a/api/core/tools/provider/builtin/devdocs/tools/searchDevDocs.yaml +++ /dev/null @@ -1,34 +0,0 @@ -identity: - name: searchDevDocs - author: Richards Tu - label: - en_US: Search Developer Docs - zh_Hans: 搜索开发者文档 -description: - human: - en_US: A tools for searching for a specific topic and path in DevDocs based on the provided documentation name and topic. Don't for get to add some shots in the system prompt; for example, the documentation name should be like \"vuex~4\", \"css\", or \"python~3.12\", while the topic should be like \"guide/actions\" for Vuex 4, \"display-box\" for CSS, or \"library/code\" for Python 3.12. - zh_Hans: 一个用于根据提供的文档名称和主题,在DevDocs中搜索特定主题和路径的工具。不要忘记在系统提示词中添加一些示例;例如,文档名称应该是\"vuex~4\"、\"css\"或\"python~3.12\",而主题应该是\"guide/actions\"用于Vuex 4,\"display-box\"用于CSS,或\"library/code\"用于Python 3.12。 - llm: A tools for searching for specific developer documentation in DevDocs based on the provided documentation name and topic. -parameters: - - name: doc - type: string - required: true - label: - en_US: Documentation name - zh_Hans: 文档名称 - human_description: - en_US: The name of the documentation. 
- zh_Hans: 文档名称。 - llm_description: The name of the documentation, such as \"vuex~4\", \"css\", or \"python~3.12\". The exact value should be identified by the user. - form: llm - - name: topic - type: string - required: true - label: - en_US: Topic name - zh_Hans: 主题名称 - human_description: - en_US: The path of the section/topic. - zh_Hans: 文档主题的路径。 - llm_description: The path of the section/topic, such as \"guide/actions\" for Vuex 4, \"display-box\" for CSS, or \"library/code\" for Python 3.12. - form: llm diff --git a/api/core/tools/provider/builtin/did/_assets/icon.svg b/api/core/tools/provider/builtin/did/_assets/icon.svg deleted file mode 100644 index c477d7cb71dea2..00000000000000 --- a/api/core/tools/provider/builtin/did/_assets/icon.svg +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/did/did.py b/api/core/tools/provider/builtin/did/did.py deleted file mode 100644 index 5af78794f625b7..00000000000000 --- a/api/core/tools/provider/builtin/did/did.py +++ /dev/null @@ -1,18 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.did.tools.talks import TalksTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class DIDProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - # Example validation using the D-ID talks tool - TalksTool().fork_tool_runtime(runtime={"credentials": credentials}).invoke( - user_id="", - tool_parameters={ - "source_url": "https://www.d-id.com/wp-content/uploads/2023/11/Hero-image-1.png", - "text_input": "Hello, welcome to use D-ID tool in Dify", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/did/did.yaml b/api/core/tools/provider/builtin/did/did.yaml deleted file mode 100644 index a70b71812e4648..00000000000000 --- 
a/api/core/tools/provider/builtin/did/did.yaml +++ /dev/null @@ -1,28 +0,0 @@ -identity: - author: Matri Qi - name: did - label: - en_US: D-ID - description: - en_US: D-ID is a tool enabling the creation of high-quality, custom videos of Digital Humans from a single image. - icon: icon.svg - tags: - - videos -credentials_for_provider: - did_api_key: - type: secret-input - required: true - label: - en_US: D-ID API Key - placeholder: - en_US: Please input your D-ID API key - help: - en_US: Get your D-ID API key from your D-ID account settings. - url: https://studio.d-id.com/account-settings - base_url: - type: text-input - required: false - label: - en_US: D-ID server's Base URL - placeholder: - en_US: https://api.d-id.com diff --git a/api/core/tools/provider/builtin/did/did_appx.py b/api/core/tools/provider/builtin/did/did_appx.py deleted file mode 100644 index c68878630d67b0..00000000000000 --- a/api/core/tools/provider/builtin/did/did_appx.py +++ /dev/null @@ -1,87 +0,0 @@ -import logging -import time -from collections.abc import Mapping -from typing import Any - -import requests -from requests.exceptions import HTTPError - -logger = logging.getLogger(__name__) - - -class DIDApp: - def __init__(self, api_key: str | None = None, base_url: str | None = None): - self.api_key = api_key - self.base_url = base_url or "https://api.d-id.com" - if not self.api_key: - raise ValueError("API key is required") - - def _prepare_headers(self, idempotency_key: str | None = None): - headers = {"Content-Type": "application/json", "Authorization": f"Basic {self.api_key}"} - if idempotency_key: - headers["Idempotency-Key"] = idempotency_key - return headers - - def _request( - self, - method: str, - url: str, - data: Mapping[str, Any] | None = None, - headers: Mapping[str, str] | None = None, - retries: int = 3, - backoff_factor: float = 0.3, - ) -> Mapping[str, Any] | None: - for i in range(retries): - try: - response = requests.request(method, url, json=data, headers=headers) - 
response.raise_for_status() - return response.json() - except requests.exceptions.RequestException as e: - if i < retries - 1 and isinstance(e, HTTPError) and e.response.status_code >= 500: - time.sleep(backoff_factor * (2**i)) - else: - raise - return None - - def talks(self, wait: bool = True, poll_interval: int = 5, idempotency_key: str | None = None, **kwargs): - endpoint = f"{self.base_url}/talks" - headers = self._prepare_headers(idempotency_key) - data = kwargs["params"] - logger.debug(f"Send request to {endpoint=} body={data}") - response = self._request("POST", endpoint, data, headers) - if response is None: - raise HTTPError("Failed to initiate D-ID talks after multiple retries") - id: str = response["id"] - if wait: - return self._monitor_job_status(id=id, target="talks", poll_interval=poll_interval) - return id - - def animations(self, wait: bool = True, poll_interval: int = 5, idempotency_key: str | None = None, **kwargs): - endpoint = f"{self.base_url}/animations" - headers = self._prepare_headers(idempotency_key) - data = kwargs["params"] - logger.debug(f"Send request to {endpoint=} body={data}") - response = self._request("POST", endpoint, data, headers) - if response is None: - raise HTTPError("Failed to initiate D-ID talks after multiple retries") - id: str = response["id"] - if wait: - return self._monitor_job_status(target="animations", id=id, poll_interval=poll_interval) - return id - - def check_did_status(self, target: str, id: str): - endpoint = f"{self.base_url}/{target}/{id}" - headers = self._prepare_headers() - response = self._request("GET", endpoint, headers=headers) - if response is None: - raise HTTPError(f"Failed to check status for talks {id} after multiple retries") - return response - - def _monitor_job_status(self, target: str, id: str, poll_interval: int): - while True: - status = self.check_did_status(target=target, id=id) - if status["status"] == "done": - return status - elif status["status"] == "error" or status["status"] 
== "rejected": - raise HTTPError(f'Talks {id} failed: {status["status"]} {status.get("error", {}).get("description")}') - time.sleep(poll_interval) diff --git a/api/core/tools/provider/builtin/did/tools/animations.py b/api/core/tools/provider/builtin/did/tools/animations.py deleted file mode 100644 index bc9d17e40d2878..00000000000000 --- a/api/core/tools/provider/builtin/did/tools/animations.py +++ /dev/null @@ -1,49 +0,0 @@ -import json -from typing import Any, Union - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.provider.builtin.did.did_appx import DIDApp -from core.tools.tool.builtin_tool import BuiltinTool - - -class AnimationsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - app = DIDApp(api_key=self.runtime.credentials["did_api_key"], base_url=self.runtime.credentials["base_url"]) - - driver_expressions_str = tool_parameters.get("driver_expressions") - driver_expressions = json.loads(driver_expressions_str) if driver_expressions_str else None - - config = { - "stitch": tool_parameters.get("stitch", True), - "mute": tool_parameters.get("mute"), - "result_format": tool_parameters.get("result_format") or "mp4", - } - config = {k: v for k, v in config.items() if v is not None and v != ""} - - options = { - "source_url": tool_parameters["source_url"], - "driver_url": tool_parameters.get("driver_url"), - "config": config, - } - options = {k: v for k, v in options.items() if v is not None and v != ""} - - if not options.get("source_url"): - raise ValueError("Source URL is required") - - if config.get("logo_url"): - if not config.get("logo_x"): - raise ValueError("Logo X position is required when logo URL is provided") - if not config.get("logo_y"): - raise ValueError("Logo Y position is required when logo URL is provided") - - animations_result = app.animations(params=options, wait=True) - - if not isinstance(animations_result, 
str): - animations_result = json.dumps(animations_result, ensure_ascii=False, indent=4) - - if not animations_result: - return self.create_text_message("D-ID animations request failed.") - - return self.create_text_message(animations_result) diff --git a/api/core/tools/provider/builtin/did/tools/animations.yaml b/api/core/tools/provider/builtin/did/tools/animations.yaml deleted file mode 100644 index 2a2036c7b2a88f..00000000000000 --- a/api/core/tools/provider/builtin/did/tools/animations.yaml +++ /dev/null @@ -1,86 +0,0 @@ -identity: - name: animations - author: Matri Qi - label: - en_US: Animations -description: - human: - en_US: Animations enables to create videos matching head movements, expressions, emotions, and voice from a driver video and image. - llm: Animations enables to create videos matching head movements, expressions, emotions, and voice from a driver video and image. -parameters: - - name: source_url - type: string - required: true - label: - en_US: source url - human_description: - en_US: The URL of the source image to be animated by the driver video, or a selection from the list of provided studio actors. - llm_description: The URL of the source image to be animated by the driver video, or a selection from the list of provided studio actors. - form: llm - - name: driver_url - type: string - required: false - label: - en_US: driver url - human_description: - en_US: The URL of the driver video to drive the animation, or a provided driver name from D-ID. - form: form - - name: mute - type: boolean - required: false - label: - en_US: mute - human_description: - en_US: Mutes the driver sound in the animated video result, defaults to true - form: form - - name: stitch - type: boolean - required: false - label: - en_US: stitch - human_description: - en_US: If enabled, the driver video will be stitched with the animationing head video. 
- form: form - - name: logo_url - type: string - required: false - label: - en_US: logo url - human_description: - en_US: The URL of the logo image to be added to the animation video. - form: form - - name: logo_x - type: number - required: false - label: - en_US: logo position x - human_description: - en_US: The x position of the logo image in the animation video. It's required when logo url is provided. - form: form - - name: logo_y - type: number - required: false - label: - en_US: logo position y - human_description: - en_US: The y position of the logo image in the animation video. It's required when logo url is provided. - form: form - - name: result_format - type: string - default: mp4 - required: false - label: - en_US: result format - human_description: - en_US: The format of the result video. - form: form - options: - - value: mp4 - label: - en_US: mp4 - - value: gif - label: - en_US: gif - - value: mov - label: - en_US: mov diff --git a/api/core/tools/provider/builtin/did/tools/talks.py b/api/core/tools/provider/builtin/did/tools/talks.py deleted file mode 100644 index d6f0c7ff179793..00000000000000 --- a/api/core/tools/provider/builtin/did/tools/talks.py +++ /dev/null @@ -1,65 +0,0 @@ -import json -from typing import Any, Union - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.provider.builtin.did.did_appx import DIDApp -from core.tools.tool.builtin_tool import BuiltinTool - - -class TalksTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - app = DIDApp(api_key=self.runtime.credentials["did_api_key"], base_url=self.runtime.credentials["base_url"]) - - driver_expressions_str = tool_parameters.get("driver_expressions") - driver_expressions = json.loads(driver_expressions_str) if driver_expressions_str else None - - script = { - "type": tool_parameters.get("script_type") or "text", - "input": tool_parameters.get("text_input"), - 
"audio_url": tool_parameters.get("audio_url"), - "reduce_noise": tool_parameters.get("audio_reduce_noise", False), - } - script = {k: v for k, v in script.items() if v is not None and v != ""} - config = { - "stitch": tool_parameters.get("stitch", True), - "sharpen": tool_parameters.get("sharpen"), - "fluent": tool_parameters.get("fluent"), - "result_format": tool_parameters.get("result_format") or "mp4", - "pad_audio": tool_parameters.get("pad_audio"), - "driver_expressions": driver_expressions, - } - config = {k: v for k, v in config.items() if v is not None and v != ""} - - options = { - "source_url": tool_parameters["source_url"], - "driver_url": tool_parameters.get("driver_url"), - "script": script, - "config": config, - } - options = {k: v for k, v in options.items() if v is not None and v != ""} - - if not options.get("source_url"): - raise ValueError("Source URL is required") - - if script.get("type") == "audio": - script.pop("input", None) - if not script.get("audio_url"): - raise ValueError("Audio URL is required for audio script type") - - if script.get("type") == "text": - script.pop("audio_url", None) - script.pop("reduce_noise", None) - if not script.get("input"): - raise ValueError("Text input is required for text script type") - - talks_result = app.talks(params=options, wait=True) - - if not isinstance(talks_result, str): - talks_result = json.dumps(talks_result, ensure_ascii=False, indent=4) - - if not talks_result: - return self.create_text_message("D-ID talks request failed.") - - return self.create_text_message(talks_result) diff --git a/api/core/tools/provider/builtin/did/tools/talks.yaml b/api/core/tools/provider/builtin/did/tools/talks.yaml deleted file mode 100644 index 88d430512923e4..00000000000000 --- a/api/core/tools/provider/builtin/did/tools/talks.yaml +++ /dev/null @@ -1,126 +0,0 @@ -identity: - name: talks - author: Matri Qi - label: - en_US: Talks -description: - human: - en_US: Talks enables the creation of realistic talking head 
videos from text or audio inputs. - llm: Talks enables the creation of realistic talking head videos from text or audio inputs. -parameters: - - name: source_url - type: string - required: true - label: - en_US: source url - human_description: - en_US: The URL of the source image to be animated by the driver video, or a selection from the list of provided studio actors. - llm_description: The URL of the source image to be animated by the driver video, or a selection from the list of provided studio actors. - form: llm - - name: driver_url - type: string - required: false - label: - en_US: driver url - human_description: - en_US: The URL of the driver video to drive the talk, or a provided driver name from D-ID. - form: form - - name: script_type - type: string - required: false - label: - en_US: script type - human_description: - en_US: The type of the script. - form: form - options: - - value: text - label: - en_US: text - - value: audio - label: - en_US: audio - - name: text_input - type: string - required: false - label: - en_US: text input - human_description: - en_US: The text input to be spoken by the talking head. Required when script type is text. - form: form - - name: audio_url - type: string - required: false - label: - en_US: audio url - human_description: - en_US: The URL of the audio file to be spoken by the talking head. Required when script type is audio. - form: form - - name: audio_reduce_noise - type: boolean - required: false - label: - en_US: audio reduce noise - human_description: - en_US: If enabled, the audio will be processed to reduce noise before being spoken by the talking head. It only works when script type is audio. - form: form - - name: stitch - type: boolean - required: false - label: - en_US: stitch - human_description: - en_US: If enabled, the driver video will be stitched with the talking head video. 
- form: form - - name: sharpen - type: boolean - required: false - label: - en_US: sharpen - human_description: - en_US: If enabled, the talking head video will be sharpened. - form: form - - name: result_format - type: string - required: false - label: - en_US: result format - human_description: - en_US: The format of the result video. - form: form - options: - - value: mp4 - label: - en_US: mp4 - - value: gif - label: - en_US: gif - - value: mov - label: - en_US: mov - - name: fluent - type: boolean - required: false - label: - en_US: fluent - human_description: - en_US: Interpolate between the last & first frames of the driver video When used together with pad_audio can create a seamless transition between videos of the same driver - form: form - - name: pad_audio - type: number - required: false - label: - en_US: pad audio - human_description: - en_US: Pad the audio with silence at the end (given in seconds) Will increase the video duration & the credits it consumes - form: form - min: 1 - max: 60 - - name: driver_expressions - type: string - required: false - label: - en_US: driver expressions - human_description: - en_US: timed expressions for animation. It should be an JSON array style string. Take D-ID documentation(https://docs.d-id.com/reference/createtalk) for more information. 
- form: form diff --git a/api/core/tools/provider/builtin/dingtalk/_assets/icon.svg b/api/core/tools/provider/builtin/dingtalk/_assets/icon.svg deleted file mode 100644 index b60653b7a59409..00000000000000 --- a/api/core/tools/provider/builtin/dingtalk/_assets/icon.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/dingtalk/dingtalk.py b/api/core/tools/provider/builtin/dingtalk/dingtalk.py deleted file mode 100644 index be1d5e099c2246..00000000000000 --- a/api/core/tools/provider/builtin/dingtalk/dingtalk.py +++ /dev/null @@ -1,8 +0,0 @@ -from core.tools.provider.builtin.dingtalk.tools.dingtalk_group_bot import DingTalkGroupBotTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class DingTalkProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - DingTalkGroupBotTool() - pass diff --git a/api/core/tools/provider/builtin/dingtalk/dingtalk.yaml b/api/core/tools/provider/builtin/dingtalk/dingtalk.yaml deleted file mode 100644 index c922c140a8badc..00000000000000 --- a/api/core/tools/provider/builtin/dingtalk/dingtalk.yaml +++ /dev/null @@ -1,16 +0,0 @@ -identity: - author: Bowen Liang - name: dingtalk - label: - en_US: DingTalk - zh_Hans: 钉钉 - pt_BR: DingTalk - description: - en_US: DingTalk group robot - zh_Hans: 钉钉群机器人 - pt_BR: DingTalk group robot - icon: icon.svg - tags: - - social - - productivity -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/dingtalk/tools/dingtalk_group_bot.py b/api/core/tools/provider/builtin/dingtalk/tools/dingtalk_group_bot.py deleted file mode 100644 index f33ad5be59b403..00000000000000 --- a/api/core/tools/provider/builtin/dingtalk/tools/dingtalk_group_bot.py +++ /dev/null @@ -1,89 +0,0 @@ -import base64 -import hashlib -import hmac -import logging -import time -import urllib.parse -from typing import Any, Union - -import httpx - -from 
core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DingTalkGroupBotTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - Dingtalk custom group robot API docs: - https://open.dingtalk.com/document/orgapp/custom-robot-access - """ - content = tool_parameters.get("content") - if not content: - return self.create_text_message("Invalid parameter content") - - access_token = tool_parameters.get("access_token") - if not access_token: - return self.create_text_message( - "Invalid parameter access_token. " - "Regarding information about security details," - "please refer to the DingTalk docs:" - "https://open.dingtalk.com/document/robots/customize-robot-security-settings" - ) - - sign_secret = tool_parameters.get("sign_secret") - if not sign_secret: - return self.create_text_message( - "Invalid parameter sign_secret. " - "Regarding information about security details," - "please refer to the DingTalk docs:" - "https://open.dingtalk.com/document/robots/customize-robot-security-settings" - ) - - msgtype = "text" - api_url = "https://oapi.dingtalk.com/robot/send" - headers = { - "Content-Type": "application/json", - } - params = { - "access_token": access_token, - } - - self._apply_security_mechanism(params, sign_secret) - - payload = { - "msgtype": msgtype, - "text": { - "content": content, - }, - } - - try: - res = httpx.post(api_url, headers=headers, params=params, json=payload) - if res.is_success: - return self.create_text_message("Text message sent successfully") - else: - return self.create_text_message( - f"Failed to send the text message, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to send message to group chat bot. 
{}".format(e)) - - @staticmethod - def _apply_security_mechanism(params: dict[str, Any], sign_secret: str): - try: - timestamp = str(round(time.time() * 1000)) - secret_enc = sign_secret.encode("utf-8") - string_to_sign = f"{timestamp}\n{sign_secret}" - string_to_sign_enc = string_to_sign.encode("utf-8") - hmac_code = hmac.new(secret_enc, string_to_sign_enc, digestmod=hashlib.sha256).digest() - sign = urllib.parse.quote_plus(base64.b64encode(hmac_code)) - - params["timestamp"] = timestamp - params["sign"] = sign - except Exception: - msg = "Failed to apply security mechanism to the request." - logging.exception(msg) diff --git a/api/core/tools/provider/builtin/dingtalk/tools/dingtalk_group_bot.yaml b/api/core/tools/provider/builtin/dingtalk/tools/dingtalk_group_bot.yaml deleted file mode 100644 index dc8a90b7193903..00000000000000 --- a/api/core/tools/provider/builtin/dingtalk/tools/dingtalk_group_bot.yaml +++ /dev/null @@ -1,52 +0,0 @@ -identity: - name: dingtalk_group_bot - author: Bowen Liang - label: - en_US: Send Group Message - zh_Hans: 发送群消息 - pt_BR: Send Group Message - icon: icon.svg -description: - human: - en_US: Sending a group message on DingTalk via the webhook of group bot - zh_Hans: 通过钉钉的群机器人webhook发送群消息 - pt_BR: Sending a group message on DingTalk via the webhook of group bot - llm: A tool for sending messages to a chat group on DingTalk(钉钉) . 
-parameters: - - name: access_token - type: secret-input - required: true - label: - en_US: access token - zh_Hans: access token - pt_BR: access token - human_description: - en_US: access_token in the group robot webhook - zh_Hans: 群自定义机器人webhook中access_token字段的值 - pt_BR: access_token in the group robot webhook - form: form - - name: sign_secret - type: secret-input - required: true - label: - en_US: secret key for signing - zh_Hans: 加签秘钥 - pt_BR: secret key for signing - human_description: - en_US: secret key for signing - zh_Hans: 加签秘钥 - pt_BR: secret key for signing - form: form - - name: content - type: string - required: true - label: - en_US: content - zh_Hans: 消息内容 - pt_BR: content - human_description: - en_US: Content to sent to the group. - zh_Hans: 群消息文本 - pt_BR: Content to sent to the group. - llm_description: Content of the message - form: llm diff --git a/api/core/tools/provider/builtin/duckduckgo/_assets/icon.svg b/api/core/tools/provider/builtin/duckduckgo/_assets/icon.svg deleted file mode 100644 index a816a6b49ebb78..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/duckduckgo/duckduckgo.py b/api/core/tools/provider/builtin/duckduckgo/duckduckgo.py deleted file mode 100644 index 8269167127b8e5..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/duckduckgo.py +++ /dev/null @@ -1,20 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.duckduckgo.tools.ddgo_search import DuckDuckGoSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class DuckDuckGoProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - DuckDuckGoSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "query": 
"John Doe", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/duckduckgo/duckduckgo.yaml b/api/core/tools/provider/builtin/duckduckgo/duckduckgo.yaml deleted file mode 100644 index f3faa060455772..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/duckduckgo.yaml +++ /dev/null @@ -1,12 +0,0 @@ -identity: - author: Yash Parmar - name: duckduckgo - label: - en_US: DuckDuckGo - zh_Hans: DuckDuckGo - description: - en_US: A privacy-focused search engine. - zh_Hans: 一个注重隐私的搜索引擎。 - icon: icon.svg - tags: - - search diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.py b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.py deleted file mode 100644 index 8bdd638f4a01d1..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from duckduckgo_search import DDGS - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DuckDuckGoAITool(BuiltinTool): - """ - Tool for performing a search using DuckDuckGo search engine. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - query_dict = { - "keywords": tool_parameters.get("query"), - "model": tool_parameters.get("model"), - } - response = DDGS().chat(**query_dict) - return self.create_text_message(text=response) diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.yaml b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.yaml deleted file mode 100644 index 21cbae6bd3e002..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.yaml +++ /dev/null @@ -1,47 +0,0 @@ -identity: - name: ddgo_ai - author: hjlarry - label: - en_US: DuckDuckGo AI Chat - zh_Hans: DuckDuckGo AI聊天 -description: - human: - en_US: Use the anonymous private chat provided by DuckDuckGo. 
- zh_Hans: 使用DuckDuckGo提供的匿名私密聊天。 - llm: Use the anonymous private chat provided by DuckDuckGo. -parameters: - - name: query - type: string - required: true - label: - en_US: Chat Content - zh_Hans: 聊天内容 - human_description: - en_US: The chat content. - zh_Hans: 要聊天的内容。 - llm_description: Key words for chat - form: llm - - name: model - type: select - required: true - options: - - value: gpt-4o-mini - label: - en_US: GPT-4o-mini - - value: claude-3-haiku - label: - en_US: Claude 3 - - value: llama-3-70b - label: - en_US: Llama 3 - - value: mixtral-8x7b - label: - en_US: Mixtral - default: gpt-3.5 - label: - en_US: Choose Model - zh_Hans: 选择模型 - human_description: - en_US: used to select the model for AI chat. - zh_Hans: 用于选择使用AI聊天的模型 - form: form diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.py b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.py deleted file mode 100644 index 396570248ae785..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.py +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Any - -from duckduckgo_search import DDGS - -from core.file.file_obj import FileTransferMethod -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DuckDuckGoImageSearchTool(BuiltinTool): - """ - Tool for performing an image search using DuckDuckGo search engine. 
- """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> list[ToolInvokeMessage]: - query_dict = { - "keywords": tool_parameters.get("query"), - "timelimit": tool_parameters.get("timelimit"), - "size": tool_parameters.get("size"), - "max_results": tool_parameters.get("max_results"), - } - response = DDGS().images(**query_dict) - result = [] - for res in response: - res["transfer_method"] = FileTransferMethod.REMOTE_URL - msg = ToolInvokeMessage( - type=ToolInvokeMessage.MessageType.IMAGE_LINK, message=res.get("image"), save_as="", meta=res - ) - result.append(msg) - return result diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.yaml b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.yaml deleted file mode 100644 index 168cface224e40..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.yaml +++ /dev/null @@ -1,88 +0,0 @@ -identity: - name: ddgo_img - author: hjlarry - label: - en_US: DuckDuckGo Image Search - zh_Hans: DuckDuckGo 图片搜索 -description: - human: - en_US: Perform image searches on DuckDuckGo and get results. - zh_Hans: 在 DuckDuckGo 上进行图片搜索并获取结果。 - llm: Perform image searches on DuckDuckGo and get results. -parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询语句 - human_description: - en_US: The search query. - zh_Hans: 搜索查询语句。 - llm_description: Key words for searching - form: llm - - name: max_results - type: number - required: true - default: 3 - label: - en_US: Max results - zh_Hans: 最大结果数量 - human_description: - en_US: The max results. 
- zh_Hans: 最大结果数量 - form: form - - name: timelimit - type: select - required: false - options: - - value: Day - label: - en_US: current day - zh_Hans: 当天 - - value: Week - label: - en_US: current week - zh_Hans: 本周 - - value: Month - label: - en_US: current month - zh_Hans: 当月 - - value: Year - label: - en_US: current year - zh_Hans: 今年 - label: - en_US: Result time limit - zh_Hans: 结果时间限制 - human_description: - en_US: Use when querying results within a specific time range only. - zh_Hans: 只查询一定时间范围内的结果时使用 - form: form - - name: size - type: select - required: false - options: - - value: Small - label: - en_US: small - zh_Hans: 小 - - value: Medium - label: - en_US: medium - zh_Hans: 中 - - value: Large - label: - en_US: large - zh_Hans: 大 - - value: Wallpaper - label: - en_US: xl - zh_Hans: 超大 - label: - en_US: image size - zh_Hans: 图片大小 - human_description: - en_US: The size of the image to be searched. - zh_Hans: 要搜索的图片的大小 - form: form diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.py b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.py deleted file mode 100644 index cbd65d2e7756e0..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Any - -from duckduckgo_search import DDGS - -from core.model_runtime.entities.message_entities import SystemPromptMessage -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -SUMMARY_PROMPT = """ -User's query: -{query} - -Here is the search engine result: -{content} - -Please summarize the result in a few sentences. -""" - - -class DuckDuckGoSearchTool(BuiltinTool): - """ - Tool for performing a search using DuckDuckGo search engine. 
- """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: - query = tool_parameters.get("query") - max_results = tool_parameters.get("max_results", 5) - require_summary = tool_parameters.get("require_summary", False) - response = DDGS().text(query, max_results=max_results) - if require_summary: - results = "\n".join([res.get("body") for res in response]) - results = self.summary_results(user_id=user_id, content=results, query=query) - return self.create_text_message(text=results) - return [self.create_json_message(res) for res in response] - - def summary_results(self, user_id: str, content: str, query: str) -> str: - prompt = SUMMARY_PROMPT.format(query=query, content=content) - summary = self.invoke_model( - user_id=user_id, - prompt_messages=[ - SystemPromptMessage(content=prompt), - ], - stop=[], - ) - return summary.message.content diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.yaml b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.yaml deleted file mode 100644 index 333c0cb093dbd2..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.yaml +++ /dev/null @@ -1,41 +0,0 @@ -identity: - name: ddgo_search - author: Yash Parmar - label: - en_US: DuckDuckGo Search - zh_Hans: DuckDuckGo 搜索 -description: - human: - en_US: Perform searches on DuckDuckGo and get results. - zh_Hans: 在 DuckDuckGo 上进行搜索并获取结果。 - llm: Perform searches on DuckDuckGo and get results. -parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询语句 - human_description: - en_US: The search query. 
- zh_Hans: 搜索查询语句。 - llm_description: Key words for searching - form: llm - - name: max_results - type: number - required: true - default: 5 - label: - en_US: Max results - zh_Hans: 最大结果数量 - form: form - - name: require_summary - type: boolean - default: false - label: - en_US: Require Summary - zh_Hans: 是否总结 - human_description: - en_US: Whether to pass the search results to llm for summarization. - zh_Hans: 是否需要将搜索结果传给大模型总结 - form: form diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_translate.py b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_translate.py deleted file mode 100644 index 396ce21b183afc..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_translate.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from duckduckgo_search import DDGS - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DuckDuckGoTranslateTool(BuiltinTool): - """ - Tool for performing a search using DuckDuckGo search engine. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - query_dict = { - "keywords": tool_parameters.get("query"), - "to": tool_parameters.get("translate_to"), - } - response = DDGS().translate(**query_dict)[0].get("translated", "Unable to translate!") - return self.create_text_message(text=response) diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_translate.yaml b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_translate.yaml deleted file mode 100644 index 78b5d0b02275b2..00000000000000 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_translate.yaml +++ /dev/null @@ -1,51 +0,0 @@ -identity: - name: ddgo_translate - author: hjlarry - label: - en_US: DuckDuckGo Translate - zh_Hans: DuckDuckGo 翻译 -description: - human: - en_US: Use DuckDuckGo's translation feature. - zh_Hans: 使用DuckDuckGo的翻译功能。 - llm: Use DuckDuckGo's translation feature. 
-parameters: - - name: query - type: string - required: true - label: - en_US: Translate Content - zh_Hans: 翻译内容 - human_description: - en_US: The translate content. - zh_Hans: 要翻译的内容。 - llm_description: Key words for translate - form: llm - - name: translate_to - type: select - required: true - options: - - value: en - label: - en_US: English - zh_Hans: 英语 - - value: zh-Hans - label: - en_US: Simplified Chinese - zh_Hans: 简体中文 - - value: zh-Hant - label: - en_US: Traditional Chinese - zh_Hans: 繁体中文 - - value: ja - label: - en_US: Japanese - zh_Hans: 日语 - default: en - label: - en_US: Choose Language - zh_Hans: 选择语言 - human_description: - en_US: select the language to translate. - zh_Hans: 选择要翻译的语言 - form: form diff --git a/api/core/tools/provider/builtin/feishu/_assets/icon.svg b/api/core/tools/provider/builtin/feishu/_assets/icon.svg deleted file mode 100644 index bf3c202abf3ff6..00000000000000 --- a/api/core/tools/provider/builtin/feishu/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/api/core/tools/provider/builtin/feishu/feishu.py b/api/core/tools/provider/builtin/feishu/feishu.py deleted file mode 100644 index 72a9333619988d..00000000000000 --- a/api/core/tools/provider/builtin/feishu/feishu.py +++ /dev/null @@ -1,7 +0,0 @@ -from core.tools.provider.builtin.feishu.tools.feishu_group_bot import FeishuGroupBotTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class FeishuProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - FeishuGroupBotTool() diff --git a/api/core/tools/provider/builtin/feishu/feishu.yaml b/api/core/tools/provider/builtin/feishu/feishu.yaml deleted file mode 100644 index a029c7edb8853b..00000000000000 --- a/api/core/tools/provider/builtin/feishu/feishu.yaml +++ /dev/null @@ -1,16 +0,0 @@ -identity: - author: Arkii Sun - name: feishu - label: - en_US: Feishu - zh_Hans: 飞书 - pt_BR: Feishu - description: - en_US: Feishu group bot - zh_Hans: 
飞书群机器人 - pt_BR: Feishu group bot - icon: icon.svg - tags: - - social - - productivity -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/feishu/tools/feishu_group_bot.py b/api/core/tools/provider/builtin/feishu/tools/feishu_group_bot.py deleted file mode 100644 index e82da8ca534b96..00000000000000 --- a/api/core/tools/provider/builtin/feishu/tools/feishu_group_bot.py +++ /dev/null @@ -1,51 +0,0 @@ -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool -from core.tools.utils.uuid_utils import is_valid_uuid - - -class FeishuGroupBotTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - API document: https://open.feishu.cn/document/client-docs/bot-v3/add-custom-bot - """ - - url = "https://open.feishu.cn/open-apis/bot/v2/hook" - - content = tool_parameters.get("content", "") - if not content: - return self.create_text_message("Invalid parameter content") - - hook_key = tool_parameters.get("hook_key", "") - if not is_valid_uuid(hook_key): - return self.create_text_message(f"Invalid parameter hook_key ${hook_key}, not a valid UUID") - - msg_type = "text" - api_url = f"{url}/{hook_key}" - headers = { - "Content-Type": "application/json", - } - params = {} - payload = { - "msg_type": msg_type, - "content": { - "text": content, - }, - } - - try: - res = httpx.post(api_url, headers=headers, params=params, json=payload) - if res.is_success: - return self.create_text_message("Text message sent successfully") - else: - return self.create_text_message( - f"Failed to send the text message, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to send message to group chat bot. 
{}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu/tools/feishu_group_bot.yaml b/api/core/tools/provider/builtin/feishu/tools/feishu_group_bot.yaml deleted file mode 100644 index 6c3f084e4dafe3..00000000000000 --- a/api/core/tools/provider/builtin/feishu/tools/feishu_group_bot.yaml +++ /dev/null @@ -1,40 +0,0 @@ -identity: - name: feishu_group_bot - author: Arkii Sun - label: - en_US: Send Group Message - zh_Hans: 发送群消息 - pt_BR: Send Group Message - icon: icon.png -description: - human: - en_US: Sending a group message on Feishu via the webhook of group bot - zh_Hans: 通过飞书的群机器人webhook发送群消息 - pt_BR: Sending a group message on Feishu via the webhook of group bot - llm: A tool for sending messages to a chat group on Feishu(飞书) . -parameters: - - name: hook_key - type: secret-input - required: true - label: - en_US: Feishu Group bot webhook key - zh_Hans: 群机器人webhook的key - pt_BR: Feishu Group bot webhook key - human_description: - en_US: Feishu Group bot webhook key - zh_Hans: 群机器人webhook的key - pt_BR: Feishu Group bot webhook key - form: form - - name: content - type: string - required: true - label: - en_US: content - zh_Hans: 消息内容 - pt_BR: content - human_description: - en_US: Content to sent to the group. - zh_Hans: 群消息文本 - pt_BR: Content to sent to the group. 
- llm_description: Content of the message - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/_assets/icon.svg b/api/core/tools/provider/builtin/feishu_base/_assets/icon.svg deleted file mode 100644 index 2663a0f59ee6a4..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/_assets/icon.svg +++ /dev/null @@ -1,47 +0,0 @@ - - - - diff --git a/api/core/tools/provider/builtin/feishu_base/feishu_base.py b/api/core/tools/provider/builtin/feishu_base/feishu_base.py deleted file mode 100644 index 04056af53b5f95..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/feishu_base.py +++ /dev/null @@ -1,8 +0,0 @@ -from core.tools.provider.builtin.feishu_base.tools.get_tenant_access_token import GetTenantAccessTokenTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class FeishuBaseProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - GetTenantAccessTokenTool() - pass diff --git a/api/core/tools/provider/builtin/feishu_base/feishu_base.yaml b/api/core/tools/provider/builtin/feishu_base/feishu_base.yaml deleted file mode 100644 index f3dcbb6136b3a3..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/feishu_base.yaml +++ /dev/null @@ -1,14 +0,0 @@ -identity: - author: Doug Lea - name: feishu_base - label: - en_US: Feishu Base - zh_Hans: 飞书多维表格 - description: - en_US: Feishu Base - zh_Hans: 飞书多维表格 - icon: icon.svg - tags: - - social - - productivity -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.py b/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.py deleted file mode 100644 index 4a605fbffeef0b..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.py +++ /dev/null @@ -1,56 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from 
core.tools.tool.builtin_tool import BuiltinTool - - -class AddBaseRecordTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/{table_id}/records" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_id = tool_parameters.get("table_id", "") - if not table_id: - return self.create_text_message("Invalid parameter table_id") - - fields = tool_parameters.get("fields", "") - if not fields: - return self.create_text_message("Invalid parameter fields") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"fields": json.loads(fields)} - - try: - res = httpx.post( - url.format(app_token=app_token, table_id=table_id), - headers=headers, - params=params, - json=payload, - timeout=30, - ) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to add base record, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to add base record. 
{}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.yaml b/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.yaml deleted file mode 100644 index 3ce0154efd69dc..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.yaml +++ /dev/null @@ -1,66 +0,0 @@ -identity: - name: add_base_record - author: Doug Lea - label: - en_US: Add Base Record - zh_Hans: 在多维表格数据表中新增一条记录 -description: - human: - en_US: Add Base Record - zh_Hans: | - 在多维表格数据表中新增一条记录,详细请参考:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/app-table-record/create - llm: Add a new record in the multidimensional table data table. -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_id - type: string - required: true - label: - en_US: table_id - zh_Hans: 多维表格的数据表 - human_description: - en_US: bitable table id - zh_Hans: 多维表格数据表的唯一标识符 table_id - llm_description: bitable table id - form: llm - - - name: fields - type: string - required: true - label: - en_US: fields - zh_Hans: 数据表的列字段内容 - human_description: - en_US: The fields of the Base data table are the columns of the data table. 
- zh_Hans: | - 要增加一行多维表格记录,字段结构拼接如下:{"多行文本":"多行文本内容","单选":"选项1","多选":["选项1","选项2"],"复选框":true,"人员":[{"id":"ou_2910013f1e6456f16a0ce75ede950a0a"}],"群组":[{"id":"oc_cd07f55f14d6f4a4f1b51504e7e97f48"}],"电话号码":"13026162666"} - 当前接口支持的字段类型为:多行文本、单选、条码、多选、日期、人员、附件、复选框、超链接、数字、单向关联、双向关联、电话号码、地理位置。 - 不同类型字段的数据结构请参考数据结构概述:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/bitable-structure - llm_description: | - 要增加一行多维表格记录,字段结构拼接如下:{"多行文本":"多行文本内容","单选":"选项1","多选":["选项1","选项2"],"复选框":true,"人员":[{"id":"ou_2910013f1e6456f16a0ce75ede950a0a"}],"群组":[{"id":"oc_cd07f55f14d6f4a4f1b51504e7e97f48"}],"电话号码":"13026162666"} - 当前接口支持的字段类型为:多行文本、单选、条码、多选、日期、人员、附件、复选框、超链接、数字、单向关联、双向关联、电话号码、地理位置。 - 不同类型字段的数据结构请参考数据结构概述:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/bitable-structure - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/create_base.py b/api/core/tools/provider/builtin/feishu_base/tools/create_base.py deleted file mode 100644 index 6b755e2007d7d6..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/create_base.py +++ /dev/null @@ -1,41 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class CreateBaseTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - name = tool_parameters.get("name", "") - folder_token = tool_parameters.get("folder_token", "") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"name": name, "folder_token": folder_token} - - try: - res = httpx.post(url, 
headers=headers, params=params, json=payload, timeout=30) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to create base, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to create base. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/create_base.yaml b/api/core/tools/provider/builtin/feishu_base/tools/create_base.yaml deleted file mode 100644 index 76c76a916d4951..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/create_base.yaml +++ /dev/null @@ -1,47 +0,0 @@ -identity: - name: create_base - author: Doug Lea - label: - en_US: Create Base - zh_Hans: 创建多维表格 -description: - human: - en_US: Create base - zh_Hans: 在指定目录下创建多维表格 - llm: A tool for create a multidimensional table in the specified directory. -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: name - type: string - required: false - label: - en_US: name - zh_Hans: name - human_description: - en_US: Base App Name - zh_Hans: 多维表格App名字 - llm_description: Base App Name - form: llm - - - name: folder_token - type: string - required: false - label: - en_US: folder_token - zh_Hans: 多维表格App归属文件夹 - human_description: - en_US: Base App home folder. The default is empty, indicating that Base will be created in the cloud space root directory. - zh_Hans: 多维表格App归属文件夹。默认为空,表示多维表格将被创建在云空间根目录。 - llm_description: Base App home folder. The default is empty, indicating that Base will be created in the cloud space root directory. 
- form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.py b/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.py deleted file mode 100644 index b05d700113880b..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.py +++ /dev/null @@ -1,48 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class CreateBaseTableTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - name = tool_parameters.get("name", "") - - fields = tool_parameters.get("fields", "") - if not fields: - return self.create_text_message("Invalid parameter fields") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"table": {"name": name, "fields": json.loads(fields)}} - - try: - res = httpx.post(url.format(app_token=app_token), headers=headers, params=params, json=payload, timeout=30) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to create base table, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to create base table. 
{}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.yaml b/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.yaml deleted file mode 100644 index 48c46bec14f448..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.yaml +++ /dev/null @@ -1,106 +0,0 @@ -identity: - name: create_base_table - author: Doug Lea - label: - en_US: Create Base Table - zh_Hans: 多维表格新增一个数据表 -description: - human: - en_US: Create base table - zh_Hans: | - 多维表格新增一个数据表,详细请参考:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/app-table/create - llm: A tool for add a new data table to the multidimensional table. -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: name - type: string - required: false - label: - en_US: name - zh_Hans: name - human_description: - en_US: Multidimensional table data table name - zh_Hans: 多维表格数据表名称 - llm_description: Multidimensional table data table name - form: llm - - - name: fields - type: string - required: true - label: - en_US: fields - zh_Hans: fields - human_description: - en_US: Initial fields of the data table - zh_Hans: | - 数据表的初始字段,格式为:[{"field_name":"多行文本","type":1},{"field_name":"数字","type":2},{"field_name":"单选","type":3},{"field_name":"多选","type":4},{"field_name":"日期","type":5}]。 - field_name:字段名; - type: 字段类型;可选值有 - 1:多行文本 - 2:数字 - 3:单选 - 4:多选 - 5:日期 - 7:复选框 - 11:人员 - 13:电话号码 - 15:超链接 - 17:附件 - 
18:单向关联 - 20:公式 - 21:双向关联 - 22:地理位置 - 23:群组 - 1001:创建时间 - 1002:最后更新时间 - 1003:创建人 - 1004:修改人 - 1005:自动编号 - llm_description: | - 数据表的初始字段,格式为:[{"field_name":"多行文本","type":1},{"field_name":"数字","type":2},{"field_name":"单选","type":3},{"field_name":"多选","type":4},{"field_name":"日期","type":5}]。 - field_name:字段名; - type: 字段类型;可选值有 - 1:多行文本 - 2:数字 - 3:单选 - 4:多选 - 5:日期 - 7:复选框 - 11:人员 - 13:电话号码 - 15:超链接 - 17:附件 - 18:单向关联 - 20:公式 - 21:双向关联 - 22:地理位置 - 23:群组 - 1001:创建时间 - 1002:最后更新时间 - 1003:创建人 - 1004:修改人 - 1005:自动编号 - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.py b/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.py deleted file mode 100644 index 862eb2171b9269..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.py +++ /dev/null @@ -1,56 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DeleteBaseRecordsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/{table_id}/records/batch_delete" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_id = tool_parameters.get("table_id", "") - if not table_id: - return self.create_text_message("Invalid parameter table_id") - - record_ids = tool_parameters.get("record_ids", "") - if not record_ids: - return self.create_text_message("Invalid parameter record_ids") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - 
params = {} - payload = {"records": json.loads(record_ids)} - - try: - res = httpx.post( - url.format(app_token=app_token, table_id=table_id), - headers=headers, - params=params, - json=payload, - timeout=30, - ) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to delete base records, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to delete base records. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.yaml b/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.yaml deleted file mode 100644 index 595b2870298af9..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.yaml +++ /dev/null @@ -1,60 +0,0 @@ -identity: - name: delete_base_records - author: Doug Lea - label: - en_US: Delete Base Records - zh_Hans: 在多维表格数据表中删除多条记录 -description: - human: - en_US: Delete base records - zh_Hans: | - 该接口用于删除多维表格数据表中的多条记录,单次调用中最多删除 500 条记录。 - llm: A tool for delete multiple records in a multidimensional table data table, up to 500 records can be deleted in a single call. 
-parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_id - type: string - required: true - label: - en_US: table_id - zh_Hans: 多维表格的数据表 - human_description: - en_US: bitable table id - zh_Hans: 多维表格数据表的唯一标识符 table_id - llm_description: bitable table id - form: llm - - - name: record_ids - type: string - required: true - label: - en_US: record_ids - zh_Hans: record_ids - human_description: - en_US: A list of multiple record IDs to be deleted, for example ["recwNXzPQv","recpCsf4ME"] - zh_Hans: 待删除的多条记录id列表,示例为 ["recwNXzPQv","recpCsf4ME"] - llm_description: A list of multiple record IDs to be deleted, for example ["recwNXzPQv","recpCsf4ME"] - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.py b/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.py deleted file mode 100644 index f5121863035313..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.py +++ /dev/null @@ -1,46 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DeleteBaseTablesTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/batch_delete" - - 
access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_ids = tool_parameters.get("table_ids", "") - if not table_ids: - return self.create_text_message("Invalid parameter table_ids") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"table_ids": json.loads(table_ids)} - - try: - res = httpx.post(url.format(app_token=app_token), headers=headers, params=params, json=payload, timeout=30) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to delete base tables, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to delete base tables. 
{}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.yaml b/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.yaml deleted file mode 100644 index 5d72814363d86f..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.yaml +++ /dev/null @@ -1,48 +0,0 @@ -identity: - name: delete_base_tables - author: Doug Lea - label: - en_US: Delete Base Tables - zh_Hans: 删除多维表格中的数据表 -description: - human: - en_US: Delete base tables - zh_Hans: | - 删除多维表格中的数据表 - llm: A tool for deleting a data table in a multidimensional table -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_ids - type: string - required: true - label: - en_US: table_ids - zh_Hans: table_ids - human_description: - en_US: The ID list of the data tables to be deleted. Currently, a maximum of 50 data tables can be deleted at a time. The example is ["tbl1TkhyTWDkSoZ3","tblsRc9GRRXKqhvW"] - zh_Hans: 待删除数据表的id列表,当前一次操作最多支持50个数据表,示例为 ["tbl1TkhyTWDkSoZ3","tblsRc9GRRXKqhvW"] - llm_description: The ID list of the data tables to be deleted. Currently, a maximum of 50 data tables can be deleted at a time. 
The example is ["tbl1TkhyTWDkSoZ3","tblsRc9GRRXKqhvW"] - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.py b/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.py deleted file mode 100644 index f664bbeed08693..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.py +++ /dev/null @@ -1,39 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GetBaseInfoTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - try: - res = httpx.get(url.format(app_token=app_token), headers=headers, timeout=30) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to get base info, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to get base info. 
{}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.yaml b/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.yaml deleted file mode 100644 index de0868901834ee..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.yaml +++ /dev/null @@ -1,54 +0,0 @@ -identity: - name: get_base_info - author: Doug Lea - label: - en_US: Get Base Info - zh_Hans: 获取多维表格元数据 -description: - human: - en_US: Get base info - zh_Hans: | - 获取多维表格元数据,响应体如下: - { - "code": 0, - "msg": "success", - "data": { - "app": { - "app_token": "appbcbWCzen6D8dezhoCH2RpMAh", - "name": "mybase", - "revision": 1, - "is_advanced": false, - "time_zone": "Asia/Beijing" - } - } - } - app_token: 多维表格的 app_token; - name: 多维表格的名字; - revision: 多维表格的版本号; - is_advanced: 多维表格是否开启了高级权限。取值包括:(true-表示开启了高级权限,false-表示关闭了高级权限); - time_zone: 文档时区; - llm: A tool to get Base Metadata, imported parameter is Unique Device Identifier app_token of Base, app_token is required. 
-parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.py b/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.py deleted file mode 100644 index 2ea61d0068237b..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.py +++ /dev/null @@ -1,48 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GetTenantAccessTokenTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/auth/v3/tenant_access_token/internal" - - app_id = tool_parameters.get("app_id", "") - if not app_id: - return self.create_text_message("Invalid parameter app_id") - - app_secret = tool_parameters.get("app_secret", "") - if not app_secret: - return self.create_text_message("Invalid parameter app_secret") - - headers = { - "Content-Type": "application/json", - } - params = {} - payload = {"app_id": app_id, "app_secret": app_secret} - - """ - { - "code": 0, - "msg": "ok", - "tenant_access_token": "t-caecc734c2e3328a62489fe0648c4b98779515d3", - "expire": 7200 - } - """ - try: - res = httpx.post(url, headers=headers, params=params, json=payload, 
timeout=30) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to get tenant access token, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to get tenant access token. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.yaml b/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.yaml deleted file mode 100644 index 88acc27e06eca1..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.yaml +++ /dev/null @@ -1,39 +0,0 @@ -identity: - name: get_tenant_access_token - author: Doug Lea - label: - en_US: Get Tenant Access Token - zh_Hans: 获取飞书自建应用的 tenant_access_token -description: - human: - en_US: Get tenant access token - zh_Hans: | - 获取飞书自建应用的 tenant_access_token,响应体示例: - {"code":0,"msg":"ok","tenant_access_token":"t-caecc734c2e3328a62489fe0648c4b98779515d3","expire":7200} - tenant_access_token: 租户访问凭证; - expire: tenant_access_token 的过期时间,单位为秒; - llm: A tool for obtaining a tenant access token. The input parameters must include app_id and app_secret. 
-parameters: - - name: app_id - type: string - required: true - label: - en_US: app_id - zh_Hans: 应用唯一标识 - human_description: - en_US: app_id is the unique identifier of the Lark Open Platform application - zh_Hans: app_id 是飞书开放平台应用的唯一标识 - llm_description: app_id is the unique identifier of the Lark Open Platform application - form: llm - - - name: app_secret - type: secret-input - required: true - label: - en_US: app_secret - zh_Hans: 应用秘钥 - human_description: - en_US: app_secret is the secret key of the application - zh_Hans: app_secret 是应用的秘钥 - llm_description: app_secret is the secret key of the application - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.py b/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.py deleted file mode 100644 index e579d02f6967e7..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.py +++ /dev/null @@ -1,65 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class ListBaseRecordsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/{table_id}/records/search" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_id = tool_parameters.get("table_id", "") - if not table_id: - return self.create_text_message("Invalid parameter table_id") - - page_token = tool_parameters.get("page_token", "") - page_size = tool_parameters.get("page_size", "") - sort_condition = 
tool_parameters.get("sort_condition", "") - filter_condition = tool_parameters.get("filter_condition", "") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = { - "page_token": page_token, - "page_size": page_size, - } - - payload = {"automatic_fields": True} - if sort_condition: - payload["sort"] = json.loads(sort_condition) - if filter_condition: - payload["filter"] = json.loads(filter_condition) - - try: - res = httpx.post( - url.format(app_token=app_token, table_id=table_id), - headers=headers, - params=params, - json=payload, - timeout=30, - ) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to list base records, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to list base records. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.yaml b/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.yaml deleted file mode 100644 index 8647c880a60024..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.yaml +++ /dev/null @@ -1,108 +0,0 @@ -identity: - name: list_base_records - author: Doug Lea - label: - en_US: List Base Records - zh_Hans: 查询多维表格数据表中的现有记录 -description: - human: - en_US: List base records - zh_Hans: | - 查询多维表格数据表中的现有记录,单次最多查询 500 行记录,支持分页获取。 - llm: Query existing records in a multidimensional table data table. A maximum of 500 rows of records can be queried at a time, and paging retrieval is supported. 
-parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_id - type: string - required: true - label: - en_US: table_id - zh_Hans: 多维表格的数据表 - human_description: - en_US: bitable table id - zh_Hans: 多维表格数据表的唯一标识符 table_id - llm_description: bitable table id - form: llm - - - name: page_token - type: string - required: false - label: - en_US: page_token - zh_Hans: 分页标记 - human_description: - en_US: Pagination mark. If it is not filled in the first request, it means to traverse from the beginning. - zh_Hans: 分页标记,第一次请求不填,表示从头开始遍历。 - llm_description: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 - form: llm - - - name: page_size - type: number - required: false - default: 20 - label: - en_US: page_size - zh_Hans: 分页大小 - human_description: - en_US: paging size - zh_Hans: 分页大小,默认值为 20,最大值为 100。 - llm_description: The default value of paging size is 20 and the maximum value is 100. - form: llm - - - name: sort_condition - type: string - required: false - label: - en_US: sort_condition - zh_Hans: 排序条件 - human_description: - en_US: sort condition - zh_Hans: | - 排序条件,格式为:[{"field_name":"多行文本","desc":true}]。 - field_name: 字段名称; - desc: 是否倒序排序; - llm_description: | - Sorting conditions, the format is: [{"field_name":"multi-line text","desc":true}]. 
- form: llm - - - name: filter_condition - type: string - required: false - label: - en_US: filter_condition - zh_Hans: 筛选条件 - human_description: - en_US: filter condition - zh_Hans: | - 筛选条件,格式为:{"conjunction":"and","conditions":[{"field_name":"字段1","operator":"is","value":["文本内容"]}]}。 - conjunction:条件逻辑连接词; - conditions:筛选条件集合; - field_name:筛选条件的左值,值为字段的名称; - operator:条件运算符; - value:目标值; - llm_description: | - The format of the filter condition is: {"conjunction":"and","conditions":[{"field_name":"Field 1","operator":"is","value":["text content"]}]}. - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.py b/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.py deleted file mode 100644 index 4ec9a476bc8832..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.py +++ /dev/null @@ -1,47 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class ListBaseTablesTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - page_token = tool_parameters.get("page_token", "") - page_size = tool_parameters.get("page_size", "") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = { - "page_token": page_token, - "page_size": page_size, - } - - try: - res = httpx.get(url.format(app_token=app_token), headers=headers, params=params, timeout=30) 
- res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to list base tables, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to list base tables. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.yaml b/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.yaml deleted file mode 100644 index 9887124a28823a..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.yaml +++ /dev/null @@ -1,65 +0,0 @@ -identity: - name: list_base_tables - author: Doug Lea - label: - en_US: List Base Tables - zh_Hans: 根据 app_token 获取多维表格下的所有数据表 -description: - human: - en_US: List base tables - zh_Hans: | - 根据 app_token 获取多维表格下的所有数据表 - llm: A tool for getting all data tables under a multidimensional table based on app_token. -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: page_token - type: string - required: false - label: - en_US: page_token - zh_Hans: 分页标记 - human_description: - en_US: Pagination mark. If it is not filled in the first request, it means to traverse from the beginning. - zh_Hans: 分页标记,第一次请求不填,表示从头开始遍历。 - llm_description: | - Pagination token. If it is not filled in the first request, it means to start traversal from the beginning. 
- If there are more items in the pagination query result, a new page_token will be returned at the same time. - The page_token can be used to obtain the query result in the next traversal. - 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 - form: llm - - - name: page_size - type: number - required: false - default: 20 - label: - en_US: page_size - zh_Hans: 分页大小 - human_description: - en_US: paging size - zh_Hans: 分页大小,默认值为 20,最大值为 100。 - llm_description: The default value of paging size is 20 and the maximum value is 100. - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.py b/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.py deleted file mode 100644 index fb818f838073fa..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.py +++ /dev/null @@ -1,49 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class ReadBaseRecordTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/{table_id}/records/{record_id}" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_id = tool_parameters.get("table_id", "") - if not table_id: - return self.create_text_message("Invalid parameter table_id") - - record_id = tool_parameters.get("record_id", "") - if not record_id: - return self.create_text_message("Invalid parameter record_id") - - headers = { - "Content-Type": "application/json", - "Authorization": 
f"Bearer {access_token}", - } - - try: - res = httpx.get( - url.format(app_token=app_token, table_id=table_id, record_id=record_id), headers=headers, timeout=30 - ) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to read base record, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to read base record. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.yaml b/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.yaml deleted file mode 100644 index 400e9a1021f2db..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.yaml +++ /dev/null @@ -1,60 +0,0 @@ -identity: - name: read_base_record - author: Doug Lea - label: - en_US: Read Base Record - zh_Hans: 根据 record_id 的值检索多维表格数据表的记录 -description: - human: - en_US: Read base record - zh_Hans: | - 根据 record_id 的值检索多维表格数据表的记录 - llm: Retrieve records from a multidimensional table based on the value of record_id -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_id - type: string - required: true - label: - en_US: table_id - zh_Hans: 多维表格的数据表 - human_description: - en_US: bitable table id - zh_Hans: 多维表格数据表的唯一标识符 table_id - llm_description: bitable table id - form: llm - - - name: record_id - type: 
string - required: true - label: - en_US: record_id - zh_Hans: 单条记录的 id - human_description: - en_US: The id of a single record - zh_Hans: 单条记录的 id - llm_description: The id of a single record - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.py b/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.py deleted file mode 100644 index 6d7e33f3ffef7c..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.py +++ /dev/null @@ -1,60 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class UpdateBaseRecordTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/{table_id}/records/{record_id}" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_id = tool_parameters.get("table_id", "") - if not table_id: - return self.create_text_message("Invalid parameter table_id") - - record_id = tool_parameters.get("record_id", "") - if not record_id: - return self.create_text_message("Invalid parameter record_id") - - fields = tool_parameters.get("fields", "") - if not fields: - return self.create_text_message("Invalid parameter fields") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"fields": json.loads(fields)} - - try: - res = httpx.put( - url.format(app_token=app_token, table_id=table_id, record_id=record_id), - headers=headers, - params=params, - 
json=payload, - timeout=30, - ) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to update base record, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to update base record. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.yaml b/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.yaml deleted file mode 100644 index 788798c4b3b40e..00000000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.yaml +++ /dev/null @@ -1,78 +0,0 @@ -identity: - name: update_base_record - author: Doug Lea - label: - en_US: Update Base Record - zh_Hans: 更新多维表格数据表中的一条记录 -description: - human: - en_US: Update base record - zh_Hans: | - 更新多维表格数据表中的一条记录,详细请参考:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/app-table-record/update - llm: Update a record in a multidimensional table data table -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_id - type: string - required: true - label: - en_US: table_id - zh_Hans: 多维表格的数据表 - human_description: - en_US: bitable table id - zh_Hans: 多维表格数据表的唯一标识符 table_id - llm_description: bitable table id - form: llm - - - name: record_id - type: string - required: true - label: - en_US: record_id - zh_Hans: 单条记录的 
id - human_description: - en_US: The id of a single record - zh_Hans: 单条记录的 id - llm_description: The id of a single record - form: llm - - - name: fields - type: string - required: true - label: - en_US: fields - zh_Hans: 数据表的列字段内容 - human_description: - en_US: The fields of a multidimensional table data table, that is, the columns of the data table. - zh_Hans: | - 要更新一行多维表格记录,字段结构拼接如下:{"多行文本":"多行文本内容","单选":"选项1","多选":["选项1","选项2"],"复选框":true,"人员":[{"id":"ou_2910013f1e6456f16a0ce75ede950a0a"}],"群组":[{"id":"oc_cd07f55f14d6f4a4f1b51504e7e97f48"}],"电话号码":"13026162666"} - 当前接口支持的字段类型为:多行文本、单选、条码、多选、日期、人员、附件、复选框、超链接、数字、单向关联、双向关联、电话号码、地理位置。 - 不同类型字段的数据结构请参考数据结构概述:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/bitable-structure - llm_description: | - 要更新一行多维表格记录,字段结构拼接如下:{"多行文本":"多行文本内容","单选":"选项1","多选":["选项1","选项2"],"复选框":true,"人员":[{"id":"ou_2910013f1e6456f16a0ce75ede950a0a"}],"群组":[{"id":"oc_cd07f55f14d6f4a4f1b51504e7e97f48"}],"电话号码":"13026162666"} - 当前接口支持的字段类型为:多行文本、单选、条码、多选、日期、人员、附件、复选框、超链接、数字、单向关联、双向关联、电话号码、地理位置。 - 不同类型字段的数据结构请参考数据结构概述:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/bitable-structure - form: llm diff --git a/api/core/tools/provider/builtin/feishu_document/_assets/icon.svg b/api/core/tools/provider/builtin/feishu_document/_assets/icon.svg deleted file mode 100644 index 5a0a6416b3db32..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/_assets/icon.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - diff --git a/api/core/tools/provider/builtin/feishu_document/feishu_document.py b/api/core/tools/provider/builtin/feishu_document/feishu_document.py deleted file mode 100644 index b0a1e393eb8116..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/feishu_document.py +++ /dev/null @@ -1,15 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController -from 
core.tools.utils.feishu_api_utils import FeishuRequest - - -class FeishuDocumentProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - app_id = credentials.get("app_id") - app_secret = credentials.get("app_secret") - if not app_id or not app_secret: - raise ToolProviderCredentialValidationError("app_id and app_secret is required") - try: - assert FeishuRequest(app_id, app_secret).tenant_access_token is not None - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/feishu_document/feishu_document.yaml b/api/core/tools/provider/builtin/feishu_document/feishu_document.yaml deleted file mode 100644 index 8eaa6b27049c6b..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/feishu_document.yaml +++ /dev/null @@ -1,34 +0,0 @@ -identity: - author: Doug Lea - name: feishu_document - label: - en_US: Lark Cloud Document - zh_Hans: 飞书云文档 - description: - en_US: Lark Cloud Document - zh_Hans: 飞书云文档 - icon: icon.svg - tags: - - social - - productivity -credentials_for_provider: - app_id: - type: text-input - required: true - label: - en_US: APP ID - placeholder: - en_US: Please input your feishu app id - zh_Hans: 请输入你的飞书 app id - help: - en_US: Get your app_id and app_secret from Feishu - zh_Hans: 从飞书获取您的 app_id 和 app_secret - url: https://open.feishu.cn - app_secret: - type: secret-input - required: true - label: - en_US: APP Secret - placeholder: - en_US: Please input your app secret - zh_Hans: 请输入你的飞书 app secret diff --git a/api/core/tools/provider/builtin/feishu_document/tools/create_document.py b/api/core/tools/provider/builtin/feishu_document/tools/create_document.py deleted file mode 100644 index 090a0828e89bbf..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/tools/create_document.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolInvokeMessage -from 
core.tools.tool.builtin_tool import BuiltinTool -from core.tools.utils.feishu_api_utils import FeishuRequest - - -class CreateDocumentTool(BuiltinTool): - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - app_id = self.runtime.credentials.get("app_id") - app_secret = self.runtime.credentials.get("app_secret") - client = FeishuRequest(app_id, app_secret) - - title = tool_parameters.get("title") - content = tool_parameters.get("content") - folder_token = tool_parameters.get("folder_token") - - res = client.create_document(title, content, folder_token) - return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_document/tools/create_document.yaml b/api/core/tools/provider/builtin/feishu_document/tools/create_document.yaml deleted file mode 100644 index ddf2729f0e4b5c..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/tools/create_document.yaml +++ /dev/null @@ -1,47 +0,0 @@ -identity: - name: create_document - author: Doug Lea - label: - en_US: Create Lark document - zh_Hans: 创建飞书文档 -description: - human: - en_US: Create Lark document - zh_Hans: 创建飞书文档,支持创建空文档和带内容的文档,支持 markdown 语法创建。 - llm: A tool for creating Feishu documents. -parameters: - - name: title - type: string - required: false - label: - en_US: Document title - zh_Hans: 文档标题 - human_description: - en_US: Document title, only supports plain text content. - zh_Hans: 文档标题,只支持纯文本内容。 - llm_description: 文档标题,只支持纯文本内容,可以为空。 - form: llm - - - name: content - type: string - required: false - label: - en_US: Document content - zh_Hans: 文档内容 - human_description: - en_US: Document content, supports markdown syntax, can be empty. - zh_Hans: 文档内容,支持 markdown 语法,可以为空。 - llm_description: 文档内容,支持 markdown 语法,可以为空。 - form: llm - - - name: folder_token - type: string - required: false - label: - en_US: folder_token - zh_Hans: 文档所在文件夹的 Token - human_description: - en_US: The token of the folder where the document is located. 
If it is not passed or is empty, it means the root directory. - zh_Hans: 文档所在文件夹的 Token,不传或传空表示根目录。 - llm_description: 文档所在文件夹的 Token,不传或传空表示根目录。 - form: llm diff --git a/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.py b/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.py deleted file mode 100644 index c94a5f70ed7e34..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool -from core.tools.utils.feishu_api_utils import FeishuRequest - - -class GetDocumentRawContentTool(BuiltinTool): - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - app_id = self.runtime.credentials.get("app_id") - app_secret = self.runtime.credentials.get("app_secret") - client = FeishuRequest(app_id, app_secret) - - document_id = tool_parameters.get("document_id") - mode = tool_parameters.get("mode") - lang = tool_parameters.get("lang", 0) - - res = client.get_document_content(document_id, mode, lang) - return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.yaml b/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.yaml deleted file mode 100644 index 51eda73a60095c..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.yaml +++ /dev/null @@ -1,49 +0,0 @@ -identity: - name: get_document_content - author: Doug Lea - label: - en_US: Get Document Content - zh_Hans: 获取飞书云文档的内容 -description: - human: - en_US: Get document content - zh_Hans: 获取飞书云文档的内容 - llm: A tool for retrieving content from Feishu cloud documents. 
-parameters: - - name: document_id - type: string - required: true - label: - en_US: document_id - zh_Hans: 飞书文档的唯一标识 - human_description: - en_US: Unique identifier for a Feishu document. You can also input the document's URL. - zh_Hans: 飞书文档的唯一标识,支持输入文档的 URL。 - llm_description: 飞书文档的唯一标识,支持输入文档的 URL。 - form: llm - - - name: mode - type: string - required: false - label: - en_US: mode - zh_Hans: 文档返回格式 - human_description: - en_US: Format of the document return, optional values are text, markdown, can be empty, default is markdown. - zh_Hans: 文档返回格式,可选值有 text、markdown,可以为空,默认值为 markdown。 - llm_description: 文档返回格式,可选值有 text、markdown,可以为空,默认值为 markdown。 - form: llm - - - name: lang - type: number - required: false - default: 0 - label: - en_US: lang - zh_Hans: 指定@用户的语言 - human_description: - en_US: | - Specifies the language for MentionUser, optional values are [0, 1]. 0: User's default name, 1: User's English name, default is 0. - zh_Hans: 指定返回的 MentionUser,即 @用户 的语言,可选值有 [0,1]。0:该用户的默认名称,1:该用户的英文名称,默认值为 0。 - llm_description: 指定返回的 MentionUser,即 @用户 的语言,可选值有 [0,1]。0:该用户的默认名称,1:该用户的英文名称,默认值为 0。 - form: llm diff --git a/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.py b/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.py deleted file mode 100644 index 572a7abf284193..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool -from core.tools.utils.feishu_api_utils import FeishuRequest - - -class ListDocumentBlockTool(BuiltinTool): - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - app_id = self.runtime.credentials.get("app_id") - app_secret = self.runtime.credentials.get("app_secret") - client = FeishuRequest(app_id, app_secret) - - document_id = 
tool_parameters.get("document_id") - page_size = tool_parameters.get("page_size", 500) - page_token = tool_parameters.get("page_token", "") - - res = client.list_document_blocks(document_id, page_token, page_size) - return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.yaml b/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.yaml deleted file mode 100644 index 019ac983906ff1..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.yaml +++ /dev/null @@ -1,74 +0,0 @@ -identity: - name: list_document_blocks - author: Doug Lea - label: - en_US: List Document Blocks - zh_Hans: 获取飞书文档所有块 -description: - human: - en_US: List document blocks - zh_Hans: 获取飞书文档所有块的富文本内容并分页返回 - llm: A tool to get all blocks of Feishu documents -parameters: - - name: document_id - type: string - required: true - label: - en_US: document_id - zh_Hans: 飞书文档的唯一标识 - human_description: - en_US: Unique identifier for a Feishu document. You can also input the document's URL. - zh_Hans: 飞书文档的唯一标识,支持输入文档的 URL。 - llm_description: 飞书文档的唯一标识,支持输入文档的 URL。 - form: llm - - - name: user_id_type - type: select - required: false - options: - - value: open_id - label: - en_US: open_id - zh_Hans: open_id - - value: union_id - label: - en_US: union_id - zh_Hans: union_id - - value: user_id - label: - en_US: user_id - zh_Hans: user_id - default: "open_id" - label: - en_US: user_id_type - zh_Hans: 用户 ID 类型 - human_description: - en_US: User ID type, optional values are open_id, union_id, user_id, with a default value of open_id. - zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 - llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 - form: llm - - - name: page_size - type: number - required: false - default: "500" - label: - en_US: page_size - zh_Hans: 分页大小 - human_description: - en_US: Paging size, the default and maximum value is 500. 
- zh_Hans: 分页大小, 默认值和最大值为 500。 - llm_description: 分页大小, 表示一次请求最多返回多少条数据,默认值和最大值为 500。 - form: llm - - - name: page_token - type: string - required: false - label: - en_US: page_token - zh_Hans: 分页标记 - human_description: - en_US: Pagination token used to navigate through query results, allowing retrieval of additional items in subsequent requests. - zh_Hans: 分页标记,用于分页查询结果,以便下次遍历时获取更多项。 - llm_description: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 - form: llm diff --git a/api/core/tools/provider/builtin/feishu_document/tools/write_document.py b/api/core/tools/provider/builtin/feishu_document/tools/write_document.py deleted file mode 100644 index 6061250e48e136..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/tools/write_document.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool -from core.tools.utils.feishu_api_utils import FeishuRequest - - -class CreateDocumentTool(BuiltinTool): - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - app_id = self.runtime.credentials.get("app_id") - app_secret = self.runtime.credentials.get("app_secret") - client = FeishuRequest(app_id, app_secret) - - document_id = tool_parameters.get("document_id") - content = tool_parameters.get("content") - position = tool_parameters.get("position") - - res = client.write_document(document_id, content, position) - return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_document/tools/write_document.yaml b/api/core/tools/provider/builtin/feishu_document/tools/write_document.yaml deleted file mode 100644 index 4282e3dcf3977f..00000000000000 --- a/api/core/tools/provider/builtin/feishu_document/tools/write_document.yaml +++ /dev/null @@ -1,59 +0,0 @@ -identity: - name: write_document - author: Doug Lea - label: - en_US: Write Document - zh_Hans: 
在飞书文档中新增内容 -description: - human: - en_US: Adding new content to Lark documents - zh_Hans: 在飞书文档中新增内容 - llm: A tool for adding new content to Lark documents. -parameters: - - name: document_id - type: string - required: true - label: - en_US: document_id - zh_Hans: 飞书文档的唯一标识 - human_description: - en_US: Unique identifier for a Feishu document. You can also input the document's URL. - zh_Hans: 飞书文档的唯一标识,支持输入文档的 URL。 - llm_description: 飞书文档的唯一标识,支持输入文档的 URL。 - form: llm - - - name: content - type: string - required: true - label: - en_US: Plain text or Markdown content - zh_Hans: 纯文本或 Markdown 内容 - human_description: - en_US: Plain text or Markdown content. Note that embedded tables in the document should not have merged cells. - zh_Hans: 纯文本或 Markdown 内容。注意文档的内嵌套表格不允许有单元格合并。 - llm_description: 纯文本或 Markdown 内容,注意文档的内嵌套表格不允许有单元格合并。 - form: llm - - - name: position - type: string - required: false - label: - en_US: position - zh_Hans: 添加位置 - human_description: - en_US: | - Enumeration values: start or end. Use 'start' to add content at the beginning of the document, and 'end' to add content at the end. The default value is 'end'. 
- zh_Hans: 枚举值:start 或 end。使用 'start' 在文档开头添加内容,使用 'end' 在文档结尾添加内容,默认值为 'end'。 - llm_description: | - 枚举值 start、end,start: 在文档开头添加内容;end: 在文档结尾添加内容,默认值为 end。 - form: llm - options: - - value: start - label: - en_US: start - zh_Hans: 在文档开头添加内容 - - value: end - label: - en_US: end - zh_Hans: 在文档结尾添加内容 - default: start diff --git a/api/core/tools/provider/builtin/feishu_message/_assets/icon.svg b/api/core/tools/provider/builtin/feishu_message/_assets/icon.svg deleted file mode 100644 index 222a1571f9bbbb..00000000000000 --- a/api/core/tools/provider/builtin/feishu_message/_assets/icon.svg +++ /dev/null @@ -1,19 +0,0 @@ - - - - diff --git a/api/core/tools/provider/builtin/feishu_message/feishu_message.py b/api/core/tools/provider/builtin/feishu_message/feishu_message.py deleted file mode 100644 index 7b3adb9293750b..00000000000000 --- a/api/core/tools/provider/builtin/feishu_message/feishu_message.py +++ /dev/null @@ -1,15 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController -from core.tools.utils.feishu_api_utils import FeishuRequest - - -class FeishuMessageProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - app_id = credentials.get("app_id") - app_secret = credentials.get("app_secret") - if not app_id or not app_secret: - raise ToolProviderCredentialValidationError("app_id and app_secret is required") - try: - assert FeishuRequest(app_id, app_secret).tenant_access_token is not None - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/feishu_message/feishu_message.yaml b/api/core/tools/provider/builtin/feishu_message/feishu_message.yaml deleted file mode 100644 index 1bd8953dddcb24..00000000000000 --- a/api/core/tools/provider/builtin/feishu_message/feishu_message.yaml +++ /dev/null @@ -1,34 +0,0 @@ -identity: - author: Doug Lea - name: 
feishu_message - label: - en_US: Lark Message - zh_Hans: 飞书消息 - description: - en_US: Lark Message - zh_Hans: 飞书消息 - icon: icon.svg - tags: - - social - - productivity -credentials_for_provider: - app_id: - type: text-input - required: true - label: - en_US: APP ID - placeholder: - en_US: Please input your feishu app id - zh_Hans: 请输入你的飞书 app id - help: - en_US: Get your app_id and app_secret from Feishu - zh_Hans: 从飞书获取您的 app_id 和 app_secret - url: https://open.feishu.cn - app_secret: - type: secret-input - required: true - label: - en_US: APP Secret - placeholder: - en_US: Please input your app secret - zh_Hans: 请输入你的飞书 app secret diff --git a/api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.py b/api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.py deleted file mode 100644 index 1dd315d0e293a0..00000000000000 --- a/api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool -from core.tools.utils.feishu_api_utils import FeishuRequest - - -class SendBotMessageTool(BuiltinTool): - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - app_id = self.runtime.credentials.get("app_id") - app_secret = self.runtime.credentials.get("app_secret") - client = FeishuRequest(app_id, app_secret) - - receive_id_type = tool_parameters.get("receive_id_type") - receive_id = tool_parameters.get("receive_id") - msg_type = tool_parameters.get("msg_type") - content = tool_parameters.get("content") - - res = client.send_bot_message(receive_id_type, receive_id, msg_type, content) - return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.yaml b/api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.yaml deleted file mode 100644 index 
6e398b18ab3aee..00000000000000 --- a/api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.yaml +++ /dev/null @@ -1,91 +0,0 @@ -identity: - name: send_bot_message - author: Doug Lea - label: - en_US: Send Bot Message - zh_Hans: 发送飞书应用消息 -description: - human: - en_US: Send bot message - zh_Hans: 发送飞书应用消息 - llm: A tool for sending Feishu application messages. -parameters: - - name: receive_id_type - type: select - required: true - options: - - value: open_id - label: - en_US: open id - zh_Hans: open id - - value: union_id - label: - en_US: union id - zh_Hans: union id - - value: user_id - label: - en_US: user id - zh_Hans: user id - - value: email - label: - en_US: email - zh_Hans: email - - value: chat_id - label: - en_US: chat id - zh_Hans: chat id - label: - en_US: User ID Type - zh_Hans: 用户 ID 类型 - human_description: - en_US: User ID Type - zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id、email、chat_id。 - llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id、email、chat_id。 - form: llm - - - name: receive_id - type: string - required: true - label: - en_US: Receive Id - zh_Hans: 消息接收者的 ID - human_description: - en_US: The ID of the message receiver. The ID type should correspond to the query parameter receive_id_type. - zh_Hans: 消息接收者的 ID,ID 类型应与查询参数 receive_id_type 对应。 - llm_description: 消息接收者的 ID,ID 类型应与查询参数 receive_id_type 对应。 - form: llm - - - name: msg_type - type: string - required: true - options: - - value: text - label: - en_US: text - zh_Hans: 文本 - - value: interactive - label: - en_US: message card - zh_Hans: 消息卡片 - label: - en_US: Message type - zh_Hans: 消息类型 - human_description: - en_US: Message type, optional values are, text (text), interactive (message card). 
- zh_Hans: 消息类型,可选值有:text(文本)、interactive(消息卡片)。 - llm_description: 消息类型,可选值有:text(文本)、interactive(消息卡片)。 - form: llm - - - name: content - type: string - required: true - label: - en_US: Message content - zh_Hans: 消息内容 - human_description: - en_US: Message content - zh_Hans: | - 消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容, - 具体格式说明参考:https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json - llm_description: 消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容。 - form: llm diff --git a/api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.py b/api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.py deleted file mode 100644 index 44e70e0a15b64d..00000000000000 --- a/api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool -from core.tools.utils.feishu_api_utils import FeishuRequest - - -class SendWebhookMessageTool(BuiltinTool): - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - app_id = self.runtime.credentials.get("app_id") - app_secret = self.runtime.credentials.get("app_secret") - client = FeishuRequest(app_id, app_secret) - - webhook = tool_parameters.get("webhook") - msg_type = tool_parameters.get("msg_type") - content = tool_parameters.get("content") - - res = client.send_webhook_message(webhook, msg_type, content) - return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.yaml b/api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.yaml deleted file mode 100644 index 8b39ce4874d506..00000000000000 --- a/api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.yaml +++ /dev/null @@ -1,58 +0,0 @@ -identity: - name: send_webhook_message - author: Doug Lea - label: - 
en_US: Send Webhook Message - zh_Hans: 使用自定义机器人发送飞书消息 -description: - human: - en_US: Send webhook message - zh_Hans: 使用自定义机器人发送飞书消息 - llm: A tool for sending Lark messages using a custom robot. -parameters: - - name: webhook - type: string - required: true - label: - en_US: webhook - zh_Hans: webhook 的地址 - human_description: - en_US: The address of the webhook - zh_Hans: webhook 的地址 - llm_description: webhook 的地址 - form: llm - - - name: msg_type - type: string - required: true - options: - - value: text - label: - en_US: text - zh_Hans: 文本 - - value: interactive - label: - en_US: message card - zh_Hans: 消息卡片 - label: - en_US: Message type - zh_Hans: 消息类型 - human_description: - en_US: Message type, optional values are, text (text), interactive (message card). - zh_Hans: 消息类型,可选值有:text(文本)、interactive(消息卡片)。 - llm_description: 消息类型,可选值有:text(文本)、interactive(消息卡片)。 - form: llm - - - name: content - type: string - required: true - label: - en_US: Message content - zh_Hans: 消息内容 - human_description: - en_US: Message content - zh_Hans: | - 消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容, - 具体格式说明参考:https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json - llm_description: 消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容。 - form: llm diff --git a/api/core/tools/provider/builtin/firecrawl/_assets/icon.svg b/api/core/tools/provider/builtin/firecrawl/_assets/icon.svg deleted file mode 100644 index e1e5f54117b1be..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/_assets/icon.svg +++ /dev/null @@ -1,3 +0,0 @@ - - 🔥 - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/firecrawl/firecrawl.py b/api/core/tools/provider/builtin/firecrawl/firecrawl.py deleted file mode 100644 index 01455d7206f185..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/firecrawl.py +++ /dev/null @@ -1,14 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from 
core.tools.provider.builtin.firecrawl.tools.scrape import ScrapeTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class FirecrawlProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - # Example validation using the ScrapeTool, only scraping title for minimize content - ScrapeTool().fork_tool_runtime(runtime={"credentials": credentials}).invoke( - user_id="", tool_parameters={"url": "https://google.com", "onlyIncludeTags": "title"} - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/firecrawl/firecrawl.yaml b/api/core/tools/provider/builtin/firecrawl/firecrawl.yaml deleted file mode 100644 index a48b9d9f541eb3..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/firecrawl.yaml +++ /dev/null @@ -1,35 +0,0 @@ -identity: - author: Richards Tu - name: firecrawl - label: - en_US: Firecrawl - zh_CN: Firecrawl - description: - en_US: Firecrawl API integration for web crawling and scraping. - zh_Hans: Firecrawl API 集成,用于网页爬取和数据抓取。 - icon: icon.svg - tags: - - search - - utilities -credentials_for_provider: - firecrawl_api_key: - type: secret-input - required: true - label: - en_US: Firecrawl API Key - zh_Hans: Firecrawl API 密钥 - placeholder: - en_US: Please input your Firecrawl API key - zh_Hans: 请输入您的 Firecrawl API 密钥,如果是自托管版本,可以随意填写密钥 - help: - en_US: Get your Firecrawl API key from your Firecrawl account settings.If you are using a self-hosted version, you may enter any key at your convenience. 
- zh_Hans: 从您的 Firecrawl 账户设置中获取 Firecrawl API 密钥。如果是自托管版本,可以随意填写密钥。 - url: https://www.firecrawl.dev/account - base_url: - type: text-input - required: false - label: - en_US: Firecrawl server's Base URL - zh_Hans: Firecrawl服务器的API URL - placeholder: - en_US: https://api.firecrawl.dev diff --git a/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py b/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py deleted file mode 100644 index d9fb6f04bcfa75..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py +++ /dev/null @@ -1,122 +0,0 @@ -import json -import logging -import time -from collections.abc import Mapping -from typing import Any - -import requests -from requests.exceptions import HTTPError - -logger = logging.getLogger(__name__) - - -class FirecrawlApp: - def __init__(self, api_key: str | None = None, base_url: str | None = None): - self.api_key = api_key - self.base_url = base_url or "https://api.firecrawl.dev" - if not self.api_key: - raise ValueError("API key is required") - - def _prepare_headers(self, idempotency_key: str | None = None): - headers = {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} - if idempotency_key: - headers["Idempotency-Key"] = idempotency_key - return headers - - def _request( - self, - method: str, - url: str, - data: Mapping[str, Any] | None = None, - headers: Mapping[str, str] | None = None, - retries: int = 3, - backoff_factor: float = 0.3, - ) -> Mapping[str, Any] | None: - if not headers: - headers = self._prepare_headers() - for i in range(retries): - try: - response = requests.request(method, url, json=data, headers=headers) - return response.json() - except requests.exceptions.RequestException: - if i < retries - 1: - time.sleep(backoff_factor * (2**i)) - else: - raise - return None - - def scrape_url(self, url: str, **kwargs): - endpoint = f"{self.base_url}/v1/scrape" - data = {"url": url, **kwargs} - logger.debug(f"Sent request to {endpoint=} 
body={data}") - response = self._request("POST", endpoint, data) - if response is None: - raise HTTPError("Failed to scrape URL after multiple retries") - return response - - def map(self, url: str, **kwargs): - endpoint = f"{self.base_url}/v1/map" - data = {"url": url, **kwargs} - logger.debug(f"Sent request to {endpoint=} body={data}") - response = self._request("POST", endpoint, data) - if response is None: - raise HTTPError("Failed to perform map after multiple retries") - return response - - def crawl_url( - self, url: str, wait: bool = True, poll_interval: int = 5, idempotency_key: str | None = None, **kwargs - ): - endpoint = f"{self.base_url}/v1/crawl" - headers = self._prepare_headers(idempotency_key) - data = {"url": url, **kwargs} - logger.debug(f"Sent request to {endpoint=} body={data}") - response = self._request("POST", endpoint, data, headers) - if response is None: - raise HTTPError("Failed to initiate crawl after multiple retries") - elif response.get("success") == False: - raise HTTPError(f'Failed to crawl: {response.get("error")}') - job_id: str = response["id"] - if wait: - return self._monitor_job_status(job_id=job_id, poll_interval=poll_interval) - return response - - def check_crawl_status(self, job_id: str): - endpoint = f"{self.base_url}/v1/crawl/{job_id}" - response = self._request("GET", endpoint) - if response is None: - raise HTTPError(f"Failed to check status for job {job_id} after multiple retries") - return response - - def cancel_crawl_job(self, job_id: str): - endpoint = f"{self.base_url}/v1/crawl/{job_id}" - response = self._request("DELETE", endpoint) - if response is None: - raise HTTPError(f"Failed to cancel job {job_id} after multiple retries") - return response - - def _monitor_job_status(self, job_id: str, poll_interval: int): - while True: - status = self.check_crawl_status(job_id) - if status["status"] == "completed": - return status - elif status["status"] == "failed": - raise HTTPError(f'Job {job_id} failed: 
{status["error"]}') - time.sleep(poll_interval) - - -def get_array_params(tool_parameters: dict[str, Any], key): - param = tool_parameters.get(key) - if param: - return param.split(",") - - -def get_json_params(tool_parameters: dict[str, Any], key): - param = tool_parameters.get(key) - if param: - try: - # support both single quotes and double quotes - param = param.replace("'", '"') - param = json.loads(param) - except Exception: - raise ValueError(f"Invalid {key} format.") - return param diff --git a/api/core/tools/provider/builtin/firecrawl/tools/crawl.py b/api/core/tools/provider/builtin/firecrawl/tools/crawl.py deleted file mode 100644 index 9675b8eb913351..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/tools/crawl.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.provider.builtin.firecrawl.firecrawl_appx import FirecrawlApp, get_array_params, get_json_params -from core.tools.tool.builtin_tool import BuiltinTool - - -class CrawlTool(BuiltinTool): - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - """ - the api doc: - https://docs.firecrawl.dev/api-reference/endpoint/crawl - """ - app = FirecrawlApp( - api_key=self.runtime.credentials["firecrawl_api_key"], base_url=self.runtime.credentials["base_url"] - ) - - scrapeOptions = {} - payload = {} - - wait_for_results = tool_parameters.get("wait_for_results", True) - - payload["excludePaths"] = get_array_params(tool_parameters, "excludePaths") - payload["includePaths"] = get_array_params(tool_parameters, "includePaths") - payload["maxDepth"] = tool_parameters.get("maxDepth") - payload["ignoreSitemap"] = tool_parameters.get("ignoreSitemap", False) - payload["limit"] = tool_parameters.get("limit", 5) - payload["allowBackwardLinks"] = tool_parameters.get("allowBackwardLinks", False) - payload["allowExternalLinks"] = tool_parameters.get("allowExternalLinks", False) - 
payload["webhook"] = tool_parameters.get("webhook") - - scrapeOptions["formats"] = get_array_params(tool_parameters, "formats") - scrapeOptions["headers"] = get_json_params(tool_parameters, "headers") - scrapeOptions["includeTags"] = get_array_params(tool_parameters, "includeTags") - scrapeOptions["excludeTags"] = get_array_params(tool_parameters, "excludeTags") - scrapeOptions["onlyMainContent"] = tool_parameters.get("onlyMainContent", False) - scrapeOptions["waitFor"] = tool_parameters.get("waitFor", 0) - scrapeOptions = {k: v for k, v in scrapeOptions.items() if v not in {None, ""}} - payload["scrapeOptions"] = scrapeOptions or None - - payload = {k: v for k, v in payload.items() if v not in {None, ""}} - - crawl_result = app.crawl_url(url=tool_parameters["url"], wait=wait_for_results, **payload) - - return self.create_json_message(crawl_result) diff --git a/api/core/tools/provider/builtin/firecrawl/tools/crawl.yaml b/api/core/tools/provider/builtin/firecrawl/tools/crawl.yaml deleted file mode 100644 index 0d7dbcac20ea16..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/tools/crawl.yaml +++ /dev/null @@ -1,200 +0,0 @@ -identity: - name: crawl - author: Richards Tu - label: - en_US: Crawl - zh_Hans: 深度爬取 -description: - human: - en_US: Recursively search through a urls subdomains, and gather the content. - zh_Hans: 递归爬取一个网址的子域名,并收集内容。 - llm: This tool initiates a web crawl to extract data from a specified URL. It allows configuring crawler options such as including or excluding URL patterns, generating alt text for images using LLMs (paid plan required), limiting the maximum number of pages to crawl, and returning only the main content of the page. The tool can return either a list of crawled documents or a list of URLs based on the provided options. -parameters: - - name: url - type: string - required: true - label: - en_US: Start URL - zh_Hans: 起始URL - human_description: - en_US: The base URL to start crawling from. 
- zh_Hans: 要爬取网站的起始URL。 - llm_description: The URL of the website that needs to be crawled. This is a required parameter. - form: llm - - name: wait_for_results - type: boolean - default: true - label: - en_US: Wait For Results - zh_Hans: 等待爬取结果 - human_description: - en_US: If you choose not to wait, it will directly return a job ID. You can use this job ID to check the crawling results or cancel the crawling task, which is usually very useful for a large-scale crawling task. - zh_Hans: 如果选择不等待,则会直接返回一个job_id,可以通过job_id查询爬取结果或取消爬取任务,这通常对于一个大型爬取任务来说非常有用。 - form: form -############## Payload ####################### - - name: excludePaths - type: string - label: - en_US: URL patterns to exclude - zh_Hans: 要排除的URL模式 - placeholder: - en_US: Use commas to separate multiple tags - zh_Hans: 多个标签时使用半角逗号分隔 - human_description: - en_US: | - Pages matching these patterns will be skipped. Example: blog/*, about/* - zh_Hans: 匹配这些模式的页面将被跳过。示例:blog/*, about/* - form: form - - name: includePaths - type: string - required: false - label: - en_US: URL patterns to include - zh_Hans: 要包含的URL模式 - placeholder: - en_US: Use commas to separate multiple tags - zh_Hans: 多个标签时使用半角逗号分隔 - human_description: - en_US: | - Only pages matching these patterns will be crawled. Example: blog/*, about/* - zh_Hans: 只有与这些模式匹配的页面才会被爬取。示例:blog/*, about/* - form: form - - name: maxDepth - type: number - label: - en_US: Maximum crawl depth - zh_Hans: 爬取深度 - human_description: - en_US: Maximum depth to crawl relative to the entered URL. A maxDepth of 0 scrapes only the entered URL. A maxDepth of 1 scrapes the entered URL and all pages one level deep. A maxDepth of 2 scrapes the entered URL and all pages up to two levels deep. Higher values follow the same pattern. 
- zh_Hans: 相对于输入的URL,爬取的最大深度。maxDepth为0时,仅抓取输入的URL。maxDepth为1时,抓取输入的URL以及所有一级深层页面。maxDepth为2时,抓取输入的URL以及所有两级深层页面。更高值遵循相同模式。 - form: form - min: 0 - default: 2 - - name: ignoreSitemap - type: boolean - default: true - label: - en_US: ignore Sitemap - zh_Hans: 忽略站点地图 - human_description: - en_US: Ignore the website sitemap when crawling. - zh_Hans: 爬取时忽略网站站点地图。 - form: form - - name: limit - type: number - required: false - label: - en_US: Maximum pages to crawl - zh_Hans: 最大爬取页面数 - human_description: - en_US: Specify the maximum number of pages to crawl. The crawler will stop after reaching this limit. - zh_Hans: 指定要爬取的最大页面数。爬虫将在达到此限制后停止。 - form: form - min: 1 - default: 5 - - name: allowBackwardLinks - type: boolean - default: false - label: - en_US: allow Backward Crawling - zh_Hans: 允许向后爬取 - human_description: - en_US: Enables the crawler to navigate from a specific URL to previously linked pages. For instance, from 'example.com/product/123' back to 'example.com/product' - zh_Hans: 使爬虫能够从特定URL导航到之前链接的页面。例如,从'example.com/product/123'返回到'example.com/product' - form: form - - name: allowExternalLinks - type: boolean - default: false - label: - en_US: allow External Content Links - zh_Hans: 允许爬取外链 - human_description: - en_US: Allows the crawler to follow links to external websites. - zh_Hans: - form: form - - name: webhook - type: string - label: - en_US: webhook - human_description: - en_US: | - The URL to send the webhook to. This will trigger for crawl started (crawl.started) ,every page crawled (crawl.page) and when the crawl is completed (crawl.completed or crawl.failed). The response will be the same as the /scrape endpoint. 
- zh_Hans: 发送Webhook的URL。这将在开始爬取(crawl.started)、每爬取一个页面(crawl.page)以及爬取完成(crawl.completed或crawl.failed)时触发。响应将与/scrape端点相同。 - form: form -############## Scrape Options ####################### - - name: formats - type: string - label: - en_US: Formats - zh_Hans: 结果的格式 - placeholder: - en_US: Use commas to separate multiple tags - zh_Hans: 多个标签时使用半角逗号分隔 - human_description: - en_US: | - Formats to include in the output. Available options: markdown, html, rawHtml, links, screenshot - zh_Hans: | - 输出中应包含的格式。可以填入: markdown, html, rawHtml, links, screenshot - form: form - - name: headers - type: string - label: - en_US: headers - zh_Hans: 请求头 - human_description: - en_US: | - Headers to send with the request. Can be used to send cookies, user-agent, etc. Example: {"cookies": "testcookies"} - zh_Hans: | - 随请求发送的头部。可以用来发送cookies、用户代理等。示例:{"cookies": "testcookies"} - placeholder: - en_US: Please enter an object that can be serialized in JSON - zh_Hans: 请输入可以json序列化的对象 - form: form - - name: includeTags - type: string - label: - en_US: Include Tags - zh_Hans: 仅抓取这些标签 - placeholder: - en_US: Use commas to separate multiple tags - zh_Hans: 多个标签时使用半角逗号分隔 - human_description: - en_US: | - Only include tags, classes and ids from the page in the final output. Use comma separated values. Example: script, .ad, #footer - zh_Hans: | - 仅在最终输出中包含HTML页面的这些标签,可以通过标签名、类或ID来设定,使用逗号分隔值。示例:script, .ad, #footer - form: form - - name: excludeTags - type: string - label: - en_US: Exclude Tags - zh_Hans: 要移除这些标签 - human_description: - en_US: | - Tags, classes and ids to remove from the page. Use comma separated values. 
Example: script, .ad, #footer - zh_Hans: | - 要在最终输出中移除HTML页面的这些标签,可以通过标签名、类或ID来设定,使用逗号分隔值。示例:script, .ad, #footer - placeholder: - en_US: Use commas to separate multiple tags - zh_Hans: 多个标签时使用半角逗号分隔 - form: form - - name: onlyMainContent - type: boolean - default: false - label: - en_US: only Main Content - zh_Hans: 仅抓取主要内容 - human_description: - en_US: Only return the main content of the page excluding headers, navs, footers, etc. - zh_Hans: 只返回页面的主要内容,不包括头部、导航栏、尾部等。 - form: form - - name: waitFor - type: number - min: 0 - label: - en_US: wait For - zh_Hans: 等待时间 - human_description: - en_US: Wait x amount of milliseconds for the page to load to fetch content. - zh_Hans: 等待x毫秒以使页面加载并获取内容。 - form: form diff --git a/api/core/tools/provider/builtin/firecrawl/tools/crawl_job.py b/api/core/tools/provider/builtin/firecrawl/tools/crawl_job.py deleted file mode 100644 index 0d2486c7ca4426..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/tools/crawl_job.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.provider.builtin.firecrawl.firecrawl_appx import FirecrawlApp -from core.tools.tool.builtin_tool import BuiltinTool - - -class CrawlJobTool(BuiltinTool): - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - app = FirecrawlApp( - api_key=self.runtime.credentials["firecrawl_api_key"], base_url=self.runtime.credentials["base_url"] - ) - operation = tool_parameters.get("operation", "get") - if operation == "get": - result = app.check_crawl_status(job_id=tool_parameters["job_id"]) - elif operation == "cancel": - result = app.cancel_crawl_job(job_id=tool_parameters["job_id"]) - else: - raise ValueError(f"Invalid operation: {operation}") - - return self.create_json_message(result) diff --git a/api/core/tools/provider/builtin/firecrawl/tools/crawl_job.yaml b/api/core/tools/provider/builtin/firecrawl/tools/crawl_job.yaml deleted file mode 
100644 index 78008e4ad4d8a6..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/tools/crawl_job.yaml +++ /dev/null @@ -1,37 +0,0 @@ -identity: - name: crawl_job - author: hjlarry - label: - en_US: Crawl Job - zh_Hans: 爬取任务处理 -description: - human: - en_US: Retrieve the scraping results based on the job ID, or cancel the scraping task. - zh_Hans: 根据爬取任务ID获取爬取结果,或者取消爬取任务 - llm: Retrieve the scraping results based on the job ID, or cancel the scraping task. -parameters: - - name: job_id - type: string - required: true - label: - en_US: Job ID - human_description: - en_US: Set wait_for_results to false in the Crawl tool can get the job ID. - zh_Hans: 在深度爬取工具中将等待爬取结果设置为否可以获取Job ID。 - llm_description: Set wait_for_results to false in the Crawl tool can get the job ID. - form: llm - - name: operation - type: select - required: true - options: - - value: get - label: - en_US: get crawl status - - value: cancel - label: - en_US: cancel crawl job - label: - en_US: operation - zh_Hans: 操作 - llm_description: choose the operation to perform. `get` is for getting the crawl status, `cancel` is for cancelling the crawl job. 
- form: llm diff --git a/api/core/tools/provider/builtin/firecrawl/tools/map.py b/api/core/tools/provider/builtin/firecrawl/tools/map.py deleted file mode 100644 index bdfb5faeb8e2c9..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/tools/map.py +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.provider.builtin.firecrawl.firecrawl_appx import FirecrawlApp -from core.tools.tool.builtin_tool import BuiltinTool - - -class MapTool(BuiltinTool): - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: - """ - the api doc: - https://docs.firecrawl.dev/api-reference/endpoint/map - """ - app = FirecrawlApp( - api_key=self.runtime.credentials["firecrawl_api_key"], base_url=self.runtime.credentials["base_url"] - ) - payload = {} - payload["search"] = tool_parameters.get("search") - payload["ignoreSitemap"] = tool_parameters.get("ignoreSitemap", True) - payload["includeSubdomains"] = tool_parameters.get("includeSubdomains", False) - payload["limit"] = tool_parameters.get("limit", 5000) - - map_result = app.map(url=tool_parameters["url"], **payload) - - return self.create_json_message(map_result) diff --git a/api/core/tools/provider/builtin/firecrawl/tools/map.yaml b/api/core/tools/provider/builtin/firecrawl/tools/map.yaml deleted file mode 100644 index 9913756983370a..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/tools/map.yaml +++ /dev/null @@ -1,59 +0,0 @@ -identity: - name: map - author: hjlarry - label: - en_US: Map - zh_Hans: 地图式快爬 -description: - human: - en_US: Input a website and get all the urls on the website - extremly fast - zh_Hans: 输入一个网站,快速获取网站上的所有网址。 - llm: Input a website and get all the urls on the website - extremly fast -parameters: - - name: url - type: string - required: true - label: - en_US: Start URL - zh_Hans: 起始URL - human_description: - en_US: The base URL to start crawling from. 
- zh_Hans: 要爬取网站的起始URL。 - llm_description: The URL of the website that needs to be crawled. This is a required parameter. - form: llm - - name: search - type: string - label: - en_US: search - zh_Hans: 搜索查询 - human_description: - en_US: Search query to use for mapping. During the Alpha phase, the 'smart' part of the search functionality is limited to 100 search results. However, if map finds more results, there is no limit applied. - zh_Hans: 用于映射的搜索查询。在Alpha阶段,搜索功能的“智能”部分限制为最多100个搜索结果。然而,如果地图找到了更多结果,则不施加任何限制。 - llm_description: Search query to use for mapping. During the Alpha phase, the 'smart' part of the search functionality is limited to 100 search results. However, if map finds more results, there is no limit applied. - form: llm -############## Page Options ####################### - - name: ignoreSitemap - type: boolean - default: true - label: - en_US: ignore Sitemap - zh_Hans: 忽略站点地图 - human_description: - en_US: Ignore the website sitemap when crawling. - zh_Hans: 爬取时忽略网站站点地图。 - form: form - - name: includeSubdomains - type: boolean - default: false - label: - en_US: include Subdomains - zh_Hans: 包含子域名 - form: form - - name: limit - type: number - min: 0 - default: 5000 - label: - en_US: Maximum results - zh_Hans: 最大结果数量 - form: form diff --git a/api/core/tools/provider/builtin/firecrawl/tools/scrape.py b/api/core/tools/provider/builtin/firecrawl/tools/scrape.py deleted file mode 100644 index 538b4a1fcbf056..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/tools/scrape.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.provider.builtin.firecrawl.firecrawl_appx import FirecrawlApp, get_array_params, get_json_params -from core.tools.tool.builtin_tool import BuiltinTool - - -class ScrapeTool(BuiltinTool): - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> list[ToolInvokeMessage]: - """ - the api doc: - 
https://docs.firecrawl.dev/api-reference/endpoint/scrape - """ - app = FirecrawlApp( - api_key=self.runtime.credentials["firecrawl_api_key"], base_url=self.runtime.credentials["base_url"] - ) - - payload = {} - extract = {} - - payload["formats"] = get_array_params(tool_parameters, "formats") - payload["onlyMainContent"] = tool_parameters.get("onlyMainContent", True) - payload["includeTags"] = get_array_params(tool_parameters, "includeTags") - payload["excludeTags"] = get_array_params(tool_parameters, "excludeTags") - payload["headers"] = get_json_params(tool_parameters, "headers") - payload["waitFor"] = tool_parameters.get("waitFor", 0) - payload["timeout"] = tool_parameters.get("timeout", 30000) - - extract["schema"] = get_json_params(tool_parameters, "schema") - extract["systemPrompt"] = tool_parameters.get("systemPrompt") - extract["prompt"] = tool_parameters.get("prompt") - extract = {k: v for k, v in extract.items() if v not in {None, ""}} - payload["extract"] = extract or None - - payload = {k: v for k, v in payload.items() if v not in {None, ""}} - - crawl_result = app.scrape_url(url=tool_parameters["url"], **payload) - markdown_result = crawl_result.get("data", {}).get("markdown", "") - return [self.create_text_message(markdown_result), self.create_json_message(crawl_result)] diff --git a/api/core/tools/provider/builtin/firecrawl/tools/scrape.yaml b/api/core/tools/provider/builtin/firecrawl/tools/scrape.yaml deleted file mode 100644 index 8f1f1348a459ca..00000000000000 --- a/api/core/tools/provider/builtin/firecrawl/tools/scrape.yaml +++ /dev/null @@ -1,152 +0,0 @@ -identity: - name: scrape - author: ahasasjeb - label: - en_US: Scrape - zh_Hans: 单页面抓取 -description: - human: - en_US: Turn any url into clean data. - zh_Hans: 将任何网址转换为干净的数据。 - llm: This tool is designed to scrape URL and output the content in Markdown format. 
-parameters: - - name: url - type: string - required: true - label: - en_US: URL to scrape - zh_Hans: 要抓取的URL - human_description: - en_US: The URL of the website to scrape and extract data from. - zh_Hans: 要抓取并提取数据的网站URL。 - llm_description: The URL of the website that needs to be crawled. This is a required parameter. - form: llm -############## Payload ####################### - - name: formats - type: string - label: - en_US: Formats - zh_Hans: 结果的格式 - placeholder: - en_US: Use commas to separate multiple tags - zh_Hans: 多个标签时使用半角逗号分隔 - human_description: - en_US: | - Formats to include in the output. Available options: markdown, html, rawHtml, links, screenshot, extract, screenshot@fullPage - zh_Hans: | - 输出中应包含的格式。可以填入: markdown, html, rawHtml, links, screenshot, extract, screenshot@fullPage - form: form - - name: onlyMainContent - type: boolean - default: false - label: - en_US: only Main Content - zh_Hans: 仅抓取主要内容 - human_description: - en_US: Only return the main content of the page excluding headers, navs, footers, etc. - zh_Hans: 只返回页面的主要内容,不包括头部、导航栏、尾部等。 - form: form - - name: includeTags - type: string - label: - en_US: Include Tags - zh_Hans: 仅抓取这些标签 - placeholder: - en_US: Use commas to separate multiple tags - zh_Hans: 多个标签时使用半角逗号分隔 - human_description: - en_US: | - Only include tags, classes and ids from the page in the final output. Use comma separated values. Example: script, .ad, #footer - zh_Hans: | - 仅在最终输出中包含HTML页面的这些标签,可以通过标签名、类或ID来设定,使用逗号分隔值。示例:script, .ad, #footer - form: form - - name: excludeTags - type: string - label: - en_US: Exclude Tags - zh_Hans: 要移除这些标签 - human_description: - en_US: | - Tags, classes and ids to remove from the page. Use comma separated values. 
Example: script, .ad, #footer - zh_Hans: | - 要在最终输出中移除HTML页面的这些标签,可以通过标签名、类或ID来设定,使用逗号分隔值。示例:script, .ad, #footer - placeholder: - en_US: Use commas to separate multiple tags - zh_Hans: 多个标签时使用半角逗号分隔 - form: form - - name: headers - type: string - label: - en_US: headers - zh_Hans: 请求头 - human_description: - en_US: | - Headers to send with the request. Can be used to send cookies, user-agent, etc. Example: {"cookies": "testcookies"} - zh_Hans: | - 随请求发送的头部。可以用来发送cookies、用户代理等。示例:{"cookies": "testcookies"} - placeholder: - en_US: Please enter an object that can be serialized in JSON - zh_Hans: 请输入可以json序列化的对象 - form: form - - name: waitFor - type: number - min: 0 - default: 0 - label: - en_US: wait For - zh_Hans: 等待时间 - human_description: - en_US: Wait x amount of milliseconds for the page to load to fetch content. - zh_Hans: 等待x毫秒以使页面加载并获取内容。 - form: form - - name: timeout - type: number - min: 0 - default: 30000 - label: - en_US: Timeout - human_description: - en_US: Timeout in milliseconds for the request. - zh_Hans: 请求的超时时间(以毫秒为单位)。 - form: form -############## Extractor Options ####################### - - name: schema - type: string - label: - en_US: Extractor Schema - zh_Hans: 提取时的结构 - placeholder: - en_US: Please enter an object that can be serialized in JSON - zh_Hans: 请输入可以json序列化的对象 - human_description: - en_US: | - The schema for the data to be extracted. Example: { - "type": "object", - "properties": {"company_mission": {"type": "string"}}, - "required": ["company_mission"] - } - zh_Hans: | - 使用该结构去提取,示例:{ - "type": "object", - "properties": {"company_mission": {"type": "string"}}, - "required": ["company_mission"] - } - form: form - - name: systemPrompt - type: string - label: - en_US: Extractor System Prompt - zh_Hans: 提取时的系统提示词 - human_description: - en_US: The system prompt to use for the extraction. 
- zh_Hans: 用于提取的系统提示。 - form: form - - name: prompt - type: string - label: - en_US: Extractor Prompt - zh_Hans: 提取时的提示词 - human_description: - en_US: The prompt to use for the extraction without a schema. - zh_Hans: 用于无schema时提取的提示词 - form: form diff --git a/api/core/tools/provider/builtin/gaode/_assets/icon.svg b/api/core/tools/provider/builtin/gaode/_assets/icon.svg deleted file mode 100644 index 0f5729e17aea8d..00000000000000 --- a/api/core/tools/provider/builtin/gaode/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/gaode/gaode.py b/api/core/tools/provider/builtin/gaode/gaode.py deleted file mode 100644 index 49a8e537fb9070..00000000000000 --- a/api/core/tools/provider/builtin/gaode/gaode.py +++ /dev/null @@ -1,28 +0,0 @@ -import urllib.parse - -import requests - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class GaodeProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - if "api_key" not in credentials or not credentials.get("api_key"): - raise ToolProviderCredentialValidationError("Gaode API key is required.") - - try: - response = requests.get( - url="https://restapi.amap.com/v3/geocode/geo?address={address}&key={apikey}".format( - address=urllib.parse.quote("广东省广州市天河区广州塔"), apikey=credentials.get("api_key") - ) - ) - if response.status_code == 200 and (response.json()).get("info") == "OK": - pass - else: - raise ToolProviderCredentialValidationError((response.json()).get("info")) - except Exception as e: - raise ToolProviderCredentialValidationError("Gaode API Key is invalid. 
{}".format(e)) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/gaode/gaode.yaml b/api/core/tools/provider/builtin/gaode/gaode.yaml deleted file mode 100644 index 2eb3b161a29915..00000000000000 --- a/api/core/tools/provider/builtin/gaode/gaode.yaml +++ /dev/null @@ -1,34 +0,0 @@ -identity: - author: CharlieWei - name: gaode - label: - en_US: Autonavi - zh_Hans: 高德 - pt_BR: Autonavi - description: - en_US: Autonavi Open Platform service toolkit. - zh_Hans: 高德开放平台服务工具包。 - pt_BR: Kit de ferramentas de serviço Autonavi Open Platform. - icon: icon.svg - tags: - - utilities - - productivity - - travel - - weather -credentials_for_provider: - api_key: - type: secret-input - required: true - label: - en_US: API Key - zh_Hans: API Key - pt_BR: Fogo a chave - placeholder: - en_US: Please enter your Autonavi API Key - zh_Hans: 请输入你的高德开放平台 API Key - pt_BR: Insira sua chave de API Autonavi - help: - en_US: Get your API Key from Autonavi - zh_Hans: 从高德获取您的 API Key - pt_BR: Obtenha sua chave de API do Autonavi - url: https://console.amap.com/dev/key/app diff --git a/api/core/tools/provider/builtin/gaode/tools/gaode_weather.py b/api/core/tools/provider/builtin/gaode/tools/gaode_weather.py deleted file mode 100644 index ea06e2ce611cbc..00000000000000 --- a/api/core/tools/provider/builtin/gaode/tools/gaode_weather.py +++ /dev/null @@ -1,64 +0,0 @@ -import json -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GaodeRepositoriesTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - city = tool_parameters.get("city", "") - if not city: - return self.create_text_message("Please tell me your city") - - if "api_key" not in self.runtime.credentials or not 
self.runtime.credentials.get("api_key"): - return self.create_text_message("Gaode API key is required.") - - try: - s = requests.session() - api_domain = "https://restapi.amap.com/v3" - city_response = s.request( - method="GET", - headers={"Content-Type": "application/json; charset=utf-8"}, - url="{url}/config/district?keywords={keywords}&subdistrict=0&extensions=base&key={apikey}".format( - url=api_domain, keywords=city, apikey=self.runtime.credentials.get("api_key") - ), - ) - City_data = city_response.json() - if city_response.status_code == 200 and City_data.get("info") == "OK": - if len(City_data.get("districts")) > 0: - CityCode = City_data["districts"][0]["adcode"] - weatherInfo_response = s.request( - method="GET", - url="{url}/weather/weatherInfo?city={citycode}&extensions=all&key={apikey}&output=json" - "".format(url=api_domain, citycode=CityCode, apikey=self.runtime.credentials.get("api_key")), - ) - weatherInfo_data = weatherInfo_response.json() - if weatherInfo_response.status_code == 200 and weatherInfo_data.get("info") == "OK": - contents = [] - if len(weatherInfo_data.get("forecasts")) > 0: - for item in weatherInfo_data["forecasts"][0]["casts"]: - content = {} - content["date"] = item.get("date") - content["week"] = item.get("week") - content["dayweather"] = item.get("dayweather") - content["daytemp_float"] = item.get("daytemp_float") - content["daywind"] = item.get("daywind") - content["nightweather"] = item.get("nightweather") - content["nighttemp_float"] = item.get("nighttemp_float") - contents.append(content) - s.close() - return self.create_text_message( - self.summary(user_id=user_id, content=json.dumps(contents, ensure_ascii=False)) - ) - s.close() - return self.create_text_message(f"No weather information for {city} was found.") - except Exception as e: - return self.create_text_message("Gaode API Key and Api Version is invalid. 
{}".format(e)) diff --git a/api/core/tools/provider/builtin/gaode/tools/gaode_weather.yaml b/api/core/tools/provider/builtin/gaode/tools/gaode_weather.yaml deleted file mode 100644 index e41851e188edee..00000000000000 --- a/api/core/tools/provider/builtin/gaode/tools/gaode_weather.yaml +++ /dev/null @@ -1,28 +0,0 @@ -identity: - name: gaode_weather - author: CharlieWei - label: - en_US: Weather Forecast - zh_Hans: 天气预报 - pt_BR: Previsão do tempo - icon: icon.svg -description: - human: - en_US: Weather forecast inquiry - zh_Hans: 天气预报查询。 - pt_BR: Inquérito sobre previsão meteorológica. - llm: A tool when you want to ask about the weather or weather-related question. -parameters: - - name: city - type: string - required: true - label: - en_US: city - zh_Hans: 城市 - pt_BR: cidade - human_description: - en_US: Target city for weather forecast query. - zh_Hans: 天气预报查询的目标城市。 - pt_BR: Cidade de destino para consulta de previsão do tempo. - llm_description: If you don't know you can extract the city name from the question or you can reply:Please tell me your city. You have to extract the Chinese city name from the question. 
- form: llm diff --git a/api/core/tools/provider/builtin/getimgai/_assets/icon.svg b/api/core/tools/provider/builtin/getimgai/_assets/icon.svg deleted file mode 100644 index 6b2513386da458..00000000000000 --- a/api/core/tools/provider/builtin/getimgai/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/getimgai/getimgai.py b/api/core/tools/provider/builtin/getimgai/getimgai.py deleted file mode 100644 index bbd07d120fd0ea..00000000000000 --- a/api/core/tools/provider/builtin/getimgai/getimgai.py +++ /dev/null @@ -1,19 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.getimgai.tools.text2image import Text2ImageTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class GetImgAIProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - # Example validation using the text2image tool - Text2ImageTool().fork_tool_runtime(runtime={"credentials": credentials}).invoke( - user_id="", - tool_parameters={ - "prompt": "A fire egg", - "response_format": "url", - "style": "photorealism", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/getimgai/getimgai.yaml b/api/core/tools/provider/builtin/getimgai/getimgai.yaml deleted file mode 100644 index c9db0a9e22a6c4..00000000000000 --- a/api/core/tools/provider/builtin/getimgai/getimgai.yaml +++ /dev/null @@ -1,29 +0,0 @@ -identity: - author: Matri Qi - name: getimgai - label: - en_US: getimg.ai - zh_CN: getimg.ai - description: - en_US: GetImg API integration for image generation and scraping. 
- icon: icon.svg - tags: - - image -credentials_for_provider: - getimg_api_key: - type: secret-input - required: true - label: - en_US: getimg.ai API Key - placeholder: - en_US: Please input your getimg.ai API key - help: - en_US: Get your getimg.ai API key from your getimg.ai account settings. If you are using a self-hosted version, you may enter any key at your convenience. - url: https://dashboard.getimg.ai/api-keys - base_url: - type: text-input - required: false - label: - en_US: getimg.ai server's Base URL - placeholder: - en_US: https://api.getimg.ai/v1 diff --git a/api/core/tools/provider/builtin/getimgai/getimgai_appx.py b/api/core/tools/provider/builtin/getimgai/getimgai_appx.py deleted file mode 100644 index 0e95a5f654505f..00000000000000 --- a/api/core/tools/provider/builtin/getimgai/getimgai_appx.py +++ /dev/null @@ -1,55 +0,0 @@ -import logging -import time -from collections.abc import Mapping -from typing import Any - -import requests -from requests.exceptions import HTTPError - -logger = logging.getLogger(__name__) - - -class GetImgAIApp: - def __init__(self, api_key: str | None = None, base_url: str | None = None): - self.api_key = api_key - self.base_url = base_url or "https://api.getimg.ai/v1" - if not self.api_key: - raise ValueError("API key is required") - - def _prepare_headers(self): - headers = {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} - return headers - - def _request( - self, - method: str, - url: str, - data: Mapping[str, Any] | None = None, - headers: Mapping[str, str] | None = None, - retries: int = 3, - backoff_factor: float = 0.3, - ) -> Mapping[str, Any] | None: - for i in range(retries): - try: - response = requests.request(method, url, json=data, headers=headers) - response.raise_for_status() - return response.json() - except requests.exceptions.RequestException as e: - if i < retries - 1 and isinstance(e, HTTPError) and e.response.status_code >= 500: - time.sleep(backoff_factor * (2**i)) - 
else: - raise - return None - - def text2image(self, mode: str, **kwargs): - data = kwargs["params"] - if not data.get("prompt"): - raise ValueError("Prompt is required") - - endpoint = f"{self.base_url}/{mode}/text-to-image" - headers = self._prepare_headers() - logger.debug(f"Send request to {endpoint=} body={data}") - response = self._request("POST", endpoint, data, headers) - if response is None: - raise HTTPError("Failed to initiate getimg.ai after multiple retries") - return response diff --git a/api/core/tools/provider/builtin/getimgai/tools/text2image.py b/api/core/tools/provider/builtin/getimgai/tools/text2image.py deleted file mode 100644 index c556749552c8ef..00000000000000 --- a/api/core/tools/provider/builtin/getimgai/tools/text2image.py +++ /dev/null @@ -1,39 +0,0 @@ -import json -from typing import Any, Union - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.provider.builtin.getimgai.getimgai_appx import GetImgAIApp -from core.tools.tool.builtin_tool import BuiltinTool - - -class Text2ImageTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - app = GetImgAIApp( - api_key=self.runtime.credentials["getimg_api_key"], base_url=self.runtime.credentials["base_url"] - ) - - options = { - "style": tool_parameters.get("style"), - "prompt": tool_parameters.get("prompt"), - "aspect_ratio": tool_parameters.get("aspect_ratio"), - "output_format": tool_parameters.get("output_format", "jpeg"), - "response_format": tool_parameters.get("response_format", "url"), - "width": tool_parameters.get("width"), - "height": tool_parameters.get("height"), - "steps": tool_parameters.get("steps"), - "negative_prompt": tool_parameters.get("negative_prompt"), - "prompt_2": tool_parameters.get("prompt_2"), - } - options = {k: v for k, v in options.items() if v} - - text2image_result = app.text2image(mode=tool_parameters.get("mode", "essential-v2"), 
params=options, wait=True) - - if not isinstance(text2image_result, str): - text2image_result = json.dumps(text2image_result, ensure_ascii=False, indent=4) - - if not text2image_result: - return self.create_text_message("getimg.ai request failed.") - - return self.create_text_message(text2image_result) diff --git a/api/core/tools/provider/builtin/getimgai/tools/text2image.yaml b/api/core/tools/provider/builtin/getimgai/tools/text2image.yaml deleted file mode 100644 index d972186f56d6a6..00000000000000 --- a/api/core/tools/provider/builtin/getimgai/tools/text2image.yaml +++ /dev/null @@ -1,167 +0,0 @@ -identity: - name: text2image - author: Matri Qi - label: - en_US: text2image - icon: icon.svg -description: - human: - en_US: Generate image via getimg.ai. - llm: This tool is used to generate image from prompt or image via https://getimg.ai. -parameters: - - name: prompt - type: string - required: true - label: - en_US: prompt - human_description: - en_US: The text prompt used to generate the image. The getimg.aier will generate an image based on this prompt. - llm_description: this prompt text will be used to generate image. - form: llm - - name: mode - type: select - required: false - label: - en_US: mode - human_description: - en_US: The getimg.ai mode to use. The mode determines the endpoint used to generate the image. - form: form - options: - - value: "essential-v2" - label: - en_US: essential-v2 - - value: stable-diffusion-xl - label: - en_US: stable-diffusion-xl - - value: stable-diffusion - label: - en_US: stable-diffusion - - value: latent-consistency - label: - en_US: latent-consistency - - name: style - type: select - required: false - label: - en_US: style - human_description: - en_US: The style preset to use. The style preset guides the generation towards a particular style. It's just efficient for `Essential V2` mode. 
- form: form - options: - - value: photorealism - label: - en_US: photorealism - - value: anime - label: - en_US: anime - - value: art - label: - en_US: art - - name: aspect_ratio - type: select - required: false - label: - en_US: "aspect ratio" - human_description: - en_US: The aspect ratio of the generated image. It's just efficient for `Essential V2` mode. - form: form - options: - - value: "1:1" - label: - en_US: "1:1" - - value: "4:5" - label: - en_US: "4:5" - - value: "5:4" - label: - en_US: "5:4" - - value: "2:3" - label: - en_US: "2:3" - - value: "3:2" - label: - en_US: "3:2" - - value: "4:7" - label: - en_US: "4:7" - - value: "7:4" - label: - en_US: "7:4" - - name: output_format - type: select - required: false - label: - en_US: "output format" - human_description: - en_US: The file format of the generated image. - form: form - options: - - value: jpeg - label: - en_US: jpeg - - value: png - label: - en_US: png - - name: response_format - type: select - required: false - label: - en_US: "response format" - human_description: - en_US: The format in which the generated images are returned. Must be one of url or b64. URLs are only valid for 1 hour after the image has been generated. - form: form - options: - - value: url - label: - en_US: url - - value: b64 - label: - en_US: b64 - - name: model - type: string - required: false - label: - en_US: model - human_description: - en_US: Model ID supported by this pipeline and family. It's just efficient for `Stable Diffusion XL`, `Stable Diffusion`, `Latent Consistency` mode. - form: form - - name: negative_prompt - type: string - required: false - label: - en_US: negative prompt - human_description: - en_US: Text input that will not guide the image generation. It's just efficient for `Stable Diffusion XL`, `Stable Diffusion`, `Latent Consistency` mode. 
- form: form - - name: prompt_2 - type: string - required: false - label: - en_US: prompt2 - human_description: - en_US: Prompt sent to second tokenizer and text encoder. If not defined, prompt is used in both text-encoders. It's just efficient for `Stable Diffusion XL` mode. - form: form - - name: width - type: number - required: false - label: - en_US: width - human_description: - en_US: he width of the generated image in pixels. Width needs to be multiple of 64. - form: form - - name: height - type: number - required: false - label: - en_US: height - human_description: - en_US: he height of the generated image in pixels. Height needs to be multiple of 64. - form: form - - name: steps - type: number - required: false - label: - en_US: steps - human_description: - en_US: The number of denoising steps. More steps usually can produce higher quality images, but take more time to generate. It's just efficient for `Stable Diffusion XL`, `Stable Diffusion`, `Latent Consistency` mode. - form: form diff --git a/api/core/tools/provider/builtin/github/_assets/icon.svg b/api/core/tools/provider/builtin/github/_assets/icon.svg deleted file mode 100644 index d56adb2c2f9955..00000000000000 --- a/api/core/tools/provider/builtin/github/_assets/icon.svg +++ /dev/null @@ -1,17 +0,0 @@ - - - github [#142] - Created with Sketch. 
- - - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/github/github.py b/api/core/tools/provider/builtin/github/github.py deleted file mode 100644 index 87a34ac3e806ea..00000000000000 --- a/api/core/tools/provider/builtin/github/github.py +++ /dev/null @@ -1,32 +0,0 @@ -import requests - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class GithubProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - if "access_tokens" not in credentials or not credentials.get("access_tokens"): - raise ToolProviderCredentialValidationError("Github API Access Tokens is required.") - if "api_version" not in credentials or not credentials.get("api_version"): - api_version = "2022-11-28" - else: - api_version = credentials.get("api_version") - - try: - headers = { - "Content-Type": "application/vnd.github+json", - "Authorization": f"Bearer {credentials.get('access_tokens')}", - "X-GitHub-Api-Version": api_version, - } - - response = requests.get( - url="https://api.github.com/search/users?q={account}".format(account="charli117"), headers=headers - ) - if response.status_code != 200: - raise ToolProviderCredentialValidationError((response.json()).get("message")) - except Exception as e: - raise ToolProviderCredentialValidationError("Github API Key and Api Version is invalid. 
{}".format(e)) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/github/github.yaml b/api/core/tools/provider/builtin/github/github.yaml deleted file mode 100644 index c3d85fc3f69cf7..00000000000000 --- a/api/core/tools/provider/builtin/github/github.yaml +++ /dev/null @@ -1,48 +0,0 @@ -identity: - author: CharlieWei - name: github - label: - en_US: Github - zh_Hans: Github - pt_BR: Github - description: - en_US: GitHub is an online software source code hosting service. - zh_Hans: GitHub是一个在线软件源代码托管服务平台。 - pt_BR: GitHub é uma plataforma online para serviços de hospedagem de código fonte de software. - icon: icon.svg - tags: - - utilities -credentials_for_provider: - access_tokens: - type: secret-input - required: true - label: - en_US: Access Tokens - zh_Hans: Access Tokens - pt_BR: Tokens de acesso - placeholder: - en_US: Please input your Github Access Tokens - zh_Hans: 请输入你的 Github Access Tokens - pt_BR: Insira seus Tokens de Acesso do Github - help: - en_US: Get your Access Tokens from Github - zh_Hans: 从 Github 获取您的 Access Tokens - pt_BR: Obtenha sua chave da API do Google no Google - url: https://github.com/settings/tokens?type=beta - api_version: - type: text-input - required: false - default: '2022-11-28' - label: - en_US: API Version - zh_Hans: API Version - pt_BR: Versão da API - placeholder: - en_US: Please input your Github API Version - zh_Hans: 请输入你的 Github API Version - pt_BR: Insira sua versão da API do Github - help: - en_US: Get your API Version from Github - zh_Hans: 从 Github 获取您的 API Version - pt_BR: Obtenha sua versão da API do Github - url: https://docs.github.com/en/rest/about-the-rest-api/api-versions?apiVersion=2022-11-28 diff --git a/api/core/tools/provider/builtin/github/tools/github_repositories.py b/api/core/tools/provider/builtin/github/tools/github_repositories.py deleted file mode 100644 index 32f9922e651785..00000000000000 --- 
a/api/core/tools/provider/builtin/github/tools/github_repositories.py +++ /dev/null @@ -1,70 +0,0 @@ -import json -from datetime import datetime -from typing import Any, Union -from urllib.parse import quote - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GithubRepositoriesTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - top_n = tool_parameters.get("top_n", 5) - query = tool_parameters.get("query", "") - if not query: - return self.create_text_message("Please input symbol") - - if "access_tokens" not in self.runtime.credentials or not self.runtime.credentials.get("access_tokens"): - return self.create_text_message("Github API Access Tokens is required.") - if "api_version" not in self.runtime.credentials or not self.runtime.credentials.get("api_version"): - api_version = "2022-11-28" - else: - api_version = self.runtime.credentials.get("api_version") - - try: - headers = { - "Content-Type": "application/vnd.github+json", - "Authorization": f"Bearer {self.runtime.credentials.get('access_tokens')}", - "X-GitHub-Api-Version": api_version, - } - s = requests.session() - api_domain = "https://api.github.com" - response = s.request( - method="GET", - headers=headers, - url=f"{api_domain}/search/repositories?q={quote(query)}&sort=stars&per_page={top_n}&order=desc", - ) - response_data = response.json() - if response.status_code == 200 and isinstance(response_data.get("items"), list): - contents = [] - if len(response_data.get("items")) > 0: - for item in response_data.get("items"): - content = {} - updated_at_object = datetime.strptime(item["updated_at"], "%Y-%m-%dT%H:%M:%SZ") - content["owner"] = item["owner"]["login"] - content["name"] = item["name"] - content["description"] = ( - item["description"][:100] + "..." 
if len(item["description"]) > 100 else item["description"] - ) - content["url"] = item["html_url"] - content["star"] = item["watchers"] - content["forks"] = item["forks"] - content["updated"] = updated_at_object.strftime("%Y-%m-%d") - contents.append(content) - s.close() - return self.create_text_message( - self.summary(user_id=user_id, content=json.dumps(contents, ensure_ascii=False)) - ) - else: - return self.create_text_message(f"No items related to {query} were found.") - else: - return self.create_text_message((response.json()).get("message")) - except Exception as e: - return self.create_text_message("Github API Key and Api Version is invalid. {}".format(e)) diff --git a/api/core/tools/provider/builtin/github/tools/github_repositories.yaml b/api/core/tools/provider/builtin/github/tools/github_repositories.yaml deleted file mode 100644 index c170aee797fe4d..00000000000000 --- a/api/core/tools/provider/builtin/github/tools/github_repositories.yaml +++ /dev/null @@ -1,42 +0,0 @@ -identity: - name: github_repositories - author: CharlieWei - label: - en_US: Search Repositories - zh_Hans: 仓库搜索 - pt_BR: Pesquisar Repositórios - icon: icon.svg -description: - human: - en_US: Search the Github repository to retrieve the open source projects you need - zh_Hans: 搜索Github仓库,检索你需要的开源项目。 - pt_BR: Pesquise o repositório do Github para recuperar os projetos de código aberto necessários. - llm: A tool when you wants to search for popular warehouses or open source projects for any keyword. format query condition like "keywords+language:js", language can be other dev languages. -parameters: - - name: query - type: string - required: true - label: - en_US: query - zh_Hans: 关键字 - pt_BR: consulta - human_description: - en_US: You want to find the project development language, keywords, For example. Find 10 Python developed PDF document parsing projects. 
- zh_Hans: 你想要找的项目开发语言、关键字,如:找10个Python开发的PDF文档解析项目。 - pt_BR: Você deseja encontrar a linguagem de desenvolvimento do projeto, palavras-chave, Por exemplo. Encontre 10 projetos de análise de documentos PDF desenvolvidos em Python. - llm_description: The query of you want to search, format query condition like "keywords+language:js", language can be other dev languages. - form: llm - - name: top_n - type: number - default: 5 - required: true - label: - en_US: Top N - zh_Hans: Top N - pt_BR: Topo N - human_description: - en_US: Number of records returned by sorting based on stars. 5 is returned by default. - zh_Hans: 基于stars排序返回的记录数, 默认返回5条。 - pt_BR: Número de registros retornados por classificação com base em estrelas. 5 é retornado por padrão. - llm_description: Extract the first N records from the returned result. - form: llm diff --git a/api/core/tools/provider/builtin/gitlab/_assets/gitlab.svg b/api/core/tools/provider/builtin/gitlab/_assets/gitlab.svg deleted file mode 100644 index 07734077d5d300..00000000000000 --- a/api/core/tools/provider/builtin/gitlab/_assets/gitlab.svg +++ /dev/null @@ -1,2 +0,0 @@ - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/gitlab/gitlab.py b/api/core/tools/provider/builtin/gitlab/gitlab.py deleted file mode 100644 index 9bd4a0bd52ea64..00000000000000 --- a/api/core/tools/provider/builtin/gitlab/gitlab.py +++ /dev/null @@ -1,32 +0,0 @@ -from typing import Any - -import requests - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class GitlabProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - if "access_tokens" not in credentials or not credentials.get("access_tokens"): - raise ToolProviderCredentialValidationError("Gitlab Access Tokens is required.") - - if "site_url" not in credentials or not credentials.get("site_url"): - 
site_url = "https://gitlab.com" - else: - site_url = credentials.get("site_url") - - try: - headers = { - "Content-Type": "application/vnd.text+json", - "Authorization": f"Bearer {credentials.get('access_tokens')}", - } - - response = requests.get(url=f"{site_url}/api/v4/user", headers=headers) - if response.status_code != 200: - raise ToolProviderCredentialValidationError((response.json()).get("message")) - except Exception as e: - raise ToolProviderCredentialValidationError("Gitlab Access Tokens is invalid. {}".format(e)) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/gitlab/gitlab.yaml b/api/core/tools/provider/builtin/gitlab/gitlab.yaml deleted file mode 100644 index 22d7ebf73ac2aa..00000000000000 --- a/api/core/tools/provider/builtin/gitlab/gitlab.yaml +++ /dev/null @@ -1,38 +0,0 @@ -identity: - author: Leo.Wang - name: gitlab - label: - en_US: GitLab - zh_Hans: GitLab - description: - en_US: GitLab plugin, API v4 only. 
- zh_Hans: 用于获取GitLab内容的插件,目前仅支持 API v4。 - icon: gitlab.svg -credentials_for_provider: - access_tokens: - type: secret-input - required: true - label: - en_US: GitLab access token - zh_Hans: GitLab access token - placeholder: - en_US: Please input your GitLab access token - zh_Hans: 请输入你的 GitLab access token - help: - en_US: Get your GitLab access token from GitLab - zh_Hans: 从 GitLab 获取您的 access token - url: https://docs.gitlab.com/16.9/ee/api/oauth2.html - site_url: - type: text-input - required: false - default: 'https://gitlab.com' - label: - en_US: GitLab site url - zh_Hans: GitLab site url - placeholder: - en_US: Please input your GitLab site url - zh_Hans: 请输入你的 GitLab site url - help: - en_US: Find your GitLab url - zh_Hans: 找到你的 GitLab url - url: https://gitlab.com/help diff --git a/api/core/tools/provider/builtin/gitlab/tools/gitlab_commits.py b/api/core/tools/provider/builtin/gitlab/tools/gitlab_commits.py deleted file mode 100644 index 45ab15f437e19a..00000000000000 --- a/api/core/tools/provider/builtin/gitlab/tools/gitlab_commits.py +++ /dev/null @@ -1,142 +0,0 @@ -import json -import urllib.parse -from datetime import datetime, timedelta -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GitlabCommitsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - project = tool_parameters.get("project", "") - repository = tool_parameters.get("repository", "") - employee = tool_parameters.get("employee", "") - start_time = tool_parameters.get("start_time", "") - end_time = tool_parameters.get("end_time", "") - change_type = tool_parameters.get("change_type", "all") - - if not project and not repository: - return self.create_text_message("Either project or repository is required") - - if not start_time: - start_time = (datetime.utcnow() - 
timedelta(days=1)).isoformat() - if not end_time: - end_time = datetime.utcnow().isoformat() - - access_token = self.runtime.credentials.get("access_tokens") - site_url = self.runtime.credentials.get("site_url") - - if "access_tokens" not in self.runtime.credentials or not self.runtime.credentials.get("access_tokens"): - return self.create_text_message("Gitlab API Access Tokens is required.") - if "site_url" not in self.runtime.credentials or not self.runtime.credentials.get("site_url"): - site_url = "https://gitlab.com" - - # Get commit content - if repository: - result = self.fetch_commits( - site_url, access_token, repository, employee, start_time, end_time, change_type, is_repository=True - ) - else: - result = self.fetch_commits( - site_url, access_token, project, employee, start_time, end_time, change_type, is_repository=False - ) - - return [self.create_json_message(item) for item in result] - - def fetch_commits( - self, - site_url: str, - access_token: str, - identifier: str, - employee: str, - start_time: str, - end_time: str, - change_type: str, - is_repository: bool, - ) -> list[dict[str, Any]]: - domain = site_url - headers = {"PRIVATE-TOKEN": access_token} - results = [] - - try: - if is_repository: - # URL encode the repository path - encoded_identifier = urllib.parse.quote(identifier, safe="") - commits_url = f"{domain}/api/v4/projects/{encoded_identifier}/repository/commits" - else: - # Get all projects - url = f"{domain}/api/v4/projects" - response = requests.get(url, headers=headers) - response.raise_for_status() - projects = response.json() - - filtered_projects = [p for p in projects if identifier == "*" or p["name"] == identifier] - - for project in filtered_projects: - project_id = project["id"] - project_name = project["name"] - print(f"Project: {project_name}") - - commits_url = f"{domain}/api/v4/projects/{project_id}/repository/commits" - - params = {"since": start_time, "until": end_time} - if employee: - params["author"] = employee - - 
commits_response = requests.get(commits_url, headers=headers, params=params) - commits_response.raise_for_status() - commits = commits_response.json() - - for commit in commits: - commit_sha = commit["id"] - author_name = commit["author_name"] - - if is_repository: - diff_url = f"{domain}/api/v4/projects/{encoded_identifier}/repository/commits/{commit_sha}/diff" - else: - diff_url = f"{domain}/api/v4/projects/{project_id}/repository/commits/{commit_sha}/diff" - - diff_response = requests.get(diff_url, headers=headers) - diff_response.raise_for_status() - diffs = diff_response.json() - - for diff in diffs: - # Calculate code lines of changes - added_lines = diff["diff"].count("\n+") - removed_lines = diff["diff"].count("\n-") - total_changes = added_lines + removed_lines - - if change_type == "new": - if added_lines > 1: - final_code = "".join( - [ - line[1:] - for line in diff["diff"].split("\n") - if line.startswith("+") and not line.startswith("+++") - ] - ) - results.append({"commit_sha": commit_sha, "author_name": author_name, "diff": final_code}) - else: - if total_changes > 1: - final_code = "".join( - [ - line[1:] - for line in diff["diff"].split("\n") - if (line.startswith("+") or line.startswith("-")) - and not line.startswith("+++") - and not line.startswith("---") - ] - ) - final_code_escaped = json.dumps(final_code)[1:-1] # Escape the final code - results.append( - {"commit_sha": commit_sha, "author_name": author_name, "diff": final_code_escaped} - ) - except requests.RequestException as e: - print(f"Error fetching data from GitLab: {e}") - - return results diff --git a/api/core/tools/provider/builtin/gitlab/tools/gitlab_commits.yaml b/api/core/tools/provider/builtin/gitlab/tools/gitlab_commits.yaml deleted file mode 100644 index 669378ac97c89a..00000000000000 --- a/api/core/tools/provider/builtin/gitlab/tools/gitlab_commits.yaml +++ /dev/null @@ -1,88 +0,0 @@ -identity: - name: gitlab_commits - author: Leo.Wang - label: - en_US: GitLab Commits - 
zh_Hans: GitLab 提交内容查询 -description: - human: - en_US: A tool for query GitLab commits, Input should be a exists username or project. - zh_Hans: 一个用于查询 GitLab 代码提交内容的工具,输入的内容应该是一个已存在的用户名或者项目名。 - llm: A tool for query GitLab commits, Input should be a exists username or project. -parameters: - - name: username - type: string - required: false - label: - en_US: username - zh_Hans: 员工用户名 - human_description: - en_US: username - zh_Hans: 员工用户名 - llm_description: User name for GitLab - form: llm - - name: repository - type: string - required: false - label: - en_US: repository - zh_Hans: 仓库路径 - human_description: - en_US: repository - zh_Hans: 仓库路径,以namespace/project_name的形式。 - llm_description: Repository path for GitLab, like namespace/project_name. - form: llm - - name: project - type: string - required: false - label: - en_US: project - zh_Hans: 项目名 - human_description: - en_US: project - zh_Hans: 项目名 - llm_description: project for GitLab - form: llm - - name: start_time - type: string - required: false - label: - en_US: start_time - zh_Hans: 开始时间 - human_description: - en_US: start_time - zh_Hans: 开始时间 - llm_description: Start time for GitLab - form: llm - - name: end_time - type: string - required: false - label: - en_US: end_time - zh_Hans: 结束时间 - human_description: - en_US: end_time - zh_Hans: 结束时间 - llm_description: End time for GitLab - form: llm - - name: change_type - type: select - required: false - options: - - value: all - label: - en_US: all - zh_Hans: 所有 - - value: new - label: - en_US: new - zh_Hans: 新增 - default: all - label: - en_US: change_type - zh_Hans: 变更类型 - human_description: - en_US: change_type - zh_Hans: 变更类型 - llm_description: Content change type for GitLab - form: llm diff --git a/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.py b/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.py deleted file mode 100644 index 1e77f3c6dfc678..00000000000000 --- a/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.py +++ 
/dev/null @@ -1,103 +0,0 @@ -import urllib.parse -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GitlabFilesTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - project = tool_parameters.get("project", "") - repository = tool_parameters.get("repository", "") - branch = tool_parameters.get("branch", "") - path = tool_parameters.get("path", "") - - if not project and not repository: - return self.create_text_message("Either project or repository is required") - if not branch: - return self.create_text_message("Branch is required") - if not path: - return self.create_text_message("Path is required") - - access_token = self.runtime.credentials.get("access_tokens") - site_url = self.runtime.credentials.get("site_url") - - if "access_tokens" not in self.runtime.credentials or not self.runtime.credentials.get("access_tokens"): - return self.create_text_message("Gitlab API Access Tokens is required.") - if "site_url" not in self.runtime.credentials or not self.runtime.credentials.get("site_url"): - site_url = "https://gitlab.com" - - # Get file content - if repository: - result = self.fetch_files(site_url, access_token, repository, branch, path, is_repository=True) - else: - result = self.fetch_files(site_url, access_token, project, branch, path, is_repository=False) - - return [self.create_json_message(item) for item in result] - - def fetch_files( - self, site_url: str, access_token: str, identifier: str, branch: str, path: str, is_repository: bool - ) -> list[dict[str, Any]]: - domain = site_url - headers = {"PRIVATE-TOKEN": access_token} - results = [] - - try: - if is_repository: - # URL encode the repository path - encoded_identifier = urllib.parse.quote(identifier, safe="") - tree_url = 
f"{domain}/api/v4/projects/{encoded_identifier}/repository/tree?path={path}&ref={branch}" - else: - # Get project ID from project name - project_id = self.get_project_id(site_url, access_token, identifier) - if not project_id: - return self.create_text_message(f"Project '{identifier}' not found.") - tree_url = f"{domain}/api/v4/projects/{project_id}/repository/tree?path={path}&ref={branch}" - - response = requests.get(tree_url, headers=headers) - response.raise_for_status() - items = response.json() - - for item in items: - item_path = item["path"] - if item["type"] == "tree": # It's a directory - results.extend( - self.fetch_files(site_url, access_token, identifier, branch, item_path, is_repository) - ) - else: # It's a file - if is_repository: - file_url = ( - f"{domain}/api/v4/projects/{encoded_identifier}/repository/files" - f"/{item_path}/raw?ref={branch}" - ) - else: - file_url = ( - f"{domain}/api/v4/projects/{project_id}/repository/files/{item_path}/raw?ref={branch}" - ) - - file_response = requests.get(file_url, headers=headers) - file_response.raise_for_status() - file_content = file_response.text - results.append({"path": item_path, "branch": branch, "content": file_content}) - except requests.RequestException as e: - print(f"Error fetching data from GitLab: {e}") - - return results - - def get_project_id(self, site_url: str, access_token: str, project_name: str) -> Union[str, None]: - headers = {"PRIVATE-TOKEN": access_token} - try: - url = f"{site_url}/api/v4/projects?search={project_name}" - response = requests.get(url, headers=headers) - response.raise_for_status() - projects = response.json() - for project in projects: - if project["name"] == project_name: - return project["id"] - except requests.RequestException as e: - print(f"Error fetching project ID from GitLab: {e}") - return None diff --git a/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.yaml b/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.yaml deleted file mode 100644 
index 4c733673f15254..00000000000000 --- a/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.yaml +++ /dev/null @@ -1,56 +0,0 @@ -identity: - name: gitlab_files - author: Leo.Wang - label: - en_US: GitLab Files - zh_Hans: GitLab 文件获取 -description: - human: - en_US: A tool for query GitLab files, Input should be branch and a exists file or directory path. - zh_Hans: 一个用于查询 GitLab 文件的工具,输入的内容应该是分支和一个已存在文件或者文件夹路径。 - llm: A tool for query GitLab files, Input should be a exists file or directory path. -parameters: - - name: repository - type: string - required: false - label: - en_US: repository - zh_Hans: 仓库路径 - human_description: - en_US: repository - zh_Hans: 仓库路径,以namespace/project_name的形式。 - llm_description: Repository path for GitLab, like namespace/project_name. - form: llm - - name: project - type: string - required: false - label: - en_US: project - zh_Hans: 项目 - human_description: - en_US: project - zh_Hans: 项目 - llm_description: Project for GitLab - form: llm - - name: branch - type: string - required: true - label: - en_US: branch - zh_Hans: 分支 - human_description: - en_US: branch - zh_Hans: 分支 - llm_description: Branch for GitLab - form: llm - - name: path - type: string - required: true - label: - en_US: path - zh_Hans: 文件路径 - human_description: - en_US: path - zh_Hans: 文件路径 - llm_description: File path for GitLab - form: llm diff --git a/api/core/tools/provider/builtin/google/_assets/icon.svg b/api/core/tools/provider/builtin/google/_assets/icon.svg deleted file mode 100644 index bebbf52d3a23a4..00000000000000 --- a/api/core/tools/provider/builtin/google/_assets/icon.svg +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/google/google.py b/api/core/tools/provider/builtin/google/google.py deleted file mode 100644 index 6b5395f9d3e5b8..00000000000000 --- a/api/core/tools/provider/builtin/google/google.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from core.tools.errors 
import ToolProviderCredentialValidationError -from core.tools.provider.builtin.google.tools.google_search import GoogleSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class GoogleProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - GoogleSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={"query": "test", "result_type": "link"}, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/google/google.yaml b/api/core/tools/provider/builtin/google/google.yaml deleted file mode 100644 index afb4d5b2145ba6..00000000000000 --- a/api/core/tools/provider/builtin/google/google.yaml +++ /dev/null @@ -1,31 +0,0 @@ -identity: - author: Dify - name: google - label: - en_US: Google - zh_Hans: Google - pt_BR: Google - description: - en_US: Google - zh_Hans: GoogleSearch - pt_BR: Google - icon: icon.svg - tags: - - search -credentials_for_provider: - serpapi_api_key: - type: secret-input - required: true - label: - en_US: SerpApi API key - zh_Hans: SerpApi API key - pt_BR: SerpApi API key - placeholder: - en_US: Please input your SerpApi API key - zh_Hans: 请输入你的 SerpApi API key - pt_BR: Please input your SerpApi API key - help: - en_US: Get your SerpApi API key from SerpApi - zh_Hans: 从 SerpApi 获取您的 SerpApi API key - pt_BR: Get your SerpApi API key from SerpApi - url: https://serpapi.com/manage-api-key diff --git a/api/core/tools/provider/builtin/google/tools/google_search.py b/api/core/tools/provider/builtin/google/tools/google_search.py deleted file mode 100644 index a9f65925d86f94..00000000000000 --- a/api/core/tools/provider/builtin/google/tools/google_search.py +++ /dev/null @@ -1,40 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from 
core.tools.tool.builtin_tool import BuiltinTool - -SERP_API_URL = "https://serpapi.com/search" - - -class GoogleSearchTool(BuiltinTool): - def _parse_response(self, response: dict) -> dict: - result = {} - if "knowledge_graph" in response: - result["title"] = response["knowledge_graph"].get("title", "") - result["description"] = response["knowledge_graph"].get("description", "") - if "organic_results" in response: - result["organic_results"] = [ - {"title": item.get("title", ""), "link": item.get("link", ""), "snippet": item.get("snippet", "")} - for item in response["organic_results"] - ] - return result - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - params = { - "api_key": self.runtime.credentials["serpapi_api_key"], - "q": tool_parameters["query"], - "engine": "google", - "google_domain": "google.com", - "gl": "us", - "hl": "en", - } - response = requests.get(url=SERP_API_URL, params=params) - response.raise_for_status() - valuable_res = self._parse_response(response.json()) - return self.create_json_message(valuable_res) diff --git a/api/core/tools/provider/builtin/google/tools/google_search.yaml b/api/core/tools/provider/builtin/google/tools/google_search.yaml deleted file mode 100644 index 72db3839eb022a..00000000000000 --- a/api/core/tools/provider/builtin/google/tools/google_search.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: google_search - author: Dify - label: - en_US: GoogleSearch - zh_Hans: 谷歌搜索 - pt_BR: GoogleSearch -description: - human: - en_US: A tool for performing a Google SERP search and extracting snippets and webpages.Input should be a search query. - zh_Hans: 一个用于执行 Google SERP 搜索并提取片段和网页的工具。输入应该是一个搜索查询。 - pt_BR: A tool for performing a Google SERP search and extracting snippets and webpages.Input should be a search query. - llm: A tool for performing a Google SERP search and extracting snippets and webpages.Input should be a search query. 
-parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询语句 - pt_BR: Query string - human_description: - en_US: used for searching - zh_Hans: 用于搜索网页内容 - pt_BR: used for searching - llm_description: key words for searching - form: llm diff --git a/api/core/tools/provider/builtin/google_translate/_assets/icon.svg b/api/core/tools/provider/builtin/google_translate/_assets/icon.svg deleted file mode 100644 index de69a9c5e58316..00000000000000 --- a/api/core/tools/provider/builtin/google_translate/_assets/icon.svg +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/api/core/tools/provider/builtin/google_translate/google_translate.py b/api/core/tools/provider/builtin/google_translate/google_translate.py deleted file mode 100644 index ea53aa4eeb906f..00000000000000 --- a/api/core/tools/provider/builtin/google_translate/google_translate.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.google_translate.tools.translate import GoogleTranslate -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class JsonExtractProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - GoogleTranslate().invoke(user_id="", tool_parameters={"content": "这是一段测试文本", "dest": "en"}) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/google_translate/google_translate.yaml b/api/core/tools/provider/builtin/google_translate/google_translate.yaml deleted file mode 100644 index 8bc821a3d5e9fa..00000000000000 --- a/api/core/tools/provider/builtin/google_translate/google_translate.yaml +++ /dev/null @@ -1,12 +0,0 @@ -identity: - author: Ron Liu - name: google_translate - label: - en_US: Google Translate - zh_Hans: 谷歌翻译 - description: - en_US: 
Translate text using Google - zh_Hans: 使用 Google 进行翻译 - icon: icon.svg - tags: - - utilities diff --git a/api/core/tools/provider/builtin/google_translate/tools/translate.py b/api/core/tools/provider/builtin/google_translate/tools/translate.py deleted file mode 100644 index ea3f2077d5d485..00000000000000 --- a/api/core/tools/provider/builtin/google_translate/tools/translate.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GoogleTranslate(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - content = tool_parameters.get("content", "") - if not content: - return self.create_text_message("Invalid parameter content") - - dest = tool_parameters.get("dest", "") - if not dest: - return self.create_text_message("Invalid parameter destination language") - - try: - result = self._translate(content, dest) - return self.create_text_message(str(result)) - except Exception: - return self.create_text_message("Translation service error, please check the network") - - def _translate(self, content: str, dest: str) -> str: - try: - url = "https://translate.googleapis.com/translate_a/single" - params = {"client": "gtx", "sl": "auto", "tl": dest, "dt": "t", "q": content} - - headers = { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)" - " Chrome/91.0.4472.124 Safari/537.36" - } - - response_json = requests.get(url, params=params, headers=headers).json() - result = response_json[0] - translated_text = "".join([item[0] for item in result if item[0]]) - return str(translated_text) - except Exception as e: - return str(e) diff --git a/api/core/tools/provider/builtin/google_translate/tools/translate.yaml 
b/api/core/tools/provider/builtin/google_translate/tools/translate.yaml deleted file mode 100644 index a4189cd7439ad7..00000000000000 --- a/api/core/tools/provider/builtin/google_translate/tools/translate.yaml +++ /dev/null @@ -1,215 +0,0 @@ -identity: - name: translate - author: Ron Liu - label: - en_US: Translate - zh_Hans: 翻译 -description: - human: - en_US: A tool for Google Translate - zh_Hans: Google 翻译 - llm: A tool for Google Translate -parameters: - - name: content - type: string - required: true - label: - en_US: Text content - zh_Hans: 文本内容 - human_description: - en_US: Text content - zh_Hans: 需要翻译的文本内容 - llm_description: Text content - form: llm - - name: dest - type: select - required: true - label: - en_US: destination language - zh_Hans: 目标语言 - human_description: - en_US: The destination language you want to translate. - zh_Hans: 你想翻译的目标语言 - default: en - form: form - options: - - value: ar - label: - en_US: Arabic - zh_Hans: 阿拉伯语 - - value: bg - label: - en_US: Bulgarian - zh_Hans: 保加利亚语 - - value: ca - label: - en_US: Catalan - zh_Hans: 加泰罗尼亚语 - - value: zh-cn - label: - en_US: Chinese (Simplified) - zh_Hans: 中文(简体) - - value: zh-tw - label: - en_US: Chinese (Traditional) - zh_Hans: 中文(繁体) - - value: cs - label: - en_US: Czech - zh_Hans: 捷克语 - - value: da - label: - en_US: Danish - zh_Hans: 丹麦语 - - value: nl - label: - en_US: Dutch - zh_Hans: 荷兰语 - - value: en - label: - en_US: English - zh_Hans: 英语 - - value: et - label: - en_US: Estonian - zh_Hans: 爱沙尼亚语 - - value: fi - label: - en_US: Finnish - zh_Hans: 芬兰语 - - value: fr - label: - en_US: French - zh_Hans: 法语 - - value: de - label: - en_US: German - zh_Hans: 德语 - - value: el - label: - en_US: Greek - zh_Hans: 希腊语 - - value: iw - label: - en_US: Hebrew - zh_Hans: 希伯来语 - - value: hi - label: - en_US: Hindi - zh_Hans: 印地语 - - value: hu - label: - en_US: Hungarian - zh_Hans: 匈牙利语 - - value: id - label: - en_US: Indonesian - zh_Hans: 印尼语 - - value: it - label: - en_US: Italian - zh_Hans: 意大利语 - - 
value: ja - label: - en_US: Japanese - zh_Hans: 日语 - - value: kn - label: - en_US: Kannada - zh_Hans: 卡纳达语 - - value: ko - label: - en_US: Korean - zh_Hans: 韩语 - - value: lv - label: - en_US: Latvian - zh_Hans: 拉脱维亚语 - - value: lt - label: - en_US: Lithuanian - zh_Hans: 立陶宛语 - - value: my - label: - en_US: Malay - zh_Hans: 马来语 - - value: ml - label: - en_US: Malayalam - zh_Hans: 马拉雅拉姆语 - - value: mr - label: - en_US: Marathi - zh_Hans: 马拉地语 - - value: "no" - label: - en_US: Norwegian - zh_Hans: 挪威语 - - value: pl - label: - en_US: Polish - zh_Hans: 波兰语 - - value: pt-br - label: - en_US: Portuguese (Brazil) - zh_Hans: 葡萄牙语(巴西) - - value: pt-pt - label: - en_US: Portuguese (Portugal) - zh_Hans: 葡萄牙语(葡萄牙) - - value: pa - label: - en_US: Punjabi - zh_Hans: 旁遮普语 - - value: ro - label: - en_US: Romanian - zh_Hans: 罗马尼亚语 - - value: ru - label: - en_US: Russian - zh_Hans: 俄语 - - value: sr - label: - en_US: Serbian - zh_Hans: 塞尔维亚语 - - value: sk - label: - en_US: Slovak - zh_Hans: 斯洛伐克语 - - value: sl - label: - en_US: Slovenian - zh_Hans: 斯洛文尼亚语 - - value: es - label: - en_US: Spanish - zh_Hans: 西班牙语 - - value: sv - label: - en_US: Swedish - zh_Hans: 瑞典语 - - value: ta - label: - en_US: Tamil - zh_Hans: 泰米尔语 - - value: te - label: - en_US: Telugu - zh_Hans: 泰卢固语 - - value: th - label: - en_US: Thai - zh_Hans: 泰语 - - value: tr - label: - en_US: Turkish - zh_Hans: 土耳其语 - - value: uk - label: - en_US: Ukrainian - zh_Hans: 乌克兰语 - - value: vi - label: - en_US: Vietnamese - zh_Hans: 越南语 diff --git a/api/core/tools/provider/builtin/hap/_assets/icon.svg b/api/core/tools/provider/builtin/hap/_assets/icon.svg deleted file mode 100644 index 0fa6f0886fdfdb..00000000000000 --- a/api/core/tools/provider/builtin/hap/_assets/icon.svg +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - - - - - - - - diff --git a/api/core/tools/provider/builtin/hap/hap.py b/api/core/tools/provider/builtin/hap/hap.py deleted file mode 100644 index cbdf9504659568..00000000000000 --- 
a/api/core/tools/provider/builtin/hap/hap.py +++ /dev/null @@ -1,8 +0,0 @@ -from typing import Any - -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class HapProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - pass diff --git a/api/core/tools/provider/builtin/hap/hap.yaml b/api/core/tools/provider/builtin/hap/hap.yaml deleted file mode 100644 index 25b473cf9dd211..00000000000000 --- a/api/core/tools/provider/builtin/hap/hap.yaml +++ /dev/null @@ -1,15 +0,0 @@ -identity: - author: Mingdao - name: hap - label: - en_US: HAP - zh_Hans: HAP - pt_BR: HAP - description: - en_US: "Hyper application platform that is particularly friendly to AI" - zh_Hans: "对 AI 特别友好的超级应用平台" - pt_BR: "Plataforma de aplicação hiper que é particularmente amigável à IA" - icon: icon.svg - tags: - - productivity -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/hap/tools/add_worksheet_record.py b/api/core/tools/provider/builtin/hap/tools/add_worksheet_record.py deleted file mode 100644 index 597adc91db9768..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/add_worksheet_record.py +++ /dev/null @@ -1,52 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class AddWorksheetRecordTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - appkey = tool_parameters.get("appkey", "") - if not appkey: - return self.create_text_message("Invalid parameter App Key") - sign = tool_parameters.get("sign", "") - if not sign: - return self.create_text_message("Invalid parameter Sign") - worksheet_id = tool_parameters.get("worksheet_id", "") - if not worksheet_id: - return self.create_text_message("Invalid parameter Worksheet ID") - record_data = 
tool_parameters.get("record_data", "") - if not record_data: - return self.create_text_message("Invalid parameter Record Row Data") - - host = tool_parameters.get("host", "") - if not host: - host = "https://api.mingdao.com" - elif not host.startswith(("http://", "https://")): - return self.create_text_message("Invalid parameter Host Address") - else: - host = f"{host.removesuffix('/')}/api" - - url = f"{host}/v2/open/worksheet/addRow" - headers = {"Content-Type": "application/json"} - payload = {"appKey": appkey, "sign": sign, "worksheetId": worksheet_id} - - try: - payload["controls"] = json.loads(record_data) - res = httpx.post(url, headers=headers, json=payload, timeout=60) - res.raise_for_status() - res_json = res.json() - if res_json.get("error_code") != 1: - return self.create_text_message(f"Failed to add the new record. {res_json['error_msg']}") - return self.create_text_message(f"New record added successfully. The record ID is {res_json['data']}.") - except httpx.RequestError as e: - return self.create_text_message(f"Failed to add the new record, request error: {e}") - except json.JSONDecodeError as e: - return self.create_text_message(f"Failed to parse JSON response: {e}") - except Exception as e: - return self.create_text_message(f"Failed to add the new record, unexpected error: {e}") diff --git a/api/core/tools/provider/builtin/hap/tools/add_worksheet_record.yaml b/api/core/tools/provider/builtin/hap/tools/add_worksheet_record.yaml deleted file mode 100644 index add7742cd74db1..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/add_worksheet_record.yaml +++ /dev/null @@ -1,78 +0,0 @@ -identity: - name: add_worksheet_record - author: Ryan Tian - label: - en_US: Add Worksheet Record - zh_Hans: 新增一条工作表记录 -description: - human: - en_US: Adds a new record to the specified worksheet - zh_Hans: 向指定的工作表新增一条记录数据 - llm: A tool to append a new data entry into a specified worksheet. 
-parameters: - - name: appkey - type: secret-input - required: true - label: - en_US: App Key - zh_Hans: App Key - human_description: - en_US: The AppKey parameter for the HAP application, typically found in the application's API documentation. - zh_Hans: HAP 应用的 AppKey 参数,可以从应用 API 文档中查找到 - llm_description: the AppKey parameter for the HAP application - form: form - - - name: sign - type: secret-input - required: true - label: - en_US: Sign - zh_Hans: Sign - human_description: - en_US: The Sign parameter for the HAP application - zh_Hans: HAP 应用的 Sign 参数 - llm_description: the Sign parameter for the HAP application - form: form - - - name: worksheet_id - type: string - required: true - label: - en_US: Worksheet ID - zh_Hans: 工作表 ID - human_description: - en_US: The ID of the specified worksheet - zh_Hans: 要获取字段信息的工作表 ID - llm_description: The ID of the specified worksheet which to get the fields information. - form: llm - - - name: record_data - type: string - required: true - label: - en_US: Record Row Data - zh_Hans: 记录数据 - human_description: - en_US: The fields with data of the specified record - zh_Hans: 要新增的记录数据,JSON 对象数组格式。数组元素属性:controlId-字段ID,value-字段值 - llm_description: | - The fields with data of the specified record which to be created. It is in the format of an array of JSON objects, and the structure is defined as follows: - ``` - type RowData = { - controlId: string; // Field ID to be updated - value: string; // Field value to be updated - }[]; - ``` - form: llm - - - name: host - type: string - required: false - label: - en_US: Host Address - zh_Hans: 服务器地址 - human_description: - en_US: The address for the privately deployed HAP server. - zh_Hans: 私有部署 HAP 服务器地址,公有云无需填写 - llm_description: the address for the privately deployed HAP server. 
- form: form diff --git a/api/core/tools/provider/builtin/hap/tools/delete_worksheet_record.py b/api/core/tools/provider/builtin/hap/tools/delete_worksheet_record.py deleted file mode 100644 index 5d42af4c490598..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/delete_worksheet_record.py +++ /dev/null @@ -1,48 +0,0 @@ -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DeleteWorksheetRecordTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - appkey = tool_parameters.get("appkey", "") - if not appkey: - return self.create_text_message("Invalid parameter App Key") - sign = tool_parameters.get("sign", "") - if not sign: - return self.create_text_message("Invalid parameter Sign") - worksheet_id = tool_parameters.get("worksheet_id", "") - if not worksheet_id: - return self.create_text_message("Invalid parameter Worksheet ID") - row_id = tool_parameters.get("row_id", "") - if not row_id: - return self.create_text_message("Invalid parameter Record Row ID") - - host = tool_parameters.get("host", "") - if not host: - host = "https://api.mingdao.com" - elif not host.startswith(("http://", "https://")): - return self.create_text_message("Invalid parameter Host Address") - else: - host = f"{host.removesuffix('/')}/api" - - url = f"{host}/v2/open/worksheet/deleteRow" - headers = {"Content-Type": "application/json"} - payload = {"appKey": appkey, "sign": sign, "worksheetId": worksheet_id, "rowId": row_id} - - try: - res = httpx.post(url, headers=headers, json=payload, timeout=30) - res.raise_for_status() - res_json = res.json() - if res_json.get("error_code") != 1: - return self.create_text_message(f"Failed to delete the record. 
{res_json['error_msg']}") - return self.create_text_message("Successfully deleted the record.") - except httpx.RequestError as e: - return self.create_text_message(f"Failed to delete the record, request error: {e}") - except Exception as e: - return self.create_text_message(f"Failed to delete the record, unexpected error: {e}") diff --git a/api/core/tools/provider/builtin/hap/tools/delete_worksheet_record.yaml b/api/core/tools/provider/builtin/hap/tools/delete_worksheet_record.yaml deleted file mode 100644 index 7c0c2a6439003f..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/delete_worksheet_record.yaml +++ /dev/null @@ -1,71 +0,0 @@ -identity: - name: delete_worksheet_record - author: Ryan Tian - label: - en_US: Delete Worksheet Record - zh_Hans: 删除指定的一条工作表记录 -description: - human: - en_US: Deletes a single record from a worksheet based on the specified record row ID - zh_Hans: 根据指定的记录ID删除一条工作表记录数据 - llm: A tool to remove a particular record from a worksheet by specifying its unique record identifier. -parameters: - - name: appkey - type: secret-input - required: true - label: - en_US: App Key - zh_Hans: App Key - human_description: - en_US: The AppKey parameter for the HAP application, typically found in the application's API documentation. - zh_Hans: HAP 应用的 AppKey 参数,可以从应用 API 文档中查找到 - llm_description: the AppKey parameter for the HAP application - form: form - - - name: sign - type: secret-input - required: true - label: - en_US: Sign - zh_Hans: Sign - human_description: - en_US: The Sign parameter for the HAP application - zh_Hans: HAP 应用的 Sign 参数 - llm_description: the Sign parameter for the HAP application - form: form - - - name: worksheet_id - type: string - required: true - label: - en_US: Worksheet ID - zh_Hans: 工作表 ID - human_description: - en_US: The ID of the specified worksheet - zh_Hans: 要获取字段信息的工作表 ID - llm_description: The ID of the specified worksheet which to get the fields information. 
- form: llm - - - name: row_id - type: string - required: true - label: - en_US: Record Row ID - zh_Hans: 记录 ID - human_description: - en_US: The row ID of the specified record - zh_Hans: 要删除的记录 ID - llm_description: The row ID of the specified record which to be deleted. - form: llm - - - name: host - type: string - required: false - label: - en_US: Host Address - zh_Hans: 服务器地址 - human_description: - en_US: The address for the privately deployed HAP server. - zh_Hans: 私有部署 HAP 服务器地址,公有云无需填写 - llm_description: the address for the privately deployed HAP server. - form: form diff --git a/api/core/tools/provider/builtin/hap/tools/get_worksheet_fields.py b/api/core/tools/provider/builtin/hap/tools/get_worksheet_fields.py deleted file mode 100644 index 6887b8b4e99df6..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/get_worksheet_fields.py +++ /dev/null @@ -1,152 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GetWorksheetFieldsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - appkey = tool_parameters.get("appkey", "") - if not appkey: - return self.create_text_message("Invalid parameter App Key") - sign = tool_parameters.get("sign", "") - if not sign: - return self.create_text_message("Invalid parameter Sign") - worksheet_id = tool_parameters.get("worksheet_id", "") - if not worksheet_id: - return self.create_text_message("Invalid parameter Worksheet ID") - - host = tool_parameters.get("host", "") - if not host: - host = "https://api.mingdao.com" - elif not host.startswith(("http://", "https://")): - return self.create_text_message("Invalid parameter Host Address") - else: - host = f"{host.removesuffix('/')}/api" - - url = f"{host}/v2/open/worksheet/getWorksheetInfo" - headers = {"Content-Type": 
"application/json"} - payload = {"appKey": appkey, "sign": sign, "worksheetId": worksheet_id} - - try: - res = httpx.post(url, headers=headers, json=payload, timeout=60) - res.raise_for_status() - res_json = res.json() - if res_json.get("error_code") != 1: - return self.create_text_message(f"Failed to get the worksheet information. {res_json['error_msg']}") - - fields_json, fields_table = self.get_controls(res_json["data"]["controls"]) - result_type = tool_parameters.get("result_type", "table") - return self.create_text_message( - text=json.dumps(fields_json, ensure_ascii=False) if result_type == "json" else fields_table - ) - except httpx.RequestError as e: - return self.create_text_message(f"Failed to get the worksheet information, request error: {e}") - except json.JSONDecodeError as e: - return self.create_text_message(f"Failed to parse JSON response: {e}") - except Exception as e: - return self.create_text_message(f"Failed to get the worksheet information, unexpected error: {e}") - - def get_field_type_by_id(self, field_type_id: int) -> str: - field_type_map = { - 2: "Text", - 3: "Text-Phone", - 4: "Text-Phone", - 5: "Text-Email", - 6: "Number", - 7: "Text", - 8: "Number", - 9: "Option-Single Choice", - 10: "Option-Multiple Choices", - 11: "Option-Single Choice", - 15: "Date", - 16: "Date", - 24: "Option-Region", - 25: "Text", - 26: "Option-Member", - 27: "Option-Department", - 28: "Number", - 29: "Option-Linked Record", - 30: "Unknown Type", - 31: "Number", - 32: "Text", - 33: "Text", - 35: "Option-Linked Record", - 36: "Number-Yes1/No0", - 37: "Number", - 38: "Date", - 40: "Location", - 41: "Text", - 46: "Time", - 48: "Option-Organizational Role", - 50: "Text", - 51: "Query Record", - } - return field_type_map.get(field_type_id, "") - - def get_controls(self, controls: list) -> dict: - fields = [] - fields_list = ["|fieldId|fieldName|fieldType|fieldTypeId|description|options|", "|" + "---|" * 6] - for control in controls: - if control["type"] in 
self._get_ignore_types(): - continue - field_type_id = control["type"] - field_type = self.get_field_type_by_id(control["type"]) - if field_type_id == 30: - source_type = control["sourceControl"]["type"] - if source_type in self._get_ignore_types(): - continue - else: - field_type_id = source_type - field_type = self.get_field_type_by_id(source_type) - field = { - "id": control["controlId"], - "name": control["controlName"], - "type": field_type, - "typeId": field_type_id, - "description": control["remark"].replace("\n", " ").replace("\t", " "), - "options": self._extract_options(control), - } - fields.append(field) - fields_list.append( - f"|{field['id']}|{field['name']}|{field['type']}|{field['typeId']}|{field['description']}" - f"|{field['options'] or ''}|" - ) - - fields.append( - { - "id": "ctime", - "name": "Created Time", - "type": self.get_field_type_by_id(16), - "typeId": 16, - "description": "", - "options": [], - } - ) - fields_list.append("|ctime|Created Time|Date|16|||") - return fields, "\n".join(fields_list) - - def _extract_options(self, control: dict) -> list: - options = [] - if control["type"] in {9, 10, 11}: - options.extend([{"key": opt["key"], "value": opt["value"]} for opt in control.get("options", [])]) - elif control["type"] in {28, 36}: - itemnames = control["advancedSetting"].get("itemnames") - if itemnames and itemnames.startswith("[{"): - try: - options = json.loads(itemnames) - except json.JSONDecodeError: - pass - elif control["type"] == 30: - source_type = control["sourceControl"]["type"] - if source_type not in self._get_ignore_types(): - options.extend([{"key": opt["key"], "value": opt["value"]} for opt in control.get("options", [])]) - return options - - def _get_ignore_types(self): - return {14, 21, 22, 34, 42, 43, 45, 47, 49, 10010} diff --git a/api/core/tools/provider/builtin/hap/tools/get_worksheet_fields.yaml b/api/core/tools/provider/builtin/hap/tools/get_worksheet_fields.yaml deleted file mode 100644 index 
f0d4973e8549f7..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/get_worksheet_fields.yaml +++ /dev/null @@ -1,80 +0,0 @@ -identity: - name: get_worksheet_fields - author: Ryan Tian - label: - en_US: Get Worksheet Fields - zh_Hans: 获取工作表字段结构 -description: - human: - en_US: Get fields information of the worksheet - zh_Hans: 获取指定工作表的所有字段结构信息 - llm: A tool to get fields information of the specific worksheet. -parameters: - - name: appkey - type: secret-input - required: true - label: - en_US: App Key - zh_Hans: App Key - human_description: - en_US: The AppKey parameter for the HAP application, typically found in the application's API documentation. - zh_Hans: HAP 应用的 AppKey 参数,可以从应用 API 文档中查找到 - llm_description: the AppKey parameter for the HAP application - form: form - - - name: sign - type: secret-input - required: true - label: - en_US: Sign - zh_Hans: Sign - human_description: - en_US: The Sign parameter for the HAP application - zh_Hans: HAP 应用的 Sign 参数 - llm_description: the Sign parameter for the HAP application - form: form - - - name: worksheet_id - type: string - required: true - label: - en_US: Worksheet ID - zh_Hans: 工作表 ID - human_description: - en_US: The ID of the specified worksheet - zh_Hans: 要获取字段信息的工作表 ID - llm_description: The ID of the specified worksheet which to get the fields information. - form: llm - - - name: host - type: string - required: false - label: - en_US: Host Address - zh_Hans: 服务器地址 - human_description: - en_US: The address for the privately deployed HAP server. - zh_Hans: 私有部署 HAP 服务器地址,公有云无需填写 - llm_description: the address for the privately deployed HAP server. 
- form: form - - - name: result_type - type: select - required: true - options: - - value: table - label: - en_US: table text - zh_Hans: 表格文本 - - value: json - label: - en_US: json text - zh_Hans: JSON文本 - default: table - label: - en_US: Result type - zh_Hans: 结果类型 - human_description: - en_US: used for selecting the result type, table styled text or json text - zh_Hans: 用于选择结果类型,使用表格格式文本还是JSON格式文本 - form: form diff --git a/api/core/tools/provider/builtin/hap/tools/get_worksheet_pivot_data.py b/api/core/tools/provider/builtin/hap/tools/get_worksheet_pivot_data.py deleted file mode 100644 index 26d7116869b6d9..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/get_worksheet_pivot_data.py +++ /dev/null @@ -1,137 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GetWorksheetPivotDataTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - appkey = tool_parameters.get("appkey", "") - if not appkey: - return self.create_text_message("Invalid parameter App Key") - sign = tool_parameters.get("sign", "") - if not sign: - return self.create_text_message("Invalid parameter Sign") - worksheet_id = tool_parameters.get("worksheet_id", "") - if not worksheet_id: - return self.create_text_message("Invalid parameter Worksheet ID") - x_column_fields = tool_parameters.get("x_column_fields", "") - if not x_column_fields or not x_column_fields.startswith("["): - return self.create_text_message("Invalid parameter Column Fields") - y_row_fields = tool_parameters.get("y_row_fields", "") - if y_row_fields and not y_row_fields.strip().startswith("["): - return self.create_text_message("Invalid parameter Row Fields") - elif not y_row_fields: - y_row_fields = "[]" - value_fields = tool_parameters.get("value_fields", "") - if not value_fields 
or not value_fields.strip().startswith("["): - return self.create_text_message("Invalid parameter Value Fields") - - host = tool_parameters.get("host", "") - if not host: - host = "https://api.mingdao.com" - elif not host.startswith(("http://", "https://")): - return self.create_text_message("Invalid parameter Host Address") - else: - host = f"{host.removesuffix('/')}/api" - - url = f"{host}/report/getPivotData" - headers = {"Content-Type": "application/json"} - payload = {"appKey": appkey, "sign": sign, "worksheetId": worksheet_id, "options": {"showTotal": True}} - - try: - x_column_fields = json.loads(x_column_fields) - payload["columns"] = x_column_fields - y_row_fields = json.loads(y_row_fields) - if y_row_fields: - payload["rows"] = y_row_fields - value_fields = json.loads(value_fields) - payload["values"] = value_fields - sort_fields = tool_parameters.get("sort_fields", "") - if not sort_fields: - sort_fields = "[]" - sort_fields = json.loads(sort_fields) - if sort_fields: - payload["options"]["sort"] = sort_fields - res = httpx.post(url, headers=headers, json=payload, timeout=60) - res.raise_for_status() - res_json = res.json() - if res_json.get("status") != 1: - return self.create_text_message(f"Failed to get the worksheet pivot data. 
{res_json['msg']}") - - pivot_json = self.generate_pivot_json(res_json["data"]) - pivot_table = self.generate_pivot_table(res_json["data"]) - result_type = tool_parameters.get("result_type", "") - text = pivot_table if result_type == "table" else json.dumps(pivot_json, ensure_ascii=False) - return self.create_text_message(text) - except httpx.RequestError as e: - return self.create_text_message(f"Failed to get the worksheet pivot data, request error: {e}") - except json.JSONDecodeError as e: - return self.create_text_message(f"Failed to parse JSON response: {e}") - except Exception as e: - return self.create_text_message(f"Failed to get the worksheet pivot data, unexpected error: {e}") - - def generate_pivot_table(self, data: dict[str, Any]) -> str: - columns = data["metadata"]["columns"] - rows = data["metadata"]["rows"] - values = data["metadata"]["values"] - - rows_data = data["data"] - - header = ( - ([row["displayName"] for row in rows] if rows else []) - + [column["displayName"] for column in columns] - + [value["displayName"] for value in values] - ) - line = (["---"] * len(rows) if rows else []) + ["---"] * len(columns) + ["--:"] * len(values) - - table = [header, line] - for row in rows_data: - row_data = [self.replace_pipe(row["rows"][r["controlId"]]) for r in rows] if rows else [] - row_data.extend([self.replace_pipe(row["columns"][column["controlId"]]) for column in columns]) - row_data.extend([self.replace_pipe(str(row["values"][value["controlId"]])) for value in values]) - table.append(row_data) - - return "\n".join([("|" + "|".join(row) + "|") for row in table]) - - def replace_pipe(self, text: str) -> str: - return text.replace("|", "▏").replace("\n", " ") - - def generate_pivot_json(self, data: dict[str, Any]) -> dict: - fields = { - "x-axis": [ - {"fieldId": column["controlId"], "fieldName": column["displayName"]} - for column in data["metadata"]["columns"] - ], - "y-axis": [ - {"fieldId": row["controlId"], "fieldName": row["displayName"]} for row 
in data["metadata"]["rows"] - ] - if data["metadata"]["rows"] - else [], - "values": [ - {"fieldId": value["controlId"], "fieldName": value["displayName"]} - for value in data["metadata"]["values"] - ], - } - # fields = ([ - # {"fieldId": row["controlId"], "fieldName": row["displayName"]} - # for row in data["metadata"]["rows"] - # ] if data["metadata"]["rows"] else []) + [ - # {"fieldId": column["controlId"], "fieldName": column["displayName"]} - # for column in data["metadata"]["columns"] - # ] + [ - # {"fieldId": value["controlId"], "fieldName": value["displayName"]} - # for value in data["metadata"]["values"] - # ] - rows = [] - for row in data["data"]: - row_data = row["rows"] or {} - row_data.update(row["columns"]) - row_data.update(row["values"]) - rows.append(row_data) - return {"fields": fields, "rows": rows, "summary": data["metadata"]["totalRow"]} diff --git a/api/core/tools/provider/builtin/hap/tools/get_worksheet_pivot_data.yaml b/api/core/tools/provider/builtin/hap/tools/get_worksheet_pivot_data.yaml deleted file mode 100644 index cf8c57b26208a9..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/get_worksheet_pivot_data.yaml +++ /dev/null @@ -1,248 +0,0 @@ -identity: - name: get_worksheet_pivot_data - author: Ryan Tian - label: - en_US: Get Worksheet Pivot Data - zh_Hans: 获取工作表统计透视数据 -description: - human: - en_US: Retrieve statistical pivot table data from a specified worksheet - zh_Hans: 从指定的工作表中检索统计透视表数据 - llm: A tool for extracting statistical pivot table data from a specific worksheet, providing summarized information for analysis and reporting purposes. -parameters: - - name: appkey - type: secret-input - required: true - label: - en_US: App Key - zh_Hans: App Key - human_description: - en_US: The AppKey parameter for the HAP application, typically found in the application's API documentation. 
- zh_Hans: HAP 应用的 AppKey 参数,可以从应用 API 文档中查找到 - llm_description: the AppKey parameter for the HAP application - form: form - - - name: sign - type: secret-input - required: true - label: - en_US: Sign - zh_Hans: Sign - human_description: - en_US: The Sign parameter for the HAP application - zh_Hans: HAP 应用的 Sign 参数 - llm_description: the Sign parameter for the HAP application - form: form - - - name: worksheet_id - type: string - required: true - label: - en_US: Worksheet ID - zh_Hans: 工作表 ID - human_description: - en_US: The ID of the specified worksheet - zh_Hans: 要获取字段信息的工作表 ID - llm_description: The ID of the specified worksheet which to get the fields information. - form: llm - - - name: x_column_fields - type: string - required: true - label: - en_US: Columns (X-axis) - zh_Hans: 统计列字段(X轴) - human_description: - en_US: The column fields that make up the pivot table's X-axis groups or other dimensions for the X-axis in pivot charts - zh_Hans: 组成透视表的统计列或者统计图表的X轴分组及X轴其它维度。JSON 对象数组格式,数组元素属性:controlId-列ID,displayName-显示名称,particleSize(可选)-字段类型是日期或者地区时,通过此参数设置统计维度(日期时间:1-日,2-周,3-月;地区:1-全国,2-省,3-市) - llm_description: | - This parameter allows you to specify the columns that make up the pivot table's X-axis groups or other dimensions for the X-axis in pivot charts. 
It is formatted as a JSON array, with its structure defined as follows: - ``` - type XColumnFields = { // X-axis or column object array - controlId: string; // fieldId - displayName: string; // displayName - particleSize?: number; // field type is date or area, set the statistical dimension (date time: 1-day, 2-week, 3-month; area: 1-nation, 2-province, 3-city) - }[]; - ``` - form: llm - - - name: y_row_fields - type: string - required: false - label: - en_US: Rows (Y-axis) - zh_Hans: 统计行字段(Y轴) - human_description: - en_US: The row fields that make up the pivot table's Y-axis groups or other dimensions for the Y-axis in pivot charts - zh_Hans: 组成透视表的统计行或者统计图表的Y轴分组及Y轴其它维度。JSON 对象数组格式,数组元素属性:controlId-列ID,displayName-显示名称,particleSize(可选)-字段类型是日期或者地区时,通过此参数设置统计维度(日期时间:1-日,2-周,3-月;地区:1-全国,2-省,3-市) - llm_description: | - This parameter allows you to specify the rows that make up the pivot table's Y-axis groups or other dimensions for the Y-axis in pivot charts. It is formatted as a JSON array, with its structure defined as follows: - ``` - type YRowFields = { // Y-axis or row object array - controlId: string; // fieldId - displayName: string; // displayName - particleSize?: number; // field type is date or area, set the statistical dimension (date time: 1-day, 2-week, 3-month; area: 1-nation, 2-province, 3-city) - }[]; - ``` - form: llm - - - name: value_fields - type: string - required: true - label: - en_US: Aggregated Values - zh_Hans: 统计值字段 - human_description: - en_US: The aggregated value fields in the pivot table - zh_Hans: 透视表中经过聚合计算后的统计值字段。JSON 对象数组格式,数组元素属性:controlId-列ID,displayName-显示名称,aggregation-聚合方式(SUM,AVG,MIN,MAX,COUNT) - llm_description: | - This parameter allows you to specify the aggregated value fields in the pivot table. 
It is formatted as a JSON array, with its structure defined as follows: - ``` - type ValueFields = { // aggregated value object array - controlId: string; // fieldId - displayName: string; // displayName - aggregation: string; // aggregation method, e.g.: SUM, AVG, MIN, MAX, COUNT - }[]; - ``` - form: llm - - - name: filters - type: string - required: false - label: - en_US: Filter Set - zh_Hans: 筛选器组合 - human_description: - en_US: A combination of filters applied to query records, formatted as a JSON array. See the application's API documentation for details on its structure and usage. - zh_Hans: 查询记录的筛选条件组合,格式为 JSON 数组,可以从应用 API 文档中了解参数结构详情 - llm_description: | - This parameter allows you to specify a set of conditions that records must meet to be included in the result set. It is formatted as a JSON array, with its structure defined as follows: - ``` - type Filters = { // filter object array - controlId: string; // fieldId - dataType: number; // fieldTypeId - spliceType: number; // condition concatenation method, 1: And, 2: Or - filterType: number; // expression type, refer to the for enumerable values - values?: string[]; // values in the condition, for option-type fields, multiple values can be passed - value?: string; // value in the condition, a single value can be passed according to the field type - dateRange?: number; // date range, mandatory when filterType is 17 or 18, refer to the for enumerable values - minValue?: string; // minimum value for custom range - maxValue?: string; // maximum value for custom range - isAsc?: boolean; // ascending order, false: descending, true: ascending - }[]; - ``` - For option-type fields, if this option field has `options`, then you need to get the corresponding `key` value from the `options` in the current field information via `value`, and pass it into `values` in array format. Do not use the `options` value of other fields as input conditions. 
- - ### FilterTypeEnum Reference - ``` - Enum Value, Enum Character, Description - 1, Like, Contains - 2, Eq, Is (Equal) - 3, Start, Starts With - 4, End, Ends With - 5, NotLike, Does Not Contain - 6, Ne, Is Not (Not Equal) - 7, IsEmpty, Empty - 8, HasValue, Not Empty - 11, Between, Within Range - 12, NotBetween, Outside Range - 13, Gt, Greater Than - 14, Gte, Greater Than or Equal To - 15, Lt, Less Than - 16, Lte, Less Than or Equal To - 17, DateEnum, Date Is - 18, NotDateEnum, Date Is Not - 21, MySelf, Owned by Me - 22, UnRead, Unread - 23, Sub, Owned by Subordinate - 24, RCEq, Associated Field Is - 25, RCNe, Associated Field Is Not - 26, ArrEq, Array Equals - 27, ArrNe, Array Does Not Equal - 31, DateBetween, Date Within Range (can only be used with minValue and maxValue) - 32, DateNotBetween, Date Not Within Range (can only be used with minValue and maxValue) - 33, DateGt, Date Later Than - 34, DateGte, Date Later Than or Equal To - 35, DateLt, Date Earlier Than - 36, DateLte, Date Earlier Than or Equal To - ``` - - ### DateRangeEnum Reference - ``` - Enum Value, Enum Character, Description - 1, Today, Today - 2, Yesterday, Yesterday - 3, Tomorrow, Tomorrow - 4, ThisWeek, This Week - 5, LastWeek, Last Week - 6, NextWeek, Next Week - 7, ThisMonth, This Month - 8, LastMonth, Last Month - 9, NextMonth, Next Month - 12, ThisQuarter, This Quarter - 13, LastQuarter, Last Quarter - 14, NextQuarter, Next Quarter - 15, ThisYear, This Year - 16, LastYear, Last Year - 17, NextYear, Next Year - 18, Customize, Custom - 21, Last7Day, Past 7 Days - 22, Last14Day, Past 14 Days - 23, Last30Day, Past 30 Days - 31, Next7Day, Next 7 Days - 32, Next14Day, Next 14 Days - 33, Next33Day, Next 33 Days - ``` - form: llm - - - name: sort_fields - type: string - required: false - label: - en_US: Sort Fields - zh_Hans: 排序字段 - human_description: - en_US: The fields to used for sorting - zh_Hans: 用于确定排序的字段,不超过3个 - llm_description: | - This optional parameter specifies the unique identifier 
of the fields that will be used to sort the results. It is in the format of an array of JSON objects, and its structure is defined as follows: - ``` - type SortByFields = { - controlId: string; // Field ID used for sorting - isAsc: boolean; // Sorting direction, true indicates ascending order, false indicates descending order - }[]; - ``` - form: llm - - - name: host - type: string - required: false - label: - en_US: Host Address - zh_Hans: 服务器地址 - human_description: - en_US: The address for the privately deployed HAP server. - zh_Hans: 私有部署 HAP 服务器地址,公有云无需填写 - llm_description: the address for the privately deployed HAP server. - form: form - - - name: result_type - type: select - required: true - options: - - value: table - label: - en_US: table text - zh_Hans: 表格文本 - - value: json - label: - en_US: json text - zh_Hans: JSON文本 - default: table - label: - en_US: Result type - zh_Hans: 结果类型 - human_description: - en_US: used for selecting the result type, table styled text or json text - zh_Hans: 用于选择结果类型,使用表格格式文本还是JSON格式文本 - form: form diff --git a/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py b/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py deleted file mode 100644 index d6ac3688b7794a..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py +++ /dev/null @@ -1,231 +0,0 @@ -import json -import re -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class ListWorksheetRecordsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - appkey = tool_parameters.get("appkey", "") - if not appkey: - return self.create_text_message("Invalid parameter App Key") - - sign = tool_parameters.get("sign", "") - if not sign: - return self.create_text_message("Invalid parameter Sign") - - worksheet_id 
= tool_parameters.get("worksheet_id", "") - if not worksheet_id: - return self.create_text_message("Invalid parameter Worksheet ID") - - host = tool_parameters.get("host", "") - if not host: - host = "https://api.mingdao.com" - elif not (host.startswith("http://") or host.startswith("https://")): - return self.create_text_message("Invalid parameter Host Address") - else: - host = f"{host.removesuffix('/')}/api" - - url_fields = f"{host}/v2/open/worksheet/getWorksheetInfo" - headers = {"Content-Type": "application/json"} - payload = {"appKey": appkey, "sign": sign, "worksheetId": worksheet_id} - - field_ids = tool_parameters.get("field_ids", "") - - try: - res = httpx.post(url_fields, headers=headers, json=payload, timeout=30) - res_json = res.json() - if res.is_success: - if res_json["error_code"] != 1: - return self.create_text_message( - "Failed to get the worksheet information. {}".format(res_json["error_msg"]) - ) - else: - worksheet_name = res_json["data"]["name"] - fields, schema, table_header = self.get_schema(res_json["data"]["controls"], field_ids) - else: - return self.create_text_message( - f"Failed to get the worksheet information, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message( - "Failed to get the worksheet information, something went wrong: {}".format(e) - ) - - if field_ids: - payload["controls"] = [v.strip() for v in field_ids.split(",")] if field_ids else [] - filters = tool_parameters.get("filters", "") - if filters: - payload["filters"] = json.loads(filters) - sort_id = tool_parameters.get("sort_id", "") - sort_is_asc = tool_parameters.get("sort_is_asc", False) - if sort_id: - payload["sortId"] = sort_id - payload["isAsc"] = sort_is_asc - limit = tool_parameters.get("limit", 50) - payload["pageSize"] = limit - page_index = tool_parameters.get("page_index", 1) - payload["pageIndex"] = page_index - payload["useControlId"] = True - payload["listType"] = 1 - - url = 
f"{host}/v2/open/worksheet/getFilterRows" - try: - res = httpx.post(url, headers=headers, json=payload, timeout=90) - res_json = res.json() - if res.is_success: - if res_json["error_code"] != 1: - return self.create_text_message("Failed to get the records. {}".format(res_json["error_msg"])) - else: - result = { - "fields": fields, - "rows": [], - "total": res_json.get("data", {}).get("total"), - "payload": { - key: payload[key] - for key in [ - "worksheetId", - "controls", - "filters", - "sortId", - "isAsc", - "pageSize", - "pageIndex", - ] - if key in payload - }, - } - rows = res_json.get("data", {}).get("rows", []) - result_type = tool_parameters.get("result_type", "") - if not result_type: - result_type = "table" - if result_type == "json": - for row in rows: - result["rows"].append(self.get_row_field_value(row, schema)) - return self.create_text_message(json.dumps(result, ensure_ascii=False)) - else: - result_text = f"Found {result['total']} rows in worksheet \"{worksheet_name}\"." - if result["total"] > 0: - result_text += ( - f" The following are {min(limit, result['total'])}" - f" pieces of data presented in a table format:\n\n{table_header}" - ) - for row in rows: - result_values = [] - for f in fields: - result_values.append( - self.handle_value_type(row[f["fieldId"]], schema[f["fieldId"]]) - ) - result_text += "\n|" + "|".join(result_values) + "|" - return self.create_text_message(result_text) - else: - return self.create_text_message( - f"Failed to get the records, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to get the records, something went wrong: {}".format(e)) - - def get_row_field_value(self, row: dict, schema: dict): - row_value = {"rowid": row["rowid"]} - for field in schema: - row_value[field] = self.handle_value_type(row[field], schema[field]) - return row_value - - def get_schema(self, controls: list, fieldids: str): - allow_fields = {v.strip() for v in 
fieldids.split(",")} if fieldids else set() - fields = [] - schema = {} - field_names = [] - for control in controls: - control_type_id = self.get_real_type_id(control) - if (control_type_id in self._get_ignore_types()) or ( - allow_fields and control["controlId"] not in allow_fields - ): - continue - else: - fields.append({"fieldId": control["controlId"], "fieldName": control["controlName"]}) - schema[control["controlId"]] = {"typeId": control_type_id, "options": self.set_option(control)} - field_names.append(control["controlName"]) - if not allow_fields or ("ctime" in allow_fields): - fields.append({"fieldId": "ctime", "fieldName": "Created Time"}) - schema["ctime"] = {"typeId": 16, "options": {}} - field_names.append("Created Time") - fields.append({"fieldId": "rowid", "fieldName": "Record Row ID"}) - schema["rowid"] = {"typeId": 2, "options": {}} - field_names.append("Record Row ID") - return fields, schema, "|" + "|".join(field_names) + "|\n|" + "---|" * len(field_names) - - def get_real_type_id(self, control: dict) -> int: - return control["sourceControlType"] if control["type"] == 30 else control["type"] - - def set_option(self, control: dict) -> dict: - options = {} - if control.get("options"): - options = {option["key"]: option["value"] for option in control["options"]} - elif control.get("advancedSetting", {}).get("itemnames"): - try: - itemnames = json.loads(control["advancedSetting"]["itemnames"]) - options = {item["key"]: item["value"] for item in itemnames} - except json.JSONDecodeError: - pass - return options - - def _get_ignore_types(self): - return {14, 21, 22, 34, 42, 43, 45, 47, 49, 10010} - - def handle_value_type(self, value, field): - type_id = field.get("typeId") - if type_id == 10: - value = value if isinstance(value, str) else "、".join(value) - elif type_id in {28, 36}: - value = field.get("options", {}).get(value, value) - elif type_id in {26, 27, 48, 14}: - value = self.process_value(value) - elif type_id in {35, 29}: - value = 
self.parse_cascade_or_associated(field, value) - elif type_id == 40: - value = self.parse_location(value) - return self.rich_text_to_plain_text(value) if value else "" - - def process_value(self, value): - if isinstance(value, str): - if value.startswith('[{"accountId"'): - value = json.loads(value) - value = ", ".join([item["fullname"] for item in value]) - elif value.startswith('[{"departmentId"'): - value = json.loads(value) - value = "、".join([item["departmentName"] for item in value]) - elif value.startswith('[{"organizeId"'): - value = json.loads(value) - value = "、".join([item["organizeName"] for item in value]) - elif value.startswith('[{"file_id"') or value == "[]": - value = "" - elif hasattr(value, "accountId"): - value = value["fullname"] - return value - - def parse_cascade_or_associated(self, field, value): - if (field["typeId"] == 35 and value.startswith("[")) or (field["typeId"] == 29 and value.startswith("[{")): - value = json.loads(value) - value = value[0]["name"] if len(value) > 0 else "" - else: - value = "" - return value - - def parse_location(self, value): - if len(value) > 10: - parsed_value = json.loads(value) - value = parsed_value.get("address", "") - else: - value = "" - return value - - def rich_text_to_plain_text(self, rich_text): - text = re.sub(r"<[^>]+>", "", rich_text) if "<" in rich_text else rich_text - return text.replace("|", "▏").replace("\n", " ") diff --git a/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.yaml b/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.yaml deleted file mode 100644 index 3c37746b921d05..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.yaml +++ /dev/null @@ -1,226 +0,0 @@ -identity: - name: list_worksheet_records - author: Ryan Tian - label: - en_US: List Worksheet Records - zh_Hans: 查询工作表记录数据 -description: - human: - en_US: List records from the worksheet - zh_Hans: 查询工作表的记录列表数据,一次最多1000行,可分页获取 - llm: A tool to retrieve record 
data from the specific worksheet. -parameters: - - name: appkey - type: secret-input - required: true - label: - en_US: App Key - zh_Hans: App Key - human_description: - en_US: The AppKey parameter for the HAP application, typically found in the application's API documentation. - zh_Hans: HAP 应用的 AppKey 参数,可以从应用 API 文档中查找到 - llm_description: the AppKey parameter for the HAP application - form: form - - - name: sign - type: secret-input - required: true - label: - en_US: Sign - zh_Hans: Sign - human_description: - en_US: The Sign parameter for the HAP application - zh_Hans: HAP 应用的 Sign 参数 - llm_description: the Sign parameter for the HAP application - form: form - - - name: worksheet_id - type: string - required: true - label: - en_US: Worksheet ID - zh_Hans: 工作表 ID - human_description: - en_US: The ID of the worksheet from which to retrieve record data - zh_Hans: 要获取记录数据的工作表 ID - llm_description: This parameter specifies the ID of the worksheet where the records are stored. - form: llm - - - name: field_ids - type: string - required: false - label: - en_US: Field IDs - zh_Hans: 字段 ID 列表 - human_description: - en_US: A comma-separated list of field IDs whose data to retrieve. If not provided, all fields' data will be fetched - zh_Hans: 要获取记录数据的字段 ID,多个 ID 间用英文逗号隔开,不传此参数则将获取所有字段的数据 - llm_description: This optional parameter lets you specify a comma-separated list of field IDs. Unless the user explicitly requests to output the specified field in the question, this parameter should usually be omitted. If this parameter is omitted, the API will return data for all fields by default. When provided, only the data associated with these fields will be included in the response. - form: llm - - - name: filters - type: string - required: false - label: - en_US: Filter Set - zh_Hans: 筛选器组合 - human_description: - en_US: A combination of filters applied to query records, formatted as a JSON array. See the application's API documentation for details on its structure and usage. 
- zh_Hans: 查询记录的筛选条件组合,格式为 JSON 数组,可以从应用 API 文档中了解参数结构详情 - llm_description: | - This parameter allows you to specify a set of conditions that records must meet to be included in the result set. It is formatted as a JSON array, with its structure defined as follows: - ``` - type Filters = { // filter object array - controlId: string; // fieldId - dataType: number; // fieldTypeId - spliceType: number; // condition concatenation method, 1: And, 2: Or - filterType: number; // expression type, refer to the for enumerable values - values?: string[]; // values in the condition, for option-type fields, multiple values can be passed - value?: string; // value in the condition, a single value can be passed according to the field type - dateRange?: number; // date range, mandatory when filterType is 17 or 18, refer to the for enumerable values - minValue?: string; // minimum value for custom range - maxValue?: string; // maximum value for custom range - isAsc?: boolean; // ascending order, false: descending, true: ascending - }[]; - ``` - For option-type fields, if this option field has `options`, then you need to get the corresponding `key` value from the `options` in the current field information via `value`, and pass it into `values` in array format. Do not use the `options` value of other fields as input conditions. 
- - ### FilterTypeEnum Reference - ``` - Enum Value, Enum Character, Description - 1, Like, Contains(Include) - 2, Eq, Is (Equal) - 3, Start, Starts With - 4, End, Ends With - 5, NotLike, Does Not Contain(Not Include) - 6, Ne, Is Not (Not Equal) - 7, IsEmpty, Empty - 8, HasValue, Not Empty - 11, Between, Within Range(Belong to) - 12, NotBetween, Outside Range(Not belong to) - 13, Gt, Greater Than - 14, Gte, Greater Than or Equal To - 15, Lt, Less Than - 16, Lte, Less Than or Equal To - 17, DateEnum, Date Is - 18, NotDateEnum, Date Is Not - 24, RCEq, Associated Field Is - 25, RCNe, Associated Field Is Not - 26, ArrEq, Array Equals - 27, ArrNe, Array Does Not Equal - 31, DateBetween, Date Within Range (can only be used with minValue and maxValue) - 32, DateNotBetween, Date Not Within Range (can only be used with minValue and maxValue) - 33, DateGt, Date Later Than - 34, DateGte, Date Later Than or Equal To - 35, DateLt, Date Earlier Than - 36, DateLte, Date Earlier Than or Equal To - ``` - - ### DateRangeEnum Reference - ``` - Enum Value, Enum Character, Description - 1, Today, Today - 2, Yesterday, Yesterday - 3, Tomorrow, Tomorrow - 4, ThisWeek, This Week - 5, LastWeek, Last Week - 6, NextWeek, Next Week - 7, ThisMonth, This Month - 8, LastMonth, Last Month - 9, NextMonth, Next Month - 12, ThisQuarter, This Quarter - 13, LastQuarter, Last Quarter - 14, NextQuarter, Next Quarter - 15, ThisYear, This Year - 16, LastYear, Last Year - 17, NextYear, Next Year - 18, Customize, Custom - 21, Last7Day, Past 7 Days - 22, Last14Day, Past 14 Days - 23, Last30Day, Past 30 Days - 31, Next7Day, Next 7 Days - 32, Next14Day, Next 14 Days - 33, Next33Day, Next 33 Days - ``` - form: llm - - - name: sort_id - type: string - required: false - label: - en_US: Sort Field ID - zh_Hans: 排序字段 ID - human_description: - en_US: The ID of the field used for sorting - zh_Hans: 用以排序的字段 ID - llm_description: This optional parameter specifies the unique identifier of the field that will be used to 
sort the results. It should be set to the ID of an existing field within your data structure. - form: llm - - - name: sort_is_asc - type: boolean - required: false - label: - en_US: Ascending Order - zh_Hans: 是否升序排列 - human_description: - en_US: Determines whether the sorting is in ascending (true) or descending (false) order - zh_Hans: 排序字段的排序方式:true-升序,false-降序 - llm_description: This optional parameter controls the direction of the sort. If set to true, the results will be sorted in ascending order; if false, they will be sorted in descending order. - form: llm - - - name: limit - type: number - required: false - label: - en_US: Record Limit - zh_Hans: 记录数量限制 - human_description: - en_US: The maximum number of records to retrieve - zh_Hans: 要获取的记录数量限制条数 - llm_description: This optional parameter allows you to specify the maximum number of records that should be returned in the result set. When retrieving paginated record data, this parameter indicates the number of rows to fetch per page, and must be used in conjunction with the `page_index` parameter. - form: llm - - - name: page_index - type: number - required: false - label: - en_US: Page Index - zh_Hans: 页码 - human_description: - en_US: The page number when paginating through a list of records - zh_Hans: 分页读取记录列表时的页码 - llm_description: This parameter is used when you need to paginate through a large set of records. The default value is 1, which refers to the first page. When it is used, the meaning of the `limit` parameter becomes the number of records per page. - form: llm - - - name: host - type: string - required: false - label: - en_US: Host Address - zh_Hans: 服务器地址 - human_description: - en_US: The address for the privately deployed HAP server. - zh_Hans: 私有部署 HAP 服务器地址,公有云无需填写 - llm_description: the address for the privately deployed HAP server. 
- form: form - - - name: result_type - type: select - required: true - options: - - value: table - label: - en_US: table text - zh_Hans: 表格文本 - - value: json - label: - en_US: json text - zh_Hans: JSON文本 - default: table - label: - en_US: Result type - zh_Hans: 结果类型 - human_description: - en_US: used for selecting the result type, table styled text or json text - zh_Hans: 用于选择结果类型,使用表格格式文本还是JSON格式文本 - form: form diff --git a/api/core/tools/provider/builtin/hap/tools/list_worksheets.py b/api/core/tools/provider/builtin/hap/tools/list_worksheets.py deleted file mode 100644 index 4e852c0028497c..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/list_worksheets.py +++ /dev/null @@ -1,83 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class ListWorksheetsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - appkey = tool_parameters.get("appkey", "") - if not appkey: - return self.create_text_message("Invalid parameter App Key") - sign = tool_parameters.get("sign", "") - if not sign: - return self.create_text_message("Invalid parameter Sign") - - host = tool_parameters.get("host", "") - if not host: - host = "https://api.mingdao.com" - elif not (host.startswith("http://") or host.startswith("https://")): - return self.create_text_message("Invalid parameter Host Address") - else: - host = f"{host.removesuffix('/')}/api" - url = f"{host}/v1/open/app/get" - - result_type = tool_parameters.get("result_type", "") - if not result_type: - result_type = "table" - - headers = {"Content-Type": "application/json"} - params = { - "appKey": appkey, - "sign": sign, - } - try: - res = httpx.get(url, headers=headers, params=params, timeout=30) - res_json = res.json() - if res.is_success: - if res_json["error_code"] != 1: - return 
self.create_text_message( - "Failed to access the application. {}".format(res_json["error_msg"]) - ) - else: - if result_type == "json": - worksheets = [] - for section in res_json["data"]["sections"]: - worksheets.extend(self._extract_worksheets(section, result_type)) - return self.create_text_message(text=json.dumps(worksheets, ensure_ascii=False)) - else: - worksheets = "|worksheetId|worksheetName|description|\n|---|---|---|" - for section in res_json["data"]["sections"]: - worksheets += self._extract_worksheets(section, result_type) - return self.create_text_message(worksheets) - - else: - return self.create_text_message( - f"Failed to list worksheets, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to list worksheets, something went wrong: {}".format(e)) - - def _extract_worksheets(self, section, type): - items = [] - tables = "" - for item in section.get("items", []): - if item.get("type") == 0 and ("notes" not in item or item.get("notes") != "NO"): - if type == "json": - filtered_item = {"id": item["id"], "name": item["name"], "notes": item.get("notes", "")} - items.append(filtered_item) - else: - tables += f"\n|{item['id']}|{item['name']}|{item.get('notes', '')}|" - - for child_section in section.get("childSections", []): - if type == "json": - items.extend(self._extract_worksheets(child_section, "json")) - else: - tables += self._extract_worksheets(child_section, "table") - - return items if type == "json" else tables diff --git a/api/core/tools/provider/builtin/hap/tools/list_worksheets.yaml b/api/core/tools/provider/builtin/hap/tools/list_worksheets.yaml deleted file mode 100644 index 935b72a89564cd..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/list_worksheets.yaml +++ /dev/null @@ -1,68 +0,0 @@ -identity: - name: list_worksheets - author: Ryan Tian - label: - en_US: List Worksheets - zh_Hans: 获取应用下所有工作表 -description: - human: - en_US: List worksheets within an 
application - zh_Hans: 获取应用下的所有工作表和说明信息 - llm: A tool to list worksheets info within an application, imported parameter is AppKey and Sign of the application. -parameters: - - name: appkey - type: secret-input - required: true - label: - en_US: App Key - zh_Hans: App Key - human_description: - en_US: The AppKey parameter for the HAP application, typically found in the application's API documentation. - zh_Hans: HAP 应用的 AppKey 参数,可以从应用 API 文档中查找到 - llm_description: the AppKey parameter for the HAP application - form: form - - - name: sign - type: secret-input - required: true - label: - en_US: Sign - zh_Hans: Sign - human_description: - en_US: The Sign parameter for the HAP application - zh_Hans: HAP 应用的 Sign 参数 - llm_description: the Sign parameter for the HAP application - form: form - - - name: host - type: string - required: false - label: - en_US: Host Address - zh_Hans: 服务器地址 - human_description: - en_US: The address for the privately deployed HAP server. - zh_Hans: 私有部署 HAP 服务器地址,公有云无需填写 - llm_description: the address for the privately deployed HAP server. 
- form: form - - - name: result_type - type: select - required: true - options: - - value: table - label: - en_US: table text - zh_Hans: 表格文本 - - value: json - label: - en_US: json text - zh_Hans: JSON文本 - default: table - label: - en_US: Result type - zh_Hans: 结果类型 - human_description: - en_US: used for selecting the result type, table styled text or json text - zh_Hans: 用于选择结果类型,使用表格格式文本还是JSON格式文本 - form: form diff --git a/api/core/tools/provider/builtin/hap/tools/update_worksheet_record.py b/api/core/tools/provider/builtin/hap/tools/update_worksheet_record.py deleted file mode 100644 index 971f3d37f6dfbf..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/update_worksheet_record.py +++ /dev/null @@ -1,55 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class UpdateWorksheetRecordTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - appkey = tool_parameters.get("appkey", "") - if not appkey: - return self.create_text_message("Invalid parameter App Key") - sign = tool_parameters.get("sign", "") - if not sign: - return self.create_text_message("Invalid parameter Sign") - worksheet_id = tool_parameters.get("worksheet_id", "") - if not worksheet_id: - return self.create_text_message("Invalid parameter Worksheet ID") - row_id = tool_parameters.get("row_id", "") - if not row_id: - return self.create_text_message("Invalid parameter Record Row ID") - record_data = tool_parameters.get("record_data", "") - if not record_data: - return self.create_text_message("Invalid parameter Record Row Data") - - host = tool_parameters.get("host", "") - if not host: - host = "https://api.mingdao.com" - elif not host.startswith(("http://", "https://")): - return self.create_text_message("Invalid parameter Host Address") - else: - host = 
f"{host.removesuffix('/')}/api" - - url = f"{host}/v2/open/worksheet/editRow" - headers = {"Content-Type": "application/json"} - payload = {"appKey": appkey, "sign": sign, "worksheetId": worksheet_id, "rowId": row_id} - - try: - payload["controls"] = json.loads(record_data) - res = httpx.post(url, headers=headers, json=payload, timeout=60) - res.raise_for_status() - res_json = res.json() - if res_json.get("error_code") != 1: - return self.create_text_message(f"Failed to update the record. {res_json['error_msg']}") - return self.create_text_message("Record updated successfully.") - except httpx.RequestError as e: - return self.create_text_message(f"Failed to update the record, request error: {e}") - except json.JSONDecodeError as e: - return self.create_text_message(f"Failed to parse JSON response: {e}") - except Exception as e: - return self.create_text_message(f"Failed to update the record, unexpected error: {e}") diff --git a/api/core/tools/provider/builtin/hap/tools/update_worksheet_record.yaml b/api/core/tools/provider/builtin/hap/tools/update_worksheet_record.yaml deleted file mode 100644 index fe1f8f671a4e2f..00000000000000 --- a/api/core/tools/provider/builtin/hap/tools/update_worksheet_record.yaml +++ /dev/null @@ -1,90 +0,0 @@ -identity: - name: update_worksheet_record - author: Ryan Tian - label: - en_US: Update Worksheet Record - zh_Hans: 更新指定的一条工作表记录 -description: - human: - en_US: Updates a single record in a worksheet based on the specified record row ID - zh_Hans: 根据指定的记录ID更新一条工作表记录数据 - llm: A tool to modify existing information within a particular record of a worksheet by referencing its unique identifier. -parameters: - - name: appkey - type: secret-input - required: true - label: - en_US: App Key - zh_Hans: App Key - human_description: - en_US: The AppKey parameter for the HAP application, typically found in the application's API documentation. 
- zh_Hans: HAP 应用的 AppKey 参数,可以从应用 API 文档中查找到 - llm_description: the AppKey parameter for the HAP application - form: form - - - name: sign - type: secret-input - required: true - label: - en_US: Sign - zh_Hans: Sign - human_description: - en_US: The Sign parameter for the HAP application - zh_Hans: HAP 应用的 Sign 参数 - llm_description: the Sign parameter for the HAP application - form: form - - - name: worksheet_id - type: string - required: true - label: - en_US: Worksheet ID - zh_Hans: 工作表 ID - human_description: - en_US: The ID of the specified worksheet - zh_Hans: 要获取字段信息的工作表 ID - llm_description: The ID of the specified worksheet which to get the fields information. - form: llm - - - name: row_id - type: string - required: true - label: - en_US: Record Row ID - zh_Hans: 记录 ID - human_description: - en_US: The row ID of the specified record - zh_Hans: 要更新的记录 ID - llm_description: The row ID of the specified record which to be updated. - form: llm - - - name: record_data - type: string - required: true - label: - en_US: Record Row Data - zh_Hans: 记录数据 - human_description: - en_US: The fields with data of the specified record - zh_Hans: 要更新的记录数据,JSON 对象数组格式。数组元素属性:controlId-字段ID,value-字段值 - llm_description: | - The fields with data of the specified record which to be updated. It is in the format of an array of JSON objects, and the structure is defined as follows: - ``` - type RowData = { - controlId: string; // Field ID to be updated - value: string; // Field value to be updated - }[]; - ``` - form: llm - - - name: host - type: string - required: false - label: - en_US: Host Address - zh_Hans: 服务器地址 - human_description: - en_US: The address for the privately deployed HAP server. - zh_Hans: 私有部署 HAP 服务器地址,公有云无需填写 - llm_description: the address for the privately deployed HAP server. 
- form: form diff --git a/api/core/tools/provider/builtin/jina/_assets/icon.svg b/api/core/tools/provider/builtin/jina/_assets/icon.svg deleted file mode 100644 index 2e1b00fa52e43c..00000000000000 --- a/api/core/tools/provider/builtin/jina/_assets/icon.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/api/core/tools/provider/builtin/jina/jina.py b/api/core/tools/provider/builtin/jina/jina.py deleted file mode 100644 index 154e15db016dd1..00000000000000 --- a/api/core/tools/provider/builtin/jina/jina.py +++ /dev/null @@ -1,38 +0,0 @@ -import json -from typing import Any - -from core.tools.entities.values import ToolLabelEnum -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.jina.tools.jina_reader import JinaReaderTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class GoogleProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - if credentials["api_key"] is None: - credentials["api_key"] = "" - else: - result = ( - JinaReaderTool() - .fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ) - .invoke( - user_id="", - tool_parameters={ - "url": "https://example.com", - }, - )[0] - ) - - message = json.loads(result.message) - if message["code"] != 200: - raise ToolProviderCredentialValidationError(message["message"]) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) - - def _get_tool_labels(self) -> list[ToolLabelEnum]: - return [ToolLabelEnum.SEARCH, ToolLabelEnum.PRODUCTIVITY] diff --git a/api/core/tools/provider/builtin/jina/jina.yaml b/api/core/tools/provider/builtin/jina/jina.yaml deleted file mode 100644 index 06f23382d92a3a..00000000000000 --- a/api/core/tools/provider/builtin/jina/jina.yaml +++ /dev/null @@ -1,32 +0,0 @@ -identity: - author: Dify - name: jina - label: - en_US: Jina - zh_Hans: Jina - pt_BR: Jina - description: - en_US: Convert any URL to 
an LLM-friendly input or perform searches on the web for grounding information. Experience improved output for your agent and RAG systems at no cost. - zh_Hans: 将任何URL转换为LLM易读的输入或在网页上搜索引擎上搜索引擎。 - pt_BR: Converte qualquer URL em uma entrada LLm-fácil de ler ou realize pesquisas na web para obter informação de grounding. Tenha uma experiência melhor para seu agente e sistemas RAG sem custo. - icon: icon.svg - tags: - - search - - productivity -credentials_for_provider: - api_key: - type: secret-input - required: false - label: - en_US: API Key (leave empty if you don't have one) - zh_Hans: API 密钥(可留空) - pt_BR: Chave API (deixe vazio se você não tiver uma) - placeholder: - en_US: Please enter your Jina API key - zh_Hans: 请输入你的 Jina API 密钥 - pt_BR: Por favor, insira sua chave de API do Jina - help: - en_US: Get your Jina API key from Jina (optional, but you can get a higher rate) - zh_Hans: 从 Jina 获取您的 Jina API 密钥(非必须,能得到更高的速率) - pt_BR: Obtenha sua chave de API do Jina na Jina (opcional, mas você pode obter uma taxa mais alta) - url: https://jina.ai diff --git a/api/core/tools/provider/builtin/jina/tools/jina_reader.py b/api/core/tools/provider/builtin/jina/tools/jina_reader.py deleted file mode 100644 index 0dd55c65291783..00000000000000 --- a/api/core/tools/provider/builtin/jina/tools/jina_reader.py +++ /dev/null @@ -1,74 +0,0 @@ -import json -from typing import Any, Union - -from yarl import URL - -from core.helper import ssrf_proxy -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class JinaReaderTool(BuiltinTool): - _jina_reader_endpoint = "https://r.jina.ai/" - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - url = tool_parameters["url"] - - headers = {"Accept": "application/json"} - - if "api_key" in self.runtime.credentials and self.runtime.credentials.get("api_key"): - 
headers["Authorization"] = "Bearer " + self.runtime.credentials.get("api_key") - - request_params = tool_parameters.get("request_params") - if request_params is not None and request_params != "": - try: - request_params = json.loads(request_params) - if not isinstance(request_params, dict): - raise ValueError("request_params must be a JSON object") - except (json.JSONDecodeError, ValueError) as e: - raise ValueError(f"Invalid request_params: {e}") - - target_selector = tool_parameters.get("target_selector") - if target_selector is not None and target_selector != "": - headers["X-Target-Selector"] = target_selector - - wait_for_selector = tool_parameters.get("wait_for_selector") - if wait_for_selector is not None and wait_for_selector != "": - headers["X-Wait-For-Selector"] = wait_for_selector - - if tool_parameters.get("image_caption", False): - headers["X-With-Generated-Alt"] = "true" - - if tool_parameters.get("gather_all_links_at_the_end", False): - headers["X-With-Links-Summary"] = "true" - - if tool_parameters.get("gather_all_images_at_the_end", False): - headers["X-With-Images-Summary"] = "true" - - proxy_server = tool_parameters.get("proxy_server") - if proxy_server is not None and proxy_server != "": - headers["X-Proxy-Url"] = proxy_server - - if tool_parameters.get("no_cache", False): - headers["X-No-Cache"] = "true" - - max_retries = tool_parameters.get("max_retries", 3) - response = ssrf_proxy.get( - str(URL(self._jina_reader_endpoint + url)), - headers=headers, - params=request_params, - timeout=(10, 60), - max_retries=max_retries, - ) - - if tool_parameters.get("summary", False): - return self.create_text_message(self.summary(user_id, response.text)) - - return self.create_text_message(response.text) diff --git a/api/core/tools/provider/builtin/jina/tools/jina_reader.yaml b/api/core/tools/provider/builtin/jina/tools/jina_reader.yaml deleted file mode 100644 index 58ad6d8694222d..00000000000000 --- 
a/api/core/tools/provider/builtin/jina/tools/jina_reader.yaml +++ /dev/null @@ -1,166 +0,0 @@ -identity: - name: jina_reader - author: Dify - label: - en_US: JinaReader - zh_Hans: JinaReader - pt_BR: JinaReader -description: - human: - en_US: Convert any URL to an LLM-friendly input. Experience improved output for your agent and RAG systems at no cost. - zh_Hans: 将任何 URL 转换为 LLM 友好的输入。无需付费即可体验为您的 Agent 和 RAG 系统提供的改进输出。 - pt_BR: Converta qualquer URL em uma entrada amigável ao LLM. Experimente uma saída aprimorada para seus sistemas de agente e RAG sem custo. - llm: A tool for scraping webpages. Input should be a URL. -parameters: - - name: url - type: string - required: true - label: - en_US: URL - zh_Hans: 网页链接 - pt_BR: URL - human_description: - en_US: used for linking to webpages - zh_Hans: 用于链接到网页 - pt_BR: used for linking to webpages - llm_description: url for scraping - form: llm - - name: request_params - type: string - required: false - label: - en_US: Request params - zh_Hans: 请求参数 - pt_BR: Request params - human_description: - en_US: | - request parameters, format: {"key1": "value1", "key2": "value2"} - zh_Hans: | - 请求参数,格式:{"key1": "value1", "key2": "value2"} - pt_BR: | - request parameters, format: {"key1": "value1", "key2": "value2"} - llm_description: request parameters - form: llm - - name: target_selector - type: string - required: false - label: - en_US: Target selector - zh_Hans: 目标选择器 - pt_BR: Seletor de destino - human_description: - en_US: css selector for scraping specific elements - zh_Hans: css 选择器用于抓取特定元素 - pt_BR: css selector for scraping specific elements - llm_description: css selector of the target element to scrape - form: form - - name: wait_for_selector - type: string - required: false - label: - en_US: Wait for selector - zh_Hans: 等待选择器 - pt_BR: Aguardar por seletor - human_description: - en_US: css selector for waiting for specific elements - zh_Hans: css 选择器用于等待特定元素 - pt_BR: css selector for waiting for specific elements - 
llm_description: css selector of the target element to wait for - form: form - - name: image_caption - type: boolean - required: false - default: false - label: - en_US: Image caption - zh_Hans: 图片说明 - pt_BR: Legenda da imagem - human_description: - en_US: "Captions all images at the specified URL, adding 'Image [idx]: [caption]' as an alt tag for those without one. This allows downstream LLMs to interact with the images in activities such as reasoning and summarizing." - zh_Hans: "为指定 URL 上的所有图像添加标题,为没有标题的图像添加“Image [idx]: [caption]”作为 alt 标签。这允许下游 LLM 在推理和总结等活动中与图像进行交互。" - pt_BR: "Captions all images at the specified URL, adding 'Image [idx]: [caption]' as an alt tag for those without one. This allows downstream LLMs to interact with the images in activities such as reasoning and summarizing." - llm_description: Captions all images at the specified URL - form: form - - name: gather_all_links_at_the_end - type: boolean - required: false - default: false - label: - en_US: Gather all links at the end - zh_Hans: 将所有链接集中到最后 - pt_BR: Coletar todos os links ao final - human_description: - en_US: A "Buttons & Links" section will be created at the end. This helps the downstream LLMs or web agents navigating the page or take further actions. - zh_Hans: 最后会创建一个“按钮和链接”部分。这可以帮助下游 LLM 或 Web 代理浏览页面或采取进一步的行动。 - pt_BR: A "Buttons & Links" section will be created at the end. This helps the downstream LLMs or web agents navigating the page or take further actions. - llm_description: Gather all links at the end - form: form - - name: gather_all_images_at_the_end - type: boolean - required: false - default: false - label: - en_US: Gather all images at the end - zh_Hans: 将所有图片集中到最后 - pt_BR: Coletar todas as imagens ao final - human_description: - en_US: An "Images" section will be created at the end. This gives the downstream LLMs an overview of all visuals on the page, which may improve reasoning. 
- zh_Hans: 最后会创建一个“图像”部分。这可以让下游的 LLM 概览页面上的所有视觉效果,从而提高推理能力。 - pt_BR: An "Images" section will be created at the end. This gives the downstream LLMs an overview of all visuals on the page, which may improve reasoning. - llm_description: Gather all images at the end - form: form - - name: proxy_server - type: string - required: false - label: - en_US: Proxy server - zh_Hans: 代理服务器 - pt_BR: Servidor de proxy - human_description: - en_US: Use proxy to access URLs - zh_Hans: 利用代理访问 URL - pt_BR: Use proxy to access URLs - llm_description: Use proxy to access URLs - form: form - - name: no_cache - type: boolean - required: false - default: false - label: - en_US: Bypass the Cache - zh_Hans: 绕过缓存 - pt_BR: Ignorar o cache - human_description: - en_US: Bypass the Cache - zh_Hans: 是否绕过缓存 - pt_BR: Ignorar o cache - llm_description: bypass the cache - form: form - - name: summary - type: boolean - required: false - default: false - label: - en_US: Enable summary - zh_Hans: 是否启用摘要 - pt_BR: Habilitar resumo - human_description: - en_US: Enable summary for the output - zh_Hans: 为输出启用摘要 - pt_BR: Habilitar resumo para a saída - llm_description: enable summary - form: form - - name: max_retries - type: number - required: false - default: 3 - label: - en_US: Retry - zh_Hans: 重试 - pt_BR: Repetir - human_description: - en_US: Number of times to retry the request if it fails - zh_Hans: 请求失败时重试的次数 - pt_BR: Número de vezes para repetir a solicitação se falhar - llm_description: Number of times to retry the request if it fails - form: form diff --git a/api/core/tools/provider/builtin/jina/tools/jina_search.py b/api/core/tools/provider/builtin/jina/tools/jina_search.py deleted file mode 100644 index 30af6de7831e59..00000000000000 --- a/api/core/tools/provider/builtin/jina/tools/jina_search.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import Any, Union - -from yarl import URL - -from core.helper import ssrf_proxy -from core.tools.entities.tool_entities import ToolInvokeMessage -from 
core.tools.tool.builtin_tool import BuiltinTool - - -class JinaSearchTool(BuiltinTool): - _jina_search_endpoint = "https://s.jina.ai/" - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - query = tool_parameters["query"] - - headers = {"Accept": "application/json"} - - if "api_key" in self.runtime.credentials and self.runtime.credentials.get("api_key"): - headers["Authorization"] = "Bearer " + self.runtime.credentials.get("api_key") - - if tool_parameters.get("image_caption", False): - headers["X-With-Generated-Alt"] = "true" - - if tool_parameters.get("gather_all_links_at_the_end", False): - headers["X-With-Links-Summary"] = "true" - - if tool_parameters.get("gather_all_images_at_the_end", False): - headers["X-With-Images-Summary"] = "true" - - proxy_server = tool_parameters.get("proxy_server") - if proxy_server is not None and proxy_server != "": - headers["X-Proxy-Url"] = proxy_server - - if tool_parameters.get("no_cache", False): - headers["X-No-Cache"] = "true" - - max_retries = tool_parameters.get("max_retries", 3) - response = ssrf_proxy.get( - str(URL(self._jina_search_endpoint + query)), headers=headers, timeout=(10, 60), max_retries=max_retries - ) - - return self.create_text_message(response.text) diff --git a/api/core/tools/provider/builtin/jina/tools/jina_search.yaml b/api/core/tools/provider/builtin/jina/tools/jina_search.yaml deleted file mode 100644 index 2bc70e1be1934d..00000000000000 --- a/api/core/tools/provider/builtin/jina/tools/jina_search.yaml +++ /dev/null @@ -1,107 +0,0 @@ -identity: - name: jina_search - author: Dify - label: - en_US: JinaSearch - zh_Hans: JinaSearch - pt_BR: JinaSearch -description: - human: - en_US: Search on the web and get the top 5 results. Useful for grounding using information from the web. - zh_Hans: 在网络上搜索返回前 5 个结果。 - llm: A tool for searching results on the web for grounding. Input should be a simple question. 
-parameters: - - name: query - type: string - required: true - label: - en_US: Question (Query) - zh_Hans: 信息查询 - human_description: - en_US: used to find information on the web - zh_Hans: 在网络上搜索信息 - llm_description: simple question to ask on the web - form: llm - - name: image_caption - type: boolean - required: false - default: false - label: - en_US: Image caption - zh_Hans: 图片说明 - pt_BR: Legenda da imagem - human_description: - en_US: "Captions all images at the specified URL, adding 'Image [idx]: [caption]' as an alt tag for those without one. This allows downstream LLMs to interact with the images in activities such as reasoning and summarizing." - zh_Hans: "为指定 URL 上的所有图像添加标题,为没有标题的图像添加“Image [idx]: [caption]”作为 alt 标签。这允许下游 LLM 在推理和总结等活动中与图像进行交互。" - pt_BR: "Captions all images at the specified URL, adding 'Image [idx]: [caption]' as an alt tag for those without one. This allows downstream LLMs to interact with the images in activities such as reasoning and summarizing." - llm_description: Captions all images at the specified URL - form: form - - name: gather_all_links_at_the_end - type: boolean - required: false - default: false - label: - en_US: Gather all links at the end - zh_Hans: 将所有链接集中到最后 - pt_BR: Coletar todos os links ao final - human_description: - en_US: A "Buttons & Links" section will be created at the end. This helps the downstream LLMs or web agents navigating the page or take further actions. - zh_Hans: 最后会创建一个“按钮和链接”部分。这可以帮助下游 LLM 或 Web 代理浏览页面或采取进一步的行动。 - pt_BR: A "Buttons & Links" section will be created at the end. This helps the downstream LLMs or web agents navigating the page or take further actions. - llm_description: Gather all links at the end - form: form - - name: gather_all_images_at_the_end - type: boolean - required: false - default: false - label: - en_US: Gather all images at the end - zh_Hans: 将所有图片集中到最后 - pt_BR: Coletar todas as imagens ao final - human_description: - en_US: An "Images" section will be created at the end. 
This gives the downstream LLMs an overview of all visuals on the page, which may improve reasoning. - zh_Hans: 最后会创建一个“图像”部分。这可以让下游的 LLM 概览页面上的所有视觉效果,从而提高推理能力。 - pt_BR: An "Images" section will be created at the end. This gives the downstream LLMs an overview of all visuals on the page, which may improve reasoning. - llm_description: Gather all images at the end - form: form - - name: proxy_server - type: string - required: false - label: - en_US: Proxy server - zh_Hans: 代理服务器 - pt_BR: Servidor de proxy - human_description: - en_US: Use proxy to access URLs - zh_Hans: 利用代理访问 URL - pt_BR: Use proxy to access URLs - llm_description: Use proxy to access URLs - form: form - - name: no_cache - type: boolean - required: false - default: false - label: - en_US: Bypass the Cache - zh_Hans: 绕过缓存 - pt_BR: Ignorar o cache - human_description: - en_US: Bypass the Cache - zh_Hans: 是否绕过缓存 - pt_BR: Ignorar o cache - llm_description: bypass the cache - form: form - - name: max_retries - type: number - required: false - default: 3 - label: - en_US: Retry - zh_Hans: 重试 - pt_BR: Repetir - human_description: - en_US: Number of times to retry the request if it fails - zh_Hans: 请求失败时重试的次数 - pt_BR: Número de vezes para repetir a solicitação se falhar - llm_description: Number of times to retry the request if it fails - form: form diff --git a/api/core/tools/provider/builtin/jina/tools/jina_tokenizer.py b/api/core/tools/provider/builtin/jina/tools/jina_tokenizer.py deleted file mode 100644 index 06dabcc9c2a74e..00000000000000 --- a/api/core/tools/provider/builtin/jina/tools/jina_tokenizer.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Any - -from core.helper import ssrf_proxy -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class JinaTokenizerTool(BuiltinTool): - _jina_tokenizer_endpoint = "https://tokenize.jina.ai/" - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> 
ToolInvokeMessage: - content = tool_parameters["content"] - body = {"content": content} - - headers = {"Content-Type": "application/json"} - - if "api_key" in self.runtime.credentials and self.runtime.credentials.get("api_key"): - headers["Authorization"] = "Bearer " + self.runtime.credentials.get("api_key") - - if tool_parameters.get("return_chunks", False): - body["return_chunks"] = True - - if tool_parameters.get("return_tokens", False): - body["return_tokens"] = True - - if tokenizer := tool_parameters.get("tokenizer"): - body["tokenizer"] = tokenizer - - response = ssrf_proxy.post( - self._jina_tokenizer_endpoint, - headers=headers, - json=body, - ) - - return self.create_json_message(response.json()) diff --git a/api/core/tools/provider/builtin/jina/tools/jina_tokenizer.yaml b/api/core/tools/provider/builtin/jina/tools/jina_tokenizer.yaml deleted file mode 100644 index 62a5c7e7bacd75..00000000000000 --- a/api/core/tools/provider/builtin/jina/tools/jina_tokenizer.yaml +++ /dev/null @@ -1,70 +0,0 @@ -identity: - name: jina_tokenizer - author: hjlarry - label: - en_US: JinaTokenizer -description: - human: - en_US: Free API to tokenize text and segment long text into chunks. - zh_Hans: 免费的API可以将文本tokenize,也可以将长文本分割成多个部分。 - llm: Free API to tokenize text and segment long text into chunks. -parameters: - - name: content - type: string - required: true - label: - en_US: Content - zh_Hans: 内容 - llm_description: the content which need to tokenize or segment - form: llm - - name: return_tokens - type: boolean - required: false - label: - en_US: Return the tokens - zh_Hans: 是否返回tokens - human_description: - en_US: Return the tokens and their corresponding ids in the response. 
- zh_Hans: 返回tokens及其对应的ids。 - form: form - - name: return_chunks - type: boolean - label: - en_US: Return the chunks - zh_Hans: 是否分块 - human_description: - en_US: Chunking the input into semantically meaningful segments while handling a wide variety of text types and edge cases based on common structural cues. - zh_Hans: 将输入分块为具有语义意义的片段,同时根据常见的结构线索处理各种文本类型和边缘情况。 - form: form - - name: tokenizer - type: select - options: - - value: cl100k_base - label: - en_US: cl100k_base - - value: o200k_base - label: - en_US: o200k_base - - value: p50k_base - label: - en_US: p50k_base - - value: r50k_base - label: - en_US: r50k_base - - value: p50k_edit - label: - en_US: p50k_edit - - value: gpt2 - label: - en_US: gpt2 - label: - en_US: Tokenizer - human_description: - en_US: | - · cl100k_base --- gpt-4, gpt-3.5-turbo, gpt-3.5 - · o200k_base --- gpt-4o, gpt-4o-mini - · p50k_base --- text-davinci-003, text-davinci-002 - · r50k_base --- text-davinci-001, text-curie-001 - · p50k_edit --- text-davinci-edit-001, code-davinci-edit-001 - · gpt2 --- gpt-2 - form: form diff --git a/api/core/tools/provider/builtin/json_process/_assets/icon.svg b/api/core/tools/provider/builtin/json_process/_assets/icon.svg deleted file mode 100644 index b123983836962a..00000000000000 --- a/api/core/tools/provider/builtin/json_process/_assets/icon.svg +++ /dev/null @@ -1,358 +0,0 @@ - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/json_process/json_process.py b/api/core/tools/provider/builtin/json_process/json_process.py deleted file mode 100644 index 10746210b5c652..00000000000000 --- a/api/core/tools/provider/builtin/json_process/json_process.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.json_process.tools.parse import JSONParseTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class 
JsonExtractProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - JSONParseTool().invoke( - user_id="", - tool_parameters={"content": '{"name": "John", "age": 30, "city": "New York"}', "json_filter": "$.name"}, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/json_process/json_process.yaml b/api/core/tools/provider/builtin/json_process/json_process.yaml deleted file mode 100644 index c7896bbea7a69f..00000000000000 --- a/api/core/tools/provider/builtin/json_process/json_process.yaml +++ /dev/null @@ -1,14 +0,0 @@ -identity: - author: Mingwei Zhang - name: json_process - label: - en_US: JSON Process - zh_Hans: JSON 处理 - pt_BR: JSON Process - description: - en_US: Tools for processing JSON content using jsonpath_ng - zh_Hans: 利用 jsonpath_ng 处理 JSON 内容的工具 - pt_BR: Tools for processing JSON content using jsonpath_ng - icon: icon.svg - tags: - - utilities diff --git a/api/core/tools/provider/builtin/json_process/tools/delete.py b/api/core/tools/provider/builtin/json_process/tools/delete.py deleted file mode 100644 index fcab3d71a93cf9..00000000000000 --- a/api/core/tools/provider/builtin/json_process/tools/delete.py +++ /dev/null @@ -1,61 +0,0 @@ -import json -from typing import Any, Union - -from jsonpath_ng import parse - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class JSONDeleteTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the JSON delete tool - """ - # Get content - content = tool_parameters.get("content", "") - if not content: - return self.create_text_message("Invalid parameter content") - - # Get query - query = tool_parameters.get("query", "") - if not query: - return self.create_text_message("Invalid parameter query") - 
- ensure_ascii = tool_parameters.get("ensure_ascii", True) - try: - result = self._delete(content, query, ensure_ascii) - return self.create_text_message(str(result)) - except Exception as e: - return self.create_text_message(f"Failed to delete JSON content: {str(e)}") - - def _delete(self, origin_json: str, query: str, ensure_ascii: bool) -> str: - try: - input_data = json.loads(origin_json) - expr = parse("$." + query.lstrip("$.")) # Ensure query path starts with $ - - matches = expr.find(input_data) - - if not matches: - return json.dumps(input_data, ensure_ascii=ensure_ascii) # No changes if no matches found - - for match in matches: - if isinstance(match.context.value, dict): - # Delete key from dictionary - del match.context.value[match.path.fields[-1]] - elif isinstance(match.context.value, list): - # Remove item from list - match.context.value.remove(match.value) - else: - # For other cases, we might want to set to None or remove the parent key - parent = match.context.parent - if parent: - del parent.value[match.path.fields[-1]] - - return json.dumps(input_data, ensure_ascii=ensure_ascii) - except Exception as e: - raise Exception(f"Delete operation failed: {str(e)}") diff --git a/api/core/tools/provider/builtin/json_process/tools/delete.yaml b/api/core/tools/provider/builtin/json_process/tools/delete.yaml deleted file mode 100644 index 4d390e40d17232..00000000000000 --- a/api/core/tools/provider/builtin/json_process/tools/delete.yaml +++ /dev/null @@ -1,52 +0,0 @@ -identity: - name: json_delete - author: Mingwei Zhang - label: - en_US: JSON Delete - zh_Hans: JSON 删除 - pt_BR: JSON Delete -description: - human: - en_US: A tool for deleting JSON content - zh_Hans: 一个删除 JSON 内容的工具 - pt_BR: A tool for deleting JSON content - llm: A tool for deleting JSON content -parameters: - - name: content - type: string - required: true - label: - en_US: JSON content - zh_Hans: JSON 内容 - pt_BR: JSON content - human_description: - en_US: JSON content to be processed - 
zh_Hans: 待处理的 JSON 内容 - pt_BR: JSON content to be processed - llm_description: JSON content to be processed - form: llm - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 查询 - pt_BR: Query - human_description: - en_US: JSONPath query to locate the element to delete - zh_Hans: 用于定位要删除元素的 JSONPath 查询 - pt_BR: JSONPath query to locate the element to delete - llm_description: JSONPath query to locate the element to delete - form: llm - - name: ensure_ascii - type: boolean - default: true - label: - en_US: Ensure ASCII - zh_Hans: 确保 ASCII - pt_BR: Ensure ASCII - human_description: - en_US: Ensure the JSON output is ASCII encoded - zh_Hans: 确保输出的 JSON 是 ASCII 编码 - pt_BR: Ensure the JSON output is ASCII encoded - form: form diff --git a/api/core/tools/provider/builtin/json_process/tools/insert.py b/api/core/tools/provider/builtin/json_process/tools/insert.py deleted file mode 100644 index 793c74e5f9df51..00000000000000 --- a/api/core/tools/provider/builtin/json_process/tools/insert.py +++ /dev/null @@ -1,105 +0,0 @@ -import json -from typing import Any, Union - -from jsonpath_ng import parse - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class JSONParseTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - # get content - content = tool_parameters.get("content", "") - if not content: - return self.create_text_message("Invalid parameter content") - - # get query - query = tool_parameters.get("query", "") - if not query: - return self.create_text_message("Invalid parameter query") - - # get new value - new_value = tool_parameters.get("new_value", "") - if not new_value: - return self.create_text_message("Invalid parameter new_value") - - # get insert position - index = tool_parameters.get("index") - - # get create path - create_path = 
tool_parameters.get("create_path", False) - - # get value decode. - # if true, it will be decoded to an dict - value_decode = tool_parameters.get("value_decode", False) - - ensure_ascii = tool_parameters.get("ensure_ascii", True) - try: - result = self._insert(content, query, new_value, ensure_ascii, value_decode, index, create_path) - return self.create_text_message(str(result)) - except Exception: - return self.create_text_message("Failed to insert JSON content") - - def _insert( - self, origin_json, query, new_value, ensure_ascii: bool, value_decode: bool, index=None, create_path=False - ): - try: - input_data = json.loads(origin_json) - expr = parse(query) - if value_decode is True: - try: - new_value = json.loads(new_value) - except json.JSONDecodeError: - return "Cannot decode new value to json object" - - matches = expr.find(input_data) - - if not matches and create_path: - # create new path - path_parts = query.strip("$").strip(".").split(".") - current = input_data - for i, part in enumerate(path_parts): - if "[" in part and "]" in part: - # process array index - array_name, index = part.split("[") - index = int(index.rstrip("]")) - if array_name not in current: - current[array_name] = [] - while len(current[array_name]) <= index: - current[array_name].append({}) - current = current[array_name][index] - else: - if i == len(path_parts) - 1: - current[part] = new_value - elif part not in current: - current[part] = {} - current = current[part] - else: - for match in matches: - if isinstance(match.value, dict): - # insert new value into dict - if isinstance(new_value, dict): - match.value.update(new_value) - else: - raise ValueError("Cannot insert non-dict value into dict") - elif isinstance(match.value, list): - # insert new value into list - if index is None: - match.value.append(new_value) - else: - match.value.insert(int(index), new_value) - else: - # replace old value with new value - match.full_path.update(input_data, new_value) - - return 
json.dumps(input_data, ensure_ascii=ensure_ascii) - except Exception as e: - return str(e) diff --git a/api/core/tools/provider/builtin/json_process/tools/insert.yaml b/api/core/tools/provider/builtin/json_process/tools/insert.yaml deleted file mode 100644 index 21b51312dab6b3..00000000000000 --- a/api/core/tools/provider/builtin/json_process/tools/insert.yaml +++ /dev/null @@ -1,101 +0,0 @@ -identity: - name: json_insert - author: Mingwei Zhang - label: - en_US: JSON Insert - zh_Hans: JSON 插入 - pt_BR: JSON Insert -description: - human: - en_US: A tool for inserting JSON content - zh_Hans: 一个插入 JSON 内容的工具 - pt_BR: A tool for inserting JSON content - llm: A tool for inserting JSON content -parameters: - - name: content - type: string - required: true - label: - en_US: JSON content - zh_Hans: JSON 内容 - pt_BR: JSON content - human_description: - en_US: JSON content - zh_Hans: JSON 内容 - pt_BR: JSON content - llm_description: JSON content to be processed - form: llm - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 查询 - pt_BR: Query - human_description: - en_US: Object to insert - zh_Hans: 待插入的对象 - pt_BR: Object to insert - llm_description: JSONPath query to locate the element to insert - form: llm - - name: new_value - type: string - required: true - label: - en_US: New Value - zh_Hans: 新值 - pt_BR: New Value - human_description: - en_US: New Value - zh_Hans: 插入的新值 - pt_BR: New Value - llm_description: New Value to insert - form: llm - - name: value_decode - type: boolean - default: false - label: - en_US: Decode Value - zh_Hans: 解码值 - pt_BR: Decode Value - human_description: - en_US: Whether to decode the value to a JSON object - zh_Hans: 是否将值解码为 JSON 对象 - pt_BR: Whether to decode the value to a JSON object - form: form - - name: create_path - type: select - required: true - default: "False" - label: - en_US: Whether to create a path - zh_Hans: 是否创建路径 - pt_BR: Whether to create a path - human_description: - en_US: Whether to create a 
path when the path does not exist - zh_Hans: 查询路径不存在时是否创建路径 - pt_BR: Whether to create a path when the path does not exist - options: - - value: "True" - label: - en_US: "Yes" - zh_Hans: 是 - pt_BR: "Yes" - - value: "False" - label: - en_US: "No" - zh_Hans: 否 - pt_BR: "No" - form: form - - name: ensure_ascii - type: boolean - default: true - label: - en_US: Ensure ASCII - zh_Hans: 确保 ASCII - pt_BR: Ensure ASCII - human_description: - en_US: Ensure the JSON output is ASCII encoded - zh_Hans: 确保输出的 JSON 是 ASCII 编码 - pt_BR: Ensure the JSON output is ASCII encoded - form: form diff --git a/api/core/tools/provider/builtin/json_process/tools/parse.py b/api/core/tools/provider/builtin/json_process/tools/parse.py deleted file mode 100644 index 37cae401533190..00000000000000 --- a/api/core/tools/provider/builtin/json_process/tools/parse.py +++ /dev/null @@ -1,53 +0,0 @@ -import json -from typing import Any, Union - -from jsonpath_ng import parse - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class JSONParseTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - # get content - content = tool_parameters.get("content", "") - if not content: - return self.create_text_message("Invalid parameter content") - - # get json filter - json_filter = tool_parameters.get("json_filter", "") - if not json_filter: - return self.create_text_message("Invalid parameter json_filter") - - ensure_ascii = tool_parameters.get("ensure_ascii", True) - try: - result = self._extract(content, json_filter, ensure_ascii) - return self.create_text_message(str(result)) - except Exception: - return self.create_text_message("Failed to extract JSON content") - - # Extract data from JSON content - def _extract(self, content: str, json_filter: str, ensure_ascii: bool) -> str: - try: - input_data = 
json.loads(content) - expr = parse(json_filter) - result = [match.value for match in expr.find(input_data)] - - if len(result) == 1: - result = result[0] - - if isinstance(result, dict | list): - return json.dumps(result, ensure_ascii=ensure_ascii) - elif isinstance(result, str | int | float | bool) or result is None: - return str(result) - else: - return repr(result) - except Exception as e: - return str(e) diff --git a/api/core/tools/provider/builtin/json_process/tools/parse.yaml b/api/core/tools/provider/builtin/json_process/tools/parse.yaml deleted file mode 100644 index c35f4eac0775ad..00000000000000 --- a/api/core/tools/provider/builtin/json_process/tools/parse.yaml +++ /dev/null @@ -1,52 +0,0 @@ -identity: - name: parse - author: Mingwei Zhang - label: - en_US: JSON Parse - zh_Hans: JSON 解析 - pt_BR: JSON Parse -description: - human: - en_US: A tool for extracting JSON objects - zh_Hans: 一个解析JSON对象的工具 - pt_BR: A tool for extracting JSON objects - llm: A tool for extracting JSON objects -parameters: - - name: content - type: string - required: true - label: - en_US: JSON data - zh_Hans: JSON数据 - pt_BR: JSON data - human_description: - en_US: JSON data - zh_Hans: JSON数据 - pt_BR: JSON数据 - llm_description: JSON data to be processed - form: llm - - name: json_filter - type: string - required: true - label: - en_US: JSON filter - zh_Hans: JSON解析对象 - pt_BR: JSON filter - human_description: - en_US: JSON fields to be parsed - zh_Hans: 需要解析的 JSON 字段 - pt_BR: JSON fields to be parsed - llm_description: JSON fields to be parsed - form: llm - - name: ensure_ascii - type: boolean - default: true - label: - en_US: Ensure ASCII - zh_Hans: 确保 ASCII - pt_BR: Ensure ASCII - human_description: - en_US: Ensure the JSON output is ASCII encoded - zh_Hans: 确保输出的 JSON 是 ASCII 编码 - pt_BR: Ensure the JSON output is ASCII encoded - form: form diff --git a/api/core/tools/provider/builtin/json_process/tools/replace.py b/api/core/tools/provider/builtin/json_process/tools/replace.py 
deleted file mode 100644 index 383825c2d0b259..00000000000000 --- a/api/core/tools/provider/builtin/json_process/tools/replace.py +++ /dev/null @@ -1,129 +0,0 @@ -import json -from typing import Any, Union - -from jsonpath_ng import parse - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class JSONReplaceTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - # get content - content = tool_parameters.get("content", "") - if not content: - return self.create_text_message("Invalid parameter content") - - # get query - query = tool_parameters.get("query", "") - if not query: - return self.create_text_message("Invalid parameter query") - - # get replace value - replace_value = tool_parameters.get("replace_value", "") - if not replace_value: - return self.create_text_message("Invalid parameter replace_value") - - # get replace model - replace_model = tool_parameters.get("replace_model", "") - if not replace_model: - return self.create_text_message("Invalid parameter replace_model") - - # get value decode. 
- # if true, it will be decoded to an dict - value_decode = tool_parameters.get("value_decode", False) - - ensure_ascii = tool_parameters.get("ensure_ascii", True) - try: - if replace_model == "pattern": - # get replace pattern - replace_pattern = tool_parameters.get("replace_pattern", "") - if not replace_pattern: - return self.create_text_message("Invalid parameter replace_pattern") - result = self._replace_pattern( - content, query, replace_pattern, replace_value, ensure_ascii, value_decode - ) - elif replace_model == "key": - result = self._replace_key(content, query, replace_value, ensure_ascii) - elif replace_model == "value": - result = self._replace_value(content, query, replace_value, ensure_ascii, value_decode) - return self.create_text_message(str(result)) - except Exception: - return self.create_text_message("Failed to replace JSON content") - - # Replace pattern - def _replace_pattern( - self, content: str, query: str, replace_pattern: str, replace_value: str, ensure_ascii: bool, value_decode: bool - ) -> str: - try: - input_data = json.loads(content) - expr = parse(query) - - matches = expr.find(input_data) - - for match in matches: - new_value = match.value.replace(replace_pattern, replace_value) - if value_decode is True: - try: - new_value = json.loads(new_value) - except json.JSONDecodeError: - return "Cannot decode replace value to json object" - - match.full_path.update(input_data, new_value) - - return json.dumps(input_data, ensure_ascii=ensure_ascii) - except Exception as e: - return str(e) - - # Replace key - def _replace_key(self, content: str, query: str, replace_value: str, ensure_ascii: bool) -> str: - try: - input_data = json.loads(content) - expr = parse(query) - - matches = expr.find(input_data) - - for match in matches: - parent = match.context.value - if isinstance(parent, dict): - old_key = match.path.fields[0] - if old_key in parent: - value = parent.pop(old_key) - parent[replace_value] = value - elif isinstance(parent, list): - 
for item in parent: - if isinstance(item, dict) and old_key in item: - value = item.pop(old_key) - item[replace_value] = value - return json.dumps(input_data, ensure_ascii=ensure_ascii) - except Exception as e: - return str(e) - - # Replace value - def _replace_value( - self, content: str, query: str, replace_value: str, ensure_ascii: bool, value_decode: bool - ) -> str: - try: - input_data = json.loads(content) - expr = parse(query) - if value_decode is True: - try: - replace_value = json.loads(replace_value) - except json.JSONDecodeError: - return "Cannot decode replace value to json object" - - matches = expr.find(input_data) - - for match in matches: - match.full_path.update(input_data, replace_value) - - return json.dumps(input_data, ensure_ascii=ensure_ascii) - except Exception as e: - return str(e) diff --git a/api/core/tools/provider/builtin/json_process/tools/replace.yaml b/api/core/tools/provider/builtin/json_process/tools/replace.yaml deleted file mode 100644 index ae238b1fbcd05e..00000000000000 --- a/api/core/tools/provider/builtin/json_process/tools/replace.yaml +++ /dev/null @@ -1,119 +0,0 @@ -identity: - name: json_replace - author: Mingwei Zhang - label: - en_US: JSON Replace - zh_Hans: JSON 替换 - pt_BR: JSON Replace -description: - human: - en_US: A tool for replacing JSON content - zh_Hans: 一个替换 JSON 内容的工具 - pt_BR: A tool for replacing JSON content - llm: A tool for replacing JSON content -parameters: - - name: content - type: string - required: true - label: - en_US: JSON content - zh_Hans: JSON 内容 - pt_BR: JSON content - human_description: - en_US: JSON content - zh_Hans: JSON 内容 - pt_BR: JSON content - llm_description: JSON content to be processed - form: llm - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 查询 - pt_BR: Query - human_description: - en_US: Query - zh_Hans: 查询 - pt_BR: Query - llm_description: JSONPath query to locate the element to replace - form: llm - - name: replace_pattern - type: string - 
required: false - label: - en_US: String to be replaced - zh_Hans: 待替换字符串 - pt_BR: String to be replaced - human_description: - en_US: String to be replaced - zh_Hans: 待替换字符串 - pt_BR: String to be replaced - llm_description: String to be replaced - form: llm - - name: replace_value - type: string - required: true - label: - en_US: Replace Value - zh_Hans: 替换值 - pt_BR: Replace Value - human_description: - en_US: New Value - zh_Hans: 新值 - pt_BR: New Value - llm_description: New Value to replace - form: llm - - name: value_decode - type: boolean - default: false - label: - en_US: Decode Value - zh_Hans: 解码值 - pt_BR: Decode Value - human_description: - en_US: Whether to decode the value to a JSON object (Does not apply to replace key) - zh_Hans: 是否将值解码为 JSON 对象 (不适用于键替换) - pt_BR: Whether to decode the value to a JSON object (Does not apply to replace key) - form: form - - name: replace_model - type: select - required: true - default: pattern - label: - en_US: Replace Model - zh_Hans: 替换模式 - pt_BR: Replace Model - human_description: - en_US: Replace Model - zh_Hans: 替换模式 - pt_BR: Replace Model - options: - - value: key - label: - en_US: replace key - zh_Hans: 键替换 - pt_BR: replace key - - value: value - label: - en_US: replace value - zh_Hans: 值替换 - pt_BR: replace value - - value: pattern - label: - en_US: replace string - zh_Hans: 字符串替换 - pt_BR: replace string - form: form - - name: ensure_ascii - type: boolean - default: true - label: - en_US: Ensure ASCII - zh_Hans: 确保 ASCII - pt_BR: Ensure ASCII - human_description: - en_US: Ensure the JSON output is ASCII encoded - zh_Hans: 确保输出的 JSON 是 ASCII 编码 - pt_BR: Ensure the JSON output is ASCII encoded - form: form diff --git a/api/core/tools/provider/builtin/judge0ce/_assets/icon.svg b/api/core/tools/provider/builtin/judge0ce/_assets/icon.svg deleted file mode 100644 index 3e7e33da6e8b25..00000000000000 --- a/api/core/tools/provider/builtin/judge0ce/_assets/icon.svg +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - diff 
--git a/api/core/tools/provider/builtin/judge0ce/judge0ce.py b/api/core/tools/provider/builtin/judge0ce/judge0ce.py deleted file mode 100644 index 50db74dd9ebced..00000000000000 --- a/api/core/tools/provider/builtin/judge0ce/judge0ce.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.judge0ce.tools.executeCode import ExecuteCodeTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class Judge0CEProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - ExecuteCodeTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "source_code": "print('hello world')", - "language_id": 71, - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/judge0ce/judge0ce.yaml b/api/core/tools/provider/builtin/judge0ce/judge0ce.yaml deleted file mode 100644 index 9ff8aaac6debc6..00000000000000 --- a/api/core/tools/provider/builtin/judge0ce/judge0ce.yaml +++ /dev/null @@ -1,32 +0,0 @@ -identity: - author: Richards Tu - name: judge0ce - label: - en_US: Judge0 CE - zh_Hans: Judge0 CE - pt_BR: Judge0 CE - description: - en_US: Judge0 CE is an open-source code execution system. Support various languages, including C, C++, Java, Python, Ruby, etc. - zh_Hans: Judge0 CE 是一个开源的代码执行系统。支持多种语言,包括 C、C++、Java、Python、Ruby 等。 - pt_BR: Judge0 CE é um sistema de execução de código de código aberto. Suporta várias linguagens, incluindo C, C++, Java, Python, Ruby, etc. - icon: icon.svg - tags: - - utilities - - other -credentials_for_provider: - X-RapidAPI-Key: - type: secret-input - required: true - label: - en_US: RapidAPI Key - zh_Hans: RapidAPI Key - pt_BR: RapidAPI Key - help: - en_US: RapidAPI Key is required to access the Judge0 CE API. 
- zh_Hans: RapidAPI Key 是访问 Judge0 CE API 所必需的。 - pt_BR: RapidAPI Key é necessário para acessar a API do Judge0 CE. - placeholder: - en_US: Enter your RapidAPI Key - zh_Hans: 输入你的 RapidAPI Key - pt_BR: Insira sua RapidAPI Key - url: https://rapidapi.com/judge0-official/api/judge0-ce diff --git a/api/core/tools/provider/builtin/judge0ce/tools/executeCode.py b/api/core/tools/provider/builtin/judge0ce/tools/executeCode.py deleted file mode 100644 index b8d654ff639575..00000000000000 --- a/api/core/tools/provider/builtin/judge0ce/tools/executeCode.py +++ /dev/null @@ -1,61 +0,0 @@ -import json -from typing import Any, Union - -import requests -from httpx import post - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class ExecuteCodeTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - api_key = self.runtime.credentials["X-RapidAPI-Key"] - - url = "https://judge0-ce.p.rapidapi.com/submissions" - - querystring = {"base64_encoded": "false", "fields": "*"} - - headers = { - "Content-Type": "application/json", - "X-RapidAPI-Key": api_key, - "X-RapidAPI-Host": "judge0-ce.p.rapidapi.com", - } - - payload = { - "language_id": tool_parameters["language_id"], - "source_code": tool_parameters["source_code"], - "stdin": tool_parameters.get("stdin", ""), - "expected_output": tool_parameters.get("expected_output", ""), - "additional_files": tool_parameters.get("additional_files", ""), - } - - response = post(url, data=json.dumps(payload), headers=headers, params=querystring) - - if response.status_code != 201: - raise Exception(response.text) - - token = response.json()["token"] - - url = f"https://judge0-ce.p.rapidapi.com/submissions/{token}" - headers = {"X-RapidAPI-Key": api_key} - - response = requests.get(url, headers=headers) - if response.status_code == 200: - result = 
response.json() - return self.create_text_message( - text=f"stdout: {result.get('stdout', '')}\n" - f"stderr: {result.get('stderr', '')}\n" - f"compile_output: {result.get('compile_output', '')}\n" - f"message: {result.get('message', '')}\n" - f"status: {result['status']['description']}\n" - f"time: {result.get('time', '')} seconds\n" - f"memory: {result.get('memory', '')} bytes" - ) - else: - return self.create_text_message(text=f"Error retrieving submission details: {response.text}") diff --git a/api/core/tools/provider/builtin/judge0ce/tools/executeCode.yaml b/api/core/tools/provider/builtin/judge0ce/tools/executeCode.yaml deleted file mode 100644 index a8c0776f40185e..00000000000000 --- a/api/core/tools/provider/builtin/judge0ce/tools/executeCode.yaml +++ /dev/null @@ -1,67 +0,0 @@ -identity: - name: submitCodeExecutionTask - author: Richards Tu - label: - en_US: Submit Code Execution Task to Judge0 CE and get execution result. - zh_Hans: 提交代码执行任务到 Judge0 CE 并获取执行结果。 -description: - human: - en_US: A tool for executing code and getting the result. - zh_Hans: 一个用于执行代码并获取结果的工具。 - llm: This tool is used for executing code and getting the result. -parameters: - - name: source_code - type: string - required: true - label: - en_US: Source Code - zh_Hans: 源代码 - human_description: - en_US: The source code to be executed. - zh_Hans: 要执行的源代码。 - llm_description: The source code to be executed. - form: llm - - name: language_id - type: number - required: true - label: - en_US: Language ID - zh_Hans: 语言 ID - human_description: - en_US: The ID of the language in which the source code is written. - zh_Hans: 源代码所使用的语言的 ID。 - llm_description: The ID of the language in which the source code is written. For example, 50 for C++, 71 for Python, etc. - form: llm - - name: stdin - type: string - required: false - label: - en_US: Standard Input - zh_Hans: 标准输入 - human_description: - en_US: The standard input to be provided to the program. 
- zh_Hans: 提供给程序的标准输入。 - llm_description: The standard input to be provided to the program. Optional. - form: llm - - name: expected_output - type: string - required: false - label: - en_US: Expected Output - zh_Hans: 期望输出 - human_description: - en_US: The expected output of the program. Used for comparison in some scenarios. - zh_Hans: 程序的期望输出。在某些场景下用于比较。 - llm_description: The expected output of the program. Used for comparison in some scenarios. Optional. - form: llm - - name: additional_files - type: string - required: false - label: - en_US: Additional Files - zh_Hans: 附加文件 - human_description: - en_US: Base64 encoded additional files for the submission. - zh_Hans: 提交的 Base64 编码的附加文件。 - llm_description: Base64 encoded additional files for the submission. Optional. - form: llm diff --git a/api/core/tools/provider/builtin/maths/_assets/icon.svg b/api/core/tools/provider/builtin/maths/_assets/icon.svg deleted file mode 100644 index f94d1152113830..00000000000000 --- a/api/core/tools/provider/builtin/maths/_assets/icon.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - - diff --git a/api/core/tools/provider/builtin/maths/maths.py b/api/core/tools/provider/builtin/maths/maths.py deleted file mode 100644 index d4b449ec87a18a..00000000000000 --- a/api/core/tools/provider/builtin/maths/maths.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.maths.tools.eval_expression import EvaluateExpressionTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class MathsProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - EvaluateExpressionTool().invoke( - user_id="", - tool_parameters={ - "expression": "1+(2+3)*4", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git 
a/api/core/tools/provider/builtin/maths/maths.yaml b/api/core/tools/provider/builtin/maths/maths.yaml deleted file mode 100644 index 35c2380e29a701..00000000000000 --- a/api/core/tools/provider/builtin/maths/maths.yaml +++ /dev/null @@ -1,15 +0,0 @@ -identity: - author: Bowen Liang - name: maths - label: - en_US: Maths - zh_Hans: 数学工具 - pt_BR: Maths - description: - en_US: A tool for maths. - zh_Hans: 一个用于数学计算的工具。 - pt_BR: A tool for maths. - icon: icon.svg - tags: - - utilities - - productivity diff --git a/api/core/tools/provider/builtin/maths/tools/eval_expression.py b/api/core/tools/provider/builtin/maths/tools/eval_expression.py deleted file mode 100644 index 0c5b5e41cbe1e1..00000000000000 --- a/api/core/tools/provider/builtin/maths/tools/eval_expression.py +++ /dev/null @@ -1,30 +0,0 @@ -import logging -from typing import Any, Union - -import numexpr as ne - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class EvaluateExpressionTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - # get expression - expression = tool_parameters.get("expression", "").strip() - if not expression: - return self.create_text_message("Invalid expression") - - try: - result = ne.evaluate(expression) - result_str = str(result) - except Exception as e: - logging.exception(f"Error evaluating expression: {expression}") - return self.create_text_message(f"Invalid expression: {expression}, error: {str(e)}") - return self.create_text_message(f'The result of the expression "{expression}" is {result_str}') diff --git a/api/core/tools/provider/builtin/maths/tools/eval_expression.yaml b/api/core/tools/provider/builtin/maths/tools/eval_expression.yaml deleted file mode 100644 index c936a4293fbe72..00000000000000 --- a/api/core/tools/provider/builtin/maths/tools/eval_expression.yaml +++ /dev/null 
@@ -1,26 +0,0 @@ -identity: - name: eval_expression - author: Bowen Liang - label: - en_US: Evaluate Math Expression - zh_Hans: 计算数学表达式 - pt_BR: Evaluate Math Expression -description: - human: - en_US: A tool for evaluating an math expression, calculated locally with NumExpr. - zh_Hans: 一个用于计算数学表达式的工具,表达式将通过NumExpr本地执行。 - pt_BR: A tool for evaluating an math expression, calculated locally with NumExpr. - llm: A tool for evaluating an math expression. -parameters: - - name: expression - type: string - required: true - label: - en_US: Math Expression - zh_Hans: 数学计算表达式 - pt_BR: Math Expression - human_description: - en_US: Math Expression - zh_Hans: 数学计算表达式 - pt_BR: Math Expression - form: llm diff --git a/api/core/tools/provider/builtin/nominatim/_assets/icon.svg b/api/core/tools/provider/builtin/nominatim/_assets/icon.svg deleted file mode 100644 index db5a4eb868c5e8..00000000000000 --- a/api/core/tools/provider/builtin/nominatim/_assets/icon.svg +++ /dev/null @@ -1,277 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 010110010011010110010011 - 010110010011010110010011 - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/nominatim/nominatim.py b/api/core/tools/provider/builtin/nominatim/nominatim.py deleted file mode 100644 index 5a24bed7507eb6..00000000000000 --- a/api/core/tools/provider/builtin/nominatim/nominatim.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError 
-from core.tools.provider.builtin.nominatim.tools.nominatim_search import NominatimSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class NominatimProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - result = ( - NominatimSearchTool() - .fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ) - .invoke( - user_id="", - tool_parameters={ - "query": "London", - "limit": 1, - }, - ) - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/nominatim/nominatim.yaml b/api/core/tools/provider/builtin/nominatim/nominatim.yaml deleted file mode 100644 index 7d014bd78c6a59..00000000000000 --- a/api/core/tools/provider/builtin/nominatim/nominatim.yaml +++ /dev/null @@ -1,43 +0,0 @@ -identity: - author: Charles Zhou - name: nominatim - label: - en_US: Nominatim - zh_Hans: Nominatim - de_DE: Nominatim - ja_JP: Nominatim - description: - en_US: Nominatim is a search engine for OpenStreetMap data - zh_Hans: Nominatim是OpenStreetMap数据的搜索引擎 - de_DE: Nominatim ist eine Suchmaschine für OpenStreetMap-Daten - ja_JP: NominatimはOpenStreetMapデータの検索エンジンです - icon: icon.svg - tags: - - search - - utilities -credentials_for_provider: - base_url: - type: text-input - required: false - default: https://nominatim.openstreetmap.org - label: - en_US: Nominatim Base URL - zh_Hans: Nominatim 基础 URL - de_DE: Nominatim Basis-URL - ja_JP: Nominatim ベースURL - placeholder: - en_US: "Enter your Nominatim instance URL (default: - https://nominatim.openstreetmap.org)" - zh_Hans: 输入您的Nominatim实例URL(默认:https://nominatim.openstreetmap.org) - de_DE: "Geben Sie Ihre Nominatim-Instanz-URL ein (Standard: - https://nominatim.openstreetmap.org)" - ja_JP: NominatimインスタンスのURLを入力してください(デフォルト:https://nominatim.openstreetmap.org) - help: - en_US: The base URL for the Nominatim instance. 
Use the default for the public - service or enter your self-hosted instance URL. - zh_Hans: Nominatim实例的基础URL。使用默认值可访问公共服务,或输入您的自托管实例URL。 - de_DE: Die Basis-URL für die Nominatim-Instanz. Verwenden Sie den Standardwert - für den öffentlichen Dienst oder geben Sie die URL Ihrer selbst - gehosteten Instanz ein. - ja_JP: NominatimインスタンスのベースURL。公共サービスにはデフォルトを使用するか、自己ホスティングインスタンスのURLを入力してください。 - url: https://nominatim.org/ diff --git a/api/core/tools/provider/builtin/nominatim/tools/nominatim_lookup.py b/api/core/tools/provider/builtin/nominatim/tools/nominatim_lookup.py deleted file mode 100644 index ffa8ad0fcc02e0..00000000000000 --- a/api/core/tools/provider/builtin/nominatim/tools/nominatim_lookup.py +++ /dev/null @@ -1,40 +0,0 @@ -import json -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class NominatimLookupTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - osm_ids = tool_parameters.get("osm_ids", "") - - if not osm_ids: - return self.create_text_message("Please provide OSM IDs") - - params = {"osm_ids": osm_ids, "format": "json", "addressdetails": 1} - - return self._make_request(user_id, "lookup", params) - - def _make_request(self, user_id: str, endpoint: str, params: dict) -> ToolInvokeMessage: - base_url = self.runtime.credentials.get("base_url", "https://nominatim.openstreetmap.org") - - try: - headers = {"User-Agent": "DifyNominatimTool/1.0"} - s = requests.session() - response = s.request(method="GET", headers=headers, url=f"{base_url}/{endpoint}", params=params) - response_data = response.json() - - if response.status_code == 200: - s.close() - return self.create_text_message( - self.summary(user_id=user_id, content=json.dumps(response_data, ensure_ascii=False)) - ) - else: - return self.create_text_message(f"Error: 
{response.status_code} - {response.text}") - except Exception as e: - return self.create_text_message(f"An error occurred: {str(e)}") diff --git a/api/core/tools/provider/builtin/nominatim/tools/nominatim_lookup.yaml b/api/core/tools/provider/builtin/nominatim/tools/nominatim_lookup.yaml deleted file mode 100644 index 508c4dcd88ff15..00000000000000 --- a/api/core/tools/provider/builtin/nominatim/tools/nominatim_lookup.yaml +++ /dev/null @@ -1,31 +0,0 @@ -identity: - name: nominatim_lookup - author: Charles Zhou - label: - en_US: Nominatim OSM Lookup - zh_Hans: Nominatim OSM 对象查找 - de_DE: Nominatim OSM-Objektsuche - ja_JP: Nominatim OSM ルックアップ -description: - human: - en_US: Look up OSM objects using their IDs with Nominatim - zh_Hans: 使用Nominatim通过ID查找OSM对象 - de_DE: Suchen Sie OSM-Objekte anhand ihrer IDs mit Nominatim - ja_JP: Nominatimを使用してIDでOSMオブジェクトを検索 - llm: A tool for looking up OpenStreetMap objects using their IDs with Nominatim. -parameters: - - name: osm_ids - type: string - required: true - label: - en_US: OSM IDs - zh_Hans: OSM ID - de_DE: OSM-IDs - ja_JP: OSM ID - human_description: - en_US: Comma-separated list of OSM IDs to lookup (e.g., N123,W456,R789) - zh_Hans: 要查找的OSM ID的逗号分隔列表(例如:N123,W456,R789) - de_DE: Kommagetrennte Liste von OSM-IDs für die Suche (z.B. N123,W456,R789) - ja_JP: 検索するOSM IDのカンマ区切りリスト(例:N123,W456,R789) - llm_description: A comma-separated list of OSM IDs (prefixed with N, W, or R) for lookup. 
- form: llm diff --git a/api/core/tools/provider/builtin/nominatim/tools/nominatim_reverse.py b/api/core/tools/provider/builtin/nominatim/tools/nominatim_reverse.py deleted file mode 100644 index f46691e1a3ebb4..00000000000000 --- a/api/core/tools/provider/builtin/nominatim/tools/nominatim_reverse.py +++ /dev/null @@ -1,41 +0,0 @@ -import json -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class NominatimReverseTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - lat = tool_parameters.get("lat") - lon = tool_parameters.get("lon") - - if lat is None or lon is None: - return self.create_text_message("Please provide both latitude and longitude") - - params = {"lat": lat, "lon": lon, "format": "json", "addressdetails": 1} - - return self._make_request(user_id, "reverse", params) - - def _make_request(self, user_id: str, endpoint: str, params: dict) -> ToolInvokeMessage: - base_url = self.runtime.credentials.get("base_url", "https://nominatim.openstreetmap.org") - - try: - headers = {"User-Agent": "DifyNominatimTool/1.0"} - s = requests.session() - response = s.request(method="GET", headers=headers, url=f"{base_url}/{endpoint}", params=params) - response_data = response.json() - - if response.status_code == 200: - s.close() - return self.create_text_message( - self.summary(user_id=user_id, content=json.dumps(response_data, ensure_ascii=False)) - ) - else: - return self.create_text_message(f"Error: {response.status_code} - {response.text}") - except Exception as e: - return self.create_text_message(f"An error occurred: {str(e)}") diff --git a/api/core/tools/provider/builtin/nominatim/tools/nominatim_reverse.yaml b/api/core/tools/provider/builtin/nominatim/tools/nominatim_reverse.yaml deleted file mode 100644 index f1a2dd09fbc5d5..00000000000000 --- 
a/api/core/tools/provider/builtin/nominatim/tools/nominatim_reverse.yaml +++ /dev/null @@ -1,47 +0,0 @@ -identity: - name: nominatim_reverse - author: Charles Zhou - label: - en_US: Nominatim Reverse Geocoding - zh_Hans: Nominatim 反向地理编码 - de_DE: Nominatim Rückwärts-Geocodierung - ja_JP: Nominatim リバースジオコーディング -description: - human: - en_US: Convert coordinates to addresses using Nominatim - zh_Hans: 使用Nominatim将坐标转换为地址 - de_DE: Konvertieren Sie Koordinaten in Adressen mit Nominatim - ja_JP: Nominatimを使用して座標を住所に変換 - llm: A tool for reverse geocoding using Nominatim, which can convert latitude - and longitude coordinates to an address. -parameters: - - name: lat - type: number - required: true - label: - en_US: Latitude - zh_Hans: 纬度 - de_DE: Breitengrad - ja_JP: 緯度 - human_description: - en_US: Latitude coordinate for reverse geocoding - zh_Hans: 用于反向地理编码的纬度坐标 - de_DE: Breitengrad-Koordinate für die Rückwärts-Geocodierung - ja_JP: リバースジオコーディングの緯度座標 - llm_description: The latitude coordinate for reverse geocoding. - form: llm - - name: lon - type: number - required: true - label: - en_US: Longitude - zh_Hans: 经度 - de_DE: Längengrad - ja_JP: 経度 - human_description: - en_US: Longitude coordinate for reverse geocoding - zh_Hans: 用于反向地理编码的经度坐标 - de_DE: Längengrad-Koordinate für die Rückwärts-Geocodierung - ja_JP: リバースジオコーディングの経度座標 - llm_description: The longitude coordinate for reverse geocoding. 
- form: llm diff --git a/api/core/tools/provider/builtin/nominatim/tools/nominatim_search.py b/api/core/tools/provider/builtin/nominatim/tools/nominatim_search.py deleted file mode 100644 index 34851d86dcaa5f..00000000000000 --- a/api/core/tools/provider/builtin/nominatim/tools/nominatim_search.py +++ /dev/null @@ -1,41 +0,0 @@ -import json -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class NominatimSearchTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - query = tool_parameters.get("query", "") - limit = tool_parameters.get("limit", 10) - - if not query: - return self.create_text_message("Please input a search query") - - params = {"q": query, "format": "json", "limit": limit, "addressdetails": 1} - - return self._make_request(user_id, "search", params) - - def _make_request(self, user_id: str, endpoint: str, params: dict) -> ToolInvokeMessage: - base_url = self.runtime.credentials.get("base_url", "https://nominatim.openstreetmap.org") - - try: - headers = {"User-Agent": "DifyNominatimTool/1.0"} - s = requests.session() - response = s.request(method="GET", headers=headers, url=f"{base_url}/{endpoint}", params=params) - response_data = response.json() - - if response.status_code == 200: - s.close() - return self.create_text_message( - self.summary(user_id=user_id, content=json.dumps(response_data, ensure_ascii=False)) - ) - else: - return self.create_text_message(f"Error: {response.status_code} - {response.text}") - except Exception as e: - return self.create_text_message(f"An error occurred: {str(e)}") diff --git a/api/core/tools/provider/builtin/nominatim/tools/nominatim_search.yaml b/api/core/tools/provider/builtin/nominatim/tools/nominatim_search.yaml deleted file mode 100644 index e0c53c046a2a41..00000000000000 --- 
a/api/core/tools/provider/builtin/nominatim/tools/nominatim_search.yaml +++ /dev/null @@ -1,51 +0,0 @@ -identity: - name: nominatim_search - author: Charles Zhou - label: - en_US: Nominatim Search - zh_Hans: Nominatim 搜索 - de_DE: Nominatim Suche - ja_JP: Nominatim 検索 -description: - human: - en_US: Search for locations using Nominatim - zh_Hans: 使用Nominatim搜索位置 - de_DE: Suche nach Orten mit Nominatim - ja_JP: Nominatimを使用して場所を検索 - llm: A tool for geocoding using Nominatim, which can search for locations based - on addresses or place names. -parameters: - - name: query - type: string - required: true - label: - en_US: Search Query - zh_Hans: 搜索查询 - de_DE: Suchanfrage - ja_JP: 検索クエリ - human_description: - en_US: Enter an address or place name to search for - zh_Hans: 输入要搜索的地址或地名 - de_DE: Geben Sie eine Adresse oder einen Ortsnamen für die Suche ein - ja_JP: 検索する住所または場所の名前を入力してください - llm_description: The search query for Nominatim, which can be an address or place name. - form: llm - - name: limit - type: number - default: 10 - min: 1 - max: 40 - required: false - label: - en_US: Result Limit - zh_Hans: 结果限制 - de_DE: Ergebnislimit - ja_JP: 結果の制限 - human_description: - en_US: "Maximum number of results to return (default: 10, max: 40)" - zh_Hans: 要返回的最大结果数(默认:10,最大:40) - de_DE: "Maximale Anzahl der zurückzugebenden Ergebnisse (Standard: 10, max: 40)" - ja_JP: 返す結果の最大数(デフォルト:10、最大:40) - llm_description: Limit the number of returned results. The default is 10, and - the maximum is 40. 
- form: form diff --git a/api/core/tools/provider/builtin/novitaai/_assets/icon.ico b/api/core/tools/provider/builtin/novitaai/_assets/icon.ico deleted file mode 100644 index e353ecf711cac1d0b1843a9d82793a70a209728f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 9366 zcmX|nXEYpK)b;2ki7trVB}DI|cM&9r-eN@WqJ$Yli{3kf1QAh3@7?GG(QC|TVe~Q> zym`LwUGF_>-@Wfy`~0~7&bsGDztK^DLi~&v002DE)KJm;$5#K~LWuuwfBenu@*iP& z>!~XNswNor{vG^37;D;TYXe^V(}VzAtY-kc|1AF?heiK?S`CW_fb;+I*Z{y6M*!~s zdUXEre=F%9{;T=_j8lm9zwUpzLhS!@TNUE`pLY4LOi?bS{T~r|Xqb2d08csp1D2*9 z$1woF-lVCbXyA{9f|^yJjNZZLmu~a)lC%Jd3O~pmVX2T`C?dbHWRmf?a;E=yKF}RY z%F&5NhH=Bqz#CW)Y*n5O9EW~315q`q6V3J{G8Q~+RmXTutk|^tQPZHVBYM#N&XTzese1y_-l}Z>1Ah@WnG7DgGq;FP_Sm* zvH0)KPDP+dHcABV6_L+F%GhCD!VbNykHof*x`9E7;Z^YIrxp)r(jItNd7 zMROB8Vb#f5tOY6oK5G+&y&YEI>zQV0h;C$fsu?0C8&Si4V_9iKyIut+dB%aju|g|; z*bq@Ol#5H%4$DF9-6iQ%g2D^d%Hq_)mVHXn8%gY~%d@vfm}wlV%g>}0lYf zQjePk=%BB`Gk9b&qw*5>)2`=HbK~HUwB*|mI;{fZi~jtc9Ty1~58(SH%hpAER;(tp z3c186OngssjW$SDDl}8`sRjT8M zZO|Z!vLZ6EHMVvw)fpEIA5!AZrNg0 z0$ybRGz^|{m}=nK-s%{RW@vIRojZM?{8`f^+Gvf&vcnA8E3}e@@rd_zCqDGcF6v58 z#ak}>=u)wo+@W!@{>^IUj9y*rVdz&!!aH6K%(5f_DzjM(7Ce5u`LW)2)p21kk;E~HN8rh(G8;37Q`UJeHhhGIbf|^FL1PVu?$qh+SLCZg@g$nt}ESq9f3yWCd z)u-sL@}i<=j*)8kj{WsWnigBqP)wT(R)y{2XDfDD)S$bNy-v=kbyd2eYzFa=?5-IBL3+z|cISg2JOG zk-g#|x`*S3`%0|s6l1Oi;c$bXv=M^$h;BwILw0ovO;5QD_puV1m;OGOB%qgfI2A)< zf~)35659R3I@J1G5C^9$p!Ui%u@h{xQTt-iMZn~mN%fKeV_lNCPUlC}5JP6ulP8X! 
zWqGtaWHx@noq%T*6EC&nz8EJzsGDbEw#E|j{3{rFxMa68I;M1k+jg#_=QWq7kUalc ziL3I$rg}9T3(E^OQBq3sYUTwA`&>z>&$3@4xUq?;)e(wYH@A-_n5!w+thrVSMV+t~ z!I@h~0zDL~lv;E-WF$HV9h^eO7#tY8rtBCdbQU#o`l9dvFv9|ow2PrYdmne8Y&;NK zA!OxUD9<1L+9b-B@K{Fi&hDuN@#(=PWPN{55ETt`zm>R-v9P7BqfV>Wutlc1f4(HsBjsHAh|C_```ay>#z~_wdo;vi43WWOKjtPCIZo5d%H{%wByKl#?rxd zX?*+HDj?_R@2byz>9?$YR>uQ}m$X#&Jh1N*9XlZ$pR-q>FLY;1qquzky3P|#=Z0`b zFN6qmGh)Vw`e=NaQVD`6=r4z8%(deRf>^16i?mi@@RS_XC#T262cHBND4_C|ey8q% zPe&y3d}K<|9wSc_q{O}+it#7G>O_yQY134;3^l>N4NER09_3d9=Q%(RFzGED4r89a z1IuQ~3GZjkd^Y7`rUf2D)6MjbQjU`E4HLsbjOsCDe0u%y08sx&x-X% zi-NBEIr)zB83#yPQ+X9;Q+pF}O6EjiKDM2J>Ei?osp;neCTM?F9tB_E36*{hPkxCE z$o%t?V;)*lu}r6fIX=|W6laz(XKdfXqS1L6=5{k)cT02cD=i?u9$7~o9UMoSyQOTW z$r-Kol+Q}7JMz4J(y50Kxs>ZW&Gt1)_~{r&c_Fz;Qd9X;UtwRrJZ$IZru+ThKZ|C} z!52{XyUxt@6B)edo2GZ{>sx0S2A^%E>{`S)RKm^s=^BrL|LOuOzgt7+i?I5 zUf5h&+kiTi&IxDtc{#z4;Mtqwjdz(=kjpxPa^QOFqFeO?vQ6MF?f2oC>qsgN%b>6c zOflWR06z(f8S&#Yv#keuqkPiaT2~EK_+gJmaD^lkW&Dix;_xcYB1yqGJ&5`hzH~;T zdyB`PX}{IX#1bQ~?fnJT@d@f@5M0FyIB4cY@cq{>?f!e9FNp7H7A>7(Nn2+2lBneo z*@PGmH=(un8=0dl73q@$69JP`)6DsDn7@^}ZGRK?gLnS|^Q#>M+jJXTy4O6AFM`^8 zQY7-u=_yYoy8ezw1RR`ES4Bc)WBGp3&P)>wF~dxs#u`WzPc|{|-x${2wbR=QI46`i zfAB_;TKNo)@6SH8`*cXP-xs(ypQl;}l{Xz1^8f`n^FnP?Qy$Q9R!$I_EcR`4(yp7l zL`Gd?lz1LXf6IptZ$K}9lXMv$va6{9i$_NXB2rQ?Kynb>EwgCw=-#-|+Vxj@rjwVq zZhJIU?I^B+e>eRFEpH+J(r}U_aQ*InS8^UD<*t)zlcKD$HsGQ2#XnU)9pxDy^`huJ3!UiRhs+-W{8B?Q4A9z zLo(DX!lcse>B2>+cy3eBM&uW;7K7Hj;|}k3`{bmd@5(qRHBV0JBFjv<9cm{33K?iK zqmz-Z9zY37$@SPh;crNflv*$AB-7i%JNCEn)3h9NTdAedI}R&D%5n6V9R^>n0NvY9 z{ydDIc1*EY3H-b!&WztbW_gF>|K<(y_i4mrvQYvfFLid@Ado&Tc~l?Cx>(z)kIh!o zx^?=3xkE0|$|ChI0kohDYSubTJAUTa(K51K=K8GcBHh6nX9bQF_K$P=2~unBMZuvqx~hL>P< zDtR#A@&;uhDtzz21n>6=(4!NkzF9WL)pFdy?rUqG?k~7~B-(u$fE&C@gEBjso$vCB zT}XgN1LarOde$$2!fhT0A79$u*|fNR_X(qsT3Ar8X0KLQYC>tjCC}h{)z7DIb({kO zo*%6x?+-dEW0wAKmdXdrzO(XgH@IA5sP;7&)Skqr|K!wpaIeaAH{!?T$M;6{g9u>* zf=tAX{UdSQS^<%!z&%A2YRmp-BSieW(D6&g;ABHpr?!njd*6<2%)BUx 
zSI$TA0&+W;x-zOn4Hps$=My`o6z82I=#eI6SfH|~L=Kd6wsAQvdGOJcnG-P!#~-kL{xM*VD#P|{mMp#2N6xJ&5(fiPtP-l=2TDy4 znlLnxN|20sJHH6a_aaa*j)xg3 zU-4u;YEnK>5f(>xH^e!+{MZ(i8svl`jvz_7T1z#LL zbw_rHQs|uPI|4Ix_na068lT9;zsnk{w7l<${d8WzjC&5Z`$2mWAK*^-i`m$L)ECpd!dV zFkT=Mti_G@V;cJS+1`I3ru_h^SeNQ}U}x&ZNs?nT<6ZSUhzOr%E>;JUFHe{B8kjXC z__iX=3sHf5Uq1kpf4H<4 zkbpbB|QOt0Un|7 z=*9Qz0lR?&N;;IT(}>aioSBT(^`kpPAi6uBuqH7&b-(v$bM#Lf2;6sY_dJ|i&Hb?_ z0`Z1YQp23%O&)ovUCX21*mcxGR(FE~!Fm3B+LqQNy)(&VlP%S>Nancsi;r`z(&+!LKqbQiqMzp1a%A>g}?=|r#G<6O<_@$~-UAomA21hq_*zu^!Bh74cW0bKedwZG8I)~WSH!i<>xFiTU zL*-9h&SR|3@~*w!vxkZisfK?SSE{lLWoag&@=)LuDNKumbmRQo;kt@q#)!jq4|kKZ zm{|2eG_+7IIbm*2&_0oUy1Z*hQ%8em>2umdZt!U#5AMjxZ$G@AkNiWBZGo;|)U_b+ znDL{E6Dsa(zv)KhFV&dcyEkNP#pQAV8ZqVMNhqxhAtncouirJV>JIsXrqv(?m44tZ zwFK>=?toOu;M3pogFix~gEuE1RXxrASVAi$MW?ih*%bDbb>7`8`@pq0y;QQr9a2J? z;o8k(vR78l;^#xqBAj<5w@(UwLPgmM<=MY=N>H8vHcm{LCmVVG{E_EAn4!etdMfL} z)|v@27FLYq`*vUp%upM>xwMh{sY}-Cfa_$Ons&=3SG=Lbj#D0hsgFlBM0U5P$`j>2 z4xra`EqGE3DW+BNB&SS_K{aEDWa-05@kzb5{X(@!=|->=FRJ+Zf3I|8D2sNh8Z2ux z1q%?r^~IN_F21dWF)P_K5ulG&ap4PuI9-Abjg+xqjSoktUo0^_f3_1uXjmxOudl$47FuS_4Um2Jr+e6`bD0=`i|3=GcN%Tkd@6_WhG(a=rnkG=*~44ZpjC~c!4t$ z=g@M1?>1&s3gBaL?r2XCg}ok39i@5J9;T&0R12o&5{mE5jKjWw4tnu}g_eRx@yoLC z>s7ldwXUid-gs$8NY6Uy~F3MgwQI@XwpbAx-fH9I}eS+Akta`al z(PRJfme}K(4Xs4lH~ipbfAh}c{n!=~Xx-T5GF;b`kZS??^!0uszFD(usdx2?pBNEPJ8F-jd!Mxj2)h4u^hYFAMG%Jx)7dZMS(WuI)q ziheg{>eBR~l_X>5g3L~{5Jnux(yErTEI|euug%Flg=%O)ctGX_KT{I{$p^3VUi;MU z<}jEnR((axu9R*L>En-&yOLx>2ML@bLnl{$&*;8k@0}cphU{6nw2A*W2IvB!@ zqPa6SbrGXSU(0786bEhC%jQ~}Ty=S{IcsGjoYYxWl}yiztaHy$?SoRAngsK4yt%U) z0`pFnBcD$!_jULB1C5LG`r>MU*!XGaD0YIeL9DldU$jC> zIYas(V0QDHK0@)bo{R6?;qHS!Gds;9aF;>Iaz=l@-*WP@G|RZz_Q9+`#40I)9T}O( zuQaH>PLCVLu>VKGT}-`!u+#UcniFl4H+?{YK`R%vUP}EDQzdPj<@lu`cQcwv-L!I$ zU+#85Bo{z`e6fN@WjNTElUmuZ@gk>=+haGku5~}=OR}G9#o8Y5I)mRipyvr|(xPTc z9;Lo6z2;?e=NqrDuei=iehLa-J6oL;+2aWcibh~|Cn3rF@=%pqn5?hf)j(=$Z)VEU z1AOu=`}xNHrwv|w?T)W8W;U;a)`I57VPVOXVLF6vl%7k|!-C3I*%9M2ceqt>%x1XWMSX2pCe}?)LST!}NUtTbWA9X+rf1?zcm3 
z=FAlW^G3@8^V8!n0SM7?s@3dU2JX7Eagm0J?bmz>!TmZU;56Yau`MfsX4AB!j3N&2 zqae<;YMhF|dFtdcVZfBoHM$!PN$xc^A})VBWG>i%n=J~4`=0Evxry6PtQ+3yyYi63uiso(n!VNp49*SFJ;Pe%Ki$SDXZ1-oxA7+(gt*eQeG$3MWkK`V| z>D9pLj%3K?dOLd*LlDya!+)q|yyhiCsJGy>EXn$1WjWB=2aQKj2>mn(ax<*$>fA8m z>;2V5>DW8Bp26N}J$apKbcmL^_fQ9K4+PIV!A|?(&P-38tk?TBm}C|Cx3u>)1ra>b zW+6Lp_}3r}U`|!whDh{=6}nkyCcV{`NQZ=Io%O6m%tn+>R#-N#w=^{z!Bq-CryMzg zKEzzi4>lW7P2v_i)z&;oN8!)<@R(mZ?0gB?av_Cj$0QPdZdMSl8u?>xw0v>%x)!2k zzw)w9w&(9NKLTI}IK^Q7(fAGbsrw8$?9hD%il-PFBFNctK8cT2UjM+SSwvE@`zjir+g>3k~!$zhU0;N zyF!IWZ!-A@&WF8%Ydx6?!3!zO z@0(VA&mz}bHqUhDEiQxZIvVGRDVx!auEab=ln!Q$K9m-gTvakNS8cM5oaHZum#GGt zcHCNgpM2R7(YEc4{FhERLm(c$(}3~*Z&IKwJmN+26u(%pxvAGZ{BY83qV{o8Bg#Mh z^||uiTcp8%P4XUzBrtNn?T-ZPsjZXfTYQpOB+Fsrvu4e{Scy4@$3jMl@VhEn1}PD^ zQpVk|~Adc?~T6egnUb+|&d1$akfjAvz;(EXGd)H8JGL8{V>0dg9UwOs0 z-funT+m>-1SN(La(Dawj$=q5fJPxu4j=X5%I3i_z+x!pm(Kavfa0|RpZJ*m*5-h-> zHmmbGnCKCP=OZ^VRxxDpZ+VUI@oUT@O`ZD=>ztL903V`Axt?v45IQbqn|6~D!k-O0OqAfqh z!@`u4h-ulT-qCB07#+vDRF?cmFMKS`M1Oca`~ao;6snaE*Tqc0j)#j{-oGBp-+#II zJXlH&0;q2reZ%ee6;Svk7FR07^UoWjRB3FOId90K+>Hfbd05dml5xS0QXaE3847AS z`QUs@SMZK=Yv)exlRlwa|6;MP02*p8*2J&79q!KVq(DC*6S1o|7m#8eemN3xO2Tq5 z-+SsWiD%*FQue}YC_Z2fF{`qHMvMvpEGI#zSSi%D1=P!+K1tZUU3+CzmLr;NSjjf5 zbxi)fFzb4=3=Yi;w1wLKX|UwWMpbY5M6`#0r>%2zQfAqeA^I$t9-L618r;5-669-! 
zJ<+Vu-_l=p%;7BM9LGx*#tL-m3m6qWW&D6mS)XBGEZWM0TlvU~6Jb?Q&`A&Qf&Du9 zb?56tA>WbI{JOT#9s_%xSAG71dr-jQVXek?>6-89^TWXWa8K#Ez_{*W#lksR9RMA3 za<0PprqPPE0zV}1%}%69flkAs^Dty#^ zMawQHQde5<>VVksl{dsJc-fY#x&Ak`q-7z7-EhCOcZFM(2%aQZlxjfIVUSMNHv=-y zh9-L(C-9q01U?f>j{4FBqgAdMZxcg>#4_nuTO0-l=EHBxcJqO|t#Z z0sE@kfPO9TJ5s|vT=~_tY=?iJ)2=3sswgq5{Ew~ii{l1dFc)BTa{dl7NDo_sc zC#dNz$`Uuo!~nvlw%DQlY$T&xKBpckL5{E7Wa%%Ek_p)($ME3N#?S0?br3HK@2h5n z`hC?!s*6QfE-)s>*3cu+@2k?qF9L2`tM7L*gAbkuU#CqDEyTAcx^|t^PC}0E4EIOR zaVSWgBCd;;n5rdq-j#t%^^LA-OR zMKV_aIensgkMZ;*dyt(*2Wv5%{4ulisA=_pPIsBMo!pB^X;)x%XniztNC7mf(tHCo zQ915vv}t}Xe6H53`*w8QuEX#2h-f-xUiOdU=}Bbf%K3&CsQ{!Tugg>wO{B1p+{rF^ z>$JnZ^DSFLTgK0`mA0erot{fcn|U_rt?4o*n2TAs^=@iZ?y7%0;MS>Q#EN(NJUI9F z6bsum!B?&Kb~c%w38iBvKwmKRusRfp7;q0fj-x?cFYI9sb&lH5hmvU1#x68XV%whB zQOkjm{8>?AVEcXfJ`M3PBikM?g9u_^8Nu+JHG>gKBM{J;f9zK3!8G3*4*AI1Bu{n3 zj>~ab7}`CkXa9L~MiHchUFlt4wR$zH+ej zauihHVP(tNvRGo-D0*a+;*GCA^IQJV#2*KTu~A3!WJ5;c1i6nU{nu{u>*xJ|SGbN> z($`R2dB2;(%vB7SywXo=*@#E*esu1@>4}WpA+KM!Tb$*?F={P? 
z*A?ZoZ;}yQZB|z6Q5bv3&O%G>nC>=%)=}_YY0%%_Hyg7{OaEqB>sw7(7pC29Y|NT3 zhI+PvNZsG7b$wkO+eM`2>z;dJ%j1{Q)Eg};00@TAvB>Ji|F^e0cr$pl?#ETT#Cq9P z`*P|T+Rq4#AtEqi)GEK2+)F^aUD)SeuZ;#DA=%K|$@esMB0^oz82S(Y{qNCK)lsQZ HvJU+pg(G@~ diff --git a/api/core/tools/provider/builtin/novitaai/_novita_tool_base.py b/api/core/tools/provider/builtin/novitaai/_novita_tool_base.py deleted file mode 100644 index 762e158459cc2a..00000000000000 --- a/api/core/tools/provider/builtin/novitaai/_novita_tool_base.py +++ /dev/null @@ -1,69 +0,0 @@ -from novita_client import ( - Txt2ImgV3Embedding, - Txt2ImgV3HiresFix, - Txt2ImgV3LoRA, - Txt2ImgV3Refiner, - V3TaskImage, -) - - -class NovitaAiToolBase: - def _extract_loras(self, loras_str: str): - if not loras_str: - return [] - - loras_ori_list = lora_str.strip().split(";") - result_list = [] - for lora_str in loras_ori_list: - lora_info = lora_str.strip().split(",") - lora = Txt2ImgV3LoRA( - model_name=lora_info[0].strip(), - strength=float(lora_info[1]), - ) - result_list.append(lora) - - return result_list - - def _extract_embeddings(self, embeddings_str: str): - if not embeddings_str: - return [] - - embeddings_ori_list = embeddings_str.strip().split(";") - result_list = [] - for embedding_str in embeddings_ori_list: - embedding = Txt2ImgV3Embedding(model_name=embedding_str.strip()) - result_list.append(embedding) - - return result_list - - def _extract_hires_fix(self, hires_fix_str: str): - hires_fix_info = hires_fix_str.strip().split(",") - if "upscaler" in hires_fix_info: - hires_fix = Txt2ImgV3HiresFix( - target_width=int(hires_fix_info[0]), - target_height=int(hires_fix_info[1]), - strength=float(hires_fix_info[2]), - upscaler=hires_fix_info[3].strip(), - ) - else: - hires_fix = Txt2ImgV3HiresFix( - target_width=int(hires_fix_info[0]), - target_height=int(hires_fix_info[1]), - strength=float(hires_fix_info[2]), - ) - - return hires_fix - - def _extract_refiner(self, switch_at: str): - refiner = Txt2ImgV3Refiner(switch_at=float(switch_at)) - return 
refiner - - def _is_hit_nsfw_detection(self, image: V3TaskImage, confidence_threshold: float) -> bool: - """ - is hit nsfw - """ - if image.nsfw_detection_result is None: - return False - if image.nsfw_detection_result.valid and image.nsfw_detection_result.confidence >= confidence_threshold: - return True - return False diff --git a/api/core/tools/provider/builtin/novitaai/novitaai.py b/api/core/tools/provider/builtin/novitaai/novitaai.py deleted file mode 100644 index d5e32eff29373a..00000000000000 --- a/api/core/tools/provider/builtin/novitaai/novitaai.py +++ /dev/null @@ -1,34 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.novitaai.tools.novitaai_txt2img import NovitaAiTxt2ImgTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class NovitaAIProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - result = ( - NovitaAiTxt2ImgTool() - .fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ) - .invoke( - user_id="", - tool_parameters={ - "model_name": "cinenautXLATRUE_cinenautV10_392434.safetensors", - "prompt": "a futuristic city with flying cars", - "negative_prompt": "", - "width": 128, - "height": 128, - "image_num": 1, - "guidance_scale": 7.5, - "seed": -1, - "steps": 1, - }, - ) - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/novitaai/novitaai.yaml b/api/core/tools/provider/builtin/novitaai/novitaai.yaml deleted file mode 100644 index 3eed8a889c1bd8..00000000000000 --- a/api/core/tools/provider/builtin/novitaai/novitaai.yaml +++ /dev/null @@ -1,32 +0,0 @@ -identity: - author: Xiao Ley - name: novitaai - label: - en_US: Novita AI - zh_Hans: Novita AI - pt_BR: Novita AI - description: - en_US: Innovative AI for Image Generation - zh_Hans: 用于图像生成的创新人工智能。 - pt_BR: Innovative AI 
for Image Generation - icon: icon.ico - tags: - - image - - productivity -credentials_for_provider: - api_key: - type: secret-input - required: true - label: - en_US: API Key - zh_Hans: API 密钥 - pt_BR: Chave API - placeholder: - en_US: Please enter your Novita AI API key - zh_Hans: 请输入你的 Novita AI API 密钥 - pt_BR: Por favor, insira sua chave de API do Novita AI - help: - en_US: Get your Novita AI API key from Novita AI - zh_Hans: 从 Novita AI 获取您的 Novita AI API 密钥 - pt_BR: Obtenha sua chave de API do Novita AI na Novita AI - url: https://novita.ai diff --git a/api/core/tools/provider/builtin/novitaai/tools/novitaai_createtile.py b/api/core/tools/provider/builtin/novitaai/tools/novitaai_createtile.py deleted file mode 100644 index 0b4f2edff3607f..00000000000000 --- a/api/core/tools/provider/builtin/novitaai/tools/novitaai_createtile.py +++ /dev/null @@ -1,54 +0,0 @@ -from base64 import b64decode -from copy import deepcopy -from typing import Any, Union - -from novita_client import ( - NovitaClient, -) - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.tool.builtin_tool import BuiltinTool - - -class NovitaAiCreateTileTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - if "api_key" not in self.runtime.credentials or not self.runtime.credentials.get("api_key"): - raise ToolProviderCredentialValidationError("Novita AI API Key is required.") - - api_key = self.runtime.credentials.get("api_key") - - client = NovitaClient(api_key=api_key) - param = self._process_parameters(tool_parameters) - client_result = client.create_tile(**param) - - results = [] - results.append( - self.create_blob_message( - blob=b64decode(client_result.image_file), - meta={"mime_type": f"image/{client_result.image_type}"}, - save_as=self.VariableKey.IMAGE.value, - ) - ) - - 
return results - - def _process_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]: - """ - process parameters - """ - res_parameters = deepcopy(parameters) - - # delete none and empty - keys_to_delete = [k for k, v in res_parameters.items() if v is None or v == ""] - for k in keys_to_delete: - del res_parameters[k] - - return res_parameters diff --git a/api/core/tools/provider/builtin/novitaai/tools/novitaai_createtile.yaml b/api/core/tools/provider/builtin/novitaai/tools/novitaai_createtile.yaml deleted file mode 100644 index 8e5df5042937d3..00000000000000 --- a/api/core/tools/provider/builtin/novitaai/tools/novitaai_createtile.yaml +++ /dev/null @@ -1,80 +0,0 @@ -identity: - name: novitaai_createtile - author: Xiao Ley - label: - en_US: Novita AI Create Tile - zh_Hans: Novita AI 创建平铺图案 -description: - human: - en_US: This feature produces images designed for seamless tiling, ideal for creating continuous patterns in fabrics, wallpapers, and various textures. - zh_Hans: 该功能生成设计用于无缝平铺的图像,非常适合用于制作连续图案的织物、壁纸和各种纹理。 - llm: A tool for create images designed for seamless tiling, ideal for creating continuous patterns in fabrics, wallpapers, and various textures. -parameters: - - name: prompt - type: string - required: true - label: - en_US: prompt - zh_Hans: 提示 - human_description: - en_US: Positive prompt word of the created tile, divided by `,`, Range [1, 512]. Only English input is allowed. - zh_Hans: 生成平铺图案的正向提示,用 `,` 分隔,范围 [1, 512]。仅允许输入英文。 - llm_description: Image prompt of Novita AI, you should describe the image you want to generate as a list of words as possible as detailed, divided by `,`, Range [1, 512]. Only English input is allowed. - form: llm - - name: negative_prompt - type: string - required: false - label: - en_US: negative prompt - zh_Hans: 负向提示 - human_description: - en_US: Negtive prompt word of the created tile, divided by `,`, Range [1, 512]. Only English input is allowed. 
- zh_Hans: 生成平铺图案的负向提示,用 `,` 分隔,范围 [1, 512]。仅允许输入英文。 - llm_description: Image negative prompt of Novita AI, divided by `,`, Range [1, 512]. Only English input is allowed. - form: llm - - name: width - type: number - default: 256 - min: 128 - max: 1024 - required: true - label: - en_US: width - zh_Hans: 宽 - human_description: - en_US: Image width, Range [128, 1024]. - zh_Hans: 图像宽度,范围 [128, 1024] - form: form - - name: height - type: number - default: 256 - min: 128 - max: 1024 - required: true - label: - en_US: height - zh_Hans: 高 - human_description: - en_US: Image height, Range [128, 1024]. - zh_Hans: 图像高度,范围 [128, 1024] - form: form - - name: response_image_type - type: select - default: jpeg - required: false - label: - en_US: response image type - zh_Hans: 响应图像类型 - human_description: - en_US: Response image type, png or jpeg - zh_Hans: 响应图像类型,png 或 jpeg - form: form - options: - - value: jpeg - label: - en_US: jpeg - zh_Hans: jpeg - - value: png - label: - en_US: png - zh_Hans: png diff --git a/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.py b/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.py deleted file mode 100644 index a200ee81231f00..00000000000000 --- a/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.py +++ /dev/null @@ -1,148 +0,0 @@ -import json -from copy import deepcopy -from typing import Any, Union - -from pandas import DataFrame -from yarl import URL - -from core.helper import ssrf_proxy -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.tool.builtin_tool import BuiltinTool - - -class NovitaAiModelQueryTool(BuiltinTool): - _model_query_endpoint = "https://api.novita.ai/v3/model" - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - if "api_key" not in self.runtime.credentials or not 
self.runtime.credentials.get("api_key"): - raise ToolProviderCredentialValidationError("Novita AI API Key is required.") - - api_key = self.runtime.credentials.get("api_key") - headers = {"Content-Type": "application/json", "Authorization": "Bearer " + api_key} - params = self._process_parameters(tool_parameters) - result_type = params.get("result_type") - del params["result_type"] - - models_data = self._query_models( - models_data=[], - headers=headers, - params=params, - recursive=result_type not in {"first sd_name", "first name sd_name pair"}, - ) - - result_str = "" - if result_type == "first sd_name": - result_str = models_data[0]["sd_name_in_api"] if len(models_data) > 0 else "" - elif result_type == "first name sd_name pair": - result_str = ( - json.dumps({"name": models_data[0]["name"], "sd_name": models_data[0]["sd_name_in_api"]}) - if len(models_data) > 0 - else "" - ) - elif result_type == "sd_name array": - sd_name_array = [model["sd_name_in_api"] for model in models_data] if len(models_data) > 0 else [] - result_str = json.dumps(sd_name_array) - elif result_type == "name array": - name_array = [model["name"] for model in models_data] if len(models_data) > 0 else [] - result_str = json.dumps(name_array) - elif result_type == "name sd_name pair array": - name_sd_name_pair_array = ( - [{"name": model["name"], "sd_name": model["sd_name_in_api"]} for model in models_data] - if len(models_data) > 0 - else [] - ) - result_str = json.dumps(name_sd_name_pair_array) - elif result_type == "whole info array": - result_str = json.dumps(models_data) - else: - raise NotImplementedError - - return self.create_text_message(result_str) - - def _query_models( - self, - models_data: list, - headers: dict[str, Any], - params: dict[str, Any], - pagination_cursor: str = "", - recursive: bool = True, - ) -> list: - """ - query models - """ - inside_params = deepcopy(params) - - if pagination_cursor != "": - inside_params["pagination.cursor"] = pagination_cursor - - response 
= ssrf_proxy.get( - url=str(URL(self._model_query_endpoint)), headers=headers, params=params, timeout=(10, 60) - ) - - res_data = response.json() - - models_data.extend(res_data["models"]) - - res_data_len = len(res_data["models"]) - if res_data_len == 0 or res_data_len < int(params["pagination.limit"]) or recursive is False: - # deduplicate - df = DataFrame.from_dict(models_data) - df_unique = df.drop_duplicates(subset=["id"]) - models_data = df_unique.to_dict("records") - return models_data - - return self._query_models( - models_data=models_data, - headers=headers, - params=inside_params, - pagination_cursor=res_data["pagination"]["next_cursor"], - ) - - def _process_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]: - """ - process parameters - """ - process_parameters = deepcopy(parameters) - res_parameters = {} - - # delete none or empty - keys_to_delete = [k for k, v in process_parameters.items() if v is None or v == ""] - for k in keys_to_delete: - del process_parameters[k] - - if "query" in process_parameters and process_parameters.get("query") != "unspecified": - res_parameters["filter.query"] = process_parameters["query"] - - if "visibility" in process_parameters and process_parameters.get("visibility") != "unspecified": - res_parameters["filter.visibility"] = process_parameters["visibility"] - - if "source" in process_parameters and process_parameters.get("source") != "unspecified": - res_parameters["filter.source"] = process_parameters["source"] - - if "type" in process_parameters and process_parameters.get("type") != "unspecified": - res_parameters["filter.types"] = process_parameters["type"] - - if "is_sdxl" in process_parameters: - if process_parameters["is_sdxl"] == "true": - res_parameters["filter.is_sdxl"] = True - elif process_parameters["is_sdxl"] == "false": - res_parameters["filter.is_sdxl"] = False - - res_parameters["result_type"] = process_parameters.get("result_type", "first sd_name") - - res_parameters["pagination.limit"] = 
( - 1 - if res_parameters.get("result_type") == "first sd_name" - or res_parameters.get("result_type") == "first name sd_name pair" - else 100 - ) - - return res_parameters diff --git a/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.yaml b/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.yaml deleted file mode 100644 index a14795e45e0e4f..00000000000000 --- a/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.yaml +++ /dev/null @@ -1,175 +0,0 @@ -identity: - name: novitaai_modelquery - author: Xiao Ley - label: - en_US: Novita AI Model Query - zh_Hans: Novita AI 模型查询 -description: - human: - en_US: Retrieve information on both public and private models. It allows users to access details such as model specifications, status, and usage guidelines, ensuring comprehensive insight into the available modeling resources. - zh_Hans: 检索公开和私有模型信息。它允许用户访问模型规范、状态和使用指南等详细信息,确保了解可用的建模资源。 - llm: A tool for retrieve information on both public and private Novita AI models. -parameters: - - name: query - type: string - required: false - label: - en_US: query - zh_Hans: 查询 - human_description: - en_US: Seaching the content of sd_name, name, tags. 
- zh_Hans: 搜索 sd_name、name、tags 中的内容 - llm_description: Enter the content to search - form: llm - - name: result_type - type: select - default: "first sd_name" - required: true - label: - en_US: result format - zh_Hans: 结果格式 - human_description: - en_US: The format of result - zh_Hans: 请求结果的格式 - form: form - options: - - value: "first sd_name" - label: - en_US: "first sd_name" - zh_Hans: "第一个 sd_name" - - value: "first name sd_name pair" - label: - en_US: "first name and sd_name pair: {name, sd_name}" - zh_Hans: "第一个 name sd_name 组合:{name, sd_name}" - - value: "sd_name array" - label: - en_US: "sd_name array: [sd_name]" - zh_Hans: "sd_name 数组:[sd_name]" - - value: "name array" - label: - en_US: "name array: [name]" - zh_Hans: "name 数组:[name]" - - value: "name sd_name pair array" - label: - en_US: "name and sd_name pair array: [{name, sd_name}]" - zh_Hans: "name sd_name 组合数组:[{name, sd_name}]" - - value: "whole info array" - label: - en_US: whole info array - zh_Hans: 完整信息数组 - - name: visibility - type: select - default: unspecified - required: false - label: - en_US: visibility - zh_Hans: 可见性 - human_description: - en_US: Whether the model is public or private - zh_Hans: 模型是否公开或私有 - form: form - options: - - value: unspecified - label: - en_US: Unspecified - zh_Hans: 未指定 - - value: public - label: - en_US: Public - zh_Hans: 公开 - - value: private - label: - en_US: Private - zh_Hans: 私有 - - name: source - type: select - default: unspecified - required: false - label: - en_US: source - zh_Hans: 来源 - human_description: - en_US: Source of the model - zh_Hans: 模型来源 - form: form - options: - - value: unspecified - label: - en_US: Unspecified - zh_Hans: 未指定 - - value: civitai - label: - en_US: Civitai - zh_Hans: Civitai - - value: training - label: - en_US: Training - zh_Hans: 训练 - - value: uploading - label: - en_US: Uploading - zh_Hans: 上传 - - name: type - type: select - default: unspecified - required: false - label: - en_US: type - zh_Hans: 类型 - human_description: - 
en_US: Specifies the type of models to include in the query. - zh_Hans: 指定要查询的模型类型 - form: form - options: - - value: unspecified - label: - en_US: Unspecified - zh_Hans: 未指定 - - value: checkpoint - label: - en_US: Checkpoint - zh_Hans: Checkpoint - - value: lora - label: - en_US: LoRA - zh_Hans: LoRA - - value: vae - label: - en_US: VAE - zh_Hans: VAE - - value: controlnet - label: - en_US: ControlNet - zh_Hans: ControlNet - - value: upscaler - label: - en_US: Upscaler - zh_Hans: Upscaler - - value: textualinversion - label: - en_US: Textual inversion - zh_Hans: Textual Inversion - - name: is_sdxl - type: select - default: unspecified - required: false - label: - en_US: is sdxl - zh_Hans: 是否是 SDXL - human_description: - en_US: Whether sdxl model or not. Setting this parameter to `true` includes only sdxl models in the query results, which are typically large-scale, high-performance models designed for extensive data processing tasks. Conversely, setting it to `false` excludes these models from the results. If left unspecified, the filter will not discriminate based on the sdxl classification, including all model types in the search results. 
- zh_Hans: 是否是 SDXL 模型。设置此参数为 `是`,只查询 SDXL 模型,并包含大规模,高性能的模型。相反,设置为 `否`,将排除这些模型。如果未指定,将不会根据 SDXL 分类进行区分,包括查询结果中的所有模型类型。 - form: form - options: - - value: unspecified - label: - en_US: Unspecified - zh_Hans: 未指定 - - value: "true" - label: - en_US: "True" - zh_Hans: 是 - - value: "false" - label: - en_US: "False" - zh_Hans: 否 diff --git a/api/core/tools/provider/builtin/novitaai/tools/novitaai_txt2img.py b/api/core/tools/provider/builtin/novitaai/tools/novitaai_txt2img.py deleted file mode 100644 index 9c61eab9f95784..00000000000000 --- a/api/core/tools/provider/builtin/novitaai/tools/novitaai_txt2img.py +++ /dev/null @@ -1,90 +0,0 @@ -from base64 import b64decode -from copy import deepcopy -from typing import Any, Union - -from novita_client import ( - NovitaClient, -) - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.novitaai._novita_tool_base import NovitaAiToolBase -from core.tools.tool.builtin_tool import BuiltinTool - - -class NovitaAiTxt2ImgTool(BuiltinTool, NovitaAiToolBase): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - if "api_key" not in self.runtime.credentials or not self.runtime.credentials.get("api_key"): - raise ToolProviderCredentialValidationError("Novita AI API Key is required.") - - api_key = self.runtime.credentials.get("api_key") - - client = NovitaClient(api_key=api_key) - param = self._process_parameters(tool_parameters) - client_result = client.txt2img_v3(**param) - - results = [] - for image_encoded, image in zip(client_result.images_encoded, client_result.images): - if self._is_hit_nsfw_detection(image, 0.8): - results = self.create_text_message(text="NSFW detected!") - break - - results.append( - self.create_blob_message( - blob=b64decode(image_encoded), - meta={"mime_type": f"image/{image.image_type}"}, - 
save_as=self.VariableKey.IMAGE.value, - ) - ) - - return results - - def _process_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]: - """ - process parameters - """ - res_parameters = deepcopy(parameters) - - # delete none and empty - keys_to_delete = [k for k, v in res_parameters.items() if v is None or v == ""] - for k in keys_to_delete: - del res_parameters[k] - - if "clip_skip" in res_parameters and res_parameters.get("clip_skip") == 0: - del res_parameters["clip_skip"] - - if "refiner_switch_at" in res_parameters and res_parameters.get("refiner_switch_at") == 0: - del res_parameters["refiner_switch_at"] - - if "enabled_enterprise_plan" in res_parameters: - res_parameters["enterprise_plan"] = {"enabled": res_parameters["enabled_enterprise_plan"]} - del res_parameters["enabled_enterprise_plan"] - - if "nsfw_detection_level" in res_parameters: - res_parameters["nsfw_detection_level"] = int(res_parameters["nsfw_detection_level"]) - - # process loras - if "loras" in res_parameters: - res_parameters["loras"] = self._extract_loras(res_parameters.get("loras")) - - # process embeddings - if "embeddings" in res_parameters: - res_parameters["embeddings"] = self._extract_embeddings(res_parameters.get("embeddings")) - - # process hires_fix - if "hires_fix" in res_parameters: - res_parameters["hires_fix"] = self._extract_hires_fix(res_parameters.get("hires_fix")) - - # process refiner - if "refiner_switch_at" in res_parameters: - res_parameters["refiner"] = self._extract_refiner(res_parameters.get("refiner_switch_at")) - del res_parameters["refiner_switch_at"] - - return res_parameters diff --git a/api/core/tools/provider/builtin/novitaai/tools/novitaai_txt2img.yaml b/api/core/tools/provider/builtin/novitaai/tools/novitaai_txt2img.yaml deleted file mode 100644 index d625a643f915b1..00000000000000 --- a/api/core/tools/provider/builtin/novitaai/tools/novitaai_txt2img.yaml +++ /dev/null @@ -1,341 +0,0 @@ -identity: - name: novitaai_txt2img - author: Xiao Ley - 
label: - en_US: Novita AI Text to Image - zh_Hans: Novita AI 文字转图像 -description: - human: - en_US: Generate images from text prompts using Stable Diffusion models - zh_Hans: 通过 Stable Diffusion 模型根据文字提示生成图像 - llm: A tool for generate images from English text prompts. -parameters: - - name: model_name - type: string - required: true - label: - en_US: model name - zh_Hans: 模块名字 - human_description: - en_US: Specify the name of the model checkpoint. You can use the "Novita AI Model Query" tool to query the corresponding "sd_name" value (type select "Checkpoint"). - zh_Hans: 指定 Model Checkpoint 名称。可通过“Novita AI 模型请求”工具查询对应的“sd_name”值(类型选择“Checkpoint”)。 - form: form - - name: prompt - type: string - required: true - label: - en_US: prompt - zh_Hans: 提示 - human_description: - en_US: Text input required to guide the image generation, divided by `,`, Range [1, 1024]. Only English input is allowed. - zh_Hans: 生成图像的正向提示,用 `,` 分隔,范围 [1, 1024]。仅允许输入英文。 - llm_description: Image prompt of Novita AI, you should describe the image you want to generate as a list of words as possible as detailed, divided by `,`, Range [1, 1024]. Only English input is allowed. - form: llm - - name: negative_prompt - type: string - required: false - label: - en_US: negative prompt - zh_Hans: 负向提示 - human_description: - en_US: Text input that will not guide the image generation, divided by `,`, Range [1, 1024]. Only English input is allowed. - zh_Hans: 生成图像的负向提示,用 `,` 分隔,范围 [1, 1024]。仅允许输入英文。 - llm_description: Image negative prompt of Novita AI, divided by `,`, Range [1, 1024]. Only English input is allowed. - form: llm - - name: width - type: number - default: 512 - min: 128 - max: 2048 - required: true - label: - en_US: width - zh_Hans: 宽 - human_description: - en_US: Image width, Range [128, 2048]. 
- zh_Hans: 图像宽度,范围 [128, 2048] - form: form - - name: height - type: number - default: 512 - min: 128 - max: 2048 - required: true - label: - en_US: height - zh_Hans: 高 - human_description: - en_US: Image height, Range [128, 2048]. - zh_Hans: 图像高度,范围 [128, 2048] - form: form - - name: image_num - type: number - default: 1 - min: 1 - max: 8 - required: true - label: - en_US: image num - zh_Hans: 图片数 - human_description: - en_US: Image num, Range [1, 8]. - zh_Hans: 图片数,范围 [1, 8] - form: form - - name: steps - type: number - default: 20 - min: 1 - max: 100 - required: true - label: - en_US: steps - zh_Hans: 步数 - human_description: - en_US: The number of denoising steps. More steps usually can produce higher quality images, but take more time to generate, Range [1, 100]. - zh_Hans: 生成步数。更多步数可能会产生更好的图像,但生成时间更长,范围 [1, 100] - form: form - - name: seed - type: number - default: -1 - required: true - label: - en_US: seed - zh_Hans: 种子 - human_description: - en_US: A seed is a number from which Stable Diffusion generates noise, which, makes generation deterministic. Using the same seed and set of parameters will produce identical image each time, minimum -1. - zh_Hans: 种子是 Stable Diffusion 生成噪声的数字,它使生成具有确定性。使用相同的种子和参数设置将生成每次生成相同的图像,最小值 -1。 - form: form - - name: clip_skip - type: number - min: 1 - max: 12 - required: false - label: - en_US: clip skip - zh_Hans: 层跳过数 - human_description: - en_US: This parameter indicates the number of layers to stop from the bottom during optimization, so clip_skip on 2 would mean, that in SD1.x model where the CLIP has 12 layers, you would stop at 10th layer, Range [1, 12], get reference at https://novita.ai/get-started/Misc.html#what-s-clip-skip. 
- zh_Hans: 此参数表示优化过程中从底部停止的层数,因此 clip_skip 的值为 2,表示在 SD1.x 模型中,CLIP 有 12 层,你将停止在 10 层,范围 [1, 12],参考 https://novita.ai/get-started/Misc.html#what-s-clip-skip。 - form: form - - name: guidance_scale - type: number - default: "7.5" - min: 1.0 - max: 30.0 - required: true - label: - en_US: guidance scale - zh_Hans: 提示词遵守程度 - human_description: - en_US: This setting says how close the Stable Diffusion will listen to your prompt, higer guidance forces the model to better follow the prompt, but result in lower quality output.Range [1, 30]. - zh_Hans: 此设置表明 Stable Diffusion 如何听从您的提示,较高的 guidance_scale 会强制模型更好跟随提示,但结果会更低质量输出。范围 [1.0, 30.0]。 - form: form - - name: sampler_name - type: select - required: true - label: - en_US: sampler name - zh_Hans: 采样器名称 - human_description: - en_US: This parameter determines the denoising algorithm employed during the sampling phase of Stable Diffusion. Get reference at https://novita.ai/get-started/Misc.htmll#what-is-samplers. - zh_Hans: 此参数决定了在稳定扩散采样阶段使用的去噪算法。参考 https://novita.ai/get-started/Misc.htmll#what-is-samplers。 - form: form - options: - - value: "Euler a" - label: - en_US: Euler a - zh_Hans: Euler a - - value: "Euler" - label: - en_US: Euler - zh_Hans: Euler - - value: "LMS" - label: - en_US: LMS - zh_Hans: LMS - - value: "Heun" - label: - en_US: Heun - zh_Hans: Heun - - value: "DPM2" - label: - en_US: DPM2 - zh_Hans: DPM2 - - value: "DPM2 a" - label: - en_US: DPM2 a - zh_Hans: DPM2 a - - value: "DPM++ 2S a" - label: - en_US: DPM++ 2S a - zh_Hans: DPM++ 2S a - - value: "DPM++ 2M" - label: - en_US: DPM++ 2M - zh_Hans: DPM++ 2M - - value: "DPM++ SDE" - label: - en_US: DPM++ SDE - zh_Hans: DPM++ SDE - - value: "DPM fast" - label: - en_US: DPM fast - zh_Hans: DPM fast - - value: "DPM adaptive" - label: - en_US: DPM adaptive - zh_Hans: DPM adaptive - - value: "LMS Karras" - label: - en_US: LMS Karras - zh_Hans: LMS Karras - - value: "DPM2 Karras" - label: - en_US: DPM2 Karras - zh_Hans: DPM2 Karras - - value: "DPM2 a Karras" - label: 
- en_US: DPM2 a Karras - zh_Hans: DPM2 a Karras - - value: "DPM++ 2S a Karras" - label: - en_US: DPM++ 2S a Karras - zh_Hans: DPM++ 2S a Karras - - value: "DPM++ 2M Karras" - label: - en_US: DPM++ 2M Karras - zh_Hans: DPM++ 2M Karras - - value: "DPM++ SDE Karras" - label: - en_US: DPM++ SDE Karras - zh_Hans: DPM++ SDE Karras - - value: "DDIM" - label: - en_US: DDIM - zh_Hans: DDIM - - value: "PLMS" - label: - en_US: PLMS - zh_Hans: PLMS - - value: "UniPC" - label: - en_US: UniPC - zh_Hans: UniPC - - name: sd_vae - type: string - required: false - label: - en_US: sd vae - zh_Hans: sd vae - human_description: - en_US: VAE(Variational Autoencoder), get reference at https://novita.ai/get-started/Misc.html#what-s-variational-autoencoders-vae. You can use the "Novita AI Model Query" tool to query the corresponding "sd_name" value (type select "VAE"). - zh_Hans: VAE(变分自编码器),参考 https://novita.ai/get-started/Misc.html#what-s-variational-autoencoders-vae。可通过“Novita AI 模型请求”工具查询对应的“sd_name”值(类型选择“VAE”)。 - form: form - - name: loras - type: string - required: false - label: - en_US: loRAs - zh_Hans: loRAs - human_description: - en_US: LoRA models. Currenlty supports up to 5 LoRAs. You can use the "Novita AI Model Query" tool to query the corresponding "sd_name" value (type select "LoRA"). Input template is ",;,;...". Such as"Film Grain style_331903,0.5;DoggystylePOV_9600,0.5" - zh_Hans: LoRA 模型。目前仅支持 5 个 LoRA。可通过“Novita AI 模型请求”工具查询对应的“sd_name”值(类型选择“LoRA”)。输入模板:“,;,;...”,例如:“Film Grain style_331903,0.5;DoggystylePOV_9600,0.5” - form: form - - name: embeddings - type: string - required: false - label: - en_US: text embeddings - zh_Hans: 文本嵌入 - human_description: - en_US: Textual Inversion is a training method for personalizing models by learning new text embeddings from a few example images, currenlty supports up to 5 embeddings. You can use the "Novita AI Model Query" tool to query the corresponding "sd_name" value (type select "Text Inversion"). Input template is ";;...". 
Such as "EasyNegativeV2_75525;AS-YoungerV2" - zh_Hans: 文本反转是一种通过从一些示例图像中学习新的文本嵌入来个性化模型的训练方法,目前仅支持 5 个嵌入。可通过“Novita AI 模型请求”工具查询对应的“sd_name”值(类型选择“Text Inversion”)。输入模板:“;;...”,例如:“EasyNegativeV2_75525;AS-YoungerV2” - form: form - - name: hires_fix - type: string - required: false - label: - en_US: hires fix - zh_Hans: 高分辨率修复 - human_description: - en_US: Use high resolution image fix. Input template is ",,,". Such as "1024,1024,0.8", "1024,1024,0.8,RealESRGAN_x4plus_anime_6B" - zh_Hans: 使用高分辨率修复。输入模板 “,,,”。例如 “1024,1024,0.8”、“1024,1024,0.8,RealESRGAN_x4plus_anime_6B” - form: form - - name: refiner_switch_at - type: number - min: 0.0 - max: 1.0 - required: false - label: - en_US: refiner switch at - zh_Hans: 重采样参与时刻 - human_description: - en_US: This parameter in the context of a refiner allows you to set the extent to which the refiner alters the output of a model. When set to 0, the refiner has no effect; at 1, it's fully active. Intermediate values like 0.5 provide a balanced effect, where the refiner is moderately engaged, enhancing or adjusting the output without dominating the original model's characteristics. This setting is particularly useful for fine-tuning the output to achieve a desired balance between refinement and the original generative features, Range [0, 1.0]. Is not all models support refiners! - zh_Hans: 此参数允许您设置重采样更改模型输出的程度。当设置为0时,重采样不起作用;1时,它处于完全活动状态。像0.5这样的中间值提供了一种平衡效果,其中重采样适度参与,增强或调整输出,而不会主导原始模型的特性。此设置对于微调输出特别有用,范围 [0, 1.0]。不是所有模型都支持重采样! 
- form: form - - name: response_image_type - type: select - default: jpeg - required: false - label: - en_US: response image type - zh_Hans: 响应图像类型 - human_description: - en_US: Response image type, png or jpeg - zh_Hans: 响应图像类型,png 或 jpeg - form: form - options: - - value: jpeg - label: - en_US: jpeg - zh_Hans: jpeg - - value: png - label: - en_US: png - zh_Hans: png - - name: enabled_enterprise_plan - type: boolean - default: false - required: false - label: - en_US: enterprise plan enabled - zh_Hans: 企业版计划启用 - human_description: - en_US: Enable enterprise plan - zh_Hans: 启用企业版计划 - form: form - - name: enable_nsfw_detection - type: boolean - default: false - required: false - label: - en_US: enable nsfw detection - zh_Hans: 启用 NSFW 检测 - human_description: - en_US: Enable nsfw detection - zh_Hans: 启用 NSFW 检测 - form: form - - name: nsfw_detection_level - type: select - default: "2" - required: false - label: - en_US: nsfw detection level - zh_Hans: NSFW 检测级别 - human_description: - en_US: Nsfw detection level, from low to high - zh_Hans: NSFW 检测级别,越高越严格 - form: form - options: - - value: "0" - label: - en_US: low - zh_Hans: 低 - - value: "1" - label: - en_US: middle - zh_Hans: 中 - - value: "2" - label: - en_US: high - zh_Hans: 高 diff --git a/api/core/tools/provider/builtin/onebot/_assets/icon.ico b/api/core/tools/provider/builtin/onebot/_assets/icon.ico deleted file mode 100644 index 1b07e965b9910b4b006bc112378a8ba0306895a8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 38078 zcmeI52douE7soFaiWQKeqLElp5s8Xju@}S-kfNf5r-`46AYwryvBqdL8u_AVVuOeY zU@u4%6rWg8?4S@!C>AUziqc`e|8L)$Wp>$pckk}r`yR+4|F^Sqch8(TGdpwU%-N+- zXrzBtRR#TRUl@OXq0pvKD0C2%Rj3xo_h@9zp+5f_V+uvEnSO66bQXFGBZS9ft`Pu-NW>>~BvTEBM`s)PrGw}b`4-@+Op zEpabA;5AJcB@7fOYbznPEy}5jO7gCwj!pC%ef&yclJJADS|~TIQUR1jnZpERK$gh1 zsHCp)EGw=)%5Ni_Eld!86zZFhVZ6{!Xf15;?GRIkU6emWcvo01G&Dk{cZ9(LvaZ(- zdG^n8@QRXId2I#s{;^Pr@kPaCB4qqXpdENt8+ji 
z;1U6y_&SQyW14)IZ*4i~ezCBLYBcbN4zxk;&w+UQZK^cmJ!8(MelI12i05FS;OZ+) zclYlJ^U^tUZG#%`moHy#!0OcvVu{UskjtDLp?hr1irP7A)~qqV{q~#r@%}y(INwn3KMow zX4_Oe+~8GDI z&6+jye&ct*dAoM)%+W_5ZLYZD3iIr<&w4y7*cPl00&6sXP)U#o>7MmA>)NssW5|2& zy=Shu<{HzfQ>XQGYIWP9MGN!FE3f#nEL^zI9C5@E0sY%+?UD94@W2Dj@ZrPFyYIeh zmMvT6-(SWA>*Rp1B7y3?OLU*x_nB}1DVLlpSFSYErcLw4kG5^w)~QFUPxNT}GRLd0 zzB* zC!BDCH?~VQkH(D~=Z{bJnqo)F95Q5xnK^T&WmAqLbHV;XqBd}^QyQQ57$JA!`5!xW ztU2I-0}}UJSD(A@zPtJAE2>sQ)2B~2ty{NF#cTil_cxC|`l$K)@3OhdiNgr&AE+*o zr0e{4Qo65@yHG#<^piIyZoBQasq`hmP0N-o&6{t&De-FP>#x5yd+f1CYJS^nvyHjz zvdg?V+Vaft$o>H_Furc1l<)qobX$Rat6XW;tXZae_wJ@qqeiK9h5onRdTaCIi!b`J zEm*L?bnMtM6HjEqw)*tbPkp{QCg?1Iz07bM=y$(;lpZ4Fx}f>+!w-wvDd;u{utl*c zEn% zn0+4CCwJa?XVa%oA8(EO!3Q50Y<=c#)@WhEj$OQXvH9kkZ_K1glgte_+~BRh(zXT9 zLC#|lzN!(u`5W=j1}nwT$<1`i;wP-@d))OL+hN_su{5q+fdc^2;xtFM?;L zdGqEa%fWoW*p?%`EnckzzYRj^vvp1!Qf6X3%i6z`xpuqlwlf!Ac%fmuTD6KHGap*A zWQlqH`RC1vC!S~+%i`KSoR92-1`W#7HeIF~VOK`!pBTsq`PA5$C!Tnsq;a@IhYn`U zm@#?Qb-k+m_~Vb}#v5-mty;B8+y>aDtPgUeapHxp!)-tuD*n^VG-u8n&xaH4PjTnR z9e14P`^j8z0tU_yj(%%l8Lc)a%7 zYi6&#_Da+S_%~jE9k+5WG*MjK_Ya7zH5a!i2=Wqw~){ zKa)M?)b)F%_Y>?hP^EO{^)B`z{@u7|*yiKI$7k6Pi22}_TW(2Y7qw~A#`BfuAUDR{ z00I58Xa9hZiC7a|d+oJFbx#`%9z57AUAi<=d3oNjx^hKV)vF~=NZ zdiU;~+NOH><(JL&+izdA4X{OXh<8Efy@V0Mns9weIsg6l-#uTK?dNgFi!Qn-^`7gD z88Zy?HoWh=^Un3e8#!{MuQ$f>GHOjbhRs#!YAJdj}%ySk1_6U&xm&Pu9(#wf-) zY#IE=M;&!kNn3f(QM5j+Vba=hP7C1gOuxaQo6+fj0RxK4d)HleImMQqOsapz z4r1J}m+)s3n-aBO7AFDqx%lFXi`EBQEQk25C6Z%hoGeN5@d@_q*|Sc0+4Fehkw=oS zi@l~4{lEP-+f3={`s=T+vpziEJR@25=4z$Gp^85=HQ`5KY>ygqY+mEWjlKPt)P?BA zO6os}4RrF!CmZ(FG7>(w%=$n5^wT%#KV3th|B8%14?XlyItgp6>=_n?^VIpNYwc>-RSQ z@WT)H*4JsuaPGnO94=IB|7F)*clB%8X|6& z_Op79JMykZ7GGAaY%Jb-2*hGlWdD)+@U9#&2Wf~o8M`RT?jVo-$Fx3HdZ3A?AcK>W$5tla2- zN#jq5V+P#P*^2oqNMRxyuUSS)TliC=b7Z? 
zX5v-xw{5xQmPN;X;;*9So)khb0QVQrJ;*KokobMuSL!-44!!#7t17yFiSHMn*Sci2 zzcD}L^zJ+_b#xG{{)q*xc>L`_2OVU1X6lRhmd8H(?2}0Mv=P_mM9+&)Gr{Vg>Mi-vh$6O9AmYYN}{ubDBY9STFC9?Z{9yW^UO07kFmDitb>Rj zdEtc@yx5Vb^{(VBO00(&*YI1|y&GFbk|TZLhaWP(mg34u%romROmyAX@>*I$2mvEkUYyi?J&YgYr0 zFyAES#CWES6^>CH>#IuyU*D3$BR5a!U4)N>+z4OoA%`52N)HmyyN_|0Z=4)^?6KZ{ z7;Qp~k0sjqth3HCykC$chS~CsI${^I_n9-na3^6s{nH3;zS6lq1LEiT%hP^`AWBE< zhY>4iiFfH(S4Ul2N*?{b47z8%d$Hi_SuzF4{Y~jMLS~((7#TPq<}|1^gCyV8Ax%mGH?5g_Jd?4q>a#jrZ$9SC>5 zaR2@H`!Tonnxsc$AwHEji!#t(;@MO1^((m|$-PEt*2klT+zH#1^+74?PYLWl2gK8j z9zEKZi@BKgaIZ<4X0QG6#~;tjuj9+)Bpz0}sZdn^G>Utj(yY@m$ES-&B(YBBy`@sW zzsI`-?s}AYxYW6YGU@YK-bsk8R4|i8^S*+wS9DyA-0PLD5_ngsEW{YXJM;KWle{}D z*}cB*`eG(eo?J9$f%^b@=UcgaKQ8S)TtY=Kb`KCr(mieC-lH_nPUig?@u-u~C%#bj zaQIfLjrFm5vq$_Cw3j7x$bPK-J?aQA#yY+m%HCT}-*K=a#2xR`5BPL^eM+{(vJ-16_A;?&7@Jssp>y_KiNoSMV{YF^pW%1UxR!4g^X>)T7v)=Ve521@ua&tX zQ^bE~A(ifFB=_FhbQc{n$I6LEVnScU%n@IN4vFD)zemOI_{Mtl=;7_1V^fF|gG5=s8pu@x+-?=c%L+9OvvKVRNn~E4qfNP7UVwBHKIE%UonRRcQA==bOzos>oQwkcfxO4HT@g$6YrNS;a(<2_?e+4{ZJy*;Wdzm03t%*hqCi&S1i zAncr6=US`dVn>x5ZUb!7imVSBg5o6u_I)lkW-(pI{jT!EZNM6hb#mE!72;jrginEW zZD4Js^Efq?HJ@@rz*jL)*citn#v8`^fKNf^Wz}&p=9C`+{?85GtD(Pa=!E|sA9BEt zsdKO1#q?8&->od6Bl`!v1@`<`2^$)%l)&s;v3J6Ay*$;q@WDbyp|KFqw>{sGbX}LMwmvb(t(3o?fPII3%{V}C4{M1vLRwrYhE?yOO=UCcPW!xix7<5_5A+ zni~4uN6ED{*@ePQKAS?JtIwuT*v)6-Nio-6FQ(NtEs1WnX%%3TlFnzZ0&1n?*RF25 zR$}sZbJJeM$yLE~y@U6siJPwWvb;Z=yXl+P#{ecHbxD&ZSAJ3*Ht#qPv)-6 zZ{nuizUTHyH|_ReH|_R$_gM%$Bh|KGJv)BEdZzq>^~C#y=^6A3(6i|maFbd0uqkN^ d@SgEnrrxt(%V;;Qsb$oav^$0b#uk6f`9C{jz#jkr diff --git a/api/core/tools/provider/builtin/onebot/onebot.py b/api/core/tools/provider/builtin/onebot/onebot.py deleted file mode 100644 index b8e5ed24d6b43f..00000000000000 --- a/api/core/tools/provider/builtin/onebot/onebot.py +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class 
OneBotProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - if not credentials.get("ob11_http_url"): - raise ToolProviderCredentialValidationError("OneBot HTTP URL is required.") diff --git a/api/core/tools/provider/builtin/onebot/onebot.yaml b/api/core/tools/provider/builtin/onebot/onebot.yaml deleted file mode 100644 index 1922adc4de4d56..00000000000000 --- a/api/core/tools/provider/builtin/onebot/onebot.yaml +++ /dev/null @@ -1,35 +0,0 @@ -identity: - author: RockChinQ - name: onebot - label: - en_US: OneBot v11 Protocol - zh_Hans: OneBot v11 协议 - description: - en_US: Unofficial OneBot v11 Protocol Tool - zh_Hans: 非官方 OneBot v11 协议工具 - icon: icon.ico -credentials_for_provider: - ob11_http_url: - type: text-input - required: true - label: - en_US: HTTP URL - zh_Hans: HTTP URL - description: - en_US: Forward HTTP URL of OneBot v11 - zh_Hans: OneBot v11 正向 HTTP URL - help: - en_US: Fill this with the HTTP URL of your OneBot server - zh_Hans: 请在你的 OneBot 协议端开启 正向 HTTP 并填写其 URL - access_token: - type: secret-input - required: false - label: - en_US: Access Token - zh_Hans: 访问令牌 - description: - en_US: Access Token for OneBot v11 Protocol - zh_Hans: OneBot 协议访问令牌 - help: - en_US: Fill this if you set a access token in your OneBot server - zh_Hans: 如果你在 OneBot 服务器中设置了 access token,请填写此项 diff --git a/api/core/tools/provider/builtin/onebot/tools/__init__.py b/api/core/tools/provider/builtin/onebot/tools/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/tools/provider/builtin/onebot/tools/send_group_msg.py b/api/core/tools/provider/builtin/onebot/tools/send_group_msg.py deleted file mode 100644 index 9c95bbc2ae8d2d..00000000000000 --- a/api/core/tools/provider/builtin/onebot/tools/send_group_msg.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Any, Union - -import requests -from yarl import URL - -from core.tools.entities.tool_entities import ToolInvokeMessage 
-from core.tools.tool.builtin_tool import BuiltinTool - - -class SendGroupMsg(BuiltinTool): - """OneBot v11 Tool: Send Group Message""" - - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - # Get parameters - send_group_id = tool_parameters.get("group_id", "") - - message = tool_parameters.get("message", "") - if not message: - return self.create_json_message({"error": "Message is empty."}) - - auto_escape = tool_parameters.get("auto_escape", False) - - try: - url = URL(self.runtime.credentials["ob11_http_url"]) / "send_group_msg" - - resp = requests.post( - url, - json={"group_id": send_group_id, "message": message, "auto_escape": auto_escape}, - headers={"Authorization": "Bearer " + self.runtime.credentials["access_token"]}, - ) - - if resp.status_code != 200: - return self.create_json_message({"error": f"Failed to send group message: {resp.text}"}) - - return self.create_json_message({"response": resp.json()}) - except Exception as e: - return self.create_json_message({"error": f"Failed to send group message: {e}"}) diff --git a/api/core/tools/provider/builtin/onebot/tools/send_group_msg.yaml b/api/core/tools/provider/builtin/onebot/tools/send_group_msg.yaml deleted file mode 100644 index 64beaa85457a3a..00000000000000 --- a/api/core/tools/provider/builtin/onebot/tools/send_group_msg.yaml +++ /dev/null @@ -1,46 +0,0 @@ -identity: - name: send_group_msg - author: RockChinQ - label: - en_US: Send Group Message - zh_Hans: 发送群消息 -description: - human: - en_US: Send a message to a group - zh_Hans: 发送消息到群聊 - llm: A tool for sending a message segment to a group -parameters: - - name: group_id - type: number - required: true - label: - en_US: Target Group ID - zh_Hans: 目标群 ID - human_description: - en_US: The group ID of the target group - zh_Hans: 目标群的群 ID - llm_description: The group ID of the target group - form: llm - - name: message - type: string - required: true - label: - en_US: Message 
- zh_Hans: 消息 - human_description: - en_US: The message to send - zh_Hans: 要发送的消息。支持 CQ码(需要同时设置 auto_escape 为 true) - llm_description: The message to send - form: llm - - name: auto_escape - type: boolean - required: false - default: false - label: - en_US: Auto Escape - zh_Hans: 自动转义 - human_description: - en_US: If true, the message will be treated as a CQ code for parsing, otherwise it will be treated as plain text for direct sending. Since Dify currently does not support passing Object-format message chains, developers can send complex message components through CQ codes. - zh_Hans: 若为 true 则会把 message 视为 CQ 码解析,否则视为 纯文本 直接发送。由于 Dify 目前不支持传入 Object格式 的消息,故开发者可以通过 CQ 码来发送复杂消息组件。 - llm_description: If true, the message will be treated as a CQ code for parsing, otherwise it will be treated as plain text for direct sending. - form: form diff --git a/api/core/tools/provider/builtin/onebot/tools/send_private_msg.py b/api/core/tools/provider/builtin/onebot/tools/send_private_msg.py deleted file mode 100644 index 1174c7f07d002f..00000000000000 --- a/api/core/tools/provider/builtin/onebot/tools/send_private_msg.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Any, Union - -import requests -from yarl import URL - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class SendPrivateMsg(BuiltinTool): - """OneBot v11 Tool: Send Private Message""" - - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - # Get parameters - send_user_id = tool_parameters.get("user_id", "") - - message = tool_parameters.get("message", "") - if not message: - return self.create_json_message({"error": "Message is empty."}) - - auto_escape = tool_parameters.get("auto_escape", False) - - try: - url = URL(self.runtime.credentials["ob11_http_url"]) / "send_private_msg" - - resp = requests.post( - url, - json={"user_id": send_user_id, "message": 
message, "auto_escape": auto_escape}, - headers={"Authorization": "Bearer " + self.runtime.credentials["access_token"]}, - ) - - if resp.status_code != 200: - return self.create_json_message({"error": f"Failed to send private message: {resp.text}"}) - - return self.create_json_message({"response": resp.json()}) - except Exception as e: - return self.create_json_message({"error": f"Failed to send private message: {e}"}) diff --git a/api/core/tools/provider/builtin/onebot/tools/send_private_msg.yaml b/api/core/tools/provider/builtin/onebot/tools/send_private_msg.yaml deleted file mode 100644 index 8200ce4a83f4e2..00000000000000 --- a/api/core/tools/provider/builtin/onebot/tools/send_private_msg.yaml +++ /dev/null @@ -1,46 +0,0 @@ -identity: - name: send_private_msg - author: RockChinQ - label: - en_US: Send Private Message - zh_Hans: 发送私聊消息 -description: - human: - en_US: Send a private message to a user - zh_Hans: 发送私聊消息给用户 - llm: A tool for sending a message segment to a user in private chat -parameters: - - name: user_id - type: number - required: true - label: - en_US: Target User ID - zh_Hans: 目标用户 ID - human_description: - en_US: The user ID of the target user - zh_Hans: 目标用户的用户 ID - llm_description: The user ID of the target user - form: llm - - name: message - type: string - required: true - label: - en_US: Message - zh_Hans: 消息 - human_description: - en_US: The message to send - zh_Hans: 要发送的消息。支持 CQ码(需要同时设置 auto_escape 为 true) - llm_description: The message to send - form: llm - - name: auto_escape - type: boolean - required: false - default: false - label: - en_US: Auto Escape - zh_Hans: 自动转义 - human_description: - en_US: If true, the message will be treated as a CQ code for parsing, otherwise it will be treated as plain text for direct sending. Since Dify currently does not support passing Object-format message chains, developers can send complex message components through CQ codes. 
- zh_Hans: 若为 true 则会把 message 视为 CQ 码解析,否则视为 纯文本 直接发送。由于 Dify 目前不支持传入 Object格式 的消息,故开发者可以通过 CQ 码来发送复杂消息组件。 - llm_description: If true, the message will be treated as a CQ code for parsing, otherwise it will be treated as plain text for direct sending. - form: form diff --git a/api/core/tools/provider/builtin/openweather/_assets/icon.svg b/api/core/tools/provider/builtin/openweather/_assets/icon.svg deleted file mode 100644 index f06cd87e64c9d3..00000000000000 --- a/api/core/tools/provider/builtin/openweather/_assets/icon.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/openweather/openweather.py b/api/core/tools/provider/builtin/openweather/openweather.py deleted file mode 100644 index 9e40249aba6b40..00000000000000 --- a/api/core/tools/provider/builtin/openweather/openweather.py +++ /dev/null @@ -1,29 +0,0 @@ -import requests - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -def query_weather(city="Beijing", units="metric", language="zh_cn", api_key=None): - url = "https://api.openweathermap.org/data/2.5/weather" - params = {"q": city, "appid": api_key, "units": units, "lang": language} - - return requests.get(url, params=params) - - -class OpenweatherProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - if "api_key" not in credentials or not credentials.get("api_key"): - raise ToolProviderCredentialValidationError("Open weather API key is required.") - apikey = credentials.get("api_key") - try: - response = query_weather(api_key=apikey) - if response.status_code == 200: - pass - else: - raise ToolProviderCredentialValidationError((response.json()).get("info")) - except Exception as e: - raise ToolProviderCredentialValidationError("Open weather API Key is invalid. 
{}".format(e)) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/openweather/openweather.yaml b/api/core/tools/provider/builtin/openweather/openweather.yaml deleted file mode 100644 index d4b66f87f908c6..00000000000000 --- a/api/core/tools/provider/builtin/openweather/openweather.yaml +++ /dev/null @@ -1,31 +0,0 @@ -identity: - author: Onelevenvy - name: openweather - label: - en_US: Open weather query - zh_Hans: Open Weather - pt_BR: Consulta de clima open weather - description: - en_US: Weather query toolkit based on Open Weather - zh_Hans: 基于open weather的天气查询工具包 - pt_BR: Kit de consulta de clima baseado no Open Weather - icon: icon.svg - tags: - - weather -credentials_for_provider: - api_key: - type: secret-input - required: true - label: - en_US: API Key - zh_Hans: API Key - pt_BR: Fogo a chave - placeholder: - en_US: Please enter your open weather API Key - zh_Hans: 请输入你的open weather API Key - pt_BR: Insira sua chave de API open weather - help: - en_US: Get your API Key from open weather - zh_Hans: 从open weather获取您的 API Key - pt_BR: Obtenha sua chave de API do open weather - url: https://openweathermap.org diff --git a/api/core/tools/provider/builtin/openweather/tools/weather.py b/api/core/tools/provider/builtin/openweather/tools/weather.py deleted file mode 100644 index ed4ec487fa984a..00000000000000 --- a/api/core/tools/provider/builtin/openweather/tools/weather.py +++ /dev/null @@ -1,52 +0,0 @@ -import json -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class OpenweatherTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - city = tool_parameters.get("city", "") - if not city: - return self.create_text_message("Please tell me your city") - 
if "api_key" not in self.runtime.credentials or not self.runtime.credentials.get("api_key"): - return self.create_text_message("OpenWeather API key is required.") - - units = tool_parameters.get("units", "metric") - lang = tool_parameters.get("lang", "zh_cn") - try: - # request URL - url = "https://api.openweathermap.org/data/2.5/weather" - - # request params - params = { - "q": city, - "appid": self.runtime.credentials.get("api_key"), - "units": units, - "lang": lang, - } - response = requests.get(url, params=params) - - if response.status_code == 200: - data = response.json() - return self.create_text_message( - self.summary(user_id=user_id, content=json.dumps(data, ensure_ascii=False)) - ) - else: - error_message = { - "error": f"failed:{response.status_code}", - "data": response.text, - } - # return error - return json.dumps(error_message) - - except Exception as e: - return self.create_text_message("Openweather API Key is invalid. {}".format(e)) diff --git a/api/core/tools/provider/builtin/openweather/tools/weather.yaml b/api/core/tools/provider/builtin/openweather/tools/weather.yaml deleted file mode 100644 index f2dae5c2df9c08..00000000000000 --- a/api/core/tools/provider/builtin/openweather/tools/weather.yaml +++ /dev/null @@ -1,80 +0,0 @@ -identity: - name: weather - author: Onelevenvy - label: - en_US: Open Weather Query - zh_Hans: 天气查询 - pt_BR: Previsão do tempo - icon: icon.svg -description: - human: - en_US: Weather forecast inquiry - zh_Hans: 天气查询 - pt_BR: Inquérito sobre previsão meteorológica - llm: A tool when you want to ask about the weather or weather-related question -parameters: - - name: city - type: string - required: true - label: - en_US: city - zh_Hans: 城市 - pt_BR: cidade - human_description: - en_US: Target city for weather forecast query - zh_Hans: 天气预报查询的目标城市 - pt_BR: Cidade de destino para consulta de previsão do tempo - llm_description: If you don't know you can extract the city name from the - question or you can reply:Please tell 
me your city. You have to extract - the Chinese city name from the question.If the input region is in Chinese - characters for China, it should be replaced with the corresponding English - name, such as '北京' for correct input is 'Beijing' - form: llm - - name: lang - type: select - required: true - human_description: - en_US: language - zh_Hans: 语言 - pt_BR: language - label: - en_US: language - zh_Hans: 语言 - pt_BR: language - form: form - options: - - value: zh_cn - label: - en_US: cn - zh_Hans: 中国 - pt_BR: cn - - value: en_us - label: - en_US: usa - zh_Hans: 美国 - pt_BR: usa - default: zh_cn - - name: units - type: select - required: true - human_description: - en_US: units for temperature - zh_Hans: 温度单位 - pt_BR: units for temperature - label: - en_US: units - zh_Hans: 单位 - pt_BR: units - form: form - options: - - value: metric - label: - en_US: metric - zh_Hans: ℃ - pt_BR: metric - - value: imperial - label: - en_US: imperial - zh_Hans: ℉ - pt_BR: imperial - default: metric diff --git a/api/core/tools/provider/builtin/perplexity/_assets/icon.svg b/api/core/tools/provider/builtin/perplexity/_assets/icon.svg deleted file mode 100644 index c2974c142fc622..00000000000000 --- a/api/core/tools/provider/builtin/perplexity/_assets/icon.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/api/core/tools/provider/builtin/perplexity/perplexity.py b/api/core/tools/provider/builtin/perplexity/perplexity.py deleted file mode 100644 index 80518853fb4a4b..00000000000000 --- a/api/core/tools/provider/builtin/perplexity/perplexity.py +++ /dev/null @@ -1,38 +0,0 @@ -from typing import Any - -import requests - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.perplexity.tools.perplexity_search import PERPLEXITY_API_URL -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class PerplexityProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> 
None: - headers = { - "Authorization": f"Bearer {credentials.get('perplexity_api_key')}", - "Content-Type": "application/json", - } - - payload = { - "model": "llama-3.1-sonar-small-128k-online", - "messages": [ - {"role": "system", "content": "You are a helpful assistant."}, - {"role": "user", "content": "Hello"}, - ], - "max_tokens": 5, - "temperature": 0.1, - "top_p": 0.9, - "stream": False, - } - - try: - response = requests.post(PERPLEXITY_API_URL, json=payload, headers=headers) - response.raise_for_status() - except requests.RequestException as e: - raise ToolProviderCredentialValidationError(f"Failed to validate Perplexity API key: {str(e)}") - - if response.status_code != 200: - raise ToolProviderCredentialValidationError( - f"Perplexity API key is invalid. Status code: {response.status_code}" - ) diff --git a/api/core/tools/provider/builtin/perplexity/perplexity.yaml b/api/core/tools/provider/builtin/perplexity/perplexity.yaml deleted file mode 100644 index c0b504f300c45a..00000000000000 --- a/api/core/tools/provider/builtin/perplexity/perplexity.yaml +++ /dev/null @@ -1,26 +0,0 @@ -identity: - author: Dify - name: perplexity - label: - en_US: Perplexity - zh_Hans: Perplexity - description: - en_US: Perplexity.AI - zh_Hans: Perplexity.AI - icon: icon.svg - tags: - - search -credentials_for_provider: - perplexity_api_key: - type: secret-input - required: true - label: - en_US: Perplexity API key - zh_Hans: Perplexity API key - placeholder: - en_US: Please input your Perplexity API key - zh_Hans: 请输入你的 Perplexity API key - help: - en_US: Get your Perplexity API key from Perplexity - zh_Hans: 从 Perplexity 获取您的 Perplexity API key - url: https://www.perplexity.ai/settings/api diff --git a/api/core/tools/provider/builtin/perplexity/tools/perplexity_search.py b/api/core/tools/provider/builtin/perplexity/tools/perplexity_search.py deleted file mode 100644 index 5ed4b9ca993483..00000000000000 --- 
a/api/core/tools/provider/builtin/perplexity/tools/perplexity_search.py +++ /dev/null @@ -1,67 +0,0 @@ -import json -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -PERPLEXITY_API_URL = "https://api.perplexity.ai/chat/completions" - - -class PerplexityAITool(BuiltinTool): - def _parse_response(self, response: dict) -> dict: - """Parse the response from Perplexity AI API""" - if "choices" in response and len(response["choices"]) > 0: - message = response["choices"][0]["message"] - return { - "content": message.get("content", ""), - "role": message.get("role", ""), - "citations": response.get("citations", []), - } - else: - return {"content": "Unable to get a valid response", "role": "assistant", "citations": []} - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - headers = { - "Authorization": f"Bearer {self.runtime.credentials['perplexity_api_key']}", - "Content-Type": "application/json", - } - - payload = { - "model": tool_parameters.get("model", "llama-3.1-sonar-small-128k-online"), - "messages": [ - {"role": "system", "content": "Be precise and concise."}, - {"role": "user", "content": tool_parameters["query"]}, - ], - "max_tokens": tool_parameters.get("max_tokens", 4096), - "temperature": tool_parameters.get("temperature", 0.7), - "top_p": tool_parameters.get("top_p", 1), - "top_k": tool_parameters.get("top_k", 5), - "presence_penalty": tool_parameters.get("presence_penalty", 0), - "frequency_penalty": tool_parameters.get("frequency_penalty", 1), - "stream": False, - } - - if "search_recency_filter" in tool_parameters: - payload["search_recency_filter"] = tool_parameters["search_recency_filter"] - if "return_citations" in tool_parameters: - payload["return_citations"] = tool_parameters["return_citations"] - if "search_domain_filter" in tool_parameters: - 
if isinstance(tool_parameters["search_domain_filter"], str): - payload["search_domain_filter"] = [tool_parameters["search_domain_filter"]] - elif isinstance(tool_parameters["search_domain_filter"], list): - payload["search_domain_filter"] = tool_parameters["search_domain_filter"] - - response = requests.post(url=PERPLEXITY_API_URL, json=payload, headers=headers) - response.raise_for_status() - valuable_res = self._parse_response(response.json()) - - return [ - self.create_json_message(valuable_res), - self.create_text_message(json.dumps(valuable_res, ensure_ascii=False, indent=2)), - ] diff --git a/api/core/tools/provider/builtin/perplexity/tools/perplexity_search.yaml b/api/core/tools/provider/builtin/perplexity/tools/perplexity_search.yaml deleted file mode 100644 index 02a645df335aaf..00000000000000 --- a/api/core/tools/provider/builtin/perplexity/tools/perplexity_search.yaml +++ /dev/null @@ -1,178 +0,0 @@ -identity: - name: perplexity - author: Dify - label: - en_US: Perplexity Search -description: - human: - en_US: Search information using Perplexity AI's language models. - llm: This tool is used to search information using Perplexity AI's language models. -parameters: - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 查询 - human_description: - en_US: The text query to be processed by the AI model. - zh_Hans: 要由 AI 模型处理的文本查询。 - form: llm - - name: model - type: select - required: false - label: - en_US: Model Name - zh_Hans: 模型名称 - human_description: - en_US: The Perplexity AI model to use for generating the response. 
- zh_Hans: 用于生成响应的 Perplexity AI 模型。 - form: form - default: "llama-3.1-sonar-small-128k-online" - options: - - value: llama-3.1-sonar-small-128k-online - label: - en_US: llama-3.1-sonar-small-128k-online - zh_Hans: llama-3.1-sonar-small-128k-online - - value: llama-3.1-sonar-large-128k-online - label: - en_US: llama-3.1-sonar-large-128k-online - zh_Hans: llama-3.1-sonar-large-128k-online - - value: llama-3.1-sonar-huge-128k-online - label: - en_US: llama-3.1-sonar-huge-128k-online - zh_Hans: llama-3.1-sonar-huge-128k-online - - name: max_tokens - type: number - required: false - label: - en_US: Max Tokens - zh_Hans: 最大令牌数 - pt_BR: Máximo de Tokens - human_description: - en_US: The maximum number of tokens to generate in the response. - zh_Hans: 在响应中生成的最大令牌数。 - pt_BR: O número máximo de tokens a serem gerados na resposta. - form: form - default: 4096 - min: 1 - max: 4096 - - name: temperature - type: number - required: false - label: - en_US: Temperature - zh_Hans: 温度 - pt_BR: Temperatura - human_description: - en_US: Controls randomness in the output. Lower values make the output more focused and deterministic. - zh_Hans: 控制输出的随机性。较低的值使输出更加集中和确定。 - form: form - default: 0.7 - min: 0 - max: 1 - - name: top_k - type: number - required: false - label: - en_US: Top K - zh_Hans: 取样数量 - human_description: - en_US: The number of top results to consider for response generation. - zh_Hans: 用于生成响应的顶部结果数量。 - form: form - default: 5 - min: 1 - max: 100 - - name: top_p - type: number - required: false - label: - en_US: Top P - zh_Hans: Top P - human_description: - en_US: Controls diversity via nucleus sampling. - zh_Hans: 通过核心采样控制多样性。 - form: form - default: 1 - min: 0.1 - max: 1 - step: 0.1 - - name: presence_penalty - type: number - required: false - label: - en_US: Presence Penalty - zh_Hans: 存在惩罚 - human_description: - en_US: Positive values penalize new tokens based on whether they appear in the text so far. 
- zh_Hans: 正值会根据新词元是否已经出现在文本中来对其进行惩罚。 - form: form - default: 0 - min: -1.0 - max: 1.0 - step: 0.1 - - name: frequency_penalty - type: number - required: false - label: - en_US: Frequency Penalty - zh_Hans: 频率惩罚 - human_description: - en_US: Positive values penalize new tokens based on their existing frequency in the text so far. - zh_Hans: 正值会根据新词元在文本中已经出现的频率来对其进行惩罚。 - form: form - default: 1 - min: 0.1 - max: 1.0 - step: 0.1 - - name: return_citations - type: boolean - required: false - label: - en_US: Return Citations - zh_Hans: 返回引用 - human_description: - en_US: Whether to return citations in the response. - zh_Hans: 是否在响应中返回引用。 - form: form - default: true - - name: search_domain_filter - type: string - required: false - label: - en_US: Search Domain Filter - zh_Hans: 搜索域过滤器 - human_description: - en_US: Domain to filter the search results. - zh_Hans: 用于过滤搜索结果的域名。 - form: form - default: "" - - name: search_recency_filter - type: select - required: false - label: - en_US: Search Recency Filter - zh_Hans: 搜索时间过滤器 - human_description: - en_US: Filter for search results based on recency. 
- zh_Hans: 基于时间筛选搜索结果。 - form: form - default: "month" - options: - - value: day - label: - en_US: Day - zh_Hans: 天 - - value: week - label: - en_US: Week - zh_Hans: 周 - - value: month - label: - en_US: Month - zh_Hans: 月 - - value: year - label: - en_US: Year - zh_Hans: 年 diff --git a/api/core/tools/provider/builtin/pubmed/_assets/icon.svg b/api/core/tools/provider/builtin/pubmed/_assets/icon.svg deleted file mode 100644 index 6d6ff593f0c999..00000000000000 --- a/api/core/tools/provider/builtin/pubmed/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/pubmed/pubmed.py b/api/core/tools/provider/builtin/pubmed/pubmed.py deleted file mode 100644 index ea3a477c30178d..00000000000000 --- a/api/core/tools/provider/builtin/pubmed/pubmed.py +++ /dev/null @@ -1,20 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.pubmed.tools.pubmed_search import PubMedSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class PubMedProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - PubMedSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "query": "John Doe", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/pubmed/pubmed.yaml b/api/core/tools/provider/builtin/pubmed/pubmed.yaml deleted file mode 100644 index 5f8303147c397b..00000000000000 --- a/api/core/tools/provider/builtin/pubmed/pubmed.yaml +++ /dev/null @@ -1,13 +0,0 @@ -identity: - author: Pink Banana - name: pubmed - label: - en_US: PubMed - zh_Hans: PubMed - description: - en_US: A search engine for biomedical literature. 
- zh_Hans: 一款生物医学文献搜索引擎。 - icon: icon.svg - tags: - - medical - - search diff --git a/api/core/tools/provider/builtin/pubmed/tools/pubmed_search.py b/api/core/tools/provider/builtin/pubmed/tools/pubmed_search.py deleted file mode 100644 index 3a4f374ea0b0bc..00000000000000 --- a/api/core/tools/provider/builtin/pubmed/tools/pubmed_search.py +++ /dev/null @@ -1,191 +0,0 @@ -import json -import time -import urllib.error -import urllib.parse -import urllib.request -from typing import Any - -from pydantic import BaseModel, Field - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class PubMedAPIWrapper(BaseModel): - """ - Wrapper around PubMed API. - - This wrapper will use the PubMed API to conduct searches and fetch - document summaries. By default, it will return the document summaries - of the top-k results of an input search. - - Parameters: - top_k_results: number of the top-scored document used for the PubMed tool - load_max_docs: a limit to the number of loaded documents - load_all_available_meta: - if True: the `metadata` of the loaded Documents gets all available meta info - (see https://www.ncbi.nlm.nih.gov/books/NBK25499/#chapter4.ESearch) - if False: the `metadata` gets only the most informative fields. - """ - - base_url_esearch: str = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?" - base_url_efetch: str = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?" - max_retry: int = 5 - sleep_time: float = 0.2 - - # Default values for the parameters - top_k_results: int = 3 - load_max_docs: int = 25 - ARXIV_MAX_QUERY_LENGTH: int = 300 - doc_content_chars_max: int = 2000 - load_all_available_meta: bool = False - email: str = "your_email@example.com" - - def run(self, query: str) -> str: - """ - Run PubMed search and get the article meta information. 
- See https://www.ncbi.nlm.nih.gov/books/NBK25499/#chapter4.ESearch - It uses only the most informative fields of article meta information. - """ - - try: - # Retrieve the top-k results for the query - docs = [ - f"Published: {result['pub_date']}\nTitle: {result['title']}\nSummary: {result['summary']}" - for result in self.load(query[: self.ARXIV_MAX_QUERY_LENGTH]) - ] - - # Join the results and limit the character count - return "\n\n".join(docs)[: self.doc_content_chars_max] if docs else "No good PubMed Result was found" - except Exception as ex: - return f"PubMed exception: {ex}" - - def load(self, query: str) -> list[dict]: - """ - Search PubMed for documents matching the query. - Return a list of dictionaries containing the document metadata. - """ - - url = ( - self.base_url_esearch - + "db=pubmed&term=" - + str({urllib.parse.quote(query)}) - + f"&retmode=json&retmax={self.top_k_results}&usehistory=y" - ) - result = urllib.request.urlopen(url) - text = result.read().decode("utf-8") - json_text = json.loads(text) - - articles = [] - webenv = json_text["esearchresult"]["webenv"] - for uid in json_text["esearchresult"]["idlist"]: - article = self.retrieve_article(uid, webenv) - articles.append(article) - - # Convert the list of articles to a JSON string - return articles - - def retrieve_article(self, uid: str, webenv: str) -> dict: - url = self.base_url_efetch + "db=pubmed&retmode=xml&id=" + uid + "&webenv=" + webenv - - retry = 0 - while True: - try: - result = urllib.request.urlopen(url) - break - except urllib.error.HTTPError as e: - if e.code == 429 and retry < self.max_retry: - # Too Many Requests error - # wait for an exponentially increasing amount of time - print(f"Too Many Requests, waiting for {self.sleep_time:.2f} seconds...") - time.sleep(self.sleep_time) - self.sleep_time *= 2 - retry += 1 - else: - raise e - - xml_text = result.read().decode("utf-8") - - # Get title - title = "" - if "" in xml_text and "" in xml_text: - start_tag = "" - end_tag = 
"" - title = xml_text[xml_text.index(start_tag) + len(start_tag) : xml_text.index(end_tag)] - - # Get abstract - abstract = "" - if "" in xml_text and "" in xml_text: - start_tag = "" - end_tag = "" - abstract = xml_text[xml_text.index(start_tag) + len(start_tag) : xml_text.index(end_tag)] - - # Get publication date - pub_date = "" - if "" in xml_text and "" in xml_text: - start_tag = "" - end_tag = "" - pub_date = xml_text[xml_text.index(start_tag) + len(start_tag) : xml_text.index(end_tag)] - - # Return article as dictionary - article = { - "uid": uid, - "title": title, - "summary": abstract, - "pub_date": pub_date, - } - return article - - -class PubmedQueryRun(BaseModel): - """Tool that searches the PubMed API.""" - - name: str = "PubMed" - description: str = ( - "A wrapper around PubMed.org " - "Useful for when you need to answer questions about Physics, Mathematics, " - "Computer Science, Quantitative Biology, Quantitative Finance, Statistics, " - "Electrical Engineering, and Economics " - "from scientific articles on PubMed.org. " - "Input should be a search query." - ) - api_wrapper: PubMedAPIWrapper = Field(default_factory=PubMedAPIWrapper) - - def _run( - self, - query: str, - ) -> str: - """Use the Arxiv tool.""" - return self.api_wrapper.run(query) - - -class PubMedInput(BaseModel): - query: str = Field(..., description="Search query.") - - -class PubMedSearchTool(BuiltinTool): - """ - Tool for performing a search using PubMed search engine. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: - """ - Invoke the PubMed search tool. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Any]): The parameters for the tool invocation. - - Returns: - ToolInvokeMessage | list[ToolInvokeMessage]: The result of the tool invocation. 
- """ - query = tool_parameters.get("query", "") - - if not query: - return self.create_text_message("Please input query") - - tool = PubmedQueryRun(args_schema=PubMedInput) - - result = tool._run(query) - - return self.create_text_message(self.summary(user_id=user_id, content=result)) diff --git a/api/core/tools/provider/builtin/pubmed/tools/pubmed_search.yaml b/api/core/tools/provider/builtin/pubmed/tools/pubmed_search.yaml deleted file mode 100644 index 77ab809fbc3e05..00000000000000 --- a/api/core/tools/provider/builtin/pubmed/tools/pubmed_search.yaml +++ /dev/null @@ -1,23 +0,0 @@ -identity: - name: pubmed_search - author: Pink Banana - label: - en_US: PubMed Search - zh_Hans: PubMed 搜索 -description: - human: - en_US: PubMed® comprises more than 35 million citations for biomedical literature from MEDLINE, life science journals, and online books. Citations may include links to full text content from PubMed Central and publisher web sites. - zh_Hans: PubMed® 包含来自 MEDLINE、生命科学期刊和在线书籍的超过 3500 万篇生物医学文献引用。引用可能包括来自 PubMed Central 和出版商网站的全文内容链接。 - llm: Perform searches on PubMed and get results. -parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询语句 - human_description: - en_US: The search query. 
- zh_Hans: 搜索查询语句。 - llm_description: Key words for searching - form: llm diff --git a/api/core/tools/provider/builtin/regex/_assets/icon.svg b/api/core/tools/provider/builtin/regex/_assets/icon.svg deleted file mode 100644 index 0231a2b4aa9da2..00000000000000 --- a/api/core/tools/provider/builtin/regex/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/regex/regex.py b/api/core/tools/provider/builtin/regex/regex.py deleted file mode 100644 index c498105979f13e..00000000000000 --- a/api/core/tools/provider/builtin/regex/regex.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.regex.tools.regex_extract import RegexExpressionTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class RegexProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - RegexExpressionTool().invoke( - user_id="", - tool_parameters={ - "content": "1+(2+3)*4", - "expression": r"(\d+)", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/regex/regex.yaml b/api/core/tools/provider/builtin/regex/regex.yaml deleted file mode 100644 index d05776f214e8d2..00000000000000 --- a/api/core/tools/provider/builtin/regex/regex.yaml +++ /dev/null @@ -1,15 +0,0 @@ -identity: - author: zhuhao - name: regex - label: - en_US: Regex - zh_Hans: 正则表达式提取 - pt_BR: Regex - description: - en_US: A tool for regex extraction. - zh_Hans: 一个用于正则表达式内容提取的工具。 - pt_BR: A tool for regex extraction. 
- icon: icon.svg - tags: - - utilities - - productivity diff --git a/api/core/tools/provider/builtin/regex/tools/regex_extract.py b/api/core/tools/provider/builtin/regex/tools/regex_extract.py deleted file mode 100644 index 786b4694040030..00000000000000 --- a/api/core/tools/provider/builtin/regex/tools/regex_extract.py +++ /dev/null @@ -1,28 +0,0 @@ -import re -from typing import Any, Union - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class RegexExpressionTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - # get expression - content = tool_parameters.get("content", "").strip() - if not content: - return self.create_text_message("Invalid content") - expression = tool_parameters.get("expression", "").strip() - if not expression: - return self.create_text_message("Invalid expression") - try: - result = re.findall(expression, content) - return self.create_text_message(str(result)) - except Exception as e: - return self.create_text_message(f"Failed to extract result, error: {str(e)}") diff --git a/api/core/tools/provider/builtin/regex/tools/regex_extract.yaml b/api/core/tools/provider/builtin/regex/tools/regex_extract.yaml deleted file mode 100644 index de4100def176c9..00000000000000 --- a/api/core/tools/provider/builtin/regex/tools/regex_extract.yaml +++ /dev/null @@ -1,38 +0,0 @@ -identity: - name: regex_extract - author: zhuhao - label: - en_US: Regex Extract - zh_Hans: 正则表达式内容提取 - pt_BR: Regex Extract -description: - human: - en_US: A tool for extracting matching content using regular expressions. - zh_Hans: 一个用于利用正则表达式提取匹配内容结果的工具。 - pt_BR: A tool for extracting matching content using regular expressions. - llm: A tool for extracting matching content using regular expressions. 
-parameters: - - name: content - type: string - required: true - label: - en_US: Content to be extracted - zh_Hans: 内容 - pt_BR: Content to be extracted - human_description: - en_US: Content to be extracted - zh_Hans: 内容 - pt_BR: Content to be extracted - form: llm - - name: expression - type: string - required: true - label: - en_US: Regular expression - zh_Hans: 正则表达式 - pt_BR: Regular expression - human_description: - en_US: Regular expression - zh_Hans: 正则表达式 - pt_BR: Regular expression - form: llm diff --git a/api/core/tools/provider/builtin/searchapi/_assets/icon.svg b/api/core/tools/provider/builtin/searchapi/_assets/icon.svg deleted file mode 100644 index 7660b2f351c43b..00000000000000 --- a/api/core/tools/provider/builtin/searchapi/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/searchapi/searchapi.py b/api/core/tools/provider/builtin/searchapi/searchapi.py deleted file mode 100644 index 109bba8b2d8f79..00000000000000 --- a/api/core/tools/provider/builtin/searchapi/searchapi.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.searchapi.tools.google import GoogleTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class SearchAPIProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - GoogleTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={"query": "SearchApi dify", "result_type": "link"}, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/searchapi/searchapi.yaml b/api/core/tools/provider/builtin/searchapi/searchapi.yaml deleted file mode 100644 index c2fa3f398e192f..00000000000000 --- 
a/api/core/tools/provider/builtin/searchapi/searchapi.yaml +++ /dev/null @@ -1,34 +0,0 @@ -identity: - author: SearchApi - name: searchapi - label: - en_US: SearchApi - zh_Hans: SearchApi - pt_BR: SearchApi - description: - en_US: SearchApi is a robust real-time SERP API delivering structured data from a collection of search engines including Google Search, Google Jobs, YouTube, Google News, and many more. - zh_Hans: SearchApi 是一个强大的实时 SERP API,可提供来自 Google 搜索、Google 招聘、YouTube、Google 新闻等搜索引擎集合的结构化数据。 - pt_BR: SearchApi is a robust real-time SERP API delivering structured data from a collection of search engines including Google Search, Google Jobs, YouTube, Google News, and many more. - icon: icon.svg - tags: - - search - - business - - news - - productivity -credentials_for_provider: - searchapi_api_key: - type: secret-input - required: true - label: - en_US: SearchApi API key - zh_Hans: SearchApi API key - pt_BR: SearchApi API key - placeholder: - en_US: Please input your SearchApi API key - zh_Hans: 请输入你的 SearchApi API key - pt_BR: Please input your SearchApi API key - help: - en_US: Get your SearchApi API key from SearchApi - zh_Hans: 从 SearchApi 获取您的 SearchApi API key - pt_BR: Get your SearchApi API key from SearchApi - url: https://www.searchapi.io/ diff --git a/api/core/tools/provider/builtin/searchapi/tools/google.py b/api/core/tools/provider/builtin/searchapi/tools/google.py deleted file mode 100644 index 17e2978194c6a3..00000000000000 --- a/api/core/tools/provider/builtin/searchapi/tools/google.py +++ /dev/null @@ -1,112 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -SEARCH_API_URL = "https://www.searchapi.io/api/v1/search" - - -class SearchAPI: - """ - SearchAPI tool provider. 
- """ - - def __init__(self, api_key: str) -> None: - """Initialize SearchAPI tool provider.""" - self.searchapi_api_key = api_key - - def run(self, query: str, **kwargs: Any) -> str: - """Run query through SearchAPI and parse result.""" - type = kwargs.get("result_type", "text") - return self._process_response(self.results(query, **kwargs), type=type) - - def results(self, query: str, **kwargs: Any) -> dict: - """Run query through SearchAPI and return the raw result.""" - params = self.get_params(query, **kwargs) - response = requests.get( - url=SEARCH_API_URL, - params=params, - headers={"Authorization": f"Bearer {self.searchapi_api_key}"}, - ) - response.raise_for_status() - return response.json() - - def get_params(self, query: str, **kwargs: Any) -> dict[str, str]: - """Get parameters for SearchAPI.""" - return { - "engine": "google", - "q": query, - **{key: value for key, value in kwargs.items() if value not in {None, ""}}, - } - - @staticmethod - def _process_response(res: dict, type: str) -> str: - """Process response from SearchAPI.""" - if "error" in res: - raise ValueError(f"Got error from SearchApi: {res['error']}") - - toret = "" - if type == "text": - if "answer_box" in res and "answer" in res["answer_box"]: - toret += res["answer_box"]["answer"] + "\n" - if "answer_box" in res and "snippet" in res["answer_box"]: - toret += res["answer_box"]["snippet"] + "\n" - if "knowledge_graph" in res and "description" in res["knowledge_graph"]: - toret += res["knowledge_graph"]["description"] + "\n" - if "organic_results" in res and "snippet" in res["organic_results"][0]: - for item in res["organic_results"]: - toret += "content: " + item["snippet"] + "\n" + "link: " + item["link"] + "\n" - if toret == "": - toret = "No good search result found" - - elif type == "link": - if "answer_box" in res and "organic_result" in res["answer_box"]: - if "title" in res["answer_box"]["organic_result"]: - toret = ( - f"[{res['answer_box']['organic_result']['title']}]" - 
f"({res['answer_box']['organic_result']['link']})\n" - ) - elif "organic_results" in res and "link" in res["organic_results"][0]: - toret = "" - for item in res["organic_results"]: - toret += f"[{item['title']}]({item['link']})\n" - elif "related_questions" in res and "link" in res["related_questions"][0]: - toret = "" - for item in res["related_questions"]: - toret += f"[{item['title']}]({item['link']})\n" - elif "related_searches" in res and "link" in res["related_searches"][0]: - toret = "" - for item in res["related_searches"]: - toret += f"[{item['title']}]({item['link']})\n" - else: - toret = "No good search result found" - return toret - - -class GoogleTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the SearchApi tool. - """ - query = tool_parameters["query"] - result_type = tool_parameters["result_type"] - num = tool_parameters.get("num", 10) - google_domain = tool_parameters.get("google_domain", "google.com") - gl = tool_parameters.get("gl", "us") - hl = tool_parameters.get("hl", "en") - location = tool_parameters.get("location") - - api_key = self.runtime.credentials["searchapi_api_key"] - result = SearchAPI(api_key).run( - query, result_type=result_type, num=num, google_domain=google_domain, gl=gl, hl=hl, location=location - ) - - if result_type == "text": - return self.create_text_message(text=result) - return self.create_link_message(link=result) diff --git a/api/core/tools/provider/builtin/searchapi/tools/google.yaml b/api/core/tools/provider/builtin/searchapi/tools/google.yaml deleted file mode 100644 index b69a0e1d3e706b..00000000000000 --- a/api/core/tools/provider/builtin/searchapi/tools/google.yaml +++ /dev/null @@ -1,481 +0,0 @@ -identity: - name: google_search_api - author: SearchApi - label: - en_US: Google Search API - zh_Hans: Google Search API -description: - human: - en_US: A tool to retrieve answer boxes, knowledge graphs, 
snippets, and webpages from Google Search engine. - zh_Hans: 一种从 Google 搜索引擎检索答案框、知识图、片段和网页的工具。 - llm: A tool to retrieve answer boxes, knowledge graphs, snippets, and webpages from Google Search engine. -parameters: - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 询问 - human_description: - en_US: Defines the query you want to search. - zh_Hans: 定义您要搜索的查询。 - llm_description: Defines the search query you want to search. - form: llm - - name: result_type - type: select - required: true - options: - - value: text - label: - en_US: text - zh_Hans: 文本 - - value: link - label: - en_US: link - zh_Hans: 链接 - default: text - label: - en_US: Result type - zh_Hans: 结果类型 - human_description: - en_US: used for selecting the result type, text or link - zh_Hans: 用于选择结果类型,使用文本还是链接进行展示 - form: form - - name: location - type: string - required: false - label: - en_US: Location - zh_Hans: 询问 - human_description: - en_US: Defines from where you want the search to originate. (For example - New York) - zh_Hans: 定义您想要搜索的起始位置。 (例如 - 纽约) - llm_description: Defines from where you want the search to originate. (For example - New York) - form: llm - - name: gl - type: select - label: - en_US: Country - zh_Hans: 国家 - required: false - human_description: - en_US: Defines the country of the search. Default is "US". - zh_Hans: 定义搜索的国家/地区。默认为“美国”。 - llm_description: Defines the gl parameter of the Google search. 
- form: form - default: US - options: - - value: AR - label: - en_US: Argentina - zh_Hans: 阿根廷 - pt_BR: Argentina - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: AT - label: - en_US: Austria - zh_Hans: 奥地利 - pt_BR: Austria - - value: BE - label: - en_US: Belgium - zh_Hans: 比利时 - pt_BR: Belgium - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: CL - label: - en_US: Chile - zh_Hans: 智利 - pt_BR: Chile - - value: CO - label: - en_US: Colombia - zh_Hans: 哥伦比亚 - pt_BR: Colombia - - value: CN - label: - en_US: China - zh_Hans: 中国 - pt_BR: China - - value: CZ - label: - en_US: Czech Republic - zh_Hans: 捷克共和国 - pt_BR: Czech Republic - - value: DK - label: - en_US: Denmark - zh_Hans: 丹麦 - pt_BR: Denmark - - value: FI - label: - en_US: Finland - zh_Hans: 芬兰 - pt_BR: Finland - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: DE - label: - en_US: Germany - zh_Hans: 德国 - pt_BR: Germany - - value: HK - label: - en_US: Hong Kong - zh_Hans: 香港 - pt_BR: Hong Kong - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: ID - label: - en_US: Indonesia - zh_Hans: 印度尼西亚 - pt_BR: Indonesia - - value: IT - label: - en_US: Italy - zh_Hans: 意大利 - pt_BR: Italy - - value: JP - label: - en_US: Japan - zh_Hans: 日本 - pt_BR: Japan - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: MY - label: - en_US: Malaysia - zh_Hans: 马来西亚 - pt_BR: Malaysia - - value: MX - label: - en_US: Mexico - zh_Hans: 墨西哥 - pt_BR: Mexico - - value: NL - label: - en_US: Netherlands - zh_Hans: 荷兰 - pt_BR: Netherlands - - value: NZ - label: - en_US: New Zealand - zh_Hans: 新西兰 - pt_BR: New Zealand - - value: 'NO' - label: - en_US: Norway - zh_Hans: 挪威 - pt_BR: Norway - - value: PH - label: - en_US: Philippines - zh_Hans: 菲律宾 - pt_BR: Philippines - - value: PL - label: - en_US: Poland - zh_Hans: 波兰 - pt_BR: Poland 
- - value: PT - label: - en_US: Portugal - zh_Hans: 葡萄牙 - pt_BR: Portugal - - value: RU - label: - en_US: Russia - zh_Hans: 俄罗斯 - pt_BR: Russia - - value: SA - label: - en_US: Saudi Arabia - zh_Hans: 沙特阿拉伯 - pt_BR: Saudi Arabia - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: ZA - label: - en_US: South Africa - zh_Hans: 南非 - pt_BR: South Africa - - value: ES - label: - en_US: Spain - zh_Hans: 西班牙 - pt_BR: Spain - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden - - value: CH - label: - en_US: Switzerland - zh_Hans: 瑞士 - pt_BR: Switzerland - - value: TW - label: - en_US: Taiwan - zh_Hans: 台湾 - pt_BR: Taiwan - - value: TH - label: - en_US: Thailand - zh_Hans: 泰国 - pt_BR: Thailand - - value: TR - label: - en_US: Turkey - zh_Hans: 土耳其 - pt_BR: Turkey - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - name: hl - type: select - label: - en_US: Language - zh_Hans: 语言 - human_description: - en_US: Defines the interface language of the search. Default is "en". 
- zh_Hans: 定义搜索的界面语言。默认为“en”。 - required: false - default: en - form: form - options: - - value: ar - label: - en_US: Arabic - zh_Hans: 阿拉伯语 - - value: bg - label: - en_US: Bulgarian - zh_Hans: 保加利亚语 - - value: ca - label: - en_US: Catalan - zh_Hans: 加泰罗尼亚语 - - value: zh-cn - label: - en_US: Chinese (Simplified) - zh_Hans: 中文(简体) - - value: zh-tw - label: - en_US: Chinese (Traditional) - zh_Hans: 中文(繁体) - - value: cs - label: - en_US: Czech - zh_Hans: 捷克语 - - value: da - label: - en_US: Danish - zh_Hans: 丹麦语 - - value: nl - label: - en_US: Dutch - zh_Hans: 荷兰语 - - value: en - label: - en_US: English - zh_Hans: 英语 - - value: et - label: - en_US: Estonian - zh_Hans: 爱沙尼亚语 - - value: fi - label: - en_US: Finnish - zh_Hans: 芬兰语 - - value: fr - label: - en_US: French - zh_Hans: 法语 - - value: de - label: - en_US: German - zh_Hans: 德语 - - value: el - label: - en_US: Greek - zh_Hans: 希腊语 - - value: iw - label: - en_US: Hebrew - zh_Hans: 希伯来语 - - value: hi - label: - en_US: Hindi - zh_Hans: 印地语 - - value: hu - label: - en_US: Hungarian - zh_Hans: 匈牙利语 - - value: id - label: - en_US: Indonesian - zh_Hans: 印尼语 - - value: it - label: - en_US: Italian - zh_Hans: 意大利语 - - value: ja - label: - en_US: Japanese - zh_Hans: 日语 - - value: kn - label: - en_US: Kannada - zh_Hans: 卡纳达语 - - value: ko - label: - en_US: Korean - zh_Hans: 韩语 - - value: lv - label: - en_US: Latvian - zh_Hans: 拉脱维亚语 - - value: lt - label: - en_US: Lithuanian - zh_Hans: 立陶宛语 - - value: my - label: - en_US: Malay - zh_Hans: 马来语 - - value: ml - label: - en_US: Malayalam - zh_Hans: 马拉雅拉姆语 - - value: mr - label: - en_US: Marathi - zh_Hans: 马拉地语 - - value: "no" - label: - en_US: Norwegian - zh_Hans: 挪威语 - - value: pl - label: - en_US: Polish - zh_Hans: 波兰语 - - value: pt-br - label: - en_US: Portuguese (Brazil) - zh_Hans: 葡萄牙语(巴西) - - value: pt-pt - label: - en_US: Portuguese (Portugal) - zh_Hans: 葡萄牙语(葡萄牙) - - value: pa - label: - en_US: Punjabi - zh_Hans: 旁遮普语 - - value: ro - label: - en_US: Romanian - zh_Hans: 
罗马尼亚语 - - value: ru - label: - en_US: Russian - zh_Hans: 俄语 - - value: sr - label: - en_US: Serbian - zh_Hans: 塞尔维亚语 - - value: sk - label: - en_US: Slovak - zh_Hans: 斯洛伐克语 - - value: sl - label: - en_US: Slovenian - zh_Hans: 斯洛文尼亚语 - - value: es - label: - en_US: Spanish - zh_Hans: 西班牙语 - - value: sv - label: - en_US: Swedish - zh_Hans: 瑞典语 - - value: ta - label: - en_US: Tamil - zh_Hans: 泰米尔语 - - value: te - label: - en_US: Telugu - zh_Hans: 泰卢固语 - - value: th - label: - en_US: Thai - zh_Hans: 泰语 - - value: tr - label: - en_US: Turkish - zh_Hans: 土耳其语 - - value: uk - label: - en_US: Ukrainian - zh_Hans: 乌克兰语 - - value: vi - label: - en_US: Vietnamese - zh_Hans: 越南语 - - name: google_domain - type: string - required: false - label: - en_US: google_domain - zh_Hans: google_domain - human_description: - en_US: Defines the Google domain of the search. Default is "google.com". - zh_Hans: 定义搜索的 Google 域。默认为“google.com”。 - llm_description: Defines Google domain in which you want to search. - form: llm - - name: num - type: number - required: false - label: - en_US: num - zh_Hans: num - human_description: - en_US: Specifies the number of results to display per page. Default is 10. Max number - 100, min - 1. - zh_Hans: 指定每页显示的结果数。默认值为 10。最大数量 - 100,最小数量 - 1。 - llm_description: Specifies the num of results to display per page. - form: llm diff --git a/api/core/tools/provider/builtin/searchapi/tools/google_jobs.py b/api/core/tools/provider/builtin/searchapi/tools/google_jobs.py deleted file mode 100644 index c478bc108b47e1..00000000000000 --- a/api/core/tools/provider/builtin/searchapi/tools/google_jobs.py +++ /dev/null @@ -1,102 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -SEARCH_API_URL = "https://www.searchapi.io/api/v1/search" - - -class SearchAPI: - """ - SearchAPI tool provider. 
- """ - - def __init__(self, api_key: str) -> None: - """Initialize SearchAPI tool provider.""" - self.searchapi_api_key = api_key - - def run(self, query: str, **kwargs: Any) -> str: - """Run query through SearchAPI and parse result.""" - type = kwargs.get("result_type", "text") - return self._process_response(self.results(query, **kwargs), type=type) - - def results(self, query: str, **kwargs: Any) -> dict: - """Run query through SearchAPI and return the raw result.""" - params = self.get_params(query, **kwargs) - response = requests.get( - url=SEARCH_API_URL, - params=params, - headers={"Authorization": f"Bearer {self.searchapi_api_key}"}, - ) - response.raise_for_status() - return response.json() - - def get_params(self, query: str, **kwargs: Any) -> dict[str, str]: - """Get parameters for SearchAPI.""" - return { - "engine": "google_jobs", - "q": query, - **{key: value for key, value in kwargs.items() if value not in {None, ""}}, - } - - @staticmethod - def _process_response(res: dict, type: str) -> str: - """Process response from SearchAPI.""" - if "error" in res: - raise ValueError(f"Got error from SearchApi: {res['error']}") - - toret = "" - if type == "text": - if "jobs" in res and "title" in res["jobs"][0]: - for item in res["jobs"]: - toret += ( - "title: " - + item["title"] - + "\n" - + "company_name: " - + item["company_name"] - + "content: " - + item["description"] - + "\n" - ) - if toret == "": - toret = "No good search result found" - - elif type == "link": - if "jobs" in res and "apply_link" in res["jobs"][0]: - for item in res["jobs"]: - toret += f"[{item['title']} - {item['company_name']}]({item['apply_link']})\n" - else: - toret = "No good search result found" - return toret - - -class GoogleJobsTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the SearchApi tool. 
- """ - query = tool_parameters["query"] - result_type = tool_parameters["result_type"] - is_remote = tool_parameters.get("is_remote") - google_domain = tool_parameters.get("google_domain", "google.com") - gl = tool_parameters.get("gl", "us") - hl = tool_parameters.get("hl", "en") - location = tool_parameters.get("location") - - ltype = 1 if is_remote else None - - api_key = self.runtime.credentials["searchapi_api_key"] - result = SearchAPI(api_key).run( - query, result_type=result_type, google_domain=google_domain, gl=gl, hl=hl, location=location, ltype=ltype - ) - - if result_type == "text": - return self.create_text_message(text=result) - return self.create_link_message(link=result) diff --git a/api/core/tools/provider/builtin/searchapi/tools/google_jobs.yaml b/api/core/tools/provider/builtin/searchapi/tools/google_jobs.yaml deleted file mode 100644 index 9033bc0f8784cc..00000000000000 --- a/api/core/tools/provider/builtin/searchapi/tools/google_jobs.yaml +++ /dev/null @@ -1,478 +0,0 @@ -identity: - name: google_jobs_api - author: SearchApi - label: - en_US: Google Jobs API - zh_Hans: Google Jobs API -description: - human: - en_US: A tool to retrieve job titles, company names and description from Google Jobs engine. - zh_Hans: 一个从 Google 招聘引擎检索职位名称、公司名称和描述的工具。 - llm: A tool to retrieve job titles, company names and description from Google Jobs engine. -parameters: - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 询问 - human_description: - en_US: Defines the query you want to search. - zh_Hans: 定义您要搜索的查询。 - llm_description: Defines the search query you want to search. 
- form: llm - - name: result_type - type: select - required: true - options: - - value: text - label: - en_US: text - zh_Hans: 文本 - - value: link - label: - en_US: link - zh_Hans: 链接 - default: text - label: - en_US: Result type - zh_Hans: 结果类型 - human_description: - en_US: used for selecting the result type, text or link - zh_Hans: 用于选择结果类型,使用文本还是链接进行展示 - form: form - - name: location - type: string - required: false - label: - en_US: Location - zh_Hans: 询问 - human_description: - en_US: Defines from where you want the search to originate. (For example - New York) - zh_Hans: 定义您想要搜索的起始位置。 (例如 - 纽约) - llm_description: Defines from where you want the search to originate. (For example - New York) - form: llm - - name: gl - type: select - label: - en_US: Country - zh_Hans: 国家 - required: false - human_description: - en_US: Defines the country of the search. Default is "US". - zh_Hans: 定义搜索的国家/地区。默认为“美国”。 - llm_description: Defines the gl parameter of the Google search. - form: form - default: US - options: - - value: AR - label: - en_US: Argentina - zh_Hans: 阿根廷 - pt_BR: Argentina - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: AT - label: - en_US: Austria - zh_Hans: 奥地利 - pt_BR: Austria - - value: BE - label: - en_US: Belgium - zh_Hans: 比利时 - pt_BR: Belgium - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: CL - label: - en_US: Chile - zh_Hans: 智利 - pt_BR: Chile - - value: CO - label: - en_US: Colombia - zh_Hans: 哥伦比亚 - pt_BR: Colombia - - value: CN - label: - en_US: China - zh_Hans: 中国 - pt_BR: China - - value: CZ - label: - en_US: Czech Republic - zh_Hans: 捷克共和国 - pt_BR: Czech Republic - - value: DK - label: - en_US: Denmark - zh_Hans: 丹麦 - pt_BR: Denmark - - value: FI - label: - en_US: Finland - zh_Hans: 芬兰 - pt_BR: Finland - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: DE - label: - en_US: 
Germany - zh_Hans: 德国 - pt_BR: Germany - - value: HK - label: - en_US: Hong Kong - zh_Hans: 香港 - pt_BR: Hong Kong - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: ID - label: - en_US: Indonesia - zh_Hans: 印度尼西亚 - pt_BR: Indonesia - - value: IT - label: - en_US: Italy - zh_Hans: 意大利 - pt_BR: Italy - - value: JP - label: - en_US: Japan - zh_Hans: 日本 - pt_BR: Japan - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: MY - label: - en_US: Malaysia - zh_Hans: 马来西亚 - pt_BR: Malaysia - - value: MX - label: - en_US: Mexico - zh_Hans: 墨西哥 - pt_BR: Mexico - - value: NL - label: - en_US: Netherlands - zh_Hans: 荷兰 - pt_BR: Netherlands - - value: NZ - label: - en_US: New Zealand - zh_Hans: 新西兰 - pt_BR: New Zealand - - value: 'NO' - label: - en_US: Norway - zh_Hans: 挪威 - pt_BR: Norway - - value: PH - label: - en_US: Philippines - zh_Hans: 菲律宾 - pt_BR: Philippines - - value: PL - label: - en_US: Poland - zh_Hans: 波兰 - pt_BR: Poland - - value: PT - label: - en_US: Portugal - zh_Hans: 葡萄牙 - pt_BR: Portugal - - value: RU - label: - en_US: Russia - zh_Hans: 俄罗斯 - pt_BR: Russia - - value: SA - label: - en_US: Saudi Arabia - zh_Hans: 沙特阿拉伯 - pt_BR: Saudi Arabia - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: ZA - label: - en_US: South Africa - zh_Hans: 南非 - pt_BR: South Africa - - value: ES - label: - en_US: Spain - zh_Hans: 西班牙 - pt_BR: Spain - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden - - value: CH - label: - en_US: Switzerland - zh_Hans: 瑞士 - pt_BR: Switzerland - - value: TW - label: - en_US: Taiwan - zh_Hans: 台湾 - pt_BR: Taiwan - - value: TH - label: - en_US: Thailand - zh_Hans: 泰国 - pt_BR: Thailand - - value: TR - label: - en_US: Turkey - zh_Hans: 土耳其 - pt_BR: Turkey - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - name: hl - type: select - 
label: - en_US: Language - zh_Hans: 语言 - human_description: - en_US: Defines the interface language of the search. Default is "en". - zh_Hans: 定义搜索的界面语言。默认为“en”。 - required: false - default: en - form: form - options: - - value: ar - label: - en_US: Arabic - zh_Hans: 阿拉伯语 - - value: bg - label: - en_US: Bulgarian - zh_Hans: 保加利亚语 - - value: ca - label: - en_US: Catalan - zh_Hans: 加泰罗尼亚语 - - value: zh-cn - label: - en_US: Chinese (Simplified) - zh_Hans: 中文(简体) - - value: zh-tw - label: - en_US: Chinese (Traditional) - zh_Hans: 中文(繁体) - - value: cs - label: - en_US: Czech - zh_Hans: 捷克语 - - value: da - label: - en_US: Danish - zh_Hans: 丹麦语 - - value: nl - label: - en_US: Dutch - zh_Hans: 荷兰语 - - value: en - label: - en_US: English - zh_Hans: 英语 - - value: et - label: - en_US: Estonian - zh_Hans: 爱沙尼亚语 - - value: fi - label: - en_US: Finnish - zh_Hans: 芬兰语 - - value: fr - label: - en_US: French - zh_Hans: 法语 - - value: de - label: - en_US: German - zh_Hans: 德语 - - value: el - label: - en_US: Greek - zh_Hans: 希腊语 - - value: iw - label: - en_US: Hebrew - zh_Hans: 希伯来语 - - value: hi - label: - en_US: Hindi - zh_Hans: 印地语 - - value: hu - label: - en_US: Hungarian - zh_Hans: 匈牙利语 - - value: id - label: - en_US: Indonesian - zh_Hans: 印尼语 - - value: it - label: - en_US: Italian - zh_Hans: 意大利语 - - value: ja - label: - en_US: Japanese - zh_Hans: 日语 - - value: kn - label: - en_US: Kannada - zh_Hans: 卡纳达语 - - value: ko - label: - en_US: Korean - zh_Hans: 韩语 - - value: lv - label: - en_US: Latvian - zh_Hans: 拉脱维亚语 - - value: lt - label: - en_US: Lithuanian - zh_Hans: 立陶宛语 - - value: my - label: - en_US: Malay - zh_Hans: 马来语 - - value: ml - label: - en_US: Malayalam - zh_Hans: 马拉雅拉姆语 - - value: mr - label: - en_US: Marathi - zh_Hans: 马拉地语 - - value: "no" - label: - en_US: Norwegian - zh_Hans: 挪威语 - - value: pl - label: - en_US: Polish - zh_Hans: 波兰语 - - value: pt-br - label: - en_US: Portuguese (Brazil) - zh_Hans: 葡萄牙语(巴西) - - value: pt-pt - label: - en_US: Portuguese (Portugal) 
- zh_Hans: 葡萄牙语(葡萄牙) - - value: pa - label: - en_US: Punjabi - zh_Hans: 旁遮普语 - - value: ro - label: - en_US: Romanian - zh_Hans: 罗马尼亚语 - - value: ru - label: - en_US: Russian - zh_Hans: 俄语 - - value: sr - label: - en_US: Serbian - zh_Hans: 塞尔维亚语 - - value: sk - label: - en_US: Slovak - zh_Hans: 斯洛伐克语 - - value: sl - label: - en_US: Slovenian - zh_Hans: 斯洛文尼亚语 - - value: es - label: - en_US: Spanish - zh_Hans: 西班牙语 - - value: sv - label: - en_US: Swedish - zh_Hans: 瑞典语 - - value: ta - label: - en_US: Tamil - zh_Hans: 泰米尔语 - - value: te - label: - en_US: Telugu - zh_Hans: 泰卢固语 - - value: th - label: - en_US: Thai - zh_Hans: 泰语 - - value: tr - label: - en_US: Turkish - zh_Hans: 土耳其语 - - value: uk - label: - en_US: Ukrainian - zh_Hans: 乌克兰语 - - value: vi - label: - en_US: Vietnamese - zh_Hans: 越南语 - - name: is_remote - type: select - label: - en_US: is_remote - zh_Hans: 很遥远 - human_description: - en_US: Filter results based on the work arrangement. Set it to true to find jobs that offer work from home or remote work opportunities. - zh_Hans: 根据工作安排过滤结果。将其设置为 true 可查找提供在家工作或远程工作机会的工作。 - required: false - form: form - options: - - value: 'true' - label: - en_US: "true" - zh_Hans: "true" - - value: 'false' - label: - en_US: "false" - zh_Hans: "false" diff --git a/api/core/tools/provider/builtin/searchapi/tools/google_news.py b/api/core/tools/provider/builtin/searchapi/tools/google_news.py deleted file mode 100644 index 562bc01964b4c3..00000000000000 --- a/api/core/tools/provider/builtin/searchapi/tools/google_news.py +++ /dev/null @@ -1,97 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -SEARCH_API_URL = "https://www.searchapi.io/api/v1/search" - - -class SearchAPI: - """ - SearchAPI tool provider. 
- """ - - def __init__(self, api_key: str) -> None: - """Initialize SearchAPI tool provider.""" - self.searchapi_api_key = api_key - - def run(self, query: str, **kwargs: Any) -> str: - """Run query through SearchAPI and parse result.""" - type = kwargs.get("result_type", "text") - return self._process_response(self.results(query, **kwargs), type=type) - - def results(self, query: str, **kwargs: Any) -> dict: - """Run query through SearchAPI and return the raw result.""" - params = self.get_params(query, **kwargs) - response = requests.get( - url=SEARCH_API_URL, - params=params, - headers={"Authorization": f"Bearer {self.searchapi_api_key}"}, - ) - response.raise_for_status() - return response.json() - - def get_params(self, query: str, **kwargs: Any) -> dict[str, str]: - """Get parameters for SearchAPI.""" - return { - "engine": "google_news", - "q": query, - **{key: value for key, value in kwargs.items() if value not in {None, ""}}, - } - - @staticmethod - def _process_response(res: dict, type: str) -> str: - """Process response from SearchAPI.""" - if "error" in res: - raise ValueError(f"Got error from SearchApi: {res['error']}") - - toret = "" - if type == "text": - if "organic_results" in res and "snippet" in res["organic_results"][0]: - for item in res["organic_results"]: - toret += "content: " + item["snippet"] + "\n" + "link: " + item["link"] + "\n" - if "top_stories" in res and "title" in res["top_stories"][0]: - for item in res["top_stories"]: - toret += "title: " + item["title"] + "\n" + "link: " + item["link"] + "\n" - if toret == "": - toret = "No good search result found" - - elif type == "link": - if "organic_results" in res and "title" in res["organic_results"][0]: - for item in res["organic_results"]: - toret += f"[{item['title']}]({item['link']})\n" - elif "top_stories" in res and "title" in res["top_stories"][0]: - for item in res["top_stories"]: - toret += f"[{item['title']}]({item['link']})\n" - else: - toret = "No good search result found" - 
return toret - - -class GoogleNewsTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the SearchApi tool. - """ - query = tool_parameters["query"] - result_type = tool_parameters["result_type"] - num = tool_parameters.get("num", 10) - google_domain = tool_parameters.get("google_domain", "google.com") - gl = tool_parameters.get("gl", "us") - hl = tool_parameters.get("hl", "en") - location = tool_parameters.get("location") - - api_key = self.runtime.credentials["searchapi_api_key"] - result = SearchAPI(api_key).run( - query, result_type=result_type, num=num, google_domain=google_domain, gl=gl, hl=hl, location=location - ) - - if result_type == "text": - return self.create_text_message(text=result) - return self.create_link_message(link=result) diff --git a/api/core/tools/provider/builtin/searchapi/tools/google_news.yaml b/api/core/tools/provider/builtin/searchapi/tools/google_news.yaml deleted file mode 100644 index cbb0edf9829595..00000000000000 --- a/api/core/tools/provider/builtin/searchapi/tools/google_news.yaml +++ /dev/null @@ -1,482 +0,0 @@ -identity: - name: google_news_api - author: SearchApi - label: - en_US: Google News API - zh_Hans: Google News API -description: - human: - en_US: A tool to retrieve organic search results snippets and links from Google News engine. - zh_Hans: 一种从 Google 新闻引擎检索有机搜索结果片段和链接的工具。 - llm: A tool to retrieve organic search results snippets and links from Google News engine. -parameters: - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 询问 - human_description: - en_US: Defines the query you want to search. - zh_Hans: 定义您要搜索的查询。 - llm_description: Defines the search query you want to search. 
- form: llm - - name: result_type - type: select - required: true - options: - - value: text - label: - en_US: text - zh_Hans: 文本 - - value: link - label: - en_US: link - zh_Hans: 链接 - default: text - label: - en_US: Result type - zh_Hans: 结果类型 - human_description: - en_US: used for selecting the result type, text or link. - zh_Hans: 用于选择结果类型,使用文本还是链接进行展示。 - form: form - - name: location - type: string - required: false - label: - en_US: Location - zh_Hans: 询问 - human_description: - en_US: Defines from where you want the search to originate. (For example - New York) - zh_Hans: 定义您想要搜索的起始位置。 (例如 - 纽约) - llm_description: Defines from where you want the search to originate. (For example - New York) - form: llm - - name: gl - type: select - label: - en_US: Country - zh_Hans: 国家 - required: false - human_description: - en_US: Defines the country of the search. Default is "US". - zh_Hans: 定义搜索的国家/地区。默认为“美国”。 - llm_description: Defines the gl parameter of the Google search. - form: form - default: US - options: - - value: AR - label: - en_US: Argentina - zh_Hans: 阿根廷 - pt_BR: Argentina - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: AT - label: - en_US: Austria - zh_Hans: 奥地利 - pt_BR: Austria - - value: BE - label: - en_US: Belgium - zh_Hans: 比利时 - pt_BR: Belgium - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: CL - label: - en_US: Chile - zh_Hans: 智利 - pt_BR: Chile - - value: CO - label: - en_US: Colombia - zh_Hans: 哥伦比亚 - pt_BR: Colombia - - value: CN - label: - en_US: China - zh_Hans: 中国 - pt_BR: China - - value: CZ - label: - en_US: Czech Republic - zh_Hans: 捷克共和国 - pt_BR: Czech Republic - - value: DK - label: - en_US: Denmark - zh_Hans: 丹麦 - pt_BR: Denmark - - value: FI - label: - en_US: Finland - zh_Hans: 芬兰 - pt_BR: Finland - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: DE - label: - en_US: 
Germany - zh_Hans: 德国 - pt_BR: Germany - - value: HK - label: - en_US: Hong Kong - zh_Hans: 香港 - pt_BR: Hong Kong - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: ID - label: - en_US: Indonesia - zh_Hans: 印度尼西亚 - pt_BR: Indonesia - - value: IT - label: - en_US: Italy - zh_Hans: 意大利 - pt_BR: Italy - - value: JP - label: - en_US: Japan - zh_Hans: 日本 - pt_BR: Japan - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: MY - label: - en_US: Malaysia - zh_Hans: 马来西亚 - pt_BR: Malaysia - - value: MX - label: - en_US: Mexico - zh_Hans: 墨西哥 - pt_BR: Mexico - - value: NL - label: - en_US: Netherlands - zh_Hans: 荷兰 - pt_BR: Netherlands - - value: NZ - label: - en_US: New Zealand - zh_Hans: 新西兰 - pt_BR: New Zealand - - value: 'NO' - label: - en_US: Norway - zh_Hans: 挪威 - pt_BR: Norway - - value: PH - label: - en_US: Philippines - zh_Hans: 菲律宾 - pt_BR: Philippines - - value: PL - label: - en_US: Poland - zh_Hans: 波兰 - pt_BR: Poland - - value: PT - label: - en_US: Portugal - zh_Hans: 葡萄牙 - pt_BR: Portugal - - value: RU - label: - en_US: Russia - zh_Hans: 俄罗斯 - pt_BR: Russia - - value: SA - label: - en_US: Saudi Arabia - zh_Hans: 沙特阿拉伯 - pt_BR: Saudi Arabia - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: ZA - label: - en_US: South Africa - zh_Hans: 南非 - pt_BR: South Africa - - value: ES - label: - en_US: Spain - zh_Hans: 西班牙 - pt_BR: Spain - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden - - value: CH - label: - en_US: Switzerland - zh_Hans: 瑞士 - pt_BR: Switzerland - - value: TW - label: - en_US: Taiwan - zh_Hans: 台湾 - pt_BR: Taiwan - - value: TH - label: - en_US: Thailand - zh_Hans: 泰国 - pt_BR: Thailand - - value: TR - label: - en_US: Turkey - zh_Hans: 土耳其 - pt_BR: Turkey - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - name: hl - type: select - 
label: - en_US: Language - zh_Hans: 语言 - human_description: - en_US: Defines the interface language of the search. Default is "en". - zh_Hans: 定义搜索的界面语言。默认为“en”。 - required: false - default: en - form: form - options: - - value: ar - label: - en_US: Arabic - zh_Hans: 阿拉伯语 - - value: bg - label: - en_US: Bulgarian - zh_Hans: 保加利亚语 - - value: ca - label: - en_US: Catalan - zh_Hans: 加泰罗尼亚语 - - value: zh-cn - label: - en_US: Chinese (Simplified) - zh_Hans: 中文(简体) - - value: zh-tw - label: - en_US: Chinese (Traditional) - zh_Hans: 中文(繁体) - - value: cs - label: - en_US: Czech - zh_Hans: 捷克语 - - value: da - label: - en_US: Danish - zh_Hans: 丹麦语 - - value: nl - label: - en_US: Dutch - zh_Hans: 荷兰语 - - value: en - label: - en_US: English - zh_Hans: 英语 - - value: et - label: - en_US: Estonian - zh_Hans: 爱沙尼亚语 - - value: fi - label: - en_US: Finnish - zh_Hans: 芬兰语 - - value: fr - label: - en_US: French - zh_Hans: 法语 - - value: de - label: - en_US: German - zh_Hans: 德语 - - value: el - label: - en_US: Greek - zh_Hans: 希腊语 - - value: iw - label: - en_US: Hebrew - zh_Hans: 希伯来语 - - value: hi - label: - en_US: Hindi - zh_Hans: 印地语 - - value: hu - label: - en_US: Hungarian - zh_Hans: 匈牙利语 - - value: id - label: - en_US: Indonesian - zh_Hans: 印尼语 - - value: it - label: - en_US: Italian - zh_Hans: 意大利语 - - value: ja - label: - en_US: Japanese - zh_Hans: 日语 - - value: kn - label: - en_US: Kannada - zh_Hans: 卡纳达语 - - value: ko - label: - en_US: Korean - zh_Hans: 韩语 - - value: lv - label: - en_US: Latvian - zh_Hans: 拉脱维亚语 - - value: lt - label: - en_US: Lithuanian - zh_Hans: 立陶宛语 - - value: my - label: - en_US: Malay - zh_Hans: 马来语 - - value: ml - label: - en_US: Malayalam - zh_Hans: 马拉雅拉姆语 - - value: mr - label: - en_US: Marathi - zh_Hans: 马拉地语 - - value: "no" - label: - en_US: Norwegian - zh_Hans: 挪威语 - - value: pl - label: - en_US: Polish - zh_Hans: 波兰语 - - value: pt-br - label: - en_US: Portuguese (Brazil) - zh_Hans: 葡萄牙语(巴西) - - value: pt-pt - label: - en_US: Portuguese (Portugal) 
- zh_Hans: 葡萄牙语(葡萄牙) - - value: pa - label: - en_US: Punjabi - zh_Hans: 旁遮普语 - - value: ro - label: - en_US: Romanian - zh_Hans: 罗马尼亚语 - - value: ru - label: - en_US: Russian - zh_Hans: 俄语 - - value: sr - label: - en_US: Serbian - zh_Hans: 塞尔维亚语 - - value: sk - label: - en_US: Slovak - zh_Hans: 斯洛伐克语 - - value: sl - label: - en_US: Slovenian - zh_Hans: 斯洛文尼亚语 - - value: es - label: - en_US: Spanish - zh_Hans: 西班牙语 - - value: sv - label: - en_US: Swedish - zh_Hans: 瑞典语 - - value: ta - label: - en_US: Tamil - zh_Hans: 泰米尔语 - - value: te - label: - en_US: Telugu - zh_Hans: 泰卢固语 - - value: th - label: - en_US: Thai - zh_Hans: 泰语 - - value: tr - label: - en_US: Turkish - zh_Hans: 土耳其语 - - value: uk - label: - en_US: Ukrainian - zh_Hans: 乌克兰语 - - value: vi - label: - en_US: Vietnamese - zh_Hans: 越南语 - - name: google_domain - type: string - required: false - label: - en_US: google_domain - zh_Hans: google_domain - human_description: - en_US: Defines the Google domain of the search. Default is "google.com". - zh_Hans: 定义搜索的 Google 域。默认为“google.com”。 - llm_description: Defines Google domain in which you want to search. - form: llm - - name: num - type: number - required: false - label: - en_US: num - zh_Hans: num - human_description: - en_US: Specifies the number of results to display per page. Default is 10. Max number - 100, min - 1. - zh_Hans: 指定每页显示的结果数。默认值为 10。最大数量 - 100,最小数量 - 1。 - pt_BR: Specifies the number of results to display per page. Default is 10. Max number - 100, min - 1. - llm_description: Specifies the num of results to display per page. 
- form: llm diff --git a/api/core/tools/provider/builtin/searchapi/tools/youtube_transcripts.py b/api/core/tools/provider/builtin/searchapi/tools/youtube_transcripts.py deleted file mode 100644 index 1867cf7be79be5..00000000000000 --- a/api/core/tools/provider/builtin/searchapi/tools/youtube_transcripts.py +++ /dev/null @@ -1,75 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -SEARCH_API_URL = "https://www.searchapi.io/api/v1/search" - - -class SearchAPI: - """ - SearchAPI tool provider. - """ - - def __init__(self, api_key: str) -> None: - """Initialize SearchAPI tool provider.""" - self.searchapi_api_key = api_key - - def run(self, video_id: str, language: str, **kwargs: Any) -> str: - """Run video_id through SearchAPI and parse result.""" - return self._process_response(self.results(video_id, language, **kwargs)) - - def results(self, video_id: str, language: str, **kwargs: Any) -> dict: - """Run video_id through SearchAPI and return the raw result.""" - params = self.get_params(video_id, language, **kwargs) - response = requests.get( - url=SEARCH_API_URL, - params=params, - headers={"Authorization": f"Bearer {self.searchapi_api_key}"}, - ) - response.raise_for_status() - return response.json() - - def get_params(self, video_id: str, language: str, **kwargs: Any) -> dict[str, str]: - """Get parameters for SearchAPI.""" - return { - "engine": "youtube_transcripts", - "video_id": video_id, - "lang": language or "en", - **{key: value for key, value in kwargs.items() if value not in {None, ""}}, - } - - @staticmethod - def _process_response(res: dict) -> str: - """Process response from SearchAPI.""" - if "error" in res: - raise ValueError(f"Got error from SearchApi: {res['error']}") - - toret = "" - if "transcripts" in res and "text" in res["transcripts"][0]: - for item in res["transcripts"]: - toret += item["text"] + " " - if toret == 
"": - toret = "No good search result found" - - return toret - - -class YoutubeTranscriptsTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the SearchApi tool. - """ - video_id = tool_parameters["video_id"] - language = tool_parameters.get("language", "en") - - api_key = self.runtime.credentials["searchapi_api_key"] - result = SearchAPI(api_key).run(video_id, language=language) - - return self.create_text_message(text=result) diff --git a/api/core/tools/provider/builtin/searchapi/tools/youtube_transcripts.yaml b/api/core/tools/provider/builtin/searchapi/tools/youtube_transcripts.yaml deleted file mode 100644 index 8bdcd6bb936d96..00000000000000 --- a/api/core/tools/provider/builtin/searchapi/tools/youtube_transcripts.yaml +++ /dev/null @@ -1,34 +0,0 @@ -identity: - name: youtube_transcripts_api - author: SearchApi - label: - en_US: YouTube Transcripts API - zh_Hans: YouTube 脚本 API -description: - human: - en_US: A tool to retrieve transcripts from the specific YouTube video. - zh_Hans: 一种从特定 YouTube 视频检索文字记录的工具。 - llm: A tool to retrieve transcripts from the specific YouTube video. -parameters: - - name: video_id - type: string - required: true - label: - en_US: video_id - zh_Hans: 视频ID - human_description: - en_US: Used to define the video you want to search. You can find the video id's in YouTube page that appears in URL. For example - https://www.youtube.com/watch?v=video_id. - zh_Hans: 用于定义要搜索的视频。您可以在 URL 中显示的 YouTube 页面中找到视频 ID。例如 - https://www.youtube.com/watch?v=video_id。 - llm_description: Used to define the video you want to search. - form: llm - - name: language - type: string - required: false - label: - en_US: language - zh_Hans: 语言 - human_description: - en_US: Used to set the language for transcripts. The default value is "en". You can find all supported languages in SearchApi documentation. 
- zh_Hans: 用于设置成绩单的语言。默认值为“en”。您可以在 SearchApi 文档中找到所有支持的语言。 - llm_description: Used to set the language for transcripts. - form: llm diff --git a/api/core/tools/provider/builtin/searxng/_assets/icon.svg b/api/core/tools/provider/builtin/searxng/_assets/icon.svg deleted file mode 100644 index b94fe3728adbff..00000000000000 --- a/api/core/tools/provider/builtin/searxng/_assets/icon.svg +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - image/svg+xml - - - - - - - - - - - - diff --git a/api/core/tools/provider/builtin/searxng/docker/settings.yml b/api/core/tools/provider/builtin/searxng/docker/settings.yml deleted file mode 100644 index 18e18688002cbc..00000000000000 --- a/api/core/tools/provider/builtin/searxng/docker/settings.yml +++ /dev/null @@ -1,2501 +0,0 @@ -general: - # Debug mode, only for development. Is overwritten by ${SEARXNG_DEBUG} - debug: false - # displayed name - instance_name: "searxng" - # For example: https://example.com/privacy - privacypolicy_url: false - # use true to use your own donation page written in searx/info/en/donate.md - # use false to disable the donation link - donation_url: false - # mailto:contact@example.com - contact_url: false - # record stats - enable_metrics: true - -brand: - new_issue_url: https://github.com/searxng/searxng/issues/new - docs_url: https://docs.searxng.org/ - public_instances: https://searx.space - wiki_url: https://github.com/searxng/searxng/wiki - issue_url: https://github.com/searxng/searxng/issues - # custom: - # maintainer: "Jon Doe" - # # Custom entries in the footer: [title]: [link] - # links: - # Uptime: https://uptime.searxng.org/history/darmarit-org - # About: "https://searxng.org" - -search: - # Filter results. 0: None, 1: Moderate, 2: Strict - safe_search: 0 - # Existing autocomplete backends: "dbpedia", "duckduckgo", "google", "yandex", "mwmbl", - # "seznam", "startpage", "stract", "swisscows", "qwant", "wikipedia" - leave blank to turn it off - # by default. 
- autocomplete: "" - # minimun characters to type before autocompleter starts - autocomplete_min: 4 - # Default search language - leave blank to detect from browser information or - # use codes from 'languages.py' - default_lang: "auto" - # max_page: 0 # if engine supports paging, 0 means unlimited numbers of pages - # Available languages - # languages: - # - all - # - en - # - en-US - # - de - # - it-IT - # - fr - # - fr-BE - # ban time in seconds after engine errors - ban_time_on_fail: 5 - # max ban time in seconds after engine errors - max_ban_time_on_fail: 120 - suspended_times: - # Engine suspension time after error (in seconds; set to 0 to disable) - # For error "Access denied" and "HTTP error [402, 403]" - SearxEngineAccessDenied: 86400 - # For error "CAPTCHA" - SearxEngineCaptcha: 86400 - # For error "Too many request" and "HTTP error 429" - SearxEngineTooManyRequests: 3600 - # Cloudflare CAPTCHA - cf_SearxEngineCaptcha: 1296000 - cf_SearxEngineAccessDenied: 86400 - # ReCAPTCHA - recaptcha_SearxEngineCaptcha: 604800 - - # remove format to deny access, use lower case. - # formats: [html, csv, json, rss] - formats: - - html - - json - -server: - # Is overwritten by ${SEARXNG_PORT} and ${SEARXNG_BIND_ADDRESS} - port: 8888 - bind_address: "127.0.0.1" - # public URL of the instance, to ensure correct inbound links. Is overwritten - # by ${SEARXNG_URL}. - base_url: http://0.0.0.0:8081/ # "http://example.com/location" - # rate limit the number of request on the instance, block some bots. - # Is overwritten by ${SEARXNG_LIMITER} - limiter: false - # enable features designed only for public instances. - # Is overwritten by ${SEARXNG_PUBLIC_INSTANCE} - public_instance: false - - # If your instance owns a /etc/searxng/settings.yml file, then set the following - # values there. - - secret_key: "772ba36386fb56d0f8fe818941552dabbe69220d4c0eb4a385a5729cdbc20c2d" # Is overwritten by ${SEARXNG_SECRET} - # Proxy image results through SearXNG. 
Is overwritten by ${SEARXNG_IMAGE_PROXY} - image_proxy: false - # 1.0 and 1.1 are supported - http_protocol_version: "1.0" - # POST queries are more secure as they don't show up in history but may cause - # problems when using Firefox containers - method: "POST" - default_http_headers: - X-Content-Type-Options: nosniff - X-Download-Options: noopen - X-Robots-Tag: noindex, nofollow - Referrer-Policy: no-referrer - -redis: - # URL to connect redis database. Is overwritten by ${SEARXNG_REDIS_URL}. - # https://docs.searxng.org/admin/settings/settings_redis.html#settings-redis - url: false - -ui: - # Custom static path - leave it blank if you didn't change - static_path: "" - # Is overwritten by ${SEARXNG_STATIC_USE_HASH}. - static_use_hash: false - # Custom templates path - leave it blank if you didn't change - templates_path: "" - # query_in_title: When true, the result page's titles contains the query - # it decreases the privacy, since the browser can records the page titles. - query_in_title: false - # infinite_scroll: When true, automatically loads the next page when scrolling to bottom of the current page. - infinite_scroll: false - # ui theme - default_theme: simple - # center the results ? - center_alignment: false - # URL prefix of the internet archive, don't forget trailing slash (if needed). - # cache_url: "https://webcache.googleusercontent.com/search?q=cache:" - # Default interface locale - leave blank to detect from browser information or - # use codes from the 'locales' config section - default_locale: "" - # Open result links in a new tab by default - # results_on_new_tab: false - theme_args: - # style of simple theme: auto, light, dark - simple_style: auto - # Perform search immediately if a category selected. - # Disable to select multiple categories at once and start the search manually. - search_on_category_select: true - # Hotkeys: default or vim - hotkeys: default - -# Lock arbitrary settings on the preferences page. 
To find the ID of the user -# setting you want to lock, check the ID of the form on the page "preferences". -# -# preferences: -# lock: -# - language -# - autocomplete -# - method -# - query_in_title - -# searx supports result proxification using an external service: -# https://github.com/asciimoo/morty uncomment below section if you have running -# morty proxy the key is base64 encoded (keep the !!binary notation) -# Note: since commit af77ec3, morty accepts a base64 encoded key. -# -# result_proxy: -# url: http://127.0.0.1:3000/ -# # the key is a base64 encoded string, the YAML !!binary prefix is optional -# key: !!binary "your_morty_proxy_key" -# # [true|false] enable the "proxy" button next to each result -# proxify_results: true - -# communication with search engines -# -outgoing: - # default timeout in seconds, can be override by engine - request_timeout: 3.0 - # the maximum timeout in seconds - # max_request_timeout: 10.0 - # suffix of searx_useragent, could contain information like an email address - # to the administrator - useragent_suffix: "" - # The maximum number of concurrent connections that may be established. - pool_connections: 100 - # Allow the connection pool to maintain keep-alive connections below this - # point. 
- pool_maxsize: 20 - # See https://www.python-httpx.org/http2/ - enable_http2: true - # uncomment below section if you want to use a custom server certificate - # see https://www.python-httpx.org/advanced/#changing-the-verification-defaults - # and https://www.python-httpx.org/compatibility/#ssl-configuration - # verify: ~/.mitmproxy/mitmproxy-ca-cert.cer - # - # uncomment below section if you want to use a proxyq see: SOCKS proxies - # https://2.python-requests.org/en/latest/user/advanced/#proxies - # are also supported: see - # https://2.python-requests.org/en/latest/user/advanced/#socks - # - # proxies: - # all://: - # - http://host.docker.internal:1080 - # - # using_tor_proxy: true - # - # Extra seconds to add in order to account for the time taken by the proxy - # - # extra_proxy_timeout: 10 - # - # uncomment below section only if you have more than one network interface - # which can be the source of outgoing search requests - # - # source_ips: - # - 1.1.1.1 - # - 1.1.1.2 - # - fe80::/126 - -# External plugin configuration, for more details see -# https://docs.searxng.org/dev/plugins.html -# -# plugins: -# - plugin1 -# - plugin2 -# - ... - -# Comment or un-comment plugin to activate / deactivate by default. -# -# enabled_plugins: -# # these plugins are enabled if nothing is configured .. -# - 'Hash plugin' -# - 'Self Information' -# - 'Tracker URL remover' -# - 'Ahmia blacklist' # activation depends on outgoing.using_tor_proxy -# # these plugins are disabled if nothing is configured .. -# - 'Hostnames plugin' # see 'hostnames' configuration below -# - 'Basic Calculator' -# - 'Open Access DOI rewrite' -# - 'Tor check plugin' -# # Read the docs before activate: auto-detection of the language could be -# # detrimental to users expectations / users can activate the plugin in the -# # preferences if they want. 
-# - 'Autodetect search language' - -# Configuration of the "Hostnames plugin": -# -# hostnames: -# replace: -# '(.*\.)?youtube\.com$': 'invidious.example.com' -# '(.*\.)?youtu\.be$': 'invidious.example.com' -# '(.*\.)?reddit\.com$': 'teddit.example.com' -# '(.*\.)?redd\.it$': 'teddit.example.com' -# '(www\.)?twitter\.com$': 'nitter.example.com' -# remove: -# - '(.*\.)?facebook.com$' -# low_priority: -# - '(.*\.)?google(\..*)?$' -# high_priority: -# - '(.*\.)?wikipedia.org$' -# -# Alternatively you can use external files for configuring the "Hostnames plugin": -# -# hostnames: -# replace: 'rewrite-hosts.yml' -# -# Content of 'rewrite-hosts.yml' (place the file in the same directory as 'settings.yml'): -# '(.*\.)?youtube\.com$': 'invidious.example.com' -# '(.*\.)?youtu\.be$': 'invidious.example.com' -# - -checker: - # disable checker when in debug mode - off_when_debug: true - - # use "scheduling: false" to disable scheduling - # scheduling: interval or int - - # to activate the scheduler: - # * uncomment "scheduling" section - # * add "cache2 = name=searxngcache,items=2000,blocks=2000,blocksize=4096,bitmap=1" - # to your uwsgi.ini - - # scheduling: - # start_after: [300, 1800] # delay to start the first run of the checker - # every: [86400, 90000] # how often the checker runs - - # additional tests: only for the YAML anchors (see the engines section) - # - additional_tests: - rosebud: &test_rosebud - matrix: - query: rosebud - lang: en - result_container: - - not_empty - - ['one_title_contains', 'citizen kane'] - test: - - unique_results - - android: &test_android - matrix: - query: ['android'] - lang: ['en', 'de', 'fr', 'zh-CN'] - result_container: - - not_empty - - ['one_title_contains', 'google'] - test: - - unique_results - - # tests: only for the YAML anchors (see the engines section) - tests: - infobox: &tests_infobox - infobox: - matrix: - query: ["linux", "new york", "bbc"] - result_container: - - has_infobox - -categories_as_tabs: - general: - images: - 
videos: - news: - map: - music: - it: - science: - files: - social media: - -engines: - - name: 9gag - engine: 9gag - shortcut: 9g - disabled: true - - - name: alpine linux packages - engine: alpinelinux - disabled: true - shortcut: alp - - - name: annas archive - engine: annas_archive - disabled: true - shortcut: aa - - # - name: annas articles - # engine: annas_archive - # shortcut: aaa - # # https://docs.searxng.org/dev/engines/online/annas_archive.html - # aa_content: 'magazine' # book_fiction, book_unknown, book_nonfiction, book_comic - # aa_ext: 'pdf' # pdf, epub, .. - # aa_sort: oldest' # newest, oldest, largest, smallest - - - name: apk mirror - engine: apkmirror - timeout: 4.0 - shortcut: apkm - disabled: true - - - name: apple app store - engine: apple_app_store - shortcut: aps - disabled: true - - # Requires Tor - - name: ahmia - engine: ahmia - categories: onions - enable_http: true - shortcut: ah - - - name: anaconda - engine: xpath - paging: true - first_page_num: 0 - search_url: https://anaconda.org/search?q={query}&page={pageno} - results_xpath: //tbody/tr - url_xpath: ./td/h5/a[last()]/@href - title_xpath: ./td/h5 - content_xpath: ./td[h5]/text() - categories: it - timeout: 6.0 - shortcut: conda - disabled: true - - - name: arch linux wiki - engine: archlinux - shortcut: al - - - name: artic - engine: artic - shortcut: arc - timeout: 4.0 - - - name: arxiv - engine: arxiv - shortcut: arx - timeout: 4.0 - - - name: ask - engine: ask - shortcut: ask - disabled: true - - # tmp suspended: dh key too small - # - name: base - # engine: base - # shortcut: bs - - - name: bandcamp - engine: bandcamp - shortcut: bc - categories: music - - - name: wikipedia - engine: wikipedia - shortcut: wp - # add "list" to the array to get results in the results list - display_type: ["infobox"] - base_url: 'https://{language}.wikipedia.org/' - categories: [general] - - - name: bilibili - engine: bilibili - shortcut: bil - disabled: true - - - name: bing - engine: bing - 
shortcut: bi - disabled: false - - - name: bing images - engine: bing_images - shortcut: bii - - - name: bing news - engine: bing_news - shortcut: bin - - - name: bing videos - engine: bing_videos - shortcut: biv - - - name: bitbucket - engine: xpath - paging: true - search_url: https://bitbucket.org/repo/all/{pageno}?name={query} - url_xpath: //article[@class="repo-summary"]//a[@class="repo-link"]/@href - title_xpath: //article[@class="repo-summary"]//a[@class="repo-link"] - content_xpath: //article[@class="repo-summary"]/p - categories: [it, repos] - timeout: 4.0 - disabled: true - shortcut: bb - about: - website: https://bitbucket.org/ - wikidata_id: Q2493781 - official_api_documentation: https://developer.atlassian.com/bitbucket - use_official_api: false - require_api_key: false - results: HTML - - - name: bpb - engine: bpb - shortcut: bpb - disabled: true - - - name: btdigg - engine: btdigg - shortcut: bt - disabled: true - - - name: openverse - engine: openverse - categories: images - shortcut: opv - - - name: media.ccc.de - engine: ccc_media - shortcut: c3tv - # We don't set language: de here because media.ccc.de is not just - # for a German audience. It contains many English videos and many - # German videos have English subtitles. 
- disabled: true - - - name: chefkoch - engine: chefkoch - shortcut: chef - # to show premium or plus results too: - # skip_premium: false - - # - name: core.ac.uk - # engine: core - # categories: science - # shortcut: cor - # # get your API key from: https://core.ac.uk/api-keys/register/ - # api_key: 'unset' - - - name: cppreference - engine: cppreference - shortcut: cpp - paging: false - disabled: true - - - name: crossref - engine: crossref - shortcut: cr - timeout: 30 - disabled: true - - - name: crowdview - engine: json_engine - shortcut: cv - categories: general - paging: false - search_url: https://crowdview-next-js.onrender.com/api/search-v3?query={query} - results_query: results - url_query: link - title_query: title - content_query: snippet - disabled: true - about: - website: https://crowdview.ai/ - - - name: yep - engine: yep - shortcut: yep - categories: general - search_type: web - timeout: 5 - disabled: true - - - name: yep images - engine: yep - shortcut: yepi - categories: images - search_type: images - disabled: true - - - name: yep news - engine: yep - shortcut: yepn - categories: news - search_type: news - disabled: true - - - name: curlie - engine: xpath - shortcut: cl - categories: general - disabled: true - paging: true - lang_all: '' - search_url: https://curlie.org/search?q={query}&lang={lang}&start={pageno}&stime=92452189 - page_size: 20 - results_xpath: //div[@id="site-list-content"]/div[@class="site-item"] - url_xpath: ./div[@class="title-and-desc"]/a/@href - title_xpath: ./div[@class="title-and-desc"]/a/div - content_xpath: ./div[@class="title-and-desc"]/div[@class="site-descr"] - about: - website: https://curlie.org/ - wikidata_id: Q60715723 - use_official_api: false - require_api_key: false - results: HTML - - - name: currency - engine: currency_convert - categories: general - shortcut: cc - - - name: bahnhof - engine: json_engine - search_url: https://www.bahnhof.de/api/stations/search/{query} - url_prefix: https://www.bahnhof.de/ - 
url_query: slug - title_query: name - content_query: state - shortcut: bf - disabled: true - about: - website: https://www.bahn.de - wikidata_id: Q22811603 - use_official_api: false - require_api_key: false - results: JSON - language: de - tests: - bahnhof: - matrix: - query: berlin - lang: en - result_container: - - not_empty - - ['one_title_contains', 'Berlin Hauptbahnhof'] - test: - - unique_results - - - name: deezer - engine: deezer - shortcut: dz - disabled: true - - - name: destatis - engine: destatis - shortcut: destat - disabled: true - - - name: deviantart - engine: deviantart - shortcut: da - timeout: 3.0 - - - name: ddg definitions - engine: duckduckgo_definitions - shortcut: ddd - weight: 2 - disabled: true - tests: *tests_infobox - - # cloudflare protected - # - name: digbt - # engine: digbt - # shortcut: dbt - # timeout: 6.0 - # disabled: true - - - name: docker hub - engine: docker_hub - shortcut: dh - categories: [it, packages] - - - name: encyclosearch - engine: json_engine - shortcut: es - categories: general - paging: true - search_url: https://encyclosearch.org/encyclosphere/search?q={query}&page={pageno}&resultsPerPage=15 - results_query: Results - url_query: SourceURL - title_query: Title - content_query: Description - disabled: true - about: - website: https://encyclosearch.org - official_api_documentation: https://encyclosearch.org/docs/#/rest-api - use_official_api: true - require_api_key: false - results: JSON - - - name: erowid - engine: xpath - paging: true - first_page_num: 0 - page_size: 30 - search_url: https://www.erowid.org/search.php?q={query}&s={pageno} - url_xpath: //dl[@class="results-list"]/dt[@class="result-title"]/a/@href - title_xpath: //dl[@class="results-list"]/dt[@class="result-title"]/a/text() - content_xpath: //dl[@class="results-list"]/dd[@class="result-details"] - categories: [] - shortcut: ew - disabled: true - about: - website: https://www.erowid.org/ - wikidata_id: Q1430691 - official_api_documentation: - 
use_official_api: false - require_api_key: false - results: HTML - - # - name: elasticsearch - # shortcut: es - # engine: elasticsearch - # base_url: http://localhost:9200 - # username: elastic - # password: changeme - # index: my-index - # # available options: match, simple_query_string, term, terms, custom - # query_type: match - # # if query_type is set to custom, provide your query here - # #custom_query_json: {"query":{"match_all": {}}} - # #show_metadata: false - # disabled: true - - - name: wikidata - engine: wikidata - shortcut: wd - timeout: 3.0 - weight: 2 - # add "list" to the array to get results in the results list - display_type: ["infobox"] - tests: *tests_infobox - categories: [general] - - - name: duckduckgo - engine: duckduckgo - shortcut: ddg - - - name: duckduckgo images - engine: duckduckgo_extra - categories: [images, web] - ddg_category: images - shortcut: ddi - disabled: true - - - name: duckduckgo videos - engine: duckduckgo_extra - categories: [videos, web] - ddg_category: videos - shortcut: ddv - disabled: true - - - name: duckduckgo news - engine: duckduckgo_extra - categories: [news, web] - ddg_category: news - shortcut: ddn - disabled: true - - - name: duckduckgo weather - engine: duckduckgo_weather - shortcut: ddw - disabled: true - - - name: apple maps - engine: apple_maps - shortcut: apm - disabled: true - timeout: 5.0 - - - name: emojipedia - engine: emojipedia - timeout: 4.0 - shortcut: em - disabled: true - - - name: tineye - engine: tineye - shortcut: tin - timeout: 9.0 - disabled: true - - - name: etymonline - engine: xpath - paging: true - search_url: https://etymonline.com/search?page={pageno}&q={query} - url_xpath: //a[contains(@class, "word__name--")]/@href - title_xpath: //a[contains(@class, "word__name--")] - content_xpath: //section[contains(@class, "word__defination")] - first_page_num: 1 - shortcut: et - categories: [dictionaries] - about: - website: https://www.etymonline.com/ - wikidata_id: Q1188617 - 
official_api_documentation: - use_official_api: false - require_api_key: false - results: HTML - - # - name: ebay - # engine: ebay - # shortcut: eb - # base_url: 'https://www.ebay.com' - # disabled: true - # timeout: 5 - - - name: 1x - engine: www1x - shortcut: 1x - timeout: 3.0 - disabled: true - - - name: fdroid - engine: fdroid - shortcut: fd - disabled: true - - - name: findthatmeme - engine: findthatmeme - shortcut: ftm - disabled: true - - - name: flickr - categories: images - shortcut: fl - # You can use the engine using the official stable API, but you need an API - # key, see: https://www.flickr.com/services/apps/create/ - # engine: flickr - # api_key: 'apikey' # required! - # Or you can use the html non-stable engine, activated by default - engine: flickr_noapi - - - name: free software directory - engine: mediawiki - shortcut: fsd - categories: [it, software wikis] - base_url: https://directory.fsf.org/ - search_type: title - timeout: 5.0 - disabled: true - about: - website: https://directory.fsf.org/ - wikidata_id: Q2470288 - - # - name: freesound - # engine: freesound - # shortcut: fnd - # disabled: true - # timeout: 15.0 - # API key required, see: https://freesound.org/docs/api/overview.html - # api_key: MyAPIkey - - - name: frinkiac - engine: frinkiac - shortcut: frk - disabled: true - - - name: fyyd - engine: fyyd - shortcut: fy - timeout: 8.0 - disabled: true - - - name: geizhals - engine: geizhals - shortcut: geiz - disabled: true - - - name: genius - engine: genius - shortcut: gen - - - name: gentoo - engine: mediawiki - shortcut: ge - categories: ["it", "software wikis"] - base_url: "https://wiki.gentoo.org/" - api_path: "api.php" - search_type: text - timeout: 10 - - - name: gitlab - engine: json_engine - paging: true - search_url: https://gitlab.com/api/v4/projects?search={query}&page={pageno} - url_query: web_url - title_query: name_with_namespace - content_query: description - page_size: 20 - categories: [it, repos] - shortcut: gl - timeout: 
10.0 - disabled: true - about: - website: https://about.gitlab.com/ - wikidata_id: Q16639197 - official_api_documentation: https://docs.gitlab.com/ee/api/ - use_official_api: false - require_api_key: false - results: JSON - - - name: github - engine: github - shortcut: gh - - - name: codeberg - # https://docs.searxng.org/dev/engines/online/gitea.html - engine: gitea - base_url: https://codeberg.org - shortcut: cb - disabled: true - - - name: gitea.com - engine: gitea - base_url: https://gitea.com - shortcut: gitea - disabled: true - - - name: goodreads - engine: goodreads - shortcut: good - timeout: 4.0 - disabled: true - - - name: google - engine: google - shortcut: go - # additional_tests: - # android: *test_android - - - name: google images - engine: google_images - shortcut: goi - # additional_tests: - # android: *test_android - # dali: - # matrix: - # query: ['Dali Christ'] - # lang: ['en', 'de', 'fr', 'zh-CN'] - # result_container: - # - ['one_title_contains', 'Salvador'] - - - name: google news - engine: google_news - shortcut: gon - # additional_tests: - # android: *test_android - - - name: google videos - engine: google_videos - shortcut: gov - # additional_tests: - # android: *test_android - - - name: google scholar - engine: google_scholar - shortcut: gos - - - name: google play apps - engine: google_play - categories: [files, apps] - shortcut: gpa - play_categ: apps - disabled: true - - - name: google play movies - engine: google_play - categories: videos - shortcut: gpm - play_categ: movies - disabled: true - - - name: material icons - engine: material_icons - categories: images - shortcut: mi - disabled: true - - - name: gpodder - engine: json_engine - shortcut: gpod - timeout: 4.0 - paging: false - search_url: https://gpodder.net/search.json?q={query} - url_query: url - title_query: title - content_query: description - page_size: 19 - categories: music - disabled: true - about: - website: https://gpodder.net - wikidata_id: Q3093354 - 
official_api_documentation: https://gpoddernet.readthedocs.io/en/latest/api/ - use_official_api: false - require_api_key: false - results: JSON - - - name: habrahabr - engine: xpath - paging: true - search_url: https://habr.com/en/search/page{pageno}/?q={query} - results_xpath: //article[contains(@class, "tm-articles-list__item")] - url_xpath: .//a[@class="tm-title__link"]/@href - title_xpath: .//a[@class="tm-title__link"] - content_xpath: .//div[contains(@class, "article-formatted-body")] - categories: it - timeout: 4.0 - disabled: true - shortcut: habr - about: - website: https://habr.com/ - wikidata_id: Q4494434 - official_api_documentation: https://habr.com/en/docs/help/api/ - use_official_api: false - require_api_key: false - results: HTML - - - name: hackernews - engine: hackernews - shortcut: hn - disabled: true - - - name: hex - engine: hex - shortcut: hex - disabled: true - # Valid values: name inserted_at updated_at total_downloads recent_downloads - sort_criteria: "recent_downloads" - page_size: 10 - - - name: crates.io - engine: crates - shortcut: crates - disabled: true - timeout: 6.0 - - - name: hoogle - engine: xpath - search_url: https://hoogle.haskell.org/?hoogle={query} - results_xpath: '//div[@class="result"]' - title_xpath: './/div[@class="ans"]//a' - url_xpath: './/div[@class="ans"]//a/@href' - content_xpath: './/div[@class="from"]' - page_size: 20 - categories: [it, packages] - shortcut: ho - about: - website: https://hoogle.haskell.org/ - wikidata_id: Q34010 - official_api_documentation: https://hackage.haskell.org/api - use_official_api: false - require_api_key: false - results: JSON - - - name: imdb - engine: imdb - shortcut: imdb - timeout: 6.0 - disabled: true - - - name: imgur - engine: imgur - shortcut: img - disabled: true - - - name: ina - engine: ina - shortcut: in - timeout: 6.0 - disabled: true - - - name: invidious - engine: invidious - # Instances will be selected randomly, see https://api.invidious.io/ for - # instances that are 
stable (good uptime) and close to you. - base_url: - - https://invidious.io.lol - - https://invidious.fdn.fr - - https://yt.artemislena.eu - - https://invidious.tiekoetter.com - - https://invidious.flokinet.to - - https://vid.puffyan.us - - https://invidious.privacydev.net - - https://inv.tux.pizza - shortcut: iv - timeout: 3.0 - disabled: true - - - name: jisho - engine: jisho - shortcut: js - timeout: 3.0 - disabled: true - - - name: kickass - engine: kickass - base_url: - - https://kickasstorrents.to - - https://kickasstorrents.cr - - https://kickasstorrent.cr - - https://kickass.sx - - https://kat.am - shortcut: kc - timeout: 4.0 - disabled: true - - - name: lemmy communities - engine: lemmy - lemmy_type: Communities - shortcut: leco - - - name: lemmy users - engine: lemmy - network: lemmy communities - lemmy_type: Users - shortcut: leus - - - name: lemmy posts - engine: lemmy - network: lemmy communities - lemmy_type: Posts - shortcut: lepo - - - name: lemmy comments - engine: lemmy - network: lemmy communities - lemmy_type: Comments - shortcut: lecom - - - name: library genesis - engine: xpath - # search_url: https://libgen.is/search.php?req={query} - search_url: https://libgen.rs/search.php?req={query} - url_xpath: //a[contains(@href,"book/index.php?md5")]/@href - title_xpath: //a[contains(@href,"book/")]/text()[1] - content_xpath: //td/a[1][contains(@href,"=author")]/text() - categories: files - timeout: 7.0 - disabled: true - shortcut: lg - about: - website: https://libgen.fun/ - wikidata_id: Q22017206 - official_api_documentation: - use_official_api: false - require_api_key: false - results: HTML - - - name: z-library - engine: zlibrary - shortcut: zlib - categories: files - timeout: 7.0 - disabled: true - - - name: library of congress - engine: loc - shortcut: loc - categories: images - - - name: libretranslate - engine: libretranslate - # https://github.com/LibreTranslate/LibreTranslate?tab=readme-ov-file#mirrors - base_url: - - 
https://translate.terraprint.co - - https://trans.zillyhuhn.com - # api_key: abc123 - shortcut: lt - disabled: true - - - name: lingva - engine: lingva - shortcut: lv - # set lingva instance in url, by default it will use the official instance - # url: https://lingva.thedaviddelta.com - - - name: lobste.rs - engine: xpath - search_url: https://lobste.rs/search?q={query}&what=stories&order=relevance - results_xpath: //li[contains(@class, "story")] - url_xpath: .//a[@class="u-url"]/@href - title_xpath: .//a[@class="u-url"] - content_xpath: .//a[@class="domain"] - categories: it - shortcut: lo - timeout: 5.0 - disabled: true - about: - website: https://lobste.rs/ - wikidata_id: Q60762874 - official_api_documentation: - use_official_api: false - require_api_key: false - results: HTML - - - name: mastodon users - engine: mastodon - mastodon_type: accounts - base_url: https://mastodon.social - shortcut: mau - - - name: mastodon hashtags - engine: mastodon - mastodon_type: hashtags - base_url: https://mastodon.social - shortcut: mah - - # - name: matrixrooms - # engine: mrs - # # https://docs.searxng.org/dev/engines/online/mrs.html - # # base_url: https://mrs-api-host - # shortcut: mtrx - # disabled: true - - - name: mdn - shortcut: mdn - engine: json_engine - categories: [it] - paging: true - search_url: https://developer.mozilla.org/api/v1/search?q={query}&page={pageno} - results_query: documents - url_query: mdn_url - url_prefix: https://developer.mozilla.org - title_query: title - content_query: summary - about: - website: https://developer.mozilla.org - wikidata_id: Q3273508 - official_api_documentation: null - use_official_api: false - require_api_key: false - results: JSON - - - name: metacpan - engine: metacpan - shortcut: cpan - disabled: true - number_of_results: 20 - - # - name: meilisearch - # engine: meilisearch - # shortcut: mes - # enable_http: true - # base_url: http://localhost:7700 - # index: my-index - - - name: mixcloud - engine: mixcloud - shortcut: 
mc - - # MongoDB engine - # Required dependency: pymongo - # - name: mymongo - # engine: mongodb - # shortcut: md - # exact_match_only: false - # host: '127.0.0.1' - # port: 27017 - # enable_http: true - # results_per_page: 20 - # database: 'business' - # collection: 'reviews' # name of the db collection - # key: 'name' # key in the collection to search for - - - name: mozhi - engine: mozhi - base_url: - - https://mozhi.aryak.me - - https://translate.bus-hit.me - - https://nyc1.mz.ggtyler.dev - # mozhi_engine: google - see https://mozhi.aryak.me for supported engines - timeout: 4.0 - shortcut: mz - disabled: true - - - name: mwmbl - engine: mwmbl - # api_url: https://api.mwmbl.org - shortcut: mwm - disabled: true - - - name: npm - engine: npm - shortcut: npm - timeout: 5.0 - disabled: true - - - name: nyaa - engine: nyaa - shortcut: nt - disabled: true - - - name: mankier - engine: json_engine - search_url: https://www.mankier.com/api/v2/mans/?q={query} - results_query: results - url_query: url - title_query: name - content_query: description - categories: it - shortcut: man - about: - website: https://www.mankier.com/ - official_api_documentation: https://www.mankier.com/api - use_official_api: true - require_api_key: false - results: JSON - - # read https://docs.searxng.org/dev/engines/online/mullvad_leta.html - # - name: mullvadleta - # engine: mullvad_leta - # leta_engine: google # choose one of the following: google, brave - # use_cache: true # Only 100 non-cache searches per day, suggested only for private instances - # search_url: https://leta.mullvad.net - # categories: [general, web] - # shortcut: ml - - - name: odysee - engine: odysee - shortcut: od - disabled: true - - - name: openairedatasets - engine: json_engine - paging: true - search_url: https://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query} - results_query: response/results/result - url_query: metadata/oaf:entity/oaf:result/children/instance/webresource/url/$ - 
title_query: metadata/oaf:entity/oaf:result/title/$ - content_query: metadata/oaf:entity/oaf:result/description/$ - content_html_to_text: true - categories: "science" - shortcut: oad - timeout: 5.0 - about: - website: https://www.openaire.eu/ - wikidata_id: Q25106053 - official_api_documentation: https://api.openaire.eu/ - use_official_api: false - require_api_key: false - results: JSON - - - name: openairepublications - engine: json_engine - paging: true - search_url: https://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query} - results_query: response/results/result - url_query: metadata/oaf:entity/oaf:result/children/instance/webresource/url/$ - title_query: metadata/oaf:entity/oaf:result/title/$ - content_query: metadata/oaf:entity/oaf:result/description/$ - content_html_to_text: true - categories: science - shortcut: oap - timeout: 5.0 - about: - website: https://www.openaire.eu/ - wikidata_id: Q25106053 - official_api_documentation: https://api.openaire.eu/ - use_official_api: false - require_api_key: false - results: JSON - - - name: openmeteo - engine: open_meteo - shortcut: om - disabled: true - - # - name: opensemanticsearch - # engine: opensemantic - # shortcut: oss - # base_url: 'http://localhost:8983/solr/opensemanticsearch/' - - - name: openstreetmap - engine: openstreetmap - shortcut: osm - - - name: openrepos - engine: xpath - paging: true - search_url: https://openrepos.net/search/node/{query}?page={pageno} - url_xpath: //li[@class="search-result"]//h3[@class="title"]/a/@href - title_xpath: //li[@class="search-result"]//h3[@class="title"]/a - content_xpath: //li[@class="search-result"]//div[@class="search-snippet-info"]//p[@class="search-snippet"] - categories: files - timeout: 4.0 - disabled: true - shortcut: or - about: - website: https://openrepos.net/ - wikidata_id: - official_api_documentation: - use_official_api: false - require_api_key: false - results: HTML - - - name: packagist - engine: json_engine - 
paging: true - search_url: https://packagist.org/search.json?q={query}&page={pageno} - results_query: results - url_query: url - title_query: name - content_query: description - categories: [it, packages] - disabled: true - timeout: 5.0 - shortcut: pack - about: - website: https://packagist.org - wikidata_id: Q108311377 - official_api_documentation: https://packagist.org/apidoc - use_official_api: true - require_api_key: false - results: JSON - - - name: pdbe - engine: pdbe - shortcut: pdb - # Hide obsolete PDB entries. Default is not to hide obsolete structures - # hide_obsolete: false - - - name: photon - engine: photon - shortcut: ph - - - name: pinterest - engine: pinterest - shortcut: pin - - - name: piped - engine: piped - shortcut: ppd - categories: videos - piped_filter: videos - timeout: 3.0 - - # URL to use as link and for embeds - frontend_url: https://srv.piped.video - # Instance will be selected randomly, for more see https://piped-instances.kavin.rocks/ - backend_url: - - https://pipedapi.kavin.rocks - - https://pipedapi-libre.kavin.rocks - - https://pipedapi.adminforge.de - - - name: piped.music - engine: piped - network: piped - shortcut: ppdm - categories: music - piped_filter: music_songs - timeout: 3.0 - - - name: piratebay - engine: piratebay - shortcut: tpb - # You may need to change this URL to a proxy if piratebay is blocked in your - # country - url: https://thepiratebay.org/ - timeout: 3.0 - - - name: pixiv - shortcut: pv - engine: pixiv - disabled: true - inactive: true - pixiv_image_proxies: - - https://pximg.example.org - # A proxy is required to load the images. Hosting an image proxy server - # for Pixiv: - # --> https://pixivfe.pages.dev/hosting-image-proxy-server/ - # Proxies from public instances. Ask the public instances owners if they - # agree to receive traffic from SearXNG! 
- # --> https://codeberg.org/VnPower/PixivFE#instances - # --> https://github.com/searxng/searxng/pull/3192#issuecomment-1941095047 - # image proxy of https://pixiv.cat - # - https://i.pixiv.cat - # image proxy of https://www.pixiv.pics - # - https://pximg.cocomi.eu.org - # image proxy of https://pixivfe.exozy.me - # - https://pximg.exozy.me - # image proxy of https://pixivfe.ducks.party - # - https://pixiv.ducks.party - # image proxy of https://pixiv.perennialte.ch - # - https://pximg.perennialte.ch - - - name: podcastindex - engine: podcastindex - shortcut: podcast - - # Required dependency: psycopg2 - # - name: postgresql - # engine: postgresql - # database: postgres - # username: postgres - # password: postgres - # limit: 10 - # query_str: 'SELECT * from my_table WHERE my_column = %(query)s' - # shortcut : psql - - - name: presearch - engine: presearch - search_type: search - categories: [general, web] - shortcut: ps - timeout: 4.0 - disabled: true - - - name: presearch images - engine: presearch - network: presearch - search_type: images - categories: [images, web] - timeout: 4.0 - shortcut: psimg - disabled: true - - - name: presearch videos - engine: presearch - network: presearch - search_type: videos - categories: [general, web] - timeout: 4.0 - shortcut: psvid - disabled: true - - - name: presearch news - engine: presearch - network: presearch - search_type: news - categories: [news, web] - timeout: 4.0 - shortcut: psnews - disabled: true - - - name: pub.dev - engine: xpath - shortcut: pd - search_url: https://pub.dev/packages?q={query}&page={pageno} - paging: true - results_xpath: //div[contains(@class,"packages-item")] - url_xpath: ./div/h3/a/@href - title_xpath: ./div/h3/a - content_xpath: ./div/div/div[contains(@class,"packages-description")]/span - categories: [packages, it] - timeout: 3.0 - disabled: true - first_page_num: 1 - about: - website: https://pub.dev/ - official_api_documentation: https://pub.dev/help/api - use_official_api: false - 
require_api_key: false - results: HTML - - - name: pubmed - engine: pubmed - shortcut: pub - timeout: 3.0 - - - name: pypi - shortcut: pypi - engine: pypi - - - name: qwant - qwant_categ: web - engine: qwant - disabled: true - shortcut: qw - categories: [general, web] - additional_tests: - rosebud: *test_rosebud - - - name: qwant news - qwant_categ: news - engine: qwant - shortcut: qwn - categories: news - network: qwant - - - name: qwant images - qwant_categ: images - engine: qwant - shortcut: qwi - categories: [images, web] - network: qwant - - - name: qwant videos - qwant_categ: videos - engine: qwant - shortcut: qwv - categories: [videos, web] - network: qwant - - # - name: library - # engine: recoll - # shortcut: lib - # base_url: 'https://recoll.example.org/' - # search_dir: '' - # mount_prefix: /export - # dl_prefix: 'https://download.example.org' - # timeout: 30.0 - # categories: files - # disabled: true - - # - name: recoll library reference - # engine: recoll - # base_url: 'https://recoll.example.org/' - # search_dir: reference - # mount_prefix: /export - # dl_prefix: 'https://download.example.org' - # shortcut: libr - # timeout: 30.0 - # categories: files - # disabled: true - - - name: radio browser - engine: radio_browser - shortcut: rb - - - name: reddit - engine: reddit - shortcut: re - page_size: 25 - disabled: true - - - name: rottentomatoes - engine: rottentomatoes - shortcut: rt - disabled: true - - # Required dependency: redis - # - name: myredis - # shortcut : rds - # engine: redis_server - # exact_match_only: false - # host: '127.0.0.1' - # port: 6379 - # enable_http: true - # password: '' - # db: 0 - - # tmp suspended: bad certificate - # - name: scanr structures - # shortcut: scs - # engine: scanr_structures - # disabled: true - - - name: searchmysite - engine: xpath - shortcut: sms - categories: general - paging: true - search_url: https://searchmysite.net/search/?q={query}&page={pageno} - results_xpath: 
//div[contains(@class,'search-result')] - url_xpath: .//a[contains(@class,'result-link')]/@href - title_xpath: .//span[contains(@class,'result-title-txt')]/text() - content_xpath: ./p[@id='result-hightlight'] - disabled: true - about: - website: https://searchmysite.net - - - name: sepiasearch - engine: sepiasearch - shortcut: sep - - - name: soundcloud - engine: soundcloud - shortcut: sc - - - name: stackoverflow - engine: stackexchange - shortcut: st - api_site: 'stackoverflow' - categories: [it, q&a] - - - name: askubuntu - engine: stackexchange - shortcut: ubuntu - api_site: 'askubuntu' - categories: [it, q&a] - - - name: internetarchivescholar - engine: internet_archive_scholar - shortcut: ias - timeout: 15.0 - - - name: superuser - engine: stackexchange - shortcut: su - api_site: 'superuser' - categories: [it, q&a] - - - name: discuss.python - engine: discourse - shortcut: dpy - base_url: 'https://discuss.python.org' - categories: [it, q&a] - disabled: true - - - name: caddy.community - engine: discourse - shortcut: caddy - base_url: 'https://caddy.community' - categories: [it, q&a] - disabled: true - - - name: pi-hole.community - engine: discourse - shortcut: pi - categories: [it, q&a] - base_url: 'https://discourse.pi-hole.net' - disabled: true - - - name: searchcode code - engine: searchcode_code - shortcut: scc - disabled: true - - # - name: searx - # engine: searx_engine - # shortcut: se - # instance_urls : - # - http://127.0.0.1:8888/ - # - ... 
- # disabled: true - - - name: semantic scholar - engine: semantic_scholar - disabled: true - shortcut: se - - # Spotify needs API credentials - # - name: spotify - # engine: spotify - # shortcut: stf - # api_client_id: ******* - # api_client_secret: ******* - - # - name: solr - # engine: solr - # shortcut: slr - # base_url: http://localhost:8983 - # collection: collection_name - # sort: '' # sorting: asc or desc - # field_list: '' # comma separated list of field names to display on the UI - # default_fields: '' # default field to query - # query_fields: '' # query fields - # enable_http: true - - # - name: springer nature - # engine: springer - # # get your API key from: https://dev.springernature.com/signup - # # working API key, for test & debug: "a69685087d07eca9f13db62f65b8f601" - # api_key: 'unset' - # shortcut: springer - # timeout: 15.0 - - - name: startpage - engine: startpage - shortcut: sp - timeout: 6.0 - disabled: true - additional_tests: - rosebud: *test_rosebud - - - name: tokyotoshokan - engine: tokyotoshokan - shortcut: tt - timeout: 6.0 - disabled: true - - - name: solidtorrents - engine: solidtorrents - shortcut: solid - timeout: 4.0 - base_url: - - https://solidtorrents.to - - https://bitsearch.to - - # For this demo of the sqlite engine download: - # https://liste.mediathekview.de/filmliste-v2.db.bz2 - # and unpack into searx/data/filmliste-v2.db - # Query to test: "!demo concert" - # - # - name: demo - # engine: sqlite - # shortcut: demo - # categories: general - # result_template: default.html - # database: searx/data/filmliste-v2.db - # query_str: >- - # SELECT title || ' (' || time(duration, 'unixepoch') || ')' AS title, - # COALESCE( NULLIF(url_video_hd,''), NULLIF(url_video_sd,''), url_video) AS url, - # description AS content - # FROM film - # WHERE title LIKE :wildcard OR description LIKE :wildcard - # ORDER BY duration DESC - - - name: tagesschau - engine: tagesschau - # when set to false, display URLs from Tagesschau, and not the 
actual source - # (e.g. NDR, WDR, SWR, HR, ...) - use_source_url: true - shortcut: ts - disabled: true - - - name: tmdb - engine: xpath - paging: true - categories: movies - search_url: https://www.themoviedb.org/search?page={pageno}&query={query} - results_xpath: //div[contains(@class,"movie") or contains(@class,"tv")]//div[contains(@class,"card")] - url_xpath: .//div[contains(@class,"poster")]/a/@href - thumbnail_xpath: .//img/@src - title_xpath: .//div[contains(@class,"title")]//h2 - content_xpath: .//div[contains(@class,"overview")] - shortcut: tm - disabled: true - - # Requires Tor - - name: torch - engine: xpath - paging: true - search_url: - http://xmh57jrknzkhv6y3ls3ubitzfqnkrwxhopf5aygthi7d6rplyvk3noyd.onion/cgi-bin/omega/omega?P={query}&DEFAULTOP=and - results_xpath: //table//tr - url_xpath: ./td[2]/a - title_xpath: ./td[2]/b - content_xpath: ./td[2]/small - categories: onions - enable_http: true - shortcut: tch - - # torznab engine lets you query any torznab compatible indexer. Using this - # engine in combination with Jackett opens the possibility to query a lot of - # public and private indexers directly from SearXNG. 
More details at: - # https://docs.searxng.org/dev/engines/online/torznab.html - # - # - name: Torznab EZTV - # engine: torznab - # shortcut: eztv - # base_url: http://localhost:9117/api/v2.0/indexers/eztv/results/torznab - # enable_http: true # if using localhost - # api_key: xxxxxxxxxxxxxxx - # show_magnet_links: true - # show_torrent_files: false - # # https://github.com/Jackett/Jackett/wiki/Jackett-Categories - # torznab_categories: # optional - # - 2000 - # - 5000 - - # tmp suspended - too slow, too many errors - # - name: urbandictionary - # engine : xpath - # search_url : https://www.urbandictionary.com/define.php?term={query} - # url_xpath : //*[@class="word"]/@href - # title_xpath : //*[@class="def-header"] - # content_xpath: //*[@class="meaning"] - # shortcut: ud - - - name: unsplash - engine: unsplash - shortcut: us - - - name: yandex music - engine: yandex_music - shortcut: ydm - disabled: true - # https://yandex.com/support/music/access.html - inactive: true - - - name: yahoo - engine: yahoo - shortcut: yh - disabled: true - - - name: yahoo news - engine: yahoo_news - shortcut: yhn - - - name: youtube - shortcut: yt - # You can use the engine using the official stable API, but you need an API - # key See: https://console.developers.google.com/project - # - # engine: youtube_api - # api_key: 'apikey' # required! 
- # - # Or you can use the html non-stable engine, activated by default - engine: youtube_noapi - - - name: dailymotion - engine: dailymotion - shortcut: dm - - - name: vimeo - engine: vimeo - shortcut: vm - disabled: true - - - name: wiby - engine: json_engine - paging: true - search_url: https://wiby.me/json/?q={query}&p={pageno} - url_query: URL - title_query: Title - content_query: Snippet - categories: [general, web] - shortcut: wib - disabled: true - about: - website: https://wiby.me/ - - - name: alexandria - engine: json_engine - shortcut: alx - categories: general - paging: true - search_url: https://api.alexandria.org/?a=1&q={query}&p={pageno} - results_query: results - title_query: title - url_query: url - content_query: snippet - timeout: 1.5 - disabled: true - about: - website: https://alexandria.org/ - official_api_documentation: https://github.com/alexandria-org/alexandria-api/raw/master/README.md - use_official_api: true - require_api_key: false - results: JSON - - - name: wikibooks - engine: mediawiki - weight: 0.5 - shortcut: wb - categories: [general, wikimedia] - base_url: "https://{language}.wikibooks.org/" - search_type: text - disabled: true - about: - website: https://www.wikibooks.org/ - wikidata_id: Q367 - - - name: wikinews - engine: mediawiki - shortcut: wn - categories: [news, wikimedia] - base_url: "https://{language}.wikinews.org/" - search_type: text - srsort: create_timestamp_desc - about: - website: https://www.wikinews.org/ - wikidata_id: Q964 - - - name: wikiquote - engine: mediawiki - weight: 0.5 - shortcut: wq - categories: [general, wikimedia] - base_url: "https://{language}.wikiquote.org/" - search_type: text - disabled: true - additional_tests: - rosebud: *test_rosebud - about: - website: https://www.wikiquote.org/ - wikidata_id: Q369 - - - name: wikisource - engine: mediawiki - weight: 0.5 - shortcut: ws - categories: [general, wikimedia] - base_url: "https://{language}.wikisource.org/" - search_type: text - disabled: 
true - about: - website: https://www.wikisource.org/ - wikidata_id: Q263 - - - name: wikispecies - engine: mediawiki - shortcut: wsp - categories: [general, science, wikimedia] - base_url: "https://species.wikimedia.org/" - search_type: text - disabled: true - about: - website: https://species.wikimedia.org/ - wikidata_id: Q13679 - tests: - wikispecies: - matrix: - query: "Campbell, L.I. et al. 2011: MicroRNAs" - lang: en - result_container: - - not_empty - - ['one_title_contains', 'Tardigrada'] - test: - - unique_results - - - name: wiktionary - engine: mediawiki - shortcut: wt - categories: [dictionaries, wikimedia] - base_url: "https://{language}.wiktionary.org/" - search_type: text - about: - website: https://www.wiktionary.org/ - wikidata_id: Q151 - - - name: wikiversity - engine: mediawiki - weight: 0.5 - shortcut: wv - categories: [general, wikimedia] - base_url: "https://{language}.wikiversity.org/" - search_type: text - disabled: true - about: - website: https://www.wikiversity.org/ - wikidata_id: Q370 - - - name: wikivoyage - engine: mediawiki - weight: 0.5 - shortcut: wy - categories: [general, wikimedia] - base_url: "https://{language}.wikivoyage.org/" - search_type: text - disabled: true - about: - website: https://www.wikivoyage.org/ - wikidata_id: Q373 - - - name: wikicommons.images - engine: wikicommons - shortcut: wc - categories: images - search_type: images - number_of_results: 10 - - - name: wikicommons.videos - engine: wikicommons - shortcut: wcv - categories: videos - search_type: videos - number_of_results: 10 - - - name: wikicommons.audio - engine: wikicommons - shortcut: wca - categories: music - search_type: audio - number_of_results: 10 - - - name: wikicommons.files - engine: wikicommons - shortcut: wcf - categories: files - search_type: files - number_of_results: 10 - - - name: wolframalpha - shortcut: wa - # You can use the engine using the official stable API, but you need an API - # key. 
See: https://products.wolframalpha.com/api/ - # - # engine: wolframalpha_api - # api_key: '' - # - # Or you can use the html non-stable engine, activated by default - engine: wolframalpha_noapi - timeout: 6.0 - categories: general - disabled: true - - - name: dictzone - engine: dictzone - shortcut: dc - - - name: mymemory translated - engine: translated - shortcut: tl - timeout: 5.0 - # You can use without an API key, but you are limited to 1000 words/day - # See: https://mymemory.translated.net/doc/usagelimits.php - # api_key: '' - - # Required dependency: mysql-connector-python - # - name: mysql - # engine: mysql_server - # database: mydatabase - # username: user - # password: pass - # limit: 10 - # query_str: 'SELECT * from mytable WHERE fieldname=%(query)s' - # shortcut: mysql - - - name: 1337x - engine: 1337x - shortcut: 1337x - disabled: true - - - name: duden - engine: duden - shortcut: du - disabled: true - - - name: seznam - shortcut: szn - engine: seznam - disabled: true - - # - name: deepl - # engine: deepl - # shortcut: dpl - # # You can use the engine using the official stable API, but you need an API key - # # See: https://www.deepl.com/pro-api?cta=header-pro-api - # api_key: '' # required! 
- # timeout: 5.0 - # disabled: true - - - name: mojeek - shortcut: mjk - engine: mojeek - categories: [general, web] - disabled: true - - - name: mojeek images - shortcut: mjkimg - engine: mojeek - categories: [images, web] - search_type: images - paging: false - disabled: true - - - name: mojeek news - shortcut: mjknews - engine: mojeek - categories: [news, web] - search_type: news - paging: false - disabled: true - - - name: moviepilot - engine: moviepilot - shortcut: mp - disabled: true - - - name: naver - shortcut: nvr - categories: [general, web] - engine: xpath - paging: true - search_url: https://search.naver.com/search.naver?where=webkr&sm=osp_hty&ie=UTF-8&query={query}&start={pageno} - url_xpath: //a[@class="link_tit"]/@href - title_xpath: //a[@class="link_tit"] - content_xpath: //div[@class="total_dsc_wrap"]/a - first_page_num: 1 - page_size: 10 - disabled: true - about: - website: https://www.naver.com/ - wikidata_id: Q485639 - official_api_documentation: https://developers.naver.com/docs/nmt/examples/ - use_official_api: false - require_api_key: false - results: HTML - language: ko - - - name: rubygems - shortcut: rbg - engine: xpath - paging: true - search_url: https://rubygems.org/search?page={pageno}&query={query} - results_xpath: /html/body/main/div/a[@class="gems__gem"] - url_xpath: ./@href - title_xpath: ./span/h2 - content_xpath: ./span/p - suggestion_xpath: /html/body/main/div/div[@class="search__suggestions"]/p/a - first_page_num: 1 - categories: [it, packages] - disabled: true - about: - website: https://rubygems.org/ - wikidata_id: Q1853420 - official_api_documentation: https://guides.rubygems.org/rubygems-org-api/ - use_official_api: false - require_api_key: false - results: HTML - - - name: peertube - engine: peertube - shortcut: ptb - paging: true - # alternatives see: https://instances.joinpeertube.org/instances - # base_url: https://tube.4aem.com - categories: videos - disabled: true - timeout: 6.0 - - - name: mediathekviewweb - engine: 
mediathekviewweb - shortcut: mvw - disabled: true - - - name: yacy - # https://docs.searxng.org/dev/engines/online/yacy.html - engine: yacy - categories: general - search_type: text - base_url: - - https://yacy.searchlab.eu - # see https://github.com/searxng/searxng/pull/3631#issuecomment-2240903027 - # - https://search.kyun.li - # - https://yacy.securecomcorp.eu - # - https://yacy.myserv.ca - # - https://yacy.nsupdate.info - # - https://yacy.electroncash.de - shortcut: ya - disabled: true - # if you aren't using HTTPS for your local yacy instance disable https - # enable_http: false - search_mode: 'global' - # timeout can be reduced in 'local' search mode - timeout: 5.0 - - - name: yacy images - engine: yacy - network: yacy - categories: images - search_type: image - shortcut: yai - disabled: true - # timeout can be reduced in 'local' search mode - timeout: 5.0 - - - name: rumble - engine: rumble - shortcut: ru - base_url: https://rumble.com/ - paging: true - categories: videos - disabled: true - - - name: livespace - engine: livespace - shortcut: ls - categories: videos - disabled: true - timeout: 5.0 - - - name: wordnik - engine: wordnik - shortcut: def - base_url: https://www.wordnik.com/ - categories: [dictionaries] - timeout: 5.0 - - - name: woxikon.de synonyme - engine: xpath - shortcut: woxi - categories: [dictionaries] - timeout: 5.0 - disabled: true - search_url: https://synonyme.woxikon.de/synonyme/{query}.php - url_xpath: //div[@class="upper-synonyms"]/a/@href - content_xpath: //div[@class="synonyms-list-group"] - title_xpath: //div[@class="upper-synonyms"]/a - no_result_for_http_status: [404] - about: - website: https://www.woxikon.de/ - wikidata_id: # No Wikidata ID - use_official_api: false - require_api_key: false - results: HTML - language: de - - - name: seekr news - engine: seekr - shortcut: senews - categories: news - seekr_category: news - disabled: true - - - name: seekr images - engine: seekr - network: seekr news - shortcut: seimg - 
categories: images - seekr_category: images - disabled: true - - - name: seekr videos - engine: seekr - network: seekr news - shortcut: sevid - categories: videos - seekr_category: videos - disabled: true - - - name: sjp.pwn - engine: sjp - shortcut: sjp - base_url: https://sjp.pwn.pl/ - timeout: 5.0 - disabled: true - - - name: stract - engine: stract - shortcut: str - disabled: true - - - name: svgrepo - engine: svgrepo - shortcut: svg - timeout: 10.0 - disabled: true - - - name: tootfinder - engine: tootfinder - shortcut: toot - - - name: voidlinux - engine: voidlinux - shortcut: void - disabled: true - - - name: wallhaven - engine: wallhaven - # api_key: abcdefghijklmnopqrstuvwxyz - shortcut: wh - - # wikimini: online encyclopedia for children - # The fulltext and title parameter is necessary for Wikimini because - # sometimes it will not show the results and redirect instead - - name: wikimini - engine: xpath - shortcut: wkmn - search_url: https://fr.wikimini.org/w/index.php?search={query}&title=Sp%C3%A9cial%3ASearch&fulltext=Search - url_xpath: //li/div[@class="mw-search-result-heading"]/a/@href - title_xpath: //li//div[@class="mw-search-result-heading"]/a - content_xpath: //li/div[@class="searchresult"] - categories: general - disabled: true - about: - website: https://wikimini.org/ - wikidata_id: Q3568032 - use_official_api: false - require_api_key: false - results: HTML - language: fr - - - name: wttr.in - engine: wttr - shortcut: wttr - timeout: 9.0 - - - name: yummly - engine: yummly - shortcut: yum - disabled: true - - - name: brave - engine: brave - shortcut: br - time_range_support: true - paging: true - categories: [general, web] - brave_category: search - # brave_spellcheck: true - - - name: brave.images - engine: brave - network: brave - shortcut: brimg - categories: [images, web] - brave_category: images - - - name: brave.videos - engine: brave - network: brave - shortcut: brvid - categories: [videos, web] - brave_category: videos - - - name: 
brave.news - engine: brave - network: brave - shortcut: brnews - categories: news - brave_category: news - - # - name: brave.goggles - # engine: brave - # network: brave - # shortcut: brgog - # time_range_support: true - # paging: true - # categories: [general, web] - # brave_category: goggles - # Goggles: # required! This should be a URL ending in .goggle - - - name: lib.rs - shortcut: lrs - engine: lib_rs - disabled: true - - - name: sourcehut - shortcut: srht - engine: xpath - paging: true - search_url: https://sr.ht/projects?page={pageno}&search={query} - results_xpath: (//div[@class="event-list"])[1]/div[@class="event"] - url_xpath: ./h4/a[2]/@href - title_xpath: ./h4/a[2] - content_xpath: ./p - first_page_num: 1 - categories: [it, repos] - disabled: true - about: - website: https://sr.ht - wikidata_id: Q78514485 - official_api_documentation: https://man.sr.ht/ - use_official_api: false - require_api_key: false - results: HTML - - - name: goo - shortcut: goo - engine: xpath - paging: true - search_url: https://search.goo.ne.jp/web.jsp?MT={query}&FR={pageno}0 - url_xpath: //div[@class="result"]/p[@class='title fsL1']/a/@href - title_xpath: //div[@class="result"]/p[@class='title fsL1']/a - content_xpath: //p[contains(@class,'url fsM')]/following-sibling::p - first_page_num: 0 - categories: [general, web] - disabled: true - timeout: 4.0 - about: - website: https://search.goo.ne.jp - wikidata_id: Q249044 - use_official_api: false - require_api_key: false - results: HTML - language: ja - - - name: bt4g - engine: bt4g - shortcut: bt4g - - - name: pkg.go.dev - engine: pkg_go_dev - shortcut: pgo - disabled: true - -# Doku engine lets you access to any Doku wiki instance: -# A public one or a privete/corporate one. -# - name: ubuntuwiki -# engine: doku -# shortcut: uw -# base_url: 'https://doc.ubuntu-fr.org' - -# Be careful when enabling this engine if you are -# running a public instance. Do not expose any sensitive -# information. 
You can restrict access by configuring a list -# of access tokens under tokens. -# - name: git grep -# engine: command -# command: ['git', 'grep', '{{QUERY}}'] -# shortcut: gg -# tokens: [] -# disabled: true -# delimiter: -# chars: ':' -# keys: ['filepath', 'code'] - -# Be careful when enabling this engine if you are -# running a public instance. Do not expose any sensitive -# information. You can restrict access by configuring a list -# of access tokens under tokens. -# - name: locate -# engine: command -# command: ['locate', '{{QUERY}}'] -# shortcut: loc -# tokens: [] -# disabled: true -# delimiter: -# chars: ' ' -# keys: ['line'] - -# Be careful when enabling this engine if you are -# running a public instance. Do not expose any sensitive -# information. You can restrict access by configuring a list -# of access tokens under tokens. -# - name: find -# engine: command -# command: ['find', '.', '-name', '{{QUERY}}'] -# query_type: path -# shortcut: fnd -# tokens: [] -# disabled: true -# delimiter: -# chars: ' ' -# keys: ['line'] - -# Be careful when enabling this engine if you are -# running a public instance. Do not expose any sensitive -# information. You can restrict access by configuring a list -# of access tokens under tokens. -# - name: pattern search in files -# engine: command -# command: ['fgrep', '{{QUERY}}'] -# shortcut: fgr -# tokens: [] -# disabled: true -# delimiter: -# chars: ' ' -# keys: ['line'] - -# Be careful when enabling this engine if you are -# running a public instance. Do not expose any sensitive -# information. You can restrict access by configuring a list -# of access tokens under tokens. 
-# - name: regex search in files -# engine: command -# command: ['grep', '{{QUERY}}'] -# shortcut: gr -# tokens: [] -# disabled: true -# delimiter: -# chars: ' ' -# keys: ['line'] - -doi_resolvers: - oadoi.org: 'https://oadoi.org/' - doi.org: 'https://doi.org/' - doai.io: 'https://dissem.in/' - sci-hub.se: 'https://sci-hub.se/' - sci-hub.st: 'https://sci-hub.st/' - sci-hub.ru: 'https://sci-hub.ru/' - -default_doi_resolver: 'oadoi.org' diff --git a/api/core/tools/provider/builtin/searxng/docker/uwsgi.ini b/api/core/tools/provider/builtin/searxng/docker/uwsgi.ini deleted file mode 100644 index 9db3d762649fc5..00000000000000 --- a/api/core/tools/provider/builtin/searxng/docker/uwsgi.ini +++ /dev/null @@ -1,54 +0,0 @@ -[uwsgi] -# Who will run the code -uid = searxng -gid = searxng - -# Number of workers (usually CPU count) -# default value: %k (= number of CPU core, see Dockerfile) -workers = %k - -# Number of threads per worker -# default value: 4 (see Dockerfile) -threads = 4 - -# The right granted on the created socket -chmod-socket = 666 - -# Plugin to use and interpreter config -single-interpreter = true -master = true -plugin = python3 -lazy-apps = true -enable-threads = 4 - -# Module to import -module = searx.webapp - -# Virtualenv and python path -pythonpath = /usr/local/searxng/ -chdir = /usr/local/searxng/searx/ - -# automatically set processes name to something meaningful -auto-procname = true - -# Disable request logging for privacy -disable-logging = true -log-5xx = true - -# Set the max size of a request (request-body excluded) -buffer-size = 8192 - -# No keep alive -# See https://github.com/searx/searx-docker/issues/24 -add-header = Connection: close - -# Follow SIGTERM convention -# See https://github.com/searxng/searxng/issues/3427 -die-on-term - -# uwsgi serves the static files -static-map = /static=/usr/local/searxng/searx/static -# expires set to one day -static-expires = /* 86400 -static-gzip-all = True -offload-threads = 4 diff --git 
a/api/core/tools/provider/builtin/searxng/searxng.py b/api/core/tools/provider/builtin/searxng/searxng.py deleted file mode 100644 index b7bbcc60b1ed26..00000000000000 --- a/api/core/tools/provider/builtin/searxng/searxng.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.searxng.tools.searxng_search import SearXNGSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class SearXNGProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - SearXNGSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={"query": "SearXNG", "limit": 1, "search_type": "general"}, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/searxng/searxng.yaml b/api/core/tools/provider/builtin/searxng/searxng.yaml deleted file mode 100644 index 9554c93d5a0c53..00000000000000 --- a/api/core/tools/provider/builtin/searxng/searxng.yaml +++ /dev/null @@ -1,24 +0,0 @@ -identity: - author: Junytang - name: searxng - label: - en_US: SearXNG - zh_Hans: SearXNG - description: - en_US: A free internet metasearch engine. 
- zh_Hans: 开源免费的互联网元搜索引擎 - icon: icon.svg - tags: - - search - - productivity -credentials_for_provider: - searxng_base_url: - type: text-input - required: true - label: - en_US: SearXNG base URL - zh_Hans: SearXNG base URL - placeholder: - en_US: Please input your SearXNG base URL - zh_Hans: 请输入您的 SearXNG base URL - url: https://docs.dify.ai/tutorials/tool-configuration/searxng diff --git a/api/core/tools/provider/builtin/searxng/tools/searxng_search.py b/api/core/tools/provider/builtin/searxng/tools/searxng_search.py deleted file mode 100644 index c5e339a108e5b2..00000000000000 --- a/api/core/tools/provider/builtin/searxng/tools/searxng_search.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import Any - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class SearXNGSearchTool(BuiltinTool): - """ - Tool for performing a search using SearXNG engine. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: - """ - Invoke the SearXNG search tool. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Any]): The parameters for the tool invocation. - - Returns: - ToolInvokeMessage | list[ToolInvokeMessage]: The result of the tool invocation. 
- """ - - host = self.runtime.credentials.get("searxng_base_url") - if not host: - raise Exception("SearXNG api is required") - - response = requests.get( - host, - params={ - "q": tool_parameters.get("query"), - "format": "json", - "categories": tool_parameters.get("search_type", "general"), - }, - ) - - if response.status_code != 200: - raise Exception(f"Error {response.status_code}: {response.text}") - - res = response.json().get("results", []) - if not res: - return self.create_text_message(f"No results found, get response: {response.content}") - - return [self.create_json_message(item) for item in res] diff --git a/api/core/tools/provider/builtin/searxng/tools/searxng_search.yaml b/api/core/tools/provider/builtin/searxng/tools/searxng_search.yaml deleted file mode 100644 index a5e448a30375b4..00000000000000 --- a/api/core/tools/provider/builtin/searxng/tools/searxng_search.yaml +++ /dev/null @@ -1,69 +0,0 @@ -identity: - name: searxng_search - author: Junytang - label: - en_US: SearXNG Search - zh_Hans: SearXNG 搜索 -description: - human: - en_US: SearXNG is a free internet metasearch engine which aggregates results from more than 70 search services. - zh_Hans: SearXNG 是一个免费的互联网元搜索引擎,它从70多个不同的搜索服务中聚合搜索结果。 - llm: Perform searches on SearXNG and get results. 
-parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询语句 - llm_description: Key words for searching - form: llm - - name: search_type - type: select - required: true - label: - en_US: search type - zh_Hans: 搜索类型 - default: general - options: - - value: general - label: - en_US: General - zh_Hans: 综合 - - value: images - label: - en_US: Images - zh_Hans: 图片 - - value: videos - label: - en_US: Videos - zh_Hans: 视频 - - value: news - label: - en_US: News - zh_Hans: 新闻 - - value: map - label: - en_US: Map - zh_Hans: 地图 - - value: music - label: - en_US: Music - zh_Hans: 音乐 - - value: it - label: - en_US: It - zh_Hans: 信息技术 - - value: science - label: - en_US: Science - zh_Hans: 科学 - - value: files - label: - en_US: Files - zh_Hans: 文件 - - value: social_media - label: - en_US: Social Media - zh_Hans: 社交媒体 - form: form diff --git a/api/core/tools/provider/builtin/serper/_assets/icon.svg b/api/core/tools/provider/builtin/serper/_assets/icon.svg deleted file mode 100644 index 3f973a552e5e17..00000000000000 --- a/api/core/tools/provider/builtin/serper/_assets/icon.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - serper - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/serper/serper.py b/api/core/tools/provider/builtin/serper/serper.py deleted file mode 100644 index cb1d090a9dd4b0..00000000000000 --- a/api/core/tools/provider/builtin/serper/serper.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.serper.tools.serper_search import SerperSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class SerperProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - SerperSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - 
tool_parameters={"query": "test", "result_type": "link"}, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/serper/serper.yaml b/api/core/tools/provider/builtin/serper/serper.yaml deleted file mode 100644 index b3b2d76c4b6573..00000000000000 --- a/api/core/tools/provider/builtin/serper/serper.yaml +++ /dev/null @@ -1,31 +0,0 @@ -identity: - author: zhuhao - name: serper - label: - en_US: Serper - zh_Hans: Serper - pt_BR: Serper - description: - en_US: Serper is a powerful real-time search engine tool API that provides structured data from Google Search. - zh_Hans: Serper 是一个强大的实时搜索引擎工具API,可提供来自 Google 搜索引擎搜索的结构化数据。 - pt_BR: Serper is a powerful real-time search engine tool API that provides structured data from Google Search. - icon: icon.svg - tags: - - search -credentials_for_provider: - serperapi_api_key: - type: secret-input - required: true - label: - en_US: Serper API key - zh_Hans: Serper API key - pt_BR: Serper API key - placeholder: - en_US: Please input your Serper API key - zh_Hans: 请输入你的 Serper API key - pt_BR: Please input your Serper API key - help: - en_US: Get your Serper API key from Serper - zh_Hans: 从 Serper 获取您的 Serper API key - pt_BR: Get your Serper API key from Serper - url: https://serper.dev/api-key diff --git a/api/core/tools/provider/builtin/serper/tools/serper_search.py b/api/core/tools/provider/builtin/serper/tools/serper_search.py deleted file mode 100644 index 7baebbf95855e0..00000000000000 --- a/api/core/tools/provider/builtin/serper/tools/serper_search.py +++ /dev/null @@ -1,34 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -SERPER_API_URL = "https://google.serper.dev/search" - - -class SerperSearchTool(BuiltinTool): - def _parse_response(self, response: dict) -> dict: - result = {} - if "knowledgeGraph" in response: - 
result["title"] = response["knowledgeGraph"].get("title", "") - result["description"] = response["knowledgeGraph"].get("description", "") - if "organic" in response: - result["organic"] = [ - {"title": item.get("title", ""), "link": item.get("link", ""), "snippet": item.get("snippet", "")} - for item in response["organic"] - ] - return result - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - params = {"q": tool_parameters["query"], "gl": "us", "hl": "en"} - headers = {"X-API-KEY": self.runtime.credentials["serperapi_api_key"], "Content-Type": "application/json"} - response = requests.get(url=SERPER_API_URL, params=params, headers=headers) - response.raise_for_status() - valuable_res = self._parse_response(response.json()) - return self.create_json_message(valuable_res) diff --git a/api/core/tools/provider/builtin/serper/tools/serper_search.yaml b/api/core/tools/provider/builtin/serper/tools/serper_search.yaml deleted file mode 100644 index e1c0a056e65513..00000000000000 --- a/api/core/tools/provider/builtin/serper/tools/serper_search.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: serper - author: zhuhao - label: - en_US: Serper - zh_Hans: Serper - pt_BR: Serper -description: - human: - en_US: A tool for performing a Google search and extracting snippets and webpages.Input should be a search query. - zh_Hans: 一个用于执行 Google 搜索并提取片段和网页的工具。输入应该是一个搜索查询。 - pt_BR: A tool for performing a Google search and extracting snippets and webpages.Input should be a search query. - llm: A tool for performing a Google search and extracting snippets and webpages.Input should be a search query. 
-parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询语句 - pt_BR: Query string - human_description: - en_US: used for searching - zh_Hans: 用于搜索网页内容 - pt_BR: used for searching - llm_description: key words for searching - form: llm diff --git a/api/core/tools/provider/builtin/siliconflow/_assets/icon.svg b/api/core/tools/provider/builtin/siliconflow/_assets/icon.svg deleted file mode 100644 index ad6b384f7acd21..00000000000000 --- a/api/core/tools/provider/builtin/siliconflow/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/siliconflow/siliconflow.py b/api/core/tools/provider/builtin/siliconflow/siliconflow.py deleted file mode 100644 index 37a0b0755b1d39..00000000000000 --- a/api/core/tools/provider/builtin/siliconflow/siliconflow.py +++ /dev/null @@ -1,17 +0,0 @@ -import requests - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class SiliconflowProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - url = "https://api.siliconflow.cn/v1/models" - headers = { - "accept": "application/json", - "authorization": f"Bearer {credentials.get('siliconFlow_api_key')}", - } - - response = requests.get(url, headers=headers) - if response.status_code != 200: - raise ToolProviderCredentialValidationError("SiliconFlow API key is invalid") diff --git a/api/core/tools/provider/builtin/siliconflow/siliconflow.yaml b/api/core/tools/provider/builtin/siliconflow/siliconflow.yaml deleted file mode 100644 index 46be99f262f211..00000000000000 --- a/api/core/tools/provider/builtin/siliconflow/siliconflow.yaml +++ /dev/null @@ -1,21 +0,0 @@ -identity: - author: hjlarry - name: siliconflow - label: - en_US: SiliconFlow - zh_CN: 硅基流动 - description: - en_US: The image generation API provided by SiliconFlow includes 
Flux and Stable Diffusion models. - zh_CN: 硅基流动提供的图片生成 API,包含 Flux 和 Stable Diffusion 模型。 - icon: icon.svg - tags: - - image -credentials_for_provider: - siliconFlow_api_key: - type: secret-input - required: true - label: - en_US: SiliconFlow API Key - placeholder: - en_US: Please input your SiliconFlow API key - url: https://cloud.siliconflow.cn/account/ak diff --git a/api/core/tools/provider/builtin/siliconflow/tools/flux.py b/api/core/tools/provider/builtin/siliconflow/tools/flux.py deleted file mode 100644 index 0d16ff385eb30d..00000000000000 --- a/api/core/tools/provider/builtin/siliconflow/tools/flux.py +++ /dev/null @@ -1,43 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -FLUX_URL = { - "schnell": "https://api.siliconflow.cn/v1/black-forest-labs/FLUX.1-schnell/text-to-image", - "dev": "https://api.siliconflow.cn/v1/image/generations", -} - - -class FluxTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - headers = { - "accept": "application/json", - "content-type": "application/json", - "authorization": f"Bearer {self.runtime.credentials['siliconFlow_api_key']}", - } - - payload = { - "prompt": tool_parameters.get("prompt"), - "image_size": tool_parameters.get("image_size", "1024x1024"), - "seed": tool_parameters.get("seed"), - "num_inference_steps": tool_parameters.get("num_inference_steps", 20), - } - model = tool_parameters.get("model", "schnell") - url = FLUX_URL.get(model) - if model == "dev": - payload["model"] = "black-forest-labs/FLUX.1-dev" - - response = requests.post(url, json=payload, headers=headers) - if response.status_code != 200: - return self.create_text_message(f"Got Error Response:{response.text}") - - res = response.json() - result = [self.create_json_message(res)] - for image in res.get("images", []): - 
result.append(self.create_image_message(image=image.get("url"), save_as=self.VariableKey.IMAGE.value)) - return result diff --git a/api/core/tools/provider/builtin/siliconflow/tools/flux.yaml b/api/core/tools/provider/builtin/siliconflow/tools/flux.yaml deleted file mode 100644 index d06b9bf3e1f489..00000000000000 --- a/api/core/tools/provider/builtin/siliconflow/tools/flux.yaml +++ /dev/null @@ -1,88 +0,0 @@ -identity: - name: flux - author: hjlarry - label: - en_US: Flux - icon: icon.svg -description: - human: - en_US: Generate image via SiliconFlow's flux model. - llm: This tool is used to generate image from prompt via SiliconFlow's flux model. -parameters: - - name: prompt - type: string - required: true - label: - en_US: prompt - zh_Hans: 提示词 - human_description: - en_US: The text prompt used to generate the image. - zh_Hans: 建议用英文的生成图片提示词以获得更好的生成效果。 - llm_description: this prompt text will be used to generate image. - form: llm - - name: model - type: select - required: true - options: - - value: schnell - label: - en_US: Flux.1-schnell - - value: dev - label: - en_US: Flux.1-dev - default: schnell - label: - en_US: Choose Image Model - zh_Hans: 选择生成图片的模型 - form: form - - name: image_size - type: select - required: true - options: - - value: 1024x1024 - label: - en_US: 1024x1024 - - value: 768x1024 - label: - en_US: 768x1024 - - value: 576x1024 - label: - en_US: 576x1024 - - value: 512x1024 - label: - en_US: 512x1024 - - value: 1024x576 - label: - en_US: 1024x576 - - value: 768x512 - label: - en_US: 768x512 - default: 1024x1024 - label: - en_US: Choose Image Size - zh_Hans: 选择生成的图片大小 - form: form - - name: num_inference_steps - type: number - required: true - default: 20 - min: 1 - max: 100 - label: - en_US: Num Inference Steps - zh_Hans: 生成图片的步数 - form: form - human_description: - en_US: The number of inference steps to perform. More steps produce higher quality but take longer. 
- zh_Hans: 执行的推理步骤数量。更多的步骤可以产生更高质量的结果,但需要更长的时间。 - - name: seed - type: number - min: 0 - max: 9999999999 - label: - en_US: Seed - zh_Hans: 种子 - human_description: - en_US: The same seed and prompt can produce similar images. - zh_Hans: 相同的种子和提示可以产生相似的图像。 - form: form diff --git a/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.py b/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.py deleted file mode 100644 index d6a0b03d1b185e..00000000000000 --- a/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -SDURL = { - "sd_3": "https://api.siliconflow.cn/v1/stabilityai/stable-diffusion-3-medium/text-to-image", - "sd_xl": "https://api.siliconflow.cn/v1/stabilityai/stable-diffusion-xl-base-1.0/text-to-image", -} - - -class StableDiffusionTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - headers = { - "accept": "application/json", - "content-type": "application/json", - "authorization": f"Bearer {self.runtime.credentials['siliconFlow_api_key']}", - } - - model = tool_parameters.get("model", "sd_3") - url = SDURL.get(model) - - payload = { - "prompt": tool_parameters.get("prompt"), - "negative_prompt": tool_parameters.get("negative_prompt", ""), - "image_size": tool_parameters.get("image_size", "1024x1024"), - "batch_size": tool_parameters.get("batch_size", 1), - "seed": tool_parameters.get("seed"), - "guidance_scale": tool_parameters.get("guidance_scale", 7.5), - "num_inference_steps": tool_parameters.get("num_inference_steps", 20), - } - - response = requests.post(url, json=payload, headers=headers) - if response.status_code != 200: - return self.create_text_message(f"Got Error Response:{response.text}") - - res = 
response.json() - result = [self.create_json_message(res)] - for image in res.get("images", []): - result.append(self.create_image_message(image=image.get("url"), save_as=self.VariableKey.IMAGE.value)) - return result diff --git a/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.yaml b/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.yaml deleted file mode 100644 index dce10adc87f222..00000000000000 --- a/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.yaml +++ /dev/null @@ -1,121 +0,0 @@ -identity: - name: stable_diffusion - author: hjlarry - label: - en_US: Stable Diffusion - icon: icon.svg -description: - human: - en_US: Generate image via SiliconFlow's stable diffusion model. - llm: This tool is used to generate image from prompt via SiliconFlow's stable diffusion model. -parameters: - - name: prompt - type: string - required: true - label: - en_US: prompt - zh_Hans: 提示词 - human_description: - en_US: The text prompt used to generate the image. - zh_Hans: 用于生成图片的文字提示词 - llm_description: this prompt text will be used to generate image. - form: llm - - name: negative_prompt - type: string - label: - en_US: negative prompt - zh_Hans: 负面提示词 - human_description: - en_US: Describe what you don't want included in the image. - zh_Hans: 描述您不希望包含在图片中的内容。 - llm_description: Describe what you don't want included in the image. 
- form: llm - - name: model - type: select - required: true - options: - - value: sd_3 - label: - en_US: Stable Diffusion 3 - - value: sd_xl - label: - en_US: Stable Diffusion XL - default: sd_3 - label: - en_US: Choose Image Model - zh_Hans: 选择生成图片的模型 - form: form - - name: image_size - type: select - required: true - options: - - value: 1024x1024 - label: - en_US: 1024x1024 - - value: 1024x2048 - label: - en_US: 1024x2048 - - value: 1152x2048 - label: - en_US: 1152x2048 - - value: 1536x1024 - label: - en_US: 1536x1024 - - value: 1536x2048 - label: - en_US: 1536x2048 - - value: 2048x1152 - label: - en_US: 2048x1152 - default: 1024x1024 - label: - en_US: Choose Image Size - zh_Hans: 选择生成图片的大小 - form: form - - name: batch_size - type: number - required: true - default: 1 - min: 1 - max: 4 - label: - en_US: Number Images - zh_Hans: 生成图片的数量 - form: form - - name: guidance_scale - type: number - required: true - default: 7.5 - min: 0 - max: 100 - label: - en_US: Guidance Scale - zh_Hans: 与提示词紧密性 - human_description: - en_US: Classifier Free Guidance. How close you want the model to stick to your prompt when looking for a related image to show you. - zh_Hans: 无分类器引导。您希望模型在寻找相关图片向您展示时,与您的提示保持多紧密的关联度。 - form: form - - name: num_inference_steps - type: number - required: true - default: 20 - min: 1 - max: 100 - label: - en_US: Num Inference Steps - zh_Hans: 生成图片的步数 - human_description: - en_US: The number of inference steps to perform. More steps produce higher quality but take longer. - zh_Hans: 执行的推理步骤数量。更多的步骤可以产生更高质量的结果,但需要更长的时间。 - form: form - - name: seed - type: number - min: 0 - max: 9999999999 - label: - en_US: Seed - zh_Hans: 种子 - human_description: - en_US: The same seed and prompt can produce similar images. 
- zh_Hans: 相同的种子和提示可以产生相似的图像。 - form: form diff --git a/api/core/tools/provider/builtin/slack/_assets/icon.svg b/api/core/tools/provider/builtin/slack/_assets/icon.svg deleted file mode 100644 index e43c2c47dc128e..00000000000000 --- a/api/core/tools/provider/builtin/slack/_assets/icon.svg +++ /dev/null @@ -1,22 +0,0 @@ - - - Slack - - - - - - - diff --git a/api/core/tools/provider/builtin/slack/slack.py b/api/core/tools/provider/builtin/slack/slack.py deleted file mode 100644 index 2de7911f63072a..00000000000000 --- a/api/core/tools/provider/builtin/slack/slack.py +++ /dev/null @@ -1,8 +0,0 @@ -from core.tools.provider.builtin.slack.tools.slack_webhook import SlackWebhookTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class SlackProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - SlackWebhookTool() - pass diff --git a/api/core/tools/provider/builtin/slack/slack.yaml b/api/core/tools/provider/builtin/slack/slack.yaml deleted file mode 100644 index 1070ffbf038a40..00000000000000 --- a/api/core/tools/provider/builtin/slack/slack.yaml +++ /dev/null @@ -1,16 +0,0 @@ -identity: - author: Pan YANG - name: slack - label: - en_US: Slack - zh_Hans: Slack - pt_BR: Slack - description: - en_US: Slack Webhook - zh_Hans: Slack Webhook - pt_BR: Slack Webhook - icon: icon.svg - tags: - - social - - productivity -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/slack/tools/slack_webhook.py b/api/core/tools/provider/builtin/slack/tools/slack_webhook.py deleted file mode 100644 index 85e0de76755898..00000000000000 --- a/api/core/tools/provider/builtin/slack/tools/slack_webhook.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class SlackWebhookTool(BuiltinTool): - def _invoke( - self, user_id: str, 
tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Incoming Webhooks - API Document: https://api.slack.com/messaging/webhooks - """ - - content = tool_parameters.get("content", "") - if not content: - return self.create_text_message("Invalid parameter content") - - webhook_url = tool_parameters.get("webhook_url", "") - - if not webhook_url.startswith("https://hooks.slack.com/"): - return self.create_text_message( - f"Invalid parameter webhook_url ${webhook_url}, not a valid Slack webhook URL" - ) - - headers = { - "Content-Type": "application/json", - } - params = {} - payload = { - "text": content, - } - - try: - res = httpx.post(webhook_url, headers=headers, params=params, json=payload) - if res.is_success: - return self.create_text_message("Text message was sent successfully") - else: - return self.create_text_message( - f"Failed to send the text message, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to send message through webhook. {}".format(e)) diff --git a/api/core/tools/provider/builtin/slack/tools/slack_webhook.yaml b/api/core/tools/provider/builtin/slack/tools/slack_webhook.yaml deleted file mode 100644 index b838d743733ec9..00000000000000 --- a/api/core/tools/provider/builtin/slack/tools/slack_webhook.yaml +++ /dev/null @@ -1,40 +0,0 @@ -identity: - name: slack_webhook - author: Pan YANG - label: - en_US: Incoming Webhook to send message - zh_Hans: 通过入站 Webhook 发送消息 - pt_BR: Incoming Webhook to send message - icon: icon.svg -description: - human: - en_US: Sending a message on Slack via the Incoming Webhook - zh_Hans: 通过入站 Webhook 在 Slack 上发送消息 - pt_BR: Sending a message on Slack via the Incoming Webhook - llm: A tool for sending messages to a chat on Slack. 
-parameters: - - name: webhook_url - type: string - required: true - label: - en_US: Slack Incoming Webhook url - zh_Hans: Slack 入站 Webhook 的 url - pt_BR: Slack Incoming Webhook url - human_description: - en_US: Slack Incoming Webhook url - zh_Hans: Slack 入站 Webhook 的 url - pt_BR: Slack Incoming Webhook url - form: form - - name: content - type: string - required: true - label: - en_US: content - zh_Hans: 消息内容 - pt_BR: content - human_description: - en_US: Content to sent to the channel or person. - zh_Hans: 消息内容文本 - pt_BR: Content to sent to the channel or person. - llm_description: Content of the message - form: llm diff --git a/api/core/tools/provider/builtin/spark/__init__.py b/api/core/tools/provider/builtin/spark/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/tools/provider/builtin/spark/_assets/icon.svg b/api/core/tools/provider/builtin/spark/_assets/icon.svg deleted file mode 100644 index ef0a9131a48e43..00000000000000 --- a/api/core/tools/provider/builtin/spark/_assets/icon.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/api/core/tools/provider/builtin/spark/spark.py b/api/core/tools/provider/builtin/spark/spark.py deleted file mode 100644 index e0b1a58a3f679a..00000000000000 --- a/api/core/tools/provider/builtin/spark/spark.py +++ /dev/null @@ -1,36 +0,0 @@ -import json - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.spark.tools.spark_img_generation import spark_response -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class SparkProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - if "APPID" not in credentials or not credentials.get("APPID"): - raise ToolProviderCredentialValidationError("APPID is required.") - if "APISecret" not in credentials or not credentials.get("APISecret"): - raise ToolProviderCredentialValidationError("APISecret is 
required.") - if "APIKey" not in credentials or not credentials.get("APIKey"): - raise ToolProviderCredentialValidationError("APIKey is required.") - - appid = credentials.get("APPID") - apisecret = credentials.get("APISecret") - apikey = credentials.get("APIKey") - prompt = "a cute black dog" - - try: - response = spark_response(prompt, appid, apikey, apisecret) - data = json.loads(response) - code = data["header"]["code"] - - if code == 0: - # 0 success, - pass - else: - raise ToolProviderCredentialValidationError("image generate error, code:{}".format(code)) - except Exception as e: - raise ToolProviderCredentialValidationError("APPID APISecret APIKey is invalid. {}".format(e)) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/spark/spark.yaml b/api/core/tools/provider/builtin/spark/spark.yaml deleted file mode 100644 index fa1543443a2af8..00000000000000 --- a/api/core/tools/provider/builtin/spark/spark.yaml +++ /dev/null @@ -1,61 +0,0 @@ -identity: - author: Onelevenvy - name: spark - label: - en_US: Spark - zh_Hans: 讯飞星火 - pt_BR: Spark - description: - en_US: Spark Platform Toolkit - zh_Hans: 讯飞星火平台工具 - pt_BR: Pacote de Ferramentas da Plataforma Spark - icon: icon.svg - tags: - - image -credentials_for_provider: - APPID: - type: secret-input - required: true - label: - en_US: Spark APPID - zh_Hans: APPID - pt_BR: Spark APPID - help: - en_US: Please input your APPID - zh_Hans: 请输入你的 APPID - pt_BR: Please input your APPID - placeholder: - en_US: Please input your APPID - zh_Hans: 请输入你的 APPID - pt_BR: Please input your APPID - APISecret: - type: secret-input - required: true - label: - en_US: Spark APISecret - zh_Hans: APISecret - pt_BR: Spark APISecret - help: - en_US: Please input your Spark APISecret - zh_Hans: 请输入你的 APISecret - pt_BR: Please input your Spark APISecret - placeholder: - en_US: Please input your Spark APISecret - zh_Hans: 请输入你的 APISecret - pt_BR: Please input your Spark 
APISecret - APIKey: - type: secret-input - required: true - label: - en_US: Spark APIKey - zh_Hans: APIKey - pt_BR: Spark APIKey - help: - en_US: Please input your Spark APIKey - zh_Hans: 请输入你的 APIKey - pt_BR: Please input your Spark APIKey - placeholder: - en_US: Please input your Spark APIKey - zh_Hans: 请输入你的 APIKey - pt_BR: Please input Spark APIKey - url: https://console.xfyun.cn/services diff --git a/api/core/tools/provider/builtin/spark/tools/spark_img_generation.py b/api/core/tools/provider/builtin/spark/tools/spark_img_generation.py deleted file mode 100644 index 81d9e8d94185f7..00000000000000 --- a/api/core/tools/provider/builtin/spark/tools/spark_img_generation.py +++ /dev/null @@ -1,139 +0,0 @@ -import base64 -import hashlib -import hmac -import json -from base64 import b64decode -from datetime import datetime -from time import mktime -from typing import Any, Union -from urllib.parse import urlencode -from wsgiref.handlers import format_date_time - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class AssembleHeaderError(Exception): - def __init__(self, msg): - self.message = msg - - -class Url: - def __init__(self, host, path, schema): - self.host = host - self.path = path - self.schema = schema - - -# calculate sha256 and encode to base64 -def sha256base64(data): - sha256 = hashlib.sha256() - sha256.update(data) - digest = base64.b64encode(sha256.digest()).decode(encoding="utf-8") - return digest - - -def parse_url(request_url): - stidx = request_url.index("://") - host = request_url[stidx + 3 :] - schema = request_url[: stidx + 3] - edidx = host.index("/") - if edidx <= 0: - raise AssembleHeaderError("invalid request url:" + request_url) - path = host[edidx:] - host = host[:edidx] - u = Url(host, path, schema) - return u - - -def assemble_ws_auth_url(request_url, method="GET", api_key="", api_secret=""): - u = parse_url(request_url) - host = u.host - path = 
u.path - now = datetime.now() - date = format_date_time(mktime(now.timetuple())) - signature_origin = "host: {}\ndate: {}\n{} {} HTTP/1.1".format(host, date, method, path) - signature_sha = hmac.new( - api_secret.encode("utf-8"), - signature_origin.encode("utf-8"), - digestmod=hashlib.sha256, - ).digest() - signature_sha = base64.b64encode(signature_sha).decode(encoding="utf-8") - authorization_origin = ( - f'api_key="{api_key}", algorithm="hmac-sha256", headers="host date request-line", signature="{signature_sha}"' - ) - - authorization = base64.b64encode(authorization_origin.encode("utf-8")).decode(encoding="utf-8") - values = {"host": host, "date": date, "authorization": authorization} - - return request_url + "?" + urlencode(values) - - -def get_body(appid, text): - body = { - "header": {"app_id": appid, "uid": "123456789"}, - "parameter": {"chat": {"domain": "general", "temperature": 0.5, "max_tokens": 4096}}, - "payload": {"message": {"text": [{"role": "user", "content": text}]}}, - } - return body - - -def spark_response(text, appid, apikey, apisecret): - host = "http://spark-api.cn-huabei-1.xf-yun.com/v2.1/tti" - url = assemble_ws_auth_url(host, method="POST", api_key=apikey, api_secret=apisecret) - content = get_body(appid, text) - response = requests.post(url, json=content, headers={"content-type": "application/json"}).text - return response - - -class SparkImgGeneratorTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - - if "APPID" not in self.runtime.credentials or not self.runtime.credentials.get("APPID"): - return self.create_text_message("APPID is required.") - if "APISecret" not in self.runtime.credentials or not self.runtime.credentials.get("APISecret"): - return self.create_text_message("APISecret is required.") - if "APIKey" not in self.runtime.credentials or not self.runtime.credentials.get("APIKey"): - return 
self.create_text_message("APIKey is required.") - - prompt = tool_parameters.get("prompt", "") - if not prompt: - return self.create_text_message("Please input prompt") - res = self.img_generation(prompt) - result = [] - for image in res: - result.append( - self.create_blob_message( - blob=b64decode(image["base64_image"]), - meta={"mime_type": "image/png"}, - save_as=self.VariableKey.IMAGE.value, - ) - ) - return result - - def img_generation(self, prompt): - response = spark_response( - text=prompt, - appid=self.runtime.credentials.get("APPID"), - apikey=self.runtime.credentials.get("APIKey"), - apisecret=self.runtime.credentials.get("APISecret"), - ) - data = json.loads(response) - code = data["header"]["code"] - if code != 0: - return self.create_text_message(f"error: {code}, {data}") - else: - text = data["payload"]["choices"]["text"] - image_content = text[0] - image_base = image_content["content"] - json_data = {"base64_image": image_base} - return [json_data] diff --git a/api/core/tools/provider/builtin/spark/tools/spark_img_generation.yaml b/api/core/tools/provider/builtin/spark/tools/spark_img_generation.yaml deleted file mode 100644 index d44bbc9564ef88..00000000000000 --- a/api/core/tools/provider/builtin/spark/tools/spark_img_generation.yaml +++ /dev/null @@ -1,36 +0,0 @@ -identity: - name: spark_img_generation - author: Onelevenvy - label: - en_US: Spark Image Generation - zh_Hans: 图片生成 - pt_BR: Geração de imagens Spark - icon: icon.svg - description: - en_US: Spark Image Generation - zh_Hans: 图片生成 - pt_BR: Geração de imagens Spark -description: - human: - en_US: Generate images based on user input, with image generation API - provided by Spark - zh_Hans: 根据用户的输入生成图片,由讯飞星火提供图片生成api - pt_BR: Gerar imagens com base na entrada do usuário, com API de geração - de imagem fornecida pela Spark - llm: spark_img_generation is a tool used to generate images from text -parameters: - - name: prompt - type: string - required: true - label: - en_US: Prompt - 
zh_Hans: 提示词 - pt_BR: Prompt - human_description: - en_US: Image prompt - zh_Hans: 图像提示词 - pt_BR: Image prompt - llm_description: Image prompt of spark_img_generation tooll, you should - describe the image you want to generate as a list of words as possible - as detailed - form: llm diff --git a/api/core/tools/provider/builtin/spider/_assets/icon.svg b/api/core/tools/provider/builtin/spider/_assets/icon.svg deleted file mode 100644 index 604a09d01d7444..00000000000000 --- a/api/core/tools/provider/builtin/spider/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ -Spider v1 Logo diff --git a/api/core/tools/provider/builtin/spider/spider.py b/api/core/tools/provider/builtin/spider/spider.py deleted file mode 100644 index 5959555318722e..00000000000000 --- a/api/core/tools/provider/builtin/spider/spider.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.spider.spiderApp import Spider -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class SpiderProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - app = Spider(api_key=credentials["spider_api_key"]) - app.scrape_url(url="https://spider.cloud") - except AttributeError as e: - # Handle cases where NoneType is not iterable, which might indicate API issues - if "NoneType" in str(e) and "not iterable" in str(e): - raise ToolProviderCredentialValidationError("API is currently down, try again in 15 minutes", str(e)) - else: - raise ToolProviderCredentialValidationError("An unexpected error occurred.", str(e)) - except Exception as e: - raise ToolProviderCredentialValidationError("An unexpected error occurred.", str(e)) diff --git a/api/core/tools/provider/builtin/spider/spider.yaml b/api/core/tools/provider/builtin/spider/spider.yaml deleted file mode 100644 index 45702c85ddea24..00000000000000 --- 
a/api/core/tools/provider/builtin/spider/spider.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - author: William Espegren - name: spider - label: - en_US: Spider - zh_CN: Spider - description: - en_US: Spider API integration, returning LLM-ready data by scraping & crawling websites. - zh_CN: Spider API 集成,通过爬取和抓取网站返回 LLM-ready 数据。 - icon: icon.svg - tags: - - search - - utilities -credentials_for_provider: - spider_api_key: - type: secret-input - required: true - label: - en_US: Spider API Key - zh_CN: Spider API 密钥 - placeholder: - en_US: Please input your Spider API key - zh_CN: 请输入您的 Spider API 密钥 - help: - en_US: Get your Spider API key from your Spider dashboard - zh_CN: 从您的 Spider 仪表板中获取 Spider API 密钥。 - url: https://spider.cloud/ diff --git a/api/core/tools/provider/builtin/spider/spiderApp.py b/api/core/tools/provider/builtin/spider/spiderApp.py deleted file mode 100644 index 4bc446a1a092a3..00000000000000 --- a/api/core/tools/provider/builtin/spider/spiderApp.py +++ /dev/null @@ -1,221 +0,0 @@ -import os -from typing import Literal, Optional, TypedDict - -import requests - - -class RequestParamsDict(TypedDict, total=False): - url: Optional[str] - request: Optional[Literal["http", "chrome", "smart"]] - limit: Optional[int] - return_format: Optional[Literal["raw", "markdown", "html2text", "text", "bytes"]] - tld: Optional[bool] - depth: Optional[int] - cache: Optional[bool] - budget: Optional[dict[str, int]] - locale: Optional[str] - cookies: Optional[str] - stealth: Optional[bool] - headers: Optional[dict[str, str]] - anti_bot: Optional[bool] - metadata: Optional[bool] - viewport: Optional[dict[str, int]] - encoding: Optional[str] - subdomains: Optional[bool] - user_agent: Optional[str] - store_data: Optional[bool] - gpt_config: Optional[list[str]] - fingerprint: Optional[bool] - storageless: Optional[bool] - readability: Optional[bool] - proxy_enabled: Optional[bool] - respect_robots: Optional[bool] - query_selector: Optional[str] - full_resources: 
Optional[bool] - request_timeout: Optional[int] - run_in_background: Optional[bool] - skip_config_checks: Optional[bool] - - -class Spider: - def __init__(self, api_key: Optional[str] = None): - """ - Initialize the Spider with an API key. - - :param api_key: A string of the API key for Spider. Defaults to the SPIDER_API_KEY environment variable. - :raises ValueError: If no API key is provided. - """ - self.api_key = api_key or os.getenv("SPIDER_API_KEY") - if self.api_key is None: - raise ValueError("No API key provided") - - def api_post( - self, - endpoint: str, - data: dict, - stream: bool, - content_type: str = "application/json", - ): - """ - Send a POST request to the specified API endpoint. - - :param endpoint: The API endpoint to which the POST request is sent. - :param data: The data (dictionary) to be sent in the POST request. - :param stream: Boolean indicating if the response should be streamed. - :return: The JSON response or the raw response stream if stream is True. - """ - headers = self._prepare_headers(content_type) - response = self._post_request(f"https://api.spider.cloud/v1/{endpoint}", data, headers, stream) - - if stream: - return response - elif response.status_code == 200: - return response.json() - else: - self._handle_error(response, f"post to {endpoint}") - - def api_get(self, endpoint: str, stream: bool, content_type: str = "application/json"): - """ - Send a GET request to the specified endpoint. - - :param endpoint: The API endpoint from which to retrieve data. - :return: The JSON decoded response. - """ - headers = self._prepare_headers(content_type) - response = self._get_request(f"https://api.spider.cloud/v1/{endpoint}", headers, stream) - if response.status_code == 200: - return response.json() - else: - self._handle_error(response, f"get from {endpoint}") - - def get_credits(self): - """ - Retrieve the account's remaining credits. - - :return: JSON response containing the number of credits left. 
- """ - return self.api_get("credits", stream=False) - - def scrape_url( - self, - url: str, - params: Optional[RequestParamsDict] = None, - stream: bool = False, - content_type: str = "application/json", - ): - """ - Scrape data from the specified URL. - - :param url: The URL from which to scrape data. - :param params: Optional dictionary of additional parameters for the scrape request. - :return: JSON response containing the scraping results. - """ - params = params or {} - - # Add { "return_format": "markdown" } to the params if not already present - if "return_format" not in params: - params["return_format"] = "markdown" - - # Set limit to 1 - params["limit"] = 1 - - return self.api_post("crawl", {"url": url, **(params or {})}, stream, content_type) - - def crawl_url( - self, - url: str, - params: Optional[RequestParamsDict] = None, - stream: bool = False, - content_type: str = "application/json", - ): - """ - Start crawling at the specified URL. - - :param url: The URL to begin crawling. - :param params: Optional dictionary with additional parameters to customize the crawl. - :param stream: Boolean indicating if the response should be streamed. Defaults to False. - :return: JSON response or the raw response stream if streaming enabled. - """ - params = params or {} - - # Add { "return_format": "markdown" } to the params if not already present - if "return_format" not in params: - params["return_format"] = "markdown" - - return self.api_post("crawl", {"url": url, **(params or {})}, stream, content_type) - - def links( - self, - url: str, - params: Optional[RequestParamsDict] = None, - stream: bool = False, - content_type: str = "application/json", - ): - """ - Retrieve links from the specified URL. - - :param url: The URL from which to extract links. - :param params: Optional parameters for the link retrieval request. - :return: JSON response containing the links. 
- """ - return self.api_post("links", {"url": url, **(params or {})}, stream, content_type) - - def extract_contacts( - self, - url: str, - params: Optional[RequestParamsDict] = None, - stream: bool = False, - content_type: str = "application/json", - ): - """ - Extract contact information from the specified URL. - - :param url: The URL from which to extract contact information. - :param params: Optional parameters for the contact extraction. - :return: JSON response containing extracted contact details. - """ - return self.api_post( - "pipeline/extract-contacts", - {"url": url, **(params or {})}, - stream, - content_type, - ) - - def label( - self, - url: str, - params: Optional[RequestParamsDict] = None, - stream: bool = False, - content_type: str = "application/json", - ): - """ - Apply labeling to data extracted from the specified URL. - - :param url: The URL to label data from. - :param params: Optional parameters to guide the labeling process. - :return: JSON response with labeled data. - """ - return self.api_post("pipeline/label", {"url": url, **(params or {})}, stream, content_type) - - def _prepare_headers(self, content_type: str = "application/json"): - return { - "Content-Type": content_type, - "Authorization": f"Bearer {self.api_key}", - "User-Agent": "Spider-Client/0.0.27", - } - - def _post_request(self, url: str, data, headers, stream=False): - return requests.post(url, headers=headers, json=data, stream=stream) - - def _get_request(self, url: str, headers, stream=False): - return requests.get(url, headers=headers, stream=stream) - - def _delete_request(self, url: str, headers, stream=False): - return requests.delete(url, headers=headers, stream=stream) - - def _handle_error(self, response, action): - if response.status_code in {402, 409, 500}: - error_message = response.json().get("error", "Unknown error occurred") - raise Exception(f"Failed to {action}. Status code: {response.status_code}. 
Error: {error_message}") - else: - raise Exception(f"Unexpected error occurred while trying to {action}. Status code: {response.status_code}") diff --git a/api/core/tools/provider/builtin/spider/tools/scraper_crawler.py b/api/core/tools/provider/builtin/spider/tools/scraper_crawler.py deleted file mode 100644 index 20d2daef550de1..00000000000000 --- a/api/core/tools/provider/builtin/spider/tools/scraper_crawler.py +++ /dev/null @@ -1,49 +0,0 @@ -from typing import Any, Union - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.provider.builtin.spider.spiderApp import Spider -from core.tools.tool.builtin_tool import BuiltinTool - - -class ScrapeTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - # initialize the app object with the api key - app = Spider(api_key=self.runtime.credentials["spider_api_key"]) - - url = tool_parameters["url"] - mode = tool_parameters["mode"] - - options = { - "limit": tool_parameters.get("limit", 0), - "depth": tool_parameters.get("depth", 0), - "blacklist": tool_parameters.get("blacklist", "").split(",") if tool_parameters.get("blacklist") else [], - "whitelist": tool_parameters.get("whitelist", "").split(",") if tool_parameters.get("whitelist") else [], - "readability": tool_parameters.get("readability", False), - } - - result = "" - - try: - if mode == "scrape": - scrape_result = app.scrape_url( - url=url, - params=options, - ) - - for i in scrape_result: - result += "URL: " + i.get("url", "") + "\n" - result += "CONTENT: " + i.get("content", "") + "\n\n" - elif mode == "crawl": - crawl_result = app.crawl_url( - url=tool_parameters["url"], - params=options, - ) - for i in crawl_result: - result += "URL: " + i.get("url", "") + "\n" - result += "CONTENT: " + i.get("content", "") + "\n\n" - except Exception as e: - return self.create_text_message("An error occurred", str(e)) - - return 
self.create_text_message(result) diff --git a/api/core/tools/provider/builtin/spider/tools/scraper_crawler.yaml b/api/core/tools/provider/builtin/spider/tools/scraper_crawler.yaml deleted file mode 100644 index 5b20c2fc2f70ad..00000000000000 --- a/api/core/tools/provider/builtin/spider/tools/scraper_crawler.yaml +++ /dev/null @@ -1,102 +0,0 @@ -identity: - name: scraper_crawler - author: William Espegren - label: - en_US: Web Scraper & Crawler - zh_Hans: 网页抓取与爬虫 -description: - human: - en_US: A tool for scraping & crawling webpages. Input should be a url. - zh_Hans: 用于抓取和爬取网页的工具。输入应该是一个网址。 - llm: A tool for scraping & crawling webpages. Input should be a url. -parameters: - - name: url - type: string - required: true - label: - en_US: URL - zh_Hans: 网址 - human_description: - en_US: url to be scraped or crawled - zh_Hans: 要抓取或爬取的网址 - llm_description: url to either be scraped or crawled - form: llm - - name: mode - type: select - required: true - options: - - value: scrape - label: - en_US: scrape - zh_Hans: 抓取 - - value: crawl - label: - en_US: crawl - zh_Hans: 爬取 - default: crawl - label: - en_US: Mode - zh_Hans: 模式 - human_description: - en_US: used for selecting to either scrape the website or crawl the entire website following subpages - zh_Hans: 用于选择抓取网站或爬取整个网站及其子页面 - form: form - - name: limit - type: number - required: false - label: - en_US: maximum number of pages to crawl - zh_Hans: 最大爬取页面数 - human_description: - en_US: specify the maximum number of pages to crawl per website. the crawler will stop after reaching this limit. - zh_Hans: 指定每个网站要爬取的最大页面数。爬虫将在达到此限制后停止。 - form: form - min: 0 - default: 0 - - name: depth - type: number - required: false - label: - en_US: maximum depth of pages to crawl - zh_Hans: 最大爬取深度 - human_description: - en_US: the crawl limit for maximum depth. 
- zh_Hans: 最大爬取深度的限制。 - form: form - min: 0 - default: 0 - - name: blacklist - type: string - required: false - label: - en_US: url patterns to exclude - zh_Hans: 要排除的URL模式 - human_description: - en_US: blacklist a set of paths that you do not want to crawl. you can use regex patterns to help with the list. - zh_Hans: 指定一组不想爬取的路径。您可以使用正则表达式模式来帮助定义列表。 - placeholder: - en_US: /blog/*, /about - form: form - - name: whitelist - type: string - required: false - label: - en_US: URL patterns to include - zh_Hans: 要包含的URL模式 - human_description: - en_US: Whitelist a set of paths that you want to crawl, ignoring all other routes that do not match the patterns. You can use regex patterns to help with the list. - zh_Hans: 指定一组要爬取的路径,忽略所有不匹配模式的其他路由。您可以使用正则表达式模式来帮助定义列表。 - placeholder: - en_US: /blog/*, /about - form: form - - name: readability - type: boolean - required: false - label: - en_US: Pre-process the content for LLM usage - zh_Hans: 仅返回页面的主要内容 - human_description: - en_US: Use Mozilla's readability to pre-process the content for reading. This may drastically improve the content for LLM usage. 
- zh_Hans: 如果启用,爬虫将仅返回页面的主要内容,不包括标题、导航、页脚等。 - form: form - default: false diff --git a/api/core/tools/provider/builtin/stability/_assets/icon.svg b/api/core/tools/provider/builtin/stability/_assets/icon.svg deleted file mode 100644 index 56357a35557ac3..00000000000000 --- a/api/core/tools/provider/builtin/stability/_assets/icon.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/stability/stability.py b/api/core/tools/provider/builtin/stability/stability.py deleted file mode 100644 index f09d81ac270288..00000000000000 --- a/api/core/tools/provider/builtin/stability/stability.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Any - -from core.tools.provider.builtin.stability.tools.base import BaseStabilityAuthorization -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class StabilityToolProvider(BuiltinToolProviderController, BaseStabilityAuthorization): - """ - This class is responsible for providing the stability tool. - """ - - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - """ - This method is responsible for validating the credentials. 
- """ - self.sd_validate_credentials(credentials) diff --git a/api/core/tools/provider/builtin/stability/stability.yaml b/api/core/tools/provider/builtin/stability/stability.yaml deleted file mode 100644 index c3e01c1e314d51..00000000000000 --- a/api/core/tools/provider/builtin/stability/stability.yaml +++ /dev/null @@ -1,31 +0,0 @@ -identity: - author: Dify - name: stability - label: - en_US: Stability - zh_Hans: Stability - pt_BR: Stability - description: - en_US: Activating humanity's potential through generative AI - zh_Hans: 通过生成式 AI 激活人类的潜力 - pt_BR: Activating humanity's potential through generative AI - icon: icon.svg - tags: - - image -credentials_for_provider: - api_key: - type: secret-input - required: true - label: - en_US: API key - zh_Hans: API key - pt_BR: API key - placeholder: - en_US: Please input your API key - zh_Hans: 请输入你的 API key - pt_BR: Please input your API key - help: - en_US: Get your API key from Stability - zh_Hans: 从 Stability 获取你的 API key - pt_BR: Get your API key from Stability - url: https://platform.stability.ai/account/keys diff --git a/api/core/tools/provider/builtin/stability/tools/base.py b/api/core/tools/provider/builtin/stability/tools/base.py deleted file mode 100644 index c3b7edbefa2447..00000000000000 --- a/api/core/tools/provider/builtin/stability/tools/base.py +++ /dev/null @@ -1,31 +0,0 @@ -import requests -from yarl import URL - -from core.tools.errors import ToolProviderCredentialValidationError - - -class BaseStabilityAuthorization: - def sd_validate_credentials(self, credentials: dict): - """ - This method is responsible for validating the credentials. 
- """ - api_key = credentials.get("api_key", "") - if not api_key: - raise ToolProviderCredentialValidationError("API key is required.") - - response = requests.get( - URL("https://api.stability.ai") / "v1" / "user" / "account", - headers=self.generate_authorization_headers(credentials), - timeout=(5, 30), - ) - - if not response.ok: - raise ToolProviderCredentialValidationError("Invalid API key.") - - return True - - def generate_authorization_headers(self, credentials: dict) -> dict[str, str]: - """ - This method is responsible for generating the authorization headers. - """ - return {"Authorization": f'Bearer {credentials.get("api_key", "")}'} diff --git a/api/core/tools/provider/builtin/stability/tools/text2image.py b/api/core/tools/provider/builtin/stability/tools/text2image.py deleted file mode 100644 index 6bcf315484ad50..00000000000000 --- a/api/core/tools/provider/builtin/stability/tools/text2image.py +++ /dev/null @@ -1,56 +0,0 @@ -from typing import Any - -from httpx import post - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.provider.builtin.stability.tools.base import BaseStabilityAuthorization -from core.tools.tool.builtin_tool import BuiltinTool - - -class StableDiffusionTool(BuiltinTool, BaseStabilityAuthorization): - """ - This class is responsible for providing the stable diffusion tool. - """ - - model_endpoint_map: dict[str, str] = { - "sd3": "https://api.stability.ai/v2beta/stable-image/generate/sd3", - "sd3-turbo": "https://api.stability.ai/v2beta/stable-image/generate/sd3", - "core": "https://api.stability.ai/v2beta/stable-image/generate/core", - } - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: - """ - Invoke the tool. 
- """ - payload = { - "prompt": tool_parameters.get("prompt", ""), - "aspect_ratio": tool_parameters.get("aspect_ratio", "16:9") or tool_parameters.get("aspect_radio", "16:9"), - "mode": "text-to-image", - "seed": tool_parameters.get("seed", 0), - "output_format": "png", - } - - model = tool_parameters.get("model", "core") - - if model in {"sd3", "sd3-turbo"}: - payload["model"] = tool_parameters.get("model") - - if model != "sd3-turbo": - payload["negative_prompt"] = tool_parameters.get("negative_prompt", "") - - response = post( - self.model_endpoint_map[tool_parameters.get("model", "core")], - headers={ - "accept": "image/*", - **self.generate_authorization_headers(self.runtime.credentials), - }, - files={key: (None, str(value)) for key, value in payload.items()}, - timeout=(5, 30), - ) - - if not response.status_code == 200: - raise Exception(response.text) - - return self.create_blob_message( - blob=response.content, meta={"mime_type": "image/png"}, save_as=self.VariableKey.IMAGE.value - ) diff --git a/api/core/tools/provider/builtin/stability/tools/text2image.yaml b/api/core/tools/provider/builtin/stability/tools/text2image.yaml deleted file mode 100644 index 21345f9f187f07..00000000000000 --- a/api/core/tools/provider/builtin/stability/tools/text2image.yaml +++ /dev/null @@ -1,142 +0,0 @@ -identity: - name: stability_text2image - author: Dify - label: - en_US: StableDiffusion - zh_Hans: 稳定扩散 - pt_BR: StableDiffusion -description: - human: - en_US: A tool for generate images based on the text input - zh_Hans: 一个基于文本输入生成图像的工具 - pt_BR: A tool for generate images based on the text input - llm: A tool for generate images based on the text input -parameters: - - name: prompt - type: string - required: true - label: - en_US: Prompt - zh_Hans: 提示词 - pt_BR: Prompt - human_description: - en_US: used for generating images - zh_Hans: 用于生成图像 - pt_BR: used for generating images - llm_description: key words for generating images - form: llm - - name: model - type: select - 
default: sd3-turbo - required: true - label: - en_US: Model - zh_Hans: 模型 - pt_BR: Model - options: - - value: core - label: - en_US: Core - zh_Hans: Core - pt_BR: Core - - value: sd3 - label: - en_US: Stable Diffusion 3 - zh_Hans: Stable Diffusion 3 - pt_BR: Stable Diffusion 3 - - value: sd3-turbo - label: - en_US: Stable Diffusion 3 Turbo - zh_Hans: Stable Diffusion 3 Turbo - pt_BR: Stable Diffusion 3 Turbo - human_description: - en_US: Model for generating images - zh_Hans: 用于生成图像的模型 - pt_BR: Model for generating images - llm_description: Model for generating images - form: form - - name: negative_prompt - type: string - default: bad art, ugly, deformed, watermark, duplicated, discontinuous lines - required: false - label: - en_US: Negative Prompt - zh_Hans: 负面提示 - pt_BR: Negative Prompt - human_description: - en_US: Negative Prompt - zh_Hans: 负面提示 - pt_BR: Negative Prompt - llm_description: Negative Prompt - form: form - - name: seeds - type: number - default: 0 - required: false - label: - en_US: Seeds - zh_Hans: 种子 - pt_BR: Seeds - human_description: - en_US: Seeds - zh_Hans: 种子 - pt_BR: Seeds - llm_description: Seeds - min: 0 - max: 4294967294 - form: form - - name: aspect_ratio - type: select - default: '16:9' - options: - - value: '16:9' - label: - en_US: '16:9' - zh_Hans: '16:9' - pt_BR: '16:9' - - value: '1:1' - label: - en_US: '1:1' - zh_Hans: '1:1' - pt_BR: '1:1' - - value: '21:9' - label: - en_US: '21:9' - zh_Hans: '21:9' - pt_BR: '21:9' - - value: '2:3' - label: - en_US: '2:3' - zh_Hans: '2:3' - pt_BR: '2:3' - - value: '4:5' - label: - en_US: '4:5' - zh_Hans: '4:5' - pt_BR: '4:5' - - value: '5:4' - label: - en_US: '5:4' - zh_Hans: '5:4' - pt_BR: '5:4' - - value: '9:16' - label: - en_US: '9:16' - zh_Hans: '9:16' - pt_BR: '9:16' - - value: '9:21' - label: - en_US: '9:21' - zh_Hans: '9:21' - pt_BR: '9:21' - required: false - label: - en_US: Aspect Ratio - zh_Hans: 长宽比 - pt_BR: Aspect Ratio - human_description: - en_US: Aspect Ratio - zh_Hans: 长宽比 - 
pt_BR: Aspect Ratio - llm_description: Aspect Ratio - form: form diff --git a/api/core/tools/provider/builtin/stablediffusion/_assets/icon.png b/api/core/tools/provider/builtin/stablediffusion/_assets/icon.png deleted file mode 100644 index fc372b28f1ccfd7bea27dfe7ef0450e98a0be7e1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 16324 zcmZ{rbyQT}8}1Lo(A^=;7Z8x{PATbB8UZQkMq&m;KvKFvX;47A8ITqMkr0U)=?G1&oAkx%OH3R?*@FNC*iw*v<^r?IW z01EM%s!GN|e-5ky{fy^NGF6Xgy*&GtlPvbzN+koxxdK)xWxT_^-zuK1yph1R$5d9v zg?_5}+*DopOvBl%X621aW$}B`o0Y{d|C?P2&t}+v^@-O#%Q9O_3O>4s{Z0{Nf#>0p z=l*^QtI9hB-FNw1m6XX>K~vL`j<*t{ z+DVsMwLE|6N-`hC^^giXm^!N!{3)!7eKfg#9S)DF#ZSAL$o$^@-~<3uA2egkxpUq? z^Ntj3ei{DDwqAG-1L%Z#rO$__7ihwtsd~s`C`C|Yl9}T40)R9C5Z2Gsd+1jUt1PSv zaxwnACB2n;b9SD>O2IQcT&Lsk?+$6EdL$WnB{i>vaPzG9h1dO(taLRjH02C$#UgkYKm%*l-QrLBp5<6tx)2lW;wKTxEmxu!@V9d< zj~!=@pO+k`bvplL&fts)xU$1^Q&Q`*|@A)68vRdpF* z1OTk|97Bv-YDu+>fnbeiRo=|zd*PUZwz5nTVa`H=`D{?F@L%?)`?ZbP&K`TVg=iH* zKu!emD-pwt`W&YuS4|o^^7?j0Vp|PEFI*=FeW#?oZYgjXFhgvDA`t@o4jz}Xus<#l znPf6H;Rr1@YtdE}gE+LL?=HG5)*Sf#>riJUrM6ogTp*2I(#N?M#88}1QpU;%{jX^B z{KkCI^h%oK9itki52=GG<1KrWJ5{ogaI#p?)W3b^H)T$Pz|`pw>~9@$&MmA?jE_Sg zV7pl(0h;5V10OtS|F<3IH?$K$_R+0>qo*-LW%QJf5(s$(mCW#YlF3I{Sm81jF@qrA z9s+Vqj&Fsudmi4X{_ddgDSGim#bAMk7B~YSS*FqlZ*hTBfspDujCG|EsEc%$ohiTx&!o=Py*7lB^|_gno+J7lP*S@JP^vBfY_=SEH zI6d zg;_Qf1=zwm@ylv;{N}EF!O|HBr!Q7!ZMe^Y8zrssye#6P=tah2JsFSmvvK5R48;Y) zRV(y&O|_hCet)Uf4Z(uh68>{==q3z}E+~8*MjLehS~f_^W})IU_x2ZBpu?KcFn!+{ z+k&z|SzYvAyYBT@6$$K-@M%8@9_C`he5Y^h2R3%)%roPWrj(PjT|&IZl1<+ ziN(}=m+S)D%Hzhh>jAVpCfy+t~4dU-r#*>mJ8yrsVHyP^Egdw8C zKn^!r-IvPyi1%dxQ=jUDI0af9`oBYxFd;8sXu@^(fjKXU4G};PTC%WO-2 z;1A^ayHn`}_|Y1KA>KofO1^xjcGAmx&tx4;(w73oJ=|oOF%V&MIREiO!E^DRuN|FH zE_CGWXV2nSSjRS8$+H%r{`L!%eNW@!=Pz*;Me#$b6N3)Zeci_YJ_x3XGeaURB(UAR zY~R+nrari$D2??0!{a-q3uQ=KJFR8&+4F9%GZK+L>Ez?%%(YOpv}{!-Yjk5Xqu)W8 z3?<{lJn(Y+6GZ~fPZhlga4kKibDRb(Uz{S&L7Q5tzS zRJwCgpMGAUYpY204xZmd^ki(k*vrPpIQX|R&k}L?ClFC)p@eXS9-AB!`+H}!%t}eA 
zM+FPu`U4Jw!6II#2e7^8OGNiqmUGh4O5u;Qx~vNz$DgjUD?riF6J!y~&NO;&t` zVUdGXT!2-FajKm9++^pm^xZm63N|xjOF(@FB~JQy+pkWeR(8`SQ>HT@(E-8YWI44E4I>IhUNI#XHD>7LE)JE)?JUL&nn(M zA#m$`IZPSL{LxTuD<$fHQ(NjW+3V54Va=*Vo)KKbtE+HasD~9} zN2c;n#|x9Zf5II?+dA;%uES!NkuYdb1_e`_F+|n|4(a)xQJ7r903IaMf&J_5^0E;& zhWHvS29+|Vi0D+tWy}eIr*wUMCTuiV89v%W8EQ|aT1;f^iZ|HxN&1q@GRHDUVfjTA zA8XdorY4lf%hd3An&`)$0z=0#hRye9Dbm93-8G{06ah;W7OzhIktr80N6Qy8Y~2lx z*zeB{t9rW2xg*F6yszzL{m!}DkM{Rsw*p_tClMasKEb_>;1)P2@YuP*<0_C#+mjhx zGT+CWh`{ux%-#6TQ7ci^L_q`K%EOOD7d8o^WhD%)+HVP-o?U#)F7lPbzdhWM=gc*G z`nL9k+UOEiaQ6Up%tZ0D?1Pe)<)?8)m@vdTLDi?Na6&r5_*cpu(*W$CiW+lrYoi6J zKNJ?$WR!@56`oZV>1n9@_RB)wCXOt$qN-RAuU2@aaP+c>*b?1~LO(74iNu8x7F4Tx zoY(hq&CClU#l7C2%rT?KOv6()*T$aRWMUTmQ2oRpM-bZRdW_nzRz<%v`z!OpWaGIN zyRU6ScTt4{Q|IV^S&(1bo>1uRDk^VX5DRW0_5(|RmbTr0h1PkICs?d-A!y3bIx$8G z1DUwR>vgh903;k)M5vfuXh@&#yQS}M&!cXGW2yZxBGc_(yfzqtFa{h5@w%>#qUz4> zqj2hC({7~gw%x<$DUh0RaWq&wF9X6LvJqXn)@~xADWfM$By@cOfA!JW!jUz&xQEW9 zpx|Z)CB=htU6x$`a0*I%zWCKXD8ktPs^1p2wN22R=0(i}bqHWn{NU49^$2)L0IB%n z!%C~$HHtbe`d~OuI-@YIs{{Cb2ew%Uflh*_MS}|-BmuGoT6V?A6!0k71 zJ8@@0jjXeC0MO2vW|z7>svq`zuDJ%!& zx?7X`hMvk3X~K=T$<{+%P?m(_b6K!FNMeE8u=NUfn;m5Eb;M6z5Y;rKk@Z z&{u~z31b21wcVR>af4V8mEe&u$;gs_L{0?h{^Y2&1zkSx^Xgw##X!1$eU4C$V{=_J z1DEgdEHfcXr8X@I7m}H!M{(4UEr-YP9i(|>mR(xzPS5^9DI}n-3=B*m+{Ff zdc(CH&ToTupW5kE2g*&ro0`NemvoZZ*j%67gJfaLPx^;1Ketr=z%w6KQb9fM+yxPAM~d zLizGx!s$dy*qB5dzI6^hp)Jmy#^=idpwJuEag}A;A}?)3u4NP=^QmRMP6Cg=MhvaT zO~onXw_;u06Tdb%76C8n4Tscc#M9jS)8bY>6A|E_6%pL zJs&hFI~<}4A6SU-y1r5(ZTigvJYKsDApKY)BWP1W9J>(;1;S<*F7qgG;16ZO9+Sqk zPqe1`l**V|%?9>^GH_pPa9;YmuDeNlDJyntr|M~0kxbE5tQeX+8^ULVit-I!fQ_Ru z&+Mr$+G>l1CD)zvSm0ofg}-(!s_@b#jhP+T{uTz7LkoD!XUnG*1)(E6JX>@X1AvDVNyHaZ-7*FX1T}QR;ot2+vqUX!uUg99jbuUizh`K7p}8=7uX+`- zukDzpMG;vF;A00h_`nVb3%4V&CZYAcS%}}yVnR|mR4}q=7%~fIO?i26ayCF`ozKnh9 zHgHteB+=WzOPk~`m{q7M4La*yb3_<}9{$SxIbwK&mCL$r8CT1zQ={gxe zd@;e1{X?UO(%W^*q4@mwrgCq3Sih?92q10iKjI~S2`3@3Uco?w+sG#Ys1^2RuP3%j zDQq5->gnTF$I$|)D|@hOKLVHYwofSc@bdaX9w`2{^aiP`xjgZct^(;4uZ%?kwKVy} 
zc$@jvZ*zeJ6?i}ziQQK)cE4|ZB@KajM2RxejYn8B)ivr>ywRbMA6AfvoRB>g&Bu)q z>Ro8=4oeCv0=%3wEvvy>{!+Ve8;f%?qEyTQ`WlYL7{Tp(}@_*=4cIca>~Dr?SODgRdCh$ww5)Ll9reU`8Tvt&`_N>uA}kZfj1oXHBgRBi@WH z=Tv{j@#0IJ>$`6T3&|@haGWPyo`uQa+RFA)>E7Cpm8uAU4GP|U`pakRL!@MRy#o;H zp>IbEExHR8gy?;p#;D>Nfv*`nZ z{ljjUh0Itx_cpLCf*wPiiXsLg_yd8g*C^h2U`wdrD;`2Y;P{!lv{0ExCny+u%Nqp} zW<{xR<=JijVKTvxr)1pTQl8=V#f-6saz^mvy8Rm|67$!;AwVSypDrt#_+5SiT(?uc(;Ah2wDC;e0ob0XcHjwJ4zli2dE`T(l1VTMkBg&b0DSAZA zE(&nu0Na9A zHJX+@aO%(4Ug4>HiIlR6jZayLwQG+=(K^pSd#%aZg%lA-a{p(cLk1>`oC9E6SBV3I z=ZhoXaSB@@F%4@xFS-6HVpG0j&K)D3ZDoLf&Rni$oExnJ^kIW%wY-XF4a+8dR+J{O zxN$-bR3QB5Fgk@?GGNkd;BXf)7>^yjwMYmYC!5#9ux@r3m}`P6mBG_7q+)va=&|-+ zFO6CvR$$3oG-sV$ucGMskp-#fw+XF;(bF3#ppU%p0`aoL>;mjnzR`$8;sD)C^E>Y} z6S;i8KS~L~vrfm%cT5!ypJYbKs3YRM?Av+{1i?x$i!XaEn{jyV#EFbZ6@vk0^iqYo zGAGXrcQ+@ZfCu-uDU(~T?7#A&(L{jPs?FcJK2aj6NV6{(g`RqxOhL>f=7)}%rp%_J z&2~_jMGXWi7=Mw|<2QdN)01e*BFWDc?zS8RwkbphP z&F&vcCGlKmPiueup@ZNwT&o{$d@TV~8};76_7h6Zb_b(T^`0^j-R|0+Og@cbCxWPo z)sHm~F;`L*V_^(b)T(*WSir3t7u)MZ&6Uc|G=>hXQCU;r_8x(+R~YP9fk&B>M0bpo z28&E@5yC!--i;O2(f)3^Dr9^UEOAOV9RJ zc1La$zbtU~D$>k!64ZNf(gbaY9} z=0o0eW%Cg?KGzhNgfXP!-K-2oWd^I5a<$G_hX$RqLRf8qol1BiimZGj?m%ZR+RCMMW>!-m9W?{(fA zQnXBn@9M*hDCPZAbC!bO%|HZmj319c8nYn1Zj_A@HT!1SK(wg#Qx;&#&ZEJlEHyd5 zLS(Y1k1Cpon^eESo`m8<9u4RXZ7AoZJ1PMvXG5{SyfIxI0+B>Np~f~X+A z`Zrj=8!A7fw09-o@O!7-Tv&q^iXt;yakbwGy127XFzHPt6vcz8)|M94Bx^kNM73s( zr!_z?mFL(17_%{gH?45)X6;D=0FKqJe*?9s)3ZV^z#AMq zuOgTqpJS$P2(i6B@JWdHytYNCab;aQD z%MoyeH;QQwFfjo@Aqn$W6}ryFGt!sY1*$F`rdl#rSW}UBmYP9z`EPlXzZ$*ur2QDX zBu#Ky#LO{ZjC_xQnoGqzk1rL`i-6do5E)M|lh(MHzgGOxlhb-zdru$QyK+nZi{~Q9 z83RUg%OZaqXuf^d;6|I_vu|XwQFcR)Z8BS$V5I7Cg~M;1FQ_kU3EE=*8AgT@0>9Tt zjAv7C$fNBVCgiZI?n6$`{;NqtFw+Al-kZ=pkC_%?;S-r>0i=nVKLh|BB*&&1Qw<78VJUe}ps2SdEWbv(-fSRTE-Lo7 zDD$arW_EB72KYV!n3))Bzl8r|KAGdAW*(MvU%Q5GoB0-#C6E6gwn5Yc&%KNfWjbg7 zaIwGO2K>6I#wr{REpkFnv=4sNl(XpDe5WsoUAUs7t{0q04Y|O=0Se%~;m5US?_Rlp zjioftpgjds*X=w4qZgy1vSp3}xe1^G=C%y+ZF?z6tS^L^+C;|~(AXBg*WfrnkAGI( 
zeqR1oF`{6t7!Sd6yeQbaAQ3&i91Ro)R(Eo?yPtd#_l+t4nRyX}0uWB7I806BuKDG@ z>(ki3Ov;Folw0wD?es-!7gy!RS-C?6;26%_R*D%!QM_~6ANYRYNScx?FUB~V3-L&{2?q*jXWWhOm3u#(LovM1X5z>)&9dU)ngT3|as?zrWp%+_4 zhcgWXxw>UR zLU4!oxig9vTsW%oJ?EynWsjZ5zyPJ;W{5m-eDy! z*P|g_DiQDf9D6eP&)kcJOSLN7WWOR9g~D^Nf?O(ymFQVz$>-|I^pUjx{;ts4oP8&j zJR6zzMJ0*nCh#;=VarW1IM8R~r@+Zq>EAD7lYdFAe6Mu4)IPNoU9OiJ`+^{O4rBOx zcu7a=wPq_Rq+npBx!Y`nV0wtul3&F`w&R_lGCp2OaV^=2Eyt^N z-MIk`a?H{hvA_{H!gvAc9uoZcQVINi2s1_L>`$>PCuPDXoj5mL!_T`J^Oz++fYkzT z|AZTTC=ru>eR4;C%c8@yWB{*wgSwGwCI67uy~9wXu;XDJR%|2Nt5EK=PVv|8iVC!w z9(xVrpN689urTg-@kw4pLmsS?SaZ!VZiBEgLT>xuOHM7KKJ*2L=iH#OaKn!5JD88% zyb+@=k?YRe;*E>nTg_o8ir9_a+@b!M>@nkZfDz5gz0GA@-9CnQ0^v3el%G!O@n-Q&n)1z`< z`X{z_7@-x6yVnU@3G{QT6&xoL*zA7j=8E6_#T)74>=^ea2L+QJZ}6a@X*iP@1{Pc0 zl$li7K|h5Y`&)bITcmKmlo5k9@ELH?Y1@l`9pK(MDT+<(=pV?(xbK-E?EPS^<92cy zs=a_WiVk=w7aW$b>-z6I!pzjjuvPT8*J@8K@f9L@v$K=>lkst_u~#NJ!E;hARgZgu zLbj?3rZATESN2MEI-BS@qi2DE|w-N`$LJRF8vS8hEKo# z=N!c>Xl{~mOk8jf1{LqU7^aU)|7w$0$WVglb^8jYp0Xpc`3RwM^Hup8P?;KV4~MI3 zE$c2LoB8l>nV#Vp^T$q{->Jv^lpLMN9k1NjKQq38f{|8?Uq==Ww>RHWh`lX-Xiq1B z)YHkQ$|xst)*5Pzqrx#O&8F7%FffALZ>t{oSwH;nRxYVM%pQRgeii_#%!8BVU-fuk zDG{}xylzi&&V|S#S0>Df8ZdBBNhIDIUv0o%evFlB=HIeaCEB+Rl43+~yyou3BP-Ap zHrGNaH3gOxSb}Y{um_3T*slJb$0V;DCR0l}-l}bThxoZu*5Tu!2$&Natgc=sg7Tj5&wQchSpgj1-Nf6`AV)fQSwx)dR;|Fb z6wC|gv^IM%7-SzYFk(eaBDN|x^3t=kRg9zzcSDl2&TrrH^*pB4)!^uygnjl9;>^wC zzVtR;$i+tLVaZ4A^(mOMrxE}2CDt5C{RET#G2cR|FgP45R)`J=as2MkqIm1c<|n4$ z@IiHA%E&I*1BdMERm>}%CPn=(wmxAir6RgAiXG0uOTCgaNw`Q+WwOpa&+LcL4`1SH zR%qIA1dagd8DGQ9K4Z$PHqmJpu7T>^(%!w%y)>xJRqyGmVsGKJwL2lQ9@eGLBk%J+ zI-z1b+iXUgUyl@C{G@ZT{wca)F|PTsT8s9X>g#JvPHP6d-W|?A^PQMBWH;w`32j>& z-|K$vSQlD8U6Kje9a8;Rh`s==GKz)%ViL-qi%Iab`Nj7W;dSL4F<%ZDa{qKL#8Ku` zFQkaqI{Mr(IpPm|8)DoU7b(fWyRpHN4m3a9n{n1TJ;P=@1_P7wyku+qon0MsLc>-< zP=-nD(l5M`?()8^m%G?8nnXA6OQfG5Lu0U>R5rfbsm5ITvmj*z*TcG*m+@pNt7X#) zI1_0R{Rd~=x~dv38oj2cLMU$zes)L-EhHo>Tt4NE&ah(C-^U zy6@V>Piinmc*al9rd9$9(y98AM-~-f-HxV;C%Z$6bzSnxzYNcC 
z^Ks)5eFCRns{i+Jo$d^L;@YMVqS8bU0NU#quRP?sSV#Dq$u0Na!RR`>5#7E7!zv*3U-$!AX z1>w7dW=yAnnDIJ~{$0b$g*=4|{O?6@Zw1Fala@^1J-MiD=>2(ya{P^5sAWkG;LiQp zu@d8ckQRS&6G{P!#sZM;;XW*2-a8Wb^2LeW|$Xl9TlmJj8YIsZd>YfA=(Ir#k#_hwBMl6d{agPCJD&|eYa=bSL z+LKv)e)KJUesWLz(}dc4@@CF+>S)FGx9@S}D`@+uW8{e@8p#a%$bBuid}2N1Zl1}c zf9-t7PPPBN1MdwTw#(XZJ!dt~+5x!9FF^l5B=(r7M(~3#lYqdxiA39W7ev?V|zZ^l)`u@H*2xUmebHB8vN-A)ujm3AWimo!z z=3w?NNnL&b6<$sh))Nh-J9_XPa`{f)TYo(Gay5jS^5>ZqdCyCHS3&HVJ-xRQ=Jme7 ztN$YeX=N5d0IoxSQRZg|ss1W!jpR((LSsYkCl$J0Czczrck$Gf@RoVfzmFu&cirK^ zvTc-7s>yR!Jq?~$Rj4ol07x4X{$u0YG1)tiu)=zVIASa9{NSl89C`HWB1Bg5_9MeO zD`B zexynxX()wcx>%`<&3#dej{#>^K7Jf8g-UJo+^@3$<&hhln6y;pG^VCoF7zjiPtAWYe$Jfla#FoMP&YZ(r!CoouDP#`W>E@&W?qB`~>HH+t_t^!^20L6_x0U%X zo_@vz?nHSG1{g+g8?^bFscF!gLOb`TH=>-37_ciA3zTqsc+c&>o)H-6rgUT{f#7KdwCk#BKk6QGbGq1 zCg=B0Df$07?%x%OGQUYw3u-o!U``$p-g02het($;*rF_QPKq6sKnZ6vmE$x-980b2 zOBP|MT`{eDPaqt3T5nfc|6aos6HrUTt*SG^BkOpT%=T(n8zit`-0NN$Cu?x|E`qa3 zEaAw=@Z`2$$qb`W=Z7@*Cm~N4XeR^z^fwg#OAtYmO4;Ts8#|Q&LKS z;Z1BIHxGa&*Z(4JE1o#scm}+tRWhE9-=RR{zrUJv?%2Q?DO!TH<}~syj3CmdX_++p zLaj{Bh^0`iEaaapbANBsqvNq2=fh$mon|aRP;tx;%kYj0lHfJDHl@1KC5I{U-=8#v=xiCmX}j!%n-B_;+t*%q0M4&@22pFi7;^ zcsfX35jIGwTfFBS?6Z{{)tBe zPa0P)q++nHPbt!FO$a>Mhx7VMre@l2J z;?zEIm7b#PSxmr*41fZGt*^et*?^M4ufj;K$fYb4M zaWEg=l^|A61^}}MEsv4jMQd$N=3qsxx<-oXDmMpq5UQKh=N=YU9;0<*I&jerw9IgvHaimM=bUr!FaVBi?{PkKT#muWY}$NaHVWv4Cvk z>qfz~mD*1X_!>nNtj=>7{qiL)_@J`ay~#Q-X4;+~}XgcN{|g3BA>O?8s? 
zh4m2%^z$Q;r$V>2is&#MQ$hgDQ(a{pHl(HZ_U-5D15-xjl-|~^5cI~AgeFGsnuCsD zwqqL_hqOCYZnI*!H0YMuf?VEYJE^pX=c|3ZG8cI1DwLIaSeGw&&S3)5+Uuc-xK_;> zAZUYK;J;$Mee-m5sR*HBdhk}V>gyS|Rwe-;Z^gOI89r~O?!i@7`vy*ZOFRelcdtIy z%V0R-s!Ctu)r@vECjg`voG>FtUw9Y`K@FY{;4PzfwRk~2((cP{mtcHt9v$~m7L zJ^+aC)D&coZYa=lW_XfkcG7ES7P;k-2Ff@XXrkMiZ(A(iUJ&&1S-}97n9sVUu`5?( zUJiXUXk!z+H87NP7U2^# zrv;qh6I5h0^OI#pvgLd0&HbC`947{Zo((AOlE%WEY-s*9Iot~#%_cL0nTyVp)hps# z*519%jcx%@w%OEQ-6xx?%)VGO6?(6$a4z&e#}_&G5(7Yh(e>cKVDp}_SVPs^apLIP zyE;-kw+n=4r(H-O+wG3&HvkAlol^MeuxGssOL_GOnn02f@UKX_(3z+1fphy>(Y_E9 zbbFOXa9^pEf8+f#c9TkWMgg3831|Lg<+Vc1L?rRXlZ!4-i*Cp~rnr#9=yhbDnhABI zPm^BP<*AM4a{5Lhh1)5ie z!X)>*_2l6tuSfv|$TPAXBRFV4$6W!I;F{iY_a2cKbj!8Y_x%A6UiA3Vdj>aWi$nlG zOXu+3yooN2Q~&oeIek2g2nE**x(%i@If`P^HUph7>+a}kG?o_2k2YNA4}gBrrjQsKBD(l zKmY*z|N6u?%_if<5(4-zmS|mtD^C!7QK=q#7KsA^8!AfaHPMG5mVV2}T{eLJxBD#e zzk3oGlM#mKz$>SHstN z?U6^2d;^hk=5j;1V4>8uA}GLI@d`Jc-C^~3T$NlA?xY;Z_Ex89+8}+oE3tb zE|;R!JCUiDKpkRRS)Hb4)qWfuq$P6!Y`6_pQ$}?Vx zALl-A!tta_+I@o4L2l&HZ4*m-vQfbfo-lq$ z4ljCx^yJofGUgaX#Vh!;OKaeyQ_rA(FkUMI4|y=tVx}-WdfyAtU0<7N=Jpf` zzQ56^vd9P?!Ggd`RX)4WzFj|NLAWGp{jD>5h6X!+@bRTv<#J|s!v*AOf0!w($~ywH*LGm*5Q z{HNz`F*sw*eNV_jDhZIvPGmg2OV&EoewzSOr0Y8@JqrDjCi5Pj$>@-E=Fx0u)RzW_P=7eQndl)l-exePzH%$cC7(M;HK=G2as4fn zLUx-w+T%qDEAF#oQDK?7A`*VQ@H0#?$J%7RJgKF*7^FOgDOhKZ zyf8iTVU7t6f+9l4c}Oq_pBxP<$aOz(zfUeg>*C8gIMRZ$61iI*iSw?kL7`F%a=rP@ z(q$kN4WHK4O}a3zFVH{)gpR*rVJQdG<9Her|z#Kh&`o&-?apdN*j~y*6n0 zK_&0mP2=y&la$5e41ZaoMe_qrt0(93r3r zK0gMU%_mlFk5Qh_wi>g8?Bt>MJSNTSQke0{O(VPa#W46!Npldal(t}5QQ=^&Qap!y z?7erHuDJkC;o2uFHz&H8Xrk(VhfnZ4 z8-HS=7$K~T;tm-NAr6=>8AAPSOn8$r4}KfF9wkd z^Th^-AUsihtfi7{m3bdjvKhZzm+rB{=6jA)zH7pPe@f{7xHHXZo*S;`5-YFM&(&6re<4q&?TKuvfZp}ef?gkuh@`O!(LNMEs*3a|SB^%9-tVC19nsHPM?oop1 zbQO7fI=K5|a$XpW`;pl5uO1C=%qpDHVvDvM1G{tSXN;X{GQ^SrHjr#~3=UCB6na&E+- zGkt+hKULA!&q1YN+0EAA(Kyg-etdmt!tM!Wh zoPK7+UP%mczk95y5oTu4P?A%QO|MGi@r83FC9?7?t;jhSKfGSPxLP+?4J4Z5ucWd* zwO+{ug(D)_Ks4P#R3}=Y5tokN;ZmH@R6z?o#$YEatv$N*Gf`b{bKBL+>!I<+7c-0nl=UFTx)Osxc$LQUM(N#7_9 
zjK!0)8Hy+r3Cy*zgrShl4MT0MWz(-%@Vajg_p}s062(cTRh2xcH(*Ds;4oer=tA9% zB1X7Mh~@uA_C5Ev`g64N+1EM+yR~>&Ezq>}1m(O;zJUN8L^d!N7){yPXMc2i4QnY2 zk_Mexsf-X=@?}~UpiLc>_8eu%T@d_=$svj4+^vvFn4p$yWUuh$v;Hg$Qp(>Qu0mkc zE8C6WC@Y=u|5Dz*EoF>qjJ^iqlc!cHLkjuuU+9g>Ht5Vd7SLfpfW_dlilM=H{Lz6U z2~f@1%l|0Ofz-2@-0Mw)el26G9H2uV0OLtksHwHQ z$0_jsLnCRI*KETG`eL+#$OT%m7V+!}`3C_V`+LF?>nQxtk#O`7fJQ}GzDO6z2oO$+ z@NoMgfh`UIuChVV=~@9cmF~U&1grPiJ3W8#T3o$|A*nATYpgV$H1_>CRqa9enHDex zlVXNq-*`XOwW!cmc43FG66}xzXaBIxN3RXGJ;32RUC#FUxm2>}jKHz0Z1{v2n4=>b zIB1Z``_4FCX=tp8~dmqwd4|ep@^*Be32?LzoweVX*GQ zS3jWsVM926n=kZ6a9yJXE+gz8h}6}Mr!Bg)ifkO+nWpm6g3XT9F6}`XxyW{MPB3VA27v z={O^VDy&|OJmF`j1*K^y2r~cH(OmZ7>yA-m0c_WY<3J=%?sK(P5FVKIHwKv(cN>Tq zf>|_u+TM--mSUPpDaQ)^b4d_^``M-TzmWp3)!vG4wTTCU+aID>DJBx(a1PFzYI240 z#)pUZRKhE?-mS%6LnM^ zjib?yHNoE-S-7fH$_oKfTB84r2TnFbD}8^wU_b}`iQJV@@s3WtP5$c z(9*`_adG@;weC43J<%Tyr_>h|VvyctJ$6ga>hZ@E3>~aAVi#j_XmJT~ z+zSHlak2k}RHZXtDxZCva*hTNH78Br9-kBr{zT#@0nWIPD85UW#6QdR24j15?TcUm z+zIScMT)3%0LESxyF@;?eih*|BbYXo#I7>;Ex|m~j3Y!5%x3ApmKw?}6*~Lb@-^vW z;8@sFl&r;6*~#X4r3-iXNHLoMoP@5SsFF<5`40~hSXLRU((Sz+QVGOq&*6PofI8^y zTH-z={@Cp}+tSBj2DiB&IL!MP^eFODgO4S!e=Dxg5|RVk;EaUy#zvG4ICE&DCKF5e zLg7d8EA>9^Yx=aKUaClOx8ho29BP2!9BkvLH_ALGeKv%cA4zTm&Gf|(E-237&YT4S zk%4f%&J@FWL)@4C5sJ1500HXgr!y@qwUYY+*wjVu1Muk@`f5mi@2&gaB7XhP?UmIW z{p+e2Crt1IR}J)gt?dY+df+Tae>5ESF#%dTZm>9Z{XJUSXc;aRNFdB9i-IW) z&*Q4O)MfRkI29~D(^|%7MO78j^(6x^G770yvJZKB)wNpTi*{4V;;zrn>^`tkn1_J| zW8SA~Ei`6<_ZUIj8i9>)Q;_h&H%6(v8y|9o q257{AYw-Uad;fp`8kgRGz^JYy`M|BjoefUs1Da2DRcn=PBmWQ3F(GCE diff --git a/api/core/tools/provider/builtin/stablediffusion/stablediffusion.py b/api/core/tools/provider/builtin/stablediffusion/stablediffusion.py deleted file mode 100644 index abaa297cf36eb1..00000000000000 --- a/api/core/tools/provider/builtin/stablediffusion/stablediffusion.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from 
core.tools.provider.builtin.stablediffusion.tools.stable_diffusion import StableDiffusionTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class StableDiffusionProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - StableDiffusionTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).validate_models() - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/stablediffusion/stablediffusion.yaml b/api/core/tools/provider/builtin/stablediffusion/stablediffusion.yaml deleted file mode 100644 index 9b3c804f722dfc..00000000000000 --- a/api/core/tools/provider/builtin/stablediffusion/stablediffusion.yaml +++ /dev/null @@ -1,42 +0,0 @@ -identity: - author: Dify - name: stablediffusion - label: - en_US: Stable Diffusion - zh_Hans: Stable Diffusion - pt_BR: Stable Diffusion - description: - en_US: Stable Diffusion is a tool for generating images which can be deployed locally. - zh_Hans: Stable Diffusion 是一个可以在本地部署的图片生成的工具。 - pt_BR: Stable Diffusion is a tool for generating images which can be deployed locally. 
- icon: icon.png - tags: - - image -credentials_for_provider: - base_url: - type: secret-input - required: true - label: - en_US: Base URL - zh_Hans: StableDiffusion服务器的Base URL - pt_BR: Base URL - placeholder: - en_US: Please input your StableDiffusion server's Base URL - zh_Hans: 请输入你的 StableDiffusion 服务器的 Base URL - pt_BR: Please input your StableDiffusion server's Base URL - model: - type: text-input - required: true - label: - en_US: Model - zh_Hans: 模型 - pt_BR: Model - placeholder: - en_US: Please input your model - zh_Hans: 请输入你的模型名称 - pt_BR: Please input your model - help: - en_US: The model name of the StableDiffusion server - zh_Hans: StableDiffusion服务器的模型名称 - pt_BR: The model name of the StableDiffusion server - url: https://docs.dify.ai/tutorials/tool-configuration/stable-diffusion diff --git a/api/core/tools/provider/builtin/stablediffusion/tools/stable_diffusion.py b/api/core/tools/provider/builtin/stablediffusion/tools/stable_diffusion.py deleted file mode 100644 index 64fdc961b4c5db..00000000000000 --- a/api/core/tools/provider/builtin/stablediffusion/tools/stable_diffusion.py +++ /dev/null @@ -1,390 +0,0 @@ -import io -import json -from base64 import b64decode, b64encode -from copy import deepcopy -from typing import Any, Union - -from httpx import get, post -from PIL import Image -from yarl import URL - -from core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter, ToolParameterOption -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.tool.builtin_tool import BuiltinTool - -# All commented out parameters default to null -DRAW_TEXT_OPTIONS = { - # Prompts - "prompt": "", - "negative_prompt": "", - # "styles": [], - # Seeds - "seed": -1, - "subseed": -1, - "subseed_strength": 0, - "seed_resize_from_h": -1, - "seed_resize_from_w": -1, - # Samplers - "sampler_name": "DPM++ 2M", - # "scheduler": "", - # "sampler_index": "Automatic", - # 
Latent Space Options - "batch_size": 1, - "n_iter": 1, - "steps": 10, - "cfg_scale": 7, - "width": 512, - "height": 512, - # "restore_faces": True, - # "tiling": True, - "do_not_save_samples": False, - "do_not_save_grid": False, - # "eta": 0, - # "denoising_strength": 0.75, - # "s_min_uncond": 0, - # "s_churn": 0, - # "s_tmax": 0, - # "s_tmin": 0, - # "s_noise": 0, - "override_settings": {}, - "override_settings_restore_afterwards": True, - # Refinement Options - "refiner_checkpoint": "", - "refiner_switch_at": 0, - "disable_extra_networks": False, - # "firstpass_image": "", - # "comments": "", - # High-Resolution Options - "enable_hr": False, - "firstphase_width": 0, - "firstphase_height": 0, - "hr_scale": 2, - # "hr_upscaler": "", - "hr_second_pass_steps": 0, - "hr_resize_x": 0, - "hr_resize_y": 0, - # "hr_checkpoint_name": "", - # "hr_sampler_name": "", - # "hr_scheduler": "", - "hr_prompt": "", - "hr_negative_prompt": "", - # Task Options - # "force_task_id": "", - # Script Options - # "script_name": "", - "script_args": [], - # Output Options - "send_images": True, - "save_images": False, - "alwayson_scripts": {}, - # "infotext": "", -} - - -class StableDiffusionTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - # base url - base_url = self.runtime.credentials.get("base_url", None) - if not base_url: - return self.create_text_message("Please input base_url") - - if tool_parameters.get("model"): - self.runtime.credentials["model"] = tool_parameters["model"] - - model = self.runtime.credentials.get("model", None) - if not model: - return self.create_text_message("Please input model") - - # set model - try: - url = str(URL(base_url) / "sdapi" / "v1" / "options") - response = post(url, data=json.dumps({"sd_model_checkpoint": model})) - if response.status_code != 200: - raise ToolProviderCredentialValidationError("Failed to set model, please 
tell user to set model") - except Exception as e: - raise ToolProviderCredentialValidationError("Failed to set model, please tell user to set model") - - # get image id and image variable - image_id = tool_parameters.get("image_id", "") - image_variable = self.get_default_image_variable() - # Return text2img if there's no image ID or no image variable - if not image_id or not image_variable: - return self.text2img(base_url=base_url, tool_parameters=tool_parameters) - - # Proceed with image-to-image generation - return self.img2img(base_url=base_url, tool_parameters=tool_parameters) - - def validate_models(self) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - validate models - """ - try: - base_url = self.runtime.credentials.get("base_url", None) - if not base_url: - raise ToolProviderCredentialValidationError("Please input base_url") - model = self.runtime.credentials.get("model", None) - if not model: - raise ToolProviderCredentialValidationError("Please input model") - - api_url = str(URL(base_url) / "sdapi" / "v1" / "sd-models") - response = get(url=api_url, timeout=10) - if response.status_code == 404: - # try draw a picture - self._invoke( - user_id="test", - tool_parameters={ - "prompt": "a cat", - "width": 1024, - "height": 1024, - "steps": 1, - "lora": "", - }, - ) - elif response.status_code != 200: - raise ToolProviderCredentialValidationError("Failed to get models") - else: - models = [d["model_name"] for d in response.json()] - if len([d for d in models if d == model]) > 0: - return self.create_text_message(json.dumps(models)) - else: - raise ToolProviderCredentialValidationError(f"model {model} does not exist") - except Exception as e: - raise ToolProviderCredentialValidationError(f"Failed to get models, {e}") - - def get_sd_models(self) -> list[str]: - """ - get sd models - """ - try: - base_url = self.runtime.credentials.get("base_url", None) - if not base_url: - return [] - api_url = str(URL(base_url) / "sdapi" / "v1" / "sd-models") - 
response = get(url=api_url, timeout=(2, 10)) - if response.status_code != 200: - return [] - else: - return [d["model_name"] for d in response.json()] - except Exception as e: - return [] - - def get_sample_methods(self) -> list[str]: - """ - get sample method - """ - try: - base_url = self.runtime.credentials.get("base_url", None) - if not base_url: - return [] - api_url = str(URL(base_url) / "sdapi" / "v1" / "samplers") - response = get(url=api_url, timeout=(2, 10)) - if response.status_code != 200: - return [] - else: - return [d["name"] for d in response.json()] - except Exception as e: - return [] - - def img2img( - self, base_url: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - generate image - """ - - # Fetch the binary data of the image - image_variable = self.get_default_image_variable() - image_binary = self.get_variable_file(image_variable.name) - if not image_binary: - return self.create_text_message("Image not found, please request user to generate image firstly.") - - # Convert image to RGB and save as PNG - try: - with Image.open(io.BytesIO(image_binary)) as image, io.BytesIO() as buffer: - image.convert("RGB").save(buffer, format="PNG") - image_binary = buffer.getvalue() - except Exception as e: - return self.create_text_message(f"Failed to process the image: {str(e)}") - - # copy draw options - draw_options = deepcopy(DRAW_TEXT_OPTIONS) - # set image options - model = tool_parameters.get("model", "") - draw_options_image = { - "init_images": [b64encode(image_binary).decode("utf-8")], - "denoising_strength": 0.9, - "restore_faces": False, - "script_args": [], - "override_settings": {"sd_model_checkpoint": model}, - "resize_mode": 0, - "image_cfg_scale": 0, - # "mask": None, - "mask_blur_x": 4, - "mask_blur_y": 4, - "mask_blur": 0, - "mask_round": True, - "inpainting_fill": 0, - "inpaint_full_res": True, - "inpaint_full_res_padding": 0, - "inpainting_mask_invert": 0, - "initial_noise_multiplier": 
0, - # "latent_mask": None, - "include_init_images": True, - } - # update key and values - draw_options.update(draw_options_image) - draw_options.update(tool_parameters) - - # get prompt lora model - prompt = tool_parameters.get("prompt", "") - lora = tool_parameters.get("lora", "") - model = tool_parameters.get("model", "") - if lora: - draw_options["prompt"] = f"{lora},{prompt}" - else: - draw_options["prompt"] = prompt - - try: - url = str(URL(base_url) / "sdapi" / "v1" / "img2img") - response = post(url, data=json.dumps(draw_options), timeout=120) - if response.status_code != 200: - return self.create_text_message("Failed to generate image") - - image = response.json()["images"][0] - - return self.create_blob_message( - blob=b64decode(image), meta={"mime_type": "image/png"}, save_as=self.VariableKey.IMAGE.value - ) - - except Exception as e: - return self.create_text_message("Failed to generate image") - - def text2img( - self, base_url: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - generate image - """ - # copy draw options - draw_options = deepcopy(DRAW_TEXT_OPTIONS) - draw_options.update(tool_parameters) - # get prompt lora model - prompt = tool_parameters.get("prompt", "") - lora = tool_parameters.get("lora", "") - model = tool_parameters.get("model", "") - if lora: - draw_options["prompt"] = f"{lora},{prompt}" - else: - draw_options["prompt"] = prompt - draw_options["override_settings"]["sd_model_checkpoint"] = model - - try: - url = str(URL(base_url) / "sdapi" / "v1" / "txt2img") - response = post(url, data=json.dumps(draw_options), timeout=120) - if response.status_code != 200: - return self.create_text_message("Failed to generate image") - - image = response.json()["images"][0] - - return self.create_blob_message( - blob=b64decode(image), meta={"mime_type": "image/png"}, save_as=self.VariableKey.IMAGE.value - ) - - except Exception as e: - return self.create_text_message("Failed to generate 
image") - - def get_runtime_parameters(self) -> list[ToolParameter]: - parameters = [ - ToolParameter( - name="prompt", - label=I18nObject(en_US="Prompt", zh_Hans="Prompt"), - human_description=I18nObject( - en_US="Image prompt, you can check the official documentation of Stable Diffusion", - zh_Hans="图像提示词,您可以查看 Stable Diffusion 的官方文档", - ), - type=ToolParameter.ToolParameterType.STRING, - form=ToolParameter.ToolParameterForm.LLM, - llm_description="Image prompt of Stable Diffusion, you should describe the image you want to generate" - " as a list of words as possible as detailed, the prompt must be written in English.", - required=True, - ), - ] - if len(self.list_default_image_variables()) != 0: - parameters.append( - ToolParameter( - name="image_id", - label=I18nObject(en_US="image_id", zh_Hans="image_id"), - human_description=I18nObject( - en_US="Image id of the image you want to generate based on, if you want to generate image based" - " on the default image, you can leave this field empty.", - zh_Hans="您想要生成的图像的图像 ID,如果您想要基于默认图像生成图像,则可以将此字段留空。", - ), - type=ToolParameter.ToolParameterType.STRING, - form=ToolParameter.ToolParameterForm.LLM, - llm_description="Image id of the original image, you can leave this field empty if you want to" - " generate a new image.", - required=True, - options=[ - ToolParameterOption(value=i.name, label=I18nObject(en_US=i.name, zh_Hans=i.name)) - for i in self.list_default_image_variables() - ], - ) - ) - - if self.runtime.credentials: - try: - models = self.get_sd_models() - if len(models) != 0: - parameters.append( - ToolParameter( - name="model", - label=I18nObject(en_US="Model", zh_Hans="Model"), - human_description=I18nObject( - en_US="Model of Stable Diffusion, you can check the official documentation" - " of Stable Diffusion", - zh_Hans="Stable Diffusion 的模型,您可以查看 Stable Diffusion 的官方文档", - ), - type=ToolParameter.ToolParameterType.SELECT, - form=ToolParameter.ToolParameterForm.FORM, - llm_description="Model of Stable 
Diffusion, you can check the official documentation" - " of Stable Diffusion", - required=True, - default=models[0], - options=[ - ToolParameterOption(value=i, label=I18nObject(en_US=i, zh_Hans=i)) for i in models - ], - ) - ) - - except: - pass - - sample_methods = self.get_sample_methods() - if len(sample_methods) != 0: - parameters.append( - ToolParameter( - name="sampler_name", - label=I18nObject(en_US="Sampling method", zh_Hans="Sampling method"), - human_description=I18nObject( - en_US="Sampling method of Stable Diffusion, you can check the official documentation" - " of Stable Diffusion", - zh_Hans="Stable Diffusion 的Sampling method,您可以查看 Stable Diffusion 的官方文档", - ), - type=ToolParameter.ToolParameterType.SELECT, - form=ToolParameter.ToolParameterForm.FORM, - llm_description="Sampling method of Stable Diffusion, you can check the official documentation" - " of Stable Diffusion", - required=True, - default=sample_methods[0], - options=[ - ToolParameterOption(value=i, label=I18nObject(en_US=i, zh_Hans=i)) for i in sample_methods - ], - ) - ) - return parameters diff --git a/api/core/tools/provider/builtin/stablediffusion/tools/stable_diffusion.yaml b/api/core/tools/provider/builtin/stablediffusion/tools/stable_diffusion.yaml deleted file mode 100644 index bbbdb16caf21bb..00000000000000 --- a/api/core/tools/provider/builtin/stablediffusion/tools/stable_diffusion.yaml +++ /dev/null @@ -1,104 +0,0 @@ -identity: - name: stable_diffusion - author: Dify - label: - en_US: Stable Diffusion WebUI - zh_Hans: Stable Diffusion WebUI - pt_BR: Stable Diffusion WebUI -description: - human: - en_US: A tool for generating images which can be deployed locally, you can use stable-diffusion-webui to deploy it. - zh_Hans: 一个可以在本地部署的图片生成的工具,您可以使用 stable-diffusion-webui 来部署它。 - pt_BR: A tool for generating images which can be deployed locally, you can use stable-diffusion-webui to deploy it. - llm: draw the image you want based on your prompt. 
-parameters: - - name: prompt - type: string - required: true - label: - en_US: Prompt - zh_Hans: 提示词 - pt_BR: Prompt - human_description: - en_US: Image prompt, you can check the official documentation of Stable Diffusion - zh_Hans: 图像提示词,您可以查看 Stable Diffusion 的官方文档 - pt_BR: Image prompt, you can check the official documentation of Stable Diffusion - llm_description: Image prompt of Stable Diffusion, you should describe the image you want to generate as a list of words as possible as detailed, the prompt must be written in English. - form: llm - - name: model - type: string - required: false - label: - en_US: Model Name - zh_Hans: 模型名称 - pt_BR: Model Name - human_description: - en_US: Model Name - zh_Hans: 模型名称 - pt_BR: Model Name - form: form - - name: lora - type: string - required: false - label: - en_US: Lora - zh_Hans: Lora - pt_BR: Lora - human_description: - en_US: Lora - zh_Hans: Lora - pt_BR: Lora - form: form - default: "" - - name: steps - type: number - required: false - label: - en_US: Steps - zh_Hans: Steps - pt_BR: Steps - human_description: - en_US: Steps - zh_Hans: Steps - pt_BR: Steps - form: form - default: 10 - - name: width - type: number - required: false - label: - en_US: Width - zh_Hans: Width - pt_BR: Width - human_description: - en_US: Width - zh_Hans: Width - pt_BR: Width - form: form - default: 1024 - - name: height - type: number - required: false - label: - en_US: Height - zh_Hans: Height - pt_BR: Height - human_description: - en_US: Height - zh_Hans: Height - pt_BR: Height - form: form - default: 1024 - - name: negative_prompt - type: string - required: false - label: - en_US: Negative prompt - zh_Hans: Negative prompt - pt_BR: Negative prompt - human_description: - en_US: Negative prompt - zh_Hans: Negative prompt - pt_BR: Negative prompt - form: form - default: bad art, ugly, deformed, watermark, duplicated, discontinuous lines diff --git a/api/core/tools/provider/builtin/stackexchange/_assets/icon.svg 
b/api/core/tools/provider/builtin/stackexchange/_assets/icon.svg deleted file mode 100644 index 7042bc0e4156c9..00000000000000 --- a/api/core/tools/provider/builtin/stackexchange/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/stackexchange/stackexchange.py b/api/core/tools/provider/builtin/stackexchange/stackexchange.py deleted file mode 100644 index 9680c633cc701c..00000000000000 --- a/api/core/tools/provider/builtin/stackexchange/stackexchange.py +++ /dev/null @@ -1,25 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.stackexchange.tools.searchStackExQuestions import SearchStackExQuestionsTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class StackExchangeProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - SearchStackExQuestionsTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "intitle": "Test", - "sort": "relevance", - "order": "desc", - "site": "stackoverflow", - "accepted": True, - "pagesize": 1, - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/stackexchange/stackexchange.yaml b/api/core/tools/provider/builtin/stackexchange/stackexchange.yaml deleted file mode 100644 index d382a3cca9cef2..00000000000000 --- a/api/core/tools/provider/builtin/stackexchange/stackexchange.yaml +++ /dev/null @@ -1,13 +0,0 @@ -identity: - author: Richards Tu - name: stackexchange - label: - en_US: Stack Exchange - zh_Hans: Stack Exchange - description: - en_US: Access questions and answers from the Stack Exchange and its sub-sites. 
- zh_Hans: 从 Stack Exchange 和其子论坛获取问题和答案。 - icon: icon.svg - tags: - - search - - utilities diff --git a/api/core/tools/provider/builtin/stackexchange/tools/fetchAnsByStackExQuesID.py b/api/core/tools/provider/builtin/stackexchange/tools/fetchAnsByStackExQuesID.py deleted file mode 100644 index 534532009501f5..00000000000000 --- a/api/core/tools/provider/builtin/stackexchange/tools/fetchAnsByStackExQuesID.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Any, Union - -import requests -from pydantic import BaseModel, Field - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class FetchAnsByStackExQuesIDInput(BaseModel): - id: int = Field(..., description="The question ID") - site: str = Field(..., description="The Stack Exchange site") - order: str = Field(..., description="asc or desc") - sort: str = Field(..., description="activity, votes, creation") - pagesize: int = Field(..., description="Number of answers per page") - page: int = Field(..., description="Page number") - - -class FetchAnsByStackExQuesIDTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - input = FetchAnsByStackExQuesIDInput(**tool_parameters) - - params = { - "site": input.site, - "filter": "!nNPvSNdWme", - "order": input.order, - "sort": input.sort, - "pagesize": input.pagesize, - "page": input.page, - } - - response = requests.get(f"https://api.stackexchange.com/2.3/questions/{input.id}/answers", params=params) - - if response.status_code == 200: - return self.create_text_message(self.summary(user_id=user_id, content=response.text)) - else: - return self.create_text_message(f"API request failed with status code {response.status_code}") diff --git a/api/core/tools/provider/builtin/stackexchange/tools/fetchAnsByStackExQuesID.yaml b/api/core/tools/provider/builtin/stackexchange/tools/fetchAnsByStackExQuesID.yaml deleted file 
mode 100644 index d663bce6097441..00000000000000 --- a/api/core/tools/provider/builtin/stackexchange/tools/fetchAnsByStackExQuesID.yaml +++ /dev/null @@ -1,107 +0,0 @@ -identity: - name: fetchAnsByStackExQuesID - author: Richards Tu - label: - en_US: Fetch Stack Exchange Answers - zh_Hans: 获取 Stack Exchange 答案 -description: - human: - en_US: A tool for retrieving answers for a specific Stack Exchange question ID. Must be used with the searchStackExQuesID tool. - zh_Hans: 用于检索特定Stack Exchange问题ID的答案的工具。必须与searchStackExQuesID工具一起使用。 - llm: A tool for retrieving answers for Stack Exchange question ID. -parameters: - - name: id - type: string - required: true - label: - en_US: Question ID - zh_Hans: 问题ID - human_description: - en_US: The ID of the Stack Exchange question to fetch answers for. - zh_Hans: 要获取答案的Stack Exchange问题的ID。 - llm_description: The ID of the Stack Exchange question. - form: llm - - name: site - type: string - required: true - label: - en_US: Stack Exchange site - zh_Hans: Stack Exchange站点 - human_description: - en_US: The Stack Exchange site the question is from, e.g. stackoverflow, unix, etc. - zh_Hans: 问题所在的Stack Exchange站点,例如stackoverflow、unix等。 - llm_description: Stack Exchange site identifier - 'stackoverflow', 'serverfault', 'superuser', 'askubuntu', 'unix', 'cs', 'softwareengineering', 'codegolf', 'codereview', 'cstheory', 'security', 'cryptography', 'reverseengineering', 'datascience', 'devops', 'ux', 'dba', 'gis', 'webmasters', 'arduino', 'raspberrypi', 'networkengineering', 'iot', 'tor', 'sqa', 'mathoverflow', 'math', 'mathematica', 'dsp', 'gamedev', 'robotics', 'genai', 'computergraphics'. - form: llm - - name: filter - type: string - required: true - label: - en_US: Filter - zh_Hans: 过滤器 - human_description: - en_US: This is required in order to actually get the body of the answer. 
- zh_Hans: 为了实际获取答案的正文是必需的。 - options: - - value: "!nNPvSNdWme" - label: - en_US: Must Select - zh_Hans: 必须选择 - form: form - default: "!nNPvSNdWme" - - name: order - type: string - required: true - label: - en_US: Sort direction - zh_Hans: 排序方向 - human_description: - en_US: The direction to sort the answers - ascending or descending. - zh_Hans: 答案的排序方向 - 升序或降序。 - form: form - options: - - value: asc - label: - en_US: Ascending - zh_Hans: 升序 - - value: desc - label: - en_US: Descending - zh_Hans: 降序 - default: desc - - name: sort - type: string - required: true - label: - en_US: Sort order - zh_Hans: 排序 - human_description: - en_US: The sort order for the answers - activity, votes, or creation date. - zh_Hans: 答案的排序顺序 - 活动、投票或创建日期。 - llm_description: activity, votes, or creation. - form: llm - - name: pagesize - type: number - required: true - label: - en_US: Results per page - zh_Hans: 每页结果数 - human_description: - en_US: The number of answers to return per page. - zh_Hans: 每页返回的答案数。 - form: form - min: 1 - max: 5 - default: 1 - - name: page - type: number - required: true - label: - en_US: Page number - zh_Hans: 页码 - human_description: - en_US: The page number of answers to retrieve. 
- zh_Hans: 要检索的答案的页码。 - form: form - min: 1 - max: 5 - default: 3 diff --git a/api/core/tools/provider/builtin/stackexchange/tools/searchStackExQuestions.py b/api/core/tools/provider/builtin/stackexchange/tools/searchStackExQuestions.py deleted file mode 100644 index 4a25a808adf26a..00000000000000 --- a/api/core/tools/provider/builtin/stackexchange/tools/searchStackExQuestions.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Any, Union - -import requests -from pydantic import BaseModel, Field - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class SearchStackExQuestionsInput(BaseModel): - intitle: str = Field(..., description="The search query.") - sort: str = Field(..., description="The sort order - relevance, activity, votes, creation.") - order: str = Field(..., description="asc or desc") - site: str = Field(..., description="The Stack Exchange site.") - tagged: str = Field(None, description="Semicolon-separated tags to include.") - nottagged: str = Field(None, description="Semicolon-separated tags to exclude.") - accepted: bool = Field(..., description="true for only accepted answers, false otherwise") - pagesize: int = Field(..., description="Number of results per page") - - -class SearchStackExQuestionsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - input = SearchStackExQuestionsInput(**tool_parameters) - - params = { - "intitle": input.intitle, - "sort": input.sort, - "order": input.order, - "site": input.site, - "accepted": input.accepted, - "pagesize": input.pagesize, - } - if input.tagged: - params["tagged"] = input.tagged - if input.nottagged: - params["nottagged"] = input.nottagged - - response = requests.get("https://api.stackexchange.com/2.3/search", params=params) - - if response.status_code == 200: - return self.create_text_message(self.summary(user_id=user_id, 
content=response.text)) - else: - return self.create_text_message(f"API request failed with status code {response.status_code}") diff --git a/api/core/tools/provider/builtin/stackexchange/tools/searchStackExQuestions.yaml b/api/core/tools/provider/builtin/stackexchange/tools/searchStackExQuestions.yaml deleted file mode 100644 index bbfbae38b06e1a..00000000000000 --- a/api/core/tools/provider/builtin/stackexchange/tools/searchStackExQuestions.yaml +++ /dev/null @@ -1,121 +0,0 @@ -identity: - name: searchStackExQuestions - author: Richards Tu - label: - en_US: Search Stack Exchange Questions - zh_Hans: 搜索Stack Exchange问题 -description: - human: - en_US: A tool for searching questions on a Stack Exchange site. - zh_Hans: 在Stack Exchange站点上搜索问题的工具。 - llm: A tool for searching questions on Stack Exchange site. -parameters: - - name: intitle - type: string - required: true - label: - en_US: Search query - zh_Hans: 搜索查询 - human_description: - en_US: The search query to use for finding questions. - zh_Hans: 用于查找问题的搜索查询。 - llm_description: The search query. - form: llm - - name: sort - type: string - required: true - label: - en_US: Sort order - zh_Hans: 排序 - human_description: - en_US: The sort order for the search results - relevance, activity, votes, or creation date. - zh_Hans: 搜索结果的排序顺序 - 相关性、活动、投票或创建日期。 - llm_description: The sort order - 'relevance', 'activity', 'votes', or 'creation'. - form: llm - - name: order - type: select - required: true - label: - en_US: Sort direction - zh_Hans: 排序方向 - human_description: - en_US: The direction to sort - ascending or descending. - zh_Hans: 排序方向 - 升序或降序。 - form: form - options: - - value: asc - label: - en_US: Ascending - zh_Hans: 升序 - - value: desc - label: - en_US: Descending - zh_Hans: 降序 - default: desc - - name: site - type: string - required: true - label: - en_US: Stack Exchange site - zh_Hans: Stack Exchange 站点 - human_description: - en_US: The Stack Exchange site to search, e.g. stackoverflow, unix, etc. 
- zh_Hans: 要搜索的Stack Exchange站点,例如stackoverflow、unix等。 - llm_description: Stack Exchange site identifier - 'stackoverflow', 'serverfault', 'superuser', 'askubuntu', 'unix', 'cs', 'softwareengineering', 'codegolf', 'codereview', 'cstheory', 'security', 'cryptography', 'reverseengineering', 'datascience', 'devops', 'ux', 'dba', 'gis', 'webmasters', 'arduino', 'raspberrypi', 'networkengineering', 'iot', 'tor', 'sqa', 'mathoverflow', 'math', 'mathematica', 'dsp', 'gamedev', 'robotics', 'genai', 'computergraphics'. - form: llm - - name: tagged - type: string - required: false - label: - en_US: Include tags - zh_Hans: 包含标签 - human_description: - en_US: A semicolon-separated list of tags that questions must have. - zh_Hans: 问题必须具有的标签的分号分隔列表。 - llm_description: Semicolon-separated tags to include. Leave blank if not needed. - form: llm - - name: nottagged - type: string - required: false - label: - en_US: Exclude tags - zh_Hans: 排除标签 - human_description: - en_US: A semicolon-separated list of tags to exclude from the search. - zh_Hans: 从搜索中排除的标签的分号分隔列表。 - llm_description: Semicolon-separated tags to exclude. Leave blank if not needed. - form: llm - - name: accepted - type: boolean - required: true - label: - en_US: Has accepted answer - zh_Hans: 有已接受的答案 - human_description: - en_US: Whether to limit to only questions that have an accepted answer. - zh_Hans: 是否限制为只有已接受答案的问题。 - form: form - options: - - value: 'true' - label: - en_US: 'Yes' - zh_Hans: 是 - - value: 'false' - label: - en_US: 'No' - zh_Hans: 否 - default: 'true' - - name: pagesize - type: number - required: true - label: - en_US: Results per page - zh_Hans: 每页结果数 - human_description: - en_US: The number of results to return per page. - zh_Hans: 每页返回的结果数。 - llm_description: The number of results per page. 
- form: form - min: 1 - max: 50 - default: 10 diff --git a/api/core/tools/provider/builtin/stepfun/__init__.py b/api/core/tools/provider/builtin/stepfun/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/tools/provider/builtin/stepfun/_assets/icon.png b/api/core/tools/provider/builtin/stepfun/_assets/icon.png deleted file mode 100644 index 85b96d0c74c24c2c28ccd0d363f02b35e359f561..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1991 zcmc&#Yfuwc6kdW9h^0hYTcvf}2HHlac@iLrMhHoeR01@h;sY>`1yV^iZZ?vD3M!(g z9aA4|@c~t-Xr+LIFs*jVIHfk!_j77|j*lXWU{tVH@N|PfTL=5McV_qAd*=JT^WA&S z?3Se@O%0-l(h&p+QY9!f)a>#;0lw6Gb7^J`HH{^e>0~ONP3CI}43Qad9fqn*+8j)S zX$_7At=MD)@nsp)(#dpnqC}6II9e};V>ekSHiAr!v0JqI`51}nupFaV%IrGW%tVa_ zDRX9|ny0qNv0P(90fD6!B&F#K=Ig}%(P9oiny(i`XKQsIq5_^kz~za!{3y0SAc=^S z2>Iy1U{VPQL$*Yth#yEt?WD|HlC(&;-2D7}PJRRjCvv!au~^LI3Ah3Qn?kUy4l}8> zv(479K?Mb7)e}YwX~fN_S5d3OZKRY*nSFi=lVwQOY#q2M>dv@!t%b|y@VuuQ1gh2l z4KaUo`iCvX$me`<+w7%$P0 zaS`!6xrove4)5hT*YHv=BIQfHa49ofZ*Bik>%kD!K;Gq0^HQ6k_Q%Xr&l6O0?|oDr zgdjc(QWDdY$ZUAp3-9ECa1r1;K)n{)J@DNrKye712e&%GF9SZX!sT{2aSfzP;l@4C zZw8+^u>UyJAA|4(;GYFC%VFATV3DwTKa|$PdjyD}-QD4CPdyVmrr7hS zo|WL&xasrLGj8v(`_Ng}*(-_N*OvNETt!=5%)FC@w_hg@A4+sz5IWAZJ!u**XvGEP zmks*fo`hEzXlya!LsNtrgqqGKyQ^^LC8fu?flkl^7{}Nw*L08b2>rnf)Q~$lrG0Lk zEHnA?#80!QhRbTl0-hLBb!J3}FRiMl^UK(f^=NV;8ZkY#!q#DXyj1V(Y<+#xt|FE! 
z%OUi9Pj~&;w{}|R35QKw`AsDwcyHL2mb%7b#K>~h&P4ZSF5TKguNvu!ac;}|ssdZH zP?kBS``)sQ6RzzNxf?3}s0gk8S$?27xaEfD`e+ZM!6q4SJ|f`Xi&1{FNCpw+-oe(l z%3KdCkGTuFf0Mbo3vjO6FOcR#dyy~@sgaeQ!@Ae3%u^oe3kjZ375BAftjD?Bm)4TO z$U0oz(D3$aGWuug=HsWM$1S@_0)yYh2qzU0;Q=F7NuA=l*z=c0&T?$P8|QZUKaM-E zJlgRlx@7!wZ%qFOCoek_I>vf9$z6DUea None: - try: - StepfunTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "prompt": "cute girl, blue eyes, white hair, anime style", - "size": "1024x1024", - "n": 1, - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/stepfun/stepfun.yaml b/api/core/tools/provider/builtin/stepfun/stepfun.yaml deleted file mode 100644 index 1f841ec369b5c3..00000000000000 --- a/api/core/tools/provider/builtin/stepfun/stepfun.yaml +++ /dev/null @@ -1,46 +0,0 @@ -identity: - author: Stepfun - name: stepfun - label: - en_US: Image-1X - zh_Hans: 阶跃星辰绘画 - pt_BR: Image-1X - description: - en_US: Image-1X - zh_Hans: 阶跃星辰绘画 - pt_BR: Image-1X - icon: icon.png - tags: - - image - - productivity -credentials_for_provider: - stepfun_api_key: - type: secret-input - required: true - label: - en_US: Stepfun API key - zh_Hans: 阶跃星辰API key - pt_BR: Stepfun API key - help: - en_US: Please input your stepfun API key - zh_Hans: 请输入你的阶跃星辰 API key - pt_BR: Please input your stepfun API key - placeholder: - en_US: Please input your stepfun API key - zh_Hans: 请输入你的阶跃星辰 API key - pt_BR: Please input your stepfun API key - stepfun_base_url: - type: text-input - required: false - label: - en_US: Stepfun base URL - zh_Hans: 阶跃星辰 base URL - pt_BR: Stepfun base URL - help: - en_US: Please input your Stepfun base URL - zh_Hans: 请输入你的阶跃星辰 base URL - pt_BR: Please input your Stepfun base URL - placeholder: - en_US: Please input your Stepfun base URL - zh_Hans: 请输入你的阶跃星辰 base URL - pt_BR: Please input your Stepfun base URL diff --git 
a/api/core/tools/provider/builtin/stepfun/tools/image.py b/api/core/tools/provider/builtin/stepfun/tools/image.py deleted file mode 100644 index 0b92b122bf59bc..00000000000000 --- a/api/core/tools/provider/builtin/stepfun/tools/image.py +++ /dev/null @@ -1,75 +0,0 @@ -import random -from typing import Any, Union - -from openai import OpenAI -from yarl import URL - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class StepfunTool(BuiltinTool): - """Stepfun Image Generation Tool""" - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - base_url = self.runtime.credentials.get("stepfun_base_url", "https://api.stepfun.com") - base_url = str(URL(base_url) / "v1") - - client = OpenAI( - api_key=self.runtime.credentials["stepfun_api_key"], - base_url=base_url, - ) - - extra_body = {} - model = tool_parameters.get("model", "step-1x-medium") - if not model: - return self.create_text_message("Please input model name") - # prompt - prompt = tool_parameters.get("prompt", "") - if not prompt: - return self.create_text_message("Please input prompt") - - seed = tool_parameters.get("seed", 0) - if seed > 0: - extra_body["seed"] = seed - steps = tool_parameters.get("steps", 0) - if steps > 0: - extra_body["steps"] = steps - negative_prompt = tool_parameters.get("negative_prompt", "") - if negative_prompt: - extra_body["negative_prompt"] = negative_prompt - - # call openapi stepfun model - response = client.images.generate( - prompt=prompt, - model=model, - size=tool_parameters.get("size", "1024x1024"), - n=tool_parameters.get("n", 1), - extra_body=extra_body, - ) - print(response) - - result = [] - for image in response.data: - result.append(self.create_image_message(image=image.url)) - result.append( - self.create_json_message( - { - "url": image.url, - } - ) - ) - return result - - @staticmethod - 
def _generate_random_id(length=8): - characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - random_id = "".join(random.choices(characters, k=length)) - return random_id diff --git a/api/core/tools/provider/builtin/stepfun/tools/image.yaml b/api/core/tools/provider/builtin/stepfun/tools/image.yaml deleted file mode 100644 index dcc5bd2db2f5ba..00000000000000 --- a/api/core/tools/provider/builtin/stepfun/tools/image.yaml +++ /dev/null @@ -1,158 +0,0 @@ -identity: - name: stepfun - author: Stepfun - label: - en_US: step-1x - zh_Hans: 阶跃星辰绘画 - pt_BR: step-1x - description: - en_US: step-1x is a powerful drawing tool by stepfun, you can draw the image based on your prompt - zh_Hans: step-1x 系列是阶跃星辰提供的强大的绘画工具,它可以根据您的提示词绘制出您想要的图像。 - pt_BR: step-1x is a powerful drawing tool by stepfun, you can draw the image based on your prompt -description: - human: - en_US: step-1x is a text to image tool - zh_Hans: step-1x 是一个文本/图像到图像的工具 - pt_BR: step-1x is a text to image tool - llm: step-1x is a tool used to generate images from text or image -parameters: - - name: prompt - type: string - required: true - label: - en_US: Prompt - zh_Hans: 提示词 - pt_BR: Prompt - human_description: - en_US: Image prompt, you can check the official documentation of step-1x - zh_Hans: 图像提示词,您可以查看 step-1x 的官方文档 - pt_BR: Image prompt, you can check the official documentation of step-1x - llm_description: Image prompt of step-1x you should describe the image you want to generate as a list of words as possible as detailed - form: llm - - name: model - type: select - required: false - human_description: - en_US: used for selecting the model name - zh_Hans: 用于选择模型的名字 - pt_BR: used for selecting the model name - label: - en_US: Model Name - zh_Hans: 模型名字 - pt_BR: Model Name - form: form - options: - - value: step-1x-turbo - label: - en_US: turbo - zh_Hans: turbo - pt_BR: turbo - - value: step-1x-medium - label: - en_US: medium - zh_Hans: medium - pt_BR: medium - - value: step-1x-large - 
label: - en_US: large - zh_Hans: large - pt_BR: large - default: step-1x-medium - - name: size - type: select - required: false - human_description: - en_US: used for selecting the image size - zh_Hans: 用于选择图像大小 - pt_BR: used for selecting the image size - label: - en_US: Image size - zh_Hans: 图像大小 - pt_BR: Image size - form: form - options: - - value: 256x256 - label: - en_US: 256x256 - zh_Hans: 256x256 - pt_BR: 256x256 - - value: 512x512 - label: - en_US: 512x512 - zh_Hans: 512x512 - pt_BR: 512x512 - - value: 768x768 - label: - en_US: 768x768 - zh_Hans: 768x768 - pt_BR: 768x768 - - value: 1024x1024 - label: - en_US: 1024x1024 - zh_Hans: 1024x1024 - pt_BR: 1024x1024 - - value: 1280x800 - label: - en_US: 1280x800 - zh_Hans: 1280x800 - pt_BR: 1280x800 - - value: 800x1280 - label: - en_US: 800x1280 - zh_Hans: 800x1280 - pt_BR: 800x1280 - default: 1024x1024 - - name: n - type: number - required: true - human_description: - en_US: used for selecting the number of images - zh_Hans: 用于选择图像数量 - pt_BR: used for selecting the number of images - label: - en_US: Number of images - zh_Hans: 图像数量 - pt_BR: Number of images - form: form - default: 1 - min: 1 - max: 10 - - name: seed - type: number - required: false - label: - en_US: seed - zh_Hans: seed - pt_BR: seed - human_description: - en_US: seed - zh_Hans: seed - pt_BR: seed - form: form - default: 10 - - name: steps - type: number - required: false - label: - en_US: Steps - zh_Hans: Steps - pt_BR: Steps - human_description: - en_US: Steps - zh_Hans: Steps - pt_BR: Steps - form: form - default: 10 - - name: negative_prompt - type: string - required: false - label: - en_US: Negative prompt - zh_Hans: Negative prompt - pt_BR: Negative prompt - human_description: - en_US: Negative prompt - zh_Hans: Negative prompt - pt_BR: Negative prompt - form: form - default: (worst quality:1.3), (nsfw), low quality diff --git a/api/core/tools/provider/builtin/tavily/_assets/icon.png b/api/core/tools/provider/builtin/tavily/_assets/icon.png 
deleted file mode 100644 index fdb40ab5689ba9f40b22d2c700ed2ce1b2602829..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2070 zcmV+x2(^!000yK02}}S4*(A$00|%Q@$&cg z_|?|f=;`XMud<(^r8GH2ZE$zW%gtL}W{QlFgocd5!^XP1zI=a$-{9hMb$gVSnn+Al zKSND1HbPEPS{EKJAtyDasH~Wpo?&Ec8zL|~KugHU%PB8AlScC&000L;Nkl zhnA`^5Qa^F01*LuLq&JjwO-%Do~`A?FmAv78_H8nLgH8nLgH8nK? zGV4KGz?$_8+5*<#KC}gl@$nAY1jh976xsyFEXWet1jY=N3A71p=_t+++6K1xnEpcB zz?QyksBLH?nBpl2%bnD$9i)$;jbMsqAWiC|riT$yJZLLeG9w7NduS_IvQiO}J6ud$ zgb?mPo57F+4HR~v&0t61Xpq#-G;lO$XS#s4gB?>fP-se=dmqg}pN$oYb{gcV;-s1RXjOBm6EEKWJOnFuyC5wY(U)H-+X8v@L8n zSHCx~4{Zz!#`;|#vIfxBu)tIP6v~;nT`H&hHkGj9HpUMErQD{=)}ulhx2bA8DwNQh z{)PK%(3h?Zp198n4Hep=lIdaef-PCM=2kdK>uWj+eN~9CRF!_;eiMjXsm9$W^If4- z0b~5;XD$I_1Z`J&+}jr~izrQU8oh)vE#i0d63TNDBwOJg#tq?_*Xkb3+xZ8lJrMxKq)D>e?)`yU{<0K;x)|o~|bX zEKFGRJ`*U8L@!4me3dA~M*VP$h7bYV&4{GyNI>I0XsoqD0?%>r$aQb3{gb8SO~(E8 zp=HF=ZRO2mqA>X_`nNc}E0YBSb9xbc^m&wcg?w>XYIHaGQ_*Vvc;b` zpjq73&=_sdJBH zeg57wTK1x%dvNW+2`g3W3osPCW?xrz<`dm2Hv`RsOIn`KRNh`AEb_A)&?f%bvDvM? 
z{vHz-Vt2k`T6nlCIV&gr`m}eG4aVnF=TLsUN@h&g!U3%$iQU*T%n# zd5QU3uRlUK!)?(@pbBG85j*fkH+IoF7rDdS#vXTHWw zpoMgZx~Qbp?|YV3eXeS__^Z0nX=l69tJP?z`pBpJclgO=(rj13|uH!>XiNi-_%-)(O@H;f|bdJr!0&UK3dan~77|4Lm{fE!{Vz(4U_^4!A zL?W;EpPwlpQ4E0vB#I)i(HMP}i0cBJ!5foLJgUvT7k%Wrm9FRo(LR$sU`(O!z| z<-D3n6^>K2XyV^6i^34`)vs+HiDLeLb)Wq+A&bj`H=W2gJ)>}hdr;wupRwpS#C-&_ zpY14${4Cu%OU>`Dq0KBnTT73Mh9+Djd?dp&JRpPq zSI*o$v`Hn?d41tUmnPcZ7(M$M`U`DP>2#lcjck4=6Tf-!&ePB3BuYvDc-p2i z{)hDWXR_i>Dw0_JZBz3IZ4470wyCv)HiiwxeVfYYNdxXdpUPAjJ~F?xzZHQlKjor* zJBUQ0_Qf-|v<7qi7;+I7ip;Sh;QH%oRl=i_@% diff --git a/api/core/tools/provider/builtin/tavily/tavily.py b/api/core/tools/provider/builtin/tavily/tavily.py deleted file mode 100644 index a702b0a74e6131..00000000000000 --- a/api/core/tools/provider/builtin/tavily/tavily.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.tavily.tools.tavily_search import TavilySearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class TavilyProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - TavilySearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "query": "Sachin Tendulkar", - "search_depth": "basic", - "include_answer": True, - "include_images": False, - "include_raw_content": False, - "max_results": 5, - "include_domains": "", - "exclude_domains": "", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/tavily/tavily.yaml b/api/core/tools/provider/builtin/tavily/tavily.yaml deleted file mode 100644 index 7b25a8184857ca..00000000000000 --- a/api/core/tools/provider/builtin/tavily/tavily.yaml +++ /dev/null @@ -1,31 +0,0 @@ -identity: - author: Yash Parmar - name: tavily - label: - en_US: Tavily - zh_Hans: 
Tavily - pt_BR: Tavily - description: - en_US: Tavily - zh_Hans: Tavily - pt_BR: Tavily - icon: icon.png - tags: - - search -credentials_for_provider: - tavily_api_key: - type: secret-input - required: true - label: - en_US: Tavily API key - zh_Hans: Tavily API key - pt_BR: Tavily API key - placeholder: - en_US: Please input your Tavily API key - zh_Hans: 请输入你的 Tavily API key - pt_BR: Please input your Tavily API key - help: - en_US: Get your Tavily API key from Tavily - zh_Hans: 从 TavilyApi 获取您的 Tavily API key - pt_BR: Get your Tavily API key from Tavily - url: https://docs.tavily.com/docs/tavily-api/introduction diff --git a/api/core/tools/provider/builtin/tavily/tools/tavily_search.py b/api/core/tools/provider/builtin/tavily/tools/tavily_search.py deleted file mode 100644 index ca6d8633e4b0af..00000000000000 --- a/api/core/tools/provider/builtin/tavily/tools/tavily_search.py +++ /dev/null @@ -1,124 +0,0 @@ -from typing import Any - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -TAVILY_API_URL = "https://api.tavily.com" - - -class TavilySearch: - """ - A class for performing search operations using the Tavily Search API. - - Args: - api_key (str): The API key for accessing the Tavily Search API. - - Methods: - raw_results: Retrieves raw search results from the Tavily Search API. - results: Retrieves cleaned search results from the Tavily Search API. - clean_results: Cleans the raw search results. - """ - - def __init__(self, api_key: str) -> None: - self.api_key = api_key - - def raw_results(self, params: dict[str, Any]) -> dict: - """ - Retrieves raw search results from the Tavily Search API. - - Args: - params (Dict[str, Any]): The search parameters. - - Returns: - dict: The raw search results. 
- - """ - params["api_key"] = self.api_key - if ( - "exclude_domains" in params - and isinstance(params["exclude_domains"], str) - and params["exclude_domains"] != "None" - ): - params["exclude_domains"] = params["exclude_domains"].split() - else: - params["exclude_domains"] = [] - if ( - "include_domains" in params - and isinstance(params["include_domains"], str) - and params["include_domains"] != "None" - ): - params["include_domains"] = params["include_domains"].split() - else: - params["include_domains"] = [] - - response = requests.post(f"{TAVILY_API_URL}/search", json=params) - response.raise_for_status() - return response.json() - - def results(self, params: dict[str, Any]) -> list[dict]: - """ - Retrieves cleaned search results from the Tavily Search API. - - Args: - params (Dict[str, Any]): The search parameters. - - Returns: - list: The cleaned search results. - - """ - raw_search_results = self.raw_results(params) - return self.clean_results(raw_search_results["results"]) - - def clean_results(self, results: list[dict]) -> list[dict]: - """ - Cleans the raw search results. - - Args: - results (list): The raw search results. - - Returns: - list: The cleaned search results. - - """ - clean_results = [] - for result in results: - clean_results.append( - { - "url": result["url"], - "content": result["content"], - } - ) - # return clean results as a string - return "\n".join([f"{res['url']}\n{res['content']}" for res in clean_results]) - - -class TavilySearchTool(BuiltinTool): - """ - A tool for searching Tavily using a given query. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: - """ - Invokes the Tavily search tool with the given user ID and tool parameters. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (Dict[str, Any]): The parameters for the Tavily search tool. 
- - Returns: - ToolInvokeMessage | list[ToolInvokeMessage]: The result of the Tavily search tool invocation. - """ - query = tool_parameters.get("query", "") - - api_key = self.runtime.credentials["tavily_api_key"] - if not query: - return self.create_text_message("Please input query") - tavily_search = TavilySearch(api_key) - results = tavily_search.results(tool_parameters) - print(results) - if not results: - return self.create_text_message(f"No results found for '{query}' in Tavily") - else: - return self.create_text_message(text=results) diff --git a/api/core/tools/provider/builtin/tavily/tools/tavily_search.yaml b/api/core/tools/provider/builtin/tavily/tools/tavily_search.yaml deleted file mode 100644 index 88426056afb353..00000000000000 --- a/api/core/tools/provider/builtin/tavily/tools/tavily_search.yaml +++ /dev/null @@ -1,162 +0,0 @@ -identity: - name: tavily_search - author: Yash Parmar - label: - en_US: TavilySearch - zh_Hans: TavilySearch - pt_BR: TavilySearch -description: - human: - en_US: A tool for search engine built specifically for AI agents (LLMs), delivering real-time, accurate, and factual results at speed. - zh_Hans: 专为人工智能代理 (LLM) 构建的搜索引擎工具,可快速提供实时、准确和真实的结果。 - pt_BR: A tool for search engine built specifically for AI agents (LLMs), delivering real-time, accurate, and factual results at speed. - llm: A tool for search engine built specifically for AI agents (LLMs), delivering real-time, accurate, and factual results at speed. 
-parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询语句 - pt_BR: Query string - human_description: - en_US: used for searching - zh_Hans: 用于搜索网页内容 - pt_BR: used for searching - llm_description: key words for searching - form: llm - - name: search_depth - type: select - required: false - label: - en_US: Search Depth - zh_Hans: 搜索深度 - pt_BR: Search Depth - human_description: - en_US: The depth of search results - zh_Hans: 搜索结果的深度 - pt_BR: The depth of search results - form: form - options: - - value: basic - label: - en_US: Basic - zh_Hans: 基本 - pt_BR: Basic - - value: advanced - label: - en_US: Advanced - zh_Hans: 高级 - pt_BR: Advanced - default: basic - - name: include_images - type: boolean - required: false - label: - en_US: Include Images - zh_Hans: 包含图片 - pt_BR: Include Images - human_description: - en_US: Include images in the search results - zh_Hans: 在搜索结果中包含图片 - pt_BR: Include images in the search results - form: form - options: - - value: 'true' - label: - en_US: 'Yes' - zh_Hans: 是 - pt_BR: 'Yes' - - value: 'false' - label: - en_US: 'No' - zh_Hans: 否 - pt_BR: 'No' - default: 'false' - - name: include_answer - type: boolean - required: false - label: - en_US: Include Answer - zh_Hans: 包含答案 - pt_BR: Include Answer - human_description: - en_US: Include answers in the search results - zh_Hans: 在搜索结果中包含答案 - pt_BR: Include answers in the search results - form: form - options: - - value: 'true' - label: - en_US: 'Yes' - zh_Hans: 是 - pt_BR: 'Yes' - - value: 'false' - label: - en_US: 'No' - zh_Hans: 否 - pt_BR: 'No' - default: 'false' - - name: include_raw_content - type: boolean - required: false - label: - en_US: Include Raw Content - zh_Hans: 包含原始内容 - pt_BR: Include Raw Content - human_description: - en_US: Include raw content in the search results - zh_Hans: 在搜索结果中包含原始内容 - pt_BR: Include raw content in the search results - form: form - options: - - value: 'true' - label: - en_US: 'Yes' - zh_Hans: 是 - pt_BR: 
'Yes' - - value: 'false' - label: - en_US: 'No' - zh_Hans: 否 - pt_BR: 'No' - default: 'false' - - name: max_results - type: number - required: false - label: - en_US: Max Results - zh_Hans: 最大结果 - pt_BR: Max Results - human_description: - en_US: The number of maximum search results to return - zh_Hans: 返回的最大搜索结果数 - pt_BR: The number of maximum search results to return - form: form - min: 1 - max: 20 - default: 5 - - name: include_domains - type: string - required: false - label: - en_US: Include Domains - zh_Hans: 包含域 - pt_BR: Include Domains - human_description: - en_US: A list of domains to specifically include in the search results - zh_Hans: 在搜索结果中特别包含的域名列表 - pt_BR: A list of domains to specifically include in the search results - form: form - - name: exclude_domains - type: string - required: false - label: - en_US: Exclude Domains - zh_Hans: 排除域 - pt_BR: Exclude Domains - human_description: - en_US: A list of domains to specifically exclude from the search results - zh_Hans: 从搜索结果中特别排除的域名列表 - pt_BR: A list of domains to specifically exclude from the search results - form: form diff --git a/api/core/tools/provider/builtin/tianditu/_assets/icon.svg b/api/core/tools/provider/builtin/tianditu/_assets/icon.svg deleted file mode 100644 index 749d4bda265ab0..00000000000000 --- a/api/core/tools/provider/builtin/tianditu/_assets/icon.svg +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/tianditu/tianditu.py b/api/core/tools/provider/builtin/tianditu/tianditu.py deleted file mode 100644 index cb7d7bd8bb2c41..00000000000000 --- a/api/core/tools/provider/builtin/tianditu/tianditu.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.tianditu.tools.poisearch import PoiSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class 
TiandituProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - PoiSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "content": "北京", - "specify": "156110000", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/tianditu/tianditu.yaml b/api/core/tools/provider/builtin/tianditu/tianditu.yaml deleted file mode 100644 index 77af834bdc5893..00000000000000 --- a/api/core/tools/provider/builtin/tianditu/tianditu.yaml +++ /dev/null @@ -1,32 +0,0 @@ -identity: - author: Listeng - name: tianditu - label: - en_US: Tianditu - zh_Hans: 天地图 - pt_BR: Tianditu - description: - en_US: The Tianditu tool provided the functions of place name search, geocoding, static maps generation, etc. in China region. - zh_Hans: 天地图工具可以调用天地图的接口,实现中国区域内的地名搜索、地理编码、静态地图等功能。 - pt_BR: The Tianditu tool provided the functions of place name search, geocoding, static maps generation, etc. in China region. 
- icon: icon.svg - tags: - - utilities - - travel -credentials_for_provider: - tianditu_api_key: - type: secret-input - required: true - label: - en_US: Tianditu API Key - zh_Hans: 天地图Key - pt_BR: Tianditu API key - placeholder: - en_US: Please input your Tianditu API key - zh_Hans: 请输入你的天地图Key - pt_BR: Please input your Tianditu API key - help: - en_US: Get your Tianditu API key from Tianditu - zh_Hans: 获取您的天地图Key - pt_BR: Get your Tianditu API key from Tianditu - url: http://lbs.tianditu.gov.cn/home.html diff --git a/api/core/tools/provider/builtin/tianditu/tools/geocoder.py b/api/core/tools/provider/builtin/tianditu/tools/geocoder.py deleted file mode 100644 index 690a0aed6f5aff..00000000000000 --- a/api/core/tools/provider/builtin/tianditu/tools/geocoder.py +++ /dev/null @@ -1,33 +0,0 @@ -import json -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GeocoderTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - base_url = "http://api.tianditu.gov.cn/geocoder" - - keyword = tool_parameters.get("keyword", "") - if not keyword: - return self.create_text_message("Invalid parameter keyword") - - tk = self.runtime.credentials["tianditu_api_key"] - - params = { - "keyWord": keyword, - } - - result = requests.get(base_url + "?ds=" + json.dumps(params, ensure_ascii=False) + "&tk=" + tk).json() - - return self.create_json_message(result) diff --git a/api/core/tools/provider/builtin/tianditu/tools/geocoder.yaml b/api/core/tools/provider/builtin/tianditu/tools/geocoder.yaml deleted file mode 100644 index d6a168f9502019..00000000000000 --- a/api/core/tools/provider/builtin/tianditu/tools/geocoder.yaml +++ /dev/null @@ -1,26 +0,0 @@ -identity: - name: geocoder - author: Listeng - label: - en_US: Get coords converted from 
address name - zh_Hans: 地理编码 - pt_BR: Get coords converted from address name -description: - human: - en_US: Geocoder - zh_Hans: 中国区域地理编码查询 - pt_BR: Geocoder - llm: A tool for geocoder in China -parameters: - - name: keyword - type: string - required: true - label: - en_US: keyword - zh_Hans: 搜索的关键字 - pt_BR: keyword - human_description: - en_US: keyword - zh_Hans: 搜索的关键字 - pt_BR: keyword - form: llm diff --git a/api/core/tools/provider/builtin/tianditu/tools/poisearch.py b/api/core/tools/provider/builtin/tianditu/tools/poisearch.py deleted file mode 100644 index 798dd94d335654..00000000000000 --- a/api/core/tools/provider/builtin/tianditu/tools/poisearch.py +++ /dev/null @@ -1,58 +0,0 @@ -import json -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class PoiSearchTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - geocoder_base_url = "http://api.tianditu.gov.cn/geocoder" - base_url = "http://api.tianditu.gov.cn/v2/search" - - keyword = tool_parameters.get("keyword", "") - if not keyword: - return self.create_text_message("Invalid parameter keyword") - - baseAddress = tool_parameters.get("baseAddress", "") - if not baseAddress: - return self.create_text_message("Invalid parameter baseAddress") - - tk = self.runtime.credentials["tianditu_api_key"] - - base_coords = requests.get( - geocoder_base_url - + "?ds=" - + json.dumps( - { - "keyWord": baseAddress, - }, - ensure_ascii=False, - ) - + "&tk=" - + tk - ).json() - - params = { - "keyWord": keyword, - "queryRadius": 5000, - "queryType": 3, - "pointLonlat": base_coords["location"]["lon"] + "," + base_coords["location"]["lat"], - "start": 0, - "count": 100, - } - - result = requests.get( - base_url + "?postStr=" + json.dumps(params, ensure_ascii=False) + 
"&type=query&tk=" + tk - ).json() - - return self.create_json_message(result) diff --git a/api/core/tools/provider/builtin/tianditu/tools/poisearch.yaml b/api/core/tools/provider/builtin/tianditu/tools/poisearch.yaml deleted file mode 100644 index 01289d24e3d29a..00000000000000 --- a/api/core/tools/provider/builtin/tianditu/tools/poisearch.yaml +++ /dev/null @@ -1,38 +0,0 @@ -identity: - name: point_of_interest_search - author: Listeng - label: - en_US: Point of Interest search - zh_Hans: 兴趣点搜索 - pt_BR: Point of Interest search -description: - human: - en_US: Search for certain types of points of interest around a location - zh_Hans: 搜索某个位置周边的5公里内某种类型的兴趣点 - pt_BR: Search for certain types of points of interest around a location - llm: A tool for searching for certain types of points of interest around a location -parameters: - - name: keyword - type: string - required: true - label: - en_US: poi keyword - zh_Hans: 兴趣点的关键字 - pt_BR: poi keyword - human_description: - en_US: poi keyword - zh_Hans: 兴趣点的关键字 - pt_BR: poi keyword - form: llm - - name: baseAddress - type: string - required: true - label: - en_US: base current point - zh_Hans: 当前位置的关键字 - pt_BR: base current point - human_description: - en_US: base current point - zh_Hans: 当前位置的关键字 - pt_BR: base current point - form: llm diff --git a/api/core/tools/provider/builtin/tianditu/tools/staticmap.py b/api/core/tools/provider/builtin/tianditu/tools/staticmap.py deleted file mode 100644 index aeaef088057686..00000000000000 --- a/api/core/tools/provider/builtin/tianditu/tools/staticmap.py +++ /dev/null @@ -1,49 +0,0 @@ -import json -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class PoiSearchTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - - geocoder_base_url = 
"http://api.tianditu.gov.cn/geocoder" - base_url = "http://api.tianditu.gov.cn/staticimage" - - keyword = tool_parameters.get("keyword", "") - if not keyword: - return self.create_text_message("Invalid parameter keyword") - - tk = self.runtime.credentials["tianditu_api_key"] - - keyword_coords = requests.get( - geocoder_base_url - + "?ds=" - + json.dumps( - { - "keyWord": keyword, - }, - ensure_ascii=False, - ) - + "&tk=" - + tk - ).json() - coords = keyword_coords["location"]["lon"] + "," + keyword_coords["location"]["lat"] - - result = requests.get( - base_url + "?center=" + coords + "&markers=" + coords + "&width=400&height=300&zoom=14&tk=" + tk - ).content - - return self.create_blob_message( - blob=result, meta={"mime_type": "image/png"}, save_as=self.VariableKey.IMAGE.value - ) diff --git a/api/core/tools/provider/builtin/tianditu/tools/staticmap.yaml b/api/core/tools/provider/builtin/tianditu/tools/staticmap.yaml deleted file mode 100644 index fc54c428066af5..00000000000000 --- a/api/core/tools/provider/builtin/tianditu/tools/staticmap.yaml +++ /dev/null @@ -1,26 +0,0 @@ -identity: - name: generate_static_map - author: Listeng - label: - en_US: Generate a static map - zh_Hans: 生成静态地图 - pt_BR: Generate a static map -description: - human: - en_US: Generate a static map - zh_Hans: 生成静态地图 - pt_BR: Generate a static map - llm: A tool for generate a static map -parameters: - - name: keyword - type: string - required: true - label: - en_US: keyword - zh_Hans: 搜索的关键字 - pt_BR: keyword - human_description: - en_US: keyword - zh_Hans: 搜索的关键字 - pt_BR: keyword - form: llm diff --git a/api/core/tools/provider/builtin/trello/_assets/icon.svg b/api/core/tools/provider/builtin/trello/_assets/icon.svg deleted file mode 100644 index f8e2bd47c0b818..00000000000000 --- a/api/core/tools/provider/builtin/trello/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/trello/tools/create_board.py 
b/api/core/tools/provider/builtin/trello/tools/create_board.py deleted file mode 100644 index 5a61d221578995..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/create_board.py +++ /dev/null @@ -1,44 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class CreateBoardTool(BuiltinTool): - """ - Tool for creating a new Trello board. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool]]) -> ToolInvokeMessage: - """ - Invoke the tool to create a new Trello board. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool]]): The parameters for the tool invocation. - - Returns: - ToolInvokeMessage: The result of the tool invocation. - """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - board_name = tool_parameters.get("name") - - if not (api_key and token and board_name): - return self.create_text_message("Missing required parameters: API key, token, or board name.") - - url = "https://api.trello.com/1/boards/" - query_params = {"name": board_name, "key": api_key, "token": token} - - try: - response = requests.post(url, params=query_params) - response.raise_for_status() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to create board") - - board = response.json() - return self.create_text_message( - text=f"Board created successfully! 
Board name: {board['name']}, ID: {board['id']}" - ) diff --git a/api/core/tools/provider/builtin/trello/tools/create_board.yaml b/api/core/tools/provider/builtin/trello/tools/create_board.yaml deleted file mode 100644 index 60dbab61f5ee5c..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/create_board.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: create_board - author: Yash Parmar - label: - en_US: Create Board - zh_Hans: 创建看板 - pt_BR: Criar Quadro -description: - human: - en_US: Creates a new Trello board with a specified name. This tool allows users to quickly add new boards to their Trello account, facilitating project organization and management. - zh_Hans: 使用指定的名称创建一个新的 Trello 看板。此工具允许用户快速向其 Trello 账户添加新的看板,促进项目组织和管理。 - pt_BR: Cria um novo quadro Trello com um nome especificado. Esta ferramenta permite que os usuários adicionem rapidamente novos quadros à sua conta Trello, facilitando a organização e gestão de projetos. - llm: Create a new Trello board using the specified name. This functionality simplifies the addition of boards, enhancing project organization and management within Trello. -parameters: - - name: name - type: string - required: true - label: - en_US: Board Name - zh_Hans: 看板名称 - pt_BR: Nome do Quadro - human_description: - en_US: The name for the new Trello board. This name helps in identifying and organizing your projects on Trello. - zh_Hans: 新 Trello 看板的名称。这个名称有助于在 Trello 上识别和组织您的项目。 - pt_BR: O nome para o novo quadro Trello. Este nome ajuda a identificar e organizar seus projetos no Trello. - llm_description: Specify the name for your new Trello board, aiding in project identification and organization within Trello. 
- form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/create_list_on_board.py b/api/core/tools/provider/builtin/trello/tools/create_list_on_board.py deleted file mode 100644 index b32b0124dd31da..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/create_list_on_board.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class CreateListOnBoardTool(BuiltinTool): - """ - Tool for creating a list on a Trello board by its ID. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool]]) -> ToolInvokeMessage: - """ - Invoke the tool to create a list on a Trello board by its ID. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool]]): The parameters for the tool invocation, - including the board ID and list name. - - Returns: - ToolInvokeMessage: The result of the tool invocation. - """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - board_id = tool_parameters.get("id") - list_name = tool_parameters.get("name") - - if not (api_key and token and board_id and list_name): - return self.create_text_message("Missing required parameters: API key, token, board ID, or list name.") - - url = f"https://api.trello.com/1/boards/{board_id}/lists" - params = {"name": list_name, "key": api_key, "token": token} - - try: - response = requests.post(url, params=params) - response.raise_for_status() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to create list") - - new_list = response.json() - return self.create_text_message( - text=f"List '{new_list['name']}' created successfully with Id {new_list['id']} on board {board_id}." 
- ) diff --git a/api/core/tools/provider/builtin/trello/tools/create_list_on_board.yaml b/api/core/tools/provider/builtin/trello/tools/create_list_on_board.yaml deleted file mode 100644 index 789b92437a3b3e..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/create_list_on_board.yaml +++ /dev/null @@ -1,40 +0,0 @@ -identity: - name: create_list_on_board - author: Yash Parmar - label: - en_US: Create List on Board - zh_Hans: 在看板上创建列表 - pt_BR: Criar Lista no Quadro -description: - human: - en_US: Creates a new list on a specified Trello board by providing the board's ID and the desired name for the list. Streamlines the process of organizing board content. - zh_Hans: 通过提供看板的 ID 和列表的所需名称,在指定的 Trello 看板上创建一个新列表。简化了组织看板内容的过程。 - pt_BR: Cria uma nova lista em um quadro Trello especificado, fornecendo o ID do quadro e o nome desejado para a lista. Facilita o processo de organização do conteúdo do quadro. - llm: Generate a new list within a Trello board by specifying the board's ID and a name for the list. Enhances board management by allowing quick additions of new lists. -parameters: - - name: id - type: string - required: true - label: - en_US: Board ID - zh_Hans: 看板 ID - pt_BR: ID do Quadro - human_description: - en_US: The unique identifier of the Trello board where the new list will be created. - zh_Hans: 新列表将被创建在其上的 Trello 看板的唯一标识符。 - pt_BR: O identificador único do quadro Trello onde a nova lista será criada. - llm_description: Input the ID of the Trello board to pinpoint where the new list should be added, ensuring correct placement. - form: llm - - name: name - type: string - required: true - label: - en_US: List Name - zh_Hans: 列表名称 - pt_BR: Nome da Lista - human_description: - en_US: The name for the new list to be created on the Trello board. - zh_Hans: 将在 Trello 看板上创建的新列表的名称。 - pt_BR: O nome para a nova lista que será criada no quadro Trello. 
- llm_description: Provide a name for the new list, defining its purpose or content focus, to facilitate board organization. - form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/create_new_card_on_board.py b/api/core/tools/provider/builtin/trello/tools/create_new_card_on_board.py deleted file mode 100644 index e98efb81ca673e..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/create_new_card_on_board.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class CreateNewCardOnBoardTool(BuiltinTool): - """ - Tool for creating a new card on a Trello board. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool, None]]) -> ToolInvokeMessage: - """ - Invoke the tool to create a new card on a Trello board. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool, None]]): The parameters for the tool invocation, - including details for the new card. - - Returns: - ToolInvokeMessage: The result of the tool invocation. - """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - - # Ensure required parameters are present - if "name" not in tool_parameters or "idList" not in tool_parameters: - return self.create_text_message("Missing required parameters: name or idList.") - - url = "https://api.trello.com/1/cards" - params = {**tool_parameters, "key": api_key, "token": token} - - try: - response = requests.post(url, params=params) - response.raise_for_status() - new_card = response.json() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to create card") - - return self.create_text_message( - text=f"New card '{new_card['name']}' created successfully with ID {new_card['id']}." 
- ) diff --git a/api/core/tools/provider/builtin/trello/tools/create_new_card_on_board.yaml b/api/core/tools/provider/builtin/trello/tools/create_new_card_on_board.yaml deleted file mode 100644 index 9953af718ddd58..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/create_new_card_on_board.yaml +++ /dev/null @@ -1,145 +0,0 @@ -identity: - name: create_new_card_on_board - author: Yash Parmar - label: - en_US: Create New Card on Board - zh_Hans: 在看板上创建新卡片 - pt_BR: Criar Novo Cartão no Quadro -description: - human: - en_US: Creates a new card on a Trello board with specified details like name, description, list ID, and other optional parameters. Facilitates task addition and project management within Trello. - zh_Hans: 用指定的详情(如名称、描述、列表 ID 和其他可选参数)在 Trello 看板上创建一个新卡片。便于在 Trello 中添加任务和管理项目。 - pt_BR: Cria um novo cartão em um quadro Trello com detalhes especificados, como nome, descrição, ID da lista e outros parâmetros opcionais. Facilita a adição de tarefas e a gestão de projetos dentro do Trello. - llm: Initiate a new card on a Trello board by specifying essential details such as the card's name, description, and the list it belongs to, among other settings. Streamlines project task additions and organizational workflows. -parameters: - - name: name - type: string - required: true - label: - en_US: Card Name - zh_Hans: 卡片名称 - pt_BR: Nome do Cartão - human_description: - en_US: The name for the new card. Acts as the primary identifier and summary of the card's purpose. - zh_Hans: 新卡片的名称。作为卡片目的的主要标识和总结。 - pt_BR: O nome para o novo cartão. Funciona como o identificador principal e resumo do propósito do cartão. - llm_description: Provide a concise, descriptive name for the card, outlining its main focus or task. - form: llm - # Include additional parameters like desc, pos, due, idList, etc., following the same pattern. 
- - name: desc - type: string - required: false - label: - en_US: Card Description - zh_Hans: 卡片描述 - pt_BR: Descrição do Cartão - human_description: - en_US: Optional. A brief description of the card's purpose or contents. - zh_Hans: 可选。卡片目的或内容的简要描述。 - pt_BR: Opcional. Uma breve descrição do propósito ou conteúdo do cartão. - llm_description: Add a brief description to the card to provide context or additional information about its purpose. - form: llm - - name: pos - type: string - required: false - label: - en_US: Position - zh_Hans: 位置 - pt_BR: Posição - human_description: - en_US: Optional. The position of the card in the list. Can be 'top', 'bottom', or a positive number. - zh_Hans: 可选。卡片在列表中的位置。可以是“top”、“bottom” 或正数。 - pt_BR: Opcional. A posição do cartão na lista. Pode ser 'top', 'bottom' ou um número positivo. - llm_description: Specify the position of the card within the list, either at the top, bottom, or a specific numerical index. - form: llm - - name: due - type: string - required: false - label: - en_US: Due Date - zh_Hans: 截止日期 - pt_BR: Data de Vencimento - human_description: - en_US: Optional. The due date for the card in the format 'MM/DD/YYYY'. - zh_Hans: 可选。卡片的截止日期,格式为“MM/DD/YYYY”。 - pt_BR: Opcional. A data de vencimento do cartão no formato 'MM/DD/YYYY'. - llm_description: Set a due date for the card to establish a deadline for completion or action. - form: llm - - name: start - type: string - required: false - label: - en_US: Start Date - zh_Hans: 开始日期 - pt_BR: Data de Início - human_description: - en_US: Optional. The start date for the card in the format 'MM/DD/YYYY'. - zh_Hans: 可选。卡片的开始日期,格式为“MM/DD/YYYY”。 - pt_BR: Opcional. A data de início do cartão no formato 'MM/DD/YYYY'. - llm_description: Specify a start date for the card to mark the beginning of a task or project phase. 
- form: llm - - name: dueComplete - type: boolean - required: false - label: - en_US: Due Complete - zh_Hans: 截止日期已完成 - pt_BR: Vencimento Concluído - human_description: - en_US: Optional. Set to true if the due date has been completed, or false if it is pending. - zh_Hans: 可选。如果截止日期已完成,则设置为 true;如果尚未完成,则设置为 false。 - pt_BR: Opcional. Defina como true se a data de vencimento foi concluída, ou como false se estiver pendente. - llm_description: Indicate whether the due date for the card has been marked as complete or is still pending. - form: llm - - name: idList - type: string - required: true - label: - en_US: List ID - zh_Hans: 列表 ID - pt_BR: ID da Lista - human_description: - en_US: The unique identifier of the list where the card will be added. - zh_Hans: 卡片将被添加到的列表的唯一标识符。 - pt_BR: O identificador único da lista onde o cartão será adicionado. - llm_description: Input the ID of the list where the card should be placed, ensuring it is added to the correct list. - form: llm - - name: idMembers - type: string - required: false - label: - en_US: Member IDs - zh_Hans: 成员 ID - pt_BR: IDs de Membros - human_description: - en_US: Optional. The IDs of members to assign to the card. - zh_Hans: 可选。要分配给卡片的成员的 ID。 - pt_BR: Opcional. Os IDs dos membros a serem atribuídos ao cartão. - llm_description: Specify the IDs of members to assign to the card, allowing for task delegation or collaboration. - form: llm - - name: idLabels - type: string - required: false - label: - en_US: Label IDs - zh_Hans: 标签 ID - pt_BR: IDs de Etiquetas - human_description: - en_US: Optional. The IDs of labels to assign to the card. - zh_Hans: 可选。要分配给卡片的标签的 ID。 - pt_BR: Opcional. Os IDs das etiquetas a serem atribuídos ao cartão. - llm_description: Assign specific labels to the card by providing their IDs, aiding in visual categorization or prioritization. 
- form: llm - - name: urlSource - type: string - required: false - label: - en_US: Source URL - zh_Hans: 来源 URL - pt_BR: URL de Origem - human_description: - en_US: Optional. The URL to attach as the card's source. - zh_Hans: 可选。要附加为卡片来源的 URL。 - pt_BR: Opcional. O URL a ser anexado como a fonte do cartão. - llm_description: Provide a URL to serve as the source reference for the card, linking to external resources or documents. - form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/delete_board.py b/api/core/tools/provider/builtin/trello/tools/delete_board.py deleted file mode 100644 index 7fc9d1f13c2664..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/delete_board.py +++ /dev/null @@ -1,41 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DeleteBoardTool(BuiltinTool): - """ - Tool for deleting a Trello board by ID. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool]]) -> ToolInvokeMessage: - """ - Invoke the tool to delete a Trello board by its ID. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool]]): The parameters for the tool invocation, - including the board ID. - - Returns: - ToolInvokeMessage: The result of the tool invocation. 
- """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - board_id = tool_parameters.get("boardId") - - if not (api_key and token and board_id): - return self.create_text_message("Missing required parameters: API key, token, or board ID.") - - url = f"https://api.trello.com/1/boards/{board_id}?key={api_key}&token={token}" - - try: - response = requests.delete(url) - response.raise_for_status() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to delete board") - - return self.create_text_message(text=f"Board with ID {board_id} deleted successfully.") diff --git a/api/core/tools/provider/builtin/trello/tools/delete_board.yaml b/api/core/tools/provider/builtin/trello/tools/delete_board.yaml deleted file mode 100644 index f043e78870d062..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/delete_board.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: delete_board - author: Yash Parmar - label: - en_US: Delete Board - zh_Hans: 删除看板 - pt_BR: Excluir Quadro -description: - human: - en_US: Deletes a Trello board using its unique ID. This tool allows for the removal of boards that are no longer needed, ensuring a tidy workspace. - zh_Hans: 使用其唯一 ID 删除 Trello 看板。此工具允许删除不再需要的看板,确保工作区整洁。 - pt_BR: Exclui um quadro Trello usando seu ID único. Esta ferramenta permite a remoção de quadros que não são mais necessários, garantindo um espaço de trabalho organizado. - llm: Remove a Trello board by specifying its ID. This functionality is helpful for cleaning up unnecessary boards from your Trello account. -parameters: - - name: boardId - type: string - required: true - label: - en_US: Board ID - zh_Hans: 看板 ID - pt_BR: ID do Quadro - human_description: - en_US: The unique identifier for the Trello board you wish to delete. This ensures the specific board is accurately targeted for deletion. 
- zh_Hans: 您希望删除的 Trello 看板的唯一标识符。这确保了准确地针对特定看板进行删除。 - pt_BR: O identificador único para o quadro Trello que você deseja excluir. Isso garante que o quadro específico seja precisamente direcionado para exclusão. - llm_description: Enter the ID of the Trello board you want to remove. This ID is essential to identify the board precisely and perform the deletion. - form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/delete_card.py b/api/core/tools/provider/builtin/trello/tools/delete_card.py deleted file mode 100644 index 1de98d639ebb7d..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/delete_card.py +++ /dev/null @@ -1,41 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DeleteCardByIdTool(BuiltinTool): - """ - Tool for deleting a Trello card by its ID. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool]]) -> ToolInvokeMessage: - """ - Invoke the tool to delete a Trello card by its ID. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool]]): The parameters for the tool invocation, - including the card ID. - - Returns: - ToolInvokeMessage: The result of the tool invocation. 
- """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - card_id = tool_parameters.get("id") - - if not (api_key and token and card_id): - return self.create_text_message("Missing required parameters: API key, token, or card ID.") - - url = f"https://api.trello.com/1/cards/{card_id}?key={api_key}&token={token}" - - try: - response = requests.delete(url) - response.raise_for_status() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to delete card") - - return self.create_text_message(text=f"Card with ID {card_id} has been successfully deleted.") diff --git a/api/core/tools/provider/builtin/trello/tools/delete_card.yaml b/api/core/tools/provider/builtin/trello/tools/delete_card.yaml deleted file mode 100644 index 8898ef1bde3680..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/delete_card.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: delete_card_by_id - author: Yash Parmar - label: - en_US: Delete Card by ID - zh_Hans: 通过 ID 删除卡片 - pt_BR: Deletar Cartão por ID -description: - human: - en_US: Deletes a Trello card using its unique ID. This tool facilitates the removal of cards that are no longer needed, maintaining an organized board. - zh_Hans: 使用其唯一 ID 删除 Trello 卡片。此工具便于删除不再需要的卡片,保持看板的有序。 - pt_BR: Exclui um cartão Trello usando seu ID único. Esta ferramenta facilita a remoção de cartões que não são mais necessários, mantendo um quadro organizado. - llm: Remove a specific Trello card by providing its ID. Ideal for cleaning up and organizing your Trello boards by eliminating unwanted cards. -parameters: - - name: id - type: string - required: true - label: - en_US: Card ID - zh_Hans: 卡片 ID - pt_BR: ID do Cartão - human_description: - en_US: The unique identifier of the Trello card you wish to delete. This ensures the precise card is removed. 
- zh_Hans: 您希望删除的 Trello 卡片的唯一标识符。这确保了精确移除特定卡片。 - pt_BR: O identificador único do cartão Trello que você deseja excluir. Isso garante que o cartão exato seja removido. - llm_description: Input the ID of the Trello card targeted for deletion to ensure accurate and specific removal. - form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/fetch_all_boards.py b/api/core/tools/provider/builtin/trello/tools/fetch_all_boards.py deleted file mode 100644 index 0c5ed9ea8533ff..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/fetch_all_boards.py +++ /dev/null @@ -1,50 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class FetchAllBoardsTool(BuiltinTool): - """ - Tool for fetching all boards from Trello. - """ - - def _invoke( - self, user_id: str, tool_parameters: dict[str, Union[str, int, bool]] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the fetch all boards tool. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool]]): The parameters for the tool invocation. - - Returns: - Union[ToolInvokeMessage, List[ToolInvokeMessage]]: The result of the tool invocation. - """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - - if not (api_key and token): - return self.create_text_message("Missing Trello API key or token in credentials.") - - # Including board filter in the request if provided - board_filter = tool_parameters.get("boards", "open") - url = f"https://api.trello.com/1/members/me/boards?filter={board_filter}&key={api_key}&token={token}" - - try: - response = requests.get(url) - response.raise_for_status() # Raises stored HTTPError, if one occurred. 
- except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to fetch boards") - - boards = response.json() - - if not boards: - return self.create_text_message("No boards found in Trello.") - - # Creating a string with both board names and IDs - boards_info = ", ".join([f"{board['name']} (ID: {board['id']})" for board in boards]) - return self.create_text_message(text=f"Boards: {boards_info}") diff --git a/api/core/tools/provider/builtin/trello/tools/fetch_all_boards.yaml b/api/core/tools/provider/builtin/trello/tools/fetch_all_boards.yaml deleted file mode 100644 index d0ac4beaaa723a..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/fetch_all_boards.yaml +++ /dev/null @@ -1,28 +0,0 @@ -identity: - name: fetch_all_boards - author: Yash Parmar - label: - en_US: Fetch All Boards - zh_Hans: 获取所有看板 - pt_BR: Buscar Todos os Quadros -description: - human: - en_US: Retrieves all the Trello boards associated with the user's account. This tool provides a quick overview of all open boards, aiding in efficient project management and organization. - zh_Hans: 检索与用户账户关联的所有 Trello 看板。该工具提供了所有打开的看板的快速概览,有助于高效的项目管理和组织。 - pt_BR: Recupera todos os quadros do Trello associados à conta do usuário. Esta ferramenta oferece uma visão geral rápida de todos os quadros abertos, auxiliando na gestão e organização eficiente do projeto. - llm: This tool fetches all Trello boards linked to the user's account, offering a swift snapshot of open boards to streamline project management and organization tasks. -parameters: - - name: boards - type: string - required: false - default: open - label: - en_US: Boards filter - zh_Hans: 看板过滤器 - pt_BR: Filtro de quadros - human_description: - en_US: Specifies the type of boards to retrieve. Default is 'open', fetching all open boards. Other options include 'closed', 'members', 'organization', etc. 
- zh_Hans: 指定要检索的看板类型。默认为“open”,获取所有打开的看板。其他选项包括“closed”,“members”,“organization”等。 - pt_BR: Especifica o tipo de quadros a serem recuperados. O padrão é 'open', buscando todos os quadros abertos. Outras opções incluem 'closed', 'members', 'organization', etc. - llm_description: Determines the category of boards to be displayed, with 'open' as the default setting to show all open boards. Variants like 'closed', 'members', and 'organization' are also selectable. - form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/get_board_actions.py b/api/core/tools/provider/builtin/trello/tools/get_board_actions.py deleted file mode 100644 index cabc7ce09359d5..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/get_board_actions.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GetBoardActionsTool(BuiltinTool): - """ - Tool for retrieving actions for a Trello board by its ID. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool]]) -> ToolInvokeMessage: - """ - Invoke the tool to retrieve actions for a Trello board by its ID. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool]]): The parameters for the tool invocation, - including the board ID. - - Returns: - ToolInvokeMessage: The result of the tool invocation. 
- """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - board_id = tool_parameters.get("boardId") - - if not (api_key and token and board_id): - return self.create_text_message("Missing required parameters: API key, token, or board ID.") - - url = f"https://api.trello.com/1/boards/{board_id}/actions?key={api_key}&token={token}" - - try: - response = requests.get(url) - response.raise_for_status() - actions = response.json() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to retrieve board actions") - - actions_summary = "\n".join( - [f"{action['type']}: {action.get('data', {}).get('text', 'No details available')}" for action in actions] - ) - return self.create_text_message(text=f"Actions for Board ID {board_id}:\n{actions_summary}") diff --git a/api/core/tools/provider/builtin/trello/tools/get_board_actions.yaml b/api/core/tools/provider/builtin/trello/tools/get_board_actions.yaml deleted file mode 100644 index 1ba89f9e44abbd..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/get_board_actions.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: get_board_actions - author: Yash Parmar - label: - en_US: Get Board Actions - zh_Hans: 获取看板操作 - pt_BR: Obter Ações do Quadro -description: - human: - en_US: Retrieves a list of actions (such as updates, movements, and comments) for a Trello board by its ID. This tool provides insights into the board's activity history. - zh_Hans: 通过其 ID 为 Trello 看板检索操作列表(如更新、移动和评论)。此工具提供了看板活动历史的见解。 - pt_BR: Recupera uma lista de ações (como atualizações, movimentos e comentários) para um quadro Trello pelo seu ID. Esta ferramenta oferece insights sobre o histórico de atividades do quadro. - llm: Fetch the sequence of actions performed on a Trello board, such as card updates, movements, and comments, by providing the board's ID. Offers a historical view of board activities. 
-parameters: - - name: boardId - type: string - required: true - label: - en_US: Board ID - zh_Hans: 看板 ID - pt_BR: ID do Quadro - human_description: - en_US: The unique identifier of the Trello board for which you want to retrieve actions. It targets the specific board to fetch its activity log. - zh_Hans: 您想要检索操作的 Trello 看板的唯一标识符。它定位特定的看板以获取其活动日志。 - pt_BR: O identificador único do quadro Trello para o qual você deseja recuperar ações. Direciona especificamente para o quadro para buscar seu registro de atividades. - llm_description: Input the ID of the Trello board to access its detailed action history, including all updates, comments, and movements related to the board. - form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/get_board_by_id.py b/api/core/tools/provider/builtin/trello/tools/get_board_by_id.py deleted file mode 100644 index fe42cd9c5cbf86..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/get_board_by_id.py +++ /dev/null @@ -1,66 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GetBoardByIdTool(BuiltinTool): - """ - Tool for retrieving detailed information about a Trello board by its ID. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool]]) -> ToolInvokeMessage: - """ - Invoke the tool to retrieve a Trello board by its ID. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool]]): The parameters for the tool invocation, - including the board ID. - - Returns: - ToolInvokeMessage: The result of the tool invocation. 
- """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - board_id = tool_parameters.get("boardId") - - if not (api_key and token and board_id): - return self.create_text_message("Missing required parameters: API key, token, or board ID.") - - url = f"https://api.trello.com/1/boards/{board_id}?key={api_key}&token={token}" - - try: - response = requests.get(url) - response.raise_for_status() - board = response.json() - board_details = self.format_board_details(board) - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to retrieve board") - - return self.create_text_message(text=board_details) - - def format_board_details(self, board: dict) -> str: - """ - Format the board details into a human-readable string. - - Args: - board (dict): The board information as a dictionary. - - Returns: - str: Formatted board details. - """ - details = ( - f"Board Name: {board['name']}\n" - f"Board ID: {board['id']}\n" - f"Description: {board['desc'] or 'No description provided.'}\n" - f"Status: {'Closed' if board['closed'] else 'Open'}\n" - f"Organization ID: {board['idOrganization'] or 'Not part of an organization.'}\n" - f"URL: {board['url']}\n" - f"Short URL: {board['shortUrl']}\n" - f"Permission Level: {board['prefs']['permissionLevel']}\n" - f"Background Color: {board['prefs']['backgroundColor']}" - ) - return details diff --git a/api/core/tools/provider/builtin/trello/tools/get_board_by_id.yaml b/api/core/tools/provider/builtin/trello/tools/get_board_by_id.yaml deleted file mode 100644 index 45c93006ba4414..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/get_board_by_id.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: get_board_by_id - author: Yash Parmar - label: - en_US: Get Board by ID - zh_Hans: 通过 ID 获取看板 - pt_BR: Obter Quadro por ID -description: - human: - en_US: Retrieves detailed information about a specific Trello board using its 
unique ID. This tool enables users to quickly access board details without navigating through the Trello interface. - zh_Hans: 使用其唯一 ID 检索有关特定 Trello 看板的详细信息。此工具使用户能够快速访问看板详情,无需通过 Trello 界面导航。 - pt_BR: Recupera informações detalhadas sobre um quadro Trello específico usando seu ID único. Esta ferramenta permite que os usuários acessem rapidamente os detalhes do quadro sem navegar pela interface do Trello. - llm: Access details of a Trello board by providing its ID. This tool offers a direct way to view board information, simplifying the process of managing and reviewing Trello boards. -parameters: - - name: boardId - type: string - required: true - label: - en_US: Board ID - zh_Hans: 看板 ID - pt_BR: ID do Quadro - human_description: - en_US: The unique identifier for the Trello board you wish to retrieve. This ID enables precise targeting and fetching of the board's details. - zh_Hans: 您希望检索的 Trello 看板的唯一标识符。此 ID 使能够准确定位和获取看板的详细信息。 - pt_BR: O identificador único do quadro Trello que você deseja recuperar. Este ID permite o direcionamento preciso e a obtenção dos detalhes do quadro. - llm_description: Input the ID of the Trello board to get its details. This unique ID ensures accurate retrieval of information about the specified board. - form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/get_board_cards.py b/api/core/tools/provider/builtin/trello/tools/get_board_cards.py deleted file mode 100644 index ff2b1221e767de..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/get_board_cards.py +++ /dev/null @@ -1,43 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GetBoardCardsTool(BuiltinTool): - """ - Tool for retrieving cards on a Trello board by its ID. 
- """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool]]) -> ToolInvokeMessage: - """ - Invoke the tool to retrieve cards on a Trello board by its ID. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool]]): The parameters for the tool invocation, - including the board ID. - - Returns: - ToolInvokeMessage: The result of the tool invocation. - """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - board_id = tool_parameters.get("boardId") - - if not (api_key and token and board_id): - return self.create_text_message("Missing required parameters: API key, token, or board ID.") - - url = f"https://api.trello.com/1/boards/{board_id}/cards?key={api_key}&token={token}" - - try: - response = requests.get(url) - response.raise_for_status() - cards = response.json() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to retrieve board cards") - - cards_summary = "\n".join([f"{card['name']} (ID: {card['id']})" for card in cards]) - return self.create_text_message(text=f"Cards for Board ID {board_id}:\n{cards_summary}") diff --git a/api/core/tools/provider/builtin/trello/tools/get_board_cards.yaml b/api/core/tools/provider/builtin/trello/tools/get_board_cards.yaml deleted file mode 100644 index 852ea278af341c..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/get_board_cards.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: get_board_cards - author: Yash Parmar - label: - en_US: Get Board Cards - zh_Hans: 获取看板卡片 - pt_BR: Obter Cartões do Quadro -description: - human: - en_US: Retrieves all cards present on a specific Trello board by its ID, providing a list of card names and their IDs. Useful for managing and organizing project tasks. 
- zh_Hans: 通过其 ID 检索特定 Trello 看板上的所有卡片,提供卡片名称及其 ID 的列表。用于管理和组织项目任务。 - pt_BR: Recupera todos os cartões presentes em um quadro Trello específico pelo seu ID, fornecendo uma lista dos nomes dos cartões e seus IDs. Útil para gerenciar e organizar tarefas de projetos. - llm: Obtain a list of all cards on a specific Trello board by entering the board's ID. This tool helps in quickly assessing the tasks or items associated with the board. -parameters: - - name: boardId - type: string - required: true - label: - en_US: Board ID - zh_Hans: 看板 ID - pt_BR: ID do Quadro - human_description: - en_US: The unique identifier of the Trello board from which you want to retrieve cards. It specifies the exact board to gather card details from. - zh_Hans: 您想要从中检索卡片的 Trello 看板的唯一标识符。它指定了要从中收集卡片详细信息的确切看板。 - pt_BR: O identificador único do quadro Trello do qual você deseja recuperar os cartões. Especifica o quadro exato para obter detalhes dos cartões. - llm_description: Input the ID of the Trello board to fetch its cards, allowing for a detailed overview of the board's contents. - form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/get_filterd_board_cards.py b/api/core/tools/provider/builtin/trello/tools/get_filterd_board_cards.py deleted file mode 100644 index 3d7f9f4ad1c996..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/get_filterd_board_cards.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GetFilteredBoardCardsTool(BuiltinTool): - """ - Tool for retrieving filtered cards on a Trello board by its ID and a specified filter. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool]]) -> ToolInvokeMessage: - """ - Invoke the tool to retrieve filtered cards on a Trello board by its ID and filter. - - Args: - user_id (str): The ID of the user invoking the tool. 
- tool_parameters (dict[str, Union[str, int, bool]]): The parameters for the tool invocation, - including the board ID and filter. - - Returns: - ToolInvokeMessage: The result of the tool invocation. - """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - board_id = tool_parameters.get("boardId") - filter = tool_parameters.get("filter") - - if not (api_key and token and board_id and filter): - return self.create_text_message("Missing required parameters: API key, token, board ID, or filter.") - - url = f"https://api.trello.com/1/boards/{board_id}/cards/{filter}?key={api_key}&token={token}" - - try: - response = requests.get(url) - response.raise_for_status() - filtered_cards = response.json() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to retrieve filtered cards") - - card_details = "\n".join([f"{card['name']} (ID: {card['id']})" for card in filtered_cards]) - return self.create_text_message( - text=f"Filtered Cards for Board ID {board_id} with Filter '{filter}':\n{card_details}" - ) diff --git a/api/core/tools/provider/builtin/trello/tools/get_filterd_board_cards.yaml b/api/core/tools/provider/builtin/trello/tools/get_filterd_board_cards.yaml deleted file mode 100644 index 390595645771e4..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/get_filterd_board_cards.yaml +++ /dev/null @@ -1,40 +0,0 @@ -identity: - name: get_filtered_board_cards - author: Yash Parmar - label: - en_US: Get Filtered Board Cards - zh_Hans: 获取筛选的看板卡片 - pt_BR: Obter Cartões Filtrados do Quadro -description: - human: - en_US: Retrieves cards from a Trello board using a specified filter and the board's ID. Filters include options like 'all', 'open', 'closed', 'none', and 'visible', allowing for tailored views of board content. 
- zh_Hans: 使用指定的过滤器和看板的 ID 从 Trello 看板检索卡片。过滤器包括 'all', 'open', 'closed', 'none' 和 'visible' 等选项,允许对看板内容进行定制查看。 - pt_BR: Recupera cartões de um quadro Trello usando um filtro especificado e o ID do quadro. Os filtros incluem opções como 'all', 'open', 'closed', 'none' e 'visible', permitindo visualizações personalizadas do conteúdo do quadro. - llm: Access cards on a Trello board through specific filters such as 'all', 'open', 'closed', 'none', and 'visible' by providing the board's ID. This feature enables focused examination of the board's cards. -parameters: - - name: boardId - type: string - required: true - label: - en_US: Board ID - zh_Hans: 看板 ID - pt_BR: ID do Quadro - human_description: - en_US: The unique identifier for the Trello board from which to retrieve the filtered cards. - zh_Hans: 用于检索筛选卡片的 Trello 看板的唯一标识符。 - pt_BR: O identificador único do quadro Trello do qual os cartões filtrados serão recuperados. - llm_description: Enter the Trello board's ID to specify from which board to fetch the cards using the filter. - form: llm - - name: filter - type: string - required: true - label: - en_US: Filter - zh_Hans: 过滤器 - pt_BR: Filtro - human_description: - en_US: The filter to apply when retrieving cards. Valid values are 'all', 'open', 'closed', 'none', and 'visible'. - zh_Hans: 检索卡片时应用的过滤器。有效值为 'all', 'open', 'closed', 'none', 和 'visible'。 - pt_BR: O filtro a ser aplicado ao recuperar cartões. Os valores válidos são 'all', 'open', 'closed', 'none' e 'visible'. - llm_description: Specify the filter for card retrieval. Choose from 'all', 'open', 'closed', 'none', or 'visible' to control which cards are fetched. 
- form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/get_lists_on_board.py b/api/core/tools/provider/builtin/trello/tools/get_lists_on_board.py deleted file mode 100644 index ccf404068f225e..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/get_lists_on_board.py +++ /dev/null @@ -1,43 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GetListsFromBoardTool(BuiltinTool): - """ - Tool for retrieving all lists from a specified Trello board by its ID. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool]]) -> ToolInvokeMessage: - """ - Invoke the tool to get all lists from a specified Trello board. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool]]): The parameters for the tool invocation, - including the board ID. - - Returns: - ToolInvokeMessage: The result of the tool invocation. 
- """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - board_id = tool_parameters.get("boardId") - - if not (api_key and token and board_id): - return self.create_text_message("Missing required parameters: API key, token, or board ID.") - - url = f"https://api.trello.com/1/boards/{board_id}/lists?key={api_key}&token={token}" - - try: - response = requests.get(url) - response.raise_for_status() - lists = response.json() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to retrieve lists") - - lists_info = "\n".join([f"{list['name']} (ID: {list['id']})" for list in lists]) - return self.create_text_message(text=f"Lists on Board ID {board_id}:\n{lists_info}") diff --git a/api/core/tools/provider/builtin/trello/tools/get_lists_on_board.yaml b/api/core/tools/provider/builtin/trello/tools/get_lists_on_board.yaml deleted file mode 100644 index 31028a80404de3..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/get_lists_on_board.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: get_lists_from_board - author: Yash Parmar - label: - en_US: Get Lists from Board - zh_Hans: 获取看板的列表 - pt_BR: Obter Listas do Quadro -description: - human: - en_US: Retrieves all lists from a specified Trello board by its ID, providing an overview of the board's organization and current phases or categories. - zh_Hans: 通过其 ID 从指定的 Trello 看板检索所有列表,提供看板组织和当前阶段或类别的概览。 - pt_BR: Recupera todas as listas de um quadro Trello especificado pelo seu ID, fornecendo uma visão geral da organização do quadro e das fases ou categorias atuais. - llm: Fetch and display all lists from a specific Trello board by inputting the board's ID. This aids in understanding the board's structure and task categorization. 
-parameters: - - name: boardId - type: string - required: true - label: - en_US: Board ID - zh_Hans: 看板 ID - pt_BR: ID do Quadro - human_description: - en_US: The unique identifier of the Trello board from which to retrieve the lists. - zh_Hans: 用于检索列表的 Trello 看板的唯一标识符。 - pt_BR: O identificador único do quadro Trello do qual as listas serão recuperadas. - llm_description: Enter the ID of the Trello board to obtain a detailed list of all its lists, providing insight into the board's structure. - form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/update_board.py b/api/core/tools/provider/builtin/trello/tools/update_board.py deleted file mode 100644 index 1e358b00f49add..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/update_board.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class UpdateBoardByIdTool(BuiltinTool): - """ - Tool for updating a Trello board by its ID with various parameters. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool, None]]) -> ToolInvokeMessage: - """ - Invoke the tool to update a Trello board by its ID. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool, None]]): The parameters for the tool invocation, - including board ID and updates. - - Returns: - ToolInvokeMessage: The result of the tool invocation. 
- """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - board_id = tool_parameters.pop("boardId", None) - - if not (api_key and token and board_id): - return self.create_text_message("Missing required parameters: API key, token, or board ID.") - - url = f"https://api.trello.com/1/boards/{board_id}" - - # Removing parameters not intended for update action or with None value - params = {k: v for k, v in tool_parameters.items() if v is not None} - params["key"] = api_key - params["token"] = token - - try: - response = requests.put(url, params=params) - response.raise_for_status() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to update board") - - updated_board = response.json() - return self.create_text_message(text=f"Board '{updated_board['name']}' updated successfully.") diff --git a/api/core/tools/provider/builtin/trello/tools/update_board.yaml b/api/core/tools/provider/builtin/trello/tools/update_board.yaml deleted file mode 100644 index 487919631ade34..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/update_board.yaml +++ /dev/null @@ -1,157 +0,0 @@ -identity: - name: update_board_by_id - author: Yash Parmar - label: - en_US: Update Board by ID - zh_Hans: 通过 ID 更新看板 - pt_BR: Atualizar Quadro por ID -description: - human: - en_US: Updates a Trello board's settings based on the provided ID and parameters. Allows for changing the board's name, description, status, and other preferences. - zh_Hans: 根据提供的 ID 和参数更新 Trello 看板的设置。允许更改看板的名称、描述、状态和其他偏好设置。 - pt_BR: Atualiza as configurações de um quadro Trello com base no ID fornecido e nos parâmetros. Permite alterar o nome, descrição, status e outras preferências do quadro. - llm: Modify a Trello board's attributes like its name, description, and visibility settings using the board's ID. This tool streamlines board customization and management. 
-parameters: - - name: boardId - type: string - required: true - label: - en_US: Board ID - zh_Hans: 看板 ID - pt_BR: ID do Quadro - human_description: - en_US: The unique identifier of the Trello board you want to update. Ensures targeted and precise updates. - zh_Hans: 您要更新的 Trello 看板的唯一标识符。确保目标准确和更新精确。 - pt_BR: O identificador único do quadro Trello que você deseja atualizar. Garante atualizações direcionadas e precisas. - llm_description: Provide the specific ID of the Trello board you aim to update to ensure accuracy in modification process. - form: llm - - name: name - type: string - required: false - label: - en_US: Board Name - zh_Hans: 看板名称 - pt_BR: Nome do Quadro - human_description: - en_US: Optional. The new name for the board. - zh_Hans: 可选。看板的新名称。 - pt_BR: Opcional. O novo nome para o quadro. - llm_description: Enter a new name for the board if you wish to change it; this name identifies the board in Trello. - form: llm - - name: desc - type: string - required: false - label: - en_US: Board Description - zh_Hans: 看板描述 - pt_BR: Descrição do Quadro - human_description: - en_US: Optional. The new description for the board. - zh_Hans: 可选。看板的新描述。 - pt_BR: Opcional. A nova descrição para o quadro. - llm_description: Provide a new description for the board if you wish to update it; this description provides additional context about the board. - form: llm - - name: closed - type: boolean - required: false - label: - en_US: Closed - zh_Hans: 已关闭 - pt_BR: Fechado - human_description: - en_US: Optional. Set to true to close the board, or false to keep it open. - zh_Hans: 可选。设置为 true 以关闭看板,或设置为 false 以保持打开。 - pt_BR: Opcional. Defina como true para fechar o quadro ou como false para mantê-lo aberto. - llm_description: Specify whether the board should be closed or kept open by setting this parameter to true or false. 
- form: llm - - name: subscribed - type: string - required: false - label: - en_US: Subscribed - zh_Hans: 订阅 - pt_BR: Inscrito - human_description: - en_US: Optional. Set to true to subscribe to the board, or false to unsubscribe. - zh_Hans: 可选。设置为 true 以订阅看板,或设置为 false 以取消订阅。 - pt_BR: Opcional. Defina como true para se inscrever no quadro ou como false para cancelar a inscrição. - llm_description: Choose to subscribe or unsubscribe from the board by setting this parameter to true or false. - form: llm - - name: idOrganization - type: string - required: false - label: - en_US: Organization ID - zh_Hans: 组织 ID - pt_BR: ID da Organização - human_description: - en_US: Optional. The ID of the organization to which the board belongs. - zh_Hans: 可选。看板所属组织的 ID。 - pt_BR: Opcional. O ID da organização à qual o quadro pertence. - llm_description: Input the ID of the organization to which the board is associated, if applicable. - form: llm - - name: prefs_permissionLevel - type: string - required: false - label: - en_US: Permission Level - zh_Hans: 权限级别 - pt_BR: Nível de Permissão - human_description: - en_US: Optional. The permission level for the board. Valid values are 'private', 'org', or 'public'. - zh_Hans: 可选。看板的权限级别。有效值为 'private'、'org' 或 'public'。 - pt_BR: Opcional. O nível de permissão para o quadro. Os valores válidos são 'private', 'org' ou 'public'. - llm_description: Specify the permission level for the board by choosing from 'private', 'org', or 'public'. - form: llm - - name: prefs_selfJoin - type: boolean - required: false - label: - en_US: Allow Self-Join - zh_Hans: 允许自行加入 - pt_BR: Permitir Auto-Inscrição - human_description: - en_US: Optional. Set to true to allow members to join the board without an invitation, or false to require an invitation. - zh_Hans: 可选。设置为 true 以允许成员加入看板而无需邀请,或设置为 false 以要求邀请。 - pt_BR: Opcional. Defina como true para permitir que os membros se inscrevam no quadro sem um convite, ou como false para exigir um convite. 
- llm_description: Choose whether to allow members to join the board without an invitation by setting this parameter to true or false. - form: llm - - name: prefs_cardCovers - type: boolean - required: false - label: - en_US: Card Covers - zh_Hans: 卡片封面 - pt_BR: Capas de Cartão - human_description: - en_US: Optional. Set to true to enable card covers, or false to disable them. - zh_Hans: 可选。设置为 true 以启用卡片封面,或设置为 false 以禁用卡片封面。 - pt_BR: Opcional. Defina como true para habilitar capas de cartão ou como false para desabilitá-las. - llm_description: Enable or disable card covers by setting this parameter to true or false. - form: llm - - name: prefs_hideVotes - type: boolean - required: false - label: - en_US: Hide Votes - zh_Hans: 隐藏投票 - pt_BR: Ocultar Votos - human_description: - en_US: Optional. Set to true to hide votes, or false to show them. - zh_Hans: 可选。设置为 true 以隐藏投票,或设置为 false 以显示投票。 - pt_BR: Opcional. Defina como true para ocultar votos ou como false para mostrá-los. - llm_description: Choose to hide or show votes by setting this parameter to true or false. - form: llm - - name: prefs_invitations - type: string - required: false - label: - en_US: Invitations - zh_Hans: 邀请 - pt_BR: Convites - human_description: - en_US: Optional. Set to 'members' to allow only board members to send invitations, or 'admins' to allow admins to send invitations. - zh_Hans: 可选。设置为 'members' 以仅允许看板成员发送邀请,或设置为 'admins' 以允许管理员发送邀请。 - pt_BR: Opcional. Defina como 'members' para permitir que apenas membros do quadro enviem convites, ou 'admins' para permitir que os administradores enviem convites. - llm_description: Choose who can send invitations by setting this parameter to 'members' or 'admins'. 
- form: llm diff --git a/api/core/tools/provider/builtin/trello/tools/update_card.py b/api/core/tools/provider/builtin/trello/tools/update_card.py deleted file mode 100644 index d25fcbafaa6326..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/update_card.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class UpdateCardByIdTool(BuiltinTool): - """ - Tool for updating a Trello card by its ID. - """ - - def _invoke(self, user_id: str, tool_parameters: dict[str, Union[str, int, bool, None]]) -> ToolInvokeMessage: - """ - Invoke the tool to update a Trello card by its ID. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (dict[str, Union[str, int, bool, None]]): The parameters for the tool invocation, - including the card ID and updates. - - Returns: - ToolInvokeMessage: The result of the tool invocation. - """ - api_key = self.runtime.credentials.get("trello_api_key") - token = self.runtime.credentials.get("trello_api_token") - card_id = tool_parameters.get("id") - - if not (api_key and token and card_id): - return self.create_text_message("Missing required parameters: API key, token, or card ID.") - - # Constructing the URL and the payload for the PUT request - url = f"https://api.trello.com/1/cards/{card_id}" - params = {k: v for k, v in tool_parameters.items() if v is not None and k != "id"} - params.update({"key": api_key, "token": token}) - - try: - response = requests.put(url, params=params) - response.raise_for_status() - except requests.exceptions.RequestException as e: - return self.create_text_message("Failed to update card") - - updated_card_info = f"Card '{card_id}' updated successfully." 
- return self.create_text_message(text=updated_card_info) diff --git a/api/core/tools/provider/builtin/trello/tools/update_card.yaml b/api/core/tools/provider/builtin/trello/tools/update_card.yaml deleted file mode 100644 index 5240dfc3ed2564..00000000000000 --- a/api/core/tools/provider/builtin/trello/tools/update_card.yaml +++ /dev/null @@ -1,81 +0,0 @@ -identity: - name: update_card_by_id - author: Yash Parmar - label: - en_US: Update Card by ID - zh_Hans: 通过 ID 更新卡片 - pt_BR: Atualizar Cartão por ID -description: - human: - en_US: Updates specified attributes of a Trello card, such as its name, description, list ID, and board ID, by providing the card's unique ID. - zh_Hans: 通过提供卡片的唯一 ID,更新 Trello 卡片的特定属性,如其名称、描述、列表 ID 和看板 ID。 - pt_BR: Atualiza atributos específicos de um cartão Trello, como seu nome, descrição, ID da lista e ID do quadro, fornecendo o ID único do cartão. - llm: Modify a Trello card's key details, including name, description, and its placement on the board, by using the card's ID. Enables precise and targeted updates to card information. -parameters: - - name: id - type: string - required: true - label: - en_US: Card ID - zh_Hans: 卡片 ID - pt_BR: ID do Cartão - human_description: - en_US: The unique identifier of the Trello card you intend to update. - zh_Hans: 您打算更新的 Trello 卡片的唯一标识符。 - pt_BR: O identificador único do cartão Trello que você pretende atualizar. - llm_description: Input the ID of the Trello card to be updated to ensure the correct card is targeted. - form: llm - # Include other parameters following the same pattern - - name: name - type: string - required: false - label: - en_US: New Name - zh_Hans: 新名称 - pt_BR: Novo Nome - human_description: - en_US: Optional. The new name to assign to the card. - zh_Hans: 可选。要分配给卡片的新名称。 - pt_BR: Opcional. O novo nome a ser atribuído ao cartão. - llm_description: Specify a new name for the card if changing it. This name is what will be displayed on the Trello board. 
- form: llm - # Add definitions for desc, idList and idBoard parameters - - name: desc - type: string - required: false - label: - en_US: New Description - zh_Hans: 新描述 - pt_BR: Nova Descrição - human_description: - en_US: Optional. The new description to assign to the card. - zh_Hans: 可选。要分配给卡片的新描述。 - pt_BR: Opcional. A nova descrição a ser atribuída ao cartão. - llm_description: Provide a new description for the card if you wish to update it; this description provides additional context about the card. - form: llm - - name: idList - type: string - required: false - label: - en_US: List ID - zh_Hans: 列表 ID - pt_BR: ID da Lista - human_description: - en_US: Optional. The ID of the list to which the card should be moved. - zh_Hans: 可选。卡片应移动到的列表的 ID。 - pt_BR: Opcional. O ID da lista para a qual o cartão deve ser movido. - llm_description: Enter the ID of the list where you want to move the card. This action relocates the card to the specified list. - form: llm - - name: idBoard - type: string - required: false - label: - en_US: Board ID - zh_Hans: 看板 ID - pt_BR: ID do Quadro - human_description: - en_US: Optional. The ID of the board to which the card should be moved. - zh_Hans: 可选。卡片应移动到的看板的 ID。 - pt_BR: Opcional. O ID do quadro para o qual o cartão deve ser movido. - llm_description: Provide the ID of the board where you want to move the card. This action relocates the card to the specified board. 
- form: llm diff --git a/api/core/tools/provider/builtin/trello/trello.py b/api/core/tools/provider/builtin/trello/trello.py deleted file mode 100644 index e0dca50ec99aee..00000000000000 --- a/api/core/tools/provider/builtin/trello/trello.py +++ /dev/null @@ -1,34 +0,0 @@ -from typing import Any - -import requests - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class TrelloProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - """Validate Trello API credentials by making a test API call. - - Args: - credentials (dict[str, Any]): The Trello API credentials to validate. - - Raises: - ToolProviderCredentialValidationError: If the credentials are invalid. - """ - api_key = credentials.get("trello_api_key") - token = credentials.get("trello_api_token") - url = f"https://api.trello.com/1/members/me?key={api_key}&token={token}" - - try: - response = requests.get(url) - response.raise_for_status() # Raises an HTTPError for bad responses - except requests.exceptions.HTTPError as e: - if response.status_code == 401: - # Unauthorized, indicating invalid credentials - raise ToolProviderCredentialValidationError("Invalid Trello credentials: Unauthorized.") - # Handle other potential HTTP errors - raise ToolProviderCredentialValidationError("Error validating Trello credentials") - except requests.exceptions.RequestException as e: - # Handle other exceptions, such as connection errors - raise ToolProviderCredentialValidationError("Error validating Trello credentials") diff --git a/api/core/tools/provider/builtin/trello/trello.yaml b/api/core/tools/provider/builtin/trello/trello.yaml deleted file mode 100644 index 49c9f4f9a178f8..00000000000000 --- a/api/core/tools/provider/builtin/trello/trello.yaml +++ /dev/null @@ -1,47 +0,0 @@ -identity: - author: Yash Parmar - name: trello - label: - en_US: Trello - 
zh_Hans: Trello - pt_BR: Trello - description: - en_US: "Trello: A visual tool for organizing your work and life." - zh_Hans: "Trello: 一个用于组织工作和生活的视觉工具。" - pt_BR: "Trello: Uma ferramenta visual para organizar seu trabalho e vida." - icon: icon.svg - tags: - - productivity -credentials_for_provider: - trello_api_key: - type: secret-input - required: true - label: - en_US: Trello API key - zh_Hans: Trello API key - pt_BR: Trello API key - placeholder: - en_US: Enter your Trello API key - zh_Hans: 输入您的 Trello API key - pt_BR: Insira sua chave API do Trello - help: - en_US: Obtain your API key from Trello's website. - zh_Hans: 从 Trello 网站获取您的 API key。 - pt_BR: Obtenha sua chave API no site do Trello. - url: https://developer.atlassian.com/cloud/trello/guides/rest-api/api-introduction/ - trello_api_token: - type: secret-input - required: true - label: - en_US: Trello API token - zh_Hans: Trello API token - pt_BR: Trello API token - placeholder: - en_US: Enter your Trello API token - zh_Hans: 输入您的 Trello API token - pt_BR: Insira seu token API do Trello - help: - en_US: Secure your API token from Trello's website. - zh_Hans: 从 Trello 网站获取您的 API token。 - pt_BR: Garanta seu token API no site do Trello. 
- url: https://developer.atlassian.com/cloud/trello/guides/rest-api/api-introduction/ diff --git a/api/core/tools/provider/builtin/twilio/_assets/icon.svg b/api/core/tools/provider/builtin/twilio/_assets/icon.svg deleted file mode 100644 index a1e2bd12c27d64..00000000000000 --- a/api/core/tools/provider/builtin/twilio/_assets/icon.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/twilio/tools/send_message.py b/api/core/tools/provider/builtin/twilio/tools/send_message.py deleted file mode 100644 index 5ee839baa56f02..00000000000000 --- a/api/core/tools/provider/builtin/twilio/tools/send_message.py +++ /dev/null @@ -1,97 +0,0 @@ -from typing import Any, Optional, Union - -from pydantic import BaseModel, field_validator - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class TwilioAPIWrapper(BaseModel): - """Messaging Client using Twilio. - - To use, you should have the ``twilio`` python package installed, - and the environment variables ``TWILIO_ACCOUNT_SID``, ``TWILIO_AUTH_TOKEN``, and - ``TWILIO_FROM_NUMBER``, or pass `account_sid`, `auth_token`, and `from_number` as - named parameters to the constructor. - """ - - client: Any = None #: :meta private: - account_sid: Optional[str] = None - """Twilio account string identifier.""" - auth_token: Optional[str] = None - """Twilio auth token.""" - from_number: Optional[str] = None - """A Twilio phone number in [E.164](https://www.twilio.com/docs/glossary/what-e164) - format, an - [alphanumeric sender ID](https://www.twilio.com/docs/sms/send-messages#use-an-alphanumeric-sender-id), - or a [Channel Endpoint address](https://www.twilio.com/docs/sms/channels#channel-addresses) - that is enabled for the type of message you want to send. Phone numbers or - [short codes](https://www.twilio.com/docs/sms/api/short-code) purchased from - Twilio also work here. 
You cannot, for example, spoof messages from a private - cell phone number. If you are using `messaging_service_sid`, this parameter - must be empty. - """ - - @field_validator("client", mode="before") - @classmethod - def set_validator(cls, values: dict) -> dict: - """Validate that api key and python package exists in environment.""" - try: - from twilio.rest import Client - except ImportError: - raise ImportError("Could not import twilio python package. Please install it with `pip install twilio`.") - account_sid = values.get("account_sid") - auth_token = values.get("auth_token") - values["from_number"] = values.get("from_number") - values["client"] = Client(account_sid, auth_token) - - return values - - def run(self, body: str, to: str) -> str: - """Run body through Twilio and respond with message sid. - - Args: - body: The text of the message you want to send. Can be up to 1,600 - characters in length. - to: The destination phone number in - [E.164](https://www.twilio.com/docs/glossary/what-e164) format for - SMS/MMS or - [Channel user address](https://www.twilio.com/docs/sms/channels#channel-addresses) - for other 3rd-party channels. - """ - message = self.client.messages.create(to, from_=self.from_number, body=body) - return message.sid - - -class SendMessageTool(BuiltinTool): - """ - A tool for sending messages using Twilio API. - - Args: - user_id (str): The ID of the user invoking the tool. - tool_parameters (Dict[str, Any]): The parameters required for sending the message. - - Returns: - Union[ToolInvokeMessage, List[ToolInvokeMessage]]: The result of invoking the tool, - which includes the status of the message sending operation. 
- """ - - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - account_sid = self.runtime.credentials["account_sid"] - auth_token = self.runtime.credentials["auth_token"] - from_number = self.runtime.credentials["from_number"] - - message = tool_parameters["message"] - to_number = tool_parameters["to_number"] - - if to_number.startswith("whatsapp:"): - from_number = f"whatsapp: {from_number}" - - twilio = TwilioAPIWrapper(account_sid=account_sid, auth_token=auth_token, from_number=from_number) - - # Sending the message through Twilio - result = twilio.run(message, to_number) - - return self.create_text_message(text="Message sent successfully.") diff --git a/api/core/tools/provider/builtin/twilio/tools/send_message.yaml b/api/core/tools/provider/builtin/twilio/tools/send_message.yaml deleted file mode 100644 index e129698c86aeb6..00000000000000 --- a/api/core/tools/provider/builtin/twilio/tools/send_message.yaml +++ /dev/null @@ -1,40 +0,0 @@ -identity: - name: send_message - author: Yash Parmar - label: - en_US: SendMessage - zh_Hans: 发送消息 - pt_BR: SendMessage -description: - human: - en_US: Send SMS or Twilio Messaging Channels messages. - zh_Hans: 发送SMS或Twilio消息通道消息。 - pt_BR: Send SMS or Twilio Messaging Channels messages. - llm: Send SMS or Twilio Messaging Channels messages. Supports different channels including WhatsApp. -parameters: - - name: message - type: string - required: true - label: - en_US: Message - zh_Hans: 消息内容 - pt_BR: Message - human_description: - en_US: The content of the message to be sent. - zh_Hans: 要发送的消息内容。 - pt_BR: The content of the message to be sent. - llm_description: The content of the message to be sent. - form: llm - - name: to_number - type: string - required: true - label: - en_US: To Number - zh_Hans: 收信号码 - pt_BR: Para Número - human_description: - en_US: The recipient's phone number. 
Prefix with 'whatsapp:' for WhatsApp messages, e.g., "whatsapp:+1234567890". - zh_Hans: 收件人的电话号码。WhatsApp消息前缀为'whatsapp:',例如,"whatsapp:+1234567890"。 - pt_BR: The recipient's phone number. Prefix with 'whatsapp:' for WhatsApp messages, e.g., "whatsapp:+1234567890". - llm_description: The recipient's phone number. Prefix with 'whatsapp:' for WhatsApp messages, e.g., "whatsapp:+1234567890". - form: llm diff --git a/api/core/tools/provider/builtin/twilio/twilio.py b/api/core/tools/provider/builtin/twilio/twilio.py deleted file mode 100644 index b1d100aad93dba..00000000000000 --- a/api/core/tools/provider/builtin/twilio/twilio.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Any - -from twilio.base.exceptions import TwilioRestException -from twilio.rest import Client - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class TwilioProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - # Extract credentials - account_sid = credentials["account_sid"] - auth_token = credentials["auth_token"] - from_number = credentials["from_number"] - - # Initialize twilio client - client = Client(account_sid, auth_token) - - # fetch account - client.api.accounts(account_sid).fetch() - - except TwilioRestException as e: - raise ToolProviderCredentialValidationError(f"Twilio API error: {e.msg}") from e - except KeyError as e: - raise ToolProviderCredentialValidationError(f"Missing required credential: {e}") from e - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/twilio/twilio.yaml b/api/core/tools/provider/builtin/twilio/twilio.yaml deleted file mode 100644 index 21867c1da5dc32..00000000000000 --- a/api/core/tools/provider/builtin/twilio/twilio.yaml +++ /dev/null @@ -1,48 +0,0 @@ -identity: - author: Yash Parmar - name: twilio - label: - 
en_US: Twilio - zh_Hans: Twilio - pt_BR: Twilio - description: - en_US: Send messages through SMS or Twilio Messaging Channels. - zh_Hans: 通过SMS或Twilio消息通道发送消息。 - pt_BR: Send messages through SMS or Twilio Messaging Channels. - icon: icon.svg - tags: - - social -credentials_for_provider: - account_sid: - type: secret-input - required: true - label: - en_US: Account SID - zh_Hans: 账户SID - pt_BR: Account SID - placeholder: - en_US: Please input your Twilio Account SID - zh_Hans: 请输入您的Twilio账户SID - pt_BR: Please input your Twilio Account SID - auth_token: - type: secret-input - required: true - label: - en_US: Auth Token - zh_Hans: 认证令牌 - pt_BR: Auth Token - placeholder: - en_US: Please input your Twilio Auth Token - zh_Hans: 请输入您的Twilio认证令牌 - pt_BR: Please input your Twilio Auth Token - from_number: - type: secret-input - required: true - label: - en_US: From Number - zh_Hans: 发信号码 - pt_BR: De Número - placeholder: - en_US: Please input your Twilio phone number - zh_Hans: 请输入您的Twilio电话号码 - pt_BR: Please input your Twilio phone number diff --git a/api/core/tools/provider/builtin/vanna/_assets/icon.png b/api/core/tools/provider/builtin/vanna/_assets/icon.png deleted file mode 100644 index 3a9011b54d8a07f01e6b2fb934f3937bca0fd85a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4612 zcmb7Ic{r49`+mm04=H<05wZ=Vk}<}<8)VBikzID9#%>VVDJ`-WLPcbcWX+PSk)7-m z#xVBfqu$^9e!t`RzCXVEKJMo_pX<8L^SbW)e*Su*^mNqdX*g&A0HD`USJpqVR;P=K z;^aACqpptv06%^Jz}*Lcy%QI15diLr1HiHs0Kk(0 zfYl@I!wtkq;J%%ShCK=e2%TUmfE;uNAU#2#lL>%0fK#;-1ZaXd|Hk?t!M}1y0N{ZW zK=xOT*@-L=@z^T(2Af&NjXC%7K;_b zN{YFAIY1?(rKO?bFenTrdXgcE@pbdQ?IY@j;r%1>e>%!`7#lAq4{s-TH^`~(ZEN?t z-m(`jo)-Fh{OQx%$^M^8ZkWHeb+SR|X#^@ECJy~iG&>)s|3W*B{Gt8c*Pn8TQ!%)% zmy_K|!>4T_B@n+0{6F76?IBK8;CfCzb}lB$POf%tnBN?6*cAlyKZgDz)%3qp|1Iz5f7I(Q>0 z9z#a%GgjtDak%Kka6#m8->AW@@hhfsTURS5tFfQiWw!jVvNaP~)lsrF8&%t5YiS$l z7#^F?wbK`-{^yw}NfKIGsMx)`#k`_gQXad{57eQNF`7fmfMu~wVEH~GTa6Rn+PpB# 
zTY$+Gsk!g2YUfq5-cpldXUW0_M#F-A`q>SMuxGUj7?mz~o(g_pXx^LGys`hS$_~09 zm_pczQS_+d($^))IFvKng6^IQ3^nat3cp!ff1doHNl~!jjGjsgB{7fKdWN;691)(( zn5=L>+=E!)CktcKAYr_FgSc-gxxVR)#B0hZf7V8T|RvC>M0i1l*+OL|_wIABIGkq2g z)m_wWh8lQ(qlSCgYPk^`3;hz?HAYOyzO4EF5-No6P)ZhwWd7t})~1Rh?=S%^C7y{7 z!W1b@#H!Ota?{DK2K!BU4Vq-yuHX}4qVz7=IqA^5a&^%UKy?z@hd7yRDDD$m3mTlC zsBflO-=I&z86|1VH1aEWZ}Eu>6A4Dkf7nj#+DhlX&zC#gMI*{n*_1%0bp1k+a|4=O zox*DZXXI@Yhnti2gzWv3c-~qWe6v+N5y~tx#s6JVr>m{!NodR`3 z+hK!T@P}LR$)lQY$y8+OuGR4c`h72T_N{tPKFrWY(InEon?wxTH0UyA^aY;X688kh zw~#eN?2*%|tx_Km@${FBNa8$X3@b2u4RYwh2XDAxaTloQN1o$01Xd&c9mdyj;Vg?3 z7V@^91{zoJ*6~c&{W_;f`N1k{bgn-?Xb8P{(#7hT2H;sGdl(1@{a_@voTbQ*af-Px zAD7BpG$R?8#+|asfATfnv}9Ls@XFG-OFI(5l10W6e|T;4;5aCn2@_FXzVx54q9U^034* z-$TnY(fCwEqcj`^fw9Ue^yxg%H9PVj9b8x60d?Hd(^l8^yu2NHAuhEAP0oig#Ep|K z??_y-En7|*ZGv*|RBzrNG{!DQzI>FEu2N9FR_jAg`+zC{?jL*4C9HTZ+ z2TxUdQDbEFD!Cq&4Q+bk9LzLvbX54ZM50NM|BB6CZjW^@gHna$_R1%8jNwL*Bm-Ve zRF0?rnPQ|i;-Mu^UgAf57s}9+%b?d#z1d|rv9^7eR=%DY&&Em`$S!(jT<@??hBHS% z#AS}Wg^arHWqP%{`zPZ!4C4ZA1nPsg1+NUm792V_lI3jn=0fS5F}H*dN@ZxWrZlt? zouvuVrBMZ!*p?2eqbL@iNIXL5HTGUfXm*QR9Xr-qqBhh(X2`FTHq_emzWkOZg|&E4 zoxZQltK+aiU8U(w7bTJRWKbxKZ)QBb*XUyzm&P00<%p$2e)GHU7}dXyeY+;WV|U~D z90K{sD#f&+I_k&AJy36t9#0_0c6!ka*J>-1boEn-@)VBD$nB#K!R-g;IV35N*R_{p z+Q=|!4-!BJ29smH>D&b$Sf4+Y-%q|(A5Z^4}4dl`X%Z=bu{O8R-gJ!tAvG<#H_h>2;4mIl?E==TIqY(fqH+QZs+sdzH7`BWR% z{UD`iJI-r*oyivV0z2t4-<@CJKicj(W+vwD3Ua;OdIitl*Eh?BCbF|Fvs^3*<$g%J zDHftjoD*UgPc^sC_5sV3vQ;kYOTeXN@#nd~%eyyH#ZWJ29#gnZ9OzqVH2JGM)8 z_}A$~Yv-&aDgN_?I{P!TgNBX+WJu9wOP1l*&Q2k2K7nF3B+p$fT?onGmFD}8S>nb` z_B%MSOmgjepMUjV7#UtPnEK=;`!0S=G`4CMJf^$ka`Z7|V8@y%gI)8g7G7qYGV^{1 z<)Z_5Z*10BXa61#rPR9PfDqD=&&u;$GUHcnQWtP>ytXf`XVoUzHILc44EG?{LpEK< zRub^_C z42#ka4L^>Y1l910gTm!6{*l|}(iGt;LfPZO`iI(|Nq^DPAO?ajzsjsEepq)UpDI0# zkG{O?&Ci+3N>_1?szzfQ|)09deVTeh>Q))4cu#)g73x}6T#cFPgLdt^N=xW3SGv? 
zJBeaxX2vx-Y-*Mc4OIE+kn~i|V`lWT00q(uC_dHVW@(pxr5}L`pPPh9L8b1+qt7%o zCLCNj#T`rEOF39t99dNL>{2PN*N?F0dD5@fv)%qM8BIwBlYde*Z*qA&hKVF2=dL}6 z6)LBIOIQhYKMh3Tgc+jj?HX5=}ex`WMp+ z(vv<2ma~HoiHqe-U%51|@m#lS>Ds$Ex8EvN)TQ0(#s5Zf-&dI1O5uo`r4ZdcJ?<4C zeoe4HfRWq@=jT+6n#-6ven1Xj+nX0OPnYxj|_Sx8f{mb)d~#rbO8RA&W~8GBLNy!UEl$umNr zadSX0%t`AV@TfVSaxx~6O6!2>U4EK4T|S${^F{KEn{bpAQ8-X8i0sDl*uiY9;$O!@9|` z#E5#g9pj8Xu3-ONLELl*@t&-40%y2{$74#8W;JqiAx8d*Rs^_buxt*E>He8p5b=CI zFpr}{4a9fuBRb(+QA4if4%t$J=Bs{MgvoHnox&ux_p;s0G1h}uhP@YK7V1P6e3!@3 zhS*TkHV0PqibVT7p9teVJk0W|mb?2@$xvbA^_PQS@oRb*rLJm7%6VtD_CxqAiDsDI@uZess7goI! zUF0_%C^FVvkI=oRFfc$c1lm>Cff-VprBH4UZQk6R9m-rtk-aBbzF1+)ORhI|s0YiR zvgdt^Ju+$8Bt3PX7Iogg&Rpiy5@s^WDc66;Ip`9FUsCchIQBbCT}@5>9)-5b3( z>tts(5X_tNRR(iEK^6Hz+>UCj!Mpcd(7_6C;wh}IQu_|;-~_)mt3SIiDMcCUhTb-> zO|TGhrm&NOVOj|{aj}%HnmUtHnu0&XX$^b$-)%bTMaba%0=&Fih;wM7Wu*_#D88fi z@h1{0c|Ui$QLukEJZ9^;rHFQvF#VZCOe4>Ib?2*7ft?!xgaP;3F`U0slksz#V3KIH zVY-gDvghRrMz=GPiEB3vO1D>0Y41N7SUFEm+9iDBd^*Q7sw2PC-nZ@`F<=QrIAQ&N z{_3i1{;y>y{px(55b0~ytg&-^Mvn>!3NaV~FFeDWvA?5m7B`*-sX+tyjKCQnva<%ORi&yP@c#*?;yUbly6Q)+=%V diff --git a/api/core/tools/provider/builtin/vanna/tools/vanna.py b/api/core/tools/provider/builtin/vanna/tools/vanna.py deleted file mode 100644 index c90d766e483aaa..00000000000000 --- a/api/core/tools/provider/builtin/vanna/tools/vanna.py +++ /dev/null @@ -1,129 +0,0 @@ -from typing import Any, Union - -from vanna.remote import VannaDefault - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.tool.builtin_tool import BuiltinTool - - -class VannaTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - api_key = self.runtime.credentials.get("api_key", None) - if not api_key: - raise ToolProviderCredentialValidationError("Please input api key") - - model = tool_parameters.get("model", "") - 
if not model: - return self.create_text_message("Please input RAG model") - - prompt = tool_parameters.get("prompt", "") - if not prompt: - return self.create_text_message("Please input prompt") - - url = tool_parameters.get("url", "") - if not url: - return self.create_text_message("Please input URL/Host/DSN") - - db_name = tool_parameters.get("db_name", "") - username = tool_parameters.get("username", "") - password = tool_parameters.get("password", "") - port = tool_parameters.get("port", 0) - - vn = VannaDefault(model=model, api_key=api_key) - - db_type = tool_parameters.get("db_type", "") - if db_type in {"Postgres", "MySQL", "Hive", "ClickHouse"}: - if not db_name: - return self.create_text_message("Please input database name") - if not username: - return self.create_text_message("Please input username") - if port < 1: - return self.create_text_message("Please input port") - - schema_sql = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS" - match db_type: - case "SQLite": - schema_sql = "SELECT type, sql FROM sqlite_master WHERE sql is not null" - vn.connect_to_sqlite(url) - case "Postgres": - vn.connect_to_postgres(host=url, dbname=db_name, user=username, password=password, port=port) - case "DuckDB": - vn.connect_to_duckdb(url=url) - case "SQLServer": - vn.connect_to_mssql(url) - case "MySQL": - vn.connect_to_mysql(host=url, dbname=db_name, user=username, password=password, port=port) - case "Oracle": - vn.connect_to_oracle(user=username, password=password, dsn=url) - case "Hive": - vn.connect_to_hive(host=url, dbname=db_name, user=username, password=password, port=port) - case "ClickHouse": - vn.connect_to_clickhouse(host=url, dbname=db_name, user=username, password=password, port=port) - - enable_training = tool_parameters.get("enable_training", False) - reset_training_data = tool_parameters.get("reset_training_data", False) - if enable_training: - if reset_training_data: - existing_training_data = vn.get_training_data() - if len(existing_training_data) > 0: - 
for _, training_data in existing_training_data.iterrows(): - vn.remove_training_data(training_data["id"]) - - ddl = tool_parameters.get("ddl", "") - question = tool_parameters.get("question", "") - sql = tool_parameters.get("sql", "") - memos = tool_parameters.get("memos", "") - training_metadata = tool_parameters.get("training_metadata", False) - - if training_metadata: - if db_type == "SQLite": - df_ddl = vn.run_sql(schema_sql) - for ddl in df_ddl["sql"].to_list(): - vn.train(ddl=ddl) - else: - df_information_schema = vn.run_sql(schema_sql) - plan = vn.get_training_plan_generic(df_information_schema) - vn.train(plan=plan) - - if ddl: - vn.train(ddl=ddl) - - if sql: - if question: - vn.train(question=question, sql=sql) - else: - vn.train(sql=sql) - if memos: - vn.train(documentation=memos) - - ######################################################################################### - # Due to CVE-2024-5565, we have to disable the chart generation feature - # The Vanna library uses a prompt function to present the user with visualized results, - # it is possible to alter the prompt using prompt injection and run arbitrary Python code - # instead of the intended visualization code. - # Specifically - allowing external input to the library’s “ask” method - # with "visualize" set to True (default behavior) leads to remote code execution. 
- # Affected versions: <= 0.5.5 - ######################################################################################### - generate_chart = False - # generate_chart = tool_parameters.get("generate_chart", True) - res = vn.ask(prompt, False, True, generate_chart) - - result = [] - - if res is not None: - result.append(self.create_text_message(res[0])) - if len(res) > 1 and res[1] is not None: - result.append(self.create_text_message(res[1].to_markdown())) - if len(res) > 2 and res[2] is not None: - result.append( - self.create_blob_message(blob=res[2].to_image(format="svg"), meta={"mime_type": "image/svg+xml"}) - ) - - return result diff --git a/api/core/tools/provider/builtin/vanna/tools/vanna.yaml b/api/core/tools/provider/builtin/vanna/tools/vanna.yaml deleted file mode 100644 index ae2eae94c4dbc4..00000000000000 --- a/api/core/tools/provider/builtin/vanna/tools/vanna.yaml +++ /dev/null @@ -1,213 +0,0 @@ -identity: - name: vanna - author: QCTC - label: - en_US: Vanna.AI - zh_Hans: Vanna.AI -description: - human: - en_US: The fastest way to get actionable insights from your database just by asking questions. - zh_Hans: 一个基于大模型和RAG的Text2SQL工具。 - llm: A tool for converting text to SQL. 
-parameters: - - name: prompt - type: string - required: true - label: - en_US: Prompt - zh_Hans: 提示词 - pt_BR: Prompt - human_description: - en_US: used for generating SQL - zh_Hans: 用于生成SQL - llm_description: key words for generating SQL - form: llm - - name: model - type: string - required: true - label: - en_US: RAG Model - zh_Hans: RAG模型 - human_description: - en_US: RAG Model for your database DDL - zh_Hans: 存储数据库训练数据的RAG模型 - llm_description: RAG Model for generating SQL - form: form - - name: db_type - type: select - required: true - options: - - value: SQLite - label: - en_US: SQLite - zh_Hans: SQLite - - value: Postgres - label: - en_US: Postgres - zh_Hans: Postgres - - value: DuckDB - label: - en_US: DuckDB - zh_Hans: DuckDB - - value: SQLServer - label: - en_US: Microsoft SQL Server - zh_Hans: 微软 SQL Server - - value: MySQL - label: - en_US: MySQL - zh_Hans: MySQL - - value: Oracle - label: - en_US: Oracle - zh_Hans: Oracle - - value: Hive - label: - en_US: Hive - zh_Hans: Hive - - value: ClickHouse - label: - en_US: ClickHouse - zh_Hans: ClickHouse - default: SQLite - label: - en_US: DB Type - zh_Hans: 数据库类型 - human_description: - en_US: Database type. 
- zh_Hans: 选择要链接的数据库类型。 - form: form - - name: url - type: string - required: true - label: - en_US: URL/Host/DSN - zh_Hans: URL/Host/DSN - human_description: - en_US: Please input depending on DB type, visit https://vanna.ai/docs/ for more specification - zh_Hans: 请根据数据库类型,填入对应值,详情参考https://vanna.ai/docs/ - form: form - - name: db_name - type: string - required: false - label: - en_US: DB name - zh_Hans: 数据库名 - human_description: - en_US: Database name - zh_Hans: 数据库名 - form: form - - name: username - type: string - required: false - label: - en_US: Username - zh_Hans: 用户名 - human_description: - en_US: Username - zh_Hans: 用户名 - form: form - - name: password - type: secret-input - required: false - label: - en_US: Password - zh_Hans: 密码 - human_description: - en_US: Password - zh_Hans: 密码 - form: form - - name: port - type: number - required: false - label: - en_US: Port - zh_Hans: 端口 - human_description: - en_US: Port - zh_Hans: 端口 - form: form - - name: ddl - type: string - required: false - label: - en_US: Training DDL - zh_Hans: 训练DDL - human_description: - en_US: DDL statements for training data - zh_Hans: 用于训练RAG Model的建表语句 - form: form - - name: question - type: string - required: false - label: - en_US: Training Question - zh_Hans: 训练问题 - human_description: - en_US: Question-SQL Pairs - zh_Hans: Question-SQL中的问题 - form: form - - name: sql - type: string - required: false - label: - en_US: Training SQL - zh_Hans: 训练SQL - human_description: - en_US: SQL queries to your training data - zh_Hans: 用于训练RAG Model的SQL语句 - form: form - - name: memos - type: string - required: false - label: - en_US: Training Memos - zh_Hans: 训练说明 - human_description: - en_US: Sometimes you may want to add documentation about your business terminology or definitions - zh_Hans: 添加更多关于数据库的业务说明 - form: form - - name: enable_training - type: boolean - required: false - default: false - label: - en_US: Training Data - zh_Hans: 训练数据 - human_description: - en_US: You only need to train once. 
Do not train again unless you want to add more training data - zh_Hans: 训练数据无更新时,训练一次即可 - form: form - - name: reset_training_data - type: boolean - required: false - default: false - label: - en_US: Reset Training Data - zh_Hans: 重置训练数据 - human_description: - en_US: Remove all training data in the current RAG Model - zh_Hans: 删除当前RAG Model中的所有训练数据 - form: form - - name: training_metadata - type: boolean - required: false - default: false - label: - en_US: Training Metadata - zh_Hans: 训练元数据 - human_description: - en_US: If enabled, it will attempt to train on the metadata of that database - zh_Hans: 是否自动从数据库获取元数据来训练 - form: form - - name: generate_chart - type: boolean - required: false - default: True - label: - en_US: Generate Charts - zh_Hans: 生成图表 - human_description: - en_US: Generate Charts - zh_Hans: 是否生成图表 - form: form diff --git a/api/core/tools/provider/builtin/vanna/vanna.py b/api/core/tools/provider/builtin/vanna/vanna.py deleted file mode 100644 index 84724e921a1b00..00000000000000 --- a/api/core/tools/provider/builtin/vanna/vanna.py +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.vanna.tools.vanna import VannaTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class VannaProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - VannaTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "model": "chinook", - "db_type": "SQLite", - "url": "https://vanna.ai/Chinook.sqlite", - "query": "What are the top 10 customers by sales?", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/vanna/vanna.yaml b/api/core/tools/provider/builtin/vanna/vanna.yaml deleted file mode 100644 index 
b29fa103e1d8c9..00000000000000 --- a/api/core/tools/provider/builtin/vanna/vanna.yaml +++ /dev/null @@ -1,25 +0,0 @@ -identity: - author: QCTC - name: vanna - label: - en_US: Vanna.AI - zh_Hans: Vanna.AI - description: - en_US: The fastest way to get actionable insights from your database just by asking questions. - zh_Hans: 一个基于大模型和RAG的Text2SQL工具。 - icon: icon.png -credentials_for_provider: - api_key: - type: secret-input - required: true - label: - en_US: API key - zh_Hans: API key - placeholder: - en_US: Please input your API key - zh_Hans: 请输入你的 API key - pt_BR: Please input your API key - help: - en_US: Get your API key from Vanna.AI - zh_Hans: 从 Vanna.AI 获取你的 API key - url: https://vanna.ai/account/profile diff --git a/api/core/tools/provider/builtin/vectorizer/_assets/icon.png b/api/core/tools/provider/builtin/vectorizer/_assets/icon.png deleted file mode 100644 index 52f18db84372dcfc2968be27d75aec0fca430d55..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1875 zcmV-Z2dwysP)@~0drDELIAGL9O(c600d`2O+f$vv5yP#K9NCX-bJ2W9kC`eGaijACx zhBFjc*ky2>?e6T%?##SRZhn%L(|PVTe*c;GF(11` z9*@W4X;*@*2Gy~!>z}`AGz{piAf%TRy zPyBEts3p9=o9el+Yx61knV_D>7>ErW`{=pWm=JS8n@1VE}u9xEi-HYR}2dhC&8+dbrA#S6fU7l0Wr05e_yX1oB*JVDGH=-zq# z;33h*CK;o%Lezhd=ultdI{WuF-oLj-RR4=;sZO-AMyxm&;IJ^p-~iFkz;5G5EDOW@ zDhxAoBfoKA`08t7_UmZlwT#+NL=)c;8?nhaF?AY}1&+N;bn;lT(UaH^qhWep80ZFR zfgbN*6;j=W-%_sAPfoE?2c#JU2AH1PDYlR@e)&ErgMw;hQT=+&w7Q<9 z+oBeEA>IxmBMz7>2xT~#iohRpX{zA@n*T8LM2rSiio}%G&&p!KvxZ`q3RVPIxxe!o zRqeZ1h0FOPxoz6}NfZt0{XILcQRt=7xtE+_#RXlTZx|MTBm|IwIhmq7c&OHHP`I4s zyV>nr3SeghtTlpcmhq#uI^Ubbs^WK?cvUUWcUcQO7SqwHqiAb-arV6*9I0s0jMSWi=zIA~Nw;+eH8_|DTomwnYrk@&9$h{y$6j)%)ZW z76DXyKQKK44rOdRF&%HRU=e`R|I4c4x?|&mQjY-BbMUgwU0}tr`vJ}c(DDCEhDQMA zg=Dyx`)S3w06PBPWI)8i&rpWzt6N(2lHptcRsV0g02_Kri zJ38-POdaE+_j9@*;7kBrpMG5N*l9CxaXM#{xqClwIs$YyIH~UC-)dWFoJ-a*ei?c{ zaJnr8#g$GbRNX$&GJh)`D5G1^U~1#|5&csw*hg9hoC*LV>ed3NVxv>=&2Fqnkj5D{ 
z&djG7r!MGb!GdRNS6g>6<717X48A|YsQ~D8t`0yaff4kHg=0oVaGr)v|41yjyeX3o z&IQ0N!dx$WM2E%8W_-X>h~c!MD`Vhrh3!55MPL!Yg*!m6$8Bk9md3kwu%8pgoe{BbKRV5#n_|MXAhkq_i^Ftfbv0>};-oxA1BAd2s@rF} zcebxI@8yq{DbXIAXt9xMnPDe`NlJIC0vI&v(e1!vakGv`87}}cUI1pi0L*v+nDGKI z6Mv8!(K>mokYwBVgWT{iVviM)Y@75b!x{3}A)u+&w_*Zx{7aMG4Ot+dJCzE>o+lg= zpmufZdADcCW0ru%lb$c1m<4S=`FWf?rWl{Socvt*K=!GTkHS$3>7w}QC66}SuKY~W zqs*Jy$f=HnU7MR!6(XF*^Kkv+Zsow}$aav-bnK(aB)8-7csw2t_#fh*6^8a$&I|wm N002ovPDHLkV1mtmh`ay* diff --git a/api/core/tools/provider/builtin/vectorizer/tools/test_data.py b/api/core/tools/provider/builtin/vectorizer/tools/test_data.py deleted file mode 100644 index 8effa9818a0c40..00000000000000 --- a/api/core/tools/provider/builtin/vectorizer/tools/test_data.py +++ /dev/null @@ -1 +0,0 @@ -VECTORIZER_ICON_PNG = "iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAACXBIWXMAACxLAAAsSwGlPZapAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAboSURBVHgB7Z09bBxFFMffRoAvcQqbguBUxu4wCUikMCZ0TmQK4NLQJCJOlQIkokgEGhQ7NCFIKEhQuIqNnIaGMxRY2GVwmlggDHS+pIHELmIXMTEULPP3eeXz7e7szO7MvE1ufpKV03nuNn7/mfcxH7tEHo/H42lXgqwG1bGw65+/aTQM6K0gpJdCoi7ypCIMui5s9Qv9R1OVTqrVxoL1jPbpvH4hrIp/rnmj5+YOhTQ++1kwmdZgT9ovRi6EF4Xhv/XGL0Sv6OLXYMu0BokjYOSDcBQfJI8xhKFP/HAlqCW8v5vqubBr8yn6maCexxiIDR376LnWmBBzQZtPEvx+L3mMAleOZKb1/XgM2EOnyWMFZJKt78UEQKpJHisk2TYmgM967JFk2z3kYcULwIwXgBkvADNeAGa8AMw8Qcwc6N55/eAh0cYmGaOzQtR/kOhQX+M6+/c23r+3RlT/i2ipTrSyRqw4F+CwMMbgANHQwG7jRywLw/wqDDNzI79xYPjqa2L262jjtYzaT0QT3xEbsck4MXUakgWOvUx08liy0ZPYEKNhel4Y6AZpgR7/8Tvq1wEQ+sMJN6Nh9kqwy+bWYwAM8elZovNv6xmlU7iLs280RNO9ls51os/h/8eBVQEig8Dt5OXUsNrno2tluZw0cI3qUXKONQHy9sYkVHqnjntLA2LnFTAv1gSA+zBhfIDvkfVO/B4xRgWZn4fbe2WAnGJFAAxn03+I7PtUXdzE90Sjl4ne+6L4d5nCigAyYyHPn7tFdPN30uJwX/qI6jtISkQZFVLdhd9SrtNPTrFSB6QZBAaYntsptpAyfvk+KYOCamVR/XrNtLqepduiFnkh3g4iIw6YLAhlOJmKwB9zaarhApr/MPREjAZVisSU1s/KYsGzhmKXClYEWLm/8xpV7btXhcv5I7lt2vtJFA3q/T07r1HopdG5l5xhxQVdn28YFn8kBJCBOZmiPHio1m5QuJzlu9ntXApgZwSsNYJslvGjtjrfm8Sq4neceFUtz3dZCzwW09Gqo2hreuPN7HZRnNqa1BP1x8lhczVNK+zT0TqkjYAF4e7Okxoo2PZX5K4IrhNpb/P8FTK
2S1+TcUq1HpBFmquJYo1qEYU6RVarJE0c2ooL7C5IRwBZ5nJ9joyRtk5hA3YBdHqWzG1gBKgE/bzMaK5LqMIugKrbUDHu59/YWVRBsWhrsYZdANV5HBUXYGNlC9dFBW8LdgH6FQVYUnQvkQgm3NH8YuO7bM4LsWZBfT3qRY9OxRyJgJRz+Ij+FDPEQ1C3GVMiWAVQ7f31u/ncytxi4wdZTbRGgdcHnpYLD/FcwSrAoOKizfKfVAiIF4kBMPK+Opfe1iWsMUB1BJh2BRgBabSNAOiFqkXYbcNFUF9P+u82FGdWTcEmgGrvh0FUppB1kC073muXEaDq/21kIjLxV9tFAC7/n5X6tkUM0PH/dcP+P0v41fvkFBYBVHs/MD0CDmVsOzEdb7JgEYDT/8uq4rpj44NSjwDTc/CyzV1gxbH7Ac4F0PH/S4ZHAOaFZLiY+2nFuQA6/t9kQMTCz1CG66tbWvWS4VwAVf9vugAbel6efqrsYbKBcwFeVNz8ajobyTppw2F84FQAnfl/kwER6wJZcWdBc7e2KZwKoOP/TVakWb0f7md+kVhwOwI0BDCFyq42rt4PSiuAiRGAEXdK4ZQlV+8HTgVwefwHvR7nhbOA0FwBGDgTIM/Z3SLXUj2hOW1wR10eSrs7Ou9eTB3jo/dzuh/gTABdn35c8dhpM3BxOmeTuXs/cDoCdDY4qe7l32pbaZxL1jF+GXo/cLotBcWVTiZU3T7RMn8rHiijW9FgauP4Ef1TLdhHWgacCgAj6tYCqGKjU/DNbqxIkMYZNs7MpxmnLuhmwYJna1dbdzHjY42hDL4/wqkA6HWuDkAngRH0iYVjRkVwnoZO/0gsuLwpkw7OBcAtwlwvfESHxctmfMBSiOG0oStj4HCF7T3+RWARwIU7QK/HbWlqls52mYJtezqMj3v34C5VOveFy8Ll4QoTsJ8Txp0RsW8/Os2im2LCtSC1RIqLw3RldTVplOKkPEYDhMAPqttnune2rzTv5Y+WKdEem2ixkWqZYSeDSUp3qwIYNOrR7cBjcbOORxkvADNeAGa8AMx4AZjxAjATf5Ab0Tp5rJBk2/iD3PAwYo8Vkmyb9CjDGfLYIaCp1rdiAnT8S5PeDVkgoDuVCsWeJxwToHZ163m3Z8hjloDGk54vn5gFbT/5eZw8phifvZz8XPlA9qmRj8JRCumi+OkljzbbrvxM0qPMm9rIqY6FXZubVBUinMbzcP3jbuXA6Mh2kMx07KPJJLfj8Xg8Hg/4H+KfFYb2WM4MAAAAAElFTkSuQmCC" # noqa: E501 diff --git a/api/core/tools/provider/builtin/vectorizer/tools/vectorizer.py b/api/core/tools/provider/builtin/vectorizer/tools/vectorizer.py deleted file mode 100644 index 4bd601c0bd31e0..00000000000000 --- a/api/core/tools/provider/builtin/vectorizer/tools/vectorizer.py +++ /dev/null @@ -1,69 +0,0 @@ -from base64 import b64decode -from typing import Any, Union - -from httpx import post - -from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.vectorizer.tools.test_data import VECTORIZER_ICON_PNG -from core.tools.tool.builtin_tool import BuiltinTool - - -class 
VectorizerTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - api_key_name = self.runtime.credentials.get("api_key_name", None) - api_key_value = self.runtime.credentials.get("api_key_value", None) - mode = tool_parameters.get("mode", "test") - if mode == "production": - mode = "preview" - - if not api_key_name or not api_key_value: - raise ToolProviderCredentialValidationError("Please input api key name and value") - - image_id = tool_parameters.get("image_id", "") - if not image_id: - return self.create_text_message("Please input image id") - - if image_id.startswith("__test_"): - image_binary = b64decode(VECTORIZER_ICON_PNG) - else: - image_binary = self.get_variable_file(self.VariableKey.IMAGE) - if not image_binary: - return self.create_text_message("Image not found, please request user to generate image firstly.") - - response = post( - "https://vectorizer.ai/api/v1/vectorize", - files={"image": image_binary}, - data={"mode": mode} if mode == "test" else {}, - auth=(api_key_name, api_key_value), - timeout=30, - ) - - if response.status_code != 200: - raise Exception(response.text) - - return [ - self.create_text_message("the vectorized svg is saved as an image."), - self.create_blob_message(blob=response.content, meta={"mime_type": "image/svg+xml"}), - ] - - def get_runtime_parameters(self) -> list[ToolParameter]: - """ - override the runtime parameters - """ - return [ - ToolParameter.get_simple_instance( - name="image_id", - llm_description=f"the image id that you want to vectorize, \ - and the image id should be specified in \ - {[i.name for i in self.list_default_image_variables()]}", - type=ToolParameter.ToolParameterType.SELECT, - required=True, - options=[i.name for i in self.list_default_image_variables()], - ) - ] diff --git a/api/core/tools/provider/builtin/vectorizer/tools/vectorizer.yaml 
b/api/core/tools/provider/builtin/vectorizer/tools/vectorizer.yaml deleted file mode 100644 index 4b4fb9e2452c3c..00000000000000 --- a/api/core/tools/provider/builtin/vectorizer/tools/vectorizer.yaml +++ /dev/null @@ -1,38 +0,0 @@ -identity: - name: vectorizer - author: Dify - label: - en_US: Vectorizer.AI - zh_Hans: Vectorizer.AI - pt_BR: Vectorizer.AI -description: - human: - en_US: Convert your PNG and JPG images to SVG vectors quickly and easily. Fully automatically. Using AI. - zh_Hans: 一个将 PNG 和 JPG 图像快速轻松地转换为 SVG 矢量图的工具。 - pt_BR: Convert your PNG and JPG images to SVG vectors quickly and easily. Fully automatically. Using AI. - llm: A tool for converting images to SVG vectors. you should input the image id as the input of this tool. the image id can be got from parameters. -parameters: - - name: mode - type: select - required: true - options: - - value: production - label: - en_US: production - zh_Hans: 生产模式 - pt_BR: production - - value: test - label: - en_US: test - zh_Hans: 测试模式 - pt_BR: test - default: test - label: - en_US: Mode - zh_Hans: 模式 - pt_BR: Mode - human_description: - en_US: It is free to integrate with and test out the API in test mode, no subscription required. - zh_Hans: 在测试模式下,可以免费测试API。 - pt_BR: It is free to integrate with and test out the API in test mode, no subscription required. 
- form: form diff --git a/api/core/tools/provider/builtin/vectorizer/vectorizer.py b/api/core/tools/provider/builtin/vectorizer/vectorizer.py deleted file mode 100644 index 3b868572f93bae..00000000000000 --- a/api/core/tools/provider/builtin/vectorizer/vectorizer.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.vectorizer.tools.vectorizer import VectorizerTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class VectorizerProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - VectorizerTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={"mode": "test", "image_id": "__test_123"}, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/vectorizer/vectorizer.yaml b/api/core/tools/provider/builtin/vectorizer/vectorizer.yaml deleted file mode 100644 index 1257f8d285c986..00000000000000 --- a/api/core/tools/provider/builtin/vectorizer/vectorizer.yaml +++ /dev/null @@ -1,47 +0,0 @@ -identity: - author: Dify - name: vectorizer - label: - en_US: Vectorizer.AI - zh_Hans: Vectorizer.AI - pt_BR: Vectorizer.AI - description: - en_US: Convert your PNG and JPG images to SVG vectors quickly and easily. Fully automatically. Using AI. - zh_Hans: 一个将 PNG 和 JPG 图像快速轻松地转换为 SVG 矢量图的工具。 - pt_BR: Convert your PNG and JPG images to SVG vectors quickly and easily. Fully automatically. Using AI. 
- icon: icon.png - tags: - - productivity - - image -credentials_for_provider: - api_key_name: - type: secret-input - required: true - label: - en_US: Vectorizer.AI API Key name - zh_Hans: Vectorizer.AI API Key name - pt_BR: Vectorizer.AI API Key name - placeholder: - en_US: Please input your Vectorizer.AI ApiKey name - zh_Hans: 请输入你的 Vectorizer.AI ApiKey name - pt_BR: Please input your Vectorizer.AI ApiKey name - help: - en_US: Get your Vectorizer.AI API Key from Vectorizer.AI. - zh_Hans: 从 Vectorizer.AI 获取您的 Vectorizer.AI API Key。 - pt_BR: Get your Vectorizer.AI API Key from Vectorizer.AI. - url: https://vectorizer.ai/api - api_key_value: - type: secret-input - required: true - label: - en_US: Vectorizer.AI API Key - zh_Hans: Vectorizer.AI API Key - pt_BR: Vectorizer.AI API Key - placeholder: - en_US: Please input your Vectorizer.AI ApiKey - zh_Hans: 请输入你的 Vectorizer.AI ApiKey - pt_BR: Please input your Vectorizer.AI ApiKey - help: - en_US: Get your Vectorizer.AI API Key from Vectorizer.AI. - zh_Hans: 从 Vectorizer.AI 获取您的 Vectorizer.AI API Key。 - pt_BR: Get your Vectorizer.AI API Key from Vectorizer.AI. 
diff --git a/api/core/tools/provider/builtin/webscraper/_assets/icon.svg b/api/core/tools/provider/builtin/webscraper/_assets/icon.svg deleted file mode 100644 index 8123199a38a5e7..00000000000000 --- a/api/core/tools/provider/builtin/webscraper/_assets/icon.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/webscraper/tools/webscraper.py b/api/core/tools/provider/builtin/webscraper/tools/webscraper.py deleted file mode 100644 index 12670b4b8b9289..00000000000000 --- a/api/core/tools/provider/builtin/webscraper/tools/webscraper.py +++ /dev/null @@ -1,33 +0,0 @@ -from typing import Any, Union - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.errors import ToolInvokeError -from core.tools.tool.builtin_tool import BuiltinTool - - -class WebscraperTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - try: - url = tool_parameters.get("url", "") - user_agent = tool_parameters.get("user_agent", "") - if not url: - return self.create_text_message("Please input url") - - # get webpage - result = self.get_url(url, user_agent=user_agent) - - if tool_parameters.get("generate_summary"): - # summarize and return - return self.create_text_message(self.summary(user_id=user_id, content=result)) - else: - # return full webpage - return self.create_text_message(result) - except Exception as e: - raise ToolInvokeError(str(e)) diff --git a/api/core/tools/provider/builtin/webscraper/tools/webscraper.yaml b/api/core/tools/provider/builtin/webscraper/tools/webscraper.yaml deleted file mode 100644 index 0bb48a941dcffe..00000000000000 --- a/api/core/tools/provider/builtin/webscraper/tools/webscraper.yaml +++ /dev/null @@ -1,60 +0,0 @@ -identity: - name: webscraper - author: Dify - label: - en_US: Web Scraper - zh_Hans: 网页爬虫 - pt_BR: Web Scraper -description: - human: - 
en_US: A tool for scraping webpages. - zh_Hans: 一个用于爬取网页的工具。 - pt_BR: A tool for scraping webpages. - llm: A tool for scraping webpages. Input should be a URL. -parameters: - - name: url - type: string - required: true - label: - en_US: URL - zh_Hans: 网页链接 - pt_BR: URL - human_description: - en_US: used for linking to webpages - zh_Hans: 用于链接到网页 - pt_BR: used for linking to webpages - llm_description: url for scraping - form: llm - - name: user_agent - type: string - required: false - label: - en_US: User Agent - zh_Hans: User Agent - pt_BR: User Agent - human_description: - en_US: used for identifying the browser. - zh_Hans: 用于识别浏览器。 - pt_BR: used for identifying the browser. - form: form - default: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.1000.0 Safari/537.36 - - name: generate_summary - type: boolean - required: false - label: - en_US: Whether to generate summary - zh_Hans: 是否生成摘要 - human_description: - en_US: If true, the crawler will only return the page summary content. 
- zh_Hans: 如果启用,爬虫将仅返回页面摘要内容。 - form: form - options: - - value: 'true' - label: - en_US: 'Yes' - zh_Hans: 是 - - value: 'false' - label: - en_US: 'No' - zh_Hans: 否 - default: 'false' diff --git a/api/core/tools/provider/builtin/webscraper/webscraper.py b/api/core/tools/provider/builtin/webscraper/webscraper.py deleted file mode 100644 index 3c51393ac64cc4..00000000000000 --- a/api/core/tools/provider/builtin/webscraper/webscraper.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.webscraper.tools.webscraper import WebscraperTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class WebscraperProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - WebscraperTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "url": "https://www.google.com", - "user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 ", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/webscraper/webscraper.yaml b/api/core/tools/provider/builtin/webscraper/webscraper.yaml deleted file mode 100644 index 6c2eb97784e298..00000000000000 --- a/api/core/tools/provider/builtin/webscraper/webscraper.yaml +++ /dev/null @@ -1,15 +0,0 @@ -identity: - author: Dify - name: webscraper - label: - en_US: WebScraper - zh_Hans: 网页抓取 - pt_BR: WebScraper - description: - en_US: Web Scrapper tool kit is used to scrape web - zh_Hans: 一个用于抓取网页的工具。 - pt_BR: Web Scrapper tool kit is used to scrape web - icon: icon.svg - tags: - - productivity -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/websearch/_assets/icon.svg b/api/core/tools/provider/builtin/websearch/_assets/icon.svg deleted file mode 100644 index 
d6ef5d878f8636..00000000000000 --- a/api/core/tools/provider/builtin/websearch/_assets/icon.svg +++ /dev/null @@ -1,23 +0,0 @@ - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/websearch/tools/get_markdown.py b/api/core/tools/provider/builtin/websearch/tools/get_markdown.py deleted file mode 100644 index 043879deeab18f..00000000000000 --- a/api/core/tools/provider/builtin/websearch/tools/get_markdown.py +++ /dev/null @@ -1,51 +0,0 @@ -from typing import Any, Union - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -BASE_URL = "https://api.serply.io/v1/request" - - -class SerplyApi: - """ - SerplyAPI tool provider. - """ - - def __init__(self, api_key: str) -> None: - """Initialize SerplyAPI tool provider.""" - self.serply_api_key = api_key - - def run(self, url: str, **kwargs: Any) -> str: - """Run query through SerplyAPI and parse result.""" - - location = kwargs.get("location", "US") - - headers = { - "X-API-KEY": self.serply_api_key, - "X-User-Agent": kwargs.get("device", "desktop"), - "X-Proxy-Location": location, - "User-Agent": "Dify", - } - data = {"url": url, "method": "GET", "response_type": "markdown"} - res = requests.post(url, headers=headers, json=data) - return res.text - - -class GetMarkdownTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the SerplyApi tool. 
- """ - url = tool_parameters["url"] - location = tool_parameters.get("location") - - api_key = self.runtime.credentials["serply_api_key"] - result = SerplyApi(api_key).run(url, location=location) - - return self.create_text_message(text=result) diff --git a/api/core/tools/provider/builtin/websearch/tools/get_markdown.yaml b/api/core/tools/provider/builtin/websearch/tools/get_markdown.yaml deleted file mode 100644 index 06a302bd14b82d..00000000000000 --- a/api/core/tools/provider/builtin/websearch/tools/get_markdown.yaml +++ /dev/null @@ -1,96 +0,0 @@ -identity: - name: get_markdown - author: Dify - label: - en_US: Get Markdown API - zh_Hans: Get Markdown API -description: - human: - en_US: A tool to perform convert a webpage to markdown to make it easier for LLMs to understand. - zh_Hans: 一个将网页转换为 Markdown 的工具,以便模型更容易理解 - llm: A tool to perform convert a webpage to markdown to make it easier for LLMs to understand. -parameters: - - name: url - type: string - required: true - label: - en_US: URL - zh_Hans: URL - human_description: - en_US: URL that you want to grab the content from - zh_Hans: 您要从中获取内容的 URL - llm_description: Defines the link want to grab content from. - form: llm - - name: location - type: string - required: false - default: US - label: - en_US: Location - zh_Hans: 询问 - human_description: - en_US: Defines from where you want the search to originate. (For example - New York) - zh_Hans: 定义您想要搜索的起始位置。 (例如 - 纽约) - llm_description: Defines from where you want the search to originate. 
(For example - New York) - form: form - options: - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: DE - label: - en_US: Germany - zh_Hans: 德国 - pt_BR: Germany - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - value: JP - label: - en_US: Japan - zh_Hans: 日本 - pt_BR: Japan - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden diff --git a/api/core/tools/provider/builtin/websearch/tools/job_search.py b/api/core/tools/provider/builtin/websearch/tools/job_search.py deleted file mode 100644 index 293f4f63297120..00000000000000 --- a/api/core/tools/provider/builtin/websearch/tools/job_search.py +++ /dev/null @@ -1,88 +0,0 @@ -from typing import Any, Union -from urllib.parse import urlencode - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -BASE_URL = "https://api.serply.io/v1/news/" - - -class SerplyApi: - """ - SerplyAPI tool provider. 
- """ - - def __init__(self, api_key: str) -> None: - """Initialize SerplyAPI tool provider.""" - self.serply_api_key = api_key - - def run(self, query: str, **kwargs: Any) -> str: - """Run query through SerplyAPI and parse result.""" - params = {"q": query, "hl": kwargs.get("hl", "en"), "gl": kwargs.get("gl", "US"), "num": kwargs.get("num", 10)} - location = kwargs.get("location", "US") - - headers = { - "X-API-KEY": self.serply_api_key, - "X-User-Agent": kwargs.get("device", "desktop"), - "X-Proxy-Location": location, - "User-Agent": "Dify", - } - - url = f"{BASE_URL}{urlencode(params)}" - res = requests.get( - url, - headers=headers, - ) - res = res.json() - - return self.parse_results(res) - - @staticmethod - def parse_results(res: dict) -> str: - """Process response from Serply Job Search.""" - jobs = res.get("jobs", []) - if not jobs: - raise ValueError(f"Got error from Serply: {res}") - - string = [] - for job in jobs[:10]: - try: - string.append( - "\n".join( - [ - f"Position: {job['position']}", - f"Employer: {job['employer']}", - f"Location: {job['location']}", - f"Link: {job['link']}", - f"""Highest: {", ".join(list(job["highlights"]))}""", - "---", - ] - ) - ) - except KeyError: - continue - - content = "\n".join(string) - return f"\nJobs results:\n {content}\n" - - -class JobSearchTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the SerplyApi tool. 
- """ - query = tool_parameters["query"] - gl = tool_parameters.get("gl", "us") - hl = tool_parameters.get("hl", "en") - location = tool_parameters.get("location") - - api_key = self.runtime.credentials["serply_api_key"] - result = SerplyApi(api_key).run(query, gl=gl, hl=hl, location=location) - - return self.create_text_message(text=result) diff --git a/api/core/tools/provider/builtin/websearch/tools/job_search.yaml b/api/core/tools/provider/builtin/websearch/tools/job_search.yaml deleted file mode 100644 index b5ede3df46ab01..00000000000000 --- a/api/core/tools/provider/builtin/websearch/tools/job_search.yaml +++ /dev/null @@ -1,41 +0,0 @@ -identity: - name: job_search - author: Dify - label: - en_US: Job Search API - zh_Hans: Job Search API -description: - human: - en_US: A tool to retrieve job titles, company names and description from Google Jobs engine. - zh_Hans: 一个从 Google 招聘引擎检索职位名称、公司名称和描述的工具。 - llm: A tool to retrieve job titles, company names and description from Google Jobs engine. -parameters: - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 询问 - human_description: - en_US: Defines the query you want to search. - zh_Hans: 定义您要搜索的查询。 - llm_description: Defines the search query you want to search. - form: llm - - name: location - type: string - required: false - default: US - label: - en_US: Location - zh_Hans: 询问 - human_description: - en_US: Defines from where you want the search to originate. (For example - New York) - zh_Hans: 定义您想要搜索的起始位置。 (例如 - 纽约) - llm_description: Defines from where you want the search to originate. 
(For example - New York) - form: form - options: - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States diff --git a/api/core/tools/provider/builtin/websearch/tools/news_search.py b/api/core/tools/provider/builtin/websearch/tools/news_search.py deleted file mode 100644 index 9b5482fe183e18..00000000000000 --- a/api/core/tools/provider/builtin/websearch/tools/news_search.py +++ /dev/null @@ -1,90 +0,0 @@ -from typing import Any, Union -from urllib.parse import urlencode - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -BASE_URL = "https://api.serply.io/v1/news/" - - -class SerplyApi: - """ - SerplyApi tool provider. - """ - - def __init__(self, api_key: str) -> None: - """Initialize SerplyApi tool provider.""" - self.serply_api_key = api_key - - def run(self, query: str, **kwargs: Any) -> str: - """Run query through SerplyApi and parse result.""" - params = {"q": query, "hl": kwargs.get("hl", "en"), "gl": kwargs.get("gl", "US"), "num": kwargs.get("num", 10)} - location = kwargs.get("location", "US") - - headers = { - "X-API-KEY": self.serply_api_key, - "X-User-Agent": kwargs.get("device", "desktop"), - "X-Proxy-Location": location, - "User-Agent": "Dify", - } - - url = f"{BASE_URL}{urlencode(params)}" - res = requests.get( - url, - headers=headers, - ) - res = res.json() - - return self.parse_results(res) - - @staticmethod - def parse_results(res: dict) -> str: - """Process response from Serply News Search.""" - news = res.get("entries", []) - if not news: - raise ValueError(f"Got error from Serply: {res}") - - string = [] - for entry in news: - try: - # follow url - r = requests.get(entry["link"]) - final_link = r.history[-1].headers["Location"] - string.append( - "\n".join( - [ - f"Title: {entry['title']}", - f"Link: {final_link}", - f"Source: {entry['source']['title']}", - f"Published: {entry['published']}", - "---", - ] - ) - ) - except 
KeyError: - continue - - content = "\n".join(string) - return f"\nNews:\n {content}\n" - - -class NewsSearchTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the SerplyApi tool. - """ - query = tool_parameters["query"] - gl = tool_parameters.get("gl", "us") - hl = tool_parameters.get("hl", "en") - location = tool_parameters.get("location") - - api_key = self.runtime.credentials["serply_api_key"] - result = SerplyApi(api_key).run(query, gl=gl, hl=hl, location=location) - - return self.create_text_message(text=result) diff --git a/api/core/tools/provider/builtin/websearch/tools/news_search.yaml b/api/core/tools/provider/builtin/websearch/tools/news_search.yaml deleted file mode 100644 index 126c610825ebbb..00000000000000 --- a/api/core/tools/provider/builtin/websearch/tools/news_search.yaml +++ /dev/null @@ -1,501 +0,0 @@ -identity: - name: news_search - author: Dify - label: - en_US: News Search API - zh_Hans: News Search API -description: - human: - en_US: A tool to retrieve organic search results snippets and links from Google News engine. - zh_Hans: 一种从 Google 新闻引擎检索有机搜索结果片段和链接的工具。 - llm: A tool to retrieve organic search results snippets and links from Google News engine. -parameters: - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 询问 - human_description: - en_US: Defines the query you want to search. - zh_Hans: 定义您要搜索的查询。 - llm_description: Defines the search query you want to search. - form: llm - - name: location - type: string - required: false - default: US - label: - en_US: Location - zh_Hans: 询问 - human_description: - en_US: Defines from where you want the search to originate. (For example - New York) - zh_Hans: 定义您想要搜索的起始位置。 (例如 - 纽约) - llm_description: Defines from where you want the search to originate. 
(For example - New York) - form: form - options: - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: DE - label: - en_US: Germany - zh_Hans: 德国 - pt_BR: Germany - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - value: JP - label: - en_US: Japan - zh_Hans: 日本 - pt_BR: Japan - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden - - name: gl - type: select - label: - en_US: Country - zh_Hans: 国家/地区 - required: false - human_description: - en_US: Defines the country of the search. Default is "US". - zh_Hans: 定义搜索的国家/地区。默认为“美国”。 - llm_description: Defines the gl parameter of the Google search. 
- form: form - default: US - options: - - value: AR - label: - en_US: Argentina - zh_Hans: 阿根廷 - pt_BR: Argentina - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: AT - label: - en_US: Austria - zh_Hans: 奥地利 - pt_BR: Austria - - value: BE - label: - en_US: Belgium - zh_Hans: 比利时 - pt_BR: Belgium - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: CL - label: - en_US: Chile - zh_Hans: 智利 - pt_BR: Chile - - value: CO - label: - en_US: Colombia - zh_Hans: 哥伦比亚 - pt_BR: Colombia - - value: CN - label: - en_US: China - zh_Hans: 中国 - pt_BR: China - - value: CZ - label: - en_US: Czech Republic - zh_Hans: 捷克共和国 - pt_BR: Czech Republic - - value: DK - label: - en_US: Denmark - zh_Hans: 丹麦 - pt_BR: Denmark - - value: FI - label: - en_US: Finland - zh_Hans: 芬兰 - pt_BR: Finland - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: DE - label: - en_US: Germany - zh_Hans: 德国 - pt_BR: Germany - - value: HK - label: - en_US: Hong Kong - zh_Hans: 香港 - pt_BR: Hong Kong - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: ID - label: - en_US: Indonesia - zh_Hans: 印度尼西亚 - pt_BR: Indonesia - - value: IT - label: - en_US: Italy - zh_Hans: 意大利 - pt_BR: Italy - - value: JP - label: - en_US: Japan - zh_Hans: 日本 - pt_BR: Japan - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: MY - label: - en_US: Malaysia - zh_Hans: 马来西亚 - pt_BR: Malaysia - - value: MX - label: - en_US: Mexico - zh_Hans: 墨西哥 - pt_BR: Mexico - - value: NL - label: - en_US: Netherlands - zh_Hans: 荷兰 - pt_BR: Netherlands - - value: NZ - label: - en_US: New Zealand - zh_Hans: 新西兰 - pt_BR: New Zealand - - value: NO - label: - en_US: Norway - zh_Hans: 挪威 - pt_BR: Norway - - value: PH - label: - en_US: Philippines - zh_Hans: 菲律宾 - pt_BR: Philippines - - value: PL - label: - en_US: Poland - zh_Hans: 波兰 - pt_BR: Poland - 
- value: PT - label: - en_US: Portugal - zh_Hans: 葡萄牙 - pt_BR: Portugal - - value: RU - label: - en_US: Russia - zh_Hans: 俄罗斯 - pt_BR: Russia - - value: SA - label: - en_US: Saudi Arabia - zh_Hans: 沙特阿拉伯 - pt_BR: Saudi Arabia - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: ZA - label: - en_US: South Africa - zh_Hans: 南非 - pt_BR: South Africa - - value: ES - label: - en_US: Spain - zh_Hans: 西班牙 - pt_BR: Spain - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden - - value: CH - label: - en_US: Switzerland - zh_Hans: 瑞士 - pt_BR: Switzerland - - value: TW - label: - en_US: Taiwan - zh_Hans: 台湾 - pt_BR: Taiwan - - value: TH - label: - en_US: Thailand - zh_Hans: 泰国 - pt_BR: Thailand - - value: TR - label: - en_US: Turkey - zh_Hans: 土耳其 - pt_BR: Turkey - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - name: hl - type: select - label: - en_US: Language - zh_Hans: 语言 - human_description: - en_US: Defines the interface language of the search. Default is "en". 
- zh_Hans: 定义搜索的界面语言。默认为“en”。 - required: false - default: en - form: form - options: - - value: ar - label: - en_US: Arabic - zh_Hans: 阿拉伯语 - - value: bg - label: - en_US: Bulgarian - zh_Hans: 保加利亚语 - - value: ca - label: - en_US: Catalan - zh_Hans: 加泰罗尼亚语 - - value: zh-cn - label: - en_US: Chinese (Simplified) - zh_Hans: 中文(简体) - - value: zh-tw - label: - en_US: Chinese (Traditional) - zh_Hans: 中文(繁体) - - value: cs - label: - en_US: Czech - zh_Hans: 捷克语 - - value: da - label: - en_US: Danish - zh_Hans: 丹麦语 - - value: nl - label: - en_US: Dutch - zh_Hans: 荷兰语 - - value: en - label: - en_US: English - zh_Hans: 英语 - - value: et - label: - en_US: Estonian - zh_Hans: 爱沙尼亚语 - - value: fi - label: - en_US: Finnish - zh_Hans: 芬兰语 - - value: fr - label: - en_US: French - zh_Hans: 法语 - - value: de - label: - en_US: German - zh_Hans: 德语 - - value: el - label: - en_US: Greek - zh_Hans: 希腊语 - - value: iw - label: - en_US: Hebrew - zh_Hans: 希伯来语 - - value: hi - label: - en_US: Hindi - zh_Hans: 印地语 - - value: hu - label: - en_US: Hungarian - zh_Hans: 匈牙利语 - - value: id - label: - en_US: Indonesian - zh_Hans: 印尼语 - - value: it - label: - en_US: Italian - zh_Hans: 意大利语 - - value: ja - label: - en_US: Japanese - zh_Hans: 日语 - - value: kn - label: - en_US: Kannada - zh_Hans: 卡纳达语 - - value: ko - label: - en_US: Korean - zh_Hans: 韩语 - - value: lv - label: - en_US: Latvian - zh_Hans: 拉脱维亚语 - - value: lt - label: - en_US: Lithuanian - zh_Hans: 立陶宛语 - - value: my - label: - en_US: Malay - zh_Hans: 马来语 - - value: ml - label: - en_US: Malayalam - zh_Hans: 马拉雅拉姆语 - - value: mr - label: - en_US: Marathi - zh_Hans: 马拉地语 - - value: "no" - label: - en_US: Norwegian - zh_Hans: 挪威语 - - value: pl - label: - en_US: Polish - zh_Hans: 波兰语 - - value: pt-br - label: - en_US: Portuguese (Brazil) - zh_Hans: 葡萄牙语(巴西) - - value: pt-pt - label: - en_US: Portuguese (Portugal) - zh_Hans: 葡萄牙语(葡萄牙) - - value: pa - label: - en_US: Punjabi - zh_Hans: 旁遮普语 - - value: ro - label: - en_US: Romanian - zh_Hans: 
罗马尼亚语 - - value: ru - label: - en_US: Russian - zh_Hans: 俄语 - - value: sr - label: - en_US: Serbian - zh_Hans: 塞尔维亚语 - - value: sk - label: - en_US: Slovak - zh_Hans: 斯洛伐克语 - - value: sl - label: - en_US: Slovenian - zh_Hans: 斯洛文尼亚语 - - value: es - label: - en_US: Spanish - zh_Hans: 西班牙语 - - value: sv - label: - en_US: Swedish - zh_Hans: 瑞典语 - - value: ta - label: - en_US: Tamil - zh_Hans: 泰米尔语 - - value: te - label: - en_US: Telugu - zh_Hans: 泰卢固语 - - value: th - label: - en_US: Thai - zh_Hans: 泰语 - - value: tr - label: - en_US: Turkish - zh_Hans: 土耳其语 - - value: uk - label: - en_US: Ukrainian - zh_Hans: 乌克兰语 - - value: vi - label: - en_US: Vietnamese - zh_Hans: 越南语 diff --git a/api/core/tools/provider/builtin/websearch/tools/scholar_search.py b/api/core/tools/provider/builtin/websearch/tools/scholar_search.py deleted file mode 100644 index 798d059b512edf..00000000000000 --- a/api/core/tools/provider/builtin/websearch/tools/scholar_search.py +++ /dev/null @@ -1,93 +0,0 @@ -from typing import Any, Union -from urllib.parse import urlencode - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -BASE_URL = "https://api.serply.io/v1/scholar/" - - -class SerplyApi: - """ - SerplyApi tool provider. 
- """ - - def __init__(self, api_key: str) -> None: - """Initialize SerplyApi tool provider.""" - self.serply_api_key = api_key - - def run(self, query: str, **kwargs: Any) -> str: - """Run query through SerplyApi and parse result.""" - params = {"q": query, "hl": kwargs.get("hl", "en"), "gl": kwargs.get("gl", "US"), "num": kwargs.get("num", 10)} - location = kwargs.get("location", "US") - - headers = { - "X-API-KEY": self.serply_api_key, - "X-User-Agent": kwargs.get("device", "desktop"), - "X-Proxy-Location": location, - "User-Agent": "Dify", - } - - url = f"{BASE_URL}{urlencode(params)}" - res = requests.get( - url, - headers=headers, - ) - res = res.json() - - return self.parse_results(res) - - @staticmethod - def parse_results(res: dict) -> str: - """Process response from Serply News Search.""" - articles = res.get("articles", []) - if not articles: - raise ValueError(f"Got error from Serply: {res}") - - string = [] - for article in articles: - try: - if "doc" in article: - link = article["doc"]["link"] - else: - link = article["link"] - authors = [author["name"] for author in article["author"]["authors"]] - string.append( - "\n".join( - [ - f"Title: {article['title']}", - f"Link: {link}", - f"Description: {article['description']}", - f"Cite: {article['cite']}", - f"Authors: {', '.join(authors)}", - "---", - ] - ) - ) - except KeyError: - continue - - content = "\n".join(string) - return f"\nScholar results:\n {content}\n" - - -class ScholarSearchTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the SerplyApi tool. 
- """ - query = tool_parameters["query"] - gl = tool_parameters.get("gl", "us") - hl = tool_parameters.get("hl", "en") - location = tool_parameters.get("location") - - api_key = self.runtime.credentials["serply_api_key"] - result = SerplyApi(api_key).run(query, gl=gl, hl=hl, location=location) - - return self.create_text_message(text=result) diff --git a/api/core/tools/provider/builtin/websearch/tools/scholar_search.yaml b/api/core/tools/provider/builtin/websearch/tools/scholar_search.yaml deleted file mode 100644 index 63e79d7ebfaa49..00000000000000 --- a/api/core/tools/provider/builtin/websearch/tools/scholar_search.yaml +++ /dev/null @@ -1,501 +0,0 @@ -identity: - name: scholar_search - author: Dify - label: - en_US: Scholar API - zh_Hans: Scholar API -description: - human: - en_US: A tool to retrieve scholarly literature. - zh_Hans: 学术文献检索工具 - llm: A tool to retrieve scholarly literature. -parameters: - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 询问 - human_description: - en_US: Defines the query you want to search. - zh_Hans: 定义您要搜索的查询。 - llm_description: Defines the search query you want to search. - form: llm - - name: location - type: string - required: false - default: US - label: - en_US: Location - zh_Hans: 询问 - human_description: - en_US: Defines from where you want the search to originate. (For example - New York) - zh_Hans: 定义您想要搜索的起始位置。 (例如 - 纽约) - llm_description: Defines from where you want the search to originate. 
(For example - New York) - form: form - options: - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: DE - label: - en_US: Germany - zh_Hans: 德国 - pt_BR: Germany - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - value: JP - label: - en_US: Japan - zh_Hans: 日本 - pt_BR: Japan - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden - - name: gl - type: select - label: - en_US: Country - zh_Hans: 国家/地区 - required: false - human_description: - en_US: Defines the country of the search. Default is "US". - zh_Hans: 定义搜索的国家/地区。默认为“美国”。 - llm_description: Defines the gl parameter of the Google search. 
- form: form - default: US - options: - - value: AR - label: - en_US: Argentina - zh_Hans: 阿根廷 - pt_BR: Argentina - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: AT - label: - en_US: Austria - zh_Hans: 奥地利 - pt_BR: Austria - - value: BE - label: - en_US: Belgium - zh_Hans: 比利时 - pt_BR: Belgium - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: CL - label: - en_US: Chile - zh_Hans: 智利 - pt_BR: Chile - - value: CO - label: - en_US: Colombia - zh_Hans: 哥伦比亚 - pt_BR: Colombia - - value: CN - label: - en_US: China - zh_Hans: 中国 - pt_BR: China - - value: CZ - label: - en_US: Czech Republic - zh_Hans: 捷克共和国 - pt_BR: Czech Republic - - value: DK - label: - en_US: Denmark - zh_Hans: 丹麦 - pt_BR: Denmark - - value: FI - label: - en_US: Finland - zh_Hans: 芬兰 - pt_BR: Finland - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: DE - label: - en_US: Germany - zh_Hans: 德国 - pt_BR: Germany - - value: HK - label: - en_US: Hong Kong - zh_Hans: 香港 - pt_BR: Hong Kong - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: ID - label: - en_US: Indonesia - zh_Hans: 印度尼西亚 - pt_BR: Indonesia - - value: IT - label: - en_US: Italy - zh_Hans: 意大利 - pt_BR: Italy - - value: JP - label: - en_US: Japan - zh_Hans: 日本 - pt_BR: Japan - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: MY - label: - en_US: Malaysia - zh_Hans: 马来西亚 - pt_BR: Malaysia - - value: MX - label: - en_US: Mexico - zh_Hans: 墨西哥 - pt_BR: Mexico - - value: NL - label: - en_US: Netherlands - zh_Hans: 荷兰 - pt_BR: Netherlands - - value: NZ - label: - en_US: New Zealand - zh_Hans: 新西兰 - pt_BR: New Zealand - - value: "NO" - label: - en_US: Norway - zh_Hans: 挪威 - pt_BR: Norway - - value: PH - label: - en_US: Philippines - zh_Hans: 菲律宾 - pt_BR: Philippines - - value: PL - label: - en_US: Poland - zh_Hans: 波兰 - pt_BR: Poland 
- - value: PT - label: - en_US: Portugal - zh_Hans: 葡萄牙 - pt_BR: Portugal - - value: RU - label: - en_US: Russia - zh_Hans: 俄罗斯 - pt_BR: Russia - - value: SA - label: - en_US: Saudi Arabia - zh_Hans: 沙特阿拉伯 - pt_BR: Saudi Arabia - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: ZA - label: - en_US: South Africa - zh_Hans: 南非 - pt_BR: South Africa - - value: ES - label: - en_US: Spain - zh_Hans: 西班牙 - pt_BR: Spain - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden - - value: CH - label: - en_US: Switzerland - zh_Hans: 瑞士 - pt_BR: Switzerland - - value: TW - label: - en_US: Taiwan - zh_Hans: 台湾 - pt_BR: Taiwan - - value: TH - label: - en_US: Thailand - zh_Hans: 泰国 - pt_BR: Thailand - - value: TR - label: - en_US: Turkey - zh_Hans: 土耳其 - pt_BR: Turkey - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - name: hl - type: select - label: - en_US: Language - zh_Hans: 语言 - human_description: - en_US: Defines the interface language of the search. Default is "en". 
- zh_Hans: 定义搜索的界面语言。默认为“en”。 - required: false - default: en - form: form - options: - - value: ar - label: - en_US: Arabic - zh_Hans: 阿拉伯语 - - value: bg - label: - en_US: Bulgarian - zh_Hans: 保加利亚语 - - value: ca - label: - en_US: Catalan - zh_Hans: 加泰罗尼亚语 - - value: zh-cn - label: - en_US: Chinese (Simplified) - zh_Hans: 中文(简体) - - value: zh-tw - label: - en_US: Chinese (Traditional) - zh_Hans: 中文(繁体) - - value: cs - label: - en_US: Czech - zh_Hans: 捷克语 - - value: da - label: - en_US: Danish - zh_Hans: 丹麦语 - - value: nl - label: - en_US: Dutch - zh_Hans: 荷兰语 - - value: en - label: - en_US: English - zh_Hans: 英语 - - value: et - label: - en_US: Estonian - zh_Hans: 爱沙尼亚语 - - value: fi - label: - en_US: Finnish - zh_Hans: 芬兰语 - - value: fr - label: - en_US: French - zh_Hans: 法语 - - value: de - label: - en_US: German - zh_Hans: 德语 - - value: el - label: - en_US: Greek - zh_Hans: 希腊语 - - value: iw - label: - en_US: Hebrew - zh_Hans: 希伯来语 - - value: hi - label: - en_US: Hindi - zh_Hans: 印地语 - - value: hu - label: - en_US: Hungarian - zh_Hans: 匈牙利语 - - value: id - label: - en_US: Indonesian - zh_Hans: 印尼语 - - value: it - label: - en_US: Italian - zh_Hans: 意大利语 - - value: ja - label: - en_US: Japanese - zh_Hans: 日语 - - value: kn - label: - en_US: Kannada - zh_Hans: 卡纳达语 - - value: ko - label: - en_US: Korean - zh_Hans: 韩语 - - value: lv - label: - en_US: Latvian - zh_Hans: 拉脱维亚语 - - value: lt - label: - en_US: Lithuanian - zh_Hans: 立陶宛语 - - value: my - label: - en_US: Malay - zh_Hans: 马来语 - - value: ml - label: - en_US: Malayalam - zh_Hans: 马拉雅拉姆语 - - value: mr - label: - en_US: Marathi - zh_Hans: 马拉地语 - - value: "no" - label: - en_US: Norwegian - zh_Hans: 挪威语 - - value: pl - label: - en_US: Polish - zh_Hans: 波兰语 - - value: pt-br - label: - en_US: Portuguese (Brazil) - zh_Hans: 葡萄牙语(巴西) - - value: pt-pt - label: - en_US: Portuguese (Portugal) - zh_Hans: 葡萄牙语(葡萄牙) - - value: pa - label: - en_US: Punjabi - zh_Hans: 旁遮普语 - - value: ro - label: - en_US: Romanian - zh_Hans: 
罗马尼亚语 - - value: ru - label: - en_US: Russian - zh_Hans: 俄语 - - value: sr - label: - en_US: Serbian - zh_Hans: 塞尔维亚语 - - value: sk - label: - en_US: Slovak - zh_Hans: 斯洛伐克语 - - value: sl - label: - en_US: Slovenian - zh_Hans: 斯洛文尼亚语 - - value: es - label: - en_US: Spanish - zh_Hans: 西班牙语 - - value: sv - label: - en_US: Swedish - zh_Hans: 瑞典语 - - value: ta - label: - en_US: Tamil - zh_Hans: 泰米尔语 - - value: te - label: - en_US: Telugu - zh_Hans: 泰卢固语 - - value: th - label: - en_US: Thai - zh_Hans: 泰语 - - value: tr - label: - en_US: Turkish - zh_Hans: 土耳其语 - - value: uk - label: - en_US: Ukrainian - zh_Hans: 乌克兰语 - - value: vi - label: - en_US: Vietnamese - zh_Hans: 越南语 diff --git a/api/core/tools/provider/builtin/websearch/tools/web_search.py b/api/core/tools/provider/builtin/websearch/tools/web_search.py deleted file mode 100644 index fe363ac7a4d5d0..00000000000000 --- a/api/core/tools/provider/builtin/websearch/tools/web_search.py +++ /dev/null @@ -1,90 +0,0 @@ -import typing -from urllib.parse import urlencode - -import requests - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class SerplyApi: - """ - SerplyApi tool provider. 
- """ - - def __init__(self, api_key: str) -> None: - """Initialize Serply Web Search Tool provider.""" - self.serply_api_key = api_key - self.base_url = "https://api.serply.io/v1/search/" - - def run(self, query: str, **kwargs: typing.Any) -> str: - """Run query through Serply and parse result.""" - params = {"q": query, "hl": kwargs.get("hl", "en"), "gl": kwargs.get("gl", "US"), "num": kwargs.get("num", 10)} - location = kwargs.get("location", "US") - - headers = { - "X-API-KEY": self.serply_api_key, - "X-User-Agent": kwargs.get("device", "desktop"), - "X-Proxy-Location": location, - "User-Agent": "Dify", - } - - url = f"{self.base_url}{urlencode(params)}" - res = requests.get( - url, - headers=headers, - ) - res = res.json() - - return self.parse_results(res) - - @staticmethod - def parse_results(res: dict) -> str: - """Process response from Serply Web Search.""" - results = res.get("results", []) - if not results: - raise ValueError(f"Got error from Serply: {res}") - - string = [] - for result in results: - try: - string.append( - "\n".join( - [ - f"Title: {result['title']}", - f"Link: {result['link']}", - f"Description: {result['description'].strip()}", - "---", - ] - ) - ) - except KeyError: - continue - - if related_questions := res.get("related_questions", []): - string.append("---") - string.append("Related Questions: ") - string.append("\n".join(related_questions)) - - content = "\n".join(string) - return f"\nSearch results:\n {content}\n" - - -class WebSearchTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, typing.Any], - ) -> typing.Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - Invoke the SerplyApi tool. 
- """ - query = tool_parameters["query"] - num = tool_parameters.get("num", 10) - gl = tool_parameters.get("gl", "us") - hl = tool_parameters.get("hl", "en") - location = tool_parameters.get("location", "None") - - api_key = self.runtime.credentials["serply_api_key"] - result = SerplyApi(api_key).run(query=query, num=num, gl=gl, hl=hl, location=location) - return self.create_text_message(text=result) diff --git a/api/core/tools/provider/builtin/websearch/tools/web_search.yaml b/api/core/tools/provider/builtin/websearch/tools/web_search.yaml deleted file mode 100644 index 055029253c1753..00000000000000 --- a/api/core/tools/provider/builtin/websearch/tools/web_search.yaml +++ /dev/null @@ -1,376 +0,0 @@ -identity: - name: web_search - author: Dify - label: - en_US: Web Search API - zh_Hans: Web Search API -description: - human: - en_US: A tool to retrieve answer boxes, knowledge graphs, snippets, and webpages from Google Search engine. - zh_Hans: 一种从 Google 搜索引擎检索答案框、知识图、片段和网页的工具。 - llm: A tool to retrieve answer boxes, knowledge graphs, snippets, and webpages from Google Search engine. -parameters: - - name: query - type: string - required: true - label: - en_US: Query - zh_Hans: 询问 - human_description: - en_US: Defines the query you want to search. - zh_Hans: 定义您要搜索的查询。 - llm_description: Defines the search query you want to search. - form: llm - - name: location - type: string - required: false - default: US - label: - en_US: Location - zh_Hans: 询问 - human_description: - en_US: Defines from where you want the search to originate. (For example - New York) - zh_Hans: 定义您想要搜索的起始位置。 (例如 - 纽约) - llm_description: Defines from where you want the search to originate. 
(For example - New York) - form: form - options: - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: DE - label: - en_US: Germany - zh_Hans: 德国 - pt_BR: Germany - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - value: JP - label: - en_US: Japan - zh_Hans: 日本 - pt_BR: Japan - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden - - name: device - type: select - label: - en_US: Device Type - zh_Hans: 汉斯先生 - human_description: - en_US: Defines the device to make interface search. Default is "desktop". - zh_Hans: 定义进行接口搜索的设备。默认为“桌面” - required: false - default: desktop - form: form - options: - - value: desktop - label: - en_US: Desktop - zh_Hans: 桌面 - - value: mobile - label: - en_US: Mobile - zh_Hans: 移动的 - - name: gl - type: select - label: - en_US: Country - zh_Hans: 国家/地区 - required: false - human_description: - en_US: Defines the country of the search. Default is "US". - zh_Hans: 定义搜索的国家/地区。默认为“美国”。 - llm_description: Defines the gl parameter of the Google search. 
- form: form - default: US - options: - - value: AU - label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia - - value: BR - label: - en_US: Brazil - zh_Hans: 巴西 - pt_BR: Brazil - - value: CA - label: - en_US: Canada - zh_Hans: 加拿大 - pt_BR: Canada - - value: DE - label: - en_US: Germany - zh_Hans: 德国 - pt_BR: Germany - - value: FR - label: - en_US: France - zh_Hans: 法国 - pt_BR: France - - value: GB - label: - en_US: United Kingdom - zh_Hans: 英国 - pt_BR: United Kingdom - - value: IN - label: - en_US: India - zh_Hans: 印度 - pt_BR: India - - value: KR - label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea - - value: SE - label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden - - value: SG - label: - en_US: Singapore - zh_Hans: 新加坡 - pt_BR: Singapore - - value: US - label: - en_US: United States - zh_Hans: 美国 - pt_BR: United States - - name: hl - type: select - label: - en_US: Language - zh_Hans: 语言 - human_description: - en_US: Defines the interface language of the search. Default is "en". 
- zh_Hans: 定义搜索的界面语言。默认为“en”。 - required: false - default: en - form: form - options: - - value: ar - label: - en_US: Arabic - zh_Hans: 阿拉伯语 - - value: bg - label: - en_US: Bulgarian - zh_Hans: 保加利亚语 - - value: ca - label: - en_US: Catalan - zh_Hans: 加泰罗尼亚语 - - value: zh-cn - label: - en_US: Chinese (Simplified) - zh_Hans: 中文(简体) - - value: zh-tw - label: - en_US: Chinese (Traditional) - zh_Hans: 中文(繁体) - - value: cs - label: - en_US: Czech - zh_Hans: 捷克语 - - value: da - label: - en_US: Danish - zh_Hans: 丹麦语 - - value: nl - label: - en_US: Dutch - zh_Hans: 荷兰语 - - value: en - label: - en_US: English - zh_Hans: 英语 - - value: et - label: - en_US: Estonian - zh_Hans: 爱沙尼亚语 - - value: fi - label: - en_US: Finnish - zh_Hans: 芬兰语 - - value: fr - label: - en_US: French - zh_Hans: 法语 - - value: de - label: - en_US: German - zh_Hans: 德语 - - value: el - label: - en_US: Greek - zh_Hans: 希腊语 - - value: iw - label: - en_US: Hebrew - zh_Hans: 希伯来语 - - value: hi - label: - en_US: Hindi - zh_Hans: 印地语 - - value: hu - label: - en_US: Hungarian - zh_Hans: 匈牙利语 - - value: id - label: - en_US: Indonesian - zh_Hans: 印尼语 - - value: it - label: - en_US: Italian - zh_Hans: 意大利语 - - value: ja - label: - en_US: Japanese - zh_Hans: 日语 - - value: kn - label: - en_US: Kannada - zh_Hans: 卡纳达语 - - value: ko - label: - en_US: Korean - zh_Hans: 韩语 - - value: lv - label: - en_US: Latvian - zh_Hans: 拉脱维亚语 - - value: lt - label: - en_US: Lithuanian - zh_Hans: 立陶宛语 - - value: my - label: - en_US: Malay - zh_Hans: 马来语 - - value: ml - label: - en_US: Malayalam - zh_Hans: 马拉雅拉姆语 - - value: mr - label: - en_US: Marathi - zh_Hans: 马拉地语 - - value: "no" - label: - en_US: Norwegian - zh_Hans: 挪威语 - - value: pl - label: - en_US: Polish - zh_Hans: 波兰语 - - value: pt-br - label: - en_US: Portuguese (Brazil) - zh_Hans: 葡萄牙语(巴西) - - value: pt-pt - label: - en_US: Portuguese (Portugal) - zh_Hans: 葡萄牙语(葡萄牙) - - value: pa - label: - en_US: Punjabi - zh_Hans: 旁遮普语 - - value: ro - label: - en_US: Romanian - zh_Hans: 
罗马尼亚语 - - value: ru - label: - en_US: Russian - zh_Hans: 俄语 - - value: sr - label: - en_US: Serbian - zh_Hans: 塞尔维亚语 - - value: sk - label: - en_US: Slovak - zh_Hans: 斯洛伐克语 - - value: sl - label: - en_US: Slovenian - zh_Hans: 斯洛文尼亚语 - - value: es - label: - en_US: Spanish - zh_Hans: 西班牙语 - - value: sv - label: - en_US: Swedish - zh_Hans: 瑞典语 - - value: ta - label: - en_US: Tamil - zh_Hans: 泰米尔语 - - value: te - label: - en_US: Telugu - zh_Hans: 泰卢固语 - - value: th - label: - en_US: Thai - zh_Hans: 泰语 - - value: tr - label: - en_US: Turkish - zh_Hans: 土耳其语 - - value: uk - label: - en_US: Ukrainian - zh_Hans: 乌克兰语 - - value: vi - label: - en_US: Vietnamese - zh_Hans: 越南语 diff --git a/api/core/tools/provider/builtin/websearch/websearch.py b/api/core/tools/provider/builtin/websearch/websearch.py deleted file mode 100644 index 90cc0c573ac97e..00000000000000 --- a/api/core/tools/provider/builtin/websearch/websearch.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.websearch.tools.web_search import WebSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class WebSearchAPIProvider(BuiltinToolProviderController): - # validate when saving the api_key - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - WebSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={"query": "what is llm"}, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/websearch/websearch.yaml b/api/core/tools/provider/builtin/websearch/websearch.yaml deleted file mode 100644 index c4267e1022dfa1..00000000000000 --- a/api/core/tools/provider/builtin/websearch/websearch.yaml +++ /dev/null @@ -1,34 +0,0 @@ -identity: - name: websearch - author: Serply.io - label: - en_US: Serply.io - 
zh_Hans: Serply.io - pt_BR: Serply.io - description: - en_US: Serply.io is a robust real-time SERP API delivering structured data from a collection of search engines including Web Search, Jobs, News, and many more. - zh_Hans: Serply.io 是一个强大的实时 SERP API,可提供来自 搜索 招聘 新闻等搜索引擎集合的结构化数据。 - pt_BR: Serply.io is a robust real-time SERP API delivering structured data from a collection of search engines including Web Search, Jobs, News, and many more. - icon: icon.svg - tags: - - search - - business - - news - - productivity -credentials_for_provider: - serply_api_key: - type: secret-input - required: true - label: - en_US: Serply.io API key - zh_Hans: Serply.io API key - pt_BR: Serply.io API key - placeholder: - en_US: Please input your Serply.io API key - zh_Hans: 请输入你的 Serply.io API key - pt_BR: Please input your Serply.io API key - help: - en_US: Get your Serply.io API key from https://Serply.io/ - zh_Hans: 从 Serply.io 获取您的 Serply.io API key - pt_BR: Get your Serply.io API key from Serply.io - url: https://Serply.io/ diff --git a/api/core/tools/provider/builtin/wecom/_assets/icon.png b/api/core/tools/provider/builtin/wecom/_assets/icon.png deleted file mode 100644 index 8588c20d5781e566d7cd911836c61be1268e5510..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 262939 zcmbrmdpy)>{|8JK+W~C~A=+&_*a(|rnL#Q=j6cyg%=2Z#&pq z%WYEKBq1Rocly+ECkY9u-Qs_~l>y)UaaqP!LLyG$^l{6x5r|o0)@9#=lIqx?7_RYpaBg%J2ZU^bvezZMDpwSK|oPPW9T#LT?pZP?r z!2i?nFT!)ga6UFNvv=iDW(|sBzJ@&z|G2Sn$K$4^q~_4+SKeP-+k~`dt!7tehDvVy zsx@#O@_82hE@a$<;Z&8+IwTV`b-4ef&+C&=W-^wv{sa4^&&EBt5Vhsx!=H8&QX`-b-*4^% zzR0Ew;?~s|F?UUeaeGj@*;3ywIBYSnIDJ>oRi{QaIYHnb7r?05(M94OLbSNubMEww z*3uFiBt*BRVEYd*Ql?q;;jurQsCG=o;#z8` zlt)_(@qiJ+^_k`OCxr!oKquvp6Xxcl>a}zg$%#%nyt4OltWZ#(Frp?*V)OT_X z;ZWP>L< zvbw1G?MWzm=L$QNKl;p^yxRO0&NuzLv5n?r$sfJ4E0q{hq;{hcNdu%QN$yHpBkP}Z z{hpch|VDpw%t5-E8&`fq1G1I;&dn@1b5>e0C4`uZ-nLecKtZkC?i4W8%I~ zzNy|?UR^ePLS04bs}dH;3yz^ua}Qw2mEC@}(0JBC>p2 
zt*6ePq4@U;_SfWcVbV9dQd6o~GXBP$+uXxrNnZ}x5=zVi7CgEAcvppo3oN?kJMKC2 zk)ly1TL$7snovGfe>+?O0<((k`D>H&?(bxF%9!ECZeACMutBahJg(;4%I zeYRWr58{1Lzo<(v?!BKh%!NJDh8#MRDt)W^am>W*Xni<&QZ@RGH``xeVDv0rPuEEE z`LJ%wAs2I9y!``suk?|t_F875^X_db`jA}#-0>pz94rbFKn}23n8#GasJ{J)->gj> z5e{Q(9>t6=y)30E5d5HXuiXXS0<{(KB5z4-GM^Jv?ILImc5usue%Q)h_M-)FE{xy& zN*Kx9lj~yM@+~Rfwszh$Ald{XWcOKfXlOnNwy- zp!6P71FLJF8!B}T;~Iv9T1K)hztS-It5fX9`MHl)UCPO6%c`GV=AXZj|1Kpsoy(qq z!OJzwSplfE-rQ>1hC`u3y=SSl- z(yHkqERDtyqz3Q8fQ>vqf6<2~`HgftD)5@AGx9fk70SIp6@SiKtK0dlHLV63(u@)8 zPyRwoxrTSgNsmXCjqmTVoaLB;Av0`YUsv?&0dK|L`$Fd{ zFCuDw>2Ppf)38}s2osd>4&=a2*dcXk#lLpT(AEPm#nO9gVJd4zchg2@hP|~gF8nH% zTb&qy8MlZcGu+d1_KHS}5(8}L&&Ec;!7x(?7yE(UVJJ3P!!TEVcit7T!Lh&DEB$3~ zQhvX=bpVuxlkI%DWMdaS?c0YZ#=poCrp2>J*{Z{cmtM>&{l0Ul^d$pV(^O3CVk)6Y z1ARVJsWa@8_&uCC>@jV^5se;g9mn)ekC-z{SItINwV`pIMLKdd zs~ID|xEpy2J{F#Uv2n$QIF7C?`n5A*FRsKGHV~RHh?tLC(j%+ZM}8Vm#j$=OJAR~y zEhu&&sQG?e!f*v2VDD2zhl7k@8vi7U3go}0BIV~O6;j`-PbjrVtMN4^`I#JMIUMasjqVu}(WNf?Bng)E~%BpEZLK#>?J%zk` zIV>B|!3z9TZ*d`zWdK#UMaV(~U~?+ft%>I1s42j8j0b+Fh!#G;XwIRmK?nGvatftm z#C#a|R}V)N)`N@^6c{d28i2SlT$k)tdlmcn#8WnQBCY0)2TeiNtjmP+%U45vu_dL= zPN5i^es3sREHGbvzzVx$h0KqjQjz~0#D>gBsdHz=2`eb0Rhpjz9_jT-%@mzrc1wc& zz0^)LUEo4ko!x~DhWLuT4x-hVBfne1em)Ck`yZr}mbdj?u>bQ_YRRPZgq$bwm{rXn zL*U1Mcn_p)w)>@mJ-C&bH#p!PZPsDyp4B4WQ~_%}X+n3C^1o)$t;kuN9|%~3Imlmd z1k#s(`4`azP5m+l8i%Ay{%$q3Q*Jxbvg|hD`g-CPL{e9PBXw3lR3wS{#sguj`YeRF z2J#vE%P)UELj{qw1On-WKH`$*2aB|}sfSZ%*f=T7%l%MpOxaRbcoOd?Ri|F{kGh+K0pq+K4u5EKe!b_Tfi4ql z)oO#prkcrad9>@=Chv|tq)C+B6Qest7*ne=FZZx)*_>zk&Pr}H_sE)h zq7F3Wo_=WGXr%jDa%Iy8tj6@}H>kDOZ}H7bSK7{!R!3dh!U&(wk@0mulj>qgS~RP^ z`3DN>7I$zcGq(&ZSMk`QT=>RQRfdE^4SJCR{6@z+9%Fg2hRG>i_|+Y>VL6BR!nnZ8 zv>wL_@|wRKYVHN?M*0TB+EQ8SSx8~mN!`iXF8uRZj6*D(;DJS;ptM+d+VB^xTIFN0 z-g`Z3UEFF|2DZmx=BkXCo@cWpZxy9obSnF@>dLZH@? 
zoh!&HzpMFbEXm&@PaMcpS5P58=e^fCRWT2jkY;!$BEm2auKRUkI-(>19h!~^E z^gC9P6@9g`_rO{rs4~NtAvfJhZuv9_?PQp$T6dS58cwKFc=BV}=lw=bRl$At=(eVT zJT1NV-s&;f_WnPLjv$B~cPU^;>=l=)ihn)r^HI=R0SvgJ4{BT4a$1K;r(2jre4(U4 zgu=JmZ#@6y&OG%STHx~%>G;BtiX9!C(FEzi4F?7u=9A0HL+(i&Ora>+ZlNlq+#Kuj z&KoW+?MDVYP8JGMb(LI`FLot{AnZzxr{!>puWy6E3Bu8(`4!}m@QddK+#Q;7sIU)t zYw9H!YUhZ?as#F@DsT=Fq@HC@9!+z9_kE%)KIUyHL2vpq#DmkHO;O?vWT#l6;v8L3f=N&#Q{Q zm-OhJbKBmG_m0DA$mBmY@VSS+Z(rKIH+oj{#;5n;NlJKP=|Wt&@REqM*Q0f@8q{ll+1tLh63BeDlfYZD zmpqt4_`E^Gcp##skyrk9o6SPmOp5Nw^*k=ST?aAxGQOT{AG}^$DEPO3&H0G`6&!Jf zSAG8LQLsE4Jp+!$d2UgaNWZ<~$BW+@Wd0~1Q1{)!{Kh$GEs}U4q-jssJ&C825+l7^ zNO+KDsnz1R<&QXlxcPHkF$snHSfi+p;1t4RbA1=~hr&gfv4c1lT^1;)HG+1DTdQ`U z2XjPKtnzK06F}w1F<{H$dR6tm0ksa$_O9&KT?O;1ErTbYA2?8VdUww5xVBHHtQ@tq zAlh3D9teki=|nWeMqdzYr#Y`R-;`KTOtLQ2SbG)a2L-G8#lVt^V*Y+r&{A&_Y=vnf zImmX+waLQ%0PZaXQ4ue4;g0g6b{)jMrrP04+B1|PAcXM)@sk=2afT39fo{d6pK@2~Od3le- zE@z6X?eZKnZxlP$(!Z0-KodWUK^+qRHrM7etTzb_xh&DWQ-=A?Z5fLVvz^(O_uV}G zC`p1bX&9sYqt6o=#?O`}rspA*&6^-!^i^!nbQtX;MXJf&47d+lnt1>tl|(k``wjEX ze$-#w%#uS66w*CVipdAN@y5^+BPap!6VAj8!}(PX-Jx!XbLm;UoubH?Y<%IGTNt*Mb48a#{Qf`6N1=RLGj-)DV! 
zSHg3FMS7qVN-|!_H1m!sUwc~Gmk!%KYUkz zBwJ1G_ZU;n7m;IM0_FlQ&61V=u+r~;a5S8eMcUS(s9A?@tc!fdG(i~SvtI2tiCUa} zvvm1^|A8f&Eyh;4luh5LUUatyPp1eczXY}9B|KmZ z!_1?NV|=-)u4ce!<)aG0v7g|ib57>ZMOzq?l`|EE7;6a5sP!CYGBQ`KPjL{V1*(BG zpV-3E8d(o(^COh)08i+_*4V(9dwSIf!!g~{&RieSu@vIJwX_#o4hWi8Kq|A&^ zWFZV?S8fQB?x)D!d^(Bp^)_SwCa8(}b&Ef3VKe_BajVpkr^qbMsAx;nJ^!-&i;AB{ zqlj7bG~OgSxsiAYXaG_pVP!Ml?>~qD_RC%Y-L@uj2Gf+I5;Y-lSL(#Xb^V=3Ez?4Z z7Vzo6{1BMF+u{sd@tr=*J0lxmS+=&$^3;aMtKN~+{B}b`J^wymO$v1DtarnVC~nu6=~{ua z?aWXpW(@a4#@Nt|^O}9dw}hKkH;HOW*MFMU%Bk_ z&*Rj*Q|`SsD^DAlcSB2E#O`gFO6QHnq)qG75C6ZY*Vs|qA8ns_Qf}!}RU2LH07?9w zX1`_a!i&J`3dx}50-bveq8FKJ=kalV`g|;EEw3q?q&@&p+AaFTjGA?@0-VFF6gc8C zgg*Gvlx<#+G= zA9uVmiR;Z3Qrep(VP*|yMP>~wMWzB@E+dRS;?0}k5(QKohy0#6$!}C;M&$0Fi%qGX zmQm&3N2Px%UxhT(gp(%AHB~?#b|(vf@-UdFJSN!={!u-c|4>AzTkku~Wer?ICA#_vsOq|u z-;C`KaxfikW>1)HeOo%?H||vRGp^!dd0jK&wxGxBSv>0uA77SPLIUs16!fc%Fq?(( zp#KGsRpsz-ONrY>=(!#z6q>k=3pDHpwo2}Lh6CsG^>qzGB<>pv?j@khh=o9A!?;C| z{f-ocs?Y={lSI4W;k>@OSJ(wu#4Ct*|%GcBf)Ih9tFraH;O8guQz^ADC#hax9-T**gmXiy4@#ii% zY+_3g#n47(A%6MQx;VjBpOh3eUk`#2j`OzT5q)JH-d9o8bh{Y)Q=U~%jLVCk7r9Obc%A zT{|cbAOc_7@RC`>lrx)9rX=uRxd=)jOylxd07}r0qAU>P)zN%dzv&HcbL2V${Yg}_-0?=eLCGM`nLnx5ecF~cZhyWwc>a;|=*7%=P z)z}ht3qIvv|2}Q~?;#cnE50e?n?*9J#d~p_9;+SB#DCJe}OJ?~Mg|)zAnb z+m|Efwr*|2y0ssrSIXFHGp=gJ117*+bFsOkJ=QO|Bj$hbZ#VnPqn86`D0#6dDGb^N zAC9hdWj={nxEFUMR=1HG4SIinetAsj?$!{A?tvmapQ10nn_t@6lC0+YIuyw3Cx9TY zhBk2g%rxMi%K4wKMK_e1ljRKTgP=5ExFD7H^1Dw*7AcEZ@0y$r52(BFFdKDIRnjXOV z6Sw!y(5v0Ub6d94dI|;gJT7LiPu@a-;rSsVuZR5;2fu2F8}NG2IBo@V*n6CbFDG2VZ_~|G_X<*RNbNDi1A-Nw1walALI%oTJni3<@4T z7+`go6?6&YHH{7h?$eql-R89yKxErEPVQ|USerw*-@x126?%tt-8R48PgUkBx*r^0 z3KZ#SQWZk#EZg*7){0y+3)Jb4HFbsf^n15 zGNU!I$rfdExJue#GkKRmeXI&58?q=*p za}8ZS>)W{8JJYpA{}?PfasbQPVtOMoRM|??WOiUqhuANGLeSk@;S+8SgsyDm}Q1 zhYF2w`CaKnClwk85TD~zmUnz3+H^*{%^+7Zi3KY!2bTERB?E}#L2SRC3yOay307G0 zsoWRJ^YCk%_#OFr09!6LrZkbQ?@C!MnOfKm|A?iJTYwQF;w=cBK7S56@DI7k6X zx_mIH4^m37NXP8WeB~;yeLS!wJ1S68t0JohxkW`e3AEGsjJA@J7}eY?RR}D&(?V2x z`FDyN<>7(}f?czmB*~|zR>Nz 
zv*B7>1wIXra9dn0YG9>D&ij@e7_8@&Ybxjnm`mF*WY$ zenq1FA!=jo`)d^s^fs&VeQx*RPr|~SFq@QQ1UAX7=XA-B)I9jgQZDsz#8n*~!SF~6 z^?es+xTExqMR{?^Lol#Tz;K_&d$-W{lOKInE-}RQN(oPh@s4%r)q-K{^!r`N>hblA z<`;m^&*b5|l(0Em-U{gO7tZc4!Ou&)c!5dzo3v+fG}qKjp{p8MqRErBYpBWxF_3WHy|;_lHYT=#YpH#?Dm zk**=8=V^RJ>xvX_{?} z6Wfc~E8~5gR%PCIFY%vE=4+^G<|jn*n`=brXieEW$FGUqTQE7{qi(U7%V+|ek({qy z1&-{060bo8M{1NWHbS1&$9{ULARlo%Ud79h3qwyAhGQGS_%0c6IugKVOCl+e?KzIE z30RwY4JZ%4@1~ue(K3rGYs-tolHIN*1gv^Lx8bq{)t>5ArR(`j43VVg zF8wDv?o~;vkTZvlw!+185KxH4fX{I&W(Qg?7|Ljj4gCOWvGm=v-)6LynEF>IF)0`@ z8;K=tbt%e%pUG)csT~qJ!lI|2tGES;z|6-G(`f7vc+1du;4D`B+ZNyL#g8yrVJ-NN{-rr1D7QEF=6PTP=a|`E{3hCs8|xGz5>9o2J&6~@v^^wBm)$l3yp>d zGsdH~37HdT!$+5?%ZE$da3kFLo0Xka`J_^P4&QSJuY@keB85p?ee7*FEkqWCxN)+6 zQ()ZtpsPnxPU`k2=Lerx$T?UUrm_&&HiGTj$;GeOY7^=h0V@~9%np1%PQQe{3&d!0 z)I3RHw@UJcp?J~e$ulbZGB=44mKHzO_0XHg^(`wd3JgLd7+y8m^*#whOY;mL9ep{q zJtSTyw=N@}dBr z0{TD)Rv}7c(Tz!*iP@Dj=*=H{s(~T2W#m`qrVtvC4FD1IxFqtf+f^#=v_y|Ti1^tu zKg)j8Y%=$wiPpXik`I*(6uat-BK0pnI*E4f*k_ThaKXd2pgSPj43Tw>`5u{GE&a|= zdkD;kJ{ToM*FX571mFD2o>J8ZrZwIB>vD`Vd;0O!YD`q}Rp{U76j4EkaF1_R759}G z<0ATso2rE68ij3c$vsuLBFxkA_zU71>{+htv$|4xLJEa5iq&Jc+$7AXqb9<9Lm(qi zr@nJj^`*$ma{1&i)wFF~Wq}WW_#o7q-Q65Ql$x?cIhmxUCA%`~`R-v@v|vvjzArX; zBxcv}!$YEfz<7*XMjLi6(QvIF|4|%4-fo|EMCh;F;c65K?sd0e0TZe$b>{(LQ^iNvBF_WM726 z!b!WL7}l*Fg*{DX2PRc-epNn6|0gQHONcADd0h zmV3Ko%>yHXSldk@F_*X}-`vF>+SBSn>_C`bP{4Fh$7V3My2zB}oa$ndh56Ga-~LED zK!)M?q+Xm-4Bk4K6b!v$?r4BB<$kAW-V}z)o9_^$E;anBDswIB zPSd9gT~UFRt#3b}-C>-XV`)lB3d{)KPc4%}D;;?ny(C$i0LGZEI5j}M9jV~)?1QNO zy~MHax8Ll(y#uj7aPV|_?A{W#q40p@=@X7uKInf%DK<4YY*9jrrmG%GvflAk8%K_F zj-EV2sq9fSHRP3v3LD<*90aVO7W%UoBWP5@nHlHJX?sb_W#$)xH`SQ-t|Y-LK8B&2 zmb>hJb$-W;_dq$zu_PJHFl=&et!pCvD6~el_dMg6cE1xujEbxse6sJchJ2+MtZ;)8 zPo^|uwDci_59(br}djP=lP4qw;Ag0eNY{VL|CcPDe{yS&0x4oM+Vb z#b#t*Cpj0Na+tm!%(P`HVjvBjKB%F_tx5G~VE|_uTiaA~yE?I>LYbenClf|+MAaZ{ zfJ=^o^Q2+wFm3-+`Jfo`>U|>tx1Hn~CeiL`<{#><$Y6y1%t4->iL?W#k3rlz=5j;M zIZDD2qENTgLdtn)^c?Vw2!}|2*kwV3Vh>bfO8!)~Pi{MfS=VWzDZq2>+6@yUV;yY; 
z#Br0LnB^UguPtot!ps^v6M3(dg9sHe`~u?vC+9W|_`7=j8ThTP^A%z$%v`KAqm$m@ zMN3-l`K|}gDARY<-CN40?E(V{_^10E#S4i>Z2+*z6$(6=2LaLIk;gYVd2ZJh!X6|G zU7aupXNbaHxdJh%X1`1g027|fl-lBq!flfBg`X~~EQk}J#Ml9mMBc{4datrfX4gX> zWN+Bk5?{lVDW2L==k=xZwZ*->(d;Wao$s`1!wg#1V%uG~LgKk~wC4YcHiW_hy>U(nON8KFr8+DfTESX{qcp&MG8CxjLIJQ}nnWblI2WAd?p=wz*H{7tv zbyr0*uh9z#g&rzEZ+!dqbF(V4hKj(Jn$S>V4oY#aKu|>F_PQNCE&X-W{kpnG(6_!i!c@dCqdXich4e9s1(?!Kr}?TbCo4E<8a97uf_CP&-g5 ze!5d53!)Qf{D|87BbW3_jKcZJf}W?r1tq_thC%Lu6<%ZhAS!j7>vbNJQ*R3AZ`4E! zoZt(fUb{mV_zb=VU~`@^7!j39eVY&`McE+cr(n48nKBMY(~)l))iR7S0NB-UN*mCJ zFO(HV<`$R4fJv4PuUuC_2SenBj|3Owwzm0x*v^V{zEqIhz4+3jt!hOE?r#LWu(@tY z1vMuG@Yj2y#yUVWm;+nNuKc@4?7i~EtvpbnMBQS3gxUFryF!DJ#m2yEV{$fXudI?Z z*m?f%Rdg^a1k;{DCv)&1eaXge5R9|N0BlLkUObj+_ZDdozKP$WW$bX2t)lu??0}oR zWJj-1X+mfrP=xof-21t&OmNV2I!*L zVBaHgdKTE%#k+|D3tOo&g8F~qb5J4%+yeIHFCLR;ZP3)DiKORco^r6)GMC)awvXvz zq#(1r|7<+^-y6Tn0+W~hkF;9~D1u{QQodG^zN1@B*=McDzJnbRedj?BH1ir#RU89I zZ(P?Xsjep)YEX21S{i3{1Lm#8pspba^Kc;MJVUO*2%7AZ{8Y)Cr%xIa;L()9CdZ()Zk0?0Nbb;QfW_m!DgkZ20y?kC^AebaW8x+Jh&voiaEN%wuCF zKfi89Cs`-&S<2%}!aXD)9%#UriALMU8{dsMZaH7ZAAF~Uhv5*t0X{zL=U7z1sGc!Y ziuJj?J@J8qT-HaoHFQicj8=euW%D8PbQn%WcKFY+Vaa{_dojcqE4 zF@wz#xqoDTc(k(D+8(w-ZvEaFIZ;biuFSe2dSN{$DuZ&RV^-zO<-(1IcA>L9ltN2>Czjx$)mo&{r9CL=O1EM7K3;(1EHA5 zqo@qd_gW@1-s;esMusS9hs(R#c5J-h+b=2SVGVGgL*Gg>=6xWAn~~_4SYMA zZfTl+&ukXJNLJKQuK`ge*ILkYn|B@*o@-a4530zv>A_-IbgR^H3_#kNh2^m&P5CV6 z6_l8{*)X**i%#@fGb@#QkZ^;UyYs5(Q>o2!^}5|&5mf1i z111`8IrW|zXwUD46qMhq8w6FjiJjV{B|w=~B?(NGeS)wH(IYE+m65&k*f}`^Jbobc z)9SS&-uZQ!dj{Z=l})oT-MY){@smHK-{Y_VhR`sRk&fM>^FFchbb zxaKr%^VA}$=&h4m`kX6ODwON9_mL*SxW!nub)cbFBu?cNiau^E)}}oFSY?U=I*vAr zs|%~a9i%4BUDO9en`hr-hl6#h^DJgeSKMM&0s?RCS*Vyqo5tzVF1;NBYSGozW_{T^ zIo?!208ndLl+zCW@kqFUkSuk|1DuRhh3+nQH5}*Rg0~xlP@Z^F{<|7_(5e13!N zOjleaT1>HU`2+V=zb*qqr4oTm@oMPQ0(D8wEf&+Jh5Y+S(?t?uA-Ek4S2 z90q)g-e($cLFNVXxh$31G2uPVq^jFbIDc?}S_{0DhC82Rmy(LKpP8e@Z+iW75!|A< z?c46%$0}Tun>C5+9L*4xYu-(2GJ*oEpsN;CN19)q2X07c(2~xdQDl1Os3r|z5*z`< 
zhw;Z=x?t=8$b7&}A8@}3T`#9S#+ZuH#Z7;AQlsD??O&awZ;L~&8dw5rRuc=&gMTV2M1v*(ub0~1TCddC*FYyd>KFn%>JEQUIQ!yfD9~& z$~Wda#GZa>sl(|qXheEH2NaBo!~oB7Yz07={$|Ca+11|Phn<;d?wI&WNRf1sVXkbn zRORZ9O4s7%4^p=nV)onrsR-MUdMtYIt$MYwiXdwb9#3Mo7(3iptHuka^&d1T`@YC* zjAcEKkwG0olLGwxtv$yD0~J|Z^AP%Or!=5`P2HS5yIsd-$nbPD#q;sU*rH1ra3j58dcs{^jDpkWe4G zSDa7crd=OkxF)VAn2vy_g+l5>X?zK=?%tP+-&MgjTxb*)wV6ijPNe_5C_WGjp^0^4 zQ5q2)k6X=d)*14=)T@47JB!rtSJ>T%kzid=cpPK6j4gb-Mx9+fzgfFPIRR6*WCqAT zWv;(*BE^*q930-lYmD8=r;GJE52+R@lF*BU{Xd*WKi{gdEnss|${nHnhMF!M7Vl@d z9gz^RQ2A}ej$x~07noQ_bV=RVvfkR>8;SaW4BRMH$H;PQa|oj$yrtgdbX)1$e&C88lacL96=g7F**)xEfnNm_VZ|P-lJW z0G*kNO{WS`XY?U4#;pr6)NQ!_HXo6J4|;j3g;%z^qkkjME6@+hd;J;%u3}3d zDt3d+!BT=~yipZrK?k}{1ZO@1v#|>bJ;@Yja}QT%F_CLUI&et5k+LtfWH+5I8sJ|&`CV6R`*_XC0XWK=#)$HVx}!$c-ACj?T%EsnhSRPw_;()VpdovZu1Y z^&neZ)@ctrvzUL<(Bv;nL9Lqbns7yx7a7EEP%%A2y?ph0f}$9ZmdVF<=STto_E zv~B^v{0lMbjqXBwBoAJ#DN1ldXjkd8f8~ar_4?zqQZHRBSiV!nTTHAjIJC%&4L zM4N__$-UCiox-lv3%_Dj!jt7&^e)?*_?YlUg4=z0tlI8hon~ZXbijQe#a;GQ$J0#{ zUDI%ULtm$+a}-kXNjr{Y^5hR*&VW^Kuke^7>}m+aa?*uYQQu!hwCpSCu&zc`gDYm| zhq0tD58ZsAt~)dZk#>imLdH{_08!V1if*F|v8{K;v8UMDE~e!-VRbnTK-B2M`s5dJHP0iIA_-q3ui>V27}2K$N=ux|Y>(YecVmx`858ufLY z6+9<@P&Oo)W(^$10m^ct4b$0OAar)m(%`h~?sGePu!P+>G~)Cp$tL!|t@|tIU!CoE z#95*O7H!7*y`0!h^doPdzmv1G*?*Dy3NOstQ_403LzWCNkBHG+8G8f%L@D@*V@JmM zU9E*vdA0{`!-CF(!#UtWEaG5_PD$1fO^jV6KKdX+qvUrFP$wY5#*Ig+Fv6!s8A7^r zslayTVIWN)MmR{76FFh(7PSTbX9Q*oz3d96bM?>?^?mVmJL*AVd__Kknw9t`gK122 zQ*gn+nfv~|Gm&3q+Rlg_z{fXeSe&I;oyPk2`fLOk5g6AzTyT0SeA<6Oba3D&)3^x@ zKXVs=(D2Ijt@mnM5yC`~fb?){rsRqMTm%voRzQr98snJheY|cgL|bN0X8CE z5Bwol(i#>&nd8yXH(J46zcuCBZaDCwSO~wN;Wn+dpaS7HbC%9l8bA#NAHC8SvWvkD zQq&t`hrK>M6-XN~B9>!*aaevgAWfX11mqCZwbm&`V)lK5jr{Jb8v5&st)mQ8r@2CK z2!7f+Y&*jNmaSNw|Gau0DJ&8~IDK zI<=(#sMnr9*h(}TfUo`C@ZGGiaez62BU5V~U|h`*6Bhm+Y-zZm?AHDh>r|}LR&a@@ zZeKN*5+Jrlx1>@Gohe)dX$vlTdA>802{DS#x_hk*7?bbPK7Iz2&c zqmgo2tf7EiI4$=S*pMNBubhaYbnKhax2`sJAON5Ya5O}BI?z%N4uhw{AiJ%J9MI7I zek3kJ@DIES@M$ny%N`AjCDco;;YeT5>2~73Dn_K^{(Q-wD@pNY`k0&w7*Yx!6qkSq 
z0AY1y95lF6;`>qP$7fBjY>5B|5|ck#&D1Xv=`n-j{Myr)-oYpFAL_oM8==(7m?NLp zc|jm};F8cej6_{&^QVDr>%z!63-DOrjimfVtJJWM;+sSJ%C1V0+|C?}?jLm8DwqaI zVT10Lv0&2N)`ePlM0DyFF|vjH0pZPu8O2oqcm{QW(djf(RpS8&Jz7e~Fxfg4Lj%ah zIz5MfueeD=oGbo02mivOs!kIG>sI@jPq~2{36qe_j!605;mk2n-@4Sv1Khfe^C2mM z8k|wE{G}gE;^t$Q4(J2N3gk{}S)V5~A{#!N1>V&_8vs`OI*z51rVi-r{g8D|61e;; zzK|q!YRoMi!36{Y-@)OSmvsOZ=SaLbMpN0)^&^$7t|RS;)JT1#fleciWssJpn0V|RJndoWHwgtEXcPQYz!aSHB zN6pJ@dIKnG8gPabYivDjU%OjSq+af;ZrcWD0>HDNt5sF@PQuOQ7uJ8}Pf#e>{me-F zzcYBA#2_FQjDyFMuLT6ZdD@u$4z>J5?ZES5p=<4BQ!g`&Dk)UBFmuA==+UD| z*6;h{ORpc&q=8CKeR1h>--iz$ytSb&v8FbH%m*+tpQwAHO%s{~)FqFz0Qx-xGc3Qz zC6kAQI_|pGexeASPvy;@wk82O`9M1v+ljtoL{i|;D6L3Gzy zLXz;^(vtjGt81FRl=zMvHDvMSDFcn^T5{sM4n5aoz)um|Ny+9_N)1@1GrwyFIopEq zP>+Z0<9?W95N+g{RwS_4#ly zFK4QwV*OGS7|#^~YmiSg3|fa`v{DFRkf{aGhX+H+!auCIB@z7Qs#Wd2{ocnkB6kli zvk!o)3Zk;c7*pa!LR((`kg8@)yk<5&MiuE7n@Pg2u=VY)=Wac{v&*deR{X{R))lIr z(bX694O-X^yJ3?ZstpcX1$f?pLcneon3~M}EX=2n0aVGCk1&5O(b?Q94u{-KM+` z-}QSY7M|Hv`}WAlDW1G*AWotE$Fx0` zj%}c}c?15lp~gk$cPMRuF_4~*oZ%n~OWqBB(Rw07!m^V2zxh=kOS>7O)u&#Iz})(d zRjtX+7#b4)+C>$2VRMD4Y-{NC6lGorszE79u9VwfdEwcr)2f)HFfKe#%wMs6)kZ1; z9XJ8xP8!-~#@9^WlgN?vBz}sdf*Td_LC2tl<$Q{xnkMNUNe07Ec#f%V8|MLd$4}(H zQf~n5zkNh43|zz7=xC$(5W@j|qmdR=FfFP7!e8m~8D~CzLNz05O1crmDG9#Ly#s@p*#u#a1nUyHz+rUc$EC}1RShw~suEs^J>bh)0FE`OIM4~p z>5&2~oI?}YZKjvydY=`T5XOwZEx{Pjv?S@a3i>Zv1k5}P`*7)W;q=X2R-%JCTwN9t zaH&|;IF42YVL@ymMgUeup7R_=q81^LszofDaX@SR93)zPM8??Cm2t#BN5>nkA@(1j z4aalJ%|X^bT%u;lc7Ds;u$UvPTV*{r)cbhtU5o$Q-5H$0v-oe^bRnJJ9;`-cXDt}Z z*2Lc^bK_J;`uk3QUZ>OyJD}^4lOxhOyQSC^r>w_k5<6~ z1&gUf;JizB-A;ta31s2fXKnxAvZYUUt4!s_mxUW&S$3Gb$um2B&~B^FH1Z#-$wpZZ zUF^HM-9w6h=s^EwUSYRuwKcf8*~=N`xMnja zbT&UO=Hyt{<8tY(!>JH^_DsY zV^kNj=4i5i3a!Wimxe1q#s<9*c9`_5g;686yJ_P%;X9V6BIds1nk&WHL8IQL`EzA4 zHEo$f;?_lA(Ve{ZjrLda^v0(oztHUL@7x}=kjP0?{{Z5!lIN03K2^uUvu1_7$ z&A;_Ye9TQH)}?5&f72|$5WN}a&(xpOrRhGtlrwxc#`2IOSJ_4cSf>8``$I>oxD-;? 
zO1ONA?qeT(=x7clJ9AE<&(9C)g^9g2vnx{=G$ur0r^@DV9A|NS|VYr%&OraR1 z(|@_o8;vCUKlurA)P;L1V532#9J`zAG!irD=9XnDz9#0Xhzl@N^0lyKavxB}F+@&c z$PrkSPS{-7b$v=#A|v7fN0I~36MeA$89%$QWZc&DYug6WglUp;)B$0dV2j&T(YIu{ zPHwjPXpT5xbVDWRkE&>}U28XyUKaX-)d?0xL--GAQi4}{|g zYh}$`*IYB_oO4EVt(Y-KJnFC;pw2Orqtb8ub#Dy@kK83cU0OK9p9kYP7iRE7%??ZONz)f{md#OPL^Zrl3e82Fh)3c|0 zKM+r{l&cS=R(ONbWboGNYm8$6DhoOEliK_c0N=GKFZLwoI&f>+#i8 z^kv+>9>l6<_q`Kr@@-R40cCitFLyO}JGtV;il4XcG5z?$OeFu@OTimJ&(d}jFZhPF zUj7M}z)A{0ZwE+AD{v#TanNisT=Fe!qYTIWaRy|zYj!%E9C}JV$|V;Z+wD=)Nu*>l za_E^QaO=K9S$tx0)gKM{+Wh;Kp44u}2n=u<~c6 zcHpw_oFYX0u-Ktv%@_+u@}qx=lAt{PQh+gZP95Hl@}oO$zZceu?ezD6nw2&OVSmas zI(!7w8vphDKwE7FxO{S^v2ozifWWI>FSbwaRLa`_=C~Po%-#HjCgr2f=V4jJZ01t%$1myAqw`1i2m}s{*zptP~9fn?=@&xEW|57DM zJRK$jrm_$ZEtP?atZrKd(CYvMJ^uBm@bk}yPZ>$@)we)_4p=npYrbQTCT33&Xa@a* zJe|Ik^~cDZCZG~x26Xxq0lv@JUOq6u(*a!Ubin6eUnxN#1Icy@sZ0Sh9kX#@4GaSMI{@(qXoqO(q5)b8z?wJrJDt)4RDS-o=+w_t z*b_{l$u&oD|fKP%KLx1~8Ym5H?4$+1`Kaf&DZ8GZ%QbGdA((#1AQ!6~irLBm^}a*77N8yRojO z@W?8}NMUi-f9I2b8*IkYQ4c`4O!28;_Abd8*ehxE!;w1Nt7k|6wg7a5uCc2h+!&~! 
zj2OoO!vG~}r$eKFxKKS(c;QU?S?Ql)TjJ6khdVgl@vVpS9_f%kxU37njd9uXw8Bnk zsh=%CE{zlz#c1^k#mByPpD=NqX-paIv0iA+S%%%hX7^}ipcP`cVqmt9)+} zK$nTvkQ}#t)GBJ(X-;?}%ik&(c@g#Lr~t(^Y^3ktx9jn7k-N0=Wl&a9oADQ|K46Tg z;dJIv9&V1GI3xT6QDnq2#tJV#vZwHjE~wd%L$pwAv%YP7qv<&}v*1ueAkR1aUK72S z`77TJ822;$64^Pk_w+8r*$KQz=EevObF&&GdRh(nhJ~8k^04AktD7ePN`oo%!B`bN zL69TO2=Xj3cp4Qr837+Bj#8(0eGcyYXu;Oji9hQ%-vEf%%)?__J5_v~U$qN>Zc;_Z zVT|ekihPMR`J1D+R4tn#OwEScR!aDKDinBw?Fc1S9_3)-mhqN7)$@57<7rcejm6VRr-isw)t~tIq>Yc@F`0f;|+Rt zGw7@S?Bl9_pi*mHZFkC8gW=g%VYUx-_Ta@n_|pMhF?emY9T~o~6JCqFjptLOF?jP^ z@e<3~;M?Y*m}b_*JQRSa$k^)#9U04XIQY$nTRZhdSPFm)vdU>Q6n+4eghgns2OwS3 zTi*7W@!8*t_W6xZN0r)gz^AzlhWUg zfgZi>yrFr1QrY4t?-6!^h@)SFT(iPN=^k}yXP6#lb|KpOAr|)-crT2X8SCt#~SFu~5&R}sjqcG=1Q#Hm}2DNxg2}sqBlkfI~ zi!5JFU?*6ZkVY-}Idt`+0Io!xyWes$W!YcdyxzmxM%5hU;TKJ-4#KVOd;M)}6v~{; zcuAhx!J4WWL_9kK!KRg8O2jaX;R~_9_S?$>4@tp$Ri&0SB{SiNGs9_e2UDK@@n@?1 zj~C6+Kl4|YzWpi9?y34r@lwAk50@!%2HaNIY4QW-`4>J8mwW<_SO(wgqZsbIJTn52 z?j9rgV+;%J)~o@6gQ~;lQn|oz5+DKd5M!qU*3yS$$G8uu0@)eAL}4@TXWE0tg67LI z7xmxV)+KjE0vWTW2Y`cFr;nrQ74uSwhJ;s|0JY-@p!u^Mbd+7rDIkDJ1?ujL3VTbz z1lfFx%BC;M896s0W^d027Z)PxZ(UIf5~)t5edF>V6}=L;0eR(r*!aVjyW!y8KFN!y z1Z@h2fch=#c=|1-S?nZE?D93Kl%PVw(t45M8;-`jma^==H%Pr3?WzmEgG2G3v!?xA z-23{jsm8>EGc%8tNk8JivrK@g%-sUv!>B;NYvw37S+&DNiI%z>cKNuHh_!^{eAlq8 zkwqGGMT{cJ_cBf#rADA{UEt2j@I5hLLaVb92R8fz+%Gd&<9BOn9iCmOIc|7R=L$dWA|qb}5M zsU%b_o$*wVzr?ahu*Bj|&+%AzrS)1={=NsjKik^YVIX6XBJCgV}W7Oi|Ut z>tpoB)nuE3Pqw{el+*($xhu~OIt}m<2LXg9L<0NtSv;vYW9t(VA>D|zLvc}lL62G+ zNZ=D9dn@u}IP1*G24-pjef$R#d9r((tmJ1g zv7?T4eAgALTjG(VC&WLUXnq8_cL!^0bk;I?{;0((n)r)nvOuzAco+xMFYphD-sw@? 
z_t+i@$F<(@Sq&M<7TD4Z_-jT&OA-xwX0Wz+jqUZ1Q|;7?9VrDA7?{oFX;P{F-qgbO z{$9iH*3P{cA2yNU9qhiR|LpEuPU{bRfao!HeK^7iGXVE}v+Q@qD_ zv#B&`J^u;Dwu`o7FM#&xF(vm2c*$}tcVv+}Petsf&Mr{W<)p)skG2fcU=#@z08EtG z!{%0t6!`wgK4(r_7+F1(K_+ydwM zVDOnUrYG}WOov*qu&sXfXDmqabbRCqHVdIlIy_l3!opBHO<0&3aR)oAM$pC1!WWZT z(d56S5y*{2*KmBm;xP6p=+J|>ua(6#`+A$ObzK6*gIdW3OA#X$ESW5QNITfa65_&# zQxFTnCnyuU+HMp*k$8@sJQGrioWyNzPjph;njMA7q&D8xLeODLW~u6&r`pLxC+Y_k zXyMQpLi$5r3jfRAN_HhaCmw-F0lsskh|7(e$54J0`8tSYmN4hv>VsM_ZUr4TTQVW% zvwtt8}f3^pT+1*?VPX@NF!v|#k>d=J`=eGX7La_T~m1CmAmR>nDi zc`wA_#bl_PnL(+83o*uB!wxM8aI5;%HF`TX?~nEQGrsFFKKSW#s|4Fwy8r8k@15e! z02R9l9}9&gD#L~vK)WpAz-%x7I4Z=^s=P|LwF8)_wlz_YzsSxvzQgLYahzn<^brX( zLlR)g05JWZZNA#7>kI*E5!=%jxBkz#{@K;4=}94@RePNILtuPxq+;G?mRD2ilcZyZ!dR|)jPT$5|EW{-cVA$FOd4D|g| zSfi)I0;>|4tmv)sN-Q3QcOu3>4_CGT1`aq34_{Yo1qBj~TYb`ryx7T^?Q7UH#2jyn z;|x(3OHo8AHj)@2d=`hx@dmDcp{Ti0b{J#07NpL1_$0cPU2j|*ymrPSz{9^;7c{v*@iAvVXgXKwyrF(D)`LaWIEN>p1O2HjTa!zb z5t4%bL=UC?xpwuL0BbxUTNMLq{M+`G&^4;wr`ovNwzu`GW`+%%S%N-!uZvxqK}a?( zJLk6)g2>zRRy(Zmh)V$rQL|@C{r~1**FYpUqAmU?fLPrLsUs3cT}jhh#YR)5C(Y~; z$c06%7N3(Xd^nzbnBUrpc2^+%#al8%ciN}7vI_#CNPln*u5~6p^CBL$8Mc*JIJ`57 z39o#L(X0Bi(6V32U6UVrneCEV*gBcQSc@PpPlsMQYHpaX!Z)6BL#==+H2-GSkBI|t z6u2ng{?$j~%BkLcY!qMmzz!3gSRZ<<{)ay!nT~ zhTj$HvHSJUmWP$Yt5VCnN7>^&rH-k-P;43BO%8h#IWu&POCo!WUBirc!I8Sd_4Qsm z_kN?AxT~|8E=X@$u=2XPfDtK2M;ka^#yBHx&@3y~E{hEt2_fnEww6va%^qZOP;i!K zv1p2fVoTO>R17vAN^IUbQluCVJJ8#XSoxDl*9c?EUc6ppG;H=zx)s-2Xn?>-M3}Aj?-BtAuCLMrkSY>>RVl?!{l-j> zdyy7T-fsjp(s{}>?h=K;P=t3gnH?6H%FWvvC;<*T7FOrpMD9NFom(`*CQ4*6l|l%2 zeESIMs7-9Cp4tTqZ&LSG6}$R53-Vmb)_27$mQuJ|{bn;b{1}B_z*6wXR<$RrXaRst z=^8nX-nfCCJsiw$u_V?BVEyyWp+r?oB;%~7I;l8bjTb@aZUUJ$$c^sFA&Q(;KBhNa zXfZO*VutBQ##c`Iqj`{rEO0+Z!m)&6DfalP8WARuhC3uD`VFIYhTv3^;U^nXMV;`{ zQQeIffa00F3}(h2mvgZ9fjqM6cXqF}Hq+J0aTYzYno9x%@=^m@PWvxWGb*b-A9t9I zKo$-=0F-m#^u_;uw5LUD6+dC$V!a&&Ot_wECp;;A!d+c*gmsVe7SgCLojHrEdEK&7 zh}k4tbnKGsN+2L%c7L=JnbAl$zzL2#%9L;oN0S}1&dl!EqB$uf_LG;ygBi+GtfdN} 
z0h=aF`nlV2$eyP-3JWhyoAE;Lm-k?+Tqjvd5xXs3WlA-3lms8&b<%Dg$=r9PyOJ9x zAsxXxi**#PGH2^FuwdyF{u3P%`lbaBm#W>Fj{0kD+Z-o@Wa@UG6&-J(?JNy9hN!ud zbZSrNGCQs9=gW1mFXAmy*)AuYpS|EW*Ot4I>aQl#M-JxZ^K@2m+UTQ|ZT!pGhIEOy#m-hFQ4zvL8JhKTC1!#S+qdSR zhYyJ52U*OZM%hu>b+Yz2W6f<}8+d@ncRaUZCM(Rc?!5H$Z#Gqo`56o(6#BD9DAS(- z{Y;KcB)RZt`$1vSM9&Bs2-}%W!~I(LHL>G@(H%Yj%vPWMId)ng=^g^IICW8q2&t)qWlT74(^3ysY*=9Nc-CEB@R(8>X6p)R z@Es=qR0Xy!Y2a;pJfw-VVayAi4+Hs|3EnhFf4cQWOIm8?Ie6yPLK(GiQ>(3&ijx}r z1su{M8WOlsUU=ZU>p)EHEV)~I@$cG;#>K_AD_icOjYroNTMC6Im=8d+s=phFiw2)N zW`da_6Qfq#Ja_{Tk?m{oo8&jYb!UONyc;PUk<{e)>yN5*gl%VjfST8f?7a%^Nh~U| zKg<~Fp%Bn&YuBPEcw*>#cyjaY^VaZ!l}KhWqnk0ix6^FSITQVy2aYob(;zPkavc_m>*X|6H1k$y%DNudVW(I&Po053R2c#G%_+ zt$pC2AIGGblP_T>%FIxOa8PZtkK)!bt(^qKUo`9hg5pN3(poYj*NhAPxN(2AeVjV$;HoomoF&jcMGbAsi{X7ZaXbazp_L*ks%u{TRQEZPl zXYZV;=8kLH>)2Ilx`>?I+xk*tfYEpT8h;%X4RxRIr?%x6c%c$2sH#l3Qs>E$GV^&> zY&+wftwhOGXykjX(hVVVU#67hN31p;Tk~43VT^Hjq1(c~z7kVDHg+n>8xf^`UOuU< z{WOBNEl7WxW{s4)W_Y!Q|n}}R6^PPJ(iM=b4kDDb%Z4IIK*RTS} zIUpU~?rLa7A6HwlxFQaCTiH)Gja$ITB{)%uVpmaia4hzhvxtY3g#HWi8nLk3{4;$r z<3l*muWrpPOloZ}J`;0dMw}QEULS36bah-EbW}Wla*JB(07~%)oaaOa6}}{o3Ny)6 z=UX^{jZUIB?UY~f)YIdhPhjU?E2|Y)FO2`h z`$)fzc|fQ^p3n?m=I*HbR8dXK6!u<3U!jbCoSD`sS}Y<;ll_d+6D!?y@8fa)J=RaR z)o*?Wv=s<4Jd+PTM?vLb!!%c(;APtL-3k1sVi5eRcDPdaf;h1;a4NvrZrlSd{{*&{ zU#|i}osR+ArNT;u2hbKOju->ieV27Ebg}iyR-F0q($G2;h)qMgT zQ(MEYVYmMP32#AhnZG;S)@SKut=BRj#k%>rw%p&JkjELxCakozysbXm(1kus;h=OP zUtcBVW~uX1MrJ9`iN>>(@~tCACvQou%8PXPy8}_;i7L}e=h&d^hf2b zj86$b>^ch93c|5j)NCx2_Z;|MV-xjlthRI|OebACvl)l=`HyIX3n`Dm@0(+z@gc7{v+&RmOS|GYcKz9;=D&g?N|7tq%XX4gE)}Lo2 z;$H z(pb@SU`gZp><{y}>)hu+9978mx9i8~rp*gus$)`PTw{UE`>OT6BY}F^nVWX?u9A&x z$G6`KFSG_+uODw)Hu`}MBOl|^XMZCiaDZBAVNy)rX6e^u3e^c5`Jnj4YWt>^u)11J z^Z|Fl@%0i%KQzEP)l$wXn>g6gl(a+|n~XbIbSYZt|*Z)*V-irg>d?tCc?bJhBZhdzH;w zbS&b2d%jcy@NFFxW3+lZriF^fLho=}k3Zgb#MPDgZ|v`OY%I?s)9T%@=1NFvWaDP{ z%7R4e*q>o;yVk@?-j+nqqSGs zh4$g7Yn@ia_zbk(c4ZS3x45#k6%yK$FerdnqKMiv8qQt}#Kxmml3I~lC9`K|cZRc=kS9ipEyvXdl(_B!`vQcY%3Jf~YBWqQ 
za@G^0CQzNXu$fX^i5>bQ0TkHrk=N{m*`L~Fnh3rDXDV~|1M9y(6V2CeiK-yRvz5#? zHW3NXUCNHpLP8UWdh%FSCP+TCDs-=Z?|CzS`mx~Jr(>@TgwWHSf~KjG)%ShK&@ znZH#5-56KA7Ch_cT37s@q830jSVv_}VEAz91)0SO)2d0{=ob)dNfHN zo4glZ*210b2P!_%3946{sBrPUfRNJNCTC&LgS**xKRwCF1sT#152I03jn6qmchwdgu-N1TF z^oNfNLj_R##1%IX!rV?JXiilKtP(fd_xm_2uY>w|g6yn%%I||7^6kQzP#V$LvK_Ik0m}+8kdvFn+dHPUeUEvW~-d*Da{m zDb@#k{mq+xKT)PFYbqe!-kNDGd59|dH{#PI)b>~&7pyg{d256G2|iNh-oC00t%AQs zDL__f3v;V%*#LIKvm%5-{JQ=IOjT>Fz5ijeW7F!vN#%j?fQKM11An)XSy@K%!>{VB zJ*ncR!qG`@B}crOg(GGH(doKj>sP#xn0VpNrD;LxcRXUIDj%dwtHn$!E?+eb{`g8Z zi1{+pB`#(2x#e!<>0RVr>1OKW4uBG3-qI<7`q|Ze@)}SjtE;Q{*=Nrlm#Ag8wcIzPUD-FA2q!zwdNI;Xv;Rx*1ahkK~q{7hYa^;t>FP<^Ayga(-S{G+2f0%#^nS}J^tMx&2PZ$befi$)#j*#~sS$gyKR0RVb2E4o% zn4B_@b*L7L_iVFj!nofLJlK*MI2mE0U=E}grn4Xs`ZdrHV;@j^UsJw&PhjZpJeuz7 z{+hpk3p5{S1QT2Se2J1!xvgs z<3yem!ppvk1yvQ_y7{9L)mC19LjSpFvkrcR04PKI?HOW+rEOy_1NC5KZ-R0%KAs1jt9CId&Xj*sit-pN!<&^ z1&ICw_|xv^eoy>KU(q^Cl4QTWr;j@4V*4hf4|mwc&0vJup7NZOKo}=d z$(FtM*eoh~<&mH9aRO_YEVLA5-Vc+!VgD#J4O)trW~t|Lp){IRp>z&+R%<>${YXqO zmM31N%TplmifW20<@_o!aa#hJWZzAyEq}LU%FJ*bYr<17MQ6!42&TxrS+I*{e_aT_ zM=n2UXC`gp;;_(wBf*&wPp-*L#CZXHzV=`52SpnfI#-3! 
z>XMdvfk^|cO&(wKBC2D#F4`B_>)Lqx3#MCP-b9~E|5(ZrO{e{K=~g4`Qv9&gnCdmb zhcO|cJ4p|c6>2(ezIA4P4COK@q$|v%4R00qVb9J!=1OhoxAN?CpBi}0r&yyNIbNd{ zHA@;{4%qFysP^sZD*f*pwU}PiPsoKnYctskJt6|S7th6raxy=~N8KqF`Dr!5kjZWS z%d<}K<;NOQ&$fcxU$;<{Z{Je;Y>YBlWl~P^)s)+eiQIhDBe}{?qfyGXUD@$n``wrQXlspDNL^aLy+A?WxYLIai6t!^FNq zOAqCqIM1A#-UqcL`Ck0g%-f!=7tp=;gWT-uKB`vosXDQ|k#JXr=IR~ogq*sp4`O`k zI`QG{fBw9-1QUj~uym>{jHVwihy{Z8USx0M?#WXY@p9KwK#J!p)O5LTi}19S**u`v zX)Uln3p4ZicnI`ysOJhh^S1h*vClgT#oHX0*p;uw@s`E4&Q9#;+^jkFI!IZbdFVLu zJyUk%;ONJe$>>^V^o+=p-F6a_F`;c?qk}U<<{n zpbmAYaH~&0tzBNRgeWD@!S_D1VH9tf1)r@c}{jl&dTF zGUklSxafb(r+?vbt0ACPMe>uc!&|uR;70KQJw})GDbm9qmT(a_5CvvI15unJE~=OH zDHa=SNmoTm#mRoneObn6x$EHC!ySiz{aPItOfc+P&^j+?^~qj$ISsmkIWtO3K==H3 zj+ip(+Lr3{mC9}~&iF+QZA@>*S!mCdqLqVu4$yDLXZLQp{Bft5oV-`2PygwuO3T&X zJ2@z^ZxW4;oA)yF+#kOd4u=uCJvBCC^=Xb(Fhr!N%%@ zo|<`~Pt;yb%wj#}`rQbPEk5pWvz;nx4lpTU*Pg+c;{AD6tGcr3oIjtUK zl!)4oGF%?gzNe|JMSJ&B7|v3j=1%w)e>A(OM*5{Y3=1v;I74O*PoEEy<+_pg^FO5Q zncUkI8@?WS135~(RMGdrIX7%g%A4vbw@9vj-cooj1mOah(POr0ZW&W(nevJEJK>*z zv+Zd+LlgFvA?g30@w<{F5+q`Eclo{?g@!Cp670!#<>+^rpkQ!c&p12YEsCpfCvbQ_Y&`1EvLbg1Ywb)+42;Od!XepzlUsf%v~SklW)4@k zzRt(Lum72Vk}R4?>&7%UXPYW#g=HFjQT#@y{fIt}n?+1R$u9CJ4R-Izo_Tv}rfxKK z(WW5SH(!$_R%e8+t0(L`-5asnt16z~)-GKTSnH15qr|HRf6Yv56Xr7U*{I0;{GiBW zrK6~RSRt-{jI+o=;I<}?=?T6rEU^aER`^;y&JvFlMD89$63Q82bDSWzmxdX%rdHZf zoRC8SU4u*SxEeEre=qp@tH6LTGDp}*O{I|anAYs1^O{}oDIUZVfVli-wrYB+CarNvy zMZp_BsjhZzOnRt{W@DGiPcr2`p@o)0sOaYG9VTMGs1Q0j>Ji7nLJpt4!NLzNkk+Wq zfzU7P0mPHZ)yeOyVvTUi*uK&4DFmCq1US1+lip1TLjE8_+x&;huvgw1qj-yujf*Zz z$pD48WqhSa+ELkCXpiMwI`sN1c{iMQQI%2N!sR2^XR4tr*zCJZ! 
zFUHqF*0|O-CikaY3-} zZ7b>f@XC;{#+i7H=NyCfukS4?=RZkKAC%y2FeRMrE#Q{UH|V|J7b~8R=+V=^_(}Wx z>(&YhgPUE|cdNne9r*NQ0nYPB%yDD`-OD^9y#~S#Fp>oL<D{3w# z+b2Q4d+ZTzEF4woDL?qn_@6_n#0~15T}Y5rcR1Jjjazn37ZZm)Ay0M^s-Hod`x@A+ zrZ;)ZOV(J%ULS8X^qoIP8>rkZ!-gZ2Asu`=&hFo|Et-wAR#AE`{!7wm>| zU%foIj#@-ji+*kQvh8HzS7jWS@}u6nu2o)l!ZYSA*z)!nY#vJ9@cg|K<4$_?EGx^1 zCeS1UoK$(=JcCIj8}~eG5C`&xU?93d4}9GQ7;K_ASl;cneHETLf}a-=e^nUqokoWj z`5#MJ>IZ$i_piXylX_~WrCo`ho-;dKBFpdXuYdMZ1VT`W7;-uJ<85K6;c1lGMDxxQ zF4HuBJ7?4Td&?8@ScjE`)25?nUT<+&DNP%jezeB9H##${9I#8^LEw65Yu!5+UezwR zJN;D)?by5qH>|DicUKRsI#=7L8T3)p#gb;FhRfraAMASmn3&@AmP)plg0Wz%Huq-V{^*0p8Y}aq{NMK_^zw|Ej|!RJ z4R!sqvt3PhB-{z*>6GVwe%80>v{x9)8@qR}UKFspFP8AOj@N;rlcp@5`>r#7gihN& z;TTW<$xei)S{VM5-|%BH`g)}q(~fFZWUQoc8z1SIgV-db>`h? zYeAXs>r0+S#qB=~Kbl&qp?kA={xP?R{Q3BH2%`VTp6ccOnAJPzkr``Na>E{lS^cE< zyLyhvtk4~@x^Y>`jVhmX;F~BU_YO@-4H!`KEa@*7DnKenE6ga#W$Uf>4a}Q8K*g#R zb3-Sn_Z4+Zx0+=?40w8D{UDw+y;81SW59#&?ft9^;%6(CVe|90%H;0t?UqJ5CV@NL zbbf*Mz4pNV>0&T3?0#;$8J>zBdKq_La*-e^Pi;7{Nb$dwWY<+%Ev- zW0X**512YhJh2M~TVZ>bcjIfFvWBlk$Iyka5qhO>(noE|m%;CyW1K9b`^2ugy^Gb_ z(PZIeiKpihX{pUPt8^86Ry+Ee6%c}X=*@NKN`bvCI=tOt2tvQ;%W7-y^f*dbS_=0& z!_gx0W+CjKjP?IC!*UU=3y{(La|)0TLiV^+=--T1T~&X7M=eRyu+FyBtKU`T8$!kH z-0vMsJ|WA8#5Z|zP_s4hN%Z%KI|J)9Z&@{t=E<(b<1d;~L6q1Ka7-apaU?h>ekDk^ zHEIU9aK~Omdg8NjJF(mKL($A)_wuABB#N;}WmVO?Obz1u9{um%K`i;3|sU-2|v5a2GscRUZ9pPMCKVX6J}^Z&8U&xmU% z=~-5Z4;O|x1&Jl1?t{TS za*oM3=vH6@sL+gIa7sBye?yD#={Oz>21P9}x?7oNBrGe7CB47nVL|hCD@EeOBZ@^3 z*q;~gQjc>H|NG62P2~|0lrU%Tv7k}=lk9J;OzM*lN(A-=@tloIu<<)qsri@^fxJ1ld`6m!!W@5PaP{jMPWA_~&wk2iZIL&s&IZuwb=X(pB zb7V{Ph?CzZYBOcWOfFM?K-Je?UZRz;*iZ@2jC-qo;byfdc)9}Ac;Twg`D;oGQ!JIO zbA|!HXMVdDMnioYYyI*es1h1!&C1%xELS-;B`P$wzLuUi&2ubS96@2ggEfo>IMmQynnOz+#}gDrp=QtCVwtooP#Bw zE{lqa!qG!3KP*xMuhZk7d(QS6=1I^xMM=SoTym{GJ8c&P<@pi(aL;dh~I^VWYaC+twB@a%}kTp~( zYEJt~MDy9LfuENlo(#kd3_6SzbY$!_Nl>fyv1slnkx5mp{D#kLSADBuF3J76SOe(FO3FMd zi)9+TQLy7`5Vw+EBVH0#+wLIt*VspEV%)#3SEFK$g4e)Il{;Ic^5ypI{vSPU)W>B%(F!)}|=Ark)_&yQgvKcN)$hb~ZKYhROTpu;*T{OiGgs 
zowGdy^R)N^wiFlQgU_RH=q?QR)d-9p*;0r#rrWC0QJgSw=Q?m0rIxMcM)M=(WCQWU zG3ul#9~|Y=?PZuSI7(7$*;RT;Ij;mS#khGRDbGo#Ibg8V^w+5ubu)m1Z@sf87p0Aq zQ%?bPdw@@Re+^zs z@p=3=3DVR2Gj+{V74d1MyHUH=b$KZuPcv)jbMnH`z2DutU+3O+ZGXjhbZwjeC0+jk zozF!jtB8`D%-3Rs_wGe;K>Dwl2Z)-M$tcZ`(SV}rEdkX7_dtC8f{Sz zst@t;J`qgAKk!O#WA%?L+nC|tfAsT5o*m6K|DeRCGTJX3PY&pBio3}zhTmguzF5Hg zj(?oBzr@7s-5n=txfst(ejlm#HYR`4x^mWhB6Vt<`B<+5FkL2&uRjL#I3$7h+tLV6pqo%6^ScDzx(wI_u&p^tr}6fE+ayS$0ggDq>dh>X}*=*G42;qyE#U%(CBw4eLI}` zPTtS`;lSfzA;@7F*HX*bk&PhzYVK6C)VchLpgW9H;zH+qlYb7Zl#-~{ppza_$q8+_@iCMS&=4=;wFWh6)kV> zwB2R!O|+tNxRWp`1do!fECsyB%S`()A@1pCDcP5F;d(F@HXurxBDiejEYBpO!}qmH zYK~eGz``sS&uv?)sFjS^b=B|L;~`mIZbWmGNq?Ox>h-0t0|dpi(P2F2)s!F2-ZMne z1}wXzH9qk=@4$c`wZPH6G887V!{v3A&gX2^;xKJ9=J&~(L193$a__Ai&zV@A+B5GG z$@lxdqkiO}kX(JodF2TFk4@n^xYpUU$qqvOrYLc=Khh2FDJy8ym*yJLYF}6_f1_I~ zOXtluDsf(|CsmyH!XAlg_~wMdAXgy06&y)Hcdql9lnH28Q8SMZ`rLkO2JgO{QG3|x z6lCH2Q{EVvr~7f&n0a|{im)_9XPztL4FAX2JWIeTv`HiUv)W~@-2Bpg{oO}C&rQ7H z5{7kU_HB}PV0qtqA+T2OR;+hoZ2eLjrDCYEWqmyvViRMjU(|gr;b~m!|C!iae#7eK z@+ZaAvzMEWmY=yXl<4(b7fCt|Ws*vl%_Cp_s^l_2_R9e?)3>v3ie^4P`fzw8`eUsC zRd&{|Gx!$$e5l6ucS9l0K-h-^U1`Z0#@3&9>~{%TmMSrmW|kV$TI^S?Wu3fvu^gWR z*K7K%Z$*~n`D>FEzVm4r88hi;$m-h~>!AKJ&KI#>0nK-18}W&? 
z37iyja#oKU-+%<|+g13gyqNZ#(`VwJq-T55oHY37^&Zbp`Lf+2x1!m3 zscjVP&vH!7Jbg#~CTz`wvi>8*LfL8IwFQlo;wvEZ%rr81bEUFfZ6a0%e%co8!LXWH z^ge2-u3LWFGQ6nI_^kq=eU+Ht_ufs&j6!_eg2niq)jjmd`3XvpigflY1~>NKkg+qZ z7jBh1^wi7)o9-{*dea1|-BphoUea}$lds>^SXngV5^>Q!`Eba@q4@mBWuT<WD-kD0DYU;SgLvDX%%rlH7?CHh8M2SR9Y`J}Qmy#J&D>xx z5Uj19jK5c9;i<*9rmAU@|7x9W6v(*vg(c7PC0;o%Qs~dMYJUH(#QR3hvVPP>z?^q9 zEm<57UX(dNy9>N;m`o==vJrd3nqlm=kcr#sM@bAGbul3z4y+43=iNjY&fcwHY8!(8 zP>YVc(*~8GHXJ~72er8jO@+K~XVYG0DK;#$-@nMAZCYP?00Mjb|Z9V3MR#Cb|B4NpAk}xh`oFpn6zbpEN_ zSiDC+wI>Rd{$986LYKbePK#ZpKJnP|Rhy3c zf*q)Z43PKiKTkd-h(~A}v?Lea|!Z zK3x(?IrGDTocPO9ELkfr0j~)j_Q{Eb0%rk3*$;<+!LOiFDSxV`%vjhZa>K*9`1_w) z6-w=O9cb=YrQJU{C!z7&;<5qKrE6EL^d|NE=yR1(ji+%(hMUBt;|G)HjpjwqYhUHG z;bd#~MsLWT6N#lO5#dsb@!Dk(m*A?u)?S(OGZ6Ef(1277b8d*UX^+gjz0f~wGOirk zVSt4E&SHJpVlvp5)K!e{YovwC-C{6!#(7vL0lgAZ84X>xcfxjBbU zt|gs&CU4RGkw>P<+PEl+!`3eyw~<;BRQ1RW#hHnAVy~{YY+ifKK9}@5y@e5dz5|_3 zCtaXmCz8?P+ZO*Gd*;n?=_Mh)Ay4|;7M-xLVyfWKX9KZR*?+s5;pi}3$}?_}8Eav0 zm0TLN70kZruPh)>IxnI(M*Lu}>y&_KtXQXZ5(|Sy?c(nRL&Z(mCL8mvOLB?GLAfHK zKmhvi0*cV95d5zN*O4V47kFPks5VV6pZ`_QL(5-6(Cd(wN@kxPv$@cORr1CcZ1cx4 zaO!LA`U~^g{!GfO4S_b|sj35r`QI8Al%7<_+mb1C3NxLmUn*Og58JYd(uMjEKDkS6 zEU#X+hwz+6CIOcbx;NoxwH+HPzFpDiomKE4du)-u@Yy@XSnZVXxS%9LHb6?zmQ6VO z#*?&80yl``k#jUykrjD47JDsedtQ_u`!(ENE6D2Y<-a^4z=ruA>UR@iPW!(tAiVr2 z7id9j{=c`@|My8_I5sis+#IjbS4DV6rprFE-7+L2bk0peg@4UKYshC^Cl0SvZy=}@ z@t|AN{bztpQmYtM|LMIS6G!N|TAo3n?TD!o8aWYU?n85myv2R(!LoN-_ilS0^C?u( zmD&5Sr1>nnOALy8{CPqpi#REF7KOjFM_V=kUcX_rq~tv5R+GZ&yTb z-0pmH{vy!ZZKq3B%`eMFa;@#(EiXUja`{aVoC!E+tv!y^ByQG8ruR`LZ5-T*_+Yf9 zRUBn5ktaoHekyLTU96NQr$tphLLK$zwe-OEJ=`Znup89diZK!Rqv^dK#li~F*8H2C$m*uaT+uQ#5|r|_bFH4|FYPFGK1$8y5btKbJk`(9;DMq zyimPstazO0V>|caWz}m2SMfJXf@c|Os1y@qMbn?!C${q0eOun||2f%LL(1&>)6kT> zYy7K}l%cePYfRW===&(qqnr1*8>s8`EH43Iz;s*^=~@5z&Gud?Z0>f|0A(sJYgY{G zIt4qV`^saKu==Hc$KWug@2gDm-h#P8bduZ+rF_)!{)M@wgA5>u_G9y=X@3uOzIT_p zRbFjpj+t)G_UtoaKeJD+9vfbJ-6?;!CC7}qLc1%7lz8vIOditw=Z`WDRx01CU}g&JvCC|)Nz`OqJVs>L*%&DBZ$jXI+o(VnXI+eB8j6HTy}0d(cfS0Z}! 
z$+>A24?4!PW_&+o%I2mRJ)}A8=CEya+7$O$r-Aj+mqk(Rbxmho((N1QwMhmT1?SuJ z7q+z5y=(i-N$)>U)<4JjDZVnAO)+hFVP;_Ig7!q~sbtdr;-bW|dzw!#4}L~F!4(ja zI+pWWox4*sk5WK~4W9chc7N(lF_agGDFoP$G*veKey{!dGezVboNFdU=P(h?jUyy1 zNeou&b;chSngz-Dz6g3r_huG9PDc~mIA~a#zB3_~LZeTt~)r4DQT&5jRB}%;W-p%C76%D07e`h$;++z@IE2?qwfucHLvDx=C zLiID!JugS>>AZ8Q$1#)Ov!cW)&rsj{?H6;D3kx5I#6v&!y{?(h;GS7o9;)~6rV93P zyg*8(3Jz&eRLG1(G6o`8?R1j3e<<78>_2xf_U+7|37fp3Z&xq=*Sg;27U#I<%!`s& zx*M(&sTU_c+7);t+{y@^Tp~Y0H8vb2Kr&VYdQtBJ6f>2HHYO@1x2WRBOwZl@Oka1;kL?H8 z)T~B*gchgGrdliN8%s73oU_Na1>91P zE6`EEyu?$s8M9t(mON@2;zk2{58AB0qCbYP!^5T5jGulc$E}xO*}vaFR+xRW+{BU+B37!*xd3 z7v-%f=H$>%;HMs$*uME22hE2-jc=j)t54mFreXjStKDfGpr6W8J=;5aVR92WEAJ4Gi)

WYsuKPBcw>_2-UyqK|IK6DlU~J*zP0}nGl@uK*4ze$ zN3!DzX`e?zGPK@tXaTx#nlvBLWFcXoF_s^Xmvxq=2Zn$1PKr(FDSJ8_j8qm%x6h>@Ws`&Dkj;4GB#AvqfR>6B5~X1 zTw-0~)^b@Y1vup%m6DzLaTkTa4UM$oka__Dn|N9c%(J@#L8e9GZ+^iTv<@c^J5BCF zp)Id>+Kz|XfB{j`n;laeyqHh#R#GkT*N%AsR*ppV3ChVzwAHjgTZ$YFHdu1xf^5hb z1hsX?cc5KWDJSQJ-+3vQProVnx`S*y62S``mYkzpo-X}cHwLOjTNv+nzcYS!!K2kU z__RGY`lc7plU{aaiD{*#5ZXVvOr^2EpI^y$J~Qms()nYLtCzH8-%0L{kxsQy#qFK^ zXJO|T0qkqbUr2hu7y)P9o`E^$I)Ax2jS820frSc_ujZ*T7-ZpqwB`43&wAm&O%|`i zUB5Y8#2GiD@40c$NM#b_tze!m293Oe(=lqYH#bKhV?NjGP7w}yz7AG*9ckbd9mOnr zOf0G~MNYp0!FCq?79U*aArh;}TKESx@3|MrH$h-Q+e;K8T3Cu0p8BK8GJjnA&!#i@ zf*Sc8*Eon7c$7c;6*?YNcHl2L*~I>@#4UgJv_~s|x|8l+Zxn*OVK^e+XEE zOX2E`zJULvLp}t)s`f#?3Sy!oxMPb@btod^dQ4E>06K7K6@d zh(V&i)z$#Wq-8%Zj0&ecLZVLx6^MEm{fAr9QAS}jx?SI^s z#i3NwejC^Or0yUtO?4U#{g+$y_<+wDGw=<4s~o}pIy1ykJhrOc$U?uccp&K1nuv{$<+Nd_{Nlo)X6QE?Qt8Ku)IDH6c~ zBOlgi3)}{x5B>J%WN(2w zmu>}Af`y3MQ5i!bvUtK_wu2#a;+oz~jhMXwew5QR9lQ3~B^--fO@9T;wJ~W_zzl9a zM>Y1<&y&8!C3N&I4aZX-H%ksGq>k7mtqR*0hw(*YL-nq{%^feRgVk7m&1rq*8EW&- z2QmpCTf_(}EE@-82yLJkMd*$STDQ4e4<;B8NKXStVyzye>C!&<@pY2KCs2^hyZP!c z&IDK}cfmp_WN6Q1;afgVs`$zR#{ZhY<&1%`|8fpQB2BP5tH)>K=zl01m>mFfnP2YS zzfYI-F={V1n+O+P4awkGF1BoS3dDZ)yJRr*e)(5Z3-SDZX)-Q5)fDgDgwVQwIaB>% z`dlvUtBrb>lbnintZ*te2VGy;f!LuV`#n^O9xR^hZ2Ldrf>94VsurBSZIDcTCGRCy zCe#dQv3DRieXzK-^y_J-EQ7-=)4mI$#By?kWL9erDm%S8rT#cTUSJGOz6MCg&DYK}NToK2cE>86W|x zZb(Pys3`_s%7*u%sd)f$YgEEJj8uS3zA9p=QOjGcx${Stil!%G@(<7asvblz!=kbT z_+9)DYuOLo|Hpc;Xpr^-Gr6rbCvzTrafy76vIhR24+ByyrLVL6ABgP# zyfGPW@JFE7nJ_b)c)DkRwB!c5rxgHRZ^j*0`l@6c0x%Q_n>%M z00T>LAg{LayBNLPzMo1;f4BpN1%V8}!sGidOTPL}FS=D7dw5G5p3sV!W-FUskl!B4 zTz>-kN(`+3I3s)$2&?O|LFRRRscUl4C>E_cIY!IWtIfN(m@ixNmEAYPQY5mgW-c3a zB07DcsdEzj?U=8ky9OACs~>W+=Kt?0p-003pAw17WSj)>(3oQ+8#)OlK3fL}#kXLq zRX6ySW}zU_w;LC}bOQ+6GgX3jXyE4Fvz2 znfEV#O-;u`XFFdIZSV{YJ*LYSd;B0D0sj6_rw$*uwwL@7Sq}!o_wNCIBVV`pOCbIn zN^j7@sXX+;ENuAeMka_0Yl&WR-6*wYXw2A|_NieZp@MIum|#nUrXs#nK0k(##f9DD z5$F|KS=Z}qVGIo4L-V+o)Kv?HR13>d{U#l(=(@13Y!zzq&U{BJz->evHoM;PcA&5dKUcAHsJdC66;|1%dFzWLYK{8alM?i 
z(|QZBEI#G^_oR3}e}jjlkd)#_gpQba*X95tnPnA1q5n2#$>_g(toDKlma%?kFeZkE zn@Vk5^W^fcKKTvkWCyopj_RE?H;I z7kkQ^vi%f^_4dq9vqc*r;iZ-Vy?|x&fgfhHwY@hBA80uwtr!5`er94#obHY_iL~U^ zRe;-SVN5eGPtSp~L677&4WR^A3#qvzGK9C}`c&6EAASH-tvV$dJg9-RJ?#w15}4jQ zSeo}Zzu5D5i^FwV6UJE+b%kizyo1`@^hCYv{(yRa5zQX{c-Z#zIYJlxNi=Ev;G(NI z(Z|%D8@Wel833oef3q&nelBIKRbVUIP9+aPtVg8)A zO60ynHdOadqQ}&mheb2a$Ja)LGf}jm+|uO7qdkPqj}epCr(o62!|1}t*j!NSU17Kr zNE&tLfLOZTZdrA`?ghH}oIkLdxwNkM4!OD6u(#x1UT4csGx7qR%Qddt8S&`C>}juN z;&OdR3O$A2$fmc(O5FuK^D+1H`q}m4;8hiHng~?UH`If z{5Y$m^e}jl*J<>IuM2R`0h3WBbP+olpRnZ-@{T+aeme z_LI1mP~=|t^~Jk8T=eV-ebaqQ`MOOSxMyiO&zGhdM2^z=3j$caPhG6TYEjLP&Ei2sWZE|wSuC>M6-SvqcLVUFi=eD^TVXT$aW1@Ra(;Ql|t^$C^D>O4si^fs4CJ= zQletkl*hW&@W9`T#Roxh2}wSVb5iFY0eU1_Ya->Qh2dd=mnabaPoalbo8F`Y4rLr3 zZ*V=4EkA|hm)!upQwE21)$VA753sX!qUF-sbC}jAdu4IzwBb2g;#CzdXD3Z1W<)k? z8oDhg){yF>=|TOh#>5?lB$mJw)9H+)T1xcr75J4_1T@+PKO^v0Ts<8NQXm6sv;0$} zGQCWDl{5F3;(E~##V*R$`ctM&zb*7z`X+huf(rEC$g-iDE}4$dw6@-z}405v#Q-o3M9&A*7_EL+3Wgj{mo4UhIXES=s?6Y)i;efg`o?tBT6DRNE3bMAru?VIv$ z*nQ*5>QB^fYpkZ;51J?d8ar#@Qb38V6{(m@_?v_zF|G&#Jzhon?tov#uFx>J4Vqa2 zErSauh~oRTwfe*J^;nd&Pkb>Z?(lM>Y@EFu7$MtSYb%~C=ciBx zGDEC)MI4hrQ%}rUO9t_WdNbIYZF#L-m8R(fZ8c_6K-3VQ~Ii6XKOF%+r79m(T z42ug0P7ubb#$Y(dZ!%PXYxMc!?K?uv)sij#R)cA!)pynW4)QpCPktl?2t>sDT>o$t z@<0Z7J!1KJ@dOm6Wq!FOO8dloCJ}Pn@&$g zTGYErym_oNIVs}QR<*iW`R5l7b;w4qp+?Wg;Afw`e(3J2Tf7V3BMy=4DNc_qaHS!r zM>_EBUMP6^f-GUZ2P>alnmBOFW#oFGi>0keXM>Pni!+PW zVFIJ^20Fgb@u8JKjL(FqPXq&xLbV~B@z3iuf0fh0{6CY0r`GNN7IT%xVA1bPdR>H$ zK$0hB>%n-vew-QtW46Y=`4EwCBN`I2kV$oUc&g0KLY7SfxSCi`ZHIFn+8^j#zS{`4 zogliUbD{9q91-6ggN?$V^><{gyOHQYZ~-vs|It=W64&gj-xHkQapuZ_hjJvi_o^`4 zS0gZWZl<#(QXu`4B1rqmp4<0<|p?xxsp@ zl=RHB-=@70?o}Te9tJWPW;q}a3VZ}*AWbDA(8=_!Mi12J%f?)B)c^FC26Mr(zKRfZ zhUgr1%DF@AjG_&jipNl4-~pA4*G1CQD#XH)dS%@dpIS_|Z7c)2O9PdKe)T$B`Vu&7 zy}5L{Ewf%d8%$fV_s{k|+RM6(Bw)m0#rtq@6b%*CAmyZ%(WM&|yXdjwoZ(lQI1^&%5pN{XQ@TDvjucG>vrKYqJ0>jI zv4qsX^m;0;>fUmj0RxUSO|F<$4sV7G{ILp}t|ZHmwDC3*#Q<*NRXoMiR#FAA>_LKp 
zGFeWh7)!>I`JCK(Wy5EBRgRL@OjsaoRVj_M*%A`SNaWX$k`>F5r?sCWHZS9WF zJ4;Job7ix7YyBRP{SirTs$Wm;ii=$fxBGa7pPM~By54fd;FzK2Ry!Xcin}x_LmOOR z7CA~i*`Q}!4w02~NY^-^kOV6?gVMB((c2(NYE6izr^%xZ}*W`LtBjpdA7!k zIQT}A@?k{K=U9ii-AOl z^;$G%NV^v5Grh26JjeK-eok*LZnHs`4koPTB7h|##ofV6f?Xj zw7+ayWzUY?WcJjz@uW~BXxgfOKi-oFfUC6JK3WnBvW86KsvqA~_{~a*$vqS4~*;4Jan^te#&$R@@MkcnKz8I%^(}fqgpq1M{ zvI|Yfg8xYd6rip;olC=Op`5{rds*9c;=$?;*6q{>UrfN`J&X{$y_;Dlp6mdwwi^?P zswr1od3lWWL+Nc((Ba7M3P$@$owOthlO1eN5x&zN!auG`wbSR1y097?0lyrHO9nbx zgaFYoih<*pzG%6Z>#X*cgK)lCzax;JvmJ%ssJ`O=~PIfek5v6D8RAKP9*TEH(|9xU-h8`ltLYb4( zdyF^pFp9S6u^eA_4Ww1=Ii;o+fo*~?(FQ!%)X|9A+8cV{fv-|f+C&%8my&sT{5c}d z0R#h;_W;QO>F)S%?(ca1SD{~9-@?HFGqof*rYH%eK6n!HPnO z4^L74t^}8y9&=EYhF`+8(+AZ2Rr9mT41Z1TZvPizRHN=9*)R>7MxV0^ZznRa8X}xx zAmTGGBu2dYrU$R#tpZyAa285E7T&&bp~0MowIGYt0jF=Kpx<)=-cE8hSASv^4nqDB zG!)`{P7KwpdV)S*fIe45q&e2SI+M8N+T^rQZS@83b7ObEj;QXt2Z+27km@L#xc7Nb z1OLGB2VYR-JM!-QQuHP6V%yWv6H&JxU6?Xy#BSjSN5IyFA2P4sUuiH%1XHz92`v@> z+0E`N0~!e{8igTr1W`%zc1tG&`&Y4PR2W}7GLW$7%~!g$eU2CY{H?ZK^@Oa|;p@~3 z_Sf;;!pG$4o(QmZmO0`M2ZNnHRSkmJr!kFay=HKL2VlIgzRBXO8c`cw z+!d>Vm1?Z%p!NtMLYot^4q3$_fXQUWCTLDBumK_U2N;9&4{--(q^=R)I={D`Gfo*% zGZ2GOELs>T>hb;B)|~X_uwac@3$EmTD~@WTbfJQp$PRyju;r~c_~zfagG+CI!kn%K z6A2eC_6TS;o2bG<7K#BnN89$z)4n@08sA_3UrjI&(3a2H~A{p#pN13)+04 zPuId-A6f_nT}B9Y)Bzz`pN5JClU%Sdu!M3E4|_})6YcR8$Oai!K+jXo!6 zH}t~`hv-G05POu-C3w(AxM1M-0J4li&9KDA%=M?DxR|y~i(jgfwJN1|l^{(q8%}&w z)R{zN$k67LMXXYC1JZ}n-J?9jlW`eY>t?^}ab4n%P>D0#>cw+iM$7|y>#8P0p+W&IQmX10^AuYaRjs%)QM z5Ge!d+OXh)FgnmFs{l(yN%W5I*_skHNA~F^rm+OF0 zQ+Ro+wC3(}yc!8A;PN(^QzmJvM!sSY2OOYsrrr2}>xsW$$apk#9B@qy_Aa=Xk-ur~ z>Xow=-_X8wI59tEwLp~Pz@~6vW`i@EsRA4niIHB~3?7rIq_3os0bRF3Pe~LbVC@(9d3majyyG^i?OLI+SWn7BhJ| z9gKl?UA@gU_*I<)@pr`Rw&(Hr<#&koC&Yuy=cSJGIm)}*dbuNTJ^VqJxmo?!t#@jS zHaOu@@MQx+sX4=JZ?(!2_1?4yw#6qvLd>MDyhEMXQ$+|3Eh1_eH8+Oc$u&)mks9ghsfh#t%q zN$-xc!cgrT3W-z4^eoW2aJ)Sev5iIA&)@!a3!(>k9jbK$Oh2Qru>X&rj$A*8nYO;7ap{MqMV 
zRk=worMbD>jr>SW+jViz&nIKYEZ0k}44WO*$TbXQ;c4rBQjMJ``nqU*Q=N`-;hlkZ{TT_kOm~+n936J7c{k4|`40+B{w4us}&fC(QIC zpxw{^L_+N}X34{biD17-0|&#GLKqpo;`~SBRi!>L9TK2m81)-p)h`J4?i^eJ$}bo@ ztu$vyCNwsSh+V%w<6oA8j^X(W#;*X=kQ>U7{vb!x%hm_JOp#kmu5~{M8*uNlBo4{y zgt&!fbs`#G-l{Ik!i*L!8y@(xZ2_w2*NWrIMGCc8M7F5JR-P(Ft$mO6r~A;lwo43f z1B%P<5FhMSMfM{3#${zpQ zMm%5=d$nozpP4sZp^(68xw6MP9`L6$6o)Ky*f(S0?Ul7slSF%V-2Y`{Z>8=Og6BTo zbpB0u113TPgG|lp*X}benZgzhxv^2zu%7Fm)U6jg@o30Kio zdL#HYAuv7=7atLk=A0TLvle3K=kHe-4GO!}cD_+sGS6REY`eisEf722!cqZs(bK6ZKR4s!IZK^%hoTv*p&3 z?r*dEEgc4{m%bU0bnWM2Ym$yKQPZWxC6DU{$VCvdc80YWWE=F3q6Dkq^%m0zw$E4b zSELUjp&LE$UvqAZPwjHt&T>uM7H94(Gu_5&lk{n!OVs@*kW`sph}Q31Y3!iqRp z=Qu-27_z0Q2c)y;spI1moyW`Tn7XO)D=8^aaN4)KwRGCOMNuGym&h5g*R;qKHiVr@E9bR)0;%JPcWZ5?={ zf9BoR7K>NAfLvP5wDs+FL@3@YyP9vfb)H}&XUREx=OJVGHD`x+F^>ybtm0KjoyZb+ zbbwXv+aD3IWr6r0Q45odr+JL4B9KShe}YAGgJ!-A7M#c&YDT6NebR5o z``oN({B==v%|Gm722Lx|KMBJHF1L{O;b5C{J?I1#=F`TxdCx!BeD`%Ave6JbH^0yM zJbyP%$f^yg_Udd$9Tgnw6Ci&7SohNi7UhLz;SepeV{T=RH}W7AYiWZsK-2%tx3lTg zA&$SbzvSMl_CZaEck}?0?ob<^kv^~+l6k;cE z%$(RT?tLU59@`Y)z=q#XybrPuzp!0+PmEpTP~ zf9X&gNlyb*2ra`knOJt}-;FyWMTGo=>3^%M*TK=wcD|FY1i`Js5>>`VOMw2Ke9u(dj=8^Ir#oi z9v;lF3XG_YPCAOj;Zuugs8kdrAT#aM&wz6bT2Vv!sZqenAICddf>#^afU9FTK#0N( z^X)PIw*Vb!Ma#BcQcl1C7inoZt50|R>x)35iV7!=;%?RRQ-a*ZiwLz8N%T%jw+F04 z>jU(XJJCo9*d$xxFOxI!RcDdcmeV?aFoIg50-NlBC~1ooXXx<4G>H|?4`}9Qw;w=a zuFCab&{qO1^!T=H_PAPK)eYJAb!33Bg{vf2DGVI{#rdB1mM`F&KKFEK zJ;i{PmH&jk{u|hyg^>J1k{maGWgH&jXdM92Gw3@Z(E6$`0fG&`|9fb;vKj zA=-H3+JWQ?=R$!~g1JM29l66BTf{>>70%!f(#SZbTPhdI3IkH)eYQzi$U=PRUD;5? 
z6HIh-OEm_KNecd)nTc!+B6Bs)VtQ(^ik%cPv6bpE^MOTx)zt>H41emk0LK+WX0cnO zx7TTc_cXvG3A}q>CtN|=d@9xs*fueie-#nx6UFW(+SNDA+xy**Y5Li5+Q#}Z;yU1(BY2AdxVoPchvzl=s+apm3us zc1GT=?G|6&BVAXr7IeA9Ret#d?PaEoR@==R%G@!%o7zDld6NqwWqssE|Ob^1KEj7Zl^EjjJ}XN*=M=Cl|}UIDjwY zcIPK{>^f(b010rkQ6v82~2wcnPGevc)^7sRm}LzOc*tK<`$>%7pxeP$j2##`Jmb zL$6?hOv2p#ikM&xP7Tpe<}Bg}FcJ5`81E9nmsn&syGO%c5YaN4!IEScz-BTYBl|Yn z8KHFD7pLvT&&eppCzo*tJ%~Y8Uq)K*0b$msiX8q5Z?5*de-~Y`fv;fcq(MFNw2SNq zvKjCI`AXMU-|5<(3k1b((AL!)auIm5obI$wMIRHfGm5H~3%fcPXu}>z+6rrj`MzH@ zwh%&qigl=%Xo{i14i5vf=OI7T>9wkHtDNzV=X)%k%D43y{s=qo+`c#?=v%t)La6oS zx{w7!;nsqDe`v$kzIPano^hLzL&Lu@7lX?R7yhkE@FCK-hU~XXqw;Ug@U6zNm#)+b z_;o`&p$zt|xQuNzspNw0w5i1K22%&B%ve@{d2^k3K-@SKAnx$gb7!zYtSEuyAlRi* z!*Acc(S10%vY2^$cuNmb2WkIN@SW|}dAdCk{DDnpZi!G@J?lteB8;FXL?k9;a(8<8 zR}0@Qf0mQBGOGK*b9*WEtJfg4l6{MmvII`Obc3uwT~N4gUp?C>5@=49i4C!y2KqM* z)Nkl8lCg^E@OHl}5gR@OSlLr=3@a-nQx^wlhu1T%=AUt*x#n zru=BGP;$AnU0IwxqN^6HPac18@_hX!Nu#M$1!Z;#dyHlHS~Y z?uf09IQf$zWehF_ct4&g!I2FZ%T+$0u-;G+C_Kz)%TAxAs zkgq%Vu#l39d+nnq=SdI~NkC9Ni@=-j%4XV7Q|C@xKPB#>--#fM_YC5}>2rrD*Cg54 zmZCJy2z9R>OUleCrW}+z-~d8M4JDRt?F=D_7sR}=kCc#e09?M?P{PEmT^=u-J@vNh zq1w$aDXiXfIyWSoBc&a{27tZ?aniB4yt3>)+TFk552YqJ+`w{Qbzyo;#Np$N@(GwJs|$v{tXojD-9{2J0&xiv?Ru zV4g%8IWi%s_y~%JLwwaNu=eL%!bGqi@a63WFPP*|2Jc(Q$UQY{Iu;=;h!->xUDGz2 z?(6={C9`$#BU-zVZU^}mna->p`n4~CChw#KQ`n|UFYXD7pl0BipT|eH*usqUHA4@v9L$;ji>V=rAlIH(|_^P6iw@!UZ+Bjpj$4 zS#>5NoG4SLI*o{*vS-UDB#ZBKI;c#n!sZS}Xj%91`Nw;3SEWYyeeOzn!Rl-nSH&fc z)L6?<<{C{;olfY;v5c*^Ys)^H>r5faVinx#Mszf{O{0)7v4MAyd@vE{!foqCEwn5w z=knZ#Moou;)8fy2JO>CRwj+Pll}h<`ywkT$bX zy>iZgpW~PyDU`a@s5XL;I-Q|_YGMSf?JG-!2lc)U($bDxtR7L_619G4NyQ!6MJo0* zQ8VPL<&J_0AEXzE<~od6dgsn_rZB?3`W>v*L|n zbJGv)Gf~uWemkqG)3QZ;U3G-z~p*Wqj%Y`v@v9QofsW%x8YuaA-8O( z@Kb`LX(9YE2g;Muki%SCqt0A^AgFxdiDlaR@M&MDR>7;mH{r12X{tz-#s9l-hA8My zM$eG7XwDB=fJADbtn`H?{rW3^i<`YeTua~I5gX6UbH7TWwfX|dF zY9^l|Y8HJG@BV3JdJVEeyX;gnX`Zv)pSMkbU{+bg@25S+NAU}J=YIFR&YB-A%3H8b z$21trmE?n#`9_3to=p{l39mPC&>WE4u@@%%snzY!sk7q+zw?8U_UaCENo5$9vX`!7 
zpnRBu1m2V)C&*vl;P0ZR!`&Ef%D|W0PZ)?QAg*AyUQZGEM?#qBAKzLHnxy>Kmm-ej z-pzxv{jq@0$043gmscRvxcr5oE~4)S_NP8U+c1`UtgHe+q=+?<>key>B?}%fx}b3~ z8&|?5arZ&r$X(c2xZd~g+p62hIVy3BTdHcJs5IM;APGt-JSwFd)PkwOg&3NVQfoxN z5qH$qNu?`;LfIk?{tcGUU_Z^n;0QS1D0T(vKPA++bl$8q0n?pqkz3Dy**h!F@px1s z99K4T^AM}5J6?P~Gku`y#`}(dXO;|Mk`}dgeIxnnhYzW?^+WMOXSo!dI7|T;=I7!I zEikjUk$d2lHRmkt6ozTDCBRVYAHX7biJD7gt0<`6Dz7_nWHj4kgrks)c ztwXowjp^o)tAj}qpGnIzvuDbI#$nmi69)l}^$Kb_x`T6u4tr3}#qf0k22RN%&{)hQ zh?sWn--uyMwzOb({Z@y?6}9JYC$G#=qVre$sN|w*u8xZL6C`TcszQ!zAx1q9p0+mp zG3N%jceZP%?R*D9mfZ0`&=-B8zdhOXI7Ff8w4|vJk*03)Vg_deI$g9ui|(kqVI9hQ ziY=mXf<1l+FWqifmS%FUbTtyCxA0mb#49l9C~-Cz^%2|+DpXPQKeK+n{5^a*hz2QA ztfA`quN(XBII|2tA9Six%%`S@CE(gyQW`ao8ceK(%}m>5KSams3y_dW4K3Q(PKa$BtS=2FSlIUux_ni zYHcjHd}!ZQoOBJ_a4m>FqFyF+XK}S3fMK6PC+@qF3DpG^rINyT;alUuyrmAxA#JeP?|lP*1|dH zp49JD{A8ieIl4(EDI$f0%IE2ySHT;;;rkcaUf`C^@1=IDU)}}i$V3UUJ#(ZQ>l9rn zv>5P=tM0QLCt)){)fYCA1R^B5DF#`zX1#~L2R1)?$_{9r@KAKen@;3q9Y@ZAb=5zLOYB$l7t^UM|lQ*VPM) zM#!dGly%>p(>-CTjC2mBCtti$iKASlHGGTe%SXSY8-+N_A&(lnFGn`R+i2x=H_ ziKO>xEs_~glD$KRxCrryf3)`@tp9w(5b@ZOeO)cx8nW!k8nH<`P*H)5GOM01r$@1z zY(hcJpR9t1+-W0xmqQQs!cRa29uBLQt#Wz5AV=7VI~9$A4w;D#pexuxLqi=H=fm5b+jI*p z7*BJu6?nwhbiYD>9rpKxQ7+9+Q_fCNZ}B2|one98x$3aTNU(oOQ$mPXz=hpVFf#Yv z1uvioD=X7A07Gr?a?TJ>LJrVtJuw}J08faoDshs80>xT|o=`k@errJD&D7^Jv>)JK z|L}FlO&BYplI70-kZF_mm7|9L7S(HH_ei)VXtf?=iFC|#PX@7Tz($x1LmqxKfa1p| zOUH@)!z&wraIC0re#|mp-jX1@BZ=e&L9b&q$Docg*mWu>mp$CTL^1j|L}6(-TOQk^l31ljG2G z1e&Ts#-KrbJCqx$K?5AFN@whJj!$rG!jXzk zOB;$S1FUu8Q)W1`)y2E2g1pgBrLmt>*!m1$`>(9*{~AT=G=Q!Gu& z2@@ubG*!Y(kSGqw=&bnK2=s%((saJs;BY)l_IzES-5Lf`TBh^}Twau2+y-!+Q}$o- z3CZWzueFmA^H5#4&LKp&Y)N)$up>kmX1cSL%!x*BzSG;D$)7%vfBR+B^KQny?&FWq zg4z|B^>M}c7|Yrj;}AQ2%1^F;<@*yDh|>_)Mu2nhU2`Q}Qs8hf-5wo{^P!MEez36n zk1_v6iN7u-8Pxwqc@;Vu(nL0Z7!&Q!HarGcIp90dLIY$HQFhsmiXf@nLi-l2mEoY{ z9vFWacr!Htdx-3dAL5<6zi4uQ$9Gg>e7(n=-o6(EEKLm5i1O`0b0{4VF&V$<=mtVl zg`-g7+vu(z7~`Q%#lA6qvKJ$Dm4Qnmq`9@$3SM6;%yZf+&0k)>{KFfI^1^C5AKnxG z`NER`{KjML?zCa~ktf%grYWeZgqJFa*CKY>0yvBoj 
zS}VX+29n~`8P8QsWZ)SqcobDfOkkT#295|jDL_WXTcTxtx$ZE(7U7FRLIF!gGaESI z(s8N2r&LkTPz{O;7|qC3c(LWgP#P5p)8rLwEx4?pc}tKmLb;2kjwn9}2cBMdLJ@w> zRdWL=>U%TAZ+_*x5TS6oMO8GOAEk~(p~PfCH(`YgXF$&$y?AAo9XX*S+lM_a_;B{B zPWen)vz=MbFS$^0r1@i$KdF~^q@-?tKzX+`+&^F%;1*`4imcn4ZR*u{vgWzin|m09 zd0b+cKz60;f*L{SMG;9wX1Kov+jIK;4Ma_rHbyc#b?auWECOJ&IcqsuV)BYfuk2P;THsZytn z-VTqg9qp@A|DPmA{GSynUbE>1i-H{xQTR%Ojkjr58v87MX5BwH)=P=$eH8%xXAe4a zyL);#0GfeCBr5xV8d!x3foiYj1Q_#4Bth`DMdDqx4b;=YFe%=*7{yh*|$D8 z*`5o=PK`tX1)jf#(%kHzB_z?1=fIi%x$039OUh>(HnBvcyx0ncfx&Kt`(AG^mYC3G zS@$v%{kZ?(LjZX98$Ui)*g*R+%34fYvT|!5S#$PgS)=CG#=a`xFtYv_t02^yIZivZGL&bGjr~( znmKp=qp70Uz4qEmpJy#~F*^jyJCsN|;%ZYAl=M`Ou1*mijR?Xpne3Oro}qyw&gd;QgSRHERt=pvV*`r{-}t5@ zUp({P;oR7vp!f@dQAUR3caD5hYxgeN4e#+X!|dBGb^O+A%Fgffa)`7C49txOF#oix z;1wP^u`kGSqVOol{5|G-nM3qY$I{w&z8804Qp$nkR9bp(zI`Klm7!-=mvr|-UQ%=t z46g`i^e;PPpR`5R`ozr-n3Wavf+@Spl6s?SC z8PEyX-QCrxaUDr&2*@y($OHT%AV#VG!`rXRJRY&8HCrlZ6mXT?KZ_fOa+}JW-kh&# zYv1LGU%syCI<;C35Dl$<%Xx*RcZDzz_w@D+roc_WqcaYUb=1acLALQkQU`^w&8aW) z+#i1mi2&%@x@&DJFtHM3eT$47GOMA}^P@^bXI6$U)U)1y4b@(!SOyBxm`^&p$wxW7(x=PeY_y`E+YkP6dIKW^JsbA-H&$~`cYVDR7fPyYR)7_oX zQDs;C36TlGFE3_))|5+=sJ(CsU2|`IuEvG@hCmrH-rpa+N^I@TFX?zS+>RESIu7Ln zZ3e56>;u|b#o#Lig*M3yQI6>)vi|-6>cu+=J5IUZqbKYOJa{M3ef3nkIY`TOO!7|3 zx-R;sj60!U_N)G8izL6tIvgA|;Vv+r%BpXBn4vuWe8(Qa^K;_d*+ZZ^>RKy8hjz)t zPelIkTlu>`cR4*IOvXGe=v%nkuGIc4MQvGbWopncOmP@#eH0%jHOfcX z91oHy0$$8Nthp#NPpya{ygCgNz6vv^3%ut)gGyWi>)vie9YuX`=O}-gqdyVfjK9%+ zGxoMPl(EnklJHAX660?DV?+P%jVcCeItsG*@40;qgP-QbwR#-gS5-$}R+{#_{SNuo zKL}d>k#^BN{k174u3}LpC5-?FE}ip{Y^S>VM@nF3u3YTS50F|H7EU<}#`{I|pz%sh z{gFo_S}KbD;$U_llK9lXm`6!kzAIGHkTJkF-G~LZQPZ0tP57f9<4tj;>qN9lLM6&Y zj7)<05v5qT-2=2--UChm%d##;ndkHON=?E^0L>_6ty3UzoBhcz`SGpMCn?88mBD(mY4R1{x^%9bx*dcAbT z0^RRzOE$Y{PuII%_8VKC`_#9>eL2kM3fliXeGuRW0_b#@rQ3p+2?+b5o&+Vbk=2F7 zFjal_jhY>(mq?9{QcU6SP=6>~T(1B5%;8wuW&QdAzoqC!j?AwnL|`2j0tp(X)-^*x zvCwgIY67hLl72zX zp}=fR`bC2`p(#Mt5^SmlJ)i$A5Jife(}RpH9v65wwRg5)o%}^(Kukv~SU^uf*kDlF 
zy~jX0mJ_Rw*QaH$KH#BM6v!kcTfvfxY%F*aA70Jb)>VGw#o*LzU(fs}GyAfaPP>dtWTHrMZNJ%4=EDg@ReJ!3Uk@%D7#&CqE2nb315&jNghZ}XqSG~4nYVvfR??6RpmC3?HDycH z;|$rqh0`8AY(rD49K4+4=~l-Qi^vsGH-9*9bxd&`a*PY`7_x?a{1CUpbhUVEgunKI zGVdX{h0{0G4MZ$iB!h>Ax+khCNw6@_D_Gq_9?XZtKyVOwL?0nQQBmlrjs|QfBWgbO zMh)~ll>uHJ-Pz_{h_RL@w}F&s^;@TCglXHL-FJrzWsM})pLBr{Zlz%E*HMGtd;;k3 z{Q^XjOmBm`E=go0W3hxZI|vKsX*YSeR+f^Y8DW}ug;cWNheTN~*(!WfCWKIC@Mji- z^`dqBE#dPC-w0yDZD{h>g>v$Vm*fbZ{t|R0oi^6Ew{(W&i{&bO5YA%w2#jQ+Z%?*97FWXgfJ`jWU(((&d0bm4ec$KIiO@d|M=i)2<0{i4 zF?_#~If(R!ntLCwrhvDJT-9{tdCvK1ee6~MTXmiOSEP`H;edghG-FCvhSCs@x~koa z)Y*WQ=s{3lji6hNcC`&&FEl*1PN6?&@6OEcvvi(`Y3$gARc>Q&3EWsTW|GCj8(CAj z!h0gs=?@^9qqHez|LC}{*R3ZeA&+I>IH*rD9!%e(ZpoC0xf)^lHnA;PN7NIAr3M6O zUN$02?|$5S9}=fevG8VR?<=Oh@Q21w;Vb9w5Z$lZZ}0t9_I2_~#KD4eURYNepZKgV z8VP2yq($iDMK9h5#AkNaUE5gjEjH$@ffzqbu!Pj->kk}Ph39r-7M%)in|bvd4CqhOuB&wNx4}2lP?1m+)1XIrEGaqH>=lbH>tEPt@73ZS@|G1?+ zvXVRFc;y7lDEeL@j{n+yAENeU@BDp08XO&p+n@GaY5zu@n$~Y9wj50U-1&-UWQQ9| zZx72(bWPWJ4z{dq)uk8FmGTh4a03uvRcUXKdyH0P#$uE9{>-?I{BN;QtoZXz zSIIvlV!=1QeeK&Kz=S95tfpeYY;v7M1}(ht!VvR!D)e0QblLNq*eDUQ7Pt?4DdaLeg!ij! 
zmgP!*Yo@zqEDXlXU$}VG6o`6}H{D5O@2lDy{q?YoSZAbtJMm@|nm(3WPi)D7!t(T( zNk=8&i&4*ssSPhQ3&!&0j++8hhuHm0}v>I4>acGBg*qT<7RQhNHSnK3Vq zXBAnEep5j@%lVnfD#oXLMM6ZU2bv%rALl7vFxP!5#2M_MVCR@`q$HV!q~^&Z&JI?_ zAWZ8G)s_v$h@GN_Z}45~V@9qxpX8}{reG6PKETrHM(Q7Y#{11O-D8Hhgv{VPwu;~K znhBgBzkBRkZ;)H0$C&LSB z8-4XL=r$mc_R#OPyE%%!Qov7yopI+r@B|3LQ}pJZCcZs94;ZnWfPud~unWl=^gu<4^6Og`HHn^zU>|1Ij@!_d_i_+H z2-8oI`{%KLXlPblH@m>V#EUu@d*uc|1aHgP3%hA{K@Nz`ITWR!*h`|E)xL4`clwwJ z#kPuR%Hpr3@HW}GHy!zvLDQ~lZ7m<&(6UYwEQaQn=6&`A&O}VQg33Nmpc#z!$<wi;Y2k=%}4EHR)rL zv*I@^^{Dh}PMf)0m#@=s$iZqUSb9sB&h2>eDrZT+J8xIG9E?)93!jAMqB2}OTS+Fa z3*G78=Ghf)h#iRikT)ujeT}^t-GR+ECx~VPc_#fZK3qthIZ16pFbRUNSu{0KLolmZ ze{C=;c{>8Mpl-_k2p!LO4@mHs^eQ_BFN;EThBCQ@pdsV)Bk3~sLTwxccdt*g(t`p; zZxR0NOGJr07gvO$)jOQwHDMc1_Ca8fN+|)GXw(!SiNVQUYuIweSi)lA?p+xJixZ$O zz^1%m$09Ly*zF`BT}E=u`kBEjNlnVhf6OR&0ngqk-X+AG8Y2VY!JG-fBrA#cSE|szul_6QWorpCLhBYs%!O)W)*be_V(mp2^79oc`f07t9iWq!0UmL zH*nu~YWlREa?O=>&!b!iWCOu;G?=`R3e&->Uo{7!r&6LRi7~O3T6ea(Yl3atJz;E* zJu!&V0zYApBl$6g6eip*R%@&le|#B_Wh>K3$J^86|fujZ_^?w+1$rJ;Efs$QzlOK7a% zOO6Zin6&Ax0fDC3|8t)_2(rLzziyBsULiyeE;)GJMTh?-9h%+k{$Si2ce7XjB=r^P zw#s-ay05MYJGTuD_&ylp_87%=my^66WJ=xCRD4yGEEZ#DyiKc@qR;2&ryG|XyRgSE z%!Iu7sUbAv6LzRc2`o`TXYYBC>ChJG5wz6xzu7^daL5H8A~>#O;Rfb59)^{0aLSj^ zr&m8wQ;k?I+G_aR{@+4B({E<%r>63YQLG(ZJe?K88uF9zzdE5is}+AD`y+VfV6^ta zU9)f_%zrZkI8IgRSdLE+EFRvJmQBD255-b^E=i!H%JdVwl;g+JaSn}UUJBo>dtQyX zosy~D>S09tQYFOkSp_TU?U}^8$lD*)LQpt+@%)i>@FF7#5AKlC}!dE4P}kZ58v3RO<@Q%upEk}zXN(fGSD5flS3 z;MR@X!Xyt-V?0jogx0CP9i5frGUeY2`%wZ-fK`2FkhzbTqBx06y`theZ>B*CipSf_ z=xrlR{2es4^xy<>_tz2BBR3)7lk#0IZYfEF-zQcvO_#RVB&PM?!^~ z#+~W6(l9}za@t;MrP*CDP~3X$pxJwmkD4n3*e@@s#T(GTT%b}5jk_#diQ%>G7$%SaZY;^Z?-i_=W(4k zkU-`|ktYh5t+nuG48EeHM-isF0tJ>T+ zCwcz4XVOppklTM10X0=vlRL3KHnuzoz0WDijp65x%bfxvM~9uYO@I{}onk{6rt8Ck zipe*n@Omk52GN>tWYje%tIW67MVnwy3(pr&jaYVHoXnQj+Ozdpce1TBBlSwm@>e4J zcTAg{dl)@RJ2^t+K#ddLwz!#RycMv6oBXTf#|O?7qd4AKqXhYdd!d z1h01No%PU`ow1mR##w70ZK|d!M77Y6&sgakT*i$=kIMyMR1l9TNrFrnK3%qY=$3p+ z$59kBo2n6$Kqd?GAn%;TMv 
zlha14zeG3Vn)7C|0f};#O8H!!MkS5OD0PN1v$WZy3h$$&K{Wy$%Et`D9NxpEXSrTN z@3nv{!;n1<>0j;w4g?27kB=EdOr6i~dI`&E(E#&J^rO9HGd=La8w^}W+kqi1l{n3b zEZokg@759$NW)ccca}p1#gD4YUPSVIi6uVIdDdT@VenJRS)gh)^6AYnhG*!Tm$oJF<^P z%N&>6_ip|(T||}y=Hvc)*6rra+t&BI{9<`K#zKgyyGa?_ByhO;Ky$N3kz7;qAgyC~ zXMnKlF&U~1h;&hP?SHex85ghhcAm0@<3)wzZ=N<}TF$$P?aAL3TARKG$nC_>2*nxB z+pi-Gy)l0zoly~LQo)-Go-Gbg;3TmkdB=BjUiAnqe{PQ^hiA5b7-oKK)K`u4?hM-2ay$ZqWiXVcSp@;sk2mVjS77Q0_T`pa)$TmzZ9TkkMeDP2@J7jUG3+*H1lq>(+v zyMyDb!9NMQ^+$ksP8Cf0Wt=MScO4VDf4W**zpGdsOnr$CZktvP!9^G)fpgi2Zg=Xs zXX|lgDmZM-;*W+|1U!$U6PUa(Z$!`HBBnSl#26VHosUV}wF7B?$qMYds-%4c38Du) zfAm?a;{=>7qXSY35aS=fZG7}TzI)?+EE@3frP=v17{A3BG1%^22mYx}I!{hKk_u#= zoWPP6w7qQ&X;V#n3owg`){sGb$+V5x1j zs?WOoGzBe1l-6G*827*2WuSZckbN2boI~*EN0KzVK&Ui(!KDUXER{wf9?rYpbe8^* z$mM*uG3UR>yQ(H^~Gu!WW2PLj=c_JoCg*ekt-NpF-!0xG_s;#J9V?j|rU8mG%=9zMsfW zABF?_enxcG<~$E>grL2$g2B?=PhTgx; zMmPAeck=Rf8QmbZ(!-f`XtmVa@-w}T1OePIgXU<7re@$An!ENTAFDRex5h~<8EtKj zjc{8G<8!ZuD4gQtk+gN8kLEvK1F^a@l&gcXnqQokuDg>mz=X}P%~1IXkHDQOk#nwj zngbQ3i-o9q#lYJh@rDe^SlHBY+tO5IK^N=C`&~tUz0?P?r+ll`1#ewpKYRoXgELx; zrjF8}%~bI#G5BP?B08q^GZA0U4?q>!qILy#-YReU4P*&%PPYY1s?b7LZ&mqU-I4p^ z7okLP=QHgQ%+&eaW7Uw9fYD?DPo0>blx# zb{X0@HMLs<+mn$hoIZh#Tv>M@I9NLGUthS;hD34P*n>*``b zacL|y&-h$@w=CU1jj*)VxLl;4F;@Q|Mn!TT157m^#22Su8(u+2f#jErFK3e=;ztU? 
zOBlLxYyk6rWnD7Hv<=yta+wK^aKFFu`A`-&H<%kuH|C>`v<+aP1Yp;F5!| zeF;2^j9{m&aGAB42cOCFTFM&JEe&vJGD58`sFxxuZfI=&LK1f=fqu&O2K_dbBYrDu zrknm(d}c;Gtyrovr&x7jcBbUVk4Ou$c-}f0$_s@w)Px_R?sn7F64yx$dRKMQF)1ln z4vmnwI{#4}z*mOE4j2|{z2y+5h;~!NizXP?wZjCND`Jp7guj7Ao_X?sKO_~(*M>RF zsm}_Drq5}lgVWPLaD48)^pdzz`ab15qh+Hmq=sL2BH7z@;B^pr5@ao6m1}(yf-u_N zXi`Q&l;8~H=`LE4BQIy~mMK^*4v z6bS3^AZ39s?k}fbpTuTy6n}#|UzuXlIV?WUgaZdXh(Wh8X(2`OPeY==j*4roE$wtmo-%_}d2s^?S4_z5r?J0dIn{>?anE>b+nTh$j&ikw(taiN^9N#Spjkm;&g5_4tupJZyL3$F(P0Z+*ctoC`K=p5C)g zSa)jA4L)D0g|6?N24Wonquq|gNrrw~7}Wy<>5Fzo>B5XNbrG?^qBTgM-dD>!g8z^g z?Kz)(z}})=`_j*2Ykjksk^PSHTc{!t9~$#N#T5c07)oz%38qIEth1Z7s~{L!NPaBJ zDayYX&OB#cabpI;FUU{>F?Dz=u^K&UEJD<)t3^v0Akh=UxfbG=vXRrUue%B1|9REK z?p^(Dr0DKvr+{sr_z(iOh=z}l;Q=J>9e@0qkzL*qqHE5S12T>)8@yWOD>aRXdM9KN z5uC-nksvE%Hei_4w6*CP547MpV?=+;X*V>7A2rwFFqry{XeP8YA~a4+M#ZZW4$31& z%P&;#t73VI3f=fFDttA0-iI`8qBK4}&bt{-$7b9=kj-NDNADrkjlwFRjO17Hh~Px| zmXKT$JF^6=ZllH274{DP;sL)D81}_1wQL(X(tJ#Wx9JCg?CTQ zWvTc=>#?xYoa)k^T>0s)Xzjh)7dus2AED_XyL}5U_5?pyrw7*O6J*z%dW**@|3EzB z=ke`C6%&!?_r1$yKURO!*M83{5*m7Z%^>m9pV${^*ZAbf!Eid)o4rYu<4oW(d;CtU zbc5uzThgv;NyE3JMU1Rz{1ksvf%dq|G=2%Y6&(GT-Tv_B(@_l;xZR!rG-F)s~&*}H9?y!2fg94un4A_bcd z4IMgCjlI2)GIBr&bHaS*2vIW~>lSpR5Dd;E_eaW;z|4+lJhrHLFXvDb{b{9w(ythy zF>|HuyYNRAtCe|%<8Dh5%`kgTV!)jJd_R+gY%zo!gLx{~YmSdd+HgK`O48~V)EeGqe`GM3|7zK+h`CjF{{pPo@S$(V|wR)@jZ>p>Zh0UQdIPDUZ} z-{{wP_djFlSmO_Bw5E%-z>7uM_}mz`A|NNGr9#EIYEr$Z~HLL z7dMLD^QlX|?W5>7%5N zmmI!u6nNFqEnjC#Fs*m{+ttxWk`{)CED&dUmiy2$5^3$WSIUQ zGE0Ooqrq^V*owk@;d#(q62Jj@!}Ufs(Q)xe_x>BEXSy-k*n?$ZWO`}P%a*!ve^Y{J z4e5pl70pJS%g-R5yY{L5bcDip|H+c4OkK?VO5u{?-5(~*hbHBYg@r>gyIwd0PKpxN z&zrTrxOwu{R?rC9;aXyTG@^Z%9%5xEAu{y}4MZyazIKub;Zk>DjCvkx%!9;@z#fBN zNSXuX{Ii>szB$mN0yaW?;1)B^zqHKdJ-Aj(c>E8ZoQg@pYSHfmydtkF=zpEnTNi2h zFYNp=xh^V^!pAfw0h4t1gt9grQ?TuvdS=MyD1)N)WA87gVH|&)mFwAC0;R(Tfr{=# zB1Y%2o`@;mGeY~;QwIQt6Qvr8bvG~084R>dVTrPqE6&DHr!7tXdk4iX?;p6bifLHT zw_kzflwS|F;)4-9RxUw~FQFV8$xxO3yO{Udk(CwgZ=pY#3wxfA3fp?vZ%jALS(e58 
zrII0~LNx5>XS6ks@s?r5d>W#o39RT1qDs7}>P5ds#)$t((Z)!xigR|sUyC>Qxdz_vJvb-EhOZWKdV?R5_9v57) zl*yAoYm^cC11gVyttU?O3d*h_X}pS!XZrWv^cj$=fcl22B5s$muNe-rKS=71d1lv6 z{gA(*_aH_|oM!3Yt9_w6M#>Gq|Md65s)r-={Oy%YBz=yXJEH%|Pugw&D#(B?l&!ml zDJ~F4DFIZQhr26V3SLB;Fz9*V7KEI^xDfRD1&Ls`&}DZ~UmYAsH9WnlxHq%7Dl6+hSb}JaaLUfMM16=FiJYVjP!vrt{v@2k#ST1dQ0OO-a%#hmJfw zCI~0>f|Xr$;LOpU#ARn{=$EaDe_4&EMIBCf21$@ zwvO?$qA-70K^`}05f9Y+gNBY8lLoIh5U7Eo|HW{ZDge^$Jt~>djm+-UNz0QRyCn)z zJE_LS!47lCUz%3+1xO8Mjub+K!=K&PM`F4outa0Yk&Or0@`ekn*IFEll`qWxqQ9MJ zaB`#%6HtgJ?IA`E2g+uKd1w>_{K;ROJAvZhG;5RC977fd`VH(*ZAz)o^|umb^obW z`nW_Cz_$PzR~G@9(?k#ou%sYZENygCi~paI&##jgz|Q2O)?3{ifyZ*ozde<36TruB zNahq|BOl_P`W;%k>G`q=($9j-4yiko#tKy^qNRURiZ zG&A_m)qEg@0H$$&8n#4Km%tfsu8!v66^%2y&7$hvt5-pCWB$4V=BQt^yQZ+ zcuhtmf2COW7sh)kf$?5={lC$NNsuPtpX%ij7`n+~875LTG*lGAhEl7(Z)7V1Jr?&~ zYm5{_!1OL)zL23xdO9W4VsvJ@i$*VHuEdT6ZRfEL=ti1Mne$jA574fK=t9IBmmo3? z{7el}5I0B=46DoyTOIn9hN9?lpV&YW%BeYU{VcfJ{NhgLRb1m6t|*P-bI~v=>3d5^ zWJ(Rk{8_=LnJ5fF_TGvkjDd>aT-53gJ%j@e)qv7+0RZ;??&1SYlUzLanF?(s&Yl2d z4!oc`Og9byOS!J}edt!H`A!<{Rxi&N(NN+M(@4aN!cfT3%S&F!$K#iE?R%i%K^1<4 zwAPDxBOY(^RJ(Vciry_^K$SLX!JwndR3Z889!0TIpk~^DS4b8+r<&?2qK9xN=)V#0 zO45smyb>kPocgNQmH~)nzi@Fn8jSGX8A&%~%K*Jr6>7K|5YapKm`iD_XNT9PU6)xc zcdJCs4lhDt$d=#naU#OD>s-gk*@szWF=2?A)o# zhXh(-;|AUt(5{HLY>&27L9Ci5yZBLItQmV%NNxc1bh70j@V1{L|7y!2b|}jz9C9Q8 zQfE6sl-x}X`}WH=K zU07QgcAzVoM8~N}%iWnR@U;5j1ImzdN9pQ=G%UJ5Rq32=kXvomq&tNkzfXZ1kC$ay6}(HFn=Kni<2 z2g}h->kG(RQ>aK|}Y=(F=C$Cn_;))<^Fukk(6H#aksdBS_FAD`FTI+J7}54u>a zjoto?nAk)mOlAM7vH_`-p~7&!K=Rc`vgX_=eS5G^w=%_ynpe45KOp zgg99@bZk+u`*{(4;`0STC!)C?EH96F6CNH~uxBhMe$Oz)qXbIQRQr&KYSO_?P=!Z@U8|cZuPwC% z-10sjGqD&{Z5pv%4M6&&TZGDJaLyGj0r$L<2m|JCFBepR%{s0v4E%a98bqS z!LYvg=VpSX1U8f<(&5z(znx38hb!Vm*7qV z+RR_8$MYLOwjP^xJL!umuvwL4H)nM`<*Evi1{a6A;Stg7FH{?FD{3>003}Dbo)IwV zX*?9*mA{q&PM=G9GW!@X%# zAMM5eTar83D8!9@g^J2ZM*PzqfWZFQQb^|fmvJ{aZT!j-*rIHhvs*SxiBvg~J6=mw z^%m4n=Vn3}!0;6sRG->koR*<0OIM;7e1HD~@iCbeYI?J(2YVYtid3;^?WlwKXotO5 
zcUdPRSwsAmA2-CYi2iR6f+E_=$AuM!Wd%!WZ$lcYw=*RXAooLmG2KV>`}0d^<-z1@ z#pH0hD;oND;n$;b1N#kM^zKSf6$ek`u)2Y;FQWa03ZPZ~2FxeY7;!?e<6n~HtExBD zHZlO~M*kR2lisJP6pRvKRKJul)Hu5FeDvZ65}*ixn}c<BCdgV+(Ld7y_$Qvs5$LLtIc@%9LWRF_{*UrA!bIqY;5ep~`ATd1Ra=(vwT#`AXW(^; zp*~)xTg-poV}l(lf6D8!tqMB2C&QDb9F0S}OJcTRU)c$xghPeL8d(ImiPe3}==5d2 zvW}$DTSI@mUndN_O5OUBrIVpwYYWnKA4x ziVS%N!f;VP>426u^7d1Nlrl2ee^Yz#rgh8in6Dz>PUtNhu`9zO;&$rh9Pj^n=W}&3dbEKm&2x`C$Y7)`aqrLaqF9!qte|WU_qFQD+C- z$r&TceUv4iFXE0iGvxCsskLnaD=HO#Q=}g%3|}h-DCq zR9!GTX_FZz7gxd68ybsFfl8%1m+>{Fk|d;)Gbh(=Pf@&&glMi#*%+)+zM>Ix=Za6& zrpI-`jyKnD0FH*ND&=4Fwu1M|bf+xib#}Y583V3f+%e56J3OTb+;YQ>!Lgv<>o!W( z!&I5qj4-xd-rHJH&}PT`m{4913IIoAgOsH(J?}vmnxjy9mRsbsMHw$%l)jLoky@yM zT(Lv));l%gjzH_J;7~FbW*RnW6*^L>O6F%$TC=J3G;(Ff?KBaEL6MF4=m&KDVK7_ltfZ1y`Fi7Z08P)P*O|i75BsBU0IIL2N|$Hv2`K`2NhJ{p@gR5 z4&iWtI7KTtI-4$0_k8^XRCmg9x*qSZZoW4!xARar5gyRLpMB9&!k{&&>LxFVHqX63 zj(8xHsWrTwIYr}v(eFG14B?~<;0xz5qE|8d{GRWRPJr{Kc*0xJeEs*6bn(FasD<$J zIX}(*wVzWN5-liq<8QSiy3s$3s(cg@5=k3qPv_{)tqF0 zL#f3Wl9yuf42r0onp2?tJ`DRls{U~Ip~j1jdS)&(vRD4~+VNuC3e&p#l`!(XWNqaM z{cLS|=R4D)3_IsbVHxpW9Bla9p(EyR%30Oj;dIbF_eWOJSZ*;q%^3w!Q4-&a=MsO8 zZ*~cGnT$Y+o{=r9l5+$36!g-8ygJKb1dwkNuKZnA_}E5vW0j%nEJd@75W|( z;U^zps*RQ18i5>KGY|~Qz^cckfnT)|iLyD8WeTFqMF|w41lkmQ2 zaShcO$y4c0aqMH-_{Otr zx)|X?XwhVL{#6h)Nu|u7D#{qMr$k#q8FQbPvYX|+K5}0ZV1%fiGkE9apN!T&=Sgk^jRW4-q*J>q8ptHR9nV*kGLnBphI44JG!X%S37jChqNp4(6(gTFR_o+rL?)IZ=F5Q0?I(Sy&3inBh7|zTr4gzFjcPWmZXr-v4pfga(^PTcVeJhIMf}AGD z_a%Tmek_>Q*Hi?ok~w1j+Xx8n5+(1G(_!AEm(!>`T+r6}u{F#qEQ)k2kL8Pgnbo0m zu@D9a4r3$J{e>n!|9wpj8-={tLpmitOmo?FKfir-7$I1#8f?PS();SrN?q6pHA5Vq zDXxzX!e8E&^Qv+AIt1rSUON$K4Xa)N|G^C?`(pD-UUe}Y1?Z-+jHPyV?;)3I_F6Mu zk!ErKOw0v-8E{~Sh`;S1rd9l-746gyJP>tYKuC(03S!?LjjW_Ry?ZepM2;6v$Sv9< z!~2|qP`n97%l7s#%I(PDWo4v;ivo2*4$0L_k0!t6h6S8-(|N%1hMIbmTQ;`bjT-kb zo(~u&Hq}j#OIzORs_`}9;YSB;yMIKTThSWVsQQXfoP2Shvu1gh4!O=eY$BQr@K3+) zElyC7sVnZFQuGbp%Q%O zcb1p2*9u$*Tno}g&y6Da;MmN-aL$-UQo?W-GDL|W4p9HiKb6GK zY9RBl*5n1UiHUJjt*58+#%^+p+WfK?nx;UIepjHD)ATGrKijq_K|#5>o2)ds!-u%b 
zSjmYhN?y#*=JkFgwBbLM2sMmQ-$)m~^Y=TcW=(o)Sjugi-6dGFMlxP54D2RRQI*-L zd`kqdh2LREGV_dkCMjG#ZUa-Yu7mcY;+7o29=C%ppRdL0PSQmApD?EXwptMrmF zBTGAJVRBP5#^OS0kN06Rj@Rgobou(9hd!ic@~#f%63OsI5t#)*5d*+sY9@)NV>Uw| zrNpZle&B~((}zqmnprMP6K&p|cY0Nmg)q)p64G2Y6Ets8M{tT{}EV zce;x;4iK&MGZHrL|8^16^>Q_Ho4B>sSGs{JMy8zns)Hz!2V0gyjr;Jpa6-r<-8aJ+ z{FcT{uv*|ZJ=~8bj^Lwv^A^W_Jde6`>r*<96_cJTAZ~xTKQ?I3g)@KWGOlK#L7Xyo zr>pg>bscou!O8tB{1%QHprU>BO2jmHrq4VE|2_9-il$D11@uJmfHgIXg=KZd|K&wX zmvU$aY+ek|qSEdL=!FGozzltqO(2jUS=$uSv!A@Ak;m?JsNuL>u)Bq%f3LfJV$2w4 z93nWX}On2@|LBpjSuFW3TAIHig(MFnlL zJGHdH95jn;?^f6ggub%k;jGB;lyZ)u?%CQOWevvrJ6Q|`-WooRZ6&PlfSlRi$*3TP zSPCB@p#1v!rIKx*)xs&V<{NdtsP6%1NVmXiH@|OXEMMJl0Gx$72K&d@X3TPvUbhT^ zY)wUEIqRP0*V7w_1=TP!uKbY)V624~mR#8si$8!C7Uu9Z`wO7zeuULG+uso*=-E`} zTo8W|M;*&e4*&O5bvl&mKGc(g zHdFBZG44=)UP1_YQ5@crY!M* z7krS6bNz4B@vpo=WFQ)MV*l#8K(jvXi&7eA zQ2?bcZ#?DQuz>T5m_+w?8B`-*0yQ#0{`g$viE=^i0u%ECLX?t(CMs0fGRS?XX?XO= zDJmZ1=YvJdRX6eDWr9RPZPy3s-{h7W6=fPZ7FEp@zyiVkB2om|LuyO!RGS6>EN>~6 zpk>TlFf-JLhJ%yrd9fN=w|wdV=IRWy z_ktVJ9=hZjzU{Kd8KRLV-7n|Xj!-ZRD=|pLtk33?J0663?>YPX;SabC#APlY(90vf z`j&x5S*3kc$iP;2dWtA+Z1g=PWN z1Tw@afb59F3yWSUK&v>XZlBj5$lw_LD|3Qa{dZZ(vtt!On|?gW)f?Z@&VYvdVU(vx z3(c@3rs4hQ{B+(xS`V|Tw5|g-iUc-Lq@)U6eMVjO+9rQ|95U%`E)Bl4+7~c5z(5_I zHj}F&U_eozrYfx(Mk5JpdV3~Kgc_NJDBtYnC1u0nUn5ov1E8@0JAe&&*}*sX?!dxn z;erPRFo6fwCN!M>c)hu^j5D-TDdHJ{#@#xKlFO$KAg%T6+FsV_1)9+V!Ch`1{b71O zR5SHqLqyZlbd#a#t{eJgQLp!><@N5IMG>6UA^>Q)XhvV4p%r!bw?7dK;VLc^)#!4= zn)7}>WtA;Js%zFFHt#a~l3ETg0VtJzy!YB?fOByOSB>chAglj^4gXtE5^e~x0A-hTj@ll z)qL!Owo0?L5Gv{FbL5X0souD=c4o(UGzwQsI zhtPoi=~x+|R9$*L`QFlpE_0gk5{iDtNF^0%^kS5FfQD`YqJd>~VZwpST1Jf#@1%eJ zIMv2$Jb1IK#WkDclA>)NyK^XLi2Ucd65s2k)5&6svMb2$L2F)EqqmjoPi5jHN44c~ z-IfAvycZu0rVF(FnK@d=+5s?Nd8DQux=^POR>68!{Kw{`r z0Rf4jBqWBIA>Y&AbzS#+zwfi2XWi@hlUb~F?l@xaV}JH{N9eOrWTipSh0+#J}4aiHJTPjV=+TKP^kBL>@|*T&)i}&&i30`xN6V+N6;A)-d+l=rBcP zAX(xBai-;<7I*RbPW*{nn6i5U$zfkX%ouPI+zb(+eGWW3YX@l-=%#A;&`6bM0#pYE zKj_)%?4*%D{s5$$QFEvL%5cVK+e6_ztefSuJRPQs6PheS7G1&>FSJH 
zI8%sPBIG^M|GldLDAfr5{^i9W|C9uKYUG5R_jD(Amz?fI{7?HqS4`LW3WgTESg?pX6k-e$__0&rD>3*ZXc2Uz<>?5?kc5qdAL z+aP!k=Lg#tXd)N!HchR@_-bxF$+m3io&Nd{5}tuF<yCJ&yxewe-|7pS~H#HE+0jUsY!b?dqjaeNN*K6b{j$|3i~% zjU$SIa zGKKB87FoSYlp(wAx9I>bKX@YVn1(F^S6LsyIE{k@1{PsoYo<)=&g+Rg13-QZC{Ffl zD~}a`5vv ztAS6PIlD;91=+mH&=g_+)Jk}Rn8??dP^=aBP!tov$&l{?2M(HjG#@EX0}^Z&aVIo- z%Jj+)w2bOZyKE<0n{SzcgWdqeHT+-0`nI| z_Uf+qX!$GlCX-OQnqVB&rTsaiZ2Bki?Af>TB)Ho0XC;NAM1M1C#}C0E@mqjk=6N07 zHjonJHR_Fi$2_1AA5 zggA3*IjRe1)oNnWep9i6u#|q?lKL_Sn;wY5Z9FV7oA0P4jF~wwqKZ2@gNO`W<*59P z?Lb2RWPF#4G&w2O06L}fA!_t9=Et|i!;o;`nPt?U=u~KS124yZU?`Pbkiw|ksf{Eh zrN~mdy;|g`60xa&b(Z3P#Y5naPIl3teYCTsPM~7El{$5!POA%qy7cJJcj%U%m*27I zS7-m~CLY(}1L8k>j3x<+RP;IaC8A3%l9))|8Mo@s0e6pN0i~rUt&UoY=C0~CBdN^3 zdm$`rKT&#+oq4=p<=8V0G@>LtC0+r2#_C*{n2%|XN|5oGTH2zAEk|pC+|a7-f%`wdJPZ5qC-MCEccOO}oxv@BppP@J@V&|Rk1WSt+YlJF=z?55 zxtiX7WF5b6v{_JK)SdyhGXS%t5&Vh{Es*kt|8o5q0~8<9Bm1w?&qYuMr7aEtp04PR zwF?-nK%*|xo1x{PnJoY(esU{!AGWz4^1k&n`Adh(i<+w@l+I?c6^1)A-5@7~O54mp zOSzPf&v0~|5if$o*M2(kaNn`*7$$5BA0S;RVG_Xd*G?qm@RLIwJ1(GVY zG~*#V;gT}-5hb9+m_p14E}bg2)tgx1aZx}M+^ixDet+-2cQB0-Fu&e3yM3ehrd_R@ zZ1pwSanI6Z8%C&SY3h3TqY8w#XvDWzDB^A<*gvCd#01>_bXhoX=~IZpFna6b>c}08GX^p$;Y@RDf04UZfalOtugT4lRE%R9<4`{J1HHmcaq?{ovm*Kk2ZvJ}L24$EQkAbQ;=TtF zo>al)(=vuqP)72ij8S>{e8y;-J%+0{L6m=Mwd))##F4VpFG(tY=Ybj!SjT$zn6}KxcJ}%!AJHUwZGQ@L9QqLw@|4c+o zlPzep1Z$Pqn()Y0*BEkh%SR*Zx+hLU2=fjg1o9`AM&osY1Bs_x`E*9lXz!J;kDz_ZPP+7CB75NRvxnrF3x%Om zj#lo{H~W|3T|<{AI3dNtIq*7+(=v5(hu>Kgy|K0fcj?+A$1NYzqZc8kw75f9#Jga1Ik+n1=cp@CC}&^AA&ysz0ilV%ax zmE-Kn=jihD%pQ02!Pf+8vDrbCYqXn-HlOoN^TE9GX6O_Ls*l8G#&J0dv$XCwcXD@A z!EWM*(-sfxc_mak`_e4~yylfjmnI@_owMb(Tp3i|0be?{wO(jMrPbALI2JFghsz>B zK~z@-&AP#ia}I+R?hEhEbT91Tb9J?iu!3tBmq6cOA-`Zomd$6VDtTd;Lh_lJh6C%* zFS?65ZVf6~^Zg4i~CyjdWP1*x>PwgMHz;dQkL zKDc#ja3s0)b!D6*=Ot>g=^~y2Bya49m)+)4FyDup-*^vGZ_CW_⁢qjw>Q5K!; z!W0lhDH&L3CcA#2D_WsSF%RO#uJ>cl>U;k@<){Wb58BF({aZ12iaR2r#BT+s8FxNW@09d4 zIK6?Y0`k+hTKwJsOzSYv>A-8Y>(S9fuVoqLcTVyWB=~Qaf1ZMxY+@*SoVlnerF=2G 
zzubftQaD4-r|IydHnkvMwOjV*r^y0&U0p;`W1Zg|Q^q&NGRBc_@X;F+@VKS(&KOs& zC#^4qDjEMck+laZ+^GSs7&t`uQNU)_ky%OL#4Mw3dpFu4+k(KZmXwc$C5Dz3Wp?@R zJ5%=9vT_%eSrrytZ7IMqS_O!G~MIrs=?rw<&ZHjt-M{rZlAsi4$ghvXk@ z4##l%d#2)1`!^m~Rz$6$6Cwr;ehrXynE8FtAiNQ}UDq@>5K-*w3B1<@;QL^w298sF z4zJ)lYQnHR%x5MgU%NgxGtcMu;p?W05AA9!5fGl_c?+BjU0n*qThq_88ihHCKni%2 zUf{2<`A3%QcVH40v=9Qd|zF8Iu2rgk~*irh}gU#K_8{2q2n97 zBZiZI0}?fYy=@W@jOKKz5iHi|V`-yT?4ToWVeRj+l!WRwXsDV;DAdCWg5&G$--_7diqM3*&8t;gIs2>pJ zZDM%(#dRXnZBD|;jv_`+*`DH2m09nuEDavq`)&)1Kh-Q-fiaoy#slEEPi{}y{XwfX z@t|b*eHYm)a-dj97P1A8J{;3g)~!!ch^922L$31ZkFD08F-T$CIN0M8KX1#F;U_kC z>Idk$H&7}#aFYe#Ujdj^@2sQ%OO1z3?~D_elsmWvfx*7`A4)2P|1EP&Y!x`2boZ#U ziH@M~>}t<98yx1KOZd249`D1?=l5}_uSSo0+sH{^L=bd)}Fz|hU*IdC(U(X%g{`3Ae1m6dsM-WlL&$QnpxD!_=je%`@n0Xq}fP1unhV55M$~bO%qjlilT@nI|mI&Uq z?EUoy-wT;hZG*ybhX*b=;+6zAPs^{Tu0s;X00~xCO}jZUg~+9cPv#`=A;{gpGV7En zyoAhDSp`tiE#|=k8Y!~AI)?*_!=iA56(z8IVu?U-&RW$UoUI7u0#nX|+}A0TPu~Gm zjIV`F)X=(sSH)>3fCB4^)5L^Z%l==w6+ zIDt3 zB$LA{i9QO4FGZc53#utI25JXTQ$AEZNe!XsuS1y;D2e&rSueO**HTwqdn+lX zKUyo%Xd#wd!*K=}t0^GGc_;*GaH0QQ*Ma_tYGg_Q|NBq@kPbJp0(39=W&qE^{B}I| znnyJ*6m>dqu&brZk~9wjhev7vpv&nKsls<=4+apSSF@~J&~34^9Ok(B1aEuM@v*h) z-}qM>3@Zo?%btsHcN^q(Q=tOFvywULvx6CaXi<72;o#&NZ?H`dhHg5iC7gG9Dh^8O zE&bE{WHAe&7x!T4NYmi`?FiqI+e=nzEp>WrngUqN!h%7 zt>7mAuGtI;$U^!^yO41Jo=E{x$F&#{xwAKlAclJdP}@iqXV&@I?u%uW6OUb&q(iC|zxE zHy_9sWsz}XRs>kd61eXH>cs_VDZJAhQ~q2UpP-++Vt&<|FTP#gMN7lplGP@fp8gVi zX@j@Phf+M=OmmC0?FTGV_2b{aNk#vVe zxXCMu*N*j0w|Mjq-)h_;2UKtI@zfB#MG8MEFPFt+Vkyo#!$5`jRNjlElQ}7ct0_%J z_>%q8dzRdw!K+L_-SmsOC18^yfxt{_6<~7$0yzt3F_||)|4}w>5W*|6!}(fBA$9!I zN%V<5Ql9ju>-#4FKm^SHCi~%gD%>+ON(z6&&0(=30FzKBEI?SqNXX~BOfY1Cz zjrdLtezkuS_5|?VO`iSZK?f_6ajV(G2Xu)9iyxMCzFOegq{>B^CTU+ zY0s(R9WuzC#Me2id+#=JpPf10{oC<(ARG(250mv+Vrj2+bH!HUofR-=DvQKlT=zx@ z3l7`7Xd+u7*=Ev2DIJV5UMBZN;;(KVst5EI7!3LNWrQtM?U@oKA&b1Z6r?Pl{))q~ zzukVvOkXoHPEFEqeyt7ZI1m&+j5N#M4vUq!i#;`tfRf1Rgy>^D!0D2>OCR+x=^{*rzexAxKDT2#&w z;S7vlq7R&g5k2P^QQyVSJx18HH^G?@gn7?nh1V*gRwRgn*Im+9K+I|Y52Ex7b7Q(Z 
zR^fx>^PTOhT7aKJS7@uAm5bP{UN2Ad}lUkQZJICb&I2H#z$*pT&6n5cAZyuwk z4?MT>(MzM#v3O3$O3tpMZ@J8n*=sL$JriYjvxSurYgwmtgv3*P|&q_7|@x{&%)! zf*nR|X@MMSp;*(ObOk1TJ0cm?2Y(SW;)plsrAM5s{US4-#(d1dzJ}fF_=czL=7*;J zdF*|~*kvgk`=$?I!3O76dUBc8bvS!FxT(UOK_0S2a(wNl$YD_VKyTukLw%jV$2YQ{ z=AMeAdZToSSoIG`QbbVyL%%DLOgJQZpVXP1;l;tE0Q+>Y%kbTA!eQ3l&BnaYj1rS$ zkl}z}0r4LlMNh4Ih@O)5S3rfsB}a);JUh}I^n5Aem=*6g z?2aewfp0n>2tw9$J`|^STRtsE-OCB+gLG#i5jHk%b1S(GhA#RaSQoQ+!2(Voi zfl^F-L+1)BKc!<=ru}HdKN3%q8WMQy)OI(k@gP6r+Yv@sMER?}1s5fU*|OaMWfGS^ zyXQpoj+gaST*seg(2ypp$cTkuiFkoYSS9D@03IqYChypMCTye1?^M+6U&6f}O!C*& z@@JEaNn|D# zzIa5()G-t~(0f}MSVKBIPcj6vM!Xy4Qd|k%;C$vS;*S>7WCwJ_=7<7MMbb!+;~~8RCRUL&hxoI z9_4oxps3oVo3!1)dyH##u)9DjQXa5PHYml5o^6C@Lhi17p~UA>K9oK)$cfD}S7b`F zL1}Q%d4wqP`fp0n6?$l5Q;;I5na}V#8A9X=ek|u094=VjlG4G(9MGhW_pI)fmXonh`C@3RA_Dnf zvqoK8hPM||ylM;U_tKLV0Q#LX#IV%ULkK1_kboj?>_Zk;7-A@3n?8A^)Z%FbeG$u) zO5G6jI5SBz0`9x@bp`S@L{OCN66sIxiHbe$tIshwLlj7cqnwU^kkSw8=9k{|#U^8F zJ|D+3!G_8(_Hh2bYLE-RFMtL1uLY?D?$1(;8+fhok-g%?b%V;nMY z!yp3QY(g>g(?zzP;v*tl^_0@48-~O`9Y5_hq0p@vi(pA}DSV*WVg@O*#u7vh^xu4dYE(|d^?W?W9e=8f?Dv(?zZ$z~edyKG_t}CZ)|CYLppv~f z4TtSaqM*}2)4dym=P3Aw(P|O$qr#_C@vL`wZ{p3$*grpefRO_T2wC&k2*?ex((Nem zj}2foBRn~?fK`_#MsCcJ8NAMP%>Xtu9WBB4wDq!`Wdf7SgJ(*^8jKv;j43j;P0cD_ zY*bbvd%WsF6(ZMtvuc(Shw04pokHcQ^OXKXid_!)F$BND@UhqEHP6W>hRC=;BegfE zINeGay5raHz*gV8m%%s@3%KWeq;3ZmU<)OB&mEG<{hP)fsWv5C!e$A`UNX|BhYw6` z2TU*g#o_8g)eZGVq+PdXn1loG#Y?I`-XnK-12;**2HC~XpoE;~u~@Wn5TQA`w38^} zlJMdzh~dg9pG+;Oos2iz`JI@W2mN1GUP7%dRn^^C-|G7owwCanMMvY};grllH%Uka z@cp)@x}gn@{FLE1v1d3K<_b7T zFE4bJ>GKUXZRnk);l8NYhTi%HC00>}e1j*TXR2Z3%O+OYupY?lGfaQtM0ye6upcJf zf>c=3)}5tcu1D4Q)YNEc&x>y?imZ{;xdccl>(BW4W^e1*kJH#fRNNSY#3l=q;;ua8fb8CJTVtd4r!jDW}tH4rCWXIMp{oBtWZHf=pC-9xRLYyk$ zkIpY9KApMk&_XW1X=&`;RP{mz5+~Nb^|j1H3}c>KJ!mn*;a(KZ%#k=dW7w1tr66pt zdGsLfeO|16FW&SjY)TK#&kow$w-zbt6@96O&n6#F&NQ!TQtIt|+;L}@DOr(M^I&*n zd^I8W+t&J*Rj0&iVP%}2(1n_?mIC|F{IVr@gJga}!+Fbo>TcJ@8kQQt_a=RWgjXx3 zu&YFzYoPR=PIwLXmE>U-mW%Hv`9~S>Ah`rCm4TO}vGcTK!#+{mcV3-5>w7e5 
z?xERZRtvI;?)~d6Dli6fvWb+#mHq;aSNoQ$P8kMt37?s)+A$AsIWLjeX;Uc%<7`|H zl18NA1aRl7tDlJz=$CN*rsr05Ah!|g5W`nlwM&rZ{xT_q+Q6bwe||OeqCq9X1LJ8f zVsfI@-t_aPQG)aKVqL=P)yca(=A6uXZvC#_|C)v=Rp`pj7?~evq{vT^_u;L$aF%fb z>SIGXs=`IwC(^-0BO?mzAaR-1yc7SX{`SrHyP6QKbH_ad6zNk`fKE6V+l*IjKkYyK z%%d!tDxKy~cuy1ivRl-!iSNpAGgtQlC-WW~FFYG7JifN7)!HRURrf|FMCzrE;PGDA zpet--M2}BoFljYz;8&_DzcTM<`l<7>s5!;v_w_dg#mEH0Kagp#Q0BHu z&e(Zg*LcryxNAoNW(olz|lpNQqzS_x`>?3-l<11Fd{ZamfXO3 zm@Pnz)zx|zlc4%)WhV>9RM&cBqhxg}1w?;<=^LRVJ5uLp@?8krLx0Wx+Opczb4BV> zjN#5+krb}A>vff$%XM+18c(hFMKrq_%&&F!E^Lq5uDlfzqR_2D z5~|KEO3wdG{7CdQ!`m5WK$q!!9twk3)Nz)Vo{kC0e|V{_pQ-Bm%A_ZETI|rIYJ){nVT)c{_?X$QH?@m_TI(A| z9viq%tva7Y9~o1uP_YlT2VfPThE@n@WLIq?qJs3I842r|JYJ3Q;_WTtn=O(L&q{NN z!}QjWtq>#-8pf_r(4lUrI$vHQ5z#J^I_J+1k1_F`u^U;*mHbhmRs@pemRzB=q@3bZx9Fg3~?mNxQ09%BO2@9ATg7N zh}5~$S$ksUG?}~Kt;N5v`YLC4(E!JCvfb;YTcTxGSEzc~GrkdD(Ri~FV*ai;d~1C$ zmSH|b)d%u0{z71|FqPrj1)n5^z~uwh)K_Hs*B5uFd#vlu(Fu^=OSY_Jh^PK$`f;|f zkhY@kOoQ7Kw5CVYn42~K7QlF=VVHJ=P%6CuGVr#9V*17i!ZV)4u2wFzG{Jcw9$K9 zTldv~`VcoVtQrp?i*SabwpIwXbMyrM**=bt2-{^0u9*>}epkoL@#60WI~w$klD$n; zYct+N_60TOMkzn7Q`^PD&-N)f>x2{bJ4SQTU!sz7Wq6N@4BxmgV85fF_4*>uSDuE? 
zle@W|cTo6_P44tl{&ULv8G`QwX;0$nMTt_BAuB%-J5dA!ICxoYmpFZ7|9hh=;Uyx2 zZVnb(k&i*zm z?V8$*V|2c+_v`}+s!l!k!uD8U>56<3UcHC$$GByznO)!)8JI8I{S5K_SKp&geZC}M z5qoQr-62s}ZM5iHcs_}}*3Ju$lb`Yk!IQ-xaG;I;ZYj~)Ic5myrXRL%b>{y&m>b`@mDg}C8LULWjfcPFCauCj zOhT-AAd$h0#W3CShd2Gniu|)(TZZ$BGQxoxKmnyl=VHuAro6Ov&kp;AV&)-zCNrb{ zwt2U(nc2b})7NK7r5mSwmMJiWQgGY5ZThnNE+PV7wXz#`G2&;OW7QZtX zu?^NQ)p7PZzs3(hUb3b1hCg9aCy3CbKDxWI!}Lk&IduEdpYSkcwTZbCMU3fray}A7 zH!PkfIHX<8iD}a+zvtcX#ia|J^NjM^+lOHduNJ>gaP0!8?>~9rH?fO7Ml2@z+A~0` zj%-v=ijXZF^8)&!a)YpYZ2$p=Oz8&!P5S|rMA~B8y46bJ*vl!_;y-e#=LHD}OT2Y| z_Ur8xhpfkf`Fse~#GMMQ5jB1C$l4qPP3nM{kZj$`96RtJaDzc?GH$QaymTD0Q%D+q zkZ{#KH}e+Xe=#a!OuZ|LjuwbzioebOXL+GOow>(>YZfJc=TUjnm#^cmD%Ahjni{{Ezrk^`c7U z{ZFecQZjgie0zoY@UCZQ*@ORaLw<0?q^{%e&4;=dctmHYQ+M10Vs}RzXEgmD?5=rm z!elOH3qWj`vQ1rRt=f+y4}tMhMV*c*V{Cl(S~n1$r*h!*$l4GlN;A)0lBBW2e)WeC zL!#;z*tU4wkbr)=&3l&@Xg};9LSR~Ue{z;mL~CS&?nLQlte)7Gj*ETs@29uPTvqA3 z%hx`#kOtn7{|+%R+2xmimMj-vkHl1+2exp{+99O>Bn#M( zp8Vff7}!C!tNR4|E44iVDYc7)xs>CdpwR2rpDQc*8bYGPboa(?6bxIEdruXAM8r6c zI(q*+dJ|5eiho&06pE#_rG2sYSWLbw$Z?004KrQRv6-aF<8$SO&L@ojXD00LadLwQ z-IRp84(Ybm!vy}Qwuy}N2Smz}bufH57n>WKZ1 z+s!0AGDB%gVy61!YJ3vEoa_B@npfX#R^>b8DvQ?0!|uA-(JH?2*_N{CTBywW0DzRP zl|9-mXGh+ei!Nb!M$$GNSoW5W%;2p^V4~#Hv$2*-~{VVcgh9L@eUyB zML{7H+ZWRuRuY!82TPw|zPhDu7vCnLdfwf2LkcLdfn8?F*)HhN-W!!BU|H}uTkqlL zyopa}mm}<6KH5xS+M^#|@pDXr8o)=TP`c_Br>-u4PCobc>$^dpL$=EEOL)b-?9mO2 zqW-avbW75FQ=E^8@W;0MIpg;>rm6-fu7B$^oF~z7`ykSH@XW2%Y<7uK z6uD4d{tzR}^P9_!7B(Z<8t!^^2>bNOBPT%&Vp*}b>Vr1BX9L!Xc zy_fHsYY=#dvo{pF`q{3p{`KlAHPfgB-0}xqpvQpUnAf~8*z~#y*<`n`as9w2-EPm} zS~o5^fHX@mt8*w@3JE5T`USU-_gm@zHL4KIqWA`QtF<-nS;@v_-!GwQ$RhC}zes z4VG@ZmP9tnrhS5Sz7836Krzhie|Ub)hZxwFguQTM>DR>Q<_vX&=6FblD{4uld7Vw& zd5M!eTY|G$p(3#|4DBop$2>?2&SOJ3T;f~K0 zOH77=!O%#Btv=oBw_*Z|Bpt0O8U41;h9j$l?xA)$BkF^BxYSJcNsf9ZLb3(fDLDu> z@5C$(mWmksCWtwhX+)6~WG$iVl5^hY61?_nm?OA+pDMMRl4#ZLDN#n2@9J@cEd>je z1xv(AA>7VZ_ZeO)dAs-$_SH+~DdX~PK0R8MZY&scv}UWE|0N>dNVbw@;BCPRBY=>O z_Ct7Kh7kDCXU6V*yW}-oi_^$+N!fiGSZF6`Yx*!pAunhTX_vikfoZan@M+9Cl^_O3 
z4Q0&#YMOOx@k^TM2J3^HUmk5Vm}2)}r`q&seyrLVw|z1EJ$JzNqrhqY&TW?3&7XIJ z(!b6eWEK3?&kTeHLsOk(ABeYogkg)dSno4w?N+i;vR@)h*9W)5I^5PBFjL83E|6(> zT8>3|3pl72vRonjd&5Q_aX+sUzXiJo4m6h7ZI?_iUaX zg;L=68&a6;=BXSD>vPbh!7O}JX4Ds*(Cc_i+3#q(`v7bTexe}uvS%eMM;R&b$f6f- znU<*9niqM-UEn^}K(>smz%4u%U5UN9=!4^eQSK-v88UZfFo9P?k4I+6M-ii1exXMi zL8o2U8%=v{*Ru0sa-hrLAoJ#MEbL_SBj#G(zFoiiO@v^eg*j(CcPF_{N=OWVPAFn3?|;gb3w+&7|KURPD5)i^Z7Zq`Td%%hkfv zzBK3ZUd2sD-T}9nF~3I~f^{hh-%m=DkIaNLuP$?xsWjU8h00Uz&3e#|uSwT)BomYt zAFrOQk=Q-EZCcya2EPbs8ciDObMzbBYph1Us{(1EdiD$iG_znY4~NV8VUMm8RwOL; zk7ZF#LZcsLOD#8Mt`@Fdp2G|=5p&k`N1>G*{`^5s4#1gUA&8#F(r9_v)8J{+ud@VZ z=8^k%!+caqdp)BNb?e<9dNmFy-ySyCC_~fN4`2AmmcR`<5Aq=+hiS(j?;~0rKNCw3 zcs_5uWRZyND=XL?dljM&wj=&-(7POeHrwrpU>Nu@b2F1pmyx%+Pi#epL_(y9QuwoD zWox7k`xX(SCHZ{Y)zDbk)}XQAsFED^SHP)joiD}(JM8(-J;GTa?`zi!_Uyl=u&_uE z@m>SV%J~}l8jOx45%chCj5Gw6>`^j~rQ~CUqUR)xjm=WhskF=M8>rYFIv=gwtfG!H zdoDNF#=rAaK6pxIswVxd>^+b12+wsA8fQG>{$1NTxG0z6vP}eGjmO*vJC?;dBH!YO zW6g)2lA=u8=z5mRXYQ>7qkc~{=c}+=!lc}muh zAtEy|_9>XSWML+lEBScganH8j-bB7(OooAKzSk}R?8t_|1}K%MB?mDTA0P*Qq}MWl zINZRASmsqfPCd%3#^N2HVK08|U=)&z*SbLNq`-H>C$x9a`16sDDg;|!ni$_+T2Iv{ ziB*|Ub%^+%XWwNvdoNmco9Lqg)MHiLt`|ExN^E<}5goa=-)FzJ)+^funRt2keD`;= zBgmg9-9R`vB*5J_8)=&HfCL_PH062eJn*;eB1&`}7X~-*S%|1+f+t1>mdB~QraOsE zgkuqK^x2IlMdt5}B%r%9I8dcEWH@S>ClBYeqXw5?@*sDXt6XN*E}_7IE&FqSF^gHd z-8(7Q;Q{lNYDAV~`HbawSw#k8Iy)@q!^zj158w9Ozs`Mu(nWm1mt;1T|9O|)f3sKx za`G6uO|Ws;(lhGE@?gabXa~g8T8`9$VQt(b9K!-(w!3G0G2$r8G|GRyS8D4|={Cl4=zu&MsfGheHPZTpvu zcn8@Gpza8$%+V_kyJ%AiG*8O+SMkjst<-|3cbPI*RubTpd}9}a^sorT$LM3-UJ}xt z(4;&Jf8u5R$mRq8vcl1atClj7FSY$B3w;|(#vdqA_o~+xYISuY%A|({^JP6{gt}bV zUqqCx$>FVl0SvGZgtxW!zQRIL&vpHqvl)SCbg%Q1HycA-v{0!4JkhiskCl~3A$Py4 z&;a_^6gF_vIez_#hnfpQt8GIilO+`xW2K`UeZ8x0YR_C7z@@);ftj}sCX04oGVlird(?$h z4=?(P!vVl$-*H1z$=9&-D}A`2qB9xIf|(Wc0Vge+xiR6W#iGpvD28}%`QH!?5H0>~ z1kELwL7*Maj@6+f1B3qg!MbtoN!%HxaqeQU7DzRGebzq;S-E;f)AbMS5cWaBg zR(}MpPfpS}G5=!=Fu<~rSl1T~jYb&DA3vd@!WN6g-252^%dbuA6A0|RaaedNNu 
zbc3D?niK~&yAB{QdnA;E@74uH__5!S(Sjm!-jen1zxUo@dkE`f4_pp_1*1%M?8PWK zE<&Z=*!vUMJKv$~;Pgj4ei5JY5Epfvi`-g|x;=PuOb3b`t(-gFFaqi~SLz)9_ub{j z=rYuhP@;+3Jmi1GUEr8K?PQN{OZPVO+iitO`8ob0TwqMue*?7ApYY71dRGbXxFgWU6mB+>6rU$) z$8i__k5~pM3a=_GLawTU_gBVaZP``zY14`BZ;!hH>Ec81wMgN0w>P`{7gT`kO=P}> zktfHqlSX=bpft;Bdz7l~(>>bHqJf(Y>%IvBh&8q@if5Y=IJ}ykbsyL4baIpb10=aX zCbcl?pOxt3Rsp)^(L3PyPX~(CgG%Xn$bqgB{(kBm?$7mO+2URR@(5vndzik*I21*) zM1af=-oJdLexg+uB~}yoijr0A9|l8AijOe4lYJSot9Z;e5o5WlQ(N=2wz8*ok&c}l z@EYZ509jii6h_)DyoBUWz_C2piQyJv%J5e9ocs$Rn$w&@pXcV&X(!1F!d|G$!gt|* z7MTSI@dig172l-F@l$*BGo?3P6}`x$dRa5>6Ut)q>*1wvWMc#J2i=Biiz0`WsZ(_g zzWcSZX8VVmW-G?-mphEZ;k!vSN=ew&4&iZ7CvIf}JR*jFs4GJjQE$(GqJLco8$iq; z4#G1r3D6-!W+3qYg|@79j{XpzB}PXA8;{Le5EV1EEte`7AAE6Xc8rDceQ6lyYWTdDmC2i zrTy*EyFI2J+-*vBhR73II(Afpvx_!4DV#qr1QX2Ap_$QDA>P_Ww}#89W`z&b;mo!p zKoz(zMgE8~c33n3yIx_6M4*|-=`hNzYY-r1_%xTjC2_E z42tE9UA62H_J=x4?U(qQ2MrUcO6(D!crU%@W+xLkE; z%UIK>Vux^&T}|{pV*SeSdv7qxm10P*x6#Y~J$rO?WkX0)1s%I?0e~}=&#%a)2n#AV zcpdiA1kBV<47YTh!!~BhOe`vftE$XuO^vrnQvCm~`wq)_R7*lBMHr3kW*BU^$_GA^ z)atzCkYrtp!|6kW?J)=p|0-Hh=TEe=mR_Ylw|K>?%uD;-QQen9f5h1>P)&cybaF!T zYo>E$5cZm~4xH#ekq`bCJAL#n8P(>BM}+<<_BY7l5#6!ph@agv;kNp;8$Gcz(%LHI zrJ^@|i%uBqvAO`Zi3*@u3fhfrw2=pXA5lu$%yUa<$7YM^M(F}9<@Wdp!P6KwdtU&s z8@kyrN#(abB$+iUi`fd+jKqI2#s9?typH88}?7$`QKRq z01Mh#8H!RnvA{-{nF;*Ozma@2!Gt&Jc%Ah1zGKY&W~$6CcQ~E)Fx>mCaK(UFU7klm zphzoT3>?4mgZfNik$D>?sce1g7M-t9xrvRRvHRUY0oVLREuRqh8F5M}@}CbkxrF=& zJl${Z@?F3M$k%5BbT$BPss*eLp9tZJuJ+bS!2klH4m3LsjQjnTV@%S+a z@p7b!{7YLYA3o*=&npd&YP|KebawVUibhe_W4&rck3)c^AXk2B_%kiX1s@ z;RoP8wuBBT-fVtZ!L0#pUivPuW$_g+Kn$_iu6pUfdTc*P2f5H=e1CDV8uZ?rE`)dB z6M9am_itm5hi(}DZl&%&eIPg@D2%;Y_vK3k-*#e44WB?d!|RW>?JvC5~v32Xk$jUdjmS%@NH|?Xt4TG^W(I(lk|>HcQ$Jq zINe#>4BS{11T_g}yk31`CyW|?2Hi6(p#6b8F94vcM5u#ruO`LV=yOwB3iH$1ZdcO0 z+IYn!eAF9I5*LY2Nibi8*3-8IjPWWvaeDS&^^@V?E!(K@&P`9p4gWqS5So?331=u2 z6T@Ya&gQr7^5_)2ewvwaF-))`&-BU|-3Ld@yWtY7&^a=BT#-dhnwW?k3?Np*3?6u0 zv19>klp$svJE7;OK4jqOh~vEv$3xq`U?!oLkkNy`xR`F-gfyI=`$e!8$`jnyDJJI{ 
z;)%p19Mv_;p3tp((XBP)a-T2j`wf>tY=`FwLtF4l0wc&OwV1DFa5a=+UZTSXpXmMa zqGaxO9K2ooAmkC>MgeSSh(tYgMB=rD5FWMkH`jHYEm~K0dMYN;G`S`U*q2qD*(`$i z`J2|@)=2=fu~zz~L+>AO(~U*g><(17TUV%;pYAKow~%CWkrI?{kcJuZ zz3{%D_`Sbpz3W};`Ny@;k-5%3`|RtSy+8YV{_I7a*)9c*1TUS=Cog@?hNHIZsi5l* z7@!^(H*b4FJ?JKcsP|9vVKS!hO#O zs0d=qDZp#fqIA>Ej0rOQvNB10S=3JDXhO zn`;Rjj)y|n5YGqpd+-?j7wva<{2VfnsyD^a5yM{-haN6kdUv|7{>P%K$}XvcZr3zT z?gfhZ4e5Fd-ex8U@%wpo@B)c#nU&*oRyZzij6(@P&w@pHEYhEi3_p;J3a!X$?UJ4^Q@q(k14S;;=RTa=~&R$Imv{ypPgEzUK>GVCJ11f0aWeRO1K|& z!_RT)m$l~PCbD|h(8E_YAP6|ThQ&W=n{XirAON*7YBzZKs^8&R5}jyIx9D%ovG6A# z!keDcz>oU%S|!I2sp`;92X)1~$~KSZPsmwtBlhd)X@dGPj&LaY?N73WdAUbLQAY^c~oB_oU zbn=F;%cFH3pz>vb(1&?~EF}R1fhpV7q!B2Ey-xWX+AR79weMrML}4p$EaVSVZm#cZ zzAw9pPFGZYp3aN6V*8l|9U6!=c;$eifu_!U_GTGp3PfOo?XVx^caqZx2iJ;rJT5K! z6axO>wg)pSo<#8Bh%0<5^J7(T)%m^KNVH5X1*v^ZYrZ*ZBLV~L7x5_M54~HS3fj;K|O^P>xp6A!8^Jc!zCxZ zvGQgyn?W|#vuY1Rqy5bGSQnImSa386#CH!>WwMwGfFJIybT)r{c?MSCjUL2jwg-yP z495M*E_8yBbGEQ(lNKRZbsETyp}ZErCos7Fcz~-#;nb!$eT)*5LeLj2J+)K;52!|GE)}ic~I~G3Qy|oi_^&%IM4_=M?>Bj zIbpw)b;mA>^kmN)a>Jw%C=;)4)*%mkwlX33Zh=qJJ6&hD!$FDt!6CiF1Hrz+{MD*D zxXW|uN*$!tfyy^VPGFlzO&s~ZG{Y=mXRsXIV$MDuwRq8U#dvzp?K4Qi=AT~m7Y#Y+ z`wq0%dklkhii_K3#9%ybxKooLkU#9HVXH^L3TVd?gy7~TS4`R19~2DC8)Rny%CbyP zR1QOg3|})|)wj`hJ3v;eJ%yLj+!dqug5xNVJJpy9d#K4V>v{HV-|>xU0?U0Kz$c8{ zqX`%AHoY7^g2cdwKxT9N0J3rR4PrFfbslu$^T2zDDYkA5j(YBz-5%8VlR6CCOb-Dz z>bpb#dm99KQ#gO*vL+X;(5b!Eg*bRi6;0xmP1q_BDIt z_!>6)TgDWHVLq$4_@X552kKOC1syDq`Iwam_B?adQPeuwj0UYkK$48>6hf)&gpC zG+k~D0V`};;NgCtD~}u;2~%e_j3@mgZ((zfGV1|mQk%7TLE8Lyo{U_){D|ZctPqEfpSo&;{^=xeN8<*f?QD5Pi*sShUE0EukmsP^s8XWyb*{SCJg_OkN1L<|y=z)DExh$2Nk}Gzd|K9<%f3Yvv(OmsY z)5HCq#vGKu(v-sZeV~0XulP~o*L`fn>FOJ=UBjt&3)Ws|FF!nA<6-X7LQIww%iwZJ znUR+IMGSSHWFjB4qse<8Bts$qN>Xwuc+(GD4iPLko>=7!=mZd|>M^X=JD}5v=dX(_ zK9Mtcl5zCdG^`NL-uRiP%%_WCd2Q|Uj0e9+Fi+*%F*kZh8b2H=t%Q@~Gm-IJ#j&RN zQSuMR`2Fz;iI;kQGUS%|$afXrtnCBM6+zIQ+AMsy#QlI&9bIYiv?B zP`PO?&W+OU$=oa6O68LZi59vEKg4@A-s*2jcX5_2Kh!(q_3Yv7!|EJDs6q`6P7b0b 
zEemva+37yYd{5}W%zhR0Ghpb|?`^>4acTg{*W8sCJ{j${-uuroDTlb#VN6imi9_9| zt>qY#J(o+yYttYnZX><}K-Et>pZ?{x|MC+fj^yN=_dbFvt5zk2E3`!hB9`jWbrNAG z2QL^ee}ff^CPp+U@{SJZ>hHU4)=)b$;pB7;MgktAmt=aw`r`KQHy88zc{%$}Oi4RO z2*dM5OdvTZuWK)aL2iE5Cqo$H;zzQQkt(5TdCvbrG1&*L{^V`JiNmJ2TpFNx`Qj}0 ziSit|<7G?R!9mPZWM50~dX;soZ__s}XGU3(0vy3L20`&!x11w7D*r5r@aRs;#M}B8 zSwOJ_?}Nc%IV&;Rb*vzIh!ET+u*LSE?}4&NZ<_bE+diExuRy*-I9cV#r^NJDl106X z#kyjMVLW2<0N_@#RmnN)74ephujA%)c110{c~sC+%5lOg${&UL3^DD{7j1H=LIc+o zaM$L-3n|p%HQ-~HK4Z=_FKxPbQ1Qi_+h*mUhqtjx|NaV6{BsE*l27q?=&0@JT^!aV zT7(DP!@E%s$>;5o?JX=MOpwvIH`U9!=Sn`Xpm`e&Gz?EyoTs!k1E1b58%6F{Nbp`p zO6AgPsuYOOTIHpa0Zoc30}M$=EEO(c5+%Ewyo$i-GZPTo$^}^caZB1#KXcbs(>5~Z zNK3Oy(M1;*Kau+R_5})MhGlu6UNw_Uci|9Tmd$Fqh^Ruo#7Y<2K&&5Zd=f##ZGSUH z3O9XQ(!AYhc943^Zj6GHn=9Gjx167!6VAQE)4GlGjwae`H_cE;p*KY&_wK@1|I!O#gMlSD>HY?wmrw)C$ z+vO}ERM=OT=qaP5siWsNKiknMisln|RVdzZ#n6JvPov!=8cB0WwKmJr#yNS+7r8CM zi0xe#9xgLiCmdK>-^!v%b)q)EBJm>@VM_T^zQsoi;=V`7nX})@vI!Gzau~{ZuuXjCbagET`aEU%^s=ZfN6e*&lUrm zDH0QQCyj8ecGiG~S)yfJ9PxtouHr4-NtxRMvn~tj*zROo?vhiHnU`M-`qk@Vl6!I4 zX-$6cF=*{Ot^E zAP)AoSx5OO%0-TqEyXE*Y}qmwPR&X>xFLgvj;(h_$z`X@r;x{u$CqtR~W_C z;#M-^-LfrYr>N+nz)MrZB=IG42f?yib}>OW^0lE2gZ{FB8^`QXcGvRPJ+~LWBGliviMjXC$!vUvgvX$)UP`FWDg8GE}6 zZ;MMVKY!^QPFMfC*1Wg$+x}+5g(E#=$9{iG02Vc5?lz5hv!FSp#6hZ%+WBTvqak!& z<}8VSTA$XVHR>V)!caEfdcZ)qHM+}UVc{Q5HY>F#hIU7~Vo?q^1>!YO$hAIh&x3c3 zJVepj;f*q{IZ7%L@rmh`XT*$tLE*J$>k=qe=9u{ku1 zR4i1G$`{Kc>2-#57wN>Adms!s8UJlzlEqQUTAR7vDAGpnq6Dvx-IwIicOhLbUDkg} zaxrKryE*!Mv0xm2$Kct*DJFyW9B<>Y^w)7?93+;i{&Mk+8C$$Bk*;3dAHs?z?M5gAAeL$RCbVtz48LL9>; z>t;nprNG5N8uO!qta7s*NHKZb^*#j<5TpvlTe&f;b=4*=evQd|MB->|P8uOqj-l-Y zY5wG+Z5E_hG{8-;khDnx_)D>?!#=OoLs^(gd=$t)y4^Fy=Bo|*f@U6NdxtP7<+5j{~J_JT{D1WvvQ#UM*t`3~F zSnwyR6bJ_r)IYe5OM8(8#RO`4krFKn)pB)D>9K899J4N4W>ZowdhXg*%fq_>0+2qs zxCL~58w{VU)Y~qv$twy(3x%eOglQ$~J&mxw);z za`yI#YEaAe!;%2FgeA~{)(IdQKGB@STYTVO*y6OxPi_6TokzU}+tnaar z^+?CjCGZUn8;3B1({6`C=i0mDj=4@N=7c|})V390xo3xaLz9(A(bEhp^8dDBID7)@ zATfI+4zv7AjHtIW;QTdWxwGZB1^Z4-z~F-!WI_ja#+YBd`|p2j6g@UGNld4?nfmg< 
ziiWQGVl5e?o7-bSE33B_zTle{2&_Bby+%vCAL;ix%_SL_FIDgppq zmH7lOdD#)-tsdoboYPP#C~jyp=1Tw4@RoJe1A>q@SZbhh^=!{k$*YbBi@63k-clj1 z%epC`TM1U)*&OJ%rCiP|-e$*zl&)O>#-T+8eWzNJ4`LE2vLr4YHaVd_+A&5a$w1zz zT|Rt$EV~(?K~EeLA-Logmzp}N)NW@oU*Nw%7NhE52ml;|NSI3!qUvSt>=Z-Sg5Mh? zk>7T_-fnlDm5X&G;(Hb2)+S9E&Q22}2Bl>XCMBNLMc4P&As?zR96;60%Z5w=hp9N^V9p2+T zGI(wE+=vVy3R>FMfVvvF^-^=F#6BnFqEO+}v%06$Y(U8#Gm&f`7u~c2T)0TI_xb@! z@&>-cKSDCw#8j4BMRY3~Ql9D$CY`UQC0_2booH@E3B)mX8b+rB+rP144!Foe+EyA) zW=b7uqM4gNYx&_XPmco6epGC;3HSYpeOU{7rh1L;9qn8cM)+JN&K(7Jgn1u~-QHK} z83bxX$vSv*q<{Pk*q&*@oU~@Ln447ruZK9;!Nh@84QLpcUu)TPjoUX8V}9IM)r3g{ zX28zr4lv*dD?$z{6|Z@Y!9;)!h=1S}Ok|OqG=5n0yDyRMR>EeSfUt(NGg(Z;Wq(rN zese>2+RkHKt6czRA;uB^K+t`eZSHEyeiC<26GyzT^Q7Xvcjvxw*Pqy-WIq8YALopB zVEnsd<+WeHetH=wF4qcUpiW-v2VXjE_K&VP7Lu5$?_udh;pg3x3`=X<(S4=9x2ELM zuRHgtUpN$kbQnb(03tR>(?R6o$ZvSo{va$ehh22h` z3!Y-q@hzi-XW2Bx+=(h_d}}>&_tc~%xEb{CUH^mWd^_9$;?f;p{c0>C*AAEPFRf=vvm zu;04qm!|;;hULCUZ)0fs7E|gd2NxA9nXwUS?MTtd3;z3{BgaIi;**qptuho{GJ&LC^TY!5Vjof23xGhtVzZJjQpqxQT}zR` z`_N%SUJ(a(DPaeA_m%d28gsb}n%;TBv6N#*1oY-C>NXgkGEoV^UsSru0?|Z~|7yK# zu)B)?Sb%`exPU*r!(7Tvm3J9n82~T_NTmUS6w;VUpoo>R3m^{-?=e7Aldl;m)jNw0 zfpCr;Oq%9?=c0DE@4Udvxu-fn1Do}Uf$HwZf*?1(1cda`P1I}H$}#fEJLUm*X5(TeS~y4f3g_+>3aSavVlPkYmC{${BpL&;|o41JhMrv9;8khA@kqR{IwzLp2_MSy&?OGD0=x(2%=c?Z|d z6izY)dmBd#FFFf;)Y3q1d=V0YN0B4qAPr}(*e&i@^#*VKP%tMPXm~g#_i*2r($N1uW6@~`}ip? 
zm%bBU(-HFJlxM>A$dbxLk{r+W{(iZ1{NGW21dS9W8eI)qe`h8mBir8x4p|X*ZW0lR zsJ}2bI@Y39LkT)If=14P%&a*DQP-=BUCn(mw1?rKz)h(P#P_n4Oud(G4U6ckF^Qw$g^Fv~4P?*UOQZ`hiZ-#`Ai+6y2U>*B9j8-aXd zJ#?y{E>$1|IZqcmL9US3`PoxQq54!~&0{pzd!Ug9>OtK%_CEisYBR88Z;Z+vT_+S9Cc6u$-GFd#TSlWQM2M*_ zK9VMST$tME`RJF9FNx*3Ue@o~TA$2)rh(c`-7!n>;#@DnwL-?1p(5s(5;neqTGGnh zqXW_x3F_=H1a;e^t4HIy&`tN6gsNSqAK(PPadcqqQqe`-qc0v_0`$m}8)v(Y@pw}~Vj_0;$YT5&adAx) zF~to#;S5W~VLp|7FET3&SLh;f@v+Ue%KxJ~h*&GqZsTd?2qcElhIz1{}q{5+` z9~N>6e$Gh+mHLLsqKBML(H?!bjiz&|pb2C%#%&vN;$XS97kX>JHY{+-NsxOR1R}E~}9lws%}Ud~4i1pgh!b zbv`1}7q4M#ax1ivLg2&80O0V3g6*TyIc3t7OCjXKilvZa}u;6x8p%1rlWApHW}FLet4^1&)7hrIngrQv?r8eldt~u$L_dbtl`lr8=|m#CFw}3@_L7*PVYbb<+l!lbO{BRs3&Lpohr$ zIcw8#@?u-@Ns2?4FEx0Zl!`dy$nwdJ*Nbo3xJ+ASo&`2p>^hQx>6r&WBhOe4L?e8Q zG6bOg6Xr*imvG+waG(8%h82vQ5FDg1<9I23^0CrIY)|fboqv0Lv&{2^Vfl0q=yP<6a5V-*{-&bY7MUV9gAobNipMIRNVyPeYGuK+ zFgibS>tI$_$~te++H+GGKkL4zxhFxVBRXCRYLTw}4nmX)KnE}` znon8hEkF-B-ycqS2Ip5=L@e${g40#I=iCf0NS^Bij?!uR!aWV=ZM}Cs&%9bV|GHL= zgA*4(JWuJuNpCfwri+N2y2ETj`Y@xn&lkLEHM8)*<4k8Ws(H6UBkITSZXrn z`9p+YlEqTWCe3tIUdxtrC+Fj3?xck79h3qx{gUiXk96Q4;COy2(p+GeQ@QY(1W`UO z27oq>2LSaqhRG&?mJlJ;*%OOM#Yu_IAbPH<2vY)(0!wd(Y6%3b^tW?Bc;Y#7hOmD} zN#>xZk;#mMOERJXVM^tBW*Xfao9$M4LfhUo&Hyb41qG{KH*_dMoAwMIpfLg>77yKy z1RGP&Z| z&a-<2Zq>L>O#Z{ZAQ#u_u%V8Q3Q-3f+<6@Y8O*iX3_+xqVoZze=Z@WB@-m2`Y?y%p zi7b#H>sNw5AemP|VG!pATqxXy-exG6ztU|9NcdqE_fz9~Z{U7u?bQkgm*vcpv3el6_R4PnNO4$i+q3 zSk7IM5yQ=a?;@txv&_{y82s{B!W#IhP zD{_9sEd<}t^R!dOB!R@1o&7hj;T75B2h=~^3q(Sw#~+?eQ(jxokrV!YPsO5&K+RET zjqgwwmm9X5su#RRWP9)T?cEes_J;3JSalpo%-(0Ng!R;+KU5&@_VMS2iyCw+uWrQG z)4Edc&s0zk=r0rj(T5v2-zhnG%S~7C%(o@y+=tbPws6nh-~h!gK%RoQ#85#2F7X#S zxho&tHy1-M_O35F{BR zd~jNx-niR_$KhEw4Ee z_N;3in5W2$By=A_z#IT-_DnaCX=dI!#F5jU64y_+ECJ+ERyOAd9k>)QXHP!qFFn}t zF^aTI4RUp_8Uc>69PdL5fiopTG1oiQPfPVXFrhNyhYLm|0VM=Sh1Z%WVy!px>!;)^%^()R|J0oPa8rWN?rL zJNbG_zY1z=@!vyva?b&WJl48x5^Nb-q0G6d)!NbYfVK652>cvmUxU-t@#Qzp{UiwI zXPdoU(7@mM+WdTOa%qI~FDx^CxDTL3GlOY+^&m}tv^Oq3V$O3^_W`z!*Ov<9fVjmfEa~Y3gdgV7lcQy8!-6oAg3$AR-j|gJ 
zz&&_F-AH9#D}MsznieFB>yePb8dU{-_mRJ6-Fk8(2l^d%zO!%c6Cb$%pjicjGwy^J z6&^$>OPXDQ1{7cD5P!aU%qT{i?A{yN<>(uW1q^+Nqzd}Q0=#IVV zbm-$@$;@86f^@wwH)o0O3j$cR=O)_KFTCzXlskP?L}of{=9^;%hN7=n`fGgUO^CP*D82!F ztlP_jiin2wSzuAd2MNIsjm$qtd2Q-${ILt{tK9kH>DjkdwNC`L z<_^!R&MGNR0ZpQq&{I<4KP}%+EQE+`t)6!H2ny{F@HPrh zWb|)JhzgR%;lDviA4s61Nt#WA4A8pEAMQ<;D-tV?}Gh}vob6Z?U+e2 zNz$0PDX4wS`hlOm0%gp<*zIpm{&3?Lb4r+zWPyJUNXG&9RD{lriFhAnFQXkn$#k zf=+l&fHI)`oI4f-#2%Hui}(LFfct{m@+c9Ycr+O;0;tV6zQqDS%T`ys`s{uHccL~i zRfcA*{qr4YGX#S(gc95o-~d#Vv_l$JCM{n+cj?8>U&_5Kdn#=6rD}bt5IbF9b+3hiZ&&syp>WpbTV1HBlU;=|ICIMh5@N(Ow z>2gOCZ>lzH5zJ}xfFF+fN*fYAwyT&fU;jyPKgd{-_dX%Q&}aK%%(aYKLHd{!K-a-p zKwu^Bod5A$#fdR94vo2BuzK9#yDpgwIlXykTJ^+@;)1F73ti=U@uf)_8LT>TLlX}j zEr&7j{swd+sMb;(e7Po&1EvAp8eqe;hY}zKQYOr<&bqCzWBfb4P{L+ge6`ydNm#5W z1*dFEnhLS(z>77VPGnFYXtu_$ZUJ{R8WmvhSmwUMC}$(!6g>L!5HqoS9PD4EHkP;>_Pw{VvA4y*u+VO=!r+!YkjIEsAL7>$ z`=IFS@YakoxbhXKC>zjJXso7x)to=~9J=-hM`EhwjXURJXVCVrj-}H27)cqSGn?3r z(tZ=wwvP1|!=NZ#ZLj@FrbQ}MR-Z6R!E|DjBUulrMuFnwf%6Y#f|pjo_?h@5$G z>)^@>TOVXE)T7V_blF%Jk#eQxyIBlzPhBO zrH%Tv5^7`SZy;AT{JrRKVRL;B;TLJ9kz%+l+>f>}{qDz&o#=rn@|BdEO1*^^D~a^C z_j_u$Y~**ZM?8E#_cNO$x>aUa4p6I zAusZ6ZV^MikOXO4`nAiHvRHFcQv|GX0%TGFZvi1IV9qNU{E<(NbfR`{Y{RTD?m)!ta>Cgw~pwwYJ^C>CQHPUvjH(>W$YNVz_bK)C5e%VQsjqt>MUuQbv)(Y-0vA2DnX36;3(mNvo|{! 
zONd7O1$aGFtyJ`$8rFfR{fx_Z?VQHM{qAu(1Q6ygtvEm3KkD1#Q^Gsg*_Cm_GTT;8 zq3sN2_G+1#y(JpQJ%zm6EYNup6jR63q-J}m8_b=A5Alv|ySnBqe~Kd=fl2JK?OGJA z-_pWPu~qJE;zEo^F>(E$07~Hj4Ch+R!x6kP>t2`YKH>}(!#Zf@oGV2g0XiSYQ?AG; zAzpK- zS`0^7;mt`eg}7Fx0sB!>Uren4gULm+kRflIzM82H(nZ7zHMGjhUNffrpAX2Kt6YX&7{m6L@3i3Jmje{xmn@~Wrj zudSpH1M}wNq=ZFAxqcIydOkNzd~D79RHA)_vzFNg(2aP_2tnbH;A#0gH%k-Q4q<>| z_MTWbP+(>07>~yQ)u_KR9lDAn7stbY#RQ@*QYA>I`OzU|B)C^8j74JkkG?mn4{4P` zN9!ujNBpk~cW514t|I*Ii1&v>Yp&YSk&6d7Z-wD5(-XS*WS4}^>P$=tqH}d9q0Ok#XNw>nBD~Jx35)GjisP%_f6l&Pw;PY%ZI=Ij-Hv~AAVQL} zd9vPq=dELzTPu@g&ZB!)<$=^te6=je4a&$Lkmmf9#h zefVpq^GQwNpYzvk22)90g+u4``i=r?QXLT>i$<}P2<|}qK%-P>43Pn8HgkEcbQFm zH7YT9yn^)e6j0An%md0>)Z+Qf&8(EP2_PN1!)5!Omfq?Q2H&N^QqPH#MxmX$&c#sCp`P0iF{fyM99lfRpb9A)Jf!Zb z^yT*#!=Am`!)DDRog|v1I5Rtu-);Tj%T-gzXHAFPma7Dt9&cS93P2ugB_pL%boaE9 z!lPbf?V30BZ!hZ1NljcDl@W;Fgi9x|*Nfxk9y02A&Q9_@rH2{?ym~-NHHZ>4Q4Cmi zWD;z835I=hWDVAgHcy=b`&$ASA@W_S{-soXLq8o1c!q%iSYpAhpVv#8_$e%HfcIDv zA?|Kg9`%W6_4EEnyLXSRzaia_Rb6*JA!)ROblDYfGlqIS4Rre(H;&TCJPoAF0JZiB ze4CBBl_@rmCCuP^QhVIcpBCO~pm(M-+SRJjwXYT;fwM60*4nUNHnnVBy4y4RDJ@!> zVqM+_H#d%I&(D|&UuwON&4o)O2mi1j z>n3&Ze~VcPZWd#=X&T&Y)!$%K6W)!3)Q$zG|v;4TpRjUar5g`Y?6Wq%j*Yk#!Us^YIhT{)Kx9(!hQn zdZ2oGtfULG1AN)qAC7Fz6=8Y0eBc}LiK6xN(bT^`0smBp{NC<;OVnYG9_n*=!9{QS zhE{X>$*Ehz!R=`Ny}OTq)1*X$E`A6NM047$5Io2?Pdx(hx7*gR^fqqVhzBojUsXV& zPi|i~|2;w_7=J=oHKa5R>QiaUBxP+%4tSjP)3DjP9cg_$V?e=We+#U1K zNV7-6Vmvv$-ev!tFhJrQ3bXj0Oi{Rxg?Ek;T6pC<{QDE|Pk)C`%iyi{sjBcwV~xd@ z7J6kXAi7i7W-zAvY8)>cG&Na*-`y<608PIz_?X-A z!5045-w5eK-|k5+kk`cCGc*|;9VG`mTDLwK=w$n|?F{C0Blyv3ZdqV_^= z5xS*7MrFA2c&?ZlDj=WP!L%v8DKFgf94GWCr5Etcx9m~e3bHf*`-y|&w7$9fW0K9@ zo8q(gh9T>$R6fx;+3k9p85ooo~K<^?~!>z~qO!)b8W!jc=bw16_UX)bjiD=o)XO-Mu)HcA7!b z{Y0w&m#ZF%0SizfY=}tQIoDYP@K3QC$f}cVolmIh-c$}h8f>NDC~CafX40?t&q~z_ zBU5x)@n7)g;3^!v>Sdq9L~pn5v8B5{THEUIi@ZNvDJH(RaMCc?(}BZng=S(+Bp|*x z*o>#;czgVz&lA5WkJFcXg12JhhP=yJ<6ZKlAgTupqL1FTz_L3vOwJkFl$%2e@jomk zZu*NF);Lh9p@`$HTL_O57;4alhF3_f)5?~LB{G5#BIa9g8j?=_Fb6ST|BXYkvf1!N 
zHXXq+#P5(b)%#(7HaQJt<{6QV--xE)?wl-z)cj zFbCRj%J=lzv#Y+q`a90!8yy}c4t3==@8Wd?ds3T=W zP+_;T)Wjp;SBtQ)YBoMXd1L{XV|-gBDtele9bMqAa~4oe%K7O%#Ql zqu1Zi&-b5^d8jJ_oCI8W^zSGuVEtqNeUt>mHL+^_)KF~!oc-vN#oB7Kivny1%V{F< z`UE2V#rn(kt_W(VNqmt6__#0Jx&JnBS6Ds`lv4v`HNp^iQ=d+o7;*hm|eS`u>7Y7B`muP8@xz{~R40mf^#GdESp9 z{^>j3$ABLRL|+?AuaqJzFYw06JXH6c4;II%yiXDn?budW-4+EhgtX*PRDAEh@3cZp z#SP=ZSG&(1?%Yj=6NjdPj!c5S7liO%`4ba#Kb3&lI9?oFub-~CvJUcnIRhS9mlbm{ z^w2xMg|=^SC{gOm`Q{h0Zx8cVZsmuy3e^?*B7>5R<(iL_KVWlZ#a$&l<^vf%dx&xH zr0VSTbbbL%$&i~I)~)j5cRQruWP=RL|L`T1c(VGuv5%&QhQ7kxdqpb74LiptUKEv- z%w6nhTE46dBp`TO|L!meaWt2d!wERSoqq{OyRv9HjHl7KijX%A%uKeXsWwYcv|Va6L|Ul z$KEF8#!>0@7Pg$Tln5)d86J~eH)rEzYUyfzMdU)kn*oNL>jB<14i&eSF!Qjo7rMbB*_TscM z1@%}R3f6%4bOZPAt9}`J!ABo{>XF&ivvGD*wS{lg zggebBhO@Uk?)q$dNZwtznc2+y#!9SNw(u&F533Q8gG}nC=!~&>rc1949=X{vMDkcD znw-)O@g#!(+p9Ohh*NXR79jzgt0cN!aIxfR_{~>x#ouKljp&xH33%oTfoL`*@U5E~Sv@colR zdIjt2M^9O29;Y7`IW6-B`-VHe#Vwjf3B66>J(}XSvAugd@(y$Lx_z%AZcW z!KdOZDu&tT%^G4I_dYxgii-``K%B-W>gMRde&5Moj=E9iDxNtUDPXa{D47|DcvgLY z1stZ!ji=a1V!&(h;b~8weaSNAgh{mRhf!WEckB5LKEo1h*A%%WE%OJ?MP^kOMi+=h zL>1Sa7tdYZka|q}zygzBH-_jn5`Aaa|9e~}Hu~9R(*VLAJJXosYZW}B2@VQBr7m_5 zr!hcD%3m>?!nm8-9m;&Z1Ai_Gq~TPEQS!^dll|3|NGVAuhvjbEUDO+_RAZFD^X^&w z$3lZB;9V;%tB~8#6~g@S32K-7kj!~9czK&d(5MJst$;c%Co7fDnd;NCp2)W6bsH+_ z<$p+!hTJIUg^1`Uev{$g_o1eS3U}@W2V`O&Ehsx{QncaE5WEvVTNwHk7PojKP+Eq2O9)C__arq#y!ssAc0-BD^s@;8$Bi5=+N|pJB3s0f=Yr@Ip z#1q-N1J8^6Jz5-bP8&EM?C}$Xk_bsm9vVr7o*s{M%44GOZ@8JiZxCt02Y=k-`yOAd z>L9vHvJ$D`P4aSZ<&@X!orUxsaL>W>M8X8`xqcFe&ca**p0}KNqNoji#NoeIyRjjzP5rNTBS3qLG1!vanM) z1SD3m8%XQqJBexi(nb7}~$I$Q`u2ROrmT33YH<3;)$0|LC+fQ%K2+NO6 zrf9&A&R%~QbZK?YrO|oNfN_AQ6(Ipjx%)Y+_2iB6jdEf*XriGUguyD|3Qsz?? 
zStg9La73I0VHP4jAAa7Lg&R->`JD3;RzkQ_hB#RY02xlU)Uz)GJ{04p?1vi=&Qn^u zF>lBVe{r{jVpsDn>IdmhlVyANORRgA8R`jaUGX`V>$#g$rb5to@nImA_fe(BdMuW^ z#q#;x(Jqf;KAi6#E-#yXU{O!Tw=+24M)HKf_9vwU*J}Qgcchg3OWCaNF)g_u3v^o8 z5r<+Iftpat=8<#t-k1=431vbl%Kgui!cxr?ag?wvK6GO~x8P=O+romT$?e~de{4}f zCn&UtV<7F1sz720ZGrH=lNE?-#gE{=s`O2s6Z>q$5@Gbu95mJa_LY|ZA0KuVIBZ>x z@w)8)v^{KXz5yg}%+|ZgY(9bvUwMryqarL^Jnlui(TsfH%S|sc2Yx?|+qcUBX<%E1 zzZAdzg&NwQJOu!>&&F&aKX8}eI(fDc2e-39t}@NppaSxLj5csY`FUz@N$j&to=Cr< zZ=9}1^#8-!TSrCxwST{Kw@5dFbV>;fL$`DzAgR)#GRVMC($d{X3J3yHO4raSqS9Rw zL&yvzwDD^E~VP0c#Dc1)tB}*S_}N*ZZ~KdqkL!MVUAY#0f?sr)<%) zp59YgWxyI^?;?{Q43Vd1BbIXJ#u<2$idS7HAuU;fOR)5c)CJrsI=i>zw&vQ5fxzw% zaIzutB}ZQCkXgHXSK$(zgq>wzt*?}Gg42LAYO**QR54!-&D+rgrZy;yz)W};#w5a? zbAwWu(2@t-YQX=0*HCq2#uw>3{3Vkw3mSqX&DI7To{nnSX27txfX=7K>=rgc&&=j` zM$O9YK*MLbz@G^+3$cnD_}|UAkFRnzy>z8a4dDb6X$6tIBjHS@o4^VaE*#D<>2VuW z7<7nfvAJ<(wB-TI;SGOQBciA#0v5{Ee!v96lH#|pDu`W8dckAF)0$#J9t7%KVewS9jF%Iy|1 z)XB04hu~&a-4Av4j3{B&<}2gyoS0TnaV~DWb2A38;kHMGE1L&wJOT`fD4H;8aRjN^ z7Rys>l3+hOwU6j5Ka#2m7L1y-J8vynAA+-)^;nc$O)vh=B=slbf?)K4m0l%!ziCWs zTbG6{XV9z1H0YCODF&+YP_C$B-N{s$H!kkI$OAf}1fqY2g#dURJWtaAWYIl|pfl`Q z{tv1_N7lliOcpClB{>FfoPnuVJHSeM^A*=8$7{tZ^l!3x6AUsMCQN&s{&C{Z&cqbc z&A6%`NxeQ|^*)GHL_w<4|9RR8ywI&Th@U~G#QCQV#Fq8Q=YqniLlGmwQ9q}Gzgya5 zv|FwY`m`Z|yaplpXtA^QkoXC@MPDp$ukZeeb_jeTq%_{T4WW2i(Q$Jq#RAtf14VBz zUt5NK_9!4QSS=8jWyjcj)5tS??N-IBPR$b0z;&j_jBv9dUF`5t6}CuyAVDtB(mj9~*gzKdbD}NfuXi*41+AIP92op|WiRB?y)>df;k9mBFu+5ANd; zk9DU$kc^$NL;jeY?S?PSS1xGXP{KyZv1v1pi}9z@!7+B=V~dmz7KUQ5PZVxB-S=sPa3`ywX?yptNUEq9$gADY*FXAiC5 z9}oL?<2zm!k`>@KA8*!);R`0lU#=6#$8bvcc+8O*vwENmHgF-Iua=V8FS(&*Wqr&l~!TbHx z<5#>&BM#=0+$fTCOqg$IJK@-G3TiVPRdP{#F^iwr>cO)%*Enf4x~`OB{J>UDdyAi<4TfdUkPA zH94wTk<^*WL@zT*maTT}Rab{GqJq*c5h&h4|3W2VRc7ABSrNKVchZMvpC_97E{XAu z@61)D|H;XC(m1oaujSr`k1sb4?7j|~9&~58ZfqJZ9VvoRw>@|?oZF}s8V>ZgkuzT^ zt?va^dOE#6YwOgO(EWqYxVk_VemAP{-GD_~p|<(BQs_k7b}rZqwrNu}??o5$^&$mK z*V)Z@Awe@w;S)BX%PVkcO{84|1Ol9LoDp<&Z(u`2(jQxw;`gB 
zgZpPWH85SkKhjk+R1sTdIc?c4J5-aCXWqVN{^x7rJK|ueE&4>e-l$7+;bv59r#CU_ zxr9*;WORT+O$0(njeQ^vZyA?BJN_a#Z`r(<-#Y!HPNC;QVFH4+$QHVQO{<2t&w&c9 zqw`yg6_SAuW{16GB`bg61*^%^z81%!C^5uVxa`3JgkgJam9Q)^wl+!Cq;HLx5ucjH z*|}Hn(TFEQb+o$vyLzu|qBRd{M^%Z`fyZ4VAYZ3srh^mRt!RRx8 z;^b_)b9@DmI#sdcKGn}Y|IlaXIKYb$@vflJd!LDNsUD6W&s9q%n=Zs#;hf(Q_ey8? zX_s|B*xOZjD1wl!QNIA*m+q!NqQxUqEqQE*M_4Qk@4!_OPX8l?_P_`JsDCJ?o_lGo z!9}NyfaN)dsrAV{o7XeS($`u3-YZ|+kqdF7$QW1$D=WI+q4^l@Kl%ESz3S`!GS2t; zKsp$t$F%kOV!s)3{V3<>9}3ez`}=8+QPOEuKO;7n@x4QKt@M6)5e&<6UQBGjah&@z(ME;7jWh+ycwVFJqNJNQPj z5Z}79Ppae>{a7fWV){SNKpqK zt%?V7Lmppohqv}Tv16!U%h^11UeCY$5e?<8<`O!l`!>3j>M7&{Pg?dX4RwI)M=ho3 z`l|P}mJe+i*tAxErE(jNK{d%P>9i0U+O#5vj>p|~wenY_p80I$uT=KZZ`ww|`EyA% z@@4ZTzwx{u+K6bc$A{PWF%YLBGkgQA34@Ew7f>XREN*aUU{^Ldi)SS0BY(=PwFN)u zK_caq#ycb|8UNy2i=6GNDCq>eJfj=whtg3VxI*)nI+PF{F88kwok!D1K*$)n#p?#_ z<|7~fqwrVptDpZYScVC#tXVOlwr3I+j}iPtanG^V4+B4ukH6!~gD5Wj{%sQ4tD!@h zGwmHqzE#>k-c3h4ZDRl)rqeotocIdvXEIHE*^3BU{|>EdZANK3^)u|WpI)>nFqU_>x+b4d@_4pHXdm4_NSGGyR04%#Lui)lWKjh z10BcdAKrF!p3Q$WtK9VV^bkS^UhS{DXs3!rp3)4dMo_TrIb3j^rrY|k9+;blX5MQyHU*9UlVyw zJaw*w_)AE4H;6-`p{WquRt6RYoYgy*CBm63!se;8S@}73* zD{zyge=hCnW2`X#!mqdk`Bz&)az!{3iKilX-yodTztY-+c?>mQO};69?kP8)@>OeN zn%HExEKFoaI}Ijm?5?oj9O#o;SpZcFJMz?9Vqx+)k{jH?AHUH5U_*8<6^b`0;t&*D)cVD`TDM{-&_aeT4l-6#P;~Jdw0e-ZLYBHnsMm;Ij?XhdDNYAb1dx&-s!{opXCIxB_x)-a&PngQ#cX_~L=Ji&@6!OH~;ZEVi6oHcts#{N5SHDDj z@{yxvqq@ug>GU?-QEvL=`K^Hp#_N^6vX6Hvd;dz(@;0CNgddQPg`c2;wzS8dUpXOC zKqWw%caUOjEWJc%F7=nj)KJxQTGO%tVxJtU*wcY@eQlPPSLk88kHVtw*}2y=^W~@e zdy!N`da7XWm0dwk?#ka#4{UE5bk#z_5 zvAuCrJ~YrC@7{-MIH4m3W9601!t-)VPyk!N;PK(xiv6;=5zp!ucT+qY2`PYz{XcE@ zRzC>Y88iNU2d&0u0>?h%obt;!LEW@Xgi5gflcx&5^0bS9=q~E` z{91!SyE=X?rE#HorRxD%MGS~ybo*SVj37aDc>EUwqt8%TY&vU2njWi$ZQYfB;q%KK z$&>6#YupWB#~q}1;ds?5B8b)dx{1mhTsuD9J&=Cg&y(|C#X;0U~=dSN#CoWJrdL4U*kVFBMs*N6jasuFA0iu zcO!Y$;~z*MbE7Z)w#oX3Ne6-I`3@jXDKLIZ1~#~i-;8Vkq3Tw;tO zj(@pR5K*vx>$&fcs&PLI!;g}`I%bps(1*dM?!Ujs+j+Aq5hWOlQM2KlncDrN`|(d= 
zOV_jc`VT$eoJMO4Vm*S=3rpQIyUk~;$!LI8xNv;)53!(64pdTx4D{qmgc;2dCL%JX z3D?Y*{!$QZF)GH1xw;s9+S89$3hno&KKAyxN8TKA1GEi;PrJVU;UFAo$u zV#!bX-734J@>t%!d5)PLqWu01rf}$IvGy0R8GTROw0FUO-~BM+4Qjn%?r)PwK6Um( zY@A?@rz2h{zew#fx&V~falEr)y;AtC77PsGv!l0I#Y|_tMi+I#cWrUSufH&fXP~PS zvBJ`)XY=u9CpRzI^OwYeq?SI>^4Iv0ReoJHN38g#U__-5CImyYU`XeC%VwXRI$`yY zi{))-RuTR8?)?isxHc*BfT2K&j7_CsB|_)`HLE7*$}&(mZD)Ur%e3xK5b9iL{gME) zu95tH04O>clETt&sEsvgwLxbXbwU731eo@HmrazeRH=P=vH*qV7;iEmcHP7Zee z#?LArGJ`yz26Zly*0} zKC>>J4k0*6X@_euM&VtgsvwsV(KFzyvtRd_bzPv8&2%H{{eUCM`QtrMaZC|V1w-&R zVbHTM^077{i)(c3kc$2Fa#1GiCu(V`!p4I60Ee9z>Or|UGafy)=cF^ zmaDNxJA#6hpWj&B>=T$)zgtC;Y6=)(U(pMLtM~ej&(yu9QpuN%npkIyx?|&o@CEci z^=$2kQ=V@|#La;EGeZoa2ekWR<+i)#l6e1Ti2$&CdhzX!-rw{cHrSUzI`V_P9C%nZ z)^Rpgel})SRk;;t`+3CqpS4lej=NjXBdZ^EU(FEIi6b-=nvUN@{Zvd^v|N1X80cT0 zH54&y5avDqSMQ4v-Xbl-?`dz7VsiV{GCvr2*m}j8)4xnh@NQ^O>uW|pY{ox3YFS&S zVf&zDNSwgm4xy|3Wp5cIu&*)Vh50_t0=P(ZKMn&m>(WHB01Y$Adsn$wQAX+$TnkSBJN-^Muzn!q@qL31i1$DxY*$GhZj3beC5cm{@mL zh;cP!cB9JvMG$52gZoo4Gz%$=IPT>sOWJgLdEi|0xq1Zh#4X)K2rti$|JnH&|1wZx z9NL@wlNT$@;ektzDHYq5SQ`3an&y!)|DAu0Y^DD>vg0*v1OI!d%RW*I>YXwuj^Pyq zFa9avv<#{`7}0!2#0E;4bGVic3;S%Hx z_3j5;Ya+6!UEEd@5O)BoaPj}Yw^vKl`{N)o8B++UK^J?1%z6to@ch?`kO#Tst1X<@ zmJt1*Q+SePP%jlVTP8qIWSnk9=m!}`0G&6Yg{cG@bn3a^_ZDbYAgqC_MU*6no~@tI zpO#NAAxf~#8yd;sm0dS=ahYhUH-Mc`-(H*@Dm8xj9Mp6t!h|8IC56T`)6FkX6WKpq zq^~EqZ0Mx{uy0OB00Tf?iUr5B-$6T;v172|{YR^&0Tm>Pz( zu(#;Q@sm;f4X}U4Hgh@9!Ec6KfW$6ZbfT9rUB*w@jc&vFbLRZpj%NFneg}poJpO&c z?=HI}*QnD~FFh$dF`1Y5ry^<9KOIi6_ZmKHBVadswq+iQ6?PazLPq`s@`IAyM>psx zh#VFO^a49I+YEcn0-h00hdC!p&cjP14HDi_y}oGDORq^QF4n#q zGng87A0@`v^=)*FrgdcQtsvxPk!iC^3GwhFIU|`$#bZsFm^{^!i_%g6f$WPPCl8Ph z_~j`oRo=ucOhOWm$@u2n)CN$8w2rj$W!PofKNTOt*7>FIe!o8y~+Mjkx~ScOG%nI zkyYS1rWiso%XpsXVkZisK!vbbuM?#v<~yVolbJc7IcBhss9x#tc^h)lwCg|G+)FQv z;Klt@KbFPy^QED~i+!pswY(kA%QzO?%kTE>1x>$wE4zemuDq&%ib0#9zxxGy8l%r6 z3J79!<6?_eFSDJ7fRUO0BN5qpixd0S(hD#z{bTvypN)<}zxKr{gP!t*a-{VOGyeMy^!{8WJYl zqiLf3#-?D!O_W8*D}B%Md*9?QmaeJl9_cmU4o&|73!V?s)kEE0WXeUM58FziBK;o7 
zJ|iMB;n=!Xo(4IwZ3CO~UhU_(w#<%wYiX%Luwf!T{tQ~R2sLdeOTMT8A~Dpign^M| zb_?NYDrqWr%m_PXm|ymm%U82`VTwK7yVi4RqLy$9z`95TD!5k4cGfXxU>^_WJZ~c32(=JvwSIg<3D77-Zem$k{_QtD93mGtFX&WQ5Y)=^GAyy3g~=G0Um{;SY`>WxIi$Pl;?GH-+LBT z-8h3)%b?~ybqN5&6}7QU3WyE(vg`lve$mcEox%X*R}$(|f=wQ)>6?pjGFHZyYi-vdorUPOOoyyn~D&UeY%gz(bAz~8FWeVMCz2J zw5E2dxp()My?}JlwLm^j0TYi(M*D{y*VlfCwG(W{ecsJK+DDf2JoJZ>eyz-y>gP4da4Tx(DEoOd&{O=ZVSL(-(a-o2)pZqyk%(2q z03jP^o>?tQ($$^c=J9;L`FUWXvcU-292Sv}u`&XuHVW`CNvSeKqgKq*X*(_oZu%Vq z5}<(uLoQQKk&f5LJv#AaI<`MaB7v9eAF;D0qQ)^RO>Th;#JzNsf2c-ZAKH@H0ls2&w> zA5pOp&rO?u&+Ob`_CNZFKQ-{-rK7qK)T{eo^@wKaa~QBMU5A$UL@}%@@NCa z|6e0#TCijfk9-N{jd^x8(`x)YoR@~qO&AG+zy_PMR8i*2xI8>ZhzmYJr z(5tYgd#h*wTGcjr<#4}Q` zUocKco4+C^QSgEJDP2_Jf$nG)I^YdV@e#7f=vVhXejF70$ni9CHNJf5HD6Bq3IIH| zTXo!sCgT#gxX^swsx2=HD%m}puJ&g!3s`%0l=u2|80qgw4>~83wpELrJ$6GkNN_2m z>Y{fduJwDz(&Hf_C+w{1-YSu)4wbFKcKa!6nD>@_uXwX*6YRtNQQ3l zI5`(*x~|+`lqb{u&8>?0{i?DzUH53ZiN1}3ZD~oM_s#%(mCnrGE+<}vl=jpuH1Jf@ zfILpt_2u?HPQN}K{gm{)zlyM$jsbf|iV2}y+WS5^r)4*%5Jx;iy4ZpYUuVVjA~UMv zA|dE1Sm9XbiE1#ri`+c`?fBc+F(mmagE|YzP{{(^jWxf9qtP_TvD`6m0_8cbU^xbY z;c9N#oQOjHDGU81DHbCqQ35{oV)GMx(?(Xg&DBsITILNvWOty5`4xeva3x=D9 z!3rqY*v>%M=mwE|2&1`sPJwi^=>TXT7q=fFsmzy!W(?;P0Eg4EoxumToTW{{pznX* z=-s-z8wma5y1-K*a`zQ|4Z2#N2Ye-Jg}2?<&gHCqQ5?_quNV+y>P*uYn>*FG72UDT z2?vEKH}k=RZqu9qh$41_%xexb142v^`r)O@UkuLX%Or*nTQoW`^QsXsd#*4xV%;a^q5-&niv@@ zoIS0CDW+(asFy~5%W+t;wl=#d5)a)E_+j7JJB=jA)p(0p&%`s!0hU&k1*siS%1bbs zbjVkGJ-OVMesVoR%LY|(^H#@;ne#FK28uNLe!A+g^c&mflRi;uY<^fRuw->;B$;pz z&>l%U)nru!15Qqi0hZE1f`yKaJBiDp4`hNqMkWnPo35r4oahz&^TSM-TF`dj!1W=O z3rF7MGU_+nVF&Yc>x=ixZCoaXxfogm^`8}km-E*K)_#}yD~FzbQs`4!+BN15Y0~uT zT@`DuEwLOJ+&%}kXK#&+;c8yqit$40(XRR)f}UU=VydMU-zZ05i!Y# zxcpY4L-fiX;=Yt^nCrof_SFM)&5zLBZMeayOVM_``q@Jc9V5xYFfd0cE4H>8a)s=^ z8a3M+Ma5FjZ@~^mq0ezNgp&F0Fi`=AR}=^T7l9}MmOe6E)_W_uWz;G)KOF&JSu^z{ zFD4X{!I(;jTEM)F<`r{y^jMqpWvRNC_BTo+-~bb$1MnyN-RKMEF48gtcalLNLEi-c zJJQcy7*wt1 zhqd_%f{yl^R@)R{%!@DJMAk2tn7XI>TTu7@=6&x7-sEy5c-#?&DlE? 
zglspTjz2&kg|&_z{%N`?9IH3^|821Hfz^ylGb|Dm&!o%!M+emKfpwN9kn0b8ap`U`CFfy`h3`$hZkqY0lsJHw zO6_FXTkIcHTF%)lK%%JU%VvR6CH>cUYdszxp$?kuQj~+wJnuOdW#yBLJp^tBe#n9* zL2s79BF~|hOjqZ!!kr}wvSjIQmfsc=cfVJ|d~wi7WBplmg@p@r;L%m$E7WTx zl^diT=*KaJ>Ox3gR#qbXZ3CInhvSP8p8F`dydEK@A^CWX3cV#;C9|AX zS4GV3?oRk89&lrCaB6-a#mO&D@m2W*t>p$m8`kM~+XfH0kdnvaPY`8|b_<-G8s#sk zi5M22>^_!YZ?xDzfw^8T+}T7@9(nICYmUXW_OGA1**?uf6vH^d)Y5kGoNohu z#E;8Q2cOSFMHjEGr@c^uWh>FLHt%9QWm#w!Ye~|Ve3^vi+c9b`EQrVEzFXE~$iQ5Q zg$!2L>b|BR^|97ReFY~D-{tsj5#b!pNCXeA@$ga3Lie1ii_Wz(v{U(`T{|i*N~Q3&h`kAEN`#qoiQ`j8L5g zMU%S^ZL8kp1)u(oGlxzDVTIKK==(!cwV>v;rz&b%gBio08YZEQqAn%I=VxE1RgB+g zrsagb9mG{Hfe}jO&-z}Jz-q1=)`3H&+C4}&H^7~i*7u8!MJ=ze6LTrRQW*hSIPA;R zY{Ky~hbL$g9oD_XKb(6Ogqc+gp0SPzbcVAPZ$3tZA9N{rUh`WYpUti%*|w2wdB_mD zxSNI8&53b|q?N;(R-x}uL~$6C9K|{iBw+%L^(Wn^cwNEft4HcV*5$neGTF4t0$h$` zKiY9z5`6ZEtm~fP0(XHDU3HD?^A+Ca_wO0jYTN zgU}aby={2=a%1xRPaKlQ1EdzTaUNuilY*u;@ZxLWEP+3sDobT{D4k?M2*JgNtth8?-J zMYuIfKSQPuUAq{4$KXGAmuTJH4Q9VZpD&=at`O%o@#ISv3f}(bqmi-lHPhh2E)(FA zfKdZ3Ej9QQgAOO0J}mH_s~dGsC@#Q$iO#_PoSWcYnn_7x+k5=6SUA12k?ho4DOi~T zJ4+ic*eJ1Se|C5yxBGSdMH#%)Yj&p%OPPc6=Y3pCjnQ!sWIOb2)|r+_2+m?O3waD7+(9G-C|dQX|$hj+LgI`SnoSBYdibp_t;(J!kp})jOOJTQ8j3I_ctp^XK)s&S^dLqM)kdRq(as$ykaW zw$b;tAzjX4mx*IT;e6gL;KsZrgKO3mlO-pCg)_KyvP}PP@z!G|sgYNH8~L)MN!rJm zkS{WAodMj!BM3ofwx`%KpVkYkL3@XiE;OBf=iQyILC>L@8e^ILTTa*4*Orm`y^eL! 
z_tW*~GKk*j+^IM%=eaW;rQTr7LJ3tFC8lPF;>1{;lz&h0RZ(I0HKU<<5s)5g zN5hZ-OaG%gFKVP^bKzqqO=$6x?>KYSXq6*gu?ID8s#SDZU0}<(5^fgibft6j1*!WB z643*Gtu|xVCpCLg^AqZE!Xs=G$k)YpqlI>A&wOFz=_j+fLwquxH60_Yae+j7IpT3fWpSt|xXEZU3#ar#yOz|g@g7EA{ zf&NU{G#*4Pj*F)mmZPg{56(b~tmT8TpPS27%*j|`5a8yN17941zHy7I&xi6ch)s8G zub*v#klFp1qhQaR@caXdc;z2&+Y5Ulc?}YENZE7gzdiddWi`{R6fZIL`_9_M0$Dq@ zn3(a)kb9zB14i#SdgrfVNL5PzG_cqKCY9edrumm*b(K*hhBgF7OTZaf2*Pg$zsGY| z>Em_&$^)4H_jAK6?#6?QiO4^F{Bc4)WuwL8o%^%I$G;$MY?$_)nRw^R^Q6ySz_=9k zNvj;0zRg+IqvoAifO|($h~CI$s&Z7PxZf*HG{vnlyPFk1pVxIfe$f9u4l_RZMn{o~ z_~D(5*}KZaq-_B8MQ-}~Z29i2Tr>>!Af6?4vE8xIjRrV+HXTV}LG#j&Qn5?f!KDAdEPtOS?NP%II@(WKj)jF5`wWF}{V;@m~WM9ZLXmU~xvnLL*=a~kadoe^L z3v)#G{9r;6f=-WEmq(jEcKi9tDy{xzj`Lb(VA=BU#=(D!uNd65#D&!s*6z?nov|d- zqnJ~IGeA8rYunQj&Nb%GljRE%`ZX1;eQ5J`7QGD`m1_V3OYgmEWVU=+TEAqR;e?=en`!-rKO zsM(-f0NAf#e3#V(hoDTwX(MNhsEUzfC)+FgAD0I}E(!)(`Tq2F1o*Gj#q`==tBWyS z5a5!4as!YN97x+#KI>nwX7thD2X8%A%77h7p81Q}7+swf zm&p%XarQe}s5)5QkWiL`B+OKNZKsTaGUyW~SGuxrA{&rigb7hsC&N;;wrRMGIDEpZ zz7|ba`iTJhgMW(+{fCY-cDIG<3)|{qg@yWfJafX(b_*y1RWr@*$!DT5N=0I1Ck`>2uQ$>5tY>!%x&E;l)f+5hk-UI=zp)fnE7GFI&M^ zx@BAJTR1^u3%}m0!l@}aaNh#z^)W;n(>{)z1HAY${^4fcMl%#%>Al0;5y~Ivh4&?| z8BcGu>B6TQpWIj0Aq$Tv0lZrD?Is+nSPbA90LzR2#2vs*psgZXG;B@*ZW04ml_{S6y?-4yeEbm0$**+Z1L%|uD~R#`8a%xub*_Io z4+5;eghvamhFG$iu{-C^1C9@^oGQklleSD6fem(?OPEpieOX)HzrvSR`{D3fSR*k7riF!@egl@at7N_iJwM@~iKko%>|c^R zUht~SEGkUXRuW)yvH#-?@)!E}_i_2Z#vQ4o&*RE|z~1 zSl`__y) zNs|Yj9Z(Ghh1stEL1oGr^jHU6(*cu}bPUl@Esrw{u>wt)WVt$p2JlJ|HP%7J6E%$i znQ|A?7Wbw^RiahpEHKaq5#uY!#;swzYcl6>jAuI|g(dtF?BF!dSo@tX%8;MoZAjau zKWi;I0Q6&O_iS|Y4tK|Ic8B{dyYI>XzVffd3!A>&*2Di8x(tW>hgwLktplt=E;9bQ zn}8C4Px1(+5o%GQnS5uKZd#VF46ql&eUL5rnr^PS^oO*azxWuu`N(=PRkPSAawBPbyu|B)8Q4=O;-zRwBfXpgj&<7y z#>%<88SH@eqy4IA4^uTMtaUoV#7EfbK}W>q#F8vst7ANAFHfJSZYf${^xgk-U)BWR z3m*MsL*{z-ivzX|KuCT1gejz602zDrA14$bTmGA0yYB#aydeHMOWgY2I5=UCLymdb zm0mPLn=K1krLRrtYI{vCXkobJ%^G7G#s#4tOS=qH<<$G# z9s5zoRJJf1GJW9Lg(4U1-N-guP7)SYn``Xo>KaLYq~Pid)*2?H7O!w%A;XRQ{ON{Y 
z9q0$J&DYcYx8Vd7;sqqnZxItpoWF1huz(WoCw1g~_+oo}Gl6kv7sg7@1n-cWh)A9V zy~EAr9XX6lrQymc#By}#pkiw?|8W=IeP$qVu-oHz^GELWMy?*h5b{aKtMW!j@MSLq zsJ1x)fkGqMxj{fZ-ZFL{FzWy;U|0w00N80L=9X~)urW7%i97%dxi$PD2R<#+>v4h| zp$nY^7vjRpgTG%}H=GC`E!yUWH}N67YpEF0=NF=54&D0vva@8d|7i~XSv-qyDG#g2 z>BCnv!g6{m$NuT7=N`6iZxB7=-s6d^t4|SZ_B-VV4%0tl7dNNrxxmH=YCxg5@xL|6 z7o14H#B3{}d$iyFYs8!&Gws*-6CZ6KCc~$p?DC;HppUAWt@^#n2FoGBEGn+*9W38i zZ)!rTQzrpS>XE7#@cqI+zD^G|U&-tRe}2t%Cei2-ySCmakGz-$*YIhyMdbwUfVw?X z(gBlA5XK|?2(03|+k_J#>=rcqFP5*h7zmF4r!f@lBKHo1y_F_#&2wP2^9Ls~T^&t- zv(#i8bAGdH+`2!e+WgXoSjbTNhDE||%b#`5IsrggSKZm>g`52qS;kXm`Mb~RzF0_w z5i`69?u|LH2y0q+L$r9l5OXx}!}K(sq`<6CBz2rKPp7E-!k**;vB>IW?Uwf(lZz|wbC(Y_C}rvSGe5rWna z*9IeBofS+Uv(Cg&jR{~TUy}-V6|g6m@6HZZ%@GEa!1a zpQvje#j&&eZQ(!aAN?(xp)hZCz2@K2f? z6i_d{Lf`N|vbiMLwDrLd+%tN*GveyK97)Mb$MEjOR{YxPnD zV;%*SQ=-GNc>(x+M@5>au%>JD?mwJ@5A(}GuExRE2BA!ggTB;^F zKoxM>cvy+|Ei;g$gesD3eX{6Jo5&lAoAxd4)|2l(UMp#LyjS8>FL3w856xCX3y{rC zCgUMFZ9SQHR~#&?QzqVJa%+SvUya6iAvR|qTe)RxZA)^Dvh@mzo``28&iEoD4d7Km z$JONg?K7o^nOPx=2P_MRb)FmFug2Qb2TH`>u+Gx6Uk$|w#8cU97^O|BeaG)Oeg+{a zEeiNhhSm%u6b22_-^&X;iYW_=bs@%L!2b?K&C3^%V3AFYjtovv9D1ZM9zqjBd8mor z4Evyb^%d=gU8MH_iGFkP!EsVOmErS=Kz(N4gR;ZkXYT%9^WlM?UpjuqC`6in0xjZD z8VWsh!yR}}N-6O8Qeg4sx@g;IM}sDp4mjJwsr8YZG@dh`f45oWwOVBJ8V_y+4r3d_ zKnu2gnC0VL0`zt+)@$cA&WY6ETNH|s01${2jJG&LFLdD_)HZ5_BA9UaS(@b1Z-0ww zo6rIEzulVdH%1R25mHiFclDC{-%Ps<&z3{Np}_7u@Q6R3YLqX3^tQL+OwgIG;#)p; z_MK>sq-gfEU1g!y)GgaV3|Cm{#Jux&T8LpVI1Ikwn-hNGE6^h&YvtA0ZJ`yG)Ax~z z#l>6A5q_nK5QPI(7As5duEtcz@TQR$%J)%sdft^Z6~xl%B@^cd>0?nY5&N-O9)86? 
zO*oe(Wv!6d^p}40vjJi&W;Tu%ry;s1H{$@{!Jgch*^~j0R34RBo*xY~P2@z3j zB}A97!>A)>U4eLbngsq))*oAk8X~K_yP9+^extvW&8{5R?ssLze;|Y7BD0CPin3}p z;&4sAR2OFre=rXF@Ch#|x!vM}BW~&y)##=Jr+A;^6-3Y@MIg3JN~zrr8h9j_PsK$% zTqeaZx)srhGF#=OT^Y+Z7AwQiupvySV$e}P3wopRDvQ$b=-rk=d{e;QO!HOV9j~r9 zKk0amwM80hunqxfJ5nBI2pti{eBY7K;Daml>x~?cV>zx4t%Ul3Ih!v_hJ@?=Kz~v& zK+Q;T4-YrD?`#P?-6kLl<-Hc>OQ*G9;R1YO0P$q$nEnqec$rZzDk&4vIN)-8ZKp%d zB99Z!3Sl`!Pp8kus&IkCA*6f+S)=d=42k_8sl7b(gP06&P^;K7Iah<1pWGhDbAlxu z8;}#WeOZ%O_k#WQ+oU(Xz`f_#*(#=#x(bVh)KWTE^96ijmANQ67{P>NL7y;-{UpMi z?p8Y9$u;l93x#jX;yaYc64WVH=ojj&vGJF$pDT%@ehhq4e2bD5qF<{klf?|J)&Xh|*y-ytY0J78P1W4^{4fZ^~<-S^F^ULj}wP@Cw zey;4aXHUFA3VM}|w3NGtG2bw*Lq6uPXUt)(k1%U{p~q4oC#9;8OW#MmoRh2{o{|+L z=mo`VBREBDmZNvZDt3%mYwhzZT|g<#!1;$`G0iI~80p1Mwpe^Xc}N zz$p)i7M(akh8ruaAG3KbKi`kk#$7Qx3Ob){gha2YwqEVR0oBB2i?Zs>f(zsvfno6? zfU0I!E-d~=I;vTVc(|2@t>W{$r-WF{_$QA@*4Bi`kLisG7ZWFqx=Wrd)!}{8dV5^) zsV+DHU^O0p*w!<(1Nv`>ALVw7a!hMXzr(x+Ak&l)IU06GHsKrL5?~Z#aaE?q`yQQg z;zSSofY}Vh=8ky)kY!YB$$E=82Ua5YK%C8kn#wO$C5A4Vj6GX`BW<)umbvF(1%C9$ zvz2HIU9MUa;XSsATcRj4JK*(wu-UGjiZy%76;n(#Y(xgLj>Ki5iNxj58W4%SKF*Ulzp?Xx@(yNf=(6uw(U*z2LM)S0A(xw59 zAl*V6&cpDC6y)}l!h%>(a?P)|T<-Gb1uF3FIArV%k?G$5;OnddqVBqV55v$kL-!2b zDM$}PqaY!T2qGvLq$o9X=YRqN(x5bBA>AN|bO<6H(lLb0koV_%p7-8!?>*=I4-9+l zwf365)@OZp6hpdb_B3P9cT5n)748-IHUpd@oJC#zYSM4MyNO660ua>!FfvN^dw>73 zI`Tg@%^}`jS$4xHT^C4OfV#^^0@4pK7=9ve@guDA z>z!H&3>+z7$&vRR6tQ5#?JV51(c)KUOrHFy^)uVN>iHfEQjG_C_|gw3d30O?P!nJ{ z(B#R>NKjYLUx|L!*qLuDCCj#2$QwgoJ)P3F`G+vO?xEV8 z=h&y1R0?amx2n-RQK?{74lWI#dL))Iyzs0{O9;sUxS!<@g0ljrkBc>llEFm)H^>cO zWB&mPVdiLVoxWN%0azeFpQ(P3S>fuYpHO!+Apj?~@TT0_N^8k$)ASvh9PDY_^_7#D z|3bn1)VeUDi-#7d6THRTGjHE!f+v;Ah_>d{J|G@EKfP)D>?%413FDd7{oWF`T1(Tb zZy16DK7Ia^t2>SzkW_hc{h#OpW)!6ULVFgdnW_T`Ij~BJ%pUscxK<%(=kQfWk!f7y z2C{zrJtMqnFw#}J(gl6*c-qZ&xP|o2$+JJ_V7s_rH103|9L&)G9m~L$UYffuy+KX%U zzCWT{&o~U5b~9fRd=nwm&}$!dNsXs@Limw83C{@?vz|o1u1Z{@&hw*t67+%$ssM7* zYFz|f+?accz-a3N!w~9EnCzzrC{qni$v`%}f1xhgJLCWLfdN-Igbwsb8^?$e(&?T| 
z1)(q}hrW|Tb0P8WssKM@A4^#)VIIEUP46pAo_tuVl*jRoZ1M4)vnZk0Z^_q-SGy>+ z)963<`Ld7#Y!ndr|LlmFxCt?2LPjz$C>BbxE^%$`2WYx~#Z>Wje?NECl+r3FI5 z_(6*!tXT7Jit|ea^sXMhb+0}o9&EkMm30oF%p<7Jd`LjgQ03|o@O|8J`TSRUS^Eiv z?fV^mG31xe3rjD_7UTAu-R;9Z?b;-@&R$A*GY1vv_?bA}C%OfN*Q7ItS?J@B%J+g6 z{66Pjz=qNS^O&#ns{+Yw=kKCyLJTe$`up{}Z`(p9n0A-ly=pwzfwZ1x`*I8rt=Ye2 zbZSP<0{iOwAtr&%QLSq8JPtjR)XiQ8cTi(ORj-R8XZGS{*=<6+b;h3?OFj5dTtq7l zyI^b?BRZlLhNdqo?LDq|nK{6mqW1uzUwm{u*Wd{sJq;Z;RS?H>7tzDdPBc~bX;gr+ zuBqa;8_SFoHHGCL4lyc+I)!ba0CrPU6Fo3Lx4iskXW|}KzM4#Fgr%26qtM#`pilZ4 zjW8rXsQ(YP^5F(Bdl%uy{eDzGw7-@76?HziDg~R0=QmOMZ3PfFIfjT|N6>~Q@XBGu zUK;Q=Q{c&gat@%s$Q!l-+frHH`=gfPa|#u6{==JNSm?ZM{dwo~Nz?M5*Nf-MsZ&!e zh@KyTDSWHY{Tp+ z=+#!RK_Kfa8Jz^8mp#ZPBtzx1R64WIocZRH+0}2`Z?G4dvFJ-D>yrb?6Zub1+E^`!FSWDibhIx#F*Z&ByTlNmt&KOe8PG>=KYn-PZ=b83TB{#lm?^IxIb2d_$^gX=V4`=i|D_Oi!r?N2i@EkjP!BnAbz13J+UMgm4coq2WQ{+BW<)V#*mreaPL`9ty6(50g7_vki+y@MuZ+?S z@%LMM8Zi2S&HT)|?_;_)dpw((_S0&9?e-;Bf#zO@TGY@uehLXralw#W1$}tWzQuL* ztZ667eDD;e%(OfsvxvP-2R}?BSX}d2+frC(O>SgfJ_!y|atl1QH{l3^ru3?7kt7+y znKmY4bV+6DKEy<2asz|mMr>1AN~y=utC6d16;D5^WSmY`Ubf#Am<=R?mIm)HJbUIh z5&P5Z>M8zRh3XT}`GMN@E5|+Mw&l9PC$GVTVL7Q<$B*UW=qiLNq`%lY^~CCD@8a;Q zpXk?`r*7Et>09uUhZXaktap2+xc*FeP+@ufy(y1&EJ#^9j*-vg%SqB~x8F(YC+6%6 zpBKB?Fqf;U~5Om{P{{#^67+EGc7p=43smbPrVSUMjUPf;c^ zwP|M-St%l|%8ZdZ&M#qi&O-OZE4}tLM?uU=G`dTcEE7|qHe-|FklAo)SU_lm9}d_Cv1E9a(J+ z3`^&!(xP4QB=;2}jE_F@&P{j!Y!BTT9r*hVpDH2#1}GC*Z$ed{FD z4OYq_H;w@cTU zITy7=(6FfoF<0(pX+=^CS2+&#(Y^;)c6fl6M!KFkU2G zMy})MtFh!FOG9cTFZ_st$#tT_<}Em;?|f7KffqitQ=w6hSFV~=@?O)@X!tYz7P}sn z-d;=p;j!5|&5Lh%q&iex+{p=Y~> z`k;~2|G3l&)f5cwwf&IOsDu0X#bhw-`RV~Q<79?mruEW&p+_P`<#WGuvwD8ML0$$3 zPa-cg{a7^>A|0k8Xm4c7cAHhC7d>;}Ez$}}IJhfp@bz%(*29fN{B`){MpYIH6& z`!oJ(qEL$8zJQ>9Z>P0;KvMSp?x~J#t>M|9(0B^vg`Cp@V$z3WVpbYf19sOe&Q-ZA zF?3-otK_&TNr|t}N^Leb7D~v2Na&!hV!wlQtB_;QhJUVe|GkQqhsHZZvyzeR+9Ycq z2YQkY-}_$5hrPwEoRMas``db24yaiPH857zHIENd%=>iL>aw};H|YW!f^5g316$1* z`-#*UxKbZuN{Fxulg`L{sf4eQb{`l|{M{jmCk-B#!B4w+*C4`B-9rE5}m-ghO#hD4&unp3*CJk{a;&Y2z)VE 
zB42}H!``sCg=lHjK!^m$QfRcscch{Kq0gV8m?Mhhjn8EBXSN~3Z1CF=WP}=&P`k!2 zT+i-&UG8kni%Y}i#UUc;BS8^#H{Bzlc4AWf_x@!Wzn~AGJI8W=t>R!AV)fgx$InXT zioN|d>fnNCK=bJ3ObSo`9U{sCM6^^&qqFK`4w040_Q3666p*VB939Q zE>Y(x5%Y$U;%VQzWruMNHMBUC|L2F}grY#e3d=&z) zd3)|WIl`B(uDTa`o?&4Nnv!|(TbAgq$|^o&TwL9H*VPUDRHdzy2hi zPNLu#xfBV~szs0m#+&d#%E59JJh~cLJf(NUhB@e3i4^t5<4m`D-)Tv> zr9#&QTqK^26p%7=a5ND>NU8|9|N5PUR4G2=?}v%yk^7zkK0p!`I^W2c3cN6!IK=P( zZ_suKqWwko0AKfESw&2gWf1in#AgrDev_fa@Z^=za#D$8&-Se%&tvv!iUnJws{f2byJj{vbhY?1J2b2JvcWCFQ`%C;uF9da(R@j2)7x{w>beanjfVEThE$)j@Y zx-b+v z=(bczB=~1^tw)dhyTt}ZUg=NIyMVAy=tmQ*68AQc;BRKJF`f?VzrKIRP5gEYZ*JW8 zfOSE72o*T$j8>JzK+pL>k(0qK!%lR9j5aD}n_fZHsS-V~<0 zvoQfblo#nPovz7_#g^h-82GMkaR7U4>jAWy3QIc{i2J}QVs^d>9s-me&^q%sXjal)SwHy~Slr--_i_ki$a(WMvLU4^(IRX@?+60m5+ zUuL9DR#sNpCecBq+*j27+cj)QEwh(>-MGuaNV1M-D+!U;6ze+g1d!))gfdn*-~@}9 z7Pb2NwNr*DFv9TDy<-Z91Jp!uc!qI4sEYD6Vk(ZD((t~l_#Z+0=D5xeeB8>oA|}!} za9d`SU5Kn~qsQQh;bKGN)(1f_->C4>k<)6ej2DE@^70IwMIfzXk0Y5;dkFaCUxV)E zF;cxHiJDgid^K=!*}=L!f9aZf&GM&d6z~(J0(tcWimrUfCWS~$>%Ke=H~n48_4&Mx z8ksidX#t4<@-mR;91mn6pDe+pXkai>ztWGzbV4vAEEJQ4! zpy2z!#LQtI^Z>*K5DYr+%|kN(Myn)hMvZ20!VDs0! z4A?{x#ys-ME|{)0f_@prkzIlL#}H)%-=5tzDV~h}SUBN}?OLwekau>tO#gI%;bNqu ztPt89rj<*D!sV+}Nb>?$Uo>&}lN{M;zZ2y3XSV)%`G*2x(Sz+Ki6$jbuOX#29U&kHA-JKX zZnl{3f&QWNEiMb)MH;yEvj1VcmhP>PR9$;qzliPsZH}nhMAufvhPr&MER55Kz!ppB zBC)*KiBSyX9dLk=d>Xcc^~Iz++8;be9h5!s=y_L`(Fpi18Ia9e>MI17)19l^&}cc{ z1OmFuVga?%`FH8W4%oZ&aLvZRg@3ITNLHoG2071aAO&}qb38;#)8VDp?G;#%d<&72 zKe;Cf6~&S3s|LMkBUc^}g}tk$m=T|yuNl!r9xB-pn%3K){5=+>If#2_4@SWE|sD>7ZB868d zMoP#jV1n+iog$?CNR?efoPTQ>bg3vBtY?@Mw#|I?=6K+>$F9$OXT$^H1PJFTRDW3m z1(td^vcL@IW!72n9VYIXC6!)Fu~016G;(;ku48+ymV1*aHmN`e$(N_@`|xY$$FLSN zmWPZIYqwG1U)BDbWuDUI)f|bE;B{JxNf@6A^E&oK=2Taa6>j zYZ8e;E2q@F{866>>K!=oRyHC1R+BNbZVI${{;L9&CsoiLSDPrG98mMab2R5HdRT=i&5yV+V0~vG-fl9P+c`;-5=u%+#lGA${4Rai02Qu! 
zBiC3xYZAF>h(^hGN}a9O(Lcu zM|^sucMHH@V+=1cv}Hkb$)@vOP!EB2;uy!25Xgo~728c0i;gnfx5Rz8~W z(Md;Wy%UT4^T!aMf)LkWiS2@*`L)lx`PbdcIn2IVQ2X*^Mf9wS_u(vtC+7G&V|ACj z^c~WORGUp1rCh7bT>L!{&tWUfp&iAJ*+#BA2a3wsGBZwM{Iv{UxF+_E_TC&AdA`9r zlU%z}DCOa^t64o6IwXw-UUbn7Y|6s-;1&DN*|#@(I@hF@ma|_hjZrE#i5hgl3xhRm zepN6t2>vqETTF@Rh8_VagFL$Ld#akBT`fo%-+l8^mI!NtV_3d2I!|E(p{JNh(MVk>Nu6_^P$eyG?D!Y5>dW7Rw&%vQ?^Gb|IAYmTXfnou`Y|ET zf)z)EV=OEwdX}XYcNbOsNZha(-er!I70HRw?bI&RR8R7V5syvUb=$cEWevtx{n6#} zD>AtCDt3c1oOAg3PuT%lDyQ45D-8jCVHAw68%n3HwFJ*vczEa6A2F)d(B=2#e(}Z} z{gQy0Q{cb-SP7)B<#O&*TP?~AekgT_=1tT4ehB>1z+TQ8EltHGaN-5yE5DYo_Ak9E z!r;3T_AXL`R+ziFyUFjC(B|Rj+ofbK;W8RAUFVA2NEsKj?{KW6Ld5er_t`bKt{OY#7evUb{n(%=2%L*; z15QYnazWKhz-_`^@~xz}@Av2V2gn@#n?`95UpN58p>KH)^S6ZNQ=pQbo9*8n?Y}pg zUkz&E*{M)86C;*69+9_o)?S+EzKN$4%`ds?i-}fR1WS11njv|crN5xy@Emkf$}qPC zV^k%>`kn_zJKr#W$7wvwXK9l-DElH`mjc3e!LmKJKPqPRIV!g3Cv}gEJR1|NlmEoE zTNCpF&Uf%sWK#zy#n5q4EP8ylM5W5uIrMgf^oeR&7nE-sS?2`3A+!lCUm*vrnR3#a z-(xi6=O-LcD`(g_Fe#9f3aop3-o1bwqI!4fxsr&gSRuCzJ_BOI;9~iaeD5eadT3-G z^-wC&D5=Rj%iaL*J57CfROHAYcrzk7@8}JLk~oD%iC9Y+%HZmtKyJiC;{1bJK4~ek zI&X>I=vI#_#!ko?l{G5^oy52&FD+m2AoZRK6MC7(lkg+oxib8^b)0cH>IBTG!g_wC zh>vbVbw(`gqtY#L`W)~M){$;C4}k)LP8@IHw@NeXwjp~DKu(cNRyJd zsIB#wyWq?~Ub`oECVl?{EJ-_DKVqGjjJ)_%paeHd-a{v^6SEUR9PRPz#Aji^RKr!b zL%Xok1sq{lJBkir$WV8@rNf$SNn(MVyuqLMUDQ9YU0P$#x4p(DBpz3%_5inT(}*%# zH!pIqF`?49>c96J7A+Hay48M2s{1ed_MeOPID@%bpyeC(!H1dcVhF`lz`*^uKD;w{ z&^_WZ!HDY%^eJ}rv#&9aR(A@(YbBfVFpYEdc5AcjI+2m0_Lii8NAFu(s9^!SY{D8Y z3{IycIRq&f!9fHH(d&;d+h+MS+WcFsPs;hZ)y;F~(d!KynWW5^4Uqj1x7%wUFf+7-> zW>0BaGliwwl+P@X*E8@gS`jT31$&x6@0F3e@KumXM*APB{gij|oy)|paUumn?0Nuf57*yBBk=5>F2zK9PGALOn`5AIaGC?BxgIlryndf(V~cuV{! 
z)jAgKA|%#|PKauz+e{>|m#t0=;|p`yQj8L$xrF4QP$Y<>rPaFAdZnM-tq(tGZe?I5 z5jWQgmoNB`)psqU>qUV=fm&{~kGzag4jrHQq?O8MI%AY zl|z@ZrjA2I^v?N9`G#tS64u_E2We6w0TzAp`6p;zcjq{FJ@Vw6>VQ@I$dmPCBMeil zdTzHh55c;}-J250;{#pJLm}`&$G!X%2obl(JLB6S!zXZkUJsOHjIciK;?UKh>(gC= z1|C(CB~5yGL;)W%p^dTv+AAX5mKK8It0*RN`s;PQ<*W8J%BkKo)-~tsyBn6SEsxh; z+nh2Mr4o!xx8_qufRnr*yWhEj5NE9?acN8!$) zqwm(t!!+NNTehNoHlykK!7)nL1D^B#Zj~hu{_{(8|I;kx2><<^jFXH6OMLP~Fk2c0 zWI}PxFLW)C<`IrG-H0MpF=t`SQ>XW02 z>-IH=`r;RH4t&Utf;w>EiTWpd;uQ1Mp^wWk1;gz_^spbg1c`fN9%iN2Qah$|bv{sA@1mx4``UP; zjdNEAo|@bd{_uD5r)Mkn?S&gYV~-wJJpo0WS(5kaeq5X;EPoOG^8X8sRN%z}u;#`X z1UQ8?#$)uP=(H?LskyVePUe6Sipx`ue%ouTW7lPuqHZnAK|dDQQ6#fR;_5sH6#89R zB}=y7{`|ON!%6~(-+(?@zh5Uh&)kY6u-{ z+@E*{z6S2YU$O{G-ng|ZUV6aIw&@;i%S8vYPMZU)5mGS*=*Uu`_h(@V zH;3u7vIs`F|M38SW#q$Nc3Qv>|C^ZT)C6ztM9d)AtusX%2fAu&A-KQ9=|w^pG9lgZ}>;blzd;cO($!5n_e z4pW5IZ&7lDw>x}UULYSvV}N_kiG3$`UerB;oDr_dLxNgF6XTjZtZTjdu2sf1qo_Vn zs)LlLbuU87XA6p-0VbLEFlv2#Xs+BkK8j%h`Xdb7O6?|agiE@Zr^@e}0QbkC$#N&0 z9or2+g4V6AR4^+!uE|kbQoCruEB0eCtf8DbHbpcIYwb z66i*q#tnr?Tz;leD#5prZNGYr)b9&TGVj9*K@XyuuyJK(#1Ft9Z&JR$n~ZkqtRXVn zq2bTbv=G0F3OF@?gPF#^O%Lh@^nI9+o;NsudtZ@Wep{pd#{%gLx3aXNZzW*Ir;-%ZHrp-cX`Saf*1EVa_$cqx4D*>M!Tm-bO91P;dZ9+57$RYty%{&Pv>VznYc zN9ata-4U)XiTo<@XKCpHXWce%NQbBUj7Xdx5POlGjuC_SZd9bi^;d!Q99E=PZZQVg zac2~iA~U(&o9>I4tFwmoyAi$%s9|g2^5)E(rAdx^a}#M18?3@1TKa#VBaea>7lroE z9RRD24haR@5L(wPja`RvmsBMiWZOEW5~;qYe5=)%5qtdZ7U}f9J!xySb!$CGty=@# zv&roE`X@u}+i9j!?gQ2tHU5?B324QOlrKL;tu%qkMlUdT`hv8VeI$2xoe>`D!ZPB^F>CEQ%KTtl%|ytCln=Hy!2~BtSJP*~Tiha)dHOwIj=?^K zV8r=4Z|Sqr{b%a=Ok0O|6hHHR(bjsb8td~>JjEi%TbwVh%R2704)ZonSLc36_1aSG zvu9ZKvn9`firCvK!>Tt&3Mc`=@n`jLXpYYBC+k~n_@_~4`EE}k9!t%VNz4&1 zy!orlmuX#2%09qN;i(l!mpof>^GE^)js6sa>SU}rW08s5XYCEt*6QeMf;jjw2xOmPIyNM(fDfCM)ybDaY?X6f5?!K ztv*WSQsH3Dqb2TQdGXoGP}%i7e;ap(TvjK?s)X0_N#}}9O@~sX4NbI8sN2mtmw44n z5aQafhJQ;j>7qB8Y8e-+9qmW5jzk!jdq;ndWcNTtH6!P~zQ^pQaNT2V)pwY(S4D*K z)t-9&bU+6V1c(P;{yrOm#A)ktkMkr<_na(y?o9h*PZ&_BZmO9Yti1$-31U^n$bQ!CWr2aOqPg1k`a(t~D#VYmK{Ts80y$bEYeqs`B_n&6_r7<%6v%lLa 
zeQmRNjAH&K#8-uj95eY#l%Vv`w!E`CxHVZuo|!;r0oB!wuOl1oJZjc?=q+(q(}C3! zYr@%#@#Z2YXB#mFm1l3%uiGqnHO|r*)-rRpkY)1}Y0vKHDWbV|Ts6hg+ASpaKkT`8 zt`O_SX(8v^fNl^4W-TmSQxd5oPTptPJ9yPXxi;I(b%LSJ*YAoI3fQj?TvvL2TOx@e z)@43a2zpzJu27~%1}kswbTh=bNM~5BU@1o2Wn&P_pGnvSy<|zZurd(BRX)AtA&uqgimbvtrAofg12;TmA(& zEhtXEoBwU(wy{mDzD)e(#^H_No;44{*4wBH#yoEttVs~uP&JRMsb9EvFb07c-t&?Y z+S5U7wkw|5%qcP(HMYEi<>+6I)Ns9?RMN6<<7DDo^Cj3Qb%D`k#qVUknnLH`<_d^2 zuA(zD@aNhY&6g_uQ!0d$u3VuTZ#caCZP-{dKK#fcC}1ri)4UNfyiSvNH0&?*%+e@5 zS+qO;;?TmVd(Ezn)4hH^L>WGvoV-5SRdm2uY39>#LMU}?RC@I``37UyCL%3JlR z^H?xP^F(jb7?-U-RFW^r>^+D4yvIRwckWCSp_r$c$RSip<0EYBY)upRTN${dC_EoF zXStcNZZEvV`Qka(n$+?bCG~~s>*XRTG98dMEnBlOMq}jp>3dwwCSu(0vw4qilSkS*ggQiZ zY}GFap$P;oq@q%2bKAxr*csgUOvG**qAWs@KbaWuiIWh0=M;uEAY%+);OA8)00{EnIqfD}5F)z4aF)H*NGVzbV(yt% z<|JJ#;Y|p6ljum7u_gp|E_Lh^X#g`osW4xRLs7>!rUc^P1^K$(k&P)AW)3pc>BW~pZ*ixhhz*vznkesTk@&b*`Ly$7W&-944DT8+x1~h(MCR(;iOlExP@Bs| zn2iz~!_j4CXos<@uxjzmE?_WXcHigBL(CJLqmaPw9!MR0dGx$)r{%Kat*Hss+2oI1|@RxdJ)4^ruXM7RpJIQGIi@xOmIhOp9w{ckZk@*>P!JJ z3m*G{xw?BuUNuTjCe-N*`=S;H0-878GRn?cruDH_tr#*C&LlSO(IAHP$~OuQv|GeS z{@FSdHF5^t@v#ox+)@u}R`aGXQ zc9%eXFP0WK9VPS<^7ouX-}dW@_m1y#mbnn~gO50OZZG7V+{7P&I;i|Tk&_#pmhUbt zdd5|u^0Y?yLS~)S{MyR=8?Wvc7Yu&3=V3*EbL6{DTO6Vf^QEW0WvKWP2{d2?kQ|iC zQeI`SSFQ8mpZ3%ug8lwLUXX_FX6(OL7G*O>XJ@^(Xs7NOBhzsti#YUTQ^~;|GlsRa zozTeLekokqde}3*FQ^(j5?vrUC8DC5pb(EcN?$WWfr~tDhB7|{025QG%B(d@#0Whp zvAO5eqFF2T#n@TqNumOyuWw+|P^h6=k@r|8Lc$!iDeA;n4aGI5{e@7hU0Xs8)OcHh z4$#QN_e!tro)5o%z$h@p>%Yz(6lg6A@}%lJuoJjYFsjFm=jnsOn6lG&e;owl0I-97D<^Z28$ zD0axz(Of|F}VNuD;u3z)_UR2Ezz&bnypH(?_W?v z>RL#98l39V>q)^bUBSYU7xiEQa%x`i1!27*^R4*pZmCq2h=aGr0s%S9@7-Ne{KN9z8sBR|uj= z;%h;~c}0|c&gvz2MfuYd%{_ZLA5(M1G>h;QJFbQZ=EP{<-6w94T})wyg@(8=YZT_u zG3bRkFe*j_g2q^kR zCIv)TV=$SZ%)+naS*h6Y-p5rW>#w1|Pz$i8lc%I#a8tmhpw)QS$1p>nVzafnRP10a z>ox_*%@$%6%83X9xR(oQjklnP-+!j_2oYD`RXxtnt}AfSbYk_$cwhUOw00$qfQp!C zB?xcF2yJHFcCpIn?m7#-apL3H)$FRUm0OEF7E~VQmZ`fYG%A%JXap^wlIYSA%0R-s z*^X-IxEU#vRZea4%(1!0xfxfJiE11p@Vv9{tCSZb3uFz 
z+IKG{hX9-U_kULgMNA`tJEBd41ZN1@Ch`l=olEb6Bi8}^s^?t=#P>`DH&@TG`{Vma z%`#i|(;U*Z%OM`$rFn?YrI~m7yhi_6=zF8$ST&yfK<2{d$rpvL9=se_o-8u%Y5EA- z9pBUSA#6=rUT2k2Hd+yr8_D`aK!0Xf#^nCj$D!$1uYrUolkztW?o0|P9Z+{;9#MM% z?=bOcJ!~0?sR*`=?nw$_=4ujL8i`I7jV;!DV-=$`CnAvuR2%$+9M2T<7^g&nC;o%x zx=a|xfV!&xJL8vcebJ+}s0(=#>`k>C%+c8!O_W7oy~zOT%7hWKW`l~|Ow3}?ZV7&+ zr1X14U)A6dy+y(af$LJ$P0pf0A2e~yQMSx{n~6M|H1OS$5*uM$xV5jQDjC=mxcgUk z0BvYBOJi;0uP3w&dE}=#M(&U8<(6Jm0wC^XLOcb~8K=eGSCF}FJX(YkCZITPb2X&3h< z-9D~fRFYw7`KiGjWxE#t=R5vv*nG>^Y|<5x!*K1MnF+S227>w*;Y614b+Vg%3B z!QY3q>|@Vr!@NLU{T(A@bMZvMr}5zm0pFy@B@gd)$pm8v%Iue3kInuu4E)8BrXa3y zpH7y|gf%WQ61vIcsb$h?OqQ%oo?wUM;S;TWuJ7oAnu`C#%+VbkmZ4N+FtjAxCo zEi}PKL9yM7P(1MLHK@4I@_ijE!#z8Xo9$kaF8BU{RO}qA0LCuvAHM1r@-Q=`hw(qb zRi)UCO7!1i6_-VK^pQA=W=Uxm5rVlP@GMyg`=suUTKM|lla!x+O#U`Kguetp?n#qhq#e{xE{G09akPi z0pY8GxD^6cg2Z{liJ9kOS`%O`0knv#ZP$#eaZ;(hiL$z#s%M^98&Zio%WW#$8g6&x zNMN=TF@B<(k$Pp0z`&*9_wU+~oVJZ7xVpP!$>KDq?X(-X!7MdR(iffdo}D|i+_}Bq zEVC)KqWc6j)kBj1!eiHlRaq$@@vEU{&cuPj!GKZLDPd?yQ6T*3k0R0Yt&C<8NdE;l zW=3~5-*?n?FR;H@8b1>yJKoq=7rHX^vg&N z2DqpJdbr9Sw!YKQLdy?JF{fc4Jw1t^c|@)Ac11Y+-Ua+3@*T%h^B91f{TC3@bnqeU znzcG(=>N12=a=8P*{I;#J4mJXlx444kx#GGcp8@di+yq;`>V0*e$HVKm^0xW0YdkQ z*jCQ%qaBPP}^b1$f@f zF5-v+QnhR^PB^M;zIVShqUfh6{EI#jn~RPZ{+FOS=H)Hh7j$c=Cvv;?Ywf~|JDzId zQ|oDa#|<5x3Y|B&krZp|)m9-u&xub`>fw8HZkd0`mHoaRia{r0bFdrHfHR28QspUc z>gw`WfDNw9|61}1@was8w|^!Xt+mhau{}}mISPgi&r?TDn)hyJC-m>?@c9^NJr*QK z|LsZ>eo;<#eGV7z(!I}|7?z;pw5Fgn$CY?*t7;$S2b%SCRaXTbxbdLOt=1#heSasY zIG4dyx8ILK5pDEl6?`QA03?hxnn`nUo_;U=Bh0xWC_2FYisJW`7l+$YQ;o{vPKLf>)Q?X@+@Iqs)eN_MF=9SBZ^nJY}slKUh~ zPc{*JTcf7q0AI}it;qgQ`_Q%bJTBl*Cw<&Z&npy_~pN99F}{0O@vS#jUYPsiQ`B zAXhmOvs0;Su#Mhrrpx7xhmvoHNG<^DkZTq?P7uh2_|g&%oTN}fEra0x_6-(Pg@w4-PFI|V=` zg8`KA#GN>e64D!q?*K`ge9OHF^1*m5Z=cOaEw^6><{`3P)=H@HyJH9nEOWhun3ycu zBcoUiC2iKNI8wLLdQ)imkt?VCpJVQWsc#V)FrEZqn}1=~=I_MkcrKX9hTl_3jM<;q zdCR>N-t_)&hDiB0p=pV*@DLC!<;h_A2ig25T^Ja^Xx>r+nd73{nXxkyBY^BH 
zTDXkF#hliVYk(FB&Y>{dY6(8xBZ_f5E!r!pjqp63$3y&jyDaDg2*b}=YPv_!Snq66GVaR}T4SB#Wsal&eHix9SnZa&UnGhbCMvyoWqbus(SxT!Io8_>x-2Rc zu;8G2AL*d;xmqmDJ&*<#ym-_0sxC(9IK)4m*w3Q1B^(InV!!Q`Dbp10GB2lk*Im zoJ&#fos+%N1jlEHdK^JM)>tpLeV5e5`tp(BV@SCIv2y&-vpxb#RCX(Qz_?4Nn753X;5M2)-(yRs#4p?1)9LJK(233W`K9~)K~6%dO8pT zrDSiN(PmJDU8fg5%qmzcRuTQVaIqa|aCuQY*Kd@C-i9L^NYMnsp(eMMTDZs@J3fMS z=6o2O)jks)X8kDWCijk6-%L6J=K6Rj->VbKUHc0kSB2ba6NLeAD5Dwl-yCY&a$K}) z#AUCf1^e*j(x>o`*hlyOg<=0=>i91M?8oh}Eu^eJOZ}9Mmf&(+yOVesKoQW+Pd%u^&BojmP-d~xGs{EfY@^?PHkx?qdp*9Sd+S=N0l z?qe8;uuy5X{F*_I`zA!U$V9Vl*3BfoL7#+d5V|r&77JX?qv$R)j47&Sy|7Gb>13ix>-WGBr z-o4p_%CzNY*oSx;CIF+}Xd>XIvLnv>N#?$9pSxLf!{pVAI`*Z~ICd(DXB3n)Y%%}`UmUDN< z18)@b!c|QsMXuMMEAq*jE7@Ly-k?X?Peqb?&Z3lG*7NZOTcx06xNzjkpbl=1HJbO% zyLP`i*4}MxAsiPfv>NpWvgyihR-dm>EPioohp49U^o996sSK!QZ~Ua-S->x^P%~MH zlq9|s^0y=OyG*UxLC~BR_8+Mc9HFGq?|Bu-~^S*-54^13iwAe?(xcN1$x=f*r>SeEGjlTb3a-1c>t2&q|~`E9OK zo8WTPVCU*df$`2k&$8)`eECrM-C#DXVw>2TZ|TI(vmWUaCkMUWg1^@AksnYbEFOFG zwVE|ic6gnl0E05iEM@#9%&{a9?Ia^SRI;@$Pm89cp=rEKkt!6wtKV85%yAk}5Z9*5 z+Ndl#E^u_* zhd$2x8>Hu4=HKVk6i2q$?4P8CO*w5w8L*Vkp%e9Q=!!RrUS=oKqNBZSJco_6IZZcA z3J0|7T*>uVyQk~v<k@P1 zI~rJF??iR`YrUX#;gQEt=_OlY4l9f^7YDS+<^IEpr@!VqG0po9381Lz0kyK|tYj-- z`qDcxvS0fQX^L53K6X|9_w+P(ID18Z$QJCGfI!4< zlTT)64CRJLY(fP0%`j)LwZ3``k@4+Scns5o%>~fdrQ-x~2K*deo5wwx3* zwD4s)n%L{Pe~O6h2GlsjWFdoAa#pcibU<|c8hgch{o5h(RWGmg*z2NLROJeb$-lo~ zn}$i9EZ1WnD7^h(9bv)uc-oPAt#V|<GH`&i<@G&9 zI|<}&p>cZmK6)ig0bV`nkR=^gNwuwk&GVKJ2KA*EtYhEuv(snVQ04z87^^I<2(w?r ziy9#8kBEN5tAs%vZhKG(gfBVG-c;--`sOen_0iGkChnFhr_%2*b@q7P0;SJ739$+8 zMByj>I;0kMOT^KtPrk%NM-3^j_3@I{U(o<;{-qV?7_YPml8cjj-AcDYFH(lcCld5S z{g>1%O%`d!jU)~%l{0ni%toJ{;-jJ>UUdE3Uqbom1Ed1qwLrhfrhns&$w$SB=HxSj zvGSjogw7+o`pxG(dPCu~c+tBy$u%6_9J&om^kpwP2_)jJ)sHyc!6QEtzs$-Um&3ZE z=m?XIeDUJav8mwkgi7>o26|yo7mC=|9wED_uY1=1o+3t49{ol*ON8d*5O}ne-8tKz zII4F3C7zkw^F>(Eb!;y$P(5zxH#XIX(5J>HxP>z}DaBPjOtO`s=sSBN>%U~{sBE?% zfbH}dG1Bh0=KhFTdE1WQU{5JtpVadJ3vMCh#O1s!Paaz@`@DgCuqf839%dm_spaX4 z2W}oAt#mmG%Dom;UE 
zgZy+LUcumrD@{1}r#13NH8(;gE{B%}#no4-ON-8gq?yke5vr+M`fPZvNad@qRReNe zrb-(2O7nzL;t%A1;)QgTG$)yG>71;Td2*>aGZZ>@ zI?pzvQqFSa{H%dDewa7y-M{pnB2!qf^wJTGZunQmEQLvmL4?8rBs3DaDz77le{OKm z2N;T6!8i&Lz7UR^e!V7LL|zERvvu&hBISc z8uJtT6*B)DBB_Rr$kbK+Y~UxzzUBg%+5$Akq+20RRXw&%wo}_PI-yXUHJF!gxAT+g z-9(KZl(})sYr@;d+zCkLMeFyln%5auf14aMGnJZWR=pD-e0cw>91onH!u|>4LaAHZ z%^@CY=WMzx3twAbgX~_U)=e83&;{uy*yQSc=KUl$Tlslj_PKgcaLY7I401$To!W9Q zc(5RXmY>|I;q1P+jv(sd%h3pcy;+AXTxvH$d?GqwK-+UU0n6u&t(-kw)DM!Os71I@ zB91RoKeVo}&hV+V4^SLG_O+_iep+?M%-PfNGrjxW(ko`JdsBH)1sXbW0Q7}rAdL!K z&2`|3Z4~B*RcLGAiD^cgP56Sk)l~|{i=R?$l8gNzz7_9&^#OnmttUuoFFtU)sr$k^CB*;Onj+|2+vNv;KuWn@$@%;o=lGp18P`m@XMA_ z0Pw|uiXQWueB}*d!Gy)0Y}WQVQcAm-j+DH^xoGI`K~9*``W)EIU@}a@t5EoFSnJub zq2ZI)aBg0(-`ym2d~2ujAcI(ohjI7jKh7b}X)2hR(Bh8H;xzTgHXgm*>lndCepq$WkgX>A!Vdjl7TEzGF7wf={A z@tOE8L8Gw7^GS+*j&s(f5DqTrzo3gk@jNRaE{P@{&yBWQ3QN=;*)oIj^VJa;Uh3pN zChQo)6w!#R0fNROXf`tc&Rv7)Th8@A+{VVbT^_DI8+?s*yN>aW>DWms>?fOy550Yd%d>Av$p5?GADD zO|f%U`|^>f3n*_RgKrh5)C*S1-zmytT!&?zdvA*0AACmoKUB~@2)=~_n+l`~9H?H9 z-P31fI1jq%WD(nFoEB}^0J6qf6A?x#Ry@`G1on<8Q+r!AeY#l*BEkQ3Rki zLD?DNB~{OsYtGhnW7$PaM!3i*$V^(G)oGvD0^)s3Fq>(s;XC;zyf!0t+fqfeIzgD4 z%^QHG=2XC=QOzDid5vWn+Y7dwTURd2Dwc`BEb4e?gk9GU-cKV_es>lPb%OsY<1Kbx zDA8ayddH&Qdpz-pM6DmO(OX{&`?>*1E12eIk1%H?bW+a-4~;}UeydsGDd%MtwwMyh zxV8u9HS%u_3}RVy7}1o?`=TAP&a~k_#Q#Xx&mDX@TQGXR#OoZ}r4W&{nfu3dx%3Tv zKkyu~iCWPCL5_7yllBevCjnYAx4yprA8U>$?clFT<*I?z_t5;eH>}K`PTEIic)CeZ zGYa;VC(w%=1{euEKQM`dYj3A*nG~}y)A(nxD5@Ho?bx8%^@uW8$&_}<+%SZ5{r`8F z!w5KW%41E9>Yp_B0ru_^+_d{#?Az4Rp^j$+d0(EP@6HF$*OHfEvqJ(VU${Ec+b_mz zJiYV{F>I6LiVepXveBw4=)X*C#6DxxS=I<}r-h*M6mh362zXWt`{zXq&AzOL;ZKfY ze{9G;BcAZgN)Ag0!?Qd?EZodC`9}A1c#b7nnz6bcZf8tBoF4FesJW6Ku#zE``o=yf z=PrgyIk})k^%_?QffK-s7PJ(Na|-*)h~$1NWT7BjusvZ1_qtvf!+Sl>!x zOy1GK{t2O&xH}aSN!b6w)p{q*I&cK>q2XfgPCodO6>GmZY`ehmZ7W=h-HYfmLxkwt zBr=*$l-Tc4l8X<@9pxih*$788Kmg=aRBhAnyAIcK(a1*pOt-`vN&g^69=U5f*Q>)~ zK=i-WqB#hF)<;vR5FmnxlwH7LKIBfdmd0hyN=J{*W-+H%afKFLur5sNo zs%t5jH8YJ^uhjKPO{E7{FFN4L|eDWH6LrFtJ8oj_}zW(GloB#Bm 
zTJM{9dhQYbmR~*MP?4VE*L3Df$ZM}dl7;*@l9jk;>WGH@wn!WL)bx!BF9DeQmN#|m zP_Qzjb0bQWsrs@A!G)FgRx;(sBRuMBnzf!}I?%KmsdxxOvWL6367`-rlafmiX@&b7-!P2c)>dN&VN&O;;&e+d8B| z4(YeM^P}`Z(50SY9y@2Z4*8hE_6N$9uInP^_`(?X7Qr)8@i)vZR-Q3n5`o;rdB2Gz z(nQMCy(Y>h_`mw5lG$MDzOQuIsv&#VFr{%Rm~|3C2ItPRqvBdsV4@UM5yzrNc}vYGTa>>wRRq(PCY`;tDj~iSkKK%-V`yQ&*giBLBM1Dk@PV7_ z^SaY+5p;mGkUCn#A2Ncb0gZ@ggK%;cuBDCDMsh!Zj?yOO5Q9sbvP^_nFUbA?v^uFdmFRtPa-;u9N9P18 z6JU3zB7%Gm`2I8oV;6<22%W!bJ=quuXv%YoKa21c=X;oH%IN9*K#;uQ55B2Q^$uRQ z;6m(D;k#YD>;gZ#>Z}aaIn*tncv$marsBalEoajun*8@U7%vZ_J?%1!CdlP&PhDmx zVf#x2sD45%zQJ8Q_GUmc@U+h*y2j7s5SfJ+tYjt9gzSrTsRSfalxp^zGd}LU6;TOr zQGR-fZ;&v`oA;t1Oq9db4e4wj2@LsomDaKD7NlyWRH1C~)C#4~{JS+BPc=tSuR#7P zrF}ickGwk>Hfg=iIg?0Kg4Iw(w(W52MmHzYWIh;xY=Pt1z^}*>yZDnF&uik16#j$T zNAFpjcbQG zXj`?mDCcDm7{bFfbjs+Lu&hxnc+oc*>zw{^R@Ffx}5fOn8Y^E zsK-z!QSI(K)hXWHKOCuCCL^u}Wxv&TTwxjGkkRo}Yr@A)enkhGC7;YnZj6r~L^xaB zFdk@3eq5pA`rkI|ySRz1nUGTPGkrGCx%0clgN=ryStlbZ2=Dl^kwc~kISBIOD@6+N zKx#lWSSnuU?-Q|_(X;$xgx2;xF&*7~?gn?Kkvd1>Obz9FeM4AacGn23oLP{NBCRJ#>rhkgXXGqtsf&lX#7`{xS%6JE!Dg{%i_ zeI|vdDHW3f`W}g;_vc3Mh0}zJ&Nc0raO?GPCzGT4q~Du@Ja4_nFelR|B1Pv8tyhMZ z6FgJ!ug5h%_!KQ0N&aa#j{O-uS_!Y#+En|dx$hKvujlGAX}=}1daYNh=9UcE_*Ui3 z8eQMEJRZNpx#ttfNrZ4to;tGe6O%fFi06Yu300Q3nu5ZWGOvbNOKFIExT;WaBFjOI zwI9>ay)&8nP|m<5{4jJp`k0Fq>gCgQF9{vdg(^AQ=GPM;p%(lCc*Ml=RV#SdK9t)A ztcFQX)mF?3;Y3zlm2crbjBPT+suK!BaB`=$yPjgF<6W!slMS}|0P#0B11cWSnKPvlwcCcig1| zeKugJ=(QGK(mLlY+7e^zLsA)6?Aqv*x0?ftf%Clm%`d$dcmokWyD0h4u+P{dRG1(S zis$YT0qcy5=;=U5X@1VRB;=r<<*mLqQ8?LGz0`AT49j%QcXe*PI-miugzKl+OpB72$_AD5}2E#H?? 
zHTH}G1Ou7Riq;IR3S{l>tZ-;{-yA#cnU;`f6tG})n{EAVCO{_o6A$BY8-hMWJG{kL zd1voj)Z>eyM^gCdvT4&=pkg|W78mJ^heK~K{+*QTuVAJy-d*Mi!LBVV-oBG&(3~C);&;&!w%&Or*0wTfW98F$j>lV zZlyvOR(|Eyi5sU+8cQk`T+gRz2mDT$ksi~v6;zt8qModQBl9^;OkUk;mVyUgEOk{e z^ZZ@W9K!k*urRH67XqeWo4K!6b3>lj{fA^bW?r#S2itR z@9DB(Po2VB@IDpm+F^2Ycl3n>1viZ!8aw83?b06~5Ay9hVBWOxQ0*iM<_p@1*(F&f z#KbutJUdzv-Td^~K1l4W7T(>F+iD;rTSu_Cq?&8D==QML<<4#p7up9NZ1xW7!_7lx zB)g#AvzJ;`Y(QvAJUy|I77=46sgZ;CVHz`&{rcD-Vxl-KPnQ3yLCNd0bJk5+6V7$P zCtmkV*uTQXzUBirTjB+1#cz)?F1AWmvO{kW=gm)W`k9&;0)a0io*$ z1xiF_IR164xpn!{qll8hf~zklW7qrp6J8g-W_Bg>8VCW!*BfRZF$o)EIvho&qeEpu z8c2UWO0Ph-i)fEf8$##mUSqv%-8jnjI)Z1IpCg@qKb5bBHzN2pU+O{Fjdw_M+XUJ=th?moY5skK5yL-}$mA zmw5_w?QHBDSDyPgnuC6sc%!DsWPWI0=uv|2WchJH2aji}lNZDJqW zp5RR_VpQiB&7AVaLDUZo^y~T%-wPRI-hW`< zwRG8^SJR;8FMGn{2Au81G-Z3fcx%!jKz91bdIY-_!57!~w!Pn9gWNs2Qj@tjbQp2_ zSGh3xTxnUjObGea_y|3n@nydeuxHG*Fr-Frq2W>ro1xw-+VBt4^TIC9cDpOT(7T?k z>aQRGh{{L!5(637{NrAoJndMLjJvH}IcxtRJ(bg}C&)oQ4DnC1d9;-zH=f8%2Q~p2 zEMke#szH?2f_Hcgs1Kg!w|ZFtQ0gy91|LlxQx*u=uqo~o@k(FtI$p$tMGe_?Nikj- z1(iHo{(N9$(&a5Z;nGmnr`VkBr`N5yu=u5J!fgBkY#CIxEFPnoiQ~=j%zU+A%w^xD zzi^(iR48P7@A}~w=F&_OVVDspz$$pd)a{TIy!mE%F%NYf z+m>G6-Xx|oBJCkrOUrQ(wzJ$S!qO}|;?o~V`?(8}0UMV`!Hjknks1&EB3|||M^Syt z3TJt5n?QB@_HLlJE2u*{lH2i`dWF{6M3%bx`e@%|-p|y{6zqx2`6(yzzN>)!VE9Jd zFo^!V$ah?F=!@^=PX}XL!9cqA7Z=~kk8AD`<|*kf9cR8!sKa5K#(sB75_x@h;%@tD z@=4!iVqxr3b5c{>tjgTxReX2yy;H^(xLv_`Uop)NZzQDvWH`0@Fh} z*I5^J%{V>7Kb#=;TaQymf=~sY$D27y)*`f$MAwNKXi>xru_x=D_=Mr!4H^#V z1SVaoT&FvaY>QKQzlVZ_sB9MTYIXoPNgypL@uFK}|=O zJ9z;b@D>mG$D?-8bR zcmdB;iE~gADfIClhqpotas3I0)r+`a$}irj0k4HcY1oN3-YaMEk#=5noZ}@n*F9!2 zr?*w7>8L>)Es z>&f=ujJU^M{aTjK+h@h&w(r%I9JL^9GWF6afN)RQ)K^>Lx(|-hvoTbhM*d%V!?e)0 zw34W_Q+o(xXdQj^DPs&8a_a2M>@w@mNa1{4{F+7MccJ0H>~iN!JYoGq5nc}Q$YAv4 zS0^32#Yo1qZ%iG(oi%aSvrpoHcRB8_I=S)outCb@O>@BNdS4y8O*&fEoMb`@T2%a* z#>G>0ln#z?)u|a=LuO!|(cZu{C%4PW&`s-Ky3>T)S(vS#0f?--3@38>^B|p{L)6(r zV4jVViYxq=j+__rTinbM9Qu}^%fS0ug<_L#i`Fnu? 
z%(XBw$`t0>7NO>wX!Leu#92N}8poV-b))NiV42Wo|X7%-Y zu#{N>mA^7{>%0}{(=4ju~5Xhsoh**Jx9@(8PaR<5|v7B9VScg%`2SVQl1VZPksywOTQ7?9i zdnZD}t$4&pC!MpyPURnPv{vmnpLGQ_lUX+Gsd3CfPDQyTNR_z5{YwY(h`osNyfu7u z{&QoonRZUk-uAJPln;`jCCt5_0hja#Z+H;t1}t8@tJw3i(0lr&Jkf?^g+@2ziETe1 zoXAXFNlUtYp(Ci=8$X*yz5Ei1z}sr#aa`sjZAU`PjSM95qL?&XKwO5`y;Dw2$aA(w zsj^Y6luvAc2+EPr;} zF;oO^pJfv4cDaX;c%Yl8>Ao5e3zkuzDCqwJ2_4go{>owPIoDD0wa_z2M&P$LR_dwg z2Od~N`g{14_d1;|+Oi^h#N`EJq{N1&7QW}W`U2LQUamo;k>C)`E z-u0axN~CdOd`YfXsZw4QeKe)AETFOvja6&Z%{JC+{Ipj}I>iF;%d!4xO|q)B45hpy zF{FAU91Z>?uA*8oXCAZs>pj=Vr8cZiTkTh4SNzy8o4<{}k8KlaS4{AS0D0fGGSlzL zfUXP|$`^%@M1#I4e$Xl+ffG%*y|0kadhw5UKcq+`<#|U!_{{#}FalKBMDA+Hc5!>7 zZH6GC(e;o_cxFG2ObTHXb=uOzEOomMN7f#aF%GoAw1!2O-EBNMdcN9lTMQSQk?8Qk zXsC~#-26UFS&VP(Ieb?iE1M9EnrfN+1uO=crU2q521 z6|DP*)D72wtqYuG@%40hd_qvr|2*FA3(Xg?bB##YubyRyACG`>F4)wHXUr<(u%p-liVt)zL0WeE)fqY5D7rM;EJRidMaFpT48Y-z3~H70=t|}uPrUCs zcXBS57BPm4W?Q}ElnBX%$=mD7T;IA5w|e|OZA{&aqDRhA5hK&L`v4t9a59k8n7##g z-onk1Y|hyAnUQdWH=X7oAiR$>*CQbViJ0>s&YSgtiSGydmc8#*z$pFD!5ZSEh*4wu zN{)T>)?8zJhJ$`Y)RZ5Aealv4UODxg)m)7CrsUA7bsnc{`ysZiG6umiFD6*SffE;{D?gp2=ljpQd>09iNq?aL`o zoOAkPMo(zSozJ?sA*1|-u;Iynkssai3IeHGcd7f9>#sz+F2@|IuJ?{r^dG}H(RUvA z%b;Vduuw}mX;}~K6*~!iAZfDIfJ~-84>#zLf(GnwdtCZP0fS`nd1)p3tYfRrwt7`EY2D;h8 zW^pQ)v}qoRJ2?+ULI`u4<;D$nu!hf~XZGm3Iv*;HUIKFe6-B-RZPNh?o9VH$!j1}L zZoYe#BSA|piR1{btGx>Hd);c_|VV_1TIfcK{B znfFjm#B-+u@EKee5prRJrw%Kn>o!eB_Cmk!SY#4ReVpQ+j3zM$QY!|B08d8EW(o99#>)OKzSOgG=2WHnS zyLF8fiY*L2Wl1PyZ^(5|JFVO!at_u4MoAY^Cx-&mKq88fR|i#^hPO^!^7nZ8z7CPh z{K?F*CsNTAyyFU&SKuw?Q>xV(qD)u|0VnfZxUPe^Eiem?c z;}10P1wJrEQ;ysmb^)ozM8(Z`9PoFReNinA&ukR zS7TR~^;4N?@ojU1v-4k1)W`&pX~_W8h*Q&69Js{Og3a3)BZP{g6(x^Ukq1G$ZI5ni zz_o~aS`O4(I>dQL_u25FF79THVd;c{+4WbfSDl|`Y1ciuRc*c{w=J><)XBlXVX&po z8_Qsfd_A!BbWC@j;TFX*#S7@?W}(~U4R@c?~b} z7aCr7!PwhbUs(pH+O8=@fY$2DUPrJH$yihZn5@ zQsp^<*m;Xzp08|FX^l}wLN*cuIMCkX90#2oAT8g|vNI!zA;2amiZjVTEo`!!N=7W~ zN~*MgQ?n|rmv}=O%+1aaKWv(FE2zbBNnYJq%jLPh13dYrk$-u=t5H}v?~poQPMCiA 
zSv@!G30dAEZHDKvlIphxYpvtW#LnvF8H`1H{HxrZ?5M>%dun}?B==5ad#DLf^4q>D zutK?K6ACUr6@^U!U2j;LuHIojAEf+>r{^nDxo-qSD=L-Bm`Tzd1xaV7zQYfsnfpf>Ie7%La0NNzJeNW!#7B~!rQ)ovW$A)H zq%oVdt-$tQ{EID(%8C(@XiaNfJ|Fp7kd_yI$jl`(D+#y=4(VTyT=YH~y97##M3yMw9 zU4?9o=R=oCQyy%iJ0(efYazHfJk+w;wMKI8^YyjZXr}H;Ptd=FbBDCxBgkqW9x+?v z;%fPPi90Uq+1Q=e6u6S0PEvke9j>foPtxNV*?oY~l>vk&RDYQgA{^=Te0BOI>M2C^ ztmZcAtcc;(JTdW?E4F-d#kJ&9?J1xE{GfK5qC?zLO z|Cn5XLM&EK3yW44uJwgFd2cS+&Z2IwXmT~P<-ToY^(7JNNq|FWAjx9sZap69Qj3Yi z^4e-z2M4h~M*#o#J}Il?GsFa~#8{zk0rr>9(;vPS7573(l;wfP&UPR;9H6c?$V}MpjCO2`j2ngs z5jWgMQVdo#2eSLy&dxb`YNDlV};hZU*WdJD1Zvhk!Y!&B&f-!J5 za@Y#~$|B2M#@pfckkwXM<&uA?#RcZSeu(zGMufV|fp>ntPV&RPTKFv(Y^@tSTm5Q6 zrM{oz(b=H)ME_irXD#JL#!iM{dvt`+GL(vDnH4fRF##L27CP=eX3gUyN`RK`g#G*ZD}L6DJ4yUu%9l5>E_ToHi3SCu0fj*8Tx?m@DqFEl=`aB zm^!gIZ#!GtnR6!O@_Uyg7Wx)5NaCsDUI&~0Q2S+8S!Y1DWjIT%NPDA?P{*TbFCvv@ z*ea)axTVdvZm(9i#mJ&f*Gr7q;$9H07yXOJ<%rNw0w}}?(j`h}iFCXp``-T%vXFDx z^Pz({nm68UyvN75NlowW-5zfA_~R?poc9$-Bkj=dT*LzSS%~rO3Q!-z)rxe!DvJNF zOD#sQxLW!Ud7$`WgCTs5g_%9NbXi$6#?(27_`g!4#G`@}xft&}()=t1{dzxTxZ>N+ zbsO8(eAoBOgr{?|RGS~e%!j|GU=AaKhZAP7RLXVGLqM@Ls-CsP^&zO z-ibc!tHV1*yKDx#+eEEHiH`PXz$i*i>KzSpTodsYy-*zi&*@o;Sz- zku2|^EO>w#k}wKHb92!4^h+hu0D&s5LP!8|NWXir@nODNBK@`anafb%T9h<<~xfZ-)c7*(g>}-W%m2{#7O(UyIG75 zet!RXO~L=Pte|21jThfmrza2UfTPnH2Yu`u%CKXV{ zPxg9Lpw%LdJaB3?Rb2wF(hyd{bgus10XWRFr9>=3Cf;e_^=QS@-ykcudBMMD4Us72 zDM^s~m6tkYCrOjLY)C!PTDwXVlV|I@9wEYXcGL_#K8^wmSImI_-^CVdpQ{==NEc!= z&Kvq72mlJA-N>QZST~I8ZV=6)ea_YV2oTaryLN40by&Xiu@uOD!wM(8WeB|tB5lc{ zY9I4p(Kxk^&D|9g8?B)Z86@cfe9`irgiWY7GYJp7H(bORsVTh*ks|gUO7ReN6pqwW z@_b00c+jRz$_w)!ik-Vm-n0PQ_RsMF2+BK{{s%Q09SKz46eMze_ zYQUUcm3CiONjd|cd_l3WsF5)<+Mm%RB6NB`Q5_^rMQ+^Z%)3DwDQDwGy8D$53=1eZ z=KbCGul5rrSC@WHuKmQu%YEa`rE#pEdwnv{IUzFC>vFw%|LXdOB7N5atz(e43Y;rT z(_`*ZEHyY0mAD?km_4jbs{VkNm=fqocEp?wipp_mLK@^~E>zTwvyA;RZlC!>Q+yRe zok+A&7+%zyWZNQngYQW3du^OefiJ}&^$2`?k{RcfTM?;vy!r{%6WvbKR2o%#a3nl3 z;LD7~k@rX(XeC;bUQg)H)>Ur+HzzZ@@*Ww1fXct`q-z7zP=#C8zqLcE$cq6O?`o=bHIV2U$VGf6(dqbGHDF@W^d?+7Z 
zu5zk-2%pq1%f||p%Bo-bLU4Z{=+6&*ww{mAFfID1qvSc-(H}wMIx0rI^MG;MGAfRM zqC3X3T;n!NA1-?a>m1HIuM8468!B(Bk$dgB!Xn(X@`z#Xq(K)#jW;s5 z41QA%{os$!*8fJXV+;kN0Gp6c12?31_SDB0F!{N<(P|lE!7mc zHQ5@<@@H>gF>Q4c7rn>YwHCPBJNZK$*R?3-iB>97BqRrtM^|gA=e4X$UyTS6wbvcJ zzybf|MHr-iahFhAxevN0GN2vBbiPt@sqZ8*BPp~UMux|#1`Augal!v6tGS=vPY>_K zrhh;DVMI7ZDEvOqWIX+~gdFP7S-+$^vt!IWi>E-V#MC0wAtQ9b4mwYX@P z?hRLF*;aFRVjzODjg}-_-5jzF7{`%Q^!?Q7{5sCU(nm7E5jbOPW3v$9S0l0i}55glpseWiZ0npFS$17gVX>Vihyba-8jJqJ} z$RY~#Rg<|yiPbl{3emoAtbZJ~%7ptFqn`TE-- z|8+SpPZihzBO7z-#@%Q9zb@g{FMVebep&bj88Jme$_a^CMr?lmGk)&Ia*9Qk*=*Yf zMjM9GH0S-!Q{OHhDsuFu`yy`QR*@$DeYRUWQ7uRZ(F};PEjixVYp!A+lrGlQGm7R` z4@i-YQG3bK({sbdzeA`NbxVJvnjPXYzXu!jBdbm*fjS3~=CPT4B(}PlBTrxmRc1_dsabQTL{ZMX2Of+VhBm*}dEo z-FY4xgb`dwIy&;Ck8`{@O#anay6p5$CQ^Fp=xCe_{)d?9x!7q?ml- ze(kM*RL2kk>JD~n;MBsC#Lqc*`?b3mQky?QTM^~I1o1Pmt{1BCUFPU;x6PwqA*t`g zHYb1s-j@bfIalPVA(<%G!g-7M#b9E4f(BhPex5}|~J5^HbrKdXo}x_5FA!MKr&)4byE{QeZ(U3@Av#iS;l zvdKodLj{{$yqxn_hTqdcU#7_~2Fcy2J@a3eX7K4}cj|Fy3M8eijd>lV7xl6UlV%$I zzRGxu@~-%$ZxcB2Q~x;5;o@BS>oM}{)qQRMXXH$01&{fMSsXT^h(Vl0OUDrl&~|Gj z53&O{u*IwQz;8D+?0b_7>}RydqPMCG!^a^BthDXS<5VSit+1vP|Y zE42p3_R=*V9bWj|=s4s}aze!R*kGVe4MXFFK_!CI2>bqn^veS4(KY0#7@aKPh&E8! 
zT1rxqNw8ZjI#Y>v7j^AXw^B*c6m)5Cb)7*bwPzFT1H!W$+iJquJiH+1zW*Y&qBDTW z?%3AJMV3sT#pmwjT0x9%*>j!HXp-gFVPgT#NfsD&GL)mo>1|~N#Z+Y`ocWcH6H=?S zjmk%JzkuU(k!!2L^*S=e{HP7GDmi*W03EMUH*7WhwPPe{R*jYTYUkcP_~pa=Qtv?& z+RfjP9L5n5iBW@c8Aj zkQF2r_r^j6N*Q5w`om2aO1{Zv_*~URtB7(EtP#pce(w61_FP2$QPV|>gR9$?w#1up zv#wVVK$xUJ#w4{W0`~!rm?c3D9pIdYf2cn1?%wg84~xS68f)W6fjri0{&HjepQ>=H zHPe|IxUhpm%KCYv5F3egpC2n{3A|d#sZ(8`j19{epcO09%?Vfx-p%4MCpxMC`?@&3q?KAB{=Yy9KJWJOld9 zs}0{~1O0B9=5puT_ZLr}<{W~RzIN_Bal=OcOcOclgCa$XD>(Qx#= zfR=E~?Lz6<*_carY3q~F*EShZKHK8~>Sg{D!Dq-C>A5Cn5aJ3eh{N0-M@cjK5pRfb zB-8QUJ+6S3=_Ew}4dAa(2hXk^}O$4K^x zm=Ib#D;;z3n|=I;pJ?)&J$m@7VtR)Ix`!O$^JI#MA zN}HA`py^J3U=^s?do^280G4@1O9mBD&-gdrf4>bbL_|&Mc z3M%3mPgGvm(gnSImhHW8?4r|PDynw=nt=+P8Qi74Zwnl!QQj~=)-vCK8(#SSmuovb zH>>U>(Zi*=P1T}Qqv5pwlVepyb54;!_uSLq3hJTw5)(kzu)`EXKBU-zoA&gX0R(E{ zb(3q>RCf?EYd#_w%2P@xl%v$DokXrDKX{+8pNsPZ&^1G*Qat@psZ*7yZg;ywljCjg zYfcomkNZQ?X2+bLdUPNE%F7UZ?aBzJy<>tr{-et7NAM(vsH7)W4zODxy?)cjneM`>My5-+<@h`g-u6FXj1u3iAah7oWCJ6LsuFyf@i_$>URg(Lqj|yJ%CiV|o`uRgXO7244ltfyNr%-3htyb~# z{@c_u2S?p?Z>Lq7F1wkV0-|)I&XCu!y89u#WgVHVXF>_#bC4Iinkw%cerBr8e0t}h z;25YWtLK5bi>02z2ubA9U{Z)IZ8_Sa@Ne84+xioE;H5>$q@Nuf$0}T#sbVJsDjban zC@hIWt?Cvn6batLc*14kxnv2U%UqgDLjKBDRB<_WRgvmc-3u0SajEkU?QyH3ul`Wj z+hJjIClnl}vxm%xrVR@DKdIeN<;cio8@ngIo!Y)mXI=(-=ZWZtZ(#zQfkt)=wd$d8 z;LAKE6>saen73j9{1b?swk?hz1SQXA;1HOQGk0Oe%5FQnOwf#1yXMFcLv~D^Bs^DT zPs|h3wtsX0B-Kn*vG23N^s^_}58q3Ygmara8-Gy!i*fY?aQ?nyD-lN%x_$FID5K4C z(%bI%ZtYBd#Sh#79#qJgV|ZaoZwzd2Xd|+a?;SO6gGUj_o9 zVBY-BxTcFX!#11ZnbPfuXARpcaT|iL802szqcjulT)|z7mOn-0jW%fn-T2>rg8N}3 z+sg7@+fXPOl8ECL!{T<`THeu{RS))&59ANLG<(08mTeV|L3k;!uiwD+90H&e{&87U z$_ROQ@kx>Tewm#Fv+{f~l_B#VM_}m#br!(Y0?hT-sVC=o4nhv9FK?ngtuSyFLAoSj(hX4Tr zB3(d4I#Q%JX`xH6B2_vlsB};e>Aguuib(I$JE2Xy_r5!K&6@XS{>)m*$}gvV=R0Tb zo%DQP;b7sL&lR)0)m2lmcnStP#`AV|NBv9`#>*)%L1(6(;|-Vz-`stDrAu!KlT?(g zG8Vj8)B5~y})7aBeaUiAUDmp-yd_wr*YS0Oo?(dr4BhsBGmd+YSUBpvtObdom? 
zN2A>JcPeeEvAJoWQ{%5;lu1ZyByf=JuxosV_vW^wWg@J;Ts|0mjTUN6HfR~(#G6vY z9GpymLbmX%c-W-NsioUbprr{fPrYT2Q62nbgYvm2kDcFo!okuD4(o6oPjpX-0r-^pH}# zc2;q}WE#V|fG1kpZ>3g~bv3}i(mBJS21K79VY`KH*U;oTv%D*-40bKl0X3hN&o2zIlKymo`i% zQJ)yj;$e>!;T}q$Y#|Ma<7ibk%$0y3icY-TblFqWT<3&)xoWHYF)`6Ta}uxf&0-!| zKAE3~x*}$D8Ox1e1EtM%MLoY$BBz<#YgG99#GOLb;l)3o?WG? z=^wD}aR(ObuT9}6Q0~1cvi4sV3NLvMV*kqv$+n{ga6%@D6jNrRz4|gj2l_{gY?*Jh zfNzXjg!XJmbdz6&lvtlO*$VFX^?)(t*K#m!XKqf z(pDfoM_!%eUl+WXNtK+gUe` zQNYEQ%f&>-o>#DDPHQOQM6tB3Kd&-s4&%h|m?ZmxTL1-|lir*Zq5t`ewiMvQ5EV`M`u@MEvX{9)8r zrBp^-fzJucZ~eNo7KosxK-@1PGVDi?txRv$za5pRqf5_$)IB?r`~!4)m`Q!bR5YXB8m-R}+s;D4iO`F3 zP@C-;+c92F!Y`=8U$Dd4w?i&YD|I~-3$m0H5x`$(q*8O7O>L7oSWI|qOlod1$UXGi zm9pYGanp~@j=CfCQBJ_5)3G{0#HBVm*b!Q@oW7=zoEQMGth?q%h!G!;rI91sza}RT z9HBIFt@i0ywC6nLLf;aP-SRn~So9m3cKRROk=>NN0+t}~a(?wJ7p1Z)GcYHzuYbGbA6&R&3?6Ev8ffuR_gl;&iLgBw|b4@%@ zl(EaB-S6X5_IIreuSUZjPW01X=Wd@cm&Ox~5o(8ehAYd^OsXs;yjhnW_c5pCUMwxu zraFurG<$*-bUUb1))Is-;PfZ)DxP!6fsVo&Qg(rKEG`(AZtqjLd1BorYu%{rZwhih z5LArCUUl*KEl;2zI|n470NQE(EyXG?dytf}wCKL5HXpEPS|_LF--Ls&L_l0Z>1sQE z7$r+!FRU;;E{o3|6J0bTSmyFsivQfwB34uTQ)@0e1UI~7nR=4}ds(Kkr~72=I0%aB z@VzhV>O(%hu3@gOJ+#GumR|2y5*G=8|0Yx6c5UZR91`7@-s;U4DEM zC)9cxGg;mL@Z+1nFeX@HzgA((%iPYczI2N105TDOLm_tye5zTYUZ!vdFuwM%bBJ8F=t#<$Htn8!`ffB^tQV5%7FRflRRb9XPu$J!D*M`X$K{7;!hDM zFFr=}>m;Z2IUgy^_Vk#A02%&G#FdQ?z7`hvI8sHJDKS4tGIW9Va8q77`th&5_Rwn? z?H_kZ5NxGT^2;+Og6L8xgBY`3q^9H~*PHK7SzXo)Ir8d?y_4Vk-v%S%oro+f!KEU|js4dE8UMALq!u+fk0D`U}T!N@v|6eOy z=R_lvNi}d?eno936b%4dCtd|Y0)AkB`!kVr<*kfa&-40*vC&_N-`}gA)juph0*`S4 z_i2a0o`APxfw>-&kNm63&ZrH_|HShipv7BGV^d}xyk&`*R}kkw;@=cestr6=fZvQk z1F|H00HB@#_sR$V-|kS}P7a#*E=PM3z?jU&9An95zK6v&)%56N zlp4d`{UfUZyz#-Mn*>Tl>|c}uFPqt-qON86Dz0}gE-bI)<=?Phc$?e&R%_Uo;Y%O! 
z`Wha;tRt)jFO@)?`yg3GQiDged7@2z3`OHd2IjItnx2ec0&DQZpZFN_E{9*eqg9XM z2aWYu@g#Z75Z_vsdl#TW8T~7DuW6SXFuVci=KnMzA*CGu$CfxGoP% z{?Nv@BrfL4^^+Q)INSmTyp7}Zl$*4(%>|gMz{PbvZIseKSihGjcnUum%bacpxeSbj#JAMZAUrg}P z3IjW*YKg?Ohk6c~16cj#=b{7n9zw0T$6v%Mx6H>)Myn*-UPCUbzj0M&(_Wci(nyK@ ztRM@Bc=0N7d!nQl?;LYNa!Hm<#sq7FQ#3f%1Mo$$lX(?!^`d}OQD{5RYg#sAJbdM6 zsH^V^eCUpm>P8!RfmzAm4?n})4@jo{5+QhXf`#2e5%Uq-!^SdRQE}0PTq~L zWSj@4%uxximpsdR3NyZSUf9!|%Tva}3dT}HDCir+_i+KaSJ9gFH+NH_pXx^pup!&U z$AbheZJ19YnLpTi-FN3E4k&z7y0S0M^nV!IL~~>_$Hp3y*D-yBAGPZ*Q-^w-JZBht zB(Q^>56S-5B|#W5u*tPQCBjDmBkmfz-+>k(m&x+vxQflp2-01W)Hy1L=RW?$bJ?H5 zt#*j2-S<)g2xgzN-0Ol&>$QG;FfrEbDs1ui;c2q?U4B_;xFDJEU7p_YKwsx4LGVeW zIlHC4^Tz=jOdC#JG)81Ng+mF7C3%jZ1HS8=`Krw|b!G?j1U}Y?^ z`w0`D!+Al0THObR4m4$B3GO zmeThqF_MD)SCuOmk}u)nB2^M9NU5+aIxH7Y0Ao z=tNLF@)#9dF8}3`5vaK!`$vI@$PdfQtrAxQI`fawY=AHL(%NvP>_3E51)8a_Qa&)VHczde?@{a zshjQU^h#X}N-Y&)4Ci5jD$PuBKJ$g;C#$EW5rJC7*!*l(;gP0fD?4X0uH6#oWxpj! zdSigmD0MQ?%%r%oa4UDj_t4<2FSa`=gCoxXieCaN65FSWBVwaaKhWQ-fq$vDNL4`z#)vr&dpFN+pRg5oBQ-`L)zO?1#BO|X`)W=GW z6}I)3e22U;&$vr;x8f;+Jc_d$X1~8L@RRQg$y6u~OBml~kWSHg;$800Ugaq*hH^SN zDQZA37`o0%C~LP2ztVSS^sp}(|Md<#Cw$HDk`7RN@``GvOpS89iai+Z)mv?eB1;Rr zBXIN)2yJGy!dq!e62vw}3xVmCAtHo9U10@d?b(c_A_72ZEehSIcN_WYxbADzgt+EB zeWEf{9*4aOE_5KkIhY_keNQMhG=h=0#RONb z;EoAXaI&UXLZgMPA^p&5ZMxLf z5G}cSN6@bgI3xnBz=ET#UXlMfW76;}!;-dVCS!$=tQ<&Fb3z_MtvD!1Ndp(%@5$0( zHx47>$kRKzp610a#K#wPA3BlU9Z35l@OU+1U-T>UyNj3Z81;?8(q^5mu6d3zFU`FP zN85o%L*6gHJ@cxV6?lryrEuAos3oBZeQ_(g=<@9B&*Xar?Y+7I`7ce`Xm=emzdR&9 z<0t%Za}JIj836yp;|!BV2}$+^;fr7&WrislkcE(Nf%q#+@o?$I%OF*8xigYKH2Y(_^@0yk%g z*KR@@jGwGP(SA#$K5OLSiV$$pVQdrIC1Qb+usiQf{%#HUTK{}xPY?Cz_hzO;qKRxD zH{KVltc3;RpI(rH1N#|}urzLJHBZC0FsaLrudYVnD2H+K&)?l=hXM%S;y?8b1_MfF1qozlkgwkwWpY{(}X|jIXn8y&oZ>gH@ef) z&3|ij_xg9$zJxva!K{RaRgHDvIC+<3PclNE=y8csC*^mPtI=Gn-+SbCmV!t{>rs&e zE8&3D0|)b;+L*uYX!+6z^FTjTbkQ4HBGvuy3OCTjcKvD?Q@%zQBQ$ZpcoK$4Jo3y@ z=Pa2cOz~&+VEF2XW z9VO32@gRhoaq744Q@phGkBGFz@ZD}hR|nn!(;bBXa`6XFe`9bGEnhu`}Q9+9`IjI8N;rZKAdrjmP5bEwsk@PT6 
zL6LYA#nR@~DXOdJS9`otKPTP~Pv}1QNUNW2 z$*t>zQXe9X#Gvr-DafX_Hl#k@><(T3(RgVWIg#N)O|V#HR~Ct&!*w(6!iwzr^x7%b z>L}f;2e53n!>nOBmWCOdzge0?yuZuF2O>iJ+@QpqT-iE%D#og^T)lOseBkY!AO5)9 zu`6|b>B4i}I;8kdi12gLToia}(k<6ZT13V3-+)zwwY8B6JX+JUqrL6cb7z?!=tpeR z9R9Ldv`(CvlrWZB6u?IU7NuNoi490n8Tl}Lpwz0?Pt(~8Ya4y|&GFQ}>*popXT&(S zBaqrnB2a4}8^?L&!;o{rI4^$1Z~ja@_M)xBzS0EMY)rBldC}TH>w|z0T*{+qlfF|v zg{@J-?EUYIR_z$TPBRi8KdG34;H{?y>+Xpr_PPuO`Cz)m=Q0h!Hc9ob4?g3IL6cz5 z=LqnrM3=u3?NjsZ)h6ZCEczNZIiH;8zy;Yp<%~T3eIxt5l|;E`f%y$6%GYBBGl-4I z`7)KeCHZozNXYM`!mY{b50lBb4>gT=PS^Thve<7&k`L>~NLRD4QBxYItH6oli?`O) ztPsEJ!GCDzuba5-G?im!W#dE!9Z+masC5|G$@@;D@5|H+?wcu>2Svfpf%{^M6TAbx z3~_`ag=(Ywha28z-Hnqy>1^|+*-Jxnwy>c+ompTwSc2@l9E43XV69%H4_MJ>zRxM? z4wK^5UY=-9Wu5v}vgSxhuBLdyq^2l#5aoyraLX6OZDPH--V7X%{BW_ayf;Tx`if4W z^QBI5Jrozy`=s=Y`>81BRttg6@6gL@2r8o{3M2PLA!jE)y59&{sv^^*V$+~)If`c7 zPkp8_sd;E;dT@lNHilU|Y8z%hZTs--Pi6qDR{uFO!FA4G4}imH>5ql~bhu}$telL| z&Z<4HMUY+&rWKj)ME+2kcs{^Ixp3Qd^eFD&R?_lE1^nA<_VGW1z^hRL`mM`bMz3Ci~{hx0iP?GyS7gWuOLOduf zN?G{&m1z3pNEN@T=@WD8`^fd98WK!bs6+YvR!-yESA(5-6M$VE!3xxOxM{Kt59 zMX6LqW(Fewoly}9RXbyFZMR;@Od2Q(Gw`7gG!v&Zfd?^wuh)xSs0*IT`N4 zH4CWk(lnWqad2s&nX)s|MM?S_o>hPPI^GwxR(v%x^R!?f;fyVdj7{rFozI_*z3W*F z7wKBzi0s2h5XkF%!o99s#>&Y19`@Z4_2fCCE*hK9fi874eK&93dQ^7Vr1$$1IAI8~ zFIz>z#7rjYwbNW;;~iY1gCVYcZ(@n$gPXg4rz_zMi^HD-xf|)U%I)M6R*)BeIE62?q>J}J zLlL%(nYEb23yaAv_5S{KVKtj~Pp$sfNu;&&J)*Cz!TJ+#l9`pkunTXQxQ&6I>rY`C%zST>=0RFlFfkjQv5 zzAfZKTRR>U0m9Od5(m9DI+%$j`YbC zcKuz8GfOl}h}y81YNz9b5M%JO$%{QQtPrV8Kc#F}S4<7B0}FOrJ+o5toeM|=^~v8& zyOo(v6HU`6LYP>W=EVe(eo`b$xmziS5{yaE54A(UVA0UvK3^7#h8Az)!4@(md$Nz4OJwyw0`S)!%(1LkAs@OkF-;~%*_BM~7M zR%V6&RU_JRc(r~LDxbMoU{W@FDf?G%IL7eGXS2cSc~tNI1jj6yC#-|& z8ahcPVo585BY=))41^oqR?zP~p8pa0H@M?5uJBV)aI8>jELn z6?YY+38FkcRN#Bz58|yy3)|khE&wa~2V@@HTzKGK6?cYjgiG2IpJeq2wFOI!Y;6}vCoV3iNJfP#e;75$nhDZ34D{v z-zfzUv9&VcBzICH$gvAQ#kB|FMw&n9_^ttGMUUTL&MG^`t{TDC;Dfy}R~`iqg&rF% zq!2h~{QqQEDL5R|wPC;Y;4BzJr=F??)eZae!SI5_i0G#-k%F$PJ*bAVGhmwIF`XQQ zMaoo{(W5Jjr*i%|TdR8>Zyy`uL!h9!V3@bFW(ZDHhvkp$>N~)Tsr^%npFRW 
z>7s|?h$B2mot2z~^?~%8p(V29nb*8^F9gDZ#gNAZpgzQfNmzhM2GF{xnQ1n`4|Vn> zY?@W}7&or>JT%-dIUviC0*79mSY9=v zE>G)U7w9$gD3BBeQgc8sJeU7nP9vyDqmiHxSc==Hsp?YI@C2BGYy?WDE3C17tYOqF z8>`~Q1&@F;3~w5Zz3pQXUnHGne{q_x025=JO!ZB8n=*U&=j5C6?a+Z>qqridxG9T?RMBI7&~pDQVaS(4IA zIcnrUK@p6fe{n$zDnrifs8DsC6SeEb((IB8fp~@L^G&iyFlR>+%;8{`PrdWpeH3J* zu}h|*d9N$r6y#ocZ-Yl&#N7qreyw(F-94q1JhMj>-gAg}kVD&D0U@sHnssN9r{_6y z;DupcZLG)Fs>(&ezNk+fP9vn&f}1qh{i*fYU6+ZL+IekpN6$$K9g5k?O!*FAz)a;9HQH4V(n7Rkk%$6(Z(VPd_B$#|*c!ZK12SdqTx;OwT|ILd(Ja3ZZ zk{54i4vgywK*l`8s89~%j6C?T#Q8OO-)rV2XyUTYo+Sj>x+^n7yt?GajQpH~a)0z9Aq#GdYIC%1T{1_MuXleD3v zm9E-H`93Xfb6KD$g3gZ=31oYx%O5M5+XwhNOMklpJm{~(^$vH|tWv72&7K=`ve-8L zsFPhwAteH^=7aiHo`f~r+-)A^y-;#5`iQk>%z){b6ENqJjA8D#2QfRf6c}X^lTi%i zeQ<N$*3+i{yy+d23j{4c>yTHL*kR)4?X<|IIxwCLif0Pl9Ii;QYM5)sYK`>u7y1UDGlNw{(LaZ;oP*Kbb5 zl5c!PlK?1Ff!6CddzU5mQUuK6cT|NfM&#sEJ(ztYp!6NNHhnG7I_or54Zh{0LJ)<` z4-Q%Uo2J1`<@ZLNeVmqI+F!8+MQ-GKL2B~L8-cf3@hiBrEX$-&tB_r|lK^Q+i2V$+ zwv25>i?v)F>pW{6WQXW87r!$JydrHUO%=Dh+MX@=$3gv<^MLIX(isd}d>PYijcxvi z6|?pJ(~3Oy@G12t)kzVa*31Bmpx^7Qm(WImv=eX*<4G)raDKZJzzK1H#YP8v{$i_h z!NqHYMIBvEu(ByXOZ*_5Os_OqDcCO1uUfh&j2(>-d&f3^z6(EB{HCbtOr(hs1sKAd zer!5qmVE;(zg)uYJUazU8f`#l49&i4s~xNmv3iV9g;UFJ&x&~S<*F*#in);)c1ryo zp@Z#}?wjAaRM}6iJwM?a{DD!=k9PL1x;^%3HuX)Ac8~1n$LT#hteQ+gLfz3rkUuYd zWSKCOpfn7e#}3KF%`bgyEB-e-VY7C;*DLE}S~HJPMLAG6DKCz5O<2>O2J6;YnUdN~ zC+`ZOAld*Utqm)DP({f~RtS?1U}HOsc@<`ga+l^Gd;Ius`KsQy)ai`q^4yr!R+~ zau>`N#&+D~qu*+P($b8A4~GOkwo@aRtBY;X>(ym2=S97Ip`JHxUot{kq*NrbXU)f+ z$g`&iQAPSqB`?a4$di7-uy=hvp485vhtrQ!V7zOpVnT5lL9|rWhD&m1_CQk85YJ8b z?{gDAKQ!T!YwntGpSHlj0u@+m3w8kp&6~Yr6skR)O1p!mvhJvaup;sHK2RPh3r|W! 
zRgJ`EY$&%0!c^_@aBlepNhH=GfVb*4)oSKx3<2eBw5|2{1rf`!=int%xzlKHk#FfzJoSgX00135<0&>d}j};z(qRT);ht(F9eYr17@M~uus=@bKJ01$ZNDA)S ztpne{dXu!I%LqV>Q{-h&FJ#&L;fk4Vz#`)@bXziJpL?aBxaIMQG;EmeabkZx+3Y(Q z)H!DKJ&BtP)icJmzJCvCyMNda$SXNk;yJfniz6l1m}@O7%Hl%2;1EgX7&8`0Jwi=}aWKbQN&+T)Ax@vn}B@-F|#P4@3lL zel-hs-Sj#E$5*Ap4O|JbtTyHYtHx)wtPpb7|ebV$QbU zyzc{G**)UXv*zl}c1e7js%P0d{D#cU;s%P_yq2BO7*b;6uK_F{G9Fg;s5c*BW9UeMo58Xz^1wb2&L~qyb|W$jL_q zm$Dc!dLj3ksgn~fSpT%*ZJU=YJnB0x%2y1!GWh)NFY=|?96{U<^wEm{8{Op z+iiA$_CZf^OZ0rKGAsCHsgEU}49jn!m=}13!lzM#unYMY?q^?o8I<4Gb7!f-1dxF7 zjns^LZOTEWt#+$x5zrHjmtQHxch%#wmXDb51s5}4=K0D&G?j7Zed#PQ+joV`&myw| zLK65U7)&b)xD%aUSLO$e*)DoD@(m`Vod}cyQSz#`7vxD|vw}_O@AO!)m(h|#a{R|G z%lRVx)NczF$8?+C1qy+~INyfINaMYRgeYjyaz>&gxmSXJj^wrNl~?0eh~04LbDbQJde$xK1k`KRH^wJV?7m48XomrF*$ z)@PLq;~7$mi~Sfo_rV7-`N4?(#R?PbILENceUNLzNz?2QXr1fx$l-z=+x`qAn5OdQ zMb72^euhaDr}C`bSc@K?(J*-##xW15l5`a^)eA-55F-ylogeKsBZQI)3B+)voL@$!1zb1q&fVAmANy)l#6CX?jAty!o zsFO!v(KB21OS7*TSFY{OcvxEh+1b z6-|d7jp^9?A&%ju>SJe~VyhYym?b3tE%E0W`9hu{iy)vi*Ba_)b<~gZobkM_*@ZYcQ-B$c|({pC?KQOZK zx)>xLN;Dz3blD`ZnZ}yyZ5vYZBiX@mZHldxb1@-iNGmguvz0R6S)#XMP zP{mI-jGElTISW~~umHmIPc;((6ermf^fld#9iD6$`>;ZUhg=xjKjTYR-K|6CS+xS% zDG;oU&0Noy&qoTZF?e{Zk5WtSaQBv-FV0udkZB1ucwy1voBF?Vm@>TnQqTt@aS5mC z^B#WVZ4N{GSF|I|iVipZmHlB{hQ~VjspTS8jVz@_R2adnU;m=Jh5ZM^xB!vLXL;fK4zYy=HdtTw=Mro~+p3Wdm&5zCe)ef~8Z_K`aQ%Da413ZB za8h<9`t$QZ%+2lLO91P!;_p__?X`1PUq63T?>P%K9R{?DNe|3^r00aLU8yCJBYp|o+v9=n^>eZ6NEI{#~h(uUaf{*Sl6Sc2YpkUdkNQdN45uJjf<|_J0Z?#<*GmX4hh}2(z>!06bhDC;X8G)o!Nm(DM z9kQs#ulVd?As63T5)73e&{7X8404diZ-4pdhGCqjza)qff0>0NfDhgL(z26=f$vx2 z^Eb;Fl6DqK2^${}Lhw)S~|vxx;(8P-`G`OnXg9fik651MZMzm9hAaJpYO(u*^|9eB{22`Gv4K zL>xOvam`jXcQZIfp&#=Tdo(=S)`QmqrVRfazgLHg9k97mHgbTFP0z`bDn9uKn;Zg* zQ51E#Pm#m{^j2df1#i%B!5EAb9|dj4ndFq=@qc(9@dg;qQ83Xls8smGf0n4}hds^1 zM`s1zwRVvb90GQjfps~6>mg?C8fXRlG7aw8Sd8myc!^Q#a+LgoJ9_PtpSmb24d$7d2%*X@_`^jS@Xitj)0`AAiNmdG@cE>YS;e{#`Bh3}r(D@aPd!|1QP8WO`dAw>=wqEge$ z1hmC(hKdgc1ZC9VH>?hC|BjuqZTM5S&Vl4 
z=D0WY3Phx_kGL=w)tLXM(5TwOSqH`B~%kBnKIDFDI#gU4*e= zu5}|oKt>x!{cnE?><6BHygXndI)NuH6>61Cv*!+B|TpQJXL6;B(7Dxk)~%0iKg8X!smNoLFc)(>$t(!u zeN(AZ4ir*%XM>R)5Mx?fS7RXjjFQ^DDfvtUwH<_=fIuU}q+SVgsq@)%JLPNv~vI^L9ekM6VR`_uo_;0UlOG|aNX zYM4u?msFr6{_7XX<(iA0w^BU>(`94Oo#N*y*bWG0=LhB?jupZa2n;{>H8fPiQiDUu z7zL1-Z*y_-0jHPGhIGesJ^_Xs^s48|0E@XXON5Y!uh00NFGeQbyds`% zo<0f1*6}ta=}~i;48d1?VXz6wxBLWS=_gClt#RDpN#7 zt_z8{yiHIzdQGnLWF`n2z&=lxJU8ye7OBq1o!g(2iVM;Zn@W? zP&heoMoq1b@0o$g1DR+P6+7oQsys=F{$?qu?{4nLKh(ey{okP1Myb>cJlIM4Umbg! zyUxGbNF_7&e=Ad0d)Gzy*4zbxRp@xojODX`Jyc(NDfFXlKlDY%;e{*BkHz5tn~j_c z`KJtmIk`Xc4OjtD&e#LJGO6ZH`Ni>-zVbgWpIpfWo_&r(eODNcO?WiHch)pS7?O8H z&&z+%JyrlftIug4SmSR!-X2PKt&$oseNzA_`d zH=jlw+$Q;`CPEmtR74!txvz`e_Fp>hkyzj=uIP{VB3P>dSYvMJfKzV1%#_I|{ssm~ zry0{TM3x))wM`GM7g~PTWePsoKFz4)V!e_tN zxvMGt$Kvs4d~1{1-$m-@Bl=_t6p{_(B7aXjzSucqXMuwqkMq6tz$ebXqbpmow7-~F zlZs)w9Z7n<*i-VYuXQe6BU7(au>4t_cOy)B;^H^8YVUYcgFZgJbG)j>f~(_7Kb~KH zE(2#uRPFzJwh^Kv)srGQI$%#}E7NJAWP`^vxF{r$laOg3+?%uFHjk1^RZrbp%G`eN z)Tl2Ik#76U1A;$x#!GD1;G5r{wm1931A9o;e}n)%jsm#l1Ib z-&sHs;k|pD`t*~K104|w1Xlla;e{z#1d53mXp$C6N_@?D=)-vUgE6WhC#wHd$6ti* zu?NqPh~PJ+@h3u>EivQAfs>{de^1?ehBwcjLhsUgTzNL9-u;n+XwTRznCUfkx9qkZ zb4D1O70EH8NCRGaue!WO`hZ(k#WxtUKXpL2Q+XE{?iQMCQ??y*nI~3Vd{BHRO)3k& zE!9i-DHK7SE%`H5`7=)v*W^dY0Zb)*@k!Gq*3!ntgmC?5QAB+AgbVwLkeZ-RJGk5> z_+7vl^`~3AW)3w=VKRj!-YO_=;scSsv6*7H~LtU6t=;)E1^$FL}AwPKTv~^|lW?iby;Mdosw4v*kM* zf$ac2tvT6{?)EN-wYnjcByps2?ZIO7Xn{Z7S5K{c0R}!%N8r+^q`y*IaeJR&_fEaX zju!3q(^-FBu01R*4ry38*&gV3j-~XFG${$kTy>qyDD`Uo=>Z3oKUK!ZyXNhvzhYjE z(xy}DyuCX$G`?)mNWvZ^W6)UaxLo;_J}U1Fh>yASpiku;=zs!$AZt#N&Ar4IP{^V+trH)^oVw|D(#rmQqOY1D0!{ ztY(wC5X;;073wODsDil13+qDhrIL~JuENymgaXMbkW6Jb7lL1id?aS#?Kg;?*fYO% zq?!E4^ZKE8l)T>8w<4=z2azw(f#cf0M`k_U_g2KmFLzGFM_%+wWGWS}R?G0dq+c&K ztnd+t>uhpK**1}tN9CRmjOd#<68g`nbPIF^ghgFPPGyH9jM^l2(-yDvwLnf}A69E( z?W;7#RM^=ptZxK6f|e@DXY0XZwWE9pr$)tftV?0jGTr)38F&G^xwQBTpHyRh5k1E&Bu z_)n#ms@u;w+v?Jv1v_TAkQ_hKJ_Z{2hAXoWnu8Dy&X^JjUjJg&Np{XDM2jn1uok`? 
z3nYv2;=bSb66kVpJ1SH-7$-p1=$N@)nsM1&^WpnVp^YG$^vKL$(y(E`)MD!MyWMZ; zL57){)dbLP>(}`|#pCvrN-m`~6|1@p27yt!H8SX%aw)Mvrpxb|dV_IVCFP8gKZZQA z_~dTu?=%;~PEvD8yDLUuCh8aFTc*T%8;3lBX$POU9$tPIE*l_~?zRRb?!OpIvL>C) z(q*%XYMe**qoRu~X^*AG78he#HX(o_0>?8P-sz@@otJq}`($d;!NL0FTBT2a1pdvX zIhvUGb(1JLj$~!Kk3C_i_G`YpA$Ta=N}ZBfpJDbfAh61qVVB2sEk^QXH_r=tdir>E zoE#Y^UchpajRw0Z4;QJ1q;i*dA zk+D13b&qdibCF#SM0dPZI|XV*GLlR)Eo1ptt2b#*?lKp1ex6K>6dBokWaPAY+*N@9 zn0AE;5WxXyWmjPEv!CCTA7KwDbNO2~=adS&Qhps-4P9LCj0&Ow{J9VATaaLDbg9Nd zZrk`g$uHG^qj~=U3g`>?ihgQ+0JywpWvHI0AB^``cIMw4UOVPgmEAy;aPTB>7lbaE zM^fo|W@$tsd-WVkAIfj`yv`&>5k5Y@u^qYSE0?GTzFyJKAq8_EC$A9yi)NoHShUvCCr{Z^31fD6^gX7 zC$A3s!MH?p{5bkXI8|Op^GbAr7c0pfVH>KM;@u=3{En(_^{3CSP?3%Fv$?UAYK}GP z#}hXTaf6;aF=}LjU!WMo>F%^GAQ;s!X+7RqKp3aHyvKKmdBZzj7`^ADwHB3-x|+IC z$EsdAUrQBpZWObl+U5qTQ&>LGyCe3CL2JzH6Q&^tEp=?>@0VCL!q%VSBg8+p*(t2r z-;7j(6%*$b6C@3rr&qx7efK5~mu1JXM*)P^zzI->N@2!nwcoBNuiDgH4wqcq_})9Q z8*V6T5#VhRNLrEhaU9S8#nxLewfRNwq6r#2xI?f~+={!jxNCvpMT@%!cPYh-Q=qsP zcP&~RibHYN;GF#a=iZrf@0?GNne1n;z23DRLH==BH;K$WV@@_uWm?u9ofFTzoiRd^ zX`_CO#pM1vmWNeMx}ddnAu=QtK@xUAzW&`gV?k5tZz)G)8>Wp3vv$j3(WJf5WCFO) zx3GHrv>R7Ugq>prJAoPX80+S41$_$<$UmLYtTl`Pq0^^$Gm`0Du_2~UrunEBDlI01 zxJ7}2jqnWXZaX<~ax{pX?L;P7H822BI+6gEM2Q7{;gwbmoV#-bJ>F46Hdk3E8)8&P zd`t$f8et9uuAxd7!a~x=2b}qZS^0)3sS)y{xfw8bPBH#%mLmC?@2sD;9~G(-GV9vY zK&`+YC62S5`RqtMwj^H(@ zK}LN!OQS$sZB;C^%-3j8wlk1K_^CTlgf91AoY_VXjV_tkG4rrkA-=XlOTG9c(^U); zU6%bz79Z5fOS|a2r+a;R%1k5$016rkjWIjO{O`}3Fyxx**>2IsW4G|xLy#Zajhbz z1Z_IZ2}O6EADR$&1Zdu1{NzQ3jUea2iuJ2IM>~KkN0mu=$mCo=DBQ=az-OG7(@uK$ zmt{bI7{cP|cqh^bL%;?kuF@A%WdS5|+92Tjv0sXmH#}<^P%tT5QlgK}g83vGw`@@v z{EaEle^-Fy>Ka3htHF9`I+>Dx7#~WY#IgtJpoZVS{wZY;6n%kWFi8*{A_jJ?t_f?Xazt* z)fs-)!-Q=To@d?j{McR(;oT9S$5wUlmNH0Zht@RpyA@9`LJ5+^dCrd1eu!ul{QnL( z1t14W@BRA1`AdGTbL-T&Ab?J{h(bqkTh~2w?Hr&nL-F314%z$-)O`_ZV!MwiOIK#*B$;c3jaQEK;&`@`))B8snkvg`5 zznFptnJg5={aholZ=5fHGU9Dg(vk<;T4#WY92`RH;{u{D*DG~zyX z()xoU32%Bq`@mfVcCU;KdEm)G1rE_69}+U5h>LOsv%SuRZR4_tTh0j(-MfUccrCqM 
z0-snxwj?2ki9(j3!hDB|yuj#M#9SzB&MtAkN*55!PVO|kUif&u?urgA&>$3ePD?_P z_~5}V;WQ*ApxQ2fxY5-I%1Qjr5BW$Jsl;sQ>!OnmOyvIm-aljjUL+ITiFf-Z%otOE zJpJ@Vow&;WOiFtjZOo4JgMGQ`q3G$0C?W(a4vp0?SL%}{f;$34QP@a&sIlCrB983yJ)+J?(4EMy)Z(Donq#Ff3)~HEWzLhA(LA$2eN{ zG^*Z^V2V+zVC&Xpp^$;t4akTYX%ArrX9G)g?+&Q28|{KczUyXn)3tFoKc@kojE&bq z){S6DRh`d6&6g9@=J%iGkCx9~n5x?Dm+uOB#fQdU1Hj*#9?o}kL++NCg7EqO&{1te zp`(ezxD-LM8{Bw}w?S_S8`Sx#RdIR>8om|A)^=J-zl*$QKpOYeq18G}*Gx;6){tsd z`i`Ofc4>l+WWy^U-aNjHfnJlfOKAV@I#*MRJd1o|+leJ=h+5$%mV5?(y6gk3PJnk2 zW7wH^Ai8n_Re!Fd57t89rj-}Win>Q#uY<-$-lT#RKpG@|hzB5*65xhTLHjrSkpgClIBKW#kCjp~UT+2;RU@e4u6Y|zXl4e)L%(OMXBS-%_% zw4E-8Gvety|IROj0|JRtAu8j~8X`9#I9>Ad#>PD&m?wS0K(LGeblekLPLZwb^9GPy z7!BwL6<_JZPoPLxBWX*b*OYYQq;W*zZ^gFLV;?T_wFf`klyJ5Qnvn8Hffx&o6SMym zDy$s1(L(PyOIAY!DY3OYG4ona-mSs9C7b`Pm41Fd^v+MO4ly!=p!{?wN1u-ZUND zh+6ye07TSCH8dT^So`AyDV3LcsBX|)Z^{cv_lrRA!*Y}P!J%mLuO0B(Oeb;f(*@d> zze2I=m zeATAy=N5m!#jV*w+D$b4{$Oy*m`21gl#{;Mju=TZlb%*+qs`eOA;54aTOj5<-C;Y7 zx2LR!g)r}LNqU32b&kC46t?Ze*l&s(Z1E_-5%oC(mP+`xP$hm>3^liijSDS6aKY3O zB;5%@p%3npYWtLg72rj2x9EwzrOM~N*}?Yce3B{M3=cz3U0;sjuNIH>ta?T^O2+JU zNljXm9RgAB03s1z784JF@{bWn65&BSUC`HVV-v4O1Pc!iWDmCf{ZkZ+)?GBeSz6~j zl+8X@u;@F~4F51S^(geL7e(byyd;8mI+&*qJh5S2Z2XmznUL`V-WDc44^ZhWu+;(} zGs=n@K6a;YAXOr({BcbYNf`Rap%sbyEw=AGp!ajtOb~DmawAJhwT-KpD_5gMY=s$?ejUU`5b?o4HXeFMC)j>RJQY;1f*O<@VenU<+Q-#O) zD&G04A6Lc%D%6WEi676IewwL~-e!7_0DNKohp;D4AZS^%JEU6X+zQ8idqlh1bf=hx zb!$4<4z<5oh_ieus>NPx%ns-6oI4VRR@+p|v%Ma|7Y@P~3&@gQrB<|A&isLsnQU;il>xf#t8TS_Jr ztRfXZBt2|#gPEP>HK`EF&kVKp()_&QC<$WJYW;CZ5kIss{6UXM@$MpIZiM)s5@8V!~<$D7ApNob=T@KbSs487|2ER z=v5}3c3Eu~29cd2&Qou?dg}4=@sScMo)n|+Hz*QodwnaY&yWDlrOG^;UnzLo@i#U3 zO}$`nUd2a;N+gE!uS>$jrKe>5I1`d`fKYIfPX}ToU}3li2Jtvnp^>c$ARHuFu-$k- zxGcLp-|QH(L6Km0@cEx&#tH1lRW=n!4Hiy<<_1VZNB41P4yd(m@9US7(%UYOYz^S$ zEH z7HkRl;l;VlENk^G2aR}V-vt5eD>8;bd&s755zC zFHX;+dbcfB7jJ&Kp)HQD(he31#+AM43P!`{QyM>N&|1coXp|sJQ^{$x1i9*DOZ0UN zIeorDGvpv#x>5e*b<|;SVNf0r!Xbh`po{c@iqPtMr-An-OL!5PdfhW=|0D2j74_iL2V$Zoq?!W3=$)hY 
zc655b*u%pcYn=h4wI(Ujt`60Zv&-kTDu$*Wn-)-LxB*fB^Y*Pt6CgS@a@tT&=;H9H=Dnquigj1x&5}P1#=oMhZ*> zAvI^w4&A#bouLI{Zk?IST4UNzX7~Fph(#RD9}j*1uDV&KEXKN+C3AylCLmK!g|NGt zHV*Sqn!m~i{&3JFxq-b`ujjI6qo|n($$WGFsh;|(zb9+qUFbuq*pAmcFI9JRJl)TS zh1dQh;herW*jeA}!ztRzRX^#|vIq6AXbv}4b7d$s7^@HA+nP1fvn2HjO|E`JH<^nz zh&q)Ou!-Vvk%xFdGK#brVy?J2Hp)1_=j)G%(RYL=aqT_Kl2)?Q5ffo%8zTRroL=7- zfshDb(d@M&nTVXj*cy-h=h@Ck|JPSA=V){zXivp_)iM0s3g+0y>-mJB3okGjm&5<{EvSzZ;2`sb^!DR0E_qa(!gQYk1RNPfFyQA372nw=-5N0 zsd%eI@gr%-_a$eaDZNyl=%gO2CCZkA6X=bRB{WuDlLc@>rE83;0Z9k861(TZV3Bi} zS%pWizoOx8`nRXKPBnN}qd_brYCCdiM7szKoeam)qzD*zX1-hdE!tdQx+4>4pRh@P&NQt^Z;h^nAwUH zBXUGj)_Bi3AeCs%i*Y)prx!Y5=egZyy+s!!_sWtvykJ|6=i;|zzv$K7UnAS&q2Cq8 z?Qrt>7(1I@?W4~nYf*m`|Bm?kf%-7B{imHrOx90(DhXw~E(+HVHb$e6L;VR8byx9J z-W!v(y{63(UyqGX_NY$Vh!N++>w@tHITu^st&kLU!cXx$1hNoRobT6Vh}Nw2l`U`4 z^zPf;yb4A(^Kt*Zi3&mNIQOQIV>qB4Wh47-_U=3RN>y&sGZopUJ2C{*Z-=3TXZVeV zEmv^jatyQ&>nsspG7Z(V_8GSthbC8XRk%7~R32sSyqxB@VhF-Y9~}|NZB$N*V#jME ziNnK3YO%{W{N~MZ04tPyYmFEi>toVJ&Bz_+qb=Jv36T$4N54ZP)Y}AP(`NyH*LcfkD+(~eaMyy zG`I5V`jMmyyvPEb1BrjN1{sy6Wx1&Ksh6xQ zn<7Je=%m=#w|e{~#ws+FEICJYnNr3$7_@BQ!?N$dL|P$o$p7(@T_S?*yinI&dE?=h zU&A@a3^ZxEx#d1-kM*y|j2ZB|FG5R+;@5yq zeG8kRAOoW_y`6rz+A1@wdOYf*y-~~1ya-i1Qf&Mj83~1Xw zIw@b-c`b)qoy%rVZW;&H;NYC1ZxZ))bL{MPZQyaD0cG(das>vh`U4N+;x}RsZ{#5) zgahAfjS%*dtesV&T!N2be?ZkNfc#kfIF=^wfh8x|dJ=Sc&W$K$Ec9WlP)%FVoXu~m z$G-0J%OORLvB1Ubp3I^6?Lg5>GO@*b?zQDw#0B27{yMBjuCDuJpI^+cUy@uJe%sxU zXpl#^l4*VSD0P{YJl_e%c9?DOC4|mKZ>uqWb_`H7WOHn(7ziTte5RO;Q#l4Qp+RVc zbVI=%WE8KKM-~EW5nBJcz=9Y2k(A9uQysdHrA9qy;hKK9?gk>dm{AuXX~myyy2Jso zpevY{F6PQZ;13rngf#d;1_U31{O@;*Kjhia1Q0pfltn${vT4Z?`IZc$mvQHr^fz{3 zjSV$XRmx?HSQ%L|f!`Mjjj~jjqZ?q8IZKgG$oZyB*LfX7EQ^S-8<$*LRBoz9H|pjd z6RP!gt$`H4L^ak7?~;F5^}ZKuZ3pyczOB$q{t?}KE@8J?SVAo_Wxy6(Q5{POEbwlvH8>Jx9ZXAYCS z77@23*ceNjyau4Ca}IT`ncFV8a7nx>+=3dDsCMD=DyQYTzeZ)SPS*SL){MLe18>~i{X z!iRvN<;5547J%Nb?&uPF2YV11w&CviwaB+bA7}#rbbUv-Cod4Gw)K#^)1$Ae^$6yl z+@VlZU}&}X;v$cyVKpKk>dc3j^ZIkk$C94uuY`)J@1^=Tqex?aRLH;Z0JvM&-TrH; 
zS3$P#>$6Cd08uC{>Ls#35V@V%S7==t8;5L-*&O?9<=0jd>A^Bo5cNri!rTe==WS&` zhSD1n!?%Rrv5thw)`*4&&#!pNW6|y#PYNx@#P|twU}c7xv&EKcZiUI z5K>)1q3_p9&2MGXaGGP$(?Uuq#bKlv6e^ya{#!VeiGwI)L?T z{U%EUITg0<_`*=ta>X_Fxb^L42aJ%_ zRycO&JIX(A%%4VOVX!!}-+dB&A#^`!EI?X1l?SqY&$zShcE~1po zDK}qMGekq5j0hGoV3H{eA;Ay{AO89sy!OZ#c^ZKF!Lu64g9AKX2a%z;LECU%0?N6o zkQEES%*B%0){)jHx{}d%kvU6Tw|lPj0Kcb5rsI39YZ(R%xWhOrH(`y+TqkL3^X?EP9d?)gq8k~8qc?%^5_O+is$#6{>=X9i zSL;91@LMVfvD9ID!;=nGS73mTv$geK+GAFxj)fKuu&}U7bq5D@{o`&M-1RTK41);8XR7yTMR}C7=Ue0^xdeV> z`Ndo9m4{;rJrkIQtTUK7Gq9_VOkUd+No*$X4S9XL2=)2AqA7pELUVAa%hOp(aPGZv zgVlFNu2oJ@v`s^=VLtNvZll=k;}#14kYo%S}0rYYdg%1Wbzb`t^^{P+`$g#?TPP zK?uM3;>Y~-t+>=-Ee+`7dYH&JYL0l^NPU`RoY*&-!q<2azlh1plxeQnNzdHlYI4`B z8mjaWnEeoaS>_|*GTlM#s*h0w?4Om{ulcdHD!~5oiNx{3-|%`bdF{*eUniF>NAD_~ zv$&lG^E6o?+jN)b@L7YKp9e1VN!>1Oi%PIs+9l_)-)0}Lb&7J`?z)G}mK2fB^TIDZS4Q2VIk{s|SFXkkIQm>r7-Ab@V z(4U>7){6^jDq;{p%_U~pV5zl;eM-oF^Ig*?X=!Zj7lC^o+Ru!%ldM^Iza8;^gOjG{ z{QeV5qiRs0n?}VE|gb!KzauB zlNR)#KI(yV*zzzEcG*s|pe!?0+5>k^d7sv^nLVE3Uf8d(?TelFk3?}J&jjh;7207) z;O8=a?yN7;(wY-AB|-@ntVKpGHiudT+vb5qh4yI&Y8ft=LbEJ^cAm{{F?`u90VP-D zXK~MSv6F5SN`#7&IjJhDCGs^}Ru7G{Hy52YJ3W1>T{Lgp{V=1NRY#%>Iz7wZ6`1h{ zG>C?z*xi~MSxvm~xh|YF7wJ^jA+}Q09y?VT)-z^fVH2z``^EE(AWOnqKln3I{016qs}`fn2lCJI{)qr;NW;)sw_xA{KP$#71;d}9H2#1Wf*AO&?xAdVlM#2>CNlJB7@ zH9Pcn8Q2xMZpR`g6@t zkR4YwNL2N>XG0%51M}#9YkKH#T^k+Z=HpWSy*!5vTR2<6E!m5rI^s+@V#MuFj69D0 zTn7q%aGB#*THlnNecqUY$61vO`u9(DH~9m1mX2tIL#e9KHIPlX-fLYtBKhj6IDR{wW8%%8 zQF5*t)U#DDdzgt zd05h3m~j%LqT=hLH!Og))m4}%aPuQ;`@Qqn>z%684sl={i^FT9Pf@e42u#rX@o(CB zYFlpKnZruqbtj+V+4^G2!dc_#ps#`FW_T*4mu`7cQ^iuW-I+gd0VL6RpR-qUSykdZ zm)+Hq;we%zUD|ff#oD~Hrx#H2ocq$geQH&sYr?jX zU}F8JP)BrI!SNP-#?4`gOyJ}DY#j3r=$W&HYK4@~dIV=3Ovrhj*wn2$Ji&)N^q<5( zZU9&%Z}QBMHv04v_M}QnXD44N#Hio;r|y?%RMQs6jS2N=4IJ!DTdXM6&)yuY z5wHn`-CkLeXv*biQFpxqwC@z})iijkE&cutq_=`K#w~Q4mVpQiNdD-ic(H)vpmx6?+N{DY132Ls8J6;KmeU&U|$pJ5zTSVQaD-fLQ>y_EgK_7~9%?ruie;A&Xfr^NUjTTq&5Xho?yee5nKN)$J?TrZ(t>)@g(&Bw=W 
zR(o>j?a)(6rJutBy+odA;eSvDJCQ^O#WPV}|KWu9YyC%81Hq9Np>=arr1Mq3#l?G- zK?!+>Ir$g!5iyX@{RDoYYI9778*tzIwWmK2vFmee3KZRrbP5T*{YHF?GawV8wc4Wo zPv*vz7tGK^Yfh#M^eiOVoi*S@Brg-pz&H4Am~Q3$10g)70jzlHX@aJE(7IkGfxcma z{^=HAmgddI7ne((Yg2_V-n8c2OzGP$;AyJ0CcQOk;-QJ@%76E#rYEdOSw&lZfj?K$ z9)r?Pu~E50;mPmxfy$ez1R3dZ>W{UJbC)GgPtA|5-fYgAVl7kR+ld#eBe1{ z=~}A&)>X0vd7Zq;7#`mp(=x=BmH9>e*;0(C&q_K$#l4%xo-nVj9~r!$Va)QVWx_L zXd2SvHmrQm-7u2f;1C^DqgfR))V-cU4)^oLNTH&1M?pqL!km%F7(w3INCQ$7gK#m? z*^_}3-}PJ6*f$mj5cU(Lh(@!$TxS3NInFQ1ZnnLl(c*v4E(QgQ%-V`=b-PFN!S$&- zAhCyGK3=Uc@=d3v-stx+F)aHuzGPC-JAu9?OY(1M%}zO8b{frK_gQ zNuBC@ll5mv{CO-2CPIsSc9@;sR*Z4gvLH2zHpR44ak%4v0Wi(X;}r8WH?*V}uo!d= z(7t1Boc#eyy=cL>ZxO4UcQ3{IHQS5G)#8XI2*Y(=8~V4%<@_Xzgrs3>=GO6VymeQN z#&2qhgLWb+@A!PMYYxUnNY+Da(tfrN!lkdo}AWQ1(I{6;{>KG2~`v_|t zW#pp^_ceK|)w|H2bH~=znKS7#$>U2SW&rM}XbE`hZ{x^3b-W9*T_~w(W(o_}wmMH8 zENp(6FwT1}GHrC($WT(KEH2qBB!Q+&*Stl+f)tMc!6>3gbt{~!Pu#1QC8K>Gfx~WY zd;s`F`e~q_lM<3%MZ&HHuY9D%h>)2nWaLB>k`TjUqEu=yx%nJuYRslGv7xcCKfXqWfp2l>whlD$ zuUiy;%X^Xx^!31Mc4*6geYt+LT#)pNp4=EhJV1|@N@TUQvf6H zAAkRcBMUKUI|iP%#T18Vwom%524)Jj8g3ZbTd`WLbP7m`hpDj=pcIXDUHjQ`;V3?r z0(ZE3UAX?8ZKZ&vJoWbwR0TFKlW*|`1G$f50V2LmS{(7B*!KpNe1z?>0D5&KiD`}9 zx-#M)Dwh}9


>Y^vrF(twq3QbI{u!cE^lSJ48dl)KB`qAw-D_l|xDFEji-Q|^eg z!l@<}X=v1Gs8BfNf~MV*G~8nqa27Rb`tGvX@F=mB*sw9N&(iz#rx~Cio2YH|vkrS@x((yc;mBYEVgwvuVJ?8JzAwNemic2i z%@smQ`AtDYU9EKSGb&gmG@JEx8S#op_lJBq;;>f)t_Nn{rgDL?!}NAX_v=$rp`PE- zOmkTwbz?RW!T9QNDh#_($xL}UFSAc=?yxucTpb0P{J82B#?v?^Fp+)j$`XoQl^iw`t6B(hli+&F8HIhTqEE?QAgedDw^N0H&ALX@cXD+fckTP;?X1&2?Bk5fGq z8;kB&&Nt^8&Ip5y>Aos$-lF9Ay+J|HyU9acglT{Np_Y+#neAPfsB;Tdtux9YvEuQImOB7T9d^ax^N z{ik@?mw?=*dhplhGs%BEai-xuNa z#1`Ul8UDQ9|H;!%1q==QO$aK@!P&JpD%EVbxF<=iI%s(Z2o)A8|I~wM+t=_>ImBx{ zwQqmNux9LMX0E9Yvqx=FZ8~EA(cI1w`gT$JV}R3p0rt&9tT+h$khr@*SfJ0w`}U)W zlJjC;n5oSXm}r%H=n3^(I?M#25kcOS-*+y4+Hj||jE!Gr2nk!00bp8pHa^Kg3CNEI z$f<*B0sZWA+f$h#IF0Wr8bw$K6f)wuBP>fdKavZ|u%J?*i@B(&D~+eV0f59c*CPf$ z-)1UBio-IfJcpm#n6F2YGSi0F9E&22AC2;$P#>U(ZPCu z<|E2a{m_h`E^IJA^q0EdX)}~kw7@Arx^bg6b(d*pz)fA!80r1Uk3?*tQ~l+`0#q`G z`|FU-Bm2lN#>L_0pJ6U4a@hiR+sxSXsS_xWJK>4#nsTRHT)SMHBfX&bjybv0?+IQD zF7U)AXm@AA$c`WNJi;`cZ8}T!3?=CYfm5k>BApGwpAjU~C>R)a)SPTCz}IH6{}M?7 zwY?S>-*19S4~ngdHGhF1f>5nTMR_**+>Ll*KfPvK;nXjnyeemzWBCu=;+9L`ImDy} zw~xR`-j!6*WuN^@|KZBjUxY7wku7Wv747Q@o*opENAu14E`fh?iF~=kHU4JBAl*UlucHUT^ z*6Z1(JmuaD0ThSJNXHVUw!Et*?7aT+g&;(O#2nX7oTy%-R#?gnI?Fv$LjmCvb1vvu zV~y8Uu>!3K+P@2{C9vuNEz)Z^$-RHAz-fNWKnmzO;ceu3(k90?b)K=AZPX{196tp` z_x@a>|Ij!?rlScU)dUPRqhmEQTnR`c6Hd%BBSl>0_gdMMf|5z5<7;*ZRM}kX*={~c zW@?TG$1_=oC75+k>$8P@8&u9qcf#8B+=acbE;-LpX1jvn^WsX-w$SLo_o`X1>h8IK z;pc+!ExCp^u0VI{wGLZGqZ49ZT#ArMN8^{j%%0Ari})HnngP|m2vF;YkBY)0^L6by z8y_MLeqhkm`kN&NL|RM|g9WaK&d&mQwjPPh+&I$qD^Wez!0^3V>fo&&{``GMk^Vjw zhH~>;hANZy8*1fx?z%J?-;Ac1>s6s47gV26C_XY*C!Jp0%Vq1M!1C=j1kQm-ssTpK zIYa#$!|t)p=6`==f=0PsSFprL>h+xvfz&HGrp<=fn8MjkKK}FJw(2b<>Zg-CQmiCI zzS-cm?4a~F1NNb$f9pvBE!P10Ex2pci?H`EwV1tFCoEG!Vpv>j=T4gSzCgGSd&AV* zoyu$lmhY8-{&bd-lhGeH-S1JrpMCWO`uS{>^Bfg+Z@Fm_V>uYUoLKd$BZ#cmFr2~| zaydS&$BElM@piV1$02TIxxrkGOOorjYQbma#wQfwh!DP=UN}7!WJ!?{xhYj7E1SJ} z1A;i`Qh(e74{?_cgCHyU4~|`)crlqR%ZCFeKy_>z2WwDv;ln(Twe1L5WLmT=JX&x? 
zdj3uFduF250&8GoV=^Trjm~^UgQnmC%{~!SFJ40e`z#9h`fD`;N5l(&<=I|*>)2e- zL5z|4I9pd^%kvY>u9kd67r{{mP&?{ZT>OU0cq38wEl;Ng^Zq!e*8K&3^(0cXb9Y@( z>N6QT3UqOr4UouI1Caq(=#&_~GQ-J~-Xxx{`y?x7c%R&&pIfk2T>hv6@|zv8BTS0w zcYu*qwReK#fGu_6x_k%v)dbqbF0#(;K)CU+CvBah;IwZgVI7>xI?ZAkhci>RVJ-wn zI(h~JeuIUM0kr3mMQyu~`a+b0Pgno0d_r-I!p?t-cX#)6Nm)CE82-L0FlrH~t)N@6 zi}Ka@{58pk9vp=pDzgzkBB~gg7}dPTFW7are+` zL>VTiPo$0mhKo=k6~@XSb%+$;H%@-Y+~dVQRfV-b6$Ulc2RIeHF-~Lc?_hrpW6Jn1 zqH&VkS5HAeB$-%B2pB_VYr6w-NcJyNpernSNjr^nhXZ?H$5{q=pwNJko)dC;+3_EN9yfC*yhi9hQMoc1vVJ#EqUjQ@qa%&yV zV)U)-w=?;jvlj4a1NSxy13pAaJK(8bj)I4)(gT_DFvx}SWXTf-vZ$rXA6hli)H{b>ZBq& zRxC(97}mKjE8<3g_KGb-xgN3iughfr+;>x`og}|+>*=g1+{n34sz_AuWS7NL6FvjJ zzQ7MGj4-TS2!V`gl+n~dl23po-U9;&!j}P%IyW)#f21DzaDJ=(8g*~qR&$Oke)wHlubC#u zTUl}pPWIT?=0*bhtDI=;xqGoK9ZKZ1A=9WZOr%uUkn50Rx^NUvihH0}%Y8MGE;O!d z>*KhE2t~^Yr1*MVRv;b&(a@k{N>coz>mLz8lq@4|k%~600PgID1`=lE;8BME9-I^e zxguW|YdF8R_*>3lubV3d~!ibJK~j0|C&p-6gPPy05!EwG)stL=0_&us`J zz3Vpd@IvqJL2vXUVtU7~p}z#GmE!^I!2Wk@eDLP?{r>@vCp$Pi{FJb0!X(s7(MM-P zuV!Eq*boHj)TH!PzUU?L`uX+C)u)ILH750_jy9|DlOjGKQ4Oz^&h#=?&8cSIuR`fi z;jADQZiEPqOw`0!wQx#-cWxx^B|_|YF)XOvo1_wNkCR5y%=Z#v5MgYXS)FL2vf|L7 zz6Clrld$e?gdgt<%l7C{seGFehz4NEXk_1*P^poKi7H z*X^z~XLwrWr(L?@ip4}PW$+vhYk4na$yCd1<4wuH3G$1jAFzQ--_n2kqd$UUvFJ_& z(HFiM58W=mqN9pEvtI|?GGngPf^)Rp1#WAZuUACKht9lEZyoRKOg4B+#~ua(!Yb}Q zumZS(`Y>mZ!4B3QeAtOhGd;|2!VdtFwRht_J+p3b<#tR_32y*S;h`hnt$JwPY$We> zlj^lDPFHx5osmr6b4NN1N{yuqWc>>UgPaTUm%>`uvO>j_0ZiIWS*kX7i?n#Y>T^BF zA1NT#=RVCnT-9+mOrCtki+%`jPTt_-ccX4y&mm_i!=#yjTIwFmm7wIs^iZ+@LeOtJ z&azXMGY&3vh zH~V#}P@&>HhUI76?jjX1F%TUM}qh*(1TRE_58w^qWP-2!5Uk)TpLm>zZxc#ZHc2*GOZ z8F61y?`8nA(s{c6|Hkc{)*e4L^@`S4g@R(XD50qG@cV@C%pP7fH75dVzc#&}ba6Ju zB<1+N*6uRPd^=anxQxNaRaQN|Pyf+5hAJh`{7qXF2`-(i+935RDA_=xmJ$dcS^D~I zxVR0BErKok>LZZw?0JX%Lge>Yr6XNmhnfaJC5Ji$#Xr|WI}mU_xv~;=4-(*=lPnvB zU5{$ZwbNHuPkgY|Mcn$N3z>FjdwQ&juseI?T6u=aH4%gWN3s^)5wP z9OXd#V!?8C^0D;6Bp6vJ;lH-d`Nd)?sk?x(uRB5lFWzWR)VUaER%II-#6@Zaz$f9E{415_Agr89fgj8%{4wPf>)s%MP 
zYj<})l9w)hMu0LQB)Bw?WJ>_oRT)@B?e?sMV3vNx(z?gKj}9}IPsPUv8GwQ#s@2Cn zWW@HCFvMOeoC2yemn<~uiax4EqW&9u;o6$3ce|qdgIUT{v;2=W79q0lBE#bP1OXW! z>i0kkyaN{_0BBFMwF>W5veHc6Sv~awW7#~`$A!0hoj-pE9r|K#v7JPmzob@LKg^N3 zDQf$FsB2XG63vr7vfc$txu7i>x6Nd$jAN3uEp zBVm?`=Af39e3_{5`LcW}1@n3mi9Vfwtleso-!fo&-C5SP%~7z(1#V_j*RG8T`7&p9 zBt~g@@{ZUwR{8wTmSgk@SWJ6k(d#At0?%a8wt}C5H8u5F1RlBm7Vc_+C2Hv-H2!oiXgRH8=^M6f;8ZAxl6DzV%XDx?c{de|jo z{!SJqopdAu7&CAuanE)b+eLx$(Rt**2V`)l==s-Y4V8%*c6i`WdELeyrn)oKgNo`& zJX$x&*l8p1snW=4axn}KxoJ>BT*XNIE`_oheZ+D^GJ3?tuHnP4m-iK4goE$NQRgg{ z|9W6cx|-{t4)waeRWP(4)>$R0k&IM*hAZLrRNg?c95W7bJ7PurG7yK4J?uPTB2c#v zvx9jn5c|*Dl76?cq5>rN;zDwa{Be1s$X%l$G>!;}YXqV41Zp*P#j{diS~&Cfc2n=^ z`%u}Y?iR!;F3{Kw+_`9Thb+~ePvtC?Wkx4zRviJx>Wcn(CWaK@5DvRr?+x4!IY0Ih z4i-kj`!y+TYlZc}Ll;EF2T&Jf%sO9d1RrGU(l%^YlK35<2zWj-Aq>6XZobU9IIBhj z#b{IMKMMkmPfSCweaCiuv>qpZ&Y6wmBxtchB2c)`^kH zta+Y(aJ&H-h|=gkWkt)Fc|&T24c(xZl{J5RGH7ciMvAA)u(!n+zX|HR0a=&2xf#0M zeMC+L^WV-$Y{2w7FmK_qVx97bE>PTNi@6Z0=7FOR@UK?jqOATzh0@!bqn||)Z{$tzH z9GNDR9MmMpSyGC_A{6fQSrI3-;;{G0AZLJD%!`217itwA^sRT7#`)8d+&;!2TX*7P zDdR=`ULU^PTh7{K@FQ4O>5@Ex*Yd-Vq)cBV=^a`^Tz>&N=OVrtv3L_vVB}i}S5H=3 zZhck?<$kVwjwPwa0_6UOo>A?dqOYKOpTSO>5;qTaw zf`n3fQGap8qLS*$#hZgvH(}OAUrwc3STHB&8Un#Y*FqPAg*71W>-?58JSfiPM->N= zdL@X$z*`7=e>%Y|1uhLIDYRQ zL0m7~@$(ce^gS$#YPO~DL($H{r&D3^6$4T809;N#BlO;*zLKlnpR96VJ z%a*L=fpR5T26hOFWS^RMVmU)iJ!_+bZ9#(gLLxRG|X1I z%-+9-HAAu3gsA2ZRk`f&e3;{9M(DaoQOI!xn&?@vBaLCjzA^LupWOp-qAMu1B7vTQf^V87aU1gOhB^C|22i<`P+F8uH^g`uX z@T03P*rC{Ge+Z+Ei0uon4&a_F_Z8YYeq#_xHZX?3N9s?ZQX+$f8K#m`P||09?cF9q zgS@4Z;CJQ%*`m<6&oibti7>w2_`96jJw2|m;BaB8gQ}~2{j zK$kea0Uga=VbR#SOQ2dd-4y0OCwC74EA(I@I#mG_)c3pCUsfo4FvRQ*0&l&V1^hIh zxOXP|XI!e24n-q8n8hQoC~8@sF4VmikI2S*Nu#?t?qm5FG86qWV@q|J@o@GerDBHE zZztb5?dRY;!h`li_Up{fL@B5p_|>0O_IR;KJV1O`m_?%*r#-`u+#keP-ef1Z3z}t= zO~<>m6!286Gk$BSJ<4=&@%60P3FMwynh>7+42WEv+H)LxmOPd$8K)wvGy}i4kzHpG za1i&XyS`rEy~(M*c6;mLf4heEzv%j==(ykT&xvi@wi?@Ltj27NiETS++)2{dwi?@N 
z*s!tfWaszav%6P&F?Vxj&dm3Hp7+5A4kmQ2cT=0lU@4L8?3RhK9M4P!{17@f0VO2AQ+`7&_AW%Se@_`*Kfaf3xQs;>5*f#EEZ5y-6ef_`T$K zaNZJz_BPK?VKEYrSjIFf=LJido; zFyH_&7|a4suPmRbg|?kWt;j?h6ro*co^v>y3qcP`0aQs%I(mCK^%5 z*;jFBk{uDk*HW~$g)>~zf+WV?2}MDK05Yza^K+&QuUHeWDrqt033Il z5$6s@9Wu$?)ry}zjqGgs7p^4^Xj<^zvr0X5LCyV5f7+~>t=alv{()FHA`16J258t& za?UuyUJP2Y$IRkOLT@BON+`dL^%?@+4D}&2@elJl|* z)~s)Z1{({7xouIzK@0!MrF3h9w^GM*<|8J-I7h~i!Y?7LDE6&rYNAT@Jc@rA!zNlf zi#NwQ=`u(R=dT)LJ@?B(4FHoi@kJ*d_0%Sj<00>B96+N2*dcKQ-GgbZLAC`>^8dXW%z9kT15 zHYvQ>Kg8$F3D>ho@48w{?I80%m)Gc$%+tb!Gz` z%*Ku2-B))UZ?!k>rL8w=&(^gaH`tnij&<9zU9?l9mjD8x2hgz~k31HGmfG|nJ4a$V zE<1j3C;it6!t$9+Y>^7%cc7|0VP9$BDjtLfBSTkxKtC}f!6-iZL zje{;O6R7r`y(bSDz66trq{L9ZE$Mi*c80r-5WXbk#0MA3AZcuJy20qn4XscOD z7~OwQ`jg9~jW)WGj*sg(I7^QHnV2ZBgt=L9w{MRm$$k+kdZs-AWYQmXy6K{l>kudYSuiO@Sf`?Soy^aG#T@=DKI~Z&<2bKs* zd#ykvtN6mN+H48XLPIV%Fr}*?a~ZRORG13YeN#nLyqqz)ctOW)mIW@+;(>MT`uf*f z&HBEI4$zSj^|OfbN3RF^p+A0D(|r-?KoMPENZgaROU~d@*UC#SSi@5>7XvJ@BnaIy z?NFpOq?eNt?Ol~`m^Z!tER;&!pqKsKe`7ohVX+H+GbV*@$`&cysYJ_#eY&5iFiGdr zmYXG-dL|zDT@ZrnGwXRPl@$p)(kurjsE%jT?MZPAI!=8a(7z-5-5`}<^0E{O8ah{@ zKU`@hz`rDzC+E|w0HYX#b=3ageA23|zH8dsM*~3{;sF}WrPOttZd4_L1}c*H;w zTVyXL4qBlIyW}S;%EV&Vnibw^iaiU4M>v8W7(quR`O%Z8kmD)E@K2QT&#cZYpt`s3q0_D4UDnHZeE&TNcC1#rly#H8c-3RmIx8r?;Mya!qm&Nv6}YyeMv+d zwO9xkGZdo4)ah0DVvGLFkc42RHo)lMOZDtn!vrb}RxS$HFT>~9pY#iTOf)&^wo|N_ zd4^qYgPF}uf`>#dnFv-|xVj%MSh%4jAbXfYO&_$Q2nz{~#AUgo+tact||?pL(Cy_k{#U5{l)=JtEbuFiU* zz~_6?jW;ATQhvbOJOc6bsCx_wehSjVpv>ZxIaTQKzO&=+62y=;=*5TF)hK%J-r|H8G7koi0_`B`!X28E=8=jjCs z!9hC`C00Em3%%LXfgE2EpdXTEoXx#+ZtqG`ptTs?`ryNuTQ;jSDJLp9) z#*l=6(6;^UawT;p_3(r^hegmaVZY9SzgJzC|B@@Kxud)A=8=FS;%DCVwj_)G_4E?k z_L_SqP+Ff1x9L@FfnmFzK|>@c)5H|jpYchglmOrzS~)w`1F}FxXpCiA1WNym7=~cp zbVk0k6%LD^N!Y>G`E5p^x@~YVDmCfy)W&l7C zKwe5*b0~I~Hs&mPj#DVr?8-C}y*VX=&#*>ST|5dhX-9lJEl~EdG!SWLhFMdxx``1c zsn@Eew?5G@c)`vIx@8lN&x3-90JjeF=(c@{e+8llgwUk16jJv_^>_j45RD%L~@Qmy)t`BT?P*he7tRKNqF>Ct?D0212n|6jZ7sL7J#i&DzJ)f!W!qC=i9~_-S`%UPDQlr;^Ra=HTow^wTL4mF42`= 
zxS_iqG(c^~f8qVVUpGNe-yc8YKW{PZyq>w-`Pp(LYFo(gj$aOmo(zlX5x)Eq_R+r7{K`JE;tNtVak_7vPZD{uH)q2Gn^;B@QAOQ z8|oMT4OC;GPzDYOLS`<)cfmf6X#N;3sk1M-SU8 z^xByOuliiSKrDIQZ8Pq@g zS`+U6Jrw@zV6@o$jN`ZHLg_856s>rw?sFl#4=@XGEc+h>?W3Cvo?f`}`#v?60BzY) zVtaKc*tA&6-yA)583wrDL1Ey#>}bD|9h`{;O6@<1x9BsR>)t)WVjm6;B{%DfA`vsF zP@}F&->>HWheyF-UlajZ6PJ;(72n1u0+Fb3Kdd{24_Z(D`OSU`eyY3lho#1}cqXFQ zkuC~S5HfX_vvidyWiEyM$R4DJa5PljD^ze~F&2Q@^_EzY2aEgWcnx9KDtcy-;;Eg+ zd7}k0(y0@5EYBZViTRLc%D; zfHj(b3DvGRbO41V4(M7$%K3UHQml{qGx5@`7WHpC9AK;6;rS)s`nz=F?NAbZ;OmUa zUgxXC$_|7w-LNk|Uusx3 zlK>k8AiBWT0e?c!?`dOZpaL$ex?(4J&Hycn=OyN+Y=GBPZd;Nb?= zZ((CCoyYT&T+~o<2@s^{X70s$>&vEo>-4h@pO)jr2zHQCIcu80z0MDXUjyZiEiI_! z9R-&9khd-0**)#Bo!0jUd`W#FCg#i|2!``trYBTdP*H^p9OV%UdY4Fr{da0L*;EA}~px z3R$1mOaFMLOa>RK?9BRql3chiFT#+orlA79F^DzTW_O9AWN9^`8!tf4oZx~wUvr#A zU5MDPnCCwrmL{gkgq1V^>dZv!73HJ=(+9VeJe-BDw_K~ad68e-`a%9Oo_Q-cA#!P? z=z;<{@7^@~hmO)(m%|_uf8Dx0A>7NZpnbuOw%_ZKtxafq*_{cy8Tx2D=}u^S`og^` zhP}^fD8z_ghGo;7%6&+Nc|5HHe}pr57F4Ll@B6dvot;K$49#-DkfW}Sy=<^vE^)(< z^BmcZ*EUI1)x?OIl%_!H_;lG;WE$ht&sJO|sV|)QS&%te3;zOy<=?k0QGD8$o{ZZ( zgi>X8?+XREE}Ieu*UZYoh+xW<@95S>cRWB{uHIm7&H<5cTS3DHACAgpc6e9TkLKE? zc#R!U3h|IAVMzp?1qwskYO`TKW=^#T=-W~nY7bLuniO(gD&=WrcanTqJ&w_H!Vww(m3S>Gs~Fb^&KY8WbeRW*{oi5q-=6Sz;SH_MySht|#gM;;?2Rysrde z0)VMOJ=0O9?>j^quLggqBu(eV~Q6=K0oyQKs#)L!9#eN?WJsaujG#;utAFy`xWB}Nl zh{O^I`e@c=U^+Vk!rmvB5GljubU_=WnqQn9GmsQJ;G7-6+-5t`a8D;+oWPD(9>0@F zk+WO)+VT6VQJzzz5e{MxBzWg1ipi==1MCXm(o=?nUaD45e~H+|@I*DSldLyI*!nCG ztiyK?4ifDD|MRu{QK-5@Q4IU-wY?N5`DzM7)+i`-E{X;K4zn#&xEz96>mS31b+5k? 
zz78B0)G8J~U7gzaiEV-!k9j6n{Up9dIGmtIT6gMzE};+nF$GFGG=EPSB75rwB@O2g zij}zjO%(FFYStD(z61yBae?N^qVZQo_aJ+~TzAmaI%j69B{a%U$jsH8{yS8lSBGzX z*Dc`VbztOb6yvW~5^_{1#x|w9h*SY6j8u{uq#BEy&EB#}9Aq)f4-1<9%CGaw&!q-J ziyDeNp_v#zH~lox%b#KiX)@JQNLN(ghoR?zM-?>r8Bi^`O@egQ30@5%$wXC*oPN`2 zID9U2YdHVwC+PrDIz^L5Frh+m#J74`^RH0q1(UW!5@7YF@}+pWnyh?sDTvW2l#}kG z;qaqQcie}q>o3H9Ps&Bp-L!bg!sO;BqRLb3hkbw$Z*q=9ZNBce`$lnd%I;%`u#s>N zFuC)h+jP?CmaXj}*pxYdz8sgJ$id;{1VeDKa`}TE3f=qZNGWEXp9Tp%Wqqa{o%K8| z_Y*^NWXVhWNPneHsM|Ov77vNU7!Kat~8ecH2TpoT0dDr?>Q;< zmb2J&xH!HG-47-@Xebo&F`*!Qc(wiTM`{ognkj44)Uz?DPz{kR15ykg8=QM)v4a+W zRWkIO3O1k+k=;KR+O}C=5jjshCKbbb?{Ux-LSR4!tMhj`P(*?9L zB>9hy=1=XEuuOjOAy?Jg!LyJEt0$B8kp=K3{LFSqEa@3eM!WyXuO=nue+m z)VQ!nSN)o**w@U*ld^kJW>kkJ8v@r>Q02f;uy2p5GmVx{PN|!v)_>=tA%B3O22KKg zr=>jfG$bl?u?CN4uiZ6av_Zv^!)`%s!|Sqf<%zAEFB!Ac@FXXRExL^^ri`~d^|~lI z*{nZ9p`(=Xv#+1i-&dgDd1vz4lSf>wtr~l>!B$WkeY;1t7>sFQSBf1WjvKuy z#*hv#MMn!k{{57*gKpS>UCqELNnqGy>{dFNVumi=bakS%tmdf8LVqHe$XuaRp@wf-eF){N>BDA>WRKZC+23!P>vCPET8TYq33n z3~^SzBa4thxv9B%IB^zI48p|f#BPbQXn2c@YJ6-tci|*Zk`{P?KI(T6&OuNZRY3K| z7BoufA;lFoXkq|7e49K_F-wsQJ40=k_;LOq_3W83Ce@TNUH{$h`YGeOKNY%&Hn>BKM zQ}P+oSh>mCTMut;XGQK~2SrIlsEHPdLNWyI^>z!A&Rk#8hrBLYw#0T<15*)2DeowB z*weUVZc=W~mV|cIgnt`%27PNk`KAOR_YUABPk48A$qFpKM_@iV>jS}QRE%_&S#6C* zBvJw>oZ-FP;%phb3BbG#Bm4qlIbmYsKKDmu_#n6);a>l5(#p}iXTlCGPH}8 z-#HGY^HgblogTsaQ~Fl__W3fU zpFUSXD-UIdBkZmn|6~O=t8MZL!M6p#=lHR+gAd(9jjyP|)&+DI-7nZFY8iIcpBU|< zHtm0oIfOwGdvERYm~d0&6c+qXnUF~?fc_dPf9Bi}Mb>q-_L82@F@Ig>$5`Di!C#TD z2ipZ3=zExHQ0NO#{f|olAH-hBr>Y(8n`!OUc+Q3+9B(HV-B0guq$8oCd@+TPFWu^0 zumAXoZc4WwWEZa_dC+2<3H|__zX1m4i(!*|d_}>}BnCNX)-)F~Y2JdY64y{Epcu!@ zT?;ydag4ao2!P<_U+lf~D@d@6vE&jNl6Ui#vgVW&7jVt^WTJD81D8Yz&k&v*pMRT@QnVE-ThNsgWE9Yh_J=KO5-0{h zZH#ssIMAEVw%1Sh0XV$yBl2ry6F>Xo9aU??3K6R(4d{#b*6Xi`ZXgKvewwkjuXWi! 
zi?<9_`zaR}ulX{TT7dcxgf50@-aaa8JFM&n9+3J6CPRdIpAJGDqgY|wM}3@-q3-<6 ziL7g@By;t^mTj*<6PtWPLm>m{TW*Ki`6{jRgEffgqPRU#%;aAQHrnoK-iAbTM|5V6 z5Uv8A4KKvhg;fkrC-xEg_E4ilvB9EUZ@pp|L0;XCJUTXRsWg4Rf2CPMMOK6BK7R6h zNV)EAvc(xFG0kUYjgJbN&MqAdgrF7FKpx!7!L@k^|Zr@t}b8v_+8YJNwgIa{FVVD$C&AAvPS zWO;p|@%A1vZs2@u$$7W+CTE!EnaiGor*QmZ%{%6Y)#=tQ(M&hAlitZ{*y3J2MtSYX zVt=?Br;@+@dZ*uFCFtelW%0Es|HKveg%)#%iuOy$uYREok=h0X>5*1qGa`D}X=7Ea z=>?AR+V{SS0v)d0F-i(|PfmQvFRA@<)ZuOhBC$DeFM0j3=RJuFt()+l<1lhnIr?B6 zqZyifxw@Uzgy$joZ&Q(i(?jz109^3jInWaT;q%8Za_?sV$weeDz@H7GdxYcSksL>b ziddzOYTAl@mJRh01{p0U5(3F_Z_Zg7NCw47?D$=lwDCNrTHROQxr$!EHM(qNO;;g0 zA>GvuD)juD6W zM1+CKHZtqUm;>{$))w8Lea5`~TsX(6!@JX`Z=qkgmoRgz7JiuOv@}O;O?v zX8Pgf_fVr+t_f~&#(&m2z;)p+rR0tMhzQpI(EeS62X8wy1KJuo7*TLeeU?YzN@4v$ zW|vzJY~E#%#@|jKj;VEs+^TV&u(6l&Jk9pIZP1HtBikEyRb=1sJb$Z*-Xcg{csW`I zH*gbxbZ}8Uu$DZ9s2WCab{=7{}8BM z(Fre;OiCop$R8rdR}3yP;lx&w4w}%8?dwyr+>8#or38wJaa&?;Ue8!~&ALxCS9F*K zJZyKN7e;k0_i@$O0V(iNl1El|wbUDP+gi4i_QNTPjxuOD0q{%3^Ez2v3^*g_Q$ za#)zB->JF+Y9N!6-51zfYQ!C$AFm?z`Px5(kGAfudT)&(A`&S71j~)=yPQd5D7ny(r{yRIp^e^Lpfm|ynFa1*^ZU_E28be9l4#6sWlu%GAK!#`4z20Xl zaTV=_<55Fo(x;4!Yb_yve{=aNws50ou`;PtC0Xo~8_)wvi2 zI<5*3ChpTqN`}g*_xA6NMD~8z@CY>^>M4#!`*;g3hRIEj11S6}*#h4`8CXQ6UC(c( zE2v&POFzMfa;vzn3=h7>ExsdDq^Uoo%0p}|<-X?n zK5|3peY3LfV^9&D2D@kYHYOTYU&Rv_ey+wHyH^CVs)?L)5Y)`jFIHBi!2$gO6&k{b z#of)apb(_FtpcR{r}to*SUJM*0UXaFtoB@uK zl1r(=B2e**^&%w{dYu8ke%`TGePk;%u)k_$Gd&xn&phF zVf1teK#q}6l9^svhCSMZ9!Z57sg{o+I*AA#G<3xD84{}h0r0m=Plrod`fjbw;HP@b z^Hr`kr##h=%dihF&5&2}J`CDXAM!E64!Fy9_{;M)lKo9(9oo-GdSjg}p+p}7J}u7G zT5l5&Q%#hmYt9enEL5rYli-_@;f%nUadh{TEdkubC@GaB+)NYZ^4|8fV8X)0p_K*5 z{qT}rOb*#p1=Gm%*9(2!B0GsesVwFCu?~)R0K@362f64jL||1x@zYc z3QF4XYsuk6m*6NDpdn;{K-K>eRChR#HQ7;wHAW42W8ou4%b16g7s-)BmKP0MN@WnM z#Hh3ShcY0MMN~+;ois$@`K$Jiu^G*~d&s`D>BO`G+vwzRRR`CEKqKwiF!_>(W|Dx; z{^lWEss+fwY6#MpK#eyNneWB{o}7UGe0}bZz^fs4Tu++1U!j#HF<29FsRYmVvC`rHPo)&TEet9Sh` z2Xw`?Y4d|Myn2fTkuJ|e;M=s7GML8A;a7{s$K5 zi2zK|BI=Pg1cC?Bfyyf(3X!W>iE7Ohy0*GFzPy=dq-gruXz(Oq&QXAr2)2|6(bQfO 
zC$|33Lx3^bu+cN_gOeAp0a1EcF0xGXE(kp)--Cfj=mwAil{m)(Nm_8-38*g_bKi2>xjS@%fi;3Kpi8>WmDun!3P)#MDRe{q;zmVQn`66A>lHUX zMNAVncdI}(xidqY=twKLZ2eg_{_sbgmB6aT2rlupEZuUG(($WEd+H4B<8R5kJ+twh{QKrLpUBfQ){4K>6tQ}Oiyr8qYNN6$2r4o+^4~nnex&R^FWoTD#v#QZ zFyH`rO0*vyqM!e6oIi3P_f~^4EpU+|aPW8VkXh~-0!7%$t%NNEdJGCYG?L@_?hZ5> z6q!=j_qUq=NtdQ*@V+9MZCVQ;#!+f3?@sFdg;5dH5+KSy;)r`!2z1&9M7~5Fmi3eK$Dh!jW@ri`a@!}B_y8d)^qhnYvZ@1PWQ8p6{8oHIY2QWY+ zgTOpO$-`uhbNjtJv+6vwE-)a_9wd4mUO$Z7d^92%w})ox23ly4Ad67WOI z#39n~^0iKrGe|qv226On{T;1)SHoLk|MK)p)d%{TN8-%LKon4_Hcs!b3OF~R;9uI+p)f1hBBa9saagF(shUMEKte~i_hTu;M0~gnIX*0FMH`bN* z&sEfpGTce7yRYEEanhOjB2@`f8X1#x$rxbNLK8=99Dz{m=^b;ljJgN=YMXvK>isEx z#(S>L9lg=>sjo~GrLzR^uK_YDCD&x8S^2vsGC~+KJTr2cyYBZ^xYS!KV$qs0&d#OE z#rcbo5E72bbsCcfE|w8_8~o$N<>Gqb6i?!xf+4obDz2p$2sUSw_C>_m_#)uXv`vFZ zvPTseW&v;rhw7d6%{KV*Kjj*YHfAyXM51Rq!IcMl-kcL3{PiThy;zHsCNdse|8hQ1 z>mV8=y5@?^>wirx@+tu-y9-GIJRjtsX|(5IJ3)3{5!Tajs7D}2{x$*gs;i>aAe%A& zvD%`tZ?&A2p6c%`Fxd>hCSna>xU0#0Szig!{H21tECQWpKcsl7ZQ?b6`Sb@}OwR={ zuMQzrvJWxH02!s`VOR*6R1`GHr(@bH^4wd$Qu8&e#GojS(^rc77yuNX6exYb+6|5 z>n^$zPIM@nJWp%1eL!%?`1&}X*Kq011Cf!|m1BclOpPK{s`a)O^YI+*k8scL4vPj5 zD*t>@?kb&IPu2*K2q=OU`tN-FkrF1i`8zOYIhOt&{p!!j=S{mqP<$_!q{gbB{QIZh#G;M(ZGg4Uq{&q9snfPF~^;e%X3mtPG)^igbV(3 z{+!V5Iu2W$1LJx~)F?bpVUoTH!hxf9Jz2eb9(j#DW5C(d>z|ju?(cKJrtjt6(`;wsnse#?6_1C23f;< zQvsq#A+dDKZ;3ki!eS)L{}Fc>S|4M})}G@q_uQ(zEzfj^u+%2(FqtWO49upjqNf-t&pWV{SeukA=|A@GypfT zbSM;vV6)u(ZK^CX<7tk41;6UytW%D|sT@ecoY2fIptu_;i{T-A>rdrBrd8H~2+Iy1 z7$1A3#{Dm3?N?dv1VTVUKAQb`n$1-cp%4hN0W%kg6L^Wuszt@?H|%JO>|0huKtY__ z<^_v(^)=IA`;h(a+?~(+6`Ta7HLsG}+NEowj3CMG!M{DG81Tj&z=X`nY^?W^yJ}(XI8HSgU#9nZL>njFNu|c~}f?8hK zUk(k4EUN+-TbSvZ$YmvTom4Pl>v_@jskz6SAokLikYyd!Z1};|+L{O#AtSIPB4)_1LLn2(9TrHHG^~lHQ>GA4&1a75+GH-woZS zQ%GE-H`Y@>5+|ELoXb-m+@9Osg{SVwfH)SFyKO=LUZZOpWqEI*jPwTj2Ozsp%#O1< zaRGZ5#acXwWbEsP8>HMA^_1V=@0<7|)&j*Kr=IoV6Q1=Q&+%O9>Br0)cdPhv(0L~& zzzp~2{?^0u+=nx&H2s-4qnozSw3YKg@YcZf&qInX9}Y^gg;*5A75dU>0CneYc0B4% z$8^FF9v=-XPO`j+!Ex9y3**ZdSmbI4@yiZNzV;mluls~Ys<*!^y9lC4sO^L^&`C-@ 
z;&8&D7=yB0a#0$J=+Q+#;D;$sh|hdATcMF7fL1% ze<-*e1%kspdm;jtUpO5GoP?s)aK44@pfF&ig&~EuVBlBI0Yq#<{(uR_( ziD9U^+me!$P78UR4+QL=>RzxBJ4~eBC(%n!_gA4CoqWYezMMS@pi5y0B^99Z2HBA4 zIm_TJ`2Q&`jibdIo!Y7m8c-~&JHY#iGVad6wOIZ~JAlo_13h48aGwFt-UV*vjn9C# zU<#CD+ogBa`yUv;=Wu{e(KN!~eirg~_A}Q$oTErl#%bM+LL7H0%-dc(V%V=Dr^WPl zE3XuDecV^?o7^8eV(`X3JfYQzsC#Ig#{Kc9A1JYZ-l1bJ1mb9_-)%48C0PX{FH!{> zG&-{P!_cNLZ8vzx_b=jlM=*c^xkDj>zhgyTA`9C-hxi0?ANQE(=Dl3-`6%uYiRilS zsHmR@pHD~*k`j<5OlyiJ(}ISgWFj$Qkl~$?qr1tSqxx$?z#AQXWO6@IanbcOny3{rPiL7+D-p1<@7Bm$q(1)QTu86Z`8-nnt|;DX`j4OYIB~@O(8uS9iDht~6- z*!cl?W*dwFnQ(pJM-N|9j$$F6fZtF z0``AIUVn<__P$33T}G8s5iKQ5A=J3}>3ZN_Mm?|+$~Sl+AkTWyCZWql#Y8Twc_$z% z{uLk}_LjT%!4{9QKOO`RSR?euRyzGihW%Y|zf9~!E)pk!%ex&XOLMU9S3cB1{EH?V zmSn}vi|lJe5C-rjcRf@R5;|8OkD@^tU}{*H4a(JBF47j)0hsm4@&nY6uxoNke8ki% z4-mqb#cvLVQym<<#zm?SlLDm{Ow4#2#PECE=g0Mr{r zz0x4zdny4^Jz9ag;aaG5FnF3YV%xq3Q6;&9KZoCR5!QKEL;~7pe#mzS39K@utaCos zpH6bS;yClj}d#VfT!f!oLQSDj!wnZ~R4q?Z^Jd=6g3Hh1igRaLdb|4Qx& zSXBMuBsKZsUei9X*F?ObWaeLvU;Rg%pqdz|=1{CPf@E6&O6PvINa|4kUj*ikPwvg; zeVn)92U+06Kw_beqj8=lKCz!Xm!c2vel|jNv#L%_xKSp(Ciadv_>;ttakt13ax;?8fU^beH>TZ#?fo z?Al5uG4ViU?I@hEx;|3?T!)$5IOw0$7A4dl$Xp^!osq~~CRK~ooj#2+uL-Ke`21pP z6C1PJ5UAv|9^V#as2hn~a=URpzs8}ddLjIPPjDQBAYd&qH~6A3E~!Y97MwzP{AaSu zHUA&T%a%4z2AlrKT^n&H0f9*1tOOz=C`532#@ggxxpHgjPg8C;W~Q0E_a}q2jo~1%Bq&`-<5dMbF#;wsSuevgbRMx{Khz{#;q3 zK@F=tJ~|l<11a`g3_}=5I794g{scE2r1br96ho-=Qa4*oe%=5=HK4~$z9^OYAvQmG zl};=P=0i%RPhNHVbcf=SF^zdc=5dsmGQN0iY1v27nZP9O;|`bUywpQ)76{yGB9!FP z!mqMhoH$tp(A1eKT9%!Mme=M0%x*Xe%u5_hRhzFmU-2<@F0`+RmHph2jJo#Eo&S2U z7}!}+xIdbOEwf>xXCdtF^>O#a5B*lSKZUs)1Mn3_^4edGC- zZqu-oXQ^#l?35dQH4bxR{HRUb{hqP&{OYb=@28KrsxtILgc4?=G6D@)e5TqPTK6^u zn<^G2Y$16S4y&b7lMb+A4zm7|Utn90j2RZs^s}?K-BU?EyaU`Kv_Up~gIQzPn_a#1 z{Yz@OwPZWrKF~NKju>9Sgccic?^${NWwR>`m)e;D;R;AU} z^;&VSH+>q40Tw+&NYcP!fg5r$1)j3rLX!>gCUc5U?RhXPHKGTo9{gh8At z6zpxToNArK)j#0mZe+MLdgZHKldlq%@YcTi)W(jS>S`sw@wM z=pR!3A@vC6o>XL=Ugf?da<1UUy&kMoiFCE_W%2kPS`>;}9s z=O`aCeJDip#xo5QlGok399aG9ve`)7;;H%(j2cfnrS2 
z8bF<^5t&?}xOBSg3;FsA+&fn>nbJwzjkFr57~-d#Hl)nPJc^19#faEqnF)&>Pzh0h zV4F~t^Opgh<6aOYQm3_1Itg$TFw!&rAW$>QD#d@7vf-{yC6+JW!RX5*=4C`J;8QX6 zP8Blu0e$`m?afGpTF;X=n=*Iw`8Ha9SG%>+rybm2|NLXX_lHY&LnLVU zYbpEZhY|DT-FAebxB;D|RXhHnrI`s{tM?!Qs(EfIO+|PTMIEPNuce1RbuH~-2TcO5 z-{031sAL2%Jco>NJR^m~yCh}O6V&j?kOmPS@mJ<|SB#`{`a_j9fTJ6?{nnE0M`}iv zAS{RZGR&Me&}G`J@6WBSN1$kp15v6S*o;RI>{Hwb*VsxTdHeUZ&VJA6P45)rZD$K* zr!_uz7*&W7zu740fYXJ&PKwp2>t!~oc|E6)JOuWCun=-7A@vio{`7PYn&wG|sJ zi5q<^z2Ef#*f8o@Qx_>;pqCLW0Npp)%$oVDw3tj#oXV88o$wDCp~Ya-HH?LwKtzu| zqF58v8}V>n@_dpCvEIq}ow8P!#39*}wMsWIps=UNc&)z_RMUB?PzD z_hEUx=O=DI_vj(c7%o#ZJ`05XVUuzD>qq?uiI^UcrA;E{GEmoG_norQM~_tK=87X& zMmxs$7Rd6D`SiyiG&laTZ}rdCP`NOf%uO_05SEI$pO_f6mkeVkXn~CEF!3J6F_M&1vLC0}@{9m-Yz?1oj7_$Xinasun+sj_KE_&x92NoKcQD`( zld5N=)}m*Iv(RTkX&x=i>(eHjkL-s}o#|_3X7ejI;hIW$$8R-&)V)Vpt`o^O4l;R; zE+#(uVOsAD zETDA)ZMjaxA?KzIivKwVJ4QzHQnuPgN-oZWGCQh_!tHGC7f2Yp!!$VHhognFgn~y{ z0G&-udL#s*Iy|$1cg|MpEyr(yJeBbWnCD!GBs~q54-+H)BrZUI1mH_ok?VkbC9TLM z!J5~DiOkLZLe!-OY&J?1K49nMu;Q_>W#U_kapSI%JJRL*qK;MR!}ROW5cul1-vKcQ_k@A;;cAPZ7FYs=>0r`&<>!PCQ~+r{Zy&V$p>GBE+m zhtAAXo4B|IzzXarLrV$Y$b*3X0jSylRtYqm_Np~L6BPQ)I&_Ku#9nKs)w8%|d zzbo(Ech3DkhlH7(@9xgde1>+W;#VL{%&^S+2duTxP%?}JT$WhEN2v{bYrgT|4jvf`u0kak62mA)vkswH*m zkhurOi&jWd)Oi{SA?tIS{xB8g_w86t#LKO5=C%6*>esF9Pd=Nld*w-!-sQrAOiS%9 z%Et8FhmGX`(dv^nuz8Nqgz;Lk<&Q;qVA?+l(}4BV+eCi654!B}OKCXYkJu@^6)xni zF3bB|7@BxyM(i0F6Cbn30&M#z4#LWP`F50~a7WDKHshSql zeZdHuyMgj+xm(hi7gu7%590d`SQS6Z?0`sJRo(kJqCeVrp5{p|+>0N$c)&&0z>kr$W6wbES7n$WZ;~wLRXX z2Y(-o`4ikb>X&OlSC^BE0^2(@S!a{5%Lv~nr!Ttb7)6T{AI&XfhGlu&<$3JUV0GY8 zF46#!?p9!dpoU(5V_&)9mSH_0;3R!};Mhi}`jD=EAY}x$!_f(ZTp$~Lx+0tWJcvF(QKwS@PZ9*81Ed1ryIVd41z5Ujm&U@ z@mJ9tWo~w!RN8NEz=zplKU6-zdU6VoyPu;|9yoLTP{An99hB)Rcq(+b??&9Wq!`b2 z4had(62Vjy=CHRxQ_RLdy{f|kL#&aS;mQaLt*&6>#ox_}rlJES=iN=92v8h6)OC znzHjSIdP~rR$1MDF}4 zTw*e1j8ERVru_wXC>u>M0Pm$;_wFzHYH?;$gt-4`j8B`pB2~UzeJ(=s@NXF=YkNsUq=rwjdDS_-7kU?A(s^5{%{XYl2xs1 z-wqLZ?orLj34f`(OnhbB;O*Km;foIX>B^wFo0Jmmf#V?L&owp*Qh#s38HHMWqFNRe 
zPqbGt%$%T7y`Se0U)ByM$8!URLUDSvHQ+w^LJg`B9H^>~`Iz}sUpPHfC*0&%@;M>< zu6cBP`CI>oX5)2uuI2Lg<> z4(#4lL+nmRLfjK01j#JV)N-C0E%nbiij@HA(megmwWA{jj|IKq=ADmZI3 zD3P!Ez(^<4xB#+^5IT`J)*TIR_?HA=ZAKmveEcz?U!^Nzvs3q6Y;TF7()tL+?2f%a zmZq_7*nY}>)+C`(ER*W~7?y{R5oYN&7dNJ(CB<>dlQtzg=or;kpt*~Fi{`Sn5t&y6 znE+ISGl-P<+@|IGfKpDGNU|;i15)Zulp3vxhspiK+-Y0Q?aK}NN1T(g8YCc^pFix| zA@k9>y-?J*A%7)gaf>b9U6js~7tJ00w2oWK5(_2L+;tqqQ+wyYFNJO4 zj5L_TRQ#}a6L4={v)Dfp9QBS`{R{WC-@5YiUrB}D%QOk@*?MQ$2Uu+(A)s`SiZ7@f z{GeDj>!^)I(->(@g3T>?lKS))h!Pfxs?M5=GOJFa?<(EszLfq+Akk&B4H!n0Of>K7 znq8Oj;@i5bgA1`R?Ql)I&U`t3FYC9y-*3ixs>WHf}ohI^w?d1>~iy8=w)~{hz_AjYfevJ#~sUX5Xjq zaZ)fQa}~L!VwWB^L6<1#ZvLB3gDpO^(~4Nxb-M)Tp@4+JOf@!QVITFq%+W3MW^`^? zUKb77Quv=TlbP_rSm3q+R$0u(cuX$4o~7~9x)afu@?e_3f3g7vn&cf*-H}q$9^jh5N;7q%fo_?m&&x^4`SX-H-C-+y`mV8AN)~dC2r&4vl>{GpXJIcLl;8 z34`vxyFZKviH5IIok=Zz;&^^ZLZCb$>vwnSydU=zVOV^sAnp`rCj7Qjf)U=jfy9MP zK4|_v-lEhhzd%U}3v*%190#c1sLW3kRq#;V@Gxbui_O9tHx?1^M(X9SqvDXcqvr+^ z$<*~<&?pn%wT~n|8yo33q#*q$nQ!ZBxdUe)^YV2y)UE6b)#)?)JUo^BGiG)4jkkzY zadJ#N-X+ZmS#n<;QEByhB=5-vd`57jsd1th^&Kz@Pde_SJ!5iw+n;su*PTIc59 zUQGj@zZBEy`+;fr9Fm@F%2g)rz8qf@F=p3ajRqs1NeMbi^Kr@#WUU_$U5f!b&UmHR zzKVpBoxh>J?zuxR-6WCyQ4pUMpZsyqF=d*Oo?3^<{MH0Mm1yw1EI085FVqf^+$C zKHu$eKI=j@ye#>66#Tj-Kq&x7#RJNc5VyN5FK0qnh&7>0O&Gx?3k@IefQ7q{IBcyK z{QO9rd8|T+)OP?Be@U-c?v*pdMUAg5yV#_9;vsL4ZTE+Eyq;dREy8NCtQjj~PojEl zK>v74>%tWJFDF+->PO2bm|9uK4OY z^zO7Cnv5rkY-MquxONn%cbIVtJcajT>W@tz+ZFMG>wBW%3XTJ->= zdil96hWg&f-*k{FAW%a5Ns&`P6sM;X%uxGiQ0WMkz4AsLx&*^LYQzLJ3Rck_6JKR< zIq-~+)VK_2x{;rIAZ;`VNo59SC6eJusy!t~Jq{>cj|SXE%TN)^^PLYK5$F0w5dB7> zA!o>K|J2$x$RU0@!F3}DE9N_w*t;%a_ZK>~rrK05Bces7PQ?>?&$~A^0zhfj!B#ns(gDFa6(9vWoBN3bBEDehAf$E zU(C#XtqZCIfV0ShE3!~4@ByuI9`zkvtxBfg-jZNNw=-JUT{_%<{o`$gVU)Xf2Riip z0edp&fUxlO%z2NTl0Z5=l3|L|4c)jk@gB>ZjVH`ujsYC1BD`-`qLF%6LW$;c1xVJs z?DgbFp)>y4p;0#UeN!|R(HmB@>&VR4#H6h~G8C`rG$RZgC_W6pmPDZv2=%_N9Z=|YLy6qy7GrGwdqO3reY{)P4Ti&pi!_UdEc2e~bdPhOE9=b6!%;UR z3^M5&7c7BoHYD61zTE*v!oclm90b8GT?(U}H1y_|49CHJM(QPctOzF3ejST(g2Dbc8F`r97}l_n;iQKW52TJlTxB_KIhRA27T 
zlos|OC)f3SBP_puW}>mxbn$N2p9n#8$&kHkwuUQw5pRLLox5}{#i=u1AuI&RG74Qt z_Jy09E08~KjMDT^vdOSf^o>NSTbc#~Kr1c4X^j(zu#1qGeO(>PyDh>(iq01mW~pTJ zkg0Ixt~>O@l>&abdy%TwoW3;QrS!5VSxyskh#>Hvz>QcSVY(%WTT~AZH8e*VuG)7n z{XE#}Q@e59vATZBG3#H31`>6y2Vp=qtA?foo6t%nLTbD8_6ER; za(rP)J&1+hzA1*SeKfs&ru|@)fh!rYm&{|h_T>D|{3_tvmo-ejrue+t@6@=ZB$A|F z1JjpVKaLv%%noTB9yfkxUARiIgiv~;!H3`*=h2Inalp{^1pw-{Nz77Tma>pCRo{~I zjW~d%#AXwV_=T?DC%CBAsAD+ocFBdq`&wm+Bv@ri>i@S6@q<&d>E0H0g$5CJu*3t1 zZ<;Cf@WO1*l)C-_!e%NB+ic3;7g`qQ92q~T1Jo!e`I@mTOpT~uxsYhHw}G$E2^yUd z3X>-br@$*jp#~Ow71okVNIN#M3k|*(Kv6rgNHwO`YP`vLq?*>)gc)ElSPhV+35S=& z55tG}UZ&wm3hjl631wgEYm*%2@UL#Q2AJ~WB=07UGpNgGNqtgHR}Mdv`o8)u%snl@ zc%LG=2q*r;o*cV$gWh$cRMHZmcaOLaPEbqO%ed;RbbKbcAbc95uBGt>)x?)Bgj4*2SevI6{iQE|?+$L0xb?;P zefiLU5vG4P3A1Neen8Dlv-P1~EKC;$WVj3and8QIGjWCq`m6JrPlC){CU_7LF?wTl z-jykY$`r^8k7iF-?bX-ve5uX zpZP`r!TRmHN^IElhs)jwyM7hk4n^1EK_^epxcX&4I3zBVK*wE@80RYDeD~-<$&mkp zPrk7c)AuiK_%b0s&|bO8)ysxR#V-z)!5Xj<7f&=v&1>U>mwILZ^Ryg1${@OMruMOq zwr;23fcpJKm7LzEz@~n)SUV(a#uIm_{=7Dc%MVY8Ma5DF)y3v3oYUh9v#1+a(So+QCqDVv_)qTAa%WH6g;~g6r-;#Rud`Y53-kPM`5)a<# z>0w2-Dj#v) zdU~(yt9dPCIna|6FrLd-0a71( zt30ZVzNjMH-O2sN5_*L{?S^=B`7C<&P~3zhfDy!L+iV!+^}w=zm0#3SIzCqqs3M1H zQ^xFP|4=9BkD`p1`?K&@Q>M1$l*KM?CJIr zk62*u55HrbYCr#Q8-@9t+bYy*hZu1lNEIRShDV#Q7P2&p1w2Gho+-|=w-oR?LZ*qX z3HFnICDr?mtaLOKp~Y{;f~fviSL+YlhcCbr!i8;d;el>S)vX~BEX#ij)Px)Bv4aq^ zo=`_i_?5`sZP^gAqsN(o&6|{S#x#DV(Mnk-yC8rfFTp?686e*fXSk7Ada%C&;ZD10F@PcLxQ3=2GIB8mSNIL*kH5@bb4(eaWV+#!zi~g%s!5n z)_W&eR`~IugaZ!O=<#f!+|Zh8T;Tn_xc7&nBGXZYPj!SFWEr8-nNuuY+9ihgv0;0! 
zzPH@h5Eohkrj2G;0P7AumyJZk8hVrhWEHS(%5&&2V^yPfW4c~0#TUL8?EF>`tOAIC zAbL1qR1@KI7dfMOdsURk58O}#lJUca-(}FE*@H)zkTzuwv$Xg&UJ^_ap z1sCNfTS3O=d$%cl)V3jP_Fp*V|xm)zV7^e~fNN@n7N85TLh?4gh76(ib^M2wNYo7SI{$AMUsgr`j zeH#gdp$Spo%7Y+TI7)sT6JhYNFrN&EXTl(Uur`e0@Ubiw4%<}r|L0`0|ErVzBpX;^ zGl$~olhTh9&j*UlNQZWy5auIISQ(a-ohySa$bB`WT2J7=I?l&c9(NHplLO;gP$ZTy zU*d_cOPUd8z1{I;sm{dDCi0={c)iOIF2IpOSPr^c`B<0n$C|6O7nn$ zD#dI-GM2KAk}eAj74i5!V8b0L9}O;=*SkpY9*$e;%JtXa=+z-xOy0ggEztfG| zXh3*SNH)~q(16udWfRgfy6G%?Ek+pXdQR{pZ8wncyTUo-y zYc2RpdYpa#zB~7ps$C35Pjn-_6-!_|e`(j&N0t*CiIc_FdwEqtq-6YAyT*- zMDwnpi!0*92+PW1)qu~NXD4TAG``_|x$MJ_g(TdEs62~|rAb)zq9N8cg-F8IdGU^& zIx!5+5umX5iukf3JSWBx=cW^kHAczfSqsj65!IXJgbT*!|D8H|1B=>KO6`<)2zO<3 zT^VqZl`3%52XOO0|KHgCzqoY_Ks)LhKSYt2>#u%W90cVfY^HPtM7^m2spWP(y)k09 ziDy=X3w7J}pW_);!IpzfGWvdtRu8#m%aK9H6c%%S{LYMQf-LVpGl-7SaFKpcg*76$ zq;0^0NA16Dt_Uztx&uQS{rQhsv+SZ3%fS%5_Xj{9=9)YAv1>8O?*QHBy5K_I<<5vZ za2C$mIg?2pRDUeC6>}Jdrmo6~Oy#T%nC>=-wU3w8n~*(SAdFk9$4^>4D0tg_N8I)Y zwafEn+dzVt!}0Wy8pP%L9VdvZ zEFvJ&ln9>6Sssx0gX(O|8itNaJh2^oQF~0&zU42; zmnu&_wbJ?I2^d9Pp-?KcvV#GIrk&40xUnD9OA$AOEYza`v|N_jfTU5l=y$LOHm}zv zG%@PgHnAVep1M6Jg~Vy5d=3}Ax3KE5_lst2y9!GWl_?L4?#$#-C;BqXt%-CaESb^c z_w4Kyia$T4p@m(wwNAiCok{32OZ*w;WT#UnVQKJa1bl|SmXXC!YMqTY~ zaH{y;-cS-KMvl32f8_^3~%^L|w*OjTrZ zi%mxmT|wq>J@D{W8JX*O6PD#x-dp&b1{H$5|K5jVfW*~p6x?^WOR3g;RPqGtt1y4^ z=9eu|r=_P)u<9eMdJ*FvZ<25mB9=eq59B_rBJz3OnQJXdb@;D#Tbw{NddEu*;!d_p zjFvFB-!Z0%+L00AD57Cs$0AYB4~TLiNydrgXK%r1YD7S4e9j8fw3aUG%`LmOB{Ivm zb3+gQeOt0}L+$C`I!gDoWu+{^`po=Ux{i_>V>ho|6kziZ?QDd^eFP57pQ~#YcCf$So_2OGBN_p$Z(2zY{!Z|Vc3|1q^Qb|M*$>E=Z_Q^ z)y6SL#|(;u+%tCL2Pk`eQim_I zbaO?|)bYMh;3CxZn6+7jMwpgwOHLro=ubNa$R$apamhm9d(PZ@Qxrh%8c{FuYHm-A zTEcFYe_(|7nP^0_H+j$Ms_DS@N{yXP$Q!SCJGi)SwcJ&YyO!W~n|R$))tL`exQ+IC z8|}|_67=9*r!O$<(YJF$=@9ns{%NxY^!+(FQM-RvRTSGv^S_V1wPkVy?+nvmM3t;# zL~MEZ?UuQ25#+V0RwDOySO*!g@b>O^AocxKgHT9znde(5+96Npe8Lda7{+=b>DaC6 ze7O6bu#E~#$RXBd^AQig*c{`kn%3%MpnI?3$yRFf2c|j;_`z{RBIygBjobNzCK$m& 
z{t^yGn(8{>YUTVC&>vvo%9W$e-|86U;NM6lFv3&L&Gmjt(n|793lCqTHsh;l;p2!tgT{ciinC9%do6y02Yoqh0*=x>(O79h z?&%;re-Mr~cH5|toM~Y*YH{V2YQXHVL;Ii@`yIyK_O}~xw9BO6$m!c;I#^#nzIMx= z>xU%bK~poNAjphC#T#_kb+(vl5I%vb$K8}-At&ZafQLmu%`MAysMJ-byXH`)Hxlrj zRVW8O(QJ)I|34yNnvUJ3N=W~L4k|Sp!u>?aK1_hxVYnJ#&Ho?@Nv!J6=f@ybD#HLn zYV&}?rFYX3=K2j*W@xO2W_@c_A?2i!adU85X97Qt2H{Ai#~g%vvs1>6R)oO?$QNsY zgh5yPTwQ)BLX5gY3|rHSV9h93>lrJ>Seoc@s}WCzzp~PvQo?Umw=j2hXUraDo?=VT(hMv^E&fYc|F`xj&PM@JZc}IZ`iA5x$!Djl!yLu` zY0WP)_BLZcexEFQun(<4V|qAgyyvNz%k>79rOUkKybM=U8_>$$4ZtIHF-!(8=|BLF zBJ&(ym4#H1Ri$T|adtFD;+HSxFODDlSL)!PmALgQ2kcr>O=nNEb-Kc^qe%g2m zhU&yU_$8tET1XepUO~piXu~j&ccW~E!s&QippiIn(5?8#tucCSY9d3EJ|%4@Oz4n7 zIMV=MAzuEE=U;pPp0Z;i%yvsK@^(J~xlSy$+dFJBSiZ|5Kp@k4ad_Su5VmN+Gg>84 z25XLEDgPgvG5bMrzTFsv42__*OR>W|`wC3pJRVInp^ClNbw5~M)Y~i7XhXTCJDw{T z8uZhVU2v+DhJ%!0SPcgE8Mo5L1;;*SRR)N*2d|Vtp7mQEbCwtU1N-5>jeVit`=S0# zo=$q&=c?XXG;a^$Gw=VoyubwCnvBlq?m$6is5>+FF&-zVJHv$;Ggcy>i1Zrag*Z8; z1{%&sxcNwaU;yX~hm=H172+eYj3dR^)pa|U17tqq^cLL%vQ`^DH;3a#aq*xgH1E_a z@ipH6-hQqB4{XLtNiKQw3tVuu`xYwkGJ^`*H7A8e+$qmHw4x2v*}X$;Ud**8n3Wg9 zpD0To)T9rGm48@XvX1soPQ}Q}$WZ!-x#hxmlvk=s`#l6xzs8E{EOUo+!t?SA6gN^$ zu2kt4f2&YLmVa>{=h0M(`;ihQhaWspkgP$1yvPjoErC21Y!||hQ)161nI0u1E8pSIYdZ&ua_&+e@qfJa95KsSyD{?Nh^?-+|021bVx7HDM>Jg$ z(^9>7#^T$ZT{fRv0crINWiYE3KYS^`Q7f)L_PcHj=qwv$xDUD0Wl7RDY8TD&`&%Vp zP_0(nqz ztDOeygM>e^KNS-)8V{l#&CX|0?@M348UqTmm8lD<>zh!RW{3s`F=HbgYBRz}O-h@| z+Xh={b3D;qkDHolpXU?cZM$M!A>55^em@d%upigAF4_1MzRA??Z6Fm~{~f76OgStt z%YmR%QX&1RqD+TMCti-!_rJ7AoXOH>e}pJX#9G`v8z(;bNuI*{i?{sv8U@y;jrh{h z@o^MGUSUY*rO$S%imE&sERy^6j_ok#EPAh^`P6Uo{wL#%^@~)(aKU}^O(kQyc)ep% zt-=uF{27+v-SXB}%}U+*p#T=CCF}bYK;Y*bIRUmA1ui42O?#c_fCNUC7;#yi^fBlC zubk@Ph-G83os=EE2cc>QML%*WM+qfyv+KL%09K{icT#YUqJD)dhAS0!9GT6TK>_lqV zc>V$!l5fe8EE)KH1@SKcw#XSPWn%X^ZG_-ohQs%~`)Sq}I$2I6i4pb%aFzFGzPO20 z+R`p+UNs~^*paE`_n9Z#`ZY_s<1_H}VY}c#FJd-LW%OSZiK#u8Ku1Tq{a3>DPvvq@ zX*9(+kK5!>)rN8yXNla$8j)qLvn>7L2pfbTc?`s&(^dX_WfqDtuFmlNyLBq@D|cE{ zzgLvwGFYY)OvP$Js;RpGawHqEv;5=ar|y&OknZ_+AiwwE^_kLa6{#@mcmgagv9PNC 
z&2;733>5G$`!cWw`^a%$_RHP}+Jp9a?aJ4RIk=H--}oZisKuc!)A@lLJ*^)4+8h@4 zgaWy`lX|}su-)UbGSer!pumLcvt6CpKa*f2%;LX?9UP*%UBhAbJ!{?h8~CNf@&!H7 zR9j1qg0%GNBCrlaYP*q{vSnunK;BpnY!4L1Vt{(DKr_R!Z+Ot@CnaSi_fDVw5I;TV z0$wBtbVV2O%5LLt9_(dk=kRHx~iWtm4JAd5#hfcEVFquUL1*~+uB zunQzWZ~Q^)<`}MjVm8z}en+e*b0`vAi7;|g5yFiQ3j6@*^GX&nkh|&L*1lN^5ef zsj4RDd!ew$cF`YR5b+Mr6Yym>rFdL571!<#E7k9MEP^ z4S7Kzaeg2N#kMX|b(trcT`T1uFw-H>0r96Q3}C^V7_{{$dJ>MtH1kD(=*c%3V7b|C zH=0cccIH`3`OYOb%8~Gl+VndzU%d#q&@QmaZS5(3GaPU@fHl0Wwb&s_wo8p+j0V>a4GWSwb*lB&o|D}u1loTn@ z`7l<0C9lBTgdZ?NpgCjjE8Bn{RU;UNO*=Kc@Xe-j*!Aq5?nlst%0nlt$3E6J#B?vh zaKDN~g?+!H<2b-KNzrD`uEm%jqrWEZRG2ahla49OrS{|*Xs-x+a?`a#v!-{o|5jEB z3T&0Dafn=kO>k7be=GpnAUeThgtnn3)RskWdX;C?67&fd zR~0KN0(~`qX{Ot;4(4ZxcDWhi%MKdwFS_b+*r7$8mrU~q3xDC#-Lm!n5DuPFe0=-= zBqQQK$#5tfg9JaD-ESZmCZDk3ff(lVXY7`*;fBz?cqOSKfPQU}@VpaK=wbm8=k9)4 zlme^CcGbx?9iiVH4AS+uFa*v%yTiY8iiqHq`8Gi!h-B3CV58fDA55cbX*z%l}5b#l8)7zP)to^GJUVdT*t?_qv%>y>Ugs z<9~oSBkbDgKq*Y^wrA(Rnrzf#rUkdRq{HLwp~~^ALwB<4e*b!~{WY9K|8xEk@$|x1 z;y=^vZQ-nYeran=k~M36GXjzx4*ff!osD)j6LF1jc(Mk-Q0{6y6wvO^JS6Vl61rLl zHYZYnoLV@knF%%3OX-l@0C#bCmwou1K5s<&6NI$M3pOrk#fggu9xOY*ac^c|XvExT zC{rSK%{vU_O~=c`f9S>^k*NXiH9Z_$@?@@nwk0+cF7c5ep5}79{a)f-*on6mUdd8@ z$TpO?{a)O7MOZcYAZ%*puE#9qe5MMNf9TksJk-G)BMhHTo@1K>Rde3%-9^|W>w5YM zegJA|_kO*Do+p*$%8aeq68~AS9OE5}2`X`K3i`CGW@u)>-NN+R2 zdLWXBJa}AxepC_F3zqcw%gF&rVv-E@qsZv+A0FUXZ<}^syLAqD{e~!y(sF^Pr`t2_ z9~Hn$wcrv~!w&Wke-&g72FCkyICtgYZ7UO-D z=>CIt*Z`;%T`~BGhD^`l=O`Wp2(h8)t^if)`8?2!W3O2*6K<_hNo~{CdbAG$5kLHO z>!rHkM%qIk8b-OSJu(M|+FISBDVl|~0K)S(ZB-BaZ}E8E6r7VI6S}oY6`T|ARw1!# z0-ve&n!-PXMIjqsUI^3xwqKgNpZ4J-WU_y|KL@krd|c-;QQ=Qb|M%k}t>-W;S`)wN^yuFwD^G&8`=iA{0 z_RNhxAi6Y|L@_v3Y8f8alS_0-1+EYS=5RSgpaz>9fOYKf=U?q$K8&IaKdeH|iFt=w zBVP9z`#>oAb)~mNHjU%~?rACjs1L%&qlTVWj^;sBf4)#;)VAlQ)(VPKbNZ|XTx&?& zdOp4k%bp8zi_hWr3QHQ_x!i=u>oqf9IY%Y%-yn0OK{$R5n*p_e`GY4;gw#Ii=BaRn zam0Z87{K(|W1OL#>bVv$bjgJv4CeD*I0}3YT$N@c@|;mPgq~~dIrEcFAvqW;i0A~Vq+;d1ZQpMUoGk@!&u`*UnB3MW!%mm 
zRQ8@ZJ3m@qg(?zVTrIW1@st2+_2!8DcCmC(6WJ%nX(aQvGU?215EMxY4|xjVzB(S(w%bGc3eb%q2v(I{4P19OsH;hIc>#2iVv1?Vn+XQ#loH{LfbtRBQ>D z(f(;Xfi+ETV*8lyjPtNk6SF8+!gLcl0NV(nQo zn~4;h^~-}4bVvHxgEO#?CKlLdvd%Ifef8!w)9HANc0~=aOfJ*z?0B4Cpt>7wOC#qF$=ioZaw=S~784V%x2T3?ijsG)K89UpD>gzU)k>-#(y!Z<&5ud%dMuaW93G zGZ<^OPKC~^sVhDOTo~LH#P0D1Vnv1G;nHRQ7HEcVT80u(2IZo{>$U{R5A4VYiHG*< zMc`qb_p%WU@5oq!)}Eeo?%k7Ee#j`QHnYb_#Zz_9G9vZvX7mmZGZ5M&P(h>RxWwfx z4PH`K6G9v-9ES#@ytjIf>rD)*Gk3lopte4+Diq$hbK(p0d=46Ga$5!_XQK|@48aPO5Lb1NU{Jw62o@RS4&(t1WwX^6%_uQ2sVC~dr#A$3FG@| zblOemJ@S6*#aH6mT5AMFnW{dGMQP9y*h@P2c0d8P0`3MB;7;KIX4uq=9gc=hzK4Y` zja5HQAtb(!Cb@}0uOS+vRN@S7lLKP2gYz*Ati&E)H?2h?H!Z4HgJgmqDg;fh*1dpm z{V;S<_NCC{zmIuv2&1LA)M6j>RoTSDcIkvbHbUcsHUG;8TCixQqRP-u6$?CNBJ6!2 zj<&heaFRe;s7;>_4O4~vOho9DkOrvyO!e|l|zWh6RD{8`&B zHOiMixjdDEtM&tX4;Durae!X*udKj2iX}{#xSkEc#*utG$WRW}hAZTPE%mzC(VV3z zedE8A)gy)f8oXZ~!$jZSVU6iHXC?+* z%3&`vQQI4k9M}5nhPRENi%{dYa~qQO8{p%8bD+HGqXO2b0%c|jlWND|DpF&*>YTqc ze11OxW&K^2oMJHG!89aSA#4y0yGQLI>-Vi!o_6TgZU#sfW`w>g9P%!nY#Vfaa*>Ox zn}*z26HZB7^?{_g3Z<_6BDPOQ%jNcci>( z|M=3cpk=hPSR<{ALEWnBf!`a{DN7I(BLH8syW}W+U*4#%M{;A?CYO!eErce^Rmx}7 zHxL`|nr_*xQL#JLbicPp%!+;d@5gv_v9qA~bjQMWnuK9rHlzv8G{7`tZCg zKP+beE8d%BTH=C*jI#|Anbd|~^O}QnF|!{#$U7kUolhaGG(Lm_F-U|YzKW=rLta3) zMjR^W1$T?LI%b!hje6`rv*D0*W{KF(q?3 zLKIc$qrgl}<>aCqkabK?AQ$n5iLO=^*oP<#e;|%9fW;pXe0J#vr3GWSo9mr3$cU;O*A?Wl&<3M+j^wZPdeFbHm{a0 zl$EFCK}0TaLY&O=W*^=5W*ggXw|Cp+G);8o?b{7^ehCF#KqZ(Yo=$R;C5VYyH!&*Q zDUw`yy#=yP>gGefyL@%cp7-K)2q^63g;>oWFq++^ zf?J{$Qret(raTj6n;954uPk{_=%#;<>{@=q!wtgnST*7_mrh=Z@C3(BscuKVMw`u% z7`*WfW1vnG8RcyC*)BD)QJL-)nwy&W@9;v->PzqG^?4`I-Zpj>&W3lfa$B8e4|WHS zt{nXk3n_U6m=Z69b}r^t+hx*kH8yKU@lDmCZJaYP{QG3{SKks{AU}6U+xigIy`TS} z!O7QQK6Jc3!mjaQxW-IbMC8nzXoIqyVv7T>OrQ(T7$@1 zes9_n%LaR>8?#o5$XIq(c?BVAF zI>c%6UjuT#X-vGlU{a-G^~E7ZiFC7qZa~AooVkbGri@B6z-m0$13UfHIzOyD7P6{(g)Yf8F0C~8Dszg z`YO$yd78~?;XRV!V(zvpn`kQ41I3NfRK@qwe-ra?X!rp3--$y6$JJi;H>_mH0Kf&W_7fvI+-hcnd$y3IlVDG+7wV$}H zw-@anwCAQ1b?saDE}w`qOFW7sjBo~c8B%K8qaQ|r!9Th>TRyMY%Ne4Z{FHR@gx3=; 
z`dnD>l{{q}IMRkqE zHlsl@7lW`UPN`nCEtvcxs)23N+?sF31{XxBQ3+Oa4?^7V`dFKlUb@X_L!xxz%`P6T zQ=j|4Xq@6Lgi~EyxevAXIqEeC$0Nooy`kykOkb6Q{~BI$TbE!<<@D6fj>aHj!skmt znO2IAuKK77+9c7vVVIRtT+C=4_|&0P0U4FKcta)W-hE61jABziK%O#XuJ@+N3hzajWH~@2 zSg2^n8lrC@T|B~$=JBhk|5$rG0!%;u70uI=bD5V30-|?Upx}gdugc>eX>_sbE()^i zss_vsk7bFi#`jUWV-s!Bn3Lf{TFQVD`Uk)h$(ov6eRER|w>x-H6i&BSl<++J(|YSd z_;}>qEsMtAj#$r{Dm3f{)*g4!egc}O@i;B7%)@SOj+#cmWM~yHFYhd8^0ESD!|w%J zQ*;R1mNG(&J9|$nw)bPL*|8pC=+m}i-PSepTyA?O8sM%R`e*Yh@G1DBXYi`EPkwjy z{_w$L#|xcXy!{ByHj;bPK5v_lqKzF5k^Nay@qNuk%|ocZEZOAvM{gy@!_Ry9Bw-00 zvE(oPH`nadZxw6T2g7??WSnDp{t*F)2+gJpK8qFPKH!Qq*eU!v**oA<*F_F^#993P zb`tD&zQOu@uHYZp0s%fDbp78{Ep2lHNcdP%^az9My7Z z!rgm#E_3EIGbnhmlh_d~cP~iDJ>1NaJLG(LBrsfc7$fxET;O0NS z^QL3)x-LYndr{y$ZZ5?)#^L7g5Bj{mfu5D@cn@FCKFe4?hVn^VmAhTdb2ju@?fcM5 zt5c>MnFc^d99zr1{QR%AU0u7&d~Me zou+c?7DRLv&}6giUEqCyeE{GFQ{8UD{Pb_0?uz(kSgFG?~g7?vBI1)eJ6g@odh}|&@@$~HWBs1YeZ~OJM5a4;}<^9&+Xgpm2t$1EbN2`c* z^>00psE`;!6hcR$Y8V;PNz9Lc+hqTJ(=#X{zH3x(+<*AwKZ9mAeL?s{)0Mc55syxY za=eJV(adx8HDrs>${@Up5Y9w=`95NZ2*HWZFs)T?_VBxD262V~(1kBZV9JxbXDaz8LYZ1?EQtecNoE=u`V_D)iFNgx&G- z>FpP2QG(30DCh$;G(c_ea42&if_DooJ~CE-Xbm$RyclnnbMKL$hVsF;1<{H{d@)(T zvzqH+$dwQ@tnt!fW%PDNDG1P0z6%RywOp!(qm>(kx9l%G+SsUncf>PV+fO2dxT28o z4$a3&X47XX=o@%7-cCj+Q1mrsXn5y6mbkcckZXog9H}i^#(uwRwxICH-h&ge`&m{1 zGA=b)g#U4@+d2Q{*G;4i;Xuii;i}~QPssj40ckiiddr8>EhU>O?-?ca!%j0AKS3S* zVSg5$HpoNX`bp`Nzk{N`PGMRHgGoF3Wc04{3r5eklVHlv#xE{|5$Ihe=#pCBdGFtQ zxZ1$osNwupj0o-p4InmQ>0_N(VOp7SYl^d_C7`<_cJ-5&4t{8niD+Hy{4++tLc=A| zbTe`B-#wRRn>IH9 zs+kII)VgfFa>eN}y8$iQf4rz(g}4|)<9&sFG*s@p1J=!FW6-0pqrQJbYO!>C z$PsQmKl2p6^{QZU+cM=2iY0#@%sTa7m$|Mm18L?!n>fd?0=8nWf0{Xt95R03vA8dB z;%e<+GE~*yh-g;$*qQV8Z-v>YS}8M#2{pN;_cGkSzs5G8q{YQ zwV}su)|Aed%xiKS{xrh4#r@D#5LY z@sX?G$S>l=0pKp_9`QWI;kvaeBgMwADB=!=AZL0$X z=mJ=}@&Kmt8bLTsrRo&Mx{oVhEwfBx| zvfI{0lh8uZL^`4*AWcC+Q9y-I6h0IbD@Bk{MU;+GB#_WMK~aiS1wlTME>%J&bX1gL zkQRza4J8Bu;l8-m+Gp*1_TImHe)s$tc-tIvj4{U?&ojoXY`KYJXq8skeCYrFw&s)k z@ey7GS0r3$G-Hhwia7{jX_p4E#byc*E#d3Ctsq#EY*|3N%#`pViZ${u^vGhG9INf= 
zyHIs)=ZWb6!sKa5^sHxlT`);Apr4Yuo(x;w^)Y8PNQE3XwX$^)hU7xC9_37khJ#Ak zAU+D{^u23UBc#vUW%#lBQt7l!5%aVq&V;bAa|Is?fph7o8ETyqKFhc(fzx|g&xJnC z-|}3_0GOELd(|3@<&!ncrK{&2utME`iJds_?r;L!8Zq_5pX^CLuJ8Nl&UAQ{d`H#T zlN(kW?WhjI-j3SL7^t8@S& zDXmoC5qwAre)YQSnkQf4(SSbYtyTV)veq7=*Z%h16e!uyWK0oL^ikqJSbK=zp`CP# z3E%2HY~*anm#sxP*2P;5+90352HZyI)Xie{lwrlQ(EWHQ{4*qhVwc~D%`0;_E^D$nP9a(3H?REG3S~r^VdmrM? z)}M0Q9zFpuEq7jENup?(aaxj0EVQ8WBpHfhqkbKV+`smY@d2NA!sly?PkmOlcw&93gw`v%U6mHfx%di}lvM~>h@O#%sViFCgy z(dpe%q0XL_`c5M&LZ^P!n4A7kHA4|dkskNIiZ3aKE0g}{?uux?_-%+J{tdP$E_fvx zj;suEGay&k3`D{$-~9Cq=R_2D%$J_wJYgXA@Y{6r&Z~3_f$c(%{W0U7POX5gq4u<%m$eiG$748zzhRSBWEGwcmt91*^Yv4WEYOzWYBAT+EtH#y{ouLn!Nz zgtjhI#Ef*{PVDd~b}KaPgDMhX+ry8Pn=OA^zV^+Z2}{*j*PYL-F-eL8HL1Y*MX+2952+^VIAIH>YbDc1SmmJszdkq$B?LHj;v$fbyzQf2%C(nXg`OCu&x@U|0 zoWtnEo9)F;6N|i_`FOImU(po|Du?7jz;u53IIZc=gtE8#g> zN?TK61;n@nkBR?cXqANSziH=GeWA@IBMV^|H{H5@_-3Q$IJcdmPUDh5h%VN?fObL= zAQ*5Z3{&RLp~ZPTcZW@(C&4#AV_lLs3NZ%w1S-{*PWOJi>!)}H-CBJWU(Fo6z<$=8 z8MD=qBZHA$C&5y6841-eJIYY$!OCr|bA!KLNcpIe_tagd^uQpE&Ak1~)7=fIM=G!@ zK0o<;X}I~8ch+({JVEGUyn|6tPdn2#K}K7*9aueDmc292VCC6uryOuf!`~sHjoYW# zUxGNT;lvxHWQBLl9RjS*1&5PqLb?hVkngjbcEpqGy_NOH{~kBiE`cHwMyAV~oSq2f8@Gs52$-6y}1cC6N%a?R+Yaz6Rm|-`wN*mz7dtrU1 zNZtK)(oSZTO_Rq5RN0YS5>G3Or`@~g89 z%@}eqZzwDD5*MwC7fEjVvrL5WmyB*T%gRD;V&_>!b;Z$T%52V3+GV{%_x&1ofE_J6 zgA1&5pF3eXWacl2-}PFjgsjfq=U(pRGwp5%Acu)7*SA`A7|uMb1`E^Wx!$BIgRPe0 zGl^qrM&u_SKqqfDnn3I?xN{38MfTUT$yI8OV0eI0hg`u=liyJcg>KtSb3rO^M;J1= z#gqFvMga6h+ntTIxe*;=6RMvR^bnC8jok~o&Y6I9Cq@IP4@5ZB?Hlzi_uq=1Y&wm@raWD%PWx4EUKws(Q#b3j z^apcSN!(mjb_*_Bm71dEgCSQNIiKz?aE5&3$*!K`Xt-B`ypE2IQ*PgrVs$_1m`bf$ zV2$JmS*0>b)^MFhqOBqnlM3-hf~Wq&yn(@RFbGYu+i;31c&c1L#bg%smhV;L8r=f|i%A{43s#Pe)*No&lO_fri}&M{obnumW|HZKy))2#NJUOi0E z5z)zRC)!>QH)d${u@9F+dXoe=>Q%|SqUh?SPEK`J=M~!46+BN;_ArizeUh?AXhH_# z1KI#j@DG(cgs$#g!*9xgpC6(LKx}!q67H=39MMXQ7+)Rl{?^&@Rr9R$Blh<)q+DL) zknlTDjmhtPx98L?9lXX@|AG^}ZsPA*JG_CV5HV`?A0~xXpxIs5hra_7bUdw&J4b)q z;9e2txrJ9OV>JM>?R!$8fF_1)X|abuA%9kYf9eK(;=mmgvl^%Z?lV%!82{L`2WMhr 
zVH{}Ej@!g^4&=L*tn8a<0$3?DVbgBNOgr)&D3$&e!RMr5J09#h7x~Eo*?F*7M{;-1 zymc_gn2%Gbpjgk{LtP}mQ)`~oZZvHNv?JX7dYK!7J&F*sC(I)W)nMuhM;7OTzR>K0 zjzB8s&?^##cmO;Z#pr(a)~k)9O`RJ~hI{ufmYeDp*@Md-vUY@XCGc@9UAXx$zi5RsVW;JU0Fp-E-ubJs zS}XKG?MndBHXn@11#;=ab3wOXcv;U^y?H|A<>fU0d!7+IHnz6So)u)5I{SSdXwZzq zC!V}2dz>X!xt&B>db;{{A;gsO*e(ngPvHIx*FQc!{nmq_I9o;ydn>r-3+LCS5>|eE zaERIIDcmzk_U#Yj4(v!&az?vqwU>}pLg5H(Eq?X7mHY?>DvoCN9iO$$IG9a!<80d{%z#)#yvHefHc!ny$j((O%kEt&O3;-pDHnV=E)=$Z6mX!IJa-u*R)ngg@oOvteG@2 z;fQEfd-VWS>H4u>(?xBUkF>VulVFLm=oqI9h!g@Ld94~tKSrKh$#Ctz5wEofQwL_? zc4W1NVVvLmH%I$&T17_-A~D8o+v?yD>!ki;Wo{Q6p0gS33l>s*Pf`LI;q;Njq0SAI zC|G$tvUjWAPt^(_5m^(%feda-dUA*jA>zds`hZe#n+72Q9v#oo>u4_Ht}EN6)JgfIQJotT&0G!{cs1z4|v>NMJaf|I7*fH*2QHrn4YTQ%;_Y|P? zh(6un5FV8^675be?>GH>u&)GG4UI3HIkbs&@Oat?u5rJt9a8_bo=gy`Xmw=lETyKolUwCR!^| zy^+{hhV^17dEdkG%u{n3T3cIWk7n&zbO~7k$}8v42_QB-B3;T$oTA_9Ek37^_YUuh zPT6^QoBYUg4~ITWE$-xNptIc6^2#`K$i2P6YgfBgvEgPm>@~Tn8S`BTsj{-490i*H z=}}@TQ41Kg)c< zN9>+n(AmCes$D%{^UgzDfSdm+9Hf{9w|?i;)XS_!I$9r3S869RitzSr5mQ2BPn~1U z?#bnvI2;k{Mb6pZJ44r`gF(Cm2XG%rG_4x2(Wz8E;-;vrj7<+H!+ zr@^M$0KqN%t#oWp>$4Q;^f#Pbm)Qet(EB`RufF@2CMt<`ilpA&mz2ya<^P8o8d0V0 zo#|GqLfBQck?=NAE}J&962fQDKmZ#}*i&g}nPaI!)4NZ?);Itq5jM=kx9Quk5q<0A-*=hHXGR0;6E%I4E zP#+T`-ZBm0$hn@M+lSMaSzm-RTRt*0R|L2uxBZTv!yc|OYT7X+u{pIFN- zTjIq3u=K7APkD>DZ*!T1-yNYRoUvAQ%AS}z+-P9b29n7ZZVYcDQI*4gW{QW>%L*5L zq10{srnqGxtiL-<7Z31B7`8dFl(xLis6=C^`bFD*36Y{e-TsTV=9~!!mfh(5b*qhJ zz(d}q+k!*|5VoNsm7!X$TKdhYZEZ^=fCzP?&9u5fGW}@`>k1AUMrk>(+)A@o#Mn*@ zSiFelhlY^{CfB#;e1!|>>SC}GO!d!8D^$~AIpoL7PD2WpN!rp4vV?{OMCal{=IP|X zr&*8Vex? z7eqb{|3;0PdgnG+mk!oRz!WjiiQ9-l#1>A!S%fbJRu{M{6~2VUD&b$w03@z(fVmYd z_~M}hMf4K<1)H-O_87`;cu7Pz64l)m9|6yToziyRr$J-kf4EkP1vZ?)sCZx*yg3n*hK=x zCU=VBtI@%SGz37A@8a4DcfrbgX<7=D;s!lwjK}rbzls{(uo=v?5;IrTRR;Ma^zrh? 
zF37)cV6FfZF*nwZXs4F87r%x%!O9iXiFpJWuP3Fge8AKE!Pl+;0d=Fk z0A`Z;+}#AyO8C#;%!^Dx2#VgyyPjEMs$>5nMy2~a6%MnxU9f+i# z3|A)j9FF|4j2ht>3#vgHp0xz~-RFj9HE38L-?Z8D*msmnS1P5KERZm^J-1j(%CJBH ztT-E`_a*q|2WM=HAgdDoPz-_M@K4J(AECmh)?f1?M*s~g<1FQ|&Y8LxTk?kQUGeGf z7_pMW&bO%o%P3AqDeYa^(acs7Q(a8|6gr+1q5)POPnnNO*m=>hF{Fpyms@MhK)r1L zGS}7qOw?iXmw)=2@41F50F?`qdcZgLjDUuTY`flx?&)erJ0Mq?NnPz zE?>+kFdwfSqEtJ!gvkn2cGSAuh&@ki-jkXv1PBXZCf9MxOfDQzXZ*1J)s$w*y(o>w*J=y{G08XFOjIZiuV8C zG#$7vIJBG>8OZNyJY|PF1{}|TAW?KOL8)iEN7^#s=9zVp%6kq@fs3wwyN<5BHB0Uo z{07D#(#P)D03;x5yN*L)H0vZDG@|bPxuWf?K3)Tp$Ncp@Z}^;JR%$7!rKi;CKyLyu zj{CsoBoqu-p2q1_1=Ctl4@Rb}cd% zfbFaxOX+{of04@5Dq^}=^gB=xPEbe*4?M%bl^7}viH=t06ejP`)hCsKr#;Y>7ad1p z7iHK1TGeg5ETYO3niCJ()5Dpf{|20Ljv(;rAJfL2)a=W+B`rUD>it5o9@xweGl2cM#T1DKDj zoOVf;L;FQ@@Xe%RV5u$nKtQpVC}W2!Ux0LughHTfLPD*U*4sp%ac1lrIapVOZm;R* zFfKGUT+LSKEP%zaWy(})pyN@4XV4YzW8hk2mOos4>^}}HX=(`C<(-(YQDG$Amf7CW z)9H)hq>4Ph-Js&ZVJEqunAKlQfR(s8p6(dByI9XAh%&iJuRaH6d0_nUTTC)tTqDK2 zc|TnQwishKxpEQ9BOI?%`KV{`W+#&M)2B{i#`nwKI^pAa?!edKd*xO=+CNk}>qNLs&hW%cu zymsubyNVE2SeDxuRoCzL_wh%k>Wb5vtk-5^of<|e1GeQeU)#vbzwy3UACx=^&;`7j zAUHSY4c-$;X=&;nX<@l7Dci91)3a(m#3yr>J_?T)VwCsT_R1H1ISP?J*cnI+xcx%= zvkOIU7P(&Sb8*8TpHDCS9$aJc-h~BiN{@cvOv&6lB(mPy*LPQJ%ULDb6Z878wdUf* zDsmk0cd$wi964-7xrv^&?bF%bFcD)zb5v-)G_)g;I~b1gw$A0Ijk<88NOmOKb&dBR z?Xr$j!=+-xkcltB>O&gX)g~upjy>ajU0%D>Mb@mQ9up z^v~=MxCJ!Q;TN^v%*$j7lYiAu-sa^ypF;9Mb?($|oB&@}ddO+n_Prd0Q@i})K z*ZsIdgX$FPiZ$mmnxuc$>a9IkS$Qp8jaeJl`nHbc2&C+5ejfWF0#319XA+{;@3c{%@+Dqc+sChX>@jGEN z$8@Xo$%R1`ST*D;I>2DIe^l0Zl%_GyB&kT06FS?Q0UR&(-TNnr=WV3zo=h{%~RTG7Z4)>v<;EQ~483~^NNd?SJ)-v7-ZfyQS4f$^^ zAB%C;>;+J5oT$*nAqsuNO$~juC#02Q;vwY6?JA!DC7JBNv*^thMk*Tu;kF>Ql`%n~ z^FJ*d-8hm6T6lX_9TA1TM%kQm#Qn**T=W8IrHSPhl^p21QnXQb#(w=m6NjaHz`%Svws+&c6 zmbcC}(LB0$3DSi;VH+H{)RPT(^JQ``QjrHSZ0^VCO%RW;7DEmF4zsjcf8bwfb}OXP`*ekQql- zo!xoZkE#*lLFU@d0H4ZU|HX8({^O6hHfQ36HVeiXYa;k3=aNr8*6c&+{kZj-v9z26 z)6j}_(h+4*$fKr?C1*O&`v7czQy{q5+Efc|HPgH<#ziaGb#3aGxER!5=KFZQU5y9e 
zjXaFvRv3O_&6p4zzq>(mlhQRfAd93=HVO!`F(%%S*}SX-(L)Wdm=+C&G|o}97CI2n z9!a{qK-MG9WPiguk^LMqB&DtN7hP=6S^K&kl)1mY9L)DJFh2B4!{ct^5#?*Og|0tT zj&WU$O@9|KVISsVxlfKyQ*cS6ftY4R^2Cf-<8zyC&dJgJaVk-q>0GqAYq~sR`S)6Y z*@;hE6P#?>`k_Tti`Pq~%ry&)^JIiJ6)-A3>}kw;*6it#yIoXTo6?)8RRg5rOIWV> z#K|@50Dfs#Egj$Lb02*|IGACLHm9G>aLmlZ_OjN#rKZ_;LgQa1+LZ+ft?6J>8j`Oa zPiVUpk^5vVRbH3Y+1?qjwo#B~zpHgTxo@N)>1BbF`77tHQ?taENrlCg4h~Pwg?;~6 z*|xJ2&E{GKK`Lwv$_t2^H*Tprz4OY-An5M3eUn${+7PN5&uzbG{V503UimZ$L#lwO zo99#N7kwMOPrcHGuuf2ykIyah8Vu@uB6_5=&9-~%k-a`|7bk$e1dD_z7pXr0_pv?A z1dxw1dYSJwLtZC=qyFdn$hcR=r{AmhvhcD!4^LaHXL&!~q{i^or#AiackWO3+okhJ zKul#u_wF}1YzsHhArqmE6S=ujoSi?U^E7UW$#@7{zQ*Moa;b;YUG&?p^*5HOSo2Sz zuiKK_{u(bwW#$GnN(NC<)(%zAPL3*}nV%%Ht}U7gCFTH!&tHDw2kjvRWCcA@6m(W< z)-R>g{r2tXrB)+U5;Ju?Vw6Hc%C$u3!WQy=bPKyC&Hq#kLHYMfMpo$63fXTCAf6Xz z1{PF|(KY7BN-?U(!OFOugq{pyA6J579^JsT>AaVACblKU;de34T-8D-_nhMouZLFW zW8TTs=84pmRn|P`3@l=4W&x^@?i{ZF`i#x;8EG`4reMG2jQM&NbEzkBbjWU}zmhp0 ze|A|*>>X@&a__ozy4~WW+jO##SPFA#5w>z+eP<#AfyF8sV*$1U3L}WJ0mNPbYFfJH zr;F?!#z4&hiR=cL>wubWwd(ig&&plDs11I^ecw3Z$NilVIBQeUG{6VwAz3S6*}=go zIu5K*225NS14`!+PRD~Kh0{4t-2w?*y@r?G8@=?-L=lbn^q+)0fo%+R;X^anE!W+b zg%LQb=O{zGFZnCWEQQ$LcwXH*Qptfo1_r!=s6#?9=4k8UTXeST;V5JL1KIlhqLK1V z+N+M8=N;Sq9f^otbHAy+WW%jP{mC1+yZ`kKI@YFya^)b%ZDPl_8Dv3+<$=tC9?xJ>2F=E3BF!0Fb` z&ps8I>coDK@0y+4zfEN3;iM^!#Cx-D4f@j^DQsd_+z<^#TkD@Pr}k1ok%jqz%jYDr zu!&A|w<`g^FQ;NM6Qj08Qr#w>a4ei`Y69RwS~Ux>S%LwG#T7g&fN#XUGVV3a4o|RX ztj)`cy8`@%>5r=*LTXmcrL=hg3kfO(PTb~`Aieq5#O2HaWCbB(cl5O2iy}XDpICwx zs&kGHIp*C1TK5=tv_3%Nb9gX7$=uxOC0Ji^n!rP$nNHZa6ei%iq5UMid=Z7|d=^ml zbi;jR&Tnfi?Q7`@O;d*c1hu}BV|tm8=5i`!qbA2^{jiEf5(t$59iRu#wdVQE1C4@KQ_m8p^f8@@w9fyR7jOBahkBcleN89w_Q8qWzs z)bdZpICCQH$I7!G1q=TD*kfAAB|9~03KbRGuZQ4ts!kFYNp6 z_`lod`KmP{w664m8@FZb`l4rO@d9^94-CK*k`s%0kz)U&EQ$fW14h0pJbg{^zZv-{ zu&zl$Kj6@WKr0s*G;ve>=jxJ`;If)Kg`k2oi0_jf8*>Kz=i2lF#`RK1+}HCS26stY zL{Jm4ity7}uVd%g7`I@utCqYFZs+MapM~yA%(Xp(d{LG7wK(5uCp+rn`RD-gM)<=LTG@qPp$L66Q33=0j3N1SJCVV0FcV= 
zl><)!q%%$}_8LB;1L)Lq8!}ND-Gt=K+R|jNp_VQw5nq68h#>lKD5ww6&ycN*00xlc z5GJ>G=S?^~?HyT{NQ8CRH9XN*Anxdy;426eLHSSF?ki)#VRGj_@C}(Og;{g~xhBWf zj;m|mmQYDlSf*gApMlAO&Nh0K-(*ANOh|=q$7H zDFTLSNd#4YuvIi@o(mx8xMH3CW8I25;aOTjyP75M+cBU|D~sd^9oCNElb;-AvR>SG zv7FwWo0+T&yd-U0uPtai`e}|08rbw`e&b`A@7j5W26s{TH{<%-eGS{BOJX~@6*7I@ zykqxWe^{5Eb!2NBwf)J(D}aleZ&A+No`BvNAhRv zYgJu5bDXk9@-Hnj`1}?#5y>RCfT9ZHpOQcMhH|+W^$^QW^Xhf;vK?;J^N*(E5<-|w z=k2cJT9Rw2TB;nC(ka}928wT3f>|V5t=N*2HH4tr3;k z_;1Wa=wPC5tSy}dH#d~EFa!R>DYRHa?5%#Y$8GD)rySKulD-B?>dcfTZVhti;pe$5Bu5WNp?qU2LfHML20%#aD(u5iOy_?xP^AmFlZDI`{TWj~YYh5&<1-#xA8 zsX1y_7_wstR@Xy^vl^c?jtjN^O_8DXE>^R>x2g1-9(*+ONNXSFux30ap}dEeH`YA4 zRFhX>vGCQ>YB}bxC1=yP8IaWlRvq1cxN1dyu=0fqX}l~K#1GcXe|J!L$e>_P=3xP# zt~^&B!HCvz4H5%{3$YCc74-L9Iknv`X-(+p2x(6q$ppJAM>Z9m!vFW02ISm`0Nnry z1Pp%^4G${C66uh4-yNT}ty&V?b_(+K?&n`p2EtK9cG=X`OTgA1eR`&B{olnYN1HJ(oU zt*xf*J38&oFfQd0<>fw-9K6?pjf{aka^FTDBj&oz554p)axHZI6uyu{6Gf?OG&z zc?kM9{Pq{3Gh$d3HYkDO2{-DDR;&p%Jn@gA3u0n57 zZmEJF-@R9NDY%z~E5y5e0OP4V8QPoj>5S7*2CE8kTBUvXN0|mN1sQxJW zv?6YO_udU*qm2WX4(=R;Dk)I3;thy&9r&VWM ztL1MXh`Vo;6i>Lk5Zg4bbM8o(|wJmH#5-(8iiR<*_5W~GPDM0$H}_G^RoG=uVJ55Z$Rh2^ym$L&tZ$;nOE zJ64y04}q_qo^P zi+Se zs^j;PnjvF4iQYI)2GYn7`x!bhGrQE(+~~K z_XFwE+|%oBJ0#`(w++f1Ev-T4Y+bIJsF~!J*GL$C{rIcU$KlBbIl1@8W8w~-K?i24 zpH{F10mXYPh$0MPRdd$h+Y$A1;e56!{n5~b>$oOI1>aqdeQxpAIS9q!WBCu%>>b7G z8tE5a|CrV2R*u+H4G);p^!DqcLP3H<=56ZShuQm>TU)*SQ@>5QJ;UUMg zXOd^AL(Gc>2waUkA|Um#TR_lbg5^r&pAihSh&pbvZp5O)vxVE~177!Z*0g#sz}m!B`%7BLth!77&g2 z?^5ZGFQQ=HHEZ1|7EqG*>G#5pO-+sGtCc|*q8zI^)>j`w_B=3{7n8TflhaVFJWu^s z-YQ{U_gtq)zP4^^f>Pgd&9v>Cq$}{BV?O(La83qE;lrnY)Y4ykPecPe> zJ$DvU{PpDPmK0a`t3Pl*z%)4l;QP;S`}6V+*mpDfchNkFsL;PPQCf%}$6Z3m(CT_! 
z*%pog3u3fw@m4att34n4Y2WX&s`e3ky~y%?&6dE5Se}B;`;I2|Pj0F*XRBs*f9~Jy z6We2WF3V(|HY8=)xkcSQ_oMD;=~#n=N5|OF8MKBKzwhv;CtoLSdkRP%L7viG1)}aJ zv@GlY8C2H`&^~g;|zf*ZIsR zKEnnr!Z&Ii64T8l%^ooEPOng9uqNoHO9dEcX*(qaMN5l1VQ?=fsst z4);jw(E$dAEnW!7NPC z8z48%gg1>|$DhiX8e2PSneBeArHN#8muy5O$g?sG_4j*@!o;)t8{d!+NDGQ)Ofq98 zQv??!5C@2TNd)}7O^C(p2AXz*6<`4zvTVGmYh60YQWy)O25kob&>ZVx&Oy5lTRR(o z^%sE1lzb7<%&A`5{Vcl+qI!y0|6;WpQu%0`w@(?S1pAsvv=d@+Z_X^B6tDUk*!J)~ z2JlEzymL!lA=#p=Bf^2{gS$!e{LSV`n?6lDb!p*Oc`USM5SkDoQLL_* z0BZ6=dfLk$5$9O^U=o)#hRPTiKNV9PSejnHS+mwy+90FS;9)eqfZ#LFc=mFJtw2r4 z;i=8t`^gUl-(Nma0Cc*!B03hFDXKzVov?>Wlty9L#<^IkPw{6UH z^8)eKtns+SqrYee9hGomYNr*Z@N?+d72(9U13)-Y`}Dh4t{>TOrt}2PnCLkK>sAXS zLkZkjys2opv^dVx>Wvphef^!GN01+9{6fNw)E?a*|B~>P?>ZbIXaQ;UVPmz0RNX3V zgaTv(F#{le2go%Jlmn#s|K5Nfup0EQI$!+xjgK!exkvTte4mEXQF?mE3B=27t-u^3 zW>>>=8LR!DYa1Ek(uqDAf8AfMzI?xbRC04bZN$e5v@X+DKgUR?L|O@ddBs+@J5dE$wvf8)*#mCiTdxeE=0yeTC%W*Z>CZ zTRfU2Mhpk^+>qo37!0bwme^jHAqyFP2auRb%pv%;a99Zu#3`;cdlF)HL#Gk_r?1ga zk`mCZVF#^oSr%q+v?LI6SX_xl*#Wq=K`*afcK}#H<5q@r2N3xqmUAvs04Xln9%za4 z`&W-7E<%L85sx7rKM*$C+Z%^|xhK#?^Bf}H1T|=k+SHvjbC{6J0_jXr0; z9kYKqlMd4opBvx0xE7~|BaH60AIV!oUg@idp$fSG6l;>OB-{1RO zn%tpAAP!G)=MxQyYq)yd`%H4NA!7wCvG$!1qp^AD**v#Cw&zOzNW!e`$`C4O`>kH; zBbs8v8MmbM&eLXqA_UOv7#~cd$8>IkZ}RsJz7co;7uvb*FILh2%d-8WE_A27c95_f zD8@?(E#VbV>iuhN3L(eGRW}Fm>_g7$5R5u5HbRI8Qw5w9L0 z=}YGg%YOW>PjT@maIvXLMrtE3RkBFr63tY~a%8l3^l(zzH}@n3f2oxGq9z~kf?xo1 zNEFzt|6d+p2*{}i)fHQya=-W3kl7hVnYx@Dw{d6tXik}xznOh|_I=j2Q9ky$t1`Y< z?2nkLC`8L^o&I{fN^4W@J)dUbRgo`BF3{xMvzxh(MF+l`rzZ}64X)q$$}uLpd<3a& z_gb7B$QHw<{kM|8&cPu5Id4GDiUUyII5#BtO>i?C5GMk^PJZr=h68RM<6L^+z9t|^ zjhxLKWWb$pPj71Si+U0J_~5PIZ9x$pZv|fP`LUmwZ5p zw}VW=345*C&drWmTnCMFf?h_sp(-40hzH$ISjjJ)fIdNh|A(Q@$ej^;uA~$RDlNUt zo+VSc`SGfgYVy^f@w(ryAwYJ#%v1{mP1wQSd+1f3x16G6o2$^<#(uOcVpCwGVrAl2 z$8Y_=YSYN2(s?DSh9hRGvG2eBZQSwu=G9umLy0O+fvlU}WPiVH^)I!Xr_>QogZ?%a z6c&FWfJ`B*+FBL~=mM;a3O6CP$`C!mpUy0-asBvqBYK(Ym0FC zi2uka4*bcS6wYKbH#c9nx0hluDy_YtT8NH?&i9sE*n-%0x}FX}W1lFkUHmayEVk+c 
zJI?s2zdz^2HeO|9F(~u)oSIbk`aszkEeC)f)!K`_N)N+xvD$G5=mb0$1jS~$4DZ>4 z-zZfx%@-%i$H<>ELK3;>H~Z5$K6}kIC{r};G~BgjnD560GWU&AhNWX#IPL*wcYG{= zi1{jXCsziWBZixhQ(>--Ah+L^fPx-0iV|-qKKimK7i;b-YvNI zh2I{YjVVm#t7W@@-+SzLG5ksCA?$Xwy0odhwI;OGwTdisT|8#etLIg{$EL~0R65Edu*yU3^rM2WV$WQ*7qA9;gNUD`@#$s;xjEEhCX|x9Nnr^5 zW`gsEO94Q=slRcD?{+|*>m=pLr1T{7PSh|{m#Q?RNakC>!>B<6Gu>%s{7>tmcCK8N z6?6CBvF&--o$=7P9l2CMZ6c6?J`T5<)c$79x1U)A=08sOC$~Z9yYO&!DKXrbU%u_I*!PJh>Ek{fsgBQ~l?MXhy#q5%2UsqTs>qecbPRj{Vw-CSru~;H>668TaV&niQ%M&fjpzQG)59Nru!?csF zbM3#zE<|ru82E6g39WKrBQ9lcJ+v(_)#a!Ah#`^RVAzMvoxz^Jh&7y0@`)SswW~|Z zF>>%1S|~Kgl&`UXTu1b^@bUceWTP&H_HB=gTBB6!Qs5%o&5=tXU*UQcneeWRH&Xhf znt5$!OpuD!D{MJ$T@klc;d@YiQ4XVE!9l0$*8Zog<=U==qWsS(>r(@p>O^1`T{u>4 z?#10X2OiQ2A&W66TwDn3(!7|OnkwCM>+J)72$9*mG+!C9oLH1IKJziZE~D-PIk0D* zR-Cb%^&{!R_e_QSy1jQ(x2j1kai>+$f9M^*i96~)>5nmB=CLJq?QGfAbFOvG-+}rn z#xZD-rA!s32Cv<;^Yd+dp|EvS|IrUgMpd!y^RJc$P3(&XN~@fk2r+Gy+t`{DD&gds>^MK3SVzFJ;7>UlH>1);^$@_4rr3VQOg_GAG|JN>bi%qReJ5! zdHntwCp5ZfZ08YnBqkP~^~y8ouh)7N7DjbZ_#nybg{eX5^e3hhgQq}_XUZl7$6j4} zk=WFB27RmO+lO0pTp3IutGlUm2PC#9O0HotKI3G@YpCT7pJ>woUDGVqoXM5z?(~V| z>K*J?t@ZrRp|2-$DY}WFY4$gdH0?XwEr}i(AoY;;N;Km@u}q&Wh3`q z#4V>6iWGV$^W4o#x;qx`UFX0S!*z&g`?|SV;FMWy)vZa!lIi8ApK%L5)n8w)>`hpd zrSymGDV#;P(6UMWn(@SK!VW7?Fglw5iDDy)SrJa^9im&Ub2Ig+i=wcutg~bHRyifY z)B0)6^q85hDLHxV>yngM4dx)nhnPkIRwegyBSLUx%xi@15i~P1z(WK5YssN>cFjSc z#9aINXL8|A)mZ`U8|bS>6NXWTF>fU3Tq|%c>H)M(p@Jnin`MBe5WOwujuKD1-<*gd zb4Z?*jO7+%D1J$}i_N;(U>lY#i*_VZ zmQIbz4kE`CqT5aW*dgtMewNTo*L3&PPbq3m`d!7^&%`&loZ)f}`zu*Hji5J|ic@+{ zZw8*_MxA+V{Jhpnix>fK5)jyR*>jbKm9osbX>`OVbJH$0(h_56tU38s?oFHR?-r6h z*T39Hn3GPzzO3KKa^q%MuL<}zWX#z^(o?@kf+RW66^7_?kYpfMo2uhI5^S{>8Rj`f zJ3(U$H45O@`Im&UuYS%0L?i%k0l>Bh>A848KtP2o*DU=eZO7%H(BC=G|Bw*pFqatZ zfLL_Cm39x-*xUzC&1eA? 
z`(cz5?+6mIt!mc(kPs+*Hb&<)-TQCNkt(2z1JQP3rhBTshw6dh(1qBGB2vG;ACVJ3 z?9MG@UF>;AurdlJh&;b<7fAAwA`9<$C{esAcY~U+IYpSR-oMfU?2qSbT zKlo~2tgtMY5bDOA&Z9oE1J78s+0a~XYfOWe^HSbz|MI7`xsZk*K|;4}uSdEy(pWdl zV_rD8PgC2nuVVqPxoF{%4-kHQqJcgLU*<-Kf-(e=0n{7Xc>dJ!d;k4c({5n2q!ba{ zhuc-bkacIT-OP6b4W_Q0!x&DKCZp>OG(axn9th(uT5B%3SA!kCbIh0Sb8% zXf>dp<%Qw!zKM6{L%ak%`)`#hIOO#@i|+?N>}=4stzrjIRQb=k(}P;w1lG1TC+GqA zT0H@X^)V#;o;UGv@FDT@lm4**ZOCL4=$g0Es`SJGhamt4WDK?suFC@j`KhzdkPY_| zf!bIUm0D^uPFz`MH2^Yol})#_s)Zj1&fOIUKw9$V`j>0#Y>j(LtgDkYKD#b)0qn7V zaB?=_`Hetv%D>8n6GEH*?NgepIn|d?p-uF!oZ_L~soBxN0LU0_4*FBWyb4GYD3q|q zC=DB8vChL)D-0c0XrRt}UZlnFzr9cgKk+c~EKNl553%A8ZVE6_6rqR}pt$$4uw{BU z0B@d&;2zr-=1Ia209CEAs0_GnbvozYvg)r%)+ub)0oinN0G9(0CQ$GjQr}7N0V1cx-mvjUR0Mr|7Zb3D?a{1}~s zs$?E7bhO?wojYa0QFa{z8XdaWhz^3eVI47o;Qxb>Rq{4n=XL)VGa^(h5xO*H0pxv*>6nyvX6wM&l7~D3BL@VCU zmq`CMVtsQE(3%+m+yTf6D39i$@gfju@S9k+4?tT1rNZt1r?>YGYpPq{1_@n?phBnu z;;Z0Gm#UC}A~wW;C>=r-5D*YW2mt~pDAfwSpoF5*n^Z-T&{f3HiK4V9p((@=NGM@e zQ0JWAne)wc%{AYQe?-K+*Lv2y*3RDd{oK#mh5T1Kk%WQHo+&&>Lq)>4z@6^e8vGq9 zu2VrD$4+I>pW4bB|Kz!m3@Wd|0C*+V=CN9Z3a&VIDe;v?TY9RZitAzS$iO zv6eT@+TfmCS(`LsX_ppnRc}{U`+iw0Brz*8e2D29UmjGkF=9L~q+g4RReRX7N zvFsF-1^Lzi&($P&zw*S1g0kk)zR~}Hu#NGHKYm`a~$OxwL&F5b( z*$i#ozy=@s6e5b#ve9WMzI$_U@B-ktcQ4ob8SXi}_ptZfSSgIWB$_E?EB#>C6Le(2 zm&*zGaLkK9US8pK!Z_dO@ae0ELnHkY@E^hV|M6bN9lOf5I!2=pTC#0!T>H|Mvbl-4 z#4n@=-E?8W(j>a&Kwcr`hA3yEd&D>H%iTL!KRLFSCge#+OITQrw?=jTyf(7C@mGy8 z4<9HxLik50?pN*V(APvZ-++*`7dnP)rv)URM7E(yo?K`==H5}~#dzRdmSla{bGya| zhJuyH?Z4vj|7dXk1&f&CSMQFBGZgu{p?~gi9vw4I+rFqO=djO;GQp# z!#`pWV(2eE-QUm-jfikR*Ba$!h=j<*DWZu3EGa`a(vIY=Sq<=J1hq{m-6zZdgX;|JVQmu%VF~6=hndJIsJ|DnhWrVKlUQW z-Kpf0x+ri#Yz6}@O=PK^e2X%0j8TL=6y-Fz_wgb+Y1dw@CX|)Vk%!+*(O%^kvCG68 zyKtbUjlnym@<>y2W;vkbWncV)NrGp<^hD!(shxlIl`&8(4vUkC0dJA0jPkp$*>Kec z9KU~B;PxIpB&2Iwwv_?S6SlYq4icJR!`2qum58e7Z3MNNplK&ci`H zIlwG_(j%(la$4FNd%s_Ht#i4&cpo5@!7 zlpJ)Cih@>rYQpvpPxPUdLyHsEBzvyYSi=d(JDimAtQN)<7t>}LQsL78Qi;l-s{EV| zT*y6Q>hoWmjc0hz%HudAtCjfOcXD%?y^7$uA?OvKc^Rp5geoqvOk|(7$f7{zKpmKe 
zwE1sX?mS&e=1I@g2rIbL9_7}IZ`iHNE3roD zSCy|yH{}$6xXDhlJd=CXx9@I*v~cJ}?9$um^ba>rwbShiCy*O--)JOANg4V|s%P~a zf`VPysX?)z%-ilRqY_E_a2=^d(+*mH*Oox$6Z|75XUx+5)$i_>P7_7t=XGLL9$^nnR~&IumK$2uKAY$2E02JaE_H*mE?B4 zoK0tfx6ki`oa6Np*izZSuxpf(!$GnllXzeBr|201{LPS*>DkKpF+RJ0xG(T}?TWH7 zdOR)#k{*AtlOAlKYLwo0iVL#SdOMPu`m#aoW7AG-nhLKM*~7R>xhrEZ8&ur|AbuzP zBEV%lPi>S1&L~hU$qQ9*!X#MMnXKH7kvz|N<3Hay@4y0qN((L9RE_-H!PX^jaOIe1!)4e^ebU7JP>J#x|E9(|W zq9OUc(t!TX89xbj0`mQnPy6o(XwI{jFa~62%&hb#yxUv1WwWb=CqY(A4@kNvBU;c- zRUm;3DpE-*-`^U6Pb#y)-BJ$@>g`{k?Y~QL*@4|mhW#!*r7-q^x53jI zyP#L``!-r|J15x5JhIKbqDF}ZKHDhd#3LUWB!ud_pI)`zLat^bw9UW;|C$lKP_@S^ z(cRC#Cz2nBAkJ<6WieS|5u(zw-+*vuz|NDmWbB^l_Nr~k2As-`=jC+?4l}i^b@JwN z_@Xvt?6wG2c-4i-VPRK-E+3o|J{f3IEFC+F;mc@!X4~gYf-a%yIg)29kyTIi7j~O=8K+H>N?l-5`7UfIAt9< zz~T37c$HS6uh8OSRHv`d1Sz`2kK$5HetuY&0G&G|c=Yy=pe9DmIp@;HRW3(~Fdon{ zmCoxammD+rb=Q=ySIppSQJQMR+k#ookLKuXoA_tcBCM)8ed^~w5SuIn-HNO9q01Z* z&kmbPY8So*9km}4n|3i3jiEeL^);DV(vbs=)>0?z5E|~MAW28%+L!O~audJV^@*wm znDi_z9+J{U84S@XgcD-5Dl^e7L8S#5kh)`m2BY?(r^h!nQVSaE959g5(3lqYS6{iV z`o^Ssz+lef;vmMh1X@d#xwPo}v3rbkJ%x2b`Cw~Rc62xPS5s3<2ML$xVdb^{&X$*0 zV#zUd#_{8{nuDq-3pT$7&^GLV*zok(VAel}G*efj(g<3WHcY1yeP5Fr1zUr71ih_Z zp$$3_!edzXu^?hL?yA^eJ~GPm$l_yBxJnymcR>LXfgcDHE8&YCW!B#!cDx~dW}G#f zrsapfDu8}{XTWssM=s}?ye8-xwMcn_Zy&+yz^ApaF5pEGx>Pq-dRqZX zI_+pv{Ts@?6~T@RlwHGPlwr`<%Q>{~Ym%N(%Ev~QQHPcRZ|4vWdB?mhA56T?H8@QvWfEguM`Fw$<%h zK+IZ5-+5!HO5XO5>Z1U+_p!3w{L6kGoSNI$7&YB*&M*A<@#7o0GmR@mL}@_r2|oUz z)?hxgIaDUAqrnWFY(<-GPf%Q1_d8wnSuoKKoll7U2<_y+aV%Hw>D}*{B8R)?w&!vd z=H-5V@_fBQ?jaP^-`hWX%AsjZFDI#<>u93`-ep_6tv!LdU>gZOggy+hN>m87Qa?9Oh_E$F24ysfP;9<<(lNvlFy z{z?drE33*A{Vtt4=p-_nxZprLp*Bdn^73YO!Lwl(JZmZ-5F7nQ(A`75w%N*oVoe|Y z*f?h*c$uf&DA5c0x$dLuU4NNf$x4!)ikUYXch9-eUmyLPtu1i9;sA%gYz+plJhi#D zs^X{z@8F-WGLq;%Vu6+|p!$NB|4GOTdP9d?V$3Vk$YHXHfsHAhBU2c%zenYHVjj=S z01SycSV)E^H7l;uez%-ot&X1FK3$ZocZjzKxyt7_=~0*kLJvDm#U|`IE*B^PQ$J=W zRx?iKGm!YZ@llYBacP(;LCXovtS)#^eyS&1g(B7BPP(1!Y3CBheY z+Z%G3#$ag^%cwNlJyH~ugSckDrlM3SI5BxEi&-ynYt&aWa>+}%@*QKfQ^f50S*s9p 
zSQbJiKf^qA0xsuAL`)bM+M==}Ua{tl=BYFhRzS7;HCPU9Xt>o-t)=$l^0=KC;qJtC z{Kmt*j;<%BUrrdTQlqX3j8N+(7xXcx1@u_)tXl{~t zs^M6X+vEJ`l2%fX_f0{M{|bwxiH@_8wgK zcy1m}MCrgC9(Z5%ljfY9@hY?NJF8LRX#M(Ai}>{^4FTGrHvc{TNfSFIP@hB#Q~fJa zzF%CLx_WbvMp0doc*AB?vu)QVrz#` znjh217qs(cQKFYOR!2rC*wuHI&y>~%Gp2~GIAaNY1;(|UbYXsJIkgYkugqmQ2fH#? z-;K|BUGqz_g;p|ZbI!z+PCG}_zX#{)+dk^|Y+QX8v1*PCePsz zlvECn1^rux()i;+nE6(jkfznb#V24S^eUA5%~8rZ=>kXhO_wK zl?j|jB9%x^Y0pQ}pz_Hh0Q@0`xn@uNtXJVHIqt_5t4W1@_cvzELki}<(-4tfg?)mP z{p;bR70QH+sM@l27|PxnDP`1DS`ICcLUHw(1fIPf(v)F--s4*=bwr&on^jkvJzP=hUohhE8>&PeFTxZ`=Sx%#!}W7$H?w-kL1h2h1r5_dde7D|-A{gwM6tSOc&NTC zBNW$80Eb1S*W%KPR}POaP79ypYC80#o9?1+mGV%qBe0R9B=FM^s(4qA%$)<}qmM89 z!9G^tmQXZh=mSXBT<_Jj-pRP&5nT$=!4LLm-nL2T!m#PqF*?`Z1+Fpm3nbbKn6!q| zT=&!;J=ve%_KDLuX+tQYuYXdf)j~(1TVN@FuE}ZA$L^}utw+un-_#CinB_W$a1W1` z=^*Fp+H7)l+eP7%B@D3>Ysg-W`z;^?ckx0G%BDp&J4P9?&Yk z(Q|OHMAW4h$Ve~cS)O?~9IP+*tsYD$8m3)y716ARy&*(^0|qwZ8=RS|uRtrT5DF|= zxpF(-t+!nvPlMn!U<-PE584JDkIhNdy_OrOKVgNiST3m$GHb$o=`0v>K%Cj?VS3%m zCwzlW#2-Zc0Q0-EQVFJZR{K{>69HfXf^}41Ngi9`X-Hmu2YL+$ZNp|}B4DlQ!*Q$2 z%CkjasXfdsl&D-|3^>f)E@Jn<@dPiVR)J?RZI2USrhz7$%TEj1eSgRFx@@#-UuwLW zMBJ?)$o4ZEUZj4`(*F)a%vNh4UWrHGlCb{Y0PO$YEjsHd^jMcAXG7PdhIF`RRa&M0 zZ{S^o3cuM%So8JY7D<0NVL>Z0kPa*=SfC3zx`@n9e+zj)H2e{bIf(Yn@5!%FsYM!O;xO?<@ZzmO2T*iP-?*<2S1oFaOwG4Pu~`u=3UfYz&m zU_*%H?v#ob3LA52`1s)jqDZTX1dr-bjwG&SSX8()q2)$&KIWU2ZWziY2{mwIdmZ%x zIf=2hF&4K_5S^W1yPC}o=LyKxYDY`en=C7IduqFq>b)k0s^!bf_dE*PR?S z;{wNU!VXA$lsMhC^+`0kk#lG^cRE2{a^0k{t-rrtQ?7Hun_eKCa85>EdhhE=yLBfS z`NRS27?QjebLg;dMpzVDx`S-6Udj{FHiUAxqaw*=Je|H78T#|~S@e1xJ@`~e0NP-CaC44e_s$*y;}9&x_G1M60`g-7 zdItCL1@iv0~uxXARzOXTViHJWm z)S=JhJqBtRIwl+HTzMH6IdHE7`KOi^RjkaHEC=!-nD7+L(dscDyP65h^p1KRtnyK- z%9htH7jmmOhGGx>ia%KjCuJZ$+NyHy;y=FpB>w?Dg~p^`^Ac|;ZrouA-34>gL7SCC zy-@A-P?bhX#nt^-H`#4$BKVNIU;LyouF&f;WBC&qRC;7yN@9TG?w4&8_qvWJ% z@QOKf@j{h%)2*iW$jx<{z0BBC$Xn?oFST3RgdQOhr_n(j1@ooF6z-^f0uHO)aeM*g zke+x`K)1o1e&|mk>|UC%4LjRDV=3y_Tx_Xfe@8_*(=BdjkvBx=?#9HW*>v|m(oZB@LJ6cefMImQm`T&1b|*K#9pAZ&IEbx 
zaU4un!JPEUHPxh7XvCE3#f>sr5mi}|%TX3v&2L7yBq0k;vAl6+uib+mhzDPZkWXW9`+PnxY_~E=N6gs{Ss6e}6SQxmP3znFLOq)nF#bo?2$ zcE}I~k3hHhz~J)9F|~;c)2N7lj1ekefM7*@txA#+6sm=MV^l29ZcKCEu@o#yr-Wq!ffzvQ=`D>KvDp1SEa25I^glD~v(Q|U#4t9O;^J!vNLHEpK_!Z_lW9jGj8D=f@Ey)a> zkpvlcpHx!@!h=Pl9gWzGShj1H(S}7I#v760@o(MRLRZ?Q{o&|4DUZWK=G7(c(1DQ*cjmvA#h-25thJEYZ->hHO zxnS{LRPfWs(-qg?V;|l1INLP@ya=(RC~Zx#wt_sVc5Ojl=$tAkTxks2zq}nE8>o05 zk>(XrSu2-o9PzsQHcaD3KW&Mmh*k9r*b}f&snTGbHaR%@Y$ee{Eyw;?wI!u0w3Mv? zIL^}^8$eSyJ9e%jrR6(D_McAs79J$);OPnr4F-%;vN+=+Iu5Sb@CttT!FKM_zD zvNLE1sg0!y3iwy(X^}KSM{MgJ{8~ zH;dnIn#pjl(tAQ2cVcayerg%XYgelieo~^b@+xxOk$`Rmy%*;Oo=C^a?!esT(QN)fBC!#}I;N(btVZp6{a__QEed_nSef{5u zhvI5){rc}DhR?579h$!AC@^%Nmevk|a|4rjJ$?9>x^&CIn#>eF zDT%#|Pj>;_b*LdAj%No_3sEzA4pEOZ*Kh@$e;!iARzgvLa@v#Y{UOYL`raBq{z}I6m`7pmR=r4~PJ${x#_4Q1Y_?GYq75vA+tj6^Xyr`~XNM*s|`cmMzq0JBSC{QAvsG9dO9 z&m<~sDqca3SaloM#QVnOK3{W`m{waVt?h~E;(cl(;sNdyMD~cv+GF|sia?;?jF;KE zm;LS{>;#)I-SpC6$Fz!*fW>Z&k*sp>501Zn6WaX?NQfUDs;l6CH2u+Pm!)HlBVfBj z)3rV(rGSX;aV8vo&QqP|K(ZOc@Xu>#U0bKKiESg%+axXU^#9=Kp*l?%c(gUp6Z~lQ zUoe%09V&8(wtll3t(lxa__50$mbqhImKA8y2FPHZi^&7yv^*ErUr4igE<4UpNVe#2 z?);|<4b4A%ofFL8<$Zgc+69{zwVct#>lsGa9;aQ?*eU-AEL z!F7cxG)-ks+4JACh;;om!As1na|42QSwu5oenK371lPgZQH&EH1W(WW?+DjYCwgu9 zyY8&Ozb%eu!u;9l__i8@?#kzw@4|1u*Z?#l)56TF>t@fBj>J!LivNbhTw)w?9lIaL z%N|D}^-uIzB1UfPf!x?@0Af4iKT*Q<#~$JwLmR*Uj3n}e zM2=)9E+HWyrm?XR*j}k87NIcX_Wo*zf&7b4C8!i^^md%Qy!;liP1(LhY?;=qRq}>q z1N<@iBPZ0%*rX~kGHugH=yP;~OdVDClJWyQg4(>$-%4H-7=zA=icKwF*zgT53!{D{ zNy^s_gJ+PUix%*`dFa3IqixhVqBp3+nm;CJb5);N-!^eVuE7Lcrv`!xJQgGbpf?sa zHl1@;6AQd+3r}?WO@mX>JEHNM5sPa}MB3szyD7p^5og>ia}IsLv9kaiSbveFxSUPA z<#?-tIEEuTYDX7WgrSP5vs!>5YA@Lr961(yqpy5(s9%dpTmq!CPQ4;C&NTLf$BfM^jSB0*IBcl*W0eo z?gtEd(zIZeEH<_x&!v2onwTeT?02M;=Vh#-RA+0{o)J}I8lp8!HUT?`)sqFh zxT8T2SL&jN-C+fPcnYo}aV(k1qOVDg;WMh5;~8f)HgT~xc`6K`#&eucSJ_e2^>wjn zsRU>dy>`e4u=(}F$(_Lrd!jSN{NrKx;L4k6FN%iy0yDW`kg>Y{4Sc?!6@YV_{NaQ3 zo$06@kt~iw?7HK6_J3FdF*i>*R2@i%T?}&#V;M=8&wS{6a%@^4#q)^j88eMT-o)VFgdbFBZ<-^+lSYT}8>_(BnZkHXgUe7UJTID=d(60iC{$gJ% 
zD>d~|2I;@3_pfjtAZ~_!Kzxh6rPcc6t~`17y;C3nr*(FmX|Opn*WM0J5gHVA^CG1+ zkM_)eH+D9{zeu8L1$}&pfkFpLfq&22Yvd7;Ph8qQp>e0UI8B2uZ_PAHP9)aYt){6BkSlm}i;p>d$Yr7N zl1VzJ-?RDxUe9e2|N8krW&UiLGiDI`Ap@k1ZGJJP+-)PKkeVCpb0cm8&J@YI(F@34 z?Qle($;C5~o-@L?G#vs>&|BGb_$5qjpCb^&jh|=$;#W55R|Xj9(OsbfoP|WJ<|h}x zl3hSNI)G_^r;p0qkpi@weddsiD0VC3pC%26HMslCA}Hot9MfSAki?!FyK6aSX-dg; z8Gv*SQxK9Ck)HMuO)I>f9Uz#oa>#8-3z@W0%Et34w_H;m@TP z05t+bNu%3x{-W>yHL&L?Cjw1e*n`YmEd*RW@H=NI>naYY8nuZPwWGbXZ5AXe!5lU` zvz@CdF>Ks+J6C|gW&(!2j9L(WlMIXlO^_YO`#Zp+LB$g9F%Uf4kWe2Av57v6{FmHu zWB+_1kL4CT9vcq{zZmwv2CH$|gmmVNz$Aw(S4jYW*gRW;STs2Tzy9<&B*vwS_eOi9 znz@XqY(aOYu8inx(WX%4KGdudmz?q&O6q`)0)^puq=&~6p!%J&-6Mm;uvkv6ux)bz z)y*l?l+H8U$JoN!V7rYYNB4}c5eO{3jq**KlUDy!p}8tMo2?B1*bKIZ4lo__3qawB z!wj9d>v&d@hWmHa#*g9Whg+XSsok;Hp?3APdgMvp7>xWV5hcxr057i&nRXD7jC-7T>W}^kd_!4 zh>vtINL<<(_e9{@?959p){*tk^6h)E-u=8LQfDuP!--TaY-sSJWTz(Fi2qGqlKl7= zUW`%(+&Bu>gv2$epIfp&*tWuSGjNG4iLc*{Fe*zy(5EKD{a~6#!gc3F1ohVegLs18 zelw)QrcM8?cgMx0*WpXcBL(95CzZ8GX-MWk2;U}?fz{Tt$(H{auBT@5AXsQ=oM^b| zm?%OnCXe*42Y~N|S!NKji|&<|Q6&W~xQbHy3JF~WhWK9ao;~9z*@hEG^r2ss(*vmf zNg>ItI0}W5q!g2JhCXfzQrVFzFb%ku3asOJ<1-Ha z_k&oqg*P5cU@tjqV?(t#?M`y22SHs!cQk)VygDHeH(I0_hhQ+TuFSYovmCE(>zFXI z9DgD~x_1aQ@VLJ^bGWa%eLfu!tdUUGs3O({J&eK^rVr9G)sw5QA7Jx(ZZ9Z5xPkK- zISbOZ1(lm>XS^)}MhotdQ(cVt*B?p6WW&|1cD;} zYRS6mv-XntK^G@dTmEsC-5|9%>ng%v>FukvY^M9X^?x#5FS^~=58@5bdD962gHh;) zYMYf<^_}B;5SpoGuLfVVH^v5rsn@-{eR(O`9$ zR^w6R*SVv^DN$r?-f0KO{eM)1zn2j-Uabu$cJQSdMFW2V)w_G9wj4mSOoB(x{bZ)z z41hvgRb%OaQl-=dg~b(#V;di19o!&iFPo)pPQiW7T`zO-WYo3g@sz18iU*w3TwX6Z z#{ZTR%*FL>jQE>{BCD(mqR6QGJ7(=$@h2=Mng5t3U6Z}e8dRnES+DD_Xz8J=2}VOc zKfU#Few6;Me67y`m8AC8JV8l`U4k7?@UO0sBcy)Bj;9{9l3yxY4eb@AGZup7@i<)O zs4jVqR{won?R`Oa(G>ML%{Sov0opbr;}D-J40)PTYPdQa&h_5groX@OCZmu@nqO#nM|G)Xte4%Y>9+miqQfQS$H;s7cT(}KCX5s zUro(5A=ZA(>2M@P{|iyub};U3)L<7To&*2mX*+I2l-~eJm$3Skg#VELh=_S6*imV6 zh^3x9qGy+L^nbyke{*67Q6#U}UyN&lTj3Iz#rQw*>dDm#zd5!QJFa^waLX7Fxe)QR zg6{;a82A7xP*TM|4~(M+k;fip&)Xt14S^K{P@hDv*DW>8mi-&q_*)4i}?IRfaY{a!nc0bIRG@1C#{1tJ>r*LBLrX 
zp80=j6lG>`-y>+LVu5>pGev=)YT14Va0>(pxQ-*2$ZT<5J=MI#pVNq zy*A?0srXyYGXc(o;;jVp>EHQkAGr2;U4>YXSHD0lbJE;j$o}7n`k~*{JE#Gj_$vk7 z?HymtIh{h+^E0`J0%SCMX{jkAm%%#kSElrfrCz;!@b$O#3;GJ{W1Z^27Qkubej(-u zBZ>4dTddWg6eT8TSNm4x@~lSQzjeOE+XnmvNU12lKjb{2s{qhRpaJxtFWOmifZqL< zfZ*bll8swAC^m+@R7U?r&@mq@;_DydXL-5DMS3J8-k*iYRTZ0DUpJdSjYxD;vw?YS@^zgMK}-#SjPO4!P4HODWXS8_Ub!eyog-zQpOo`B~5`T z{1*yf1}AyBjlpyHCD6*>umkb6c61Ynq1gKmGJn5#Cj90cfYc4FTph_BcvG%}s+PZ8 z0+XDwzh#a{7Er1<9Vj8bkec(r<3l=#1yY&`Xid7whh;Rxh6Y$+R$SsI*R(5rSm%>a z8>=+nG70L%RPuM|OLMd@0y}|=oSdAK=JL$U%sQT2_HAQr#gxV=tf#YY(|SB zin>$^r5A<^!v;%lYfLT}Z^{ac7bJ8}&>=X%oS?{c%EWqcdkwVl^iw`;I`B=JHYaoL zuCU_DU#EAjO^D;dqk2Ggvs91HC`RqDLcjZjMgb3jUd4UzWSzqH1+>P}*gD1PA~TmC znh10aimmhOtHD-4L#c3+MF$&Qg+LWewoC}d{^XKu{H#iL_N8-kP`cvk`gcR@T{&_e z!%`uPT7;YY7Sg?Lc8q$!k2b@$7Q8+VetAWZTK13epzY;7@RYk%wYMse(HWEq+Ox2& zFr>#CC91$vrr42Qq3ZT)!UjJ^>I37_Dv)@Gnd*rw@mUxgzMU;G_(95rF(;0vWtOp1 z@Hr?$f|dmTprOFy4&3wm}{tW-K7_65+po zSgcO34dlftgC@m0C%s|OY$@Xic?*Q5`NIH(I{U@k6kA3I5ydKAxck;Sl%<%*cu>oH z-;oiCFT?@6f)@oO;2l#c2G{9x3qL_j(>5ziC-*N;yIr4X>xoc#ry{;0d>dHWidWtV{nfe;{vkz zOY|xxT2D{)&Fzj`9=&3PF2^LX2S4?p%Rwsp#_91qd}S~-%S-2TncbBH?(4=;?JHMY zNG}z34BiM-O$0$#%sc;52ae65<59Pq!^1ZS%Rg=v7f9g=V$V#s z__V|HuE7;a;22;^yIc!ao}RE9Ou98`?PE@Rc^K}!=-v5|2Oe2zbOB z?gbWtzwU#Ka8S8{4G~F)Etdj~tm3Rj-Zl8%rgLby;)UR^zazwXblUdOEH~vGJJ=K4 zGkS?N`VdY;5S#7R;c!rhAn=w=2v7OEpPpFw zt7hCbQiRkaMv!+(mkdz;aG#}CM!T(5*-kwq=(0^!y3&mrW9+Q(pmz%DIrnv0bpr0Xbv5kef#Pdsk6 z1kwdgA7s-Z}0SO%d zp@2DXBw~N75nCb-LKwjJJ%9!7W*)Qoyuhe1#G{P?^&6fw&%o;VJrP5KmFg$is)o9y zIcE2T-y%)lDwhnQE)x$H^VBZ35;THhZPjX&P0j(u>3I~;v=RdYY4yO1pq(pxyseLO zcc(gYY@oc?+SndUNjmhsgR-~aMUIjHlc2}8Io=pxNKpOZXP-$N5`J@NYlU5fOu6Sz z$36uM2z&`vjmAC#l^vkqBBr*F-HDzAvaL_VpKhuXEmygZcKs{5P8~B()h+$Um<~4j z(&VlC3$B6P5vK6v9uw4c{7Z?c8(jCosNDuFk^1Qg9!lV8erl&IjY*@NG9@ICzl@C` z@Y_x;4H)ZCu41#ZiznkIZciDwWGM{}#dC~+`{#$K7j@@F%rvqI zk=ag9g1ltol&+I=BOElV@2G2ub?KDyh4%j6=B$`#Vxf;7*-xpJej}|vQl=N&;qoZ_0I0kuaW4hZ4UZLQZ;q;B%WDmXm zE>bf1Qc%JkMuNf)>O?LQ(3XkT%;!{PvSfF`>gNb}uu!VmS+%?H(X3kwBmMQmEh@gZ 
z5VhxtH+fBEI79~H(r0~lxgH4) zytcR1PL>OqogP)<;nCi(xv{XcKRODoZ+X{iY23Ot-VQ;woX*4S{z2l4*HXUPE1Gj_ zFr#U1Gzm$@Gb;iH)6dKFhKyr^ExVuS+G@7`{F1s}wXiV?>Oq+`H zj61oc7r`HE`&!=?NRXY>D;^@eeBl=mv|HaX!t%9}GFP;9uKq)EEkL^)ZSNm(E|yF}-8t_U`V_8o+;W!F?)=+p~GWd&iT05JaVjnP#GGa_vp6X1sY$OpZLmGsc)t zH|+AN1xMo5)SC-014Rmvi6Y}!{60U!=FES2GhS-kM`TOTbrQC`74HiMmk4k@9sJ~4 zZDEKTp+##2LSQ>HUYR%sd2=W6=)OLh>hV3c-7Lu?EL&C|xIUt{>OMyE-aj&@20!i? zK8!7+&sRmVelk7fauTYRe!bU&d3BM4o3tV=}?K8vj*tMmV`6m?dZkIf@h$!hmj>wR6i zaK8|{^HsxS1I>0}VE2aO?v4(_{@ToeD`dYX#39ts>bf{>o26%vDz3Zo<%{6=ZpMs& zQn_S}cS_`MhwPe4zTV%|qn!#EJn*eQ6*d>XVD154kiZbrAa;kkQ$rYHcSXJDx9OI9ObIN%JPldN{tqk*dsKtdaLh*KFs) jX&3#w=>?RVRhuHlX1xbR)vItE;Lj Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - content = tool_parameters.get("content", "") - if not content: - return self.create_text_message("Invalid parameter content") - - hook_key = tool_parameters.get("hook_key", "") - if not is_valid_uuid(hook_key): - return self.create_text_message(f"Invalid parameter hook_key ${hook_key}, not a valid UUID") - - message_type = tool_parameters.get("message_type", "text") - if message_type == "markdown": - payload = { - "msgtype": "markdown", - "markdown": { - "content": content, - }, - } - else: - payload = { - "msgtype": "text", - "text": { - "content": content, - }, - } - api_url = "https://qyapi.weixin.qq.com/cgi-bin/webhook/send" - headers = { - "Content-Type": "application/json", - } - params = { - "key": hook_key, - } - - try: - res = httpx.post(api_url, headers=headers, params=params, json=payload) - if res.is_success: - return self.create_text_message("Text message sent successfully") - else: - return self.create_text_message( - f"Failed to send the text message, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to send message to group chat bot. 
{}".format(e)) diff --git a/api/core/tools/provider/builtin/wecom/tools/wecom_group_bot.yaml b/api/core/tools/provider/builtin/wecom/tools/wecom_group_bot.yaml deleted file mode 100644 index 379005a1021420..00000000000000 --- a/api/core/tools/provider/builtin/wecom/tools/wecom_group_bot.yaml +++ /dev/null @@ -1,64 +0,0 @@ -identity: - name: wecom_group_bot - author: Bowen Liang - label: - en_US: Send Group Message - zh_Hans: 发送群消息 - pt_BR: Send Group Message - icon: icon.svg -description: - human: - en_US: Sending a group message on Wecom via the webhook of group bot - zh_Hans: 通过企业微信的群机器人webhook发送群消息 - pt_BR: Sending a group message on Wecom via the webhook of group bot - llm: A tool for sending messages to a chat group on Wecom(企业微信) . -parameters: - - name: hook_key - type: secret-input - required: true - label: - en_US: Wecom Group bot webhook key - zh_Hans: 群机器人webhook的key - pt_BR: Wecom Group bot webhook key - human_description: - en_US: Wecom Group bot webhook key - zh_Hans: 群机器人webhook的key - pt_BR: Wecom Group bot webhook key - form: form - - name: content - type: string - required: true - label: - en_US: content - zh_Hans: 消息内容 - pt_BR: content - human_description: - en_US: Content to sent to the group. - zh_Hans: 群消息文本 - pt_BR: Content to sent to the group. 
- llm_description: Content of the message - form: llm - - name: message_type - type: select - default: text - required: true - label: - en_US: Wecom Group bot message type - zh_Hans: 群机器人webhook的消息类型 - pt_BR: Wecom Group bot message type - human_description: - en_US: Wecom Group bot message type - zh_Hans: 群机器人webhook的消息类型 - pt_BR: Wecom Group bot message type - options: - - value: text - label: - en_US: Text - zh_Hans: 文本 - pt_BR: Text - - value: markdown - label: - en_US: Markdown - zh_Hans: Markdown - pt_BR: Markdown - form: form diff --git a/api/core/tools/provider/builtin/wecom/wecom.py b/api/core/tools/provider/builtin/wecom/wecom.py deleted file mode 100644 index 573f76ee56da67..00000000000000 --- a/api/core/tools/provider/builtin/wecom/wecom.py +++ /dev/null @@ -1,7 +0,0 @@ -from core.tools.provider.builtin.wecom.tools.wecom_group_bot import WecomGroupBotTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class WecomProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - WecomGroupBotTool() diff --git a/api/core/tools/provider/builtin/wecom/wecom.yaml b/api/core/tools/provider/builtin/wecom/wecom.yaml deleted file mode 100644 index a544055ba4cb67..00000000000000 --- a/api/core/tools/provider/builtin/wecom/wecom.yaml +++ /dev/null @@ -1,15 +0,0 @@ -identity: - author: Bowen Liang - name: wecom - label: - en_US: Wecom - zh_Hans: 企业微信 - pt_BR: Wecom - description: - en_US: Wecom group bot - zh_Hans: 企业微信群机器人 - pt_BR: Wecom group bot - icon: icon.png - tags: - - social -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/wikipedia/_assets/icon.svg b/api/core/tools/provider/builtin/wikipedia/_assets/icon.svg deleted file mode 100644 index fe652aacf9c871..00000000000000 --- a/api/core/tools/provider/builtin/wikipedia/_assets/icon.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - \ No newline at end of file diff --git 
a/api/core/tools/provider/builtin/wikipedia/tools/wikipedia_search.py b/api/core/tools/provider/builtin/wikipedia/tools/wikipedia_search.py deleted file mode 100644 index cb88e9519a4346..00000000000000 --- a/api/core/tools/provider/builtin/wikipedia/tools/wikipedia_search.py +++ /dev/null @@ -1,105 +0,0 @@ -from typing import Any, Optional, Union - -import wikipedia - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - -WIKIPEDIA_MAX_QUERY_LENGTH = 300 - - -class WikipediaAPIWrapper: - """Wrapper around WikipediaAPI. - - To use, you should have the ``wikipedia`` python package installed. - This wrapper will use the Wikipedia API to conduct searches and - fetch page summaries. By default, it will return the page summaries - of the top-k results. - It limits the Document content by doc_content_chars_max. - """ - - top_k_results: int = 3 - lang: str = "en" - load_all_available_meta: bool = False - doc_content_chars_max: int = 4000 - - def __init__(self, doc_content_chars_max: int = 4000): - self.doc_content_chars_max = doc_content_chars_max - - def run(self, query: str, lang: str = "") -> str: - if lang in wikipedia.languages(): - self.lang = lang - - wikipedia.set_lang(self.lang) - wiki_client = wikipedia - - """Run Wikipedia search and get page summaries.""" - page_titles = wiki_client.search(query[:WIKIPEDIA_MAX_QUERY_LENGTH]) - summaries = [] - for page_title in page_titles[: self.top_k_results]: - if wiki_page := self._fetch_page(page_title): - if summary := self._formatted_page_summary(page_title, wiki_page): - summaries.append(summary) - if not summaries: - return "No good Wikipedia Search Result was found" - return "\n\n".join(summaries)[: self.doc_content_chars_max] - - @staticmethod - def _formatted_page_summary(page_title: str, wiki_page: Any) -> Optional[str]: - return f"Page: {page_title}\nSummary: {wiki_page.summary}" - - def _fetch_page(self, page: str) -> Optional[str]: - try: - 
return wikipedia.page(title=page, auto_suggest=False) - except ( - wikipedia.exceptions.PageError, - wikipedia.exceptions.DisambiguationError, - ): - return None - - -class WikipediaQueryRun: - """Tool that searches the Wikipedia API.""" - - name = "Wikipedia" - description = ( - "A wrapper around Wikipedia. " - "Useful for when you need to answer general questions about " - "people, places, companies, facts, historical events, or other subjects. " - "Input should be a search query." - ) - api_wrapper: WikipediaAPIWrapper - - def __init__(self, api_wrapper: WikipediaAPIWrapper): - self.api_wrapper = api_wrapper - - def _run( - self, - query: str, - lang: str = "", - ) -> str: - """Use the Wikipedia tool.""" - return self.api_wrapper.run(query, lang) - - -class WikiPediaSearchTool(BuiltinTool): - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - query = tool_parameters.get("query", "") - lang = tool_parameters.get("language", "") - if not query: - return self.create_text_message("Please input query") - - tool = WikipediaQueryRun( - api_wrapper=WikipediaAPIWrapper(doc_content_chars_max=4000), - ) - - result = tool._run(query, lang) - - return self.create_text_message(self.summary(user_id=user_id, content=result)) diff --git a/api/core/tools/provider/builtin/wikipedia/tools/wikipedia_search.yaml b/api/core/tools/provider/builtin/wikipedia/tools/wikipedia_search.yaml deleted file mode 100644 index 98d002df1c0daa..00000000000000 --- a/api/core/tools/provider/builtin/wikipedia/tools/wikipedia_search.yaml +++ /dev/null @@ -1,101 +0,0 @@ -identity: - name: wikipedia_search - author: Dify - label: - en_US: WikipediaSearch - zh_Hans: 维基百科搜索 - pt_BR: WikipediaSearch - icon: icon.svg -description: - human: - en_US: A tool for performing a Wikipedia search and extracting snippets and webpages. 
- zh_Hans: 一个用于执行维基百科搜索并提取片段和网页的工具。 - pt_BR: A tool for performing a Wikipedia search and extracting snippets and webpages. - llm: A tool for performing a Wikipedia search and extracting snippets and webpages. Input should be a search query. -parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 查询语句 - pt_BR: Query string - human_description: - en_US: key words for searching - zh_Hans: 查询关键词 - pt_BR: key words for searching - llm_description: key words for searching, this should be in the language of "language" parameter - form: llm - - name: language - type: string - required: true - label: - en_US: Language - zh_Hans: 语言 - human_description: - en_US: The language of the Wikipedia to be searched - zh_Hans: 要搜索的维基百科语言 - llm_description: >- - language of the wikipedia to be searched, - only "de" for German, - "en" for English, - "fr" for French, - "hi" for Hindi, - "ja" for Japanese, - "ko" for Korean, - "pl" for Polish, - "pt" for Portuguese, - "ro" for Romanian, - "uk" for Ukrainian, - "vi" for Vietnamese, - and "zh" for Chinese are supported - form: llm - options: - - value: de - label: - en_US: German - zh_Hans: 德语 - - value: en - label: - en_US: English - zh_Hans: 英语 - - value: fr - label: - en_US: French - zh_Hans: 法语 - - value: hi - label: - en_US: Hindi - zh_Hans: 印地语 - - value: ja - label: - en_US: Japanese - zh_Hans: 日语 - - value: ko - label: - en_US: Korean - zh_Hans: 韩语 - - value: pl - label: - en_US: Polish - zh_Hans: 波兰语 - - value: pt - label: - en_US: Portuguese - zh_Hans: 葡萄牙语 - - value: ro - label: - en_US: Romanian - zh_Hans: 罗马尼亚语 - - value: uk - label: - en_US: Ukrainian - zh_Hans: 乌克兰语 - - value: vi - label: - en_US: Vietnamese - zh_Hans: 越南语 - - value: zh - label: - en_US: Chinese - zh_Hans: 中文 diff --git a/api/core/tools/provider/builtin/wikipedia/wikipedia.py b/api/core/tools/provider/builtin/wikipedia/wikipedia.py deleted file mode 100644 index 178bf7b0ceb2e9..00000000000000 --- 
a/api/core/tools/provider/builtin/wikipedia/wikipedia.py +++ /dev/null @@ -1,20 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.wikipedia.tools.wikipedia_search import WikiPediaSearchTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class WikiPediaProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - WikiPediaSearchTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "query": "misaka mikoto", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/wikipedia/wikipedia.yaml b/api/core/tools/provider/builtin/wikipedia/wikipedia.yaml deleted file mode 100644 index c5828240225d00..00000000000000 --- a/api/core/tools/provider/builtin/wikipedia/wikipedia.yaml +++ /dev/null @@ -1,15 +0,0 @@ -identity: - author: Dify - name: wikipedia - label: - en_US: Wikipedia - zh_Hans: 维基百科 - pt_BR: Wikipedia - description: - en_US: Wikipedia is a free online encyclopedia, created and edited by volunteers around the world. - zh_Hans: 维基百科是一个由全世界的志愿者创建和编辑的免费在线百科全书。 - pt_BR: Wikipedia is a free online encyclopedia, created and edited by volunteers around the world. 
- icon: icon.svg - tags: - - social -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/wolframalpha/_assets/icon.svg b/api/core/tools/provider/builtin/wolframalpha/_assets/icon.svg deleted file mode 100644 index 2caf32ee67be0a..00000000000000 --- a/api/core/tools/provider/builtin/wolframalpha/_assets/icon.svg +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/wolframalpha/tools/wolframalpha.py b/api/core/tools/provider/builtin/wolframalpha/tools/wolframalpha.py deleted file mode 100644 index 9dc5bed824d715..00000000000000 --- a/api/core/tools/provider/builtin/wolframalpha/tools/wolframalpha.py +++ /dev/null @@ -1,72 +0,0 @@ -from typing import Any, Union - -from httpx import get - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.errors import ToolInvokeError, ToolProviderCredentialValidationError -from core.tools.tool.builtin_tool import BuiltinTool - - -class WolframAlphaTool(BuiltinTool): - _base_url = "https://api.wolframalpha.com/v2/query" - - def _invoke( - self, - user_id: str, - tool_parameters: dict[str, Any], - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - query = tool_parameters.get("query", "") - if not query: - return self.create_text_message("Please input query") - appid = self.runtime.credentials.get("appid", "") - if not appid: - raise ToolProviderCredentialValidationError("Please input appid") - - params = {"appid": appid, "input": query, "includepodid": "Result", "format": "plaintext", "output": "json"} - - finished = False - result = None - # try 3 times at most - counter = 0 - - while not finished and counter < 3: - counter += 1 - try: - response = get(self._base_url, params=params, timeout=20) - response.raise_for_status() - response_data = response.json() - except Exception as e: - raise ToolInvokeError(str(e)) - - if "success" not in 
response_data["queryresult"] or response_data["queryresult"]["success"] != True: - query_result = response_data.get("queryresult", {}) - if query_result.get("error"): - if "msg" in query_result["error"]: - if query_result["error"]["msg"] == "Invalid appid": - raise ToolProviderCredentialValidationError("Invalid appid") - raise ToolInvokeError("Failed to invoke tool") - - if "didyoumeans" in response_data["queryresult"]: - # get the most likely interpretation - query = "" - max_score = 0 - for didyoumean in response_data["queryresult"]["didyoumeans"]: - if float(didyoumean["score"]) > max_score: - query = didyoumean["val"] - max_score = float(didyoumean["score"]) - - params["input"] = query - else: - finished = True - if "souces" in response_data["queryresult"]: - return self.create_link_message(response_data["queryresult"]["sources"]["url"]) - elif "pods" in response_data["queryresult"]: - result = response_data["queryresult"]["pods"][0]["subpods"][0]["plaintext"] - - if not finished or not result: - return self.create_text_message("No result found") - - return self.create_text_message(result) diff --git a/api/core/tools/provider/builtin/wolframalpha/tools/wolframalpha.yaml b/api/core/tools/provider/builtin/wolframalpha/tools/wolframalpha.yaml deleted file mode 100644 index 08b5668691e23a..00000000000000 --- a/api/core/tools/provider/builtin/wolframalpha/tools/wolframalpha.yaml +++ /dev/null @@ -1,27 +0,0 @@ -identity: - name: wolframalpha - author: Dify - label: - en_US: WolframAlpha - zh_Hans: WolframAlpha - pt_BR: WolframAlpha -description: - human: - en_US: WolframAlpha is a powerful computational knowledge engine. - zh_Hans: WolframAlpha 是一个强大的计算知识引擎。 - pt_BR: WolframAlpha is a powerful computational knowledge engine. - llm: WolframAlpha is a powerful computational knowledge engine. one single query can get the answer of a question. 
-parameters: - - name: query - type: string - required: true - label: - en_US: Query string - zh_Hans: 计算语句 - pt_BR: Query string - human_description: - en_US: used for calculating - zh_Hans: 用于计算最终结果 - pt_BR: used for calculating - llm_description: a single query for calculating - form: llm diff --git a/api/core/tools/provider/builtin/wolframalpha/wolframalpha.py b/api/core/tools/provider/builtin/wolframalpha/wolframalpha.py deleted file mode 100644 index 7be288b5387f34..00000000000000 --- a/api/core/tools/provider/builtin/wolframalpha/wolframalpha.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Any - -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.wolframalpha.tools.wolframalpha import WolframAlphaTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class GoogleProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - WolframAlphaTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "query": "1+2+....+111", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/wolframalpha/wolframalpha.yaml b/api/core/tools/provider/builtin/wolframalpha/wolframalpha.yaml deleted file mode 100644 index 91265eb3c00d0a..00000000000000 --- a/api/core/tools/provider/builtin/wolframalpha/wolframalpha.yaml +++ /dev/null @@ -1,32 +0,0 @@ -identity: - author: Dify - name: wolframalpha - label: - en_US: WolframAlpha - zh_Hans: WolframAlpha - pt_BR: WolframAlpha - description: - en_US: WolframAlpha is a powerful computational knowledge engine. - zh_Hans: WolframAlpha 是一个强大的计算知识引擎。 - pt_BR: WolframAlpha is a powerful computational knowledge engine. 
- icon: icon.svg - tags: - - productivity - - utilities -credentials_for_provider: - appid: - type: secret-input - required: true - label: - en_US: WolframAlpha AppID - zh_Hans: WolframAlpha AppID - pt_BR: WolframAlpha AppID - placeholder: - en_US: Please input your WolframAlpha AppID - zh_Hans: 请输入你的 WolframAlpha AppID - pt_BR: Please input your WolframAlpha AppID - help: - en_US: Get your WolframAlpha AppID from WolframAlpha, please use "full results" api access. - zh_Hans: 从 WolframAlpha 获取您的 WolframAlpha AppID,请使用 "full results" API。 - pt_BR: Get your WolframAlpha AppID from WolframAlpha, please use "full results" api access. - url: https://products.wolframalpha.com/api diff --git a/api/core/tools/provider/builtin/yahoo/_assets/icon.png b/api/core/tools/provider/builtin/yahoo/_assets/icon.png deleted file mode 100644 index 35d756f75410dbdf74ca14c8fba6e660e20b27d8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7647 zcmV<59U$U~P)@~0drDELIAGL9O(c600d`2O+f$vv5yPY47U@2jewnGN83KVCjEO!wE-RbTzTv#Yyj zfo-&nw$V1)M*m;Q!&cH|&7!ua0?h)L3V}(orzZJbThz%y`2Vtli;M_Z@>^(u!hPB{ zyA6{Uv^Dz;&JFt@Lo2@Dn}NH=D4vH zLH}U!8sKAwZ9JuTo!mKqx{W8?ULYK~ipiK|i?(D;h+Xj8BC?&X82;)1n%%BUStsVXZ@dOr~ zOnE`eVV*leK7w(j@|*hm%P)oAfiI4=Y+oIOp9f?1LW)5>yJvYr~-Tk#Jfyl$`m-Q*FcZJ<(Cp z-8Z(iZ7jP!usoow56#$GQuqpHRi4>~eW)oLN(m+=$fRi;_CkKpPJ?CuXDGEe@L`OH zXz*c=r;IObltFD@GHGEOb6_|X10`W3)}SMgD8x#ZAs?ET{bZsOE(KY2Cph3_7d*T; z5-S8#Fg<2gG=Au++Vu?!d^IK+&zzfT;CWLGTeYu!PlowY&gb^3i_sFE79{ z3fHnol*d+#e+x~Qd>%6O06XmDHz1P+tblyW3#@+wP|9Q9#9~eGm^Hyr zR(8UkYAlq3MAll(#Hzs&bVwy5$ftJir_prz$$rvQ(@AZr!_puBSdV4Q!=nyHz&Gb$ zQt=89AQQE(4i#Ss!Iu|wKfJ6|@=?%P?4d7e3KFDWLGF+t4`1#lQ>Z`gHq_6! 
zm4V;&!6Yoc;8ygn-i|8^FciwIohjrKDL~L3ZN7^6L@1xk)j<}AsRBbV$hnd2Vcc@0 z!U$Vd6J!us1(`#vtQVutJ{`6DKMi{2(*DOo@X@tbAl+R9I%h=A&B1rgEuaB;fa{_J zCJ@Xx4a)~PCu}R+YdEIGI%S}Jo9&rLn0(%EvUVNDT{9cy6W)Rfu9*bZJa7_L-f=cW ze}c&y%Eyex02~J26T*dmfg3VHUtqCG3w@!&6Dyga00CGiAzT*2<9?1mX!oZv>NL50 zBKlchD)&&(1sGpX8pPtY$$NxXTK_&K+;d<6zLxEytFFe1Th50_B?6hKbr{&F^N8UQ zzFudtHBi_C)R=-bFadhZUS0sJTo^d=inIQ>D>1a?CLcnSjsGi_UUd)pSB|!< zny(oNO9nP+K|~A28#3*Fj6Cx)#FD*A+`cspSbX-~=w3Q8xO5Oo-cjQdSpI%L_aHhR z<*Rw>xw**g0ZhZB6EV=`g`hOFmKTI!Ex7}J93Js+7b@_5DjWR{#?Cter6b?d^ALJ` zVNRBM2eH`fs*Ez83`QJ(3r3wakAdI4cmh5+^FegAP1HslR8&)@mE+i}YlsXK8{yy^ z;0vI*5EPsc4$dv>vs+$>Rkow?2ghQ_!FR)4q%Em$!`Lgoi$RkYMvNB%OS(7Y4!2_^ zT@v{7F=EzF;3ayLV4Ggv2OpmPTl6h!U?DLP2M#&43ddbjgJh{^%)vAX|E#t0eB_#i?RI;`=M&5=d~26O7yNAul5sz*I~?$j=}Jw zZX|jm%M0K+L3s+{mr6yB|7i<`eCM~I^IduS9Q^Zw1+vz!X6?%=funys2;VxbN>`-> zd?DN=Xqa$S?l9*}$h8z<#8jdBcY$neqdTV^6ry+(_XF2P4vUyBV- zHA5LvAm6Z23Cy~_2BUXM!bQUHK59^|oFrWF2T>u2#5d+%YCz4ms-Td^@`F%9c*;!$ z>b`XwMqh9g1HW&@I4nN@5p=#i1?kQje00SfSo`p4dM489hfcdodc$laYSxl|VSu3x zXvp*jB=|EJ_dG*I$g@)OeB39a^+IGG(6^k!=0)4B-Ahbx4iec5=7h~nmuaL>SM76En zeIdr*axjuLALS+v!Nkv=+b?9SpO415=Sc83sB?RkOvH!h{0>{*+(ii;8r0-N%5e6t z>M&$X98MDSdWK-7oLX4(*GjW*!N(l^gT-qhSSNyL)m&VIh^}_R?RZ58MqhjqhBfO- z$3L+C@e|QD?|$@es0F>)3d_#55BbM`iPZXfC2MlnDonU*8me}Cm3UfChIrX#H2r7} zMjUqo8>i#>X|mG!HF{QT$Glmjf&GuI!U@;ZB3a^mRw100ZT$|9V4gy)qL2c@6NKez zxs$CGKLQr_0zteacOW@*wG25kP`%slp|3dOSb67_SasJG$n?gI{sb{Jfz7XeRaPnw zqi^FtrueHyql2fsMU$mZ(ru&q#J9{ zd|?$`0&)`#Eg(i_;9{4^aOQ!8a|8l7J7V#RTwah=E|+&loqaq?hOSU&XL_o!to7&E z_}^0jW4O^1W7vMz1Q!0H7i7^Bz3rp%!C6nrYlH8h;!A(ZwvAV8!pPIEl$FwEYD#h|!uR`C)jc7Ra zdZ{3}X5`-?P~Z!6`^(4|-ip4}qxcNd0#lmHari~mveqy7e6j_P6a3Kl6yMVpNO3eM zPZFkmJC+wj{^bQ9i25Thmlnyv-~9UFG7jH|{`TRl--Il1TFxv(%UR`?kc|@)IQ8aA z)QyPoI4l2K^YAHHdd=O)^i{BP+Fx$Q2dDo=fgj47k$O7lD7)FbF>Bp9D)C|l_?oIevCiqnhK2EzHl6_tcl^28>;cp^__U(LA{RS|LgwX%lP1o2T`%( z+h~90AYGUTo>E;E!^yYTV%#oCd=e7W_~63Byb(p^u&HnrB-H;Rm7)&Hf1~D}!N5;# 
z8HQ!o--Ru2?Zt1L%6}urNkDI^Kviw=p&%|LKK#N;)C~3T)PioLdZ0pJ@0#tW8q|*)@GWqg2;jmof3T##FP7|9UHQujORu^Q zJyjwL=2;p1&DAof*gN&)oiAGTFAASA~i}1C93) zWP(p}5-b^nfv4SFp8El?Tub*WqMu#}8g>3-4=ld;ar9=v*B3(>dBf$v6H0LO#pPQK zen?;bN(tsJs6kC#4A!kmxxfF+3e1qJnV%Z?s_S4Z|FIiV`Q!~d!rfQQ*?De*#aSe6w|xTV{${YuAxR~7T&9zw z=2fHl+-iL4L_rSs!(9eG^$-dYCwCGyV;dQu(9;WRADD}cuO6Jek*zP%L^fv6xS$;4 zcmEXPutBxMV)*_~Yw(BLw&3lBy*N=WD~#W>1fK;77UE!lD(sj~+80*^K}4KvFE3b1 zmD?RJPh+0q)6kJId{0(7LmNNcP(Z0>cnrr}HVB<_GXqNMTSdX#=sg-3Ay}8DVIy}} zP7lbQ9NKz#g$KU8kb7LvbKF?ZZHJE+HjI(r|D*yn!#-nBkh_xpSAp*|QUvC!6cM0c zd5Wfjcf`WVg^Ls9c}VdL6!oX2^V0&PdEmq z$=e6~o3l&tjiXCHU*HEM9Lo*53?^U`y|Bbh$ej@_s|M>-jcUd>$@ACNTh1uOltVt7 zB>2Bh!iqW0YY<)$k!^z_t03V37epsK5epUbW?WQ`o%Z?kE1mxuN>I@ zMXUvp(W0zh@wlhdkNOno6;qP-zQOgUVQiQDt zF_0w8lZPOWB7$k#6A>>*e9pV@* zkDeP5khKFFgY(UKFwyk}Rf>b$Nmzu7NDMiLW%5DzqbeezJveA=QVt7p@&&XPI{2(l zL1YT=RtgTfnm64qu)JW*KZ-BdoJC;;*cM~#Gho3yLPWHQ0^g(E!stiMMl09!2oH8_ z_cuLNu~GR9KmZa(*<^_vCJyG@Oe}ihnJY1bu(0-3f~y9RK=>_5ut5FScDy>xKM(ftCP53axjpi z_(UdQ$plMr7GF>~jQPVoH|MsYtSDom;>ryaC|zB#v0z6LgPb9VPCytFa)EDwNf0Lvz$PsW3xNWGk2j<-j?p{h zZtheMj`4n}vIyX&5!fM1%gxV)b3Ri~tU=RGW#LS4|2eDKI}6I=1#Ixa@iZx-3d&!- zm~O_G5x7w9zw;!-d5?-zi0={W9QcnK?>C*C8_I&|>$Va?s>>lN) z+fLi*!rR+(k6#1;Tjgpv9(7egvEUT36aiQkE8NQq$^s-EFsmF>rsrIB{jxOfoU;+B z;C+dm_bb77=2U0f_xELR{Sh0oZzO(O^89CdeBfTVzYAk0m!Pg8XBYEltOHd+{TC9r z;L+h2vr~!eL*SV^H{;Q3JK)7-o3w|?(<(9j+!~Aso|5bB6nO6EoAC6{I?&mkQC_g) z{^e-?K^+=*EX~IC{vLsse!U5gU$+4rt5bSNdCK&2>(uyLHl*?T!<#kmHgs@Rc*?7b z2|*NmdwBtH#mJDq=&3$TJFPNnAj8MR(KsQ2CGYfS`rh7JY(&jx%`?)yu`Z#>(!)V#)nLpX zN!)VyYV=ENryNm<H_qcqE}s*YjjeM=R8(bK({ zeztyYv#TS6rGM>5Pglk*OF$VJDptOqVj=Y_$5f+oP%LZtA^5(vFL|vu7wRNU{7MB5 zIJE|Y>S8$J@}UZRzg^qwJ;(&h+)b0paKP!c>L-^EJiSi6$l8q zqsXCv5N@MH6dDy*wGuL3{H=aG{cAJIQ6`Q0Ye$uN*IWOF}O17KahAZ~}7`M+_sW98?$U#WT#9JwqHMylqoxk|C9_ej; za9;C-VVGLGFmgczML!imwdxEzWFTlIy!T2UI@V-RE8}{J3=_K@Scb_5Y3qOG;Z6*0 z&~L;1QGS|L*2+k_V*uDyQezQwK1K2eeI2hwxDx<8dbG%R7wSvl;nO6@K zcvFU-&Fi$4H||)1l8W5vwZr04ISD=qDpw}_<`eDsIIVV@b}G&O=#$^!@TMfnf_DQp 
ztxaR&TCKQoyOpY6Qm-GIM0r&ZmZVbrP&oYuL-5@zhT%)!sKndPbjr}zOF-6@oGQdf zSzL_ehvdExV=kp9YA-JsSAYS%B*V}_r&eTNiYu$qI(|_q#<$M*|J;kTEH~q_R2<~UCiRu-TBCc)mlr5ay9WpH+X>bMfD|F|EI zU%L)gb;I_-Lr{_AdQG(hstO7RGV_5@U|4xVGP({TzUwxuGd%S&RT9$@e z)UlBhl9+mI6$=qc9v8!4uKKVafB1?1RroJ{wH#At4^qls{`Wro>AsEFxH5%DuhPHF zGGe_^nsg1_@?6(EXV5Pa)TTUiQ_7TuON=7U5c5Wb<1 zXYT1ls#i<1=&2s8{IFl`|KY|>vWDNJlCFQ=yMOAznx(0181p~jO?dycrc4IR;uueU zo@i~y13%n=_7ytJ_*K(?-O+&?rZ1OaFP)v-!rMD=@Ap^B>l7`EU*Rlzd<(AJuMHbl z_v1}j>#tv$>u2@iKD=;mhYq>!Y_uq<4Bytl>Nz78c znQXX7mg;f+uoCv`C(U89e)p|*{gPBR(W>c_ip@{6L!`%5)OlFDq#s@FLA#(If7}rh zO60LV^lr(hV|mwn3Ms(Df(4nKXHk~xLhLzN{>@Zc4TVw-3_kM|&EMsKG6nudSrxta zN-tswdlE7gVA*@Qi!~p-bSA^xyI*=ye-EEvsz2}$(k_r9n;ch%o)%oadxG4+3Hr&% zhHF2P*DOmzK7V?_SN5`ZdI&DYoP~p5h%$KIWLY67-XDKNcGh7f+%N$O0Un2K@Z&NW z?mt^!)js!&El%+DgaiS{Q{zzut0x&)d!wCz%?AY^mNBV~FoeP&K|F^V91MO@RQ-s_ zalWf~=Hzv)a)`6ca6v67-yNYU8GiO`E?382d#o#41|n>x!Wo>3;8rVai(}&fYQvC; zoliVS2%FDCs_rM6F9rZSoZ%$j?M>-eYDoxw-k5JF#j`*G2a9Ywmz0BTLAM22 zQfB>z!`L8mkP5VC^D&iZgU<}GSQ6>63$SaO$#!0dwJsXmCZU|KfK@ZnLYd%0n2(Oo z_J-97@@oPP%c9^z=%*LBMF*$2z!Yw!h=}%lKH>Rp@CR~{rG@n7yTJD;@+sOxCVm_UwWo4 zi#i&q0RQjd`hSvD7}o_RXu=F521qVog#{E03pe|5reT=d}prJi&^QfV~hEg3q)7;t^oyXE&KQ zkhCOtJ2rm`K2bU~6hlAyB&)qX&#wNAF^Z-v_$^VPCnOI=&?%EvHpSE4v$BqD(tz5r zS_w>*MJ(C%c%kJD1U2muy6=2XFF1{WT0TrZ;Tij&9yB1}v*)%2mfu#WWl?X`-61_8 z8SmcJ^D^@R>Y||L1u(Y`SO5uA1ZW7*`^9E^GooN!n&42xrk^Y+973-Je1f<~5QU0V^P+IZaDy`^u8dGI8ze32zrEX_N5(2AK(RL2!)UQy_rnzTApnkNVN% z41jxsAj(4?BQVCKPtw`ckqtARw Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - symbol = tool_parameters.get("symbol", "") - if not symbol: - return self.create_text_message("Please input symbol") - - time_range = [None, None] - start_date = tool_parameters.get("start_date", "") - if start_date: - time_range[0] = start_date - else: - time_range[0] = "1800-01-01" - - end_date = tool_parameters.get("end_date", "") - if end_date: - time_range[1] = end_date - else: - time_range[1] = datetime.now().strftime("%Y-%m-%d") - - 
stock_data = download(symbol, start=time_range[0], end=time_range[1]) - max_segments = min(15, len(stock_data)) - rows_per_segment = len(stock_data) // (max_segments or 1) - summary_data = [] - for i in range(max_segments): - start_idx = i * rows_per_segment - end_idx = (i + 1) * rows_per_segment if i < max_segments - 1 else len(stock_data) - segment_data = stock_data.iloc[start_idx:end_idx] - segment_summary = { - "Start Date": segment_data.index[0], - "End Date": segment_data.index[-1], - "Average Close": segment_data["Close"].mean(), - "Average Volume": segment_data["Volume"].mean(), - "Average Open": segment_data["Open"].mean(), - "Average High": segment_data["High"].mean(), - "Average Low": segment_data["Low"].mean(), - "Average Adj Close": segment_data["Adj Close"].mean(), - "Max Close": segment_data["Close"].max(), - "Min Close": segment_data["Close"].min(), - "Max Volume": segment_data["Volume"].max(), - "Min Volume": segment_data["Volume"].min(), - "Max Open": segment_data["Open"].max(), - "Min Open": segment_data["Open"].min(), - "Max High": segment_data["High"].max(), - "Min High": segment_data["High"].min(), - } - - summary_data.append(segment_summary) - - summary_df = pd.DataFrame(summary_data) - - try: - return self.create_text_message(str(summary_df.to_dict())) - except (HTTPError, ReadTimeout): - return self.create_text_message("There is a internet connection problem. Please try again later.") diff --git a/api/core/tools/provider/builtin/yahoo/tools/analytics.yaml b/api/core/tools/provider/builtin/yahoo/tools/analytics.yaml deleted file mode 100644 index 89e66fb5814908..00000000000000 --- a/api/core/tools/provider/builtin/yahoo/tools/analytics.yaml +++ /dev/null @@ -1,54 +0,0 @@ -identity: - name: yahoo_finance_analytics - author: Dify - label: - en_US: Analytics - zh_Hans: 分析 - pt_BR: Análises - icon: icon.svg -description: - human: - en_US: A tool for get analytics about a ticker from Yahoo Finance. 
- zh_Hans: 一个用于从雅虎财经获取分析数据的工具。 - pt_BR: Uma ferramenta para obter análises sobre um ticker do Yahoo Finance. - llm: A tool for get analytics from Yahoo Finance. Input should be the ticker symbol like AAPL. -parameters: - - name: symbol - type: string - required: true - label: - en_US: Ticker symbol - zh_Hans: 股票代码 - pt_BR: Símbolo do ticker - human_description: - en_US: The ticker symbol of the company you want to analyze. - zh_Hans: 你想要搜索的公司的股票代码。 - pt_BR: O símbolo do ticker da empresa que você deseja analisar. - llm_description: The ticker symbol of the company you want to analyze. - form: llm - - name: start_date - type: string - required: false - label: - en_US: Start date - zh_Hans: 开始日期 - pt_BR: Data de início - human_description: - en_US: The start date of the analytics. - zh_Hans: 分析的开始日期。 - pt_BR: A data de início das análises. - llm_description: The start date of the analytics, the format of the date must be YYYY-MM-DD like 2020-01-01. - form: llm - - name: end_date - type: string - required: false - label: - en_US: End date - zh_Hans: 结束日期 - pt_BR: Data de término - human_description: - en_US: The end date of the analytics. - zh_Hans: 分析的结束日期。 - pt_BR: A data de término das análises. - llm_description: The end date of the analytics, the format of the date must be YYYY-MM-DD like 2024-01-01. 
- form: llm diff --git a/api/core/tools/provider/builtin/yahoo/tools/news.py b/api/core/tools/provider/builtin/yahoo/tools/news.py deleted file mode 100644 index ff820430f9f366..00000000000000 --- a/api/core/tools/provider/builtin/yahoo/tools/news.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import Any, Union - -import yfinance -from requests.exceptions import HTTPError, ReadTimeout - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class YahooFinanceSearchTickerTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - - query = tool_parameters.get("symbol", "") - if not query: - return self.create_text_message("Please input symbol") - - try: - return self.run(ticker=query, user_id=user_id) - except (HTTPError, ReadTimeout): - return self.create_text_message("There is a internet connection problem. 
Please try again later.") - - def run(self, ticker: str, user_id: str) -> ToolInvokeMessage: - company = yfinance.Ticker(ticker) - try: - if company.isin is None: - return self.create_text_message(f"Company ticker {ticker} not found.") - except (HTTPError, ReadTimeout, ConnectionError): - return self.create_text_message(f"Company ticker {ticker} not found.") - - links = [] - try: - links = [n["link"] for n in company.news if n["type"] == "STORY"] - except (HTTPError, ReadTimeout, ConnectionError): - if not links: - return self.create_text_message(f"There is nothing about {ticker} ticker") - if not links: - return self.create_text_message(f"No news found for company that searched with {ticker} ticker.") - - result = "\n\n".join([self.get_url(link) for link in links]) - - return self.create_text_message(self.summary(user_id=user_id, content=result)) diff --git a/api/core/tools/provider/builtin/yahoo/tools/news.yaml b/api/core/tools/provider/builtin/yahoo/tools/news.yaml deleted file mode 100644 index 4118c1a82f280f..00000000000000 --- a/api/core/tools/provider/builtin/yahoo/tools/news.yaml +++ /dev/null @@ -1,28 +0,0 @@ -identity: - name: yahoo_finance_news - author: Dify - label: - en_US: News - zh_Hans: 新闻 - pt_BR: Notícias - icon: icon.svg -description: - human: - en_US: A tool for get news about a ticker from Yahoo Finance. - zh_Hans: 一个用于从雅虎财经获取新闻的工具。 - pt_BR: Uma ferramenta para obter notícias sobre um ticker da Yahoo Finance. - llm: A tool for get news from Yahoo Finance. Input should be the ticker symbol like AAPL. -parameters: - - name: symbol - type: string - required: true - label: - en_US: Ticker symbol - zh_Hans: 股票代码 - pt_BR: Símbolo do ticker - human_description: - en_US: The ticker symbol of the company you want to search. - zh_Hans: 你想要搜索的公司的股票代码。 - pt_BR: O símbolo do ticker da empresa que você deseja pesquisar. - llm_description: The ticker symbol of the company you want to search. 
- form: llm diff --git a/api/core/tools/provider/builtin/yahoo/tools/ticker.py b/api/core/tools/provider/builtin/yahoo/tools/ticker.py deleted file mode 100644 index dfc7e460473c33..00000000000000 --- a/api/core/tools/provider/builtin/yahoo/tools/ticker.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Any, Union - -from requests.exceptions import HTTPError, ReadTimeout -from yfinance import Ticker - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class YahooFinanceSearchTickerTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - query = tool_parameters.get("symbol", "") - if not query: - return self.create_text_message("Please input symbol") - - try: - return self.create_text_message(self.run(ticker=query)) - except (HTTPError, ReadTimeout): - return self.create_text_message("There is a internet connection problem. Please try again later.") - - def run(self, ticker: str) -> str: - return str(Ticker(ticker).info) diff --git a/api/core/tools/provider/builtin/yahoo/tools/ticker.yaml b/api/core/tools/provider/builtin/yahoo/tools/ticker.yaml deleted file mode 100644 index 3c1ee9cf316be9..00000000000000 --- a/api/core/tools/provider/builtin/yahoo/tools/ticker.yaml +++ /dev/null @@ -1,28 +0,0 @@ -identity: - name: yahoo_finance_ticker - author: Dify - label: - en_US: Ticker - zh_Hans: 股票信息 - pt_BR: Ticker - icon: icon.svg -description: - human: - en_US: A tool for search ticker information from Yahoo Finance. - zh_Hans: 一个用于从雅虎财经搜索股票信息的工具。 - pt_BR: Uma ferramenta para buscar informações de ticker do Yahoo Finance. - llm: A tool for search ticker information from Yahoo Finance. Input should be the ticker symbol like AAPL. 
-parameters: - - name: symbol - type: string - required: true - label: - en_US: Ticker symbol - zh_Hans: 股票代码 - pt_BR: Símbolo do ticker - human_description: - en_US: The ticker symbol of the company you want to search. - zh_Hans: 你想要搜索的公司的股票代码。 - pt_BR: O símbolo do ticker da empresa que você deseja pesquisar. - llm_description: The ticker symbol of the company you want to search. - form: llm diff --git a/api/core/tools/provider/builtin/yahoo/yahoo.py b/api/core/tools/provider/builtin/yahoo/yahoo.py deleted file mode 100644 index 8d82084e769703..00000000000000 --- a/api/core/tools/provider/builtin/yahoo/yahoo.py +++ /dev/null @@ -1,20 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.yahoo.tools.ticker import YahooFinanceSearchTickerTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class YahooFinanceProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - YahooFinanceSearchTickerTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "ticker": "MSFT", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/yahoo/yahoo.yaml b/api/core/tools/provider/builtin/yahoo/yahoo.yaml deleted file mode 100644 index f1e82952c09ba4..00000000000000 --- a/api/core/tools/provider/builtin/yahoo/yahoo.yaml +++ /dev/null @@ -1,16 +0,0 @@ -identity: - author: Dify - name: yahoo - label: - en_US: YahooFinance - zh_Hans: 雅虎财经 - pt_BR: YahooFinance - description: - en_US: Finance, and Yahoo! get the latest news, stock quotes, and interactive chart with Yahoo! - zh_Hans: 雅虎财经,获取并整理出最新的新闻、股票报价等一切你想要的财经信息。 - pt_BR: Finance, and Yahoo! get the latest news, stock quotes, and interactive chart with Yahoo! 
- icon: icon.png - tags: - - business - - finance -credentials_for_provider: diff --git a/api/core/tools/provider/builtin/youtube/_assets/icon.svg b/api/core/tools/provider/builtin/youtube/_assets/icon.svg deleted file mode 100644 index 83b0700fecbf30..00000000000000 --- a/api/core/tools/provider/builtin/youtube/_assets/icon.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/api/core/tools/provider/builtin/youtube/tools/videos.py b/api/core/tools/provider/builtin/youtube/tools/videos.py deleted file mode 100644 index 95dec2eac9a752..00000000000000 --- a/api/core/tools/provider/builtin/youtube/tools/videos.py +++ /dev/null @@ -1,74 +0,0 @@ -from datetime import datetime -from typing import Any, Union - -from googleapiclient.discovery import build - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class YoutubeVideosAnalyticsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - """ - invoke tools - """ - channel = tool_parameters.get("channel", "") - if not channel: - return self.create_text_message("Please input symbol") - - time_range = [None, None] - start_date = tool_parameters.get("start_date", "") - if start_date: - time_range[0] = start_date - else: - time_range[0] = "1800-01-01" - - end_date = tool_parameters.get("end_date", "") - if end_date: - time_range[1] = end_date - else: - time_range[1] = datetime.now().strftime("%Y-%m-%d") - - if "google_api_key" not in self.runtime.credentials or not self.runtime.credentials["google_api_key"]: - return self.create_text_message("Please input api key") - - youtube = build("youtube", "v3", developerKey=self.runtime.credentials["google_api_key"]) - - # try to get channel id - search_results = youtube.search().list(q=channel, type="channel", order="relevance", part="id").execute() - channel_id = 
search_results["items"][0]["id"]["channelId"] - - start_date, end_date = time_range - - start_date = datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m-%dT%H:%M:%SZ") - end_date = datetime.strptime(end_date, "%Y-%m-%d").strftime("%Y-%m-%dT%H:%M:%SZ") - - # get videos - time_range_videos = ( - youtube.search() - .list( - part="snippet", - channelId=channel_id, - order="date", - type="video", - publishedAfter=start_date, - publishedBefore=end_date, - ) - .execute() - ) - - def extract_video_data(video_list): - data = [] - for video in video_list["items"]: - video_id = video["id"]["videoId"] - video_info = youtube.videos().list(part="snippet,statistics", id=video_id).execute() - title = video_info["items"][0]["snippet"]["title"] - views = video_info["items"][0]["statistics"]["viewCount"] - data.append({"Title": title, "Views": views}) - return data - - summary = extract_video_data(time_range_videos) - - return self.create_text_message(str(summary)) diff --git a/api/core/tools/provider/builtin/youtube/tools/videos.yaml b/api/core/tools/provider/builtin/youtube/tools/videos.yaml deleted file mode 100644 index 976699eb627910..00000000000000 --- a/api/core/tools/provider/builtin/youtube/tools/videos.yaml +++ /dev/null @@ -1,54 +0,0 @@ -identity: - name: youtube_video_statistics - author: Dify - label: - en_US: Video statistics - zh_Hans: 视频统计 - pt_BR: Estatísticas de vídeo - icon: icon.svg -description: - human: - en_US: A tool for get statistics about a channel's videos. - zh_Hans: 一个用于获取油管频道视频统计数据的工具。 - pt_BR: Uma ferramenta para obter estatísticas sobre os vídeos de um canal. - llm: A tool for get statistics about a channel's videos. Input should be the name of the channel like PewDiePie. -parameters: - - name: channel - type: string - required: true - label: - en_US: Channel name - zh_Hans: 频道名 - pt_BR: Nome do canal - human_description: - en_US: The name of the channel you want to search. 
- zh_Hans: 你想要搜索的油管频道名。 - pt_BR: O nome do canal que você deseja pesquisar. - llm_description: The name of the channel you want to search. - form: llm - - name: start_date - type: string - required: false - label: - en_US: Start date - zh_Hans: 开始日期 - pt_BR: Data de início - human_description: - en_US: The start date of the analytics. - zh_Hans: 分析的开始日期。 - pt_BR: A data de início da análise. - llm_description: The start date of the analytics, the format of the date must be YYYY-MM-DD like 2020-01-01. - form: llm - - name: end_date - type: string - required: false - label: - en_US: End date - zh_Hans: 结束日期 - pt_BR: Data de término - human_description: - en_US: The end date of the analytics. - zh_Hans: 分析的结束日期。 - pt_BR: A data de término da análise. - llm_description: The end date of the analytics, the format of the date must be YYYY-MM-DD like 2024-01-01. - form: llm diff --git a/api/core/tools/provider/builtin/youtube/youtube.py b/api/core/tools/provider/builtin/youtube/youtube.py deleted file mode 100644 index aad876491c85dc..00000000000000 --- a/api/core/tools/provider/builtin/youtube/youtube.py +++ /dev/null @@ -1,22 +0,0 @@ -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.youtube.tools.videos import YoutubeVideosAnalyticsTool -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController - - -class YahooFinanceProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict) -> None: - try: - YoutubeVideosAnalyticsTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "channel": "TOKYO GIRLS COLLECTION", - "start_date": "2020-01-01", - "end_date": "2024-12-31", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) diff --git a/api/core/tools/provider/builtin/youtube/youtube.yaml b/api/core/tools/provider/builtin/youtube/youtube.yaml deleted file mode 100644 
index d6915b9a324767..00000000000000 --- a/api/core/tools/provider/builtin/youtube/youtube.yaml +++ /dev/null @@ -1,31 +0,0 @@ -identity: - author: Dify - name: youtube - label: - en_US: YouTube - zh_Hans: YouTube - pt_BR: YouTube - description: - en_US: YouTube - zh_Hans: YouTube(油管)是全球最大的视频分享网站,用户可以在上面上传、观看和分享视频。 - pt_BR: YouTube é o maior site de compartilhamento de vídeos do mundo, onde os usuários podem fazer upload, assistir e compartilhar vídeos. - icon: icon.svg - tags: - - videos -credentials_for_provider: - google_api_key: - type: secret-input - required: true - label: - en_US: Google API key - zh_Hans: Google API key - pt_BR: Chave da API do Google - placeholder: - en_US: Please input your Google API key - zh_Hans: 请输入你的 Google API key - pt_BR: Insira sua chave da API do Google - help: - en_US: Get your Google API key from Google - zh_Hans: 从 Google 获取您的 Google API key - pt_BR: Obtenha sua chave da API do Google no Google - url: https://console.developers.google.com/apis/credentials diff --git a/api/core/tools/tool_engine.py b/api/core/tools/tool_engine.py index 469eea67a6920d..cb2f6a899cd8e8 100644 --- a/api/core/tools/tool_engine.py +++ b/api/core/tools/tool_engine.py @@ -13,6 +13,7 @@ from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler from core.file.file_obj import FileTransferMethod from core.ops.ops_trace_manager import TraceQueueManager +from core.tools.__base.tool import Tool from core.tools.entities.tool_entities import ToolInvokeMessage, ToolInvokeMessageBinary, ToolInvokeMeta, ToolParameter from core.tools.errors import ( ToolEngineInvokeError, @@ -23,9 +24,8 @@ ToolProviderCredentialValidationError, ToolProviderNotFoundError, ) -from core.tools.tool.tool import Tool -from core.tools.tool.workflow_tool import WorkflowTool from core.tools.utils.message_transformer import ToolFileMessageTransformer +from core.tools.workflow_as_tool.tool import WorkflowTool from extensions.ext_database import db from 
models.model import Message, MessageFile diff --git a/api/core/tools/tool_label_manager.py b/api/core/tools/tool_label_manager.py index 2a5a2944ef8471..f9a126bf9bfc65 100644 --- a/api/core/tools/tool_label_manager.py +++ b/api/core/tools/tool_label_manager.py @@ -1,8 +1,8 @@ +from core.tools.__base.tool_provider import ToolProviderController +from core.tools.builtin_tool.provider import BuiltinToolProviderController +from core.tools.custom_tool.provider import ApiToolProviderController from core.tools.entities.values import default_tool_label_name_list -from core.tools.provider.api_tool_provider import ApiToolProviderController -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController -from core.tools.provider.tool_provider import ToolProviderController -from core.tools.provider.workflow_tool_provider import WorkflowToolProviderController +from core.tools.workflow_as_tool.provider import WorkflowToolProviderController from extensions.ext_database import db from models.tools import ToolLabelBinding @@ -55,7 +55,7 @@ def get_tool_labels(cls, controller: ToolProviderController) -> list[str]: else: raise ValueError("Unsupported tool type") - labels: list[ToolLabelBinding] = ( + labels = ( db.session.query(ToolLabelBinding.label_name) .filter( ToolLabelBinding.tool_id == provider_id, @@ -84,7 +84,10 @@ def get_tools_labels(cls, tool_providers: list[ToolProviderController]) -> dict[ if not isinstance(controller, ApiToolProviderController | WorkflowToolProviderController): raise ValueError("Unsupported tool type") - provider_ids = [controller.provider_id for controller in tool_providers] + provider_ids = [] + for controller in tool_providers: + assert isinstance(controller, ApiToolProviderController | WorkflowToolProviderController) + provider_ids.append(controller.provider_id) labels: list[ToolLabelBinding] = ( db.session.query(ToolLabelBinding).filter(ToolLabelBinding.tool_id.in_(provider_ids)).all() diff --git a/api/core/tools/tool_manager.py 
b/api/core/tools/tool_manager.py index 108b74018daf07..0cfcb6d9b9dbaf 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -4,7 +4,11 @@ from collections.abc import Generator from os import listdir, path from threading import Lock -from typing import Any, Union, cast +from typing import TYPE_CHECKING, Any, Union, cast + +if TYPE_CHECKING: + from core.workflow.nodes.tool.entities import ToolEntity + from configs import dify_config from core.agent.entities import AgentToolEntity @@ -12,20 +16,20 @@ from core.helper.module_import_helper import load_single_subclass_from_source from core.helper.position_helper import is_filtered from core.model_runtime.utils.encoders import jsonable_encoder +from core.tools.__base.tool import Tool +from core.tools.builtin_tool.provider import BuiltinToolProviderController +from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort +from core.tools.builtin_tool.tool import BuiltinTool +from core.tools.custom_tool.provider import ApiToolProviderController +from core.tools.custom_tool.tool import ApiTool from core.tools.entities.api_entities import UserToolProvider, UserToolProviderTypeLiteral from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ApiProviderAuthType, ToolInvokeFrom, ToolParameter, ToolProviderType from core.tools.errors import ToolProviderNotFoundError -from core.tools.provider.api_tool_provider import ApiToolProviderController -from core.tools.provider.builtin._positions import BuiltinToolProviderSort -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController -from core.tools.tool.api_tool import ApiTool -from core.tools.tool.builtin_tool import BuiltinTool -from core.tools.tool.tool import Tool -from core.tools.tool.workflow_tool import WorkflowTool from core.tools.tool_label_manager import ToolLabelManager from core.tools.utils.configuration import ProviderConfigEncrypter, 
ToolParameterConfigurationManager from core.tools.utils.tool_parameter_converter import ToolParameterConverter +from core.tools.workflow_as_tool.tool import WorkflowTool from extensions.ext_database import db from models.tools import ApiToolProvider, BuiltinToolProvider, WorkflowToolProvider from services.tools.tools_transform_service import ToolTransformService @@ -328,8 +332,8 @@ def get_builtin_provider_icon(cls, provider: str) -> tuple[str, str]: absolute_path = path.join( path.dirname(path.realpath(__file__)), - "provider", - "builtin", + "builtin_tool", + "providers", provider, "_assets", provider_controller.identity.icon, @@ -363,22 +367,22 @@ def _list_builtin_providers(cls) -> Generator[BuiltinToolProviderController, Non """ list all the builtin providers """ - for provider_path in listdir(path.join(path.dirname(path.realpath(__file__)), "provider", "builtin")): + for provider_path in listdir(path.join(path.dirname(path.realpath(__file__)), "builtin_tool", "providers")): if provider_path.startswith("__"): continue - if path.isdir(path.join(path.dirname(path.realpath(__file__)), "provider", "builtin", provider_path)): + if path.isdir(path.join(path.dirname(path.realpath(__file__)), "builtin_tool", "providers", provider_path)): if provider_path.startswith("__"): continue # init provider try: provider_class = load_single_subclass_from_source( - module_name=f"core.tools.provider.builtin.{provider_path}.{provider_path}", + module_name=f"core.tools.builtin_tool.providers.{provider_path}.{provider_path}", script_path=path.join( path.dirname(path.realpath(__file__)), - "provider", - "builtin", + "builtin_tool", + "providers", provider_path, f"{provider_path}.py", ), @@ -391,7 +395,7 @@ def _list_builtin_providers(cls) -> Generator[BuiltinToolProviderController, Non yield provider except Exception as e: - logger.error(f"load builtin provider {provider} error: {e}") + logger.error(f"load builtin provider error: {e}") continue # set builtin providers loaded 
cls._builtin_providers_loaded = True diff --git a/api/core/tools/utils/configuration.py b/api/core/tools/utils/configuration.py index f3fce03f8cee2f..0ab2b0021a36dd 100644 --- a/api/core/tools/utils/configuration.py +++ b/api/core/tools/utils/configuration.py @@ -8,11 +8,11 @@ from core.helper import encrypter from core.helper.tool_parameter_cache import ToolParameterCache, ToolParameterCacheType from core.helper.tool_provider_cache import ToolProviderCredentialsCache, ToolProviderCredentialsCacheType +from core.tools.__base.tool import Tool from core.tools.entities.tool_entities import ( ToolParameter, ToolProviderType, ) -from core.tools.tool.tool import Tool class ProviderConfigEncrypter(BaseModel): @@ -27,7 +27,7 @@ def _deep_copy(self, data: dict[str, str]) -> dict[str, str]: """ return deepcopy(data) - def encrypt(self, data: dict[str, str]) -> Mapping[str, str]: + def encrypt(self, data: dict[str, str]) -> dict[str, str]: """ encrypt tool credentials with tenant id @@ -45,7 +45,7 @@ def encrypt(self, data: dict[str, str]) -> Mapping[str, str]: return data - def mask_tool_credentials(self, data: dict[str, Any]) -> Mapping[str, Any]: + def mask_tool_credentials(self, data: dict[str, Any]) -> dict[str, Any]: """ mask tool credentials @@ -68,7 +68,7 @@ def mask_tool_credentials(self, data: dict[str, Any]) -> Mapping[str, Any]: return data - def decrypt(self, data: dict[str, str]) -> Mapping[str, str]: + def decrypt(self, data: dict[str, str]) -> dict[str, str]: """ decrypt tool credentials with tenant id diff --git a/api/core/tools/tool/dataset_retriever/dataset_multi_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py similarity index 98% rename from api/core/tools/tool/dataset_retriever/dataset_multi_retriever_tool.py rename to api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py index ab7b40a2536db8..408dd43d228814 100644 --- a/api/core/tools/tool/dataset_retriever/dataset_multi_retriever_tool.py +++ 
b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py @@ -9,7 +9,7 @@ from core.rag.datasource.retrieval_service import RetrievalService from core.rag.rerank.rerank_model import RerankModelRunner from core.rag.retrieval.retrieval_methods import RetrievalMethod -from core.tools.tool.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool +from core.tools.utils.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool from extensions.ext_database import db from models.dataset import Dataset, Document, DocumentSegment diff --git a/api/core/tools/tool/dataset_retriever/dataset_retriever_base_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py similarity index 100% rename from api/core/tools/tool/dataset_retriever/dataset_retriever_base_tool.py rename to api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py diff --git a/api/core/tools/tool/dataset_retriever/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py similarity index 100% rename from api/core/tools/tool/dataset_retriever/dataset_retriever_tool.py rename to api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py diff --git a/api/core/tools/tool/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever_tool.py similarity index 96% rename from api/core/tools/tool/dataset_retriever_tool.py rename to api/core/tools/utils/dataset_retriever_tool.py index 9f41b5d5eb2fe9..7b612227227dc4 100644 --- a/api/core/tools/tool/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever_tool.py @@ -5,6 +5,7 @@ from core.app.entities.app_invoke_entities import InvokeFrom from core.callback_handler.index_tool_callback_handler import DatasetIndexToolCallbackHandler from core.rag.retrieval.dataset_retrieval import DatasetRetrieval +from core.tools.__base.tool import Tool from core.tools.entities.common_entities import I18nObject from 
core.tools.entities.tool_entities import ( ToolDescription, @@ -13,8 +14,7 @@ ToolParameter, ToolProviderType, ) -from core.tools.tool.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool -from core.tools.tool.tool import Tool +from core.tools.utils.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool class DatasetRetrieverTool(Tool): diff --git a/api/core/tools/utils/web_reader_tool.py b/api/core/tools/utils/web_reader_tool.py index 1ced7d0488e3f2..dcbae9f5aa2260 100644 --- a/api/core/tools/utils/web_reader_tool.py +++ b/api/core/tools/utils/web_reader_tool.py @@ -36,7 +36,7 @@ def page_result(text: str, cursor: int, max_length: int) -> str: return text[cursor : cursor + max_length] -def get_url(url: str, user_agent: str = None) -> str: +def get_url(url: str, user_agent: str | None = None) -> str: """Fetch URL and return the contents as a string.""" headers = { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)" diff --git a/api/core/tools/workflow_as_tool/provider.py b/api/core/tools/workflow_as_tool/provider.py new file mode 100644 index 00000000000000..cab5f84506d1c0 --- /dev/null +++ b/api/core/tools/workflow_as_tool/provider.py @@ -0,0 +1,207 @@ +from collections.abc import Mapping +from typing import Optional + +from pydantic import Field + +from core.app.app_config.entities import VariableEntity, VariableEntityType +from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager +from core.tools.__base.tool_provider import ToolProviderController +from core.tools.entities.common_entities import I18nObject +from core.tools.entities.tool_entities import ( + ToolDescription, + ToolIdentity, + ToolParameter, + ToolParameterOption, + ToolProviderType, +) +from core.tools.utils.workflow_configuration_sync import WorkflowToolConfigurationUtils +from core.tools.workflow_as_tool.tool import WorkflowTool +from extensions.ext_database import db +from 
models.model import App, AppMode +from models.tools import WorkflowToolProvider +from models.workflow import Workflow + +VARIABLE_TO_PARAMETER_TYPE_MAPPING = { + VariableEntityType.TEXT_INPUT: ToolParameter.ToolParameterType.STRING, + VariableEntityType.PARAGRAPH: ToolParameter.ToolParameterType.STRING, + VariableEntityType.SELECT: ToolParameter.ToolParameterType.SELECT, + VariableEntityType.NUMBER: ToolParameter.ToolParameterType.NUMBER, +} + + +class WorkflowToolProviderController(ToolProviderController): + provider_id: str + tools: list[WorkflowTool] = Field(default_factory=list) + + @classmethod + def from_db(cls, db_provider: WorkflowToolProvider) -> "WorkflowToolProviderController": + app = db_provider.app + + if not app: + raise ValueError("app not found") + + controller = WorkflowToolProviderController( + **{ + "identity": { + "author": db_provider.user.name if db_provider.user_id and db_provider.user else "", + "name": db_provider.label, + "label": {"en_US": db_provider.label, "zh_Hans": db_provider.label}, + "description": {"en_US": db_provider.description, "zh_Hans": db_provider.description}, + "icon": db_provider.icon, + }, + "credentials_schema": {}, + "provider_id": db_provider.id or "", + } + ) + + # init tools + + controller.tools = [controller._get_db_provider_tool(db_provider, app)] + + return controller + + @property + def provider_type(self) -> ToolProviderType: + return ToolProviderType.WORKFLOW + + def _get_db_provider_tool(self, db_provider: WorkflowToolProvider, app: App) -> WorkflowTool: + """ + get db provider tool + :param db_provider: the db provider + :param app: the app + :return: the tool + """ + workflow: Workflow | None = db.session.query(Workflow).filter( + Workflow.app_id == db_provider.app_id, + Workflow.version == db_provider.version + ).first() + + if not workflow: + raise ValueError("workflow not found") + + # fetch start node + graph: Mapping = workflow.graph_dict + features_dict: Mapping = workflow.features_dict + features = 
WorkflowAppConfigManager.convert_features( + config_dict=features_dict, + app_mode=AppMode.WORKFLOW + ) + + parameters = db_provider.parameter_configurations + variables = WorkflowToolConfigurationUtils.get_workflow_graph_variables(graph) + + def fetch_workflow_variable(variable_name: str) -> VariableEntity | None: + return next(filter(lambda x: x.variable == variable_name, variables), None) + + user = db_provider.user + + workflow_tool_parameters = [] + for parameter in parameters: + variable = fetch_workflow_variable(parameter.name) + if variable: + parameter_type = None + options = [] + if variable.type not in VARIABLE_TO_PARAMETER_TYPE_MAPPING: + raise ValueError(f"unsupported variable type {variable.type}") + parameter_type = VARIABLE_TO_PARAMETER_TYPE_MAPPING[variable.type] + + if variable.type == VariableEntityType.SELECT and variable.options: + options = [ + ToolParameterOption(value=option, label=I18nObject(en_US=option, zh_Hans=option)) + for option in variable.options + ] + + workflow_tool_parameters.append( + ToolParameter( + name=parameter.name, + label=I18nObject(en_US=variable.label, zh_Hans=variable.label), + human_description=I18nObject(en_US=parameter.description, zh_Hans=parameter.description), + type=parameter_type, + form=parameter.form, + llm_description=parameter.description, + required=variable.required, + options=options, + default=variable.default, + ) + ) + elif features.file_upload: + workflow_tool_parameters.append( + ToolParameter( + name=parameter.name, + label=I18nObject(en_US=parameter.name, zh_Hans=parameter.name), + human_description=I18nObject(en_US=parameter.description, zh_Hans=parameter.description), + type=ToolParameter.ToolParameterType.FILE, + llm_description=parameter.description, + required=False, + form=parameter.form, + ) + ) + else: + raise ValueError("variable not found") + + return WorkflowTool( + identity=ToolIdentity( + author=user.name if user else "", + name=db_provider.name, + 
label=I18nObject(en_US=db_provider.label, zh_Hans=db_provider.label), + provider=self.provider_id, + icon=db_provider.icon, + ), + description=ToolDescription( + human=I18nObject(en_US=db_provider.description, zh_Hans=db_provider.description), + llm=db_provider.description, + ), + parameters=workflow_tool_parameters, + is_team_authorization=True, + workflow_app_id=app.id, + workflow_entities={ + "app": app, + "workflow": workflow, + }, + version=db_provider.version, + workflow_call_depth=0, + label=db_provider.label, + ) + + def get_tools(self, tenant_id: str) -> list[WorkflowTool]: + """ + fetch tools from database + + :param user_id: the user id + :param tenant_id: the tenant id + :return: the tools + """ + if self.tools is not None: + return self.tools + + db_providers: WorkflowToolProvider | None = db.session.query(WorkflowToolProvider).filter( + WorkflowToolProvider.tenant_id == tenant_id, + WorkflowToolProvider.app_id == self.provider_id, + ).first() + + if not db_providers: + return [] + + app = db_providers.app + if not app: + raise ValueError("can not read app of workflow") + + self.tools = [self._get_db_provider_tool(db_providers, app)] + + return self.tools + + def get_tool(self, tool_name: str) -> Optional[WorkflowTool]: + """ + get tool by name + + :param tool_name: the name of the tool + :return: the tool + """ + if self.tools is None: + return None + + for tool in self.tools: + if tool.identity.name == tool_name: + return tool + + return None diff --git a/api/core/tools/tool/workflow_tool.py b/api/core/tools/workflow_as_tool/tool.py similarity index 99% rename from api/core/tools/tool/workflow_tool.py rename to api/core/tools/workflow_as_tool/tool.py index 42ceffb834ee83..72aae2796c893b 100644 --- a/api/core/tools/tool/workflow_tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -5,8 +5,8 @@ from typing import Any, Optional, Union from core.file.file_obj import FileTransferMethod, FileVar +from core.tools.__base.tool import Tool from 
core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter, ToolProviderType -from core.tools.tool.tool import Tool from extensions.ext_database import db from models.account import Account from models.model import App, EndUser diff --git a/api/core/tools/provider/workflow_tool_provider.py b/api/core/tools/workflow_as_tool/workflow_tool_provider.py similarity index 98% rename from api/core/tools/provider/workflow_tool_provider.py rename to api/core/tools/workflow_as_tool/workflow_tool_provider.py index 6f80767bd55b7d..cab5f84506d1c0 100644 --- a/api/core/tools/provider/workflow_tool_provider.py +++ b/api/core/tools/workflow_as_tool/workflow_tool_provider.py @@ -5,6 +5,7 @@ from core.app.app_config.entities import VariableEntity, VariableEntityType from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager +from core.tools.__base.tool_provider import ToolProviderController from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ( ToolDescription, @@ -13,9 +14,8 @@ ToolParameterOption, ToolProviderType, ) -from core.tools.provider.tool_provider import ToolProviderController -from core.tools.tool.workflow_tool import WorkflowTool from core.tools.utils.workflow_configuration_sync import WorkflowToolConfigurationUtils +from core.tools.workflow_as_tool.tool import WorkflowTool from extensions.ext_database import db from models.model import App, AppMode from models.tools import WorkflowToolProvider diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index e1962305b92c12..11aa3ba529f605 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -5,6 +5,7 @@ from core.entities.provider_entities import ProviderConfig from core.model_runtime.utils.encoders import jsonable_encoder +from core.tools.custom_tool.provider import ApiToolProviderController from 
core.tools.entities.api_entities import UserTool, UserToolProvider from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle @@ -12,7 +13,6 @@ ApiProviderAuthType, ApiProviderSchemaType, ) -from core.tools.provider.api_tool_provider import ApiToolProviderController from core.tools.tool_label_manager import ToolLabelManager from core.tools.tool_manager import ToolManager from core.tools.utils.configuration import ProviderConfigEncrypter diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index af1cdbacac3450..6db8718b6b46c0 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -5,10 +5,10 @@ from configs import dify_config from core.helper.position_helper import is_filtered from core.model_runtime.utils.encoders import jsonable_encoder +from core.tools.__base.tool_provider import ToolProviderController +from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort from core.tools.entities.api_entities import UserTool, UserToolProvider from core.tools.errors import ToolNotFoundError, ToolProviderCredentialValidationError, ToolProviderNotFoundError -from core.tools.provider.builtin._positions import BuiltinToolProviderSort -from core.tools.provider.tool_provider import ToolProviderController from core.tools.tool_label_manager import ToolLabelManager from core.tools.tool_manager import ToolManager from core.tools.utils.configuration import ProviderConfigEncrypter diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 552e2a0fbc7585..b7488621c643cb 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -3,6 +3,9 @@ from typing import Optional, Union from configs import dify_config +from core.tools.__base.tool import Tool +from 
core.tools.builtin_tool.provider import BuiltinToolProviderController +from core.tools.custom_tool.provider import ApiToolProviderController from core.tools.entities.api_entities import UserTool, UserToolProvider from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle @@ -11,12 +14,9 @@ ToolParameter, ToolProviderType, ) -from core.tools.provider.api_tool_provider import ApiToolProviderController -from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController -from core.tools.provider.workflow_tool_provider import WorkflowToolProviderController -from core.tools.tool.tool import Tool -from core.tools.tool.workflow_tool import WorkflowTool from core.tools.utils.configuration import ProviderConfigEncrypter +from core.tools.workflow_as_tool.provider import WorkflowToolProviderController +from core.tools.workflow_as_tool.tool import WorkflowTool from models.tools import ApiToolProvider, BuiltinToolProvider, WorkflowToolProvider logger = logging.getLogger(__name__) diff --git a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index 1544b39c23571e..60e26aa28281ec 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -5,9 +5,9 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.tools.entities.api_entities import UserTool, UserToolProvider -from core.tools.provider.workflow_tool_provider import WorkflowToolProviderController from core.tools.tool_label_manager import ToolLabelManager from core.tools.utils.workflow_configuration_sync import WorkflowToolConfigurationUtils +from core.tools.workflow_as_tool.provider import WorkflowToolProviderController from extensions.ext_database import db from models.model import App from models.tools import WorkflowToolProvider diff --git a/api/tests/integration_tests/tools/api_tool/test_api_tool.py 
b/api/tests/integration_tests/tools/api_tool/test_api_tool.py index 09729a961eff33..e4798e02c38148 100644 --- a/api/tests/integration_tests/tools/api_tool/test_api_tool.py +++ b/api/tests/integration_tests/tools/api_tool/test_api_tool.py @@ -1,5 +1,5 @@ -from core.tools.tool.api_tool import ApiTool -from core.tools.tool.tool import Tool +from core.tools.__base.tool import Tool +from core.tools.custom_tool.tool import ApiTool from tests.integration_tests.tools.__mock.http import setup_http_mock tool_bundle = { From 73ce692e24153f76652e306711ea3af6b3691cf4 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 20 Sep 2024 13:32:11 +0800 Subject: [PATCH 033/325] feat: add inner api key --- api/.env.example | 9 +++++---- api/configs/feature/__init__.py | 17 +++++++++++------ api/controllers/inner_api/wraps.py | 10 +++++----- .../dataset_multi_retriever_tool.py | 7 +++++-- 4 files changed, 26 insertions(+), 17 deletions(-) diff --git a/api/.env.example b/api/.env.example index 21940402f4561e..3cff88c400f05e 100644 --- a/api/.env.example +++ b/api/.env.example @@ -278,10 +278,6 @@ WORKFLOW_CALL_MAX_DEPTH=5 APP_MAX_EXECUTION_TIME=1200 APP_MAX_ACTIVE_REQUESTS=0 -# Plugin configuration -PLUGIN_INNER_API_URL=http://127.0.0.1:5002 -PLUGIN_INNER_API_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi - # Celery beat configuration CELERY_BEAT_SCHEDULER_TIME=1 @@ -293,3 +289,8 @@ POSITION_TOOL_EXCLUDES= POSITION_PROVIDER_PINS= POSITION_PROVIDER_INCLUDES= POSITION_PROVIDER_EXCLUDES= + +# Plugin configuration +PLUGIN_API_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1 +PLUGIN_API_URL=http://127.0.0.1:5002 +INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1 \ No newline at end of file diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 4e1dfe73ad0787..30b97749ee9454 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -115,14 +115,19 @@ class 
PluginConfig(BaseSettings): """ Plugin configs """ - PLUGIN_INNER_API_URL: str = Field( - description='Plugin inner API URL', - default='http://plugin:8194', + PLUGIN_API_URL: str = Field( + description='Plugin API URL', + default='http://plugin:5002', ) - PLUGIN_INNER_API_KEY: str = Field( - description='Plugin inner API key', - default='dify-inner-api-key', + PLUGIN_API_KEY: str = Field( + description='Plugin API key', + default='plugin-api-key', + ) + + INNER_API_KEY_FOR_PLUGIN: str = Field( + description='Inner api key for plugin', + default='inner-api-key' ) diff --git a/api/controllers/inner_api/wraps.py b/api/controllers/inner_api/wraps.py index 3e184515d3a796..b543b5e1641f50 100644 --- a/api/controllers/inner_api/wraps.py +++ b/api/controllers/inner_api/wraps.py @@ -18,7 +18,7 @@ def decorated(*args, **kwargs): # get header 'X-Inner-Api-Key' inner_api_key = request.headers.get("X-Inner-Api-Key") - if not inner_api_key or inner_api_key != dify_config.INNER_API_KEY: + if not inner_api_key or inner_api_key != dify_config.INNER_API_KEY_FOR_PLUGIN: abort(401) return view(*args, **kwargs) @@ -67,14 +67,14 @@ def decorated(*args, **kwargs): def plugin_inner_api_only(view): @wraps(view) def decorated(*args, **kwargs): - if not dify_config.PLUGIN_INNER_API_KEY: + if not dify_config.PLUGIN_API_KEY: abort(404) # get header 'X-Inner-Api-Key' - inner_api_key = request.headers.get('X-Inner-Api-Key') - if not inner_api_key or inner_api_key != dify_config.PLUGIN_INNER_API_KEY: + inner_api_key = request.headers.get("X-Inner-Api-Key") + if not inner_api_key or inner_api_key != dify_config.INNER_API_KEY_FOR_PLUGIN: abort(404) return view(*args, **kwargs) - return decorated \ No newline at end of file + return decorated diff --git a/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py index 408dd43d228814..d3a8752d9ae37a 100644 --- 
a/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py @@ -49,7 +49,7 @@ def _run(self, query: str) -> str: retrieval_thread = threading.Thread( target=self._retriever, kwargs={ - "flask_app": current_app._get_current_object(), + "flask_app": current_app._get_current_object(), # type: ignore "dataset_id": dataset_id, "query": query, "all_documents": all_documents, @@ -77,11 +77,12 @@ def _run(self, query: str) -> str: document_score_list = {} for item in all_documents: + assert item.metadata if item.metadata.get("score"): document_score_list[item.metadata["doc_id"]] = item.metadata["score"] document_context_list = [] - index_node_ids = [document.metadata["doc_id"] for document in all_documents] + index_node_ids = [document.metadata["doc_id"] for document in all_documents if document.metadata] segments = DocumentSegment.query.filter( DocumentSegment.dataset_id.in_(self.dataset_ids), DocumentSegment.completed_at.isnot(None), @@ -140,6 +141,8 @@ def _run(self, query: str) -> str: return str("\n".join(document_context_list)) + raise RuntimeError("not segments found") + def _retriever( self, flask_app: Flask, From d4bf575d0a87641bc64e0ab6eccfdcb8f9323d41 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 20 Sep 2024 13:55:09 +0800 Subject: [PATCH 034/325] impl: basic plugin manager --- api/configs/feature/__init__.py | 2 +- api/core/plugin/manager/asset.py | 5 +++ api/core/plugin/manager/base.py | 48 +++++++++++++++++++++++++++++ api/core/plugin/manager/endpoint.py | 5 +++ api/core/plugin/manager/model.py | 5 +++ api/core/plugin/manager/tool.py | 5 +++ 6 files changed, 69 insertions(+), 1 deletion(-) create mode 100644 api/core/plugin/manager/asset.py create mode 100644 api/core/plugin/manager/base.py create mode 100644 api/core/plugin/manager/endpoint.py create mode 100644 api/core/plugin/manager/model.py create mode 100644 api/core/plugin/manager/tool.py diff --git 
a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 30b97749ee9454..660ca5a2a07eca 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -115,7 +115,7 @@ class PluginConfig(BaseSettings): """ Plugin configs """ - PLUGIN_API_URL: str = Field( + PLUGIN_API_URL: HttpUrl = Field( description='Plugin API URL', default='http://plugin:5002', ) diff --git a/api/core/plugin/manager/asset.py b/api/core/plugin/manager/asset.py new file mode 100644 index 00000000000000..f55ff748cace84 --- /dev/null +++ b/api/core/plugin/manager/asset.py @@ -0,0 +1,5 @@ +from core.plugin.manager.base import BasePluginManager + + +class PluginAssetManager(BasePluginManager): + pass \ No newline at end of file diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py new file mode 100644 index 00000000000000..5f1e5cd1d2bf66 --- /dev/null +++ b/api/core/plugin/manager/base.py @@ -0,0 +1,48 @@ +import json +from collections.abc import Generator +from typing import TypeVar + +import requests +from pydantic import BaseModel +from yarl import URL + +from configs import dify_config + +plugin_daemon_inner_api_baseurl = dify_config.PLUGIN_API_URL +plugin_daemon_inner_api_key = dify_config.INNER_API_KEY_FOR_PLUGIN + +T = TypeVar("T", bound=(BaseModel | dict)) + + +class BasePluginManager: + def _request(self, method: str, path: str, headers: dict, data: bytes, stream: bool = False) -> requests.Response: + """ + Make a request to the plugin daemon inner API. 
+ """ + url = URL(str(plugin_daemon_inner_api_baseurl)) / path + headers["X-Api-Key"] = plugin_daemon_inner_api_key + response = requests.request(method=method, url=str(url), headers=headers, data=data, stream=stream) + return response + + def _stream_request(self, method: str, path: str, headers: dict, data: bytes) -> Generator[bytes, None, None]: + """ + Make a stream request to the plugin daemon inner API + """ + response = self._request(method, path, headers, data, stream=True) + yield from response.iter_lines() + + def _stream_request_with_model( + self, method: str, path: str, headers: dict, data: bytes, type: type[T] + ) -> Generator[T, None, None]: + """ + Make a stream request to the plugin daemon inner API and yield the response as a model. + """ + for line in self._stream_request(method, path, headers, data): + yield type(**json.loads(line)) + + def _request_with_model(self, method: str, path: str, headers: dict, data: bytes, type: type[T]) -> T: + """ + Make a request to the plugin daemon inner API and return the response as a model. 
+ """ + response = self._request(method, path, headers, data) + return type(**response.json()) diff --git a/api/core/plugin/manager/endpoint.py b/api/core/plugin/manager/endpoint.py new file mode 100644 index 00000000000000..a3f49903fd8595 --- /dev/null +++ b/api/core/plugin/manager/endpoint.py @@ -0,0 +1,5 @@ +from core.plugin.manager.base import BasePluginManager + + +class PluginEndpointManager(BasePluginManager): + pass \ No newline at end of file diff --git a/api/core/plugin/manager/model.py b/api/core/plugin/manager/model.py new file mode 100644 index 00000000000000..f03dbfd1e33100 --- /dev/null +++ b/api/core/plugin/manager/model.py @@ -0,0 +1,5 @@ +from core.plugin.manager.base import BasePluginManager + + +class PluginModelManager(BasePluginManager): + pass \ No newline at end of file diff --git a/api/core/plugin/manager/tool.py b/api/core/plugin/manager/tool.py new file mode 100644 index 00000000000000..83517f1caf6fc8 --- /dev/null +++ b/api/core/plugin/manager/tool.py @@ -0,0 +1,5 @@ +from core.plugin.manager.base import BasePluginManager + + +class PluginToolManager(BasePluginManager): + pass \ No newline at end of file From 9693b5ad0c62e14de69e430c5491e541a970f796 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 20 Sep 2024 14:43:01 +0800 Subject: [PATCH 035/325] feat: debugging key --- api/core/plugin/entities/plugin_daemon.py | 15 +++++++ api/core/plugin/manager/asset.py | 9 +++- api/core/plugin/manager/base.py | 51 +++++++++++++++++++++-- api/core/plugin/manager/debugging.py | 17 ++++++++ 4 files changed, 87 insertions(+), 5 deletions(-) create mode 100644 api/core/plugin/entities/plugin_daemon.py create mode 100644 api/core/plugin/manager/debugging.py diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py new file mode 100644 index 00000000000000..ca8ea0f780edcd --- /dev/null +++ b/api/core/plugin/entities/plugin_daemon.py @@ -0,0 +1,15 @@ +from typing import Generic, Optional, TypeVar + +from pydantic 
import BaseModel + +T = TypeVar("T", bound=(BaseModel | dict)) + + +class PluginDaemonBasicResponse(BaseModel, Generic[T]): + """ + Basic response from plugin daemon. + """ + + code: int + message: str + data: Optional[T] diff --git a/api/core/plugin/manager/asset.py b/api/core/plugin/manager/asset.py index f55ff748cace84..df76f56a6dc8d9 100644 --- a/api/core/plugin/manager/asset.py +++ b/api/core/plugin/manager/asset.py @@ -2,4 +2,11 @@ class PluginAssetManager(BasePluginManager): - pass \ No newline at end of file + def fetch_asset(self, id: str) -> bytes: + """ + Fetch an asset by id. + """ + response = self._request(method="GET", path=f"/assets/plugin/{id}") + if response.status_code != 200: + raise ValueError(f"can not found asset {id}") + return response.content diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index 5f1e5cd1d2bf66..b26ab851c72f2e 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -7,6 +7,7 @@ from yarl import URL from configs import dify_config +from core.plugin.entities.plugin_daemon import PluginDaemonBasicResponse plugin_daemon_inner_api_baseurl = dify_config.PLUGIN_API_URL plugin_daemon_inner_api_key = dify_config.INNER_API_KEY_FOR_PLUGIN @@ -15,16 +16,21 @@ class BasePluginManager: - def _request(self, method: str, path: str, headers: dict, data: bytes, stream: bool = False) -> requests.Response: + def _request( + self, method: str, path: str, headers: dict | None = None, data: bytes | None = None, stream: bool = False + ) -> requests.Response: """ Make a request to the plugin daemon inner API. 
""" url = URL(str(plugin_daemon_inner_api_baseurl)) / path + headers = headers or {} headers["X-Api-Key"] = plugin_daemon_inner_api_key response = requests.request(method=method, url=str(url), headers=headers, data=data, stream=stream) return response - def _stream_request(self, method: str, path: str, headers: dict, data: bytes) -> Generator[bytes, None, None]: + def _stream_request( + self, method: str, path: str, headers: dict | None = None, data: bytes | None = None + ) -> Generator[bytes, None, None]: """ Make a stream request to the plugin daemon inner API """ @@ -32,7 +38,12 @@ def _stream_request(self, method: str, path: str, headers: dict, data: bytes) -> yield from response.iter_lines() def _stream_request_with_model( - self, method: str, path: str, headers: dict, data: bytes, type: type[T] + self, + method: str, + path: str, + type: type[T], + headers: dict | None = None, + data: bytes | None = None, ) -> Generator[T, None, None]: """ Make a stream request to the plugin daemon inner API and yield the response as a model. @@ -40,9 +51,41 @@ def _stream_request_with_model( for line in self._stream_request(method, path, headers, data): yield type(**json.loads(line)) - def _request_with_model(self, method: str, path: str, headers: dict, data: bytes, type: type[T]) -> T: + def _request_with_model( + self, method: str, path: str, type: type[T], headers: dict | None = None, data: bytes | None = None + ) -> T: """ Make a request to the plugin daemon inner API and return the response as a model. """ response = self._request(method, path, headers, data) return type(**response.json()) + + def _request_with_plugin_daemon_response( + self, method: str, path: str, type: type[T], headers: dict | None = None, data: bytes | None = None + ) -> T: + """ + Make a request to the plugin daemon inner API and return the response as a model. 
+ """ + response = self._request(method, path, headers, data) + rep = PluginDaemonBasicResponse[type](**response.json()) + if rep.code != 0: + raise Exception(f"got error from plugin daemon: {rep.message}, code: {rep.code}") + if rep.data is None: + raise Exception("got empty data from plugin daemon") + + return rep.data + + def _request_with_plugin_daemon_response_stream( + self, method: str, path: str, type: type[T], headers: dict | None = None, data: bytes | None = None + ) -> Generator[T, None, None]: + """ + Make a stream request to the plugin daemon inner API and yield the response as a model. + """ + for line in self._stream_request(method, path, headers, data): + line_data = json.loads(line) + rep = PluginDaemonBasicResponse[type](**line_data) + if rep.code != 0: + raise Exception(f"got error from plugin daemon: {rep.message}, code: {rep.code}") + if rep.data is None: + raise Exception("got empty data from plugin daemon") + yield rep.data \ No newline at end of file diff --git a/api/core/plugin/manager/debugging.py b/api/core/plugin/manager/debugging.py new file mode 100644 index 00000000000000..6b26e3ad676dda --- /dev/null +++ b/api/core/plugin/manager/debugging.py @@ -0,0 +1,17 @@ +from pydantic import BaseModel + +from core.plugin.manager.base import BasePluginManager + + +class PluginDebuggingManager(BasePluginManager): + def get_debugging_key(self, tenant_id: str) -> str: + """ + Get the debugging key for the given tenant. 
+ """ + + class Response(BaseModel): + key: str + + response = self._request_with_plugin_daemon_response("POST", f"/plugin/{tenant_id}/debugging/key", Response) + + return response.key From 2223dfb26679f32dff0fb7dddbaa43193c202445 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 20 Sep 2024 15:08:39 +0800 Subject: [PATCH 036/325] feat: get debugging key --- api/controllers/console/__init__.py | 2 +- api/controllers/console/workspace/plugin.py | 27 +++++++++++++++++++ api/core/plugin/manager/base.py | 6 ++--- api/core/plugin/manager/debugging.py | 2 +- api/core/plugin/manager/tool.py | 6 ++++- .../plugin/plugin_debugging_service.py | 8 ++++++ 6 files changed, 45 insertions(+), 6 deletions(-) create mode 100644 api/controllers/console/workspace/plugin.py create mode 100644 api/services/plugin/plugin_debugging_service.py diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index eb7c1464d39722..f36703192a9444 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -56,4 +56,4 @@ from .tag import tags # Import workspace controllers -from .workspace import account, load_balancing_config, members, model_providers, models, tool_providers, workspace +from .workspace import account, load_balancing_config, members, model_providers, models, tool_providers, workspace, plugin diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py new file mode 100644 index 00000000000000..c3e89321d6bbf5 --- /dev/null +++ b/api/controllers/console/workspace/plugin.py @@ -0,0 +1,27 @@ +from flask_login import current_user +from flask_restful import Resource +from werkzeug.exceptions import Forbidden + +from controllers.console import api +from controllers.console.setup import setup_required +from controllers.console.wraps import account_initialization_required +from libs.login import login_required +from services.plugin.plugin_debugging_service import PluginDebuggingService + + 
+class PluginDebuggingKeyApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self): + user = current_user + if not user.is_admin_or_owner: + raise Forbidden() + + tenant_id = user.current_tenant_id + return { + "key": PluginDebuggingService.get_plugin_debugging_key(tenant_id) + } + + +api.add_resource(PluginDebuggingKeyApi, "/workspaces/current/plugin/debugging-key") \ No newline at end of file diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index b26ab851c72f2e..704afe71d1c173 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -10,7 +10,7 @@ from core.plugin.entities.plugin_daemon import PluginDaemonBasicResponse plugin_daemon_inner_api_baseurl = dify_config.PLUGIN_API_URL -plugin_daemon_inner_api_key = dify_config.INNER_API_KEY_FOR_PLUGIN +plugin_daemon_inner_api_key = dify_config.PLUGIN_API_KEY T = TypeVar("T", bound=(BaseModel | dict)) @@ -69,9 +69,9 @@ def _request_with_plugin_daemon_response( response = self._request(method, path, headers, data) rep = PluginDaemonBasicResponse[type](**response.json()) if rep.code != 0: - raise Exception(f"got error from plugin daemon: {rep.message}, code: {rep.code}") + raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") if rep.data is None: - raise Exception("got empty data from plugin daemon") + raise ValueError("got empty data from plugin daemon") return rep.data diff --git a/api/core/plugin/manager/debugging.py b/api/core/plugin/manager/debugging.py index 6b26e3ad676dda..fb6bad7fa3132c 100644 --- a/api/core/plugin/manager/debugging.py +++ b/api/core/plugin/manager/debugging.py @@ -12,6 +12,6 @@ def get_debugging_key(self, tenant_id: str) -> str: class Response(BaseModel): key: str - response = self._request_with_plugin_daemon_response("POST", f"/plugin/{tenant_id}/debugging/key", Response) + response = self._request_with_plugin_daemon_response("POST", 
f"plugin/{tenant_id}/debugging/key", Response) return response.key diff --git a/api/core/plugin/manager/tool.py b/api/core/plugin/manager/tool.py index 83517f1caf6fc8..10ce33d5e7a243 100644 --- a/api/core/plugin/manager/tool.py +++ b/api/core/plugin/manager/tool.py @@ -2,4 +2,8 @@ class PluginToolManager(BasePluginManager): - pass \ No newline at end of file + def fetch_tool_providers(self, asset_id: str) -> list[str]: + """ + Fetch tool providers for the given asset. + """ + response = self._request('GET', f'/plugin/asset/{asset_id}') \ No newline at end of file diff --git a/api/services/plugin/plugin_debugging_service.py b/api/services/plugin/plugin_debugging_service.py new file mode 100644 index 00000000000000..2bf24e6de89805 --- /dev/null +++ b/api/services/plugin/plugin_debugging_service.py @@ -0,0 +1,8 @@ +from core.plugin.manager.debugging import PluginDebuggingManager + + +class PluginDebuggingService: + @staticmethod + def get_plugin_debugging_key(tenant_id: str) -> str: + manager = PluginDebuggingManager() + return manager.get_debugging_key(tenant_id) From eef79a519602e0759eb78bbd205ee00303be705a Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 20 Sep 2024 21:35:19 +0800 Subject: [PATCH 037/325] feat: support install plugin --- api/controllers/console/__init__.py | 11 +++++- api/core/plugin/entities/plugin_daemon.py | 16 +++++++- api/core/plugin/manager/base.py | 27 +++++++------ api/core/plugin/manager/plugin.py | 46 +++++++++++++++++++++++ 4 files changed, 87 insertions(+), 13 deletions(-) create mode 100644 api/core/plugin/manager/plugin.py diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index f36703192a9444..1cf987050a7866 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -56,4 +56,13 @@ from .tag import tags # Import workspace controllers -from .workspace import account, load_balancing_config, members, model_providers, models, tool_providers, workspace, plugin +from 
.workspace import ( + account, + load_balancing_config, + members, + model_providers, + models, + plugin, + tool_providers, + workspace, +) diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index ca8ea0f780edcd..63c839a4e4a5d5 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -1,8 +1,9 @@ +from enum import Enum from typing import Generic, Optional, TypeVar from pydantic import BaseModel -T = TypeVar("T", bound=(BaseModel | dict)) +T = TypeVar("T", bound=(BaseModel | dict | bool)) class PluginDaemonBasicResponse(BaseModel, Generic[T]): @@ -13,3 +14,16 @@ class PluginDaemonBasicResponse(BaseModel, Generic[T]): code: int message: str data: Optional[T] + + +class InstallPluginMessage(BaseModel): + """ + Message for installing a plugin. + """ + class Event(Enum): + Info = "info" + Done = "done" + Error = "error" + + event: Event + data: str \ No newline at end of file diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index 704afe71d1c173..f6b44d05dd264d 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -12,12 +12,17 @@ plugin_daemon_inner_api_baseurl = dify_config.PLUGIN_API_URL plugin_daemon_inner_api_key = dify_config.PLUGIN_API_KEY -T = TypeVar("T", bound=(BaseModel | dict)) +T = TypeVar("T", bound=(BaseModel | dict | bool)) class BasePluginManager: def _request( - self, method: str, path: str, headers: dict | None = None, data: bytes | None = None, stream: bool = False + self, + method: str, + path: str, + headers: dict | None = None, + data: bytes | dict | None = None, + stream: bool = False, ) -> requests.Response: """ Make a request to the plugin daemon inner API. 
@@ -29,7 +34,7 @@ def _request( return response def _stream_request( - self, method: str, path: str, headers: dict | None = None, data: bytes | None = None + self, method: str, path: str, headers: dict | None = None, data: bytes | dict | None = None ) -> Generator[bytes, None, None]: """ Make a stream request to the plugin daemon inner API @@ -43,7 +48,7 @@ def _stream_request_with_model( path: str, type: type[T], headers: dict | None = None, - data: bytes | None = None, + data: bytes | dict | None = None, ) -> Generator[T, None, None]: """ Make a stream request to the plugin daemon inner API and yield the response as a model. @@ -61,7 +66,7 @@ def _request_with_model( return type(**response.json()) def _request_with_plugin_daemon_response( - self, method: str, path: str, type: type[T], headers: dict | None = None, data: bytes | None = None + self, method: str, path: str, type: type[T], headers: dict | None = None, data: bytes | dict | None = None ) -> T: """ Make a request to the plugin daemon inner API and return the response as a model. @@ -72,11 +77,11 @@ def _request_with_plugin_daemon_response( raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") if rep.data is None: raise ValueError("got empty data from plugin daemon") - + return rep.data - + def _request_with_plugin_daemon_response_stream( - self, method: str, path: str, type: type[T], headers: dict | None = None, data: bytes | None = None + self, method: str, path: str, type: type[T], headers: dict | None = None, data: bytes | dict | None = None ) -> Generator[T, None, None]: """ Make a stream request to the plugin daemon inner API and yield the response as a model. 
@@ -85,7 +90,7 @@ def _request_with_plugin_daemon_response_stream( line_data = json.loads(line) rep = PluginDaemonBasicResponse[type](**line_data) if rep.code != 0: - raise Exception(f"got error from plugin daemon: {rep.message}, code: {rep.code}") + raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") if rep.data is None: - raise Exception("got empty data from plugin daemon") - yield rep.data \ No newline at end of file + raise ValueError("got empty data from plugin daemon") + yield rep.data diff --git a/api/core/plugin/manager/plugin.py b/api/core/plugin/manager/plugin.py new file mode 100644 index 00000000000000..85e865bb93b8f8 --- /dev/null +++ b/api/core/plugin/manager/plugin.py @@ -0,0 +1,46 @@ +from collections.abc import Generator +from urllib.parse import quote + +from core.plugin.entities.plugin_daemon import InstallPluginMessage +from core.plugin.manager.base import BasePluginManager + + +class PluginInstallationManager(BasePluginManager): + def fetch_plugin_by_identifier(self, tenant_id: str, identifier: str) -> bool: + # urlencode the identifier + + identifier = quote(identifier) + return self._request_with_plugin_daemon_response( + "GET", f"/plugin/{tenant_id}/fetch/identifier?plugin_unique_identifier={identifier}", bool + ) + + def install_from_pkg(self, tenant_id: str, pkg: bytes) -> Generator[InstallPluginMessage, None, None]: + """ + Install a plugin from a package. + """ + # using multipart/form-data to encode body + body = {"dify_pkg": ("dify_pkg", pkg, "application/octet-stream")} + + return self._request_with_plugin_daemon_response_stream( + "POST", f"/plugin/{tenant_id}/install/pkg", InstallPluginMessage, data=body + ) + + def install_from_identifier(self, tenant_id: str, identifier: str) -> bool: + """ + Install a plugin from an identifier. 
+ """ + identifier = quote(identifier) + # exception will be raised if the request failed + self._request_with_plugin_daemon_response( + "POST", + f"/plugin/{tenant_id}/install/identifier", + dict, + headers={ + "Content-Type": "application/json", + }, + data={ + "plugin_unique_identifier": identifier, + }, + ) + + return True From 3c1d32e3ac687486785c307789b7a93f79aadb2f Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 20 Sep 2024 21:50:44 +0800 Subject: [PATCH 038/325] feat: uninstall plugin --- api/core/plugin/manager/plugin.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/api/core/plugin/manager/plugin.py b/api/core/plugin/manager/plugin.py index 85e865bb93b8f8..101827246ac773 100644 --- a/api/core/plugin/manager/plugin.py +++ b/api/core/plugin/manager/plugin.py @@ -31,10 +31,10 @@ def install_from_identifier(self, tenant_id: str, identifier: str) -> bool: """ identifier = quote(identifier) # exception will be raised if the request failed - self._request_with_plugin_daemon_response( + return self._request_with_plugin_daemon_response( "POST", f"/plugin/{tenant_id}/install/identifier", - dict, + bool, headers={ "Content-Type": "application/json", }, @@ -43,4 +43,10 @@ def install_from_identifier(self, tenant_id: str, identifier: str) -> bool: }, ) - return True + def uninstall(self, tenant_id: str, identifier: str) -> bool: + """ + Uninstall a plugin. 
+ """ + return self._request_with_plugin_daemon_response( + "DELETE", f"/plugin/{tenant_id}/uninstall?plugin_unique_identifier={identifier}", bool + ) From 91cb80f795789773a419d8ecb0d7264034df9dc1 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 20 Sep 2024 23:48:48 +0800 Subject: [PATCH 039/325] refactor: tool --- api/core/agent/base_agent_runner.py | 104 ++++----- api/core/agent/cot_agent_runner.py | 49 ++-- api/core/agent/cot_chat_agent_runner.py | 10 +- api/core/agent/fc_agent_runner.py | 33 ++- api/core/app/apps/agent_chat/app_runner.py | 56 ----- api/core/model_manager.py | 38 +++- api/core/tools/__base/tool.py | 209 +++--------------- api/core/tools/__base/tool_provider.py | 21 +- api/core/tools/__base/tool_runtime.py | 36 +++ api/core/tools/builtin_tool/provider.py | 51 +++-- .../builtin_tool/providers/qrcode/qrcode.py | 7 +- .../tools/builtin_tool/providers/time/time.py | 10 +- api/core/tools/builtin_tool/tool.py | 5 +- api/core/tools/custom_tool/provider.py | 40 ++-- api/core/tools/custom_tool/tool.py | 17 +- api/core/tools/entities/tool_entities.py | 171 +++----------- api/core/tools/tool_engine.py | 20 +- api/core/tools/tool_manager.py | 68 +++--- api/core/tools/utils/configuration.py | 42 ++-- .../tools/utils/dataset_retriever_tool.py | 25 ++- api/core/tools/workflow_as_tool/provider.py | 32 +-- api/core/tools/workflow_as_tool/tool.py | 32 ++- .../workflow_tool_provider.py | 207 ----------------- api/models/model.py | 2 +- .../tools/api_tools_manage_service.py | 16 +- .../tools/builtin_tools_manage_service.py | 12 +- api/services/tools/tools_transform_service.py | 61 ++--- .../tools/workflow_tools_manage_service.py | 18 +- .../tools/api_tool/test_api_tool.py | 12 +- 29 files changed, 498 insertions(+), 906 deletions(-) create mode 100644 api/core/tools/__base/tool_runtime.py delete mode 100644 api/core/tools/workflow_as_tool/workflow_tool_provider.py diff --git a/api/core/agent/base_agent_runner.py b/api/core/agent/base_agent_runner.py index 
55fd8825de6909..64075ed231f4af 100644 --- a/api/core/agent/base_agent_runner.py +++ b/api/core/agent/base_agent_runner.py @@ -2,7 +2,6 @@ import logging import uuid from collections.abc import Mapping, Sequence -from datetime import datetime, timezone from typing import Optional, Union, cast from core.agent.entities import AgentEntity, AgentToolEntity @@ -23,6 +22,7 @@ from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, PromptMessage, + PromptMessageContent, PromptMessageTool, SystemPromptMessage, TextPromptMessageContent, @@ -31,18 +31,15 @@ ) from core.model_runtime.entities.model_entities import ModelFeature from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.utils.encoders import jsonable_encoder from core.tools.__base.tool import Tool from core.tools.entities.tool_entities import ( ToolParameter, - ToolRuntimeVariablePool, ) from core.tools.tool_manager import ToolManager from core.tools.utils.dataset_retriever_tool import DatasetRetrieverTool from core.tools.utils.tool_parameter_converter import ToolParameterConverter from extensions.ext_database import db from models.model import Conversation, Message, MessageAgentThought -from models.tools import ToolConversationVariables logger = logging.getLogger(__name__) @@ -59,11 +56,9 @@ def __init__( queue_manager: AppQueueManager, message: Message, user_id: str, + model_instance: ModelInstance, memory: Optional[TokenBufferMemory] = None, prompt_messages: Optional[list[PromptMessage]] = None, - variables_pool: Optional[ToolRuntimeVariablePool] = None, - db_variables: Optional[ToolConversationVariables] = None, - model_instance: ModelInstance = None, ) -> None: """ Agent runner @@ -93,8 +88,6 @@ def __init__( self.user_id = user_id self.memory = memory self.history_prompt_messages = self.organize_agent_history(prompt_messages=prompt_messages or []) - self.variables_pool = variables_pool - self.db_variables_pool = 
db_variables self.model_instance = model_instance # init callback @@ -162,11 +155,10 @@ def _convert_tool_to_prompt_message_tool(self, tool: AgentToolEntity) -> tuple[P agent_tool=tool, invoke_from=self.application_generate_entity.invoke_from, ) - tool_entity.load_variables(self.variables_pool) - + assert tool_entity.entity.description message_tool = PromptMessageTool( name=tool.tool_name, - description=tool_entity.description.llm, + description=tool_entity.entity.description.llm, parameters={ "type": "object", "properties": {}, @@ -201,9 +193,11 @@ def _convert_dataset_retriever_tool_to_prompt_message_tool(self, tool: DatasetRe """ convert dataset retriever tool to prompt message tool """ + assert tool.entity.description + prompt_tool = PromptMessageTool( - name=tool.identity.name, - description=tool.description.llm, + name=tool.entity.identity.name, + description=tool.entity.description.llm, parameters={ "type": "object", "properties": {}, @@ -232,7 +226,7 @@ def _init_prompt_tools(self) -> tuple[Mapping[str, Tool], Sequence[PromptMessage tool_instances = {} prompt_messages_tools = [] - for tool in self.app_config.agent.tools if self.app_config.agent else []: + for tool in self.app_config.agent.tools or [] if self.app_config.agent else []: try: prompt_tool, tool_entity = self._convert_tool_to_prompt_message_tool(tool) except Exception: @@ -249,7 +243,7 @@ def _init_prompt_tools(self) -> tuple[Mapping[str, Tool], Sequence[PromptMessage # save prompt tool prompt_messages_tools.append(prompt_tool) # save tool entity - tool_instances[dataset_tool.identity.name] = dataset_tool + tool_instances[dataset_tool.entity.identity.name] = dataset_tool return tool_instances, prompt_messages_tools @@ -328,25 +322,29 @@ def create_agent_thought( def save_agent_thought( self, agent_thought: MessageAgentThought, - tool_name: str, - tool_input: Union[str, dict], - thought: str, - observation: Union[str, dict], - tool_invoke_meta: Union[str, dict], - answer: str, + tool_name: str | 
None, + tool_input: Union[str, dict, None], + thought: str | None, + observation: Union[str, dict, None], + tool_invoke_meta: Union[str, dict, None], + answer: str | None, messages_ids: list[str], - llm_usage: LLMUsage = None, - ) -> MessageAgentThought: + llm_usage: LLMUsage | None = None, + ): """ Save agent thought """ - agent_thought = db.session.query(MessageAgentThought).filter(MessageAgentThought.id == agent_thought.id).first() + updated_agent_thought = ( + db.session.query(MessageAgentThought).filter(MessageAgentThought.id == agent_thought.id).first() + ) + if not updated_agent_thought: + raise ValueError("agent thought not found") if thought is not None: - agent_thought.thought = thought + updated_agent_thought.thought = thought if tool_name is not None: - agent_thought.tool = tool_name + updated_agent_thought.tool = tool_name if tool_input is not None: if isinstance(tool_input, dict): @@ -355,7 +353,7 @@ def save_agent_thought( except Exception as e: tool_input = json.dumps(tool_input) - agent_thought.tool_input = tool_input + updated_agent_thought.tool_input = tool_input if observation is not None: if isinstance(observation, dict): @@ -364,27 +362,27 @@ def save_agent_thought( except Exception as e: observation = json.dumps(observation) - agent_thought.observation = observation + updated_agent_thought.observation = observation if answer is not None: - agent_thought.answer = answer + updated_agent_thought.answer = answer if messages_ids is not None and len(messages_ids) > 0: - agent_thought.message_files = json.dumps(messages_ids) + updated_agent_thought.message_files = json.dumps(messages_ids) if llm_usage: - agent_thought.message_token = llm_usage.prompt_tokens - agent_thought.message_price_unit = llm_usage.prompt_price_unit - agent_thought.message_unit_price = llm_usage.prompt_unit_price - agent_thought.answer_token = llm_usage.completion_tokens - agent_thought.answer_price_unit = llm_usage.completion_price_unit - agent_thought.answer_unit_price = 
llm_usage.completion_unit_price - agent_thought.tokens = llm_usage.total_tokens - agent_thought.total_price = llm_usage.total_price + updated_agent_thought.message_token = llm_usage.prompt_tokens + updated_agent_thought.message_price_unit = llm_usage.prompt_price_unit + updated_agent_thought.message_unit_price = llm_usage.prompt_unit_price + updated_agent_thought.answer_token = llm_usage.completion_tokens + updated_agent_thought.answer_price_unit = llm_usage.completion_price_unit + updated_agent_thought.answer_unit_price = llm_usage.completion_unit_price + updated_agent_thought.tokens = llm_usage.total_tokens + updated_agent_thought.total_price = llm_usage.total_price # check if tool labels is not empty - labels = agent_thought.tool_labels or {} - tools = agent_thought.tool.split(";") if agent_thought.tool else [] + labels = updated_agent_thought.tool_labels or {} + tools = updated_agent_thought.tool.split(";") if updated_agent_thought.tool else [] for tool in tools: if not tool: continue @@ -395,7 +393,7 @@ def save_agent_thought( else: labels[tool] = {"en_US": tool, "zh_Hans": tool} - agent_thought.tool_labels_str = json.dumps(labels) + updated_agent_thought.tool_labels_str = json.dumps(labels) if tool_invoke_meta is not None: if isinstance(tool_invoke_meta, dict): @@ -404,25 +402,8 @@ def save_agent_thought( except Exception as e: tool_invoke_meta = json.dumps(tool_invoke_meta) - agent_thought.tool_meta_str = tool_invoke_meta - - db.session.commit() - db.session.close() - - def update_db_variables(self, tool_variables: ToolRuntimeVariablePool, db_variables: ToolConversationVariables): - """ - convert tool variables to db variables - """ - db_variables = ( - db.session.query(ToolConversationVariables) - .filter( - ToolConversationVariables.conversation_id == self.message.conversation_id, - ) - .first() - ) + updated_agent_thought.tool_meta_str = tool_invoke_meta - db_variables.updated_at = datetime.now(timezone.utc).replace(tzinfo=None) - 
db_variables.variables_str = json.dumps(jsonable_encoder(tool_variables.pool)) db.session.commit() db.session.close() @@ -515,6 +496,7 @@ def organize_agent_user_prompt(self, message: Message) -> UserPromptMessage: files = message.message_files if files: + assert message.app_model_config file_extra_config = FileUploadConfigManager.convert(message.app_model_config.to_dict()) if file_extra_config: @@ -525,7 +507,7 @@ def organize_agent_user_prompt(self, message: Message) -> UserPromptMessage: if not file_objs: return UserPromptMessage(content=message.query) else: - prompt_message_contents = [TextPromptMessageContent(data=message.query)] + prompt_message_contents: list[PromptMessageContent] = [TextPromptMessageContent(data=message.query)] for file_obj in file_objs: prompt_message_contents.append(file_obj.prompt_message_content) diff --git a/api/core/agent/cot_agent_runner.py b/api/core/agent/cot_agent_runner.py index 0d74b1e5ebe933..1e62b4308d2c0c 100644 --- a/api/core/agent/cot_agent_runner.py +++ b/api/core/agent/cot_agent_runner.py @@ -1,6 +1,6 @@ import json from abc import ABC, abstractmethod -from collections.abc import Generator +from collections.abc import Generator, Mapping, Sequence from typing import Optional, Union from core.agent.base_agent_runner import BaseAgentRunner @@ -12,6 +12,7 @@ from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, PromptMessage, + PromptMessageTool, ToolPromptMessage, UserPromptMessage, ) @@ -26,11 +27,11 @@ class CotAgentRunner(BaseAgentRunner, ABC): _is_first_iteration = True _ignore_observation_providers = ["wenxin"] - _historic_prompt_messages: list[PromptMessage] = None - _agent_scratchpad: list[AgentScratchpadUnit] = None - _instruction: str = None - _query: str = None - _prompt_messages_tools: list[PromptMessage] = None + _historic_prompt_messages: list[PromptMessage] + _agent_scratchpad: list[AgentScratchpadUnit] + _instruction: str + _query: str + _prompt_messages_tools: 
Sequence[PromptMessageTool] def run( self, @@ -41,6 +42,7 @@ def run( """ Run Cot agent application """ + app_generate_entity = self.application_generate_entity self._repack_app_generate_entity(app_generate_entity) self._init_react_state(query) @@ -53,9 +55,11 @@ def run( app_generate_entity.model_conf.stop.append("Observation") app_config = self.app_config + assert app_config.agent # init instruction inputs = inputs or {} + assert app_config.prompt_template.simple_prompt_template instruction = app_config.prompt_template.simple_prompt_template self._instruction = self._fill_in_inputs_from_external_data_tools(instruction, inputs) @@ -63,13 +67,14 @@ def run( max_iteration_steps = min(app_config.agent.max_iteration, 5) + 1 # convert tools into ModelRuntime Tool format - tool_instances, self._prompt_messages_tools = self._init_prompt_tools() + tool_instances, prompt_messages_tools = self._init_prompt_tools() + self._prompt_messages_tools = prompt_messages_tools function_call_state = True - llm_usage = {"usage": None} + llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None} final_answer = "" - def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): + def increase_usage(final_llm_usage_dict: dict[str, Optional[LLMUsage]], usage: LLMUsage): if not final_llm_usage_dict["usage"]: final_llm_usage_dict["usage"] = usage else: @@ -115,10 +120,6 @@ def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): callbacks=[], ) - # check llm result - if not chunks: - raise ValueError("failed to invoke llm") - usage_dict = {} react_chunks = CotAgentOutputParser.handle_react_stream_output(chunks, usage_dict) scratchpad = AgentScratchpadUnit( @@ -139,11 +140,14 @@ def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): if isinstance(chunk, AgentScratchpadUnit.Action): action = chunk # detect action + assert scratchpad.agent_response is not None scratchpad.agent_response += json.dumps(chunk.model_dump()) 
scratchpad.action_str = json.dumps(chunk.model_dump()) scratchpad.action = action else: + assert scratchpad.agent_response is not None scratchpad.agent_response += chunk + assert scratchpad.thought is not None scratchpad.thought += chunk yield LLMResultChunk( model=self.model_config.model, @@ -152,6 +156,7 @@ def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): delta=LLMResultChunkDelta(index=0, message=AssistantPromptMessage(content=chunk), usage=None), ) + assert scratchpad.thought is not None scratchpad.thought = scratchpad.thought.strip() or "I am thinking about how to help you" self._agent_scratchpad.append(scratchpad) @@ -168,7 +173,7 @@ def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): tool_invoke_meta={}, thought=scratchpad.thought, observation="", - answer=scratchpad.agent_response, + answer=scratchpad.agent_response or "", messages_ids=[], llm_usage=usage_dict["usage"], ) @@ -248,7 +253,6 @@ def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): messages_ids=[], ) - self.update_db_variables(self.variables_pool, self.db_variables_pool) # publish end event self.queue_manager.publish( QueueMessageEndEvent( @@ -266,7 +270,7 @@ def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): def _handle_invoke_action( self, action: AgentScratchpadUnit.Action, - tool_instances: dict[str, Tool], + tool_instances: Mapping[str, Tool], message_file_ids: list[str], trace_manager: Optional[TraceQueueManager] = None, ) -> tuple[str, ToolInvokeMeta]: @@ -307,15 +311,12 @@ def _handle_invoke_action( # publish files for message_file_id, save_as in message_files: - if save_as: - self.variables_pool.set_file(tool_name=tool_call_name, value=message_file_id, name=save_as) - # publish message file self.queue_manager.publish( - QueueMessageFileEvent(message_file_id=message_file_id), PublishFrom.APPLICATION_MANAGER + QueueMessageFileEvent(message_file_id=message_file_id.id), 
PublishFrom.APPLICATION_MANAGER ) # add message file ids - message_file_ids.append(message_file_id) + message_file_ids.append(message_file_id.id) return tool_invoke_response, tool_invoke_meta @@ -369,18 +370,19 @@ def _format_assistant_message(self, agent_scratchpad: list[AgentScratchpadUnit]) return message def _organize_historic_prompt_messages( - self, current_session_messages: list[PromptMessage] = None + self, current_session_messages: list[PromptMessage] | None = None ) -> list[PromptMessage]: """ organize historic prompt messages """ result: list[PromptMessage] = [] scratchpads: list[AgentScratchpadUnit] = [] - current_scratchpad: AgentScratchpadUnit = None + current_scratchpad: AgentScratchpadUnit | None = None for message in self.history_prompt_messages: if isinstance(message, AssistantPromptMessage): if not current_scratchpad: + assert isinstance(message.content, str) current_scratchpad = AgentScratchpadUnit( agent_response=message.content, thought=message.content or "I am thinking about how to help you", @@ -400,6 +402,7 @@ def _organize_historic_prompt_messages( pass elif isinstance(message, ToolPromptMessage): if current_scratchpad: + assert isinstance(message.content, str) current_scratchpad.observation = message.content elif isinstance(message, UserPromptMessage): if scratchpads: diff --git a/api/core/agent/cot_chat_agent_runner.py b/api/core/agent/cot_chat_agent_runner.py index bdec6b7ed15d7b..095f8775aeed03 100644 --- a/api/core/agent/cot_chat_agent_runner.py +++ b/api/core/agent/cot_chat_agent_runner.py @@ -4,6 +4,7 @@ from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, PromptMessage, + PromptMessageContent, SystemPromptMessage, TextPromptMessageContent, UserPromptMessage, @@ -16,6 +17,9 @@ def _organize_system_prompt(self) -> SystemPromptMessage: """ Organize system prompt """ + assert self.app_config.agent + assert self.app_config.agent.prompt + prompt_entity = self.app_config.agent.prompt first_prompt = 
prompt_entity.first_prompt @@ -27,12 +31,12 @@ def _organize_system_prompt(self) -> SystemPromptMessage: return SystemPromptMessage(content=system_prompt) - def _organize_user_query(self, query, prompt_messages: list[PromptMessage] = None) -> list[PromptMessage]: + def _organize_user_query(self, query, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: """ Organize user query """ if self.files: - prompt_message_contents = [TextPromptMessageContent(data=query)] + prompt_message_contents: list[PromptMessageContent] = [TextPromptMessageContent(data=query)] for file_obj in self.files: prompt_message_contents.append(file_obj.prompt_message_content) @@ -57,8 +61,10 @@ def _organize_prompt_messages(self) -> list[PromptMessage]: assistant_message = AssistantPromptMessage(content="") for unit in agent_scratchpad: if unit.is_final(): + assert isinstance(assistant_message.content, str) assistant_message.content += f"Final Answer: {unit.agent_response}" else: + assert isinstance(assistant_message.content, str) assistant_message.content += f"Thought: {unit.thought}\n\n" if unit.action_str: assistant_message.content += f"Action: {unit.action_str}\n\n" diff --git a/api/core/agent/fc_agent_runner.py b/api/core/agent/fc_agent_runner.py index 13164e0bfca218..991c542846626a 100644 --- a/api/core/agent/fc_agent_runner.py +++ b/api/core/agent/fc_agent_runner.py @@ -2,7 +2,7 @@ import logging from collections.abc import Generator from copy import deepcopy -from typing import Any, Union +from typing import Any, Optional, Union from core.agent.base_agent_runner import BaseAgentRunner from core.app.apps.base_app_queue_manager import PublishFrom @@ -11,6 +11,7 @@ from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, PromptMessage, + PromptMessageContent, PromptMessageContentType, SystemPromptMessage, TextPromptMessageContent, @@ -38,18 +39,20 @@ def run(self, message: Message, query: str, **kwargs: Any) -> Generator[LLMResul # convert tools into 
ModelRuntime Tool format tool_instances, prompt_messages_tools = self._init_prompt_tools() + assert app_config.agent + iteration_step = 1 max_iteration_steps = min(app_config.agent.max_iteration, 5) + 1 # continue to run until there is not any tool call function_call_state = True - llm_usage = {"usage": None} + llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None} final_answer = "" # get tracing instance trace_manager = app_generate_entity.trace_manager - def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): + def increase_usage(final_llm_usage_dict: dict[str, Optional[LLMUsage]], usage: LLMUsage): if not final_llm_usage_dict["usage"]: final_llm_usage_dict["usage"] = usage else: @@ -99,7 +102,7 @@ def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): current_llm_usage = None - if self.stream_tool_call: + if isinstance(chunks, Generator): is_first_chunk = True for chunk in chunks: if is_first_chunk: @@ -133,7 +136,7 @@ def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): yield chunk else: - result: LLMResult = chunks + result = chunks # check if there is any tool call if self.check_blocking_tool_calls(result): function_call_state = True @@ -236,15 +239,12 @@ def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: LLMUsage): ) # publish files for message_file_id, save_as in message_files: - if save_as: - self.variables_pool.set_file(tool_name=tool_call_name, value=message_file_id, name=save_as) - # publish message file self.queue_manager.publish( - QueueMessageFileEvent(message_file_id=message_file_id), PublishFrom.APPLICATION_MANAGER + QueueMessageFileEvent(message_file_id=message_file_id.id), PublishFrom.APPLICATION_MANAGER ) # add message file ids - message_file_ids.append(message_file_id) + message_file_ids.append(message_file_id.id) tool_response = { "tool_call_id": tool_call_id, @@ -290,7 +290,6 @@ def increase_usage(final_llm_usage_dict: dict[str, LLMUsage], usage: 
LLMUsage): iteration_step += 1 - self.update_db_variables(self.variables_pool, self.db_variables_pool) # publish end event self.queue_manager.publish( QueueMessageEndEvent( @@ -321,9 +320,7 @@ def check_blocking_tool_calls(self, llm_result: LLMResult) -> bool: return True return False - def extract_tool_calls( - self, llm_result_chunk: LLMResultChunk - ) -> Union[None, list[tuple[str, str, dict[str, Any]]]]: + def extract_tool_calls(self, llm_result_chunk: LLMResultChunk) -> list[tuple[str, str, dict[str, Any]]]: """ Extract tool calls from llm result chunk @@ -346,7 +343,7 @@ def extract_tool_calls( return tool_calls - def extract_blocking_tool_calls(self, llm_result: LLMResult) -> Union[None, list[tuple[str, str, dict[str, Any]]]]: + def extract_blocking_tool_calls(self, llm_result: LLMResult) -> list[tuple[str, str, dict[str, Any]]]: """ Extract blocking tool calls from llm result @@ -370,7 +367,7 @@ def extract_blocking_tool_calls(self, llm_result: LLMResult) -> Union[None, list return tool_calls def _init_system_message( - self, prompt_template: str, prompt_messages: list[PromptMessage] = None + self, prompt_template: str, prompt_messages: list[PromptMessage] ) -> list[PromptMessage]: """ Initialize system message @@ -385,12 +382,12 @@ def _init_system_message( return prompt_messages - def _organize_user_query(self, query, prompt_messages: list[PromptMessage] = None) -> list[PromptMessage]: + def _organize_user_query(self, query, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: """ Organize user query """ if self.files: - prompt_message_contents = [TextPromptMessageContent(data=query)] + prompt_message_contents: list[PromptMessageContent] = [TextPromptMessageContent(data=query)] for file_obj in self.files: prompt_message_contents.append(file_obj.prompt_message_content) diff --git a/api/core/app/apps/agent_chat/app_runner.py b/api/core/app/apps/agent_chat/app_runner.py index 45b1bf00934d35..3a9262f54e7dcc 100644 --- 
a/api/core/app/apps/agent_chat/app_runner.py +++ b/api/core/app/apps/agent_chat/app_runner.py @@ -16,10 +16,8 @@ from core.model_runtime.entities.model_entities import ModelFeature, ModelPropertyKey from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.moderation.base import ModerationError -from core.tools.entities.tool_entities import ToolRuntimeVariablePool from extensions.ext_database import db from models.model import App, Conversation, Message, MessageAgentThought -from models.tools import ToolConversationVariables logger = logging.getLogger(__name__) @@ -174,14 +172,6 @@ def run( agent_entity = app_config.agent - # load tool variables - tool_conversation_variables = self._load_tool_variables( - conversation_id=conversation.id, user_id=application_generate_entity.user_id, tenant_id=app_config.tenant_id - ) - - # convert db variables to tool variables - tool_variables = self._convert_db_variables_to_tool_variables(tool_conversation_variables) - # init model instance model_instance = ModelInstance( provider_model_bundle=application_generate_entity.model_conf.provider_model_bundle, @@ -234,8 +224,6 @@ def run( user_id=application_generate_entity.user_id, memory=memory, prompt_messages=prompt_message, - variables_pool=tool_variables, - db_variables=tool_conversation_variables, model_instance=model_instance, ) @@ -253,50 +241,6 @@ def run( agent=True, ) - def _load_tool_variables(self, conversation_id: str, user_id: str, tenant_id: str) -> ToolConversationVariables: - """ - load tool variables from database - """ - tool_variables: ToolConversationVariables = ( - db.session.query(ToolConversationVariables) - .filter( - ToolConversationVariables.conversation_id == conversation_id, - ToolConversationVariables.tenant_id == tenant_id, - ) - .first() - ) - - if tool_variables: - # save tool variables to session, so that we can update it later - db.session.add(tool_variables) - else: - # create new tool variables - 
tool_variables = ToolConversationVariables( - conversation_id=conversation_id, - user_id=user_id, - tenant_id=tenant_id, - variables_str="[]", - ) - db.session.add(tool_variables) - db.session.commit() - - return tool_variables - - def _convert_db_variables_to_tool_variables( - self, db_variables: ToolConversationVariables - ) -> ToolRuntimeVariablePool: - """ - convert db variables to tool variables - """ - return ToolRuntimeVariablePool( - **{ - "conversation_id": db_variables.conversation_id, - "user_id": db_variables.user_id, - "tenant_id": db_variables.tenant_id, - "pool": db_variables.variables, - } - ) - def _get_usage_of_all_agent_thoughts( self, model_config: ModelConfigWithCredentialsEntity, message: Message ) -> LLMUsage: diff --git a/api/core/model_manager.py b/api/core/model_manager.py index 990efd36c609c2..28f01e1a19c050 100644 --- a/api/core/model_manager.py +++ b/api/core/model_manager.py @@ -1,7 +1,7 @@ import logging import os from collections.abc import Callable, Generator, Sequence -from typing import IO, Optional, Union, cast +from typing import IO, Literal, Optional, Union, cast, overload from core.entities.provider_configuration import ProviderConfiguration, ProviderModelBundle from core.entities.provider_entities import ModelLoadBalancingConfiguration @@ -97,6 +97,42 @@ def _get_load_balancing_manager( return None + @overload + def invoke_llm( + self, + prompt_messages: list[PromptMessage], + model_parameters: Optional[dict] = None, + tools: Sequence[PromptMessageTool] | None = None, + stop: Optional[list[str]] = None, + stream: Literal[True] = True, + user: Optional[str] = None, + callbacks: Optional[list[Callback]] = None, + ) -> Generator: ... 
+ + @overload + def invoke_llm( + self, + prompt_messages: list[PromptMessage], + model_parameters: Optional[dict] = None, + tools: Sequence[PromptMessageTool] | None = None, + stop: Optional[list[str]] = None, + stream: Literal[False] = False, + user: Optional[str] = None, + callbacks: Optional[list[Callback]] = None, + ) -> LLMResult: ... + + @overload + def invoke_llm( + self, + prompt_messages: list[PromptMessage], + model_parameters: Optional[dict] = None, + tools: Sequence[PromptMessageTool] | None = None, + stop: Optional[list[str]] = None, + stream: bool = True, + user: Optional[str] = None, + callbacks: Optional[list[Callback]] = None, + ) -> Union[LLMResult, Generator]: ... + def invoke_llm( self, prompt_messages: list[PromptMessage], diff --git a/api/core/tools/__base/tool.py b/api/core/tools/__base/tool.py index 49f9bf68eaf9a3..548db51a2aedfb 100644 --- a/api/core/tools/__base/tool.py +++ b/api/core/tools/__base/tool.py @@ -1,72 +1,34 @@ from abc import ABC, abstractmethod from collections.abc import Generator from copy import deepcopy -from enum import Enum -from typing import TYPE_CHECKING, Any, Optional, Union +from typing import TYPE_CHECKING, Any, Optional -from pydantic import BaseModel, ConfigDict, Field, field_validator -from pydantic_core.core_schema import ValidationInfo - -from core.app.entities.app_invoke_entities import InvokeFrom +from core.tools.__base.tool_runtime import ToolRuntime from core.tools.entities.tool_entities import ( - ToolDescription, - ToolIdentity, - ToolInvokeFrom, + ToolEntity, ToolInvokeMessage, ToolParameter, ToolProviderType, - ToolRuntimeImageVariable, - ToolRuntimeVariable, - ToolRuntimeVariablePool, ) -from core.tools.tool_file_manager import ToolFileManager from core.tools.utils.tool_parameter_converter import ToolParameterConverter if TYPE_CHECKING: from core.file.file_obj import FileVar -class Tool(BaseModel, ABC): - identity: ToolIdentity - parameters: list[ToolParameter] = Field(default_factory=list) - 
description: Optional[ToolDescription] = None - is_team_authorization: bool = False +class Tool(ABC): + """ + The base class of a tool + """ - # pydantic configs - model_config = ConfigDict(protected_namespaces=()) + entity: ToolEntity + runtime: ToolRuntime - @field_validator("parameters", mode="before") - @classmethod - def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParameter]: - return v or [] + def __init__(self, entity: ToolEntity, runtime: ToolRuntime) -> None: + self.entity = entity + self.runtime = runtime - class Runtime(BaseModel): - """ - Meta data of a tool call processing - """ - - def __init__(self, **data: Any): - super().__init__(**data) - if not self.runtime_parameters: - self.runtime_parameters = {} - - tenant_id: Optional[str] = None - tool_id: Optional[str] = None - invoke_from: Optional[InvokeFrom] = None - tool_invoke_from: Optional[ToolInvokeFrom] = None - credentials: Optional[dict[str, Any]] = None - runtime_parameters: dict[str, Any] = Field(default_factory=dict) - - runtime: Optional[Runtime] = None - variables: Optional[ToolRuntimeVariablePool] = None - - def __init__(self, **data: Any): - super().__init__(**data) - - class VariableKey(Enum): - IMAGE = "image" - - def fork_tool_runtime(self, runtime: dict[str, Any]) -> "Tool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> "Tool": """ fork a new tool with meta data @@ -74,10 +36,8 @@ def fork_tool_runtime(self, runtime: dict[str, Any]) -> "Tool": :return: the new tool """ return self.__class__( - identity=self.identity.model_copy() if self.identity else None, - parameters=self.parameters.copy() if self.parameters else None, - description=self.description.model_copy() if self.description else None, - runtime=Tool.Runtime(**runtime), + entity=self.entity.model_copy(), + runtime=runtime, ) @abstractmethod @@ -88,112 +48,6 @@ def tool_provider_type(self) -> ToolProviderType: :return: the tool provider type """ - def load_variables(self, variables: 
ToolRuntimeVariablePool): - """ - load variables from database - - :param conversation_id: the conversation id - """ - self.variables = variables - - def set_image_variable(self, variable_name: str, image_key: str) -> None: - """ - set an image variable - """ - if not self.variables: - return - - self.variables.set_file(self.identity.name, variable_name, image_key) - - def set_text_variable(self, variable_name: str, text: str) -> None: - """ - set a text variable - """ - if not self.variables: - return - - self.variables.set_text(self.identity.name, variable_name, text) - - def get_variable(self, name: Union[str, Enum]) -> Optional[ToolRuntimeVariable]: - """ - get a variable - - :param name: the name of the variable - :return: the variable - """ - if not self.variables: - return None - - if isinstance(name, Enum): - name = name.value - - for variable in self.variables.pool: - if variable.name == name: - return variable - - return None - - def get_default_image_variable(self) -> Optional[ToolRuntimeVariable]: - """ - get the default image variable - - :return: the image variable - """ - if not self.variables: - return None - - return self.get_variable(self.VariableKey.IMAGE) - - def get_variable_file(self, name: Union[str, Enum]) -> Optional[bytes]: - """ - get a variable file - - :param name: the name of the variable - :return: the variable file - """ - variable = self.get_variable(name) - if not variable: - return None - - if not isinstance(variable, ToolRuntimeImageVariable): - return None - - message_file_id = variable.value - # get file binary - file_binary = ToolFileManager.get_file_binary_by_message_file_id(message_file_id) - if not file_binary: - return None - - return file_binary[0] - - def list_variables(self) -> list[ToolRuntimeVariable]: - """ - list all variables - - :return: the variables - """ - if not self.variables: - return [] - - return self.variables.pool - - def list_default_image_variables(self) -> list[ToolRuntimeVariable]: - """ - list all 
image variables - - :return: the image variables - """ - if not self.variables: - return [] - - result = [] - - for variable in self.variables.pool: - if variable.name.startswith(self.VariableKey.IMAGE.value): - result.append(variable) - - return result - def invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage]: if self.runtime and self.runtime.runtime_parameters: tool_parameters.update(self.runtime.runtime_parameters) @@ -227,7 +81,7 @@ def _transform_tool_parameters_type(self, tool_parameters: dict[str, Any]) -> di """ # Temp fix for the issue that the tool parameters will be converted to empty while validating the credentials result = deepcopy(tool_parameters) - for parameter in self.parameters or []: + for parameter in self.entity.parameters: if parameter.name in tool_parameters: result[parameter.name] = ToolParameterConverter.cast_parameter_by_type( tool_parameters[parameter.name], parameter.type @@ -241,15 +95,6 @@ def _invoke( ) -> ToolInvokeMessage | list[ToolInvokeMessage] | Generator[ToolInvokeMessage, None, None]: pass - def validate_credentials(self, credentials: dict[str, Any], parameters: dict[str, Any]) -> None: - """ - validate the credentials - - :param credentials: the credentials - :param parameters: the parameters - """ - pass - def get_runtime_parameters(self) -> list[ToolParameter]: """ get the runtime parameters @@ -258,7 +103,7 @@ def get_runtime_parameters(self) -> list[ToolParameter]: :return: the runtime parameters """ - return self.parameters or [] + return self.entity.parameters def get_all_runtime_parameters(self) -> list[ToolParameter]: """ @@ -266,7 +111,7 @@ def get_all_runtime_parameters(self) -> list[ToolParameter]: :return: all runtime parameters """ - parameters = self.parameters or [] + parameters = self.entity.parameters parameters = parameters.copy() user_parameters = self.get_runtime_parameters() or [] user_parameters = user_parameters.copy() @@ -274,20 +119,16 @@ def 
get_all_runtime_parameters(self) -> list[ToolParameter]: # override parameters for parameter in user_parameters: # check if parameter in tool parameters - found = False for tool_parameter in parameters: if tool_parameter.name == parameter.name: - found = True + # override parameter + tool_parameter.type = parameter.type + tool_parameter.form = parameter.form + tool_parameter.required = parameter.required + tool_parameter.default = parameter.default + tool_parameter.options = parameter.options + tool_parameter.llm_description = parameter.llm_description break - - if found: - # override parameter - tool_parameter.type = parameter.type - tool_parameter.form = parameter.form - tool_parameter.required = parameter.required - tool_parameter.default = parameter.default - tool_parameter.options = parameter.options - tool_parameter.llm_description = parameter.llm_description else: # add new parameter parameters.append(parameter) diff --git a/api/core/tools/__base/tool_provider.py b/api/core/tools/__base/tool_provider.py index 7960ed5f84d5dc..c71885e48de191 100644 --- a/api/core/tools/__base/tool_provider.py +++ b/api/core/tools/__base/tool_provider.py @@ -1,23 +1,22 @@ from abc import ABC, abstractmethod from typing import Any -from pydantic import BaseModel, ConfigDict, Field - from core.entities.provider_entities import ProviderConfig from core.tools.__base.tool import Tool from core.tools.entities.tool_entities import ( - ToolProviderIdentity, + ToolProviderEntity, ToolProviderType, ) from core.tools.errors import ToolProviderCredentialValidationError -class ToolProviderController(BaseModel, ABC): - identity: ToolProviderIdentity - tools: list[Tool] = Field(default_factory=list) - credentials_schema: dict[str, ProviderConfig] = Field(default_factory=dict) +class ToolProviderController(ABC): + entity: ToolProviderEntity + tools: list[Tool] - model_config = ConfigDict(validate_assignment=True) + def __init__(self, entity: ToolProviderEntity) -> None: + self.entity = entity 
+ self.tools = [] def get_credentials_schema(self) -> dict[str, ProviderConfig]: """ @@ -25,7 +24,7 @@ def get_credentials_schema(self) -> dict[str, ProviderConfig]: :return: the credentials schema """ - return self.credentials_schema.copy() + return self.entity.credentials_schema.copy() @abstractmethod def get_tool(self, tool_name: str) -> Tool: @@ -51,7 +50,7 @@ def validate_credentials_format(self, credentials: dict[str, Any]) -> None: :param credentials: the credentials of the tool """ - credentials_schema = self.credentials_schema + credentials_schema = self.entity.credentials_schema if credentials_schema is None: return @@ -62,7 +61,7 @@ def validate_credentials_format(self, credentials: dict[str, Any]) -> None: for credential_name in credentials: if credential_name not in credentials_need_to_validate: raise ToolProviderCredentialValidationError( - f"credential {credential_name} not found in provider {self.identity.name}" + f"credential {credential_name} not found in provider {self.entity.identity.name}" ) # check type diff --git a/api/core/tools/__base/tool_runtime.py b/api/core/tools/__base/tool_runtime.py new file mode 100644 index 00000000000000..d4b2ef61049d92 --- /dev/null +++ b/api/core/tools/__base/tool_runtime.py @@ -0,0 +1,36 @@ +from typing import Any, Optional + +from openai import BaseModel +from pydantic import Field + +from core.app.entities.app_invoke_entities import InvokeFrom +from core.tools.entities.tool_entities import ToolInvokeFrom + + +class ToolRuntime(BaseModel): + """ + Meta data of a tool call processing + """ + + tenant_id: str + tool_id: Optional[str] = None + invoke_from: Optional[InvokeFrom] = None + tool_invoke_from: Optional[ToolInvokeFrom] = None + credentials: Optional[dict[str, Any]] = None + runtime_parameters: dict[str, Any] = Field(default_factory=dict) + + +class FakeToolRuntime(ToolRuntime): + """ + Fake tool runtime for testing + """ + + def __init__(self): + super().__init__( + tenant_id="fake_tenant_id", + 
tool_id="fake_tool_id", + invoke_from=InvokeFrom.DEBUGGER, + tool_invoke_from=ToolInvokeFrom.AGENT, + credentials={}, + runtime_parameters={}, + ) diff --git a/api/core/tools/builtin_tool/provider.py b/api/core/tools/builtin_tool/provider.py index 7d1775b7f561fa..4ebd82f8e778a8 100644 --- a/api/core/tools/builtin_tool/provider.py +++ b/api/core/tools/builtin_tool/provider.py @@ -2,13 +2,12 @@ from os import listdir, path from typing import Any -from pydantic import Field - from core.entities.provider_entities import ProviderConfig from core.helper.module_import_helper import load_single_subclass_from_source from core.tools.__base.tool_provider import ToolProviderController +from core.tools.__base.tool_runtime import ToolRuntime from core.tools.builtin_tool.tool import BuiltinTool -from core.tools.entities.tool_entities import ToolProviderType +from core.tools.entities.tool_entities import ToolEntity, ToolProviderEntity, ToolProviderType from core.tools.entities.values import ToolLabelEnum, default_tool_label_dict from core.tools.errors import ( ToolProviderNotFoundError, @@ -17,10 +16,10 @@ class BuiltinToolProviderController(ToolProviderController): - tools: list[BuiltinTool] = Field(default_factory=list) + tools: list[BuiltinTool] def __init__(self, **data: Any) -> None: - if self.provider_type in {ToolProviderType.API, ToolProviderType.APP}: + if self.provider_type == ToolProviderType.API: super().__init__(**data) return @@ -37,10 +36,12 @@ def __init__(self, **data: Any) -> None: for credential_name in provider_yaml["credentials_for_provider"]: provider_yaml["credentials_for_provider"][credential_name]["name"] = credential_name - super().__init__(**{ - 'identity': provider_yaml['identity'], - 'credentials_schema': provider_yaml.get('credentials_for_provider', {}) or {}, - }) + super().__init__( + entity=ToolProviderEntity( + identity=provider_yaml["identity"], + credentials_schema=provider_yaml.get("credentials_for_provider", {}) or {}, + ), + ) def 
_get_builtin_tools(self) -> list[BuiltinTool]: """ @@ -51,7 +52,7 @@ def _get_builtin_tools(self) -> list[BuiltinTool]: if self.tools: return self.tools - provider = self.identity.name + provider = self.entity.identity.name tool_path = path.join(path.dirname(path.realpath(__file__)), "providers", provider, "tools") # get all the yaml files in the tool path tool_files = list(filter(lambda x: x.endswith(".yaml") and not x.startswith("__"), listdir(tool_path))) @@ -62,30 +63,36 @@ def _get_builtin_tools(self) -> list[BuiltinTool]: tool = load_yaml_file(path.join(tool_path, tool_file), ignore_error=False) # get tool class, import the module - assistant_tool_class = load_single_subclass_from_source( + assistant_tool_class: type[BuiltinTool] = load_single_subclass_from_source( module_name=f"core.tools.builtin_tool.providers.{provider}.tools.{tool_name}", script_path=path.join( - path.dirname(path.realpath(__file__)), - "builtin_tool", "providers", provider, "tools", f"{tool_name}.py" + path.dirname(path.realpath(__file__)), + "builtin_tool", + "providers", + provider, + "tools", + f"{tool_name}.py", ), parent_type=BuiltinTool, ) tool["identity"]["provider"] = provider - tools.append(assistant_tool_class(**tool)) + tools.append(assistant_tool_class( + entity=ToolEntity(**tool), runtime=ToolRuntime(tenant_id=""), + )) self.tools = tools return tools - + def get_credentials_schema(self) -> dict[str, ProviderConfig]: """ returns the credentials schema of the provider :return: the credentials schema """ - if not self.credentials_schema: + if not self.entity.credentials_schema: return {} - return self.credentials_schema.copy() + return self.entity.credentials_schema.copy() def get_tools(self) -> list[BuiltinTool]: """ @@ -94,12 +101,12 @@ def get_tools(self) -> list[BuiltinTool]: :return: list of tools """ return self._get_builtin_tools() - + def get_tool(self, tool_name: str) -> BuiltinTool | None: """ returns the tool that the provider can provide """ - return 
next(filter(lambda x: x.identity.name == tool_name, self.get_tools()), None) + return next(filter(lambda x: x.entity.identity.name == tool_name, self.get_tools()), None) @property def need_credentials(self) -> bool: @@ -108,7 +115,7 @@ def need_credentials(self) -> bool: :return: whether the provider needs credentials """ - return self.credentials_schema is not None and len(self.credentials_schema) != 0 + return self.entity.credentials_schema is not None and len(self.entity.credentials_schema) != 0 @property def provider_type(self) -> ToolProviderType: @@ -133,8 +140,8 @@ def _get_tool_labels(self) -> list[ToolLabelEnum]: """ returns the labels of the provider """ - return self.identity.tags or [] - + return self.entity.identity.tags or [] + def validate_credentials(self, credentials: dict[str, Any]) -> None: """ validate the credentials of the provider diff --git a/api/core/tools/builtin_tool/providers/qrcode/qrcode.py b/api/core/tools/builtin_tool/providers/qrcode/qrcode.py index 542ee7b63e9e09..e792382ee3f451 100644 --- a/api/core/tools/builtin_tool/providers/qrcode/qrcode.py +++ b/api/core/tools/builtin_tool/providers/qrcode/qrcode.py @@ -1,13 +1,8 @@ from typing import Any from core.tools.builtin_tool.provider import BuiltinToolProviderController -from core.tools.builtin_tool.providers.qrcode.tools.qrcode_generator import QRCodeGeneratorTool -from core.tools.errors import ToolProviderCredentialValidationError class QRCodeProvider(BuiltinToolProviderController): def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - QRCodeGeneratorTool().invoke(user_id="", tool_parameters={"content": "Dify 123 😊"}) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) + pass diff --git a/api/core/tools/builtin_tool/providers/time/time.py b/api/core/tools/builtin_tool/providers/time/time.py index 234ca9d9d6389b..d70fc22dfcae51 100644 --- a/api/core/tools/builtin_tool/providers/time/time.py +++ 
b/api/core/tools/builtin_tool/providers/time/time.py @@ -1,16 +1,8 @@ from typing import Any from core.tools.builtin_tool.provider import BuiltinToolProviderController -from core.tools.builtin_tool.providers.time.tools.current_time import CurrentTimeTool -from core.tools.errors import ToolProviderCredentialValidationError class WikiPediaProvider(BuiltinToolProviderController): def _validate_credentials(self, credentials: dict[str, Any]) -> None: - try: - CurrentTimeTool().invoke( - user_id="", - tool_parameters={}, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) + pass diff --git a/api/core/tools/builtin_tool/tool.py b/api/core/tools/builtin_tool/tool.py index 243d99dee3f36c..fe77f9ac77bc74 100644 --- a/api/core/tools/builtin_tool/tool.py +++ b/api/core/tools/builtin_tool/tool.py @@ -32,9 +32,9 @@ def invoke_model(self, user_id: str, prompt_messages: list[PromptMessage], stop: # invoke model return ModelInvocationUtils.invoke( user_id=user_id, - tenant_id=self.runtime.tenant_id or "", + tenant_id=self.runtime.tenant_id, tool_type="builtin", - tool_name=self.identity.name, + tool_name=self.entity.identity.name, prompt_messages=prompt_messages, ) @@ -79,6 +79,7 @@ def summarize(content: str) -> str: stop=[], ) + assert isinstance(summary.message.content, str) return summary.message.content lines = content.split("\n") diff --git a/api/core/tools/custom_tool/provider.py b/api/core/tools/custom_tool/provider.py index 7ebaa6c5c6fc29..32eda1d9bcda8a 100644 --- a/api/core/tools/custom_tool/provider.py +++ b/api/core/tools/custom_tool/provider.py @@ -7,6 +7,8 @@ from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ( ApiProviderAuthType, + ToolProviderEntity, + ToolProviderIdentity, ToolProviderType, ) from extensions.ext_database import db @@ -18,6 +20,11 @@ class ApiToolProviderController(ToolProviderController): tenant_id: str tools: list[ApiTool] = Field(default_factory=list) + def 
__init__(self, entity: ToolProviderEntity, provider_id: str, tenant_id: str) -> None: + super().__init__(entity) + self.provider_id = provider_id + self.tenant_id = tenant_id + @staticmethod def from_db(db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> "ApiToolProviderController": credentials_schema = { @@ -64,25 +71,23 @@ def from_db(db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> "Ap } elif auth_type == ApiProviderAuthType.NONE: pass - else: - raise ValueError(f"invalid auth type {auth_type}") user = db_provider.user user_name = user.name if user else "" return ApiToolProviderController( - **{ - "identity": { - "author": user_name, - "name": db_provider.name, - "label": {"en_US": db_provider.name, "zh_Hans": db_provider.name}, - "description": {"en_US": db_provider.description, "zh_Hans": db_provider.description}, - "icon": db_provider.icon, - }, - "credentials_schema": credentials_schema, - "provider_id": db_provider.id or "", - "tenant_id": db_provider.tenant_id or "", - }, + entity=ToolProviderEntity( + identity=ToolProviderIdentity( + author=user_name, + name=db_provider.name, + label=I18nObject(en_US=db_provider.name, zh_Hans=db_provider.name), + description=I18nObject(en_US=db_provider.description, zh_Hans=db_provider.description), + icon=db_provider.icon, + ), + credentials_schema=credentials_schema, + ), + provider_id=db_provider.id or "", + tenant_id=db_provider.tenant_id or "", ) @property @@ -103,7 +108,7 @@ def _parse_tool_bundle(self, tool_bundle: ApiToolBundle) -> ApiTool: "author": tool_bundle.author, "name": tool_bundle.operation_id, "label": {"en_US": tool_bundle.operation_id, "zh_Hans": tool_bundle.operation_id}, - "icon": self.identity.icon, + "icon": self.entity.identity.icon, "provider": self.provider_id, }, "description": { @@ -141,7 +146,7 @@ def get_tools(self, tenant_id: str) -> list[ApiTool]: # get tenant api providers db_providers: list[ApiToolProvider] = ( db.session.query(ApiToolProvider) - 
.filter(ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == self.identity.name) + .filter(ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == self.entity.identity.name) .all() ) @@ -149,7 +154,6 @@ def get_tools(self, tenant_id: str) -> list[ApiTool]: for db_provider in db_providers: for tool in db_provider.tools: assistant_tool = self._parse_tool_bundle(tool) - assistant_tool.is_team_authorization = True tools.append(assistant_tool) self.tools = tools @@ -166,7 +170,7 @@ def get_tool(self, tool_name: str) -> ApiTool: self.get_tools(self.tenant_id) for tool in self.tools: - if tool.identity.name == tool_name: + if tool.entity.identity.name == tool_name: return tool raise ValueError(f"tool {tool_name} not found") diff --git a/api/core/tools/custom_tool/tool.py b/api/core/tools/custom_tool/tool.py index 9a728bb684b664..e36c97a2defcec 100644 --- a/api/core/tools/custom_tool/tool.py +++ b/api/core/tools/custom_tool/tool.py @@ -8,8 +8,9 @@ from core.helper import ssrf_proxy from core.tools.__base.tool import Tool +from core.tools.__base.tool_runtime import ToolRuntime from core.tools.entities.tool_bundle import ApiToolBundle -from core.tools.entities.tool_entities import ToolInvokeMessage, ToolProviderType +from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolProviderType from core.tools.errors import ToolInvokeError, ToolParameterValidationError, ToolProviderCredentialValidationError API_TOOL_DEFAULT_TIMEOUT = ( @@ -25,7 +26,11 @@ class ApiTool(Tool): Api tool """ - def fork_tool_runtime(self, runtime: dict[str, Any]) -> "Tool": + def __init__(self, entity: ToolEntity, api_bundle: ApiToolBundle, runtime: ToolRuntime): + super().__init__(entity, runtime) + self.api_bundle = api_bundle + + def fork_tool_runtime(self, runtime: ToolRuntime): """ fork a new tool with meta data @@ -33,11 +38,9 @@ def fork_tool_runtime(self, runtime: dict[str, Any]) -> "Tool": :return: the new tool """ return self.__class__( - 
identity=self.identity.model_copy(), - parameters=self.parameters.copy() if self.parameters else [], - description=self.description.model_copy() if self.description else None, + entity=self.entity, api_bundle=self.api_bundle.model_copy(), - runtime=Tool.Runtime(**runtime), + runtime=runtime, ) def validate_credentials( @@ -62,7 +65,7 @@ def tool_provider_type(self) -> ToolProviderType: def assembling_request(self, parameters: dict[str, Any]) -> dict[str, Any]: if self.runtime == None: raise ToolProviderCredentialValidationError("runtime not initialized") - + headers = {} credentials = self.runtime.credentials or {} diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index cc84f6eaada186..80334a274ec441 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -1,10 +1,11 @@ import base64 from enum import Enum -from typing import Any, Optional, Union, cast +from typing import Any, Optional, Union -from pydantic import BaseModel, Field, field_serializer, field_validator +from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_serializer, field_validator from core.entities.parameter_entities import AppSelectorScope, CommonParameterType, ModelConfigScope +from core.entities.provider_entities import ProviderConfig from core.tools.entities.common_entities import I18nObject @@ -122,14 +123,14 @@ def transform_variable_value(cls, value, values) -> Any: """ if not isinstance(value, dict | list | str | int | float | bool): raise ValueError("Only basic types and lists are allowed.") - + # if stream is true, the value must be a string - if values.get('stream'): + if values.get("stream"): if not isinstance(value, str): raise ValueError("When 'stream' is True, 'variable_value' must be a string.") return value - + @field_validator("variable_name", mode="before") @classmethod def transform_variable_name(cls, value) -> str: @@ -158,22 +159,20 @@ class MessageType(Enum): meta: 
dict[str, Any] | None = None save_as: str = "" - @field_validator('message', mode='before') + @field_validator("message", mode="before") @classmethod def decode_blob_message(cls, v): - if isinstance(v, dict) and 'blob' in v: + if isinstance(v, dict) and "blob" in v: try: - v['blob'] = base64.b64decode(v['blob']) + v["blob"] = base64.b64decode(v["blob"]) except Exception: pass return v - @field_serializer('message') + @field_serializer("message") def serialize_message(self, v): if isinstance(v, self.BlobMessage): - return { - 'blob': base64.b64encode(v.blob).decode('utf-8') - } + return {"blob": base64.b64encode(v.blob).decode("utf-8")} return v @@ -252,9 +251,9 @@ def get_simple_instance( option_objs = [] return cls( name=name, - label=I18nObject(en_US='', zh_Hans=''), + label=I18nObject(en_US="", zh_Hans=""), placeholder=None, - human_description=I18nObject(en_US='', zh_Hans=''), + human_description=I18nObject(en_US="", zh_Hans=""), type=type, form=cls.ToolParameterForm.LLM, llm_description=llm_description, @@ -275,6 +274,11 @@ class ToolProviderIdentity(BaseModel): ) +class ToolProviderEntity(BaseModel): + identity: ToolProviderIdentity + credentials_schema: dict[str, ProviderConfig] = Field(default_factory=dict) + + class ToolDescription(BaseModel): human: I18nObject = Field(..., description="The description presented to the user") llm: str = Field(..., description="The description presented to the LLM") @@ -288,131 +292,6 @@ class ToolIdentity(BaseModel): icon: Optional[str] = None -class ToolRuntimeVariableType(Enum): - TEXT = "text" - IMAGE = "image" - - -class ToolRuntimeVariable(BaseModel): - type: ToolRuntimeVariableType = Field(..., description="The type of the variable") - name: str = Field(..., description="The name of the variable") - position: int = Field(..., description="The position of the variable") - tool_name: str = Field(..., description="The name of the tool") - - -class ToolRuntimeTextVariable(ToolRuntimeVariable): - value: str = Field(..., 
description="The value of the variable") - - -class ToolRuntimeImageVariable(ToolRuntimeVariable): - value: str = Field(..., description="The path of the image") - - -class ToolRuntimeVariablePool(BaseModel): - conversation_id: str = Field(..., description="The conversation id") - user_id: str = Field(..., description="The user id") - tenant_id: str = Field(..., description="The tenant id of assistant") - - pool: list[ToolRuntimeVariable] = Field(..., description="The pool of variables") - - def __init__(self, **data: Any): - pool = data.get("pool", []) - # convert pool into correct type - for index, variable in enumerate(pool): - if variable["type"] == ToolRuntimeVariableType.TEXT.value: - pool[index] = ToolRuntimeTextVariable(**variable) - elif variable["type"] == ToolRuntimeVariableType.IMAGE.value: - pool[index] = ToolRuntimeImageVariable(**variable) - super().__init__(**data) - - def dict(self) -> dict: - return { - "conversation_id": self.conversation_id, - "user_id": self.user_id, - "tenant_id": self.tenant_id, - "pool": [variable.model_dump() for variable in self.pool], - } - - def set_text(self, tool_name: str, name: str, value: str) -> None: - """ - set a text variable - """ - for variable in self.pool: - if variable.name == name: - if variable.type == ToolRuntimeVariableType.TEXT: - variable = cast(ToolRuntimeTextVariable, variable) - variable.value = value - return - - variable = ToolRuntimeTextVariable( - type=ToolRuntimeVariableType.TEXT, - name=name, - position=len(self.pool), - tool_name=tool_name, - value=value, - ) - - self.pool.append(variable) - - def set_file(self, tool_name: str, value: str, name: Optional[str] = None) -> None: - """ - set an image variable - - :param tool_name: the name of the tool - :param value: the id of the file - """ - # check how many image variables are there - image_variable_count = 0 - for variable in self.pool: - if variable.type == ToolRuntimeVariableType.IMAGE: - image_variable_count += 1 - - if name is None: - 
name = f"file_{image_variable_count}" - - for variable in self.pool: - if variable.name == name: - if variable.type == ToolRuntimeVariableType.IMAGE: - variable = cast(ToolRuntimeImageVariable, variable) - variable.value = value - return - - variable = ToolRuntimeImageVariable( - type=ToolRuntimeVariableType.IMAGE, - name=name, - position=len(self.pool), - tool_name=tool_name, - value=value, - ) - - self.pool.append(variable) - - -class ModelToolPropertyKey(Enum): - IMAGE_PARAMETER_NAME = "image_parameter_name" - - -class ModelToolConfiguration(BaseModel): - """ - Model tool configuration - """ - - type: str = Field(..., description="The type of the model tool") - model: str = Field(..., description="The model") - label: I18nObject = Field(..., description="The label of the model tool") - properties: dict[ModelToolPropertyKey, Any] = Field(..., description="The properties of the model tool") - - -class ModelToolProviderConfiguration(BaseModel): - """ - Model tool provider configuration - """ - - provider: str = Field(..., description="The provider of the model tool") - models: list[ModelToolConfiguration] = Field(..., description="The models of the model tool") - label: I18nObject = Field(..., description="The label of the model tool") - - class WorkflowToolParameterConfiguration(BaseModel): """ Workflow tool configuration @@ -471,3 +350,17 @@ class ToolInvokeFrom(Enum): WORKFLOW = "workflow" AGENT = "agent" + + +class ToolEntity(BaseModel): + identity: ToolIdentity + parameters: list[ToolParameter] = Field(default_factory=list) + description: Optional[ToolDescription] = None + + # pydantic configs + model_config = ConfigDict(protected_namespaces=()) + + @field_validator("parameters", mode="before") + @classmethod + def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParameter]: + return v or [] diff --git a/api/core/tools/tool_engine.py b/api/core/tools/tool_engine.py index cb2f6a899cd8e8..d8889917f0ded5 100644 --- 
a/api/core/tools/tool_engine.py +++ b/api/core/tools/tool_engine.py @@ -65,7 +65,7 @@ def agent_invoke( # invoke the tool try: # hit the callback handler - agent_tool_callback.on_tool_start(tool_name=tool.identity.name, tool_inputs=tool_parameters) + agent_tool_callback.on_tool_start(tool_name=tool.entity.identity.name, tool_inputs=tool_parameters) messages = ToolEngine._invoke(tool, tool_parameters, user_id) invocation_meta_dict: dict[str, ToolInvokeMeta] = {} @@ -99,7 +99,7 @@ def message_callback( # hit the callback handler agent_tool_callback.on_tool_end( - tool_name=tool.identity.name, + tool_name=tool.entity.identity.name, tool_inputs=tool_parameters, tool_outputs=plain_text, message_id=message.id, @@ -112,7 +112,7 @@ def message_callback( error_response = "Please check your tool provider credentials" agent_tool_callback.on_tool_error(e) except (ToolNotFoundError, ToolNotSupportedError, ToolProviderNotFoundError) as e: - error_response = f"there is not a tool named {tool.identity.name}" + error_response = f"there is not a tool named {tool.entity.identity.name}" agent_tool_callback.on_tool_error(e) except ToolParameterValidationError as e: error_response = f"tool parameters validation error: {e}, please check your tool parameters" @@ -145,7 +145,7 @@ def workflow_invoke( """ try: # hit the callback handler - workflow_tool_callback.on_tool_start(tool_name=tool.identity.name, tool_inputs=tool_parameters) + workflow_tool_callback.on_tool_start(tool_name=tool.entity.identity.name, tool_inputs=tool_parameters) if isinstance(tool, WorkflowTool): tool.workflow_call_depth = workflow_call_depth + 1 @@ -158,7 +158,7 @@ def workflow_invoke( # hit the callback handler workflow_tool_callback.on_tool_end( - tool_name=tool.identity.name, + tool_name=tool.entity.identity.name, tool_inputs=tool_parameters, tool_outputs=response, ) @@ -177,13 +177,13 @@ def plugin_invoke( """ try: # hit the callback handler - callback.on_tool_start(tool_name=tool.identity.name, 
tool_inputs=tool_parameters) + callback.on_tool_start(tool_name=tool.entity.identity.name, tool_inputs=tool_parameters) response = tool.invoke(user_id, tool_parameters) # hit the callback handler callback.on_tool_end( - tool_name=tool.identity.name, + tool_name=tool.entity.identity.name, tool_inputs=tool_parameters, tool_outputs=response, ) @@ -208,11 +208,11 @@ def _invoke( time_cost=0.0, error=None, tool_config={ - "tool_name": tool.identity.name, - "tool_provider": tool.identity.provider, + "tool_name": tool.entity.identity.name, + "tool_provider": tool.entity.identity.provider, "tool_provider_type": tool.tool_provider_type().value, "tool_parameters": deepcopy(tool.runtime.runtime_parameters), - "tool_icon": tool.identity.icon, + "tool_icon": tool.entity.identity.icon, }, ) try: diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 0cfcb6d9b9dbaf..c37ee730c832e9 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -6,6 +6,8 @@ from threading import Lock from typing import TYPE_CHECKING, Any, Union, cast +from core.tools.__base.tool_runtime import ToolRuntime + if TYPE_CHECKING: from core.workflow.nodes.tool.entities import ToolEntity @@ -105,12 +107,12 @@ def get_tool_runtime( return cast( BuiltinTool, builtin_tool.fork_tool_runtime( - runtime={ - "tenant_id": tenant_id, - "credentials": {}, - "invoke_from": invoke_from, - "tool_invoke_from": tool_invoke_from, - } + runtime=ToolRuntime( + tenant_id=tenant_id, + credentials={}, + invoke_from=invoke_from, + tool_invoke_from=tool_invoke_from, + ) ), ) @@ -134,7 +136,7 @@ def get_tool_runtime( tenant_id=tenant_id, config=controller.get_credentials_schema(), provider_type=controller.provider_type.value, - provider_identity=controller.identity.name, + provider_identity=controller.entity.identity.name, ) decrypted_credentials = tool_configuration.decrypt(credentials) @@ -142,13 +144,13 @@ def get_tool_runtime( return cast( BuiltinTool, 
builtin_tool.fork_tool_runtime( - runtime={ - "tenant_id": tenant_id, - "credentials": decrypted_credentials, - "runtime_parameters": {}, - "invoke_from": invoke_from, - "tool_invoke_from": tool_invoke_from, - } + runtime=ToolRuntime( + tenant_id=tenant_id, + credentials=decrypted_credentials, + runtime_parameters={}, + invoke_from=invoke_from, + tool_invoke_from=tool_invoke_from, + ) ), ) @@ -163,19 +165,19 @@ def get_tool_runtime( tenant_id=tenant_id, config=api_provider.get_credentials_schema(), provider_type=api_provider.provider_type.value, - provider_identity=api_provider.identity.name, + provider_identity=api_provider.entity.identity.name, ) decrypted_credentials = tool_configuration.decrypt(credentials) return cast( ApiTool, api_provider.get_tool(tool_name).fork_tool_runtime( - runtime={ - "tenant_id": tenant_id, - "credentials": decrypted_credentials, - "invoke_from": invoke_from, - "tool_invoke_from": tool_invoke_from, - } + runtime=ToolRuntime( + tenant_id=tenant_id, + credentials=decrypted_credentials, + invoke_from=invoke_from, + tool_invoke_from=tool_invoke_from, + ) ), ) elif provider_type == ToolProviderType.WORKFLOW: @@ -193,12 +195,12 @@ def get_tool_runtime( return cast( WorkflowTool, controller.get_tools(tenant_id=workflow_provider.tenant_id)[0].fork_tool_runtime( - runtime={ - "tenant_id": tenant_id, - "credentials": {}, - "invoke_from": invoke_from, - "tool_invoke_from": tool_invoke_from, - } + runtime=ToolRuntime( + tenant_id=tenant_id, + credentials={}, + invoke_from=invoke_from, + tool_invoke_from=tool_invoke_from, + ) ), ) elif provider_type == ToolProviderType.APP: @@ -336,7 +338,7 @@ def get_builtin_provider_icon(cls, provider: str) -> tuple[str, str]: "providers", provider, "_assets", - provider_controller.identity.icon, + provider_controller.entity.identity.icon, ) # check if the icon exists if not path.exists(absolute_path): @@ -389,9 +391,9 @@ def _list_builtin_providers(cls) -> Generator[BuiltinToolProviderController, Non 
parent_type=BuiltinToolProviderController, ) provider: BuiltinToolProviderController = provider_class() - cls._builtin_providers[provider.identity.name] = provider + cls._builtin_providers[provider.entity.identity.name] = provider for tool in provider.get_tools(): - cls._builtin_tools_labels[tool.identity.name] = tool.identity.label + cls._builtin_tools_labels[tool.entity.identity.name] = tool.entity.identity.label yield provider except Exception as e: @@ -466,11 +468,11 @@ def user_list_providers( user_provider = ToolTransformService.builtin_provider_to_user_provider( provider_controller=provider, - db_provider=find_db_builtin_provider(provider.identity.name), + db_provider=find_db_builtin_provider(provider.entity.identity.name), decrypt_credentials=False, ) - result_providers[provider.identity.name] = user_provider + result_providers[provider.entity.identity.name] = user_provider # get db api providers @@ -589,7 +591,7 @@ def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict: tenant_id=tenant_id, config=controller.get_credentials_schema(), provider_type=controller.provider_type.value, - provider_identity=controller.identity.name, + provider_identity=controller.entity.identity.name, ) decrypted_credentials = tool_configuration.decrypt(credentials) diff --git a/api/core/tools/utils/configuration.py b/api/core/tools/utils/configuration.py index 0ab2b0021a36dd..9f685a89b6a627 100644 --- a/api/core/tools/utils/configuration.py +++ b/api/core/tools/utils/configuration.py @@ -59,12 +59,11 @@ def mask_tool_credentials(self, data: dict[str, Any]) -> dict[str, Any]: if field.type == BasicProviderConfig.Type.SECRET_INPUT: if field_name in data: if len(data[field_name]) > 6: - data[field_name] = \ - data[field_name][:2] + \ - '*' * (len(data[field_name]) - 4) + \ - data[field_name][-2:] + data[field_name] = ( + data[field_name][:2] + "*" * (len(data[field_name]) - 4) + data[field_name][-2:] + ) else: - data[field_name] = '*' * len(data[field_name]) + 
data[field_name] = "*" * len(data[field_name]) return data @@ -75,9 +74,9 @@ def decrypt(self, data: dict[str, str]) -> dict[str, str]: return a deep copy of credentials with decrypted values """ cache = ToolProviderCredentialsCache( - tenant_id=self.tenant_id, - identity_id=f'{self.provider_type}.{self.provider_identity}', - cache_type=ToolProviderCredentialsCacheType.PROVIDER + tenant_id=self.tenant_id, + identity_id=f"{self.provider_type}.{self.provider_identity}", + cache_type=ToolProviderCredentialsCacheType.PROVIDER, ) cached_credentials = cache.get() if cached_credentials: @@ -98,14 +97,14 @@ def decrypt(self, data: dict[str, str]) -> dict[str, str]: def delete_tool_credentials_cache(self): cache = ToolProviderCredentialsCache( - tenant_id=self.tenant_id, - identity_id=f'{self.provider_type}.{self.provider_identity}', - cache_type=ToolProviderCredentialsCacheType.PROVIDER + tenant_id=self.tenant_id, + identity_id=f"{self.provider_type}.{self.provider_identity}", + cache_type=ToolProviderCredentialsCacheType.PROVIDER, ) cache.delete() -class ToolParameterConfigurationManager(BaseModel): +class ToolParameterConfigurationManager: """ Tool parameter configuration manager """ @@ -116,6 +115,15 @@ class ToolParameterConfigurationManager(BaseModel): provider_type: ToolProviderType identity_id: str + def __init__( + self, tenant_id: str, tool_runtime: Tool, provider_name: str, provider_type: ToolProviderType, identity_id: str + ) -> None: + self.tenant_id = tenant_id + self.tool_runtime = tool_runtime + self.provider_name = provider_name + self.provider_type = provider_type + self.identity_id = identity_id + def _deep_copy(self, parameters: dict[str, Any]) -> dict[str, Any]: """ deep copy parameters @@ -127,7 +135,7 @@ def _merge_parameters(self) -> list[ToolParameter]: merge parameters """ # get tool parameters - tool_parameters = self.tool_runtime.parameters or [] + tool_parameters = self.tool_runtime.entity.parameters or [] # get tool runtime parameters 
runtime_parameters = self.tool_runtime.get_runtime_parameters() or [] # override parameters @@ -203,8 +211,8 @@ def decrypt_tool_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]: """ cache = ToolParameterCache( tenant_id=self.tenant_id, - provider=f'{self.provider_type.value}.{self.provider_name}', - tool_name=self.tool_runtime.identity.name, + provider=f"{self.provider_type.value}.{self.provider_name}", + tool_name=self.tool_runtime.entity.identity.name, cache_type=ToolParameterCacheType.PARAMETER, identity_id=self.identity_id, ) @@ -236,8 +244,8 @@ def decrypt_tool_parameters(self, parameters: dict[str, Any]) -> dict[str, Any]: def delete_tool_parameters_cache(self): cache = ToolParameterCache( tenant_id=self.tenant_id, - provider=f'{self.provider_type.value}.{self.provider_name}', - tool_name=self.tool_runtime.identity.name, + provider=f"{self.provider_type.value}.{self.provider_name}", + tool_name=self.tool_runtime.entity.identity.name, cache_type=ToolParameterCacheType.PARAMETER, identity_id=self.identity_id, ) diff --git a/api/core/tools/utils/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever_tool.py index 7b612227227dc4..136491005c38a2 100644 --- a/api/core/tools/utils/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever_tool.py @@ -6,9 +6,11 @@ from core.callback_handler.index_tool_callback_handler import DatasetIndexToolCallbackHandler from core.rag.retrieval.dataset_retrieval import DatasetRetrieval from core.tools.__base.tool import Tool +from core.tools.__base.tool_runtime import ToolRuntime from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ( ToolDescription, + ToolEntity, ToolIdentity, ToolInvokeMessage, ToolParameter, @@ -20,11 +22,15 @@ class DatasetRetrieverTool(Tool): retrieval_tool: DatasetRetrieverBaseTool + def __init__(self, entity: ToolEntity, runtime: ToolRuntime, retrieval_tool: DatasetRetrieverBaseTool) -> None: + super().__init__(entity, 
runtime) + self.retrieval_tool = retrieval_tool + @staticmethod def get_dataset_tools( tenant_id: str, dataset_ids: list[str], - retrieve_config: DatasetRetrieveConfigEntity, + retrieve_config: DatasetRetrieveConfigEntity | None, return_resource: bool, invoke_from: InvokeFrom, hit_callback: DatasetIndexToolCallbackHandler, @@ -54,7 +60,7 @@ def get_dataset_tools( ) if retrieval_tools is None or len(retrieval_tools) == 0: return [] - + # restore retrieve strategy retrieve_config.retrieve_strategy = original_retriever_mode @@ -63,13 +69,14 @@ def get_dataset_tools( for retrieval_tool in retrieval_tools: tool = DatasetRetrieverTool( retrieval_tool=retrieval_tool, - identity=ToolIdentity( - provider="", author="", name=retrieval_tool.name, label=I18nObject(en_US="", zh_Hans="") + entity=ToolEntity( + identity=ToolIdentity( + provider="", author="", name=retrieval_tool.name, label=I18nObject(en_US="", zh_Hans="") + ), + parameters=[], + description=ToolDescription(human=I18nObject(en_US="", zh_Hans=""), llm=retrieval_tool.description), ), - parameters=[], - is_team_authorization=True, - description=ToolDescription(human=I18nObject(en_US="", zh_Hans=""), llm=retrieval_tool.description), - runtime=DatasetRetrieverTool.Runtime(), + runtime=ToolRuntime(tenant_id=tenant_id), ) tools.append(tool) @@ -99,7 +106,7 @@ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[To """ query = tool_parameters.get("query") if not query: - yield self.create_text_message(text='please input query') + yield self.create_text_message(text="please input query") else: # invoke dataset retriever tool result = self.retrieval_tool._run(query=query) diff --git a/api/core/tools/workflow_as_tool/provider.py b/api/core/tools/workflow_as_tool/provider.py index cab5f84506d1c0..2d0d33ffd9a138 100644 --- a/api/core/tools/workflow_as_tool/provider.py +++ b/api/core/tools/workflow_as_tool/provider.py @@ -6,9 +6,11 @@ from core.app.app_config.entities import VariableEntity, 
VariableEntityType from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager from core.tools.__base.tool_provider import ToolProviderController +from core.tools.__base.tool_runtime import ToolRuntime from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ( ToolDescription, + ToolEntity, ToolIdentity, ToolParameter, ToolParameterOption, @@ -63,7 +65,7 @@ def from_db(cls, db_provider: WorkflowToolProvider) -> "WorkflowToolProviderCont @property def provider_type(self) -> ToolProviderType: return ToolProviderType.WORKFLOW - + def _get_db_provider_tool(self, db_provider: WorkflowToolProvider, app: App) -> WorkflowTool: """ get db provider tool @@ -140,19 +142,23 @@ def fetch_workflow_variable(variable_name: str) -> VariableEntity | None: raise ValueError("variable not found") return WorkflowTool( - identity=ToolIdentity( - author=user.name if user else "", - name=db_provider.name, - label=I18nObject(en_US=db_provider.label, zh_Hans=db_provider.label), - provider=self.provider_id, - icon=db_provider.icon, + entity=ToolEntity( + identity=ToolIdentity( + author=user.name if user else "", + name=db_provider.name, + label=I18nObject(en_US=db_provider.label, zh_Hans=db_provider.label), + provider=self.provider_id, + icon=db_provider.icon, + ), + description=ToolDescription( + human=I18nObject(en_US=db_provider.description, zh_Hans=db_provider.description), + llm=db_provider.description, + ), + parameters=workflow_tool_parameters, ), - description=ToolDescription( - human=I18nObject(en_US=db_provider.description, zh_Hans=db_provider.description), - llm=db_provider.description, + runtime=ToolRuntime( + tenant_id=db_provider.tenant_id, ), - parameters=workflow_tool_parameters, - is_team_authorization=True, workflow_app_id=app.id, workflow_entities={ "app": app, @@ -201,7 +207,7 @@ def get_tool(self, tool_name: str) -> Optional[WorkflowTool]: return None for tool in self.tools: - if tool.identity.name == 
tool_name: + if tool.entity.identity.name == tool_name: return tool return None diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index 72aae2796c893b..e1fc5140d0d72a 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -1,12 +1,12 @@ import json import logging from collections.abc import Generator -from copy import deepcopy from typing import Any, Optional, Union from core.file.file_obj import FileTransferMethod, FileVar from core.tools.__base.tool import Tool -from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter, ToolProviderType +from core.tools.__base.tool_runtime import ToolRuntime +from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolParameter, ToolProviderType from extensions.ext_database import db from models.account import Account from models.model import App, EndUser @@ -28,6 +28,26 @@ class WorkflowTool(Tool): Workflow tool. """ + def __init__( + self, + workflow_app_id: str, + version: str, + workflow_entities: dict[str, Any], + workflow_call_depth: int, + entity: ToolEntity, + runtime: ToolRuntime, + label: str = "Workflow", + thread_pool_id: Optional[str] = None, + ): + self.workflow_app_id = workflow_app_id + self.version = version + self.workflow_entities = workflow_entities + self.workflow_call_depth = workflow_call_depth + self.thread_pool_id = thread_pool_id + self.label = label + + super().__init__(entity=entity, runtime=runtime) + def tool_provider_type(self) -> ToolProviderType: """ get the tool provider type @@ -94,7 +114,7 @@ def _get_user(self, user_id: str) -> Union[EndUser, Account]: return user - def fork_tool_runtime(self, runtime: dict[str, Any]) -> "WorkflowTool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> "WorkflowTool": """ fork a new tool with meta data @@ -102,10 +122,8 @@ def fork_tool_runtime(self, runtime: dict[str, Any]) -> "WorkflowTool": :return: the new tool """ 
return self.__class__( - identity=deepcopy(self.identity), - parameters=deepcopy(self.parameters), - description=deepcopy(self.description), - runtime=Tool.Runtime(**runtime), + entity=self.entity.model_copy(), + runtime=runtime, workflow_app_id=self.workflow_app_id, workflow_entities=self.workflow_entities, workflow_call_depth=self.workflow_call_depth, diff --git a/api/core/tools/workflow_as_tool/workflow_tool_provider.py b/api/core/tools/workflow_as_tool/workflow_tool_provider.py deleted file mode 100644 index cab5f84506d1c0..00000000000000 --- a/api/core/tools/workflow_as_tool/workflow_tool_provider.py +++ /dev/null @@ -1,207 +0,0 @@ -from collections.abc import Mapping -from typing import Optional - -from pydantic import Field - -from core.app.app_config.entities import VariableEntity, VariableEntityType -from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager -from core.tools.__base.tool_provider import ToolProviderController -from core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import ( - ToolDescription, - ToolIdentity, - ToolParameter, - ToolParameterOption, - ToolProviderType, -) -from core.tools.utils.workflow_configuration_sync import WorkflowToolConfigurationUtils -from core.tools.workflow_as_tool.tool import WorkflowTool -from extensions.ext_database import db -from models.model import App, AppMode -from models.tools import WorkflowToolProvider -from models.workflow import Workflow - -VARIABLE_TO_PARAMETER_TYPE_MAPPING = { - VariableEntityType.TEXT_INPUT: ToolParameter.ToolParameterType.STRING, - VariableEntityType.PARAGRAPH: ToolParameter.ToolParameterType.STRING, - VariableEntityType.SELECT: ToolParameter.ToolParameterType.SELECT, - VariableEntityType.NUMBER: ToolParameter.ToolParameterType.NUMBER, -} - - -class WorkflowToolProviderController(ToolProviderController): - provider_id: str - tools: list[WorkflowTool] = Field(default_factory=list) - - @classmethod - def from_db(cls, 
db_provider: WorkflowToolProvider) -> "WorkflowToolProviderController": - app = db_provider.app - - if not app: - raise ValueError("app not found") - - controller = WorkflowToolProviderController( - **{ - "identity": { - "author": db_provider.user.name if db_provider.user_id and db_provider.user else "", - "name": db_provider.label, - "label": {"en_US": db_provider.label, "zh_Hans": db_provider.label}, - "description": {"en_US": db_provider.description, "zh_Hans": db_provider.description}, - "icon": db_provider.icon, - }, - "credentials_schema": {}, - "provider_id": db_provider.id or "", - } - ) - - # init tools - - controller.tools = [controller._get_db_provider_tool(db_provider, app)] - - return controller - - @property - def provider_type(self) -> ToolProviderType: - return ToolProviderType.WORKFLOW - - def _get_db_provider_tool(self, db_provider: WorkflowToolProvider, app: App) -> WorkflowTool: - """ - get db provider tool - :param db_provider: the db provider - :param app: the app - :return: the tool - """ - workflow: Workflow | None = db.session.query(Workflow).filter( - Workflow.app_id == db_provider.app_id, - Workflow.version == db_provider.version - ).first() - - if not workflow: - raise ValueError("workflow not found") - - # fetch start node - graph: Mapping = workflow.graph_dict - features_dict: Mapping = workflow.features_dict - features = WorkflowAppConfigManager.convert_features( - config_dict=features_dict, - app_mode=AppMode.WORKFLOW - ) - - parameters = db_provider.parameter_configurations - variables = WorkflowToolConfigurationUtils.get_workflow_graph_variables(graph) - - def fetch_workflow_variable(variable_name: str) -> VariableEntity | None: - return next(filter(lambda x: x.variable == variable_name, variables), None) - - user = db_provider.user - - workflow_tool_parameters = [] - for parameter in parameters: - variable = fetch_workflow_variable(parameter.name) - if variable: - parameter_type = None - options = [] - if variable.type not in 
VARIABLE_TO_PARAMETER_TYPE_MAPPING: - raise ValueError(f"unsupported variable type {variable.type}") - parameter_type = VARIABLE_TO_PARAMETER_TYPE_MAPPING[variable.type] - - if variable.type == VariableEntityType.SELECT and variable.options: - options = [ - ToolParameterOption(value=option, label=I18nObject(en_US=option, zh_Hans=option)) - for option in variable.options - ] - - workflow_tool_parameters.append( - ToolParameter( - name=parameter.name, - label=I18nObject(en_US=variable.label, zh_Hans=variable.label), - human_description=I18nObject(en_US=parameter.description, zh_Hans=parameter.description), - type=parameter_type, - form=parameter.form, - llm_description=parameter.description, - required=variable.required, - options=options, - default=variable.default, - ) - ) - elif features.file_upload: - workflow_tool_parameters.append( - ToolParameter( - name=parameter.name, - label=I18nObject(en_US=parameter.name, zh_Hans=parameter.name), - human_description=I18nObject(en_US=parameter.description, zh_Hans=parameter.description), - type=ToolParameter.ToolParameterType.FILE, - llm_description=parameter.description, - required=False, - form=parameter.form, - ) - ) - else: - raise ValueError("variable not found") - - return WorkflowTool( - identity=ToolIdentity( - author=user.name if user else "", - name=db_provider.name, - label=I18nObject(en_US=db_provider.label, zh_Hans=db_provider.label), - provider=self.provider_id, - icon=db_provider.icon, - ), - description=ToolDescription( - human=I18nObject(en_US=db_provider.description, zh_Hans=db_provider.description), - llm=db_provider.description, - ), - parameters=workflow_tool_parameters, - is_team_authorization=True, - workflow_app_id=app.id, - workflow_entities={ - "app": app, - "workflow": workflow, - }, - version=db_provider.version, - workflow_call_depth=0, - label=db_provider.label, - ) - - def get_tools(self, tenant_id: str) -> list[WorkflowTool]: - """ - fetch tools from database - - :param user_id: the user id 
- :param tenant_id: the tenant id - :return: the tools - """ - if self.tools is not None: - return self.tools - - db_providers: WorkflowToolProvider | None = db.session.query(WorkflowToolProvider).filter( - WorkflowToolProvider.tenant_id == tenant_id, - WorkflowToolProvider.app_id == self.provider_id, - ).first() - - if not db_providers: - return [] - - app = db_providers.app - if not app: - raise ValueError("can not read app of workflow") - - self.tools = [self._get_db_provider_tool(db_providers, app)] - - return self.tools - - def get_tool(self, tool_name: str) -> Optional[WorkflowTool]: - """ - get tool by name - - :param tool_name: the name of the tool - :return: the tool - """ - if self.tools is None: - return None - - for tool in self.tools: - if tool.identity.name == tool_name: - return tool - - return None diff --git a/api/models/model.py b/api/models/model.py index f4e768684900be..26b42963ccbc93 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -1304,7 +1304,7 @@ class MessageChain(db.Model): created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) -class MessageAgentThought(db.Model): +class MessageAgentThought(Base): __tablename__ = "message_agent_thoughts" __table_args__ = ( db.PrimaryKeyConstraint("id", name="message_agent_thought_pkey"), diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index 11aa3ba529f605..e39c2b8a5bc83e 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -5,6 +5,7 @@ from core.entities.provider_entities import ProviderConfig from core.model_runtime.utils.encoders import jsonable_encoder +from core.tools.__base.tool_runtime import ToolRuntime from core.tools.custom_tool.provider import ApiToolProviderController from core.tools.entities.api_entities import UserTool, UserToolProvider from core.tools.entities.common_entities import I18nObject @@ -160,7 +161,7 @@ 
def create_api_tool_provider( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.identity.name + provider_identity=provider_controller.entity.identity.name ) encrypted_credentials = tool_configuration.encrypt(credentials) @@ -222,6 +223,7 @@ def list_api_tool_provider_tools(user_id: str, tenant_id: str, provider_name: st return [ ToolTransformService.tool_to_user_tool( tool_bundle, + tenant_id=tenant_id, labels=labels, ) for tool_bundle in provider.tools @@ -291,7 +293,7 @@ def update_api_tool_provider( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.identity.name + provider_identity=provider_controller.entity.identity.name ) original_credentials = tool_configuration.decrypt(provider.credentials) @@ -410,7 +412,7 @@ def test_api_tool_preview( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.identity.name + provider_identity=provider_controller.entity.identity.name ) decrypted_credentials = tool_configuration.decrypt(credentials) # check if the credential has changed, save the original credential @@ -424,10 +426,10 @@ def test_api_tool_preview( # get tool tool = provider_controller.get_tool(tool_name) tool = tool.fork_tool_runtime( - runtime={ - "credentials": credentials, - "tenant_id": tenant_id, - } + runtime=ToolRuntime( + credentials=credentials, + tenant_id=tenant_id, + ) ) result = tool.validate_credentials(credentials, parameters) except Exception as e: diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index 6db8718b6b46c0..83b363bb580588 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ 
b/api/services/tools/builtin_tools_manage_service.py @@ -32,7 +32,7 @@ def list_builtin_tool_provider_tools(user_id: str, tenant_id: str, provider: str tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.identity.name, + provider_identity=provider_controller.entity.identity.name, ) # check if user has added the provider builtin_provider: BuiltinToolProvider | None = ( @@ -71,7 +71,7 @@ def list_builtin_provider_credentials_schema(provider_name): :return: the list of tool providers """ provider = ToolManager.get_builtin_provider(provider_name) - return jsonable_encoder([v for _, v in (provider.credentials_schema or {}).items()]) + return jsonable_encoder([v for _, v in (provider.entity.credentials_schema or {}).items()]) @staticmethod def update_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: str, credentials: dict): @@ -97,7 +97,7 @@ def update_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.identity.name, + provider_identity=provider_controller.entity.identity.name, ) # get original credentials if exists @@ -159,7 +159,7 @@ def get_builtin_tool_provider_credentials(user_id: str, tenant_id: str, provider tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.identity.name, + provider_identity=provider_controller.entity.identity.name, ) credentials = tool_configuration.decrypt(provider_obj.credentials) credentials = tool_configuration.mask_tool_credentials(credentials) @@ -191,7 +191,7 @@ def delete_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st tenant_id=tenant_id, 
config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.identity.name, + provider_identity=provider_controller.entity.identity.name, ) tool_configuration.delete_tool_credentials_cache() @@ -241,7 +241,7 @@ def list_builtin_tools(user_id: str, tenant_id: str) -> list[UserToolProvider]: # convert provider controller to user provider user_builtin_provider = ToolTransformService.builtin_provider_to_user_provider( provider_controller=provider_controller, - db_provider=find_provider(provider_controller.identity.name), + db_provider=find_provider(provider_controller.entity.identity.name), decrypt_credentials=True, ) diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index b7488621c643cb..d4f132e902d08e 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -4,6 +4,7 @@ from configs import dify_config from core.tools.__base.tool import Tool +from core.tools.__base.tool_runtime import ToolRuntime from core.tools.builtin_tool.provider import BuiltinToolProviderController from core.tools.custom_tool.provider import ApiToolProviderController from core.tools.entities.api_entities import UserTool, UserToolProvider @@ -69,19 +70,19 @@ def builtin_provider_to_user_provider( convert provider controller to user provider """ result = UserToolProvider( - id=provider_controller.identity.name, - author=provider_controller.identity.author, - name=provider_controller.identity.name, + id=provider_controller.entity.identity.name, + author=provider_controller.entity.identity.author, + name=provider_controller.entity.identity.name, description=I18nObject( - en_US=provider_controller.identity.description.en_US, - zh_Hans=provider_controller.identity.description.zh_Hans, - pt_BR=provider_controller.identity.description.pt_BR, + en_US=provider_controller.entity.identity.description.en_US, + 
zh_Hans=provider_controller.entity.identity.description.zh_Hans, + pt_BR=provider_controller.entity.identity.description.pt_BR, ), - icon=provider_controller.identity.icon, + icon=provider_controller.entity.identity.icon, label=I18nObject( - en_US=provider_controller.identity.label.en_US, - zh_Hans=provider_controller.identity.label.zh_Hans, - pt_BR=provider_controller.identity.label.pt_BR, + en_US=provider_controller.entity.identity.label.en_US, + zh_Hans=provider_controller.entity.identity.label.zh_Hans, + pt_BR=provider_controller.entity.identity.label.pt_BR, ), type=ToolProviderType.BUILT_IN, masked_credentials={}, @@ -111,7 +112,7 @@ def builtin_provider_to_user_provider( tenant_id=db_provider.tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.identity.name + provider_identity=provider_controller.entity.identity.name, ) # decrypt the credentials and mask the credentials decrypted_credentials = tool_configuration.decrypt(data=credentials) @@ -155,16 +156,16 @@ def workflow_provider_to_user_provider( """ return UserToolProvider( id=provider_controller.provider_id, - author=provider_controller.identity.author, - name=provider_controller.identity.name, + author=provider_controller.entity.identity.author, + name=provider_controller.entity.identity.name, description=I18nObject( - en_US=provider_controller.identity.description.en_US, - zh_Hans=provider_controller.identity.description.zh_Hans, + en_US=provider_controller.entity.identity.description.en_US, + zh_Hans=provider_controller.entity.identity.description.zh_Hans, ), - icon=provider_controller.identity.icon, + icon=provider_controller.entity.identity.icon, label=I18nObject( - en_US=provider_controller.identity.label.en_US, - zh_Hans=provider_controller.identity.label.zh_Hans, + en_US=provider_controller.entity.identity.label.en_US, + zh_Hans=provider_controller.entity.identity.label.zh_Hans, ), 
type=ToolProviderType.WORKFLOW, masked_credentials={}, @@ -189,7 +190,7 @@ def api_provider_to_user_provider( user = db_provider.user if not user: raise ValueError("user not found") - + username = user.name except Exception as e: logger.error(f"failed to get user name for api provider {db_provider.id}: {str(e)}") @@ -222,7 +223,7 @@ def api_provider_to_user_provider( tenant_id=db_provider.tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.identity.name + provider_identity=provider_controller.entity.identity.name, ) # decrypt the credentials and mask the credentials @@ -236,8 +237,8 @@ def api_provider_to_user_provider( @staticmethod def tool_to_user_tool( tool: Union[ApiToolBundle, WorkflowTool, Tool], + tenant_id: str, credentials: dict | None = None, - tenant_id: str | None = None, labels: list[str] | None = None, ) -> UserTool: """ @@ -246,14 +247,14 @@ def tool_to_user_tool( if isinstance(tool, Tool): # fork tool runtime tool = tool.fork_tool_runtime( - runtime={ - "credentials": credentials, - "tenant_id": tenant_id, - } + runtime=ToolRuntime( + credentials=credentials, + tenant_id=tenant_id, + ) ) # get tool parameters - parameters = tool.parameters or [] + parameters = tool.entity.parameters or [] # get tool runtime parameters runtime_parameters = tool.get_runtime_parameters() or [] # override parameters @@ -270,10 +271,10 @@ def tool_to_user_tool( current_parameters.append(runtime_parameter) return UserTool( - author=tool.identity.author, - name=tool.identity.name, - label=tool.identity.label, - description=tool.description.human if tool.description else I18nObject(en_US=''), + author=tool.entity.identity.author, + name=tool.entity.identity.name, + label=tool.entity.identity.label, + description=tool.entity.description.human if tool.entity.description else I18nObject(en_US=""), parameters=current_parameters, labels=labels or [], ) diff --git 
a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index 60e26aa28281ec..58bf7946bf190f 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -211,7 +211,9 @@ def list_tenant_workflow_tools(cls, user_id: str, tenant_id: str) -> list[UserTo ToolTransformService.repack_provider(user_tool_provider) user_tool_provider.tools = [ ToolTransformService.tool_to_user_tool( - tool.get_tools(user_id, tenant_id)[0], labels=labels.get(tool.provider_id, []) + tool=tool.get_tools(user_id, tenant_id)[0], + labels=labels.get(tool.provider_id, []), + tenant_id=tenant_id, ) ] result.append(user_tool_provider) @@ -248,7 +250,7 @@ def get_workflow_tool_by_tool_id(cls, user_id: str, tenant_id: str, workflow_too .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) .first() ) - return cls._get_workflow_tool(db_tool) + return cls._get_workflow_tool(tenant_id, db_tool) @classmethod def get_workflow_tool_by_app_id(cls, user_id: str, tenant_id: str, workflow_app_id: str) -> dict: @@ -264,10 +266,10 @@ def get_workflow_tool_by_app_id(cls, user_id: str, tenant_id: str, workflow_app_ .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.app_id == workflow_app_id) .first() ) - return cls._get_workflow_tool(db_tool) + return cls._get_workflow_tool(tenant_id, db_tool) @classmethod - def _get_workflow_tool(cls, db_tool: WorkflowToolProvider | None): + def _get_workflow_tool(cls, tenant_id: str, db_tool: WorkflowToolProvider | None): """ Get a workflow tool. 
:db_tool: the database tool @@ -298,7 +300,9 @@ def _get_workflow_tool(cls, db_tool: WorkflowToolProvider | None): "description": db_tool.description, "parameters": jsonable_encoder(db_tool.parameter_configurations), "tool": ToolTransformService.tool_to_user_tool( - tool.get_tools(db_tool.tenant_id)[0], labels=ToolLabelManager.get_tool_labels(tool) + tool=tool.get_tools(db_tool.tenant_id)[0], + labels=ToolLabelManager.get_tool_labels(tool), + tenant_id=tenant_id, ), "synced": workflow.version == db_tool.version, "privacy_policy": db_tool.privacy_policy, @@ -326,6 +330,8 @@ def list_single_workflow_tools(cls, user_id: str, tenant_id: str, workflow_tool_ return [ ToolTransformService.tool_to_user_tool( - tool=tool.get_tools(db_tool.tenant_id)[0], labels=ToolLabelManager.get_tool_labels(tool) + tool=tool.get_tools(db_tool.tenant_id)[0], + labels=ToolLabelManager.get_tool_labels(tool), + tenant_id=tenant_id, ) ] diff --git a/api/tests/integration_tests/tools/api_tool/test_api_tool.py b/api/tests/integration_tests/tools/api_tool/test_api_tool.py index e4798e02c38148..b945d3fef7b697 100644 --- a/api/tests/integration_tests/tools/api_tool/test_api_tool.py +++ b/api/tests/integration_tests/tools/api_tool/test_api_tool.py @@ -1,5 +1,9 @@ from core.tools.__base.tool import Tool +from core.tools.__base.tool_runtime import ToolRuntime from core.tools.custom_tool.tool import ApiTool +from core.tools.entities.common_entities import I18nObject +from core.tools.entities.tool_bundle import ApiToolBundle +from core.tools.entities.tool_entities import ToolEntity, ToolIdentity from tests.integration_tests.tools.__mock.http import setup_http_mock tool_bundle = { @@ -29,7 +33,13 @@ def test_api_tool(setup_http_mock): - tool = ApiTool(api_bundle=tool_bundle, runtime=Tool.Runtime(credentials={"auth_type": "none"})) + tool = ApiTool( + entity=ToolEntity( + identity=ToolIdentity(provider="", author="", name="", label=I18nObject()), + ), + api_bundle=ApiToolBundle(**tool_bundle), + 
runtime=ToolRuntime(tenant_id="", credentials={"auth_type": "none"}), + ) headers = tool.assembling_request(parameters) response = tool.do_http_request(tool.api_bundle.server_url, tool.api_bundle.method, headers, parameters) From 435e71eb60f319d48f69dfca93e2316329c189e7 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Mon, 23 Sep 2024 13:09:46 +0800 Subject: [PATCH 040/325] refactor --- api/core/plugin/entities/plugin_daemon.py | 2 +- api/core/plugin/manager/base.py | 49 ++++++++++---- api/core/plugin/manager/model.py | 10 ++- api/core/plugin/manager/plugin.py | 15 ++--- api/core/plugin/manager/tool.py | 8 ++- api/core/tools/entities/tool_entities.py | 44 +++++++------ api/tests/integration_tests/.env.example | 5 ++ .../integration_tests/plugin/__mock/http.py | 66 +++++++++++++++++++ .../plugin/tools/test_fetch_all_tools.py | 9 +++ .../tools/test_all_provider.py | 23 ------- 10 files changed, 164 insertions(+), 67 deletions(-) create mode 100644 api/tests/integration_tests/plugin/__mock/http.py create mode 100644 api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py delete mode 100644 api/tests/integration_tests/tools/test_all_provider.py diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index 63c839a4e4a5d5..0c710c87161c39 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -3,7 +3,7 @@ from pydantic import BaseModel -T = TypeVar("T", bound=(BaseModel | dict | bool)) +T = TypeVar("T", bound=(BaseModel | dict | list | bool)) class PluginDaemonBasicResponse(BaseModel, Generic[T]): diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index f6b44d05dd264d..3f6a87dca8b8e0 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -12,7 +12,7 @@ plugin_daemon_inner_api_baseurl = dify_config.PLUGIN_API_URL plugin_daemon_inner_api_key = dify_config.PLUGIN_API_KEY -T = TypeVar("T", bound=(BaseModel | dict | bool)) 
+T = TypeVar("T", bound=(BaseModel | dict | list | bool)) class BasePluginManager: @@ -22,6 +22,7 @@ def _request( path: str, headers: dict | None = None, data: bytes | dict | None = None, + params: dict | None = None, stream: bool = False, ) -> requests.Response: """ @@ -30,16 +31,23 @@ def _request( url = URL(str(plugin_daemon_inner_api_baseurl)) / path headers = headers or {} headers["X-Api-Key"] = plugin_daemon_inner_api_key - response = requests.request(method=method, url=str(url), headers=headers, data=data, stream=stream) + response = requests.request( + method=method, url=str(url), headers=headers, data=data, params=params, stream=stream + ) return response def _stream_request( - self, method: str, path: str, headers: dict | None = None, data: bytes | dict | None = None + self, + method: str, + path: str, + params: dict | None = None, + headers: dict | None = None, + data: bytes | dict | None = None, ) -> Generator[bytes, None, None]: """ Make a stream request to the plugin daemon inner API """ - response = self._request(method, path, headers, data, stream=True) + response = self._request(method, path, headers, data, params, stream=True) yield from response.iter_lines() def _stream_request_with_model( @@ -49,29 +57,42 @@ def _stream_request_with_model( type: type[T], headers: dict | None = None, data: bytes | dict | None = None, + params: dict | None = None, ) -> Generator[T, None, None]: """ Make a stream request to the plugin daemon inner API and yield the response as a model. 
""" - for line in self._stream_request(method, path, headers, data): + for line in self._stream_request(method, path, params, headers, data): yield type(**json.loads(line)) def _request_with_model( - self, method: str, path: str, type: type[T], headers: dict | None = None, data: bytes | None = None + self, + method: str, + path: str, + type: type[T], + headers: dict | None = None, + data: bytes | None = None, + params: dict | None = None, ) -> T: """ Make a request to the plugin daemon inner API and return the response as a model. """ - response = self._request(method, path, headers, data) + response = self._request(method, path, headers, data, params) return type(**response.json()) def _request_with_plugin_daemon_response( - self, method: str, path: str, type: type[T], headers: dict | None = None, data: bytes | dict | None = None + self, + method: str, + path: str, + type: type[T], + headers: dict | None = None, + data: bytes | dict | None = None, + params: dict | None = None, ) -> T: """ Make a request to the plugin daemon inner API and return the response as a model. """ - response = self._request(method, path, headers, data) + response = self._request(method, path, headers, data, params) rep = PluginDaemonBasicResponse[type](**response.json()) if rep.code != 0: raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") @@ -81,12 +102,18 @@ def _request_with_plugin_daemon_response( return rep.data def _request_with_plugin_daemon_response_stream( - self, method: str, path: str, type: type[T], headers: dict | None = None, data: bytes | dict | None = None + self, + method: str, + path: str, + type: type[T], + headers: dict | None = None, + data: bytes | dict | None = None, + params: dict | None = None, ) -> Generator[T, None, None]: """ Make a stream request to the plugin daemon inner API and yield the response as a model. 
""" - for line in self._stream_request(method, path, headers, data): + for line in self._stream_request(method, path, params, headers, data): line_data = json.loads(line) rep = PluginDaemonBasicResponse[type](**line_data) if rep.code != 0: diff --git a/api/core/plugin/manager/model.py b/api/core/plugin/manager/model.py index f03dbfd1e33100..4411d76fe10079 100644 --- a/api/core/plugin/manager/model.py +++ b/api/core/plugin/manager/model.py @@ -1,5 +1,13 @@ +from core.model_runtime.entities.provider_entities import ProviderEntity from core.plugin.manager.base import BasePluginManager class PluginModelManager(BasePluginManager): - pass \ No newline at end of file + def fetch_model_providers(self, tenant_id: str) -> list[ProviderEntity]: + """ + Fetch model providers for the given tenant. + """ + response = self._request_with_plugin_daemon_response( + "GET", f"plugin/{tenant_id}/models", list[ProviderEntity], params={"page": 1, "page_size": 256} + ) + return response diff --git a/api/core/plugin/manager/plugin.py b/api/core/plugin/manager/plugin.py index 101827246ac773..cabe028ff34058 100644 --- a/api/core/plugin/manager/plugin.py +++ b/api/core/plugin/manager/plugin.py @@ -1,5 +1,4 @@ from collections.abc import Generator -from urllib.parse import quote from core.plugin.entities.plugin_daemon import InstallPluginMessage from core.plugin.manager.base import BasePluginManager @@ -9,9 +8,8 @@ class PluginInstallationManager(BasePluginManager): def fetch_plugin_by_identifier(self, tenant_id: str, identifier: str) -> bool: # urlencode the identifier - identifier = quote(identifier) return self._request_with_plugin_daemon_response( - "GET", f"/plugin/{tenant_id}/fetch/identifier?plugin_unique_identifier={identifier}", bool + "GET", f"plugin/{tenant_id}/fetch/identifier", bool, params={"plugin_unique_identifier": identifier} ) def install_from_pkg(self, tenant_id: str, pkg: bytes) -> Generator[InstallPluginMessage, None, None]: @@ -22,21 +20,20 @@ def install_from_pkg(self, 
tenant_id: str, pkg: bytes) -> Generator[InstallPlugi body = {"dify_pkg": ("dify_pkg", pkg, "application/octet-stream")} return self._request_with_plugin_daemon_response_stream( - "POST", f"/plugin/{tenant_id}/install/pkg", InstallPluginMessage, data=body + "POST", f"plugin/{tenant_id}/install/pkg", InstallPluginMessage, data=body ) def install_from_identifier(self, tenant_id: str, identifier: str) -> bool: """ Install a plugin from an identifier. """ - identifier = quote(identifier) # exception will be raised if the request failed return self._request_with_plugin_daemon_response( "POST", - f"/plugin/{tenant_id}/install/identifier", + f"plugin/{tenant_id}/install/identifier", bool, - headers={ - "Content-Type": "application/json", + params={ + "plugin_unique_identifier": identifier, }, data={ "plugin_unique_identifier": identifier, @@ -48,5 +45,5 @@ def uninstall(self, tenant_id: str, identifier: str) -> bool: Uninstall a plugin. """ return self._request_with_plugin_daemon_response( - "DELETE", f"/plugin/{tenant_id}/uninstall?plugin_unique_identifier={identifier}", bool + "DELETE", f"plugin/{tenant_id}/uninstall", bool, params={"plugin_unique_identifier": identifier} ) diff --git a/api/core/plugin/manager/tool.py b/api/core/plugin/manager/tool.py index 10ce33d5e7a243..f617400355c082 100644 --- a/api/core/plugin/manager/tool.py +++ b/api/core/plugin/manager/tool.py @@ -1,9 +1,13 @@ from core.plugin.manager.base import BasePluginManager +from core.tools.entities.tool_entities import ToolProviderEntity class PluginToolManager(BasePluginManager): - def fetch_tool_providers(self, asset_id: str) -> list[str]: + def fetch_tool_providers(self, tenant_id: str) -> list[ToolProviderEntity]: """ Fetch tool providers for the given asset. 
""" - response = self._request('GET', f'/plugin/asset/{asset_id}') \ No newline at end of file + response = self._request_with_plugin_daemon_response( + "GET", f"plugin/{tenant_id}/tools", list[ToolProviderEntity], params={"page": 1, "page_size": 256} + ) + return response diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 80334a274ec441..2a85c0f882bf21 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -274,9 +274,12 @@ class ToolProviderIdentity(BaseModel): ) -class ToolProviderEntity(BaseModel): - identity: ToolProviderIdentity - credentials_schema: dict[str, ProviderConfig] = Field(default_factory=dict) +class ToolIdentity(BaseModel): + author: str = Field(..., description="The author of the tool") + name: str = Field(..., description="The name of the tool") + label: I18nObject = Field(..., description="The label of the tool") + provider: str = Field(..., description="The provider of the tool") + icon: Optional[str] = None class ToolDescription(BaseModel): @@ -284,12 +287,24 @@ class ToolDescription(BaseModel): llm: str = Field(..., description="The description presented to the LLM") -class ToolIdentity(BaseModel): - author: str = Field(..., description="The author of the tool") - name: str = Field(..., description="The name of the tool") - label: I18nObject = Field(..., description="The label of the tool") - provider: str = Field(..., description="The provider of the tool") - icon: Optional[str] = None +class ToolEntity(BaseModel): + identity: ToolIdentity + parameters: list[ToolParameter] = Field(default_factory=list) + description: Optional[ToolDescription] = None + + # pydantic configs + model_config = ConfigDict(protected_namespaces=()) + + @field_validator("parameters", mode="before") + @classmethod + def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParameter]: + return v or [] + + +class ToolProviderEntity(BaseModel): + 
identity: ToolProviderIdentity + credentials_schema: dict[str, ProviderConfig] = Field(default_factory=dict) + tools: list[ToolEntity] = Field(default_factory=list) class WorkflowToolParameterConfiguration(BaseModel): @@ -352,15 +367,4 @@ class ToolInvokeFrom(Enum): AGENT = "agent" -class ToolEntity(BaseModel): - identity: ToolIdentity - parameters: list[ToolParameter] = Field(default_factory=list) - description: Optional[ToolDescription] = None - - # pydantic configs - model_config = ConfigDict(protected_namespaces=()) - @field_validator("parameters", mode="before") - @classmethod - def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParameter]: - return v or [] diff --git a/api/tests/integration_tests/.env.example b/api/tests/integration_tests/.env.example index 2d52399d29a995..8bb1ab96fc0033 100644 --- a/api/tests/integration_tests/.env.example +++ b/api/tests/integration_tests/.env.example @@ -83,3 +83,8 @@ VOLC_EMBEDDING_ENDPOINT_ID= # 360 AI Credentials ZHINAO_API_KEY= + +# Plugin configuration +PLUGIN_API_KEY= +PLUGIN_API_URL= +INNER_API_KEY= \ No newline at end of file diff --git a/api/tests/integration_tests/plugin/__mock/http.py b/api/tests/integration_tests/plugin/__mock/http.py new file mode 100644 index 00000000000000..25177274c6c049 --- /dev/null +++ b/api/tests/integration_tests/plugin/__mock/http.py @@ -0,0 +1,66 @@ +import os +from typing import Literal + +import pytest +import requests +from _pytest.monkeypatch import MonkeyPatch + +from core.plugin.entities.plugin_daemon import PluginDaemonBasicResponse +from core.tools.entities.common_entities import I18nObject +from core.tools.entities.tool_entities import ToolProviderEntity, ToolProviderIdentity + + +class MockedHttp: + @classmethod + def list_tools(cls) -> list[ToolProviderEntity]: + return [ + ToolProviderEntity( + identity=ToolProviderIdentity( + author="Yeuoly", + name="Yeuoly", + description=I18nObject(en_US="Yeuoly"), + icon="ssss.svg", + 
label=I18nObject(en_US="Yeuoly"), + ) + ) + ] + + @classmethod + def requests_request( + cls, method: Literal["GET", "POST", "PUT", "DELETE", "PATCH", "HEAD"], url: str, **kwargs + ) -> requests.Response: + """ + Mocked requests.request + """ + request = requests.PreparedRequest() + request.method = method + request.url = url + if url.endswith("/tools"): + content = PluginDaemonBasicResponse[list[ToolProviderEntity]]( + code=0, message="success", data=cls.list_tools() + ).model_dump_json() + else: + raise ValueError("") + + response = requests.Response() + response.status_code = 200 + response.request = request + response._content = content.encode("utf-8") + return response + + +MOCK_SWITCH = os.getenv("MOCK_SWITCH", "false").lower() == "true" + + +@pytest.fixture +def setup_http_mock(request, monkeypatch: MonkeyPatch): + if MOCK_SWITCH: + monkeypatch.setattr(requests, "request", MockedHttp.requests_request) + + def unpatch(): + monkeypatch.undo() + + yield + + if MOCK_SWITCH: + unpatch() diff --git a/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py b/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py new file mode 100644 index 00000000000000..d50bba4eccc103 --- /dev/null +++ b/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py @@ -0,0 +1,9 @@ +from core.plugin.manager.tool import PluginToolManager +from tests.integration_tests.plugin.__mock.http import setup_http_mock + + +def test_fetch_all_plugin_tools(setup_http_mock): + manager = PluginToolManager() + tools = manager.fetch_tool_providers(tenant_id="test-tenant") + assert len(tools) >= 1 + diff --git a/api/tests/integration_tests/tools/test_all_provider.py b/api/tests/integration_tests/tools/test_all_provider.py deleted file mode 100644 index 2dfce749b3e16f..00000000000000 --- a/api/tests/integration_tests/tools/test_all_provider.py +++ /dev/null @@ -1,23 +0,0 @@ -import pytest - -from core.tools.tool_manager import ToolManager - -provider_generator = 
ToolManager.list_builtin_providers() -provider_names = [provider.identity.name for provider in provider_generator] -ToolManager.clear_builtin_providers_cache() -provider_generator = ToolManager.list_builtin_providers() - - -@pytest.mark.parametrize("name", provider_names) -def test_tool_providers(benchmark, name): - """ - Test that all tool providers can be loaded - """ - - def test(generator): - try: - return next(generator) - except StopIteration: - return None - - benchmark.pedantic(test, args=(provider_generator,), iterations=1, rounds=1) From 7a3e756020b8867f65b2a2cd711a246d015e0b2b Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Mon, 23 Sep 2024 18:06:16 +0800 Subject: [PATCH 041/325] refactor: list tools --- .../console/workspace/tool_providers.py | 7 +- api/core/agent/base_agent_runner.py | 2 +- api/core/plugin/entities/plugin_daemon.py | 11 +- api/core/plugin/manager/base.py | 9 +- api/core/plugin/manager/tool.py | 58 ++++++++- api/core/tools/__base/tool.py | 6 +- api/core/tools/__base/tool_provider.py | 2 - api/core/tools/__base/tool_runtime.py | 2 +- api/core/tools/builtin_tool/provider.py | 19 +-- .../builtin_tool/providers/_positions.py | 6 +- .../tools/builtin_tool/providers/code/code.py | 2 +- .../builtin_tool/providers/qrcode/qrcode.py | 2 +- .../tools/builtin_tool/providers/time/time.py | 2 +- api/core/tools/custom_tool/provider.py | 5 +- api/core/tools/entities/api_entities.py | 10 +- api/core/tools/entities/tool_entities.py | 10 ++ .../tools/plugin_tool/plugin_tool_provider.py | 30 ----- api/core/tools/plugin_tool/provider.py | 72 +++++++++++ api/core/tools/plugin_tool/tool.py | 41 ++++++ api/core/tools/tool_manager.py | 122 +++++++++++++----- api/core/tools/workflow_as_tool/tool.py | 2 +- .../tools/api_tools_manage_service.py | 8 +- .../tools/builtin_tools_manage_service.py | 39 +++--- api/services/tools/tools_manage_service.py | 4 +- api/services/tools/tools_transform_service.py | 27 ++-- .../tools/workflow_tools_manage_service.py | 6 +- 26 files 
changed, 365 insertions(+), 139 deletions(-) delete mode 100644 api/core/tools/plugin_tool/plugin_tool_provider.py create mode 100644 api/core/tools/plugin_tool/provider.py create mode 100644 api/core/tools/plugin_tool/tool.py diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index d2a17b133b6d12..68ad383a744d70 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -118,7 +118,9 @@ def get(self, provider): class ToolBuiltinProviderIconApi(Resource): @setup_required def get(self, provider): - icon_bytes, mimetype = BuiltinToolManageService.get_builtin_tool_provider_icon(provider) + tenant_id = current_user.current_tenant_id + + icon_bytes, mimetype = BuiltinToolManageService.get_builtin_tool_provider_icon(provider, tenant_id) icon_cache_max_age = dify_config.TOOL_ICON_CACHE_MAX_AGE return send_file(io.BytesIO(icon_bytes), mimetype=mimetype, max_age=icon_cache_max_age) @@ -290,7 +292,8 @@ class ToolBuiltinProviderCredentialsSchemaApi(Resource): @login_required @account_initialization_required def get(self, provider): - return BuiltinToolManageService.list_builtin_provider_credentials_schema(provider) + tenant_id = current_user.current_tenant_id + return BuiltinToolManageService.list_builtin_provider_credentials_schema(provider, tenant_id) class ToolApiProviderSchemaApi(Resource): diff --git a/api/core/agent/base_agent_runner.py b/api/core/agent/base_agent_runner.py index 64075ed231f4af..aea68050bdf3e1 100644 --- a/api/core/agent/base_agent_runner.py +++ b/api/core/agent/base_agent_runner.py @@ -166,7 +166,7 @@ def _convert_tool_to_prompt_message_tool(self, tool: AgentToolEntity) -> tuple[P }, ) - parameters = tool_entity.get_all_runtime_parameters() + parameters = tool_entity.get_merged_runtime_parameters() for parameter in parameters: if parameter.form != ToolParameter.ToolParameterForm.LLM: continue diff --git 
a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index 0c710c87161c39..51cc36d7dfeda8 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -3,6 +3,8 @@ from pydantic import BaseModel +from core.tools.entities.tool_entities import ToolProviderEntityWithPlugin + T = TypeVar("T", bound=(BaseModel | dict | list | bool)) @@ -26,4 +28,11 @@ class Event(Enum): Error = "error" event: Event - data: str \ No newline at end of file + data: str + + +class PluginToolProviderEntity(BaseModel): + provider: str + plugin_unique_identifier: str + plugin_id: str + declaration: ToolProviderEntityWithPlugin \ No newline at end of file diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index 3f6a87dca8b8e0..8b80d923f64425 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -93,7 +93,14 @@ def _request_with_plugin_daemon_response( Make a request to the plugin daemon inner API and return the response as a model. 
""" response = self._request(method, path, headers, data, params) - rep = PluginDaemonBasicResponse[type](**response.json()) + json_response = response.json() + for provider in json_response.get("data", []): + declaration = provider.get("declaration", {}) or {} + provider_name = declaration.get("identity", {}).get("name") + for tool in declaration.get("tools", []): + tool["identity"]["provider"] = provider_name + + rep = PluginDaemonBasicResponse[type](**json_response) if rep.code != 0: raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") if rep.data is None: diff --git a/api/core/plugin/manager/tool.py b/api/core/plugin/manager/tool.py index f617400355c082..fe5f7bb757d84e 100644 --- a/api/core/plugin/manager/tool.py +++ b/api/core/plugin/manager/tool.py @@ -1,13 +1,65 @@ +from collections.abc import Generator +from typing import Any + +from core.plugin.entities.plugin_daemon import PluginToolProviderEntity from core.plugin.manager.base import BasePluginManager -from core.tools.entities.tool_entities import ToolProviderEntity +from core.tools.entities.tool_entities import ToolInvokeMessage class PluginToolManager(BasePluginManager): - def fetch_tool_providers(self, tenant_id: str) -> list[ToolProviderEntity]: + def fetch_tool_providers(self, tenant_id: str) -> list[PluginToolProviderEntity]: """ Fetch tool providers for the given asset. 
""" response = self._request_with_plugin_daemon_response( - "GET", f"plugin/{tenant_id}/tools", list[ToolProviderEntity], params={"page": 1, "page_size": 256} + "GET", f"plugin/{tenant_id}/tools", list[PluginToolProviderEntity], params={"page": 1, "page_size": 256} + ) + return response + + def invoke( + self, + tenant_id: str, + user_id: str, + plugin_unique_identifier: str, + tool_provider: str, + tool_name: str, + credentials: dict[str, Any], + tool_parameters: dict[str, Any], + ) -> Generator[ToolInvokeMessage, None, None]: + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/tool/invoke", + ToolInvokeMessage, + data={ + "plugin_unique_identifier": plugin_unique_identifier, + "user_id": user_id, + "data": { + "provider": tool_provider, + "tool": tool_name, + "credentials": credentials, + "tool_parameters": tool_parameters, + }, + }, + ) + return response + + def validate_provider_credentials( + self, tenant_id: str, user_id: str, plugin_unique_identifier: str, provider: str, credentials: dict[str, Any] + ) -> bool: + """ + validate the credentials of the provider + """ + response = self._request_with_plugin_daemon_response( + "POST", + f"plugin/{tenant_id}/tool/validate_credentials", + bool, + data={ + "plugin_unique_identifier": plugin_unique_identifier, + "user_id": user_id, + "data": { + "provider": provider, + "credentials": credentials, + }, + }, ) return response diff --git a/api/core/tools/__base/tool.py b/api/core/tools/__base/tool.py index 548db51a2aedfb..d0bf2f0c31cb02 100644 --- a/api/core/tools/__base/tool.py +++ b/api/core/tools/__base/tool.py @@ -105,11 +105,11 @@ def get_runtime_parameters(self) -> list[ToolParameter]: """ return self.entity.parameters - def get_all_runtime_parameters(self) -> list[ToolParameter]: + def get_merged_runtime_parameters(self) -> list[ToolParameter]: """ - get all runtime parameters + get merged runtime parameters - :return: all runtime parameters + :return: merged runtime 
parameters """ parameters = self.entity.parameters parameters = parameters.copy() diff --git a/api/core/tools/__base/tool_provider.py b/api/core/tools/__base/tool_provider.py index c71885e48de191..795812a1096a7a 100644 --- a/api/core/tools/__base/tool_provider.py +++ b/api/core/tools/__base/tool_provider.py @@ -12,11 +12,9 @@ class ToolProviderController(ABC): entity: ToolProviderEntity - tools: list[Tool] def __init__(self, entity: ToolProviderEntity) -> None: self.entity = entity - self.tools = [] def get_credentials_schema(self) -> dict[str, ProviderConfig]: """ diff --git a/api/core/tools/__base/tool_runtime.py b/api/core/tools/__base/tool_runtime.py index d4b2ef61049d92..c9e157cb77a26d 100644 --- a/api/core/tools/__base/tool_runtime.py +++ b/api/core/tools/__base/tool_runtime.py @@ -16,7 +16,7 @@ class ToolRuntime(BaseModel): tool_id: Optional[str] = None invoke_from: Optional[InvokeFrom] = None tool_invoke_from: Optional[ToolInvokeFrom] = None - credentials: Optional[dict[str, Any]] = None + credentials: dict[str, Any] = Field(default_factory=dict) runtime_parameters: dict[str, Any] = Field(default_factory=dict) diff --git a/api/core/tools/builtin_tool/provider.py b/api/core/tools/builtin_tool/provider.py index 4ebd82f8e778a8..e7e374f2e66d4d 100644 --- a/api/core/tools/builtin_tool/provider.py +++ b/api/core/tools/builtin_tool/provider.py @@ -19,9 +19,7 @@ class BuiltinToolProviderController(ToolProviderController): tools: list[BuiltinTool] def __init__(self, **data: Any) -> None: - if self.provider_type == ToolProviderType.API: - super().__init__(**data) - return + self.tools = [] # load provider yaml provider = self.__class__.__module__.split(".")[-1] @@ -76,9 +74,12 @@ def _get_builtin_tools(self) -> list[BuiltinTool]: parent_type=BuiltinTool, ) tool["identity"]["provider"] = provider - tools.append(assistant_tool_class( - entity=ToolEntity(**tool), runtime=ToolRuntime(tenant_id=""), - )) + tools.append( + assistant_tool_class( + entity=ToolEntity(**tool), 
+ runtime=ToolRuntime(tenant_id=""), + ) + ) self.tools = tools return tools @@ -142,7 +143,7 @@ def _get_tool_labels(self) -> list[ToolLabelEnum]: """ return self.entity.identity.tags or [] - def validate_credentials(self, credentials: dict[str, Any]) -> None: + def validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None: """ validate the credentials of the provider @@ -153,10 +154,10 @@ def validate_credentials(self, credentials: dict[str, Any]) -> None: self.validate_credentials_format(credentials) # validate credentials - self._validate_credentials(credentials) + self._validate_credentials(user_id, credentials) @abstractmethod - def _validate_credentials(self, credentials: dict[str, Any]) -> None: + def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None: """ validate the credentials of the provider diff --git a/api/core/tools/builtin_tool/providers/_positions.py b/api/core/tools/builtin_tool/providers/_positions.py index 5c10f72fdaed01..224b695ff979da 100644 --- a/api/core/tools/builtin_tool/providers/_positions.py +++ b/api/core/tools/builtin_tool/providers/_positions.py @@ -1,18 +1,18 @@ import os.path from core.helper.position_helper import get_tool_position_map, sort_by_position_map -from core.tools.entities.api_entities import UserToolProvider +from core.tools.entities.api_entities import ToolProviderApiEntity class BuiltinToolProviderSort: _position = {} @classmethod - def sort(cls, providers: list[UserToolProvider]) -> list[UserToolProvider]: + def sort(cls, providers: list[ToolProviderApiEntity]) -> list[ToolProviderApiEntity]: if not cls._position: cls._position = get_tool_position_map(os.path.join(os.path.dirname(__file__), "..")) - def name_func(provider: UserToolProvider) -> str: + def name_func(provider: ToolProviderApiEntity) -> str: return provider.name sorted_providers = sort_by_position_map(cls._position, providers, name_func) diff --git a/api/core/tools/builtin_tool/providers/code/code.py 
b/api/core/tools/builtin_tool/providers/code/code.py index 53210e9c439cb6..18b7cd4c9010e8 100644 --- a/api/core/tools/builtin_tool/providers/code/code.py +++ b/api/core/tools/builtin_tool/providers/code/code.py @@ -4,5 +4,5 @@ class CodeToolProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: + def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None: pass diff --git a/api/core/tools/builtin_tool/providers/qrcode/qrcode.py b/api/core/tools/builtin_tool/providers/qrcode/qrcode.py index e792382ee3f451..3999f3b3ef1ff6 100644 --- a/api/core/tools/builtin_tool/providers/qrcode/qrcode.py +++ b/api/core/tools/builtin_tool/providers/qrcode/qrcode.py @@ -4,5 +4,5 @@ class QRCodeProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: + def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None: pass diff --git a/api/core/tools/builtin_tool/providers/time/time.py b/api/core/tools/builtin_tool/providers/time/time.py index d70fc22dfcae51..323a7c41b8f7a1 100644 --- a/api/core/tools/builtin_tool/providers/time/time.py +++ b/api/core/tools/builtin_tool/providers/time/time.py @@ -4,5 +4,5 @@ class WikiPediaProvider(BuiltinToolProviderController): - def _validate_credentials(self, credentials: dict[str, Any]) -> None: + def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None: pass diff --git a/api/core/tools/custom_tool/provider.py b/api/core/tools/custom_tool/provider.py index 32eda1d9bcda8a..c5e3e8488e21e0 100644 --- a/api/core/tools/custom_tool/provider.py +++ b/api/core/tools/custom_tool/provider.py @@ -24,9 +24,10 @@ def __init__(self, entity: ToolProviderEntity, provider_id: str, tenant_id: str) super().__init__(entity) self.provider_id = provider_id self.tenant_id = tenant_id + self.tools = [] - @staticmethod - def from_db(db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) 
-> "ApiToolProviderController": + @classmethod + def from_db(cls, db_provider: ApiToolProvider, auth_type: ApiProviderAuthType): credentials_schema = { "auth_type": ProviderConfig( name="auth_type", diff --git a/api/core/tools/entities/api_entities.py b/api/core/tools/entities/api_entities.py index b3a98b4a6dcf79..18db659bb9f830 100644 --- a/api/core/tools/entities/api_entities.py +++ b/api/core/tools/entities/api_entities.py @@ -9,7 +9,7 @@ from core.tools.entities.tool_entities import ToolProviderType -class UserTool(BaseModel): +class ToolApiEntity(BaseModel): author: str name: str # identifier label: I18nObject # label @@ -18,10 +18,10 @@ class UserTool(BaseModel): labels: list[str] = Field(default_factory=list) -UserToolProviderTypeLiteral = Optional[Literal["builtin", "api", "workflow"]] +ToolProviderTypeApiLiteral = Optional[Literal["builtin", "api", "workflow"]] -class UserToolProvider(BaseModel): +class ToolProviderApiEntity(BaseModel): id: str author: str name: str # identifier @@ -33,7 +33,7 @@ class UserToolProvider(BaseModel): original_credentials: Optional[dict] = None is_team_authorization: bool = False allow_delete: bool = True - tools: list[UserTool] = Field(default_factory=list) + tools: list[ToolApiEntity] = Field(default_factory=list) labels: list[str] = Field(default_factory=list) def to_dict(self) -> dict: @@ -63,5 +63,5 @@ def to_dict(self) -> dict: } -class UserToolProviderCredentials(BaseModel): +class ToolProviderCredentialsApiEntity(BaseModel): credentials: dict[str, ProviderConfig] diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 2a85c0f882bf21..07ea2d2b11f1fe 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -224,6 +224,13 @@ class ToolParameterForm(Enum): max: Optional[Union[float, int]] = None options: list[ToolParameterOption] = Field(default_factory=list) + @field_validator("options", mode="before") + @classmethod + def 
transform_options(cls, v): + if not isinstance(v, list): + return [] + return v + @classmethod def get_simple_instance( cls, @@ -304,6 +311,9 @@ def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParamete class ToolProviderEntity(BaseModel): identity: ToolProviderIdentity credentials_schema: dict[str, ProviderConfig] = Field(default_factory=dict) + + +class ToolProviderEntityWithPlugin(ToolProviderEntity): tools: list[ToolEntity] = Field(default_factory=list) diff --git a/api/core/tools/plugin_tool/plugin_tool_provider.py b/api/core/tools/plugin_tool/plugin_tool_provider.py deleted file mode 100644 index 47a78ee3183fda..00000000000000 --- a/api/core/tools/plugin_tool/plugin_tool_provider.py +++ /dev/null @@ -1,30 +0,0 @@ - - -from core.entities.provider_entities import ProviderConfig -from core.tools.__base.tool import Tool -from core.tools.__base.tool_provider import ToolProviderController -from core.tools.entities.tool_entities import ToolProviderType - - -class PluginToolProvider(ToolProviderController): - @property - def provider_type(self) -> ToolProviderType: - """ - returns the type of the provider - - :return: type of the provider - """ - return ToolProviderType.PLUGIN - - def get_tool(self, tool_name: str) -> Tool: - """ - return tool with given name - """ - return super().get_tool(tool_name) - - def get_credentials_schema(self) -> dict[str, ProviderConfig]: - """ - get credentials schema - """ - return super().get_credentials_schema() - \ No newline at end of file diff --git a/api/core/tools/plugin_tool/provider.py b/api/core/tools/plugin_tool/provider.py new file mode 100644 index 00000000000000..a52e7c967f4be0 --- /dev/null +++ b/api/core/tools/plugin_tool/provider.py @@ -0,0 +1,72 @@ +from typing import Any + +from core.plugin.manager.tool import PluginToolManager +from core.tools.__base.tool_runtime import ToolRuntime +from core.tools.builtin_tool.provider import BuiltinToolProviderController +from 
core.tools.entities.tool_entities import ToolProviderEntityWithPlugin, ToolProviderType +from core.tools.errors import ToolProviderCredentialValidationError +from core.tools.plugin_tool.tool import PluginTool + + +class PluginToolProviderController(BuiltinToolProviderController): + entity: ToolProviderEntityWithPlugin + tenant_id: str + plugin_unique_identifier: str + + def __init__(self, entity: ToolProviderEntityWithPlugin, tenant_id: str, plugin_unique_identifier: str) -> None: + self.entity = entity + self.tenant_id = tenant_id + self.plugin_unique_identifier = plugin_unique_identifier + + @property + def provider_type(self) -> ToolProviderType: + """ + returns the type of the provider + + :return: type of the provider + """ + return ToolProviderType.PLUGIN + + def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None: + """ + validate the credentials of the provider + """ + manager = PluginToolManager() + if not manager.validate_provider_credentials( + tenant_id=self.tenant_id, + user_id=user_id, + plugin_unique_identifier=self.plugin_unique_identifier, + provider=self.entity.identity.name, + credentials=credentials, + ): + raise ToolProviderCredentialValidationError("Invalid credentials") + + def get_tool(self, tool_name: str) -> PluginTool: + """ + return tool with given name + """ + tool_entity = next(tool_entity for tool_entity in self.entity.tools if tool_entity.identity.name == tool_name) + + if not tool_entity: + raise ValueError(f"Tool with name {tool_name} not found") + + return PluginTool( + entity=tool_entity, + runtime=ToolRuntime(tenant_id=self.tenant_id), + tenant_id=self.tenant_id, + plugin_unique_identifier=self.plugin_unique_identifier, + ) + + def get_tools(self) -> list[PluginTool]: + """ + get all tools + """ + return [ + PluginTool( + entity=tool_entity, + runtime=ToolRuntime(tenant_id=self.tenant_id), + tenant_id=self.tenant_id, + plugin_unique_identifier=self.plugin_unique_identifier, + ) + for tool_entity in 
self.entity.tools + ] diff --git a/api/core/tools/plugin_tool/tool.py b/api/core/tools/plugin_tool/tool.py new file mode 100644 index 00000000000000..898e8a552d0f6d --- /dev/null +++ b/api/core/tools/plugin_tool/tool.py @@ -0,0 +1,41 @@ +from collections.abc import Generator +from typing import Any + +from core.plugin.manager.tool import PluginToolManager +from core.tools.__base.tool import Tool +from core.tools.__base.tool_runtime import ToolRuntime +from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolProviderType + + +class PluginTool(Tool): + tenant_id: str + plugin_unique_identifier: str + + def __init__(self, entity: ToolEntity, runtime: ToolRuntime, tenant_id: str, plugin_unique_identifier: str) -> None: + super().__init__(entity, runtime) + self.tenant_id = tenant_id + self.plugin_unique_identifier = plugin_unique_identifier + + @property + def tool_provider_type(self) -> ToolProviderType: + return ToolProviderType.PLUGIN + + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage, None, None]: + manager = PluginToolManager() + return manager.invoke( + tenant_id=self.tenant_id, + user_id=user_id, + plugin_unique_identifier=self.plugin_unique_identifier, + tool_provider=self.entity.identity.provider, + tool_name=self.entity.identity.name, + credentials=self.runtime.credentials, + tool_parameters=tool_parameters, + ) + + def fork_tool_runtime(self, runtime: ToolRuntime) -> "PluginTool": + return PluginTool( + entity=self.entity, + runtime=runtime, + tenant_id=self.tenant_id, + plugin_unique_identifier=self.plugin_unique_identifier, + ) \ No newline at end of file diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index c37ee730c832e9..76c7232e01129d 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -6,7 +6,10 @@ from threading import Lock from typing import TYPE_CHECKING, Any, Union, cast +from core.plugin.manager.tool import 
PluginToolManager from core.tools.__base.tool_runtime import ToolRuntime +from core.tools.plugin_tool.provider import PluginToolProviderController +from core.tools.plugin_tool.tool import PluginTool if TYPE_CHECKING: from core.workflow.nodes.tool.entities import ToolEntity @@ -24,7 +27,7 @@ from core.tools.builtin_tool.tool import BuiltinTool from core.tools.custom_tool.provider import ApiToolProviderController from core.tools.custom_tool.tool import ApiTool -from core.tools.entities.api_entities import UserToolProvider, UserToolProviderTypeLiteral +from core.tools.entities.api_entities import ToolProviderApiEntity, ToolProviderTypeApiLiteral from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ApiProviderAuthType, ToolInvokeFrom, ToolParameter, ToolProviderType from core.tools.errors import ToolProviderNotFoundError @@ -41,38 +44,61 @@ class ToolManager: _builtin_provider_lock = Lock() - _builtin_providers = {} + _hardcoded_providers = {} _builtin_providers_loaded = False _builtin_tools_labels = {} @classmethod - def get_builtin_provider(cls, provider: str) -> BuiltinToolProviderController: + def get_builtin_provider( + cls, provider: str, tenant_id: str + ) -> BuiltinToolProviderController | PluginToolProviderController: """ get the builtin provider :param provider: the name of the provider + :param tenant_id: the id of the tenant :return: the provider """ - if len(cls._builtin_providers) == 0: + if len(cls._hardcoded_providers) == 0: # init the builtin providers - cls.load_builtin_providers_cache() + cls.load_hardcoded_providers_cache() - if provider not in cls._builtin_providers: - raise ToolProviderNotFoundError(f"builtin provider {provider} not found") + if provider not in cls._hardcoded_providers: + # get plugin provider + plugin_provider = cls.get_plugin_provider(provider, tenant_id) + if plugin_provider: + return plugin_provider - return cls._builtin_providers[provider] + return 
cls._hardcoded_providers[provider] @classmethod - def get_builtin_tool(cls, provider: str, tool_name: str) -> BuiltinTool | None: + def get_plugin_provider(cls, provider: str, tenant_id: str) -> PluginToolProviderController: + """ + get the plugin provider + """ + manager = PluginToolManager() + providers = manager.fetch_tool_providers(tenant_id) + provider_entity = next((x for x in providers if x.declaration.identity.name == provider), None) + if not provider_entity: + raise ToolProviderNotFoundError(f"plugin provider {provider} not found") + + return PluginToolProviderController( + entity=provider_entity.declaration, + tenant_id=tenant_id, + plugin_unique_identifier=provider_entity.plugin_unique_identifier, + ) + + @classmethod + def get_builtin_tool(cls, provider: str, tool_name: str, tenant_id: str) -> BuiltinTool | PluginTool | None: """ get the builtin tool :param provider: the name of the provider :param tool_name: the name of the tool - + :param tenant_id: the id of the tenant :return: the provider, the tool """ - provider_controller = cls.get_builtin_provider(provider) + provider_controller = cls.get_builtin_provider(provider, tenant_id) tool = provider_controller.get_tool(tool_name) return tool @@ -97,12 +123,12 @@ def get_tool_runtime( :return: the tool """ if provider_type == ToolProviderType.BUILT_IN: - builtin_tool = cls.get_builtin_tool(provider_id, tool_name) + builtin_tool = cls.get_builtin_tool(provider_id, tool_name, tenant_id) if not builtin_tool: raise ValueError(f"tool {tool_name} not found") # check if the builtin tool need credentials - provider_controller = cls.get_builtin_provider(provider_id) + provider_controller = cls.get_builtin_provider(provider_id, tenant_id) if not provider_controller.need_credentials: return cast( BuiltinTool, @@ -131,7 +157,7 @@ def get_tool_runtime( # decrypt the credentials credentials = builtin_provider.credentials - controller = cls.get_builtin_provider(provider_id) + controller = 
cls.get_builtin_provider(provider_id, tenant_id) tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=controller.get_credentials_schema(), @@ -246,7 +272,7 @@ def get_agent_tool_runtime( tool_invoke_from=ToolInvokeFrom.AGENT, ) runtime_parameters = {} - parameters = tool_entity.get_all_runtime_parameters() + parameters = tool_entity.get_merged_runtime_parameters() for parameter in parameters: # check file types if parameter.type == ToolParameter.ToolParameterType.FILE: @@ -294,7 +320,7 @@ def get_workflow_tool_runtime( tool_invoke_from=ToolInvokeFrom.WORKFLOW, ) runtime_parameters = {} - parameters = tool_entity.get_all_runtime_parameters() + parameters = tool_entity.get_merged_runtime_parameters() for parameter in parameters: # save tool parameter to tool entity memory @@ -321,16 +347,17 @@ def get_workflow_tool_runtime( return tool_entity @classmethod - def get_builtin_provider_icon(cls, provider: str) -> tuple[str, str]: + def get_builtin_provider_icon(cls, provider: str, tenant_id: str) -> tuple[str, str]: """ get the absolute path of the icon of the builtin provider :param provider: the name of the provider + :param tenant_id: the id of the tenant :return: the absolute path of the icon, the mime type of the icon """ # get provider - provider_controller = cls.get_builtin_provider(provider) + provider_controller = cls.get_builtin_provider(provider, tenant_id) absolute_path = path.join( path.dirname(path.realpath(__file__)), @@ -351,21 +378,48 @@ def get_builtin_provider_icon(cls, provider: str) -> tuple[str, str]: return absolute_path, mime_type @classmethod - def list_builtin_providers(cls) -> Generator[BuiltinToolProviderController, None, None]: + def list_hardcoded_providers(cls): # use cache first if cls._builtin_providers_loaded: - yield from list(cls._builtin_providers.values()) + yield from list(cls._hardcoded_providers.values()) return with cls._builtin_provider_lock: if cls._builtin_providers_loaded: - yield from 
list(cls._builtin_providers.values()) + yield from list(cls._hardcoded_providers.values()) return - yield from cls._list_builtin_providers() + yield from cls._list_hardcoded_providers() + + @classmethod + def list_plugin_providers(cls, tenant_id: str) -> list[PluginToolProviderController]: + """ + list all the plugin providers + """ + manager = PluginToolManager() + provider_entities = manager.fetch_tool_providers(tenant_id) + return [ + PluginToolProviderController( + entity=provider.declaration, + tenant_id=tenant_id, + plugin_unique_identifier=provider.plugin_unique_identifier, + ) + for provider in provider_entities + ] + + @classmethod + def list_builtin_providers( + cls, tenant_id: str + ) -> Generator[BuiltinToolProviderController | PluginToolProviderController, None, None]: + """ + list all the builtin providers + """ + yield from cls.list_hardcoded_providers() + # get plugin providers + yield from cls.list_plugin_providers(tenant_id) @classmethod - def _list_builtin_providers(cls) -> Generator[BuiltinToolProviderController, None, None]: + def _list_hardcoded_providers(cls) -> Generator[BuiltinToolProviderController, None, None]: """ list all the builtin providers """ @@ -391,7 +445,7 @@ def _list_builtin_providers(cls) -> Generator[BuiltinToolProviderController, Non parent_type=BuiltinToolProviderController, ) provider: BuiltinToolProviderController = provider_class() - cls._builtin_providers[provider.entity.identity.name] = provider + cls._hardcoded_providers[provider.entity.identity.name] = provider for tool in provider.get_tools(): cls._builtin_tools_labels[tool.entity.identity.name] = tool.entity.identity.label yield provider @@ -403,13 +457,13 @@ def _list_builtin_providers(cls) -> Generator[BuiltinToolProviderController, Non cls._builtin_providers_loaded = True @classmethod - def load_builtin_providers_cache(cls): - for _ in cls.list_builtin_providers(): + def load_hardcoded_providers_cache(cls): + for _ in cls.list_hardcoded_providers(): pass 
@classmethod - def clear_builtin_providers_cache(cls): - cls._builtin_providers = {} + def clear_hardcoded_providers_cache(cls): + cls._hardcoded_providers = {} cls._builtin_providers_loaded = False @classmethod @@ -423,7 +477,7 @@ def get_tool_label(cls, tool_name: str) -> Union[I18nObject, None]: """ if len(cls._builtin_tools_labels) == 0: # init the builtin providers - cls.load_builtin_providers_cache() + cls.load_hardcoded_providers_cache() if tool_name not in cls._builtin_tools_labels: return None @@ -432,9 +486,9 @@ def get_tool_label(cls, tool_name: str) -> Union[I18nObject, None]: @classmethod def user_list_providers( - cls, user_id: str, tenant_id: str, typ: UserToolProviderTypeLiteral - ) -> list[UserToolProvider]: - result_providers: dict[str, UserToolProvider] = {} + cls, user_id: str, tenant_id: str, typ: ToolProviderTypeApiLiteral + ) -> list[ToolProviderApiEntity]: + result_providers: dict[str, ToolProviderApiEntity] = {} filters = [] if not typ: @@ -444,7 +498,7 @@ def user_list_providers( if "builtin" in filters: # get builtin providers - builtin_providers = cls.list_builtin_providers() + builtin_providers = cls.list_builtin_providers(tenant_id) # get db builtin providers db_builtin_providers: list[BuiltinToolProvider] = ( @@ -666,4 +720,4 @@ def get_tool_icon(cls, tenant_id: str, provider_type: ToolProviderType, provider raise ValueError(f"provider type {provider_type} not found") -ToolManager.load_builtin_providers_cache() +ToolManager.load_hardcoded_providers_cache() diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index e1fc5140d0d72a..d8cddd02d8c060 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -167,7 +167,7 @@ def _transform_args(self, tool_parameters: dict) -> tuple[dict, list[dict]]: :param tool_parameters: the tool parameters :return: tool_parameters, files """ - parameter_rules = self.get_all_runtime_parameters() + parameter_rules = 
self.get_merged_runtime_parameters() parameters_result = {} files = [] for parameter in parameter_rules: diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index e39c2b8a5bc83e..58f0e7bbf52098 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -7,7 +7,7 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.tools.__base.tool_runtime import ToolRuntime from core.tools.custom_tool.provider import ApiToolProviderController -from core.tools.entities.api_entities import UserTool, UserToolProvider +from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ( @@ -201,7 +201,7 @@ def get_api_tool_provider_remote_schema(user_id: str, tenant_id: str, url: str): return {"schema": schema} @staticmethod - def list_api_tool_provider_tools(user_id: str, tenant_id: str, provider_name: str) -> list[UserTool]: + def list_api_tool_provider_tools(user_id: str, tenant_id: str, provider_name: str) -> list[ToolApiEntity]: """ list api tool provider tools """ @@ -438,7 +438,7 @@ def test_api_tool_preview( return {"result": result or "empty response"} @staticmethod - def list_api_tools(user_id: str, tenant_id: str) -> list[UserToolProvider]: + def list_api_tools(user_id: str, tenant_id: str) -> list[ToolProviderApiEntity]: """ list api tools """ @@ -447,7 +447,7 @@ def list_api_tools(user_id: str, tenant_id: str) -> list[UserToolProvider]: db.session.query(ApiToolProvider).filter(ApiToolProvider.tenant_id == tenant_id).all() or [] ) - result: list[UserToolProvider] = [] + result: list[ToolProviderApiEntity] = [] for provider in db_providers: # convert provider controller to user provider diff --git a/api/services/tools/builtin_tools_manage_service.py 
b/api/services/tools/builtin_tools_manage_service.py index 83b363bb580588..c3d778558bedaf 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -5,9 +5,8 @@ from configs import dify_config from core.helper.position_helper import is_filtered from core.model_runtime.utils.encoders import jsonable_encoder -from core.tools.__base.tool_provider import ToolProviderController from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort -from core.tools.entities.api_entities import UserTool, UserToolProvider +from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity from core.tools.errors import ToolNotFoundError, ToolProviderCredentialValidationError, ToolProviderNotFoundError from core.tools.tool_label_manager import ToolLabelManager from core.tools.tool_manager import ToolManager @@ -21,11 +20,17 @@ class BuiltinToolManageService: @staticmethod - def list_builtin_tool_provider_tools(user_id: str, tenant_id: str, provider: str) -> list[UserTool]: + def list_builtin_tool_provider_tools(user_id: str, tenant_id: str, provider: str) -> list[ToolApiEntity]: """ list builtin tool provider tools + + :param user_id: the id of the user + :param tenant_id: the id of the tenant + :param provider: the name of the provider + + :return: the list of tools """ - provider_controller: ToolProviderController = ToolManager.get_builtin_provider(provider) + provider_controller = ToolManager.get_builtin_provider(provider, tenant_id) tools = provider_controller.get_tools() tool_provider_configurations = ProviderConfigEncrypter( @@ -64,14 +69,16 @@ def list_builtin_tool_provider_tools(user_id: str, tenant_id: str, provider: str return result @staticmethod - def list_builtin_provider_credentials_schema(provider_name): + def list_builtin_provider_credentials_schema(provider_name: str, tenant_id: str): """ list builtin provider credentials schema + :param provider_name: the name of the 
provider + :param tenant_id: the id of the tenant :return: the list of tool providers """ - provider = ToolManager.get_builtin_provider(provider_name) - return jsonable_encoder([v for _, v in (provider.entity.credentials_schema or {}).items()]) + provider = ToolManager.get_builtin_provider(provider_name, tenant_id) + return jsonable_encoder([v for _, v in (provider.get_credentials_schema() or {}).items()]) @staticmethod def update_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: str, credentials: dict): @@ -90,7 +97,7 @@ def update_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st try: # get provider - provider_controller = ToolManager.get_builtin_provider(provider_name) + provider_controller = ToolManager.get_builtin_provider(provider_name, tenant_id) if not provider_controller.need_credentials: raise ValueError(f"provider {provider_name} does not need credentials") tool_configuration = ProviderConfigEncrypter( @@ -109,7 +116,7 @@ def update_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st if name in masked_credentials and value == masked_credentials[name]: credentials[name] = original_credentials[name] # validate credentials - provider_controller.validate_credentials(credentials) + provider_controller.validate_credentials(user_id, credentials) # encrypt credentials credentials = tool_configuration.encrypt(credentials) except (ToolProviderNotFoundError, ToolNotFoundError, ToolProviderCredentialValidationError) as e: @@ -154,7 +161,7 @@ def get_builtin_tool_provider_credentials(user_id: str, tenant_id: str, provider if provider_obj is None: return {} - provider_controller = ToolManager.get_builtin_provider(provider_obj.provider) + provider_controller = ToolManager.get_builtin_provider(provider_obj.provider, tenant_id) tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), @@ -186,7 +193,7 @@ def delete_builtin_tool_provider(user_id: str, 
tenant_id: str, provider_name: st db.session.commit() # delete cache - provider_controller = ToolManager.get_builtin_provider(provider_name) + provider_controller = ToolManager.get_builtin_provider(provider_name, tenant_id) tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), @@ -198,22 +205,22 @@ def delete_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: st return {"result": "success"} @staticmethod - def get_builtin_tool_provider_icon(provider: str): + def get_builtin_tool_provider_icon(provider: str, tenant_id: str): """ get tool provider icon and it's mimetype """ - icon_path, mime_type = ToolManager.get_builtin_provider_icon(provider) + icon_path, mime_type = ToolManager.get_builtin_provider_icon(provider, tenant_id) icon_bytes = Path(icon_path).read_bytes() return icon_bytes, mime_type @staticmethod - def list_builtin_tools(user_id: str, tenant_id: str) -> list[UserToolProvider]: + def list_builtin_tools(user_id: str, tenant_id: str) -> list[ToolProviderApiEntity]: """ list builtin tools """ # get all builtin providers - provider_controllers = ToolManager.list_builtin_providers() + provider_controllers = ToolManager.list_builtin_providers(tenant_id) # get all user added providers db_providers: list[BuiltinToolProvider] = ( @@ -225,7 +232,7 @@ def list_builtin_tools(user_id: str, tenant_id: str) -> list[UserToolProvider]: filter(lambda db_provider: db_provider.provider == provider, db_providers), None ) - result: list[UserToolProvider] = [] + result: list[ToolProviderApiEntity] = [] for provider_controller in provider_controllers: try: diff --git a/api/services/tools/tools_manage_service.py b/api/services/tools/tools_manage_service.py index 1c67f7648ca99f..184596fc23c95b 100644 --- a/api/services/tools/tools_manage_service.py +++ b/api/services/tools/tools_manage_service.py @@ -1,6 +1,6 @@ import logging -from core.tools.entities.api_entities import 
UserToolProviderTypeLiteral +from core.tools.entities.api_entities import ToolProviderTypeApiLiteral from core.tools.tool_manager import ToolManager from services.tools.tools_transform_service import ToolTransformService @@ -9,7 +9,7 @@ class ToolCommonService: @staticmethod - def list_tool_providers(user_id: str, tenant_id: str, typ: UserToolProviderTypeLiteral = None): + def list_tool_providers(user_id: str, tenant_id: str, typ: ToolProviderTypeApiLiteral = None): """ list tool providers diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index d4f132e902d08e..4c2876dca34a06 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -7,7 +7,7 @@ from core.tools.__base.tool_runtime import ToolRuntime from core.tools.builtin_tool.provider import BuiltinToolProviderController from core.tools.custom_tool.provider import ApiToolProviderController -from core.tools.entities.api_entities import UserTool, UserToolProvider +from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ( @@ -15,6 +15,7 @@ ToolParameter, ToolProviderType, ) +from core.tools.plugin_tool.provider import PluginToolProviderController from core.tools.utils.configuration import ProviderConfigEncrypter from core.tools.workflow_as_tool.provider import WorkflowToolProviderController from core.tools.workflow_as_tool.tool import WorkflowTool @@ -44,7 +45,7 @@ def get_tool_provider_icon_url(cls, provider_type: str, provider_name: str, icon return "" @staticmethod - def repack_provider(provider: Union[dict, UserToolProvider]): + def repack_provider(provider: Union[dict, ToolProviderApiEntity]): """ repack provider @@ -54,7 +55,7 @@ def repack_provider(provider: Union[dict, UserToolProvider]): provider["icon"] = 
ToolTransformService.get_tool_provider_icon_url( provider_type=provider["type"], provider_name=provider["name"], icon=provider["icon"] ) - elif isinstance(provider, UserToolProvider): + elif isinstance(provider, ToolProviderApiEntity): provider.icon = ToolTransformService.get_tool_provider_icon_url( provider_type=provider.type.value, provider_name=provider.name, icon=provider.icon ) @@ -62,14 +63,14 @@ def repack_provider(provider: Union[dict, UserToolProvider]): @classmethod def builtin_provider_to_user_provider( cls, - provider_controller: BuiltinToolProviderController, + provider_controller: BuiltinToolProviderController | PluginToolProviderController, db_provider: Optional[BuiltinToolProvider], decrypt_credentials: bool = True, - ) -> UserToolProvider: + ) -> ToolProviderApiEntity: """ convert provider controller to user provider """ - result = UserToolProvider( + result = ToolProviderApiEntity( id=provider_controller.entity.identity.name, author=provider_controller.entity.identity.author, name=provider_controller.entity.identity.name, @@ -154,7 +155,7 @@ def workflow_provider_to_user_provider( """ convert provider controller to user provider """ - return UserToolProvider( + return ToolProviderApiEntity( id=provider_controller.provider_id, author=provider_controller.entity.identity.author, name=provider_controller.entity.identity.name, @@ -181,7 +182,7 @@ def api_provider_to_user_provider( db_provider: ApiToolProvider, decrypt_credentials: bool = True, labels: list[str] | None = None, - ) -> UserToolProvider: + ) -> ToolProviderApiEntity: """ convert provider controller to user provider """ @@ -197,7 +198,7 @@ def api_provider_to_user_provider( # add provider into providers credentials = db_provider.credentials - result = UserToolProvider( + result = ToolProviderApiEntity( id=db_provider.id, author=username, name=db_provider.name, @@ -240,7 +241,7 @@ def tool_to_user_tool( tenant_id: str, credentials: dict | None = None, labels: list[str] | None = None, - ) -> 
UserTool: + ) -> ToolApiEntity: """ convert tool to user tool """ @@ -248,7 +249,7 @@ def tool_to_user_tool( # fork tool runtime tool = tool.fork_tool_runtime( runtime=ToolRuntime( - credentials=credentials, + credentials=credentials or {}, tenant_id=tenant_id, ) ) @@ -270,7 +271,7 @@ def tool_to_user_tool( if not found and runtime_parameter.form == ToolParameter.ToolParameterForm.FORM: current_parameters.append(runtime_parameter) - return UserTool( + return ToolApiEntity( author=tool.entity.identity.author, name=tool.entity.identity.name, label=tool.entity.identity.label, @@ -279,7 +280,7 @@ def tool_to_user_tool( labels=labels or [], ) if isinstance(tool, ApiToolBundle): - return UserTool( + return ToolApiEntity( author=tool.author, name=tool.operation_id, label=I18nObject(en_US=tool.operation_id, zh_Hans=tool.operation_id), diff --git a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index 58bf7946bf190f..3178fe7999ee6b 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -4,7 +4,7 @@ from sqlalchemy import or_ from core.model_runtime.utils.encoders import jsonable_encoder -from core.tools.entities.api_entities import UserTool, UserToolProvider +from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity from core.tools.tool_label_manager import ToolLabelManager from core.tools.utils.workflow_configuration_sync import WorkflowToolConfigurationUtils from core.tools.workflow_as_tool.provider import WorkflowToolProviderController @@ -183,7 +183,7 @@ def update_workflow_tool( return {"result": "success"} @classmethod - def list_tenant_workflow_tools(cls, user_id: str, tenant_id: str) -> list[UserToolProvider]: + def list_tenant_workflow_tools(cls, user_id: str, tenant_id: str) -> list[ToolProviderApiEntity]: """ List workflow tools. 
:param user_id: the user id @@ -309,7 +309,7 @@ def _get_workflow_tool(cls, tenant_id: str, db_tool: WorkflowToolProvider | None } @classmethod - def list_single_workflow_tools(cls, user_id: str, tenant_id: str, workflow_tool_id: str) -> list[UserTool]: + def list_single_workflow_tools(cls, user_id: str, tenant_id: str, workflow_tool_id: str) -> list[ToolApiEntity]: """ List workflow tool provider tools. :param user_id: the user id From 947bfdc8075d9f3a3ef4392770bb991050cfb473 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Mon, 23 Sep 2024 21:13:02 +0800 Subject: [PATCH 042/325] feat: validate credentials --- api/core/plugin/entities/plugin_daemon.py | 9 ++++- api/core/plugin/manager/base.py | 23 ++++++++----- api/core/plugin/manager/tool.py | 40 +++++++++++++++++++---- 3 files changed, 56 insertions(+), 16 deletions(-) diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index 51cc36d7dfeda8..c5dced121faa05 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -35,4 +35,11 @@ class PluginToolProviderEntity(BaseModel): provider: str plugin_unique_identifier: str plugin_id: str - declaration: ToolProviderEntityWithPlugin \ No newline at end of file + declaration: ToolProviderEntityWithPlugin + + +class PluginBasicBooleanResponse(BaseModel): + """ + Basic boolean response from plugin daemon. 
+ """ + result: bool \ No newline at end of file diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index 8b80d923f64425..dac6d9d63f387a 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -1,5 +1,5 @@ import json -from collections.abc import Generator +from collections.abc import Callable, Generator from typing import TypeVar import requests @@ -21,7 +21,7 @@ def _request( method: str, path: str, headers: dict | None = None, - data: bytes | dict | None = None, + data: bytes | dict | str | None = None, params: dict | None = None, stream: bool = False, ) -> requests.Response: @@ -31,6 +31,10 @@ def _request( url = URL(str(plugin_daemon_inner_api_baseurl)) / path headers = headers or {} headers["X-Api-Key"] = plugin_daemon_inner_api_key + + if headers.get("Content-Type") == "application/json" and isinstance(data, dict): + data = json.dumps(data) + response = requests.request( method=method, url=str(url), headers=headers, data=data, params=params, stream=stream ) @@ -48,7 +52,11 @@ def _stream_request( Make a stream request to the plugin daemon inner API """ response = self._request(method, path, headers, data, params, stream=True) - yield from response.iter_lines() + for line in response.iter_lines(): + line = line.decode("utf-8").strip() + if line.startswith("data:"): + line = line[5:].strip() + yield line def _stream_request_with_model( self, @@ -88,17 +96,15 @@ def _request_with_plugin_daemon_response( headers: dict | None = None, data: bytes | dict | None = None, params: dict | None = None, + transformer: Callable[[dict], dict] | None = None, ) -> T: """ Make a request to the plugin daemon inner API and return the response as a model. 
""" response = self._request(method, path, headers, data, params) json_response = response.json() - for provider in json_response.get("data", []): - declaration = provider.get("declaration", {}) or {} - provider_name = declaration.get("identity", {}).get("name") - for tool in declaration.get("tools", []): - tool["identity"]["provider"] = provider_name + if transformer: + json_response = transformer(json_response) rep = PluginDaemonBasicResponse[type](**json_response) if rep.code != 0: @@ -128,3 +134,4 @@ def _request_with_plugin_daemon_response_stream( if rep.data is None: raise ValueError("got empty data from plugin daemon") yield rep.data + \ No newline at end of file diff --git a/api/core/plugin/manager/tool.py b/api/core/plugin/manager/tool.py index fe5f7bb757d84e..a26fa0dd2673cd 100644 --- a/api/core/plugin/manager/tool.py +++ b/api/core/plugin/manager/tool.py @@ -1,7 +1,7 @@ from collections.abc import Generator from typing import Any -from core.plugin.entities.plugin_daemon import PluginToolProviderEntity +from core.plugin.entities.plugin_daemon import PluginBasicBooleanResponse, PluginToolProviderEntity from core.plugin.manager.base import BasePluginManager from core.tools.entities.tool_entities import ToolInvokeMessage @@ -11,8 +11,22 @@ def fetch_tool_providers(self, tenant_id: str) -> list[PluginToolProviderEntity] """ Fetch tool providers for the given asset. 
""" + + def transformer(json_response: dict[str, Any]) -> dict: + for provider in json_response.get("data", []): + declaration = provider.get("declaration", {}) or {} + provider_name = declaration.get("identity", {}).get("name") + for tool in declaration.get("tools", []): + tool["identity"]["provider"] = provider_name + + return json_response + response = self._request_with_plugin_daemon_response( - "GET", f"plugin/{tenant_id}/tools", list[PluginToolProviderEntity], params={"page": 1, "page_size": 256} + "GET", + f"plugin/{tenant_id}/management/tools", + list[PluginToolProviderEntity], + params={"page": 1, "page_size": 256}, + transformer=transformer, ) return response @@ -28,7 +42,7 @@ def invoke( ) -> Generator[ToolInvokeMessage, None, None]: response = self._request_with_plugin_daemon_response_stream( "POST", - f"plugin/{tenant_id}/tool/invoke", + f"plugin/{tenant_id}/dispatch/tool/invoke", ToolInvokeMessage, data={ "plugin_unique_identifier": plugin_unique_identifier, @@ -40,6 +54,10 @@ def invoke( "tool_parameters": tool_parameters, }, }, + headers={ + "X-Plugin-Identifier": plugin_unique_identifier, + "Content-Type": "application/json", + } ) return response @@ -49,10 +67,10 @@ def validate_provider_credentials( """ validate the credentials of the provider """ - response = self._request_with_plugin_daemon_response( + response = self._request_with_plugin_daemon_response_stream( "POST", - f"plugin/{tenant_id}/tool/validate_credentials", - bool, + f"plugin/{tenant_id}/dispatch/tool/validate_credentials", + PluginBasicBooleanResponse, data={ "plugin_unique_identifier": plugin_unique_identifier, "user_id": user_id, @@ -61,5 +79,13 @@ def validate_provider_credentials( "credentials": credentials, }, }, + headers={ + "X-Plugin-Identifier": plugin_unique_identifier, + "Content-Type": "application/json", + } ) - return response + + for resp in response: + return resp.result + + return False From fb3a701c86b0bc29ef6d627a65274ddd95ba97dd Mon Sep 17 00:00:00 2001 From: 
Yeuoly Date: Tue, 24 Sep 2024 16:02:01 +0800 Subject: [PATCH 043/325] fix: stream with empty line --- api/core/plugin/manager/base.py | 3 ++- api/core/workflow/nodes/tool/tool_node.py | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index dac6d9d63f387a..fdc15f633c043d 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -56,7 +56,8 @@ def _stream_request( line = line.decode("utf-8").strip() if line.startswith("data:"): line = line[5:].strip() - yield line + if line: + yield line def _stream_request_with_model( self, diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 73c22bc700407a..2bcd0b10f2f3a5 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -212,8 +212,8 @@ def _transform_message( chunk_content=message.message.text, from_variable_selector=[self.node_id, "text"] ) elif message.type == ToolInvokeMessage.MessageType.JSON: - assert isinstance(message, ToolInvokeMessage.JsonMessage) - json.append(message.json_object) + assert isinstance(message.message, ToolInvokeMessage.JsonMessage) + json.append(message.message.json_object) elif message.type == ToolInvokeMessage.MessageType.LINK: assert isinstance(message.message, ToolInvokeMessage.TextMessage) stream_text = f"Link: {message.message.text}\n" From 1907d791e1762aae7db732eae63ead3c98c62638 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Tue, 24 Sep 2024 16:15:50 +0800 Subject: [PATCH 044/325] enhance: add gzip --- api/core/plugin/manager/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index fdc15f633c043d..fd18b3798e9b6a 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -31,6 +31,7 @@ def _request( url = URL(str(plugin_daemon_inner_api_baseurl)) / path headers = headers or {} 
headers["X-Api-Key"] = plugin_daemon_inner_api_key + headers["Accept-Encoding"] = "gzip, deflate, br" if headers.get("Content-Type") == "application/json" and isinstance(data, dict): data = json.dumps(data) From 9722e6bcb1a49e18e7a736d940082b439d457bb2 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Tue, 24 Sep 2024 16:33:19 +0800 Subject: [PATCH 045/325] fix: allow duplicate tool providers --- api/core/tools/plugin_tool/tool.py | 2 +- api/core/tools/tool_manager.py | 11 +++++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/api/core/tools/plugin_tool/tool.py b/api/core/tools/plugin_tool/tool.py index 898e8a552d0f6d..dd47f11ba27a2e 100644 --- a/api/core/tools/plugin_tool/tool.py +++ b/api/core/tools/plugin_tool/tool.py @@ -38,4 +38,4 @@ def fork_tool_runtime(self, runtime: ToolRuntime) -> "PluginTool": runtime=runtime, tenant_id=self.tenant_id, plugin_unique_identifier=self.plugin_unique_identifier, - ) \ No newline at end of file + ) diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 76c7232e01129d..a4b834e8d88af6 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -485,7 +485,7 @@ def get_tool_label(cls, tool_name: str) -> Union[I18nObject, None]: return cls._builtin_tools_labels[tool_name] @classmethod - def user_list_providers( + def list_providers_from_api( cls, user_id: str, tenant_id: str, typ: ToolProviderTypeApiLiteral ) -> list[ToolProviderApiEntity]: result_providers: dict[str, ToolProviderApiEntity] = {} @@ -526,7 +526,14 @@ def user_list_providers( decrypt_credentials=False, ) - result_providers[provider.entity.identity.name] = user_provider + if isinstance(provider, PluginToolProviderController): + result_providers[f"plugin_provider.{user_provider.name}.{provider.plugin_unique_identifier}"] = ( + user_provider + ) + else: + result_providers[f"builtin_provider.{user_provider.name}"] = ( + user_provider + ) # get db api providers From 64706c709cfb4526d43865224f2cad09b57ff3bc 
Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Tue, 24 Sep 2024 16:35:01 +0800 Subject: [PATCH 046/325] fix --- api/services/tools/tools_manage_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/services/tools/tools_manage_service.py b/api/services/tools/tools_manage_service.py index 184596fc23c95b..a9d0d0f86a0c92 100644 --- a/api/services/tools/tools_manage_service.py +++ b/api/services/tools/tools_manage_service.py @@ -15,7 +15,7 @@ def list_tool_providers(user_id: str, tenant_id: str, typ: ToolProviderTypeApiLi :return: the list of tool providers """ - providers = ToolManager.user_list_providers(user_id, tenant_id, typ) + providers = ToolManager.list_providers_from_api(user_id, tenant_id, typ) # add icon for provider in providers: From 592f85f7a904076592f78e83900af4ab1a3ef141 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Tue, 24 Sep 2024 16:40:42 +0800 Subject: [PATCH 047/325] formatter --- api/services/tools/tools_transform_service.py | 24 ++++--------------- 1 file changed, 4 insertions(+), 20 deletions(-) diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 81d0ed762b6d11..e74301fe9de471 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -74,19 +74,9 @@ def builtin_provider_to_user_provider( id=provider_controller.entity.identity.name, author=provider_controller.entity.identity.author, name=provider_controller.entity.identity.name, - description=I18nObject( - en_US=provider_controller.entity.identity.description.en_US, - zh_Hans=provider_controller.entity.identity.description.zh_Hans, - pt_BR=provider_controller.entity.identity.description.pt_BR, - ja_JP=provider_controller.entity.identity.description.ja_JP, - ), + description=provider_controller.entity.identity.description, icon=provider_controller.entity.identity.icon, - label=I18nObject( - en_US=provider_controller.entity.identity.label.en_US, - 
zh_Hans=provider_controller.entity.identity.label.zh_Hans, - pt_BR=provider_controller.entity.identity.label.pt_BR, - ja_JP=provider_controller.entity.identity.label.ja_JP, - ), + label=provider_controller.entity.identity.label, type=ToolProviderType.BUILT_IN, masked_credentials={}, is_team_authorization=False, @@ -161,15 +151,9 @@ def workflow_provider_to_user_provider( id=provider_controller.provider_id, author=provider_controller.entity.identity.author, name=provider_controller.entity.identity.name, - description=I18nObject( - en_US=provider_controller.entity.identity.description.en_US, - zh_Hans=provider_controller.entity.identity.description.zh_Hans, - ), + description=provider_controller.entity.identity.description, icon=provider_controller.entity.identity.icon, - label=I18nObject( - en_US=provider_controller.entity.identity.label.en_US, - zh_Hans=provider_controller.entity.identity.label.zh_Hans, - ), + label=provider_controller.entity.identity.label, type=ToolProviderType.WORKFLOW, masked_credentials={}, is_team_authorization=True, From 68c10a1672f51c3f3ddb45f1e20615fc15a0cb99 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Tue, 24 Sep 2024 18:03:48 +0800 Subject: [PATCH 048/325] feat: add backwards invoke node api --- api/controllers/inner_api/plugin/plugin.py | 70 ++++++++---- api/core/plugin/backwards_invocation/node.py | 114 +++++++++++++++++++ api/core/plugin/entities/request.py | 30 ++++- api/core/workflow/workflow_entry.py | 82 +++++++++++++ api/services/workflow_service.py | 81 ++++++++++--- 5 files changed, 335 insertions(+), 42 deletions(-) create mode 100644 api/core/plugin/backwards_invocation/node.py diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index 4c28e6acb3c202..ae3533268927a2 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -8,13 +8,15 @@ from controllers.inner_api.wraps import plugin_inner_api_only from 
core.plugin.backwards_invocation.app import PluginAppBackwardsInvocation from core.plugin.backwards_invocation.model import PluginModelBackwardsInvocation +from core.plugin.backwards_invocation.node import PluginNodeBackwardsInvocation from core.plugin.encrypt import PluginEncrypter from core.plugin.entities.request import ( RequestInvokeApp, RequestInvokeEncrypt, RequestInvokeLLM, RequestInvokeModeration, - RequestInvokeNode, + RequestInvokeParameterExtractorNode, + RequestInvokeQuestionClassifierNode, RequestInvokeRerank, RequestInvokeSpeech2Text, RequestInvokeTextEmbedding, @@ -96,23 +98,46 @@ def generator(): yield ( ToolInvokeMessage( type=ToolInvokeMessage.MessageType.TEXT, - message=ToolInvokeMessage.TextMessage(text='helloworld'), + message=ToolInvokeMessage.TextMessage(text="helloworld"), ) .model_dump_json() .encode() - + b'\n\n' + + b"\n\n" ) return compact_generate_response(generator()) -class PluginInvokeNodeApi(Resource): +class PluginInvokeParameterExtractorNodeApi(Resource): @setup_required @plugin_inner_api_only @get_tenant - @plugin_data(payload_type=RequestInvokeNode) - def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeNode): - pass + @plugin_data(payload_type=RequestInvokeParameterExtractorNode) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeParameterExtractorNode): + return PluginNodeBackwardsInvocation.invoke_parameter_extractor( + tenant_id=tenant_model.id, + user_id=user_id, + parameters=payload.parameters, + model_config=payload.model, + instruction=payload.instruction, + query=payload.query, + ) + + +class PluginInvokeQuestionClassifierNodeApi(Resource): + @setup_required + @plugin_inner_api_only + @get_tenant + @plugin_data(payload_type=RequestInvokeQuestionClassifierNode) + def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeQuestionClassifierNode): + return PluginNodeBackwardsInvocation.invoke_question_classifier( + tenant_id=tenant_model.id, + user_id=user_id, + 
query=payload.query, + model_config=payload.model, + classes=payload.classes, + instruction=payload.instruction, + ) class PluginInvokeAppApi(Resource): @@ -127,15 +152,13 @@ def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeApp): tenant_id=tenant_model.id, conversation_id=payload.conversation_id, query=payload.query, - stream=payload.response_mode == 'streaming', + stream=payload.response_mode == "streaming", inputs=payload.inputs, - files=payload.files - ) - - return compact_generate_response( - PluginAppBackwardsInvocation.convert_to_event_stream(response) + files=payload.files, ) + return compact_generate_response(PluginAppBackwardsInvocation.convert_to_event_stream(response)) + class PluginInvokeEncryptApi(Resource): @setup_required @@ -149,13 +172,14 @@ def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeEncrypt return PluginEncrypter.invoke_encrypt(tenant_model, payload) -api.add_resource(PluginInvokeLLMApi, '/invoke/llm') -api.add_resource(PluginInvokeTextEmbeddingApi, '/invoke/text-embedding') -api.add_resource(PluginInvokeRerankApi, '/invoke/rerank') -api.add_resource(PluginInvokeTTSApi, '/invoke/tts') -api.add_resource(PluginInvokeSpeech2TextApi, '/invoke/speech2text') -api.add_resource(PluginInvokeModerationApi, '/invoke/moderation') -api.add_resource(PluginInvokeToolApi, '/invoke/tool') -api.add_resource(PluginInvokeNodeApi, '/invoke/node') -api.add_resource(PluginInvokeAppApi, '/invoke/app') -api.add_resource(PluginInvokeEncryptApi, '/invoke/encrypt') +api.add_resource(PluginInvokeLLMApi, "/invoke/llm") +api.add_resource(PluginInvokeTextEmbeddingApi, "/invoke/text-embedding") +api.add_resource(PluginInvokeRerankApi, "/invoke/rerank") +api.add_resource(PluginInvokeTTSApi, "/invoke/tts") +api.add_resource(PluginInvokeSpeech2TextApi, "/invoke/speech2text") +api.add_resource(PluginInvokeModerationApi, "/invoke/moderation") +api.add_resource(PluginInvokeToolApi, "/invoke/tool") 
+api.add_resource(PluginInvokeParameterExtractorNodeApi, "/invoke/parameter-extractor") +api.add_resource(PluginInvokeQuestionClassifierNodeApi, "/invoke/question-classifier") +api.add_resource(PluginInvokeAppApi, "/invoke/app") +api.add_resource(PluginInvokeEncryptApi, "/invoke/encrypt") diff --git a/api/core/plugin/backwards_invocation/node.py b/api/core/plugin/backwards_invocation/node.py new file mode 100644 index 00000000000000..9a7fd5fc3a3491 --- /dev/null +++ b/api/core/plugin/backwards_invocation/node.py @@ -0,0 +1,114 @@ +from core.plugin.backwards_invocation.base import BaseBackwardsInvocation +from core.workflow.nodes.parameter_extractor.entities import ( + ModelConfig as ParameterExtractorModelConfig, +) +from core.workflow.nodes.parameter_extractor.entities import ( + ParameterConfig, + ParameterExtractorNodeData, +) +from core.workflow.nodes.question_classifier.entities import ( + ClassConfig, + QuestionClassifierNodeData, +) +from core.workflow.nodes.question_classifier.entities import ( + ModelConfig as QuestionClassifierModelConfig, +) +from services.workflow_service import WorkflowService + + +class PluginNodeBackwardsInvocation(BaseBackwardsInvocation): + @classmethod + def invoke_parameter_extractor( + cls, + tenant_id: str, + user_id: str, + parameters: list[ParameterConfig], + model_config: ParameterExtractorModelConfig, + instruction: str, + query: str, + ) -> dict: + """ + Invoke parameter extractor node. 
+ + :param tenant_id: str + :param user_id: str + :param parameters: list[ParameterConfig] + :param model_config: ModelConfig + :param instruction: str + :param query: str + :return: dict with __reason, __is_success, and other parameters + """ + workflow_service = WorkflowService() + node_id = "1919810" + node_data = ParameterExtractorNodeData( + title="parameter_extractor", + desc="parameter_extractor", + parameters=parameters, + reasoning_mode="function_call", + query=[node_id, "query"], + model=model_config, + instruction=instruction, # instruct with variables are not supported + ) + node_data_dict = node_data.model_dump() + execution = workflow_service.run_free_workflow_node( + node_data_dict, + tenant_id=tenant_id, + user_id=user_id, + node_id=node_id, + user_inputs={ + f"{node_id}.query": query, + }, + ) + + output = execution.outputs_dict + return output or { + "__reason": "No parameters extracted", + "__is_success": False, + } + + @classmethod + def invoke_question_classifier( + cls, + tenant_id: str, + user_id: str, + model_config: QuestionClassifierModelConfig, + classes: list[ClassConfig], + instruction: str, + query: str, + ) -> dict: + """ + Invoke question classifier node. 
+ + :param tenant_id: str + :param user_id: str + :param model_config: ModelConfig + :param classes: list[ClassConfig] + :param instruction: str + :param query: str + :return: dict with class_name + """ + workflow_service = WorkflowService() + node_id = "1919810" + node_data = QuestionClassifierNodeData( + title="question_classifier", + desc="question_classifier", + query_variable_selector=[node_id, "query"], + model=model_config, + classes=classes, + instruction=instruction, # instruct with variables are not supported + ) + node_data_dict = node_data.model_dump() + execution = workflow_service.run_free_workflow_node( + node_data_dict, + tenant_id=tenant_id, + user_id=user_id, + node_id=node_id, + user_inputs={ + f"{node_id}.query": query, + }, + ) + + output = execution.outputs_dict + return output or { + "class_name": classes[0].name, + } diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 2e87b76636ca2d..bf4c4448c77243 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -14,6 +14,16 @@ UserPromptMessage, ) from core.model_runtime.entities.model_entities import ModelType +from core.workflow.nodes.question_classifier.entities import ( + ClassConfig, + ModelConfig as QuestionClassifierModelConfig, +) +from core.workflow.nodes.parameter_extractor.entities import ( + ModelConfig as ParameterExtractorModelConfig, +) +from core.workflow.nodes.parameter_extractor.entities import ( + ParameterConfig, +) class RequestInvokeTool(BaseModel): @@ -92,11 +102,27 @@ class RequestInvokeModeration(BaseModel): """ -class RequestInvokeNode(BaseModel): +class RequestInvokeParameterExtractorNode(BaseModel): """ - Request to invoke node + Request to invoke parameter extractor node """ + parameters: list[ParameterConfig] + model: ParameterExtractorModelConfig + instruction: str + query: str + + +class RequestInvokeQuestionClassifierNode(BaseModel): + """ + Request to invoke question classifier node + """ 
+ + query: str + model: QuestionClassifierModelConfig + classes: list[ClassConfig] + instruction: str + class RequestInvokeApp(BaseModel): """ diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index 74a598ada58544..9477e98c926881 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -205,6 +205,88 @@ def single_step_run( except Exception as e: raise WorkflowNodeRunFailedError(node_instance=node_instance, error=str(e)) + @classmethod + def run_free_node( + cls, node_data: dict, node_id: str, tenant_id: str, user_id: str, user_inputs: dict[str, Any] + ) -> tuple[BaseNode, Generator[RunEvent | InNodeEvent, None, None]]: + """ + Run free node + + NOTE: only parameter_extractor/question_classifier are supported + + :param node_data: node data + :param user_id: user id + :param user_inputs: user inputs + :return: + """ + # generate a fake graph + node_config = {"id": node_id, "width": 114, "height": 514, "type": "custom", "data": node_data} + graph_dict = { + "nodes": [node_config], + } + + node_type = NodeType.value_of(node_data.get("type", "")) + if node_type not in {NodeType.PARAMETER_EXTRACTOR, NodeType.QUESTION_CLASSIFIER}: + raise ValueError(f"Node type {node_type} not supported") + + node_cls = node_classes.get(node_type) + if not node_cls: + raise ValueError(f"Node class not found for node type {node_type}") + + graph = Graph.init(graph_config=graph_dict) + + # init variable pool + variable_pool = VariablePool( + system_variables={}, + user_inputs={}, + environment_variables=[], + ) + + node_cls = cast(type[BaseNode], node_cls) + # init workflow run state + node_instance: BaseNode = node_cls( + id=str(uuid.uuid4()), + config=node_config, + graph_init_params=GraphInitParams( + tenant_id=tenant_id, + app_id="", + workflow_type=WorkflowType.WORKFLOW, + workflow_id="", + graph_config=graph_dict, + user_id=user_id, + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + 
call_depth=0, + ), + graph=graph, + graph_runtime_state=GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()), + ) + + try: + # variable selector to variable mapping + try: + variable_mapping = node_cls.extract_variable_selector_to_variable_mapping( + graph_config=graph_dict, config=node_config + ) + except NotImplementedError: + variable_mapping = {} + + cls.mapping_user_inputs_to_variable_pool( + variable_mapping=variable_mapping, + user_inputs=user_inputs, + variable_pool=variable_pool, + tenant_id=tenant_id, + node_type=node_type, + node_data=node_instance.node_data, + ) + + # run node + generator = node_instance.run() + + return node_instance, generator + except Exception as e: + raise WorkflowNodeRunFailedError(node_instance=node_instance, error=str(e)) + @classmethod def handle_special_values(cls, value: Optional[Mapping[str, Any]]) -> Optional[dict]: """ diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 0ff81f1f7e834d..399451cb8e446c 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -1,8 +1,8 @@ import json import time -from collections.abc import Sequence +from collections.abc import Callable, Generator, Sequence from datetime import datetime, timezone -from typing import Optional +from typing import Any, Optional from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager @@ -10,7 +10,9 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.workflow.entities.node_entities import NodeRunResult, NodeType from core.workflow.errors import WorkflowNodeRunFailedError -from core.workflow.nodes.event import RunCompletedEvent +from core.workflow.graph_engine.entities.event import InNodeEvent +from core.workflow.nodes.base_node import BaseNode +from core.workflow.nodes.event import RunCompletedEvent, RunEvent from 
core.workflow.nodes.node_mapping import node_classes from core.workflow.workflow_entry import WorkflowEntry from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated @@ -216,13 +218,64 @@ def run_draft_workflow_node( # run draft workflow node start_at = time.perf_counter() - try: - node_instance, generator = WorkflowEntry.single_step_run( + workflow_node_execution = self._handle_node_run_result( + getter=lambda: WorkflowEntry.single_step_run( workflow=draft_workflow, node_id=node_id, user_inputs=user_inputs, user_id=account.id, - ) + ), + start_at=start_at, + tenant_id=app_model.tenant_id, + node_id=node_id, + ) + + db.session.add(workflow_node_execution) + db.session.commit() + + return workflow_node_execution + + def run_free_workflow_node( + self, node_data: dict, tenant_id: str, user_id: str, node_id: str, user_inputs: dict[str, Any] + ) -> WorkflowNodeExecution: + """ + Run draft workflow node + """ + # run draft workflow node + start_at = time.perf_counter() + + workflow_node_execution = self._handle_node_run_result( + getter=lambda: WorkflowEntry.run_free_node( + node_id=node_id, + node_data=node_data, + tenant_id=tenant_id, + user_id=user_id, + user_inputs=user_inputs, + ), + start_at=start_at, + tenant_id=tenant_id, + node_id=node_id + ) + + return workflow_node_execution + + def _handle_node_run_result( + self, + getter: Callable[[], tuple[BaseNode, Generator[RunEvent | InNodeEvent, None, None]]], + start_at: float, + tenant_id: str, + node_id: str, + ): + """ + Handle node run result + + :param getter: Callable[[], tuple[BaseNode, Generator[RunEvent | InNodeEvent, None, None]]] + :param start_at: float + :param tenant_id: str + :param node_id: str + """ + try: + node_instance, generator = getter() node_run_result: NodeRunResult | None = None for event in generator: @@ -245,9 +298,7 @@ def run_draft_workflow_node( error = e.error workflow_node_execution = WorkflowNodeExecution() - workflow_node_execution.tenant_id = 
app_model.tenant_id - workflow_node_execution.app_id = app_model.id - workflow_node_execution.workflow_id = draft_workflow.id + workflow_node_execution.tenant_id = tenant_id workflow_node_execution.triggered_from = WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP.value workflow_node_execution.index = 1 workflow_node_execution.node_id = node_id @@ -255,7 +306,6 @@ def run_draft_workflow_node( workflow_node_execution.title = node_instance.node_data.title workflow_node_execution.elapsed_time = time.perf_counter() - start_at workflow_node_execution.created_by_role = CreatedByRole.ACCOUNT.value - workflow_node_execution.created_by = account.id workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None) workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None) @@ -277,9 +327,6 @@ def run_draft_workflow_node( workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value workflow_node_execution.error = error - db.session.add(workflow_node_execution) - db.session.commit() - return workflow_node_execution def convert_to_workflow(self, app_model: App, account: Account, args: dict) -> App: @@ -302,10 +349,10 @@ def convert_to_workflow(self, app_model: App, account: Account, args: dict) -> A new_app = workflow_converter.convert_to_workflow( app_model=app_model, account=account, - name=args.get("name"), - icon_type=args.get("icon_type"), - icon=args.get("icon"), - icon_background=args.get("icon_background"), + name=args.get("name", ""), + icon_type=args.get("icon_type", ""), + icon=args.get("icon", ""), + icon_background=args.get("icon_background", ""), ) return new_app From a91951b3742c02c6017e1afaac8afa3fc472a4f9 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Tue, 24 Sep 2024 20:15:13 +0800 Subject: [PATCH 049/325] feat: invoke node --- api/core/plugin/backwards_invocation/node.py | 23 +++++++++++--------- api/core/plugin/entities/request.py | 10 +++++---- api/core/workflow/workflow_entry.py | 21 +++++++++++++++++- 
api/services/workflow_service.py | 4 ++++ 4 files changed, 43 insertions(+), 15 deletions(-) diff --git a/api/core/plugin/backwards_invocation/node.py b/api/core/plugin/backwards_invocation/node.py index 9a7fd5fc3a3491..4c1d21437e609f 100644 --- a/api/core/plugin/backwards_invocation/node.py +++ b/api/core/plugin/backwards_invocation/node.py @@ -1,4 +1,5 @@ from core.plugin.backwards_invocation.base import BaseBackwardsInvocation +from core.workflow.entities.node_entities import NodeType from core.workflow.nodes.parameter_extractor.entities import ( ModelConfig as ParameterExtractorModelConfig, ) @@ -36,7 +37,7 @@ def invoke_parameter_extractor( :param model_config: ModelConfig :param instruction: str :param query: str - :return: dict with __reason, __is_success, and other parameters + :return: dict """ workflow_service = WorkflowService() node_id = "1919810" @@ -50,6 +51,7 @@ def invoke_parameter_extractor( instruction=instruction, # instruct with variables are not supported ) node_data_dict = node_data.model_dump() + node_data_dict["type"] = NodeType.PARAMETER_EXTRACTOR.value execution = workflow_service.run_free_workflow_node( node_data_dict, tenant_id=tenant_id, @@ -60,10 +62,10 @@ def invoke_parameter_extractor( }, ) - output = execution.outputs_dict - return output or { - "__reason": "No parameters extracted", - "__is_success": False, + return { + "inputs": execution.inputs_dict, + "outputs": execution.outputs_dict, + "process_data": execution.process_data_dict, } @classmethod @@ -85,7 +87,7 @@ def invoke_question_classifier( :param classes: list[ClassConfig] :param instruction: str :param query: str - :return: dict with class_name + :return: dict """ workflow_service = WorkflowService() node_id = "1919810" @@ -108,7 +110,8 @@ def invoke_question_classifier( }, ) - output = execution.outputs_dict - return output or { - "class_name": classes[0].name, - } + return { + "inputs": execution.inputs_dict, + "outputs": execution.outputs_dict, + "process_data": 
execution.process_data_dict, + } \ No newline at end of file diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index bf4c4448c77243..00ac53ca72e64c 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -14,16 +14,18 @@ UserPromptMessage, ) from core.model_runtime.entities.model_entities import ModelType -from core.workflow.nodes.question_classifier.entities import ( - ClassConfig, - ModelConfig as QuestionClassifierModelConfig, -) from core.workflow.nodes.parameter_extractor.entities import ( ModelConfig as ParameterExtractorModelConfig, ) from core.workflow.nodes.parameter_extractor.entities import ( ParameterConfig, ) +from core.workflow.nodes.question_classifier.entities import ( + ClassConfig, +) +from core.workflow.nodes.question_classifier.entities import ( + ModelConfig as QuestionClassifierModelConfig, +) class RequestInvokeTool(BaseModel): diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index 9477e98c926881..0420d62ef79b80 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -221,8 +221,27 @@ def run_free_node( """ # generate a fake graph node_config = {"id": node_id, "width": 114, "height": 514, "type": "custom", "data": node_data} + start_node_config = { + "id": "start", + "width": 114, + "height": 514, + "type": "custom", + "data": { + "type": NodeType.START.value, + "title": "Start", + "desc": "Start", + }, + } graph_dict = { - "nodes": [node_config], + "nodes": [start_node_config, node_config], + "edges": [ + { + "source": "start", + "target": node_id, + "sourceHandle": "source", + "targetHandle": "target", + } + ], } node_type = NodeType.value_of(node_data.get("type", "")) diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 399451cb8e446c..a1e2f3105738b8 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -230,6 +230,10 @@ def 
run_draft_workflow_node( node_id=node_id, ) + workflow_node_execution.app_id = app_model.id + workflow_node_execution.created_by = account.id + workflow_node_execution.workflow_id = draft_workflow.id + db.session.add(workflow_node_execution) db.session.commit() From 153dc5b3f39546a8f383dabe0b2d467b3c803b86 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 26 Sep 2024 10:26:45 +0800 Subject: [PATCH 050/325] feat: endpoint apis --- api/controllers/console/__init__.py | 1 + api/controllers/console/workspace/endpoint.py | 62 ++++++++ .../console/workspace/tool_providers.py | 144 ++++++++++++------ api/services/plugin/endpoint_service.py | 2 + 4 files changed, 161 insertions(+), 48 deletions(-) create mode 100644 api/controllers/console/workspace/endpoint.py create mode 100644 api/services/plugin/endpoint_service.py diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index 1cf987050a7866..d17d5bc827b58c 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -58,6 +58,7 @@ # Import workspace controllers from .workspace import ( account, + endpoint, load_balancing_config, members, model_providers, diff --git a/api/controllers/console/workspace/endpoint.py b/api/controllers/console/workspace/endpoint.py new file mode 100644 index 00000000000000..668e4ea9534cd1 --- /dev/null +++ b/api/controllers/console/workspace/endpoint.py @@ -0,0 +1,62 @@ +from flask_restful import Resource + +from controllers.console import api +from controllers.console.setup import setup_required +from controllers.console.wraps import account_initialization_required +from libs.login import login_required + + +class EndpointCreateApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self): + pass + + +class EndpointListApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self): + pass + + +class EndpointDeleteApi(Resource): + @setup_required + 
@login_required + @account_initialization_required + def post(self): + pass + + +class EndpointUpdateApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self): + pass + + +class EndpointEnableApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self): + pass + + +class EndpointDisableApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self): + pass + + +api.add_resource(EndpointCreateApi, "/workspaces/current/endpoints/create") +api.add_resource(EndpointListApi, "/workspaces/current/endpoints/list") +api.add_resource(EndpointDeleteApi, "/workspaces/current/endpoints/delete") +api.add_resource(EndpointUpdateApi, "/workspaces/current/endpoints/update") +api.add_resource(EndpointEnableApi, "/workspaces/current/endpoints/enable") +api.add_resource(EndpointDisableApi, "/workspaces/current/endpoints/disable") \ No newline at end of file diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 68ad383a744d70..1b49103ced63ed 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -24,8 +24,10 @@ class ToolProviderListApi(Resource): @login_required @account_initialization_required def get(self): - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id req = reqparse.RequestParser() req.add_argument( @@ -46,8 +48,10 @@ class ToolBuiltinProviderListToolsApi(Resource): @login_required @account_initialization_required def get(self, provider): - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id return jsonable_encoder( BuiltinToolManageService.list_builtin_tool_provider_tools( @@ -63,11 +67,13 @@ class 
ToolBuiltinProviderDeleteApi(Resource): @login_required @account_initialization_required def post(self, provider): - if not current_user.is_admin_or_owner: + user = current_user + + if not user.is_admin_or_owner: raise Forbidden() - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user_id = user.id + tenant_id = user.current_tenant_id return BuiltinToolManageService.delete_builtin_tool_provider( user_id, @@ -81,11 +87,13 @@ class ToolBuiltinProviderUpdateApi(Resource): @login_required @account_initialization_required def post(self, provider): - if not current_user.is_admin_or_owner: + user = current_user + + if not user.is_admin_or_owner: raise Forbidden() - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user_id = user.id + tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") @@ -105,8 +113,10 @@ class ToolBuiltinProviderGetCredentialsApi(Resource): @login_required @account_initialization_required def get(self, provider): - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id return BuiltinToolManageService.get_builtin_tool_provider_credentials( user_id, @@ -118,7 +128,10 @@ def get(self, provider): class ToolBuiltinProviderIconApi(Resource): @setup_required def get(self, provider): - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id icon_bytes, mimetype = BuiltinToolManageService.get_builtin_tool_provider_icon(provider, tenant_id) icon_cache_max_age = dify_config.TOOL_ICON_CACHE_MAX_AGE @@ -130,11 +143,13 @@ class ToolApiProviderAddApi(Resource): @login_required @account_initialization_required def post(self): - if not current_user.is_admin_or_owner: + user = current_user + + if not user.is_admin_or_owner: raise Forbidden() - user_id = 
current_user.id - tenant_id = current_user.current_tenant_id + user_id = user.id + tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") @@ -167,6 +182,11 @@ class ToolApiProviderGetRemoteSchemaApi(Resource): @login_required @account_initialization_required def get(self): + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id + parser = reqparse.RequestParser() parser.add_argument("url", type=str, required=True, nullable=False, location="args") @@ -174,8 +194,8 @@ def get(self): args = parser.parse_args() return ApiToolManageService.get_api_tool_provider_remote_schema( - current_user.id, - current_user.current_tenant_id, + user_id, + tenant_id, args["url"], ) @@ -185,8 +205,10 @@ class ToolApiProviderListToolsApi(Resource): @login_required @account_initialization_required def get(self): - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id parser = reqparse.RequestParser() @@ -208,11 +230,13 @@ class ToolApiProviderUpdateApi(Resource): @login_required @account_initialization_required def post(self): - if not current_user.is_admin_or_owner: + user = current_user + + if not user.is_admin_or_owner: raise Forbidden() - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user_id = user.id + tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") @@ -247,11 +271,13 @@ class ToolApiProviderDeleteApi(Resource): @login_required @account_initialization_required def post(self): - if not current_user.is_admin_or_owner: + user = current_user + + if not user.is_admin_or_owner: raise Forbidden() - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user_id = user.id + tenant_id = user.current_tenant_id 
parser = reqparse.RequestParser() @@ -271,8 +297,10 @@ class ToolApiProviderGetApi(Resource): @login_required @account_initialization_required def get(self): - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id parser = reqparse.RequestParser() @@ -292,7 +320,11 @@ class ToolBuiltinProviderCredentialsSchemaApi(Resource): @login_required @account_initialization_required def get(self, provider): - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id + return BuiltinToolManageService.list_builtin_provider_credentials_schema(provider, tenant_id) @@ -344,11 +376,13 @@ class ToolWorkflowProviderCreateApi(Resource): @login_required @account_initialization_required def post(self): - if not current_user.is_admin_or_owner: + user = current_user + + if not user.is_admin_or_owner: raise Forbidden() - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user_id = user.id + tenant_id = user.current_tenant_id reqparser = reqparse.RequestParser() reqparser.add_argument("workflow_app_id", type=uuid_value, required=True, nullable=False, location="json") @@ -381,11 +415,13 @@ class ToolWorkflowProviderUpdateApi(Resource): @login_required @account_initialization_required def post(self): - if not current_user.is_admin_or_owner: + user = current_user + + if not user.is_admin_or_owner: raise Forbidden() - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user_id = user.id + tenant_id = user.current_tenant_id reqparser = reqparse.RequestParser() reqparser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json") @@ -421,11 +457,13 @@ class ToolWorkflowProviderDeleteApi(Resource): @login_required @account_initialization_required def post(self): - if not current_user.is_admin_or_owner: + user = current_user + + if not 
user.is_admin_or_owner: raise Forbidden() - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user_id = user.id + tenant_id = user.current_tenant_id reqparser = reqparse.RequestParser() reqparser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json") @@ -444,8 +482,10 @@ class ToolWorkflowProviderGetApi(Resource): @login_required @account_initialization_required def get(self): - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("workflow_tool_id", type=uuid_value, required=False, nullable=True, location="args") @@ -476,8 +516,10 @@ class ToolWorkflowProviderListToolApi(Resource): @login_required @account_initialization_required def get(self): - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id parser = reqparse.RequestParser() parser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="args") @@ -498,8 +540,10 @@ class ToolBuiltinListApi(Resource): @login_required @account_initialization_required def get(self): - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id return jsonable_encoder( [ @@ -517,8 +561,10 @@ class ToolApiListApi(Resource): @login_required @account_initialization_required def get(self): - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = user.id + tenant_id = user.current_tenant_id return jsonable_encoder( [ @@ -536,8 +582,10 @@ class ToolWorkflowListApi(Resource): @login_required @account_initialization_required def get(self): - user_id = current_user.id - tenant_id = current_user.current_tenant_id + user = current_user + + user_id = 
user.id + tenant_id = user.current_tenant_id return jsonable_encoder( [ diff --git a/api/services/plugin/endpoint_service.py b/api/services/plugin/endpoint_service.py new file mode 100644 index 00000000000000..e9b395efcc84b7 --- /dev/null +++ b/api/services/plugin/endpoint_service.py @@ -0,0 +1,2 @@ +class EndpointService: + pass \ No newline at end of file From ea497f828f0df394a91c98bbbd761944c2340d30 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 26 Sep 2024 12:49:00 +0800 Subject: [PATCH 051/325] feat: endpoint management --- api/controllers/console/workspace/endpoint.py | 86 +++++++++++++-- api/core/plugin/entities/base.py | 9 ++ api/core/plugin/entities/endpoint.py | 11 ++ api/core/plugin/entities/plugin.py | 10 ++ api/core/plugin/manager/endpoint.py | 101 +++++++++++++++++- api/core/plugin/manager/plugin.py | 9 ++ api/services/plugin/endpoint_service.py | 52 ++++++++- 7 files changed, 268 insertions(+), 10 deletions(-) create mode 100644 api/core/plugin/entities/base.py create mode 100644 api/core/plugin/entities/endpoint.py create mode 100644 api/core/plugin/entities/plugin.py diff --git a/api/controllers/console/workspace/endpoint.py b/api/controllers/console/workspace/endpoint.py index 668e4ea9534cd1..44e35f32bd2b91 100644 --- a/api/controllers/console/workspace/endpoint.py +++ b/api/controllers/console/workspace/endpoint.py @@ -1,9 +1,12 @@ -from flask_restful import Resource +from flask_login import current_user +from flask_restful import Resource, reqparse +from werkzeug.exceptions import Forbidden from controllers.console import api from controllers.console.setup import setup_required from controllers.console.wraps import account_initialization_required from libs.login import login_required +from services.plugin.endpoint_service import EndpointService class EndpointCreateApi(Resource): @@ -11,7 +14,24 @@ class EndpointCreateApi(Resource): @login_required @account_initialization_required def post(self): - pass + user = current_user + if not 
user.is_admin_or_owner: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("plugin_unique_identifier", type=str, required=True) + parser.add_argument("settings", type=dict, required=True) + args = parser.parse_args() + + plugin_unique_identifier = args["plugin_unique_identifier"] + settings = args["settings"] + + return EndpointService.create_endpoint( + tenant_id=user.current_tenant_id, + user_id=user.id, + plugin_unique_identifier=plugin_unique_identifier, + settings=settings, + ) class EndpointListApi(Resource): @@ -19,7 +39,12 @@ class EndpointListApi(Resource): @login_required @account_initialization_required def get(self): - pass + user = current_user + + return EndpointService.list_endpoints( + tenant_id=user.current_tenant_id, + user_id=user.id, + ) class EndpointDeleteApi(Resource): @@ -27,7 +52,17 @@ class EndpointDeleteApi(Resource): @login_required @account_initialization_required def post(self): - pass + user = current_user + + parser = reqparse.RequestParser() + parser.add_argument("endpoint_id", type=str, required=True) + args = parser.parse_args() + + endpoint_id = args["endpoint_id"] + + return EndpointService.delete_endpoint( + tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id + ) class EndpointUpdateApi(Resource): @@ -35,7 +70,22 @@ class EndpointUpdateApi(Resource): @login_required @account_initialization_required def post(self): - pass + user = current_user + + parser = reqparse.RequestParser() + parser.add_argument("endpoint_id", type=str, required=True) + parser.add_argument("settings", type=dict, required=True) + args = parser.parse_args() + + endpoint_id = args["endpoint_id"] + settings = args["settings"] + + return EndpointService.update_endpoint( + tenant_id=user.current_tenant_id, + user_id=user.id, + endpoint_id=endpoint_id, + settings=settings, + ) class EndpointEnableApi(Resource): @@ -43,7 +93,17 @@ class EndpointEnableApi(Resource): @login_required 
@account_initialization_required def post(self): - pass + user = current_user + + parser = reqparse.RequestParser() + parser.add_argument("endpoint_id", type=str, required=True) + args = parser.parse_args() + + endpoint_id = args["endpoint_id"] + + return EndpointService.enable_endpoint( + tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id + ) class EndpointDisableApi(Resource): @@ -51,7 +111,17 @@ class EndpointDisableApi(Resource): @login_required @account_initialization_required def post(self): - pass + user = current_user + + parser = reqparse.RequestParser() + parser.add_argument("endpoint_id", type=str, required=True) + args = parser.parse_args() + + endpoint_id = args["endpoint_id"] + + return EndpointService.disable_endpoint( + tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id + ) api.add_resource(EndpointCreateApi, "/workspaces/current/endpoints/create") @@ -59,4 +129,4 @@ def post(self): api.add_resource(EndpointDeleteApi, "/workspaces/current/endpoints/delete") api.add_resource(EndpointUpdateApi, "/workspaces/current/endpoints/update") api.add_resource(EndpointEnableApi, "/workspaces/current/endpoints/enable") -api.add_resource(EndpointDisableApi, "/workspaces/current/endpoints/disable") \ No newline at end of file +api.add_resource(EndpointDisableApi, "/workspaces/current/endpoints/disable") diff --git a/api/core/plugin/entities/base.py b/api/core/plugin/entities/base.py new file mode 100644 index 00000000000000..bfec0d43025b93 --- /dev/null +++ b/api/core/plugin/entities/base.py @@ -0,0 +1,9 @@ +from datetime import datetime + +from pydantic import BaseModel + + +class BasePluginEntity(BaseModel): + id: str + created_at: datetime + updated_at: datetime diff --git a/api/core/plugin/entities/endpoint.py b/api/core/plugin/entities/endpoint.py new file mode 100644 index 00000000000000..54d718fd5b2316 --- /dev/null +++ b/api/core/plugin/entities/endpoint.py @@ -0,0 +1,11 @@ +from datetime import datetime + 
+from core.plugin.entities.base import BasePluginEntity + + +class EndpointEntity(BasePluginEntity): + settings: dict + hook_id: str + tenant_id: str + plugin_id: str + expired_at: datetime diff --git a/api/core/plugin/entities/plugin.py b/api/core/plugin/entities/plugin.py new file mode 100644 index 00000000000000..8098ade025b763 --- /dev/null +++ b/api/core/plugin/entities/plugin.py @@ -0,0 +1,10 @@ +from core.plugin.entities.base import BasePluginEntity + + +class PluginEntity(BasePluginEntity): + name: str + plugin_id: str + plugin_unique_identifier: str + tenant_id: str + endpoints_setups: int + endpoints_active: int diff --git a/api/core/plugin/manager/endpoint.py b/api/core/plugin/manager/endpoint.py index a3f49903fd8595..1c48f8200fe761 100644 --- a/api/core/plugin/manager/endpoint.py +++ b/api/core/plugin/manager/endpoint.py @@ -1,5 +1,104 @@ +from core.plugin.entities.endpoint import EndpointEntity from core.plugin.manager.base import BasePluginManager class PluginEndpointManager(BasePluginManager): - pass \ No newline at end of file + def create_endpoint(self, tenant_id: str, user_id: str, plugin_unique_identifier: str, settings: dict): + """ + Create an endpoint for the given plugin. + + Errors will be raised if any error occurs. + """ + self._request_with_plugin_daemon_response( + "POST", + f"plugin/{tenant_id}/endpoint/setup", + dict, + headers={ + "Content-Type": "application/json", + }, + data={ + "user_id": user_id, + "plugin_unique_identifier": plugin_unique_identifier, + "settings": settings, + }, + ) + + def list_endpoints(self, tenant_id: str, user_id: str): + """ + List all endpoints for the given tenant and user. 
+ """ + return self._request_with_plugin_daemon_response( + "GET", + f"plugin/{tenant_id}/endpoint/list", + list[EndpointEntity], + params={"page": 1, "page_size": 256}, + ) + + def list_plugin_endpoints(self, tenant_id: str, user_id: str, plugin_unique_identifier: str): + """ + List all endpoints for the given tenant, user and plugin. + """ + return self._request_with_plugin_daemon_response( + "GET", + f"plugin/{tenant_id}/endpoint/list/plugin", + list[EndpointEntity], + headers={ + "Content-Type": "application/json", + }, + data={ + "plugin_unique_identifier": plugin_unique_identifier, + }, + ) + + def update_endpoint(self, tenant_id: str, user_id: str, endpoint_id: str, settings: dict): + """ + Update the settings of the given endpoint. + """ + self._request_with_plugin_daemon_response( + "POST", + f"plugin/{tenant_id}/endpoint/update", + dict, + data={ + "endpoint_id": endpoint_id, + "settings": settings, + }, + ) + + def delete_endpoint(self, tenant_id: str, user_id: str, endpoint_id: str): + """ + Delete the given endpoint. + """ + self._request_with_plugin_daemon_response( + "DELETE", + f"plugin/{tenant_id}/endpoint/remove", + dict, + data={ + "endpoint_id": endpoint_id, + }, + ) + + def enable_endpoint(self, tenant_id: str, user_id: str, endpoint_id: str): + """ + Enable the given endpoint. + """ + self._request_with_plugin_daemon_response( + "POST", + f"plugin/{tenant_id}/endpoint/enable", + dict, + data={ + "endpoint_id": endpoint_id, + }, + ) + + def disable_endpoint(self, tenant_id: str, user_id: str, endpoint_id: str): + """ + Disable the given endpoint. 
+ """ + self._request_with_plugin_daemon_response( + "POST", + f"plugin/{tenant_id}/endpoint/disable", + dict, + data={ + "endpoint_id": endpoint_id, + }, + ) diff --git a/api/core/plugin/manager/plugin.py b/api/core/plugin/manager/plugin.py index cabe028ff34058..5a9eb336c5adac 100644 --- a/api/core/plugin/manager/plugin.py +++ b/api/core/plugin/manager/plugin.py @@ -1,5 +1,6 @@ from collections.abc import Generator +from core.plugin.entities.plugin import PluginEntity from core.plugin.entities.plugin_daemon import InstallPluginMessage from core.plugin.manager.base import BasePluginManager @@ -12,6 +13,14 @@ def fetch_plugin_by_identifier(self, tenant_id: str, identifier: str) -> bool: "GET", f"plugin/{tenant_id}/fetch/identifier", bool, params={"plugin_unique_identifier": identifier} ) + def list_plugins(self, tenant_id: str) -> list[PluginEntity]: + return self._request_with_plugin_daemon_response( + "GET", + f"plugin/{tenant_id}/management/list", + list[PluginEntity], + params={"page": 1, "page_size": 256}, + ) + def install_from_pkg(self, tenant_id: str, pkg: bytes) -> Generator[InstallPluginMessage, None, None]: """ Install a plugin from a package. 
diff --git a/api/services/plugin/endpoint_service.py b/api/services/plugin/endpoint_service.py index e9b395efcc84b7..4b3258ef6e5d52 100644 --- a/api/services/plugin/endpoint_service.py +++ b/api/services/plugin/endpoint_service.py @@ -1,2 +1,52 @@ +from core.plugin.manager.endpoint import PluginEndpointManager + + class EndpointService: - pass \ No newline at end of file + @classmethod + def create_endpoint(cls, tenant_id: str, user_id: str, plugin_unique_identifier: str, settings: dict): + return PluginEndpointManager().create_endpoint( + tenant_id=tenant_id, + user_id=user_id, + plugin_unique_identifier=plugin_unique_identifier, + settings=settings, + ) + + @classmethod + def list_endpoints(cls, tenant_id: str, user_id: str): + return PluginEndpointManager().list_endpoints( + tenant_id=tenant_id, + user_id=user_id, + ) + + @classmethod + def update_endpoint(cls, tenant_id: str, user_id: str, endpoint_id: str, settings: dict): + return PluginEndpointManager().update_endpoint( + tenant_id=tenant_id, + user_id=user_id, + endpoint_id=endpoint_id, + settings=settings, + ) + + @classmethod + def delete_endpoint(cls, tenant_id: str, user_id: str, endpoint_id: str): + return PluginEndpointManager().delete_endpoint( + tenant_id=tenant_id, + user_id=user_id, + endpoint_id=endpoint_id, + ) + + @classmethod + def enable_endpoint(cls, tenant_id: str, user_id: str, endpoint_id: str): + return PluginEndpointManager().enable_endpoint( + tenant_id=tenant_id, + user_id=user_id, + endpoint_id=endpoint_id, + ) + + @classmethod + def disable_endpoint(cls, tenant_id: str, user_id: str, endpoint_id: str): + return PluginEndpointManager().disable_endpoint( + tenant_id=tenant_id, + user_id=user_id, + endpoint_id=endpoint_id, + ) From 1d575524c3862447eed628ffcc49c666b84254fe Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 26 Sep 2024 14:20:05 +0800 Subject: [PATCH 052/325] fix: missing user id --- api/core/plugin/manager/endpoint.py | 1 + 1 file changed, 1 insertion(+) diff --git 
a/api/core/plugin/manager/endpoint.py b/api/core/plugin/manager/endpoint.py index 1c48f8200fe761..c4718b72ddd782 100644 --- a/api/core/plugin/manager/endpoint.py +++ b/api/core/plugin/manager/endpoint.py @@ -59,6 +59,7 @@ def update_endpoint(self, tenant_id: str, user_id: str, endpoint_id: str, settin f"plugin/{tenant_id}/endpoint/update", dict, data={ + "user_id": user_id, "endpoint_id": endpoint_id, "settings": settings, }, From 4c28034224dee2b60c39a5634c2252fe2fcf5a92 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 26 Sep 2024 14:51:10 +0800 Subject: [PATCH 053/325] refactor: encryption --- api/core/plugin/encrypt/__init__.py | 17 +++++++++++++---- api/core/plugin/entities/endpoint.py | 17 +++++++++++++++++ 2 files changed, 30 insertions(+), 4 deletions(-) diff --git a/api/core/plugin/encrypt/__init__.py b/api/core/plugin/encrypt/__init__.py index 614ce81d20c196..313d161ec93d25 100644 --- a/api/core/plugin/encrypt/__init__.py +++ b/api/core/plugin/encrypt/__init__.py @@ -16,7 +16,16 @@ def invoke_encrypt(cls, tenant: Tenant, payload: RequestInvokeEncrypt) -> Mappin provider_identity=payload.identity, ) - if payload.opt == "encrypt": - return encrypter.encrypt(payload.data) - else: - return encrypter.decrypt(payload.data) \ No newline at end of file + try: + if payload.opt == "encrypt": + return { + "data": encrypter.encrypt(payload.data), + } + else: + return { + "data": encrypter.decrypt(payload.data), + } + except Exception as e: + return { + "error": str(e), + } diff --git a/api/core/plugin/entities/endpoint.py b/api/core/plugin/entities/endpoint.py index 54d718fd5b2316..2faa0e8f5806bb 100644 --- a/api/core/plugin/entities/endpoint.py +++ b/api/core/plugin/entities/endpoint.py @@ -1,11 +1,28 @@ +from collections.abc import Mapping from datetime import datetime +from pydantic import BaseModel, Field + +from core.entities.provider_entities import ProviderConfig from core.plugin.entities.base import BasePluginEntity +class EndpointDeclaration(BaseModel): + """ 
+ declaration of an endpoint + """ + + settings: Mapping[str, ProviderConfig] = Field(default_factory=Mapping) + + class EndpointEntity(BasePluginEntity): + """ + entity of an endpoint + """ + settings: dict hook_id: str tenant_id: str plugin_id: str expired_at: datetime + declaration: EndpointDeclaration = Field(default_factory=EndpointDeclaration) From 0ad9dbea63ac532e9194ee403c51a20218e7c6da Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 26 Sep 2024 15:38:22 +0800 Subject: [PATCH 054/325] feat: backwards invoke model --- api/controllers/inner_api/plugin/plugin.py | 6 +- api/core/model_manager.py | 4 +- api/core/plugin/backwards_invocation/model.py | 131 +++++++++++++++++- api/core/plugin/entities/request.py | 38 ++++- 4 files changed, 169 insertions(+), 10 deletions(-) diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index ae3533268927a2..7dde4f01483394 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -47,7 +47,11 @@ class PluginInvokeTextEmbeddingApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeTextEmbedding) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeTextEmbedding): - pass + return PluginModelBackwardsInvocation.invoke_text_embedding( + user_id=user_id, + tenant=tenant_model, + payload=payload, + ) class PluginInvokeRerankApi(Resource): diff --git a/api/core/model_manager.py b/api/core/model_manager.py index 482ca2d4b9702e..1a4a03e27751bc 100644 --- a/api/core/model_manager.py +++ b/api/core/model_manager.py @@ -310,7 +310,9 @@ def invoke_speech2text(self, file: IO[bytes], user: Optional[str] = None) -> str user=user, ) - def invoke_tts(self, content_text: str, tenant_id: str, voice: str, user: Optional[str] = None) -> str: + def invoke_tts( + self, content_text: str, tenant_id: str, voice: str, user: Optional[str] = None + ) -> Generator[bytes, None, None]: """ Invoke large language tts model diff 
--git a/api/core/plugin/backwards_invocation/model.py b/api/core/plugin/backwards_invocation/model.py index 7904fd62340ccf..b3ecced55c0294 100644 --- a/api/core/plugin/backwards_invocation/model.py +++ b/api/core/plugin/backwards_invocation/model.py @@ -1,9 +1,18 @@ +import tempfile +from binascii import hexlify, unhexlify from collections.abc import Generator from core.model_manager import ModelManager from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk from core.plugin.backwards_invocation.base import BaseBackwardsInvocation -from core.plugin.entities.request import RequestInvokeLLM +from core.plugin.entities.request import ( + RequestInvokeLLM, + RequestInvokeModeration, + RequestInvokeRerank, + RequestInvokeSpeech2Text, + RequestInvokeTextEmbedding, + RequestInvokeTTS, +) from core.workflow.nodes.llm.llm_node import LLMNode from models.account import Tenant @@ -48,5 +57,121 @@ def handle() -> Generator[LLMResultChunk, None, None]: if response.usage: LLMNode.deduct_llm_quota(tenant_id=tenant.id, model_instance=model_instance, usage=response.usage) return response - - \ No newline at end of file + + @classmethod + def invoke_text_embedding(cls, user_id: str, tenant: Tenant, payload: RequestInvokeTextEmbedding): + """ + invoke text embedding + """ + model_instance = ModelManager().get_model_instance( + tenant_id=tenant.id, + provider=payload.provider, + model_type=payload.model_type, + model=payload.model, + ) + + # invoke model + response = model_instance.invoke_text_embedding( + texts=payload.texts, + user=user_id, + ) + + return response + + @classmethod + def invoke_rerank(cls, user_id: str, tenant: Tenant, payload: RequestInvokeRerank): + """ + invoke rerank + """ + model_instance = ModelManager().get_model_instance( + tenant_id=tenant.id, + provider=payload.provider, + model_type=payload.model_type, + model=payload.model, + ) + + # invoke model + response = model_instance.invoke_rerank( + query=payload.query, + docs=payload.docs, + 
score_threshold=payload.score_threshold, + top_n=payload.top_n, + user=user_id, + ) + + return response + + @classmethod + def invoke_tts(cls, user_id: str, tenant: Tenant, payload: RequestInvokeTTS): + """ + invoke tts + """ + model_instance = ModelManager().get_model_instance( + tenant_id=tenant.id, + provider=payload.provider, + model_type=payload.model_type, + model=payload.model, + ) + + # invoke model + response = model_instance.invoke_tts( + content_text=payload.content_text, + tenant_id=tenant.id, + voice=payload.voice, + user=user_id, + ) + + def handle() -> Generator[dict, None, None]: + for chunk in response: + yield {"result": hexlify(chunk).decode("utf-8")} + + return handle() + + @classmethod + def invoke_speech2text(cls, user_id: str, tenant: Tenant, payload: RequestInvokeSpeech2Text): + """ + invoke speech2text + """ + model_instance = ModelManager().get_model_instance( + tenant_id=tenant.id, + provider=payload.provider, + model_type=payload.model_type, + model=payload.model, + ) + + # invoke model + with tempfile.NamedTemporaryFile(suffix=".mp3", mode="wb", delete=True) as temp: + temp.write(unhexlify(payload.file)) + temp.flush() + temp.seek(0) + + response = model_instance.invoke_speech2text( + file=temp, + user=user_id, + ) + + return { + "result": response, + } + + @classmethod + def invoke_moderation(cls, user_id: str, tenant: Tenant, payload: RequestInvokeModeration): + """ + invoke moderation + """ + model_instance = ModelManager().get_model_instance( + tenant_id=tenant.id, + provider=payload.provider, + model_type=payload.model_type, + model=payload.model, + ) + + # invoke model + response = model_instance.invoke_moderation( + text=payload.text, + user=user_id, + ) + + return { + "result": response, + } diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 00ac53ca72e64c..a27f8751a64485 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -74,35 +74,63 @@ def 
convert_prompt_messages(cls, v): return v -class RequestInvokeTextEmbedding(BaseModel): +class RequestInvokeTextEmbedding(BaseRequestInvokeModel): """ Request to invoke text embedding """ + model_type: ModelType = ModelType.TEXT_EMBEDDING + texts: list[str] -class RequestInvokeRerank(BaseModel): + +class RequestInvokeRerank(BaseRequestInvokeModel): """ Request to invoke rerank """ + model_type: ModelType = ModelType.RERANK + query: str + docs: list[str] + score_threshold: float + top_n: int + -class RequestInvokeTTS(BaseModel): +class RequestInvokeTTS(BaseRequestInvokeModel): """ Request to invoke TTS """ + model_type: ModelType = ModelType.TTS + content_text: str + voice: str + -class RequestInvokeSpeech2Text(BaseModel): +class RequestInvokeSpeech2Text(BaseRequestInvokeModel): """ Request to invoke speech2text """ + model_type: ModelType = ModelType.SPEECH2TEXT + file: bytes -class RequestInvokeModeration(BaseModel): + @field_validator("file", mode="before") + @classmethod + def convert_file(cls, v): + # hex string to bytes + if isinstance(v, str): + return bytes.fromhex(v) + else: + raise ValueError("file must be a hex string") + + +class RequestInvokeModeration(BaseRequestInvokeModel): """ Request to invoke moderation """ + model_type: ModelType = ModelType.MODERATION + text: str + class RequestInvokeParameterExtractorNode(BaseModel): """ From 507fff0259535988ce9c09405466466da8df2fbd Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 26 Sep 2024 15:47:16 +0800 Subject: [PATCH 055/325] fix: tts file was deleted before invocation --- api/core/plugin/backwards_invocation/model.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/api/core/plugin/backwards_invocation/model.py b/api/core/plugin/backwards_invocation/model.py index b3ecced55c0294..405fcb069ddf8e 100644 --- a/api/core/plugin/backwards_invocation/model.py +++ b/api/core/plugin/backwards_invocation/model.py @@ -145,14 +145,14 @@ def invoke_speech2text(cls, user_id: str, 
tenant: Tenant, payload: RequestInvoke temp.flush() temp.seek(0) - response = model_instance.invoke_speech2text( - file=temp, - user=user_id, - ) - - return { - "result": response, - } + response = model_instance.invoke_speech2text( + file=temp, + user=user_id, + ) + + return { + "result": response, + } @classmethod def invoke_moderation(cls, user_id: str, tenant: Tenant, payload: RequestInvokeModeration): From 5dcd25a61308b8ffe28cf89581ae257fbc029c09 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 26 Sep 2024 17:22:39 +0800 Subject: [PATCH 056/325] fix: missing error message --- api/controllers/inner_api/plugin/plugin.py | 108 ++++++++++++++----- api/core/plugin/backwards_invocation/base.py | 32 ++++-- api/core/plugin/encrypt/__init__.py | 19 +--- 3 files changed, 110 insertions(+), 49 deletions(-) diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index 7dde4f01483394..f785b2aed610a3 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -7,6 +7,7 @@ from controllers.inner_api.plugin.wraps import get_tenant, plugin_data from controllers.inner_api.wraps import plugin_inner_api_only from core.plugin.backwards_invocation.app import PluginAppBackwardsInvocation +from core.plugin.backwards_invocation.base import BaseBackwardsInvocationResponse from core.plugin.backwards_invocation.model import PluginModelBackwardsInvocation from core.plugin.backwards_invocation.node import PluginNodeBackwardsInvocation from core.plugin.encrypt import PluginEncrypter @@ -47,11 +48,16 @@ class PluginInvokeTextEmbeddingApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeTextEmbedding) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeTextEmbedding): - return PluginModelBackwardsInvocation.invoke_text_embedding( - user_id=user_id, - tenant=tenant_model, - payload=payload, - ) + try: + return BaseBackwardsInvocationResponse( + 
data=PluginModelBackwardsInvocation.invoke_text_embedding( + user_id=user_id, + tenant=tenant_model, + payload=payload, + ) + ).model_dump() + except Exception as e: + return BaseBackwardsInvocationResponse(error=str(e)).model_dump() class PluginInvokeRerankApi(Resource): @@ -60,7 +66,16 @@ class PluginInvokeRerankApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeRerank) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeRerank): - pass + try: + return BaseBackwardsInvocationResponse( + data=PluginModelBackwardsInvocation.invoke_rerank( + user_id=user_id, + tenant=tenant_model, + payload=payload, + ) + ).model_dump() + except Exception as e: + return BaseBackwardsInvocationResponse(error=str(e)).model_dump() class PluginInvokeTTSApi(Resource): @@ -69,7 +84,15 @@ class PluginInvokeTTSApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeTTS) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeTTS): - pass + def generator(): + response = PluginModelBackwardsInvocation.invoke_tts( + user_id=user_id, + tenant=tenant_model, + payload=payload, + ) + return PluginModelBackwardsInvocation.convert_to_event_stream(response) + + return compact_generate_response(generator()) class PluginInvokeSpeech2TextApi(Resource): @@ -78,7 +101,16 @@ class PluginInvokeSpeech2TextApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeSpeech2Text) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeSpeech2Text): - pass + try: + return BaseBackwardsInvocationResponse( + data=PluginModelBackwardsInvocation.invoke_speech2text( + user_id=user_id, + tenant=tenant_model, + payload=payload, + ) + ).model_dump() + except Exception as e: + return BaseBackwardsInvocationResponse(error=str(e)).model_dump() class PluginInvokeModerationApi(Resource): @@ -87,7 +119,16 @@ class PluginInvokeModerationApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeModeration) def post(self, user_id: str, 
tenant_model: Tenant, payload: RequestInvokeModeration): - pass + try: + return BaseBackwardsInvocationResponse( + data=PluginModelBackwardsInvocation.invoke_moderation( + user_id=user_id, + tenant=tenant_model, + payload=payload, + ) + ).model_dump() + except Exception as e: + return BaseBackwardsInvocationResponse(error=str(e)).model_dump() class PluginInvokeToolApi(Resource): @@ -118,14 +159,19 @@ class PluginInvokeParameterExtractorNodeApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeParameterExtractorNode) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeParameterExtractorNode): - return PluginNodeBackwardsInvocation.invoke_parameter_extractor( - tenant_id=tenant_model.id, - user_id=user_id, - parameters=payload.parameters, - model_config=payload.model, - instruction=payload.instruction, - query=payload.query, - ) + try: + return BaseBackwardsInvocationResponse( + data=PluginNodeBackwardsInvocation.invoke_parameter_extractor( + tenant_id=tenant_model.id, + user_id=user_id, + parameters=payload.parameters, + model_config=payload.model, + instruction=payload.instruction, + query=payload.query, + ) + ).model_dump() + except Exception as e: + return BaseBackwardsInvocationResponse(error=str(e)).model_dump() class PluginInvokeQuestionClassifierNodeApi(Resource): @@ -134,14 +180,19 @@ class PluginInvokeQuestionClassifierNodeApi(Resource): @get_tenant @plugin_data(payload_type=RequestInvokeQuestionClassifierNode) def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeQuestionClassifierNode): - return PluginNodeBackwardsInvocation.invoke_question_classifier( - tenant_id=tenant_model.id, - user_id=user_id, - query=payload.query, - model_config=payload.model, - classes=payload.classes, - instruction=payload.instruction, - ) + try: + return BaseBackwardsInvocationResponse( + data=PluginNodeBackwardsInvocation.invoke_question_classifier( + tenant_id=tenant_model.id, + user_id=user_id, + query=payload.query, + 
model_config=payload.model, + classes=payload.classes, + instruction=payload.instruction, + ) + ).model_dump() + except Exception as e: + return BaseBackwardsInvocationResponse(error=str(e)).model_dump() class PluginInvokeAppApi(Resource): @@ -173,7 +224,12 @@ def post(self, user_id: str, tenant_model: Tenant, payload: RequestInvokeEncrypt """ encrypt or decrypt data """ - return PluginEncrypter.invoke_encrypt(tenant_model, payload) + try: + return BaseBackwardsInvocationResponse( + data=PluginEncrypter.invoke_encrypt(tenant_model, payload) + ).model_dump() + except Exception as e: + return BaseBackwardsInvocationResponse(error=str(e)).model_dump() api.add_resource(PluginInvokeLLMApi, "/invoke/llm") diff --git a/api/core/plugin/backwards_invocation/base.py b/api/core/plugin/backwards_invocation/base.py index 7b699b4d6784f5..2ec71fdc5b4921 100644 --- a/api/core/plugin/backwards_invocation/base.py +++ b/api/core/plugin/backwards_invocation/base.py @@ -1,5 +1,6 @@ import json from collections.abc import Generator +from typing import Generic, Optional, TypeVar from pydantic import BaseModel @@ -8,15 +9,28 @@ class BaseBackwardsInvocation: @classmethod def convert_to_event_stream(cls, response: Generator[BaseModel | dict | str, None, None] | BaseModel | dict): if isinstance(response, Generator): - for chunk in response: - if isinstance(chunk, BaseModel): - yield chunk.model_dump_json().encode() + b'\n\n' - elif isinstance(chunk, str): - yield f"event: {chunk}\n\n".encode() - else: - yield json.dumps(chunk).encode() + b'\n\n' + try: + for chunk in response: + if isinstance(chunk, BaseModel): + yield BaseBackwardsInvocationResponse(data=chunk).model_dump_json().encode() + b"\n\n" + + elif isinstance(chunk, str): + yield f"event: {chunk}\n\n".encode() + else: + yield json.dumps(chunk).encode() + b"\n\n" + except Exception as e: + error_message = BaseBackwardsInvocationResponse(error=str(e)).model_dump_json() + yield f"{error_message}\n\n".encode() else: if 
isinstance(response, BaseModel): - yield response.model_dump_json().encode() + b'\n\n' + yield response.model_dump_json().encode() + b"\n\n" else: - yield json.dumps(response).encode() + b'\n\n' + yield json.dumps(response).encode() + b"\n\n" + + +T = TypeVar("T", bound=BaseModel | dict | str | bool | int) + + +class BaseBackwardsInvocationResponse(BaseModel, Generic[T]): + data: Optional[T] = None + error: str = "" diff --git a/api/core/plugin/encrypt/__init__.py b/api/core/plugin/encrypt/__init__.py index 313d161ec93d25..95c416d28c70e0 100644 --- a/api/core/plugin/encrypt/__init__.py +++ b/api/core/plugin/encrypt/__init__.py @@ -8,7 +8,7 @@ class PluginEncrypter: @classmethod - def invoke_encrypt(cls, tenant: Tenant, payload: RequestInvokeEncrypt) -> Mapping[str, Any]: + def invoke_encrypt(cls, tenant: Tenant, payload: RequestInvokeEncrypt) -> dict: encrypter = ProviderConfigEncrypter( tenant_id=tenant.id, config=payload.data, @@ -16,16 +16,7 @@ def invoke_encrypt(cls, tenant: Tenant, payload: RequestInvokeEncrypt) -> Mappin provider_identity=payload.identity, ) - try: - if payload.opt == "encrypt": - return { - "data": encrypter.encrypt(payload.data), - } - else: - return { - "data": encrypter.decrypt(payload.data), - } - except Exception as e: - return { - "error": str(e), - } + if payload.opt == "encrypt": + return encrypter.encrypt(payload.data) + else: + return encrypter.decrypt(payload.data) From 1837692a66418eff52f184b980b8cb37e0b1a71e Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 26 Sep 2024 17:40:27 +0800 Subject: [PATCH 057/325] fix: sse error message --- api/core/plugin/backwards_invocation/base.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/api/core/plugin/backwards_invocation/base.py b/api/core/plugin/backwards_invocation/base.py index 2ec71fdc5b4921..1ce126b893191b 100644 --- a/api/core/plugin/backwards_invocation/base.py +++ b/api/core/plugin/backwards_invocation/base.py @@ -11,13 +11,10 @@ def 
convert_to_event_stream(cls, response: Generator[BaseModel | dict | str, Non if isinstance(response, Generator): try: for chunk in response: - if isinstance(chunk, BaseModel): + if isinstance(chunk, BaseModel | dict): yield BaseBackwardsInvocationResponse(data=chunk).model_dump_json().encode() + b"\n\n" - elif isinstance(chunk, str): yield f"event: {chunk}\n\n".encode() - else: - yield json.dumps(chunk).encode() + b"\n\n" except Exception as e: error_message = BaseBackwardsInvocationResponse(error=str(e)).model_dump_json() yield f"{error_message}\n\n".encode() @@ -28,7 +25,7 @@ def convert_to_event_stream(cls, response: Generator[BaseModel | dict | str, Non yield json.dumps(response).encode() + b"\n\n" -T = TypeVar("T", bound=BaseModel | dict | str | bool | int) +T = TypeVar("T", bound=dict | str | bool | int | BaseModel) class BaseBackwardsInvocationResponse(BaseModel, Generic[T]): From 2da32e49d0de9ca418ec685838d9398baacb630e Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Thu, 26 Sep 2024 17:51:13 +0800 Subject: [PATCH 058/325] fix: tests --- api/core/plugin/encrypt/__init__.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/api/core/plugin/encrypt/__init__.py b/api/core/plugin/encrypt/__init__.py index 95c416d28c70e0..285e97b45fcf70 100644 --- a/api/core/plugin/encrypt/__init__.py +++ b/api/core/plugin/encrypt/__init__.py @@ -17,6 +17,10 @@ def invoke_encrypt(cls, tenant: Tenant, payload: RequestInvokeEncrypt) -> dict: ) if payload.opt == "encrypt": - return encrypter.encrypt(payload.data) + return { + "data": encrypter.encrypt(payload.data), + } else: - return encrypter.decrypt(payload.data) + return { + "data": encrypter.decrypt(payload.data), + } From c3359a92915ddca90e94edd08d1ec45971452a04 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Fri, 27 Sep 2024 21:48:48 +0800 Subject: [PATCH 059/325] refactor: using plugin id to dispatch request instead --- api/core/plugin/encrypt/__init__.py | 8 +++++++- api/core/plugin/entities/request.py | 2 
+- api/core/plugin/manager/tool.py | 16 +++++++--------- api/core/tools/plugin_tool/provider.py | 12 ++++++------ api/core/tools/plugin_tool/tool.py | 10 +++++----- api/core/tools/tool_manager.py | 12 ++++-------- api/services/app_service.py | 5 ++++- 7 files changed, 34 insertions(+), 31 deletions(-) diff --git a/api/core/plugin/encrypt/__init__.py b/api/core/plugin/encrypt/__init__.py index 285e97b45fcf70..6303e2ade10ec0 100644 --- a/api/core/plugin/encrypt/__init__.py +++ b/api/core/plugin/encrypt/__init__.py @@ -20,7 +20,13 @@ def invoke_encrypt(cls, tenant: Tenant, payload: RequestInvokeEncrypt) -> dict: return { "data": encrypter.encrypt(payload.data), } - else: + elif payload.opt == "decrypt": return { "data": encrypter.decrypt(payload.data), } + elif payload.opt == "clear": + return { + "data": encrypter.delete_tool_credentials_cache(), + } + else: + raise ValueError(f"Invalid opt: {payload.opt}") diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index a27f8751a64485..5fe9ea1ddaf878 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -173,7 +173,7 @@ class RequestInvokeEncrypt(BaseModel): Request to encryption """ - opt: Literal["encrypt", "decrypt"] + opt: Literal["encrypt", "decrypt", "clear"] namespace: Literal["endpoint"] identity: str data: dict = Field(default_factory=dict) diff --git a/api/core/plugin/manager/tool.py b/api/core/plugin/manager/tool.py index a26fa0dd2673cd..4f5fa1fa5ccfba 100644 --- a/api/core/plugin/manager/tool.py +++ b/api/core/plugin/manager/tool.py @@ -34,7 +34,7 @@ def invoke( self, tenant_id: str, user_id: str, - plugin_unique_identifier: str, + plugin_id: str, tool_provider: str, tool_name: str, credentials: dict[str, Any], @@ -45,7 +45,6 @@ def invoke( f"plugin/{tenant_id}/dispatch/tool/invoke", ToolInvokeMessage, data={ - "plugin_unique_identifier": plugin_unique_identifier, "user_id": user_id, "data": { "provider": tool_provider, @@ -55,14 +54,14 
@@ def invoke( }, }, headers={ - "X-Plugin-Identifier": plugin_unique_identifier, + "X-Plugin-ID": plugin_id, "Content-Type": "application/json", - } + }, ) return response def validate_provider_credentials( - self, tenant_id: str, user_id: str, plugin_unique_identifier: str, provider: str, credentials: dict[str, Any] + self, tenant_id: str, user_id: str, plugin_id: str, provider: str, credentials: dict[str, Any] ) -> bool: """ validate the credentials of the provider @@ -72,7 +71,6 @@ def validate_provider_credentials( f"plugin/{tenant_id}/dispatch/tool/validate_credentials", PluginBasicBooleanResponse, data={ - "plugin_unique_identifier": plugin_unique_identifier, "user_id": user_id, "data": { "provider": provider, @@ -80,12 +78,12 @@ def validate_provider_credentials( }, }, headers={ - "X-Plugin-Identifier": plugin_unique_identifier, + "X-Plugin-ID": plugin_id, "Content-Type": "application/json", - } + }, ) for resp in response: return resp.result - + return False diff --git a/api/core/tools/plugin_tool/provider.py b/api/core/tools/plugin_tool/provider.py index a52e7c967f4be0..4982e7405662d2 100644 --- a/api/core/tools/plugin_tool/provider.py +++ b/api/core/tools/plugin_tool/provider.py @@ -11,12 +11,12 @@ class PluginToolProviderController(BuiltinToolProviderController): entity: ToolProviderEntityWithPlugin tenant_id: str - plugin_unique_identifier: str + plugin_id: str - def __init__(self, entity: ToolProviderEntityWithPlugin, tenant_id: str, plugin_unique_identifier: str) -> None: + def __init__(self, entity: ToolProviderEntityWithPlugin, tenant_id: str, plugin_id: str) -> None: self.entity = entity self.tenant_id = tenant_id - self.plugin_unique_identifier = plugin_unique_identifier + self.plugin_id = plugin_id @property def provider_type(self) -> ToolProviderType: @@ -35,7 +35,7 @@ def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> No if not manager.validate_provider_credentials( tenant_id=self.tenant_id, user_id=user_id, - 
plugin_unique_identifier=self.plugin_unique_identifier, + plugin_id=self.plugin_id, provider=self.entity.identity.name, credentials=credentials, ): @@ -54,7 +54,7 @@ def get_tool(self, tool_name: str) -> PluginTool: entity=tool_entity, runtime=ToolRuntime(tenant_id=self.tenant_id), tenant_id=self.tenant_id, - plugin_unique_identifier=self.plugin_unique_identifier, + plugin_id=self.plugin_id, ) def get_tools(self) -> list[PluginTool]: @@ -66,7 +66,7 @@ def get_tools(self) -> list[PluginTool]: entity=tool_entity, runtime=ToolRuntime(tenant_id=self.tenant_id), tenant_id=self.tenant_id, - plugin_unique_identifier=self.plugin_unique_identifier, + plugin_id=self.plugin_id, ) for tool_entity in self.entity.tools ] diff --git a/api/core/tools/plugin_tool/tool.py b/api/core/tools/plugin_tool/tool.py index dd47f11ba27a2e..7a4f147cd04085 100644 --- a/api/core/tools/plugin_tool/tool.py +++ b/api/core/tools/plugin_tool/tool.py @@ -9,12 +9,12 @@ class PluginTool(Tool): tenant_id: str - plugin_unique_identifier: str + plugin_id: str - def __init__(self, entity: ToolEntity, runtime: ToolRuntime, tenant_id: str, plugin_unique_identifier: str) -> None: + def __init__(self, entity: ToolEntity, runtime: ToolRuntime, tenant_id: str, plugin_id: str) -> None: super().__init__(entity, runtime) self.tenant_id = tenant_id - self.plugin_unique_identifier = plugin_unique_identifier + self.plugin_id = plugin_id @property def tool_provider_type(self) -> ToolProviderType: @@ -25,7 +25,7 @@ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[To return manager.invoke( tenant_id=self.tenant_id, user_id=user_id, - plugin_unique_identifier=self.plugin_unique_identifier, + plugin_id=self.plugin_id, tool_provider=self.entity.identity.provider, tool_name=self.entity.identity.name, credentials=self.runtime.credentials, @@ -37,5 +37,5 @@ def fork_tool_runtime(self, runtime: ToolRuntime) -> "PluginTool": entity=self.entity, runtime=runtime, tenant_id=self.tenant_id, - 
plugin_unique_identifier=self.plugin_unique_identifier, + plugin_id=self.plugin_id, ) diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index a4b834e8d88af6..225cd0c251f596 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -85,7 +85,7 @@ def get_plugin_provider(cls, provider: str, tenant_id: str) -> PluginToolProvide return PluginToolProviderController( entity=provider_entity.declaration, tenant_id=tenant_id, - plugin_unique_identifier=provider_entity.plugin_unique_identifier, + plugin_id=provider_entity.plugin_id, ) @classmethod @@ -402,7 +402,7 @@ def list_plugin_providers(cls, tenant_id: str) -> list[PluginToolProviderControl PluginToolProviderController( entity=provider.declaration, tenant_id=tenant_id, - plugin_unique_identifier=provider.plugin_unique_identifier, + plugin_id=provider.plugin_id, ) for provider in provider_entities ] @@ -527,13 +527,9 @@ def list_providers_from_api( ) if isinstance(provider, PluginToolProviderController): - result_providers[f"plugin_provider.{user_provider.name}.{provider.plugin_unique_identifier}"] = ( - user_provider - ) + result_providers[f"plugin_provider.{user_provider.name}.{provider.plugin_id}"] = user_provider else: - result_providers[f"builtin_provider.{user_provider.name}"] = ( - user_provider - ) + result_providers[f"builtin_provider.{user_provider.name}"] = user_provider # get db api providers diff --git a/api/services/app_service.py b/api/services/app_service.py index ac45d623e84bc9..cd20a13d5a5a71 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -155,7 +155,10 @@ def get_app(self, app: App) -> App: """ # get original app model config if app.mode == AppMode.AGENT_CHAT.value or app.is_agent: - model_config: AppModelConfig = app.app_model_config + model_config: AppModelConfig | None = app.app_model_config + if not model_config: + return app + agent_mode = model_config.agent_mode_dict # decrypt agent tool parameters if it's 
secret-input for tool in agent_mode.get("tools") or []: From 1c3213184e1b3784a25c520d4b23cb6c7f28c36e Mon Sep 17 00:00:00 2001 From: takatost Date: Sun, 29 Sep 2024 00:13:53 +0800 Subject: [PATCH 060/325] feat: move model request to plugin daemon --- .../console/workspace/model_providers.py | 2 +- .../model_config/converter.py | 13 +- .../easy_ui_based_app/model_config/manager.py | 3 +- api/core/entities/provider_configuration.py | 192 +- api/core/helper/moderation.py | 3 + .../entities/provider_entities.py | 6 +- .../model_providers/__base/ai_model.py | 35 +- .../__base/large_language_model.py | 469 +- .../__base/text_embedding_model.py | 14 +- .../model_runtime/model_providers/__init__.py | 3 - .../model_providers/anthropic/__init__.py | 0 .../anthropic/_assets/icon_l_en.svg | 78 - .../anthropic/_assets/icon_s_en.svg | 4 - .../model_providers/anthropic/anthropic.py | 28 - .../model_providers/anthropic/anthropic.yaml | 39 - .../model_providers/anthropic/llm/__init__.py | 0 .../anthropic/llm/_position.yaml | 8 - .../anthropic/llm/claude-2.1.yaml | 36 - .../anthropic/llm/claude-2.yaml | 37 - .../llm/claude-3-5-sonnet-20240620.yaml | 39 - .../llm/claude-3-haiku-20240307.yaml | 39 - .../anthropic/llm/claude-3-opus-20240229.yaml | 39 - .../llm/claude-3-sonnet-20240229.yaml | 39 - .../anthropic/llm/claude-instant-1.2.yaml | 36 - .../anthropic/llm/claude-instant-1.yaml | 36 - .../model_providers/anthropic/llm/llm.py | 624 - .../azure_ai_studio/__init__.py | 0 .../azure_ai_studio/_assets/icon_l_en.png | Bin 21236 -> 0 bytes .../azure_ai_studio/_assets/icon_s_en.png | Bin 10541 -> 0 bytes .../azure_ai_studio/azure_ai_studio.py | 17 - .../azure_ai_studio/azure_ai_studio.yaml | 65 - .../azure_ai_studio/llm/__init__.py | 0 .../azure_ai_studio/llm/llm.py | 334 - .../azure_ai_studio/rerank/__init__.py | 0 .../azure_ai_studio/rerank/rerank.py | 164 - .../model_providers/azure_openai/__init__.py | 0 .../azure_openai/_assets/icon_l_en.png | Bin 5007 -> 0 bytes 
.../azure_openai/_assets/icon_s_en.svg | 8 - .../model_providers/azure_openai/_common.py | 42 - .../model_providers/azure_openai/_constant.py | 1262 - .../azure_openai/azure_openai.py | 10 - .../azure_openai/azure_openai.yaml | 227 - .../azure_openai/llm/__init__.py | 0 .../model_providers/azure_openai/llm/llm.py | 665 - .../azure_openai/speech2text/__init__.py | 0 .../azure_openai/speech2text/speech2text.py | 79 - .../azure_openai/text_embedding/__init__.py | 0 .../azure_openai/tts/__init__.py | 0 .../model_providers/azure_openai/tts/tts.py | 128 - .../model_providers/baichuan/__init__.py | 0 .../baichuan/_assets/icon_l_en.svg | 19 - .../baichuan/_assets/icon_s_en.svg | 11 - .../model_providers/baichuan/baichuan.py | 28 - .../model_providers/baichuan/baichuan.yaml | 29 - .../model_providers/baichuan/llm/__init__.py | 0 .../baichuan/llm/baichuan2-53b.yaml | 46 - .../baichuan/llm/baichuan2-turbo-192k.yaml | 46 - .../baichuan/llm/baichuan2-turbo.yaml | 41 - .../baichuan/llm/baichuan3-turbo-128k.yaml | 53 - .../baichuan/llm/baichuan3-turbo.yaml | 53 - .../baichuan/llm/baichuan4.yaml | 53 - .../baichuan/llm/baichuan_tokenizer.py | 21 - .../baichuan/llm/baichuan_turbo.py | 144 - .../baichuan/llm/baichuan_turbo_errors.py | 22 - .../model_providers/baichuan/llm/llm.py | 296 - .../baichuan/text_embedding/__init__.py | 0 .../baichuan-text-embedding.yaml | 5 - .../model_providers/bedrock/__init__.py | 0 .../bedrock/_assets/icon_l_en.svg | 14 - .../bedrock/_assets/icon_s_en.svg | 15 - .../model_providers/bedrock/bedrock.py | 29 - .../model_providers/bedrock/bedrock.yaml | 89 - .../model_providers/bedrock/llm/__init__.py | 0 .../bedrock/llm/_position.yaml | 24 - .../bedrock/llm/ai21.j2-mid-v1.yaml | 47 - .../bedrock/llm/ai21.j2-ultra-v1.yaml | 47 - .../llm/amazon.titan-text-express-v1.yaml | 23 - .../llm/amazon.titan-text-lite-v1.yaml | 23 - .../llm/anthropic.claude-3-haiku-v1.yaml | 61 - .../llm/anthropic.claude-3-opus-v1.yaml | 61 - 
.../llm/anthropic.claude-3-sonnet-v1.5.yaml | 60 - .../llm/anthropic.claude-3-sonnet-v1.yaml | 60 - .../llm/anthropic.claude-instant-v1.yaml | 52 - .../bedrock/llm/anthropic.claude-v1.yaml | 53 - .../bedrock/llm/anthropic.claude-v2.1.yaml | 54 - .../bedrock/llm/anthropic.claude-v2.yaml | 54 - .../llm/cohere.command-light-text-v14.yaml | 35 - .../llm/cohere.command-r-plus-v1.0.yaml | 44 - .../bedrock/llm/cohere.command-r-v1.0.yaml | 43 - .../bedrock/llm/cohere.command-text-v14.yaml | 32 - .../llm/eu.anthropic.claude-3-haiku-v1.yaml | 59 - .../eu.anthropic.claude-3-sonnet-v1.5.yaml | 58 - .../llm/eu.anthropic.claude-3-sonnet-v1.yaml | 58 - .../model_providers/bedrock/llm/llm.py | 903 - .../bedrock/llm/meta.llama2-13b-chat-v1.yaml | 23 - .../bedrock/llm/meta.llama2-70b-chat-v1.yaml | 23 - .../llm/meta.llama3-1-405b-instruct-v1.0.yaml | 25 - .../llm/meta.llama3-1-70b-instruct-v1.0.yaml | 25 - .../llm/meta.llama3-1-8b-instruct-v1.0.yaml | 25 - .../llm/meta.llama3-70b-instruct-v1.yaml | 23 - .../llm/meta.llama3-8b-instruct-v1.yaml | 23 - .../llm/mistral.mistral-7b-instruct-v0.2.yaml | 39 - .../llm/mistral.mistral-large-2402-v1.0.yaml | 30 - .../llm/mistral.mistral-large-2407-v1.0.yaml | 29 - .../llm/mistral.mistral-small-2402-v1.0.yaml | 29 - .../mistral.mixtral-8x7b-instruct-v0.1.yaml | 39 - .../llm/us.anthropic.claude-3-haiku-v1.yaml | 59 - .../llm/us.anthropic.claude-3-opus-v1.yaml | 59 - .../us.anthropic.claude-3-sonnet-v1.5.yaml | 58 - .../llm/us.anthropic.claude-3-sonnet-v1.yaml | 58 - .../bedrock/text_embedding/__init__.py | 0 .../bedrock/text_embedding/_position.yaml | 4 - .../amazon.titan-embed-text-v1.yaml | 8 - .../amazon.titan-embed-text-v2.yaml | 8 - .../cohere.embed-english-v3.yaml | 8 - .../cohere.embed-multilingual-v3.yaml | 8 - .../model_providers/chatglm/__init__.py | 0 .../chatglm/_assets/icon_l_en.svg | 1 - .../chatglm/_assets/icon_s_en.svg | 9 - .../model_providers/chatglm/chatglm.py | 28 - .../model_providers/chatglm/chatglm.yaml | 28 - 
.../model_providers/chatglm/llm/__init__.py | 0 .../chatglm/llm/chatglm2-6b-32k.yaml | 21 - .../chatglm/llm/chatglm2-6b.yaml | 21 - .../chatglm/llm/chatglm3-6b-32k.yaml | 22 - .../chatglm/llm/chatglm3-6b.yaml | 22 - .../model_providers/chatglm/llm/llm.py | 507 - .../model_providers/cohere/__init__.py | 0 .../cohere/_assets/icon_l_en.svg | 11 - .../cohere/_assets/icon_s_en.svg | 16 - .../model_providers/cohere/cohere.py | 28 - .../model_providers/cohere/cohere.yaml | 90 - .../model_providers/cohere/llm/__init__.py | 0 .../model_providers/cohere/llm/_position.yaml | 10 - .../cohere/llm/command-chat.yaml | 62 - .../cohere/llm/command-light-chat.yaml | 62 - .../llm/command-light-nightly-chat.yaml | 62 - .../cohere/llm/command-light-nightly.yaml | 44 - .../cohere/llm/command-light.yaml | 44 - .../cohere/llm/command-nightly-chat.yaml | 62 - .../cohere/llm/command-nightly.yaml | 44 - .../cohere/llm/command-r-plus.yaml | 45 - .../model_providers/cohere/llm/command-r.yaml | 45 - .../model_providers/cohere/llm/command.yaml | 44 - .../model_providers/cohere/llm/llm.py | 733 - .../model_providers/cohere/rerank/__init__.py | 0 .../cohere/rerank/_position.yaml | 4 - .../cohere/rerank/rerank-english-v2.0.yaml | 4 - .../cohere/rerank/rerank-english-v3.0.yaml | 4 - .../rerank/rerank-multilingual-v2.0.yaml | 4 - .../rerank/rerank-multilingual-v3.0.yaml | 4 - .../model_providers/cohere/rerank/rerank.py | 125 - .../cohere/text_embedding/__init__.py | 0 .../cohere/text_embedding/_position.yaml | 7 - .../embed-english-light-v2.0.yaml | 9 - .../embed-english-light-v3.0.yaml | 9 - .../text_embedding/embed-english-v2.0.yaml | 9 - .../text_embedding/embed-english-v3.0.yaml | 9 - .../embed-multilingual-light-v3.0.yaml | 9 - .../embed-multilingual-v2.0.yaml | 9 - .../embed-multilingual-v3.0.yaml | 9 - .../model_providers/deepseek/__init__.py | 0 .../deepseek/_assets/icon_l_en.svg | 22 - .../deepseek/_assets/icon_s_en.svg | 3 - .../model_providers/deepseek/deepseek.py | 28 - 
.../model_providers/deepseek/deepseek.yaml | 41 - .../model_providers/deepseek/llm/__init__.py | 0 .../deepseek/llm/_position.yaml | 2 - .../deepseek/llm/deepseek-chat.yaml | 78 - .../deepseek/llm/deepseek-coder.yaml | 28 - .../model_providers/deepseek/llm/llm.py | 116 - .../model_providers/fireworks/__init__.py | 0 .../fireworks/_assets/icon_l_en.svg | 3 - .../fireworks/_assets/icon_s_en.svg | 5 - .../model_providers/fireworks/_common.py | 52 - .../model_providers/fireworks/fireworks.py | 27 - .../model_providers/fireworks/llm/__init__.py | 0 .../fireworks/llm/_position.yaml | 16 - .../fireworks/llm/firefunction-v1.yaml | 46 - .../fireworks/llm/firefunction-v2.yaml | 46 - .../fireworks/llm/gemma2-9b-it.yaml | 45 - .../llm/llama-v3-70b-instruct-hf.yaml | 46 - .../fireworks/llm/llama-v3-70b-instruct.yaml | 46 - .../llm/llama-v3-8b-instruct-hf.yaml | 46 - .../fireworks/llm/llama-v3-8b-instruct.yaml | 46 - .../llm/llama-v3p1-405b-instruct.yaml | 46 - .../llm/llama-v3p1-70b-instruct.yaml | 46 - .../fireworks/llm/llama-v3p1-8b-instruct.yaml | 46 - .../model_providers/fireworks/llm/llm.py | 610 - .../fireworks/llm/mixtral-8x22b-instruct.yaml | 46 - .../llm/mixtral-8x7b-instruct-hf.yaml | 46 - .../fireworks/llm/mixtral-8x7b-instruct.yaml | 46 - .../fireworks/llm/mythomax-l2-13b.yaml | 46 - .../llm/phi-3-vision-128k-instruct.yaml | 46 - .../fireworks/llm/yi-large.yaml | 45 - .../model_providers/fishaudio/__init__.py | 0 .../fishaudio/_assets/fishaudio_l_en.svg | 1 - .../fishaudio/_assets/fishaudio_s_en.svg | 1 - .../model_providers/fishaudio/fishaudio.py | 26 - .../model_providers/fishaudio/tts/__init__.py | 0 .../model_providers/fishaudio/tts/tts.py | 158 - .../model_providers/fishaudio/tts/tts.yaml | 5 - .../model_providers/google/__init__.py | 0 .../google/_assets/icon_l_en.svg | 15 - .../google/_assets/icon_s_en.svg | 11 - .../model_providers/google/google.py | 28 - .../model_providers/google/google.yaml | 31 - .../model_providers/google/llm/__init__.py | 0 
.../llm/gemini-1.5-flash-8b-exp-0827.yaml | 48 - .../google/llm/gemini-1.5-flash-exp-0827.yaml | 48 - .../google/llm/gemini-1.5-flash-latest.yaml | 48 - .../google/llm/gemini-1.5-pro-exp-0801.yaml | 48 - .../google/llm/gemini-1.5-pro-exp-0827.yaml | 48 - .../google/llm/gemini-1.5-pro-latest.yaml | 48 - .../google/llm/gemini-pro-vision.yaml | 43 - .../google/llm/gemini-pro.yaml | 47 - .../model_providers/google/llm/llm.py | 443 - .../groq/_assets/icon_l_en.svg | 11 - .../groq/_assets/icon_s_en.svg | 4 - .../model_providers/groq/groq.py | 26 - .../model_providers/groq/groq.yaml | 32 - .../model_providers/groq/llm/_position.yaml | 7 - .../groq/llm/llama-3.1-405b-reasoning.yaml | 25 - .../groq/llm/llama-3.1-70b-versatile.yaml | 25 - .../groq/llm/llama-3.1-8b-instant.yaml | 25 - .../groq/llm/llama2-70b-4096.yaml | 25 - .../groq/llm/llama3-70b-8192.yaml | 25 - .../groq/llm/llama3-8b-8192.yaml | 25 - .../model_providers/groq/llm/llm.py | 31 - .../groq/llm/mixtral-8x7b-instruct-v0.1.yaml | 25 - .../huggingface_hub/__init__.py | 0 .../huggingface_hub/_assets/icon_l_en.svg | 42 - .../huggingface_hub/_assets/icon_s_en.svg | 19 - .../huggingface_hub/_common.py | 9 - .../huggingface_hub/huggingface_hub.py | 10 - .../huggingface_hub/huggingface_hub.yaml | 102 - .../huggingface_hub/llm/__init__.py | 0 .../huggingface_hub/llm/llm.py | 313 - .../text_embedding/__init__.py | 0 .../huggingface_tei/__init__.py | 0 .../huggingface_tei/huggingface_tei.py | 10 - .../huggingface_tei/huggingface_tei.yaml | 36 - .../huggingface_tei/rerank/__init__.py | 0 .../huggingface_tei/rerank/rerank.py | 136 - .../huggingface_tei/tei_helper.py | 182 - .../text_embedding/__init__.py | 0 .../model_providers/hunyuan/__init__.py | 0 .../hunyuan/_assets/icon_l_en.png | Bin 68638 -> 0 bytes .../hunyuan/_assets/icon_s_en.png | Bin 60824 -> 0 bytes .../model_providers/hunyuan/hunyuan.py | 27 - .../model_providers/hunyuan/hunyuan.yaml | 41 - .../model_providers/hunyuan/llm/__init__.py | 0 
.../hunyuan/llm/_position.yaml | 6 - .../hunyuan/llm/hunyuan-lite.yaml | 28 - .../hunyuan/llm/hunyuan-pro.yaml | 38 - .../hunyuan/llm/hunyuan-standard-256k.yaml | 38 - .../hunyuan/llm/hunyuan-standard.yaml | 38 - .../hunyuan/llm/hunyuan-turbo.yaml | 38 - .../hunyuan/llm/hunyuan-vision.yaml | 39 - .../model_providers/hunyuan/llm/llm.py | 348 - .../hunyuan/text_embedding/__init__.py | 0 .../hunyuan-text-embedding.yaml | 5 - .../jina/_assets/icon_l_en.svg | 12 - .../jina/_assets/icon_s_en.svg | 4 - .../model_providers/jina/jina.py | 28 - .../model_providers/jina/rerank/__init__.py | 0 .../jina/rerank/_position.yaml | 5 - .../jina/rerank/jina-colbert-v1-en.yaml | 4 - .../jina/rerank/jina-reranker-v1-base-en.yaml | 4 - .../jina/rerank/jina-reranker-v1-tiny-en.yaml | 4 - .../rerank/jina-reranker-v1-turbo-en.yaml | 4 - .../jina-reranker-v2-base-multilingual.yaml | 4 - .../model_providers/jina/rerank/rerank.py | 125 - .../jina/text_embedding/jina-clip-v1.yaml | 9 - .../jina-embeddings-v2-base-de.yaml | 9 - .../jina-embeddings-v2-base-en.yaml | 9 - .../jina-embeddings-v2-base-zh.yaml | 9 - .../jina-embeddings-v2-small-en.yaml | 9 - .../text_embedding/jina-embeddings-v3.yaml | 9 - .../jina/text_embedding/jina_tokenizer.py | 32 - .../text_embedding/tokenizer/tokenizer.json | 30678 ---------------- .../tokenizer/tokenizer_config.json | 15 - .../leptonai/_assets/icon_l_en.png | Bin 528134 -> 0 bytes .../leptonai/_assets/icon_s_en.png | Bin 12101 -> 0 bytes .../model_providers/leptonai/leptonai.py | 26 - .../model_providers/leptonai/leptonai.yaml | 29 - .../leptonai/llm/_position.yaml | 6 - .../leptonai/llm/gemma-7b.yaml | 20 - .../leptonai/llm/llama2-13b.yaml | 20 - .../leptonai/llm/llama2-7b.yaml | 20 - .../leptonai/llm/llama3-70b.yaml | 20 - .../model_providers/leptonai/llm/llm.py | 40 - .../leptonai/llm/mistral-7b.yaml | 20 - .../leptonai/llm/mixtral-8x7b.yaml | 20 - .../model_providers/localai/__init__.py | 0 .../localai/_assets/icon_l_en.svg | 22 - 
.../localai/_assets/icon_s_en.svg | 15 - .../model_providers/localai/llm/__init__.py | 0 .../model_providers/localai/llm/llm.py | 674 - .../model_providers/localai/localai.py | 10 - .../model_providers/localai/localai.yaml | 72 - .../localai/rerank/__init__.py | 0 .../model_providers/localai/rerank/rerank.py | 134 - .../localai/speech2text/__init__.py | 0 .../localai/speech2text/speech2text.py | 89 - .../localai/text_embedding/__init__.py | 0 .../model_providers/minimax/__init__.py | 0 .../minimax/_assets/icon_l_en.png | Bin 5767 -> 0 bytes .../minimax/_assets/icon_s_en.png | Bin 2007 -> 0 bytes .../model_providers/minimax/llm/__init__.py | 0 .../minimax/llm/abab5-chat.yaml | 38 - .../minimax/llm/abab5.5-chat.yaml | 53 - .../minimax/llm/abab5.5s-chat.yaml | 44 - .../minimax/llm/abab6-chat.yaml | 46 - .../minimax/llm/abab6.5-chat.yaml | 46 - .../minimax/llm/abab6.5s-chat.yaml | 46 - .../minimax/llm/chat_completion.py | 166 - .../minimax/llm/chat_completion_pro.py | 191 - .../model_providers/minimax/llm/errors.py | 22 - .../model_providers/minimax/llm/llm.py | 271 - .../model_providers/minimax/llm/types.py | 30 - .../model_providers/minimax/minimax.py | 28 - .../model_providers/minimax/minimax.yaml | 37 - .../minimax/text_embedding/__init__.py | 0 .../minimax/text_embedding/embo-01.yaml | 9 - .../model_providers/mistralai/__init__.py | 0 .../mistralai/_assets/icon_l_en.png | Bin 7064 -> 0 bytes .../mistralai/_assets/icon_s_en.png | Bin 7418 -> 0 bytes .../mistralai/llm/_position.yaml | 11 - .../mistralai/llm/codestral-latest.yaml | 51 - .../model_providers/mistralai/llm/llm.py | 36 - .../mistralai/llm/mistral-embed.yaml | 51 - .../mistralai/llm/mistral-large-latest.yaml | 51 - .../mistralai/llm/mistral-medium-latest.yaml | 51 - .../mistralai/llm/mistral-small-latest.yaml | 51 - .../mistralai/llm/open-codestral-mamba.yaml | 51 - .../mistralai/llm/open-mistral-7b.yaml | 51 - .../mistralai/llm/open-mistral-nemo.yaml | 51 - .../mistralai/llm/open-mixtral-8x22b.yaml | 51 
- .../mistralai/llm/open-mixtral-8x7b.yaml | 51 - .../mistralai/llm/pixtral-12b-2409.yaml | 51 - .../model_providers/mistralai/mistralai.py | 26 - .../model_providers/mistralai/mistralai.yaml | 31 - .../model_providers/mixedbread/__init__.py | 0 .../mixedbread/_assets/icon_l_en.png | Bin 123637 -> 0 bytes .../mixedbread/_assets/icon_s_en.png | Bin 37303 -> 0 bytes .../model_providers/mixedbread/mixedbread.py | 27 - .../mixedbread/mixedbread.yaml | 31 - .../mixedbread/rerank/__init__.py | 0 .../rerank/mxbai-rerank-large-v1-en.yaml | 4 - .../mixedbread/rerank/rerank.py | 125 - .../mixedbread/text_embedding/__init__.py | 0 .../mxbai-embed-2d-large-v1-en.yaml | 8 - .../mxbai-embed-large-v1-en.yaml | 8 - .../model_providers/model_provider_factory.py | 355 +- .../model_providers/moonshot/__init__.py | 0 .../moonshot/_assets/icon_l_en.png | Bin 13654 -> 0 bytes .../moonshot/_assets/icon_s_en.png | Bin 7419 -> 0 bytes .../model_providers/moonshot/llm/__init__.py | 0 .../moonshot/llm/_position.yaml | 3 - .../model_providers/moonshot/llm/llm.py | 327 - .../moonshot/llm/moonshot-v1-128k.yaml | 40 - .../moonshot/llm/moonshot-v1-32k.yaml | 40 - .../moonshot/llm/moonshot-v1-8k.yaml | 40 - .../model_providers/moonshot/moonshot.py | 26 - .../model_providers/moonshot/moonshot.yaml | 89 - .../model_providers/nomic/__init__.py | 0 .../nomic/_assets/icon_l_en.svg | 13 - .../nomic/_assets/icon_s_en.png | Bin 25814 -> 0 bytes .../model_providers/nomic/_common.py | 28 - .../model_providers/nomic/nomic.py | 26 - .../model_providers/nomic/nomic.yaml | 29 - .../nomic/text_embedding/__init__.py | 0 .../text_embedding/nomic-embed-text-v1.5.yaml | 8 - .../text_embedding/nomic-embed-text-v1.yaml | 8 - .../novita/_assets/icon_l_en.svg | 19 - .../novita/_assets/icon_s_en.svg | 10 - .../llm/Nous-Hermes-2-Mixtral-8x7B-DPO.yaml | 41 - .../novita/llm/airoboros-l2-70b.yaml | 41 - .../novita/llm/dolphin-mixtral-8x22b.yaml | 41 - .../novita/llm/gemma-2-9b-it.yaml | 41 - 
.../novita/llm/hermes-2-pro-llama-3-8b.yaml | 41 - .../novita/llm/l3-70b-euryale-v2.1.yaml | 41 - .../novita/llm/llama-3-70b-instruct.yaml | 41 - .../novita/llm/llama-3-8b-instruct.yaml | 41 - .../novita/llm/llama-3.1-405b-instruct.yaml | 41 - .../novita/llm/llama-3.1-70b-instruct.yaml | 41 - .../novita/llm/llama-3.1-8b-instruct.yaml | 41 - .../model_providers/novita/llm/llm.py | 69 - .../model_providers/novita/llm/lzlv_70b.yaml | 41 - .../novita/llm/midnight-rose-70b.yaml | 41 - .../novita/llm/mistral-7b-instruct.yaml | 41 - .../novita/llm/mythomax-l2-13b.yaml | 41 - .../novita/llm/nous-hermes-llama2-13b.yaml | 41 - .../novita/llm/openhermes-2.5-mistral-7b.yaml | 41 - .../novita/llm/wizardlm-2-8x22b.yaml | 41 - .../model_providers/novita/novita.py | 28 - .../model_providers/novita/novita.yaml | 31 - .../model_providers/nvidia/__init__.py | 0 .../nvidia/_assets/icon_l_en.png | Bin 112528 -> 0 bytes .../nvidia/_assets/icon_s_en.svg | 3 - .../model_providers/nvidia/llm/_position.yaml | 17 - .../model_providers/nvidia/llm/arctic.yaml | 36 - .../nvidia/llm/codegemma-7b.yaml | 36 - .../model_providers/nvidia/llm/fuyu-8b.yaml | 27 - .../model_providers/nvidia/llm/gemma-7b.yaml | 36 - .../nvidia/llm/llama-3.1-405b.yaml | 36 - .../nvidia/llm/llama-3.1-70b.yaml | 36 - .../nvidia/llm/llama-3.1-8b.yaml | 36 - .../nvidia/llm/llama2-70b.yaml | 36 - .../nvidia/llm/llama3-70b.yaml | 36 - .../model_providers/nvidia/llm/llama3-8b.yaml | 36 - .../model_providers/nvidia/llm/llm.py | 247 - .../nvidia/llm/mistral-large.yaml | 36 - .../mistralai_mixtral-8x7b-instruct-v0.1.yaml | 36 - .../llm/mixtral-8x22b-instruct-v0.1.yaml | 36 - .../nvidia/llm/nemotron-4-340b-instruct.yaml | 36 - .../llm/phi-3-medium-128k-instruct.yaml | 36 - .../nvidia/llm/phi-3-mini-128k-instruct.yaml | 36 - .../nvidia/llm/recurrentgemma-2b.yaml | 37 - .../model_providers/nvidia/nvidia.py | 26 - .../model_providers/nvidia/nvidia.yaml | 33 - .../model_providers/nvidia/rerank/__init__.py | 0 
.../nvidia/rerank/rerank-qa-mistral-4b.yaml | 4 - .../model_providers/nvidia/rerank/rerank.py | 121 - .../nvidia/text_embedding/__init__.py | 0 .../nvidia/text_embedding/embed-qa-4.yaml | 5 - .../model_providers/nvidia_nim/__init__.py | 0 .../nvidia_nim/_assets/icon_l_en.png | Bin 112528 -> 0 bytes .../nvidia_nim/_assets/icon_s_en.svg | 3 - .../nvidia_nim/llm/__init__.py | 0 .../model_providers/nvidia_nim/llm/llm.py | 13 - .../model_providers/nvidia_nim/nvidia_nim.py | 10 - .../nvidia_nim/nvidia_nim.yaml | 79 - .../model_providers/oci/__init__.py | 0 .../model_providers/oci/_assets/icon_l_en.svg | 1 - .../model_providers/oci/_assets/icon_s_en.svg | 1 - .../oci/llm/cohere.command-r-16k.yaml | 52 - .../oci/llm/cohere.command-r-plus.yaml | 52 - .../model_providers/oci/llm/llm.py | 469 - .../oci/llm/meta.llama-3-70b-instruct.yaml | 51 - .../model_runtime/model_providers/oci/oci.py | 28 - .../model_providers/oci/oci.yaml | 42 - .../oci/text_embedding/__init__.py | 0 .../oci/text_embedding/_position.yaml | 5 - .../cohere.embed-english-light-v2.0.yaml | 9 - .../cohere.embed-english-light-v3.0.yaml | 9 - .../cohere.embed-english-v3.0.yaml | 9 - .../cohere.embed-multilingual-light-v3.0.yaml | 9 - .../cohere.embed-multilingual-v3.0.yaml | 9 - .../model_providers/ollama/__init__.py | 0 .../ollama/_assets/icon_l_en.svg | 15 - .../ollama/_assets/icon_s_en.svg | 15 - .../model_providers/ollama/llm/__init__.py | 0 .../model_providers/ollama/llm/llm.py | 726 - .../model_providers/ollama/ollama.py | 16 - .../model_providers/ollama/ollama.yaml | 98 - .../ollama/text_embedding/__init__.py | 0 .../model_providers/openai/__init__.py | 0 .../openai/_assets/icon_l_en.svg | 11 - .../openai/_assets/icon_s_en.svg | 4 - .../model_providers/openai/_common.py | 60 - .../model_providers/openai/llm/__init__.py | 0 .../model_providers/openai/llm/_position.yaml | 26 - .../openai/llm/chatgpt-4o-latest.yaml | 44 - .../openai/llm/gpt-3.5-turbo-0125.yaml | 43 - .../openai/llm/gpt-3.5-turbo-0613.yaml | 
34 - .../openai/llm/gpt-3.5-turbo-1106.yaml | 43 - .../openai/llm/gpt-3.5-turbo-16k-0613.yaml | 34 - .../openai/llm/gpt-3.5-turbo-16k.yaml | 33 - .../openai/llm/gpt-3.5-turbo-instruct.yaml | 30 - .../openai/llm/gpt-3.5-turbo.yaml | 43 - .../openai/llm/gpt-4-0125-preview.yaml | 56 - .../openai/llm/gpt-4-1106-preview.yaml | 56 - .../model_providers/openai/llm/gpt-4-32k.yaml | 56 - .../openai/llm/gpt-4-turbo-2024-04-09.yaml | 57 - .../openai/llm/gpt-4-turbo-preview.yaml | 56 - .../openai/llm/gpt-4-turbo.yaml | 57 - .../openai/llm/gpt-4-vision-preview.yaml | 54 - .../model_providers/openai/llm/gpt-4.yaml | 56 - .../openai/llm/gpt-4o-2024-05-13.yaml | 44 - .../openai/llm/gpt-4o-2024-08-06.yaml | 47 - .../openai/llm/gpt-4o-mini-2024-07-18.yaml | 47 - .../openai/llm/gpt-4o-mini.yaml | 47 - .../model_providers/openai/llm/gpt-4o.yaml | 44 - .../model_providers/openai/llm/llm.py | 1182 - .../openai/llm/o1-mini-2024-09-12.yaml | 33 - .../model_providers/openai/llm/o1-mini.yaml | 33 - .../openai/llm/o1-preview-2024-09-12.yaml | 33 - .../openai/llm/o1-preview.yaml | 33 - .../openai/llm/text-davinci-003.yaml | 29 - .../openai/moderation/moderation.py | 59 +- .../moderation/text-moderation-stable.yaml | 5 - .../model_providers/openai/openai.py | 29 - .../model_providers/openai/openai.yaml | 89 - .../openai/speech2text/__init__.py | 0 .../openai/speech2text/speech2text.py | 60 - .../openai/speech2text/whisper-1.yaml | 5 - .../openai/text_embedding/__init__.py | 0 .../text-embedding-3-large.yaml | 9 - .../text-embedding-3-small.yaml | 9 - .../text-embedding-ada-002.yaml | 9 - .../model_providers/openai/tts/__init__.py | 0 .../model_providers/openai/tts/tts-1-hd.yaml | 31 - .../model_providers/openai/tts/tts-1.yaml | 31 - .../model_providers/openai/tts/tts.py | 118 - .../openai_api_compatible/__init__.py | 0 .../openai_api_compatible/_common.py | 43 - .../openai_api_compatible/llm/__init__.py | 0 .../openai_api_compatible/llm/llm.py | 828 - .../openai_api_compatible.py | 10 - 
.../openai_api_compatible.yaml | 162 - .../speech2text/__init__.py | 0 .../speech2text/speech2text.py | 61 - .../text_embedding/__init__.py | 0 .../model_providers/openllm/__init__.py | 0 .../openllm/_assets/icon_l_en.svg | 19 - .../openllm/_assets/icon_s_en.svg | 12 - .../model_providers/openllm/llm/__init__.py | 0 .../model_providers/openllm/llm/llm.py | 264 - .../openllm/llm/openllm_generate.py | 198 - .../openllm/llm/openllm_generate_errors.py | 22 - .../model_providers/openllm/openllm.py | 10 - .../model_providers/openllm/openllm.yaml | 37 - .../openllm/text_embedding/__init__.py | 0 .../model_providers/openrouter/__init__.py | 0 .../openrouter/_assets/openrouter.svg | 11 - .../openrouter/_assets/openrouter_square.svg | 10 - .../openrouter/llm/__init__.py | 0 .../openrouter/llm/_position.yaml | 27 - .../openrouter/llm/claude-3-5-sonnet.yaml | 39 - .../openrouter/llm/claude-3-haiku.yaml | 39 - .../openrouter/llm/claude-3-opus.yaml | 39 - .../openrouter/llm/claude-3-sonnet.yaml | 39 - .../openrouter/llm/command-r-plus.yaml | 45 - .../openrouter/llm/command-r.yaml | 45 - .../openrouter/llm/deepseek-chat.yaml | 50 - .../openrouter/llm/deepseek-coder.yaml | 30 - .../openrouter/llm/gemini-1.5-flash.yaml | 39 - .../openrouter/llm/gemini-1.5-pro.yaml | 39 - .../openrouter/llm/gemini-pro.yaml | 38 - .../openrouter/llm/gpt-3.5-turbo.yaml | 42 - .../openrouter/llm/gpt-4-32k.yaml | 57 - .../model_providers/openrouter/llm/gpt-4.yaml | 57 - .../openrouter/llm/gpt-4o-2024-08-06.yaml | 44 - .../openrouter/llm/gpt-4o-mini.yaml | 43 - .../openrouter/llm/gpt-4o.yaml | 43 - .../openrouter/llm/llama-3-70b-instruct.yaml | 23 - .../openrouter/llm/llama-3-8b-instruct.yaml | 23 - .../llm/llama-3.1-405b-instruct.yaml | 23 - .../llm/llama-3.1-70b-instruct.yaml | 23 - .../openrouter/llm/llama-3.1-8b-instruct.yaml | 23 - .../model_providers/openrouter/llm/llm.py | 106 - .../openrouter/llm/mistral-7b-instruct.yaml | 30 - .../llm/mixtral-8x22b-instruct.yaml | 30 - 
.../openrouter/llm/mixtral-8x7b-instruct.yaml | 31 - .../openrouter/llm/o1-mini.yaml | 40 - .../openrouter/llm/o1-preview.yaml | 40 - .../openrouter/llm/qwen2-72b-instruct.yaml | 30 - .../model_providers/openrouter/openrouter.py | 20 - .../openrouter/openrouter.yaml | 105 - .../model_providers/perfxcloud/__init__.py | 0 .../perfxcloud/_assets/icon_l_en.svg | 8 - .../perfxcloud/_assets/icon_s_en.svg | 8 - .../perfxcloud/llm/Llama3-Chinese_v2.yaml | 62 - .../Meta-Llama-3-70B-Instruct-GPTQ-Int4.yaml | 62 - .../llm/Meta-Llama-3-8B-Instruct.yaml | 62 - ...Meta-Llama-3.1-405B-Instruct-AWQ-INT4.yaml | 62 - .../llm/Meta-Llama-3.1-8B-Instruct.yaml | 61 - .../perfxcloud/llm/Qwen-14B-Chat-Int4.yaml | 62 - .../llm/Qwen1.5-110B-Chat-GPTQ-Int4.yaml | 62 - .../llm/Qwen1.5-72B-Chat-GPTQ-Int4.yaml | 62 - .../perfxcloud/llm/Qwen1.5-7B.yaml | 62 - .../llm/Qwen2-72B-Instruct-AWQ-int4.yaml | 61 - .../llm/Qwen2-72B-Instruct-GPTQ-Int4.yaml | 64 - .../perfxcloud/llm/Qwen2-72B-Instruct.yaml | 61 - .../perfxcloud/llm/Qwen2-7B-Instruct.yaml | 63 - .../perfxcloud/llm/Qwen2-7B.yaml | 64 - .../perfxcloud/llm/Qwen2.5-72B-Instruct.yaml | 61 - .../perfxcloud/llm/Qwen2.5-7B-Instruct.yaml | 61 - .../llm/Reflection-Llama-3.1-70B.yaml | 61 - .../perfxcloud/llm/Yi-1_5-9B-Chat-16K.yaml | 61 - .../perfxcloud/llm/Yi-Coder-1.5B-Chat.yaml | 61 - .../perfxcloud/llm/Yi-Coder-9B-Chat.yaml | 61 - .../perfxcloud/llm/__init__.py | 0 .../perfxcloud/llm/_position.yaml | 24 - .../perfxcloud/llm/chatglm3-6b.yaml | 62 - .../perfxcloud/llm/deepseek-v2-chat.yaml | 62 - .../perfxcloud/llm/deepseek-v2-lite-chat.yaml | 62 - .../model_providers/perfxcloud/llm/llm.py | 116 - .../model_providers/perfxcloud/perfxcloud.py | 10 - .../perfxcloud/perfxcloud.yaml | 42 - .../BAAI-bge-large-en-v1.5.yaml | 4 - .../BAAI-bge-large-zh-v1.5.yaml | 4 - .../text_embedding/BAAI-bge-m3.yaml | 4 - .../perfxcloud/text_embedding/__init__.py | 0 .../text_embedding/gte-Qwen2-7B-instruct.yaml | 4 - .../model_providers/replicate/__init__.py | 0 
.../replicate/_assets/icon_l_en.svg | 13 - .../replicate/_assets/icon_s_en.svg | 4 - .../model_providers/replicate/_common.py | 9 - .../model_providers/replicate/llm/__init__.py | 0 .../model_providers/replicate/llm/llm.py | 305 - .../model_providers/replicate/replicate.py | 10 - .../model_providers/replicate/replicate.yaml | 41 - .../replicate/text_embedding/__init__.py | 0 .../model_providers/sagemaker/__init__.py | 0 .../sagemaker/_assets/icon_l_en.png | Bin 9395 -> 0 bytes .../sagemaker/_assets/icon_s_en.png | Bin 9720 -> 0 bytes .../model_providers/sagemaker/llm/__init__.py | 0 .../sagemaker/rerank/__init__.py | 0 .../sagemaker/rerank/rerank.py | 173 - .../model_providers/sagemaker/sagemaker.py | 41 - .../model_providers/sagemaker/sagemaker.yaml | 193 - .../sagemaker/speech2text/__init__.py | 0 .../sagemaker/speech2text/speech2text.py | 125 - .../sagemaker/text_embedding/__init__.py | 0 .../model_providers/sagemaker/tts/__init__.py | 0 .../model_providers/sagemaker/tts/tts.py | 275 - .../siliconflow/_assets/siliconflow.svg | 1 - .../_assets/siliconflow_square.svg | 1 - .../llm/deepdeek-coder-v2-instruct.yaml | 30 - .../siliconflow/llm/deepseek-v2-chat.yaml | 30 - .../siliconflow/llm/deepseek-v2.5.yaml | 30 - .../siliconflow/llm/gemma-2-27b-it.yaml | 30 - .../siliconflow/llm/gemma-2-9b-it.yaml | 30 - .../siliconflow/llm/glm4-9b-chat.yaml | 30 - .../siliconflow/llm/internlm2_5-7b-chat.yaml | 30 - .../model_providers/siliconflow/llm/llm.py | 31 - .../llm/meta-mlama-3-70b-instruct.yaml | 30 - .../llm/meta-mlama-3-8b-instruct.yaml | 30 - .../llm/meta-mlama-3.1-405b-instruct.yaml | 30 - .../llm/meta-mlama-3.1-70b-instruct.yaml | 30 - .../llm/meta-mlama-3.1-8b-instruct.yaml | 30 - .../llm/mistral-7b-instruct-v0.2.yaml | 31 - .../llm/mistral-8x7b-instruct-v0.1.yaml | 31 - .../siliconflow/llm/qwen2-1.5b-instruct.yaml | 30 - .../llm/qwen2-57b-a14b-instruct.yaml | 30 - .../siliconflow/llm/qwen2-72b-instruct.yaml | 30 - .../siliconflow/llm/qwen2-7b-instruct.yaml | 30 - 
.../siliconflow/llm/qwen2.5-14b-instruct.yaml | 30 - .../siliconflow/llm/qwen2.5-32b-instruct.yaml | 30 - .../siliconflow/llm/qwen2.5-72b-instruct.yaml | 30 - .../siliconflow/llm/qwen2.5-7b-instruct.yaml | 30 - .../siliconflow/llm/yi-1.5-34b-chat.yaml | 30 - .../siliconflow/llm/yi-1.5-6b-chat.yaml | 30 - .../siliconflow/llm/yi-1.5-9b-chat.yaml | 30 - .../siliconflow/rerank/__init__.py | 0 .../rerank/bce-reranker-base_v1.yaml | 4 - .../rerank/bge-reranker-v2-m3.yaml | 4 - .../siliconflow/rerank/rerank.py | 85 - .../siliconflow/siliconflow.py | 26 - .../siliconflow/siliconflow.yaml | 32 - .../siliconflow/speech2text/__init__.py | 0 .../speech2text/sense-voice-small.yaml | 5 - .../siliconflow/speech2text/speech2text.py | 30 - .../text_embedding/bce-embedding-base-v1.yaml | 5 - .../text_embedding/bge-large-en-v1.5.yaml | 5 - .../text_embedding/bge-large-zh-v1.5.yaml | 5 - .../siliconflow/text_embedding/bge-m3.yaml | 5 - .../model_providers/spark/__init__.py | 0 .../spark/_assets/icon_l_en.svg | 24 - .../spark/_assets/icon_l_zh.svg | 11 - .../spark/_assets/icon_s_en.svg | 5 - .../model_providers/spark/llm/__init__.py | 0 .../model_providers/spark/llm/_client.py | 163 - .../model_providers/spark/llm/_position.yaml | 11 - .../model_providers/spark/llm/spark-1.5.yaml | 34 - .../model_providers/spark/llm/spark-2.yaml | 34 - .../model_providers/spark/llm/spark-3.5.yaml | 34 - .../model_providers/spark/llm/spark-3.yaml | 34 - .../spark/llm/spark-4.0-ultra.yaml | 42 - .../model_providers/spark/llm/spark-4.yaml | 34 - .../model_providers/spark/llm/spark-lite.yaml | 33 - .../spark/llm/spark-max-32k.yaml | 33 - .../model_providers/spark/llm/spark-max.yaml | 33 - .../spark/llm/spark-pro-128k.yaml | 33 - .../model_providers/spark/llm/spark-pro.yaml | 33 - .../model_providers/spark/spark.py | 18 - .../model_providers/spark/spark.yaml | 46 - .../model_providers/stepfun/__init__.py | 0 .../stepfun/_assets/icon_l_en.png | Bin 9176 -> 0 bytes .../stepfun/_assets/icon_s_en.png | Bin 1991 
-> 0 bytes .../model_providers/stepfun/llm/__init__.py | 0 .../stepfun/llm/_position.yaml | 8 - .../model_providers/stepfun/llm/llm.py | 328 - .../stepfun/llm/step-1-128k.yaml | 25 - .../stepfun/llm/step-1-256k.yaml | 25 - .../stepfun/llm/step-1-32k.yaml | 28 - .../stepfun/llm/step-1-8k.yaml | 28 - .../stepfun/llm/step-1-flash.yaml | 25 - .../stepfun/llm/step-1v-32k.yaml | 28 - .../stepfun/llm/step-1v-8k.yaml | 28 - .../stepfun/llm/step-2-16k.yaml | 28 - .../model_providers/stepfun/stepfun.py | 26 - .../model_providers/stepfun/stepfun.yaml | 81 - .../model_providers/tencent/__init__.py | 0 .../tencent/_assets/icon_l_en.svg | 13 - .../tencent/_assets/icon_l_zh.svg | 13 - .../tencent/_assets/icon_s_en.svg | 11 - .../tencent/speech2text/__init__.py | 0 .../tencent/speech2text/flash_recognizer.py | 164 - .../tencent/speech2text/speech2text.py | 86 - .../tencent/speech2text/tencent.yaml | 5 - .../model_providers/tencent/tencent.py | 26 - .../model_providers/tencent/tencent.yaml | 49 - .../model_providers/togetherai/__init__.py | 0 .../togetherai/_assets/togetherai.svg | 13 - .../togetherai/_assets/togetherai_square.svg | 19 - .../togetherai/llm/__init__.py | 0 .../model_providers/togetherai/llm/llm.py | 170 - .../model_providers/togetherai/togetherai.py | 10 - .../togetherai/togetherai.yaml | 75 - .../model_providers/tongyi/__init__.py | 0 .../tongyi/_assets/icon_l_en.png | Bin 4741 -> 0 bytes .../tongyi/_assets/icon_l_zh.png | Bin 7052 -> 0 bytes .../tongyi/_assets/icon_s_en.png | Bin 2835 -> 0 bytes .../model_providers/tongyi/_common.py | 55 - .../model_providers/tongyi/llm/__init__.py | 0 .../model_providers/tongyi/llm/_position.yaml | 51 - .../tongyi/llm/farui-plus.yaml | 77 - .../model_providers/tongyi/llm/llm.py | 593 - .../tongyi/llm/qwen-coder-turbo-0919.yaml | 75 - .../tongyi/llm/qwen-coder-turbo-latest.yaml | 75 - .../tongyi/llm/qwen-coder-turbo.yaml | 75 - .../model_providers/tongyi/llm/qwen-long.yaml | 77 - .../tongyi/llm/qwen-math-plus-0816.yaml | 75 - 
.../tongyi/llm/qwen-math-plus-0919.yaml | 75 - .../tongyi/llm/qwen-math-plus-latest.yaml | 75 - .../tongyi/llm/qwen-math-plus.yaml | 75 - .../tongyi/llm/qwen-math-turbo-0919.yaml | 75 - .../tongyi/llm/qwen-math-turbo-latest.yaml | 75 - .../tongyi/llm/qwen-math-turbo.yaml | 75 - .../tongyi/llm/qwen-max-0107.yaml | 78 - .../tongyi/llm/qwen-max-0403.yaml | 78 - .../tongyi/llm/qwen-max-0428.yaml | 78 - .../tongyi/llm/qwen-max-0919.yaml | 78 - .../tongyi/llm/qwen-max-1201.yaml | 78 - .../tongyi/llm/qwen-max-latest.yaml | 78 - .../tongyi/llm/qwen-max-longcontext.yaml | 78 - .../model_providers/tongyi/llm/qwen-max.yaml | 87 - .../tongyi/llm/qwen-plus-0206.yaml | 76 - .../tongyi/llm/qwen-plus-0624.yaml | 76 - .../tongyi/llm/qwen-plus-0723.yaml | 76 - .../tongyi/llm/qwen-plus-0806.yaml | 76 - .../tongyi/llm/qwen-plus-0919.yaml | 76 - .../tongyi/llm/qwen-plus-chat.yaml | 79 - .../tongyi/llm/qwen-plus-latest.yaml | 76 - .../model_providers/tongyi/llm/qwen-plus.yaml | 87 - .../tongyi/llm/qwen-turbo-0206.yaml | 77 - .../tongyi/llm/qwen-turbo-0624.yaml | 76 - .../tongyi/llm/qwen-turbo-0919.yaml | 76 - .../tongyi/llm/qwen-turbo-chat.yaml | 79 - .../tongyi/llm/qwen-turbo-latest.yaml | 76 - .../tongyi/llm/qwen-turbo.yaml | 87 - .../tongyi/llm/qwen-vl-max-0201.yaml | 49 - .../tongyi/llm/qwen-vl-max-0809.yaml | 79 - .../tongyi/llm/qwen-vl-max.yaml | 79 - .../tongyi/llm/qwen-vl-plus-0201.yaml | 79 - .../tongyi/llm/qwen-vl-plus-0809.yaml | 79 - .../tongyi/llm/qwen-vl-plus.yaml | 79 - .../tongyi/llm/qwen2-math-1.5b-instruct.yaml | 75 - .../tongyi/llm/qwen2-math-72b-instruct.yaml | 75 - .../tongyi/llm/qwen2-math-7b-instruct.yaml | 75 - .../tongyi/llm/qwen2.5-0.5b-instruct.yaml | 75 - .../tongyi/llm/qwen2.5-1.5b-instruct.yaml | 75 - .../tongyi/llm/qwen2.5-14b-instruct.yaml | 75 - .../tongyi/llm/qwen2.5-32b-instruct.yaml | 75 - .../tongyi/llm/qwen2.5-3b-instruct.yaml | 75 - .../tongyi/llm/qwen2.5-72b-instruct.yaml | 75 - .../tongyi/llm/qwen2.5-7b-instruct.yaml | 75 - 
.../tongyi/llm/qwen2.5-coder-7b-instruct.yaml | 75 - .../tongyi/text_embedding/__init__.py | 0 .../text_embedding/text-embedding-v1.yaml | 10 - .../text_embedding/text-embedding-v2.yaml | 10 - .../text_embedding/text-embedding-v3.yaml | 10 - .../model_providers/tongyi/tongyi.py | 28 - .../model_providers/tongyi/tongyi.yaml | 87 - .../model_providers/tongyi/tts/__init__.py | 0 .../model_providers/tongyi/tts/tts-1.yaml | 139 - .../model_providers/tongyi/tts/tts.py | 152 - .../triton_inference_server/__init__.py | 0 .../_assets/icon_l_en.png | Bin 79751 -> 0 bytes .../_assets/icon_s_en.svg | 3 - .../triton_inference_server/llm/__init__.py | 0 .../triton_inference_server/llm/llm.py | 280 - .../triton_inference_server.py | 10 - .../triton_inference_server.yaml | 84 - .../model_providers/upstage/__init__.py | 0 .../upstage/_assets/icon_l_en.svg | 14 - .../upstage/_assets/icon_s_en.svg | 3 - .../model_providers/upstage/_common.py | 54 - .../model_providers/upstage/llm/__init__.py | 0 .../upstage/llm/_position.yaml | 1 - .../model_providers/upstage/llm/llm.py | 603 - .../upstage/llm/solar-1-mini-chat.yaml | 43 - .../upstage/text_embedding/__init__.py | 0 .../solar-embedding-1-large-passage.yaml | 9 - .../solar-embedding-1-large-query.yaml | 9 - .../model_providers/upstage/upstage.py | 27 - .../model_providers/upstage/upstage.yaml | 49 - .../model_providers/vertex_ai/__init__.py | 0 .../vertex_ai/_assets/icon_l_en.png | Bin 18078 -> 0 bytes .../vertex_ai/_assets/icon_s_en.svg | 1 - .../model_providers/vertex_ai/_common.py | 15 - .../model_providers/vertex_ai/llm/__init__.py | 0 .../llm/anthropic.claude-3-haiku.yaml | 56 - .../llm/anthropic.claude-3-opus.yaml | 56 - .../llm/anthropic.claude-3-sonnet.yaml | 55 - .../llm/anthropic.claude-3.5-sonnet.yaml | 55 - .../vertex_ai/llm/gemini-1.0-pro-vision.yaml | 37 - .../vertex_ai/llm/gemini-1.0-pro.yaml | 36 - .../vertex_ai/text_embedding/__init__.py | 0 .../text_embedding/text-embedding-004.yaml | 8 - 
.../text-multilingual-embedding-002.yaml | 8 - .../model_providers/vertex_ai/vertex_ai.py | 28 - .../model_providers/vertex_ai/vertex_ai.yaml | 43 - .../volcengine_maas/__init__.py | 0 .../volcengine_maas/_assets/icon_l_en.svg | 23 - .../volcengine_maas/_assets/icon_l_zh.svg | 39 - .../volcengine_maas/_assets/icon_s_en.svg | 8 - .../model_providers/volcengine_maas/client.py | 216 - .../volcengine_maas/legacy/__init__.py | 0 .../volcengine_maas/legacy/client.py | 123 - .../volcengine_maas/legacy/errors.py | 156 - .../legacy/volc_sdk/__init__.py | 4 - .../legacy/volc_sdk/base/__init__.py | 1 - .../legacy/volc_sdk/base/auth.py | 159 - .../legacy/volc_sdk/base/service.py | 216 - .../legacy/volc_sdk/base/util.py | 44 - .../volcengine_maas/legacy/volc_sdk/common.py | 77 - .../volcengine_maas/legacy/volc_sdk/maas.py | 198 - .../volcengine_maas/llm/__init__.py | 0 .../volcengine_maas/llm/llm.py | 388 - .../volcengine_maas/llm/models.py | 142 - .../text_embedding/__init__.py | 0 .../volcengine_maas/text_embedding/models.py | 28 - .../volcengine_maas/volcengine_maas.py | 10 - .../volcengine_maas/volcengine_maas.yaml | 266 - .../model_providers/wenxin/__init__.py | 0 .../wenxin/_assets/icon_l_en.png | Bin 6615 -> 0 bytes .../wenxin/_assets/icon_l_zh.png | Bin 7967 -> 0 bytes .../wenxin/_assets/icon_s_en.png | Bin 3350 -> 0 bytes .../model_providers/wenxin/_common.py | 194 - .../model_providers/wenxin/llm/__init__.py | 0 .../wenxin/llm/ernie-3.5-128k.yaml | 37 - .../wenxin/llm/ernie-3.5-4k-0205.yaml | 38 - .../wenxin/llm/ernie-3.5-8k-0205.yaml | 38 - .../wenxin/llm/ernie-3.5-8k-1222.yaml | 38 - .../wenxin/llm/ernie-3.5-8k.yaml | 40 - .../wenxin/llm/ernie-4.0-8k-latest.yaml | 40 - .../wenxin/llm/ernie-4.0-8k.yaml | 40 - .../llm/ernie-4.0-turbo-8k-preview.yaml | 40 - .../wenxin/llm/ernie-4.0-turbo-8k.yaml | 40 - .../wenxin/llm/ernie-bot-4.yaml | 39 - .../wenxin/llm/ernie-bot-8k.yaml | 39 - .../wenxin/llm/ernie-bot-turbo.yaml | 30 - .../model_providers/wenxin/llm/ernie-bot.yaml | 
39 - .../wenxin/llm/ernie-character-8k-0321.yaml | 31 - .../wenxin/llm/ernie-character-8k.yaml | 30 - .../wenxin/llm/ernie-lite-8k-0308.yaml | 31 - .../wenxin/llm/ernie-lite-8k-0922.yaml | 31 - .../wenxin/llm/ernie-speed-128k.yaml | 30 - .../wenxin/llm/ernie-speed-8k.yaml | 30 - .../wenxin/llm/ernie-speed-appbuilder.yaml | 25 - .../model_providers/wenxin/llm/ernie_bot.py | 245 - .../model_providers/wenxin/llm/llm.py | 316 - .../wenxin/llm/yi_34b_chat.yaml | 30 - .../wenxin/text_embedding/__init__.py | 0 .../wenxin/text_embedding/bge-large-en.yaml | 9 - .../wenxin/text_embedding/bge-large-zh.yaml | 9 - .../wenxin/text_embedding/embedding-v1.yaml | 9 - .../wenxin/text_embedding/tao-8k.yaml | 9 - .../model_providers/wenxin/wenxin.py | 28 - .../model_providers/wenxin/wenxin.yaml | 40 - .../model_providers/wenxin/wenxin_errors.py | 54 - .../model_providers/xinference/__init__.py | 0 .../xinference/_assets/icon_l_en.svg | 42 - .../xinference/_assets/icon_s_en.svg | 24 - .../xinference/llm/__init__.py | 0 .../model_providers/xinference/llm/llm.py | 816 - .../xinference/rerank/__init__.py | 0 .../xinference/rerank/rerank.py | 189 - .../xinference/speech2text/__init__.py | 0 .../xinference/speech2text/speech2text.py | 144 - .../xinference/text_embedding/__init__.py | 0 .../xinference/tts/__init__.py | 0 .../model_providers/xinference/tts/tts.py | 228 - .../model_providers/xinference/xinference.py | 10 - .../xinference/xinference.yaml | 58 - .../xinference/xinference_helper.py | 134 - .../model_providers/yi/__init__.py | 0 .../model_providers/yi/_assets/icon_l_en.svg | 12 - .../model_providers/yi/_assets/icon_s_en.svg | 8 - .../model_providers/yi/llm/__init__.py | 0 .../model_providers/yi/llm/_position.yaml | 9 - .../model_providers/yi/llm/llm.py | 127 - .../yi/llm/yi-34b-chat-0205.yaml | 43 - .../yi/llm/yi-34b-chat-200k.yaml | 43 - .../yi/llm/yi-large-turbo.yaml | 43 - .../model_providers/yi/llm/yi-large.yaml | 43 - .../yi/llm/yi-medium-200k.yaml | 43 - 
.../model_providers/yi/llm/yi-medium.yaml | 43 - .../model_providers/yi/llm/yi-spark.yaml | 43 - .../model_providers/yi/llm/yi-vision.yaml | 44 - .../model_providers/yi/llm/yi-vl-plus.yaml | 43 - .../model_runtime/model_providers/yi/yi.py | 28 - .../model_runtime/model_providers/yi/yi.yaml | 41 - .../model_providers/zhinao/__init__.py | 0 .../zhinao/_assets/icon_l_en.svg | 8 - .../zhinao/_assets/icon_s_en.svg | 8 - .../llm/360gpt-turbo-responsibility-8k.yaml | 36 - .../zhinao/llm/360gpt-turbo.yaml | 36 - .../zhinao/llm/360gpt2-pro.yaml | 36 - .../model_providers/zhinao/llm/__init__.py | 0 .../model_providers/zhinao/llm/_position.yaml | 3 - .../model_providers/zhinao/llm/llm.py | 31 - .../model_providers/zhinao/zhinao.py | 28 - .../model_providers/zhinao/zhinao.yaml | 32 - .../model_providers/zhipuai/__init__.py | 0 .../zhipuai/_assets/icon_l_en.svg | 6 - .../zhipuai/_assets/icon_l_zh.svg | 8 - .../zhipuai/_assets/icon_s_en.svg | 8 - .../model_providers/zhipuai/_common.py | 41 - .../model_providers/zhipuai/llm/__init__.py | 0 .../zhipuai/llm/chatglm_lite.yaml | 22 - .../zhipuai/llm/chatglm_lite_32k.yaml | 22 - .../zhipuai/llm/chatglm_pro.yaml | 22 - .../zhipuai/llm/chatglm_std.yaml | 22 - .../zhipuai/llm/chatglm_turbo.yaml | 51 - .../zhipuai/llm/glm-4-0520.yaml | 62 - .../zhipuai/llm/glm-4-air.yaml | 62 - .../zhipuai/llm/glm-4-airx.yaml | 62 - .../zhipuai/llm/glm-4-flash.yaml | 62 - .../zhipuai/llm/glm_3_turbo.yaml | 62 - .../model_providers/zhipuai/llm/glm_4.yaml | 62 - .../zhipuai/llm/glm_4_long.yaml | 65 - .../zhipuai/llm/glm_4_plus.yaml | 62 - .../model_providers/zhipuai/llm/glm_4v.yaml | 60 - .../zhipuai/llm/glm_4v_plus.yaml | 60 - .../model_providers/zhipuai/llm/llm.py | 486 - .../zhipuai/text_embedding/__init__.py | 0 .../zhipuai/text_embedding/embedding-2.yaml | 8 - .../zhipuai/text_embedding/embedding-3.yaml | 8 - .../text_embedding/text_embedding.yaml | 4 - .../model_providers/zhipuai/zhipuai.py | 27 - .../model_providers/zhipuai/zhipuai.yaml | 31 - 
.../zhipuai/zhipuai_sdk/__init__.py | 15 - .../zhipuai/zhipuai_sdk/__version__.py | 1 - .../zhipuai/zhipuai_sdk/_client.py | 82 - .../zhipuai_sdk/api_resource/__init__.py | 34 - .../api_resource/assistant/__init__.py | 3 - .../api_resource/assistant/assistant.py | 122 - .../zhipuai_sdk/api_resource/batches.py | 146 - .../zhipuai_sdk/api_resource/chat/__init__.py | 5 - .../api_resource/chat/async_completions.py | 115 - .../zhipuai_sdk/api_resource/chat/chat.py | 18 - .../api_resource/chat/completions.py | 108 - .../zhipuai_sdk/api_resource/embeddings.py | 50 - .../zhipuai/zhipuai_sdk/api_resource/files.py | 194 - .../api_resource/fine_tuning/__init__.py | 5 - .../api_resource/fine_tuning/fine_tuning.py | 18 - .../api_resource/fine_tuning/jobs/__init__.py | 3 - .../api_resource/fine_tuning/jobs/jobs.py | 152 - .../fine_tuning/models/__init__.py | 3 - .../fine_tuning/models/fine_tuned_models.py | 41 - .../zhipuai_sdk/api_resource/images.py | 59 - .../api_resource/knowledge/__init__.py | 3 - .../knowledge/document/__init__.py | 3 - .../knowledge/document/document.py | 217 - .../api_resource/knowledge/knowledge.py | 173 - .../api_resource/tools/__init__.py | 3 - .../zhipuai_sdk/api_resource/tools/tools.py | 65 - .../api_resource/videos/__init__.py | 7 - .../zhipuai_sdk/api_resource/videos/videos.py | 77 - .../zhipuai/zhipuai_sdk/core/__init__.py | 108 - .../zhipuai/zhipuai_sdk/core/_base_api.py | 19 - .../zhipuai/zhipuai_sdk/core/_base_compat.py | 209 - .../zhipuai/zhipuai_sdk/core/_base_models.py | 670 - .../zhipuai/zhipuai_sdk/core/_base_type.py | 170 - .../zhipuai/zhipuai_sdk/core/_constants.py | 12 - .../zhipuai/zhipuai_sdk/core/_errors.py | 86 - .../zhipuai/zhipuai_sdk/core/_files.py | 75 - .../zhipuai/zhipuai_sdk/core/_http_client.py | 910 - .../zhipuai/zhipuai_sdk/core/_jwt_token.py | 31 - .../core/_legacy_binary_response.py | 207 - .../zhipuai_sdk/core/_legacy_response.py | 341 - .../zhipuai/zhipuai_sdk/core/_request_opt.py | 97 - 
.../zhipuai/zhipuai_sdk/core/_response.py | 398 - .../zhipuai/zhipuai_sdk/core/_sse_client.py | 206 - .../zhipuai_sdk/core/_utils/__init__.py | 52 - .../zhipuai_sdk/core/_utils/_transform.py | 383 - .../zhipuai_sdk/core/_utils/_typing.py | 122 - .../zhipuai/zhipuai_sdk/core/_utils/_utils.py | 409 - .../zhipuai/zhipuai_sdk/core/logs.py | 78 - .../zhipuai/zhipuai_sdk/core/pagination.py | 62 - .../zhipuai/zhipuai_sdk/types/__init__.py | 0 .../zhipuai_sdk/types/assistant/__init__.py | 5 - .../types/assistant/assistant_completion.py | 40 - .../assistant_conversation_params.py | 7 - .../assistant/assistant_conversation_resp.py | 29 - .../assistant/assistant_create_params.py | 32 - .../types/assistant/assistant_support_resp.py | 21 - .../types/assistant/message/__init__.py | 3 - .../assistant/message/message_content.py | 13 - .../assistant/message/text_content_block.py | 14 - .../tools/code_interpreter_delta_block.py | 27 - .../message/tools/drawing_tool_delta_block.py | 21 - .../message/tools/function_delta_block.py | 22 - .../message/tools/retrieval_delta_black.py | 41 - .../assistant/message/tools/tools_type.py | 16 - .../message/tools/web_browser_delta_block.py | 48 - .../assistant/message/tools_delta_block.py | 16 - .../zhipuai/zhipuai_sdk/types/batch.py | 82 - .../zhipuai_sdk/types/batch_create_params.py | 37 - .../zhipuai/zhipuai_sdk/types/batch_error.py | 21 - .../zhipuai_sdk/types/batch_list_params.py | 20 - .../zhipuai_sdk/types/batch_request_counts.py | 14 - .../zhipuai_sdk/types/chat/__init__.py | 0 .../types/chat/async_chat_completion.py | 22 - .../zhipuai_sdk/types/chat/chat_completion.py | 43 - .../types/chat/chat_completion_chunk.py | 57 - .../chat/chat_completions_create_param.py | 8 - .../types/chat/code_geex/code_geex_params.py | 146 - .../zhipuai/zhipuai_sdk/types/embeddings.py | 21 - .../zhipuai_sdk/types/files/__init__.py | 5 - .../types/files/file_create_params.py | 38 - .../zhipuai_sdk/types/files/file_deleted.py | 13 - 
.../zhipuai_sdk/types/files/file_object.py | 22 - .../zhipuai_sdk/types/files/upload_detail.py | 13 - .../zhipuai_sdk/types/fine_tuning/__init__.py | 4 - .../types/fine_tuning/fine_tuning_job.py | 51 - .../fine_tuning/fine_tuning_job_event.py | 35 - .../types/fine_tuning/job_create_params.py | 15 - .../types/fine_tuning/models/__init__.py | 1 - .../fine_tuning/models/fine_tuned_models.py | 13 - .../zhipuai/zhipuai_sdk/types/image.py | 18 - .../zhipuai_sdk/types/knowledge/__init__.py | 8 - .../types/knowledge/document/__init__.py | 8 - .../types/knowledge/document/document.py | 51 - .../document/document_edit_params.py | 29 - .../document/document_list_params.py | 26 - .../knowledge/document/document_list_resp.py | 11 - .../zhipuai_sdk/types/knowledge/knowledge.py | 21 - .../knowledge/knowledge_create_params.py | 30 - .../types/knowledge/knowledge_list_params.py | 15 - .../types/knowledge/knowledge_list_resp.py | 11 - .../types/knowledge/knowledge_used.py | 21 - .../types/sensitive_word_check/__init__.py | 3 - .../sensitive_word_check.py | 14 - .../zhipuai_sdk/types/tools/__init__.py | 9 - .../types/tools/tools_web_search_params.py | 35 - .../zhipuai_sdk/types/tools/web_search.py | 71 - .../types/tools/web_search_chunk.py | 33 - .../zhipuai_sdk/types/video/__init__.py | 3 - .../types/video/video_create_params.py | 27 - .../zhipuai_sdk/types/video/video_object.py | 30 - api/core/plugin/entities/plugin_daemon.py | 48 +- api/core/plugin/entities/request.py | 6 +- api/core/plugin/manager/asset.py | 4 +- api/core/plugin/manager/base.py | 15 +- api/core/plugin/manager/model.py | 518 +- api/core/provider_manager.py | 11 +- .../tools/utils/model_invocation_utils.py | 4 +- api/services/model_load_balancing_service.py | 3 +- api/services/model_provider_service.py | 70 +- .../test_model_provider_factory.py | 18 +- .../workflow/nodes/test_llm.py | 2 - .../nodes/test_parameter_extractor.py | 14 +- .../unit_tests/core/test_provider_manager.py | 5 +- 1070 files changed, 1081 
insertions(+), 87437 deletions(-) delete mode 100644 api/core/model_runtime/model_providers/anthropic/__init__.py delete mode 100644 api/core/model_runtime/model_providers/anthropic/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/anthropic/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/anthropic/anthropic.py delete mode 100644 api/core/model_runtime/model_providers/anthropic/anthropic.yaml delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/claude-2.1.yaml delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/claude-2.yaml delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/claude-3-5-sonnet-20240620.yaml delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/claude-3-haiku-20240307.yaml delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/claude-3-opus-20240229.yaml delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/claude-3-sonnet-20240229.yaml delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/claude-instant-1.2.yaml delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/claude-instant-1.yaml delete mode 100644 api/core/model_runtime/model_providers/anthropic/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/azure_ai_studio/__init__.py delete mode 100644 api/core/model_runtime/model_providers/azure_ai_studio/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/azure_ai_studio/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/azure_ai_studio/azure_ai_studio.py delete mode 100644 api/core/model_runtime/model_providers/azure_ai_studio/azure_ai_studio.yaml delete mode 100644 
api/core/model_runtime/model_providers/azure_ai_studio/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/azure_ai_studio/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/azure_ai_studio/rerank/__init__.py delete mode 100644 api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/__init__.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/azure_openai/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/azure_openai/_common.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/_constant.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/azure_openai.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml delete mode 100644 api/core/model_runtime/model_providers/azure_openai/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/speech2text/__init__.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/speech2text/speech2text.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/tts/__init__.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/tts/tts.py delete mode 100644 api/core/model_runtime/model_providers/baichuan/__init__.py delete mode 100644 api/core/model_runtime/model_providers/baichuan/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/baichuan/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/baichuan/baichuan.py delete mode 100644 api/core/model_runtime/model_providers/baichuan/baichuan.yaml 
delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/baichuan2-53b.yaml delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/baichuan2-turbo-192k.yaml delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/baichuan2-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/baichuan3-turbo-128k.yaml delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/baichuan3-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/baichuan4.yaml delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/baichuan_tokenizer.py delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/baichuan_turbo.py delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/baichuan_turbo_errors.py delete mode 100644 api/core/model_runtime/model_providers/baichuan/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/baichuan/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/baichuan/text_embedding/baichuan-text-embedding.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/__init__.py delete mode 100644 api/core/model_runtime/model_providers/bedrock/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/bedrock/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/bedrock/bedrock.py delete mode 100644 api/core/model_runtime/model_providers/bedrock/bedrock.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/ai21.j2-mid-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/ai21.j2-ultra-v1.yaml delete mode 100644 
api/core/model_runtime/model_providers/bedrock/llm/amazon.titan-text-express-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/amazon.titan-text-lite-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-haiku-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-opus-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.5.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-instant-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v2.1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v2.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/cohere.command-light-text-v14.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/cohere.command-r-plus-v1.0.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/cohere.command-r-v1.0.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/cohere.command-text-v14.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-haiku-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v1.5.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/meta.llama2-13b-chat-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/meta.llama2-70b-chat-v1.yaml delete 
mode 100644 api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-405b-instruct-v1.0.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-70b-instruct-v1.0.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-8b-instruct-v1.0.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-70b-instruct-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-8b-instruct-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-7b-instruct-v0.2.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-large-2402-v1.0.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-large-2407-v1.0.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-small-2402-v1.0.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/mistral.mixtral-8x7b-instruct-v0.1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-haiku-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-opus-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v1.5.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/bedrock/text_embedding/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/text_embedding/amazon.titan-embed-text-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/text_embedding/amazon.titan-embed-text-v2.yaml delete mode 100644 
api/core/model_runtime/model_providers/bedrock/text_embedding/cohere.embed-english-v3.yaml delete mode 100644 api/core/model_runtime/model_providers/bedrock/text_embedding/cohere.embed-multilingual-v3.yaml delete mode 100644 api/core/model_runtime/model_providers/chatglm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/chatglm/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/chatglm/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/chatglm/chatglm.py delete mode 100644 api/core/model_runtime/model_providers/chatglm/chatglm.yaml delete mode 100644 api/core/model_runtime/model_providers/chatglm/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/chatglm/llm/chatglm2-6b-32k.yaml delete mode 100644 api/core/model_runtime/model_providers/chatglm/llm/chatglm2-6b.yaml delete mode 100644 api/core/model_runtime/model_providers/chatglm/llm/chatglm3-6b-32k.yaml delete mode 100644 api/core/model_runtime/model_providers/chatglm/llm/chatglm3-6b.yaml delete mode 100644 api/core/model_runtime/model_providers/chatglm/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/cohere/__init__.py delete mode 100644 api/core/model_runtime/model_providers/cohere/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/cohere/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/cohere/cohere.py delete mode 100644 api/core/model_runtime/model_providers/cohere/cohere.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/command-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/command-light-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/command-light-nightly-chat.yaml delete mode 100644 
api/core/model_runtime/model_providers/cohere/llm/command-light-nightly.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/command-light.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/command-nightly-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/command-nightly.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/command-r-plus.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/command-r.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/command.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/cohere/rerank/__init__.py delete mode 100644 api/core/model_runtime/model_providers/cohere/rerank/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/rerank/rerank-english-v2.0.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/rerank/rerank-english-v3.0.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/rerank/rerank-multilingual-v2.0.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/rerank/rerank-multilingual-v3.0.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/rerank/rerank.py delete mode 100644 api/core/model_runtime/model_providers/cohere/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/cohere/text_embedding/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-light-v2.0.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-light-v3.0.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-v2.0.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-v3.0.yaml delete mode 100644 
api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-light-v3.0.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-v2.0.yaml delete mode 100644 api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-v3.0.yaml delete mode 100644 api/core/model_runtime/model_providers/deepseek/__init__.py delete mode 100644 api/core/model_runtime/model_providers/deepseek/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/deepseek/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/deepseek/deepseek.py delete mode 100644 api/core/model_runtime/model_providers/deepseek/deepseek.yaml delete mode 100644 api/core/model_runtime/model_providers/deepseek/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/deepseek/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml delete mode 100644 api/core/model_runtime/model_providers/deepseek/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/fireworks/__init__.py delete mode 100644 api/core/model_runtime/model_providers/fireworks/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/fireworks/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/fireworks/_common.py delete mode 100644 api/core/model_runtime/model_providers/fireworks/fireworks.py delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/firefunction-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/firefunction-v2.yaml delete mode 100644 
api/core/model_runtime/model_providers/fireworks/llm/gemma2-9b-it.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3-70b-instruct-hf.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3-70b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3-8b-instruct-hf.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3-8b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-405b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-70b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-8b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x22b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x7b-instruct-hf.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x7b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/mythomax-l2-13b.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/phi-3-vision-128k-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/yi-large.yaml delete mode 100644 api/core/model_runtime/model_providers/fishaudio/__init__.py delete mode 100644 api/core/model_runtime/model_providers/fishaudio/_assets/fishaudio_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/fishaudio/_assets/fishaudio_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/fishaudio/fishaudio.py delete mode 100644 api/core/model_runtime/model_providers/fishaudio/tts/__init__.py delete mode 100644 api/core/model_runtime/model_providers/fishaudio/tts/tts.py delete mode 100644 
api/core/model_runtime/model_providers/fishaudio/tts/tts.yaml delete mode 100644 api/core/model_runtime/model_providers/google/__init__.py delete mode 100644 api/core/model_runtime/model_providers/google/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/google/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/google/google.py delete mode 100644 api/core/model_runtime/model_providers/google/google.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0827.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-exp-0827.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0801.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0827.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-pro-vision.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-pro.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/groq/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/groq/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/groq/groq.py delete mode 100644 api/core/model_runtime/model_providers/groq/groq.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/llama-3.1-405b-reasoning.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/llama-3.1-70b-versatile.yaml delete mode 100644 
api/core/model_runtime/model_providers/groq/llm/llama-3.1-8b-instant.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/llama2-70b-4096.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/llama3-70b-8192.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/llama3-8b-8192.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/groq/llm/mixtral-8x7b-instruct-v0.1.yaml delete mode 100644 api/core/model_runtime/model_providers/huggingface_hub/__init__.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_hub/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/huggingface_hub/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/huggingface_hub/_common.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_hub/huggingface_hub.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_hub/huggingface_hub.yaml delete mode 100644 api/core/model_runtime/model_providers/huggingface_hub/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_hub/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_hub/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_tei/__init__.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_tei/huggingface_tei.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_tei/huggingface_tei.yaml delete mode 100644 api/core/model_runtime/model_providers/huggingface_tei/rerank/__init__.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_tei/rerank/rerank.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_tei/tei_helper.py delete mode 100644 api/core/model_runtime/model_providers/huggingface_tei/text_embedding/__init__.py delete mode 
100644 api/core/model_runtime/model_providers/hunyuan/__init__.py delete mode 100644 api/core/model_runtime/model_providers/hunyuan/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/hunyuan/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/hunyuan/hunyuan.py delete mode 100644 api/core/model_runtime/model_providers/hunyuan/hunyuan.yaml delete mode 100644 api/core/model_runtime/model_providers/hunyuan/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/hunyuan/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-lite.yaml delete mode 100644 api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-pro.yaml delete mode 100644 api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-standard-256k.yaml delete mode 100644 api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-standard.yaml delete mode 100644 api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-vision.yaml delete mode 100644 api/core/model_runtime/model_providers/hunyuan/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/hunyuan/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/hunyuan/text_embedding/hunyuan-text-embedding.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/jina/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/jina/jina.py delete mode 100644 api/core/model_runtime/model_providers/jina/rerank/__init__.py delete mode 100644 api/core/model_runtime/model_providers/jina/rerank/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/rerank/jina-colbert-v1-en.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-base-en.yaml delete 
mode 100644 api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-tiny-en.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-turbo-en.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v2-base-multilingual.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/rerank/rerank.py delete mode 100644 api/core/model_runtime/model_providers/jina/text_embedding/jina-clip-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-de.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-en.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-zh.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-small-en.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v3.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/text_embedding/jina_tokenizer.py delete mode 100644 api/core/model_runtime/model_providers/jina/text_embedding/tokenizer/tokenizer.json delete mode 100644 api/core/model_runtime/model_providers/jina/text_embedding/tokenizer/tokenizer_config.json delete mode 100644 api/core/model_runtime/model_providers/leptonai/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/leptonai/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/leptonai/leptonai.py delete mode 100644 api/core/model_runtime/model_providers/leptonai/leptonai.yaml delete mode 100644 api/core/model_runtime/model_providers/leptonai/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/leptonai/llm/gemma-7b.yaml delete mode 100644 api/core/model_runtime/model_providers/leptonai/llm/llama2-13b.yaml delete mode 100644 
api/core/model_runtime/model_providers/leptonai/llm/llama2-7b.yaml delete mode 100644 api/core/model_runtime/model_providers/leptonai/llm/llama3-70b.yaml delete mode 100644 api/core/model_runtime/model_providers/leptonai/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/leptonai/llm/mistral-7b.yaml delete mode 100644 api/core/model_runtime/model_providers/leptonai/llm/mixtral-8x7b.yaml delete mode 100644 api/core/model_runtime/model_providers/localai/__init__.py delete mode 100644 api/core/model_runtime/model_providers/localai/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/localai/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/localai/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/localai/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/localai/localai.py delete mode 100644 api/core/model_runtime/model_providers/localai/localai.yaml delete mode 100644 api/core/model_runtime/model_providers/localai/rerank/__init__.py delete mode 100644 api/core/model_runtime/model_providers/localai/rerank/rerank.py delete mode 100644 api/core/model_runtime/model_providers/localai/speech2text/__init__.py delete mode 100644 api/core/model_runtime/model_providers/localai/speech2text/speech2text.py delete mode 100644 api/core/model_runtime/model_providers/localai/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/minimax/__init__.py delete mode 100644 api/core/model_runtime/model_providers/minimax/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/minimax/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/abab5-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/abab5.5-chat.yaml delete mode 100644 
api/core/model_runtime/model_providers/minimax/llm/abab5.5s-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/abab6-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/abab6.5-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/abab6.5s-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/chat_completion.py delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/chat_completion_pro.py delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/errors.py delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/minimax/llm/types.py delete mode 100644 api/core/model_runtime/model_providers/minimax/minimax.py delete mode 100644 api/core/model_runtime/model_providers/minimax/minimax.yaml delete mode 100644 api/core/model_runtime/model_providers/minimax/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/minimax/text_embedding/embo-01.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/__init__.py delete mode 100644 api/core/model_runtime/model_providers/mistralai/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/mistralai/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/codestral-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/mistral-embed.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/mistral-large-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/mistral-medium-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/mistral-small-latest.yaml 
delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/open-codestral-mamba.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/open-mistral-7b.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/open-mistral-nemo.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/open-mixtral-8x22b.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/open-mixtral-8x7b.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/llm/pixtral-12b-2409.yaml delete mode 100644 api/core/model_runtime/model_providers/mistralai/mistralai.py delete mode 100644 api/core/model_runtime/model_providers/mistralai/mistralai.yaml delete mode 100644 api/core/model_runtime/model_providers/mixedbread/__init__.py delete mode 100644 api/core/model_runtime/model_providers/mixedbread/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/mixedbread/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/mixedbread/mixedbread.py delete mode 100644 api/core/model_runtime/model_providers/mixedbread/mixedbread.yaml delete mode 100644 api/core/model_runtime/model_providers/mixedbread/rerank/__init__.py delete mode 100644 api/core/model_runtime/model_providers/mixedbread/rerank/mxbai-rerank-large-v1-en.yaml delete mode 100644 api/core/model_runtime/model_providers/mixedbread/rerank/rerank.py delete mode 100644 api/core/model_runtime/model_providers/mixedbread/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/mixedbread/text_embedding/mxbai-embed-2d-large-v1-en.yaml delete mode 100644 api/core/model_runtime/model_providers/mixedbread/text_embedding/mxbai-embed-large-v1-en.yaml delete mode 100644 api/core/model_runtime/model_providers/moonshot/__init__.py delete mode 100644 api/core/model_runtime/model_providers/moonshot/_assets/icon_l_en.png delete mode 100644 
api/core/model_runtime/model_providers/moonshot/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/moonshot/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/moonshot/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/moonshot/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-128k.yaml delete mode 100644 api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-32k.yaml delete mode 100644 api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/moonshot/moonshot.py delete mode 100644 api/core/model_runtime/model_providers/moonshot/moonshot.yaml delete mode 100644 api/core/model_runtime/model_providers/nomic/__init__.py delete mode 100644 api/core/model_runtime/model_providers/nomic/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/nomic/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/nomic/_common.py delete mode 100644 api/core/model_runtime/model_providers/nomic/nomic.py delete mode 100644 api/core/model_runtime/model_providers/nomic/nomic.yaml delete mode 100644 api/core/model_runtime/model_providers/nomic/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/nomic/text_embedding/nomic-embed-text-v1.5.yaml delete mode 100644 api/core/model_runtime/model_providers/nomic/text_embedding/nomic-embed-text-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/novita/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/novita/llm/Nous-Hermes-2-Mixtral-8x7B-DPO.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/airoboros-l2-70b.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/dolphin-mixtral-8x22b.yaml delete 
mode 100644 api/core/model_runtime/model_providers/novita/llm/gemma-2-9b-it.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/hermes-2-pro-llama-3-8b.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/l3-70b-euryale-v2.1.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/llama-3-70b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/llama-3-8b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/llama-3.1-405b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/llama-3.1-70b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/llama-3.1-8b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/novita/llm/lzlv_70b.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/midnight-rose-70b.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/mistral-7b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/mythomax-l2-13b.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/nous-hermes-llama2-13b.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/openhermes-2.5-mistral-7b.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/llm/wizardlm-2-8x22b.yaml delete mode 100644 api/core/model_runtime/model_providers/novita/novita.py delete mode 100644 api/core/model_runtime/model_providers/novita/novita.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/__init__.py delete mode 100644 api/core/model_runtime/model_providers/nvidia/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/nvidia/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/_position.yaml delete mode 100644 
api/core/model_runtime/model_providers/nvidia/llm/arctic.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/codegemma-7b.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/fuyu-8b.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/gemma-7b.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-405b.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-70b.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-8b.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/llama2-70b.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/llama3-70b.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/llama3-8b.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/mistral-large.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/mistralai_mixtral-8x7b-instruct-v0.1.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/mixtral-8x22b-instruct-v0.1.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/nemotron-4-340b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/phi-3-medium-128k-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/phi-3-mini-128k-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/llm/recurrentgemma-2b.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/nvidia.py delete mode 100644 api/core/model_runtime/model_providers/nvidia/nvidia.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia/rerank/__init__.py delete mode 100644 api/core/model_runtime/model_providers/nvidia/rerank/rerank-qa-mistral-4b.yaml delete mode 100644 
api/core/model_runtime/model_providers/nvidia/rerank/rerank.py delete mode 100644 api/core/model_runtime/model_providers/nvidia/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/nvidia/text_embedding/embed-qa-4.yaml delete mode 100644 api/core/model_runtime/model_providers/nvidia_nim/__init__.py delete mode 100644 api/core/model_runtime/model_providers/nvidia_nim/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/nvidia_nim/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/nvidia_nim/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/nvidia_nim/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/nvidia_nim/nvidia_nim.py delete mode 100644 api/core/model_runtime/model_providers/nvidia_nim/nvidia_nim.yaml delete mode 100644 api/core/model_runtime/model_providers/oci/__init__.py delete mode 100644 api/core/model_runtime/model_providers/oci/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/oci/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/oci/llm/cohere.command-r-16k.yaml delete mode 100644 api/core/model_runtime/model_providers/oci/llm/cohere.command-r-plus.yaml delete mode 100644 api/core/model_runtime/model_providers/oci/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/oci/llm/meta.llama-3-70b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/oci/oci.py delete mode 100644 api/core/model_runtime/model_providers/oci/oci.yaml delete mode 100644 api/core/model_runtime/model_providers/oci/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/oci/text_embedding/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-light-v2.0.yaml delete mode 100644 api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-light-v3.0.yaml delete 
mode 100644 api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-v3.0.yaml delete mode 100644 api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-multilingual-light-v3.0.yaml delete mode 100644 api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-multilingual-v3.0.yaml delete mode 100644 api/core/model_runtime/model_providers/ollama/__init__.py delete mode 100644 api/core/model_runtime/model_providers/ollama/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/ollama/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/ollama/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/ollama/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/ollama/ollama.py delete mode 100644 api/core/model_runtime/model_providers/ollama/ollama.yaml delete mode 100644 api/core/model_runtime/model_providers/ollama/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openai/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openai/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/openai/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/openai/_common.py delete mode 100644 api/core/model_runtime/model_providers/openai/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openai/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/chatgpt-4o-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-0125.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-0613.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-1106.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-16k-0613.yaml delete mode 100644 
api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-16k.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4-0125-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4-1106-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4-32k.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo-2024-04-09.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4-vision-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4o-2024-05-13.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4o-2024-08-06.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4o-mini-2024-07-18.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4o-mini.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/gpt-4o.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/openai/llm/o1-mini-2024-09-12.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/o1-mini.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/o1-preview-2024-09-12.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/o1-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/llm/text-davinci-003.yaml delete mode 100644 
api/core/model_runtime/model_providers/openai/moderation/text-moderation-stable.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/openai.py delete mode 100644 api/core/model_runtime/model_providers/openai/openai.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/speech2text/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openai/speech2text/speech2text.py delete mode 100644 api/core/model_runtime/model_providers/openai/speech2text/whisper-1.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-3-large.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-3-small.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-ada-002.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/tts/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openai/tts/tts-1-hd.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/tts/tts-1.yaml delete mode 100644 api/core/model_runtime/model_providers/openai/tts/tts.py delete mode 100644 api/core/model_runtime/model_providers/openai_api_compatible/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openai_api_compatible/_common.py delete mode 100644 api/core/model_runtime/model_providers/openai_api_compatible/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.py delete mode 100644 api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.yaml delete mode 100644 api/core/model_runtime/model_providers/openai_api_compatible/speech2text/__init__.py delete mode 100644 
api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py delete mode 100644 api/core/model_runtime/model_providers/openai_api_compatible/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openllm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openllm/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/openllm/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/openllm/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openllm/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/openllm/llm/openllm_generate.py delete mode 100644 api/core/model_runtime/model_providers/openllm/llm/openllm_generate_errors.py delete mode 100644 api/core/model_runtime/model_providers/openllm/openllm.py delete mode 100644 api/core/model_runtime/model_providers/openllm/openllm.yaml delete mode 100644 api/core/model_runtime/model_providers/openllm/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openrouter/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openrouter/_assets/openrouter.svg delete mode 100644 api/core/model_runtime/model_providers/openrouter/_assets/openrouter_square.svg delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/claude-3-5-sonnet.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/claude-3-haiku.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/claude-3-opus.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/claude-3-sonnet.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/command-r-plus.yaml delete mode 100644 
api/core/model_runtime/model_providers/openrouter/llm/command-r.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/deepseek-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/deepseek-coder.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/gemini-1.5-flash.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/gemini-1.5-pro.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/gemini-pro.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/gpt-3.5-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/gpt-4-32k.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/gpt-4.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/gpt-4o-2024-08-06.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/gpt-4o-mini.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/gpt-4o.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/llama-3-70b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/llama-3-8b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-405b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-70b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-8b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/mistral-7b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/mixtral-8x22b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/mixtral-8x7b-instruct.yaml delete mode 100644 
api/core/model_runtime/model_providers/openrouter/llm/o1-mini.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/o1-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/llm/qwen2-72b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/openrouter/openrouter.py delete mode 100644 api/core/model_runtime/model_providers/openrouter/openrouter.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/__init__.py delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Llama3-Chinese_v2.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3-70B-Instruct-GPTQ-Int4.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3-8B-Instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3.1-405B-Instruct-AWQ-INT4.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3.1-8B-Instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Qwen-14B-Chat-Int4.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-110B-Chat-GPTQ-Int4.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-72B-Chat-GPTQ-Int4.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-7B.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct-AWQ-int4.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct-GPTQ-Int4.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct.yaml delete mode 100644 
api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-7B-Instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-7B.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2.5-72B-Instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2.5-7B-Instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Reflection-Llama-3.1-70B.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Yi-1_5-9B-Chat-16K.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Yi-Coder-1.5B-Chat.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/Yi-Coder-9B-Chat.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/chatglm3-6b.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/deepseek-v2-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/deepseek-v2-lite-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/perfxcloud.py delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/perfxcloud.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-large-en-v1.5.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-large-zh-v1.5.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-m3.yaml delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/perfxcloud/text_embedding/gte-Qwen2-7B-instruct.yaml delete 
mode 100644 api/core/model_runtime/model_providers/replicate/__init__.py delete mode 100644 api/core/model_runtime/model_providers/replicate/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/replicate/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/replicate/_common.py delete mode 100644 api/core/model_runtime/model_providers/replicate/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/replicate/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/replicate/replicate.py delete mode 100644 api/core/model_runtime/model_providers/replicate/replicate.yaml delete mode 100644 api/core/model_runtime/model_providers/replicate/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/__init__.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/sagemaker/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/sagemaker/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/rerank/__init__.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/sagemaker.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/sagemaker.yaml delete mode 100644 api/core/model_runtime/model_providers/sagemaker/speech2text/__init__.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/tts/__init__.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/tts/tts.py delete mode 100644 api/core/model_runtime/model_providers/siliconflow/_assets/siliconflow.svg delete mode 100644 
api/core/model_runtime/model_providers/siliconflow/_assets/siliconflow_square.svg delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/deepdeek-coder-v2-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/deepseek-v2-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/deepseek-v2.5.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/gemma-2-27b-it.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/gemma-2-9b-it.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/glm4-9b-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-7b-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3-70b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3-8b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-405b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-70b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-8b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/mistral-7b-instruct-v0.2.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/mistral-8x7b-instruct-v0.1.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/qwen2-1.5b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/qwen2-57b-a14b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/qwen2-72b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/qwen2-7b-instruct.yaml delete mode 100644 
api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-14b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-32b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-72b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-7b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-34b-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-6b-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-9b-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/rerank/__init__.py delete mode 100644 api/core/model_runtime/model_providers/siliconflow/rerank/bce-reranker-base_v1.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/rerank/bge-reranker-v2-m3.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/rerank/rerank.py delete mode 100644 api/core/model_runtime/model_providers/siliconflow/siliconflow.py delete mode 100644 api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/speech2text/__init__.py delete mode 100644 api/core/model_runtime/model_providers/siliconflow/speech2text/sense-voice-small.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/speech2text/speech2text.py delete mode 100644 api/core/model_runtime/model_providers/siliconflow/text_embedding/bce-embedding-base-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-large-en-v1.5.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-large-zh-v1.5.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-m3.yaml delete mode 100644 
api/core/model_runtime/model_providers/spark/__init__.py delete mode 100644 api/core/model_runtime/model_providers/spark/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/spark/_assets/icon_l_zh.svg delete mode 100644 api/core/model_runtime/model_providers/spark/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/spark/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/spark/llm/_client.py delete mode 100644 api/core/model_runtime/model_providers/spark/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-1.5.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-2.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-3.5.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-3.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-4.0-ultra.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-4.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-lite.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-max-32k.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-max.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-pro-128k.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/llm/spark-pro.yaml delete mode 100644 api/core/model_runtime/model_providers/spark/spark.py delete mode 100644 api/core/model_runtime/model_providers/spark/spark.yaml delete mode 100644 api/core/model_runtime/model_providers/stepfun/__init__.py delete mode 100644 api/core/model_runtime/model_providers/stepfun/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/stepfun/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/stepfun/llm/__init__.py delete mode 100644 
api/core/model_runtime/model_providers/stepfun/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/stepfun/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/stepfun/llm/step-1-128k.yaml delete mode 100644 api/core/model_runtime/model_providers/stepfun/llm/step-1-256k.yaml delete mode 100644 api/core/model_runtime/model_providers/stepfun/llm/step-1-32k.yaml delete mode 100644 api/core/model_runtime/model_providers/stepfun/llm/step-1-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/stepfun/llm/step-1-flash.yaml delete mode 100644 api/core/model_runtime/model_providers/stepfun/llm/step-1v-32k.yaml delete mode 100644 api/core/model_runtime/model_providers/stepfun/llm/step-1v-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/stepfun/llm/step-2-16k.yaml delete mode 100644 api/core/model_runtime/model_providers/stepfun/stepfun.py delete mode 100644 api/core/model_runtime/model_providers/stepfun/stepfun.yaml delete mode 100644 api/core/model_runtime/model_providers/tencent/__init__.py delete mode 100644 api/core/model_runtime/model_providers/tencent/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/tencent/_assets/icon_l_zh.svg delete mode 100644 api/core/model_runtime/model_providers/tencent/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/tencent/speech2text/__init__.py delete mode 100644 api/core/model_runtime/model_providers/tencent/speech2text/flash_recognizer.py delete mode 100644 api/core/model_runtime/model_providers/tencent/speech2text/speech2text.py delete mode 100644 api/core/model_runtime/model_providers/tencent/speech2text/tencent.yaml delete mode 100644 api/core/model_runtime/model_providers/tencent/tencent.py delete mode 100644 api/core/model_runtime/model_providers/tencent/tencent.yaml delete mode 100644 api/core/model_runtime/model_providers/togetherai/__init__.py delete mode 100644 
api/core/model_runtime/model_providers/togetherai/_assets/togetherai.svg delete mode 100644 api/core/model_runtime/model_providers/togetherai/_assets/togetherai_square.svg delete mode 100644 api/core/model_runtime/model_providers/togetherai/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/togetherai/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/togetherai/togetherai.py delete mode 100644 api/core/model_runtime/model_providers/togetherai/togetherai.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/__init__.py delete mode 100644 api/core/model_runtime/model_providers/tongyi/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/tongyi/_assets/icon_l_zh.png delete mode 100644 api/core/model_runtime/model_providers/tongyi/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/tongyi/_common.py delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/farui-plus.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo-0919.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-long.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-0816.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-0919.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus.yaml delete mode 
100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo-0919.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0107.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0403.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0428.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0919.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-max-1201.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-max-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-max-longcontext.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-max.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0206.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0624.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0723.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0806.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0919.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0206.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0624.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0919.yaml delete mode 100644 
api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0201.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0809.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0201.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0809.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-1.5b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-72b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-7b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-0.5b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-1.5b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-14b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-32b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-3b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-72b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-7b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-coder-7b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/text_embedding/__init__.py delete mode 100644 
api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v2.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v3.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/tongyi.py delete mode 100644 api/core/model_runtime/model_providers/tongyi/tongyi.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/tts/__init__.py delete mode 100644 api/core/model_runtime/model_providers/tongyi/tts/tts-1.yaml delete mode 100644 api/core/model_runtime/model_providers/tongyi/tts/tts.py delete mode 100644 api/core/model_runtime/model_providers/triton_inference_server/__init__.py delete mode 100644 api/core/model_runtime/model_providers/triton_inference_server/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/triton_inference_server/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/triton_inference_server/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/triton_inference_server/triton_inference_server.py delete mode 100644 api/core/model_runtime/model_providers/triton_inference_server/triton_inference_server.yaml delete mode 100644 api/core/model_runtime/model_providers/upstage/__init__.py delete mode 100644 api/core/model_runtime/model_providers/upstage/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/upstage/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/upstage/_common.py delete mode 100644 api/core/model_runtime/model_providers/upstage/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/upstage/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/upstage/llm/llm.py delete mode 100644 
api/core/model_runtime/model_providers/upstage/llm/solar-1-mini-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/upstage/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/upstage/text_embedding/solar-embedding-1-large-passage.yaml delete mode 100644 api/core/model_runtime/model_providers/upstage/text_embedding/solar-embedding-1-large-query.yaml delete mode 100644 api/core/model_runtime/model_providers/upstage/upstage.py delete mode 100644 api/core/model_runtime/model_providers/upstage/upstage.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/__init__.py delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/_common.py delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-haiku.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-opus.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-sonnet.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.0-pro-vision.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.0-pro.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/text_embedding/text-embedding-004.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/text_embedding/text-multilingual-embedding-002.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/vertex_ai.py delete mode 
100644 api/core/model_runtime/model_providers/vertex_ai/vertex_ai.yaml delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/__init__.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_l_zh.svg delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/client.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/legacy/__init__.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/legacy/client.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/legacy/errors.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/__init__.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/__init__.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/auth.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/service.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/util.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/common.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/maas.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/llm/models.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/text_embedding/models.py delete mode 
100644 api/core/model_runtime/model_providers/volcengine_maas/volcengine_maas.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/volcengine_maas.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/__init__.py delete mode 100644 api/core/model_runtime/model_providers/wenxin/_assets/icon_l_en.png delete mode 100644 api/core/model_runtime/model_providers/wenxin/_assets/icon_l_zh.png delete mode 100644 api/core/model_runtime/model_providers/wenxin/_assets/icon_s_en.png delete mode 100644 api/core/model_runtime/model_providers/wenxin/_common.py delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-128k.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-4k-0205.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k-0205.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k-1222.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-8k-latest.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-4.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-bot.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-character-8k-0321.yaml delete mode 100644 
api/core/model_runtime/model_providers/wenxin/llm/ernie-character-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-8k-0308.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-8k-0922.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-128k.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-appbuilder.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/ernie_bot.py delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/wenxin/llm/yi_34b_chat.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/wenxin/text_embedding/bge-large-en.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/text_embedding/bge-large-zh.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/text_embedding/embedding-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/text_embedding/tao-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/wenxin.py delete mode 100644 api/core/model_runtime/model_providers/wenxin/wenxin.yaml delete mode 100644 api/core/model_runtime/model_providers/wenxin/wenxin_errors.py delete mode 100644 api/core/model_runtime/model_providers/xinference/__init__.py delete mode 100644 api/core/model_runtime/model_providers/xinference/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/xinference/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/xinference/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/xinference/llm/llm.py delete mode 100644 
api/core/model_runtime/model_providers/xinference/rerank/__init__.py delete mode 100644 api/core/model_runtime/model_providers/xinference/rerank/rerank.py delete mode 100644 api/core/model_runtime/model_providers/xinference/speech2text/__init__.py delete mode 100644 api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py delete mode 100644 api/core/model_runtime/model_providers/xinference/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/xinference/tts/__init__.py delete mode 100644 api/core/model_runtime/model_providers/xinference/tts/tts.py delete mode 100644 api/core/model_runtime/model_providers/xinference/xinference.py delete mode 100644 api/core/model_runtime/model_providers/xinference/xinference.yaml delete mode 100644 api/core/model_runtime/model_providers/xinference/xinference_helper.py delete mode 100644 api/core/model_runtime/model_providers/yi/__init__.py delete mode 100644 api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/yi/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/yi/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/yi/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/yi/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml delete mode 100644 api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml delete mode 100644 api/core/model_runtime/model_providers/yi/llm/yi-large-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/yi/llm/yi-large.yaml delete mode 100644 api/core/model_runtime/model_providers/yi/llm/yi-medium-200k.yaml delete mode 100644 api/core/model_runtime/model_providers/yi/llm/yi-medium.yaml delete mode 100644 api/core/model_runtime/model_providers/yi/llm/yi-spark.yaml delete mode 100644 api/core/model_runtime/model_providers/yi/llm/yi-vision.yaml delete 
mode 100644 api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml delete mode 100644 api/core/model_runtime/model_providers/yi/yi.py delete mode 100644 api/core/model_runtime/model_providers/yi/yi.yaml delete mode 100644 api/core/model_runtime/model_providers/zhinao/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhinao/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/zhinao/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/zhinao/llm/360gpt-turbo-responsibility-8k.yaml delete mode 100644 api/core/model_runtime/model_providers/zhinao/llm/360gpt-turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/zhinao/llm/360gpt2-pro.yaml delete mode 100644 api/core/model_runtime/model_providers/zhinao/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhinao/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/zhinao/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/zhinao/zhinao.py delete mode 100644 api/core/model_runtime/model_providers/zhinao/zhinao.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/_assets/icon_l_en.svg delete mode 100644 api/core/model_runtime/model_providers/zhipuai/_assets/icon_l_zh.svg delete mode 100644 api/core/model_runtime/model_providers/zhipuai/_assets/icon_s_en.svg delete mode 100644 api/core/model_runtime/model_providers/zhipuai/_common.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/chatglm_lite.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/chatglm_lite_32k.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/chatglm_pro.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/chatglm_std.yaml delete mode 
100644 api/core/model_runtime/model_providers/zhipuai/llm/chatglm_turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/glm-4-0520.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/glm-4-air.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/glm-4-airx.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/glm-4-flash.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/glm_3_turbo.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/glm_4.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/glm_4_long.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/glm_4_plus.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/glm_4v.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/glm_4v_plus.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/text_embedding/embedding-2.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/text_embedding/embedding-3.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/text_embedding/text_embedding.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai.yaml delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__version__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/_client.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/__init__.py delete mode 100644 
api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/batches.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/async_completions.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/chat.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/completions.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/embeddings.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/fine_tuning.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/jobs.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/fine_tuned_models.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/__init__.py delete mode 100644 
api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/knowledge.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/tools.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_api.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_compat.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_type.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_constants.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_errors.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_files.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_jwt_token.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_binary_response.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_response.py delete mode 100644 
api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_request_opt.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_response.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_sse_client.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_transform.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_typing.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/logs.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/pagination.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_completion.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_resp.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_create_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_support_resp.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/message_content.py delete mode 100644 
api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/text_content_block.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/code_interpreter_delta_block.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/drawing_tool_delta_block.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/function_delta_block.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/retrieval_delta_black.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/tools_type.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/web_browser_delta_block.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools_delta_block.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_create_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_error.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_list_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_request_counts.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/async_chat_completion.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion_chunk.py delete mode 100644 
api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completions_create_param.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/code_geex/code_geex_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/embeddings.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_create_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_deleted.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_object.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/upload_detail.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job_event.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/job_create_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/fine_tuned_models.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/image.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document.py delete mode 100644 
api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_edit_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_resp.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_create_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_resp.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_used.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/sensitive_word_check.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/tools_web_search_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search_chunk.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/__init__.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_create_params.py delete mode 100644 api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_object.py diff --git 
a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py index fe0bcf73384ed2..03d11b56197291 100644 --- a/api/controllers/console/workspace/model_providers.py +++ b/api/controllers/console/workspace/model_providers.py @@ -132,7 +132,7 @@ class ModelProviderIconApi(Resource): def get(self, provider: str, icon_type: str, lang: str): model_provider_service = ModelProviderService() icon, mimetype = model_provider_service.get_model_provider_icon( - provider=provider, icon_type=icon_type, lang=lang + tenant_id=current_user.current_tenant_id, provider=provider, icon_type=icon_type, lang=lang ) return send_file(io.BytesIO(icon), mimetype=mimetype) diff --git a/api/core/app/app_config/easy_ui_based_app/model_config/converter.py b/api/core/app/app_config/easy_ui_based_app/model_config/converter.py index a91b9f0f020073..c72c838d066a02 100644 --- a/api/core/app/app_config/easy_ui_based_app/model_config/converter.py +++ b/api/core/app/app_config/easy_ui_based_app/model_config/converter.py @@ -4,7 +4,8 @@ from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity from core.entities.model_entities import ModelStatus from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError -from core.model_runtime.entities.model_entities import ModelType +from core.model_runtime.entities.llm_entities import LLMMode +from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.provider_manager import ProviderManager @@ -67,14 +68,14 @@ def convert(cls, app_config: EasyUIBasedAppConfig, skip_check: bool = False) -> stop = completion_params["stop"] del completion_params["stop"] + model_schema = model_type_instance.get_model_schema(model_config.model, model_credentials) + # get model mode model_mode = model_config.mode if not model_mode: - mode_enum = 
model_type_instance.get_model_mode(model=model_config.model, credentials=model_credentials) - - model_mode = mode_enum.value - - model_schema = model_type_instance.get_model_schema(model_config.model, model_credentials) + model_mode = LLMMode.CHAT.value + if model_schema and model_schema.model_properties.get(ModelPropertyKey.MODE): + model_mode = LLMMode.value_of(model_schema.model_properties[ModelPropertyKey.MODE]).value if not skip_check and not model_schema: raise ValueError(f"Model {model_name} not exist.") diff --git a/api/core/app/app_config/easy_ui_based_app/model_config/manager.py b/api/core/app/app_config/easy_ui_based_app/model_config/manager.py index b5e4554181c06e..acc1a2d35b1a28 100644 --- a/api/core/app/app_config/easy_ui_based_app/model_config/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/model_config/manager.py @@ -1,6 +1,6 @@ from core.app.app_config.entities import ModelConfigEntity from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType -from core.model_runtime.model_providers import model_provider_factory +from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from core.provider_manager import ProviderManager @@ -50,6 +50,7 @@ def validate_and_set_defaults(cls, tenant_id: str, config: dict) -> tuple[dict, raise ValueError("model must be of object type") # model.provider + model_provider_factory = ModelProviderFactory(tenant_id) provider_entities = model_provider_factory.get_providers() model_provider_names = [provider.provider for provider in provider_entities] if "provider" not in config["model"] or config["model"]["provider"] not in model_provider_names: diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py index 807f09598c7607..764221dec5d091 100644 --- a/api/core/entities/provider_configuration.py +++ b/api/core/entities/provider_configuration.py @@ -2,7 +2,7 @@ import json import logging from collections import 
defaultdict -from collections.abc import Iterator +from collections.abc import Iterator, Sequence from json import JSONDecodeError from typing import Optional @@ -18,16 +18,15 @@ ) from core.helper import encrypter from core.helper.model_provider_cache import ProviderCredentialsCache, ProviderCredentialsCacheType -from core.model_runtime.entities.model_entities import FetchFrom, ModelType +from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType from core.model_runtime.entities.provider_entities import ( ConfigurateMethod, CredentialFormSchema, FormType, ProviderEntity, ) -from core.model_runtime.model_providers import model_provider_factory from core.model_runtime.model_providers.__base.ai_model import AIModel -from core.model_runtime.model_providers.__base.model_provider import ModelProvider +from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from extensions.ext_database import db from models.provider import ( LoadBalancingModelConfig, @@ -100,7 +99,9 @@ def get_current_credentials(self, model_type: ModelType, model: str) -> Optional restrict_models = quota_configuration.restrict_models - copy_credentials = self.system_configuration.credentials.copy() + copy_credentials = ( + self.system_configuration.credentials.copy() if self.system_configuration.credentials else {} + ) if restrict_models: for restrict_model in restrict_models: if ( @@ -137,6 +138,9 @@ def get_system_configuration_status(self) -> SystemConfigurationStatus: (q for q in self.system_configuration.quota_configurations if q.quota_type == current_quota_type), None ) + if not current_quota_configuration: + return SystemConfigurationStatus.UNSUPPORTED + return ( SystemConfigurationStatus.ACTIVE if current_quota_configuration.is_valid @@ -172,7 +176,7 @@ def get_custom_credentials(self, obfuscated: bool = False) -> Optional[dict]: else [], ) - def custom_credentials_validate(self, credentials: dict) -> tuple[Provider, dict]: + def 
custom_credentials_validate(self, credentials: dict) -> tuple[Provider | None, dict]: """ Validate custom credentials. :param credentials: provider credentials @@ -216,6 +220,7 @@ def custom_credentials_validate(self, credentials: dict) -> tuple[Provider, dict if value == HIDDEN_VALUE and key in original_credentials: credentials[key] = encrypter.decrypt_token(self.tenant_id, original_credentials[key]) + model_provider_factory = ModelProviderFactory(self.tenant_id) credentials = model_provider_factory.provider_credentials_validate( provider=self.provider.provider, credentials=credentials ) @@ -243,13 +248,13 @@ def add_or_update_custom_credentials(self, credentials: dict) -> None: provider_record.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None) db.session.commit() else: - provider_record = Provider( - tenant_id=self.tenant_id, - provider_name=self.provider.provider, - provider_type=ProviderType.CUSTOM.value, - encrypted_config=json.dumps(credentials), - is_valid=True, - ) + provider_record = Provider() + provider_record.tenant_id = self.tenant_id + provider_record.provider_name = self.provider.provider + provider_record.provider_type = ProviderType.CUSTOM.value + provider_record.encrypted_config = json.dumps(credentials) + provider_record.is_valid = True + db.session.add(provider_record) db.session.commit() @@ -324,7 +329,7 @@ def get_custom_model_credentials( def custom_model_credentials_validate( self, model_type: ModelType, model: str, credentials: dict - ) -> tuple[ProviderModel, dict]: + ) -> tuple[ProviderModel | None, dict]: """ Validate custom model credentials. 
@@ -367,6 +372,7 @@ def custom_model_credentials_validate( if value == HIDDEN_VALUE and key in original_credentials: credentials[key] = encrypter.decrypt_token(self.tenant_id, original_credentials[key]) + model_provider_factory = ModelProviderFactory(self.tenant_id) credentials = model_provider_factory.model_credentials_validate( provider=self.provider.provider, model_type=model_type, model=model, credentials=credentials ) @@ -397,14 +403,13 @@ def add_or_update_custom_model_credentials(self, model_type: ModelType, model: s provider_model_record.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None) db.session.commit() else: - provider_model_record = ProviderModel( - tenant_id=self.tenant_id, - provider_name=self.provider.provider, - model_name=model, - model_type=model_type.to_origin_model_type(), - encrypted_config=json.dumps(credentials), - is_valid=True, - ) + provider_model_record = ProviderModel() + provider_model_record.tenant_id = self.tenant_id + provider_model_record.provider_name = self.provider.provider + provider_model_record.model_name = model + provider_model_record.model_type = model_type.to_origin_model_type() + provider_model_record.encrypted_config = json.dumps(credentials) + provider_model_record.is_valid = True db.session.add(provider_model_record) db.session.commit() @@ -471,13 +476,12 @@ def enable_model(self, model_type: ModelType, model: str) -> ProviderModelSettin model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None) db.session.commit() else: - model_setting = ProviderModelSetting( - tenant_id=self.tenant_id, - provider_name=self.provider.provider, - model_type=model_type.to_origin_model_type(), - model_name=model, - enabled=True, - ) + model_setting = ProviderModelSetting() + model_setting.tenant_id = self.tenant_id + model_setting.provider_name = self.provider.provider + model_setting.model_type = model_type.to_origin_model_type() + model_setting.model_name = model + 
model_setting.enabled = True db.session.add(model_setting) db.session.commit() @@ -506,13 +510,12 @@ def disable_model(self, model_type: ModelType, model: str) -> ProviderModelSetti model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None) db.session.commit() else: - model_setting = ProviderModelSetting( - tenant_id=self.tenant_id, - provider_name=self.provider.provider, - model_type=model_type.to_origin_model_type(), - model_name=model, - enabled=False, - ) + model_setting = ProviderModelSetting() + model_setting.tenant_id = self.tenant_id + model_setting.provider_name = self.provider.provider + model_setting.model_type = model_type.to_origin_model_type() + model_setting.model_name = model + model_setting.enabled = False db.session.add(model_setting) db.session.commit() @@ -573,13 +576,12 @@ def enable_model_load_balancing(self, model_type: ModelType, model: str) -> Prov model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None) db.session.commit() else: - model_setting = ProviderModelSetting( - tenant_id=self.tenant_id, - provider_name=self.provider.provider, - model_type=model_type.to_origin_model_type(), - model_name=model, - load_balancing_enabled=True, - ) + model_setting = ProviderModelSetting() + model_setting.tenant_id = self.tenant_id + model_setting.provider_name = self.provider.provider + model_setting.model_type = model_type.to_origin_model_type() + model_setting.model_name = model + model_setting.load_balancing_enabled = True db.session.add(model_setting) db.session.commit() @@ -608,25 +610,17 @@ def disable_model_load_balancing(self, model_type: ModelType, model: str) -> Pro model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None) db.session.commit() else: - model_setting = ProviderModelSetting( - tenant_id=self.tenant_id, - provider_name=self.provider.provider, - model_type=model_type.to_origin_model_type(), - model_name=model, - 
load_balancing_enabled=False, - ) + model_setting = ProviderModelSetting() + model_setting.tenant_id = self.tenant_id + model_setting.provider_name = self.provider.provider + model_setting.model_type = model_type.to_origin_model_type() + model_setting.model_name = model + model_setting.load_balancing_enabled = False db.session.add(model_setting) db.session.commit() return model_setting - def get_provider_instance(self) -> ModelProvider: - """ - Get provider instance. - :return: - """ - return model_provider_factory.get_provider_instance(self.provider.provider) - def get_model_type_instance(self, model_type: ModelType) -> AIModel: """ Get current model type instance. @@ -634,11 +628,19 @@ def get_model_type_instance(self, model_type: ModelType) -> AIModel: :param model_type: model type :return: """ - # Get provider instance - provider_instance = self.get_provider_instance() + model_provider_factory = ModelProviderFactory(self.tenant_id) # Get model instance of LLM - return provider_instance.get_model_instance(model_type) + return model_provider_factory.get_model_type_instance(provider=self.provider.provider, model_type=model_type) + + def get_model_schema(self, model_type: ModelType, model: str, credentials: dict) -> AIModelEntity | None: + """ + Get model schema + """ + model_provider_factory = ModelProviderFactory(self.tenant_id) + return model_provider_factory.get_model_schema( + provider=self.provider.provider, model_type=model_type, model=model, credentials=credentials + ) def switch_preferred_provider_type(self, provider_type: ProviderType) -> None: """ @@ -665,11 +667,10 @@ def switch_preferred_provider_type(self, provider_type: ProviderType) -> None: if preferred_model_provider: preferred_model_provider.preferred_provider_type = provider_type.value else: - preferred_model_provider = TenantPreferredModelProvider( - tenant_id=self.tenant_id, - provider_name=self.provider.provider, - preferred_provider_type=provider_type.value, - ) + preferred_model_provider = 
TenantPreferredModelProvider() + preferred_model_provider.tenant_id = self.tenant_id + preferred_model_provider.provider_name = self.provider.provider + preferred_model_provider.preferred_provider_type = provider_type.value db.session.add(preferred_model_provider) db.session.commit() @@ -734,13 +735,14 @@ def get_provider_models( :param only_active: only active models :return: """ - provider_instance = self.get_provider_instance() + model_provider_factory = ModelProviderFactory(self.tenant_id) + provider_schema = model_provider_factory.get_provider_schema(self.provider.provider) model_types = [] if model_type: model_types.append(model_type) else: - model_types = provider_instance.get_provider_schema().supported_model_types + model_types = provider_schema.supported_model_types # Group model settings by model type and model model_setting_map = defaultdict(dict) @@ -749,11 +751,11 @@ def get_provider_models( if self.using_provider_type == ProviderType.SYSTEM: provider_models = self._get_system_provider_models( - model_types=model_types, provider_instance=provider_instance, model_setting_map=model_setting_map + model_types=model_types, provider_schema=provider_schema, model_setting_map=model_setting_map ) else: provider_models = self._get_custom_provider_models( - model_types=model_types, provider_instance=provider_instance, model_setting_map=model_setting_map + model_types=model_types, provider_schema=provider_schema, model_setting_map=model_setting_map ) if only_active: @@ -764,23 +766,26 @@ def get_provider_models( def _get_system_provider_models( self, - model_types: list[ModelType], - provider_instance: ModelProvider, + model_types: Sequence[ModelType], + provider_schema: ProviderEntity, model_setting_map: dict[ModelType, dict[str, ModelSettings]], ) -> list[ModelWithProviderEntity]: """ Get system provider models. 
:param model_types: model types - :param provider_instance: provider instance + :param provider_schema: provider schema :param model_setting_map: model setting map :return: """ provider_models = [] for model_type in model_types: - for m in provider_instance.models(model_type): + for m in provider_schema.models: + if m.model_type != model_type: + continue + status = ModelStatus.ACTIVE - if m.model_type in model_setting_map and m.model in model_setting_map[m.model_type]: + if m.model in model_setting_map: model_setting = model_setting_map[m.model_type][m.model] if model_setting.enabled is False: status = ModelStatus.DISABLED @@ -801,7 +806,7 @@ def _get_system_provider_models( if self.provider.provider not in original_provider_configurate_methods: original_provider_configurate_methods[self.provider.provider] = [] - for configurate_method in provider_instance.get_provider_schema().configurate_methods: + for configurate_method in provider_schema.configurate_methods: original_provider_configurate_methods[self.provider.provider].append(configurate_method) should_use_custom_model = False @@ -822,14 +827,20 @@ def _get_system_provider_models( ]: # only customizable model for restrict_model in restrict_models: - copy_credentials = self.system_configuration.credentials.copy() + copy_credentials = ( + self.system_configuration.credentials.copy() + if self.system_configuration.credentials + else {} + ) if restrict_model.base_model_name: copy_credentials["base_model_name"] = restrict_model.base_model_name try: - custom_model_schema = provider_instance.get_model_instance( - restrict_model.model_type - ).get_customizable_model_schema_from_credentials(restrict_model.model, copy_credentials) + custom_model_schema = self.get_model_schema( + model_type=restrict_model.model_type, + model=restrict_model.model, + credentials=copy_credentials, + ) except Exception as ex: logger.warning(f"get custom model schema failed, {ex}") continue @@ -875,15 +886,15 @@ def 
_get_system_provider_models( def _get_custom_provider_models( self, - model_types: list[ModelType], - provider_instance: ModelProvider, + model_types: Sequence[ModelType], + provider_schema: ProviderEntity, model_setting_map: dict[ModelType, dict[str, ModelSettings]], ) -> list[ModelWithProviderEntity]: """ Get custom provider models. :param model_types: model types - :param provider_instance: provider instance + :param provider_schema: provider schema :param model_setting_map: model setting map :return: """ @@ -897,8 +908,10 @@ def _get_custom_provider_models( if model_type not in self.provider.supported_model_types: continue - models = provider_instance.models(model_type) - for m in models: + for m in provider_schema.models: + if m.model_type != model_type: + continue + status = ModelStatus.ACTIVE if credentials else ModelStatus.NO_CONFIGURE load_balancing_enabled = False if m.model_type in model_setting_map and m.model in model_setting_map[m.model_type]: @@ -930,10 +943,10 @@ def _get_custom_provider_models( continue try: - custom_model_schema = provider_instance.get_model_instance( - model_configuration.model_type - ).get_customizable_model_schema_from_credentials( - model_configuration.model, model_configuration.credentials + custom_model_schema = self.get_model_schema( + model_type=model_configuration.model_type, + model=model_configuration.model, + credentials=model_configuration.credentials, ) except Exception as ex: logger.warning(f"get custom model schema failed, {ex}") @@ -1043,7 +1056,7 @@ def __iter__(self): return iter(self.configurations) def values(self) -> Iterator[ProviderConfiguration]: - return self.configurations.values() + return iter(self.configurations.values()) def get(self, key, default=None): return self.configurations.get(key, default) @@ -1055,7 +1068,6 @@ class ProviderModelBundle(BaseModel): """ configuration: ProviderConfiguration - provider_instance: ModelProvider model_type_instance: AIModel # pydantic configs diff --git 
a/api/core/helper/moderation.py b/api/core/helper/moderation.py index b880590de28476..f3144039e31654 100644 --- a/api/core/helper/moderation.py +++ b/api/core/helper/moderation.py @@ -23,6 +23,9 @@ def check_moderation(model_config: ModelConfigWithCredentialsEntity, text: str) if using_provider_type == ProviderType.SYSTEM and provider_name in moderation_config.providers: hosting_openai_config = hosting_configuration.provider_map["openai"] + if hosting_openai_config.credentials is None: + return False + # 2000 text per chunk length = 2000 text_chunks = [text[i : i + length] for i in range(0, len(text), length)] diff --git a/api/core/model_runtime/entities/provider_entities.py b/api/core/model_runtime/entities/provider_entities.py index bfe861a97ffbf8..a6a7b67577757f 100644 --- a/api/core/model_runtime/entities/provider_entities.py +++ b/api/core/model_runtime/entities/provider_entities.py @@ -5,7 +5,7 @@ from pydantic import BaseModel, ConfigDict from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import ModelType, ProviderModel +from core.model_runtime.entities.model_entities import AIModelEntity, ModelType class ConfigurateMethod(Enum): @@ -101,7 +101,7 @@ class SimpleProviderEntity(BaseModel): icon_small: Optional[I18nObject] = None icon_large: Optional[I18nObject] = None supported_model_types: Sequence[ModelType] - models: list[ProviderModel] = [] + models: list[AIModelEntity] = [] class ProviderHelpEntity(BaseModel): @@ -127,7 +127,7 @@ class ProviderEntity(BaseModel): help: Optional[ProviderHelpEntity] = None supported_model_types: Sequence[ModelType] configurate_methods: list[ConfigurateMethod] - models: list[ProviderModel] = [] + models: list[AIModelEntity] = [] provider_credential_schema: Optional[ProviderCredentialSchema] = None model_credential_schema: Optional[ModelCredentialSchema] = None diff --git a/api/core/model_runtime/model_providers/__base/ai_model.py 
b/api/core/model_runtime/model_providers/__base/ai_model.py index 79a1d28ebe637e..6b04ba2efd9ca3 100644 --- a/api/core/model_runtime/model_providers/__base/ai_model.py +++ b/api/core/model_runtime/model_providers/__base/ai_model.py @@ -1,10 +1,9 @@ import decimal import os -from abc import ABC, abstractmethod from collections.abc import Mapping from typing import Optional -from pydantic import ConfigDict +from pydantic import ConfigDict, Field from core.helper.position_helper import get_position_map, sort_by_position_map from core.model_runtime.entities.common_entities import I18nObject @@ -20,34 +19,26 @@ ) from core.model_runtime.errors.invoke import InvokeAuthorizationError, InvokeError from core.model_runtime.model_providers.__base.tokenizers.gpt2_tokenzier import GPT2Tokenizer +from core.plugin.entities.plugin_daemon import PluginModelProviderEntity from core.tools.utils.yaml_utils import load_yaml_file -class AIModel(ABC): +class AIModel: """ Base class for all models. """ - model_type: ModelType - model_schemas: Optional[list[AIModelEntity]] = None - started_at: float = 0 + tenant_id: str = Field(description="Tenant ID") + model_type: ModelType = Field(description="Model type") + plugin_id: str = Field(description="Plugin ID") + provider_name: str = Field(description="Provider") + plugin_model_provider: PluginModelProviderEntity = Field(description="Plugin model provider") + started_at: float = Field(description="Invoke start time", default=0) # pydantic configs model_config = ConfigDict(protected_namespaces=()) - @abstractmethod - def validate_credentials(self, model: str, credentials: Mapping) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - raise NotImplementedError - @property - @abstractmethod def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: """ Map model invoke error to unified error @@ -66,20 +57,18 @@ def _transform_invoke_error(self, 
error: Exception) -> InvokeError: :param error: model invoke error :return: unified error """ - provider_name = self.__class__.__module__.split(".")[-3] - for invoke_error, model_errors in self._invoke_error_mapping.items(): if isinstance(error, tuple(model_errors)): if invoke_error == InvokeAuthorizationError: return invoke_error( description=( - f"[{provider_name}] Incorrect model credentials provided, please check and try again." + f"[{self.provider_name}] Incorrect model credentials provided, please check and try again." ) ) - return invoke_error(description=f"[{provider_name}] {invoke_error.description}, {str(error)}") + return invoke_error(description=f"[{self.provider_name}] {invoke_error.description}, {str(error)}") - return InvokeError(description=f"[{provider_name}] Error: {str(error)}") + return InvokeError(description=f"[{self.provider_name}] Error: {str(error)}") def get_price(self, model: str, credentials: dict, price_type: PriceType, tokens: int) -> PriceInfo: """ diff --git a/api/core/model_runtime/model_providers/__base/large_language_model.py b/api/core/model_runtime/model_providers/__base/large_language_model.py index ba88cc1f384fc5..33dbce37c4c9d2 100644 --- a/api/core/model_runtime/model_providers/__base/large_language_model.py +++ b/api/core/model_runtime/model_providers/__base/large_language_model.py @@ -1,32 +1,25 @@ import logging import os -import re import time -from abc import abstractmethod -from collections.abc import Generator, Mapping +from collections.abc import Generator from typing import Optional, Union from pydantic import ConfigDict from core.model_runtime.callbacks.base_callback import Callback from core.model_runtime.callbacks.logging_callback import LoggingCallback -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage +from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMUsage from core.model_runtime.entities.message_entities 
import ( AssistantPromptMessage, PromptMessage, - PromptMessageContentType, PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, ) from core.model_runtime.entities.model_entities import ( - ModelPropertyKey, ModelType, - ParameterRule, - ParameterType, PriceType, ) from core.model_runtime.model_providers.__base.ai_model import AIModel +from core.plugin.manager.model import PluginModelManager logger = logging.getLogger(__name__) @@ -71,8 +64,6 @@ def invoke( if model_parameters is None: model_parameters = {} - model_parameters = self._validate_and_filter_model_parameters(model, model_parameters, credentials) - self.started_at = time.perf_counter() callbacks = callbacks or [] @@ -94,20 +85,43 @@ def invoke( ) try: - if "response_format" in model_parameters: - result = self._code_block_mode_wrapper( + plugin_model_manager = PluginModelManager() + result = plugin_model_manager.invoke_llm( + tenant_id=self.tenant_id, + user_id=user or "unknown", + plugin_id=self.plugin_id, + provider=self.provider_name, + model=model, + credentials=credentials, + model_parameters=model_parameters, + prompt_messages=prompt_messages, + tools=tools, + stop=stop, + stream=stream, + ) + + if not stream: + content = "" + content_list = [] + usage = LLMUsage.empty_usage() + system_fingerprint = None + for chunk in result: + if isinstance(chunk.delta.message.content, str): + content += chunk.delta.message.content + elif isinstance(chunk.delta.message.content, list): + content_list.extend(chunk.delta.message.content) + + usage = chunk.delta.usage or LLMUsage.empty_usage() + system_fingerprint = chunk.system_fingerprint + break + + result = LLMResult( model=model, - credentials=credentials, prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - callbacks=callbacks, + message=AssistantPromptMessage(content=content or content_list), + usage=usage, + system_fingerprint=system_fingerprint, ) - else: - result = 
self._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) except Exception as e: self._trigger_invoke_error_callbacks( model=model, @@ -122,6 +136,7 @@ def invoke( callbacks=callbacks, ) + # TODO raise self._transform_invoke_error(e) if stream and isinstance(result, Generator): @@ -153,244 +168,6 @@ def invoke( return result - def _code_block_mode_wrapper( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - callbacks: Optional[list[Callback]] = None, - ) -> Union[LLMResult, Generator]: - """ - Code block mode wrapper, ensure the response is a code block with output markdown quote - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :param callbacks: callbacks - :return: full response or stream response chunk generator result - """ - - block_prompts = """You should always follow the instructions and output a valid {{block}} object. -The structure of the {{block}} object you can found in the instructions, use {"answer": "$your_answer"} as the default structure -if you are not sure about the structure. 
- - -{{instructions}} - -""" # noqa: E501 - - code_block = model_parameters.get("response_format", "") - if not code_block: - return self._invoke( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - model_parameters.pop("response_format") - stop = stop or [] - stop.extend(["\n```", "```\n"]) - block_prompts = block_prompts.replace("{{block}}", code_block) - - # check if there is a system message - if len(prompt_messages) > 0 and isinstance(prompt_messages[0], SystemPromptMessage): - # override the system message - prompt_messages[0] = SystemPromptMessage( - content=block_prompts.replace("{{instructions}}", str(prompt_messages[0].content)) - ) - else: - # insert the system message - prompt_messages.insert( - 0, - SystemPromptMessage( - content=block_prompts.replace("{{instructions}}", f"Please output a valid {code_block} object.") - ), - ) - - if len(prompt_messages) > 0 and isinstance(prompt_messages[-1], UserPromptMessage): - # add ```JSON\n to the last text message - if isinstance(prompt_messages[-1].content, str): - prompt_messages[-1].content += f"\n```{code_block}\n" - elif isinstance(prompt_messages[-1].content, list): - for i in range(len(prompt_messages[-1].content) - 1, -1, -1): - if prompt_messages[-1].content[i].type == PromptMessageContentType.TEXT: - prompt_messages[-1].content[i].data += f"\n```{code_block}\n" - break - else: - # append a user message - prompt_messages.append(UserPromptMessage(content=f"```{code_block}\n")) - - response = self._invoke( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - if isinstance(response, Generator): - first_chunk = next(response) - - def new_generator(): - yield first_chunk - yield from response - - if first_chunk.delta.message.content and 
first_chunk.delta.message.content.startswith("`"): - return self._code_block_mode_stream_processor_with_backtick( - model=model, prompt_messages=prompt_messages, input_generator=new_generator() - ) - else: - return self._code_block_mode_stream_processor( - model=model, prompt_messages=prompt_messages, input_generator=new_generator() - ) - - return response - - def _code_block_mode_stream_processor( - self, model: str, prompt_messages: list[PromptMessage], input_generator: Generator[LLMResultChunk, None, None] - ) -> Generator[LLMResultChunk, None, None]: - """ - Code block mode stream processor, ensure the response is a code block with output markdown quote - - :param model: model name - :param prompt_messages: prompt messages - :param input_generator: input generator - :return: output generator - """ - state = "normal" - backtick_count = 0 - for piece in input_generator: - if piece.delta.message.content: - content = piece.delta.message.content - piece.delta.message.content = "" - yield piece - piece = content - else: - yield piece - continue - new_piece: str = "" - for char in piece: - char = str(char) - if state == "normal": - if char == "`": - state = "in_backticks" - backtick_count = 1 - else: - new_piece += char - elif state == "in_backticks": - if char == "`": - backtick_count += 1 - if backtick_count == 3: - state = "skip_content" - backtick_count = 0 - else: - new_piece += "`" * backtick_count + char - state = "normal" - backtick_count = 0 - elif state == "skip_content": - if char.isspace(): - state = "normal" - - if new_piece: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=new_piece, tool_calls=[]), - ), - ) - - def _code_block_mode_stream_processor_with_backtick( - self, model: str, prompt_messages: list, input_generator: Generator[LLMResultChunk, None, None] - ) -> Generator[LLMResultChunk, None, None]: - """ - Code block mode stream processor, 
ensure the response is a code block with output markdown quote. - This version skips the language identifier that follows the opening triple backticks. - - :param model: model name - :param prompt_messages: prompt messages - :param input_generator: input generator - :return: output generator - """ - state = "search_start" - backtick_count = 0 - - for piece in input_generator: - if piece.delta.message.content: - content = piece.delta.message.content - # Reset content to ensure we're only processing and yielding the relevant parts - piece.delta.message.content = "" - # Yield a piece with cleared content before processing it to maintain the generator structure - yield piece - piece = content - else: - # Yield pieces without content directly - yield piece - continue - - if state == "done": - continue - - new_piece: str = "" - for char in piece: - if state == "search_start": - if char == "`": - backtick_count += 1 - if backtick_count == 3: - state = "skip_language" - backtick_count = 0 - else: - backtick_count = 0 - elif state == "skip_language": - # Skip everything until the first newline, marking the end of the language identifier - if char == "\n": - state = "in_code_block" - elif state == "in_code_block": - if char == "`": - backtick_count += 1 - if backtick_count == 3: - state = "done" - break - else: - if backtick_count > 0: - # If backticks were counted but we're still collecting content, it was a false start - new_piece += "`" * backtick_count - backtick_count = 0 - new_piece += str(char) - - elif state == "done": - break - - if new_piece: - # Only yield content collected within the code block - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=new_piece, tool_calls=[]), - ), - ) - def _invoke_result_generator( self, model: str, @@ -462,34 +239,6 @@ def _invoke_result_generator( callbacks=callbacks, ) - @abstractmethod - def _invoke( - self, - model: str, - 
credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - raise NotImplementedError - - @abstractmethod def get_num_tokens( self, model: str, @@ -506,41 +255,18 @@ def get_num_tokens( :param tools: tools for tool calling :return: """ - raise NotImplementedError - - def enforce_stop_tokens(self, text: str, stop: list[str]) -> str: - """Cut off the text as soon as any stop words occur.""" - return re.split("|".join(stop), text, maxsplit=1)[0] - - def get_parameter_rules(self, model: str, credentials: dict) -> list[ParameterRule]: - """ - Get parameter rules - - :param model: model name - :param credentials: model credentials - :return: parameter rules - """ - model_schema = self.get_model_schema(model, credentials) - if model_schema: - return model_schema.parameter_rules - - return [] - - def get_model_mode(self, model: str, credentials: Optional[Mapping] = None) -> LLMMode: - """ - Get model mode - - :param model: model name - :param credentials: model credentials - :return: model mode - """ - model_schema = self.get_model_schema(model, credentials) - - mode = LLMMode.CHAT - if model_schema and model_schema.model_properties.get(ModelPropertyKey.MODE): - mode = LLMMode.value_of(model_schema.model_properties[ModelPropertyKey.MODE]) - - return mode + plugin_model_manager = PluginModelManager() + return plugin_model_manager.get_llm_num_tokens( + tenant_id=self.tenant_id, + 
user_id="unknown", + plugin_id=self.plugin_id, + provider=self.provider_name, + model_type=self.model_type.value, + model=model, + credentials=credentials, + prompt_messages=prompt_messages, + tools=tools, + ) def _calc_response_usage( self, model: str, credentials: dict, prompt_tokens: int, completion_tokens: int @@ -772,98 +498,3 @@ def _trigger_invoke_error_callbacks( raise e else: logger.warning(f"Callback {callback.__class__.__name__} on_invoke_error failed with error {e}") - - def _validate_and_filter_model_parameters(self, model: str, model_parameters: dict, credentials: dict) -> dict: - """ - Validate model parameters - - :param model: model name - :param model_parameters: model parameters - :param credentials: model credentials - :return: - """ - parameter_rules = self.get_parameter_rules(model, credentials) - - # validate model parameters - filtered_model_parameters = {} - for parameter_rule in parameter_rules: - parameter_name = parameter_rule.name - parameter_value = model_parameters.get(parameter_name) - if parameter_value is None: - if parameter_rule.use_template and parameter_rule.use_template in model_parameters: - # if parameter value is None, use template value variable name instead - parameter_value = model_parameters[parameter_rule.use_template] - else: - if parameter_rule.required: - if parameter_rule.default is not None: - filtered_model_parameters[parameter_name] = parameter_rule.default - continue - else: - raise ValueError(f"Model Parameter {parameter_name} is required.") - else: - continue - - # validate parameter value type - if parameter_rule.type == ParameterType.INT: - if not isinstance(parameter_value, int): - raise ValueError(f"Model Parameter {parameter_name} should be int.") - - # validate parameter value range - if parameter_rule.min is not None and parameter_value < parameter_rule.min: - raise ValueError( - f"Model Parameter {parameter_name} should be greater than or equal to {parameter_rule.min}." 
- ) - - if parameter_rule.max is not None and parameter_value > parameter_rule.max: - raise ValueError( - f"Model Parameter {parameter_name} should be less than or equal to {parameter_rule.max}." - ) - elif parameter_rule.type == ParameterType.FLOAT: - if not isinstance(parameter_value, float | int): - raise ValueError(f"Model Parameter {parameter_name} should be float.") - - # validate parameter value precision - if parameter_rule.precision is not None: - if parameter_rule.precision == 0: - if parameter_value != int(parameter_value): - raise ValueError(f"Model Parameter {parameter_name} should be int.") - else: - if parameter_value != round(parameter_value, parameter_rule.precision): - raise ValueError( - f"Model Parameter {parameter_name} should be round to {parameter_rule.precision}" - f" decimal places." - ) - - # validate parameter value range - if parameter_rule.min is not None and parameter_value < parameter_rule.min: - raise ValueError( - f"Model Parameter {parameter_name} should be greater than or equal to {parameter_rule.min}." - ) - - if parameter_rule.max is not None and parameter_value > parameter_rule.max: - raise ValueError( - f"Model Parameter {parameter_name} should be less than or equal to {parameter_rule.max}." 
- ) - elif parameter_rule.type == ParameterType.BOOLEAN: - if not isinstance(parameter_value, bool): - raise ValueError(f"Model Parameter {parameter_name} should be bool.") - elif parameter_rule.type == ParameterType.STRING: - if not isinstance(parameter_value, str): - raise ValueError(f"Model Parameter {parameter_name} should be string.") - - # validate options - if parameter_rule.options and parameter_value not in parameter_rule.options: - raise ValueError(f"Model Parameter {parameter_name} should be one of {parameter_rule.options}.") - elif parameter_rule.type == ParameterType.TEXT: - if not isinstance(parameter_value, str): - raise ValueError(f"Model Parameter {parameter_name} should be text.") - - # validate options - if parameter_rule.options and parameter_value not in parameter_rule.options: - raise ValueError(f"Model Parameter {parameter_name} should be one of {parameter_rule.options}.") - else: - raise ValueError(f"Model Parameter {parameter_name} type {parameter_rule.type} is not supported.") - - filtered_model_parameters[parameter_name] = parameter_value - - return filtered_model_parameters diff --git a/api/core/model_runtime/model_providers/__base/text_embedding_model.py b/api/core/model_runtime/model_providers/__base/text_embedding_model.py index a948dca20d69a4..1a5c40ed516da1 100644 --- a/api/core/model_runtime/model_providers/__base/text_embedding_model.py +++ b/api/core/model_runtime/model_providers/__base/text_embedding_model.py @@ -8,6 +8,7 @@ from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult from core.model_runtime.model_providers.__base.ai_model import AIModel +from core.plugin.manager.model import PluginModelManager class TextEmbeddingModel(AIModel): @@ -66,7 +67,6 @@ def _invoke( """ raise NotImplementedError - @abstractmethod def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: """ Get number of 
tokens for given prompt messages @@ -76,7 +76,17 @@ def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int :param texts: texts to embed :return: """ - raise NotImplementedError + plugin_model_manager = PluginModelManager() + return plugin_model_manager.get_text_embedding_num_tokens( + tenant_id=self.tenant_id, + user_id="unknown", + plugin_id=self.plugin_id, + provider=self.provider_name, + model_type=self.model_type.value, + model=model, + credentials=credentials, + texts=texts, + ) def _get_context_size(self, model: str, credentials: dict) -> int: """ diff --git a/api/core/model_runtime/model_providers/__init__.py b/api/core/model_runtime/model_providers/__init__.py index 9d71013dbfa151..e69de29bb2d1d6 100644 --- a/api/core/model_runtime/model_providers/__init__.py +++ b/api/core/model_runtime/model_providers/__init__.py @@ -1,3 +0,0 @@ -from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory - -model_provider_factory = ModelProviderFactory() diff --git a/api/core/model_runtime/model_providers/anthropic/__init__.py b/api/core/model_runtime/model_providers/anthropic/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/anthropic/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/anthropic/_assets/icon_l_en.svg deleted file mode 100644 index cace17da73bd0a..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/_assets/icon_l_en.svg +++ /dev/null @@ -1,78 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/anthropic/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/anthropic/_assets/icon_s_en.svg deleted file mode 100644 index d852f04401fd94..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/_assets/icon_s_en.svg 
+++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/api/core/model_runtime/model_providers/anthropic/anthropic.py b/api/core/model_runtime/model_providers/anthropic/anthropic.py deleted file mode 100644 index 5b12f04a3e59b8..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/anthropic.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class AnthropicProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `claude-3-opus-20240229` model for validate, - model_instance.validate_credentials(model="claude-3-opus-20240229", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/anthropic/anthropic.yaml b/api/core/model_runtime/model_providers/anthropic/anthropic.yaml deleted file mode 100644 index cf41f544ef4aa3..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/anthropic.yaml +++ /dev/null @@ -1,39 +0,0 @@ -provider: anthropic -label: - en_US: Anthropic -description: - en_US: Anthropic’s powerful models, such as Claude 3. 
- zh_Hans: Anthropic 的强大模型,例如 Claude 3。 -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#F0F0EB" -help: - title: - en_US: Get your API Key from Anthropic - zh_Hans: 从 Anthropic 获取 API Key - url: - en_US: https://console.anthropic.com/account/keys -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: anthropic_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: anthropic_api_url - label: - en_US: API URL - type: text-input - required: false - placeholder: - zh_Hans: 在此输入您的 API URL - en_US: Enter your API URL diff --git a/api/core/model_runtime/model_providers/anthropic/llm/__init__.py b/api/core/model_runtime/model_providers/anthropic/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml b/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml deleted file mode 100644 index 8394c4276a786e..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml +++ /dev/null @@ -1,8 +0,0 @@ -- claude-3-5-sonnet-20240620 -- claude-3-haiku-20240307 -- claude-3-opus-20240229 -- claude-3-sonnet-20240229 -- claude-2.1 -- claude-instant-1.2 -- claude-2 -- claude-instant-1 diff --git a/api/core/model_runtime/model_providers/anthropic/llm/claude-2.1.yaml b/api/core/model_runtime/model_providers/anthropic/llm/claude-2.1.yaml deleted file mode 100644 index 6707c3459402f3..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/llm/claude-2.1.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: claude-2.1 -label: - en_US: claude-2.1 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: temperature - use_template: temperature - - name: 
top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: '8.00' - output: '24.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/anthropic/llm/claude-2.yaml b/api/core/model_runtime/model_providers/anthropic/llm/claude-2.yaml deleted file mode 100644 index 1986947129462f..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/llm/claude-2.yaml +++ /dev/null @@ -1,37 +0,0 @@ -model: claude-2 -label: - en_US: claude-2 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 100000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: '8.00' - output: '24.00' - unit: '0.000001' - currency: USD -deprecated: true diff --git a/api/core/model_runtime/model_providers/anthropic/llm/claude-3-5-sonnet-20240620.yaml b/api/core/model_runtime/model_providers/anthropic/llm/claude-3-5-sonnet-20240620.yaml deleted file mode 100644 index e02c5517fe1f3c..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/llm/claude-3-5-sonnet-20240620.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: claude-3-5-sonnet-20240620 -label: - en_US: claude-3-5-sonnet-20240620 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format -pricing: - input: '3.00' - output: '15.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/anthropic/llm/claude-3-haiku-20240307.yaml b/api/core/model_runtime/model_providers/anthropic/llm/claude-3-haiku-20240307.yaml deleted file mode 100644 index cb2af1308a0791..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/llm/claude-3-haiku-20240307.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: claude-3-haiku-20240307 -label: - en_US: claude-3-haiku-20240307 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: '0.25' - output: '1.25' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/anthropic/llm/claude-3-opus-20240229.yaml b/api/core/model_runtime/model_providers/anthropic/llm/claude-3-opus-20240229.yaml deleted file mode 100644 index 101f54c3f8ad6a..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/llm/claude-3-opus-20240229.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: claude-3-opus-20240229 -label: - en_US: claude-3-opus-20240229 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: '15.00' - output: '75.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/anthropic/llm/claude-3-sonnet-20240229.yaml b/api/core/model_runtime/model_providers/anthropic/llm/claude-3-sonnet-20240229.yaml deleted file mode 100644 index daf55553f8bea5..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/llm/claude-3-sonnet-20240229.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: claude-3-sonnet-20240229 -label: - en_US: claude-3-sonnet-20240229 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: '3.00' - output: '15.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/anthropic/llm/claude-instant-1.2.yaml b/api/core/model_runtime/model_providers/anthropic/llm/claude-instant-1.2.yaml deleted file mode 100644 index ac69bbf4d293d3..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/llm/claude-instant-1.2.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: claude-instant-1.2 -label: - en_US: claude-instant-1.2 -model_type: llm -features: [ ] -model_properties: - mode: chat - context_size: 100000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: '1.63' - output: '5.51' - unit: '0.000001' - currency: USD -deprecated: true diff --git a/api/core/model_runtime/model_providers/anthropic/llm/claude-instant-1.yaml b/api/core/model_runtime/model_providers/anthropic/llm/claude-instant-1.yaml deleted file mode 100644 index 5e76d5b1c2e74b..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/llm/claude-instant-1.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: claude-instant-1 -label: - en_US: claude-instant-1 -model_type: llm -features: [ ] -model_properties: - mode: chat - context_size: 100000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: '1.63' - output: '5.51' - unit: '0.000001' - currency: USD -deprecated: true diff --git a/api/core/model_runtime/model_providers/anthropic/llm/llm.py b/api/core/model_runtime/model_providers/anthropic/llm/llm.py deleted file mode 100644 index 46e1b415b81253..00000000000000 --- a/api/core/model_runtime/model_providers/anthropic/llm/llm.py +++ /dev/null @@ -1,624 +0,0 @@ -import base64 -import io -import json -from collections.abc import Generator -from typing import Optional, Union, cast - -import anthropic -import requests -from anthropic import Anthropic, Stream -from anthropic.types import ( - ContentBlockDeltaEvent, - Message, - MessageDeltaEvent, - MessageStartEvent, - MessageStopEvent, - MessageStreamEvent, - completion_create_params, -) -from anthropic.types.beta.tools import ToolsBetaMessage -from httpx import Timeout -from PIL import Image - -from core.model_runtime.callbacks.base_callback import Callback -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - -ANTHROPIC_BLOCK_MODE_PROMPT = """You should always follow the instructions and output a valid {{block}} 
object. -The structure of the {{block}} object you can found in the instructions, use {"answer": "$your_answer"} as the default structure -if you are not sure about the structure. - - -{{instructions}} - -""" # noqa: E501 - - -class AnthropicLargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # invoke model - return self._chat_generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def _chat_generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke llm chat model - - :param model: model name - :param credentials: credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - - # transform model parameters from completion api of anthropic to chat api - if "max_tokens_to_sample" in 
model_parameters: - model_parameters["max_tokens"] = model_parameters.pop("max_tokens_to_sample") - - # init model client - client = Anthropic(**credentials_kwargs) - - extra_model_kwargs = {} - if stop: - extra_model_kwargs["stop_sequences"] = stop - - if user: - extra_model_kwargs["metadata"] = completion_create_params.Metadata(user_id=user) - - system, prompt_message_dicts = self._convert_prompt_messages(prompt_messages) - - if system: - extra_model_kwargs["system"] = system - - # Add the new header for claude-3-5-sonnet-20240620 model - extra_headers = {} - if model == "claude-3-5-sonnet-20240620": - if model_parameters.get("max_tokens") > 4096: - extra_headers["anthropic-beta"] = "max-tokens-3-5-sonnet-2024-07-15" - - if tools: - extra_model_kwargs["tools"] = [self._transform_tool_prompt(tool) for tool in tools] - response = client.beta.tools.messages.create( - model=model, - messages=prompt_message_dicts, - stream=stream, - extra_headers=extra_headers, - **model_parameters, - **extra_model_kwargs, - ) - else: - # chat model - response = client.messages.create( - model=model, - messages=prompt_message_dicts, - stream=stream, - extra_headers=extra_headers, - **model_parameters, - **extra_model_kwargs, - ) - - if stream: - return self._handle_chat_generate_stream_response(model, credentials, response, prompt_messages) - - return self._handle_chat_generate_response(model, credentials, response, prompt_messages) - - def _code_block_mode_wrapper( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - callbacks: list[Callback] = None, - ) -> Union[LLMResult, Generator]: - """ - Code block mode wrapper for invoking large language model - """ - if model_parameters.get("response_format"): - stop = stop or [] - # chat model - self._transform_chat_json_prompts( - model=model, - 
credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - response_format=model_parameters["response_format"], - ) - model_parameters.pop("response_format") - - return self._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def _transform_tool_prompt(self, tool: PromptMessageTool) -> dict: - return {"name": tool.name, "description": tool.description, "input_schema": tool.parameters} - - def _transform_chat_json_prompts( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - response_format: str = "JSON", - ) -> None: - """ - Transform json prompts - """ - if "```\n" not in stop: - stop.append("```\n") - if "\n```" not in stop: - stop.append("\n```") - - # check if there is a system message - if len(prompt_messages) > 0 and isinstance(prompt_messages[0], SystemPromptMessage): - # override the system message - prompt_messages[0] = SystemPromptMessage( - content=ANTHROPIC_BLOCK_MODE_PROMPT.replace("{{instructions}}", prompt_messages[0].content).replace( - "{{block}}", response_format - ) - ) - prompt_messages.append(AssistantPromptMessage(content=f"\n```{response_format}")) - else: - # insert the system message - prompt_messages.insert( - 0, - SystemPromptMessage( - content=ANTHROPIC_BLOCK_MODE_PROMPT.replace( - "{{instructions}}", f"Please output a valid {response_format} object." 
- ).replace("{{block}}", response_format) - ), - ) - prompt_messages.append(AssistantPromptMessage(content=f"\n```{response_format}")) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - prompt = self._convert_messages_to_prompt_anthropic(prompt_messages) - - client = Anthropic(api_key="") - tokens = client.count_tokens(prompt) - - tool_call_inner_prompts_tokens_map = { - "claude-3-opus-20240229": 395, - "claude-3-haiku-20240307": 264, - "claude-3-sonnet-20240229": 159, - } - - if model in tool_call_inner_prompts_tokens_map and tools: - tokens += tool_call_inner_prompts_tokens_map[model] - - return tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._chat_generate( - model=model, - credentials=credentials, - prompt_messages=[ - UserPromptMessage(content="ping"), - ], - model_parameters={ - "temperature": 0, - "max_tokens": 20, - }, - stream=False, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _handle_chat_generate_response( - self, - model: str, - credentials: dict, - response: Union[Message, ToolsBetaMessage], - prompt_messages: list[PromptMessage], - ) -> LLMResult: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response - """ - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content="", tool_calls=[]) - - for content in response.content: - if 
content.type == "text": - assistant_prompt_message.content += content.text - elif content.type == "tool_use": - tool_call = AssistantPromptMessage.ToolCall( - id=content.id, - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=content.name, arguments=json.dumps(content.input) - ), - ) - assistant_prompt_message.tool_calls.append(tool_call) - - # calculate num tokens - if response.usage: - # transform usage - prompt_tokens = response.usage.input_tokens - completion_tokens = response.usage.output_tokens - else: - # calculate num tokens - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - response = LLMResult( - model=response.model, prompt_messages=prompt_messages, message=assistant_prompt_message, usage=usage - ) - - return response - - def _handle_chat_generate_stream_response( - self, model: str, credentials: dict, response: Stream[MessageStreamEvent], prompt_messages: list[PromptMessage] - ) -> Generator: - """ - Handle llm chat stream response - - :param model: model name - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator - """ - full_assistant_content = "" - return_model = None - input_tokens = 0 - output_tokens = 0 - finish_reason = None - index = 0 - - tool_calls: list[AssistantPromptMessage.ToolCall] = [] - - for chunk in response: - if isinstance(chunk, MessageStartEvent): - if hasattr(chunk, "content_block"): - content_block = chunk.content_block - if isinstance(content_block, dict): - if content_block.get("type") == "tool_use": - tool_call = AssistantPromptMessage.ToolCall( - id=content_block.get("id"), - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=content_block.get("name"), 
arguments="" - ), - ) - tool_calls.append(tool_call) - elif hasattr(chunk, "delta"): - delta = chunk.delta - if isinstance(delta, dict) and len(tool_calls) > 0: - if delta.get("type") == "input_json_delta": - tool_calls[-1].function.arguments += delta.get("partial_json", "") - elif chunk.message: - return_model = chunk.message.model - input_tokens = chunk.message.usage.input_tokens - elif isinstance(chunk, MessageDeltaEvent): - output_tokens = chunk.usage.output_tokens - finish_reason = chunk.delta.stop_reason - elif isinstance(chunk, MessageStopEvent): - # transform usage - usage = self._calc_response_usage(model, credentials, input_tokens, output_tokens) - - # transform empty tool call arguments to {} - for tool_call in tool_calls: - if not tool_call.function.arguments: - tool_call.function.arguments = "{}" - - yield LLMResultChunk( - model=return_model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index + 1, - message=AssistantPromptMessage(content="", tool_calls=tool_calls), - finish_reason=finish_reason, - usage=usage, - ), - ) - elif isinstance(chunk, ContentBlockDeltaEvent): - chunk_text = chunk.delta.text or "" - full_assistant_content += chunk_text - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=chunk_text) - - index = chunk.index - - yield LLMResultChunk( - model=return_model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=chunk.index, - message=assistant_prompt_message, - ), - ) - - def _to_credential_kwargs(self, credentials: dict) -> dict: - """ - Transform credentials to kwargs for model instance - - :param credentials: - :return: - """ - credentials_kwargs = { - "api_key": credentials["anthropic_api_key"], - "timeout": Timeout(315.0, read=300.0, write=10.0, connect=5.0), - "max_retries": 1, - } - - if credentials.get("anthropic_api_url"): - credentials["anthropic_api_url"] = credentials["anthropic_api_url"].rstrip("/") - 
credentials_kwargs["base_url"] = credentials["anthropic_api_url"] - - return credentials_kwargs - - def _convert_prompt_messages(self, prompt_messages: list[PromptMessage]) -> tuple[str, list[dict]]: - """ - Convert prompt messages to dict list and system - """ - system = "" - first_loop = True - for message in prompt_messages: - if isinstance(message, SystemPromptMessage): - message.content = message.content.strip() - if first_loop: - system = message.content - first_loop = False - else: - system += "\n" - system += message.content - - prompt_message_dicts = [] - for message in prompt_messages: - if not isinstance(message, SystemPromptMessage): - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - prompt_message_dicts.append(message_dict) - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - if not message_content.data.startswith("data:"): - # fetch image data from url - try: - image_content = requests.get(message_content.data).content - with Image.open(io.BytesIO(image_content)) as img: - mime_type = f"image/{img.format.lower()}" - base64_data = base64.b64encode(image_content).decode("utf-8") - except Exception as ex: - raise ValueError( - f"Failed to fetch image data from url {message_content.data}, {ex}" - ) - else: - data_split = message_content.data.split(";base64,") - mime_type = data_split[0].replace("data:", "") - base64_data = data_split[1] - - if mime_type not in {"image/jpeg", "image/png", "image/gif", "image/webp"}: - raise ValueError( - 
f"Unsupported image type {mime_type}, " - f"only support image/jpeg, image/png, image/gif, and image/webp" - ) - - sub_message_dict = { - "type": "image", - "source": {"type": "base64", "media_type": mime_type, "data": base64_data}, - } - sub_messages.append(sub_message_dict) - prompt_message_dicts.append({"role": "user", "content": sub_messages}) - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - content = [] - if message.tool_calls: - for tool_call in message.tool_calls: - content.append( - { - "type": "tool_use", - "id": tool_call.id, - "name": tool_call.function.name, - "input": json.loads(tool_call.function.arguments), - } - ) - if message.content: - content.append({"type": "text", "text": message.content}) - - if prompt_message_dicts[-1]["role"] == "assistant": - prompt_message_dicts[-1]["content"].extend(content) - else: - prompt_message_dicts.append({"role": "assistant", "content": content}) - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - message_dict = { - "role": "user", - "content": [ - {"type": "tool_result", "tool_use_id": message.tool_call_id, "content": message.content} - ], - } - prompt_message_dicts.append(message_dict) - else: - raise ValueError(f"Got unknown type {message}") - - return system, prompt_message_dicts - - def _convert_one_message_to_text(self, message: PromptMessage) -> str: - """ - Convert a single message to a string. - - :param message: PromptMessage to convert. - :return: String representation of the message. 
- """ - human_prompt = "\n\nHuman:" - ai_prompt = "\n\nAssistant:" - content = message.content - - if isinstance(message, UserPromptMessage): - message_text = f"{human_prompt} {content}" - if not isinstance(message.content, list): - message_text = f"{ai_prompt} {content}" - else: - message_text = "" - for sub_message in message.content: - if sub_message.type == PromptMessageContentType.TEXT: - message_text += f"{human_prompt} {sub_message.data}" - elif sub_message.type == PromptMessageContentType.IMAGE: - message_text += f"{human_prompt} [IMAGE]" - elif isinstance(message, AssistantPromptMessage): - if not isinstance(message.content, list): - message_text = f"{ai_prompt} {content}" - else: - message_text = "" - for sub_message in message.content: - if sub_message.type == PromptMessageContentType.TEXT: - message_text += f"{ai_prompt} {sub_message.data}" - elif sub_message.type == PromptMessageContentType.IMAGE: - message_text += f"{ai_prompt} [IMAGE]" - elif isinstance(message, SystemPromptMessage): - message_text = content - elif isinstance(message, ToolPromptMessage): - message_text = f"{human_prompt} {message.content}" - else: - raise ValueError(f"Got unknown type {message}") - - return message_text - - def _convert_messages_to_prompt_anthropic(self, messages: list[PromptMessage]) -> str: - """ - Format a list of messages into a full prompt for the Anthropic model - - :param messages: List of PromptMessage to combine. - :return: Combined string with necessary human_prompt and ai_prompt tags. 
- """ - if not messages: - return "" - - messages = messages.copy() # don't mutate the original list - if not isinstance(messages[-1], AssistantPromptMessage): - messages.append(AssistantPromptMessage(content="")) - - text = "".join(self._convert_one_message_to_text(message) for message in messages) - - # trim off the trailing ' ' that might come from the "Assistant: " - return text.rstrip() - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [anthropic.APIConnectionError, anthropic.APITimeoutError], - InvokeServerUnavailableError: [anthropic.InternalServerError], - InvokeRateLimitError: [anthropic.RateLimitError], - InvokeAuthorizationError: [anthropic.AuthenticationError, anthropic.PermissionDeniedError], - InvokeBadRequestError: [ - anthropic.BadRequestError, - anthropic.NotFoundError, - anthropic.UnprocessableEntityError, - anthropic.APIError, - ], - } diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/__init__.py b/api/core/model_runtime/model_providers/azure_ai_studio/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/azure_ai_studio/_assets/icon_l_en.png deleted file mode 100644 index 4b941654a78c1593450d336c2d784d45179f0e3e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 21236 zcmYhiWk6fa_C4GJ#ft?kPHBP_Ef6SD+`YIv6l-v&IKe6I?i6<~1h)dkiv@RgC~uzo z{-1k)A98Y%4>^1G%wBt~H4~_^qSR~5H<&M8ym&1mEw1|F1quXlUxto~xH1_`cOu>} zzDjF5y?B8I_~-TVMOp?a;>(v#s#2maD#t1I5Fb!1MHEC{yr_x6dNh9Z;)NZ#}lmw|ML-6bs;C!SndIv1NR3 
zKNW$6!sC@>>q&(UIHoQg|ZzSb;sVbqmxXq28jK!KP5$yEo{^wHMs zVd3e-Z!g+jrs`E{18#Ov4fPuHm= zXM_2;8!&(p90o0>rtny43L=b=p*=aseA{)u*Y$XLK@)^0icX8TNblg&s9ioxwVw8b zx^0}1RN@g-Ep{xlEnJLujs&=*KKtvhMX-?z8uc2kR3k0-ljxh#cZcn|cXMM)NDNDc1Jxv*4ga12jlA3bcP#n%DPXEbfkm<+&9JP6zNw6 zgNnyC8$O+bRpCD!d?@`M#=3eIrkDQSuW?GO@K4%g$C+#2Xys4F8Tzhi{>3yc4gG&( zM5omcq>G+-K0gzlfBJJ*)w(z*wzAXZoz3?B&u#?M&OnF76p{j`0MlJ-iI2_;g1^Fo zhTVKumw?;n=ZoyMOQ#7XD38DXdBgKbqwq5>L5Jbvmc!He+^7F!5OIkeueqjE?0U{* z?%SKDChECQc{bJ~ze$ln=U_B00)4OA=#fvVj=zQvT?~_D3Pw5n^?SwuQF)+<_M=eX z>$Bij62)t-^GSOz)Q$gjes=J^!vAmR1!@&P!YnTu?}Qi6sN<9#jGN57jcI_ zH-E}%KkD*Yo6COr&jR?*82EcpXR#RzZ#O~9BRo8kT_3)m7qb{2X~>whLs-+48Svx4 zbUlPz`wbLIKl#Zj>k3HQx(C0utuTq|+6dqtnScF&zo@$&XMX5jZ0P-Renmtntm2Uu z7Ji$^66Vo^ z)N_!ONZ;oT2F2#hF?#DW;Ms{Pr4NcmaMNXk2E<}FVUhuS1CH|RI={tlSeq)$?rs&7 z04kcs&te`|1L<)79oZD)EHGn#gZepenS7_?IJSFX^)xs-u+w&GdR=MnkykDEGu_8_ zCMul7ai93a6CiU9MA<@Mai`$HPT+pWkLn|?kv&+QZnB0nnI_cbx=6|U!tbxr|EdR6 zT@4I#s(Jur{keP3*w>JiAUI*j8-STlaN2)yN$5TxUjtrgqwl%t zF%w!~-EiIDqV$4`$0yz`o(HF|xYzb)33;})<_P-D_8z<4Od6&i$o-ot=gjd?HAY;D zlRuizrb=+lAiTC2e*En&70gf&JnKE@60)S^!Iki0zGSE=VpR8}DZ_pM+@A#T6#T<- zQo*L$H8=7>YS0Y&{*mKxj|tDjOq5VXE4PC{Ct)WN)Bn6@Bl2SY6BgePY{1kQ2s(rOSqXo~qj6Ca z_7=WVmK=ZZ{lK)azfX(473bqQ|MNQuiht|A0NGgscJ_+mAnxd78O8dk49x>VpGFTL zd#jNs3*~w%Zor{qI~fplopELYjcGuO<}H;bddHa+#!Uu|0U3aQ^(ZCW&J%Em00%BC z+COX+#?SkfzB+LEi_!$?*lh^$~iu*N#glceb_Xe|Fo!`8n(} zX6ZUq=8K^tj`v`K={rZ@KrCGx2`EkuUSK$<3NG+-ccuLh}fw|yw_2KZlmDzt=a%};o-(m{J>B|FKi$` z()86%BC%P87J+HvulaDOz^wa2KWs(z&<3Mj+xPaE>ffoxoATr*Q{3vXqP#A+%^?z( z7J-Qt1%;u}zgm|&`&9^OChWJi?SaAgL5iGl5YFF>WZ$>?&f(%G38RTziry^nmajee zNA9MRz{<*Z7jW^B+0DcJuJ_h&S?}j=1W#T4)(!rfrh*w~McCagh{|&ey?KVjlqW!s zCk~vCn2V$oaEjDWq&W*nQJIekTuJN)hel3AzXz6m{uv^DJIS1nss8f~pQ5LLQ1RD+ znhxG@_2S`5fmblcwXg-AS=KW}W!S$pD5|4W+*9g%o-JIzS_iWfU^F1YwVvXG0FCAN zXf^iO%T*-*rmIMX&7e=yYUKYuGaq#Yxg5s9=wX;OUzrN4--(us)7;w1i zObVH0ti8xC>L(3&J}t(B%1qvw8VEqD$TF??nA|ESwCM%xD(25PwxZ%I4TvuK$uxOM z*pBQ39%P?qvj5NOD&)!nQ+MiKOCmMNLY)#pkqL2V(X8*OZW`ml%G8In)XSV-m0PVc 
z(AH+0*WxoSZ9V`e%w6H8?>aA4vv`nHT*{oe%W^!`m1~C64oNyST!~pp(%r{6idPpL z&;PB+LH=K<{!a^4zB{N`NwV+=_h|B?1$fjCBsOwfXCDN*UI#6=Pb==<&%R0FuVwq)x7EUL^R?5 zzw8wN=%SezefK>e5p1!xyFq=j=y9BccH;Do1pqQoPl=iQ8h+_7W|%~pl^~E-n~w0C znXEUI5+pz!#!yp>GZc=+)}e)I<`b>5Ra&4G-@r=cO0+HSyOTA>#l(j5f6BkmSrq1R zf47qUxZovjs~?Ldg_0*rccW07mXKH-g(2X$C{!DT1&Zq+oZcFg7&ZtoQF-)+Qwd;F zioSYT&;fZtkHSDRpeqHN#h`#S-n@C&A;9B4T0?J1nBaD`VtT0qt$QtKRE;(S}+-HeV>>P}$Rp~TiWu}RwF zIk(LAbV&8z`6x)NUjgh0XWcF(I~*@weaU6^i72skn|ce%$_6q$Q+kZ0B)`Rd^!>iB zu;;ibqNO&fE0qnzyC{S@9?%dPi}#>`5N++~NBmwU;78kip3f}vDf?f7f7ceeHRaJY zEKc=ue8DM#E%h+XePz%c;H`K!+_-3K+EH{=|qzE;kK8Jw=7i2hkG;|$)&No7C;6@E)UrxZky3h;bIX+ks~ zA!C)0NMKh1bw{*#-MDXDD-)RF*+H3wf?lkygu&6@THPBB6Efe&PJ zD@hv2n1zQ`GsWx--*690WTj!flJvQp7iq7zgWsy`5+S-v9RZmxh8Lg8qyT-nmbqs3 zFlM=|Mn#%pn83pJfmlbE7st5XdY3oe|I$YEg9x(naOQBb(m(!@#aKUK^aFqebeSTP z$e7sx9Ln1o#1CgjO#8u8fB?VocxGwYXHM5`bV19!E(X>=FJvGf8~qQsbCP)zF8N1M zfjMDf|E8)wdNv$v$urLPV&Xp5Qil1w1nN~IZZhWH5jp%7?H~^}5NA1lDBhp1oB>Tn zR`Obqj$@I+q3>`HctU=F5SYbz8)i-e-*P~TiWNj*_tG<^5TCuFU1TXzj>GnFn>x>RAebc2apvr zt3YM8if=tLZ?%q+6@4oJfRWq3P#DalEId)ZLz6!aaxYlRWUx1ME(t~Hhsu~KbqGAb zIzqYAARF9anRsqe@$A|5Jf9E!g zFIw2=XLj`4H?pfcN=*;(uT>d;U>7gW6FVd$q0Knb`9nynH-l+q&y<<)mn!GPs6UN~ zb6zrb@(pVPg0!Lv#?1lhso|lTd$BB7Dn!h*@OAac2b309afW^rP`(qK=6a>u*@`e$ z-dPVj8DP1yV9#C!9Cn(Y6KpoqcEie z4ZNU*HpBj^c8=bYmyGLGpx1^*EwxPIRo`R}*$T-oSYu>{Y5epJV2& zySyZP1FTbj#z{auqNUD+6=xM+=}AQ5;XGxU=_KVbddVsACeAa8okY>w20s^#JPG8x zc7-e8mD=@)F0J{CvNW$5!Fs0N&DKy>zeb?7gz_rmKsFBTZM(y_g`G-ey78y=OK!h6 z0ds+|5*#gaZ44w!+0nW4f}cD#53h=q{1X%p#4|{Evp-gWCOj@!$luRK2IDg#=>7Mw zU#wdmZ{HdN^(zIk2HoK@fDk!r5ed!m^P`?3!NpPJXlCXgg05Mg+;?KhDWxfMV4w$yuP#>lh5Bg?HQc%8u0%ZfxhLrZXZY% ztV~K7hr}oD^kWrbDS@~oIcXt};CPpKJ)Y?nX9O1lwK&8k4uYAh6j@qoMuikPvtJWn zL^hDbj3?)V@K-HsSr>sOOQI?}1d|sy{?4z+w=*m9c>NGvWK8Pyci|UYE4ZTH!RT4F z!cm_Hos}thkaDFvRv{@Eu|csj`HLy!M&m~`h0{#XNdOQ6chfA zTrpM0_?$px_H}nEu~Tiaaz)(TQr)a6yzgz1fZLR4-1R0DmSuf5AlH=~*|=BGKMQjJ 
zW?_>-8-~n=IZnv^=LtVIr`3|UKLk27d$CEAXl)ji=@(+q2GwUve4N7>eso#MVMDVZJV-$J|=s|_1Rw2#e=(%Wq`BqV37u&iqhF@exNFcCE>#8KP z=yVS{dAdI`l3DBq{SbnK`X7XVs*J;S-bp?yhy~LnQ$vhFl|;s7f=)7K)c)RX;V*mFn?7$hQF z$URd!emyS85Y*0R;3H!(ns0Z;E!`2Gk3vZHIW;`dXKFzHgio6qA+-N$yRUa{IBuwV z4T(Q!t)B%rZl<6J6f(;}hlD5jpfJO4r42|j54WvW3!c+WzRyl1cUVdR^+aeRHNo93 zsR-BSBVWLlGA?om&B3?U*)lrq^sJI4bSC(vieahAJ8vSQ@5ds_vYD}!5H@0JH|&3c zql?B{NvHiWVqTIXPA4~t8rP(t+8En1@Y>cg`E4@5{h zvEOH_L?(`7S~5CrMh>bnUhMz{)kEmS1xos*fvN4mVWo`-e(?`mV-KY_Te7QY2qT3j z4>g%5N$jop9;Aj zO$OL<9`OjubuMH-%f&Vt=eLtWTry}8WTE~w+Exw~rZQ{~SQd3+%J+Ls!6Vt5BV#oN zt~A@wl)@h)wtmie`LsuP>+`Mv#~EyaKk;EM+rHni#aS9BO6kkeiJaW!X?*84uxZBUh`XR(F@+J&A!C-240CT~Qm^-`kO#-&Im6XU;6!EO{nw9- z_Cys2GH1Pp9=#5B0sq7UAsq!Hz{O7Nb+}zZAaWzJJ}hKG?c#$NO0Fv!K6d{x`dn&| zB*RasE$9Mfj8newa`2Wi@?dR;H#%+K?!)x^l=tJj0`LPk zt8@Dy`ym62Rm;LN?1xk+P8$wJv^yMI6y!Bdl{{tY0%isXEQHUE?w0DaAi7iU4cYCq zN{SchQqHNg^fLC(1nh=i?)XyClEU!kHqH0N!AagCpgr^W6)HbUmK{0Ky^SPy7NW+UqcBS>*_TG8#NnmfGk;yuAXziEWR5G0r#zU zw+Bxo8?a-2cVAP}!>A3XV80?w*z>VtT%#Ybu^xsy?1Pq|d}>28vPM|%)LU92m~DlR zfJU&bQ90_C1Yn(O(ubG*sz-$`j6ijtC2ARW2Phi7uU2~^imv_fP3HVam^Q$q(fXr9 zerPDG@V8aJCzsnl>3?zlo+RtPx_P`f9o+^>(y^U6a>qZr8LY|N89x6O_Blld0(B1wXH=OA?1BkS6B-@CXLg_fZUKWTh@yxf z1`oz9a?&S)s!X|NuH2=zaqq;wVg$#hl*FpIFxB3029dID%x^FPx5Zs2Ap{S>jb}{i zp2F|itsE#gJSf!*!W$(FK2Fb6e2ZgS$(Z*N`n!B!5Zq>X@x}WH9syabH966qp#QNl z0^8o+qKPn>`q_ykGT@{2Yej^c^&Ua5hc`5km)cY>Ja)xz3Oa0mj$%ncfeORWxzJSJ zl>2de#9(H_`Vr!%%5(n>#=#Fj3@P6cEg*ZxF5Ljcd9;Q7P+?am;7~Li{dVaB6|fFM z^cWe>B{Q#0vxPE0sdVz&i)6znzpD`PQ&6&gNHH&OjciPMYJ9@Zy&s1}MxQceVjk-m zYo4O=T!t{;>nyM!5XZ>zRm-@6AIds!2!6g=F&r#3TrbPNA-OSY& zKGx0A^Wm4ai{h8jBM%6#R>wUijQ5B zm9M?;T5Zk>%d2|g6$l(+vB}Z(E2)9P{WRXm0 z>0l#$->=t6=hH%1e_)bW;N}ch&dZ?FMitf2MF-ue7%t7nGyWc2@UV&H$|iRF4v!37 zKvFQsN*Ac2U=tgZp|*sV0x*T8p*){d>r<~sppC+gM%2-rXP($q%}AEwPq}Iy(>#L# z6g<@(T#Xg{XND||XG*zI%TA1h^5In@Yg=C8hzX5WUp5D;|it_9bEh+ zjlXjx{9MB1@23WVgWt2L$p{rGwez4a8G<@TRhRhv}*ql$kD)-y;h8mI2M;|Q`iL8 zq_MPU5)Hwqck$2?l;YvG(TYzG0F(O(m^2_pq6JaWBuVSApwx~l*VQfbk{DiLdS5(Z 
z4Vb7f<0Uu}uZ8*B86e4R$r{2&B-^G|Um`RJ50Vn}px6B4fFed2YrxcmORnz@Pk=~v znjvLnlPM3VcaIij#fLRbCB$8y&(G z#nN=6$y<-3>poyq#M*tPs92@p27*9Sc?GlC>ZS9JDn;;PV7wbm%;@0(WKDfMvAo!Oxmeaz+W&u=KiY91Pg zI#i!J<*C~Hv>Q9xi25Epc=LT}tL%2jrG!sEKW#}YsgeBbxM!{J3KNzm*zR`vdAM(-XxtcRa7x<b!>)3N!PHo{^WJ$rD+&NB4K)=W;q4?SxTF-S<+(D-$)xk9vOuo z?)G044mPb%{40&}DddHcfdW4&F1(KdTpuuPH7T#&f7sv}6N`|?8J5#I4vN2j&__Z3 z(e^7E^kCP=QJqN?kaxV-8!#{pJr1QQpsK$SRi9_*Qi}5paA-ZLJ+aFdtglhpp|N_P zV2O#!!OaDd{1oe;O;gf+fa46exneF$@~$&(550 zMl%UAHTsxQ_ryxLqk$({tDH9oLFtMs!UXG>sqQRDxv1|Pp5E^JMCa+!*~GKsU!XZP zrf-tny^z+luDz9)p8kqg?=+yS;PoYRz~ZBhkZ#HKqBRA|+zD7idA&x3ZafPx%iSq( zX7I#+eD$5!zP8bF+;nSjD`UfKEPu;n09s`0m1@jO>{>7$mv5eikgp$b^?YCWd9Qs> zvHG0_CC%mkhcV%3Cfl@#EAd=3BzhYlf`q zYu{1a0n(gix4s=tTLm{#aXxWJ=3F+^X7f8vKE&Hv)U5OB=u#83jj{oMEIUtS@91q_wbu6bvc#|8AL?To~<@By^Na>gA}WB(8rT z3^%2ti}QrMwpD*nWMdYsRiXQ|Y|w>Z&Q$Pre0)xc)Hk&dG{jKAUqS z;EfHfWqagiw{QoG+QCjZIzbDqhDUs;Harsgjn*-yJlSO>o#pck6Sv^kg_rFN;x+;f zyU;rJXn#9=Hj1(C?rZk>p|_jpY46ucR;~9agT8AOsbXO)zHG}m!s|mj8wM0xA>g_Z z*)F#vG*MJvExaO#eiGL%?R-gD^!sa7lqdEPNyOx4=vw0rkki8T{;_70dOWMxOeiwK zIBM`p&TQndxcBTl@)VQC+VPlEFR~VTMokR(JWjZQ&b^a8QLG<@_O);GBbHLJK*P{p zSpO>o)Y^Qn*3twq27DPMO_rNYegDBL#|eeGET|gAM|n^?fobU_u=Rur2+t7p&|aVk zVyOSNk;Av^lou;RKBK|&NyFv7X)%FbAV$rDa$n(zJ=-qTK>i|Eeyc(t7$ z?jEX8jeHU!-n?jK=+bkiZck3h4|=9tWXw}TZ^QljTV* zaFBEGb;=*AnXP8zlW7$lNfc#@-cxrQ>Ny)x5J>XJ45>}fj*`(>;f4P>&G5%C!$&n14E`F;@xtX0uLTQ2dg$rbxB@yDC?N>i`TmkzGV-grNR`~dzRqMX5JNdKnN8V26~|q`HE!wr)1BQJ z^3L}n3J+31(Xphdd~IegrZ*_UuxS=1P-@=sUg`Hx#z0U8*yEv~NovI8(Ddz2hS(!I z-8)4EIlk_QHn(;Ei*fx=T3?8%q+iOHsV={Aa&{po&UqqE<2Y4~n!_901drYlmZ{vz z@RjtpNQ-JxCYjtX>up2hqWZG4$}iLdBR?w3r_8J;i;36V;RN7Ry>_z4jp_S2?2SV{ z&$?Knxf+}osD?%eS?tE^>*E{Ku=2y2J@6l^&dH2|@A}hND|308iMcext!>gWWw*vsT(bpUR_%~Zc{!2%{O(T92L*{yaAYAUWI?&x#A ziw(+Em$iLa^oqQLV%0*N#ThGa9=}%5U;MBkqc&YAq`M;gbNG&%1H47QjUm998m;dm zHY@Q7i#KMU1jRchZV}j!+Q324E{!zTr#oZz0zLcey5;(fq=x4_mMLrPYj1AWfRn1U zd-NdIX(tuRWbzt=KnXQl!Y<+R3COoT(!ATJ^6*nZRiXPP`$g7L|IOE0CdwkxST!~7 
zCExEeuWIyki%wRNM{z&S-a(|h^Kj!s&{1qiM%L*FUd)&=ZO@|zwWQa)XxM?(wCk5u z+-`X+nXjB9wK&2@F1GIlw-;!3<~h&!h2v^izUqxRk+&gK6l?uEf5-|f1MI}ng24h0 z&R6;3I2khV5hzkD~_%03{urtV1?@j4panmnSulr_M=>C3>dy&A5`wZj~$ zUwZial=lD}>Xq74yidcSKFx^Qub4{>Lh;Ckm=tKikT3hJa-1np$EUVqym;G0>- znF!i8d%vRq{SN1Y#$X+#yJuz&g6()CNZw@!QbIOe?H=2W*i{)77-UuP4^a7s%LhwH zKLOZ|h$JD6e-dB`)MXH=9+FG52pH;)$h@(R{(0CxA^<)r+Q;b@i0;9@@>vqS3W(?b zi)IW2mONxYU$iQMwG^a(^lyGV`Ucn4C8cXdV+oJ?R4o*}wzBap)*gal(X6G=oJ&lB zsAE$m&)o^9uziT}-)fD9(xRs}PYDVoojDms!&C!drNJR0D-FG9idQ?+@i^~?{Z=dX z_(poTdUSNeOTFm05LoX8O0o=t^S)`F0&)0=mR`igy55Yb+m@lhVR@(bHdWFvD)Fef6t4p^S)`?T9H{DZN-Q z&*&1(jGt~H&32E#L=F?Rsg6jXgyk9VT@+i> zT52>;HnOc9hFyEXWA$uPp1y@LpaD3-Xw_d1GY`>+%)D~?R!?^Iq0Ro~ahU&){fggb zKGT_s@gU_#efbU2`6SwH=1wak8&$!$-xI{c_>P1;-vh=;jD$#m1sEyyrQ!SnrTQ!+ z#0wI{E%wM2R5Q3-V}90`O*25a-W}8u#~sfhqH5&LX7QYP$M~`a4U9?2ETusosl|W~ zC@M+68Pe}<@REHE2G`oLDSj$}=ePe+e-SX}fIaIWt zIK(|ne3ojAw}UtGR{o~HVa9%aNJ$#`$_4=OTv_NsqaB~2lcZ4Nuq>*6TO5x@oizY8xMtGU?h1%z{E}6oaEbXLFV zZ+&>_Q%w^BM<2E7LH@iYWA1o_>Bnn>55e(1hhj;Jb~1OeDs{6PC&j8s@HrN&7&19!V+~fau&_Z&dn*;HW9D&5lJ{{@56Kksd@?Q)RUN0n6|;mptDuFtBE{T8-a~ z6->8{-z0!}G&D2Wu>dMza}}8k{#P^_Z!h`LQS}G#!uYW}=oGp?R3)DWKvdX3A`TS6 z2oC>=iz=s;T}fAEFusNu-*T?;#?$Y*$oi#uC!!PBq}KQhH@?~${1Hb2us3u6^Hr7S z@7a=2oVx>$RChrlngB$C|s6pksp9pq4Jgb z#&L*WjcV>NV^Bn4IXaao9bd-mh_qi}ucNPuJ>m{ZIM`H9v!s zQqM-Xx~M?ypQ6_1W=(}t^onwOlD52?-pu9Oc2IBmgA--fOp%e}a)_128{}yl&0Pom zmayp)xsiByM<-QH?wmOm=gj6X6zjc*iQ9o$LT1BfF0dBP=~t1UhW*O4ahGo z`f8LkSbM0<6eY9sgg1*uE<&1!F&a4idRX>N8v{>xs5Z}DX?*{tfjNQVqJ(XV531Pb z-hUV`_|*O*E7HqCRt~9k8Up9ve^6;>5Y`Wm!iUnb=T%e+Mn zRnU32M}$rmXb|O~wvWQ>F#U#KE+LhLd~;-|E|q&A@~Lin(yL$as|gbBuY0IGWWL)q zY@d{Lo?j~lw6hQN#NKAjIQa9+HMsTFe^!b31!-P0R!!j5b{L|DXP_{#lop?=w%b`E zJ^DThl5(j~d*l0HCedmGcI*li)Gg~-?ll#hyDqT!Z7w0!!PNy zM<%|*p0A2xbg8k&akH+t;A{THddX>-$pfz+9R}urP{`*}*7T6iKLVMb@KapqLyf31 zDt`sdUx>Xr3bC>us4O9M6`Um>=x*FEjsyJ@-8RjD#zdeu z7_tt$okJFc@P|S1;Cc6wee`h$#?Iu4**)|(F%db_a;^Rm_l)~nr`#Ws^L8vueb8)h 
zMq%(W!6*6XlW&2Cm7JfH^E9#efzYavyc4CHpNGaIv>piaMS%!8L`xWlY@TNUu(J7Q zTWDUVG;G6zq=UGpmD7PeyJ1Cqbco+z<=_?q7^%_Y@o;tQKikA^yY-^kY2I7>%%6K! z@)jvNEd>DjX^gx?TZVutH(|78ScrBXYp|@@VL+@SJBB)0UQ07-;{g*Y!l*<3EF4SK zbJ%L*2ao>NUOjOYhWnI0fWe;7(6n?f46uZxY2oB+<(WXTVxs{2m ze||D0R}G`SCW>UfD&vG@&^?UR#?{$6Ic~uWcPN#%=M{=Rhb1;*^ppfVOG*c8(B7sa zkO6~dR6JHa;UmuvMMsq7UddRr0hC0v;?mbb)*xVuq}~S-z)xe!i}Ky65sA_~eq-W7 z%_*8p5PMY zb%C5e3pRX(HRf`#v27K;!y1C~*$?Ouqyk*vE$^$TK#ZrrpMJ0l&0l;OFP>iRNJNb& z38R1wm%hF)9nQ8XB;+*WHMo5z4GMk?J0?Et*1C(Q>@)O_%UKN&WM-54mIsBTOM`6Q zNKG4wanFKA^93ease~v=r`b3 zbrnU)Wg8F5=BnoQU}|MSn7mbVQJIRP-e|2q@l!mcTC6OrEvJ0t`B{BFB$vOowX2{G zeQg`6MQMNQt0?02PI8hw4jbP`$qr?edaKm@{MQ#M9wb^R3`tgW#t=KBgS-U>S|j}+ zr})s13gfsXDo19MuB(W^&FP|J&qLVa`v!dQT+|-C+KP37@UsJrW{Y(6DLeII~P~iui-=yt@?lit)!KO*%Nb z!$z#Im$~OtJAKoXkN=q{w3fSxs0q>yUiQOB+knvwafa85cJ#6Wbhnuy0aB|pm{JlYC@b-Su#L~WG#_x$Sa9E~NL($_-? zh$e7K;632 zzy03K;G1RVTBwQ4xYWJ!suvZVb|m+lY##NxY@;c7#&l}^GgXlfdwVdUC8?qhOIEd3 z!}_q-{43f!=dt|23ckb$Fq_~+r?W0RvlH8R?7VGARzBkom2zW>n#4azv6yxAUPf2B z{p{_>w=iq}sAP@i=tX`IWzDA%U)d%aiy_q zB*a7dcX2o9gBf>q?}8nLb9^t28RQ=QJ5d|EDOS5BfoN;6pp9t*ouZ-6+c3Y1_OI~r zWFn`{$gA3Kc3*8(x>=taVAyez;qo8(Hb~>QCK>P`W_s-yL6i&rQ=k7Do)*-%yIh-m z{iXkTr{jGl1G)b}Bckhzp)(@Cb6?s8;($|tBe?{AtnjnpKVUQB%N(Gcsl|PqtVG6$ zQdYaH^jS#z(FHx!LsT(~Gy#+(UnLhV;X$Wte1^JUz{d{m@86sD{}xD73Q1Vg^4h8Q zAy{3$mORJ+4Ezr6RH!fIA5uwav#Jf0*1-6fvE28epwaHsKqjeALV0Ky#nV4dJ3(VH zwSsW2yYV$Hp8WXPo;G>;2y{9*N#Jf1S0rpC5m_dSBL7w7<53-e`oJ_S;Gk{}DhiFc zI6p2p+Gx~<#`Hy+y00fm??RIV>$51ItDdG0Rv$r1epoipD4JCBWEHv@K`S09BZI{o z3%pt#dHxJB=fJAmhj}YE?k+kCB-#-!@wJz_m+Q%timd2+JUr6G`T&T9YHC>BP0h*S z)i~eDQRh9oZ;U{$1eaf%R%vX!Ij_;B!JSAW#jhQ%dzuDH9rpg zZgc$fg8FW-HHlO>;CO?kS%iBg;!`lkr7`NoS}Z^N-D?#yTU?yhm_S!xD`+eqw@%p( zO};{*cJDiAoIkk2bu4h0r`pGAwtu*%l*cvmMmBZ7LVV@jLP8v~rG|TAhH1LXqiq{f zi(PG_IlYjgl57uI)T?wFQ1o~NAV_U!h+`3aC*XX)$@@%r5D|D~gf#|}#FI1+Aoz+( zV|G>HG;f{E7kQOaj`2A3MFqK;?ajQ~@=_{GG?er!cbdZVM3uaJ<&usE{4)?9mgOEX zc=w$M--+F+Eu~KIne+mM%;%AXqj<0e(=PsQN0`{|@i|Wbe72?WuzqXluzY}HFBEkJ 
z`+46i%)_x7*lJA`xG<*nPV+D@*7GZ(bHwr(nfkyUUGU4LudorH@xDjd7`0K4S!4=8 z^pKYhCcWi@wjV6*D#T%Tr^~gVf}3M=0f)^$M<&J`#bIs=vtBIwiSb*70FUCZ1dw9F zpq9eN8tX$Sa=l|fY>dwQUpFm798;gKzuNtW@l!yYJPKvf;iiG3z1-F2+&Qk*!B@Eo z&|{gpmep=oHtF>08h*r6iy~egY(lo*%r9CA0at&d8b~K@!tQSTuAj(VjpquO65qhs zl5s6tU<1H`G2bfp{dJRvG0#k`B&I!xSrY(p4iCiR$g)}w28~N&9;_bP9^Y|QfXcXR z;5tI$Tu#*`T|enreZnkqP$;IqFgAVj_$u9j;7Mg-mYlz6uZ-xnS7R&;)DS+_@czn2 z6xxxWWugTvdV~+r4CzJNZuK?3S>Szs!n^b(8v6y*My`h!iD6(thYKm6n)STfRyxtw z>}}yQdC-^*8wvj#g{t@(&3nP{6JFsnNP9pcW-(?XsKXx5bGu#*hCku?ab@k+l2v<- z0v8Am%_~I;X!VsS7iVY_+Lv1X^}sH+^^+{G?L+QalgPF)=Qm5Lu-xraniKyTqu{kNc#RB`&eCzz4!5z07LogEcezw zldN8E%CQ)Cj2IpkHuZJLoyR&gj;$E5z{++#Q!l7eoD{XI%%gow7D$}(*voQMXU4&x zXp#`MhWf$gQ{SiC%;cwcvn27wRbn5SdQGNMxPKLG~Y&t%_yda@US240wV>AwRV9bDIvsPw_h)-ko`u2Q0;VWRXmrSNAkq_d%u$OLG za7&3m@KN7-x7-$N<7@FN_Wdn}(TlY|A;;4}I2?jf;VPHb-;r^g)1nb7SKLX(t{c%` zp@<_TA+6bI&SUlxZT~r*n)iFzBE|h(LNGM6pY}t1QI=*Rq9W4&Y^P)_bzES}A`a~A zJ_!cpes+wu!>_Se`CHed6!D(nbVYl~tZ2+}YcQ@x00~GJjrn5{Vm%x)TH;Z*GZ! zWzqF9{=t?qCBfUZ`*f%bVl25NaAOjHtQ5*faRI)B%msQc&oP%m2s`IeGaS&0`7iK`KQn}Iw^nt=utFW?N7lfd~GzfuMX zV)Jhc?WFmTbT<7hYNOZ(LvhkK^RrvBUBP|lYah5n8qq8L?UpIs(Fz!Vl;gcL3VoY< z=@Hwj5vYiUSRlK-ttdU&D3XH^*#xeW5{S}meL!-l*9IC7hrj<2;o_sUk$3B2^M$l3 z5T0j58#9Lpa20%Yxg@Sw8R!U0F>jo}t@-|Hj*V%16yWyz9PEs#;&CPePSi=N``d#` zTcNhA-PL^YyDnR^p|7ShrR-J#r*H~;(zZ!*Dce4j47pwyH`*GlbG(>PBHk2G7af7@ z3FgKpx1c;J@>*w*7c09>ucDe3-?eMiObO39h?Ftx7U=+&paGP6ik91E?+m^;E~k;I zn$zhJdw$c*Li|^<%`0o?X}4MWH}-baZuu%}HG1tDKOy)p3oYohH4fSi9{y-4<*`dc z5b@?CM$J*wre#Oe&U3>@u=|^XsJ*e-%~G$-lpuGomcFG*#{@@8M|On$=1;`N*g=>W zV)<8|6{Uo=H3fWljb>3_NXd?$o)){?d643nW1C9}W~iY*d9#MVy^<5Rmf-t)Bs=D* zTuAlG69_>+bF}TEU)Oor)BY^`fWH%vy!_~R`Be>n3Fx&Y&9amDmMfYq?j-T8S{1m}jEa4F4UY5h?AZS=F{b49~*i{aCFSM$9!9-_>?T_+fKAX`*?G4ep!e*I@`SMp9* z&F%heNoeaz(->maCw~1?06OFe*j^_Mc=)u`10L=cAEnL{%t8@MS$3M`#x_z<3HP&; z0oyHFV5Ca6OascA{?jFv)-gEm~gTXKO#*W?)CwRZSp?x6j)3+zPT(E`LR%e+M+(X!%18 zKeSaOEnH$4c|gLA-2-XQ=t{|%7=Iy58$JJx0EL7Tm|~m~g>wDjaSt5eRKaz2S5|Ed 
z)N7zE(D+Bcxyt#FwEA`&h%Y}Yh*_txR@nWNH?xuzE*n2-u^vz65AwUp;@R(m#VarO z9$Y&vBXy7w0q4dQG0)5=7N1mZxJdjw=DR%5#EswdE)$b@M#)&paDT%o z6Qk_H<@zjUw6F5pS$=sh#>9oiF-kgUXzeeuRsY(kr0al0)pLL_W6YQ-I{zR~>i~WV zhj<&pm*FfZ)A_x&{2@Uaqg;V#);8tYH*G4W_05$D!Fgq6*WOM$wrqE)E}lxt_4B?r zdc(fWZ?oSnT=?bc!cE6se>lV*l!&-lBw#HC&$oBP?bN*lgGftT8Al^W!tWakZrD8a zQmxfSnJqyYsj@r$3m8rtC9wY2`1ZdqE6~pT?%#~Z_}$c@tz4WI-~1&D>z9-$vk4^4 zA*;OP3HX_V!P?5mkm71YSy99v^!ma^{5!VMweuCjK8Zv$p#U1%Povt1T#bL6z@rOn zik=Eegrfe!_~#C{W^&Z4F3M=ll>Bg`r~OMBMy3CcKu->$i-Zz*j%V5>`@rw~c15;1 z{#OM>xs~Zq-smXL-gWt^jCeo+@L#S|C|1nqNFI>B0%b3V|9&=*@ zErN{}wGLO*uTBOE>>5-Y(whc5>c6ZTVu1HsdG>A`m^}HlI;2L4wW?{NN_oeL_s`5! zpK^po+3yuu-y?RPAuYP-2gNF!60o$g>#A~!7BqPb-jC8r-0V!1m~~z2EG3sign>z7`49`(QB3UEuXaK(P`h*CE(i^xtM?gj;nr^%NiY8DW4sM z2a$aTM+MjU?(Z3Y+|SV-e?A>3zm%oKb80jV89RSC@Z6i)z11EVt9>0!P5BUM-m&K4 z_}F*ZeFyKm*#a=;5qfLYLcmEb(`dH>@S zAO@r6?sVlu@LY5C_R;1aW|W@^y(KmdTeHxqHx>#wM=5{LQuGz~m)-Bx-EM4h5i~gr zJ#%b&mq(s#s>GPRs|I{M-7TN(JWq<0P)n96S{wd--fD9r;ZI&2$f4++x~?K#iERI0 z6IUJ&<=4KEWy(6X5yB`T45cKbEMv(!cHb{qLkqH%Z3Z=DiBy#A`!aU2nhAk7obUt~#PHO}u7k34XPD*+!^DFXk z8m8J}EgU|!m$!FWh@T>$sGrubeXoh+*;2tQmNS@Az2NbRi_>#u#a|iZ`N&hH=DcU~ z;-|F!klFJy5@l;)+|JA{EZDnd1zHVqpl?2zN>Te22lnj|0WXVVc63UVksm7ZD}laz zi~xQ`&^$40xw)I`6uoXzib-v?0Sl1DO5ZyM2e_oZ19M-1+V(@0mW%m9OOx68@O9R` zmW59;iKd3)sSzUPS@gg6?c-u6yC`7kpKM8)c$PHc6PPS3sF;f(QI2{>FIMm~rwioY z`5=S)=B4Xy@xCD7sD0+qt-!0H4@xHEe|oGD~AfA&d&o zYRe^-lyM!KxQMl4Raek*7{89RRDzVznBp8tUBwgnQ(XegU9x&Ze|OG zWR;jRqCMCSZ#XSz;Q4b=At+n)F2i<>X)UzBvYSx8WH~&MuOu=24z+7Kl2ce<$dfTf z)2Np z;WMdsyX)T+bDLY5{8%#f+}P7d=MA78DQQV!2o}AS9}>%58V+-U6$18MR_!iW zvfUT^zioHo?C0@Q)k!WxVb+=$NvFwr2#Z6ya^3?or@3bd?Hp<^JO&I4zOsp%ZaAFX zHpM|4H&c8D&Vik1s#~)IiI?e%IIrzt9m`|Ai>hyyZ5<70ug{{RTUw%>E@eHme${ow zt8(^pjXEO$A>M<`loT}E3V6EH+EGQV%##gef-@6}J@d62qYKZnqvi@P?AJGS*a|9^+}u!UM=y|ch2GBhJvuRR_`${wqx z9^Owo)@AVvCcKKV$6AH2J3r%mix;^_8juEevBot&WOb1i#c9_D& z2s1tG%t4!GYCtvT^!Vz7BgeicTkg3VI5}ph@i|-2gd>f-#o5NnzWT!5MdT8Zq=wS+ z`zLukU?3owOjL*qD>dZAi!sUxNF=Ksi1cL94_*wo$9*sIHnNfFn?vXWWa=`E6|%&& 
z?^oYLvzqm4k~}OE+~5{LGs0_g)w05Zy-Dwo6>{NADiHk;DbAmgksUc=P&3y4#gWP$ zwq7?AZR#yZQUIt(eAqY7jum2I=QzPicq^n>+y=#}xg|^OakK`oV%-zu+y!uTqf(;m zKjJSjJz}!>M$?LJU!w%eDFm_D4QPjDoMT9xCaiwVGvu=CKE^{J*BvLPr#S+pwk6WIB(Vi{Zf*m8Q7E^;2mrtS z6O|>Z?n|dwqj%u9=y2=4mQ$O4f>3r-3(z*OQG%ri#eSIEBAI@}Ya{;^2^n7x=xZnu zr?8<4UM=BYfS;`;aJn^dG7fe;z?*_5_E(lL(3^1ZwPDkdO4J3S(@UseCA_r>-q31T zzV^)0Z~|<>v1uJ&OYL*Hnw4Y5khI@rZAS93ZaGZug7_TK-kOX#G$8Q3C5wZb%{ zDRyWwyP4#0j!Xy1?q4=ckitBqtuD=SHRynD3Ryz!I-!qQhv51Xry>gZ92|qGt?<9N zQj8As%M}l}T>!VWm`Z73`=s&huOWv->VGUFl6n>Vi2k|cwIWRPe*DDiL8H-@h|Dn5 z9d3~!hx2?_i*;=uav7;pd(-jDYpoo}ct}>U4oa6G-tAAErtUa}K^=!g5?bRu%hPy5 z8H!{^&|7=aApY+tF0$|UgV~JgWg~X5(t_VAv*MI5V7>(iQsbu++hbL;h>e}ZqlM>T zJZGpS(WATcBiQ!n%isSc9ewF~zxzb%2xR!FmZ)mL>-&z!X?`>*<_5PHRYq4LIPqKG z=*;!h<+vv1#sIFWI5(I&`^pa7(KBLIQgfF@#w?Y7?Og5{PUlEr_C?g*wKzRK=gS))}HEE(VnZl?mE6*xdM|LI;4OzliqTqs$KI$w=Np zGAuqzGF?65_My)Bza|H7j2wWjuKi!ff?qqZzsqHw>jd#Uj9#It@BRRwU|`2jB}ANf zJ3(9bf8H@HsrGSa+gSe~O$=Z3fa#+%u&r1Ke64d?%Y z=`i}-^de1=Kgxhp-C}7jp|S@PZEirDJ*rq|xsIK50A2P7#YDbCOVJ-r1&!V~TMi%I zQM)hR$Cd>gZ(8+%u9REKm6kfqQiN=!azSWHdn(mvVdsftrIW7U-y>Gv^s3zm?BOi` zVuRxH<~Hd6a$XMyp-<=hNre6Njq$S6<1#@{+(gekKA1*?Km)yy25;s<%QR?$mn?__6OUX6l8*Bj#-kF_|^fE;>CPWev;J>tsf(|sIAbX z_6XrO6=k=@Dg*-rDL?iu_Qf5YKck*Pd$ z+4-hMNiIjrVP~4%3nVk`JnZbOqo!%J#OaEV<>_EQZ4*K1#gr>-^EfTyeJ6&>{aC-%crQ`>|6; zy8~PO2T~_o;FAM-^~E(`|NSN+mchzkdgeiUYNE)77-fxk>#*T{2Hi2~BWW!jO~E@0 zE$-2wy_Ry)cZ_Eyr5$uSQ9?BHS5~v z5%+0*yntPW?PQv7M&nHo6YtNLX|jG)!7QC+_rDF;5b!oVnEK(l*S0ow{@6By`c5vL zLZHb}pN`cFV>LficF9+%0&cMOna++27-8lp!fxPL2$ck7!GMz_^Hgul5>gN9_U+J= zQo3ngyLsI6-fJ2P89fdiEx5RdQ5U<#hFcvpcma2<$JnoQLk|aahTCH^z0Ws3RwVN f`Pcf8yeB*B7`$HUNJF5SRMO}|4K>Tu?VtSz!T0V{ diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/_assets/icon_s_en.png b/api/core/model_runtime/model_providers/azure_ai_studio/_assets/icon_s_en.png deleted file mode 100644 index ca3043dc8dcb19139d4d8c8f179f74ae397dc217..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10541 
zcmZ9y1yqzz)G)pRKU{hV=~{YGIwjquyFpr%a0R3hmK2t5SfrGaZUO1;mToBl>F)g2 z_x;ZQod0*8GtZnebMM@_cW%twnJ`Ut1p-`ZTmS$-prk0PjjlJ)1fwg) zcZ%9-004pw0Psfw05|9+|1AK(jTZpec>@55B?AByj$a$JB+wn$Z&egz0gwN_%;x+A zbPJB7qM-`_K-l@OJn3~PutYaLbyZT6d%6XDMlSG_bo9s>0ARmTl7;Ge%}TN>6&Ce;wERui3@&Z9Rc0MpWdGK8OEAIJ8{< zY4)6$n2?lSO7ol$m|&RN8Fux{&$r2wKD^!6xIANitxFhl_;~H=lI=tQ5X_bEYbAIo z{qc5&J;raHK|P}>vF%>s>DHkTB4bhI@&d>WSUESR^W5LN&UK(ioUhh~w~fmywKjG1W5eU$iTxixQt|?q zEs|_#vuOfZ)gIgHws=*gq9LQUDsc!DyW zzI;e^e}|#;FR-3e=f3OFC%;=kbvp4UjOobLvnBG=M@RBrsq=c$3}fz_6td5oM{=+6 z1Fx^Auex$oyy!-0NoxcwPc)Zlp7`VWCkxWYAl&yBE(U>K>by_70!(M8aS}TLw`Mvl zlg19M(G$JoD=ZhW0pDJezkI~d*=POQ*2;ZzP4>Cx->~5!Qnx$5)siwWUOv*w(wu#c zP@r-Xi{mu4D&>(GV&ao5<8B^y`TJ{)Liz?g>g1K4uyXc(?`(v2mVF0dfa)<`$Nw-?$RK69NXm&V?9Bxr^4*+Dn{^@-Yvao9Jx*)nmKZL}5G?@Z;C`Y5C=j;# zzvH>bc!i;noL#=%;QcGG{JwuC+}HX$l63=kyb@SiI1+!G5qk7At7qAZM2tVwv+~BU z8iZ7*k-mMX9W!j2E&rRzqMIkM{LtHabGJ-(e<%N+%@xy&_?4;*pZ$Z4h;QVi5B_84 zK}!`4cx_hxjlX|BWISJgI?*h#dgc3)WiITZU9g<6grn1Hs{Ywu%!);`V6OuvTG(MZ zP56yp>_xMnDS|iZRm?AkR0f_Ks%TvOt$r#OqMAJJ#w{trl?4CgHaUy`I`F9o!#L7g zR~{aivVtW;z>I;&h)lG`8jj)?(i;Sc++$ zozgyg0deWLe`+K?zuvME@DWAxgO)pd9T%Cfh-XX804wU7mZnJP5x$ZGF_0JUXx!JA z!IidTV@IF3b*`8xszz5`s~AXM{m9PP<#?l%g86V1W7gQ%d7#5R(I7~#@o}qiM%NQ^ z{^R&wHY1XngcG=QAFt^(3{_MNWdsM|EVzD^l#^c2YK!N8ch`ui(1MkJ%#k3l?Zx@C zcfxDP83)Q8i-%+7jG63Hf*laUY88A{oAL6*)BmF1N90?t%QJc&Ro#AOw1Ys1nZ7P5 z!92T{rMLm0xuocVf?=JlDO-}l6lf~3bq~jsO&jafYhKQZ_6icrD=|=qz zqV!m4zAayaxQ}JKtCD6#_CDSSTWD%P6DMcWm3L$PBJs?}2x4?dXccP~hmZ583)%7U za~g{59iATjCYG9y>x}2eTx@PV=X2lEB3BCB%8FSc)3y-SI2%=$c8^ZpjF=qGI_Hxpf5*Cshpjns9pZ}?9@L7y&5FjKRo^PGAlouR(_{0F` z-rZQq26YcqP`nK-B7`?P3QNFWiBPq(r@an>N?4E(4H7KjtW4cyIPz7bJHNVn@V0Gg zKO@6Tv$E1<9W7{H;y^ME=Ku;M&Ax(IytWUV)UDHb!$R!+Kl0pM|HV-mcmh=!v7xFq zWz%!qlKCB~M~a;tEVYh<={W?{R#yh~71D$spPaCOgC=Z>Bn01f&OE8G@!LgU6M9&@ zq3|u(w!gxF^G?b>hB^bGZvtq*SXsXbTV}r&t}w4(ZKyZsB#8XMN~hURU?6w_dFrn+ zVgTAwxVaap&)E9Omjnz(Mb;^?U_0wFpN)>>!mNu8w3U;^jFn!KF z!9H%xxDu2TfuAFd#qlezzYKBX`kD9>EBwew|&&pI&7~(dI$j$bLTYi6k*9sPuuyo 
z&L^$>&xHBC%P9}ou_fQuMHW zJN#y{65x~XOSD?ezcaq9G`y^!Z+CNumMtYl1+SpZi3kTG76Xiy5qT)LZQ(U_74Ihz z_Ktdz;1ZML#)-Uz#D4H7}qRX|I4#e!B zK{>>{8U#Oq+2r#V6TjBxblml}{c7rGr~M!k1NuE&aNKAYETt<$vcrQT*{%mJRXnQ8 zP|%!_Ngfk;OM=~s1tfGlok*iE?)rcw?5+2v`}&6=+fFk93HShOZFm)#SrM9`3Otd} z(}Kl{Jghdaf7KJ^Od`h*(SwnwiR~_BwBw*2KE2?ABxXu7HQy;&<2o+CSR!|p)|4hM zU`~{f;!Gl@WjMe2`9T}!nZjL>N6D4*kxC}$^9frP(=vRK5S4)v_D-yLS0>cmrdn4{ zNtcZP+oLLrcK)P@F>?(6Vg}5Ge9v}%u3klY+8tYWDW@HlMXaKXGrT$b5yUnndqTqo zK!zC`fD0%|+z0mx%YScsEj(TXm)k)a2ng_KS_aYZSmTG`u%2A zj5$P93go0fHHh)YVl_+?)l13^&-^6)9hrH&vGIU(edJU?t0$ew1LRYU2E;$?U<*&= zEd!TeaQ==4ArBV8g2rT0QaNa{*|_r(=P9R5rBmqi{ayplvaYK!HTJa5#nG5gZw;R# zUM8;2%a!B!Xeh42ttPTMtHMB7=ZrLv-a)WDcw4{wv~P8WOwhm@qxz^S>C_~c7&y0eYa9A==q zn{>0!qYhPeWq9OH@L+?pp-xR@(_XMtHjsOvAVfW&H35fA1t#~hw#o7SkB(ezA=IMN z0MP2SJeCHqOOjC(8X^xsougD;VWTS%{%%T`>!i_nT=|W)O{8t+Y;|!x?eQc4@hI_i zK|N`ny!{hzuU#9FQzCm&`j?z=|aF4Yns%{K;}icGg#N*v3+`Qn0al?;t|FZi`I z)U_rQG_;#RC~G^N0u@LQWBwbGV7nA+P3_4Rjcf5shUIqIw9gfFc4IIFs$kxak zH}T8i zho{c>hIJ=#2$dU0)ZrD@|w09p8xEROa|<)pdLSd_e->V+mp(%so8`6+l;Cu zO;Lz$bXoc+GmqgplU~Id#EJvx=7X7M+^3{VMc&@Ak+zLzXSuB z2q7V za8P|fODan8IGrz<{KESCvw&(Vmsw#Y2H}r-O*t@-=2!MeX`+vIzmGf$^_dG;A>|q3 zlHIBTN@yQ=-{we-g16tT1u-Tz6p;r^{=EpI@1-Td_1l{Jq*>-`#ica&-fWhjgC%z8 zPj|kaDG-(e!fN9n!7hCg@+~I1DURT?Q9Q}s*)mXR=Qx&w%MZ*^RQBX;qVgcWpO@jB*`M>WvV*eIqKXMAm7G;#N^x0RZ_S}%bB7_Vbh-axp_oaZHk?{C3^ zx~Y@hH4>3%d50Xfzo z`mSSg`)uEk-ACxfc9Dk^6}Gm*&)Zd!?|a!XZf&)g9^CCg6w&jZ$`sWqe}4wRws<15;q#C+vy$Y#?hCbOss(yR|{_u9*qaE z*!0ZRSaRM3WY(W#sZ0fL{zh_c4RE;T^S08A}@zX_SiJwz5zT?4Cc&)@?6$}4ly zHPR1=9ynOsQTl596Qg&gl!3BSGC=h z!X=M4&Wg|R&KpN@si+W9;n(ux@06TgXyGn!q`i7s^7Gl{q3=Df)X9z3zK`DW?wukK zU^7$Fk;X9V-@07!q}7NIC!xK~oX2#%@!>YBcmzY}1T}QI7Pes52#NH7;F0v&jEDJu|{bCPm*@2vy`$s-v|}?sR_t)FYf(^7DTBN;;NDv~-57b^_5P(VTZlb%aE9=wB)^t)7{kPLwnB z2)FCFpPsO1CHbDo&>R~W*mwU3!$DCEdH;InhnSlAunIA){Sguh?NS_%!J0JFSybOW z&J?8}L1oqIBtq7)B+_?gD`xQL5QNEV`wB`hB0PpBj9=a!&wX1yJS%!KkC}OsvnaSs z-TU$rIU_G=XhW1T8+EMu9dYd`S-?1nHwfHU#h?!yNJE+pY@h3R9PE@{`RUkd^4*bZ 
z21;$ryF7Qts4iQs@Z!?FNPSiP>1=Q9HhKFtYz%&kW$^jt31>)?^{jAwylEQxY>6jb z)KD-|SRBC_1aR%4z1wQAJ(>*G9(tlk)Ulq9S<02jo{KlXLC}L8+B2y`T}1>Z=Ji+!+M38^Gcrr)nEDbzqpQUNG;gOtCg;IBA{)j)N~WnbkZ)9`ZV{EO|^$a zjdVQk9JS_74~u=BRuaVclPfdZOA!dGVIeynm@+;ZCNd9V&8JWJ^#zTE$g+K#tptnR zNoESld;u&Y4LDm8HUoxJ8!4;6h<`2B(JlY3Aa;|nQt}*r2!huJQkAzq!BJxIP&=f2 zjwOc&ZI{|uVPRPnxk|s@U+((!K~XWYBgd}R^9yY%ttkFanC4q>fr^vR@A!DluIC2k zVeGcb{~%a;AFVb&cc-P|K;b>YZm>~C7BZa}hZYR;$5B1L^HO0>r2%`F-rBl0d#9Y^ zlM>K+E}lXzC_vFZH5AD-Qfl_O+{A@zM2GpnIH!`ahy!BU@$r=!wS!+~>?AG}7uej7 zVj;WD`o{Nkx#>Wx$so`ohC|m~5lJ`CDvWil;#U_tjSIC{U<4<`YWC=em_$$*l*s

IcVh-A3DEZpl9*{_K(M;i(VAon}%8Y6q8xbLkC~m%omBtiK4y(x{~`poR3-)h~Fh z74clzHCulg!~$~|8cDQIDY9X)vSvj--p3*E#Q8ZDl-c&ePGXA$Ns)}l$uA_*xrYb) zwSgov9mFw80U|YaU5_b7X`>B2f4LbED*%dCL@{LZu&WTlt z^it3B$HLw%k*Ohk&}zFpZCWzMq-@9Jrh1h;BK^Jj&P-xXD~H4bjX@_<hR|oS05vrv<2{SGxD7ciWRjjvpS4 zh;#!`q!CHO7}X;S2EnxV^kp6)|{sF}KN}1^~ zDP=r^)%TLs0ArSzFGmo?U^~HB3Ax{Ag(%S7m?_ zm5O?Q`hpB2Fq}m(US9{SSmM;skKHes6B3m8d|y)$i<4J3$ZQUo zX#tzBeofgrgh1neK^bpGJ6LpZEwvmNd#!+U!C0(PHJ2~Z%*-V!0hI%*)F+LPVB7t+8R(U_1=u z@Mi;o$s0o5)s?tV_!}jvm~pPBu=f`-22NCXR9fIfZjHpUD!g3Cw27*>T~|xfu+xI- z=LSdYjiQOOOS7jceU=>G@+~(FJxPd&R)oHz9RL{@Rw2yj_x-cWk`wBf@!_pGkL8YD zfSvwE*!?$0dW5IqSDT>Bby%#oHZ;t4BYu>^^&=dIn#lrMSZF!h&mU{ zD)4*tXT0F5jmR%tXx4sY5aSpgn_dFfB*%Y{@~`5OZaDjf4$}LgmjC)aaiChOReR!G zknd0qr3`1P!75o`X=g%>!|X7Px19hSvGguY%SWxtt~bh_Q0L1bUtqJMJa5;}Pv8e#tO^6B#m*P6Cy&l6nXW58nRIz2}pe1PfI|kL{JXH z4e+C(c2B2=c3GWlBRY|bpvR^P+1oS(b5wtW0deEixUODnq~PZ!k9Br9sLRK3JK_(W zr-qI5lLtto-60tb{StBDg~{0R3C{P6$6TQ4)#F&?S2<*L-Uu+>UOS|gPauHOpGGuk z$#_0-Z>`Nbp>kF5Qr^JNv79Ocy;h?&@<0;yxuE9Ur9h(s3qeSG6CR&5?Yp8G-3@B=K?MBZ^6Du_d6#|~og|B-_=YCa>iH&0ZgSgdy(|c?+tW)r@^Jq z0)n3f+)3p+?!$PTULby`nax%J#~QljMPZ)a*? z8hu)dB({U%lP-G-DZb`aVgu0;m8uwkmnWZ z8B`s(VMQ@cNF9JdIY$lU4zMU;7(sZp`{oB>3>^Z3aU>7W`}V=1S0@z{wY4x!4lV_7 z-=RSG-@gy9b3OD`R3wv+nBGuA#2wv*vN@yB3hr%#6~@XUSw<@mAfZvfo|Whk@BhjH zfF0-j_TUWF`R$w25XZbN!hO5hTBMh(9LG|mrv7B_)+dV{A=(u$fID=j(#EK57CrKU zEUWke<4q9bZ=XJ6VEn`)kR<%}q(%<3K3?Sy&0)#E9UOv7tTLKa*_w|~K^vN~oAVA? 
z-B_yW`JY5LIlup-;t$R-sq(X+rdwpo%RNP!z6NuAmBIycd=MtWqjFbdA<;n4$j4SX zg}E1F+l(iadXyIyJ0u1%Ml)K3W?nq?MZHOC`D0H5RnD%fFWWdy)jBUN0RAtTy+B8hzx1aNMf=Utnq8im!cK&2de3|9o&1$fk(ZbyT{XI4GMj{~kLhJ7a*|rvZ32%tIDJW&kJhT3>H!|Av>=~eK<>~Uy zu!~+6NW;^@$vF?lh@#|uN^_pa`jJ(V8t_D)7G$5?DFeQk;sUhQ8a)+x=;a86DbB6* z(CgvxMcDQ4?{Z_+zv05syA|p=rO^M@_tHseVzgD*$E$9S)l zvTQQ5vsP7w#P2c($NjGs#R&vt=gsjAE>)S8f8_fkLF~6vV0|SM6c_)`49G@PsJ7PL z;LeSIG4%l}(vsjLRJ6F5Q+ea6ermbyJ9B|Y(?B2w+Fl~WtY#tX-f^bM(3Z!H)-<$#h==r()WdX^CAc_rEV!JHM}WQM4I1jqtuPOilPZz9XFMGocV&AtYwAFk zDka{x%FMixdXG23Qrao7==RA1`yX+-a?g$2h%C4}KhEwtB<0A)fnm|{k=H6R5~aQ{ zm!`n5{G#&Np^)1-ojfp{)8(tUkG6bV1vcr^^iQi&j(fj^ABHRbi;6^HEymegZ^m-j z&u!vCoqF9wm82_ErKtxaKhe4~h$7zq5vk;KVaYLRJX{y%?JnyScrnCe?0n0Q&NB-( z_VFo~O;0|-&IpJI5!3nGf_Tx6^cSSWfrS0=y3&qkmISLFS8k97P^OQ5XSc{ji?wwx zAmjbfl|c->k$7w@X~-C|yxGk|KFT?ydgCbdo=TFMMBg&?_o?G`n0sJLt)t`3RFlRR zamdW#<&qk5MNHy5($MnxlI_lpcqE8r#HX}NP<%6q>I{4ITK&6{2L1h7&nE@Xn?UV!W*RCj2*TE_r3M7R9DX5XEB0R zMxqIAqTxEhw?{(X-8@v$fe>4~RgT|Md7!_3!WgiqpY+s zeF4k}dnBg`KZ4RyURk|@P~%&ZwJG{yID=}B+i)wE3PPv3R$u1EXg$9xSnTkjbB-wv z=YO>UP?Ptv2cg#7Ol7t3i8TTyobDX@qhBmeAj;k_h%sUn4cAZiV71u*sv?BNb~{q0 zq72JAb=b=dpQ4%N#aC0FapLz&sVD4G_4;Ogu7;Lim*)R?mrpArn_7IkLgyFODEnq> zGs@}@@zV$p?^j~5~`T-)Q>wR<#~ zo0TkEBev);!OIJYFbT=$Cg>)soZ1*1CBs)8$evj(RFUBev*R?bBpny%ef2-#E)^;u zXFy*-@p57|S{e9uH0AA~>Nzh6Qr}9b(IA&{B5N87mzm#r$EYpb7Y8$N2{jA}eoHcE zQs+SKBV80cnR7P1iu3n%IsC+JQ{Hl>MI%K>J3-qdM1eq{58j-&B(+ev9hH3uor+CG z5yp?Waiu4}YVJ$v5q^HFju{fsa!~|+<}!UCULQ4kZFMt|48;>@VH;{3f%<(+r~0cQ zRq1OrHiN#!JwbH^wh+I?rrTK;QXn2DIg%fnF_O!{HxKblnTu>;(r zVj@Y1PqC?zZl*FLEg<5nVq(WsSQ4+kD9tV}3|EvLn7X{bd1qMK`91h$SM|E?LJ0I1 zxeuM59FsPQ{IPuKc`BQ^O>*CUD@VhFp4{PB0W9Oy(Z`+Y265aoG&e z#Lx%iCTODMHh$TyVn1%j~vlp?iZt5YH7ij~wSW+A@M6$(xG_vkg*`Ej#zO|K>i1A_X>y z56Tbm29cO?h35xV9H9zDok#2!J$GCTxF}5H#F$nfRmLIgz$ytO0i1caOO&YAMI6Z1%}w zq#&o}=+RF53?;&w*zvu0Cz)wlG|wa}K#DI@-8?#YBa+Wq*fGuN&Cef9<;-j4D6m{- zRt*t;JFa6q0%=ejB5QBcbsvr7>vWwMyDOuy8TjgV95e_dHP&CLlYMI{$K&m$h<8W+ 
zZ1=Z(LVJ(0E!%2fHVyzO56&vawT@a%5voac@@!15jLQtGpU`<8X&9oQ z)t>!)RN=VmjCV-VV#WCHVo&~_Xiok6=)Oe+F z;gEj*c7z(BjhNyNEA_yDi)t`tGrX|-(A1>qPLaOcE(&+kQ|*U-z3DlGVk9AYSIXkQ zuOB}9AMCi1s{c*lClzqjl4mr2+b|^U9-6+Nc^devctqN(qXlWAbhdEVwYXh4SaAAR zJxs=MBZ%sA_4>LFpc%MQfoJAuA|WpI^NLiZ1Hm}yK7+nXJTP%CgEYJ~v?$ra&1rin zXkIN~|KdS9^5Ge9seI3 bAwDr)k^d#K?0= None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - pass diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/azure_ai_studio.yaml b/api/core/model_runtime/model_providers/azure_ai_studio/azure_ai_studio.yaml deleted file mode 100644 index 9e17ba088480db..00000000000000 --- a/api/core/model_runtime/model_providers/azure_ai_studio/azure_ai_studio.yaml +++ /dev/null @@ -1,65 +0,0 @@ -provider: azure_ai_studio -label: - zh_Hans: Azure AI Studio - en_US: Azure AI Studio -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.png -description: - en_US: Azure AI Studio - zh_Hans: Azure AI Studio -background: "#93c5fd" -help: - title: - en_US: How to deploy customized model on Azure AI Studio - zh_Hans: 如何在Azure AI Studio上的私有化部署的模型 - url: - en_US: https://learn.microsoft.com/en-us/azure/ai-studio/how-to/deploy-models - zh_Hans: https://learn.microsoft.com/zh-cn/azure/ai-studio/how-to/deploy-models -supported_model_types: - - llm - - rerank -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: endpoint - label: - en_US: Azure AI Studio Endpoint - type: text-input - required: true - placeholder: - zh_Hans: 请输入你的Azure AI Studio推理端点 - en_US: 'Enter your API Endpoint, eg: https://example.com' - - variable: api_key - required: true - label: - en_US: API Key - zh_Hans: API Key - type: secret-input - placeholder: 
- en_US: Enter your Azure AI Studio API Key - zh_Hans: 在此输入您的 Azure AI Studio API Key - show_on: - - variable: __model_type - value: llm - - variable: jwt_token - required: true - label: - en_US: JWT Token - zh_Hans: JWT令牌 - type: secret-input - placeholder: - en_US: Enter your Azure AI Studio JWT Token - zh_Hans: 在此输入您的 Azure AI Studio 推理 API Key - show_on: - - variable: __model_type - value: rerank diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/llm/__init__.py b/api/core/model_runtime/model_providers/azure_ai_studio/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/llm/llm.py b/api/core/model_runtime/model_providers/azure_ai_studio/llm/llm.py deleted file mode 100644 index 516ef8b295ef84..00000000000000 --- a/api/core/model_runtime/model_providers/azure_ai_studio/llm/llm.py +++ /dev/null @@ -1,334 +0,0 @@ -import logging -from collections.abc import Generator -from typing import Any, Optional, Union - -from azure.ai.inference import ChatCompletionsClient -from azure.ai.inference.models import StreamingChatCompletionsUpdate -from azure.core.credentials import AzureKeyCredential -from azure.core.exceptions import ( - ClientAuthenticationError, - DecodeError, - DeserializationError, - HttpResponseError, - ResourceExistsError, - ResourceModifiedError, - ResourceNotFoundError, - ResourceNotModifiedError, - SerializationError, - ServiceRequestError, - ServiceResponseError, -) - -from core.model_runtime.callbacks.base_callback import Callback -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - I18nObject, - ModelType, - ParameterRule, - ParameterType, -) -from 
core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - -logger = logging.getLogger(__name__) - - -class AzureAIStudioLargeLanguageModel(LargeLanguageModel): - """ - Model class for Azure AI Studio large language model. - """ - - client: Any = None - - from azure.ai.inference.models import StreamingChatCompletionsUpdate - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - - if not self.client: - endpoint = credentials.get("endpoint") - api_key = credentials.get("api_key") - self.client = ChatCompletionsClient(endpoint=endpoint, credential=AzureKeyCredential(api_key)) - - messages = [{"role": msg.role.value, "content": msg.content} for msg in prompt_messages] - - payload = { - "messages": messages, - "max_tokens": model_parameters.get("max_tokens", 4096), - "temperature": model_parameters.get("temperature", 0), - "top_p": model_parameters.get("top_p", 1), - "stream": stream, - } - - if stop: - payload["stop"] = stop - - if tools: - payload["tools"] = [tool.model_dump() for tool in tools] - - try: - response = self.client.complete(**payload) - - if 
stream: - return self._handle_stream_response(response, model, prompt_messages) - else: - return self._handle_non_stream_response(response, model, prompt_messages, credentials) - except Exception as e: - raise self._transform_invoke_error(e) - - def _handle_stream_response(self, response, model: str, prompt_messages: list[PromptMessage]) -> Generator: - for chunk in response: - if isinstance(chunk, StreamingChatCompletionsUpdate): - if chunk.choices: - delta = chunk.choices[0].delta - if delta.content: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=delta.content, tool_calls=[]), - ), - ) - - def _handle_non_stream_response( - self, response, model: str, prompt_messages: list[PromptMessage], credentials: dict - ) -> LLMResult: - assistant_text = response.choices[0].message.content - assistant_prompt_message = AssistantPromptMessage(content=assistant_text) - usage = self._calc_response_usage( - model, credentials, response.usage.prompt_tokens, response.usage.completion_tokens - ) - result = LLMResult(model=model, prompt_messages=prompt_messages, message=assistant_prompt_message, usage=usage) - - if hasattr(response, "system_fingerprint"): - result.system_fingerprint = response.system_fingerprint - - return result - - def _invoke_result_generator( - self, - model: str, - result: Generator, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - callbacks: Optional[list[Callback]] = None, - ) -> Generator: - """ - Invoke result generator - - :param result: result generator - :return: result generator - """ - callbacks = callbacks or [] - prompt_message = AssistantPromptMessage(content="") - usage = None - system_fingerprint = None - real_model = model - - try: - for chunk in result: - if 
isinstance(chunk, dict): - content = chunk["choices"][0]["message"]["content"] - usage = chunk["usage"] - chunk = LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=content, tool_calls=[]), - ), - system_fingerprint=chunk.get("system_fingerprint"), - ) - - yield chunk - - self._trigger_new_chunk_callbacks( - chunk=chunk, - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - callbacks=callbacks, - ) - - prompt_message.content += chunk.delta.message.content - real_model = chunk.model - if hasattr(chunk.delta, "usage"): - usage = chunk.delta.usage - - if chunk.system_fingerprint: - system_fingerprint = chunk.system_fingerprint - except Exception as e: - raise self._transform_invoke_error(e) - - self._trigger_after_invoke_callbacks( - model=model, - result=LLMResult( - model=real_model, - prompt_messages=prompt_messages, - message=prompt_message, - usage=usage or LLMUsage.empty_usage(), - system_fingerprint=system_fingerprint, - ), - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - callbacks=callbacks, - ) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - # Implement token counting logic here - # Might need to use a tokenizer specific to the Azure AI Studio model - return 0 - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - 
:param credentials: model credentials - :return: - """ - try: - endpoint = credentials.get("endpoint") - api_key = credentials.get("api_key") - client = ChatCompletionsClient(endpoint=endpoint, credential=AzureKeyCredential(api_key)) - client.get_model_info() - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [ - ServiceRequestError, - ], - InvokeServerUnavailableError: [ - ServiceResponseError, - ], - InvokeAuthorizationError: [ - ClientAuthenticationError, - ], - InvokeBadRequestError: [ - HttpResponseError, - DecodeError, - ResourceExistsError, - ResourceNotFoundError, - ResourceModifiedError, - ResourceNotModifiedError, - SerializationError, - DeserializationError, - ], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - Used to define customizable model schema - """ - rules = [ - ParameterRule( - name="temperature", - type=ParameterType.FLOAT, - use_template="temperature", - label=I18nObject(zh_Hans="温度", en_US="Temperature"), - ), - ParameterRule( - name="top_p", - type=ParameterType.FLOAT, - use_template="top_p", - label=I18nObject(zh_Hans="Top P", en_US="Top P"), - ), - ParameterRule( - name="max_tokens", - type=ParameterType.INT, - use_template="max_tokens", - min=1, - default=512, - label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"), - ), - ] - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.LLM, - features=[], - model_properties={}, - parameter_rules=rules, - ) - - return entity diff --git 
a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/__init__.py b/api/core/model_runtime/model_providers/azure_ai_studio/rerank/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py b/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py deleted file mode 100644 index 6ed7ab277cde02..00000000000000 --- a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py +++ /dev/null @@ -1,164 +0,0 @@ -import json -import logging -import os -import ssl -import urllib.request -from typing import Optional - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - -logger = logging.getLogger(__name__) - - -class AzureRerankModel(RerankModel): - """ - Model class for Azure AI Studio rerank model. 
- """ - - def _allow_self_signed_https(self, allowed): - # bypass the server certificate verification on client side - if allowed and not os.environ.get("PYTHONHTTPSVERIFY", "") and getattr(ssl, "_create_unverified_context", None): - ssl._create_default_https_context = ssl._create_unverified_context - - def _azure_rerank(self, query_input: str, docs: list[str], endpoint: str, api_key: str): - # self._allow_self_signed_https(True) # Enable if using self-signed certificate - - data = {"inputs": query_input, "docs": docs} - - body = json.dumps(data).encode("utf-8") - headers = {"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"} - - req = urllib.request.Request(endpoint, body, headers) - - try: - with urllib.request.urlopen(req) as response: - result = response.read() - return json.loads(result) - except urllib.error.HTTPError as error: - logger.error(f"The request failed with status code: {error.code}") - logger.error(error.info()) - logger.error(error.read().decode("utf8", "ignore")) - raise - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n - :param user: unique user id - :return: rerank result - """ - try: - if len(docs) == 0: - return RerankResult(model=model, docs=[]) - - endpoint = credentials.get("endpoint") - api_key = credentials.get("jwt_token") - - if not endpoint or not api_key: - raise ValueError("Azure endpoint and API key must be provided in credentials") - - result = self._azure_rerank(query, docs, endpoint, api_key) - logger.info(f"Azure rerank result: {result}") - - rerank_documents = [] - for idx, (doc, score_dict) in enumerate(zip(docs, 
result)): - score = score_dict["score"] - rerank_document = RerankDocument(index=idx, text=doc, score=score) - - if score_threshold is None or score >= score_threshold: - rerank_documents.append(rerank_document) - - rerank_documents.sort(key=lambda x: x.score, reverse=True) - - if top_n: - rerank_documents = rerank_documents[:top_n] - - return RerankResult(model=model, docs=rerank_documents) - - except Exception as e: - logger.exception(f"Exception in Azure rerank: {e}") - raise - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke( - model=model, - credentials=credentials, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. Its capital is Saipan.", - ], - score_threshold=0.8, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [urllib.error.URLError], - InvokeServerUnavailableError: [urllib.error.HTTPError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError, json.JSONDecodeError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.RERANK, - model_properties={}, - parameter_rules=[], - ) - - return entity diff --git a/api/core/model_runtime/model_providers/azure_openai/__init__.py b/api/core/model_runtime/model_providers/azure_openai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/azure_openai/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/azure_openai/_assets/icon_l_en.png deleted file mode 100644 index 0791a67911ec40247c40bf13a2ba2d2512e013b9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5007 zcmY+IWmFVEyMUK2>5%RYsinI^8Wj*05KuyzrC}A8js*lMDFFciY3bMnq@+8f!KHKQ zy7=95&$-`o&dfXS%)IZ9`8DT>Hqg@`#HYmv004wqnyN+s0Gi$7S_l{Wapv>_cs~X_ zcTF=d0Dy|)-$2tcVn2G^MDsGzPzF>C({Dc}nD$D#N&rB00>SMYEC7IU^UnPUiWfcOnls2Rs=9U@?0G#O8)s!SCirDc|F0cSgHCT{^4{|&SnBiSm)LK5*NOtPk#W#|6bZ7*s1W?CZf$v@6n_=WyK zO+X#uY;A2V#3_V62ZzJ0b*wRKLPA0i>2O@T$Bt0p5Yj&EM{LpA5(ys*dob0R>8Pox zd~iZk%)P|5*gRK&DViM z9nFI^?TU_Br~3S^i;JO4`@I(mM+ktWl>a-!;CFy~jmH!_^~>EgJ?42<(0w_`-0{ki zz~^HD*GMCkZLy?nzPzv=fX+Ot@*vH_^RFhgn;5mN34-brvc6sW-NaYBcCot~e`6$9 zc73H+*RBVWPbfgaxg<$6X^A+Vt2L1VR{-o+m3zCAPgUOzqnE5T*w)&ASx~A1_J?L* zvfxM2eYaa{VS^6Tz%5BSIh5O`IU^Wi>=T0DLeQ@6Xd&L+xZl^q9!ldt8i=1YLswq{ zgR=sh>CVs_;WufLl7&Ld2w7@TopQn}z&b(Z#x-Jln3eho;dqP(`m!=%S%_$}WqQV` 
zTGmlRm_fQh9%EIBRz-4N1gK{$*;&A-g+Um+VWs5VEz+iu=g>@8|(eqC(F%Mi~&0DX7@zz)bL!e*jC zii-n()L58suPVM>8JUHlD2UW}X@$!XGElZao-(}LL8I;TjsABl7I=^tBX-pgGt zFwKBBC->iHNIS4dnZ6ga9-$}Li;uiWMo&`oCt}!Y9E=dmQ#r*A$eyVeXshaaR8Ur%l@ertxEitY=oa~5ktvn}nYjlG@-h#$){Vdx6#jx=3hK4IWOq*$Jk}Q~M zl1Qto*xXp2&sve|$$3Ic_v@JNDA&g&eyez#a(j_IpsZDoyVN9Y#v9c5(3qWG>+cPu zC2U+msV>;bxL=+8mbSnX$yZ4gR3VPW^WHgYh|NOGYwR)s6Gn!uVlxDR<{S8S?DP3M zFKEJIX7SIlh#bS2!lErP@xF`%x#U}0l-3aawP%f2Sy|UmX1-u>HQ-{1 zPEsLn8}1wv7d`xnfsB8HB|4@Ytgh?tKk75ubAd_K!nZvn)lz;gu;@FGaCchzCX?+< z#z~=m!Tyia78MiTYhbAjV!tsu-1NK+-rt8^r!syt3hq^q745inztj6lMT7V2`5i^O zldi6W;U(xZD$4IS)UaT&0=m=vH#Iknvf+xIUT^>zGd1;0Jjv_R{BF-wsEBx+7}HYV z2Oba%Ge_GzNdDoZzZU7>k+zw%BoA?6tu(%?=E1pI4}#Fnm)z1E@4zC|woK3Ie0-u7_3(lL+pBYN6OCj@v> z{@ih)9&@_7T;2^iJ7)|F_ydOXri6*^idb;tNPW|1u3Q{Fro}rw=gJ+m@eH>1e-Y;i z$(0SsI(IdZvLdzGun&We2Ozqnm5U?OE(8%M7TH-{Q`veg^=RhX|vEnJw2+0VamSQr{j6qg{M5*99(Zo$e+=m4VWcg z3*1!L#n)}+i<>{SlPO`uWk`jv8dGbhCbXw=9yzu&(@tkQ{H{>DDa2F|0MoGxMaCa6 zzn)O0=3*uuYa{_hHy9XH@(6+k&4q3@uky=r^8M}1rpdMQv6Qln{b9Z-aHr-D|HC^Y zbblY89IVz;@sd`ZJ0;74_QK1sH`fciZSU+suN{`*SZb7Y?OC>O7=&|$3MZyy1N8Iy zSsg2mW>nCWp|E$csWtIM(G#OnBy)?I&C9Bqr0x$eJ?=IBolI|E1L6OscOuB~T@ZSkPZB z8$Yo4G4*QH*>K*u-Z*jYTbfy+WX1|r0?PxN;13pE6KsYjS^}2x*M#G(4dpOm80w|P z))zG&pZxu$2Z(K{^9{N8sE`i#pxYt|R{W^l8$m9O9{#i`?s35MyR#1@ek=yfeZ1 zQku)vDJV?k=jW%}AJ?Jh7uXviK2>qQV%_GCbMgh+MJ{Pl;L8dJRWU}D+`q4grNC6WGmI=C$n{~>+BcZJxd-P)Zf!dU}8N6S}R zzmd*&eRkwK#wCj!0Dw8>r**JaLsx#`rI7p9%PcdA8q|`yUby)A%%h*eaMer=qQb=TiR)8 zkr{6kTiTCn6P4Sdi7zVweRr|W>1n1@{2^=No-g!@D;ne53kd*ol!oHQ2b{91I1gt&JUYWZ*I~S z2hy+?h{TY98?^W*uQaE6vTZ$esJ`#7k!<1*QEhau!)*mJWor|#yYt&Mg;wuin4HK4 z=%?b(r5q#I8msS(WO!VB_5HgMM4=V}jeV>3qLxTn-7gpARL0`=hTtB>{YckaC`fV1 z@J=ZzKbZCiokf?n*M20&$j9{RFE(kl2=x}l*{ama!~UIE&3l!erDX{RA;&lq;g(Xl{l64F04bElvPH9F3%ZM>h|{zQV!D~6W99PCITKmyK# zUdxAK&_oZp`CeNMmSW-w$~*e_x7ye8WG3ef>f8#K(yfeEXUwt~_}N-69cczx@o^3r zL%7TvG8?$xF%9{rX=s|4Ogilx)G{7tvXP6xu9YO2^LZyl&bof?KdHQA!YD9Mkj7;r 
z>N1U6OI;PxG4%KLEpDq$z?2?K*)jz7a;{rz4p<1gj~Yd)f_n@32DL4$s3Y!EE;dx-Q{VC&3UeqM$*L_=O$I8E==Nz1QQ~kd&aiY$ zq(f^QM#p>?A73^rQm;R%URp_lzvl3jZ%L4YZmI3H+GOuZy8k+7ASNFCetuP|`C~{-b)5}RxUaIUmTP&ZJn!I-U>?=hj$z66sbmBu;MtW?u!A^>aIe`&v|9sl3LYU!t;H~1 zqquCc)apHbU#18^4VmZrymC?N=NhP{+eFory`&qw@QkVUG6=vmFY9> zz@PMgV2BkUMb?@8cn?wxeWfuDP4e&{aCHsmM9J6kHL<&nS)P6)qz~XuQx)@o`n*7S zZ{3fL4&1MA7gZS?hM}Y(z~G@X%#6l^n#&s|2;nqpQY^>2r^zOVy)rd8lqjbgLE1b& zQ8DG}Fd*2CG6)w&HjK^`f-WyQ`a|csGWf2KK5eQ8dbE*zYL=nVG>G2i)7WMIdJ#>~ z>}3=COO901F$Zb4+ST!=2}XX1X(uQ((u#wn!V}nnIbSNMe4-c9C{Kq4JvgC5A0z#p zGd=d2xfQxfHGVraL@IL^A!efK%s>_xZD*z7=Mmf;$j;pMM`vu2!e(r+K%mIHudLal ze^OW(GHuaQ7{AL{Gkni+e&xl!K|Zv+_~bX!`&rOAFGUurFwu15ZsycaVH$9LgGlti zmYVENdtgM}A+1Zt4^>3p)o-1!$|;~MTF1-Gt*r}IAwq5o%u9X_V{~|H+0S)NS VjvA=y{3p`@T55W#70TA3{{wPWtZe`Q diff --git a/api/core/model_runtime/model_providers/azure_openai/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/azure_openai/_assets/icon_s_en.svg deleted file mode 100644 index df1f54f36ede83..00000000000000 --- a/api/core/model_runtime/model_providers/azure_openai/_assets/icon_s_en.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/api/core/model_runtime/model_providers/azure_openai/_common.py b/api/core/model_runtime/model_providers/azure_openai/_common.py deleted file mode 100644 index 32a0269af49314..00000000000000 --- a/api/core/model_runtime/model_providers/azure_openai/_common.py +++ /dev/null @@ -1,42 +0,0 @@ -import openai -from httpx import Timeout - -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.model_providers.azure_openai._constant import AZURE_OPENAI_API_VERSION - - -class _CommonAzureOpenAI: - @staticmethod - def _to_credential_kwargs(credentials: dict) -> dict: - api_version = credentials.get("openai_api_version", AZURE_OPENAI_API_VERSION) - credentials_kwargs = { - 
"api_key": credentials["openai_api_key"], - "azure_endpoint": credentials["openai_api_base"], - "api_version": api_version, - "timeout": Timeout(315.0, read=300.0, write=10.0, connect=5.0), - "max_retries": 1, - } - - return credentials_kwargs - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [openai.APIConnectionError, openai.APITimeoutError], - InvokeServerUnavailableError: [openai.InternalServerError], - InvokeRateLimitError: [openai.RateLimitError], - InvokeAuthorizationError: [openai.AuthenticationError, openai.PermissionDeniedError], - InvokeBadRequestError: [ - openai.BadRequestError, - openai.NotFoundError, - openai.UnprocessableEntityError, - openai.APIError, - ], - } diff --git a/api/core/model_runtime/model_providers/azure_openai/_constant.py b/api/core/model_runtime/model_providers/azure_openai/_constant.py deleted file mode 100644 index 0dada70cc5b7fa..00000000000000 --- a/api/core/model_runtime/model_providers/azure_openai/_constant.py +++ /dev/null @@ -1,1262 +0,0 @@ -from pydantic import BaseModel - -from core.model_runtime.entities.defaults import PARAMETER_RULE_TEMPLATE -from core.model_runtime.entities.llm_entities import LLMMode -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - DefaultParameterName, - FetchFrom, - I18nObject, - ModelFeature, - ModelPropertyKey, - ModelType, - ParameterRule, - PriceConfig, -) - -AZURE_OPENAI_API_VERSION = "2024-02-15-preview" - -AZURE_DEFAULT_PARAM_SEED_HELP = I18nObject( - zh_Hans="如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性," - "您应该参考 system_fingerprint 响应参数来监视变化。", - en_US="If specified, model will make a best effort to sample deterministically," - " such that repeated requests with the same seed and parameters should return the same result." 
- " Determinism is not guaranteed, and you should refer to the system_fingerprint response parameter" - " to monitor changes in the backend.", -) - - -def _get_max_tokens(default: int, min_val: int, max_val: int) -> ParameterRule: - rule = ParameterRule( - name="max_tokens", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.MAX_TOKENS], - ) - rule.default = default - rule.min = min_val - rule.max = max_val - return rule - - -class AzureBaseModel(BaseModel): - base_model_name: str - entity: AIModelEntity - - -LLM_BASE_MODELS = [ - AzureBaseModel( - base_model_name="gpt-35-turbo", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 16385, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=0.0005, - output=0.0015, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-35-turbo-16k", - entity=AIModelEntity( - 
model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 16385, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=16385), - ], - pricing=PriceConfig( - input=0.003, - output=0.004, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-35-turbo-0125", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 16385, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - 
ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=0.0005, - output=0.0015, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 8192, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=8192), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=0.03, - output=0.06, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4-32k", - 
entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 32768, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=32768), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=0.06, - output=0.12, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4-0125-preview", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 128000, - }, - parameter_rules=[ - 
ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=0.01, - output=0.03, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4-1106-preview", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 128000, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - 
), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=0.01, - output=0.03, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4o-mini", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.VISION, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 128000, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=16384), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", 
en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=0.150, - output=0.600, - unit=0.000001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4o-mini-2024-07-18", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.VISION, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 128000, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=16384), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object", "json_schema"], - ), - ParameterRule( - name="json_schema", - label=I18nObject(en_US="JSON Schema"), - type="text", - help=I18nObject( - zh_Hans="设置返回的json schema,llm将按照它返回", - en_US="Set a response json schema will ensure LLM to adhere it.", - ), - required=False, - ), - ], - 
pricing=PriceConfig( - input=0.150, - output=0.600, - unit=0.000001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4o", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.VISION, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 128000, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=5.00, - output=15.00, - unit=0.000001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4o-2024-05-13", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.VISION, - ModelFeature.MULTI_TOOL_CALL, - 
ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 128000, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=5.00, - output=15.00, - unit=0.000001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4o-2024-08-06", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.VISION, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 128000, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], 
- ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object", "json_schema"], - ), - ParameterRule( - name="json_schema", - label=I18nObject(en_US="JSON Schema"), - type="text", - help=I18nObject( - zh_Hans="设置返回的json schema,llm将按照它返回", - en_US="Set a response json schema will ensure LLM to adhere it.", - ), - required=False, - ), - ], - pricing=PriceConfig( - input=5.00, - output=15.00, - unit=0.000001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4-turbo", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.VISION, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 128000, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - 
name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=0.01, - output=0.03, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4-turbo-2024-04-09", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ - ModelFeature.AGENT_THOUGHT, - ModelFeature.VISION, - ModelFeature.MULTI_TOOL_CALL, - ModelFeature.STREAM_TOOL_CALL, - ], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 128000, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - 
label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=0.01, - output=0.03, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-4-vision-preview", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - features=[ModelFeature.VISION], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.CHAT.value, - ModelPropertyKey.CONTEXT_SIZE: 128000, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ParameterRule( - name="seed", - label=I18nObject(zh_Hans="种子", en_US="Seed"), - type="int", - help=AZURE_DEFAULT_PARAM_SEED_HELP, - required=False, - precision=2, - min=0, - max=1, - ), - ParameterRule( - name="response_format", - label=I18nObject(zh_Hans="回复格式", en_US="response_format"), - type="string", - help=I18nObject( - zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" - ), - required=False, - options=["text", "json_object"], - ), - ], - pricing=PriceConfig( - input=0.01, - output=0.03, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="gpt-35-turbo-instruct", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - 
en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.COMPLETION.value, - ModelPropertyKey.CONTEXT_SIZE: 4096, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ], - pricing=PriceConfig( - input=0.0015, - output=0.002, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="text-davinci-003", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject( - en_US="fake-deployment-name-label", - ), - model_type=ModelType.LLM, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: LLMMode.COMPLETION.value, - ModelPropertyKey.CONTEXT_SIZE: 4096, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE], - ), - ParameterRule( - name="top_p", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P], - ), - ParameterRule( - name="presence_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.PRESENCE_PENALTY], - ), - ParameterRule( - name="frequency_penalty", - **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY], - ), - _get_max_tokens(default=512, min_val=1, max_val=4096), - ], - pricing=PriceConfig( - input=0.02, - output=0.02, - unit=0.001, - currency="USD", - ), - ), - ), -] - -EMBEDDING_BASE_MODELS = [ - AzureBaseModel( - base_model_name="text-embedding-ada-002", - entity=AIModelEntity( - model="fake-deployment-name", - 
label=I18nObject(en_US="fake-deployment-name-label"), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TEXT_EMBEDDING, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: 8097, - ModelPropertyKey.MAX_CHUNKS: 32, - }, - pricing=PriceConfig( - input=0.0001, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="text-embedding-3-small", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject(en_US="fake-deployment-name-label"), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TEXT_EMBEDDING, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: 8191, - ModelPropertyKey.MAX_CHUNKS: 32, - }, - pricing=PriceConfig( - input=0.00002, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="text-embedding-3-large", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject(en_US="fake-deployment-name-label"), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TEXT_EMBEDDING, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: 8191, - ModelPropertyKey.MAX_CHUNKS: 32, - }, - pricing=PriceConfig( - input=0.00013, - unit=0.001, - currency="USD", - ), - ), - ), -] -SPEECH2TEXT_BASE_MODELS = [ - AzureBaseModel( - base_model_name="whisper-1", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject(en_US="fake-deployment-name-label"), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.SPEECH2TEXT, - model_properties={ - ModelPropertyKey.FILE_UPLOAD_LIMIT: 25, - ModelPropertyKey.SUPPORTED_FILE_EXTENSIONS: "flac,mp3,mp4,mpeg,mpga,m4a,ogg,wav,webm", - }, - ), - ) -] -TTS_BASE_MODELS = [ - AzureBaseModel( - base_model_name="tts-1", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject(en_US="fake-deployment-name-label"), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TTS, - model_properties={ - ModelPropertyKey.DEFAULT_VOICE: "alloy", - ModelPropertyKey.VOICES: [ - { - 
"mode": "alloy", - "name": "Alloy", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - { - "mode": "echo", - "name": "Echo", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - { - "mode": "fable", - "name": "Fable", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - { - "mode": "onyx", - "name": "Onyx", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - { - "mode": "nova", - "name": "Nova", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - { - "mode": "shimmer", - "name": "Shimmer", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - ], - ModelPropertyKey.WORD_LIMIT: 120, - ModelPropertyKey.AUDIO_TYPE: "mp3", - ModelPropertyKey.MAX_WORKERS: 5, - }, - pricing=PriceConfig( - input=0.015, - unit=0.001, - currency="USD", - ), - ), - ), - AzureBaseModel( - base_model_name="tts-1-hd", - entity=AIModelEntity( - model="fake-deployment-name", - label=I18nObject(en_US="fake-deployment-name-label"), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TTS, - model_properties={ - ModelPropertyKey.DEFAULT_VOICE: "alloy", - ModelPropertyKey.VOICES: [ - { - "mode": "alloy", - "name": "Alloy", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - { - "mode": "echo", - "name": "Echo", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - { - "mode": "fable", - "name": "Fable", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - { - "mode": "onyx", - "name": "Onyx", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - { - "mode": "nova", - "name": "Nova", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", 
"th-TH", "id-ID"], - }, - { - "mode": "shimmer", - "name": "Shimmer", - "language": ["zh-Hans", "en-US", "de-DE", "fr-FR", "es-ES", "it-IT", "th-TH", "id-ID"], - }, - ], - ModelPropertyKey.WORD_LIMIT: 120, - ModelPropertyKey.AUDIO_TYPE: "mp3", - ModelPropertyKey.MAX_WORKERS: 5, - }, - pricing=PriceConfig( - input=0.03, - unit=0.001, - currency="USD", - ), - ), - ), -] diff --git a/api/core/model_runtime/model_providers/azure_openai/azure_openai.py b/api/core/model_runtime/model_providers/azure_openai/azure_openai.py deleted file mode 100644 index 2e3c6aab0588ec..00000000000000 --- a/api/core/model_runtime/model_providers/azure_openai/azure_openai.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class AzureOpenAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml b/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml deleted file mode 100644 index 867f9fec420008..00000000000000 --- a/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml +++ /dev/null @@ -1,227 +0,0 @@ -provider: azure_openai -label: - en_US: Azure OpenAI Service Model -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.png -background: "#E3F0FF" -help: - title: - en_US: Get your API key from Azure - zh_Hans: 从 Azure 获取 API Key - url: - en_US: https://azure.microsoft.com/en-us/products/ai-services/openai-service -supported_model_types: - - llm - - text-embedding - - speech2text - - tts -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Deployment Name - zh_Hans: 部署名称 - placeholder: - en_US: Enter your Deployment Name here, matching the Azure deployment name. 
- zh_Hans: 在此输入您的部署名称,与 Azure 部署名称匹配。 - credential_form_schemas: - - variable: openai_api_base - label: - en_US: API Endpoint URL - zh_Hans: API 域名 - type: text-input - required: true - placeholder: - zh_Hans: '在此输入您的 API 域名,如:https://example.com/xxx' - en_US: 'Enter your API Endpoint, eg: https://example.com/xxx' - - variable: openai_api_key - label: - en_US: API Key - zh_Hans: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API key here - - variable: openai_api_version - label: - zh_Hans: API 版本 - en_US: API Version - type: select - required: true - options: - - label: - en_US: 2024-08-01-preview - value: 2024-08-01-preview - - label: - en_US: 2024-07-01-preview - value: 2024-07-01-preview - - label: - en_US: 2024-05-01-preview - value: 2024-05-01-preview - - label: - en_US: 2024-04-01-preview - value: 2024-04-01-preview - - label: - en_US: 2024-03-01-preview - value: 2024-03-01-preview - - label: - en_US: 2024-02-15-preview - value: 2024-02-15-preview - - label: - en_US: 2023-12-01-preview - value: 2023-12-01-preview - - label: - en_US: '2024-02-01' - value: '2024-02-01' - - label: - en_US: '2024-06-01' - value: '2024-06-01' - placeholder: - zh_Hans: 在此选择您的 API 版本 - en_US: Select your API Version here - - variable: base_model_name - label: - en_US: Base Model - zh_Hans: 基础模型 - type: select - required: true - options: - - label: - en_US: gpt-35-turbo - value: gpt-35-turbo - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-35-turbo-0125 - value: gpt-35-turbo-0125 - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-35-turbo-16k - value: gpt-35-turbo-16k - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4 - value: gpt-4 - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4-32k - value: gpt-4-32k - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4o-mini - value: gpt-4o-mini - show_on: - - 
variable: __model_type - value: llm - - label: - en_US: gpt-4o-mini-2024-07-18 - value: gpt-4o-mini-2024-07-18 - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4o - value: gpt-4o - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4o-2024-05-13 - value: gpt-4o-2024-05-13 - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4o-2024-08-06 - value: gpt-4o-2024-08-06 - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4-turbo - value: gpt-4-turbo - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4-turbo-2024-04-09 - value: gpt-4-turbo-2024-04-09 - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4-0125-preview - value: gpt-4-0125-preview - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4-1106-preview - value: gpt-4-1106-preview - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-4-vision-preview - value: gpt-4-vision-preview - show_on: - - variable: __model_type - value: llm - - label: - en_US: gpt-35-turbo-instruct - value: gpt-35-turbo-instruct - show_on: - - variable: __model_type - value: llm - - label: - en_US: text-embedding-ada-002 - value: text-embedding-ada-002 - show_on: - - variable: __model_type - value: text-embedding - - label: - en_US: text-embedding-3-small - value: text-embedding-3-small - show_on: - - variable: __model_type - value: text-embedding - - label: - en_US: text-embedding-3-large - value: text-embedding-3-large - show_on: - - variable: __model_type - value: text-embedding - - label: - en_US: whisper-1 - value: whisper-1 - show_on: - - variable: __model_type - value: speech2text - - label: - en_US: tts-1 - value: tts-1 - show_on: - - variable: __model_type - value: tts - - label: - en_US: tts-1-hd - value: tts-1-hd - show_on: - - variable: __model_type - value: tts - placeholder: - zh_Hans: 在此输入您的模型版本 - en_US: Enter your model version diff --git 
a/api/core/model_runtime/model_providers/azure_openai/llm/__init__.py b/api/core/model_runtime/model_providers/azure_openai/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/azure_openai/llm/llm.py b/api/core/model_runtime/model_providers/azure_openai/llm/llm.py deleted file mode 100644 index f0033ea051b501..00000000000000 --- a/api/core/model_runtime/model_providers/azure_openai/llm/llm.py +++ /dev/null @@ -1,665 +0,0 @@ -import copy -import json -import logging -from collections.abc import Generator, Sequence -from typing import Optional, Union, cast - -import tiktoken -from openai import AzureOpenAI, Stream -from openai.types import Completion -from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessageToolCall -from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall - -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageFunction, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import AIModelEntity, ModelPropertyKey -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.azure_openai._common import _CommonAzureOpenAI -from core.model_runtime.model_providers.azure_openai._constant import LLM_BASE_MODELS -from core.model_runtime.utils import helper - -logger = logging.getLogger(__name__) - - -class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - 
prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - base_model_name = credentials.get("base_model_name") - if not base_model_name: - raise ValueError("Base Model Name is required") - ai_model_entity = self._get_ai_model_entity(base_model_name=base_model_name, model=model) - - if ai_model_entity and ai_model_entity.entity.model_properties.get(ModelPropertyKey.MODE) == LLMMode.CHAT.value: - # chat model - return self._chat_generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - else: - # text completion model - return self._generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - stop=stop, - stream=stream, - user=user, - ) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - base_model_name = credentials.get("base_model_name") - if not base_model_name: - raise ValueError("Base Model Name is required") - model_entity = self._get_ai_model_entity(base_model_name=base_model_name, model=model) - if not model_entity: - raise ValueError(f"Base Model Name {base_model_name} is invalid") - model_mode = model_entity.entity.model_properties.get(ModelPropertyKey.MODE) - - if model_mode == LLMMode.CHAT.value: - # chat model - return self._num_tokens_from_messages(credentials, prompt_messages, tools) - else: - # text completion model, do not support tool calling - content = prompt_messages[0].content - assert isinstance(content, str) - return self._num_tokens_from_string(credentials, content) - - def validate_credentials(self, model: str, credentials: dict) -> None: - if 
"openai_api_base" not in credentials: - raise CredentialsValidateFailedError("Azure OpenAI API Base Endpoint is required") - - if "openai_api_key" not in credentials: - raise CredentialsValidateFailedError("Azure OpenAI API key is required") - - if "base_model_name" not in credentials: - raise CredentialsValidateFailedError("Base Model Name is required") - - base_model_name = credentials.get("base_model_name") - if not base_model_name: - raise CredentialsValidateFailedError("Base Model Name is required") - ai_model_entity = self._get_ai_model_entity(base_model_name=base_model_name, model=model) - - if not ai_model_entity: - raise CredentialsValidateFailedError(f'Base Model Name {credentials["base_model_name"]} is invalid') - - try: - client = AzureOpenAI(**self._to_credential_kwargs(credentials)) - - if ai_model_entity.entity.model_properties.get(ModelPropertyKey.MODE) == LLMMode.CHAT.value: - # chat model - client.chat.completions.create( - messages=[{"role": "user", "content": "ping"}], - model=model, - temperature=0, - max_tokens=20, - stream=False, - ) - else: - # text completion model - client.completions.create( - prompt="ping", - model=model, - temperature=0, - max_tokens=20, - stream=False, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: - base_model_name = credentials.get("base_model_name") - if not base_model_name: - raise ValueError("Base Model Name is required") - ai_model_entity = self._get_ai_model_entity(base_model_name=base_model_name, model=model) - return ai_model_entity.entity if ai_model_entity else None - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - client = 
AzureOpenAI(**self._to_credential_kwargs(credentials)) - - extra_model_kwargs = {} - - if stop: - extra_model_kwargs["stop"] = stop - - if user: - extra_model_kwargs["user"] = user - - # text completion model - response = client.completions.create( - prompt=prompt_messages[0].content, model=model, stream=stream, **model_parameters, **extra_model_kwargs - ) - - if stream: - return self._handle_generate_stream_response(model, credentials, response, prompt_messages) - - return self._handle_generate_response(model, credentials, response, prompt_messages) - - def _handle_generate_response( - self, model: str, credentials: dict, response: Completion, prompt_messages: list[PromptMessage] - ): - assistant_text = response.choices[0].text - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_text) - - # calculate num tokens - if response.usage: - # transform usage - prompt_tokens = response.usage.prompt_tokens - completion_tokens = response.usage.completion_tokens - else: - # calculate num tokens - content = prompt_messages[0].content - assert isinstance(content, str) - prompt_tokens = self._num_tokens_from_string(credentials, content) - completion_tokens = self._num_tokens_from_string(credentials, assistant_text) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - result = LLMResult( - model=response.model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - system_fingerprint=response.system_fingerprint, - ) - - return result - - def _handle_generate_stream_response( - self, model: str, credentials: dict, response: Stream[Completion], prompt_messages: list[PromptMessage] - ) -> Generator: - full_text = "" - for chunk in response: - if len(chunk.choices) == 0: - continue - - delta = chunk.choices[0] - - if delta.finish_reason is None and (delta.text is None or delta.text == ""): - 
continue - - # transform assistant message to prompt message - text = delta.text or "" - assistant_prompt_message = AssistantPromptMessage(content=text) - - full_text += text - - if delta.finish_reason is not None: - # calculate num tokens - if chunk.usage: - # transform usage - prompt_tokens = chunk.usage.prompt_tokens - completion_tokens = chunk.usage.completion_tokens - else: - # calculate num tokens - content = prompt_messages[0].content - assert isinstance(content, str) - prompt_tokens = self._num_tokens_from_string(credentials, content) - completion_tokens = self._num_tokens_from_string(credentials, full_text) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - finish_reason=delta.finish_reason, - usage=usage, - ), - ) - else: - yield LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - ), - ) - - def _chat_generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - client = AzureOpenAI(**self._to_credential_kwargs(credentials)) - - response_format = model_parameters.get("response_format") - if response_format: - if response_format == "json_schema": - json_schema = model_parameters.get("json_schema") - if not json_schema: - raise ValueError("Must define JSON Schema when the response format is json_schema") - try: - schema = json.loads(json_schema) - except: - raise ValueError(f"not correct json_schema 
format: {json_schema}") - model_parameters.pop("json_schema") - model_parameters["response_format"] = {"type": "json_schema", "json_schema": schema} - else: - model_parameters["response_format"] = {"type": response_format} - - extra_model_kwargs = {} - - if tools: - extra_model_kwargs["tools"] = [helper.dump_model(PromptMessageFunction(function=tool)) for tool in tools] - # extra_model_kwargs['functions'] = [{ - # "name": tool.name, - # "description": tool.description, - # "parameters": tool.parameters - # } for tool in tools] - - if stop: - extra_model_kwargs["stop"] = stop - - if user: - extra_model_kwargs["user"] = user - - # chat model - messages = [self._convert_prompt_message_to_dict(m) for m in prompt_messages] - response = client.chat.completions.create( - messages=messages, - model=model, - stream=stream, - **model_parameters, - **extra_model_kwargs, - ) - - if stream: - return self._handle_chat_generate_stream_response(model, credentials, response, prompt_messages, tools) - - return self._handle_chat_generate_response(model, credentials, response, prompt_messages, tools) - - def _handle_chat_generate_response( - self, - model: str, - credentials: dict, - response: ChatCompletion, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ): - assistant_message = response.choices[0].message - assistant_message_tool_calls = assistant_message.tool_calls - - # extract tool calls from response - tool_calls = [] - self._update_tool_calls(tool_calls=tool_calls, tool_calls_response=assistant_message_tool_calls) - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_message.content, tool_calls=tool_calls) - - # calculate num tokens - if response.usage: - # transform usage - prompt_tokens = response.usage.prompt_tokens - completion_tokens = response.usage.completion_tokens - else: - # calculate num tokens - prompt_tokens = self._num_tokens_from_messages(credentials, 
prompt_messages, tools) - completion_tokens = self._num_tokens_from_messages(credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - result = LLMResult( - model=response.model or model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - system_fingerprint=response.system_fingerprint, - ) - - return result - - def _handle_chat_generate_stream_response( - self, - model: str, - credentials: dict, - response: Stream[ChatCompletionChunk], - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ): - index = 0 - full_assistant_content = "" - real_model = model - system_fingerprint = None - completion = "" - tool_calls = [] - for chunk in response: - if len(chunk.choices) == 0: - continue - - delta = chunk.choices[0] - # NOTE: For fix https://github.com/langgenius/dify/issues/5790 - if delta.delta is None: - continue - - # extract tool calls from response - self._update_tool_calls(tool_calls=tool_calls, tool_calls_response=delta.delta.tool_calls) - - # Handling exceptions when content filters' streaming mode is set to asynchronous modified filter - if delta.finish_reason is None and not delta.delta.content: - continue - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=delta.delta.content or "", tool_calls=tool_calls) - - full_assistant_content += delta.delta.content or "" - - real_model = chunk.model - system_fingerprint = chunk.system_fingerprint - completion += delta.delta.content or "" - - yield LLMResultChunk( - model=real_model, - prompt_messages=prompt_messages, - system_fingerprint=system_fingerprint, - delta=LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - ), - ) - - index += 1 - - # calculate num tokens - prompt_tokens = self._num_tokens_from_messages(credentials, prompt_messages, 
tools) - - full_assistant_prompt_message = AssistantPromptMessage(content=completion) - completion_tokens = self._num_tokens_from_messages(credentials, [full_assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=real_model, - prompt_messages=prompt_messages, - system_fingerprint=system_fingerprint, - delta=LLMResultChunkDelta( - index=index, message=AssistantPromptMessage(content=""), finish_reason="stop", usage=usage - ), - ) - - @staticmethod - def _update_tool_calls( - tool_calls: list[AssistantPromptMessage.ToolCall], - tool_calls_response: Optional[Sequence[ChatCompletionMessageToolCall | ChoiceDeltaToolCall]], - ) -> None: - if tool_calls_response: - for response_tool_call in tool_calls_response: - if isinstance(response_tool_call, ChatCompletionMessageToolCall): - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call.function.name, arguments=response_tool_call.function.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_tool_call.id, type=response_tool_call.type, function=function - ) - tool_calls.append(tool_call) - elif isinstance(response_tool_call, ChoiceDeltaToolCall): - index = response_tool_call.index - if index < len(tool_calls): - tool_calls[index].id = response_tool_call.id or tool_calls[index].id - tool_calls[index].type = response_tool_call.type or tool_calls[index].type - if response_tool_call.function: - tool_calls[index].function.name = ( - response_tool_call.function.name or tool_calls[index].function.name - ) - tool_calls[index].function.arguments += response_tool_call.function.arguments or "" - else: - assert response_tool_call.id is not None - assert response_tool_call.type is not None - assert response_tool_call.function is not None - assert response_tool_call.function.name is not None - assert response_tool_call.function.arguments is not None - - function = 
AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call.function.name, arguments=response_tool_call.function.arguments - ) - tool_call = AssistantPromptMessage.ToolCall( - id=response_tool_call.id, type=response_tool_call.type, function=function - ) - tool_calls.append(tool_call) - - @staticmethod - def _convert_prompt_message_to_dict(message: PromptMessage): - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - sub_messages = [] - assert message.content is not None - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - sub_message_dict = { - "type": "image_url", - "image_url": {"url": message_content.data, "detail": message_content.detail.value}, - } - sub_messages.append(sub_message_dict) - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - # message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls: - message_dict["tool_calls"] = [helper.dump_model(tool_call) for tool_call in message.tool_calls] - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - message_dict = { - "role": "tool", - "name": message.name, - "content": message.content, - "tool_call_id": message.tool_call_id, - } - else: - raise ValueError(f"Got 
unknown type {message}") - - if message.name: - message_dict["name"] = message.name - - return message_dict - - def _num_tokens_from_string( - self, credentials: dict, text: str, tools: Optional[list[PromptMessageTool]] = None - ) -> int: - try: - encoding = tiktoken.encoding_for_model(credentials["base_model_name"]) - except KeyError: - encoding = tiktoken.get_encoding("cl100k_base") - - num_tokens = len(encoding.encode(text)) - - if tools: - num_tokens += self._num_tokens_for_tools(encoding, tools) - - return num_tokens - - def _num_tokens_from_messages( - self, credentials: dict, messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None - ) -> int: - """Calculate num tokens for gpt-3.5-turbo and gpt-4 with tiktoken package. - - Official documentation: https://github.com/openai/openai-cookbook/blob/ - main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb""" - model = credentials["base_model_name"] - try: - encoding = tiktoken.encoding_for_model(model) - except KeyError: - logger.warning("Warning: model not found. Using cl100k_base encoding.") - model = "cl100k_base" - encoding = tiktoken.get_encoding(model) - - if model.startswith("gpt-35-turbo-0301"): - # every message follows {role/name}\n{content}\n - tokens_per_message = 4 - # if there's a name, the role is omitted - tokens_per_name = -1 - elif model.startswith("gpt-35-turbo") or model.startswith("gpt-4"): - tokens_per_message = 3 - tokens_per_name = 1 - else: - raise NotImplementedError( - f"get_num_tokens_from_messages() is not presently implemented " - f"for model {model}." - "See https://github.com/openai/openai-python/blob/main/chatml.md for " - "information on how messages are converted to tokens." 
- ) - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - # Cast str(value) in case the message value is not a string - # This occurs with function messages - # TODO: The current token calculation method for the image type is not implemented, - # which need to download the image and then get the resolution for calculation, - # and will increase the request delay - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - if key == "tool_calls": - for tool_call in value: - assert isinstance(tool_call, dict) - for t_key, t_value in tool_call.items(): - num_tokens += len(encoding.encode(t_key)) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += len(encoding.encode(f_key)) - num_tokens += len(encoding.encode(f_value)) - else: - num_tokens += len(encoding.encode(t_key)) - num_tokens += len(encoding.encode(t_value)) - else: - num_tokens += len(encoding.encode(str(value))) - - if key == "name": - num_tokens += tokens_per_name - - # every reply is primed with assistant - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(encoding, tools) - - return num_tokens - - @staticmethod - def _num_tokens_for_tools(encoding: tiktoken.Encoding, tools: list[PromptMessageTool]) -> int: - num_tokens = 0 - for tool in tools: - num_tokens += len(encoding.encode("type")) - num_tokens += len(encoding.encode("function")) - - # calculate num tokens for function object - num_tokens += len(encoding.encode("name")) - num_tokens += len(encoding.encode(tool.name)) - num_tokens += len(encoding.encode("description")) - num_tokens += len(encoding.encode(tool.description)) - parameters = tool.parameters - num_tokens += len(encoding.encode("parameters")) - if "title" in parameters: - num_tokens += 
len(encoding.encode("title")) - num_tokens += len(encoding.encode(parameters["title"])) - num_tokens += len(encoding.encode("type")) - num_tokens += len(encoding.encode(parameters["type"])) - if "properties" in parameters: - num_tokens += len(encoding.encode("properties")) - for key, value in parameters["properties"].items(): - num_tokens += len(encoding.encode(key)) - for field_key, field_value in value.items(): - num_tokens += len(encoding.encode(field_key)) - if field_key == "enum": - for enum_field in field_value: - num_tokens += 3 - num_tokens += len(encoding.encode(enum_field)) - else: - num_tokens += len(encoding.encode(field_key)) - num_tokens += len(encoding.encode(str(field_value))) - if "required" in parameters: - num_tokens += len(encoding.encode("required")) - for required_field in parameters["required"]: - num_tokens += 3 - num_tokens += len(encoding.encode(required_field)) - - return num_tokens - - @staticmethod - def _get_ai_model_entity(base_model_name: str, model: str): - for ai_model_entity in LLM_BASE_MODELS: - if ai_model_entity.base_model_name == base_model_name: - ai_model_entity_copy = copy.deepcopy(ai_model_entity) - ai_model_entity_copy.entity.model = model - ai_model_entity_copy.entity.label.en_US = model - ai_model_entity_copy.entity.label.zh_Hans = model - return ai_model_entity_copy diff --git a/api/core/model_runtime/model_providers/azure_openai/speech2text/__init__.py b/api/core/model_runtime/model_providers/azure_openai/speech2text/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/azure_openai/speech2text/speech2text.py b/api/core/model_runtime/model_providers/azure_openai/speech2text/speech2text.py deleted file mode 100644 index a2b14cf3dbe6d4..00000000000000 --- a/api/core/model_runtime/model_providers/azure_openai/speech2text/speech2text.py +++ /dev/null @@ -1,79 +0,0 @@ -import copy -from typing import IO, Optional - -from openai import AzureOpenAI - 
-from core.model_runtime.entities.model_entities import AIModelEntity -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel -from core.model_runtime.model_providers.azure_openai._common import _CommonAzureOpenAI -from core.model_runtime.model_providers.azure_openai._constant import SPEECH2TEXT_BASE_MODELS, AzureBaseModel - - -class AzureOpenAISpeech2TextModel(_CommonAzureOpenAI, Speech2TextModel): - """ - Model class for OpenAI Speech to text model. - """ - - def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :param user: unique user id - :return: text for given audio file - """ - return self._speech2text_invoke(model, credentials, file) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - audio_file_path = self._get_demo_file_path() - - with open(audio_file_path, "rb") as audio_file: - self._speech2text_invoke(model, credentials, audio_file) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _speech2text_invoke(self, model: str, credentials: dict, file: IO[bytes]) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :return: text for given audio file - """ - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - - # init model client - client = AzureOpenAI(**credentials_kwargs) - - response = client.audio.transcriptions.create(model=model, file=file) - - return response.text - - def get_customizable_model_schema(self, model: str, credentials: dict) 
-> Optional[AIModelEntity]: - ai_model_entity = self._get_ai_model_entity(credentials["base_model_name"], model) - return ai_model_entity.entity - - @staticmethod - def _get_ai_model_entity(base_model_name: str, model: str) -> AzureBaseModel: - for ai_model_entity in SPEECH2TEXT_BASE_MODELS: - if ai_model_entity.base_model_name == base_model_name: - ai_model_entity_copy = copy.deepcopy(ai_model_entity) - ai_model_entity_copy.entity.model = model - ai_model_entity_copy.entity.label.en_US = model - ai_model_entity_copy.entity.label.zh_Hans = model - return ai_model_entity_copy - - return None diff --git a/api/core/model_runtime/model_providers/azure_openai/text_embedding/__init__.py b/api/core/model_runtime/model_providers/azure_openai/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/azure_openai/tts/__init__.py b/api/core/model_runtime/model_providers/azure_openai/tts/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/azure_openai/tts/tts.py b/api/core/model_runtime/model_providers/azure_openai/tts/tts.py deleted file mode 100644 index af178703a06951..00000000000000 --- a/api/core/model_runtime/model_providers/azure_openai/tts/tts.py +++ /dev/null @@ -1,128 +0,0 @@ -import concurrent.futures -import copy -from typing import Optional - -from openai import AzureOpenAI - -from core.model_runtime.entities.model_entities import AIModelEntity -from core.model_runtime.errors.invoke import InvokeBadRequestError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.tts_model import TTSModel -from core.model_runtime.model_providers.azure_openai._common import _CommonAzureOpenAI -from core.model_runtime.model_providers.azure_openai._constant import TTS_BASE_MODELS, AzureBaseModel - - -class AzureOpenAIText2SpeechModel(_CommonAzureOpenAI, 
TTSModel): - """ - Model class for OpenAI Speech to text model. - """ - - def _invoke( - self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ) -> any: - """ - _invoke text2speech model - - :param model: model name - :param tenant_id: user tenant id - :param credentials: model credentials - :param content_text: text content to be translated - :param voice: model timbre - :param user: unique user id - :return: text translated to audio file - """ - if not voice or voice not in [ - d["value"] for d in self.get_tts_model_voices(model=model, credentials=credentials) - ]: - voice = self._get_model_default_voice(model, credentials) - - return self._tts_invoke_streaming(model=model, credentials=credentials, content_text=content_text, voice=voice) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - validate credentials text2speech model - - :param model: model name - :param credentials: model credentials - :return: text translated to audio file - """ - try: - self._tts_invoke_streaming( - model=model, - credentials=credentials, - content_text="Hello Dify!", - voice=self._get_model_default_voice(model, credentials), - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: - """ - _tts_invoke_streaming text2speech model - :param model: model name - :param credentials: model credentials - :param content_text: text content to be translated - :param voice: model timbre - :return: text translated to audio file - """ - try: - # doc: https://platform.openai.com/docs/guides/text-to-speech - credentials_kwargs = self._to_credential_kwargs(credentials) - client = AzureOpenAI(**credentials_kwargs) - # max length is 4096 characters, there is 3500 limit for each request - max_length = 3500 - if len(content_text) > max_length: - sentences = 
self._split_text_into_sentences(content_text, max_length=max_length) - executor = concurrent.futures.ThreadPoolExecutor(max_workers=min(3, len(sentences))) - futures = [ - executor.submit( - client.audio.speech.with_streaming_response.create, - model=model, - response_format="mp3", - input=sentences[i], - voice=voice, - ) - for i in range(len(sentences)) - ] - for future in futures: - yield from future.result().__enter__().iter_bytes(1024) # noqa:PLC2801 - - else: - response = client.audio.speech.with_streaming_response.create( - model=model, voice=voice, response_format="mp3", input=content_text.strip() - ) - - yield from response.__enter__().iter_bytes(1024) # noqa:PLC2801 - except Exception as ex: - raise InvokeBadRequestError(str(ex)) - - def _process_sentence(self, sentence: str, model: str, voice, credentials: dict): - """ - _tts_invoke openai text2speech model api - - :param model: model name - :param credentials: model credentials - :param voice: model timbre - :param sentence: text content to be translated - :return: text translated to audio file - """ - credentials_kwargs = self._to_credential_kwargs(credentials) - client = AzureOpenAI(**credentials_kwargs) - response = client.audio.speech.create(model=model, voice=voice, input=sentence.strip()) - if isinstance(response.read(), bytes): - return response.read() - - def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: - ai_model_entity = self._get_ai_model_entity(credentials["base_model_name"], model) - return ai_model_entity.entity - - @staticmethod - def _get_ai_model_entity(base_model_name: str, model: str) -> AzureBaseModel | None: - for ai_model_entity in TTS_BASE_MODELS: - if ai_model_entity.base_model_name == base_model_name: - ai_model_entity_copy = copy.deepcopy(ai_model_entity) - ai_model_entity_copy.entity.model = model - ai_model_entity_copy.entity.label.en_US = model - ai_model_entity_copy.entity.label.zh_Hans = model - return ai_model_entity_copy - 
return None diff --git a/api/core/model_runtime/model_providers/baichuan/__init__.py b/api/core/model_runtime/model_providers/baichuan/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/baichuan/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/baichuan/_assets/icon_l_en.svg deleted file mode 100644 index 7ff6b5a67ad618..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/_assets/icon_l_en.svg +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/baichuan/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/baichuan/_assets/icon_s_en.svg deleted file mode 100644 index 4ddcd2672696c8..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/_assets/icon_s_en.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/baichuan/baichuan.py b/api/core/model_runtime/model_providers/baichuan/baichuan.py deleted file mode 100644 index 626fc811cfd47b..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/baichuan.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class BaichuanProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `baichuan2-turbo` model for validate, - model_instance.validate_credentials(model="baichuan2-turbo", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/baichuan/baichuan.yaml b/api/core/model_runtime/model_providers/baichuan/baichuan.yaml deleted file mode 100644 index 81e6e36215aa84..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/baichuan.yaml +++ /dev/null @@ -1,29 +0,0 @@ -provider: baichuan -label: - en_US: Baichuan -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#FFF6F2" -help: - title: - en_US: Get your API Key from BAICHUAN AI - zh_Hans: 从百川智能获取您的 API Key - url: - en_US: https://www.baichuan-ai.com -supported_model_types: - - llm - - text-embedding -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/baichuan/llm/__init__.py b/api/core/model_runtime/model_providers/baichuan/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/baichuan/llm/baichuan2-53b.yaml b/api/core/model_runtime/model_providers/baichuan/llm/baichuan2-53b.yaml deleted file mode 100644 index 8360dd5faffb00..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/llm/baichuan2-53b.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: baichuan2-53b -label: - en_US: Baichuan2-53B -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - 
use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 1000 - min: 1 - max: 4000 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - default: 1 - min: 1 - max: 2 - - name: with_search_enhance - label: - zh_Hans: 搜索增强 - en_US: Search Enhance - type: boolean - help: - zh_Hans: 允许模型自行进行外部搜索,以增强生成结果。 - en_US: Allow the model to perform external search to enhance the generation results. - required: false -deprecated: true diff --git a/api/core/model_runtime/model_providers/baichuan/llm/baichuan2-turbo-192k.yaml b/api/core/model_runtime/model_providers/baichuan/llm/baichuan2-turbo-192k.yaml deleted file mode 100644 index 0ce0265cfe5c6c..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/llm/baichuan2-turbo-192k.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: baichuan2-turbo-192k -label: - en_US: Baichuan2-Turbo-192K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 192000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 8000 - min: 1 - max: 192000 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - default: 1 - min: 1 - max: 2 - - name: with_search_enhance - label: - zh_Hans: 搜索增强 - en_US: Search Enhance - type: boolean - help: - zh_Hans: 允许模型自行进行外部搜索,以增强生成结果。 - en_US: Allow the model to perform external search to enhance the generation results. - required: false -deprecated: true diff --git a/api/core/model_runtime/model_providers/baichuan/llm/baichuan2-turbo.yaml b/api/core/model_runtime/model_providers/baichuan/llm/baichuan2-turbo.yaml deleted file mode 100644 index ccb4ee8b92bc16..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/llm/baichuan2-turbo.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: baichuan2-turbo -label: - en_US: Baichuan2-Turbo -model_type: llm -features: - - agent-thought - - multi-tool-call -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.3 - - name: top_p - use_template: top_p - default: 0.85 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - min: 0 - max: 20 - default: 5 - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens - use_template: max_tokens - default: 2048 - - name: with_search_enhance - label: - zh_Hans: 搜索增强 - en_US: Search Enhance - type: boolean - help: - zh_Hans: 允许模型自行进行外部搜索,以增强生成结果。 - en_US: Allow the model to perform external search to enhance the generation results. 
- required: false diff --git a/api/core/model_runtime/model_providers/baichuan/llm/baichuan3-turbo-128k.yaml b/api/core/model_runtime/model_providers/baichuan/llm/baichuan3-turbo-128k.yaml deleted file mode 100644 index d9cd086e82c994..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/llm/baichuan3-turbo-128k.yaml +++ /dev/null @@ -1,53 +0,0 @@ -model: baichuan3-turbo-128k -label: - en_US: Baichuan3-Turbo-128k -model_type: llm -features: - - agent-thought - - multi-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.3 - - name: top_p - use_template: top_p - default: 0.85 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - min: 0 - max: 20 - default: 5 - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens - use_template: max_tokens - default: 2048 - - name: res_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object - - name: with_search_enhance - label: - zh_Hans: 搜索增强 - en_US: Search Enhance - type: boolean - help: - zh_Hans: 允许模型自行进行外部搜索,以增强生成结果。 - en_US: Allow the model to perform external search to enhance the generation results. 
- required: false diff --git a/api/core/model_runtime/model_providers/baichuan/llm/baichuan3-turbo.yaml b/api/core/model_runtime/model_providers/baichuan/llm/baichuan3-turbo.yaml deleted file mode 100644 index 58f9b39a438a08..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/llm/baichuan3-turbo.yaml +++ /dev/null @@ -1,53 +0,0 @@ -model: baichuan3-turbo -label: - en_US: Baichuan3-Turbo -model_type: llm -features: - - agent-thought - - multi-tool-call -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.3 - - name: top_p - use_template: top_p - default: 0.85 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - min: 0 - max: 20 - default: 5 - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens - use_template: max_tokens - default: 2048 - - name: res_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object - - name: with_search_enhance - label: - zh_Hans: 搜索增强 - en_US: Search Enhance - type: boolean - help: - zh_Hans: 允许模型自行进行外部搜索,以增强生成结果。 - en_US: Allow the model to perform external search to enhance the generation results. 
- required: false diff --git a/api/core/model_runtime/model_providers/baichuan/llm/baichuan4.yaml b/api/core/model_runtime/model_providers/baichuan/llm/baichuan4.yaml deleted file mode 100644 index 6a1135e165fcaf..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/llm/baichuan4.yaml +++ /dev/null @@ -1,53 +0,0 @@ -model: baichuan4 -label: - en_US: Baichuan4 -model_type: llm -features: - - agent-thought - - multi-tool-call -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.3 - - name: top_p - use_template: top_p - default: 0.85 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - min: 0 - max: 20 - default: 5 - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens - use_template: max_tokens - default: 2048 - - name: res_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object - - name: with_search_enhance - label: - zh_Hans: 搜索增强 - en_US: Search Enhance - type: boolean - help: - zh_Hans: 允许模型自行进行外部搜索,以增强生成结果。 - en_US: Allow the model to perform external search to enhance the generation results. 
- required: false diff --git a/api/core/model_runtime/model_providers/baichuan/llm/baichuan_tokenizer.py b/api/core/model_runtime/model_providers/baichuan/llm/baichuan_tokenizer.py deleted file mode 100644 index a7ca28d49d636e..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/llm/baichuan_tokenizer.py +++ /dev/null @@ -1,21 +0,0 @@ -import re - - -class BaichuanTokenizer: - @classmethod - def count_chinese_characters(cls, text: str) -> int: - return len(re.findall(r"[\u4e00-\u9fa5]", text)) - - @classmethod - def count_english_vocabularies(cls, text: str) -> int: - # remove all non-alphanumeric characters but keep spaces and other symbols like !, ., etc. - text = re.sub(r"[^a-zA-Z0-9\s]", "", text) - # count the number of words not characters - return len(text.split()) - - @classmethod - def _get_num_tokens(cls, text: str) -> int: - # tokens = number of Chinese characters + number of English words * 1.3 - # (for estimation only, subject to actual return) - # https://platform.baichuan-ai.com/docs/text-Embedding - return int(cls.count_chinese_characters(text) + cls.count_english_vocabularies(text) * 1.3) diff --git a/api/core/model_runtime/model_providers/baichuan/llm/baichuan_turbo.py b/api/core/model_runtime/model_providers/baichuan/llm/baichuan_turbo.py deleted file mode 100644 index d5fda73009bba9..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/llm/baichuan_turbo.py +++ /dev/null @@ -1,144 +0,0 @@ -import json -from collections.abc import Iterator -from typing import Any, Optional, Union - -from requests import post - -from core.model_runtime.entities.message_entities import PromptMessageTool -from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo_errors import ( - BadRequestError, - InsufficientAccountBalanceError, - InternalServerError, - InvalidAPIKeyError, - InvalidAuthenticationError, - RateLimitReachedError, -) - - -class BaichuanModel: - api_key: str - - def __init__(self, api_key: str) -> None: - 
self.api_key = api_key - - @property - def _model_mapping(self) -> dict: - return { - "baichuan2-turbo": "Baichuan2-Turbo", - "baichuan3-turbo": "Baichuan3-Turbo", - "baichuan3-turbo-128k": "Baichuan3-Turbo-128k", - "baichuan4": "Baichuan4", - } - - @property - def request_headers(self) -> dict[str, Any]: - return { - "Content-Type": "application/json", - "Authorization": "Bearer " + self.api_key, - } - - def _build_parameters( - self, - model: str, - stream: bool, - messages: list[dict], - parameters: dict[str, Any], - tools: Optional[list[PromptMessageTool]] = None, - ) -> dict[str, Any]: - if model in self._model_mapping: - # the LargeLanguageModel._code_block_mode_wrapper() method will remove the response_format of parameters. - # we need to rename it to res_format to get its value - if parameters.get("res_format") == "json_object": - parameters["response_format"] = {"type": "json_object"} - - if tools or parameters.get("with_search_enhance") is True: - parameters["tools"] = [] - - # with_search_enhance is deprecated, use web_search instead - if parameters.get("with_search_enhance") is True: - parameters["tools"].append( - { - "type": "web_search", - "web_search": {"enable": True}, - } - ) - if tools: - for tool in tools: - parameters["tools"].append( - { - "type": "function", - "function": { - "name": tool.name, - "description": tool.description, - "parameters": tool.parameters, - }, - } - ) - - # turbo api accepts flat parameters - return { - "model": self._model_mapping.get(model), - "stream": stream, - "messages": messages, - **parameters, - } - else: - raise BadRequestError(f"Unknown model: {model}") - - def generate( - self, - model: str, - stream: bool, - messages: list[dict], - parameters: dict[str, Any], - timeout: int, - tools: Optional[list[PromptMessageTool]] = None, - ) -> Union[Iterator, dict]: - if model in self._model_mapping: - api_base = "https://api.baichuan-ai.com/v1/chat/completions" - else: - raise BadRequestError(f"Unknown model: 
{model}") - - data = self._build_parameters(model, stream, messages, parameters, tools) - - try: - response = post( - url=api_base, - headers=self.request_headers, - data=json.dumps(data), - timeout=timeout, - stream=stream, - ) - except Exception as e: - raise InternalServerError(f"Failed to invoke model: {e}") - - if response.status_code != 200: - try: - resp = response.json() - # try to parse error message - err = resp["error"]["type"] - msg = resp["error"]["message"] - except Exception as e: - raise InternalServerError(f"Failed to convert response to json: {e} with text: {response.text}") - - if err == "invalid_api_key": - raise InvalidAPIKeyError(msg) - elif err == "insufficient_quota": - raise InsufficientAccountBalanceError(msg) - elif err == "invalid_authentication": - raise InvalidAuthenticationError(msg) - elif err == "invalid_request_error": - raise BadRequestError(msg) - elif "rate" in err: - raise RateLimitReachedError(msg) - elif "internal" in err: - raise InternalServerError(msg) - elif err == "api_key_empty": - raise InvalidAPIKeyError(msg) - else: - raise InternalServerError(f"Unknown error: {err} with message: {msg}") - - if stream: - return response.iter_lines() - else: - return response.json() diff --git a/api/core/model_runtime/model_providers/baichuan/llm/baichuan_turbo_errors.py b/api/core/model_runtime/model_providers/baichuan/llm/baichuan_turbo_errors.py deleted file mode 100644 index 309b5cf413bd54..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/llm/baichuan_turbo_errors.py +++ /dev/null @@ -1,22 +0,0 @@ -class InvalidAuthenticationError(Exception): - pass - - -class InvalidAPIKeyError(Exception): - pass - - -class RateLimitReachedError(Exception): - pass - - -class InsufficientAccountBalanceError(Exception): - pass - - -class InternalServerError(Exception): - pass - - -class BadRequestError(Exception): - pass diff --git a/api/core/model_runtime/model_providers/baichuan/llm/llm.py 
b/api/core/model_runtime/model_providers/baichuan/llm/llm.py deleted file mode 100644 index 91a14bf1009006..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/llm/llm.py +++ /dev/null @@ -1,296 +0,0 @@ -import json -from collections.abc import Generator, Iterator -from typing import cast - -from core.model_runtime.entities.llm_entities import ( - LLMResult, - LLMResultChunk, - LLMResultChunkDelta, -) -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.baichuan.llm.baichuan_tokenizer import BaichuanTokenizer -from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo import BaichuanModel -from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo_errors import ( - BadRequestError, - InsufficientAccountBalanceError, - InternalServerError, - InvalidAPIKeyError, - InvalidAuthenticationError, - RateLimitReachedError, -) - - -class BaichuanLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - return self._generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stream=stream, - ) - - def get_num_tokens( - self, - model: str, - credentials: dict, 
- prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool] | None = None, - ) -> int: - return self._num_tokens_from_messages(prompt_messages) - - def _num_tokens_from_messages( - self, - messages: list[PromptMessage], - ) -> int: - """Calculate num tokens for baichuan model""" - - def tokens(text: str): - return BaichuanTokenizer._get_num_tokens(text) - - tokens_per_message = 3 - - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - num_tokens += tokens(str(value)) - num_tokens += 3 - - return num_tokens - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict for Baichuan - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - raise ValueError("User message content must be str") - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls: - message_dict["tool_calls"] = [tool_call.dict() for tool_call in message.tool_calls] - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - message_dict = {"role": "tool", "content": message.content, "tool_call_id": message.tool_call_id} - else: - raise ValueError(f"Unknown message type {type(message)}") - - return message_dict - - def validate_credentials(self, model: str, 
credentials: dict) -> None: - # ping - instance = BaichuanModel(api_key=credentials["api_key"]) - - try: - instance.generate( - model=model, - stream=False, - messages=[{"content": "ping", "role": "user"}], - parameters={ - "max_tokens": 1, - }, - timeout=60, - ) - except Exception as e: - raise CredentialsValidateFailedError(f"Invalid API key: {e}") - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stream: bool = True, - ) -> LLMResult | Generator: - instance = BaichuanModel(api_key=credentials["api_key"]) - messages = [self._convert_prompt_message_to_dict(m) for m in prompt_messages] - - # invoke model - response = instance.generate( - model=model, - stream=stream, - messages=messages, - parameters=model_parameters, - timeout=60, - tools=tools, - ) - - if stream: - return self._handle_chat_generate_stream_response(model, prompt_messages, credentials, response) - - return self._handle_chat_generate_response(model, prompt_messages, credentials, response) - - def _handle_chat_generate_response( - self, - model: str, - prompt_messages: list[PromptMessage], - credentials: dict, - response: dict, - ) -> LLMResult: - choices = response.get("choices", []) - assistant_message = AssistantPromptMessage(content="", tool_calls=[]) - if choices and choices[0]["finish_reason"] == "tool_calls": - for choice in choices: - for tool_call in choice["message"]["tool_calls"]: - tool = AssistantPromptMessage.ToolCall( - id=tool_call.get("id", ""), - type=tool_call.get("type", ""), - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=tool_call.get("function", {}).get("name", ""), - arguments=tool_call.get("function", {}).get("arguments", ""), - ), - ) - assistant_message.tool_calls.append(tool) - else: - for choice in choices: - assistant_message.content += choice["message"]["content"] - assistant_message.role = choice["message"]["role"] - - 
usage = response.get("usage") - if usage: - # transform usage - prompt_tokens = usage["prompt_tokens"] - completion_tokens = usage["completion_tokens"] - else: - # calculate num tokens - prompt_tokens = self._num_tokens_from_messages(prompt_messages) - completion_tokens = self._num_tokens_from_messages([assistant_message]) - - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=prompt_tokens, - completion_tokens=completion_tokens, - ) - - return LLMResult( - model=model, - prompt_messages=prompt_messages, - message=assistant_message, - usage=usage, - ) - - def _handle_chat_generate_stream_response( - self, - model: str, - prompt_messages: list[PromptMessage], - credentials: dict, - response: Iterator, - ) -> Generator: - for line in response: - if not line: - continue - line = line.decode("utf-8") - # remove the first `data: ` prefix - if line.startswith("data:"): - line = line[5:].strip() - try: - data = json.loads(line) - except Exception as e: - if line.strip() == "[DONE]": - return - choices = data.get("choices", []) - - stop_reason = "" - for choice in choices: - if choice.get("finish_reason"): - stop_reason = choice["finish_reason"] - - if len(choice["delta"]["content"]) == 0: - continue - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=choice["delta"]["content"], tool_calls=[]), - finish_reason=stop_reason, - ), - ) - - # if there is usage, the response is the last one, yield it and return - if "usage" in data: - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=data["usage"]["prompt_tokens"], - completion_tokens=data["usage"]["completion_tokens"], - ) - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content="", tool_calls=[]), - usage=usage, - finish_reason=stop_reason, - ), 
- ) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [InternalServerError], - InvokeRateLimitError: [RateLimitReachedError], - InvokeAuthorizationError: [ - InvalidAuthenticationError, - InsufficientAccountBalanceError, - InvalidAPIKeyError, - ], - InvokeBadRequestError: [BadRequestError, KeyError], - } diff --git a/api/core/model_runtime/model_providers/baichuan/text_embedding/__init__.py b/api/core/model_runtime/model_providers/baichuan/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/baichuan/text_embedding/baichuan-text-embedding.yaml b/api/core/model_runtime/model_providers/baichuan/text_embedding/baichuan-text-embedding.yaml deleted file mode 100644 index 67e5fcc47c9f58..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/text_embedding/baichuan-text-embedding.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: baichuan-text-embedding -model_type: text-embedding -model_properties: - context_size: 512 - max_chunks: 16 diff --git a/api/core/model_runtime/model_providers/bedrock/__init__.py b/api/core/model_runtime/model_providers/bedrock/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/bedrock/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/bedrock/_assets/icon_l_en.svg deleted file mode 100644 index 667db50800d881..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/_assets/icon_l_en.svg +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - - - diff --git 
a/api/core/model_runtime/model_providers/bedrock/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/bedrock/_assets/icon_s_en.svg deleted file mode 100644 index 6a0235af92c3cb..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/_assets/icon_s_en.svg +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/bedrock/bedrock.py b/api/core/model_runtime/model_providers/bedrock/bedrock.py deleted file mode 100644 index 1cfc1d199cbf8d..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/bedrock.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class BedrockProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `amazon.titan-text-lite-v1` model by default for validating credentials - model_for_validation = credentials.get("model_for_validation", "amazon.titan-text-lite-v1") - model_instance.validate_credentials(model=model_for_validation, credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/bedrock/bedrock.yaml b/api/core/model_runtime/model_providers/bedrock/bedrock.yaml deleted file mode 100644 index c540ee23b31672..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/bedrock.yaml +++ /dev/null @@ -1,89 +0,0 @@ -provider: bedrock -label: - en_US: AWS -description: - en_US: AWS Bedrock's models. -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#FCFDFF" -help: - title: - en_US: Get your Access Key and Secret Access Key from AWS Console - url: - en_US: https://console.aws.amazon.com/ -supported_model_types: - - llm - - text-embedding -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: aws_access_key_id - required: false - label: - en_US: Access Key (If not provided, credentials are obtained from the running environment.) - zh_Hans: Access Key - type: secret-input - placeholder: - en_US: Enter your Access Key - zh_Hans: 在此输入您的 Access Key - - variable: aws_secret_access_key - required: false - label: - en_US: Secret Access Key - zh_Hans: Secret Access Key - type: secret-input - placeholder: - en_US: Enter your Secret Access Key - zh_Hans: 在此输入您的 Secret Access Key - - variable: aws_region - required: true - label: - en_US: AWS Region - zh_Hans: AWS 地区 - type: select - default: us-east-1 - options: - - value: us-east-1 - label: - en_US: US East (N. 
Virginia) - zh_Hans: 美国东部 (弗吉尼亚北部) - - value: us-west-2 - label: - en_US: US West (Oregon) - zh_Hans: 美国西部 (俄勒冈州) - - value: ap-southeast-1 - label: - en_US: Asia Pacific (Singapore) - zh_Hans: 亚太地区 (新加坡) - - value: ap-northeast-1 - label: - en_US: Asia Pacific (Tokyo) - zh_Hans: 亚太地区 (东京) - - value: eu-central-1 - label: - en_US: Europe (Frankfurt) - zh_Hans: 欧洲 (法兰克福) - - value: eu-west-2 - label: - en_US: Eu west London (London) - zh_Hans: 欧洲西部 (伦敦) - - value: us-gov-west-1 - label: - en_US: AWS GovCloud (US-West) - zh_Hans: AWS GovCloud (US-West) - - value: ap-southeast-2 - label: - en_US: Asia Pacific (Sydney) - zh_Hans: 亚太地区 (悉尼) - - variable: model_for_validation - required: false - label: - en_US: Available Model Name - zh_Hans: 可用模型名称 - type: text-input - placeholder: - en_US: A model you have access to (e.g. amazon.titan-text-lite-v1) for validation. - zh_Hans: 为了进行验证,请输入一个您可用的模型名称 (例如:amazon.titan-text-lite-v1) diff --git a/api/core/model_runtime/model_providers/bedrock/llm/__init__.py b/api/core/model_runtime/model_providers/bedrock/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/bedrock/llm/_position.yaml b/api/core/model_runtime/model_providers/bedrock/llm/_position.yaml deleted file mode 100644 index 86c8061deefac8..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/_position.yaml +++ /dev/null @@ -1,24 +0,0 @@ -- amazon.titan-text-express-v1 -- amazon.titan-text-lite-v1 -- anthropic.claude-instant-v1 -- anthropic.claude-v1 -- anthropic.claude-v2 -- anthropic.claude-v2:1 -- anthropic.claude-3-sonnet-v1:0 -- anthropic.claude-3-haiku-v1:0 -- cohere.command-light-text-v14 -- cohere.command-text-v14 -- cohere.command-r-plus-v1.0 -- cohere.command-r-v1.0 -- meta.llama3-1-8b-instruct-v1:0 -- meta.llama3-1-70b-instruct-v1:0 -- meta.llama3-1-405b-instruct-v1:0 -- meta.llama3-8b-instruct-v1:0 -- meta.llama3-70b-instruct-v1:0 -- meta.llama2-13b-chat-v1 -- 
meta.llama2-70b-chat-v1 -- mistral.mistral-large-2407-v1:0 -- mistral.mistral-small-2402-v1:0 -- mistral.mistral-large-2402-v1:0 -- mistral.mixtral-8x7b-instruct-v0:1 -- mistral.mistral-7b-instruct-v0:2 diff --git a/api/core/model_runtime/model_providers/bedrock/llm/ai21.j2-mid-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/ai21.j2-mid-v1.yaml deleted file mode 100644 index 65dad02969652b..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/ai21.j2-mid-v1.yaml +++ /dev/null @@ -1,47 +0,0 @@ -model: ai21.j2-mid-v1 -label: - en_US: J2 Mid V1 -model_type: llm -model_properties: - mode: completion - context_size: 8191 -parameter_rules: - - name: temperature - use_template: temperature - - name: topP - use_template: top_p - - name: maxTokens - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 2048 - - name: count_penalty - label: - en_US: Count Penalty - required: false - type: float - default: 0 - min: 0 - max: 1 - - name: presence_penalty - label: - en_US: Presence Penalty - required: false - type: float - default: 0 - min: 0 - max: 5 - - name: frequency_penalty - label: - en_US: Frequency Penalty - required: false - type: float - default: 0 - min: 0 - max: 500 -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/ai21.j2-ultra-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/ai21.j2-ultra-v1.yaml deleted file mode 100644 index b72f8064bde6d5..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/ai21.j2-ultra-v1.yaml +++ /dev/null @@ -1,47 +0,0 @@ -model: ai21.j2-ultra-v1 -label: - en_US: J2 Ultra V1 -model_type: llm -model_properties: - mode: completion - context_size: 8191 -parameter_rules: - - name: temperature - use_template: temperature - - name: topP - use_template: top_p - - name: maxTokens - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 2048 - - name: 
count_penalty - label: - en_US: Count Penalty - required: false - type: float - default: 0 - min: 0 - max: 1 - - name: presence_penalty - label: - en_US: Presence Penalty - required: false - type: float - default: 0 - min: 0 - max: 5 - - name: frequency_penalty - label: - en_US: Frequency Penalty - required: false - type: float - default: 0 - min: 0 - max: 500 -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/amazon.titan-text-express-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/amazon.titan-text-express-v1.yaml deleted file mode 100644 index 543c16d5cd53e5..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/amazon.titan-text-express-v1.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: amazon.titan-text-express-v1 -label: - en_US: Titan Text G1 - Express -model_type: llm -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: topP - use_template: top_p - - name: maxTokenCount - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 8000 -pricing: - input: '0.0008' - output: '0.0016' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/amazon.titan-text-lite-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/amazon.titan-text-lite-v1.yaml deleted file mode 100644 index 2c6151c2390485..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/amazon.titan-text-lite-v1.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: amazon.titan-text-lite-v1 -label: - en_US: Titan Text G1 - Lite -model_type: llm -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: topP - use_template: top_p - - name: maxTokenCount - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 2048 -pricing: - input: '0.0003' - output: 
'0.0004' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-haiku-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-haiku-v1.yaml deleted file mode 100644 index c2d5eb64715616..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-haiku-v1.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: anthropic.claude-3-haiku-20240307-v1:0 -label: - en_US: Claude 3 Haiku -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - # docs: https://docs.anthropic.com/claude/docs/system-prompts - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. 
You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. - - name: response_format - use_template: response_format -pricing: - input: '0.00025' - output: '0.00125' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-opus-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-opus-v1.yaml deleted file mode 100644 index f90fa04266187b..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-opus-v1.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: anthropic.claude-3-opus-20240229-v1:0 -label: - en_US: Claude 3 Opus -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - # docs: https://docs.anthropic.com/claude/docs/system-prompts - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. 
- - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. - - name: response_format - use_template: response_format -pricing: - input: '0.015' - output: '0.075' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.5.yaml b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.5.yaml deleted file mode 100644 index dad0d6b6b6c23c..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.5.yaml +++ /dev/null @@ -1,60 +0,0 @@ -model: anthropic.claude-3-5-sonnet-20240620-v1:0 -label: - en_US: Claude 3.5 Sonnet -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. 
Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. 
- - name: response_format - use_template: response_format -pricing: - input: '0.003' - output: '0.015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.yaml deleted file mode 100644 index 962def8011b157..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.yaml +++ /dev/null @@ -1,60 +0,0 @@ -model: anthropic.claude-3-sonnet-20240229-v1:0 -label: - en_US: Claude 3 Sonnet -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. 
- - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. - - name: response_format - use_template: response_format -pricing: - input: '0.003' - output: '0.015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-instant-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-instant-v1.yaml deleted file mode 100644 index 8422f079c5e1b4..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-instant-v1.yaml +++ /dev/null @@ -1,52 +0,0 @@ -model: anthropic.claude-instant-v1 -label: - en_US: Claude Instant 1 -model_type: llm -model_properties: - mode: chat - context_size: 100000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. 
- - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. -pricing: - input: '0.0008' - output: '0.0024' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v1.yaml deleted file mode 100644 index 6a714b1055b2a9..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v1.yaml +++ /dev/null @@ -1,53 +0,0 @@ -model: anthropic.claude-v1 -label: - en_US: Claude 1 -model_type: llm -model_properties: - mode: chat - context_size: 100000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. 
Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. 
-pricing: - input: '0.008' - output: '0.024' - unit: '0.001' - currency: USD -deprecated: true diff --git a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v2.1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v2.1.yaml deleted file mode 100644 index 70294e4ad3ffde..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v2.1.yaml +++ /dev/null @@ -1,54 +0,0 @@ -model: anthropic.claude-v2:1 -label: - en_US: Claude 2.1 -model_type: llm -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. 
- - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. - - name: response_format - use_template: response_format -pricing: - input: '0.008' - output: '0.024' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v2.yaml b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v2.yaml deleted file mode 100644 index 0a8ea61b6df0c8..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-v2.yaml +++ /dev/null @@ -1,54 +0,0 @@ -model: anthropic.claude-v2 -label: - en_US: Claude 2 -model_type: llm -model_properties: - mode: chat - context_size: 100000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. 
- - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. - - name: response_format - use_template: response_format -pricing: - input: '0.008' - output: '0.024' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-light-text-v14.yaml b/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-light-text-v14.yaml deleted file mode 100644 index 74500095511f1c..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-light-text-v14.yaml +++ /dev/null @@ -1,35 +0,0 @@ -model: cohere.command-light-text-v14 -label: - en_US: Command Light Text V14 -model_type: llm -model_properties: - mode: completion - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: p - use_template: top_p - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - min: 0 - max: 500 - default: 0 - - name: max_tokens - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 -pricing: - input: '0.0003' - output: '0.0006' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-r-plus-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-r-plus-v1.0.yaml deleted file mode 100644 index 3c0bb4e8d58f04..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-r-plus-v1.0.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: cohere.command-r-plus-v1:0 -label: - en_US: Command R+ -model_type: llm -features: - - tool-call - #- stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 -pricing: - input: '3' - output: '15' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-r-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-r-v1.0.yaml deleted file mode 100644 index a34f48319f2ac0..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-r-v1.0.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: cohere.command-r-v1:0 -label: - en_US: Command R -model_type: llm -features: - - tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 -pricing: - input: '0.5' - output: '1.5' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-text-v14.yaml b/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-text-v14.yaml deleted file mode 100644 index 6aea5be170c6ee..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/cohere.command-text-v14.yaml +++ /dev/null @@ -1,32 +0,0 @@ -model: cohere.command-text-v14 -label: - en_US: Command Text V14 -model_type: llm -model_properties: - mode: completion - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: p - use_template: top_p - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 -pricing: - input: '0.0015' - output: '0.0020' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-haiku-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-haiku-v1.yaml deleted file mode 100644 index 24a65ef1bb84ab..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-haiku-v1.yaml +++ /dev/null @@ -1,59 +0,0 @@ -model: eu.anthropic.claude-3-haiku-20240307-v1:0 -label: - en_US: Claude 3 Haiku(EU.Cross Region Inference) -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - # docs: https://docs.anthropic.com/claude/docs/system-prompts - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. 
- - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. -pricing: - input: '0.00025' - output: '0.00125' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v1.5.yaml b/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v1.5.yaml deleted file mode 100644 index e3d25c7d8f6a68..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v1.5.yaml +++ /dev/null @@ -1,58 +0,0 @@ -model: eu.anthropic.claude-3-5-sonnet-20240620-v1:0 -label: - en_US: Claude 3.5 Sonnet(EU.Cross Region Inference) -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. 
Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. 
-pricing: - input: '0.003' - output: '0.015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v1.yaml deleted file mode 100644 index 9a06a4ad6d5162..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/eu.anthropic.claude-3-sonnet-v1.yaml +++ /dev/null @@ -1,58 +0,0 @@ -model: eu.anthropic.claude-3-sonnet-20240229-v1:0 -label: - en_US: Claude 3 Sonnet(EU.Cross Region Inference) -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. 
You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. -pricing: - input: '0.003' - output: '0.015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/llm.py b/api/core/model_runtime/model_providers/bedrock/llm/llm.py deleted file mode 100644 index 77bab0c2945887..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/llm.py +++ /dev/null @@ -1,903 +0,0 @@ -# standard import -import base64 -import json -import logging -import mimetypes -from collections.abc import Generator -from typing import Optional, Union, cast - -# 3rd import -import boto3 -import requests -from botocore.config import Config -from botocore.exceptions import ( - ClientError, - EndpointConnectionError, - NoRegionError, - ServiceNotInRegionError, - UnknownServiceError, -) - -# local import -from core.model_runtime.callbacks.base_callback import Callback -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - -logger = 
logging.getLogger(__name__) -ANTHROPIC_BLOCK_MODE_PROMPT = """You should always follow the instructions and output a valid {{block}} object. -The structure of the {{block}} object you can found in the instructions, use {"answer": "$your_answer"} as the default structure -if you are not sure about the structure. - - -{{instructions}} - -""" # noqa: E501 - - -class BedrockLargeLanguageModel(LargeLanguageModel): - # please refer to the documentation: https://docs.aws.amazon.com/bedrock/latest/userguide/conversation-inference.html - # TODO There is invoke issue: context limit on Cohere Model, will add them after fixed. - CONVERSE_API_ENABLED_MODEL_INFO = [ - {"prefix": "anthropic.claude-v2", "support_system_prompts": True, "support_tool_use": False}, - {"prefix": "anthropic.claude-v1", "support_system_prompts": True, "support_tool_use": False}, - {"prefix": "us.anthropic.claude-3", "support_system_prompts": True, "support_tool_use": True}, - {"prefix": "eu.anthropic.claude-3", "support_system_prompts": True, "support_tool_use": True}, - {"prefix": "anthropic.claude-3", "support_system_prompts": True, "support_tool_use": True}, - {"prefix": "meta.llama", "support_system_prompts": True, "support_tool_use": False}, - {"prefix": "mistral.mistral-7b-instruct", "support_system_prompts": False, "support_tool_use": False}, - {"prefix": "mistral.mixtral-8x7b-instruct", "support_system_prompts": False, "support_tool_use": False}, - {"prefix": "mistral.mistral-large", "support_system_prompts": True, "support_tool_use": True}, - {"prefix": "mistral.mistral-small", "support_system_prompts": True, "support_tool_use": True}, - {"prefix": "cohere.command-r", "support_system_prompts": True, "support_tool_use": True}, - {"prefix": "amazon.titan", "support_system_prompts": False, "support_tool_use": False}, - ] - - @staticmethod - def _find_model_info(model_id): - for model in BedrockLargeLanguageModel.CONVERSE_API_ENABLED_MODEL_INFO: - if model_id.startswith(model["prefix"]): - return 
model - logger.info(f"current model id: {model_id} did not support by Converse API") - return None - - def _code_block_mode_wrapper( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - callbacks: list[Callback] = None, - ) -> Union[LLMResult, Generator]: - """ - Code block mode wrapper for invoking large language model - """ - if model_parameters.get("response_format"): - stop = stop or [] - if "```\n" not in stop: - stop.append("```\n") - if "\n```" not in stop: - stop.append("\n```") - response_format = model_parameters.pop("response_format") - format_prompt = SystemPromptMessage( - content=ANTHROPIC_BLOCK_MODE_PROMPT.replace("{{instructions}}", prompt_messages[0].content).replace( - "{{block}}", response_format - ) - ) - if len(prompt_messages) > 0 and isinstance(prompt_messages[0], SystemPromptMessage): - prompt_messages[0] = format_prompt - else: - prompt_messages.insert(0, format_prompt) - prompt_messages.append(AssistantPromptMessage(content=f"\n```{response_format}")) - return self._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - - 
model_info = BedrockLargeLanguageModel._find_model_info(model) - if model_info: - model_info["model"] = model - # invoke models via boto3 converse API - return self._generate_with_converse( - model_info, credentials, prompt_messages, model_parameters, stop, stream, user, tools - ) - # invoke other models via boto3 client - return self._generate(model, credentials, prompt_messages, model_parameters, stop, stream, user) - - def _generate_with_converse( - self, - model_info: dict, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - tools: Optional[list[PromptMessageTool]] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model with converse API - - :param model_info: model information - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :return: full response or stream response chunk generator result - """ - bedrock_client = boto3.client( - service_name="bedrock-runtime", - aws_access_key_id=credentials.get("aws_access_key_id"), - aws_secret_access_key=credentials.get("aws_secret_access_key"), - region_name=credentials["aws_region"], - ) - - system, prompt_message_dicts = self._convert_converse_prompt_messages(prompt_messages) - inference_config, additional_model_fields = self._convert_converse_api_model_parameters(model_parameters, stop) - - parameters = { - "modelId": model_info["model"], - "messages": prompt_message_dicts, - "inferenceConfig": inference_config, - "additionalModelRequestFields": additional_model_fields, - } - - if model_info["support_system_prompts"] and system and len(system) > 0: - parameters["system"] = system - - if model_info["support_tool_use"] and tools: - parameters["toolConfig"] = self._convert_converse_tool_config(tools=tools) - try: - if stream: - response 
= bedrock_client.converse_stream(**parameters) - return self._handle_converse_stream_response( - model_info["model"], credentials, response, prompt_messages - ) - else: - response = bedrock_client.converse(**parameters) - return self._handle_converse_response(model_info["model"], credentials, response, prompt_messages) - except ClientError as ex: - error_code = ex.response["Error"]["Code"] - full_error_msg = f"{error_code}: {ex.response['Error']['Message']}" - raise self._map_client_to_invoke_error(error_code, full_error_msg) - except (EndpointConnectionError, NoRegionError, ServiceNotInRegionError) as ex: - raise InvokeConnectionError(str(ex)) - - except UnknownServiceError as ex: - raise InvokeServerUnavailableError(str(ex)) - - except Exception as ex: - raise InvokeError(str(ex)) - - def _handle_converse_response( - self, model: str, credentials: dict, response: dict, prompt_messages: list[PromptMessage] - ) -> LLMResult: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: full response chunk generator result - """ - response_content = response["output"]["message"]["content"] - # transform assistant message to prompt message - if response["stopReason"] == "tool_use": - tool_calls = [] - text, tool_use = self._extract_tool_use(response_content) - - tool_call = AssistantPromptMessage.ToolCall( - id=tool_use["toolUseId"], - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=tool_use["name"], arguments=json.dumps(tool_use["input"]) - ), - ) - tool_calls.append(tool_call) - - assistant_prompt_message = AssistantPromptMessage(content=text, tool_calls=tool_calls) - else: - assistant_prompt_message = AssistantPromptMessage(content=response_content[0]["text"]) - - # calculate num tokens - if response["usage"]: - # transform usage - prompt_tokens = response["usage"]["inputTokens"] - completion_tokens = 
response["usage"]["outputTokens"] - else: - # calculate num tokens - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - result = LLMResult( - model=model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - ) - return result - - def _extract_tool_use(self, content: dict) -> tuple[str, dict]: - tool_use = {} - text = "" - for item in content: - if "toolUse" in item: - tool_use = item["toolUse"] - elif "text" in item: - text = item["text"] - else: - raise ValueError(f"Got unknown item: {item}") - return text, tool_use - - def _handle_converse_stream_response( - self, - model: str, - credentials: dict, - response: dict, - prompt_messages: list[PromptMessage], - ) -> Generator: - """ - Handle llm chat stream response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: full response or stream response chunk generator result - """ - - try: - full_assistant_content = "" - return_model = None - input_tokens = 0 - output_tokens = 0 - finish_reason = None - index = 0 - tool_calls: list[AssistantPromptMessage.ToolCall] = [] - tool_use = {} - - for chunk in response["stream"]: - if "messageStart" in chunk: - return_model = model - elif "messageStop" in chunk: - finish_reason = chunk["messageStop"]["stopReason"] - elif "contentBlockStart" in chunk: - tool = chunk["contentBlockStart"]["start"]["toolUse"] - tool_use["toolUseId"] = tool["toolUseId"] - tool_use["name"] = tool["name"] - elif "metadata" in chunk: - input_tokens = chunk["metadata"]["usage"]["inputTokens"] - output_tokens = chunk["metadata"]["usage"]["outputTokens"] - usage = self._calc_response_usage(model, credentials, input_tokens, output_tokens) - yield 
LLMResultChunk( - model=return_model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=AssistantPromptMessage(content="", tool_calls=tool_calls), - finish_reason=finish_reason, - usage=usage, - ), - ) - elif "contentBlockDelta" in chunk: - delta = chunk["contentBlockDelta"]["delta"] - if "text" in delta: - chunk_text = delta["text"] or "" - full_assistant_content += chunk_text - assistant_prompt_message = AssistantPromptMessage( - content=chunk_text or "", - ) - index = chunk["contentBlockDelta"]["contentBlockIndex"] - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index + 1, - message=assistant_prompt_message, - ), - ) - elif "toolUse" in delta: - if "input" not in tool_use: - tool_use["input"] = "" - tool_use["input"] += delta["toolUse"]["input"] - elif "contentBlockStop" in chunk: - if "input" in tool_use: - tool_call = AssistantPromptMessage.ToolCall( - id=tool_use["toolUseId"], - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=tool_use["name"], arguments=tool_use["input"] - ), - ) - tool_calls.append(tool_call) - tool_use = {} - - except Exception as ex: - raise InvokeError(str(ex)) - - def _convert_converse_api_model_parameters( - self, model_parameters: dict, stop: Optional[list[str]] = None - ) -> tuple[dict, dict]: - inference_config = {} - additional_model_fields = {} - if "max_tokens" in model_parameters: - inference_config["maxTokens"] = model_parameters["max_tokens"] - - if "temperature" in model_parameters: - inference_config["temperature"] = model_parameters["temperature"] - - if "top_p" in model_parameters: - inference_config["topP"] = model_parameters["temperature"] - - if stop: - inference_config["stopSequences"] = stop - - if "top_k" in model_parameters: - additional_model_fields["top_k"] = model_parameters["top_k"] - - return inference_config, additional_model_fields - - def 
_convert_converse_prompt_messages(self, prompt_messages: list[PromptMessage]) -> tuple[str, list[dict]]: - """ - Convert prompt messages to dict list and system - """ - - system = [] - prompt_message_dicts = [] - for message in prompt_messages: - if isinstance(message, SystemPromptMessage): - message.content = message.content.strip() - system.append({"text": message.content}) - else: - prompt_message_dicts.append(self._convert_prompt_message_to_dict(message)) - - return system, prompt_message_dicts - - def _convert_converse_tool_config(self, tools: Optional[list[PromptMessageTool]] = None) -> dict: - tool_config = {} - configs = [] - if tools: - for tool in tools: - configs.append( - { - "toolSpec": { - "name": tool.name, - "description": tool.description, - "inputSchema": {"json": tool.parameters}, - } - } - ) - tool_config["tools"] = configs - return tool_config - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": [{"text": message.content}]} - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - sub_message_dict = {"text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - if not message_content.data.startswith("data:"): - # fetch image data from url - try: - url = message_content.data - image_content = requests.get(url).content - if "?" 
in url: - url = url.split("?")[0] - mime_type, _ = mimetypes.guess_type(url) - base64_data = base64.b64encode(image_content).decode("utf-8") - except Exception as ex: - raise ValueError(f"Failed to fetch image data from url {message_content.data}, {ex}") - else: - data_split = message_content.data.split(";base64,") - mime_type = data_split[0].replace("data:", "") - base64_data = data_split[1] - image_content = base64.b64decode(base64_data) - - if mime_type not in {"image/jpeg", "image/png", "image/gif", "image/webp"}: - raise ValueError( - f"Unsupported image type {mime_type}, " - f"only support image/jpeg, image/png, image/gif, and image/webp" - ) - - sub_message_dict = { - "image": {"format": mime_type.replace("image/", ""), "source": {"bytes": image_content}} - } - sub_messages.append(sub_message_dict) - - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - if message.tool_calls: - message_dict = { - "role": "assistant", - "content": [ - { - "toolUse": { - "toolUseId": message.tool_calls[0].id, - "name": message.tool_calls[0].function.name, - "input": json.loads(message.tool_calls[0].function.arguments), - } - } - ], - } - else: - message_dict = {"role": "assistant", "content": [{"text": message.content}]} - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = [{"text": message.content}] - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - message_dict = { - "role": "user", - "content": [ - { - "toolResult": { - "toolUseId": message.tool_call_id, - "content": [{"json": {"text": message.content}}], - } - } - ], - } - else: - raise ValueError(f"Got unknown type {message}") - return message_dict - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage] | str, - tools: Optional[list[PromptMessageTool]] = None, - ) 
-> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages or message string - :param tools: tools for tool calling - :return:md = genai.GenerativeModel(model) - """ - prefix = model.split(".")[0] - model_name = model.split(".")[1] - - if isinstance(prompt_messages, str): - prompt = prompt_messages - else: - prompt = self._convert_messages_to_prompt(prompt_messages, prefix, model_name) - - return self._get_num_tokens_by_gpt2(prompt) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - required_params = {} - if "anthropic" in model: - required_params = { - "max_tokens": 32, - } - elif "ai21" in model: - # ValidationException: Malformed input request: #/temperature: expected type: Number, - # found: Null#/maxTokens: expected type: Integer, found: Null#/topP: expected type: Number, found: Null, - # please reformat your input and try again. - required_params = { - "temperature": 0.7, - "topP": 0.9, - "maxTokens": 32, - } - - try: - ping_message = UserPromptMessage(content="ping") - self._invoke( - model=model, - credentials=credentials, - prompt_messages=[ping_message], - model_parameters=required_params, - stream=False, - ) - - except ClientError as ex: - error_code = ex.response["Error"]["Code"] - full_error_msg = f"{error_code}: {ex.response['Error']['Message']}" - raise CredentialsValidateFailedError(str(self._map_client_to_invoke_error(error_code, full_error_msg))) - - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _convert_one_message_to_text( - self, message: PromptMessage, model_prefix: str, model_name: Optional[str] = None - ) -> str: - """ - Convert a single message to a string. - - :param message: PromptMessage to convert. - :return: String representation of the message. 
- """ - human_prompt_prefix = "" - human_prompt_postfix = "" - ai_prompt = "" - - content = message.content - - if isinstance(message, UserPromptMessage): - body = content - if isinstance(content, list): - body = "".join([c.data for c in content if c.type == PromptMessageContentType.TEXT]) - message_text = f"{human_prompt_prefix} {body} {human_prompt_postfix}" - elif isinstance(message, AssistantPromptMessage): - message_text = f"{ai_prompt} {content}" - elif isinstance(message, SystemPromptMessage): - message_text = content - elif isinstance(message, ToolPromptMessage): - message_text = f"{human_prompt_prefix} {message.content}" - else: - raise ValueError(f"Got unknown type {message}") - - return message_text - - def _convert_messages_to_prompt( - self, messages: list[PromptMessage], model_prefix: str, model_name: Optional[str] = None - ) -> str: - """ - Format a list of messages into a full prompt for the Anthropic, Amazon and Llama models - - :param messages: List of PromptMessage to combine. - :param model_name: specific model name.Optional,just to distinguish llama2 and llama3 - :return: Combined string with necessary human_prompt and ai_prompt tags. 
- """ - if not messages: - return "" - - messages = messages.copy() # don't mutate the original list - if not isinstance(messages[-1], AssistantPromptMessage): - messages.append(AssistantPromptMessage(content="")) - - text = "".join(self._convert_one_message_to_text(message, model_prefix, model_name) for message in messages) - - # trim off the trailing ' ' that might come from the "Assistant: " - return text.rstrip() - - def _create_payload( - self, - model: str, - prompt_messages: list[PromptMessage], - model_parameters: dict, - stop: Optional[list[str]] = None, - stream: bool = True, - ): - """ - Create payload for bedrock api call depending on model provider - """ - payload = {} - model_prefix = model.split(".")[0] - model_name = model.split(".")[1] - - if model_prefix == "ai21": - payload["temperature"] = model_parameters.get("temperature") - payload["topP"] = model_parameters.get("topP") - payload["maxTokens"] = model_parameters.get("maxTokens") - payload["prompt"] = self._convert_messages_to_prompt(prompt_messages, model_prefix) - - if model_parameters.get("presencePenalty"): - payload["presencePenalty"] = {model_parameters.get("presencePenalty")} - if model_parameters.get("frequencyPenalty"): - payload["frequencyPenalty"] = {model_parameters.get("frequencyPenalty")} - if model_parameters.get("countPenalty"): - payload["countPenalty"] = {model_parameters.get("countPenalty")} - - elif model_prefix == "cohere": - payload = {**model_parameters} - payload["prompt"] = prompt_messages[0].content - payload["stream"] = stream - - else: - raise ValueError(f"Got unknown model prefix {model_prefix}") - - return payload - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: credentials 
kwargs - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - client_config = Config(region_name=credentials["aws_region"]) - - runtime_client = boto3.client( - service_name="bedrock-runtime", - config=client_config, - aws_access_key_id=credentials.get("aws_access_key_id"), - aws_secret_access_key=credentials.get("aws_secret_access_key"), - ) - - model_prefix = model.split(".")[0] - payload = self._create_payload(model, prompt_messages, model_parameters, stop, stream) - - # need workaround for ai21 models which doesn't support streaming - if stream and model_prefix != "ai21": - invoke = runtime_client.invoke_model_with_response_stream - else: - invoke = runtime_client.invoke_model - - try: - body_jsonstr = json.dumps(payload) - response = invoke(modelId=model, contentType="application/json", accept="*/*", body=body_jsonstr) - except ClientError as ex: - error_code = ex.response["Error"]["Code"] - full_error_msg = f"{error_code}: {ex.response['Error']['Message']}" - raise self._map_client_to_invoke_error(error_code, full_error_msg) - - except (EndpointConnectionError, NoRegionError, ServiceNotInRegionError) as ex: - raise InvokeConnectionError(str(ex)) - - except UnknownServiceError as ex: - raise InvokeServerUnavailableError(str(ex)) - - except Exception as ex: - raise InvokeError(str(ex)) - - if stream: - return self._handle_generate_stream_response(model, credentials, response, prompt_messages) - - return self._handle_generate_response(model, credentials, response, prompt_messages) - - def _handle_generate_response( - self, model: str, credentials: dict, response: dict, prompt_messages: list[PromptMessage] - ) -> LLMResult: - """ - Handle llm response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: 
prompt messages - :return: llm response - """ - response_body = json.loads(response.get("body").read().decode("utf-8")) - - finish_reason = response_body.get("error") - - if finish_reason is not None: - raise InvokeError(finish_reason) - - # get output text and calculate num tokens based on model / provider - model_prefix = model.split(".")[0] - - if model_prefix == "ai21": - output = response_body.get("completions")[0].get("data").get("text") - prompt_tokens = len(response_body.get("prompt").get("tokens")) - completion_tokens = len(response_body.get("completions")[0].get("data").get("tokens")) - - elif model_prefix == "cohere": - output = response_body.get("generations")[0].get("text") - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, output or "") - - else: - raise ValueError(f"Got unknown model prefix {model_prefix} when handling block response") - - # construct assistant message from output - assistant_prompt_message = AssistantPromptMessage(content=output) - - # calculate usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # construct response - result = LLMResult( - model=model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - ) - - return result - - def _handle_generate_stream_response( - self, model: str, credentials: dict, response: dict, prompt_messages: list[PromptMessage] - ) -> Generator: - """ - Handle llm stream response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator result - """ - model_prefix = model.split(".")[0] - if model_prefix == "ai21": - response_body = json.loads(response.get("body").read().decode("utf-8")) - - content = response_body.get("completions")[0].get("data").get("text") - finish_reason = 
response_body.get("completions")[0].get("finish_reason") - - prompt_tokens = len(response_body.get("prompt").get("tokens")) - completion_tokens = len(response_body.get("completions")[0].get("data").get("tokens")) - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, message=AssistantPromptMessage(content=content), finish_reason=finish_reason, usage=usage - ), - ) - return - - stream = response.get("body") - if not stream: - raise InvokeError("No response body") - - index = -1 - for event in stream: - chunk = event.get("chunk") - - if not chunk: - exception_name = next(iter(event)) - full_ex_msg = f"{exception_name}: {event[exception_name]['message']}" - raise self._map_client_to_invoke_error(exception_name, full_ex_msg) - - payload = json.loads(chunk.get("bytes").decode()) - - model_prefix = model.split(".")[0] - if model_prefix == "cohere": - content_delta = payload.get("text") - finish_reason = payload.get("finish_reason") - - else: - raise ValueError(f"Got unknown model prefix {model_prefix} when handling stream response") - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage( - content=content_delta or "", - ) - index += 1 - - if not finish_reason: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=assistant_prompt_message), - ) - - else: - # get num tokens from metrics in last chunk - prompt_tokens = payload["amazon-bedrock-invocationMetrics"]["inputTokenCount"] - completion_tokens = payload["amazon-bedrock-invocationMetrics"]["outputTokenCount"] - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, 
message=assistant_prompt_message, finish_reason=finish_reason, usage=usage - ), - ) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the ermd = genai.GenerativeModel(model) error type thrown to the caller - The value is the md = genai.GenerativeModel(model) error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke emd = genai.GenerativeModel(model) error mapping - """ - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [], - InvokeRateLimitError: [], - InvokeAuthorizationError: [], - InvokeBadRequestError: [], - } - - def _map_client_to_invoke_error(self, error_code: str, error_msg: str) -> type[InvokeError]: - """ - Map client error to invoke error - - :param error_code: error code - :param error_msg: error message - :return: invoke error - """ - - if error_code == "AccessDeniedException": - return InvokeAuthorizationError(error_msg) - elif error_code in {"ResourceNotFoundException", "ValidationException"}: - return InvokeBadRequestError(error_msg) - elif error_code in {"ThrottlingException", "ServiceQuotaExceededException"}: - return InvokeRateLimitError(error_msg) - elif error_code in { - "ModelTimeoutException", - "ModelErrorException", - "InternalServerException", - "ModelNotReadyException", - }: - return InvokeServerUnavailableError(error_msg) - elif error_code == "ModelStreamErrorException": - return InvokeConnectionError(error_msg) - - return InvokeError(error_msg) diff --git a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama2-13b-chat-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/meta.llama2-13b-chat-v1.yaml deleted file mode 100644 index a8d3704c15bd87..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama2-13b-chat-v1.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: meta.llama2-13b-chat-v1 -label: - en_US: 
Llama 2 Chat 13B -model_type: llm -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_gen_len - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 2048 -pricing: - input: '0.00075' - output: '0.00100' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama2-70b-chat-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/meta.llama2-70b-chat-v1.yaml deleted file mode 100644 index 77525e630b5ab9..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama2-70b-chat-v1.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: meta.llama2-70b-chat-v1 -label: - en_US: Llama 2 Chat 70B -model_type: llm -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_gen_len - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 2048 -pricing: - input: '0.00195' - output: '0.00256' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-405b-instruct-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-405b-instruct-v1.0.yaml deleted file mode 100644 index 401de65f89e044..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-405b-instruct-v1.0.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: meta.llama3-1-405b-instruct-v1:0 -label: - en_US: Llama 3.1 405B Instruct -model_type: llm -model_properties: - mode: completion - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - - name: top_p - use_template: top_p - default: 0.9 - - name: max_gen_len - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 2048 -pricing: - input: '0.00532' - output: '0.016' - unit: '0.001' 
- currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-70b-instruct-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-70b-instruct-v1.0.yaml deleted file mode 100644 index 10bfa7b1d5cd42..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-70b-instruct-v1.0.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: meta.llama3-1-70b-instruct-v1:0 -label: - en_US: Llama 3.1 Instruct 70B -model_type: llm -model_properties: - mode: completion - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - - name: top_p - use_template: top_p - default: 0.9 - - name: max_gen_len - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 2048 -pricing: - input: '0.00265' - output: '0.0035' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-8b-instruct-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-8b-instruct-v1.0.yaml deleted file mode 100644 index 81cd53243f4445..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-1-8b-instruct-v1.0.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: meta.llama3-1-8b-instruct-v1:0 -label: - en_US: Llama 3.1 Instruct 8B -model_type: llm -model_properties: - mode: completion - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - - name: top_p - use_template: top_p - default: 0.9 - - name: max_gen_len - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 2048 -pricing: - input: '0.0003' - output: '0.0006' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-70b-instruct-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-70b-instruct-v1.yaml deleted file mode 100644 index 204662690e7bd4..00000000000000 --- 
a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-70b-instruct-v1.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: meta.llama3-70b-instruct-v1:0 -label: - en_US: Llama 3 Instruct 70B -model_type: llm -model_properties: - mode: completion - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_gen_len - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 2048 -pricing: - input: '0.00265' - output: '0.0035' - unit: '0.00001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-8b-instruct-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-8b-instruct-v1.yaml deleted file mode 100644 index dd4f666a5f507c..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/meta.llama3-8b-instruct-v1.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: meta.llama3-8b-instruct-v1:0 -label: - en_US: Llama 3 Instruct 8B -model_type: llm -model_properties: - mode: completion - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_gen_len - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 2048 -pricing: - input: '0.0004' - output: '0.0006' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-7b-instruct-v0.2.yaml b/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-7b-instruct-v0.2.yaml deleted file mode 100644 index 175c14da37e230..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-7b-instruct-v0.2.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: mistral.mistral-7b-instruct-v0:2 -label: - en_US: Mistral 7B Instruct -model_type: llm -model_properties: - mode: completion - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - required: false - default: 0.5 - 
- name: top_p - use_template: top_p - required: false - default: 0.9 - - name: top_k - use_template: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - default: 50 - max: 200 - - name: max_tokens - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.00015' - output: '0.0002' - unit: '0.00001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-large-2402-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-large-2402-v1.0.yaml deleted file mode 100644 index 65eed5926b1e25..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-large-2402-v1.0.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: mistral.mistral-large-2402-v1:0 -label: - en_US: Mistral Large -model_type: llm -features: - - tool-call - - agent-thought -model_properties: - mode: completion - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - required: false - default: 0.7 - - name: top_p - use_template: top_p - required: false - default: 1 - - name: max_tokens - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 4096 -pricing: - input: '0.008' - output: '0.024' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-large-2407-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-large-2407-v1.0.yaml deleted file mode 100644 index 19d7843a5730e9..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-large-2407-v1.0.yaml +++ /dev/null @@ -1,29 +0,0 @@ -model: mistral.mistral-large-2407-v1:0 -label: - en_US: Mistral Large 2 (24.07) -model_type: llm -features: - - tool-call -model_properties: - mode: completion - context_size: 128000 -parameter_rules: - - 
name: temperature - use_template: temperature - required: false - default: 0.7 - - name: top_p - use_template: top_p - required: false - default: 1 - - name: max_tokens - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.003' - output: '0.009' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-small-2402-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-small-2402-v1.0.yaml deleted file mode 100644 index b97c2a94936d7b..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/mistral.mistral-small-2402-v1.0.yaml +++ /dev/null @@ -1,29 +0,0 @@ -model: mistral.mistral-small-2402-v1:0 -label: - en_US: Mistral Small -model_type: llm -features: - - tool-call -model_properties: - mode: completion - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - required: false - default: 0.7 - - name: top_p - use_template: top_p - required: false - default: 1 - - name: max_tokens - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 4096 -pricing: - input: '0.001' - output: '0.03' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/mistral.mixtral-8x7b-instruct-v0.1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/mistral.mixtral-8x7b-instruct-v0.1.yaml deleted file mode 100644 index 03ec7eddaf2072..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/mistral.mixtral-8x7b-instruct-v0.1.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: mistral.mixtral-8x7b-instruct-v0:1 -label: - en_US: Mixtral 8X7B Instruct -model_type: llm -model_properties: - mode: completion - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - required: false - default: 0.5 - - name: top_p - use_template: top_p - required: false - default: 0.9 - - name: top_k - use_template: top_k - label: - zh_Hans: 
取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - default: 50 - max: 200 - - name: max_tokens - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.00045' - output: '0.0007' - unit: '0.00001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-haiku-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-haiku-v1.yaml deleted file mode 100644 index 9247f46974dd40..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-haiku-v1.yaml +++ /dev/null @@ -1,59 +0,0 @@ -model: us.anthropic.claude-3-haiku-20240307-v1:0 -label: - en_US: Claude 3 Haiku(US.Cross Region Inference) -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - # docs: https://docs.anthropic.com/claude/docs/system-prompts - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. 
- - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. -pricing: - input: '0.00025' - output: '0.00125' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-opus-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-opus-v1.yaml deleted file mode 100644 index f9854d51f03a81..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-opus-v1.yaml +++ /dev/null @@ -1,59 +0,0 @@ -model: us.anthropic.claude-3-opus-20240229-v1:0 -label: - en_US: Claude 3 Opus(US.Cross Region Inference) -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. 
Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - # docs: https://docs.anthropic.com/claude/docs/system-prompts - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. 
-pricing: - input: '0.015' - output: '0.075' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v1.5.yaml b/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v1.5.yaml deleted file mode 100644 index fbcab2d5f3bdaf..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v1.5.yaml +++ /dev/null @@ -1,58 +0,0 @@ -model: us.anthropic.claude-3-5-sonnet-20240620-v1:0 -label: - en_US: Claude 3.5 Sonnet(US.Cross Region Inference) -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. 
You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. -pricing: - input: '0.003' - output: '0.015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v1.yaml b/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v1.yaml deleted file mode 100644 index 9f5a1501f0da75..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/llm/us.anthropic.claude-3-sonnet-v1.yaml +++ /dev/null @@ -1,58 +0,0 @@ -model: us.anthropic.claude-3-sonnet-20240229-v1:0 -label: - en_US: Claude 3 Sonnet(US.Cross Region Inference) -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. 
- - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. -pricing: - input: '0.003' - output: '0.015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/text_embedding/__init__.py b/api/core/model_runtime/model_providers/bedrock/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/bedrock/text_embedding/_position.yaml b/api/core/model_runtime/model_providers/bedrock/text_embedding/_position.yaml deleted file mode 100644 index afbea06a3e2f19..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/text_embedding/_position.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- amazon.titan-embed-text-v1 -- amazon.titan-embed-text-v2:0 -- cohere.embed-english-v3 -- cohere.embed-multilingual-v3 diff --git a/api/core/model_runtime/model_providers/bedrock/text_embedding/amazon.titan-embed-text-v1.yaml b/api/core/model_runtime/model_providers/bedrock/text_embedding/amazon.titan-embed-text-v1.yaml deleted file mode 100644 index e5a55971a19711..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/text_embedding/amazon.titan-embed-text-v1.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: 
amazon.titan-embed-text-v1 -model_type: text-embedding -model_properties: - context_size: 8192 -pricing: - input: '0.0001' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/text_embedding/amazon.titan-embed-text-v2.yaml b/api/core/model_runtime/model_providers/bedrock/text_embedding/amazon.titan-embed-text-v2.yaml deleted file mode 100644 index 5069efeb109a85..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/text_embedding/amazon.titan-embed-text-v2.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: amazon.titan-embed-text-v2:0 -model_type: text-embedding -model_properties: - context_size: 8192 -pricing: - input: '0.00002' - unit: '0.00001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/text_embedding/cohere.embed-english-v3.yaml b/api/core/model_runtime/model_providers/bedrock/text_embedding/cohere.embed-english-v3.yaml deleted file mode 100644 index d49aa2a99c3a95..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/text_embedding/cohere.embed-english-v3.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: cohere.embed-english-v3 -model_type: text-embedding -model_properties: - context_size: 512 -pricing: - input: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/text_embedding/cohere.embed-multilingual-v3.yaml b/api/core/model_runtime/model_providers/bedrock/text_embedding/cohere.embed-multilingual-v3.yaml deleted file mode 100644 index 63bab59d2cea61..00000000000000 --- a/api/core/model_runtime/model_providers/bedrock/text_embedding/cohere.embed-multilingual-v3.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: cohere.embed-multilingual-v3 -model_type: text-embedding -model_properties: - context_size: 512 -pricing: - input: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/chatglm/__init__.py b/api/core/model_runtime/model_providers/chatglm/__init__.py deleted file mode 100644 index 
e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/chatglm/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/chatglm/_assets/icon_l_en.svg deleted file mode 100644 index a824d43d6f0f81..00000000000000 --- a/api/core/model_runtime/model_providers/chatglm/_assets/icon_l_en.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/chatglm/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/chatglm/_assets/icon_s_en.svg deleted file mode 100644 index 466b4fce57a4e9..00000000000000 --- a/api/core/model_runtime/model_providers/chatglm/_assets/icon_s_en.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/chatglm/chatglm.py b/api/core/model_runtime/model_providers/chatglm/chatglm.py deleted file mode 100644 index 71d9a1532281bd..00000000000000 --- a/api/core/model_runtime/model_providers/chatglm/chatglm.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class ChatGLMProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `chatglm3-6b` model for validate, - model_instance.validate_credentials(model="chatglm3-6b", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/chatglm/chatglm.yaml b/api/core/model_runtime/model_providers/chatglm/chatglm.yaml deleted file mode 100644 index 0c1688c3502c7c..00000000000000 --- a/api/core/model_runtime/model_providers/chatglm/chatglm.yaml +++ /dev/null @@ -1,28 +0,0 @@ -provider: chatglm -label: - en_US: ChatGLM -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#F4F7FF" -help: - title: - en_US: Deploy ChatGLM to your local - zh_Hans: 部署您的本地 ChatGLM - url: - en_US: https://github.com/THUDM/ChatGLM3 -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_base - label: - en_US: API URL - type: text-input - required: true - placeholder: - zh_Hans: 在此输入您的 API URL - en_US: Enter your API URL diff --git a/api/core/model_runtime/model_providers/chatglm/llm/__init__.py b/api/core/model_runtime/model_providers/chatglm/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/chatglm/llm/chatglm2-6b-32k.yaml b/api/core/model_runtime/model_providers/chatglm/llm/chatglm2-6b-32k.yaml deleted file mode 100644 index d1075d74b5c489..00000000000000 --- a/api/core/model_runtime/model_providers/chatglm/llm/chatglm2-6b-32k.yaml +++ /dev/null @@ -1,21 +0,0 @@ -model: chatglm2-6b-32k -label: - en_US: ChatGLM2-6B-32K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - - 
name: top_p - use_template: top_p - required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 2000 - min: 1 - max: 32000 diff --git a/api/core/model_runtime/model_providers/chatglm/llm/chatglm2-6b.yaml b/api/core/model_runtime/model_providers/chatglm/llm/chatglm2-6b.yaml deleted file mode 100644 index e3cfeb9001d676..00000000000000 --- a/api/core/model_runtime/model_providers/chatglm/llm/chatglm2-6b.yaml +++ /dev/null @@ -1,21 +0,0 @@ -model: chatglm2-6b -label: - en_US: ChatGLM2-6B -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 2000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 256 - min: 1 - max: 2000 diff --git a/api/core/model_runtime/model_providers/chatglm/llm/chatglm3-6b-32k.yaml b/api/core/model_runtime/model_providers/chatglm/llm/chatglm3-6b-32k.yaml deleted file mode 100644 index 6f34743513b7bf..00000000000000 --- a/api/core/model_runtime/model_providers/chatglm/llm/chatglm3-6b-32k.yaml +++ /dev/null @@ -1,22 +0,0 @@ -model: chatglm3-6b-32k -label: - en_US: ChatGLM3-6B-32K -model_type: llm -features: - - tool-call - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 8000 - min: 1 - max: 32000 diff --git a/api/core/model_runtime/model_providers/chatglm/llm/chatglm3-6b.yaml b/api/core/model_runtime/model_providers/chatglm/llm/chatglm3-6b.yaml deleted file mode 100644 index d6d87e2e094849..00000000000000 --- a/api/core/model_runtime/model_providers/chatglm/llm/chatglm3-6b.yaml +++ /dev/null @@ -1,22 +0,0 @@ -model: chatglm3-6b -label: - en_US: ChatGLM3-6B -model_type: llm -features: - - tool-call - - 
agent-thought -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 256 - min: 1 - max: 8000 diff --git a/api/core/model_runtime/model_providers/chatglm/llm/llm.py b/api/core/model_runtime/model_providers/chatglm/llm/llm.py deleted file mode 100644 index b3eeb48e226e18..00000000000000 --- a/api/core/model_runtime/model_providers/chatglm/llm/llm.py +++ /dev/null @@ -1,507 +0,0 @@ -import logging -from collections.abc import Generator -from typing import Optional, cast - -from httpx import Timeout -from openai import ( - APIConnectionError, - APITimeoutError, - AuthenticationError, - ConflictError, - InternalServerError, - NotFoundError, - OpenAI, - PermissionDeniedError, - RateLimitError, - Stream, - UnprocessableEntityError, -) -from openai.types.chat import ChatCompletion, ChatCompletionChunk -from openai.types.chat.chat_completion_message import FunctionCall -from yarl import URL - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.utils import helper - -logger = logging.getLogger(__name__) - - -class ChatGLMLargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: 
list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # invoke model - return self._generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool] | None = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - return self._num_tokens_from_messages(messages=prompt_messages, tools=tools) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke( - model=model, - credentials=credentials, - prompt_messages=[ - UserPromptMessage(content="ping"), - ], - model_parameters={ - "max_tokens": 16, - }, - ) - except Exception as e: - raise CredentialsValidateFailedError(str(e)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to 
be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [ - APIConnectionError, - APITimeoutError, - ], - InvokeServerUnavailableError: [ - InternalServerError, - ConflictError, - NotFoundError, - UnprocessableEntityError, - PermissionDeniedError, - ], - InvokeRateLimitError: [RateLimitError], - InvokeAuthorizationError: [AuthenticationError], - InvokeBadRequestError: [ValueError], - } - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - """ - Invoke large language model - - :param model: model name - :param credentials: credentials kwargs - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - - self._check_chatglm_parameters(model=model, model_parameters=model_parameters, tools=tools) - - kwargs = self._to_client_kwargs(credentials) - # init model client - client = OpenAI(**kwargs) - - extra_model_kwargs = {} - if stop: - extra_model_kwargs["stop"] = stop - - if user: - extra_model_kwargs["user"] = user - - if tools and len(tools) > 0: - extra_model_kwargs["functions"] = [helper.dump_model(tool) for tool in tools] - - result = client.chat.completions.create( - messages=[self._convert_prompt_message_to_dict(m) for m in prompt_messages], - model=model, - stream=stream, - **model_parameters, - **extra_model_kwargs, - ) - - if stream: - return self._handle_chat_generate_stream_response( - model=model, credentials=credentials, response=result, tools=tools, prompt_messages=prompt_messages - ) - - return self._handle_chat_generate_response( - model=model, 
credentials=credentials, response=result, tools=tools, prompt_messages=prompt_messages - ) - - def _check_chatglm_parameters(self, model: str, model_parameters: dict, tools: list[PromptMessageTool]) -> None: - if model.find("chatglm2") != -1 and tools is not None and len(tools) > 0: - raise InvokeBadRequestError("ChatGLM2 does not support function calling") - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict for OpenAI Compatibility API - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - raise ValueError("User message content must be str") - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls and len(message.tool_calls) > 0: - message_dict["function_call"] = { - "name": message.tool_calls[0].function.name, - "arguments": message.tool_calls[0].function.arguments, - } - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, ToolPromptMessage): - # check if last message is user message - message = cast(ToolPromptMessage, message) - message_dict = {"role": "function", "content": message.content} - else: - raise ValueError(f"Unknown message type {type(message)}") - - return message_dict - - def _extract_response_tool_calls( - self, response_function_calls: list[FunctionCall] - ) -> list[AssistantPromptMessage.ToolCall]: - """ - Extract tool calls from response - - :param response_tool_calls: response tool calls - :return: list of tool calls - """ - tool_calls = [] - if response_function_calls: - for response_tool_call in response_function_calls: - function = 
AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call.name, arguments=response_tool_call.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall(id=0, type="function", function=function) - tool_calls.append(tool_call) - - return tool_calls - - def _to_client_kwargs(self, credentials: dict) -> dict: - """ - Convert invoke kwargs to client kwargs - - :param stream: is stream response - :param model_name: model name - :param credentials: credentials dict - :param model_parameters: model parameters - :return: client kwargs - """ - client_kwargs = { - "timeout": Timeout(315.0, read=300.0, write=10.0, connect=5.0), - "api_key": "1", - "base_url": str(URL(credentials["api_base"]) / "v1"), - } - - return client_kwargs - - def _handle_chat_generate_stream_response( - self, - model: str, - credentials: dict, - response: Stream[ChatCompletionChunk], - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> Generator: - full_response = "" - - for chunk in response: - if len(chunk.choices) == 0: - continue - - delta = chunk.choices[0] - - if delta.finish_reason is None and (delta.delta.content is None or delta.delta.content == ""): - continue - - # check if there is a tool call in the response - function_calls = None - if delta.delta.function_call: - function_calls = [delta.delta.function_call] - - assistant_message_tool_calls = self._extract_response_tool_calls(function_calls or []) - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage( - content=delta.delta.content or "", tool_calls=assistant_message_tool_calls - ) - - if delta.finish_reason is not None: - # temp_assistant_prompt_message is used to calculate usage - temp_assistant_prompt_message = AssistantPromptMessage( - content=full_response, tool_calls=assistant_message_tool_calls - ) - - prompt_tokens = self._num_tokens_from_messages(messages=prompt_messages, tools=tools) - completion_tokens = 
self._num_tokens_from_messages(messages=[temp_assistant_prompt_message], tools=[]) - - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=prompt_tokens, - completion_tokens=completion_tokens, - ) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - finish_reason=delta.finish_reason, - usage=usage, - ), - ) - else: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - ), - ) - - full_response += delta.delta.content - - def _handle_chat_generate_response( - self, - model: str, - credentials: dict, - response: ChatCompletion, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> LLMResult: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: llm response - """ - if len(response.choices) == 0: - raise InvokeServerUnavailableError("Empty response") - assistant_message = response.choices[0].message - - # convert function call to tool call - function_calls = assistant_message.function_call - tool_calls = self._extract_response_tool_calls([function_calls] if function_calls else []) - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_message.content, tool_calls=tool_calls) - - prompt_tokens = self._num_tokens_from_messages(messages=prompt_messages, tools=tools) - completion_tokens = self._num_tokens_from_messages(messages=[assistant_prompt_message], tools=tools) - - usage = self._calc_response_usage( - model=model, credentials=credentials, 
prompt_tokens=prompt_tokens, completion_tokens=completion_tokens - ) - - response = LLMResult( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=response.system_fingerprint, - usage=usage, - message=assistant_prompt_message, - ) - - return response - - def _num_tokens_from_string(self, text: str, tools: Optional[list[PromptMessageTool]] = None) -> int: - """ - Calculate num tokens for text completion model with tiktoken package. - - :param model: model name - :param text: prompt text - :param tools: tools for tool calling - :return: number of tokens - """ - num_tokens = self._get_num_tokens_by_gpt2(text) - - if tools: - num_tokens += self._num_tokens_for_tools(tools) - - return num_tokens - - def _num_tokens_from_messages( - self, messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None - ) -> int: - """Calculate num tokens for chatglm2 and chatglm3 with GPT2 tokenizer. - - it's too complex to calculate num tokens for chatglm2 and chatglm3 with ChatGLM tokenizer, - As a temporary solution we use GPT2 tokenizer instead. 
- - """ - - def tokens(text: str): - return self._get_num_tokens_by_gpt2(text) - - tokens_per_message = 3 - tokens_per_name = 1 - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - value = text - - if key == "function_call": - for t_key, t_value in value.items(): - num_tokens += tokens(t_key) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += tokens(f_key) - num_tokens += tokens(f_value) - else: - num_tokens += tokens(t_key) - num_tokens += tokens(t_value) - else: - num_tokens += tokens(str(value)) - - if key == "name": - num_tokens += tokens_per_name - - # every reply is primed with assistant - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(tools) - - return num_tokens - - def _num_tokens_for_tools(self, tools: list[PromptMessageTool]) -> int: - """ - Calculate num tokens for tool calling - - :param encoding: encoding - :param tools: tools for tool calling - :return: number of tokens - """ - - def tokens(text: str): - return self._get_num_tokens_by_gpt2(text) - - num_tokens = 0 - for tool in tools: - # calculate num tokens for function object - num_tokens += tokens("name") - num_tokens += tokens(tool.name) - num_tokens += tokens("description") - num_tokens += tokens(tool.description) - parameters = tool.parameters - num_tokens += tokens("parameters") - num_tokens += tokens("type") - num_tokens += tokens(parameters.get("type")) - if "properties" in parameters: - num_tokens += tokens("properties") - for key, value in parameters.get("properties").items(): - num_tokens += tokens(key) - for field_key, field_value in value.items(): - num_tokens += tokens(field_key) - if field_key == "enum": - for enum_field in field_value: - 
num_tokens += 3 - num_tokens += tokens(enum_field) - else: - num_tokens += tokens(field_key) - num_tokens += tokens(str(field_value)) - if "required" in parameters: - num_tokens += tokens("required") - for required_field in parameters["required"]: - num_tokens += 3 - num_tokens += tokens(required_field) - - return num_tokens diff --git a/api/core/model_runtime/model_providers/cohere/__init__.py b/api/core/model_runtime/model_providers/cohere/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/cohere/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/cohere/_assets/icon_l_en.svg deleted file mode 100644 index 9c176896fe872c..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/_assets/icon_l_en.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/cohere/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/cohere/_assets/icon_s_en.svg deleted file mode 100644 index 28fe96d390245c..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/_assets/icon_s_en.svg +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/cohere/cohere.py b/api/core/model_runtime/model_providers/cohere/cohere.py deleted file mode 100644 index 8394a45fcf9ca1..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/cohere.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class CohereProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: 
provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.RERANK) - - # Use `rerank-english-v2.0` model for validate, - model_instance.validate_credentials(model="rerank-english-v2.0", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/cohere/cohere.yaml b/api/core/model_runtime/model_providers/cohere/cohere.yaml deleted file mode 100644 index bd40057fe97495..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/cohere.yaml +++ /dev/null @@ -1,90 +0,0 @@ -provider: cohere -label: - zh_Hans: Cohere - en_US: Cohere -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#ECE9E3" -help: - title: - en_US: Get your API key from cohere - zh_Hans: 从 cohere 获取 API Key - url: - en_US: https://dashboard.cohere.com/api-keys -supported_model_types: - - llm - - text-embedding - - rerank -configurate_methods: - - predefined-model - - customizable-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - zh_Hans: API Key - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - show_on: [ ] - - variable: base_url - label: - zh_Hans: API Base - en_US: API Base - type: text-input - required: false - placeholder: - zh_Hans: 在此输入您的 API Base,如 https://api.cohere.ai/v1 - en_US: Enter your API Base, e.g. 
https://api.cohere.ai/v1 -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: mode - show_on: - - variable: __model_type - value: llm - label: - en_US: Completion mode - type: select - required: false - default: chat - placeholder: - zh_Hans: 选择对话类型 - en_US: Select completion mode - options: - - value: completion - label: - en_US: Completion - zh_Hans: 补全 - - value: chat - label: - en_US: Chat - zh_Hans: 对话 - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: base_url - label: - zh_Hans: API Base - en_US: API Base - type: text-input - required: false - placeholder: - zh_Hans: 在此输入您的 API Base,如 https://api.cohere.ai/v1 - en_US: Enter your API Base, e.g. https://api.cohere.ai/v1 diff --git a/api/core/model_runtime/model_providers/cohere/llm/__init__.py b/api/core/model_runtime/model_providers/cohere/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/cohere/llm/_position.yaml b/api/core/model_runtime/model_providers/cohere/llm/_position.yaml deleted file mode 100644 index 42d06f49a2b394..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/_position.yaml +++ /dev/null @@ -1,10 +0,0 @@ -- command-r -- command-r-plus -- command-chat -- command-light-chat -- command-nightly-chat -- command-light-nightly-chat -- command -- command-light -- command-nightly -- command-light-nightly diff --git a/api/core/model_runtime/model_providers/cohere/llm/command-chat.yaml b/api/core/model_runtime/model_providers/cohere/llm/command-chat.yaml deleted file mode 100644 index 5f233f35ceeeba..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/command-chat.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: command-chat -label: - zh_Hans: 
command-chat - en_US: command-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - default: 0 - min: 0 - max: 500 - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 - - name: preamble_override - label: - zh_Hans: 前导文本 - en_US: Preamble - type: string - help: - zh_Hans: 当指定时,将使用提供的前导文本替换默认的 Cohere 前导文本。 - en_US: When specified, the default Cohere preamble will be replaced with the provided one. - required: false - - name: prompt_truncation - label: - zh_Hans: 提示截断 - en_US: Prompt Truncation - type: string - help: - zh_Hans: 指定如何构造 Prompt。当 prompt_truncation 设置为 "AUTO" 时,将会丢弃一些来自聊天记录的元素,以尝试构造一个符合模型上下文长度限制的 Prompt。 - en_US: Dictates how the prompt will be constructed. With prompt_truncation set to "AUTO", some elements from chat histories will be dropped in an attempt to construct a prompt that fits within the model's context length limit. 
- required: true - default: 'AUTO' - options: - - 'AUTO' - - 'OFF' -pricing: - input: '1.0' - output: '2.0' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/llm/command-light-chat.yaml b/api/core/model_runtime/model_providers/cohere/llm/command-light-chat.yaml deleted file mode 100644 index b5f00487703a0d..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/command-light-chat.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: command-light-chat -label: - zh_Hans: command-light-chat - en_US: command-light-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - default: 0 - min: 0 - max: 500 - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 - - name: preamble_override - label: - zh_Hans: 前导文本 - en_US: Preamble - type: string - help: - zh_Hans: 当指定时,将使用提供的前导文本替换默认的 Cohere 前导文本。 - en_US: When specified, the default Cohere preamble will be replaced with the provided one. - required: false - - name: prompt_truncation - label: - zh_Hans: 提示截断 - en_US: Prompt Truncation - type: string - help: - zh_Hans: 指定如何构造 Prompt。当 prompt_truncation 设置为 "AUTO" 时,将会丢弃一些来自聊天记录的元素,以尝试构造一个符合模型上下文长度限制的 Prompt。 - en_US: Dictates how the prompt will be constructed. With prompt_truncation set to "AUTO", some elements from chat histories will be dropped in an attempt to construct a prompt that fits within the model's context length limit. 
- required: true - default: 'AUTO' - options: - - 'AUTO' - - 'OFF' -pricing: - input: '0.3' - output: '0.6' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/llm/command-light-nightly-chat.yaml b/api/core/model_runtime/model_providers/cohere/llm/command-light-nightly-chat.yaml deleted file mode 100644 index 1c96b24030224d..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/command-light-nightly-chat.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: command-light-nightly-chat -label: - zh_Hans: command-light-nightly-chat - en_US: command-light-nightly-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - default: 0 - min: 0 - max: 500 - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 - - name: preamble_override - label: - zh_Hans: 前导文本 - en_US: Preamble - type: string - help: - zh_Hans: 当指定时,将使用提供的前导文本替换默认的 Cohere 前导文本。 - en_US: When specified, the default Cohere preamble will be replaced with the provided one. - required: false - - name: prompt_truncation - label: - zh_Hans: 提示截断 - en_US: Prompt Truncation - type: string - help: - zh_Hans: 指定如何构造 Prompt。当 prompt_truncation 设置为 "AUTO" 时,将会丢弃一些来自聊天记录的元素,以尝试构造一个符合模型上下文长度限制的 Prompt。 - en_US: Dictates how the prompt will be constructed. With prompt_truncation set to "AUTO", some elements from chat histories will be dropped in an attempt to construct a prompt that fits within the model's context length limit. 
- required: true - default: 'AUTO' - options: - - 'AUTO' - - 'OFF' -pricing: - input: '0.3' - output: '0.6' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/llm/command-light-nightly.yaml b/api/core/model_runtime/model_providers/cohere/llm/command-light-nightly.yaml deleted file mode 100644 index 4616f766897866..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/command-light-nightly.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: command-light-nightly -label: - zh_Hans: command-light-nightly - en_US: command-light-nightly -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 -pricing: - input: '0.3' - output: '0.6' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/llm/command-light.yaml b/api/core/model_runtime/model_providers/cohere/llm/command-light.yaml deleted file mode 100644 index 161756b3220d28..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/command-light.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: command-light -label: - zh_Hans: command-light - en_US: command-light -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 -pricing: - input: '0.3' - output: '0.6' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/llm/command-nightly-chat.yaml b/api/core/model_runtime/model_providers/cohere/llm/command-nightly-chat.yaml deleted file mode 100644 index 739e09e72e9e66..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/command-nightly-chat.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: command-nightly-chat -label: - zh_Hans: command-nightly-chat - en_US: command-nightly-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - default: 0 - min: 0 - max: 500 - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 - - name: preamble_override - label: - zh_Hans: 前导文本 - en_US: Preamble - type: string - help: - zh_Hans: 当指定时,将使用提供的前导文本替换默认的 Cohere 前导文本。 - en_US: When specified, the default Cohere preamble will be replaced with the provided one. - required: false - - name: prompt_truncation - label: - zh_Hans: 提示截断 - en_US: Prompt Truncation - type: string - help: - zh_Hans: 指定如何构造 Prompt。当 prompt_truncation 设置为 "AUTO" 时,将会丢弃一些来自聊天记录的元素,以尝试构造一个符合模型上下文长度限制的 Prompt。 - en_US: Dictates how the prompt will be constructed. 
With prompt_truncation set to "AUTO", some elements from chat histories will be dropped in an attempt to construct a prompt that fits within the model's context length limit. - required: true - default: 'AUTO' - options: - - 'AUTO' - - 'OFF' -pricing: - input: '1.0' - output: '2.0' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/llm/command-nightly.yaml b/api/core/model_runtime/model_providers/cohere/llm/command-nightly.yaml deleted file mode 100644 index 1e025e40c4b453..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/command-nightly.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: command-nightly -label: - zh_Hans: command-nightly - en_US: command-nightly -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 -pricing: - input: '1.0' - output: '2.0' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/llm/command-r-plus.yaml b/api/core/model_runtime/model_providers/cohere/llm/command-r-plus.yaml deleted file mode 100644 index 617e6853ea6518..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/command-r-plus.yaml +++ /dev/null @@ -1,45 +0,0 @@ -model: command-r-plus -label: - en_US: command-r-plus -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 -pricing: - input: '3' - output: '15' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/llm/command-r.yaml b/api/core/model_runtime/model_providers/cohere/llm/command-r.yaml deleted file mode 100644 index c36680443b7a4c..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/command-r.yaml +++ /dev/null @@ -1,45 +0,0 @@ -model: command-r -label: - en_US: command-r -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 -pricing: - input: '0.5' - output: '1.5' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/llm/command.yaml b/api/core/model_runtime/model_providers/cohere/llm/command.yaml deleted file mode 100644 index 0cac7c35ea140b..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/command.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: command -label: - zh_Hans: command - en_US: command -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 -pricing: - input: '1.0' - output: '2.0' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/llm/llm.py b/api/core/model_runtime/model_providers/cohere/llm/llm.py deleted file mode 100644 index 3863ad33081962..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/llm/llm.py +++ /dev/null @@ -1,733 +0,0 @@ -import json -import logging -from collections.abc import Generator, Iterator -from typing import Optional, Union, cast - -import cohere -from cohere import ( - ChatMessage, - ChatStreamRequestToolResultsItem, - GenerateStreamedResponse, - GenerateStreamedResponse_StreamEnd, - GenerateStreamedResponse_StreamError, - GenerateStreamedResponse_TextGeneration, - Generation, - NonStreamedChatResponse, - StreamedChatResponse, - StreamedChatResponse_StreamEnd, - StreamedChatResponse_TextGeneration, - StreamedChatResponse_ToolCallsGeneration, - Tool, - ToolCall, - ToolParameterDefinitionsValue, -) -from cohere.core import RequestOptions - -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageContentType, - PromptMessageRole, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, I18nObject, ModelType -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import 
CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - -logger = logging.getLogger(__name__) - - -class CohereLargeLanguageModel(LargeLanguageModel): - """ - Model class for Cohere large language model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # get model mode - model_mode = self.get_model_mode(model, credentials) - - if model_mode == LLMMode.CHAT: - return self._chat_generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - else: - return self._generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - stop=stop, - stream=stream, - user=user, - ) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - # get model mode - model_mode = self.get_model_mode(model) - - try: - if model_mode == LLMMode.CHAT: - return 
self._num_tokens_from_messages(model, credentials, prompt_messages) - else: - return self._num_tokens_from_string(model, credentials, prompt_messages[0].content) - except Exception as e: - raise self._transform_invoke_error(e) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # get model mode - model_mode = self.get_model_mode(model) - - if model_mode == LLMMode.CHAT: - self._chat_generate( - model=model, - credentials=credentials, - prompt_messages=[UserPromptMessage(content="ping")], - model_parameters={ - "max_tokens": 20, - "temperature": 0, - }, - stream=False, - ) - else: - self._generate( - model=model, - credentials=credentials, - prompt_messages=[UserPromptMessage(content="ping")], - model_parameters={ - "max_tokens": 20, - "temperature": 0, - }, - stream=False, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke llm model - - :param model: model name - :param credentials: credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # initialize client - client = cohere.Client(credentials.get("api_key"), base_url=credentials.get("base_url")) - - if stop: - model_parameters["end_sequences"] = stop - - if stream: - response = client.generate_stream( - prompt=prompt_messages[0].content, - model=model, - **model_parameters, - request_options=RequestOptions(max_retries=0), - ) - - return 
self._handle_generate_stream_response(model, credentials, response, prompt_messages) - else: - response = client.generate( - prompt=prompt_messages[0].content, - model=model, - **model_parameters, - request_options=RequestOptions(max_retries=0), - ) - - return self._handle_generate_response(model, credentials, response, prompt_messages) - - def _handle_generate_response( - self, model: str, credentials: dict, response: Generation, prompt_messages: list[PromptMessage] - ) -> LLMResult: - """ - Handle llm response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response - """ - assistant_text = response.generations[0].text - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_text) - - # calculate num tokens - prompt_tokens = int(response.meta.billed_units.input_tokens) - completion_tokens = int(response.meta.billed_units.output_tokens) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - response = LLMResult( - model=model, prompt_messages=prompt_messages, message=assistant_prompt_message, usage=usage - ) - - return response - - def _handle_generate_stream_response( - self, - model: str, - credentials: dict, - response: Iterator[GenerateStreamedResponse], - prompt_messages: list[PromptMessage], - ) -> Generator: - """ - Handle llm stream response - - :param model: model name - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator - """ - index = 1 - full_assistant_content = "" - for chunk in response: - if isinstance(chunk, GenerateStreamedResponse_TextGeneration): - chunk = cast(GenerateStreamedResponse_TextGeneration, chunk) - text = chunk.text - - if text is None: - continue - - # transform assistant message to prompt message - assistant_prompt_message = 
AssistantPromptMessage(content=text) - - full_assistant_content += text - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - ), - ) - - index += 1 - elif isinstance(chunk, GenerateStreamedResponse_StreamEnd): - chunk = cast(GenerateStreamedResponse_StreamEnd, chunk) - - # calculate num tokens - prompt_tokens = self._num_tokens_from_messages(model, credentials, prompt_messages) - completion_tokens = self._num_tokens_from_messages( - model, credentials, [AssistantPromptMessage(content=full_assistant_content)] - ) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=AssistantPromptMessage(content=""), - finish_reason=chunk.finish_reason, - usage=usage, - ), - ) - break - elif isinstance(chunk, GenerateStreamedResponse_StreamError): - chunk = cast(GenerateStreamedResponse_StreamError, chunk) - raise InvokeBadRequestError(chunk.err) - - def _chat_generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke llm chat model - - :param model: model name - :param credentials: credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # initialize client - client = cohere.Client(credentials.get("api_key"), base_url=credentials.get("base_url")) - - if stop: - model_parameters["stop_sequences"] = stop - - 
if tools: - if len(tools) == 1: - raise ValueError("Cohere tool call requires at least two tools to be specified.") - - model_parameters["tools"] = self._convert_tools(tools) - - message, chat_histories, tool_results = self._convert_prompt_messages_to_message_and_chat_histories( - prompt_messages - ) - - if tool_results: - model_parameters["tool_results"] = tool_results - - # chat model - real_model = model - if self.get_model_schema(model, credentials).fetch_from == FetchFrom.PREDEFINED_MODEL: - real_model = model.removesuffix("-chat") - - if stream: - response = client.chat_stream( - message=message, - chat_history=chat_histories, - model=real_model, - **model_parameters, - request_options=RequestOptions(max_retries=0), - ) - - return self._handle_chat_generate_stream_response(model, credentials, response, prompt_messages) - else: - response = client.chat( - message=message, - chat_history=chat_histories, - model=real_model, - **model_parameters, - request_options=RequestOptions(max_retries=0), - ) - - return self._handle_chat_generate_response(model, credentials, response, prompt_messages) - - def _handle_chat_generate_response( - self, model: str, credentials: dict, response: NonStreamedChatResponse, prompt_messages: list[PromptMessage] - ) -> LLMResult: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response - """ - assistant_text = response.text - - tool_calls = [] - if response.tool_calls: - for cohere_tool_call in response.tool_calls: - tool_call = AssistantPromptMessage.ToolCall( - id=cohere_tool_call.name, - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=cohere_tool_call.name, arguments=json.dumps(cohere_tool_call.parameters) - ), - ) - tool_calls.append(tool_call) - - # transform assistant message to prompt message - assistant_prompt_message = 
AssistantPromptMessage(content=assistant_text, tool_calls=tool_calls) - - # calculate num tokens - prompt_tokens = self._num_tokens_from_messages(model, credentials, prompt_messages) - completion_tokens = self._num_tokens_from_messages(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - response = LLMResult( - model=model, prompt_messages=prompt_messages, message=assistant_prompt_message, usage=usage - ) - - return response - - def _handle_chat_generate_stream_response( - self, - model: str, - credentials: dict, - response: Iterator[StreamedChatResponse], - prompt_messages: list[PromptMessage], - ) -> Generator: - """ - Handle llm chat stream response - - :param model: model name - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator - """ - - def final_response( - full_text: str, - tool_calls: list[AssistantPromptMessage.ToolCall], - index: int, - finish_reason: Optional[str] = None, - ) -> LLMResultChunk: - # calculate num tokens - prompt_tokens = self._num_tokens_from_messages(model, credentials, prompt_messages) - - full_assistant_prompt_message = AssistantPromptMessage(content=full_text, tool_calls=tool_calls) - completion_tokens = self._num_tokens_from_messages(model, credentials, [full_assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - return LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=AssistantPromptMessage(content="", tool_calls=tool_calls), - finish_reason=finish_reason, - usage=usage, - ), - ) - - index = 1 - full_assistant_content = "" - tool_calls = [] - for chunk in response: - if isinstance(chunk, StreamedChatResponse_TextGeneration): - chunk = cast(StreamedChatResponse_TextGeneration, chunk) - text 
= chunk.text - - if text is None: - continue - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=text) - - full_assistant_content += text - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - ), - ) - - index += 1 - elif isinstance(chunk, StreamedChatResponse_ToolCallsGeneration): - chunk = cast(StreamedChatResponse_ToolCallsGeneration, chunk) - if chunk.tool_calls: - for cohere_tool_call in chunk.tool_calls: - tool_call = AssistantPromptMessage.ToolCall( - id=cohere_tool_call.name, - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=cohere_tool_call.name, arguments=json.dumps(cohere_tool_call.parameters) - ), - ) - tool_calls.append(tool_call) - elif isinstance(chunk, StreamedChatResponse_StreamEnd): - chunk = cast(StreamedChatResponse_StreamEnd, chunk) - yield final_response(full_assistant_content, tool_calls, index, chunk.finish_reason) - index += 1 - - def _convert_prompt_messages_to_message_and_chat_histories( - self, prompt_messages: list[PromptMessage] - ) -> tuple[str, list[ChatMessage], list[ChatStreamRequestToolResultsItem]]: - """ - Convert prompt messages to message and chat histories - :param prompt_messages: prompt messages - :return: - """ - chat_histories = [] - latest_tool_call_n_outputs = [] - for prompt_message in prompt_messages: - if prompt_message.role == PromptMessageRole.ASSISTANT: - prompt_message = cast(AssistantPromptMessage, prompt_message) - if prompt_message.tool_calls: - for tool_call in prompt_message.tool_calls: - latest_tool_call_n_outputs.append( - ChatStreamRequestToolResultsItem( - call=ToolCall( - name=tool_call.function.name, parameters=json.loads(tool_call.function.arguments) - ), - outputs=[], - ) - ) - else: - cohere_prompt_message = self._convert_prompt_message_to_dict(prompt_message) - if cohere_prompt_message: - 
chat_histories.append(cohere_prompt_message) - elif prompt_message.role == PromptMessageRole.TOOL: - prompt_message = cast(ToolPromptMessage, prompt_message) - if latest_tool_call_n_outputs: - i = 0 - for tool_call_n_outputs in latest_tool_call_n_outputs: - if tool_call_n_outputs.call.name == prompt_message.tool_call_id: - latest_tool_call_n_outputs[i] = ChatStreamRequestToolResultsItem( - call=ToolCall( - name=tool_call_n_outputs.call.name, parameters=tool_call_n_outputs.call.parameters - ), - outputs=[{"result": prompt_message.content}], - ) - break - i += 1 - else: - cohere_prompt_message = self._convert_prompt_message_to_dict(prompt_message) - if cohere_prompt_message: - chat_histories.append(cohere_prompt_message) - - if latest_tool_call_n_outputs: - new_latest_tool_call_n_outputs = [] - for tool_call_n_outputs in latest_tool_call_n_outputs: - if tool_call_n_outputs.outputs: - new_latest_tool_call_n_outputs.append(tool_call_n_outputs) - - latest_tool_call_n_outputs = new_latest_tool_call_n_outputs - - # get latest message from chat histories and pop it - if len(chat_histories) > 0: - latest_message = chat_histories.pop() - message = latest_message.message - else: - raise ValueError("Prompt messages is empty") - - return message, chat_histories, latest_tool_call_n_outputs - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> Optional[ChatMessage]: - """ - Convert PromptMessage to dict for Cohere model - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - chat_message = ChatMessage(role="USER", message=message.content) - else: - sub_message_text = "" - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - sub_message_text += message_content.data - - chat_message = ChatMessage(role="USER", message=sub_message_text) - elif isinstance(message, 
AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - if not message.content: - return None - chat_message = ChatMessage(role="CHATBOT", message=message.content) - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - chat_message = ChatMessage(role="USER", message=message.content) - elif isinstance(message, ToolPromptMessage): - return None - else: - raise ValueError(f"Got unknown type {message}") - - return chat_message - - def _convert_tools(self, tools: list[PromptMessageTool]) -> list[Tool]: - """ - Convert tools to Cohere model - """ - cohere_tools = [] - for tool in tools: - properties = tool.parameters["properties"] - required_properties = tool.parameters["required"] - - parameter_definitions = {} - for p_key, p_val in properties.items(): - required = False - if p_key in required_properties: - required = True - - desc = p_val["description"] - if "enum" in p_val: - desc += f"; Only accepts one of the following predefined options: [{', '.join(p_val['enum'])}]" - - parameter_definitions[p_key] = ToolParameterDefinitionsValue( - description=desc, type=p_val["type"], required=required - ) - - cohere_tool = Tool( - name=tool.name, description=tool.description, parameter_definitions=parameter_definitions - ) - - cohere_tools.append(cohere_tool) - - return cohere_tools - - def _num_tokens_from_string(self, model: str, credentials: dict, text: str) -> int: - """ - Calculate num tokens for text completion model. 
- - :param model: model name - :param credentials: credentials - :param text: prompt text - :return: number of tokens - """ - # initialize client - client = cohere.Client(credentials.get("api_key"), base_url=credentials.get("base_url")) - - response = client.tokenize(text=text, model=model) - - return len(response.tokens) - - def _num_tokens_from_messages(self, model: str, credentials: dict, messages: list[PromptMessage]) -> int: - """Calculate num tokens Cohere model.""" - calc_messages = [] - for message in messages: - cohere_message = self._convert_prompt_message_to_dict(message) - if cohere_message: - calc_messages.append(cohere_message) - message_strs = [f"{message.role}: {message.message}" for message in calc_messages] - message_str = "\n".join(message_strs) - - real_model = model - if self.get_model_schema(model, credentials).fetch_from == FetchFrom.PREDEFINED_MODEL: - real_model = model.removesuffix("-chat") - - return self._num_tokens_from_string(real_model, credentials, message_str) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - Cohere supports fine-tuning of their models. This method returns the schema of the base model - but renamed to the fine-tuned model name. 
- - :param model: model name - :param credentials: credentials - - :return: model schema - """ - # get model schema - models = self.predefined_models() - model_map = {model.model: model for model in models} - - mode = credentials.get("mode") - - if mode == "chat": - base_model_schema = model_map["command-light-chat"] - else: - base_model_schema = model_map["command-light"] - - base_model_schema = cast(AIModelEntity, base_model_schema) - - base_model_schema_features = base_model_schema.features or [] - base_model_schema_model_properties = base_model_schema.model_properties or {} - base_model_schema_parameters_rules = base_model_schema.parameter_rules or [] - - entity = AIModelEntity( - model=model, - label=I18nObject(zh_Hans=model, en_US=model), - model_type=ModelType.LLM, - features=list(base_model_schema_features), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties=dict(base_model_schema_model_properties.items()), - parameter_rules=list(base_model_schema_parameters_rules), - pricing=base_model_schema.pricing, - ) - - return entity - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [cohere.errors.service_unavailable_error.ServiceUnavailableError], - InvokeServerUnavailableError: [cohere.errors.internal_server_error.InternalServerError], - InvokeRateLimitError: [cohere.errors.too_many_requests_error.TooManyRequestsError], - InvokeAuthorizationError: [ - cohere.errors.unauthorized_error.UnauthorizedError, - cohere.errors.forbidden_error.ForbiddenError, - ], - InvokeBadRequestError: [ - cohere.core.api_error.ApiError, - cohere.errors.bad_request_error.BadRequestError, - cohere.errors.not_found_error.NotFoundError, - ], - } diff --git a/api/core/model_runtime/model_providers/cohere/rerank/__init__.py b/api/core/model_runtime/model_providers/cohere/rerank/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/cohere/rerank/_position.yaml b/api/core/model_runtime/model_providers/cohere/rerank/_position.yaml deleted file mode 100644 index 4dd58fc1708b0a..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/rerank/_position.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- rerank-english-v2.0 -- rerank-english-v3.0 -- rerank-multilingual-v2.0 -- rerank-multilingual-v3.0 diff --git a/api/core/model_runtime/model_providers/cohere/rerank/rerank-english-v2.0.yaml b/api/core/model_runtime/model_providers/cohere/rerank/rerank-english-v2.0.yaml deleted file mode 100644 index 70b4a91a0dc7c5..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/rerank/rerank-english-v2.0.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: rerank-english-v2.0 -model_type: rerank -model_properties: - context_size: 5120 diff --git a/api/core/model_runtime/model_providers/cohere/rerank/rerank-english-v3.0.yaml b/api/core/model_runtime/model_providers/cohere/rerank/rerank-english-v3.0.yaml deleted file mode 100644 index 3779f0b6c25dd8..00000000000000 --- 
a/api/core/model_runtime/model_providers/cohere/rerank/rerank-english-v3.0.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: rerank-english-v3.0 -model_type: rerank -model_properties: - context_size: 5120 diff --git a/api/core/model_runtime/model_providers/cohere/rerank/rerank-multilingual-v2.0.yaml b/api/core/model_runtime/model_providers/cohere/rerank/rerank-multilingual-v2.0.yaml deleted file mode 100644 index c9b90387cf04bc..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/rerank/rerank-multilingual-v2.0.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: rerank-multilingual-v2.0 -model_type: rerank -model_properties: - context_size: 5120 diff --git a/api/core/model_runtime/model_providers/cohere/rerank/rerank-multilingual-v3.0.yaml b/api/core/model_runtime/model_providers/cohere/rerank/rerank-multilingual-v3.0.yaml deleted file mode 100644 index 4f6690ba7685b5..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/rerank/rerank-multilingual-v3.0.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: rerank-multilingual-v3.0 -model_type: rerank -model_properties: - context_size: 5120 diff --git a/api/core/model_runtime/model_providers/cohere/rerank/rerank.py b/api/core/model_runtime/model_providers/cohere/rerank/rerank.py deleted file mode 100644 index aba8fedbc097e5..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/rerank/rerank.py +++ /dev/null @@ -1,125 +0,0 @@ -from typing import Optional - -import cohere -from cohere.core import RequestOptions - -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - - -class CohereRerankModel(RerankModel): - """ - 
Model class for Cohere rerank model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n - :param user: unique user id - :return: rerank result - """ - if len(docs) == 0: - return RerankResult(model=model, docs=docs) - - # initialize client - client = cohere.Client(credentials.get("api_key"), base_url=credentials.get("base_url")) - response = client.rerank( - query=query, - documents=docs, - model=model, - top_n=top_n, - return_documents=True, - request_options=RequestOptions(max_retries=0), - ) - - rerank_documents = [] - for idx, result in enumerate(response.results): - # format document - rerank_document = RerankDocument( - index=result.index, - text=result.document.text, - score=result.relevance_score, - ) - - # score threshold check - if score_threshold is not None: - if result.relevance_score >= score_threshold: - rerank_documents.append(rerank_document) - else: - rerank_documents.append(rerank_document) - - return RerankResult(model=model, docs=rerank_documents) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self.invoke( - model=model, - credentials=credentials, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. 
At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. Its capital is Saipan.", - ], - score_threshold=0.8, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [cohere.errors.service_unavailable_error.ServiceUnavailableError], - InvokeServerUnavailableError: [cohere.errors.internal_server_error.InternalServerError], - InvokeRateLimitError: [cohere.errors.too_many_requests_error.TooManyRequestsError], - InvokeAuthorizationError: [ - cohere.errors.unauthorized_error.UnauthorizedError, - cohere.errors.forbidden_error.ForbiddenError, - ], - InvokeBadRequestError: [ - cohere.core.api_error.ApiError, - cohere.errors.bad_request_error.BadRequestError, - cohere.errors.not_found_error.NotFoundError, - ], - } diff --git a/api/core/model_runtime/model_providers/cohere/text_embedding/__init__.py b/api/core/model_runtime/model_providers/cohere/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/cohere/text_embedding/_position.yaml b/api/core/model_runtime/model_providers/cohere/text_embedding/_position.yaml deleted file mode 100644 index 967a946f343985..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/text_embedding/_position.yaml +++ /dev/null @@ -1,7 +0,0 @@ -- embed-multilingual-v3.0 -- embed-multilingual-light-v3.0 -- embed-english-v3.0 -- embed-english-light-v3.0 -- 
embed-multilingual-v2.0 -- embed-english-v2.0 -- embed-english-light-v2.0 diff --git a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-light-v2.0.yaml b/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-light-v2.0.yaml deleted file mode 100644 index 8d2aaf17378a0c..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-light-v2.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: embed-english-light-v2.0 -model_type: text-embedding -model_properties: - context_size: 1024 - max_chunks: 48 -pricing: - input: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-light-v3.0.yaml b/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-light-v3.0.yaml deleted file mode 100644 index 43b79922e33137..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-light-v3.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: embed-english-light-v3.0 -model_type: text-embedding -model_properties: - context_size: 384 - max_chunks: 48 -pricing: - input: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-v2.0.yaml b/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-v2.0.yaml deleted file mode 100644 index acee82b202059a..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-v2.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: embed-english-v2.0 -model_type: text-embedding -model_properties: - context_size: 4096 - max_chunks: 48 -pricing: - input: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-v3.0.yaml b/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-v3.0.yaml deleted file mode 100644 index 0ad713253e1394..00000000000000 --- 
a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-english-v3.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: embed-english-v3.0 -model_type: text-embedding -model_properties: - context_size: 1024 - max_chunks: 48 -pricing: - input: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-light-v3.0.yaml b/api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-light-v3.0.yaml deleted file mode 100644 index c25306723376c8..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-light-v3.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: embed-multilingual-light-v3.0 -model_type: text-embedding -model_properties: - context_size: 384 - max_chunks: 48 -pricing: - input: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-v2.0.yaml b/api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-v2.0.yaml deleted file mode 100644 index 4dbc37d5e82c76..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-v2.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: embed-multilingual-v2.0 -model_type: text-embedding -model_properties: - context_size: 768 - max_chunks: 48 -pricing: - input: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-v3.0.yaml b/api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-v3.0.yaml deleted file mode 100644 index ec689ada1b7d68..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/text_embedding/embed-multilingual-v3.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: embed-multilingual-v3.0 -model_type: text-embedding -model_properties: - context_size: 1024 - max_chunks: 48 -pricing: - input: '0.1' - unit: '0.000001' - currency: USD diff 
--git a/api/core/model_runtime/model_providers/deepseek/__init__.py b/api/core/model_runtime/model_providers/deepseek/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/deepseek/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/deepseek/_assets/icon_l_en.svg deleted file mode 100644 index 425494404f7e63..00000000000000 --- a/api/core/model_runtime/model_providers/deepseek/_assets/icon_l_en.svg +++ /dev/null @@ -1,22 +0,0 @@ - - - Created with Pixso. - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/deepseek/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/deepseek/_assets/icon_s_en.svg deleted file mode 100644 index aa854a7504c15c..00000000000000 --- a/api/core/model_runtime/model_providers/deepseek/_assets/icon_s_en.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/api/core/model_runtime/model_providers/deepseek/deepseek.py b/api/core/model_runtime/model_providers/deepseek/deepseek.py deleted file mode 100644 index 10feef897272db..00000000000000 --- a/api/core/model_runtime/model_providers/deepseek/deepseek.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class DeepSeekProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `deepseek-chat` model for validate, - # no matter what model you pass in, text completion model or chat model - model_instance.validate_credentials(model="deepseek-chat", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/deepseek/deepseek.yaml b/api/core/model_runtime/model_providers/deepseek/deepseek.yaml deleted file mode 100644 index 16abd358d6dc70..00000000000000 --- a/api/core/model_runtime/model_providers/deepseek/deepseek.yaml +++ /dev/null @@ -1,41 +0,0 @@ -provider: deepseek -label: - en_US: deepseek - zh_Hans: 深度求索 -description: - en_US: Models provided by deepseek, such as deepseek-chat、deepseek-coder. - zh_Hans: 深度求索提供的模型,例如 deepseek-chat、deepseek-coder 。 -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#c0cdff" -help: - title: - en_US: Get your API Key from deepseek - zh_Hans: 从深度求索获取 API Key - url: - en_US: https://platform.deepseek.com/api_keys -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: endpoint_url - label: - zh_Hans: 自定义 API endpoint 地址 - en_US: Custom API endpoint URL - type: text-input - required: false - placeholder: - zh_Hans: Base URL, e.g. https://api.deepseek.com/v1 or https://api.deepseek.com - en_US: Base URL, e.g. 
https://api.deepseek.com/v1 or https://api.deepseek.com diff --git a/api/core/model_runtime/model_providers/deepseek/llm/__init__.py b/api/core/model_runtime/model_providers/deepseek/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/deepseek/llm/_position.yaml b/api/core/model_runtime/model_providers/deepseek/llm/_position.yaml deleted file mode 100644 index 43d03f2ee9598a..00000000000000 --- a/api/core/model_runtime/model_providers/deepseek/llm/_position.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- deepseek-chat -- deepseek-coder diff --git a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml deleted file mode 100644 index 4973ac8ad6981c..00000000000000 --- a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml +++ /dev/null @@ -1,78 +0,0 @@ -model: deepseek-chat -label: - zh_Hans: deepseek-chat - en_US: deepseek-chat -model_type: llm -features: - - agent-thought - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 1 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 4096 - min: 1 - max: 8192 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - type: float - default: 1 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. - - name: logprobs - help: - zh_Hans: 是否返回所输出 token 的对数概率。如果为 true,则在 message 的 content 中返回每个输出 token 的对数概率。 - en_US: Whether to return the log probability of the output token. If true, returns the log probability of each output token in the content of message . - type: boolean - - name: top_logprobs - type: int - default: 0 - min: 0 - max: 20 - help: - zh_Hans: 一个介于 0 到 20 之间的整数 N,指定每个输出位置返回输出概率 top N 的 token,且返回这些 token 的对数概率。指定此参数时,logprobs 必须为 true。 - en_US: An integer N between 0 and 20, specifying that each output position returns the top N tokens with output probability, and returns the logarithmic probability of these tokens. When specifying this parameter, logprobs must be true. - - name: frequency_penalty - use_template: frequency_penalty - default: 0 - min: -2.0 - max: 2.0 - help: - zh_Hans: 介于 -2.0 和 2.0 之间的数字。如果该值为正,那么新 token 会根据其在已有文本中的出现频率受到相应的惩罚,降低模型重复相同内容的可能性。 - en_US: A number between -2.0 and 2.0. If the value is positive, new tokens are penalized based on their frequency of occurrence in existing text, reducing the likelihood that the model will repeat the same content. 
- - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '1' - output: '2' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml deleted file mode 100644 index caafeadadd999e..00000000000000 --- a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml +++ /dev/null @@ -1,28 +0,0 @@ -model: deepseek-coder -label: - zh_Hans: deepseek-coder - en_US: deepseek-coder -model_type: llm -features: - - agent-thought - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 4096 - default: 1024 diff --git a/api/core/model_runtime/model_providers/deepseek/llm/llm.py b/api/core/model_runtime/model_providers/deepseek/llm/llm.py deleted file mode 100644 index 6d0a3ee2628ea2..00000000000000 --- a/api/core/model_runtime/model_providers/deepseek/llm/llm.py +++ /dev/null @@ -1,116 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union -from urllib.parse import urlparse - -import tiktoken - -from core.model_runtime.entities.llm_entities import LLMResult -from core.model_runtime.entities.message_entities import ( - PromptMessage, - PromptMessageTool, -) -from core.model_runtime.model_providers.openai.llm.llm import OpenAILargeLanguageModel - - -class DeepSeekLargeLanguageModel(OpenAILargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: 
Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials) - - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - # refactored from openai model runtime, use cl100k_base for calculate token number - def _num_tokens_from_string(self, model: str, text: str, tools: Optional[list[PromptMessageTool]] = None) -> int: - """ - Calculate num tokens for text completion model with tiktoken package. - - :param model: model name - :param text: prompt text - :param tools: tools for tool calling - :return: number of tokens - """ - encoding = tiktoken.get_encoding("cl100k_base") - num_tokens = len(encoding.encode(text)) - - if tools: - num_tokens += self._num_tokens_for_tools(encoding, tools) - - return num_tokens - - # refactored from openai model runtime, use cl100k_base for calculate token number - def _num_tokens_from_messages( - self, model: str, messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None - ) -> int: - """Calculate num tokens for gpt-3.5-turbo and gpt-4 with tiktoken package. 
- - Official documentation: https://github.com/openai/openai-cookbook/blob/ - main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb""" - encoding = tiktoken.get_encoding("cl100k_base") - tokens_per_message = 3 - tokens_per_name = 1 - - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - # Cast str(value) in case the message value is not a string - # This occurs with function messages - # TODO: The current token calculation method for the image type is not implemented, - # which need to download the image and then get the resolution for calculation, - # and will increase the request delay - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - if key == "tool_calls": - for tool_call in value: - for t_key, t_value in tool_call.items(): - num_tokens += len(encoding.encode(t_key)) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += len(encoding.encode(f_key)) - num_tokens += len(encoding.encode(f_value)) - else: - num_tokens += len(encoding.encode(t_key)) - num_tokens += len(encoding.encode(t_value)) - else: - num_tokens += len(encoding.encode(str(value))) - - if key == "name": - num_tokens += tokens_per_name - - # every reply is primed with assistant - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(encoding, tools) - - return num_tokens - - @staticmethod - def _add_custom_parameters(credentials: dict) -> None: - credentials["mode"] = "chat" - credentials["openai_api_key"] = credentials["api_key"] - if "endpoint_url" not in credentials or credentials["endpoint_url"] == "": - credentials["openai_api_base"] = "https://api.deepseek.com" - else: - parsed_url = urlparse(credentials["endpoint_url"]) - credentials["openai_api_base"] = 
f"{parsed_url.scheme}://{parsed_url.netloc}" diff --git a/api/core/model_runtime/model_providers/fireworks/__init__.py b/api/core/model_runtime/model_providers/fireworks/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/fireworks/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/fireworks/_assets/icon_l_en.svg deleted file mode 100644 index 582605cc422cce..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/_assets/icon_l_en.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/fireworks/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/fireworks/_assets/icon_s_en.svg deleted file mode 100644 index 86eeba66f9290a..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/_assets/icon_s_en.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/api/core/model_runtime/model_providers/fireworks/_common.py b/api/core/model_runtime/model_providers/fireworks/_common.py deleted file mode 100644 index 378ced3a4019ba..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/_common.py +++ /dev/null @@ -1,52 +0,0 @@ -from collections.abc import Mapping - -import openai - -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) - - -class _CommonFireworks: - def _to_credential_kwargs(self, credentials: Mapping) -> dict: - """ - Transform credentials to kwargs for model instance - - :param credentials: - :return: - """ - credentials_kwargs = { - "api_key": credentials["fireworks_api_key"], - "base_url": "https://api.fireworks.ai/inference/v1", - "max_retries": 1, - } - - return credentials_kwargs - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to 
unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [openai.APIConnectionError, openai.APITimeoutError], - InvokeServerUnavailableError: [openai.InternalServerError], - InvokeRateLimitError: [openai.RateLimitError], - InvokeAuthorizationError: [openai.AuthenticationError, openai.PermissionDeniedError], - InvokeBadRequestError: [ - openai.BadRequestError, - openai.NotFoundError, - openai.UnprocessableEntityError, - openai.APIError, - ], - } diff --git a/api/core/model_runtime/model_providers/fireworks/fireworks.py b/api/core/model_runtime/model_providers/fireworks/fireworks.py deleted file mode 100644 index 15f25badab994f..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/fireworks.py +++ /dev/null @@ -1,27 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class FireworksProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - model_instance.validate_credentials( - model="accounts/fireworks/models/llama-v3p1-8b-instruct", credentials=credentials - ) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/fireworks/llm/__init__.py b/api/core/model_runtime/model_providers/fireworks/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/fireworks/llm/_position.yaml b/api/core/model_runtime/model_providers/fireworks/llm/_position.yaml deleted file mode 100644 index 9f7c1af68cef72..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/_position.yaml +++ /dev/null @@ -1,16 +0,0 @@ -- llama-v3p1-405b-instruct -- llama-v3p1-70b-instruct -- llama-v3p1-8b-instruct -- llama-v3-70b-instruct -- mixtral-8x22b-instruct -- mixtral-8x7b-instruct -- firefunction-v2 -- firefunction-v1 -- gemma2-9b-it -- llama-v3-70b-instruct-hf -- llama-v3-8b-instruct -- llama-v3-8b-instruct-hf -- mixtral-8x7b-instruct-hf -- mythomax-l2-13b -- phi-3-vision-128k-instruct -- yi-large diff --git a/api/core/model_runtime/model_providers/fireworks/llm/firefunction-v1.yaml b/api/core/model_runtime/model_providers/fireworks/llm/firefunction-v1.yaml deleted file mode 100644 index f6bac12832d646..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/firefunction-v1.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/firefunction-v1 -label: - zh_Hans: Firefunction V1 - en_US: Firefunction V1 -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - 
en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.5' - output: '0.5' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/firefunction-v2.yaml b/api/core/model_runtime/model_providers/fireworks/llm/firefunction-v2.yaml deleted file mode 100644 index 2979cb46d572a3..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/firefunction-v2.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/firefunction-v2 -label: - zh_Hans: Firefunction V2 - en_US: Firefunction V2 -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.9' - output: '0.9' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/gemma2-9b-it.yaml b/api/core/model_runtime/model_providers/fireworks/llm/gemma2-9b-it.yaml deleted file mode 100644 index ee41a7e2fdc3d5..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/gemma2-9b-it.yaml +++ /dev/null @@ -1,45 +0,0 @@ -model: accounts/fireworks/models/gemma2-9b-it -label: - zh_Hans: Gemma2 9B Instruct - en_US: Gemma2 9B Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.2' - output: '0.2' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-70b-instruct-hf.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-70b-instruct-hf.yaml deleted file mode 100644 index 2ae89b88165d12..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-70b-instruct-hf.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/llama-v3-70b-instruct-hf -label: - zh_Hans: Llama3 70B Instruct(HF version) - en_US: Llama3 70B Instruct(HF version) -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.9' - output: '0.9' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-70b-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-70b-instruct.yaml deleted file mode 100644 index 7c24b08ca5cca1..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-70b-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/llama-v3-70b-instruct -label: - zh_Hans: Llama3 70B Instruct - en_US: Llama3 70B Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.9' - output: '0.9' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-8b-instruct-hf.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-8b-instruct-hf.yaml deleted file mode 100644 index 83507ef3e5276e..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-8b-instruct-hf.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/llama-v3-8b-instruct-hf -label: - zh_Hans: Llama3 8B Instruct(HF version) - en_US: Llama3 8B Instruct(HF version) -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.2' - output: '0.2' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-8b-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-8b-instruct.yaml deleted file mode 100644 index d8ac9537b80e7f..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3-8b-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/llama-v3-8b-instruct -label: - zh_Hans: Llama3 8B Instruct - en_US: Llama3 8B Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.2' - output: '0.2' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-405b-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-405b-instruct.yaml deleted file mode 100644 index c4ddb3e9246d4a..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-405b-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/llama-v3p1-405b-instruct -label: - zh_Hans: Llama3.1 405B Instruct - en_US: Llama3.1 405B Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '3' - output: '3' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-70b-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-70b-instruct.yaml deleted file mode 100644 index 62f84f87fa5609..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-70b-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/llama-v3p1-70b-instruct -label: - zh_Hans: Llama3.1 70B Instruct - en_US: Llama3.1 70B Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.2' - output: '0.2' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-8b-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-8b-instruct.yaml deleted file mode 100644 index 9bb99c91b65b0b..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p1-8b-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/llama-v3p1-8b-instruct -label: - zh_Hans: Llama3.1 8B Instruct - en_US: Llama3.1 8B Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.2' - output: '0.2' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llm.py b/api/core/model_runtime/model_providers/fireworks/llm/llm.py deleted file mode 100644 index 2dcf1adba64518..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llm.py +++ /dev/null @@ -1,610 +0,0 @@ -import logging -from collections.abc import Generator -from typing import Optional, Union, cast - -from openai import OpenAI, Stream -from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessageToolCall -from openai.types.chat.chat_completion_chunk import ChoiceDeltaFunctionCall, ChoiceDeltaToolCall -from openai.types.chat.chat_completion_message import FunctionCall - -from core.model_runtime.callbacks.base_callback import Callback -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.fireworks._common import _CommonFireworks - -logger = logging.getLogger(__name__) - -FIREWORKS_BLOCK_MODE_PROMPT = """You should always follow the instructions and output a valid {{block}} object. 
-The structure of the {{block}} object you can found in the instructions, use {"answer": "$your_answer"} as the default structure -if you are not sure about the structure. - - -{{instructions}} - -""" # noqa: E501 - - -class FireworksLargeLanguageModel(_CommonFireworks, LargeLanguageModel): - """ - Model class for Fireworks large language model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - - return self._chat_generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - def _code_block_mode_wrapper( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - callbacks: Optional[list[Callback]] = None, - ) -> Union[LLMResult, Generator]: - """ - Code block mode wrapper for invoking large language model - """ - if "response_format" in model_parameters and model_parameters["response_format"] in {"JSON", "XML"}: - stop = stop or [] - self._transform_chat_json_prompts( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - 
stream=stream, - user=user, - response_format=model_parameters["response_format"], - ) - model_parameters.pop("response_format") - - return self._invoke( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - def _transform_chat_json_prompts( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - response_format: str = "JSON", - ) -> None: - """ - Transform json prompts - """ - if stop is None: - stop = [] - if "```\n" not in stop: - stop.append("```\n") - if "\n```" not in stop: - stop.append("\n```") - - if len(prompt_messages) > 0 and isinstance(prompt_messages[0], SystemPromptMessage): - prompt_messages[0] = SystemPromptMessage( - content=FIREWORKS_BLOCK_MODE_PROMPT.replace("{{instructions}}", prompt_messages[0].content).replace( - "{{block}}", response_format - ) - ) - prompt_messages.append(AssistantPromptMessage(content=f"\n```{response_format}\n")) - else: - prompt_messages.insert( - 0, - SystemPromptMessage( - content=FIREWORKS_BLOCK_MODE_PROMPT.replace( - "{{instructions}}", f"Please output a valid {response_format} object." 
- ).replace("{{block}}", response_format) - ), - ) - prompt_messages.append(AssistantPromptMessage(content=f"\n```{response_format}")) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - return self._num_tokens_from_messages(model, prompt_messages, tools) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - client.chat.completions.create( - messages=[{"role": "user", "content": "ping"}], model=model, temperature=0, max_tokens=10, stream=False - ) - except Exception as e: - raise CredentialsValidateFailedError(str(e)) - - def _chat_generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - extra_model_kwargs = {} - - if tools: - extra_model_kwargs["functions"] = [ - {"name": tool.name, "description": tool.description, "parameters": tool.parameters} for tool in tools - ] - - if stop: - extra_model_kwargs["stop"] = stop - - if user: - extra_model_kwargs["user"] = user - - # chat model - response = client.chat.completions.create( - messages=[self._convert_prompt_message_to_dict(m) for m in prompt_messages], - model=model, - stream=stream, - 
**model_parameters, - **extra_model_kwargs, - ) - - if stream: - return self._handle_chat_generate_stream_response(model, credentials, response, prompt_messages, tools) - return self._handle_chat_generate_response(model, credentials, response, prompt_messages, tools) - - def _handle_chat_generate_response( - self, - model: str, - credentials: dict, - response: ChatCompletion, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> LLMResult: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: llm response - """ - assistant_message = response.choices[0].message - # assistant_message_tool_calls = assistant_message.tool_calls - assistant_message_function_call = assistant_message.function_call - - # extract tool calls from response - # tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls) - function_call = self._extract_response_function_call(assistant_message_function_call) - tool_calls = [function_call] if function_call else [] - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_message.content, tool_calls=tool_calls) - - # calculate num tokens - if response.usage: - # transform usage - prompt_tokens = response.usage.prompt_tokens - completion_tokens = response.usage.completion_tokens - else: - # calculate num tokens - prompt_tokens = self._num_tokens_from_messages(model, prompt_messages, tools) - completion_tokens = self._num_tokens_from_messages(model, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - response = LLMResult( - model=response.model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - 
system_fingerprint=response.system_fingerprint, - ) - - return response - - def _handle_chat_generate_stream_response( - self, - model: str, - credentials: dict, - response: Stream[ChatCompletionChunk], - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> Generator: - """ - Handle llm chat stream response - - :param model: model name - :param response: response - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: llm response chunk generator - """ - full_assistant_content = "" - delta_assistant_message_function_call_storage: Optional[ChoiceDeltaFunctionCall] = None - prompt_tokens = 0 - completion_tokens = 0 - final_tool_calls = [] - final_chunk = LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=""), - ), - ) - - for chunk in response: - if len(chunk.choices) == 0: - if chunk.usage: - # calculate num tokens - prompt_tokens = chunk.usage.prompt_tokens - completion_tokens = chunk.usage.completion_tokens - continue - - delta = chunk.choices[0] - has_finish_reason = delta.finish_reason is not None - - if ( - not has_finish_reason - and (delta.delta.content is None or delta.delta.content == "") - and delta.delta.function_call is None - ): - continue - - # assistant_message_tool_calls = delta.delta.tool_calls - assistant_message_function_call = delta.delta.function_call - - # extract tool calls from response - if delta_assistant_message_function_call_storage is not None: - # handle process of stream function call - if assistant_message_function_call: - # message has not ended ever - delta_assistant_message_function_call_storage.arguments += assistant_message_function_call.arguments - continue - else: - # message has ended - assistant_message_function_call = delta_assistant_message_function_call_storage - delta_assistant_message_function_call_storage = None - else: - if 
assistant_message_function_call: - # start of stream function call - delta_assistant_message_function_call_storage = assistant_message_function_call - if delta_assistant_message_function_call_storage.arguments is None: - delta_assistant_message_function_call_storage.arguments = "" - if not has_finish_reason: - continue - - # tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls) - function_call = self._extract_response_function_call(assistant_message_function_call) - tool_calls = [function_call] if function_call else [] - if tool_calls: - final_tool_calls.extend(tool_calls) - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=delta.delta.content or "", tool_calls=tool_calls) - - full_assistant_content += delta.delta.content or "" - - if has_finish_reason: - final_chunk = LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - finish_reason=delta.finish_reason, - ), - ) - else: - yield LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - ), - ) - - if not prompt_tokens: - prompt_tokens = self._num_tokens_from_messages(model, prompt_messages, tools) - - if not completion_tokens: - full_assistant_prompt_message = AssistantPromptMessage( - content=full_assistant_content, tool_calls=final_tool_calls - ) - completion_tokens = self._num_tokens_from_messages(model, [full_assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - final_chunk.delta.usage = usage - - yield final_chunk - - def _extract_response_tool_calls( - self, response_tool_calls: list[ChatCompletionMessageToolCall | ChoiceDeltaToolCall] - ) -> 
list[AssistantPromptMessage.ToolCall]: - """ - Extract tool calls from response - - :param response_tool_calls: response tool calls - :return: list of tool calls - """ - tool_calls = [] - if response_tool_calls: - for response_tool_call in response_tool_calls: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call.function.name, arguments=response_tool_call.function.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_tool_call.id, type=response_tool_call.type, function=function - ) - tool_calls.append(tool_call) - - return tool_calls - - def _extract_response_function_call( - self, response_function_call: FunctionCall | ChoiceDeltaFunctionCall - ) -> AssistantPromptMessage.ToolCall: - """ - Extract function call from response - - :param response_function_call: response function call - :return: tool call - """ - tool_call = None - if response_function_call: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_function_call.name, arguments=response_function_call.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_function_call.name, type="function", function=function - ) - - return tool_call - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict for Fireworks API - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - 
sub_message_dict = { - "type": "image_url", - "image_url": {"url": message_content.data, "detail": message_content.detail.value}, - } - sub_messages.append(sub_message_dict) - - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls: - # message_dict["tool_calls"] = [tool_call.dict() for tool_call in - # message.tool_calls] - function_call = message.tool_calls[0] - message_dict["function_call"] = { - "name": function_call.function.name, - "arguments": function_call.function.arguments, - } - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - # message_dict = { - # "role": "tool", - # "content": message.content, - # "tool_call_id": message.tool_call_id - # } - message_dict = {"role": "function", "content": message.content, "name": message.tool_call_id} - else: - raise ValueError(f"Got unknown type {message}") - - if message.name: - message_dict["name"] = message.name - - return message_dict - - def _num_tokens_from_messages( - self, - model: str, - messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - credentials: dict = None, - ) -> int: - """ - Approximate num tokens with GPT2 tokenizer. 
- """ - - tokens_per_message = 3 - tokens_per_name = 1 - - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - # Cast str(value) in case the message value is not a string - # This occurs with function messages - # TODO: The current token calculation method for the image type is not implemented, - # which need to download the image and then get the resolution for calculation, - # and will increase the request delay - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - if key == "tool_calls": - for tool_call in value: - for t_key, t_value in tool_call.items(): - num_tokens += self._get_num_tokens_by_gpt2(t_key) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += self._get_num_tokens_by_gpt2(f_key) - num_tokens += self._get_num_tokens_by_gpt2(f_value) - else: - num_tokens += self._get_num_tokens_by_gpt2(t_key) - num_tokens += self._get_num_tokens_by_gpt2(t_value) - else: - num_tokens += self._get_num_tokens_by_gpt2(str(value)) - - if key == "name": - num_tokens += tokens_per_name - - # every reply is primed with assistant - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(tools) - - return num_tokens - - def _num_tokens_for_tools(self, tools: list[PromptMessageTool]) -> int: - """ - Calculate num tokens for tool calling with tiktoken package. 
- - :param tools: tools for tool calling - :return: number of tokens - """ - num_tokens = 0 - for tool in tools: - num_tokens += self._get_num_tokens_by_gpt2("type") - num_tokens += self._get_num_tokens_by_gpt2("function") - num_tokens += self._get_num_tokens_by_gpt2("function") - - # calculate num tokens for function object - num_tokens += self._get_num_tokens_by_gpt2("name") - num_tokens += self._get_num_tokens_by_gpt2(tool.name) - num_tokens += self._get_num_tokens_by_gpt2("description") - num_tokens += self._get_num_tokens_by_gpt2(tool.description) - parameters = tool.parameters - num_tokens += self._get_num_tokens_by_gpt2("parameters") - if "title" in parameters: - num_tokens += self._get_num_tokens_by_gpt2("title") - num_tokens += self._get_num_tokens_by_gpt2(parameters.get("title")) - num_tokens += self._get_num_tokens_by_gpt2("type") - num_tokens += self._get_num_tokens_by_gpt2(parameters.get("type")) - if "properties" in parameters: - num_tokens += self._get_num_tokens_by_gpt2("properties") - for key, value in parameters.get("properties").items(): - num_tokens += self._get_num_tokens_by_gpt2(key) - for field_key, field_value in value.items(): - num_tokens += self._get_num_tokens_by_gpt2(field_key) - if field_key == "enum": - for enum_field in field_value: - num_tokens += 3 - num_tokens += self._get_num_tokens_by_gpt2(enum_field) - else: - num_tokens += self._get_num_tokens_by_gpt2(field_key) - num_tokens += self._get_num_tokens_by_gpt2(str(field_value)) - if "required" in parameters: - num_tokens += self._get_num_tokens_by_gpt2("required") - for required_field in parameters["required"]: - num_tokens += 3 - num_tokens += self._get_num_tokens_by_gpt2(required_field) - - return num_tokens diff --git a/api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x22b-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x22b-instruct.yaml deleted file mode 100644 index 87d977e26cf1b2..00000000000000 --- 
a/api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x22b-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/mixtral-8x22b-instruct -label: - zh_Hans: Mixtral MoE 8x22B Instruct - en_US: Mixtral MoE 8x22B Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 65536 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '1.2' - output: '1.2' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x7b-instruct-hf.yaml b/api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x7b-instruct-hf.yaml deleted file mode 100644 index e3d5a90858c5ae..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x7b-instruct-hf.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/mixtral-8x7b-instruct-hf -label: - zh_Hans: Mixtral MoE 8x7B Instruct(HF version) - en_US: Mixtral MoE 8x7B Instruct(HF version) -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for 
each subsequent token. - - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.5' - output: '0.5' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x7b-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x7b-instruct.yaml deleted file mode 100644 index 45f632ceff2cfc..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/mixtral-8x7b-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/mixtral-8x7b-instruct -label: - zh_Hans: Mixtral MoE 8x7B Instruct - en_US: Mixtral MoE 8x7B Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.5' - output: '0.5' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/mythomax-l2-13b.yaml b/api/core/model_runtime/model_providers/fireworks/llm/mythomax-l2-13b.yaml deleted file mode 100644 index 9c3486ba10751b..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/mythomax-l2-13b.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/mythomax-l2-13b -label: - zh_Hans: MythoMax L2 13b - en_US: MythoMax L2 13b -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.2' - output: '0.2' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/phi-3-vision-128k-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/phi-3-vision-128k-instruct.yaml deleted file mode 100644 index e399f2edb1b1bd..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/phi-3-vision-128k-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/phi-3-vision-128k-instruct -label: - zh_Hans: Phi3.5 Vision Instruct - en_US: Phi3.5 Vision Instruct -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.2' - output: '0.2' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/yi-large.yaml b/api/core/model_runtime/model_providers/fireworks/llm/yi-large.yaml deleted file mode 100644 index bb4b6f994ec12a..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/yi-large.yaml +++ /dev/null @@ -1,45 +0,0 @@ -model: accounts/yi-01-ai/models/yi-large -label: - zh_Hans: Yi-Large - en_US: Yi-Large -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '3' - output: '3' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fishaudio/__init__.py b/api/core/model_runtime/model_providers/fishaudio/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/fishaudio/_assets/fishaudio_l_en.svg b/api/core/model_runtime/model_providers/fishaudio/_assets/fishaudio_l_en.svg deleted file mode 100644 index d6f7723bd5ca4c..00000000000000 --- a/api/core/model_runtime/model_providers/fishaudio/_assets/fishaudio_l_en.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/fishaudio/_assets/fishaudio_s_en.svg b/api/core/model_runtime/model_providers/fishaudio/_assets/fishaudio_s_en.svg deleted file mode 100644 index d6f7723bd5ca4c..00000000000000 --- a/api/core/model_runtime/model_providers/fishaudio/_assets/fishaudio_s_en.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/fishaudio/fishaudio.py b/api/core/model_runtime/model_providers/fishaudio/fishaudio.py deleted file mode 100644 index 3bc4b533e0815a..00000000000000 --- a/api/core/model_runtime/model_providers/fishaudio/fishaudio.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class 
FishAudioProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - For debugging purposes, this method now always passes validation. - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.TTS) - model_instance.validate_credentials(credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/fishaudio/tts/__init__.py b/api/core/model_runtime/model_providers/fishaudio/tts/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/fishaudio/tts/tts.py b/api/core/model_runtime/model_providers/fishaudio/tts/tts.py deleted file mode 100644 index 895a7a914c97de..00000000000000 --- a/api/core/model_runtime/model_providers/fishaudio/tts/tts.py +++ /dev/null @@ -1,158 +0,0 @@ -from typing import Optional - -import httpx - -from core.model_runtime.errors.invoke import InvokeBadRequestError, InvokeError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.tts_model import TTSModel - - -class FishAudioText2SpeechModel(TTSModel): - """ - Model class for Fish.audio Text to Speech model. 
- """ - - def get_tts_model_voices(self, model: str, credentials: dict, language: Optional[str] = None) -> list: - api_base = credentials.get("api_base", "https://api.fish.audio") - api_key = credentials.get("api_key") - use_public_models = credentials.get("use_public_models", "false") == "true" - - params = { - "self": str(not use_public_models).lower(), - "page_size": "100", - } - - if language is not None: - if "-" in language: - language = language.split("-")[0] - params["language"] = language - - results = httpx.get( - f"{api_base}/model", - headers={"Authorization": f"Bearer {api_key}"}, - params=params, - ) - - results.raise_for_status() - data = results.json() - - return [{"name": i["title"], "value": i["_id"]} for i in data["items"]] - - def _invoke( - self, - model: str, - tenant_id: str, - credentials: dict, - content_text: str, - voice: str, - user: Optional[str] = None, - ) -> any: - """ - Invoke text2speech model - - :param model: model name - :param tenant_id: user tenant id - :param credentials: model credentials - :param voice: model timbre - :param content_text: text content to be translated - :param user: unique user id - :return: generator yielding audio chunks - """ - - return self._tts_invoke_streaming( - model=model, - credentials=credentials, - content_text=content_text, - voice=voice, - ) - - def validate_credentials(self, credentials: dict, user: Optional[str] = None) -> None: - """ - Validate credentials for text2speech model - - :param credentials: model credentials - :param user: unique user id - """ - - try: - self.get_tts_model_voices( - None, - credentials={ - "api_key": credentials["api_key"], - "api_base": credentials["api_base"], - # Disable public models will trigger a 403 error if user is not logged in - "use_public_models": "false", - }, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: - """ - 
Invoke streaming text2speech model - :param model: model name - :param credentials: model credentials - :param content_text: text content to be translated - :param voice: ID of the reference audio (if any) - :return: generator yielding audio chunks - """ - - try: - word_limit = self._get_model_word_limit(model, credentials) - if len(content_text) > word_limit: - sentences = self._split_text_into_sentences(content_text, max_length=word_limit) - else: - sentences = [content_text.strip()] - - for i in range(len(sentences)): - yield from self._tts_invoke_streaming_sentence( - credentials=credentials, content_text=sentences[i], voice=voice - ) - - except Exception as ex: - raise InvokeBadRequestError(str(ex)) - - def _tts_invoke_streaming_sentence(self, credentials: dict, content_text: str, voice: Optional[str] = None) -> any: - """ - Invoke streaming text2speech model - - :param credentials: model credentials - :param content_text: text content to be translated - :param voice: ID of the reference audio (if any) - :return: generator yielding audio chunks - """ - api_key = credentials.get("api_key") - api_url = credentials.get("api_base", "https://api.fish.audio") - latency = credentials.get("latency") - - if not api_key: - raise InvokeBadRequestError("API key is required") - - with httpx.stream( - "POST", - api_url + "/v1/tts", - json={"text": content_text, "reference_id": voice, "latency": latency}, - headers={ - "Authorization": f"Bearer {api_key}", - }, - timeout=None, - ) as response: - if response.status_code != 200: - raise InvokeBadRequestError(f"Error: {response.status_code} - {response.text}") - yield from response.iter_bytes() - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeBadRequestError: [ - httpx.HTTPStatusError, - ], - } diff --git a/api/core/model_runtime/model_providers/fishaudio/tts/tts.yaml b/api/core/model_runtime/model_providers/fishaudio/tts/tts.yaml deleted file mode 100644 index b4a446a95701c1..00000000000000 --- a/api/core/model_runtime/model_providers/fishaudio/tts/tts.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: tts-default -model_type: tts -model_properties: - word_limit: 1000 - audio_type: 'mp3' diff --git a/api/core/model_runtime/model_providers/google/__init__.py b/api/core/model_runtime/model_providers/google/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/google/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/google/_assets/icon_l_en.svg deleted file mode 100644 index bb23bffcf1c039..00000000000000 --- a/api/core/model_runtime/model_providers/google/_assets/icon_l_en.svg +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/google/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/google/_assets/icon_s_en.svg deleted file mode 100644 index c5c608cd7c603d..00000000000000 --- a/api/core/model_runtime/model_providers/google/_assets/icon_s_en.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/google/google.py b/api/core/model_runtime/model_providers/google/google.py deleted file mode 100644 index 70f56a8337b2e6..00000000000000 --- a/api/core/model_runtime/model_providers/google/google.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class GoogleProvider(ModelProvider): - 
def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `gemini-pro` model for validate, - model_instance.validate_credentials(model="gemini-pro", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/google/google.yaml b/api/core/model_runtime/model_providers/google/google.yaml deleted file mode 100644 index 69d4e371c462fa..00000000000000 --- a/api/core/model_runtime/model_providers/google/google.yaml +++ /dev/null @@ -1,31 +0,0 @@ -provider: google -label: - en_US: Google -description: - en_US: Google's Gemini model. - zh_Hans: 谷歌提供的 Gemini 模型. 
-icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#FCFDFF" -help: - title: - en_US: Get your API Key from Google - zh_Hans: 从 Google 获取 API Key - url: - en_US: https://ai.google.dev/ -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: google_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/google/llm/__init__.py b/api/core/model_runtime/model_providers/google/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0827.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0827.yaml deleted file mode 100644 index 4e0209890a336a..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0827.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-flash-8b-exp-0827 -label: - en_US: Gemini 1.5 Flash 8B 0827 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. - default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-exp-0827.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-exp-0827.yaml deleted file mode 100644 index faabc5e4d13a73..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-exp-0827.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-flash-exp-0827 -label: - en_US: Gemini 1.5 Flash 0827 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-latest.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-latest.yaml deleted file mode 100644 index a22fcca9419b91..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-latest.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-flash-latest -label: - en_US: Gemini 1.5 Flash Latest -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0801.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0801.yaml deleted file mode 100644 index 97c68f7a18d91e..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0801.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-pro-exp-0801 -label: - en_US: Gemini 1.5 Pro 0801 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 2097152 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0827.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0827.yaml deleted file mode 100644 index 860e4816a163cc..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0827.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-pro-exp-0827 -label: - en_US: Gemini 1.5 Pro 0827 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 2097152 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-latest.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-latest.yaml deleted file mode 100644 index d1bf7d269de765..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-latest.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-pro-latest -label: - en_US: Gemini 1.5 Pro Latest -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 2097152 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-pro-vision.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-pro-vision.yaml deleted file mode 100644 index 2d213d56adb9c7..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-pro-vision.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: gemini-pro-vision -label: - en_US: Gemini Pro Vision -model_type: llm -features: - - vision -model_properties: - mode: chat - context_size: 12288 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-pro.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-pro.yaml deleted file mode 100644 index e2f487c1ee9219..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-pro.yaml +++ /dev/null @@ -1,47 +0,0 @@ -model: gemini-pro -label: - en_US: Gemini Pro -model_type: llm -features: - - agent-thought - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 30720 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 2048 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/llm.py b/api/core/model_runtime/model_providers/google/llm/llm.py deleted file mode 100644 index e686ad08d9d355..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/llm.py +++ /dev/null @@ -1,443 +0,0 @@ -import base64 -import io -import json -import logging -from collections.abc import Generator -from typing import Optional, Union, cast - -import google.ai.generativelanguage as glm -import google.generativeai as genai -import requests -from google.api_core import exceptions -from google.generativeai.client import _ClientManager -from google.generativeai.types import ContentType, GenerateContentResponse -from google.generativeai.types.content_types import to_part -from PIL import Image - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - -logger = logging.getLogger(__name__) - -GEMINI_BLOCK_MODE_PROMPT = """You should always follow the instructions and output a valid {{block}} object. -The structure of the {{block}} object you can found in the instructions, use {"answer": "$your_answer"} as the default structure -if you are not sure about the structure. 
- - -{{instructions}} - -""" # noqa: E501 - - -class GoogleLargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # invoke model - return self._generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return:md = genai.GenerativeModel(model) - """ - prompt = self._convert_messages_to_prompt(prompt_messages) - - return self._get_num_tokens_by_gpt2(prompt) - - def _convert_messages_to_prompt(self, messages: list[PromptMessage]) -> str: - """ - Format a list of messages into a full prompt for the Google model - - :param messages: List of PromptMessage to combine. - :return: Combined string with necessary human_prompt and ai_prompt tags. 
- """ - messages = messages.copy() # don't mutate the original list - - text = "".join(self._convert_one_message_to_text(message) for message in messages) - - return text.rstrip() - - def _convert_tools_to_glm_tool(self, tools: list[PromptMessageTool]) -> glm.Tool: - """ - Convert tool messages to glm tools - - :param tools: tool messages - :return: glm tools - """ - return glm.Tool( - function_declarations=[ - glm.FunctionDeclaration( - name=tool.name, - parameters=glm.Schema( - type=glm.Type.OBJECT, - properties={ - key: { - "type_": value.get("type", "string").upper(), - "description": value.get("description", ""), - "enum": value.get("enum", []), - } - for key, value in tool.parameters.get("properties", {}).items() - }, - required=tool.parameters.get("required", []), - ), - ) - for tool in tools - ] - ) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - - try: - ping_message = SystemPromptMessage(content="ping") - self._generate(model, credentials, [ping_message], {"max_tokens_to_sample": 5}) - - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: credentials kwargs - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - config_kwargs = model_parameters.copy() - config_kwargs["max_output_tokens"] = 
config_kwargs.pop("max_tokens_to_sample", None) - - if stop: - config_kwargs["stop_sequences"] = stop - - google_model = genai.GenerativeModel(model_name=model) - - history = [] - - # hack for gemini-pro-vision, which currently does not support multi-turn chat - if model == "gemini-pro-vision": - last_msg = prompt_messages[-1] - content = self._format_message_to_glm_content(last_msg) - history.append(content) - else: - for msg in prompt_messages: # makes message roles strictly alternating - content = self._format_message_to_glm_content(msg) - if history and history[-1]["role"] == content["role"]: - history[-1]["parts"].extend(content["parts"]) - else: - history.append(content) - - # Create a new ClientManager with tenant's API key - new_client_manager = _ClientManager() - new_client_manager.configure(api_key=credentials["google_api_key"]) - new_custom_client = new_client_manager.make_client("generative") - - google_model._client = new_custom_client - - response = google_model.generate_content( - contents=history, - generation_config=genai.types.GenerationConfig(**config_kwargs), - stream=stream, - tools=self._convert_tools_to_glm_tool(tools) if tools else None, - request_options={"timeout": 600}, - ) - - if stream: - return self._handle_generate_stream_response(model, credentials, response, prompt_messages) - - return self._handle_generate_response(model, credentials, response, prompt_messages) - - def _handle_generate_response( - self, model: str, credentials: dict, response: GenerateContentResponse, prompt_messages: list[PromptMessage] - ) -> LLMResult: - """ - Handle llm response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response - """ - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=response.text) - - # calculate num tokens - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) 
- completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - result = LLMResult( - model=model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - ) - - return result - - def _handle_generate_stream_response( - self, model: str, credentials: dict, response: GenerateContentResponse, prompt_messages: list[PromptMessage] - ) -> Generator: - """ - Handle llm stream response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator result - """ - index = -1 - for chunk in response: - for part in chunk.parts: - assistant_prompt_message = AssistantPromptMessage(content="") - - if part.text: - assistant_prompt_message.content += part.text - - if part.function_call: - assistant_prompt_message.tool_calls = [ - AssistantPromptMessage.ToolCall( - id=part.function_call.name, - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=part.function_call.name, - arguments=json.dumps(dict(part.function_call.args.items())), - ), - ) - ] - - index += 1 - - if not response._done: - # transform assistant message to prompt message - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=assistant_prompt_message), - ) - else: - # calculate num tokens - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - 
finish_reason=str(chunk.candidates[0].finish_reason), - usage=usage, - ), - ) - - def _convert_one_message_to_text(self, message: PromptMessage) -> str: - """ - Convert a single message to a string. - - :param message: PromptMessage to convert. - :return: String representation of the message. - """ - human_prompt = "\n\nuser:" - ai_prompt = "\n\nmodel:" - - content = message.content - if isinstance(content, list): - content = "".join(c.data for c in content if c.type != PromptMessageContentType.IMAGE) - - if isinstance(message, UserPromptMessage): - message_text = f"{human_prompt} {content}" - elif isinstance(message, AssistantPromptMessage): - message_text = f"{ai_prompt} {content}" - elif isinstance(message, SystemPromptMessage | ToolPromptMessage): - message_text = f"{human_prompt} {content}" - else: - raise ValueError(f"Got unknown type {message}") - - return message_text - - def _format_message_to_glm_content(self, message: PromptMessage) -> ContentType: - """ - Format a single message into glm.Content for Google API - - :param message: one PromptMessage - :return: glm Content representation of message - """ - if isinstance(message, UserPromptMessage): - glm_content = {"role": "user", "parts": []} - if isinstance(message.content, str): - glm_content["parts"].append(to_part(message.content)) - else: - for c in message.content: - if c.type == PromptMessageContentType.TEXT: - glm_content["parts"].append(to_part(c.data)) - elif c.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, c) - if message_content.data.startswith("data:"): - metadata, base64_data = c.data.split(",", 1) - mime_type = metadata.split(";", 1)[0].split(":")[1] - else: - # fetch image data from url - try: - image_content = requests.get(message_content.data).content - with Image.open(io.BytesIO(image_content)) as img: - mime_type = f"image/{img.format.lower()}" - base64_data = base64.b64encode(image_content).decode("utf-8") - except Exception as ex: - raise 
ValueError(f"Failed to fetch image data from url {message_content.data}, {ex}") - blob = {"inline_data": {"mime_type": mime_type, "data": base64_data}} - glm_content["parts"].append(blob) - - return glm_content - elif isinstance(message, AssistantPromptMessage): - glm_content = {"role": "model", "parts": []} - if message.content: - glm_content["parts"].append(to_part(message.content)) - if message.tool_calls: - glm_content["parts"].append( - to_part( - glm.FunctionCall( - name=message.tool_calls[0].function.name, - args=json.loads(message.tool_calls[0].function.arguments), - ) - ) - ) - return glm_content - elif isinstance(message, SystemPromptMessage): - return {"role": "user", "parts": [to_part(message.content)]} - elif isinstance(message, ToolPromptMessage): - return { - "role": "function", - "parts": [ - glm.Part( - function_response=glm.FunctionResponse( - name=message.name, response={"response": message.content} - ) - ) - ], - } - else: - raise ValueError(f"Got unknown type {message}") - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the ermd = genai.GenerativeModel(model) error type thrown to the caller - The value is the md = genai.GenerativeModel(model) error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke emd = genai.GenerativeModel(model) error mapping - """ - return { - InvokeConnectionError: [exceptions.RetryError], - InvokeServerUnavailableError: [ - exceptions.ServiceUnavailable, - exceptions.InternalServerError, - exceptions.BadGateway, - exceptions.GatewayTimeout, - exceptions.DeadlineExceeded, - ], - InvokeRateLimitError: [exceptions.ResourceExhausted, exceptions.TooManyRequests], - InvokeAuthorizationError: [ - exceptions.Unauthenticated, - exceptions.PermissionDenied, - exceptions.Unauthenticated, - exceptions.Forbidden, - ], - InvokeBadRequestError: [ - exceptions.BadRequest, - exceptions.InvalidArgument, - exceptions.FailedPrecondition, - exceptions.OutOfRange, - exceptions.NotFound, - exceptions.MethodNotAllowed, - exceptions.Conflict, - exceptions.AlreadyExists, - exceptions.Aborted, - exceptions.LengthRequired, - exceptions.PreconditionFailed, - exceptions.RequestRangeNotSatisfiable, - exceptions.Cancelled, - ], - } diff --git a/api/core/model_runtime/model_providers/groq/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/groq/_assets/icon_l_en.svg deleted file mode 100644 index 2505a5f493717b..00000000000000 --- a/api/core/model_runtime/model_providers/groq/_assets/icon_l_en.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/groq/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/groq/_assets/icon_s_en.svg deleted file mode 100644 index 087f37e4718b7a..00000000000000 --- a/api/core/model_runtime/model_providers/groq/_assets/icon_s_en.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/api/core/model_runtime/model_providers/groq/groq.py b/api/core/model_runtime/model_providers/groq/groq.py deleted file mode 100644 index d0d5ff68f8090e..00000000000000 --- a/api/core/model_runtime/model_providers/groq/groq.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from 
core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class GroqProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - model_instance.validate_credentials(model="llama3-8b-8192", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/groq/groq.yaml b/api/core/model_runtime/model_providers/groq/groq.yaml deleted file mode 100644 index db17cc8bdd086a..00000000000000 --- a/api/core/model_runtime/model_providers/groq/groq.yaml +++ /dev/null @@ -1,32 +0,0 @@ -provider: groq -label: - zh_Hans: GroqCloud - en_US: GroqCloud -description: - en_US: GroqCloud provides access to the Groq Cloud API, which hosts models like LLama2 and Mixtral. 
- zh_Hans: GroqCloud 提供对 Groq Cloud API 的访问,其中托管了 LLama2 和 Mixtral 等模型。 -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#F5F5F4" -help: - title: - en_US: Get your API Key from GroqCloud - zh_Hans: 从 GroqCloud 获取 API Key - url: - en_US: https://console.groq.com/ -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/groq/llm/_position.yaml b/api/core/model_runtime/model_providers/groq/llm/_position.yaml deleted file mode 100644 index be115ca920df08..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/_position.yaml +++ /dev/null @@ -1,7 +0,0 @@ -- llama-3.1-405b-reasoning -- llama-3.1-70b-versatile -- llama-3.1-8b-instant -- llama3-70b-8192 -- llama3-8b-8192 -- mixtral-8x7b-32768 -- llama2-70b-4096 diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.1-405b-reasoning.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-3.1-405b-reasoning.yaml deleted file mode 100644 index 217785cea2d85f..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/llama-3.1-405b-reasoning.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: llama-3.1-405b-reasoning -label: - zh_Hans: Llama-3.1-405b-reasoning - en_US: Llama-3.1-405b-reasoning -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.05' - output: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.1-70b-versatile.yaml 
b/api/core/model_runtime/model_providers/groq/llm/llama-3.1-70b-versatile.yaml deleted file mode 100644 index ab5f6ab05efe31..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/llama-3.1-70b-versatile.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: llama-3.1-70b-versatile -label: - zh_Hans: Llama-3.1-70b-versatile - en_US: Llama-3.1-70b-versatile -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.05' - output: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.1-8b-instant.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-3.1-8b-instant.yaml deleted file mode 100644 index a82e64532ee10d..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/llama-3.1-8b-instant.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: llama-3.1-8b-instant -label: - zh_Hans: Llama-3.1-8b-instant - en_US: Llama-3.1-8b-instant -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.05' - output: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llama2-70b-4096.yaml b/api/core/model_runtime/model_providers/groq/llm/llama2-70b-4096.yaml deleted file mode 100644 index 384912b0dd248e..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/llama2-70b-4096.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: llama2-70b-4096 -label: - zh_Hans: Llama-2-70B-4096 - en_US: Llama-2-70B-4096 -model_type: llm -features: - - agent-thought 
-model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 -pricing: - input: '0.7' - output: '0.8' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llama3-70b-8192.yaml b/api/core/model_runtime/model_providers/groq/llm/llama3-70b-8192.yaml deleted file mode 100644 index 91d0e307657911..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/llama3-70b-8192.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: llama3-70b-8192 -label: - zh_Hans: Llama-3-70B-8192 - en_US: Llama-3-70B-8192 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.59' - output: '0.79' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llama3-8b-8192.yaml b/api/core/model_runtime/model_providers/groq/llm/llama3-8b-8192.yaml deleted file mode 100644 index b6154f761f4b40..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/llama3-8b-8192.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: llama3-8b-8192 -label: - zh_Hans: Llama-3-8B-8192 - en_US: Llama-3-8B-8192 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.05' - output: '0.08' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llm.py b/api/core/model_runtime/model_providers/groq/llm/llm.py deleted file 
mode 100644 index 352a7b519ee168..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/llm.py +++ /dev/null @@ -1,31 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from core.model_runtime.entities.llm_entities import LLMResult -from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class GroqLargeLanguageModel(OAIAPICompatLargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials) - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - @staticmethod - def _add_custom_parameters(credentials: dict) -> None: - credentials["mode"] = "chat" - credentials["endpoint_url"] = "https://api.groq.com/openai/v1" diff --git a/api/core/model_runtime/model_providers/groq/llm/mixtral-8x7b-instruct-v0.1.yaml b/api/core/model_runtime/model_providers/groq/llm/mixtral-8x7b-instruct-v0.1.yaml deleted file mode 100644 index 0dc6678fa23424..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/mixtral-8x7b-instruct-v0.1.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: mixtral-8x7b-32768 -label: - zh_Hans: Mixtral-8x7b-Instruct-v0.1 - en_US: Mixtral-8x7b-Instruct-v0.1 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: 
top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 20480 -pricing: - input: '0.27' - output: '0.27' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/huggingface_hub/__init__.py b/api/core/model_runtime/model_providers/huggingface_hub/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/huggingface_hub/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/huggingface_hub/_assets/icon_l_en.svg deleted file mode 100644 index 70135a08de5cc7..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_hub/_assets/icon_l_en.svg +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/huggingface_hub/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/huggingface_hub/_assets/icon_s_en.svg deleted file mode 100644 index 5a444f127f88b6..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_hub/_assets/icon_s_en.svg +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/huggingface_hub/_common.py b/api/core/model_runtime/model_providers/huggingface_hub/_common.py deleted file mode 100644 index 3c4020b6eedf24..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_hub/_common.py +++ /dev/null @@ -1,9 +0,0 @@ -from huggingface_hub.utils import BadRequestError, HfHubHTTPError - -from core.model_runtime.errors.invoke import InvokeBadRequestError, InvokeError - - -class _CommonHuggingfaceHub: - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return {InvokeBadRequestError: [HfHubHTTPError, BadRequestError]} diff --git a/api/core/model_runtime/model_providers/huggingface_hub/huggingface_hub.py 
b/api/core/model_runtime/model_providers/huggingface_hub/huggingface_hub.py deleted file mode 100644 index 54d2a2bf399623..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_hub/huggingface_hub.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class HuggingfaceHubProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/huggingface_hub/huggingface_hub.yaml b/api/core/model_runtime/model_providers/huggingface_hub/huggingface_hub.yaml deleted file mode 100644 index 1df234cf268f4e..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_hub/huggingface_hub.yaml +++ /dev/null @@ -1,102 +0,0 @@ -provider: huggingface_hub -label: - en_US: Hugging Face Model -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#FFF8DC" -help: - title: - en_US: Get your API key from Hugging Face Hub - zh_Hans: 从 Hugging Face Hub 获取 API Key - url: - en_US: https://huggingface.co/settings/tokens -supported_model_types: - - llm - - text-embedding -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - credential_form_schemas: - - variable: huggingfacehub_api_type - label: - en_US: Endpoint Type - zh_Hans: 端点类型 - type: radio - required: true - default: hosted_inference_api - options: - - value: hosted_inference_api - label: - en_US: Hosted Inference API - - value: inference_endpoints - label: - en_US: Inference Endpoints - - variable: huggingfacehub_api_token - label: - en_US: API Token - zh_Hans: API Token - type: secret-input - required: true - placeholder: - en_US: Enter your Hugging Face Hub API Token here - zh_Hans: 在此输入您的 Hugging Face Hub API Token - - variable: huggingface_namespace - label: - en_US: 'User Name / Organization 
Name' - zh_Hans: '用户名 / 组织名称' - type: text-input - required: true - placeholder: - en_US: 'Enter your User Name / Organization Name here' - zh_Hans: '在此输入您的用户名 / 组织名称' - show_on: - - variable: __model_type - value: text-embedding - - variable: huggingfacehub_api_type - value: inference_endpoints - - variable: huggingfacehub_endpoint_url - label: - en_US: Endpoint URL - zh_Hans: 端点 URL - type: text-input - required: true - placeholder: - en_US: Enter your Endpoint URL here - zh_Hans: 在此输入您的端点 URL - show_on: - - variable: huggingfacehub_api_type - value: inference_endpoints - - variable: task_type - label: - en_US: Task - zh_Hans: Task - type: select - options: - - value: text2text-generation - label: - en_US: Text-to-Text Generation - show_on: - - variable: __model_type - value: llm - - value: text-generation - label: - en_US: Text Generation - zh_Hans: 文本生成 - show_on: - - variable: __model_type - value: llm - - value: feature-extraction - label: - en_US: Feature Extraction - show_on: - - variable: __model_type - value: text-embedding - show_on: - - variable: huggingfacehub_api_type - value: inference_endpoints diff --git a/api/core/model_runtime/model_providers/huggingface_hub/llm/__init__.py b/api/core/model_runtime/model_providers/huggingface_hub/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/huggingface_hub/llm/llm.py b/api/core/model_runtime/model_providers/huggingface_hub/llm/llm.py deleted file mode 100644 index 9d29237fdde573..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_hub/llm/llm.py +++ /dev/null @@ -1,313 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from huggingface_hub import InferenceClient -from huggingface_hub.hf_api import HfApi -from huggingface_hub.utils import BadRequestError - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.defaults import 
PARAMETER_RULE_TEMPLATE -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - DefaultParameterName, - FetchFrom, - ModelPropertyKey, - ModelType, - ParameterRule, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.huggingface_hub._common import _CommonHuggingfaceHub - - -class HuggingfaceHubLargeLanguageModel(_CommonHuggingfaceHub, LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - client = InferenceClient(token=credentials["huggingfacehub_api_token"]) - - if credentials["huggingfacehub_api_type"] == "inference_endpoints": - model = credentials["huggingfacehub_endpoint_url"] - - if "baichuan" in model.lower(): - stream = False - - response = client.text_generation( - prompt=prompt_messages[0].content, - details=True, - stream=stream, - model=model, - stop_sequences=stop, - **model_parameters, - ) - - if stream: - return self._handle_generate_stream_response(model, credentials, prompt_messages, response) - - return self._handle_generate_response(model, credentials, prompt_messages, response) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - prompt = self._convert_messages_to_prompt(prompt_messages) - return 
self._get_num_tokens_by_gpt2(prompt) - - def validate_credentials(self, model: str, credentials: dict) -> None: - try: - if "huggingfacehub_api_type" not in credentials: - raise CredentialsValidateFailedError("Huggingface Hub Endpoint Type must be provided.") - - if credentials["huggingfacehub_api_type"] not in {"inference_endpoints", "hosted_inference_api"}: - raise CredentialsValidateFailedError("Huggingface Hub Endpoint Type is invalid.") - - if "huggingfacehub_api_token" not in credentials: - raise CredentialsValidateFailedError("Huggingface Hub Access Token must be provided.") - - if credentials["huggingfacehub_api_type"] == "inference_endpoints": - if "huggingfacehub_endpoint_url" not in credentials: - raise CredentialsValidateFailedError("Huggingface Hub Endpoint URL must be provided.") - - if "task_type" not in credentials: - raise CredentialsValidateFailedError("Huggingface Hub Task Type must be provided.") - elif credentials["huggingfacehub_api_type"] == "hosted_inference_api": - credentials["task_type"] = self._get_hosted_model_task_type( - credentials["huggingfacehub_api_token"], model - ) - - if credentials["task_type"] not in {"text2text-generation", "text-generation"}: - raise CredentialsValidateFailedError( - "Huggingface Hub Task Type must be one of text2text-generation, text-generation." - ) - - client = InferenceClient(token=credentials["huggingfacehub_api_token"]) - - if credentials["huggingfacehub_api_type"] == "inference_endpoints": - model = credentials["huggingfacehub_endpoint_url"] - - try: - client.text_generation(prompt="Who are you?", stream=True, model=model) - except BadRequestError as e: - raise CredentialsValidateFailedError( - "Only available for models running on with the `text-generation-inference`. " - "To learn more about the TGI project, please refer to https://github.com/huggingface/text-generation-inference." 
- ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.LLM, - model_properties={ModelPropertyKey.MODE: LLMMode.COMPLETION.value}, - parameter_rules=self._get_customizable_model_parameter_rules(), - ) - - return entity - - @staticmethod - def _get_customizable_model_parameter_rules() -> list[ParameterRule]: - temperature_rule_dict = PARAMETER_RULE_TEMPLATE.get(DefaultParameterName.TEMPERATURE).copy() - temperature_rule_dict["name"] = "temperature" - temperature_rule = ParameterRule(**temperature_rule_dict) - temperature_rule.default = 0.5 - - top_p_rule_dict = PARAMETER_RULE_TEMPLATE.get(DefaultParameterName.TOP_P).copy() - top_p_rule_dict["name"] = "top_p" - top_p_rule = ParameterRule(**top_p_rule_dict) - top_p_rule.default = 0.5 - - top_k_rule = ParameterRule( - name="top_k", - label={ - "en_US": "Top K", - "zh_Hans": "Top K", - }, - type="int", - help={ - "en_US": "The number of highest probability vocabulary tokens to keep for top-k-filtering.", - "zh_Hans": "保留的最高概率词汇标记的数量。", - }, - required=False, - default=2, - min=1, - max=10, - precision=0, - ) - - max_new_tokens = ParameterRule( - name="max_new_tokens", - label={ - "en_US": "Max New Tokens", - "zh_Hans": "最大新标记", - }, - type="int", - help={ - "en_US": "Maximum number of generated tokens.", - "zh_Hans": "生成的标记的最大数量。", - }, - required=False, - default=20, - min=1, - max=4096, - precision=0, - ) - - seed = ParameterRule( - name="seed", - label={ - "en_US": "Random sampling seed", - "zh_Hans": "随机采样种子", - }, - type="int", - help={ - "en_US": "Random sampling seed.", - "zh_Hans": "随机采样种子。", - }, - required=False, - precision=0, - ) - - repetition_penalty = ParameterRule( - name="repetition_penalty", - label={ - "en_US": "Repetition Penalty", - 
"zh_Hans": "重复惩罚", - }, - type="float", - help={ - "en_US": "The parameter for repetition penalty. 1.0 means no penalty.", - "zh_Hans": "重复惩罚的参数。1.0 表示没有惩罚。", - }, - required=False, - precision=1, - ) - - return [temperature_rule, top_k_rule, top_p_rule, max_new_tokens, seed, repetition_penalty] - - def _handle_generate_stream_response( - self, model: str, credentials: dict, prompt_messages: list[PromptMessage], response: Generator - ) -> Generator: - index = -1 - for chunk in response: - # skip special tokens - if chunk.token.special: - continue - - index += 1 - - assistant_prompt_message = AssistantPromptMessage(content=chunk.token.text) - - if chunk.details: - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - usage=usage, - finish_reason=chunk.details.finish_reason, - ), - ) - else: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - ), - ) - - def _handle_generate_response( - self, model: str, credentials: dict, prompt_messages: list[PromptMessage], response: any - ) -> LLMResult: - if isinstance(response, str): - content = response - else: - content = response.generated_text - - assistant_prompt_message = AssistantPromptMessage(content=content) - - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - result = LLMResult( - model=model, - prompt_messages=prompt_messages, - 
message=assistant_prompt_message, - usage=usage, - ) - return result - - @staticmethod - def _get_hosted_model_task_type(huggingfacehub_api_token: str, model_name: str): - hf_api = HfApi(token=huggingfacehub_api_token) - model_info = hf_api.model_info(repo_id=model_name) - - try: - if not model_info: - raise ValueError(f"Model {model_name} not found.") - - if "inference" in model_info.cardData and not model_info.cardData["inference"]: - raise ValueError(f"Inference API has been turned off for this model {model_name}.") - - valid_tasks = ("text2text-generation", "text-generation") - if model_info.pipeline_tag not in valid_tasks: - raise ValueError(f"Model {model_name} is not a valid task, must be one of {valid_tasks}.") - except Exception as e: - raise CredentialsValidateFailedError(f"{str(e)}") - - return model_info.pipeline_tag - - def _convert_messages_to_prompt(self, messages: list[PromptMessage]) -> str: - messages = messages.copy() # don't mutate the original list - - text = "".join(self._convert_one_message_to_text(message) for message in messages) - - return text.rstrip() - - @staticmethod - def _convert_one_message_to_text(message: PromptMessage) -> str: - human_prompt = "\n\nHuman:" - ai_prompt = "\n\nAssistant:" - content = message.content - - if isinstance(message, UserPromptMessage): - message_text = f"{human_prompt} {content}" - elif isinstance(message, AssistantPromptMessage): - message_text = f"{ai_prompt} {content}" - elif isinstance(message, SystemPromptMessage): - message_text = content - else: - raise ValueError(f"Got unknown type {message}") - - return message_text diff --git a/api/core/model_runtime/model_providers/huggingface_hub/text_embedding/__init__.py b/api/core/model_runtime/model_providers/huggingface_hub/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/huggingface_tei/__init__.py 
b/api/core/model_runtime/model_providers/huggingface_tei/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/huggingface_tei/huggingface_tei.py b/api/core/model_runtime/model_providers/huggingface_tei/huggingface_tei.py deleted file mode 100644 index 97d7e28dc646f8..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_tei/huggingface_tei.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class HuggingfaceTeiProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/huggingface_tei/huggingface_tei.yaml b/api/core/model_runtime/model_providers/huggingface_tei/huggingface_tei.yaml deleted file mode 100644 index f3a912d84d23d3..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_tei/huggingface_tei.yaml +++ /dev/null @@ -1,36 +0,0 @@ -provider: huggingface_tei -label: - en_US: Text Embedding Inference -description: - en_US: A blazing fast inference solution for text embeddings models. - zh_Hans: 用于文本嵌入模型的超快速推理解决方案。 -background: "#FFF8DC" -help: - title: - en_US: How to deploy Text Embedding Inference - zh_Hans: 如何部署 Text Embedding Inference - url: - en_US: https://github.com/huggingface/text-embeddings-inference -supported_model_types: - - text-embedding - - rerank -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: server_url - label: - zh_Hans: 服务器URL - en_US: Server url - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入Text Embedding Inference的服务器地址,如 http://192.168.1.100:8080 - en_US: Enter the url of your Text Embedding Inference, e.g. 
http://192.168.1.100:8080 diff --git a/api/core/model_runtime/model_providers/huggingface_tei/rerank/__init__.py b/api/core/model_runtime/model_providers/huggingface_tei/rerank/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/huggingface_tei/rerank/rerank.py b/api/core/model_runtime/model_providers/huggingface_tei/rerank/rerank.py deleted file mode 100644 index 74a1dfc3ffe076..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_tei/rerank/rerank.py +++ /dev/null @@ -1,136 +0,0 @@ -from typing import Optional - -import httpx - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel -from core.model_runtime.model_providers.huggingface_tei.tei_helper import TeiHelper - - -class HuggingfaceTeiRerankModel(RerankModel): - """ - Model class for Text Embedding Inference rerank model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n - :param user: unique user id - :return: rerank result - """ - if len(docs) == 0: - return RerankResult(model=model, docs=[]) - server_url = credentials["server_url"] - - server_url = server_url.removesuffix("/") - - try: - results = TeiHelper.invoke_rerank(server_url, query, docs) - - rerank_documents = [] - for result in results: - rerank_document = RerankDocument( - index=result["index"], - text=result["text"], - score=result["score"], - ) - if score_threshold is None or result["score"] >= score_threshold: - rerank_documents.append(rerank_document) - if top_n is not None and len(rerank_documents) >= top_n: - break - - return RerankResult(model=model, docs=rerank_documents) - except httpx.HTTPStatusError as e: - raise InvokeServerUnavailableError(str(e)) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - server_url = credentials["server_url"] - extra_args = TeiHelper.get_tei_extra_parameter(server_url, model) - if extra_args.model_type != "reranker": - raise CredentialsValidateFailedError("Current model is not a rerank model") - - credentials["context_size"] = extra_args.max_input_length - - self.invoke( - model=model, - credentials=credentials, - query="Whose kasumi", - docs=[ - 'Kasumi is a girl\'s name of Japanese origin meaning "mist".', - "Her music is a kawaii bass, a mix of future bass, pop, and kawaii music ", - "and she leads a team named PopiParty.", - ], - 
score_threshold=0.8, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.RERANK, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", 512)), - }, - parameter_rules=[], - ) - - return entity diff --git a/api/core/model_runtime/model_providers/huggingface_tei/tei_helper.py b/api/core/model_runtime/model_providers/huggingface_tei/tei_helper.py deleted file mode 100644 index 81ab2492144e86..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_tei/tei_helper.py +++ /dev/null @@ -1,182 +0,0 @@ -from threading import Lock -from time import time -from typing import Optional - -import httpx -from requests.adapters import HTTPAdapter -from requests.exceptions import ConnectionError, MissingSchema, Timeout -from requests.sessions import Session -from yarl import URL - - -class TeiModelExtraParameter: - model_type: str - max_input_length: int - max_client_batch_size: int - - def __init__(self, model_type: str, max_input_length: int, max_client_batch_size: 
Optional[int] = None) -> None: - self.model_type = model_type - self.max_input_length = max_input_length - self.max_client_batch_size = max_client_batch_size - - -cache = {} -cache_lock = Lock() - - -class TeiHelper: - @staticmethod - def get_tei_extra_parameter(server_url: str, model_name: str) -> TeiModelExtraParameter: - TeiHelper._clean_cache() - with cache_lock: - if model_name not in cache: - cache[model_name] = { - "expires": time() + 300, - "value": TeiHelper._get_tei_extra_parameter(server_url), - } - return cache[model_name]["value"] - - @staticmethod - def _clean_cache() -> None: - try: - with cache_lock: - expired_keys = [model_uid for model_uid, model in cache.items() if model["expires"] < time()] - for model_uid in expired_keys: - del cache[model_uid] - except RuntimeError as e: - pass - - @staticmethod - def _get_tei_extra_parameter(server_url: str) -> TeiModelExtraParameter: - """ - get tei model extra parameter like model_type, max_input_length, max_batch_requests - """ - - url = str(URL(server_url) / "info") - - # this method is surrounded by a lock, and default requests may hang forever, - # so we just set a Adapter with max_retries=3 - session = Session() - session.mount("http://", HTTPAdapter(max_retries=3)) - session.mount("https://", HTTPAdapter(max_retries=3)) - - try: - response = session.get(url, timeout=10) - except (MissingSchema, ConnectionError, Timeout) as e: - raise RuntimeError(f"get tei model extra parameter failed, url: {url}, error: {e}") - if response.status_code != 200: - raise RuntimeError( - f"get tei model extra parameter failed, status code: {response.status_code}, response: {response.text}" - ) - - response_json = response.json() - - model_type = response_json.get("model_type", {}) - if len(model_type.keys()) < 1: - raise RuntimeError("model_type is empty") - model_type = list(model_type.keys())[0] - if model_type not in {"embedding", "reranker"}: - raise RuntimeError(f"invalid model_type: {model_type}") - - 
max_input_length = response_json.get("max_input_length", 512) - max_client_batch_size = response_json.get("max_client_batch_size", 1) - - return TeiModelExtraParameter( - model_type=model_type, max_input_length=max_input_length, max_client_batch_size=max_client_batch_size - ) - - @staticmethod - def invoke_tokenize(server_url: str, texts: list[str]) -> list[list[dict]]: - """ - Invoke tokenize endpoint - - Example response: - [ - [ - { - "id": 0, - "text": "", - "special": true, - "start": null, - "stop": null - }, - { - "id": 7704, - "text": "str", - "special": false, - "start": 0, - "stop": 3 - }, - < MORE TOKENS > - ] - ] - - :param server_url: server url - :param texts: texts to tokenize - """ - resp = httpx.post( - f"{server_url}/tokenize", - json={"inputs": texts}, - ) - resp.raise_for_status() - return resp.json() - - @staticmethod - def invoke_embeddings(server_url: str, texts: list[str]) -> dict: - """ - Invoke embeddings endpoint - - Example response: - { - "object": "list", - "data": [ - { - "object": "embedding", - "embedding": [...], - "index": 0 - } - ], - "model": "MODEL_NAME", - "usage": { - "prompt_tokens": 3, - "total_tokens": 3 - } - } - - :param server_url: server url - :param texts: texts to embed - """ - # Use OpenAI compatible API here, which has usage tracking - resp = httpx.post( - f"{server_url}/v1/embeddings", - json={"input": texts}, - ) - resp.raise_for_status() - return resp.json() - - @staticmethod - def invoke_rerank(server_url: str, query: str, docs: list[str]) -> list[dict]: - """ - Invoke rerank endpoint - - Example response: - [ - { - "index": 0, - "text": "Deep Learning is ...", - "score": 0.9950755 - } - ] - - :param server_url: server url - :param texts: texts to rerank - :param candidates: candidates to rerank - """ - params = {"query": query, "texts": docs, "return_text": True} - - response = httpx.post( - server_url + "/rerank", - json=params, - ) - response.raise_for_status() - return response.json() diff --git 
a/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/__init__.py b/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/hunyuan/__init__.py b/api/core/model_runtime/model_providers/hunyuan/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/hunyuan/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/hunyuan/_assets/icon_l_en.png deleted file mode 100644 index 1303055ef8d1ee2b7bd127fa55c8f28a47a2ca4f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 68638 zcmZsCRa70@7G-dUV8Jc8ySux)ySqcM5G=U6L-0Uwcb9`haCZyt)|K0%Ki<9l0S6cy zsM>q2xu>iOS5lBfg2#pb@ZkfJw3L|2hYt`Iz}Mekp@Glzhb#BM3z(~lq{xS=3H+lE zABaCliwUcF8Ju>&_^K_WzrAPamRQe0NfCcxE`|>-Eod=uitY&CSyXT(8FBEallG{4 zAE&^hT<41EII+X0RJd~8UR2PlMyhf~9K>cK?lzK>#DH^;X7k%`ynb)@WYh9O?fAaC zIte4e>N>`~;d8M5yn4?+IYvw&EcEdsBqZ4XyaXc1eAS<-jEqJA|Mr!b7#8+_UW8y- zoj?BXkN)}2DG(0U*dx&M>tg$V-UACRk``|I|NU2TLa=oR?GSIc9FyFdIHvoCqK0Bb z|G6OHO-G$H=ZUg+7e9WrJ`{P)f`~J&n1x|>6iiZn?eEcvJQSIknPD~=7JGYp^Q?oX zi8@jsvVUDUEVv}>%RC(GM9a)?Xvdi_{3V5hANbUgwb{LFI|DJ}kz8I3FxSj~4v|yc zC%n5u&n%IBHr)k{#+lsnQ+3D2!yiK6vTGLsq3$Stgo-X zX|VS}WBX-zYf};F+u8Rm@;}Rv#TOW9kT&mBl@hF+Qy)E2Y^}g%|BSY>Z~n>i3d{>> zWd_@9%~%i0vei#5mYfT5^Q*1mk<-)3;fOPS+N!kk1@e&da&$cot37MguuDbp(>>LC zd$P9@$2jgVj{@Pd;YJn`(m1DkKbj%z!>eOb>ZkVvW{a2GTa_EBe{C}tCnST6abvVA zJ!1x8((X@|^u`LgF!@?KTd`#p1iP`6nMNoyHI9&OjB!kZue)!#m=;lEe|#9s4Xj-b z{WzT^3dS4lu^XYrYK2aU?o+Zye(xkqV0IW)GFZz-_>CSHfS=}k5`!^4eWz)6t$Y76 zu5+TxL*W)GBKVvCEtv7pr}`&xbL#?|0-M;suIz9Rk^$HFcYli@j!cX^&%WAhZAnH7 zdr!Txjg(GI-yW$*6lA)%3TOnDS<^{bEs9msi-4bbco>#7<6!tjKVS-$Z4eu=L^ew~ zfz!@uLe@k@cC>!-h%tB25yG0CG} z)(8G}!nKbl6Ct1$+lG#+M^caHf%ycv!N_>?cA0S1?Q=cG|NLcOU0^7>)bS5CQN&

8BrTmGxskG>j;_+WHOA}tK_II*N z6m(+9r_NvD5LH}0;s>$iuKcgA}bamGg^k7psjf!n>P{n}LAm0Edv(DO%R4H1*ZifQnZ z-GePV5=rV9zk8+U94GtwuaRY#zGsx?I>lU}Yw>qlRDyp)&jfRz0VPO;Bxj{pl~V%k zNEa=XogHb{S;~pH&p3hh2mV4iDXVk1d9Y5?cWm)lDK|fnP=d#w&s;2?<3re}9%ghp zn#Z5%6ZK<)ab64W>$R)Ar;J%o?PX0BeejW-yQ|HuJ_Swxj>t=AbB!0^YoDsize>2; zh^-oa95WhJIq=#rh?_rmgx5UdReG-P-7UUCg>WE9$PKvo#wKQ`_OD89{tkPCYpgNU zvVdGtJfJdxL_Mqhu_lUyI+FD4L)HjJIXIPkyy^`NuRjPZE0*AdbsOX?uA`CWj%7y_ z%k#Xr8>F$|cZ$Os%4({S)Uq>{1>#~wv&iVpQ~6R@Ogdp+Pli7?KrlDNZNn>fdd^l| zK@XkwX|$4U5E4V}=2Fl{nrNe<@2W$0^lI+u?N$3(+^s#Ux-6PtMEDu`Gyu&gKM(?% zfl!I;=d~iNJm`OM{DM<4PZPFoIgXn)U1}ABJ*vezESdvH)sCXagvRdb@+;)1*bg_c zb#`nJrX(G@?3hCOca8m5N>Yn8aU4HIWtNBt?Wn#`sPM5(5OfQ>M}T@d{7}tmjcr1I z559~ko4zxZ^w+`W%=N=zcsg-$GF4)J|)Ke*2r;fKI``6%DU+mctpX%zzBZd4=@M+_le{L4vwXk zro3ztWNJvn3}Wn`9g$(R_o3EDP1ugXBn{u|6#U#2^9RXKG?eI_)BdvmIXh*1d58B8 z&uXA8Mh@?Bva(y%iK|Qj-?2}EWdh_N!Gl}(uagg|(>c3~D<+$TDP+q<#K^p89@rG0 zM=*YdS{N-D2>rf1l7;#66MNI!qc`u{-rg%;rUYi)VYu%4^Fiq^$1K_OTnOkJpOX6~ zje4bj*UbeaLmCtBT>MXD@+e&sy-o^aQ>A5=EiOc|V3wmWB0LBD=p(CHpOzJ`y4jv3 zzxJh?l*wX|JGNY1m~m#*IC@AoxQw4Y)#_|RUtdvUD$lG>nFICZL=jm`oY7$i0!xFY zCBXZVy}Z$sS$L;~-FM zZ%I#%L5el&E*ZWkV<8&|3oa}e_`t!b^N`0M@v0}H1=rnZj-dj<^HsFXb_@TfWDZW> zg}~b|JL}LIr=Q&$t7=%?ld5%Ln&!V&>mwwC1Neb@+WeqbuGnZRYqH!LreeBiLJU&k zNYz8a>Ww(AXbSuOa5JHSm9a;`S|+Lmh1=3M1_%L)oqyTkswA3<@a zqN^(L5IApSA9?Ttmk32o_0hjMF}aG3z+eXXDQY5*={t*uXt);GN1j9dFDn87^;b~0 zH@Zbb{M@kA%u5OIVK?LPf~=|6B)(F~|9kwcQ#Q@-MVq7O@=!|q(|b!yTDBEuVnyM& zycH-A9)|FPfPR?fJt_G}=^0z~ui5^_v~Y;x(73V>F`;=-R1};2f%~5zo{_`8hJHyJ zA@Q{tjFbG8rFOCVtt{j`*XnuO=U)IX^s)H?E(^<815aLKC}Dqr7wNjyYczfaxrL6# zC;~AZp376-Vbn;I#EP?WTAmcdHf`LYnAUPe+U7UpoQ-cj6dcP7YXg7vBH$!0Zn`Q` zSZO5N7A`-|JcoyC$NtrqLDX2t64B}$5*K+W(4IH&H*^%yY?JrakIq;{yxH^H_{Y(~ ztv>CiZNWJ)lz&k+FyTB(@B^SP?Q6{>hi;Yo9u-!rhknq7vZWe3l}o2c>nF9?hMpHQ zS(N-itykn|AESRC9>StANwFv8=1E|Jz{YOMn@Hu69>w9Ik+Spqh5Hx`;dkWrd)=8%A<^ z${0+~qnjR2Jms6%aDNp)s!;%5bqOIX&%>yEB4Ii!!~Fo9Uvk4|m%D{WQj89?e=+fi za3<7w{N_KAjbcz)=1SI9Xa;DUYa-(}oZ>mdZ|jvvgIW}5 
z;_Ura$1>KZ_HA^e803ptf!z28TEp>jyA>4exOv|E-(>|(5g>@zN@{4UHjQ*7>WV&- zvo_Iu1&8p~z}3hib^E^4{l?|A2$mong;%d!jjFL-42D2jW=c;i1tw4Hn}gw+G(T()i&O&ksAF;tBVv% z<5to$E_Q|RACU;m2vu~?D`&wU2zv^S;3wel7-T>F?(CZc&O?qVFe02|*Ec!4DUINf z8KK157svzMMh|wn0_`Q*>^{9|xW?wWs0vZky*!eb?rqVgPe=bB;@=Ve_wsI01q%36 zn)!3sy7r_D*o~K~&2eJnj>iI#+HB=Mc@~;Bo_%G0q;i&sXBj-YHf8&jDn21)rt;iE zD(dnfS^I(sy(I!H*QT%VL&CQ3;+%4-N)2s!UNL?2tuqXL0!#>+Xb${?9SjrQp)@or z&DK;P+a1y2hhyEZYP`_$42!Nyi{CUWLQBV74w5U|Lzvq*38elFz5odzHrdBUPO5c-=q{!nhk(1o%p$Fc_iy&V?-^A?8IXGMhMbG)>|88dbV&{6^7socGy`&3mdg zV|Z3PEvYFQ8ABj9jwrfzqrURKIvvI#qJcDY^SAFiJu#)hp2xjjw5OYSDM2FI;xQ1M z%li;!G8~-Cy zyN^)0Y1vzebaIofGNg<~Rm`H%*jqn*`aMF7K@ z%KuE_T}?}|lPzl8{;Bpd5Wm}K4Ps;hv1rWrYZ!~bEVRlNb`-3nE4t*YFuIijgamHk znAy$c(49sM9H09*$ha$$Kj%!aL!U9_-=F{)xGu8!6Sj`t$9rV`_wMR)@x(8DI}QtW ziNHH9Nz2|q@d7BJZiJhWX(f!LDM~}pToFMRS4^sMu(dv7T78Bn{oW`@6!DoPQU~HI zWzY}1ts?}fgaUIltqh8J<2bz{iXa-v3Ys9rns~;Q;Y7QK^x*m*rivm1qpjEmaN1h* zFv<+p3`8y^WXi|+?mCDn;8SSu%P3?{R+79j1-LGGy>g!0Mpi_F@(icVJE1l2Ue{KW zbr8_iyHNea9RF=iOR3GMkfjx(tNga(naI0^Ci+rGX^=Hyx{aiYNS03=8yEzd+u)XZ zpdN~(QUmIZE5<&IQW~Y{RGi-Ko;0i%_a-(|I+AvZzUjvy7U`Yl!~rT?amQgRL3HNA z1qb)C$&UlpbQ4^5HjJ1f`xrR84mQONkuQS?KO^M|A*ZrwimN6Yn)BiG3m#HiMm`(Q6cV&{LUq$4@fFM$r?D5~$?>jW zL3VGUiOgP!cecA+t^ep*a>xm(w6mqY+tj=5Mox|2uSN$wq5jeNkock!{?#8jFsPLB zhiMRJztQmm;!r9g#Eb!r z+Nrn%R2jE;+C=Hm+J&WLji1Oi0nUlX3%@w#l2jZH=-cc(MeI*vNsYL3oh{u9rH}H- zsHvEz5n#HRGmA5D_$==I_$Ve>JxX>`QKUTt4{4Bss+;VAPKAMk%O7XPsUcNqX5!h< z!ZE&a8{E*ks3c0(a_%eA_}={H_IA8YFqL5w{m(g|5Z`TGfrn;Zam;jZMn7no`)ldF z#mxtiiJ&`S>vm6Q4~|A1WMP)l+<2p{8pn?G+S^GtWTUhbB5IHaVO=O)EQ`YQ8Ir z-;w|!-bi9iW0DZs;T%&*Pko9A=3Wd#kP@SyM209ehOB}dM&`OJ3YM;=V^5KR95($s z>nncaW-y%>5{^M}pe$-qYQ{LHA;;lch2{S17hl6qg06%nNdL=^|3Y*pTviIxPL^zd zSE2n@8_Nk{CPhk4uVPXaJ_7% z(c!Z8MhQoL2vNg=9h%fAV?^9PTTzrji;R}xFX*mmL`jI5FJ zKPRW9!e)K&QUrbYn`4#D>+#9SN#|PHO(A&_c)S0u&x?{CBO=`HQ~oFonf5pHrHeyc zl}50zmCcB#AL*feh-5lwfV@<eNx zN*HSRsgt%nsg|y^96Xb|+72&8&f@|9)694B6=nP+rY~khh>Z8f4<88pBFr#8*;Fyr 
zaYetHIAmAHmi%NQ#gt<2Rj9xl+BTYi0xtlIvxp6bT+9|c!;;W1ZSKHh=4m`gAA_&^EekSABgU@{rSMh=#`mH? z=PRIyS6n0urLHk1vZN;;|mz}f%Lqwv98A=-;$G6VnVhO;ctlr@Q6IX7wUr7he z%ke`^S*~qZ3QpcpKd(FESCt>fdmp{lJjq|r`m#;)u~A2CzX_kykGZ(%d%jOpkv=yK z?QE5)MNmh4W*$_NPS8?TR_@#E2~zDyud&MfXGtk#^NNcHv7#kp!2`c0@e|F^X2Z-L zv1Ri6c&v?7o!CuEmYif{WaJ;Y@kl0+NvD{nyE0Uik8WQ!?{dW`tEkK{eYrUdvh-Oo z@%yLqvh2Z2ep+eR?f7Y)GdcR2n(uUV-rq^SzV<;2Yq&$2;mSfg&3cGq&l9oILl~?f z%+_{m4=Tq6r}waKk0)@=98qoV9O`BT`HKt9OA#>;;0on-;?8K`OQaId;?r!)t)*@% z*$}hbPBPRp)$K*$6*{CV5g-tbd^1#!+fiOE%}v!+vV*|Yj5jkll^t1JH8%ExTujx7 zlcLSz^rT1kW{_APIysz5agKMfl!1kIM(RpElJvK6?mFz9 zPaQMp-$1l-PZRP+XF7uRiGxZB!lgS*-0|OeNF|~cU2f+!9k~oa{T~15HZCm4LlSLC z$)QpwF220V%1S-uq!0RT7F?@ah(rRu&%GfClRh-BjrJ?`zkmOB4nv+- z{616&!9KoI5{%v_WgAf-9q~Ed=@gsIYU+1;dwaBMiNe|!2Ci^-_`ex~QzL9v0+VhN zsY=Q4g*21ABKN?L{d*AtlnhbDpRuIN_;$`64z; zHHV4muY3~gF)3a&wsfv(g08)*0yl{nD}QN;7$mXfudRNSummySsOZ9~A(~eo~ zct7$4dq+yxKY~tspAT+O+TEBX#@}8J1Qj2~4Bu+0(&Y@|Wr}E2%aVZhTH^Vw=*jPv zrwqw5!mY?WC?$y(#r#FjPSqL)&7;_QIn=l;J3D`_Ks=JLYbz8lhn}8ZKI@OffXhF< zpl1cHDENA#e%{s98Nn<-gBu-FW8X%WXq1N=vm<6&qg7s4$LjpJm-=qe^9@Qd3Wt^A zx&Nk}&2PS3edqpkiM64)c>lPuv5~sRh>1!%VTcJ4K8u7<(Wy_$Q$s_e3YX7r(Uvp} z$;WlOKOAO4cBfzB|5gYRcnKNEz@Jy=bvL5#{c<^F!dqGqj%y+A{DZ)fIz}F%geJ!{ z7d5Vhg(V_0Uk3Ep#Nw!I?J+z`COktX1@V|w4HcQ$hd{1BY06)Zsr{67ihGk1zAqu0 zG4xp~S2F4NP?&@okbu6jCFZ-pfDL^k`UDOiq-d$cCX~opl|TkkD<&a%tYIuR()U5D zC`4>Sgo~-{K1Q>^U@laX$f)g=J794k&#OZ!6`TV9oe{d^@Ie``^%TOC1s5G|@hnW4 z+42W#K+@BRbzgmnmy!WStuD77J^Umi0 zhL+ugJ6649Exo@yClEoz==JFqBFAexoKfKEuyEsy=JT>CufY4`q@eF4|NTpKdHHdB z_Vw}caXTKTLsr|D+hg%bzH89g?d@%+DSO->PKV~L(1&{KS%Wk->zVU|@yz=>C@>lh zn!m?xxAaG@BD~Fw&Xk9?T!Dtyt=PN4XX(DMZfRExXE;JlCS|*D6-4XN7r&dN$3X#? 
zNqJ`q7hSW%I5IB#@vqC_yq}N;R?7vb=v=z_I6kXEobe&B^2A%}=FY>qXqrWHSG9%n zI!lKhWl$}{x}byA4lQ8wwL%sr6m>x`hD(iwQXGAk_MaQX{AnMtbLIij5J40nJZdE$Iw@1I~jL4PftwfGPV3Ma6U<>mL7_`cTG(acVWRHlPLiX5l82{EWDG^pQ6B_ zU%0u?!)eS>@Pqnnvx@##i7TCtmc@9v6)q@|?`s;jF#j_1m54=f@r11j0s+4mIrZ|72p{2#X7W3Rf} zR2pMj^4tEjo^+kJ9OMwGtEtuJyr$s$MDu$70WCGU9%MCCXsp$njU@2ioo{UH6gzf4 zTBvDgbh4X|CgqN&vsx}a+7vFv{BvxT$ZY-y(H70^mhqHsic?AV$EiFvB)s)ZG-(u@R4}^kEb1(v%;bRo%j2Lp2-OWZd~j1{#yjOf|0@S20pc-PnYU)pH3|WpI6CK z^=hAdC6XtCcZ=mSWpaJ~wD7*4biJK~1qywbwyGbRU-P?P6MVZ$?RweV9Z6i7Fv~2d zJ z!a;JkG+Z60Ev`i6Dn_UA3fP(I`KcStXA+3RirGU33~k(9(wK0ZtzMiWBEq#;JIL>Y zY37ufV^TsQX$8t#&`Q%)D^>VWOS4afjzn-q)Yy&k9>_}ifxq?BS_ly>9p0wG(Eq-4MGMNot?s`RG!FT1&755N;Dj16-g+j0AdXGQ4djU6pD?mo~S!6!T#{ z$z_+1D-A@mG_rRaKB`^s532>ef=uQ9e(`cBf-e`AM7|e2LwF#UfT#aA3xJE;uHA_x zJsWGY&)M}j9?)_7TbC%4%h}{$V9|ft0&rxwyunX?SK`$e_l1^^XL2e=5AD2(eco~e zn8eJ?3`=ce<8MXV3{J=O;}8Ta0Up1n8&AfR(j{PHM^hN|zZ^iI@HrU@3xh2$fBCKF z2~>&VCq{#p&4)-I+>{jl%?^kQHk83DIfgYp#q8 zk-2%UHQhkp>bEid_!3iiR*L+25>JV+mrdo(*zG6oy)@=B!KdV5rz}W2c+Qqys+GAa zSH=?=YQEVmsUcL0bpN<>(^Rg)4D(g!6PClg?S=9m-6c}6HeVGn4L^#g`UNFzT;^h5@n+*l%odX65=l6r6}1d6Fi)9nJWn-hG--+(K%X!EBo6E&!k4RcJj!aEG@l*xe=|ho(UEFyE9SGcW z%b2gS8TSUi|E#L|)O+fCMpi@cw|30IDak?pH+DN6g8pB23XKa<$Q%v5=!qpc@)Nb8 zVhqGNKBq)`xe;+D+h?Dw594zwO%0k$Wd!=`U=Osw$z|(>))dMrY5WB90RK5Aucc*|yRIlqrNkdW$702l8g_Cz7S05Gn14mzf9G*4ehqSa167MEG zG!Ke9BjL$Xna39tlY6cYPo28C3UXL}qTCdVA za~>Bw*S$-g>~Ox`uyJhT!}Gk|20WDny!U2%3h+3ra>b8&jh2wr062L0v9zp=dbz=N zA!@-e;B}%T=gaYvqQq0(LHZbDm+y)^=i1HGVvT94AziQCY_U8gn1)OUZAvj8C)u!yI8rRk6L1f^(kCU>ZX@HU|7-qXZ`k=UWE zcqUmEID#6sn-4?%D(FRi+!8S~5|7<_NtoVwR~TfhJ6sM*m&Xj3NtKo<$%2;PJ@Y>L zz?2I)X!tZp3`hI}wF)k-1PP1%z!QfUEbaggVq zRqk7${MU1ygcQHIy1FWBXnbOzJ?Y|LWb|w*pn|LIP9+PsNt(V}E0`V92`iw&n0H@6 zjU9q0Qyb+Bc*Arw@VmuyL&XV01ojw zu@j*6F76c(*h>sejoU5_o%mU*H7F@5oF+cV`1f1To51IpTAMyh!gLHVGz)5q{JIth z8AHs4m}}EHCZeaL15!#wVQGdSo04ul<9%ZKqv}>MczVW5JHHB3ZlRnPhLIHdy4Y0@ zB0mRDQj4rFdqSZQ*@J;)ENt>Yxz4*RLgpOJX5di?AItdB(h8epnFb9>^lLg?jD 
zPouzK>;)Tjxxdq(U15LR#4#@Bb{g7OUbfUoQ(glbbI!y0$~dTJ{9Act3!asB1aQK9 zTVX6lzmd6;p-PH0p(=WqNCkMWEc_z4j134-UpG|984k&Hoy_6?;_6ywkx~5fr1gh& zMv_7axpqoAnKXzAG2Ec;y_Vhse~8%FBEzD(p`pPF_;vIR?}Kc`F5qOG^*Np|RW4LA zFi_Y*F~H_@-kyb!k>!L24>V&*pR>Xla+b<+jy*7q%yd|B?W(iSR9%@ls;7`n07ie7 z1>6p&E0ML z?>Z?{&|WN~VQ5QyE7~*@(bDe)CCf`@VWlTh(`9F~6cfkLg6#J58fuWqA^aZZeu80h z76a}gj@*7#9|;KC&7#G9Ozy6R$+ZZ5GAn2;!$K0o&0^Zys+h}mHtC?C?4t35 zq1pX+E2NGGKSpN0vw4E4aBA5zu3NOQI)+!z7UlFHy%Y5{N>qg1?33G~-nUBVDQnq9 zEa`&cDgxeZG#zb18v;SQaGUISrX3{$=xobi9CA^M5FFLg#&ihwfz4P5s|M6%ERj7(6tYU#gb39BhUsDJMwvbW>A$jEl98 zYi8Ce^ev)3>m@vj6)l9UoeGJCwEYLDU9BQ&L`s?rpFj#Z3}kP`hmMm-4#y@SZGwj3 zWW(VGcQK00#}f7#0#|%YO1~$N9EXX!AyYUJ!>Nkcp8C6}WJRU1t`GhRp2%XR8mhWW zd{PIK{`63O-#6%Fwro81LTXx2^J%vmK9zN778h{?5A4~A*ozV(-^kYGlaY%ItvYHQ z5*J8FF5Y;-z1e-s|6-t=@%8!aZR3TWaGkN^$!1=5lHYR@U6IfE;4*lrDAgdsdbapg zS6zLc6remUr8Zr!^nWy`Ers)%K6<^s-aq?3Y=zaW1G+*t$M@`riC{w*pv| z!qcNL6%`eRSiL`+UcQO({w1x6kY6w?Fz8p>JFbhne1lg67A@sgIyy)`*1rzJed=+V zktzjZ8Hn+MBoQi#68vR;}2$QAo>%-X`rX( z9)@QfH@ZkPv#Oca#uNAqazh#6L6%gB`)oXH-=79v(FvjS`Qbo!$aiOy+Pka8m*qKA zw$kUQDAl6lW=56_W(7DDf2}$YHn0 ztYqx-3-2B zVzj$06-rnOHehPJDqS`xL{4qemXqry0Y#%(5jS%M4CAwhnlDJ)5N!4yQxoVoL|KE31{NJn~|(llrZF;w>^0sTEyF>H5Y_;V%L$4H?b0|@HM z@s0LgWojRs@QtB|F~((GOVu{&=Eb}McE2;tvSG?4bd%#y+q9XIBI8W=Wq-x*=~zJV zv+`H(ITui`bmX^{92c)Gu*~)I*th-FahFU9I9F*Lzvqz9EK>Dc;ELtauPy+2AHV|G z?bjCfzVSvF;}`%JA%y*@Rbw(0L%?fMrQdc}OquI@^^_x|4>Vo0rlH^MB!yhs#Zwly zYh!|=vSXNzsw(Tv<3VoMnb~IxEUh&2v#VHI5QTh3{m+7e?Rf1v3w<@yDxYWP&&z+~ zCCgNy*xsKOKJ6Qkw8*{(_veybxZ>2iUbwBIJLP;*+#LAh;FO#qnw!H$dJu(K!kj$j zL98;KWRwWyuSMN80bWD-$~stmCpDP zvfrfuXG1q?WJ(>%xg27MEUr@QUTv_D)B?wNPz9M!`75XHAy=BS`42S-G>ZhfHgg2T z{pMb}QSyFWs&nS3l`1M-4%eUO;XYNgIxZe4+3Q`6rPDAi0T>|je3Zj zzDxe9_4##mbz~WG&>%Av(o}ir1yw?A6yK){UGZ}Q`uxCoN@Ln~VDi zR9$+x({lnI_riGvU!3lqZciMwTHUPO74ce5Ysa=V4ZQalC1MFXegaMe-cT%&B5A14 zts4z$-7nugTEVXC30|V|pFjB=V-nju&Rk?M?Uw70Le0lg)@q!01_8Dm5fOp)w^XmW zUH5+;&Tsk{@8tO1X}18ugdrR5cr00>|4QxZM&`l^`v>-%%oE!Mwo22h-4Pr-d*;{i 
zk9n6bCkwxb(qxLjQ_RbDBkZw5_qk#+6iUpC%R$VD1J3y3${4ddmt^kxB|e{8Cz@{W zYDEB#vgPdT48m@u*tTS$(Ox^nTsPmA|06XuwKcEPrqBs})9Q@6-XqAgFf~@+r2NgEJ z@N4@f=V%IbEb5^%sfmNo$62IvdX>rg&5>?Hzv;*hr0lbCG!;+T3|HUejWLLi(X2+N zd3SGamFpVu=$>ROXULl;l~(GxgrpO2=o^3H#}K}9nqhyxNUT*;>$is~7Q5z2|E^sE zHtejYin^%FE%L=-8}>2aSEly5(|#Cepe_{4z{_~B)_hDV?G~H4qP*Ni6{kcY`>7j1 zA3_{%mtF6QKG)9)ZoRslzK>rgCMHOtqoWITbaf@!I5}gZ2TTT1%qv=}1`rSsP!9I? z{1yREbGf&-m%pW@<#}agC39k5dfV&lw48GYaP&#ZIXN{Q92^=c6mzoRQtz26wQ33i zR8?oIV`5@Z!%&C_@!&(C8=9K1TqX#*Ha1c+a}8ITewcXz@6q@7_jh%0`11z};ggs` z|B0TTp{i;jZYXnge!j615UmXZq05%My!?(xz7i1{RDbu*Wq(kQk*a~g`c_s}R*<*1 zH@$$Nf@rRQIBzH-PVIqSM9|Ky&swW{NWN12&xQtLBU{^(3m|vP2efaYN|~AfPQ=q| zXG%dq0Y9KK*EF@XyoebYe>ML2@xuXd%@3kDFTVg1_GWwAWO+ly)O08QB(B(+lF$G5 zPi<9?yiR{JUDer(r>7@40s;cjzAYvK!HehD>6e$6*6r4l-4GVaAk$c=Z#m)@CWm7|Jjk^BB~K$OGhebtFxd2i>p zA)mI^47gY;io_rI!Q!2dd0q4{Pe>#U9Aj=~MObEr5>D1M6xFLs3m|Y+}2z>gs zoQ76gTm>2QzOc~phkFAF$sio^Q?t{Kx<@<7D<-%t{$-{^ml#(*hGd}MAiid|6)>L1QXl-v#fEGIx zSt=$6Y<1WU7mQ>Sw(<&4P|&3|JKZxgXLT0iJe2HwWr9HXb0b!$yrWk=`3Eqze|sJ2 z?Ou9m(IoS1FUg++aB+45+!P{ndAwUxc*^~k4xZP>k%siQ=?%7u$phIb?p-qE5Y>#3 z#nZ)_LPeV7&j&WT$`~srw<QWPh@1I?e%peG9nI3bt3?QTXv~f$E^+r53g_a zlLvKx9&RH75=s;86cu%KO#XGP*-G7}{e}7YY5s+a*4EZ(pms@nx0v{0Z=QVxSg{cs z?@kuZ;}%ZVL>DrFnt2i;uJ-%0+G2UCI#tNzJZ!==U-olq)*Tgi-M|Y|ub> z9F`S_i2$82V#WzqbDn`|oez?J=p?xyGS@cDun%|n2 zdnoN?sWZy39wsZG8tePqhcwSVJ6O#q3W9Vo``7E3isv1qrZxiBYy8DbkpE zpyirsM$(VZ9|JXXPR3Kq3G3$wYG=^m5DtC^^#H-N)cqH z)n=IV9DR4C(PzpCD}^kL;`YJz1PR4$p(NBS=G$PD|Ek^wD5D_{k%va!Da&HepKn=ruBTqO;=ZM-^K|$daNVLD) z5eW7Xh+&NaSl9!2hm6x~j(H6YWmKOclN*}CLc>JPRCRTA8A*O$dadrs4=ECVtiHOw z7Nnx0TAXQe+*lhJ7`Wx;<|c`F0h2<;Vfmq&vUgX9Dz6J9U{k=i4a%W} z%+U{Aa{qGYcr$6;64+TkzzJ~+i&XS^=2tVnWla41+iIOJf!hiGx( zdl2PKUnZueh5^WYx4OET4IDNNWTWs1vUeCs7#zriOhE9)^DE(cduxBcr%a0u8$RTv zynIH!3N2dV`EO5i0hSaZ&3KLNLJdEEsgGzF@&MpEu8fV1ZBPR@k&>Fa@PkSw>q+4` z(4V)pN*A~xdbhxToaL;oe5b+4+z2M!cCX7q!-rlh!d9W)(_(-Z`Tc?H1(lbUK8OOa z`U_COnz^tOqsg=|=ZiHv2~J#DM6i&h0u=n0n*4+)+1G4+Oi^GE(5D0}X2bh+mu-1> 
z?azPO3T+X*07E|CuSoS+@N0@}$&MEM!P<`XCsT_0(v+DI|M7LZSx8jZT3PGnQX0ut3bX!DZq-Rocvi<#|%kk44y?~aIlGCQz z#KKrg2ebc1XLg?y;_OHuWQ}!(8fB6{(VvZYseVDD?yrsUq>@zthp-Wx?&M284_^vJ z%B0|Ao5KW{K@p3&9BbEfm1cM?l1WWk6QEoQf!ypZ*QXrF0Xkd&HQJ}HPCgi6zd{y~ zRzO9E8_ff(I(4fAGB%UZB;X9j=#eTGW#?vj4n$xUE-x=b6LC4^b(NR5R;jw{P5BpJ z#3aj6nDhKXA>{MYcXxL`Nw>4T-X9MFrUjldvV3?~c;GWKkWs9ytgwl~#UzSV$`ZWz z)&W|6=oWp1CX#3C`#C-j#`18b>DO#mK)@I5=0f11Q;v_Yrzp@t94Q@cP{jo91Uw`6ZIMY-V;#=-!tlE>5RCSPJJ4`fhx0M?@*z{4Y}bnQ>( z1}vkL0q%*T!xp*XtMdYBSAh5XM!p?n1jN@cF+@uyZdcX2Ly=O~FGk&Zb&;_lIN^0s zWg-38BGa(&?s-3j#Em;cMiQS)`iF@l5U602M={JT+%{boP^Yu0WW)vu6*Q8}gVUeS zP>lyk+8V}Lsv`f~>st{0p~2SPRDB`|>y=-y>Z5h`QiBo@cOHr^BNjzrInA?ImHE81 zG21&=EXOOA)?&K13Y(!Tz7n0VRCOk$IZ7%HnI2VIm?vPo6Ld$`=!RVyLECXOS~O(e0e0vQU4{b}|V|5rLOQ9HA&of*Ar5@3Fu1d2d2kAB7F<#sr>E>t?0WUjKM@JsmWT8a~$?sAK*;Z_(LM}R7B z0$6kV@zofkpj%bU(B7D)ddJmhq$dB}qUpO?hJ^w^lKChp!|sB7oTW=<9omkF&d~3Z zgbqlgT~^jQUV)NxGfmFouwUK>2nr309If+MAnwb4zCS0@(b6(IZry*QXgx<+Mp=s`5?9{hYV>hU zkfV5vBN4q@_q`sEWzecNg8PR$PpKZl0730+z|(B*pYH=EK|uNd{gLtu6PS=JS%M57 zp(soGDGjRYX~WEAK?OR||CW{H5D}s`@w0 zKZES0<9FWf-vF$Fw=Q55rZb?OLuLAcUbj&$;O+51@OgswTu)U^4Fdr>D29#}#5D6a zYUXycyoh%6-RUXEVKwO38^qJhvCci}=>r@lkFi?7C9jWHz~bQKOa*XuN}he~Luj9I zTUl9IaO3m1qyIIp=bHP1<6j28ig&s^eAW+UTqV%mL<=M*gIN6n;;$Nxvn5 z*@iIxKDW2R=O0yHJYO1D9DM8mHROndr%`iNUQryT&|g|rHSs)58tZ#RDfl?@K6PFM zAZcQM6y4a5m{Tu42VjMHQlb7jSi8BMx0RBPEti9Plse~SzKSTW0JOfH13JVW?4Z4_ zzJ5-MEFodx#JI;OPK<=t?NABqEbMRZY;+zKXA+*}q{vx2cD>#HdVd4rguVNtRxKT! 
zjH_pe)mk~A70bxt{@QRoz=%a-ECKIba==4az{5^i{khgPgJGxdOBj*AR)@#gbL#75 z?4+LTwNe|obS<_1Yj+?7Pmw>6Vw}es0uyUQ9e2yN8@$=}Dgl3sE*uD&GrN2rug1{A z#d7!e_rZW!Tgq&O3OBuG39p!)oXNU-x^^gPoA_BDQ(8Rs3POYOcqk`E*KqG5mF zp@X95=jQ`}(Ve>)ptEOj&lOQ-8XY$bE`bCl4e5i$PQT|ym>^+=-sb%Le3XWx zb)WOsrKqJkLmEc<*Y0`bnAreeVnh|NagsxTNr-iKj7kQ2`egaUZxW$CXFpE-Ql&hf zMP3V@alR}{zn(4pGAsp1ts`ItqfBJ;-tT2Pc6yH=RsT^=>*xqg~*pUm|syW>fU z6=G5piLnv(FkAe@Us*CIj$Bz`#5RyHQ$>{!_ak33i-g2^SOfRciuVcYD2=NY+x+$G zbRqmy6H3sKsi?*3S-og6ZtZC?o&^P(e?lgM*H0^)Xd+TD@m^4NX>)^Zr+35zp1LTZzena5E4+a;fWwU zQhh5(GQg`g;rQtRAyF4@C5mAo3%_c1kw6kIDajTOt*XAToExldi56#u7vjwvMaPmefoTK~MYu$0x6HZ#jWzuxKpTIm=9#5VX1Twc9GuZSK z;>r2THvLb1VA${l=CpX~gOSfU$bj0;;DzBl$#E{Pteo=&VE%~&pzmoApfL|;(%mq? zxYkCs(=tKq-}Tb%&FOP7)64ESN`+PQXR(w*d+MdDtCvC_jfDPfzRSNch3frF<+9#x ztVjs%&G8=?h^Amh+ZCtO>~P9q@iL$OXksboy6AsC@7f+P4b*ZP19W?`)b*(Vi-87>!Hr_kOk@du1K7K_-!_9!IdYz zes?D zADx-&YA!z(x6te}GSlLFG{j6dm`p-Ka$5}}rVre*?S!7)IHARc;{~6k+|y#3>y4oJ zQ_+j#cdh{ezyCu`aLoR4tfJjB{oa6oN3Z>Ygj_d3Z2n7LywD&2J9&{60G2(iZYDLJ zn}azYn_VWO0{KBpVo zWzWYfcz8FO2#vWFZFRBwvzyEPpflO|GUuzy_S;K|A-IYs>hja-bx!I_)uydYE7zw> z?OT*fXWc~19yeE0U)xR=ssnZ%@x0YL??{{&nxLyg8gZa}JaN5)F7sE^^HsC>Ry*E_ zpH#f5Zoj#Bp2w1VEPHKGW7W4q!u~AK;rimV{gcar$aD31fVLKHTlW`y_Dy^Oey5r! 
zeRZ1EstGvVOA`I+obv@r8l~O`LtU>T@7%r9Y13D`+gSHGUi!^^GI!)^-l@^1u5E%| z^82pb#6G|JLwY(nJ)IHo*Z=JT4Z!Elh7B)%SA+BYtI_=o3d-#PpZ&#v^Ko?vu~XUh zEX$K!qRY9fjG*%jbha`T->RUcwrzDrhn>ClykD(nf;1P$RhL0GKWnb_EJhX2JXBEh zDJZmmC$6@r6l!7C*h{BZtdS-nO#VdTkPF8~{UA>P*4CD5+NYO}4K`COv5f*W-@}9T z)I;35RFOU3?IFMa*_QGhL4F>{|G*lJ9MG_ zt7rHuVN=+rF=k%1k?JX{*pM`1fpG26yHi}?IG;MDn37A9d8VH_wvI@CkR3eRXJS6( zSC0ICL0ZI0njbOio(2(H&4z6{60uW?HvFU$YLa-TeIUeU@C?_m@VNmp_i1nCO};=)MCLh5z2bU)L5ja^mg8p1Jk!wVXohxa>g!vo>UY68U^|j$ zzBo@>C9*ej3M_-Xo}Lh};Mbj*yn=eWJ3E9CxKz<1pP$ksB_+uKs#1o6g5sp7BLO(} z*l3|l%r^_S6TqXJGSbp;e6KGaUH%HQOTpHo( zjRLgDOGii7zOue<=_$MS8Llh_J9`xo;LL~43ua2{>ia>5f8_E_n!T@IFFm#*(x3wI z>4N&+-X8iUOQV~c+Ztdxx2w=={^?ksY)xW$dV2C`nU0kzXNYp#uQ~(~)wQ;^HhR7J zhQF2P@z$iWfW@~+_Vo1hD1a$_;g@hUiZt$n$G_ZRlvq{I7pJ=2AUMfx3A(*8v$Z94 zfeqW*%|?AnYI`osQ1V z-(}V9x81WYE+3-#OxptkdSUyH`!PP=v%0pXR$fjp&j<2xCjRb&C6Jc|6W`Fz&> z-=~I?qM}$uzPhf*#wR2^&r3`+quUq$C1UlfL(0sIr&3>4b(egtBP}E2wEE*m>ck8M zF-9|;OqUd#0YG5++q75j-@lIua2cJAjm@;-a*{IU;BXI7**dqxET!9OIO;JC#;yQ< z=3z)20SDjgJ!|&N4gvxzh%^W+xK|u3>B>8ZvyAYZ<27z2O62!do)Q|$m&KR=jdoHM zYQG}>g`e0Tqv|j9BwMbjBx3BWT2b|CB&5;NN-+k-9HS&VI0d`oH#eEJV-bB#5(Ohc zut*Hz7Kfl+4dcp^0~xE#587xyMGql@k|a+C6l;dTu3oB;YzC{v2lD6yZwXXLYSg5W zl+9#5yhilu2+u%5ds6(r&RK2f2~9vT`ds6DAH zuPGUAF!0@HAR(KNR@GZ=;9ni!U7g+COmkgb%dg+Pv;DdLkI0mioIHwv{Iuch<_QsOVY1*m{f=3Mqt?!-+C>4qCgj2Qo_)qoWV5&oLBCmf3d23 zZHLlphv{_O7R`y=Oc0B204H)fn8{0@!gg+A`bt_x1|1Gw)7|zWw9sl~T5b2f&gQ`* z_6+^@z-tO1y4c|nQm+#p(g68`^~Gt{UC~r_$0Lt&^3e>gkkD5P8JRBrC)E3Yf+#a# zPCSvh_b$1E!w#HAA-5$724LA}R!E>WSYKPSh|h9;cHfJ3aDKjds<(FoV=ZfTW`+f2 z`S9>i=H9(~J6xtMXO5<(rp%S=;T?l>a~CC9S+lEOxZo2cCnWqsL`HT-@8V#thC6wu z;-Z0iiaUl_J2NvgChEO2llk`Tle|ifx3)a{=pF(_T#wTXUbFGvxEVXz+qY)J6(aZX zIpzQN?_apiv`MR9y{nB)nR8%Z+W-_1mdRAN-}A)^320E3V&H)l)t@Pg)Tgtf=@R zJ}RoP4q0=J^8E(6#-QdmP*>FV_4RE^JG#46n>3dh>F8KdQ&Mi{XJ+;~1}?3g+6%Yk z+_q+q{vlPWffqSx1mZ|lE@k0KbWuqF4 zN*A?2ej)&AP=^s4KN6MJJO&|5k71NnUotZ5KuQCzRfJcniV`u*tFb_$4jFr+3NhP( 
z&+jWMYWK0zg`Q(c|y+RAHk9rP{d+vk5GR+8DW`P*^|_rL{1)ntr^>Ogw?8LMLX``r;}HdS#a}4xu7x6a z`^+&M8k}wI6R#6!LOVJ-zEe|Ezw`UIGwYCLgCYn>NDELe*`y>Sjm!)T8+2qYrtM!; z9@jWlab)i>h2ZeHmeoa2+Qc~!%kDMz!jxPHz z3ax$Vaj^H|Jv^}~h25>PKYwX-N-){MVOMe&9)Yjq7 zPP(4%?zQ9Bm_%yYQ5dv$PiXJ}AODm3tovqOZqfS6=B7bWRn@Dx;o-f|Tq?0it=U%r z!@d}U($dmAJx(cvI^9d@90oNf`6Z#nicljUk!Hve*^ z$?|;4qa7Oyr!

g2neht*NGEi;9KC=z79q+Q!G`u?h+Aa>euTixY;$E_=L;z@)Mh#BjWK!EVXH* zBL2P4;)QHCB$-OS=jDGRKP0*1-F8j&Y$+eL^89(dZmvK;_^SYkK2k~sX;dv7nW}+| z)lylkSGJrTla?UK*UD;)fhPq?GD4q1xa*jkDY94av(%tfSYja(&W0QlZGqvpVCG4r zm&OYSrH^n9r2h+f`b7jIb1RsIWInVbIyN@xX^g5S4^9!QtI8aXl`Jm3gI6q*obqf) z$NLnNf>xB^TY04F{PeCPg?%aQUR19T>|-TyBz_%B+0h;-2Bm`{Xo&k5I^DjwHz}9? zx0j((Mrq{4w?@O5n0P`lagq9^D&KbUQEYyXjoI`hJUC28#@!Lv(RVD~gI$>_Kd#S3=BB>I3kq2k6$0orSv%77}Eu?BS$(Ax#S!n`u?x-CQzM7{Im{K$49Ar{tQG> zN-{EG4goSULgtp1)lE$zbvU$g(y3=>M2>*h?60kPOB+08V(QBm_OKa_B4U1)o|?Kb zb|Q=-qXSDC7^*uqbiF3uc5XTukOHUyl-S_@c~_*jthc`3&Vj@D!L zkhBeC9;@B6ttj^=PLc{&k40)|YATDP7P~kGpt5+w-92^l=*!!bl$3EHSfRM6c)GmL z&(Eo{M15R>OG_D@7fp?}Ca&gS({X{S@&%CnaUi3g0Bg${16};1w3&^Cg?oC>vZlP# zCj{$|*=Q2>cq4uNw}%G@4O3Hs=2l4bOCcd4ly&v>nk=C;itr4mX8tLp6$Kh%Iv%TFs)YV0w^)c>DwDNWLw{L^@ zFfh)fyxUq-m1jLVT`7kX$PS&C+5+T2*S5$y`IS2$t zM88gbXB#=wVeT#|8!7&SFvo`$b5uMumFeJEv$Ku-nn0AyqaAV8>-=>GB5yf= zRzaVclqeULK%>)-=2#^P86l-be#N?ZhEVPSTSqN3S*e7w5@!ObV0^a{6#E#CauS3V zSB75!t@w*E2&R*WPpU{eDckTX4UF;KCkpeTd|#?QPL3h`M?*O^fh+KJ%_2jIS6|sI zLVk>;IHBWxF@6Cua=}eKS)MpY*g9T+VT>Z`gcPa{nTj2Qtb0h-JuMCcm6!A#IJxVe zUbwf@;9hRuT)6VlK3iKZ+;}^M#1BL@vgcDh=MO}t?J&^yu|Ip?5Lq+8IgmnX(utP` z9XUBENz)TRvuF{Yy{r4BU&5v7zC!Vck)$0BIRDGY%`7xs@u{G|=ZH@e^J8J5)E+uM z^BG+EykCbepRzu9$OA^^pMMJq3e*X^Rs%Lx>aBsQ^FhO-emhm6{mudhC(cJJfoK;* z4cR~20_M3SBxof3gEsHWN!kPF@f|evvv0PZ!>sEK{L}iYF#?Y>!jy+cbJGm6<*))ri5K zu4sGJ)!WMw7N7U}1a|)J;L*|XnD__6f1@HJR7k2A6-P%$6KCe;@&p70Uyqy8Vq1Ntw)M8QHfH%)G6H8Or?+Q5etoP* zE%070UWbRDk0SDOIKrnlZPU*t{@9r^!^6or2?N&x1`%QA zn%R}5n%j&?yy~vNr0WR6LlmiBWjf8^x;=(RjM~f(x)DBE8%mWbEj?w;S>4?=vA44F zBx@Z6mNA;RzMVv;8-!-*bp2eOBn$lX^p^Oczj8%=KJHI_XjYlk*kgoic|F_J6_&nG zsPaw48@V#BQz?bX`ec7ef>aE#(U^B#~B^Zz|7ptZS-Y_``kij z{~UN9n-9i~e6rMviNi@tXsD=Tn&dR|Gc!9p_vZESmgy*ZNEU*KB83#r3=BV?GHJX0`BGyQ3ce@yTNMS| z*G7sf5_$RjlA_z_6j}L5oX3vmDYg(o4nBf*Xh*EFRs@DLhU&eTpbyakxJ=HMK}Cv+ zf>-L|**gDG`>+$F2B6r)6$~>-=JrK+KVT}aH7Gei*PS!0 zxHVB#BiPD$4QLStcO8rPEWTm4CmZt;;b%_PWF9oNQ3M|nt~LInsKL#4D+nxEZ>hYj={hLTf 
z&yS7sM#8Ql(g#+bk1$nCa&vPFwZZf*Wn;7Elyz8Jguc7I{c804EE+dDEsqityDpkZJzn^ozW z&Ha#r^Fx7h#_(7C8JNC}wFEEj*e+@~WhVfhn1dP$lj$Qt!Qt(( zudYdD(fyXp%{UGZuw;^|I zV@$8?vFHF!BMN#xrHM|eV|bt%CkunH6JS~?5j&lPi&x!WYH_c4|=${_1(I<78R0e$jW|uVjY|n z6%~cCnE_-mhn$R2lH%)qSB2woIZ-wg+nP41gYY+==E_W7mPR?(l)jY{%~h~F@;c2XmOpyIRHNqWD}0aHi-U~~NzOuSadB}KV-On$XJmeEj%{#L)+{|g-<5`f z;v%N#K`ltxc56R>Zd_Sgo3d%Vf-cd?a`V|^ec0>qGveKD08YG%-@Rl0ppjP75gVE$ zsgfhOTM`l?6$o$tW@oN`n_mEZ?dY6hL;D0&b%hO*L;d|VQp=nD{ZEt6rF_UJDM{fa zFV=niiU*lhg4XT1Pkev)L2GPZZ1i}eP_Lh{;0VP=l!JrA0+zZOiJp=MXwiBqDubz@ zDCs3pa6F`%gD&cEbu=7(SNdysJN}OHv9Kw%VT8Z zux3z)UxL>0au#l19<1RlsfZ(Q-#$2jLyXJ8%E~)9Fi`tlbr3LlO;1a-pyxnQDxssl z(S{5t#X_She_S3%O>_%5r46$=ldXEy&A9@P#?p%jvc!O*Y#qfC;N*1Hv9c|rdWcN5h-5|bW<}<8nUk_h z(!cSKdFWM3tP`;+UX#^dRK1WI7v`|{j8QV}vML~bB)`B=!dLS8wfsX3OFqf4t_a-9 z2O8G2W(0_IQg}!NbE+wtf?EDbos9eg8hsiVPWVxbv>h1a_7kzrR?@kONV-VDPsKY1 z&@fY{m4A8BBf5sGl+!*gar5-KABG}_A;LSohn$0mWA`8D!j&F<_Cs5%Jqklt{%;*j z`oX`j@z~6!;?)tpCLul=ar|{&`?Ngp8LA1RLx**_Kj2gxqm{u?nqSg8-Xl)tnubnK zEJ_5}R@ACK&p$kmChTvKx(ypX&qo{7h!2F|?b16w zKF(Wq$qfl{^S-?a*sIll@8j+5omyk>=BCv8ywz_8kC_fd_;i0M-^AEhWi429%27Jx8`Vda$#h|HKDET#tUfO<6dnROL7W|N|1kv zJtIidTol{c-F4MBFvy+e=jq*@{V-3&7N+C>5j=~nFfx^AWMr&+{@XE7uM~F;q@kh7 z$AStM%O@ZZ3!W)Q3MWone;-E&l&!e9$*m2EZkyV%4U!)+v`q&(+$h>nL(+>*=}5YD8$#eb$d9Vxg#N_XR3z{>vIiK-35im;Ok;gv4`l zvxs9YF)^`ZKK7Uw=4#PzX$`6>yRH62RA(BKSuhygv+8qCx{0bYDw%*#0cpB$aI7?A z7Zdd()rN*5y@?^^1{&i9>#07NUtFJ1bMGi4)IzcZU8d!IGO~tr+)PmLyATC#ZxFxM zOyi}cMerDWU^KpYb8{oAq9VVvHCB8n11p3Oyt@3B>BpoKQ&SFUX}Y*A0nHtc`T56N z>*_pFvH1SKGb=A|&u@RRbxe_}xL^6?;2?5* zd|X+8zgF#m=ri68msz~>9Rn>bzAY$TYUsER^i@$M6K!JaQ~{wUf9bgeA%-h_-7-JO5T-^pFaQ)xt4Ymo-W065Ey*M_Esim2$uz3qf1ma8{(&+2)KDrz9s2&&kaErk*XJ zM3-yG#S_EMsI#}0`l_d_W!7D+xlHtUDm>l^v7BBmu@f!SEOCqpkq${P0~N=vrTbSR zL2)-4UHRwNqqU;Qb@i`#tO`W1DSs099qXW@9faW1y|73bHd(Pi!e@?Ddch!ypIF#L z%i1sXCzA<(N>qwCKr4=4M9fdSYBQxh2>x{J%FS4^uz4OwNK_euaORM^nn_-om!`YJXb(-eRk$ancU=8iIKq*}nu(>=WSOB@j2&#^6(EQETyk15OCEV!vfU?&#MfAe^K`A-j`2IYm%ar 
z-bN*j)q{GKcr;`$eHT>l+v(ZaBAcL37n^&#HS7L9o{aZTX_{Bzd?A=SAe*vDI@})?ACtwE@&7A6X@FY*>VDU>LGnL7)0_xm*4%f zm>)Wdy5Ui*rraJ#04!Y>npOjWwSSs~3gZJaTv>G3%e<{2;l?MmkKayJ>?YpB2zgNB zJ9g>xVXhQH_=q}F$#{BG-)mN@4s6nEGNqm&cJcO*pLTLDIwWgH^i9AI7;2d*1)cPpUi_9WO>-Gb z6=r4aqvz&!{Q>*mrRHkxqo8*_;FrQDozn@7eEcT&vD;i>LxY(U+)F)}y~|Ro!NtV^ zZAG!BxOjPUw$`?%ZfzktF76R48(WRcJs&%}jh@r5t3EEjIzmE025H)T{u0oZ-#Wh# zqlg0sHQIiDO-CRkp@L(g%zCo_=g;4#AdGkmkWp%L z^QEfX{{CbYYd}nl$NN-No0@;%vlnp#4kHBC8tv%p1h2=we@hlV!gP9nZT(j6TGR9z zTy+K>3UP?ev3_UuO8$VEOj!ncouj6cnfdtCz=^yTtWY@P<&ldO+o53 zJWSq5gvnbB{F<3jjcJ6AGlb$db{*xNTLts^ICBfLqoqWC1JQ+=#tD127)pwGd=bRV z0-6{(d~LxfoMhF>2+EEb5hwKZWD6m)9>@_LPim@rW} zkjQ3Q%9Hb^)Z<(ijgt2C_p?pR&YrJ8P@cW-1>@-nou*MmbknTU&1> zzq3*&yc+yZ*OnNMz~CA|4a>BelAvIbX*niH0o(& z=YJd;3Q7QG4)n$~MoLOimKGL8#@v*%=b$~=;P&x@Akj<1{^#QhMD_)b9(ko`X}i9J z+i5{Dc@F=l?{~&3OB zsA#@+M8X)7jBL^a(ja^}aTifx;oLj{do7?Wvwj@mR-Qulo+TzGp2?R83Y>(?`DmkU z8s5dN16;gz_6t+mUMZU{z{b4A{m&BXcQsJX*zDFo~^a7nz-p{k@IAK3Ed>xX%2u&7R2zWRnw_nz zQY|bkJq93*%)znkuCrCbx>>7f(p*wv^&e1V5@R@t^_b3tKwoEBfR3&KI_o;|=Z7mu zwfLcCts^!zZln+9NG!W@JBjg1#(|IJd8ITB!uGkKP`;RUelNIf=ECw4k?k9uF4pQFWkqk-aVw6g80qHPFlm{eSFLZK< zKT*g@|B8_-dD5znL@Qa%Er>ao5}N-hO$Qfa)ZM`u0^=IY`$6Cx(Ei=iS&r2Zs!^P02ewZa$FzxbXQ*p6nFNE$0f{qCJX#d8&q_ni+PQG>pnI8#|8T{oPQ3L32 zyF#Jfr0{!-b6Lkolx@~;(>Ol z)>GI9WAGjp7S)|pzUrYF2#Wi!)6>^7+7i!tUZB_jg;=#2JPY}MK~f@AR6H?JjTYo& z4LBj>rJVx**pYHTFKq)Nq_;q(KK1nWzB%%^xI8=5Cc>o=KTw9IPOxxtS2Bi&hBzbl zE?=M5y*fps?(OR%m)Bc^;O4qJ?|nNSrJxFHM*Rlcp5TmLY?5VfrZ4s1pMM`U1cn+a8GQ8b;=(`=efWO&keXb zb1UKNA4=+O{L#@tYYl%5GjI&jo9<=AV$FcnB3q@=4uotyp{J)m{;zp^Wu?vsh8tVp zcrQqELf57NH2a{ZshJd4)8PakCod$FiCek2#NT*M4DS)ADXXYVu(7eZH-Maz&o^SK zCN2y{q@u*BR&Xh!8p4A8jMzLCYcP-r!={Hnjeq$^zc+)G&M938kc`ZFO> z2me#VvU(a^-T(S&6d8kSKY|X0S&j4aPxTw9$bP)RUGJH#*Oe^o$`S_&EEOMj;G4&1 zM#c)zA!^F5MLwcZ;X{*6Y_oWcZ@ru2uE*0MD?gQ~b!5!(#!MzG5+}_IiKXh}n`vDy zjRh7Y#4t=_92`3oG#vuz@~#%%_HN$sox8Pn$B-uV#$*& zE5$<*E+F*`i{2@<<@P7CeYT>rYype8P~WC92JfdVr%_^l$uvBT!=n3(;uWv)s<)(M 
zdJ(-avGh?=VnvFmCD6}LS*n>(ZT7Pe90++m@5dBSC-%=E(E6g^q<97y+ASX4rk*J% zzHD$awpwuj)U>hjoEx~G%Z-B7PB{>IZI=kw5i{~@JlJ=aK^rZDSXJ|qN><`+L- zUslE;Wv>xPAb5|9fq_x5?E@M<55Rb@`=+M;5owHH%|SPzv$j?Sh%c*%@6c#@cejPrhvUf8)CLC!FKq;q|GNdX&w3CT3wqKvEh{|&Z?DU% z+HXU7?rwTT;^4zTKiK`XC`8;R|HMxOoz!S}-P{09$EN zM5L`zb_0_>#^4${8d_j6HqX)Fp)e0fhxDTR<;@^u`dRA0JDtQ76A4&FQxp^4@|gc; zt8kYzB5nmKg+|~4w;8SXg&k9Z1H6DVx%%3mG(3Rz>KYptSIX%D>*6aQvOIbr0yFRO z<~CIpUhu0s^@paUB!abg=6fpbGL@*K)6=D2Ds=b^qTKZKGTSgXlJOQ>b@v8m#l;Qn zgKJk|vxC*-d3R5bD2#NMpaAw8MW{u29&{UQ0Qw|72GJl8P6hhxrIIbMHu7f5Y32jeMK{)pcGM8$Y5Tx{{N$w|%^Zf=?zSc-Z_1mtjYgS z{dBw%020^nS75!!Trv)K5X`lGcngi_eaJiy_q-il<`9jYb~Jli=EU z?_%RTU`i#jco`x5+?WJ`>0YG3qkvD5cvOtiT6`#Y1YrmSSW@~#JYC4DWpW%9BdI82 z|7DMoA=Q7Oo{>*oQP6-X0I8tg46%wp@~;%iqY;V3j`wTMVxDLu8kQA+3KQZZqk9~# zAk?ZU_F3`#_M%_QYz>nm@(QYZ{$pEDNFGZr?Tg^Apo0^P!9T@CQx7Bpf5ug24?VOo zf-hqTx-{-Hkz07d%GO`U!)&_+sNQ|kX74y+r$nE}qN28t&mcbwq*flUWyH^wKU3a5 zo}OCMgM*IzV4ew$)^$F7U>t zW;0amHToW{g-hI=Et@xmUxA33`=kEnKMJCvqEEY=P%6P^&IJMMN0qYu5o?5KTtTz>WB2Pd%WDP+Q z9F>kwo;(@O%+8k2%E?(ODzb=(h`=2h9){cahU*Xb2-{!XNd#dQCjICnwweo^p)k6- z`iNbWY*AVHTNvTfZ;!{-v97PL=?$T3(6vK;&SMmtJ0!j&HV%AIdnHOKFP7g3`nEng zy%im~evURMPDV~n#mC1dqRO1HbkEFt^yi#NDwnN7mThWwb|vZk`+`9rH+VjdSnIVt zbq#utpR^)o_en`@8A)ynP;Fp1|0xE4m@6+Yf3GvI=u1J+1S}1sLY}BD?^)&~rLC19 zyJu-bkcOY19~};We<#Ehv?&Y_3^JDx>K#DM*ma(MrbmR_>S`$-}>lPOylZ?dAjMA)0EMe^rCRgg|dO?81U#1h1 zgrdUouhTxchFKPYOxF@IQXc_{AQ+*67Sd-52!8E)1UH|ay}0~s`O?$~D-9!CV6s3t zVR%%nir&uAK2(vaAOi6K>F76^iWXuLiVb$~&BN51TlJZ$Qa-e`F}ivU5-CPjvWHA* zqy(wF;U#gV8?OZup$t_RqV<1%2z`|LYkYQHH>Cj*hxrc53%Bo{&p$Q$vM@JCL9&** zHpM=F1J_Cga;;1?^z|RJK6;e)w+05T!c>pQii%AffXw{X)fm}7K?c)TNLp=`7$x}!6 zbBx|`BqXG6SP4XcR<8kp<@p2LH&J8RKv0*-KD3{H_q_e`qU+5&v*WTx4LBcPn`~`N zbNqo&#RQ;p+%2?7r0rs>>I4=Cy(eN~9!Aw0`By&95FVJTAPX3Yi5&-HZeJJ}8T%g1I-9-&J&^A*6jh%(>q1USZdg&yq-gPZp0|~JTm(yQ z;NjoDMb_mYV5tLpK<3-j)Rlk0MD%54{WVxUyJ>1@NF#3Y{GS%U3Rqx8V1c*6Wj&zD zfE1d1060N0w@H)0EnKlwO11LDsLFO|%u51Ld1O41V4yLWyJM0|oxVB2_}`N@6je+i 
z!^@jNRa^7p3Kk>VAF_jlq-U4=qg!()O}V)X&xfQp(KeROM8#tO8UFYalF^we=X*QO zz#aDW)ytPV#dB9(+pa$w#+bRe@hW2NcvLY5k55h*-%dz zqzN9Z_;7JMY(AmABiz{3sJ_4W8->1KvojHYq6J6XqiYG$Mn19jFXygCFG@$yWrA<2 zzCH9vRg_iLQ6zjIxSC0qrYR>KT&9nWL|dSurL0BX?oHm{JzW$yFHxO$J6vUmJP2My zWJrRc7Fi5*e1%j@1rC};un{bYLs;1hHC*u6fY0n6B$H79>g?SUFZSGXx6_B=e$;AcnB6jM(E4QgKvekzl=-DDkz*PJ$;(p1bncVj+a7q z2RIn9U^bNj9kI-V2M<=(j#-cgtlOHWJV%}JnX`Kl3FMyqOxaho1jOv-H!(k4 zX@BWWT5mrt0W&iC`oU%+&>JPjZ5$_(niH~FegpRf`J||`)@fbT3d~;ta;inG420-O zcXu}#FV@=O%X@P1fc}H3^l|hbdC*;qC%Z9JW)sH-QUJFF?HS*4mEXYB4SG@$$MI0KnR9+~z+kAugWB z7c>NW2Pk~JaIR@NIXM25OA<$?Mam8m`0U^Ro1Bu;3WZkXli1&fZt3aIzKe(;yUfkb zGR9LpVw2YlBu`HQQTOu3SaH>g($K)gdxQ8{a}STBm5*xP@GI4T23Cr|;vlrK6s@VWaiDflC6K>@BHFDv^J z-riKgUfVzh_tNs_CV4r_Dv~aFOmq#8#ac#Yrea0FBF%tLe-0bJT*N*oTSgn5=4Yg4iuDug6?9qg-E!Cu36DEt~~A@_JRm+FlHQRS`hpq3kT6D*sGFjE@^7} zOFCkexCnTjgpZ`mmUQ$wH9dDOCR?1YDcETx%X?B0k&I4L&=QN|k4W0TwXGO}r(ua) zw;pPk;9W4@Oys%yACiVE8!l^xF&}OZ3ic>wCN_&L(nLUA8_F-i0rDyZ1 zQLj-nU*;u7ecCw+A^$;&icJv7K&WB+NlN>_2ybZwc}~giRd@$mzSb#(mbn-@au0}m zUAKtP!mf^3d1O|5nNB?)=~!Du7p5;rR`La|^KInpwgvdqTOVA{e{A*KOxE=DJf6GL zDm^@=byVhOTOI~VaHp*JP2Cqf4OAY$AvIwDKZPED$6T?0#qiGq`mGvX=@2ZSZPRT5 zuRL?Atv@GTVl}*A#?dRKX-u$IWFQ0=MoI@ZUd0=-?61klt-wJVxw##dgZP70C+0V` zm|q77m`q{I&V|YMuM#u!t42>8)-*R=xM?;9sD^4bVQ4sLkW2U*^4|jC+h_RJJbO>V zwZ72Yp3%R&`d}n9CGqkg*k>87KA|clMSC!rQCYB9FS4bEfL1|HZis`0MPJj{*eN@o zds;cOg?M!q@`1vbRkLN4VsyUTJpa=IM!ggOT)smGtp$JL1W8tMvA-LL3!aL2@0>~N z>YlVlM4&x4M33$7V^ZRKPKx(HsOB)NfI^B6Nn&;%B+hJiLvpZiGKf=<%jfv}J0xeS zY)2dw8Ea~5W6~5xFyo?s45+I2L>d$T@Ce^YWeKZ*o~M^rG3Z+<=6<}v7`&Ua>4k-1 zX+vUUzXbzAPOZiFX#Nk4G@+!a^KG!1y#*LJp}1+Pudl)rh!wJ=+v%0e!hFp;V-k2B zt`$jn$0sLI0qax%MDP0$!WT(eJ3IsmY_y9F0pY1crL`g4E7M1}L7PI#Uzq4_({CvA z-sF%=Go{Q16IX&4DTAacWn95*y@iL(wyb;c#Ov^?p7 z@8;r+)+N`w3L&V4k*A)GThmkwc;eCo0)8-$L&C?G#^&bzcQKjYdwPtNb;()jZO&T3 zo;+@O90GPb;!^)szI~KoFQ^Z-TH4xru=`1>j|q97|6L7%1@H|ACnpVywzrN>>N#Mc z_n$Ys=rj`H5=UGCI*Vb_4So#q&XE}w?H&1xRY+)>>(Qf0RH0@VVstA~so~uc^Sb-20h!~L{2<>f*du_x#U8~(7?eT(2#Jm4q 
z#>>ljeQwSHw1H1mLun^akU}aoSQv2P60|k z2xgMnW`20)jwGZ%iVC3Rk?EK={V(L-LnhXIbsgju-+$CkXFpEA`u-33*GC1C_N*I@qra-(eC3A$k2EY&@|vXZ@et=J74Rf@3h8 z8iN9uPd{?w47x0~%yJ3{9B(HkCe}9`=*sS&HPc{E{zp8-e+h8BnUj-0QmHF6=aov9 zBBTYN{`IIHzX(ot58hIO%tyB6!qz+KR|!Z!jZ-_5aay-Qig8{omeu@4Z5@GqP7QvS)JJgv{)jP)0Jl zk*$)EY_jf9WMqVsO=Yh__V~T;=emC9x}NjLbIuX&@Avb0zhASjWEGqS>n<)XDJ9s> zU?`jiZv&0iYT_H+JZ>&7;S?6x+H2C%lbC))G^5Jc*x0KP`$IQB^=Aml^Yh{=_$7Ek z0D{8sD_M#B4J#l{w17xu7)rWmdKwAK+5ssksfOS$@`NbH2=lG2t%oU)*W({)Gp7WS z{hqEkJkwde-p|ZxTvD4RwGOWlNw=6~MUbny9zM*#s!O*eLV`tL?@3FhX<2-(b!D1f$*rme6v?}!63yh|l>qE0 zaTJMDA3&|Pq5)gYtQ{SStLwfgG!5X7M23R$uL&?}Tp-LFP3qNzvFU#pzx~Kw+hqQ# zj<=QF2uDp-H49u=>1JhsYcLw>WC@>4%+KE!*`-`wEGaH-gMZ$k%GD1;jX>@);5@&7 zZiILcC9lO-Vwi|4KuW_MCoX4*($s?g+AmFH`1wN*?dYkW#l>9aIZrLAu)|3iIuuH;=(V7H+_kAmUQ(lzV%VUpiNk-@HD$5kijI z(P*a$&L9=Fu|E-UT_svoB!K0dpP#=9Vx0q+j8h*kycwrVLdHIMB0LR;q1ne(DUi;# z!}DeK1MdzZV40}M=b%5&liq>tModR{kqtiW+EdiCj5PYRRAEldP97Xlq(_kP48aB~ z1_^#6cAlaK!*~idc74mp>lmWjg_KV?I7*y?3X3_kW_gZod=Zx^i@f`SBPr&qqcR57 z-#-*sio#@V6?}`H*#!BD^va(Kyb8;6Ff%8x4AMVDbvwsIy?BwKS|dn&yg}mnpB{Go zX~2``(>WY&_#+≫^A7#ccO7PF%#bMdKd2nZ%p0tM3fo?ln0G9Y~}I`znwqK1va7 zfrIsiV7e~1*lAD>!!Q$PqkpsLr<0UP=LJS(2w|CMjzE(520vK{)eWu$S{>!gfw+&F zVjo&q786I?atJH&C?yO3vp&-t@w_UU?$+JRWp6uUFJYFv%8k%M(9zO1>;Q_Gk)U7u z6it@wz$k(Pajoi{UY!jW={Y&m`yk)mgxIvk(iVD}xt(zL-S1iZl*oz6$;a@fF_seQ zW{Yb)l+k`5dI5t3g;U!5ni?_9%ls5Zi4}9Oqoh|!<#|kz4N~6G(K%y_w)7DGcZ$z@ zMStC@-U;-MjC-e8n7%OYB!JL=7p{fuguMyC6_1hu{aR||b81@7mh3!)Z-P1k0`uC@ zNh7nbrYC&&&b*#qJE%&(&2tWqP~^>s^Pt(_D|H`1E-rfgP0@Y)E|n&HtQf*CA}*6p&pTQpvMTw1t16+ zs6kk%-$7n#AxPVW^IogD?_34u=Yr;87an2;u^DB0F}0;w(2H&P`um@&8y@VIagV(* zGBS$G$WpAoCL)p(w%kw4uQ4v`<>eI(90(>HDpv4d_GcEs{y|DQr~Atx0OYnT?~;5x zF(rQ}O-)Um0_)lVFST`SDmzZL3BKkaY$$LzEP#1imr)D?@g7HGKlwMbBf53m%y87Gkl!lhpjwZgg8Mu|Ld|5xf{?7~MEBQ!wIu@3;H8iZKUIhB9VX zR8)6NOZxm&Zz-<_6uAV<6h}6x6KBWx?y(%MK0&95G!kIK@a*~ za4z>}=skkps^|+0S}ecesB`NPRnMAW6B10z z{TQ-WUr`o*h%aeGafsikpI%_(BBDdEvDKwiT?M?Jd@sgn;3l|D=C=~K;`N=WxEk($zun)19Rv}tc5go 
zJrP!l5`93VRRy_uMb)S^z?7o}r~C}4vFShd)4$9EuA&B169vlylUXkNvi}ux-3J>@7>vw- zsKo#exNmIjcgNa#_$iX1x45k>8(NDZ67~YG$Pf_7QkxF(D@LF){WT-_CJMC$t8NFt zDkoae*;Q57BdWcggMMST(z0&E>}MTtJh{M-NC6ZvvaW+AQ8^ACK$oR+W6P5Wf46H= z`9J`OfPHV2PG?~B?;h7tYE6wabL26n=#3jGgV)np_nt6;!}y~IaF=%^l0{b!(4jWy z+!K1rrz=OP-%?#g!+XZY_DtMKl9v@;LGa1e=4-gqMf@oxXs$%1MVm!#;9HEwkf$U| zHK!MN0W&Li2tOvZV(5$dzg^ICa=!BGSDmq8&lbcz#rXR9UC+dD?gbkb_lx`Uldt4U z#o^ogboA0TVoK#dbsT31d;3v!JHLPmNlQ;XiyHY^kfNn<4}ZE_Ma>jbkO8kmN`6()JB6;MU5V5Y-1JoiW3&VIWuENJMWqBzaX92B+5#7{OTtP=!*Ggfm zXmwp9X0c-B%UlKrpS6y`*l9ezwje0w3anV1zaTd_d`>uQJAq~XTEW#l04Jd2>0oF& z0*_e@i&U7So)+a_vTl1Uk5vb^*FVr5A@xl!oMoLu0pkgXa6`nk-UPKvG?015G>?`dC8jwm z{)@uI-oeGg69I%hXV&L2N?1mQVMrqK&4WbAnvH@Xfocgn%ddfHG*7> z8Fp6p89GE9#tFp3GQ(ijwl3g_4h!i2^_Q&aQH{oo4Da0^osn1LVBBhO8!i1^H61n$ zchpS~#%*V6ks8t=FaC6sD}gPlc-`f`wa^p)_n?=pHpN431517h5^DZ5d?<=vaSaskdOOXoyy#Qz z>a=QD-gD=6wlMJbub%?C_7(h&Xa1iPJRntG8wJbyKiS#Gy&(7P5;2a*9N(j_NH5xJC8`PBud3|kVu2QyVTUi zc=UA6mcX+xayy%u{OW;=Yrlva*gt#tW)wYjh3?}W)FnNCo?oroL$Y22ztT{eWpg8} zx>qcZ{f-~}J&JcIxFng2dqK%Zioi6V#Tm3Q+__f=`a zXZ{#~cpO)br)^F{nan=)I-4M_6s6lmsT0kp`K0f+9~A>3uYH-jqw+F-)ET+^qVuFs zb9gnCT=^l^BeT#kYhB?NQB03DjC66XW#ar%A=u@8gc|*+TCFmtY^;@@gv_kdFc_lf z!so+&f`b_@D#vdZB{xdLo6aiySOZHEZxn~hX;i{;97>S>T;uY)Z>$~ThT|%*7mW|U z5ee7|ny!7&Ac_bJGACZXE4Xw~L0%|muYylYO7WD!_i{o)k51NoLr5&!3r`h~LYPmo z`0LGd(q;Bzl-*?XXYYpc)C)_)fK%4VyG@(LFVg1|e2%MSqDEqt9BnBwfU@dwdaJZPi8Z%O!C4eUbsw%y&`*IuVOd7gs9 z3_>d;JwdzbX(f4W|GcwD7TLYHxyj6N! 
zTM>sofCVvoO&zD%_RywkN&7bkJaa^d!?CHU;CN7|SJpnpxBVGh|2he*hBi3PK*62m zlOeCej$GK;d3D48F^%WDy94fqBdeg*b1-zHjSb)1EAt71H%~&4J0a4)uv|iOu+r1B zJDHS3q)8fUw03Ys*0~=;0!aEoFwfpHA1~;~sDptY(X*8Wa(y=NPDqPAFxUp&bNoYi z(fqs?MA)?O2VZ0=uM$tTh2M68ak%DNU!ONP<*vlb8%roinOb1!=7%z%UXah9Is}on z4}gzo8umV1O=H2Tj_{}2WLK&aSD!@ zZ(cd<%A?AgCta_M*EMo%a14b7INI4LO0WH>5qgR{>N-8a!`Cuu@u7#V)#3w=hTEM~ z5stCf++QBImk=0{y=^}tXYo^Cp+U{&p3R5Zc9gk*`wxT$^X}DGw#0=m#jzx4DF}i_ zRPC?}y0CGaOfYeOAFr9>jokS~xy-@A#YOr~7a_V_RKvLVw}LL{xRUnaf8(E$%dLW} z_Q{lM$PcoNaq;r-!t`g(wcplbO2`>R@h_^d9x053juur%lO2qi;wIoJOJEnqe#6Hk zb-KhYj1$hqFeOoH^n0=To#2gbkpCB?k9x#QT%=tqVKk$-GZ6#jG!`njH}eIEi8qS5d3G9(xF_GmXM3%8E&Z-j*XDEx z#&jss{`*<^SMM*`gmLY6vvZ9(%!gTJ$iJytq9||}? zB4jfFF}AJ5fEZBY!w7i!G_6fw^}4=q1aQ+r^uiY2x;bBUCND=a_m99h?gmTPDC_=! z+##ch5zX5lKQcoHLi7L(Z`4Sn^W066eU^|Qu7W@>PNpoPXAr?G{k);R?N+S1opBbr zm_j!0$rBFI{TEyrH9DxuwzgSLc-_?)wbjCaVJ|=kaEm)%(IB8r(a{@1h8+hxUg8Xz zRa*FXqq%V*7_DdFQ2Z|~jj9tUoOeb~#CLkz%(Z<_#;_zD2SqbRySljEwK_mX2MiZc z06c4xIH`ZBd>fSw$@B~i4EF;4{ALu~Cxzc-bEcg{8(LW-qUfv-zyk2QalNBBz0a+gY6#d9&zD+h1W|knY;RVAydeFN7+CcmM|8}y_8Z+XAn{n#Ch47!~ytk5qLes-l19-sD45?<@Uii2xTQs zn9g9N#RhM~!`!Sa=`&!+)8*eAJ}GZcTgg?4!Hb4J;qgb!;NTNHxx9HO=`ygSy}BR& zSh1nO>r^4)+;tq*SRN}|VcQOt{konwOrq#1_%^bD+n;(N^|Z4yvJ6c;K>vxo9z_{{lc3*}g+!loaZosTHf#?TFlWo%p1dmV^@3G^4Dryn;;dB2z_03lpawH!sXRNL)Z2E8Y^n^%!N1g_02pw}SsLF>S~c zj;jNOiFHFxw2WlYUdZk#9{~Xe-(3N6hc(_f>teCMRu9Gclia_@0s^Wr)2sgkvdH#t z{Uei0G9|TKDH`AYz+p+Jenq<4k2P3XS$Q}!HRX9hg7QFt-a{Il))H8x_e8<)I4@zZ z&L&`Zvyx3_1WMRl(0HlR5KT7{oF+dv0p7dPJrv$XZ9!$yFJ}mRD47FWl_*;8y$R;g z$!i0T`G)9kRQt^Ng(ujjs?FYRru5SWTRnp@`u0YmF5NHTu$>OEF? 
z)cHOuro|En+G95eb;Fk@c7mh=DE?7&{zTn42~S*MT^2nJzth@;s_ zi<*Lqk-lUsU&dkj`4JuX^qif)Df+D>@mtQp`}xd36E@-?ANkj{w`Nuq&FK(_1&Be& z!1xV8>0jvS>A4-GZLmZgAmeZQrj!(ErlaU}u9uxHWn~0TSMmx^P=kS{Lz;34q2NkD~;~3Zq89%DdWLd?-!z?5*=x2*^i%p2Qqd6+)->)m}1}}sr&xlf1MXI`3eJXgF@fhTNj%{OxEg3vG^AJeuva?&E!0M z2_MJAn9In?<-atos%Axkx24H(_ii7u#zI(9Kxx4yOBReNn_vYV05y472o`d#J=7fh z=MOZ&r?&xt9gL}%d_K8fk8@$vF9E}*A{eP7OUlcw`1$#d^1nJtkGV>+C2RhID2x?i%`4WT=3JVL%HSl(AUc-ux(l7EaOrF-6_xqb|exP^Uv4uO-ad=5= zuqdXi(?MUjh?4@rFU0;#*f~E426I$e$!TA0n|=z)U?Ca7GRVR=uq&f%v8^Kb9fd7$ zsxRuy!o>NJ6H(|RN$x7Ch$v-`AHZOKRqQYCkX5TP_aWkxZY3dMC1MIjsmXXYNsMnbF-k9T~ z-SObWQ>R&s#}1$BSxzqMQDF1D#cXOXz#VtP{s-4(yfEWqak9G@|2(8cnSxPa1F<)U z=era6A1Qk-M)TQH(p=LmmJ{TGMT^=IX>}JhK{GTC%z`K_DxIJ zqM-+PQG0k{w(fev>{O3C&+59Q+& zyJ!dJQ)VTm&3yG`VS4|j zVK7r?;2UKQWv@rF(aMESkK8;wejje${4-T%*j)`QU@`qDKfbs#tR$;&*-}GBg;Te5 z@Pr5(!QczAR*0I(D>cycL6Ok%IygAQfOc~dq$+oIzOnkAy@I{&(jCYF{;QoZ##%nr zmUCD&udIM0=6@)z?yUfcp41?C68SzpUg;#BeQ!3E2QS3#ye?@Mv$}guy@&(vbQlpy zlcjauxBC50Bq-j*{nz@=1O@k6@|IU^lAeiOAN=~sK(hw5^|m0Q>%0do?f6~$&`41$ zEbJC>v?cJGxjcwQFv;-RMx5|s)unU`^vn`3?}1fcUOw~?+%)Zd`u`3N!XT)-VZQ)p z5FEepKSjK~z5fKJG1Oh*h^hr20`tfvni~k$izSsRqqVM{-Vrckc{|q-XwioRtciy3 zQ|Q+;HokvZs}m5ygauBM5430mH*WuljBR87pqM+p^U3?29eJ0t)BN3U5*!BlIy$+K zSpK1`vNCLT70VeGi+|JL@OcAgLGI#*Nz3=fXGnaIjDM#Swov}PnlJaLKOE-8%{zDQ zu>6q|-}gJ;hCNmS3me;sY&!!FxwHbdx-L)>2gXq?~K#pMQ!1^y7@ z$QvHJ=FrF^>K&ig@NWX%{Ive>pdS5v{QK(&`*0pHw`m=pS?4w-Q{OpM7?$2sQ!Duh z=fyCvHKVN=-3U<`@FRF=XtbEg%C<}f+%>~-T5IZmYeYMb{lN_)<5~QGF`h#1aK5{O zseBu)rsw4qwa$6<%0t6!1q@F5@-i|SXD36wt~BrV6SOBa!%Ip^E?~9*)sw<(zJun+ zYbq^QZTeo`?EiAUJq!^1n{2q@siGV}8hDu3zuK-+h=ENzL4eSttydL#tw%^fi@DJD zFm6744drWs>AOd%-q@TeNiKxlh;j$*6c15q z=jRoKA~U%FlkXoZLyXf9tHNn>S{f5f8C?wNCQ^<^I8HzC2nK)SD*uk;e8!jYx%bL% zI=L)%F^Q{?ii2$ULZ_uBIhvZyKTMw}Q?&8RK=xX}q+rx^obzJ4i&}R#9-?FU)7eMo z#RiUae*8TX*}{E8H!7jWf|;Hu5=IHRO#N}n_Bcu;<+Vr_MS(F|f=F5E0_-YDxwh6T ztf)Pv4)R*751(zf>r2V5I*(h0QG_BvU3|93xM8!O`RxP6q*;dAl{b znuV41W^+pm>-NJP4S9#gqK_Y^JLFNqii-E&Lst(vKt~Cvy{1kXw{3J4MK7f2<$q{u 
zik$wwfJ;<1vhM#-O${5<0>}XY$RR5QPCy*g8Q%DR(+Rn_xVlb(kXisKwf?)Rs?UKM zCKVFv=P>zWLY$Meo29wOrS208~4IqN&LHa0tnagCvTil%yn@f&US*uaeutf3JZuuQbz`ztsy`8Idnu zP|Tuo`Pb+E@rbIKYQk>91<(~M$lPw-NaS+s4S^p>u=uMF)S4>8{r?@__`iR(1MKYH zUO?<^Sb<`Axe>6}+UbP7UjW`dg|ru9F#nj41VY59*WI*+Y6@5AR%}DJ7XF01+_Vyw zG}RGrMc`60WlS{>PRGB04@KG69JWHbCT!nnCek)Ww#uUSz}xzFx?v06;G5gvf9k

}}}THSnyE+lmI8*85Xf4(dt_hgPt@7Z-}IcW*B+C&jXU^oqAlmM$$uY<|4V!LHfuiG#4cRok2RH~HL z%EVP@>$-lh6pFoz>X@&#o*Fr*r3^WG9a9n`$B=CD&|77ES*#ZVMrDPMWzA6W&VmIP zW_?y-$7__Dm~d1yzD#a0`!_4{Bu_Pla(Mug5O~ZDRz$!(kPPu!zQ4SvvIgYdW-R$rs0qf{h7K|=*xg| z6pgy>8XHI=Sbr;su<(>l`UbPqQvABy-!!>_tDn2I80h<74etNkP$ncT9i6yABQ!Z4 ztwK7-5okHN^3-8mdkpFvf7INZjfc@uHDj7 zsSmuza=-1H#J?SK@Pw6u^E>HtAazq~2ar75AOTX?Tv{@mPWMc15o&rkC%*hQ$ON`H zK1YRA%+FWwhYvuf;w^Z{h9INpQb0^zT-;Y399YHha=QVP6!KG*36hGGxZxSDZIzEw zIGOFL9Wkfy8!W|emIw8130fy6vdQO!d&5@WTQ1_9K-~Ek=oCMTPr~(k|0D23)UZo5 zQwUwJ@*e#5O&y{6@fVEO0lmGwE=M4ZOt%oGa~o^QNcq?FDh>r1XGDI3lb1K)#PYz5 z7yx70Bk4vW<_D1Yg(E5fPWNSyp>w{+gv615aOckftN7tHXw&GYT0jT!tfr|cg-IdA ze-8{jtqqiEIXMr4f!iv8e~lwmYE7FR($ewp@OZ(&zA$Zrh)alXX~8unBHpKih6?!z z5k#CYsMVxu$JqeA_tPLPme-_#7iz`D9Y3B%FhxsSM`wQ;N`>F4G-PGbt-NV>sj*m+ zSpBB3@oSmaPeAH4>+)|edj&a*%!jzj1#sbX0c~&}vLtr{yuD@kpI@XN6gm8`gOld~ zMvYzDwjeQ_oBVy!?O+%%MiY5+fVP!LAmW%C0O~X=vjU2W8JU%JXt=N|Y~=tt+f_Jn zsQ`Er2OG*;oRm@*;Pgzs%{6(kyX6uNtZ5@wc6YDv6F(=6PfR4I(&`3;@eO`eLo|YH zCpj&jKAJH{7s4uMdE{Rj`R{)XaB^BEj>DdiGnF77aAji1H18BS?eKhB;_e78W$Hto%J!^RbxU2Se=cr3BFr)cSWdA{79g2oc?w+#rx;G)P9heye2Z`fr8m8yf`;-$r2r9up!a-yiXc>l(=Nnr&A8`gBl>Z*GyAShT42h2}RhBzkQDUU{I z1U7|$rluZ9stpH!dT{ZBEZ7LK-$StJCb;j}fLQUT6n8Gpq}R^@Rf7uf$TiR})B8!` zp}4=vl)3?o=<2W-|GG2n`4SUp*JqoNl5)3cTY&-zmvE2`a66l;7|skIp=phe#OZ*K zSqqI=xkDySD9qtvTifi=ibO<20*bEl*SKh`Ave@&!fdwfiNiDuzY3)|&Uy^}t`KZ( zMJ1}Et-WqzWc1Sq6p~e-I4JE8hbt=^%07}srKH-&Z0ICGxjhS@>|+ocUFRA4OfcyD z?bWm4J@wg^?eZk%_xIa3bOY|UrdYOAw4P;6b#$!qgm?eki7$U9(?Lr^BYa&*sOjt; ziexiNSO=0Vk0HXt1C}yFz6$#1%WzdVoVYkC`<4Y1lc$b*XkJq0!kKVLqOjIqv^S0Tm(C0xBV&&cbvyk;0O&ERsP z?Gb+N89lpP;ngV{RE(fDc!c3ES)`PVmt;o9*)b}xh?(0PbJHug@Gd*k5391rOQOMR z3n^qw5Bv>^1~C6vYeX^Du_7D9oRMB}@+e(gJ_DJ(KL?cTzKQv2w-0SF-V^nG&rku1 znuaT07n4>#ihOr%Eg26&$)xykFHiMNQ38Xx*J$C7&aX`9oiWNIe{Qzz*UE^2M3 zuKh232Ii(1BJFIN*Lx+HP)ki(wzCHW}PcM+T`T!RX z07mKz9|G07tf~RWsSEhTB|)q}4B|WOqk{t;J(A3ZA+Vm~nVFg@K_R#jPk?}wJw+mWfMmKT-hm6gJhhH!Y)BxQGWM6}1mbl;7Bz?cMAE4Nb%L$%(9D_CF?e?p!+bO#r#| 
z3gk)QXtfu^Zesrn7(jc}6O@V1Vj!RN62@}0S;csEZcb@S(O$*_s=@BMxlJ`ceVUl4 zPRlF5V~@76-ZVB&^oKXv1FCNB7IQdm%XjT_->_jiDtqJ1kp5L&bVD zaPgb1Z*M2d>R-E}G5vBCfQa*CrKbJ`^7C8fL|>9HoZ&%#3$cVkpTqGP4(}No|6&DS z$g6?^r6*gnfQ&0LNH`D50^uhBN27l7_Un^*Z=bvZ+qHm&Byi+1h1!^nFUNOlV|vxnCSk&iSo4eLEY!@AZ0=gA__ zyQ5tVhP^}5EDkUhE!Z8;L1x8-+{(_g=LPVI2g?BX(T)D-^B^9lNSI5xD`aV98QiKJ zw7rAxDk`WQItvP*Eot|&7=PNSDinjw!+|Q65Ie4@Y!O#c;eMy}lfR}5^Bb_6bpzqC z%FDwOW@Bw#;SXVvV>4%`r<#!K`sJr4)Gi1GN5qxlV?=>JHc0{0HvfT%K3gax#DyfU z9#sj>8a^-`WH+-&fOlLJfZIO963x&4fzNk+5N7u-f=XT$hb=vg&iPA2^!mH{g;de- zh^?NvfR<=XeD%4_Y95NBeXcm&SVVvua@XEeQ zM0KrV{*m2=4qeh_gJT@!663Ow>3&?fbi!A3e7C~;q6gc0y~P@sl;4q;3As`%5=Uoa zvGcCT<2$SJYH$MKqe{cjW~gP_0aa)~7yE#34-HHm4f zrrlyj=sRd!p(-c`W=;WagKnnk+% zV1fF?@qkWI^q9n;@GGXx$clShd>IAds6G#?HNoJY7Th0MCYU+&s0#Ds!kN3cct^kH z#Mx8{8`vdWo2swJN{$*Q?5?nf5@FTB4tYhx#B?X8N6=n0d>lfn{u{;|6m{X`=XcS( ze}6s!w8I^aqz^nj=SF};#esA~0&DA;<{iQCrEm(}xn>^+dl;;J$>JqUz^$ZGWMBdT zJBx&b2otXYXr&UBT_=bDR87Y z-I>e+O}1#*vG=(dxn8dH^+usWyo(ez#qbAtImk7r^`kT~G~_h&JZjzTnEAQ7+T;cK z%J$v-Fes?Sk+jId?vo5-5y2fenfYnyR;mTy`aYgTZ5f5N{rj{Kc?CfF619}cy>V*A z>h1X&_DL_32QzZ@>8^x2Is?TZt@3KDsZmCa@{S4)aw>xV^FLTt`2L;JJnjSQ8XF3q6JAsN$9-iwXG9$E=GGjy8c;320y`A zGptD<7BTqNT*V7ta>a@Js-V2;oaqQCkcNa1zmTpO|0c^!`PM#PSM;&F>&DJh6Td~dvwoZM-p zg@$c>Kc@(HG))}aWP(=NMvXAU{oUiVk1Pb^BMX&!YuIYs%*VI7h)LC`4IR}FIC`|E zuL!dn8e{aM1b?^v@uxYWC~PKRj_RgUo^wCob$@*1Wfk?`2vI#B+lUp0YZ#M2VW?b# z-epILYfe3t*zv)WSBiP$dH~}omEpH@mM=PX-ExpsUH{?hwXLu4?&H4Lh4K4LS|rq_ zc^tD8$v__D9f++}gc-I1onDk0RZRO>7aEo?pyIfsfD5cGH z*qoOD0_BsJn>$mq6DJPL8vwbH`Jf52uzm_Ms_(GY*`i&>DrvL3fT0{AIzW6__2UJn zb~V;byl;?_`Wb9Wtm^dlS$B8gRpb*8SmOt2$K6Ij5HraL-ncRB32?EzmGw-}$+wLZ zgf$fP1f!kWkOnn*-Ii-`iIAAs0i=_m_kfhT3$2rDz>{N^e<`A3n(u6B$vz&2*2&bO zq9rGgaqB>Crgq+SqZw011+-kz^6_!6zJyT4)34i?YH8Evv%A}}&Pwp~C8zfww}IK4 zDmSfo{%Xn1*8pjIU1NZBtl!PQ zWf*eVW~?%sA_DH`-J1~o8(F|}1>Q*Y8JK1~GJ24t0BDB**wJ~2BMLz{Uw*@Xc%#3+ zzZZ=eI*$UZW%~;3tJ-%ho&`aJXyyI(Q~AKaz_Bu7?FEWBKX5Mi|MTj+;1etL1h#S& 
zG;JM_NvQAf&B5iUC#<7?<3VffPx}-gP^aMGD5d1++N4D5b%JIfX2CqlxPx7iS+pl>9{s#{xg9Tc14c$adUw%>xAqEDzD>*I5>*zvJ5lS%iA zf%T*R%sSKdg(F`JlmElOHvJv_Z51Ovr92ud$ofAFt9BfOq!Hu6KZe2zA^eLoQ5Cgk z`cK5Uo|Y8f#utsgfi*Z%QISbJGBQro;1%@e4=4K_L3hsUSDaD4UTeJ7=ai!9zYL>Q zsg)&SvCEzd;5ZZX{dwJZ63S`hPQO^_T@*VU8#r#ZXcUaGM`K`OjMYBhu56=2+gvWW zC^j?Bi!0gl3X7|r@+u|n8^Prt)ENgxdT&@wfI%Fai|T8Q170;{O?M4tJd~F`c6BPg z*L_Ll{Z=~o#&MG9Fv|!SP#8?2$Nt6GzBAndtwe*OPg3<%3a5!k+svZmZ=hVCOgb$1 zY;3Z|k;trX3AZ2Am6H9td)97q?l1@SrYm@JX%D?~{F7*oK|$Tpm%{ zI6`h=CWsKiq_PW;lDnAu(IA_Bn>oUR_ ziRsbr0!+Vn0jEJmMy7N?zR%2KHl~|H!*J0W0RaI^C?shddfloPQ-7 zfwP>K@W5YfudfG>kB-(1EGmkK1pE6w=z2W<`*$o*w$nf9lzLZA3nJkAuoC+D=651;)hWO+kf8Pp6@C;Q4XD%MpLV}yt8xp7INdnsHqca$jM(u zK6zUzi~^tM+6$OXSFR|~I+GKs9wjCyeF{ZEN~ejD0Cz_>M4^>{GZOdhn>T5Ko6ZqG;AyeZBK*D%wR5P|BSCEhJxnM6fdSIT@uF6tw^A#LF+m1>sD2w5 zur|Ga-)^r3Ch^lZrKP(4eSOHiwNdVRJ=X!1b?+CRs7L2VK#;SM5fM$>ySV)4lnGjC zqt_7H;_w+1;Z1jPY#~AP1sfFbc>+M;R*MzW(<522wN+7PY-F?w@+*_NH=4CO=!x10 zRqHepx3TUNfHw&Jv3e0#7e`)@<+e)HzSx&<2XfWTmEnS`Q1f<0pYJz-src9Pj9Li{ z`BOR%gqC*4PLU1Rb<2>#P%4|N`j;HGi-Fq4$lg;p@Ozo=HE4f zK?dZ}zi3%CP6{?jq8}sbzp(Z43BxBcl=V%?7&delg^X3SEeSI+$~azNV-@~g!?^8i z$HvSi@GJ%Y&IEjxSHC_j7arqbwBi=N&c>eAB&(OKEn2skDL6^4Qg&Zbb5F$h-He4W zLUJtRcYj$eNyaE3FyA5=d(N(Lu^FK=pY?&7Aq`(`~T5my6wODVgtK#l3VHP`x5 zlF-s31*AlC^NBb%Vr27-(8Yslmd5N-I!|FCdN@w zRz_8$4e@RdA6~w%^Mi}V4I+=?RTb5PqME<@0l6&(D?JhwF#*nr6_)R?o-87?P}{rUo0Y3HFFE!hXhiwzv=siVjk$F{I9R$#LA~ z-stj@$a$!eRT+){=wGsd`2vpKuGM0AII*$bhO z+=?gp(BL%9sdGD;&rx_?fP4z;PZ`s!@R?(n#MH<_-b9O1Uj6QQLB#zalML;y*E$OY zGCq&}4bKC`oPT6NZb@DUETEAvg@yUs*s!{ZVDjO?c2fgHf;}irE?cXsz2fS3u`>I? 
zlHCgrT=VNvfL|RQ*K|GxfYr|h5{#tpP_d+9A}M8^6>tZ^v)^Q5Za#C{$SH#_gK~z8 zho@&5W?kF1uoGXbDUk}-Tt?`KIo|-I+Rd9cQ(j7IGYmCD@o66HYc{pD;$e{BtF5L7 z9X2Y4hGOV^KP?!)E;)>!ss^mU*{lLXof3SDhLF~)clyAmrl_RE51zo)5ol-aak2BE8*(|On>VCN(7^4itU zd<^*M=mI?OYU#h=+c1MzQjZY2DEiO@={T`wM7UM#?z$y-v!_5tr2aCAy63gg3ZsChMqrvzWTZb9}>v4n z>g#(dIsM)t_g9dmmF0TS_Qr%Pu=vz)s@Kbh-f$d-%;xnVkYxiPI4Eo zk&gevi@}Q3jLJ77qt9v4E9B^nxolrndE10Rau33T+Sif9O^k$$u(yb*9pVC^O%Al1;efXjxR=nzkyj6e+FlE0>dm^Z9LKsBm$_#Qh{~nL@FJnpW6~2B(y5&qJdDAbtcLn>kDMq_NrV;XN z^8U3^4jx0wly7nrMa@rJT+gNdY0Q4**jqgg>+)v)tcBxC5;XAxL()`)p{xV@ztlTt zFXh~-q@;Jg3%D7b*j@slGs`0)vU3LgDm5q%Nb*EDI|q!<&tKO4MiS02@P3EnDY1?Z zAG{xceo(l!k)G@h_|G5g9URO`0f$h@sTOyu)aacDE^cfHUd*U&$*2s7lmt-9&@Pey zuFrn>-!$BujDL>*I)Kbjuo^Va1JJq{)=c_U1c`!jnN2v?kF>4YxJBh5j`t=wYOG|4nQ*J6{Bt1ANvDPC&caRuemWt>-K*PR?&m z663PcqRG#XfdAPQhY~VL$Zfv;_Iu66!oQt4W_}*&JpH#HraZo!9qne0K3;@OxAxtI zPA@d6vIz^J4+M>|9v=HxPv3XZUWY(84T09}-@j>;9&MP^%)23PJ`*1Wb;%j5*wvjM zRMh#!N0shoi}Rwp=6%5+OA8`WFM#>1;F+Cwd+?wZh`nHl_Nbqpwvxq*Dcajx*2x5U zdIt!m##`b1SqCFl9^8hS+8^;(($RLTLwegVpZl{x;0}X5-VaE7=Fgb_n;9xHGihOT z#}IAZLT8iMeqh(`&viW#lNWLSv9Ua0F6`h?vcjSm7WBwvIAsBhka!XFTQKMF7NBJGd%l>$3(Os0@WeK{U@oeTUo8rh=@8r+^y-5aQOEiYv1j3qK?;^g3+>1(| zkw9Jq!w-WN$5~q#7${H60fMD&`lTC2AATN2T?bA8Okx7z0=|ULk(lQ?i;u%+XhjU} zc}n@;>CtW^P*j`XJe!usi}_?Ho~=Hj=k6{PGvqwb8G7{ITu+Zwghc`RBX&kJ|2wIU zym0>o3hz=Z=i@2}5NUle*9(p?T-|c$r;mU~X`9X=0M45sKrOX_+^)Df-yasUwEQii zfK)@LKdQk;HfP2T0y~LYVB7S?q3IGcqq63mW`oly$=lyQNmN2&fNP*SYgu8*;^{RU z01KKRY?Z5>#_!Q1e}4q*3AC`grE(`F5!FkDD8cyP3W5g@$ZzRi-N&UNiUqu=>=@j2 zm+(!qfsW~S*zWygh{2fh#KJXVgSGBiAM)h%)Hy)+1Hnk{hW|f}l&uqSA-*@KD-kZYUOZE3{*1 z1R4oKKq1(wlxeYhF>{g|ue0lOb~ws{L4%3ie2>k*jfTRY*^_USM4Dtv4V&1QmBF)= z?4te}M^s3m?~HB^rTSlj(D#jspG(DxQuU%k?=mfE{&R1>4}@8pInJfLoB+X^vIB?w z1l}M6mSBDHE@P@v+|e&Ac11EFO*10DsVDexl2q6Z@-p3mf2OaJP-H4hg4d{o8PzbV zNCZcI3n-f^cRLGuj|z~4e-mv=y32`?9W@YRM#e64i%qh&#Z^=NTs}RXgyYW9_vlY= zDM&ceCq;Htp0GBm)IPuOcF!X!@DZoHYCWqWqIc^+fM7FhX9LFS+(AV`1}Z%MC_gNr+<_L&r0x#xVem0{(o;1Y4GxhK37a-;-j}q;fsL 
z=;|Q+Fo0p9r1Vo644Z4gdzwH6^`KCT^T3xr0ARWRrsoMmMv@1{YBBbZzYYf6oFcSH zCV21Q-KUC|H$sbw(@MX(fycTUkA%T{;`8U_G`Jr#N)3o`oYCRnE8w^)MUf0i_}G25 zuQ53Pz={t@82F3Wn~{|aGhCaJmGEk0V5NY-=O-!XDEg_N9Q3>|K#&q3I}ma zY=4Xwyc=F5%+gyCfHCeKZA@&|)Ykg4#_GS79G5C<3;EYZj#Ud|TypT`dCNR3DvwHo z$r&n!?Q2BvaB%LWe_;USU7hpG8(Qz5J$=eC=S`tV-M>f9AX?~zio%?L!Y~18aq*CG z*dls$abvCIV#m6$5g-Hj2S0E$`g{k=we#4IA2MaNwMM2nPeSMtGbP|S9|X9=Hs@CG z-XotFZII+zMnrVDfWo%==jjUU)Q_twx46T@ZhbYMHhOLa$$OP*@nlO_xv{$bS}h)< z_8*A`z3rZAHi4F$ry-SM9-rO;EapQS8yh!%FE2AwjuYWv9Rjy`3OT*KLpf4HJ>Toj zhOayAKUJujLri3=hWl9msx1B8$ab9&l2iUB0(2)l#Vfvgb%*%s&MY*XE#Rl|b`Hpr16=@xl; zoc3CV6IQ>=jfo^e026m0!J)J~pMA271aEC~vkCG!S5+1|F3R!9C&S0)1DU)PQLlbN zVFzgha3E`-6k`TSg`l=!@?HJE2M4-6dKO8jb&vc{x1S{?k?aCiX$iFD&d*!sv9X}@ z$$;3;m(Vr-(f4W5QgU?onHgNAXHqW*XeDpn6jFo`@<8ZYe;6Fx;sZ>jQ&`x!317N) z@YQ8|Ps3f8;io7M`2Du_5DIWQn1TG8nVGp&_1@VHiw}$BCMY&p-`});ettE=O{ItL z%gPu4`{BzJbb#Gk#S!gDx#STtfAEEb__mOzSkj$}i@dD?N*SfIdQub&f>Z~frDlRDypN;UW!7Onhi^lg6fU>ZT3w4GVuOgxTBj5tv@tKQ~5d#4oy zCb@}Go<}{AUAuiqn-*WEZ3sxxp1Nj2ss3dUj2BNK50Hz`>efJV@@B$Kf8sOPn<9yOSP`FG&Du z1NFrB?{~tToSf_js*8#+KmCc?+apSzX;NbK`fEeEAXc5+PR|Bp{atg&fkoHL1jGFF z4oP(&B@d371YpZN=vF|Ywuubn>reaX1-CucVBYu1$q23+H?H(KDEfZ=J~>!7Ztv+) zFQ-U)cBnLZxc1{l-@X*F_)zEs zj&?1OG2N}T5UvR>>pBGY(sA?8pQ=gTNVxZ_h0X&<181f}zj)#Fx`Dldi=1XS_-Ka} z-rQ(!j~Dfv{5z}>XWLDk#eyX@H8mFKTp8Mdg@`jx2k1Bos@;RY7-guu%Brgyh{Gjb zd6X#Dy$da>2zxiT^>WzPew~N;cD3?HGsgA8wlxImzlBMVnz{Zr@zr^u*?B0(cI8`M zqTPG@8RO|)qSz*oiLbtZDi5@tD8=H|QYe4umCW7$>*_n8x&GV!?H#iBUYTEek3u0r z2xV^}NoHgd8JXF8M7FY5#@8&N$jT-mTekn@zMtp!zwf7WoKBsC^Z9(=uEo~{F4H_}%g6&@}q0Mgp5q@>#4 zFtBEnIS4*)cAB}zKDONo*H)q0{+D^lvzz^s5N$I*Zk_ z0;hc(wjR=*LhXl_T=T}E zr2dhI)jV->j+uPS5PSbEO@y3~t?sSJ)^phps>`J9B(80qPNn2bd7&Eyr-ru>9D}T# zejGl?Nk7U!Av88T;so4^jpkD`S=?=v^YtL6A3}H`+wO~_7~6-(mj0pRbJD}V2A^-W zuKRo@mzS=81aY5~h{?%Op zs1i5>NRI&g3s8tJ$l_S;vGcXwj;8Z+h1<>$yq|E>t<*?OK9=@!u53lzISip<6pSUwa;UsqP|oe@tK|1*0u?e zlq7|Ha`BoCIXE^ZW^s$?sFGR79r9cbdYen$y<_s{D0?hz$svsBlz9sQs)ybi)LvwPyM9e~9pdok> 
zJmh*3<_8l;N02KL1E*>caF_cb(u;xeV32175VIe+;uyF;B>Q0A%gg`3#i#ue$ALtS z5v=7fJKos146!{ij~Mp_ttKsunEzg*m(nez(!O;o5HnP$u%Mv9tvRc5D2i5`a4-D{ z`0AEkWnv_Y%nD9OLN7Jk*WaI-oR)TqDYON>R(>}o7S^BCD>iB-jR)O^=e8g4i7?u#dAtwUyg|IWxUFLHmbk4U@Y?d|Pv zDViCxhleJVJ_T-Fr{*s?NnetPUqB23XW7~Az?^+N2arNaJ)m540Co4svn)aUW#2cF*%)X+aV12y)%Rkg_ zb1t&?nqn4$;2=*?IE;|oV^;D0Ps?7weuo21`0%p*?d>rTivdD-wm84EG_C#pzq*{^ zC*xA{_@L%GCa`ftplEDv#B;@mHrRPGq5O74WtMr1$hp1EA!6;Yg=&Q_gkhQWT%DkW z%#tmtJtpckCwF4KK5433-D*i?Ls@&_vzzf_Xi|gT$-jk`)~~(B+K8)-zpTijq^Fma z5)#@y{WsYLgJd&u!u-yi9u2?o4i=b7s#{wZOn^D3uh#K<%ecyXWnYh<8Fh6Yk#Juh z>@0E`rHJ&!%Ul$7YG_$Mg%;zI1gs$f=tS(kz5vdcp9U!TKQ#Szy)G!|PF-v-{bcrH z^kHwZ+n5;i!!AHQJ9FtzxFJMs0#_g{xK=-cZs8>1t-d-0Lkwvy9LkS2&6Pd;$n6Q2 z;u@IOPF!hY1z$sR^%#l;cW9`pfz_W1@!k6YzMK6^;}gPhvY>ceBZW=z4dB}%Z%|9SzA~IAAY$F@kU!h*e83~!^g7-fB^gEv5?7PeaR4zSSduB~(po`$ z$G$0Nvy{kMd;pkh5rh28vjH&Pz_zC_%f+WPe*`wk;^`8;-u-0GTbFpEGho-u3^)G; ztTmG`+FL-20s~2+tBLWz>UIE`_!wlfG=lZykcOHX3C~*?C`~_r{cDLtB749Uw?NF~ zPF5qx4UVs7GAy|2;fMD2Jp=@PO2nLzHVRu@(?4!HD^xF9-e&FmyfCue4S&w-TKn0Vpe&day?yQ{f5JrN)*(H8GSp9-*XSV;yK z?l+;XeL_7zFH;2R&yU~8D6|UbvfLi4j#hyfx+N5}fGhk4bAHcvEH0mT$+3cri8^i3L08G?xHU#!gn^1 zL@RcHz}h3>hmPT&bF&vH_wG8q%6y$>4M-O@(A%UP#hMD>0pj09U-*ZxWZcwiv%yRCPd9iB?qOqw6-1=7$c5O`8G@(h;)xUg*QA zd6lk5+BQP87s#7o>iI;;J+Hg3D@>Z-v}FrN4gKa_J@k4qu~_GBuTHG&I{&863JCF0 z|C{I%+3yCitP5q#f2hyz`3D+=zIm~V7NizLQ7)tV(;=AWSd;lZo`;B=!pCq_H8i~K zWIOfGn0j*b7z#mOyHXDSmkZ!sLih9rnikmyk_KBRNgKbadyHYYEIo&mha07?JMst% zB`s!cl25-s1_=>RvR(Wn{6-vUZc7;`%&f?4Ri%WQX*cDLVeOQ?Q{f$t{PGG#n71i* z2pucx_P4FgQz;Z0?9x#C_THbjr|xr7G=DF&7@ z=+~#YLpdk9;7?s!1hY4YG&#@*lTA)ga8X>pZte@CyiL&V)WrZO;1o+OY{RCWCVP5n zNYW)N#ls{clMW^63@+RbTurjZld3AI-INoOy4;CvKp0eo9>CgB+(h^)Z3nCYy^!6n z=Wog|^i!x-jIgh@vbj+L*(L<8^y4n@coeq6)pNqOiHCewKeBNo*IQ>NC*PNVd>D&) zlgm9aGRn2I`&Fd@o)3q;CL&^S>&(oEM=joNukzj3Hh@F@JP%k5MEiOm7b^MIDwAt% z@31Z-PxkD2bUpoM`M0w7;{ap+0v^`ikZcN%jc(p#LJTadi?T(|F?K1b$*U~x3n&U# zA}sBo@ErhasB3~SrLl2#77#_?6A=~Z>FW!e{cXFXXGwM7uzJ);V#3kyPCzdeIn`8m 
zj}Kf*^uS3DGU3={io0+Hm=I8PTs%>wWdXzv&{1Zf>IH4Q=%lLw$b9;&ZE!*Wh!(h8 zk4-TQ@qKQ5?NMP#W0Fotapnh4`?lQf%eDGkb5=Lu%06@I` z#T;s_4rw7yF#8#&r>9?pnDf517Je}`3ya3#@9$N6neM4-YSxia1eGRBd_)qx2G6^b z$oftw@yH+MGJS0V^cC#-7ZBBZzCJ!9wx3f&+IbC)Qn<#n4GeyfgJC~p=`QCTYsIyt zP>_eB;p;1V018izE1pv=HrLy&t*tg=FdcD&YC-K3j`~tn6%{6HO~6{T zp7J;XCcZNhwQ+-wkDi!;AzIQVjqT$nKuy4!R~t z$XrOG(-7u?gVKo-W{d#qZ^HE?ea*V7HKxfF*yHwB%%Bl4Db@Ax9R#~;UQp?X11i>F z3fWc9bDz1p-!p3Q7Oj46tTsnTM5OPzyA(bO#U34)T-aO*_4QLLS?y;fOLo_O|CSpc z8R14gy<%681CF+y3+vyY&%wT5-en>(7LQam3`~!^xRB(8Jt;|6&kblw)oLgYV`KQ|bsf2U^HV}OLHo&e;0`jFF zlEi<6lcF1}Ne`;ZYhjb8A47J+#+7{+8wW@9Te6Gs9Z2Zg_aY}D5pspYAV24}aoHpN zaz*3@Tu?6|r{Sc^@|9c9E+XgrwLt5SeUB%O=JdJzrtZbuJZV?op`45Nfqq>a1~?xW zaUtGDW`1stiFB^VsO;#7XOU6@4UN=bknj|H8e!H>uZvfx#gzbc%TEwY)k3ZR#cJ8?8v0HvjQD|_s_e$x@7ORPwn2>Kkc-*EjMa>A5~y46L=im>GB{lr5^=%Klg~CHN(E?Q%b>3 z+PGGP?gDC@)R)WtEzsB3T;*h}9N{rYrt1_4aZ;8>)PIUHRRr#c$34Z85N}wRD z8U!*MxRh1p=e<+Z-zux9xHJPkJ+qC?SPBgdcMJqKWO}%H;3#tf#76~$1jCN0hz*!s zIb31sb?}xso4f_gC!lzBQ&(3va)qUaOX5y+45RiH(DEcn^_13}6y@jJfm?n9_yT{9 zj*L9(HF`i38d<6lsiFQyy2@Y`zOx#B(;wg(&Abh9irc>!9Uax~vNQlJR|kBVDWTrV z&xwlb|9#X1(!9u!5aosK;k>(|y1JQ>;CUWfibid)0(7QN{a?NiURhB3xVRlHy#8K9 zN4F7ua&lr4|FN8Y2=2k=E9g-Ztb6<_DVTU%fih@%1zx>qbR7Z}(>Yun@o8|raBy<) zzbh&E{jDf$g8|;=9xw$d8U&O)xH%U;+6B_Vh6IIjPGTH?1{B{%U|cyCp>sE0_qP5^ zaJX=Tt|7Au7z@h_+jI_3~x%YW!=u!otGPa8t za)S3x*%SRSnjWFBY7bz-0Z8nVfcA8v!VNH?-4Og1u}B)x-%<6hmJ17XoSNy$%;9e4 zRR-1|AbbQA<|gv~SM$M81pS0h_x<9+LRYjr^cONjVov+y?BXn@IpsVXKo^N!HvhL9JOS&SxtkJq`b-XlYgjsKv^_9ycyoh$<^u3R97 z%4lEheX$p!B!4MX16U_#(0?3uIH=wl#RM_IUx)QSj1N7dzH-l@C~HS&yC$yb+BsP(92ko&k+%gnfXnQUQegUXMo40r;NZgsD6^*r6!xua66ijF11dq`pNxMa)M8 zv?$JOb8xFF(i$y zAFHby$7Z(gxoCL8peqMPm%^Iq5>=UOA`gsaH*w8(KY0K~Quh1POrl<2<-eTt4ge(=o?L}BxEu7)1U$&mi zST!&xQiFl!sv}&gLG@8AG*KK^>SpJwANi*(O%MP=;Z}$>bAA3?q(ms1c>ouLOWviB z2bY?WVO&yH=JeLUjqVa^jjPf6Rj3K)m0ptUubk@SLxlZE{tyC=eZRrM zSp;1du&A1&@$>k$Wv_xO6hnD{&~?Wu{S=Y8cmR7!@QRBnX^A9w7SupU6l^)sS)^Ge 
zJUT-6l?(!ZCf{|6bLgFi-;B9+Q*TZQTyXnA=uiNf$yIa(xHIw}!J^WHyKFjbavpBo`f2Uh+(&QaSgB5G;h};gbh(A2 ztWxxvx-mVqMAhxHx*mtB8~bQt`5(1mL~y3ACi-`+Yq-fDHE5nXE)>uQ?X>pR^7h4* z9&8R6w-s;o;e~e5CKcHFJV{imp7ovGeW9fL8i1RyqY(nN;@dOe1%uF=u!ko@A0$l?~IZF-$U2RJ5f6GTk_&#pc zx#$X?tO;*9j!Tb84X;HZSWG%EBw&y4Td)!`j0Meq-teyt175Mw>Qc4g>H7S9r5~9dVV(1tPbFmR zQf+wY2szOM6z*x0Ns|=IzSwG^kLM7Bq}uxsTqMd$M>rKJcqb02*q5|?H_%DKvA&Vv zOHu;HDMT-Nx!^ayM``grb+7t60>i89@^$rbw^_HDvu2AgN-XBOy1E3wq@OPt6&9!^ z(0*irz6W;PWHpXrwBbFA-MQf&G7J@wJ&T|Ftf~d$`x3%C9AXzJ@j||Y+{$4d6;Zy- z4K}B`F&b$l=`jLhn=Rui3+3^ma1mq7lC$E4d#m6%NTWVVrtL19s1^dDG1}aT+;`bX zL>Ep}3+_kNI)y3MteGJ^#Yn4jIJXF*Y~WRt zTnp#hmT)HAB@I{lA)=vaB#^XxQ*!oV z#7+5>LbJ~*?j;x?trnzLms|N!`Vi~fxqW-(QIZ~aIpdBT>&hXO<;d~F`-0+@45K1Z zMfY#$#C`ad;qmh&_#%0!jTQ@htN{N4UO7ltQ05}D2u$PRAud)~edP9Z_HUvQf6@4N z(Z6NJr$ysFd#jJBG}C!#%pvq;tb6fBkYJxE0cLJpw&%15UF^LiaW)~1z3GpQz!z+Q zA33=GXZ=0n?=6Al^t1(8h5b}Dj`)Ke&)ggFs`{`0{#0Q4s;{HtDnt^#{|c-5lNn;I z@kwoQ@r@iVwGYrv>~*|5hM0%9U?x^_Us!|RmH(mswolqM)rD!d1~%1&mU~a2k!uCZ zAtK}GD}OLGDBzx~ZPIgzeO7odYQ0E>XES9pG(o-V!u+@sL+LvI8Ndug52ClM=;%sE7nnN88neMgPFVusQ7T;6M}{;l4#hM<<21apqD8 z0m~trOf5Dr83=2#L!)|nZhCqesfE9P*+Vz_jRR>B-KjTQX9y~iO8e9z4xT8zW_=K$eUPFfbX7I_n}nIZWy#B3NY^vJ z_WU9F*4EY4%`W_++%UPNl5cGH&(EAopAX}wE4Ah7eAOIUx`Yz;X4`C&a{6J>Q#4BF@op1)PBMpGS)t?zS@Xk=9;nBNqg zV{P*BeDiXjbGFGL{6c`|iTPV~`}(AVVORVhe`37M)$fGYu6?Ef9FxR(rJDCqt{gKR zVOAM%KyX5CosWC379rdp)xRe$j)s~c3NZH1Fa|%!^)qB5u2kAeQehJ^xPM=Wr4{@- zi8PZC8N4(C&Br#%&Cc%xY3Nh0N8MgmB5HmrK}QtUiEeGJcdo2w9rO_#YR6MwN*S&f zxV%!xA^p+dYHKf*QBL}KsRxO?%FLj(tTG5Gw7&=Oc@2$?NF8fy@*;=F{Y_UkmTuQ- z@qG6%Yg>T`APUIF)Oy?H`PSumcM{AqLIHv?dhz1)E|JW3yL>so1yj!*6{qvPfTsOE z_$xIn?R#inN@98zMZrBqC62AGwA3*hnjKv*3|qU(l5{Bho;fJ{a{CiDxv#J9M+nmv z0T(i`Z!A$_(Y7&6Dp_x2f1(h4i`k{qxeF0F4AiTe!=C34=~Fpfi9+PYMDc~IK2g>d zP&|>AIhotra`SU`)H3IC#xNqH`qLd4LT)hfFrqRifq_w4|E^;`N;JGA@PU5^UrIahPAa=Fdmu#zSBt@FeHQ;D|eMdkUBhUl~})Cp&AFKDl9CP=b+|@PpaBBwDIJ7LHfB2 zG^Eu&fB($>JnbUYG)m?^sxpT|{TyZum(?xjwH`^J@wNV5qK7%W=LQ(I9!uV*YM*$) 
zaRA5E5Xck!V~&(p3|rF;IylvFy4>XZR65TM-F5ZIC!{986b=lh?gCx3etdWGXXH=@ znK?(h>`uQzk9>>U8Rb@1wJnnok%ZJ-fwpl$UhR)h&D2s9LpORh3N3H*Ks%BmyMfOM0hMJ2cYy^U50I?84v!6`VOFNTVm7=OA;9L$Bkb6@R!bL|(iLKTV!juW5MgE~=* zwPx;w)ZFu=G)>!mw$Rd5mds@d=>%n4XTmV4_#fwt=>CR57Ocs2J>#*M@y2#uJgDTU zw7ui3O>uv#3P*G>pIBr%T({|P?Al|08vgnr`qLGEm+L6k6J7Cm?cP|SQxAkL7vvbO zZrcu&n3^_K@9czKs$YJ5mzDMVe8lJ9&!vFz3gtQ%1{$=(0)EHKZ`%%kSjL=N+1YWi z{Ep*y|4>)AH33AF%ktu4uM8H+knP{YG#69>s-!lgL6;S_$Uif?O*Qh9HMVBBgO9VD zab9IATh@e1za2B%*?M*G`2wTo*!gG$TkTlU9fw?o3~lhKF!Cv8YZ)bALvi9{n&rNbvQ zmiRrmssdf07rN z;%^h(IPj>~kjzmH4O*Q%czVwgQCd!JoN!%Mhg5%8eg z8;Rl{eAbFZF89qP$fusAELN4uoO`#`HT-&el=)`$r0>9LaHjDzowrSA*6rK)7yY@I zB&aO^`ZQq4%}J5`fC+!uknG=SR{6{0A%O`~9o~7`*(0Rz?D3a8`4q>ri}iY0niCE- z#9D_s!pBKK++%aV>4yv3!KM>q`lf;m_F&0v9mzWm=$&U?<{v~HxCD}}f0>$(DPon% zj1WF2{3(wJbZVpaU)24yth{Zc@Icw;2DV;4en#6s8Yo~C zzEXgOvl~2io=8KopE1Bz6P0f3W1+7#T0lHW3Rsp;U5oGXBOd))=Y~qZX)?aIh0JLB z>T`{_!c~Fev#%2!XR=QF*RCI|Z)K>@h>lEPV@R$DIJZ}i5mOA}<<%xIXB%n-BjT<0 ziP!{Xy1tI`A-f#5AApafjTfyNtDH0s5 zt9~mCfvJI!i%gtrDh&)HDZDJqp$}y{SH)kg8Z2tsR?hfYsV-@}(c<0?QXZOve1KnZt3-Br%w0_V$>;$bXj4772* zWL%Wol|e6W$8*;u zzOcEZd`3^V&7~FD5c$j3XYqdybBjM<^Gj-3CC&Q1fmG4D2d$n{e9LVofAsIprhRD{ zp8n6XSOiU=Ww(Du{&>CZ;uNe+`Hz++<<|O-f*8-{{6J|p&j@zn3H%>?zk0uFXEbWl z318|H&zSN%oo_#X^f&Wj;G&g{c&yS}8J|Mn9daY-sYyC{7|JYmt~RByto_EcFNN)i z>{2y{g(}lK8Z9#BUbuZ)+WqKKzQxM^<-4xg5di^TIQCZZ zf$0POikCag)tv&(vt$pi|8t2T$}nJzU;y0KSb31|4pEwTuNAcY`u+R9Ky5ZSVnxFc z;Oy|d>G{cMjLdGv=h4Qesp&XI%&1670n11Yd(%=sFV3mw&w}gdf6-lxVHcgY{ScOl z{9Y$!v2*rVL_C#?`+oKs!I}s{h+|&8F2uNvwkti+O$c&05SvVvb-_N+diG{#76Nkish7^lNs>98B)!gHwV%q53997B~x{Kj=t$=RFJ zZu66p?VnLwqZ%jlIFm#uuuwh~SucOThWOmuh;W*CP@;E^{nGuxsY&q!{Z&oKR*702np{S=19 zfkFM3Sc_bu9G{V(I`>mZprZ*dDynK|f6ok&n%oeZpV7v1X(m!g{Mr>WG`klgV{6iA;qdsDJ6~gf0A2 zybnRq;!{^#g=eVj0Tmw-bKnTpGvBROwL$w_l_jQ=jOf@OJHI7(rwXn8+=h# z5H<5(&buF1xlIa9m#_M_K-?S-!dXU5f&CCOi(dSyovlUfns6zE z_Er+5-D;M&Hf!MtNxjG)wv+?<`t4oIaK%JjPxX*>m*luWD~{Lci6J>&TzrYQA9JIh 
zmhg0Li6uy2za5{DW<0Xc*~fc2eB(b(o_c%SW;pgenqeAcilqgzt=+5)DoZ1u&@^-H^-R_t6-$BCLmSB@&)fP2 zC!%EEIQD#CVx&qUGQ1jjyz21PhwHe<>Q$BmrAG0x51EF|lZ)n<;EAISc@i5&O=q$L ztF}6uNMdGe2{&)G==BaK7cNsISKV^yyeudQ$fp(pUXXUNjRO`Ey2}!poX@-Wk z*5zoV8EXG=cBLh?L%|gh7Md&UJ3E4+T`u5iND-(0tE!ncZPhFD$js(*vS6Moa7zv~ z@L!9)R3sr5wYkq5cDwJ(gT88ukXw^sqaWibYj0Efc|LotXuH*$Vc+AxQ^qAq`}nSw zj!x(<9ww`$g1$3dm?nXRS(maDzv45E9~5(vbghHCNT!8t<#(@Nc;$#NSMOdI;_4N| zrJzDH)AHppCFclEdF82r^8pv@){+L!N1xO@c@u%8%M_kBIjcI9vQIkW={yJT+YzTuR}ZJ&fH(gMrf=jToNXq z8hB+1+Gvd5_c%PnvIu&(`BdHTnLFEQ;wL+AauM0wP~Gsp`&wh_OVx5h+C)LDV&2}8 zC)hPq;RQJ*sAMS|Y#ggEa&APjnnrN!oaqxXj1W|^Gnon@srq>pq)l}=g458jhFu*O zDwTbyET8rqm|P^kO13u&Tti5dhqXFlJr(xPz-$)L;?1j-lKAl0eY0kd_U=VX!%f+f zuQP?)-989Oh?oc}c=wqcl4 zz3{v7ghN9wi1%$GLgrWY`<{-q8r*#X&US11i0ikJiDpN5XeALwV%VxAf1>F4_vT%R zR9BWiOWaETw9nqVnx^}@HFzwx?wNjS!-5Z10@H`t(|ZQ?^p-Lc!X1N<@=&LM5nk+B1Qf#S8>&tp$KdR7$zvsF}#Gh}} zOne(uqU{9eap{!zuZJ_woqlZ~3KE{jL(?buXe&3pM{(LwFpL@0O-&Ms9xeU^`K!r) z6SW*4Cx}>;+T#I%D%X3$qLswnskHw5O+M?%*xgs(AJHdS{;epJ)Zrqqr~i*b;a^uf z8GnHMWAW5`Qv<$Pww>nZS~+~@2SMj$=d5F^It{&N!wE2CpPDaSUfU@(l_B%si6LS` zZ}GKM_=Z|9cEP^xZ^Oh!`_&q^S|HcBqt(P<<;AheWXK&PWvnQnc?Z#T<~^Au{E26| z$(ug(#qX>~jTsMOza;Xpzj)|y%lS*0h==bM5Bp3AGIC7u|8LoUEd~X1(xbeNNLL1x zqRolcA=lVfF1cT8nF#J)IGL94cAi-rnY4%H7mnPsd&*_Z@LDP{sSL~MF8|%a^GPGZ zYgMJQB%e=2RW--%)CX&ur*k($@-Xi#o?<@d3CdoMVDuY2vSN~3LScl?BKeC}5=>b* z_8Te&`q%`D;UVg*)>IGpt#*~ZZZF386es=e!P~r*bMW7bG=Mg^ot<`{9SFvE4|A6b zCp>~Su$|qX$%-ctxZE8RqW<{IlXqMqy_bKDc3@iQP36NEqwe3Ug7t6*ZnnNs&*z9p zP(}vh`3IS;TQhX z)}EdsFFDz;|FdBNikY7_d|3l!_s2qeLXA6qHGQ|XJ6@z9tE!$`^RFfpjFiXLHl)_b z^u{uuy$*{JkSF)@=SG*TOMZ2f>Tprxuu8q4!x;{Z5pT&2_BANg1 z@;)K1FGY}!Mi3qCNT79h^UhjlM0aqsdzo0K=6+(aj7;UBI2=%Z&-&lrcZ~L)M~Wm| zslllxqPqI43sP=A$wis0aruCI-P7z&7s39&r`h=JxrmU7E|GNC zs9eN2*15NAnrnF>-}=4!mzes?LFO&nMxqttKkbeF`w)4`N?QNeoI~t|oZdZ)NL)Wf zDZF3>{W>an7SQar||luW9o(@DsNQMe;r!zk9KPupU386g5|F=V-?bVuJdNBe6*aH<`{7M zGqT?AN|eyisHx6oI=J%EQDaie1c!6d-+w~fbIu6Q-v#JZDub*eAn 
zJU)Gu5*sZ*A3b8XGZm31f>dSK2)h2SZSqfvzJ6!!61|H;8S&;1At4IPyA07*naRCr$Oy$76KM|n2>%$#<6*{0RnvYO>4JI2`H-T*s500TCr1yd6O0pjq% zpZo$OKuGc>e4!*HUjl&ufrLP)0c>n=H`w4J$?7X9+7g(N1e8*2 zc(nh6wqqYKY-1A-U?vh!N|__MJvVU<*NT{XIud8>WQ+yJ zI#wI7j#X0a^V$+!|D?ccX0(3ZfwNnmJn``b-!4%<;{@L0^l zHek)$QY`I(m3qtqmYIyLs2~0$Ikh>_!_Kcv|M}F^?zCwJme2eiQ?gN|gz`M@n`YF= zW^M0C<0z&0v3ymRwj|rLKbi#gkL~;eC1fS$B<}0%JbnMOo!=#wBCdKVu|E zsm(Tx&Di#TXT3w=-64DuqzjlJA5HuiQDcP<4Wp6PR+Y}b)iKAPM0 zF2mxRf=2X-!!@loU@g%V56!$SX0e!z3&ZP!#OVkEr8tjqWj>M3=0|`C1J?T|8#o

V79T5P``9UYA`t9GS?v0rsqd;8cS z^RNwAi*2=zGN0k$;h5vpIyslCm11d_MnN$#WdpfH!Yv-|M_OW!H?Py3n%a1^Fx8k+ zmUIB5Qs)tP9X0o7UMJNa{D=}5QbSReYrG{Ek30m=+vj=lptrZT+6JsezG0NgC92ix znZ63rQpjE@WXd#o!(j4JAUT;#j+Pj^gv`A5aTEzqL==DowpfJ%&J~cWs@2k^(Wt#4 zYMb{a60*>i<>J{!lau`^Pw=alZEmMoElZ!Vw+&c}XMGONq*xs4FpR*KoUg4{>lc_N zBFr#$n+SdZaF$G-MPc8zW-v!+>bUofff_uim1)+X=_+Vlhy+Ko;0eu zsGO#J`DVu0uZ;-b5I2*xSnRZ+1-O0t7Dtc(1vT>3&F2^MrB@{r@lBl>^T(8rMyIx; zjtjqKbaLrP?+e<1weYe(c*lBucxACtywT*wdc)*TSO))TJRT{tX;G0AIKB3*0ZBK!eF-MOvyZQ6h}$IDGVDrxyC;fYttK-L^9`mJPg`S={)rajyu zlt8t*b$PAkFL!fVQ{cxt%CH#0NWa@3enuXYh z)o{$2a<5U0Jr@$+>+DnmjID0WV#xNTC!qvL^*uJ__uRhPQozqMn|F>gp zZFLfh{@<;7Wcboz|#ZOmiG(I*tc3I$w+ftp`zfL); zH4{PmI5r8i0qfZ0q|Z`nC|WEU>pj!m-`y@oJ`TyXPd$YULAtfQ=Tt}@Weg|Wf{+nr9glL1eiS7YIxlrrX}te+Q1`5J@Sf7jW0W}Bts z8N}ZPtY;9+-|2M*ch>#|4F6Jo;X!y z@l4O%2CO4UhRSL@p{`_7J$|%KwCxSmmcU#}V7$7$k9*?vilM_$> zX>y&I7`l`>X1O!DeCzy(yFF@K0>>_a0W}aw6sk8u%HB#x@@uQxNy%aR@>BhwK4_-n zy$FIm*44G*z^N{e_VX_;2~19Iy-ad>S=3Vhxzw3X`&GZQj;(WND&bQeur#qAtg{eU zTie;{Gu=k*>*iYm#p0HZh8MieKCbM$MF+k+<1J6-`Trat~VR`4uEypXr zQx>o$OJi3^m+$TB>Z0~p;&=tuKEEx26CnYm`knE~`t9Hwg;*UujtOfA@B*cd_}jqq3h89}|75d$$DF!xN&(h*b$mJV#d>M-%{_^p(G$67 z?R&Q+Fk16h>`O(R zn~vqZJS6}tpC4Hz89xice54%-TpTZ2dlqvifpWROyYBmc1jct;5&NEObj>lFSaUav z;~!}_x9{p=rSfCOcrqGsei}0quj%gYEjz6Af=G#^{yx63 zUay~R+a}Vf#9yA#b^1r1Nroq_hLz8cU#pbNXR|#!8Dnjp^*MNp`jv7bCB&t2GX~V^ zK=D+mUb#p}Go`o;l*BT{xWS3*5KxlC@O{xMlqiz2@spavf8RFfI3+3@gi%!-yjVQ~E1F`W(Z_^Oc^46~q@>>fSv`ck>{ zbt!xvjo3&jkEl~ zy|LlTOQck12Lio9DBBk*9zG(20*7xn5R5?)L`^&BclzKA0`xl`o}IRBsaLDNU-RV)8$8 zJZ)||y(9YIs&$?Bw*l-ox64F++pD=@`q`xWQ^w9;)Yit%k&%y%jFvya3`ajmB5rQ6 z*YrL2{ApIwNds7u<;nAup^9Cxt~T4kw0%3ORJ>fOUr?)gH#c1W#)gm=(AQUz2E{mp zBrT~4N?iNI$t|vbr?n^91SAbu+QvKVt(ZAr8A^f$9!zdPnGpzPz$cAPDVT(9tEJ&J>BQzs*RB5di`N-64f;M$mgU8 zu#^h5tb-cX+yfSUOX)tYGU0Orl9}2>WU2u(a1l2Y6a*rvEX+(4!!EFCzreQf3dZUT zQ$B}nC*TMLqgsKQ7=@b~!pfAr@poA;XWTZ4hY1-@fj&)4ojqUh;rdD2!iBEo{0o-H*@to43i+-^AZ~C4GJ#@f_zaSpP=*aGi8JnSmt@Sf$eV*+F2eF%09;bUHmX 
zgLh`hU$VDaU0$w=_cj`hcQsr;;|d7^8ObatW`SD{d<6uOK`3$+kzdLLGdMU?u%!#_ zUT;S34yxWP^kMm;m0b{m*<1n^jVFWxGYxQ)K~UsS1@NWPrYM&jQOkiTI7~84g{J{2 znXr*bMG-_S?5hC}?RK$YB#*KgfonN%eGciHuwei$P#USDu>C1y{2X3#@$&E8^rGMT zudd;t9aQT8cr262oKma{(Cf$RKRU!PQ&y0hc4h%Db zMqPk01D0jLPysxz1OqP82@8p61W_Y~z%udZPz^udF@!;tf}7|@O_~S7FG~f-o1$OX_le$$fFL;(e-8_s*>a z3eu!Js1P&=lsHA*wCW=DLetL<2c79!6lHr(|M%hlP5z{L11yRXN}<7u{*b0e;6#U% zCjASHN8pP9iZd7lZt!6-0x=JUTgQq-3YKL zb&)9wpyC>RD?dOoaDsnpf*8?S(g2YXJDet`s3+jE7R z$_?+Ysy;iL9MqB-klZ2x!MdC?o6lKDFn`QU%XZhNC~aUf?e+z#_+CBp-Q^VHpD*$&Sqn zh4v@vK9tzGV{GCjZlM0Q=Bu;o0dzEtbP@t90-BbiJVYsa@##OpcprhmGl5m>tP=@X zhva$~U^c-q1S|=uA1X8kxHL7mK^Zy>AZ36!7g!eY5i=Q>6NR6OV&fEW=jL(r7d@2B zD3l#Tz-cV{9YbIkqH9UUFmTd=q9ViW62wAMN~SlR4c#&?X=`)d7S4OaocP6a<&r885lidZIRXn_8A0TPKn!L<|dE zi6}aw3{3c_3W0JYf*NGr4J5n< z)^ynQH@^J&|Lgs)wEpE}F4i6^Y`aGe$f#G{Pc+(yTLf4QZ{zL#4fn}hRRk;Q$4O)t{s8Cy+4p=%^aL7;%=gXu{ls8Np)w+Zw ze5_0u=(H6a=0n9&*d1`(-Cw~Y2g;aYG5D4Z4<-b+^neiqs26}t3@DEQJ0IH(FN`;!8U~CC!Sfx^_ zv+jAV+EAq4{csQc3+2{dcRDk$_w~h(+z$<&vVde17*IfK}Z+PL_kDt@co|PH%MDMBNy?-vh`8mSJ zA17n+4r#_HbZ z%RXM|1p4ukyyj24to5n98Ks*( z-Ij4f$Bsft(v}pKcO;N96zcUV>~tq`68OoMqRx)b**z!|IMEzv3eEHlB_vE` z!cq*P3T*oM4xlVxNfQa>0+s#vt#z4^H@)`q-+RZE?8h^F^hJ5u_~h1iFmA@UGQPT) zC}}n;WpBA}+hDnh-bI~ON zO6sJ1Br^AcW~O-yC|Y?+O;tX$rkXLd!~cHz^p0GlOkkxIoui~D91d8mv8Oxnk5?Qa zAhD)7tDXj0ts?6daE72oU`0S3XuzWD9mMeeQ|!XD4cHL}4L^Vo0x^>##yqUZI*2j} z-*@3TX$&hHKiN{k1A|orndPVeXh^_G0D%WcPatAb-WC84aD3pYUks0 z0dc#6n_h7C7vK7tl^-}+%6%vb&&P}Ji`&u1GMTkMKi>76&-3qoa`3Br_D$TjXaOr% z$@Q9^+11f;a-}Z}D;3L)RsK3(t-P;J;V3`2;tcAYjAW)K% z{a9|^OmzrBmLiKGtAtN1HJmI=Q#or`{owOo^70j| z|5&EK*w2})?ONeiyxR=MzTMq@?xIl?`g7Hb9(v@lCl)1ODW&*iY4Ukp>8?i?`vM=& z{B{=`>m~}-@0I;vy-%hd{m~Ab*_F2(&m2ysb7}J$QbKURH;BQMqK$@*HkvZ5leAI8 z+S5AW_*J;B4{2vmG$VK_uW-k<{n%D<5Onrn$`iW$hst<_4+!q-f7_D7ttv!JhCN$L zC~VvTh&bRUVJd;BIfd@XMCCoVKJSC?e|g70pA7PyDDHT^3UFnkBlgcL*os9Htu4WZ=I;TX{ucNO5K&*CymOW=wDcr(mrM;fmSr}Aakkqi5c2K zuTvx_4k$|K?TDe%CVEpJmYqUHStytckBtI9+O!WjGlR07f@{zcUHu2^SFj~th(Dg+bv=S0+1-84 
z4`;SIbAQ#NTep9B(E-+YY5XjuY}J!apA;%K(Y{QLu(iFf1Q%i&)i_LekfocYS!;V=*J69Ld`=@)P$7E&PdCupf6>k z(=lLq9ujs04Nt(0rm)WkzWvx39NfP$!BUiLA}Q+!DH+QscvaaQ*M!b{+foSwGkb96+hfc5c#oN7Ro0S1NV- zqF``cG=Qa)az-a6E?17fxi8f>aRS$APHw$tqW1i;TJ;AdH|Q0Xqf1Z(d4(l|MG-*B zbsd~8i)#bcjAh=6o`wa{Vbl>+Z^^^>XTTbWSqzp}!m7>`;=YF{1Z-xYstlB@B%UY$ zUwGsIO4(ji$PAPMQNh3s3pJV<#YFr7)Hko;3K+@&wGP_z)HtR#@c&8t4Nlxq@4C$`^Ya4X*3vGzY^yhw9+;i3Vov8k!wGL;HCJ3F_| z;q#r4p|SFA*^Wl@=z@xMXzAMDo3;{S!(`ep zuS+r_18j2AbIOeu;Pop~#>-u+Glx2&qxcFEWU$+c1?KeWm|LN{NXZTpIna^_;1z?Sq%ROi`URdwy z`rhg#-1L;9n>#1nca0T_pRW+DZzKkfdiPO*t|p!#bV$#Kp^L?-*pZk}IlQt!iW}JU%VE@Z7Xc#)xWtS~>&;;DBGPIY zSD&By)%*Y8ikDrpoQ*Gfkse`!Bjx^X)Pvt)QR^qY$un~&@)=GzU=@qS4qwWb`=wsvpl)%6~#_vlNlag*oJ5Ms{{V&_A_8SlEhn|dApI=_ETWnjH5T zY>&b5fM4IW52#o`)kP#?!VT(BI*Zi$lHcx#buB8I$@k0Nb zELh(^k=$7)6tE^ICsQ8y&C0F+S6^SB_HQn=qkmGRN;U7UiE8mupEkHnETH6Ir>{QO zUu0_jN%SeL@{*_5%(c2IFSRFM4D+4djhae(D2PE(S7L7nOLuH(>D(;cd_yY49H2KF zM^x2dB7kHrR3H#%QRM9`zOx_r!R8UT@ovb(UwxUfMHKg)tNsBIPX(xXr~ zk;F+2SYd&&_J$l*7r<^nNDd@X$>i}z8Ua^&6Q45@mz)^XBge$ zVR-WpP$gHF14*rlh;Pg?09i)Lm_lEyH1_B3`0d+XcO|=PcF(@>R}UW8d}TChrFuHf zzGLCKLz-@5@!Z3v}~OO4Om$1*q_A-fRj% zY?v)`u*I=XoaN0^LJrM#Y0k{|{z2A;Fe*q4rCReLQeDb*6ZGMr;06Puj8LMU$wbj< z30T6@AZAg-&~HFk8BFpd?%FNz-GOnGlG%{uRhcjep~$DD(@?_cYVIydDs9veF)i4p z3Dp3exMwFYVFMKhmhV7FA1o#yyh&J5iMXsG;a2b$AGqNqZ@rq`IrE2K_$$Y&`}+J^ z?G-Fyd}n!b-Gcj^Pl$#^rl8B%>kZC-wk(rba`rrd)mWwUK)zOA?|~YWQa_efTX(Lb zPr7ZE5Qn4nhJ*8Lc%o;*~Ag3uhNBwZHLOB)u43aaRga-0#j zVELg{Lp6h*TZ1wa@OT>A8o*~C*^dKu8b0yCvu0mJo!1r8{SX+^L9NxO4v3OXP18i+ zGKi9a9rq6c2Poew1Jf{2M;VGYD4`2*>xja@8OetF*n2L${PmZ!B~>#!Jh9^(2693|@zZVjg6z^aepf{t?OBky_1^{=>u zEvcYWF2DV)hBSZH*?H#Btk&>2FJ5rKDijJ?Ps;UN%7Lz~u0`2i=$T}g%c;Zx>HcW4 zT)ozn0Swzgjbh)_Gf&YUXy9s@gVbG_x-OH?qZJKoai_P!IUj48YTT`T~x@!5R_~D!^_4 zyEcuZuxkSFBY=wNYA!s0#n+z37WCzx%|@tH_AILl z^}?>yn*UnJMzjkGSd>^fQ7m4hB#-rUq#m4&z!v)2A*G_D!?k~$s0DAOt!o;@2Fl@+ z6TQ{Frsc1qF3ZiI`9i@oBVlfiZty}T3K8`Q5WA(-Y=Rn^F)h#v`@?9T*VAZ@!>@Xa zopZXjw$Soe4ELKZ2V!SvalR8&g@rjnr;k!YSz$O@_9l~v`ZcUhTS!^dxK+XPJVdM% 
zd`k8;6slGf`5ufo^)pSN?l-{Ah!zE{mDcoV8JoRlxd5KL zV-rxMdLL@)Y5~3v)8>$d3n?0~sQIlwhVxddy`TS!7hHN}lAX}R%wt{Ge%0?dBmM`R zNaTN(b*$NRtk=!s0}pg`bQEVh`GvfG=al=&T)FY#qF|7w2_^#g zz7K<2%_b27tl9QGq%kJ-pR(B_sJmWxt`V|`5=Idg8N|)&w?K+$f9Z)@jZhEK2e+QH zWm=Ncp8lIo*NNB1#C*4q(ps2%O*gf5doVK7bm3oUY?GEnVG!zWt#~v9EHIsewK8KO zWl=QI)lojjOzQ+wW=Yfyjwv3;*MB{L`)YldbYi4L>yar4sXr8!geN?J+Xv}v4z^Vz z-#)d=kPO59Q<&T^1o&B?N_lxEEJMMSCBU`-LD9!5qG~^`Sm$i`%KyIhilyQvnV8sn zZX>9^DizCqA+4v(-o!>M^_z|yI?pNUbIT`(tp=F~cKcqlQLqO`RdeVeBOmV0gY)cEKWimZr<3oF) z_BQ}8s?#I5LFHLxAZmkCMtvoZ&IIL`?#Bx*J?*>y@h2DDyi`{7!07h3#T@(R+3cFZ zg_!e#0oLSX?Q~DnFHc16`;tqj1ZQ_qts5;A9?Dk>Jx0WaWjk;K6RsbjYCv!bmuvGc z2xI6i4T)5J9!3nQIf4)#3=U8_4w)g8#7Hj?wVDrm!?ei!nkf-Ypwcg1vj<<8!*$rU zIK=@g1p6ZcmS)Lf)ZBwKQb%iE7A1cMb*#=sUmB8L56>GXW+RL31p-j23b)UzGqfiL~)SM)Stq4AQ-vkDNumLMo zFc4solI@BBh7mRm3AyD%UVb|CN>N&#POAW-0@(V?QB?L!0wMwqTPGEJLFgvXrl?@B zO$E!Y1NH6r@~2*P$*t$I#}{g^&-6Y7SO;?At+wqv(vw}iVW#68>vanPSW{C|>4xC1 zg{gkpm(4DS>UgXIc((I4Pu6bUUsPXrxf2O=5q_vO`2rX%0Amw?TXi)6xvr~2=}yc* zIupgRo;0#qt{t@wX;oCx;Sx)OkC;PFVHXXr4rLjd3PpE4&`G2rc(t}ZYqAh;q;!J%oZ9c~haf})DfP{R_OG%RS7&%G;FBvra8Q3+mr@yMsiXiW8&NgCR?QxZ6d4ZFEdWOX{y4Dd?k544 zTnTY-L0R;qWfSn(aGWS=wF(%c&XhTPxgVi0Fs3mWBYiuZ?q!5F%cNgm3AyusR4TjfF0XLv3qYG z`=>kC;(+og+T_Ay9)xUYvqEbM z1X$X<(?x2B9#My!S{fED2Hh?1a2KV>_`XYa%D`ojUl|~;*N*!{P1(G zTBM>sIIZE~;TW^3AFwR@;d+|?Vm=#J%sXHa*OgcCe#ha?PnIo9j4gKIx90cD{pwGL zinyu_T=(mZh3F%{>c@fo6?kq80gEHxF$CNO<93+stU)Ubp(CJrHsVZTFjWnHV+_kW zfGaLtgUc^jfxczUW(U;#j@NY=32k}pKyP(8))OJ$OAD;RnT5`Ha_}C?1zIXHosW4Q z4X_MqXRkZ!gc=qBl1~8hp-CAcrKN+DTCWQcECek2nJW;L2HZ*Dsh@8I+$gw8LQ;g7`QT9t(x-&+ zkX6EOQ~{F+0(1Dto3D89hh850{Cvo6F2|gl96YaBExj$1NPnfHo0mZy9 zY($Oi4w`kXnNSOLz%5TAY6ftO2D(zfHCJ7LE3QZ)?vOquAlxZ*q%-i`kU4;;ceTDo2#UK@0td<$*-l_Y^BcxSc*}UFaT45Hsc$D!7wa1v{v;3&gn@Y3P0?kt$k3W zdy&@U=~_eq8znP?LC)~`J9pqfqz5%8qZ>q%?~2%C!w*O+VjF6^hXa;l0hK}(X*-Un z1RlG0FT61isM;{?I08wH5NSh&QD&_v>W`5DM^+H`_u(sl_p;}_{A{*yuJ-qQj8jmB 
z?9|ZE2RhT4ucXr}=B(@wmiRYwGMfQPnMNIw#qbmY`HlIy%Ti)81PaCTQ5QBHss?jVZtX|N!Qnx<)4F%uXzEd1ZQc3>B^wr>I!r*f-i zn{6E!(SEBI=V^0Cwwl{24;J|fJb>KoQ-cBaJU|LaCuLLsW;0<}w#-YR_Ys#ir~)!- z)v>B$s{h;neDwwE7&{@9F30+gO%#Wo?|Jnzmv^!6%oqL4yaLw9kKekDU@Q4JiL46aoq*mRx;0qjH! zUbReR{fMO!2s{_U6Ik9M@#+_!iOZfF*V>lfkcivl-LGjcJ!Qj7Dx}u068XvwF_&t< zI@WH<$GYQ|;Ca6ZSj6cbuE%u1@|6*0s8gzesAFBHg9!0vQNkQ)tXgZOw2@|JC`{Ch z7{*NpUw&XW9+w>`Iaw&Fbcg7cUUi5vs0mojH>#CK=LarQ2?zCR07sdKNZ^q>b^>|I zji5ahGNuv&wkUDJWZ=-`cU28>YaFk;rsH3~^hf9a-eO*&oXREKv8lHw(y8xfqkZ!d zT$?w*qB5M}v8mTMX7r)1uGo^O8T`hB>P>(9<%hqvYsyB|>I59B0jo6q5hj)DhvgNN z#jLz$83vhx#7s#{m(<6O2;}q`q2@(4BsH6`O>LTp2AKes_EiM6Q7lgt@X{AufE%w} z7W$`rAE`LgtO5aDXPCNKLI_x)`O->?)OlI$DB)pF_oo9ER{=#v%A1fDF0hL$C>R=SV{) zpO=9k2*9OpYT!fDNSfq8r$kRP++38|&MlBLmkOP^whfveNfr|Kg_}Oyz?L0>=QaSLogd`k zZASg5U6c#%P9hr(@VZ;B$ED9P5FxM&Do8{DMG2?mr_zzH5CFp(xzIlo-s9Bf(|RTi ztCxUV0*xZd-gz0`9MGm-PE$kLJV4(9*sxkS`v&0 zh_E_VWhM{&;0xESUCgFosWjYGsuh1PxvcMB(kwmaiKO!eSff+bmj%8zbo#RFqIMP% z)I3;F*^m7B!%uA4$5%DX(@_a1(WVHqD}$g$C66)$DrV6)TGt6moox{4q|p#Wn$e*q zc&TOutZLL@Cez@|faj45LVI{AbVP^*o6JnVh-|!y_q_djoV^N2Mu2jD4Cz!9zDMWU zT4Ou-a#Cc8UNnkIHUsmE`*SdVvwJqc3R{2(4K9Mff-wzPy$Kt=cAx<((0~*-ruKk+KHjrc^xb20#pZol~&b)p8Bs#m1 zHNioC&r4uNc6Fyt+cCS*e)H;i1FQoRrJEwmzqhlqvozO}p6l`6@|oRV|L(p0Z>*br z@L5`$e$lvtdSMby)X}wp+L^AC6gfP-O~V3PU8iv{6J1x|KJBnA2OgP$LnukfJGf?Ev*V-BRumqe_+AKU8 zU>Ta?n3|(f(l6D1N~}o7(8=ncI)heS=8*s^U<6nW?%G$xzYhkOjC5%qIRO?C%qrrF zKbR;Q`Y<)@k%OOTXqwbQq?r3u&p^fRl!xa6Mj3eW?rlKT0c*M$LO*E8)NwF@m>Ta| z41S}GxKoE69KaVp`O@d!vYsuP_VI_mk5iSQ3+mOz&BIwGuyYMoU%X$v^w0kOz8_Di3|yWFzjg-I2)ZzJ*@X(VDKg239FCEBA$5ZRnB<>o zMtPcLG#b3ZPh|MFPvmeB2%n0geV7)tj|w2&0I(gcJuzwrRUJe}vWBal*N4}=>P$op z3GNqlo2t;qK`9iWqn)${BDM*?$zkf_sd`M$bSLem)mSxjA%&*7)39H044KD@@!qsBj%7irm7zBQ^ZnE)P?&PHON3B?Fj5i!H-D9Ijz$su|6j zMP*mv*73+cWpV^{iIl}*)PYS84#M5%g9o+-JMBu)&M$I3IAD~NETncn*fEBBDThST z#Fghn9{B0U&boH7u6aH`kg0m!+mn&l59d0?VO{`hI5%~biR^}+9#&bbLZx~2E9Kns zu8kW8OUZKzHPT!Ntf<*3?R**5ifIz7rkbv$6&A}JY&3ZA0guA*66Pc{t72Ppl@xf4 
z=%hjF&SLW#l12p|c<*Ic-3LTj5oTa$6GL;AH_gsgdP9pNelg2qK!2N>`yq@>hWBszu*j)zP&XyLr6eq3KIK5YB;b$v7`SH$+_k_Q z0d6VywK|xS0U8z@WkHB4?1+gfY1CZkygs|J7k~5K7v1{q7qV~9r+jBK_GEeFJik$2 z3lY1gx3_oBdN#~CU@4`{k$k?ducL#QW*7DGsUJ3O|LbpT{mhi#5oV{GMEyZISDI-r zOlO$y!w5`Jcccz-Ca$<3K={o7%agH99w^|4Tc|s09Dbq zTnbT`eoWUrn%svgU3dCi~=`G{fHiuBw^2##+i&wlm;{i^!|fA_$)O(U_+2Jh0~MB#7Rgc`7# zUtaBWn$LrJ7MdmmH8S*Th}jBJ9Kip2_tiLil?7WhU?~3}FMQZ!frAkZ@4@f;A%3j+ zoa$m5`=EwJMwD*rbx6Y@(>*MaAuXBs-36jdU`0m?j&zYQ0O1n>U#KO8fJIP8W_nnk z=yMZgD~3A<3iz*m3^_Zk8B2A(7G=gCBVe`UM#av0^(ayUz-a&*el!S_3`mdCXUNa# z03P|lq6C{@%F`dKfIdu?tqC6;RN&Nh;r8FX{NtZ^)3T2(wsoQTkn?>kzxP%{s3*I- z*UXt~XwCs^&v5SQHGMscp8ojHzE%C?XTEaZhyBPZct%RE`9aN!{?Ym@&a;+fSr&X^ zdY~6a25@+Q7|-GQOS5>x&F8_AE)2K_b9}woHgra_wAgt>;6?|r)=)~}Yt>gINxPzRTI&Q#f zfmFys8?F-ZYS9t#8AdiVFuM5wQGtZEB@w0(9SCK*6Kk#Sf|Kb=<|oayA>ddNoxFg! z=vV*w54ZGO(#IwiI+u(o%OhOR*M?3|;QU{m=)8Me&Nu<;;-uUu!KKto+ zp8m)4wm)Yv@Q@mcis9;eI#Zdi&K2#kUGZ}#tI;RB45WuE4VwksSF5-cwJK3k3h#J(JiJ~?&o?sHEb){hhH4Ro=hGlt5 zS8|4c6~;tDpBQO6|6NDN1&c~H|&3#M!!Dkt!US3L*UU*o{ylVBnmnmkQSpbc1I zM6gw}N6vNf@3r{Ub=PM_Ew>abDbSsCkd4{80NvCrB0}IH=XnTNPiJ(ck3!XqV$`PA z_5IkwmmzNwV1@ZqM5J*nfYmyu<#eZyA^EWM&jqoc66gB?h}|`C#2|x^*_QfQ>Uyn#vN6X(AHjapN_o;aE?Nk^lf807*naRF+p~VYdK__{14;Hiflr zr@Dqko43VULCsD8gg|@039y=p;u@@MArT{gl;6OrWnCbWV@|!~sdYRjz%mg~Q;23{ zkwEJZqbopc4F_xo|8)0m?6#JnU?z?muxS4fz>)dbG6e}J!^P7IGjjbUOguUULnc8; zC}~gF9Ll4}wW9%xT7w0y7w%cj4rqXp&r?>AHTK}k|NF%kzT_OXVC}Y!^>t8cv%PyC zZeM%G`p+<;Q=R28XMi=FudnROM(3qMdzR05@bbroRo5%td-r(RI16Q$A~GS5_tSRi zT;1###v#DsOojDqRs^688A<6@=JB9`D=tdm9dGG{&GX=z3!F3oT5~dKLR02cHVF?0 zEKLlfwHIm@Az}CdVnpN;v(Z5fM7RJa(nLmu>QV<;Oe5B8!AALY^P;3>OyAQu(aZ%v1c}KkyNOUco_KkKA;-W4S981hGXasOCbX-)h>o} z*1T)lF&2KXEG3XogShp&_`m(j?H9f67`EMlpE5eJ>je?h9LQu=4b1lDvk~>v?!9Br z$Zcn@S^l-zPG~-_|C4X*{@6eK_wb)Lj8zBoVoqd|c&K5CP{Sh5q=0OM7|>`S8u+;Q z92@U__ZhHx0bDq3z&dPmgo*!Q#4XIX3X^(ud#}LLreIgZK{`To$iym2rWH{Dg@xl3 zS0rj<9bBPnRoHk{vua3S%FN)22E$h#8p1@p8+9Y1KW?NoS^SWnY36_xrbzfxz|K1c 
zfU*OVDjH}$R@kLSYf>7pC{o-kb`QOnO@;@q-3RBSbHU&K*$r!NSj*-t=V`9q#gWOu z^#((>r(@+qvmJBJ0Bg(c;Ww;b+xH){ozQ$;zj0K>-}vD>a@+FVF<~sD;xbJ)qRT4g zObW+0drfO10?k}1TG2kD^7RA6g^%;s0Pla_nQ&MM1|=%$7t0G3Li*A+v$;>@vK;R_5(!+mgL&uPQ3EkQ_%!0qS7Xg zGiK3{T9hcdHJl?v=w41REgUVjVp1YvtAa$Onl zDQcx_$!zVLNI_uzDvsOVy%sjFgcQCSu$rWkD(th@B;{S8886=3XlFJFlvE?(BW^0B zYy;i#C@4y3Py<^{L)@f>;bCWP-3{6_;nMH8(j4qw5J2za|KNpy6_XJ>@jrtQIqDWol@_$lq(KoV(z?^!NW&towZmRn>H@?$ zVy=&~dTR&%?UT;L`>_K?kD^Nqf6;?ncdc;{PI!s4~ixlASj z0hXpUp^WUX!||!CVUa0Ehh|9-3M}y8FokT?Mka0}LSRJ^LGo3S`Au2uJY;Xx)mOwV zra9kz9b_??KqC;4RvbAeickG)9J@pUjd&D|CScKrgLzb=7#2p&2CyjTl28t|-J3&s zpa@3H927awsW!A1=zp~CO&?Y>A1`E6mEhDbu~J7ol0sDO#V2mR_>J$r)>(9x4%HS0 zo`3qX&NF6}MKEi?Qc77nhH`H|=Zt0le1hKlqu%!GkEt7P|K$BYt+5sGR2bAY9Sea+ z?4Q#)*pE8YoSr~{1& zIT$+AX;3P{*1_vgi`ITOT}K+)vjo7j?vfjjh8*pUWQo(~n$y^(Au^kw;#%X?OTo$1!+GdpGtSh-xKw-or7pV^oF-pr;j|5v@~bG!fbyZ7ea?x7D% z5alo+BMQo^Ybu!&br4|jz|x#diV|{&EQvNEJjA^s-gfI%xaN6)&0Jjz_nUx4o|I{= zrcTs@oS^%L#n0i*iTc=)3JJ?ZHfm|mvOqM})ES5TexW;Fch^=#!f1wxqQV2WI2w`~ zqY^Qr4`$h@F&p;{l=00yz_8r~Vt^rrY#N9BTs#Ohlxp?&PVZ&A9?xne&+)A*a=#eBi?pE z4Md_7>V}vT`DH5AH{(*GS%^BE*RJ9GE7kJM9gCH=_1>IH+;;n&<#G2kG-Mi<&tTXV z8kCPioK~|;v(j3UHVaxGjV8y5{^wB&r^)}o$Sn1!UId^yF$IUaNQx=E|6SMP+%u@R z9VHbK13}p2f*h|(JKc%5Y3Z7T1U43>_AW)ZPO`pghD3{5x;08po7t+x1cn0G>!?&K;i(k~cF={4t zewUUs<#*{mhGC2Ppc%NCDL7CjJhct5O2CtMZULx*L7Rlla%g6);y!+Lc{PkbmmRfYq_tA;L7ugX$){$88)J)|yYrxt*F#5i;S1tSe zTx7A%^dX+@y1)6UdgEXG^Dn<%QCYxBz?L#>?qIbVpw66)LHnSZEq+5vThf9K1FVBD z;i3L@dcLjVYND5+mR=S$o^Z8Kin~)dvq$0MAG#b4gD%aXwHmFl^ba*Oxr znMQ53Av?SvIRyDu$L3J`X*H77o3bglP={1HeL=sNT6UC@5Q3dW8k6rLW&vG^C{m6I z9(XW`p`qDO;?Ud4c{z6vEX39Gq$A%!yF zkQJsG9cHIt>BI%%ax4Q+eD?`}Hz|{e7_ybd-;#&^I1<>>9urLsQNeO7P_{j>|GgUgXdoZdH4YgA8TQ=rt6 zu&G9~&?Y3lo2F?|awqwUbauD~s<8c;)?XQ!Qvni=iEPZ#tk$Bpgk;Cefm+R_#4no%dMSpgO?cC!5wx1G77`-^k)NOL<1abDf>fje>& zSOG*cko46Gs3e0nvH?b+?O9T@{Dw)Wvyq$W&&?Kvt&YAea5)HEBz-pki^dKT_@yif z=I+Cve((mYC38?#z^Q+xZq#ZveKypcrP*C&Zbdd{qaA5uwP0zwm18ukd|TarLMQg2 
zzJFu_hqawTgO<^JQ8kbSK9VL!S3LgzsCy4MyRPzDe4TphG>tSGjaA%x@5b21Hl~;o z2sjX0FwNkAjYBieqlG+72q8E?2rY#2UV&)(~NWqs>ISB^E90Svz)nLb*os(4*XNUN3<@@DAHC0&gq zXzhE*2q0Q0z{ZYD4%gi|il>7NW=%(=T_k}kilhp#sOGLdcXcpqb?`J`S-ydEnMKfR zfod8E6L*ltw619tAZJW#E?`MNMbU6$*3v>4MaV`Q@wN+lzVqexo$%IH?`dD|&oZ6u zWBCiyj{A6DUweHEI|8hY!?Q7B`S-Z*HQ~ zPC4X6^Sk{QHP6+f-IK(xR8~q`5bDdW+kc6bFF{lTmXkwRt|EmJ(#90dK79q=^!k-> zj0g@%qYShqfYo@PN?l<8(K3|DGEc)kxxr>8meM}2%TE(V7{00Lq$fRWX;IN7G-z2# z#~K==rtqr<@>vP6GZ0RxX(lIJlV<`vpLKjxVOL-!R++v(;WR{u$iuQ!gVjjERZdqv+t8{0X1GxKVX z=UumL^3M)Cpzo*cT==;-=B3x%ee0c1Tj!RHVuy4&zeuDYO53{F6@pU$N9{MsKx>8&4fh%8tRVyitRD@+p zC^+#B2!<+4v{(qE(sib0Ro2I_7A?{)aq_F0my`V5)Jcn^yheiCB5j-iA+?}vQ@}g+ zeX)j8mWiHR7CjjkIg@0|s%W?S4k`+R1}xqc-B4v^31Bhk?%F`b$BgA* zs<#jS{qO{Sz0JUEdWoVvAOef4CA*l&@lyqq$4iU*-6RW|nN6G(XHfF6K7bS#FnuP$YY!$fgHyaU+c`#T=#OpR;VF?Y!AYd_M zz@T0X+bJPc+K9_ubKIA|@YcciwK22z*&&mY!)uDw(mP$V`?CWB11&eHX&bP%Ovg)# z)8l`!SOM$W8{;>A=HGAr{;aCeI1+ZA}m$WHQGC9Id zm#eD=^J-IHdfAu-q{etWiz8MUc>jA&L~kCLElj~Bk&y&K0+v}&f%b@!ICf3%dJh@$E-Yn;Y*w-5ib&A>0$jiF-Y5!x9!A0uMS4)UHE zZ0Gx^lHa*NfQ5>(upa!~W(1=Zm_` zefhdGj#*43(y6JbzM10qiU4R94DrLy%15^hmq@+N#8bLamt#Bg(ZFDR7)+> zjTtbNnnpyq-U9laow)kFFT|SV{P9ktvN?D@b7Gu?tbZ#@FG5Nqt6qdE0KdNhOI+RE z%pwg!^R17@HP}e}RM=eyDgqJ#i(u82a?zK`Am>Niu@`eJpV&lbOFTmUdkZlnc0`e+is8fE^K{ zRh)`orWHc9$|zrnQnISM-2Asl#rR%L$$|T&9M#H=B`pl(7o997b3OR!6SMf`HUr~E z7Tr!Lx>vrL5>e_JV3CB%5-A4YMNYYW9-js*CV@$7c;wbC@JA~!MBkmy(ZFingN3D` z)QDVFZdBu*naD>Q@#!}l_<^flyYdTdP49hh(2gBD@+E)zb^YD>pLKPu7;E(sZ3EU5 zo5tQ1u3q{d$FlBWk!?SJBtGTRPv3U`m_Gp6lyKu?CV*9I1+Li6oiUl}t;JQl@g#P| zs(Puerv=7NCq9tvQ?(so1k*JURcBzEB@yR)*I&H=rymDo;u2ieg(dpChHJx@R&`XD zbVVMSuAyWam93%3miuv&iXnoNRoBATphZ_rq+e$zj0EjGRa0we*5Kt)%P96YhIY#UQhhH&5H56r1uA+@GGufIwOb=HAXHW0@2G0Z zcJmclSj0~k!OSR9SvHaw!zHge4Hv&K&(4}i#Ra576|}b;6SJivtTY#f3zNb2#5L54 znJS>IGn4yiN~-36wH|$nz)$WJVJj5r79o04GDUz&)!>oTNyipp6=}yp&P^lZ*hpD6 z98wLEUV|ERjHHolojI>!H%cm6frh3oD0eU6T7`jFR8mGGTozh&ibX}zrxzQT%y;8U zckjd_GX^R~7fjbi7|<$Xwo>wrO7M{`I5qeYz?LQ;bshP3Mqjh`?SS$yz&*d9CKf$T 
zMYW0H8#r|*jhsSRG$sL84dhTijwV=AThLSrF{!jZ~$J%q~ls<{5 z3GYgzKm@S3mkoGap?UTH-VC~PF~aIFUh$HX@v4{hfvU$wG=+S|Mo^^NXGy@_r{J7e zh6F9r88{i)21?2Ns@2Er&;mS1n=i&Bg)#vnu~rj_V(qdCa3rjJ_6KBOoH|mDZBw=! zdDoV1%8qHt??@zNw;S#&5JW|S#BEe!i|YTyKq9c$0859`HFZG&mSh-hQ>-gt%-d=B z5oM`?s^y|+r*Z#8gfHE-8Rp6ZrG>n_wUmWQgCc``>I9pUWB|3`+51+>R<*Thz;Z$b zSa<*A383Tvo(0Qwn*glj16PJQ|D#sh8Li|nQzO%-9kHV4zJ2gK9iQ^pxBY5#!}yAUa@>!IMPRkyWdq4lcv7mZ zQ%73^U}+0WSwL(q!JoK5$%ETHfY>V`u8tyYPT0l!>?>skmx53Wru z?@-e0u9KF4vfvWk5?Q3|_)I^H>2JDJ``N-0>q7^Jl}0^XJ(4>KHet0Qfo*c9(zY#x z&$Mm9bsULNrKx-ng@}VN5!fR=!Gz79n}A0}R=SS5BaRGmy`XAWz*28hTUZjfH)+87 z0#=b>q0G+8sT7KC8rve^`*#oH(O?N?%ruy53;il0JFR-#N>(rl93(Z21XMfecAySLg7<#l zCDvEkxyOBR3<0a;PanQ|MZC7vn!>gLYu!ZLeOSLSvo9W~lXKoU5-+>-eLve#36Dg{ z??T`zg#+c=C$%y{56}^;&Wcx6FqxA{X>6;K52kH&gOD^*sT{mw6{bInZg(21miu`3 zJD-Q8-At1g;F^(i8TJAnx$YGFss}r!URNNNM`TiIQngao(#WcoP?v^uGGYsJth8;H zCJqc_C)qv(0!}I0M9#L6aUHm{ut*n5+Hi$lH328q3vyl3VpFjuaUpes>qIC|u2Ta@ zo#0j3*SZ>KVqGzz64Ao4;gewLq)`nFu>Xjg$zgj8-0*Y(cWf_VhqVN=5nWTpx(~`bp3^Cr}y6e{{EIQq%FXDe8cFgjy`DlPddN# zg+IfWZjCSa#C5;?9pb}L4Z2XZs*;vg;%B;)Bvu)zPW6m5%UgIb^RkYb0q>bYR1KyJ z$fW|T8K~ghZ$A#J2gxs*g&Qc6W0=YyG%b|8SxM3NX1ieoF=Ey~B1C9a5t)=8ktr~-`puRKSz<`NzogXeJ+iu8(Y#bQ166uZ_q%0goe zoR*8>i;)bYW~3)E8VT-;yI5zmiHlgXsT!)`6j|~;ECTn zB7kMG8#ymdV(g!+CFFyv@-j27DvC>-l4K04DhOh3T)^=|rDyN`(wRpX2EU|>V;dfR z_Eg8|eyY``%WVzyLr)H0dD4+X*Da1mpL-XVe|P-a@BH|&t3z))eM`C%=ukd~3OaobCC*Ja!lW_Lw{0;e6CD@TENx{HQ39x!` zfP5y6a-od0MSXXs|5XrQ0hWqeSz%5U!m_TTQ&F#E2@GG6~+(vEg%ae<}D* zXc2-EqH(S*&qVfEThg+KY3ZqY!m1yOn^!T?sJbRbrh)&u>j^xa8NzVrpu0PZLM23n z;YN#%O2s&nccK8RoSe^*jBU-x>kzOA(za)!Is!a%%cDRg1Dk+at%~fB5*2E236poF zDjhgT2~Q7Iv0B+7TvRC7EWilh*fDy;b@<&^UNU&xa-(IU=Du3vTSnHOp3bDhWxYc! 
z#nxH|EShtVuHSLh@rNv5G~ABHL4E#5p8V~78^Q|%&jHLd;)v;9rHL(Bwpy{R_BGmM zxhEGjEL-&9K~}m~fyKJCh=jvrXt|QBQ|M2Hc~sUGgNPT;XJe=ry+CTOU04tk;}r+}-}&F6@zGQ)8$2UO787u)3vM$Cd$W z!-hCrF~2X&)v19FsLwY+bGp%Z@WR0sfNoJ&qVVT-i6QKjZlG-gL zAZM`grSD0=IP^mpHtmWKh7}@X7jW2sfe*gt1T0~Q!kd!Gs5nadH)y^$z>)wnHKpjb zNyd)eudM)!7E|C!mM?2D{$onkFKfk8N~0|=rX}+yuAlk@NZFMYg-E#>l&gT3>%oR9@S7(JcxW=f7+h2x zGTA#!ytKKN30OfKARW5c@pu`d_iqCHEZo?F$9hP~KsL_PdS_}$wn)}kf2XlKxL+Q(#pX#KW}CDS_Z89RN&;VYe88Z1M}_UVi#ms|~$5}-){M^hK1bzowVagTaAk}1|# z?1w@DVfXBSy(SG87Oh@UV<37~D02V+AOJ~3K~#;x$3&rs)W9li@-6)Au_-(_7GTuK z3s~_S`*Y)cXJQ-c?%ILk)6+15v=mf(RK>Mv)wTjyN5p)sU%7nsH(OCFZW*wKhvV$d^60ycJ!J5Ui{;_x(Kd?OZmLRg)G8;s$tZ#)kd zo@c--RmEB%Q<~ODKv$BBYd;QTgSU~yrqqN_A|#0pl~TqOqNf<83cP5=EK9q{IR>&R zS2BRKx{T0MMa!f(h&rfdr7PQ~4npc$+VxXzU)=-JL3kZluW5g2vaS?H8>+}TWCITa zw(Sa!jl@qW`^luRz3O9UZV7((%p`ulv4F``KW5_$EZdO2koyy~I9IZmcV^Sn9nUG&rJ^{Yom{`uV|NkYo68#KdSTX*U`1UjBlHCf9GJ+bOf1i(kf!29qGq5(_LQ5) z*q2VttNKT^31B6$I0a#KD^t`iYL-_LlS%GTTU|;kODS*aG9U-8$gI#s3WEw9-$!V= zD5ttHl1||#>k7DS+cb9C{V2xF@=*XViPLpDh8-L)5zGqk&@Y|_rcJOt4*@Fx0V^pX z;yhI;(?p#?^9O5Lq~s3jF)Ynn<1f1t-vHpoLzk=q$m6-mZkXUN~O@p-hUEYXYqbn>lrfA9Dt1g33kk>WMBj=q8G`)M0Fxb zLyF1U9$=-SF&vTG=6~a}uUv7)N@Jq^J6^aWw@+<9*eg|D6|RW?^&sP*raQ*B{HoTE z7M54TVC7Lmy)C697w%E_e5SX3?vd+%cyH;F*=PWk$l3D6Q&UO{?Im4WIxJdkyaR2b zlfIzhCTx{nMxX29RTmwNS6@U5f{!pRz>CVlHA<(tVO7(LvOucB{-VbuWiH}|Nnl&c z->5n}0UE(|@_T$(fiD^nU5<@j7ALdzFKJY&%9>TYi(4&ADi)Ov3^!&o)xWJIW=5@a zA&=LZc964WO1Bz{=GFDH6xBcx%_H{hK*g^hNU=aXhFS4pr+ZMavUsup+%XRP=JBmS z&oTsV3bW-B(%CGf{p*C!mRG^T%w|4QDgY+7RI%yyXMidN0~1wK$!as%!5FN{G)R}~ zq-{#=gPQ=Ex}&;~Vl9ivGAMrsR@$5K&Ck8+zzYvHHZ9EEcXpPM*^y;_$^Waqtoz-r zuE8D6o~`9qwRZjZTb88ze>5~?l$yQBbG6^A|JQwY-t?q#R>A9rWv5{nCIT}5WWi7t zq>(q*d1gprE=G)y-7MJvk6WyYm=9ha;KP@njJzGAJ6(bwtC)(FHc=`DNE>YDWb9w! 
zXAQ9U<+c)@lP+o@$16gW#7md(9{_#@c@tQg&7ddc!imVprxGfysd_D+5IjYiSY<+M z+dSaXu`60vHLIzvx-PLLmcYn3CnwPXc*O-HeZfG*D*+i7k!>RgY3+?YN7gytdsp^e}D73SS`P*ho2q4@PtE_ z{;spz-8V;{aoKN&AD!r1QS!TycCzq1ACav)I}@;k_Ly9JVF1e@SXs12V%Vt^g5nf1 z#twY&vU73lAwbq*{HLe_&q;1x)&DIFkE)}Q{8NCXYxh_eXpwPWh8uYp=*nVAI*k;I zidmp6HI3TM=0ECI+FaL9c1rB8gqvQ&GDUNca+D<2#n_Y(HVp%Tl%5z;ro<3g=n*6h z$qoh{RYOTwicv}BvB?DP*cRfp&C?jET8J#N+HC<}1#xxGTT)cDPd!31E~?c4u4_wO zB9j#?#wK?xZCJ!^~t6Rc7I$fHP0%!`m)d zgWfF5RJ=s~tqzo$OQg9-&*d1+xz?X>36%wt1e0Q4EUB)Pg{2u6U6uhSBz1uchc0a+ zP|Y)2YE38-85AuTJtxzkVu_?o2(UAh&R7+Rsst-)ZK)=M)cAx=WsE3Bz!;a?g|bJM zyos$I@cWS>Zr?hIZAKnecQ1m9qE%?pr&x0&s3y0mvy%Jr9cYjWLsfF*P>{ik;Y0?~ zCE(tho&_ctzerOsM>N7>s`^QBWun#I3}DTf*U8e3VkytbV>_<=tFu1%iC1U;X}Xld11udEY4;w5V;N~Z` zVsptrHQ$37$HJ`d!**Rs_1l4}|I;jcrOc^gKAl_JdwLB@Fp%Pgtk9x9m<5}PLtYHm zHxP^ikKOVlFirAnR+^%U%rQ%@D0v~pj0EVUSAdiwX$wnNgzLd={1EH77pn=1aKaJ1 zH>Sc8L;6q-(D+UXy}-$>(W{tfyO%|B6@J#loTo3(N3Itw1dkp zJr3P25c!JHE=r`y#G@Pv7o=}h5wnF?q+&<=d#mS5RjG%{%&{Ji_voQNm&Ver3{r6b zlW{F_v0_R=TN0qJZ3!(wo?6xsL0Yl8k{W5*wRVICEK#ITu|^pesTpK3HGzzBD=8RO z3SMZ!i)e#csHW4{TmgRl@CY8CtfJc0hiTsgKNCjQL6tR%yjJAG38dpEWI&pm>C<@I7C&XsMGKw#()$# z)C~bg{vFg>%+q1)sQ7&^!>$#MX&q{*Xd&I2ro^uvyLmGzTgpJd`9T?MmO!yKl?X>o zHTEd9VAPTiq0|Vho9s@cUOR|BLo!%EBKtK0SVV7=(Sx`XVZj}!JZP=V`&@Z*vyR+jc$y8eq_cLuB%CYILh%bvfVSW%Q)NdoMQ?P$P|!sUqM zS079B87#?V;DjDxzY3Qc>9*2HCtx+^@M_eiTEmaVt0v&E8;i2Bz*}LuQX;TZ-Iyx+ zsM;x@2iV~Q53Tp{$i^+$Qi(8;?Z;GTA)m>^aZJn8ecb5O)FENDbjZ;1q2Dxn?v?@Tj~galeBwd< zO(SBnH`3{TKi?Ghz2R?e+Bs`1MT|VsA!At9ZU9TY+#12V7r#7|PCoo`d$GcvC z0Zu#C7KTH~n?Wi=iIN5>V_78Xg6#aVKXt6c0!rd){I@SMDk^TII@5%3q5z~>F7M8v z+jU@hRq2GxtfA_f%(2v+s1?{Lw@(GS8#m2t*R_N-OiB_Wa!P47!M5wFTaag@P>56T z^9Ht-f%`XB@$mLZjE5FVP8KDS8t5vTK((UEg|aC}_;(bLU~pUP=?31GdTzz-Qr6wR zx3CCM+-w1Pc4}60U}EDWw%@S{C}d%P`e&xZ@xrhKuy=~Y327lJ3rm#{&9|`j1X%Uw zL|B6DR&mJ-5B|}2-_yNl0BfQ&v8pmtIbvw}(5(i=yPDWqUak*3GkV!6hc5qG6X$+T zclqËWJ{LSQK)Q{Ngh8wxkL^Bi%OQnTn(_f>SL~?VtsTX4XYn(9U4Nj&J9KWW7 z4_|%`(h=K8F~e1nl>c0n0UG=y1{$?iRSRF1Q?JjfSMuv_%8iBA3DHyZwkYeON?izA 
zEDUie#1{qVaV;#%Wszr-Qokbo?o1;@5K(xL7(OyjFO29OwBOUgkKGiVq*>;c=nH5P=0a-C@@Zy zkvde13J!h=MwY^cG{;DRNn%*4418|TT~r$yEi5(88kku^6?(HsIaR#w#fSXp+wWdH zfHhheT^`Lws|~|=d~k4(+@HN}Ed$nFk8S<9PjR*5Lf-^&@9g zB<>UYCR{g-F!W@xXrZ~7b!y0a9T68w-wWDjml<>S{x;4t-Bx1CcsEo9eh8Y;J%0M~*?!ECzVAhf@ zIVxO`6pLxTpuDf-L(2@aE{c_l5sw>ux+{MF~3u=+jCocy`mhk*5tD}T0g##oIKgB_+O zj3yGo`Xx33R*iYBkz-fDl8*Bfn-jq_2GE7ls9d2iMwbCB?ViFFSDb+5OIW%>;;0S7 z%c_?0W`KxYbQ(0-h2EgP{mSa-V4!vdEQQemUJcChdZ!r;SZbn6|3r~A?gBz`oRpeG zT$zG{3Jw`s28V5}k~XEZ7$}#=agc^rd`1B*x+ZEmB?VAQ`-)gE!7_rB3McZ(>Fs!t zYA7^T1h!7hV9U-K3>Q62geGPT7bPnL&q^Z@ph}9$6U(^9e6I=qbufq>9LmgNT`W(% zPsRoLbG`yfF-KN}$Y#dV5Gfg_3Y4}0Pu@hZ9?BiCwVKM1L^Fiqn zzUQi+O)9|Z1B|4GkQPA#3`+1Ud1PJpw@Uzcj>#nge}oqT7R`F<7pa;-rU3)wqglN0 z`D^gzH}|1hDj*#fk+B>^tQF*>F$09m6eeFkYJw{#Xkh?rmzUfK!eYYJGm|>VuX@;7 zqL~yrWfA5x!Yrbyg>+;|hLg`h;_I>=HZv_tSDQcr^jUxszNQ6_+J$MLOyRr$sPa$9 zPB$!nQDS;*z@18q^jvFf~#Wq_m|0kZY{&B-l>fZ{Mnb^=6Kd z7dO@nELahByGrQG+sI~3;SRAjlh#7P1IA{7(a8Ygr7|{7O`#aZ@O&R$7$IcOKSKnH zrKMIx;9(d)Cc~Tnt`uJbmeSDD%BO`dMPex55kCzQlx{RT8)m60vC)6{eEXJ1^{3{f zwq&)6L>;i|<1X*O0m_v!GEN2zZALqR$8UKGDAN2&Vt{Q=el<9AEv&gz8QTbu^98Ve z_{}Q@77eXoVs_$~N~to`mFaq@ySsb5nIUKyu%1D8c&=RsvWy(mk=R^oj78!jE`S^HY(*ZGAVMD zBEcaFv6*Bon-!@uRM(gfSn4y}wo6Ezaj_tI4yM0V&{|m}iZQO$B)G7369<{#0^*6} zv4rSUH{?``dJM{1=;_a(8UfoUfeqU$*fdtaj*j^8h*@mM$i^|mI1wvI%4rDxv<#JGjR_~| zl>J=JHGQp`>n#eOT9u-1Wyv!qIR%fwcFRV}2R1%DiHXOyD|b*V^-hSZ%I}l+ELMEN z&r))uDwVNF0c&h#=J23eToL>6eM3V-O^GSA3|MzQviS?u<5qomu_zbn$)f=47n9Ri zg&JVVj8QsRH4BTuO`Rzez)B>P^+;$%2VDclMMl8l%V#AkV>Lom@Q}s`-tuQh;KFkT zU|S{FMn#%7u-?rwva(nsoAE1>OL-kzx|#@ZsCLgC0u~`g0~Q0fa{;TSQX#nqRpL=| zM-5+DdXWP^BKb2V2|v#SHjI|BX>>b`Y+m9@(rL!X!nbqqEEjdRdKM54sV3z}^W3BDp`ECF!dCTgIF>KREPF%tR7n_B3 zbgAsES-EWRcg>3UwG3FdJ+$dR&N*@QrG{=s*X}qk@Nv5W7WqL+2a9QX34qJHO|mLU zYb*kmNG8il))rY~`k&F$gc`xGp+s0cI93%s?sk0Y1LvYYS3o{bjXMuUz>dYt1lkB9 zX490c%v`|YVyws3+P1KiGmt!I5;W05e98D~@w^&oRf9O2a&TWi3n!JqtPec$EO7G! 
z8*tB)>tOfy;?(m`hfffbZaE5|v=J(O8%TT)5}o%1nDlE^L4N`ll@?-{iIqhwjKIa; z%TP+Kouw=@p(HSCnJl%0ucXFDJ?DHH!u|p)8AhGlm}g;WDhEGId@ipXNF!PHVjX*? zg01&G1<(s%6U2@iU^D=hlor;rin{3EHiJb8Slg$@FAOVHV`%x%9}I(VzW1$Vz`FC{ zE#G|J39H_*_ftQ&do%-B>U+}!U=d)F%#E~37aJg9sY_SZD!S4YEX$AKGgqI3oLxXn zRZh0Gv@)vCU>L(rTL`04BAH@|m7^>y4OncL)k%g(bO>bSNE6K(7m5sabzaS4S1~l@ zmev^Fu_O0-?*j$={*EW`)V67qogT!gG~89oaNOC0D6-kC_-pEYiUMc@=T(|-Bn9BY zIZE`exKP0VJw?5OJeESXEhb5rbKz4b+Mj30T@KQx;YyfF%|R;fK}(FT5xR zAw>e|4caUd3dQ|%QyEN81CKoc{OG3-VY+Cbz^}dIB5)WSFJtA22V>ZPA+RQ~ApK*WiqAu!6pKYS zsz`QF`#)E+Y}f15T;N2fiu4Wjp-^?AabO^48!*bij`bckKe!&KGQCP3ALl5Epq<&+ zyk4z2#T=b1W#t-C5xwCwF1hgFAAJ3afkjdLynS|dAg-2QYC7@uf$o9dHp@C_`BmTk z@TQMG|AaNy?!PIr`zv4nY}|9@2XB~~H4j1wy$N7(wNPMA)$9or8|Bb}=LgbhII;-J1*|z~4OSh|iz+*3TY<7pVf{^=nAHA$wi0$*Vo@W@R#k(wS_)rtXN!8 zF3!9->o_w@`le>HZsq%%y})y|-;L|y?#r&eahgH**|1ynbCa(f zV^~S{L^Ijtsa0lWZ-7Oq6A>vPSBR^sY9fVc!{n)*OS2zZp2Z6{E9uS>sA%EpOfYyz!>#9o|50Cg@T;ES87VN*0nRajht$5wOTc z);*t;g=P8~v4+T_5?O8jo_Y4jMvqg(!OJIc^;OS9iY#$TASbS6U678en!#n=Wyz;@(gZvX*9E+%36};Z$^e#g7l90iK5lScj7QAPy!ykeKY*g zvH+H8sLnVcC8jyA7GP6jPrpIa8g#JHhig`l_a`XeckSoi(tFJUT(09&jLgn1D^5W_ zRo9pFL9E#zNtR3EJZjkrptZX($QuTV)ahFyC+wzK?dQQ~M;W&V!`p%1+_VX|+`18_ zGlB*@O9W`5upn@oC~C<{wX zCu(qwJX7F+!$trAAOJ~3K~%xEyPpE$E?6PO9s$!a5cm}8t6OHB+}iE|OOcaPRux_T z1U~%sbKm*lml@yeR-s3KN}VsJ$%ub|6h0GL2$ zztiCIw&3dbJ`cwp=D-$p17-;I7fr>cv=vcbe2Q#Jpi*v>nyB>%&)%MH*WdHyr5ziy zGiA8x9+<9)>Bt3uw^*F|M~vC*fwk!20G4Naih^LUPENIAv5CS;9)-U7GLd`}IIi&oj+lv90(0 z;L|(bee&T;{;k=IJXia@_S!8s{POORmwVO{1Z)}4k{Tg`Hb~*>T0*{M5vfxkLe_QN z!XjYhBui-6KD?*|q!i`Ss)op61fT!dxzZptmd-I+kWaM|6SpT}8Y_mm%HpUAu4Pn| z@SUh1%+ro)cDy7Y%?|DC#FBHt-hKt&Lg-j{=#daV_@6s4I_|)9m!smDFr2isKac#f zn&A=2t7KbK(~GKn^w8J{uz(dO9f%c&XW)y4`P4k{^ zP62w-j`dH)pk>B3T5ave0Gl1>J?$s&(+Y)aW(5xH*pw#V0!W-*KCt2OZ74pr1;8?q zqOjPZ+P2|`Wrz}DW9PYcZYa5)D2M1Ij(`}^QvsG4&0vW!gRg()f)oGz2;<>)F0;_b zY@M0u_hzOp@9N8Zb6IZLNVDf}3$Pw~cKWoF4(YkC*^4|^`(64kTmS9)-*36B>aIW- znTny#sHeP1U31o;fi1wI0`j7qDRpY>u*sUbR1%P{BDP~h#24R#8-^In&)~BkI~^Iz 
zQ)MT$My(pKR0>|{m@B~2^>1|xizg&xC5-nP;ZrTf#up>5SHnV8B{oXXjRJw$d=68k zlCaTZ*T(PeEaGQBe;DJ_E>gK=D3=+RvPlw4>Rm+aTW!9wuq3<0)y6ELQNX}S2jYOk zyVyBhxr91~MZn^HH)QwbxP(o_==dyiWR%rP+xu1RR(UjX$HT!Z5_%%4?HqHrJRV%vayk@mW{TPV6lmSUt`e_<|n^^ z^npup!Wl!7B~+c3!zAmf-4E3Xrm;_Iv+qlS>~kXR^NySkUTw|Nz2A-c5Ge4Q@50w^ zfmW3My?&Ps;*Tu7qS^&pkC}9>%Ro1aEQ-LrH$MZ6`#_L^8K{yh5fijHpBP5q7l*vP zP5>*pyGRA{saLNW21oMV7}R(CF&x)bD1QC37Yv-bNE#qBOIWB@U!AtPel;+V-@f;^ zm0)qRJ2vij|5M{fo_fU6C)+u8{xP5Y@8Q3@`GL(Jt3+8;sxeZAfu)%e{`S%{mY;p7 z(R2%Ip7$HK6fU~*i}(MgHX)G#p;KZfxqp(4ZtEWJN7h$=XsEAR;> z?9G>r3V6X819Gi!G^toYtCAKIsU2*3q17SClVY*7Y6s7WPQW54 zO1BoX1Y@uZr#}B+gp~cF=b|N)1)=yq$y4tmFll>CTR3n&V9jx78)R)7K}|{X)B!hQ z2c*bH#?8PhM@SoNg9dDPWCmkTYy+w(m_Ds_RVu(n8xhBtfAqXuK)~W0mCi%V;EE}1 zA{C9}qSeu3pZvn`B_1Zhu%i@|JUs zUGd#!FS4ioZhtZ!`n!L>bz3QQ04lMAQq@OqHjiLtD~=t)t^fX^(_YZZHGcAFyy~r= zx@|Kxv83WbGBAmSwObhyB{e6h;9USqU0J8C<4`F#60lx#@lkl$i(Ht|w0v1R*^-!P zk~LIT8o$Jrd@)I8rF|E`H!85`(uHJC$5Q$4`GngHkOqEr^A7yr$9ExeSEE?X0Z|H8 zz6YUCjF(V3{yNs)#1BHF=aWL|!x>&0%X1de zrOo*0<=TGs9R43ubP1}@ay)$BC^SO{TKkS`1curlEk zE_=(VIP(<13MZ+{(2+^6zv?O=uVXW&q`#FMO^DdyJ#do8s8l={mLu%;nOOiC2fw?0 z7C-v&EvTXwp0Nb5```&diuEi?;GC0~IV1rqmJUg@g&4*!GFwoYimV@00VcA{GLF9R zIJip`U`ZTHBE+IaA*z~^w@4Osb8l$z?WGBxVl^eKdVQCQrjm4uuQi-i7PW*6_&0}v zooU#GIw_QR=vHP0SudCty(B~9X00(g9RBBHZ=BhLTxmmi(2 zX4YUT3}LuwIb|?2j=p#nm%Z-jqh5Qw(UR+V##N6!@$ly8$ZC`k-SrwZTbWDhmTT3! 
z2~VoFfcb(|Y@t@0B}${JO}Z78kd7ztkt@!|Q3o$v9n<%u8j02-DluaowghxZ zU8q!NVVEJDw1ZMW@Nf`0IXt}q_~KV@MWJHAOf5lKrS)KS_aU5}Rc;ERWBZIEVmKMROXZV<0kYqW^A8&a1@1g+o0Osq!Sit!+V~WUFf_HQb*X7DM-Ll z^qHhG0};9~g6lqX=0$Hi%lPen8RFesbmPd#%hIkpG1%XKcPsbQ7GT};)aVJemA6kh zG}m;;);-V z&IK$Tl-KH{LZe{UVxmG+LCCr@Cxns8p%}V&b{p{B|9k>lcNmy0mf__4;Fo+=s7=i- zo4%ZKGezY<7MZdLsl(1Jl3@mjqW~tGYQ!w-iDaE2nnL!lr8wrSA$TdEYWeUHh;>D9 zvwTzX8Z=@%Uv#ZZ*@fP>?&>wxEH>6N*2Vei*H#2qWxzVgpNTENGq*hi*j+%CD3-_( z&AVl#IFZm0q_tZ6&!p%wDkH7*ICPCFgZX@qK31g1@zuXSef6sjHOQ}66x;eOTi(&r z+k0n!S8K_;wuWlmx;QoBkA3ajqX#;O@RvV_pq7!^$ zs@^C7Ju!}d(NT!0^_7Z*?8q1vokBaXWZ#B*@>X8cqS;rIUUjFNMzX2~5)D{&mpxIA zh)kJbmC1y{aNFaO5k!IWC5NEO;Ce!PshMr+Bc+oyipSOrr(@S4Q}0x}teVb1?KgS~ z{8%yRTfq@4Dr5io)#FwkYl!=>D7FooH@`UF-~Z^cT&`(KgDl5(@BWrQj(+R~CocbV zyT|L}Zd@0qZoTuFjhiQ|LC@)d>6n>xYaQYF;zi`D%yFbv%HU9PX_{>j!{T`m0XB3fT*a!FN9+877N$g3=Kx!gP?r@9&w#_kM5}?ChZ^mlL2;3V15nP^|)K zr7)o^E4ndB(4C8fKhwcq>Cs2Pa#j5#1H(4UMrLs2MJK@Q1Hq67 zEZsOlDYA;iq>H-~AgJrLH&7swAh{?pu=1e-+;{UjV4U2lF4#s2!E6OqHia-L4QniR zXH07$0gJCvLSzzWQ<-3vW-$JlkIOja(D;s9{_&XSx3Z4*^}d_7ZoRN4pMS8ox0i6= z^wu_D-L`h(o#!00c2jDwb?p8WF{ z9(w8tgGN(L7J0%?#<6+*TW=|?o5-Z%%n$-$Jo2m7DqtlP05-8?vs4LY?!>C4MSSu@ z&qoi5;n8H`6SZk!B~2vc>mIRehRUZtLeoP?#Wym0qBM-af*aeY_y(MA2X{SE#@D`k zFQ%)#2#hYoaZ1rI=6|WhL{P3vqcW-bqO+h)AHx8>E9A!-SBQY+lwq$-;ly(ehEKPU zuA^;=m4Ya!D|XKY zgd;P6(+yNC>ELXW&CUTzX<~6*)Q^Bgu;Ug;EoJ)_R_q0EGii9z)?U@eDh^@}W>7|V zuoLfk-HD(6+#6FLU)Zth>}=~cZM~!`W#8Cr166&swx`;ykBt5FyyKVsS!cJcj;4$G zy}LGTDMVe%W<7Ra@nCobA9h%C@~Qe>F8a{D_ujQWKCKYR^SWOL}3>KS;JH)uqTwtQrrDI4G@e@m-7T-z$OZZA8c?YnfQ5<)e ziNCq*ROG0PU=|T|6fsp)1}U`>z+z0ijEE%8T1+j3X;`RME!deH{^utJ{OIQoA<7QG zH`Fn@wr+k+pQ11*3sc=@16U<&Xk~ekWTn(lRog7e$*`=0ZeaIYsyK?-h)T0aWfcFo_+27ngx^ALi55-mr zQCtRsNt|`k&}UwEO4r}Ea*eOu5}$wdmw*5Jsb~;DKB?(btaVw$Nlv%Q_3 z#%J;{s3hupFlzv-M@TQYB$3dd{JAS&X{v{^B#CMouw*fdmE{R9>-e*egw?H#2m)7< zY*fZWD=%kJ?OS&V=Yy4o7~l8Nl}({ih~XLrKVM>)K6KAk_#;!Iz#wT=ei)PxmO7QL zS&|Rot}{gcsH5+l^k_9@4o2*e#%p<=@J$TYK{nihvsTSMdee0moUoXh#%$QS{iB(l 
z+_wh1x>~Aa;jp$n*Vb;0PZ%2?|KRy2E_?Gr&A}TUjbC@~AIJX7Om$ z{R0=TJg1dw{QBWI_mH%HkW}eiC=zS4LzmhnY1M zH*21rQjLhJttv6QwB?|RVs#;QZIMbwT)U7B&%%`{b#GPVSN7r9(^sI%Y;Cawhc6n1;Q0S1 zcW>M>{F>CNp&Nz_qp4nmjU&_cT%$!awrTTCFFk9`*{u)t?vIG$*!cQyt3TYfW9Bth z%9XMbGp@uhI_Kcr^ID+q^X#X6_>nsv+!#GC4(Jk+K&tAiVm678Ts*-+*9~-F$Vnx+7K2-(sZiBwszmyv(lP-;; z&pZm*6@X7ka^9=0B&PRgtF4xXdv`lGer>dI)UY277rKBU6^3Hq;k&lM+cpI;HwzTf zcce&3*z2S)NK+;j(6de^CAIXLvn7U?X1Ys^vQb5(^(Rrq&LkTr0+gJmrY#z6zE?z_ zwH5#R(Q{w;m!~@|$BJ6No92#SNzP2gf7w9?47R0O+%{nG_HMdw_@6I2ZRo1zZne3+ zZhAaE^PYz{-$iNPVzDd>;`9?&UhtA*jhmZ0gWBuzAH`RF^}p`>f)V#hV+dUk%8Xha zuAqi;7;?q8wW{M_1(*?`o_BDbGRWp-J z)x21X;HVI5ec@}RS7dScsVfonhRzMBj3jc%GGW>{rY0G_)4 z8DM85nLyA*O!r89i-i!O9VTLaw9d8ks4Bt~DWIe@XCZ3|d2bFAyrL(f$#a-TQ+#+GDLLiB*;u3OJ(C^ zG3s&^QDxBK2OV=@&YBE!xphp>2pNxNYR#GlDYc0i`;$#t7BX*hP zxUzyke<1Y7VPT4&<=KVj#a4C7{UpIT_6o?Q0zChi)D1U&{GeC1vTpYEzRzsha)o2N zPpmm$`AzLyt0Ta=Z{6$*qR5y$JQr^%K+O6u3 zA4?d02up}G3A3HiPv#r@3i4Cw`&BqmgiJb(s$WF~T_`y=K6Bk4@$6Oy)p7;~%WX)e zuw#S_uu{8ze&lvW2_u!QUv^aB?f|IyL; z7x>_HTef{9pI`EW6@8hF?VP$Jz^nj=s!m{Z~`7k6vs$vOD>5GddLR6fM zx<8>Ur6^$>z;$M^B)1dSe*6VkBK2dP0Tu^eLT6-`I}F_VFxf{jj^M`ze0M1x+XnpW ze>{vGJGqRy;F=CRuPj|4_)!(Pm-(sqW(HV2bltKN>#Gh{nm}q;B0hsyWO^8o@nH0J z;ou_<#_FSh3F&;sT0~pAs)=GW!JKP4S(|m9n7Zbi5*OWJd;UGE@LD2?(D3)%e=2SH zWYgzW*0TBq-6g3Qpu&VwTM&KKgNe#puoAL*Mkd_za~7sZ|8nc*ETIb_8$c6v^-4z1l+$AER)+MPc+_t+(` zYVESk?S1o8@zO2Z$N%Vu!2wYmR1aRW@`4KwHSTQgXtmdom)$+QWiq#-=rMznRTKv% zbX>Z5v=|iIn}C(9rv$K+Oa~d}1T1l3uoD?;1Yh{{%dp&`h1D5g$pv-u01`;0DXWR9 zAHz)LF!RY~VT`FW#s0MIYyq*STi$y7CQ2?l#SJYGvlC{p_U;e3Wd$U+} z6KmtKwT{~JbgjgaN+?QVDJHGCr1>f72o)0<3UPRlzehG!vFYI_rD3kn7D7rvvkHLC zJDB}5U^pz-GDIUy)h6x7jh&}$$`CPQSw+S*VU*Y7+V{WsEmvM>d~ZMA*)EsfJT`SI z;wZCvV96iay|0c(`<|x@ZwL(gp0f{2H{A@em7BfkaqrCOLgD8#g;M|0rF~y|I zS~>o0Umg9spWVFvV-wYG#8wv^%Z2ZiWzxk=Kl4*90amBpgi>53n*E|_tD2Nrw8W;6 zGDh&}t1reOeG3CvvXD&^LF7xW$2J{Ak&D1|@Ze(s{^i^EVm9cNt~g$q!J#Fv*hi4% zl~T;snd0eQ$CAQ|t}Xrd1hk~Mn$w13M9{Ku^clxs=^!v$g-9wB`zs-95t+A%`lp 
z@_{EG-3)JdLhA5POvLr54^5zDd6vPhH!NKWrJhp8g1m?QxUmb8aKP9z(~OWe%jh+> z;yYiwc-i?w#`u1`wOuZ~VPxdIj4^osU<1?YoY$zkaOv zneoZlD=pL9a=-!0&N^p}(R79Px9^OPyXK#M`N)L78j-sk0Y9ZuF2T|hu9l=`Piww= z?`v@Y03ZNKL_t*73W5_LlZD0=7r?|U+;}_Qf7y$0${~Oo&vyKU6^p~5TNxr?TH3M@ zTX_^c;0Hh5h@0-10vp3xnNXUY!tX)MRc;nwq&ZplrCY*9V5)4S=;Ko&o{?$B71@<2 zkt*oTV9nvHF?3K`+(WVf11nbQ4oL?WGs#X;--~f4{?UN7E8AN+ku^2YxlZNWIozPV zzPrWAxSDLlpMY}E1TWR4S%eK{8LO&b>yyu@Hr5Qe7by$F;5rps=?caYqq8hZ>O=^c zwV_39+Y;Z~uN%jg2_!cS%bSH&-i$v#|KOkf=Z6nnkUHOme71F4hu?hY0V}@W3G2RN zz~Tel^Gx}IGY-xCejy*O@mxYu9WQ;SP$-|B&!<2C(&O`AXysZjx%%FFZ(Zx0=Gm(d zf&t11F^j_3Yfu8H1g@k>g$68DcnFrvP~eV$Wt&Af@fhCx=cnVMGxA6qWl*iN`|rKg5|nmnW)$eE3x{B9@bH#7>7vZEm3&TMcE>+oDfa5MI>0V zhgqn(t9yDpm9S`31yvF-j6*gu)rE~8ns7oxcuQ^sR7-$23q1C~2E;Q;%7-n_ z7nsOpa#Hv#%BoUrLko>5W)*NZ31AEH{yTVo;+m(@Fsu};@)-KUjriwlE`ITw&NP10 z$!Qn**ezS)jBif7{on(Zez}u#F9=}WwXXCRXC0QkzLOi-+p+iVhzBP}%O5KiX9xNY z9=P=U7P1VVyP@!wzyG(j--2gjw#)4R*~=K-!AM;$*;`mX^eg7O2&H z+i#GA!AVq(kWUqHbpQCKfB48bN4C-nq3vt8aNwrxJ70k~UcY8V|6?s2w#%*y0$2}k zj}ICh9{tFRP9A)BCpWXVWAEG$A2~HS{nl*G`R^ATk!!k<>y7K;?#utz{bM_-gKn7% zPK+sn4TJ7U;jdn}HOZAEc0pl(Ba$qkX;zRjia7K5KD_hwhahYDVAmvy{1flbTKA_D*!vwIwpR zslwgK;t5v6|G=Rot-BkZ(VyG!9*!kXoucnd1(|XyE}fTs-vbLPuJ2(f{eJt#%GxV) z3QC_T4)k1abkD}6#9>i^FD&2w^jD7_eQtmE{Jlz6VK{N+#2yCu&%LAhAFqGVcz)3F zLGt=wS>1q6h~>o5`7Vb=Xk~I@34u$8a9i|J5H2eUU9vE$1i0neX`CKcV9^Bee=mB@ zF-b%WNFYTw=x-_vVdldho@l_*l`SwuFXx=+Y@vJyME50DLdj$QUR0fYgl`EsqeC)F zcQ0V$uDA8G4r>=qE0H0<)$VDjV$Z^W|g83V*wQPkjDeDRWrQEv@=!OxDPwrcCL7-e|^{*b_V!b^nni z?8u+qyZFPHT!yfqgK;rS3cM!3f64s{Z+=o}l(vcnPj?`340;(=nZYYG<2QF44-W^Z z$6dMkUR<0gk8d?qC~=C1h=?gbm+iqVcdkN1X9jUMa7V^p*d*B?2XLhbDcwVqeDZ}( zF2o$7GLhVqPQsEAX&oalE?;etLTd#6QAH>jT?BtFAdvEv?1qa>+ku=sW#g)3gq3pH zJ-NdY1s>?;tE~iThs&yQPv*vyG*xtSjR}lT z;R$9=>5EVKcrIn~E!h{j?w7$ZdND8d{}J-eaS&01WX5pZl{;C`6E@|05uz|5 zhs1T`2p~yYMIP58S{m13QHUVQ0%u6poic~Ds*+VSw>R8(#t|j+ zd)u-8e(r(S*qOII`1*@+y&NIFEpXM3>2e*GnQ&O-m55rI(oHPBY7mISn_fyf;kWAX z|1O?}V`h4w!sLm{y}9^_7NyuY45~q3L?5EEh{dbA@aUrTFhB%^bje&j*!53)B`YUZ 
zUcIS*pU-?Aquf686Wvvw8&66E;|ab%Fb(c`IH$0?(xRmgmuDv`Ujbrv1_C(&6qn}l zVNliG7}#Arcr+wbOfnHU!QMi@(@u_JS6y8uI@{XO)zksdX`e-3DWuIv%Q)J9x;kXk zCJL6*Poe}u0=vZdtG?(;)k-#H1GKv!;!cx=7(8|xrj@WwOCFp%xi64`erO`i17%Bf z;}0i{D!w}v<)_SHQDa_NS$EkPN0k0C6%Ff;GH5F~>*^KjKib#HnY{?mr3?=R5uy%dCr^DKFwZX*+$+J+L zjTTr*kx|;p+R!~7#Ntsn9J58sFIpNAV3(Fgv?Zs6Y8mk9(CLq^Lm{ePS>~%Z5_6za zRFcdx;&DVHaTt`y9*GmbjZ2uv4^D`D-AOnpYLAvkPNu*Xb5AyDWlAYfv<>7jFG8<> z$$`nBc31{fAr6nR1Gipz+{HgQ!~b|MreVK6vZJYcwh@mXHMSzZM^L2CYg9@d*7B|4 z8K#Mbxs!Z@hCh1mj{o#I>yn3_c>ga!vltdKcI5%`lRjEu?d7l}j*TWolH{I6hmKZ^ zDQw0K*Up1aCJxj67WMf|b2y_Yy~7O9O$OWcA+CwI_jhaXVU-1&Wal{)oR~ccAx;up zw8A2Wfy7p(uPdx1$1yJ_Nvq(d((Jl83&LK(_+3Hz1k+_QuWrwM9mjBy?L-K&23aLc zo5LN@F2p^u2>fB&z&gYvyqyxIR6KmSfjlvjl?=1$Fc_l}>9I(a4?Sr4*n zdu`i!qNXn$nV%Y;kWz<5wtC_H9l!hP>=9QCGP~_+&LYO7+pqp`Lv@XRLX73YOepX2 z1Zbi=7US;ANko6N+OR2E+6{vHz zRP6y7;B8AB|0Ua#K$&t194?;0gJ%3_f{gK$Bif?Ilz=) zK~nCpUi`4>;j@k^T)6*JVBqh(XE8hT?t7NKXegzKsR~SbSxg6tro)VdA?is)H=TA7 zkcf_;NR*UfSQb<#Rtc@m%Ae@6T~ zxOq>uN{N^9^CsKpu2Q}XwvpuZ`aDad<{jx@+j8Iqb%4Iec%B1yNHPb+I!GaP8d5k+_;isT5c!?G#=jA&t!4l9vI$bje^@6W_Ahuth@M@5!9H4*}KwFj%_G)LVc%^1U4vEixT` zs7kAL!sYQ5BO&rFKvxBr(Qe3+hzwSZM{hjw@Ndi%)}+31eJ^oaZR@wkl;!=U?|H>ihgow?Tz1@Q+7ux8=C5}9$n04-g57ID9upi_vCfgAs8$BWe@ZVL2jcE2bS*s^`LiERUxEQ3DdZ%x*}Lv?>Ym z;xbG?rTiv}^-+$a4OuWCMH}$7xuc%_=M5EKKalPJyt+vo+0k0RHohYNpQ&tLzc{Q# z@9%s*Kd{5|1a}2_%E&f`zz~ zfrb!p^PR7u%P9auHb4C$hvmk@2!my~agANv^yw8=f6R@6DASFby7R!HU>@o3IHW;N z|3*pU#E2n5lxZGDAm|EMFa%vQQ7pIM&L5pL>HNcmO#{)^p4PFwq3vA8gvLWka+db= z*)O`kUmVt(A9s#!?+8A9-rTao2jkv4X^=%5SJ%luDhedgaXfKE$qHn%5vLuMi3Rh=flkV8rwgKExilms|HK2jG%NQYSLDjd z2}NKYe0P5*mlU%-Xb1u~-SrB(Suq%$5|Xn@I4p|wki&8dNQ!Bs9abt|h=DANsId9U znV%NB3f*o{pVE_D+9R2HU>R{Jeg&o#ge2PV+s(N2x;Kub@a`?7oc#bHsUkH1;Dand29XHOp`bbeVQ4&>+G`+e}{zdyg?Hb*N<71s zw=x|rN+1yV8|4@Z0xX%fWCSUY-J|;d6NgY)Cq`#=bECRubKon&-4sj*X zvo4#18#LdYUc*77T%d>2Zd~+I%+|TXy7{h`5p+smi>||RX@wIjEY9WSVzUwj97yw} z>+XXExbip;5s-_L-K6$C4>@RFICPL>LGekj%yvjl3nmq!{hl8jH}8w9Y#w%Cb2t19Vru 
zNwOFOOW?FG0tGwi$XxRPhAZi$h!JG4R($K+S(ts43Qdebv}5iOyP&!P2#F;>X$`t; zHtshnr7-f`K%R0UaJ62;*3ldSZoKP7blc^ykek3pxpXWpoM8_E&Mza3O0}WV<)zpH ztgu zM$plOh%fQz644XIvnUZ-G9eosIPR!2Ty$;;ys`m}uzR)vMuBt&0ftFrD{jn-9w(;F&#bVx!*b&l6t~czA!dtc zi2!#$_&TcFa$pMuiS8#z204f@K^WLq!0133HeJsbT7ZL+!(oV$htuR(w1VaVQ5P`m zk|!F_SvPX!I{UusPd@DYBZLiu)vRka?RYddC;M+BQozixuRivx!=eiR_S)8^-#T^d zoWW|~WV>Gam}Osk@8TUTvEo2ylq9t%zm1NQN$eq+219pK5Ihlkf&*0%5D!)3tP@A# zg0GDL4~3JO4ig}$iYrz?ZjO`u(bAt#?<8|a->3=d5{KB1Rai_$*Z=KE?|Se}R5s=y z4hoFXm3CNzeg5>ieUw+JXpoo;+a%l`m^wYj?Y&xM8lrc{)Ss9)rOqWBM(1gENPPr?(65zb)Ng+ zpFe&oj0(gBKOD+@=0X!h-sh&S@IWaooD!#WwqU@kM3Jqv(tC9?s%hOKV zLhh)D_ngBRh=7k$Qz*`rP$=fY$||SmB>{(fAAT1btNn2&LMS-`d+dc0gIo3K`Lf|7?g!T8~dq(Y80V)h|y>;2asby!c;&j-@dTXwb{ zq3fzPq97xsP=-DG(C-dw=~gytXI<^jEc^no;9Z|nN{?KOXY?Y;Vo4854AKR~0^ z6$uY;^ruVKN6Y|@OI1ul47!&=jy2<}Cr!lJr)NN61~ieR${s{S;GsW%h!59Gh_Ql%1Xd!Zk#<$CKthe}EW`rhrAtKS_Kwfec0ZP%VX zGv|T9jLAw`_t`3zb=^xKP%rwx6A(y%0BBHJ{lr0s4s3Rf(i(cM>m)`C|oE5^6 zDH29Uiisd>Lb;e69$o1bR=>O#16L>kMmdUI;Z(;g5{1NUD1HXX2|ZfedUfv zUwh_J_@p+BE$PP9-<^j%j|~MeFo#r{9hkBSn^bm)CDIn5TC_p~43cvp zrg4)TmX3%hpr!-3_1-lIo4GJ4*_k`61UqVCg+(a*AI@yk^zWq*dog(#{xp&U6ZLcT{MP&^(ukp^UY8u0bgr{c8Z zN})PYP9ei_S40a#q1OR}0K)p-(#cCHdc!Z#*-x`L=dOspE5SSO0~7_K<) z@bBFAO<~Y7_$J4C-M0Eiax!v$Sze%T?eFm)Fo#7Qd2VI%?dKd(aC3imDplqG?Gsk~ zlUv`bZ4C!Rre*=E4x_UbVrCXs61Ee8QqGD=LJ}lkVKky7CyHBsG8aDCfrl|p&_^=q zTy#{i5VxZ6dcBCpOvprAL*J0aDtloYMC+RYm=aI%p~YQX z47toCoOQEjBy_2=&p}@fcUHviu?ItHax8XnfF*#wv^wWvwS?m%(64*y%c*2E1@Tx6 z0!g1)5#(y!n1B4ZJ0ALfIX4Y<8*ka!ek=nn@(I$7<>i_Eo+>n;4(sKQ+oy_xFy{2B z+0PF4fk?LRw%6DR|NGl_-iiqMh!`>ijl?M^#z0b3bV|tK>#i3uB2ZKZex(B^99xL< z&Z>Yy#G!%#(IE*e1FA;`K_M&(-jfm*A>bh)24Xs;V@L_6lY#e^$ML5pmm{hbz-C^E z@NfnQ*>YgQge?=#d{Sf~?XV8me4-O$zD-Yni+6o_BoumtB=c+%m`=F$1P49;nUwzl zJh}tPtiv%Av){bqswrm=W(Y&l-kW!}-!7}lKgSdW1_aDHpbm>V`QUSFKK%YUGmbi7 z6RZ~%U;7W^%0K^e*(0V<2vf*JoXbT;0F4+(ZHN-lf4D3TswP4(JCNsz;0NEGjj`o~ zy$W$DDp?Q_w@g<8i&#d9+Lz<6L>R4Y=u!SJ919IAWG7r7lXmO2ab1 
zh~bR@hUpbnFGe&qkMIPD1o1DO__|cadwqHAx@Y&R<}$2O61xal@xhA5ATtYIp%oKH z#&+EJ!()#*yF#c>O)LAaluzq}69wkX8(W(BaR1fv*^&p`VZFDVO>1v&I_vD2MYj)B zCsR?+MfWv5^2UUhK7K?H5JxL}7 z7$Wi`iFfK*&qNebVT*Jbt_9qbD>tE?zOVLNT)UrL|h5rVj&zoZd?yyK|>$fj`Ts3~mq0{CK za_X5qjYiOnKm2(8;uV{s$412*#B~jf@H2#bPXtbhBg>A_%1MS#ks%m$xa?a;VAf3H zZ*PT+7&K%;61^~Oo2Ts%O9MF~a#*xoG-Ead(F22Nc;VF={OgTo7;>qbV@8O3+Fnxr zq2mFONASF}^mRZiu;&VkFS_|EE18Kvt6x$(at~KHevil(ImLAX2+U8JO=##uu2}2* zbm19i{J#@~*A7T$dRXbE?QN$hnv^}JH0#M8KH8_-2i##%Yd>gX8C%v=KI1v*kkbZh z*7?4y`ut`VxbxwU-rG`VPwNPaKt_S91mjpxR1s!8#*v(==7AHpA=;f7QDR}?6-QuH zu?aKMh)gvbwjm)Lx8U;ycwCG4+@n?mswTrWh#b%h!&0!LS;YU`zYJYQF5vUIX*qV# z-FBE>NP@y8F4C}uKD`5ddN`?~<8Jd@sW^A_MNF~uMdj*A(!V?0S-DK2Tv;3z1iK44 zYAdcf|L_I3emx_l{O5h@*cUv^7?aj)u6@2BKlgj(dEUDIs%Ky29uS8`4SDO6_>aSp zVC1Va^L{gs9qwygi#D*LpZ$8t+KyOpVPgjYr~-VRDhh*TMz~CsEK4wBF{oYzcBlns zpE40=pOl4cZwNYxQWy*|Q-&|#McjzG@p_va7lUXEkR=^)n<2(@-1X2#d|VlaqvyDR zeR5cm0z@QeiWdf>mmnnyN_{oWLHInqKFJj>?G0@=8zMp0NLxWSZ#2or@6u(vdq)j2 zi@+~7VE&nt{`;{7`GZ=tWzWIgT-!O{c36CJMfURpJ?IC_VXa!ll&#^Kw=OuYV(vh9 zy07*9aSfaF^ShV6-(}`xbvr&Vf+kot6kXypw!7xABtzwzr#1K$=f{Nr03ZNKL_t)q zZy$vt4%H#s9q{QAEK7%B(e}^cu`ANfV>W?JWu7!mnEDv=;Dxu_@WR{M(P?G_3dw#& zA;>CNhY4BHT_k+kVfA%Ld-*(RA|@ObVdEGBsd6P>D$eDfNN{(-zb=e{{*dOWhE`no zmC3g~_MO~Ydf9*he{4%_$28Nir%$QK`}2TTo|I@j5EWCMZ*MXVkAy=Mf~^AUPp&L5|&A6LX^9qLyWn!%a)XOSiK#? zlmQeh>(e}^kdh54Srf0B4B4i`5Mttp!=Wq&g+c}lFjWo;Rcr?e>q5ZOj#G{s^}yd2 zR$QBsb_`G<@2=YX$fyzHZ>z{ZFsGOYY=uSkB{X@_iyJGych=<5Lm@GG?>X`NWo*pd zfB10ijuyw~Ncn&gfEDfnGBbc!oacs-xT*pJqS1y)<5gUL^%Ur$0fB@CIP)m2vPi~D zgha6on<(QABBX(!W+1|VXWyv9)35A=qg8Oa5riq;<>j$6ldG$ww}b=qzW!%5N#ajN z6xfQZ0YKY+DGo;>_SrC?djV7DzDtU9B2(?a$;X%f>hIT#{NVv>cP}d5))<^(#Ee;! 
zM&|y$7mugpkpt?ms0T~7vKd>eDzE>}DPt~3Nsk7&5OP}g{Q2DtRgHp1@B_&CKqLA9pXH8F)OME z=nO&eXTT0NqCo4$MPHry-}f)@51E)%a=6!SYreUtF#oZlfY5xProf<>>Thpm<4rRf zId!VH=0F*uFZDjUhE00-FDqXAw9YIuSlRzk@cH1FQJ%!AC>r8%6Hw`7s13RPHk@|c zXv{xj1fpFL=swahbU0I~8H;mCF=8q;Oc&cj;Lxb4;fZHz@cNRSaMWT5asXy5$^-m@ zly+Es8N0qcOT>&eL`ZNf#SIA%BZri5TC`#|BwupA*Wva^Ac>Mf?VSV0v5QqAFJO%SYC{x^9y> zP1cGKiA5o4I=L)ZT!fWkRdMK22mv*Sh2K6N)29F&4{C=IQ0WNVM$CvpR8%lgfkofe z{1`kFwT;B!@CpoZ6ruzyrG#sr4<=#3($@j808c_^HY7-#-kOgw#jR*{MHj7@EeVPp zMy}rFTzK}3>+inMKjfU%=9X}Y6*az@r)E7?bb$34293j7{3$zZXYGzhzkR~USqEhH zrJ&M9TUq{Jp4j=&^0l?+g)AL{o`pE=_;j5M3=p4|84M!R0c;SFGwht9=s*N1oSn63hPdM<4i9zGA$ez!x zYQMpD?ACLS%ul%hV;{z=M~^bbgiC(Y^2?W&?YyQVtN^OUPxTNXPIno)pn*9iL^+Dm zLJL=3awsO015(U@$V>=|fT(4_QB}T0^eGxF!v>T6=!^=u^TBnfZWdutdXPo=g$b_r z9Nm+dy|!1;gP)8rf_Au>w}{GgUVqot6`q1s=B2k3QCWkGeICo_mI85^;q=gmzHe4EodpQVm4Gg6IPV; z3{|MAintkpN9)A6@(8|j!7(Vyrkz9(vMwS9~4_0jg9(jBz zLh%4ZUoITe-4gCTDCCJduHE>Ly;ubHjL;8@)5Rp$R1a61UvyHjFT3!>^dv1>5_?5y zRkFLHqAUb*SSB5wSSZjPlxA3;+<4X0Q@=4ys2z$Ed23swEYub{Y5KT=1A@8Qm(2#1 z!=eVhwS|?|*3>-hIk9}sP)gg`mvOKDt8vBiZ?5}Ib5PU_Apjqz)gx-h+y=BC3&2N;@iaA?6T{-euSJz(rFP-)!v<= z{o8I5e7-WGdv>8b_i*|g?3sj$(CUf!s|agFf9L@TlBjY54cCVBN8n#9Dhq=JYjE7* zWgq_6r4!FOW313J6wO;z#q^eVPQCD}wAyFWTDb5XdS-lzC9q0Nnn-y9k=OFdP+vq=6Yx2oNC4l>DQ@>IwpRxqw3g zCd4IAFDq#(y6g@-QP-su7{XW5VUfd-6FGVbXZ4wxouo=gbTzq^+uLE0^CPF8;J4xq zjq+^bao4f=d{Bt5EM@{01=vX#gmFhEIwrhgnDelobIPnDyMciOzMLvuO3=O91+G0T4PU)%n)Y|W0d4LP4{ z9>yuM&J>;xD6tSER*%_76yyAJM?n!xt|+b1X%kV&5Z#|>z_3NgS|LIaVD%aczyI62 zuzh0?jmwZo6N5Rh3lMPs{DNB9=e&3EGr6A3Pqp&q#BCw060&~EDCUEASZ z{od0*yL2yn>&z(^+|`_+cA5BpZ0@#Rfe6P7MTqIc&pllcQrXh`P>E^QJ+dMmkMQI4eSB zb5U3Vs?s*c-4+dH;AxWL#qEPTooPGgDhj!xw=OYa@^ZJ0ySIdU@~tQQ7ZbU%5tT++ zH8$=2D8qoLd}INl23f% zaVnbF;w3*zt|E6kEWY2{qu0CRVfjaVtCDQF>A%jO zb7mUGU$6dSoXGUHE)h6Ex+)D34>dz zyQh=({@2-we_6ERA59@CKPu|z4#t42Jis!6P#4N_C7g5oSe!i9$Fn_{6@rgJCnq*UgYVq4 z5aP-02RSURh@H@G+;bb4?BQo)c08({umL%3H|S2}doE{{2t|`&bhbnAd!aZ{uy7;t z{c)T)Yt$3xo;BvmvxhkRCTXJ$wV}ge;qb*ipXbhrWf^Tfoy!At|DbnRWC+4%eYj%7 
z=3_@p88dTcg3@uIEY#2SP(c2b#a-9G`u>_5LV_1@Vxlw^2&x})tQ%Q!CuSd3gbTiY z2r_ga7G+R02bM@=fdbbnv?T?bDuG`=_BJ}>84%=L#A#(Exp|6vUb|jcqGqNmA$i}= z-Rz&@KjW|nra`f*MDU!pgv5BU3-UnQyo323d9Z0f^CE zi^C4dtH1CohkfhXQ-!ztr&9x8>iX)|i4K#F%*n}mu{=}gcd4$v^l(TxEb8FP>tknb zuiSpm_s^aON`)G69#JHUSk3S#LXdF5%AnZ1b8>!=47feN2 z0pS!4Wa%obcm$02v7mD_{hIZ_AO5-m9T6Wa&5szpyd>UATTO?kYv^~23%4>3Z=e@* z$V`F%G;dNDFtNgNVH)JH+*45Q&m2;w(RkpTq}g~14cUcjtdT?)otlyq5j`tGmStFU z5=gYyn#@lCBON}$;B{)oSU*IHT z=9e{Y(0-l-iSBk*?#j{K(s^`0qMhBGR$A0Oa16p!5mZ40i$`6DMf5OGI81@Tgeo%# zM8HQzjW-VLr z&FNQNG+kIbv~9HS)-T)G_*}rFKcU;1&yO7|#D_v3hm^x2gObzQxwZ2BNn;N=eeMuV zd)VKJv1kp;fBCJ*)vqqwbW^vaAM&_$eevv^B0nQ9Y0Z6R~h71dne>- z!=x>uD03Kv&4##4SQ4+m5v2QYSad2Hr+|C{JZs0Hdj!7i3x&EN${u(m8>-oc$>sh{ zUpw`%UtDvh`ov%aP4;x&eBS+Qt0P~t9lO*M@INytUuZ~GxdTzokaSpyl~#U7W6PaN zL;m;X&rcF-4}{4MTpf!lS>|j14*vMn_tszE7MEngTZv{E3OaGrtTLQ))-)bR&dmgZ z-6k|e;i0Ry-i~6)vTD?K7>Lv1zesw9UM_sW(@ciymBeA~%Kb|?EPgadrxmnalp#@s zil3;)!K^TkPf3LMccPF56Niqkc>nY{R7uYI5`rD{aV-cTW5?4 z3|R*~{rsgXKKWr?Q)i|i$?$k( z9Ch>*Oh2*+Wo0f5izu?lJFQ&aj5pp|kESj^B)t%(KgFsnSMb4MkR+WGDDX9vOd_;&_7DS@3BN}K;+^oRQA`+}^VZ47PW|)P zkytc;3hf6EuIv2`jqhY-=Uh9vRQ+fu_k375EE<5PRzxrDZ122idS21#!wuNilL@wn zF=@xEICj;B){9rIU3Xb9)=4NWj2>NpxpQW89X2UHlk^acFhI0U&%EA_wVP|u-VubT z=r9Sp#bgMIOEySOgF6w5cag6k>hgpGqIQu8hLGfz=ZmSt>L;-xa_l5*f?x(NIFs%t z2SzI*W)Uum0IjqPQaK4Be`Xhn0*U!06_$xQfd~m5h_V2gxT&JV9Y>41#UIv59Ir zhU|cfqFm1hbB~(x&q?F(!WBmeRXwreaJqMGP3*UhX+AzkHq>6Ub4OENK|ui~Oc;l0(+fMZ@{y$k zfSLwi#p+tDS-TA_?Qt+M6HE-iu!)+62mzC*c-p=tRyz&}#X1la?=Dn1Up3K6h&&tr zI|EeOa*~g6rQBUWXCiDSkfa%*(1|*lXm4DUI$tJHmE@@SbM!Nj_eai=%HVmx$&hC{iG54Qzx%3=Qdh6>K=%WOTY92RwU z(aNrGRaS1h>8vR;4xMrkF8#k~!2kL_E2yr*nIC+#`p50T#&L=V7(J#G<0nm6UsA45 zCBDCno6-E`F7Wbo(L$4;)<*?8yZ zaU*6A7apxtPo#%du%d8h*P%PM?7XV3vHh#W1z%D&vU}8+@}~khXsl_%VQW5aIpNdF zT2EKVL^w)YGcOdCjvr-2;ucsu4$1r<7b_DSN2;@EpFx%8T}x{O=-9 z72+{2hqiUZx?n*O7*xrIWrd)Kaj233O|y`jt+iBC+xUy-%RIYquR(<`msPuoAOViUT9hHTklJ`-d;6fzHKZR;G<*&V5C^H948S 
zmi)YoExFnFxGad4Xc?sRxb6SAt*VbybT@ZCR+yi6!=w`BV?h{xfa+oEu#&_4_EO>!L9TJ_U%M{9j}~y_M^` z&66dps0@DHvg{+W1DQ|TI$EaW2s=|(`rs*P*gGsT^)suC8PzpApBy=Qprh7^AU?B1AjI9UyABG6MWgo8C+wuHpH}}q zzaamKyiDJHr%fK*%!p+njNl$Q1C>iTCgG6UN+ zU4P1#scZ`e&^=QS%;iyprBx*s(9Yiy~tuwteIRcw12QYbh7I4t({RUv77Z%#r>|bUZFNym(XFFL z_+L0Cor5@B=lYQD*NV#U5y4RSl)RGsr%e+z+lvvOz72Ah4}nl9bOCdm*%HIjxf3%V z9MT>AlExqO4vXwVJn@aSZ7;>55l>!4`Drm5?2I+fx8bKLj3H#8HBXMN?7gLk64yrM$T($X}x07 z=!(-T!|49rFcWR<%>+q5bKoqntddP_ZEO5lVP5WSKEG0_bwO7*nu;)S8l&y{KU~0O)tjI4OJ)zno&7poTsI;6-(L&7wKH}|!_J+*FU-ok zt0=!w$A9_`bh&4CVXM4(oG9 zkEFHYJAx;N!jbs}#f6W3<4|G9vo#Loi8|cdzKSuWeP{H@NVxO#oZO5>n#WhCM?#X; z>5obh3}Sz7k}%-kX$^NAs)|ZU*;>Ao}7p#6t z_jtlHvkNahnCX`fmX~6n`?;i=6<60(|0FBV|H;T=Wl=a{6&+oYw~4lfNkYPxx3iPm zgQ5E@<{V0V_5oc%QI7wlxkbWT1Kr*OSzp>=?WgshT4SDAUt9Z&{Jh+|l&io74pF2xE#5FEysNI#QBF4|&zzddUH+Hf2}c5L`0pv32W zEPJf-eM-gYWz|FSv9!bL!La@7LzdeSZu*{S+tW(3OD;WY=J3pj?}16uxAxKkq%CO~ zZ4WD=-BMIo@COO8*z~>f|6XZh8TEDXj~cooqXmx_eqDnc38L7mJU$^q{{f}q^s@RP z`&imx^8sYQ{@0kX;}>2wC-7u1i%LIskXYb_k6IT*!{PJFM-*NSXmylTL5a+i z5=Ke+^!uwdEN6RT=*8OZ&@qZX6Aomkq69B&_yyKEF;AT|ccjp8keKX0x|w!ZeHeM# zB5DEnYwFs67B>tOhl{RA$F}-l&|&)M81D5~R31|S?-8N9}^E4Of;&)CRC%iOF&9qVx}u!C7w3+{t4{$!SuIqGPrcU2d|e`h$~xng>W=Q~o4Ui0{keds!Ll5# zER^L};gJ9(9)@mqV{ESPp^}1(AD>#3)*+x7k#<=9_+C7}nFTsKjT1UM+PjZUx9lS7+!SOYZ47p-BFwpUlb==TL) z{{E?XKNz5D)7A8o1?Y4y?^8^vZD@SN2uHxUOzXYh3Wh1P$0Ul-{OJW$49eCo1saSBZ0YkrR zxY7>m%Pf?l&;L}#bYBC62#cM+y|VJI?2POuuRW(U?YL58-+?S(dktH%r zEXn`f!pZnvGA(By`!?9?68^C~I`V>}ho{2EU^YJSZ#6czJQ9k;56RCj{Ot%+`Ad4c zI5-);$Pi3SEiZVeG*9`_S;JIUaDVKzuX{FZU}{urxnFpAap&blIhikfeVWkR z+rrY%_09r+UB`TqfyvD+?LUmgBiWgO%-}>RaDLix_0FFC@znF{LxrKP$UR<<=Ve9H zM~RyJ$QfgWZaN9v-$eLpUZ`hvj)1r?fH*`5f`ovm!U%=n(`B$&5IKs2siO+-%kkls z`DvQ&&kQAfVXUfbe_D~`r2-lL)v7JOE-Np4cEK$FK|@o~ACtQ;Wxn(=%kGF7CpR}W zep^2}lRv?Pd8XB5^+0xc>R8CIzdmdeXyEHH7*+>Y& z*N-|lh}s5X6p}fh^b?bz$6L*DAAh{z4WG{!8#Q|Lw*tDnE$AS6#@N1T{5`lbcK?Q! 
z_8&HxCiLuV7={5cZXo~x1)3Z@Y(mb=^UH)!2cpkIt&WeXI*ykmWu_vD%Oz3j*cEiA zBlvBbw%>hR`RJ?ncd0wnIzN=!``cAaDUje3WoR{Z^)ClQkwA8K_J+LT+@ClB)Sg#{ z*i##!%%36{L#bZ}x~+?=Skb&uLL==g+Om)x>53lT&{%h4+%P?PdAT2qC@=Vl9YXNk z$%v$CWlJZD)obEUZf|Y^`cabMUp=|NbMDth3ByvQxvyiry0YyW%eId3 z`T`FKC7xAN1i|>sDdB6|Soxc8FaP(vc}E|f%yH^#rRnGUZUM?OlUp#(Vi;Y$v*tzt z0u)u%3i1kWu|#aUd^Jwdb>-Yl~gzvfv)!MpE}Gwk_q-kU%=a%ljHr3CZmhC zf)t-hWu3{g7BePqtHRMO9pQzRVHH>8XWo>j;FB{Gr;Ghj=TI(l<@UCd%~<@#ob2pd zClq@YC;Rdru!g7C7&Et5*ZuHEUmtbhQ10+>ZTq4PO!Wrf>*@l!8oPfLH^n1k@mRGa zO4(z^XP;?JKqo#z&iomD+r|u4`%%gZov@K}YB?UPcqcO2I;|;P9bH$}*4AEFm|w6s zFE8iCQKi~r&lVz@s#Vlbwbx*5_XnF>=E;h@P!_~{4;!7iV$U}Ihr?R5hUEt%;Zy2r zYri>R%J_5BTaLjvq`TX+Xc^1V0#ITmrdC(g|Bq?L3PeS2&(F?T?eVG)s(y9b!7zs8 z$jdSzb#|g85{n!cH;l=q87~V4Lo+i18EZzC=ieG*Xv*~>)KCwk$rlIbyk6JHeD&Ku z-Bwy!cIKfYe5?0v<>vxN|MEd}-lol4@1K3j)H!KP$lx5@FKX34R-4DRwl*)cZIs)# z<9Iz8l^I^&(hOC4KCU8T!&FpHs)a^}EQ6VVkeE5ixln3L$bkRMacJL@KKS{KY)n^M z+jULNEvL#7rxg}-U9YMbIbxwMA{-YH^vDqBjS@PC-Y6biEhcK{cRt$rP+n2ducwx1 zoA%ZFeLAc~isT(Ny4XG0{PS zvim}Cr>ZyPjOL9JV$W8wEV&H^WMoS|1pR>48HY#K&?(u-ZER`#Ze4BtWs)StGc&WE zvutyw$Kz=%$St^$$;eUxL-Sxel|X4SscK)koPn{xv+r*BVB&=F*G(u?-v7KBzc4k2 z2*&E0<0ozUWaC}OPoHsAdKo=11J|Fm{Cz9SD6K`LB_Cowpti(uWXv#*?(AqkDH4g6 zKolHVR&+rStD@2HVP&Ni&-r}nKVxx3eHr)!-N463qGQ%4$cjraGc=gwY>1&E8L)nB zGaF?H2nqr+lqj02O&EKtXp=)ta)!}7>kc|M#zJAQY1^l@b#zw9g4k|3j#6Gy@_IvE z{gDjF7(Z#`IjV~8s08keGz(F)3F_H%gkXQ#EM0d0T7V8DoHaYYUtC=A?4%N5#~0Q1 zrMZNJ#*TDGj+5cc$WZ+E&p$?p_HXmkr5^|jyts~}dri$f6=mg*jxGFhaPF5ntfXNdeY<_3APVF2v$KAY zPUAWFrrQ&%SY7~#NSUy0AEanIL`g)zqaYj_gRpNS@`G_B+gGgapqS^P4a^S%9?5~M z24G4t=m@VMO~ubQ$S zn8N*bSY+Biycd~MSG)6nob$(@oIVadxF_4*z3Qt|D!LVPt_fR@Xk1m6QFtU-2%T zSk|-L*d9A9f;^Cv)uV5Bf48=#_Nqh2PMnnv#1Es{lypQvM*1pXN8eqbi}8dw>@^^;ZtXyIxS6--%sP5E;em}A#Z`oc9yw$OZBtG z#f3M`80lT!+Ya^GVI?ahzUoy=*1mDX;fI|o#!;PKZS{62($A$WFgz{rL1ko4cSqZT zkt56QSAwV-HA;x}wx@k|SXA-jAF;99x9|8}abDpqmmlX@(%Z(QpG#XHZGqusf#qAn 
zXLWUVUY=K+{a-UnygU2ayOcOAIz2=Nq)#_jJy%v-^1TJK(;9qz9hmgtbn+i}pUl2$by-!z499FUgi#D*L^{Y2688K?ak1n64*_My1{cr@h>i#x3k*{HVG zrmMrD$b=a)M$J#>fTeQC(q*MBFgPu+ww8@+ZELw&6ojVo(!2-8=BA##wOkLYBpxw$J?GOA30*dmGjh9`_uXKJ!uQ1Eig1Ju%VgxJ3FFZF%9#Y48P}E zF-zGzJ-h$n*8T3V$n2zYyn6kPbve0#zaDZ(&OJ#Uq@gh+=~kvKkhZ|SEU;`_Y>a8! z*NCD}BTAlsmI!F-3&v`HtlaMoE1AXr`}f&bsw%5)3uJmeC_J?2hnMyNJhnd_OW%{W zK-vQRXMtrku`vxh8 None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `hunyuan-standard` model for validate, - model_instance.validate_credentials(model="hunyuan-standard", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/hunyuan/hunyuan.yaml b/api/core/model_runtime/model_providers/hunyuan/hunyuan.yaml deleted file mode 100644 index 812b51ddcd176a..00000000000000 --- a/api/core/model_runtime/model_providers/hunyuan/hunyuan.yaml +++ /dev/null @@ -1,41 +0,0 @@ -provider: hunyuan -label: - zh_Hans: 腾讯混元 - en_US: Hunyuan -description: - en_US: Models provided by Tencent Hunyuan, such as hunyuan-standard, hunyuan-standard-256k, hunyuan-pro and hunyuan-lite. 
- zh_Hans: 腾讯混元提供的模型,例如 hunyuan-standard、 hunyuan-standard-256k, hunyuan-pro 和 hunyuan-lite。 -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.png -background: "#F6F7F7" -help: - title: - en_US: Get your API Key from Tencent Hunyuan - zh_Hans: 从腾讯混元获取 API Key - url: - en_US: https://console.cloud.tencent.com/cam/capi -supported_model_types: - - llm - - text-embedding -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: secret_id - label: - en_US: Secret ID - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 Secret ID - en_US: Enter your Secret ID - - variable: secret_key - label: - en_US: Secret Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 Secret Key - en_US: Enter your Secret Key diff --git a/api/core/model_runtime/model_providers/hunyuan/llm/__init__.py b/api/core/model_runtime/model_providers/hunyuan/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/hunyuan/llm/_position.yaml b/api/core/model_runtime/model_providers/hunyuan/llm/_position.yaml deleted file mode 100644 index f494984443cb42..00000000000000 --- a/api/core/model_runtime/model_providers/hunyuan/llm/_position.yaml +++ /dev/null @@ -1,6 +0,0 @@ -- hunyuan-lite -- hunyuan-standard -- hunyuan-standard-256k -- hunyuan-pro -- hunyuan-turbo -- hunyuan-vision diff --git a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-lite.yaml b/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-lite.yaml deleted file mode 100644 index 4f5a5dfb4858ae..00000000000000 --- a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-lite.yaml +++ /dev/null @@ -1,28 +0,0 @@ -model: hunyuan-lite -label: - zh_Hans: hunyuan-lite - en_US: hunyuan-lite -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 256000 
-parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 256000 -pricing: - input: '0.00' - output: '0.00' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-pro.yaml b/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-pro.yaml deleted file mode 100644 index b173ffbe778563..00000000000000 --- a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-pro.yaml +++ /dev/null @@ -1,38 +0,0 @@ -model: hunyuan-pro -label: - zh_Hans: hunyuan-pro - en_US: hunyuan-pro -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 32000 - - name: enable_enhance - label: - zh_Hans: 功能增强 - en_US: Enable Enhancement - type: boolean - help: - zh_Hans: 功能增强(如搜索)开关,关闭时将直接由主模型生成回复内容,可以降低响应时延(对于流式输出时的首字时延尤为明显)。但在少数场景里,回复效果可能会下降。 - en_US: Allow the model to perform external search to enhance the generation results. 
- required: false - default: true -pricing: - input: '0.03' - output: '0.10' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-standard-256k.yaml b/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-standard-256k.yaml deleted file mode 100644 index 1f94a8623b494c..00000000000000 --- a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-standard-256k.yaml +++ /dev/null @@ -1,38 +0,0 @@ -model: hunyuan-standard-256k -label: - zh_Hans: hunyuan-standard-256k - en_US: hunyuan-standard-256k -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 256000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 256000 - - name: enable_enhance - label: - zh_Hans: 功能增强 - en_US: Enable Enhancement - type: boolean - help: - zh_Hans: 功能增强(如搜索)开关,关闭时将直接由主模型生成回复内容,可以降低响应时延(对于流式输出时的首字时延尤为明显)。但在少数场景里,回复效果可能会下降。 - en_US: Allow the model to perform external search to enhance the generation results. 
- required: false - default: true -pricing: - input: '0.015' - output: '0.06' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-standard.yaml b/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-standard.yaml deleted file mode 100644 index 1db25930fc7b1c..00000000000000 --- a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-standard.yaml +++ /dev/null @@ -1,38 +0,0 @@ -model: hunyuan-standard -label: - zh_Hans: hunyuan-standard - en_US: hunyuan-standard -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 32000 - - name: enable_enhance - label: - zh_Hans: 功能增强 - en_US: Enable Enhancement - type: boolean - help: - zh_Hans: 功能增强(如搜索)开关,关闭时将直接由主模型生成回复内容,可以降低响应时延(对于流式输出时的首字时延尤为明显)。但在少数场景里,回复效果可能会下降。 - en_US: Allow the model to perform external search to enhance the generation results. 
- required: false - default: true -pricing: - input: '0.0045' - output: '0.0005' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-turbo.yaml b/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-turbo.yaml deleted file mode 100644 index 4837fed4bae563..00000000000000 --- a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-turbo.yaml +++ /dev/null @@ -1,38 +0,0 @@ -model: hunyuan-turbo -label: - zh_Hans: hunyuan-turbo - en_US: hunyuan-turbo -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 32000 - - name: enable_enhance - label: - zh_Hans: 功能增强 - en_US: Enable Enhancement - type: boolean - help: - zh_Hans: 功能增强(如搜索)开关,关闭时将直接由主模型生成回复内容,可以降低响应时延(对于流式输出时的首字时延尤为明显)。但在少数场景里,回复效果可能会下降。 - en_US: Allow the model to perform external search to enhance the generation results. 
- required: false - default: true -pricing: - input: '0.015' - output: '0.05' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-vision.yaml b/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-vision.yaml deleted file mode 100644 index 9edc7f4710f9a5..00000000000000 --- a/api/core/model_runtime/model_providers/hunyuan/llm/hunyuan-vision.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: hunyuan-vision -label: - zh_Hans: hunyuan-vision - en_US: hunyuan-vision -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8000 - - name: enable_enhance - label: - zh_Hans: 功能增强 - en_US: Enable Enhancement - type: boolean - help: - zh_Hans: 功能增强(如搜索)开关,关闭时将直接由主模型生成回复内容,可以降低响应时延(对于流式输出时的首字时延尤为明显)。但在少数场景里,回复效果可能会下降。 - en_US: Allow the model to perform external search to enhance the generation results. 
- required: false - default: true -pricing: - input: '0.018' - output: '0.018' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/hunyuan/llm/llm.py b/api/core/model_runtime/model_providers/hunyuan/llm/llm.py deleted file mode 100644 index 2014de8516bc11..00000000000000 --- a/api/core/model_runtime/model_providers/hunyuan/llm/llm.py +++ /dev/null @@ -1,348 +0,0 @@ -import json -import logging -from collections.abc import Generator -from typing import cast - -from tencentcloud.common import credential -from tencentcloud.common.exception import TencentCloudSDKException -from tencentcloud.common.profile.client_profile import ClientProfile -from tencentcloud.common.profile.http_profile import HttpProfile -from tencentcloud.hunyuan.v20230901 import hunyuan_client, models - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import InvokeError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - -logger = logging.getLogger(__name__) - - -class HunyuanLargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - client = self._setup_hunyuan_client(credentials) - request = models.ChatCompletionsRequest() - messages_dict = self._convert_prompt_messages_to_dicts(prompt_messages) - - custom_parameters = { 
- "Temperature": model_parameters.get("temperature", 0.0), - "TopP": model_parameters.get("top_p", 1.0), - "EnableEnhancement": model_parameters.get("enable_enhance", True), - } - - params = { - "Model": model, - "Messages": messages_dict, - "Stream": stream, - **custom_parameters, - } - # add Tools and ToolChoice - if tools and len(tools) > 0: - params["ToolChoice"] = "auto" - params["Tools"] = [ - { - "Type": "function", - "Function": { - "Name": tool.name, - "Description": tool.description, - "Parameters": json.dumps(tool.parameters), - }, - } - for tool in tools - ] - - request.from_json_string(json.dumps(params)) - response = client.ChatCompletions(request) - - if stream: - return self._handle_stream_chat_response(model, credentials, prompt_messages, response) - - return self._handle_chat_response(credentials, model, prompt_messages, response) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate credentials - """ - try: - client = self._setup_hunyuan_client(credentials) - - req = models.ChatCompletionsRequest() - params = { - "Model": model, - "Messages": [{"Role": "user", "Content": "hello"}], - "TopP": 1, - "Temperature": 0, - "Stream": False, - } - req.from_json_string(json.dumps(params)) - client.ChatCompletions(req) - except Exception as e: - raise CredentialsValidateFailedError(f"Credentials validation failed: {e}") - - def _setup_hunyuan_client(self, credentials): - secret_id = credentials["secret_id"] - secret_key = credentials["secret_key"] - cred = credential.Credential(secret_id, secret_key) - httpProfile = HttpProfile() - httpProfile.endpoint = "hunyuan.tencentcloudapi.com" - clientProfile = ClientProfile() - clientProfile.httpProfile = httpProfile - client = hunyuan_client.HunyuanClient(cred, "", clientProfile) - return client - - def _convert_prompt_messages_to_dicts(self, prompt_messages: list[PromptMessage]) -> list[dict]: - """Convert a list of PromptMessage objects to a list of dictionaries with 'Role' 
and 'Content' keys.""" - dict_list = [] - for message in prompt_messages: - if isinstance(message, AssistantPromptMessage): - tool_calls = message.tool_calls - if tool_calls and len(tool_calls) > 0: - dict_tool_calls = [ - { - "Id": tool_call.id, - "Type": tool_call.type, - "Function": { - "Name": tool_call.function.name, - "Arguments": tool_call.function.arguments - if (tool_call.function.arguments == "") - else "{}", - }, - } - for tool_call in tool_calls - ] - - dict_list.append( - { - "Role": message.role.value, - # fix set content = "" while tool_call request - # fix [hunyuan] None, [TencentCloudSDKException] code:InvalidParameter - # message:Messages Content and Contents not allowed empty at the same time. - "Content": " ", # message.content if (message.content is not None) else "", - "ToolCalls": dict_tool_calls, - } - ) - else: - dict_list.append({"Role": message.role.value, "Content": message.content}) - elif isinstance(message, ToolPromptMessage): - tool_execute_result = {"result": message.content} - content = json.dumps(tool_execute_result, ensure_ascii=False) - dict_list.append({"Role": message.role.value, "Content": content, "ToolCallId": message.tool_call_id}) - elif isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - dict_list.append({"Role": message.role.value, "Content": message.content}) - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - sub_message_dict = {"Type": "text", "Text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - sub_message_dict = { - "Type": "image_url", - "ImageUrl": {"Url": message_content.data}, - } - sub_messages.append(sub_message_dict) - 
dict_list.append({"Role": message.role.value, "Contents": sub_messages}) - else: - dict_list.append({"Role": message.role.value, "Content": message.content}) - return dict_list - - def _handle_stream_chat_response(self, model, credentials, prompt_messages, resp): - tool_call = None - tool_calls = [] - - for index, event in enumerate(resp): - logging.debug("_handle_stream_chat_response, event: %s", event) - - data_str = event["data"] - data = json.loads(data_str) - - choices = data.get("Choices", []) - if not choices: - continue - choice = choices[0] - delta = choice.get("Delta", {}) - message_content = delta.get("Content", "") - finish_reason = choice.get("FinishReason", "") - - usage = data.get("Usage", {}) - prompt_tokens = usage.get("PromptTokens", 0) - completion_tokens = usage.get("CompletionTokens", 0) - - response_tool_calls = delta.get("ToolCalls") - if response_tool_calls is not None: - new_tool_calls = self._extract_response_tool_calls(response_tool_calls) - if len(new_tool_calls) > 0: - new_tool_call = new_tool_calls[0] - if tool_call is None: - tool_call = new_tool_call - elif tool_call.id != new_tool_call.id: - tool_calls.append(tool_call) - tool_call = new_tool_call - else: - tool_call.function.name += new_tool_call.function.name - tool_call.function.arguments += new_tool_call.function.arguments - if tool_call is not None and len(tool_call.function.name) > 0 and len(tool_call.function.arguments) > 0: - tool_calls.append(tool_call) - tool_call = None - - assistant_prompt_message = AssistantPromptMessage(content=message_content, tool_calls=[]) - # rewrite content = "" while tool_call to avoid show content on web page - if len(tool_calls) > 0: - assistant_prompt_message.content = "" - - # add tool_calls to assistant_prompt_message - if finish_reason == "tool_calls": - assistant_prompt_message.tool_calls = tool_calls - tool_call = None - tool_calls = [] - - if len(finish_reason) > 0: - usage = self._calc_response_usage(model, credentials, prompt_tokens, 
completion_tokens) - - delta_chunk = LLMResultChunkDelta( - index=index, - role=delta.get("Role", "assistant"), - message=assistant_prompt_message, - usage=usage, - finish_reason=finish_reason, - ) - tool_call = None - tool_calls = [] - - else: - delta_chunk = LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - ) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=delta_chunk, - ) - - def _handle_chat_response(self, credentials, model, prompt_messages, response): - usage = self._calc_response_usage( - model, credentials, response.Usage.PromptTokens, response.Usage.CompletionTokens - ) - assistant_prompt_message = AssistantPromptMessage() - assistant_prompt_message.content = response.Choices[0].Message.Content - result = LLMResult( - model=model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - ) - - return result - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool] | None = None, - ) -> int: - if len(prompt_messages) == 0: - return 0 - prompt = self._convert_messages_to_prompt(prompt_messages) - return self._get_num_tokens_by_gpt2(prompt) - - def _convert_messages_to_prompt(self, messages: list[PromptMessage]) -> str: - """ - Format a list of messages into a full prompt for the Anthropic model - - :param messages: List of PromptMessage to combine. - :return: Combined string with necessary human_prompt and ai_prompt tags. - """ - messages = messages.copy() # don't mutate the original list - - text = "".join(self._convert_one_message_to_text(message) for message in messages) - - # trim off the trailing ' ' that might come from the "Assistant: " - return text.rstrip() - - def _convert_one_message_to_text(self, message: PromptMessage) -> str: - """ - Convert a single message to a string. - - :param message: PromptMessage to convert. - :return: String representation of the message. 
- """ - human_prompt = "\n\nHuman:" - ai_prompt = "\n\nAssistant:" - tool_prompt = "\n\nTool:" - content = message.content - - if isinstance(message, UserPromptMessage): - message_text = f"{human_prompt} {content}" - elif isinstance(message, AssistantPromptMessage): - message_text = f"{ai_prompt} {content}" - elif isinstance(message, ToolPromptMessage): - message_text = f"{tool_prompt} {content}" - elif isinstance(message, SystemPromptMessage): - message_text = content - else: - raise ValueError(f"Got unknown type {message}") - - return message_text - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeError: [TencentCloudSDKException], - } - - def _extract_response_tool_calls(self, response_tool_calls: list[dict]) -> list[AssistantPromptMessage.ToolCall]: - """ - Extract tool calls from response - - :param response_tool_calls: response tool calls - :return: list of tool calls - """ - tool_calls = [] - if response_tool_calls: - for response_tool_call in response_tool_calls: - response_function = response_tool_call.get("Function", {}) - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_function.get("Name", ""), arguments=response_function.get("Arguments", "") - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_tool_call.get("Id", 0), type="function", function=function - ) - tool_calls.append(tool_call) - - return tool_calls diff --git a/api/core/model_runtime/model_providers/hunyuan/text_embedding/__init__.py b/api/core/model_runtime/model_providers/hunyuan/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git 
a/api/core/model_runtime/model_providers/hunyuan/text_embedding/hunyuan-text-embedding.yaml b/api/core/model_runtime/model_providers/hunyuan/text_embedding/hunyuan-text-embedding.yaml deleted file mode 100644 index ab014e4344de97..00000000000000 --- a/api/core/model_runtime/model_providers/hunyuan/text_embedding/hunyuan-text-embedding.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: hunyuan-embedding -model_type: text-embedding -model_properties: - context_size: 1024 - max_chunks: 1 diff --git a/api/core/model_runtime/model_providers/jina/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/jina/_assets/icon_l_en.svg deleted file mode 100644 index 6a241fc9ae32cf..00000000000000 --- a/api/core/model_runtime/model_providers/jina/_assets/icon_l_en.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/jina/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/jina/_assets/icon_s_en.svg deleted file mode 100644 index 2e1b00fa52e43c..00000000000000 --- a/api/core/model_runtime/model_providers/jina/_assets/icon_s_en.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/api/core/model_runtime/model_providers/jina/jina.py b/api/core/model_runtime/model_providers/jina/jina.py deleted file mode 100644 index 186a0a0fa7e6dc..00000000000000 --- a/api/core/model_runtime/model_providers/jina/jina.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class JinaProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.TEXT_EMBEDDING) - - # Use `jina-embeddings-v3` model for validate, - # no matter what model you pass in, text completion model or chat model - model_instance.validate_credentials(model="jina-embeddings-v3", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/jina/rerank/__init__.py b/api/core/model_runtime/model_providers/jina/rerank/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/jina/rerank/_position.yaml b/api/core/model_runtime/model_providers/jina/rerank/_position.yaml deleted file mode 100644 index c9ddaad758a539..00000000000000 --- a/api/core/model_runtime/model_providers/jina/rerank/_position.yaml +++ /dev/null @@ -1,5 +0,0 @@ -- jina-reranker-v2-base-multilingual -- jina-reranker-v1-base-en -- jina-reranker-v1-turbo-en -- jina-colbert-v1-en -- jina-reranker-v1-tiny-en diff --git a/api/core/model_runtime/model_providers/jina/rerank/jina-colbert-v1-en.yaml b/api/core/model_runtime/model_providers/jina/rerank/jina-colbert-v1-en.yaml deleted file mode 100644 index 320370f2425521..00000000000000 --- a/api/core/model_runtime/model_providers/jina/rerank/jina-colbert-v1-en.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: jina-colbert-v1-en -model_type: rerank -model_properties: - context_size: 8192 diff --git a/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-base-en.yaml b/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-base-en.yaml deleted file mode 100644 index bd3f31fbd17f5f..00000000000000 --- a/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-base-en.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: jina-reranker-v1-base-en -model_type: rerank -model_properties: - 
context_size: 8192 diff --git a/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-tiny-en.yaml b/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-tiny-en.yaml deleted file mode 100644 index b81711195b3135..00000000000000 --- a/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-tiny-en.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: jina-reranker-v1-tiny-en -model_type: rerank -model_properties: - context_size: 8192 diff --git a/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-turbo-en.yaml b/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-turbo-en.yaml deleted file mode 100644 index d05f4bb4a2fbb7..00000000000000 --- a/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v1-turbo-en.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: jina-reranker-v1-turbo-en -model_type: rerank -model_properties: - context_size: 8192 diff --git a/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v2-base-multilingual.yaml b/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v2-base-multilingual.yaml deleted file mode 100644 index e6af62107eaa08..00000000000000 --- a/api/core/model_runtime/model_providers/jina/rerank/jina-reranker-v2-base-multilingual.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: jina-reranker-v2-base-multilingual -model_type: rerank -model_properties: - context_size: 1024 diff --git a/api/core/model_runtime/model_providers/jina/rerank/rerank.py b/api/core/model_runtime/model_providers/jina/rerank/rerank.py deleted file mode 100644 index 79ca68914f9791..00000000000000 --- a/api/core/model_runtime/model_providers/jina/rerank/rerank.py +++ /dev/null @@ -1,125 +0,0 @@ -from typing import Optional - -import httpx - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType -from core.model_runtime.entities.rerank_entities import 
RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - - -class JinaRerankModel(RerankModel): - """ - Model class for Jina rerank model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n documents to return - :param user: unique user id - :return: rerank result - """ - if len(docs) == 0: - return RerankResult(model=model, docs=[]) - - base_url = credentials.get("base_url", "https://api.jina.ai/v1") - base_url = base_url.removesuffix("/") - - try: - response = httpx.post( - base_url + "/rerank", - json={"model": model, "query": query, "documents": docs, "top_n": top_n}, - headers={"Authorization": f"Bearer {credentials.get('api_key')}"}, - ) - response.raise_for_status() - results = response.json() - - rerank_documents = [] - for result in results["results"]: - rerank_document = RerankDocument( - index=result["index"], - text=result["document"]["text"], - score=result["relevance_score"], - ) - if score_threshold is None or result["relevance_score"] >= score_threshold: - rerank_documents.append(rerank_document) - - return RerankResult(model=model, docs=rerank_documents) - except httpx.HTTPStatusError as e: - raise InvokeServerUnavailableError(str(e)) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model 
credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke( - model=model, - credentials=credentials, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. Its capital is Saipan.", - ], - score_threshold=0.8, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - """ - return { - InvokeConnectionError: [httpx.ConnectError], - InvokeServerUnavailableError: [httpx.RemoteProtocolError], - InvokeRateLimitError: [], - InvokeAuthorizationError: [httpx.HTTPStatusError], - InvokeBadRequestError: [httpx.RequestError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.RERANK, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size"))}, - ) - - return entity diff --git a/api/core/model_runtime/model_providers/jina/text_embedding/jina-clip-v1.yaml b/api/core/model_runtime/model_providers/jina/text_embedding/jina-clip-v1.yaml deleted file mode 100644 index c06bfd7ebeb6cd..00000000000000 --- a/api/core/model_runtime/model_providers/jina/text_embedding/jina-clip-v1.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: jina-clip-v1 -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 2048 -pricing: - input: '0.001' - unit: '0.001' - currency: USD 
diff --git a/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-de.yaml b/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-de.yaml deleted file mode 100644 index 09f7023acbc67d..00000000000000 --- a/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-de.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: jina-embeddings-v2-base-de -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 2048 -pricing: - input: '0.001' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-en.yaml b/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-en.yaml deleted file mode 100644 index a9b2cd4efb328a..00000000000000 --- a/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-en.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: jina-embeddings-v2-base-en -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 2048 -pricing: - input: '0.001' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-zh.yaml b/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-zh.yaml deleted file mode 100644 index 2a66b4729b5fa2..00000000000000 --- a/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-base-zh.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: jina-embeddings-v2-base-zh -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 2048 -pricing: - input: '0.001' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-small-en.yaml b/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-small-en.yaml deleted file mode 100644 index c92779d4997995..00000000000000 --- 
a/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v2-small-en.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: jina-embeddings-v2-small-en -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 2048 -pricing: - input: '0.001' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v3.yaml b/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v3.yaml deleted file mode 100644 index 4e5374dc9d733d..00000000000000 --- a/api/core/model_runtime/model_providers/jina/text_embedding/jina-embeddings-v3.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: jina-embeddings-v3 -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 2048 -pricing: - input: '0.001' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/jina/text_embedding/jina_tokenizer.py b/api/core/model_runtime/model_providers/jina/text_embedding/jina_tokenizer.py deleted file mode 100644 index d80cbfa83d6425..00000000000000 --- a/api/core/model_runtime/model_providers/jina/text_embedding/jina_tokenizer.py +++ /dev/null @@ -1,32 +0,0 @@ -from os.path import abspath, dirname, join -from threading import Lock - -from transformers import AutoTokenizer - - -class JinaTokenizer: - _tokenizer = None - _lock = Lock() - - @classmethod - def _get_tokenizer(cls): - if cls._tokenizer is None: - with cls._lock: - if cls._tokenizer is None: - base_path = abspath(__file__) - gpt2_tokenizer_path = join(dirname(base_path), "tokenizer") - cls._tokenizer = AutoTokenizer.from_pretrained(gpt2_tokenizer_path) - return cls._tokenizer - - @classmethod - def _get_num_tokens_by_jina_base(cls, text: str) -> int: - """ - use jina tokenizer to get num tokens - """ - tokenizer = cls._get_tokenizer() - tokens = tokenizer.encode(text) - return len(tokens) - - @classmethod - def get_num_tokens(cls, text: str) -> int: - return cls._get_num_tokens_by_jina_base(text) 
diff --git a/api/core/model_runtime/model_providers/jina/text_embedding/tokenizer/tokenizer.json b/api/core/model_runtime/model_providers/jina/text_embedding/tokenizer/tokenizer.json deleted file mode 100644 index 65f2b2bc19a33a..00000000000000 --- a/api/core/model_runtime/model_providers/jina/text_embedding/tokenizer/tokenizer.json +++ /dev/null @@ -1,30678 +0,0 @@ -{ - "version": "1.0", - "truncation": null, - "padding": null, - "added_tokens": [ - { - "id": 0, - "content": "[PAD]", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 100, - "content": "[UNK]", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 101, - "content": "[CLS]", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 102, - "content": "[SEP]", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 103, - "content": "[MASK]", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - } - ], - "normalizer": { - "type": "BertNormalizer", - "clean_text": true, - "handle_chinese_chars": true, - "strip_accents": null, - "lowercase": true - }, - "pre_tokenizer": { - "type": "BertPreTokenizer" - }, - "post_processor": { - "type": "TemplateProcessing", - "single": [ - { - "SpecialToken": { - "id": "[CLS]", - "type_id": 0 - } - }, - { - "Sequence": { - "id": "A", - "type_id": 0 - } - }, - { - "SpecialToken": { - "id": "[SEP]", - "type_id": 0 - } - } - ], - "pair": [ - { - "SpecialToken": { - "id": "[CLS]", - "type_id": 0 - } - }, - { - "Sequence": { - "id": "A", - "type_id": 0 - } - }, - { - "SpecialToken": { - "id": "[SEP]", - "type_id": 0 - } - }, - { - "Sequence": { - "id": "B", - "type_id": 1 - } - }, - { - "SpecialToken": { - "id": "[SEP]", - "type_id": 1 - } - } - ], - "special_tokens": 
{ - "[CLS]": { - "id": "[CLS]", - "ids": [ - 101 - ], - "tokens": [ - "[CLS]" - ] - }, - "[SEP]": { - "id": "[SEP]", - "ids": [ - 102 - ], - "tokens": [ - "[SEP]" - ] - } - } - }, - "decoder": { - "type": "WordPiece", - "prefix": "##", - "cleanup": true - }, - "model": { - "type": "WordPiece", - "unk_token": "[UNK]", - "continuing_subword_prefix": "##", - "max_input_chars_per_word": 100, - "vocab": { - "[PAD]": 0, - "[unused0]": 1, - "[unused1]": 2, - "[unused2]": 3, - "[unused3]": 4, - "[unused4]": 5, - "[unused5]": 6, - "[unused6]": 7, - "[unused7]": 8, - "[unused8]": 9, - "[unused9]": 10, - "[unused10]": 11, - "[unused11]": 12, - "[unused12]": 13, - "[unused13]": 14, - "[unused14]": 15, - "[unused15]": 16, - "[unused16]": 17, - "[unused17]": 18, - "[unused18]": 19, - "[unused19]": 20, - "[unused20]": 21, - "[unused21]": 22, - "[unused22]": 23, - "[unused23]": 24, - "[unused24]": 25, - "[unused25]": 26, - "[unused26]": 27, - "[unused27]": 28, - "[unused28]": 29, - "[unused29]": 30, - "[unused30]": 31, - "[unused31]": 32, - "[unused32]": 33, - "[unused33]": 34, - "[unused34]": 35, - "[unused35]": 36, - "[unused36]": 37, - "[unused37]": 38, - "[unused38]": 39, - "[unused39]": 40, - "[unused40]": 41, - "[unused41]": 42, - "[unused42]": 43, - "[unused43]": 44, - "[unused44]": 45, - "[unused45]": 46, - "[unused46]": 47, - "[unused47]": 48, - "[unused48]": 49, - "[unused49]": 50, - "[unused50]": 51, - "[unused51]": 52, - "[unused52]": 53, - "[unused53]": 54, - "[unused54]": 55, - "[unused55]": 56, - "[unused56]": 57, - "[unused57]": 58, - "[unused58]": 59, - "[unused59]": 60, - "[unused60]": 61, - "[unused61]": 62, - "[unused62]": 63, - "[unused63]": 64, - "[unused64]": 65, - "[unused65]": 66, - "[unused66]": 67, - "[unused67]": 68, - "[unused68]": 69, - "[unused69]": 70, - "[unused70]": 71, - "[unused71]": 72, - "[unused72]": 73, - "[unused73]": 74, - "[unused74]": 75, - "[unused75]": 76, - "[unused76]": 77, - "[unused77]": 78, - "[unused78]": 79, - "[unused79]": 80, 
- "[unused80]": 81, - "[unused81]": 82, - "[unused82]": 83, - "[unused83]": 84, - "[unused84]": 85, - "[unused85]": 86, - "[unused86]": 87, - "[unused87]": 88, - "[unused88]": 89, - "[unused89]": 90, - "[unused90]": 91, - "[unused91]": 92, - "[unused92]": 93, - "[unused93]": 94, - "[unused94]": 95, - "[unused95]": 96, - "[unused96]": 97, - "[unused97]": 98, - "[unused98]": 99, - "[UNK]": 100, - "[CLS]": 101, - "[SEP]": 102, - "[MASK]": 103, - "[unused99]": 104, - "[unused100]": 105, - "[unused101]": 106, - "[unused102]": 107, - "[unused103]": 108, - "[unused104]": 109, - "[unused105]": 110, - "[unused106]": 111, - "[unused107]": 112, - "[unused108]": 113, - "[unused109]": 114, - "[unused110]": 115, - "[unused111]": 116, - "[unused112]": 117, - "[unused113]": 118, - "[unused114]": 119, - "[unused115]": 120, - "[unused116]": 121, - "[unused117]": 122, - "[unused118]": 123, - "[unused119]": 124, - "[unused120]": 125, - "[unused121]": 126, - "[unused122]": 127, - "[unused123]": 128, - "[unused124]": 129, - "[unused125]": 130, - "[unused126]": 131, - "[unused127]": 132, - "[unused128]": 133, - "[unused129]": 134, - "[unused130]": 135, - "[unused131]": 136, - "[unused132]": 137, - "[unused133]": 138, - "[unused134]": 139, - "[unused135]": 140, - "[unused136]": 141, - "[unused137]": 142, - "[unused138]": 143, - "[unused139]": 144, - "[unused140]": 145, - "[unused141]": 146, - "[unused142]": 147, - "[unused143]": 148, - "[unused144]": 149, - "[unused145]": 150, - "[unused146]": 151, - "[unused147]": 152, - "[unused148]": 153, - "[unused149]": 154, - "[unused150]": 155, - "[unused151]": 156, - "[unused152]": 157, - "[unused153]": 158, - "[unused154]": 159, - "[unused155]": 160, - "[unused156]": 161, - "[unused157]": 162, - "[unused158]": 163, - "[unused159]": 164, - "[unused160]": 165, - "[unused161]": 166, - "[unused162]": 167, - "[unused163]": 168, - "[unused164]": 169, - "[unused165]": 170, - "[unused166]": 171, - "[unused167]": 172, - "[unused168]": 173, - 
"[unused169]": 174, - "[unused170]": 175, - "[unused171]": 176, - "[unused172]": 177, - "[unused173]": 178, - "[unused174]": 179, - "[unused175]": 180, - "[unused176]": 181, - "[unused177]": 182, - "[unused178]": 183, - "[unused179]": 184, - "[unused180]": 185, - "[unused181]": 186, - "[unused182]": 187, - "[unused183]": 188, - "[unused184]": 189, - "[unused185]": 190, - "[unused186]": 191, - "[unused187]": 192, - "[unused188]": 193, - "[unused189]": 194, - "[unused190]": 195, - "[unused191]": 196, - "[unused192]": 197, - "[unused193]": 198, - "[unused194]": 199, - "[unused195]": 200, - "[unused196]": 201, - "[unused197]": 202, - "[unused198]": 203, - "[unused199]": 204, - "[unused200]": 205, - "[unused201]": 206, - "[unused202]": 207, - "[unused203]": 208, - "[unused204]": 209, - "[unused205]": 210, - "[unused206]": 211, - "[unused207]": 212, - "[unused208]": 213, - "[unused209]": 214, - "[unused210]": 215, - "[unused211]": 216, - "[unused212]": 217, - "[unused213]": 218, - "[unused214]": 219, - "[unused215]": 220, - "[unused216]": 221, - "[unused217]": 222, - "[unused218]": 223, - "[unused219]": 224, - "[unused220]": 225, - "[unused221]": 226, - "[unused222]": 227, - "[unused223]": 228, - "[unused224]": 229, - "[unused225]": 230, - "[unused226]": 231, - "[unused227]": 232, - "[unused228]": 233, - "[unused229]": 234, - "[unused230]": 235, - "[unused231]": 236, - "[unused232]": 237, - "[unused233]": 238, - "[unused234]": 239, - "[unused235]": 240, - "[unused236]": 241, - "[unused237]": 242, - "[unused238]": 243, - "[unused239]": 244, - "[unused240]": 245, - "[unused241]": 246, - "[unused242]": 247, - "[unused243]": 248, - "[unused244]": 249, - "[unused245]": 250, - "[unused246]": 251, - "[unused247]": 252, - "[unused248]": 253, - "[unused249]": 254, - "[unused250]": 255, - "[unused251]": 256, - "[unused252]": 257, - "[unused253]": 258, - "[unused254]": 259, - "[unused255]": 260, - "[unused256]": 261, - "[unused257]": 262, - "[unused258]": 263, - "[unused259]": 264, 
- "[unused260]": 265, - "[unused261]": 266, - "[unused262]": 267, - "[unused263]": 268, - "[unused264]": 269, - "[unused265]": 270, - "[unused266]": 271, - "[unused267]": 272, - "[unused268]": 273, - "[unused269]": 274, - "[unused270]": 275, - "[unused271]": 276, - "[unused272]": 277, - "[unused273]": 278, - "[unused274]": 279, - "[unused275]": 280, - "[unused276]": 281, - "[unused277]": 282, - "[unused278]": 283, - "[unused279]": 284, - "[unused280]": 285, - "[unused281]": 286, - "[unused282]": 287, - "[unused283]": 288, - "[unused284]": 289, - "[unused285]": 290, - "[unused286]": 291, - "[unused287]": 292, - "[unused288]": 293, - "[unused289]": 294, - "[unused290]": 295, - "[unused291]": 296, - "[unused292]": 297, - "[unused293]": 298, - "[unused294]": 299, - "[unused295]": 300, - "[unused296]": 301, - "[unused297]": 302, - "[unused298]": 303, - "[unused299]": 304, - "[unused300]": 305, - "[unused301]": 306, - "[unused302]": 307, - "[unused303]": 308, - "[unused304]": 309, - "[unused305]": 310, - "[unused306]": 311, - "[unused307]": 312, - "[unused308]": 313, - "[unused309]": 314, - "[unused310]": 315, - "[unused311]": 316, - "[unused312]": 317, - "[unused313]": 318, - "[unused314]": 319, - "[unused315]": 320, - "[unused316]": 321, - "[unused317]": 322, - "[unused318]": 323, - "[unused319]": 324, - "[unused320]": 325, - "[unused321]": 326, - "[unused322]": 327, - "[unused323]": 328, - "[unused324]": 329, - "[unused325]": 330, - "[unused326]": 331, - "[unused327]": 332, - "[unused328]": 333, - "[unused329]": 334, - "[unused330]": 335, - "[unused331]": 336, - "[unused332]": 337, - "[unused333]": 338, - "[unused334]": 339, - "[unused335]": 340, - "[unused336]": 341, - "[unused337]": 342, - "[unused338]": 343, - "[unused339]": 344, - "[unused340]": 345, - "[unused341]": 346, - "[unused342]": 347, - "[unused343]": 348, - "[unused344]": 349, - "[unused345]": 350, - "[unused346]": 351, - "[unused347]": 352, - "[unused348]": 353, - "[unused349]": 354, - "[unused350]": 
355, - "[unused351]": 356, - "[unused352]": 357, - "[unused353]": 358, - "[unused354]": 359, - "[unused355]": 360, - "[unused356]": 361, - "[unused357]": 362, - "[unused358]": 363, - "[unused359]": 364, - "[unused360]": 365, - "[unused361]": 366, - "[unused362]": 367, - "[unused363]": 368, - "[unused364]": 369, - "[unused365]": 370, - "[unused366]": 371, - "[unused367]": 372, - "[unused368]": 373, - "[unused369]": 374, - "[unused370]": 375, - "[unused371]": 376, - "[unused372]": 377, - "[unused373]": 378, - "[unused374]": 379, - "[unused375]": 380, - "[unused376]": 381, - "[unused377]": 382, - "[unused378]": 383, - "[unused379]": 384, - "[unused380]": 385, - "[unused381]": 386, - "[unused382]": 387, - "[unused383]": 388, - "[unused384]": 389, - "[unused385]": 390, - "[unused386]": 391, - "[unused387]": 392, - "[unused388]": 393, - "[unused389]": 394, - "[unused390]": 395, - "[unused391]": 396, - "[unused392]": 397, - "[unused393]": 398, - "[unused394]": 399, - "[unused395]": 400, - "[unused396]": 401, - "[unused397]": 402, - "[unused398]": 403, - "[unused399]": 404, - "[unused400]": 405, - "[unused401]": 406, - "[unused402]": 407, - "[unused403]": 408, - "[unused404]": 409, - "[unused405]": 410, - "[unused406]": 411, - "[unused407]": 412, - "[unused408]": 413, - "[unused409]": 414, - "[unused410]": 415, - "[unused411]": 416, - "[unused412]": 417, - "[unused413]": 418, - "[unused414]": 419, - "[unused415]": 420, - "[unused416]": 421, - "[unused417]": 422, - "[unused418]": 423, - "[unused419]": 424, - "[unused420]": 425, - "[unused421]": 426, - "[unused422]": 427, - "[unused423]": 428, - "[unused424]": 429, - "[unused425]": 430, - "[unused426]": 431, - "[unused427]": 432, - "[unused428]": 433, - "[unused429]": 434, - "[unused430]": 435, - "[unused431]": 436, - "[unused432]": 437, - "[unused433]": 438, - "[unused434]": 439, - "[unused435]": 440, - "[unused436]": 441, - "[unused437]": 442, - "[unused438]": 443, - "[unused439]": 444, - "[unused440]": 445, - 
"[unused441]": 446, - "[unused442]": 447, - "[unused443]": 448, - "[unused444]": 449, - "[unused445]": 450, - "[unused446]": 451, - "[unused447]": 452, - "[unused448]": 453, - "[unused449]": 454, - "[unused450]": 455, - "[unused451]": 456, - "[unused452]": 457, - "[unused453]": 458, - "[unused454]": 459, - "[unused455]": 460, - "[unused456]": 461, - "[unused457]": 462, - "[unused458]": 463, - "[unused459]": 464, - "[unused460]": 465, - "[unused461]": 466, - "[unused462]": 467, - "[unused463]": 468, - "[unused464]": 469, - "[unused465]": 470, - "[unused466]": 471, - "[unused467]": 472, - "[unused468]": 473, - "[unused469]": 474, - "[unused470]": 475, - "[unused471]": 476, - "[unused472]": 477, - "[unused473]": 478, - "[unused474]": 479, - "[unused475]": 480, - "[unused476]": 481, - "[unused477]": 482, - "[unused478]": 483, - "[unused479]": 484, - "[unused480]": 485, - "[unused481]": 486, - "[unused482]": 487, - "[unused483]": 488, - "[unused484]": 489, - "[unused485]": 490, - "[unused486]": 491, - "[unused487]": 492, - "[unused488]": 493, - "[unused489]": 494, - "[unused490]": 495, - "[unused491]": 496, - "[unused492]": 497, - "[unused493]": 498, - "[unused494]": 499, - "[unused495]": 500, - "[unused496]": 501, - "[unused497]": 502, - "[unused498]": 503, - "[unused499]": 504, - "[unused500]": 505, - "[unused501]": 506, - "[unused502]": 507, - "[unused503]": 508, - "[unused504]": 509, - "[unused505]": 510, - "[unused506]": 511, - "[unused507]": 512, - "[unused508]": 513, - "[unused509]": 514, - "[unused510]": 515, - "[unused511]": 516, - "[unused512]": 517, - "[unused513]": 518, - "[unused514]": 519, - "[unused515]": 520, - "[unused516]": 521, - "[unused517]": 522, - "[unused518]": 523, - "[unused519]": 524, - "[unused520]": 525, - "[unused521]": 526, - "[unused522]": 527, - "[unused523]": 528, - "[unused524]": 529, - "[unused525]": 530, - "[unused526]": 531, - "[unused527]": 532, - "[unused528]": 533, - "[unused529]": 534, - "[unused530]": 535, - "[unused531]": 536, 
- "[unused532]": 537, - "[unused533]": 538, - "[unused534]": 539, - "[unused535]": 540, - "[unused536]": 541, - "[unused537]": 542, - "[unused538]": 543, - "[unused539]": 544, - "[unused540]": 545, - "[unused541]": 546, - "[unused542]": 547, - "[unused543]": 548, - "[unused544]": 549, - "[unused545]": 550, - "[unused546]": 551, - "[unused547]": 552, - "[unused548]": 553, - "[unused549]": 554, - "[unused550]": 555, - "[unused551]": 556, - "[unused552]": 557, - "[unused553]": 558, - "[unused554]": 559, - "[unused555]": 560, - "[unused556]": 561, - "[unused557]": 562, - "[unused558]": 563, - "[unused559]": 564, - "[unused560]": 565, - "[unused561]": 566, - "[unused562]": 567, - "[unused563]": 568, - "[unused564]": 569, - "[unused565]": 570, - "[unused566]": 571, - "[unused567]": 572, - "[unused568]": 573, - "[unused569]": 574, - "[unused570]": 575, - "[unused571]": 576, - "[unused572]": 577, - "[unused573]": 578, - "[unused574]": 579, - "[unused575]": 580, - "[unused576]": 581, - "[unused577]": 582, - "[unused578]": 583, - "[unused579]": 584, - "[unused580]": 585, - "[unused581]": 586, - "[unused582]": 587, - "[unused583]": 588, - "[unused584]": 589, - "[unused585]": 590, - "[unused586]": 591, - "[unused587]": 592, - "[unused588]": 593, - "[unused589]": 594, - "[unused590]": 595, - "[unused591]": 596, - "[unused592]": 597, - "[unused593]": 598, - "[unused594]": 599, - "[unused595]": 600, - "[unused596]": 601, - "[unused597]": 602, - "[unused598]": 603, - "[unused599]": 604, - "[unused600]": 605, - "[unused601]": 606, - "[unused602]": 607, - "[unused603]": 608, - "[unused604]": 609, - "[unused605]": 610, - "[unused606]": 611, - "[unused607]": 612, - "[unused608]": 613, - "[unused609]": 614, - "[unused610]": 615, - "[unused611]": 616, - "[unused612]": 617, - "[unused613]": 618, - "[unused614]": 619, - "[unused615]": 620, - "[unused616]": 621, - "[unused617]": 622, - "[unused618]": 623, - "[unused619]": 624, - "[unused620]": 625, - "[unused621]": 626, - "[unused622]": 
627, - "[unused623]": 628, - "[unused624]": 629, - "[unused625]": 630, - "[unused626]": 631, - "[unused627]": 632, - "[unused628]": 633, - "[unused629]": 634, - "[unused630]": 635, - "[unused631]": 636, - "[unused632]": 637, - "[unused633]": 638, - "[unused634]": 639, - "[unused635]": 640, - "[unused636]": 641, - "[unused637]": 642, - "[unused638]": 643, - "[unused639]": 644, - "[unused640]": 645, - "[unused641]": 646, - "[unused642]": 647, - "[unused643]": 648, - "[unused644]": 649, - "[unused645]": 650, - "[unused646]": 651, - "[unused647]": 652, - "[unused648]": 653, - "[unused649]": 654, - "[unused650]": 655, - "[unused651]": 656, - "[unused652]": 657, - "[unused653]": 658, - "[unused654]": 659, - "[unused655]": 660, - "[unused656]": 661, - "[unused657]": 662, - "[unused658]": 663, - "[unused659]": 664, - "[unused660]": 665, - "[unused661]": 666, - "[unused662]": 667, - "[unused663]": 668, - "[unused664]": 669, - "[unused665]": 670, - "[unused666]": 671, - "[unused667]": 672, - "[unused668]": 673, - "[unused669]": 674, - "[unused670]": 675, - "[unused671]": 676, - "[unused672]": 677, - "[unused673]": 678, - "[unused674]": 679, - "[unused675]": 680, - "[unused676]": 681, - "[unused677]": 682, - "[unused678]": 683, - "[unused679]": 684, - "[unused680]": 685, - "[unused681]": 686, - "[unused682]": 687, - "[unused683]": 688, - "[unused684]": 689, - "[unused685]": 690, - "[unused686]": 691, - "[unused687]": 692, - "[unused688]": 693, - "[unused689]": 694, - "[unused690]": 695, - "[unused691]": 696, - "[unused692]": 697, - "[unused693]": 698, - "[unused694]": 699, - "[unused695]": 700, - "[unused696]": 701, - "[unused697]": 702, - "[unused698]": 703, - "[unused699]": 704, - "[unused700]": 705, - "[unused701]": 706, - "[unused702]": 707, - "[unused703]": 708, - "[unused704]": 709, - "[unused705]": 710, - "[unused706]": 711, - "[unused707]": 712, - "[unused708]": 713, - "[unused709]": 714, - "[unused710]": 715, - "[unused711]": 716, - "[unused712]": 717, - 
"[unused713]": 718, - "[unused714]": 719, - "[unused715]": 720, - "[unused716]": 721, - "[unused717]": 722, - "[unused718]": 723, - "[unused719]": 724, - "[unused720]": 725, - "[unused721]": 726, - "[unused722]": 727, - "[unused723]": 728, - "[unused724]": 729, - "[unused725]": 730, - "[unused726]": 731, - "[unused727]": 732, - "[unused728]": 733, - "[unused729]": 734, - "[unused730]": 735, - "[unused731]": 736, - "[unused732]": 737, - "[unused733]": 738, - "[unused734]": 739, - "[unused735]": 740, - "[unused736]": 741, - "[unused737]": 742, - "[unused738]": 743, - "[unused739]": 744, - "[unused740]": 745, - "[unused741]": 746, - "[unused742]": 747, - "[unused743]": 748, - "[unused744]": 749, - "[unused745]": 750, - "[unused746]": 751, - "[unused747]": 752, - "[unused748]": 753, - "[unused749]": 754, - "[unused750]": 755, - "[unused751]": 756, - "[unused752]": 757, - "[unused753]": 758, - "[unused754]": 759, - "[unused755]": 760, - "[unused756]": 761, - "[unused757]": 762, - "[unused758]": 763, - "[unused759]": 764, - "[unused760]": 765, - "[unused761]": 766, - "[unused762]": 767, - "[unused763]": 768, - "[unused764]": 769, - "[unused765]": 770, - "[unused766]": 771, - "[unused767]": 772, - "[unused768]": 773, - "[unused769]": 774, - "[unused770]": 775, - "[unused771]": 776, - "[unused772]": 777, - "[unused773]": 778, - "[unused774]": 779, - "[unused775]": 780, - "[unused776]": 781, - "[unused777]": 782, - "[unused778]": 783, - "[unused779]": 784, - "[unused780]": 785, - "[unused781]": 786, - "[unused782]": 787, - "[unused783]": 788, - "[unused784]": 789, - "[unused785]": 790, - "[unused786]": 791, - "[unused787]": 792, - "[unused788]": 793, - "[unused789]": 794, - "[unused790]": 795, - "[unused791]": 796, - "[unused792]": 797, - "[unused793]": 798, - "[unused794]": 799, - "[unused795]": 800, - "[unused796]": 801, - "[unused797]": 802, - "[unused798]": 803, - "[unused799]": 804, - "[unused800]": 805, - "[unused801]": 806, - "[unused802]": 807, - "[unused803]": 808, 
- "[unused804]": 809, - "[unused805]": 810, - "[unused806]": 811, - "[unused807]": 812, - "[unused808]": 813, - "[unused809]": 814, - "[unused810]": 815, - "[unused811]": 816, - "[unused812]": 817, - "[unused813]": 818, - "[unused814]": 819, - "[unused815]": 820, - "[unused816]": 821, - "[unused817]": 822, - "[unused818]": 823, - "[unused819]": 824, - "[unused820]": 825, - "[unused821]": 826, - "[unused822]": 827, - "[unused823]": 828, - "[unused824]": 829, - "[unused825]": 830, - "[unused826]": 831, - "[unused827]": 832, - "[unused828]": 833, - "[unused829]": 834, - "[unused830]": 835, - "[unused831]": 836, - "[unused832]": 837, - "[unused833]": 838, - "[unused834]": 839, - "[unused835]": 840, - "[unused836]": 841, - "[unused837]": 842, - "[unused838]": 843, - "[unused839]": 844, - "[unused840]": 845, - "[unused841]": 846, - "[unused842]": 847, - "[unused843]": 848, - "[unused844]": 849, - "[unused845]": 850, - "[unused846]": 851, - "[unused847]": 852, - "[unused848]": 853, - "[unused849]": 854, - "[unused850]": 855, - "[unused851]": 856, - "[unused852]": 857, - "[unused853]": 858, - "[unused854]": 859, - "[unused855]": 860, - "[unused856]": 861, - "[unused857]": 862, - "[unused858]": 863, - "[unused859]": 864, - "[unused860]": 865, - "[unused861]": 866, - "[unused862]": 867, - "[unused863]": 868, - "[unused864]": 869, - "[unused865]": 870, - "[unused866]": 871, - "[unused867]": 872, - "[unused868]": 873, - "[unused869]": 874, - "[unused870]": 875, - "[unused871]": 876, - "[unused872]": 877, - "[unused873]": 878, - "[unused874]": 879, - "[unused875]": 880, - "[unused876]": 881, - "[unused877]": 882, - "[unused878]": 883, - "[unused879]": 884, - "[unused880]": 885, - "[unused881]": 886, - "[unused882]": 887, - "[unused883]": 888, - "[unused884]": 889, - "[unused885]": 890, - "[unused886]": 891, - "[unused887]": 892, - "[unused888]": 893, - "[unused889]": 894, - "[unused890]": 895, - "[unused891]": 896, - "[unused892]": 897, - "[unused893]": 898, - "[unused894]": 
899, - "[unused895]": 900, - "[unused896]": 901, - "[unused897]": 902, - "[unused898]": 903, - "[unused899]": 904, - "[unused900]": 905, - "[unused901]": 906, - "[unused902]": 907, - "[unused903]": 908, - "[unused904]": 909, - "[unused905]": 910, - "[unused906]": 911, - "[unused907]": 912, - "[unused908]": 913, - "[unused909]": 914, - "[unused910]": 915, - "[unused911]": 916, - "[unused912]": 917, - "[unused913]": 918, - "[unused914]": 919, - "[unused915]": 920, - "[unused916]": 921, - "[unused917]": 922, - "[unused918]": 923, - "[unused919]": 924, - "[unused920]": 925, - "[unused921]": 926, - "[unused922]": 927, - "[unused923]": 928, - "[unused924]": 929, - "[unused925]": 930, - "[unused926]": 931, - "[unused927]": 932, - "[unused928]": 933, - "[unused929]": 934, - "[unused930]": 935, - "[unused931]": 936, - "[unused932]": 937, - "[unused933]": 938, - "[unused934]": 939, - "[unused935]": 940, - "[unused936]": 941, - "[unused937]": 942, - "[unused938]": 943, - "[unused939]": 944, - "[unused940]": 945, - "[unused941]": 946, - "[unused942]": 947, - "[unused943]": 948, - "[unused944]": 949, - "[unused945]": 950, - "[unused946]": 951, - "[unused947]": 952, - "[unused948]": 953, - "[unused949]": 954, - "[unused950]": 955, - "[unused951]": 956, - "[unused952]": 957, - "[unused953]": 958, - "[unused954]": 959, - "[unused955]": 960, - "[unused956]": 961, - "[unused957]": 962, - "[unused958]": 963, - "[unused959]": 964, - "[unused960]": 965, - "[unused961]": 966, - "[unused962]": 967, - "[unused963]": 968, - "[unused964]": 969, - "[unused965]": 970, - "[unused966]": 971, - "[unused967]": 972, - "[unused968]": 973, - "[unused969]": 974, - "[unused970]": 975, - "[unused971]": 976, - "[unused972]": 977, - "[unused973]": 978, - "[unused974]": 979, - "[unused975]": 980, - "[unused976]": 981, - "[unused977]": 982, - "[unused978]": 983, - "[unused979]": 984, - "[unused980]": 985, - "[unused981]": 986, - "[unused982]": 987, - "[unused983]": 988, - "[unused984]": 989, - 
"[unused985]": 990, - "[unused986]": 991, - "[unused987]": 992, - "[unused988]": 993, - "[unused989]": 994, - "[unused990]": 995, - "[unused991]": 996, - "[unused992]": 997, - "[unused993]": 998, - "!": 999, - "\"": 1000, - "#": 1001, - "$": 1002, - "%": 1003, - "&": 1004, - "'": 1005, - "(": 1006, - ")": 1007, - "*": 1008, - "+": 1009, - ",": 1010, - "-": 1011, - ".": 1012, - "/": 1013, - "0": 1014, - "1": 1015, - "2": 1016, - "3": 1017, - "4": 1018, - "5": 1019, - "6": 1020, - "7": 1021, - "8": 1022, - "9": 1023, - ":": 1024, - ";": 1025, - "<": 1026, - "=": 1027, - ">": 1028, - "?": 1029, - "@": 1030, - "[": 1031, - "\\": 1032, - "]": 1033, - "^": 1034, - "_": 1035, - "`": 1036, - "a": 1037, - "b": 1038, - "c": 1039, - "d": 1040, - "e": 1041, - "f": 1042, - "g": 1043, - "h": 1044, - "i": 1045, - "j": 1046, - "k": 1047, - "l": 1048, - "m": 1049, - "n": 1050, - "o": 1051, - "p": 1052, - "q": 1053, - "r": 1054, - "s": 1055, - "t": 1056, - "u": 1057, - "v": 1058, - "w": 1059, - "x": 1060, - "y": 1061, - "z": 1062, - "{": 1063, - "|": 1064, - "}": 1065, - "~": 1066, - "¡": 1067, - "¢": 1068, - "£": 1069, - "¤": 1070, - "¥": 1071, - "¦": 1072, - "§": 1073, - "¨": 1074, - "©": 1075, - "ª": 1076, - "«": 1077, - "¬": 1078, - "®": 1079, - "°": 1080, - "±": 1081, - "²": 1082, - "³": 1083, - "´": 1084, - "µ": 1085, - "¶": 1086, - "·": 1087, - "¹": 1088, - "º": 1089, - "»": 1090, - "¼": 1091, - "½": 1092, - "¾": 1093, - "¿": 1094, - "×": 1095, - "ß": 1096, - "æ": 1097, - "ð": 1098, - "÷": 1099, - "ø": 1100, - "þ": 1101, - "đ": 1102, - "ħ": 1103, - "ı": 1104, - "ł": 1105, - "ŋ": 1106, - "œ": 1107, - "ƒ": 1108, - "ɐ": 1109, - "ɑ": 1110, - "ɒ": 1111, - "ɔ": 1112, - "ɕ": 1113, - "ə": 1114, - "ɛ": 1115, - "ɡ": 1116, - "ɣ": 1117, - "ɨ": 1118, - "ɪ": 1119, - "ɫ": 1120, - "ɬ": 1121, - "ɯ": 1122, - "ɲ": 1123, - "ɴ": 1124, - "ɹ": 1125, - "ɾ": 1126, - "ʀ": 1127, - "ʁ": 1128, - "ʂ": 1129, - "ʃ": 1130, - "ʉ": 1131, - "ʊ": 1132, - "ʋ": 1133, - "ʌ": 1134, - "ʎ": 1135, - "ʐ": 1136, - "ʑ": 
1137, - "ʒ": 1138, - "ʔ": 1139, - "ʰ": 1140, - "ʲ": 1141, - "ʳ": 1142, - "ʷ": 1143, - "ʸ": 1144, - "ʻ": 1145, - "ʼ": 1146, - "ʾ": 1147, - "ʿ": 1148, - "ˈ": 1149, - "ː": 1150, - "ˡ": 1151, - "ˢ": 1152, - "ˣ": 1153, - "ˤ": 1154, - "α": 1155, - "β": 1156, - "γ": 1157, - "δ": 1158, - "ε": 1159, - "ζ": 1160, - "η": 1161, - "θ": 1162, - "ι": 1163, - "κ": 1164, - "λ": 1165, - "μ": 1166, - "ν": 1167, - "ξ": 1168, - "ο": 1169, - "π": 1170, - "ρ": 1171, - "ς": 1172, - "σ": 1173, - "τ": 1174, - "υ": 1175, - "φ": 1176, - "χ": 1177, - "ψ": 1178, - "ω": 1179, - "а": 1180, - "б": 1181, - "в": 1182, - "г": 1183, - "д": 1184, - "е": 1185, - "ж": 1186, - "з": 1187, - "и": 1188, - "к": 1189, - "л": 1190, - "м": 1191, - "н": 1192, - "о": 1193, - "п": 1194, - "р": 1195, - "с": 1196, - "т": 1197, - "у": 1198, - "ф": 1199, - "х": 1200, - "ц": 1201, - "ч": 1202, - "ш": 1203, - "щ": 1204, - "ъ": 1205, - "ы": 1206, - "ь": 1207, - "э": 1208, - "ю": 1209, - "я": 1210, - "ђ": 1211, - "є": 1212, - "і": 1213, - "ј": 1214, - "љ": 1215, - "њ": 1216, - "ћ": 1217, - "ӏ": 1218, - "ա": 1219, - "բ": 1220, - "գ": 1221, - "դ": 1222, - "ե": 1223, - "թ": 1224, - "ի": 1225, - "լ": 1226, - "կ": 1227, - "հ": 1228, - "մ": 1229, - "յ": 1230, - "ն": 1231, - "ո": 1232, - "պ": 1233, - "ս": 1234, - "վ": 1235, - "տ": 1236, - "ր": 1237, - "ւ": 1238, - "ք": 1239, - "־": 1240, - "א": 1241, - "ב": 1242, - "ג": 1243, - "ד": 1244, - "ה": 1245, - "ו": 1246, - "ז": 1247, - "ח": 1248, - "ט": 1249, - "י": 1250, - "ך": 1251, - "כ": 1252, - "ל": 1253, - "ם": 1254, - "מ": 1255, - "ן": 1256, - "נ": 1257, - "ס": 1258, - "ע": 1259, - "ף": 1260, - "פ": 1261, - "ץ": 1262, - "צ": 1263, - "ק": 1264, - "ר": 1265, - "ש": 1266, - "ת": 1267, - "،": 1268, - "ء": 1269, - "ا": 1270, - "ب": 1271, - "ة": 1272, - "ت": 1273, - "ث": 1274, - "ج": 1275, - "ح": 1276, - "خ": 1277, - "د": 1278, - "ذ": 1279, - "ر": 1280, - "ز": 1281, - "س": 1282, - "ش": 1283, - "ص": 1284, - "ض": 1285, - "ط": 1286, - "ظ": 1287, - "ع": 1288, - "غ": 1289, - "ـ": 1290, - 
"ف": 1291, - "ق": 1292, - "ك": 1293, - "ل": 1294, - "م": 1295, - "ن": 1296, - "ه": 1297, - "و": 1298, - "ى": 1299, - "ي": 1300, - "ٹ": 1301, - "پ": 1302, - "چ": 1303, - "ک": 1304, - "گ": 1305, - "ں": 1306, - "ھ": 1307, - "ہ": 1308, - "ی": 1309, - "ے": 1310, - "अ": 1311, - "आ": 1312, - "उ": 1313, - "ए": 1314, - "क": 1315, - "ख": 1316, - "ग": 1317, - "च": 1318, - "ज": 1319, - "ट": 1320, - "ड": 1321, - "ण": 1322, - "त": 1323, - "थ": 1324, - "द": 1325, - "ध": 1326, - "न": 1327, - "प": 1328, - "ब": 1329, - "भ": 1330, - "म": 1331, - "य": 1332, - "र": 1333, - "ल": 1334, - "व": 1335, - "श": 1336, - "ष": 1337, - "स": 1338, - "ह": 1339, - "ा": 1340, - "ि": 1341, - "ी": 1342, - "ो": 1343, - "।": 1344, - "॥": 1345, - "ং": 1346, - "অ": 1347, - "আ": 1348, - "ই": 1349, - "উ": 1350, - "এ": 1351, - "ও": 1352, - "ক": 1353, - "খ": 1354, - "গ": 1355, - "চ": 1356, - "ছ": 1357, - "জ": 1358, - "ট": 1359, - "ড": 1360, - "ণ": 1361, - "ত": 1362, - "থ": 1363, - "দ": 1364, - "ধ": 1365, - "ন": 1366, - "প": 1367, - "ব": 1368, - "ভ": 1369, - "ম": 1370, - "য": 1371, - "র": 1372, - "ল": 1373, - "শ": 1374, - "ষ": 1375, - "স": 1376, - "হ": 1377, - "া": 1378, - "ি": 1379, - "ী": 1380, - "ে": 1381, - "க": 1382, - "ச": 1383, - "ட": 1384, - "த": 1385, - "ந": 1386, - "ன": 1387, - "ப": 1388, - "ம": 1389, - "ய": 1390, - "ர": 1391, - "ல": 1392, - "ள": 1393, - "வ": 1394, - "ா": 1395, - "ி": 1396, - "ு": 1397, - "ே": 1398, - "ை": 1399, - "ನ": 1400, - "ರ": 1401, - "ಾ": 1402, - "ක": 1403, - "ය": 1404, - "ර": 1405, - "ල": 1406, - "ව": 1407, - "ා": 1408, - "ก": 1409, - "ง": 1410, - "ต": 1411, - "ท": 1412, - "น": 1413, - "พ": 1414, - "ม": 1415, - "ย": 1416, - "ร": 1417, - "ล": 1418, - "ว": 1419, - "ส": 1420, - "อ": 1421, - "า": 1422, - "เ": 1423, - "་": 1424, - "།": 1425, - "ག": 1426, - "ང": 1427, - "ད": 1428, - "ན": 1429, - "པ": 1430, - "བ": 1431, - "མ": 1432, - "འ": 1433, - "ར": 1434, - "ལ": 1435, - "ས": 1436, - "မ": 1437, - "ა": 1438, - "ბ": 1439, - "გ": 1440, - "დ": 1441, - "ე": 1442, - "ვ": 1443, - "თ": 1444, 
- "ი": 1445, - "კ": 1446, - "ლ": 1447, - "მ": 1448, - "ნ": 1449, - "ო": 1450, - "რ": 1451, - "ს": 1452, - "ტ": 1453, - "უ": 1454, - "ᄀ": 1455, - "ᄂ": 1456, - "ᄃ": 1457, - "ᄅ": 1458, - "ᄆ": 1459, - "ᄇ": 1460, - "ᄉ": 1461, - "ᄊ": 1462, - "ᄋ": 1463, - "ᄌ": 1464, - "ᄎ": 1465, - "ᄏ": 1466, - "ᄐ": 1467, - "ᄑ": 1468, - "ᄒ": 1469, - "ᅡ": 1470, - "ᅢ": 1471, - "ᅥ": 1472, - "ᅦ": 1473, - "ᅧ": 1474, - "ᅩ": 1475, - "ᅪ": 1476, - "ᅭ": 1477, - "ᅮ": 1478, - "ᅯ": 1479, - "ᅲ": 1480, - "ᅳ": 1481, - "ᅴ": 1482, - "ᅵ": 1483, - "ᆨ": 1484, - "ᆫ": 1485, - "ᆯ": 1486, - "ᆷ": 1487, - "ᆸ": 1488, - "ᆼ": 1489, - "ᴬ": 1490, - "ᴮ": 1491, - "ᴰ": 1492, - "ᴵ": 1493, - "ᴺ": 1494, - "ᵀ": 1495, - "ᵃ": 1496, - "ᵇ": 1497, - "ᵈ": 1498, - "ᵉ": 1499, - "ᵍ": 1500, - "ᵏ": 1501, - "ᵐ": 1502, - "ᵒ": 1503, - "ᵖ": 1504, - "ᵗ": 1505, - "ᵘ": 1506, - "ᵢ": 1507, - "ᵣ": 1508, - "ᵤ": 1509, - "ᵥ": 1510, - "ᶜ": 1511, - "ᶠ": 1512, - "‐": 1513, - "‑": 1514, - "‒": 1515, - "–": 1516, - "—": 1517, - "―": 1518, - "‖": 1519, - "‘": 1520, - "’": 1521, - "‚": 1522, - "“": 1523, - "”": 1524, - "„": 1525, - "†": 1526, - "‡": 1527, - "•": 1528, - "…": 1529, - "‰": 1530, - "′": 1531, - "″": 1532, - "›": 1533, - "‿": 1534, - "⁄": 1535, - "⁰": 1536, - "ⁱ": 1537, - "⁴": 1538, - "⁵": 1539, - "⁶": 1540, - "⁷": 1541, - "⁸": 1542, - "⁹": 1543, - "⁺": 1544, - "⁻": 1545, - "ⁿ": 1546, - "₀": 1547, - "₁": 1548, - "₂": 1549, - "₃": 1550, - "₄": 1551, - "₅": 1552, - "₆": 1553, - "₇": 1554, - "₈": 1555, - "₉": 1556, - "₊": 1557, - "₍": 1558, - "₎": 1559, - "ₐ": 1560, - "ₑ": 1561, - "ₒ": 1562, - "ₓ": 1563, - "ₕ": 1564, - "ₖ": 1565, - "ₗ": 1566, - "ₘ": 1567, - "ₙ": 1568, - "ₚ": 1569, - "ₛ": 1570, - "ₜ": 1571, - "₤": 1572, - "₩": 1573, - "€": 1574, - "₱": 1575, - "₹": 1576, - "ℓ": 1577, - "№": 1578, - "ℝ": 1579, - "™": 1580, - "⅓": 1581, - "⅔": 1582, - "←": 1583, - "↑": 1584, - "→": 1585, - "↓": 1586, - "↔": 1587, - "↦": 1588, - "⇄": 1589, - "⇌": 1590, - "⇒": 1591, - "∂": 1592, - "∅": 1593, - "∆": 1594, - "∇": 1595, - "∈": 1596, - "−": 1597, - "∗": 
1598, - "∘": 1599, - "√": 1600, - "∞": 1601, - "∧": 1602, - "∨": 1603, - "∩": 1604, - "∪": 1605, - "≈": 1606, - "≡": 1607, - "≤": 1608, - "≥": 1609, - "⊂": 1610, - "⊆": 1611, - "⊕": 1612, - "⊗": 1613, - "⋅": 1614, - "─": 1615, - "│": 1616, - "■": 1617, - "▪": 1618, - "●": 1619, - "★": 1620, - "☆": 1621, - "☉": 1622, - "♠": 1623, - "♣": 1624, - "♥": 1625, - "♦": 1626, - "♭": 1627, - "♯": 1628, - "⟨": 1629, - "⟩": 1630, - "ⱼ": 1631, - "⺩": 1632, - "⺼": 1633, - "⽥": 1634, - "、": 1635, - "。": 1636, - "〈": 1637, - "〉": 1638, - "《": 1639, - "》": 1640, - "「": 1641, - "」": 1642, - "『": 1643, - "』": 1644, - "〜": 1645, - "あ": 1646, - "い": 1647, - "う": 1648, - "え": 1649, - "お": 1650, - "か": 1651, - "き": 1652, - "く": 1653, - "け": 1654, - "こ": 1655, - "さ": 1656, - "し": 1657, - "す": 1658, - "せ": 1659, - "そ": 1660, - "た": 1661, - "ち": 1662, - "っ": 1663, - "つ": 1664, - "て": 1665, - "と": 1666, - "な": 1667, - "に": 1668, - "ぬ": 1669, - "ね": 1670, - "の": 1671, - "は": 1672, - "ひ": 1673, - "ふ": 1674, - "へ": 1675, - "ほ": 1676, - "ま": 1677, - "み": 1678, - "む": 1679, - "め": 1680, - "も": 1681, - "や": 1682, - "ゆ": 1683, - "よ": 1684, - "ら": 1685, - "り": 1686, - "る": 1687, - "れ": 1688, - "ろ": 1689, - "を": 1690, - "ん": 1691, - "ァ": 1692, - "ア": 1693, - "ィ": 1694, - "イ": 1695, - "ウ": 1696, - "ェ": 1697, - "エ": 1698, - "オ": 1699, - "カ": 1700, - "キ": 1701, - "ク": 1702, - "ケ": 1703, - "コ": 1704, - "サ": 1705, - "シ": 1706, - "ス": 1707, - "セ": 1708, - "タ": 1709, - "チ": 1710, - "ッ": 1711, - "ツ": 1712, - "テ": 1713, - "ト": 1714, - "ナ": 1715, - "ニ": 1716, - "ノ": 1717, - "ハ": 1718, - "ヒ": 1719, - "フ": 1720, - "ヘ": 1721, - "ホ": 1722, - "マ": 1723, - "ミ": 1724, - "ム": 1725, - "メ": 1726, - "モ": 1727, - "ャ": 1728, - "ュ": 1729, - "ョ": 1730, - "ラ": 1731, - "リ": 1732, - "ル": 1733, - "レ": 1734, - "ロ": 1735, - "ワ": 1736, - "ン": 1737, - "・": 1738, - "ー": 1739, - "一": 1740, - "三": 1741, - "上": 1742, - "下": 1743, - "不": 1744, - "世": 1745, - "中": 1746, - "主": 1747, - "久": 1748, - "之": 1749, - "也": 1750, - "事": 1751, - 
"二": 1752, - "五": 1753, - "井": 1754, - "京": 1755, - "人": 1756, - "亻": 1757, - "仁": 1758, - "介": 1759, - "代": 1760, - "仮": 1761, - "伊": 1762, - "会": 1763, - "佐": 1764, - "侍": 1765, - "保": 1766, - "信": 1767, - "健": 1768, - "元": 1769, - "光": 1770, - "八": 1771, - "公": 1772, - "内": 1773, - "出": 1774, - "分": 1775, - "前": 1776, - "劉": 1777, - "力": 1778, - "加": 1779, - "勝": 1780, - "北": 1781, - "区": 1782, - "十": 1783, - "千": 1784, - "南": 1785, - "博": 1786, - "原": 1787, - "口": 1788, - "古": 1789, - "史": 1790, - "司": 1791, - "合": 1792, - "吉": 1793, - "同": 1794, - "名": 1795, - "和": 1796, - "囗": 1797, - "四": 1798, - "国": 1799, - "國": 1800, - "土": 1801, - "地": 1802, - "坂": 1803, - "城": 1804, - "堂": 1805, - "場": 1806, - "士": 1807, - "夏": 1808, - "外": 1809, - "大": 1810, - "天": 1811, - "太": 1812, - "夫": 1813, - "奈": 1814, - "女": 1815, - "子": 1816, - "学": 1817, - "宀": 1818, - "宇": 1819, - "安": 1820, - "宗": 1821, - "定": 1822, - "宣": 1823, - "宮": 1824, - "家": 1825, - "宿": 1826, - "寺": 1827, - "將": 1828, - "小": 1829, - "尚": 1830, - "山": 1831, - "岡": 1832, - "島": 1833, - "崎": 1834, - "川": 1835, - "州": 1836, - "巿": 1837, - "帝": 1838, - "平": 1839, - "年": 1840, - "幸": 1841, - "广": 1842, - "弘": 1843, - "張": 1844, - "彳": 1845, - "後": 1846, - "御": 1847, - "德": 1848, - "心": 1849, - "忄": 1850, - "志": 1851, - "忠": 1852, - "愛": 1853, - "成": 1854, - "我": 1855, - "戦": 1856, - "戸": 1857, - "手": 1858, - "扌": 1859, - "政": 1860, - "文": 1861, - "新": 1862, - "方": 1863, - "日": 1864, - "明": 1865, - "星": 1866, - "春": 1867, - "昭": 1868, - "智": 1869, - "曲": 1870, - "書": 1871, - "月": 1872, - "有": 1873, - "朝": 1874, - "木": 1875, - "本": 1876, - "李": 1877, - "村": 1878, - "東": 1879, - "松": 1880, - "林": 1881, - "森": 1882, - "楊": 1883, - "樹": 1884, - "橋": 1885, - "歌": 1886, - "止": 1887, - "正": 1888, - "武": 1889, - "比": 1890, - "氏": 1891, - "民": 1892, - "水": 1893, - "氵": 1894, - "氷": 1895, - "永": 1896, - "江": 1897, - "沢": 1898, - "河": 1899, - "治": 1900, - "法": 1901, - "海": 1902, - "清": 1903, - "漢": 1904, - "瀬": 1905, 
- "火": 1906, - "版": 1907, - "犬": 1908, - "王": 1909, - "生": 1910, - "田": 1911, - "男": 1912, - "疒": 1913, - "発": 1914, - "白": 1915, - "的": 1916, - "皇": 1917, - "目": 1918, - "相": 1919, - "省": 1920, - "真": 1921, - "石": 1922, - "示": 1923, - "社": 1924, - "神": 1925, - "福": 1926, - "禾": 1927, - "秀": 1928, - "秋": 1929, - "空": 1930, - "立": 1931, - "章": 1932, - "竹": 1933, - "糹": 1934, - "美": 1935, - "義": 1936, - "耳": 1937, - "良": 1938, - "艹": 1939, - "花": 1940, - "英": 1941, - "華": 1942, - "葉": 1943, - "藤": 1944, - "行": 1945, - "街": 1946, - "西": 1947, - "見": 1948, - "訁": 1949, - "語": 1950, - "谷": 1951, - "貝": 1952, - "貴": 1953, - "車": 1954, - "軍": 1955, - "辶": 1956, - "道": 1957, - "郎": 1958, - "郡": 1959, - "部": 1960, - "都": 1961, - "里": 1962, - "野": 1963, - "金": 1964, - "鈴": 1965, - "镇": 1966, - "長": 1967, - "門": 1968, - "間": 1969, - "阝": 1970, - "阿": 1971, - "陳": 1972, - "陽": 1973, - "雄": 1974, - "青": 1975, - "面": 1976, - "風": 1977, - "食": 1978, - "香": 1979, - "馬": 1980, - "高": 1981, - "龍": 1982, - "龸": 1983, - "fi": 1984, - "fl": 1985, - "!": 1986, - "(": 1987, - ")": 1988, - ",": 1989, - "-": 1990, - ".": 1991, - "/": 1992, - ":": 1993, - "?": 1994, - "~": 1995, - "the": 1996, - "of": 1997, - "and": 1998, - "in": 1999, - "to": 2000, - "was": 2001, - "he": 2002, - "is": 2003, - "as": 2004, - "for": 2005, - "on": 2006, - "with": 2007, - "that": 2008, - "it": 2009, - "his": 2010, - "by": 2011, - "at": 2012, - "from": 2013, - "her": 2014, - "##s": 2015, - "she": 2016, - "you": 2017, - "had": 2018, - "an": 2019, - "were": 2020, - "but": 2021, - "be": 2022, - "this": 2023, - "are": 2024, - "not": 2025, - "my": 2026, - "they": 2027, - "one": 2028, - "which": 2029, - "or": 2030, - "have": 2031, - "him": 2032, - "me": 2033, - "first": 2034, - "all": 2035, - "also": 2036, - "their": 2037, - "has": 2038, - "up": 2039, - "who": 2040, - "out": 2041, - "been": 2042, - "when": 2043, - "after": 2044, - "there": 2045, - "into": 2046, - "new": 2047, - "two": 2048, - "its": 2049, - "##a": 
2050, - "time": 2051, - "would": 2052, - "no": 2053, - "what": 2054, - "about": 2055, - "said": 2056, - "we": 2057, - "over": 2058, - "then": 2059, - "other": 2060, - "so": 2061, - "more": 2062, - "##e": 2063, - "can": 2064, - "if": 2065, - "like": 2066, - "back": 2067, - "them": 2068, - "only": 2069, - "some": 2070, - "could": 2071, - "##i": 2072, - "where": 2073, - "just": 2074, - "##ing": 2075, - "during": 2076, - "before": 2077, - "##n": 2078, - "do": 2079, - "##o": 2080, - "made": 2081, - "school": 2082, - "through": 2083, - "than": 2084, - "now": 2085, - "years": 2086, - "most": 2087, - "world": 2088, - "may": 2089, - "between": 2090, - "down": 2091, - "well": 2092, - "three": 2093, - "##d": 2094, - "year": 2095, - "while": 2096, - "will": 2097, - "##ed": 2098, - "##r": 2099, - "##y": 2100, - "later": 2101, - "##t": 2102, - "city": 2103, - "under": 2104, - "around": 2105, - "did": 2106, - "such": 2107, - "being": 2108, - "used": 2109, - "state": 2110, - "people": 2111, - "part": 2112, - "know": 2113, - "against": 2114, - "your": 2115, - "many": 2116, - "second": 2117, - "university": 2118, - "both": 2119, - "national": 2120, - "##er": 2121, - "these": 2122, - "don": 2123, - "known": 2124, - "off": 2125, - "way": 2126, - "until": 2127, - "re": 2128, - "how": 2129, - "even": 2130, - "get": 2131, - "head": 2132, - "...": 2133, - "didn": 2134, - "##ly": 2135, - "team": 2136, - "american": 2137, - "because": 2138, - "de": 2139, - "##l": 2140, - "born": 2141, - "united": 2142, - "film": 2143, - "since": 2144, - "still": 2145, - "long": 2146, - "work": 2147, - "south": 2148, - "us": 2149, - "became": 2150, - "any": 2151, - "high": 2152, - "again": 2153, - "day": 2154, - "family": 2155, - "see": 2156, - "right": 2157, - "man": 2158, - "eyes": 2159, - "house": 2160, - "season": 2161, - "war": 2162, - "states": 2163, - "including": 2164, - "took": 2165, - "life": 2166, - "north": 2167, - "same": 2168, - "each": 2169, - "called": 2170, - "name": 2171, - "much": 2172, - 
"place": 2173, - "however": 2174, - "go": 2175, - "four": 2176, - "group": 2177, - "another": 2178, - "found": 2179, - "won": 2180, - "area": 2181, - "here": 2182, - "going": 2183, - "10": 2184, - "away": 2185, - "series": 2186, - "left": 2187, - "home": 2188, - "music": 2189, - "best": 2190, - "make": 2191, - "hand": 2192, - "number": 2193, - "company": 2194, - "several": 2195, - "never": 2196, - "last": 2197, - "john": 2198, - "000": 2199, - "very": 2200, - "album": 2201, - "take": 2202, - "end": 2203, - "good": 2204, - "too": 2205, - "following": 2206, - "released": 2207, - "game": 2208, - "played": 2209, - "little": 2210, - "began": 2211, - "district": 2212, - "##m": 2213, - "old": 2214, - "want": 2215, - "those": 2216, - "side": 2217, - "held": 2218, - "own": 2219, - "early": 2220, - "county": 2221, - "ll": 2222, - "league": 2223, - "use": 2224, - "west": 2225, - "##u": 2226, - "face": 2227, - "think": 2228, - "##es": 2229, - "2010": 2230, - "government": 2231, - "##h": 2232, - "march": 2233, - "came": 2234, - "small": 2235, - "general": 2236, - "town": 2237, - "june": 2238, - "##on": 2239, - "line": 2240, - "based": 2241, - "something": 2242, - "##k": 2243, - "september": 2244, - "thought": 2245, - "looked": 2246, - "along": 2247, - "international": 2248, - "2011": 2249, - "air": 2250, - "july": 2251, - "club": 2252, - "went": 2253, - "january": 2254, - "october": 2255, - "our": 2256, - "august": 2257, - "april": 2258, - "york": 2259, - "12": 2260, - "few": 2261, - "2012": 2262, - "2008": 2263, - "east": 2264, - "show": 2265, - "member": 2266, - "college": 2267, - "2009": 2268, - "father": 2269, - "public": 2270, - "##us": 2271, - "come": 2272, - "men": 2273, - "five": 2274, - "set": 2275, - "station": 2276, - "church": 2277, - "##c": 2278, - "next": 2279, - "former": 2280, - "november": 2281, - "room": 2282, - "party": 2283, - "located": 2284, - "december": 2285, - "2013": 2286, - "age": 2287, - "got": 2288, - "2007": 2289, - "##g": 2290, - "system": 2291, - 
"let": 2292, - "love": 2293, - "2006": 2294, - "though": 2295, - "every": 2296, - "2014": 2297, - "look": 2298, - "song": 2299, - "water": 2300, - "century": 2301, - "without": 2302, - "body": 2303, - "black": 2304, - "night": 2305, - "within": 2306, - "great": 2307, - "women": 2308, - "single": 2309, - "ve": 2310, - "building": 2311, - "large": 2312, - "population": 2313, - "river": 2314, - "named": 2315, - "band": 2316, - "white": 2317, - "started": 2318, - "##an": 2319, - "once": 2320, - "15": 2321, - "20": 2322, - "should": 2323, - "18": 2324, - "2015": 2325, - "service": 2326, - "top": 2327, - "built": 2328, - "british": 2329, - "open": 2330, - "death": 2331, - "king": 2332, - "moved": 2333, - "local": 2334, - "times": 2335, - "children": 2336, - "february": 2337, - "book": 2338, - "why": 2339, - "11": 2340, - "door": 2341, - "need": 2342, - "president": 2343, - "order": 2344, - "final": 2345, - "road": 2346, - "wasn": 2347, - "although": 2348, - "due": 2349, - "major": 2350, - "died": 2351, - "village": 2352, - "third": 2353, - "knew": 2354, - "2016": 2355, - "asked": 2356, - "turned": 2357, - "st": 2358, - "wanted": 2359, - "say": 2360, - "##p": 2361, - "together": 2362, - "received": 2363, - "main": 2364, - "son": 2365, - "served": 2366, - "different": 2367, - "##en": 2368, - "behind": 2369, - "himself": 2370, - "felt": 2371, - "members": 2372, - "power": 2373, - "football": 2374, - "law": 2375, - "voice": 2376, - "play": 2377, - "##in": 2378, - "near": 2379, - "park": 2380, - "history": 2381, - "30": 2382, - "having": 2383, - "2005": 2384, - "16": 2385, - "##man": 2386, - "saw": 2387, - "mother": 2388, - "##al": 2389, - "army": 2390, - "point": 2391, - "front": 2392, - "help": 2393, - "english": 2394, - "street": 2395, - "art": 2396, - "late": 2397, - "hands": 2398, - "games": 2399, - "award": 2400, - "##ia": 2401, - "young": 2402, - "14": 2403, - "put": 2404, - "published": 2405, - "country": 2406, - "division": 2407, - "across": 2408, - "told": 2409, - 
"13": 2410, - "often": 2411, - "ever": 2412, - "french": 2413, - "london": 2414, - "center": 2415, - "six": 2416, - "red": 2417, - "2017": 2418, - "led": 2419, - "days": 2420, - "include": 2421, - "light": 2422, - "25": 2423, - "find": 2424, - "tell": 2425, - "among": 2426, - "species": 2427, - "really": 2428, - "according": 2429, - "central": 2430, - "half": 2431, - "2004": 2432, - "form": 2433, - "original": 2434, - "gave": 2435, - "office": 2436, - "making": 2437, - "enough": 2438, - "lost": 2439, - "full": 2440, - "opened": 2441, - "must": 2442, - "included": 2443, - "live": 2444, - "given": 2445, - "german": 2446, - "player": 2447, - "run": 2448, - "business": 2449, - "woman": 2450, - "community": 2451, - "cup": 2452, - "might": 2453, - "million": 2454, - "land": 2455, - "2000": 2456, - "court": 2457, - "development": 2458, - "17": 2459, - "short": 2460, - "round": 2461, - "ii": 2462, - "km": 2463, - "seen": 2464, - "class": 2465, - "story": 2466, - "always": 2467, - "become": 2468, - "sure": 2469, - "research": 2470, - "almost": 2471, - "director": 2472, - "council": 2473, - "la": 2474, - "##2": 2475, - "career": 2476, - "things": 2477, - "using": 2478, - "island": 2479, - "##z": 2480, - "couldn": 2481, - "car": 2482, - "##is": 2483, - "24": 2484, - "close": 2485, - "force": 2486, - "##1": 2487, - "better": 2488, - "free": 2489, - "support": 2490, - "control": 2491, - "field": 2492, - "students": 2493, - "2003": 2494, - "education": 2495, - "married": 2496, - "##b": 2497, - "nothing": 2498, - "worked": 2499, - "others": 2500, - "record": 2501, - "big": 2502, - "inside": 2503, - "level": 2504, - "anything": 2505, - "continued": 2506, - "give": 2507, - "james": 2508, - "##3": 2509, - "military": 2510, - "established": 2511, - "non": 2512, - "returned": 2513, - "feel": 2514, - "does": 2515, - "title": 2516, - "written": 2517, - "thing": 2518, - "feet": 2519, - "william": 2520, - "far": 2521, - "co": 2522, - "association": 2523, - "hard": 2524, - "already": 2525, 
- "2002": 2526, - "##ra": 2527, - "championship": 2528, - "human": 2529, - "western": 2530, - "100": 2531, - "##na": 2532, - "department": 2533, - "hall": 2534, - "role": 2535, - "various": 2536, - "production": 2537, - "21": 2538, - "19": 2539, - "heart": 2540, - "2001": 2541, - "living": 2542, - "fire": 2543, - "version": 2544, - "##ers": 2545, - "##f": 2546, - "television": 2547, - "royal": 2548, - "##4": 2549, - "produced": 2550, - "working": 2551, - "act": 2552, - "case": 2553, - "society": 2554, - "region": 2555, - "present": 2556, - "radio": 2557, - "period": 2558, - "looking": 2559, - "least": 2560, - "total": 2561, - "keep": 2562, - "england": 2563, - "wife": 2564, - "program": 2565, - "per": 2566, - "brother": 2567, - "mind": 2568, - "special": 2569, - "22": 2570, - "##le": 2571, - "am": 2572, - "works": 2573, - "soon": 2574, - "##6": 2575, - "political": 2576, - "george": 2577, - "services": 2578, - "taken": 2579, - "created": 2580, - "##7": 2581, - "further": 2582, - "able": 2583, - "reached": 2584, - "david": 2585, - "union": 2586, - "joined": 2587, - "upon": 2588, - "done": 2589, - "important": 2590, - "social": 2591, - "information": 2592, - "either": 2593, - "##ic": 2594, - "##x": 2595, - "appeared": 2596, - "position": 2597, - "ground": 2598, - "lead": 2599, - "rock": 2600, - "dark": 2601, - "election": 2602, - "23": 2603, - "board": 2604, - "france": 2605, - "hair": 2606, - "course": 2607, - "arms": 2608, - "site": 2609, - "police": 2610, - "girl": 2611, - "instead": 2612, - "real": 2613, - "sound": 2614, - "##v": 2615, - "words": 2616, - "moment": 2617, - "##te": 2618, - "someone": 2619, - "##8": 2620, - "summer": 2621, - "project": 2622, - "announced": 2623, - "san": 2624, - "less": 2625, - "wrote": 2626, - "past": 2627, - "followed": 2628, - "##5": 2629, - "blue": 2630, - "founded": 2631, - "al": 2632, - "finally": 2633, - "india": 2634, - "taking": 2635, - "records": 2636, - "america": 2637, - "##ne": 2638, - "1999": 2639, - "design": 2640, - 
"considered": 2641, - "northern": 2642, - "god": 2643, - "stop": 2644, - "battle": 2645, - "toward": 2646, - "european": 2647, - "outside": 2648, - "described": 2649, - "track": 2650, - "today": 2651, - "playing": 2652, - "language": 2653, - "28": 2654, - "call": 2655, - "26": 2656, - "heard": 2657, - "professional": 2658, - "low": 2659, - "australia": 2660, - "miles": 2661, - "california": 2662, - "win": 2663, - "yet": 2664, - "green": 2665, - "##ie": 2666, - "trying": 2667, - "blood": 2668, - "##ton": 2669, - "southern": 2670, - "science": 2671, - "maybe": 2672, - "everything": 2673, - "match": 2674, - "square": 2675, - "27": 2676, - "mouth": 2677, - "video": 2678, - "race": 2679, - "recorded": 2680, - "leave": 2681, - "above": 2682, - "##9": 2683, - "daughter": 2684, - "points": 2685, - "space": 2686, - "1998": 2687, - "museum": 2688, - "change": 2689, - "middle": 2690, - "common": 2691, - "##0": 2692, - "move": 2693, - "tv": 2694, - "post": 2695, - "##ta": 2696, - "lake": 2697, - "seven": 2698, - "tried": 2699, - "elected": 2700, - "closed": 2701, - "ten": 2702, - "paul": 2703, - "minister": 2704, - "##th": 2705, - "months": 2706, - "start": 2707, - "chief": 2708, - "return": 2709, - "canada": 2710, - "person": 2711, - "sea": 2712, - "release": 2713, - "similar": 2714, - "modern": 2715, - "brought": 2716, - "rest": 2717, - "hit": 2718, - "formed": 2719, - "mr": 2720, - "##la": 2721, - "1997": 2722, - "floor": 2723, - "event": 2724, - "doing": 2725, - "thomas": 2726, - "1996": 2727, - "robert": 2728, - "care": 2729, - "killed": 2730, - "training": 2731, - "star": 2732, - "week": 2733, - "needed": 2734, - "turn": 2735, - "finished": 2736, - "railway": 2737, - "rather": 2738, - "news": 2739, - "health": 2740, - "sent": 2741, - "example": 2742, - "ran": 2743, - "term": 2744, - "michael": 2745, - "coming": 2746, - "currently": 2747, - "yes": 2748, - "forces": 2749, - "despite": 2750, - "gold": 2751, - "areas": 2752, - "50": 2753, - "stage": 2754, - "fact": 2755, - 
"29": 2756, - "dead": 2757, - "says": 2758, - "popular": 2759, - "2018": 2760, - "originally": 2761, - "germany": 2762, - "probably": 2763, - "developed": 2764, - "result": 2765, - "pulled": 2766, - "friend": 2767, - "stood": 2768, - "money": 2769, - "running": 2770, - "mi": 2771, - "signed": 2772, - "word": 2773, - "songs": 2774, - "child": 2775, - "eventually": 2776, - "met": 2777, - "tour": 2778, - "average": 2779, - "teams": 2780, - "minutes": 2781, - "festival": 2782, - "current": 2783, - "deep": 2784, - "kind": 2785, - "1995": 2786, - "decided": 2787, - "usually": 2788, - "eastern": 2789, - "seemed": 2790, - "##ness": 2791, - "episode": 2792, - "bed": 2793, - "added": 2794, - "table": 2795, - "indian": 2796, - "private": 2797, - "charles": 2798, - "route": 2799, - "available": 2800, - "idea": 2801, - "throughout": 2802, - "centre": 2803, - "addition": 2804, - "appointed": 2805, - "style": 2806, - "1994": 2807, - "books": 2808, - "eight": 2809, - "construction": 2810, - "press": 2811, - "mean": 2812, - "wall": 2813, - "friends": 2814, - "remained": 2815, - "schools": 2816, - "study": 2817, - "##ch": 2818, - "##um": 2819, - "institute": 2820, - "oh": 2821, - "chinese": 2822, - "sometimes": 2823, - "events": 2824, - "possible": 2825, - "1992": 2826, - "australian": 2827, - "type": 2828, - "brown": 2829, - "forward": 2830, - "talk": 2831, - "process": 2832, - "food": 2833, - "debut": 2834, - "seat": 2835, - "performance": 2836, - "committee": 2837, - "features": 2838, - "character": 2839, - "arts": 2840, - "herself": 2841, - "else": 2842, - "lot": 2843, - "strong": 2844, - "russian": 2845, - "range": 2846, - "hours": 2847, - "peter": 2848, - "arm": 2849, - "##da": 2850, - "morning": 2851, - "dr": 2852, - "sold": 2853, - "##ry": 2854, - "quickly": 2855, - "directed": 2856, - "1993": 2857, - "guitar": 2858, - "china": 2859, - "##w": 2860, - "31": 2861, - "list": 2862, - "##ma": 2863, - "performed": 2864, - "media": 2865, - "uk": 2866, - "players": 2867, - "smile": 
2868, - "##rs": 2869, - "myself": 2870, - "40": 2871, - "placed": 2872, - "coach": 2873, - "province": 2874, - "towards": 2875, - "wouldn": 2876, - "leading": 2877, - "whole": 2878, - "boy": 2879, - "official": 2880, - "designed": 2881, - "grand": 2882, - "census": 2883, - "##el": 2884, - "europe": 2885, - "attack": 2886, - "japanese": 2887, - "henry": 2888, - "1991": 2889, - "##re": 2890, - "##os": 2891, - "cross": 2892, - "getting": 2893, - "alone": 2894, - "action": 2895, - "lower": 2896, - "network": 2897, - "wide": 2898, - "washington": 2899, - "japan": 2900, - "1990": 2901, - "hospital": 2902, - "believe": 2903, - "changed": 2904, - "sister": 2905, - "##ar": 2906, - "hold": 2907, - "gone": 2908, - "sir": 2909, - "hadn": 2910, - "ship": 2911, - "##ka": 2912, - "studies": 2913, - "academy": 2914, - "shot": 2915, - "rights": 2916, - "below": 2917, - "base": 2918, - "bad": 2919, - "involved": 2920, - "kept": 2921, - "largest": 2922, - "##ist": 2923, - "bank": 2924, - "future": 2925, - "especially": 2926, - "beginning": 2927, - "mark": 2928, - "movement": 2929, - "section": 2930, - "female": 2931, - "magazine": 2932, - "plan": 2933, - "professor": 2934, - "lord": 2935, - "longer": 2936, - "##ian": 2937, - "sat": 2938, - "walked": 2939, - "hill": 2940, - "actually": 2941, - "civil": 2942, - "energy": 2943, - "model": 2944, - "families": 2945, - "size": 2946, - "thus": 2947, - "aircraft": 2948, - "completed": 2949, - "includes": 2950, - "data": 2951, - "captain": 2952, - "##or": 2953, - "fight": 2954, - "vocals": 2955, - "featured": 2956, - "richard": 2957, - "bridge": 2958, - "fourth": 2959, - "1989": 2960, - "officer": 2961, - "stone": 2962, - "hear": 2963, - "##ism": 2964, - "means": 2965, - "medical": 2966, - "groups": 2967, - "management": 2968, - "self": 2969, - "lips": 2970, - "competition": 2971, - "entire": 2972, - "lived": 2973, - "technology": 2974, - "leaving": 2975, - "federal": 2976, - "tournament": 2977, - "bit": 2978, - "passed": 2979, - "hot": 2980, 
- "independent": 2981, - "awards": 2982, - "kingdom": 2983, - "mary": 2984, - "spent": 2985, - "fine": 2986, - "doesn": 2987, - "reported": 2988, - "##ling": 2989, - "jack": 2990, - "fall": 2991, - "raised": 2992, - "itself": 2993, - "stay": 2994, - "true": 2995, - "studio": 2996, - "1988": 2997, - "sports": 2998, - "replaced": 2999, - "paris": 3000, - "systems": 3001, - "saint": 3002, - "leader": 3003, - "theatre": 3004, - "whose": 3005, - "market": 3006, - "capital": 3007, - "parents": 3008, - "spanish": 3009, - "canadian": 3010, - "earth": 3011, - "##ity": 3012, - "cut": 3013, - "degree": 3014, - "writing": 3015, - "bay": 3016, - "christian": 3017, - "awarded": 3018, - "natural": 3019, - "higher": 3020, - "bill": 3021, - "##as": 3022, - "coast": 3023, - "provided": 3024, - "previous": 3025, - "senior": 3026, - "ft": 3027, - "valley": 3028, - "organization": 3029, - "stopped": 3030, - "onto": 3031, - "countries": 3032, - "parts": 3033, - "conference": 3034, - "queen": 3035, - "security": 3036, - "interest": 3037, - "saying": 3038, - "allowed": 3039, - "master": 3040, - "earlier": 3041, - "phone": 3042, - "matter": 3043, - "smith": 3044, - "winning": 3045, - "try": 3046, - "happened": 3047, - "moving": 3048, - "campaign": 3049, - "los": 3050, - "##ley": 3051, - "breath": 3052, - "nearly": 3053, - "mid": 3054, - "1987": 3055, - "certain": 3056, - "girls": 3057, - "date": 3058, - "italian": 3059, - "african": 3060, - "standing": 3061, - "fell": 3062, - "artist": 3063, - "##ted": 3064, - "shows": 3065, - "deal": 3066, - "mine": 3067, - "industry": 3068, - "1986": 3069, - "##ng": 3070, - "everyone": 3071, - "republic": 3072, - "provide": 3073, - "collection": 3074, - "library": 3075, - "student": 3076, - "##ville": 3077, - "primary": 3078, - "owned": 3079, - "older": 3080, - "via": 3081, - "heavy": 3082, - "1st": 3083, - "makes": 3084, - "##able": 3085, - "attention": 3086, - "anyone": 3087, - "africa": 3088, - "##ri": 3089, - "stated": 3090, - "length": 3091, - 
"ended": 3092, - "fingers": 3093, - "command": 3094, - "staff": 3095, - "skin": 3096, - "foreign": 3097, - "opening": 3098, - "governor": 3099, - "okay": 3100, - "medal": 3101, - "kill": 3102, - "sun": 3103, - "cover": 3104, - "job": 3105, - "1985": 3106, - "introduced": 3107, - "chest": 3108, - "hell": 3109, - "feeling": 3110, - "##ies": 3111, - "success": 3112, - "meet": 3113, - "reason": 3114, - "standard": 3115, - "meeting": 3116, - "novel": 3117, - "1984": 3118, - "trade": 3119, - "source": 3120, - "buildings": 3121, - "##land": 3122, - "rose": 3123, - "guy": 3124, - "goal": 3125, - "##ur": 3126, - "chapter": 3127, - "native": 3128, - "husband": 3129, - "previously": 3130, - "unit": 3131, - "limited": 3132, - "entered": 3133, - "weeks": 3134, - "producer": 3135, - "operations": 3136, - "mountain": 3137, - "takes": 3138, - "covered": 3139, - "forced": 3140, - "related": 3141, - "roman": 3142, - "complete": 3143, - "successful": 3144, - "key": 3145, - "texas": 3146, - "cold": 3147, - "##ya": 3148, - "channel": 3149, - "1980": 3150, - "traditional": 3151, - "films": 3152, - "dance": 3153, - "clear": 3154, - "approximately": 3155, - "500": 3156, - "nine": 3157, - "van": 3158, - "prince": 3159, - "question": 3160, - "active": 3161, - "tracks": 3162, - "ireland": 3163, - "regional": 3164, - "silver": 3165, - "author": 3166, - "personal": 3167, - "sense": 3168, - "operation": 3169, - "##ine": 3170, - "economic": 3171, - "1983": 3172, - "holding": 3173, - "twenty": 3174, - "isbn": 3175, - "additional": 3176, - "speed": 3177, - "hour": 3178, - "edition": 3179, - "regular": 3180, - "historic": 3181, - "places": 3182, - "whom": 3183, - "shook": 3184, - "movie": 3185, - "km²": 3186, - "secretary": 3187, - "prior": 3188, - "report": 3189, - "chicago": 3190, - "read": 3191, - "foundation": 3192, - "view": 3193, - "engine": 3194, - "scored": 3195, - "1982": 3196, - "units": 3197, - "ask": 3198, - "airport": 3199, - "property": 3200, - "ready": 3201, - "immediately": 3202, - 
"lady": 3203, - "month": 3204, - "listed": 3205, - "contract": 3206, - "##de": 3207, - "manager": 3208, - "themselves": 3209, - "lines": 3210, - "##ki": 3211, - "navy": 3212, - "writer": 3213, - "meant": 3214, - "##ts": 3215, - "runs": 3216, - "##ro": 3217, - "practice": 3218, - "championships": 3219, - "singer": 3220, - "glass": 3221, - "commission": 3222, - "required": 3223, - "forest": 3224, - "starting": 3225, - "culture": 3226, - "generally": 3227, - "giving": 3228, - "access": 3229, - "attended": 3230, - "test": 3231, - "couple": 3232, - "stand": 3233, - "catholic": 3234, - "martin": 3235, - "caught": 3236, - "executive": 3237, - "##less": 3238, - "eye": 3239, - "##ey": 3240, - "thinking": 3241, - "chair": 3242, - "quite": 3243, - "shoulder": 3244, - "1979": 3245, - "hope": 3246, - "decision": 3247, - "plays": 3248, - "defeated": 3249, - "municipality": 3250, - "whether": 3251, - "structure": 3252, - "offered": 3253, - "slowly": 3254, - "pain": 3255, - "ice": 3256, - "direction": 3257, - "##ion": 3258, - "paper": 3259, - "mission": 3260, - "1981": 3261, - "mostly": 3262, - "200": 3263, - "noted": 3264, - "individual": 3265, - "managed": 3266, - "nature": 3267, - "lives": 3268, - "plant": 3269, - "##ha": 3270, - "helped": 3271, - "except": 3272, - "studied": 3273, - "computer": 3274, - "figure": 3275, - "relationship": 3276, - "issue": 3277, - "significant": 3278, - "loss": 3279, - "die": 3280, - "smiled": 3281, - "gun": 3282, - "ago": 3283, - "highest": 3284, - "1972": 3285, - "##am": 3286, - "male": 3287, - "bring": 3288, - "goals": 3289, - "mexico": 3290, - "problem": 3291, - "distance": 3292, - "commercial": 3293, - "completely": 3294, - "location": 3295, - "annual": 3296, - "famous": 3297, - "drive": 3298, - "1976": 3299, - "neck": 3300, - "1978": 3301, - "surface": 3302, - "caused": 3303, - "italy": 3304, - "understand": 3305, - "greek": 3306, - "highway": 3307, - "wrong": 3308, - "hotel": 3309, - "comes": 3310, - "appearance": 3311, - "joseph": 3312, - 
"double": 3313, - "issues": 3314, - "musical": 3315, - "companies": 3316, - "castle": 3317, - "income": 3318, - "review": 3319, - "assembly": 3320, - "bass": 3321, - "initially": 3322, - "parliament": 3323, - "artists": 3324, - "experience": 3325, - "1974": 3326, - "particular": 3327, - "walk": 3328, - "foot": 3329, - "engineering": 3330, - "talking": 3331, - "window": 3332, - "dropped": 3333, - "##ter": 3334, - "miss": 3335, - "baby": 3336, - "boys": 3337, - "break": 3338, - "1975": 3339, - "stars": 3340, - "edge": 3341, - "remember": 3342, - "policy": 3343, - "carried": 3344, - "train": 3345, - "stadium": 3346, - "bar": 3347, - "sex": 3348, - "angeles": 3349, - "evidence": 3350, - "##ge": 3351, - "becoming": 3352, - "assistant": 3353, - "soviet": 3354, - "1977": 3355, - "upper": 3356, - "step": 3357, - "wing": 3358, - "1970": 3359, - "youth": 3360, - "financial": 3361, - "reach": 3362, - "##ll": 3363, - "actor": 3364, - "numerous": 3365, - "##se": 3366, - "##st": 3367, - "nodded": 3368, - "arrived": 3369, - "##ation": 3370, - "minute": 3371, - "##nt": 3372, - "believed": 3373, - "sorry": 3374, - "complex": 3375, - "beautiful": 3376, - "victory": 3377, - "associated": 3378, - "temple": 3379, - "1968": 3380, - "1973": 3381, - "chance": 3382, - "perhaps": 3383, - "metal": 3384, - "##son": 3385, - "1945": 3386, - "bishop": 3387, - "##et": 3388, - "lee": 3389, - "launched": 3390, - "particularly": 3391, - "tree": 3392, - "le": 3393, - "retired": 3394, - "subject": 3395, - "prize": 3396, - "contains": 3397, - "yeah": 3398, - "theory": 3399, - "empire": 3400, - "##ce": 3401, - "suddenly": 3402, - "waiting": 3403, - "trust": 3404, - "recording": 3405, - "##to": 3406, - "happy": 3407, - "terms": 3408, - "camp": 3409, - "champion": 3410, - "1971": 3411, - "religious": 3412, - "pass": 3413, - "zealand": 3414, - "names": 3415, - "2nd": 3416, - "port": 3417, - "ancient": 3418, - "tom": 3419, - "corner": 3420, - "represented": 3421, - "watch": 3422, - "legal": 3423, - "anti": 
3424, - "justice": 3425, - "cause": 3426, - "watched": 3427, - "brothers": 3428, - "45": 3429, - "material": 3430, - "changes": 3431, - "simply": 3432, - "response": 3433, - "louis": 3434, - "fast": 3435, - "##ting": 3436, - "answer": 3437, - "60": 3438, - "historical": 3439, - "1969": 3440, - "stories": 3441, - "straight": 3442, - "create": 3443, - "feature": 3444, - "increased": 3445, - "rate": 3446, - "administration": 3447, - "virginia": 3448, - "el": 3449, - "activities": 3450, - "cultural": 3451, - "overall": 3452, - "winner": 3453, - "programs": 3454, - "basketball": 3455, - "legs": 3456, - "guard": 3457, - "beyond": 3458, - "cast": 3459, - "doctor": 3460, - "mm": 3461, - "flight": 3462, - "results": 3463, - "remains": 3464, - "cost": 3465, - "effect": 3466, - "winter": 3467, - "##ble": 3468, - "larger": 3469, - "islands": 3470, - "problems": 3471, - "chairman": 3472, - "grew": 3473, - "commander": 3474, - "isn": 3475, - "1967": 3476, - "pay": 3477, - "failed": 3478, - "selected": 3479, - "hurt": 3480, - "fort": 3481, - "box": 3482, - "regiment": 3483, - "majority": 3484, - "journal": 3485, - "35": 3486, - "edward": 3487, - "plans": 3488, - "##ke": 3489, - "##ni": 3490, - "shown": 3491, - "pretty": 3492, - "irish": 3493, - "characters": 3494, - "directly": 3495, - "scene": 3496, - "likely": 3497, - "operated": 3498, - "allow": 3499, - "spring": 3500, - "##j": 3501, - "junior": 3502, - "matches": 3503, - "looks": 3504, - "mike": 3505, - "houses": 3506, - "fellow": 3507, - "##tion": 3508, - "beach": 3509, - "marriage": 3510, - "##ham": 3511, - "##ive": 3512, - "rules": 3513, - "oil": 3514, - "65": 3515, - "florida": 3516, - "expected": 3517, - "nearby": 3518, - "congress": 3519, - "sam": 3520, - "peace": 3521, - "recent": 3522, - "iii": 3523, - "wait": 3524, - "subsequently": 3525, - "cell": 3526, - "##do": 3527, - "variety": 3528, - "serving": 3529, - "agreed": 3530, - "please": 3531, - "poor": 3532, - "joe": 3533, - "pacific": 3534, - "attempt": 3535, - 
"wood": 3536, - "democratic": 3537, - "piece": 3538, - "prime": 3539, - "##ca": 3540, - "rural": 3541, - "mile": 3542, - "touch": 3543, - "appears": 3544, - "township": 3545, - "1964": 3546, - "1966": 3547, - "soldiers": 3548, - "##men": 3549, - "##ized": 3550, - "1965": 3551, - "pennsylvania": 3552, - "closer": 3553, - "fighting": 3554, - "claimed": 3555, - "score": 3556, - "jones": 3557, - "physical": 3558, - "editor": 3559, - "##ous": 3560, - "filled": 3561, - "genus": 3562, - "specific": 3563, - "sitting": 3564, - "super": 3565, - "mom": 3566, - "##va": 3567, - "therefore": 3568, - "supported": 3569, - "status": 3570, - "fear": 3571, - "cases": 3572, - "store": 3573, - "meaning": 3574, - "wales": 3575, - "minor": 3576, - "spain": 3577, - "tower": 3578, - "focus": 3579, - "vice": 3580, - "frank": 3581, - "follow": 3582, - "parish": 3583, - "separate": 3584, - "golden": 3585, - "horse": 3586, - "fifth": 3587, - "remaining": 3588, - "branch": 3589, - "32": 3590, - "presented": 3591, - "stared": 3592, - "##id": 3593, - "uses": 3594, - "secret": 3595, - "forms": 3596, - "##co": 3597, - "baseball": 3598, - "exactly": 3599, - "##ck": 3600, - "choice": 3601, - "note": 3602, - "discovered": 3603, - "travel": 3604, - "composed": 3605, - "truth": 3606, - "russia": 3607, - "ball": 3608, - "color": 3609, - "kiss": 3610, - "dad": 3611, - "wind": 3612, - "continue": 3613, - "ring": 3614, - "referred": 3615, - "numbers": 3616, - "digital": 3617, - "greater": 3618, - "##ns": 3619, - "metres": 3620, - "slightly": 3621, - "direct": 3622, - "increase": 3623, - "1960": 3624, - "responsible": 3625, - "crew": 3626, - "rule": 3627, - "trees": 3628, - "troops": 3629, - "##no": 3630, - "broke": 3631, - "goes": 3632, - "individuals": 3633, - "hundred": 3634, - "weight": 3635, - "creek": 3636, - "sleep": 3637, - "memory": 3638, - "defense": 3639, - "provides": 3640, - "ordered": 3641, - "code": 3642, - "value": 3643, - "jewish": 3644, - "windows": 3645, - "1944": 3646, - "safe": 3647, - 
"judge": 3648, - "whatever": 3649, - "corps": 3650, - "realized": 3651, - "growing": 3652, - "pre": 3653, - "##ga": 3654, - "cities": 3655, - "alexander": 3656, - "gaze": 3657, - "lies": 3658, - "spread": 3659, - "scott": 3660, - "letter": 3661, - "showed": 3662, - "situation": 3663, - "mayor": 3664, - "transport": 3665, - "watching": 3666, - "workers": 3667, - "extended": 3668, - "##li": 3669, - "expression": 3670, - "normal": 3671, - "##ment": 3672, - "chart": 3673, - "multiple": 3674, - "border": 3675, - "##ba": 3676, - "host": 3677, - "##ner": 3678, - "daily": 3679, - "mrs": 3680, - "walls": 3681, - "piano": 3682, - "##ko": 3683, - "heat": 3684, - "cannot": 3685, - "##ate": 3686, - "earned": 3687, - "products": 3688, - "drama": 3689, - "era": 3690, - "authority": 3691, - "seasons": 3692, - "join": 3693, - "grade": 3694, - "##io": 3695, - "sign": 3696, - "difficult": 3697, - "machine": 3698, - "1963": 3699, - "territory": 3700, - "mainly": 3701, - "##wood": 3702, - "stations": 3703, - "squadron": 3704, - "1962": 3705, - "stepped": 3706, - "iron": 3707, - "19th": 3708, - "##led": 3709, - "serve": 3710, - "appear": 3711, - "sky": 3712, - "speak": 3713, - "broken": 3714, - "charge": 3715, - "knowledge": 3716, - "kilometres": 3717, - "removed": 3718, - "ships": 3719, - "article": 3720, - "campus": 3721, - "simple": 3722, - "##ty": 3723, - "pushed": 3724, - "britain": 3725, - "##ve": 3726, - "leaves": 3727, - "recently": 3728, - "cd": 3729, - "soft": 3730, - "boston": 3731, - "latter": 3732, - "easy": 3733, - "acquired": 3734, - "poland": 3735, - "##sa": 3736, - "quality": 3737, - "officers": 3738, - "presence": 3739, - "planned": 3740, - "nations": 3741, - "mass": 3742, - "broadcast": 3743, - "jean": 3744, - "share": 3745, - "image": 3746, - "influence": 3747, - "wild": 3748, - "offer": 3749, - "emperor": 3750, - "electric": 3751, - "reading": 3752, - "headed": 3753, - "ability": 3754, - "promoted": 3755, - "yellow": 3756, - "ministry": 3757, - "1942": 3758, - 
"throat": 3759, - "smaller": 3760, - "politician": 3761, - "##by": 3762, - "latin": 3763, - "spoke": 3764, - "cars": 3765, - "williams": 3766, - "males": 3767, - "lack": 3768, - "pop": 3769, - "80": 3770, - "##ier": 3771, - "acting": 3772, - "seeing": 3773, - "consists": 3774, - "##ti": 3775, - "estate": 3776, - "1961": 3777, - "pressure": 3778, - "johnson": 3779, - "newspaper": 3780, - "jr": 3781, - "chris": 3782, - "olympics": 3783, - "online": 3784, - "conditions": 3785, - "beat": 3786, - "elements": 3787, - "walking": 3788, - "vote": 3789, - "##field": 3790, - "needs": 3791, - "carolina": 3792, - "text": 3793, - "featuring": 3794, - "global": 3795, - "block": 3796, - "shirt": 3797, - "levels": 3798, - "francisco": 3799, - "purpose": 3800, - "females": 3801, - "et": 3802, - "dutch": 3803, - "duke": 3804, - "ahead": 3805, - "gas": 3806, - "twice": 3807, - "safety": 3808, - "serious": 3809, - "turning": 3810, - "highly": 3811, - "lieutenant": 3812, - "firm": 3813, - "maria": 3814, - "amount": 3815, - "mixed": 3816, - "daniel": 3817, - "proposed": 3818, - "perfect": 3819, - "agreement": 3820, - "affairs": 3821, - "3rd": 3822, - "seconds": 3823, - "contemporary": 3824, - "paid": 3825, - "1943": 3826, - "prison": 3827, - "save": 3828, - "kitchen": 3829, - "label": 3830, - "administrative": 3831, - "intended": 3832, - "constructed": 3833, - "academic": 3834, - "nice": 3835, - "teacher": 3836, - "races": 3837, - "1956": 3838, - "formerly": 3839, - "corporation": 3840, - "ben": 3841, - "nation": 3842, - "issued": 3843, - "shut": 3844, - "1958": 3845, - "drums": 3846, - "housing": 3847, - "victoria": 3848, - "seems": 3849, - "opera": 3850, - "1959": 3851, - "graduated": 3852, - "function": 3853, - "von": 3854, - "mentioned": 3855, - "picked": 3856, - "build": 3857, - "recognized": 3858, - "shortly": 3859, - "protection": 3860, - "picture": 3861, - "notable": 3862, - "exchange": 3863, - "elections": 3864, - "1980s": 3865, - "loved": 3866, - "percent": 3867, - "racing": 
3868, - "fish": 3869, - "elizabeth": 3870, - "garden": 3871, - "volume": 3872, - "hockey": 3873, - "1941": 3874, - "beside": 3875, - "settled": 3876, - "##ford": 3877, - "1940": 3878, - "competed": 3879, - "replied": 3880, - "drew": 3881, - "1948": 3882, - "actress": 3883, - "marine": 3884, - "scotland": 3885, - "steel": 3886, - "glanced": 3887, - "farm": 3888, - "steve": 3889, - "1957": 3890, - "risk": 3891, - "tonight": 3892, - "positive": 3893, - "magic": 3894, - "singles": 3895, - "effects": 3896, - "gray": 3897, - "screen": 3898, - "dog": 3899, - "##ja": 3900, - "residents": 3901, - "bus": 3902, - "sides": 3903, - "none": 3904, - "secondary": 3905, - "literature": 3906, - "polish": 3907, - "destroyed": 3908, - "flying": 3909, - "founder": 3910, - "households": 3911, - "1939": 3912, - "lay": 3913, - "reserve": 3914, - "usa": 3915, - "gallery": 3916, - "##ler": 3917, - "1946": 3918, - "industrial": 3919, - "younger": 3920, - "approach": 3921, - "appearances": 3922, - "urban": 3923, - "ones": 3924, - "1950": 3925, - "finish": 3926, - "avenue": 3927, - "powerful": 3928, - "fully": 3929, - "growth": 3930, - "page": 3931, - "honor": 3932, - "jersey": 3933, - "projects": 3934, - "advanced": 3935, - "revealed": 3936, - "basic": 3937, - "90": 3938, - "infantry": 3939, - "pair": 3940, - "equipment": 3941, - "visit": 3942, - "33": 3943, - "evening": 3944, - "search": 3945, - "grant": 3946, - "effort": 3947, - "solo": 3948, - "treatment": 3949, - "buried": 3950, - "republican": 3951, - "primarily": 3952, - "bottom": 3953, - "owner": 3954, - "1970s": 3955, - "israel": 3956, - "gives": 3957, - "jim": 3958, - "dream": 3959, - "bob": 3960, - "remain": 3961, - "spot": 3962, - "70": 3963, - "notes": 3964, - "produce": 3965, - "champions": 3966, - "contact": 3967, - "ed": 3968, - "soul": 3969, - "accepted": 3970, - "ways": 3971, - "del": 3972, - "##ally": 3973, - "losing": 3974, - "split": 3975, - "price": 3976, - "capacity": 3977, - "basis": 3978, - "trial": 3979, - 
"questions": 3980, - "##ina": 3981, - "1955": 3982, - "20th": 3983, - "guess": 3984, - "officially": 3985, - "memorial": 3986, - "naval": 3987, - "initial": 3988, - "##ization": 3989, - "whispered": 3990, - "median": 3991, - "engineer": 3992, - "##ful": 3993, - "sydney": 3994, - "##go": 3995, - "columbia": 3996, - "strength": 3997, - "300": 3998, - "1952": 3999, - "tears": 4000, - "senate": 4001, - "00": 4002, - "card": 4003, - "asian": 4004, - "agent": 4005, - "1947": 4006, - "software": 4007, - "44": 4008, - "draw": 4009, - "warm": 4010, - "supposed": 4011, - "com": 4012, - "pro": 4013, - "##il": 4014, - "transferred": 4015, - "leaned": 4016, - "##at": 4017, - "candidate": 4018, - "escape": 4019, - "mountains": 4020, - "asia": 4021, - "potential": 4022, - "activity": 4023, - "entertainment": 4024, - "seem": 4025, - "traffic": 4026, - "jackson": 4027, - "murder": 4028, - "36": 4029, - "slow": 4030, - "product": 4031, - "orchestra": 4032, - "haven": 4033, - "agency": 4034, - "bbc": 4035, - "taught": 4036, - "website": 4037, - "comedy": 4038, - "unable": 4039, - "storm": 4040, - "planning": 4041, - "albums": 4042, - "rugby": 4043, - "environment": 4044, - "scientific": 4045, - "grabbed": 4046, - "protect": 4047, - "##hi": 4048, - "boat": 4049, - "typically": 4050, - "1954": 4051, - "1953": 4052, - "damage": 4053, - "principal": 4054, - "divided": 4055, - "dedicated": 4056, - "mount": 4057, - "ohio": 4058, - "##berg": 4059, - "pick": 4060, - "fought": 4061, - "driver": 4062, - "##der": 4063, - "empty": 4064, - "shoulders": 4065, - "sort": 4066, - "thank": 4067, - "berlin": 4068, - "prominent": 4069, - "account": 4070, - "freedom": 4071, - "necessary": 4072, - "efforts": 4073, - "alex": 4074, - "headquarters": 4075, - "follows": 4076, - "alongside": 4077, - "des": 4078, - "simon": 4079, - "andrew": 4080, - "suggested": 4081, - "operating": 4082, - "learning": 4083, - "steps": 4084, - "1949": 4085, - "sweet": 4086, - "technical": 4087, - "begin": 4088, - "easily": 
4089, - "34": 4090, - "teeth": 4091, - "speaking": 4092, - "settlement": 4093, - "scale": 4094, - "##sh": 4095, - "renamed": 4096, - "ray": 4097, - "max": 4098, - "enemy": 4099, - "semi": 4100, - "joint": 4101, - "compared": 4102, - "##rd": 4103, - "scottish": 4104, - "leadership": 4105, - "analysis": 4106, - "offers": 4107, - "georgia": 4108, - "pieces": 4109, - "captured": 4110, - "animal": 4111, - "deputy": 4112, - "guest": 4113, - "organized": 4114, - "##lin": 4115, - "tony": 4116, - "combined": 4117, - "method": 4118, - "challenge": 4119, - "1960s": 4120, - "huge": 4121, - "wants": 4122, - "battalion": 4123, - "sons": 4124, - "rise": 4125, - "crime": 4126, - "types": 4127, - "facilities": 4128, - "telling": 4129, - "path": 4130, - "1951": 4131, - "platform": 4132, - "sit": 4133, - "1990s": 4134, - "##lo": 4135, - "tells": 4136, - "assigned": 4137, - "rich": 4138, - "pull": 4139, - "##ot": 4140, - "commonly": 4141, - "alive": 4142, - "##za": 4143, - "letters": 4144, - "concept": 4145, - "conducted": 4146, - "wearing": 4147, - "happen": 4148, - "bought": 4149, - "becomes": 4150, - "holy": 4151, - "gets": 4152, - "ocean": 4153, - "defeat": 4154, - "languages": 4155, - "purchased": 4156, - "coffee": 4157, - "occurred": 4158, - "titled": 4159, - "##q": 4160, - "declared": 4161, - "applied": 4162, - "sciences": 4163, - "concert": 4164, - "sounds": 4165, - "jazz": 4166, - "brain": 4167, - "##me": 4168, - "painting": 4169, - "fleet": 4170, - "tax": 4171, - "nick": 4172, - "##ius": 4173, - "michigan": 4174, - "count": 4175, - "animals": 4176, - "leaders": 4177, - "episodes": 4178, - "##line": 4179, - "content": 4180, - "##den": 4181, - "birth": 4182, - "##it": 4183, - "clubs": 4184, - "64": 4185, - "palace": 4186, - "critical": 4187, - "refused": 4188, - "fair": 4189, - "leg": 4190, - "laughed": 4191, - "returning": 4192, - "surrounding": 4193, - "participated": 4194, - "formation": 4195, - "lifted": 4196, - "pointed": 4197, - "connected": 4198, - "rome": 4199, - 
"medicine": 4200, - "laid": 4201, - "taylor": 4202, - "santa": 4203, - "powers": 4204, - "adam": 4205, - "tall": 4206, - "shared": 4207, - "focused": 4208, - "knowing": 4209, - "yards": 4210, - "entrance": 4211, - "falls": 4212, - "##wa": 4213, - "calling": 4214, - "##ad": 4215, - "sources": 4216, - "chosen": 4217, - "beneath": 4218, - "resources": 4219, - "yard": 4220, - "##ite": 4221, - "nominated": 4222, - "silence": 4223, - "zone": 4224, - "defined": 4225, - "##que": 4226, - "gained": 4227, - "thirty": 4228, - "38": 4229, - "bodies": 4230, - "moon": 4231, - "##ard": 4232, - "adopted": 4233, - "christmas": 4234, - "widely": 4235, - "register": 4236, - "apart": 4237, - "iran": 4238, - "premier": 4239, - "serves": 4240, - "du": 4241, - "unknown": 4242, - "parties": 4243, - "##les": 4244, - "generation": 4245, - "##ff": 4246, - "continues": 4247, - "quick": 4248, - "fields": 4249, - "brigade": 4250, - "quiet": 4251, - "teaching": 4252, - "clothes": 4253, - "impact": 4254, - "weapons": 4255, - "partner": 4256, - "flat": 4257, - "theater": 4258, - "supreme": 4259, - "1938": 4260, - "37": 4261, - "relations": 4262, - "##tor": 4263, - "plants": 4264, - "suffered": 4265, - "1936": 4266, - "wilson": 4267, - "kids": 4268, - "begins": 4269, - "##age": 4270, - "1918": 4271, - "seats": 4272, - "armed": 4273, - "internet": 4274, - "models": 4275, - "worth": 4276, - "laws": 4277, - "400": 4278, - "communities": 4279, - "classes": 4280, - "background": 4281, - "knows": 4282, - "thanks": 4283, - "quarter": 4284, - "reaching": 4285, - "humans": 4286, - "carry": 4287, - "killing": 4288, - "format": 4289, - "kong": 4290, - "hong": 4291, - "setting": 4292, - "75": 4293, - "architecture": 4294, - "disease": 4295, - "railroad": 4296, - "inc": 4297, - "possibly": 4298, - "wish": 4299, - "arthur": 4300, - "thoughts": 4301, - "harry": 4302, - "doors": 4303, - "density": 4304, - "##di": 4305, - "crowd": 4306, - "illinois": 4307, - "stomach": 4308, - "tone": 4309, - "unique": 4310, - 
"reports": 4311, - "anyway": 4312, - "##ir": 4313, - "liberal": 4314, - "der": 4315, - "vehicle": 4316, - "thick": 4317, - "dry": 4318, - "drug": 4319, - "faced": 4320, - "largely": 4321, - "facility": 4322, - "theme": 4323, - "holds": 4324, - "creation": 4325, - "strange": 4326, - "colonel": 4327, - "##mi": 4328, - "revolution": 4329, - "bell": 4330, - "politics": 4331, - "turns": 4332, - "silent": 4333, - "rail": 4334, - "relief": 4335, - "independence": 4336, - "combat": 4337, - "shape": 4338, - "write": 4339, - "determined": 4340, - "sales": 4341, - "learned": 4342, - "4th": 4343, - "finger": 4344, - "oxford": 4345, - "providing": 4346, - "1937": 4347, - "heritage": 4348, - "fiction": 4349, - "situated": 4350, - "designated": 4351, - "allowing": 4352, - "distribution": 4353, - "hosted": 4354, - "##est": 4355, - "sight": 4356, - "interview": 4357, - "estimated": 4358, - "reduced": 4359, - "##ria": 4360, - "toronto": 4361, - "footballer": 4362, - "keeping": 4363, - "guys": 4364, - "damn": 4365, - "claim": 4366, - "motion": 4367, - "sport": 4368, - "sixth": 4369, - "stayed": 4370, - "##ze": 4371, - "en": 4372, - "rear": 4373, - "receive": 4374, - "handed": 4375, - "twelve": 4376, - "dress": 4377, - "audience": 4378, - "granted": 4379, - "brazil": 4380, - "##well": 4381, - "spirit": 4382, - "##ated": 4383, - "noticed": 4384, - "etc": 4385, - "olympic": 4386, - "representative": 4387, - "eric": 4388, - "tight": 4389, - "trouble": 4390, - "reviews": 4391, - "drink": 4392, - "vampire": 4393, - "missing": 4394, - "roles": 4395, - "ranked": 4396, - "newly": 4397, - "household": 4398, - "finals": 4399, - "wave": 4400, - "critics": 4401, - "##ee": 4402, - "phase": 4403, - "massachusetts": 4404, - "pilot": 4405, - "unlike": 4406, - "philadelphia": 4407, - "bright": 4408, - "guns": 4409, - "crown": 4410, - "organizations": 4411, - "roof": 4412, - "42": 4413, - "respectively": 4414, - "clearly": 4415, - "tongue": 4416, - "marked": 4417, - "circle": 4418, - "fox": 4419, - 
"korea": 4420, - "bronze": 4421, - "brian": 4422, - "expanded": 4423, - "sexual": 4424, - "supply": 4425, - "yourself": 4426, - "inspired": 4427, - "labour": 4428, - "fc": 4429, - "##ah": 4430, - "reference": 4431, - "vision": 4432, - "draft": 4433, - "connection": 4434, - "brand": 4435, - "reasons": 4436, - "1935": 4437, - "classic": 4438, - "driving": 4439, - "trip": 4440, - "jesus": 4441, - "cells": 4442, - "entry": 4443, - "1920": 4444, - "neither": 4445, - "trail": 4446, - "claims": 4447, - "atlantic": 4448, - "orders": 4449, - "labor": 4450, - "nose": 4451, - "afraid": 4452, - "identified": 4453, - "intelligence": 4454, - "calls": 4455, - "cancer": 4456, - "attacked": 4457, - "passing": 4458, - "stephen": 4459, - "positions": 4460, - "imperial": 4461, - "grey": 4462, - "jason": 4463, - "39": 4464, - "sunday": 4465, - "48": 4466, - "swedish": 4467, - "avoid": 4468, - "extra": 4469, - "uncle": 4470, - "message": 4471, - "covers": 4472, - "allows": 4473, - "surprise": 4474, - "materials": 4475, - "fame": 4476, - "hunter": 4477, - "##ji": 4478, - "1930": 4479, - "citizens": 4480, - "figures": 4481, - "davis": 4482, - "environmental": 4483, - "confirmed": 4484, - "shit": 4485, - "titles": 4486, - "di": 4487, - "performing": 4488, - "difference": 4489, - "acts": 4490, - "attacks": 4491, - "##ov": 4492, - "existing": 4493, - "votes": 4494, - "opportunity": 4495, - "nor": 4496, - "shop": 4497, - "entirely": 4498, - "trains": 4499, - "opposite": 4500, - "pakistan": 4501, - "##pa": 4502, - "develop": 4503, - "resulted": 4504, - "representatives": 4505, - "actions": 4506, - "reality": 4507, - "pressed": 4508, - "##ish": 4509, - "barely": 4510, - "wine": 4511, - "conversation": 4512, - "faculty": 4513, - "northwest": 4514, - "ends": 4515, - "documentary": 4516, - "nuclear": 4517, - "stock": 4518, - "grace": 4519, - "sets": 4520, - "eat": 4521, - "alternative": 4522, - "##ps": 4523, - "bag": 4524, - "resulting": 4525, - "creating": 4526, - "surprised": 4527, - "cemetery": 
4528, - "1919": 4529, - "drop": 4530, - "finding": 4531, - "sarah": 4532, - "cricket": 4533, - "streets": 4534, - "tradition": 4535, - "ride": 4536, - "1933": 4537, - "exhibition": 4538, - "target": 4539, - "ear": 4540, - "explained": 4541, - "rain": 4542, - "composer": 4543, - "injury": 4544, - "apartment": 4545, - "municipal": 4546, - "educational": 4547, - "occupied": 4548, - "netherlands": 4549, - "clean": 4550, - "billion": 4551, - "constitution": 4552, - "learn": 4553, - "1914": 4554, - "maximum": 4555, - "classical": 4556, - "francis": 4557, - "lose": 4558, - "opposition": 4559, - "jose": 4560, - "ontario": 4561, - "bear": 4562, - "core": 4563, - "hills": 4564, - "rolled": 4565, - "ending": 4566, - "drawn": 4567, - "permanent": 4568, - "fun": 4569, - "##tes": 4570, - "##lla": 4571, - "lewis": 4572, - "sites": 4573, - "chamber": 4574, - "ryan": 4575, - "##way": 4576, - "scoring": 4577, - "height": 4578, - "1934": 4579, - "##house": 4580, - "lyrics": 4581, - "staring": 4582, - "55": 4583, - "officials": 4584, - "1917": 4585, - "snow": 4586, - "oldest": 4587, - "##tic": 4588, - "orange": 4589, - "##ger": 4590, - "qualified": 4591, - "interior": 4592, - "apparently": 4593, - "succeeded": 4594, - "thousand": 4595, - "dinner": 4596, - "lights": 4597, - "existence": 4598, - "fans": 4599, - "heavily": 4600, - "41": 4601, - "greatest": 4602, - "conservative": 4603, - "send": 4604, - "bowl": 4605, - "plus": 4606, - "enter": 4607, - "catch": 4608, - "##un": 4609, - "economy": 4610, - "duty": 4611, - "1929": 4612, - "speech": 4613, - "authorities": 4614, - "princess": 4615, - "performances": 4616, - "versions": 4617, - "shall": 4618, - "graduate": 4619, - "pictures": 4620, - "effective": 4621, - "remembered": 4622, - "poetry": 4623, - "desk": 4624, - "crossed": 4625, - "starring": 4626, - "starts": 4627, - "passenger": 4628, - "sharp": 4629, - "##ant": 4630, - "acres": 4631, - "ass": 4632, - "weather": 4633, - "falling": 4634, - "rank": 4635, - "fund": 4636, - 
"supporting": 4637, - "check": 4638, - "adult": 4639, - "publishing": 4640, - "heads": 4641, - "cm": 4642, - "southeast": 4643, - "lane": 4644, - "##burg": 4645, - "application": 4646, - "bc": 4647, - "##ura": 4648, - "les": 4649, - "condition": 4650, - "transfer": 4651, - "prevent": 4652, - "display": 4653, - "ex": 4654, - "regions": 4655, - "earl": 4656, - "federation": 4657, - "cool": 4658, - "relatively": 4659, - "answered": 4660, - "besides": 4661, - "1928": 4662, - "obtained": 4663, - "portion": 4664, - "##town": 4665, - "mix": 4666, - "##ding": 4667, - "reaction": 4668, - "liked": 4669, - "dean": 4670, - "express": 4671, - "peak": 4672, - "1932": 4673, - "##tte": 4674, - "counter": 4675, - "religion": 4676, - "chain": 4677, - "rare": 4678, - "miller": 4679, - "convention": 4680, - "aid": 4681, - "lie": 4682, - "vehicles": 4683, - "mobile": 4684, - "perform": 4685, - "squad": 4686, - "wonder": 4687, - "lying": 4688, - "crazy": 4689, - "sword": 4690, - "##ping": 4691, - "attempted": 4692, - "centuries": 4693, - "weren": 4694, - "philosophy": 4695, - "category": 4696, - "##ize": 4697, - "anna": 4698, - "interested": 4699, - "47": 4700, - "sweden": 4701, - "wolf": 4702, - "frequently": 4703, - "abandoned": 4704, - "kg": 4705, - "literary": 4706, - "alliance": 4707, - "task": 4708, - "entitled": 4709, - "##ay": 4710, - "threw": 4711, - "promotion": 4712, - "factory": 4713, - "tiny": 4714, - "soccer": 4715, - "visited": 4716, - "matt": 4717, - "fm": 4718, - "achieved": 4719, - "52": 4720, - "defence": 4721, - "internal": 4722, - "persian": 4723, - "43": 4724, - "methods": 4725, - "##ging": 4726, - "arrested": 4727, - "otherwise": 4728, - "cambridge": 4729, - "programming": 4730, - "villages": 4731, - "elementary": 4732, - "districts": 4733, - "rooms": 4734, - "criminal": 4735, - "conflict": 4736, - "worry": 4737, - "trained": 4738, - "1931": 4739, - "attempts": 4740, - "waited": 4741, - "signal": 4742, - "bird": 4743, - "truck": 4744, - "subsequent": 4745, - 
"programme": 4746, - "##ol": 4747, - "ad": 4748, - "49": 4749, - "communist": 4750, - "details": 4751, - "faith": 4752, - "sector": 4753, - "patrick": 4754, - "carrying": 4755, - "laugh": 4756, - "##ss": 4757, - "controlled": 4758, - "korean": 4759, - "showing": 4760, - "origin": 4761, - "fuel": 4762, - "evil": 4763, - "1927": 4764, - "##ent": 4765, - "brief": 4766, - "identity": 4767, - "darkness": 4768, - "address": 4769, - "pool": 4770, - "missed": 4771, - "publication": 4772, - "web": 4773, - "planet": 4774, - "ian": 4775, - "anne": 4776, - "wings": 4777, - "invited": 4778, - "##tt": 4779, - "briefly": 4780, - "standards": 4781, - "kissed": 4782, - "##be": 4783, - "ideas": 4784, - "climate": 4785, - "causing": 4786, - "walter": 4787, - "worse": 4788, - "albert": 4789, - "articles": 4790, - "winners": 4791, - "desire": 4792, - "aged": 4793, - "northeast": 4794, - "dangerous": 4795, - "gate": 4796, - "doubt": 4797, - "1922": 4798, - "wooden": 4799, - "multi": 4800, - "##ky": 4801, - "poet": 4802, - "rising": 4803, - "funding": 4804, - "46": 4805, - "communications": 4806, - "communication": 4807, - "violence": 4808, - "copies": 4809, - "prepared": 4810, - "ford": 4811, - "investigation": 4812, - "skills": 4813, - "1924": 4814, - "pulling": 4815, - "electronic": 4816, - "##ak": 4817, - "##ial": 4818, - "##han": 4819, - "containing": 4820, - "ultimately": 4821, - "offices": 4822, - "singing": 4823, - "understanding": 4824, - "restaurant": 4825, - "tomorrow": 4826, - "fashion": 4827, - "christ": 4828, - "ward": 4829, - "da": 4830, - "pope": 4831, - "stands": 4832, - "5th": 4833, - "flow": 4834, - "studios": 4835, - "aired": 4836, - "commissioned": 4837, - "contained": 4838, - "exist": 4839, - "fresh": 4840, - "americans": 4841, - "##per": 4842, - "wrestling": 4843, - "approved": 4844, - "kid": 4845, - "employed": 4846, - "respect": 4847, - "suit": 4848, - "1925": 4849, - "angel": 4850, - "asking": 4851, - "increasing": 4852, - "frame": 4853, - "angry": 4854, - 
"selling": 4855, - "1950s": 4856, - "thin": 4857, - "finds": 4858, - "##nd": 4859, - "temperature": 4860, - "statement": 4861, - "ali": 4862, - "explain": 4863, - "inhabitants": 4864, - "towns": 4865, - "extensive": 4866, - "narrow": 4867, - "51": 4868, - "jane": 4869, - "flowers": 4870, - "images": 4871, - "promise": 4872, - "somewhere": 4873, - "object": 4874, - "fly": 4875, - "closely": 4876, - "##ls": 4877, - "1912": 4878, - "bureau": 4879, - "cape": 4880, - "1926": 4881, - "weekly": 4882, - "presidential": 4883, - "legislative": 4884, - "1921": 4885, - "##ai": 4886, - "##au": 4887, - "launch": 4888, - "founding": 4889, - "##ny": 4890, - "978": 4891, - "##ring": 4892, - "artillery": 4893, - "strike": 4894, - "un": 4895, - "institutions": 4896, - "roll": 4897, - "writers": 4898, - "landing": 4899, - "chose": 4900, - "kevin": 4901, - "anymore": 4902, - "pp": 4903, - "##ut": 4904, - "attorney": 4905, - "fit": 4906, - "dan": 4907, - "billboard": 4908, - "receiving": 4909, - "agricultural": 4910, - "breaking": 4911, - "sought": 4912, - "dave": 4913, - "admitted": 4914, - "lands": 4915, - "mexican": 4916, - "##bury": 4917, - "charlie": 4918, - "specifically": 4919, - "hole": 4920, - "iv": 4921, - "howard": 4922, - "credit": 4923, - "moscow": 4924, - "roads": 4925, - "accident": 4926, - "1923": 4927, - "proved": 4928, - "wear": 4929, - "struck": 4930, - "hey": 4931, - "guards": 4932, - "stuff": 4933, - "slid": 4934, - "expansion": 4935, - "1915": 4936, - "cat": 4937, - "anthony": 4938, - "##kin": 4939, - "melbourne": 4940, - "opposed": 4941, - "sub": 4942, - "southwest": 4943, - "architect": 4944, - "failure": 4945, - "plane": 4946, - "1916": 4947, - "##ron": 4948, - "map": 4949, - "camera": 4950, - "tank": 4951, - "listen": 4952, - "regarding": 4953, - "wet": 4954, - "introduction": 4955, - "metropolitan": 4956, - "link": 4957, - "ep": 4958, - "fighter": 4959, - "inch": 4960, - "grown": 4961, - "gene": 4962, - "anger": 4963, - "fixed": 4964, - "buy": 4965, - "dvd": 
4966, - "khan": 4967, - "domestic": 4968, - "worldwide": 4969, - "chapel": 4970, - "mill": 4971, - "functions": 4972, - "examples": 4973, - "##head": 4974, - "developing": 4975, - "1910": 4976, - "turkey": 4977, - "hits": 4978, - "pocket": 4979, - "antonio": 4980, - "papers": 4981, - "grow": 4982, - "unless": 4983, - "circuit": 4984, - "18th": 4985, - "concerned": 4986, - "attached": 4987, - "journalist": 4988, - "selection": 4989, - "journey": 4990, - "converted": 4991, - "provincial": 4992, - "painted": 4993, - "hearing": 4994, - "aren": 4995, - "bands": 4996, - "negative": 4997, - "aside": 4998, - "wondered": 4999, - "knight": 5000, - "lap": 5001, - "survey": 5002, - "ma": 5003, - "##ow": 5004, - "noise": 5005, - "billy": 5006, - "##ium": 5007, - "shooting": 5008, - "guide": 5009, - "bedroom": 5010, - "priest": 5011, - "resistance": 5012, - "motor": 5013, - "homes": 5014, - "sounded": 5015, - "giant": 5016, - "##mer": 5017, - "150": 5018, - "scenes": 5019, - "equal": 5020, - "comic": 5021, - "patients": 5022, - "hidden": 5023, - "solid": 5024, - "actual": 5025, - "bringing": 5026, - "afternoon": 5027, - "touched": 5028, - "funds": 5029, - "wedding": 5030, - "consisted": 5031, - "marie": 5032, - "canal": 5033, - "sr": 5034, - "kim": 5035, - "treaty": 5036, - "turkish": 5037, - "recognition": 5038, - "residence": 5039, - "cathedral": 5040, - "broad": 5041, - "knees": 5042, - "incident": 5043, - "shaped": 5044, - "fired": 5045, - "norwegian": 5046, - "handle": 5047, - "cheek": 5048, - "contest": 5049, - "represent": 5050, - "##pe": 5051, - "representing": 5052, - "beauty": 5053, - "##sen": 5054, - "birds": 5055, - "advantage": 5056, - "emergency": 5057, - "wrapped": 5058, - "drawing": 5059, - "notice": 5060, - "pink": 5061, - "broadcasting": 5062, - "##ong": 5063, - "somehow": 5064, - "bachelor": 5065, - "seventh": 5066, - "collected": 5067, - "registered": 5068, - "establishment": 5069, - "alan": 5070, - "assumed": 5071, - "chemical": 5072, - "personnel": 5073, - 
"roger": 5074, - "retirement": 5075, - "jeff": 5076, - "portuguese": 5077, - "wore": 5078, - "tied": 5079, - "device": 5080, - "threat": 5081, - "progress": 5082, - "advance": 5083, - "##ised": 5084, - "banks": 5085, - "hired": 5086, - "manchester": 5087, - "nfl": 5088, - "teachers": 5089, - "structures": 5090, - "forever": 5091, - "##bo": 5092, - "tennis": 5093, - "helping": 5094, - "saturday": 5095, - "sale": 5096, - "applications": 5097, - "junction": 5098, - "hip": 5099, - "incorporated": 5100, - "neighborhood": 5101, - "dressed": 5102, - "ceremony": 5103, - "##ds": 5104, - "influenced": 5105, - "hers": 5106, - "visual": 5107, - "stairs": 5108, - "decades": 5109, - "inner": 5110, - "kansas": 5111, - "hung": 5112, - "hoped": 5113, - "gain": 5114, - "scheduled": 5115, - "downtown": 5116, - "engaged": 5117, - "austria": 5118, - "clock": 5119, - "norway": 5120, - "certainly": 5121, - "pale": 5122, - "protected": 5123, - "1913": 5124, - "victor": 5125, - "employees": 5126, - "plate": 5127, - "putting": 5128, - "surrounded": 5129, - "##ists": 5130, - "finishing": 5131, - "blues": 5132, - "tropical": 5133, - "##ries": 5134, - "minnesota": 5135, - "consider": 5136, - "philippines": 5137, - "accept": 5138, - "54": 5139, - "retrieved": 5140, - "1900": 5141, - "concern": 5142, - "anderson": 5143, - "properties": 5144, - "institution": 5145, - "gordon": 5146, - "successfully": 5147, - "vietnam": 5148, - "##dy": 5149, - "backing": 5150, - "outstanding": 5151, - "muslim": 5152, - "crossing": 5153, - "folk": 5154, - "producing": 5155, - "usual": 5156, - "demand": 5157, - "occurs": 5158, - "observed": 5159, - "lawyer": 5160, - "educated": 5161, - "##ana": 5162, - "kelly": 5163, - "string": 5164, - "pleasure": 5165, - "budget": 5166, - "items": 5167, - "quietly": 5168, - "colorado": 5169, - "philip": 5170, - "typical": 5171, - "##worth": 5172, - "derived": 5173, - "600": 5174, - "survived": 5175, - "asks": 5176, - "mental": 5177, - "##ide": 5178, - "56": 5179, - "jake": 5180, - 
"jews": 5181, - "distinguished": 5182, - "ltd": 5183, - "1911": 5184, - "sri": 5185, - "extremely": 5186, - "53": 5187, - "athletic": 5188, - "loud": 5189, - "thousands": 5190, - "worried": 5191, - "shadow": 5192, - "transportation": 5193, - "horses": 5194, - "weapon": 5195, - "arena": 5196, - "importance": 5197, - "users": 5198, - "tim": 5199, - "objects": 5200, - "contributed": 5201, - "dragon": 5202, - "douglas": 5203, - "aware": 5204, - "senator": 5205, - "johnny": 5206, - "jordan": 5207, - "sisters": 5208, - "engines": 5209, - "flag": 5210, - "investment": 5211, - "samuel": 5212, - "shock": 5213, - "capable": 5214, - "clark": 5215, - "row": 5216, - "wheel": 5217, - "refers": 5218, - "session": 5219, - "familiar": 5220, - "biggest": 5221, - "wins": 5222, - "hate": 5223, - "maintained": 5224, - "drove": 5225, - "hamilton": 5226, - "request": 5227, - "expressed": 5228, - "injured": 5229, - "underground": 5230, - "churches": 5231, - "walker": 5232, - "wars": 5233, - "tunnel": 5234, - "passes": 5235, - "stupid": 5236, - "agriculture": 5237, - "softly": 5238, - "cabinet": 5239, - "regarded": 5240, - "joining": 5241, - "indiana": 5242, - "##ea": 5243, - "##ms": 5244, - "push": 5245, - "dates": 5246, - "spend": 5247, - "behavior": 5248, - "woods": 5249, - "protein": 5250, - "gently": 5251, - "chase": 5252, - "morgan": 5253, - "mention": 5254, - "burning": 5255, - "wake": 5256, - "combination": 5257, - "occur": 5258, - "mirror": 5259, - "leads": 5260, - "jimmy": 5261, - "indeed": 5262, - "impossible": 5263, - "singapore": 5264, - "paintings": 5265, - "covering": 5266, - "##nes": 5267, - "soldier": 5268, - "locations": 5269, - "attendance": 5270, - "sell": 5271, - "historian": 5272, - "wisconsin": 5273, - "invasion": 5274, - "argued": 5275, - "painter": 5276, - "diego": 5277, - "changing": 5278, - "egypt": 5279, - "##don": 5280, - "experienced": 5281, - "inches": 5282, - "##ku": 5283, - "missouri": 5284, - "vol": 5285, - "grounds": 5286, - "spoken": 5287, - 
"switzerland": 5288, - "##gan": 5289, - "reform": 5290, - "rolling": 5291, - "ha": 5292, - "forget": 5293, - "massive": 5294, - "resigned": 5295, - "burned": 5296, - "allen": 5297, - "tennessee": 5298, - "locked": 5299, - "values": 5300, - "improved": 5301, - "##mo": 5302, - "wounded": 5303, - "universe": 5304, - "sick": 5305, - "dating": 5306, - "facing": 5307, - "pack": 5308, - "purchase": 5309, - "user": 5310, - "##pur": 5311, - "moments": 5312, - "##ul": 5313, - "merged": 5314, - "anniversary": 5315, - "1908": 5316, - "coal": 5317, - "brick": 5318, - "understood": 5319, - "causes": 5320, - "dynasty": 5321, - "queensland": 5322, - "establish": 5323, - "stores": 5324, - "crisis": 5325, - "promote": 5326, - "hoping": 5327, - "views": 5328, - "cards": 5329, - "referee": 5330, - "extension": 5331, - "##si": 5332, - "raise": 5333, - "arizona": 5334, - "improve": 5335, - "colonial": 5336, - "formal": 5337, - "charged": 5338, - "##rt": 5339, - "palm": 5340, - "lucky": 5341, - "hide": 5342, - "rescue": 5343, - "faces": 5344, - "95": 5345, - "feelings": 5346, - "candidates": 5347, - "juan": 5348, - "##ell": 5349, - "goods": 5350, - "6th": 5351, - "courses": 5352, - "weekend": 5353, - "59": 5354, - "luke": 5355, - "cash": 5356, - "fallen": 5357, - "##om": 5358, - "delivered": 5359, - "affected": 5360, - "installed": 5361, - "carefully": 5362, - "tries": 5363, - "swiss": 5364, - "hollywood": 5365, - "costs": 5366, - "lincoln": 5367, - "responsibility": 5368, - "##he": 5369, - "shore": 5370, - "file": 5371, - "proper": 5372, - "normally": 5373, - "maryland": 5374, - "assistance": 5375, - "jump": 5376, - "constant": 5377, - "offering": 5378, - "friendly": 5379, - "waters": 5380, - "persons": 5381, - "realize": 5382, - "contain": 5383, - "trophy": 5384, - "800": 5385, - "partnership": 5386, - "factor": 5387, - "58": 5388, - "musicians": 5389, - "cry": 5390, - "bound": 5391, - "oregon": 5392, - "indicated": 5393, - "hero": 5394, - "houston": 5395, - "medium": 5396, - "##ure": 
5397, - "consisting": 5398, - "somewhat": 5399, - "##ara": 5400, - "57": 5401, - "cycle": 5402, - "##che": 5403, - "beer": 5404, - "moore": 5405, - "frederick": 5406, - "gotten": 5407, - "eleven": 5408, - "worst": 5409, - "weak": 5410, - "approached": 5411, - "arranged": 5412, - "chin": 5413, - "loan": 5414, - "universal": 5415, - "bond": 5416, - "fifteen": 5417, - "pattern": 5418, - "disappeared": 5419, - "##ney": 5420, - "translated": 5421, - "##zed": 5422, - "lip": 5423, - "arab": 5424, - "capture": 5425, - "interests": 5426, - "insurance": 5427, - "##chi": 5428, - "shifted": 5429, - "cave": 5430, - "prix": 5431, - "warning": 5432, - "sections": 5433, - "courts": 5434, - "coat": 5435, - "plot": 5436, - "smell": 5437, - "feed": 5438, - "golf": 5439, - "favorite": 5440, - "maintain": 5441, - "knife": 5442, - "vs": 5443, - "voted": 5444, - "degrees": 5445, - "finance": 5446, - "quebec": 5447, - "opinion": 5448, - "translation": 5449, - "manner": 5450, - "ruled": 5451, - "operate": 5452, - "productions": 5453, - "choose": 5454, - "musician": 5455, - "discovery": 5456, - "confused": 5457, - "tired": 5458, - "separated": 5459, - "stream": 5460, - "techniques": 5461, - "committed": 5462, - "attend": 5463, - "ranking": 5464, - "kings": 5465, - "throw": 5466, - "passengers": 5467, - "measure": 5468, - "horror": 5469, - "fan": 5470, - "mining": 5471, - "sand": 5472, - "danger": 5473, - "salt": 5474, - "calm": 5475, - "decade": 5476, - "dam": 5477, - "require": 5478, - "runner": 5479, - "##ik": 5480, - "rush": 5481, - "associate": 5482, - "greece": 5483, - "##ker": 5484, - "rivers": 5485, - "consecutive": 5486, - "matthew": 5487, - "##ski": 5488, - "sighed": 5489, - "sq": 5490, - "documents": 5491, - "steam": 5492, - "edited": 5493, - "closing": 5494, - "tie": 5495, - "accused": 5496, - "1905": 5497, - "##ini": 5498, - "islamic": 5499, - "distributed": 5500, - "directors": 5501, - "organisation": 5502, - "bruce": 5503, - "7th": 5504, - "breathing": 5505, - "mad": 5506, - 
"lit": 5507, - "arrival": 5508, - "concrete": 5509, - "taste": 5510, - "08": 5511, - "composition": 5512, - "shaking": 5513, - "faster": 5514, - "amateur": 5515, - "adjacent": 5516, - "stating": 5517, - "1906": 5518, - "twin": 5519, - "flew": 5520, - "##ran": 5521, - "tokyo": 5522, - "publications": 5523, - "##tone": 5524, - "obviously": 5525, - "ridge": 5526, - "storage": 5527, - "1907": 5528, - "carl": 5529, - "pages": 5530, - "concluded": 5531, - "desert": 5532, - "driven": 5533, - "universities": 5534, - "ages": 5535, - "terminal": 5536, - "sequence": 5537, - "borough": 5538, - "250": 5539, - "constituency": 5540, - "creative": 5541, - "cousin": 5542, - "economics": 5543, - "dreams": 5544, - "margaret": 5545, - "notably": 5546, - "reduce": 5547, - "montreal": 5548, - "mode": 5549, - "17th": 5550, - "ears": 5551, - "saved": 5552, - "jan": 5553, - "vocal": 5554, - "##ica": 5555, - "1909": 5556, - "andy": 5557, - "##jo": 5558, - "riding": 5559, - "roughly": 5560, - "threatened": 5561, - "##ise": 5562, - "meters": 5563, - "meanwhile": 5564, - "landed": 5565, - "compete": 5566, - "repeated": 5567, - "grass": 5568, - "czech": 5569, - "regularly": 5570, - "charges": 5571, - "tea": 5572, - "sudden": 5573, - "appeal": 5574, - "##ung": 5575, - "solution": 5576, - "describes": 5577, - "pierre": 5578, - "classification": 5579, - "glad": 5580, - "parking": 5581, - "##ning": 5582, - "belt": 5583, - "physics": 5584, - "99": 5585, - "rachel": 5586, - "add": 5587, - "hungarian": 5588, - "participate": 5589, - "expedition": 5590, - "damaged": 5591, - "gift": 5592, - "childhood": 5593, - "85": 5594, - "fifty": 5595, - "##red": 5596, - "mathematics": 5597, - "jumped": 5598, - "letting": 5599, - "defensive": 5600, - "mph": 5601, - "##ux": 5602, - "##gh": 5603, - "testing": 5604, - "##hip": 5605, - "hundreds": 5606, - "shoot": 5607, - "owners": 5608, - "matters": 5609, - "smoke": 5610, - "israeli": 5611, - "kentucky": 5612, - "dancing": 5613, - "mounted": 5614, - "grandfather": 
5615, - "emma": 5616, - "designs": 5617, - "profit": 5618, - "argentina": 5619, - "##gs": 5620, - "truly": 5621, - "li": 5622, - "lawrence": 5623, - "cole": 5624, - "begun": 5625, - "detroit": 5626, - "willing": 5627, - "branches": 5628, - "smiling": 5629, - "decide": 5630, - "miami": 5631, - "enjoyed": 5632, - "recordings": 5633, - "##dale": 5634, - "poverty": 5635, - "ethnic": 5636, - "gay": 5637, - "##bi": 5638, - "gary": 5639, - "arabic": 5640, - "09": 5641, - "accompanied": 5642, - "##one": 5643, - "##ons": 5644, - "fishing": 5645, - "determine": 5646, - "residential": 5647, - "acid": 5648, - "##ary": 5649, - "alice": 5650, - "returns": 5651, - "starred": 5652, - "mail": 5653, - "##ang": 5654, - "jonathan": 5655, - "strategy": 5656, - "##ue": 5657, - "net": 5658, - "forty": 5659, - "cook": 5660, - "businesses": 5661, - "equivalent": 5662, - "commonwealth": 5663, - "distinct": 5664, - "ill": 5665, - "##cy": 5666, - "seriously": 5667, - "##ors": 5668, - "##ped": 5669, - "shift": 5670, - "harris": 5671, - "replace": 5672, - "rio": 5673, - "imagine": 5674, - "formula": 5675, - "ensure": 5676, - "##ber": 5677, - "additionally": 5678, - "scheme": 5679, - "conservation": 5680, - "occasionally": 5681, - "purposes": 5682, - "feels": 5683, - "favor": 5684, - "##and": 5685, - "##ore": 5686, - "1930s": 5687, - "contrast": 5688, - "hanging": 5689, - "hunt": 5690, - "movies": 5691, - "1904": 5692, - "instruments": 5693, - "victims": 5694, - "danish": 5695, - "christopher": 5696, - "busy": 5697, - "demon": 5698, - "sugar": 5699, - "earliest": 5700, - "colony": 5701, - "studying": 5702, - "balance": 5703, - "duties": 5704, - "##ks": 5705, - "belgium": 5706, - "slipped": 5707, - "carter": 5708, - "05": 5709, - "visible": 5710, - "stages": 5711, - "iraq": 5712, - "fifa": 5713, - "##im": 5714, - "commune": 5715, - "forming": 5716, - "zero": 5717, - "07": 5718, - "continuing": 5719, - "talked": 5720, - "counties": 5721, - "legend": 5722, - "bathroom": 5723, - "option": 5724, - 
"tail": 5725, - "clay": 5726, - "daughters": 5727, - "afterwards": 5728, - "severe": 5729, - "jaw": 5730, - "visitors": 5731, - "##ded": 5732, - "devices": 5733, - "aviation": 5734, - "russell": 5735, - "kate": 5736, - "##vi": 5737, - "entering": 5738, - "subjects": 5739, - "##ino": 5740, - "temporary": 5741, - "swimming": 5742, - "forth": 5743, - "smooth": 5744, - "ghost": 5745, - "audio": 5746, - "bush": 5747, - "operates": 5748, - "rocks": 5749, - "movements": 5750, - "signs": 5751, - "eddie": 5752, - "##tz": 5753, - "ann": 5754, - "voices": 5755, - "honorary": 5756, - "06": 5757, - "memories": 5758, - "dallas": 5759, - "pure": 5760, - "measures": 5761, - "racial": 5762, - "promised": 5763, - "66": 5764, - "harvard": 5765, - "ceo": 5766, - "16th": 5767, - "parliamentary": 5768, - "indicate": 5769, - "benefit": 5770, - "flesh": 5771, - "dublin": 5772, - "louisiana": 5773, - "1902": 5774, - "1901": 5775, - "patient": 5776, - "sleeping": 5777, - "1903": 5778, - "membership": 5779, - "coastal": 5780, - "medieval": 5781, - "wanting": 5782, - "element": 5783, - "scholars": 5784, - "rice": 5785, - "62": 5786, - "limit": 5787, - "survive": 5788, - "makeup": 5789, - "rating": 5790, - "definitely": 5791, - "collaboration": 5792, - "obvious": 5793, - "##tan": 5794, - "boss": 5795, - "ms": 5796, - "baron": 5797, - "birthday": 5798, - "linked": 5799, - "soil": 5800, - "diocese": 5801, - "##lan": 5802, - "ncaa": 5803, - "##mann": 5804, - "offensive": 5805, - "shell": 5806, - "shouldn": 5807, - "waist": 5808, - "##tus": 5809, - "plain": 5810, - "ross": 5811, - "organ": 5812, - "resolution": 5813, - "manufacturing": 5814, - "adding": 5815, - "relative": 5816, - "kennedy": 5817, - "98": 5818, - "whilst": 5819, - "moth": 5820, - "marketing": 5821, - "gardens": 5822, - "crash": 5823, - "72": 5824, - "heading": 5825, - "partners": 5826, - "credited": 5827, - "carlos": 5828, - "moves": 5829, - "cable": 5830, - "##zi": 5831, - "marshall": 5832, - "##out": 5833, - "depending": 5834, - 
"bottle": 5835, - "represents": 5836, - "rejected": 5837, - "responded": 5838, - "existed": 5839, - "04": 5840, - "jobs": 5841, - "denmark": 5842, - "lock": 5843, - "##ating": 5844, - "treated": 5845, - "graham": 5846, - "routes": 5847, - "talent": 5848, - "commissioner": 5849, - "drugs": 5850, - "secure": 5851, - "tests": 5852, - "reign": 5853, - "restored": 5854, - "photography": 5855, - "##gi": 5856, - "contributions": 5857, - "oklahoma": 5858, - "designer": 5859, - "disc": 5860, - "grin": 5861, - "seattle": 5862, - "robin": 5863, - "paused": 5864, - "atlanta": 5865, - "unusual": 5866, - "##gate": 5867, - "praised": 5868, - "las": 5869, - "laughing": 5870, - "satellite": 5871, - "hungary": 5872, - "visiting": 5873, - "##sky": 5874, - "interesting": 5875, - "factors": 5876, - "deck": 5877, - "poems": 5878, - "norman": 5879, - "##water": 5880, - "stuck": 5881, - "speaker": 5882, - "rifle": 5883, - "domain": 5884, - "premiered": 5885, - "##her": 5886, - "dc": 5887, - "comics": 5888, - "actors": 5889, - "01": 5890, - "reputation": 5891, - "eliminated": 5892, - "8th": 5893, - "ceiling": 5894, - "prisoners": 5895, - "script": 5896, - "##nce": 5897, - "leather": 5898, - "austin": 5899, - "mississippi": 5900, - "rapidly": 5901, - "admiral": 5902, - "parallel": 5903, - "charlotte": 5904, - "guilty": 5905, - "tools": 5906, - "gender": 5907, - "divisions": 5908, - "fruit": 5909, - "##bs": 5910, - "laboratory": 5911, - "nelson": 5912, - "fantasy": 5913, - "marry": 5914, - "rapid": 5915, - "aunt": 5916, - "tribe": 5917, - "requirements": 5918, - "aspects": 5919, - "suicide": 5920, - "amongst": 5921, - "adams": 5922, - "bone": 5923, - "ukraine": 5924, - "abc": 5925, - "kick": 5926, - "sees": 5927, - "edinburgh": 5928, - "clothing": 5929, - "column": 5930, - "rough": 5931, - "gods": 5932, - "hunting": 5933, - "broadway": 5934, - "gathered": 5935, - "concerns": 5936, - "##ek": 5937, - "spending": 5938, - "ty": 5939, - "12th": 5940, - "snapped": 5941, - "requires": 5942, - 
"solar": 5943, - "bones": 5944, - "cavalry": 5945, - "##tta": 5946, - "iowa": 5947, - "drinking": 5948, - "waste": 5949, - "index": 5950, - "franklin": 5951, - "charity": 5952, - "thompson": 5953, - "stewart": 5954, - "tip": 5955, - "flash": 5956, - "landscape": 5957, - "friday": 5958, - "enjoy": 5959, - "singh": 5960, - "poem": 5961, - "listening": 5962, - "##back": 5963, - "eighth": 5964, - "fred": 5965, - "differences": 5966, - "adapted": 5967, - "bomb": 5968, - "ukrainian": 5969, - "surgery": 5970, - "corporate": 5971, - "masters": 5972, - "anywhere": 5973, - "##more": 5974, - "waves": 5975, - "odd": 5976, - "sean": 5977, - "portugal": 5978, - "orleans": 5979, - "dick": 5980, - "debate": 5981, - "kent": 5982, - "eating": 5983, - "puerto": 5984, - "cleared": 5985, - "96": 5986, - "expect": 5987, - "cinema": 5988, - "97": 5989, - "guitarist": 5990, - "blocks": 5991, - "electrical": 5992, - "agree": 5993, - "involving": 5994, - "depth": 5995, - "dying": 5996, - "panel": 5997, - "struggle": 5998, - "##ged": 5999, - "peninsula": 6000, - "adults": 6001, - "novels": 6002, - "emerged": 6003, - "vienna": 6004, - "metro": 6005, - "debuted": 6006, - "shoes": 6007, - "tamil": 6008, - "songwriter": 6009, - "meets": 6010, - "prove": 6011, - "beating": 6012, - "instance": 6013, - "heaven": 6014, - "scared": 6015, - "sending": 6016, - "marks": 6017, - "artistic": 6018, - "passage": 6019, - "superior": 6020, - "03": 6021, - "significantly": 6022, - "shopping": 6023, - "##tive": 6024, - "retained": 6025, - "##izing": 6026, - "malaysia": 6027, - "technique": 6028, - "cheeks": 6029, - "##ola": 6030, - "warren": 6031, - "maintenance": 6032, - "destroy": 6033, - "extreme": 6034, - "allied": 6035, - "120": 6036, - "appearing": 6037, - "##yn": 6038, - "fill": 6039, - "advice": 6040, - "alabama": 6041, - "qualifying": 6042, - "policies": 6043, - "cleveland": 6044, - "hat": 6045, - "battery": 6046, - "smart": 6047, - "authors": 6048, - "10th": 6049, - "soundtrack": 6050, - "acted": 
6051, - "dated": 6052, - "lb": 6053, - "glance": 6054, - "equipped": 6055, - "coalition": 6056, - "funny": 6057, - "outer": 6058, - "ambassador": 6059, - "roy": 6060, - "possibility": 6061, - "couples": 6062, - "campbell": 6063, - "dna": 6064, - "loose": 6065, - "ethan": 6066, - "supplies": 6067, - "1898": 6068, - "gonna": 6069, - "88": 6070, - "monster": 6071, - "##res": 6072, - "shake": 6073, - "agents": 6074, - "frequency": 6075, - "springs": 6076, - "dogs": 6077, - "practices": 6078, - "61": 6079, - "gang": 6080, - "plastic": 6081, - "easier": 6082, - "suggests": 6083, - "gulf": 6084, - "blade": 6085, - "exposed": 6086, - "colors": 6087, - "industries": 6088, - "markets": 6089, - "pan": 6090, - "nervous": 6091, - "electoral": 6092, - "charts": 6093, - "legislation": 6094, - "ownership": 6095, - "##idae": 6096, - "mac": 6097, - "appointment": 6098, - "shield": 6099, - "copy": 6100, - "assault": 6101, - "socialist": 6102, - "abbey": 6103, - "monument": 6104, - "license": 6105, - "throne": 6106, - "employment": 6107, - "jay": 6108, - "93": 6109, - "replacement": 6110, - "charter": 6111, - "cloud": 6112, - "powered": 6113, - "suffering": 6114, - "accounts": 6115, - "oak": 6116, - "connecticut": 6117, - "strongly": 6118, - "wright": 6119, - "colour": 6120, - "crystal": 6121, - "13th": 6122, - "context": 6123, - "welsh": 6124, - "networks": 6125, - "voiced": 6126, - "gabriel": 6127, - "jerry": 6128, - "##cing": 6129, - "forehead": 6130, - "mp": 6131, - "##ens": 6132, - "manage": 6133, - "schedule": 6134, - "totally": 6135, - "remix": 6136, - "##ii": 6137, - "forests": 6138, - "occupation": 6139, - "print": 6140, - "nicholas": 6141, - "brazilian": 6142, - "strategic": 6143, - "vampires": 6144, - "engineers": 6145, - "76": 6146, - "roots": 6147, - "seek": 6148, - "correct": 6149, - "instrumental": 6150, - "und": 6151, - "alfred": 6152, - "backed": 6153, - "hop": 6154, - "##des": 6155, - "stanley": 6156, - "robinson": 6157, - "traveled": 6158, - "wayne": 6159, - 
"welcome": 6160, - "austrian": 6161, - "achieve": 6162, - "67": 6163, - "exit": 6164, - "rates": 6165, - "1899": 6166, - "strip": 6167, - "whereas": 6168, - "##cs": 6169, - "sing": 6170, - "deeply": 6171, - "adventure": 6172, - "bobby": 6173, - "rick": 6174, - "jamie": 6175, - "careful": 6176, - "components": 6177, - "cap": 6178, - "useful": 6179, - "personality": 6180, - "knee": 6181, - "##shi": 6182, - "pushing": 6183, - "hosts": 6184, - "02": 6185, - "protest": 6186, - "ca": 6187, - "ottoman": 6188, - "symphony": 6189, - "##sis": 6190, - "63": 6191, - "boundary": 6192, - "1890": 6193, - "processes": 6194, - "considering": 6195, - "considerable": 6196, - "tons": 6197, - "##work": 6198, - "##ft": 6199, - "##nia": 6200, - "cooper": 6201, - "trading": 6202, - "dear": 6203, - "conduct": 6204, - "91": 6205, - "illegal": 6206, - "apple": 6207, - "revolutionary": 6208, - "holiday": 6209, - "definition": 6210, - "harder": 6211, - "##van": 6212, - "jacob": 6213, - "circumstances": 6214, - "destruction": 6215, - "##lle": 6216, - "popularity": 6217, - "grip": 6218, - "classified": 6219, - "liverpool": 6220, - "donald": 6221, - "baltimore": 6222, - "flows": 6223, - "seeking": 6224, - "honour": 6225, - "approval": 6226, - "92": 6227, - "mechanical": 6228, - "till": 6229, - "happening": 6230, - "statue": 6231, - "critic": 6232, - "increasingly": 6233, - "immediate": 6234, - "describe": 6235, - "commerce": 6236, - "stare": 6237, - "##ster": 6238, - "indonesia": 6239, - "meat": 6240, - "rounds": 6241, - "boats": 6242, - "baker": 6243, - "orthodox": 6244, - "depression": 6245, - "formally": 6246, - "worn": 6247, - "naked": 6248, - "claire": 6249, - "muttered": 6250, - "sentence": 6251, - "11th": 6252, - "emily": 6253, - "document": 6254, - "77": 6255, - "criticism": 6256, - "wished": 6257, - "vessel": 6258, - "spiritual": 6259, - "bent": 6260, - "virgin": 6261, - "parker": 6262, - "minimum": 6263, - "murray": 6264, - "lunch": 6265, - "danny": 6266, - "printed": 6267, - 
"compilation": 6268, - "keyboards": 6269, - "false": 6270, - "blow": 6271, - "belonged": 6272, - "68": 6273, - "raising": 6274, - "78": 6275, - "cutting": 6276, - "##board": 6277, - "pittsburgh": 6278, - "##up": 6279, - "9th": 6280, - "shadows": 6281, - "81": 6282, - "hated": 6283, - "indigenous": 6284, - "jon": 6285, - "15th": 6286, - "barry": 6287, - "scholar": 6288, - "ah": 6289, - "##zer": 6290, - "oliver": 6291, - "##gy": 6292, - "stick": 6293, - "susan": 6294, - "meetings": 6295, - "attracted": 6296, - "spell": 6297, - "romantic": 6298, - "##ver": 6299, - "ye": 6300, - "1895": 6301, - "photo": 6302, - "demanded": 6303, - "customers": 6304, - "##ac": 6305, - "1896": 6306, - "logan": 6307, - "revival": 6308, - "keys": 6309, - "modified": 6310, - "commanded": 6311, - "jeans": 6312, - "##ious": 6313, - "upset": 6314, - "raw": 6315, - "phil": 6316, - "detective": 6317, - "hiding": 6318, - "resident": 6319, - "vincent": 6320, - "##bly": 6321, - "experiences": 6322, - "diamond": 6323, - "defeating": 6324, - "coverage": 6325, - "lucas": 6326, - "external": 6327, - "parks": 6328, - "franchise": 6329, - "helen": 6330, - "bible": 6331, - "successor": 6332, - "percussion": 6333, - "celebrated": 6334, - "il": 6335, - "lift": 6336, - "profile": 6337, - "clan": 6338, - "romania": 6339, - "##ied": 6340, - "mills": 6341, - "##su": 6342, - "nobody": 6343, - "achievement": 6344, - "shrugged": 6345, - "fault": 6346, - "1897": 6347, - "rhythm": 6348, - "initiative": 6349, - "breakfast": 6350, - "carbon": 6351, - "700": 6352, - "69": 6353, - "lasted": 6354, - "violent": 6355, - "74": 6356, - "wound": 6357, - "ken": 6358, - "killer": 6359, - "gradually": 6360, - "filmed": 6361, - "°c": 6362, - "dollars": 6363, - "processing": 6364, - "94": 6365, - "remove": 6366, - "criticized": 6367, - "guests": 6368, - "sang": 6369, - "chemistry": 6370, - "##vin": 6371, - "legislature": 6372, - "disney": 6373, - "##bridge": 6374, - "uniform": 6375, - "escaped": 6376, - "integrated": 6377, - 
"proposal": 6378, - "purple": 6379, - "denied": 6380, - "liquid": 6381, - "karl": 6382, - "influential": 6383, - "morris": 6384, - "nights": 6385, - "stones": 6386, - "intense": 6387, - "experimental": 6388, - "twisted": 6389, - "71": 6390, - "84": 6391, - "##ld": 6392, - "pace": 6393, - "nazi": 6394, - "mitchell": 6395, - "ny": 6396, - "blind": 6397, - "reporter": 6398, - "newspapers": 6399, - "14th": 6400, - "centers": 6401, - "burn": 6402, - "basin": 6403, - "forgotten": 6404, - "surviving": 6405, - "filed": 6406, - "collections": 6407, - "monastery": 6408, - "losses": 6409, - "manual": 6410, - "couch": 6411, - "description": 6412, - "appropriate": 6413, - "merely": 6414, - "tag": 6415, - "missions": 6416, - "sebastian": 6417, - "restoration": 6418, - "replacing": 6419, - "triple": 6420, - "73": 6421, - "elder": 6422, - "julia": 6423, - "warriors": 6424, - "benjamin": 6425, - "julian": 6426, - "convinced": 6427, - "stronger": 6428, - "amazing": 6429, - "declined": 6430, - "versus": 6431, - "merchant": 6432, - "happens": 6433, - "output": 6434, - "finland": 6435, - "bare": 6436, - "barbara": 6437, - "absence": 6438, - "ignored": 6439, - "dawn": 6440, - "injuries": 6441, - "##port": 6442, - "producers": 6443, - "##ram": 6444, - "82": 6445, - "luis": 6446, - "##ities": 6447, - "kw": 6448, - "admit": 6449, - "expensive": 6450, - "electricity": 6451, - "nba": 6452, - "exception": 6453, - "symbol": 6454, - "##ving": 6455, - "ladies": 6456, - "shower": 6457, - "sheriff": 6458, - "characteristics": 6459, - "##je": 6460, - "aimed": 6461, - "button": 6462, - "ratio": 6463, - "effectively": 6464, - "summit": 6465, - "angle": 6466, - "jury": 6467, - "bears": 6468, - "foster": 6469, - "vessels": 6470, - "pants": 6471, - "executed": 6472, - "evans": 6473, - "dozen": 6474, - "advertising": 6475, - "kicked": 6476, - "patrol": 6477, - "1889": 6478, - "competitions": 6479, - "lifetime": 6480, - "principles": 6481, - "athletics": 6482, - "##logy": 6483, - "birmingham": 6484, - 
"sponsored": 6485, - "89": 6486, - "rob": 6487, - "nomination": 6488, - "1893": 6489, - "acoustic": 6490, - "##sm": 6491, - "creature": 6492, - "longest": 6493, - "##tra": 6494, - "credits": 6495, - "harbor": 6496, - "dust": 6497, - "josh": 6498, - "##so": 6499, - "territories": 6500, - "milk": 6501, - "infrastructure": 6502, - "completion": 6503, - "thailand": 6504, - "indians": 6505, - "leon": 6506, - "archbishop": 6507, - "##sy": 6508, - "assist": 6509, - "pitch": 6510, - "blake": 6511, - "arrangement": 6512, - "girlfriend": 6513, - "serbian": 6514, - "operational": 6515, - "hence": 6516, - "sad": 6517, - "scent": 6518, - "fur": 6519, - "dj": 6520, - "sessions": 6521, - "hp": 6522, - "refer": 6523, - "rarely": 6524, - "##ora": 6525, - "exists": 6526, - "1892": 6527, - "##ten": 6528, - "scientists": 6529, - "dirty": 6530, - "penalty": 6531, - "burst": 6532, - "portrait": 6533, - "seed": 6534, - "79": 6535, - "pole": 6536, - "limits": 6537, - "rival": 6538, - "1894": 6539, - "stable": 6540, - "alpha": 6541, - "grave": 6542, - "constitutional": 6543, - "alcohol": 6544, - "arrest": 6545, - "flower": 6546, - "mystery": 6547, - "devil": 6548, - "architectural": 6549, - "relationships": 6550, - "greatly": 6551, - "habitat": 6552, - "##istic": 6553, - "larry": 6554, - "progressive": 6555, - "remote": 6556, - "cotton": 6557, - "##ics": 6558, - "##ok": 6559, - "preserved": 6560, - "reaches": 6561, - "##ming": 6562, - "cited": 6563, - "86": 6564, - "vast": 6565, - "scholarship": 6566, - "decisions": 6567, - "cbs": 6568, - "joy": 6569, - "teach": 6570, - "1885": 6571, - "editions": 6572, - "knocked": 6573, - "eve": 6574, - "searching": 6575, - "partly": 6576, - "participation": 6577, - "gap": 6578, - "animated": 6579, - "fate": 6580, - "excellent": 6581, - "##ett": 6582, - "na": 6583, - "87": 6584, - "alternate": 6585, - "saints": 6586, - "youngest": 6587, - "##ily": 6588, - "climbed": 6589, - "##ita": 6590, - "##tors": 6591, - "suggest": 6592, - "##ct": 6593, - 
"discussion": 6594, - "staying": 6595, - "choir": 6596, - "lakes": 6597, - "jacket": 6598, - "revenue": 6599, - "nevertheless": 6600, - "peaked": 6601, - "instrument": 6602, - "wondering": 6603, - "annually": 6604, - "managing": 6605, - "neil": 6606, - "1891": 6607, - "signing": 6608, - "terry": 6609, - "##ice": 6610, - "apply": 6611, - "clinical": 6612, - "brooklyn": 6613, - "aim": 6614, - "catherine": 6615, - "fuck": 6616, - "farmers": 6617, - "figured": 6618, - "ninth": 6619, - "pride": 6620, - "hugh": 6621, - "evolution": 6622, - "ordinary": 6623, - "involvement": 6624, - "comfortable": 6625, - "shouted": 6626, - "tech": 6627, - "encouraged": 6628, - "taiwan": 6629, - "representation": 6630, - "sharing": 6631, - "##lia": 6632, - "##em": 6633, - "panic": 6634, - "exact": 6635, - "cargo": 6636, - "competing": 6637, - "fat": 6638, - "cried": 6639, - "83": 6640, - "1920s": 6641, - "occasions": 6642, - "pa": 6643, - "cabin": 6644, - "borders": 6645, - "utah": 6646, - "marcus": 6647, - "##isation": 6648, - "badly": 6649, - "muscles": 6650, - "##ance": 6651, - "victorian": 6652, - "transition": 6653, - "warner": 6654, - "bet": 6655, - "permission": 6656, - "##rin": 6657, - "slave": 6658, - "terrible": 6659, - "similarly": 6660, - "shares": 6661, - "seth": 6662, - "uefa": 6663, - "possession": 6664, - "medals": 6665, - "benefits": 6666, - "colleges": 6667, - "lowered": 6668, - "perfectly": 6669, - "mall": 6670, - "transit": 6671, - "##ye": 6672, - "##kar": 6673, - "publisher": 6674, - "##ened": 6675, - "harrison": 6676, - "deaths": 6677, - "elevation": 6678, - "##ae": 6679, - "asleep": 6680, - "machines": 6681, - "sigh": 6682, - "ash": 6683, - "hardly": 6684, - "argument": 6685, - "occasion": 6686, - "parent": 6687, - "leo": 6688, - "decline": 6689, - "1888": 6690, - "contribution": 6691, - "##ua": 6692, - "concentration": 6693, - "1000": 6694, - "opportunities": 6695, - "hispanic": 6696, - "guardian": 6697, - "extent": 6698, - "emotions": 6699, - "hips": 6700, - 
"mason": 6701, - "volumes": 6702, - "bloody": 6703, - "controversy": 6704, - "diameter": 6705, - "steady": 6706, - "mistake": 6707, - "phoenix": 6708, - "identify": 6709, - "violin": 6710, - "##sk": 6711, - "departure": 6712, - "richmond": 6713, - "spin": 6714, - "funeral": 6715, - "enemies": 6716, - "1864": 6717, - "gear": 6718, - "literally": 6719, - "connor": 6720, - "random": 6721, - "sergeant": 6722, - "grab": 6723, - "confusion": 6724, - "1865": 6725, - "transmission": 6726, - "informed": 6727, - "op": 6728, - "leaning": 6729, - "sacred": 6730, - "suspended": 6731, - "thinks": 6732, - "gates": 6733, - "portland": 6734, - "luck": 6735, - "agencies": 6736, - "yours": 6737, - "hull": 6738, - "expert": 6739, - "muscle": 6740, - "layer": 6741, - "practical": 6742, - "sculpture": 6743, - "jerusalem": 6744, - "latest": 6745, - "lloyd": 6746, - "statistics": 6747, - "deeper": 6748, - "recommended": 6749, - "warrior": 6750, - "arkansas": 6751, - "mess": 6752, - "supports": 6753, - "greg": 6754, - "eagle": 6755, - "1880": 6756, - "recovered": 6757, - "rated": 6758, - "concerts": 6759, - "rushed": 6760, - "##ano": 6761, - "stops": 6762, - "eggs": 6763, - "files": 6764, - "premiere": 6765, - "keith": 6766, - "##vo": 6767, - "delhi": 6768, - "turner": 6769, - "pit": 6770, - "affair": 6771, - "belief": 6772, - "paint": 6773, - "##zing": 6774, - "mate": 6775, - "##ach": 6776, - "##ev": 6777, - "victim": 6778, - "##ology": 6779, - "withdrew": 6780, - "bonus": 6781, - "styles": 6782, - "fled": 6783, - "##ud": 6784, - "glasgow": 6785, - "technologies": 6786, - "funded": 6787, - "nbc": 6788, - "adaptation": 6789, - "##ata": 6790, - "portrayed": 6791, - "cooperation": 6792, - "supporters": 6793, - "judges": 6794, - "bernard": 6795, - "justin": 6796, - "hallway": 6797, - "ralph": 6798, - "##ick": 6799, - "graduating": 6800, - "controversial": 6801, - "distant": 6802, - "continental": 6803, - "spider": 6804, - "bite": 6805, - "##ho": 6806, - "recognize": 6807, - "intention": 6808, 
- "mixing": 6809, - "##ese": 6810, - "egyptian": 6811, - "bow": 6812, - "tourism": 6813, - "suppose": 6814, - "claiming": 6815, - "tiger": 6816, - "dominated": 6817, - "participants": 6818, - "vi": 6819, - "##ru": 6820, - "nurse": 6821, - "partially": 6822, - "tape": 6823, - "##rum": 6824, - "psychology": 6825, - "##rn": 6826, - "essential": 6827, - "touring": 6828, - "duo": 6829, - "voting": 6830, - "civilian": 6831, - "emotional": 6832, - "channels": 6833, - "##king": 6834, - "apparent": 6835, - "hebrew": 6836, - "1887": 6837, - "tommy": 6838, - "carrier": 6839, - "intersection": 6840, - "beast": 6841, - "hudson": 6842, - "##gar": 6843, - "##zo": 6844, - "lab": 6845, - "nova": 6846, - "bench": 6847, - "discuss": 6848, - "costa": 6849, - "##ered": 6850, - "detailed": 6851, - "behalf": 6852, - "drivers": 6853, - "unfortunately": 6854, - "obtain": 6855, - "##lis": 6856, - "rocky": 6857, - "##dae": 6858, - "siege": 6859, - "friendship": 6860, - "honey": 6861, - "##rian": 6862, - "1861": 6863, - "amy": 6864, - "hang": 6865, - "posted": 6866, - "governments": 6867, - "collins": 6868, - "respond": 6869, - "wildlife": 6870, - "preferred": 6871, - "operator": 6872, - "##po": 6873, - "laura": 6874, - "pregnant": 6875, - "videos": 6876, - "dennis": 6877, - "suspected": 6878, - "boots": 6879, - "instantly": 6880, - "weird": 6881, - "automatic": 6882, - "businessman": 6883, - "alleged": 6884, - "placing": 6885, - "throwing": 6886, - "ph": 6887, - "mood": 6888, - "1862": 6889, - "perry": 6890, - "venue": 6891, - "jet": 6892, - "remainder": 6893, - "##lli": 6894, - "##ci": 6895, - "passion": 6896, - "biological": 6897, - "boyfriend": 6898, - "1863": 6899, - "dirt": 6900, - "buffalo": 6901, - "ron": 6902, - "segment": 6903, - "fa": 6904, - "abuse": 6905, - "##era": 6906, - "genre": 6907, - "thrown": 6908, - "stroke": 6909, - "colored": 6910, - "stress": 6911, - "exercise": 6912, - "displayed": 6913, - "##gen": 6914, - "struggled": 6915, - "##tti": 6916, - "abroad": 6917, - 
"dramatic": 6918, - "wonderful": 6919, - "thereafter": 6920, - "madrid": 6921, - "component": 6922, - "widespread": 6923, - "##sed": 6924, - "tale": 6925, - "citizen": 6926, - "todd": 6927, - "monday": 6928, - "1886": 6929, - "vancouver": 6930, - "overseas": 6931, - "forcing": 6932, - "crying": 6933, - "descent": 6934, - "##ris": 6935, - "discussed": 6936, - "substantial": 6937, - "ranks": 6938, - "regime": 6939, - "1870": 6940, - "provinces": 6941, - "switch": 6942, - "drum": 6943, - "zane": 6944, - "ted": 6945, - "tribes": 6946, - "proof": 6947, - "lp": 6948, - "cream": 6949, - "researchers": 6950, - "volunteer": 6951, - "manor": 6952, - "silk": 6953, - "milan": 6954, - "donated": 6955, - "allies": 6956, - "venture": 6957, - "principle": 6958, - "delivery": 6959, - "enterprise": 6960, - "##ves": 6961, - "##ans": 6962, - "bars": 6963, - "traditionally": 6964, - "witch": 6965, - "reminded": 6966, - "copper": 6967, - "##uk": 6968, - "pete": 6969, - "inter": 6970, - "links": 6971, - "colin": 6972, - "grinned": 6973, - "elsewhere": 6974, - "competitive": 6975, - "frequent": 6976, - "##oy": 6977, - "scream": 6978, - "##hu": 6979, - "tension": 6980, - "texts": 6981, - "submarine": 6982, - "finnish": 6983, - "defending": 6984, - "defend": 6985, - "pat": 6986, - "detail": 6987, - "1884": 6988, - "affiliated": 6989, - "stuart": 6990, - "themes": 6991, - "villa": 6992, - "periods": 6993, - "tool": 6994, - "belgian": 6995, - "ruling": 6996, - "crimes": 6997, - "answers": 6998, - "folded": 6999, - "licensed": 7000, - "resort": 7001, - "demolished": 7002, - "hans": 7003, - "lucy": 7004, - "1881": 7005, - "lion": 7006, - "traded": 7007, - "photographs": 7008, - "writes": 7009, - "craig": 7010, - "##fa": 7011, - "trials": 7012, - "generated": 7013, - "beth": 7014, - "noble": 7015, - "debt": 7016, - "percentage": 7017, - "yorkshire": 7018, - "erected": 7019, - "ss": 7020, - "viewed": 7021, - "grades": 7022, - "confidence": 7023, - "ceased": 7024, - "islam": 7025, - "telephone": 
7026, - "retail": 7027, - "##ible": 7028, - "chile": 7029, - "m²": 7030, - "roberts": 7031, - "sixteen": 7032, - "##ich": 7033, - "commented": 7034, - "hampshire": 7035, - "innocent": 7036, - "dual": 7037, - "pounds": 7038, - "checked": 7039, - "regulations": 7040, - "afghanistan": 7041, - "sung": 7042, - "rico": 7043, - "liberty": 7044, - "assets": 7045, - "bigger": 7046, - "options": 7047, - "angels": 7048, - "relegated": 7049, - "tribute": 7050, - "wells": 7051, - "attending": 7052, - "leaf": 7053, - "##yan": 7054, - "butler": 7055, - "romanian": 7056, - "forum": 7057, - "monthly": 7058, - "lisa": 7059, - "patterns": 7060, - "gmina": 7061, - "##tory": 7062, - "madison": 7063, - "hurricane": 7064, - "rev": 7065, - "##ians": 7066, - "bristol": 7067, - "##ula": 7068, - "elite": 7069, - "valuable": 7070, - "disaster": 7071, - "democracy": 7072, - "awareness": 7073, - "germans": 7074, - "freyja": 7075, - "##ins": 7076, - "loop": 7077, - "absolutely": 7078, - "paying": 7079, - "populations": 7080, - "maine": 7081, - "sole": 7082, - "prayer": 7083, - "spencer": 7084, - "releases": 7085, - "doorway": 7086, - "bull": 7087, - "##ani": 7088, - "lover": 7089, - "midnight": 7090, - "conclusion": 7091, - "##sson": 7092, - "thirteen": 7093, - "lily": 7094, - "mediterranean": 7095, - "##lt": 7096, - "nhl": 7097, - "proud": 7098, - "sample": 7099, - "##hill": 7100, - "drummer": 7101, - "guinea": 7102, - "##ova": 7103, - "murphy": 7104, - "climb": 7105, - "##ston": 7106, - "instant": 7107, - "attributed": 7108, - "horn": 7109, - "ain": 7110, - "railways": 7111, - "steven": 7112, - "##ao": 7113, - "autumn": 7114, - "ferry": 7115, - "opponent": 7116, - "root": 7117, - "traveling": 7118, - "secured": 7119, - "corridor": 7120, - "stretched": 7121, - "tales": 7122, - "sheet": 7123, - "trinity": 7124, - "cattle": 7125, - "helps": 7126, - "indicates": 7127, - "manhattan": 7128, - "murdered": 7129, - "fitted": 7130, - "1882": 7131, - "gentle": 7132, - "grandmother": 7133, - "mines": 
7134, - "shocked": 7135, - "vegas": 7136, - "produces": 7137, - "##light": 7138, - "caribbean": 7139, - "##ou": 7140, - "belong": 7141, - "continuous": 7142, - "desperate": 7143, - "drunk": 7144, - "historically": 7145, - "trio": 7146, - "waved": 7147, - "raf": 7148, - "dealing": 7149, - "nathan": 7150, - "bat": 7151, - "murmured": 7152, - "interrupted": 7153, - "residing": 7154, - "scientist": 7155, - "pioneer": 7156, - "harold": 7157, - "aaron": 7158, - "##net": 7159, - "delta": 7160, - "attempting": 7161, - "minority": 7162, - "mini": 7163, - "believes": 7164, - "chorus": 7165, - "tend": 7166, - "lots": 7167, - "eyed": 7168, - "indoor": 7169, - "load": 7170, - "shots": 7171, - "updated": 7172, - "jail": 7173, - "##llo": 7174, - "concerning": 7175, - "connecting": 7176, - "wealth": 7177, - "##ved": 7178, - "slaves": 7179, - "arrive": 7180, - "rangers": 7181, - "sufficient": 7182, - "rebuilt": 7183, - "##wick": 7184, - "cardinal": 7185, - "flood": 7186, - "muhammad": 7187, - "whenever": 7188, - "relation": 7189, - "runners": 7190, - "moral": 7191, - "repair": 7192, - "viewers": 7193, - "arriving": 7194, - "revenge": 7195, - "punk": 7196, - "assisted": 7197, - "bath": 7198, - "fairly": 7199, - "breathe": 7200, - "lists": 7201, - "innings": 7202, - "illustrated": 7203, - "whisper": 7204, - "nearest": 7205, - "voters": 7206, - "clinton": 7207, - "ties": 7208, - "ultimate": 7209, - "screamed": 7210, - "beijing": 7211, - "lions": 7212, - "andre": 7213, - "fictional": 7214, - "gathering": 7215, - "comfort": 7216, - "radar": 7217, - "suitable": 7218, - "dismissed": 7219, - "hms": 7220, - "ban": 7221, - "pine": 7222, - "wrist": 7223, - "atmosphere": 7224, - "voivodeship": 7225, - "bid": 7226, - "timber": 7227, - "##ned": 7228, - "##nan": 7229, - "giants": 7230, - "##ane": 7231, - "cameron": 7232, - "recovery": 7233, - "uss": 7234, - "identical": 7235, - "categories": 7236, - "switched": 7237, - "serbia": 7238, - "laughter": 7239, - "noah": 7240, - "ensemble": 7241, - 
"therapy": 7242, - "peoples": 7243, - "touching": 7244, - "##off": 7245, - "locally": 7246, - "pearl": 7247, - "platforms": 7248, - "everywhere": 7249, - "ballet": 7250, - "tables": 7251, - "lanka": 7252, - "herbert": 7253, - "outdoor": 7254, - "toured": 7255, - "derek": 7256, - "1883": 7257, - "spaces": 7258, - "contested": 7259, - "swept": 7260, - "1878": 7261, - "exclusive": 7262, - "slight": 7263, - "connections": 7264, - "##dra": 7265, - "winds": 7266, - "prisoner": 7267, - "collective": 7268, - "bangladesh": 7269, - "tube": 7270, - "publicly": 7271, - "wealthy": 7272, - "thai": 7273, - "##ys": 7274, - "isolated": 7275, - "select": 7276, - "##ric": 7277, - "insisted": 7278, - "pen": 7279, - "fortune": 7280, - "ticket": 7281, - "spotted": 7282, - "reportedly": 7283, - "animation": 7284, - "enforcement": 7285, - "tanks": 7286, - "110": 7287, - "decides": 7288, - "wider": 7289, - "lowest": 7290, - "owen": 7291, - "##time": 7292, - "nod": 7293, - "hitting": 7294, - "##hn": 7295, - "gregory": 7296, - "furthermore": 7297, - "magazines": 7298, - "fighters": 7299, - "solutions": 7300, - "##ery": 7301, - "pointing": 7302, - "requested": 7303, - "peru": 7304, - "reed": 7305, - "chancellor": 7306, - "knights": 7307, - "mask": 7308, - "worker": 7309, - "eldest": 7310, - "flames": 7311, - "reduction": 7312, - "1860": 7313, - "volunteers": 7314, - "##tis": 7315, - "reporting": 7316, - "##hl": 7317, - "wire": 7318, - "advisory": 7319, - "endemic": 7320, - "origins": 7321, - "settlers": 7322, - "pursue": 7323, - "knock": 7324, - "consumer": 7325, - "1876": 7326, - "eu": 7327, - "compound": 7328, - "creatures": 7329, - "mansion": 7330, - "sentenced": 7331, - "ivan": 7332, - "deployed": 7333, - "guitars": 7334, - "frowned": 7335, - "involves": 7336, - "mechanism": 7337, - "kilometers": 7338, - "perspective": 7339, - "shops": 7340, - "maps": 7341, - "terminus": 7342, - "duncan": 7343, - "alien": 7344, - "fist": 7345, - "bridges": 7346, - "##pers": 7347, - "heroes": 7348, - 
"fed": 7349, - "derby": 7350, - "swallowed": 7351, - "##ros": 7352, - "patent": 7353, - "sara": 7354, - "illness": 7355, - "characterized": 7356, - "adventures": 7357, - "slide": 7358, - "hawaii": 7359, - "jurisdiction": 7360, - "##op": 7361, - "organised": 7362, - "##side": 7363, - "adelaide": 7364, - "walks": 7365, - "biology": 7366, - "se": 7367, - "##ties": 7368, - "rogers": 7369, - "swing": 7370, - "tightly": 7371, - "boundaries": 7372, - "##rie": 7373, - "prepare": 7374, - "implementation": 7375, - "stolen": 7376, - "##sha": 7377, - "certified": 7378, - "colombia": 7379, - "edwards": 7380, - "garage": 7381, - "##mm": 7382, - "recalled": 7383, - "##ball": 7384, - "rage": 7385, - "harm": 7386, - "nigeria": 7387, - "breast": 7388, - "##ren": 7389, - "furniture": 7390, - "pupils": 7391, - "settle": 7392, - "##lus": 7393, - "cuba": 7394, - "balls": 7395, - "client": 7396, - "alaska": 7397, - "21st": 7398, - "linear": 7399, - "thrust": 7400, - "celebration": 7401, - "latino": 7402, - "genetic": 7403, - "terror": 7404, - "##cia": 7405, - "##ening": 7406, - "lightning": 7407, - "fee": 7408, - "witness": 7409, - "lodge": 7410, - "establishing": 7411, - "skull": 7412, - "##ique": 7413, - "earning": 7414, - "hood": 7415, - "##ei": 7416, - "rebellion": 7417, - "wang": 7418, - "sporting": 7419, - "warned": 7420, - "missile": 7421, - "devoted": 7422, - "activist": 7423, - "porch": 7424, - "worship": 7425, - "fourteen": 7426, - "package": 7427, - "1871": 7428, - "decorated": 7429, - "##shire": 7430, - "housed": 7431, - "##ock": 7432, - "chess": 7433, - "sailed": 7434, - "doctors": 7435, - "oscar": 7436, - "joan": 7437, - "treat": 7438, - "garcia": 7439, - "harbour": 7440, - "jeremy": 7441, - "##ire": 7442, - "traditions": 7443, - "dominant": 7444, - "jacques": 7445, - "##gon": 7446, - "##wan": 7447, - "relocated": 7448, - "1879": 7449, - "amendment": 7450, - "sized": 7451, - "companion": 7452, - "simultaneously": 7453, - "volleyball": 7454, - "spun": 7455, - "acre": 7456, - 
"increases": 7457, - "stopping": 7458, - "loves": 7459, - "belongs": 7460, - "affect": 7461, - "drafted": 7462, - "tossed": 7463, - "scout": 7464, - "battles": 7465, - "1875": 7466, - "filming": 7467, - "shoved": 7468, - "munich": 7469, - "tenure": 7470, - "vertical": 7471, - "romance": 7472, - "pc": 7473, - "##cher": 7474, - "argue": 7475, - "##ical": 7476, - "craft": 7477, - "ranging": 7478, - "www": 7479, - "opens": 7480, - "honest": 7481, - "tyler": 7482, - "yesterday": 7483, - "virtual": 7484, - "##let": 7485, - "muslims": 7486, - "reveal": 7487, - "snake": 7488, - "immigrants": 7489, - "radical": 7490, - "screaming": 7491, - "speakers": 7492, - "firing": 7493, - "saving": 7494, - "belonging": 7495, - "ease": 7496, - "lighting": 7497, - "prefecture": 7498, - "blame": 7499, - "farmer": 7500, - "hungry": 7501, - "grows": 7502, - "rubbed": 7503, - "beam": 7504, - "sur": 7505, - "subsidiary": 7506, - "##cha": 7507, - "armenian": 7508, - "sao": 7509, - "dropping": 7510, - "conventional": 7511, - "##fer": 7512, - "microsoft": 7513, - "reply": 7514, - "qualify": 7515, - "spots": 7516, - "1867": 7517, - "sweat": 7518, - "festivals": 7519, - "##ken": 7520, - "immigration": 7521, - "physician": 7522, - "discover": 7523, - "exposure": 7524, - "sandy": 7525, - "explanation": 7526, - "isaac": 7527, - "implemented": 7528, - "##fish": 7529, - "hart": 7530, - "initiated": 7531, - "connect": 7532, - "stakes": 7533, - "presents": 7534, - "heights": 7535, - "householder": 7536, - "pleased": 7537, - "tourist": 7538, - "regardless": 7539, - "slip": 7540, - "closest": 7541, - "##ction": 7542, - "surely": 7543, - "sultan": 7544, - "brings": 7545, - "riley": 7546, - "preparation": 7547, - "aboard": 7548, - "slammed": 7549, - "baptist": 7550, - "experiment": 7551, - "ongoing": 7552, - "interstate": 7553, - "organic": 7554, - "playoffs": 7555, - "##ika": 7556, - "1877": 7557, - "130": 7558, - "##tar": 7559, - "hindu": 7560, - "error": 7561, - "tours": 7562, - "tier": 7563, - "plenty": 
7564, - "arrangements": 7565, - "talks": 7566, - "trapped": 7567, - "excited": 7568, - "sank": 7569, - "ho": 7570, - "athens": 7571, - "1872": 7572, - "denver": 7573, - "welfare": 7574, - "suburb": 7575, - "athletes": 7576, - "trick": 7577, - "diverse": 7578, - "belly": 7579, - "exclusively": 7580, - "yelled": 7581, - "1868": 7582, - "##med": 7583, - "conversion": 7584, - "##ette": 7585, - "1874": 7586, - "internationally": 7587, - "computers": 7588, - "conductor": 7589, - "abilities": 7590, - "sensitive": 7591, - "hello": 7592, - "dispute": 7593, - "measured": 7594, - "globe": 7595, - "rocket": 7596, - "prices": 7597, - "amsterdam": 7598, - "flights": 7599, - "tigers": 7600, - "inn": 7601, - "municipalities": 7602, - "emotion": 7603, - "references": 7604, - "3d": 7605, - "##mus": 7606, - "explains": 7607, - "airlines": 7608, - "manufactured": 7609, - "pm": 7610, - "archaeological": 7611, - "1873": 7612, - "interpretation": 7613, - "devon": 7614, - "comment": 7615, - "##ites": 7616, - "settlements": 7617, - "kissing": 7618, - "absolute": 7619, - "improvement": 7620, - "suite": 7621, - "impressed": 7622, - "barcelona": 7623, - "sullivan": 7624, - "jefferson": 7625, - "towers": 7626, - "jesse": 7627, - "julie": 7628, - "##tin": 7629, - "##lu": 7630, - "grandson": 7631, - "hi": 7632, - "gauge": 7633, - "regard": 7634, - "rings": 7635, - "interviews": 7636, - "trace": 7637, - "raymond": 7638, - "thumb": 7639, - "departments": 7640, - "burns": 7641, - "serial": 7642, - "bulgarian": 7643, - "scores": 7644, - "demonstrated": 7645, - "##ix": 7646, - "1866": 7647, - "kyle": 7648, - "alberta": 7649, - "underneath": 7650, - "romanized": 7651, - "##ward": 7652, - "relieved": 7653, - "acquisition": 7654, - "phrase": 7655, - "cliff": 7656, - "reveals": 7657, - "han": 7658, - "cuts": 7659, - "merger": 7660, - "custom": 7661, - "##dar": 7662, - "nee": 7663, - "gilbert": 7664, - "graduation": 7665, - "##nts": 7666, - "assessment": 7667, - "cafe": 7668, - "difficulty": 7669, - 
"demands": 7670, - "swung": 7671, - "democrat": 7672, - "jennifer": 7673, - "commons": 7674, - "1940s": 7675, - "grove": 7676, - "##yo": 7677, - "completing": 7678, - "focuses": 7679, - "sum": 7680, - "substitute": 7681, - "bearing": 7682, - "stretch": 7683, - "reception": 7684, - "##py": 7685, - "reflected": 7686, - "essentially": 7687, - "destination": 7688, - "pairs": 7689, - "##ched": 7690, - "survival": 7691, - "resource": 7692, - "##bach": 7693, - "promoting": 7694, - "doubles": 7695, - "messages": 7696, - "tear": 7697, - "##down": 7698, - "##fully": 7699, - "parade": 7700, - "florence": 7701, - "harvey": 7702, - "incumbent": 7703, - "partial": 7704, - "framework": 7705, - "900": 7706, - "pedro": 7707, - "frozen": 7708, - "procedure": 7709, - "olivia": 7710, - "controls": 7711, - "##mic": 7712, - "shelter": 7713, - "personally": 7714, - "temperatures": 7715, - "##od": 7716, - "brisbane": 7717, - "tested": 7718, - "sits": 7719, - "marble": 7720, - "comprehensive": 7721, - "oxygen": 7722, - "leonard": 7723, - "##kov": 7724, - "inaugural": 7725, - "iranian": 7726, - "referring": 7727, - "quarters": 7728, - "attitude": 7729, - "##ivity": 7730, - "mainstream": 7731, - "lined": 7732, - "mars": 7733, - "dakota": 7734, - "norfolk": 7735, - "unsuccessful": 7736, - "##°": 7737, - "explosion": 7738, - "helicopter": 7739, - "congressional": 7740, - "##sing": 7741, - "inspector": 7742, - "bitch": 7743, - "seal": 7744, - "departed": 7745, - "divine": 7746, - "##ters": 7747, - "coaching": 7748, - "examination": 7749, - "punishment": 7750, - "manufacturer": 7751, - "sink": 7752, - "columns": 7753, - "unincorporated": 7754, - "signals": 7755, - "nevada": 7756, - "squeezed": 7757, - "dylan": 7758, - "dining": 7759, - "photos": 7760, - "martial": 7761, - "manuel": 7762, - "eighteen": 7763, - "elevator": 7764, - "brushed": 7765, - "plates": 7766, - "ministers": 7767, - "ivy": 7768, - "congregation": 7769, - "##len": 7770, - "slept": 7771, - "specialized": 7772, - "taxes": 7773, 
- "curve": 7774, - "restricted": 7775, - "negotiations": 7776, - "likes": 7777, - "statistical": 7778, - "arnold": 7779, - "inspiration": 7780, - "execution": 7781, - "bold": 7782, - "intermediate": 7783, - "significance": 7784, - "margin": 7785, - "ruler": 7786, - "wheels": 7787, - "gothic": 7788, - "intellectual": 7789, - "dependent": 7790, - "listened": 7791, - "eligible": 7792, - "buses": 7793, - "widow": 7794, - "syria": 7795, - "earn": 7796, - "cincinnati": 7797, - "collapsed": 7798, - "recipient": 7799, - "secrets": 7800, - "accessible": 7801, - "philippine": 7802, - "maritime": 7803, - "goddess": 7804, - "clerk": 7805, - "surrender": 7806, - "breaks": 7807, - "playoff": 7808, - "database": 7809, - "##ified": 7810, - "##lon": 7811, - "ideal": 7812, - "beetle": 7813, - "aspect": 7814, - "soap": 7815, - "regulation": 7816, - "strings": 7817, - "expand": 7818, - "anglo": 7819, - "shorter": 7820, - "crosses": 7821, - "retreat": 7822, - "tough": 7823, - "coins": 7824, - "wallace": 7825, - "directions": 7826, - "pressing": 7827, - "##oon": 7828, - "shipping": 7829, - "locomotives": 7830, - "comparison": 7831, - "topics": 7832, - "nephew": 7833, - "##mes": 7834, - "distinction": 7835, - "honors": 7836, - "travelled": 7837, - "sierra": 7838, - "ibn": 7839, - "##over": 7840, - "fortress": 7841, - "sa": 7842, - "recognised": 7843, - "carved": 7844, - "1869": 7845, - "clients": 7846, - "##dan": 7847, - "intent": 7848, - "##mar": 7849, - "coaches": 7850, - "describing": 7851, - "bread": 7852, - "##ington": 7853, - "beaten": 7854, - "northwestern": 7855, - "##ona": 7856, - "merit": 7857, - "youtube": 7858, - "collapse": 7859, - "challenges": 7860, - "em": 7861, - "historians": 7862, - "objective": 7863, - "submitted": 7864, - "virus": 7865, - "attacking": 7866, - "drake": 7867, - "assume": 7868, - "##ere": 7869, - "diseases": 7870, - "marc": 7871, - "stem": 7872, - "leeds": 7873, - "##cus": 7874, - "##ab": 7875, - "farming": 7876, - "glasses": 7877, - "##lock": 7878, - 
"visits": 7879, - "nowhere": 7880, - "fellowship": 7881, - "relevant": 7882, - "carries": 7883, - "restaurants": 7884, - "experiments": 7885, - "101": 7886, - "constantly": 7887, - "bases": 7888, - "targets": 7889, - "shah": 7890, - "tenth": 7891, - "opponents": 7892, - "verse": 7893, - "territorial": 7894, - "##ira": 7895, - "writings": 7896, - "corruption": 7897, - "##hs": 7898, - "instruction": 7899, - "inherited": 7900, - "reverse": 7901, - "emphasis": 7902, - "##vic": 7903, - "employee": 7904, - "arch": 7905, - "keeps": 7906, - "rabbi": 7907, - "watson": 7908, - "payment": 7909, - "uh": 7910, - "##ala": 7911, - "nancy": 7912, - "##tre": 7913, - "venice": 7914, - "fastest": 7915, - "sexy": 7916, - "banned": 7917, - "adrian": 7918, - "properly": 7919, - "ruth": 7920, - "touchdown": 7921, - "dollar": 7922, - "boards": 7923, - "metre": 7924, - "circles": 7925, - "edges": 7926, - "favour": 7927, - "comments": 7928, - "ok": 7929, - "travels": 7930, - "liberation": 7931, - "scattered": 7932, - "firmly": 7933, - "##ular": 7934, - "holland": 7935, - "permitted": 7936, - "diesel": 7937, - "kenya": 7938, - "den": 7939, - "originated": 7940, - "##ral": 7941, - "demons": 7942, - "resumed": 7943, - "dragged": 7944, - "rider": 7945, - "##rus": 7946, - "servant": 7947, - "blinked": 7948, - "extend": 7949, - "torn": 7950, - "##ias": 7951, - "##sey": 7952, - "input": 7953, - "meal": 7954, - "everybody": 7955, - "cylinder": 7956, - "kinds": 7957, - "camps": 7958, - "##fe": 7959, - "bullet": 7960, - "logic": 7961, - "##wn": 7962, - "croatian": 7963, - "evolved": 7964, - "healthy": 7965, - "fool": 7966, - "chocolate": 7967, - "wise": 7968, - "preserve": 7969, - "pradesh": 7970, - "##ess": 7971, - "respective": 7972, - "1850": 7973, - "##ew": 7974, - "chicken": 7975, - "artificial": 7976, - "gross": 7977, - "corresponding": 7978, - "convicted": 7979, - "cage": 7980, - "caroline": 7981, - "dialogue": 7982, - "##dor": 7983, - "narrative": 7984, - "stranger": 7985, - "mario": 7986, - 
"br": 7987, - "christianity": 7988, - "failing": 7989, - "trent": 7990, - "commanding": 7991, - "buddhist": 7992, - "1848": 7993, - "maurice": 7994, - "focusing": 7995, - "yale": 7996, - "bike": 7997, - "altitude": 7998, - "##ering": 7999, - "mouse": 8000, - "revised": 8001, - "##sley": 8002, - "veteran": 8003, - "##ig": 8004, - "pulls": 8005, - "theology": 8006, - "crashed": 8007, - "campaigns": 8008, - "legion": 8009, - "##ability": 8010, - "drag": 8011, - "excellence": 8012, - "customer": 8013, - "cancelled": 8014, - "intensity": 8015, - "excuse": 8016, - "##lar": 8017, - "liga": 8018, - "participating": 8019, - "contributing": 8020, - "printing": 8021, - "##burn": 8022, - "variable": 8023, - "##rk": 8024, - "curious": 8025, - "bin": 8026, - "legacy": 8027, - "renaissance": 8028, - "##my": 8029, - "symptoms": 8030, - "binding": 8031, - "vocalist": 8032, - "dancer": 8033, - "##nie": 8034, - "grammar": 8035, - "gospel": 8036, - "democrats": 8037, - "ya": 8038, - "enters": 8039, - "sc": 8040, - "diplomatic": 8041, - "hitler": 8042, - "##ser": 8043, - "clouds": 8044, - "mathematical": 8045, - "quit": 8046, - "defended": 8047, - "oriented": 8048, - "##heim": 8049, - "fundamental": 8050, - "hardware": 8051, - "impressive": 8052, - "equally": 8053, - "convince": 8054, - "confederate": 8055, - "guilt": 8056, - "chuck": 8057, - "sliding": 8058, - "##ware": 8059, - "magnetic": 8060, - "narrowed": 8061, - "petersburg": 8062, - "bulgaria": 8063, - "otto": 8064, - "phd": 8065, - "skill": 8066, - "##ama": 8067, - "reader": 8068, - "hopes": 8069, - "pitcher": 8070, - "reservoir": 8071, - "hearts": 8072, - "automatically": 8073, - "expecting": 8074, - "mysterious": 8075, - "bennett": 8076, - "extensively": 8077, - "imagined": 8078, - "seeds": 8079, - "monitor": 8080, - "fix": 8081, - "##ative": 8082, - "journalism": 8083, - "struggling": 8084, - "signature": 8085, - "ranch": 8086, - "encounter": 8087, - "photographer": 8088, - "observation": 8089, - "protests": 8090, - "##pin": 
8091, - "influences": 8092, - "##hr": 8093, - "calendar": 8094, - "##all": 8095, - "cruz": 8096, - "croatia": 8097, - "locomotive": 8098, - "hughes": 8099, - "naturally": 8100, - "shakespeare": 8101, - "basement": 8102, - "hook": 8103, - "uncredited": 8104, - "faded": 8105, - "theories": 8106, - "approaches": 8107, - "dare": 8108, - "phillips": 8109, - "filling": 8110, - "fury": 8111, - "obama": 8112, - "##ain": 8113, - "efficient": 8114, - "arc": 8115, - "deliver": 8116, - "min": 8117, - "raid": 8118, - "breeding": 8119, - "inducted": 8120, - "leagues": 8121, - "efficiency": 8122, - "axis": 8123, - "montana": 8124, - "eagles": 8125, - "##ked": 8126, - "supplied": 8127, - "instructions": 8128, - "karen": 8129, - "picking": 8130, - "indicating": 8131, - "trap": 8132, - "anchor": 8133, - "practically": 8134, - "christians": 8135, - "tomb": 8136, - "vary": 8137, - "occasional": 8138, - "electronics": 8139, - "lords": 8140, - "readers": 8141, - "newcastle": 8142, - "faint": 8143, - "innovation": 8144, - "collect": 8145, - "situations": 8146, - "engagement": 8147, - "160": 8148, - "claude": 8149, - "mixture": 8150, - "##feld": 8151, - "peer": 8152, - "tissue": 8153, - "logo": 8154, - "lean": 8155, - "##ration": 8156, - "°f": 8157, - "floors": 8158, - "##ven": 8159, - "architects": 8160, - "reducing": 8161, - "##our": 8162, - "##ments": 8163, - "rope": 8164, - "1859": 8165, - "ottawa": 8166, - "##har": 8167, - "samples": 8168, - "banking": 8169, - "declaration": 8170, - "proteins": 8171, - "resignation": 8172, - "francois": 8173, - "saudi": 8174, - "advocate": 8175, - "exhibited": 8176, - "armor": 8177, - "twins": 8178, - "divorce": 8179, - "##ras": 8180, - "abraham": 8181, - "reviewed": 8182, - "jo": 8183, - "temporarily": 8184, - "matrix": 8185, - "physically": 8186, - "pulse": 8187, - "curled": 8188, - "##ena": 8189, - "difficulties": 8190, - "bengal": 8191, - "usage": 8192, - "##ban": 8193, - "annie": 8194, - "riders": 8195, - "certificate": 8196, - "##pi": 8197, - 
"holes": 8198, - "warsaw": 8199, - "distinctive": 8200, - "jessica": 8201, - "##mon": 8202, - "mutual": 8203, - "1857": 8204, - "customs": 8205, - "circular": 8206, - "eugene": 8207, - "removal": 8208, - "loaded": 8209, - "mere": 8210, - "vulnerable": 8211, - "depicted": 8212, - "generations": 8213, - "dame": 8214, - "heir": 8215, - "enormous": 8216, - "lightly": 8217, - "climbing": 8218, - "pitched": 8219, - "lessons": 8220, - "pilots": 8221, - "nepal": 8222, - "ram": 8223, - "google": 8224, - "preparing": 8225, - "brad": 8226, - "louise": 8227, - "renowned": 8228, - "##₂": 8229, - "liam": 8230, - "##ably": 8231, - "plaza": 8232, - "shaw": 8233, - "sophie": 8234, - "brilliant": 8235, - "bills": 8236, - "##bar": 8237, - "##nik": 8238, - "fucking": 8239, - "mainland": 8240, - "server": 8241, - "pleasant": 8242, - "seized": 8243, - "veterans": 8244, - "jerked": 8245, - "fail": 8246, - "beta": 8247, - "brush": 8248, - "radiation": 8249, - "stored": 8250, - "warmth": 8251, - "southeastern": 8252, - "nate": 8253, - "sin": 8254, - "raced": 8255, - "berkeley": 8256, - "joke": 8257, - "athlete": 8258, - "designation": 8259, - "trunk": 8260, - "##low": 8261, - "roland": 8262, - "qualification": 8263, - "archives": 8264, - "heels": 8265, - "artwork": 8266, - "receives": 8267, - "judicial": 8268, - "reserves": 8269, - "##bed": 8270, - "woke": 8271, - "installation": 8272, - "abu": 8273, - "floating": 8274, - "fake": 8275, - "lesser": 8276, - "excitement": 8277, - "interface": 8278, - "concentrated": 8279, - "addressed": 8280, - "characteristic": 8281, - "amanda": 8282, - "saxophone": 8283, - "monk": 8284, - "auto": 8285, - "##bus": 8286, - "releasing": 8287, - "egg": 8288, - "dies": 8289, - "interaction": 8290, - "defender": 8291, - "ce": 8292, - "outbreak": 8293, - "glory": 8294, - "loving": 8295, - "##bert": 8296, - "sequel": 8297, - "consciousness": 8298, - "http": 8299, - "awake": 8300, - "ski": 8301, - "enrolled": 8302, - "##ress": 8303, - "handling": 8304, - "rookie": 
8305, - "brow": 8306, - "somebody": 8307, - "biography": 8308, - "warfare": 8309, - "amounts": 8310, - "contracts": 8311, - "presentation": 8312, - "fabric": 8313, - "dissolved": 8314, - "challenged": 8315, - "meter": 8316, - "psychological": 8317, - "lt": 8318, - "elevated": 8319, - "rally": 8320, - "accurate": 8321, - "##tha": 8322, - "hospitals": 8323, - "undergraduate": 8324, - "specialist": 8325, - "venezuela": 8326, - "exhibit": 8327, - "shed": 8328, - "nursing": 8329, - "protestant": 8330, - "fluid": 8331, - "structural": 8332, - "footage": 8333, - "jared": 8334, - "consistent": 8335, - "prey": 8336, - "##ska": 8337, - "succession": 8338, - "reflect": 8339, - "exile": 8340, - "lebanon": 8341, - "wiped": 8342, - "suspect": 8343, - "shanghai": 8344, - "resting": 8345, - "integration": 8346, - "preservation": 8347, - "marvel": 8348, - "variant": 8349, - "pirates": 8350, - "sheep": 8351, - "rounded": 8352, - "capita": 8353, - "sailing": 8354, - "colonies": 8355, - "manuscript": 8356, - "deemed": 8357, - "variations": 8358, - "clarke": 8359, - "functional": 8360, - "emerging": 8361, - "boxing": 8362, - "relaxed": 8363, - "curse": 8364, - "azerbaijan": 8365, - "heavyweight": 8366, - "nickname": 8367, - "editorial": 8368, - "rang": 8369, - "grid": 8370, - "tightened": 8371, - "earthquake": 8372, - "flashed": 8373, - "miguel": 8374, - "rushing": 8375, - "##ches": 8376, - "improvements": 8377, - "boxes": 8378, - "brooks": 8379, - "180": 8380, - "consumption": 8381, - "molecular": 8382, - "felix": 8383, - "societies": 8384, - "repeatedly": 8385, - "variation": 8386, - "aids": 8387, - "civic": 8388, - "graphics": 8389, - "professionals": 8390, - "realm": 8391, - "autonomous": 8392, - "receiver": 8393, - "delayed": 8394, - "workshop": 8395, - "militia": 8396, - "chairs": 8397, - "trump": 8398, - "canyon": 8399, - "##point": 8400, - "harsh": 8401, - "extending": 8402, - "lovely": 8403, - "happiness": 8404, - "##jan": 8405, - "stake": 8406, - "eyebrows": 8407, - 
"embassy": 8408, - "wellington": 8409, - "hannah": 8410, - "##ella": 8411, - "sony": 8412, - "corners": 8413, - "bishops": 8414, - "swear": 8415, - "cloth": 8416, - "contents": 8417, - "xi": 8418, - "namely": 8419, - "commenced": 8420, - "1854": 8421, - "stanford": 8422, - "nashville": 8423, - "courage": 8424, - "graphic": 8425, - "commitment": 8426, - "garrison": 8427, - "##bin": 8428, - "hamlet": 8429, - "clearing": 8430, - "rebels": 8431, - "attraction": 8432, - "literacy": 8433, - "cooking": 8434, - "ruins": 8435, - "temples": 8436, - "jenny": 8437, - "humanity": 8438, - "celebrate": 8439, - "hasn": 8440, - "freight": 8441, - "sixty": 8442, - "rebel": 8443, - "bastard": 8444, - "##art": 8445, - "newton": 8446, - "##ada": 8447, - "deer": 8448, - "##ges": 8449, - "##ching": 8450, - "smiles": 8451, - "delaware": 8452, - "singers": 8453, - "##ets": 8454, - "approaching": 8455, - "assists": 8456, - "flame": 8457, - "##ph": 8458, - "boulevard": 8459, - "barrel": 8460, - "planted": 8461, - "##ome": 8462, - "pursuit": 8463, - "##sia": 8464, - "consequences": 8465, - "posts": 8466, - "shallow": 8467, - "invitation": 8468, - "rode": 8469, - "depot": 8470, - "ernest": 8471, - "kane": 8472, - "rod": 8473, - "concepts": 8474, - "preston": 8475, - "topic": 8476, - "chambers": 8477, - "striking": 8478, - "blast": 8479, - "arrives": 8480, - "descendants": 8481, - "montgomery": 8482, - "ranges": 8483, - "worlds": 8484, - "##lay": 8485, - "##ari": 8486, - "span": 8487, - "chaos": 8488, - "praise": 8489, - "##ag": 8490, - "fewer": 8491, - "1855": 8492, - "sanctuary": 8493, - "mud": 8494, - "fbi": 8495, - "##ions": 8496, - "programmes": 8497, - "maintaining": 8498, - "unity": 8499, - "harper": 8500, - "bore": 8501, - "handsome": 8502, - "closure": 8503, - "tournaments": 8504, - "thunder": 8505, - "nebraska": 8506, - "linda": 8507, - "facade": 8508, - "puts": 8509, - "satisfied": 8510, - "argentine": 8511, - "dale": 8512, - "cork": 8513, - "dome": 8514, - "panama": 8515, - "##yl": 
8516, - "1858": 8517, - "tasks": 8518, - "experts": 8519, - "##ates": 8520, - "feeding": 8521, - "equation": 8522, - "##las": 8523, - "##ida": 8524, - "##tu": 8525, - "engage": 8526, - "bryan": 8527, - "##ax": 8528, - "um": 8529, - "quartet": 8530, - "melody": 8531, - "disbanded": 8532, - "sheffield": 8533, - "blocked": 8534, - "gasped": 8535, - "delay": 8536, - "kisses": 8537, - "maggie": 8538, - "connects": 8539, - "##non": 8540, - "sts": 8541, - "poured": 8542, - "creator": 8543, - "publishers": 8544, - "##we": 8545, - "guided": 8546, - "ellis": 8547, - "extinct": 8548, - "hug": 8549, - "gaining": 8550, - "##ord": 8551, - "complicated": 8552, - "##bility": 8553, - "poll": 8554, - "clenched": 8555, - "investigate": 8556, - "##use": 8557, - "thereby": 8558, - "quantum": 8559, - "spine": 8560, - "cdp": 8561, - "humor": 8562, - "kills": 8563, - "administered": 8564, - "semifinals": 8565, - "##du": 8566, - "encountered": 8567, - "ignore": 8568, - "##bu": 8569, - "commentary": 8570, - "##maker": 8571, - "bother": 8572, - "roosevelt": 8573, - "140": 8574, - "plains": 8575, - "halfway": 8576, - "flowing": 8577, - "cultures": 8578, - "crack": 8579, - "imprisoned": 8580, - "neighboring": 8581, - "airline": 8582, - "##ses": 8583, - "##view": 8584, - "##mate": 8585, - "##ec": 8586, - "gather": 8587, - "wolves": 8588, - "marathon": 8589, - "transformed": 8590, - "##ill": 8591, - "cruise": 8592, - "organisations": 8593, - "carol": 8594, - "punch": 8595, - "exhibitions": 8596, - "numbered": 8597, - "alarm": 8598, - "ratings": 8599, - "daddy": 8600, - "silently": 8601, - "##stein": 8602, - "queens": 8603, - "colours": 8604, - "impression": 8605, - "guidance": 8606, - "liu": 8607, - "tactical": 8608, - "##rat": 8609, - "marshal": 8610, - "della": 8611, - "arrow": 8612, - "##ings": 8613, - "rested": 8614, - "feared": 8615, - "tender": 8616, - "owns": 8617, - "bitter": 8618, - "advisor": 8619, - "escort": 8620, - "##ides": 8621, - "spare": 8622, - "farms": 8623, - "grants": 8624, 
- "##ene": 8625, - "dragons": 8626, - "encourage": 8627, - "colleagues": 8628, - "cameras": 8629, - "##und": 8630, - "sucked": 8631, - "pile": 8632, - "spirits": 8633, - "prague": 8634, - "statements": 8635, - "suspension": 8636, - "landmark": 8637, - "fence": 8638, - "torture": 8639, - "recreation": 8640, - "bags": 8641, - "permanently": 8642, - "survivors": 8643, - "pond": 8644, - "spy": 8645, - "predecessor": 8646, - "bombing": 8647, - "coup": 8648, - "##og": 8649, - "protecting": 8650, - "transformation": 8651, - "glow": 8652, - "##lands": 8653, - "##book": 8654, - "dug": 8655, - "priests": 8656, - "andrea": 8657, - "feat": 8658, - "barn": 8659, - "jumping": 8660, - "##chen": 8661, - "##ologist": 8662, - "##con": 8663, - "casualties": 8664, - "stern": 8665, - "auckland": 8666, - "pipe": 8667, - "serie": 8668, - "revealing": 8669, - "ba": 8670, - "##bel": 8671, - "trevor": 8672, - "mercy": 8673, - "spectrum": 8674, - "yang": 8675, - "consist": 8676, - "governing": 8677, - "collaborated": 8678, - "possessed": 8679, - "epic": 8680, - "comprises": 8681, - "blew": 8682, - "shane": 8683, - "##ack": 8684, - "lopez": 8685, - "honored": 8686, - "magical": 8687, - "sacrifice": 8688, - "judgment": 8689, - "perceived": 8690, - "hammer": 8691, - "mtv": 8692, - "baronet": 8693, - "tune": 8694, - "das": 8695, - "missionary": 8696, - "sheets": 8697, - "350": 8698, - "neutral": 8699, - "oral": 8700, - "threatening": 8701, - "attractive": 8702, - "shade": 8703, - "aims": 8704, - "seminary": 8705, - "##master": 8706, - "estates": 8707, - "1856": 8708, - "michel": 8709, - "wounds": 8710, - "refugees": 8711, - "manufacturers": 8712, - "##nic": 8713, - "mercury": 8714, - "syndrome": 8715, - "porter": 8716, - "##iya": 8717, - "##din": 8718, - "hamburg": 8719, - "identification": 8720, - "upstairs": 8721, - "purse": 8722, - "widened": 8723, - "pause": 8724, - "cared": 8725, - "breathed": 8726, - "affiliate": 8727, - "santiago": 8728, - "prevented": 8729, - "celtic": 8730, - "fisher": 
8731, - "125": 8732, - "recruited": 8733, - "byzantine": 8734, - "reconstruction": 8735, - "farther": 8736, - "##mp": 8737, - "diet": 8738, - "sake": 8739, - "au": 8740, - "spite": 8741, - "sensation": 8742, - "##ert": 8743, - "blank": 8744, - "separation": 8745, - "105": 8746, - "##hon": 8747, - "vladimir": 8748, - "armies": 8749, - "anime": 8750, - "##lie": 8751, - "accommodate": 8752, - "orbit": 8753, - "cult": 8754, - "sofia": 8755, - "archive": 8756, - "##ify": 8757, - "##box": 8758, - "founders": 8759, - "sustained": 8760, - "disorder": 8761, - "honours": 8762, - "northeastern": 8763, - "mia": 8764, - "crops": 8765, - "violet": 8766, - "threats": 8767, - "blanket": 8768, - "fires": 8769, - "canton": 8770, - "followers": 8771, - "southwestern": 8772, - "prototype": 8773, - "voyage": 8774, - "assignment": 8775, - "altered": 8776, - "moderate": 8777, - "protocol": 8778, - "pistol": 8779, - "##eo": 8780, - "questioned": 8781, - "brass": 8782, - "lifting": 8783, - "1852": 8784, - "math": 8785, - "authored": 8786, - "##ual": 8787, - "doug": 8788, - "dimensional": 8789, - "dynamic": 8790, - "##san": 8791, - "1851": 8792, - "pronounced": 8793, - "grateful": 8794, - "quest": 8795, - "uncomfortable": 8796, - "boom": 8797, - "presidency": 8798, - "stevens": 8799, - "relating": 8800, - "politicians": 8801, - "chen": 8802, - "barrier": 8803, - "quinn": 8804, - "diana": 8805, - "mosque": 8806, - "tribal": 8807, - "cheese": 8808, - "palmer": 8809, - "portions": 8810, - "sometime": 8811, - "chester": 8812, - "treasure": 8813, - "wu": 8814, - "bend": 8815, - "download": 8816, - "millions": 8817, - "reforms": 8818, - "registration": 8819, - "##osa": 8820, - "consequently": 8821, - "monitoring": 8822, - "ate": 8823, - "preliminary": 8824, - "brandon": 8825, - "invented": 8826, - "ps": 8827, - "eaten": 8828, - "exterior": 8829, - "intervention": 8830, - "ports": 8831, - "documented": 8832, - "log": 8833, - "displays": 8834, - "lecture": 8835, - "sally": 8836, - "favourite": 
8837, - "##itz": 8838, - "vermont": 8839, - "lo": 8840, - "invisible": 8841, - "isle": 8842, - "breed": 8843, - "##ator": 8844, - "journalists": 8845, - "relay": 8846, - "speaks": 8847, - "backward": 8848, - "explore": 8849, - "midfielder": 8850, - "actively": 8851, - "stefan": 8852, - "procedures": 8853, - "cannon": 8854, - "blond": 8855, - "kenneth": 8856, - "centered": 8857, - "servants": 8858, - "chains": 8859, - "libraries": 8860, - "malcolm": 8861, - "essex": 8862, - "henri": 8863, - "slavery": 8864, - "##hal": 8865, - "facts": 8866, - "fairy": 8867, - "coached": 8868, - "cassie": 8869, - "cats": 8870, - "washed": 8871, - "cop": 8872, - "##fi": 8873, - "announcement": 8874, - "item": 8875, - "2000s": 8876, - "vinyl": 8877, - "activated": 8878, - "marco": 8879, - "frontier": 8880, - "growled": 8881, - "curriculum": 8882, - "##das": 8883, - "loyal": 8884, - "accomplished": 8885, - "leslie": 8886, - "ritual": 8887, - "kenny": 8888, - "##00": 8889, - "vii": 8890, - "napoleon": 8891, - "hollow": 8892, - "hybrid": 8893, - "jungle": 8894, - "stationed": 8895, - "friedrich": 8896, - "counted": 8897, - "##ulated": 8898, - "platinum": 8899, - "theatrical": 8900, - "seated": 8901, - "col": 8902, - "rubber": 8903, - "glen": 8904, - "1840": 8905, - "diversity": 8906, - "healing": 8907, - "extends": 8908, - "id": 8909, - "provisions": 8910, - "administrator": 8911, - "columbus": 8912, - "##oe": 8913, - "tributary": 8914, - "te": 8915, - "assured": 8916, - "org": 8917, - "##uous": 8918, - "prestigious": 8919, - "examined": 8920, - "lectures": 8921, - "grammy": 8922, - "ronald": 8923, - "associations": 8924, - "bailey": 8925, - "allan": 8926, - "essays": 8927, - "flute": 8928, - "believing": 8929, - "consultant": 8930, - "proceedings": 8931, - "travelling": 8932, - "1853": 8933, - "kit": 8934, - "kerala": 8935, - "yugoslavia": 8936, - "buddy": 8937, - "methodist": 8938, - "##ith": 8939, - "burial": 8940, - "centres": 8941, - "batman": 8942, - "##nda": 8943, - "discontinued": 
8944, - "bo": 8945, - "dock": 8946, - "stockholm": 8947, - "lungs": 8948, - "severely": 8949, - "##nk": 8950, - "citing": 8951, - "manga": 8952, - "##ugh": 8953, - "steal": 8954, - "mumbai": 8955, - "iraqi": 8956, - "robot": 8957, - "celebrity": 8958, - "bride": 8959, - "broadcasts": 8960, - "abolished": 8961, - "pot": 8962, - "joel": 8963, - "overhead": 8964, - "franz": 8965, - "packed": 8966, - "reconnaissance": 8967, - "johann": 8968, - "acknowledged": 8969, - "introduce": 8970, - "handled": 8971, - "doctorate": 8972, - "developments": 8973, - "drinks": 8974, - "alley": 8975, - "palestine": 8976, - "##nis": 8977, - "##aki": 8978, - "proceeded": 8979, - "recover": 8980, - "bradley": 8981, - "grain": 8982, - "patch": 8983, - "afford": 8984, - "infection": 8985, - "nationalist": 8986, - "legendary": 8987, - "##ath": 8988, - "interchange": 8989, - "virtually": 8990, - "gen": 8991, - "gravity": 8992, - "exploration": 8993, - "amber": 8994, - "vital": 8995, - "wishes": 8996, - "powell": 8997, - "doctrine": 8998, - "elbow": 8999, - "screenplay": 9000, - "##bird": 9001, - "contribute": 9002, - "indonesian": 9003, - "pet": 9004, - "creates": 9005, - "##com": 9006, - "enzyme": 9007, - "kylie": 9008, - "discipline": 9009, - "drops": 9010, - "manila": 9011, - "hunger": 9012, - "##ien": 9013, - "layers": 9014, - "suffer": 9015, - "fever": 9016, - "bits": 9017, - "monica": 9018, - "keyboard": 9019, - "manages": 9020, - "##hood": 9021, - "searched": 9022, - "appeals": 9023, - "##bad": 9024, - "testament": 9025, - "grande": 9026, - "reid": 9027, - "##war": 9028, - "beliefs": 9029, - "congo": 9030, - "##ification": 9031, - "##dia": 9032, - "si": 9033, - "requiring": 9034, - "##via": 9035, - "casey": 9036, - "1849": 9037, - "regret": 9038, - "streak": 9039, - "rape": 9040, - "depends": 9041, - "syrian": 9042, - "sprint": 9043, - "pound": 9044, - "tourists": 9045, - "upcoming": 9046, - "pub": 9047, - "##xi": 9048, - "tense": 9049, - "##els": 9050, - "practiced": 9051, - "echo": 
9052, - "nationwide": 9053, - "guild": 9054, - "motorcycle": 9055, - "liz": 9056, - "##zar": 9057, - "chiefs": 9058, - "desired": 9059, - "elena": 9060, - "bye": 9061, - "precious": 9062, - "absorbed": 9063, - "relatives": 9064, - "booth": 9065, - "pianist": 9066, - "##mal": 9067, - "citizenship": 9068, - "exhausted": 9069, - "wilhelm": 9070, - "##ceae": 9071, - "##hed": 9072, - "noting": 9073, - "quarterback": 9074, - "urge": 9075, - "hectares": 9076, - "##gue": 9077, - "ace": 9078, - "holly": 9079, - "##tal": 9080, - "blonde": 9081, - "davies": 9082, - "parked": 9083, - "sustainable": 9084, - "stepping": 9085, - "twentieth": 9086, - "airfield": 9087, - "galaxy": 9088, - "nest": 9089, - "chip": 9090, - "##nell": 9091, - "tan": 9092, - "shaft": 9093, - "paulo": 9094, - "requirement": 9095, - "##zy": 9096, - "paradise": 9097, - "tobacco": 9098, - "trans": 9099, - "renewed": 9100, - "vietnamese": 9101, - "##cker": 9102, - "##ju": 9103, - "suggesting": 9104, - "catching": 9105, - "holmes": 9106, - "enjoying": 9107, - "md": 9108, - "trips": 9109, - "colt": 9110, - "holder": 9111, - "butterfly": 9112, - "nerve": 9113, - "reformed": 9114, - "cherry": 9115, - "bowling": 9116, - "trailer": 9117, - "carriage": 9118, - "goodbye": 9119, - "appreciate": 9120, - "toy": 9121, - "joshua": 9122, - "interactive": 9123, - "enabled": 9124, - "involve": 9125, - "##kan": 9126, - "collar": 9127, - "determination": 9128, - "bunch": 9129, - "facebook": 9130, - "recall": 9131, - "shorts": 9132, - "superintendent": 9133, - "episcopal": 9134, - "frustration": 9135, - "giovanni": 9136, - "nineteenth": 9137, - "laser": 9138, - "privately": 9139, - "array": 9140, - "circulation": 9141, - "##ovic": 9142, - "armstrong": 9143, - "deals": 9144, - "painful": 9145, - "permit": 9146, - "discrimination": 9147, - "##wi": 9148, - "aires": 9149, - "retiring": 9150, - "cottage": 9151, - "ni": 9152, - "##sta": 9153, - "horizon": 9154, - "ellen": 9155, - "jamaica": 9156, - "ripped": 9157, - "fernando": 9158, 
- "chapters": 9159, - "playstation": 9160, - "patron": 9161, - "lecturer": 9162, - "navigation": 9163, - "behaviour": 9164, - "genes": 9165, - "georgian": 9166, - "export": 9167, - "solomon": 9168, - "rivals": 9169, - "swift": 9170, - "seventeen": 9171, - "rodriguez": 9172, - "princeton": 9173, - "independently": 9174, - "sox": 9175, - "1847": 9176, - "arguing": 9177, - "entity": 9178, - "casting": 9179, - "hank": 9180, - "criteria": 9181, - "oakland": 9182, - "geographic": 9183, - "milwaukee": 9184, - "reflection": 9185, - "expanding": 9186, - "conquest": 9187, - "dubbed": 9188, - "##tv": 9189, - "halt": 9190, - "brave": 9191, - "brunswick": 9192, - "doi": 9193, - "arched": 9194, - "curtis": 9195, - "divorced": 9196, - "predominantly": 9197, - "somerset": 9198, - "streams": 9199, - "ugly": 9200, - "zoo": 9201, - "horrible": 9202, - "curved": 9203, - "buenos": 9204, - "fierce": 9205, - "dictionary": 9206, - "vector": 9207, - "theological": 9208, - "unions": 9209, - "handful": 9210, - "stability": 9211, - "chan": 9212, - "punjab": 9213, - "segments": 9214, - "##lly": 9215, - "altar": 9216, - "ignoring": 9217, - "gesture": 9218, - "monsters": 9219, - "pastor": 9220, - "##stone": 9221, - "thighs": 9222, - "unexpected": 9223, - "operators": 9224, - "abruptly": 9225, - "coin": 9226, - "compiled": 9227, - "associates": 9228, - "improving": 9229, - "migration": 9230, - "pin": 9231, - "##ose": 9232, - "compact": 9233, - "collegiate": 9234, - "reserved": 9235, - "##urs": 9236, - "quarterfinals": 9237, - "roster": 9238, - "restore": 9239, - "assembled": 9240, - "hurry": 9241, - "oval": 9242, - "##cies": 9243, - "1846": 9244, - "flags": 9245, - "martha": 9246, - "##del": 9247, - "victories": 9248, - "sharply": 9249, - "##rated": 9250, - "argues": 9251, - "deadly": 9252, - "neo": 9253, - "drawings": 9254, - "symbols": 9255, - "performer": 9256, - "##iel": 9257, - "griffin": 9258, - "restrictions": 9259, - "editing": 9260, - "andrews": 9261, - "java": 9262, - "journals": 9263, 
- "arabia": 9264, - "compositions": 9265, - "dee": 9266, - "pierce": 9267, - "removing": 9268, - "hindi": 9269, - "casino": 9270, - "runway": 9271, - "civilians": 9272, - "minds": 9273, - "nasa": 9274, - "hotels": 9275, - "##zation": 9276, - "refuge": 9277, - "rent": 9278, - "retain": 9279, - "potentially": 9280, - "conferences": 9281, - "suburban": 9282, - "conducting": 9283, - "##tto": 9284, - "##tions": 9285, - "##tle": 9286, - "descended": 9287, - "massacre": 9288, - "##cal": 9289, - "ammunition": 9290, - "terrain": 9291, - "fork": 9292, - "souls": 9293, - "counts": 9294, - "chelsea": 9295, - "durham": 9296, - "drives": 9297, - "cab": 9298, - "##bank": 9299, - "perth": 9300, - "realizing": 9301, - "palestinian": 9302, - "finn": 9303, - "simpson": 9304, - "##dal": 9305, - "betty": 9306, - "##ule": 9307, - "moreover": 9308, - "particles": 9309, - "cardinals": 9310, - "tent": 9311, - "evaluation": 9312, - "extraordinary": 9313, - "##oid": 9314, - "inscription": 9315, - "##works": 9316, - "wednesday": 9317, - "chloe": 9318, - "maintains": 9319, - "panels": 9320, - "ashley": 9321, - "trucks": 9322, - "##nation": 9323, - "cluster": 9324, - "sunlight": 9325, - "strikes": 9326, - "zhang": 9327, - "##wing": 9328, - "dialect": 9329, - "canon": 9330, - "##ap": 9331, - "tucked": 9332, - "##ws": 9333, - "collecting": 9334, - "##mas": 9335, - "##can": 9336, - "##sville": 9337, - "maker": 9338, - "quoted": 9339, - "evan": 9340, - "franco": 9341, - "aria": 9342, - "buying": 9343, - "cleaning": 9344, - "eva": 9345, - "closet": 9346, - "provision": 9347, - "apollo": 9348, - "clinic": 9349, - "rat": 9350, - "##ez": 9351, - "necessarily": 9352, - "ac": 9353, - "##gle": 9354, - "##ising": 9355, - "venues": 9356, - "flipped": 9357, - "cent": 9358, - "spreading": 9359, - "trustees": 9360, - "checking": 9361, - "authorized": 9362, - "##sco": 9363, - "disappointed": 9364, - "##ado": 9365, - "notion": 9366, - "duration": 9367, - "trumpet": 9368, - "hesitated": 9369, - "topped": 9370, - 
"brussels": 9371, - "rolls": 9372, - "theoretical": 9373, - "hint": 9374, - "define": 9375, - "aggressive": 9376, - "repeat": 9377, - "wash": 9378, - "peaceful": 9379, - "optical": 9380, - "width": 9381, - "allegedly": 9382, - "mcdonald": 9383, - "strict": 9384, - "copyright": 9385, - "##illa": 9386, - "investors": 9387, - "mar": 9388, - "jam": 9389, - "witnesses": 9390, - "sounding": 9391, - "miranda": 9392, - "michelle": 9393, - "privacy": 9394, - "hugo": 9395, - "harmony": 9396, - "##pp": 9397, - "valid": 9398, - "lynn": 9399, - "glared": 9400, - "nina": 9401, - "102": 9402, - "headquartered": 9403, - "diving": 9404, - "boarding": 9405, - "gibson": 9406, - "##ncy": 9407, - "albanian": 9408, - "marsh": 9409, - "routine": 9410, - "dealt": 9411, - "enhanced": 9412, - "er": 9413, - "intelligent": 9414, - "substance": 9415, - "targeted": 9416, - "enlisted": 9417, - "discovers": 9418, - "spinning": 9419, - "observations": 9420, - "pissed": 9421, - "smoking": 9422, - "rebecca": 9423, - "capitol": 9424, - "visa": 9425, - "varied": 9426, - "costume": 9427, - "seemingly": 9428, - "indies": 9429, - "compensation": 9430, - "surgeon": 9431, - "thursday": 9432, - "arsenal": 9433, - "westminster": 9434, - "suburbs": 9435, - "rid": 9436, - "anglican": 9437, - "##ridge": 9438, - "knots": 9439, - "foods": 9440, - "alumni": 9441, - "lighter": 9442, - "fraser": 9443, - "whoever": 9444, - "portal": 9445, - "scandal": 9446, - "##ray": 9447, - "gavin": 9448, - "advised": 9449, - "instructor": 9450, - "flooding": 9451, - "terrorist": 9452, - "##ale": 9453, - "teenage": 9454, - "interim": 9455, - "senses": 9456, - "duck": 9457, - "teen": 9458, - "thesis": 9459, - "abby": 9460, - "eager": 9461, - "overcome": 9462, - "##ile": 9463, - "newport": 9464, - "glenn": 9465, - "rises": 9466, - "shame": 9467, - "##cc": 9468, - "prompted": 9469, - "priority": 9470, - "forgot": 9471, - "bomber": 9472, - "nicolas": 9473, - "protective": 9474, - "360": 9475, - "cartoon": 9476, - "katherine": 9477, - 
"breeze": 9478, - "lonely": 9479, - "trusted": 9480, - "henderson": 9481, - "richardson": 9482, - "relax": 9483, - "banner": 9484, - "candy": 9485, - "palms": 9486, - "remarkable": 9487, - "##rio": 9488, - "legends": 9489, - "cricketer": 9490, - "essay": 9491, - "ordained": 9492, - "edmund": 9493, - "rifles": 9494, - "trigger": 9495, - "##uri": 9496, - "##away": 9497, - "sail": 9498, - "alert": 9499, - "1830": 9500, - "audiences": 9501, - "penn": 9502, - "sussex": 9503, - "siblings": 9504, - "pursued": 9505, - "indianapolis": 9506, - "resist": 9507, - "rosa": 9508, - "consequence": 9509, - "succeed": 9510, - "avoided": 9511, - "1845": 9512, - "##ulation": 9513, - "inland": 9514, - "##tie": 9515, - "##nna": 9516, - "counsel": 9517, - "profession": 9518, - "chronicle": 9519, - "hurried": 9520, - "##una": 9521, - "eyebrow": 9522, - "eventual": 9523, - "bleeding": 9524, - "innovative": 9525, - "cure": 9526, - "##dom": 9527, - "committees": 9528, - "accounting": 9529, - "con": 9530, - "scope": 9531, - "hardy": 9532, - "heather": 9533, - "tenor": 9534, - "gut": 9535, - "herald": 9536, - "codes": 9537, - "tore": 9538, - "scales": 9539, - "wagon": 9540, - "##oo": 9541, - "luxury": 9542, - "tin": 9543, - "prefer": 9544, - "fountain": 9545, - "triangle": 9546, - "bonds": 9547, - "darling": 9548, - "convoy": 9549, - "dried": 9550, - "traced": 9551, - "beings": 9552, - "troy": 9553, - "accidentally": 9554, - "slam": 9555, - "findings": 9556, - "smelled": 9557, - "joey": 9558, - "lawyers": 9559, - "outcome": 9560, - "steep": 9561, - "bosnia": 9562, - "configuration": 9563, - "shifting": 9564, - "toll": 9565, - "brook": 9566, - "performers": 9567, - "lobby": 9568, - "philosophical": 9569, - "construct": 9570, - "shrine": 9571, - "aggregate": 9572, - "boot": 9573, - "cox": 9574, - "phenomenon": 9575, - "savage": 9576, - "insane": 9577, - "solely": 9578, - "reynolds": 9579, - "lifestyle": 9580, - "##ima": 9581, - "nationally": 9582, - "holdings": 9583, - "consideration": 9584, - 
"enable": 9585, - "edgar": 9586, - "mo": 9587, - "mama": 9588, - "##tein": 9589, - "fights": 9590, - "relegation": 9591, - "chances": 9592, - "atomic": 9593, - "hub": 9594, - "conjunction": 9595, - "awkward": 9596, - "reactions": 9597, - "currency": 9598, - "finale": 9599, - "kumar": 9600, - "underwent": 9601, - "steering": 9602, - "elaborate": 9603, - "gifts": 9604, - "comprising": 9605, - "melissa": 9606, - "veins": 9607, - "reasonable": 9608, - "sunshine": 9609, - "chi": 9610, - "solve": 9611, - "trails": 9612, - "inhabited": 9613, - "elimination": 9614, - "ethics": 9615, - "huh": 9616, - "ana": 9617, - "molly": 9618, - "consent": 9619, - "apartments": 9620, - "layout": 9621, - "marines": 9622, - "##ces": 9623, - "hunters": 9624, - "bulk": 9625, - "##oma": 9626, - "hometown": 9627, - "##wall": 9628, - "##mont": 9629, - "cracked": 9630, - "reads": 9631, - "neighbouring": 9632, - "withdrawn": 9633, - "admission": 9634, - "wingspan": 9635, - "damned": 9636, - "anthology": 9637, - "lancashire": 9638, - "brands": 9639, - "batting": 9640, - "forgive": 9641, - "cuban": 9642, - "awful": 9643, - "##lyn": 9644, - "104": 9645, - "dimensions": 9646, - "imagination": 9647, - "##ade": 9648, - "dante": 9649, - "##ship": 9650, - "tracking": 9651, - "desperately": 9652, - "goalkeeper": 9653, - "##yne": 9654, - "groaned": 9655, - "workshops": 9656, - "confident": 9657, - "burton": 9658, - "gerald": 9659, - "milton": 9660, - "circus": 9661, - "uncertain": 9662, - "slope": 9663, - "copenhagen": 9664, - "sophia": 9665, - "fog": 9666, - "philosopher": 9667, - "portraits": 9668, - "accent": 9669, - "cycling": 9670, - "varying": 9671, - "gripped": 9672, - "larvae": 9673, - "garrett": 9674, - "specified": 9675, - "scotia": 9676, - "mature": 9677, - "luther": 9678, - "kurt": 9679, - "rap": 9680, - "##kes": 9681, - "aerial": 9682, - "750": 9683, - "ferdinand": 9684, - "heated": 9685, - "es": 9686, - "transported": 9687, - "##shan": 9688, - "safely": 9689, - "nonetheless": 9690, - "##orn": 
9691, - "##gal": 9692, - "motors": 9693, - "demanding": 9694, - "##sburg": 9695, - "startled": 9696, - "##brook": 9697, - "ally": 9698, - "generate": 9699, - "caps": 9700, - "ghana": 9701, - "stained": 9702, - "demo": 9703, - "mentions": 9704, - "beds": 9705, - "ap": 9706, - "afterward": 9707, - "diary": 9708, - "##bling": 9709, - "utility": 9710, - "##iro": 9711, - "richards": 9712, - "1837": 9713, - "conspiracy": 9714, - "conscious": 9715, - "shining": 9716, - "footsteps": 9717, - "observer": 9718, - "cyprus": 9719, - "urged": 9720, - "loyalty": 9721, - "developer": 9722, - "probability": 9723, - "olive": 9724, - "upgraded": 9725, - "gym": 9726, - "miracle": 9727, - "insects": 9728, - "graves": 9729, - "1844": 9730, - "ourselves": 9731, - "hydrogen": 9732, - "amazon": 9733, - "katie": 9734, - "tickets": 9735, - "poets": 9736, - "##pm": 9737, - "planes": 9738, - "##pan": 9739, - "prevention": 9740, - "witnessed": 9741, - "dense": 9742, - "jin": 9743, - "randy": 9744, - "tang": 9745, - "warehouse": 9746, - "monroe": 9747, - "bang": 9748, - "archived": 9749, - "elderly": 9750, - "investigations": 9751, - "alec": 9752, - "granite": 9753, - "mineral": 9754, - "conflicts": 9755, - "controlling": 9756, - "aboriginal": 9757, - "carlo": 9758, - "##zu": 9759, - "mechanics": 9760, - "stan": 9761, - "stark": 9762, - "rhode": 9763, - "skirt": 9764, - "est": 9765, - "##berry": 9766, - "bombs": 9767, - "respected": 9768, - "##horn": 9769, - "imposed": 9770, - "limestone": 9771, - "deny": 9772, - "nominee": 9773, - "memphis": 9774, - "grabbing": 9775, - "disabled": 9776, - "##als": 9777, - "amusement": 9778, - "aa": 9779, - "frankfurt": 9780, - "corn": 9781, - "referendum": 9782, - "varies": 9783, - "slowed": 9784, - "disk": 9785, - "firms": 9786, - "unconscious": 9787, - "incredible": 9788, - "clue": 9789, - "sue": 9790, - "##zhou": 9791, - "twist": 9792, - "##cio": 9793, - "joins": 9794, - "idaho": 9795, - "chad": 9796, - "developers": 9797, - "computing": 9798, - "destroyer": 
9799, - "103": 9800, - "mortal": 9801, - "tucker": 9802, - "kingston": 9803, - "choices": 9804, - "yu": 9805, - "carson": 9806, - "1800": 9807, - "os": 9808, - "whitney": 9809, - "geneva": 9810, - "pretend": 9811, - "dimension": 9812, - "staged": 9813, - "plateau": 9814, - "maya": 9815, - "##une": 9816, - "freestyle": 9817, - "##bc": 9818, - "rovers": 9819, - "hiv": 9820, - "##ids": 9821, - "tristan": 9822, - "classroom": 9823, - "prospect": 9824, - "##hus": 9825, - "honestly": 9826, - "diploma": 9827, - "lied": 9828, - "thermal": 9829, - "auxiliary": 9830, - "feast": 9831, - "unlikely": 9832, - "iata": 9833, - "##tel": 9834, - "morocco": 9835, - "pounding": 9836, - "treasury": 9837, - "lithuania": 9838, - "considerably": 9839, - "1841": 9840, - "dish": 9841, - "1812": 9842, - "geological": 9843, - "matching": 9844, - "stumbled": 9845, - "destroying": 9846, - "marched": 9847, - "brien": 9848, - "advances": 9849, - "cake": 9850, - "nicole": 9851, - "belle": 9852, - "settling": 9853, - "measuring": 9854, - "directing": 9855, - "##mie": 9856, - "tuesday": 9857, - "bassist": 9858, - "capabilities": 9859, - "stunned": 9860, - "fraud": 9861, - "torpedo": 9862, - "##list": 9863, - "##phone": 9864, - "anton": 9865, - "wisdom": 9866, - "surveillance": 9867, - "ruined": 9868, - "##ulate": 9869, - "lawsuit": 9870, - "healthcare": 9871, - "theorem": 9872, - "halls": 9873, - "trend": 9874, - "aka": 9875, - "horizontal": 9876, - "dozens": 9877, - "acquire": 9878, - "lasting": 9879, - "swim": 9880, - "hawk": 9881, - "gorgeous": 9882, - "fees": 9883, - "vicinity": 9884, - "decrease": 9885, - "adoption": 9886, - "tactics": 9887, - "##ography": 9888, - "pakistani": 9889, - "##ole": 9890, - "draws": 9891, - "##hall": 9892, - "willie": 9893, - "burke": 9894, - "heath": 9895, - "algorithm": 9896, - "integral": 9897, - "powder": 9898, - "elliott": 9899, - "brigadier": 9900, - "jackie": 9901, - "tate": 9902, - "varieties": 9903, - "darker": 9904, - "##cho": 9905, - "lately": 9906, - 
"cigarette": 9907, - "specimens": 9908, - "adds": 9909, - "##ree": 9910, - "##ensis": 9911, - "##inger": 9912, - "exploded": 9913, - "finalist": 9914, - "cia": 9915, - "murders": 9916, - "wilderness": 9917, - "arguments": 9918, - "nicknamed": 9919, - "acceptance": 9920, - "onwards": 9921, - "manufacture": 9922, - "robertson": 9923, - "jets": 9924, - "tampa": 9925, - "enterprises": 9926, - "blog": 9927, - "loudly": 9928, - "composers": 9929, - "nominations": 9930, - "1838": 9931, - "ai": 9932, - "malta": 9933, - "inquiry": 9934, - "automobile": 9935, - "hosting": 9936, - "viii": 9937, - "rays": 9938, - "tilted": 9939, - "grief": 9940, - "museums": 9941, - "strategies": 9942, - "furious": 9943, - "euro": 9944, - "equality": 9945, - "cohen": 9946, - "poison": 9947, - "surrey": 9948, - "wireless": 9949, - "governed": 9950, - "ridiculous": 9951, - "moses": 9952, - "##esh": 9953, - "##room": 9954, - "vanished": 9955, - "##ito": 9956, - "barnes": 9957, - "attract": 9958, - "morrison": 9959, - "istanbul": 9960, - "##iness": 9961, - "absent": 9962, - "rotation": 9963, - "petition": 9964, - "janet": 9965, - "##logical": 9966, - "satisfaction": 9967, - "custody": 9968, - "deliberately": 9969, - "observatory": 9970, - "comedian": 9971, - "surfaces": 9972, - "pinyin": 9973, - "novelist": 9974, - "strictly": 9975, - "canterbury": 9976, - "oslo": 9977, - "monks": 9978, - "embrace": 9979, - "ibm": 9980, - "jealous": 9981, - "photograph": 9982, - "continent": 9983, - "dorothy": 9984, - "marina": 9985, - "doc": 9986, - "excess": 9987, - "holden": 9988, - "allegations": 9989, - "explaining": 9990, - "stack": 9991, - "avoiding": 9992, - "lance": 9993, - "storyline": 9994, - "majesty": 9995, - "poorly": 9996, - "spike": 9997, - "dos": 9998, - "bradford": 9999, - "raven": 10000, - "travis": 10001, - "classics": 10002, - "proven": 10003, - "voltage": 10004, - "pillow": 10005, - "fists": 10006, - "butt": 10007, - "1842": 10008, - "interpreted": 10009, - "##car": 10010, - "1839": 10011, - 
"gage": 10012, - "telegraph": 10013, - "lens": 10014, - "promising": 10015, - "expelled": 10016, - "casual": 10017, - "collector": 10018, - "zones": 10019, - "##min": 10020, - "silly": 10021, - "nintendo": 10022, - "##kh": 10023, - "##bra": 10024, - "downstairs": 10025, - "chef": 10026, - "suspicious": 10027, - "afl": 10028, - "flies": 10029, - "vacant": 10030, - "uganda": 10031, - "pregnancy": 10032, - "condemned": 10033, - "lutheran": 10034, - "estimates": 10035, - "cheap": 10036, - "decree": 10037, - "saxon": 10038, - "proximity": 10039, - "stripped": 10040, - "idiot": 10041, - "deposits": 10042, - "contrary": 10043, - "presenter": 10044, - "magnus": 10045, - "glacier": 10046, - "im": 10047, - "offense": 10048, - "edwin": 10049, - "##ori": 10050, - "upright": 10051, - "##long": 10052, - "bolt": 10053, - "##ois": 10054, - "toss": 10055, - "geographical": 10056, - "##izes": 10057, - "environments": 10058, - "delicate": 10059, - "marking": 10060, - "abstract": 10061, - "xavier": 10062, - "nails": 10063, - "windsor": 10064, - "plantation": 10065, - "occurring": 10066, - "equity": 10067, - "saskatchewan": 10068, - "fears": 10069, - "drifted": 10070, - "sequences": 10071, - "vegetation": 10072, - "revolt": 10073, - "##stic": 10074, - "1843": 10075, - "sooner": 10076, - "fusion": 10077, - "opposing": 10078, - "nato": 10079, - "skating": 10080, - "1836": 10081, - "secretly": 10082, - "ruin": 10083, - "lease": 10084, - "##oc": 10085, - "edit": 10086, - "##nne": 10087, - "flora": 10088, - "anxiety": 10089, - "ruby": 10090, - "##ological": 10091, - "##mia": 10092, - "tel": 10093, - "bout": 10094, - "taxi": 10095, - "emmy": 10096, - "frost": 10097, - "rainbow": 10098, - "compounds": 10099, - "foundations": 10100, - "rainfall": 10101, - "assassination": 10102, - "nightmare": 10103, - "dominican": 10104, - "##win": 10105, - "achievements": 10106, - "deserve": 10107, - "orlando": 10108, - "intact": 10109, - "armenia": 10110, - "##nte": 10111, - "calgary": 10112, - "valentine": 
10113, - "106": 10114, - "marion": 10115, - "proclaimed": 10116, - "theodore": 10117, - "bells": 10118, - "courtyard": 10119, - "thigh": 10120, - "gonzalez": 10121, - "console": 10122, - "troop": 10123, - "minimal": 10124, - "monte": 10125, - "everyday": 10126, - "##ence": 10127, - "##if": 10128, - "supporter": 10129, - "terrorism": 10130, - "buck": 10131, - "openly": 10132, - "presbyterian": 10133, - "activists": 10134, - "carpet": 10135, - "##iers": 10136, - "rubbing": 10137, - "uprising": 10138, - "##yi": 10139, - "cute": 10140, - "conceived": 10141, - "legally": 10142, - "##cht": 10143, - "millennium": 10144, - "cello": 10145, - "velocity": 10146, - "ji": 10147, - "rescued": 10148, - "cardiff": 10149, - "1835": 10150, - "rex": 10151, - "concentrate": 10152, - "senators": 10153, - "beard": 10154, - "rendered": 10155, - "glowing": 10156, - "battalions": 10157, - "scouts": 10158, - "competitors": 10159, - "sculptor": 10160, - "catalogue": 10161, - "arctic": 10162, - "ion": 10163, - "raja": 10164, - "bicycle": 10165, - "wow": 10166, - "glancing": 10167, - "lawn": 10168, - "##woman": 10169, - "gentleman": 10170, - "lighthouse": 10171, - "publish": 10172, - "predicted": 10173, - "calculated": 10174, - "##val": 10175, - "variants": 10176, - "##gne": 10177, - "strain": 10178, - "##ui": 10179, - "winston": 10180, - "deceased": 10181, - "##nus": 10182, - "touchdowns": 10183, - "brady": 10184, - "caleb": 10185, - "sinking": 10186, - "echoed": 10187, - "crush": 10188, - "hon": 10189, - "blessed": 10190, - "protagonist": 10191, - "hayes": 10192, - "endangered": 10193, - "magnitude": 10194, - "editors": 10195, - "##tine": 10196, - "estimate": 10197, - "responsibilities": 10198, - "##mel": 10199, - "backup": 10200, - "laying": 10201, - "consumed": 10202, - "sealed": 10203, - "zurich": 10204, - "lovers": 10205, - "frustrated": 10206, - "##eau": 10207, - "ahmed": 10208, - "kicking": 10209, - "mit": 10210, - "treasurer": 10211, - "1832": 10212, - "biblical": 10213, - "refuse": 
10214, - "terrified": 10215, - "pump": 10216, - "agrees": 10217, - "genuine": 10218, - "imprisonment": 10219, - "refuses": 10220, - "plymouth": 10221, - "##hen": 10222, - "lou": 10223, - "##nen": 10224, - "tara": 10225, - "trembling": 10226, - "antarctic": 10227, - "ton": 10228, - "learns": 10229, - "##tas": 10230, - "crap": 10231, - "crucial": 10232, - "faction": 10233, - "atop": 10234, - "##borough": 10235, - "wrap": 10236, - "lancaster": 10237, - "odds": 10238, - "hopkins": 10239, - "erik": 10240, - "lyon": 10241, - "##eon": 10242, - "bros": 10243, - "##ode": 10244, - "snap": 10245, - "locality": 10246, - "tips": 10247, - "empress": 10248, - "crowned": 10249, - "cal": 10250, - "acclaimed": 10251, - "chuckled": 10252, - "##ory": 10253, - "clara": 10254, - "sends": 10255, - "mild": 10256, - "towel": 10257, - "##fl": 10258, - "##day": 10259, - "##а": 10260, - "wishing": 10261, - "assuming": 10262, - "interviewed": 10263, - "##bal": 10264, - "##die": 10265, - "interactions": 10266, - "eden": 10267, - "cups": 10268, - "helena": 10269, - "##lf": 10270, - "indie": 10271, - "beck": 10272, - "##fire": 10273, - "batteries": 10274, - "filipino": 10275, - "wizard": 10276, - "parted": 10277, - "##lam": 10278, - "traces": 10279, - "##born": 10280, - "rows": 10281, - "idol": 10282, - "albany": 10283, - "delegates": 10284, - "##ees": 10285, - "##sar": 10286, - "discussions": 10287, - "##ex": 10288, - "notre": 10289, - "instructed": 10290, - "belgrade": 10291, - "highways": 10292, - "suggestion": 10293, - "lauren": 10294, - "possess": 10295, - "orientation": 10296, - "alexandria": 10297, - "abdul": 10298, - "beats": 10299, - "salary": 10300, - "reunion": 10301, - "ludwig": 10302, - "alright": 10303, - "wagner": 10304, - "intimate": 10305, - "pockets": 10306, - "slovenia": 10307, - "hugged": 10308, - "brighton": 10309, - "merchants": 10310, - "cruel": 10311, - "stole": 10312, - "trek": 10313, - "slopes": 10314, - "repairs": 10315, - "enrollment": 10316, - "politically": 10317, - 
"underlying": 10318, - "promotional": 10319, - "counting": 10320, - "boeing": 10321, - "##bb": 10322, - "isabella": 10323, - "naming": 10324, - "##и": 10325, - "keen": 10326, - "bacteria": 10327, - "listing": 10328, - "separately": 10329, - "belfast": 10330, - "ussr": 10331, - "450": 10332, - "lithuanian": 10333, - "anybody": 10334, - "ribs": 10335, - "sphere": 10336, - "martinez": 10337, - "cock": 10338, - "embarrassed": 10339, - "proposals": 10340, - "fragments": 10341, - "nationals": 10342, - "##fs": 10343, - "##wski": 10344, - "premises": 10345, - "fin": 10346, - "1500": 10347, - "alpine": 10348, - "matched": 10349, - "freely": 10350, - "bounded": 10351, - "jace": 10352, - "sleeve": 10353, - "##af": 10354, - "gaming": 10355, - "pier": 10356, - "populated": 10357, - "evident": 10358, - "##like": 10359, - "frances": 10360, - "flooded": 10361, - "##dle": 10362, - "frightened": 10363, - "pour": 10364, - "trainer": 10365, - "framed": 10366, - "visitor": 10367, - "challenging": 10368, - "pig": 10369, - "wickets": 10370, - "##fold": 10371, - "infected": 10372, - "email": 10373, - "##pes": 10374, - "arose": 10375, - "##aw": 10376, - "reward": 10377, - "ecuador": 10378, - "oblast": 10379, - "vale": 10380, - "ch": 10381, - "shuttle": 10382, - "##usa": 10383, - "bach": 10384, - "rankings": 10385, - "forbidden": 10386, - "cornwall": 10387, - "accordance": 10388, - "salem": 10389, - "consumers": 10390, - "bruno": 10391, - "fantastic": 10392, - "toes": 10393, - "machinery": 10394, - "resolved": 10395, - "julius": 10396, - "remembering": 10397, - "propaganda": 10398, - "iceland": 10399, - "bombardment": 10400, - "tide": 10401, - "contacts": 10402, - "wives": 10403, - "##rah": 10404, - "concerto": 10405, - "macdonald": 10406, - "albania": 10407, - "implement": 10408, - "daisy": 10409, - "tapped": 10410, - "sudan": 10411, - "helmet": 10412, - "angela": 10413, - "mistress": 10414, - "##lic": 10415, - "crop": 10416, - "sunk": 10417, - "finest": 10418, - "##craft": 10419, - 
"hostile": 10420, - "##ute": 10421, - "##tsu": 10422, - "boxer": 10423, - "fr": 10424, - "paths": 10425, - "adjusted": 10426, - "habit": 10427, - "ballot": 10428, - "supervision": 10429, - "soprano": 10430, - "##zen": 10431, - "bullets": 10432, - "wicked": 10433, - "sunset": 10434, - "regiments": 10435, - "disappear": 10436, - "lamp": 10437, - "performs": 10438, - "app": 10439, - "##gia": 10440, - "##oa": 10441, - "rabbit": 10442, - "digging": 10443, - "incidents": 10444, - "entries": 10445, - "##cion": 10446, - "dishes": 10447, - "##oi": 10448, - "introducing": 10449, - "##ati": 10450, - "##fied": 10451, - "freshman": 10452, - "slot": 10453, - "jill": 10454, - "tackles": 10455, - "baroque": 10456, - "backs": 10457, - "##iest": 10458, - "lone": 10459, - "sponsor": 10460, - "destiny": 10461, - "altogether": 10462, - "convert": 10463, - "##aro": 10464, - "consensus": 10465, - "shapes": 10466, - "demonstration": 10467, - "basically": 10468, - "feminist": 10469, - "auction": 10470, - "artifacts": 10471, - "##bing": 10472, - "strongest": 10473, - "twitter": 10474, - "halifax": 10475, - "2019": 10476, - "allmusic": 10477, - "mighty": 10478, - "smallest": 10479, - "precise": 10480, - "alexandra": 10481, - "viola": 10482, - "##los": 10483, - "##ille": 10484, - "manuscripts": 10485, - "##illo": 10486, - "dancers": 10487, - "ari": 10488, - "managers": 10489, - "monuments": 10490, - "blades": 10491, - "barracks": 10492, - "springfield": 10493, - "maiden": 10494, - "consolidated": 10495, - "electron": 10496, - "##end": 10497, - "berry": 10498, - "airing": 10499, - "wheat": 10500, - "nobel": 10501, - "inclusion": 10502, - "blair": 10503, - "payments": 10504, - "geography": 10505, - "bee": 10506, - "cc": 10507, - "eleanor": 10508, - "react": 10509, - "##hurst": 10510, - "afc": 10511, - "manitoba": 10512, - "##yu": 10513, - "su": 10514, - "lineup": 10515, - "fitness": 10516, - "recreational": 10517, - "investments": 10518, - "airborne": 10519, - "disappointment": 10520, - 
"##dis": 10521, - "edmonton": 10522, - "viewing": 10523, - "##row": 10524, - "renovation": 10525, - "##cast": 10526, - "infant": 10527, - "bankruptcy": 10528, - "roses": 10529, - "aftermath": 10530, - "pavilion": 10531, - "##yer": 10532, - "carpenter": 10533, - "withdrawal": 10534, - "ladder": 10535, - "##hy": 10536, - "discussing": 10537, - "popped": 10538, - "reliable": 10539, - "agreements": 10540, - "rochester": 10541, - "##abad": 10542, - "curves": 10543, - "bombers": 10544, - "220": 10545, - "rao": 10546, - "reverend": 10547, - "decreased": 10548, - "choosing": 10549, - "107": 10550, - "stiff": 10551, - "consulting": 10552, - "naples": 10553, - "crawford": 10554, - "tracy": 10555, - "ka": 10556, - "ribbon": 10557, - "cops": 10558, - "##lee": 10559, - "crushed": 10560, - "deciding": 10561, - "unified": 10562, - "teenager": 10563, - "accepting": 10564, - "flagship": 10565, - "explorer": 10566, - "poles": 10567, - "sanchez": 10568, - "inspection": 10569, - "revived": 10570, - "skilled": 10571, - "induced": 10572, - "exchanged": 10573, - "flee": 10574, - "locals": 10575, - "tragedy": 10576, - "swallow": 10577, - "loading": 10578, - "hanna": 10579, - "demonstrate": 10580, - "##ela": 10581, - "salvador": 10582, - "flown": 10583, - "contestants": 10584, - "civilization": 10585, - "##ines": 10586, - "wanna": 10587, - "rhodes": 10588, - "fletcher": 10589, - "hector": 10590, - "knocking": 10591, - "considers": 10592, - "##ough": 10593, - "nash": 10594, - "mechanisms": 10595, - "sensed": 10596, - "mentally": 10597, - "walt": 10598, - "unclear": 10599, - "##eus": 10600, - "renovated": 10601, - "madame": 10602, - "##cks": 10603, - "crews": 10604, - "governmental": 10605, - "##hin": 10606, - "undertaken": 10607, - "monkey": 10608, - "##ben": 10609, - "##ato": 10610, - "fatal": 10611, - "armored": 10612, - "copa": 10613, - "caves": 10614, - "governance": 10615, - "grasp": 10616, - "perception": 10617, - "certification": 10618, - "froze": 10619, - "damp": 10620, - "tugged": 
10621, - "wyoming": 10622, - "##rg": 10623, - "##ero": 10624, - "newman": 10625, - "##lor": 10626, - "nerves": 10627, - "curiosity": 10628, - "graph": 10629, - "115": 10630, - "##ami": 10631, - "withdraw": 10632, - "tunnels": 10633, - "dull": 10634, - "meredith": 10635, - "moss": 10636, - "exhibits": 10637, - "neighbors": 10638, - "communicate": 10639, - "accuracy": 10640, - "explored": 10641, - "raiders": 10642, - "republicans": 10643, - "secular": 10644, - "kat": 10645, - "superman": 10646, - "penny": 10647, - "criticised": 10648, - "##tch": 10649, - "freed": 10650, - "update": 10651, - "conviction": 10652, - "wade": 10653, - "ham": 10654, - "likewise": 10655, - "delegation": 10656, - "gotta": 10657, - "doll": 10658, - "promises": 10659, - "technological": 10660, - "myth": 10661, - "nationality": 10662, - "resolve": 10663, - "convent": 10664, - "##mark": 10665, - "sharon": 10666, - "dig": 10667, - "sip": 10668, - "coordinator": 10669, - "entrepreneur": 10670, - "fold": 10671, - "##dine": 10672, - "capability": 10673, - "councillor": 10674, - "synonym": 10675, - "blown": 10676, - "swan": 10677, - "cursed": 10678, - "1815": 10679, - "jonas": 10680, - "haired": 10681, - "sofa": 10682, - "canvas": 10683, - "keeper": 10684, - "rivalry": 10685, - "##hart": 10686, - "rapper": 10687, - "speedway": 10688, - "swords": 10689, - "postal": 10690, - "maxwell": 10691, - "estonia": 10692, - "potter": 10693, - "recurring": 10694, - "##nn": 10695, - "##ave": 10696, - "errors": 10697, - "##oni": 10698, - "cognitive": 10699, - "1834": 10700, - "##²": 10701, - "claws": 10702, - "nadu": 10703, - "roberto": 10704, - "bce": 10705, - "wrestler": 10706, - "ellie": 10707, - "##ations": 10708, - "infinite": 10709, - "ink": 10710, - "##tia": 10711, - "presumably": 10712, - "finite": 10713, - "staircase": 10714, - "108": 10715, - "noel": 10716, - "patricia": 10717, - "nacional": 10718, - "##cation": 10719, - "chill": 10720, - "eternal": 10721, - "tu": 10722, - "preventing": 10723, - 
"prussia": 10724, - "fossil": 10725, - "limbs": 10726, - "##logist": 10727, - "ernst": 10728, - "frog": 10729, - "perez": 10730, - "rene": 10731, - "##ace": 10732, - "pizza": 10733, - "prussian": 10734, - "##ios": 10735, - "##vy": 10736, - "molecules": 10737, - "regulatory": 10738, - "answering": 10739, - "opinions": 10740, - "sworn": 10741, - "lengths": 10742, - "supposedly": 10743, - "hypothesis": 10744, - "upward": 10745, - "habitats": 10746, - "seating": 10747, - "ancestors": 10748, - "drank": 10749, - "yield": 10750, - "hd": 10751, - "synthesis": 10752, - "researcher": 10753, - "modest": 10754, - "##var": 10755, - "mothers": 10756, - "peered": 10757, - "voluntary": 10758, - "homeland": 10759, - "##the": 10760, - "acclaim": 10761, - "##igan": 10762, - "static": 10763, - "valve": 10764, - "luxembourg": 10765, - "alto": 10766, - "carroll": 10767, - "fe": 10768, - "receptor": 10769, - "norton": 10770, - "ambulance": 10771, - "##tian": 10772, - "johnston": 10773, - "catholics": 10774, - "depicting": 10775, - "jointly": 10776, - "elephant": 10777, - "gloria": 10778, - "mentor": 10779, - "badge": 10780, - "ahmad": 10781, - "distinguish": 10782, - "remarked": 10783, - "councils": 10784, - "precisely": 10785, - "allison": 10786, - "advancing": 10787, - "detection": 10788, - "crowded": 10789, - "##10": 10790, - "cooperative": 10791, - "ankle": 10792, - "mercedes": 10793, - "dagger": 10794, - "surrendered": 10795, - "pollution": 10796, - "commit": 10797, - "subway": 10798, - "jeffrey": 10799, - "lesson": 10800, - "sculptures": 10801, - "provider": 10802, - "##fication": 10803, - "membrane": 10804, - "timothy": 10805, - "rectangular": 10806, - "fiscal": 10807, - "heating": 10808, - "teammate": 10809, - "basket": 10810, - "particle": 10811, - "anonymous": 10812, - "deployment": 10813, - "##ple": 10814, - "missiles": 10815, - "courthouse": 10816, - "proportion": 10817, - "shoe": 10818, - "sec": 10819, - "##ller": 10820, - "complaints": 10821, - "forbes": 10822, - "blacks": 
10823, - "abandon": 10824, - "remind": 10825, - "sizes": 10826, - "overwhelming": 10827, - "autobiography": 10828, - "natalie": 10829, - "##awa": 10830, - "risks": 10831, - "contestant": 10832, - "countryside": 10833, - "babies": 10834, - "scorer": 10835, - "invaded": 10836, - "enclosed": 10837, - "proceed": 10838, - "hurling": 10839, - "disorders": 10840, - "##cu": 10841, - "reflecting": 10842, - "continuously": 10843, - "cruiser": 10844, - "graduates": 10845, - "freeway": 10846, - "investigated": 10847, - "ore": 10848, - "deserved": 10849, - "maid": 10850, - "blocking": 10851, - "phillip": 10852, - "jorge": 10853, - "shakes": 10854, - "dove": 10855, - "mann": 10856, - "variables": 10857, - "lacked": 10858, - "burden": 10859, - "accompanying": 10860, - "que": 10861, - "consistently": 10862, - "organizing": 10863, - "provisional": 10864, - "complained": 10865, - "endless": 10866, - "##rm": 10867, - "tubes": 10868, - "juice": 10869, - "georges": 10870, - "krishna": 10871, - "mick": 10872, - "labels": 10873, - "thriller": 10874, - "##uch": 10875, - "laps": 10876, - "arcade": 10877, - "sage": 10878, - "snail": 10879, - "##table": 10880, - "shannon": 10881, - "fi": 10882, - "laurence": 10883, - "seoul": 10884, - "vacation": 10885, - "presenting": 10886, - "hire": 10887, - "churchill": 10888, - "surprisingly": 10889, - "prohibited": 10890, - "savannah": 10891, - "technically": 10892, - "##oli": 10893, - "170": 10894, - "##lessly": 10895, - "testimony": 10896, - "suited": 10897, - "speeds": 10898, - "toys": 10899, - "romans": 10900, - "mlb": 10901, - "flowering": 10902, - "measurement": 10903, - "talented": 10904, - "kay": 10905, - "settings": 10906, - "charleston": 10907, - "expectations": 10908, - "shattered": 10909, - "achieving": 10910, - "triumph": 10911, - "ceremonies": 10912, - "portsmouth": 10913, - "lanes": 10914, - "mandatory": 10915, - "loser": 10916, - "stretching": 10917, - "cologne": 10918, - "realizes": 10919, - "seventy": 10920, - "cornell": 10921, - 
"careers": 10922, - "webb": 10923, - "##ulating": 10924, - "americas": 10925, - "budapest": 10926, - "ava": 10927, - "suspicion": 10928, - "##ison": 10929, - "yo": 10930, - "conrad": 10931, - "##hai": 10932, - "sterling": 10933, - "jessie": 10934, - "rector": 10935, - "##az": 10936, - "1831": 10937, - "transform": 10938, - "organize": 10939, - "loans": 10940, - "christine": 10941, - "volcanic": 10942, - "warrant": 10943, - "slender": 10944, - "summers": 10945, - "subfamily": 10946, - "newer": 10947, - "danced": 10948, - "dynamics": 10949, - "rhine": 10950, - "proceeds": 10951, - "heinrich": 10952, - "gastropod": 10953, - "commands": 10954, - "sings": 10955, - "facilitate": 10956, - "easter": 10957, - "ra": 10958, - "positioned": 10959, - "responses": 10960, - "expense": 10961, - "fruits": 10962, - "yanked": 10963, - "imported": 10964, - "25th": 10965, - "velvet": 10966, - "vic": 10967, - "primitive": 10968, - "tribune": 10969, - "baldwin": 10970, - "neighbourhood": 10971, - "donna": 10972, - "rip": 10973, - "hay": 10974, - "pr": 10975, - "##uro": 10976, - "1814": 10977, - "espn": 10978, - "welcomed": 10979, - "##aria": 10980, - "qualifier": 10981, - "glare": 10982, - "highland": 10983, - "timing": 10984, - "##cted": 10985, - "shells": 10986, - "eased": 10987, - "geometry": 10988, - "louder": 10989, - "exciting": 10990, - "slovakia": 10991, - "##sion": 10992, - "##iz": 10993, - "##lot": 10994, - "savings": 10995, - "prairie": 10996, - "##ques": 10997, - "marching": 10998, - "rafael": 10999, - "tonnes": 11000, - "##lled": 11001, - "curtain": 11002, - "preceding": 11003, - "shy": 11004, - "heal": 11005, - "greene": 11006, - "worthy": 11007, - "##pot": 11008, - "detachment": 11009, - "bury": 11010, - "sherman": 11011, - "##eck": 11012, - "reinforced": 11013, - "seeks": 11014, - "bottles": 11015, - "contracted": 11016, - "duchess": 11017, - "outfit": 11018, - "walsh": 11019, - "##sc": 11020, - "mickey": 11021, - "##ase": 11022, - "geoffrey": 11023, - "archer": 11024, - 
"squeeze": 11025, - "dawson": 11026, - "eliminate": 11027, - "invention": 11028, - "##enberg": 11029, - "neal": 11030, - "##eth": 11031, - "stance": 11032, - "dealer": 11033, - "coral": 11034, - "maple": 11035, - "retire": 11036, - "polo": 11037, - "simplified": 11038, - "##ht": 11039, - "1833": 11040, - "hid": 11041, - "watts": 11042, - "backwards": 11043, - "jules": 11044, - "##oke": 11045, - "genesis": 11046, - "mt": 11047, - "frames": 11048, - "rebounds": 11049, - "burma": 11050, - "woodland": 11051, - "moist": 11052, - "santos": 11053, - "whispers": 11054, - "drained": 11055, - "subspecies": 11056, - "##aa": 11057, - "streaming": 11058, - "ulster": 11059, - "burnt": 11060, - "correspondence": 11061, - "maternal": 11062, - "gerard": 11063, - "denis": 11064, - "stealing": 11065, - "##load": 11066, - "genius": 11067, - "duchy": 11068, - "##oria": 11069, - "inaugurated": 11070, - "momentum": 11071, - "suits": 11072, - "placement": 11073, - "sovereign": 11074, - "clause": 11075, - "thames": 11076, - "##hara": 11077, - "confederation": 11078, - "reservation": 11079, - "sketch": 11080, - "yankees": 11081, - "lets": 11082, - "rotten": 11083, - "charm": 11084, - "hal": 11085, - "verses": 11086, - "ultra": 11087, - "commercially": 11088, - "dot": 11089, - "salon": 11090, - "citation": 11091, - "adopt": 11092, - "winnipeg": 11093, - "mist": 11094, - "allocated": 11095, - "cairo": 11096, - "##boy": 11097, - "jenkins": 11098, - "interference": 11099, - "objectives": 11100, - "##wind": 11101, - "1820": 11102, - "portfolio": 11103, - "armoured": 11104, - "sectors": 11105, - "##eh": 11106, - "initiatives": 11107, - "##world": 11108, - "integrity": 11109, - "exercises": 11110, - "robe": 11111, - "tap": 11112, - "ab": 11113, - "gazed": 11114, - "##tones": 11115, - "distracted": 11116, - "rulers": 11117, - "111": 11118, - "favorable": 11119, - "jerome": 11120, - "tended": 11121, - "cart": 11122, - "factories": 11123, - "##eri": 11124, - "diplomat": 11125, - "valued": 11126, - 
"gravel": 11127, - "charitable": 11128, - "##try": 11129, - "calvin": 11130, - "exploring": 11131, - "chang": 11132, - "shepherd": 11133, - "terrace": 11134, - "pdf": 11135, - "pupil": 11136, - "##ural": 11137, - "reflects": 11138, - "ups": 11139, - "##rch": 11140, - "governors": 11141, - "shelf": 11142, - "depths": 11143, - "##nberg": 11144, - "trailed": 11145, - "crest": 11146, - "tackle": 11147, - "##nian": 11148, - "##ats": 11149, - "hatred": 11150, - "##kai": 11151, - "clare": 11152, - "makers": 11153, - "ethiopia": 11154, - "longtime": 11155, - "detected": 11156, - "embedded": 11157, - "lacking": 11158, - "slapped": 11159, - "rely": 11160, - "thomson": 11161, - "anticipation": 11162, - "iso": 11163, - "morton": 11164, - "successive": 11165, - "agnes": 11166, - "screenwriter": 11167, - "straightened": 11168, - "philippe": 11169, - "playwright": 11170, - "haunted": 11171, - "licence": 11172, - "iris": 11173, - "intentions": 11174, - "sutton": 11175, - "112": 11176, - "logical": 11177, - "correctly": 11178, - "##weight": 11179, - "branded": 11180, - "licked": 11181, - "tipped": 11182, - "silva": 11183, - "ricky": 11184, - "narrator": 11185, - "requests": 11186, - "##ents": 11187, - "greeted": 11188, - "supernatural": 11189, - "cow": 11190, - "##wald": 11191, - "lung": 11192, - "refusing": 11193, - "employer": 11194, - "strait": 11195, - "gaelic": 11196, - "liner": 11197, - "##piece": 11198, - "zoe": 11199, - "sabha": 11200, - "##mba": 11201, - "driveway": 11202, - "harvest": 11203, - "prints": 11204, - "bates": 11205, - "reluctantly": 11206, - "threshold": 11207, - "algebra": 11208, - "ira": 11209, - "wherever": 11210, - "coupled": 11211, - "240": 11212, - "assumption": 11213, - "picks": 11214, - "##air": 11215, - "designers": 11216, - "raids": 11217, - "gentlemen": 11218, - "##ean": 11219, - "roller": 11220, - "blowing": 11221, - "leipzig": 11222, - "locks": 11223, - "screw": 11224, - "dressing": 11225, - "strand": 11226, - "##lings": 11227, - "scar": 11228, - 
"dwarf": 11229, - "depicts": 11230, - "##nu": 11231, - "nods": 11232, - "##mine": 11233, - "differ": 11234, - "boris": 11235, - "##eur": 11236, - "yuan": 11237, - "flip": 11238, - "##gie": 11239, - "mob": 11240, - "invested": 11241, - "questioning": 11242, - "applying": 11243, - "##ture": 11244, - "shout": 11245, - "##sel": 11246, - "gameplay": 11247, - "blamed": 11248, - "illustrations": 11249, - "bothered": 11250, - "weakness": 11251, - "rehabilitation": 11252, - "##of": 11253, - "##zes": 11254, - "envelope": 11255, - "rumors": 11256, - "miners": 11257, - "leicester": 11258, - "subtle": 11259, - "kerry": 11260, - "##ico": 11261, - "ferguson": 11262, - "##fu": 11263, - "premiership": 11264, - "ne": 11265, - "##cat": 11266, - "bengali": 11267, - "prof": 11268, - "catches": 11269, - "remnants": 11270, - "dana": 11271, - "##rily": 11272, - "shouting": 11273, - "presidents": 11274, - "baltic": 11275, - "ought": 11276, - "ghosts": 11277, - "dances": 11278, - "sailors": 11279, - "shirley": 11280, - "fancy": 11281, - "dominic": 11282, - "##bie": 11283, - "madonna": 11284, - "##rick": 11285, - "bark": 11286, - "buttons": 11287, - "gymnasium": 11288, - "ashes": 11289, - "liver": 11290, - "toby": 11291, - "oath": 11292, - "providence": 11293, - "doyle": 11294, - "evangelical": 11295, - "nixon": 11296, - "cement": 11297, - "carnegie": 11298, - "embarked": 11299, - "hatch": 11300, - "surroundings": 11301, - "guarantee": 11302, - "needing": 11303, - "pirate": 11304, - "essence": 11305, - "##bee": 11306, - "filter": 11307, - "crane": 11308, - "hammond": 11309, - "projected": 11310, - "immune": 11311, - "percy": 11312, - "twelfth": 11313, - "##ult": 11314, - "regent": 11315, - "doctoral": 11316, - "damon": 11317, - "mikhail": 11318, - "##ichi": 11319, - "lu": 11320, - "critically": 11321, - "elect": 11322, - "realised": 11323, - "abortion": 11324, - "acute": 11325, - "screening": 11326, - "mythology": 11327, - "steadily": 11328, - "##fc": 11329, - "frown": 11330, - "nottingham": 
11331, - "kirk": 11332, - "wa": 11333, - "minneapolis": 11334, - "##rra": 11335, - "module": 11336, - "algeria": 11337, - "mc": 11338, - "nautical": 11339, - "encounters": 11340, - "surprising": 11341, - "statues": 11342, - "availability": 11343, - "shirts": 11344, - "pie": 11345, - "alma": 11346, - "brows": 11347, - "munster": 11348, - "mack": 11349, - "soup": 11350, - "crater": 11351, - "tornado": 11352, - "sanskrit": 11353, - "cedar": 11354, - "explosive": 11355, - "bordered": 11356, - "dixon": 11357, - "planets": 11358, - "stamp": 11359, - "exam": 11360, - "happily": 11361, - "##bble": 11362, - "carriers": 11363, - "kidnapped": 11364, - "##vis": 11365, - "accommodation": 11366, - "emigrated": 11367, - "##met": 11368, - "knockout": 11369, - "correspondent": 11370, - "violation": 11371, - "profits": 11372, - "peaks": 11373, - "lang": 11374, - "specimen": 11375, - "agenda": 11376, - "ancestry": 11377, - "pottery": 11378, - "spelling": 11379, - "equations": 11380, - "obtaining": 11381, - "ki": 11382, - "linking": 11383, - "1825": 11384, - "debris": 11385, - "asylum": 11386, - "##20": 11387, - "buddhism": 11388, - "teddy": 11389, - "##ants": 11390, - "gazette": 11391, - "##nger": 11392, - "##sse": 11393, - "dental": 11394, - "eligibility": 11395, - "utc": 11396, - "fathers": 11397, - "averaged": 11398, - "zimbabwe": 11399, - "francesco": 11400, - "coloured": 11401, - "hissed": 11402, - "translator": 11403, - "lynch": 11404, - "mandate": 11405, - "humanities": 11406, - "mackenzie": 11407, - "uniforms": 11408, - "lin": 11409, - "##iana": 11410, - "##gio": 11411, - "asset": 11412, - "mhz": 11413, - "fitting": 11414, - "samantha": 11415, - "genera": 11416, - "wei": 11417, - "rim": 11418, - "beloved": 11419, - "shark": 11420, - "riot": 11421, - "entities": 11422, - "expressions": 11423, - "indo": 11424, - "carmen": 11425, - "slipping": 11426, - "owing": 11427, - "abbot": 11428, - "neighbor": 11429, - "sidney": 11430, - "##av": 11431, - "rats": 11432, - "recommendations": 
11433, - "encouraging": 11434, - "squadrons": 11435, - "anticipated": 11436, - "commanders": 11437, - "conquered": 11438, - "##oto": 11439, - "donations": 11440, - "diagnosed": 11441, - "##mond": 11442, - "divide": 11443, - "##iva": 11444, - "guessed": 11445, - "decoration": 11446, - "vernon": 11447, - "auditorium": 11448, - "revelation": 11449, - "conversations": 11450, - "##kers": 11451, - "##power": 11452, - "herzegovina": 11453, - "dash": 11454, - "alike": 11455, - "protested": 11456, - "lateral": 11457, - "herman": 11458, - "accredited": 11459, - "mg": 11460, - "##gent": 11461, - "freeman": 11462, - "mel": 11463, - "fiji": 11464, - "crow": 11465, - "crimson": 11466, - "##rine": 11467, - "livestock": 11468, - "##pped": 11469, - "humanitarian": 11470, - "bored": 11471, - "oz": 11472, - "whip": 11473, - "##lene": 11474, - "##ali": 11475, - "legitimate": 11476, - "alter": 11477, - "grinning": 11478, - "spelled": 11479, - "anxious": 11480, - "oriental": 11481, - "wesley": 11482, - "##nin": 11483, - "##hole": 11484, - "carnival": 11485, - "controller": 11486, - "detect": 11487, - "##ssa": 11488, - "bowed": 11489, - "educator": 11490, - "kosovo": 11491, - "macedonia": 11492, - "##sin": 11493, - "occupy": 11494, - "mastering": 11495, - "stephanie": 11496, - "janeiro": 11497, - "para": 11498, - "unaware": 11499, - "nurses": 11500, - "noon": 11501, - "135": 11502, - "cam": 11503, - "hopefully": 11504, - "ranger": 11505, - "combine": 11506, - "sociology": 11507, - "polar": 11508, - "rica": 11509, - "##eer": 11510, - "neill": 11511, - "##sman": 11512, - "holocaust": 11513, - "##ip": 11514, - "doubled": 11515, - "lust": 11516, - "1828": 11517, - "109": 11518, - "decent": 11519, - "cooling": 11520, - "unveiled": 11521, - "##card": 11522, - "1829": 11523, - "nsw": 11524, - "homer": 11525, - "chapman": 11526, - "meyer": 11527, - "##gin": 11528, - "dive": 11529, - "mae": 11530, - "reagan": 11531, - "expertise": 11532, - "##gled": 11533, - "darwin": 11534, - "brooke": 11535, - 
"sided": 11536, - "prosecution": 11537, - "investigating": 11538, - "comprised": 11539, - "petroleum": 11540, - "genres": 11541, - "reluctant": 11542, - "differently": 11543, - "trilogy": 11544, - "johns": 11545, - "vegetables": 11546, - "corpse": 11547, - "highlighted": 11548, - "lounge": 11549, - "pension": 11550, - "unsuccessfully": 11551, - "elegant": 11552, - "aided": 11553, - "ivory": 11554, - "beatles": 11555, - "amelia": 11556, - "cain": 11557, - "dubai": 11558, - "sunny": 11559, - "immigrant": 11560, - "babe": 11561, - "click": 11562, - "##nder": 11563, - "underwater": 11564, - "pepper": 11565, - "combining": 11566, - "mumbled": 11567, - "atlas": 11568, - "horns": 11569, - "accessed": 11570, - "ballad": 11571, - "physicians": 11572, - "homeless": 11573, - "gestured": 11574, - "rpm": 11575, - "freak": 11576, - "louisville": 11577, - "corporations": 11578, - "patriots": 11579, - "prizes": 11580, - "rational": 11581, - "warn": 11582, - "modes": 11583, - "decorative": 11584, - "overnight": 11585, - "din": 11586, - "troubled": 11587, - "phantom": 11588, - "##ort": 11589, - "monarch": 11590, - "sheer": 11591, - "##dorf": 11592, - "generals": 11593, - "guidelines": 11594, - "organs": 11595, - "addresses": 11596, - "##zon": 11597, - "enhance": 11598, - "curling": 11599, - "parishes": 11600, - "cord": 11601, - "##kie": 11602, - "linux": 11603, - "caesar": 11604, - "deutsche": 11605, - "bavaria": 11606, - "##bia": 11607, - "coleman": 11608, - "cyclone": 11609, - "##eria": 11610, - "bacon": 11611, - "petty": 11612, - "##yama": 11613, - "##old": 11614, - "hampton": 11615, - "diagnosis": 11616, - "1824": 11617, - "throws": 11618, - "complexity": 11619, - "rita": 11620, - "disputed": 11621, - "##₃": 11622, - "pablo": 11623, - "##sch": 11624, - "marketed": 11625, - "trafficking": 11626, - "##ulus": 11627, - "examine": 11628, - "plague": 11629, - "formats": 11630, - "##oh": 11631, - "vault": 11632, - "faithful": 11633, - "##bourne": 11634, - "webster": 11635, - "##ox": 
11636, - "highlights": 11637, - "##ient": 11638, - "##ann": 11639, - "phones": 11640, - "vacuum": 11641, - "sandwich": 11642, - "modeling": 11643, - "##gated": 11644, - "bolivia": 11645, - "clergy": 11646, - "qualities": 11647, - "isabel": 11648, - "##nas": 11649, - "##ars": 11650, - "wears": 11651, - "screams": 11652, - "reunited": 11653, - "annoyed": 11654, - "bra": 11655, - "##ancy": 11656, - "##rate": 11657, - "differential": 11658, - "transmitter": 11659, - "tattoo": 11660, - "container": 11661, - "poker": 11662, - "##och": 11663, - "excessive": 11664, - "resides": 11665, - "cowboys": 11666, - "##tum": 11667, - "augustus": 11668, - "trash": 11669, - "providers": 11670, - "statute": 11671, - "retreated": 11672, - "balcony": 11673, - "reversed": 11674, - "void": 11675, - "storey": 11676, - "preceded": 11677, - "masses": 11678, - "leap": 11679, - "laughs": 11680, - "neighborhoods": 11681, - "wards": 11682, - "schemes": 11683, - "falcon": 11684, - "santo": 11685, - "battlefield": 11686, - "pad": 11687, - "ronnie": 11688, - "thread": 11689, - "lesbian": 11690, - "venus": 11691, - "##dian": 11692, - "beg": 11693, - "sandstone": 11694, - "daylight": 11695, - "punched": 11696, - "gwen": 11697, - "analog": 11698, - "stroked": 11699, - "wwe": 11700, - "acceptable": 11701, - "measurements": 11702, - "dec": 11703, - "toxic": 11704, - "##kel": 11705, - "adequate": 11706, - "surgical": 11707, - "economist": 11708, - "parameters": 11709, - "varsity": 11710, - "##sberg": 11711, - "quantity": 11712, - "ella": 11713, - "##chy": 11714, - "##rton": 11715, - "countess": 11716, - "generating": 11717, - "precision": 11718, - "diamonds": 11719, - "expressway": 11720, - "ga": 11721, - "##ı": 11722, - "1821": 11723, - "uruguay": 11724, - "talents": 11725, - "galleries": 11726, - "expenses": 11727, - "scanned": 11728, - "colleague": 11729, - "outlets": 11730, - "ryder": 11731, - "lucien": 11732, - "##ila": 11733, - "paramount": 11734, - "##bon": 11735, - "syracuse": 11736, - "dim": 
11737, - "fangs": 11738, - "gown": 11739, - "sweep": 11740, - "##sie": 11741, - "toyota": 11742, - "missionaries": 11743, - "websites": 11744, - "##nsis": 11745, - "sentences": 11746, - "adviser": 11747, - "val": 11748, - "trademark": 11749, - "spells": 11750, - "##plane": 11751, - "patience": 11752, - "starter": 11753, - "slim": 11754, - "##borg": 11755, - "toe": 11756, - "incredibly": 11757, - "shoots": 11758, - "elliot": 11759, - "nobility": 11760, - "##wyn": 11761, - "cowboy": 11762, - "endorsed": 11763, - "gardner": 11764, - "tendency": 11765, - "persuaded": 11766, - "organisms": 11767, - "emissions": 11768, - "kazakhstan": 11769, - "amused": 11770, - "boring": 11771, - "chips": 11772, - "themed": 11773, - "##hand": 11774, - "llc": 11775, - "constantinople": 11776, - "chasing": 11777, - "systematic": 11778, - "guatemala": 11779, - "borrowed": 11780, - "erin": 11781, - "carey": 11782, - "##hard": 11783, - "highlands": 11784, - "struggles": 11785, - "1810": 11786, - "##ifying": 11787, - "##ced": 11788, - "wong": 11789, - "exceptions": 11790, - "develops": 11791, - "enlarged": 11792, - "kindergarten": 11793, - "castro": 11794, - "##ern": 11795, - "##rina": 11796, - "leigh": 11797, - "zombie": 11798, - "juvenile": 11799, - "##most": 11800, - "consul": 11801, - "##nar": 11802, - "sailor": 11803, - "hyde": 11804, - "clarence": 11805, - "intensive": 11806, - "pinned": 11807, - "nasty": 11808, - "useless": 11809, - "jung": 11810, - "clayton": 11811, - "stuffed": 11812, - "exceptional": 11813, - "ix": 11814, - "apostolic": 11815, - "230": 11816, - "transactions": 11817, - "##dge": 11818, - "exempt": 11819, - "swinging": 11820, - "cove": 11821, - "religions": 11822, - "##ash": 11823, - "shields": 11824, - "dairy": 11825, - "bypass": 11826, - "190": 11827, - "pursuing": 11828, - "bug": 11829, - "joyce": 11830, - "bombay": 11831, - "chassis": 11832, - "southampton": 11833, - "chat": 11834, - "interact": 11835, - "redesignated": 11836, - "##pen": 11837, - "nascar": 11838, 
- "pray": 11839, - "salmon": 11840, - "rigid": 11841, - "regained": 11842, - "malaysian": 11843, - "grim": 11844, - "publicity": 11845, - "constituted": 11846, - "capturing": 11847, - "toilet": 11848, - "delegate": 11849, - "purely": 11850, - "tray": 11851, - "drift": 11852, - "loosely": 11853, - "striker": 11854, - "weakened": 11855, - "trinidad": 11856, - "mitch": 11857, - "itv": 11858, - "defines": 11859, - "transmitted": 11860, - "ming": 11861, - "scarlet": 11862, - "nodding": 11863, - "fitzgerald": 11864, - "fu": 11865, - "narrowly": 11866, - "sp": 11867, - "tooth": 11868, - "standings": 11869, - "virtue": 11870, - "##₁": 11871, - "##wara": 11872, - "##cting": 11873, - "chateau": 11874, - "gloves": 11875, - "lid": 11876, - "##nel": 11877, - "hurting": 11878, - "conservatory": 11879, - "##pel": 11880, - "sinclair": 11881, - "reopened": 11882, - "sympathy": 11883, - "nigerian": 11884, - "strode": 11885, - "advocated": 11886, - "optional": 11887, - "chronic": 11888, - "discharge": 11889, - "##rc": 11890, - "suck": 11891, - "compatible": 11892, - "laurel": 11893, - "stella": 11894, - "shi": 11895, - "fails": 11896, - "wage": 11897, - "dodge": 11898, - "128": 11899, - "informal": 11900, - "sorts": 11901, - "levi": 11902, - "buddha": 11903, - "villagers": 11904, - "##aka": 11905, - "chronicles": 11906, - "heavier": 11907, - "summoned": 11908, - "gateway": 11909, - "3000": 11910, - "eleventh": 11911, - "jewelry": 11912, - "translations": 11913, - "accordingly": 11914, - "seas": 11915, - "##ency": 11916, - "fiber": 11917, - "pyramid": 11918, - "cubic": 11919, - "dragging": 11920, - "##ista": 11921, - "caring": 11922, - "##ops": 11923, - "android": 11924, - "contacted": 11925, - "lunar": 11926, - "##dt": 11927, - "kai": 11928, - "lisbon": 11929, - "patted": 11930, - "1826": 11931, - "sacramento": 11932, - "theft": 11933, - "madagascar": 11934, - "subtropical": 11935, - "disputes": 11936, - "ta": 11937, - "holidays": 11938, - "piper": 11939, - "willow": 11940, - "mare": 
11941, - "cane": 11942, - "itunes": 11943, - "newfoundland": 11944, - "benny": 11945, - "companions": 11946, - "dong": 11947, - "raj": 11948, - "observe": 11949, - "roar": 11950, - "charming": 11951, - "plaque": 11952, - "tibetan": 11953, - "fossils": 11954, - "enacted": 11955, - "manning": 11956, - "bubble": 11957, - "tina": 11958, - "tanzania": 11959, - "##eda": 11960, - "##hir": 11961, - "funk": 11962, - "swamp": 11963, - "deputies": 11964, - "cloak": 11965, - "ufc": 11966, - "scenario": 11967, - "par": 11968, - "scratch": 11969, - "metals": 11970, - "anthem": 11971, - "guru": 11972, - "engaging": 11973, - "specially": 11974, - "##boat": 11975, - "dialects": 11976, - "nineteen": 11977, - "cecil": 11978, - "duet": 11979, - "disability": 11980, - "messenger": 11981, - "unofficial": 11982, - "##lies": 11983, - "defunct": 11984, - "eds": 11985, - "moonlight": 11986, - "drainage": 11987, - "surname": 11988, - "puzzle": 11989, - "honda": 11990, - "switching": 11991, - "conservatives": 11992, - "mammals": 11993, - "knox": 11994, - "broadcaster": 11995, - "sidewalk": 11996, - "cope": 11997, - "##ried": 11998, - "benson": 11999, - "princes": 12000, - "peterson": 12001, - "##sal": 12002, - "bedford": 12003, - "sharks": 12004, - "eli": 12005, - "wreck": 12006, - "alberto": 12007, - "gasp": 12008, - "archaeology": 12009, - "lgbt": 12010, - "teaches": 12011, - "securities": 12012, - "madness": 12013, - "compromise": 12014, - "waving": 12015, - "coordination": 12016, - "davidson": 12017, - "visions": 12018, - "leased": 12019, - "possibilities": 12020, - "eighty": 12021, - "jun": 12022, - "fernandez": 12023, - "enthusiasm": 12024, - "assassin": 12025, - "sponsorship": 12026, - "reviewer": 12027, - "kingdoms": 12028, - "estonian": 12029, - "laboratories": 12030, - "##fy": 12031, - "##nal": 12032, - "applies": 12033, - "verb": 12034, - "celebrations": 12035, - "##zzo": 12036, - "rowing": 12037, - "lightweight": 12038, - "sadness": 12039, - "submit": 12040, - "mvp": 12041, - 
"balanced": 12042, - "dude": 12043, - "##vas": 12044, - "explicitly": 12045, - "metric": 12046, - "magnificent": 12047, - "mound": 12048, - "brett": 12049, - "mohammad": 12050, - "mistakes": 12051, - "irregular": 12052, - "##hing": 12053, - "##ass": 12054, - "sanders": 12055, - "betrayed": 12056, - "shipped": 12057, - "surge": 12058, - "##enburg": 12059, - "reporters": 12060, - "termed": 12061, - "georg": 12062, - "pity": 12063, - "verbal": 12064, - "bulls": 12065, - "abbreviated": 12066, - "enabling": 12067, - "appealed": 12068, - "##are": 12069, - "##atic": 12070, - "sicily": 12071, - "sting": 12072, - "heel": 12073, - "sweetheart": 12074, - "bart": 12075, - "spacecraft": 12076, - "brutal": 12077, - "monarchy": 12078, - "##tter": 12079, - "aberdeen": 12080, - "cameo": 12081, - "diane": 12082, - "##ub": 12083, - "survivor": 12084, - "clyde": 12085, - "##aries": 12086, - "complaint": 12087, - "##makers": 12088, - "clarinet": 12089, - "delicious": 12090, - "chilean": 12091, - "karnataka": 12092, - "coordinates": 12093, - "1818": 12094, - "panties": 12095, - "##rst": 12096, - "pretending": 12097, - "ar": 12098, - "dramatically": 12099, - "kiev": 12100, - "bella": 12101, - "tends": 12102, - "distances": 12103, - "113": 12104, - "catalog": 12105, - "launching": 12106, - "instances": 12107, - "telecommunications": 12108, - "portable": 12109, - "lindsay": 12110, - "vatican": 12111, - "##eim": 12112, - "angles": 12113, - "aliens": 12114, - "marker": 12115, - "stint": 12116, - "screens": 12117, - "bolton": 12118, - "##rne": 12119, - "judy": 12120, - "wool": 12121, - "benedict": 12122, - "plasma": 12123, - "europa": 12124, - "spark": 12125, - "imaging": 12126, - "filmmaker": 12127, - "swiftly": 12128, - "##een": 12129, - "contributor": 12130, - "##nor": 12131, - "opted": 12132, - "stamps": 12133, - "apologize": 12134, - "financing": 12135, - "butter": 12136, - "gideon": 12137, - "sophisticated": 12138, - "alignment": 12139, - "avery": 12140, - "chemicals": 12141, - 
"yearly": 12142, - "speculation": 12143, - "prominence": 12144, - "professionally": 12145, - "##ils": 12146, - "immortal": 12147, - "institutional": 12148, - "inception": 12149, - "wrists": 12150, - "identifying": 12151, - "tribunal": 12152, - "derives": 12153, - "gains": 12154, - "##wo": 12155, - "papal": 12156, - "preference": 12157, - "linguistic": 12158, - "vince": 12159, - "operative": 12160, - "brewery": 12161, - "##ont": 12162, - "unemployment": 12163, - "boyd": 12164, - "##ured": 12165, - "##outs": 12166, - "albeit": 12167, - "prophet": 12168, - "1813": 12169, - "bi": 12170, - "##rr": 12171, - "##face": 12172, - "##rad": 12173, - "quarterly": 12174, - "asteroid": 12175, - "cleaned": 12176, - "radius": 12177, - "temper": 12178, - "##llen": 12179, - "telugu": 12180, - "jerk": 12181, - "viscount": 12182, - "menu": 12183, - "##ote": 12184, - "glimpse": 12185, - "##aya": 12186, - "yacht": 12187, - "hawaiian": 12188, - "baden": 12189, - "##rl": 12190, - "laptop": 12191, - "readily": 12192, - "##gu": 12193, - "monetary": 12194, - "offshore": 12195, - "scots": 12196, - "watches": 12197, - "##yang": 12198, - "##arian": 12199, - "upgrade": 12200, - "needle": 12201, - "xbox": 12202, - "lea": 12203, - "encyclopedia": 12204, - "flank": 12205, - "fingertips": 12206, - "##pus": 12207, - "delight": 12208, - "teachings": 12209, - "confirm": 12210, - "roth": 12211, - "beaches": 12212, - "midway": 12213, - "winters": 12214, - "##iah": 12215, - "teasing": 12216, - "daytime": 12217, - "beverly": 12218, - "gambling": 12219, - "bonnie": 12220, - "##backs": 12221, - "regulated": 12222, - "clement": 12223, - "hermann": 12224, - "tricks": 12225, - "knot": 12226, - "##shing": 12227, - "##uring": 12228, - "##vre": 12229, - "detached": 12230, - "ecological": 12231, - "owed": 12232, - "specialty": 12233, - "byron": 12234, - "inventor": 12235, - "bats": 12236, - "stays": 12237, - "screened": 12238, - "unesco": 12239, - "midland": 12240, - "trim": 12241, - "affection": 12242, - "##ander": 
12243, - "##rry": 12244, - "jess": 12245, - "thoroughly": 12246, - "feedback": 12247, - "##uma": 12248, - "chennai": 12249, - "strained": 12250, - "heartbeat": 12251, - "wrapping": 12252, - "overtime": 12253, - "pleaded": 12254, - "##sworth": 12255, - "mon": 12256, - "leisure": 12257, - "oclc": 12258, - "##tate": 12259, - "##ele": 12260, - "feathers": 12261, - "angelo": 12262, - "thirds": 12263, - "nuts": 12264, - "surveys": 12265, - "clever": 12266, - "gill": 12267, - "commentator": 12268, - "##dos": 12269, - "darren": 12270, - "rides": 12271, - "gibraltar": 12272, - "##nc": 12273, - "##mu": 12274, - "dissolution": 12275, - "dedication": 12276, - "shin": 12277, - "meals": 12278, - "saddle": 12279, - "elvis": 12280, - "reds": 12281, - "chaired": 12282, - "taller": 12283, - "appreciation": 12284, - "functioning": 12285, - "niece": 12286, - "favored": 12287, - "advocacy": 12288, - "robbie": 12289, - "criminals": 12290, - "suffolk": 12291, - "yugoslav": 12292, - "passport": 12293, - "constable": 12294, - "congressman": 12295, - "hastings": 12296, - "vera": 12297, - "##rov": 12298, - "consecrated": 12299, - "sparks": 12300, - "ecclesiastical": 12301, - "confined": 12302, - "##ovich": 12303, - "muller": 12304, - "floyd": 12305, - "nora": 12306, - "1822": 12307, - "paved": 12308, - "1827": 12309, - "cumberland": 12310, - "ned": 12311, - "saga": 12312, - "spiral": 12313, - "##flow": 12314, - "appreciated": 12315, - "yi": 12316, - "collaborative": 12317, - "treating": 12318, - "similarities": 12319, - "feminine": 12320, - "finishes": 12321, - "##ib": 12322, - "jade": 12323, - "import": 12324, - "##nse": 12325, - "##hot": 12326, - "champagne": 12327, - "mice": 12328, - "securing": 12329, - "celebrities": 12330, - "helsinki": 12331, - "attributes": 12332, - "##gos": 12333, - "cousins": 12334, - "phases": 12335, - "ache": 12336, - "lucia": 12337, - "gandhi": 12338, - "submission": 12339, - "vicar": 12340, - "spear": 12341, - "shine": 12342, - "tasmania": 12343, - "biting": 
12344, - "detention": 12345, - "constitute": 12346, - "tighter": 12347, - "seasonal": 12348, - "##gus": 12349, - "terrestrial": 12350, - "matthews": 12351, - "##oka": 12352, - "effectiveness": 12353, - "parody": 12354, - "philharmonic": 12355, - "##onic": 12356, - "1816": 12357, - "strangers": 12358, - "encoded": 12359, - "consortium": 12360, - "guaranteed": 12361, - "regards": 12362, - "shifts": 12363, - "tortured": 12364, - "collision": 12365, - "supervisor": 12366, - "inform": 12367, - "broader": 12368, - "insight": 12369, - "theaters": 12370, - "armour": 12371, - "emeritus": 12372, - "blink": 12373, - "incorporates": 12374, - "mapping": 12375, - "##50": 12376, - "##ein": 12377, - "handball": 12378, - "flexible": 12379, - "##nta": 12380, - "substantially": 12381, - "generous": 12382, - "thief": 12383, - "##own": 12384, - "carr": 12385, - "loses": 12386, - "1793": 12387, - "prose": 12388, - "ucla": 12389, - "romeo": 12390, - "generic": 12391, - "metallic": 12392, - "realization": 12393, - "damages": 12394, - "mk": 12395, - "commissioners": 12396, - "zach": 12397, - "default": 12398, - "##ther": 12399, - "helicopters": 12400, - "lengthy": 12401, - "stems": 12402, - "spa": 12403, - "partnered": 12404, - "spectators": 12405, - "rogue": 12406, - "indication": 12407, - "penalties": 12408, - "teresa": 12409, - "1801": 12410, - "sen": 12411, - "##tric": 12412, - "dalton": 12413, - "##wich": 12414, - "irving": 12415, - "photographic": 12416, - "##vey": 12417, - "dell": 12418, - "deaf": 12419, - "peters": 12420, - "excluded": 12421, - "unsure": 12422, - "##vable": 12423, - "patterson": 12424, - "crawled": 12425, - "##zio": 12426, - "resided": 12427, - "whipped": 12428, - "latvia": 12429, - "slower": 12430, - "ecole": 12431, - "pipes": 12432, - "employers": 12433, - "maharashtra": 12434, - "comparable": 12435, - "va": 12436, - "textile": 12437, - "pageant": 12438, - "##gel": 12439, - "alphabet": 12440, - "binary": 12441, - "irrigation": 12442, - "chartered": 12443, - 
"choked": 12444, - "antoine": 12445, - "offs": 12446, - "waking": 12447, - "supplement": 12448, - "##wen": 12449, - "quantities": 12450, - "demolition": 12451, - "regain": 12452, - "locate": 12453, - "urdu": 12454, - "folks": 12455, - "alt": 12456, - "114": 12457, - "##mc": 12458, - "scary": 12459, - "andreas": 12460, - "whites": 12461, - "##ava": 12462, - "classrooms": 12463, - "mw": 12464, - "aesthetic": 12465, - "publishes": 12466, - "valleys": 12467, - "guides": 12468, - "cubs": 12469, - "johannes": 12470, - "bryant": 12471, - "conventions": 12472, - "affecting": 12473, - "##itt": 12474, - "drain": 12475, - "awesome": 12476, - "isolation": 12477, - "prosecutor": 12478, - "ambitious": 12479, - "apology": 12480, - "captive": 12481, - "downs": 12482, - "atmospheric": 12483, - "lorenzo": 12484, - "aisle": 12485, - "beef": 12486, - "foul": 12487, - "##onia": 12488, - "kidding": 12489, - "composite": 12490, - "disturbed": 12491, - "illusion": 12492, - "natives": 12493, - "##ffer": 12494, - "emi": 12495, - "rockets": 12496, - "riverside": 12497, - "wartime": 12498, - "painters": 12499, - "adolf": 12500, - "melted": 12501, - "##ail": 12502, - "uncertainty": 12503, - "simulation": 12504, - "hawks": 12505, - "progressed": 12506, - "meantime": 12507, - "builder": 12508, - "spray": 12509, - "breach": 12510, - "unhappy": 12511, - "regina": 12512, - "russians": 12513, - "##urg": 12514, - "determining": 12515, - "##tation": 12516, - "tram": 12517, - "1806": 12518, - "##quin": 12519, - "aging": 12520, - "##12": 12521, - "1823": 12522, - "garion": 12523, - "rented": 12524, - "mister": 12525, - "diaz": 12526, - "terminated": 12527, - "clip": 12528, - "1817": 12529, - "depend": 12530, - "nervously": 12531, - "disco": 12532, - "owe": 12533, - "defenders": 12534, - "shiva": 12535, - "notorious": 12536, - "disbelief": 12537, - "shiny": 12538, - "worcester": 12539, - "##gation": 12540, - "##yr": 12541, - "trailing": 12542, - "undertook": 12543, - "islander": 12544, - "belarus": 
12545, - "limitations": 12546, - "watershed": 12547, - "fuller": 12548, - "overlooking": 12549, - "utilized": 12550, - "raphael": 12551, - "1819": 12552, - "synthetic": 12553, - "breakdown": 12554, - "klein": 12555, - "##nate": 12556, - "moaned": 12557, - "memoir": 12558, - "lamb": 12559, - "practicing": 12560, - "##erly": 12561, - "cellular": 12562, - "arrows": 12563, - "exotic": 12564, - "##graphy": 12565, - "witches": 12566, - "117": 12567, - "charted": 12568, - "rey": 12569, - "hut": 12570, - "hierarchy": 12571, - "subdivision": 12572, - "freshwater": 12573, - "giuseppe": 12574, - "aloud": 12575, - "reyes": 12576, - "qatar": 12577, - "marty": 12578, - "sideways": 12579, - "utterly": 12580, - "sexually": 12581, - "jude": 12582, - "prayers": 12583, - "mccarthy": 12584, - "softball": 12585, - "blend": 12586, - "damien": 12587, - "##gging": 12588, - "##metric": 12589, - "wholly": 12590, - "erupted": 12591, - "lebanese": 12592, - "negro": 12593, - "revenues": 12594, - "tasted": 12595, - "comparative": 12596, - "teamed": 12597, - "transaction": 12598, - "labeled": 12599, - "maori": 12600, - "sovereignty": 12601, - "parkway": 12602, - "trauma": 12603, - "gran": 12604, - "malay": 12605, - "121": 12606, - "advancement": 12607, - "descendant": 12608, - "2020": 12609, - "buzz": 12610, - "salvation": 12611, - "inventory": 12612, - "symbolic": 12613, - "##making": 12614, - "antarctica": 12615, - "mps": 12616, - "##gas": 12617, - "##bro": 12618, - "mohammed": 12619, - "myanmar": 12620, - "holt": 12621, - "submarines": 12622, - "tones": 12623, - "##lman": 12624, - "locker": 12625, - "patriarch": 12626, - "bangkok": 12627, - "emerson": 12628, - "remarks": 12629, - "predators": 12630, - "kin": 12631, - "afghan": 12632, - "confession": 12633, - "norwich": 12634, - "rental": 12635, - "emerge": 12636, - "advantages": 12637, - "##zel": 12638, - "rca": 12639, - "##hold": 12640, - "shortened": 12641, - "storms": 12642, - "aidan": 12643, - "##matic": 12644, - "autonomy": 12645, - 
"compliance": 12646, - "##quet": 12647, - "dudley": 12648, - "atp": 12649, - "##osis": 12650, - "1803": 12651, - "motto": 12652, - "documentation": 12653, - "summary": 12654, - "professors": 12655, - "spectacular": 12656, - "christina": 12657, - "archdiocese": 12658, - "flashing": 12659, - "innocence": 12660, - "remake": 12661, - "##dell": 12662, - "psychic": 12663, - "reef": 12664, - "scare": 12665, - "employ": 12666, - "rs": 12667, - "sticks": 12668, - "meg": 12669, - "gus": 12670, - "leans": 12671, - "##ude": 12672, - "accompany": 12673, - "bergen": 12674, - "tomas": 12675, - "##iko": 12676, - "doom": 12677, - "wages": 12678, - "pools": 12679, - "##nch": 12680, - "##bes": 12681, - "breasts": 12682, - "scholarly": 12683, - "alison": 12684, - "outline": 12685, - "brittany": 12686, - "breakthrough": 12687, - "willis": 12688, - "realistic": 12689, - "##cut": 12690, - "##boro": 12691, - "competitor": 12692, - "##stan": 12693, - "pike": 12694, - "picnic": 12695, - "icon": 12696, - "designing": 12697, - "commercials": 12698, - "washing": 12699, - "villain": 12700, - "skiing": 12701, - "micro": 12702, - "costumes": 12703, - "auburn": 12704, - "halted": 12705, - "executives": 12706, - "##hat": 12707, - "logistics": 12708, - "cycles": 12709, - "vowel": 12710, - "applicable": 12711, - "barrett": 12712, - "exclaimed": 12713, - "eurovision": 12714, - "eternity": 12715, - "ramon": 12716, - "##umi": 12717, - "##lls": 12718, - "modifications": 12719, - "sweeping": 12720, - "disgust": 12721, - "##uck": 12722, - "torch": 12723, - "aviv": 12724, - "ensuring": 12725, - "rude": 12726, - "dusty": 12727, - "sonic": 12728, - "donovan": 12729, - "outskirts": 12730, - "cu": 12731, - "pathway": 12732, - "##band": 12733, - "##gun": 12734, - "##lines": 12735, - "disciplines": 12736, - "acids": 12737, - "cadet": 12738, - "paired": 12739, - "##40": 12740, - "sketches": 12741, - "##sive": 12742, - "marriages": 12743, - "##⁺": 12744, - "folding": 12745, - "peers": 12746, - "slovak": 12747, - 
"implies": 12748, - "admired": 12749, - "##beck": 12750, - "1880s": 12751, - "leopold": 12752, - "instinct": 12753, - "attained": 12754, - "weston": 12755, - "megan": 12756, - "horace": 12757, - "##ination": 12758, - "dorsal": 12759, - "ingredients": 12760, - "evolutionary": 12761, - "##its": 12762, - "complications": 12763, - "deity": 12764, - "lethal": 12765, - "brushing": 12766, - "levy": 12767, - "deserted": 12768, - "institutes": 12769, - "posthumously": 12770, - "delivering": 12771, - "telescope": 12772, - "coronation": 12773, - "motivated": 12774, - "rapids": 12775, - "luc": 12776, - "flicked": 12777, - "pays": 12778, - "volcano": 12779, - "tanner": 12780, - "weighed": 12781, - "##nica": 12782, - "crowds": 12783, - "frankie": 12784, - "gifted": 12785, - "addressing": 12786, - "granddaughter": 12787, - "winding": 12788, - "##rna": 12789, - "constantine": 12790, - "gomez": 12791, - "##front": 12792, - "landscapes": 12793, - "rudolf": 12794, - "anthropology": 12795, - "slate": 12796, - "werewolf": 12797, - "##lio": 12798, - "astronomy": 12799, - "circa": 12800, - "rouge": 12801, - "dreaming": 12802, - "sack": 12803, - "knelt": 12804, - "drowned": 12805, - "naomi": 12806, - "prolific": 12807, - "tracked": 12808, - "freezing": 12809, - "herb": 12810, - "##dium": 12811, - "agony": 12812, - "randall": 12813, - "twisting": 12814, - "wendy": 12815, - "deposit": 12816, - "touches": 12817, - "vein": 12818, - "wheeler": 12819, - "##bbled": 12820, - "##bor": 12821, - "batted": 12822, - "retaining": 12823, - "tire": 12824, - "presently": 12825, - "compare": 12826, - "specification": 12827, - "daemon": 12828, - "nigel": 12829, - "##grave": 12830, - "merry": 12831, - "recommendation": 12832, - "czechoslovakia": 12833, - "sandra": 12834, - "ng": 12835, - "roma": 12836, - "##sts": 12837, - "lambert": 12838, - "inheritance": 12839, - "sheikh": 12840, - "winchester": 12841, - "cries": 12842, - "examining": 12843, - "##yle": 12844, - "comeback": 12845, - "cuisine": 12846, - 
"nave": 12847, - "##iv": 12848, - "ko": 12849, - "retrieve": 12850, - "tomatoes": 12851, - "barker": 12852, - "polished": 12853, - "defining": 12854, - "irene": 12855, - "lantern": 12856, - "personalities": 12857, - "begging": 12858, - "tract": 12859, - "swore": 12860, - "1809": 12861, - "175": 12862, - "##gic": 12863, - "omaha": 12864, - "brotherhood": 12865, - "##rley": 12866, - "haiti": 12867, - "##ots": 12868, - "exeter": 12869, - "##ete": 12870, - "##zia": 12871, - "steele": 12872, - "dumb": 12873, - "pearson": 12874, - "210": 12875, - "surveyed": 12876, - "elisabeth": 12877, - "trends": 12878, - "##ef": 12879, - "fritz": 12880, - "##rf": 12881, - "premium": 12882, - "bugs": 12883, - "fraction": 12884, - "calmly": 12885, - "viking": 12886, - "##birds": 12887, - "tug": 12888, - "inserted": 12889, - "unusually": 12890, - "##ield": 12891, - "confronted": 12892, - "distress": 12893, - "crashing": 12894, - "brent": 12895, - "turks": 12896, - "resign": 12897, - "##olo": 12898, - "cambodia": 12899, - "gabe": 12900, - "sauce": 12901, - "##kal": 12902, - "evelyn": 12903, - "116": 12904, - "extant": 12905, - "clusters": 12906, - "quarry": 12907, - "teenagers": 12908, - "luna": 12909, - "##lers": 12910, - "##ister": 12911, - "affiliation": 12912, - "drill": 12913, - "##ashi": 12914, - "panthers": 12915, - "scenic": 12916, - "libya": 12917, - "anita": 12918, - "strengthen": 12919, - "inscriptions": 12920, - "##cated": 12921, - "lace": 12922, - "sued": 12923, - "judith": 12924, - "riots": 12925, - "##uted": 12926, - "mint": 12927, - "##eta": 12928, - "preparations": 12929, - "midst": 12930, - "dub": 12931, - "challenger": 12932, - "##vich": 12933, - "mock": 12934, - "cf": 12935, - "displaced": 12936, - "wicket": 12937, - "breaths": 12938, - "enables": 12939, - "schmidt": 12940, - "analyst": 12941, - "##lum": 12942, - "ag": 12943, - "highlight": 12944, - "automotive": 12945, - "axe": 12946, - "josef": 12947, - "newark": 12948, - "sufficiently": 12949, - "resembles": 12950, 
- "50th": 12951, - "##pal": 12952, - "flushed": 12953, - "mum": 12954, - "traits": 12955, - "##ante": 12956, - "commodore": 12957, - "incomplete": 12958, - "warming": 12959, - "titular": 12960, - "ceremonial": 12961, - "ethical": 12962, - "118": 12963, - "celebrating": 12964, - "eighteenth": 12965, - "cao": 12966, - "lima": 12967, - "medalist": 12968, - "mobility": 12969, - "strips": 12970, - "snakes": 12971, - "##city": 12972, - "miniature": 12973, - "zagreb": 12974, - "barton": 12975, - "escapes": 12976, - "umbrella": 12977, - "automated": 12978, - "doubted": 12979, - "differs": 12980, - "cooled": 12981, - "georgetown": 12982, - "dresden": 12983, - "cooked": 12984, - "fade": 12985, - "wyatt": 12986, - "rna": 12987, - "jacobs": 12988, - "carlton": 12989, - "abundant": 12990, - "stereo": 12991, - "boost": 12992, - "madras": 12993, - "inning": 12994, - "##hia": 12995, - "spur": 12996, - "ip": 12997, - "malayalam": 12998, - "begged": 12999, - "osaka": 13000, - "groan": 13001, - "escaping": 13002, - "charging": 13003, - "dose": 13004, - "vista": 13005, - "##aj": 13006, - "bud": 13007, - "papa": 13008, - "communists": 13009, - "advocates": 13010, - "edged": 13011, - "tri": 13012, - "##cent": 13013, - "resemble": 13014, - "peaking": 13015, - "necklace": 13016, - "fried": 13017, - "montenegro": 13018, - "saxony": 13019, - "goose": 13020, - "glances": 13021, - "stuttgart": 13022, - "curator": 13023, - "recruit": 13024, - "grocery": 13025, - "sympathetic": 13026, - "##tting": 13027, - "##fort": 13028, - "127": 13029, - "lotus": 13030, - "randolph": 13031, - "ancestor": 13032, - "##rand": 13033, - "succeeding": 13034, - "jupiter": 13035, - "1798": 13036, - "macedonian": 13037, - "##heads": 13038, - "hiking": 13039, - "1808": 13040, - "handing": 13041, - "fischer": 13042, - "##itive": 13043, - "garbage": 13044, - "node": 13045, - "##pies": 13046, - "prone": 13047, - "singular": 13048, - "papua": 13049, - "inclined": 13050, - "attractions": 13051, - "italia": 13052, - 
"pouring": 13053, - "motioned": 13054, - "grandma": 13055, - "garnered": 13056, - "jacksonville": 13057, - "corp": 13058, - "ego": 13059, - "ringing": 13060, - "aluminum": 13061, - "##hausen": 13062, - "ordering": 13063, - "##foot": 13064, - "drawer": 13065, - "traders": 13066, - "synagogue": 13067, - "##play": 13068, - "##kawa": 13069, - "resistant": 13070, - "wandering": 13071, - "fragile": 13072, - "fiona": 13073, - "teased": 13074, - "var": 13075, - "hardcore": 13076, - "soaked": 13077, - "jubilee": 13078, - "decisive": 13079, - "exposition": 13080, - "mercer": 13081, - "poster": 13082, - "valencia": 13083, - "hale": 13084, - "kuwait": 13085, - "1811": 13086, - "##ises": 13087, - "##wr": 13088, - "##eed": 13089, - "tavern": 13090, - "gamma": 13091, - "122": 13092, - "johan": 13093, - "##uer": 13094, - "airways": 13095, - "amino": 13096, - "gil": 13097, - "##ury": 13098, - "vocational": 13099, - "domains": 13100, - "torres": 13101, - "##sp": 13102, - "generator": 13103, - "folklore": 13104, - "outcomes": 13105, - "##keeper": 13106, - "canberra": 13107, - "shooter": 13108, - "fl": 13109, - "beams": 13110, - "confrontation": 13111, - "##lling": 13112, - "##gram": 13113, - "feb": 13114, - "aligned": 13115, - "forestry": 13116, - "pipeline": 13117, - "jax": 13118, - "motorway": 13119, - "conception": 13120, - "decay": 13121, - "##tos": 13122, - "coffin": 13123, - "##cott": 13124, - "stalin": 13125, - "1805": 13126, - "escorted": 13127, - "minded": 13128, - "##nam": 13129, - "sitcom": 13130, - "purchasing": 13131, - "twilight": 13132, - "veronica": 13133, - "additions": 13134, - "passive": 13135, - "tensions": 13136, - "straw": 13137, - "123": 13138, - "frequencies": 13139, - "1804": 13140, - "refugee": 13141, - "cultivation": 13142, - "##iate": 13143, - "christie": 13144, - "clary": 13145, - "bulletin": 13146, - "crept": 13147, - "disposal": 13148, - "##rich": 13149, - "##zong": 13150, - "processor": 13151, - "crescent": 13152, - "##rol": 13153, - "bmw": 13154, - 
"emphasized": 13155, - "whale": 13156, - "nazis": 13157, - "aurora": 13158, - "##eng": 13159, - "dwelling": 13160, - "hauled": 13161, - "sponsors": 13162, - "toledo": 13163, - "mega": 13164, - "ideology": 13165, - "theatres": 13166, - "tessa": 13167, - "cerambycidae": 13168, - "saves": 13169, - "turtle": 13170, - "cone": 13171, - "suspects": 13172, - "kara": 13173, - "rusty": 13174, - "yelling": 13175, - "greeks": 13176, - "mozart": 13177, - "shades": 13178, - "cocked": 13179, - "participant": 13180, - "##tro": 13181, - "shire": 13182, - "spit": 13183, - "freeze": 13184, - "necessity": 13185, - "##cos": 13186, - "inmates": 13187, - "nielsen": 13188, - "councillors": 13189, - "loaned": 13190, - "uncommon": 13191, - "omar": 13192, - "peasants": 13193, - "botanical": 13194, - "offspring": 13195, - "daniels": 13196, - "formations": 13197, - "jokes": 13198, - "1794": 13199, - "pioneers": 13200, - "sigma": 13201, - "licensing": 13202, - "##sus": 13203, - "wheelchair": 13204, - "polite": 13205, - "1807": 13206, - "liquor": 13207, - "pratt": 13208, - "trustee": 13209, - "##uta": 13210, - "forewings": 13211, - "balloon": 13212, - "##zz": 13213, - "kilometre": 13214, - "camping": 13215, - "explicit": 13216, - "casually": 13217, - "shawn": 13218, - "foolish": 13219, - "teammates": 13220, - "nm": 13221, - "hassan": 13222, - "carrie": 13223, - "judged": 13224, - "satisfy": 13225, - "vanessa": 13226, - "knives": 13227, - "selective": 13228, - "cnn": 13229, - "flowed": 13230, - "##lice": 13231, - "eclipse": 13232, - "stressed": 13233, - "eliza": 13234, - "mathematician": 13235, - "cease": 13236, - "cultivated": 13237, - "##roy": 13238, - "commissions": 13239, - "browns": 13240, - "##ania": 13241, - "destroyers": 13242, - "sheridan": 13243, - "meadow": 13244, - "##rius": 13245, - "minerals": 13246, - "##cial": 13247, - "downstream": 13248, - "clash": 13249, - "gram": 13250, - "memoirs": 13251, - "ventures": 13252, - "baha": 13253, - "seymour": 13254, - "archie": 13255, - 
"midlands": 13256, - "edith": 13257, - "fare": 13258, - "flynn": 13259, - "invite": 13260, - "canceled": 13261, - "tiles": 13262, - "stabbed": 13263, - "boulder": 13264, - "incorporate": 13265, - "amended": 13266, - "camden": 13267, - "facial": 13268, - "mollusk": 13269, - "unreleased": 13270, - "descriptions": 13271, - "yoga": 13272, - "grabs": 13273, - "550": 13274, - "raises": 13275, - "ramp": 13276, - "shiver": 13277, - "##rose": 13278, - "coined": 13279, - "pioneering": 13280, - "tunes": 13281, - "qing": 13282, - "warwick": 13283, - "tops": 13284, - "119": 13285, - "melanie": 13286, - "giles": 13287, - "##rous": 13288, - "wandered": 13289, - "##inal": 13290, - "annexed": 13291, - "nov": 13292, - "30th": 13293, - "unnamed": 13294, - "##ished": 13295, - "organizational": 13296, - "airplane": 13297, - "normandy": 13298, - "stoke": 13299, - "whistle": 13300, - "blessing": 13301, - "violations": 13302, - "chased": 13303, - "holders": 13304, - "shotgun": 13305, - "##ctic": 13306, - "outlet": 13307, - "reactor": 13308, - "##vik": 13309, - "tires": 13310, - "tearing": 13311, - "shores": 13312, - "fortified": 13313, - "mascot": 13314, - "constituencies": 13315, - "nc": 13316, - "columnist": 13317, - "productive": 13318, - "tibet": 13319, - "##rta": 13320, - "lineage": 13321, - "hooked": 13322, - "oct": 13323, - "tapes": 13324, - "judging": 13325, - "cody": 13326, - "##gger": 13327, - "hansen": 13328, - "kashmir": 13329, - "triggered": 13330, - "##eva": 13331, - "solved": 13332, - "cliffs": 13333, - "##tree": 13334, - "resisted": 13335, - "anatomy": 13336, - "protesters": 13337, - "transparent": 13338, - "implied": 13339, - "##iga": 13340, - "injection": 13341, - "mattress": 13342, - "excluding": 13343, - "##mbo": 13344, - "defenses": 13345, - "helpless": 13346, - "devotion": 13347, - "##elli": 13348, - "growl": 13349, - "liberals": 13350, - "weber": 13351, - "phenomena": 13352, - "atoms": 13353, - "plug": 13354, - "##iff": 13355, - "mortality": 13356, - "apprentice": 
13357, - "howe": 13358, - "convincing": 13359, - "aaa": 13360, - "swimmer": 13361, - "barber": 13362, - "leone": 13363, - "promptly": 13364, - "sodium": 13365, - "def": 13366, - "nowadays": 13367, - "arise": 13368, - "##oning": 13369, - "gloucester": 13370, - "corrected": 13371, - "dignity": 13372, - "norm": 13373, - "erie": 13374, - "##ders": 13375, - "elders": 13376, - "evacuated": 13377, - "sylvia": 13378, - "compression": 13379, - "##yar": 13380, - "hartford": 13381, - "pose": 13382, - "backpack": 13383, - "reasoning": 13384, - "accepts": 13385, - "24th": 13386, - "wipe": 13387, - "millimetres": 13388, - "marcel": 13389, - "##oda": 13390, - "dodgers": 13391, - "albion": 13392, - "1790": 13393, - "overwhelmed": 13394, - "aerospace": 13395, - "oaks": 13396, - "1795": 13397, - "showcase": 13398, - "acknowledge": 13399, - "recovering": 13400, - "nolan": 13401, - "ashe": 13402, - "hurts": 13403, - "geology": 13404, - "fashioned": 13405, - "disappearance": 13406, - "farewell": 13407, - "swollen": 13408, - "shrug": 13409, - "marquis": 13410, - "wimbledon": 13411, - "124": 13412, - "rue": 13413, - "1792": 13414, - "commemorate": 13415, - "reduces": 13416, - "experiencing": 13417, - "inevitable": 13418, - "calcutta": 13419, - "intel": 13420, - "##court": 13421, - "murderer": 13422, - "sticking": 13423, - "fisheries": 13424, - "imagery": 13425, - "bloom": 13426, - "280": 13427, - "brake": 13428, - "##inus": 13429, - "gustav": 13430, - "hesitation": 13431, - "memorable": 13432, - "po": 13433, - "viral": 13434, - "beans": 13435, - "accidents": 13436, - "tunisia": 13437, - "antenna": 13438, - "spilled": 13439, - "consort": 13440, - "treatments": 13441, - "aye": 13442, - "perimeter": 13443, - "##gard": 13444, - "donation": 13445, - "hostage": 13446, - "migrated": 13447, - "banker": 13448, - "addiction": 13449, - "apex": 13450, - "lil": 13451, - "trout": 13452, - "##ously": 13453, - "conscience": 13454, - "##nova": 13455, - "rams": 13456, - "sands": 13457, - "genome": 13458, 
- "passionate": 13459, - "troubles": 13460, - "##lets": 13461, - "##set": 13462, - "amid": 13463, - "##ibility": 13464, - "##ret": 13465, - "higgins": 13466, - "exceed": 13467, - "vikings": 13468, - "##vie": 13469, - "payne": 13470, - "##zan": 13471, - "muscular": 13472, - "##ste": 13473, - "defendant": 13474, - "sucking": 13475, - "##wal": 13476, - "ibrahim": 13477, - "fuselage": 13478, - "claudia": 13479, - "vfl": 13480, - "europeans": 13481, - "snails": 13482, - "interval": 13483, - "##garh": 13484, - "preparatory": 13485, - "statewide": 13486, - "tasked": 13487, - "lacrosse": 13488, - "viktor": 13489, - "##lation": 13490, - "angola": 13491, - "##hra": 13492, - "flint": 13493, - "implications": 13494, - "employs": 13495, - "teens": 13496, - "patrons": 13497, - "stall": 13498, - "weekends": 13499, - "barriers": 13500, - "scrambled": 13501, - "nucleus": 13502, - "tehran": 13503, - "jenna": 13504, - "parsons": 13505, - "lifelong": 13506, - "robots": 13507, - "displacement": 13508, - "5000": 13509, - "##bles": 13510, - "precipitation": 13511, - "##gt": 13512, - "knuckles": 13513, - "clutched": 13514, - "1802": 13515, - "marrying": 13516, - "ecology": 13517, - "marx": 13518, - "accusations": 13519, - "declare": 13520, - "scars": 13521, - "kolkata": 13522, - "mat": 13523, - "meadows": 13524, - "bermuda": 13525, - "skeleton": 13526, - "finalists": 13527, - "vintage": 13528, - "crawl": 13529, - "coordinate": 13530, - "affects": 13531, - "subjected": 13532, - "orchestral": 13533, - "mistaken": 13534, - "##tc": 13535, - "mirrors": 13536, - "dipped": 13537, - "relied": 13538, - "260": 13539, - "arches": 13540, - "candle": 13541, - "##nick": 13542, - "incorporating": 13543, - "wildly": 13544, - "fond": 13545, - "basilica": 13546, - "owl": 13547, - "fringe": 13548, - "rituals": 13549, - "whispering": 13550, - "stirred": 13551, - "feud": 13552, - "tertiary": 13553, - "slick": 13554, - "goat": 13555, - "honorable": 13556, - "whereby": 13557, - "skip": 13558, - "ricardo": 
13559, - "stripes": 13560, - "parachute": 13561, - "adjoining": 13562, - "submerged": 13563, - "synthesizer": 13564, - "##gren": 13565, - "intend": 13566, - "positively": 13567, - "ninety": 13568, - "phi": 13569, - "beaver": 13570, - "partition": 13571, - "fellows": 13572, - "alexis": 13573, - "prohibition": 13574, - "carlisle": 13575, - "bizarre": 13576, - "fraternity": 13577, - "##bre": 13578, - "doubts": 13579, - "icy": 13580, - "cbc": 13581, - "aquatic": 13582, - "sneak": 13583, - "sonny": 13584, - "combines": 13585, - "airports": 13586, - "crude": 13587, - "supervised": 13588, - "spatial": 13589, - "merge": 13590, - "alfonso": 13591, - "##bic": 13592, - "corrupt": 13593, - "scan": 13594, - "undergo": 13595, - "##ams": 13596, - "disabilities": 13597, - "colombian": 13598, - "comparing": 13599, - "dolphins": 13600, - "perkins": 13601, - "##lish": 13602, - "reprinted": 13603, - "unanimous": 13604, - "bounced": 13605, - "hairs": 13606, - "underworld": 13607, - "midwest": 13608, - "semester": 13609, - "bucket": 13610, - "paperback": 13611, - "miniseries": 13612, - "coventry": 13613, - "demise": 13614, - "##leigh": 13615, - "demonstrations": 13616, - "sensor": 13617, - "rotating": 13618, - "yan": 13619, - "##hler": 13620, - "arrange": 13621, - "soils": 13622, - "##idge": 13623, - "hyderabad": 13624, - "labs": 13625, - "##dr": 13626, - "brakes": 13627, - "grandchildren": 13628, - "##nde": 13629, - "negotiated": 13630, - "rover": 13631, - "ferrari": 13632, - "continuation": 13633, - "directorate": 13634, - "augusta": 13635, - "stevenson": 13636, - "counterpart": 13637, - "gore": 13638, - "##rda": 13639, - "nursery": 13640, - "rican": 13641, - "ave": 13642, - "collectively": 13643, - "broadly": 13644, - "pastoral": 13645, - "repertoire": 13646, - "asserted": 13647, - "discovering": 13648, - "nordic": 13649, - "styled": 13650, - "fiba": 13651, - "cunningham": 13652, - "harley": 13653, - "middlesex": 13654, - "survives": 13655, - "tumor": 13656, - "tempo": 13657, - 
"zack": 13658, - "aiming": 13659, - "lok": 13660, - "urgent": 13661, - "##rade": 13662, - "##nto": 13663, - "devils": 13664, - "##ement": 13665, - "contractor": 13666, - "turin": 13667, - "##wl": 13668, - "##ool": 13669, - "bliss": 13670, - "repaired": 13671, - "simmons": 13672, - "moan": 13673, - "astronomical": 13674, - "cr": 13675, - "negotiate": 13676, - "lyric": 13677, - "1890s": 13678, - "lara": 13679, - "bred": 13680, - "clad": 13681, - "angus": 13682, - "pbs": 13683, - "##ience": 13684, - "engineered": 13685, - "posed": 13686, - "##lk": 13687, - "hernandez": 13688, - "possessions": 13689, - "elbows": 13690, - "psychiatric": 13691, - "strokes": 13692, - "confluence": 13693, - "electorate": 13694, - "lifts": 13695, - "campuses": 13696, - "lava": 13697, - "alps": 13698, - "##ep": 13699, - "##ution": 13700, - "##date": 13701, - "physicist": 13702, - "woody": 13703, - "##page": 13704, - "##ographic": 13705, - "##itis": 13706, - "juliet": 13707, - "reformation": 13708, - "sparhawk": 13709, - "320": 13710, - "complement": 13711, - "suppressed": 13712, - "jewel": 13713, - "##½": 13714, - "floated": 13715, - "##kas": 13716, - "continuity": 13717, - "sadly": 13718, - "##ische": 13719, - "inability": 13720, - "melting": 13721, - "scanning": 13722, - "paula": 13723, - "flour": 13724, - "judaism": 13725, - "safer": 13726, - "vague": 13727, - "##lm": 13728, - "solving": 13729, - "curb": 13730, - "##stown": 13731, - "financially": 13732, - "gable": 13733, - "bees": 13734, - "expired": 13735, - "miserable": 13736, - "cassidy": 13737, - "dominion": 13738, - "1789": 13739, - "cupped": 13740, - "145": 13741, - "robbery": 13742, - "facto": 13743, - "amos": 13744, - "warden": 13745, - "resume": 13746, - "tallest": 13747, - "marvin": 13748, - "ing": 13749, - "pounded": 13750, - "usd": 13751, - "declaring": 13752, - "gasoline": 13753, - "##aux": 13754, - "darkened": 13755, - "270": 13756, - "650": 13757, - "sophomore": 13758, - "##mere": 13759, - "erection": 13760, - "gossip": 
13761, - "televised": 13762, - "risen": 13763, - "dial": 13764, - "##eu": 13765, - "pillars": 13766, - "##link": 13767, - "passages": 13768, - "profound": 13769, - "##tina": 13770, - "arabian": 13771, - "ashton": 13772, - "silicon": 13773, - "nail": 13774, - "##ead": 13775, - "##lated": 13776, - "##wer": 13777, - "##hardt": 13778, - "fleming": 13779, - "firearms": 13780, - "ducked": 13781, - "circuits": 13782, - "blows": 13783, - "waterloo": 13784, - "titans": 13785, - "##lina": 13786, - "atom": 13787, - "fireplace": 13788, - "cheshire": 13789, - "financed": 13790, - "activation": 13791, - "algorithms": 13792, - "##zzi": 13793, - "constituent": 13794, - "catcher": 13795, - "cherokee": 13796, - "partnerships": 13797, - "sexuality": 13798, - "platoon": 13799, - "tragic": 13800, - "vivian": 13801, - "guarded": 13802, - "whiskey": 13803, - "meditation": 13804, - "poetic": 13805, - "##late": 13806, - "##nga": 13807, - "##ake": 13808, - "porto": 13809, - "listeners": 13810, - "dominance": 13811, - "kendra": 13812, - "mona": 13813, - "chandler": 13814, - "factions": 13815, - "22nd": 13816, - "salisbury": 13817, - "attitudes": 13818, - "derivative": 13819, - "##ido": 13820, - "##haus": 13821, - "intake": 13822, - "paced": 13823, - "javier": 13824, - "illustrator": 13825, - "barrels": 13826, - "bias": 13827, - "cockpit": 13828, - "burnett": 13829, - "dreamed": 13830, - "ensuing": 13831, - "##anda": 13832, - "receptors": 13833, - "someday": 13834, - "hawkins": 13835, - "mattered": 13836, - "##lal": 13837, - "slavic": 13838, - "1799": 13839, - "jesuit": 13840, - "cameroon": 13841, - "wasted": 13842, - "tai": 13843, - "wax": 13844, - "lowering": 13845, - "victorious": 13846, - "freaking": 13847, - "outright": 13848, - "hancock": 13849, - "librarian": 13850, - "sensing": 13851, - "bald": 13852, - "calcium": 13853, - "myers": 13854, - "tablet": 13855, - "announcing": 13856, - "barack": 13857, - "shipyard": 13858, - "pharmaceutical": 13859, - "##uan": 13860, - "greenwich": 13861, 
- "flush": 13862, - "medley": 13863, - "patches": 13864, - "wolfgang": 13865, - "pt": 13866, - "speeches": 13867, - "acquiring": 13868, - "exams": 13869, - "nikolai": 13870, - "##gg": 13871, - "hayden": 13872, - "kannada": 13873, - "##type": 13874, - "reilly": 13875, - "##pt": 13876, - "waitress": 13877, - "abdomen": 13878, - "devastated": 13879, - "capped": 13880, - "pseudonym": 13881, - "pharmacy": 13882, - "fulfill": 13883, - "paraguay": 13884, - "1796": 13885, - "clicked": 13886, - "##trom": 13887, - "archipelago": 13888, - "syndicated": 13889, - "##hman": 13890, - "lumber": 13891, - "orgasm": 13892, - "rejection": 13893, - "clifford": 13894, - "lorraine": 13895, - "advent": 13896, - "mafia": 13897, - "rodney": 13898, - "brock": 13899, - "##ght": 13900, - "##used": 13901, - "##elia": 13902, - "cassette": 13903, - "chamberlain": 13904, - "despair": 13905, - "mongolia": 13906, - "sensors": 13907, - "developmental": 13908, - "upstream": 13909, - "##eg": 13910, - "##alis": 13911, - "spanning": 13912, - "165": 13913, - "trombone": 13914, - "basque": 13915, - "seeded": 13916, - "interred": 13917, - "renewable": 13918, - "rhys": 13919, - "leapt": 13920, - "revision": 13921, - "molecule": 13922, - "##ages": 13923, - "chord": 13924, - "vicious": 13925, - "nord": 13926, - "shivered": 13927, - "23rd": 13928, - "arlington": 13929, - "debts": 13930, - "corpus": 13931, - "sunrise": 13932, - "bays": 13933, - "blackburn": 13934, - "centimetres": 13935, - "##uded": 13936, - "shuddered": 13937, - "gm": 13938, - "strangely": 13939, - "gripping": 13940, - "cartoons": 13941, - "isabelle": 13942, - "orbital": 13943, - "##ppa": 13944, - "seals": 13945, - "proving": 13946, - "##lton": 13947, - "refusal": 13948, - "strengthened": 13949, - "bust": 13950, - "assisting": 13951, - "baghdad": 13952, - "batsman": 13953, - "portrayal": 13954, - "mara": 13955, - "pushes": 13956, - "spears": 13957, - "og": 13958, - "##cock": 13959, - "reside": 13960, - "nathaniel": 13961, - "brennan": 13962, - 
"1776": 13963, - "confirmation": 13964, - "caucus": 13965, - "##worthy": 13966, - "markings": 13967, - "yemen": 13968, - "nobles": 13969, - "ku": 13970, - "lazy": 13971, - "viewer": 13972, - "catalan": 13973, - "encompasses": 13974, - "sawyer": 13975, - "##fall": 13976, - "sparked": 13977, - "substances": 13978, - "patents": 13979, - "braves": 13980, - "arranger": 13981, - "evacuation": 13982, - "sergio": 13983, - "persuade": 13984, - "dover": 13985, - "tolerance": 13986, - "penguin": 13987, - "cum": 13988, - "jockey": 13989, - "insufficient": 13990, - "townships": 13991, - "occupying": 13992, - "declining": 13993, - "plural": 13994, - "processed": 13995, - "projection": 13996, - "puppet": 13997, - "flanders": 13998, - "introduces": 13999, - "liability": 14000, - "##yon": 14001, - "gymnastics": 14002, - "antwerp": 14003, - "taipei": 14004, - "hobart": 14005, - "candles": 14006, - "jeep": 14007, - "wes": 14008, - "observers": 14009, - "126": 14010, - "chaplain": 14011, - "bundle": 14012, - "glorious": 14013, - "##hine": 14014, - "hazel": 14015, - "flung": 14016, - "sol": 14017, - "excavations": 14018, - "dumped": 14019, - "stares": 14020, - "sh": 14021, - "bangalore": 14022, - "triangular": 14023, - "icelandic": 14024, - "intervals": 14025, - "expressing": 14026, - "turbine": 14027, - "##vers": 14028, - "songwriting": 14029, - "crafts": 14030, - "##igo": 14031, - "jasmine": 14032, - "ditch": 14033, - "rite": 14034, - "##ways": 14035, - "entertaining": 14036, - "comply": 14037, - "sorrow": 14038, - "wrestlers": 14039, - "basel": 14040, - "emirates": 14041, - "marian": 14042, - "rivera": 14043, - "helpful": 14044, - "##some": 14045, - "caution": 14046, - "downward": 14047, - "networking": 14048, - "##atory": 14049, - "##tered": 14050, - "darted": 14051, - "genocide": 14052, - "emergence": 14053, - "replies": 14054, - "specializing": 14055, - "spokesman": 14056, - "convenient": 14057, - "unlocked": 14058, - "fading": 14059, - "augustine": 14060, - "concentrations": 
14061, - "resemblance": 14062, - "elijah": 14063, - "investigator": 14064, - "andhra": 14065, - "##uda": 14066, - "promotes": 14067, - "bean": 14068, - "##rrell": 14069, - "fleeing": 14070, - "wan": 14071, - "simone": 14072, - "announcer": 14073, - "##ame": 14074, - "##bby": 14075, - "lydia": 14076, - "weaver": 14077, - "132": 14078, - "residency": 14079, - "modification": 14080, - "##fest": 14081, - "stretches": 14082, - "##ast": 14083, - "alternatively": 14084, - "nat": 14085, - "lowe": 14086, - "lacks": 14087, - "##ented": 14088, - "pam": 14089, - "tile": 14090, - "concealed": 14091, - "inferior": 14092, - "abdullah": 14093, - "residences": 14094, - "tissues": 14095, - "vengeance": 14096, - "##ided": 14097, - "moisture": 14098, - "peculiar": 14099, - "groove": 14100, - "zip": 14101, - "bologna": 14102, - "jennings": 14103, - "ninja": 14104, - "oversaw": 14105, - "zombies": 14106, - "pumping": 14107, - "batch": 14108, - "livingston": 14109, - "emerald": 14110, - "installations": 14111, - "1797": 14112, - "peel": 14113, - "nitrogen": 14114, - "rama": 14115, - "##fying": 14116, - "##star": 14117, - "schooling": 14118, - "strands": 14119, - "responding": 14120, - "werner": 14121, - "##ost": 14122, - "lime": 14123, - "casa": 14124, - "accurately": 14125, - "targeting": 14126, - "##rod": 14127, - "underway": 14128, - "##uru": 14129, - "hemisphere": 14130, - "lester": 14131, - "##yard": 14132, - "occupies": 14133, - "2d": 14134, - "griffith": 14135, - "angrily": 14136, - "reorganized": 14137, - "##owing": 14138, - "courtney": 14139, - "deposited": 14140, - "##dd": 14141, - "##30": 14142, - "estadio": 14143, - "##ifies": 14144, - "dunn": 14145, - "exiled": 14146, - "##ying": 14147, - "checks": 14148, - "##combe": 14149, - "##о": 14150, - "##fly": 14151, - "successes": 14152, - "unexpectedly": 14153, - "blu": 14154, - "assessed": 14155, - "##flower": 14156, - "##ه": 14157, - "observing": 14158, - "sacked": 14159, - "spiders": 14160, - "kn": 14161, - "##tail": 14162, - 
"mu": 14163, - "nodes": 14164, - "prosperity": 14165, - "audrey": 14166, - "divisional": 14167, - "155": 14168, - "broncos": 14169, - "tangled": 14170, - "adjust": 14171, - "feeds": 14172, - "erosion": 14173, - "paolo": 14174, - "surf": 14175, - "directory": 14176, - "snatched": 14177, - "humid": 14178, - "admiralty": 14179, - "screwed": 14180, - "gt": 14181, - "reddish": 14182, - "##nese": 14183, - "modules": 14184, - "trench": 14185, - "lamps": 14186, - "bind": 14187, - "leah": 14188, - "bucks": 14189, - "competes": 14190, - "##nz": 14191, - "##form": 14192, - "transcription": 14193, - "##uc": 14194, - "isles": 14195, - "violently": 14196, - "clutching": 14197, - "pga": 14198, - "cyclist": 14199, - "inflation": 14200, - "flats": 14201, - "ragged": 14202, - "unnecessary": 14203, - "##hian": 14204, - "stubborn": 14205, - "coordinated": 14206, - "harriet": 14207, - "baba": 14208, - "disqualified": 14209, - "330": 14210, - "insect": 14211, - "wolfe": 14212, - "##fies": 14213, - "reinforcements": 14214, - "rocked": 14215, - "duel": 14216, - "winked": 14217, - "embraced": 14218, - "bricks": 14219, - "##raj": 14220, - "hiatus": 14221, - "defeats": 14222, - "pending": 14223, - "brightly": 14224, - "jealousy": 14225, - "##xton": 14226, - "##hm": 14227, - "##uki": 14228, - "lena": 14229, - "gdp": 14230, - "colorful": 14231, - "##dley": 14232, - "stein": 14233, - "kidney": 14234, - "##shu": 14235, - "underwear": 14236, - "wanderers": 14237, - "##haw": 14238, - "##icus": 14239, - "guardians": 14240, - "m³": 14241, - "roared": 14242, - "habits": 14243, - "##wise": 14244, - "permits": 14245, - "gp": 14246, - "uranium": 14247, - "punished": 14248, - "disguise": 14249, - "bundesliga": 14250, - "elise": 14251, - "dundee": 14252, - "erotic": 14253, - "partisan": 14254, - "pi": 14255, - "collectors": 14256, - "float": 14257, - "individually": 14258, - "rendering": 14259, - "behavioral": 14260, - "bucharest": 14261, - "ser": 14262, - "hare": 14263, - "valerie": 14264, - "corporal": 
14265, - "nutrition": 14266, - "proportional": 14267, - "##isa": 14268, - "immense": 14269, - "##kis": 14270, - "pavement": 14271, - "##zie": 14272, - "##eld": 14273, - "sutherland": 14274, - "crouched": 14275, - "1775": 14276, - "##lp": 14277, - "suzuki": 14278, - "trades": 14279, - "endurance": 14280, - "operas": 14281, - "crosby": 14282, - "prayed": 14283, - "priory": 14284, - "rory": 14285, - "socially": 14286, - "##urn": 14287, - "gujarat": 14288, - "##pu": 14289, - "walton": 14290, - "cube": 14291, - "pasha": 14292, - "privilege": 14293, - "lennon": 14294, - "floods": 14295, - "thorne": 14296, - "waterfall": 14297, - "nipple": 14298, - "scouting": 14299, - "approve": 14300, - "##lov": 14301, - "minorities": 14302, - "voter": 14303, - "dwight": 14304, - "extensions": 14305, - "assure": 14306, - "ballroom": 14307, - "slap": 14308, - "dripping": 14309, - "privileges": 14310, - "rejoined": 14311, - "confessed": 14312, - "demonstrating": 14313, - "patriotic": 14314, - "yell": 14315, - "investor": 14316, - "##uth": 14317, - "pagan": 14318, - "slumped": 14319, - "squares": 14320, - "##cle": 14321, - "##kins": 14322, - "confront": 14323, - "bert": 14324, - "embarrassment": 14325, - "##aid": 14326, - "aston": 14327, - "urging": 14328, - "sweater": 14329, - "starr": 14330, - "yuri": 14331, - "brains": 14332, - "williamson": 14333, - "commuter": 14334, - "mortar": 14335, - "structured": 14336, - "selfish": 14337, - "exports": 14338, - "##jon": 14339, - "cds": 14340, - "##him": 14341, - "unfinished": 14342, - "##rre": 14343, - "mortgage": 14344, - "destinations": 14345, - "##nagar": 14346, - "canoe": 14347, - "solitary": 14348, - "buchanan": 14349, - "delays": 14350, - "magistrate": 14351, - "fk": 14352, - "##pling": 14353, - "motivation": 14354, - "##lier": 14355, - "##vier": 14356, - "recruiting": 14357, - "assess": 14358, - "##mouth": 14359, - "malik": 14360, - "antique": 14361, - "1791": 14362, - "pius": 14363, - "rahman": 14364, - "reich": 14365, - "tub": 14366, - 
"zhou": 14367, - "smashed": 14368, - "airs": 14369, - "galway": 14370, - "xii": 14371, - "conditioning": 14372, - "honduras": 14373, - "discharged": 14374, - "dexter": 14375, - "##pf": 14376, - "lionel": 14377, - "129": 14378, - "debates": 14379, - "lemon": 14380, - "tiffany": 14381, - "volunteered": 14382, - "dom": 14383, - "dioxide": 14384, - "procession": 14385, - "devi": 14386, - "sic": 14387, - "tremendous": 14388, - "advertisements": 14389, - "colts": 14390, - "transferring": 14391, - "verdict": 14392, - "hanover": 14393, - "decommissioned": 14394, - "utter": 14395, - "relate": 14396, - "pac": 14397, - "racism": 14398, - "##top": 14399, - "beacon": 14400, - "limp": 14401, - "similarity": 14402, - "terra": 14403, - "occurrence": 14404, - "ant": 14405, - "##how": 14406, - "becky": 14407, - "capt": 14408, - "updates": 14409, - "armament": 14410, - "richie": 14411, - "pal": 14412, - "##graph": 14413, - "halloween": 14414, - "mayo": 14415, - "##ssen": 14416, - "##bone": 14417, - "cara": 14418, - "serena": 14419, - "fcc": 14420, - "dolls": 14421, - "obligations": 14422, - "##dling": 14423, - "violated": 14424, - "lafayette": 14425, - "jakarta": 14426, - "exploitation": 14427, - "##ime": 14428, - "infamous": 14429, - "iconic": 14430, - "##lah": 14431, - "##park": 14432, - "kitty": 14433, - "moody": 14434, - "reginald": 14435, - "dread": 14436, - "spill": 14437, - "crystals": 14438, - "olivier": 14439, - "modeled": 14440, - "bluff": 14441, - "equilibrium": 14442, - "separating": 14443, - "notices": 14444, - "ordnance": 14445, - "extinction": 14446, - "onset": 14447, - "cosmic": 14448, - "attachment": 14449, - "sammy": 14450, - "expose": 14451, - "privy": 14452, - "anchored": 14453, - "##bil": 14454, - "abbott": 14455, - "admits": 14456, - "bending": 14457, - "baritone": 14458, - "emmanuel": 14459, - "policeman": 14460, - "vaughan": 14461, - "winged": 14462, - "climax": 14463, - "dresses": 14464, - "denny": 14465, - "polytechnic": 14466, - "mohamed": 14467, - 
"burmese": 14468, - "authentic": 14469, - "nikki": 14470, - "genetics": 14471, - "grandparents": 14472, - "homestead": 14473, - "gaza": 14474, - "postponed": 14475, - "metacritic": 14476, - "una": 14477, - "##sby": 14478, - "##bat": 14479, - "unstable": 14480, - "dissertation": 14481, - "##rial": 14482, - "##cian": 14483, - "curls": 14484, - "obscure": 14485, - "uncovered": 14486, - "bronx": 14487, - "praying": 14488, - "disappearing": 14489, - "##hoe": 14490, - "prehistoric": 14491, - "coke": 14492, - "turret": 14493, - "mutations": 14494, - "nonprofit": 14495, - "pits": 14496, - "monaco": 14497, - "##ي": 14498, - "##usion": 14499, - "prominently": 14500, - "dispatched": 14501, - "podium": 14502, - "##mir": 14503, - "uci": 14504, - "##uation": 14505, - "133": 14506, - "fortifications": 14507, - "birthplace": 14508, - "kendall": 14509, - "##lby": 14510, - "##oll": 14511, - "preacher": 14512, - "rack": 14513, - "goodman": 14514, - "##rman": 14515, - "persistent": 14516, - "##ott": 14517, - "countless": 14518, - "jaime": 14519, - "recorder": 14520, - "lexington": 14521, - "persecution": 14522, - "jumps": 14523, - "renewal": 14524, - "wagons": 14525, - "##11": 14526, - "crushing": 14527, - "##holder": 14528, - "decorations": 14529, - "##lake": 14530, - "abundance": 14531, - "wrath": 14532, - "laundry": 14533, - "£1": 14534, - "garde": 14535, - "##rp": 14536, - "jeanne": 14537, - "beetles": 14538, - "peasant": 14539, - "##sl": 14540, - "splitting": 14541, - "caste": 14542, - "sergei": 14543, - "##rer": 14544, - "##ema": 14545, - "scripts": 14546, - "##ively": 14547, - "rub": 14548, - "satellites": 14549, - "##vor": 14550, - "inscribed": 14551, - "verlag": 14552, - "scrapped": 14553, - "gale": 14554, - "packages": 14555, - "chick": 14556, - "potato": 14557, - "slogan": 14558, - "kathleen": 14559, - "arabs": 14560, - "##culture": 14561, - "counterparts": 14562, - "reminiscent": 14563, - "choral": 14564, - "##tead": 14565, - "rand": 14566, - "retains": 14567, - "bushes": 
14568, - "dane": 14569, - "accomplish": 14570, - "courtesy": 14571, - "closes": 14572, - "##oth": 14573, - "slaughter": 14574, - "hague": 14575, - "krakow": 14576, - "lawson": 14577, - "tailed": 14578, - "elias": 14579, - "ginger": 14580, - "##ttes": 14581, - "canopy": 14582, - "betrayal": 14583, - "rebuilding": 14584, - "turf": 14585, - "##hof": 14586, - "frowning": 14587, - "allegiance": 14588, - "brigades": 14589, - "kicks": 14590, - "rebuild": 14591, - "polls": 14592, - "alias": 14593, - "nationalism": 14594, - "td": 14595, - "rowan": 14596, - "audition": 14597, - "bowie": 14598, - "fortunately": 14599, - "recognizes": 14600, - "harp": 14601, - "dillon": 14602, - "horrified": 14603, - "##oro": 14604, - "renault": 14605, - "##tics": 14606, - "ropes": 14607, - "##α": 14608, - "presumed": 14609, - "rewarded": 14610, - "infrared": 14611, - "wiping": 14612, - "accelerated": 14613, - "illustration": 14614, - "##rid": 14615, - "presses": 14616, - "practitioners": 14617, - "badminton": 14618, - "##iard": 14619, - "detained": 14620, - "##tera": 14621, - "recognizing": 14622, - "relates": 14623, - "misery": 14624, - "##sies": 14625, - "##tly": 14626, - "reproduction": 14627, - "piercing": 14628, - "potatoes": 14629, - "thornton": 14630, - "esther": 14631, - "manners": 14632, - "hbo": 14633, - "##aan": 14634, - "ours": 14635, - "bullshit": 14636, - "ernie": 14637, - "perennial": 14638, - "sensitivity": 14639, - "illuminated": 14640, - "rupert": 14641, - "##jin": 14642, - "##iss": 14643, - "##ear": 14644, - "rfc": 14645, - "nassau": 14646, - "##dock": 14647, - "staggered": 14648, - "socialism": 14649, - "##haven": 14650, - "appointments": 14651, - "nonsense": 14652, - "prestige": 14653, - "sharma": 14654, - "haul": 14655, - "##tical": 14656, - "solidarity": 14657, - "gps": 14658, - "##ook": 14659, - "##rata": 14660, - "igor": 14661, - "pedestrian": 14662, - "##uit": 14663, - "baxter": 14664, - "tenants": 14665, - "wires": 14666, - "medication": 14667, - "unlimited": 14668, 
- "guiding": 14669, - "impacts": 14670, - "diabetes": 14671, - "##rama": 14672, - "sasha": 14673, - "pas": 14674, - "clive": 14675, - "extraction": 14676, - "131": 14677, - "continually": 14678, - "constraints": 14679, - "##bilities": 14680, - "sonata": 14681, - "hunted": 14682, - "sixteenth": 14683, - "chu": 14684, - "planting": 14685, - "quote": 14686, - "mayer": 14687, - "pretended": 14688, - "abs": 14689, - "spat": 14690, - "##hua": 14691, - "ceramic": 14692, - "##cci": 14693, - "curtains": 14694, - "pigs": 14695, - "pitching": 14696, - "##dad": 14697, - "latvian": 14698, - "sore": 14699, - "dayton": 14700, - "##sted": 14701, - "##qi": 14702, - "patrols": 14703, - "slice": 14704, - "playground": 14705, - "##nted": 14706, - "shone": 14707, - "stool": 14708, - "apparatus": 14709, - "inadequate": 14710, - "mates": 14711, - "treason": 14712, - "##ija": 14713, - "desires": 14714, - "##liga": 14715, - "##croft": 14716, - "somalia": 14717, - "laurent": 14718, - "mir": 14719, - "leonardo": 14720, - "oracle": 14721, - "grape": 14722, - "obliged": 14723, - "chevrolet": 14724, - "thirteenth": 14725, - "stunning": 14726, - "enthusiastic": 14727, - "##ede": 14728, - "accounted": 14729, - "concludes": 14730, - "currents": 14731, - "basil": 14732, - "##kovic": 14733, - "drought": 14734, - "##rica": 14735, - "mai": 14736, - "##aire": 14737, - "shove": 14738, - "posting": 14739, - "##shed": 14740, - "pilgrimage": 14741, - "humorous": 14742, - "packing": 14743, - "fry": 14744, - "pencil": 14745, - "wines": 14746, - "smells": 14747, - "144": 14748, - "marilyn": 14749, - "aching": 14750, - "newest": 14751, - "clung": 14752, - "bon": 14753, - "neighbours": 14754, - "sanctioned": 14755, - "##pie": 14756, - "mug": 14757, - "##stock": 14758, - "drowning": 14759, - "##mma": 14760, - "hydraulic": 14761, - "##vil": 14762, - "hiring": 14763, - "reminder": 14764, - "lilly": 14765, - "investigators": 14766, - "##ncies": 14767, - "sour": 14768, - "##eous": 14769, - "compulsory": 14770, - 
"packet": 14771, - "##rion": 14772, - "##graphic": 14773, - "##elle": 14774, - "cannes": 14775, - "##inate": 14776, - "depressed": 14777, - "##rit": 14778, - "heroic": 14779, - "importantly": 14780, - "theresa": 14781, - "##tled": 14782, - "conway": 14783, - "saturn": 14784, - "marginal": 14785, - "rae": 14786, - "##xia": 14787, - "corresponds": 14788, - "royce": 14789, - "pact": 14790, - "jasper": 14791, - "explosives": 14792, - "packaging": 14793, - "aluminium": 14794, - "##ttered": 14795, - "denotes": 14796, - "rhythmic": 14797, - "spans": 14798, - "assignments": 14799, - "hereditary": 14800, - "outlined": 14801, - "originating": 14802, - "sundays": 14803, - "lad": 14804, - "reissued": 14805, - "greeting": 14806, - "beatrice": 14807, - "##dic": 14808, - "pillar": 14809, - "marcos": 14810, - "plots": 14811, - "handbook": 14812, - "alcoholic": 14813, - "judiciary": 14814, - "avant": 14815, - "slides": 14816, - "extract": 14817, - "masculine": 14818, - "blur": 14819, - "##eum": 14820, - "##force": 14821, - "homage": 14822, - "trembled": 14823, - "owens": 14824, - "hymn": 14825, - "trey": 14826, - "omega": 14827, - "signaling": 14828, - "socks": 14829, - "accumulated": 14830, - "reacted": 14831, - "attic": 14832, - "theo": 14833, - "lining": 14834, - "angie": 14835, - "distraction": 14836, - "primera": 14837, - "talbot": 14838, - "##key": 14839, - "1200": 14840, - "ti": 14841, - "creativity": 14842, - "billed": 14843, - "##hey": 14844, - "deacon": 14845, - "eduardo": 14846, - "identifies": 14847, - "proposition": 14848, - "dizzy": 14849, - "gunner": 14850, - "hogan": 14851, - "##yam": 14852, - "##pping": 14853, - "##hol": 14854, - "ja": 14855, - "##chan": 14856, - "jensen": 14857, - "reconstructed": 14858, - "##berger": 14859, - "clearance": 14860, - "darius": 14861, - "##nier": 14862, - "abe": 14863, - "harlem": 14864, - "plea": 14865, - "dei": 14866, - "circled": 14867, - "emotionally": 14868, - "notation": 14869, - "fascist": 14870, - "neville": 14871, - 
"exceeded": 14872, - "upwards": 14873, - "viable": 14874, - "ducks": 14875, - "##fo": 14876, - "workforce": 14877, - "racer": 14878, - "limiting": 14879, - "shri": 14880, - "##lson": 14881, - "possesses": 14882, - "1600": 14883, - "kerr": 14884, - "moths": 14885, - "devastating": 14886, - "laden": 14887, - "disturbing": 14888, - "locking": 14889, - "##cture": 14890, - "gal": 14891, - "fearing": 14892, - "accreditation": 14893, - "flavor": 14894, - "aide": 14895, - "1870s": 14896, - "mountainous": 14897, - "##baum": 14898, - "melt": 14899, - "##ures": 14900, - "motel": 14901, - "texture": 14902, - "servers": 14903, - "soda": 14904, - "##mb": 14905, - "herd": 14906, - "##nium": 14907, - "erect": 14908, - "puzzled": 14909, - "hum": 14910, - "peggy": 14911, - "examinations": 14912, - "gould": 14913, - "testified": 14914, - "geoff": 14915, - "ren": 14916, - "devised": 14917, - "sacks": 14918, - "##law": 14919, - "denial": 14920, - "posters": 14921, - "grunted": 14922, - "cesar": 14923, - "tutor": 14924, - "ec": 14925, - "gerry": 14926, - "offerings": 14927, - "byrne": 14928, - "falcons": 14929, - "combinations": 14930, - "ct": 14931, - "incoming": 14932, - "pardon": 14933, - "rocking": 14934, - "26th": 14935, - "avengers": 14936, - "flared": 14937, - "mankind": 14938, - "seller": 14939, - "uttar": 14940, - "loch": 14941, - "nadia": 14942, - "stroking": 14943, - "exposing": 14944, - "##hd": 14945, - "fertile": 14946, - "ancestral": 14947, - "instituted": 14948, - "##has": 14949, - "noises": 14950, - "prophecy": 14951, - "taxation": 14952, - "eminent": 14953, - "vivid": 14954, - "pol": 14955, - "##bol": 14956, - "dart": 14957, - "indirect": 14958, - "multimedia": 14959, - "notebook": 14960, - "upside": 14961, - "displaying": 14962, - "adrenaline": 14963, - "referenced": 14964, - "geometric": 14965, - "##iving": 14966, - "progression": 14967, - "##ddy": 14968, - "blunt": 14969, - "announce": 14970, - "##far": 14971, - "implementing": 14972, - "##lav": 14973, - 
"aggression": 14974, - "liaison": 14975, - "cooler": 14976, - "cares": 14977, - "headache": 14978, - "plantations": 14979, - "gorge": 14980, - "dots": 14981, - "impulse": 14982, - "thickness": 14983, - "ashamed": 14984, - "averaging": 14985, - "kathy": 14986, - "obligation": 14987, - "precursor": 14988, - "137": 14989, - "fowler": 14990, - "symmetry": 14991, - "thee": 14992, - "225": 14993, - "hears": 14994, - "##rai": 14995, - "undergoing": 14996, - "ads": 14997, - "butcher": 14998, - "bowler": 14999, - "##lip": 15000, - "cigarettes": 15001, - "subscription": 15002, - "goodness": 15003, - "##ically": 15004, - "browne": 15005, - "##hos": 15006, - "##tech": 15007, - "kyoto": 15008, - "donor": 15009, - "##erty": 15010, - "damaging": 15011, - "friction": 15012, - "drifting": 15013, - "expeditions": 15014, - "hardened": 15015, - "prostitution": 15016, - "152": 15017, - "fauna": 15018, - "blankets": 15019, - "claw": 15020, - "tossing": 15021, - "snarled": 15022, - "butterflies": 15023, - "recruits": 15024, - "investigative": 15025, - "coated": 15026, - "healed": 15027, - "138": 15028, - "communal": 15029, - "hai": 15030, - "xiii": 15031, - "academics": 15032, - "boone": 15033, - "psychologist": 15034, - "restless": 15035, - "lahore": 15036, - "stephens": 15037, - "mba": 15038, - "brendan": 15039, - "foreigners": 15040, - "printer": 15041, - "##pc": 15042, - "ached": 15043, - "explode": 15044, - "27th": 15045, - "deed": 15046, - "scratched": 15047, - "dared": 15048, - "##pole": 15049, - "cardiac": 15050, - "1780": 15051, - "okinawa": 15052, - "proto": 15053, - "commando": 15054, - "compelled": 15055, - "oddly": 15056, - "electrons": 15057, - "##base": 15058, - "replica": 15059, - "thanksgiving": 15060, - "##rist": 15061, - "sheila": 15062, - "deliberate": 15063, - "stafford": 15064, - "tidal": 15065, - "representations": 15066, - "hercules": 15067, - "ou": 15068, - "##path": 15069, - "##iated": 15070, - "kidnapping": 15071, - "lenses": 15072, - "##tling": 15073, - 
"deficit": 15074, - "samoa": 15075, - "mouths": 15076, - "consuming": 15077, - "computational": 15078, - "maze": 15079, - "granting": 15080, - "smirk": 15081, - "razor": 15082, - "fixture": 15083, - "ideals": 15084, - "inviting": 15085, - "aiden": 15086, - "nominal": 15087, - "##vs": 15088, - "issuing": 15089, - "julio": 15090, - "pitt": 15091, - "ramsey": 15092, - "docks": 15093, - "##oss": 15094, - "exhaust": 15095, - "##owed": 15096, - "bavarian": 15097, - "draped": 15098, - "anterior": 15099, - "mating": 15100, - "ethiopian": 15101, - "explores": 15102, - "noticing": 15103, - "##nton": 15104, - "discarded": 15105, - "convenience": 15106, - "hoffman": 15107, - "endowment": 15108, - "beasts": 15109, - "cartridge": 15110, - "mormon": 15111, - "paternal": 15112, - "probe": 15113, - "sleeves": 15114, - "interfere": 15115, - "lump": 15116, - "deadline": 15117, - "##rail": 15118, - "jenks": 15119, - "bulldogs": 15120, - "scrap": 15121, - "alternating": 15122, - "justified": 15123, - "reproductive": 15124, - "nam": 15125, - "seize": 15126, - "descending": 15127, - "secretariat": 15128, - "kirby": 15129, - "coupe": 15130, - "grouped": 15131, - "smash": 15132, - "panther": 15133, - "sedan": 15134, - "tapping": 15135, - "##18": 15136, - "lola": 15137, - "cheer": 15138, - "germanic": 15139, - "unfortunate": 15140, - "##eter": 15141, - "unrelated": 15142, - "##fan": 15143, - "subordinate": 15144, - "##sdale": 15145, - "suzanne": 15146, - "advertisement": 15147, - "##ility": 15148, - "horsepower": 15149, - "##lda": 15150, - "cautiously": 15151, - "discourse": 15152, - "luigi": 15153, - "##mans": 15154, - "##fields": 15155, - "noun": 15156, - "prevalent": 15157, - "mao": 15158, - "schneider": 15159, - "everett": 15160, - "surround": 15161, - "governorate": 15162, - "kira": 15163, - "##avia": 15164, - "westward": 15165, - "##take": 15166, - "misty": 15167, - "rails": 15168, - "sustainability": 15169, - "134": 15170, - "unused": 15171, - "##rating": 15172, - "packs": 15173, - 
"toast": 15174, - "unwilling": 15175, - "regulate": 15176, - "thy": 15177, - "suffrage": 15178, - "nile": 15179, - "awe": 15180, - "assam": 15181, - "definitions": 15182, - "travelers": 15183, - "affordable": 15184, - "##rb": 15185, - "conferred": 15186, - "sells": 15187, - "undefeated": 15188, - "beneficial": 15189, - "torso": 15190, - "basal": 15191, - "repeating": 15192, - "remixes": 15193, - "##pass": 15194, - "bahrain": 15195, - "cables": 15196, - "fang": 15197, - "##itated": 15198, - "excavated": 15199, - "numbering": 15200, - "statutory": 15201, - "##rey": 15202, - "deluxe": 15203, - "##lian": 15204, - "forested": 15205, - "ramirez": 15206, - "derbyshire": 15207, - "zeus": 15208, - "slamming": 15209, - "transfers": 15210, - "astronomer": 15211, - "banana": 15212, - "lottery": 15213, - "berg": 15214, - "histories": 15215, - "bamboo": 15216, - "##uchi": 15217, - "resurrection": 15218, - "posterior": 15219, - "bowls": 15220, - "vaguely": 15221, - "##thi": 15222, - "thou": 15223, - "preserving": 15224, - "tensed": 15225, - "offence": 15226, - "##inas": 15227, - "meyrick": 15228, - "callum": 15229, - "ridden": 15230, - "watt": 15231, - "langdon": 15232, - "tying": 15233, - "lowland": 15234, - "snorted": 15235, - "daring": 15236, - "truman": 15237, - "##hale": 15238, - "##girl": 15239, - "aura": 15240, - "overly": 15241, - "filing": 15242, - "weighing": 15243, - "goa": 15244, - "infections": 15245, - "philanthropist": 15246, - "saunders": 15247, - "eponymous": 15248, - "##owski": 15249, - "latitude": 15250, - "perspectives": 15251, - "reviewing": 15252, - "mets": 15253, - "commandant": 15254, - "radial": 15255, - "##kha": 15256, - "flashlight": 15257, - "reliability": 15258, - "koch": 15259, - "vowels": 15260, - "amazed": 15261, - "ada": 15262, - "elaine": 15263, - "supper": 15264, - "##rth": 15265, - "##encies": 15266, - "predator": 15267, - "debated": 15268, - "soviets": 15269, - "cola": 15270, - "##boards": 15271, - "##nah": 15272, - "compartment": 15273, - 
"crooked": 15274, - "arbitrary": 15275, - "fourteenth": 15276, - "##ctive": 15277, - "havana": 15278, - "majors": 15279, - "steelers": 15280, - "clips": 15281, - "profitable": 15282, - "ambush": 15283, - "exited": 15284, - "packers": 15285, - "##tile": 15286, - "nude": 15287, - "cracks": 15288, - "fungi": 15289, - "##е": 15290, - "limb": 15291, - "trousers": 15292, - "josie": 15293, - "shelby": 15294, - "tens": 15295, - "frederic": 15296, - "##ος": 15297, - "definite": 15298, - "smoothly": 15299, - "constellation": 15300, - "insult": 15301, - "baton": 15302, - "discs": 15303, - "lingering": 15304, - "##nco": 15305, - "conclusions": 15306, - "lent": 15307, - "staging": 15308, - "becker": 15309, - "grandpa": 15310, - "shaky": 15311, - "##tron": 15312, - "einstein": 15313, - "obstacles": 15314, - "sk": 15315, - "adverse": 15316, - "elle": 15317, - "economically": 15318, - "##moto": 15319, - "mccartney": 15320, - "thor": 15321, - "dismissal": 15322, - "motions": 15323, - "readings": 15324, - "nostrils": 15325, - "treatise": 15326, - "##pace": 15327, - "squeezing": 15328, - "evidently": 15329, - "prolonged": 15330, - "1783": 15331, - "venezuelan": 15332, - "je": 15333, - "marguerite": 15334, - "beirut": 15335, - "takeover": 15336, - "shareholders": 15337, - "##vent": 15338, - "denise": 15339, - "digit": 15340, - "airplay": 15341, - "norse": 15342, - "##bbling": 15343, - "imaginary": 15344, - "pills": 15345, - "hubert": 15346, - "blaze": 15347, - "vacated": 15348, - "eliminating": 15349, - "##ello": 15350, - "vine": 15351, - "mansfield": 15352, - "##tty": 15353, - "retrospective": 15354, - "barrow": 15355, - "borne": 15356, - "clutch": 15357, - "bail": 15358, - "forensic": 15359, - "weaving": 15360, - "##nett": 15361, - "##witz": 15362, - "desktop": 15363, - "citadel": 15364, - "promotions": 15365, - "worrying": 15366, - "dorset": 15367, - "ieee": 15368, - "subdivided": 15369, - "##iating": 15370, - "manned": 15371, - "expeditionary": 15372, - "pickup": 15373, - "synod": 
15374, - "chuckle": 15375, - "185": 15376, - "barney": 15377, - "##rz": 15378, - "##ffin": 15379, - "functionality": 15380, - "karachi": 15381, - "litigation": 15382, - "meanings": 15383, - "uc": 15384, - "lick": 15385, - "turbo": 15386, - "anders": 15387, - "##ffed": 15388, - "execute": 15389, - "curl": 15390, - "oppose": 15391, - "ankles": 15392, - "typhoon": 15393, - "##د": 15394, - "##ache": 15395, - "##asia": 15396, - "linguistics": 15397, - "compassion": 15398, - "pressures": 15399, - "grazing": 15400, - "perfection": 15401, - "##iting": 15402, - "immunity": 15403, - "monopoly": 15404, - "muddy": 15405, - "backgrounds": 15406, - "136": 15407, - "namibia": 15408, - "francesca": 15409, - "monitors": 15410, - "attracting": 15411, - "stunt": 15412, - "tuition": 15413, - "##ии": 15414, - "vegetable": 15415, - "##mates": 15416, - "##quent": 15417, - "mgm": 15418, - "jen": 15419, - "complexes": 15420, - "forts": 15421, - "##ond": 15422, - "cellar": 15423, - "bites": 15424, - "seventeenth": 15425, - "royals": 15426, - "flemish": 15427, - "failures": 15428, - "mast": 15429, - "charities": 15430, - "##cular": 15431, - "peruvian": 15432, - "capitals": 15433, - "macmillan": 15434, - "ipswich": 15435, - "outward": 15436, - "frigate": 15437, - "postgraduate": 15438, - "folds": 15439, - "employing": 15440, - "##ouse": 15441, - "concurrently": 15442, - "fiery": 15443, - "##tai": 15444, - "contingent": 15445, - "nightmares": 15446, - "monumental": 15447, - "nicaragua": 15448, - "##kowski": 15449, - "lizard": 15450, - "mal": 15451, - "fielding": 15452, - "gig": 15453, - "reject": 15454, - "##pad": 15455, - "harding": 15456, - "##ipe": 15457, - "coastline": 15458, - "##cin": 15459, - "##nos": 15460, - "beethoven": 15461, - "humphrey": 15462, - "innovations": 15463, - "##tam": 15464, - "##nge": 15465, - "norris": 15466, - "doris": 15467, - "solicitor": 15468, - "huang": 15469, - "obey": 15470, - "141": 15471, - "##lc": 15472, - "niagara": 15473, - "##tton": 15474, - "shelves": 
15475, - "aug": 15476, - "bourbon": 15477, - "curry": 15478, - "nightclub": 15479, - "specifications": 15480, - "hilton": 15481, - "##ndo": 15482, - "centennial": 15483, - "dispersed": 15484, - "worm": 15485, - "neglected": 15486, - "briggs": 15487, - "sm": 15488, - "font": 15489, - "kuala": 15490, - "uneasy": 15491, - "plc": 15492, - "##nstein": 15493, - "##bound": 15494, - "##aking": 15495, - "##burgh": 15496, - "awaiting": 15497, - "pronunciation": 15498, - "##bbed": 15499, - "##quest": 15500, - "eh": 15501, - "optimal": 15502, - "zhu": 15503, - "raped": 15504, - "greens": 15505, - "presided": 15506, - "brenda": 15507, - "worries": 15508, - "##life": 15509, - "venetian": 15510, - "marxist": 15511, - "turnout": 15512, - "##lius": 15513, - "refined": 15514, - "braced": 15515, - "sins": 15516, - "grasped": 15517, - "sunderland": 15518, - "nickel": 15519, - "speculated": 15520, - "lowell": 15521, - "cyrillic": 15522, - "communism": 15523, - "fundraising": 15524, - "resembling": 15525, - "colonists": 15526, - "mutant": 15527, - "freddie": 15528, - "usc": 15529, - "##mos": 15530, - "gratitude": 15531, - "##run": 15532, - "mural": 15533, - "##lous": 15534, - "chemist": 15535, - "wi": 15536, - "reminds": 15537, - "28th": 15538, - "steals": 15539, - "tess": 15540, - "pietro": 15541, - "##ingen": 15542, - "promoter": 15543, - "ri": 15544, - "microphone": 15545, - "honoured": 15546, - "rai": 15547, - "sant": 15548, - "##qui": 15549, - "feather": 15550, - "##nson": 15551, - "burlington": 15552, - "kurdish": 15553, - "terrorists": 15554, - "deborah": 15555, - "sickness": 15556, - "##wed": 15557, - "##eet": 15558, - "hazard": 15559, - "irritated": 15560, - "desperation": 15561, - "veil": 15562, - "clarity": 15563, - "##rik": 15564, - "jewels": 15565, - "xv": 15566, - "##gged": 15567, - "##ows": 15568, - "##cup": 15569, - "berkshire": 15570, - "unfair": 15571, - "mysteries": 15572, - "orchid": 15573, - "winced": 15574, - "exhaustion": 15575, - "renovations": 15576, - 
"stranded": 15577, - "obe": 15578, - "infinity": 15579, - "##nies": 15580, - "adapt": 15581, - "redevelopment": 15582, - "thanked": 15583, - "registry": 15584, - "olga": 15585, - "domingo": 15586, - "noir": 15587, - "tudor": 15588, - "ole": 15589, - "##atus": 15590, - "commenting": 15591, - "behaviors": 15592, - "##ais": 15593, - "crisp": 15594, - "pauline": 15595, - "probable": 15596, - "stirling": 15597, - "wigan": 15598, - "##bian": 15599, - "paralympics": 15600, - "panting": 15601, - "surpassed": 15602, - "##rew": 15603, - "luca": 15604, - "barred": 15605, - "pony": 15606, - "famed": 15607, - "##sters": 15608, - "cassandra": 15609, - "waiter": 15610, - "carolyn": 15611, - "exported": 15612, - "##orted": 15613, - "andres": 15614, - "destructive": 15615, - "deeds": 15616, - "jonah": 15617, - "castles": 15618, - "vacancy": 15619, - "suv": 15620, - "##glass": 15621, - "1788": 15622, - "orchard": 15623, - "yep": 15624, - "famine": 15625, - "belarusian": 15626, - "sprang": 15627, - "##forth": 15628, - "skinny": 15629, - "##mis": 15630, - "administrators": 15631, - "rotterdam": 15632, - "zambia": 15633, - "zhao": 15634, - "boiler": 15635, - "discoveries": 15636, - "##ride": 15637, - "##physics": 15638, - "lucius": 15639, - "disappointing": 15640, - "outreach": 15641, - "spoon": 15642, - "##frame": 15643, - "qualifications": 15644, - "unanimously": 15645, - "enjoys": 15646, - "regency": 15647, - "##iidae": 15648, - "stade": 15649, - "realism": 15650, - "veterinary": 15651, - "rodgers": 15652, - "dump": 15653, - "alain": 15654, - "chestnut": 15655, - "castile": 15656, - "censorship": 15657, - "rumble": 15658, - "gibbs": 15659, - "##itor": 15660, - "communion": 15661, - "reggae": 15662, - "inactivated": 15663, - "logs": 15664, - "loads": 15665, - "##houses": 15666, - "homosexual": 15667, - "##iano": 15668, - "ale": 15669, - "informs": 15670, - "##cas": 15671, - "phrases": 15672, - "plaster": 15673, - "linebacker": 15674, - "ambrose": 15675, - "kaiser": 15676, - 
"fascinated": 15677, - "850": 15678, - "limerick": 15679, - "recruitment": 15680, - "forge": 15681, - "mastered": 15682, - "##nding": 15683, - "leinster": 15684, - "rooted": 15685, - "threaten": 15686, - "##strom": 15687, - "borneo": 15688, - "##hes": 15689, - "suggestions": 15690, - "scholarships": 15691, - "propeller": 15692, - "documentaries": 15693, - "patronage": 15694, - "coats": 15695, - "constructing": 15696, - "invest": 15697, - "neurons": 15698, - "comet": 15699, - "entirety": 15700, - "shouts": 15701, - "identities": 15702, - "annoying": 15703, - "unchanged": 15704, - "wary": 15705, - "##antly": 15706, - "##ogy": 15707, - "neat": 15708, - "oversight": 15709, - "##kos": 15710, - "phillies": 15711, - "replay": 15712, - "constance": 15713, - "##kka": 15714, - "incarnation": 15715, - "humble": 15716, - "skies": 15717, - "minus": 15718, - "##acy": 15719, - "smithsonian": 15720, - "##chel": 15721, - "guerrilla": 15722, - "jar": 15723, - "cadets": 15724, - "##plate": 15725, - "surplus": 15726, - "audit": 15727, - "##aru": 15728, - "cracking": 15729, - "joanna": 15730, - "louisa": 15731, - "pacing": 15732, - "##lights": 15733, - "intentionally": 15734, - "##iri": 15735, - "diner": 15736, - "nwa": 15737, - "imprint": 15738, - "australians": 15739, - "tong": 15740, - "unprecedented": 15741, - "bunker": 15742, - "naive": 15743, - "specialists": 15744, - "ark": 15745, - "nichols": 15746, - "railing": 15747, - "leaked": 15748, - "pedal": 15749, - "##uka": 15750, - "shrub": 15751, - "longing": 15752, - "roofs": 15753, - "v8": 15754, - "captains": 15755, - "neural": 15756, - "tuned": 15757, - "##ntal": 15758, - "##jet": 15759, - "emission": 15760, - "medina": 15761, - "frantic": 15762, - "codex": 15763, - "definitive": 15764, - "sid": 15765, - "abolition": 15766, - "intensified": 15767, - "stocks": 15768, - "enrique": 15769, - "sustain": 15770, - "genoa": 15771, - "oxide": 15772, - "##written": 15773, - "clues": 15774, - "cha": 15775, - "##gers": 15776, - 
"tributaries": 15777, - "fragment": 15778, - "venom": 15779, - "##rity": 15780, - "##ente": 15781, - "##sca": 15782, - "muffled": 15783, - "vain": 15784, - "sire": 15785, - "laos": 15786, - "##ingly": 15787, - "##hana": 15788, - "hastily": 15789, - "snapping": 15790, - "surfaced": 15791, - "sentiment": 15792, - "motive": 15793, - "##oft": 15794, - "contests": 15795, - "approximate": 15796, - "mesa": 15797, - "luckily": 15798, - "dinosaur": 15799, - "exchanges": 15800, - "propelled": 15801, - "accord": 15802, - "bourne": 15803, - "relieve": 15804, - "tow": 15805, - "masks": 15806, - "offended": 15807, - "##ues": 15808, - "cynthia": 15809, - "##mmer": 15810, - "rains": 15811, - "bartender": 15812, - "zinc": 15813, - "reviewers": 15814, - "lois": 15815, - "##sai": 15816, - "legged": 15817, - "arrogant": 15818, - "rafe": 15819, - "rosie": 15820, - "comprise": 15821, - "handicap": 15822, - "blockade": 15823, - "inlet": 15824, - "lagoon": 15825, - "copied": 15826, - "drilling": 15827, - "shelley": 15828, - "petals": 15829, - "##inian": 15830, - "mandarin": 15831, - "obsolete": 15832, - "##inated": 15833, - "onward": 15834, - "arguably": 15835, - "productivity": 15836, - "cindy": 15837, - "praising": 15838, - "seldom": 15839, - "busch": 15840, - "discusses": 15841, - "raleigh": 15842, - "shortage": 15843, - "ranged": 15844, - "stanton": 15845, - "encouragement": 15846, - "firstly": 15847, - "conceded": 15848, - "overs": 15849, - "temporal": 15850, - "##uke": 15851, - "cbe": 15852, - "##bos": 15853, - "woo": 15854, - "certainty": 15855, - "pumps": 15856, - "##pton": 15857, - "stalked": 15858, - "##uli": 15859, - "lizzie": 15860, - "periodic": 15861, - "thieves": 15862, - "weaker": 15863, - "##night": 15864, - "gases": 15865, - "shoving": 15866, - "chooses": 15867, - "wc": 15868, - "##chemical": 15869, - "prompting": 15870, - "weights": 15871, - "##kill": 15872, - "robust": 15873, - "flanked": 15874, - "sticky": 15875, - "hu": 15876, - "tuberculosis": 15877, - "##eb": 
15878, - "##eal": 15879, - "christchurch": 15880, - "resembled": 15881, - "wallet": 15882, - "reese": 15883, - "inappropriate": 15884, - "pictured": 15885, - "distract": 15886, - "fixing": 15887, - "fiddle": 15888, - "giggled": 15889, - "burger": 15890, - "heirs": 15891, - "hairy": 15892, - "mechanic": 15893, - "torque": 15894, - "apache": 15895, - "obsessed": 15896, - "chiefly": 15897, - "cheng": 15898, - "logging": 15899, - "##tag": 15900, - "extracted": 15901, - "meaningful": 15902, - "numb": 15903, - "##vsky": 15904, - "gloucestershire": 15905, - "reminding": 15906, - "##bay": 15907, - "unite": 15908, - "##lit": 15909, - "breeds": 15910, - "diminished": 15911, - "clown": 15912, - "glove": 15913, - "1860s": 15914, - "##ن": 15915, - "##ug": 15916, - "archibald": 15917, - "focal": 15918, - "freelance": 15919, - "sliced": 15920, - "depiction": 15921, - "##yk": 15922, - "organism": 15923, - "switches": 15924, - "sights": 15925, - "stray": 15926, - "crawling": 15927, - "##ril": 15928, - "lever": 15929, - "leningrad": 15930, - "interpretations": 15931, - "loops": 15932, - "anytime": 15933, - "reel": 15934, - "alicia": 15935, - "delighted": 15936, - "##ech": 15937, - "inhaled": 15938, - "xiv": 15939, - "suitcase": 15940, - "bernie": 15941, - "vega": 15942, - "licenses": 15943, - "northampton": 15944, - "exclusion": 15945, - "induction": 15946, - "monasteries": 15947, - "racecourse": 15948, - "homosexuality": 15949, - "##right": 15950, - "##sfield": 15951, - "##rky": 15952, - "dimitri": 15953, - "michele": 15954, - "alternatives": 15955, - "ions": 15956, - "commentators": 15957, - "genuinely": 15958, - "objected": 15959, - "pork": 15960, - "hospitality": 15961, - "fencing": 15962, - "stephan": 15963, - "warships": 15964, - "peripheral": 15965, - "wit": 15966, - "drunken": 15967, - "wrinkled": 15968, - "quentin": 15969, - "spends": 15970, - "departing": 15971, - "chung": 15972, - "numerical": 15973, - "spokesperson": 15974, - "##zone": 15975, - "johannesburg": 15976, - 
"caliber": 15977, - "killers": 15978, - "##udge": 15979, - "assumes": 15980, - "neatly": 15981, - "demographic": 15982, - "abigail": 15983, - "bloc": 15984, - "##vel": 15985, - "mounting": 15986, - "##lain": 15987, - "bentley": 15988, - "slightest": 15989, - "xu": 15990, - "recipients": 15991, - "##jk": 15992, - "merlin": 15993, - "##writer": 15994, - "seniors": 15995, - "prisons": 15996, - "blinking": 15997, - "hindwings": 15998, - "flickered": 15999, - "kappa": 16000, - "##hel": 16001, - "80s": 16002, - "strengthening": 16003, - "appealing": 16004, - "brewing": 16005, - "gypsy": 16006, - "mali": 16007, - "lashes": 16008, - "hulk": 16009, - "unpleasant": 16010, - "harassment": 16011, - "bio": 16012, - "treaties": 16013, - "predict": 16014, - "instrumentation": 16015, - "pulp": 16016, - "troupe": 16017, - "boiling": 16018, - "mantle": 16019, - "##ffe": 16020, - "ins": 16021, - "##vn": 16022, - "dividing": 16023, - "handles": 16024, - "verbs": 16025, - "##onal": 16026, - "coconut": 16027, - "senegal": 16028, - "340": 16029, - "thorough": 16030, - "gum": 16031, - "momentarily": 16032, - "##sto": 16033, - "cocaine": 16034, - "panicked": 16035, - "destined": 16036, - "##turing": 16037, - "teatro": 16038, - "denying": 16039, - "weary": 16040, - "captained": 16041, - "mans": 16042, - "##hawks": 16043, - "##code": 16044, - "wakefield": 16045, - "bollywood": 16046, - "thankfully": 16047, - "##16": 16048, - "cyril": 16049, - "##wu": 16050, - "amendments": 16051, - "##bahn": 16052, - "consultation": 16053, - "stud": 16054, - "reflections": 16055, - "kindness": 16056, - "1787": 16057, - "internally": 16058, - "##ovo": 16059, - "tex": 16060, - "mosaic": 16061, - "distribute": 16062, - "paddy": 16063, - "seeming": 16064, - "143": 16065, - "##hic": 16066, - "piers": 16067, - "##15": 16068, - "##mura": 16069, - "##verse": 16070, - "popularly": 16071, - "winger": 16072, - "kang": 16073, - "sentinel": 16074, - "mccoy": 16075, - "##anza": 16076, - "covenant": 16077, - "##bag": 
16078, - "verge": 16079, - "fireworks": 16080, - "suppress": 16081, - "thrilled": 16082, - "dominate": 16083, - "##jar": 16084, - "swansea": 16085, - "##60": 16086, - "142": 16087, - "reconciliation": 16088, - "##ndi": 16089, - "stiffened": 16090, - "cue": 16091, - "dorian": 16092, - "##uf": 16093, - "damascus": 16094, - "amor": 16095, - "ida": 16096, - "foremost": 16097, - "##aga": 16098, - "porsche": 16099, - "unseen": 16100, - "dir": 16101, - "##had": 16102, - "##azi": 16103, - "stony": 16104, - "lexi": 16105, - "melodies": 16106, - "##nko": 16107, - "angular": 16108, - "integer": 16109, - "podcast": 16110, - "ants": 16111, - "inherent": 16112, - "jaws": 16113, - "justify": 16114, - "persona": 16115, - "##olved": 16116, - "josephine": 16117, - "##nr": 16118, - "##ressed": 16119, - "customary": 16120, - "flashes": 16121, - "gala": 16122, - "cyrus": 16123, - "glaring": 16124, - "backyard": 16125, - "ariel": 16126, - "physiology": 16127, - "greenland": 16128, - "html": 16129, - "stir": 16130, - "avon": 16131, - "atletico": 16132, - "finch": 16133, - "methodology": 16134, - "ked": 16135, - "##lent": 16136, - "mas": 16137, - "catholicism": 16138, - "townsend": 16139, - "branding": 16140, - "quincy": 16141, - "fits": 16142, - "containers": 16143, - "1777": 16144, - "ashore": 16145, - "aragon": 16146, - "##19": 16147, - "forearm": 16148, - "poisoning": 16149, - "##sd": 16150, - "adopting": 16151, - "conquer": 16152, - "grinding": 16153, - "amnesty": 16154, - "keller": 16155, - "finances": 16156, - "evaluate": 16157, - "forged": 16158, - "lankan": 16159, - "instincts": 16160, - "##uto": 16161, - "guam": 16162, - "bosnian": 16163, - "photographed": 16164, - "workplace": 16165, - "desirable": 16166, - "protector": 16167, - "##dog": 16168, - "allocation": 16169, - "intently": 16170, - "encourages": 16171, - "willy": 16172, - "##sten": 16173, - "bodyguard": 16174, - "electro": 16175, - "brighter": 16176, - "##ν": 16177, - "bihar": 16178, - "##chev": 16179, - "lasts": 16180, 
- "opener": 16181, - "amphibious": 16182, - "sal": 16183, - "verde": 16184, - "arte": 16185, - "##cope": 16186, - "captivity": 16187, - "vocabulary": 16188, - "yields": 16189, - "##tted": 16190, - "agreeing": 16191, - "desmond": 16192, - "pioneered": 16193, - "##chus": 16194, - "strap": 16195, - "campaigned": 16196, - "railroads": 16197, - "##ович": 16198, - "emblem": 16199, - "##dre": 16200, - "stormed": 16201, - "501": 16202, - "##ulous": 16203, - "marijuana": 16204, - "northumberland": 16205, - "##gn": 16206, - "##nath": 16207, - "bowen": 16208, - "landmarks": 16209, - "beaumont": 16210, - "##qua": 16211, - "danube": 16212, - "##bler": 16213, - "attorneys": 16214, - "th": 16215, - "ge": 16216, - "flyers": 16217, - "critique": 16218, - "villains": 16219, - "cass": 16220, - "mutation": 16221, - "acc": 16222, - "##0s": 16223, - "colombo": 16224, - "mckay": 16225, - "motif": 16226, - "sampling": 16227, - "concluding": 16228, - "syndicate": 16229, - "##rell": 16230, - "neon": 16231, - "stables": 16232, - "ds": 16233, - "warnings": 16234, - "clint": 16235, - "mourning": 16236, - "wilkinson": 16237, - "##tated": 16238, - "merrill": 16239, - "leopard": 16240, - "evenings": 16241, - "exhaled": 16242, - "emil": 16243, - "sonia": 16244, - "ezra": 16245, - "discrete": 16246, - "stove": 16247, - "farrell": 16248, - "fifteenth": 16249, - "prescribed": 16250, - "superhero": 16251, - "##rier": 16252, - "worms": 16253, - "helm": 16254, - "wren": 16255, - "##duction": 16256, - "##hc": 16257, - "expo": 16258, - "##rator": 16259, - "hq": 16260, - "unfamiliar": 16261, - "antony": 16262, - "prevents": 16263, - "acceleration": 16264, - "fiercely": 16265, - "mari": 16266, - "painfully": 16267, - "calculations": 16268, - "cheaper": 16269, - "ign": 16270, - "clifton": 16271, - "irvine": 16272, - "davenport": 16273, - "mozambique": 16274, - "##np": 16275, - "pierced": 16276, - "##evich": 16277, - "wonders": 16278, - "##wig": 16279, - "##cate": 16280, - "##iling": 16281, - "crusade": 
16282, - "ware": 16283, - "##uel": 16284, - "enzymes": 16285, - "reasonably": 16286, - "mls": 16287, - "##coe": 16288, - "mater": 16289, - "ambition": 16290, - "bunny": 16291, - "eliot": 16292, - "kernel": 16293, - "##fin": 16294, - "asphalt": 16295, - "headmaster": 16296, - "torah": 16297, - "aden": 16298, - "lush": 16299, - "pins": 16300, - "waived": 16301, - "##care": 16302, - "##yas": 16303, - "joao": 16304, - "substrate": 16305, - "enforce": 16306, - "##grad": 16307, - "##ules": 16308, - "alvarez": 16309, - "selections": 16310, - "epidemic": 16311, - "tempted": 16312, - "##bit": 16313, - "bremen": 16314, - "translates": 16315, - "ensured": 16316, - "waterfront": 16317, - "29th": 16318, - "forrest": 16319, - "manny": 16320, - "malone": 16321, - "kramer": 16322, - "reigning": 16323, - "cookies": 16324, - "simpler": 16325, - "absorption": 16326, - "205": 16327, - "engraved": 16328, - "##ffy": 16329, - "evaluated": 16330, - "1778": 16331, - "haze": 16332, - "146": 16333, - "comforting": 16334, - "crossover": 16335, - "##abe": 16336, - "thorn": 16337, - "##rift": 16338, - "##imo": 16339, - "##pop": 16340, - "suppression": 16341, - "fatigue": 16342, - "cutter": 16343, - "##tr": 16344, - "201": 16345, - "wurttemberg": 16346, - "##orf": 16347, - "enforced": 16348, - "hovering": 16349, - "proprietary": 16350, - "gb": 16351, - "samurai": 16352, - "syllable": 16353, - "ascent": 16354, - "lacey": 16355, - "tick": 16356, - "lars": 16357, - "tractor": 16358, - "merchandise": 16359, - "rep": 16360, - "bouncing": 16361, - "defendants": 16362, - "##yre": 16363, - "huntington": 16364, - "##ground": 16365, - "##oko": 16366, - "standardized": 16367, - "##hor": 16368, - "##hima": 16369, - "assassinated": 16370, - "nu": 16371, - "predecessors": 16372, - "rainy": 16373, - "liar": 16374, - "assurance": 16375, - "lyrical": 16376, - "##uga": 16377, - "secondly": 16378, - "flattened": 16379, - "ios": 16380, - "parameter": 16381, - "undercover": 16382, - "##mity": 16383, - "bordeaux": 
16384, - "punish": 16385, - "ridges": 16386, - "markers": 16387, - "exodus": 16388, - "inactive": 16389, - "hesitate": 16390, - "debbie": 16391, - "nyc": 16392, - "pledge": 16393, - "savoy": 16394, - "nagar": 16395, - "offset": 16396, - "organist": 16397, - "##tium": 16398, - "hesse": 16399, - "marin": 16400, - "converting": 16401, - "##iver": 16402, - "diagram": 16403, - "propulsion": 16404, - "pu": 16405, - "validity": 16406, - "reverted": 16407, - "supportive": 16408, - "##dc": 16409, - "ministries": 16410, - "clans": 16411, - "responds": 16412, - "proclamation": 16413, - "##inae": 16414, - "##ø": 16415, - "##rea": 16416, - "ein": 16417, - "pleading": 16418, - "patriot": 16419, - "sf": 16420, - "birch": 16421, - "islanders": 16422, - "strauss": 16423, - "hates": 16424, - "##dh": 16425, - "brandenburg": 16426, - "concession": 16427, - "rd": 16428, - "##ob": 16429, - "1900s": 16430, - "killings": 16431, - "textbook": 16432, - "antiquity": 16433, - "cinematography": 16434, - "wharf": 16435, - "embarrassing": 16436, - "setup": 16437, - "creed": 16438, - "farmland": 16439, - "inequality": 16440, - "centred": 16441, - "signatures": 16442, - "fallon": 16443, - "370": 16444, - "##ingham": 16445, - "##uts": 16446, - "ceylon": 16447, - "gazing": 16448, - "directive": 16449, - "laurie": 16450, - "##tern": 16451, - "globally": 16452, - "##uated": 16453, - "##dent": 16454, - "allah": 16455, - "excavation": 16456, - "threads": 16457, - "##cross": 16458, - "148": 16459, - "frantically": 16460, - "icc": 16461, - "utilize": 16462, - "determines": 16463, - "respiratory": 16464, - "thoughtful": 16465, - "receptions": 16466, - "##dicate": 16467, - "merging": 16468, - "chandra": 16469, - "seine": 16470, - "147": 16471, - "builders": 16472, - "builds": 16473, - "diagnostic": 16474, - "dev": 16475, - "visibility": 16476, - "goddamn": 16477, - "analyses": 16478, - "dhaka": 16479, - "cho": 16480, - "proves": 16481, - "chancel": 16482, - "concurrent": 16483, - "curiously": 16484, - 
"canadians": 16485, - "pumped": 16486, - "restoring": 16487, - "1850s": 16488, - "turtles": 16489, - "jaguar": 16490, - "sinister": 16491, - "spinal": 16492, - "traction": 16493, - "declan": 16494, - "vows": 16495, - "1784": 16496, - "glowed": 16497, - "capitalism": 16498, - "swirling": 16499, - "install": 16500, - "universidad": 16501, - "##lder": 16502, - "##oat": 16503, - "soloist": 16504, - "##genic": 16505, - "##oor": 16506, - "coincidence": 16507, - "beginnings": 16508, - "nissan": 16509, - "dip": 16510, - "resorts": 16511, - "caucasus": 16512, - "combustion": 16513, - "infectious": 16514, - "##eno": 16515, - "pigeon": 16516, - "serpent": 16517, - "##itating": 16518, - "conclude": 16519, - "masked": 16520, - "salad": 16521, - "jew": 16522, - "##gr": 16523, - "surreal": 16524, - "toni": 16525, - "##wc": 16526, - "harmonica": 16527, - "151": 16528, - "##gins": 16529, - "##etic": 16530, - "##coat": 16531, - "fishermen": 16532, - "intending": 16533, - "bravery": 16534, - "##wave": 16535, - "klaus": 16536, - "titan": 16537, - "wembley": 16538, - "taiwanese": 16539, - "ransom": 16540, - "40th": 16541, - "incorrect": 16542, - "hussein": 16543, - "eyelids": 16544, - "jp": 16545, - "cooke": 16546, - "dramas": 16547, - "utilities": 16548, - "##etta": 16549, - "##print": 16550, - "eisenhower": 16551, - "principally": 16552, - "granada": 16553, - "lana": 16554, - "##rak": 16555, - "openings": 16556, - "concord": 16557, - "##bl": 16558, - "bethany": 16559, - "connie": 16560, - "morality": 16561, - "sega": 16562, - "##mons": 16563, - "##nard": 16564, - "earnings": 16565, - "##kara": 16566, - "##cine": 16567, - "wii": 16568, - "communes": 16569, - "##rel": 16570, - "coma": 16571, - "composing": 16572, - "softened": 16573, - "severed": 16574, - "grapes": 16575, - "##17": 16576, - "nguyen": 16577, - "analyzed": 16578, - "warlord": 16579, - "hubbard": 16580, - "heavenly": 16581, - "behave": 16582, - "slovenian": 16583, - "##hit": 16584, - "##ony": 16585, - "hailed": 16586, - 
"filmmakers": 16587, - "trance": 16588, - "caldwell": 16589, - "skye": 16590, - "unrest": 16591, - "coward": 16592, - "likelihood": 16593, - "##aging": 16594, - "bern": 16595, - "sci": 16596, - "taliban": 16597, - "honolulu": 16598, - "propose": 16599, - "##wang": 16600, - "1700": 16601, - "browser": 16602, - "imagining": 16603, - "cobra": 16604, - "contributes": 16605, - "dukes": 16606, - "instinctively": 16607, - "conan": 16608, - "violinist": 16609, - "##ores": 16610, - "accessories": 16611, - "gradual": 16612, - "##amp": 16613, - "quotes": 16614, - "sioux": 16615, - "##dating": 16616, - "undertake": 16617, - "intercepted": 16618, - "sparkling": 16619, - "compressed": 16620, - "139": 16621, - "fungus": 16622, - "tombs": 16623, - "haley": 16624, - "imposing": 16625, - "rests": 16626, - "degradation": 16627, - "lincolnshire": 16628, - "retailers": 16629, - "wetlands": 16630, - "tulsa": 16631, - "distributor": 16632, - "dungeon": 16633, - "nun": 16634, - "greenhouse": 16635, - "convey": 16636, - "atlantis": 16637, - "aft": 16638, - "exits": 16639, - "oman": 16640, - "dresser": 16641, - "lyons": 16642, - "##sti": 16643, - "joking": 16644, - "eddy": 16645, - "judgement": 16646, - "omitted": 16647, - "digits": 16648, - "##cts": 16649, - "##game": 16650, - "juniors": 16651, - "##rae": 16652, - "cents": 16653, - "stricken": 16654, - "une": 16655, - "##ngo": 16656, - "wizards": 16657, - "weir": 16658, - "breton": 16659, - "nan": 16660, - "technician": 16661, - "fibers": 16662, - "liking": 16663, - "royalty": 16664, - "##cca": 16665, - "154": 16666, - "persia": 16667, - "terribly": 16668, - "magician": 16669, - "##rable": 16670, - "##unt": 16671, - "vance": 16672, - "cafeteria": 16673, - "booker": 16674, - "camille": 16675, - "warmer": 16676, - "##static": 16677, - "consume": 16678, - "cavern": 16679, - "gaps": 16680, - "compass": 16681, - "contemporaries": 16682, - "foyer": 16683, - "soothing": 16684, - "graveyard": 16685, - "maj": 16686, - "plunged": 16687, - "blush": 
16688, - "##wear": 16689, - "cascade": 16690, - "demonstrates": 16691, - "ordinance": 16692, - "##nov": 16693, - "boyle": 16694, - "##lana": 16695, - "rockefeller": 16696, - "shaken": 16697, - "banjo": 16698, - "izzy": 16699, - "##ense": 16700, - "breathless": 16701, - "vines": 16702, - "##32": 16703, - "##eman": 16704, - "alterations": 16705, - "chromosome": 16706, - "dwellings": 16707, - "feudal": 16708, - "mole": 16709, - "153": 16710, - "catalonia": 16711, - "relics": 16712, - "tenant": 16713, - "mandated": 16714, - "##fm": 16715, - "fridge": 16716, - "hats": 16717, - "honesty": 16718, - "patented": 16719, - "raul": 16720, - "heap": 16721, - "cruisers": 16722, - "accusing": 16723, - "enlightenment": 16724, - "infants": 16725, - "wherein": 16726, - "chatham": 16727, - "contractors": 16728, - "zen": 16729, - "affinity": 16730, - "hc": 16731, - "osborne": 16732, - "piston": 16733, - "156": 16734, - "traps": 16735, - "maturity": 16736, - "##rana": 16737, - "lagos": 16738, - "##zal": 16739, - "peering": 16740, - "##nay": 16741, - "attendant": 16742, - "dealers": 16743, - "protocols": 16744, - "subset": 16745, - "prospects": 16746, - "biographical": 16747, - "##cre": 16748, - "artery": 16749, - "##zers": 16750, - "insignia": 16751, - "nuns": 16752, - "endured": 16753, - "##eration": 16754, - "recommend": 16755, - "schwartz": 16756, - "serbs": 16757, - "berger": 16758, - "cromwell": 16759, - "crossroads": 16760, - "##ctor": 16761, - "enduring": 16762, - "clasped": 16763, - "grounded": 16764, - "##bine": 16765, - "marseille": 16766, - "twitched": 16767, - "abel": 16768, - "choke": 16769, - "https": 16770, - "catalyst": 16771, - "moldova": 16772, - "italians": 16773, - "##tist": 16774, - "disastrous": 16775, - "wee": 16776, - "##oured": 16777, - "##nti": 16778, - "wwf": 16779, - "nope": 16780, - "##piration": 16781, - "##asa": 16782, - "expresses": 16783, - "thumbs": 16784, - "167": 16785, - "##nza": 16786, - "coca": 16787, - "1781": 16788, - "cheating": 16789, - 
"##ption": 16790, - "skipped": 16791, - "sensory": 16792, - "heidelberg": 16793, - "spies": 16794, - "satan": 16795, - "dangers": 16796, - "semifinal": 16797, - "202": 16798, - "bohemia": 16799, - "whitish": 16800, - "confusing": 16801, - "shipbuilding": 16802, - "relies": 16803, - "surgeons": 16804, - "landings": 16805, - "ravi": 16806, - "baku": 16807, - "moor": 16808, - "suffix": 16809, - "alejandro": 16810, - "##yana": 16811, - "litre": 16812, - "upheld": 16813, - "##unk": 16814, - "rajasthan": 16815, - "##rek": 16816, - "coaster": 16817, - "insists": 16818, - "posture": 16819, - "scenarios": 16820, - "etienne": 16821, - "favoured": 16822, - "appoint": 16823, - "transgender": 16824, - "elephants": 16825, - "poked": 16826, - "greenwood": 16827, - "defences": 16828, - "fulfilled": 16829, - "militant": 16830, - "somali": 16831, - "1758": 16832, - "chalk": 16833, - "potent": 16834, - "##ucci": 16835, - "migrants": 16836, - "wink": 16837, - "assistants": 16838, - "nos": 16839, - "restriction": 16840, - "activism": 16841, - "niger": 16842, - "##ario": 16843, - "colon": 16844, - "shaun": 16845, - "##sat": 16846, - "daphne": 16847, - "##erated": 16848, - "swam": 16849, - "congregations": 16850, - "reprise": 16851, - "considerations": 16852, - "magnet": 16853, - "playable": 16854, - "xvi": 16855, - "##р": 16856, - "overthrow": 16857, - "tobias": 16858, - "knob": 16859, - "chavez": 16860, - "coding": 16861, - "##mers": 16862, - "propped": 16863, - "katrina": 16864, - "orient": 16865, - "newcomer": 16866, - "##suke": 16867, - "temperate": 16868, - "##pool": 16869, - "farmhouse": 16870, - "interrogation": 16871, - "##vd": 16872, - "committing": 16873, - "##vert": 16874, - "forthcoming": 16875, - "strawberry": 16876, - "joaquin": 16877, - "macau": 16878, - "ponds": 16879, - "shocking": 16880, - "siberia": 16881, - "##cellular": 16882, - "chant": 16883, - "contributors": 16884, - "##nant": 16885, - "##ologists": 16886, - "sped": 16887, - "absorb": 16888, - "hail": 16889, - 
"1782": 16890, - "spared": 16891, - "##hore": 16892, - "barbados": 16893, - "karate": 16894, - "opus": 16895, - "originates": 16896, - "saul": 16897, - "##xie": 16898, - "evergreen": 16899, - "leaped": 16900, - "##rock": 16901, - "correlation": 16902, - "exaggerated": 16903, - "weekday": 16904, - "unification": 16905, - "bump": 16906, - "tracing": 16907, - "brig": 16908, - "afb": 16909, - "pathways": 16910, - "utilizing": 16911, - "##ners": 16912, - "mod": 16913, - "mb": 16914, - "disturbance": 16915, - "kneeling": 16916, - "##stad": 16917, - "##guchi": 16918, - "100th": 16919, - "pune": 16920, - "##thy": 16921, - "decreasing": 16922, - "168": 16923, - "manipulation": 16924, - "miriam": 16925, - "academia": 16926, - "ecosystem": 16927, - "occupational": 16928, - "rbi": 16929, - "##lem": 16930, - "rift": 16931, - "##14": 16932, - "rotary": 16933, - "stacked": 16934, - "incorporation": 16935, - "awakening": 16936, - "generators": 16937, - "guerrero": 16938, - "racist": 16939, - "##omy": 16940, - "cyber": 16941, - "derivatives": 16942, - "culminated": 16943, - "allie": 16944, - "annals": 16945, - "panzer": 16946, - "sainte": 16947, - "wikipedia": 16948, - "pops": 16949, - "zu": 16950, - "austro": 16951, - "##vate": 16952, - "algerian": 16953, - "politely": 16954, - "nicholson": 16955, - "mornings": 16956, - "educate": 16957, - "tastes": 16958, - "thrill": 16959, - "dartmouth": 16960, - "##gating": 16961, - "db": 16962, - "##jee": 16963, - "regan": 16964, - "differing": 16965, - "concentrating": 16966, - "choreography": 16967, - "divinity": 16968, - "##media": 16969, - "pledged": 16970, - "alexandre": 16971, - "routing": 16972, - "gregor": 16973, - "madeline": 16974, - "##idal": 16975, - "apocalypse": 16976, - "##hora": 16977, - "gunfire": 16978, - "culminating": 16979, - "elves": 16980, - "fined": 16981, - "liang": 16982, - "lam": 16983, - "programmed": 16984, - "tar": 16985, - "guessing": 16986, - "transparency": 16987, - "gabrielle": 16988, - "##gna": 16989, - 
"cancellation": 16990, - "flexibility": 16991, - "##lining": 16992, - "accession": 16993, - "shea": 16994, - "stronghold": 16995, - "nets": 16996, - "specializes": 16997, - "##rgan": 16998, - "abused": 16999, - "hasan": 17000, - "sgt": 17001, - "ling": 17002, - "exceeding": 17003, - "##₄": 17004, - "admiration": 17005, - "supermarket": 17006, - "##ark": 17007, - "photographers": 17008, - "specialised": 17009, - "tilt": 17010, - "resonance": 17011, - "hmm": 17012, - "perfume": 17013, - "380": 17014, - "sami": 17015, - "threatens": 17016, - "garland": 17017, - "botany": 17018, - "guarding": 17019, - "boiled": 17020, - "greet": 17021, - "puppy": 17022, - "russo": 17023, - "supplier": 17024, - "wilmington": 17025, - "vibrant": 17026, - "vijay": 17027, - "##bius": 17028, - "paralympic": 17029, - "grumbled": 17030, - "paige": 17031, - "faa": 17032, - "licking": 17033, - "margins": 17034, - "hurricanes": 17035, - "##gong": 17036, - "fest": 17037, - "grenade": 17038, - "ripping": 17039, - "##uz": 17040, - "counseling": 17041, - "weigh": 17042, - "##sian": 17043, - "needles": 17044, - "wiltshire": 17045, - "edison": 17046, - "costly": 17047, - "##not": 17048, - "fulton": 17049, - "tramway": 17050, - "redesigned": 17051, - "staffordshire": 17052, - "cache": 17053, - "gasping": 17054, - "watkins": 17055, - "sleepy": 17056, - "candidacy": 17057, - "##group": 17058, - "monkeys": 17059, - "timeline": 17060, - "throbbing": 17061, - "##bid": 17062, - "##sos": 17063, - "berth": 17064, - "uzbekistan": 17065, - "vanderbilt": 17066, - "bothering": 17067, - "overturned": 17068, - "ballots": 17069, - "gem": 17070, - "##iger": 17071, - "sunglasses": 17072, - "subscribers": 17073, - "hooker": 17074, - "compelling": 17075, - "ang": 17076, - "exceptionally": 17077, - "saloon": 17078, - "stab": 17079, - "##rdi": 17080, - "carla": 17081, - "terrifying": 17082, - "rom": 17083, - "##vision": 17084, - "coil": 17085, - "##oids": 17086, - "satisfying": 17087, - "vendors": 17088, - "31st": 17089, - 
"mackay": 17090, - "deities": 17091, - "overlooked": 17092, - "ambient": 17093, - "bahamas": 17094, - "felipe": 17095, - "olympia": 17096, - "whirled": 17097, - "botanist": 17098, - "advertised": 17099, - "tugging": 17100, - "##dden": 17101, - "disciples": 17102, - "morales": 17103, - "unionist": 17104, - "rites": 17105, - "foley": 17106, - "morse": 17107, - "motives": 17108, - "creepy": 17109, - "##₀": 17110, - "soo": 17111, - "##sz": 17112, - "bargain": 17113, - "highness": 17114, - "frightening": 17115, - "turnpike": 17116, - "tory": 17117, - "reorganization": 17118, - "##cer": 17119, - "depict": 17120, - "biographer": 17121, - "##walk": 17122, - "unopposed": 17123, - "manifesto": 17124, - "##gles": 17125, - "institut": 17126, - "emile": 17127, - "accidental": 17128, - "kapoor": 17129, - "##dam": 17130, - "kilkenny": 17131, - "cortex": 17132, - "lively": 17133, - "##13": 17134, - "romanesque": 17135, - "jain": 17136, - "shan": 17137, - "cannons": 17138, - "##ood": 17139, - "##ske": 17140, - "petrol": 17141, - "echoing": 17142, - "amalgamated": 17143, - "disappears": 17144, - "cautious": 17145, - "proposes": 17146, - "sanctions": 17147, - "trenton": 17148, - "##ر": 17149, - "flotilla": 17150, - "aus": 17151, - "contempt": 17152, - "tor": 17153, - "canary": 17154, - "cote": 17155, - "theirs": 17156, - "##hun": 17157, - "conceptual": 17158, - "deleted": 17159, - "fascinating": 17160, - "paso": 17161, - "blazing": 17162, - "elf": 17163, - "honourable": 17164, - "hutchinson": 17165, - "##eiro": 17166, - "##outh": 17167, - "##zin": 17168, - "surveyor": 17169, - "tee": 17170, - "amidst": 17171, - "wooded": 17172, - "reissue": 17173, - "intro": 17174, - "##ono": 17175, - "cobb": 17176, - "shelters": 17177, - "newsletter": 17178, - "hanson": 17179, - "brace": 17180, - "encoding": 17181, - "confiscated": 17182, - "dem": 17183, - "caravan": 17184, - "marino": 17185, - "scroll": 17186, - "melodic": 17187, - "cows": 17188, - "imam": 17189, - "##adi": 17190, - "##aneous": 
17191, - "northward": 17192, - "searches": 17193, - "biodiversity": 17194, - "cora": 17195, - "310": 17196, - "roaring": 17197, - "##bers": 17198, - "connell": 17199, - "theologian": 17200, - "halo": 17201, - "compose": 17202, - "pathetic": 17203, - "unmarried": 17204, - "dynamo": 17205, - "##oot": 17206, - "az": 17207, - "calculation": 17208, - "toulouse": 17209, - "deserves": 17210, - "humour": 17211, - "nr": 17212, - "forgiveness": 17213, - "tam": 17214, - "undergone": 17215, - "martyr": 17216, - "pamela": 17217, - "myths": 17218, - "whore": 17219, - "counselor": 17220, - "hicks": 17221, - "290": 17222, - "heavens": 17223, - "battleship": 17224, - "electromagnetic": 17225, - "##bbs": 17226, - "stellar": 17227, - "establishments": 17228, - "presley": 17229, - "hopped": 17230, - "##chin": 17231, - "temptation": 17232, - "90s": 17233, - "wills": 17234, - "nas": 17235, - "##yuan": 17236, - "nhs": 17237, - "##nya": 17238, - "seminars": 17239, - "##yev": 17240, - "adaptations": 17241, - "gong": 17242, - "asher": 17243, - "lex": 17244, - "indicator": 17245, - "sikh": 17246, - "tobago": 17247, - "cites": 17248, - "goin": 17249, - "##yte": 17250, - "satirical": 17251, - "##gies": 17252, - "characterised": 17253, - "correspond": 17254, - "bubbles": 17255, - "lure": 17256, - "participates": 17257, - "##vid": 17258, - "eruption": 17259, - "skate": 17260, - "therapeutic": 17261, - "1785": 17262, - "canals": 17263, - "wholesale": 17264, - "defaulted": 17265, - "sac": 17266, - "460": 17267, - "petit": 17268, - "##zzled": 17269, - "virgil": 17270, - "leak": 17271, - "ravens": 17272, - "256": 17273, - "portraying": 17274, - "##yx": 17275, - "ghetto": 17276, - "creators": 17277, - "dams": 17278, - "portray": 17279, - "vicente": 17280, - "##rington": 17281, - "fae": 17282, - "namesake": 17283, - "bounty": 17284, - "##arium": 17285, - "joachim": 17286, - "##ota": 17287, - "##iser": 17288, - "aforementioned": 17289, - "axle": 17290, - "snout": 17291, - "depended": 17292, - 
"dismantled": 17293, - "reuben": 17294, - "480": 17295, - "##ibly": 17296, - "gallagher": 17297, - "##lau": 17298, - "##pd": 17299, - "earnest": 17300, - "##ieu": 17301, - "##iary": 17302, - "inflicted": 17303, - "objections": 17304, - "##llar": 17305, - "asa": 17306, - "gritted": 17307, - "##athy": 17308, - "jericho": 17309, - "##sea": 17310, - "##was": 17311, - "flick": 17312, - "underside": 17313, - "ceramics": 17314, - "undead": 17315, - "substituted": 17316, - "195": 17317, - "eastward": 17318, - "undoubtedly": 17319, - "wheeled": 17320, - "chimney": 17321, - "##iche": 17322, - "guinness": 17323, - "cb": 17324, - "##ager": 17325, - "siding": 17326, - "##bell": 17327, - "traitor": 17328, - "baptiste": 17329, - "disguised": 17330, - "inauguration": 17331, - "149": 17332, - "tipperary": 17333, - "choreographer": 17334, - "perched": 17335, - "warmed": 17336, - "stationary": 17337, - "eco": 17338, - "##ike": 17339, - "##ntes": 17340, - "bacterial": 17341, - "##aurus": 17342, - "flores": 17343, - "phosphate": 17344, - "##core": 17345, - "attacker": 17346, - "invaders": 17347, - "alvin": 17348, - "intersects": 17349, - "a1": 17350, - "indirectly": 17351, - "immigrated": 17352, - "businessmen": 17353, - "cornelius": 17354, - "valves": 17355, - "narrated": 17356, - "pill": 17357, - "sober": 17358, - "ul": 17359, - "nationale": 17360, - "monastic": 17361, - "applicants": 17362, - "scenery": 17363, - "##jack": 17364, - "161": 17365, - "motifs": 17366, - "constitutes": 17367, - "cpu": 17368, - "##osh": 17369, - "jurisdictions": 17370, - "sd": 17371, - "tuning": 17372, - "irritation": 17373, - "woven": 17374, - "##uddin": 17375, - "fertility": 17376, - "gao": 17377, - "##erie": 17378, - "antagonist": 17379, - "impatient": 17380, - "glacial": 17381, - "hides": 17382, - "boarded": 17383, - "denominations": 17384, - "interception": 17385, - "##jas": 17386, - "cookie": 17387, - "nicola": 17388, - "##tee": 17389, - "algebraic": 17390, - "marquess": 17391, - "bahn": 17392, - 
"parole": 17393, - "buyers": 17394, - "bait": 17395, - "turbines": 17396, - "paperwork": 17397, - "bestowed": 17398, - "natasha": 17399, - "renee": 17400, - "oceans": 17401, - "purchases": 17402, - "157": 17403, - "vaccine": 17404, - "215": 17405, - "##tock": 17406, - "fixtures": 17407, - "playhouse": 17408, - "integrate": 17409, - "jai": 17410, - "oswald": 17411, - "intellectuals": 17412, - "##cky": 17413, - "booked": 17414, - "nests": 17415, - "mortimer": 17416, - "##isi": 17417, - "obsession": 17418, - "sept": 17419, - "##gler": 17420, - "##sum": 17421, - "440": 17422, - "scrutiny": 17423, - "simultaneous": 17424, - "squinted": 17425, - "##shin": 17426, - "collects": 17427, - "oven": 17428, - "shankar": 17429, - "penned": 17430, - "remarkably": 17431, - "##я": 17432, - "slips": 17433, - "luggage": 17434, - "spectral": 17435, - "1786": 17436, - "collaborations": 17437, - "louie": 17438, - "consolidation": 17439, - "##ailed": 17440, - "##ivating": 17441, - "420": 17442, - "hoover": 17443, - "blackpool": 17444, - "harness": 17445, - "ignition": 17446, - "vest": 17447, - "tails": 17448, - "belmont": 17449, - "mongol": 17450, - "skinner": 17451, - "##nae": 17452, - "visually": 17453, - "mage": 17454, - "derry": 17455, - "##tism": 17456, - "##unce": 17457, - "stevie": 17458, - "transitional": 17459, - "##rdy": 17460, - "redskins": 17461, - "drying": 17462, - "prep": 17463, - "prospective": 17464, - "##21": 17465, - "annoyance": 17466, - "oversee": 17467, - "##loaded": 17468, - "fills": 17469, - "##books": 17470, - "##iki": 17471, - "announces": 17472, - "fda": 17473, - "scowled": 17474, - "respects": 17475, - "prasad": 17476, - "mystic": 17477, - "tucson": 17478, - "##vale": 17479, - "revue": 17480, - "springer": 17481, - "bankrupt": 17482, - "1772": 17483, - "aristotle": 17484, - "salvatore": 17485, - "habsburg": 17486, - "##geny": 17487, - "dal": 17488, - "natal": 17489, - "nut": 17490, - "pod": 17491, - "chewing": 17492, - "darts": 17493, - "moroccan": 17494, - 
"walkover": 17495, - "rosario": 17496, - "lenin": 17497, - "punjabi": 17498, - "##ße": 17499, - "grossed": 17500, - "scattering": 17501, - "wired": 17502, - "invasive": 17503, - "hui": 17504, - "polynomial": 17505, - "corridors": 17506, - "wakes": 17507, - "gina": 17508, - "portrays": 17509, - "##cratic": 17510, - "arid": 17511, - "retreating": 17512, - "erich": 17513, - "irwin": 17514, - "sniper": 17515, - "##dha": 17516, - "linen": 17517, - "lindsey": 17518, - "maneuver": 17519, - "butch": 17520, - "shutting": 17521, - "socio": 17522, - "bounce": 17523, - "commemorative": 17524, - "postseason": 17525, - "jeremiah": 17526, - "pines": 17527, - "275": 17528, - "mystical": 17529, - "beads": 17530, - "bp": 17531, - "abbas": 17532, - "furnace": 17533, - "bidding": 17534, - "consulted": 17535, - "assaulted": 17536, - "empirical": 17537, - "rubble": 17538, - "enclosure": 17539, - "sob": 17540, - "weakly": 17541, - "cancel": 17542, - "polly": 17543, - "yielded": 17544, - "##emann": 17545, - "curly": 17546, - "prediction": 17547, - "battered": 17548, - "70s": 17549, - "vhs": 17550, - "jacqueline": 17551, - "render": 17552, - "sails": 17553, - "barked": 17554, - "detailing": 17555, - "grayson": 17556, - "riga": 17557, - "sloane": 17558, - "raging": 17559, - "##yah": 17560, - "herbs": 17561, - "bravo": 17562, - "##athlon": 17563, - "alloy": 17564, - "giggle": 17565, - "imminent": 17566, - "suffers": 17567, - "assumptions": 17568, - "waltz": 17569, - "##itate": 17570, - "accomplishments": 17571, - "##ited": 17572, - "bathing": 17573, - "remixed": 17574, - "deception": 17575, - "prefix": 17576, - "##emia": 17577, - "deepest": 17578, - "##tier": 17579, - "##eis": 17580, - "balkan": 17581, - "frogs": 17582, - "##rong": 17583, - "slab": 17584, - "##pate": 17585, - "philosophers": 17586, - "peterborough": 17587, - "grains": 17588, - "imports": 17589, - "dickinson": 17590, - "rwanda": 17591, - "##atics": 17592, - "1774": 17593, - "dirk": 17594, - "lan": 17595, - "tablets": 17596, - 
"##rove": 17597, - "clone": 17598, - "##rice": 17599, - "caretaker": 17600, - "hostilities": 17601, - "mclean": 17602, - "##gre": 17603, - "regimental": 17604, - "treasures": 17605, - "norms": 17606, - "impose": 17607, - "tsar": 17608, - "tango": 17609, - "diplomacy": 17610, - "variously": 17611, - "complain": 17612, - "192": 17613, - "recognise": 17614, - "arrests": 17615, - "1779": 17616, - "celestial": 17617, - "pulitzer": 17618, - "##dus": 17619, - "bing": 17620, - "libretto": 17621, - "##moor": 17622, - "adele": 17623, - "splash": 17624, - "##rite": 17625, - "expectation": 17626, - "lds": 17627, - "confronts": 17628, - "##izer": 17629, - "spontaneous": 17630, - "harmful": 17631, - "wedge": 17632, - "entrepreneurs": 17633, - "buyer": 17634, - "##ope": 17635, - "bilingual": 17636, - "translate": 17637, - "rugged": 17638, - "conner": 17639, - "circulated": 17640, - "uae": 17641, - "eaton": 17642, - "##gra": 17643, - "##zzle": 17644, - "lingered": 17645, - "lockheed": 17646, - "vishnu": 17647, - "reelection": 17648, - "alonso": 17649, - "##oom": 17650, - "joints": 17651, - "yankee": 17652, - "headline": 17653, - "cooperate": 17654, - "heinz": 17655, - "laureate": 17656, - "invading": 17657, - "##sford": 17658, - "echoes": 17659, - "scandinavian": 17660, - "##dham": 17661, - "hugging": 17662, - "vitamin": 17663, - "salute": 17664, - "micah": 17665, - "hind": 17666, - "trader": 17667, - "##sper": 17668, - "radioactive": 17669, - "##ndra": 17670, - "militants": 17671, - "poisoned": 17672, - "ratified": 17673, - "remark": 17674, - "campeonato": 17675, - "deprived": 17676, - "wander": 17677, - "prop": 17678, - "##dong": 17679, - "outlook": 17680, - "##tani": 17681, - "##rix": 17682, - "##eye": 17683, - "chiang": 17684, - "darcy": 17685, - "##oping": 17686, - "mandolin": 17687, - "spice": 17688, - "statesman": 17689, - "babylon": 17690, - "182": 17691, - "walled": 17692, - "forgetting": 17693, - "afro": 17694, - "##cap": 17695, - "158": 17696, - "giorgio": 17697, - 
"buffer": 17698, - "##polis": 17699, - "planetary": 17700, - "##gis": 17701, - "overlap": 17702, - "terminals": 17703, - "kinda": 17704, - "centenary": 17705, - "##bir": 17706, - "arising": 17707, - "manipulate": 17708, - "elm": 17709, - "ke": 17710, - "1770": 17711, - "ak": 17712, - "##tad": 17713, - "chrysler": 17714, - "mapped": 17715, - "moose": 17716, - "pomeranian": 17717, - "quad": 17718, - "macarthur": 17719, - "assemblies": 17720, - "shoreline": 17721, - "recalls": 17722, - "stratford": 17723, - "##rted": 17724, - "noticeable": 17725, - "##evic": 17726, - "imp": 17727, - "##rita": 17728, - "##sque": 17729, - "accustomed": 17730, - "supplying": 17731, - "tents": 17732, - "disgusted": 17733, - "vogue": 17734, - "sipped": 17735, - "filters": 17736, - "khz": 17737, - "reno": 17738, - "selecting": 17739, - "luftwaffe": 17740, - "mcmahon": 17741, - "tyne": 17742, - "masterpiece": 17743, - "carriages": 17744, - "collided": 17745, - "dunes": 17746, - "exercised": 17747, - "flare": 17748, - "remembers": 17749, - "muzzle": 17750, - "##mobile": 17751, - "heck": 17752, - "##rson": 17753, - "burgess": 17754, - "lunged": 17755, - "middleton": 17756, - "boycott": 17757, - "bilateral": 17758, - "##sity": 17759, - "hazardous": 17760, - "lumpur": 17761, - "multiplayer": 17762, - "spotlight": 17763, - "jackets": 17764, - "goldman": 17765, - "liege": 17766, - "porcelain": 17767, - "rag": 17768, - "waterford": 17769, - "benz": 17770, - "attracts": 17771, - "hopeful": 17772, - "battling": 17773, - "ottomans": 17774, - "kensington": 17775, - "baked": 17776, - "hymns": 17777, - "cheyenne": 17778, - "lattice": 17779, - "levine": 17780, - "borrow": 17781, - "polymer": 17782, - "clashes": 17783, - "michaels": 17784, - "monitored": 17785, - "commitments": 17786, - "denounced": 17787, - "##25": 17788, - "##von": 17789, - "cavity": 17790, - "##oney": 17791, - "hobby": 17792, - "akin": 17793, - "##holders": 17794, - "futures": 17795, - "intricate": 17796, - "cornish": 17797, - "patty": 
17798, - "##oned": 17799, - "illegally": 17800, - "dolphin": 17801, - "##lag": 17802, - "barlow": 17803, - "yellowish": 17804, - "maddie": 17805, - "apologized": 17806, - "luton": 17807, - "plagued": 17808, - "##puram": 17809, - "nana": 17810, - "##rds": 17811, - "sway": 17812, - "fanny": 17813, - "łodz": 17814, - "##rino": 17815, - "psi": 17816, - "suspicions": 17817, - "hanged": 17818, - "##eding": 17819, - "initiate": 17820, - "charlton": 17821, - "##por": 17822, - "nak": 17823, - "competent": 17824, - "235": 17825, - "analytical": 17826, - "annex": 17827, - "wardrobe": 17828, - "reservations": 17829, - "##rma": 17830, - "sect": 17831, - "162": 17832, - "fairfax": 17833, - "hedge": 17834, - "piled": 17835, - "buckingham": 17836, - "uneven": 17837, - "bauer": 17838, - "simplicity": 17839, - "snyder": 17840, - "interpret": 17841, - "accountability": 17842, - "donors": 17843, - "moderately": 17844, - "byrd": 17845, - "continents": 17846, - "##cite": 17847, - "##max": 17848, - "disciple": 17849, - "hr": 17850, - "jamaican": 17851, - "ping": 17852, - "nominees": 17853, - "##uss": 17854, - "mongolian": 17855, - "diver": 17856, - "attackers": 17857, - "eagerly": 17858, - "ideological": 17859, - "pillows": 17860, - "miracles": 17861, - "apartheid": 17862, - "revolver": 17863, - "sulfur": 17864, - "clinics": 17865, - "moran": 17866, - "163": 17867, - "##enko": 17868, - "ile": 17869, - "katy": 17870, - "rhetoric": 17871, - "##icated": 17872, - "chronology": 17873, - "recycling": 17874, - "##hrer": 17875, - "elongated": 17876, - "mughal": 17877, - "pascal": 17878, - "profiles": 17879, - "vibration": 17880, - "databases": 17881, - "domination": 17882, - "##fare": 17883, - "##rant": 17884, - "matthias": 17885, - "digest": 17886, - "rehearsal": 17887, - "polling": 17888, - "weiss": 17889, - "initiation": 17890, - "reeves": 17891, - "clinging": 17892, - "flourished": 17893, - "impress": 17894, - "ngo": 17895, - "##hoff": 17896, - "##ume": 17897, - "buckley": 17898, - 
"symposium": 17899, - "rhythms": 17900, - "weed": 17901, - "emphasize": 17902, - "transforming": 17903, - "##taking": 17904, - "##gence": 17905, - "##yman": 17906, - "accountant": 17907, - "analyze": 17908, - "flicker": 17909, - "foil": 17910, - "priesthood": 17911, - "voluntarily": 17912, - "decreases": 17913, - "##80": 17914, - "##hya": 17915, - "slater": 17916, - "sv": 17917, - "charting": 17918, - "mcgill": 17919, - "##lde": 17920, - "moreno": 17921, - "##iu": 17922, - "besieged": 17923, - "zur": 17924, - "robes": 17925, - "##phic": 17926, - "admitting": 17927, - "api": 17928, - "deported": 17929, - "turmoil": 17930, - "peyton": 17931, - "earthquakes": 17932, - "##ares": 17933, - "nationalists": 17934, - "beau": 17935, - "clair": 17936, - "brethren": 17937, - "interrupt": 17938, - "welch": 17939, - "curated": 17940, - "galerie": 17941, - "requesting": 17942, - "164": 17943, - "##ested": 17944, - "impending": 17945, - "steward": 17946, - "viper": 17947, - "##vina": 17948, - "complaining": 17949, - "beautifully": 17950, - "brandy": 17951, - "foam": 17952, - "nl": 17953, - "1660": 17954, - "##cake": 17955, - "alessandro": 17956, - "punches": 17957, - "laced": 17958, - "explanations": 17959, - "##lim": 17960, - "attribute": 17961, - "clit": 17962, - "reggie": 17963, - "discomfort": 17964, - "##cards": 17965, - "smoothed": 17966, - "whales": 17967, - "##cene": 17968, - "adler": 17969, - "countered": 17970, - "duffy": 17971, - "disciplinary": 17972, - "widening": 17973, - "recipe": 17974, - "reliance": 17975, - "conducts": 17976, - "goats": 17977, - "gradient": 17978, - "preaching": 17979, - "##shaw": 17980, - "matilda": 17981, - "quasi": 17982, - "striped": 17983, - "meridian": 17984, - "cannabis": 17985, - "cordoba": 17986, - "certificates": 17987, - "##agh": 17988, - "##tering": 17989, - "graffiti": 17990, - "hangs": 17991, - "pilgrims": 17992, - "repeats": 17993, - "##ych": 17994, - "revive": 17995, - "urine": 17996, - "etat": 17997, - "##hawk": 17998, - 
"fueled": 17999, - "belts": 18000, - "fuzzy": 18001, - "susceptible": 18002, - "##hang": 18003, - "mauritius": 18004, - "salle": 18005, - "sincere": 18006, - "beers": 18007, - "hooks": 18008, - "##cki": 18009, - "arbitration": 18010, - "entrusted": 18011, - "advise": 18012, - "sniffed": 18013, - "seminar": 18014, - "junk": 18015, - "donnell": 18016, - "processors": 18017, - "principality": 18018, - "strapped": 18019, - "celia": 18020, - "mendoza": 18021, - "everton": 18022, - "fortunes": 18023, - "prejudice": 18024, - "starving": 18025, - "reassigned": 18026, - "steamer": 18027, - "##lund": 18028, - "tuck": 18029, - "evenly": 18030, - "foreman": 18031, - "##ffen": 18032, - "dans": 18033, - "375": 18034, - "envisioned": 18035, - "slit": 18036, - "##xy": 18037, - "baseman": 18038, - "liberia": 18039, - "rosemary": 18040, - "##weed": 18041, - "electrified": 18042, - "periodically": 18043, - "potassium": 18044, - "stride": 18045, - "contexts": 18046, - "sperm": 18047, - "slade": 18048, - "mariners": 18049, - "influx": 18050, - "bianca": 18051, - "subcommittee": 18052, - "##rane": 18053, - "spilling": 18054, - "icao": 18055, - "estuary": 18056, - "##nock": 18057, - "delivers": 18058, - "iphone": 18059, - "##ulata": 18060, - "isa": 18061, - "mira": 18062, - "bohemian": 18063, - "dessert": 18064, - "##sbury": 18065, - "welcoming": 18066, - "proudly": 18067, - "slowing": 18068, - "##chs": 18069, - "musee": 18070, - "ascension": 18071, - "russ": 18072, - "##vian": 18073, - "waits": 18074, - "##psy": 18075, - "africans": 18076, - "exploit": 18077, - "##morphic": 18078, - "gov": 18079, - "eccentric": 18080, - "crab": 18081, - "peck": 18082, - "##ull": 18083, - "entrances": 18084, - "formidable": 18085, - "marketplace": 18086, - "groom": 18087, - "bolted": 18088, - "metabolism": 18089, - "patton": 18090, - "robbins": 18091, - "courier": 18092, - "payload": 18093, - "endure": 18094, - "##ifier": 18095, - "andes": 18096, - "refrigerator": 18097, - "##pr": 18098, - "ornate": 
18099, - "##uca": 18100, - "ruthless": 18101, - "illegitimate": 18102, - "masonry": 18103, - "strasbourg": 18104, - "bikes": 18105, - "adobe": 18106, - "##³": 18107, - "apples": 18108, - "quintet": 18109, - "willingly": 18110, - "niche": 18111, - "bakery": 18112, - "corpses": 18113, - "energetic": 18114, - "##cliffe": 18115, - "##sser": 18116, - "##ards": 18117, - "177": 18118, - "centimeters": 18119, - "centro": 18120, - "fuscous": 18121, - "cretaceous": 18122, - "rancho": 18123, - "##yde": 18124, - "andrei": 18125, - "telecom": 18126, - "tottenham": 18127, - "oasis": 18128, - "ordination": 18129, - "vulnerability": 18130, - "presiding": 18131, - "corey": 18132, - "cp": 18133, - "penguins": 18134, - "sims": 18135, - "##pis": 18136, - "malawi": 18137, - "piss": 18138, - "##48": 18139, - "correction": 18140, - "##cked": 18141, - "##ffle": 18142, - "##ryn": 18143, - "countdown": 18144, - "detectives": 18145, - "psychiatrist": 18146, - "psychedelic": 18147, - "dinosaurs": 18148, - "blouse": 18149, - "##get": 18150, - "choi": 18151, - "vowed": 18152, - "##oz": 18153, - "randomly": 18154, - "##pol": 18155, - "49ers": 18156, - "scrub": 18157, - "blanche": 18158, - "bruins": 18159, - "dusseldorf": 18160, - "##using": 18161, - "unwanted": 18162, - "##ums": 18163, - "212": 18164, - "dominique": 18165, - "elevations": 18166, - "headlights": 18167, - "om": 18168, - "laguna": 18169, - "##oga": 18170, - "1750": 18171, - "famously": 18172, - "ignorance": 18173, - "shrewsbury": 18174, - "##aine": 18175, - "ajax": 18176, - "breuning": 18177, - "che": 18178, - "confederacy": 18179, - "greco": 18180, - "overhaul": 18181, - "##screen": 18182, - "paz": 18183, - "skirts": 18184, - "disagreement": 18185, - "cruelty": 18186, - "jagged": 18187, - "phoebe": 18188, - "shifter": 18189, - "hovered": 18190, - "viruses": 18191, - "##wes": 18192, - "mandy": 18193, - "##lined": 18194, - "##gc": 18195, - "landlord": 18196, - "squirrel": 18197, - "dashed": 18198, - "##ι": 18199, - "ornamental": 
18200, - "gag": 18201, - "wally": 18202, - "grange": 18203, - "literal": 18204, - "spurs": 18205, - "undisclosed": 18206, - "proceeding": 18207, - "yin": 18208, - "##text": 18209, - "billie": 18210, - "orphan": 18211, - "spanned": 18212, - "humidity": 18213, - "indy": 18214, - "weighted": 18215, - "presentations": 18216, - "explosions": 18217, - "lucian": 18218, - "##tary": 18219, - "vaughn": 18220, - "hindus": 18221, - "##anga": 18222, - "##hell": 18223, - "psycho": 18224, - "171": 18225, - "daytona": 18226, - "protects": 18227, - "efficiently": 18228, - "rematch": 18229, - "sly": 18230, - "tandem": 18231, - "##oya": 18232, - "rebranded": 18233, - "impaired": 18234, - "hee": 18235, - "metropolis": 18236, - "peach": 18237, - "godfrey": 18238, - "diaspora": 18239, - "ethnicity": 18240, - "prosperous": 18241, - "gleaming": 18242, - "dar": 18243, - "grossing": 18244, - "playback": 18245, - "##rden": 18246, - "stripe": 18247, - "pistols": 18248, - "##tain": 18249, - "births": 18250, - "labelled": 18251, - "##cating": 18252, - "172": 18253, - "rudy": 18254, - "alba": 18255, - "##onne": 18256, - "aquarium": 18257, - "hostility": 18258, - "##gb": 18259, - "##tase": 18260, - "shudder": 18261, - "sumatra": 18262, - "hardest": 18263, - "lakers": 18264, - "consonant": 18265, - "creeping": 18266, - "demos": 18267, - "homicide": 18268, - "capsule": 18269, - "zeke": 18270, - "liberties": 18271, - "expulsion": 18272, - "pueblo": 18273, - "##comb": 18274, - "trait": 18275, - "transporting": 18276, - "##ddin": 18277, - "##neck": 18278, - "##yna": 18279, - "depart": 18280, - "gregg": 18281, - "mold": 18282, - "ledge": 18283, - "hangar": 18284, - "oldham": 18285, - "playboy": 18286, - "termination": 18287, - "analysts": 18288, - "gmbh": 18289, - "romero": 18290, - "##itic": 18291, - "insist": 18292, - "cradle": 18293, - "filthy": 18294, - "brightness": 18295, - "slash": 18296, - "shootout": 18297, - "deposed": 18298, - "bordering": 18299, - "##truct": 18300, - "isis": 18301, - 
"microwave": 18302, - "tumbled": 18303, - "sheltered": 18304, - "cathy": 18305, - "werewolves": 18306, - "messy": 18307, - "andersen": 18308, - "convex": 18309, - "clapped": 18310, - "clinched": 18311, - "satire": 18312, - "wasting": 18313, - "edo": 18314, - "vc": 18315, - "rufus": 18316, - "##jak": 18317, - "mont": 18318, - "##etti": 18319, - "poznan": 18320, - "##keeping": 18321, - "restructuring": 18322, - "transverse": 18323, - "##rland": 18324, - "azerbaijani": 18325, - "slovene": 18326, - "gestures": 18327, - "roommate": 18328, - "choking": 18329, - "shear": 18330, - "##quist": 18331, - "vanguard": 18332, - "oblivious": 18333, - "##hiro": 18334, - "disagreed": 18335, - "baptism": 18336, - "##lich": 18337, - "coliseum": 18338, - "##aceae": 18339, - "salvage": 18340, - "societe": 18341, - "cory": 18342, - "locke": 18343, - "relocation": 18344, - "relying": 18345, - "versailles": 18346, - "ahl": 18347, - "swelling": 18348, - "##elo": 18349, - "cheerful": 18350, - "##word": 18351, - "##edes": 18352, - "gin": 18353, - "sarajevo": 18354, - "obstacle": 18355, - "diverted": 18356, - "##nac": 18357, - "messed": 18358, - "thoroughbred": 18359, - "fluttered": 18360, - "utrecht": 18361, - "chewed": 18362, - "acquaintance": 18363, - "assassins": 18364, - "dispatch": 18365, - "mirza": 18366, - "##wart": 18367, - "nike": 18368, - "salzburg": 18369, - "swell": 18370, - "yen": 18371, - "##gee": 18372, - "idle": 18373, - "ligue": 18374, - "samson": 18375, - "##nds": 18376, - "##igh": 18377, - "playful": 18378, - "spawned": 18379, - "##cise": 18380, - "tease": 18381, - "##case": 18382, - "burgundy": 18383, - "##bot": 18384, - "stirring": 18385, - "skeptical": 18386, - "interceptions": 18387, - "marathi": 18388, - "##dies": 18389, - "bedrooms": 18390, - "aroused": 18391, - "pinch": 18392, - "##lik": 18393, - "preferences": 18394, - "tattoos": 18395, - "buster": 18396, - "digitally": 18397, - "projecting": 18398, - "rust": 18399, - "##ital": 18400, - "kitten": 18401, - 
"priorities": 18402, - "addison": 18403, - "pseudo": 18404, - "##guard": 18405, - "dusk": 18406, - "icons": 18407, - "sermon": 18408, - "##psis": 18409, - "##iba": 18410, - "bt": 18411, - "##lift": 18412, - "##xt": 18413, - "ju": 18414, - "truce": 18415, - "rink": 18416, - "##dah": 18417, - "##wy": 18418, - "defects": 18419, - "psychiatry": 18420, - "offences": 18421, - "calculate": 18422, - "glucose": 18423, - "##iful": 18424, - "##rized": 18425, - "##unda": 18426, - "francaise": 18427, - "##hari": 18428, - "richest": 18429, - "warwickshire": 18430, - "carly": 18431, - "1763": 18432, - "purity": 18433, - "redemption": 18434, - "lending": 18435, - "##cious": 18436, - "muse": 18437, - "bruises": 18438, - "cerebral": 18439, - "aero": 18440, - "carving": 18441, - "##name": 18442, - "preface": 18443, - "terminology": 18444, - "invade": 18445, - "monty": 18446, - "##int": 18447, - "anarchist": 18448, - "blurred": 18449, - "##iled": 18450, - "rossi": 18451, - "treats": 18452, - "guts": 18453, - "shu": 18454, - "foothills": 18455, - "ballads": 18456, - "undertaking": 18457, - "premise": 18458, - "cecilia": 18459, - "affiliates": 18460, - "blasted": 18461, - "conditional": 18462, - "wilder": 18463, - "minors": 18464, - "drone": 18465, - "rudolph": 18466, - "buffy": 18467, - "swallowing": 18468, - "horton": 18469, - "attested": 18470, - "##hop": 18471, - "rutherford": 18472, - "howell": 18473, - "primetime": 18474, - "livery": 18475, - "penal": 18476, - "##bis": 18477, - "minimize": 18478, - "hydro": 18479, - "wrecked": 18480, - "wrought": 18481, - "palazzo": 18482, - "##gling": 18483, - "cans": 18484, - "vernacular": 18485, - "friedman": 18486, - "nobleman": 18487, - "shale": 18488, - "walnut": 18489, - "danielle": 18490, - "##ection": 18491, - "##tley": 18492, - "sears": 18493, - "##kumar": 18494, - "chords": 18495, - "lend": 18496, - "flipping": 18497, - "streamed": 18498, - "por": 18499, - "dracula": 18500, - "gallons": 18501, - "sacrifices": 18502, - "gamble": 18503, - 
"orphanage": 18504, - "##iman": 18505, - "mckenzie": 18506, - "##gible": 18507, - "boxers": 18508, - "daly": 18509, - "##balls": 18510, - "##ان": 18511, - "208": 18512, - "##ific": 18513, - "##rative": 18514, - "##iq": 18515, - "exploited": 18516, - "slated": 18517, - "##uity": 18518, - "circling": 18519, - "hillary": 18520, - "pinched": 18521, - "goldberg": 18522, - "provost": 18523, - "campaigning": 18524, - "lim": 18525, - "piles": 18526, - "ironically": 18527, - "jong": 18528, - "mohan": 18529, - "successors": 18530, - "usaf": 18531, - "##tem": 18532, - "##ught": 18533, - "autobiographical": 18534, - "haute": 18535, - "preserves": 18536, - "##ending": 18537, - "acquitted": 18538, - "comparisons": 18539, - "203": 18540, - "hydroelectric": 18541, - "gangs": 18542, - "cypriot": 18543, - "torpedoes": 18544, - "rushes": 18545, - "chrome": 18546, - "derive": 18547, - "bumps": 18548, - "instability": 18549, - "fiat": 18550, - "pets": 18551, - "##mbe": 18552, - "silas": 18553, - "dye": 18554, - "reckless": 18555, - "settler": 18556, - "##itation": 18557, - "info": 18558, - "heats": 18559, - "##writing": 18560, - "176": 18561, - "canonical": 18562, - "maltese": 18563, - "fins": 18564, - "mushroom": 18565, - "stacy": 18566, - "aspen": 18567, - "avid": 18568, - "##kur": 18569, - "##loading": 18570, - "vickers": 18571, - "gaston": 18572, - "hillside": 18573, - "statutes": 18574, - "wilde": 18575, - "gail": 18576, - "kung": 18577, - "sabine": 18578, - "comfortably": 18579, - "motorcycles": 18580, - "##rgo": 18581, - "169": 18582, - "pneumonia": 18583, - "fetch": 18584, - "##sonic": 18585, - "axel": 18586, - "faintly": 18587, - "parallels": 18588, - "##oop": 18589, - "mclaren": 18590, - "spouse": 18591, - "compton": 18592, - "interdisciplinary": 18593, - "miner": 18594, - "##eni": 18595, - "181": 18596, - "clamped": 18597, - "##chal": 18598, - "##llah": 18599, - "separates": 18600, - "versa": 18601, - "##mler": 18602, - "scarborough": 18603, - "labrador": 18604, - "##lity": 
18605, - "##osing": 18606, - "rutgers": 18607, - "hurdles": 18608, - "como": 18609, - "166": 18610, - "burt": 18611, - "divers": 18612, - "##100": 18613, - "wichita": 18614, - "cade": 18615, - "coincided": 18616, - "##erson": 18617, - "bruised": 18618, - "mla": 18619, - "##pper": 18620, - "vineyard": 18621, - "##ili": 18622, - "##brush": 18623, - "notch": 18624, - "mentioning": 18625, - "jase": 18626, - "hearted": 18627, - "kits": 18628, - "doe": 18629, - "##acle": 18630, - "pomerania": 18631, - "##ady": 18632, - "ronan": 18633, - "seizure": 18634, - "pavel": 18635, - "problematic": 18636, - "##zaki": 18637, - "domenico": 18638, - "##ulin": 18639, - "catering": 18640, - "penelope": 18641, - "dependence": 18642, - "parental": 18643, - "emilio": 18644, - "ministerial": 18645, - "atkinson": 18646, - "##bolic": 18647, - "clarkson": 18648, - "chargers": 18649, - "colby": 18650, - "grill": 18651, - "peeked": 18652, - "arises": 18653, - "summon": 18654, - "##aged": 18655, - "fools": 18656, - "##grapher": 18657, - "faculties": 18658, - "qaeda": 18659, - "##vial": 18660, - "garner": 18661, - "refurbished": 18662, - "##hwa": 18663, - "geelong": 18664, - "disasters": 18665, - "nudged": 18666, - "bs": 18667, - "shareholder": 18668, - "lori": 18669, - "algae": 18670, - "reinstated": 18671, - "rot": 18672, - "##ades": 18673, - "##nous": 18674, - "invites": 18675, - "stainless": 18676, - "183": 18677, - "inclusive": 18678, - "##itude": 18679, - "diocesan": 18680, - "til": 18681, - "##icz": 18682, - "denomination": 18683, - "##xa": 18684, - "benton": 18685, - "floral": 18686, - "registers": 18687, - "##ider": 18688, - "##erman": 18689, - "##kell": 18690, - "absurd": 18691, - "brunei": 18692, - "guangzhou": 18693, - "hitter": 18694, - "retaliation": 18695, - "##uled": 18696, - "##eve": 18697, - "blanc": 18698, - "nh": 18699, - "consistency": 18700, - "contamination": 18701, - "##eres": 18702, - "##rner": 18703, - "dire": 18704, - "palermo": 18705, - "broadcasters": 18706, - 
"diaries": 18707, - "inspire": 18708, - "vols": 18709, - "brewer": 18710, - "tightening": 18711, - "ky": 18712, - "mixtape": 18713, - "hormone": 18714, - "##tok": 18715, - "stokes": 18716, - "##color": 18717, - "##dly": 18718, - "##ssi": 18719, - "pg": 18720, - "##ometer": 18721, - "##lington": 18722, - "sanitation": 18723, - "##tility": 18724, - "intercontinental": 18725, - "apps": 18726, - "##adt": 18727, - "¹⁄₂": 18728, - "cylinders": 18729, - "economies": 18730, - "favourable": 18731, - "unison": 18732, - "croix": 18733, - "gertrude": 18734, - "odyssey": 18735, - "vanity": 18736, - "dangling": 18737, - "##logists": 18738, - "upgrades": 18739, - "dice": 18740, - "middleweight": 18741, - "practitioner": 18742, - "##ight": 18743, - "206": 18744, - "henrik": 18745, - "parlor": 18746, - "orion": 18747, - "angered": 18748, - "lac": 18749, - "python": 18750, - "blurted": 18751, - "##rri": 18752, - "sensual": 18753, - "intends": 18754, - "swings": 18755, - "angled": 18756, - "##phs": 18757, - "husky": 18758, - "attain": 18759, - "peerage": 18760, - "precinct": 18761, - "textiles": 18762, - "cheltenham": 18763, - "shuffled": 18764, - "dai": 18765, - "confess": 18766, - "tasting": 18767, - "bhutan": 18768, - "##riation": 18769, - "tyrone": 18770, - "segregation": 18771, - "abrupt": 18772, - "ruiz": 18773, - "##rish": 18774, - "smirked": 18775, - "blackwell": 18776, - "confidential": 18777, - "browning": 18778, - "amounted": 18779, - "##put": 18780, - "vase": 18781, - "scarce": 18782, - "fabulous": 18783, - "raided": 18784, - "staple": 18785, - "guyana": 18786, - "unemployed": 18787, - "glider": 18788, - "shay": 18789, - "##tow": 18790, - "carmine": 18791, - "troll": 18792, - "intervene": 18793, - "squash": 18794, - "superstar": 18795, - "##uce": 18796, - "cylindrical": 18797, - "len": 18798, - "roadway": 18799, - "researched": 18800, - "handy": 18801, - "##rium": 18802, - "##jana": 18803, - "meta": 18804, - "lao": 18805, - "declares": 18806, - "##rring": 18807, - 
"##tadt": 18808, - "##elin": 18809, - "##kova": 18810, - "willem": 18811, - "shrubs": 18812, - "napoleonic": 18813, - "realms": 18814, - "skater": 18815, - "qi": 18816, - "volkswagen": 18817, - "##ł": 18818, - "tad": 18819, - "hara": 18820, - "archaeologist": 18821, - "awkwardly": 18822, - "eerie": 18823, - "##kind": 18824, - "wiley": 18825, - "##heimer": 18826, - "##24": 18827, - "titus": 18828, - "organizers": 18829, - "cfl": 18830, - "crusaders": 18831, - "lama": 18832, - "usb": 18833, - "vent": 18834, - "enraged": 18835, - "thankful": 18836, - "occupants": 18837, - "maximilian": 18838, - "##gaard": 18839, - "possessing": 18840, - "textbooks": 18841, - "##oran": 18842, - "collaborator": 18843, - "quaker": 18844, - "##ulo": 18845, - "avalanche": 18846, - "mono": 18847, - "silky": 18848, - "straits": 18849, - "isaiah": 18850, - "mustang": 18851, - "surged": 18852, - "resolutions": 18853, - "potomac": 18854, - "descend": 18855, - "cl": 18856, - "kilograms": 18857, - "plato": 18858, - "strains": 18859, - "saturdays": 18860, - "##olin": 18861, - "bernstein": 18862, - "##ype": 18863, - "holstein": 18864, - "ponytail": 18865, - "##watch": 18866, - "belize": 18867, - "conversely": 18868, - "heroine": 18869, - "perpetual": 18870, - "##ylus": 18871, - "charcoal": 18872, - "piedmont": 18873, - "glee": 18874, - "negotiating": 18875, - "backdrop": 18876, - "prologue": 18877, - "##jah": 18878, - "##mmy": 18879, - "pasadena": 18880, - "climbs": 18881, - "ramos": 18882, - "sunni": 18883, - "##holm": 18884, - "##tner": 18885, - "##tri": 18886, - "anand": 18887, - "deficiency": 18888, - "hertfordshire": 18889, - "stout": 18890, - "##avi": 18891, - "aperture": 18892, - "orioles": 18893, - "##irs": 18894, - "doncaster": 18895, - "intrigued": 18896, - "bombed": 18897, - "coating": 18898, - "otis": 18899, - "##mat": 18900, - "cocktail": 18901, - "##jit": 18902, - "##eto": 18903, - "amir": 18904, - "arousal": 18905, - "sar": 18906, - "##proof": 18907, - "##act": 18908, - "##ories": 
18909, - "dixie": 18910, - "pots": 18911, - "##bow": 18912, - "whereabouts": 18913, - "159": 18914, - "##fted": 18915, - "drains": 18916, - "bullying": 18917, - "cottages": 18918, - "scripture": 18919, - "coherent": 18920, - "fore": 18921, - "poe": 18922, - "appetite": 18923, - "##uration": 18924, - "sampled": 18925, - "##ators": 18926, - "##dp": 18927, - "derrick": 18928, - "rotor": 18929, - "jays": 18930, - "peacock": 18931, - "installment": 18932, - "##rro": 18933, - "advisors": 18934, - "##coming": 18935, - "rodeo": 18936, - "scotch": 18937, - "##mot": 18938, - "##db": 18939, - "##fen": 18940, - "##vant": 18941, - "ensued": 18942, - "rodrigo": 18943, - "dictatorship": 18944, - "martyrs": 18945, - "twenties": 18946, - "##н": 18947, - "towed": 18948, - "incidence": 18949, - "marta": 18950, - "rainforest": 18951, - "sai": 18952, - "scaled": 18953, - "##cles": 18954, - "oceanic": 18955, - "qualifiers": 18956, - "symphonic": 18957, - "mcbride": 18958, - "dislike": 18959, - "generalized": 18960, - "aubrey": 18961, - "colonization": 18962, - "##iation": 18963, - "##lion": 18964, - "##ssing": 18965, - "disliked": 18966, - "lublin": 18967, - "salesman": 18968, - "##ulates": 18969, - "spherical": 18970, - "whatsoever": 18971, - "sweating": 18972, - "avalon": 18973, - "contention": 18974, - "punt": 18975, - "severity": 18976, - "alderman": 18977, - "atari": 18978, - "##dina": 18979, - "##grant": 18980, - "##rop": 18981, - "scarf": 18982, - "seville": 18983, - "vertices": 18984, - "annexation": 18985, - "fairfield": 18986, - "fascination": 18987, - "inspiring": 18988, - "launches": 18989, - "palatinate": 18990, - "regretted": 18991, - "##rca": 18992, - "feral": 18993, - "##iom": 18994, - "elk": 18995, - "nap": 18996, - "olsen": 18997, - "reddy": 18998, - "yong": 18999, - "##leader": 19000, - "##iae": 19001, - "garment": 19002, - "transports": 19003, - "feng": 19004, - "gracie": 19005, - "outrage": 19006, - "viceroy": 19007, - "insides": 19008, - "##esis": 19009, - 
"breakup": 19010, - "grady": 19011, - "organizer": 19012, - "softer": 19013, - "grimaced": 19014, - "222": 19015, - "murals": 19016, - "galicia": 19017, - "arranging": 19018, - "vectors": 19019, - "##rsten": 19020, - "bas": 19021, - "##sb": 19022, - "##cens": 19023, - "sloan": 19024, - "##eka": 19025, - "bitten": 19026, - "ara": 19027, - "fender": 19028, - "nausea": 19029, - "bumped": 19030, - "kris": 19031, - "banquet": 19032, - "comrades": 19033, - "detector": 19034, - "persisted": 19035, - "##llan": 19036, - "adjustment": 19037, - "endowed": 19038, - "cinemas": 19039, - "##shot": 19040, - "sellers": 19041, - "##uman": 19042, - "peek": 19043, - "epa": 19044, - "kindly": 19045, - "neglect": 19046, - "simpsons": 19047, - "talon": 19048, - "mausoleum": 19049, - "runaway": 19050, - "hangul": 19051, - "lookout": 19052, - "##cic": 19053, - "rewards": 19054, - "coughed": 19055, - "acquainted": 19056, - "chloride": 19057, - "##ald": 19058, - "quicker": 19059, - "accordion": 19060, - "neolithic": 19061, - "##qa": 19062, - "artemis": 19063, - "coefficient": 19064, - "lenny": 19065, - "pandora": 19066, - "tx": 19067, - "##xed": 19068, - "ecstasy": 19069, - "litter": 19070, - "segunda": 19071, - "chairperson": 19072, - "gemma": 19073, - "hiss": 19074, - "rumor": 19075, - "vow": 19076, - "nasal": 19077, - "antioch": 19078, - "compensate": 19079, - "patiently": 19080, - "transformers": 19081, - "##eded": 19082, - "judo": 19083, - "morrow": 19084, - "penis": 19085, - "posthumous": 19086, - "philips": 19087, - "bandits": 19088, - "husbands": 19089, - "denote": 19090, - "flaming": 19091, - "##any": 19092, - "##phones": 19093, - "langley": 19094, - "yorker": 19095, - "1760": 19096, - "walters": 19097, - "##uo": 19098, - "##kle": 19099, - "gubernatorial": 19100, - "fatty": 19101, - "samsung": 19102, - "leroy": 19103, - "outlaw": 19104, - "##nine": 19105, - "unpublished": 19106, - "poole": 19107, - "jakob": 19108, - "##ᵢ": 19109, - "##ₙ": 19110, - "crete": 19111, - "distorted": 
19112, - "superiority": 19113, - "##dhi": 19114, - "intercept": 19115, - "crust": 19116, - "mig": 19117, - "claus": 19118, - "crashes": 19119, - "positioning": 19120, - "188": 19121, - "stallion": 19122, - "301": 19123, - "frontal": 19124, - "armistice": 19125, - "##estinal": 19126, - "elton": 19127, - "aj": 19128, - "encompassing": 19129, - "camel": 19130, - "commemorated": 19131, - "malaria": 19132, - "woodward": 19133, - "calf": 19134, - "cigar": 19135, - "penetrate": 19136, - "##oso": 19137, - "willard": 19138, - "##rno": 19139, - "##uche": 19140, - "illustrate": 19141, - "amusing": 19142, - "convergence": 19143, - "noteworthy": 19144, - "##lma": 19145, - "##rva": 19146, - "journeys": 19147, - "realise": 19148, - "manfred": 19149, - "##sable": 19150, - "410": 19151, - "##vocation": 19152, - "hearings": 19153, - "fiance": 19154, - "##posed": 19155, - "educators": 19156, - "provoked": 19157, - "adjusting": 19158, - "##cturing": 19159, - "modular": 19160, - "stockton": 19161, - "paterson": 19162, - "vlad": 19163, - "rejects": 19164, - "electors": 19165, - "selena": 19166, - "maureen": 19167, - "##tres": 19168, - "uber": 19169, - "##rce": 19170, - "swirled": 19171, - "##num": 19172, - "proportions": 19173, - "nanny": 19174, - "pawn": 19175, - "naturalist": 19176, - "parma": 19177, - "apostles": 19178, - "awoke": 19179, - "ethel": 19180, - "wen": 19181, - "##bey": 19182, - "monsoon": 19183, - "overview": 19184, - "##inating": 19185, - "mccain": 19186, - "rendition": 19187, - "risky": 19188, - "adorned": 19189, - "##ih": 19190, - "equestrian": 19191, - "germain": 19192, - "nj": 19193, - "conspicuous": 19194, - "confirming": 19195, - "##yoshi": 19196, - "shivering": 19197, - "##imeter": 19198, - "milestone": 19199, - "rumours": 19200, - "flinched": 19201, - "bounds": 19202, - "smacked": 19203, - "token": 19204, - "##bei": 19205, - "lectured": 19206, - "automobiles": 19207, - "##shore": 19208, - "impacted": 19209, - "##iable": 19210, - "nouns": 19211, - "nero": 19212, 
- "##leaf": 19213, - "ismail": 19214, - "prostitute": 19215, - "trams": 19216, - "##lace": 19217, - "bridget": 19218, - "sud": 19219, - "stimulus": 19220, - "impressions": 19221, - "reins": 19222, - "revolves": 19223, - "##oud": 19224, - "##gned": 19225, - "giro": 19226, - "honeymoon": 19227, - "##swell": 19228, - "criterion": 19229, - "##sms": 19230, - "##uil": 19231, - "libyan": 19232, - "prefers": 19233, - "##osition": 19234, - "211": 19235, - "preview": 19236, - "sucks": 19237, - "accusation": 19238, - "bursts": 19239, - "metaphor": 19240, - "diffusion": 19241, - "tolerate": 19242, - "faye": 19243, - "betting": 19244, - "cinematographer": 19245, - "liturgical": 19246, - "specials": 19247, - "bitterly": 19248, - "humboldt": 19249, - "##ckle": 19250, - "flux": 19251, - "rattled": 19252, - "##itzer": 19253, - "archaeologists": 19254, - "odor": 19255, - "authorised": 19256, - "marshes": 19257, - "discretion": 19258, - "##ов": 19259, - "alarmed": 19260, - "archaic": 19261, - "inverse": 19262, - "##leton": 19263, - "explorers": 19264, - "##pine": 19265, - "drummond": 19266, - "tsunami": 19267, - "woodlands": 19268, - "##minate": 19269, - "##tland": 19270, - "booklet": 19271, - "insanity": 19272, - "owning": 19273, - "insert": 19274, - "crafted": 19275, - "calculus": 19276, - "##tore": 19277, - "receivers": 19278, - "##bt": 19279, - "stung": 19280, - "##eca": 19281, - "##nched": 19282, - "prevailing": 19283, - "travellers": 19284, - "eyeing": 19285, - "lila": 19286, - "graphs": 19287, - "##borne": 19288, - "178": 19289, - "julien": 19290, - "##won": 19291, - "morale": 19292, - "adaptive": 19293, - "therapist": 19294, - "erica": 19295, - "cw": 19296, - "libertarian": 19297, - "bowman": 19298, - "pitches": 19299, - "vita": 19300, - "##ional": 19301, - "crook": 19302, - "##ads": 19303, - "##entation": 19304, - "caledonia": 19305, - "mutiny": 19306, - "##sible": 19307, - "1840s": 19308, - "automation": 19309, - "##ß": 19310, - "flock": 19311, - "##pia": 19312, - "ironic": 
19313, - "pathology": 19314, - "##imus": 19315, - "remarried": 19316, - "##22": 19317, - "joker": 19318, - "withstand": 19319, - "energies": 19320, - "##att": 19321, - "shropshire": 19322, - "hostages": 19323, - "madeleine": 19324, - "tentatively": 19325, - "conflicting": 19326, - "mateo": 19327, - "recipes": 19328, - "euros": 19329, - "ol": 19330, - "mercenaries": 19331, - "nico": 19332, - "##ndon": 19333, - "albuquerque": 19334, - "augmented": 19335, - "mythical": 19336, - "bel": 19337, - "freud": 19338, - "##child": 19339, - "cough": 19340, - "##lica": 19341, - "365": 19342, - "freddy": 19343, - "lillian": 19344, - "genetically": 19345, - "nuremberg": 19346, - "calder": 19347, - "209": 19348, - "bonn": 19349, - "outdoors": 19350, - "paste": 19351, - "suns": 19352, - "urgency": 19353, - "vin": 19354, - "restraint": 19355, - "tyson": 19356, - "##cera": 19357, - "##selle": 19358, - "barrage": 19359, - "bethlehem": 19360, - "kahn": 19361, - "##par": 19362, - "mounts": 19363, - "nippon": 19364, - "barony": 19365, - "happier": 19366, - "ryu": 19367, - "makeshift": 19368, - "sheldon": 19369, - "blushed": 19370, - "castillo": 19371, - "barking": 19372, - "listener": 19373, - "taped": 19374, - "bethel": 19375, - "fluent": 19376, - "headlines": 19377, - "pornography": 19378, - "rum": 19379, - "disclosure": 19380, - "sighing": 19381, - "mace": 19382, - "doubling": 19383, - "gunther": 19384, - "manly": 19385, - "##plex": 19386, - "rt": 19387, - "interventions": 19388, - "physiological": 19389, - "forwards": 19390, - "emerges": 19391, - "##tooth": 19392, - "##gny": 19393, - "compliment": 19394, - "rib": 19395, - "recession": 19396, - "visibly": 19397, - "barge": 19398, - "faults": 19399, - "connector": 19400, - "exquisite": 19401, - "prefect": 19402, - "##rlin": 19403, - "patio": 19404, - "##cured": 19405, - "elevators": 19406, - "brandt": 19407, - "italics": 19408, - "pena": 19409, - "173": 19410, - "wasp": 19411, - "satin": 19412, - "ea": 19413, - "botswana": 19414, - 
"graceful": 19415, - "respectable": 19416, - "##jima": 19417, - "##rter": 19418, - "##oic": 19419, - "franciscan": 19420, - "generates": 19421, - "##dl": 19422, - "alfredo": 19423, - "disgusting": 19424, - "##olate": 19425, - "##iously": 19426, - "sherwood": 19427, - "warns": 19428, - "cod": 19429, - "promo": 19430, - "cheryl": 19431, - "sino": 19432, - "##ة": 19433, - "##escu": 19434, - "twitch": 19435, - "##zhi": 19436, - "brownish": 19437, - "thom": 19438, - "ortiz": 19439, - "##dron": 19440, - "densely": 19441, - "##beat": 19442, - "carmel": 19443, - "reinforce": 19444, - "##bana": 19445, - "187": 19446, - "anastasia": 19447, - "downhill": 19448, - "vertex": 19449, - "contaminated": 19450, - "remembrance": 19451, - "harmonic": 19452, - "homework": 19453, - "##sol": 19454, - "fiancee": 19455, - "gears": 19456, - "olds": 19457, - "angelica": 19458, - "loft": 19459, - "ramsay": 19460, - "quiz": 19461, - "colliery": 19462, - "sevens": 19463, - "##cape": 19464, - "autism": 19465, - "##hil": 19466, - "walkway": 19467, - "##boats": 19468, - "ruben": 19469, - "abnormal": 19470, - "ounce": 19471, - "khmer": 19472, - "##bbe": 19473, - "zachary": 19474, - "bedside": 19475, - "morphology": 19476, - "punching": 19477, - "##olar": 19478, - "sparrow": 19479, - "convinces": 19480, - "##35": 19481, - "hewitt": 19482, - "queer": 19483, - "remastered": 19484, - "rods": 19485, - "mabel": 19486, - "solemn": 19487, - "notified": 19488, - "lyricist": 19489, - "symmetric": 19490, - "##xide": 19491, - "174": 19492, - "encore": 19493, - "passports": 19494, - "wildcats": 19495, - "##uni": 19496, - "baja": 19497, - "##pac": 19498, - "mildly": 19499, - "##ease": 19500, - "bleed": 19501, - "commodity": 19502, - "mounds": 19503, - "glossy": 19504, - "orchestras": 19505, - "##omo": 19506, - "damian": 19507, - "prelude": 19508, - "ambitions": 19509, - "##vet": 19510, - "awhile": 19511, - "remotely": 19512, - "##aud": 19513, - "asserts": 19514, - "imply": 19515, - "##iques": 19516, - 
"distinctly": 19517, - "modelling": 19518, - "remedy": 19519, - "##dded": 19520, - "windshield": 19521, - "dani": 19522, - "xiao": 19523, - "##endra": 19524, - "audible": 19525, - "powerplant": 19526, - "1300": 19527, - "invalid": 19528, - "elemental": 19529, - "acquisitions": 19530, - "##hala": 19531, - "immaculate": 19532, - "libby": 19533, - "plata": 19534, - "smuggling": 19535, - "ventilation": 19536, - "denoted": 19537, - "minh": 19538, - "##morphism": 19539, - "430": 19540, - "differed": 19541, - "dion": 19542, - "kelley": 19543, - "lore": 19544, - "mocking": 19545, - "sabbath": 19546, - "spikes": 19547, - "hygiene": 19548, - "drown": 19549, - "runoff": 19550, - "stylized": 19551, - "tally": 19552, - "liberated": 19553, - "aux": 19554, - "interpreter": 19555, - "righteous": 19556, - "aba": 19557, - "siren": 19558, - "reaper": 19559, - "pearce": 19560, - "millie": 19561, - "##cier": 19562, - "##yra": 19563, - "gaius": 19564, - "##iso": 19565, - "captures": 19566, - "##ttering": 19567, - "dorm": 19568, - "claudio": 19569, - "##sic": 19570, - "benches": 19571, - "knighted": 19572, - "blackness": 19573, - "##ored": 19574, - "discount": 19575, - "fumble": 19576, - "oxidation": 19577, - "routed": 19578, - "##ς": 19579, - "novak": 19580, - "perpendicular": 19581, - "spoiled": 19582, - "fracture": 19583, - "splits": 19584, - "##urt": 19585, - "pads": 19586, - "topology": 19587, - "##cats": 19588, - "axes": 19589, - "fortunate": 19590, - "offenders": 19591, - "protestants": 19592, - "esteem": 19593, - "221": 19594, - "broadband": 19595, - "convened": 19596, - "frankly": 19597, - "hound": 19598, - "prototypes": 19599, - "isil": 19600, - "facilitated": 19601, - "keel": 19602, - "##sher": 19603, - "sahara": 19604, - "awaited": 19605, - "bubba": 19606, - "orb": 19607, - "prosecutors": 19608, - "186": 19609, - "hem": 19610, - "520": 19611, - "##xing": 19612, - "relaxing": 19613, - "remnant": 19614, - "romney": 19615, - "sorted": 19616, - "slalom": 19617, - "stefano": 
19618, - "ulrich": 19619, - "##active": 19620, - "exemption": 19621, - "folder": 19622, - "pauses": 19623, - "foliage": 19624, - "hitchcock": 19625, - "epithet": 19626, - "204": 19627, - "criticisms": 19628, - "##aca": 19629, - "ballistic": 19630, - "brody": 19631, - "hinduism": 19632, - "chaotic": 19633, - "youths": 19634, - "equals": 19635, - "##pala": 19636, - "pts": 19637, - "thicker": 19638, - "analogous": 19639, - "capitalist": 19640, - "improvised": 19641, - "overseeing": 19642, - "sinatra": 19643, - "ascended": 19644, - "beverage": 19645, - "##tl": 19646, - "straightforward": 19647, - "##kon": 19648, - "curran": 19649, - "##west": 19650, - "bois": 19651, - "325": 19652, - "induce": 19653, - "surveying": 19654, - "emperors": 19655, - "sax": 19656, - "unpopular": 19657, - "##kk": 19658, - "cartoonist": 19659, - "fused": 19660, - "##mble": 19661, - "unto": 19662, - "##yuki": 19663, - "localities": 19664, - "##cko": 19665, - "##ln": 19666, - "darlington": 19667, - "slain": 19668, - "academie": 19669, - "lobbying": 19670, - "sediment": 19671, - "puzzles": 19672, - "##grass": 19673, - "defiance": 19674, - "dickens": 19675, - "manifest": 19676, - "tongues": 19677, - "alumnus": 19678, - "arbor": 19679, - "coincide": 19680, - "184": 19681, - "appalachian": 19682, - "mustafa": 19683, - "examiner": 19684, - "cabaret": 19685, - "traumatic": 19686, - "yves": 19687, - "bracelet": 19688, - "draining": 19689, - "heroin": 19690, - "magnum": 19691, - "baths": 19692, - "odessa": 19693, - "consonants": 19694, - "mitsubishi": 19695, - "##gua": 19696, - "kellan": 19697, - "vaudeville": 19698, - "##fr": 19699, - "joked": 19700, - "null": 19701, - "straps": 19702, - "probation": 19703, - "##ław": 19704, - "ceded": 19705, - "interfaces": 19706, - "##pas": 19707, - "##zawa": 19708, - "blinding": 19709, - "viet": 19710, - "224": 19711, - "rothschild": 19712, - "museo": 19713, - "640": 19714, - "huddersfield": 19715, - "##vr": 19716, - "tactic": 19717, - "##storm": 19718, - 
"brackets": 19719, - "dazed": 19720, - "incorrectly": 19721, - "##vu": 19722, - "reg": 19723, - "glazed": 19724, - "fearful": 19725, - "manifold": 19726, - "benefited": 19727, - "irony": 19728, - "##sun": 19729, - "stumbling": 19730, - "##rte": 19731, - "willingness": 19732, - "balkans": 19733, - "mei": 19734, - "wraps": 19735, - "##aba": 19736, - "injected": 19737, - "##lea": 19738, - "gu": 19739, - "syed": 19740, - "harmless": 19741, - "##hammer": 19742, - "bray": 19743, - "takeoff": 19744, - "poppy": 19745, - "timor": 19746, - "cardboard": 19747, - "astronaut": 19748, - "purdue": 19749, - "weeping": 19750, - "southbound": 19751, - "cursing": 19752, - "stalls": 19753, - "diagonal": 19754, - "##neer": 19755, - "lamar": 19756, - "bryce": 19757, - "comte": 19758, - "weekdays": 19759, - "harrington": 19760, - "##uba": 19761, - "negatively": 19762, - "##see": 19763, - "lays": 19764, - "grouping": 19765, - "##cken": 19766, - "##henko": 19767, - "affirmed": 19768, - "halle": 19769, - "modernist": 19770, - "##lai": 19771, - "hodges": 19772, - "smelling": 19773, - "aristocratic": 19774, - "baptized": 19775, - "dismiss": 19776, - "justification": 19777, - "oilers": 19778, - "##now": 19779, - "coupling": 19780, - "qin": 19781, - "snack": 19782, - "healer": 19783, - "##qing": 19784, - "gardener": 19785, - "layla": 19786, - "battled": 19787, - "formulated": 19788, - "stephenson": 19789, - "gravitational": 19790, - "##gill": 19791, - "##jun": 19792, - "1768": 19793, - "granny": 19794, - "coordinating": 19795, - "suites": 19796, - "##cd": 19797, - "##ioned": 19798, - "monarchs": 19799, - "##cote": 19800, - "##hips": 19801, - "sep": 19802, - "blended": 19803, - "apr": 19804, - "barrister": 19805, - "deposition": 19806, - "fia": 19807, - "mina": 19808, - "policemen": 19809, - "paranoid": 19810, - "##pressed": 19811, - "churchyard": 19812, - "covert": 19813, - "crumpled": 19814, - "creep": 19815, - "abandoning": 19816, - "tr": 19817, - "transmit": 19818, - "conceal": 19819, - 
"barr": 19820, - "understands": 19821, - "readiness": 19822, - "spire": 19823, - "##cology": 19824, - "##enia": 19825, - "##erry": 19826, - "610": 19827, - "startling": 19828, - "unlock": 19829, - "vida": 19830, - "bowled": 19831, - "slots": 19832, - "##nat": 19833, - "##islav": 19834, - "spaced": 19835, - "trusting": 19836, - "admire": 19837, - "rig": 19838, - "##ink": 19839, - "slack": 19840, - "##70": 19841, - "mv": 19842, - "207": 19843, - "casualty": 19844, - "##wei": 19845, - "classmates": 19846, - "##odes": 19847, - "##rar": 19848, - "##rked": 19849, - "amherst": 19850, - "furnished": 19851, - "evolve": 19852, - "foundry": 19853, - "menace": 19854, - "mead": 19855, - "##lein": 19856, - "flu": 19857, - "wesleyan": 19858, - "##kled": 19859, - "monterey": 19860, - "webber": 19861, - "##vos": 19862, - "wil": 19863, - "##mith": 19864, - "##на": 19865, - "bartholomew": 19866, - "justices": 19867, - "restrained": 19868, - "##cke": 19869, - "amenities": 19870, - "191": 19871, - "mediated": 19872, - "sewage": 19873, - "trenches": 19874, - "ml": 19875, - "mainz": 19876, - "##thus": 19877, - "1800s": 19878, - "##cula": 19879, - "##inski": 19880, - "caine": 19881, - "bonding": 19882, - "213": 19883, - "converts": 19884, - "spheres": 19885, - "superseded": 19886, - "marianne": 19887, - "crypt": 19888, - "sweaty": 19889, - "ensign": 19890, - "historia": 19891, - "##br": 19892, - "spruce": 19893, - "##post": 19894, - "##ask": 19895, - "forks": 19896, - "thoughtfully": 19897, - "yukon": 19898, - "pamphlet": 19899, - "ames": 19900, - "##uter": 19901, - "karma": 19902, - "##yya": 19903, - "bryn": 19904, - "negotiation": 19905, - "sighs": 19906, - "incapable": 19907, - "##mbre": 19908, - "##ntial": 19909, - "actresses": 19910, - "taft": 19911, - "##mill": 19912, - "luce": 19913, - "prevailed": 19914, - "##amine": 19915, - "1773": 19916, - "motionless": 19917, - "envoy": 19918, - "testify": 19919, - "investing": 19920, - "sculpted": 19921, - "instructors": 19922, - "provence": 
19923, - "kali": 19924, - "cullen": 19925, - "horseback": 19926, - "##while": 19927, - "goodwin": 19928, - "##jos": 19929, - "gaa": 19930, - "norte": 19931, - "##ldon": 19932, - "modify": 19933, - "wavelength": 19934, - "abd": 19935, - "214": 19936, - "skinned": 19937, - "sprinter": 19938, - "forecast": 19939, - "scheduling": 19940, - "marries": 19941, - "squared": 19942, - "tentative": 19943, - "##chman": 19944, - "boer": 19945, - "##isch": 19946, - "bolts": 19947, - "swap": 19948, - "fisherman": 19949, - "assyrian": 19950, - "impatiently": 19951, - "guthrie": 19952, - "martins": 19953, - "murdoch": 19954, - "194": 19955, - "tanya": 19956, - "nicely": 19957, - "dolly": 19958, - "lacy": 19959, - "med": 19960, - "##45": 19961, - "syn": 19962, - "decks": 19963, - "fashionable": 19964, - "millionaire": 19965, - "##ust": 19966, - "surfing": 19967, - "##ml": 19968, - "##ision": 19969, - "heaved": 19970, - "tammy": 19971, - "consulate": 19972, - "attendees": 19973, - "routinely": 19974, - "197": 19975, - "fuse": 19976, - "saxophonist": 19977, - "backseat": 19978, - "malaya": 19979, - "##lord": 19980, - "scowl": 19981, - "tau": 19982, - "##ishly": 19983, - "193": 19984, - "sighted": 19985, - "steaming": 19986, - "##rks": 19987, - "303": 19988, - "911": 19989, - "##holes": 19990, - "##hong": 19991, - "ching": 19992, - "##wife": 19993, - "bless": 19994, - "conserved": 19995, - "jurassic": 19996, - "stacey": 19997, - "unix": 19998, - "zion": 19999, - "chunk": 20000, - "rigorous": 20001, - "blaine": 20002, - "198": 20003, - "peabody": 20004, - "slayer": 20005, - "dismay": 20006, - "brewers": 20007, - "nz": 20008, - "##jer": 20009, - "det": 20010, - "##glia": 20011, - "glover": 20012, - "postwar": 20013, - "int": 20014, - "penetration": 20015, - "sylvester": 20016, - "imitation": 20017, - "vertically": 20018, - "airlift": 20019, - "heiress": 20020, - "knoxville": 20021, - "viva": 20022, - "##uin": 20023, - "390": 20024, - "macon": 20025, - "##rim": 20026, - "##fighter": 20027, 
- "##gonal": 20028, - "janice": 20029, - "##orescence": 20030, - "##wari": 20031, - "marius": 20032, - "belongings": 20033, - "leicestershire": 20034, - "196": 20035, - "blanco": 20036, - "inverted": 20037, - "preseason": 20038, - "sanity": 20039, - "sobbing": 20040, - "##due": 20041, - "##elt": 20042, - "##dled": 20043, - "collingwood": 20044, - "regeneration": 20045, - "flickering": 20046, - "shortest": 20047, - "##mount": 20048, - "##osi": 20049, - "feminism": 20050, - "##lat": 20051, - "sherlock": 20052, - "cabinets": 20053, - "fumbled": 20054, - "northbound": 20055, - "precedent": 20056, - "snaps": 20057, - "##mme": 20058, - "researching": 20059, - "##akes": 20060, - "guillaume": 20061, - "insights": 20062, - "manipulated": 20063, - "vapor": 20064, - "neighbour": 20065, - "sap": 20066, - "gangster": 20067, - "frey": 20068, - "f1": 20069, - "stalking": 20070, - "scarcely": 20071, - "callie": 20072, - "barnett": 20073, - "tendencies": 20074, - "audi": 20075, - "doomed": 20076, - "assessing": 20077, - "slung": 20078, - "panchayat": 20079, - "ambiguous": 20080, - "bartlett": 20081, - "##etto": 20082, - "distributing": 20083, - "violating": 20084, - "wolverhampton": 20085, - "##hetic": 20086, - "swami": 20087, - "histoire": 20088, - "##urus": 20089, - "liable": 20090, - "pounder": 20091, - "groin": 20092, - "hussain": 20093, - "larsen": 20094, - "popping": 20095, - "surprises": 20096, - "##atter": 20097, - "vie": 20098, - "curt": 20099, - "##station": 20100, - "mute": 20101, - "relocate": 20102, - "musicals": 20103, - "authorization": 20104, - "richter": 20105, - "##sef": 20106, - "immortality": 20107, - "tna": 20108, - "bombings": 20109, - "##press": 20110, - "deteriorated": 20111, - "yiddish": 20112, - "##acious": 20113, - "robbed": 20114, - "colchester": 20115, - "cs": 20116, - "pmid": 20117, - "ao": 20118, - "verified": 20119, - "balancing": 20120, - "apostle": 20121, - "swayed": 20122, - "recognizable": 20123, - "oxfordshire": 20124, - "retention": 20125, - 
"nottinghamshire": 20126, - "contender": 20127, - "judd": 20128, - "invitational": 20129, - "shrimp": 20130, - "uhf": 20131, - "##icient": 20132, - "cleaner": 20133, - "longitudinal": 20134, - "tanker": 20135, - "##mur": 20136, - "acronym": 20137, - "broker": 20138, - "koppen": 20139, - "sundance": 20140, - "suppliers": 20141, - "##gil": 20142, - "4000": 20143, - "clipped": 20144, - "fuels": 20145, - "petite": 20146, - "##anne": 20147, - "landslide": 20148, - "helene": 20149, - "diversion": 20150, - "populous": 20151, - "landowners": 20152, - "auspices": 20153, - "melville": 20154, - "quantitative": 20155, - "##xes": 20156, - "ferries": 20157, - "nicky": 20158, - "##llus": 20159, - "doo": 20160, - "haunting": 20161, - "roche": 20162, - "carver": 20163, - "downed": 20164, - "unavailable": 20165, - "##pathy": 20166, - "approximation": 20167, - "hiroshima": 20168, - "##hue": 20169, - "garfield": 20170, - "valle": 20171, - "comparatively": 20172, - "keyboardist": 20173, - "traveler": 20174, - "##eit": 20175, - "congestion": 20176, - "calculating": 20177, - "subsidiaries": 20178, - "##bate": 20179, - "serb": 20180, - "modernization": 20181, - "fairies": 20182, - "deepened": 20183, - "ville": 20184, - "averages": 20185, - "##lore": 20186, - "inflammatory": 20187, - "tonga": 20188, - "##itch": 20189, - "co₂": 20190, - "squads": 20191, - "##hea": 20192, - "gigantic": 20193, - "serum": 20194, - "enjoyment": 20195, - "retailer": 20196, - "verona": 20197, - "35th": 20198, - "cis": 20199, - "##phobic": 20200, - "magna": 20201, - "technicians": 20202, - "##vati": 20203, - "arithmetic": 20204, - "##sport": 20205, - "levin": 20206, - "##dation": 20207, - "amtrak": 20208, - "chow": 20209, - "sienna": 20210, - "##eyer": 20211, - "backstage": 20212, - "entrepreneurship": 20213, - "##otic": 20214, - "learnt": 20215, - "tao": 20216, - "##udy": 20217, - "worcestershire": 20218, - "formulation": 20219, - "baggage": 20220, - "hesitant": 20221, - "bali": 20222, - "sabotage": 20223, - 
"##kari": 20224, - "barren": 20225, - "enhancing": 20226, - "murmur": 20227, - "pl": 20228, - "freshly": 20229, - "putnam": 20230, - "syntax": 20231, - "aces": 20232, - "medicines": 20233, - "resentment": 20234, - "bandwidth": 20235, - "##sier": 20236, - "grins": 20237, - "chili": 20238, - "guido": 20239, - "##sei": 20240, - "framing": 20241, - "implying": 20242, - "gareth": 20243, - "lissa": 20244, - "genevieve": 20245, - "pertaining": 20246, - "admissions": 20247, - "geo": 20248, - "thorpe": 20249, - "proliferation": 20250, - "sato": 20251, - "bela": 20252, - "analyzing": 20253, - "parting": 20254, - "##gor": 20255, - "awakened": 20256, - "##isman": 20257, - "huddled": 20258, - "secrecy": 20259, - "##kling": 20260, - "hush": 20261, - "gentry": 20262, - "540": 20263, - "dungeons": 20264, - "##ego": 20265, - "coasts": 20266, - "##utz": 20267, - "sacrificed": 20268, - "##chule": 20269, - "landowner": 20270, - "mutually": 20271, - "prevalence": 20272, - "programmer": 20273, - "adolescent": 20274, - "disrupted": 20275, - "seaside": 20276, - "gee": 20277, - "trusts": 20278, - "vamp": 20279, - "georgie": 20280, - "##nesian": 20281, - "##iol": 20282, - "schedules": 20283, - "sindh": 20284, - "##market": 20285, - "etched": 20286, - "hm": 20287, - "sparse": 20288, - "bey": 20289, - "beaux": 20290, - "scratching": 20291, - "gliding": 20292, - "unidentified": 20293, - "216": 20294, - "collaborating": 20295, - "gems": 20296, - "jesuits": 20297, - "oro": 20298, - "accumulation": 20299, - "shaping": 20300, - "mbe": 20301, - "anal": 20302, - "##xin": 20303, - "231": 20304, - "enthusiasts": 20305, - "newscast": 20306, - "##egan": 20307, - "janata": 20308, - "dewey": 20309, - "parkinson": 20310, - "179": 20311, - "ankara": 20312, - "biennial": 20313, - "towering": 20314, - "dd": 20315, - "inconsistent": 20316, - "950": 20317, - "##chet": 20318, - "thriving": 20319, - "terminate": 20320, - "cabins": 20321, - "furiously": 20322, - "eats": 20323, - "advocating": 20324, - "donkey": 
20325, - "marley": 20326, - "muster": 20327, - "phyllis": 20328, - "leiden": 20329, - "##user": 20330, - "grassland": 20331, - "glittering": 20332, - "iucn": 20333, - "loneliness": 20334, - "217": 20335, - "memorandum": 20336, - "armenians": 20337, - "##ddle": 20338, - "popularized": 20339, - "rhodesia": 20340, - "60s": 20341, - "lame": 20342, - "##illon": 20343, - "sans": 20344, - "bikini": 20345, - "header": 20346, - "orbits": 20347, - "##xx": 20348, - "##finger": 20349, - "##ulator": 20350, - "sharif": 20351, - "spines": 20352, - "biotechnology": 20353, - "strolled": 20354, - "naughty": 20355, - "yates": 20356, - "##wire": 20357, - "fremantle": 20358, - "milo": 20359, - "##mour": 20360, - "abducted": 20361, - "removes": 20362, - "##atin": 20363, - "humming": 20364, - "wonderland": 20365, - "##chrome": 20366, - "##ester": 20367, - "hume": 20368, - "pivotal": 20369, - "##rates": 20370, - "armand": 20371, - "grams": 20372, - "believers": 20373, - "elector": 20374, - "rte": 20375, - "apron": 20376, - "bis": 20377, - "scraped": 20378, - "##yria": 20379, - "endorsement": 20380, - "initials": 20381, - "##llation": 20382, - "eps": 20383, - "dotted": 20384, - "hints": 20385, - "buzzing": 20386, - "emigration": 20387, - "nearer": 20388, - "##tom": 20389, - "indicators": 20390, - "##ulu": 20391, - "coarse": 20392, - "neutron": 20393, - "protectorate": 20394, - "##uze": 20395, - "directional": 20396, - "exploits": 20397, - "pains": 20398, - "loire": 20399, - "1830s": 20400, - "proponents": 20401, - "guggenheim": 20402, - "rabbits": 20403, - "ritchie": 20404, - "305": 20405, - "hectare": 20406, - "inputs": 20407, - "hutton": 20408, - "##raz": 20409, - "verify": 20410, - "##ako": 20411, - "boilers": 20412, - "longitude": 20413, - "##lev": 20414, - "skeletal": 20415, - "yer": 20416, - "emilia": 20417, - "citrus": 20418, - "compromised": 20419, - "##gau": 20420, - "pokemon": 20421, - "prescription": 20422, - "paragraph": 20423, - "eduard": 20424, - "cadillac": 20425, - 
"attire": 20426, - "categorized": 20427, - "kenyan": 20428, - "weddings": 20429, - "charley": 20430, - "##bourg": 20431, - "entertain": 20432, - "monmouth": 20433, - "##lles": 20434, - "nutrients": 20435, - "davey": 20436, - "mesh": 20437, - "incentive": 20438, - "practised": 20439, - "ecosystems": 20440, - "kemp": 20441, - "subdued": 20442, - "overheard": 20443, - "##rya": 20444, - "bodily": 20445, - "maxim": 20446, - "##nius": 20447, - "apprenticeship": 20448, - "ursula": 20449, - "##fight": 20450, - "lodged": 20451, - "rug": 20452, - "silesian": 20453, - "unconstitutional": 20454, - "patel": 20455, - "inspected": 20456, - "coyote": 20457, - "unbeaten": 20458, - "##hak": 20459, - "34th": 20460, - "disruption": 20461, - "convict": 20462, - "parcel": 20463, - "##cl": 20464, - "##nham": 20465, - "collier": 20466, - "implicated": 20467, - "mallory": 20468, - "##iac": 20469, - "##lab": 20470, - "susannah": 20471, - "winkler": 20472, - "##rber": 20473, - "shia": 20474, - "phelps": 20475, - "sediments": 20476, - "graphical": 20477, - "robotic": 20478, - "##sner": 20479, - "adulthood": 20480, - "mart": 20481, - "smoked": 20482, - "##isto": 20483, - "kathryn": 20484, - "clarified": 20485, - "##aran": 20486, - "divides": 20487, - "convictions": 20488, - "oppression": 20489, - "pausing": 20490, - "burying": 20491, - "##mt": 20492, - "federico": 20493, - "mathias": 20494, - "eileen": 20495, - "##tana": 20496, - "kite": 20497, - "hunched": 20498, - "##acies": 20499, - "189": 20500, - "##atz": 20501, - "disadvantage": 20502, - "liza": 20503, - "kinetic": 20504, - "greedy": 20505, - "paradox": 20506, - "yokohama": 20507, - "dowager": 20508, - "trunks": 20509, - "ventured": 20510, - "##gement": 20511, - "gupta": 20512, - "vilnius": 20513, - "olaf": 20514, - "##thest": 20515, - "crimean": 20516, - "hopper": 20517, - "##ej": 20518, - "progressively": 20519, - "arturo": 20520, - "mouthed": 20521, - "arrondissement": 20522, - "##fusion": 20523, - "rubin": 20524, - "simulcast": 
20525, - "oceania": 20526, - "##orum": 20527, - "##stra": 20528, - "##rred": 20529, - "busiest": 20530, - "intensely": 20531, - "navigator": 20532, - "cary": 20533, - "##vine": 20534, - "##hini": 20535, - "##bies": 20536, - "fife": 20537, - "rowe": 20538, - "rowland": 20539, - "posing": 20540, - "insurgents": 20541, - "shafts": 20542, - "lawsuits": 20543, - "activate": 20544, - "conor": 20545, - "inward": 20546, - "culturally": 20547, - "garlic": 20548, - "265": 20549, - "##eering": 20550, - "eclectic": 20551, - "##hui": 20552, - "##kee": 20553, - "##nl": 20554, - "furrowed": 20555, - "vargas": 20556, - "meteorological": 20557, - "rendezvous": 20558, - "##aus": 20559, - "culinary": 20560, - "commencement": 20561, - "##dition": 20562, - "quota": 20563, - "##notes": 20564, - "mommy": 20565, - "salaries": 20566, - "overlapping": 20567, - "mule": 20568, - "##iology": 20569, - "##mology": 20570, - "sums": 20571, - "wentworth": 20572, - "##isk": 20573, - "##zione": 20574, - "mainline": 20575, - "subgroup": 20576, - "##illy": 20577, - "hack": 20578, - "plaintiff": 20579, - "verdi": 20580, - "bulb": 20581, - "differentiation": 20582, - "engagements": 20583, - "multinational": 20584, - "supplemented": 20585, - "bertrand": 20586, - "caller": 20587, - "regis": 20588, - "##naire": 20589, - "##sler": 20590, - "##arts": 20591, - "##imated": 20592, - "blossom": 20593, - "propagation": 20594, - "kilometer": 20595, - "viaduct": 20596, - "vineyards": 20597, - "##uate": 20598, - "beckett": 20599, - "optimization": 20600, - "golfer": 20601, - "songwriters": 20602, - "seminal": 20603, - "semitic": 20604, - "thud": 20605, - "volatile": 20606, - "evolving": 20607, - "ridley": 20608, - "##wley": 20609, - "trivial": 20610, - "distributions": 20611, - "scandinavia": 20612, - "jiang": 20613, - "##ject": 20614, - "wrestled": 20615, - "insistence": 20616, - "##dio": 20617, - "emphasizes": 20618, - "napkin": 20619, - "##ods": 20620, - "adjunct": 20621, - "rhyme": 20622, - "##ricted": 20623, - 
"##eti": 20624, - "hopeless": 20625, - "surrounds": 20626, - "tremble": 20627, - "32nd": 20628, - "smoky": 20629, - "##ntly": 20630, - "oils": 20631, - "medicinal": 20632, - "padded": 20633, - "steer": 20634, - "wilkes": 20635, - "219": 20636, - "255": 20637, - "concessions": 20638, - "hue": 20639, - "uniquely": 20640, - "blinded": 20641, - "landon": 20642, - "yahoo": 20643, - "##lane": 20644, - "hendrix": 20645, - "commemorating": 20646, - "dex": 20647, - "specify": 20648, - "chicks": 20649, - "##ggio": 20650, - "intercity": 20651, - "1400": 20652, - "morley": 20653, - "##torm": 20654, - "highlighting": 20655, - "##oting": 20656, - "pang": 20657, - "oblique": 20658, - "stalled": 20659, - "##liner": 20660, - "flirting": 20661, - "newborn": 20662, - "1769": 20663, - "bishopric": 20664, - "shaved": 20665, - "232": 20666, - "currie": 20667, - "##ush": 20668, - "dharma": 20669, - "spartan": 20670, - "##ooped": 20671, - "favorites": 20672, - "smug": 20673, - "novella": 20674, - "sirens": 20675, - "abusive": 20676, - "creations": 20677, - "espana": 20678, - "##lage": 20679, - "paradigm": 20680, - "semiconductor": 20681, - "sheen": 20682, - "##rdo": 20683, - "##yen": 20684, - "##zak": 20685, - "nrl": 20686, - "renew": 20687, - "##pose": 20688, - "##tur": 20689, - "adjutant": 20690, - "marches": 20691, - "norma": 20692, - "##enity": 20693, - "ineffective": 20694, - "weimar": 20695, - "grunt": 20696, - "##gat": 20697, - "lordship": 20698, - "plotting": 20699, - "expenditure": 20700, - "infringement": 20701, - "lbs": 20702, - "refrain": 20703, - "av": 20704, - "mimi": 20705, - "mistakenly": 20706, - "postmaster": 20707, - "1771": 20708, - "##bara": 20709, - "ras": 20710, - "motorsports": 20711, - "tito": 20712, - "199": 20713, - "subjective": 20714, - "##zza": 20715, - "bully": 20716, - "stew": 20717, - "##kaya": 20718, - "prescott": 20719, - "1a": 20720, - "##raphic": 20721, - "##zam": 20722, - "bids": 20723, - "styling": 20724, - "paranormal": 20725, - "reeve": 20726, - 
"sneaking": 20727, - "exploding": 20728, - "katz": 20729, - "akbar": 20730, - "migrant": 20731, - "syllables": 20732, - "indefinitely": 20733, - "##ogical": 20734, - "destroys": 20735, - "replaces": 20736, - "applause": 20737, - "##phine": 20738, - "pest": 20739, - "##fide": 20740, - "218": 20741, - "articulated": 20742, - "bertie": 20743, - "##thing": 20744, - "##cars": 20745, - "##ptic": 20746, - "courtroom": 20747, - "crowley": 20748, - "aesthetics": 20749, - "cummings": 20750, - "tehsil": 20751, - "hormones": 20752, - "titanic": 20753, - "dangerously": 20754, - "##ibe": 20755, - "stadion": 20756, - "jaenelle": 20757, - "auguste": 20758, - "ciudad": 20759, - "##chu": 20760, - "mysore": 20761, - "partisans": 20762, - "##sio": 20763, - "lucan": 20764, - "philipp": 20765, - "##aly": 20766, - "debating": 20767, - "henley": 20768, - "interiors": 20769, - "##rano": 20770, - "##tious": 20771, - "homecoming": 20772, - "beyonce": 20773, - "usher": 20774, - "henrietta": 20775, - "prepares": 20776, - "weeds": 20777, - "##oman": 20778, - "ely": 20779, - "plucked": 20780, - "##pire": 20781, - "##dable": 20782, - "luxurious": 20783, - "##aq": 20784, - "artifact": 20785, - "password": 20786, - "pasture": 20787, - "juno": 20788, - "maddy": 20789, - "minsk": 20790, - "##dder": 20791, - "##ologies": 20792, - "##rone": 20793, - "assessments": 20794, - "martian": 20795, - "royalist": 20796, - "1765": 20797, - "examines": 20798, - "##mani": 20799, - "##rge": 20800, - "nino": 20801, - "223": 20802, - "parry": 20803, - "scooped": 20804, - "relativity": 20805, - "##eli": 20806, - "##uting": 20807, - "##cao": 20808, - "congregational": 20809, - "noisy": 20810, - "traverse": 20811, - "##agawa": 20812, - "strikeouts": 20813, - "nickelodeon": 20814, - "obituary": 20815, - "transylvania": 20816, - "binds": 20817, - "depictions": 20818, - "polk": 20819, - "trolley": 20820, - "##yed": 20821, - "##lard": 20822, - "breeders": 20823, - "##under": 20824, - "dryly": 20825, - "hokkaido": 20826, - 
"1762": 20827, - "strengths": 20828, - "stacks": 20829, - "bonaparte": 20830, - "connectivity": 20831, - "neared": 20832, - "prostitutes": 20833, - "stamped": 20834, - "anaheim": 20835, - "gutierrez": 20836, - "sinai": 20837, - "##zzling": 20838, - "bram": 20839, - "fresno": 20840, - "madhya": 20841, - "##86": 20842, - "proton": 20843, - "##lena": 20844, - "##llum": 20845, - "##phon": 20846, - "reelected": 20847, - "wanda": 20848, - "##anus": 20849, - "##lb": 20850, - "ample": 20851, - "distinguishing": 20852, - "##yler": 20853, - "grasping": 20854, - "sermons": 20855, - "tomato": 20856, - "bland": 20857, - "stimulation": 20858, - "avenues": 20859, - "##eux": 20860, - "spreads": 20861, - "scarlett": 20862, - "fern": 20863, - "pentagon": 20864, - "assert": 20865, - "baird": 20866, - "chesapeake": 20867, - "ir": 20868, - "calmed": 20869, - "distortion": 20870, - "fatalities": 20871, - "##olis": 20872, - "correctional": 20873, - "pricing": 20874, - "##astic": 20875, - "##gina": 20876, - "prom": 20877, - "dammit": 20878, - "ying": 20879, - "collaborate": 20880, - "##chia": 20881, - "welterweight": 20882, - "33rd": 20883, - "pointer": 20884, - "substitution": 20885, - "bonded": 20886, - "umpire": 20887, - "communicating": 20888, - "multitude": 20889, - "paddle": 20890, - "##obe": 20891, - "federally": 20892, - "intimacy": 20893, - "##insky": 20894, - "betray": 20895, - "ssr": 20896, - "##lett": 20897, - "##lean": 20898, - "##lves": 20899, - "##therapy": 20900, - "airbus": 20901, - "##tery": 20902, - "functioned": 20903, - "ud": 20904, - "bearer": 20905, - "biomedical": 20906, - "netflix": 20907, - "##hire": 20908, - "##nca": 20909, - "condom": 20910, - "brink": 20911, - "ik": 20912, - "##nical": 20913, - "macy": 20914, - "##bet": 20915, - "flap": 20916, - "gma": 20917, - "experimented": 20918, - "jelly": 20919, - "lavender": 20920, - "##icles": 20921, - "##ulia": 20922, - "munro": 20923, - "##mian": 20924, - "##tial": 20925, - "rye": 20926, - "##rle": 20927, - "60th": 
20928, - "gigs": 20929, - "hottest": 20930, - "rotated": 20931, - "predictions": 20932, - "fuji": 20933, - "bu": 20934, - "##erence": 20935, - "##omi": 20936, - "barangay": 20937, - "##fulness": 20938, - "##sas": 20939, - "clocks": 20940, - "##rwood": 20941, - "##liness": 20942, - "cereal": 20943, - "roe": 20944, - "wight": 20945, - "decker": 20946, - "uttered": 20947, - "babu": 20948, - "onion": 20949, - "xml": 20950, - "forcibly": 20951, - "##df": 20952, - "petra": 20953, - "sarcasm": 20954, - "hartley": 20955, - "peeled": 20956, - "storytelling": 20957, - "##42": 20958, - "##xley": 20959, - "##ysis": 20960, - "##ffa": 20961, - "fibre": 20962, - "kiel": 20963, - "auditor": 20964, - "fig": 20965, - "harald": 20966, - "greenville": 20967, - "##berries": 20968, - "geographically": 20969, - "nell": 20970, - "quartz": 20971, - "##athic": 20972, - "cemeteries": 20973, - "##lr": 20974, - "crossings": 20975, - "nah": 20976, - "holloway": 20977, - "reptiles": 20978, - "chun": 20979, - "sichuan": 20980, - "snowy": 20981, - "660": 20982, - "corrections": 20983, - "##ivo": 20984, - "zheng": 20985, - "ambassadors": 20986, - "blacksmith": 20987, - "fielded": 20988, - "fluids": 20989, - "hardcover": 20990, - "turnover": 20991, - "medications": 20992, - "melvin": 20993, - "academies": 20994, - "##erton": 20995, - "ro": 20996, - "roach": 20997, - "absorbing": 20998, - "spaniards": 20999, - "colton": 21000, - "##founded": 21001, - "outsider": 21002, - "espionage": 21003, - "kelsey": 21004, - "245": 21005, - "edible": 21006, - "##ulf": 21007, - "dora": 21008, - "establishes": 21009, - "##sham": 21010, - "##tries": 21011, - "contracting": 21012, - "##tania": 21013, - "cinematic": 21014, - "costello": 21015, - "nesting": 21016, - "##uron": 21017, - "connolly": 21018, - "duff": 21019, - "##nology": 21020, - "mma": 21021, - "##mata": 21022, - "fergus": 21023, - "sexes": 21024, - "gi": 21025, - "optics": 21026, - "spectator": 21027, - "woodstock": 21028, - "banning": 21029, - "##hee": 
21030, - "##fle": 21031, - "differentiate": 21032, - "outfielder": 21033, - "refinery": 21034, - "226": 21035, - "312": 21036, - "gerhard": 21037, - "horde": 21038, - "lair": 21039, - "drastically": 21040, - "##udi": 21041, - "landfall": 21042, - "##cheng": 21043, - "motorsport": 21044, - "odi": 21045, - "##achi": 21046, - "predominant": 21047, - "quay": 21048, - "skins": 21049, - "##ental": 21050, - "edna": 21051, - "harshly": 21052, - "complementary": 21053, - "murdering": 21054, - "##aves": 21055, - "wreckage": 21056, - "##90": 21057, - "ono": 21058, - "outstretched": 21059, - "lennox": 21060, - "munitions": 21061, - "galen": 21062, - "reconcile": 21063, - "470": 21064, - "scalp": 21065, - "bicycles": 21066, - "gillespie": 21067, - "questionable": 21068, - "rosenberg": 21069, - "guillermo": 21070, - "hostel": 21071, - "jarvis": 21072, - "kabul": 21073, - "volvo": 21074, - "opium": 21075, - "yd": 21076, - "##twined": 21077, - "abuses": 21078, - "decca": 21079, - "outpost": 21080, - "##cino": 21081, - "sensible": 21082, - "neutrality": 21083, - "##64": 21084, - "ponce": 21085, - "anchorage": 21086, - "atkins": 21087, - "turrets": 21088, - "inadvertently": 21089, - "disagree": 21090, - "libre": 21091, - "vodka": 21092, - "reassuring": 21093, - "weighs": 21094, - "##yal": 21095, - "glide": 21096, - "jumper": 21097, - "ceilings": 21098, - "repertory": 21099, - "outs": 21100, - "stain": 21101, - "##bial": 21102, - "envy": 21103, - "##ucible": 21104, - "smashing": 21105, - "heightened": 21106, - "policing": 21107, - "hyun": 21108, - "mixes": 21109, - "lai": 21110, - "prima": 21111, - "##ples": 21112, - "celeste": 21113, - "##bina": 21114, - "lucrative": 21115, - "intervened": 21116, - "kc": 21117, - "manually": 21118, - "##rned": 21119, - "stature": 21120, - "staffed": 21121, - "bun": 21122, - "bastards": 21123, - "nairobi": 21124, - "priced": 21125, - "##auer": 21126, - "thatcher": 21127, - "##kia": 21128, - "tripped": 21129, - "comune": 21130, - "##ogan": 21131, - 
"##pled": 21132, - "brasil": 21133, - "incentives": 21134, - "emanuel": 21135, - "hereford": 21136, - "musica": 21137, - "##kim": 21138, - "benedictine": 21139, - "biennale": 21140, - "##lani": 21141, - "eureka": 21142, - "gardiner": 21143, - "rb": 21144, - "knocks": 21145, - "sha": 21146, - "##ael": 21147, - "##elled": 21148, - "##onate": 21149, - "efficacy": 21150, - "ventura": 21151, - "masonic": 21152, - "sanford": 21153, - "maize": 21154, - "leverage": 21155, - "##feit": 21156, - "capacities": 21157, - "santana": 21158, - "##aur": 21159, - "novelty": 21160, - "vanilla": 21161, - "##cter": 21162, - "##tour": 21163, - "benin": 21164, - "##oir": 21165, - "##rain": 21166, - "neptune": 21167, - "drafting": 21168, - "tallinn": 21169, - "##cable": 21170, - "humiliation": 21171, - "##boarding": 21172, - "schleswig": 21173, - "fabian": 21174, - "bernardo": 21175, - "liturgy": 21176, - "spectacle": 21177, - "sweeney": 21178, - "pont": 21179, - "routledge": 21180, - "##tment": 21181, - "cosmos": 21182, - "ut": 21183, - "hilt": 21184, - "sleek": 21185, - "universally": 21186, - "##eville": 21187, - "##gawa": 21188, - "typed": 21189, - "##dry": 21190, - "favors": 21191, - "allegheny": 21192, - "glaciers": 21193, - "##rly": 21194, - "recalling": 21195, - "aziz": 21196, - "##log": 21197, - "parasite": 21198, - "requiem": 21199, - "auf": 21200, - "##berto": 21201, - "##llin": 21202, - "illumination": 21203, - "##breaker": 21204, - "##issa": 21205, - "festivities": 21206, - "bows": 21207, - "govern": 21208, - "vibe": 21209, - "vp": 21210, - "333": 21211, - "sprawled": 21212, - "larson": 21213, - "pilgrim": 21214, - "bwf": 21215, - "leaping": 21216, - "##rts": 21217, - "##ssel": 21218, - "alexei": 21219, - "greyhound": 21220, - "hoarse": 21221, - "##dler": 21222, - "##oration": 21223, - "seneca": 21224, - "##cule": 21225, - "gaping": 21226, - "##ulously": 21227, - "##pura": 21228, - "cinnamon": 21229, - "##gens": 21230, - "##rricular": 21231, - "craven": 21232, - "fantasies": 
21233, - "houghton": 21234, - "engined": 21235, - "reigned": 21236, - "dictator": 21237, - "supervising": 21238, - "##oris": 21239, - "bogota": 21240, - "commentaries": 21241, - "unnatural": 21242, - "fingernails": 21243, - "spirituality": 21244, - "tighten": 21245, - "##tm": 21246, - "canadiens": 21247, - "protesting": 21248, - "intentional": 21249, - "cheers": 21250, - "sparta": 21251, - "##ytic": 21252, - "##iere": 21253, - "##zine": 21254, - "widen": 21255, - "belgarath": 21256, - "controllers": 21257, - "dodd": 21258, - "iaaf": 21259, - "navarre": 21260, - "##ication": 21261, - "defect": 21262, - "squire": 21263, - "steiner": 21264, - "whisky": 21265, - "##mins": 21266, - "560": 21267, - "inevitably": 21268, - "tome": 21269, - "##gold": 21270, - "chew": 21271, - "##uid": 21272, - "##lid": 21273, - "elastic": 21274, - "##aby": 21275, - "streaked": 21276, - "alliances": 21277, - "jailed": 21278, - "regal": 21279, - "##ined": 21280, - "##phy": 21281, - "czechoslovak": 21282, - "narration": 21283, - "absently": 21284, - "##uld": 21285, - "bluegrass": 21286, - "guangdong": 21287, - "quran": 21288, - "criticizing": 21289, - "hose": 21290, - "hari": 21291, - "##liest": 21292, - "##owa": 21293, - "skier": 21294, - "streaks": 21295, - "deploy": 21296, - "##lom": 21297, - "raft": 21298, - "bose": 21299, - "dialed": 21300, - "huff": 21301, - "##eira": 21302, - "haifa": 21303, - "simplest": 21304, - "bursting": 21305, - "endings": 21306, - "ib": 21307, - "sultanate": 21308, - "##titled": 21309, - "franks": 21310, - "whitman": 21311, - "ensures": 21312, - "sven": 21313, - "##ggs": 21314, - "collaborators": 21315, - "forster": 21316, - "organising": 21317, - "ui": 21318, - "banished": 21319, - "napier": 21320, - "injustice": 21321, - "teller": 21322, - "layered": 21323, - "thump": 21324, - "##otti": 21325, - "roc": 21326, - "battleships": 21327, - "evidenced": 21328, - "fugitive": 21329, - "sadie": 21330, - "robotics": 21331, - "##roud": 21332, - "equatorial": 21333, - 
"geologist": 21334, - "##iza": 21335, - "yielding": 21336, - "##bron": 21337, - "##sr": 21338, - "internationale": 21339, - "mecca": 21340, - "##diment": 21341, - "sbs": 21342, - "skyline": 21343, - "toad": 21344, - "uploaded": 21345, - "reflective": 21346, - "undrafted": 21347, - "lal": 21348, - "leafs": 21349, - "bayern": 21350, - "##dai": 21351, - "lakshmi": 21352, - "shortlisted": 21353, - "##stick": 21354, - "##wicz": 21355, - "camouflage": 21356, - "donate": 21357, - "af": 21358, - "christi": 21359, - "lau": 21360, - "##acio": 21361, - "disclosed": 21362, - "nemesis": 21363, - "1761": 21364, - "assemble": 21365, - "straining": 21366, - "northamptonshire": 21367, - "tal": 21368, - "##asi": 21369, - "bernardino": 21370, - "premature": 21371, - "heidi": 21372, - "42nd": 21373, - "coefficients": 21374, - "galactic": 21375, - "reproduce": 21376, - "buzzed": 21377, - "sensations": 21378, - "zionist": 21379, - "monsieur": 21380, - "myrtle": 21381, - "##eme": 21382, - "archery": 21383, - "strangled": 21384, - "musically": 21385, - "viewpoint": 21386, - "antiquities": 21387, - "bei": 21388, - "trailers": 21389, - "seahawks": 21390, - "cured": 21391, - "pee": 21392, - "preferring": 21393, - "tasmanian": 21394, - "lange": 21395, - "sul": 21396, - "##mail": 21397, - "##working": 21398, - "colder": 21399, - "overland": 21400, - "lucivar": 21401, - "massey": 21402, - "gatherings": 21403, - "haitian": 21404, - "##smith": 21405, - "disapproval": 21406, - "flaws": 21407, - "##cco": 21408, - "##enbach": 21409, - "1766": 21410, - "npr": 21411, - "##icular": 21412, - "boroughs": 21413, - "creole": 21414, - "forums": 21415, - "techno": 21416, - "1755": 21417, - "dent": 21418, - "abdominal": 21419, - "streetcar": 21420, - "##eson": 21421, - "##stream": 21422, - "procurement": 21423, - "gemini": 21424, - "predictable": 21425, - "##tya": 21426, - "acheron": 21427, - "christoph": 21428, - "feeder": 21429, - "fronts": 21430, - "vendor": 21431, - "bernhard": 21432, - "jammu": 21433, - 
"tumors": 21434, - "slang": 21435, - "##uber": 21436, - "goaltender": 21437, - "twists": 21438, - "curving": 21439, - "manson": 21440, - "vuelta": 21441, - "mer": 21442, - "peanut": 21443, - "confessions": 21444, - "pouch": 21445, - "unpredictable": 21446, - "allowance": 21447, - "theodor": 21448, - "vascular": 21449, - "##factory": 21450, - "bala": 21451, - "authenticity": 21452, - "metabolic": 21453, - "coughing": 21454, - "nanjing": 21455, - "##cea": 21456, - "pembroke": 21457, - "##bard": 21458, - "splendid": 21459, - "36th": 21460, - "ff": 21461, - "hourly": 21462, - "##ahu": 21463, - "elmer": 21464, - "handel": 21465, - "##ivate": 21466, - "awarding": 21467, - "thrusting": 21468, - "dl": 21469, - "experimentation": 21470, - "##hesion": 21471, - "##46": 21472, - "caressed": 21473, - "entertained": 21474, - "steak": 21475, - "##rangle": 21476, - "biologist": 21477, - "orphans": 21478, - "baroness": 21479, - "oyster": 21480, - "stepfather": 21481, - "##dridge": 21482, - "mirage": 21483, - "reefs": 21484, - "speeding": 21485, - "##31": 21486, - "barons": 21487, - "1764": 21488, - "227": 21489, - "inhabit": 21490, - "preached": 21491, - "repealed": 21492, - "##tral": 21493, - "honoring": 21494, - "boogie": 21495, - "captives": 21496, - "administer": 21497, - "johanna": 21498, - "##imate": 21499, - "gel": 21500, - "suspiciously": 21501, - "1767": 21502, - "sobs": 21503, - "##dington": 21504, - "backbone": 21505, - "hayward": 21506, - "garry": 21507, - "##folding": 21508, - "##nesia": 21509, - "maxi": 21510, - "##oof": 21511, - "##ppe": 21512, - "ellison": 21513, - "galileo": 21514, - "##stand": 21515, - "crimea": 21516, - "frenzy": 21517, - "amour": 21518, - "bumper": 21519, - "matrices": 21520, - "natalia": 21521, - "baking": 21522, - "garth": 21523, - "palestinians": 21524, - "##grove": 21525, - "smack": 21526, - "conveyed": 21527, - "ensembles": 21528, - "gardening": 21529, - "##manship": 21530, - "##rup": 21531, - "##stituting": 21532, - "1640": 21533, - 
"harvesting": 21534, - "topography": 21535, - "jing": 21536, - "shifters": 21537, - "dormitory": 21538, - "##carriage": 21539, - "##lston": 21540, - "ist": 21541, - "skulls": 21542, - "##stadt": 21543, - "dolores": 21544, - "jewellery": 21545, - "sarawak": 21546, - "##wai": 21547, - "##zier": 21548, - "fences": 21549, - "christy": 21550, - "confinement": 21551, - "tumbling": 21552, - "credibility": 21553, - "fir": 21554, - "stench": 21555, - "##bria": 21556, - "##plication": 21557, - "##nged": 21558, - "##sam": 21559, - "virtues": 21560, - "##belt": 21561, - "marjorie": 21562, - "pba": 21563, - "##eem": 21564, - "##made": 21565, - "celebrates": 21566, - "schooner": 21567, - "agitated": 21568, - "barley": 21569, - "fulfilling": 21570, - "anthropologist": 21571, - "##pro": 21572, - "restrict": 21573, - "novi": 21574, - "regulating": 21575, - "##nent": 21576, - "padres": 21577, - "##rani": 21578, - "##hesive": 21579, - "loyola": 21580, - "tabitha": 21581, - "milky": 21582, - "olson": 21583, - "proprietor": 21584, - "crambidae": 21585, - "guarantees": 21586, - "intercollegiate": 21587, - "ljubljana": 21588, - "hilda": 21589, - "##sko": 21590, - "ignorant": 21591, - "hooded": 21592, - "##lts": 21593, - "sardinia": 21594, - "##lidae": 21595, - "##vation": 21596, - "frontman": 21597, - "privileged": 21598, - "witchcraft": 21599, - "##gp": 21600, - "jammed": 21601, - "laude": 21602, - "poking": 21603, - "##than": 21604, - "bracket": 21605, - "amazement": 21606, - "yunnan": 21607, - "##erus": 21608, - "maharaja": 21609, - "linnaeus": 21610, - "264": 21611, - "commissioning": 21612, - "milano": 21613, - "peacefully": 21614, - "##logies": 21615, - "akira": 21616, - "rani": 21617, - "regulator": 21618, - "##36": 21619, - "grasses": 21620, - "##rance": 21621, - "luzon": 21622, - "crows": 21623, - "compiler": 21624, - "gretchen": 21625, - "seaman": 21626, - "edouard": 21627, - "tab": 21628, - "buccaneers": 21629, - "ellington": 21630, - "hamlets": 21631, - "whig": 21632, - 
"socialists": 21633, - "##anto": 21634, - "directorial": 21635, - "easton": 21636, - "mythological": 21637, - "##kr": 21638, - "##vary": 21639, - "rhineland": 21640, - "semantic": 21641, - "taut": 21642, - "dune": 21643, - "inventions": 21644, - "succeeds": 21645, - "##iter": 21646, - "replication": 21647, - "branched": 21648, - "##pired": 21649, - "jul": 21650, - "prosecuted": 21651, - "kangaroo": 21652, - "penetrated": 21653, - "##avian": 21654, - "middlesbrough": 21655, - "doses": 21656, - "bleak": 21657, - "madam": 21658, - "predatory": 21659, - "relentless": 21660, - "##vili": 21661, - "reluctance": 21662, - "##vir": 21663, - "hailey": 21664, - "crore": 21665, - "silvery": 21666, - "1759": 21667, - "monstrous": 21668, - "swimmers": 21669, - "transmissions": 21670, - "hawthorn": 21671, - "informing": 21672, - "##eral": 21673, - "toilets": 21674, - "caracas": 21675, - "crouch": 21676, - "kb": 21677, - "##sett": 21678, - "295": 21679, - "cartel": 21680, - "hadley": 21681, - "##aling": 21682, - "alexia": 21683, - "yvonne": 21684, - "##biology": 21685, - "cinderella": 21686, - "eton": 21687, - "superb": 21688, - "blizzard": 21689, - "stabbing": 21690, - "industrialist": 21691, - "maximus": 21692, - "##gm": 21693, - "##orus": 21694, - "groves": 21695, - "maud": 21696, - "clade": 21697, - "oversized": 21698, - "comedic": 21699, - "##bella": 21700, - "rosen": 21701, - "nomadic": 21702, - "fulham": 21703, - "montane": 21704, - "beverages": 21705, - "galaxies": 21706, - "redundant": 21707, - "swarm": 21708, - "##rot": 21709, - "##folia": 21710, - "##llis": 21711, - "buckinghamshire": 21712, - "fen": 21713, - "bearings": 21714, - "bahadur": 21715, - "##rom": 21716, - "gilles": 21717, - "phased": 21718, - "dynamite": 21719, - "faber": 21720, - "benoit": 21721, - "vip": 21722, - "##ount": 21723, - "##wd": 21724, - "booking": 21725, - "fractured": 21726, - "tailored": 21727, - "anya": 21728, - "spices": 21729, - "westwood": 21730, - "cairns": 21731, - "auditions": 21732, - 
"inflammation": 21733, - "steamed": 21734, - "##rocity": 21735, - "##acion": 21736, - "##urne": 21737, - "skyla": 21738, - "thereof": 21739, - "watford": 21740, - "torment": 21741, - "archdeacon": 21742, - "transforms": 21743, - "lulu": 21744, - "demeanor": 21745, - "fucked": 21746, - "serge": 21747, - "##sor": 21748, - "mckenna": 21749, - "minas": 21750, - "entertainer": 21751, - "##icide": 21752, - "caress": 21753, - "originate": 21754, - "residue": 21755, - "##sty": 21756, - "1740": 21757, - "##ilised": 21758, - "##org": 21759, - "beech": 21760, - "##wana": 21761, - "subsidies": 21762, - "##ghton": 21763, - "emptied": 21764, - "gladstone": 21765, - "ru": 21766, - "firefighters": 21767, - "voodoo": 21768, - "##rcle": 21769, - "het": 21770, - "nightingale": 21771, - "tamara": 21772, - "edmond": 21773, - "ingredient": 21774, - "weaknesses": 21775, - "silhouette": 21776, - "285": 21777, - "compatibility": 21778, - "withdrawing": 21779, - "hampson": 21780, - "##mona": 21781, - "anguish": 21782, - "giggling": 21783, - "##mber": 21784, - "bookstore": 21785, - "##jiang": 21786, - "southernmost": 21787, - "tilting": 21788, - "##vance": 21789, - "bai": 21790, - "economical": 21791, - "rf": 21792, - "briefcase": 21793, - "dreadful": 21794, - "hinted": 21795, - "projections": 21796, - "shattering": 21797, - "totaling": 21798, - "##rogate": 21799, - "analogue": 21800, - "indicted": 21801, - "periodical": 21802, - "fullback": 21803, - "##dman": 21804, - "haynes": 21805, - "##tenberg": 21806, - "##ffs": 21807, - "##ishment": 21808, - "1745": 21809, - "thirst": 21810, - "stumble": 21811, - "penang": 21812, - "vigorous": 21813, - "##ddling": 21814, - "##kor": 21815, - "##lium": 21816, - "octave": 21817, - "##ove": 21818, - "##enstein": 21819, - "##inen": 21820, - "##ones": 21821, - "siberian": 21822, - "##uti": 21823, - "cbn": 21824, - "repeal": 21825, - "swaying": 21826, - "##vington": 21827, - "khalid": 21828, - "tanaka": 21829, - "unicorn": 21830, - "otago": 21831, - 
"plastered": 21832, - "lobe": 21833, - "riddle": 21834, - "##rella": 21835, - "perch": 21836, - "##ishing": 21837, - "croydon": 21838, - "filtered": 21839, - "graeme": 21840, - "tripoli": 21841, - "##ossa": 21842, - "crocodile": 21843, - "##chers": 21844, - "sufi": 21845, - "mined": 21846, - "##tung": 21847, - "inferno": 21848, - "lsu": 21849, - "##phi": 21850, - "swelled": 21851, - "utilizes": 21852, - "£2": 21853, - "cale": 21854, - "periodicals": 21855, - "styx": 21856, - "hike": 21857, - "informally": 21858, - "coop": 21859, - "lund": 21860, - "##tidae": 21861, - "ala": 21862, - "hen": 21863, - "qui": 21864, - "transformations": 21865, - "disposed": 21866, - "sheath": 21867, - "chickens": 21868, - "##cade": 21869, - "fitzroy": 21870, - "sas": 21871, - "silesia": 21872, - "unacceptable": 21873, - "odisha": 21874, - "1650": 21875, - "sabrina": 21876, - "pe": 21877, - "spokane": 21878, - "ratios": 21879, - "athena": 21880, - "massage": 21881, - "shen": 21882, - "dilemma": 21883, - "##drum": 21884, - "##riz": 21885, - "##hul": 21886, - "corona": 21887, - "doubtful": 21888, - "niall": 21889, - "##pha": 21890, - "##bino": 21891, - "fines": 21892, - "cite": 21893, - "acknowledging": 21894, - "bangor": 21895, - "ballard": 21896, - "bathurst": 21897, - "##resh": 21898, - "huron": 21899, - "mustered": 21900, - "alzheimer": 21901, - "garments": 21902, - "kinase": 21903, - "tyre": 21904, - "warship": 21905, - "##cp": 21906, - "flashback": 21907, - "pulmonary": 21908, - "braun": 21909, - "cheat": 21910, - "kamal": 21911, - "cyclists": 21912, - "constructions": 21913, - "grenades": 21914, - "ndp": 21915, - "traveller": 21916, - "excuses": 21917, - "stomped": 21918, - "signalling": 21919, - "trimmed": 21920, - "futsal": 21921, - "mosques": 21922, - "relevance": 21923, - "##wine": 21924, - "wta": 21925, - "##23": 21926, - "##vah": 21927, - "##lter": 21928, - "hoc": 21929, - "##riding": 21930, - "optimistic": 21931, - "##´s": 21932, - "deco": 21933, - "sim": 21934, - 
"interacting": 21935, - "rejecting": 21936, - "moniker": 21937, - "waterways": 21938, - "##ieri": 21939, - "##oku": 21940, - "mayors": 21941, - "gdansk": 21942, - "outnumbered": 21943, - "pearls": 21944, - "##ended": 21945, - "##hampton": 21946, - "fairs": 21947, - "totals": 21948, - "dominating": 21949, - "262": 21950, - "notions": 21951, - "stairway": 21952, - "compiling": 21953, - "pursed": 21954, - "commodities": 21955, - "grease": 21956, - "yeast": 21957, - "##jong": 21958, - "carthage": 21959, - "griffiths": 21960, - "residual": 21961, - "amc": 21962, - "contraction": 21963, - "laird": 21964, - "sapphire": 21965, - "##marine": 21966, - "##ivated": 21967, - "amalgamation": 21968, - "dissolve": 21969, - "inclination": 21970, - "lyle": 21971, - "packaged": 21972, - "altitudes": 21973, - "suez": 21974, - "canons": 21975, - "graded": 21976, - "lurched": 21977, - "narrowing": 21978, - "boasts": 21979, - "guise": 21980, - "wed": 21981, - "enrico": 21982, - "##ovsky": 21983, - "rower": 21984, - "scarred": 21985, - "bree": 21986, - "cub": 21987, - "iberian": 21988, - "protagonists": 21989, - "bargaining": 21990, - "proposing": 21991, - "trainers": 21992, - "voyages": 21993, - "vans": 21994, - "fishes": 21995, - "##aea": 21996, - "##ivist": 21997, - "##verance": 21998, - "encryption": 21999, - "artworks": 22000, - "kazan": 22001, - "sabre": 22002, - "cleopatra": 22003, - "hepburn": 22004, - "rotting": 22005, - "supremacy": 22006, - "mecklenburg": 22007, - "##brate": 22008, - "burrows": 22009, - "hazards": 22010, - "outgoing": 22011, - "flair": 22012, - "organizes": 22013, - "##ctions": 22014, - "scorpion": 22015, - "##usions": 22016, - "boo": 22017, - "234": 22018, - "chevalier": 22019, - "dunedin": 22020, - "slapping": 22021, - "##34": 22022, - "ineligible": 22023, - "pensions": 22024, - "##38": 22025, - "##omic": 22026, - "manufactures": 22027, - "emails": 22028, - "bismarck": 22029, - "238": 22030, - "weakening": 22031, - "blackish": 22032, - "ding": 22033, - 
"mcgee": 22034, - "quo": 22035, - "##rling": 22036, - "northernmost": 22037, - "xx": 22038, - "manpower": 22039, - "greed": 22040, - "sampson": 22041, - "clicking": 22042, - "##ange": 22043, - "##horpe": 22044, - "##inations": 22045, - "##roving": 22046, - "torre": 22047, - "##eptive": 22048, - "##moral": 22049, - "symbolism": 22050, - "38th": 22051, - "asshole": 22052, - "meritorious": 22053, - "outfits": 22054, - "splashed": 22055, - "biographies": 22056, - "sprung": 22057, - "astros": 22058, - "##tale": 22059, - "302": 22060, - "737": 22061, - "filly": 22062, - "raoul": 22063, - "nw": 22064, - "tokugawa": 22065, - "linden": 22066, - "clubhouse": 22067, - "##apa": 22068, - "tracts": 22069, - "romano": 22070, - "##pio": 22071, - "putin": 22072, - "tags": 22073, - "##note": 22074, - "chained": 22075, - "dickson": 22076, - "gunshot": 22077, - "moe": 22078, - "gunn": 22079, - "rashid": 22080, - "##tails": 22081, - "zipper": 22082, - "##bas": 22083, - "##nea": 22084, - "contrasted": 22085, - "##ply": 22086, - "##udes": 22087, - "plum": 22088, - "pharaoh": 22089, - "##pile": 22090, - "aw": 22091, - "comedies": 22092, - "ingrid": 22093, - "sandwiches": 22094, - "subdivisions": 22095, - "1100": 22096, - "mariana": 22097, - "nokia": 22098, - "kamen": 22099, - "hz": 22100, - "delaney": 22101, - "veto": 22102, - "herring": 22103, - "##words": 22104, - "possessive": 22105, - "outlines": 22106, - "##roup": 22107, - "siemens": 22108, - "stairwell": 22109, - "rc": 22110, - "gallantry": 22111, - "messiah": 22112, - "palais": 22113, - "yells": 22114, - "233": 22115, - "zeppelin": 22116, - "##dm": 22117, - "bolivar": 22118, - "##cede": 22119, - "smackdown": 22120, - "mckinley": 22121, - "##mora": 22122, - "##yt": 22123, - "muted": 22124, - "geologic": 22125, - "finely": 22126, - "unitary": 22127, - "avatar": 22128, - "hamas": 22129, - "maynard": 22130, - "rees": 22131, - "bog": 22132, - "contrasting": 22133, - "##rut": 22134, - "liv": 22135, - "chico": 22136, - "disposition": 
22137, - "pixel": 22138, - "##erate": 22139, - "becca": 22140, - "dmitry": 22141, - "yeshiva": 22142, - "narratives": 22143, - "##lva": 22144, - "##ulton": 22145, - "mercenary": 22146, - "sharpe": 22147, - "tempered": 22148, - "navigate": 22149, - "stealth": 22150, - "amassed": 22151, - "keynes": 22152, - "##lini": 22153, - "untouched": 22154, - "##rrie": 22155, - "havoc": 22156, - "lithium": 22157, - "##fighting": 22158, - "abyss": 22159, - "graf": 22160, - "southward": 22161, - "wolverine": 22162, - "balloons": 22163, - "implements": 22164, - "ngos": 22165, - "transitions": 22166, - "##icum": 22167, - "ambushed": 22168, - "concacaf": 22169, - "dormant": 22170, - "economists": 22171, - "##dim": 22172, - "costing": 22173, - "csi": 22174, - "rana": 22175, - "universite": 22176, - "boulders": 22177, - "verity": 22178, - "##llon": 22179, - "collin": 22180, - "mellon": 22181, - "misses": 22182, - "cypress": 22183, - "fluorescent": 22184, - "lifeless": 22185, - "spence": 22186, - "##ulla": 22187, - "crewe": 22188, - "shepard": 22189, - "pak": 22190, - "revelations": 22191, - "##م": 22192, - "jolly": 22193, - "gibbons": 22194, - "paw": 22195, - "##dro": 22196, - "##quel": 22197, - "freeing": 22198, - "##test": 22199, - "shack": 22200, - "fries": 22201, - "palatine": 22202, - "##51": 22203, - "##hiko": 22204, - "accompaniment": 22205, - "cruising": 22206, - "recycled": 22207, - "##aver": 22208, - "erwin": 22209, - "sorting": 22210, - "synthesizers": 22211, - "dyke": 22212, - "realities": 22213, - "sg": 22214, - "strides": 22215, - "enslaved": 22216, - "wetland": 22217, - "##ghan": 22218, - "competence": 22219, - "gunpowder": 22220, - "grassy": 22221, - "maroon": 22222, - "reactors": 22223, - "objection": 22224, - "##oms": 22225, - "carlson": 22226, - "gearbox": 22227, - "macintosh": 22228, - "radios": 22229, - "shelton": 22230, - "##sho": 22231, - "clergyman": 22232, - "prakash": 22233, - "254": 22234, - "mongols": 22235, - "trophies": 22236, - "oricon": 22237, - "228": 
22238, - "stimuli": 22239, - "twenty20": 22240, - "cantonese": 22241, - "cortes": 22242, - "mirrored": 22243, - "##saurus": 22244, - "bhp": 22245, - "cristina": 22246, - "melancholy": 22247, - "##lating": 22248, - "enjoyable": 22249, - "nuevo": 22250, - "##wny": 22251, - "downfall": 22252, - "schumacher": 22253, - "##ind": 22254, - "banging": 22255, - "lausanne": 22256, - "rumbled": 22257, - "paramilitary": 22258, - "reflex": 22259, - "ax": 22260, - "amplitude": 22261, - "migratory": 22262, - "##gall": 22263, - "##ups": 22264, - "midi": 22265, - "barnard": 22266, - "lastly": 22267, - "sherry": 22268, - "##hp": 22269, - "##nall": 22270, - "keystone": 22271, - "##kra": 22272, - "carleton": 22273, - "slippery": 22274, - "##53": 22275, - "coloring": 22276, - "foe": 22277, - "socket": 22278, - "otter": 22279, - "##rgos": 22280, - "mats": 22281, - "##tose": 22282, - "consultants": 22283, - "bafta": 22284, - "bison": 22285, - "topping": 22286, - "##km": 22287, - "490": 22288, - "primal": 22289, - "abandonment": 22290, - "transplant": 22291, - "atoll": 22292, - "hideous": 22293, - "mort": 22294, - "pained": 22295, - "reproduced": 22296, - "tae": 22297, - "howling": 22298, - "##turn": 22299, - "unlawful": 22300, - "billionaire": 22301, - "hotter": 22302, - "poised": 22303, - "lansing": 22304, - "##chang": 22305, - "dinamo": 22306, - "retro": 22307, - "messing": 22308, - "nfc": 22309, - "domesday": 22310, - "##mina": 22311, - "blitz": 22312, - "timed": 22313, - "##athing": 22314, - "##kley": 22315, - "ascending": 22316, - "gesturing": 22317, - "##izations": 22318, - "signaled": 22319, - "tis": 22320, - "chinatown": 22321, - "mermaid": 22322, - "savanna": 22323, - "jameson": 22324, - "##aint": 22325, - "catalina": 22326, - "##pet": 22327, - "##hers": 22328, - "cochrane": 22329, - "cy": 22330, - "chatting": 22331, - "##kus": 22332, - "alerted": 22333, - "computation": 22334, - "mused": 22335, - "noelle": 22336, - "majestic": 22337, - "mohawk": 22338, - "campo": 22339, - 
"octagonal": 22340, - "##sant": 22341, - "##hend": 22342, - "241": 22343, - "aspiring": 22344, - "##mart": 22345, - "comprehend": 22346, - "iona": 22347, - "paralyzed": 22348, - "shimmering": 22349, - "swindon": 22350, - "rhone": 22351, - "##eley": 22352, - "reputed": 22353, - "configurations": 22354, - "pitchfork": 22355, - "agitation": 22356, - "francais": 22357, - "gillian": 22358, - "lipstick": 22359, - "##ilo": 22360, - "outsiders": 22361, - "pontifical": 22362, - "resisting": 22363, - "bitterness": 22364, - "sewer": 22365, - "rockies": 22366, - "##edd": 22367, - "##ucher": 22368, - "misleading": 22369, - "1756": 22370, - "exiting": 22371, - "galloway": 22372, - "##nging": 22373, - "risked": 22374, - "##heart": 22375, - "246": 22376, - "commemoration": 22377, - "schultz": 22378, - "##rka": 22379, - "integrating": 22380, - "##rsa": 22381, - "poses": 22382, - "shrieked": 22383, - "##weiler": 22384, - "guineas": 22385, - "gladys": 22386, - "jerking": 22387, - "owls": 22388, - "goldsmith": 22389, - "nightly": 22390, - "penetrating": 22391, - "##unced": 22392, - "lia": 22393, - "##33": 22394, - "ignited": 22395, - "betsy": 22396, - "##aring": 22397, - "##thorpe": 22398, - "follower": 22399, - "vigorously": 22400, - "##rave": 22401, - "coded": 22402, - "kiran": 22403, - "knit": 22404, - "zoology": 22405, - "tbilisi": 22406, - "##28": 22407, - "##bered": 22408, - "repository": 22409, - "govt": 22410, - "deciduous": 22411, - "dino": 22412, - "growling": 22413, - "##bba": 22414, - "enhancement": 22415, - "unleashed": 22416, - "chanting": 22417, - "pussy": 22418, - "biochemistry": 22419, - "##eric": 22420, - "kettle": 22421, - "repression": 22422, - "toxicity": 22423, - "nrhp": 22424, - "##arth": 22425, - "##kko": 22426, - "##bush": 22427, - "ernesto": 22428, - "commended": 22429, - "outspoken": 22430, - "242": 22431, - "mca": 22432, - "parchment": 22433, - "sms": 22434, - "kristen": 22435, - "##aton": 22436, - "bisexual": 22437, - "raked": 22438, - "glamour": 22439, - 
"navajo": 22440, - "a2": 22441, - "conditioned": 22442, - "showcased": 22443, - "##hma": 22444, - "spacious": 22445, - "youthful": 22446, - "##esa": 22447, - "usl": 22448, - "appliances": 22449, - "junta": 22450, - "brest": 22451, - "layne": 22452, - "conglomerate": 22453, - "enchanted": 22454, - "chao": 22455, - "loosened": 22456, - "picasso": 22457, - "circulating": 22458, - "inspect": 22459, - "montevideo": 22460, - "##centric": 22461, - "##kti": 22462, - "piazza": 22463, - "spurred": 22464, - "##aith": 22465, - "bari": 22466, - "freedoms": 22467, - "poultry": 22468, - "stamford": 22469, - "lieu": 22470, - "##ect": 22471, - "indigo": 22472, - "sarcastic": 22473, - "bahia": 22474, - "stump": 22475, - "attach": 22476, - "dvds": 22477, - "frankenstein": 22478, - "lille": 22479, - "approx": 22480, - "scriptures": 22481, - "pollen": 22482, - "##script": 22483, - "nmi": 22484, - "overseen": 22485, - "##ivism": 22486, - "tides": 22487, - "proponent": 22488, - "newmarket": 22489, - "inherit": 22490, - "milling": 22491, - "##erland": 22492, - "centralized": 22493, - "##rou": 22494, - "distributors": 22495, - "credentials": 22496, - "drawers": 22497, - "abbreviation": 22498, - "##lco": 22499, - "##xon": 22500, - "downing": 22501, - "uncomfortably": 22502, - "ripe": 22503, - "##oes": 22504, - "erase": 22505, - "franchises": 22506, - "##ever": 22507, - "populace": 22508, - "##bery": 22509, - "##khar": 22510, - "decomposition": 22511, - "pleas": 22512, - "##tet": 22513, - "daryl": 22514, - "sabah": 22515, - "##stle": 22516, - "##wide": 22517, - "fearless": 22518, - "genie": 22519, - "lesions": 22520, - "annette": 22521, - "##ogist": 22522, - "oboe": 22523, - "appendix": 22524, - "nair": 22525, - "dripped": 22526, - "petitioned": 22527, - "maclean": 22528, - "mosquito": 22529, - "parrot": 22530, - "rpg": 22531, - "hampered": 22532, - "1648": 22533, - "operatic": 22534, - "reservoirs": 22535, - "##tham": 22536, - "irrelevant": 22537, - "jolt": 22538, - "summarized": 22539, - 
"##fp": 22540, - "medallion": 22541, - "##taff": 22542, - "##−": 22543, - "clawed": 22544, - "harlow": 22545, - "narrower": 22546, - "goddard": 22547, - "marcia": 22548, - "bodied": 22549, - "fremont": 22550, - "suarez": 22551, - "altering": 22552, - "tempest": 22553, - "mussolini": 22554, - "porn": 22555, - "##isms": 22556, - "sweetly": 22557, - "oversees": 22558, - "walkers": 22559, - "solitude": 22560, - "grimly": 22561, - "shrines": 22562, - "hk": 22563, - "ich": 22564, - "supervisors": 22565, - "hostess": 22566, - "dietrich": 22567, - "legitimacy": 22568, - "brushes": 22569, - "expressive": 22570, - "##yp": 22571, - "dissipated": 22572, - "##rse": 22573, - "localized": 22574, - "systemic": 22575, - "##nikov": 22576, - "gettysburg": 22577, - "##js": 22578, - "##uaries": 22579, - "dialogues": 22580, - "muttering": 22581, - "251": 22582, - "housekeeper": 22583, - "sicilian": 22584, - "discouraged": 22585, - "##frey": 22586, - "beamed": 22587, - "kaladin": 22588, - "halftime": 22589, - "kidnap": 22590, - "##amo": 22591, - "##llet": 22592, - "1754": 22593, - "synonymous": 22594, - "depleted": 22595, - "instituto": 22596, - "insulin": 22597, - "reprised": 22598, - "##opsis": 22599, - "clashed": 22600, - "##ctric": 22601, - "interrupting": 22602, - "radcliffe": 22603, - "insisting": 22604, - "medici": 22605, - "1715": 22606, - "ejected": 22607, - "playfully": 22608, - "turbulent": 22609, - "##47": 22610, - "starvation": 22611, - "##rini": 22612, - "shipment": 22613, - "rebellious": 22614, - "petersen": 22615, - "verification": 22616, - "merits": 22617, - "##rified": 22618, - "cakes": 22619, - "##charged": 22620, - "1757": 22621, - "milford": 22622, - "shortages": 22623, - "spying": 22624, - "fidelity": 22625, - "##aker": 22626, - "emitted": 22627, - "storylines": 22628, - "harvested": 22629, - "seismic": 22630, - "##iform": 22631, - "cheung": 22632, - "kilda": 22633, - "theoretically": 22634, - "barbie": 22635, - "lynx": 22636, - "##rgy": 22637, - "##tius": 22638, - 
"goblin": 22639, - "mata": 22640, - "poisonous": 22641, - "##nburg": 22642, - "reactive": 22643, - "residues": 22644, - "obedience": 22645, - "##евич": 22646, - "conjecture": 22647, - "##rac": 22648, - "401": 22649, - "hating": 22650, - "sixties": 22651, - "kicker": 22652, - "moaning": 22653, - "motown": 22654, - "##bha": 22655, - "emancipation": 22656, - "neoclassical": 22657, - "##hering": 22658, - "consoles": 22659, - "ebert": 22660, - "professorship": 22661, - "##tures": 22662, - "sustaining": 22663, - "assaults": 22664, - "obeyed": 22665, - "affluent": 22666, - "incurred": 22667, - "tornadoes": 22668, - "##eber": 22669, - "##zow": 22670, - "emphasizing": 22671, - "highlanders": 22672, - "cheated": 22673, - "helmets": 22674, - "##ctus": 22675, - "internship": 22676, - "terence": 22677, - "bony": 22678, - "executions": 22679, - "legislators": 22680, - "berries": 22681, - "peninsular": 22682, - "tinged": 22683, - "##aco": 22684, - "1689": 22685, - "amplifier": 22686, - "corvette": 22687, - "ribbons": 22688, - "lavish": 22689, - "pennant": 22690, - "##lander": 22691, - "worthless": 22692, - "##chfield": 22693, - "##forms": 22694, - "mariano": 22695, - "pyrenees": 22696, - "expenditures": 22697, - "##icides": 22698, - "chesterfield": 22699, - "mandir": 22700, - "tailor": 22701, - "39th": 22702, - "sergey": 22703, - "nestled": 22704, - "willed": 22705, - "aristocracy": 22706, - "devotees": 22707, - "goodnight": 22708, - "raaf": 22709, - "rumored": 22710, - "weaponry": 22711, - "remy": 22712, - "appropriations": 22713, - "harcourt": 22714, - "burr": 22715, - "riaa": 22716, - "##lence": 22717, - "limitation": 22718, - "unnoticed": 22719, - "guo": 22720, - "soaking": 22721, - "swamps": 22722, - "##tica": 22723, - "collapsing": 22724, - "tatiana": 22725, - "descriptive": 22726, - "brigham": 22727, - "psalm": 22728, - "##chment": 22729, - "maddox": 22730, - "##lization": 22731, - "patti": 22732, - "caliph": 22733, - "##aja": 22734, - "akron": 22735, - "injuring": 22736, 
- "serra": 22737, - "##ganj": 22738, - "basins": 22739, - "##sari": 22740, - "astonished": 22741, - "launcher": 22742, - "##church": 22743, - "hilary": 22744, - "wilkins": 22745, - "sewing": 22746, - "##sf": 22747, - "stinging": 22748, - "##fia": 22749, - "##ncia": 22750, - "underwood": 22751, - "startup": 22752, - "##ition": 22753, - "compilations": 22754, - "vibrations": 22755, - "embankment": 22756, - "jurist": 22757, - "##nity": 22758, - "bard": 22759, - "juventus": 22760, - "groundwater": 22761, - "kern": 22762, - "palaces": 22763, - "helium": 22764, - "boca": 22765, - "cramped": 22766, - "marissa": 22767, - "soto": 22768, - "##worm": 22769, - "jae": 22770, - "princely": 22771, - "##ggy": 22772, - "faso": 22773, - "bazaar": 22774, - "warmly": 22775, - "##voking": 22776, - "229": 22777, - "pairing": 22778, - "##lite": 22779, - "##grate": 22780, - "##nets": 22781, - "wien": 22782, - "freaked": 22783, - "ulysses": 22784, - "rebirth": 22785, - "##alia": 22786, - "##rent": 22787, - "mummy": 22788, - "guzman": 22789, - "jimenez": 22790, - "stilled": 22791, - "##nitz": 22792, - "trajectory": 22793, - "tha": 22794, - "woken": 22795, - "archival": 22796, - "professions": 22797, - "##pts": 22798, - "##pta": 22799, - "hilly": 22800, - "shadowy": 22801, - "shrink": 22802, - "##bolt": 22803, - "norwood": 22804, - "glued": 22805, - "migrate": 22806, - "stereotypes": 22807, - "devoid": 22808, - "##pheus": 22809, - "625": 22810, - "evacuate": 22811, - "horrors": 22812, - "infancy": 22813, - "gotham": 22814, - "knowles": 22815, - "optic": 22816, - "downloaded": 22817, - "sachs": 22818, - "kingsley": 22819, - "parramatta": 22820, - "darryl": 22821, - "mor": 22822, - "##onale": 22823, - "shady": 22824, - "commence": 22825, - "confesses": 22826, - "kan": 22827, - "##meter": 22828, - "##placed": 22829, - "marlborough": 22830, - "roundabout": 22831, - "regents": 22832, - "frigates": 22833, - "io": 22834, - "##imating": 22835, - "gothenburg": 22836, - "revoked": 22837, - "carvings": 
22838, - "clockwise": 22839, - "convertible": 22840, - "intruder": 22841, - "##sche": 22842, - "banged": 22843, - "##ogo": 22844, - "vicky": 22845, - "bourgeois": 22846, - "##mony": 22847, - "dupont": 22848, - "footing": 22849, - "##gum": 22850, - "pd": 22851, - "##real": 22852, - "buckle": 22853, - "yun": 22854, - "penthouse": 22855, - "sane": 22856, - "720": 22857, - "serviced": 22858, - "stakeholders": 22859, - "neumann": 22860, - "bb": 22861, - "##eers": 22862, - "comb": 22863, - "##gam": 22864, - "catchment": 22865, - "pinning": 22866, - "rallies": 22867, - "typing": 22868, - "##elles": 22869, - "forefront": 22870, - "freiburg": 22871, - "sweetie": 22872, - "giacomo": 22873, - "widowed": 22874, - "goodwill": 22875, - "worshipped": 22876, - "aspirations": 22877, - "midday": 22878, - "##vat": 22879, - "fishery": 22880, - "##trick": 22881, - "bournemouth": 22882, - "turk": 22883, - "243": 22884, - "hearth": 22885, - "ethanol": 22886, - "guadalajara": 22887, - "murmurs": 22888, - "sl": 22889, - "##uge": 22890, - "afforded": 22891, - "scripted": 22892, - "##hta": 22893, - "wah": 22894, - "##jn": 22895, - "coroner": 22896, - "translucent": 22897, - "252": 22898, - "memorials": 22899, - "puck": 22900, - "progresses": 22901, - "clumsy": 22902, - "##race": 22903, - "315": 22904, - "candace": 22905, - "recounted": 22906, - "##27": 22907, - "##slin": 22908, - "##uve": 22909, - "filtering": 22910, - "##mac": 22911, - "howl": 22912, - "strata": 22913, - "heron": 22914, - "leveled": 22915, - "##ays": 22916, - "dubious": 22917, - "##oja": 22918, - "##т": 22919, - "##wheel": 22920, - "citations": 22921, - "exhibiting": 22922, - "##laya": 22923, - "##mics": 22924, - "##pods": 22925, - "turkic": 22926, - "##lberg": 22927, - "injunction": 22928, - "##ennial": 22929, - "##mit": 22930, - "antibodies": 22931, - "##44": 22932, - "organise": 22933, - "##rigues": 22934, - "cardiovascular": 22935, - "cushion": 22936, - "inverness": 22937, - "##zquez": 22938, - "dia": 22939, - "cocoa": 
22940, - "sibling": 22941, - "##tman": 22942, - "##roid": 22943, - "expanse": 22944, - "feasible": 22945, - "tunisian": 22946, - "algiers": 22947, - "##relli": 22948, - "rus": 22949, - "bloomberg": 22950, - "dso": 22951, - "westphalia": 22952, - "bro": 22953, - "tacoma": 22954, - "281": 22955, - "downloads": 22956, - "##ours": 22957, - "konrad": 22958, - "duran": 22959, - "##hdi": 22960, - "continuum": 22961, - "jett": 22962, - "compares": 22963, - "legislator": 22964, - "secession": 22965, - "##nable": 22966, - "##gues": 22967, - "##zuka": 22968, - "translating": 22969, - "reacher": 22970, - "##gley": 22971, - "##ła": 22972, - "aleppo": 22973, - "##agi": 22974, - "tc": 22975, - "orchards": 22976, - "trapping": 22977, - "linguist": 22978, - "versatile": 22979, - "drumming": 22980, - "postage": 22981, - "calhoun": 22982, - "superiors": 22983, - "##mx": 22984, - "barefoot": 22985, - "leary": 22986, - "##cis": 22987, - "ignacio": 22988, - "alfa": 22989, - "kaplan": 22990, - "##rogen": 22991, - "bratislava": 22992, - "mori": 22993, - "##vot": 22994, - "disturb": 22995, - "haas": 22996, - "313": 22997, - "cartridges": 22998, - "gilmore": 22999, - "radiated": 23000, - "salford": 23001, - "tunic": 23002, - "hades": 23003, - "##ulsive": 23004, - "archeological": 23005, - "delilah": 23006, - "magistrates": 23007, - "auditioned": 23008, - "brewster": 23009, - "charters": 23010, - "empowerment": 23011, - "blogs": 23012, - "cappella": 23013, - "dynasties": 23014, - "iroquois": 23015, - "whipping": 23016, - "##krishna": 23017, - "raceway": 23018, - "truths": 23019, - "myra": 23020, - "weaken": 23021, - "judah": 23022, - "mcgregor": 23023, - "##horse": 23024, - "mic": 23025, - "refueling": 23026, - "37th": 23027, - "burnley": 23028, - "bosses": 23029, - "markus": 23030, - "premio": 23031, - "query": 23032, - "##gga": 23033, - "dunbar": 23034, - "##economic": 23035, - "darkest": 23036, - "lyndon": 23037, - "sealing": 23038, - "commendation": 23039, - "reappeared": 23040, - 
"##mun": 23041, - "addicted": 23042, - "ezio": 23043, - "slaughtered": 23044, - "satisfactory": 23045, - "shuffle": 23046, - "##eves": 23047, - "##thic": 23048, - "##uj": 23049, - "fortification": 23050, - "warrington": 23051, - "##otto": 23052, - "resurrected": 23053, - "fargo": 23054, - "mane": 23055, - "##utable": 23056, - "##lei": 23057, - "##space": 23058, - "foreword": 23059, - "ox": 23060, - "##aris": 23061, - "##vern": 23062, - "abrams": 23063, - "hua": 23064, - "##mento": 23065, - "sakura": 23066, - "##alo": 23067, - "uv": 23068, - "sentimental": 23069, - "##skaya": 23070, - "midfield": 23071, - "##eses": 23072, - "sturdy": 23073, - "scrolls": 23074, - "macleod": 23075, - "##kyu": 23076, - "entropy": 23077, - "##lance": 23078, - "mitochondrial": 23079, - "cicero": 23080, - "excelled": 23081, - "thinner": 23082, - "convoys": 23083, - "perceive": 23084, - "##oslav": 23085, - "##urable": 23086, - "systematically": 23087, - "grind": 23088, - "burkina": 23089, - "287": 23090, - "##tagram": 23091, - "ops": 23092, - "##aman": 23093, - "guantanamo": 23094, - "##cloth": 23095, - "##tite": 23096, - "forcefully": 23097, - "wavy": 23098, - "##jou": 23099, - "pointless": 23100, - "##linger": 23101, - "##tze": 23102, - "layton": 23103, - "portico": 23104, - "superficial": 23105, - "clerical": 23106, - "outlaws": 23107, - "##hism": 23108, - "burials": 23109, - "muir": 23110, - "##inn": 23111, - "creditors": 23112, - "hauling": 23113, - "rattle": 23114, - "##leg": 23115, - "calais": 23116, - "monde": 23117, - "archers": 23118, - "reclaimed": 23119, - "dwell": 23120, - "wexford": 23121, - "hellenic": 23122, - "falsely": 23123, - "remorse": 23124, - "##tek": 23125, - "dough": 23126, - "furnishings": 23127, - "##uttered": 23128, - "gabon": 23129, - "neurological": 23130, - "novice": 23131, - "##igraphy": 23132, - "contemplated": 23133, - "pulpit": 23134, - "nightstand": 23135, - "saratoga": 23136, - "##istan": 23137, - "documenting": 23138, - "pulsing": 23139, - "taluk": 
23140, - "##firmed": 23141, - "busted": 23142, - "marital": 23143, - "##rien": 23144, - "disagreements": 23145, - "wasps": 23146, - "##yes": 23147, - "hodge": 23148, - "mcdonnell": 23149, - "mimic": 23150, - "fran": 23151, - "pendant": 23152, - "dhabi": 23153, - "musa": 23154, - "##nington": 23155, - "congratulations": 23156, - "argent": 23157, - "darrell": 23158, - "concussion": 23159, - "losers": 23160, - "regrets": 23161, - "thessaloniki": 23162, - "reversal": 23163, - "donaldson": 23164, - "hardwood": 23165, - "thence": 23166, - "achilles": 23167, - "ritter": 23168, - "##eran": 23169, - "demonic": 23170, - "jurgen": 23171, - "prophets": 23172, - "goethe": 23173, - "eki": 23174, - "classmate": 23175, - "buff": 23176, - "##cking": 23177, - "yank": 23178, - "irrational": 23179, - "##inging": 23180, - "perished": 23181, - "seductive": 23182, - "qur": 23183, - "sourced": 23184, - "##crat": 23185, - "##typic": 23186, - "mustard": 23187, - "ravine": 23188, - "barre": 23189, - "horizontally": 23190, - "characterization": 23191, - "phylogenetic": 23192, - "boise": 23193, - "##dit": 23194, - "##runner": 23195, - "##tower": 23196, - "brutally": 23197, - "intercourse": 23198, - "seduce": 23199, - "##bbing": 23200, - "fay": 23201, - "ferris": 23202, - "ogden": 23203, - "amar": 23204, - "nik": 23205, - "unarmed": 23206, - "##inator": 23207, - "evaluating": 23208, - "kyrgyzstan": 23209, - "sweetness": 23210, - "##lford": 23211, - "##oki": 23212, - "mccormick": 23213, - "meiji": 23214, - "notoriety": 23215, - "stimulate": 23216, - "disrupt": 23217, - "figuring": 23218, - "instructional": 23219, - "mcgrath": 23220, - "##zoo": 23221, - "groundbreaking": 23222, - "##lto": 23223, - "flinch": 23224, - "khorasan": 23225, - "agrarian": 23226, - "bengals": 23227, - "mixer": 23228, - "radiating": 23229, - "##sov": 23230, - "ingram": 23231, - "pitchers": 23232, - "nad": 23233, - "tariff": 23234, - "##cript": 23235, - "tata": 23236, - "##codes": 23237, - "##emi": 23238, - "##ungen": 
23239, - "appellate": 23240, - "lehigh": 23241, - "##bled": 23242, - "##giri": 23243, - "brawl": 23244, - "duct": 23245, - "texans": 23246, - "##ciation": 23247, - "##ropolis": 23248, - "skipper": 23249, - "speculative": 23250, - "vomit": 23251, - "doctrines": 23252, - "stresses": 23253, - "253": 23254, - "davy": 23255, - "graders": 23256, - "whitehead": 23257, - "jozef": 23258, - "timely": 23259, - "cumulative": 23260, - "haryana": 23261, - "paints": 23262, - "appropriately": 23263, - "boon": 23264, - "cactus": 23265, - "##ales": 23266, - "##pid": 23267, - "dow": 23268, - "legions": 23269, - "##pit": 23270, - "perceptions": 23271, - "1730": 23272, - "picturesque": 23273, - "##yse": 23274, - "periphery": 23275, - "rune": 23276, - "wr": 23277, - "##aha": 23278, - "celtics": 23279, - "sentencing": 23280, - "whoa": 23281, - "##erin": 23282, - "confirms": 23283, - "variance": 23284, - "425": 23285, - "moines": 23286, - "mathews": 23287, - "spade": 23288, - "rave": 23289, - "m1": 23290, - "fronted": 23291, - "fx": 23292, - "blending": 23293, - "alleging": 23294, - "reared": 23295, - "##gl": 23296, - "237": 23297, - "##paper": 23298, - "grassroots": 23299, - "eroded": 23300, - "##free": 23301, - "##physical": 23302, - "directs": 23303, - "ordeal": 23304, - "##sław": 23305, - "accelerate": 23306, - "hacker": 23307, - "rooftop": 23308, - "##inia": 23309, - "lev": 23310, - "buys": 23311, - "cebu": 23312, - "devote": 23313, - "##lce": 23314, - "specialising": 23315, - "##ulsion": 23316, - "choreographed": 23317, - "repetition": 23318, - "warehouses": 23319, - "##ryl": 23320, - "paisley": 23321, - "tuscany": 23322, - "analogy": 23323, - "sorcerer": 23324, - "hash": 23325, - "huts": 23326, - "shards": 23327, - "descends": 23328, - "exclude": 23329, - "nix": 23330, - "chaplin": 23331, - "gaga": 23332, - "ito": 23333, - "vane": 23334, - "##drich": 23335, - "causeway": 23336, - "misconduct": 23337, - "limo": 23338, - "orchestrated": 23339, - "glands": 23340, - "jana": 23341, - 
"##kot": 23342, - "u2": 23343, - "##mple": 23344, - "##sons": 23345, - "branching": 23346, - "contrasts": 23347, - "scoop": 23348, - "longed": 23349, - "##virus": 23350, - "chattanooga": 23351, - "##75": 23352, - "syrup": 23353, - "cornerstone": 23354, - "##tized": 23355, - "##mind": 23356, - "##iaceae": 23357, - "careless": 23358, - "precedence": 23359, - "frescoes": 23360, - "##uet": 23361, - "chilled": 23362, - "consult": 23363, - "modelled": 23364, - "snatch": 23365, - "peat": 23366, - "##thermal": 23367, - "caucasian": 23368, - "humane": 23369, - "relaxation": 23370, - "spins": 23371, - "temperance": 23372, - "##lbert": 23373, - "occupations": 23374, - "lambda": 23375, - "hybrids": 23376, - "moons": 23377, - "mp3": 23378, - "##oese": 23379, - "247": 23380, - "rolf": 23381, - "societal": 23382, - "yerevan": 23383, - "ness": 23384, - "##ssler": 23385, - "befriended": 23386, - "mechanized": 23387, - "nominate": 23388, - "trough": 23389, - "boasted": 23390, - "cues": 23391, - "seater": 23392, - "##hom": 23393, - "bends": 23394, - "##tangle": 23395, - "conductors": 23396, - "emptiness": 23397, - "##lmer": 23398, - "eurasian": 23399, - "adriatic": 23400, - "tian": 23401, - "##cie": 23402, - "anxiously": 23403, - "lark": 23404, - "propellers": 23405, - "chichester": 23406, - "jock": 23407, - "ev": 23408, - "2a": 23409, - "##holding": 23410, - "credible": 23411, - "recounts": 23412, - "tori": 23413, - "loyalist": 23414, - "abduction": 23415, - "##hoot": 23416, - "##redo": 23417, - "nepali": 23418, - "##mite": 23419, - "ventral": 23420, - "tempting": 23421, - "##ango": 23422, - "##crats": 23423, - "steered": 23424, - "##wice": 23425, - "javelin": 23426, - "dipping": 23427, - "laborers": 23428, - "prentice": 23429, - "looming": 23430, - "titanium": 23431, - "##ː": 23432, - "badges": 23433, - "emir": 23434, - "tensor": 23435, - "##ntation": 23436, - "egyptians": 23437, - "rash": 23438, - "denies": 23439, - "hawthorne": 23440, - "lombard": 23441, - "showers": 23442, - 
"wehrmacht": 23443, - "dietary": 23444, - "trojan": 23445, - "##reus": 23446, - "welles": 23447, - "executing": 23448, - "horseshoe": 23449, - "lifeboat": 23450, - "##lak": 23451, - "elsa": 23452, - "infirmary": 23453, - "nearing": 23454, - "roberta": 23455, - "boyer": 23456, - "mutter": 23457, - "trillion": 23458, - "joanne": 23459, - "##fine": 23460, - "##oked": 23461, - "sinks": 23462, - "vortex": 23463, - "uruguayan": 23464, - "clasp": 23465, - "sirius": 23466, - "##block": 23467, - "accelerator": 23468, - "prohibit": 23469, - "sunken": 23470, - "byu": 23471, - "chronological": 23472, - "diplomats": 23473, - "ochreous": 23474, - "510": 23475, - "symmetrical": 23476, - "1644": 23477, - "maia": 23478, - "##tology": 23479, - "salts": 23480, - "reigns": 23481, - "atrocities": 23482, - "##ия": 23483, - "hess": 23484, - "bared": 23485, - "issn": 23486, - "##vyn": 23487, - "cater": 23488, - "saturated": 23489, - "##cycle": 23490, - "##isse": 23491, - "sable": 23492, - "voyager": 23493, - "dyer": 23494, - "yusuf": 23495, - "##inge": 23496, - "fountains": 23497, - "wolff": 23498, - "##39": 23499, - "##nni": 23500, - "engraving": 23501, - "rollins": 23502, - "atheist": 23503, - "ominous": 23504, - "##ault": 23505, - "herr": 23506, - "chariot": 23507, - "martina": 23508, - "strung": 23509, - "##fell": 23510, - "##farlane": 23511, - "horrific": 23512, - "sahib": 23513, - "gazes": 23514, - "saetan": 23515, - "erased": 23516, - "ptolemy": 23517, - "##olic": 23518, - "flushing": 23519, - "lauderdale": 23520, - "analytic": 23521, - "##ices": 23522, - "530": 23523, - "navarro": 23524, - "beak": 23525, - "gorilla": 23526, - "herrera": 23527, - "broom": 23528, - "guadalupe": 23529, - "raiding": 23530, - "sykes": 23531, - "311": 23532, - "bsc": 23533, - "deliveries": 23534, - "1720": 23535, - "invasions": 23536, - "carmichael": 23537, - "tajikistan": 23538, - "thematic": 23539, - "ecumenical": 23540, - "sentiments": 23541, - "onstage": 23542, - "##rians": 23543, - "##brand": 
23544, - "##sume": 23545, - "catastrophic": 23546, - "flanks": 23547, - "molten": 23548, - "##arns": 23549, - "waller": 23550, - "aimee": 23551, - "terminating": 23552, - "##icing": 23553, - "alternately": 23554, - "##oche": 23555, - "nehru": 23556, - "printers": 23557, - "outraged": 23558, - "##eving": 23559, - "empires": 23560, - "template": 23561, - "banners": 23562, - "repetitive": 23563, - "za": 23564, - "##oise": 23565, - "vegetarian": 23566, - "##tell": 23567, - "guiana": 23568, - "opt": 23569, - "cavendish": 23570, - "lucknow": 23571, - "synthesized": 23572, - "##hani": 23573, - "##mada": 23574, - "finalized": 23575, - "##ctable": 23576, - "fictitious": 23577, - "mayoral": 23578, - "unreliable": 23579, - "##enham": 23580, - "embracing": 23581, - "peppers": 23582, - "rbis": 23583, - "##chio": 23584, - "##neo": 23585, - "inhibition": 23586, - "slashed": 23587, - "togo": 23588, - "orderly": 23589, - "embroidered": 23590, - "safari": 23591, - "salty": 23592, - "236": 23593, - "barron": 23594, - "benito": 23595, - "totaled": 23596, - "##dak": 23597, - "pubs": 23598, - "simulated": 23599, - "caden": 23600, - "devin": 23601, - "tolkien": 23602, - "momma": 23603, - "welding": 23604, - "sesame": 23605, - "##ept": 23606, - "gottingen": 23607, - "hardness": 23608, - "630": 23609, - "shaman": 23610, - "temeraire": 23611, - "620": 23612, - "adequately": 23613, - "pediatric": 23614, - "##kit": 23615, - "ck": 23616, - "assertion": 23617, - "radicals": 23618, - "composure": 23619, - "cadence": 23620, - "seafood": 23621, - "beaufort": 23622, - "lazarus": 23623, - "mani": 23624, - "warily": 23625, - "cunning": 23626, - "kurdistan": 23627, - "249": 23628, - "cantata": 23629, - "##kir": 23630, - "ares": 23631, - "##41": 23632, - "##clusive": 23633, - "nape": 23634, - "townland": 23635, - "geared": 23636, - "insulted": 23637, - "flutter": 23638, - "boating": 23639, - "violate": 23640, - "draper": 23641, - "dumping": 23642, - "malmo": 23643, - "##hh": 23644, - "##romatic": 
23645, - "firearm": 23646, - "alta": 23647, - "bono": 23648, - "obscured": 23649, - "##clave": 23650, - "exceeds": 23651, - "panorama": 23652, - "unbelievable": 23653, - "##train": 23654, - "preschool": 23655, - "##essed": 23656, - "disconnected": 23657, - "installing": 23658, - "rescuing": 23659, - "secretaries": 23660, - "accessibility": 23661, - "##castle": 23662, - "##drive": 23663, - "##ifice": 23664, - "##film": 23665, - "bouts": 23666, - "slug": 23667, - "waterway": 23668, - "mindanao": 23669, - "##buro": 23670, - "##ratic": 23671, - "halves": 23672, - "##ل": 23673, - "calming": 23674, - "liter": 23675, - "maternity": 23676, - "adorable": 23677, - "bragg": 23678, - "electrification": 23679, - "mcc": 23680, - "##dote": 23681, - "roxy": 23682, - "schizophrenia": 23683, - "##body": 23684, - "munoz": 23685, - "kaye": 23686, - "whaling": 23687, - "239": 23688, - "mil": 23689, - "tingling": 23690, - "tolerant": 23691, - "##ago": 23692, - "unconventional": 23693, - "volcanoes": 23694, - "##finder": 23695, - "deportivo": 23696, - "##llie": 23697, - "robson": 23698, - "kaufman": 23699, - "neuroscience": 23700, - "wai": 23701, - "deportation": 23702, - "masovian": 23703, - "scraping": 23704, - "converse": 23705, - "##bh": 23706, - "hacking": 23707, - "bulge": 23708, - "##oun": 23709, - "administratively": 23710, - "yao": 23711, - "580": 23712, - "amp": 23713, - "mammoth": 23714, - "booster": 23715, - "claremont": 23716, - "hooper": 23717, - "nomenclature": 23718, - "pursuits": 23719, - "mclaughlin": 23720, - "melinda": 23721, - "##sul": 23722, - "catfish": 23723, - "barclay": 23724, - "substrates": 23725, - "taxa": 23726, - "zee": 23727, - "originals": 23728, - "kimberly": 23729, - "packets": 23730, - "padma": 23731, - "##ality": 23732, - "borrowing": 23733, - "ostensibly": 23734, - "solvent": 23735, - "##bri": 23736, - "##genesis": 23737, - "##mist": 23738, - "lukas": 23739, - "shreveport": 23740, - "veracruz": 23741, - "##ь": 23742, - "##lou": 23743, - "##wives": 
23744, - "cheney": 23745, - "tt": 23746, - "anatolia": 23747, - "hobbs": 23748, - "##zyn": 23749, - "cyclic": 23750, - "radiant": 23751, - "alistair": 23752, - "greenish": 23753, - "siena": 23754, - "dat": 23755, - "independents": 23756, - "##bation": 23757, - "conform": 23758, - "pieter": 23759, - "hyper": 23760, - "applicant": 23761, - "bradshaw": 23762, - "spores": 23763, - "telangana": 23764, - "vinci": 23765, - "inexpensive": 23766, - "nuclei": 23767, - "322": 23768, - "jang": 23769, - "nme": 23770, - "soho": 23771, - "spd": 23772, - "##ign": 23773, - "cradled": 23774, - "receptionist": 23775, - "pow": 23776, - "##43": 23777, - "##rika": 23778, - "fascism": 23779, - "##ifer": 23780, - "experimenting": 23781, - "##ading": 23782, - "##iec": 23783, - "##region": 23784, - "345": 23785, - "jocelyn": 23786, - "maris": 23787, - "stair": 23788, - "nocturnal": 23789, - "toro": 23790, - "constabulary": 23791, - "elgin": 23792, - "##kker": 23793, - "msc": 23794, - "##giving": 23795, - "##schen": 23796, - "##rase": 23797, - "doherty": 23798, - "doping": 23799, - "sarcastically": 23800, - "batter": 23801, - "maneuvers": 23802, - "##cano": 23803, - "##apple": 23804, - "##gai": 23805, - "##git": 23806, - "intrinsic": 23807, - "##nst": 23808, - "##stor": 23809, - "1753": 23810, - "showtime": 23811, - "cafes": 23812, - "gasps": 23813, - "lviv": 23814, - "ushered": 23815, - "##thed": 23816, - "fours": 23817, - "restart": 23818, - "astonishment": 23819, - "transmitting": 23820, - "flyer": 23821, - "shrugs": 23822, - "##sau": 23823, - "intriguing": 23824, - "cones": 23825, - "dictated": 23826, - "mushrooms": 23827, - "medial": 23828, - "##kovsky": 23829, - "##elman": 23830, - "escorting": 23831, - "gaped": 23832, - "##26": 23833, - "godfather": 23834, - "##door": 23835, - "##sell": 23836, - "djs": 23837, - "recaptured": 23838, - "timetable": 23839, - "vila": 23840, - "1710": 23841, - "3a": 23842, - "aerodrome": 23843, - "mortals": 23844, - "scientology": 23845, - "##orne": 23846, 
- "angelina": 23847, - "mag": 23848, - "convection": 23849, - "unpaid": 23850, - "insertion": 23851, - "intermittent": 23852, - "lego": 23853, - "##nated": 23854, - "endeavor": 23855, - "kota": 23856, - "pereira": 23857, - "##lz": 23858, - "304": 23859, - "bwv": 23860, - "glamorgan": 23861, - "insults": 23862, - "agatha": 23863, - "fey": 23864, - "##cend": 23865, - "fleetwood": 23866, - "mahogany": 23867, - "protruding": 23868, - "steamship": 23869, - "zeta": 23870, - "##arty": 23871, - "mcguire": 23872, - "suspense": 23873, - "##sphere": 23874, - "advising": 23875, - "urges": 23876, - "##wala": 23877, - "hurriedly": 23878, - "meteor": 23879, - "gilded": 23880, - "inline": 23881, - "arroyo": 23882, - "stalker": 23883, - "##oge": 23884, - "excitedly": 23885, - "revered": 23886, - "##cure": 23887, - "earle": 23888, - "introductory": 23889, - "##break": 23890, - "##ilde": 23891, - "mutants": 23892, - "puff": 23893, - "pulses": 23894, - "reinforcement": 23895, - "##haling": 23896, - "curses": 23897, - "lizards": 23898, - "stalk": 23899, - "correlated": 23900, - "##fixed": 23901, - "fallout": 23902, - "macquarie": 23903, - "##unas": 23904, - "bearded": 23905, - "denton": 23906, - "heaving": 23907, - "802": 23908, - "##ocation": 23909, - "winery": 23910, - "assign": 23911, - "dortmund": 23912, - "##lkirk": 23913, - "everest": 23914, - "invariant": 23915, - "charismatic": 23916, - "susie": 23917, - "##elling": 23918, - "bled": 23919, - "lesley": 23920, - "telegram": 23921, - "sumner": 23922, - "bk": 23923, - "##ogen": 23924, - "##к": 23925, - "wilcox": 23926, - "needy": 23927, - "colbert": 23928, - "duval": 23929, - "##iferous": 23930, - "##mbled": 23931, - "allotted": 23932, - "attends": 23933, - "imperative": 23934, - "##hita": 23935, - "replacements": 23936, - "hawker": 23937, - "##inda": 23938, - "insurgency": 23939, - "##zee": 23940, - "##eke": 23941, - "casts": 23942, - "##yla": 23943, - "680": 23944, - "ives": 23945, - "transitioned": 23946, - "##pack": 23947, - 
"##powering": 23948, - "authoritative": 23949, - "baylor": 23950, - "flex": 23951, - "cringed": 23952, - "plaintiffs": 23953, - "woodrow": 23954, - "##skie": 23955, - "drastic": 23956, - "ape": 23957, - "aroma": 23958, - "unfolded": 23959, - "commotion": 23960, - "nt": 23961, - "preoccupied": 23962, - "theta": 23963, - "routines": 23964, - "lasers": 23965, - "privatization": 23966, - "wand": 23967, - "domino": 23968, - "ek": 23969, - "clenching": 23970, - "nsa": 23971, - "strategically": 23972, - "showered": 23973, - "bile": 23974, - "handkerchief": 23975, - "pere": 23976, - "storing": 23977, - "christophe": 23978, - "insulting": 23979, - "316": 23980, - "nakamura": 23981, - "romani": 23982, - "asiatic": 23983, - "magdalena": 23984, - "palma": 23985, - "cruises": 23986, - "stripping": 23987, - "405": 23988, - "konstantin": 23989, - "soaring": 23990, - "##berman": 23991, - "colloquially": 23992, - "forerunner": 23993, - "havilland": 23994, - "incarcerated": 23995, - "parasites": 23996, - "sincerity": 23997, - "##utus": 23998, - "disks": 23999, - "plank": 24000, - "saigon": 24001, - "##ining": 24002, - "corbin": 24003, - "homo": 24004, - "ornaments": 24005, - "powerhouse": 24006, - "##tlement": 24007, - "chong": 24008, - "fastened": 24009, - "feasibility": 24010, - "idf": 24011, - "morphological": 24012, - "usable": 24013, - "##nish": 24014, - "##zuki": 24015, - "aqueduct": 24016, - "jaguars": 24017, - "keepers": 24018, - "##flies": 24019, - "aleksandr": 24020, - "faust": 24021, - "assigns": 24022, - "ewing": 24023, - "bacterium": 24024, - "hurled": 24025, - "tricky": 24026, - "hungarians": 24027, - "integers": 24028, - "wallis": 24029, - "321": 24030, - "yamaha": 24031, - "##isha": 24032, - "hushed": 24033, - "oblivion": 24034, - "aviator": 24035, - "evangelist": 24036, - "friars": 24037, - "##eller": 24038, - "monograph": 24039, - "ode": 24040, - "##nary": 24041, - "airplanes": 24042, - "labourers": 24043, - "charms": 24044, - "##nee": 24045, - "1661": 24046, - 
"hagen": 24047, - "tnt": 24048, - "rudder": 24049, - "fiesta": 24050, - "transcript": 24051, - "dorothea": 24052, - "ska": 24053, - "inhibitor": 24054, - "maccabi": 24055, - "retorted": 24056, - "raining": 24057, - "encompassed": 24058, - "clauses": 24059, - "menacing": 24060, - "1642": 24061, - "lineman": 24062, - "##gist": 24063, - "vamps": 24064, - "##ape": 24065, - "##dick": 24066, - "gloom": 24067, - "##rera": 24068, - "dealings": 24069, - "easing": 24070, - "seekers": 24071, - "##nut": 24072, - "##pment": 24073, - "helens": 24074, - "unmanned": 24075, - "##anu": 24076, - "##isson": 24077, - "basics": 24078, - "##amy": 24079, - "##ckman": 24080, - "adjustments": 24081, - "1688": 24082, - "brutality": 24083, - "horne": 24084, - "##zell": 24085, - "sui": 24086, - "##55": 24087, - "##mable": 24088, - "aggregator": 24089, - "##thal": 24090, - "rhino": 24091, - "##drick": 24092, - "##vira": 24093, - "counters": 24094, - "zoom": 24095, - "##01": 24096, - "##rting": 24097, - "mn": 24098, - "montenegrin": 24099, - "packard": 24100, - "##unciation": 24101, - "##♭": 24102, - "##kki": 24103, - "reclaim": 24104, - "scholastic": 24105, - "thugs": 24106, - "pulsed": 24107, - "##icia": 24108, - "syriac": 24109, - "quan": 24110, - "saddam": 24111, - "banda": 24112, - "kobe": 24113, - "blaming": 24114, - "buddies": 24115, - "dissent": 24116, - "##lusion": 24117, - "##usia": 24118, - "corbett": 24119, - "jaya": 24120, - "delle": 24121, - "erratic": 24122, - "lexie": 24123, - "##hesis": 24124, - "435": 24125, - "amiga": 24126, - "hermes": 24127, - "##pressing": 24128, - "##leen": 24129, - "chapels": 24130, - "gospels": 24131, - "jamal": 24132, - "##uating": 24133, - "compute": 24134, - "revolving": 24135, - "warp": 24136, - "##sso": 24137, - "##thes": 24138, - "armory": 24139, - "##eras": 24140, - "##gol": 24141, - "antrim": 24142, - "loki": 24143, - "##kow": 24144, - "##asian": 24145, - "##good": 24146, - "##zano": 24147, - "braid": 24148, - "handwriting": 24149, - 
"subdistrict": 24150, - "funky": 24151, - "pantheon": 24152, - "##iculate": 24153, - "concurrency": 24154, - "estimation": 24155, - "improper": 24156, - "juliana": 24157, - "##his": 24158, - "newcomers": 24159, - "johnstone": 24160, - "staten": 24161, - "communicated": 24162, - "##oco": 24163, - "##alle": 24164, - "sausage": 24165, - "stormy": 24166, - "##stered": 24167, - "##tters": 24168, - "superfamily": 24169, - "##grade": 24170, - "acidic": 24171, - "collateral": 24172, - "tabloid": 24173, - "##oped": 24174, - "##rza": 24175, - "bladder": 24176, - "austen": 24177, - "##ellant": 24178, - "mcgraw": 24179, - "##hay": 24180, - "hannibal": 24181, - "mein": 24182, - "aquino": 24183, - "lucifer": 24184, - "wo": 24185, - "badger": 24186, - "boar": 24187, - "cher": 24188, - "christensen": 24189, - "greenberg": 24190, - "interruption": 24191, - "##kken": 24192, - "jem": 24193, - "244": 24194, - "mocked": 24195, - "bottoms": 24196, - "cambridgeshire": 24197, - "##lide": 24198, - "sprawling": 24199, - "##bbly": 24200, - "eastwood": 24201, - "ghent": 24202, - "synth": 24203, - "##buck": 24204, - "advisers": 24205, - "##bah": 24206, - "nominally": 24207, - "hapoel": 24208, - "qu": 24209, - "daggers": 24210, - "estranged": 24211, - "fabricated": 24212, - "towels": 24213, - "vinnie": 24214, - "wcw": 24215, - "misunderstanding": 24216, - "anglia": 24217, - "nothin": 24218, - "unmistakable": 24219, - "##dust": 24220, - "##lova": 24221, - "chilly": 24222, - "marquette": 24223, - "truss": 24224, - "##edge": 24225, - "##erine": 24226, - "reece": 24227, - "##lty": 24228, - "##chemist": 24229, - "##connected": 24230, - "272": 24231, - "308": 24232, - "41st": 24233, - "bash": 24234, - "raion": 24235, - "waterfalls": 24236, - "##ump": 24237, - "##main": 24238, - "labyrinth": 24239, - "queue": 24240, - "theorist": 24241, - "##istle": 24242, - "bharatiya": 24243, - "flexed": 24244, - "soundtracks": 24245, - "rooney": 24246, - "leftist": 24247, - "patrolling": 24248, - "wharton": 24249, 
- "plainly": 24250, - "alleviate": 24251, - "eastman": 24252, - "schuster": 24253, - "topographic": 24254, - "engages": 24255, - "immensely": 24256, - "unbearable": 24257, - "fairchild": 24258, - "1620": 24259, - "dona": 24260, - "lurking": 24261, - "parisian": 24262, - "oliveira": 24263, - "ia": 24264, - "indictment": 24265, - "hahn": 24266, - "bangladeshi": 24267, - "##aster": 24268, - "vivo": 24269, - "##uming": 24270, - "##ential": 24271, - "antonia": 24272, - "expects": 24273, - "indoors": 24274, - "kildare": 24275, - "harlan": 24276, - "##logue": 24277, - "##ogenic": 24278, - "##sities": 24279, - "forgiven": 24280, - "##wat": 24281, - "childish": 24282, - "tavi": 24283, - "##mide": 24284, - "##orra": 24285, - "plausible": 24286, - "grimm": 24287, - "successively": 24288, - "scooted": 24289, - "##bola": 24290, - "##dget": 24291, - "##rith": 24292, - "spartans": 24293, - "emery": 24294, - "flatly": 24295, - "azure": 24296, - "epilogue": 24297, - "##wark": 24298, - "flourish": 24299, - "##iny": 24300, - "##tracted": 24301, - "##overs": 24302, - "##oshi": 24303, - "bestseller": 24304, - "distressed": 24305, - "receipt": 24306, - "spitting": 24307, - "hermit": 24308, - "topological": 24309, - "##cot": 24310, - "drilled": 24311, - "subunit": 24312, - "francs": 24313, - "##layer": 24314, - "eel": 24315, - "##fk": 24316, - "##itas": 24317, - "octopus": 24318, - "footprint": 24319, - "petitions": 24320, - "ufo": 24321, - "##say": 24322, - "##foil": 24323, - "interfering": 24324, - "leaking": 24325, - "palo": 24326, - "##metry": 24327, - "thistle": 24328, - "valiant": 24329, - "##pic": 24330, - "narayan": 24331, - "mcpherson": 24332, - "##fast": 24333, - "gonzales": 24334, - "##ym": 24335, - "##enne": 24336, - "dustin": 24337, - "novgorod": 24338, - "solos": 24339, - "##zman": 24340, - "doin": 24341, - "##raph": 24342, - "##patient": 24343, - "##meyer": 24344, - "soluble": 24345, - "ashland": 24346, - "cuffs": 24347, - "carole": 24348, - "pendleton": 24349, - 
"whistling": 24350, - "vassal": 24351, - "##river": 24352, - "deviation": 24353, - "revisited": 24354, - "constituents": 24355, - "rallied": 24356, - "rotate": 24357, - "loomed": 24358, - "##eil": 24359, - "##nting": 24360, - "amateurs": 24361, - "augsburg": 24362, - "auschwitz": 24363, - "crowns": 24364, - "skeletons": 24365, - "##cona": 24366, - "bonnet": 24367, - "257": 24368, - "dummy": 24369, - "globalization": 24370, - "simeon": 24371, - "sleeper": 24372, - "mandal": 24373, - "differentiated": 24374, - "##crow": 24375, - "##mare": 24376, - "milne": 24377, - "bundled": 24378, - "exasperated": 24379, - "talmud": 24380, - "owes": 24381, - "segregated": 24382, - "##feng": 24383, - "##uary": 24384, - "dentist": 24385, - "piracy": 24386, - "props": 24387, - "##rang": 24388, - "devlin": 24389, - "##torium": 24390, - "malicious": 24391, - "paws": 24392, - "##laid": 24393, - "dependency": 24394, - "##ergy": 24395, - "##fers": 24396, - "##enna": 24397, - "258": 24398, - "pistons": 24399, - "rourke": 24400, - "jed": 24401, - "grammatical": 24402, - "tres": 24403, - "maha": 24404, - "wig": 24405, - "512": 24406, - "ghostly": 24407, - "jayne": 24408, - "##achal": 24409, - "##creen": 24410, - "##ilis": 24411, - "##lins": 24412, - "##rence": 24413, - "designate": 24414, - "##with": 24415, - "arrogance": 24416, - "cambodian": 24417, - "clones": 24418, - "showdown": 24419, - "throttle": 24420, - "twain": 24421, - "##ception": 24422, - "lobes": 24423, - "metz": 24424, - "nagoya": 24425, - "335": 24426, - "braking": 24427, - "##furt": 24428, - "385": 24429, - "roaming": 24430, - "##minster": 24431, - "amin": 24432, - "crippled": 24433, - "##37": 24434, - "##llary": 24435, - "indifferent": 24436, - "hoffmann": 24437, - "idols": 24438, - "intimidating": 24439, - "1751": 24440, - "261": 24441, - "influenza": 24442, - "memo": 24443, - "onions": 24444, - "1748": 24445, - "bandage": 24446, - "consciously": 24447, - "##landa": 24448, - "##rage": 24449, - "clandestine": 24450, - 
"observes": 24451, - "swiped": 24452, - "tangle": 24453, - "##ener": 24454, - "##jected": 24455, - "##trum": 24456, - "##bill": 24457, - "##lta": 24458, - "hugs": 24459, - "congresses": 24460, - "josiah": 24461, - "spirited": 24462, - "##dek": 24463, - "humanist": 24464, - "managerial": 24465, - "filmmaking": 24466, - "inmate": 24467, - "rhymes": 24468, - "debuting": 24469, - "grimsby": 24470, - "ur": 24471, - "##laze": 24472, - "duplicate": 24473, - "vigor": 24474, - "##tf": 24475, - "republished": 24476, - "bolshevik": 24477, - "refurbishment": 24478, - "antibiotics": 24479, - "martini": 24480, - "methane": 24481, - "newscasts": 24482, - "royale": 24483, - "horizons": 24484, - "levant": 24485, - "iain": 24486, - "visas": 24487, - "##ischen": 24488, - "paler": 24489, - "##around": 24490, - "manifestation": 24491, - "snuck": 24492, - "alf": 24493, - "chop": 24494, - "futile": 24495, - "pedestal": 24496, - "rehab": 24497, - "##kat": 24498, - "bmg": 24499, - "kerman": 24500, - "res": 24501, - "fairbanks": 24502, - "jarrett": 24503, - "abstraction": 24504, - "saharan": 24505, - "##zek": 24506, - "1746": 24507, - "procedural": 24508, - "clearer": 24509, - "kincaid": 24510, - "sash": 24511, - "luciano": 24512, - "##ffey": 24513, - "crunch": 24514, - "helmut": 24515, - "##vara": 24516, - "revolutionaries": 24517, - "##tute": 24518, - "creamy": 24519, - "leach": 24520, - "##mmon": 24521, - "1747": 24522, - "permitting": 24523, - "nes": 24524, - "plight": 24525, - "wendell": 24526, - "##lese": 24527, - "contra": 24528, - "ts": 24529, - "clancy": 24530, - "ipa": 24531, - "mach": 24532, - "staples": 24533, - "autopsy": 24534, - "disturbances": 24535, - "nueva": 24536, - "karin": 24537, - "pontiac": 24538, - "##uding": 24539, - "proxy": 24540, - "venerable": 24541, - "haunt": 24542, - "leto": 24543, - "bergman": 24544, - "expands": 24545, - "##helm": 24546, - "wal": 24547, - "##pipe": 24548, - "canning": 24549, - "celine": 24550, - "cords": 24551, - "obesity": 24552, - 
"##enary": 24553, - "intrusion": 24554, - "planner": 24555, - "##phate": 24556, - "reasoned": 24557, - "sequencing": 24558, - "307": 24559, - "harrow": 24560, - "##chon": 24561, - "##dora": 24562, - "marred": 24563, - "mcintyre": 24564, - "repay": 24565, - "tarzan": 24566, - "darting": 24567, - "248": 24568, - "harrisburg": 24569, - "margarita": 24570, - "repulsed": 24571, - "##hur": 24572, - "##lding": 24573, - "belinda": 24574, - "hamburger": 24575, - "novo": 24576, - "compliant": 24577, - "runways": 24578, - "bingham": 24579, - "registrar": 24580, - "skyscraper": 24581, - "ic": 24582, - "cuthbert": 24583, - "improvisation": 24584, - "livelihood": 24585, - "##corp": 24586, - "##elial": 24587, - "admiring": 24588, - "##dened": 24589, - "sporadic": 24590, - "believer": 24591, - "casablanca": 24592, - "popcorn": 24593, - "##29": 24594, - "asha": 24595, - "shovel": 24596, - "##bek": 24597, - "##dice": 24598, - "coiled": 24599, - "tangible": 24600, - "##dez": 24601, - "casper": 24602, - "elsie": 24603, - "resin": 24604, - "tenderness": 24605, - "rectory": 24606, - "##ivision": 24607, - "avail": 24608, - "sonar": 24609, - "##mori": 24610, - "boutique": 24611, - "##dier": 24612, - "guerre": 24613, - "bathed": 24614, - "upbringing": 24615, - "vaulted": 24616, - "sandals": 24617, - "blessings": 24618, - "##naut": 24619, - "##utnant": 24620, - "1680": 24621, - "306": 24622, - "foxes": 24623, - "pia": 24624, - "corrosion": 24625, - "hesitantly": 24626, - "confederates": 24627, - "crystalline": 24628, - "footprints": 24629, - "shapiro": 24630, - "tirana": 24631, - "valentin": 24632, - "drones": 24633, - "45th": 24634, - "microscope": 24635, - "shipments": 24636, - "texted": 24637, - "inquisition": 24638, - "wry": 24639, - "guernsey": 24640, - "unauthorized": 24641, - "resigning": 24642, - "760": 24643, - "ripple": 24644, - "schubert": 24645, - "stu": 24646, - "reassure": 24647, - "felony": 24648, - "##ardo": 24649, - "brittle": 24650, - "koreans": 24651, - "##havan": 24652, 
- "##ives": 24653, - "dun": 24654, - "implicit": 24655, - "tyres": 24656, - "##aldi": 24657, - "##lth": 24658, - "magnolia": 24659, - "##ehan": 24660, - "##puri": 24661, - "##poulos": 24662, - "aggressively": 24663, - "fei": 24664, - "gr": 24665, - "familiarity": 24666, - "##poo": 24667, - "indicative": 24668, - "##trust": 24669, - "fundamentally": 24670, - "jimmie": 24671, - "overrun": 24672, - "395": 24673, - "anchors": 24674, - "moans": 24675, - "##opus": 24676, - "britannia": 24677, - "armagh": 24678, - "##ggle": 24679, - "purposely": 24680, - "seizing": 24681, - "##vao": 24682, - "bewildered": 24683, - "mundane": 24684, - "avoidance": 24685, - "cosmopolitan": 24686, - "geometridae": 24687, - "quartermaster": 24688, - "caf": 24689, - "415": 24690, - "chatter": 24691, - "engulfed": 24692, - "gleam": 24693, - "purge": 24694, - "##icate": 24695, - "juliette": 24696, - "jurisprudence": 24697, - "guerra": 24698, - "revisions": 24699, - "##bn": 24700, - "casimir": 24701, - "brew": 24702, - "##jm": 24703, - "1749": 24704, - "clapton": 24705, - "cloudy": 24706, - "conde": 24707, - "hermitage": 24708, - "278": 24709, - "simulations": 24710, - "torches": 24711, - "vincenzo": 24712, - "matteo": 24713, - "##rill": 24714, - "hidalgo": 24715, - "booming": 24716, - "westbound": 24717, - "accomplishment": 24718, - "tentacles": 24719, - "unaffected": 24720, - "##sius": 24721, - "annabelle": 24722, - "flopped": 24723, - "sloping": 24724, - "##litz": 24725, - "dreamer": 24726, - "interceptor": 24727, - "vu": 24728, - "##loh": 24729, - "consecration": 24730, - "copying": 24731, - "messaging": 24732, - "breaker": 24733, - "climates": 24734, - "hospitalized": 24735, - "1752": 24736, - "torino": 24737, - "afternoons": 24738, - "winfield": 24739, - "witnessing": 24740, - "##teacher": 24741, - "breakers": 24742, - "choirs": 24743, - "sawmill": 24744, - "coldly": 24745, - "##ege": 24746, - "sipping": 24747, - "haste": 24748, - "uninhabited": 24749, - "conical": 24750, - "bibliography": 
24751, - "pamphlets": 24752, - "severn": 24753, - "edict": 24754, - "##oca": 24755, - "deux": 24756, - "illnesses": 24757, - "grips": 24758, - "##pl": 24759, - "rehearsals": 24760, - "sis": 24761, - "thinkers": 24762, - "tame": 24763, - "##keepers": 24764, - "1690": 24765, - "acacia": 24766, - "reformer": 24767, - "##osed": 24768, - "##rys": 24769, - "shuffling": 24770, - "##iring": 24771, - "##shima": 24772, - "eastbound": 24773, - "ionic": 24774, - "rhea": 24775, - "flees": 24776, - "littered": 24777, - "##oum": 24778, - "rocker": 24779, - "vomiting": 24780, - "groaning": 24781, - "champ": 24782, - "overwhelmingly": 24783, - "civilizations": 24784, - "paces": 24785, - "sloop": 24786, - "adoptive": 24787, - "##tish": 24788, - "skaters": 24789, - "##vres": 24790, - "aiding": 24791, - "mango": 24792, - "##joy": 24793, - "nikola": 24794, - "shriek": 24795, - "##ignon": 24796, - "pharmaceuticals": 24797, - "##mg": 24798, - "tuna": 24799, - "calvert": 24800, - "gustavo": 24801, - "stocked": 24802, - "yearbook": 24803, - "##urai": 24804, - "##mana": 24805, - "computed": 24806, - "subsp": 24807, - "riff": 24808, - "hanoi": 24809, - "kelvin": 24810, - "hamid": 24811, - "moors": 24812, - "pastures": 24813, - "summons": 24814, - "jihad": 24815, - "nectar": 24816, - "##ctors": 24817, - "bayou": 24818, - "untitled": 24819, - "pleasing": 24820, - "vastly": 24821, - "republics": 24822, - "intellect": 24823, - "##η": 24824, - "##ulio": 24825, - "##tou": 24826, - "crumbling": 24827, - "stylistic": 24828, - "sb": 24829, - "##ی": 24830, - "consolation": 24831, - "frequented": 24832, - "h₂o": 24833, - "walden": 24834, - "widows": 24835, - "##iens": 24836, - "404": 24837, - "##ignment": 24838, - "chunks": 24839, - "improves": 24840, - "288": 24841, - "grit": 24842, - "recited": 24843, - "##dev": 24844, - "snarl": 24845, - "sociological": 24846, - "##arte": 24847, - "##gul": 24848, - "inquired": 24849, - "##held": 24850, - "bruise": 24851, - "clube": 24852, - "consultancy": 24853, - 
"homogeneous": 24854, - "hornets": 24855, - "multiplication": 24856, - "pasta": 24857, - "prick": 24858, - "savior": 24859, - "##grin": 24860, - "##kou": 24861, - "##phile": 24862, - "yoon": 24863, - "##gara": 24864, - "grimes": 24865, - "vanishing": 24866, - "cheering": 24867, - "reacting": 24868, - "bn": 24869, - "distillery": 24870, - "##quisite": 24871, - "##vity": 24872, - "coe": 24873, - "dockyard": 24874, - "massif": 24875, - "##jord": 24876, - "escorts": 24877, - "voss": 24878, - "##valent": 24879, - "byte": 24880, - "chopped": 24881, - "hawke": 24882, - "illusions": 24883, - "workings": 24884, - "floats": 24885, - "##koto": 24886, - "##vac": 24887, - "kv": 24888, - "annapolis": 24889, - "madden": 24890, - "##onus": 24891, - "alvaro": 24892, - "noctuidae": 24893, - "##cum": 24894, - "##scopic": 24895, - "avenge": 24896, - "steamboat": 24897, - "forte": 24898, - "illustrates": 24899, - "erika": 24900, - "##trip": 24901, - "570": 24902, - "dew": 24903, - "nationalities": 24904, - "bran": 24905, - "manifested": 24906, - "thirsty": 24907, - "diversified": 24908, - "muscled": 24909, - "reborn": 24910, - "##standing": 24911, - "arson": 24912, - "##lessness": 24913, - "##dran": 24914, - "##logram": 24915, - "##boys": 24916, - "##kushima": 24917, - "##vious": 24918, - "willoughby": 24919, - "##phobia": 24920, - "286": 24921, - "alsace": 24922, - "dashboard": 24923, - "yuki": 24924, - "##chai": 24925, - "granville": 24926, - "myspace": 24927, - "publicized": 24928, - "tricked": 24929, - "##gang": 24930, - "adjective": 24931, - "##ater": 24932, - "relic": 24933, - "reorganisation": 24934, - "enthusiastically": 24935, - "indications": 24936, - "saxe": 24937, - "##lassified": 24938, - "consolidate": 24939, - "iec": 24940, - "padua": 24941, - "helplessly": 24942, - "ramps": 24943, - "renaming": 24944, - "regulars": 24945, - "pedestrians": 24946, - "accents": 24947, - "convicts": 24948, - "inaccurate": 24949, - "lowers": 24950, - "mana": 24951, - "##pati": 24952, - 
"barrie": 24953, - "bjp": 24954, - "outta": 24955, - "someplace": 24956, - "berwick": 24957, - "flanking": 24958, - "invoked": 24959, - "marrow": 24960, - "sparsely": 24961, - "excerpts": 24962, - "clothed": 24963, - "rei": 24964, - "##ginal": 24965, - "wept": 24966, - "##straße": 24967, - "##vish": 24968, - "alexa": 24969, - "excel": 24970, - "##ptive": 24971, - "membranes": 24972, - "aquitaine": 24973, - "creeks": 24974, - "cutler": 24975, - "sheppard": 24976, - "implementations": 24977, - "ns": 24978, - "##dur": 24979, - "fragrance": 24980, - "budge": 24981, - "concordia": 24982, - "magnesium": 24983, - "marcelo": 24984, - "##antes": 24985, - "gladly": 24986, - "vibrating": 24987, - "##rral": 24988, - "##ggles": 24989, - "montrose": 24990, - "##omba": 24991, - "lew": 24992, - "seamus": 24993, - "1630": 24994, - "cocky": 24995, - "##ament": 24996, - "##uen": 24997, - "bjorn": 24998, - "##rrick": 24999, - "fielder": 25000, - "fluttering": 25001, - "##lase": 25002, - "methyl": 25003, - "kimberley": 25004, - "mcdowell": 25005, - "reductions": 25006, - "barbed": 25007, - "##jic": 25008, - "##tonic": 25009, - "aeronautical": 25010, - "condensed": 25011, - "distracting": 25012, - "##promising": 25013, - "huffed": 25014, - "##cala": 25015, - "##sle": 25016, - "claudius": 25017, - "invincible": 25018, - "missy": 25019, - "pious": 25020, - "balthazar": 25021, - "ci": 25022, - "##lang": 25023, - "butte": 25024, - "combo": 25025, - "orson": 25026, - "##dication": 25027, - "myriad": 25028, - "1707": 25029, - "silenced": 25030, - "##fed": 25031, - "##rh": 25032, - "coco": 25033, - "netball": 25034, - "yourselves": 25035, - "##oza": 25036, - "clarify": 25037, - "heller": 25038, - "peg": 25039, - "durban": 25040, - "etudes": 25041, - "offender": 25042, - "roast": 25043, - "blackmail": 25044, - "curvature": 25045, - "##woods": 25046, - "vile": 25047, - "309": 25048, - "illicit": 25049, - "suriname": 25050, - "##linson": 25051, - "overture": 25052, - "1685": 25053, - "bubbling": 
25054, - "gymnast": 25055, - "tucking": 25056, - "##mming": 25057, - "##ouin": 25058, - "maldives": 25059, - "##bala": 25060, - "gurney": 25061, - "##dda": 25062, - "##eased": 25063, - "##oides": 25064, - "backside": 25065, - "pinto": 25066, - "jars": 25067, - "racehorse": 25068, - "tending": 25069, - "##rdial": 25070, - "baronetcy": 25071, - "wiener": 25072, - "duly": 25073, - "##rke": 25074, - "barbarian": 25075, - "cupping": 25076, - "flawed": 25077, - "##thesis": 25078, - "bertha": 25079, - "pleistocene": 25080, - "puddle": 25081, - "swearing": 25082, - "##nob": 25083, - "##tically": 25084, - "fleeting": 25085, - "prostate": 25086, - "amulet": 25087, - "educating": 25088, - "##mined": 25089, - "##iti": 25090, - "##tler": 25091, - "75th": 25092, - "jens": 25093, - "respondents": 25094, - "analytics": 25095, - "cavaliers": 25096, - "papacy": 25097, - "raju": 25098, - "##iente": 25099, - "##ulum": 25100, - "##tip": 25101, - "funnel": 25102, - "271": 25103, - "disneyland": 25104, - "##lley": 25105, - "sociologist": 25106, - "##iam": 25107, - "2500": 25108, - "faulkner": 25109, - "louvre": 25110, - "menon": 25111, - "##dson": 25112, - "276": 25113, - "##ower": 25114, - "afterlife": 25115, - "mannheim": 25116, - "peptide": 25117, - "referees": 25118, - "comedians": 25119, - "meaningless": 25120, - "##anger": 25121, - "##laise": 25122, - "fabrics": 25123, - "hurley": 25124, - "renal": 25125, - "sleeps": 25126, - "##bour": 25127, - "##icle": 25128, - "breakout": 25129, - "kristin": 25130, - "roadside": 25131, - "animator": 25132, - "clover": 25133, - "disdain": 25134, - "unsafe": 25135, - "redesign": 25136, - "##urity": 25137, - "firth": 25138, - "barnsley": 25139, - "portage": 25140, - "reset": 25141, - "narrows": 25142, - "268": 25143, - "commandos": 25144, - "expansive": 25145, - "speechless": 25146, - "tubular": 25147, - "##lux": 25148, - "essendon": 25149, - "eyelashes": 25150, - "smashwords": 25151, - "##yad": 25152, - "##bang": 25153, - "##claim": 25154, - 
"craved": 25155, - "sprinted": 25156, - "chet": 25157, - "somme": 25158, - "astor": 25159, - "wrocław": 25160, - "orton": 25161, - "266": 25162, - "bane": 25163, - "##erving": 25164, - "##uing": 25165, - "mischief": 25166, - "##amps": 25167, - "##sund": 25168, - "scaling": 25169, - "terre": 25170, - "##xious": 25171, - "impairment": 25172, - "offenses": 25173, - "undermine": 25174, - "moi": 25175, - "soy": 25176, - "contiguous": 25177, - "arcadia": 25178, - "inuit": 25179, - "seam": 25180, - "##tops": 25181, - "macbeth": 25182, - "rebelled": 25183, - "##icative": 25184, - "##iot": 25185, - "590": 25186, - "elaborated": 25187, - "frs": 25188, - "uniformed": 25189, - "##dberg": 25190, - "259": 25191, - "powerless": 25192, - "priscilla": 25193, - "stimulated": 25194, - "980": 25195, - "qc": 25196, - "arboretum": 25197, - "frustrating": 25198, - "trieste": 25199, - "bullock": 25200, - "##nified": 25201, - "enriched": 25202, - "glistening": 25203, - "intern": 25204, - "##adia": 25205, - "locus": 25206, - "nouvelle": 25207, - "ollie": 25208, - "ike": 25209, - "lash": 25210, - "starboard": 25211, - "ee": 25212, - "tapestry": 25213, - "headlined": 25214, - "hove": 25215, - "rigged": 25216, - "##vite": 25217, - "pollock": 25218, - "##yme": 25219, - "thrive": 25220, - "clustered": 25221, - "cas": 25222, - "roi": 25223, - "gleamed": 25224, - "olympiad": 25225, - "##lino": 25226, - "pressured": 25227, - "regimes": 25228, - "##hosis": 25229, - "##lick": 25230, - "ripley": 25231, - "##ophone": 25232, - "kickoff": 25233, - "gallon": 25234, - "rockwell": 25235, - "##arable": 25236, - "crusader": 25237, - "glue": 25238, - "revolutions": 25239, - "scrambling": 25240, - "1714": 25241, - "grover": 25242, - "##jure": 25243, - "englishman": 25244, - "aztec": 25245, - "263": 25246, - "contemplating": 25247, - "coven": 25248, - "ipad": 25249, - "preach": 25250, - "triumphant": 25251, - "tufts": 25252, - "##esian": 25253, - "rotational": 25254, - "##phus": 25255, - "328": 25256, - 
"falkland": 25257, - "##brates": 25258, - "strewn": 25259, - "clarissa": 25260, - "rejoin": 25261, - "environmentally": 25262, - "glint": 25263, - "banded": 25264, - "drenched": 25265, - "moat": 25266, - "albanians": 25267, - "johor": 25268, - "rr": 25269, - "maestro": 25270, - "malley": 25271, - "nouveau": 25272, - "shaded": 25273, - "taxonomy": 25274, - "v6": 25275, - "adhere": 25276, - "bunk": 25277, - "airfields": 25278, - "##ritan": 25279, - "1741": 25280, - "encompass": 25281, - "remington": 25282, - "tran": 25283, - "##erative": 25284, - "amelie": 25285, - "mazda": 25286, - "friar": 25287, - "morals": 25288, - "passions": 25289, - "##zai": 25290, - "breadth": 25291, - "vis": 25292, - "##hae": 25293, - "argus": 25294, - "burnham": 25295, - "caressing": 25296, - "insider": 25297, - "rudd": 25298, - "##imov": 25299, - "##mini": 25300, - "##rso": 25301, - "italianate": 25302, - "murderous": 25303, - "textual": 25304, - "wainwright": 25305, - "armada": 25306, - "bam": 25307, - "weave": 25308, - "timer": 25309, - "##taken": 25310, - "##nh": 25311, - "fra": 25312, - "##crest": 25313, - "ardent": 25314, - "salazar": 25315, - "taps": 25316, - "tunis": 25317, - "##ntino": 25318, - "allegro": 25319, - "gland": 25320, - "philanthropic": 25321, - "##chester": 25322, - "implication": 25323, - "##optera": 25324, - "esq": 25325, - "judas": 25326, - "noticeably": 25327, - "wynn": 25328, - "##dara": 25329, - "inched": 25330, - "indexed": 25331, - "crises": 25332, - "villiers": 25333, - "bandit": 25334, - "royalties": 25335, - "patterned": 25336, - "cupboard": 25337, - "interspersed": 25338, - "accessory": 25339, - "isla": 25340, - "kendrick": 25341, - "entourage": 25342, - "stitches": 25343, - "##esthesia": 25344, - "headwaters": 25345, - "##ior": 25346, - "interlude": 25347, - "distraught": 25348, - "draught": 25349, - "1727": 25350, - "##basket": 25351, - "biased": 25352, - "sy": 25353, - "transient": 25354, - "triad": 25355, - "subgenus": 25356, - "adapting": 25357, - 
"kidd": 25358, - "shortstop": 25359, - "##umatic": 25360, - "dimly": 25361, - "spiked": 25362, - "mcleod": 25363, - "reprint": 25364, - "nellie": 25365, - "pretoria": 25366, - "windmill": 25367, - "##cek": 25368, - "singled": 25369, - "##mps": 25370, - "273": 25371, - "reunite": 25372, - "##orous": 25373, - "747": 25374, - "bankers": 25375, - "outlying": 25376, - "##omp": 25377, - "##ports": 25378, - "##tream": 25379, - "apologies": 25380, - "cosmetics": 25381, - "patsy": 25382, - "##deh": 25383, - "##ocks": 25384, - "##yson": 25385, - "bender": 25386, - "nantes": 25387, - "serene": 25388, - "##nad": 25389, - "lucha": 25390, - "mmm": 25391, - "323": 25392, - "##cius": 25393, - "##gli": 25394, - "cmll": 25395, - "coinage": 25396, - "nestor": 25397, - "juarez": 25398, - "##rook": 25399, - "smeared": 25400, - "sprayed": 25401, - "twitching": 25402, - "sterile": 25403, - "irina": 25404, - "embodied": 25405, - "juveniles": 25406, - "enveloped": 25407, - "miscellaneous": 25408, - "cancers": 25409, - "dq": 25410, - "gulped": 25411, - "luisa": 25412, - "crested": 25413, - "swat": 25414, - "donegal": 25415, - "ref": 25416, - "##anov": 25417, - "##acker": 25418, - "hearst": 25419, - "mercantile": 25420, - "##lika": 25421, - "doorbell": 25422, - "ua": 25423, - "vicki": 25424, - "##alla": 25425, - "##som": 25426, - "bilbao": 25427, - "psychologists": 25428, - "stryker": 25429, - "sw": 25430, - "horsemen": 25431, - "turkmenistan": 25432, - "wits": 25433, - "##national": 25434, - "anson": 25435, - "mathew": 25436, - "screenings": 25437, - "##umb": 25438, - "rihanna": 25439, - "##agne": 25440, - "##nessy": 25441, - "aisles": 25442, - "##iani": 25443, - "##osphere": 25444, - "hines": 25445, - "kenton": 25446, - "saskatoon": 25447, - "tasha": 25448, - "truncated": 25449, - "##champ": 25450, - "##itan": 25451, - "mildred": 25452, - "advises": 25453, - "fredrik": 25454, - "interpreting": 25455, - "inhibitors": 25456, - "##athi": 25457, - "spectroscopy": 25458, - "##hab": 25459, - 
"##kong": 25460, - "karim": 25461, - "panda": 25462, - "##oia": 25463, - "##nail": 25464, - "##vc": 25465, - "conqueror": 25466, - "kgb": 25467, - "leukemia": 25468, - "##dity": 25469, - "arrivals": 25470, - "cheered": 25471, - "pisa": 25472, - "phosphorus": 25473, - "shielded": 25474, - "##riated": 25475, - "mammal": 25476, - "unitarian": 25477, - "urgently": 25478, - "chopin": 25479, - "sanitary": 25480, - "##mission": 25481, - "spicy": 25482, - "drugged": 25483, - "hinges": 25484, - "##tort": 25485, - "tipping": 25486, - "trier": 25487, - "impoverished": 25488, - "westchester": 25489, - "##caster": 25490, - "267": 25491, - "epoch": 25492, - "nonstop": 25493, - "##gman": 25494, - "##khov": 25495, - "aromatic": 25496, - "centrally": 25497, - "cerro": 25498, - "##tively": 25499, - "##vio": 25500, - "billions": 25501, - "modulation": 25502, - "sedimentary": 25503, - "283": 25504, - "facilitating": 25505, - "outrageous": 25506, - "goldstein": 25507, - "##eak": 25508, - "##kt": 25509, - "ld": 25510, - "maitland": 25511, - "penultimate": 25512, - "pollard": 25513, - "##dance": 25514, - "fleets": 25515, - "spaceship": 25516, - "vertebrae": 25517, - "##nig": 25518, - "alcoholism": 25519, - "als": 25520, - "recital": 25521, - "##bham": 25522, - "##ference": 25523, - "##omics": 25524, - "m2": 25525, - "##bm": 25526, - "trois": 25527, - "##tropical": 25528, - "##в": 25529, - "commemorates": 25530, - "##meric": 25531, - "marge": 25532, - "##raction": 25533, - "1643": 25534, - "670": 25535, - "cosmetic": 25536, - "ravaged": 25537, - "##ige": 25538, - "catastrophe": 25539, - "eng": 25540, - "##shida": 25541, - "albrecht": 25542, - "arterial": 25543, - "bellamy": 25544, - "decor": 25545, - "harmon": 25546, - "##rde": 25547, - "bulbs": 25548, - "synchronized": 25549, - "vito": 25550, - "easiest": 25551, - "shetland": 25552, - "shielding": 25553, - "wnba": 25554, - "##glers": 25555, - "##ssar": 25556, - "##riam": 25557, - "brianna": 25558, - "cumbria": 25559, - "##aceous": 25560, 
- "##rard": 25561, - "cores": 25562, - "thayer": 25563, - "##nsk": 25564, - "brood": 25565, - "hilltop": 25566, - "luminous": 25567, - "carts": 25568, - "keynote": 25569, - "larkin": 25570, - "logos": 25571, - "##cta": 25572, - "##ا": 25573, - "##mund": 25574, - "##quay": 25575, - "lilith": 25576, - "tinted": 25577, - "277": 25578, - "wrestle": 25579, - "mobilization": 25580, - "##uses": 25581, - "sequential": 25582, - "siam": 25583, - "bloomfield": 25584, - "takahashi": 25585, - "274": 25586, - "##ieving": 25587, - "presenters": 25588, - "ringo": 25589, - "blazed": 25590, - "witty": 25591, - "##oven": 25592, - "##ignant": 25593, - "devastation": 25594, - "haydn": 25595, - "harmed": 25596, - "newt": 25597, - "therese": 25598, - "##peed": 25599, - "gershwin": 25600, - "molina": 25601, - "rabbis": 25602, - "sudanese": 25603, - "001": 25604, - "innate": 25605, - "restarted": 25606, - "##sack": 25607, - "##fus": 25608, - "slices": 25609, - "wb": 25610, - "##shah": 25611, - "enroll": 25612, - "hypothetical": 25613, - "hysterical": 25614, - "1743": 25615, - "fabio": 25616, - "indefinite": 25617, - "warped": 25618, - "##hg": 25619, - "exchanging": 25620, - "525": 25621, - "unsuitable": 25622, - "##sboro": 25623, - "gallo": 25624, - "1603": 25625, - "bret": 25626, - "cobalt": 25627, - "homemade": 25628, - "##hunter": 25629, - "mx": 25630, - "operatives": 25631, - "##dhar": 25632, - "terraces": 25633, - "durable": 25634, - "latch": 25635, - "pens": 25636, - "whorls": 25637, - "##ctuated": 25638, - "##eaux": 25639, - "billing": 25640, - "ligament": 25641, - "succumbed": 25642, - "##gly": 25643, - "regulators": 25644, - "spawn": 25645, - "##brick": 25646, - "##stead": 25647, - "filmfare": 25648, - "rochelle": 25649, - "##nzo": 25650, - "1725": 25651, - "circumstance": 25652, - "saber": 25653, - "supplements": 25654, - "##nsky": 25655, - "##tson": 25656, - "crowe": 25657, - "wellesley": 25658, - "carrot": 25659, - "##9th": 25660, - "##movable": 25661, - "primate": 25662, - 
"drury": 25663, - "sincerely": 25664, - "topical": 25665, - "##mad": 25666, - "##rao": 25667, - "callahan": 25668, - "kyiv": 25669, - "smarter": 25670, - "tits": 25671, - "undo": 25672, - "##yeh": 25673, - "announcements": 25674, - "anthologies": 25675, - "barrio": 25676, - "nebula": 25677, - "##islaus": 25678, - "##shaft": 25679, - "##tyn": 25680, - "bodyguards": 25681, - "2021": 25682, - "assassinate": 25683, - "barns": 25684, - "emmett": 25685, - "scully": 25686, - "##mah": 25687, - "##yd": 25688, - "##eland": 25689, - "##tino": 25690, - "##itarian": 25691, - "demoted": 25692, - "gorman": 25693, - "lashed": 25694, - "prized": 25695, - "adventist": 25696, - "writ": 25697, - "##gui": 25698, - "alla": 25699, - "invertebrates": 25700, - "##ausen": 25701, - "1641": 25702, - "amman": 25703, - "1742": 25704, - "align": 25705, - "healy": 25706, - "redistribution": 25707, - "##gf": 25708, - "##rize": 25709, - "insulation": 25710, - "##drop": 25711, - "adherents": 25712, - "hezbollah": 25713, - "vitro": 25714, - "ferns": 25715, - "yanking": 25716, - "269": 25717, - "php": 25718, - "registering": 25719, - "uppsala": 25720, - "cheerleading": 25721, - "confines": 25722, - "mischievous": 25723, - "tully": 25724, - "##ross": 25725, - "49th": 25726, - "docked": 25727, - "roam": 25728, - "stipulated": 25729, - "pumpkin": 25730, - "##bry": 25731, - "prompt": 25732, - "##ezer": 25733, - "blindly": 25734, - "shuddering": 25735, - "craftsmen": 25736, - "frail": 25737, - "scented": 25738, - "katharine": 25739, - "scramble": 25740, - "shaggy": 25741, - "sponge": 25742, - "helix": 25743, - "zaragoza": 25744, - "279": 25745, - "##52": 25746, - "43rd": 25747, - "backlash": 25748, - "fontaine": 25749, - "seizures": 25750, - "posse": 25751, - "cowan": 25752, - "nonfiction": 25753, - "telenovela": 25754, - "wwii": 25755, - "hammered": 25756, - "undone": 25757, - "##gpur": 25758, - "encircled": 25759, - "irs": 25760, - "##ivation": 25761, - "artefacts": 25762, - "oneself": 25763, - 
"searing": 25764, - "smallpox": 25765, - "##belle": 25766, - "##osaurus": 25767, - "shandong": 25768, - "breached": 25769, - "upland": 25770, - "blushing": 25771, - "rankin": 25772, - "infinitely": 25773, - "psyche": 25774, - "tolerated": 25775, - "docking": 25776, - "evicted": 25777, - "##col": 25778, - "unmarked": 25779, - "##lving": 25780, - "gnome": 25781, - "lettering": 25782, - "litres": 25783, - "musique": 25784, - "##oint": 25785, - "benevolent": 25786, - "##jal": 25787, - "blackened": 25788, - "##anna": 25789, - "mccall": 25790, - "racers": 25791, - "tingle": 25792, - "##ocene": 25793, - "##orestation": 25794, - "introductions": 25795, - "radically": 25796, - "292": 25797, - "##hiff": 25798, - "##باد": 25799, - "1610": 25800, - "1739": 25801, - "munchen": 25802, - "plead": 25803, - "##nka": 25804, - "condo": 25805, - "scissors": 25806, - "##sight": 25807, - "##tens": 25808, - "apprehension": 25809, - "##cey": 25810, - "##yin": 25811, - "hallmark": 25812, - "watering": 25813, - "formulas": 25814, - "sequels": 25815, - "##llas": 25816, - "aggravated": 25817, - "bae": 25818, - "commencing": 25819, - "##building": 25820, - "enfield": 25821, - "prohibits": 25822, - "marne": 25823, - "vedic": 25824, - "civilized": 25825, - "euclidean": 25826, - "jagger": 25827, - "beforehand": 25828, - "blasts": 25829, - "dumont": 25830, - "##arney": 25831, - "##nem": 25832, - "740": 25833, - "conversions": 25834, - "hierarchical": 25835, - "rios": 25836, - "simulator": 25837, - "##dya": 25838, - "##lellan": 25839, - "hedges": 25840, - "oleg": 25841, - "thrusts": 25842, - "shadowed": 25843, - "darby": 25844, - "maximize": 25845, - "1744": 25846, - "gregorian": 25847, - "##nded": 25848, - "##routed": 25849, - "sham": 25850, - "unspecified": 25851, - "##hog": 25852, - "emory": 25853, - "factual": 25854, - "##smo": 25855, - "##tp": 25856, - "fooled": 25857, - "##rger": 25858, - "ortega": 25859, - "wellness": 25860, - "marlon": 25861, - "##oton": 25862, - "##urance": 25863, - 
"casket": 25864, - "keating": 25865, - "ley": 25866, - "enclave": 25867, - "##ayan": 25868, - "char": 25869, - "influencing": 25870, - "jia": 25871, - "##chenko": 25872, - "412": 25873, - "ammonia": 25874, - "erebidae": 25875, - "incompatible": 25876, - "violins": 25877, - "cornered": 25878, - "##arat": 25879, - "grooves": 25880, - "astronauts": 25881, - "columbian": 25882, - "rampant": 25883, - "fabrication": 25884, - "kyushu": 25885, - "mahmud": 25886, - "vanish": 25887, - "##dern": 25888, - "mesopotamia": 25889, - "##lete": 25890, - "ict": 25891, - "##rgen": 25892, - "caspian": 25893, - "kenji": 25894, - "pitted": 25895, - "##vered": 25896, - "999": 25897, - "grimace": 25898, - "roanoke": 25899, - "tchaikovsky": 25900, - "twinned": 25901, - "##analysis": 25902, - "##awan": 25903, - "xinjiang": 25904, - "arias": 25905, - "clemson": 25906, - "kazakh": 25907, - "sizable": 25908, - "1662": 25909, - "##khand": 25910, - "##vard": 25911, - "plunge": 25912, - "tatum": 25913, - "vittorio": 25914, - "##nden": 25915, - "cholera": 25916, - "##dana": 25917, - "##oper": 25918, - "bracing": 25919, - "indifference": 25920, - "projectile": 25921, - "superliga": 25922, - "##chee": 25923, - "realises": 25924, - "upgrading": 25925, - "299": 25926, - "porte": 25927, - "retribution": 25928, - "##vies": 25929, - "nk": 25930, - "stil": 25931, - "##resses": 25932, - "ama": 25933, - "bureaucracy": 25934, - "blackberry": 25935, - "bosch": 25936, - "testosterone": 25937, - "collapses": 25938, - "greer": 25939, - "##pathic": 25940, - "ioc": 25941, - "fifties": 25942, - "malls": 25943, - "##erved": 25944, - "bao": 25945, - "baskets": 25946, - "adolescents": 25947, - "siegfried": 25948, - "##osity": 25949, - "##tosis": 25950, - "mantra": 25951, - "detecting": 25952, - "existent": 25953, - "fledgling": 25954, - "##cchi": 25955, - "dissatisfied": 25956, - "gan": 25957, - "telecommunication": 25958, - "mingled": 25959, - "sobbed": 25960, - "6000": 25961, - "controversies": 25962, - "outdated": 
25963, - "taxis": 25964, - "##raus": 25965, - "fright": 25966, - "slams": 25967, - "##lham": 25968, - "##fect": 25969, - "##tten": 25970, - "detectors": 25971, - "fetal": 25972, - "tanned": 25973, - "##uw": 25974, - "fray": 25975, - "goth": 25976, - "olympian": 25977, - "skipping": 25978, - "mandates": 25979, - "scratches": 25980, - "sheng": 25981, - "unspoken": 25982, - "hyundai": 25983, - "tracey": 25984, - "hotspur": 25985, - "restrictive": 25986, - "##buch": 25987, - "americana": 25988, - "mundo": 25989, - "##bari": 25990, - "burroughs": 25991, - "diva": 25992, - "vulcan": 25993, - "##6th": 25994, - "distinctions": 25995, - "thumping": 25996, - "##ngen": 25997, - "mikey": 25998, - "sheds": 25999, - "fide": 26000, - "rescues": 26001, - "springsteen": 26002, - "vested": 26003, - "valuation": 26004, - "##ece": 26005, - "##ely": 26006, - "pinnacle": 26007, - "rake": 26008, - "sylvie": 26009, - "##edo": 26010, - "almond": 26011, - "quivering": 26012, - "##irus": 26013, - "alteration": 26014, - "faltered": 26015, - "##wad": 26016, - "51st": 26017, - "hydra": 26018, - "ticked": 26019, - "##kato": 26020, - "recommends": 26021, - "##dicated": 26022, - "antigua": 26023, - "arjun": 26024, - "stagecoach": 26025, - "wilfred": 26026, - "trickle": 26027, - "pronouns": 26028, - "##pon": 26029, - "aryan": 26030, - "nighttime": 26031, - "##anian": 26032, - "gall": 26033, - "pea": 26034, - "stitch": 26035, - "##hei": 26036, - "leung": 26037, - "milos": 26038, - "##dini": 26039, - "eritrea": 26040, - "nexus": 26041, - "starved": 26042, - "snowfall": 26043, - "kant": 26044, - "parasitic": 26045, - "cot": 26046, - "discus": 26047, - "hana": 26048, - "strikers": 26049, - "appleton": 26050, - "kitchens": 26051, - "##erina": 26052, - "##partisan": 26053, - "##itha": 26054, - "##vius": 26055, - "disclose": 26056, - "metis": 26057, - "##channel": 26058, - "1701": 26059, - "tesla": 26060, - "##vera": 26061, - "fitch": 26062, - "1735": 26063, - "blooded": 26064, - "##tila": 26065, - 
"decimal": 26066, - "##tang": 26067, - "##bai": 26068, - "cyclones": 26069, - "eun": 26070, - "bottled": 26071, - "peas": 26072, - "pensacola": 26073, - "basha": 26074, - "bolivian": 26075, - "crabs": 26076, - "boil": 26077, - "lanterns": 26078, - "partridge": 26079, - "roofed": 26080, - "1645": 26081, - "necks": 26082, - "##phila": 26083, - "opined": 26084, - "patting": 26085, - "##kla": 26086, - "##lland": 26087, - "chuckles": 26088, - "volta": 26089, - "whereupon": 26090, - "##nche": 26091, - "devout": 26092, - "euroleague": 26093, - "suicidal": 26094, - "##dee": 26095, - "inherently": 26096, - "involuntary": 26097, - "knitting": 26098, - "nasser": 26099, - "##hide": 26100, - "puppets": 26101, - "colourful": 26102, - "courageous": 26103, - "southend": 26104, - "stills": 26105, - "miraculous": 26106, - "hodgson": 26107, - "richer": 26108, - "rochdale": 26109, - "ethernet": 26110, - "greta": 26111, - "uniting": 26112, - "prism": 26113, - "umm": 26114, - "##haya": 26115, - "##itical": 26116, - "##utation": 26117, - "deterioration": 26118, - "pointe": 26119, - "prowess": 26120, - "##ropriation": 26121, - "lids": 26122, - "scranton": 26123, - "billings": 26124, - "subcontinent": 26125, - "##koff": 26126, - "##scope": 26127, - "brute": 26128, - "kellogg": 26129, - "psalms": 26130, - "degraded": 26131, - "##vez": 26132, - "stanisław": 26133, - "##ructured": 26134, - "ferreira": 26135, - "pun": 26136, - "astonishing": 26137, - "gunnar": 26138, - "##yat": 26139, - "arya": 26140, - "prc": 26141, - "gottfried": 26142, - "##tight": 26143, - "excursion": 26144, - "##ographer": 26145, - "dina": 26146, - "##quil": 26147, - "##nare": 26148, - "huffington": 26149, - "illustrious": 26150, - "wilbur": 26151, - "gundam": 26152, - "verandah": 26153, - "##zard": 26154, - "naacp": 26155, - "##odle": 26156, - "constructive": 26157, - "fjord": 26158, - "kade": 26159, - "##naud": 26160, - "generosity": 26161, - "thrilling": 26162, - "baseline": 26163, - "cayman": 26164, - "frankish": 
26165, - "plastics": 26166, - "accommodations": 26167, - "zoological": 26168, - "##fting": 26169, - "cedric": 26170, - "qb": 26171, - "motorized": 26172, - "##dome": 26173, - "##otted": 26174, - "squealed": 26175, - "tackled": 26176, - "canucks": 26177, - "budgets": 26178, - "situ": 26179, - "asthma": 26180, - "dail": 26181, - "gabled": 26182, - "grasslands": 26183, - "whimpered": 26184, - "writhing": 26185, - "judgments": 26186, - "##65": 26187, - "minnie": 26188, - "pv": 26189, - "##carbon": 26190, - "bananas": 26191, - "grille": 26192, - "domes": 26193, - "monique": 26194, - "odin": 26195, - "maguire": 26196, - "markham": 26197, - "tierney": 26198, - "##estra": 26199, - "##chua": 26200, - "libel": 26201, - "poke": 26202, - "speedy": 26203, - "atrium": 26204, - "laval": 26205, - "notwithstanding": 26206, - "##edly": 26207, - "fai": 26208, - "kala": 26209, - "##sur": 26210, - "robb": 26211, - "##sma": 26212, - "listings": 26213, - "luz": 26214, - "supplementary": 26215, - "tianjin": 26216, - "##acing": 26217, - "enzo": 26218, - "jd": 26219, - "ric": 26220, - "scanner": 26221, - "croats": 26222, - "transcribed": 26223, - "##49": 26224, - "arden": 26225, - "cv": 26226, - "##hair": 26227, - "##raphy": 26228, - "##lver": 26229, - "##uy": 26230, - "357": 26231, - "seventies": 26232, - "staggering": 26233, - "alam": 26234, - "horticultural": 26235, - "hs": 26236, - "regression": 26237, - "timbers": 26238, - "blasting": 26239, - "##ounded": 26240, - "montagu": 26241, - "manipulating": 26242, - "##cit": 26243, - "catalytic": 26244, - "1550": 26245, - "troopers": 26246, - "##meo": 26247, - "condemnation": 26248, - "fitzpatrick": 26249, - "##oire": 26250, - "##roved": 26251, - "inexperienced": 26252, - "1670": 26253, - "castes": 26254, - "##lative": 26255, - "outing": 26256, - "314": 26257, - "dubois": 26258, - "flicking": 26259, - "quarrel": 26260, - "ste": 26261, - "learners": 26262, - "1625": 26263, - "iq": 26264, - "whistled": 26265, - "##class": 26266, - "282": 26267, 
- "classify": 26268, - "tariffs": 26269, - "temperament": 26270, - "355": 26271, - "folly": 26272, - "liszt": 26273, - "##yles": 26274, - "immersed": 26275, - "jordanian": 26276, - "ceasefire": 26277, - "apparel": 26278, - "extras": 26279, - "maru": 26280, - "fished": 26281, - "##bio": 26282, - "harta": 26283, - "stockport": 26284, - "assortment": 26285, - "craftsman": 26286, - "paralysis": 26287, - "transmitters": 26288, - "##cola": 26289, - "blindness": 26290, - "##wk": 26291, - "fatally": 26292, - "proficiency": 26293, - "solemnly": 26294, - "##orno": 26295, - "repairing": 26296, - "amore": 26297, - "groceries": 26298, - "ultraviolet": 26299, - "##chase": 26300, - "schoolhouse": 26301, - "##tua": 26302, - "resurgence": 26303, - "nailed": 26304, - "##otype": 26305, - "##×": 26306, - "ruse": 26307, - "saliva": 26308, - "diagrams": 26309, - "##tructing": 26310, - "albans": 26311, - "rann": 26312, - "thirties": 26313, - "1b": 26314, - "antennas": 26315, - "hilarious": 26316, - "cougars": 26317, - "paddington": 26318, - "stats": 26319, - "##eger": 26320, - "breakaway": 26321, - "ipod": 26322, - "reza": 26323, - "authorship": 26324, - "prohibiting": 26325, - "scoffed": 26326, - "##etz": 26327, - "##ttle": 26328, - "conscription": 26329, - "defected": 26330, - "trondheim": 26331, - "##fires": 26332, - "ivanov": 26333, - "keenan": 26334, - "##adan": 26335, - "##ciful": 26336, - "##fb": 26337, - "##slow": 26338, - "locating": 26339, - "##ials": 26340, - "##tford": 26341, - "cadiz": 26342, - "basalt": 26343, - "blankly": 26344, - "interned": 26345, - "rags": 26346, - "rattling": 26347, - "##tick": 26348, - "carpathian": 26349, - "reassured": 26350, - "sync": 26351, - "bum": 26352, - "guildford": 26353, - "iss": 26354, - "staunch": 26355, - "##onga": 26356, - "astronomers": 26357, - "sera": 26358, - "sofie": 26359, - "emergencies": 26360, - "susquehanna": 26361, - "##heard": 26362, - "duc": 26363, - "mastery": 26364, - "vh1": 26365, - "williamsburg": 26366, - "bayer": 
26367, - "buckled": 26368, - "craving": 26369, - "##khan": 26370, - "##rdes": 26371, - "bloomington": 26372, - "##write": 26373, - "alton": 26374, - "barbecue": 26375, - "##bians": 26376, - "justine": 26377, - "##hri": 26378, - "##ndt": 26379, - "delightful": 26380, - "smartphone": 26381, - "newtown": 26382, - "photon": 26383, - "retrieval": 26384, - "peugeot": 26385, - "hissing": 26386, - "##monium": 26387, - "##orough": 26388, - "flavors": 26389, - "lighted": 26390, - "relaunched": 26391, - "tainted": 26392, - "##games": 26393, - "##lysis": 26394, - "anarchy": 26395, - "microscopic": 26396, - "hopping": 26397, - "adept": 26398, - "evade": 26399, - "evie": 26400, - "##beau": 26401, - "inhibit": 26402, - "sinn": 26403, - "adjustable": 26404, - "hurst": 26405, - "intuition": 26406, - "wilton": 26407, - "cisco": 26408, - "44th": 26409, - "lawful": 26410, - "lowlands": 26411, - "stockings": 26412, - "thierry": 26413, - "##dalen": 26414, - "##hila": 26415, - "##nai": 26416, - "fates": 26417, - "prank": 26418, - "tb": 26419, - "maison": 26420, - "lobbied": 26421, - "provocative": 26422, - "1724": 26423, - "4a": 26424, - "utopia": 26425, - "##qual": 26426, - "carbonate": 26427, - "gujarati": 26428, - "purcell": 26429, - "##rford": 26430, - "curtiss": 26431, - "##mei": 26432, - "overgrown": 26433, - "arenas": 26434, - "mediation": 26435, - "swallows": 26436, - "##rnik": 26437, - "respectful": 26438, - "turnbull": 26439, - "##hedron": 26440, - "##hope": 26441, - "alyssa": 26442, - "ozone": 26443, - "##ʻi": 26444, - "ami": 26445, - "gestapo": 26446, - "johansson": 26447, - "snooker": 26448, - "canteen": 26449, - "cuff": 26450, - "declines": 26451, - "empathy": 26452, - "stigma": 26453, - "##ags": 26454, - "##iner": 26455, - "##raine": 26456, - "taxpayers": 26457, - "gui": 26458, - "volga": 26459, - "##wright": 26460, - "##copic": 26461, - "lifespan": 26462, - "overcame": 26463, - "tattooed": 26464, - "enactment": 26465, - "giggles": 26466, - "##ador": 26467, - "##camp": 
26468, - "barrington": 26469, - "bribe": 26470, - "obligatory": 26471, - "orbiting": 26472, - "peng": 26473, - "##enas": 26474, - "elusive": 26475, - "sucker": 26476, - "##vating": 26477, - "cong": 26478, - "hardship": 26479, - "empowered": 26480, - "anticipating": 26481, - "estrada": 26482, - "cryptic": 26483, - "greasy": 26484, - "detainees": 26485, - "planck": 26486, - "sudbury": 26487, - "plaid": 26488, - "dod": 26489, - "marriott": 26490, - "kayla": 26491, - "##ears": 26492, - "##vb": 26493, - "##zd": 26494, - "mortally": 26495, - "##hein": 26496, - "cognition": 26497, - "radha": 26498, - "319": 26499, - "liechtenstein": 26500, - "meade": 26501, - "richly": 26502, - "argyle": 26503, - "harpsichord": 26504, - "liberalism": 26505, - "trumpets": 26506, - "lauded": 26507, - "tyrant": 26508, - "salsa": 26509, - "tiled": 26510, - "lear": 26511, - "promoters": 26512, - "reused": 26513, - "slicing": 26514, - "trident": 26515, - "##chuk": 26516, - "##gami": 26517, - "##lka": 26518, - "cantor": 26519, - "checkpoint": 26520, - "##points": 26521, - "gaul": 26522, - "leger": 26523, - "mammalian": 26524, - "##tov": 26525, - "##aar": 26526, - "##schaft": 26527, - "doha": 26528, - "frenchman": 26529, - "nirvana": 26530, - "##vino": 26531, - "delgado": 26532, - "headlining": 26533, - "##eron": 26534, - "##iography": 26535, - "jug": 26536, - "tko": 26537, - "1649": 26538, - "naga": 26539, - "intersections": 26540, - "##jia": 26541, - "benfica": 26542, - "nawab": 26543, - "##suka": 26544, - "ashford": 26545, - "gulp": 26546, - "##deck": 26547, - "##vill": 26548, - "##rug": 26549, - "brentford": 26550, - "frazier": 26551, - "pleasures": 26552, - "dunne": 26553, - "potsdam": 26554, - "shenzhen": 26555, - "dentistry": 26556, - "##tec": 26557, - "flanagan": 26558, - "##dorff": 26559, - "##hear": 26560, - "chorale": 26561, - "dinah": 26562, - "prem": 26563, - "quezon": 26564, - "##rogated": 26565, - "relinquished": 26566, - "sutra": 26567, - "terri": 26568, - "##pani": 26569, - 
"flaps": 26570, - "##rissa": 26571, - "poly": 26572, - "##rnet": 26573, - "homme": 26574, - "aback": 26575, - "##eki": 26576, - "linger": 26577, - "womb": 26578, - "##kson": 26579, - "##lewood": 26580, - "doorstep": 26581, - "orthodoxy": 26582, - "threaded": 26583, - "westfield": 26584, - "##rval": 26585, - "dioceses": 26586, - "fridays": 26587, - "subsided": 26588, - "##gata": 26589, - "loyalists": 26590, - "##biotic": 26591, - "##ettes": 26592, - "letterman": 26593, - "lunatic": 26594, - "prelate": 26595, - "tenderly": 26596, - "invariably": 26597, - "souza": 26598, - "thug": 26599, - "winslow": 26600, - "##otide": 26601, - "furlongs": 26602, - "gogh": 26603, - "jeopardy": 26604, - "##runa": 26605, - "pegasus": 26606, - "##umble": 26607, - "humiliated": 26608, - "standalone": 26609, - "tagged": 26610, - "##roller": 26611, - "freshmen": 26612, - "klan": 26613, - "##bright": 26614, - "attaining": 26615, - "initiating": 26616, - "transatlantic": 26617, - "logged": 26618, - "viz": 26619, - "##uance": 26620, - "1723": 26621, - "combatants": 26622, - "intervening": 26623, - "stephane": 26624, - "chieftain": 26625, - "despised": 26626, - "grazed": 26627, - "317": 26628, - "cdc": 26629, - "galveston": 26630, - "godzilla": 26631, - "macro": 26632, - "simulate": 26633, - "##planes": 26634, - "parades": 26635, - "##esses": 26636, - "960": 26637, - "##ductive": 26638, - "##unes": 26639, - "equator": 26640, - "overdose": 26641, - "##cans": 26642, - "##hosh": 26643, - "##lifting": 26644, - "joshi": 26645, - "epstein": 26646, - "sonora": 26647, - "treacherous": 26648, - "aquatics": 26649, - "manchu": 26650, - "responsive": 26651, - "##sation": 26652, - "supervisory": 26653, - "##christ": 26654, - "##llins": 26655, - "##ibar": 26656, - "##balance": 26657, - "##uso": 26658, - "kimball": 26659, - "karlsruhe": 26660, - "mab": 26661, - "##emy": 26662, - "ignores": 26663, - "phonetic": 26664, - "reuters": 26665, - "spaghetti": 26666, - "820": 26667, - "almighty": 26668, - "danzig": 
26669, - "rumbling": 26670, - "tombstone": 26671, - "designations": 26672, - "lured": 26673, - "outset": 26674, - "##felt": 26675, - "supermarkets": 26676, - "##wt": 26677, - "grupo": 26678, - "kei": 26679, - "kraft": 26680, - "susanna": 26681, - "##blood": 26682, - "comprehension": 26683, - "genealogy": 26684, - "##aghan": 26685, - "##verted": 26686, - "redding": 26687, - "##ythe": 26688, - "1722": 26689, - "bowing": 26690, - "##pore": 26691, - "##roi": 26692, - "lest": 26693, - "sharpened": 26694, - "fulbright": 26695, - "valkyrie": 26696, - "sikhs": 26697, - "##unds": 26698, - "swans": 26699, - "bouquet": 26700, - "merritt": 26701, - "##tage": 26702, - "##venting": 26703, - "commuted": 26704, - "redhead": 26705, - "clerks": 26706, - "leasing": 26707, - "cesare": 26708, - "dea": 26709, - "hazy": 26710, - "##vances": 26711, - "fledged": 26712, - "greenfield": 26713, - "servicemen": 26714, - "##gical": 26715, - "armando": 26716, - "blackout": 26717, - "dt": 26718, - "sagged": 26719, - "downloadable": 26720, - "intra": 26721, - "potion": 26722, - "pods": 26723, - "##4th": 26724, - "##mism": 26725, - "xp": 26726, - "attendants": 26727, - "gambia": 26728, - "stale": 26729, - "##ntine": 26730, - "plump": 26731, - "asteroids": 26732, - "rediscovered": 26733, - "buds": 26734, - "flea": 26735, - "hive": 26736, - "##neas": 26737, - "1737": 26738, - "classifications": 26739, - "debuts": 26740, - "##eles": 26741, - "olympus": 26742, - "scala": 26743, - "##eurs": 26744, - "##gno": 26745, - "##mute": 26746, - "hummed": 26747, - "sigismund": 26748, - "visuals": 26749, - "wiggled": 26750, - "await": 26751, - "pilasters": 26752, - "clench": 26753, - "sulfate": 26754, - "##ances": 26755, - "bellevue": 26756, - "enigma": 26757, - "trainee": 26758, - "snort": 26759, - "##sw": 26760, - "clouded": 26761, - "denim": 26762, - "##rank": 26763, - "##rder": 26764, - "churning": 26765, - "hartman": 26766, - "lodges": 26767, - "riches": 26768, - "sima": 26769, - "##missible": 26770, - 
"accountable": 26771, - "socrates": 26772, - "regulates": 26773, - "mueller": 26774, - "##cr": 26775, - "1702": 26776, - "avoids": 26777, - "solids": 26778, - "himalayas": 26779, - "nutrient": 26780, - "pup": 26781, - "##jevic": 26782, - "squat": 26783, - "fades": 26784, - "nec": 26785, - "##lates": 26786, - "##pina": 26787, - "##rona": 26788, - "##ου": 26789, - "privateer": 26790, - "tequila": 26791, - "##gative": 26792, - "##mpton": 26793, - "apt": 26794, - "hornet": 26795, - "immortals": 26796, - "##dou": 26797, - "asturias": 26798, - "cleansing": 26799, - "dario": 26800, - "##rries": 26801, - "##anta": 26802, - "etymology": 26803, - "servicing": 26804, - "zhejiang": 26805, - "##venor": 26806, - "##nx": 26807, - "horned": 26808, - "erasmus": 26809, - "rayon": 26810, - "relocating": 26811, - "£10": 26812, - "##bags": 26813, - "escalated": 26814, - "promenade": 26815, - "stubble": 26816, - "2010s": 26817, - "artisans": 26818, - "axial": 26819, - "liquids": 26820, - "mora": 26821, - "sho": 26822, - "yoo": 26823, - "##tsky": 26824, - "bundles": 26825, - "oldies": 26826, - "##nally": 26827, - "notification": 26828, - "bastion": 26829, - "##ths": 26830, - "sparkle": 26831, - "##lved": 26832, - "1728": 26833, - "leash": 26834, - "pathogen": 26835, - "highs": 26836, - "##hmi": 26837, - "immature": 26838, - "880": 26839, - "gonzaga": 26840, - "ignatius": 26841, - "mansions": 26842, - "monterrey": 26843, - "sweets": 26844, - "bryson": 26845, - "##loe": 26846, - "polled": 26847, - "regatta": 26848, - "brightest": 26849, - "pei": 26850, - "rosy": 26851, - "squid": 26852, - "hatfield": 26853, - "payroll": 26854, - "addict": 26855, - "meath": 26856, - "cornerback": 26857, - "heaviest": 26858, - "lodging": 26859, - "##mage": 26860, - "capcom": 26861, - "rippled": 26862, - "##sily": 26863, - "barnet": 26864, - "mayhem": 26865, - "ymca": 26866, - "snuggled": 26867, - "rousseau": 26868, - "##cute": 26869, - "blanchard": 26870, - "284": 26871, - "fragmented": 26872, - "leighton": 
26873, - "chromosomes": 26874, - "risking": 26875, - "##md": 26876, - "##strel": 26877, - "##utter": 26878, - "corinne": 26879, - "coyotes": 26880, - "cynical": 26881, - "hiroshi": 26882, - "yeomanry": 26883, - "##ractive": 26884, - "ebook": 26885, - "grading": 26886, - "mandela": 26887, - "plume": 26888, - "agustin": 26889, - "magdalene": 26890, - "##rkin": 26891, - "bea": 26892, - "femme": 26893, - "trafford": 26894, - "##coll": 26895, - "##lun": 26896, - "##tance": 26897, - "52nd": 26898, - "fourier": 26899, - "upton": 26900, - "##mental": 26901, - "camilla": 26902, - "gust": 26903, - "iihf": 26904, - "islamabad": 26905, - "longevity": 26906, - "##kala": 26907, - "feldman": 26908, - "netting": 26909, - "##rization": 26910, - "endeavour": 26911, - "foraging": 26912, - "mfa": 26913, - "orr": 26914, - "##open": 26915, - "greyish": 26916, - "contradiction": 26917, - "graz": 26918, - "##ruff": 26919, - "handicapped": 26920, - "marlene": 26921, - "tweed": 26922, - "oaxaca": 26923, - "spp": 26924, - "campos": 26925, - "miocene": 26926, - "pri": 26927, - "configured": 26928, - "cooks": 26929, - "pluto": 26930, - "cozy": 26931, - "pornographic": 26932, - "##entes": 26933, - "70th": 26934, - "fairness": 26935, - "glided": 26936, - "jonny": 26937, - "lynne": 26938, - "rounding": 26939, - "sired": 26940, - "##emon": 26941, - "##nist": 26942, - "remade": 26943, - "uncover": 26944, - "##mack": 26945, - "complied": 26946, - "lei": 26947, - "newsweek": 26948, - "##jured": 26949, - "##parts": 26950, - "##enting": 26951, - "##pg": 26952, - "293": 26953, - "finer": 26954, - "guerrillas": 26955, - "athenian": 26956, - "deng": 26957, - "disused": 26958, - "stepmother": 26959, - "accuse": 26960, - "gingerly": 26961, - "seduction": 26962, - "521": 26963, - "confronting": 26964, - "##walker": 26965, - "##going": 26966, - "gora": 26967, - "nostalgia": 26968, - "sabres": 26969, - "virginity": 26970, - "wrenched": 26971, - "##minated": 26972, - "syndication": 26973, - "wielding": 26974, - 
"eyre": 26975, - "##56": 26976, - "##gnon": 26977, - "##igny": 26978, - "behaved": 26979, - "taxpayer": 26980, - "sweeps": 26981, - "##growth": 26982, - "childless": 26983, - "gallant": 26984, - "##ywood": 26985, - "amplified": 26986, - "geraldine": 26987, - "scrape": 26988, - "##ffi": 26989, - "babylonian": 26990, - "fresco": 26991, - "##rdan": 26992, - "##kney": 26993, - "##position": 26994, - "1718": 26995, - "restricting": 26996, - "tack": 26997, - "fukuoka": 26998, - "osborn": 26999, - "selector": 27000, - "partnering": 27001, - "##dlow": 27002, - "318": 27003, - "gnu": 27004, - "kia": 27005, - "tak": 27006, - "whitley": 27007, - "gables": 27008, - "##54": 27009, - "##mania": 27010, - "mri": 27011, - "softness": 27012, - "immersion": 27013, - "##bots": 27014, - "##evsky": 27015, - "1713": 27016, - "chilling": 27017, - "insignificant": 27018, - "pcs": 27019, - "##uis": 27020, - "elites": 27021, - "lina": 27022, - "purported": 27023, - "supplemental": 27024, - "teaming": 27025, - "##americana": 27026, - "##dding": 27027, - "##inton": 27028, - "proficient": 27029, - "rouen": 27030, - "##nage": 27031, - "##rret": 27032, - "niccolo": 27033, - "selects": 27034, - "##bread": 27035, - "fluffy": 27036, - "1621": 27037, - "gruff": 27038, - "knotted": 27039, - "mukherjee": 27040, - "polgara": 27041, - "thrash": 27042, - "nicholls": 27043, - "secluded": 27044, - "smoothing": 27045, - "thru": 27046, - "corsica": 27047, - "loaf": 27048, - "whitaker": 27049, - "inquiries": 27050, - "##rrier": 27051, - "##kam": 27052, - "indochina": 27053, - "289": 27054, - "marlins": 27055, - "myles": 27056, - "peking": 27057, - "##tea": 27058, - "extracts": 27059, - "pastry": 27060, - "superhuman": 27061, - "connacht": 27062, - "vogel": 27063, - "##ditional": 27064, - "##het": 27065, - "##udged": 27066, - "##lash": 27067, - "gloss": 27068, - "quarries": 27069, - "refit": 27070, - "teaser": 27071, - "##alic": 27072, - "##gaon": 27073, - "20s": 27074, - "materialized": 27075, - "sling": 
27076, - "camped": 27077, - "pickering": 27078, - "tung": 27079, - "tracker": 27080, - "pursuant": 27081, - "##cide": 27082, - "cranes": 27083, - "soc": 27084, - "##cini": 27085, - "##typical": 27086, - "##viere": 27087, - "anhalt": 27088, - "overboard": 27089, - "workout": 27090, - "chores": 27091, - "fares": 27092, - "orphaned": 27093, - "stains": 27094, - "##logie": 27095, - "fenton": 27096, - "surpassing": 27097, - "joyah": 27098, - "triggers": 27099, - "##itte": 27100, - "grandmaster": 27101, - "##lass": 27102, - "##lists": 27103, - "clapping": 27104, - "fraudulent": 27105, - "ledger": 27106, - "nagasaki": 27107, - "##cor": 27108, - "##nosis": 27109, - "##tsa": 27110, - "eucalyptus": 27111, - "tun": 27112, - "##icio": 27113, - "##rney": 27114, - "##tara": 27115, - "dax": 27116, - "heroism": 27117, - "ina": 27118, - "wrexham": 27119, - "onboard": 27120, - "unsigned": 27121, - "##dates": 27122, - "moshe": 27123, - "galley": 27124, - "winnie": 27125, - "droplets": 27126, - "exiles": 27127, - "praises": 27128, - "watered": 27129, - "noodles": 27130, - "##aia": 27131, - "fein": 27132, - "adi": 27133, - "leland": 27134, - "multicultural": 27135, - "stink": 27136, - "bingo": 27137, - "comets": 27138, - "erskine": 27139, - "modernized": 27140, - "canned": 27141, - "constraint": 27142, - "domestically": 27143, - "chemotherapy": 27144, - "featherweight": 27145, - "stifled": 27146, - "##mum": 27147, - "darkly": 27148, - "irresistible": 27149, - "refreshing": 27150, - "hasty": 27151, - "isolate": 27152, - "##oys": 27153, - "kitchener": 27154, - "planners": 27155, - "##wehr": 27156, - "cages": 27157, - "yarn": 27158, - "implant": 27159, - "toulon": 27160, - "elects": 27161, - "childbirth": 27162, - "yue": 27163, - "##lind": 27164, - "##lone": 27165, - "cn": 27166, - "rightful": 27167, - "sportsman": 27168, - "junctions": 27169, - "remodeled": 27170, - "specifies": 27171, - "##rgh": 27172, - "291": 27173, - "##oons": 27174, - "complimented": 27175, - "##urgent": 27176, - 
"lister": 27177, - "ot": 27178, - "##logic": 27179, - "bequeathed": 27180, - "cheekbones": 27181, - "fontana": 27182, - "gabby": 27183, - "##dial": 27184, - "amadeus": 27185, - "corrugated": 27186, - "maverick": 27187, - "resented": 27188, - "triangles": 27189, - "##hered": 27190, - "##usly": 27191, - "nazareth": 27192, - "tyrol": 27193, - "1675": 27194, - "assent": 27195, - "poorer": 27196, - "sectional": 27197, - "aegean": 27198, - "##cous": 27199, - "296": 27200, - "nylon": 27201, - "ghanaian": 27202, - "##egorical": 27203, - "##weig": 27204, - "cushions": 27205, - "forbid": 27206, - "fusiliers": 27207, - "obstruction": 27208, - "somerville": 27209, - "##scia": 27210, - "dime": 27211, - "earrings": 27212, - "elliptical": 27213, - "leyte": 27214, - "oder": 27215, - "polymers": 27216, - "timmy": 27217, - "atm": 27218, - "midtown": 27219, - "piloted": 27220, - "settles": 27221, - "continual": 27222, - "externally": 27223, - "mayfield": 27224, - "##uh": 27225, - "enrichment": 27226, - "henson": 27227, - "keane": 27228, - "persians": 27229, - "1733": 27230, - "benji": 27231, - "braden": 27232, - "pep": 27233, - "324": 27234, - "##efe": 27235, - "contenders": 27236, - "pepsi": 27237, - "valet": 27238, - "##isches": 27239, - "298": 27240, - "##asse": 27241, - "##earing": 27242, - "goofy": 27243, - "stroll": 27244, - "##amen": 27245, - "authoritarian": 27246, - "occurrences": 27247, - "adversary": 27248, - "ahmedabad": 27249, - "tangent": 27250, - "toppled": 27251, - "dorchester": 27252, - "1672": 27253, - "modernism": 27254, - "marxism": 27255, - "islamist": 27256, - "charlemagne": 27257, - "exponential": 27258, - "racks": 27259, - "unicode": 27260, - "brunette": 27261, - "mbc": 27262, - "pic": 27263, - "skirmish": 27264, - "##bund": 27265, - "##lad": 27266, - "##powered": 27267, - "##yst": 27268, - "hoisted": 27269, - "messina": 27270, - "shatter": 27271, - "##ctum": 27272, - "jedi": 27273, - "vantage": 27274, - "##music": 27275, - "##neil": 27276, - "clemens": 27277, 
- "mahmoud": 27278, - "corrupted": 27279, - "authentication": 27280, - "lowry": 27281, - "nils": 27282, - "##washed": 27283, - "omnibus": 27284, - "wounding": 27285, - "jillian": 27286, - "##itors": 27287, - "##opped": 27288, - "serialized": 27289, - "narcotics": 27290, - "handheld": 27291, - "##arm": 27292, - "##plicity": 27293, - "intersecting": 27294, - "stimulating": 27295, - "##onis": 27296, - "crate": 27297, - "fellowships": 27298, - "hemingway": 27299, - "casinos": 27300, - "climatic": 27301, - "fordham": 27302, - "copeland": 27303, - "drip": 27304, - "beatty": 27305, - "leaflets": 27306, - "robber": 27307, - "brothel": 27308, - "madeira": 27309, - "##hedral": 27310, - "sphinx": 27311, - "ultrasound": 27312, - "##vana": 27313, - "valor": 27314, - "forbade": 27315, - "leonid": 27316, - "villas": 27317, - "##aldo": 27318, - "duane": 27319, - "marquez": 27320, - "##cytes": 27321, - "disadvantaged": 27322, - "forearms": 27323, - "kawasaki": 27324, - "reacts": 27325, - "consular": 27326, - "lax": 27327, - "uncles": 27328, - "uphold": 27329, - "##hopper": 27330, - "concepcion": 27331, - "dorsey": 27332, - "lass": 27333, - "##izan": 27334, - "arching": 27335, - "passageway": 27336, - "1708": 27337, - "researches": 27338, - "tia": 27339, - "internationals": 27340, - "##graphs": 27341, - "##opers": 27342, - "distinguishes": 27343, - "javanese": 27344, - "divert": 27345, - "##uven": 27346, - "plotted": 27347, - "##listic": 27348, - "##rwin": 27349, - "##erik": 27350, - "##tify": 27351, - "affirmative": 27352, - "signifies": 27353, - "validation": 27354, - "##bson": 27355, - "kari": 27356, - "felicity": 27357, - "georgina": 27358, - "zulu": 27359, - "##eros": 27360, - "##rained": 27361, - "##rath": 27362, - "overcoming": 27363, - "##dot": 27364, - "argyll": 27365, - "##rbin": 27366, - "1734": 27367, - "chiba": 27368, - "ratification": 27369, - "windy": 27370, - "earls": 27371, - "parapet": 27372, - "##marks": 27373, - "hunan": 27374, - "pristine": 27375, - "astrid": 
27376, - "punta": 27377, - "##gart": 27378, - "brodie": 27379, - "##kota": 27380, - "##oder": 27381, - "malaga": 27382, - "minerva": 27383, - "rouse": 27384, - "##phonic": 27385, - "bellowed": 27386, - "pagoda": 27387, - "portals": 27388, - "reclamation": 27389, - "##gur": 27390, - "##odies": 27391, - "##⁄₄": 27392, - "parentheses": 27393, - "quoting": 27394, - "allergic": 27395, - "palette": 27396, - "showcases": 27397, - "benefactor": 27398, - "heartland": 27399, - "nonlinear": 27400, - "##tness": 27401, - "bladed": 27402, - "cheerfully": 27403, - "scans": 27404, - "##ety": 27405, - "##hone": 27406, - "1666": 27407, - "girlfriends": 27408, - "pedersen": 27409, - "hiram": 27410, - "sous": 27411, - "##liche": 27412, - "##nator": 27413, - "1683": 27414, - "##nery": 27415, - "##orio": 27416, - "##umen": 27417, - "bobo": 27418, - "primaries": 27419, - "smiley": 27420, - "##cb": 27421, - "unearthed": 27422, - "uniformly": 27423, - "fis": 27424, - "metadata": 27425, - "1635": 27426, - "ind": 27427, - "##oted": 27428, - "recoil": 27429, - "##titles": 27430, - "##tura": 27431, - "##ια": 27432, - "406": 27433, - "hilbert": 27434, - "jamestown": 27435, - "mcmillan": 27436, - "tulane": 27437, - "seychelles": 27438, - "##frid": 27439, - "antics": 27440, - "coli": 27441, - "fated": 27442, - "stucco": 27443, - "##grants": 27444, - "1654": 27445, - "bulky": 27446, - "accolades": 27447, - "arrays": 27448, - "caledonian": 27449, - "carnage": 27450, - "optimism": 27451, - "puebla": 27452, - "##tative": 27453, - "##cave": 27454, - "enforcing": 27455, - "rotherham": 27456, - "seo": 27457, - "dunlop": 27458, - "aeronautics": 27459, - "chimed": 27460, - "incline": 27461, - "zoning": 27462, - "archduke": 27463, - "hellenistic": 27464, - "##oses": 27465, - "##sions": 27466, - "candi": 27467, - "thong": 27468, - "##ople": 27469, - "magnate": 27470, - "rustic": 27471, - "##rsk": 27472, - "projective": 27473, - "slant": 27474, - "##offs": 27475, - "danes": 27476, - "hollis": 27477, - 
"vocalists": 27478, - "##ammed": 27479, - "congenital": 27480, - "contend": 27481, - "gesellschaft": 27482, - "##ocating": 27483, - "##pressive": 27484, - "douglass": 27485, - "quieter": 27486, - "##cm": 27487, - "##kshi": 27488, - "howled": 27489, - "salim": 27490, - "spontaneously": 27491, - "townsville": 27492, - "buena": 27493, - "southport": 27494, - "##bold": 27495, - "kato": 27496, - "1638": 27497, - "faerie": 27498, - "stiffly": 27499, - "##vus": 27500, - "##rled": 27501, - "297": 27502, - "flawless": 27503, - "realising": 27504, - "taboo": 27505, - "##7th": 27506, - "bytes": 27507, - "straightening": 27508, - "356": 27509, - "jena": 27510, - "##hid": 27511, - "##rmin": 27512, - "cartwright": 27513, - "berber": 27514, - "bertram": 27515, - "soloists": 27516, - "411": 27517, - "noses": 27518, - "417": 27519, - "coping": 27520, - "fission": 27521, - "hardin": 27522, - "inca": 27523, - "##cen": 27524, - "1717": 27525, - "mobilized": 27526, - "vhf": 27527, - "##raf": 27528, - "biscuits": 27529, - "curate": 27530, - "##85": 27531, - "##anial": 27532, - "331": 27533, - "gaunt": 27534, - "neighbourhoods": 27535, - "1540": 27536, - "##abas": 27537, - "blanca": 27538, - "bypassed": 27539, - "sockets": 27540, - "behold": 27541, - "coincidentally": 27542, - "##bane": 27543, - "nara": 27544, - "shave": 27545, - "splinter": 27546, - "terrific": 27547, - "##arion": 27548, - "##erian": 27549, - "commonplace": 27550, - "juris": 27551, - "redwood": 27552, - "waistband": 27553, - "boxed": 27554, - "caitlin": 27555, - "fingerprints": 27556, - "jennie": 27557, - "naturalized": 27558, - "##ired": 27559, - "balfour": 27560, - "craters": 27561, - "jody": 27562, - "bungalow": 27563, - "hugely": 27564, - "quilt": 27565, - "glitter": 27566, - "pigeons": 27567, - "undertaker": 27568, - "bulging": 27569, - "constrained": 27570, - "goo": 27571, - "##sil": 27572, - "##akh": 27573, - "assimilation": 27574, - "reworked": 27575, - "##person": 27576, - "persuasion": 27577, - "##pants": 
27578, - "felicia": 27579, - "##cliff": 27580, - "##ulent": 27581, - "1732": 27582, - "explodes": 27583, - "##dun": 27584, - "##inium": 27585, - "##zic": 27586, - "lyman": 27587, - "vulture": 27588, - "hog": 27589, - "overlook": 27590, - "begs": 27591, - "northwards": 27592, - "ow": 27593, - "spoil": 27594, - "##urer": 27595, - "fatima": 27596, - "favorably": 27597, - "accumulate": 27598, - "sargent": 27599, - "sorority": 27600, - "corresponded": 27601, - "dispersal": 27602, - "kochi": 27603, - "toned": 27604, - "##imi": 27605, - "##lita": 27606, - "internacional": 27607, - "newfound": 27608, - "##agger": 27609, - "##lynn": 27610, - "##rigue": 27611, - "booths": 27612, - "peanuts": 27613, - "##eborg": 27614, - "medicare": 27615, - "muriel": 27616, - "nur": 27617, - "##uram": 27618, - "crates": 27619, - "millennia": 27620, - "pajamas": 27621, - "worsened": 27622, - "##breakers": 27623, - "jimi": 27624, - "vanuatu": 27625, - "yawned": 27626, - "##udeau": 27627, - "carousel": 27628, - "##hony": 27629, - "hurdle": 27630, - "##ccus": 27631, - "##mounted": 27632, - "##pod": 27633, - "rv": 27634, - "##eche": 27635, - "airship": 27636, - "ambiguity": 27637, - "compulsion": 27638, - "recapture": 27639, - "##claiming": 27640, - "arthritis": 27641, - "##osomal": 27642, - "1667": 27643, - "asserting": 27644, - "ngc": 27645, - "sniffing": 27646, - "dade": 27647, - "discontent": 27648, - "glendale": 27649, - "ported": 27650, - "##amina": 27651, - "defamation": 27652, - "rammed": 27653, - "##scent": 27654, - "fling": 27655, - "livingstone": 27656, - "##fleet": 27657, - "875": 27658, - "##ppy": 27659, - "apocalyptic": 27660, - "comrade": 27661, - "lcd": 27662, - "##lowe": 27663, - "cessna": 27664, - "eine": 27665, - "persecuted": 27666, - "subsistence": 27667, - "demi": 27668, - "hoop": 27669, - "reliefs": 27670, - "710": 27671, - "coptic": 27672, - "progressing": 27673, - "stemmed": 27674, - "perpetrators": 27675, - "1665": 27676, - "priestess": 27677, - "##nio": 27678, - 
"dobson": 27679, - "ebony": 27680, - "rooster": 27681, - "itf": 27682, - "tortricidae": 27683, - "##bbon": 27684, - "##jian": 27685, - "cleanup": 27686, - "##jean": 27687, - "##øy": 27688, - "1721": 27689, - "eighties": 27690, - "taxonomic": 27691, - "holiness": 27692, - "##hearted": 27693, - "##spar": 27694, - "antilles": 27695, - "showcasing": 27696, - "stabilized": 27697, - "##nb": 27698, - "gia": 27699, - "mascara": 27700, - "michelangelo": 27701, - "dawned": 27702, - "##uria": 27703, - "##vinsky": 27704, - "extinguished": 27705, - "fitz": 27706, - "grotesque": 27707, - "£100": 27708, - "##fera": 27709, - "##loid": 27710, - "##mous": 27711, - "barges": 27712, - "neue": 27713, - "throbbed": 27714, - "cipher": 27715, - "johnnie": 27716, - "##a1": 27717, - "##mpt": 27718, - "outburst": 27719, - "##swick": 27720, - "spearheaded": 27721, - "administrations": 27722, - "c1": 27723, - "heartbreak": 27724, - "pixels": 27725, - "pleasantly": 27726, - "##enay": 27727, - "lombardy": 27728, - "plush": 27729, - "##nsed": 27730, - "bobbie": 27731, - "##hly": 27732, - "reapers": 27733, - "tremor": 27734, - "xiang": 27735, - "minogue": 27736, - "substantive": 27737, - "hitch": 27738, - "barak": 27739, - "##wyl": 27740, - "kwan": 27741, - "##encia": 27742, - "910": 27743, - "obscene": 27744, - "elegance": 27745, - "indus": 27746, - "surfer": 27747, - "bribery": 27748, - "conserve": 27749, - "##hyllum": 27750, - "##masters": 27751, - "horatio": 27752, - "##fat": 27753, - "apes": 27754, - "rebound": 27755, - "psychotic": 27756, - "##pour": 27757, - "iteration": 27758, - "##mium": 27759, - "##vani": 27760, - "botanic": 27761, - "horribly": 27762, - "antiques": 27763, - "dispose": 27764, - "paxton": 27765, - "##hli": 27766, - "##wg": 27767, - "timeless": 27768, - "1704": 27769, - "disregard": 27770, - "engraver": 27771, - "hounds": 27772, - "##bau": 27773, - "##version": 27774, - "looted": 27775, - "uno": 27776, - "facilitates": 27777, - "groans": 27778, - "masjid": 27779, - 
"rutland": 27780, - "antibody": 27781, - "disqualification": 27782, - "decatur": 27783, - "footballers": 27784, - "quake": 27785, - "slacks": 27786, - "48th": 27787, - "rein": 27788, - "scribe": 27789, - "stabilize": 27790, - "commits": 27791, - "exemplary": 27792, - "tho": 27793, - "##hort": 27794, - "##chison": 27795, - "pantry": 27796, - "traversed": 27797, - "##hiti": 27798, - "disrepair": 27799, - "identifiable": 27800, - "vibrated": 27801, - "baccalaureate": 27802, - "##nnis": 27803, - "csa": 27804, - "interviewing": 27805, - "##iensis": 27806, - "##raße": 27807, - "greaves": 27808, - "wealthiest": 27809, - "343": 27810, - "classed": 27811, - "jogged": 27812, - "£5": 27813, - "##58": 27814, - "##atal": 27815, - "illuminating": 27816, - "knicks": 27817, - "respecting": 27818, - "##uno": 27819, - "scrubbed": 27820, - "##iji": 27821, - "##dles": 27822, - "kruger": 27823, - "moods": 27824, - "growls": 27825, - "raider": 27826, - "silvia": 27827, - "chefs": 27828, - "kam": 27829, - "vr": 27830, - "cree": 27831, - "percival": 27832, - "##terol": 27833, - "gunter": 27834, - "counterattack": 27835, - "defiant": 27836, - "henan": 27837, - "ze": 27838, - "##rasia": 27839, - "##riety": 27840, - "equivalence": 27841, - "submissions": 27842, - "##fra": 27843, - "##thor": 27844, - "bautista": 27845, - "mechanically": 27846, - "##heater": 27847, - "cornice": 27848, - "herbal": 27849, - "templar": 27850, - "##mering": 27851, - "outputs": 27852, - "ruining": 27853, - "ligand": 27854, - "renumbered": 27855, - "extravagant": 27856, - "mika": 27857, - "blockbuster": 27858, - "eta": 27859, - "insurrection": 27860, - "##ilia": 27861, - "darkening": 27862, - "ferocious": 27863, - "pianos": 27864, - "strife": 27865, - "kinship": 27866, - "##aer": 27867, - "melee": 27868, - "##anor": 27869, - "##iste": 27870, - "##may": 27871, - "##oue": 27872, - "decidedly": 27873, - "weep": 27874, - "##jad": 27875, - "##missive": 27876, - "##ppel": 27877, - "354": 27878, - "puget": 27879, - 
"unease": 27880, - "##gnant": 27881, - "1629": 27882, - "hammering": 27883, - "kassel": 27884, - "ob": 27885, - "wessex": 27886, - "##lga": 27887, - "bromwich": 27888, - "egan": 27889, - "paranoia": 27890, - "utilization": 27891, - "##atable": 27892, - "##idad": 27893, - "contradictory": 27894, - "provoke": 27895, - "##ols": 27896, - "##ouring": 27897, - "##tangled": 27898, - "knesset": 27899, - "##very": 27900, - "##lette": 27901, - "plumbing": 27902, - "##sden": 27903, - "##¹": 27904, - "greensboro": 27905, - "occult": 27906, - "sniff": 27907, - "338": 27908, - "zev": 27909, - "beaming": 27910, - "gamer": 27911, - "haggard": 27912, - "mahal": 27913, - "##olt": 27914, - "##pins": 27915, - "mendes": 27916, - "utmost": 27917, - "briefing": 27918, - "gunnery": 27919, - "##gut": 27920, - "##pher": 27921, - "##zh": 27922, - "##rok": 27923, - "1679": 27924, - "khalifa": 27925, - "sonya": 27926, - "##boot": 27927, - "principals": 27928, - "urbana": 27929, - "wiring": 27930, - "##liffe": 27931, - "##minating": 27932, - "##rrado": 27933, - "dahl": 27934, - "nyu": 27935, - "skepticism": 27936, - "np": 27937, - "townspeople": 27938, - "ithaca": 27939, - "lobster": 27940, - "somethin": 27941, - "##fur": 27942, - "##arina": 27943, - "##−1": 27944, - "freighter": 27945, - "zimmerman": 27946, - "biceps": 27947, - "contractual": 27948, - "##herton": 27949, - "amend": 27950, - "hurrying": 27951, - "subconscious": 27952, - "##anal": 27953, - "336": 27954, - "meng": 27955, - "clermont": 27956, - "spawning": 27957, - "##eia": 27958, - "##lub": 27959, - "dignitaries": 27960, - "impetus": 27961, - "snacks": 27962, - "spotting": 27963, - "twigs": 27964, - "##bilis": 27965, - "##cz": 27966, - "##ouk": 27967, - "libertadores": 27968, - "nic": 27969, - "skylar": 27970, - "##aina": 27971, - "##firm": 27972, - "gustave": 27973, - "asean": 27974, - "##anum": 27975, - "dieter": 27976, - "legislatures": 27977, - "flirt": 27978, - "bromley": 27979, - "trolls": 27980, - "umar": 27981, - 
"##bbies": 27982, - "##tyle": 27983, - "blah": 27984, - "parc": 27985, - "bridgeport": 27986, - "crank": 27987, - "negligence": 27988, - "##nction": 27989, - "46th": 27990, - "constantin": 27991, - "molded": 27992, - "bandages": 27993, - "seriousness": 27994, - "00pm": 27995, - "siegel": 27996, - "carpets": 27997, - "compartments": 27998, - "upbeat": 27999, - "statehood": 28000, - "##dner": 28001, - "##edging": 28002, - "marko": 28003, - "730": 28004, - "platt": 28005, - "##hane": 28006, - "paving": 28007, - "##iy": 28008, - "1738": 28009, - "abbess": 28010, - "impatience": 28011, - "limousine": 28012, - "nbl": 28013, - "##talk": 28014, - "441": 28015, - "lucille": 28016, - "mojo": 28017, - "nightfall": 28018, - "robbers": 28019, - "##nais": 28020, - "karel": 28021, - "brisk": 28022, - "calves": 28023, - "replicate": 28024, - "ascribed": 28025, - "telescopes": 28026, - "##olf": 28027, - "intimidated": 28028, - "##reen": 28029, - "ballast": 28030, - "specialization": 28031, - "##sit": 28032, - "aerodynamic": 28033, - "caliphate": 28034, - "rainer": 28035, - "visionary": 28036, - "##arded": 28037, - "epsilon": 28038, - "##aday": 28039, - "##onte": 28040, - "aggregation": 28041, - "auditory": 28042, - "boosted": 28043, - "reunification": 28044, - "kathmandu": 28045, - "loco": 28046, - "robyn": 28047, - "402": 28048, - "acknowledges": 28049, - "appointing": 28050, - "humanoid": 28051, - "newell": 28052, - "redeveloped": 28053, - "restraints": 28054, - "##tained": 28055, - "barbarians": 28056, - "chopper": 28057, - "1609": 28058, - "italiana": 28059, - "##lez": 28060, - "##lho": 28061, - "investigates": 28062, - "wrestlemania": 28063, - "##anies": 28064, - "##bib": 28065, - "690": 28066, - "##falls": 28067, - "creaked": 28068, - "dragoons": 28069, - "gravely": 28070, - "minions": 28071, - "stupidity": 28072, - "volley": 28073, - "##harat": 28074, - "##week": 28075, - "musik": 28076, - "##eries": 28077, - "##uously": 28078, - "fungal": 28079, - "massimo": 28080, - 
"semantics": 28081, - "malvern": 28082, - "##ahl": 28083, - "##pee": 28084, - "discourage": 28085, - "embryo": 28086, - "imperialism": 28087, - "1910s": 28088, - "profoundly": 28089, - "##ddled": 28090, - "jiangsu": 28091, - "sparkled": 28092, - "stat": 28093, - "##holz": 28094, - "sweatshirt": 28095, - "tobin": 28096, - "##iction": 28097, - "sneered": 28098, - "##cheon": 28099, - "##oit": 28100, - "brit": 28101, - "causal": 28102, - "smyth": 28103, - "##neuve": 28104, - "diffuse": 28105, - "perrin": 28106, - "silvio": 28107, - "##ipes": 28108, - "##recht": 28109, - "detonated": 28110, - "iqbal": 28111, - "selma": 28112, - "##nism": 28113, - "##zumi": 28114, - "roasted": 28115, - "##riders": 28116, - "tay": 28117, - "##ados": 28118, - "##mament": 28119, - "##mut": 28120, - "##rud": 28121, - "840": 28122, - "completes": 28123, - "nipples": 28124, - "cfa": 28125, - "flavour": 28126, - "hirsch": 28127, - "##laus": 28128, - "calderon": 28129, - "sneakers": 28130, - "moravian": 28131, - "##ksha": 28132, - "1622": 28133, - "rq": 28134, - "294": 28135, - "##imeters": 28136, - "bodo": 28137, - "##isance": 28138, - "##pre": 28139, - "##ronia": 28140, - "anatomical": 28141, - "excerpt": 28142, - "##lke": 28143, - "dh": 28144, - "kunst": 28145, - "##tablished": 28146, - "##scoe": 28147, - "biomass": 28148, - "panted": 28149, - "unharmed": 28150, - "gael": 28151, - "housemates": 28152, - "montpellier": 28153, - "##59": 28154, - "coa": 28155, - "rodents": 28156, - "tonic": 28157, - "hickory": 28158, - "singleton": 28159, - "##taro": 28160, - "451": 28161, - "1719": 28162, - "aldo": 28163, - "breaststroke": 28164, - "dempsey": 28165, - "och": 28166, - "rocco": 28167, - "##cuit": 28168, - "merton": 28169, - "dissemination": 28170, - "midsummer": 28171, - "serials": 28172, - "##idi": 28173, - "haji": 28174, - "polynomials": 28175, - "##rdon": 28176, - "gs": 28177, - "enoch": 28178, - "prematurely": 28179, - "shutter": 28180, - "taunton": 28181, - "£3": 28182, - "##grating": 28183, 
- "##inates": 28184, - "archangel": 28185, - "harassed": 28186, - "##asco": 28187, - "326": 28188, - "archway": 28189, - "dazzling": 28190, - "##ecin": 28191, - "1736": 28192, - "sumo": 28193, - "wat": 28194, - "##kovich": 28195, - "1086": 28196, - "honneur": 28197, - "##ently": 28198, - "##nostic": 28199, - "##ttal": 28200, - "##idon": 28201, - "1605": 28202, - "403": 28203, - "1716": 28204, - "blogger": 28205, - "rents": 28206, - "##gnan": 28207, - "hires": 28208, - "##ikh": 28209, - "##dant": 28210, - "howie": 28211, - "##rons": 28212, - "handler": 28213, - "retracted": 28214, - "shocks": 28215, - "1632": 28216, - "arun": 28217, - "duluth": 28218, - "kepler": 28219, - "trumpeter": 28220, - "##lary": 28221, - "peeking": 28222, - "seasoned": 28223, - "trooper": 28224, - "##mara": 28225, - "laszlo": 28226, - "##iciencies": 28227, - "##rti": 28228, - "heterosexual": 28229, - "##inatory": 28230, - "##ssion": 28231, - "indira": 28232, - "jogging": 28233, - "##inga": 28234, - "##lism": 28235, - "beit": 28236, - "dissatisfaction": 28237, - "malice": 28238, - "##ately": 28239, - "nedra": 28240, - "peeling": 28241, - "##rgeon": 28242, - "47th": 28243, - "stadiums": 28244, - "475": 28245, - "vertigo": 28246, - "##ains": 28247, - "iced": 28248, - "restroom": 28249, - "##plify": 28250, - "##tub": 28251, - "illustrating": 28252, - "pear": 28253, - "##chner": 28254, - "##sibility": 28255, - "inorganic": 28256, - "rappers": 28257, - "receipts": 28258, - "watery": 28259, - "##kura": 28260, - "lucinda": 28261, - "##oulos": 28262, - "reintroduced": 28263, - "##8th": 28264, - "##tched": 28265, - "gracefully": 28266, - "saxons": 28267, - "nutritional": 28268, - "wastewater": 28269, - "rained": 28270, - "favourites": 28271, - "bedrock": 28272, - "fisted": 28273, - "hallways": 28274, - "likeness": 28275, - "upscale": 28276, - "##lateral": 28277, - "1580": 28278, - "blinds": 28279, - "prequel": 28280, - "##pps": 28281, - "##tama": 28282, - "deter": 28283, - "humiliating": 28284, - 
"restraining": 28285, - "tn": 28286, - "vents": 28287, - "1659": 28288, - "laundering": 28289, - "recess": 28290, - "rosary": 28291, - "tractors": 28292, - "coulter": 28293, - "federer": 28294, - "##ifiers": 28295, - "##plin": 28296, - "persistence": 28297, - "##quitable": 28298, - "geschichte": 28299, - "pendulum": 28300, - "quakers": 28301, - "##beam": 28302, - "bassett": 28303, - "pictorial": 28304, - "buffet": 28305, - "koln": 28306, - "##sitor": 28307, - "drills": 28308, - "reciprocal": 28309, - "shooters": 28310, - "##57": 28311, - "##cton": 28312, - "##tees": 28313, - "converge": 28314, - "pip": 28315, - "dmitri": 28316, - "donnelly": 28317, - "yamamoto": 28318, - "aqua": 28319, - "azores": 28320, - "demographics": 28321, - "hypnotic": 28322, - "spitfire": 28323, - "suspend": 28324, - "wryly": 28325, - "roderick": 28326, - "##rran": 28327, - "sebastien": 28328, - "##asurable": 28329, - "mavericks": 28330, - "##fles": 28331, - "##200": 28332, - "himalayan": 28333, - "prodigy": 28334, - "##iance": 28335, - "transvaal": 28336, - "demonstrators": 28337, - "handcuffs": 28338, - "dodged": 28339, - "mcnamara": 28340, - "sublime": 28341, - "1726": 28342, - "crazed": 28343, - "##efined": 28344, - "##till": 28345, - "ivo": 28346, - "pondered": 28347, - "reconciled": 28348, - "shrill": 28349, - "sava": 28350, - "##duk": 28351, - "bal": 28352, - "cad": 28353, - "heresy": 28354, - "jaipur": 28355, - "goran": 28356, - "##nished": 28357, - "341": 28358, - "lux": 28359, - "shelly": 28360, - "whitehall": 28361, - "##hre": 28362, - "israelis": 28363, - "peacekeeping": 28364, - "##wled": 28365, - "1703": 28366, - "demetrius": 28367, - "ousted": 28368, - "##arians": 28369, - "##zos": 28370, - "beale": 28371, - "anwar": 28372, - "backstroke": 28373, - "raged": 28374, - "shrinking": 28375, - "cremated": 28376, - "##yck": 28377, - "benign": 28378, - "towing": 28379, - "wadi": 28380, - "darmstadt": 28381, - "landfill": 28382, - "parana": 28383, - "soothe": 28384, - "colleen": 
28385, - "sidewalks": 28386, - "mayfair": 28387, - "tumble": 28388, - "hepatitis": 28389, - "ferrer": 28390, - "superstructure": 28391, - "##gingly": 28392, - "##urse": 28393, - "##wee": 28394, - "anthropological": 28395, - "translators": 28396, - "##mies": 28397, - "closeness": 28398, - "hooves": 28399, - "##pw": 28400, - "mondays": 28401, - "##roll": 28402, - "##vita": 28403, - "landscaping": 28404, - "##urized": 28405, - "purification": 28406, - "sock": 28407, - "thorns": 28408, - "thwarted": 28409, - "jalan": 28410, - "tiberius": 28411, - "##taka": 28412, - "saline": 28413, - "##rito": 28414, - "confidently": 28415, - "khyber": 28416, - "sculptors": 28417, - "##ij": 28418, - "brahms": 28419, - "hammersmith": 28420, - "inspectors": 28421, - "battista": 28422, - "fivb": 28423, - "fragmentation": 28424, - "hackney": 28425, - "##uls": 28426, - "arresting": 28427, - "exercising": 28428, - "antoinette": 28429, - "bedfordshire": 28430, - "##zily": 28431, - "dyed": 28432, - "##hema": 28433, - "1656": 28434, - "racetrack": 28435, - "variability": 28436, - "##tique": 28437, - "1655": 28438, - "austrians": 28439, - "deteriorating": 28440, - "madman": 28441, - "theorists": 28442, - "aix": 28443, - "lehman": 28444, - "weathered": 28445, - "1731": 28446, - "decreed": 28447, - "eruptions": 28448, - "1729": 28449, - "flaw": 28450, - "quinlan": 28451, - "sorbonne": 28452, - "flutes": 28453, - "nunez": 28454, - "1711": 28455, - "adored": 28456, - "downwards": 28457, - "fable": 28458, - "rasped": 28459, - "1712": 28460, - "moritz": 28461, - "mouthful": 28462, - "renegade": 28463, - "shivers": 28464, - "stunts": 28465, - "dysfunction": 28466, - "restrain": 28467, - "translit": 28468, - "327": 28469, - "pancakes": 28470, - "##avio": 28471, - "##cision": 28472, - "##tray": 28473, - "351": 28474, - "vial": 28475, - "##lden": 28476, - "bain": 28477, - "##maid": 28478, - "##oxide": 28479, - "chihuahua": 28480, - "malacca": 28481, - "vimes": 28482, - "##rba": 28483, - "##rnier": 28484, 
- "1664": 28485, - "donnie": 28486, - "plaques": 28487, - "##ually": 28488, - "337": 28489, - "bangs": 28490, - "floppy": 28491, - "huntsville": 28492, - "loretta": 28493, - "nikolay": 28494, - "##otte": 28495, - "eater": 28496, - "handgun": 28497, - "ubiquitous": 28498, - "##hett": 28499, - "eras": 28500, - "zodiac": 28501, - "1634": 28502, - "##omorphic": 28503, - "1820s": 28504, - "##zog": 28505, - "cochran": 28506, - "##bula": 28507, - "##lithic": 28508, - "warring": 28509, - "##rada": 28510, - "dalai": 28511, - "excused": 28512, - "blazers": 28513, - "mcconnell": 28514, - "reeling": 28515, - "bot": 28516, - "este": 28517, - "##abi": 28518, - "geese": 28519, - "hoax": 28520, - "taxon": 28521, - "##bla": 28522, - "guitarists": 28523, - "##icon": 28524, - "condemning": 28525, - "hunts": 28526, - "inversion": 28527, - "moffat": 28528, - "taekwondo": 28529, - "##lvis": 28530, - "1624": 28531, - "stammered": 28532, - "##rest": 28533, - "##rzy": 28534, - "sousa": 28535, - "fundraiser": 28536, - "marylebone": 28537, - "navigable": 28538, - "uptown": 28539, - "cabbage": 28540, - "daniela": 28541, - "salman": 28542, - "shitty": 28543, - "whimper": 28544, - "##kian": 28545, - "##utive": 28546, - "programmers": 28547, - "protections": 28548, - "rm": 28549, - "##rmi": 28550, - "##rued": 28551, - "forceful": 28552, - "##enes": 28553, - "fuss": 28554, - "##tao": 28555, - "##wash": 28556, - "brat": 28557, - "oppressive": 28558, - "reykjavik": 28559, - "spartak": 28560, - "ticking": 28561, - "##inkles": 28562, - "##kiewicz": 28563, - "adolph": 28564, - "horst": 28565, - "maui": 28566, - "protege": 28567, - "straighten": 28568, - "cpc": 28569, - "landau": 28570, - "concourse": 28571, - "clements": 28572, - "resultant": 28573, - "##ando": 28574, - "imaginative": 28575, - "joo": 28576, - "reactivated": 28577, - "##rem": 28578, - "##ffled": 28579, - "##uising": 28580, - "consultative": 28581, - "##guide": 28582, - "flop": 28583, - "kaitlyn": 28584, - "mergers": 28585, - 
"parenting": 28586, - "somber": 28587, - "##vron": 28588, - "supervise": 28589, - "vidhan": 28590, - "##imum": 28591, - "courtship": 28592, - "exemplified": 28593, - "harmonies": 28594, - "medallist": 28595, - "refining": 28596, - "##rrow": 28597, - "##ка": 28598, - "amara": 28599, - "##hum": 28600, - "780": 28601, - "goalscorer": 28602, - "sited": 28603, - "overshadowed": 28604, - "rohan": 28605, - "displeasure": 28606, - "secretive": 28607, - "multiplied": 28608, - "osman": 28609, - "##orth": 28610, - "engravings": 28611, - "padre": 28612, - "##kali": 28613, - "##veda": 28614, - "miniatures": 28615, - "mis": 28616, - "##yala": 28617, - "clap": 28618, - "pali": 28619, - "rook": 28620, - "##cana": 28621, - "1692": 28622, - "57th": 28623, - "antennae": 28624, - "astro": 28625, - "oskar": 28626, - "1628": 28627, - "bulldog": 28628, - "crotch": 28629, - "hackett": 28630, - "yucatan": 28631, - "##sure": 28632, - "amplifiers": 28633, - "brno": 28634, - "ferrara": 28635, - "migrating": 28636, - "##gree": 28637, - "thanking": 28638, - "turing": 28639, - "##eza": 28640, - "mccann": 28641, - "ting": 28642, - "andersson": 28643, - "onslaught": 28644, - "gaines": 28645, - "ganga": 28646, - "incense": 28647, - "standardization": 28648, - "##mation": 28649, - "sentai": 28650, - "scuba": 28651, - "stuffing": 28652, - "turquoise": 28653, - "waivers": 28654, - "alloys": 28655, - "##vitt": 28656, - "regaining": 28657, - "vaults": 28658, - "##clops": 28659, - "##gizing": 28660, - "digger": 28661, - "furry": 28662, - "memorabilia": 28663, - "probing": 28664, - "##iad": 28665, - "payton": 28666, - "rec": 28667, - "deutschland": 28668, - "filippo": 28669, - "opaque": 28670, - "seamen": 28671, - "zenith": 28672, - "afrikaans": 28673, - "##filtration": 28674, - "disciplined": 28675, - "inspirational": 28676, - "##merie": 28677, - "banco": 28678, - "confuse": 28679, - "grafton": 28680, - "tod": 28681, - "##dgets": 28682, - "championed": 28683, - "simi": 28684, - "anomaly": 28685, - 
"biplane": 28686, - "##ceptive": 28687, - "electrode": 28688, - "##para": 28689, - "1697": 28690, - "cleavage": 28691, - "crossbow": 28692, - "swirl": 28693, - "informant": 28694, - "##lars": 28695, - "##osta": 28696, - "afi": 28697, - "bonfire": 28698, - "spec": 28699, - "##oux": 28700, - "lakeside": 28701, - "slump": 28702, - "##culus": 28703, - "##lais": 28704, - "##qvist": 28705, - "##rrigan": 28706, - "1016": 28707, - "facades": 28708, - "borg": 28709, - "inwardly": 28710, - "cervical": 28711, - "xl": 28712, - "pointedly": 28713, - "050": 28714, - "stabilization": 28715, - "##odon": 28716, - "chests": 28717, - "1699": 28718, - "hacked": 28719, - "ctv": 28720, - "orthogonal": 28721, - "suzy": 28722, - "##lastic": 28723, - "gaulle": 28724, - "jacobite": 28725, - "rearview": 28726, - "##cam": 28727, - "##erted": 28728, - "ashby": 28729, - "##drik": 28730, - "##igate": 28731, - "##mise": 28732, - "##zbek": 28733, - "affectionately": 28734, - "canine": 28735, - "disperse": 28736, - "latham": 28737, - "##istles": 28738, - "##ivar": 28739, - "spielberg": 28740, - "##orin": 28741, - "##idium": 28742, - "ezekiel": 28743, - "cid": 28744, - "##sg": 28745, - "durga": 28746, - "middletown": 28747, - "##cina": 28748, - "customized": 28749, - "frontiers": 28750, - "harden": 28751, - "##etano": 28752, - "##zzy": 28753, - "1604": 28754, - "bolsheviks": 28755, - "##66": 28756, - "coloration": 28757, - "yoko": 28758, - "##bedo": 28759, - "briefs": 28760, - "slabs": 28761, - "debra": 28762, - "liquidation": 28763, - "plumage": 28764, - "##oin": 28765, - "blossoms": 28766, - "dementia": 28767, - "subsidy": 28768, - "1611": 28769, - "proctor": 28770, - "relational": 28771, - "jerseys": 28772, - "parochial": 28773, - "ter": 28774, - "##ici": 28775, - "esa": 28776, - "peshawar": 28777, - "cavalier": 28778, - "loren": 28779, - "cpi": 28780, - "idiots": 28781, - "shamrock": 28782, - "1646": 28783, - "dutton": 28784, - "malabar": 28785, - "mustache": 28786, - "##endez": 28787, - 
"##ocytes": 28788, - "referencing": 28789, - "terminates": 28790, - "marche": 28791, - "yarmouth": 28792, - "##sop": 28793, - "acton": 28794, - "mated": 28795, - "seton": 28796, - "subtly": 28797, - "baptised": 28798, - "beige": 28799, - "extremes": 28800, - "jolted": 28801, - "kristina": 28802, - "telecast": 28803, - "##actic": 28804, - "safeguard": 28805, - "waldo": 28806, - "##baldi": 28807, - "##bular": 28808, - "endeavors": 28809, - "sloppy": 28810, - "subterranean": 28811, - "##ensburg": 28812, - "##itung": 28813, - "delicately": 28814, - "pigment": 28815, - "tq": 28816, - "##scu": 28817, - "1626": 28818, - "##ound": 28819, - "collisions": 28820, - "coveted": 28821, - "herds": 28822, - "##personal": 28823, - "##meister": 28824, - "##nberger": 28825, - "chopra": 28826, - "##ricting": 28827, - "abnormalities": 28828, - "defective": 28829, - "galician": 28830, - "lucie": 28831, - "##dilly": 28832, - "alligator": 28833, - "likened": 28834, - "##genase": 28835, - "burundi": 28836, - "clears": 28837, - "complexion": 28838, - "derelict": 28839, - "deafening": 28840, - "diablo": 28841, - "fingered": 28842, - "champaign": 28843, - "dogg": 28844, - "enlist": 28845, - "isotope": 28846, - "labeling": 28847, - "mrna": 28848, - "##erre": 28849, - "brilliance": 28850, - "marvelous": 28851, - "##ayo": 28852, - "1652": 28853, - "crawley": 28854, - "ether": 28855, - "footed": 28856, - "dwellers": 28857, - "deserts": 28858, - "hamish": 28859, - "rubs": 28860, - "warlock": 28861, - "skimmed": 28862, - "##lizer": 28863, - "870": 28864, - "buick": 28865, - "embark": 28866, - "heraldic": 28867, - "irregularities": 28868, - "##ajan": 28869, - "kiara": 28870, - "##kulam": 28871, - "##ieg": 28872, - "antigen": 28873, - "kowalski": 28874, - "##lge": 28875, - "oakley": 28876, - "visitation": 28877, - "##mbit": 28878, - "vt": 28879, - "##suit": 28880, - "1570": 28881, - "murderers": 28882, - "##miento": 28883, - "##rites": 28884, - "chimneys": 28885, - "##sling": 28886, - "condemn": 
28887, - "custer": 28888, - "exchequer": 28889, - "havre": 28890, - "##ghi": 28891, - "fluctuations": 28892, - "##rations": 28893, - "dfb": 28894, - "hendricks": 28895, - "vaccines": 28896, - "##tarian": 28897, - "nietzsche": 28898, - "biking": 28899, - "juicy": 28900, - "##duced": 28901, - "brooding": 28902, - "scrolling": 28903, - "selangor": 28904, - "##ragan": 28905, - "352": 28906, - "annum": 28907, - "boomed": 28908, - "seminole": 28909, - "sugarcane": 28910, - "##dna": 28911, - "departmental": 28912, - "dismissing": 28913, - "innsbruck": 28914, - "arteries": 28915, - "ashok": 28916, - "batavia": 28917, - "daze": 28918, - "kun": 28919, - "overtook": 28920, - "##rga": 28921, - "##tlan": 28922, - "beheaded": 28923, - "gaddafi": 28924, - "holm": 28925, - "electronically": 28926, - "faulty": 28927, - "galilee": 28928, - "fractures": 28929, - "kobayashi": 28930, - "##lized": 28931, - "gunmen": 28932, - "magma": 28933, - "aramaic": 28934, - "mala": 28935, - "eastenders": 28936, - "inference": 28937, - "messengers": 28938, - "bf": 28939, - "##qu": 28940, - "407": 28941, - "bathrooms": 28942, - "##vere": 28943, - "1658": 28944, - "flashbacks": 28945, - "ideally": 28946, - "misunderstood": 28947, - "##jali": 28948, - "##weather": 28949, - "mendez": 28950, - "##grounds": 28951, - "505": 28952, - "uncanny": 28953, - "##iii": 28954, - "1709": 28955, - "friendships": 28956, - "##nbc": 28957, - "sacrament": 28958, - "accommodated": 28959, - "reiterated": 28960, - "logistical": 28961, - "pebbles": 28962, - "thumped": 28963, - "##escence": 28964, - "administering": 28965, - "decrees": 28966, - "drafts": 28967, - "##flight": 28968, - "##cased": 28969, - "##tula": 28970, - "futuristic": 28971, - "picket": 28972, - "intimidation": 28973, - "winthrop": 28974, - "##fahan": 28975, - "interfered": 28976, - "339": 28977, - "afar": 28978, - "francoise": 28979, - "morally": 28980, - "uta": 28981, - "cochin": 28982, - "croft": 28983, - "dwarfs": 28984, - "##bruck": 28985, - "##dents": 
28986, - "##nami": 28987, - "biker": 28988, - "##hner": 28989, - "##meral": 28990, - "nano": 28991, - "##isen": 28992, - "##ometric": 28993, - "##pres": 28994, - "##ан": 28995, - "brightened": 28996, - "meek": 28997, - "parcels": 28998, - "securely": 28999, - "gunners": 29000, - "##jhl": 29001, - "##zko": 29002, - "agile": 29003, - "hysteria": 29004, - "##lten": 29005, - "##rcus": 29006, - "bukit": 29007, - "champs": 29008, - "chevy": 29009, - "cuckoo": 29010, - "leith": 29011, - "sadler": 29012, - "theologians": 29013, - "welded": 29014, - "##section": 29015, - "1663": 29016, - "jj": 29017, - "plurality": 29018, - "xander": 29019, - "##rooms": 29020, - "##formed": 29021, - "shredded": 29022, - "temps": 29023, - "intimately": 29024, - "pau": 29025, - "tormented": 29026, - "##lok": 29027, - "##stellar": 29028, - "1618": 29029, - "charred": 29030, - "ems": 29031, - "essen": 29032, - "##mmel": 29033, - "alarms": 29034, - "spraying": 29035, - "ascot": 29036, - "blooms": 29037, - "twinkle": 29038, - "##abia": 29039, - "##apes": 29040, - "internment": 29041, - "obsidian": 29042, - "##chaft": 29043, - "snoop": 29044, - "##dav": 29045, - "##ooping": 29046, - "malibu": 29047, - "##tension": 29048, - "quiver": 29049, - "##itia": 29050, - "hays": 29051, - "mcintosh": 29052, - "travers": 29053, - "walsall": 29054, - "##ffie": 29055, - "1623": 29056, - "beverley": 29057, - "schwarz": 29058, - "plunging": 29059, - "structurally": 29060, - "m3": 29061, - "rosenthal": 29062, - "vikram": 29063, - "##tsk": 29064, - "770": 29065, - "ghz": 29066, - "##onda": 29067, - "##tiv": 29068, - "chalmers": 29069, - "groningen": 29070, - "pew": 29071, - "reckon": 29072, - "unicef": 29073, - "##rvis": 29074, - "55th": 29075, - "##gni": 29076, - "1651": 29077, - "sulawesi": 29078, - "avila": 29079, - "cai": 29080, - "metaphysical": 29081, - "screwing": 29082, - "turbulence": 29083, - "##mberg": 29084, - "augusto": 29085, - "samba": 29086, - "56th": 29087, - "baffled": 29088, - "momentary": 29089, 
- "toxin": 29090, - "##urian": 29091, - "##wani": 29092, - "aachen": 29093, - "condoms": 29094, - "dali": 29095, - "steppe": 29096, - "##3d": 29097, - "##app": 29098, - "##oed": 29099, - "##year": 29100, - "adolescence": 29101, - "dauphin": 29102, - "electrically": 29103, - "inaccessible": 29104, - "microscopy": 29105, - "nikita": 29106, - "##ega": 29107, - "atv": 29108, - "##cel": 29109, - "##enter": 29110, - "##oles": 29111, - "##oteric": 29112, - "##ы": 29113, - "accountants": 29114, - "punishments": 29115, - "wrongly": 29116, - "bribes": 29117, - "adventurous": 29118, - "clinch": 29119, - "flinders": 29120, - "southland": 29121, - "##hem": 29122, - "##kata": 29123, - "gough": 29124, - "##ciency": 29125, - "lads": 29126, - "soared": 29127, - "##ה": 29128, - "undergoes": 29129, - "deformation": 29130, - "outlawed": 29131, - "rubbish": 29132, - "##arus": 29133, - "##mussen": 29134, - "##nidae": 29135, - "##rzburg": 29136, - "arcs": 29137, - "##ingdon": 29138, - "##tituted": 29139, - "1695": 29140, - "wheelbase": 29141, - "wheeling": 29142, - "bombardier": 29143, - "campground": 29144, - "zebra": 29145, - "##lices": 29146, - "##oj": 29147, - "##bain": 29148, - "lullaby": 29149, - "##ecure": 29150, - "donetsk": 29151, - "wylie": 29152, - "grenada": 29153, - "##arding": 29154, - "##ης": 29155, - "squinting": 29156, - "eireann": 29157, - "opposes": 29158, - "##andra": 29159, - "maximal": 29160, - "runes": 29161, - "##broken": 29162, - "##cuting": 29163, - "##iface": 29164, - "##ror": 29165, - "##rosis": 29166, - "additive": 29167, - "britney": 29168, - "adultery": 29169, - "triggering": 29170, - "##drome": 29171, - "detrimental": 29172, - "aarhus": 29173, - "containment": 29174, - "jc": 29175, - "swapped": 29176, - "vichy": 29177, - "##ioms": 29178, - "madly": 29179, - "##oric": 29180, - "##rag": 29181, - "brant": 29182, - "##ckey": 29183, - "##trix": 29184, - "1560": 29185, - "1612": 29186, - "broughton": 29187, - "rustling": 29188, - "##stems": 29189, - "##uder": 
29190, - "asbestos": 29191, - "mentoring": 29192, - "##nivorous": 29193, - "finley": 29194, - "leaps": 29195, - "##isan": 29196, - "apical": 29197, - "pry": 29198, - "slits": 29199, - "substitutes": 29200, - "##dict": 29201, - "intuitive": 29202, - "fantasia": 29203, - "insistent": 29204, - "unreasonable": 29205, - "##igen": 29206, - "##vna": 29207, - "domed": 29208, - "hannover": 29209, - "margot": 29210, - "ponder": 29211, - "##zziness": 29212, - "impromptu": 29213, - "jian": 29214, - "lc": 29215, - "rampage": 29216, - "stemming": 29217, - "##eft": 29218, - "andrey": 29219, - "gerais": 29220, - "whichever": 29221, - "amnesia": 29222, - "appropriated": 29223, - "anzac": 29224, - "clicks": 29225, - "modifying": 29226, - "ultimatum": 29227, - "cambrian": 29228, - "maids": 29229, - "verve": 29230, - "yellowstone": 29231, - "##mbs": 29232, - "conservatoire": 29233, - "##scribe": 29234, - "adherence": 29235, - "dinners": 29236, - "spectra": 29237, - "imperfect": 29238, - "mysteriously": 29239, - "sidekick": 29240, - "tatar": 29241, - "tuba": 29242, - "##aks": 29243, - "##ifolia": 29244, - "distrust": 29245, - "##athan": 29246, - "##zle": 29247, - "c2": 29248, - "ronin": 29249, - "zac": 29250, - "##pse": 29251, - "celaena": 29252, - "instrumentalist": 29253, - "scents": 29254, - "skopje": 29255, - "##mbling": 29256, - "comical": 29257, - "compensated": 29258, - "vidal": 29259, - "condor": 29260, - "intersect": 29261, - "jingle": 29262, - "wavelengths": 29263, - "##urrent": 29264, - "mcqueen": 29265, - "##izzly": 29266, - "carp": 29267, - "weasel": 29268, - "422": 29269, - "kanye": 29270, - "militias": 29271, - "postdoctoral": 29272, - "eugen": 29273, - "gunslinger": 29274, - "##ɛ": 29275, - "faux": 29276, - "hospice": 29277, - "##for": 29278, - "appalled": 29279, - "derivation": 29280, - "dwarves": 29281, - "##elis": 29282, - "dilapidated": 29283, - "##folk": 29284, - "astoria": 29285, - "philology": 29286, - "##lwyn": 29287, - "##otho": 29288, - "##saka": 29289, - 
"inducing": 29290, - "philanthropy": 29291, - "##bf": 29292, - "##itative": 29293, - "geek": 29294, - "markedly": 29295, - "sql": 29296, - "##yce": 29297, - "bessie": 29298, - "indices": 29299, - "rn": 29300, - "##flict": 29301, - "495": 29302, - "frowns": 29303, - "resolving": 29304, - "weightlifting": 29305, - "tugs": 29306, - "cleric": 29307, - "contentious": 29308, - "1653": 29309, - "mania": 29310, - "rms": 29311, - "##miya": 29312, - "##reate": 29313, - "##ruck": 29314, - "##tucket": 29315, - "bien": 29316, - "eels": 29317, - "marek": 29318, - "##ayton": 29319, - "##cence": 29320, - "discreet": 29321, - "unofficially": 29322, - "##ife": 29323, - "leaks": 29324, - "##bber": 29325, - "1705": 29326, - "332": 29327, - "dung": 29328, - "compressor": 29329, - "hillsborough": 29330, - "pandit": 29331, - "shillings": 29332, - "distal": 29333, - "##skin": 29334, - "381": 29335, - "##tat": 29336, - "##you": 29337, - "nosed": 29338, - "##nir": 29339, - "mangrove": 29340, - "undeveloped": 29341, - "##idia": 29342, - "textures": 29343, - "##inho": 29344, - "##500": 29345, - "##rise": 29346, - "ae": 29347, - "irritating": 29348, - "nay": 29349, - "amazingly": 29350, - "bancroft": 29351, - "apologetic": 29352, - "compassionate": 29353, - "kata": 29354, - "symphonies": 29355, - "##lovic": 29356, - "airspace": 29357, - "##lch": 29358, - "930": 29359, - "gifford": 29360, - "precautions": 29361, - "fulfillment": 29362, - "sevilla": 29363, - "vulgar": 29364, - "martinique": 29365, - "##urities": 29366, - "looting": 29367, - "piccolo": 29368, - "tidy": 29369, - "##dermott": 29370, - "quadrant": 29371, - "armchair": 29372, - "incomes": 29373, - "mathematicians": 29374, - "stampede": 29375, - "nilsson": 29376, - "##inking": 29377, - "##scan": 29378, - "foo": 29379, - "quarterfinal": 29380, - "##ostal": 29381, - "shang": 29382, - "shouldered": 29383, - "squirrels": 29384, - "##owe": 29385, - "344": 29386, - "vinegar": 29387, - "##bner": 29388, - "##rchy": 29389, - "##systems": 
29390, - "delaying": 29391, - "##trics": 29392, - "ars": 29393, - "dwyer": 29394, - "rhapsody": 29395, - "sponsoring": 29396, - "##gration": 29397, - "bipolar": 29398, - "cinder": 29399, - "starters": 29400, - "##olio": 29401, - "##urst": 29402, - "421": 29403, - "signage": 29404, - "##nty": 29405, - "aground": 29406, - "figurative": 29407, - "mons": 29408, - "acquaintances": 29409, - "duets": 29410, - "erroneously": 29411, - "soyuz": 29412, - "elliptic": 29413, - "recreated": 29414, - "##cultural": 29415, - "##quette": 29416, - "##ssed": 29417, - "##tma": 29418, - "##zcz": 29419, - "moderator": 29420, - "scares": 29421, - "##itaire": 29422, - "##stones": 29423, - "##udence": 29424, - "juniper": 29425, - "sighting": 29426, - "##just": 29427, - "##nsen": 29428, - "britten": 29429, - "calabria": 29430, - "ry": 29431, - "bop": 29432, - "cramer": 29433, - "forsyth": 29434, - "stillness": 29435, - "##л": 29436, - "airmen": 29437, - "gathers": 29438, - "unfit": 29439, - "##umber": 29440, - "##upt": 29441, - "taunting": 29442, - "##rip": 29443, - "seeker": 29444, - "streamlined": 29445, - "##bution": 29446, - "holster": 29447, - "schumann": 29448, - "tread": 29449, - "vox": 29450, - "##gano": 29451, - "##onzo": 29452, - "strive": 29453, - "dil": 29454, - "reforming": 29455, - "covent": 29456, - "newbury": 29457, - "predicting": 29458, - "##orro": 29459, - "decorate": 29460, - "tre": 29461, - "##puted": 29462, - "andover": 29463, - "ie": 29464, - "asahi": 29465, - "dept": 29466, - "dunkirk": 29467, - "gills": 29468, - "##tori": 29469, - "buren": 29470, - "huskies": 29471, - "##stis": 29472, - "##stov": 29473, - "abstracts": 29474, - "bets": 29475, - "loosen": 29476, - "##opa": 29477, - "1682": 29478, - "yearning": 29479, - "##glio": 29480, - "##sir": 29481, - "berman": 29482, - "effortlessly": 29483, - "enamel": 29484, - "napoli": 29485, - "persist": 29486, - "##peration": 29487, - "##uez": 29488, - "attache": 29489, - "elisa": 29490, - "b1": 29491, - "invitations": 29492, 
- "##kic": 29493, - "accelerating": 29494, - "reindeer": 29495, - "boardwalk": 29496, - "clutches": 29497, - "nelly": 29498, - "polka": 29499, - "starbucks": 29500, - "##kei": 29501, - "adamant": 29502, - "huey": 29503, - "lough": 29504, - "unbroken": 29505, - "adventurer": 29506, - "embroidery": 29507, - "inspecting": 29508, - "stanza": 29509, - "##ducted": 29510, - "naia": 29511, - "taluka": 29512, - "##pone": 29513, - "##roids": 29514, - "chases": 29515, - "deprivation": 29516, - "florian": 29517, - "##jing": 29518, - "##ppet": 29519, - "earthly": 29520, - "##lib": 29521, - "##ssee": 29522, - "colossal": 29523, - "foreigner": 29524, - "vet": 29525, - "freaks": 29526, - "patrice": 29527, - "rosewood": 29528, - "triassic": 29529, - "upstate": 29530, - "##pkins": 29531, - "dominates": 29532, - "ata": 29533, - "chants": 29534, - "ks": 29535, - "vo": 29536, - "##400": 29537, - "##bley": 29538, - "##raya": 29539, - "##rmed": 29540, - "555": 29541, - "agra": 29542, - "infiltrate": 29543, - "##ailing": 29544, - "##ilation": 29545, - "##tzer": 29546, - "##uppe": 29547, - "##werk": 29548, - "binoculars": 29549, - "enthusiast": 29550, - "fujian": 29551, - "squeak": 29552, - "##avs": 29553, - "abolitionist": 29554, - "almeida": 29555, - "boredom": 29556, - "hampstead": 29557, - "marsden": 29558, - "rations": 29559, - "##ands": 29560, - "inflated": 29561, - "334": 29562, - "bonuses": 29563, - "rosalie": 29564, - "patna": 29565, - "##rco": 29566, - "329": 29567, - "detachments": 29568, - "penitentiary": 29569, - "54th": 29570, - "flourishing": 29571, - "woolf": 29572, - "##dion": 29573, - "##etched": 29574, - "papyrus": 29575, - "##lster": 29576, - "##nsor": 29577, - "##toy": 29578, - "bobbed": 29579, - "dismounted": 29580, - "endelle": 29581, - "inhuman": 29582, - "motorola": 29583, - "tbs": 29584, - "wince": 29585, - "wreath": 29586, - "##ticus": 29587, - "hideout": 29588, - "inspections": 29589, - "sanjay": 29590, - "disgrace": 29591, - "infused": 29592, - "pudding": 
29593, - "stalks": 29594, - "##urbed": 29595, - "arsenic": 29596, - "leases": 29597, - "##hyl": 29598, - "##rrard": 29599, - "collarbone": 29600, - "##waite": 29601, - "##wil": 29602, - "dowry": 29603, - "##bant": 29604, - "##edance": 29605, - "genealogical": 29606, - "nitrate": 29607, - "salamanca": 29608, - "scandals": 29609, - "thyroid": 29610, - "necessitated": 29611, - "##!": 29612, - "##\"": 29613, - "###": 29614, - "##$": 29615, - "##%": 29616, - "##&": 29617, - "##'": 29618, - "##(": 29619, - "##)": 29620, - "##*": 29621, - "##+": 29622, - "##,": 29623, - "##-": 29624, - "##.": 29625, - "##/": 29626, - "##:": 29627, - "##;": 29628, - "##<": 29629, - "##=": 29630, - "##>": 29631, - "##?": 29632, - "##@": 29633, - "##[": 29634, - "##\\": 29635, - "##]": 29636, - "##^": 29637, - "##_": 29638, - "##`": 29639, - "##{": 29640, - "##|": 29641, - "##}": 29642, - "##~": 29643, - "##¡": 29644, - "##¢": 29645, - "##£": 29646, - "##¤": 29647, - "##¥": 29648, - "##¦": 29649, - "##§": 29650, - "##¨": 29651, - "##©": 29652, - "##ª": 29653, - "##«": 29654, - "##¬": 29655, - "##®": 29656, - "##±": 29657, - "##´": 29658, - "##µ": 29659, - "##¶": 29660, - "##·": 29661, - "##º": 29662, - "##»": 29663, - "##¼": 29664, - "##¾": 29665, - "##¿": 29666, - "##æ": 29667, - "##ð": 29668, - "##÷": 29669, - "##þ": 29670, - "##đ": 29671, - "##ħ": 29672, - "##ŋ": 29673, - "##œ": 29674, - "##ƒ": 29675, - "##ɐ": 29676, - "##ɑ": 29677, - "##ɒ": 29678, - "##ɔ": 29679, - "##ɕ": 29680, - "##ə": 29681, - "##ɡ": 29682, - "##ɣ": 29683, - "##ɨ": 29684, - "##ɪ": 29685, - "##ɫ": 29686, - "##ɬ": 29687, - "##ɯ": 29688, - "##ɲ": 29689, - "##ɴ": 29690, - "##ɹ": 29691, - "##ɾ": 29692, - "##ʀ": 29693, - "##ʁ": 29694, - "##ʂ": 29695, - "##ʃ": 29696, - "##ʉ": 29697, - "##ʊ": 29698, - "##ʋ": 29699, - "##ʌ": 29700, - "##ʎ": 29701, - "##ʐ": 29702, - "##ʑ": 29703, - "##ʒ": 29704, - "##ʔ": 29705, - "##ʰ": 29706, - "##ʲ": 29707, - "##ʳ": 29708, - "##ʷ": 29709, - "##ʸ": 29710, - "##ʻ": 29711, - "##ʼ": 29712, - 
"##ʾ": 29713, - "##ʿ": 29714, - "##ˈ": 29715, - "##ˡ": 29716, - "##ˢ": 29717, - "##ˣ": 29718, - "##ˤ": 29719, - "##β": 29720, - "##γ": 29721, - "##δ": 29722, - "##ε": 29723, - "##ζ": 29724, - "##θ": 29725, - "##κ": 29726, - "##λ": 29727, - "##μ": 29728, - "##ξ": 29729, - "##ο": 29730, - "##π": 29731, - "##ρ": 29732, - "##σ": 29733, - "##τ": 29734, - "##υ": 29735, - "##φ": 29736, - "##χ": 29737, - "##ψ": 29738, - "##ω": 29739, - "##б": 29740, - "##г": 29741, - "##д": 29742, - "##ж": 29743, - "##з": 29744, - "##м": 29745, - "##п": 29746, - "##с": 29747, - "##у": 29748, - "##ф": 29749, - "##х": 29750, - "##ц": 29751, - "##ч": 29752, - "##ш": 29753, - "##щ": 29754, - "##ъ": 29755, - "##э": 29756, - "##ю": 29757, - "##ђ": 29758, - "##є": 29759, - "##і": 29760, - "##ј": 29761, - "##љ": 29762, - "##њ": 29763, - "##ћ": 29764, - "##ӏ": 29765, - "##ա": 29766, - "##բ": 29767, - "##գ": 29768, - "##դ": 29769, - "##ե": 29770, - "##թ": 29771, - "##ի": 29772, - "##լ": 29773, - "##կ": 29774, - "##հ": 29775, - "##մ": 29776, - "##յ": 29777, - "##ն": 29778, - "##ո": 29779, - "##պ": 29780, - "##ս": 29781, - "##վ": 29782, - "##տ": 29783, - "##ր": 29784, - "##ւ": 29785, - "##ք": 29786, - "##־": 29787, - "##א": 29788, - "##ב": 29789, - "##ג": 29790, - "##ד": 29791, - "##ו": 29792, - "##ז": 29793, - "##ח": 29794, - "##ט": 29795, - "##י": 29796, - "##ך": 29797, - "##כ": 29798, - "##ל": 29799, - "##ם": 29800, - "##מ": 29801, - "##ן": 29802, - "##נ": 29803, - "##ס": 29804, - "##ע": 29805, - "##ף": 29806, - "##פ": 29807, - "##ץ": 29808, - "##צ": 29809, - "##ק": 29810, - "##ר": 29811, - "##ש": 29812, - "##ת": 29813, - "##،": 29814, - "##ء": 29815, - "##ب": 29816, - "##ت": 29817, - "##ث": 29818, - "##ج": 29819, - "##ح": 29820, - "##خ": 29821, - "##ذ": 29822, - "##ز": 29823, - "##س": 29824, - "##ش": 29825, - "##ص": 29826, - "##ض": 29827, - "##ط": 29828, - "##ظ": 29829, - "##ع": 29830, - "##غ": 29831, - "##ـ": 29832, - "##ف": 29833, - "##ق": 29834, - "##ك": 29835, - "##و": 29836, - "##ى": 29837, - 
"##ٹ": 29838, - "##پ": 29839, - "##چ": 29840, - "##ک": 29841, - "##گ": 29842, - "##ں": 29843, - "##ھ": 29844, - "##ہ": 29845, - "##ے": 29846, - "##अ": 29847, - "##आ": 29848, - "##उ": 29849, - "##ए": 29850, - "##क": 29851, - "##ख": 29852, - "##ग": 29853, - "##च": 29854, - "##ज": 29855, - "##ट": 29856, - "##ड": 29857, - "##ण": 29858, - "##त": 29859, - "##थ": 29860, - "##द": 29861, - "##ध": 29862, - "##न": 29863, - "##प": 29864, - "##ब": 29865, - "##भ": 29866, - "##म": 29867, - "##य": 29868, - "##र": 29869, - "##ल": 29870, - "##व": 29871, - "##श": 29872, - "##ष": 29873, - "##स": 29874, - "##ह": 29875, - "##ा": 29876, - "##ि": 29877, - "##ी": 29878, - "##ो": 29879, - "##।": 29880, - "##॥": 29881, - "##ং": 29882, - "##অ": 29883, - "##আ": 29884, - "##ই": 29885, - "##উ": 29886, - "##এ": 29887, - "##ও": 29888, - "##ক": 29889, - "##খ": 29890, - "##গ": 29891, - "##চ": 29892, - "##ছ": 29893, - "##জ": 29894, - "##ট": 29895, - "##ড": 29896, - "##ণ": 29897, - "##ত": 29898, - "##থ": 29899, - "##দ": 29900, - "##ধ": 29901, - "##ন": 29902, - "##প": 29903, - "##ব": 29904, - "##ভ": 29905, - "##ম": 29906, - "##য": 29907, - "##র": 29908, - "##ল": 29909, - "##শ": 29910, - "##ষ": 29911, - "##স": 29912, - "##হ": 29913, - "##া": 29914, - "##ি": 29915, - "##ী": 29916, - "##ে": 29917, - "##க": 29918, - "##ச": 29919, - "##ட": 29920, - "##த": 29921, - "##ந": 29922, - "##ன": 29923, - "##ப": 29924, - "##ம": 29925, - "##ய": 29926, - "##ர": 29927, - "##ல": 29928, - "##ள": 29929, - "##வ": 29930, - "##ா": 29931, - "##ி": 29932, - "##ு": 29933, - "##ே": 29934, - "##ை": 29935, - "##ನ": 29936, - "##ರ": 29937, - "##ಾ": 29938, - "##ක": 29939, - "##ය": 29940, - "##ර": 29941, - "##ල": 29942, - "##ව": 29943, - "##ා": 29944, - "##ก": 29945, - "##ง": 29946, - "##ต": 29947, - "##ท": 29948, - "##น": 29949, - "##พ": 29950, - "##ม": 29951, - "##ย": 29952, - "##ร": 29953, - "##ล": 29954, - "##ว": 29955, - "##ส": 29956, - "##อ": 29957, - "##า": 29958, - "##เ": 29959, - "##་": 29960, - "##།": 29961, - "##ག": 29962, - 
"##ང": 29963, - "##ད": 29964, - "##ན": 29965, - "##པ": 29966, - "##བ": 29967, - "##མ": 29968, - "##འ": 29969, - "##ར": 29970, - "##ལ": 29971, - "##ས": 29972, - "##မ": 29973, - "##ა": 29974, - "##ბ": 29975, - "##გ": 29976, - "##დ": 29977, - "##ე": 29978, - "##ვ": 29979, - "##თ": 29980, - "##ი": 29981, - "##კ": 29982, - "##ლ": 29983, - "##მ": 29984, - "##ნ": 29985, - "##ო": 29986, - "##რ": 29987, - "##ს": 29988, - "##ტ": 29989, - "##უ": 29990, - "##ᄀ": 29991, - "##ᄂ": 29992, - "##ᄃ": 29993, - "##ᄅ": 29994, - "##ᄆ": 29995, - "##ᄇ": 29996, - "##ᄉ": 29997, - "##ᄊ": 29998, - "##ᄋ": 29999, - "##ᄌ": 30000, - "##ᄎ": 30001, - "##ᄏ": 30002, - "##ᄐ": 30003, - "##ᄑ": 30004, - "##ᄒ": 30005, - "##ᅡ": 30006, - "##ᅢ": 30007, - "##ᅥ": 30008, - "##ᅦ": 30009, - "##ᅧ": 30010, - "##ᅩ": 30011, - "##ᅪ": 30012, - "##ᅭ": 30013, - "##ᅮ": 30014, - "##ᅯ": 30015, - "##ᅲ": 30016, - "##ᅳ": 30017, - "##ᅴ": 30018, - "##ᅵ": 30019, - "##ᆨ": 30020, - "##ᆫ": 30021, - "##ᆯ": 30022, - "##ᆷ": 30023, - "##ᆸ": 30024, - "##ᆼ": 30025, - "##ᴬ": 30026, - "##ᴮ": 30027, - "##ᴰ": 30028, - "##ᴵ": 30029, - "##ᴺ": 30030, - "##ᵀ": 30031, - "##ᵃ": 30032, - "##ᵇ": 30033, - "##ᵈ": 30034, - "##ᵉ": 30035, - "##ᵍ": 30036, - "##ᵏ": 30037, - "##ᵐ": 30038, - "##ᵒ": 30039, - "##ᵖ": 30040, - "##ᵗ": 30041, - "##ᵘ": 30042, - "##ᵣ": 30043, - "##ᵤ": 30044, - "##ᵥ": 30045, - "##ᶜ": 30046, - "##ᶠ": 30047, - "##‐": 30048, - "##‑": 30049, - "##‒": 30050, - "##–": 30051, - "##—": 30052, - "##―": 30053, - "##‖": 30054, - "##‘": 30055, - "##’": 30056, - "##‚": 30057, - "##“": 30058, - "##”": 30059, - "##„": 30060, - "##†": 30061, - "##‡": 30062, - "##•": 30063, - "##…": 30064, - "##‰": 30065, - "##′": 30066, - "##″": 30067, - "##›": 30068, - "##‿": 30069, - "##⁄": 30070, - "##⁰": 30071, - "##ⁱ": 30072, - "##⁴": 30073, - "##⁵": 30074, - "##⁶": 30075, - "##⁷": 30076, - "##⁸": 30077, - "##⁹": 30078, - "##⁻": 30079, - "##ⁿ": 30080, - "##₅": 30081, - "##₆": 30082, - "##₇": 30083, - "##₈": 30084, - "##₉": 30085, - "##₊": 30086, - "##₍": 30087, - 
"##₎": 30088, - "##ₐ": 30089, - "##ₑ": 30090, - "##ₒ": 30091, - "##ₓ": 30092, - "##ₕ": 30093, - "##ₖ": 30094, - "##ₗ": 30095, - "##ₘ": 30096, - "##ₚ": 30097, - "##ₛ": 30098, - "##ₜ": 30099, - "##₤": 30100, - "##₩": 30101, - "##€": 30102, - "##₱": 30103, - "##₹": 30104, - "##ℓ": 30105, - "##№": 30106, - "##ℝ": 30107, - "##™": 30108, - "##⅓": 30109, - "##⅔": 30110, - "##←": 30111, - "##↑": 30112, - "##→": 30113, - "##↓": 30114, - "##↔": 30115, - "##↦": 30116, - "##⇄": 30117, - "##⇌": 30118, - "##⇒": 30119, - "##∂": 30120, - "##∅": 30121, - "##∆": 30122, - "##∇": 30123, - "##∈": 30124, - "##∗": 30125, - "##∘": 30126, - "##√": 30127, - "##∞": 30128, - "##∧": 30129, - "##∨": 30130, - "##∩": 30131, - "##∪": 30132, - "##≈": 30133, - "##≡": 30134, - "##≤": 30135, - "##≥": 30136, - "##⊂": 30137, - "##⊆": 30138, - "##⊕": 30139, - "##⊗": 30140, - "##⋅": 30141, - "##─": 30142, - "##│": 30143, - "##■": 30144, - "##▪": 30145, - "##●": 30146, - "##★": 30147, - "##☆": 30148, - "##☉": 30149, - "##♠": 30150, - "##♣": 30151, - "##♥": 30152, - "##♦": 30153, - "##♯": 30154, - "##⟨": 30155, - "##⟩": 30156, - "##ⱼ": 30157, - "##⺩": 30158, - "##⺼": 30159, - "##⽥": 30160, - "##、": 30161, - "##。": 30162, - "##〈": 30163, - "##〉": 30164, - "##《": 30165, - "##》": 30166, - "##「": 30167, - "##」": 30168, - "##『": 30169, - "##』": 30170, - "##〜": 30171, - "##あ": 30172, - "##い": 30173, - "##う": 30174, - "##え": 30175, - "##お": 30176, - "##か": 30177, - "##き": 30178, - "##く": 30179, - "##け": 30180, - "##こ": 30181, - "##さ": 30182, - "##し": 30183, - "##す": 30184, - "##せ": 30185, - "##そ": 30186, - "##た": 30187, - "##ち": 30188, - "##っ": 30189, - "##つ": 30190, - "##て": 30191, - "##と": 30192, - "##な": 30193, - "##に": 30194, - "##ぬ": 30195, - "##ね": 30196, - "##の": 30197, - "##は": 30198, - "##ひ": 30199, - "##ふ": 30200, - "##へ": 30201, - "##ほ": 30202, - "##ま": 30203, - "##み": 30204, - "##む": 30205, - "##め": 30206, - "##も": 30207, - "##や": 30208, - "##ゆ": 30209, - "##よ": 30210, - "##ら": 30211, - "##り": 30212, - 
"##る": 30213, - "##れ": 30214, - "##ろ": 30215, - "##を": 30216, - "##ん": 30217, - "##ァ": 30218, - "##ア": 30219, - "##ィ": 30220, - "##イ": 30221, - "##ウ": 30222, - "##ェ": 30223, - "##エ": 30224, - "##オ": 30225, - "##カ": 30226, - "##キ": 30227, - "##ク": 30228, - "##ケ": 30229, - "##コ": 30230, - "##サ": 30231, - "##シ": 30232, - "##ス": 30233, - "##セ": 30234, - "##タ": 30235, - "##チ": 30236, - "##ッ": 30237, - "##ツ": 30238, - "##テ": 30239, - "##ト": 30240, - "##ナ": 30241, - "##ニ": 30242, - "##ノ": 30243, - "##ハ": 30244, - "##ヒ": 30245, - "##フ": 30246, - "##ヘ": 30247, - "##ホ": 30248, - "##マ": 30249, - "##ミ": 30250, - "##ム": 30251, - "##メ": 30252, - "##モ": 30253, - "##ャ": 30254, - "##ュ": 30255, - "##ョ": 30256, - "##ラ": 30257, - "##リ": 30258, - "##ル": 30259, - "##レ": 30260, - "##ロ": 30261, - "##ワ": 30262, - "##ン": 30263, - "##・": 30264, - "##ー": 30265, - "##一": 30266, - "##三": 30267, - "##上": 30268, - "##下": 30269, - "##不": 30270, - "##世": 30271, - "##中": 30272, - "##主": 30273, - "##久": 30274, - "##之": 30275, - "##也": 30276, - "##事": 30277, - "##二": 30278, - "##五": 30279, - "##井": 30280, - "##京": 30281, - "##人": 30282, - "##亻": 30283, - "##仁": 30284, - "##介": 30285, - "##代": 30286, - "##仮": 30287, - "##伊": 30288, - "##会": 30289, - "##佐": 30290, - "##侍": 30291, - "##保": 30292, - "##信": 30293, - "##健": 30294, - "##元": 30295, - "##光": 30296, - "##八": 30297, - "##公": 30298, - "##内": 30299, - "##出": 30300, - "##分": 30301, - "##前": 30302, - "##劉": 30303, - "##力": 30304, - "##加": 30305, - "##勝": 30306, - "##北": 30307, - "##区": 30308, - "##十": 30309, - "##千": 30310, - "##南": 30311, - "##博": 30312, - "##原": 30313, - "##口": 30314, - "##古": 30315, - "##史": 30316, - "##司": 30317, - "##合": 30318, - "##吉": 30319, - "##同": 30320, - "##名": 30321, - "##和": 30322, - "##囗": 30323, - "##四": 30324, - "##国": 30325, - "##國": 30326, - "##土": 30327, - "##地": 30328, - "##坂": 30329, - "##城": 30330, - "##堂": 30331, - "##場": 30332, - "##士": 30333, - "##夏": 30334, - "##外": 30335, - "##大": 30336, - "##天": 30337, - 
"##太": 30338, - "##夫": 30339, - "##奈": 30340, - "##女": 30341, - "##子": 30342, - "##学": 30343, - "##宀": 30344, - "##宇": 30345, - "##安": 30346, - "##宗": 30347, - "##定": 30348, - "##宣": 30349, - "##宮": 30350, - "##家": 30351, - "##宿": 30352, - "##寺": 30353, - "##將": 30354, - "##小": 30355, - "##尚": 30356, - "##山": 30357, - "##岡": 30358, - "##島": 30359, - "##崎": 30360, - "##川": 30361, - "##州": 30362, - "##巿": 30363, - "##帝": 30364, - "##平": 30365, - "##年": 30366, - "##幸": 30367, - "##广": 30368, - "##弘": 30369, - "##張": 30370, - "##彳": 30371, - "##後": 30372, - "##御": 30373, - "##德": 30374, - "##心": 30375, - "##忄": 30376, - "##志": 30377, - "##忠": 30378, - "##愛": 30379, - "##成": 30380, - "##我": 30381, - "##戦": 30382, - "##戸": 30383, - "##手": 30384, - "##扌": 30385, - "##政": 30386, - "##文": 30387, - "##新": 30388, - "##方": 30389, - "##日": 30390, - "##明": 30391, - "##星": 30392, - "##春": 30393, - "##昭": 30394, - "##智": 30395, - "##曲": 30396, - "##書": 30397, - "##月": 30398, - "##有": 30399, - "##朝": 30400, - "##木": 30401, - "##本": 30402, - "##李": 30403, - "##村": 30404, - "##東": 30405, - "##松": 30406, - "##林": 30407, - "##森": 30408, - "##楊": 30409, - "##樹": 30410, - "##橋": 30411, - "##歌": 30412, - "##止": 30413, - "##正": 30414, - "##武": 30415, - "##比": 30416, - "##氏": 30417, - "##民": 30418, - "##水": 30419, - "##氵": 30420, - "##氷": 30421, - "##永": 30422, - "##江": 30423, - "##沢": 30424, - "##河": 30425, - "##治": 30426, - "##法": 30427, - "##海": 30428, - "##清": 30429, - "##漢": 30430, - "##瀬": 30431, - "##火": 30432, - "##版": 30433, - "##犬": 30434, - "##王": 30435, - "##生": 30436, - "##田": 30437, - "##男": 30438, - "##疒": 30439, - "##発": 30440, - "##白": 30441, - "##的": 30442, - "##皇": 30443, - "##目": 30444, - "##相": 30445, - "##省": 30446, - "##真": 30447, - "##石": 30448, - "##示": 30449, - "##社": 30450, - "##神": 30451, - "##福": 30452, - "##禾": 30453, - "##秀": 30454, - "##秋": 30455, - "##空": 30456, - "##立": 30457, - "##章": 30458, - "##竹": 30459, - "##糹": 30460, - "##美": 30461, - "##義": 30462, - 
"##耳": 30463, - "##良": 30464, - "##艹": 30465, - "##花": 30466, - "##英": 30467, - "##華": 30468, - "##葉": 30469, - "##藤": 30470, - "##行": 30471, - "##街": 30472, - "##西": 30473, - "##見": 30474, - "##訁": 30475, - "##語": 30476, - "##谷": 30477, - "##貝": 30478, - "##貴": 30479, - "##車": 30480, - "##軍": 30481, - "##辶": 30482, - "##道": 30483, - "##郎": 30484, - "##郡": 30485, - "##部": 30486, - "##都": 30487, - "##里": 30488, - "##野": 30489, - "##金": 30490, - "##鈴": 30491, - "##镇": 30492, - "##長": 30493, - "##門": 30494, - "##間": 30495, - "##阝": 30496, - "##阿": 30497, - "##陳": 30498, - "##陽": 30499, - "##雄": 30500, - "##青": 30501, - "##面": 30502, - "##風": 30503, - "##食": 30504, - "##香": 30505, - "##馬": 30506, - "##高": 30507, - "##龍": 30508, - "##龸": 30509, - "##fi": 30510, - "##fl": 30511, - "##!": 30512, - "##(": 30513, - "##)": 30514, - "##,": 30515, - "##-": 30516, - "##.": 30517, - "##/": 30518, - "##:": 30519, - "##?": 30520, - "##~": 30521, - "bowang": 30522, - "georgiosmastrapas": 30523, - "jackminong": 30524, - "alaeddineabdessalem": 30525, - "isabellemohr": 30526, - "michaelguenther": 30527 - } - } -} \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/jina/text_embedding/tokenizer/tokenizer_config.json b/api/core/model_runtime/model_providers/jina/text_embedding/tokenizer/tokenizer_config.json deleted file mode 100644 index 91f9e357cc1352..00000000000000 --- a/api/core/model_runtime/model_providers/jina/text_embedding/tokenizer/tokenizer_config.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "clean_up_tokenization_spaces": true, - "cls_token": "[CLS]", - "do_basic_tokenize": true, - "do_lower_case": true, - "mask_token": "[MASK]", - "model_max_length": 2147483648, - "never_split": null, - "pad_token": "[PAD]", - "sep_token": "[SEP]", - "strip_accents": null, - "tokenize_chinese_chars": true, - "tokenizer_class": "BertTokenizer", - "unk_token": "[UNK]" -} diff --git a/api/core/model_runtime/model_providers/leptonai/_assets/icon_l_en.png 
b/api/core/model_runtime/model_providers/leptonai/_assets/icon_l_en.png deleted file mode 100644 index 719122b284eb9e66a38a10fa8464a2973e1aa0a9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 528134 zcmaI8XIPU-7clIuD1s{}MU)N(Bp^+yp-Cs9CM1;5q=}SJLJw^f6aneI3nY}#q)Q2` z0wN&2g9Pcl6MFe@pYne1dp+*`gWUIA=bV{2ZO%C}Lx_%+>Mcq}$}3l{+ybj9!G8W+ zx$?W@jq8^!>(@gPE`RR3DI2)yIz4yuG)G%sk+*WPux1CN%x$b;*5+2;F74LRSFT)L zwuKwG8E9&NES*pS=09Nsyim@U*jKJd%XvAQTiRQ@v0GT%*gDE^Zq_$(vfEn8aO#U` z3TZkkTEDPW!=SBoF|MRn>o7|%E8eU`_E=U zPWHb+-0WpI|1~HBO&xYcC$u&D69HjIW2&Sa+@9r+YWH?{AxjBOb1wB1I1w2IroX|Fc!cU$& z5fpkX`1mpZB?P~#x1*c67r&$Hga1%avUasZ+d8}1Iyticq-bv8w28 zU$Ty_|K`*s&jh{9odtyjgnrKIZ$K-{|H3)DqaFV4+{#kW+QAxS?daxu2`l_x*h@xf zYW^4ef09L^{tNBu2DAP@bNipru5fQ>YeAT`tCKt0@>0&vAN(AaGe{9_ZSLlThC4Yq z$Z#r}J9?P^-L9z#0z108nLAoqgOy}BFChhNZLL5`@*+ZtiV{ymg`S8B3o9!tNGK{l zkrWY8c`UD@qO2_OA6z9TOZT6X`VX$v|G}02CGMrbtSmv6{~FN0rKfA{^6xhXTUofZ zm6bQUnz^*#Pcd9Ju>b4rUqFKY7xLwYo2{FJ^}n&@9o+s)=chP84(5(Fm-FJcvVLyv z?%>8Li*WoK-iqDI+SS&^k)5Ai`2X$aUqpWP^B>ZGiwUG;jkfi$wo*Ymq1gWx1<3Z- zyp#Cl{Qr$>^=sZq{t{R4KfL(=DeHflE-m2a>wm59^5ehuYwdVxwCGFg{Pe!i?#d0y z3b2wq+-rP|04{xGl(1MeEWS(Zsa^DlzJ=g|b8(R6F!rhmJ)`{Nwg{kw?7Huj#@{TX zUf2b}#pD%kM6r`oaNUFB$*kfZx0As;@I?SzXvf#kuP=!8T~FF3yBtbe$J58eU7PMC z-w}LZx_dEMvmx#&QR5+4@AO0NEcwb6Ir>b$T!|?*rJ^`M9B%<{7-5rOV9N`pABI}M z939-scm&PrGV0W|{p5Ouymqo0oM+Rji*pbYz0W86JqX#8j+$7DlO4UsdR~!9C~6lS zzLSicfQ`~MCswIo`YTuNtWKtGUQFBc?2XXjKxAW7lu_+|U^i9dyvEB%&NyX9aT-fBV}H{?9*CLP+D^ulREx zw!QP6B}JZe7o+N1t3R*)Fs8>T;J0-rVp=hsg(D4n?ur`|HWC=uOhF#>ifLHug5@7IjA>B^R&X(kP&$lJk?~sl8 zZPT!gNRm>d~?Rd3SN_>i^J1W7*S~ z-+$OxCbJ@L!aC8J`MB>bh=AeWRlH5qwOP8Z)ov*ZUQ@n>T`_K!FdYHIvg88UtGe9PWBM~KP8LBX12`$!Y zTlOFN^UZm7DRG*nU2;sJ;Nb~d(D{-`Po@loUjt7jXx>o=~tZ35wAS~}i|A%JKpVksxYpFApjGqPXPZ+jcFL6FVFfX$^X6KNMTjM?96_%U%z4Z!!mMq%}%QV#;b1Ex4uF_^RDUyPJlZjuQ`R*dD!8y?MB^ zof@#W?GY*GeN3GFYqLioGJR|8xPG^TFLljm^#`K41eLTdS8ckWZ#2H4UuPsyX`kdF zzz`Wj7tH<6l9)`2d|nUU?bco8cgOoaBi=1~*>1#lO};~gw7>8p{`lp`O_X%(>Ux=| 
zF(z8~;j!2Kxk+8YDId@1O(d!~`5;w{rJ6}*qh`o@ zM4zNo^w!q+Xy6*@Oz&c+^m`-&r10iJ;gZX*rSn(f9{})gLQoougdx2u-q{0LwUczu zekw>YgQMTQ;_){r?&|A!{?BksPnl@ZOp*nnlllB+z!+O%qVeqvy)3vJDkuI69LTl) zwo3OdUqD}jhKnNf^IJu_c!x5kJ>N2rJI23m>8Kgp&6aUC7NI^I-jNG!vPF0lAETsl zhl3dL^L18b#(`u^TZoly&{*-t?&0xSYt57&W6Hq$t})$-5b^Z9g%7zT7sZLQ6!H3m z>eK}BRc>x@JXPMnW(r6si}`sRG_k%}@xxFg(q4ATjTGXOlA2>h(FO!$Ma)a%V6 zQ0>bKh?r^B39-!QD3aY@cFqZ?Vq68_r)kejtpxWZf6bvE>NhVDCQyO}g+d4HEhsGDr*C5)(PM#vGvrV{@Vxa0=$fuJqpHuN z{2H6!DtEfHgtLi0fC3nyoV$`pD247Yo*-wJ-?#mP$1+o3zf_)w*p4D}vFo)bD<2c)K^ufX^imfP-ww8M?F7OboSiYGS9*8n62c?lTm_^e| z5YYJ+f6x*bg$&^z5Tb-ZTY4?PuWzIf^_E)mga!CWc<7%=K{r?uwQIY>=G9qyj*!>N zIDQ2`)FSc{AbFACK^XnpBxiPZ!DpCR^d~|%ArD7O5puet2rRWQ3ng3v?5e5r6jJr?o64%OuLq;*;S!MM*t9TF( zHQk3m0&IjuUNC;e{nu)8<*@%ZFy)^iAnRPMIEv3L&EcT@HZ}(VaYL?T*|5@NHEr4(vjn+@>%V;>5<#<$|48A~_Kc8E)28$7EIU9w-4+tgXL$#ofoDA!TCBMF z)}Q>>-0;2s5^(J=rM-BxS&&;YyLM1evJ?b1n2I+o>?gpYxU=uke~Efj70YgZxCDZF zWJvNVgZ;i{#O>T;q4i)??G}$)5ahGpPq(;&xPdJb_F{-hqm0MLc*oCRW7y+G3gBGW z&QXThd#1tI`zPn^;xsLJvE0A$0=|-gXPUD=SMikVz5hykt1B(!{%xMW5^LMkLkAuE zW=o4qBXHYif!ray5yK+nt2KJB$nJBs^3qEM3$KZGPZq4LSm($^hRN84JE7OrfJFe$NX7BR858&D?%i~a; zT=gPyt-TY!=Fgvkz$olrBMD7s>xV|?$eEDNP3_N9M-D&OWHA0e*Tt;P_oosUO#>@} z-^EV+I)f3-CvTR2BppLBPZDfgSkSES)ct=ZP~rk zBjDc0-u7duqJz{a+2SeY@w4aFG8>0Kj5oH!q@@oSF{kc=1#fil2i5DAyfSt?*+Swe zOa@#M!oaQ=tQCfZtKfSoIEL~;WIHsLJsyU$XqLipN`JAoKmriq)UVjpUOy|~gxJ#H z1gcMoL~Sn2y@s{xo0)Ke45SL7oRf6tO7RpTe=F)n5$R1h0Nr`jVT zFAgU^91(-3d(kx5qW$WZ%|DjU`v(dgf9Rh*Ep6y+W_Mec;t6Oi(`2g|^-1{iK~Te+z6x!Hdul z_R?`o8$I$FA8iD@Wh5Y^qN-);5p9K}OetWUR(D>c>u9; za9wYlO=4VTt?~pXwJv^=q$Tp5=TjwF0F?~!5r$y6=T#K2XV`Q$OMTueDjx;0MtLqN zZx$u8fue&yt%k{q?H!(tbxs57R^j6NL5I$NmYvh*&UoPp2e%>xi}K;?Ap$aFJl~BusCx4KDm(a= zd<@LJ&wE}+Ioq~-WsY~*J!y9`8n*;PcR~zE|X((GF~%*?%nb`ONhvfqR&aN zoQ)RK15-~1!3ixZ5uGS(ORFQE3x^ECw|(EvB7M|K$iV{NNVq4+ih*_0V&`JI4QhP& zRhJJFavcR<#Ss`}xLA~i(+`+NcAv4ZvEA^SDp_}x8oDQIozv2n;bUS$n4mD~+Z|gO z8>kSH^I5f*N*U-_wl!D=>jq>pw8D+v4}2wq{w$#V;@NV&5<_upY}FWL-0Vn!OnAnx#UCh$yCZ_ 
z0pMj(6&KDBn5gwPowE08HvNdK8~W3m?;&(h-u3+M^8PimAQmzpFBqm|?(ID25n*() z-%(xV&H)Zvo?#~cwEPH?uI&!ad2^tRQe<00`c@vujxPIGuKRsBU@Oij%GiBLCf{*K z3AnX5Fk6UkQ(Q+ovEfyk2Ru^vu9*TIRkEJk~dt8F|irG(`KXMo4G#9Qh3G>erL7K zw_j;3;FutO_M^r_pc-^0kMV5O0GAPi9flvr%p+7+GKFDrT0*CSjsMv?b@Zgk_LJLNJz&TSuA#iIv@Dt_H?GL>8HX#!;B|9e-E+_MX&Cfd+U6j6(cvHz%X$VQ z`$rH|<%oDc5^j_Un_(=Kg}@*&6IFW~JI;68VQz))t}$^5B}J90U52otirDl3*^uaM z;pQg>yy6(O%A3>=P=;Pzvc8XlGMpR&3W-ObCK7$$Mpf+Wql~CZ6+4WbEah$oyE|Zs zPyDM%L?c4Q>K0MY&&CI%ZEHM0XU33mKaa_e>YtVRx1UPws{a1Qt3ehn;gzbPROv=N z978sTOz6IR_Koa8Tq1!wTV5l|rCWT|SZz(WWN03V05i_fMJiHW$GJrahK&EdD4qxm{Sqr(0Y`T@3p-}t9 z2h5SYD~8p~>m|37A52?GqBLfccm*an(j9cCYSz#Hir9)ODHA_phrEI(ZoX`eBMZ=y zF;)6n*Jg$!;Cc5B32JI_{fdjr&SmpqkzknpsvrY~^PNj_4xH|Iyp_n!)EsBiZb#P> zS{RYW1sT$M5J?5E>IOjO^$}sNq5g&9Ur5}A=?m|kbis8tcj@8jZTyPRsTS>@(^pv0@c*5yAl z`7Yxk{Wnd!1czIIFWbwUd%;tLEo!3A!S>wIAcYxb-s~d%VBO4@09upS_~3XrDhU#e zpzcX021vKBx;}J55o#viJ7Kh)$g_gk)ox`=F?sG28*BP}Oy~_??%sM) zWgv-Q?QMJ4GR`A1aA^N&a(QQ`u7yT=SGI_E=oeD%%?Fiqk~54TqXDX9)v*Vx42kn| z2%)Z62Z*zlS~I@dpJpgtA2}vw4=O$VPFpO{SLwXZ>Up(q;PoC`fXcbqab&z?R(f4ha#WJw@c%L~C{O`Pd@^ge*jfZ~#gB(MvvSW1T5k{dI@Y@afx4^J&M(MQ8+hhgs;wqG-&3w6q zas_mGtg)s_cFI7mw76|z;Qc;N{NWa5Vyi5zAZNhENl){|-oaTxS=iv?&W`9QF@T^R zAmFU#Txkh(u5}R@FrCsZ?gIItIqBtw?tCIsCyc_>GLB!>}U;_Z# z!Z5ZXQop$$VR(Qvi!j$x%gva*@UDsIOun10GrDgeBAM{M~Ra%w0XHItd< zE7HN5gLcuK>227afeA;h)@r%0ORs+%@d4>>qAWAh^Vi8iqhgJ62YWl)8cwo4*n_zK z=a+d{K+YRu(qkIi#^f`ZRhQI&a0ZiW@{4Qk?yOjXm!S-4azQ)eIIwHo@-}v@vs$CA zcXsh5D`<6Fwmp*Cbg6`PJTtyJz5ffh;VP=0LqSuNtBPWNuLgpiTj@>uKFVgC^3=OL z!Off3J@PR^YhctKUz>^Ed_x-nM(3O=%?5+29IBr78C&H6<4YWj(4cfnhMD)im{754 zX4DwCgk9fusF#bOFvw_(QUI%S>*{*5)#ayrZC&hj6xoInZ%MTQr0toh{0`d* zXHNqc&nEO_YVqpYZ{Leq0`NWb1FXLm-Oy}WZr~xf+R1&o=)s-$?KaX)fxOBz?~ga zy2HmhAN5TtOr#j|sr8@h*dbNx{okL+zI}03CPNCdCM`22p^)x4IJOegR=y@Y*|aTV z%BvQY8uh|zz^|xVUCmF*K$}f5gj9{~Gp(=h9ntfo3EOaR+`!;8Z6MO*i4pOaEtLd1HDTX`hHZl@Gzq6BJBya373Kxs&e<1b9gX`C zS79)!8LbX|NBLMKCIKMWwwJFwGw|`N8<(oE&2KqY;AJbzAb4S4>)TFgZl!Rb@K|$F 
z@@-!}d0sHon$0q1@Gw25!g56I1+M5MO3-qoWc{^x#PX6%IyoPccJs5G-hyc!unf}A z81A(RoSOJEZ+qF$oV^W)_A(QvDn4uFqUTEcyoJ+iR2~^WC#osT}eGk zrXlE5t>Wst1?m`V1v12|7X$gpDOTo`wF46eo>{Ig^_A?B`@He5^z;3o8FYTGLps!z zZI9w~{}lI6l@jP>(cesLB`%pqTL%(RSA$7j<#-3#`y94`DV-=uG?6p72JRs4jynXs z;KLhbale};jW-VDgYsI$k;?p>%bGcHbl9w@))-}MpU}(zobDvSU?O+yxY1l6Qr1WI%y7RmAu^EgPlTGP`(ft|@ z`WET8v8^N6@xpbb+XlR{rfSHGg{(UeOLS|ix|~)~CO5Y`FePOa!=rIkp_yDA!fF;;Y>8IE#^NI#7#DX!_%?|y)15H<5-z<-<*sEYyl+Z%w%d?CK#O2C{W7FDbqO6bwDv^d9=ciezF0e2*k%q^6^!KI+jKsR{Fm|mBm>M(Ne zf3cS`s^ZT0MbxY<2**y@KFF&(@|t+J%h5a`Sh7=-ivU!M;Hn1abz@V@2c^5h^J8)o z&~ZD1sbNi}bNP_&L3YS;oM@s|_h2#e1z)tYvfErkB9D9S!+Q+vykRig*HP6~ciCFY zt3DXilb61mjr}&f3-sU<2tH863{tO=s2vW1eW@pW&EmP`@ye0^5FR$FT}s4^Ct#C0 z(~g`cB5?F%YI!oVf%f1WaOw;7WR1H>HM@ZBGWfe!NNr4$l_7i z*^HXz+DHm@u@W>V8l?V6f+6-xmjxcQ(D)jo7;LDP zyL&?w08IImSX{#1q90!~iIY9A&F#(%XUaxbHX||cB zj(mqFaOWdV0a{c3o-Uc0*AGT1GBGac`owC@6F$%SGxN!{5zPuG7PhomKZ&oCi;L4k zZKkG~*ZthLkLzeDN!bjbeMqqB(Tm+LDj+I|eLRo+YeRbey!T*4addi{oj_#v%w=(S z`~mJm`2HL~z&3s`7t&dhmZx^+_2uCoBU9hdmIOkSN?Rvka3Q`k1ZHVv;z1u$x;wf8 zuRWF*MF|{OYfMsl#Fox$X>rmo&Yx;X1u4nar#pV|oAARutQXEMNlU2lBkdQ5+3V#s z?N4l;obFL_;y@FRnk_HNUCX?*fd%7 z?5>rOXs}rz(6GFc?DNKq-E3kEGQ~`v$iFg{xAzH{q$nnzA2w1p;Z%}QzQ%hBm~sh` zH0PH)rGE38DK_2i$qTyoZ&mK_+Q}mR(ET0(K=E+({qYHLuk{@d^|jbP-4X!GE_|-$ zfo(l`d-qMPG&Fx74!Y}N2YN9f)>9%KIS#ptbAyyowk<%XS+8mGC$>B3XJ-|wpqL&a65v1%}M_)Q(LxqmE#;e5* zNQn56(1hcD=z;rvCMc4WJ)Bv$(oobLSd!s0CQT|gG7ON2DE>|axjn?Iu5ObXZN4tP z5K=2TVOL9+UKjj4p;g>gVCxsfIvb}Rj(;KpM=-~`-L8X)WJn%yt2^hcp zc2<9Y5qM?`aRJ-gCJ3yZJPx@buM?WB(k;iE4QHu<2YfXgRs-^T*AEFh3qM8|X{7Ta z#HD)5rlH??ytVPJQ2bZ+94xM~H+#Wy`{|c;J)oqnP`KXbIFMl7M&Q%FfghiZW>OKI zd`90#%huMi*T$Cno)C=0+^Q|tir)zJ0~Ae)wBpFZU^3&&>JzhBdHRTeQcEX36iREY zh~Gn8)k9puqlYlX=e_FFO}NZ_Kx7;p$ES`_ql>DAQi7G6K{dj{TJEUWoT~HZ?V8!T zqD&()-*f@STZtL-`XVh2GF_n(NrA>xlhK zxkTabb_u^HiX$k}u==e>A}lAen-AoKLU|B*i11f*Bicz&LRvqz-vv~^VehlBAwZmz z*{2amFp>aAAjP^-DffzPBAb@-WDx}44sy`2~ zw}q!UJ2@9IqOS1(u@pJiW%*Tyhy1Ug22(a;K=AbfFFI_;F)|F6rVLD>ariPjyIOfq 
zAe#;m3_<)6rUwbsGaBm1;VFP2@I%;Fa0)3`Fn(?Xo71Dxu4$PC0Jp~7H=)k(NTlPR zzrLM{Xf|4WF(NrG5zac1u%TZh=n&4X=~itKm0f3Sel)fs+>_3Uc$##G-uL7OqtF3P zYZ=6`E>Hroy4IPlKj(l0{LUi8pw&v4cnfmEF8gL)h_cnadjAiDiI0qN`uof;iqp34 zk(4Uc3{&c|Rtk}77Io3)aVMT@o8TMRG-cz7quGTx=PI>%SVKZn{-6KKj!+sq=LI0EyWZ7A; zh*gy2b&t)Vq+ts`K^!Pyn7oy3lqx76HIA7H*Eh}UYwcSgb}X3Cu&+O%QJL@ml9Jxk zrv)%|6%WhBBV0&+X|*WU8V&7XmiD3>NSfyX1RKj)S7_V9|4w{H%Iriy4N z?Ch`Lu#rdLXxcUo`CN(|w83)*6h#PBkok8kT8~oO7Mw!VElvwg1Bj`kI<|Z{RuyuH zN``cd*9v(F1=#ivT4RW`2WD;@VBjjJ?CXbyc)j1Vms;XAqkEGXq9f(K_f1_SRD!dK z4+XI_CG+9vFEKs6uW>vOgtyojbsvzQ#SL%%$eOet(OPpWbA%5xzNo#nRDkfW zBxSCp_N2ga6lD4;8?tNJNkMwYumi8NZY3Dh9Ra&UEZ^ zd(sOXj*#LR%jXcr)1x`)yKQ0VpZ|!UXHp5o@*n~eo)qWY6Qh((^Qq7>?8r7d)DC60 z4hR9#TSv!+@23tdjNEL?UrsBr($k!8~<1F5WwuCvV}pGCA&goA_djm(QrL*qDy1 zIhQd#d_;y*YAPee~qN>R2F>MPRy%3z7iaS)e#p1BFeH9iQttBq*wcFT z<9&%1W!Co%-H&su?ou`9I4S8`bRD8;n0dnPOLH|VsdJnSPsNYK&D|_+c?}#3Y4iKg zT~*+6GPG*gp-EC`JNO6+yVjvw7q0cYzOhjvgTjFOS96(fv1H;94ntyZnk2nmf?M)L z)G;lo#H|xvM~0hx>MB17%tfW?Htkv5KOo&{Ey3r8lCrI`^F@VW5lt zqz@i`QTj##kqoB3v0`Dc%VW;|YR4h9EQ<}_tRJPDRe+QFkHVkhj^(v(8sbF>e#OyV zN&Vwz<)&5?S+MZf>HZIL6N<-UjwO=;j%kzH(CR0j66Fuen3e)g2@j7;{Zrp0)~lGY zA^q%0>YjXOd8S2|r>3x1q;||JkF1O;@~$2;{nEn!bdtU-l=p;1-KFUd7$zN_Mf&EzTd&Gd#keEm+T^(iccNzRY?KGKL9D z7@aaoKUNxSxiziP<+g1K3|GL#VXNVW$L7a;5GBdj+vGe4r!I{q z@m1j$x`KXnW6~D?fh|pr+i;5dlx;5HvA(>HvF6+{#TU}7UE}-CgU+L=y5iV}hGz9% zv5F()wIY!fSed>(=7F7}!S5?vnPKVrc7dlJR6CY}9`&t^18v>|K4WgCRExwZIg{f= zHi7f?I$SsFL&7BG3FEDmk52j}p5F+l4;97x9>$aS>4+Fn{V^s`{gYX397=7#8(N!(;~Y{iQdiw zRW6^@AH1i@eBSnLR(^{|dz(ke#A#M9NDopsBka!IfSwOs2)E&~NXUOsA;kau^5|5k zw-h*zl2xVfvwaoQYV!AI2Q}7?Ou6og4XCkoRKw_KRfL{=1hT3lRjjrd@;$>6ITkxq zF$C0?{KC3^@EV5A_kd1(ADMko@!XSj?uW7>OW+bu+0rQO)Gfy2w9N)mO&&YAj~Hg; z9qpmRgfJ@XT{JI$!V+8PmX=vmwf_}C1J5`!CV~1C;ojFP;_bVyLW7WLIA7cP@k- zJQ=^EKt1`CIxm7PL(m)>-1%NG&MPxiVf`}?UaGW?5soY^-%$#m18}mq|E6gxWNAT* zgD3gk?sUc}TMZ`BRMADP_$DE9k)iPR%-&>X#9T0s8W;a%A?NT*&#_aUnysIbvGMhc z`rG>G!OM&=!KVCmcNKrhzGOS?ycIVTUu0IT_QW=KPBR337wgtg9nVhxI`)HAfRxb- 
zlngxw>vhNLeZ#_~EG|xBqAqTJijFUsOp62A=^IDvDxd;0-98YY^)1a;g9>t2TqmN8J+x`2}`Y7XZ1X+oR7W_e`yzx@bMeLdg1_(U0UdE_fk zCwW=-SU0^uH1$lQ!VnaD$2;Xfg3AusiYG)2* z!@-|ZY7woIDU9B}Ud#L@YO8wB>Jm6@A<_NiM(Je0?wn58D36oveqo0=j7~p(nEo6& z#vpDdF(p!;VdbYcR?sW2v_VKr)O%QHGog|H@PwEnb5PvBgsKdedvlbq`F7N%2E!~v z{Y!)WOXBwBcGjXYI0j@u_<*q`u|rC)YnNgQUA0TK^cEvbc?XaoBj$jac$k-_J%@{Y z$`&0Nr8_j^UU(2VE`=Ib5`03?$ka1a=XcMdZ_Z=Ee>PAKBIkAPZozx>>+m__U_s{=52* zDP!l{XG!JPC_cl_L)G&Ngb}>9uxXKp9QQTe640}7eFB+0#P1LvHVs>2slEM}YxRxZ zgtNyjF+S+xK=K=8pK%n3-%+w9Br}~8Ta4Bw2lL*D@~o|4+qu_S$QpQW$opr3aSxi% zPY`l)$S{ENth)QKU^3Ds{V8B^T@*$UKp0R^OU%8KI;hXp`)+5P0IQ^CqJ*wA9Y8!foyNi zZqBc!TI;6AV0qrcvMjpcYp(YPw$L%~%e6p(z9vcVPB8Xk@#O3mkT*)g%V}uhI2}XC zS4-?LjWu2SC_Ab`oVYtRR>3joGjT4JDp7cvRMmEkRNd`;E=u-G0blXe{h7_04#o@; zhWH94?6^9du9yDYfUbKo(%+v$qInCu28u(~XF3XAKuT$R1{OsNytJY}WGt*# z&>|ANeB4_(IN#-R8OiT0X4 zzP2qHQ_uKNYURz+0M-3p3-w3pRRC~H_wu;^`>I8e;fte_$PMa7AKS)@Mkm=c-(BB3 zo*Pw1Qvnxet0M=i5-08_&nN37P|Z$^ZBA^0$H$^ySHr%pn(oC9b;|F_*IFfQDLm^R zbGmTneoD`68~P%HOav9e*Owm}X2DWZXiPynl=~2xe{O686nS6>NJvX96e6iIBT@hoLvyO+eF;Ju~MgO=olGv1TWd%;H*+ z^?IhvNN?Ag7&HIc=%)P|_^Z00&#i~f+raJ%o?M=Uf zGvdZypJc}-?D?o>X3tf%6GzzUr%44h{;VySWYgfdj}^VsoDG|CRD>PkiV)8_;B91I zXqc_@qqLx}tG4p-^!XLFVibzYS%1Y>>R;Y9M3gyNhYFAZhswE%Ju4*N(3 z=jFsR8?Gzo%|2SzubBN|U1zk;-yI2ece#$4ts@>1-B*Zv?-zv9k*ucKGo;B zE}jQZ9jiJHh?X0Qm77(ezZcD!Hzi!`)cD`SJh?U|+}3w&e9_!=OesV4OWpZv>1|#0zhORU>)16g6$di~~&gZU!B$#O2yI=cu~-Nh|3y zz&eYQTeu}44AK)Vj}7f(@_45oL7)T=jj(q!SR^v@E+JOw~%K0_!iv95X68te~=W_rpg&#{yww}A+`FFNV%o7jO&Fg)$ z?3bnJw%zyC=}R~3KT^xP?coJK+c5Ohckfoy)|B0B*cTRqC_Xm+#?xU(DK(VJ;G2d4 zQN(U1>v(Hh|89Gx@qKlEUcY1Kte($6dCX)6MI!kyWac6}I4@{;JWo>5na3j& z@%tyeBj3*OD2$+Gze%ST2pHiKYnS+}#z&KMW+3E+JyN7cdtL4&%EGme^Y{OP7 zlebA#kSuKcQ)sEKlZ}REf$S`>_PIw2asl6iHOi(wxA?MFu;1&$}ki2m0x-WA0kyDgD> z=Ca^i;BWTRyu@ML;g|Sl*{PV6g&z+g{dg-mQ7qv}yG{BvAjK>ATU&IRPs+pg&|O!r z{W@l^+gQkGtX=y}ERWf;-R)GTjEL{#wvbd)w+EbmN9~6`g~95JX4Evt#ywd|ue6jI 
zr0VM0)YBRM!Pc)FGX5-!R>tk+v#VT-wuo(G&?|K%@OBasW!%fz;2yFLG*d%34%eSjJH1uk`3fG5-SJWeOBKA5uha`&LdMmVy*?GI>-f-D`v@nYgkOD^ z{QC0#N%cGvI8y}G@a`wQ8Xnmg)Sbo@Dcszo&^pQC6=?VhtF=)V!du7O{7tX7n?GZ zw=%T+FO#VEHk#;`*zibm<3ZIzYHtAs?-`9uv_qpI9oWvv{XO6*t1>BzX>Q(oOEA;N zTXLeDwB7u);h?4703ubNG))@b=z+xPn@&R10@;UNyo;JC>a@afb5lYg_Jwu)-SQ<1 zD?BU$x~aKPd*Gsq?11)VnchAfWLS7Tm2FxYY8UtCm&u`achD+dBj~r3c0Hxb?fuS= z^{?r``aR*=J*Fo9k)#D(XvG!)PJRSzt!!F31~;fWHVB$O!9&%dcHV_W=1)wmPWaw| zWNc6wRi24`rXU?rlo@TJxIU~$@rm_*&ash(+3 zL#63iN32tjA6@oGNc0Ry6CXD$SJT8nZw-BjtK34&d@H7fm}TCC!dI_=7y#TM`R7At z{lt7cQNlGQ_imd`-d{`KLQ^(zvXj;r_?@r~Sp%3^j^L$TrSE7>YJhM5$@aO3XX=)#gtSHx1C6KNPe zr|m(>kN8>$&Eu&|_QJ-0%J=104d3+``_$PATHL@~BM2YmuduPuj&hIfv~_o|iP%>q zG^r$)pQn|UMhnDNdLaeB6CNq!R-;_!M#F7BgZ%h#?p#C&v!KOV*y7nhP0hZfmvX|QW-2|V%{{yRheaBahv{%81(TmclYgRez% z0=mhfXNrePk!Q+`HF6%|s(6&clnT7jbgtlcyuZt*+ch5@hJI{~ zPkq0eW1R7)z|`PaU6m5lin?SrJ;~LMwyPx{eBRGXkq3$51&g;JLI)9U+A$@c`L_n# zZ`6Dnd*y1F{44`^kUwmp?sv|m%v&U=i%!h7dSY@GWvo_6+oU#BaB52v>sF?8{+=wM5@VfsQLOJL^|o=D(6C-45ZJI?eg3pNl(BwG z&m;1)_>Ab{T=!gi{9Kao-XnDwXxwh3l>>wC4lB$fYDA@k=ps{vFuS?kQr@x>wTDpY z5ZFk&u4_SzJvhc&;}w3M=S(<6yNc?`b5!3tpdh9kad1kyx`8?2D${K9;z^;1!*Jy+ z4;Ffx4Un{Y-U11oV}z89PUcO!+6q}2fQ6mG;hVYOH#>)?!!Di040cK=sy731d)H1( zoi6h%x*ZYQmqcN=-pytY$I=TJ3P-h?RPDmT8W>!vCU+C5Ncow zbctnQi;(-k05(*GdPTzx-}KQ5(gu9Zqn}RoA|uN++~q~u zdbO$)dEJ@*d$o`KJPY``61`*IuZ&diYuE}X#eanYa)rUbn;N8^qvW@x-NMG-egE23 zO9JB!7C8C4qU_GV`TZS$0qF6)X>BaR1ZcM_B_h>P$lwmsPM?flP;I|)(=>cRQ6rD_ zxmxWrm&}wwq330aY(d5n`93Qx1gTq^TLQ1Jizr!-K+m2;;@P;9TySslk%$>`?D zBTuHqi-C)I?NI0W)or=skpOTbjm&wvTfjs zYOXhma5gU7R2^?NK?pe8;4IA~_w(D|!!uxPNA_Op*VTkg13o_bW(#nK<))dEf1HH4 zG8RH&8GStOhb7G8n8q|~GOuCIp7a4E(Hmv`o3V@V;=}fwrtXURGdd(dW1c*zyXBjm zU+E&Av@_ay2S;&jg7{{4V>`L|Kuloca_XDliy6-; zaE>X{_V~*C;y>#8O)n-cX4&B0USs3s+fzsS=Z0_L|9f%x+3I`kPrxAX1(^BjOu zJaQhn7}}XBM}6Y?P*%w34%^yge25krq4xjVV_WYF#WII`B|n*Ae*C8!P1 zf{q^eD)t5t^M4DmRn*^CVp)DX z!J#?a8~2xJBX6z&K`_Ot0Q&zC_LV_#tXtPWf&>We1b26LcL;-faCZiW0157cdjc~! 
zgS!QH2<{NvA-KcGIe))<@7wkK=&G*js;*wW_S);&dpBa~bZ)769Ngll%^%yw6i*)f zu6^u~ndEWJcteP^YrxBvn`*rtG8x2>M)#b$^)8!K+FWtgBlb#mX-qk8GdUewsu!%E za&)8+y#l3|hoDmKXn&ty6>r^BMG*DDef$;O)HPB{_H;A#xHS#=dVapW?qG20Wq*pe zu~vQL#dP8jP2tE0o;1ABP2FLd*#dr@w{M8MB>p$>C(fB2XS+q^*=eE~e~z>46VX z1Ix|DbDNKPY(i6rgDZ}3;ibfU*H#|6jP=7!0X&o%DmF{Vuy zf>H7BAQsKOS9d1=!!PQ1dNX!IDHTEm)O(Ao+Ul2B%5HrLgh>!)zu^l;O;OC;=d(?S zv>%6>ylH37iXP|1HyNbh`3LK2%lbfF!&e!UZ}jGUo3CpRB*cMkIE^0%Z6@_?aNqBXkZ8h)Vi>&vl*0G4*M`O=#w*yXp z(T&8~P7$6L*mHCw@!9+vd;%JJP&nTzu!jx>;!Ou+z;}!{-dSZ=*B!>EZ5{hE#=et? ztq9srUvAGK?O#!6{RGj5g`Af`pXO7n76#vZYIw%`{i-8mHEw<@7Jo$1@+b6zUHNpb(js7qm;Dn?}k)YMcp^EhF9GK@p(D zy6+~h z-2I)xI_}*e3;G)`IV~NXs(+fn7w2Bv8CHEg!G`ee_ZVX%Y&c01Bva`P6CB zBF1~NWIqHu=h`TWmH?DJb^*Uro}^H!f2Kn4O4Qw{Y#Zk7l7MzetlXeFVf5e5nKORz?P6Jc!oS+fSG=twG7B`J1HBgJ^-%jHvk8*XCGt=rY6 z$-zCjUNP|BO%drM|5*0R)JXkK~F*!ktw|H{HGu zq1SgPt-L~PEwKfl*tR&|Pim?v$k@}aEydcy(+*B9$1jvaE|_3O!qVIi^BkXnl0@+e zmbK7n#fZ$-Z$P8Z{}`?%7G@gR-Rw`PpshU*+laH zu2Jv$-zgyD*acG}0hSFVGQD+Yl(YoV@&^Kc^jQAx>C?c0wEfl&<;`!yT#n;(?w(jI z8l@e6=uMUdy!2*c7+b)}tma1m?gwrx?Iq$cV@+wUEAK~q{b`sE9I9^^yp!otBbBwH zu(a|&KkH#eK{ri@i?yaSFu~v`!>L7|gQp?QMAb|`e!@!{VF!sbW z_|05riN{fo8rUkn5mJiIJacL3oV5k0(;lvrxf!J?7z;8o0HbESQ7#Z=3JA<&rY@-E zC-=+iDLun-7m^y<5Bu?MO5Jc0$NQs~Y>o^i?UHBZmMxecZ_G)>wqKC4m_sx#agWdc zu9)Zq82dh1YN2~JpH>uoqX&K^-G`!#V9kF7Xe#9E_wCrQt7=%y#PJKnfTqK#)i6uq z2VX9@`##6ITg@ryM|xVWa&~!x?rOM;NASw=nrpxwbDcgHF(H~MGs$^+xUKzZ~f>jXm%gX*-qcv_ipCxHy)Eh`tqhYY(*<-{$Mns?c>e5#KrE^sYO zi7C`TUT;d#8(%+uJ{Qn{Gnzy&%rQ}#I zR`Njih>FHv0_={nN1o2g4li_X>n6^Gp3eR|4q!|^zE$;f1AZV~^yY%ZF~O1Z+Rnqp z(zjTk0%?u>8S#OUA97kw`HNt6zQJ=fZkdW1!0%%<7I}yTg%J`U`_i`GQD)r+L~FNb znkp~p%VIPWtY%0v{&y4cso*JD2GSRQSBVWy<!iR?^98#vV_qw2!eA)c?s^18PTuV+HFsrCz}3l^+8p!)_AjJj8V_-VVGJe zc1T*kNMY90MI%$=`S+g;OwDDjoo1S(Pgjxe%bE*|qrL91VZC+;`!3&%GcM?EN9oJK zX;a*}d#z)U@s@2KBwmTgNK{$p?H-hy9u@_Enr zjvP7&N;42SF37r3W^9S^o#$oqS5%}7HG%_K6xWCZHJ#V14oaO?KW}Lne0=S<#d$A< 
z?xhtvzvo}Iq4#@l{%vB@e?9J^U=$ARhu5D2a{Q>8MIV1RpMCXoJ_Gs$h6CLf7hM<`9zj}|957nCp@}M6!`EqrsZh-sBCSzOo?p!!&3MK7B;xM zX`Wb&eowCkBVKHp&4*?O@mAGGvoN0k@+#)%d}*#wQ|QxPd4C@@F)@jpP(~bqO|IA@H@fw%kkvAFCwttufjq4)NGM)YQtdI<#)* zbYx|F1x}sVZX7E!9wSFvw17+nr~x>Qv)p71WPpLp_697|KR(6JuqvVB8^w6sB_kC2%9ryb>$PwK=`?6{KK}jrvQy6p zg#SCO96-$2#L?KZNS)-;|gGjsjS;W48FAGU^c!mHY0hX7XD6;^XEw&IiF|t#rc5@<9Ja${H;`|9FtB` z(}L#^&lJSF>HD`}gVPKb{Sg3VD~YWA?}?i5VQ&juCc_Qlv73|vy|kH5V;+!iC&gNh z+BfYw7jZz4mKSB>dlXO|T}s2t)N4XnEm4{M$1gaQLAp<;>_+b$hOmPAR%D)^&Y>=6Q4|3-88agvjGwNMLQ^`eRS0+wz90#cF>W z;X=m(`iUI`JTf(%In?>o zST!9gS8w09jgXiWrR6*I*%3>AsDOInu)^ z+StWQa6<%;I&ACWGXA%*G13Jzgqav8CR!7f3#bfhf<7;y0^$gJ7!BNpf z{t#c?NteiT2u2wis(wY7)_`GT<2s>Om2HQ9GDw4)Nxzd!s^mzQO=7cxt8|d~`v;{% z4Ig?@ST4(%tkUB^BwIMuSckNIyE&E*zug*Y|0(acx55p|-UP3-ObyYJd3%Kp$lds< zb3nE;&|&X{Eq<=Asi~X&&02|#_GcPz-`$onKHbl6e$IPKV`&?#M)`Z_ zM+@rD!@!kL5gB>yUAWV+r9nnR&Q$DQ1WtQNSgs%GivuQ_z9A|g1`C?X-2u@Y@;Bu) z{nO-=c;z!&j@&qp!K?EZFsUhm0EgPXUhujGtJ@i<{J4-}c8M48NHYMEUN09eao<`P z7&)lp)&L?n>@BT{IUU^gkL47!n``oQZ0~C!@lNoJZ#<83>|~6k2*so}_D{j9by283 zwI8~s=2Xu~sTW|T84&Vq)C;#To*?$Hw)wQK{@tBQk0pLu^BERW%4zYp#Yg&|=y@;k z`<7N5AZBP8L@A}dTz7)+XVT>H`<{Db=h?zJu6v#i6J5?ot#@8~Uvb4Vh!LQQ^$QtY z-Y3C=Pj6{d@*A!`Y{6ltYgr!=rAEQcR7{+Uo-W&`6K~E{%Be0aLcz^(iPCcxiuz)) zcRcMqnqf(HGO26QGbaYmAiF{mJxP{D1Ab0J#Wjg63k97k1XQ!EHq^ocI310B5qi->_>>M z#V{yJ;V$KdC@wn-^sC^nFXL>5i;fTILF2TcYN?g7KaT`jq>WmUjg&@3xPd1d@~=x$ zW};)#Ld4m?-t_kb)kKZ1!t#U#K25T;aZEGC;jvbM{PEKcaeK|q8{aO zhecXYn*(_d#Y@((m84B7W%Kbw<#`^Y+MAhrvUkNZDCihY5@4gqkcpV8sW8TVz3F6Mgkpg%2q1bR;fOYuWNNLibs40J%jiL|0UFk59%t(%V$v+4_4Rj87i3p^Q|v{}IB zn1!WzsLGAPRFwt_N8Rjn2PKLa+9YbQF=r(V%(yZ<-(RgoRWwPK{b@Kc$5{O@iX23< zFtR*O%#uq-DTW&Gijk%DAxQa3sZfmzZ^a*)&GH|$Y{rZ2+iupAl{{P~S*YQAhgsEz z&cngWIb%abee%d1Z?ynPL0*}~~Vdy_{J*l`r>KIhOYWS^r0gZg9%cyD{jpH;aiWS!*un;Dgt`D#lu=)6We* z(b$D0I*E=zEooldm65atOE{7got#AY7*;Y?E1DG5i8k=X(+FmO)V~|Lu|Fs_2@Qi392MqatJxn|5`uLrGvnM zO^2{i?*-t$Y|7+G?^`u^5Q~@SIgwhq39KSb=4LcNt74T58c!}`P55(H3Y5pcHOe94 
zUoj0d|56_gruilhFlmgwJ__-R#M4O0G1E2-4(GxlpUr7*JBF;7H@S-reUT}y0PDNA zN1YqdMU-snS66c1LaLin4+i&*#7a4*EuuAW`Njdb9!c^uuDz_^ zOI$@RN$lvmHP#}d+P6l>mldjYogM3|YgZ@2&=8eOaU2k=>_rH44XsX%e9JBy!Lu}X~rF@EyHM)PAf2fO_05tjwm9OHgoKPN+@7JEK-WiWDwF|7+- zH1WFYDEDL3!+cbK_n^rI-tGYR+fTgXnD5w$(kZPj$`9s|O!3QTRgk^M-4USSuT}`H z$<$SL0+uH5;8WYS71?m4RVjkIicys!p+H2>>(!Fm5v4N5MLbs+c z9QHUfr?vB?T-hnj79-;wy`?3or0QkZwyPV*2`qzG(z5dD+HM@#OnY)M9H7VSBJ6)7 zg?!%+m%M}wT9cF<24*dh1dx?n>Gw+ExC!ZX5L!2jsCYc`PgnS@Kkj^gHP0oWxIo#b zde~eQERS%DQD_RPOVC7!;f89Ye;QiOCTEC5xsnkexo6R+WM&TAJ09;%*~4T-(_kTo zI9QIezqfRrq(?~Fzzd6*6GvVH9U^ZvIxf)d5?z$0Y8no`hgYnIhSKkUpVRrHbZud` zQc!x7+VRp7C&m|B6;g18Yx}trB>T{dTlg6bU}*r$o=jdC5qp4#1D{arN`tDkkICdKmOR0Fg-dg@;ADqZb>IuFs_BE-&CT9wx+8V{Q-ify8;+x2VBlVF# z56j0^>|@q$3sR3I$eW9WSTP>!8W8ZX+BNHO+7&0cd`0E^VSKs>$-RS7a(T z0oom9r*J1Q7?Dh&P>i7~ZbhYR$S9Z#Q6KG9^z*581?OC91+u{cP;^V1ElyX!zNi&B z1w?dZ{+@A{njM~HyN(9rC1}T+Xh0RdFgWEDWQA$y$)Hib^@%5m#R_L4&=`|g7PhEa z0*-u#WIGH-F=er@g-_&eAcY(#)*##4LVojOMi^;~Gk=tHs?SVZ)N;e#-m$(l?H5f^weX z_Zl_mJUzQL)ZzWe-s#J~q*Ih=9Gm3BGxl^CShyUXQTZ0V>C&uzDtWDR`64n5k$<~+ z_-Q5aEO4N;Re_Jnb0)hG#MNBG{9~!Cn9P=0{GBXVJOj$>Uuc%EIxQroZorb-;C=8Z zPG5~k9jL{s0BLiIZMQitsd&spR5|OX8P=0&RhrEM!bC;5HQ2$HEFDX>H)^j*6rXY0 z`qs|?ltX6(&qz+VW(e#>hOlX1!doxEq3U3!JiNXdCj(Xtoo##SZc5l@_S%RHE5Vqg z=z_$;u}F)iTbtBX_K{H2W{(5AFU_PeZNQ;#8eYe+jJu6t2jlE#U(ddZir$$Cz~R?2 z`LCf_ycT@*vR3;*r$H6*I+Q1zDfaB~=~T6bkdBa6iTKFu8U4jV)bR9p*0U;&m`bPr zGyiLkjq{n>&CV8erD|>4Cu`rGIG+Qvr``Q2n*(307SFxrx?X%hx|nhADJSaef`4UT zzEzthzx;T%Eeo#=?BbvvEyP_9ORv|@T$tv>HfzdzRVS|A2qy6nPz~sWj{cofS9Ksq#>#UHTbUI%1UZxw-O~ko|BdTeDm# z`0wH9M8#2Br6|5%r*zH|_gN@uMg_`-2UAw4$`=DUuL*w~QNod=jD-vJJBq)WHNe}x zRg06gzS)gaZj-0muTS@@fq(AV)9>xQ= zd)E)7XZ#^$AI>Dsvf4Y&IU;aG#&>Uq&y2 zyyARKD|}qD4-UJgb|$AxvDTLMKTlWep83n;mF-y$<;lx3m8PKB*D+wr4c*J|n+p)} z48}?46QrXx*xBTm5nvfbMlCC%2r;J}Fcm=zCq~hldthdLx7BT@vlVVPqJJ5sBYThvoHO{J;#tQtgGpbxm!3Qvwgt zm7JxiYNf!bpN5}$H+{@(Wt+8j%t4Quo{W?{q)q6>hvK+@@i5<-VaTwjnOmo+5qUWu z8!rFM^jH{;4ndaB-U`20J4A2EXO_Eu>tqG59V0v(nkhLup0*=Es@k>k 
z7?&*v=63wp#{mv5-%=TM5bg}m9uK!{n;lFU9nCrzwHf$rxgUYbwkn!d`fg^oM)3;< z16ets2c4eEHzH-h%`~5>?cezcm@C--=#LFw7$(OdfvJfgQ?Kr}E|f1M?a*jn?a}xU z1A_0Fg|%3biny*%@Ho||Hr#D6@S&+0msD?2XcA=1$ufDM-S)4bL4X89z;g5a<2f}* zn0h%q{OG|v;q)KN4lZ5Jh-PCm(54t^T`i4=Zc#MIH9wp?&s31N895c<3F>&1DI zHzH<<%li~GC62Du&qGFq$Eb!9E$+JenxY{n_!?=V4TXyx!m5|{_9v^{=}8U;b*7@R z-?ANI?Q>*HxY0T@&W>D*t zwyha@|AW8|vd|ms$?+)Et~;!oP)ibxSHypEDR@Y_)yXqAKunq2ho)OBO|upySqhmNA} zzvrE4gIzEK~o+zbJILL0ZAm5O_95a-vOn zt^A+@T)}?BxYMS`lV2vJ4Oe|TsU05h0j|v`=l3Z}w~Y5mO;so`@Ri_C>wFBW5B@bC zC#oJmH(Q+M++O4R?eTzv{w@eHst#7e&^yC4r#CXQJ=+cHVXnD$(o8Wnkc+AFOYSDI zPnN|ni=ng zn~`zxBuW;PjRLNvX-5u>$5n7fN<4t7sod5nJ`GKbLl3|*0Rs!m%+KODe3b@i6>wru z7(Ee;de6lChtD%@OtDm{+>?ln(p}@uPk^VbS=7uK61h zHhjuAk`56?#ML(`xR3qgH?b5xLc~5*siY1t)CXO-N3cVpiBYcqQ+)puC#l}zVrg>5w2n?_;XQD?zwwejp=d6a|;VC4eF zcHY=d*{vuRBYMfRy8-0p+%*4W2J4egK1mqY@j?!6ux)RR(3-TdZ!M49_ z`NtG^dTp8tSX9QR*TU1R8#GpK)oyie1hUO;m2ym*fFU}aUp=N$c@vU+VF!oo)1iLr z0|&X3^gQ4sU+>Lo$L&q5G>Q_XG~=c#b;U|UJ+}ToXfpEyi#>pwB|5Jc546SBhqLMX z8z|{=0`o~*QZlttC9bT)#tJx~n>&Jzk+tR5vD^*e@4Xl%LY?E{D}ys;9fZKW3$U5+ z(?F)aVzu#)pQ$kV=Mt1WO*-bHegu*7*A!c{g3EVyFsvm(nqj`+TYEc_r(q!`L(?sG zt-n&xWl}U$M1$)~L8XY0Zf(KZXoj7J0lPIz9=-!tYL>q(FucSBaWjPApptH74`}>U zbw0`93=4HnmZ8zWMOY*k&f_NHhw9CoceeavZRSjAtbxvk_qWV*GzyWYe9F#7pVst1 ze5Xx9UQoX6%k;J64^{nl0z^3fhtm4tED;3G&3nK-A_BGKBLUiQ(#jz^ThwGZw2z10 zHI<~YkB1gJzE-t)kvG`WYn-i)wT->RScKHnNBB{Q;@|SvL-n}0H&TYJ#EY(7qiAqG z7yjj#f)URDMWFpx_H0Izy$Av8?-@a;sS6Mj)DQ;m^jqmRr8?TvbyJ$&=4*~iU1^d_ z>ssD5&*%7a4^qLL5=G;gvsa#2DaE6#UVzI-yN%?VVIrh5f)n@Rn%5 zc;`!RoiWfUK)9VnQxj#l2~de<2DW!}HE3|C?xP}py^l!f z9i=4AP4~vbSKVCm6w6OA=w!()F!RMhmnUpla^CN5KOiacv|T%aEeZC0tiS_ZynR;^ zhxJNgZU+O6s4c7ETF%jvyGKiT{$kQaG8mr(ASH1%to z^KPDf@H)~yvUsOSB0r&7k{6C$r7B&tp2%s4ksV!ww#13 z4}~wZm?ezELrO$o2rY5#0l_m8J$`p*1HWu&(m5G|?W}t*^6Qf;bcCAlTXFJOhxijd z#SRus1i0~y%IPe%Gd-Z_9H+4cY#4t%n8L@|S}7(F1B}f5-+BL;_#0{(FBBh8)xGgc zj(IPaqS|sR{JF<29=L~&w&=-n%G)y+oaj==MvvV*n-`I*O>0d1yn|e8l2qw@e2;=7o64@DTQ)I8qQ3DB(xVB7C2cC(VrQEeh`_2C`3&+hY8z$>>aiD9 
z4uu67dc^`1&iibkl#wHJr# ze3Dw~n_K84%*+$*po0*QcBx386Yu1s6Lc{xCkn*yIyHe$rc@!KMT+NvF*o>Dcxs{= zLzw1T8jj1y@mt%zZ&gvR-jyrBridvECYM!v!K3eJa85cQ4BV3Rfh}K3&Q-%)&(~gI z_cR^Fk=jl)(M?@sq2Ict8kn&8-iT$-_h6|KUQb4#wIVj&3@#OuZhXEQQXwN?)`wK5 ziBY;qs*{7NSCjOkQz+enRcjaz9ka!6kF~e<$dEW|(Mi6LWqSW&l7y?7N^U;+_SCx5 zMaZA_{N?DZrxmAhN#fABj~`Q<;s1z75329w58q9{-s}ak09_M?;y9nwUMl@A@jI)s z*w&UdPSnE&)Om{{5vbg%xvqT49|qamJrVu4r;frCO#)mwE7!(*M&y(l={Aod@7%0J zN4XuCGrC&kB3)ckDD*Rv|8m*4;*BSw`Io55l_|&F#BHF$Xb${zD8m(mxGO0314`1B zrCkf_@_RIXerWnl^bxME#n2emm~e1VCr#p;KT)ZwE-aD!scL>)MR79i5A8d`ja0|M z_ZSQbA&$+WW;<(B9`C)LFIb2CI@rqE*RBE<-D73`+%6cGd>jaiEa+y`rK)COoM<P9-?;FNrR53dI=z&MiLRQQE*c&iCY&zZ)vebM zes{LmU8f%T+LQ8Xu5&d2{rg5HPGoxrhuq5}LrY5bJ6|9{x_evS^STK;(Ve8`Occnh z3**G#J8X8N@!6^I3FN=q=6pDckWIwBsO~qh=Sri>*by1S-$8HS%6pJ%*q z1PPV6)XWYn7B9H{MaPNpw_1~;hr)e2e7VN>#QgZzUhuy@rQPBabD21gJ-rc5C3??O z(#Xk^-|O*&wHI2Bf9US()!8!+IqttMGdb41Z{rRfO9sV%Mwu@_&0npvWdV16S_%UU zFMrshflz>uBYgKo<|Ya9%sm~5s&I3utmDkm?)KQ`rtu=@)LA8>`m}j6xgxCWaY^ql z0umYRcd{NK{%MNAJERTHWVy!Uov4`XR^e!hmk&-s7EJh*hO9nAGQTdUM_6prm`7i(_`e8qA6oH5zS+cOAv26By=_J*OL;Qpl${(J|>jW1^=WZVQ0bEV(m@bW*oH{z%7i--|1bairnDC-$={ z0BLUnj7+mOM;rd+f){fpW>tOJ8vKzeV>C`9G={uqGB8FclhrEySc# z-e5X@C`@q8P2YF6dOO0JSafql;+okA@w5z9t^1~KCsvcP_+u`JeuH^U+TqCTYLkED zxH^2X?`l~Atzg0aA5}Z4e!QGi@0EJMt;a`XW!9*YBov5oF5%10TDWS!X85P$s#&P3 zLg*0^UO}6fqh!FxhI%2L6i^(b>A<8H^Fi-`PcKH`l&$6?NlIELBe}iW$W)j6vrEaK zDGAo%p28Nl=1P8(K!%N#+Xrd4zM$fuTtJubkSc&>5Im4eea9&!4q}j3wWLmu7}?(N zsRk?P{#}d>R?f#AU3+Y8@)}Sm6}R`9lD+ z3XHji`+9TT421f8ug5hgMU+87a8M}Ij>A+4z^ApvQQ@nlJuiE#6kf?uDxGb~rHke@s^F`p z`{@`IzqN?eScD4*<-zUHEwc>~lkQ^fAmi`GU79KM)Cvx4J5qK5UGz>X*NJ=}iAg-l zIN4IZag93I54_orOLuUqK7Su3{YqxCGOqP!IJQVBueA|s7VZ+ z&SMusUA7Nz3hpbrlR2{hO@*;RuzoUCiu^mdAtIIj`hw;^ZYlDkRi=F{{5!Oc8E{lJ zoUmqzG6X!AZyUhW(C3{E2ra9~g84{b`N^rrYR~TtfDQ?)VNqF=BW?tZ-EmarqxZ*% zZQLLS2pRtB@JNaD*iRyCDjztIe)a1Xk4@QCXrUq`()J`5*N2<6J#!*sN${tYgTwbu ze+L6}u5n3xR+lmP8WO{D^tnF9=vKaw0$lt(*2RSFL@M&@l-{d;=c4V##^j+2B>ApK 
z{C`p%(Upm#y|n>0qyJ0~`>lz_V1FlA#`hARG002MH5L_y*)nf}W`czYHb^WYMBTU^ z)dKQftfeFUY-#}UiFYUf{fok|p6AVDM;N?fs+tsUOxL+@iVbXdJ@;q=iNC=usw z|MBc}dsIuc#G>L*t6Y_E|5IQVE(@je$nlD(5vs0ncRn>*O8k1Vxm4fC*T&K1!I-{Y zNAinI_?-T1|Esq8v?Ld7vYGLL2&=hK&7NAMo}O2tVvz}fhB^`AaIC7Pqut6!)1}>A z|5Q9y&wG98VH>S}c?wVays=U}2Sp-+QrJwTq-|yE=n*ZVTEcTZdut=hovMCcVT>B?jni~pxwaSGu z-fLCV0StGXYyz1Wq46fZ2B*hAYw5Aqg#e0Jr?W)K{K*`*PwA(IFU@`@)7A`PU_TKJ zcrYna0?t1&LnJN!=S8pmf0h^_fj;Nh0{_U|apZ)V@Dfx7TzrP8gk-P83zwPr@7r>$ zuOHEs^?sHOP0jL8&rTLaLRXJ+K5yjdmM*J}#B7HiEMCj+FNOdzV(CW?-Z5iTvS88i zQrmvXY+)8_Dz+#}g<<;4p^+%fX>p7Bni=hfMaLh7LS2VIHPd&GB_4L@(D#v5^G8-M zC6bc8p(WQRt!Fz2=m&RT{RV|$xMzInk5mv9?}rdQENfK zNu1xwjh~4AgH7&EpKamMKq9MAAcx-CM5^Z;$SWSX?0|SZuP!0Co(fCJJ-WD1apKB& z^4CXYSn=u{rpP+`M;3g4q3bv5~I;5_$BV&>j@7>lWIPE z(iVmSE6r7{ReVV90R6OO90m#0lL- z{~D5+0J^u4avPG*tvIQFFht>CM!wO+ad2PX`tD^J#7M3&))b?`R<>bXN=IP*J<5GF zb5wL(Iy_^vkAC;Tm3M%TN-a+`B0y;OUexYSuxC%)%jMUXdsm^~$GM2decH#@630ix z_P4~gqwl9>hS&YB&4pwBslNJ+`0^M$JHdv^OY|SjxQE$;Bmbfk;HlpUG~3-IaHMtE zXWef4-m(h@OpbJ^K_w1Y&+&-1&ce*`Up~`9hN{;>deUy)4g~Lp6-Pt2ryNbxw2kk$ zQQBQ9P4s^n+&&y~p9j8G_>I+E2PK1NmuL4wW+gr6X5e>Z_EuWoiwykuvm?T5*lLpR z#5MP2rjO~B%$6ew6*aOyNxD96rth(PDXkD_k%-U?_%%i8_3b;ZK;QlQ#eVwvOs?0- zm>RoNGn6XK#@HnRShs^VmfTYY?OZeHHLU3Z(*XE-6 z@Ef+|rF<$Dcamdunq9!Z4-g+dd*`?n-6~pXF4|gHf!egPKnb4!f|FN&r=Y&2nzq%C zr^|X7Q=wr32Lg{XB<|H6+D&EepmSR7P)-q!tWX~gqAD~5D$l}3rz9o0)#^K{pAg9) z4<7z1_T8FHavNA_06J6VYGpvA^+xf4Ax0z37Ye3gq5RcW7+K;cWri>%f`WR%fR$MV zb?4DBBs^A^@&Sr)1@Esl{^$G|Sweh4ZHVj7)cx=Iqekbd79C?2QTxT{gg<+vUDyJa z-9LSy$eSLr688KT6t(xyFo6On|D6ASD}dgYZsm$6ZZCzi<6|0~k!r0x`YNJCr}pKH z-+(ZA$=OTTMk5zay;gI0KC4E{a5HX;u!;wdsvFO^8&5O0L36eUw~QML&JTu071QOD zuNyauv8jzokP5GaC|POXL7?;Tq-KK7sndm_L3Ua{7hxRo*<;^@r_aVBjC5-PwJ zPq^!*@Yt?0*fX2Yo2V%_ItUWdM4J&bIR3G5-r8U-GC-qyqz9gLZeC5MO z7QewQQ#LvoE_fyL%r$PMn>C$M5iW^fTqNs)g`>SH9M#`pCALj-QO++V4v zK_9aE6XigU1o*5}tzGi$6_LBbScXj=QL2DR6Iy3Ec}6Z$z-Tm1EU1~zxEyNe!ZPtO 
zA)koKeww~4kd0kg0wJYGbGRM?;YE(9e=mRTGB_;7-aV?%&G!vMB%mhn%j&Xqk_CE>YgZ}|>kzL*~DLND_!yH}-Z7m{}n@@`;O(0rS!_O+ftaV+jB3?K);tcyXpDlE}29ZmPGaCM# z46W-!6#Yk%_~Lbv?TbOh|35_h3z=J*z=4SlR{B{sSEF}X9dUEaq%iUlzbnr^U9VN) zyuqS@>mtfgise(vYJpShRX1>P%;8}88(?6lRayb2P?(;E8IfY~aV4e2Yx$JtWaTAG z&%&Gqz1Ewq#DO6BB!STon^9{nwMgc5bkCV?LRvDXSw5I-Y7>|gV;xl%aZ1fPu5Q{D zz*^2+WeqQ9IgiKSsEZM|ZQNb{6gLCs#~45f-^xg>3{!wij(f3<*2CK&FGk^XdU{w* z`Hgs1Ms{adfVmUO%0&1nQf>*xW^veUlCX`w3oRcB?S5%O!5W645PtBNb#6fJ+X(C$ zUMWUIVoW=t@|cX3`TG{-Lmpc%6`G7=a;+albtE9IoG)80oS0xtDG_r#b(}fPG>e6z zzq+$sO0hmu5RXPrhSedJEG@E*7^Tpb+D^4rzpKEyHHl$0%o^9{j&RpPGf&QadA>VJEYwqM^UH=ht z)~#@8#I$_AD)j#O?_P0zo(SRNTiqTHNfJc~syqq3pqvl;VL`I-{mS*65|hV?M2TyD z*|hvTX=`THBagoI)c_|)>y-f{j>WMo~b^LO= z5Q=)?H${sV$NCJMlQ~~mO*KFg_b`wVmgHgN+mEvuNLp#iKPZUV#95G3KF(hA(FLBYSS7fW~QP2v!d89Q+~yljjhG{@bUF)7RE7sXNKyMBL|V}O*v6rry$pjRrhx}-5=0`$ z)UdFp{76Gq)WWYLNCaSt*ed5Q)aA;+M^FD+dlcac?3~(aUqWpH`(v?&oI2r}Z``0uN$<-pmNW15x1mX_Mm;r}S(TCuG)|;sSY`96 zzly0Au*sWlK^{<#VKIh8t_I-T<}qo?UdNnF6s|MX>K(=op4Nuy!7lf;B93`W+XlCj z2RLX|IFH*!Gc)x2QA1vqSr&TVK?xe(&I|;mC93{Z2Vi)~DZHX)TA20SZ36oU z)cl~J)jKK^NFR_c36Q080I;5Th&^;ia}AbO}&$cdpcFyQ(Ic0P`?HMsKHYYZY=> zRf49?(n0xbKCnbJq*%qA({5qe39+By3)IvX&WDIvo)GnuJSD6~*IA>@=DF>QZFae{ zraHia9Z-}q-p_F8h$|AqCvqPXFm*zaN?Ed&M0<)lP~{gjzbYirT!M6%f;3GHlgBbV z5wI98^(|d-vXgcZPJ*lUq_H z*j(DGAF(T`~}hR$r_mB^@j=P6^Y znM-@k@hDM5*g7`Db3zOnz^;oQI1^4*Ud~R2ug%u5u8lsKmI>`ye4}%}ts2@~Eghz~ zq&#z0iiPIDExm>F%KwHSk)n0nKlt_kIUI%O2cA6gxqcPgqrGQVC~HEZ2h{mdb&Z~x;Qm=Qj@g*Qq{UN-4`f@zZ}fU8-~{ZJM;)N!WVz_ zupwO;j~(FQW}A87nZ2`=;`&xg(1IF zO{dmuc`6-D^ufu&o@|L%XbP!txM#4tJq8 z{F{b%yo8UNJfwB6Z>J4T9Nbc!y`TnJWBe!2CUDGS*Kf8KD!)E`Te=?hm}ERkxBlak zeUQbti#5* z+M4&nkq*|zZ{!R&Bo6TIP*^at@wgH~qKI;ZF(-Wz=MGZzdmLdsCOb&TCF!dLVkI6lnTWD^QwHH7J0o*$;xTR7()H-N zc|=gM7R#5GvT<|Xfw*#4)%xx+14D6&oeh~KDri%XtAo3(D7x(#Z_(|TPf)JbrmlxvhI+EHv9WI_R59X$dzRK5` zB2fmko1A1KnDV)!+qi@hFj&KgVO4tDNS}6q*f)^nT={x!?+4mjmXf)ev^(1br|@tO zr!#i*XW~kS!7ZWU|GhSy%S;CAcd1ugG_)b%h4)@xm?(y?u?ufs2I@U$O}5a`oEir-6H 
zJ`;)@?I)nx)w@aX^ubtTbu6Uvb`jrVFLx*0J15IHnaP#&(d*5W&^|8@ z66+AO0Oz~aEcy0uhpW;41eVR5H08)uR4*SD%VdiKQGu5Iv)8P!$7xoBC9Tz)kv`HL31G2&q8)aHP2eS$9 z;X}AsvG+2JoCO4_TmLXX$8yHEvEk>+YF|n=;eVITf@0YxRLj4#RcEBy3 ze`&av$y$3W6=p6WdZp%Qc#2O`b9IkeM#j1=@ZbC7zy>s=@`H2SGm|@{k;?U)bf$eY zNO-CT7!e9K_M6xkRzG4L4r)mv>NN2kjfX(^>0sZ53lp8qfkmUyD9S}9TQjB70F&Hr zC4-ne8B`)5{Hr$jtwx1kIIw&3SMwylbT_lf{eAq!H^Hmf!aBg~%DQDENOk-}N_GEZ$}D~Vt8v|Y&R&$K z>qS%%xb91gXquF)rz^POQ0mNlRtedMZ6BMk4_Z4@oQ(f)H%CLJMkds zj{Qqo*kf!-M`pVkh*)lZ5VP@KuAq?7{hNhlJi4N*>ON-Y*+rw|r*81KP$V-$KF0}R zbx%&SKls`muIn(%waH>3nru9C#ZWRsF)fb#p#U@SxU3h-0lwWYijvhoH;h^%01j0` zl^NE1{hK;H=85)%0$m}mlm%9as4;+|PFiO1fa7ESw3wv!6V|*con+rKbm!#;><1Pro01vnXWw$s_k{W*3*x-tfmH__5}j zwfH1F$pCF7=x4%Q0q3&FPobxQT8S$TD7EdN)fVkA4$f1fFsvKJ&ZoJSOxa)l?6Qzc z{2-NtvUoMem?^t68Cge7v;}J=t3?S?Po$GSm%h6#-&tb?Y^+L#5##YsDvaIi=i`UqPRhl*1???(84 zsgyTfS5Ta-`^Uk%C+h!t)rieY8YE&p04t%Jr1KxAC!m|T7p*$z|6CxcyPE^gajO5K zk%k@6;b^5GhM#!vdc$SyQd`5EOM5x0PBwl`UeH}@RRkJ z-;i@0)p{7=Qhvv0PJ@~*t(n$ti@!=_@O|kxJblk_Luj#U zqS5zE=@0aODM54-GW5?n>D7gZ(iTkU94hQ~eRn#9Dbkcw!Kksn5y`H~6Tomq@}0E( z7)2|=>LE0pR4e)+bmkPC_r4@`|Job}@Tv6~)*1JO!~q)?1`$8J$aSA*zI|893^vCO7t7vJWoR@d0wDs@J25B%F!dt%soq7~Q7EmI|E-vRB2n zqMT5*NQc)}|0wzh>utp$Ec;pe@tSy{?fp{JTH>yz@ufsA_PEAKZF^g?@YTJbzoJ-V zpm{56g)RzZ0ktuKw`;l5y4jIc;+fgdyvqdu$;!++t@Pj3--1=VJOnMpu_r zzWfZgGI8FJ;NOWYUHKr%xK zh%1AzC_^9o;E&CZ<`3PZk4nCM*NUl&)-~V7gtyrpZd=8j-I;cRB6ru+{WJFEGkW$h zQoN)h@<0s2AvdLAjB}NBq~TuWudy;Mh;#JoSZ7QW7CjyMp|+}eBG?&!h@GetM=4t) zc5ih;q(hWExRUJzQp}ZCzq6|?#x6GQ4#0%OPsa{hm%~6xXvWu0Wuf8kIojNOF;6*+ zKqCkRyX(N@v)`P4=8lZNbvXE$4}f^EoO^KA($fCu?SAo&3WpDUbavNSTenp(3UUtl zKe&`CGJ0EI(y2vMyd(aqeG_9nCQ0W;qhXFH4^Z*iKZ@3*B1x>`TOGy`mRT$mU!R*zBXnq z*5%6+|5ab@d~1&cg~zImyq$2bv*stQQbMq(^kBz9M+5QO!S}8q5(o-pIDbc0Is4_J zb6PV@HG*&YK-P;YS(tou52{>HODSsv6gHENL*XiND*ioQHWl^{->_~L`rDeVv0mRf zuo{8$)c^$@pZ4kUM(Y&3h#j2)9Qkwv<4m-s?CgNlTf}7>O_aZQZi-JAfHy=ccp0lz zq4}f}H)3bxSsRsswTHidz%OFU_wJ80ng@mL);BncB&Dm z(ZgHBBQDIhiI+%8-d+nN(QNqgY}IUE{9r*|O}vNj^=aS`YFqv;*vbH6h>PSxeu 
z_Ok55Eaa*>VrG;5YQ#2W7J7CW+4Yv!%f#sNt}ON{->;ytYgl`)DCiA)sEh4eX^HnR zxwkOY_2H^lDMZgoi1pd6&(^nnTh8*XeSYtHj`Rg&dB5Wb@PlWMoObO~%`=N0`SH%{ zj+Pht%vP^U#BPgPp0Oz%_7{gnu3WEJ9{HXSr!wRoPk;JcYYP=-y+&>1YgXw^jo)GD z>)@Mrt+q_Y6;`EhwtV{^yf0f{s^G5tZ38g5>TzYZ^ltm!=DUQY++|*Y-~#yQc{5Lp zRr#ezu!u8oQMKct{;}q)=XvyD(}W%mrG2k>LU?2U&g7$r7B!1Kv)hH`YLRO8d|Xx~ z+S=BZ_wwTD;pUb-IB2Yu_Q!j9p-MH{a^^yFCZ-$3LV3#d4?85i@sEE-U z)#&W%DsJ!!-H5pgq5vO)1M?Rdp?knTIg>td=T+tmHoc)y^|_H39koDvX+7{-ntnMm zB!d9$xC`Ic#{3?kN0^T-%d{TAv4!=*%*XqOLfYiH$<1EgTD!pTy&-7a9yo5O*wuk^ zNM-ZwonbMFsLko*zAM>u{qIe63cdB<7qP1W`KQ3P4n$yB(cjBHs5`izkxJ!}pM@zs z@f@&CI|T#*I^Ignk8jluLPw!f5!&Nys9YkNfxJzI>zMOF9Q-X;iNETb^p_^!MG-ER zv3tPc&R2QE*m1*&)Hs>2y4`VsMucnHR7Rc->U`H@fUcGQJy}SSLK19vPZu_>Qc#;2{@w`>Rq6hQhGy9%jLrSMTR==R(+wIR3geXt zHTnv_Ae$&;vVQQq!pSTV9B|A1PEs7aHz&{2@Y4^$kxOHha{QcJG1DfjrZvv-WmyD5 zr8MYQ(P;$wJCTe(F2+oz@SfwOpEP0b}4$GIPgl;X> zXL(avVu3~E{k1S=QK4oO`AQ}7xEX1WD{E3lL5@ygq@|f^XMx|iSn@JplO_NyIWM8n zog1~0pxdm~pwadf$)bQIr-KB~EK6qqp>KKxGvww+js){Q}FNka@Z-B?+voR*0^GxTn z3_J?@Ho)%j@1y!lGk`)VcfR zqyH*%e4?DsljbP}P9L6D-qbRwAC`|lUQQHGsj^=G*>Lu@Z(QuJrky-(Jo5RjzTdj~ z1en47FK(3W+%d{S%fx>c?=IEy8tv$lVZ49|>6mmK@Q|~rN4IKw(3LA#c4i7keCLb& z=Job+u_KWn;CkSneUL9WaVVEw(X!=*H(|FS`PP-^9)H)m_*8-oLw;BP`sGs@HZ=@? 
zMnxBDU-)JZdh4YQ9gMrrn~LGYd!J^Wih&j*s>{))>`XRelWT02{Qd*TO4vp1qJ;mJ zHuufuyUe_{VcV|G0(N{t8#Pr`giDI&^;q^P_ciz{jrV*y)(aDN2BK_<#EF+6Eytfi zk#8kPjKhT#dt1()b^E}&y&>M8wwS#k{n$HrAO6ehY-ygH*9+f+JR9!?8D<~&UQC`P zYkM$mLf)FMctWx7iRVlkeg2FH?%KZ)pEtl_DVA*dh%e8KaNhi&Qi_--_0EL{EQ=>= z@9AL(f7KZ81<$6gN$0ao%FX%vfq^z}ONofD)2(-NxWvvbLNrJ^?p+2;oa9n8Wc_^% zylcO`7#bio4+HD86$+eGbO24dlT|tItXfW}m6sE>^NI3rfACTYo(4qNd<%+Dy(IgF!AMi_|##_ zbfV>1mhh+D4Z>4zP7Y*Li77256+5MVYn8qsCbVabEN$PtFf|cpIH5b@KJS$l+%eKC zs1>NoCrA8~?-o-@{7;M5U1;`DYT+=08t^I7aIC$)Hs3opOysgP^K2q|G+en1v|0^) zGgw=F-Hf;$ju|vgIzdi)We~jmk2rA(c`r$+aP;k<=1gYhLCHPF^SJKq%Xuch8Qnd5 z%K<8Hrh@DK`)z-cezXvUu*(0ex5{`pTV$az*$kR++&usR^7A{Xku-{T>L!WRT}k@)vVjo7b$w%l7-J{cY6FsZfJIxSBq7@Gjh zljK>HLwpDXYV~zi3~Y1E>Kj)sMql+9WN$eclRT}p(ljcBpIOMi!enG*gM)95j2Q<%znAYC)pLS4* z`-eklcmJ55th$1+xstISk*lH`HaHlQ%6< zw|?U$)99zODGLylV;@($S+L%@tEu;oMYD-2pce++N0|_#F&glu!_yb25r$MUFCd1Ia0g|s`&+>2X zmbqL1XO;!Z#~C)oh_IAB>o!Q-4qZH#pG%s;9}$3Dj2=W3w*OlGJeiq$o?~Vsv5c$6 zlaHav{au=3k-cytE>g9K730(WQW#o#S zbgkKg4aZ~W9A@42IBNAGif$%KwpH;c2U_OQ;8C6k_#esUVd(SA*E*ofkt_6P&`-V^ z-Jb}ny!J8WcE(uw%AD7T5w1BN^`2%*58Bs0Jj9TNApQ^igviuS0VwGo^Bkw|F9!$- zjbezZUxVc2s0oIc08DcqbuNv?*FNZxuHR;B!pRkeF65I_!IQeHhEM zSW_W#9O9Jf=K;T#2u>i?BLTb#v?3<)>GXHBJdECdu3jYRery}RlJqw#vj|q5Axa+~ zDU2$p5dQVpni&Pzh#zW5+j!fk0#g_nrZEt7uTie)Ogm9V2Yw+-0)o`~$gn9Zx~u#^ z`XC!E^Q|!woeWclZP?IT1>O}jiNo~oL4>|S)f`i_}Tupo0tG(G#{1S9!42i)yu0>+ERJIBIDF$)Ie| z_&RA4@s9~&z#SlVStw>6B?$i}7^oAg)GUah3_ zx`2S3?Imm70ymTB3gtm(aD06>>J9JVL-YZW2BQi=>Ca6Dnsa3p$9cu?_ziXzrK&Jo zH|im4x9xXx2c`FVtLtpE!8EBeP|KHhh73af4kt8%{6Zv3Hz?ie9SP;rtgP3<*Z@Xzs2)3d6C|H?yr;M9G; z7GxALskgcfGMR5YU@crc@4l41t_`X3wyuD~UAdgIB+U3?24}-rbpEG80F$??-t$Q> zB=*&HYI%3(QhfOU^C;uxJXI5Qnl}j4MYW;y*2#|-r&CUT##?}O>E28thUC%anm9tU z*qhmcl=1q$@>8lRJn)lR`B=+jB3wU%5E78hwqL>RwVY;CXeR9!HE*B0eg3r!j{g7} zrN`6wB|Gv#06kh~S%Xh9^7+Z{R;YeF5A9XytQS=@^mqP|{kOhlwj&TUBt|!FR~$<+ z%<#E5pmKDip&v0}Z`i+cTFA^oQs1Q<<&3nz58daW`CLFKb2atwm%c`}n#K6(LeAe{ z2Luf}nXg~mH38@1IMKtI3d7^dxu3aM4}LaHj7zex+|`(%I|7v{vZOp7=j_5kNMVKj 
zYT(tXtm@l$S$GLRLp}b(Y&_`7M0(FIwq{4R8kmDIQP1*_BbJfYHb1^a#PV zlJDd`>vSTbqh4bl5)NDOReZnCQxsMIU&PX1*EyD+_Gs0oj1b=4~lSYgZHSuv}|RtlXgA@ ztpAyA7&DckQ6*_)pVI0V#Olru+~d`Og_I#ID z%*iih-+iAf%>-YfI-Y_f_xPG7JMfZh*SI}Tu06BjEoNQzF*wNEX0hR(L1mNW0LMr;xrlZlT*Y;{=pl_RC~yoLn4f;j|%)_O{51^ zxKOBdW?C#lUgBDZNXJqo{I@iI#*Oocv418&nTCEI=TAfq&sUWN`D)G$jalz0bgc+n zFI5r!e^BRtg@KuM;!yybbgVM%jP#Dh;}?5peC+^b4|2GMSVw3Nghs3{Tu9lVReR~! zxq&tN`}1Zx*O!!U1t$Z6-?{RVUmGKfCc<u4D(QR|?5jEv8D7eowud5MTfPr!tUT zvZfGpDZKYzJI}poIH`8~ zjN72Vu;N&zHdIhTD_|SaBo8LNuu{d1xJBcPMEHQjL zGX^@_RW5kXr$&QW>-dLNzOMlv#~aT_mztV^!3(dsvUA#m9&>_Utlb*xqwLx+i2G%6Mqr>{BNB*q^>~^tK!s(}58X^@ zL=3yoyAxGekf-3h@QZ7|GVNC~sLmWjiGHx9Fl~?PokxND_O5HxlNnY89FLJowREQ` zAX`nu_85P_(&^J#Dgr_A0!01;7t;dVlBhm{Bq*2?bmHVYCQK>=iYlzFOTXCfxwNo) zd}mJPTEx5QKgoFN-)IMW)e=39yKJ_dgnp$JW4!a?r2Ku5s^A@HA;cv`IYXF_}90U+|k{ zndmTd{E!FL!jgbnW^xM{&LqL-xwGwzLL|nsay%u~-{W$}IbQ%B0`f6kA5y;VRe@C8 zWUV1DwjYO+n$}t$@m(*fUgZ9t@#&-06`puE+2i|MBt&LOQgSJ~;hyOl?;DLbNS9=z z>#?=ocE@G6lQ|3)_}pH{{_$rx#M);-ak7(raX#f@TrBG;@$v($eM`ie^?IacfFaG_$UH&B;;QlUq^N%B#!gH1% zd6wO}$O7DYcE+!${jqk*KEBA+gPvi&rtPCI@M zd!o_!{#L^Vjl}tXGj5_ehy=S7L-ufX84wL8aCc}1`W^rtJ2~@ zlqWL0^7Y;~!fBHi7BBbYpGG!~=N;pduD2dsQ-gsyG2dqg(`OE1Zm-9sn_}v8q8HB1 zfIZI=2h08c4jNmB(6@7x$Su+fpr$x0dlgK%+t0URiT|=n&*h%7jx?8-*bFKUH#7Q~ z36S3U^(=>(N%lwoge{mOXa=yGvUbE2M9`4|{>aN36qvFz2 zurcLrO*g$K8k9mk*l}9me<& zWYTGZ-7>f!G8!QS#uC@ZwATcM0YT8hg{%sCyktX&EJNs;E)SqX60027_@Lr?jw1b zA3uV5T7seetlB%V=*OUxucO##u%X!*N#pIyVjCI3hJRgD9W!^+ zwv+R&$gw})SWMAunyqVFsCwj^x>d-D#BA5EhBrmdv&$E*Q6F6XXlM0(jZg~sV9}gM z7K0|wYu&y9*TMDxb~(0NdY`9JeVw=0NgtoL$Hzy@u8l6i%xK%^PG13M*jEE$`FPZW zv}8iLP`Iz#eP7~`g0GJr$=^Kz&=}XV!57`}|1QRKMN8}ix;O-%ZN4K*G8FCPo@Bnf z|1Pbe^P^R#MgPyKO4R@7Pd~m_&F7J;dvD|lhuI+H~XQDlqPJoVz}4vY9mgJr#jT|9&_ z=?a=};i?-|;O0)`ekVcn*+Ir=aeCSM+SeTe814*@Iw8WME>%HP z&VlewsuW;ceVm};Rnpdq4zU(P%ca|F2&OJqyT{weps}QpL`8JoMHPlVex~ar>laR1 zX@*Yk51%*1k6lI1hlhuz$jE%7rpa)4gyX zFY*#CL(AnZ40($)*aa`F0Wa48O-~5hW_EqYr;aUBuN{nn%ow{mg2R1-tx~FAzg%t^ 
zX&n1jjl@A*6dpRXFWfI|`?R2je!GFY#A78E6nHBUarB;Z2{G!xWx97l6sA0`=AKM5p|PW8wO@!Q?O?@U$1PtWx%YTLGgrQ z8&*R{O2g}%?!Ec?ZdwIJ6fcxR6@s;-OF|Ocl%cm4?JKAOf_SAfpKWVwt(aS}KpI4@ zx0RwvE$(4Odg&G^$xf0zv-?Y9Dm&45Y*K zn{o5eC|j)OKcj<#p=GWY>ucbRjST_YCR^mtUd^J}qq0j_zkfG{a}{Cv#g8V>Ed2d& z-YG{AKBatbEbnu#J3$xB-I;jbGzS$YM>TwAP|m4{K3ARTF(yT}wc)8Ewcc&j{x1yq z53r0814&*z53y&@snql@8r#}Fqp|9t?QM1T=r-O5+h;qyEh-m}nCymH_DA0)`-sS& z6!HW(rJZ}4W86sMwp&X+Ne^g=V&yRZfnkY3faGc?QKHyJ)k}`PdjcNc(!4<^MoYN@ zMM7zLgI)r>!kYn7k!qU|KIY`Ajn_ds>LWO4kD16lmN;(FpQfD=r3)o`Ai2;Q_qesP ztF@KaDY9RcS{n;B+$4BAvUh8*rsA#_J;exB{dA`XtMlq)9;pI(paOdbG zoE(EpA<+A;sDu5C4?JWEemvv0YIc}n_}6;z@ZS_VqB>OWj5&9pKz|nT*+J%HY(Jqu z#~+DfUVyl+_@q3$k(cRpsfm!~2J0I)SJzeaPP!NMXBJujN6o)5uik!#-lK(sOpDQt zHAEzvWvmSzVFhN&_p|ehJ|tb<5@4x{Uc|1-&DnrJW?jx$3d z1x+3-8>)CoKu~~V0<0LmbAd{tnp-wVEDy`gDw3IT`Ersbu+OD~zP`LTK-efsTpI10 zlU{9&NNq(=Cx_D?JjX*sd^|Djz{^v#kM}vdb=*sbSWigDU2j5neZ8mfq(NfGD%Ksm zZL>HcK?kh->oJlAW5M+)4m+L3YS+lC$CyaWi{WIBoR$C@q9izbf!jU;v_S$?Eo5OZ z4@or{*Y9!^N|j7XNp#9LYWScl^uI(6=A=LFM4?huLAgJRlENi82tu+oMB~K^kMODY zMEWW901_a0^y>EYX2yJLnq8%M638cG70CQ>VhjvpaqH5jn&msnRcfb6=bWPt~Yt!N&kK(gZ{J4vHup3Q+2ymA|yv%UHS3|po%iX;z-Juc?L~d;` z>jyAH`9Q&47623t&rsZMaUbkKZStpUnz7J7Eo_L6tjwN&h4$4VQk=1;sXb-AH8nd! 
z0jbGK;`7o}DB^(TsnQX}dtuR56_oL65LfDS8-2^R8oQcuWJz^*Mv869}(YnkP18Re@;(Jo*l{+nmL z1-FyEsHde#rP3UnAz(oVlI(v7!IiOFG&5pH%>w6+*v1jhh&1q1m7u8@6T?L$=;d~I zdwXlzwBhT`pVWvt=>M&Lt`Wg3ltOvpk(NaMKDRI+OH$mQBv`*;_mv(?sTE z9VL8&@VJflOSYb6Ct4_*4;cMcW(}e)_`fO0|IbTq!hVNc3EU==>0~JswVmDE-|fuK zYKFdTo$MUlMiHEr(Xy%!#O}dfIgTevQ>P}*fR&rInM4$uR&b?bQTuDq(!H>@ks*+NlG@z)y!oAbA7) z_YsC7E*;V;67tTfs*1q0Om3cLLfA?MCgh;Sj$jj^ZX>(;!*EQ@b|5GtpiZoh4XDJ2 z79o#rgI^$gWA{L<9xZAu%18x73Xygdu>sD*P+ZA?&-%t$V~3SF0G0}B#}sR8z;K|N zWiBgB!KM$oj4Jgw+CF*#DixKj!1@{VbR+DW5h)b`_t^?`CgO~k)ux#7W{%aN;q(lI zZ37uA%t1UG%*kcI;(@m ziP!nGn9dn}a`S#ziA=SEuej^A=|!}(mbrIUNIT~@C6ZRTYEUvk>O4w5jm!8{S^8Uq zTPgiXjzSrp&7Z_x|F{)@E=NY4h8m+F;*2=1oN@*f_EM%r=ExC@QedFQs}0gs@DnCQ z3z1Y~5W{dtJ}Jd15mP`Z5G4z2+3n(w?^mqcf<-gz>+f_|qO*EUNE?p9K@`Vu&}-sY zgh9RKp;dS7#0R}VX=V?W|QCvFZkdL1JYE+;DfIiYH_ zQWW*t7USpqb0HrXV;!j2zOEYi*v$M`zrC?UDR4AR*Z*BT0$xSncR#hoBzOETn`ZNV z7@J>Wdb}go&USpe!>Lqka*#AZ-C3lEg@Q6l|4a_p61fajta7IUiRmo_pilUI($eG* zGYr^ZkR+UBv?%jZT_&~{%aNk)ZUposgo%>a8}G-8dYT1rO^(4m2T(YL zFf$DxN)m=a6Lbf+IRNC#-vt{IZ{LUC>r@av9wN67vWO@UfFMiqkO2P$4>I?VFAGRM zl4bei{ZEhlTW8L1N2fQjno+Q?EBkzE=}_#jH@f6C%e%B%=7a5<6q0({F{@FM=r z7um!K4?@Vi+A_~-$^w?Ze{ZAfokLlW;dW<90&(-L$;CB-k6wuN$UVb37(G{w&2mG2GEv`* zCHk%#Gg~gD+u_9{QNU3^4F^CWOwBX(cKo(`iu9atk9|*)vzc+RnDF>3`M_0Jhj22A)({Vo(CbXRZoUnE4_tH} zlTlA>iy7XwS9YiNU3{r;p$w<~lDXpK@Gwk@UWyv$sX`!05$NPDgrvs;c6a~Q5{)8p*EMcfZQIObnSkGsTjl95ySLn1i8 z#6%*RJxW;qYUv_Dk56clrRzn8kv|cpA~|;=9+9hK1f>iggqk3G0>~saZ~@PZ$q=xT zkYOj}=C7!W{0dc(kclnGwM<9f4xb4j``pY9bxocqu81S62S4VGGaZ_BPgGOswjKSD z$@pA0MXX|u-@=ksnWRHqi*A^Tx2EAJtm@UraPgiJ(`fQh zy=U1R>;V03n6hsq(~5oE-|E692ri#`LkJicP!c(n; zZObcK^glCnH~{*5ejyn)%Qt3>8_e(tUQ+B`?jIJWVBsV<6)ldR!F?hX5?E10!QW=Z zV`cJlgkxFb(*I5~B#JM|aSn{(CcaAo&r2uktm)@I9fz)&;kW0=U4rwPK)aGu>LfHE zHu;?nw+K+B7_g6(j~7^gVl33tCd9Tb#>R8CaFI%9I5=;DF*mcchURKLr0zqT@nNCQ>?068_&z1`;{1=>3OlDJ2lSA4bV%zijWwsg)NW zX|e6?ecqw=Zr|M?Mj3h6rpUanl+@{Z);FZEJLzjS-gRBYxT~#m{Vo)^9IDY zg++Mcv+48FWWB2QwA_O63CahiTCxXL;e04*Fu#`CPDFZZ7%EU-<}>f+?;LDl_BucG^# 
z{0|3w^#b#VHkZmVF@An^7FrT?!9Ft>7%@hS@cbAw*6Tg=wFJ3`(LEasX6j?3->%;A_vdUkCp_96vpp=kK2%1^pw<0M6NfKyK?%a**8EBuL&~!)vB+@F1YEgh`U_YU;D`OH@ zlz7f`90@!{R(roJsq~)8zugBN^MO}qPa`!_xqiRtxPHXX-q8r_d@r{7G2Vh59Zhv6 znLRY+h?0%?fX6wd5&Guvm%DU@?*|N1IbvXj=9_lbdVQ|?)%t4v#!P&ui%RY_`mHxj zJg*0ssx;R!OxFWWn}?6%i>|jQ+wqNOKHrscJkC~VvY#gHXH*9m1?hO@7x-~F-3^I4 zTySzJ1OKQgmal`a%`98e@!&~Q;uFLTWo)6a)$*%1M@UUxMrlLr5m9Yq{@e_(M7GPT zMOQ(JMC3wh&o9KriqKog#StgW=SxCu9Et^v=#olPh@w-v)Aku{YA{kcpaW1d3}zxI zIero5R{yi}LPakb;-sKn(JCRQfWnEby(W>F6tTdja}Gez&=o8zQDiQu_D7zv(P>|p z)1;{6`0aR3!BTU;d6U|tcax#xKva4{z#d9MFmO?La2NBM^HTM`=nHnk<2cP;DfI8p z%~?(3cn#k0Wz#ZFeOO*68BbIeuR7DT&+-Ry-s`s&452ReaJ_29`p&c@=1dK$8`8 zecbG{TCd0ljt^jiFXfxJb2m;EZcTjys-hvhoNI3_tL@mhe`V3rKRI%7n^!>WlqYRp z?%4*;h%LA>bJLuNJLk%XHZ{aKIIOh)@;$qC;=ri^xKUt9Ybt;z#AR^!& zQh|8xv%&IF^xD>te_Av~zvrtQWa0@NQy#}}1s!>$O!WE3^!Q1>Ue5K+O15TO#M32= zi$DPZXg*(9mv_h%{5$O$`yqJ2&`m|mSTlC+U}K?RM~}gNyWWsdf^S;vK?qXKyMzIn z5hwU6gH($S>kd~gjKJc;=NKbwR!$xTa8+L%Ew8|QMnYe1wGXKr;7$+dN*kZa9IHL6 zD}nBUZf%iYs?cT^JNK1knp@%o4=(hxGZNse2Jfe`m0N6MRa2{6dg4@56b{y8?iKG~ zdWG8Kjg+@c>kGm)?kBKl7nM@G?iyL|Q)B*F0))?!>|y>EJZCkBGntiFqnB)=IBsg2 z_F3TgrGFFSlHL<2{}s2AdORS~t+!#{lBW?_Bmb++9((dI#(z2`ZX#rISJ~)!F6@B! zcswG?mP($sA(dgHFg1vnu<|kYuxCUeCTOE7ztYvPp$QwyY|z0eh4^V`m{f zi!@AA=z#y-bNT#xl*jwk2f;{77QMT}h}pZ|otyCzBF>3vkH2p>EIxt`4_?>y^gFsI zZqD4Q_C3dK#mK_jjymzwU7S)saMFxdnJ*wn6Q zX1Cr>EN5|zF|AiCTPs&DHJf~22S2i6P6Wfyjae!dg!_|1a0HcAI4iY4XR#q=o91?ebEG2FrO z+E#|x(A@Y)tU;a9T?phf>BHW}KPX^~TLls8Yp9xPkhl`ZLdr%4{Ff0EZD6FT%JcXt znX2fgIozRWq!1JcJ{GQRziU#80c`%ruykjsNH-#mN7JI!7#U1cVptLc5${yw%}5{? 
zN5>t(0t_ThLuD+)e-;$eQx2bWOUFj=QM4>0L>i9FkSkTA2DW0s_g9E(@}%LV4PjS9 z=h()E3=ki1Uz$?AKXvap3o9~SkEY`W=`LZ=~4~ zXsPt#V2r|qwsgGP@bxw6M!h1aF)(tdvjUZ3zmX+c8G*thR8e+t>xvOI_?=T>c^ae` z;qJ)M--?+_=}EAoMNAI9=dDDcyTr@byXm>0!;y<o@5(deXOayx_Bu>+``r#f-Lc=KNz0$+60e|ths$- zZjvvJvP6_j1n#Y4; zNeL1*EKh=-NWKWBRWC5W*Hs&V+apN34<&ESfDF%8S1FF~f{$Cz+_TQzucrFV=*g-H zv#MoF1J*xAf?(8!kdK_B$79y4Fx!-P8mpODwD5B*2xpSIRQS)^8gaot^H-%YCz6+= zIFA~1gPa3SmJFC``9J%l|9zl8KW-03g@zVUBW~}kcW{Trcz3PaxN`Lx2mmZ4AV2Rkm6*}HkLV9M z7ib2Y}r{iAb-Ar)42TA&mZ*FGe|kdE36fk@X{vi zg%`x1j7j%7cnKqfAEEW$BM1>se@@-?a*Q75b3$4+x zG(n2nTr5Zgs(%NmG1TDhf`%R~MGeTQ`@OSwd&Qajc^H#RhXX7oM8=>$f!C%QRtr zcCv|=P`h3K)iYvMoQ(39k-D4AkYu4ODabK}NclX#9Bl+6(vutbryV`b`ukl)kMZi% z@IjP+$5-(6|Hs!`2F1BH-NH$b;BLVKgS!O{1ZQvt5AN>n4#8p2;4-+oLvV-S?(XjP zv7h&x_t}5WS2b1lpSz}Js;+f)_v*E}n>CzaHQJ}fUmXlQ{aM^{l%^siLG3J&VXRJ! zgP3b2L(+|13SUDgDT=7!zeIn92T85!S|BYZGC3c3$iP-P_0hu$O~u_<*jsTL`=We5 zaWs@BRiKrmO2daK{-}(h;BdF~9GuWv9G3L~1S44}i49FdJp-WyW92zfX^U!x0Jx=S zO9xQ4==EsRI6+QCN>ym##kd1R8?|ICtbH0oVgZu3#DcR(5hAfEy>H!DsOP*(v|+4b z6N2BW)YH0TyKfZLzRp)DF~-d?{Q^s<7ub+V#NzJc1~2u_{#YYh;_8aEdY&e=(#R@+ zJuiZ}>PdJl+G@RV_HVo@f)}A#)vMiW!aDJgmDzhbw^aZR1&(*-PsG)#Y<+;&8=Wy2 zopYc0vdnJ!t&C+3*?ls8flCf=*ZFLHnM33H25VLdSq7?1A;c=>`G#7czcY&a(VMN<}lTun=O+#r7ejVB*kzp;zc+R_!!PG=nq@Y?1*W37LxKS@Q`df zDuX&e!oBpE8PQl_fe#8>>pPTE{PFf*$Tl;DJvLB3e_Jmk`J`&hfPovygkRQCkBaSg zt&CoL**261MUh_$GFO1L>0;;a89m<;QK->L->Q>oH%G0-9scRzw!%-cj(jVcbY=f^ z1bR`QpVwPzrLdmXlXtABhF;laP>nc>La8~Bi&z9Yg|J@WFYX&Nu%yy(ynN=|FRJeQ zc{BIq`l?o4zGf4MOo~$!8JDLe{J+}^pO(r{#R<1Qk^{|^z>8&BPH+e-yP=xl^+ph-4x?z3{57_ad$U|mBba^GL ztIQ*+KifRyQX?JQ-6@Ll&1TcYD2iUJJAzu9qmTH6a5Cko+v$^)wh;9t;6Q$%)H~P^ zLV0;GPq;&#ROc7a^oIcD6v!%$ux9uGHa)%NPb9lG>Jd!D1X6laLM2R&@i8+MAOa00 zl74dP}lkDjw!+6M|M~VvgHSOr5USqig_%Up1j^D1X@v5gVH*eG~>uAe2 zHSWEWK*tz2GyUf;>o#3ESKlOr)QFqA+SqVe`I$hGd38uKo)$Av@?EQ>^s5Fza5Q;< zR0%pe7w%as-tP#D13{S7Nt-bT5<1B(@`B@N92PxhLMGIz!>nok<^YALu7R%jTrY)H zrC`(-V}-xJ6dW}SdmD~(oAIId?mqisaNQt$H{0Ueufs!3E<0+P{CjBVM|wk 
z|Iq~{bP}N8Y<12yhC@0<`2#HnVh|(}90vF(O)JkKhyZPe0B%+S#`|9rz>f=d^2V`a z`Z}!q5sArxe8V2(b@Nf;zO8-=4iw^MoVV4al#Z)RidT^?W>Me{^FPEuSou2`@+@zC z&pWLQ_SBoWrjV4S6nR*NVclw)9jDjAo~k>-~SU}k?n5zisjYb;2f#t|I45^D=i;p38u0>6gumN@cJ z3A*-~e@HrMAp}g)3x|rx2@)%!l}&8x#Mx&842*q(L-FvjrV6j89F2YSx%VYxtOt?@ z%^= z`Q*8G3-6U{20(T@PKa18EG0`YK0^0$qklw4E2tQ1Q^g4DcT)*FYg>pHhIvHUVdn+Y z&ApRUBHe94lypEE7RmotQOu~@akt?M1P=8dy&s{i3s-}(rI=i7S1{K5=ZkoPv2>bR zdtwX?zp^>=2l-&AaQId|Z-FEf1p_rrH!v3g1@U`G4hrq-|65x@t0VTrE;L*BzKnu+mKv?K^?60cwrlqQLB z_5=Ncj(iAY_)qCAJMGB;vIAaa1cxX~wvS!Z0;G3P@~i)s-e$J(_#muZm5fG?K(s61 zrBU^hp05=pa7OmnMHWaKh!Bl3B84Ibo)+jnxNDTwR`*vBJY<(H2K9ez^N){SH*-6J zE$A~IS;131`%5RmU7e=!$_M9Gg%@FiQ6(7sqDcBf`s29KkfaP_pb+nbY%JVu)KQP5 z(b#-jsAG%-q2LKEjLNV_bWs=8C&73b#Ea+O*H0J;5auM0>isk+4#tRQ82c$!@RcMe zGTVEi%Wk2|yDf`h54EiGPRn4GiTBETW#TCnyH$hzejCo^*u)u%s$8*U<)gD^zo+dz zp}G*Qg+yNDSq-%2xs0r%pD`_|vB;juq76eGp$i0v*>e-ICK!!)frpnPL4zvETNex z8Ynb9r_7^HdiiZCx&wg@+*!>6n+5j7=1%lQ3eJ6S2P<`m@RcH;&Xz4O5_ldXEi>V} zc37Xk4+V z_y=}R-o6Ey0;nDpj_o8wDXz<&^HN3XQbllksQ;T&OPWY)D=$a(+J}WX`RnhDwF?>06!HQIrrL;V7}8Yw++1O{pYL_m+S3121;R1U(fv*GG)%hZV9_4W44^f zK$Ner*wuDTs8EBjk}x#LQ7Zq$o-ZlUW92_|9)h`ZrO-6)fFNgAB9zyk9 zI=%K_zh`FiSfCCCIRyQ)9YvBB94CbG6w+NVo^z+yuEmC zFo8pKz=t#V0Pr*<2P9jG%4(`?j3zK6$;NUT*<+ZY1l=V>@!R*qo`8vc0vtR9qymeO zT|#Z~)%)&j7@a~;QkG9rpZJClF*zzS&lnkh3Yww_o5||FWJzz|t28g#=?Zh>DfB?& z-g`Bb4r@)IGO9h2lDR5ZQ$99j02G&^l#!f?pchR)Mb0r-%ymS%A&l!Psn!fOEJ@~y zQYxcMW;U@lh+s1?FZABmpIsDtOLq0!NP^j-H$Xsd(k9HDX5(~6 zGX_{WO+bG}n*hKBUp!)!G{c({S1;-%s(p&#xZCj+*;3~?jS)Ij#Paz|HkiX_zy!51 zjknlfwN1sv5UQo$7O{1oZSU|5fJlnn`eYyV!mE-r9`^14Tu{>k0T()iy2_sH>d!Cozv5%>i@eW5K`HT>{%b*5 zrMR9p+y;LZkl=tCaGN5}?MgHVwvn4$y!PZ(JHcmWdh2dx=MTj!<0Uv}Net~R z=A$h~d2V;cR;w?^^;g4kaay=$PZe*LC$Fu;XKPP%9eXtr?X61X2e)r7KG*F_#49ya zZZhxtz={s<|DwKxBHj#GElvD%5Lx=Bi`%V~q#-zCT^6D>h!#9Dhj0Z*&=N?jO(ccrppX@sl?ENr&x$4Mm}&zVjCnD zMkp+kt0NOQKWNov{HU+)>ndWK&Lq5NC!|M_gL0o$8Qz;bw?j^&e_q&Ff1L=?k-3?P zI}@`L8fT|^YItoBu&Y|RA*x6$nJ!UA^tsV_4BLHr;=xh==NhniyO`XI$lh=LE>j~G 
zVN04p67MlK)Msx_ME{601!bZt0Q3$;JrZLVqgny|ufRBL{G*rLR>#_Hjnd=CzM@A@ zwSPr;fnbvlZ?8V*BzkLi8GxOcoL2BOZ0qC1h3oK|O=+FQo@wV&!`Afzmi~+zweTvbcExfMz%XW>Q^n2;$Yb)D+vcCg>ggDhoFc3;Y>H{6ye=TI1cB;!YmmGpHgn;9_? z%Z@QwJxCNoh_^0WsFOnwC<^_R9)NWawx8%T zX@RKi{kFk#IT^2udpLz#X)$SVoo}&raa?&73E%obgzrAZejdqkt=NvbX6xR45bV>! zgH+4&uKd&3CzQG3*-Lq^_-Kk2d<|<-iZCW1gh77G%ourTAA!~hmwL&!1_8}+m~~%E zfC@@cVFcppf6&5ZbA;qg^9>EaGyKIr9Ox?ykZhyZq%jC{E=XNNb5H@l0`m^Zn@Xz^ zw`0--Iq@Lb8|)0-fAP2+k@*<$JuQ*HU%EtVUT47F{5i;&3W<5%V}PFgD1E$HSa<;@ zG$Fx%Fr4JSGUX8he<#!BnVpr^Dep$mQtr>TCE1jW;$PmwgaTu*Wh&>7;)EZhsIGHBY2Yu|!8#Bp2LF(@zQmPhx*cQ-{ZK zY@K!$aVOUM85QAWHjYw5rYtEE3r2)h2Bz8$myZP09CyWwgLP^;)` zaEGhZvQoYG2x{>v(88{pdiNcj*0I>1D$UHIBT=R?&cb^QJIgUwh$Yv~^2? zRiH-f|NV?~+#3x)_aeo4WAUuXT8lv7!}`Zh4HjuJNyw<>^X*i;Hf@W9OYTR$t~%3L zj+cKvN!LgQ%8t#iLQ_DBpms%aDa3_fAy@>!GOdlRD!g5|C0t1Hm2(7wy+Qq6wd{s8 zkyL;fEw8uwsVc4Lpaq#*S59ssPz``pGxS=D*#ZWbBt6o^da8^}E5JGF*|qKLV}r?z z%UX>KiKTac!!!~*e(!WYTi?y@$0YuHI_NqxYH_sR7u!Nm>fux!LN%7j8}L%yJJrll zFq*^N_HHX!sSd4IqH^gJr2CHwXX%j6tH7Cxn#@$!4H%8&!`pAw5@kK|{ zVMAzR1I}TF?)F(T7br8O>`o$`oI7_KR!MH>P@|*!^R05 znzo&p_4Zy2v7gW4xeo7~x5M99U#%i zXYXCJX8thA22a1r;Ny*9=H-iwO)UiDY|Pu5BNHvb%9d@6!`52`phjkgjQxSCxcW#{ z9Y}@F7kZ8K`^N@>oZPoC!BwWbeSqI4JZaUDtmaOA8ZlSfopblCL${GbuTkyxrTTp< zOXK>Pr-|N-h9+uMTyIHdS0~U`nN9znOr7G9&X1rkUh>INf73=}5iZ^G`sa{hXmd2nMFQ zI$~&VAeOr&84FSmc0cg23ev{)f9S32yY&R+e!=g_xQZKZvc5e|h~}c?vh_|*y!g{` zrV+3t9lzPkzi?S2A*av1crQgJG^BsCcMQ~SIZsD?rNw|e*SUY%Z_qR*V9ewwpv!dl z?Ej9AEmn;^*6ihy3ibwQ>9f294}1Cc zoR7QN4FQaI(Yj(|8$@T|z7c&M{=x~i@xO}Q*?+qic|dXgzVWzz4$HvfAyh-IXmv;fBw9F zsobqz;O1R9QmC)_I?%G*nlGc)~h@) z#2l9x2vTJR!fQXJRs7+Eotu3SN$u>q(LGqiSoRIlSUrqtXulbZ`%WK=`aZy7FR;wv z?_alH-k`Xl&cWZL2{%6}XTpIp2;w#CMS!X#(9MfccPG%whhgHy*tuwc4 zvpgua&|Tr!EoQ6TWnDY=X#I|4%cek9Y3B@J$#4nFDnC;y{xgD}2jCu3>Y&>AEh&Rw zFYB;PUCQG!u_wpMrT&f59;Gtt5UyH@Tk)~>MI=4#S*Ul0v{OUcpSx=6vmg5)r$5|w z2lt-Zul2vZ^*d7Lr2>mqj7@-q93{(41vE97)PSQRD>S+EPT}Ffy$qxs<(zxj<9k`l zm+E(;6~VHbJyu1F#i#__va&Nn96+!gKxRnaCGoB%FDj(xpp^)6o>_V_UPwL)GArjb 
zDCcFLW7vz*b*N4w)Ve|Ow1;X@<{cd4c;;>L98WPND58h}H<0`N4OR5Z@fwBIz=ZpX z3DHj{Q~zA;H}Sh_MnwZZf{t`)J}`TQ+jf`wt^+;?HP(qn&?!3`Z1CAMHnytJWA;} zw|WCx54;%fl6{Be@Qon>YzoVXNI_A^m9CQ)dig(npfzWmgZCOfnfMKwgN6o~;1>v# zcv+o;rDS!t;QB0@^!2rQr{rJYGS|HcUE7CAJFBQE<0tv22L3;Y?2R@%CJ~w$v}{11O13U)>)&Z*5LhUi%*pMXJ|U;QdvN2yGZ%E0>&^G|j>)J0 zDyym0n((H85QYk&CS^JzLMg-Zd*7Wtwqr0C4X~+K_atSweC@7q!$m-2v)-RS* zT%^~&6+K`tvPFEjpU8w{t&^7O{b7e)$`9(*_v38fIh~auzxHm2U2hx70^l7St>xTq>oR_Y1ggBG9xivHL~lj_2hffh{x*l_c`*+ zC*M=WgvS`qkBc$dIpAqxRzdUe2G7nLlLb6A$4I$G&HCE)y16;2#@*^>btlI*)BX4C z`hBK`2cND0jV+lAqwa!SwS+A{kSAT^@rz*Cm~t*OHv(b~Rz!z_Xp9inJFS2$IqS7i z8FUkP-gh9D15lJFJ2C`8!KcD|lTwe!yj8|K+9_rnul8sq?1@lQgZ!8iOx2X!bBD{X z5DRrP_x0i$zgLf^p3DXAEXx}(ybi#wk}l4!haMgIS-CghE9ad57Ke&{ULt?HozHPQ zj=0FobSojE(8qf3NPevD0x=ygs|>a=tKtDcv7~jVRtJg$istAQ-^!YnYDZUw*c-z8 zmYsgNB(6IzO`WVZPj!M-H@B>@>on&g)75wx6!iQy`GY@>NI%8VmyxyYd5KWaq?tt) zRc?b4><+i&J0)A!eGCccJ949E<_FBN`U@s&wpTz4u)-=qzbn2_W7V+7B1H_gN#*AJVo9+FKw`%#2-i)9Hakw-%qt%ncX4KHdw-_AuMh6j*gE$` zK{1wQr!2?fwb+mSycd0pn8Ed+Io6{m(q?HK-0>gycq)ujbxj=rtbo5svGEP*Z-(#s|5YA#4Ei2n9!(4=yvdpumebfE-rcDbe;AB7nH_))>hx<3yyHAYhnfx||0tcvn zj~^)ni}_$@hR6bUo;MG01zmG~E6VvbIC{6oT5=Fo*ku4XkV-IdS6^+<=^p3h#=oBf zXufph%X`|vwMOy48H7Eiae*=zxQl5bLlFYVcyi@utLU&}Ln&bz9lFDzi=^WvC2f@) zEXKYgkIu7o1~Fmo1p$0mEz%=)t?~yn0t_Sk>Md-m_D61R#K4d=E+X@e^(XpadRm|; zc-lhNhP&-+EFn6Ts|CC#n$56w+13sVNvHyO__sKw?;Ve(YK~(zqbQ(sxPpV(e-3|CH6L&KjJo+_S zU3|}A#m}5^R!`EGgeZl%t27Uw+ql3=05><*U#K7x7x>ZYAmv zoEFVnXF3c7%@zc%Gi1vr?p*haXD3$Y&@_m1TnREfNZ6DoW8qebq@s!;#VWC^?*lL0 zHkGtxwfIYk%BJ;cI=+;6fL|eIs?-B26ZGJx(e&!+i=4s!C}k#i|?Zu!6{qD*w# z_eVYy4CfZCz};mYYY zm|@!aRJ$=!{HBDTU6+O9=5i%R>+RV5VYl8wrvm>fc=*&Qym3jx<$XP=51*gKfP0N0 zuB6oU%V*X&5vy8I7rQW2P(A=7@A64Tb$Xe5Ca|#Bc2*t1tE{zI$~-dg%Ibt7j@r2X z2km~RgCR|^yA``eEDSpRx2;YW$kv2N4E}JCJkqKD5`FFTxo54aGYv~q_p(onBq$lL zBpJ?vJb`}zUs0unApGlnhpxfY2}AMHbo4-f=r>w#dnG zGo@;ef8kxrc6@v4Hh1$ukv8?qg4Bdw)IWt``(G0byB`^;!&wu*k_lc5sYQEpv%=V6 
z>@VZ4y!-d~d|!p0GL+0#|79uP}+YZ!#{8+#j!^ZsAg1zx%E#&4DC_>C5R+E+L28=IcPs^>G=E;=_*ZA64^b!o1t6|QSG zZY_B`_BDZA_i1-}P`RBUclxBd5wS9A)ds_#5^DtJI%T+fNq1Q0xoO4Wx-81EFuBcs zM!B#CpKfUAC$BUy_ z%&yCYoAd1TvE$+9)nzo~HL`;>(MqYm$g*Y>!z>&8J_&^w=WQFe`|1^%x4y2;$#z{i z`p$!K1oQU^d`f-6EHuSC9d!^(x4>YAQQlaG)t_LW*-*Zs}LV6)_VeyEDG*r{&l4TP; z&kT$4Bst?M9`KJSHs$-9;=q^`t}j&@M|2Y+3>R(ze;s&iBX15B6Lb;b|v8Z2wBMRwHWa90?zmq4;K)!-8{CY;q8B z!yD6(3&g76zbLi-Xw*v`4{ZpH=^C*pu^`u1??($?)g#Cze)7s0j3ZDHk)kD3 z{+=n`+CGT-CrH)|B0{ww@>N+BJFoI|Shuobq0kbHkEcX!&%G9#_GkptQFY_gDJW+! zla--*_Ao}=xK_8xKVM9xmPSZ_3*|VzEE_n8^r>?q?ZZ;{+x6RGY#Nu7VgK5)@@Kx) z%rjZe&6}2TU)BKlQK$@EDWX5ZD5^>|Wzp2jL8*rLgNg${!pD75|!)1qfl6Garnn ze*&WAAAZS&PNE8olhd{tVc}Q2Hlzu#nj4Mv5PSYB2@*9EAkKQ3VJnrxQJL*bsqh)a zZE;6+tr5A)7rkr#c-Qx#ZTHBdMxaSG`tzOnT1#qN-p#tE$sn6?&FqNd8x=~Q1gVhZ z$G*urBMb%SY6<1v8=5yUA`z_29teSbrKdtwRt^W33Ij4(q>{0X9W=5(+1Yo6gyko{ z757CVytkqEBRYJx?)^J$W`3WMVO5aVi$yiL$J|z6 zB9}m)*>@QTggL*A0dhsuvoK*MI56kCvnV*_;$_h-uz3U@7^aIzT@Dcz#{5gM)pymv zk+fafdO69|StexwZNf4I7WuTEipwlym;{ypKgaHKKIREB% ziiV0I{oSu#N5b+$E3OK*ouaagm{r+hE=MY6i>=E3kM!vuK0|INKy&wZE2b&4S?YC> z7tcRkH@|(#-fO#A_P(~9dpWvMBRMf?A^T4t_U2t#ubOZGPBfjC^wom~Bxa%P{jc_* zok%*&y>ujMl0>Z(ZfW+f5{&+vYHx~1xtyK8{4)PIMc+5vXqkQ@TY&jEo$0<6;O<#H zn?q<8#r<;oIWCIu?+@JxA=jIX%+6tW1jPttmQ;qGKge%m>I9bIZ5AV#!#)Kj}q za~m``PYfB3%(WwPhET^p4wcQU-!%U2-CWUdFL?Pj~Mvx|r+I<#VA*4~?}nJwycUTdYgp+nf- zrNGbHnaags|KQQ^z+6{+mxz!elu{D92&9D?xrVDt z>b%VnE-e!}9h@R!xjq&oDjMuhXQS5Ue?L2m%G0u{y&fb@=$?_La_ zLRJU~XXqvdu-LB@G!;_2_td*1d8e7t!3t}=7+jv3_$|`!DF1?6t0r0-Sz_hGU-itt zCF9bI@=8I3ASHKUGoPvyeNZeog%ypkIEw;%hB*$p`V(P>BEctk|H)=@78Y9JK?cAD z0;`b)$S-Ug72`%A?YAG8l%o7YHkl-ba?PVrT%LOGx98F?`v;pp9jnxzT(KY0&hi}c zgenn5MRRM4Ie8q0duKfe?rvB7E{2z8;~+?$!;uRO&rcPuJ2rY`;@?$izz|t^Fr-#E z)h2~0&VZT;?%V0G1ui_OwN>w_Ww(0u{EyyL&IKF&z2p1dk%qa%XWd8@zDBj5nQ~$~ zFTvTQn|O;tt@CsendD!J>pu-8k_|X7SO*eitEC1Cz!zX7EC2)>t(kz5k73`+EfXq9 zUlN!K52X1&nS>6&d&mT0zO8rvFQ41#Q+atGyi>0K}}gwECC1TdHTjm}qSg?kSr8Dog>BqiP8_L_DpC`P!G_ 
zt{%l3ES*!9rc_{dBQ}YE!{SKYs}p?kj~zP1@ku^a4q7pga5HeIiIpbd_#a5G>c%<6t>>8xq$eCPXKCg;`hM{gevcB}7#O~hX*xs)}! z(W36NhnW1kNw%|+oj z`+nF;5Ua$X|1w@SFFIo`B2!YP7W)|o5Lbwm8~$3_2uIEuL!L&iQ0({`N~5aqMY8ip zt0g|$NbBz}y$CYZZkGz$ecVRjfA^<@a1C0oI1joU8}Ew>O@H;g zH4K}&|Ds3o%j8ZopP+n2)pXr?VxpuxcTVd_yAHX{8VMQaVDaAU7ojgqAfn<}?)rBZ}r%xCHYOx$uXD zrx9jJ=j7yvnrgJDHWj(Ro-O|qXYQS7O@Y&gCmW4G9XyE6WwG~AkcZwlt3Yrw-w*Gq z`~gmx#rr~1ierz_J9DGfZTi2(9Ih+uuH6TTym*{x)(^xhNXudl9`DAo+Kq2+Q&+l? zSH}2RksQ;u)?_nc&erFfS=5g~Mc}y;%|zr3i$~TCYIIz9QIj&I2$5oQH!Lk7GXnJk z+Ka{?a6Nu$=muo;6G5`%INTW=yGbUa11A?>vNgEi6kB4z56Z0rC@Jzr71$0o5|4h; zt#?8DCX6@GH?hR{^RvHRxLEoNkLFQU&oH$Z2+MGTT_Ny&pAV?Ghk8s>j(CJKz7SMv8!dS19QUZU6hFf2ymoV6Tr&u` zzFuej_!pCgCLza9+8>vz9dw=2u@;lb?e5vmpO4dbx4$K|+a$Qvue0v?e6=Skn5Sy2 z)Xo@ko}4zcVS|o^opJgY(#K9gGPrKCfZt$>f7i+FGJXJ6F{O}EY6nMiD!O*`UwS-f zQ?zT*PJ$o-@?7JaAouRKu9!O-wQo{8xg7<6o-T_naRLg4<(u^#T+;1vp>&)dZ`91D zF0B<%_NOh)_5T9h1m@Bp`J~xC^EsoKh(L{rI%oZTu#Jikoy#KX9*YsUqG~YJXxQFj zdPp9wH%n4IDp77ky;y)ctohuVyA^`<` zUSrC3^k=@Qw?{Q->ka+9JQzZ@J}K2e9V>&CA}Z)z*X^8JRIdPVJV*fqai{ zxef+m3)z38g`aC;C`Bsm*~;p|kJJ6QC#LbRf^f^EHhb&2?Sc<{onjM*_;$ts*htl6 zfqPw+lLJDKlrvw1jxqC)keTqe#6*IH-%5SwR z?A8t6PGfly&geUtn!KN@8}5g{uQSQv@Fy#ng<(bQkcpMTYb9mPW(cwHcS^w4|QuvpdN%=(4CZ z+a8JUaf|`P)y?HF^1AjwO~?U{@D!0>fbTyafaDykYbv`HE!k||njC5@i+?`QahUG$z{Hm#OuxcJ098mtTijoiiCKz=g-H3+y$Mc+cS(}>AdBA_Kc z>GKYR_kXmeuor6kYg8hJ>>43^_~ZHEcGjL_oh9YXpF{#(>r`=d(+=b_2=bCc4Kh{A zVsdl~&3`iW*H#`$$8s`dJ}|c`IEQbYDb@3FTmHB=;>(6KKTk|v@#odWsQ-r7-n`Bk z5%)_raSv=7xtPb4I3IEE{=p8;v-keO{rkvv=_$&HT7H*Zp2va9f*ioPB6(M>w&$@X zwd77#Ar@f*94OF_c}IK&>G9pV|6qaaNBm0E%c5C<1QIIa_}_%#-4GeN-dQmY`KM!~ z{676@c;>SB2*;wrIS`hY7*CvnMtVv)YReQ|gLaEW0E3P;R66OGROVvhWZ?aMl?kEA z(Oj{zqV&1?yoL6Uug1PR-SG7M2HspkZvHn#t!bH+OA&rKBDt&$HO_+53^G#@0GiB- z#Nk4KQO|ERuiLEqPQU#6sN+u}x_pD@i~e?4*z@vy{oOJa(@|q}o+(bkL490{)5loT+5E#&0IsiUH%}>RH%D}jj z<*`rm7>cm{=EIu4W`=a>qh0}|mhX{NRsN8cK$Dt@KwmsH5?ecDHB4$>93>tDHKd@8 zr`IX+wWr1c0y)?Msi(&^h7hv(~Xt9*JmU;Z7Y_gogQEoNC0ZSxXjh8eTjn0wESUe(f*`s#8+PVmv 
zV)S`*G3s{7*6o|B>gn$8BENIl5@!Z!uZmd=j`*f*hOkfgf^qf74|zC_#cio2(-7&6Z7ToxuZl$-@GulbfhvxryM)!6wDgQs%frerG?lv4 ze~|>kjIp-h0ukw4+&VjjoPzhj4j+=>6e!-DUvYstBl&WfrTTfna%f3X)`O8QjORS8 zz@c?JqY>|^faCtcZ{-^cZWT@0?@`pZ%C= zG-W{QZ%Vn{uJw%88sk#K@NDbOT4qEi5pUquRIKcsCsZTLF7NKo&(7Rjoag<!A=~iuN_e)X(BmnntR}Tlz`? zFqbE_B!&q?=u^GT+p&`mP+GO~<5kB^Gy?E?Voz1a(4?bkSL!&00wST?74(xNsnug_#$T3XOlFa@d-&cxYht zPLMG-HBbwVEBE>M>x~@$CG&w1yPL8OPb5tK@9_Vo=|UIWh3_%->0)gwc81_wP*H^6 zbO`)06Q8Z%!I5T6_!6MKh8@8jVw;!=pT9-yFRd@ z_B{J$>%I6%vMuE-F$`0|w{vrnJya4=(+XX7G+rPt7Y}jNw9|^E=eQ6*PvyCXDOTEda1X$S6NH+^9q zCM+Q{BcrkBJTRv3o{9oGYxSa-7FW_B#zV8w7pzEpujzc!!xz^?(IKinQCCO)`X@$4 zhZ1&nSmK{h(TIT|dD;DucBl=JEqg>>!%JSnqE~bQqZKN{w@L5Z=}YJLkygBWC*BVo zieuOp1~_`clJ_0N{h7#0nn`c3XB`5smsK4+>dnTyH#I9nt*pNsJS*rQeXJw0U8W%R zIL^n!+jnt~6K?{#6{O6FU3BvSU2rv}vJm8Wwt3xn zIX&2imFihR!W)<(gtz=``b2!d{^2?pX)Vq9&&5tz4LM6H&#Dowb(kQqLU`e~z0e}{ zWNMmxAXuxRiCJG7x=VVwSHx9lqBX;1ka}K4IV7~J-JAxFTbI{nFaP4?w@OOmr5obI zD9_N^%JxCctp{UCSbXv%S3>j`fvDPVVIu_xuXX`#rVPY#uyRxuAzQk^S_1Pyp^g^z z%cZE6hc8=pTrWv){bq*J_##I@6)S_|qoc7Gnu%k?m$fNp&3_=f_U3BZ^L8%qs`cpF z628L^X?9C=81ce@AeP-q?cj_`d*}R1K_ZuP_CgnB2b#h*Ue6qo02!7v{y;#7?2^0?VdtMp|k-bM`n^X ztNzsKzWrIWx?hbVN_4Ed$K4^QN2MK|?!_pi0Gdvhaq=d4WazTU?IIou14I2!LAD># zKvq`*Y&Q+IA10RF`8HymsZ1^m5$sqpCrh>djN7thqyKqGe)P_2&L|L`S?g@=v=KNs zJn5sa%cda6!S)HrQ;u1@b!5AKR9!5b3sB$;K3^_IQ?pkRK$>CRm%Nh2jY#MrFBtq0 zL?WwS{1p}`$Y*944JQmob^i%7C|UZ%ECpkq7z{mT!ArP?626qL6oGkyoo0!MAdTri zT@|dPFOL}A z9^uKXo7}hw3Sl3G!uV24kiQ&qNU{G?Skg0GgC}|kHfd(B}T(LfgV-unxGE_EwUkiNBbnpajQ26Cyv;y?$6aVEGVaifty z(x9vQon-l72b<3hj>}VyvT1C^M(zA5RwPoX_@?OIDNsmhyhE1$2~mz98#OZ}WRv^e zRdmW2*gz6UIDd+<#ma7f%7p zzyK^fYMw>Udu;#0r#ED(n9xJLMkx$wQo)ahrbSpe*Hb$7n?255bQb9y_2jg}Z|JIp z#1a~b4=pE?M!j>=gYzT)sbH(6Z(C0&!rzhFUMQV48~pd?ix~e84F7ou26wzHt@+w) zE!KA^$5zMX{WEa!XvNy4(`tjwnogsmxlfi~R#A0~Jcz;b6AjM@I!ac7{GC6-WmDE|^%_ zw~coZ>Wd(S*?EjdP^Q*Yusz8aI)T|*((N=G0BXlG+2sUcGKpZ`q=Ns%B5(}Jj)qzS zF$;wL61wMQRnQ*OwkYXRP?VA>px?K*P;{+;I;~39%XjlYuKlWyJxLzpbDraw{jaN6 
z&54$8H_T|;0i1dt3d$|&cE6YSjTBEsQ5ykcOS&b%KOF<>D1bMCL#(UUg?0xNc^EK) z*xQ97i7AUdkk-pUam}oF#rU?6R;M0JRo-UBcsPbZ zQi%eo_RobnaeK+_I}z9gibI^C^}deMdRh9^WBrPPi3oElELn2!hCq;SBA{;B!;K|! z>>HQ=bah`%0_aFIPo*85#Zb>Y119q`trTN}wK|h!91*Y>fzr>#wBJ@CEb&Id&Kzq} zaLe1~90_@8oux#BV=&Rv0+}4uT=RXD0F0mpT5dDuSDYPN=`v_cN>2>rUg<|R45x4* zX0%vL1=_?&!M9G#08`rY{i&)@BhAEC1<{*g6CK|4LQI2iP=d4gnbWy_XHm}9Q7q98 z?aCZU@ZsP0*+ge_o#gHkK#7FCJKkC}#ih=3VL~rXsI3utcl`(3GjBQ%UTI&=^Q3S6WW)@No+3 z5Q?TWE6Y&E-sBqU%-YlKQPcC)Bl!P+Ouc1Pn~l;n+!nV2#oZ+ViWe&YyF;P46}JM#9ZIoM9KM|UeD8DaKm5pAtZS`g=9)cw_UwUtP_N2HTU}qsJA>5T zEfs*FtL(S_C;^)GAh0BsbOpfB$WBcUzEV&&ILn#YK%Pb0TykImOK`(w-X zN5{5{k|zrrnXSEGW(A^iq9!emr^EXOFT1G!kJhl_avSPE)I3NC*pWp|xEUk-dVj@y zX03a^0I-N5^SSi3bOVZ66mcXE3{_)mC9(~FL~V;y*_h$0k&Nw#c^M;#xr+?FqM8fZ zluGe!*lsrwlI&Fhb-^}^bP%NE7jCyJy^EPkWP0w7(3mZ~Ivr+n!!d51<51lW!Uw;f zVc3^}A5vc}yt3CR5>iOrCLIWy>fEu_Hi4PntqUZWLiuOt?+idURs!K39Pe-gf6-%iFeM5$A5T9m}NN z-eL0Q<7@4lNwa`B2OqN!@J;%9{m4fxBJN%+Qd@opr(AP08DO?`tBd1~?-}=RP%v57a2o)va zN71@+o{n249XU!0K5q5QWAr{aDki`;M;6iIhPw{L-&*b3e#p=^P*@ zl~N_v>^-JAuLUOE7`l#+2WR|n3LSIFRRjTZ^6rP_mSiYKle?H$%M(Ewl8rqYcqRQf z4>GamF}T-7Cr8p}9$E)RpZsy@zk5Zh3*Blkp^m6X0LNj#;nrrkr+P1yhCYw-=XW7R zA><7Id;4P>wXQ0q6tRMA%wx3S?vZYEps{yFq_yir7w2Q=jC0|a)F5(6sRZB@{L1g zCHnv%W=__)`63sTBNAU9LwOMEyqQ$8j(Rzx^c>5!6w;W0z9r>SPLnhV!?-^CWlYi) z-(D0r0S_2=fEZl1E|o}3fPh?p@3WWo7`BeKHnC?eR2}Q;SfbcT=dGyj;))&j>zM)? 
zfqh9+GnXilP%4VEgy>e7=g1=1L|{jRWlzLwce@}87>FNx&vLH?JulzSi)bO}EpdL` ziV^%}_9W+6M)aqlpno27)`lQsSLlv-fStdBT*yqV9KDYSKZQjZ_1Y{$_ZWD*& zW+;$yHWCFCuIS(XI7m5}rApe8Jf>0Fq5{n0=Q~VI&d0GBQ9BB?ic<##a8yytJG*dI z=p{AmkN(HpdE;G49t95XYB+}7W);+HD9lDbmcCr+FZE(fWYAB|k>AN7#IT}EZG6(_ z=cfjZu(UYxL33dgu3W)cr%fy)pnVxd#NybD#g9x`{ZhD081u2XZjq(0S;B{HliHChDWL4uwQwWc5s{Hm6rg=x-~)GBXmZ2@X!68eJ5*H&{#S{YYJIr1&QZTjE z`FWwQ#1<23-|rX zi`7ir-oTIGZ>N>Q)DpbyC-x2%olf_ci^S6=vXFp$tDY+BR)<4}wLW|O-p-PO8+(K^ z4YsdBx-*^81b&;t%Ypf9+9va#b?3^fm!U124UO^Ed>AAJ4mS7=14~mIbG>9Nvh8o> z!HM+6*A?)oZY&dM0U=O5DQa}FG(TPB>6LuhCLkAOXjeu$RYeaai}M|cmmB-V-@n8=`LcF$?Q;*FNQea$&NSgl!MXec6 ztU!UX@h3u7%35TbZD2ai*Ky-G&6~pC2Rn7}%3UYtFo-9@8RSmyz+@XnSbW?WytvKi z|Lt@HXQ|9kAivSVoh69=i{1WdcZ5Uww{WuD?CO$nSL$2feks>VOP=oZplg5o7z+N3 zrU8r0TrVH+w}Pik0m7%EF!GrT24w(f&;Meljama*O!ydOFRH}=ShFf5;|7A@Fb|RB z1ovN6hkG!f2iyM@9HSA3k-a5aN#|w}>9hY8Mz#{g+}>wQ4I=0x?e`F zJQ(aeFfz@nhm=3y7Z05%;>-9ABdtV!<%VBKX@*`jBoCkj%~M1q!8QRFDGz+gcwI#s zn1-SB(o9-qFAFb!aX&o;lp!mn(3Djr3sLF&AD?)BH~Mdkag)f_%i z_3%*Lt_W2U4jO9$epY9Cys*C<7X*V_E&Sd(|G2k%?2&4W+@pFKlsOfgJO9l<qnD=eVcUVjILiu3)}%e~1YjFg4E z!>+R}myL`QzSgISm#0>r%}NJ;L4iDzFRxBo7C+s;;5U4nNq-vM!29CF<(@r>7x-BN zN9AQ))IYwPh6wrn8q!~cZ^M&Db7WOATQp7kIi{U^R<&M(4Vl181!Wd_5>MY^)c(y< zp!Q@rq|bUe-kg+;e|~UA-G}ZCWg@vk^N4Q|A4p6)$5OFy9)oRM=Aq(?pBIO&GfRETtoB0ls47w+?X9Ly)~|?H}8s+ z6{kJXAgovh>b%vNgF3a5MG9qg0}rm^c@LiwANi~+nx^aNN(hw!ac{=*0QfTu@M168J>m2rTn6SdL{9${P#Nu!)TGMQ|Kc%lM|f6qJSWQBET(B| z84BQN`;DyQ0e3+AtV0G~c||Hez>JE_dX?-E#-wf&)zc`Vp1FrO{nh%U-uIGkSlHae z;_J>;;0;qrnH(-%6k}pa<*3oKGq*jUwnS1bgCX_R#h9ExIUqpl{Edha^O^438?q@* z)pwvF10P;OelMv#`L~B!BGnQ+ev~NlUGpB=&YN_;akX*rX9NZAR z=nmeiY=&cD@#G@rj>Cvo4xN=Re|L-r@BaQI?%3+`>8E<| zSM}I~00~zm?p>s}2#~3@Up7q)KN6h$)wRM68*;w?7w6+2xl%T$V3ApJ>b-#5 z-CPS_T?$qR`ATH zqkHY>)Udnw*#V;(d|R$@>#SD!*G>)k$!%u^3u;02J*FDx(T&(K{HFUt)qIsLa!@(a znQz6(uiw0Xc5?dLP@b{rbb-qW8%bS5ugBu|p%2RZYd1VTXlJiEJJUHv_QW2w(h^z80HP?&gbGp}z 
zHILAOe3li&Ov)mKPRPNK2GIIjBw>s8Te)f6aVGB)mJa>=;&~nJm$sWTxXU*(#R4J(QPC|3v>rBT866?Z$8Hi)y6*W9K$42g@@~O+>j57V8gx)Q^VGU`KYpaaH(#RQG>C7skBcA@d41M zyWE$?4k5SL52={aEN^YACwzKVtGymfh{J}g_!5O_O^!@S5d}CD+qfih)48RM-#F?mz;JwG_Rgbiu&L4B+O9oZ@B4QY+*d_yU+2UcXt&^QQ!J3N}e;5 zN!$?k-yg(~O2+Lr&4*3$_9BUn6vH*$_Ix@!@n6HJP!MX7gKU+oT5H2!uNbHcSy}Nz zGDnh&T%KqZE7<1}dVGe8NY7b~|JjvK6+5OC@ER2FV3p zyI*=`wV!ev-aqZv{k*<6nRe^HyXIxeURo1lZ~gf7=gpsjIj3bZ9aj-UC#xoCwD#rThn9g4}o=?TLXC#>w$7Vd&NlmNVfR>Vmzq`HE205 z54hf_$v<&J+fbC4`^rZjocIVfvJ`9_ zS2Z0&?k<(&(ico8KK2|J0(5*D)gf88w(7+SZlpZ2@a5?D|P@pBJZ4Dyx)_!sXqOnU{OD?h)sUw$gii9$lh|z5?2c~4-PViYV(4$ zu*6vG5!yL`II*K>Ur8>Dm6XS?T{_#}Wfnsd)oU*W)gpDEs*Ks&>a?2LTzIb(rN}Ru zqO5DGV9V&^xAj$%=VrHg4Lfu1-*CKKiuBRo(8V8a^q%Y7apWGr{fkK#nqj3CG}oUo z)~9kQW}Kel@sxD0JS)A4P}WxTH7ugsq{piKbvmw(6d!mP=>h*ibB|g6s$fe$MeKif zFe073Xmc0e`8Bmf&O^DmXukMY*qDKiRZyU{6@|=j!NwYQ*;Y5jAC# z0={->Proip+hxTORw9Gn1y7MY&OK@|USSz*HAqjSz7AS4ba7Hx|0@q)IJEUzw(a_j zm34JS5FRF3&8y8&K1M8IikhT4TgHkf6k1xLHh4Chx?igzt?m3BX9T1&czLLc%w)F_ z`$gq8rE>1O@Z!9ZkK05^*+9sQkqU#K@IpEBuS@&kP%0$@8#BB@-R(pQb^;z|LAQ9> ziraRllDaWitxFO+9GJf4HCbX_Xhz4@DfZl#Ki>Q8r2kb~_K?ElswTo# z2BQ_Pk$shG7ap7=q8}u{Ac8JxX{DryS)tl_Vk)X!N&!f-&y}cU_jbutHYAYBHHcBc zG#k|4%i7EeRc~TmsFmC?Io0C|NLcu9a8(o!(zrR54NL->e)YP%V!9K0D>f5DY)e~V zT&IuMpYtBC=%TAbkzQWuQ>p!6dk*s+u)_FL39a-%FGNiWy}x(sjo)WB)3)#YRLI=q zcK4heN4||#HVf~`pv#{8wiDlmDX4M1Ab;s4DsDSQQi{HPGA#XLN>IdKrC*1I5#p}C zb=NR-g@n@SAgSMbmmePe@gm8p4xx~=UF7rpEHem(n+;ov$W&8oir>B}MIVsH6oAI= z84)T%q_|Qm5yVw+X?tx*ofvcXA^S#os7bflBi^pClTw2>UST@(zx!gopY{+b9b@6s zi$KeYVhFupkC4M7+W;VhZy}OX%UOHoPgZMn7o=ZpZbe@&w(Q$HLV5{pV~ECMP1w8R;N`$Mu@)(@b`JTaq2KG`xz@oD1HpdHiBA<{d zxXdvI&+?rA_VI?dB5=znJD7@2*1!JUD`z5LL<&u#(!BUumFjJH$;o}_q7R(SbLBs| z&p!&!UkGlYFJJi(jcf6^8M*z3sgy~~J1%d#P_5+ygZzx-CD`i`G&1q?BM^io*|W>f zu_{k99DeO8eBfIpzzQrTb!ohx(wC<8dZ#UwaPSMou5XDRL@|nRxyEwxZL(GLX19#W z>xoFiIt+8oDtIY4>|_$Dix{)6w{?J7l^iJP0n$RH+iOjyku`qbj}J3=97W1EG2w7F z5Iv4I8|V1hYV=``K1NE_Dei}X5|H_WG{(s_!7EN{{e)a$)-qY?p!x4ytT?IrKylp+Z5*~vpQmZ 
zPDvam)^YN75s8uN2#YbmCSW7d3;RL0qJyQ^Lhg08xEggHHMEL_Tl(06N)Nc)c-TB9 zwIX<34>dJ%#n!_Y^VQhdldh~;jP1~w_ZlRtwGGv5MGe<2=qJDC|^q{*y3u|-q|6}h0 zHwE}^F+Q!fSi=@OMBMa!e^3zyQ>Mzq)0d0urqHQl%V=tVY}^E}@YYQ;tq|d+;WJ+; z-l&jqzi(5QE+6h%gGg7Glq$K4hI!whV1$!DFjpTG_43|(FX+?y`5Be>Q@rS;$yft9JpV|GhPQP@nx{JB2|Di^8pid?4J*qnUBuA zP)VWrH8u;)yp$TvE7`nl*1*Rf8s{F`J6{?JKY2C}@QLPB@`YJdV*N&zTy@AhT?=ga zbT#_x6}rl;UI<3sW7bp~^7$@{BM@=C=6AEUd$52g=Do%ApJ2v&2Fo=n+BWh-lICwg~+Thd_(aILf($Copdyg0anHo8q0|oKFpz*qA)o>#VD{ep=89dR6Z?4 zv4IG`lCt_B305dyFyBNfwLEjH<$8BFBgT>F0W4|CEqYF3mX_^~HCJk6HGKM_h|M%2 zXMgwuOI+GUuP2@w39kX49$GFI<$_oql?DY@xm1WS5_>;rI%jGV^C`<=oi$3WDhTxl zv25boC?{usXxbUy=#wi!bu>Z)e>FR*jT6&BLj(*18k$CA0(Iug7A+u2JGW{tvrt#} z#wx=a8^0N5KW)Izy2KwZ$kug#KaYQ|m}}a+-J2_@)-+g3c!6XI0&|;{6t@n`hd)H( zkHx@gQI#YvWi@$*AbjuCYRx3B^*Z=_!PGe}SvYw?l$lBPjw?e^TeR0Z=t7V@7whi) zFs4sW5TtTE2L;#-$pLoOa$Y4q6_kEyYnY?<43JHCOcM3aI$zAC38UU*eN7uxC_HrY{`MFTT9`Ed zS>KZBfYG4#k6cn;3N&e(zalOAgJ#k@7ue>iWEVA+y+lMDtSE{KPO@EZyd>*WD?>!u zt|=?H3oFJc4{bQyOfkER=$>sjZY$>6;97w$hyEzZ@!@$Km@%`aNWo$H7ww*=YHG2~ zi{W!2D%j6{*?=WqX8t*48q8VOnO2@?qK12S>#_e-8DYOko`OI=K`DmascO z#pT;5tFbr@Ek4d6)TF^NW^h$zHtd?4FV5IJ?U0$hq{e6ta@g@J?xKq`96<>=NOf$1|Mxb&Z8PE^VM6ZO8Sb#hUZPc+esO}JmOH8 zZ+Pw6-kT~ZC-fB>e;m^TX3u6f-p#ot&5V^o81{I2*3fX(kb}ljHFaF>mU??`nE2E$Z4CC6U1^?|UUpBXt!a0jq4K0)+6sGTGSkV*M7QJR1dy5)v_cZK?0NtsoNz%G)sI7{`SgEAmAfet_#?8|_2 z&MAQRJ9li`mZJe~qQ~7T3tPh4_Uz44h?*OVpsNUdR6fdjU3Mmb8T(ZWHp+aU_c zxu4PBr~z|X|EMh$6_%|0T$VDt_;~r&NZ7*!9WOMlUnrmp3qY33Xs!P5s9)Y;f4(zZ z4!t&P42ViGTnw)pU-Zez$=}@PInl=FnbHtp z#GqUON3$d0(i61bF9zJVK>G4{T~8ES{9lUC(RK5m4`dcp3%)1EB{L!d)_-LGASi+t@qIZXP=YJT9l@2MN ztwh|{oUb6oH>t&UlfQVk)=3B=k)Q74f{<)T)1A*=F!Wga)W!?ay z0L2v)llpPW+9?li(&g0N)4?Brdpg1$9x^^fKf_WHdB(aQv{=HyBQ?`r;|oR9bPDDb z5FRYzoA}g`S^XIGl*<)SPBEVR!m3zkKFN_rO_mmB$VuS5v}MJKe(hh!GsqN(73T?` zez355_uNs3B7@a=v=xN`So!WxV!zPap1lTBZMqhMZCY$QS5nDuSTa^k zhL_cucKGeAS0*)R($N>PF*fF7m15r69n{%rxYghQdPQDvUjh}NLM2vq-;d3&q?1Hue>h#P3J60bc>-K1^sdArQHA`qNZz)zCZjPvp 
zC06srg5f;mnV-OdWj=yh4Bmeb(_NN|(Ypmu4#|wy_GbSq^%idf``-f`g~$C?$5Yybq7|IK`aK4@+1c!b z;|p8UZ1(|90WtVlTxb|6(az6LRbN$jQV2gMiUHLN{!uH2HzxKd zRZ8E2fHslGsfA@M?+;djLaQ4@BB0G$)v4o4wFcF{e}7n%)luoAF4sL*`Yfw0bsqYk zWa#BTF|9Qxsj=jE$GnK>g;ZwHVD2?T(y?ya_6tRj3Nz~(p~cQhM>dJJEXFlsVxpNz zI!`dpKg6^FB2y$NkC)tMd|b!PAeYipuIFFIlRrQF<%4&3rR6(SMteN;3Lvw9am}KH z!F#9s`wSmK@H*|d^5C@Bf=J|X<9=7y{-Wf5FEwgf<7FjTY`o!u(NTbALUe5K%C?)3D{0K=JcUG6TLF6M-oEm4J~ zP#lt|s!$`dx|pYtiJ!)Ie9uw#>~NM|B#zF@BnKGX+8_{&j!wXw2VL=3;JW}!F>!ZN zyfL}WarzTyZ8){lc|B0FN9nQJ^r#S)76=+21c z&APyq<^DD;Cwf3rVjtBCF?1p7e&}7%D?a(HxaiDmu+7;?fq1A3cBB)NJL*5t+pS+5 z7()Lq1x!3b&MzjJk>NzDEWhae;e@dhb3CIrA|^o?kB@vK!-IW@CCdYm>A1zSS~!Hs z)I)p(ssARsa%esXowVRHxf+IXVD9_X>*X}tcUDyR1j;Wz5wuPvCcXICy^}R;Jv>03 z*}!@-TbN3t-3yg<$^F5RJ2r6S0jZ>n2ca*_p=pdnvnnn$a|#fxp8c9H!u%$-gH}{k zyDSF-+q7%3Y7h|}oET51(wqAeqx8_HC;uiyMN}b$gCld9S|i)KYGZyBxR0`}Q6ye1 zPZ<%#%(Zy`p26H;o^ds=7E<|&R}w@uNj{Y&0%Ru7vpr}+{8@>kJwfNn& zDY~unj^;$g-+F2 zY{=g-v&>+cNiLCkcfuNGV@=;j)SUfkJHwve@F!w?Rmn52*bd`-GL`wn?|fN5$oz=_ z^Kbj>GBlPq$%1n;^%>;?#Xxt(uJl5=HQB2^u734QwLG5GjEZ^OUh4|ty^}533RNdj zadabz)V?4(+~z8$u0SRW3lCmOvv}3zR_72ANt+_VG5<#5r zoe=VmR!6`lOhD2Ug?V>HD^m1-%yfo$0A#qP%Nds0a~`4l-$z}ob1W3q&)v9yO>0R6 zq~+%6$;Gc4x`Zn{0rI0kj31^Gi!dPwCbpG~S+(BTnPCx?No`y5!*QNYK5D)-rsRHm z&IrY{8N1H^gnhvN3dJ52X-QV9^b7C5*rSopOgcK!vM!u!6a?YDXh;CyWTwI679&}} zUt5?NfOQm{F7(mdk&~MQD8&mnS|RvX^|+GUz%mwbIC1QJ#V%+P({8u=x63-y9jb{D zI`UA@HIvUXDVF`%1GpNe9qDR9<>0T_x}uX;+ZR>P#+Ex*!j6G^0!r6o1QuX(kRKt^ z(+ieZqQK^R>=V?o`YBaYVB#81$ zt`}XmCVm7-Fd{!Rd2lMd``{K*cpAB6?GM4OLo5R-O#Ro4|RWRBbi90X>`6Lu`u2jSI|VzmRL+=ZP( z;?>uCiTz13Sw7;LNmWTX^<`;sRZ=2IGYKGVbDE46_PO{AV`sZIp+_1O@nC1+yv2}S zg~n2`>pVI&Youacr>gi3%XUk){!Tq3Mbg)Y57-X{v`8gY6VR#P7e`G|yYc>%5vH+# zoY%vs_@513ef$QkZ^Pz(Ffe`Qj%}JAAZSY@-ut>ku#Z3sLSv>k*qIYx56*a-mC7DFAX zK2siZB#uX`UPDT-Lp_y`bOgXHLc?F%yl4bcUyt%hNr8?ZAUg%#qjmwR2`=0`PG)@RNG-Gz-i0-_ zgbNcL8hp|6YfdM-9U5&fZNtj3gT-9PnSu5MJ|52bQshv4*AhdG_mTo{$xbMd@?9TK zTgt@ZE)`JCsI!RQf1lrOAOYq;XJm>S%>OcA1h$=jRWsi(!FTRg6`9|C9RtG;2 zs~CW66tOkbDjKXm 
z_kd2eU!{>`=?bkqYKB8_<`3=Tv5MP9%?~ol;R`=1pAuT$XI2Z|m;=N$R;iJ7y?c-U zjRnQdA33Z}-V*&sL^&22Ph2qG;V^kzq78*Mi@5=1qLP3^iVB@pzlh|)p94a~RSo%<9g zzX}%XRT+;7{e(6VdXVCWmS{k+(bQO0An=kA2tPu!PTPAl>{bR(&l8y)J982nu0GWb zi@maG9YXmii4tt;HyM_#buo~f?uM-XtjNPz&^|emg*%vWy?J!S4>X{aDK72TVvhB} z5L-EUPb|h%SD4k<+&2a&`Jqcjjt0(PufG{*^qjQXI!!BYKpPP5MdUj8rI?Ri|4W|z zN2~Xim=;b^pLj9CA2|)xU0#~1M()@g)l>`Hlp;d(4+p#fKJ^2hJ#A&{=Pi6j2tPhM zuK6H20+CAgr!|aHc{8Pxy(`n2FnT>h1QFk)oe!;yTk*sZz%fNl-C{oItGMUY+CM|S4svTIyMPsZcp1>em zuRv#v_a=*}M4+Jh;5eyv+=X2OSca4` zmn`vSk|xAc&fvQH2M(uHxf(%c8(Qb@ptGD#N~bOnS-lM%RwUE)bWyZz{e zkss{e)vp5N?i__vaX>QPaR**sd3<%@l>D}Us*ptnsQ>$8?3)Ai&#Pzn*Boa@By#== zj()uL{uAv%j%u2D5`nra=l?4K=gcs9HTXTEVpVX_5T&D!s`xR}$GPwKPqJaJb4-YH zN}|;8ElF7YUY$F){v$p4|5>EXa!E2iz#{~7ob_dd$X!2o3Hr4H(yR){Jt^*;9>1G2 z)U8yZW2<-KBK>y@K2$n>EkJO&Asu?8S$7MPwhh+?x^a19_6e^UWm(3C_4y$cg^?^< z!a3$VkJ8#@1@y0u(@)luj{2e&0{ATm27mmZvt(oXy(fG>LeO!~ZLZ^K!6k`ENX1Fs z8m5*9P|3=iAeB(ApzCTP+JK5r+z+;(T(?yjZXnjeEOcw)E)Cu zBDr(p!CQ~j7bYA|ldFq}6?;u!-JYAn`#99SEKeAaOZ&TNs{M3jyb(W@x`|ZG!biic ztATG;#BVYz7*%#dLes@L#!YVrO=YG1T{}c{O=i%Z8H&-KttKzO>&M2eyT$pt!wn43 zRRmlS<3TEnOp4mbU|Jyo4NH0%rKxOh%CTQGG+DDR@<+i@_YPHUCJsA~xI8ZQ0!k1& z3SCKlOvJEps!DouGJIc=XL%a^m1)TS<^3M z;(Cm}_NN1CSO0oFy#!)r_)1@piW|-WyN2;LS*0f33EQsWF&$RD#kE8pj4AtAUnLj0 z7!iq-v(m_uA2_R}RvTB5>fQT z%l(N;1W9qSuki8sggVuv zF`vn3ZB+lXF2y3i|578!GR)I)eI@j|v9$m()y{QjtKp;resSF?KUQSN;xdV>i!HWM z#W!~uM3x)pFA?mxc!psa2CJ!@&4bRZ9s}UdL6f%;UHyvtxzExL z_SfE};6Tl=P508Rz0$AUE1pj9gUYm$Y@}f2zz8m;2dnwu+|DK6^T`VchZ!Kr7yZGr zi%TIjQ!_^J*fUgpMNJBE4z2Dyr&e06dg#G&IJ+GGyz_Yk!meJ56)-8vBHDd`S9dv00$mg=&0j96x_y>0^Ju z(Z5DaTYWwCmQ*uXyrxOKI%{6GyzouJo;B**OV<=)f-B)6k^_!O-~qh^XIX=>*|{{4 zUnbqhN#@GCN2>)x6Onu|bw9hKB<{}^=uCo86SekE{(u6i>vY*s{!o6h{EP~9NHh9w zS>(35j6(sQS-Avw(x{wrZ|6;EQF(bLAp-_ML}5Lb&f;ebAqDjn^! 
zt9X~zSq0_t)xgp$H#6`d#f?ll0n11t`;nN&HjUdGaIYrf{ ztsu`@euK%g-ybx4?oQ9%J~N+~TKI!*sGA6S`NoNp-D#BkIgi*;i0LJ@5{jo|!Tee)EikDFK+T4W>`<)R!d zS|zE_VxFx|PIp*;x^$LIY1UNmJq{0Ke_ADuJhZIq%8eF;WTTEhb5T^^wiabde8T(- zE?DrI9f|S7V7M+z#3Tr)WX2-UZ+51Ake~xqylI#`_zIqO-F4n{&!ae7IR}sh1hWwd zc0su56zM>D(EJ~F$>;Ii3Zt+AHyA`gV6ITSVEO9YEhsncdb&RYfyupPMb?pNf>0$mokn7Apq$v*G}`01H9Xp|XTz z+;JF&&9h?HV$&Wgk(&U6U$)XjdN}$EMh>vqFY*}_G2DE={C?br9}bekmrQ9FJ_x@7 z**wNhmnTxYTe18+BK?p>=x}#~GWo)mXY-@PsZf2{-N3|`!(zd<`FE2sSvZSnoeMp1 zDa7SCPOAcB3@VCy7u`#QFsvkDOa62#^o*>G~c22&=SUGxHHL= z6R2_uIGQFI)%YKl`fq@5`UX+{P`JqwpJSQc!7ovckAWW=l<9MR?z7g*YN)3?e;RQ` z-Pu)DOh6j^W#S<+1hAI>S84>6#^0Q6tV*jUHHiE@Ag^g&WcOHof^J}2!k0{FYykXC zCd-RdOtb*9ZQ{pX&Ks@ZS?50_c{s`f#~$f4*ja}<-$0_MYWE%qcLU+IPy+#ndV?Ui z#;0dd=S%cjz^Chr9a*8??rSy-#B$P*6589;kx1qs_MtQ&h^=d@`+Q5D%xSmmsf_vh z-e?q@Y4C@XkUTZGgu!}_mZZ2!9j-sBS5R?7?LJSJWwqc|5qEhm{r(od?h!$$Op;!{ z24lJCOP1nOF=2lD2MJevNX}8$2}~~+8-*dD7EI0w#8JSvP}C@pP~&JPLGPbIS8S%ToApl;5q>~q03@(J{yC=Bro<*_;#SUH%F;5h!eW1mpnt}r zf7a|Q4<@Z!5K~SZ1cJ2VAo-aRB!H+lDUl86po*s$?;*4Rd?~>2Vl)b=N`id9C}R7V zs0YTvu2*UKBX_3(mf8S(coeMV3fRe7xz*;X~k# z*GF^Ll)YN{+>84EsW!x@Z-2KvxmkW`{SOc71i$Z@!g)WqRo6p$zYYXdx1IiUQfMuZ zz<<@?eOL;VKYSXick=mE^F3Qny28nKsz*|*_flxUp8F$G?^4dI#{NfbIdx-{2RMrk zcd597Lp1-^Wi3%|vZ~yI?C)#$5Ejr{kYk#7)OYqIWsDsAPEuL_r z+eKoK8h}|vK3@@BH~j_vgiM9NZm64wvUQdb&rrVQ~lRLt1SK4I(1zuEl0m-CSAj)}g znFJ8FI27$8h)BUCpQRo;7T>8UjgC&x<%`j@-N8e0*ymdtD0@P}34}xde3!J7@Rjk4 z8qD*ldG)8d8O&2-_K}y~(7b_fk83FALk3u6{+4B_;V8)98;QdjX0(=N7Z+nl%Bl@w zX^v-rpv3m=PJ7&moU>K&BoYkzEDF@CmvUNW5K<5f`fQV`!5MMxpufYmBqU;kaA5a( z5>vz0L7`?X2|@PChnm0gZ%6B1?N%&;aWt~~n1ELib!bRpQ}(Qb_{{iSp0gyY!f37} z4V5Kgo@I#KkcWGVm=k!05v8QFc`FVx1+VC~bybEzElLseJoF>sS_)ujSRe;;IaSh4 zX`%=ClEg{sxS1{14C<^70pM$$YZu)r-K=v7`2X2xPGSuCH;(#GN_bW9Gi1Z!$x>Xs zXMZ`%HgbsmVEC9F$DVsxGk@6@E4NVml{NmnP)u)6s&MG&VkiL2$`R4I5|-Id1Mt@3)Cw zAn4j{0awo@Y~M_&3|7ezGlHzNP3}noLT3bsOi&&~qM{ehGboO5?_Qb8tKti29TF5& zbjd*G^I$5g$kf1Ab8kjwfcbJcx$YRIx=e7EdfOL+5AQ^xfs2vE>BVhRxIpIdOCCA_ 
zQja}uEcgqfJPBVZQgt6OutGI42yRS~9IyIrvZPjSy-s#9;C*0b;_(~K1%Fp4a4)t* zJ=P41Rj&3g82*zlvk-R=8F?2=b<`Y6;Mw1iD-<2#05qF1fYPxLn_N~qa_v5av^}S5 z&nauTEEd0vw=|g7QFk?LT!!z!w_-|2J^Q9;66M=fa0|M6=3H=%Pq4kEQ!$Qv5$=7+ zZa`t_7DMrN8B|u}>|j8pfTz-ub=kVeK5kYsePaKj3s!{AD=M3C>E3Rog0Rd>$A-)R zc-SwJEr2n|k{B|kC$dWCEAPi*NQe|#wbfkGmyhL)->o*;l>(LY46L7radqOii&o`( z^&L0y4DS(gZN?kczmPuEA;d}9|I={)E1s!uYx3BC8pc`71s|A-;OZmhXodf<^lf_B z#hh49{>fKDiPGGloyYm7BV4UmEUr&L8bG28pM?EC4Az@GYgjtJ->LDbKRtv`Ors4dxvP!Kv6|o-io=D3K1XR#-`jXO zk#bX>(IYkC$!gyceE+bSVSqC=JGdl@;YRPut>cQ3K;iV zpWG`zQ$f&!M@dR@)d!0karqmXPrQ;5WAAdkBw-$?{~uLv8P#UjZ4Kj2af)lO;8wIP z4hbHDySo-Eh2rkPDQ-oJTPg08;_hDDiWd8FKj)nHxj+77492*2l99dlnsctT=1Pv> z#kc8nA}P9Q42so^C?{FmEs^*H%xuwpMwu+Ie30Tw}yqap-z(dX~1@ z`OMukW&4!u@H_IyW$q()XLq++ZyRH9cJEQ_o9g#8`@2p7M{66c%m((~$jF-LN5r0z zSeo**QS(d5U>K0%rNuHZt{jpwKmv7_HAty8gpU+SnAR@_E8V4Tk2sg^Rqe*4WH(Oe z2*EG%z}6#`W^~F>0dsM_k8CI{U(3^gUdh@sQ~fSdc|hVqIu@Z~{M>nqW!x;S^briG z@Sm7rcNVs<pGd_Fjjti}JP;}-et(`cezZ)b z%Qme6l;@Z?ewx>QI?4NcgTp6EtM3}L%sn?oY`eaym2@N>bmI*D7$ozpsmWhpav-U2z>RJO(RFDm zo6nk{zFf%&y?~c7Zg+U-FPe4|@4L0*c#8iOq)t3 z{y!W1E=sn@7xA9(|28|k#_2;|7U9P@=BtQWvd2-m<#zd{;cxpidZeol^(&&}CK1-hcFHbLb=!c#6&M%)=slRJcw$m|GP&Dqj%w4a-i~5S^ zN_fNU7&7^CO*}9CMAyQ)$8X3P()hbei(t2uCY!HCCk2Ej#RBC2E_^usKFEg(B{$cj z=f8?U)71hU9>mrj{b&&FER{|@e4#I;*?MRfpx4BfoDH?O-V;%R;U1Vz78jBRA( z+nHjbnDc3vfBz6dsc6Di)+3i;rclz7{F^azX4lL|D6%jqu838L7#f^yU7CyRIfC)}^$H8V7r^fVBnd`<$pHt;%tFnq};)W(~}C zSwx+LG4WAfN@UnZ6J)A2WoNVFMcka{qF2_^`nlLYp;n&6QCd)a%Yal`^vo= zNv_q7pEZF4j$nYNqn4TXU9dp|8}s?{o2vhTkp@qEpB`UUKWKwT#FhY8JS{<9(U zJO@7f^V~8X7#kMdww9*al(n=-nEF%5?tUo0?!ZVg)@nn*Y&e%QEw##UX^#yFyKc^4kZ?H1(!L=9gpS!CMT z$@6feAO6wMl>a8=Hhxm-#4j5Q6Z!2K?!|ZdN2z5xbaOjC>=LbS?qlN*^whU49Mn&U z(G1T}{r4o%c^|6bf4xV4+~xLHZ&aQ>y?tn+4GW$0F^$o6({VRkzwukg@hNSbv`hYu z5m#|P`l#b1I=kinO@{5NBma33GrmmGH84_43%|H74m&2-;gBP%H3O}4YXNmj7~2L% zQ|~Bj+p_ij@x!k$|5v@KURBf}tlFyN@_B1}!D)H=;O(GXws-}s2Z~bKiwtu$lG1qe z>C;yYR5;m`$ny>oMifEw4>WMybF2+fYYsEJS+l!`xPUvu2i9mmoJ78Ttf9e981r0u 
z2%A$hm%lAZf>%%`j+Crs8n6@7Oz)~6@JoBLSX@l;RmcgkjX!?Gx~Y! zDaa+I6Bf&jNGJcHmp86;NB>1I%`S^PXl<;?0~LH_pmRDY64$GRsE6+Ms%6)_8MoZA zDn=uroVf173s5FT>r`)&Jr1uo7p<^IpeGh*N&5S2a=vXOG{846pE3SA%jfe(#AE;5 zME_WqrK)!cqW)1#Tx*OqS)WAG5bsW;`p2NKd)*_}ar@_LM!F5x9IQi?xDt|~KWId3 zh|D`EBY&<1I<3p)lf*?*w#73Mt4mLnX4mQ{NK;cdGc67tgD^hWxo2djJ28&z%eYq} zp#~zLW|+AFmiyfzNqv@Rpm5s=SvF0C(b3(p`uT3IBE8lNO>H+_oh98Tf*y&_9uk|%7kHc z{&5VXYuvc1t@VfxBkJ#2O2BI63#pj^dxadWb!DEc_>hHO*h%Yb3t(E0SSCje8M2h9Br_5YDx{jB-%fw`rsa^Z{kiXAf@x#a7 zV-aa(<(DhRJa!|hCO^`lN?EaS`gjzDCkEGW9YyPf!lZom{X|P8(PsZs_bU5VP5ueM zpYnJ80Z)?U@ZP(QQSM)4Jl@uMs$o;tiE~~9`9wwo_td`JkEZc(fqT2-(^|aDAjA8} zQd&>&pDLgCY?(@BOjXMN0%_vIVePnc#%rR`vROOLd1Z4ymnu-qM%%;%z^$3=h&mp{Ln!tG2kJu`vM=ty2v~c2LxiqLdx_^P zu0BFWSNQQv4N*6=^xHS#nF&EoPw-}H?!FeiYd{;;d6U!+&9RJ?^LLG97)>PmRZNgT z?d_qSHEv`PV5veRZ01jCsmKa0+B$8+uTFLq@wBXvO4tk@RpfimfQlUK=QYt>WG;iI zP>Fz1;)PKK5M~A69;0Gy!C@*jRqNvWf?T` zgJgI+O;h#1A}dZWjW%#;!3yct=xBsA-j4jz8f?FjJ^B2F%#4olfC$ror zDkshy%^{Xyj{$spe9L29xfL=&464%j^d*KyolPxeizMm0wwacN5X(!h@8638OE%5G z%f2Wc zwpz#@shYR|%d)XF)mXgfC?ak93TgwgrmQBk<(9G`2SKA#f&JPUnZax~CN`?qmpdd$ zQZ#stWO-`M7rV8LXWmm*Y7u?mn(Vm_y#jB9ZOsbAaYrSru*E^Z)A3(R(0hobOqmP& zrPYwInAPlNCDq;?ru;UEecYp(FNU^p<=-^+uUu$BPfk24pq?Cg$C_+I{roFW%kd;jWPr9ZEd*D?cF-F#DQK_VLH)B<8B#K z27^ICaaf-%3n4Sm>Ftbi`L_j~1<3*mKt@xBv-33(T`c~W`g1nkyUHn>!b7#eC?xSmlTbWds&zoP?NJ^&HHX{j~wp z!`dPl?`+H<^*!4bl7zd;JR0h(+~AA?C?vK=AwfzNmg&|F`woVbMugH@VPON=xJ^`S zTyE8#9o~vY#jo{K#7vIiQBi~7hBXV^6yh9uEAx%tcO7YgsArU@v`b(=ow>Ul#)~+G zlbAR9g|7B!3$L;lFRz2mplY#LN6gp-4A-Mdzn$njmcN8)jKK|0nVi3GVmt;;@5H0! 
z^BQc}_YJtXfFVY?U+EN3XA?MrmF2nPtTM5K84?77YSdCRDro6f8%4j(RN_DfXCQxJYA01C>hA2 zcR7!4&s2v&o$ez(MP5Gdh;5asb)M)K(K{sMnYZ1r_$jUje3cOg zXEp_qQ-dK|EY=Fzg{x`fWCPss4NMdl$rVT)r)x77*niT( z=iXQD^W#A~g6e%?3J`)1QqCx>c&($*&JdBm9AM=m%yYv_EMC&}b_hTW$wke(07H5M z;&Koy9H>qST&sw=k|9;DGQg^hg$Hd>Zg`?HW?UTxaX=^!F7ce7KAiXhXBUmUTA(PS zO;+a}R`EC$5`$$nat<#^tU4&hCeH5gJLWcO^|{qd4b$lB=_dakqPJ+u3R*15MN`k= zm>R=s=Fl#3ZjvD-AYoAi7zTB9mZ=^LNHDYnxGx+58)&)H_rCESo@hC?&Q@K?# zxKgXfIH@K%s}J|!3|lP^T=j`;gTvNkighkK-w$(JUn+`y9-@E#{q?0 z)TwQH+5fq@;mKv0)^@TBr&<;(@VDlzuubrCcLUYB7U-}Cp1Xh%voshZq}`( z_+Q-efVx?!(uOjp!BYacQ@&d{SSyyA3?p-{b=1+@7X+ysn)tjAGvunK0C2y79%12- zMF{IldnuhFn=`|U|IOO&JZwiFFN!2F{z>>Z)MNd0y?w#J-I4x?&m>4f2kB#11mcHh z1UVcsp8#?5t`K@;K;H~wUjQ3&NK8UZs067FSqzi$dboeH|FvlK(@2a?rMJAr-n;n2 z@>5aGqocSH@zxK&$6{i1Y8lM@E>eIV@VcQ36;cd{wX&ow3$0qxc#$fGxq&i))>$qp}0StdK(3ANV>}y^Fm89YM1}Ah}6U(L5?j4tuv_$j+64 zsMvkTpWIuE?_7fEKvy2U$MBBcWE+L4P5L|!=hx@&MMzS1$7T?({jh6m2((zDa-{W z``eZO=ZXJ3=v%vbtB_oGp%9x~rx^$+ZuCScn?7iukYA;g;Thn>*=m+dri;cM)jkUk zrQubArWmbAF)~R4;p#D;1_3R_M-+Tnygb6?pe>(+sHA>92oBGvynsP^m1J?OttpkG z+#8!Kg)^5k5X7xG`FJ0Oo6Y#%Q#hpCo3vy%MZW(XMKqx7<@%*I`b132o^s4}8PChd zZ?dAISKWZDLV!OiMQ~^1UAzTKB2=0bc`5%<5gJ^k*u&0a+Sijou=!_FG>KgWqnU@? 
z_X!XiI?tcJ5{yTaLpWICgM*MkOcMJEhxH~(pAWyZh$k{C6-t8i(90y<=~Z-e4=@Zk z&sTS`DOzA-y8)ZQDZTUlUE8-nuL`XnZ93g;v|Kyk6)8Vn8~JsYw68uN`8T|K{?&;V zRx$pwfBotGI+~S4_H*O9vEd43%lEgLKa&PHMJ(;hkKQhc8o@5j)N^e@m7g#$#?U2{ zZwcbPF>?g?O@CRK7L>1P3u5rs;s+%WAEA&Qb+(-i8~AVC9_dkX5EJKaiZGjizW418 zlmjG9H?ugwmfe^4mRxd|8hpIAW$i_*yC2Ea2#R4o$!u}skb}->>uS{&Kpqn*2Sb$4 zXKd_Se(J!?(lCVjcXE%~SnBw#5)!72vNgG>5Xl=NC59N0w&hr~dB&9c=?Lw%q>tCr zRrjB%sCL1jwDlu0@=Y|xH1IEZ6K+ZF&(D%H}(5lkc7eW z7M9Cope@!Q`p6^%PQt0Ka z{M%4~u*r{6$p3NM=lb^99j~A!)%C^sX)}|Z^q);n`lLU=FN*rVC*Yr#yNtJ!K7J#1 zR!@wg=bhY_50rH_ z+eZ)SHxEx>HjjL0eJbQ;hW*sRm%Jd+D7vdZNfLwplbJmAD#M+k-_`5tzN`?9^T7PT z^T!@`4Q_4}4KxyIt-2f+d^fOjTFYmr{PY+WVSNOaRk=sbtyW-%06|Jp{(4&#W+yG0q1D2yxeA~sg ziY4<#oG>A!a`I`3V;HFADi9}L{y_TotadOgh2bY+5^{u+n24>`w9<5!Iq|*omA^LB zMUQIcywn2kgd(!geX&E+u2zh8kj7->SvY}106yo(VwSW(?UIOc{YZ)pJ*CZA-x!xF zmTZwJU35EQ=VC&yt12<3ohn>nC(4CJ@G&LkC(O7z3*_#qo*-P(&R5G{TvlL(9K0J) zGWP-OL6CXni(%<+r%#QLv}&PWM+Bp&uUg(v=nJrNYW59#Buic-MjG*i2jv;ieKjG( zij+J_UyX;VjZ0WCDyGeiN4|MYtt#+F;ddJW5&QOXoWXbwbw6E`&IIpJiUG z4$}jNEQ$cMqD<18?*5mz4skeq@97is`uIRVs$lf%4dxrKxfvAC4jrB$>?2HOK60sU zZ%)l}lP!K90+#V#N&TM?azlo&d^Saa-!SnN{8B|zZr*}`rPrXZqLv3aXr(cfiphLO zrUI`Y3O_#WIjzvfF|XyldgH<(jB#b(O}5nhV1ivMj`j^Vq!}vtnPJ)sRHo51EuF1p z+l(;xau5^v%CW~R`obr&1v5{1p$as0*Yj7twF4byYPviEaW7pVyWDdgycK|&wwZ>^(v*c!vQe^~o_um(GPsIu(NSD5a<^lD z1akSn7RF8CcmKd3p2lWjFVVk_fC;g%D)ptL#xwW(Ykk6^^G){TKc2gr{4!)=c^+%N4*%MKn7D=*&pUJ|A{02yAn65^V*MC02`&bGp4{7ctp^I z7`{<9Z)Q)ZEB;&y6cD5vc>3)x3pcNSp%(j@Lx*~B9?dLIuaa|CrtThyyD=c-#-aa< z4KCb-CSL>A$fB3e%Z=pz4(YS4%9$d+c~=ui(mpur)4uk@=lXDK?`CXZ3a@97{8j&M zI-vh9uxCIEvVH67QNPsuFzyRuGafOC1R3i04Y4Nht_x{QK*8>uN-DA4)cBA)){$0&a2)JBreW_!U(a8I9Z1n zQQwmxgRmGtz=Mhf%Cx}!K$Sn^a*xYZPx=MqoQPoX4@ZP`LCB9Y;vdc=D{o>~G?oOh zZnQvHG7R8V?joC+LR0IxBW;9jMIEB7dSwNBAY-*qyHER+%SXZ%nt(%i!#UNyrcYgs zF2?0x5C=&%|3Q2>t5<+tHye>YsROBsZM-m}LW)J9ua6cDK_Xo!A8Skc2*D)~l2HUD z^1ZZBEs&4|g=P|crD>MXpzx_`N3zCL$NmUC*s%yA0<58qVyf=eVuG-0=vY#IM}COc z1YeL0B8-joWNX7RVa!FPw&TpD3M4|7vc6MQQj!2v)k@YicV_uvi8x4PU)o;^jJO*1 
z*aR&;P5XM9##V8h?W5%E&Ba6k{{+plwNwHp2-0J7Q7U6#7b$hE70|0_rg&$nW zxQmn1z=D*;Ghn2K4OvBIAd*M{X$h|;0^ZefXU=1V)vW8sEK{Ej4D2o?Z!KAz&(zN; zTbEyTqY~RTdQW5Z`)QvJpO7fapo;4UU*xK^D?YawRdWrvHe7_gNxdTbM!2m=9y5-; zf#YpcTlPT*0E*SM9H4Nb#PF3pAR!N)PnDT$zPuFJkfS`tfnCVS#6rln5AZ?c%oGHo zcHk0P>*V(`G?Y0sADvwivuzPLIvY?w2WMh)d; z@CUD?BsPOTy~3poYD#5CkP+^nqo9v{RaJ2lOjC>Pu*7!bq&zT}%jaHWy-SdFL9v3N zc*p3CmNJ&Zxj!@}(<$7fPre%q+c^G1U_s^foJd;(2z0kbNZ&@vMHx(4k<7rsz2tT- zqjh{vgQjH%Ntgi^Zb6~2%fQn}`QT7w{%x$85C&EU{Ypi(#pq1d&_+Zoi?Y#U@I1m$ z6x3R4rNUP2`p_4oj&+f7q6V;nKG?9^eyN4ZKa|V{18HdL`hPL)MmbmHNv1xRs9{G7E|eDpRmhpv?D8z(tcw5 z^h;ZUI+t`{$sNM6d*1$hRx>34h1D?8-pOyq@-KE6Ct3K3Jg6#?c9ukq+4P8lxiz|i zmv*Z|KUIb+w7NCfH={2?ttJGLRag z_35y-Xo1?;k*Z;>lw>(Tf!>11NwX-APAUxKsSskwZGV_tBynT$(yi7{+qe&>w)fR( zsdsW*r>KT6L63RLEy%;!TK|Cba~G0-p_I?m8)h#A(JxzfZmoCw_lg^4URPN`GuyW) zNg{coWY{DAQ6eqgB=QMZKnQ}WVJ>iNl3yh1UzfV^o49au07$kr=%;#ob$4RXj2757 zrAV~7M{Ligw({pAk0ci9HG0nU;7bs}b^zj-QfY69Dvxf!Cm6cgiD@m4P?RmH`Kxzm zL^_%_YD;Q@+sId62GZkB^S<7;oPKwjZd?tWTNmxR=o8s|K6|q+0(_zedCl>I=XYzb)xC z9EGtfC5V?{Dw7RCip^~(d0S#mDW3h3PAC}nfw4LZZ-3(UjjaSbd zQn!~Q7r)JJpH~qcB|p*NS(vsUCB#$kr?wIRf{I1CBp)TNAH>Sn)4yoL%4AU#~W4u2)gq`^kS+V>EqlC%&eTU+la= zaxeatMBcFt9NGPQ$!cE4I&|30V>C@KbF31qH)ogB7fqj7?qb`o&e2KfAn5JSmQj+2 zWc>snUq&W!2w}{pR3yebG_0F6d7nBHvp7~RaMj`Ie#+!X4T@4(0k7X=Y;g&A8Ib=Q z-u#1lxUqcqgO#E(iHa`!W*!MewyewPt|!F3a@gaeL-y7s4LUY!cC4RCcLKge(J{TV za?AK?{(r~@zSf21wg8SN+?-ejf7^vScCs4KRSa<}EIMQwO8vV{_*x7;BuQeCMb+(4&32KMp)QtF9NEZWay}*K%aYDg+_03kDh8aKWz8q(f zx+~k~^FG$Zlp>`^Y(8}?@crv7%vKAx1lCB14tW*d=x4htc4@D$#}L|l5ex)XCmrzQ zyHZv}!MAi}xe{fb$SR!OGWVS>9El)bVHeUyNmUb9#2#S?GoD2$F1g9aJhYn<`!CJa z+?ngovG0#xuACoduPJ@x(arll^LPtqxxBolz1qxeCm#SQica5&{1MtlWoRKOhRv{p z*EM+SWrpUHxilH)OgKPMhbef>gd?`E0GN<*Z{Dj3pK!qQ1x~%HmKTG4y z4;rCcZe*p`87Rq^syZi>@)2MeroqNQ2^eAAc!D$X@4nK5bxGNZIz|xht3JX|n~|Gv zusRT*rJvBWOl!Hv_LqQKLJr>r=2swuE#9Pq1n+SVq7Pfc7B~1#`aY21Yhje877WNn zy113$(!|m6MYFWv$=$P_PVu8u3JW+=BH69t71{Pkh;ma^J-`^5HJh)1*Iv0jS(4Iq 
zCQiUIdtq5k-a)>^^6L#dZ(s%nRk5?AI=xRjtft`GfpcjM4z#OOGc>y6gj?^r5O&U( z?WTxoA}@Q;KpCSo_AQ@+;Gw(xjQr;zwo06kM28gG%|{)%dY$YoCQ_E6Ywa1LjcI5m zvGEt|_gqT{Hb3cEhb6UAfc323K@d`Ez2Hq>)#SR9ltz{0)saq{gHh9HZ)bGcA+L6s zYCvQ0k)t$u1WK8tg(lm7l$v+ztT_J)17wJJzeMf@CY1zL|k2mjx4}2Vep{=8l#`+y(BoBZ(A;(lk)z(U! zhl#3ZYbRc$e3qR zy`R<*ceJef+fjwcr9GE<`F=}~en;El#q5V!%`YS2P(Dh7A5F0+bMJIdvErn96|7_3 z=M)Cz-6cD{l@C-89$uu7o{N&bJ5M+F+J5dC-Q~G_sf-#psD23~d%iJVwD~5n{_6Su zmu)$~5?-+7#kVzh#OUiqw&^xglOJTm#F&CJBTh~tW}qe(Wk%3y(4lyZS9#P}NJK~6 zNL`n%4#3cSM8v?NCkrj3vPJ{4pho%RTonLe!b;RsIJbcxPud$$i8cHy>cn}RBl5E5 zb!!B#jU*Gw6~H{aAJsCt7Y%eE2|;=3sAHbnx*Av+`)|}6%ahwkSI#(&GHHo>#n`GS z-c1P4UOni!Iv<2ho1+X0Mp`oYBwH^Bc7anyiHq82xX6uNRvI~UncnE})_1jg6qo(L z!>C9~w0aK|2K_)-|9D`#JT*Vt5B}cdTxNR@v~cJ|d}Q%IGB$Fj<<*JT5Y%I0MMo%F+?2m~^&?S0Dr}B`2w1J!Hb<)hTYB=ahoFWi@gr^e+W) z+|&N49L?B39y8hpThmW4u4Mnc5MA~X<(Q4eH5RKnFP02*z1*zQ?}~hkDA#U00oRJ8 z0#n=E$>B^ncGIJZdh9pjVs%LD!B+e+y}RLUtL%O-TV;HVqE;1=7t-7#Y`%Y8-~T~0 z17DHFzYmdyT+A%G?DdFiz4Jd1Jy(y8&XN93N{YR%{Cg@*iz_di5- zCP4n#N}T)`@|<<*FZ}H(X$Mg|TKm%`vJgw80|coMQJGnkpY=gOtRZ|ld1VI~eVi;A zZDyK4gI5FLAKqwiBov!~(bL}@f2^DF&J93X@X}ZdWRKU^YG%|91z&7ldn^P{mG|Plh-u9e`TlM7eZTl6=y?Y zsYkX>{Jiyz0c5D=qkIrfusQgJ!01fAg+jmqu3^M5fg(|Zh)e#GW6aJncr+4fNXkkn z2&DC)aIlGC$WmWnN#(E#dZ`0Zf62rJ1qGml+9^sVU=h8`0FYopJXYn>=T5h}5yqQk zi7w|D)WO6kftd6__P3m01^{~&GR1(<0J%lq_bPKf-wbf3>S4pX+MXi)uf@~k%2r?7?{dt&UmLb?d%>(LJ4)wtZy<}24 z#+4ORYN z_<%7L^~V{ho?*`SqJ8lfm!cKt%dh`(SDb&)~xu*Aa9B09ZA%(-R(vEcbnQs9T3edfKjnPs5_RU9~`!Bs`H%-|pIG&0yH!uXP-}#lC6CN6uTmu*~fgF@< zQwD}>Hijcbr0N%@JotIycr11WBGh?798B2$$fSo9fI6rL3cq<d7@9^T3p*K<5km`!rDQK*TN4OsYW!(MZN{hG3B1oP9AtGG4ReK9cY9MTpj-XFy&E$=zuV)6jew!X#m73vt`I5u{VUaZJeIF4joB8gi7X=&B&W=yxlYT6u^*9Q^-I(w*p{A`s(G1FKVamijS%)QR?RQ4tZkG`L_^^{61KVH4^5Liw<9Z`OhVsd-R`F>i>f3?Z|+B zBa(pJ2Opay_yAUvS+3EW&B*zWkAE`$CPzdQlj1>275WRO5${?vQ(u_7>gxWJefmE{ zVvKcqdg_p;4gfi_8bWCAuAg06gtJw?xX!e{8DIx#DysHw7oyhCHC zhIeDL4q_J|KwC|ox1?TvYg>ZmdwdSgK4b;aVVOIB&3`zYq-!6%*zy*p@%+U_Smdyh 
zr-^UQ#5V97U7qi=z*s`j*a9YP!kO80XtzwVn;k^Wm-kpY>f_s93}7<}f4+%5+D+dv z^>*)^htFwU5cF+}xMAQ?W#B-i3p_lN%~oUevekb6p^)I_i&mS(B#Dm5^u2;YF#%>H zU}$fXBK}Qc#hG};!2NvFj_KQc19gM1KgCSj6W!W6(AQb+`q_u7lHeU@5$`Hi3FkIt z?+GXk!1mL`3k-oG`4<7#S^zQ25YEKJx}70)lIFfobdeRo^lI-C3(X|?E%=m7^swX; zkRp-+3Nnnhv|*eUjZCb_2ZaJw>fwyrpQKn+2ahNm>v}Xj#!(e%NgaL$sb~1n1V6Iu zl-g+XAS8?1(=CHJ@kLc$^B8vq<5`*%_ns<1)mwW#vvvP_G_6mNYQ5%hbFkL%( z?t;VJ&5mpC97j{@_M#*ivZRVkb*#RfkJAo2))q(Tl3{wNWYkQQD%@LrbBo`3#>D%d%86%e@_vA0g$(DtU$VsEssnZKW1lTdZ zoN1riEpHs91kGm?Rv+9|JO#)L7x1zqGn#!(7)j%g;pS8nSARnrDgi?s|E|J6zkeb= zw_XMnxO6bYlmr**qRysxH3 zLXu~Fa7U^xSdb)Zl|Hl7f1vS&18v}--+!szHj(=`EaIQoDx?p-F_ln5&Ry4$rpiSo(U44EWohUtf5y;Plqa zEwO<-v_Rz;V(+IXLl--Db$^7YC5JExlU&g!IZP#s6F&1!b*{J4j-%@?@r9(kInm4bCam*k(?m7;GmTJ zl|~R$B=2#;SuThe{;mnQ&dkInSZhK7s35Fzhj5o^lMf{D&Mq4sId*($_C7e4;38S^ zT_3*f>?CpSTpnt6zoC@57ULUmmYeQuU90xLU4QEBY{?an(`b8!Lio1r-N(?hk(MWP z$G9Z$_z?70^yrV(#C4fbw9B#b3OzG8IB$3Kw@ zwSU@lh*bfsZat)K4Qs1%k7EIu0+i_CBZYcudUThgLO&U@a+#Jbghz96YH4@3f@j>n zL|EuN(S)G$tKk>Z5xUS{W?tUg5V~@e1+@N=rUAGS|3$K`W6zes&7XRz2SY{#oymT7 z)Bwr^QzknQC*NGPCKIA06?O{bN2rGbemnq~zW}bM*^H8g3S+-N3(b^y|PUQ)jq}@oFBvH_Cqh z{p&7z(<|&*IM)s@B)jY$<5U{JjOx7Dy$Z=y-PKWp0S7rR$z(@gXT=kCxqFqEQcL|k zI+F|1qF^sWQ(GxtPc@#T&uo0-L!}Zd0<;A|gf{)^VZ_^Mh-o$Pa`ik3nEto^B3O@b#k}0-e&vit&?8 zsxchIooQ|)S)B)2CNXxp_p7^8S}zj!kC8a2Ev0tpL-vWZX{jvd<4PHh{6^PUOFxaS zI$I$X{*OsDH=5Y0Ic&`_X~&B1IzlWpZLnX;+^nCM{U4`Yyn2xDm2Pdsx>+j?_)-`X zz*5t!Wul?w&7QB-W2f$Q1MO&90#wwdE@HCaW6jcUey;a35j213ARGm5XLKAwPl%{$ z8yZe98l5$DCyE}m<{I3l#1;^`dPp}%wF#5#F|6^RHEtVv#GQP$0NE64mcxqnIDv?h zh!#<@m6arFt>jtUyz4LD4pznZEU@bgjh%KpNFQaoW&TG&{6DDjkM`Xfpc>^CVD}e5 zjZ#~AB=BNK)hx^Un!?XJal)D!@3vmx?d#Wqqk-Tg)A^PSLlNwFZ2Rl~9yIs@?3=WY zh5mfFJRYtF{=c1HzOJsW8oqyJZXI(#5UWXZsQNXZGOTu1ZyEIq6NA>J-1Pp+B9I~a z$=SPH>d$mI(^ls+FM32F>HV_-2ZsVK;eeUg)E}TgNv-}!aM)!YFWQ$sg;c12Dwe8q zJzQlF{-i4~4F_q!hqoeOTRLfPElbvxGbr3YqWd`8k$HHD<5sMm=B0e+ADp(8r*(Fy 
zBrjG0EgBdamgj!{3yMOW>vsP9hw__vjH)^)gH=#4OjV}-imANy{6r?P~?-N0_CyT4cR0|D(7z&>Yesa+sbL$>3CGcLDw^V0v)>=aon3eUZBhNwubxBoG z!F6oPBdACD8T@$7@k_|ro31RasLLoF3!ZOJ9u>K6Fp?22 zSZN~gUT?mXUhWE)i?qi)4|KXkHaykywmRfws)#U|<-(aqlnWfP(=uItR9v3j3-|a9 zp!EtqMGM58wH}H&4hn+^TauCSk}b}r|Hvm7A&8=l_hGomJa|9)m@>5p?#pS(G&sPZ z7Z>IaDo(aK8PNngr(Y?9X9-vv5?q|5M@TO56|FfYyej{GVNn24hTwTKeDHy6Xh=WM z?F^H-PU%s}$Fkry$hz{e4rILEJCTuIKcNI$u=un&2j5Q)8O7JpHXaL1btJSC&?(C< z_DQsN9hjqhmZD<<$fixjkD~}i)M;L{g>sQ@S!`7-NHw&wk{;W3HzJQNVv?a2JibMh zQvgf`vPM8;Y3Z9^6A|TNG3+4e0uxO?uZPm4S(hz9Bx#!oq17^wPOBI9@f7t8?#i*~{G}Qe@AmYQs~D3d#q?w;{U=oc)rN(@8KH>P|EVPXWRGnlTVIepUDQ+ymw1KlX>vRnDg7o zlfgc!A2Xow^}Q#dKFfd(J#wWAX4C?xBa3w&_zMwrZpYb8nK2X(!WVehc1x2*mu5=$ zN+~>SGH*&7od#Wx*{bB;ersr|tg-oXn8Sb$Ea?^HZ#IBCM`tFjD z9A`j&VO;?!=r{gAW+apQLo7;5$7_qcVV!*3H4^RQj>0F}xWa|3hJ_p(G5DBmh$iu| z#g7l*&*^T-Q`)57s>R*h()OliKUP2VG9p*<^W**bB67>SxA7SVjEf6OJZj!rK~GOAcY{rf>SXs_YjFj{1#$vw}yfdBc)KoGJRDOA1L_$JoOo`*||q zK#&esLW)TNF=U2-9O)zpGD{u=byukY9CBbucaMJ5%Y0S7m$;Qw{P!dUWS3ucIcT@V zXr|m06T)%u@om#3yr8V0f~>r=$%$4E|5JJOgUYaB#g0+nN|NJhEX8dvM%#3Rn1Gr5 zJdIT*)5fvPJo|zt;Tujz{Q%CzsMyJxw?Qj7QcfyzTP6~5QcWs>19}DUR&}RW_GOd0 z`~G8!?iO#x4+8pbSyy`Yd9=!vuA|U+h@P-NYAZQuMhRfA2TFA_Q+LD6UXS<2@gu| z$w?B}h_tVX0YYgyr+foYh5Keq+q6NVUC?09shn?7?;$lQbaNK^!@o7F>@tHKMyUT& zEAjt?segi8Gy=^#9PF=MTgG<@nKSZR^s{Adjg}#zHf36gb@RLz%oVL(LWALvtR(ZY zyS2LiNwfV=kn{yTc`{+;BeiwvReeHOv{#Ke9B|zibmZ(F021gjvGJ?JMog=xF^2GS zLuLKR4^cr@^ve8tuShRoKMT=z%aW+q-^U|iI|*^vI&bSR#W)tea$H{ZGHfn%U$54> z?d7=|>~;MlPR+ex(e_*_uy1DSz)SBX8KoI4)nG!QPui;b91ltK353G2?B~uQPh#cn zj3}=dzqev~QsKCMs{^O8!KW%bEoywhtY1nNCq0B9BPoz_{)&!7@BqTjhsjo)fpPS- z^7xT-t6@UD-Ixbtfv*JG@NOfx9dyUn_|Igq`|^EjPJ&Tu3;4dyxc1A~jVG0?*}HlW zY}YCsnXxqUZ)z}Dxy|1ob+!8XzDux%8s$bYDemvvGYWO@c9|4BOwPCXX+N7iFj`UXYdRk=eyIkBZV)70c?|Cy`UGUCY6J`ffOE zk<$H&7;}{=6V7@c8*O(gQi3W}y4giN1!G(qiQE?SPkDBzOqJx@{9#v z4MS}6a3uOaTO~y9tl;ucZInMz{t9OhShki$@UkYmPhGElEJzaGM!qHtL#k@QEPU@>0q7MbF*x-#C+dtlzJ zylCv;oaz^}-UtNNX@{$(78MPeCW7q$2h$ 
ztnJ^y>p!e&`@`mO7h?}cg!N76;^Cz2a7zs{=@{2NuX`CfCS(?@XZrb7o*qE(*Re#F zKEG|hHOK2|Fo4bXSpMHj-4*kau-BB?K6Jc}1B5;aA3nQNw7F7=RnS<*{C`Y+Wl$Rd z*DjReQi{6=2=4A4+#QN*1I68?g`y!?ad$25PJ!YScW7}d?l*nk@80{(WHOoj*x60? z?2$)~Mv8t|2wgG(pcUvBqMVTLKdgu=LhjkTJEql^<9w*VIxpH6WyKCeTkT80%{n0JlE6%GbtaX)qf zVb8m|UTlyF2P(a6+UG==9El|+$qWY$JH#J9O7lAOMLjA<`vH@PBF$+7vT8P0)tMrh z*T7e$>58oB1ce?ofqh3)Q#7>+GuWCYElFe497BjEu;g{MI8@G3WqF%R6u1gK}ud-lA{!vYE{3j#-_0> zT=5MOj*+f1aj5`ML!U?J;sqB~=`GCg1fn>(GomRekH#6LESbol0V;r-{jgrXfwp6BbX3v^$y3c<8nT?KUh*i8gt3gS-bbc2kX08mxg zDwscZ`AYFRi)$@cBi5|44P=hVm5w4?H5w+X9H>&%r>G&L9T%ZY_&NOtE)o}3bmgzN zNkS|X*-!5Q#Cyg^*g>?19U~x3L}Dr!ekF8?`^*utUGEs_5v$vPi8c#!V{IttXILgNOg)k=u^fP)9HrwUI$wQkl5uMkC=JHlW~uN*eetmU9m|2tX! z7f!d85-p3kl-}zK{;rc(LRaoffjPmVe8&=~ITHStu<9j}2wk1+=a<2Kmv45=7AP|; z>reA7-v8@3J9@`8upuz%*Of-_N*J`069(Zt%yQi58i2et@dRA)qJ05S?0gtHSce?r zAuf26JSJN*h7VFN9IP8t<<2~6X5UvW;S8l|`u?Ttw!+yjjzF=54lvwgk9E}5-o5H1 z*5UiS)=n9Cx3xAtJ|2Pj$Ro#GQOJwos=!6BE+0QAp%HY3s8LR)%a{!qhHUqF`pos9 z(iT$YpmXo`k6jFNhEJ4-EVXEd?W1V!2si+L$Ia*h8}ltqW*bx4JeY9&|OE*+@{bahwJ%Qmd|ov*Y^C3;p?=*rQ-$u%z@Xw zsWIj#ZxYcO5l-vyX9*3eLs=dCG4oAdEs^$erxT_0v^V3{@Oc0bW$z6tNfvO^bkEC@g4clFgo4&vL+QIBWx4soGe`^9ao_gMHRJ2srK_#&3U8D5&nMX zLy?1+QKRhYr2eTKGd7~knCNLm@SL~z!=j*78A*`+%{SAaJbvT8n;lw}qx}cReCU0V z2ng*n!LE7HxzkGDxL2qDZNKd0>7$D6(O;v{S$-(DGZfF6&CvVt?AhGwvuXM467kOM z#o6md|BZLw#?*`ReyvWt@Ra<{qwuJ!QUZyLym%A+q-#Qm9Q4^7k0`nD_?fisN3|Cp zOV_27y7$)V`1hx0GhIJI-wRSXWMjKk`6zrXoMRvkZEB& z%&q)(AyfCV%EJ7Xg`Eq}@Np-+jI5y($&9UBNoFb|sbs_$=E){+Cif;XR$C_b#OX_= zq`$b)-&)!v_%OR51=?+@d1XgfiWB+63M&Y=f_~;cBV8Gw{sUU(YF@KRzL!X#s81Fq0|T>W5c2C8KnJRcZbc=iz`ak8n|R)=BZ&G3Ya2@haG&y@&CHiIY>K5z9EqB+}## z`##>8D^^A$i-Jb2Jea%v#U9eM%_||=3w)oi9Xz}|e0-iC#b3@(<6rjU10Tk60)^~E z%V!VfDC35!psty5cG{sBk|3s}j$Hb%K~f`Zy&poFpFEG>$k2cIb*~qb371KJiDG~{ zlq)soB5)Eehk&jkSoIv=>8BJfPiB$RTuG|%x&ln3P&v_MfAcvjykYsBXg;? 
zZknob!3Ji}^=V8dUzheHk>w_#o`%l6QL=Db(g(=G{#S82T#l$fLkge+&}Z5O@iB%_1TMkEVo^pw{k2BtiR} zPd2FHs&Lxn40IS>*a64;54yyo5Gt0aZOrG$wZPn*wt~q@oWVp0=7d+d#=PpQ{A=K( zT(Ap40bgCuSf8Y0<4_zL7~#cP%}Zv_6Ay{^%+`KFesPg~+I87CouFrrkQeI-nq&y zrx6t`{N)+FF)i?)k%JMhWQOg zM@#NBWX1ISFhKK6>!JCZtfCyPJ#r&lKS=CT z%RpH6TbFX6&o^4*`cU~YZ2;4O^#CI5r&X7S+*n|)ozfY`P7&da zYDn1{WJDPx2xv!nLS??$6r`rnYNf6d>mn&zS!jYLf`7B$s}WkA*A{xgSn)*Tfj!=qX>aY9@RE#|G}eT4B;I?P+|vio5^G@o-?=hYKc~`6C|FZ zmYO8je{{Yw#iVzD7kWsmSkzAC%_^`Lh6vj=f8%n>jak#8KP550!CYoN1-hL*AXWPz zQW^rq)X85Z_JUA69-2xQ`iVa?&Yvk%E=DPrbHvrkzMEnk=<@h_mIu9R?){3p?U38= zuBxVIfU0(NL8GSOJv1vr?#0wHuBs6Aw@;C&^%@C|7PeSTOKe0)x4MbDfxben^-S(h zq(TLT%x_K%2Q3$=N_TH~N{3bR*1XU4YiXt5@L}NJ=_O#M&^XQAO;L2FF#uY0mF}@< z)V|%%@$J{e+W>C{aH-z0F-P`cI#%9#GtJo1a@(33?saLC6+c_V8>PGsx(ru)yC4o5 zH?CSF%TlL|#vE~Zd2bbM=d@~SxQM4u9T~NMk-znwr@Femw7ji(YwR%&{~;S8bTB}^ zNG<4xqCDvDq!oLFg9eXpPu(3s&0x=ekTYqM^1huvd`Yk}FxydlG7b?o>>pD4Zv9KD z^5ZN~jQq-rwm1|rcsRASJMv^ioTi1S(tCe7+MRb(DM=B?bM;{RdJ0 zYmvUR7fzwc_$W1NacnxB&JF)oln-Y72Gxq-zbT8qmKaL2(d6YqOQ$U?k;z4PId71L%9!R1s#$XH5)ow#+c4Fvll}3uI{VB z?X=`Fgkkm{$%LPD{*Xvq z!)BzsCozN!Z;>@dafYB!C$a52Oypr`&bVd5aNeZzw-~(J2vFjM<4-_^>F#LmFY+EL zWp#6+3eid!OZMg4<*1fyl{Q@;ORi3^zK%bo**LQ zl7`M)>-4J8VrlTl{eE5L|hm7ZKuQ2DexnNAz#%=Dqt@408m@_h6Zi z+7MUxHY=6~4~B>8-kaQ9_*f?eO%XeFbdO>)+)vcEN`Nx1MT$QJg3Ax?Y|1gbY-_{2 zZPz{t5v6d^Lxp6Hac@+LEw&zd9_dPw<=g`30tl8AADFOTk!EuRG>g|g(AH}V@9f({ zH^6#-Sp)plHX3A5m$&0?&JNmcoJswI(mv8A($MQdEJV=2DanN~y;zwLZ5r>zw}5_; zI;C2>y^8y=O1fLulaIx?KV<&CN0el83I52su=_JEHdu>!OXm81k@s+s$tPF2)Bde# zbMOLrS_VGYuZPt2!HERuoFTXFiFp!6tFT7sAyoW1^qjWtP4qMQQd*wQn-SdtfRcUe zd@5c$yu44JB*v8|QeG*F`>wOlgLJO^(ksSYcj%3lzgcGYk5I|C8VZ$EigU|829l%S zfPS(E`x=@uUWPtx&@pw~7j!v~-W2hc4)IivS7Xxp8lm%baBK6f6EO@R zL439ny5Atp77{sJ5UYwo(Bd`+MF{J0%6=%z{H1-5(MHu9|0$T}*!Z(4y>@K=B^ZjjJlL*; zp|QJfV?wECHLs#MwhEtGt^7@XKYfRimRWuKE+(xzDxF~q&Ch;Ths&A7k#JdYNz@uR zJZq5nkk*};P4#<3BjVd!C%Q!UTGxG}AF?guaSgP$R?JP-uKOu5+qG9HTh=r>) zzlYQ8YA>?k_i|-ML~Gj|H!el*h2$v-w}!htr5}5wc#>A!jM3B}4$B@39=fu}nnxGf 
zcNm;(2@Dg&Ig4S#+ynAMf;@RR41dGVnUE@~2dnLKb!M29ruU)>8Vc#V=k&bGA$V4g zSbH0;Wp^;3@T;Spc|cH_Hw6c+u_P3=GOf-4aWIG^EY#9W+=`QVXSWeTq-Zls--zTV z)bxf7e&?Pg9}Z-H^LL){C`>zFTPZ2+=A;w58yLytUgax2L@SM3!DS zZt$h`fh4ODkJ`gBFh)r3ESJtaDGF@qu&O|pDEbi?;{;6hMPpL4K0n6-@5Jf z7GhLXOK^Snam|_0P7Yu8S8_VMUR(JpgqlqMlHpVew(1p4*%VP;P1H`4Y6fUE=G_>Ac;KmP*(Rz<9LcI+W{g= zFY>OJpfn#NIb68a{@Ylr!NOg)KFa4;5*5XBR>!UMovfA8gLROlp23Jl;60$9AXjkB%_PV!&~{8B>~HD zn%Cgj%C@GpDN9oaa9lpU05F;I~kgZ zvHtcmH>1#oFv&~4w=wzp`2?3>4FWnBdq}t|vykW_EQLDHNK5W63W~|p>TqQ;B$dZK zMvgZ~xelC=PS&Em1!A~<=A$#v8ankudoPB zvDv2B#7RvyVNHz`>T$aRS%y89H3NaOJx(h?#c>n%!OzJ!&g>)?Hy-HDK|!qRPsxqm zALcF|(_a2``??snrM{j&frliX^F;18p5mK_PeNV%-M&gWLU%tjh2d4e4@SsvA{j!T z7h0@FG@XuqrZ!s7pxYRTrHveNgTVBYvRhK4XX0NzA=X3PAGsRQAvb{1zG?O)pNKXp zHg^)ll)*`LCwRqC^m|&tn`U?X_q{xG;E+|>aZC4yqfCsI}4W@Q^tB*!2`O9 zmrJAJ;IOAvHPy?C)->%fF4(OwW6vaP&+Z}6f(?p!QfWp(3w9{1eUFOy(ByK6@n5-@ zw}tROlLqd~;G9VbxQ_i+xHOD_CmMKxoXY@z#Ix{=%Tbi>CuamDt%f4Z#~BgnJN&~p zD`U<*$oIs*YaNaXAlfJ$b?BSp#ioH1mjMp0a~4a8)G0Qu*sz<(UPJf?h%w zfeQfg{DJs3k^;?T16G{M={)8qh6dFqNLN>bOM=ultvu{{B23wT-YVgj`HNCgAtgNQ z>C%tH=?K7PY@Q`L-9jImGamJ7OxHZYe$uw4S`QKjqM|F0;*Yc9ez)v_k1Kp+-8zbE zTJ;z8$Vx=g8{Mgfq=4RBZ|aGWHyTVfS+pP!P}p~Zw}_RzbH;Gxu1g`I{vxpBTx`b{ zPvYV-<=LE$r`<$0yH;}JnPdCg-{w>*Aw^c}roa9J)GwcugFl^Mo#xgY%Tqe<0#24&3eF?|G zAnaPqOU%3e{Ahg7UIuj+-aRh_TL%N(tG@4;pvY0J-d^ykKxgXdY^HkvE_#peZ(_}WFtC5cK zD16pUp*=bKnTajtw4sQ}wpkS4 z`S)Ek3L55;AM;*hN?FUNoWGQ5{6<1y~=i z-CZYYKjy6V76>b2|L+($&_h0uEA#T&%)Uzg3ICE{VA41Ua`9`Sd-!l$YhK90pS%g3 zqSpE@V&jZ`i9TQ)OtiBzx>bomROQ0e`yOr5O4rZ)rAj->x8eNdrw)|TPbfyFY%<14 z4s1z{(d<7NSM}yK7A*aH_tpJ>uazv3u(yR}eLyvY7I0Hj(o;dShGLqd~xGLRk1V!{nfh6xG!-oPB$G!kIdiL^R9w%4X~e^!mM*Pd(c!_?+AWWJE2E_oW*@+-0yGl}y=Cl0#iNiEZESmu@Ldo29z-M|iLg zJJ*qEV~#Qi0I3+6fQji2QKcxFy{inNP;Q4=q%lYV65_T@%1^*(2X0mqgJXBRQJ97N zM!ih*#-PmYJ(7syJV~D=M>L2!^ef$t);0#HykJKcf4F>y=PK{7dqM+WMooOMbp)$} zN-@1I??FU0!M61VnYpNF4uD!cK$H=+onRmUJ%vd25$$wYZq>t+d60Z5*e~Ie(%!nF zk}jOQeha*sVcR>8CpSo8i zuIw`(c%T3g8 
zlq~n(W&ppk(3wh&nCgv|o9(5Cn};Oi$@LGzzor}-?=>}H6nsD{EEKYUTDn;=CdP%( z3O>-BfwVusYm6>sv~ZNwc4$~fX?_!`XNXnV?{E|SH)Hxw8y~2%_uRX~F37P%9ooyD z)%}USTQDKIPFwCA_4kev<3`Mfi4@l4SoVv~*Hzd1RlWm9r~a-+t{1n_@uV*2n6d@* zSad%&mN#f{lQeRJXZ|lSeZHKBBTr)CvV=SC*xI&;5N-7=MKU124r)&HB3``8tlRpQ z+c8;l*uN9Fg9H#cWr*dx-txx7qx9~`zr8F9>hchj%T6*rc8avJYO-H59)o(=Za|aCmrs^@E2Q zmPJzZ+X-=@FtbgS1}u2pR&>SFm2pw%ew;s2fYc9Hl?Ky-47lcCv~u;Z*sg{;{Z5^4 z1o|&Fo#z2%U!Dc$>L^LXqi)?#`JY=N5*on3_y=<${?9vGhYt^bN*ifGg96>A9H8jl zJDMqsl61(N0}o79=~tlu7o0-(TWav)r@*UEL7ba(Ou{i6RI;gMby7l2JNlyXdj&lr zFvb_I5mvQje#n*8xebw37eiAP(lnh$E5MJu%S6kBk$^ztM%a%O19_<_WD(6sgRye8 zD=X8B0WPzQ7m}b0$>m*yZ?qr>MjU1krIJHExPR%hf7 zsn4OGu-sa62$#wkOlW-R(yyXRo#LUipLL^z%U2ed5&0bv7Zm$oTD+9QE3ndvZnTlm zt*W68@F2NpU$I@i`!>%hh>gaIPU%>NUP1Qn0Bh%jBWg?0<1*yA)sC*ReTcr*A=!BL zxrTI^Le34I);R4w#u6LIx&Sv~?BpZk+lTjfbkP!?h%DN}V~62}~n7l?^mL zD}&7a6f41k*NzUn4n#y~fD8=V;P!!|^%1%zF!LDdT6V`@^ zA=A8fC45n>P5lG0m9Q0A#hkhxNSWT6bGs zgZW$PGEG;zcMrqGZ0&;@66I)VnbjY7PPs{=MqA6ry>B-s7SEsM%GS!y&iFDK0pbQY zP=K_7Ce%nL==<6LQL`rKP|lGSrdKMTEi2kPIM9xD5iy(T%cPZ283u~!Vmci7QI@d% zmeFGYITT&D6nO=8w0D!*iHkjEnL+TvE(UXn<@Dau`t$)o_O!b0wW-16upNXNj3C*h zbZt2f#z`|>%V=o+Op9+(35NojI(ETdG1cqR{5IU5?Jh5EX?TTHwboS^(bA*pM2}_5 zm|q}d$=e|a#JPn^_>wm-oUv~?nSgOwIFF43?Nxm+DtmV2vvB?H8RNT+cSswUa$RSJ zEoY=HXCy8g3N6htXj@^~jX&yPK{I%_OXLXA#?=%iIWG0~gFxYaM(6_;H@+}0>)b<3(O*Yr`#tZfpxyEwkJxzZgOdH;bj|zwHMFr{1SU#G zW<{TLYa`oOTA6z$Z6Pz+PX9bhLLYB9**UcamZe-e>!TI`PO3*o<(|mh|1*^HXWA2S zz6hC2od`JSx=$r4eH(w}N1>&#)itwVq*k(HI?L5}_OddVD);bIw9Il5!)IT%?#~Kd zS~w2#k8AsDwsHV1jP@v$wZhjaUMypRH75|VYp7W^k(IVnYS%4ih@Af0Ky;>RS0C|&Z?*CNi@K!0^emi?1v-{1h~FA?dsr;t zNFExEqr)y>9sD<`n2 z!tX)ZGCF*aW^+FW-(!ECj^fRgf7sbeh5fqh?&og$4hu|PSO-&) z5*)WvdbNu>RDq3i015%v%CLgBi-^Z}KmnqtsJYx@2y;`VHbk{E$aG*D4wv;i#{&Qv zG5P;A=KD)Hq5Ne1gH+oz_nl28x=8*9hF*-K=F+)zUZ|Ge4M%dJ6y9KvT_1Lpa^8)L zIrD>_5ui*iO1kD#v;E+j4zts~k@Nme&VFXj{_chb`uQx??imSUNp;9<`RCa|G}S7_ zQ7)HVVW(E<-n+sn`Z|4WjY%UgNHbp@g2v5^?{%@d7+7fuirBLzG{R{50fsN=B`OLn 
z(19$&#I)`B9rt0pPpZKk#kO6=F>Tby=igX%8b225B#{^?b`I2O#hLNy|9P~n-bv(1 zoU=X_F5YlzBUrvhBjNf0UXl7fzi4_hJ}6sbT0?SQu5-Q&oPYy+*=4}{a7{6>&R>1B zbSK93z<* z53jd!PYQbZ?q!l5b+*)TtNoeC5~kIyY_xcue^{-vs zt$n+2{3+elgSYTOvE!6SWdVD?+%=c4)6+fZ9UTZl2dYP2|8>_N@L)1)C>n!bL<9Wy zC7*O5X-4sg=ZeX+=bAEe<6Io5+hqPZMgNWZ1{-cnP&@(aQS85|ai6I*kj zMMT&gWsErbPP~Vh?}ur+DcZa3tJ0IkT%;;)p*iyB*%{uGzcd?csbjyE?$eP$gZF3K zPsf>RGf~t@bHA#q$F6;~tPS{5F(6V%pe?(I5Yj;kC!A()<{^BN1gI_KaJgyFq!XbC zA7Oxqsu3HNE_v6(vA`;-vtd_}Q%B-mcCIc7JrEE7AOm8E6-@lLoPL<<4JVJo&o{i! zUxVfAkc?%!yZNk;qK=++#F&%!E7a~8q;v5<+n=T&Pwi09{1!7*v>;z{Q&^JH@jLnq zr*d03Qn<(|JlBDpYRl~}8U1vryj^njB>!DA=xSFx@CyBP^q#?q^Gh$zGNNY^olgOm z1%xEDP#w>XQVAK6c;*13SfTq}t!?!ETDl)<%3(KxoWENrJQ7d$SF-u9Z`iVb{{Eih zPbXevBy(wOtN>)+cV<`J`@3Dz?c)o0!2HHBDem3x_C)xyTeiRqetLQ*df1@$iSxPU zIZ~*-qmKd142zlhD>raDj!vS`azpuIgN17JOZce_=|{Ion%~#8?nB?>hNOFH6~{>M zdyD!2R96+!%D>SP-=E0 zCHBzCUNY3y4^lVK3Ot|p4J^6q9*^~2_Oj$Eu<&?anVDtIszPAlhQIf zzZbiRH#a%p?)ogi`LI#SBWuygL1Q7-E<@Md1lgqI=OrtzFAG#NU#og=FN0ut{$;$m z2`oE1C1~ix>JBE&-dp%)^8x*j=*#nI<|z5=_n<4aZ=-x%V-I3UbFPVd#Js0&4FS|I zjX}>F?|l-#hEfp9Qb*75T*Z&&^Hh~H&F*KWO$*+42<8aKKdfe#u%K0rYCg?7KlAzj zWnw)|8Rni4yI>bs%44Qqv&#r-vX+j)%eKyzZcpLr7I|*0!re7td5J zMQ%8`5cl=z!(GkK%Sttno`peuSEfM#s4ex{$=rkXnU%2nq2kp`fb!^PIn$u4;P8nj zy8o%ZyxiztB5VXKvUgWUCK>O}YODL3^Te@Gm3W`$ooegzjIr zUAJKuLXI!sA6`qmjR!_M-#^VxJpzv7KWY7Oo@X!@AAA4|z2@Y>w^wT^0&1RAH1XQKuYR{JnlVj7$QCdtH8I0LzW9xO$c>FDYy?`I3;Qe@=`0N}9Kj8nF;0r}yXP`pL`J zgy2YZCgZz7#MeRNOo*!g(>m>?e&}rKRhxm%7Jt`GW0?S)^4&A=CJ7b*UUKsp;7USi z7wefVpIJUY6$Adk9ipfWecAUW9mIp@(BmUx`(($trA9VD+fVl#+OuBC3CaBc2NZ)V zs_jg_)$gCE9%gw;@Gz1C$Y545v%!K_QFNZFPcz2GtazLh$Z?ey1FZ5ogm=jEbt>4mFT;UUqHh5L`5PlRA>UZ`=4q|L&oEW@7_G1X@|Yh;+QvZ zGT)l>#%kg+rE_D(x;<@@nBSo3Ntx&@DB6r}cyhyg;C7`ejhfuoo-FMyJX)XPq_w=> zr0~iSWGGt1=UKnO{3b!DH!6~R~7oqnj65DgoE`7(Wk>0HF-owRbU@!zEp=U!spfD_n$_;0|pFCrLCQRwF3ff zHceg>`kQ-#O9&dP4<5no)kd?9HHA>->9E2#N!kV}6m-015J-CFtYB(rh!&NQYjA@{ zsPA_cEH8|04CK|&rB&b<6vsCy0? 
zmp>K&aKsU;Q$!O;ZH;N#a#+GD*v$9EPd=yn zm(&M)yvh9}vE*EFh$ctIoEvaUF?5k6y5|{A6+-8%LZ+Y?&8dKvxKxN1=0ISFA>@2F zz+Kf$-p4l>c)U*$f7qXB(qt`D!23~{b-s!ql_%$e!1>O%Keurd{ARK3?PA@}jl>ON zCYN$obBiV`2jvG>ppq+Eu6f6~MT2riWKCIlW6Mf$B5f=V5~2p8Hw8~wQ<4k$-|wb5 zXEzg>WjRd@-O#9bU*N+`(CVta9>^mEv>R}rg=)`a&W9|AA{48%Xh(|&i34~iNSGpQ zEtHhGL0v7ad2n;7R$S+kW#=Ma?$$aCOi|h zx#!!^{7#GH>SdLuyUS;vP+!|$Rqan!Up*pu7!|Gp(|;DGx&pdg_NyW^w z_FAjU6+c|ID?dj9*Poq!E{Qfje7o&nsokVT?I=3KlK1V%qv$-bwbEFni#OzZFj{pg zXD`9Q9Nw_4lpb!=FezYnF1L-1+tswdfNrlN6t;8^I%nWCoZUWUmFL11IrNPk!W-c) zAGcl=mA90a0G;-GGy;zh8P-X4geWjNrZ1gwjCdA*c_p90-8ftOKeu?L`nCZ-{jP8Q zLVo9G!C#s7?0Oiufj8in~RRuGT9EvlNVyTq+3OF$R*IWNt4CIj~4`!HCy zSD4eBCbR#!G)Vc4FW8){+jhKMZIz2bzNdJk%*-}W-Aiv6Etz+7MDWl>q1Meh6Xt#n zCp>3PcxA={M|(|y(5k!(kF6ZXD7b^;9bos~qe=)Jl7G}XT4;5SyO({sx@>VnUtS)Q zUMQjv-7AHWusm~cU)*#h7(D?-EtM_`*qZ(2bee0T#bSfD>CPDFddyvX#jD=HCU0F_TF{TPe;75@%2m?LM;z>AI+)36hb8@KnPGu}g|FyV#wsc+Q zcqidV&1<3RR>ZLxg*CFoNmVugYYVH>x^13d6%%dnw~sBDWmov$IHS3oYjd~6aZ8D* zbPC`>tt6Q!ek+F9VEe~4uTO3y1+id%q5_H@8bR)dXnMv8IE$I*z`)^<2f8RS= z7YAdXvT4sQoBT32xlU`8+?Ogad~$yyoBEkZ?bOMnx@>O`? 
zq?o=vUo&mv=qcUY)0_*CnxBC8`NqVonvWCtaK5#f))PR~^UzvSsfG(_ec&Ou#v2S; zua0Xu*jWj7*+l9Z#=Fxu{oZ7{H%s=$l3ZvbZy?a4&;xfeHtEr&Z*O`bQ*Z73+QOPVo-CWA73F4jS(w3()=;U4tFX0Ew_Pj!9>VIjqp99A{|RRt}0xHdSYE$Zp$ zd>0w}Ns`~lcHkka6e}%Tns|v`^v86iaBO7oWoX$wP1Hx^2ghism(R__=QvmTfNK5S z$@XwRID~lDDS*U8R8fCqm+e}a(@Ik;Jm|OVgtBJ4oeAg7tv_du1!VM6M7YRJ_z$pw zs*EwH!S<)nPnzrU<0Y-Sn6jz9Sn=WAq>Vnb3&ubkb*7OMot~qpz|I1QW4O0ve`ukyYiWT>y#n>Y4kU!#fo8W`{?FSw8}TRf z`^DmbWIsy@>%lo+EinKVw+BLg|9AOVNzDRL`xr5F5ziiD)a7B~<3}!7Qw@abkst0A zu8CKTo=UfYjDUlDNwv&wi+}EeL5g2wQd4)qq+r8g9VMCsDClj;y_y^e8k--ZD6C}T zi4UXxg;2;5%9-C17`FiqVQeFf!(aQXYrn{grn{nQoTAx9r6V~+!lNmbp|qqx!~t4s z6cl56-vcem=PbIpYm=P23cYUA9RnAvI~q3XqLC-zIX~4)+C}3F5bIii4{`h&|lrOGJFzixfL#Z`LQ&avAP7A=LpyHfEapC{@qh ze;IMA_RG`vkA!BC0xU6~Mgl)Zp4Ep}|Fp7{(ELeeJp?eT)8Qr=XW)5HIqmIKde(we zT`@|0ocrU)5xgdrB%WhnJK)`|D}%lB=?Vg>SWAep2;7+JI~?ZOCs=v%e>3xL+?}ZW zHigT~i?+pBp4GEJul3L9dOe`#u^y?|+aKQ)W8Pb|{XO%Dir5)=g-QRSg*=GhRyXQwdY+=hg(}9!pGAHXJZ%bK;?uc0Lc#*rJ-e?*8 zwIC>F*vj{nBDG*g`azRoDt%}2a`-b5U??hZJiMArPb23|#>0Z*0(u2^ziJeHR+Q%G z3O&j;YvAWck(bl@Riu4aa^V$^sGCnlCO>j1Wt!tdn?5qe_oGlDp59yTc^8G}Ex097 z8%j{^nyJX@J_?+msG&Gn4qLkrZxXd&H_FO~c+ZCG&+Z__#gwejFK-}KCc5{n{bCp% z8UPPRc0`=H{6tPaeAl^0MC7$!()WvB)V1uI^c*BC=ORylV>&^X&SI zfhk^nmtyh=`}#(KquEk4vN$92p@yVI zB;z-GFn*M51NU_a+okf_v|yLsRz%=tTs0|oRIvlba|l+%lmD;mYR@(6wL_H>7x|5L z+?0;^6VHN#0}~!TSDU2*+s>+aLB{tt873{1%yl=dHE`J9dACee0AJvIDQc$m(6N)? 
zfXA#e6rb>);b}I9ch$mocp2O5W1fb;fTAlT7{`@0yM!DpAc}uwCoee=DA<4ONmbgf zI?2YPIClq?rR;WRCuucNe@+fb7*%GPN85zoo)F^-{t+m5e)|3GQ70|N9@@HeR=4;W zfK$VAc5j z<4VKh;mYl;m;?}*!Z3yrrR$Vx+s@zbL5-Uv@BI9` z36Nm?_*KDDU*wkGt1^bXY9;rVz2+&_CGnE?zkeg{0i|NrC!Zw3i6L{>W(QhvKxZDS z%nlR%6cE%W0M;X&SWoE5W~5ukiQI9u_gE(i$0lX;X4iG1sw%v}{CV5sFUjrh^GyLN zx0zE@=f3uHVkRj>o0nv^u(btA1>h7Ok5Nt~@*I2J8@aVG+IMq%i4f_P{ks8?26O*- zCMgbLBWG#3%_>VO1-DpZUI&Zeb!jPwuH{(G-G$A%A>al+COUd7jU*Omf~}r4^P+b!FFf z1)|rbaTX56u8M%;wDCD|j8;1a?$h~GTk_1V;Rs9@d(AvG;6ul2Q{T3W$ zJaEv`Hckv}InS(Z+wFF@T`S#PJ$`hy9>vvc-fSj4+j||Ye^?axBf6HYkJULQDI{;7 zb22%tAI3O<*TpDxQ()BXV#M=vLJx%foN-5of zSCL#>;Lw21TbpcnrVZg{GG^gAmkJ#y$?InvsQW2)VK^A$gJ@|0fC1Zw#jV<8T|hDU zx1LFg`%!VuJru2DIuX7Ym1VWFiq515o98WHD&)y3Eo7}wY-%ko$+LL&Z~m#}Q!zmi z&I?EyW=Xy%=`@b#w=v$x(=80}!!k+9xC)*BH_YXUx@i!hR{DPf{P14ILcr35x}7KU zSb?MeWG1P)gYAnN*8k#nzPu9q$s%~y=J*G_k%l83?E@$S#rw;DtG-kkSI9vU6gB;! z<%$3pkn7S|rp9u{+?>Mh)Mb&uuKmCm0i|#h-@))-KVJ4Dc6+P*0O%A0p@!777&g(y z(ZCo073+VTV3vUaNXk`v$O2GGAvrr3gQUy?A>r`mC9anQK zl~Scvy3|@%O6h_g=UmxOn_iQ@uQPhCXxpynx~@>v=yR!8{J3n}rsRr{eB3u>UlW?P zO`G^Zrrmito`Ce2gCLr>&gjeVMx*cPfBuRc_uPbO%4|T=jk8IG)FVF@Xf`G3{0)7d znm(_g&#viu8YyMR)g7}e3w@TE=UQq_CbNbfw{&mlST=NRJ9^)azH_##ciH8|rF{DN z=a~tnyNZzjQiIGe%W^F89IL9t`FtiGouA?d-}oAS^MkJivIlPTnK+xxQhelrO?Q7l zb5Q(^zxlUiUDuMv9(aZm8cXuov*+UZ^B3a9iwp7M;sP!%F2(9MyOAk7CU5J4F0QVdx+qH0fzV3QhjiVY(TiEuW*LxvCEbdwV~uWY=sGzGq-~m3(f3m8 z^+r8^exZK!;}6wmPoHV}jqzl;6lZ5=_~gkG@wKmh70=Gk(CI{!5?FVN_uqel|I(lQ zPs{}+E5e(wk0moHwtfX!yjI*ja#7Mivb^xK+7V-{=b zce_SBdGZ(@KRy@dXJ>GFvV`Sw2|Twq+nv~~H%Rse(mcWEFD|%=lar-bE*7w$IX|Dx zfSM?Zfhw(S!y~>P>JF;y>dZ?ngMz$NR3Zy(}7SlXjtB*hWME%)6`=2lyCMvUL zD6?kpF@JB?<5`{=y`H&7Ituzqu>E!NIcolmnp*5?(~#FYeh>fltj8Dg8L)jbt11K0 zA{~mnrfFr?IJ35qrm*MXbI|*DyBfE*gC zqsRD@-}$bdRWq`;3K~lljje^Cv4|gh{i|t$->vx0!JdYxpfM^gS1V%+eg5nPe){yO z_=|t}zZRc-{0XnKg7gRQYuJzZrN8iBWZ(VHPnM4!Jt`NAMae?zMnMCO{g#r38Yk72 ztQ)xJdEtE(?}O0m%yXUbV^nU|SmVd&yzN@!!?b4oH9idcFWl$KyjGdstjE;&T)rXq 
zh$fey2XTgq_^f1jt)_L8*=ey+cI|AHk_}Chs0q{9ry&=GeH0~W}L=?SD;E@txz&mNtLZ++*RrXSnDEZ_OcSJDi)&y)9n zANpIr^*fXC+h6{mo{BtMwYM zuCCZ0!q1;Q!w-JnKRNliKl4xhF=J<(ou05ALF>Z8tPNbagzQbECN8hY-nh6lw#L_g z^5?|f@=Eg4KmSL~TD4sAS~aJ!Jg4=lGV9QGvoqI3wu@M;*0@@&1ljU}v<%xRblmq9 zt#ACk*?h)plHl`^?K5UI`a+ATeE8w?$Nn#W^;h88=bzE~y(1mmnb;Bg4VRY}u)4Z} z&29^%56JFo%-?_R=l!?Tb*jHuXq&g9#^ZS!!}6XuxS6b)0+M z0b4z`Z|QXnwbsOpNVhUeIO*Vy9I1}HDY}4+E|cR-dP~=`qBoMAsVZtEUmNMZ!mTHH zQ#Uz{5&7vSp8@rB8Y5(1(aFd+Cwp1*IH&uCi{%2($=CTR>BFD?nV-U6{eOOatSim& zANi+$E-9TPNs>GW*>1Nnle!1cF3GN#lPzkQq)?Z5q6M>mCh z-}UeRec#%MeTG>4|9mS<4amR{nSq?nk1Tn<~>TRYAzvo^k@t@pCP zsoQ|2hUZ|%g1&$f8G=i&rxM%|_9+4(3V5Q3Sw+SSHMp<92ai$UC;=ZQS#BbA@4k@D`Br)&NWoDj1QHPZQmNz3FC5=G9T@*$>QjndKA#r@S~&)gw7%1nPHD^P_FbIKq}4e zBuSDaNpf@nK463PEWf>=a1Uc zv(qW2D_#w21ei3M99@s!mqA4FriYR_IRn15ZWxW|au{3*KXWabE62KC`+cdH zsVMYyUDL#BH77O}MWL6=h5q)pKX{u3w*2R~!dD`yCJ0Vt9@MX6-D;kDaq&F=?DMA? zCwp|A)oZNh7V~*|dU7&b(81g)-x=xN8J&KnQCD=&D$+MIP72`0$=3Wn)~5yx<&p&% zKuWLAT+fMu8G}seelrL`x}Gbtnj}J}H(1N~l* zug%K1D04k#v0z=Xtn22{#l_X;_kQn3UE6edO=b$+ z(KmlT!RMz5S12+14sm{fy;cS~F@RS)hD|4Dq4Koyv`ZSMAw|j z()ljz!!W?f^v)0Bgh(B&>5BAbN6NFaMIXQxc`)nlK4S{6-`e;y*wJ+}Fv@@d*uP=W z7BWzZj}2H#8NZ3_6!LH8-IDy9vy;>I$>YZzSuo@E+CM(T>XPK)O1uwfj(58{|M=ri zs}Dc^xVpN$nr(Nxip)c{X>&_xXx93W2+ivU%^3r{r8#5)ywo51K(a9foliV_{v0Ae zuLa?nLJ_uD1oz5TH<*+Sorl*La>LBpLUu5%RhiHZIBa`uj#>EHu+|2QwS!vld$`aE z%@Iu&y`Ifyn(TYMs~4J#d?ogoT0Jw|7wVt7TF!n=j@g`oKSMTrez{ub7Z)$`)oPWK z4Uv<7ntlHH^Tp}u33)NI1+B%4vM3j%Z5Lcnhy56B{Fva+lo5C|w*XS~fYL1JQKL(( zy*2<#*;o)4=V1W2G`6SKuFwd&LiM&ljZ4l%o3k!4=*Nuxp3EOJz|`ngI_~^2$B&E- z1@|8JT6(;r=NSaHkr`~b&vLn;Yog_eyd4_Y>&>>N`?Diop7Vpcs;oM4?iA@rjtMEU zQ#u->Qw+R59yF?0pOU?kBuVmcDqz~gP!bo z2#^ZPSE$zk!N-oc2~3`rO^8 ziv|a$z`>I^21vm$QUw9M1^riajw_0NU>0J`r=thgjjp1Drp9sbwu7`0oa9c58cstG z>M%**WbS09T$0-lUGCN%UMEKmbd*DemkY?dCQNth#-F`#viF=nMJ8 ziHtWEHE+ga2&nsYGbOwyNs=T$nHd9Jf ztJS*PY__G8P>~y7N{|(ur7++qgxsCOFf)LOhnvmI%&^LGZ+rrq$#Ib6?#}7K9Btc<0aDBuwCFmuq;a&M zak8lEhKYKpFx6u4wUh(J<0(w&+a@944BgRBoF_Y8{i$<<&%P9h{*cl`SZ!$PGy$3{guQmK{( 
zEq%YPZJ5TQX`8y?1Z++W;iT^_FY>m?i?%4sHn$0a^+X#dl7i?rTW=COzUk|d>E zG9TkJW1Rs70y**Zgw{H0$~5|^muO=Au6lB$>UhpGE0Ilj)O!c%^9fzaQ$*7u7D6!X@dNN9*;befsnn(E2i`$2-df#~Ho5 zdb`=yOe_`h76Mqxe;9_(i$B9B4h$-Jqm6>=Sm)cb*+ep<&cOhmgb2A>I4&&N1GUV# z!taD`90>=Q<3x1_q}!5SS(Dyb(|BCdpEhPaLDAAHTe3CjPuT8ugX+>rCw%cT-iRD* zH(p#`G4o8uzDeCQhIxdO>__WtIA*5Bx@F*nVT`IAb5zC2mRU}#PnPATN$zD%x*@ZA z4%nv&$4>8Y5`Gqrg?;RbKrk5Dfs>6-li77PfRoL!iW2=Du-QDex-76sxl9=YG$-%G z)_9PH8A=w;ujxX|EbxiJ&6cH>*6>jeygr*k524tD0WUFIq-Tw!>odLPwVO#wjelc$ zUaQ&vX$^CggvNlRxtEfYIZfqz30Y0XN=?pCgVXD^U$dtwyQjJLuzj8$WbN{3n7xh0 z)v4;#Db3SUvJX!iYBcI95zw|Crekslgn`& zX#0CWwqa7_^0(5Q#{zaaxx=2T&#M!?U1F}-Qa8BxKo?>Lwr`xDgvic-xhg3Cp|kS*Lh?4G%@sSLJA0Hc= z(iYWmu_EpnkSZ5e!_J4p zSkTTje>?`7CS92%Ns0!ck0|MZMB-U;d3IqyU89zzd~nh33qd{BseRJ6t0 zVZ{D44qE!0dKb2g^kQ5hnqcuc{AwV|P|5{Astg$OkelPDu)Xa!T=VP|mjw3JhDpdw&-Z{{`bx{Q@#iYKXUnpxPRU+4zr4Cc7gOH4Q>Zk{lDzT` zK$08F6q8hx>^|PE!Vygp?CJIc4@bXNtdgV8fdkuPFm)9@$ZBfepl7riP$gJ2hyzCr z7MYk$3VnPts>p>=0Y0vdV|`y_WI@yJDgFeMN^&PeA;v)hPX(IMxHUizWTzeuFlFp* z#lil(jWSlWb3dhwR9|~g}&MS_6^Y;U;z~uuS z+m7x`t7Jh9!IFAs+6^Zhfu!u7kPej%Y=^z>jyC>0Pzqv05%9>mF}5;C{qaTTu;qaJ zryV;g&eL!lJHl+zGm8`;nnWseKq;0YgQ+V`lH_fQPe|(`{iIfn-qngs6gCc0&wx1S#vq-`I7Q|4T}S?3Z8O_f8>ej?E($4H;dY?Ebd6JRR@2ar^J%*ijD z@DCO^6(zOUL>QYi=mAi!L4V8zF~EeYP0+;xq&VRaQHLZ#BmP)fHWktE#-vT2F;yHH z7`0JaZRw3$`d%CI5P9RyZ{Tgja5E43>j`5)rTEB$o8Mu6YMy5n7G+Js z%uFdgqvtr$mwL?%C}k9%Fn!O_bX1OUNWG5nbRbzV+MSAv21dP;OKQ)lJ#&3-$2~S^ z*ynZ2#oC!wigR{pUcJt1zs2|R3{r+T8?9O$fKaPQx6Y^ZE8Uct`OTGL&|BGdZPBz% zN#BjZQpIk^EEieflDIiBx}dw512<0EHpeF6&;p^*0ikT-c3+sr7NjyUSaZRSEk`?W z6azyz*;E>EDK!8GG-bVM0$U~lBj`vNCG2774X_g((>kk2qa`h1N_QQNX8Ol=ojK-& zY)-H)%d+JpL1v?(#l*+ZHwW6z=}8ZeYQ+#ldYx`edh_KpmaphmFi@HSH|7|{89o1) zeoJcl3H{=a=|_L;#@#4b=roi@3!E{9-{b&5<65N1+@7(pym{boG?v&1`$Q%Y0WL98 zi{_8Ipm)AU51t5+)bttGx8sCd54c7PC=4Kl{<04z?o4Uh>sX`?B@!Ka58{3iIt~GrDx7SDH_!ktv)7!pA-3ct)?}Ofk#2}_q;y|y}uHUL4sdT-Fj<% zAOl3soNvU(rZ{8|P$E?%U@1emgJvB!%>b+DZka0gS)Mm}Zj(D1kVyA+ z%l-|65$UzGKBM<%c2OVyo^kPql)U)x#oMGk+cYB0Ph?-`J=dO)7yTYN(u;l(b2Pa& 
zm%!NyW=QtH=tD}x0hKICly;<+W;8RyP;UeGc|K$NFiO9$PxG3=3DBnOrO7jP#RjI% zVT}QYe>AUPFpS9)L1tt_Y+X^^7p_S+7wRwAH=Qc7<3Uo3sYc()#EfjyTYNvO7?8s!myVsk>{t^Ae3@UHY_%Hn13UX3aOP)^44o_$6M6fX1 zZp|!HbiLfTV2%ex`63}PXbwcr^yh(dI_bGySzWDSkQA>g99wuqI`fS5sbP{L zU0iY@gP!_mq~iA&jfYH9mi@`*%VS?eWssfg*ZX+g$31^P^0Y&7s;CGE;$FsooHrSm zr1Gp~kW^KcbybyQTg#mUS#$9LW`3g9k&iybA{C!J234EK+s$@IuKbSH;+huLI+Iz8 zITtxlTyiRN;Sw+bz)?)%7}zOhg1RntF>RCKQKqTuoI)3K036gD2j&>(IO(h?iUqY( z`tzgRuGY?`@;*i*QZ7M~ByUX)F-ci{22oJZ1E=ut^oQ|wQFuK}xNKu!)Be6dc*U4O z3Svww_Mz4g)QlDZ@PCCk=RK+*)nCH^;S>^rweU0xo;}3@uncU9X%?EH@)zRa04S2& zb5TQ!C{)1F%!bo5NlB9$A1BQZ?IAJEwBu&&J!X{RU_xe@qM9NTyGl~azywFZI{l3V z(dKe*jz1V-kQ8D-@|(&p^d+|q*Z69s-8w?xv0`F%R)r5)q|g*;6c#9DhM4Y+eGt(m zlgE>Sm{j&7Lcq~Va5yA(tz~=QxF?j2b^Bobz!2axJMNO_4o#TMZ`vESTlzzu!PF#4 zYe15>Dc<4q6U{iEm$j;Qoo+hK6(~6gM3_W5wBRVK`=v9Fq=qD2ZiPEZ6 zo#JFf-wLgDVUmhX#dO;v1DOP7Hh~!;F*}vgY*(5-dOTp5MVG*Ds~U37zDr&TiH4?P zGa7&t#K-p34k+c1Wq6Ez;)`_04)kPtuss;UUu~27xfi;p?H2Am7Tw`$*l-G=$t*OT z%PizX!pv58Z+TPF^jzf~3QmMs*(O#315?hG>&wVbQWHU*sS1)QKJtL(Ct4ullKL|m zOz&}{<0tMppMXYl)%0`Wc2#^}0izW8Xv`dobG*#o#O>bK#~!iIcI-3d_anleQ~T8E zsLYM|oz)WtD8*A#n@qE|6Gc(HjqyJb{fuj-dD5s*DWB)^JkqfzW8KOaCKR$>^z7+gH}(v7*!O6+^?VI|?R!HKq_iX4aTlKx+t$0%!D_y-zRrGX2{rDP`6=`8Y}|Qd!`bQ;$b}?k`iLTCbeG_JKD3iJNRIjq-Z_qI>q%1 zN%!EguA9d+IfrbRq=4+6mcPHFYw( z(LIRh{LGK$xx-kaNka;T&*<5wyk^oLE9}>kjr$y%a;EqVdaX=!vz9%)08=+HlNFcb46xt(vx-QL7j*#RVRuSx*f(3cwvZn4Voz0A#<| zTc5=yXWHi-fC?Na89Za7@F;2w>oGVSpmf94gQsi(NzZuVK~j!Y%2nCRobRCrK8txA z68ibgK55_kKce?}09~bGbO5Q~Pk1*55eLU^J$JpGTFku5ps2hk>cTMKAg$fd(AzfT z|E%bpQ$EgGI1q&B3lQR@k(lJ6`#V`=H1n9CH5kvSy-(xjtFC>GhTv)MF8D<*FsykS zLEsJOa1DkQyg|xqgNy*TJpVBE!=`Qih&yvo_A;|a8wP3I zIi|H5Ets$pLx9xP9r91T4vaxkr;$%XI+k@Uo^qovY?|U?9=aGy*>1O)*k_V@%*CQe?JY?GGLux%Gf7429j2K^Ao?5lkDoLARAEnIqydp7^RMAvbxtq^tJ9@kYCKv!Nu4Y)))pon{djfF;b-Xvw zMQcNLb#=9En|4P=eNCTl3bi@L!(cmc#dI_a6ZP2+$k{GZ_p=XnF04ZOMjmx8E zaAaXjlH{$32TA!kZ3ghujoNKm50VOh4}foQ3{nm>6=@4Afl@v#IC?A~)NsMUv5QWl&ARdhsBI>dJtz9B_T|1bdeH_3pk1CW 
zZ+dd|$2`la&pVEP>C7bmU$}NjC+0*LZgcm0WbQur&Ad(w1N|XHl;R@~W{wY&&`^|U z0Z`}E7$Ef(*G_FBuy6@zX3lZ~L4X;Baf}DY1|HBbw;f9_0j?VCEJ0sXY40Wcr;pv7 z@CofEVGu+0Ri0*o6V2p$y~f32{x&SmkQRUR8JMT>8K71$Ja3^=J_lRd3j7Sl>XgcAQ%zkP9a+2IoRh0xL~ZVF zvQKjG0I6CWW}i)Z^7U#gKk)4sN6V6ic6^h*5?`ZR{tfECPQT+fxb7MBPFnNgI2 zuCd?1j*^k~s=%RL!7(c9IXIbHxM7GQ3V&h{H_b~d=scunfyYwG3A;)=;5FJdB!M^# zl!_}xPyOvC2|#0j)Uis+gMA?EC!WWldF5*E6YT$V_HTkgnb36xjJZ@ww^Fv9t!Aoq zMA4z+NgUr$pFY;3*ZiM{3{w6$dOJ+5KG=_~@FI`C3iY{tpd9zuQN3t)+kNPrzr$y4 z-JCH)sk?)9q#GF6$wkj(n%s9T$}`sEVajhdTOeN~r*T|5u=t5vW4S$YEgW;P2O8Q8 zK%SBRQnd_}YFY*+=g=>s{vWTadKxo3qTN1+b7lH%j}hYv_Q%1dfvK_YK+XQSAwWt* zU=Ta5Lm!IVL&T-#3pW4+b`%eg625SRD^)ovMr@Yj$d~afJ>iSQ2}zidZ$p1JOpj!Y zG%5>g_nO*;V{E$=*1wrKTPTm#?9TZ&IGVf3w|O`G9nT~c+sSA2tG`d}CurP#lZM_0 z)IW}RXMIgcpGOCA5z3qB6GT_+B#=u5aFh(yYWIQmqeNu0s)vj4;QVq8Wr5}{t$yoebF4}>v&MtLu1= z!WVCQWI$3#j}4O)lnjyzzWIcb?>81=OC3T^R!GZ-vF4A%2Lo*MfDL(2hy^`397KPo2IQY--PP49W?dB%cFJDP z+OucR=yuj7uI2$(T(pfteRNr%xx^B4DM!qTw=xbU;uXhY7{_8L*TqVPtW`auRPUx3 zaCITGY?f#FlD?~9l48<&(eJ73?ucELnQPU+v3PsmMC%ax&&}33ZjhcAmpp*g#M%`h*XC-{<%tL+(%i zk56OP8UC&YETs_|r6BL5C^8UAdiEI{ZUCZZm2xb701)MCRY2H zw(OSS=s=EvjFt+!0fBB-^h{EwS?VlTjm`djf=Mb#lD8`V+}GZpjJ*HsU->mp zb+24A8&e4VUX@P;Fd<*W0C4O9Q(`oi#Y13`@_MQVJQ|P`7`8<*1W2X%og_(;BuQ>y zaVANUB*{I{R(Lyob&Cpm=7pTd+bJblrLgHbQMWB@nnwIj|MUL}Pe1z{HtP-4bpu`3 z!e9F3|BQU;QcB3kqh&mX}icWR8RvPGeus|u(gO6+26ha6alYJaRM8#iXbkvAR0Vmkl z(KAf!Hy?(PF)(7I@Kvh)rYHF5g)b21&U4^e44aho5kb*wnN>=0 zHAlLiCrarPx{u4YY3I9LJu8ZBm1VNn)w^uF-N?;m4eRx#_|cDkFVh<7=T`J{tG1yZ zX|>+8ySicE6*7a)teW9sxxhz{9^qg6>wg^42u++3hU?YaonGX2p|Kt`WzAVpZCUiwpSv z4}K3mBfWcZc_r7I4eWM1_~r-SsJ{7)ub(cL%d@g9&S;*VWm2A9tyYidHSg`}`bpC^ zPmGRLM!%lWZ$7u+D4c1;x$T*q&jyN|8iH8Q4Pq}x^yOF&`(^-2mO;wlBG5d?I>WI? 
z4SL}LsUp^MBbu0rQ9Z`tIZ$gzAh9EcmJh_5K7U8|!z5gjj@;4xS&^PxF!M)U*VT5r zEiSLFa%JX-&MeTQ<@=6LpFUOp?SJ<_)J0ifSyp;JpX15NiGK3r3C`%4^DIfnVoo}9 zxtQb6{>)E_pZkM9g>QKse+VvF43aV+BRe;dlV%x|K>A92nSSA)pu6}1-OK`gl9G#* z+R1NITZIpgqT^HXV|}0(dq_^7egi>s3_((kbz)36Wq(-DKID>e0c@hJaG*hL0oPFi zu21;64U5!m*rcWyq~dcN9E&5{1Torw!Km=oe(^m1zBx00l0j0MjVRFQSZF*kQ~=!e zTSR+w9Ya1S94P|a&u<6}Q0SRKaIgMe_$V0enufAmQ9`=c#STa1Kpj?VGUmf*EgpXs z@xklHj@p{R8;0ef2Uz+T^{v*1WDnh5{@&mF2V)b%9RHnP`BnJwhabYT=g-AzwSvuV zOTJ7E+wBf6uP(_R-oVy8*Fe)W@U^dgrTo^nzDeus;=C-&N3_m9rgiqwX1je%>%`-_ zX&$$2`Z8jo-v~o`8O5)jRDD-*h{8_;c&#Er_jF^#&he)T#e?rfP?dY z43H9|HFw;mhG6hdCfazx_S?aGmG80FEy0KCuTg=7S}NH{rRt3Q6S}W;XV>c`-S-9E z|2h43)oQ)2==qXfUoabHmS@@1XP?UtKmLgQlbRD4$rkFGx^CC2b@%BfpLYN5Fa8I5 zHk;8py%1+-r}+N+@8h?={Y@;3g6tE?>$oV*I=&FU|GVGDsd<>@=3R`Bm@X7i^fm-N z&CEheUz{*h18NZ2S)}ncMEz_Bhf@-v5Z>HT@XFfzBEp zUpGE+?|90&J$h~sqZPy#@bPId7G%Z0J^6W8v@Wga9$nErx+2|qMgA83_K-7spb6S# znW%oZWES+T)}~ldLF-_Cd3h;+^EZAA{*8a@7j;>c$Ue{c`5Auw8$W^n!k_r#Q%ob` zpZNB-lOjrzB*~rKFZt8|-hasJO+hbUpr~goJa3!!jOhEkF{Y0Mo`n-En%8(<=%EuOG>2$N@-#<|=( zjm%EJB9pZxubzxC*p<25^afd&;}16L5%+CO5dCJ3)2*5CmGQiO^nqzsi7)^^xuQhg zdXF~2Y@`t)$X(Cl3Sxi#fKi|aMytdyk^<4jqetpP1df6OhO!vd?}2`;>8mD$ie{$5 znG8fzCTPTa@icDY4@z0GN$%uAFc|wGTCPLADK63B?I(1F%ItCGcIh1>=KxiB|9l7( zd$kk;s*}E=#E=o0T#Y12k|arzBuSDa4_@GU@8PJoF~}3$HjSVLtx^n@0;L0yt*^=) zXib>K^|Pm+7OT~|;Nk-N3P8^3 zdvzWCv2I%V;^Klggeztc5~xqfk31Kso(Q2gQf9T3vd*%+&hxx3in1xovZ<;{&1SQ1 zx$HWbWgYz{zHmp}e9@j1N4dKZg%Ib`c03@>Gofg_7-kbY&!kVpa}~2)iDhh9?*QTfJ^=}53?bgRY-5!=U-_yoUwd+v~ffH{ro)CjVGT-Rl=L{Mp z9m)E&T(399X1gi=(f9uG?Bh>9J!Ks`n^jK77AIZXp3!U0cUHHaH+6H?wC#d)@|@05 zH3oQU0CG}98Q9~rj6Mf2(rGbP35S8-o`(g(kK^OcC`LL{_Q(En484QK!4Hcg?1|UO zL$08)h98k|xd=9Tt{LQnOr3EOCCvfNHMD3PsMdf(JGI-@uv%~P=PxcR(u0dmb!TnU zKB}Aga=Y1FU0z*YYNdAb#k@UPF59!SvyOCUd;Un91d{{px(FZKJ{tJI6_#OH8r&a~ z`eT!5BOT@RmY?UQfU?^gv&EESWb>e6|M1>Nhy@s!2^LM+GfVUX6bD)eL5v>T-DwdR z2hgt5Jxj&tmmPw;`{%|$rZ<(CKb36MCw@&Q0AoN;iGEKq$^_Hsa=&$7x-Og%!YiXm zixEx=r{_it$buf)j`yX;52Fhkka|r(80Y@OOKyVYUfi@4>lHu`tIrfaoFr1>) 
zjwuX%H9|aS6ef9lYpNWze6-f)q;E1#2&c6(V}R^>v(7i0ZT9r@XVpiae0p|vdV024 z%+JUkJ|(;OjMjj&^=f^#*>2D3-R_Lm+7n)5nKg>m6=uSvb(?({V`KDl-H%=L)UqR> z)IjmafTM|g7~H49is{mVm*I6t{27QNz~=$~2QWsN@84aB4@N~c!*?C|E;#(WDH;~E z_!OZPt=DuPJE^!fs`(Z)&&a*hWWFlRU^e=l^2^Jsiu_CRcMKT(TxoShYxh;xwG4h| z--fH(H#xUFC*P)E|0Z6eRdlQmG)!yr-51NKAod2Pvl$qyWLT*~Qz~;yGQ@N~(HbBe zzWU%#4*isq)({b%V-bZ1OFP@e79wChgg7v3ovC4oSS_~7@ygjEna<`B-Z#?@XUsf| zpe3`iTC9{!o?|;VZfx^1h-jn}-`1KW#gGcu#E`C5m*wYApBERGSM1xA+wG2HNacJ! zKUpl6=hdt_C*S5g&x^Ax&$z(O`Fg!RBVXujyW1^E*UsA3fufWtXpk9SPWybAAvqvS z5lp(D)YxWTKj9D%g`%%MiX#Pw16TMajGl3EiCDK|^qh!v@f6q<_Y_bd__U}wchLru zFKI0*nz}8wayRF?+wAYt{nQMMBArMoF|SA`F8|T@e*28xw4w8Cgag+x0~w8~P38(R z3){yn@NgYO2Tzz9YWjXmZl5yFkVLK>Rs;nPCWI*Q3EP%tbH<&*o- zy&2sC1W^?od%XjuEOg|!2CGVog<_a$W_y)^G)-6lMH2-=lJTD8j+$_X(9Kimk%@&o zp6@UbJQYKb$vw1iHG`yReltj_$#c1@iU5-G&ijJ~Lb?E89Q?Dtt`3^jA)VwS=@zN9b!&C)~0!Lz)u+)R2EGWuVDz&$v zcN{!Os>tLaCn~KEN$!B9JT*4avA>0~q@st#iwGbk!~6j;&B}J4A{ud0jGx2(KDnLU zIH41hBuSDaNs=ThLUT<}p`HZ<}Ve+it53 zwcU0%Yul#UY&Ofh$QOB*FS9&bWHMXOS(dxHIcZvM?Md5p%dYF@U8gFgx=fj57sb8g z1UYm7DNlcGwgqkSb12#Z!^&U7gun1aj1z$&)Aio1948C9_y?kxuHiDlxDtAd;vOI- zGNWB+1LC6bQ9+t%?=VC{Y%&kH#4iC#@p}saU4>YyPHEA!UAEgbWuE6VjcS2fpQ2Xh zT6a&h>elpzO;HrPvMje%Rqf`p`EI!syDZC?Wu>7p(9mbp)EfFQUyR5%&y=83?Bsp~ z6*$ARsT-s@Cse1o$|ntNpX4p14!|LsXs&&50mf72U`3AsB)*RbVFbfM;E1lcMY*09_yL$2B#l>R1CLNpS)UqY( zSDLFQqh$!$UUmMgC31|a52X!Z9oRL~EXgm~R=62BjYY89gd z!&r|-nSddQ5o6Fv%x=+tN=R`$v`+%V+Hrf!0NC_Z8tE6X(1uIc`*+NNI1Om2#<*wKC6(f!_O)H}L3U#M-|e1dg6R7bm^ zlQ(wiweOCy8uACZ+Q`Bu2pb)xZEtM^gNDPfX8^zqVlpu21wsbwGf@K^d5`sRu)W}u zoc)DCT*Wzz?ja)p^gx}kOYTCp4!jc?DTk7Qra?>7>@YW0GB;+P8Ui85@52$!wa>{7 z>|K4@ELSV_docDyVX8upz3O>7aN2y`o7NuG1Vcs#5C5=sYhOHe2=eTgpIE0rj1a-T zJS7GVVo%G3xqUhW|56Nwu}SxiF(hgt#iYL|iJ4?PIAFNGI)*xQ$E3?k(&@BDa#IX; znmv2=e7W83PRKT1R%N*)TX{)8`Lb?koo&ooYnt_Ax{8zbUE;ni^x$zu@yHjWy(+_YwZ*f?RV#1G#uiEN( z18SDgs@xI#Fq1GNq;%UDc1H%YRaaN5IbDYVNKspBtv2-gQR~)OCU;ytpZuHMY*y9t z`J5i-^?~);=L*HZDK&B+U&X&p@`gnX$G*26n0ZMulaqEfzwQA<((|45#aJS|bPukD 
zIFDw}jmL;`sK@wYzn1ijgcx!)!jct1XFM6Cx2$P-EQZb?RL3ZuTN;s zo^bI8@^Mb`EaRBbl6==?Lwc8wySAORohtad+27OswrH&bpCO1ty34`W#z=AXEO%l6 zm>QaRy^jPV^Bu)RZp?&LZ?tRAXgXPRIDY@N+{-?-Go*cjuW4_J2rxLVfvzGIXX-3> zt;(49og}9=lM;YK!{o_%=4;+>v ziZ3gQxD6n^{i{4b0u1*(TpS~2a2*tikAk4|umg~_l-ah(i}h?ayCl7SDJ-bR=g_`X zj{I`D(9TvKOr2wNC4siBgN|*pW2a-QW81cEqhs54(y?u`Fw)t|-cz4|UtNz!h zTAb^f-@DWeG3#GzXIc_+oM~L#*|(iDaW;T3}M&%-te94jLxQpLHaLn8rT|N0;5u(b0o#?x`^= zkX`Wq1eB3h8M6F@WgK``-*)EaZIn2V@B3^x(hL{fku}VlYhxvb(EKQzlm)l?$Lh7u z1D-G8)cbv3u!IwO!xccH?tUFl5;=12y$-JHGmD9A7IrpPVBZLTN}2?yjMu(F1SL^C zgZX!%aI@s=+9Qi10V@tKqnJ-Y=z=}$FSNr?Mg3#PP+`e=n+Lnlb5!u1(`!{9?Y^&^ zlihPn?vF(rNwK^quB#v2^twStu@38e*GN$DtpA(r=CE{J$+|4B68!f(UL)v)duc^-!aYkuKxq#1>6f*XeTw=XfVEeonUZBzyiMq)cZB{u1IPzdl@pl~MjE7dqhn*oFb6Y&aOH zfAAFsupgQJ`YH^f(l9YDiHl1T7?BK(*6C$~Nn!Ey-%zxU*3X&>a6QFoVk&!&X5_C)Y=jevvskX!W%z0&NnjRRn) zn23KbW0L)0g~A~=-3w!Hp8`SiMJk|a{e#6h;Ld`!Bk_`w^=rTHg5kr>ADtNw{&8N+ z;qV9B2+RdIW(A%ez~#SyDN zF!P~W&G}zZwYhW_S%uR-IIfCNx``eaO~{`V703UsS1|lATG(5&n_OXfTAg8sjD}u_2jk;7!6|QKCDx5N{26AUqN5 z#wh0~m!$mX2>OzF1lfx5;VWBqM25D_UMNp!(!ClB+-7s87%u)}*cpYjkSi$#pHUn% z)4*!z#{>Qvl!sXpoQn)ww0X(j5A8LFehCl_R5VycI$0bMU=|!=J?sxhBz6r+*u%xF zXsw7fh$CQW@FESTdF2xyQbfh^azp%~zk%aLh4H&x4*5VKreq|4;i%Xa05KXjhM1S6$xqqo z!(f@~tvmU;N_dvBTv=ED^R4qylrglgwEigKp~##Ffh!!F+)_7Db^(*s!9zBCJ}T}0 zmBs5MHMUjMEjgp z=r%xTGIDyQ4$5rYefR!@8TO`9yQl_@Y2e-Dp^+cCSA6npLT~dYKNr_cJp)qW-kly5 zy$o|rscL(&t>j8Y?C>wdz!F5ztWu3)y(^1AzF;K!pWQedI{3z&fQlurKdw2A@{YSU z*-H0<_uqROig70|@^Mg#qI!ntsD2hW871E~XREEzOFbgZT)NQ~U??p90!ZYyexi3b ziXi|uVFD5L;H|G@p9{uDoe7?*e`k|fK}vc|g`$3v#Ud)Y$x)7e5|6nWF)6(`aRdC# zc0y^uFH!Uw5!+)1`GOrDXNPb=2_e-jJEORRF1U2ekb}0-JyH_3IhXaB9PGwRK?{tLv*f%ceW45Nq!(!;{Gmw}wR$7ciF-jG)Z^sJ=YwvPe_uClcK_;lcL@Jb(YfacRuCC=5@NL6!#lE!EiR?NyO9p+Y{c? 
z+}*}aVV{+3WCi^2|F#^GgpoWwz`UyEgO7OAV&KKF< zw=444K7(`=+2@5S?*CjjQQL)iZp~0q7w^0?FiH++d@{Xlz5KIz*zpFf_R;cDz8zV{ z$nYtz2F65AX*pwQYUD*p3msY~_b%Z21ksKANT#T3P;u1#LO`R?!yPXq@Qd5wX_e#> zly?^y(S5OI75bkh>?o~jaQnxS-bDw|y->AM?B-T?9XtFV_R%q(K_mdp1Y zs*32=CHGoIM==W+ISzl^&|F1MeT_ zRP&5#t29KO^D^+^){YHUT)es+*9U&~s1erF7*GL2QPG2)4Js9ZO*M?-C!N0oHz0%* z>f(&&I<$8{%NS<1Y)2icnj1Fu!v!z{nvEQ{Qa}^*aqF;(1>(9>$yorTfLE+#fjORQ z4~3Cr0ztl**4(`+d9(W#N940Gbu7Mf^24;(+Qnc-qM%reXi<_LaJH2T4v}H7*h5~H z3sL+K&U@!=f&+I!dlbr>VaFpC_QI`;kE*r_Y5|pQccl2)g2c?+nSw|<`4Fn-B>>^ZsdjY@K@&!2{v&TPY`HZ|mIYWuNe0LkJb&wQg#no4twTzfD zCkTJA5$X>PfEJ2Bw_X3Z3Ru@QyhJdjS!;#FKLk-)|3#lc3R7sFn7DVrB1x}vew+|R z#3#IH9t(PzU2_#}bL*U0q$5*C2A7RiCEYMykw)Pc!eBSV*P*AauX?)E*m2%he{_}; zks!+gCORxi_;~GDe^X~aApat%{OVpEu*6Y0iGH8hu$k@vjc()I~YlK&z@sTSlALc?7DI%TrY-P>YLh2e%3?r6=nx%o0hp+gtvCO+1W`-`?IIkU>hm@V;nIuB}zXx!-uU z*mbuZa}Nh>*r>Ojb4>0CG!u}C=vP4zhmEZ-Bd1kp{_wly1* z)GVC?;LJz`sXwVCs^g-iT+uq+r|LQj-@btX5VJxGOOo9hpW z2(1hV4X%L2A1W~)_ScZGS=G-5opJ;yU?fHoMj;S$vgKV8!?nG%Vhx3rEZco@Rp7={4I3a0r9` zW)(X7i$BSvQ|UAtOvV$*r5~Bs>yIYf$8GB^wO7WfAOs_^e7>FUEH}z7OnXPAr8F6i z+0t@sJuG>^GxtWa3)LA45ZWVRV}Wvk~tg4s$I>cc5>c5dBC;rdi+hazReo&7(26+W8{EN%JH@R?*b39-EiB6 zX00qINaX=>pKVFMXE-vb>#k_luUbR0(w3*p%BaS*Y+~U21JWnJN<|A-+)sY2*x}or zLwxpTio>&3;OzKLY>X7~2a7He2zb?N!QWHp9mdjFkmXUSc=R_-I|?7s?Kd`pk8dM& zy@ijJjVU3Z{XFgO=0B>DTIr>T3()J9P#*iv>sX#p^jPWWn#klKbQaght$&JK@|A!< z3fY;*h$5O{jlq-gu4NJo}sA9Q_&yE|D?v;#W%mJu-T7?<^3!=jG47wZcn?5TS(>3nt#qy0CGzmE`pg9&4i9DZ+!^6ifAS#q&gJ~}+H zF|Q?qB=bCnv65t@b6WG5K4fCp*hpo}5&KIt0xkq5Bo-^C0Z2PUiaZCNa()!;xi4di zU@sRcp>izh&8LH`hk?MoYEc(}GY57s3O^;yUr<6AN5>;TnJM``%+Z}Ka#vy-74HTt zdk)R?Ub=lf67IHu_LWQP!P#{iB(jtvpruDVzjxZ(AOQW4&Bb@iz527b6^A}cm3 zKBi>Jr~1%GBWrb_pnQQM^_n$-p!LvkV{sk(R4H7Ec!UU2xm<-4S@=+ zV_&Mbj?|e!K8v==yN$)*;~wyYn&evI;g{*)=j9wJ>#Oe{{*7rxqKjL4dM9VFpxhcf zbX_AJy{f79R1cx0?xD#>L1oTX5t-Hr?kLdR7?a<2A-_g<0+tqd0FQ=c+TYJ>M>~rnx467jVJ2j=KCS5(6Dx!y} zrkb*i9yzs;KNf?N4P+b6P96>#^NI}GT(2oW~dexNO2rWS$RB)Ba6-W6+y} 
zDb^UDy>cQ%K8L@Kt~x>F*xQ;*_qc9vh2x@=JBDg7#{Ri436qwT^5{1ZT|v z(g6AoziTFWoac!@!+ISzj`mTNa6w|po$iSSpy0$Jv&pK>9R{eZX$@=WTF`7MWv(|; zr7@E|uVXpY|86r4L2)5+p3V(h7xMf+x!%*hmi%=W>P|2xMO!y*I^^|wPd_jG@AS_; z4+Op_RZ>mUf$cPr!t|JM%{mOq-5&slAD_(ZKX(OQzg04G>gh2!WsW~2`Z@jPgrmd) z&?neEd=O{m0*i`aBNY?=`3^Rc|7=8X6R<}xpZT27|2VsD{kjwg7~IXzFkO1qc_o}& za;o2~p1q~xq0IA^EW1eL9XQ?u9FMZ2xdn{$1V1T@wF4_Yma<5hai zLw&BgNMZ#cg?OQfmki%sgWFXh;eCj`rkD2h6*=a0SFhY&rZ05G zTJQ8qsuu3u%d4b5j_f}vT4bkVsOew2-MaN!zd3&vUw`!9mL3=61r*Hz^tRT`n)Zuk zjX@7qOLw0>K-&gn;vc(o@!YFn!eV0b1$cOA-rs+}No7xW%UM;Z78D#FwhlxP46dlS z)pi@2-`Ne{hz7*=n*A&@nLohU{Oe_5o0QQMX#Qch$!v6!XP#z+e9<0M;N36qiLJM; zM@Biw=rXAitdF9Lg2;PSZZa=mJ_rPay0^Lq$bI<`dmuO-<2I#0jA_LCH#}M8`SWv? zULU6EmkT@rzo6RMJ*y@w7i|}?*=oN&{onH%{4dkKR*%kYI!SsvNxnMAo3`SwwtBif zy*XG(A86EWSU62pQL;X-asPV3isp6~ob zU8ui&s^$CVlYTa89OWAYX&9&bwP1k3M-%2vFk!9c3K@kMwan-xlX^Ar7V)m2U=h0hW5gqNUltO>wk!vQ+zGpoViNDM`*$M9$#!aw$>%zU3s`;W zwrE|pzy95R#S@c_w3ax)lUAeAM2CB}_P?-)h2ldz5x6t|Ttdetmrgy`ZuEbZ|H?ph z-l}69Q>_0*GVJrMl2xz9oiwOOH4=u(Kk`(r1eG^3tR2AqRWRnM1CrkzKv2pU>0&-Y z5$=fSKx~VRUW^~u4ZVi2nn!4=-#vSGZrHfFrKhL&D5YcoQp@% zR~gfVxB`%s)p=;7$RucGt+M`8N@?`8JdRq5Ksg~Yu;d86bN>o$p8*tZKum8%OF>WQ zjy!6uGW*kzZ4aT_Pt4DEWrYGt##bHe)rQa zn+z?(Bq4D*yj4f{ROrS5Tb=<#14h}fXQ-l1ZMn3YG(3i)BuW$P(kw1hDK&@XAtf% zoEDbf4JJf|Ii_&9J*$*C7Kw6ZFRSzM?9p0$cifwhJ)5;|?f=X<-m8*_H2R%ge1qeV ztG2=82@8M(_#w+q#f296Tr-DaV;T#gJW+;5Av5e(r-;(!n5OugY_~F#M%{bYh zqvqOelWEqhLB|k9+B@O2-qf?GrLVZ^>+0|C|5yjexp05GD5zF*-9(Rp5Od`l>`}Hs ziIP7~02}^}%C-0#Vqc7eAk?Ii2!$=x#MOMAVkKNoB()WcWS5yq-bpF1uENo0ze!+D zZrr_3@o0jOGJLw!Bb)>F0>lY_+(=tuqJid@1*UYVLw1js4F>U6bHw#Lut9(~`Ov6U zfbLk!KmiRL3CsQI=}f>TQ(A4e=*nTZ(x)p$)z#J6`&(6Vw&~|D`RSpZ^3Y&++GV@G z@x0oy;PS^3CUU6aKAX4*wPYD4m`~!})WJJ68}{R^q3Fkfol&c-py zQIB)`t2m68n-{^LSRW!3Yxcepfud6&eHV_V=?Xjjd zx4bqkhIS^)?ATYXW;37>v6JO?7nnS1^FpKz)2=o!U*87WV)TyxDkf%10wx6Vb zgP@aLsFYy>t1Qvlz-Q@!*kW1(<1au)-BrkQa>7Ugy$b5k6H-e#DCva!qm$wGJ=zHs zvi$1AGgLRv?&_H3sp;a_^IIhXH4!{PKYf_zcS~J6oznjhbAtP+APjZ&OBkDv?NQL> 
ztnsj~H4Rj3WZU7dXbmk)diE567`+O}0Kv5jsm|*`4iD{W(9J_-Y;Yg)4UEcs;t&(@ z&xY~mwP|g2$3j2rp~!oM)kMLH;l8X23)_tP4w&E14^2%qYw7!VoPk)vXu1q<%A}ri z-y!jle%1lO5>iDO&L;sc5Yp?b>kt2}YuVVw>klzDG{s@uoGq|K=dxB`$Nut)Xc_&jR%?l2y{360D!o~(eNh=4 z@!f6*sA$$?WmcL|Nir*2J};FOGw&L>QxD z6h!&X88~~FY5BKay^Za+gyw=^&9upSh0GbbY$Dt`ql+S)`sC8yir<5JTK@*+(|7yk z9hV9>O38lDKBQ@QVL7ssa6_xlfGk>XI!Bq%d{V~*voH=CD+9@-{3FHx$9>d16O+hV z96`F!!2~<*D|}ySpb~3EU9gi=L;9JeTyiAtIKKm)q|)3#F=Tu){g+0c3j7Wy9dchI z(LoV7c3(Uu$Qlm_c#w_>Rf)>tsEe<4PFQaR5$N~H8?Cs07q7+XPllD)%VAYXn$q{(E`C=B|D9{`v%_^V%l$Q;SWQ=?IdiBw^J?6_ zSaElE&kEbL^X0^6H`T0(8s6)oifrVawVa%BP0eBU&i!~RrKxiL9=bvTSY?soe`Oi6 zh}GG=S*bjDbgA(z6aIFd%ilrw=dV;m5R}D@G->=x%O4&b$yvb-{}7#_^v?vs_21$- zho!ui54m^K07zSa-u}Y71c)MVu%6U*X)2y4G|7174M+{y< zCqaW@)MGmus7Pj1K4lcB2!b8nksl;p3qYDxjqMo>Gxtgp{d(hp2URM2pG&eBls)Mf zgrHTyPJ!JrZne!cd%M1NFjal2IsP;Su&Yr@X>T zL4ap+$X2JiW{f5&xcLb1G$MHMB+}am#-yI%%1s=IVcLRvV2i!IKf(= zN7!J;hUw-^U%gAGH;$;V=kRGkn&uv?Z(4OdbUw+kAGgr9!+;f5|*uO%wu(&wON)Z%8J^{>Sw4{!iTfYHXLBZ#o{yfC`A6* zMJ60#UfIQU_J|0ceLWAnAl-%~CX6*aP|sep;Xz8y#Cs}Z2jT1Pr-DouoF(#rWdn_; zF)@t|#DgMoI^_&tioEM(kRp{tk(TAu0`e^FZzGks4~E+}e3x#}J4BnBs%@joBzdU4 zlw~{mZCx{yF&5hGiZk`}Pvqcgvl=ALwpx(l8KWm9cwRH5v+nQ#Rw9NcUjC~9oieQz z*-i^>H3*kV#n`+Bb-%<)Gb)e~<2V9*2;hW|h1cuuG27lQ1o*+Wu@Ra0MrEdQNMsp_ zvT3`eM}+vt{DIbRLK@K!5=6<5a@8Tkqu1|NBJaB zgghYD>v{8&8Tb2vw^;TU{J22IW*<%)bwyn>kQYybocKv2+~B z5mu02nOhDMee8`{j^f$}y-Yu@M=r8D+Sf0h7^z!5C07ut5_*l;ZbdY8&)wfxCLCv| zMoD@Bbm{T=&Zs!KUIf2==T1PJ?v+5Hf3c^hI>sU&mH*rqOuUdlUQ%|tKe^_7d;!uPhiGGG?+l*sIL9Dc-9lWKZ7g%IboOwZrY;utg zuHAb7_7(J4sQbY-rAipcTgk~b_q+4=I2-7b9zQc`=rCVa6LwKvm@I(ZRC(pCvWDn5 zQ2X`5ory{kr=-lSS&M&7zBYAo=Jh-Gep9;eAua8qsPZ{djGH5PleS(`2R%t z+0>_3Vyn`*aK9xKBHAh8x^TK7U{mB>+D)W&fxJ@nv#*9V@|839{u67!U9U&lBwrd*2+h`3R|mJSb{wMH_Cc! zdgqWyXLp`RuRUu6AcZQmWDvnh_?6LqmrBNM{BQGg!pC&$8bSGM@@^BLCD8|Ob&2FA z$f?JfE+BclrY3xa z>hM66j@(y6>EP0xq?9s|g1CeU+Qh<5#`%BTjqG{$^eqPz&oMdArLz-P;5sVKt7XCd z61K&TI~-tI9k4L(PxzTiXyi0EYsHPeWN`0M=at{ZEv%mYKTC{My0uYrj|0EeDOC2R zY{&Iob~APR%%kec)5cqc+y_~@u*R65iG4? 
z=2kYRX*AHEg%)2C`9>A77;A)YU5B)S<7OHB%r11S-PbL400!|TI?acwnyhmLRngK$ zh{oE57E?^u^|bF=U*?Q2=LzcdEp}v3B-a#aLqfv8x8UxL&39G%d=%4P!t^OeJHX@t z1DCR%IqO`z4sqYF07{&Mli&r#0NiT*1Yvf9dBT)Bd_`}H`Pdh;2Pt4N+68Qd)KO^E zT?9Z*umi_El6K#oW~!_Az1mC|du1aE zEnfV*6D~zITREqpm2&6mT4R@0>#pr_4}R0dKnMG|~xmM!N-S!tk3(UUm8!4K_<3U}gZ%5s(#Q zztFDx&l%HI_rVjMR?<;sBe*!ZtR{lQiSbw~aFy+Sl{%MwYDdxs%c?j6ZJ+N_Yv(vV ziS7ktf$Q-`VGK$!cQ48ak+3WwtL&5VLz%Oi7J$CE-xVg{wBTo$8cJ4Fe1 z68=}of1YtpB&5=Bko5SvjX)Za>W^<+u(tRRv24KjGu)_;4L^aA8bt0X%alJ*yhfwI zg4{0d4nO)1y-i1u-5dn=dF4%s7w)6Jnp(FbrX~gJz`yqCZIEqva^=9fmb$gw44HO# z&iWOPHoh?teRLU@zOy##T32p|ExS`c%#aI`^yXhvYEVOI;egNYTue^KRYO+4XHVf4 zPX~0Fkv~3)Ws9qvjQfNrZyn5qK5T{t|6(B5b%dsOKzFqCHW%R_v;R%Pey?x(+Ls+f z0Uu87?}ndDx@*i{7^I_sYt-=@A2GWfyDGp0IES!UTXi+ng z%jjt#B~uPX0L`tU35|)1fj;h%vsD^g4vlNPFW9v!)=-Ii9_epO*kr?}r$Dg`j6h%@ zE?E4cP7d+=n{uC0+!6=j&w`8|%=dr@p!x)yr`|a@GDZ16?#Xr0T%a_vNENl+u26EXWx-HOz82HWQP4S0?4`Ki4#G(Qex^7eq8bj z((8}q&0kl6mLjLG#B#2oxlUA*f%HoV5Qzz5*bo3SqWdDCif({FDq$HcBWFeX^aH|< z3Sfhr_>w)^88pO{9)DL65&uvjW(x_A@SGM3eIMIv0LW(U{jRQ8XEzw zo91~`G!*W3r-(dG_3EbKNqdvEnTu z=jd3E3oQYKXY1L4?6%gAE_u={-8=4#?=d@!2mSYFvrA=RzZ}Mj;S-21a7~81bh}&d1HHXaUmht!e zSRv11+tOn%CNoylvPG=ED?xbB2-XSkJN<3bzwdWoawnE{~ z>UwkpnZ|8_^(=zb+`8SpRAtO1uTXDos)Kh-2 zGu`n^`$Glf;*#bU&5}Kni(QK*Ks}k;ty);Mqn(sn&iBDs<_!#&C)^<9(^rh^6k9=x z{bTq{yg#!@r7ZB+k?B;2H43MXkx2MfKdRJJ>VcvR3O+nJ+f0;^#6JwfH{##ri(iK6 zYgNnHXMCXQV;9%+lH&|QsxX_ZeE`6D5%ol&FJ_ao_z0Mr9X6m={Y!@*e?i80~7WXJw;E za%+EqHR@M_aDr&RNwv^dr~-(d9>9g39fvA#%^mzeOy1dJa*nR#F`SEg8)P>c9txl& za2|zSX%uMI7LGJqzrURsL%sRO%o*OQlx$}-Kh0F2%&h{URL?ANMYf#5jCZW7TKz_z#)?^RA5^Ck33}OPQ4-lZcIMBHire;O`6w{ks1SHG2Nx?xJ8-Y5_euA zpOH^f0ve2wM!h-=gj7&lHywCYQi{2EAaizHK-(BA%I?y^;8P)zi-h3hniub&9p{U0s)?|zQ_CPTwtNvEHgnS zwJkQoxlNSbkbo*ncGI%Blswgfz^FxNLO@>@_kadRG{<5-=^^FfpjCtpv?Z4rc@cG6 zA+?9Uyz|wjSF=&Pis)w+sEljs$>Ztm`*$exr^ z{quRb-O()BzS`?x9T(g46 zC$m&CRum1Fpr&Q(v%T(4M5j_qGQz0PlcX8E21hlZS%bPH5yepiDaOw?)fj4$K#AjG zA>Fx?Ar}>6G_40APA+yqN)*1Fq578cs{Qnic-5$}9gAGu5rkd9CKGkn0#7pdrY56^ 
zL}EFQ8aPqigT4;`)KtX7b)3Uv4bSJjSPOtfF6o(MdI^7ks374{Om-UnHc10R2Im~% z-Ff#nAJd$+XNPl>iLrSUlbi@Bd94R&5r#H%>pM72@{N}xVu*rZrzdaDb}6K(H$Pf6 z4fXB(6Dynrxt`j;^V32>2d&aP5e$HGSG{is52l50ykt0RtivZQSn*$R(to4;HiWm- zFFfOq!%)l>gE4^X_hhl9=&!9ltL$B2%^6mxq?$-6d{Onm!TO<8m@O)E8_QwASV~ znGLpSQB zn##K;I!?LDA?3*Mmx@bQ$a#sLuo+BW2xjD>`@x2oY~H=C2!N|+Z5r+Q!5sy)4)fr} zi)Hs2c}=$NqCJn5G}3mYUF#Q*>eT`B`YfdY{(U3L%r~h?XRci3Y}ThvLScPNJ6NCV zD`|)1+QMI5TCYypwGiW}L=t>5dT8#Gic96A0fcA{BR_74v*0$boZi+#?Hy!ScCd$E zIrfiw@x4MxA6?CgR+JDkRvSV=zwjn}$g%+aTEY zyy6Oc9H`RB5k=TC9lNDi!8&Og1yFlSBMVDTpwV8B^1*6Msz}06SSiVMgOUO6_DLq^ znLky&XP^N}{~3c`(HoPNE#g#mN+_h%l41DSJrgL#dys@&qo)6GAXJbZvdX&i5p~$1vv%u9-LCyZqtxa2GxMjHhb-X9@{Q1Q>8_Mnu;Q+= zdIl+MP*hVDPfD}Nwqxazy;q(H{HZcJdm+791l@rzFa)mFI(%3xX$uQ|A0C)>9ovvk z8<%c~J;|B(IOQMOnfsH5mXcauD4!Fe@k31Hr-0FYs~oyfsD<#=%aW( z#D{a;kO4C}%c&fzDQc%YROW3|9%?yvl%%SfMklAu)< zl%fBud_31yV!VU_J&;2DnK=liN4XUO`J!=XBL#=pf}AGLrJ;> zp&eams(tIWYtT_=jbO;?`YzlXyh~P`mEVJ(=i%cg&UJGB)3k4!b+f1>8dVhACSYap z&T%LVhKi7Nm=Gm<2juhdwcx$guF^j3gXI9P{2I`$f>6bVThaabEu+2PELlTgOG6l z1=k$=I#LeW@QpEY**745Y#>s1%_eF$H}q#Qlw$ff^Gb`toBi7-bm*qN%;AUh!?9pG zO~_>AX|WpAY>O38?W&;Hw&rA)UxO8XhEUh%^Bd6tPbi*nz^*-qr-Axbj&lH-1hwc0 zhXg{6UL-2@*q<#T2Bt$_QiFwfDiGGDf7+KwN7NjGEhb4@_44U!~|< zc~VHmaN(@L*M@w}-;d?(XYIA=X}LMOf5++g?izh zL4gJ+u}w`{Og&XcDG)>g*0-DBMQ@>$a(;$PKg>_0=BfMOY%o;+MvWiGRY5+_BuvD) zfF${(-!A90!%boRvPmtIeSLvwI=5GOvr4AFVCJ5`>Wr^Sx`ME}|NMM|Q@Pvo?&w?g zGqVl&+CuUliwYy}mQOL&Y}R0yknersdsvhAr-6x9D4Cerr@lcO_swmqiw3{&C@u|1 zIJ@^G9C}iRwF9uLIR%D=K|R~vb_%@Co4-!0>zlT^Kdt$-u7ovcEyvuHt+>$W&%^l2 zZXYC7*60&W+*VdsPFC76oOSGm3UAq)WRZ#092LM5sz&drv*oi$%zj6|I(gWyWCbZEm~}D52PzpMgns?Z3p+q- z&!`h=JZYiKxnPY!;~ti67QmlqwYQ%j`n(PcyKtMFCOgN|od410_;x42YyCMd@Isk$ zi>G;!9J#&d`c*K>3ChR+>N1hAOM-#z>xvQX-m^|&Jml%ma{){EFz1!uzwka$@5~BC z;Xwq{3MxJDCN^R{9?&#+^YG1OWgm9MC!|65+ka*SX9+f%D04k;9#xya)`+&qiWkb4 zBVXbCp$|oha5DR`{$BbYU|85KI>Ou6KI6yi+KPVf^!T46*M;;@u?fY zF$2};Sv(B6TD=?SDfnSd&TmFsth-D&UpWqa1Ih^>0Sz&o!wjh8-(HSbp5^UB*{HwWZwvB_tSjs3fsIjH^ptSh?p|AT% 
zzsJ-0d9?SAr&?}cC)j%_iCQSt{2BO}wjF-N{fGGga;!7AqOVj> zFj`uRYO(%ancGL_fqmi~h3-sOU~`Mp_?(w7KN0RXiMH(?ue6%w_br9!)jP&w9UA?+-DTtzgN6A-#q1A|qwxjp{^6-!lVKh~6cyX_ z@UL{3p=|qIn$0(I(JgcR65p z6;yy?acYATv{5ckQ`2)zwxwAVVX&AVb0TB6r@XTbSf05lVIMu47ni%2M`raJ40chx z+w-4>otx)+Jw7iEAEy{B2+r+t4YY35?%s)fv7b@h(1q5-AC+)>!3C^5>Vn6r-DE)v_<>8#rOZ*yjU|A(U4PXkwH=f zWm|x4(~RDN>qBPjgS8j;l|H+y_kR>iar^=`|v{U6|Z94MBrowM1I2c&s}|LNX+8r4U&?9H|Bt{IJG<>#0>^MIN@N~BMzM>w_aUH~}G zi2OhIm!>Z+B9F$|Ugj_k8P7T_1uoVvl_RAEL}v`q^%Q5E>H!z*VS|RcOAm#%0DSRf zi9ak!1bfvp1p>A~nN;kRYtnxBQA(wVW=DxT82$MnhPe*_EMG7c0o-ue;=MPIrw?GC z3u?^VKZpP+&WialX>cKY^)l~0;QwRlEZE|Tnsf^^?(QBOf=lD>!7aE;@Zb)?gS)%C zyGw8n5Zv8e8omA9nYnjpOs z*5-lm`}@o&o^aJfg#p)oSbK@O1~uc3-`}QY$O?84s&8v$0~n$qb6XqyKOxe_Wmm^Qz>sJ z83N!TYC)Bu$giIt>VDmHyCLtT2-JfCv7=wuGM}A;!K==wj=~p9q>?S{tN!CaIkgib9g6c|-WpQ-}wSPnV#>oTz4aWxtFJG(o(-`;424}|%) z)6~}G5F5u7nf++^r*6}ejkDYaL#YLodz|3@m}LzI+m}5BrhbcpLBNO%BE;1SqyE`g z7cqzz*&u9urPXgl@~0Jm&_tNB@t#YKUWl%~s63FSxS+z|YJuT!>s5=Ejsf=|XtY`ol*jr<~1VbgtY&qht`Q2vY-^gHi;%-*3o0 z(61*%SqOUmWRq_b;UA^JWK=R?Vt+Sp9557*>yT%LEMu>E2cD538`*$q5oWPrUsl_1 z-DBV&yK@1eSTS#1IVOWiLH8ch5&$=}q*FRHyUwc6&=p=3$B5yW>4tNr372%sjsySk zhVR6WuTv%}UHVM8JZBFtp*ilIEA|`{Wz&)6_Qp8pJ@Q(&iK?c>yTotL;kn8TWo$MR zbk@=&u$5wsgBOFXRIPrIJ2);>PV^bFE%V!ChZ-1hUs0;GJiv4l0N}QZf~Y3e&aPbzappRfX8!4z(>JWV=^C4Vbjv(WE`+s3b-H8R z&_mecjz1rL@bI(TZ5h2;H&Lzp;h?-8Wu`EFWAY-@hYmD#H)!nNvvoDye$%7%+uCrh z>m`AQF!I}D-)ZZSxkhzGg~6h0w^tAzdwyp_0BSjUqwiq8H<23R7jL8>gEJGP|HX@U4{*e%ISg4-8hoPXK1>A7E<*i=^18MbzhA>I=j-cUiC_E$Wf zwTr`0p~H)O0ju-sof2ijNDBw&oW})lndj$=}*S^Q}cB1`|0mDnfO-DBJkg6}XQNrMLzI@(W*n8NrlQRB?yB z?*uv#6)r4sZ+L2e?LgAKRcEIf^PDVeHNLX#Q%JY{M@d^%6AhO(awJb2*0eP|oJsun zcpiE3D&q01HpsQxzY=`aUtEfMY24}rL-2pUD#_W3lFZa?Zop0Z>mg5~hzpJ>6~8IU z%9F9c2M>beB*U8@II*B>NqooySS9%G;e(1q7LV>rGp3uZPfL~s&)Y8V9!%QbtQ!q# znVB;K&xHPQXql*$A0~o}yuz0;q>UsL60X- z3N3`x1LlY~8=b6i6e841n3n`DP!=XA_COLb7k(~6p8BB7jLhPvrM>Yen{J>{8+0*M zoFlnme?T>loXt~JnBS*!bf9|gjU7@FyCx4G?!7Ouoa(Yjm4~F?6gEpJ#+MA;P;(-T 
z1r+CCH8K|)`5MFm$D%*PY4O97MK>gR`78soOqt>6Z-GV-Nr?HxLL^P@U+=eQ3X`zq!zK$1r5LZpTOHxe%LVtw+*G=In3MHD!eh?MUZr*;aq}@8>P;z5|WwpgyWf6x3 z+kj;JR8s$+7C4@^cu#C$S!-?2Y@4Dm&qqxk#|!;p))G%@jIOatAuP=mL8QQu_hl@8 zD+}9`AnFJ9C@d8YJA2`hdV!*O(9^13f9dt79!531&@ z6Ofar?WJEt%EG_b-PWS|tYcOje@i!6qeb|92@F_jYP=^5zg322wr4(qAxk`kHT_vv zmz7)6n*tkt*jnZjhvB0Z65PYJ@6_}mJ%q#}ZGB{cz=9m}9s<_CoH42tCuNxJRs$&_ z>it$QBZ&MvJc7+jtR>&_7gVe?)G>~eFtBm<1M4a`x=RTF+3{`?wmL)B-nt!YZu^WN zfz*SYNr)oNvqkNBrdxp}&aK;t_Bk`@wSN1z;i(gihS;Hg5&|0_H*9PL6p#+y_*^eY z*lyhO8Gol2O%4^GXE2li(hLfodR0-^A)K8sw+xx%n>$N5Y3PL*tD{1vE*k}UMUPF0msJ~h)_Q3v&e@Ag!qElK%=4I`LlxNhKc8!%{cLC@jBI{g3IJX5! z^QO=dtg?#{zmtY;qZNV0eBOR_g$GkTQD;U>CTa42R;b9i{Qv|6y@G0Gvh?-Sghc=> zI(7KTh=AhV?Y|%p8X)P8-sW5VQx>54#c!C@5*-ww4+k3_kX^Kz1jA;UqzfUj8KNm$ z0Vi9pd#*D{$HL5c4?&-O0;ahXQY`VbQw&)$hf+Cfd-IztRWzs%#J(!MH)prn%t0`u zzY3p%Uh%A@WpwK;LEu(6p!i!XenApeVl~XRJ_9<*29PK!BzU*b|Hd+Hg$4)T1`bP< z0q4)+@kk#)WZY5`GE0+VyJ&(-9jruHZCO#?^zSz+E3vQnv2M7e@m|1GiKn5R@q($( z!;;Z+*;H)T->!g`Fo0Z@pe`JugGrFT&Lx)P6Gs@)hBhL<^!NFhLC0W%eB60AH&VJ* z1YYp8+!QT0>Fc1W1DxOW4&f0Sd6#1kDin@L7K&T7f6^ZTW4eD&`1G3b4Sti?6|6E5 zAAZw$;)DR0=wDODy}jR=Q`a6Wx(kRs>%UjKY$ZBu{=UoTzW%M?qyvU=UTMxhK7>lB z9ttWdCTi?Xglfi~z6OY@LXc{|CYh1af`U?=#n82<>121#_b~zs-g;koN1yeolQ(QQ z$;&AnA_~fIB@MpfN=nwB{PNR9Ku#EM;G4N?sW`(IIn{qH%SuqMmg-Y?{;cCq((uje zsxFYG$Nd*way60|j+hjv|Mh__q_kaUjQ$~hBtYxKn>ZCpS|wJ@d_e%@zMqF8V?DJO zb^B}L-F}IIj6AtmN2KAnIQRUhK>%5MB;13p%IHld2yqn8g7PPRw&y(psbpy>6_>;^ z;&7gC9AoGhHafHIC=W3v!L>gPs@F1Nm7yfH&ihwyEa_V&7X@9O1v~wMgbsmz; zN%Io~_5+%%*|c za`w+VqqXMRj+R%jC~IlGOLg%<(vvP7<+*{WGP0wh{g=P!%-3U2-m8;`c0xicQAfJnjb{c=`jJOrC z`7P@DP`4%g-ble5h+UV3vkoG&2!` zuz1&L(j=-dMn1r;i*_prOdtRPCExR)}pz>gY8OP+vPW-&nBJD1LJ7<~$Pk64qYLP*gwncY@es zp`N&2X);Fd_0;q}-OJ4-cpPCF%g%(=Ux*Jr&=sEI2ttg8oewW;;yOPgNta%fYsLrv ziGo&`7S_!xwIu&!9o+_IMAnF{ke{bU2*ySNU;iq1lsnqyQ_MRUgmZSgyXM1DsOA;z6sJJ{Z+sa>`0^&50XA>n6;Vc@kpRFWeiUH*(J z<~)S>7t{`L7>0*jv7l#e(c?VHAcq4}Z#zDVGCWp{hmdnazV8G99g6FA4=zI4nCfib z9QFa&RPYPx()ZSiXmJ)e<))z463Q)dWrHAOA(Bq`u6dl*# 
zTaP0o){_F&u*ip6Q%LvQOX>w-zihwOxw$#D~g{Ozl>WTIx+n z)0QUx=VU(M*4Yh^{i=bMz%Hk{pU6z7$^V6{nWmH{plBCH9Xrk}>b5IH52{g4^|m{< zz|NR^1}^+)O;Cox;jYVS?B?IZL4jUPN|?aJbski=Lz+YC^GS0G^A=|x|c!2mDiAXF~i)9jc1boyykIVjXv5nj?}QG9L7T_))U7)a4)k9zXo$GKEmKxAn1p|AM3>lEyqOcGUE@uxwT ziZ+#0Fj5MLjYU++=YZDGeuZsNXHI%bs0&(>^B+qpZ<)8FI1*j0n8jR3lVqccPd zD!c%~7j@govwGpm*tsj(|KE#otCBLImO*X=%h}pwGg}zXufdef6GkVQi`@uQ-;suX zcCxu=&lZ`7zto9eXbHwJWDRz(T`fMH=tL|TUX`ug@*<^#_*y_4p_pqYxs~A|*SYs8 zm8o;wx#fe7sbsp%ZI(!Kzoiu#BV!9GdzszfJ-?)SlOV=90z8FCgTz9)*{Yn9CIl|{Qn>;P`{=GhoLwz85|Ok+EChto za&9oNDis2p@`%#%pk)Ph#X1n@GF|7Eo36BL)#cn=A+iocn6xP8d1HC2|_Ynp4Y zM4K2cRvot^?SWS}{M+Pq)kdI1xh{r<*!G3_JoD;PByKlnb(9gega5gTh7Vyd!h=V} z9SAY8k#Ywi6Zlo2TEJOi$>H#RU{Wo_+TeNxc&A^~OPm8~YTA|o@#BvQ!69xV&Dd45 z5$g}QI$Isr5>YN3h(bv%zWJ2~or;pHn!}RQ?q`tRdZprhB$>wE!_Dm*ut!P*5)I&86!W4Dt#+3jWtqxDfIrlG!Fkwf57e!~&Bq>79-_+H?> z_TYjfk_4!ip{B=~L>nW0GRGtziZ(|&Uwp2a^jV$rS#|XUk(v-5|NIKS zxjXEm!-TLW1K85{s#K?-=KRf+%&|PHvfQnu534!m*8q#Jmxy9ORx&NrLte;P9cn=~ zBI#QK^qNYK%-l!`>>q7gd{-?lQ|Q5eKf3UKtFg9`n)qVmXEa@l(p8`vlK>9RsYOz1 zc-^C6i!`2Nnl|F$SfydFG3+wj9~Zw){&{M*k8qT4Qd+(_$oo#f1iQ;{bEs-d2if}=&k zyp;>o?6UsPuEbP;tg924R>X67d>zbpkyCwZpA9N{Y0F90whIi5U;RVR6|2nknzZ_4 zO2tt#eF}&+hUic*X%iCOS*B?vr`Js2E9?TV-#r_jcIHnOQ5{okPf$%h&Sr|0UL368 zm<*hYDcUQ8E*_E{!+28xT3@l3o&E z)@vorjTQDr+k4ZP&PeuE>jK_OBAhn#+T-{MdE=uAr_L~4D!9$e<-b&G|CeSYGm`sk za(p2BxSSb4-#Y(Xb$IUuO|8}9x@zFstvQ*ttoXg{%^3D{w|ja5i48PerxciK+tQ2A zwf%UitW|lMu5c8&!vpPXj`;}IIIpQr_0wwsF;k2wyueLaY;2goHHtoZoe7lFQR_f< z#N{nXQ?+GRcqa-~n5`#H)SRCxQ(EIr(F5Y|L?sLx=u|&Z(b=X>V#uUx{dHZOqq~0r zt8?4n>h1!Jj`0^~Tlgpb%5Q~?XDh+H8_Vh{pm7qqR5rdiE~kdQzUqU9@Ej`quhBkK z+Bd7T?z2d`t)*wiv_q^rd9`{!jy}bR_MJj6ZiSzpJ~#H6HnkW8yJu~GP;A2BswsN= z#c09^D+NM?vG1Ity`5Ip(nw!Vux#}ieok#oqZQw@!!*oDGF@I?YBlL=X}Br~J(#Pt z)@HQ$+G!NyMz3TP{8)tnZNLbo5a}iTTR%3c5xrnuR56eYX`VAJ{o)cWlxX)21Q`Sf zp-AAk9${!GqD}G(t%-wISt~y}V>+9+}+nVjDV!`J~Vw)0Y5)Q@TmHl-lzL*tuuR%c39wYWOy1OZ%dN-hZxU9b7yInPRyYf^?$=X|B;q5K2ci|Wx@2&ndbs-hh~@Lz 
z%UTgm>r1;$w`ep-?tN(eFu;MfIK=vR@CPXgp5&EmPMeMOWIi5FWtQQ+&wSi{Qi6Yg zTX@ZuwYW^a(s$|*nd80Fr=pO3*bb|&A!;)qz^TA3|ol~CzhasG59IrLIGA z%|Pq@s^&NVwvpzUeLW{X|p3K?o`MZ;T>eTLFni|>jnNn?F33F<)y(*%y719%P0 zrb@w8I35id5hudlxt4hD(D2-H8*GbJqxCgvEGs}tz5^l=z~h;-RLa5tOK~EfB#STT zMsdH8e@K-0GY7)y-`#HCfe^c-w;VVeS-9%Afu2!j=WdRB6Io$$8?z2wAV9pk z*gT{hm1|@46=SyQcqg((?X_gEq=L}HV;N`29DnF zJKnGw=fB}%*LeVQ+u0c=+##Hlo1*d1P?L%DI0TEWLFA~u>581QBPP_B*FsxMOMF&k z?7V;f63a73qWhkJ5|mfuh3O$DtCq_mq=bxpAA=b_Yza?{l*K)ty<7xUI*BHIm^juc ziDSL)iyd9+H#nsJK;x5gOG@Q0vzJ#0YVT_6_Ac8lHJqJQ>7ct+NHNc(WJxg_8UlH` z{dDU;6LDSRtP)xLZWChoR%UHT;$~bmKf-IFu@sJuPXR^S78+kKytN&;`TX=t!URoP z#}EKB_DmaXJ5M2gPh2-<^A~aWo=x+>wAPbPnE@JZ3S!)3Teiui8vhG7O7fBy8J>ol zG7!lp>+>IemNBv7yoaZv_NR##(dBJ&A0|SC*i5smB4k?3s+qHehHPl`?|?=yHF@wj z!O|<>-R93a-jbA?Rlj>NbumTTxpA2&?&+ZrtIt*r01cW;-u>8UlAPfEwp1yN(2sdV zW%TVqV_yJcc5RG+$cRcDsSu=GFiBL&UGrF?iT;5Y6Uo}q>{n`wr2=wI^bt7t1+NmA zaliR&NO29fqb6QlV+xA-Ad8z+Xv&9uq za)Bv*LH04YwZ~%;upnj6)9TK0c}XU;AxLb%rd&~Btx3JrkU(}I3eFrM-vIO((@l#> ztP#X7iZY`GOm=!}Vb2aX=rK^GHsw8of#`@2pbvIvwEmmF#6n!g>GU6e`tGfz-s$Q7 zdKdxW``p=vv3{H9CPv+s&|h~(nr!~-u|QY&p`$mu-sQP`#qzMkgCo4RvlDmrt@f6Q zF+vhD^MSDUvs)?a^XyO0f0+uDvOfk9`43=0gM&R*s}{dW>_@{IC@(-Cj=;j`May)f z{NuBqYFo${!nEBq&_gXM#UzC;&IGNBgGXQi2FK&%q+LB z2Tngb^_U$_*kS~=%exv}3mv*FnpUcGUOnMnL9O@Y4q#4C6NhQ^hK2t;r9#rq4^B~o zQasrz3)d?LxQ7^VQHsv}tJm+KNA)vKJ8(Y9{}B6$Dc$GVxK;S`^eoe&p6)qqOUFcIZf5l=MGs;SydY6%p`(;>fCsSCNG_JzC@Wf2fA=Pm6Xl&Iv%$ zSqRSpq?$9;#=Elr|n+u>Cxv2IKA_syJJ*(INm31t7Ll>$D{mJJ@0D*YG0_(wjBI+(g znp5G>--8nH@H5P2_2`2%K()(d9?sk6!5S%P{KbYq8qR@S^_W~0cl6oT8CF+4pdOaL z#$6YO{v3UnEd75pG%Kds>fmF)(ES-(i{cR>a+n~;)1}*MzPOy0KRJJCq>DLX@ydla(& z5oRz`3eCGy3{j37&4fmY5~3QK55crnX7Jj2#V~+06QM54Gq;403MgAF1X_$__AUA# zVnYf(GhO!hEIxzKK@=bD2S+Ye5UR^9`iJfO}!vQfQMx{WET;Jl=#2jiQ%Fl z*Q&*O84k%=%s;i zng3FJ_;{ri9MYY~{!1$IcB7gsYog5OZbrz@U92kU%+ci#a?JY-R5*NU2~_joAsDCrZV(o`Ntg0XWl*@pn>p>{;3xfLWaSn%#Is&jMO!4AI+)J9T(`s)KEzkXXBd28k!oOtG~Sb{yi|0fpunXd17DtLQM6^x_MXUi85Te 
z#bzOhcApP3tjwBSNuSA6mUd(uc>qyf=boq#)KB$GSx)-Fr`S83f#Eq3sTq+D*j_t- z+4^=Ip|`qgzfYZY;q?~2X$C%<_}72vjS(kKoQ z=02^_6*C6lyggL=j=RWUlt^459XYj8*T~rjg%u!Gt(hH~gv&X;N{806?upy#sA4RU zxZzkP zWY2P8bxgh z0LL+>5{cDr_-c>sc>YlRF8r-PJPO+7 zV}Q|z$^gGNQOoH~T3!o}9gaV?0^>rpS~<;RYYYjI&J!&eqEd&rCe9%ix0R{0uGf*o z6fG)NNNc_AfrQHrhT3n==jUK5$!2b=g(u$w)o$OUhF`hWajCK#`UvXbEP>^JxO zLrn}lX|$0@UTaV1y!_I#ouGgC1H~B_B>j71qt7nN}`YY_w$Whnl7B{K=8qoPjlmqdXW!&31e5j z8LYD4imc#cnTW`2GmQ85RWJqNfc~W_tm$z8lSNhUyHEsMtU8&`RuyB(=^=7=SRgD- zmfOhSq{nj4!cpKfemklnJYyw8h(j6u!Q&&Rl9Ez*ZWehifS-6bLq>o>D%UB6noY?_ z_n>45RO(Elj9%l|M%@w?-}e=x@$USKm{8zk)n-9m-VEgwv{&7oWag1!bM|FYN%KGvFD})#yXL=g}LhK$Oyc2frfIc~m z#l>_)N*wv<*k$ZAw!3f0b%s@vO7+b*_DKq8r+@?~N6^Hwz?PuaKC|UkI${&CHs`T$?cF_m-SGRo{n*)A%z1SAq@Kz~<`gRJ&7CLq-gMnaIs8jKRZK8b zjBTl!R_^8KCqu)VF5XZ3`H)+bi4s+j^5xIGR>G|?(JpYDfKRC8Klf-eLZfu)MJNVde_5kD@751Os$`%4zH#{oTC{HVX{=u3@M6iB zUXqW1oC0e2HXaAPJLW8%CH?J^E@h|n;(oEde<8>kY=Ma!5$Qzl+_a29L}vg&FV$fi zigOKLe0hUeC?nIKR z$TD8B`WN4jV+Zv>uw5l#ci5Sjt2V1Q;Qo>wG#Wr-h#{Gs z-8_a;?nvKAyJ=qf5mC;~j?OTWeeFjPCEmiL>Sl{L_34>kGIXhjjaL<|d@-B=j1w*#)Vx_8#u(dx=N!1AH?rv}8l~@CqGLmx64WW#M(obf5cKd=7R6REhUXH& z0Um(&E!uQe1;0^p`eeH71VOUQY0wwk@npBjzJHM9h=K>?#W9r!S)>dCK|wHE^=-T1 z2}x18(&*TH&92UqFU;6Yf834prCG6}cFnsp7V6@tB6p9SYYV~#9vC^;w@26@+$68j z_j$HtVG8ID=D&a;i#&zrOuHDGb9j06VvXA&R@;%r4ERmKyCUL@~>`@OV2`w%!)W? 
zZbDNs2hVlmxW*vgOg8fZ4H30pFb7 z_P-ifF8Z|gGPApoHT>%)_Ng@mX9c0h`IaC4A|DUlC?FHUr%z#Thg%zkmIJs8oT(H0 zPpViwPz z-~U}@EZMEqk0(*cvW;rqC52EFQ?^mj|K!!9YLd_5^IX}fGU7;(hMpyi9ZV5SDEn-J z6l*4hnX5bOnKG7bSHO5@^NmF^A(Rcb}X-)r)-SvZz1ue_5Zj$M=_BDSI?f4lp zr*X_FQ-wRa;7kG56CeO?J`_@PoI-w#4{iXT-(QqfQ4e#e*CF`Q7Fleuz1#vAnif$Z zBTHDKUiMIahwsI9_SE+x3_)A+`XM5DO1ZL+FM=7NLN}{u7rThOETkNNn@js@Zhn8* zUlx%BC~Fcz++zr=X+Q>!Ykk=jdAcCSkDUAUV^-Sq@1Fv5z-gd$7XRh0lIb1$#O;(x zd(rYHzR`Z1!*W}7*eZ?u!DMHa>@SZ%rco6u{(#st*t7vsI+6SEJM6%ZiR3Ug8RQ z&4dPL*`$&N!)g8ScETK`Fw)7uXviIzdBW87h@MlBOx%F%oVN>Y0Ob}DNQwcyIbmM` z{!i|@XMHQmQYj&S@LKZ2wv3Kgs|#Xsh1g%lNd*dWAZ8Z&SaRQ%cfYG-Jzj-F%3#G9 zp864XlSN2j}px{arUR-oWyl;)u&SI#`cG%}MMJ&q#7gjueqgBvz4y1x|Rj@6&mD94{QL ztB2yzkb>*j_alPyG%8nD&zqOW$Ivm^l&F_%dfAPQb(g^>Hd*UZSZey5v(ha5(+&nY z!^4-_Gon3_H1Et%n1sb>+WYpyBj#A8_`$ImQ>v8RI)6R)zuy_by9aK`6--eZHiTyG zqGAY`%f!C3$yPAQPsfrE)q$h8)w0y|zv*XSap)bar=5wm`CN%bPgS|BaD6VJ#a2IodVXitZN36 zxI#{7y>#`9%nvqEcsWEGe@<|CrO}D=3@aDGgGbadC6WGKebgn*hw&g7 zJ86S&=es+JmYgP^Z`>LQf*yH^Cgs-!VxxFl7_P+O7-)LVQyalr=waobDZ@VCPdgL5 zB)_iO7bt}NE<29AXCQ4&KJnO#p}nQd8Cy=+LEeovw-vhyZRT%&Zoc!f;&FgE@&Ek4 zU`ofRLElw}UW!3B-Tpl3h(qL{;_QCD=1i#cpw`9isTS-}8M3BT*r+-!F%}U9G-&y* z3Ruabz2holdIg712GglD3(|Jmo)IW)pY z|1-(Rk(KE?_w1JDJz~KvUl4sON>^vlbYVNZ3L^Pa_*InQ7yMr6)wQ@0*{PCJ z@W31@S%vgNiMm&w!It+htqjF(y|}9^(YFM#?$f58BDOb)#;wJD_pvAT@JyeZ(A1VQ z?kcJwIcsqGwyBM~PN|;8N$l8Eeg6rcvn?>b4#E*yX#254b>!N$-ZG&7DduhvO7GO@ zi}B%vJ`S7&=p2e<;g(IZoVS_S$CihQ1vY>m2FIJB+@#{mF-Qhm+Qzu5iYi13Mv|oy zwPTZq!7W%aAUMQTMcu>=+#%Rxq{!PzjgU-pb@6wl-;)Eo2iE=hA z0*D5>+IP-Pvoe@^(&SW$AZ2J51Od5_4Abomq|LF1D-q$`8=TzhuN|#pGg;%w+utbj z=kp0^1LFqCh*uWy8Dhs_s}ei}>6*UPAZLSqgulSM^UIaE47BEsCkJ}JjlN%b8z9U! 
zG@(H#v5hA`^tF=9`=UP`@^k9;I)2luBn|h^$8ezc6QW*v@|@@F)#G_UtL*qNhvjtA zdd_bLFdpGzn@0Xl=Mdet6Jmz_-8QrDk-+9W(DUr~PNl+tFRnR_K@tPOKx}Si&f=*} zn72%OR9>@;5H@LA4^#xmMKNrTc^kWweQ8&Erj#a-df9RMkDy7^qJD8sFL{2KDU)jb#7tPmt~zZFWM;Z0;}Uz4%VS7)4NA`?dJoH?mD zS5L`JMGTeV_wo5m21-w;Z}}h@P;* z{?e4I#^QRo<;s#WceI!#Uf6Qz4o5~~~Z@U06qM*Kn z7rjzG&;oe5B9)IzSVSNiYY5~ZP(jt+sOcN|L+(-09Xm?;hLV%bAZnH~{tg&AQay{= zZ8Q*{KoC4;ns~$>A$l1H+a1>v9DEdxqB0MkGAf1zxTHlHF=3Lb z0@=v4WH61&d^de9$-nAg_&Xr}kn7GE(&DIxt?c@cQ)Ab;;Ufzg3=@V@EsAP#@m{SaSbc-u-GTDxfZVo`z-&Ql6e(HRfF^Tjm+}Yfi%}d!O%wKhF;;wPxHqHyXt?1gk3} zk`;1hEtxY)zMec3Ty!*WowZ1m6JymD2laZqzw6IHHpW0hzbsQ2f=)W2r-1<`D`^M8 zUV+Y-UwpOn>Ri653|x@&awCbS6~akcsP@@>70!YVp!1l+OXbt=&(@4l?N82plxMgN z3~+uTIhjf7@4J>x^D0SaA!7EH>Yc17dP%9@#{R0Xw&=QX`J;-o1#K;3*z3;H|MAF9 zWgp2)OO|YM%CenWJ^rlsF7q?nxOw!z;j{Wv%wJWMu&0~#;*8$M1$8l97G74m1tF6E z>k6RebJg&5Y5Nnr1A^c3Q})AB6e&n|qN4(@bK|3?CX;qur8~b_<<^G(wtFf8pk*k>8XGH*`6Q5)8kH#x#!ZJHcDRd?tKpN-lbz1D%zUe z!^0BGs_^HXuw^wOp|djUNm8*YIN8(tp!+F)1d#Z`M4YghgLk&p48xi zdv9KeGg>-V@1O(sT1wj3(7H|mk+;bWuA)<$xM9=Y9c7VcL&dfHuI$2U!`x1- z{PV5%tyWuqeE4>{=i};g?ej3P!l7sA_oUiAT9^KQEyowq8x@8w$6@yeAJfUX%wSU| z=e5JC4xf3>sf+y8t?uWSGaj(YD7KAv8W(?broczLBsEg<&f%@nzqpuBdaRfZu~G3q zs%@LtYGJ?6&v!s^$uJVA?mzv<2%q7I^>IutX8w)(f$a3&%+DO)$%mjtJ#xe__<%Sy zbbJJMTz4*7KRCL2jzQCFI{hE6-YP1NK-(6@-CY`Y3GVLDxCROC5lR)oqf;Q8{VRN1n#Y9Io*EVaBF*8)W_5F@zrTXwC?1&ws70cU|h`4-&*6FLHbGUI2T&E2MXCs z2n0r*5?;^{_XN9$G9j=UH^F6hR?ufoww{%v3rg5i_Vm zW;rWzl_+~nm0=Rx>{8u}hnExPaoaUf170OVoG0nLgX|*urRW3GmesF#(|N*7HeX(y z(#hJ#+`Q{EP8Fw*<8C7B7zFtt<*(5QWaQ!S@rm#7KdwhCw3dlKM-+*77g~Vjc&2UJ z0m|?9gfAPFl^sn2(D3P6o6Od}$rooR{79G-?samMiLQ3RN00mTQhdTkBiUF&Zl>oOiIeXyM$U3;NEQGG1*| z;#_W>69-daDDKjFvj4!Bk7uofhO9Fo04KaCBixFpk3ue|S;X0Zo$BkxbFf>?CJ~6o zj}|dCHSdRToINq$%RMmaqoSyLR$ z)1LQBeOFQnKTYd_vP!(Q)U>WVW557~e&>rCT`U-ZSkcCU*VC9})sxz4lK zS*9tZyUVyD?d!kJ5s;-1|_1#Pz6z${iguHE}NS+bl)T7JMmv zgf96>hxl$SCBlnnRPDlZ)x#BoD3m1#QiBwvL0S z*IV3ael`EjieuZwW>hA)1HVA2A8{}opv!(OJ19On#CUsj%gD`Si7k4PusDrGQdDJ- z?$Jca9I 
z*GY$`M(vIoG&|5BKy)SS$~1V25w+%Hy&Ix$h+4tI(UIbBC8u6~ReAxix(ZL1zR0f7^A^(7rp{Ro&ED+_f>_1#8SY?AOo14czP` z{$0usQcN|Fi~KMQy4C>bRFc`93?6~qiw0T5dlcAjsW5ODO@5CwK@NB`;iTo$P+!e8 zt~Cx8ZdDa0d)*<2BnTm=m}Si?!gls7EQzV65{eF|8kXJk{Jn4<89IsoK!muflr6t` z?#ssFPdu3-=6nNwVROHKNTFrcfc;jcyZIGW%WX2f5W`|0|wX z-c7re=ET;Q#~{;Wec8qlm9ob_IzpCYk+?lknt86EW2n|HKd^{8gTO78sLtlM;XFaF zL)IbuzGMFEGtgl9^`_Kj4HudtYw2;T8{P}ue_DgOK}__oA(TbO)z-HU0nCEK+@$Z= zG71>N{mEB9~9vwsLxn%ap*I%}c%q!M(O z(bTmh)%TvsnumkCZZw1(2>>fR_Wmmu=@z;y_~s{zmEGN)zthL1d3+z)9tQ_6n_x>} zf+EfOWD|m^#6dpUD$o(hB{D9tWN|qwXklrI{4N+AhvC0$afscVkb;QJW9Ti~zEkNk z(thLc!=Hk3$YJ+-fAT8nq|H+`fmS$l@=eM&ivdm0sa2`wz<&oB2_P{$xf*?OH!Glm0ewRQFn#k?3vVdvIENed_Rg}YlYaa)^%%0m zJ%}Q#?D^1)RT>a}fc-F_WkGZU7he!wbe!H%pFO36rxDX^2UHhM#PuoQKgiJ5O*3dQ z@Ng@yWV#;LPKh^MRB{E!sRBiOhQ{_w5v4SP2&i0f)vqolA_#D#ou~zz^{Qbr)^sH* zkk<1cRA8-R%6-2hDHN$05kksnu~><0XL;D z9GpFJc(*E__odtPKQUd$$52|C-m2o?!7z9$1Y^-D-2assvwjnI`%Vqc!2UV^yXwGY z@`ldKFr840CtZ;Ay!&MK;`mu5uNmQErFWc$=4yjY7MgKnd&j}nb48LgJoGYxAM7Oz= z-a-)7x;Ep|-sb<^l&IzP9S-YBbq&!KBIrH zIBe=PjXc$B@Jv|E!b8)u8*(2JGxCM-wH;dyW}+e!tPnlVG{%tonVa1?X=XU>xaP&8 zcAO)0%0+Sf`w2cK;itJg*FHM^HFCHn?QiQ@$b+9zBzxuICZ7qY6e4SiR9di32u7tl ze)3R+$^iN#TL;-Wv*K#B&9I~Nk}97vXbs7*#N{h)fCg#QErkb3X-tN{@i(=!?#vaq zC)~UI8rLr>N-Odk86DQ$@NukI>HQ7Ab!sOOo#5pNpvj44WAmypGNJt;ZxSJar3Y(6 zMrBa0A9nfFDb$cuhj2#CoibA3_=)pMY}nghkG4cHOpv62i~f1!ao8LAWsj$71Otwg zyY*Z-RifM-V(9|{TP*eGCc{i%l4u+f@yNQm86%y9Uq~8AU zid%gfuG0LKGbi~WLg)hf1v&yiQ1;?da24!E&4=w(y|*2;WeaiWm1s2ouHPmjT-mus zhW2k#HOTYxUf9IQdub=dpQpM|$XKQ+k>tG0y%TOclczOJ>{CK;Cb|lQCw}N#?T_8d zxE2@8#*jA(JcWU%NbY)F4<{*$sZ>F*0W7D`#OD!0WF3W%0g`SmZR=;{-NR6gG4f}M z>C(Dw_Dxlej)t7Sb4EtRQovqe{lRjn1aRi122vv33Hmh+xozxXf(AG;>);^xx~7pi zqrfn!X%y7@#DbtMi#xVG+A7TqD@HKCrOycD-e*E@K80vJbLm*C00NH%setx8_fcbn?J1hjKl9M6HIt&Ixca$1A{&yhHxPN&ja8IY=oiMyWD2QM(UnXtiOHkF^}-^% zyA!|u?mhy=9WR*m;107ldQBH@kLU$ZEsfj)C_A7{s+Np& zriKm~&f4sPF>fm1kln@>4M7)3x`wu$xm4~mqua*-S)1*tSSt_?!>SNkfw@n+SUC)R zO?*)Rt{J8=hYf2}2sxbjK>~|wee_!ETTlBKtc2ZxG^Lf`($DGg(L)+}t-ro03t5%2 
z83~Qc0yNi{@`B};A>UxVOevXZgdbKSkGH4&sCg1MUAG(;sA9cAIW9GWJ-AE2U%<=c@dx(ZFkdoCIAORM8iOotYUNOFLN@z zicj3AxxShB(NhZPtEPQdX?B1-nJewZPSVgsHN~|u_*bL}35T92uWNcvmb;R$F-3C) zx)2TX8JAnR941O#9sa^`XCr?Co|IV_679RfTJ0(Kg z`fJwLL6m`&0I2?*HPl33FF5t{#rI0oK+&(X+zhh|WJ6LratBRgYWH~&QmQf30(@0a zaF}ylb~RV7oFz8`@=k-q=fMXgP3<=_Cuj|}P|R}FWzdWy|h@?dUfyC7L~@$$id1zk?X8AQNeDR;C7px*4tUy zeK+Igt>W+IuiLCHJjE?4Jh(ZW$ z5wiX78U}vG2jMaryW^}xNMU}M*`yK{)m^tb<2nDO)-&|%(kF_ZhxC)uj)MI0S_`(1 znccRNF-Qzy5$B)ul`xx2&tX1lCV)0r0w2G3g#Jf zD-$A;dG|){Zhe~m5h)jFgV9*2#IjRje`P6lmyC`RFIY~cc?Q|C58RtF9cVY z8=c#tv$V`S+mxH1tjGSwh*tiV10Nbu=+_ndkNi30$0DY(UZ9_qKLR08l)eNt{Au|q z@qMh2(=59$Z8mv>*)#LNt3#i~m1snfBk|_pm2FnUNs0WYxs|tuYVTO0kII0Q$X@m| zBSiu__@jlAxY)U1&_*byUQK<+|5Qgd_tj%Vk)X-MOmi*3)dR>2ECJglh5= zvb{j~Qm%5R!-60(Y^saWzat?3l-3a#|xMMFx?I`sF!<(#WG$qXNr`r{Rmp17d{o z;$!Jy9i4?vL) zPB~>nh-1(XDVv%2pRNORu}W0gV&XA97f+AeWO;sSTE)Hy7Hzzk>n<@R7Zr^oCVYXz z^!X|Y|Mz8>j|zc+-rQv5PqQf}2QMbfAzN7R-?{Szg25yUcOzqJ@$4atZ)Hq050d=v zVu+S)$X0ZkYn{;q;6bxrx2qwDJYl9cE!B7_?9tB(|ZfS!^09_d}WO&58}>19U16@ohb?*ej{I2}PdUj^<8 zuWcrTZnCr#ocwQ}7gwBavRm!&-Es|PGL1BH4cxqXi%mh?O!nV|!3&O%+uQC#lKS_g z|9DU)jR|W+W1`{}l1z&(wDLgdrz$95GIR`u96js7S3%@<6XS=}CGoKh0mD!rVFrlC zQkW4)vGh%ZkrsI1)!=u3`sAL$TNe(v5mZa^zCYD1qorZ!2F*oXGFQ>IR3yG15zR0FCBetGg%a~m9AXa(wjML<6wl9*SoqV}?TslZdp z8-Y%Nj^)2|Ytil&3m=b?OD3$Tcxk-hvjDL8SM82Y*zp0 zCQzMG-{JX*N>ydAUDjdRz*9B(&@pzmlaH*(Ql&>q2*D9>vAb`ZFu zTudFCbsVxe=;(Xg7|kO+VyUQKuZnO<4vq@>RB;3qdoIgVW106OEJnm~Oa=hi>Q+2q zfE%$c@%z)w$3}EVO#aXnMZsx3*}LF65T(Z+etGjk>CkV5;_J$G7RfZ6sd{m&%1qZtGg=fZsPmt@8;KOcs+k&xnV)5KK@hiHU5 zqJ%JV{SpnX#pyr9xeRv@FK_A6WcyTW&8Zqja3( zdu@R6V{aXqV0Vxw&QdV(?7I(3jy_G2vnNR|yE_Z}ggIEm^MK!d)={a=8L7;bk2-o5 zXIgwKI3IJt;9{^s%q_VpJ>Z#wwcn9(*O6|3j>cn@(;oCCGA^atw& z&T)sPP@yND0j(-Lvcc$`9ZgjdaET{jXq;$P-mwCAp}aGtNm7qqP9yqkA;JfGIX=bF zukfR{$WrBElYS1iV+9*QetC|UZcOp$3^ol2Cm$l z6ppiX%+S#yKgNdpJnav4Ek_HrpV`Y@r;~3?m0x16A7N1!iM>b8qVZYtL{3{hwCdES zM0}OTgMHm7AV(0?<*;(!@}(^OL}^|B29e`JG8B`SGb*IORP~iX?U{S(Qq0+SUsE*_ 
zh9K!@Ue@KkB-nqN$zD$6MwxnM5=y>AiyIfRV(W|c8f}}TQt=~M=H74$!7W^aD}XCz z3HHh}I%#YVWA6IPWw@;5af9yvK&E?uZK&7v9{({MkdX1O>z()vzwdF45$G%lCz()m zUA1Rr&3biQW9u3ykX$o9^GfAHGw`LP_$m3bs=vvn%`B$`7J=}nYl%v zEE}Ck;uXHiI^iJnwFn0D&O|m5f4G+Io)A~HN8u`V6l6>kIx~%5XVlXHl~$VDh~yKO zD|`_5&E6iT#M?!4_*kRZdjWzGk?mXS z_YQT#WjD`-LQ>omB18#-4}g=;BjWFkhnV~VIyiENJMQX`UzfXyE6^e{lQb%jzXIDe z@NY0@I06%M+T@FZ9|_^)(B+_T!X%C3zH;3w+(Rg_ONxI*%C}P9p+_bU*>S`u&kfY{ zR%bx~Ky+)&U{fo?D~O5rLX|~#in`Fk5_`?fYK=9F={CM_iH0m>wAr@_0V<2 zSGercK>T>ph2XezFaW8PC%OyY*S+G9>H;LsGS5>1e=i|Iv6CvTEGLd9%AYdgmaE-I z%;#GR`ivIcTw6BJYcuEbvT}Y9Z-fqLlo!o!R#u*jznn=XzfsN`tRGZ=lj(K7 zjLtsPH3S^>gb%+@{Q%J^;S^7xL*xsj4FpIhRCq*|< zTA%mB_xJPn_uWHPEyZ_pCx1vkvh%+Z$}WVGwFq&dq=@y2C*enK0PWhnf?}bg_;586 zwNk{Lgc3kH&K>*|8ca)?N(NDj0i=~_D$E5UH@0|D0wdj(9wjN-hzuUY)leQ8j#qb0 z@qSywb6pRM?h@z*f~8L^p}3z;1#$wVIT9lXLj02;uf!)H^mN1ry=hzQ37p(ml4V-K z?03F0qA@DHTkeCB9lUDV zx0*0WuT7y^D4@%koBh$V*mIlKA5c71hS{kM`lPC49~SgkbD4R(Xk~`f02g*^W<5_; z`SLQ|$hQIvRi*coYb1Su;ik~5S^xK&y7u>a`H1Q%FQsvkMM7S!q1aeG+H$8?M&-j_ zbgy@-9d~VM91zL(ZfO<vdUoWjV>+0Hl1KAiv^w>;OSMAGAOOg@m!QPW!dRY~Ba(kn96XzR^^&OW&$b)}|lAk4N3R z`ik%Dhm8Fsla8Z;sA0=7;i~a&J(D1fDF%K@h}*z`K@mAnIj<5due@;LRU4!yS=scO z@+O>OA>3m$ja+2dXk3x3X3!|j? 
zPLa+WjlKDZ7fFyr*?Js;e%k}!>VWjoF)OT%3(Pv)lCdA{Fcko ztX7oO8PG+EIReDP`g-z=k;6|J^Sb(mHq4k$R;#?x6h}tVO*Hk;iy}B8DK-`L)Mh5H zA;OL)%&`WOc3Dg$bw7DF7LSRQtq_sBLP$lRA6(i3TdM+rN&xxJV!Vw@gVPy(h@QDh z@Vyb0ykVP_DBN3XdGf%a4Rm-y7&~C0hSLsS%=BuAY8H3Tfo(q?&NWwp3>X!HToBa@ zBn~PruKZyGAe1+9D5jpJ;8Tupkju6Gr(B0U1_6+OJ?Flx9 zFQD%to24HX5aCr=c{oc>34{Lfm!S%y^N6e6(;Lh70-c|?_FAH2Alr|JQMIGU5hWK+ zvgf8Uv!2+uuS@POMk}G${Iy07nK;tUHw;hJR{G;J7q$FpjYwweElg7{VsPf`DPa1N zt}XWQWFS5shTn%IbZ#4RfZ z547e97!<#eZ&{@RN`+-QA2*$)Exwm8{q0n;5_|0$Wqj0&WhaL*u~^=YS0IT$^jPEDcr^{SQ0#27JP{e7aD|CHX7AfZ zP+)FMbrA%>`d1mLJYAD8tOtDQ*uUjR?bPtHLr_=DIxd)hiZXO)WbP|z081Y<11^5I zlkR#SiYviXl(Z)@ua@Cft1OYR8udmcmAJ2OqAmi`NrqS@ueCBB<~g=W#EM9F!xBxC7Ch9_*E$|$r0yJ4 zBHN~9+Q^R76kPer;dk#?nYa(|DPzn&Ah(x~X@NO^y5ue`J}@Q6YG zJZJ@7MeSDDbq+3(@R9s8%lJz#ShIG@(=g*ygF#_#R&?pVo0w0uadyPo{|YAlXI_!$ zp^UoU4!|o`r|2u?uDHTUU3i3#hrgViIrRB_Uc3X@&%f7$RiELVV5vrOrwwa%sezk; zw!0okF=4J8GBrV(D3PuRnhz;>;ZYNw68&nYnnUpZ(oypcBhaEE2WAXDQ63Hr2_qY# zUkv3M>%+n54HC9<8UAmyAAcqMAx6n?)vTxkgi~`25?j z!Ln|RRfB%Bog~Z**zyS$R-yp*heV9cn{t_wX`+3Moe=%alJEU5IXo%niI9uO$}*Xy z^HO8Bev3s{byk9nD}F0vzdx3=_sbOY@CG|~rXL=vT@J!kx4soH&tP^F!>2+}RJCQd zrTDYL2y~s8DI;6rp65)9F{6oznY;OgUVI0Md5j=vP~^i?<_A%k2;6R?vEfR*SGcM| z2a1e)b0ouQubQc^XGW-FYum19-@ab*DKZE8s$OfT&dBK}ikdr_Y_*>4scL0Q>$X-c zR+ct+=&)#i@kWP6BGmaUkMlWm?jiaen35H7fed6M$Qa}kmz8EDr$ey^?rQvpZimz9>PA^xZY=R5*Ue;jWrHd<(%;>x$&$+e8NA^Pwv_T0MO5I?-`N8-Urd zzVJaiTq^NGg{%yk95Ip|dy=>Vfl&k=GTl-q8R+2UFnG@VRK0nqHZNd)4@ zM+zE^lw$!s>NQp<9vOQ$l#Yj=7rYyS;X2@`KMy+e zpmE3L=(DCWWp0mY3#sYe`s)jjvcJ$@c-(ITg;1<6$T2?g4Y%Of-y&Eg&HSyBVGKA$ z_*v!(n6*2Jrw9Oy-K@vk*3R?0Ka7;$)Y^|*s)5FbOjT?SDW-3tmbg7u$aq{>kF(v| zr<43{__H*y0o$8~5k4kaHxS}*`10z>?=AQb#$Pe|k*sr)oUH)x2DpQ& zbJltBQY|A^=AsqXMJ5Yr+#k5B~+%rx$gbs&h}D%(ODqx@oHT)<+S$T?c?w@GkC+ z@9hDemq2yxW+$_-FV|E;9&NPwjnrjY-An=ZJQo4HQVcYyC2Nf(I4){Sx1B}X>}r)$ z*VTtYVB(W7Ni#p}#tQ6g1gF#Q?+&%OY%&AKxOQiW*3TTe%82Sse=s?~B^-DuAUeoK zN|!464V+Ls|4{^Wnq6&Yhex)F^)m>D-_)Jy&kI0!Y&m6#>_iID|K-!Z6{O8Fa-PRn 
zSmR+8K;d@HbGR|LQqnA)`(ztQ{V$FgJe>T#b5SM!_S{ufG$WGk)Plk_kkehY!<#WK<(yHX}$x zDl#zOtMmL<=w9w4>Es}=+tDECoD$GuX0hu`WTb69yV?S<`2$n=^pj?9F=D4K>Xn=p zCE}*2+mYn{$Wr#}I^AZcj1)_R(;s-KdvrB-5x67%j80rTeS#ZJL5f5F?jGK#@a$Q> z0xf9Y_JGJt9R#i`9pj(I`z7+#3MO)RIAxQd&&X`nq|H1D$wR;F%#kC|vPF+a&*PQ+ zQS4zLZ)-#f!tg7+i~@`t_(UvZCA#=B@Vlwk8qimqgu}{(=e6rR0bp>m0Ka2{=pA+L zFPq#?`*zQYTde&xx}1QNl08i0Evq7x1Q#5Nej56?*g*>NNX3Vy03#nH;zCzJq9q9G zgy~I{iZL8|7O@;^i#Mr)BZ|H!^9Q%>a3C(jgYWno44?a_QnRM90C8Ue!FvVMh_kKu zv@-^XlpTmJ5^e_M!?YbA40){x_Z<%Bc%Zr>rGdIb{b*ygAqze#c1a_+QE;WZ?B*_P z>qX{KJmyil??Ob%KYaV-?fCqp+4$=Q!cuUoHc}Bxg+`Q&gT5eF2X)n>P?TQ z)ZGBT`ZuAxmrt2kC3TS*9C6|~;pToH5A%ZTLVrqIs2OPG*j2BBr>2zFTibA$^1=C* zW$T;wghSJy{ida`kQg2HoKMcGNk4aji>rv>YPY#ThVk0&wos7Cd+;*nkqP9XK`&Rh zy2JUY1#HnTu*EToD871p`el}QQ!C-~=>rsUCgs&wu*b44Z&xC%5A(kgvvTUx-o-Qo zb+>tXjLI3KUXg01lPs`uioY;7X6baxX!e?;My*7Gk>o1ANW)z2Sa|H!rpQ-j;0#}a zc7Ii|tf+C@cZY5C%*?;aAef`M<-S&qmVGHo zpTEsR-1m)C8P=jqCv&GJFU`vZ{c^?8CUkQeXniNRmu?*j(wMsOQy};57rgYGXC{U^ zRno;6Dlhtq7k!>IEWiZRYHsE_Y<9sdaUcBvA5p-y*=Mu1O>hxU`FX)}amO6kr?r>9 z97=UL3VLd6(@ykfRvWWd;}P=yEtb4}8dYSSLEWOyV(V=%+iEvOb;Il^uy6;4l21fD zV9Z@`SS?16|5z}a_u4hNUQ_8-o~*rs<)1H=wdfm{F+g>%w3g=c+x`TO22HA4;ZQaK zbhcbsweX-6fHvLQTmPv3-a^a$A3o0>zc5i^8iu3P5+2PH!`Td>8LOvGK$sN)ctCZ3 zfueiyCnW`?|97_%6kVa5I4+CqlCP_VZI?rjB_y=I~ZPr&5h9`P8`aIj=Z>OCID`w45nQVeI5!5!jV*(GN*fm zk5r*O9E%=i43_Ilpaafjrg#eTPO$aHb|m)2&!B!-6oHA_j{F1u&)686Nh|$oWr>xT zJ8n{+H6E;FD-`3ft5|l=;h2E4chmKY$lOO^^|O4S^NjF2+m_<1zG6SKp_G^1|-YONsz0_y?z$$ z=z>73NS0C%;`M!N7-4|z78`>?ME?5wZR!>L2TRbe%~A*?fxuo!J^)5xzeaPGZxiWj z7fj^|DK~zCcpMWDXd3EL&w8bESR0R}JTp|MV8!}J^$>E9A_D3&a8|n^17m@03>ZvM zeBUqA44+(bJ@rkJCr-_^J8lP9CEZ6puRcQr?jxL0FIF;M6BM-Vec!Qjqd>#nzRdZpZLm&C#RD3o zv1kAQBF25zQ^`QvX5Hhuej~PLQU-ZMG1fQ01EJNoNVot%Ckz7(0@im**f=qpKefBF zAntlJ|mlyl8lgT$P`_>R~R0z z-V!%0ma(vN+_b2#f7b;u(+RC@AyX@QpYB1fy&%U_7zPdSj6&tRK@-B#<&J{y1vK4@ z0DSlR@8G$wiQpM3n5PT%nRR+7>Q2_^lb77wlgFT@w6NZ2R~S>afZQbE!mHc{7m*?o7Hub|Gv5LO%{8#$qAoeIv<71^o5tU*#9S 
z#&iAUQqCA<*PpA+USIQ~g{T0XRp_COeIdhBi^9!9ZQ>`cWgxQBVafRA1a{}#d+2W; zDHnsWfav`X@G5e+{_Yf!SmMCo@0=WlJk}MHgu|Z#=riegjXk!zo5cMQW(w4JwM^1f z@G|SsX926Y89CvVJE(_ZvRp@5dtVz10fmKq6^#?Pb?M_tX+ypeujh(Y;N=hPvCQb5 z`Q=HwZ-rOMb*9|z*PvUw0OK!dEnLruH_`=z|h#*7L5VaXlhvZ-xDHTUZfXPg43IXpT3Yt85___0hYa5#P<<0+PZ)kjz?Bj}T z`PTh&_Sh2#n1*LbuR4xq-8$__wq5GGRnU&lrVHlMxNQ(PF;P8iCNSoWtpOXN9!_4u z;ma#4t&cm)vrE5bms+otdfpx1C0CmEOOKc{oLBZzZ+{_RbA@Z@ns^fdxqnYS=5l{U zL}_~9rRfhn!ul#xUxFaNgUz{sD9s>RQs}X*0M8Oh^Je8&z`1v&;~Arm*+o}Ydp444 zx%Etn2U3y7sb|*7zVgppg6`ne97@X1KY5Mo?bJ+=>&Fm%roG3DnfxH=D_3%Ga^=%h z>Gi)deSr_S49E35MD?pa-L0R&wNU%$>U(z_(V2RDf8x*SNcp@w$Si5KUk&;UwoE`? z3CgEHu-?l<)txS0Y_nbo(Zk$zRi@7iUv9ePzsu0d?I745!nq2v=YK!D1m?t(z|WQL)ty$=iO9NX)_O?t#X5*-NBtn$SA044TL2g9Q4OxHkOF3XD}z{dM2NHpO6vc89E`W4uL@wnG5AC-sVK{40A zdfQ14lpm793w4RAsyEM8i@DS2izZMDYtB-WuEo-{0h)fyN7hu+ zw-hc%$KigIFR@Qj8k=h>hm>N29F%!k@|KM+r)J|Lo}tW}klFii@4Uq8WyL#x{(*(* zOi`45@-4NRp%)>jS&s9|t*hj&xK+9l8p}j~-f<8I_!XeQD&{b*E7w&V_HJCDqb}}| zkWjz$L-&{{jC_PKW)8V@O!U2C6@Nd37nQr=247_;Jn3z3Lsi=1_qNCNLT22*TvVg= z63tcEig)uT5#H$UG@J61m7BRc?w z&DCjl2w)jF-n2iC(J!S)o@)RLb#H5Z2NRrEqJ7uPA0(2`tWn)+j2A!y`Ln+(?n=w4 z#(AM`(lWXYJgIP&bgrG^p&quUSoyq{+2)g043ZkfZ9PjnTu#PV-oTE9Mw{b1aEGPu z+cRqNdItGW)P-VRqr5sppr1?J`LF5Gsn&lISg_>PF%p`KSYE?Z>HoXBjrNj67fMc@ z1zLza{2}|G7S+UsK4(3m+5o0z0+w09Ha0`8yQbDNhfKg+tG9HWE;r+{AMa<$IG0=U zlz2T{2yeSgqQ#_Ahf|7C8AfP+K@!NGt{u+^EbG$nXo*!Ru?gCu*(;5+9H&_u)nF4g zh{0P?0#A$K8tS0l(S*VbuFwPvZynA{y?HNjXl2eA+IQW|?MP1g4B##NLSPnerMOw2 z%o+z1WoG_g$Fw!e~Jfx~eKdjC1h6<#TlxeQ5OhH@^ahJ?#%gYcGC0u)e2kx0+tlCr3mHIr89V_e4D~`Ca(tQUofM`Rk+XMyHBwSYQ-1 ze^-yJq8aJsRR+MO=DD$NO894FL=F*bgdd@*u!=@~udXJ$Ty`~c6KQ)sJWOKryOuHU z>*+>DWe2O!`x$q0!V1k49~V|T0{=jYtODrBtolgVNH;+o4~f(94pTIM?^^f9CaAPq zR%zY592#DHHa|pqK`!_H6Tbx3MZRi_zFjN8@ojV2-!z|eT4@#w`CRP{-GGy4%d{+T zjo8JdzR2P7tx%$)?mYA+=rcziaDPj)OL* zYTjBS65VITO3)EP5a$wrMqufvumOl{y?J1;j%z=U=eEgAFH%#k**kIl%5!lI{j}%a zj()GH{gL2%Z>*aRog&;{aF2hUaFBJX6c9A{fG0;dVR;DrS`CgNE$|p*7-$plO+P_S^ z8J8mp?Ued1)OSsB@096VXH9O*;lONN%~Dml_0^**ZJ2(^Oi+~;nfal 
z5NE;ePNU0+0=fKdelsimvt==|5QR(UAT7^o>P(w`oX-k;$Nz@2*vZcfT|5+7x!oy0=-jkxVMGSZwV)>WmwB6Mh=r`Gdh$%gynM zd`LoG;DUE_UKJMu&*A1J{A{dR>HSMn>h&S6U=2}9R6@4~O5^LxIRhM`R(}7<)M^Wa zpAVc&35z8((UjN;#|$BMNmytk&)`YisG8*_cDcZN4^dZORdv}H!Xj{Y5VvFmM+v=f zLh+;3?{BhpYd9J;&7}{)xaCOR)erKTW*%lQKH6_$$FXuQ;w#d^-a`wS8$JEZ^iGb*Ei3>mCq^Y=;&3O4sB}} z9!$?(T3U->-sYvbmiT^1(cFN6C|)lEji8u8b|6^QYm-Cud{5A9C$w#6H}u>`_` z@Wu%nY5IXbI}f40DmT&)TYfiQm-2jOoZC;gW{1-XGUDg?D)nML4?Ux0b1wJ0>40}! zrW0J8?ccJNpPq*O)_L$Ef@I-`eVprxgcO3QhjLaGF@9VFf%1qo$>JIo`;U-tC0{Xt zvy90xWeaDFC_C1Ek!OJ+pzq&TGwZw9Dpg@avJ zcENTFi7}4<3j9@wo(+BB{kb^UlA>D7xa%rovFWrRC*u`BdC&K+tGl|YtE+qOd*6Glby@N=BoxGOPW9p+eO+N5AVH@QWznxfDr7OWd86>I@q&5*okt-hAssKyC`{1Bo@5i?){ zb_PXy*8G*?&zy(JOG|7fI&FW&TPGi{iEo{1>z=d_T_sV`mW^uo%ICV*@Y>)B$9?N9 zcnAwNTDniuD*rCP{~0p-Nq}gphw(xZ_TySg;LiDr)Wqxt+(+>{3<6bxh{GkfzW7W<$(Cz)T$+RiF-Ze0q$=Xhl*NdXTr@~}?Va0rjoPI;tJ-(IyhEMkW(|U@Y;afFFR-J3&9Q0^hJ?naE4z6MLTapE}6PDE2oRkJ#Lz zpoO8LT@MJrDDtu#l555*oLysV>&v=o&pJL*;9uclrMmqjdPE5*`)6H#RdrhToJ3^1 zQn~c$(Pg$5t(uTQHtlOSI6q}(?4&lWU1_NACYumYb*8AM^(+T|hn1Ja#(VPjEuPSn zuBhItpSibSW*kaG2@i`=zRa zRz2QhIkCu|7~mpa${{EVs%i}GkoH-DIzT~493Co)t~CBdm<27~y%+k=2j(L>aPE*3 z+y!Fp$EF$`k4rn5AbfJc?QXiwfnYhK<|DPuvgY!k=GLKmO(S1xt;HD6)sD?q&asWW z)f;bML|WD`Ow2!eyw_u$^BAI_beEr1ujZ6ZE+E{_M?y}4^A33J3dbIU3^7td@OO_j zbT0(6-g~`dTHG@T3h%NkNC+m1H$^x{oWCKiL(*Wd_9AlIwGcSds;2VR#7cAx%ru6b5gC0r2vIl2A9(P!-q z$gOlZp65s``i}_2wpCkzkI#N&2C-}F0{LCerHr%V^Q+x91HZk8sb1rh28TIp84}vS z78FS!VmkMB%PB%}+A~Nx*thLO6b?eO?+dXAhe+O;Dj&k%pt&H)k+)@o@pRzb{^m#) zP3?r5EPuAOSA{@}%-~p2oED*V#Gfga%s?(^3n#?{?Bw3T%B4S=n`zJ`8IJJ8t;($j z{wSu{t(U@0w&Yf3zfl~ol-p#D^c)r+aj5pQK&MkPNt`9Iw9_|e7MG$N=y~=dDOQY< z=)n1^Y3KyCZ)-EaRSob$0MoXEEc@Vv35eqmPL?!m!Z8#H$m+YBBgxnp990&Gzxe^S z)LKXh>Cbfc%)y1Fc=ZQT|550u7$<6&MceOY#-BB-)!Xj47~ZLVQqPw3XY`k!%C3MP zaUU*gD05>*3Y^gOcakiZCdaCU}h)Zw~q zUOl?x0uS+W045uyHN3EV$nEgOiT5zKK-gO^j?(yZX?{W4_HU#0klHdUY+=0C3hJw7 zi6{hOVLqbKQ(#`7pI+G{?u7FS@V`Huw$rJ@*!nQ>6Fc)Lp=E+v<*IwTO!G*uLGq>H z`gKCLje+)X3Bl3&t!uYqx3qGhpgL;tXz%lDMU$jfd0wudpG@q-2Q&grF3>}5 
zI%eOvCv>igl)wDu7WGd=bc$6{1QB;_;V%l@wqraAeUvisUV>r{9JK#Ax}Yj5MX6q} zKY1`@bcd<#PcJUb(vF4-+MHTX!0Q)KQ*^%M*lcarAwqCH@E(~rK8o>}E3Uz3^a#sR|VC}E6{|v%f_ugdx zt#jy3GBCLxgTzHN0skWs`QL(f!OIr4yl=jE?Xqy7rpDBK#i+;zkmgql6a1a=q*?k{ zy(}eDOD--mHke81p1;_p&b%=F6C!!M=Wj1Fkuw`Z6EY!=HR%{%3N?q%z)KVjk~Qyl z(nMFQG}KmVn)h^6kP4Kvc~<}iD@g@&QCR^4TnD2Tr39vI3~hv&&cZd~uQFpa4D>?h zIrPHGffX+u_F6VQtGAl0=gLN6kwR8eg*15k^tx4QV&MBmL6syz*SQC-2NQ8%wv9pk zRWw}eF;tG)n`LGnoPTjGly8Fz#R!$fADAT-oph3JvO#`&N4Iz7S*a~FuQ6d=;=Z%btWX0SN|l#@YmwEhIHGPKiQs(G$M^5%C8Jx@ z#VIgXrHxN@Da7k8WLpwni-uB=^_RUOx>CTe-?-%z(#q*d2@M*HcM%du zNH9ChdC2qxm|8-kX#Rpn=6e`1dAJJwj-3x|v_|nPe`Mv(Fv@QpK5sao7XIE#!Hisy zJ@9bF9(W1MrmM}?{Vh_C{}2L;BQEG=)3w&mL^s-oz9TUTsyeI`g3zi)@<+J$Z=>=m zXb@9+yp6{u7h)lDWPswTt!Qu0`{UlYag{rNK9kY%Npfhs0?aYZXfec8jA;Bt`Ua;| zc&8qu8nugqGm`oAll5(Awc5{CfICJ;?>wy{{abUliY&r8e{W2-J-Xmm#^|30(16+o zQq!i{Xw-ps;Xc`H?5;>NKKv&L8r%U%HEQY920isytMZ(xlIYrH=|C>`D$uJK%F zz-^{)9|uJxiFHLg4GxG*@qu?dcI~Op-t>JHc{3VswP|(Yg*DNW-qrkId=Ov`r7**3 zA$$Lz(6|~YBeSVVa}J4I&xaRtF7=aPO1jNM?)vY6&K&!Ruy>@^wzICAc(;e#zJuZ7 zxF!dFC^Zc85kKef5R5WPWmUC4)He8EIHVohiP9X8C|5w-9wj?7^;UF@>nQyj$3#o? 
znrYQ^JTK3nLm0J}95mi32MRU7A6t@{I6B4$yyUd&uVDl$&Mvfc92nIRu<^;e$t6OZu|fSka2XG&$(8sCd^xGQEE0Qu=-4xA!??WUkFB&SRg7z0B8;*+uSD; zj?a17%x8l1vuRv%`mf)m;?5Ri4@&fb;S2O|q6Ix|ZGW7coTj9eWfORc$sfDcmX?yI zG#LQn)BuY?&vuCl(*V_t6wL*Ce+oLXk=QGYj5{@U5_P{<7LE!jCTHg*v2RyZtLzBm zA-?bxMYBC|b8p*iLny@WmOL*X;gMk~QW}8w9WsTg<;-&o{r_x1E)r`I`}Ip!Gyb=# z{r4DG8gh(rarbPqVP>;I-=POMOGz;gJA<}aX^2=XUjDOY(OIx^-m)YCfb&QWBQ%H; zF8j4>$o9#&b`Me?eqrPvrJQdJRtUR;_XGO%ZJWDXGCza)!+g>JMB=s?#qSl;&jzH> z#9NXe2m!H-XT}ecy1jGMMe|{hS_|*Koql6Ju3#L9xhtWL^iX2N^O0L2J#TM05i^%= zY-0Gd#=2UU?E%-T!=1bS7rMr`KH0>N__+-d7*7TRKL86>uDA;V-qjZ>8M;0R^r~uk zQ?66uQ9@b2a%fY6(;j~DTiQUyf!*yP!--^vk{;+#kGWeF4UbtqIvavn%H5XNMC}q8V#2jew_BqV>=aYoA z4^n$(b@%2&rMJxwU{83fe}0E6i|Ed5N2Wjmzi+(g6^JiN=W90u zQIzSft;v*mo51|E$TeTXhqGonJq8Mp5BCH5p8xa|1QtL$G^P9)Dx>aLhI>G{LNwT( z0wu2pjO9YlZ~4_g#sR{qhRPw-am8QMNf|pBf}oL>P!5K6B2XjE0K-(%Gu+qSboVgh z>Rlrm?O>tNdq>cef@)xIi1EhgsIA+2QPhWlK+_p0@q@+w_1q>>n-4u9HE}105ceQ` z)?J;C)UN>5T5^ugmAF0B=_+Ue_C_%pnFLxIDEiEjuBwW+kiEl1Q8jM~miyFED&34D z&?Gjes}MYR4o7jy{_@9CWQdW;9roRO1#IlPA8%6hoL?rZK9pg~F%77LI>pGz zp&-=Iq$w}a#9$Fs6nv3scBZoQ>VgSuyj0{81^maP?T4vqf}({t!K>#3re5Wq&0Tuo zKiRCtz4Pqy=3*bqC4?fqh+g+d>QIRvYQt{%ldKel1$I_i?%_t+>bG92kj@Bgkx=iq zPaZl>o{Kf?C*OKBIi~yzKu?Y|6W)e5kJKjoq;=}{@HS;KWRyL;6*jzqi&6b~tC&mK z1T7@(ec4F=x@u`@Wg=S#v^aNh#EXCb;vc!>@+1Q=@S`;gEwfz+%oxhd+LgEnXsCbc z;H%Lrw&rP~;4_^RVn6H!2zbu5$R?a&jTbt+HG3D)hd+Vi-~$P+(37g>=;>;&h-4C5 ziJmQasS#(_J^qF>Q}{`QFj*-#;pp|8BI{70V>p!P#7J--5E2JxD2sG^%7wK%blZX` zMGceTG%Or_GApHgh8`C}$!g#kFhhtBAMD7y#0e zU}C^4G67rgS|}D5nAh;E%ux=T#QlF(B# zR7k*3BR3}(m+yzMY=s2N?Vmlsk~l>-t*Hsy479akynaHDyWdw>j2Bd-I$PVQ0^mq* zQFJkT(4#o+n>X;!Z3NCa!|zhU}b%)dX7cOrAY=qt}M z`X+H3;82ZRot(TsZ*J^Od1_iOo39?4m*3heD(F0}LwrUCQLavS`NaHR9SzI^3@)U9 zd~B_WAG*%D#&RH8BlrCKWJGY2E4X9sUSPDJ+M6haRtEPIG4Gtt+s{4a5d5>?EYEAA ziKu0zXLZ#M1H4@8tcy^EZaSKoGuOhwy62f_<2CL%&=ftVoVlYcU8$lj_U}+U;&$tr zvd9lCUin4*Tt8TqSAE=c8o)^BRz=UMo(W2Q((zkv1^Evl(Fyh?lakuGglWbRqW^I=SK~}?}`ne9P9FFcf3MMZOKryI}J2Y 
ze4_LWh=soA}yub{2yx44W?mrixf_jAv;?`9bxmvO3m)i%B{TH~2;oYnLXBv|>5KE?9s+8@Z!~vDM|siMao(=*ode3xEXhbGhvy)v@UY9sT+&$S`ETlO z7`%G&je^M3v{G*(f&+(XfO32PoG8$uo04yxP(U#OH~9Yl088j?&Q_KWTXJu zk@dRt*zZ7{#yGCkH2%Jo$fYs?umi- z)oZjbh0KmwZY831EV1BuHuhqCX8$$>ecdxJ%$XS9k)1o1K;1_YNkY6ji-F6fODa^A z8b>S{pN)1WvXt;wiOR+>U}+?MyZ}FaOLeyVxP9dy02dV6g;J$+^5Tjs zLNPmXB09Y#*!#<$kCTiT1FvUHF8>~6aos1PFgeYYY*x(hv7`jAm13${#mK~@ z@jbTv606?x26Q`1$kB##mYnK;KkCc;{2qC;C*`zH;QxPaTOeYYjKgc9$J?V>oCp8^ zseHEd-%n?{16F`m29c)Ou;PMX)>YtgzcZM=0~py960QV{U~9H<(_DEJcPUG$GU(SW z@$@rI{he2!;?%jBMr|#rO z8kzZ($r_EH2P{7vX}Pt}X)Ux{wHwN9FPVj4B)b@_$X1;7g;qn7(PDF5^|WlZ-y8V* zAoA0U>*FL$?acl=@tyOu=!xv4mR`48zw9jXjijHD7Sk;lP8c2mbN529y2{ybBV#}B zG`23{1hKxpkQ1V~X|JKvH>AWt0G|Sf2vOC!wmym3>UrWJj} zf?&n*3ysQ{RCljoI?dXgy=$^P+l=+SGwxR3Fm%V^wCvCrhC2S9*~%PRPpCQ-MvdRk zc-}hBs?ct=1^SY^f7!Hd{wc5_6cS+}3PQ-K zH@M(20k110#NrfCDzVogvtRB&jR6gzzx%L!BG(S}88YC^5N;HNlcNfe5>bH)ra_<& z1rL1@I`QmTb>8rs*k@Imlwi~9Np6^Zsg1hgGOk~G*J_Cfj(mw_NzhE+pW6kEgbk3s z=r>X3Hs*6xNK(!0eRe|r+)l$C-^;h&kzaGS&0Cy1I*Zw7*LL7MU-^r7_|G8;T_@bD zq1tD>uwjjU2q5KX%ebIK(FF9DVOJw;oi6PHn)4He&+nX)Q`VV=35}JXWyq+^>v5ob zmi|()61|k`{C#X00}TLVXeZL>;l_@6v(3Ft5l+!GbQq+uCH2-tpT2-%dPU30?Kej8ag<(>5r5@R z&Q%R-r=3E7g%n!#X$U$!Cwr$G0l2~%X6>~zVwvr|mr%z^_{Z6ZMJ{ge4T(`#8d2Zn z?9XNG$H($dOl*_2V`c5bgSV1|B=K8Ip3E&-ss)ay8HRwDop!q4(YPJiZrKXIe9no! 
zu-@87m$?I5>TDV{ePf-zbl~d2ve^UG{q&@ZAc3MweNAM!TN(m9ObZu�NG+BSgFt z+tjP;oU2U8^*Dc<#j`2QcYZbSIXw007UwH#FJ|Y zY)e1yFqfSU9ZNq|=$^S<^$Lar4NH(@l7>Z{7_bTPG~eP2H)t6teh?fLHI&6m;f)#) z`QO_lvVFdaO#3Mn3_K`sA2jSZswdtMdc6XJZL}$4@4qC<{)C+MLk&~;O;CXf*A*Gp z{MGK-T=%tvb3|}`aS?>%;@8FNOr5ax>$};KmsuMsa*vS)AtY zY|C#_4)ZFJ;%hH70aL`kZk8r$H{>@G@GH=RZvU}B35e-`;}64bv6mc=^ma7&5~6(j zw3)9rvg3M}dlQGHLSFbQ7~$emGz=OFgF`6zPY;7JHdLH}8D)3cxDUIBI>Z=-T!Qkq z9SNmt%a4!=^7p?zFUHi^x-S3lLgQCk>=R-~lnK#V*@a;b@s8<+xP%vMgCt&jcGeMP zFOvqB#q26$C#HdqBMrG!Kji?AF^cH7FrZKxvJ5koUWBw@pUoa6you4?loS~e{F5cm zY=MAFfL2l7abo%i!Dc4||K;MpiX@uE$7)m4<>?`V$(XsE)|JAjRKQ4@0qBG50le0Xr1cV_0?G&Vl z9p+wm`9l|zz+Ea!5CbCKyPk`nT`CMx4E*T=G7cjkhCWgCd3CvQqJex` z%gc26WMwobVn%caPNuDeM#rG?#ac9Dyzu1$Vm3S~k!!p3n8Lve`wZhZj5_$~DmuLz z!vbGK9|w=_J=i`0JvK=gTm`inF-_dvoe{_D$Ap(s; z8u4v89h6|8tkr3GN6Jh5)5N}PFPTrds zBCg|PjCld(i!zTBF^5Q5WhnaM2$0I>(KvR2WqPS`xr)<7SWFPOmV`M7_+2ffa>7Ms zN#Tik`kBZo#qXoKsRmNHM6fF-ns%hbZmL;(ZP(d96Y;l{r4TJVuh?s%F;@=`tPVGN z_TWVi=_J(kYV)yu4p-gdOa&ts#)b)df1xLL1C69-Mta-vnieUl)#ejvb7L?sWs;NV z#vnVpjytVdG8w*Z80L@{7=$YB%qOUWWuhpyS1`fPD5a>23(|@*TC(dZloP|_4S#+5Oy3Mhg)6S+KpbrDL%?4thmq3$dpL-t}rL z#CXepX;Yskh_M`#DEi}L@o!9?2Wd+PEP4-;W1UrdD9j7Q#%g`SvrX5D`PCFke=9+V z3-6b2Juxi71}TH`x|n)2GwG_sCV~rlR_~jPA|CMh6WpeW4)>*!WU7{#Pak$8Sfl$G zDrB>xpHzU395ez5Xj<|D80ulG85J#qNR5khYBGQSt~g+;v!1WR&d(Wq4_#U*ty7a# zuJct|;h>ydl^9c;dsYIZRE%sgMkav)FirL3N)tD&1vZ@vGW&5DotQuDyUIa%+K6dv zBEw8|e)K9s{V?@YE}3r_!j&G#BjxufvnTBUS9vOzp{3F!mm8qUV=FOeHJpH4B|oR5 zPN6$9Us`YjjqzB3V8Z1}c06b}lVY}Hk#71a>sS76(LPF!?R@PwdhZXE^~P$JoH%X+o7Xe6FKxC-QM zPMIw*fW6+)R}#cwP+h|&E>2xm!zZ-+xx!vnR8++FLh%Luh4D$t5?$C4w_Hs4#5xHZ?l2d0#c;wwnot-ZVXzykK?R{5##d3cJuH?q z8!P4;KE^XI3`M87b^#IWk)!XLQVKu#yZqZB*N{g)LX7|?D{=gCU?oCNTH0hmu6QAC z3E2l4caSoBJp%T`(nK_69XW4QF;UUPRjG@gi)G%QXpBtoH$)ujV6$ZK&#RHAf53P= z5QbO=yU~|Z;h*LE892Sk$g`KAzX;*=dP$9iI-$nPzW^9JDQ@>IhuAJ^t>=$WeS~SU z85yF@_Cet4yRklBKO<0LU`1wLF0MU>q@LhH1&G;~zKU+Gl5ktBr3WWDXxQ(q<|(3_ zLYhsPH8nWfmKR&N+-z8|oXx=YXbqiPt*pRR7y?yX-<6=sX!$54OlAm8{R(JU-2yKc 
zt@tz%zYGwcn*4wDumw8?g##wlU(x>69RGfz8Aqg+<}sj|C@O{9kW%ZEsK>_>N}4x> zdjPOgOVeiCgR&WC=n5Jfl)$ zQjB}jhko|40(S2c%fm(MevQL9gJT7y6N@6BAzJL&fa6dOAAYOvoxIn0Zd;u;n9C+T zrH3&?qT$iB!&%QG)wru5$SPxz4-sika%am{b=!`DB&_=pFzgUw<=x!oOy@6XtDunH z3|nl|Fmj$@tb8a^Uws6G^1Yj{3F5<+Uw!u54wUleB|Y`o7l+d@$r1;|7NK02q8J>C z=cGoy>ab8`90RI#?@43p!jzJzW0gpyX#b}HgrgmPrmC1aD0DsNk2(xX$=XEz-`)^@ zH(FGKU-d6jIvh3xjyxDy>?n!_0#F@Y_VC_q+2@Wx4|+(19fifx=x*?7HP9WYK0SBE~HaeJ@lQ#fO&>`w6`CF}_+mxfvCV@G}l!jI2 z$9ThPis|1>S;`G|IoASo?UQ4M>z5R#ZX50HbFSLpUK5eN{M>xrN32B4yl>BCOvDt@ z;|ys&xYi63Y2o}j3|_Jj2S{|Yo82_a>^wLo$ZG1yQ)|v_*%Ar?oXi7pfj;uM7K>vB zJ9GPMb9+Rmw&%LTf{M(5Mb+XBgC(PWSjzLboUq__*r^BV@E)c!`fN`uaw(+np^;ep zSbxW)Kb=3xh-t9tUHD?;qbrSXHtnY~#AGQpGgJZ?dn=xTX90idesgSXRg~X0k_gdu zY4Ewfbx|vLumpQ}pbo3eyO2T?kW|2}>5E15mdT&~!`dE6@h2P;_BCm!Wwq@^(wmw! zESN)YUnWrB?;Wsb9Q9{8DMM`UfHp1PUM_`#gH3V)mkKrohBBh1;o!(GvyvIaZ#!=2O8;4QJ7&?M@Nsa&u%InA2W1WecI{{@pHLD!?| zh$-PwQ>u878e+`HrLbNW&83oKu$vfdN)pEP#HM9Iin(*aEH3TqQ%15tN(y-%tCjd? zB_sU_ae_)o^ArCLhxl-3dpPC&--k-3ahZka4pP4SMVQfLzm9X8j4T_y+^#Wv+YH?U z+m3)*j9wE+xn!yiV#{VjraLiBUD}cn%f?T+M*nkQlEP?aiF_{-d5O`3AI0Kx{8MTIU zh@O}+h4na1lSnu6dlgDoX5_~>VFoFIK!xF5`=ZO@Qt_6!)!FUg~-WTdKK1fiv{#UQDE@wMjuZ{2W4q#9zE!6}F?O#OWv z_c6K`K>E5OB0b{sDT2^S?U)x)eYa9j!E`Gc7@QC33**w$*Vf# zD5Tz9bMG1l{$sF05z6v-&;!uofoF35ujoRqGC|JAv^)-}2rYZ6C8AWQ3%WTwWm)AN zjlOE@!!LWG+=)TfGTt%JnEL5EdXoLW1xj1QT5Th&w>%alxy^)u zO2Jmcr}zGW>{Dv%27kK-iP_4hp8$V@Ut~Q%n@Fns<=7W{;thMlL>(Hr4~0PyjPUfJ zC3I$6OpXUqjB|s5Lfcp?Cu^)7qn(zB`=J#9d#-anhq1{EzRp*j1?EW}OD0kJ5A7K{JLf3uW(bkpFm@oc{(oTp!4ORt+2w>_Btp zO%p_fqKz6L-JD^+Xiq{hB1@?$X@*R;(7%;wuOFQc9%qER(PIY(kvMGPLg7C$gUky_rq$8m%jY-?>Td8$JUk;?RY>+m?3 zxVM}B7@V%Ida*dw|9ko$f4`(9iu7s(`4&B)$D{L?{GW;7KhuKAyraAeX)qXIE!0!S zBpwXz;!p3A^V=tDWy)MJek*C5^9FXC@q$%_(!@(IfgWu}?DGcd z>m9tSAPl!zH@EDQW!;rchiT;5+)f+3Ep}SNx=JKWO3Vm|bPXKQo#cum`7vr*VJIru zaBY+vr4+>%CPP(?sg>|=i-b5HLkNGRMYBPF9bMhY+8Rdl`$I3JV?nwJG&+N6*N)9I z`?3@lH72zH!-a?vn;R<;HL?qyW#ydSaua1ZsyA-47(BF(^pVD$;6(WX>PH$Si7P<@ 
zM5RrLeHG&Jn_pj^1GmE?X>8pXY5gW0R$HJcX0*6&P`R3YnFI}Tw=K=Efmp=N6r##g z;eFWqBhRkUy~e1gZY=I7MAIq*K8oy!`?SI+ef$OH64HiS6d~$^i@$-)WkZwPl}tBL zb#P>6YSXwsp6r!bL1LxB*2?76lQDH<34Q`(x(SL9A{_2HhW#AsrYlQpjZJ+kv+Lvl z?FsteJlYUqT3Im@VYGtf32EI3d6UuJVoi}nthbseRm?Xmf71%QF0iy_7*;Fy6-$)l zgNkz=MCYXbhJnW>(h}{DCfT!wKQG=if$6e`^=P*MPbg06_f|#8M~?}@nd%2yCv;ZJKv6 z6=@H38UR9Kl41P3ER@;&nK5qMVy~a*0Z7>Vq~+VP;@UgIk%VEx%;jdg9H+LZra0>W z+?pm7&j59~4us?hPJCt8; zi_EWizuZN zL(3d$x@uqyT{`pV)~ZhcQ2_rG#v53PX)>#x^q2vCUQX#n>SBRK0-&E?vT*+N%B#IZ ziNU4`cxA3oZRt6x^P*R|cP!V>s{;W_7Z}A`I=HrZ1MHN=f(=Cq_hOg-nEj}9QUCdY z#Crvefi6sRDx(G=?8xQ{&kq0MteK-%rhV#dgGwhhlxG*u>FcCv;VQwx*{;HJL%Anb z1i^J8sWQNXAgGh0Enh3rqK5$W-jeP*blGeQt*}H66@{-C?^v_l=;+}6$m+%t#98Iq)E_<&N)ybc2Q!VFuM8q@}p#9CiuN&mioFk#8m2 zq&mcw75}5_>t__^Fwgad*BM@v=W9lpvw>hEq{>@;`B&ecUx|#U;i3_=I+SuK|BTZz zXBklmo?$4Rg~5+l0k5M&#!%;EQAv&TrpL}Fv{LgL>A23sSXFax$im8qFMm#WFU%HB zT?H~PnM3m1%j6_E=xSzzZD&)3>@i+25xn6p^~(~~Hp{SNip)xSeR^>u%0AwyWT zYkqC~g%mmL_bcch)odc za`%`vjZXlzf%a8X%1FZQbzCGxCU9HUGLc~7 zRIf!M1-xtFm!>SQoS-`wV?W|^(h0Q8$jJAB_U;K(sX-wxzHGVw7X}p)faH?w5y*Ac zu@K7ZINvBS&DIP`d^qglU4-Zy zEjRas-_IiysN|Ucy=Dr(6X2atG9nWHf30MG-i3hC9~t?%2n?XBD9uEZPe(G;u)o=0 zvks}gca$~b0TLxQ(SW->?=Lw<(tPig15{es>k~DC#DD!-v>J(d?q%Jf^x80X&&4e_ z6yn!P^#@3k=wsA`jtPXXkOIW}B|L zC}FKMu06aJ#_H)XX+(Ba3~n}c^89#tvmNMp`JnoETWa=uHql)~A{Wa)>uWU1H;km5S7tQBF|w&4DG$-q zT+rrB(pH06^SrR^o2NjYyh0+J>I*WBnhy8n7Z@c837eAt58)L_+@XpJG zVk;1((uF~hg$Pn9Mpq(9@coe$lMK}*k)QH7QN4ni6=pd2DdI4t8!str;>}@H%Nljm z%vP$YElqgF=Bvb);i~@SP+GwAVEU`VB%L|4lBQ&@SbIuEg)SXwo2*&7Z&|ifBUqHT$`*(M` z=YBX7Kb&K1Wevzc7!&Qd7 z{3o=%m=0=rGz+&Vr#U{u45`E&bsxVn!1)of|BmCA1Szc)cy(PH%mWmc$;+MXlzZ{e zQK942SRi^lVhtbSeau}hEgkv@#5?=rmzEo8^6ZbQ80Xv*?$8<2e3{HDB<3cgy`OEM=4a=NE8P#-|8%3Wy-gsqpTylz@W?hAIo-iV5{G=l zB%T=G+|LU(BEbxaR;9)s3MlMnK6WN@hey@hO(7G}U1NHCIOmNJmu5S?{jrw}6evJQ zGNi)m>?(|c!zQa{20U8Bv%s@Q z8Lv;*LAkl>t^|XLaY&Tn8mOufj91jiym&(Uj`VY&=_s%*p%NG44BTR)(RyT+=AV-B zXO?T&k2Y#^Ugn8H?_^)+2nBfejC0T;2W{ z9#xi7iuZ?oo*QQQ&v}1iMoA1NL3u_=NQu6Waxz~Im7Iea00@PAIW0h;7aA8pj!ywp 
zoYVg1t&7ggeS1)W02oeM;FuHXPrjZ~<)ZAd0ciZrzwC)o9IJK&u6Cn#z1MKgBxC(o zw8^-Rak8B@PtFO1+?v|NBSV10gy#cSiv7Ow1d3J5Q`kNIu#y?6TfnDPTDTAnq&0eNO4!(6}X zqr!T_l`=?3agw45+s*A9yS24dBlHt=EwH3>KMfPw&9;W`wJ|j_5N0EFXpiKs5$KdGvCC&+C}>4$$VPH|6Zp;BHVJa3yz-=gQ1Pkajn zoL%-Aj~RMF16Jp+9Zy87yrQFoOx2cvPR7QuYpau1mjv>2zKPVzCC6L+&(@v>$tbAP zbOKdVi`&|d$nVaV*~hF*!>_ONha6A^vL7Zh6M-{_7O?=8N1Qk zR6>|JQ&>Eqyi(B8C+asU1Zq`^Hx{wge)XRPFbpUd)N$O^q9)kg%T&Fq;6C_^)A$&01y%-)`_-uyMSC);y{qw-CgJAE?JP@(R0=uDu+G^W zUx~M7ZF43lTo(>M_Yp_(3j<}|z^7E7>J850XNEPR+F4g_8q@|TNvXcEuHUvxEB zro1{ftATkAYMDE$!cd^n+HJKQ|Gq!1d8WEw_96#08a%%DV+93DX%EuAZ#cz3Q66ur zYt)yV`=&Jv@ZFfzHB(B$ELnXEsf3j&0jn=Y9#;NJB#P4S)-X|vJJiX{^u>^++6Ia2 zLePzmn#JqnedD8HxuZMi6d$Hg_o$jkK?Z22=ZlCqG6#Z7zSm$NF5{)AiJoQmZpSeb zDAY@VFmKo`g7I8Pk5iz|lUQDvE}B?wdHS?%zA&?5_89EVHs0TUJbAYJBIA1c_`TY` zSD#({j)|VLG%l;%A)b;ty=`pirq?)tQjBI9{ln`44w8u>ymd;z0)x&q=rQ)0;hk7| z(O2U4MbDs&tgNx?yVdtfZFP-)m)4S8*9Dip5k0t*Nq8^;k{W+7PRRj-WJ{k(nj4aE zP-P^+Y~)3}{Io%#KR&-Q{Xb6z{gu9iHVXWIGtW<1gbuIesD){Ty)H$tD!HZ+2}2IK zeaY+yO46lLW>C3{)(kLsvh44G(scY}U1(3aS&3HFHS2l2%&uTp_@44T{JM!&X2~l9 zq{~LWS`;C*6jMkRc@7{T2hml2&b4*t{El>Hqam$H8mNrxV_>cBeO^}(c$Jy@W#Cpu zdzV<-lfrN;!cvguD;#E2Qc=lig>ModA~DK%$fb-V)*G7d8kit8g0B?9|^ zH|Yv!3ANhjZDxWvlc0g;$_1+->d~g2#ROYUCIZneZym*>YbI3ZS)Eki@#}*Q+syO) zt;p4nY&pGFDu4wGr2!*iA4@_&xfRO2{vcL{6B0lEQ=Hj%oMhDBGcb?yA`mxv<;H<7@Vly}IyazK!M!r!Reu@R8zz+B*G9_-Z>yvKJL?i1 zW5~`dJ;IC^Cawv_0y{`;IU(2C&*p!j6oEUl$r@z=w^=r?D@x|=RcJ)C+K+5vhA3g@ zASZE|xX7Htn%J33?fU21?Ctz_j(?${px~wp)IRKkiep<-pGMsl1rFJ)cc*oRYs>aE zc5RKmZhu^Z2Vx(HCt7+@f!HhY0g!RdHKUX(V$JVYw46CrQImoD0zkjPW`RyQ^tb68S#8QoJxszCt}IVVZwJqj!qELZJyO1Ka8U z4nP0z!QJI3H$;5qrwP#?g9JC%&$IS_*U$GUt(7(CGdo?_@1MIvo12?UUqHaK4et;H z`+$l49X3BF>RykpfYR$9OrOzRf#k4rs(YeD`q$p%Zt0dx6x&3ftSXqFn6PX4H~zm; z$mJ?Dp_osR(O<1-#DkG%ie|M*cw{>_4DS#q=M~2}?Sl}1K;W&NL5Q{}jd1{!NkBjn z(c*O>6J2m9r%w`-3b9|N#JBeOGhcKQ^Of`DU`C*Gp$l;1u6EYzJgy+ratEM?Ut0Ugg>X3$L>TBos%iN>GId)L>*2X7k_4ilf6|;g_ZN85 z@5Lr6$*$oj()x*<)K9e6Ok|WWps5>vaqo5x1bLX8X&t$D%O+y948e8#eOeMt8yU5% 
zNpOc4=a_+bRvK7}HexTt=K|f*Q0+xS`E&pTkj2*Ei z^PU29UXHvo3XO})sgCQNAz=Bx;WL)GGSl6)(dnHxj_vTodbOf^vmX59$)JEyoWN;a0D_E(8K3i|$&JkVv6% zj63LoM?RNT=_Cx|IxCAcx`_8$aD*KKXIdh(}H7hQxZP`7zKxyay%c$i#mZOv?+pV(B)=iiJQY!DwOC z>gC{!3H?QB*6qV=?BO%=Laq&z4NO~TxB9q-U%qixtal%WJ6oKJky;puhd1rUEb5}X zqPJZ1;g7k>U#AvviM$Kj`+G8;RY#0lXR(s%aaXfoUsz1xc$5!b#o&g(v_A=19xgJYBO~P>w)7z!hIDpYc1Q~ z@j$1M3{hpq3IkVn({#-xWDk4d8mzxBWmJ^L9+8U2?X);M_36>x`u9)h*McJ}ND(J6 z14TUiPI$EP{9K8$e98>`R*D0f2v(SiJ75A9HzFP)X;eW<;|$F4mntay2#FMqF|3zB zzX3dv(z&0@LQvIEYy)*DjUReI(G4So%?2UQoLb&9KAvCEgA;;Cb~B-e?Iq*GfTunU z^*CiQUj$@1@*%&T(*H-+J4QzucHN(`)ltW`ZQJRvla8HqY^!1?72E7|jE-&Fwrx#4 zGxNOff6aWWRp08qYn|&l`~3FVPN&8*CUh@;F}w`Far%jnVq^XKp{hLfe?FZ4NoCG8 z)5e1+0`i;*=S@oj;ZnTBBS^~sLqSD2Z4OKQZ&Ch#x`<7UV8Im2k#(w&VbE5TeY=jF zC0E}b-Y)d)=Q6uf&>9dUZNdt(wFb3KSI;SP^jVgNUCo-#uSl=qG9%>$01OOv`OOtE zbyysD3xSBb!49(mN})ax9MuvU~Ht*ZpFm%0#tG#9=MES|VFOgjW$vI9Has@w~WSX16`OrD6kNA{n|*h-g^@ zRvuwB7fhMRAWtoZjUl%Jm59uHRZ@^hf1HcuawUf_qIlqE4n)Ktt)*#KFeK-67&R{d zooNof7EE-enGU#MQs{qi3#sE+qw&2;+DzlY4%M}&#GQ~AuqYE`>8#h9SZqj;9&sUx z3<(%__UKvDE9W@06G=T=$i!6@r9+FRQ8hvypa;vjmEu;Eati)!(~Ln#Dgg3~D9Oa7 zn#zfOGi9(b%P*E~k_Ksb=}^B-LlnN&%Ef8)2e43xH*r#7%VpXu(=H}62%1_oP2Jtc zTkvXX3kPKL1TYCSv|8hHDjPsb{}<2PK6b+TjD ze3lV@SA}Ht@bI6_hhOEQiZWmlr}T((@n*eje#lpG;NnF{w)rnbNFrmtw;|37blF?~ zuI>pSfjM7nIkd+;n6I@WROJ#eM(GWV#uqrNgM!CH6E1w|x|Jy{EkBcKl290ze^&d4rflA!GAJe?G@F@}t$p8bC9HMim8^t&c`Bcd8`FdT><1czncYSgC= zB$V4`ld_Cwlc|SVa=ue@1X`08p^i1DVc`OkJ#XUO5cB4W%ngbnL%yXebo*D&F>F}C zDNJDE&vyodl7Nx&K{c1b+)TvYxuJHI?cxALz%S}opG%N46gkcTRDh);bn1uGt<&$8qwOS(Zr1oK(lbnZP zCp$jLxv?ssyG|E~8~umU=N8?>$P&vh#WKDGdIi2POz8eO6bh+jmXIHHh_8dvB?&1& zMS6_Z1&V|+D8A>(RNKek;_9+h8h#qR(gV@8Liw*6TJ=_vaLIv86iR9$wds4g% z%d!?(D6dym%E~&H{d(*Kl`2y|sw3rTsmhzO2~on_>b2#oL{;6gD=GdXq>RFny1TnK zc|Zb+%t1pc&9)7@5NpCbda8=ySP4Qx^%hT^yhD36J-T?-I%+mu#C`3G0Pzw-)dB?a zz>CU1r)Yp8A~VpP+6JrL7iN|@2@qSo{R=4(Qxr`rbGT@3d{cte2Se|jQG|F#Tlsp-}qor24-I(|=)Bf?Yt73q&XdXdhGJh8wpq0m`2e+g#Ty|9Pj+f{F~Q 
z4UV}c+tPxw+d$b<0_~vOZvrW({1ni!!qoU8YNRZYtpk-oTZt&!k|^*T910B1V5;+f zT@(ZhkOaglTXR?z&9#4-+-|$DtxS~6>FHzQAIGhs;2phVzaBN&o${0RgJ5^Qu{u7* z#7H9zH#vHr$w3@zG@5t|Z8dDqC}dI96xmUAa}JG7WtZc=OLkn|NiDJ_dTh*)U5Jtz zBVguV%}N|iv>zhJMdP?Vo%Jnfqcqb&7!A8;V!& z!<^2yVF#IDCnob?PckS013hW7ZS-YdA+50yG(?C*vp}Z~ql1e9fGJC1 zbi#&jB|vDofpNyN?=6@8)1go(9*}77__D(%6XB|$-5;5yql5E$8AtkqMQkxaCSyRsI zI938|J3``=x?QwDBvkpJ*$*#5=P6o|YwnB+Q<(tDkIo!<9wi-9Bcr5?goJTnLN9Ec zdpwKCoMoUluQACm%&&z9q^4o))*xZyI(x*AO{??Zj3^+?$jXt3^3G@5TpS8bt_*BF zdh9zZ4A;dWZr3*-K4c&OQ7GW6k@3!_BW=pQ@q-ffnYN+Ni{9)bv?Qvt=ie)p`EAm_ z?&QH0V);`)xhgNaQ?P@+eKvH%gN3g;AflS5a6v z^&1RPzS*l8`t>1Taj(q-f#zT`y_PvPpAdGw-rRvScIyv!G|Nx=UmZ+}0;ZdV90@Xy zi6lQrdJwKF$7UNu@pHbvlrW4oHto%E9y2q%gEh5p1Wc@Hx;F9>?1x%8fN82#?*yc< zjsl?8DP&S#W}uO?tZzB*#G&36rAw-7UmzXi>cz1QPZ6(~^C+LaAe#J8K9ShudU%ZQ zf+^u5e|6*h(uDhGjfdi;1~BOBz9j!pkn?`$txz1biQMfzmwi2jf9yo2(vI!tyl-rs zH$%M({Q(zwdw+oTI@(R}7;k-I=$r+mk4vNG>$fOWR`4BXO6A~ACcWExSc!egv?&-> zk1P*rPe1x@o4)x_NGo6-7KSL#0#o=wMYd<6kbIjD_1bNheQSBH)~}%Bt6wkCap=!j z9lK=}snBC8360{auv7=F#1JjRcgYzzhz@oI$w=}s+DYt8vRI?R4AHM|= z*v&wDFjmB^#`cXy#ri*)UhX^uZV?a~#mL8D=7s|rk1mw~;$?TO-}Jr@vQnF*eDlQy zh->r}pyqY$sB4SOVlH8z|4`O~DhPf)M6Y^403X^7_@=j>!70x+N+(mUVp0S)BksL?}x8$U2B&f z8Vi{ECRktELoW@S%8wj*U>HZ+QgAv6S}=tc48-eQ4WJkI$*C{ze7x+8th zK4}6|2cEP|9nbD0Yv^6<#Ao-{G8V7)@{*ch0tzUO6;$^_7{&nOR2b>z<>02Y(`Bh!_h0v zYG;#&_6S&#q82j}R0>D$Dv#%$k2z>5N5H zXAM>v(HZmV=LV)DL3ZF1*z5BuQt&H~B}I@Q`A#y8i{_&KOVzEyXi*`0tSN@Jfw@dF zqwc?a*iC}bCd zh@2iiVCA=IB#6XxQmxF@)7$L5Iv0t#l%$Rqv{>S}AUgDX!1k%SQq|=`boY;rxhfAI zqX-!Zk4(lGtTP5eFt7eWp3sb3K^E@WAfSz;8}>pKYmNdZao*7RDRIP9 zFjLV!CF)?};9$Tu#k`r({;GR05V`si)JAVCRqJoS&*b}w$FsB5l1#5ai-*|wo8$#L zUu1n^^mq`OEEHP!fVsSiO4egiPC1*hDt4`=0(?u>;7B+NX=7vadJ-wR<1?5KHxR)INZLpypM*GaBD#?kyMcX z`CoO#LKe6Iu6vp%G~p!HutT2iM@qUY*7X52MA0lJE(?0>fK4RCgF#EqWXV$0!29d& z?k?UgYP1m=Nh|nw!;ZBJuI6TL{Og;u_TjcV8oqGkc;3Fc^;j%B5(^i@vMnXn(Acv& z=QoFzyj>hsnS8&#UV6Aw@GTHx_YRl-E0JTvQ4EGeA!MIN+!=#dOBiN3w z9||R5KJQN!fa@w4_CqH-zkykT?O%413>~4dMMDW(`y~Pnb%E7yqyZ4ZAQmtmEkOm6 
zNrZ5~iDw2Wp&dn3T{kLgh6#=lOo6=?H01}EN&1{2)DqN^jER^R=mTWQO@^qoa0Gzx z_+s4jhO?Br!`H)A?4y5iQ(xKe{lbg-&eW5~ot>T7VW$pYf(C(MI)BdkaNs_r_;t1w z(JWLZb+&IJsd%6<-k0qKB9j7t?y*V6_{~P3Hm(SjoSpmnT5nlx2b955AV6_K3wIuR z>}Z4p@c3W{hukijM@z@ghoM&qh?cYs{ok$4f6j=>^pzX#|DOQAMR6#U+~JOt1cVZ0 zFwy8IJfzEffD zeykwP=Vfnm(g41qMSK3)R;RjvK~6_M4TX#dFn3iYJ^k33(dlBNDW!x-Bs zSV3ou>B1Yq8r4kGK9WD&=Uvp-GjWmsecWe)FP|JwlBq z)l@t2Clc1A1dm`R<&@)&8fCP|T4LEUo+*@Y1PgpOr@M!V%AHS~$IU+-^p|9Tx)$26 z6%5o%%sIkspZ9OM=f9E_7G2&xM14L^f4%2+JwCOzwIRQRY<=DB!wNqObwF(V(`7hz z;nFx-y4B=zC3k-dYHw+w?7Bhy%x~{#@urWa&-J+?ulx1)*oKQd5yWs6keF%r@(Sqm zv{Y~Nef#$haQ6>Pu=7>=>$o|yy7RT7t)qi7QxV9LHIxQL&A5$WVm zn+Pb4EFJ-AdIux{$ivZJwQ()Wt~(FL3+FJf(9P>Wl1IRd(CW#QD=$c#u-a4jiZr*4 z`{KQajf5bDsxk2i?fjK%N-wc;$~<~mx}JF{vZ98|BkQ|6i0`o)--1`Z(-T-FR=j+9 zE{r>lI-1e|RD-`&IPWl0yUqbCDh}M5sSQy_lsi3Ww>a-+)81FN-e%gZR_UP*dwDOz zA!JDtG0$*}VPFI1XE?+`@p8GLPn-mMpF zZWrKcvpcnVXN=pSD6?vB$yTiykJwb1uqXFP6XurjSnr1Fk?7#`7VwVrjO&@B(y;wZ z{OP|;>~zhp=|D*TZxZVrVcD<6%hk8->&7Ct(V)YpI9@%E`kMet2=OTBWHr_u$CQ`1 zFLlN5CTKzv&!sp?AVX)Cas*c$@?w+eC^E(2CLbUxi?Spk)y(Ak_WdC}HLg_y<>-*g zh^{6dpcxQGvmJt$M>bZE`z*mjtph332wR7w+JycQhP5@k)X98BxEkGCT<&bK6??Xa zLLc|{ghEReMqCA$u;a-w*;jk{Tn4GR7407F9owUA=l;w>Tl^xzO%?hVyM&Dk8GSzB;0 z?dIv`#H~PNg5)dzTfy5Q|C2b@@RsA>jiz&;2aW&w_5K=H;!u^@m){$Fg^k-W^F3@y zizWi#r(Bb#I9C(lR;x?F`tV*(bS@<`C3eP~K1_XNruBoq*c2=X?=PiyN%JNjy_7B~ zUqo~-atM#)DDV!-b)B9qphy*cF6TeoYHsSbj74dL+bNPOFr82aU#?yKV)p3Hz_-p%mFul`&NR=lcE#W*gCOuPe%a zVb#r<;*dS4$3CWcb{Hc`pj9&y)CXI8@s}_P$hb>HZ z641e;NUhW<$$sI6<6A@v9$cpvQM|&}RVDBw5{1LaD2{YfVC}H06tqjtJ~`oDZUhfa zrVMU^`bue6f)>!%jVBblK9RS2aeWw5?VmAUpUz~3&HrPWL#G@yK_r(3SN*#CdR@vgQ9Kma=`MOp$P4k^3>JF}FzUDyC};eXKMrOWjHUP6UHlJWU+=yN@N+@@Dkc z+toMEBrWUaC;5Mb)m6v!Z%AF#(m<0pcS5k#drf0kxq8{#__XJ^N@WKn!8f&x5)Ex^ zZqFP83vIVVsuAROlsXWxU?++l7l{XZuxRA#TD-(D0CQ>EaH475$6KBZAPZyvG_FBa z)(Y{#NW7Rl&wn^MkB_JIou1(y`KbO}Fz(Pc@cm4j!Vgjzo=>1TF)UeNDRG;X7q0)Db zP{3pxWBJw<>_=PM%5z+;-Yf7^hnBURPg06kP7t$9@|38@Ta4-3Rjb)G@5}4W{x8+J 
z1Zr2%1q5=s*4w6<+{!wP%*}?yL5`GFR7mg{-OfgIdf`dMPa8i#Glt;rGsj4qkPb_UPpgQORG-4 z`=8`GQ+2eu%jH|9aJ@n`P&fwAild`HoHC;7EQGdAA3Z)c>Ef&+0LDEchMoiNX|yv& z{NJkI51eiEpE|2&^f7KhY3a3VE)}St{H>92y}Yjn<}W8dXK#MdLDlKG?+(nlqTY_a zzIVHiZoV3Xr<6xpjf5C2@k}*gqrBKa7_V6i^arrVS+fGdrMMI#yZ%9fn4nthM*!mQ zN0v8^ZP^pWf0I_tKXgO;vREhHhcTn%5cQwIZLsP}FEAQ4s+Zs_2M5ukt(IL2L?_)H zDmmXrmDsB&C!~o5uRhOnKP_5oy&t&-pURJv66+|s-e842HnM=IJe^@&ul)W;SGUYv zJ}ccG74bIyRjzh4xkDLpe}5Ky{>dX8_vs9p((9w}hhF&)Vr`Ms0aZ3uk6&$qQXkNLp*K1*r_r8U;?W+3$< z&i|bsHZ5~F+%?D%-o{Z8_0X~|#ClU~yovGF2l2|@;V(=l5-XBucAA`Nb}DEpp+F(W z8I@hAl;hwO0ITV?tk=GaF7_Cp`r{cBQQC3QXeJs{6mq%rn!5(_I>X3h#8^nV`^ji% z2A`B!zdc-zN-pWjV^dhd@4k>YrY`Ag!T(E`uG8nmb%sL`+$$f9@K<+jREj(Aa|&)h zMFf~{piR@Du+J!+8X2N%!TE31zzv>#$xh~58*z>`H0qfC&Vu_8!^WZuvnnNR) zud;t0z@R|3&Ct>9vyh_Nk$M#J>E(qL?gSU^6eA&c{Q(zCw}S(%s~j5V`Z?P5kb2^O zllxVAML`sOn`X`*@#D&#q`W;Bl32|;E_KN`SHcthl@-)XTySDf0D*}hY6%2I6y7Ro4fL`qW+~?co|+d+KdxpnHtB~8DVHL?6>(Y<_mcF zs}YTeoQD!dWNxBR&z@SQExO z#G~N9$!K>U)ujigwhb-=|7^2`1r}-i=Vfpq z*MOpw-=kNFhg7TK$3S_=YA9;bdIvjV!T)pIq5GdSTP1RFr+ebszemJ;b@gp zCd@}@J5KjPJTQpQW-u69ZL(Sj_H4j~R<~BWP1_}?Me~8@O1`84%))IL_xk5ptVh#6 z5yci(e8?wm0U-D-+z$k?0zuBS!A$@8O(I2Q=U!s|37FR(nq}PPGk9`@-g_MT+xbD_ zau?;9(Z3}h!^gc-dM0+^k6FF$UH#Amos_n2KD^_QHgVoE`Qyy4-N$_XJoea+cKN+L zfV?c&jP?nA)zNa~hAzBk*$Ye}p|Fm!_Q z)`NqlQvo%`Wc>?DR)bxz5_;p}Rw?jy*5xNAbnIWw{yDWF?(;0{u`40Ab}st5?>Mf?3TK-7&^da#(ZF5Cz@X-S5TtX@Bt9 zQ=9Q9iUU;h-bB6v4U}C73biR4yRHUV_8F@ed@{jZEvlfaC)iqCK|UzO6tm1ans{2V z0ax3xqii&~Q1As!H$rS>bcsMw*HWEPn`VlcIsL$RNRN4{sxUBGs|o>6k$TwOi;X(xqwZnekR?|l`=tjBr{gC4xcV*!~($Uvhhr?CPCF?su)PV~`Gp~Ic z68Lk)weA$!mrG+HZPjexg-uPoZtRrosFU@1SYu|)-jv`9^ht^{d{e@A&p^Z73N`4Y zekB(td8?R8qvIEOQ)yUc$`eH5Z(htlKlZ)gke61KUcT-v!AJivnb~9z=uzTpU3j~B zLw{bVZqmi8t5Cvm{FH>Ec9@bkO$WT?V0q>Uc)LB=)-+4MFj-62T#r|(oBXRJpz8Q~ z+_EEB@trJce%XDH?sbcLXCWsdRYYoP6V7yGl;x((Dsy1Rp*xuA-zN-)H4AC|;O__R zj7`_9(tUw7v7{$6+X2P>Ciu0X)hUW}BF!jSHuJ~Q<+#** zX?2{@9pX8qZvs;&+8O6N7EP+*f8!zOqwS3*0h{D3QuTTn6gs9b-K>bh;UnL;L?JQ} zfv3YCK*X*w2{x)9(7yUiIMFwPH|^WxYVx{#C9C>eImIG 
z4Ap#9@~KC)+F@r!(XKBNxWKqq%Ckl^65smv7e;&6@WM~X0r zB1f*2%si8bSaWkja7y)?a|q8jw1V}p-n;JiQv5e zFBX6#i^RusB@s4G27e*3DjRMVkNfjG9`4M*4$8@jFlZUxeWT`pi9o_FQHLZbDJ7Q4 z700VRk(l3?9Z3TKLewY$8We+Wj8omINq8j5_S;ucBQ9+6v!-G&P2k_}8450a7=FLr zd*Ch_6Fgji-Ifu`OYi-F*&=+=3o9Ep0V1FSF7gs#D9n_q<Qv{wN+)_J6B1K68g zfmeI0F?P|$Gns7^vhF-xJB_~whon1xrkjfv00acVa=ojvTXG0%!Du~@U#($%8)>7Q zkSlDG&0bm=UhsBPD3|9Y=NzEOnRQTzhdANqtpGprV`tVTo~V*FM_J4l{R$k?i!jM~ zaDFz_V5S$TvxL0|2?k*z{aT<(lof-CS09cg0F{zb3mirsy2iB^A5BxMisiz1MS^>a znH#2~?Cum}asMBIMR4%DG1Nf>p5fHS-x0|Z)su!!8HXBYwW+zx#h{h zn%Ab77@Q;-tA(5`(C_#Y+*cx_qN2YjTj7_~ijaIgXlHw~Ywx3NCWqco$Z;#wAdcp0 z|L7jCEvFaqC3etF>W|g*wfgO#XrYrp`itqNt$ikh)+*&e@ry5zZN zn-dullPHA#VCv&jNn$qjJfwIgOo=v6uuof$wLKjEZeDXVt18O5(u&zw zK=z$%6F1n^+)54QoiPL6(p}55LBx|yY9W-!cLhiXQAC8f)zuy(@oY2hVih&YzImAl znIsy+Cb6R+{9aZ@hvMuWgU=tPkYwhk&1|)+!B_30FvI~R3L*~;wk_tO8aWl&d&Htz zXx(sH2}hP$nRG7m*399vizUpF0GHzUXW6+_e~Ws73(2BH7nm9*vSnF>OW=a3F1F9T z#bwrdyG0(bRsnOb#4~Ly-xRZs#hZ=!U!!^Z*%Bw*_^T^U>ug_~r>lv@b)6TSCbq*~ zu`g|v1X1I1~&(q^@c^qzQizzR9FDfVXA39$hIzDkVL@M z!V7+OcrmcMRo66d9Ty3R44rtBni zw|zE|x%69e9*u2VY;Stwtb-&ML3lNV^wm7{40Lpa-@C&0@wkWo7a56>B3>h~M<~y; zgR2?b4dund_WFpz3r{Izrry2&9KUk6`cIq4sPZ+;9XvggJ>%8UqGbfeVR2jeIa)~Q zP!l>Jd-xZis2Ad~7SC7Duoj6ysMoV4H0`Bn&j-bq+jS5Z*+nEtI792^rE4T`#+8_^O5UI3i3Wgiz z*oRRI)~UZqh=N`fXVL=|NJ2}Cl_`K7)teUM+~WaBifzKXpu~PMRyAE8Me<~vfQO&J z6!k~=#}KVaG1B4>FN1M-E?b5_X+;tUjtzzl8YP6A{bZ6D*MH<#ta%r^#_AaY87XVy z*WRkZl?7{~otR~(cn)fnm7$pu%;-noxGM@l#TB{a2b8R&XMfeL?`GWpUOOpAoAIS$ zUyl^$en6RAS7pkQGE*c>6(?;S5q`C2icYi)V)k-eZhn*5=4u%Y@D!A_#Ryd#y=1xfAmqxuy*m+cED)~=AQ z4uBgn*X}Qu4bH38pDgAHv?vc|Iw_Lu*$S%^iZ)_K+_RU6M3nn}vf)>wcE1Mc$4C~a zj}r5L*Q|z}O;j;4f3JVBQAs&Kx9Ns>g-S1E^1DrB{+0b4ixq5C6?6=RFurn?m`v-) zVn+cb%p0wW9leijBh1BMlKfTfzXrlmP{R3eDZX+~J-mZLxzr!s^C0@2a#o^8gr-)( zWwig*)$Z}-XQA~`NPe%e?Up|IHOVN6cTderf{$p@@H?hss&b9J7t?g^qe zL{{_(+(QZ%R7wRDqJ8s3iCa$xPxYXI6=&v7%f#4keLE1w8Ao|0!Z2Mos&MH&9TN`n zXcUr(iCLa0=?OAp(=_UA9LVU83lX#65@Ok=Uqmzq96JH$Lq4;t0rO}&yaC!L!P*?r 
zxiZ_O8C&MVkMfnulRYwx+9?fIK!R?4BeHkbgd+o`(zz+nEcNvqgyUpVSY?y$$>kIh zRwt;WBAv(Cy*14iZK^YOM%XnC?M`A>6WgK6-&GD%CE`ZYrkMZkngH%!%7L@ujiV1x zEStiZ903{ymY#@vLzQ0?i-TvOcMEu2ZCz1YtwlJ4*-{fd5K0RwXHwy zp?@IFkT?daa$M^W5^E^KM4zc5$DgWRd&#Bo>t#?C(eSW|8RsWJk1yatMGjNAk_d@p zCp_ESIMSSvsJM?Sj0Iqc-W7&!)A)~$bWc&l+RG58ILg7`Ux~i9KM$NKV&Os&?@tc}>LnoDk~O0ALD@fLsWjNfr3cg~_|DH{_y)BHTGRj|G6B*7>KoK_sA|TGF zMY+jEXe&fi^Y+GXEtkskqr#UNjMxTxQ6jn- z%~&4AgsCAwo<`4i$5CF#^ddA0jQ|6^fDKv^70iM&gh|gCE?bmNrPv45ZYMR`LU?Q6UGjl@M<&IswQn?OH~pq+=8hn4+rEtRE8(*=d+&sl~ixUp;oCkZA#g zfFch>Baw+YK3gZLn-^594>4g}|0}&-V(ugT~YXW;gI+Bp$OJ#yiHrU2xZDx z+J%^TvieGWZclxYQQ_?b%?hfZDh=qO*Rt4 z`A)W@2UOV{Aqv1$KXV^Sm}oG*_k~qAyG;=MDGHNuMb`7xrftB(Zd8UOber=Ks#ClLp-?0} zyQdBnk{&n`z;lx3a%Y)6zxJ&rR~RA8UY5Py<>8?rC+WbiR=3ii-NWy0_oe#Ab@p;=W8>nQUzZY|8zrNHq21t|*=ztIyTrY!=mXmud?b<&3m zFtm4dN6U*eUN0uM!aml+r^Jg6{)dukL0C6yoN;M`cew|uF`LIm%6yR z4QuipGKVS-spkvho>~k^lL+lu`Co=}3}ALZ)vBg-563GOOM2&G1H^2cKuDQ+U8mean%wI?KVQ2gL8idaY_5$gb?=@dMR zKr*N${72ERW)(@lC$-Ce9*}~>3$YWwPMG6lc43D$dfN<#+It9iz)oXZa#d`0f0<@ayTyB+T;gEQL9* zk;R=q2A0iyvAWKX>HefR=ph)-MQiGO2{pt8r#M)pkY^t1YjA z{Ts!C35J`t9!IQ%6q_Wd7qPt+OYw?;DMA+%FU11-b5_dCVh-_Rd#OpT8`2N&8BRvu zRuLAZ9ehdszt-{FullE|>L!LYU#*x5ze(ODliVTS7k%xz>KF|Fa0-C9m>3*9D-IMm zbfT7ljeKo36f}W2OK7kN`a305G~5f&pCr&{+5n9V16n}@c=x0GSgy78N#JqQy1MbO z&480l%JrwahWk|GqL9Uv@8W9PAa95FWQ*72O1<~IhkDG`Gc-Ipg-kq&C;;leAKJ-Z zKDNHTUF};25&L|>zAP-b`hl+Zbrsl(a|{iOh)g^JM6uf1nzS9*&eXhWA1p)tg|*~B zv1;-z9T><?vcp0QS-{+-_d{5jn0CmR7)!v)pENG%p@{4+-)wdg zIv`7yv)eG|9)V5jY_ka4BJQ`*m#FS+LWR~-hRDu)|8$tQINYx+BMiC^PX83zALnGK z?OU8$P~LBRpK(!{7pxcgViy^QKye6lEoN@OH4r_@YU{O?0&kG8b%{9EfJQLxq$=`% zajMnwdmR}%>Uz-Wnt+zK`}Sv}S8V@{WBAAYxJOWho8-K%A^iq~#+F~s=eVa7B8>PA zK!{y2tpmup*2^%ZUt)F37FkIDiXS)i$IhG5V;o_5mY1N-u`>Ij{JU=teA^S+j-$3) zr;+^!;|BBPKfFW(6Ugs+Jx*7ed2QpQsZ9lpzLiZ;Vm=#ly|DM>l|Rt9*qztxqvi0dWQyZ{~l=<@@ep+xx~ zl&zq6h>0-&V$TR~%MtseJ6I8nnis7X2f~W1`Hz)^>~(3n-%l5hY^tZu#blGVcE!Lp z6rWWAC*e;VEp9J8+dbFVTtnrj)J(kgJnt08c&WO4>$ak^&e>Z1*}uB8g1^&}Tjs!` 
ztWt-0o22jd0~Wt+aJXGh5xs-G#@-N-JD0le`H@1D=q57aY&j)Hs%)}D94Lx?BQP<9 zS%2!6eF&rvf@Sk+vdK7ne|>$umG-|TpEP4Isk*#9`w{tsjM{Ip9b^m%%fmBH{9>vOCMb@#!pHWSK$fktE?e{shM+i-L2+>(e-R0xSOgUD>AdqO zzbaIE5y|D}cYJFpn9~)|Y+`+_vUN=w$LqG2aAaoO>ZkimoTwPY6N?A4KC9P)>6$oW zJG}BZ^Duf${#=_eyCZnxI6cJ7to{!#--P=ucBLK@`;>@ z@bk~`*HZf@{DTw`*OBD98a8x~^Nx}WBtAkQ6z;_eRoIPYmi$rg_d0&e|bmooag~zPL7U-V&a=>bE}K?!Hn!&Frq;V z2bMmll9ZsMBh=Qe`*C^oY49EMyLzm>^Aatfof-YthYuq>N%{X-s{W^DL3v86F2!89 zSn^f7dpG{7a-!7GUvkwaKm>(`oSu{3d;cKZa9u0A92%N!e^UqbGr<6*f&^4hAd6ENM$t2`3)nGww-5g@vUpt1OT;hH9cj)H?JsUD8=iOI(XmJ*F4Yvnz)7W%Lxk2-5^t0wnKLGt(N@~h){y+LCqY^EkO?F9 z({0PXzEW*y=ObX>qD4ljm=?0AF5}Wbbjep|;UUuJ93jJ8vMJ`9VB~oQ-X%k8^DPm^ zMGmLJCM&8?&T>HnYEGd^Z-SmH?tyRS$=TX-?Vq(hwYf5^0V+q8l$Lc!SiAxT#FB%5 zRD!xRl2j(E~OSy}Kz+bI6`>hI^`lW(0**>zl{gJ97Q=HsExk&fqoQKwg~8U{pS zS+kJId)ULb2V9ETw;i1mvXkN6o$K_M4><9^t!#?+$@sf3aLw#`jHfMSiz6${bNp0@My3 z?E$sn=SH&c@K`EU%$XktA2-T_BZ>Sp444TYAFsrb=|(bV z#90s%5QV#+7z2yP%Mm|vskH#q2`HL0(+vb0p zpZw>Jeb2tcQw1DMKWVB21#3$h5gkvg?VI5e#;W2JXeGg{pv-ko-Ce2O z4^c*T(UM)8p&I?A1A3CrclUG;ME9>BJ4$O!Y5zGEam;?H*+>tIX)Jk#b zVgr@511QC>e5G$&aZU)JKKPGA*Sb6xe_6Qr#^Z-v!%a*rLE2|q;IXDkOD6zmvb5x> zJaP9N7&-w`Yb(}BX*b0&Bp^^$F=_pRsBDs%AI;SPPZlmL%H6O`<4e3 zABlgSru`px6HW-64C7Igexe$whFWHg9N2aers@)RXAMntliMg+;#_#R#QVGhCm{lp z4JRvGFXNx7er5t(qw;n?enPYLC+(Q)$f111-XFqk{2eO_Z-&+X#34Vgl~i`+BdX(< zKz$+Wb%H49N7#QMixQ9eoq*carH(Zr@o9L?f>({&hde*%1Z)FY2W)5YFMg#Fh1IMN zBLi>Li2@Q!3&1%aw;E0c21jHdE@0QY*+05)?rR5#W2+b$GdsbF*y}2Pq?*2VxTO?j za5t})!i6JavqccH;jxNnH8DkLOJCWZe7~-i7|A43!(p7_(28TqN;a!KxIUfmP1Gwz zzidNe$RN~ShfnXQj3Xn!B^x1_iNdZ+MT<9fzzG%yjT4acKA|tQaDen<<$XRSFhaNV z>46554a>E}$J7A)!A7ogU*mYMl`hHeOE!)bo*2uMLJ*uN1JSmELY*Fw8u9qVrS6OS zQu<*FC3SiJT~;cU*ziE>O5vD;vYUvwFqOW^)O;WmiPc<|SDt0(L6^igOXbjhka7YprI4yUSFZcNoOheAZV|u{O!1UT>1^q1)Y6rcH5^GOzNP%wh?^x z#l=gm$?H&n9RA-!)8pyN6K`LJ3d6^u1?cL~`gO-;zHESTm*NNGP*87WF$u!ICV%3s zF&F>7`S8FW9@^_I_S$PJtz^BB7tW=#JD9oL?Yl+CJ>kJpwRACi6|(H09GD`tSju>* zHMcrc=xYorU1zG&EF!hMtKEJa>%ZfKq>Bbh2QsUZ#e 
zw&)mT9Qy$z{kWHANPf6&mQhauH>=WN-BO<%xcYSuWwDssvD}Ao$nH4xV&FA|5ISIB zIfOEda8p+l>1D~6D?5d`*4552M3hU8%@M!SQ%rA`(e01eoeX)l2w~NQc4It>m2%)b z#3&iqgX~_r?K$I|P;CU~L=lme3cdbRyHIUUA>4U2TbG+*+Z9l>OP1AztHprFr3lMI zMxri&P}03MQ1}n5Hw2N0kt>dH-i!BG(Z#0nYHw5#WT(7c5#h+CL}k{pu_8(TOsz$+ zupP4}LS85fabV|`(L$4k7?)@G{9_xQsxB!R16J_9A5P=yxfIQ6^7*ATNKuU?0{(B? z-v5D6D3|6trlRikf6Y>%4Fj?G$5`gSjUm(M4R#Nu2&X@Ujs z;(a(QM_qA&Lta6;&aO_0s~FY*b01auq%(SxB4zYB?MNB3)1`wI|S#K*~oXlcGR zTh1k&2bM}B-`QGhLJN~IWd>bW){zp80}_>pOiss8z**B5EenyiZERPn8YE|`rI9$D z^GI(7_@>-Bmp-(`joAR>&`*G;=&s;XbhE&s-wnrKXxtd-3e4TOh>9$mqG?+?#v)>& z*~8S)RjBs<^#_5UQYIFJ3(}|+APlnZ5fW`$ zjIb76V@KJFc8aBYN{}%^BIS#>!tHq)ta{j=zl7eH3rdA9u7ei9zNB)M?flf7a@E&0 zpNoTFSOWd7%PL}Bt37Ah4G|c}R8xb!a`tW(>av@knB=%-rSh(uS`JzU9=p4AbIPx- zzTvL(y=m#};&Ebf`0&7GQj8%Jl-FUP`LY+1cOXmhy8T1MSihG_`Daf$E1zljGbdeg z*7aAczu^Z6`&2VcFm(T>p+p7=q*W*P0wBL$c*^zL8bR1H{>`dn{d}+djh5;SBVmJ| zoD6L}0Yd@tPXCTK5pLPpC$_n_FN6iVCN;_lduhKDkCILLF#h#E(zw^T3I+8a8w8h7 zmnmdeFGBCmtwK3?xIDPxm-|R%YQx!8ie1Qf3%4ePOUJdz5~_O!YUFX9D@FjgL{g&a zX9_7dnA29uOawgnhdMe720LMYr}vcO>_ZkX6lE@QO=)nP;wdr0E+&M>1^i5gOcmj+ z7f@~_|LQy57!J;06J-TJ(Ub@!(MG9q66Xp0qNB#oNX`&$y@X^xIwQ7`(IfRjfe_n7w%-h~ z#Q3b4dN5z@V@BWI#b4otj5O!8%=Z+mkJd! 
zZ9n1i{tF;8AO4fTF&nW{1?6|h>0hcUEuT*x4UN?Ab&JBvNIV^dG!OGo`I#eaTj?TY z@LMov!@;e?Rdm9kI9(BASF<>zYe~|NkI{$Gl22?aEKi5${uY37I*QVeyhNPgVmxXZ zCyqvxEaet=$wFfwa;)kP9tRgUO<>9bo==9|Ue+!h5_PW}_Ug96#_s(j|LXZ^U*NglF(Bnx@Y6XMvT60DH0= z7mSA7R#C&N@t4aZc!1`^0iou;@O+vXa3DvMcpeD;_L92G8FLEhJjk1Ifp6f6S}~R2 z`ja#O^#A9f{xg{d(i8Z-vFcEr&7DQTso*cj0WDT8`f4|MhFry4j#{&$LHKu^loNBV zLdQ?1m{FzPp(rjW%zd_hdM9=i-q*jHVtio9xPGZVLr6~~&LAQpb+xa_WblkrN5Z}*Lua`@I?YAC>g;e6BQ~-6C-Hq{X}5-=q*uY64l9{fB-;93t8i4v`=@bA z$&K-9a<7;;yzAUZIe^mvqr>cVoMQJ-EbJ@|Xo9$*_N}>ha{Vw}2aWB^y@T+GGxZ9# zdAyhn789kt^(PIQRH;Fzzf*5OICZqjV+yp~dKQ<;nia8rWv@FcDBD+KZG9yX7I(1K zuhM*T3a4tj_nI5yZ_o{gKtmkqGHMoc*vy%29?$1`qtGh?M{x6~?Lo)$mvVYxhZ(K( zFGaBh)Mk8uEJGHTvA{+YjMhbPiSuW;3mMnl2JCwa-trJs)Y!Y|t(*u^1OxWyY77=D3M-+SP$ft6ZKft=P>csv%x` zEnBvVM4=%;nx3SA7})C^ld*K}=a>NIz!U_TKtwWEK=G<)*Ux7pM$r_1n#t-s3I(pm zFfE5wxvUtngz^_`IS2obDCY54f1yWC>KbZVI8t+>OPSgI4`Imh@X1f-j*;Cc&kHny z*v5eUs3Lu@&`B$SbwObXx*a`~dLECN2soN?M2TTbNzub_TJ?w~wls5v*^y(4o>aq|NrVL!;x^PyA@JiTosUMw@lby6h!au97p zgku&b0@QFB&Z@1Q5n9K}I`2)frePVWM)t3r&hgq`H}Tqa@WUwN{bdvn{_=B^-lXnR zlU?I^rzQAD{1%01Rz@u2kE&9b75mr;du5-UXety0xkZ6trhryf7m*a__1{uG`7Y%A z9EwBVXIz7u^45H3i0%@dZG;~8<3+)EKLq-zU9D+R$=B2@cP=ldkfz*TGfifBR<9Hl z#(>ED6DQmA-?Iu^kJA1j`N2E>8;}kMCM5O~96{gnlsf;k6KWXc-(fj2x&jez^NjRj z^PJPGepehdC8RZ*v-L+ph|eR54{VqHH>PxiDmX#Egk)#ryXhN&@%h_V11WgvFaKZO z{J)UYYUDD%njUwH`ez$_HyzV~l?CUIvH5)x(=({uv}%dV2YgbPzB8w`^M)HP)mwg* zCt5s+s`RuX#km9K0V<19EE`Ex{R))^TcL;Dl=03)@AK`v#p9wkTH&W%2^t4PL5ttw zzD#p0GUG+u?Ck9PIeRNU6MjSW-^hN_ttnOJ9(&!bcpG9q5hCdI^2j|GHwAkP9+k8| z5BsO$!(!(dnmha-@8155o@~6yew~tqyJ@tU>32ESpSyvq8UtYQToa6m)=E?#rv;F1S|*hYM#gYn^R#{Lsm7 zReer${^s2ixuFPVdT`9o!d>3jiaeeejsWS)1FIIS{{8vtlNB_ z^h6Le2a#6@k<_!Ao!i~3)qXn+lfk^q%Q!|hlx*(@lcvJRjGuQMmV!a-r-`xZbRLi> zf$hM^4C44sX;Q>!H7uB+`ii~InK=Xw^pn}@DY7p66%&>a!0p_AcJ^A<{;L14d_K;9 zFZtCrW7x!R@9ibQB)R+iUapDpWpwR%bnE2AR?_YC*h9eYTlCiogXLDXF9b z3PwlPNXp_;(ABLJ0>)@6Y||mJFr%>$q=&c=kCl#m*+mW45c3O=uN&Djcgccdj+&Sk8djmT=4)&q(66t> 
zc!}PyN5Oe=haHas7kTZrjc~x857qM#wjp9Fx&;5%K>I(ZLD)OnOpWd(kNv_A*gCoc zsID#H^?9}rhr6>9W$EniDh*kXYzq;Z$A`Bn4CFP+bqh%?Z2vG{TE{ddI1PAyGAC<{ z5!5FqA^O-HvZa6FW^n(7Kr@G`fCzW8klMBcLAoHFCwlJsA0w$-L^!vL07AY;U<`go z1IrKm9v7wh`n>k6^H$L2_5##P_HJL2`!PgvTcSbo$b50;CJ5E|VD7Ro<)d6p=3odS zhgK`x-(IhE*S6bhqLtl93WpDj2QjtVR(|!@o-!B$#TU--zJ$??TQp`WL=xcdnfPY4 zy7umS25laLIrPl{Sb>{y(aq^x+qgLVo(&Wsa+0SO78IN09vEr?9@GY@O{bLbKlL*x znPr(VPf`1UxXwf3FKJr0LM%~H8#`i8*2|~sweMvQH!-STXM7Jx();EgGLw8KPKery zj#_ESufIfZNu<%DaQc+~uKr+E&1A5)+>@^2UvXN!smpx}(&q?VuRbxJkR_p5oH zj1e~Y8Hk&6D%5~wlzH=2oBUcAiaeD9S05GJ9fp@STAeO9Q`0TozGSSnahN7Mu*o8~ zNcsKUzLrmD{C6~&tM@y-5y?)pF}!Ow^?R_3;v$#FZ}197iCH^o(e6m0t>2vp zV-10oteFTg5Z3hlgAY{MDEI=Q?Ac{QJv9rv{4aD|&_Stxb=QxQLCf(5i+##25<;)e z|MaJ>bRckU*Pe69OJW|rT@}cltqYw|gadKn5^Wj2&t*xM5Pb!7F4BP0KqJv+LzY13 zUwM!;Ll%l4(jeTI9gXN9i;|n%he&<&-ZrxBXOm|1!cXf>2rD@)%?b8q#a;eL=n}^Bd9K>#GB} zeV=C28*K#XXF$caBY=JDpnPl=TXnX8d&L50J)??*bOg;PL_3FW<|DhB9EhnX?`Rg% zd|I^0Exp~jgM?eGbLv;R7+|-+*gRPCWSfn4Y_18o-$Xt*e+-OSMx9xb?ku#bF)d|g z?*g}dsa)cRX3@(!n!cE;`&##P_ z1jGce>v2-CWM)E?PFqvsVnkqPq-_>f?lzBfEL}>nu4pF(une2%4V4Y;>pz3#w^t%3Ac~5y`gt zocpNffbHeHoQ3k=C6G>Kw8#<|4J(8|8b6b|zoT4fvAU(XENPaJ;50_4W>#B7avi>n zeL$xgzv&G*sg*y?$(H+G^yTA<>pSj=K)-TZ2MIfTc=t>9lnOZ3zg)KriwedNmMNf1 z6(A)&ILqF8CX?#HS$bdJ!c48AZs`SPs1#VMm&Mmt`Q1T%BP1Cl+E(>0b_VzstpAGt zKT_)ym~mFkPEs&->0-^}Q&t43E&gr9sGaxXzBvudQSt*emOlCgCZ2!=RR;hckmWZ0 z*dJ|pZ}VGdk{hct+TJC&cRa`R9snbS5vYUh6V>BNHhD@m>8c%Gy3-lIp!IUv8^lQ9NXRD@_>8){PV0Uz!w*kM46#WoZ2JAxQ#q9q7->1& zB+*u0h1PP@7QI_&;DA&gkK2dN;CqsGWPDP#y{WfOe}`@U-bw-Q{CaKl&qT}lP(-Y! 
z*6i@gtHbNzZW2CO<3t1;SZC7BI`OQGZ3I;ga<$fMaFV3wy8Uhr>Lw8uzCZI_=Nytz znR4=*g#kP6%fRU*9zM!a8?_KM=rO2|xLTr>`sqGsG5;HCytaEfB8W2|yXS5JBhbTH z1gN8{>25OL(g#q)m8b{BZ)>IXHCZ)4%Ypfp25Ow{W_q)(zI{GM@WcfN9`$MP{M#x9 zGL9uq4!_&7aIP`w!V?U_^ap!?C|c9|opflB1$g#|Eg~rEaiLD)K95HSymF&Y?A0|S zeT)C9p-e(&m9@7RZ(7gK(EMG8=dK(@NJn4>GPa~YOg)7F?fIQ!E>pv0rd3mqMTcgQ zpyKsWx8oP!+-ounMr{;3bjPoR7Zh(ZyJiZ25?iFL3b3$+P58X!BjZ6UCA1eDut!HP# zxJ`71Hl)GOA8;u2DDyZ{?J`r_U%->etid_?_n~BG1O15#LMHV&=k`nn(r~d%m;}~ zqh3k^r1yIrUy4s6DVjMOJxp@8ab2>|#N3J~TRuO(sU#tf!P%9EVihGYtXRi#}J9^?8U(o7p^ zqtYpS(ZltbFrQY*y$_F6oVQvu$T${fsZuq60FtL`$EAb1zz+$urAiEeDGhqIXK&<@ z>&ErCK{AghP@_5mAF(g~c}?~> zT=K%}+c;m)PC9fG+u-Qg5d^t2xBARZOtjd%%fRuX%7}|EK_TSM<)Ul$B4B`iIVw(CqdSJ@U%DnyX>9bf-Wt)Vk{RUsGnCXIO~a5m4Q|@N>?ZDT0*H zc{D9MhRBD^`}XX+b3U8g;ZOXn&n)*7o%2rO5^)^pyU;D!q3-bteZA=$06qn73Uqb2 zv75!@+?6^2dOGG8Z2dp7Slg~U+bx^s!Eu1K=)fQDJ^wJH1EJR<#lU=!nCa0}H+f%jx-(lFIZvh6encN%{7r=8DWSG8Ye zYIwh?zd@4-4;HOeV`A1z&>$eKT;}!SBk1&-<*zCOo0&Gg7*yPsNmfScI|!EJBe#v# zA4>ChWA)T3JM}OVU_i`6fkIA;ZqyXbJ= zD&lgqxoIVxUNS80dDhXS!^rVXd{b0|iLE_~PB()+vGUdwHqg(z3A{0EE`g5rw!S?b zTA;IdqY&TEEzXrNsJ=QFSbRX1D4~2h!0qqCFcc$K?^BvM7n>j<&DR4di^8skST8umv8 zbdasvXFoB3%hEQH_q@s4OXt&BshDCOuJY)|F#ETvzI=tz1sSx)l0xL4*jUBbSr-lM z0Op$GyOL;fwgsq>V)H({HU+QVxnIEuVO5yOyLE)^cA*BShuzU@Gc2IVJ)3>OiUA=% z5bxNPL{_`f7cboY$v+&j_5Z0R_jp#+NSYG7Dlo1`S#p;J8UlU~8H9%IP4B`l z!2NhFWX6%(6wCf)lN9`ESImZ6{RUH<7a=CE++E+IYn-$={roI4%lHjvDBXYD%pcOVax5 zZHR8y-s>aK!qKVte!OD^MQL*fJT1se^sVnWV#gONMDsqk%<)E#eJ9x*-tgu}l171L z6b>u|Ksk2Jv>dzB&5~7?#GD8>M`0wBv3}Jr39TLhMR_;FKvL;L2OfPTyZ5w#BQ3W@VWq;Ser)Ve^iysv)Tiilx? 
zgn{V|D05GdoAOx!$8$`?B0V>oP|!o3?C%V6-%lftEbHFPzUCO`c;icTU<|t z?a*ahX_+P1%uv#_YOSw3C(ZYB^pUGPVhb*&-Mv(>rpdDw9^g=D0*6(wOcNsatuyjS@SWbtpCQ2z^cML%R+FZ`E@O$35Zk*d7W z3aeIL$w~e2#petuXde#ToyWt|hqW>1QxBC`}6r3-)HlfADgx)6FhZ zua_CZUv?CP*Ai0I*l-FlCKWC=gArB#1VIA@tJ22Y|-x|G-BYmV&N zUz_?RUfSZTl9KTVlgi&E@h7HYtocrQ>uIO>~SbXY&CE^L?=Ezun{l@L-$JlyU&2qem}OH|O+Fdkfl#4Ak;MxuFy z+cWP75Vd;E&Gmn1L8jmoEPxwf^DjiufOg~Xz%#8Ek7qf+-~Yx>LC@!-1(xME8GQ^P z{2ITRq4@>GhbG&^)0i0fIfk;y&4jj?pRY#1_l+M`_Q?u3HHqPs&4{~;3+ui5R^|xdsOoQTw9em z-+D|ruz`V9?Nb9&j9KZnV;oJGW@nb}d9=++A^m}w8W#XjdzS}pH2mE^i`EqPdcEac zD4h7keG4%eKvtoenjiU~67=&^zUp4{qy>v}dp@(~Sw=JQwDw$oU1-K#{9DnRsoXdr zy^&^TOuzBsL4Ia8iX80TE+D#SY&lGRoEF0C1J;BT&dW0vf)`X=nM+L3@wIo*h_*m>%D}gr^&@(v!%Q%MsDjp*jC&dot zl+5|V+=wlLaEnw`z%K1mzlo8#UIYKQk=4{+5HKo*0~Y2!siv&<=po?bcx20OE*~gQ zEtK)=oe9oIp+wPK2?M6UsnWFiUL7Xitn*`29c(VNqP8B~$QO2sVh-7b(=uT%H^-L? zSl8}kw*JI|r-Iu#?_%ekOC%y_HuJ=}UM2L+w^H00V^{vKwAF^YG4iFvXc5d-GOjK= zii;OD=RM=)dviE@8}oXv$idFY;W`Q!bVhM4>)dcS{*6Uv_z|CA0 z(Her$i%fKl7?IC`_NY?51`BUb0YOB;iQO$S3H!q_!cEVEm~Y3~H0>D3 zmq*Cv2cYTe23;mbY3tQ>q=zQn$i`@Ve2ZUm*{3@5t0cgrwOIYar3)+u@dj)=mAwKlh-DmQxg9XyV!MuR7jHb+~} zoz=R{>;-Xy6H8UTFOy9zTJt%co(}5OZPkqS2(UD)1yyO(p1?6#!fkO{FDK zS(5TOIjBo8DxWh72veU@KJ3)DzZ^s`kumg6oYRhMwWJF4EthT?=}=b+bWsZK)2_ym zowtmd`!iq#2x{~W!&ddpz!5!L2RQ~zKh|3`JaR`qWuE)VPny>6VypaW*-^Mg z`x2;A6eW`)Yws>G--<=#&K!zco;`CurTBlcY?i;{VHLtH#RfQ zcY$qdj>cYZ?91V(lFpR-g^gD#bnJm?$iEI#_n+ja##vxF^Z$;jN_#dORIiKBUoIrk zD1d~eJ>H2MkdMVadNAmul4`G1`X4R13c>$9y*#_YIJ(R>yJmv4#Z?TC18<#zXksgU4`fG0XS} zr+JBD4vvcgm&nB5rxYI8H4bp^Sy-U)G#0d19FTqoXp&>5kG8*^z3xPLd);=z)`9Cj z)0u<6?R{@c_HP)>0##Ebr>;yke9x)~+K$i;0E7SY2tsZ3t#OMzf z1${;>Gv)tlwimlK1hUznYEt;#!3^RQ6MK=*{M5&ug? 
zzoOSha-VrN^2*jNySw_gySJ;Su$R|Zoc^smh9MY;adTyqjcSq{MMS3OBp3)gex{Lo z0gk;CCh2{c^c1*L>8uXiDnD_i9e-gWQ8OBeLw+zgVgp6Nf%ji%nb_eh`KuFzifVRr zRpsZyO^EO*%sTQbhIQY4OakhX?|RHk3T8EypO+83toMASnpoM_4I{vQB5*c!6yz$V zu-EEVh#VDuzOXQ^Uiw)$$w;hrHXCT9huMZNGFeO4!JScw9<`d(%>FOJkHpL&n8&O!ZwYVr_DJ0~ zX_u&P&zL__ax3m<4PI*5WL4v6c$aw0oU_;LkzX!z#B zOYCogrgyno&w}|go)Vj!t)Wk=jjhd$u@xAE9ua?JXyUIL!*L|t_?i386sGx$Q#{JJABd=MWrLk&j zR)Tr@vTU9s3HqG>o4fCf{#hu~XKi`;a;BctFAk`!!AQeNYNM`gC63g^?lWi1}dGlJMgj(IYb=$r7@8-Wp^19`Q0Pu3$k_$PW zuEtm8odr}0`nw#N6Fsj8{zCYlkEMyKgB?~AiNW~j-byaG3M*K)WL>>bP6lP#xN-Dn8KEXj#K+9hLuas&;|3aUR^pu zMwKbXJ3w>4vulL!$o^?B%Ev$FhOA80ArBbc?@@3c!XQ50vqGU@kf-eHX4=*!!?2qp z7Wus^09#x~QkvAb%l&k1^5RM|QB^GsD)DQU-<*Y3*sqNECCJCSQ=(RF1?oBr#W^n8 zUQ!#!pQD`;WKX?`^0N~d4J!k$EdC|@Q8c-4RBdiFY5NLKT6=`{eiDH#qfN~AE5Jrp z3Jx8R25zATO*44k9vfd&3qKWUyd6vV3-nw6^LcAt!+3m-#_{~?Jobm){Ce}+C)Pft z$dy9S(psR1eS4&Zs+rau49Y@XpaF^LJVUe`2q}o9E@-0@s0#XOO|DoL+UluXUTU%3 z+4VE ze=p^qcybq3GiT07Q{b326BYfZDLO5!pTE~Jjd3cR!Ud0-J-YRji+|dabOT|UZt%TF z)V6|u(h}RdmB!xCW3OULS7=7?qSYNJjfKT+{sAzo`jpFI%)%Y%q)2UGxqG?qmyFX) zegG!(SP@f~|90U^o%8cAUW)=8)6x1w4#3*7pp$)7JJi4asZ=<(6P7WRj2lc_i!c_;jL0@1xI&<{Fo zaf+9ktf}!w)m;-<7And)TPu|dUGvi;H&JYPU9s#O+Z;Z?k(QM3(pGQe_oHS_n~h+9 zEH3|uWtLAY(ay*;T%`=2l1)ohVFLzgsT#*oJCaD$t((h*f4aT{5xJi8{po%ucc^L6 z;~3_bn+O+bP4u-Cwi9y842JeW;MV@WOcPaDHB;(&P5$d9qgD&YENa+b4X<^s(~P#? 
z*9Z0XW~U2`+YJWjCS+^>Qlo+7jaX%?nfe%Lket$_$vJqj~&}QhpcezF0oZnm?ShiNM$F zgCvpN9EsYp=lo@3M^zlt!DeHc;`=401kN9EED7WffEXMEeH8)^@r`x9@hiQgLaAa^ zM3+(m02s#cjP9@tD#=JF%Rdaqxwzu@52lGuK%1E(s!G>2nUqvzR|v{!<_@5}+?^f9 z->Wdp(Dr z9Z24;O28KcxeB@@tqT?q?%=A#f}^;ZqZxKui2_B#ntM&PkG4A~fHDGu$h~N>dBhED zqx^gVrrh=6b*WM5dyk>!d=~6(CoIUOl55>Doyy_thF~MnY-4z9ysc?0I#$#?&e6US zQQ6Ny@g~^O$^F1qzpEY3EfmLzF()J3Af6e+fc|2fea?JQDTn zjJ#@nBTAG%diZps5S+9AEeGn|!hS>4mP2Umbh@_VUMQKNlX-g^B1lac8l7l=Os|m7 zxb@Jjw})VpUvRg$w@J?-+SCsdU-RM%G9BoX3~n7C?2zh}zsr%~U{jyF{tI@7LdZ}z z-{2{jy^y6>;cc_z3j0C!P{HCfla?Hf(>22p6 z8}CQmBGg$az^vqAf*69Sjd?Y4rXu!nyYs*NAK&DbEgeckgM2J=UH%(H5M9;JIw{*Y zbK_rKl4#8Wm6RW;DdR*mna3t_!Ava(pj<@GjT*ELMj`^dy&kzM@$2FI6Ut#5I!*i$ z%E$3z;ZCzlLiw>Liv>J}OGQJxUrF*3=bKT5do(X*Qeh57OB&$fJ*^~Pg&2EuX&KGf zL?8p2=P>uzmH}=4!~8~Zx?q7R>P0Tkw%n(h^6Be3hOe#+L%LJyiMXVi!0C)-mwvP1_)i%^FZtJYmGxV#4W{zYrG3g}FteVv|L5-bE6H;nC z+m4vnzpG6sBW+#C-_K`$L$o9gsvQ7vIITAV!DEzvL5J!noB=S&a9qijGz`^J`Yy^A z%N5y|UEm(f$xe|Yfle~YKIYzE>kYyfifnG}qXx$75yBBS?B0v2a_cJF0wwGAAcRt- z@15%aC+A|3qEZ|MZW?k(Ns&nL#nsb7eqUtt6D6(>gwd$d@YCQZ6hF4%w3^(8(&wVCFYhre6f5E$#yD5IM?RKB7-RjtA# z&N`Jy`$HISJD&nks3fXi-R2KPjMH}l-n&cEOKnDD+xHJ+K5=(e$deLDO}-;Mo*zrp zWk?fv7I(CoMz36C@pseon}4R7aSpqAW|31B$RJ5a1bWf|klF~znfZk6Px)5rts8a8 z)NoD?n{qYf+VgBH*T}5{pJce{2iB@ji!2GuDq~)3;;27dr$BA6;~E8aO}~eu5DX#H zAuJ;Y5WyjtTS8y1E|3e5 z!KMh);_-zy^yC<~XN&{ZwJ`B8iNAf*z0PS2VldR$?hvMEh5yp#Ux)kSW_m%eFWQQD zSpZ*G?dX{^!?+qBA)O54e$*()yQAEdG#OL+uwmP2=|+^&mLuGUAnGoPNr9r?r$>ZvEKuqXsp0Vf@}tN{6-Y zRa<(8%B!jBist+k=ZhE2;Ngwbmfd{nNux3)YzR13^|rr3*Npi%#TaxBzxe3`VRztP zQ)>y-KdHQyqn7l$;I~yc4_Q(iU7A|EfJp(c`zK!7B`jQqP+E})9L)#G|MGJyh>RVZ z7@vKaOWR=H^@R;WX+HHeW7I7W{qoc%v(_1jK<17Xl3-e|%fzjWvo__cJ<%|Z(VxHl zRKoeeOTM^7h980BOC6z_uE${z42&-;^3{f6%fu9QCG*PKKoSRD&6=$6aJn-{F_4ET z%py4Yxg447usS7SB8uNmgip(EHucB#j#+zzz5KC>G(CopsR`oc(LaQnr2E3z)tLmj4 zw)S}N%C|Ueb`R@DZL)psUpU0tIUH#>M+M13GiYy?zH@1Rew5ZWx2G{ zN=Zqm98$#=Ili3P7I7eHjX<@2L7*PQZih$aF*N=Bepiy7FQlcSOnu=j!_mZvB({S$ 
z3Z|H5*yPclQlrl#w@m$hN~HPu&2#IYKwUJw0(;0!#wy+4bBbLpXvbM1yJvG{(=xrO zR3mD&w4PpufzQPS-FO?dU`tX9u=aB%FB9Id_B-{RdZzI#L1K0gq59Eiv}2Ak1_dF7 zD^4LOZeqewz_o)${A({S`oe=q>}tjUkDAC2Lcnxl9Px2WzUxKxapOKP6*Vc%k^--WW42t$F2o_uR%E@7l2k&RFfVYzrLJpo z@&E=2N>jC~od{xnBT-Lh)&{H^w8FcO-#H?%;JugO2uX*v9n*l`m=V@zn9IuX8?SUz z5s9g{o16Ln?v6&JZy&$#C3l&34aTXMvYp3G^T3U90p6J&-QiKFC7e`9i#Ghk-3nRp z-Y5@Q^jK*ScUa_cOu0hDZ04}tcdY8clceB1zYJb%6e;?YeB*e{k1kzOTu@?0s&Y)@ zkr@OBj*Gtc4Ek*{8w5>E>25{#^f$K+643mA*MU#D|KUkmQIeWb1_L7gn{$1;Olobb zRl~}MhwQP-m^A>Z-qdNPrM~>YvV+)~1p3f{M+xiK1;mT_clPOZbVm2)8B7yHavm*w zQ4!taB+;@>n%mtZ0)0`&M1*P9w=X|D;^)4|F`m$xO|yegG^fc8Xh4t87Jj>m{&2Q; z-FVahhi%H99QC!|nudvp7%5`$FMi~4$4L>rw8K;A*4yyiI$nYs{Nsr9YHIxewbA!f zI)+~W8D@ek@Dt4ROChrQXdG)6LYU2V+50$~0n?m1fDvYqTR^fA9C2?4C3JKX zw!Srm$~eTK3-8uNvE?7dPHp_OOVb4X)%&?}gbzM1vL3StXSVrOuM>0V<|?J}7766y ziis%x!xKHs0s0W@paeUL8OnRP_%f9InO!^#A<$e}_KkJ_zQ^$Q(@<5CXOjJ|$T?V6 z631cW>G!G?n?*7fZPj09ad?QTrmP%%S9Zah8Dy{a9|&C21g*!)qeYdR`A#L-gd%2s zr-a#(1~*kpat}kcHF1vHx%F(iS;t5cW{T}mdtvG-fpVX|lLb4-r@=5Y-AHLa#)40< zoR5^|+Id&(+ER4A&&KHz0cVOv^jI~B2JBRC97-P)h#Zn*k?COZ2PS@fmAmJfCI-^0 zw%QZ+7ABfNTwJTP(*geq;m7-Y4RXQe2KZQRnITSg9AzJ$k;~GdE2x`~oT2i=?z7Js zAbA+K*X$zh+Fk1Imvc(w%df;Z`XL$8XbDQpe#Cxbm+{3;XE!RO?C3d9lq7gv8776-U23!^x}-95OwyE8Zg1ozO+6X`3G}~) zhlp>Vt{0sHpGOLwxW!-K-yEfb4Hm6|XDk@8vSK)f;i#8ILlO2A;%WO)e&u7CjQvG_?MC>o@^o(Or9kxJe-O?HSbY5 zT52@4nEg?v-9=|XFkOgoIL6Dlr&?$AgfMTBJgBh~#`<`g_@LHzx+J8?>X(lCv6h88 zEC?f%6rwj}|M_<+C=GbhPZo82*8=C8V)?Y(_vHL)z%JWWc<3}e)aiuiOLATR!Q9v1 z1t6F(`REE3SKaebBVivUmLvV0u7Nuyt2c1<^Q+)k>P5X?;W^3|YkZ78VlmUBUR!<{ z@w<_W@99{`FCz*^Q0m4?syi%-3A|m*HJlJHGR9Z!PDkln+2q~giEsBv+)NMzn={RK zAvS0fg(@4Swg#THcsA8qJMkX7xK&G0TsHiPS$UK>e8mY!fhE9LXoY56jbCYt9J)HV zgk(6jV{vb;d<=d3agH$fm%h*bR_ZJKu>Cr;om9O{)b!a=n8Z3Wxh}=NW&LtFsI8=d zg7s%cTHG?6HG17;uoz>f;3lijYnx{8W%bkWlhZ%$@}uskhhd?LqudT9_b;mvfGRO^ zbi>RhTvi39qS@%9fc?!-X$X`f&r!dC85`1AMhh>P3!xg9ah*!hL@k(aR~?rEXTPuT z2ywxWu_{zqmkZ?4dU|22>Cub8mlQzf=$b}PTfIswIx=wbMf*(LPUl`6X^ppWD=3oT zSdkdAIEgFs=Z6W 
zUWwL(p=EN;Vo$GI|DaG;k&8u{%aWrRS^+kbX{nItikXBl3q}MlGlP(4%mQ+^LVhZ? zA^u$l55z&|YfcI;H29tJLBI=HYyHn8>6`3> znO518QE{Q^OoT6Z*7@@#m3_fB1_+~1tMWp~=5VRR>)K~Z%W9)YN24gF)@o=HiX{gs zwh4M=|2YbQ^Tn-m+A4dk|9JfPONW|kFXN%LFye$l+$}y|w1SAyOH{{%v+V3_5O1!3 zflJ|wnt5$)AZq_=k8T*y7KYrMMh2l_}6Ptz^ ztsKfF4;exbUQPYAZ91!+$+OFXy>3% zNH>R`2D7n>pZ5RkApUo7_`mOjv+C3d0{cR@A0)K3OO!*ss)`9vKky#~TEWN4$NTy( ztFG}(!75wgrjR1uQO{|Vd#fGe1^0hl*s2+Z4w1r_lU)~XtxmyZERX}&lWYc<{xhir zgV{L>s-j?YchN5{`X6JlK8lr}W3-6IG_r;@BwsARODedsr##bM0Bnj0F!T8CA=h6wPznSxSZYOw7 z=z~lkz!s5ocvafU(^H^qkxOUYg;`MsP(WolNt(t`*+`>$%>zgmf4{P3o@Z0l*))#& z@##sXR`7Yca@wB527jjhi_o8x?9{&%;Ejc~wPDxp=_Uv1U>`}pAeHB)@u{*6G7QX&RVo_qO+>em-i9R9plk4cEOf&xSrjl=$d(v{DIk~fV)?h8YM z6Ni2wJ|>w8N|fQ?5q5+cm%#-}*0!&8TOO<}JaD;B@IF1+$Jnk?n#l+4^s7*1it0>3 z6@~{iNj)eeUK0-aDTIohzJy&+zuc3-ZvKo+`+KRI7tbzTf+Z#Nx5?RzY2faXgQK$0 zglJV7v2%BG$52&k&R4d#g#B2*hLH9WYL4ffU~KQ|YgP`MYFNM< zl)8r&A=zF-i-9}e+|%lOG$%n8VaxcHT_Y<_J6tNI?<`qB(ph%aWYNaz4<4IM(vzx# z2$)E8Ld!#sLBpA@yx^rxQsQ;q#oUn5%TO(=*%}j%*?U8EHTes><$v8YZ|fDr#I%GQ z-!{pt`50~Wc&zATNYj7$P7kD{Fk(F8&4}y=*ol0ML%nZ&ot+$Oo#q1AE*{BoO2k}e zC_qdeAOrUGryhzAcYpt(Qzoouo9V?KYM`v2r=_<8RZAD!UKL zDb8hW8tOGBpT z@?rz9Lpy=v_xdGQ>^U(SuR6b~O)itM#ZE%1EvYQZ!tt_Ilac4@{#uVw><@=KnrWfu zhAii;wk+`&cOxC}#!ApHh4+x`tvxL2*Q zSt_8w7<2cF-^+z3%h&B%<6=Oc$_>KHV8w=orY}@p#I}1?j0ZU;@8wRDdOKX|ag(;9 zw)NMtV)N_jkP1mA?O4#Emm=R^j`C9yCuxz6tln-J#DqgcBSBP5jN7iEFas;1*vI|$ zPUJ)0c|ezGWw^8T*<KPMHAP<_iqr8)OF;93@vv1vKoQU(Vc8M^hD|TQ$vA8$4 zgr9Z>+cSmBCm#cvLUKs_{31(ivBO3de72meRXo$KZg10nB_;K=V! 
zFs}LDCA6(wvt@%cUqZ4f!RXDKq2e8xhShTX8k;^4CzxOt2&&rZ7;`Tm#X#)X%uZkd ztah8pTD){K#Bj$L5OP=^kUJQ2QE)%qbdmpqBMZSse;Rm_fWucBZAH?9;0rKie65Q_T;_e80a0Zndlb3;X|5-A_Nl_W$21 z?|wVV=^{n8N;-i8VsXp5w%zTUCtKTQ146W@%QlL7YO308v)4RI<&rACpCH%Bx3JB5 z7Leq>p#6Jwsx~&c54swS1V36Q82P4dFOpd?p^GReVSN7A#g@HKhh(U$IjYe;+vw0` z_%{bK0HtF)fwfKr5+EXyE=oqx-%@-xpDY`6*S_<1L5=q_tU0t|h44UVO?o!++Ct{W zlVuPM*X2?AddNg^X5sT66!v$cWx`reZD_6n1t{~0`4erb!}zR3r@DJ*Z35a45xfm9X={3Yw`6k%W+rXK3)%82eJEf zb#=W8OYl)0+nn2OGgQbcvn8`~CNH1?oc$J@%$!TY`UA5|yIJSnx#?f2%qapZl@meQ zwNhp$X}8#7*uUCS>kSPyj+C?un5tYIjZb+y?Aec~BM~OhD^tX0=r{p8DLyl>NdRbn z%E&S)?&`*1B?aJe|IZc6_}z5v+9k$RXVM(Q^v?=c=v`lpbOOUnhD<<}#p-FMwO>-I znWKw&W3SJYnmOGN#UI{iG1jdu_l-Gmk3Ku7hDV9YO8Br)*^%Bv7;!7v{mhvkB@h)1 zpwaV3x%j;@&^Ei<{11zx!9;Mv*Y_%hgzPM5WUosS{7!zzap!3U69R&UL$KV#6Ue;H z2%CN?L30HyJS=`z`nkZVicWT+!~vGPrfsp?Dt9Q+ot%Kvg~0l^w60MEbSVDt9K~-= zfNh-ZhG_oBuZ!LQOf7~;iAJEm0wvDCItN)b%07j zLT!1-GUn%*;KBZXHUP;9qzn)HxdoD%X+%16yuAa3h?g$!^Q?Y+BfEd=JE9=k!uzk2 zYbNK%Tj#}9F~|%#UY})kBTCuG2ql0I>s7w~i!EMm^a14g+i^?2P+|o_D8d8L^z8AL zKP^J+GrD9{HY;W#{%597oLh{CP|l1Pk+86&np`>&JLw1yz7RX~1I{SJTV>!H7XVt| z4}g%9Ghsyy^E>}yXn8lC*jm0yCRyGqTvxghtUF!;2Zn$*=VM`nL#r7GB8%ga*G!(+71$XS?MQV4JX9JKU^;CvZFFm&-jl}<0E2Ka zGXPco=omlY|B0N{u!Dd{Zz$zZXW|ic z{WW~y5XrXtNWelcQX~T~hexGOr+PmYLLVI;dYmyob8{;;pg|JOcvjDO*ZQ1wzf69& zKP)6!ECr%kIn9h8IJ7I|2N+_>YuE}ca#7RpmoG!s&B0LCr}vOc&~n|<3s{97yy=1= zjJPPXIJ{MxOt4)H#14zHqO@}Dj*Q2NeGFOIM(qaztF``IQANr4}N*EcDtOQ7o z&zAqczZ!G0;wNa2&=ed9BqqX5PGNeXQ$2AKe4%}tvb`yaZ30VmIAbrS@aCHDT4fK0ErwUt{*wNG0Iczq8fe_M?t+f{I=kJmkE!Lyzg_B#~FtTF&*LV;%EP-=* z#A6z$QL1#@suXLfX5l}TfzKiO&Wx+N1$lX#DY)TbhDo@+^_?921s<-h5u2A@dVlz@ z#1vC##u>{@6k`s3dsRbh{U9eKXz{CO@;S%v{;)p(ba6DY!jTxvAS55ASWRMW&zeEXks z?f=E#9|b+$$7wXbR=zo8Wr=?_zud^uD?izAXU7(*h23q%EJU$OO2P~?>5miCv{Int zJjJ?6#}t*|*yEI>Od_P@G@UDvQ=p`xVJn_L_n#HKo0xcu`V^c=WS`a5k@H0T+3Sm? 
z_Sw{(wy)gT8dfj$UivP?N(*PyJ=JzRY zJ953{B3zo1rYnchO)J zZ)f)XY*tEgM^c7tt?%t#n6(PFM`+Oc-`-QEXv0-8>`uA(-bJDFhOduLij}FCOEz&-Al+{0N;qfL z9tru2&*o95T|F@#24csIC|JoY{;3MZS_)A?A4M#a-p*Uy@6XYIRJ=#$WfZW<{nQ8!o>ok;>qb=doBRu z8D4#)7C};4A`jy7Dt6D^`wVnwh*n=213EmlY;3rn*Zv+V=&7}-#M5snCY1uY+KBLs zTPf2~^w$oE;+XRgC<^lXiK~rP(*mjeXc?z;x;Z%B zy<@(AHVfVl)S_r)?feAFrQ_>M>9A^I{J%e5>o`l3<1i~8ch>O>)c1@Sz9!@9M4gqT z)sg%iQ>G0Nqg;rD_f6$dadY)_wyTfz)g1QHfA(Me8%$cwk$UPi;gaPJ zuCi=nZ78UoxNE?~0=E`Z16+e{PBSXka^tlAXpk5j?mJ0_ssmqS_O z!wx#^Bdk4{udRIo!T4@4YVKwwtaV21ZMlHvkmMbK{4wg0r;yJLNC zZHpp}O-NSP&dU@qzhyp1ypQa%PzkMUeR>=8yZs;wJ302|F-++1lyzOTR%e2q#I$4g z<*0NUBOpc_Oc=+Ue|o?O$(zDlc)Bo%F;gk`br<}OudBXZ+CFjK2_(s8pezG0q%4GQMf<;iCVl~ zAO|#~(p0wc))xmvo>Ai{gZ#{xxyy@jb&a*mSforIJ-G={N3BUb{_{sEAxp6Mzoj2k z{P>)e^AddXBEEoD%j#1~B%&zNs}Z(cLvFVLoL{mf3>#T?@q?8GbeCL>Cg>KQ)$vF6rq8Ee*!y1b@URx- z$Qdy;G|(KEc>3PXbs|{LE&o7dGmexPaA`Ds$M?Y>`ybLdu0BSaTPfmDc1%pAtr12V zf!o~NS`LDlS4F$=BQpW?UL>g{pG({UvQBp2kDp*T3xj*Yt)D}JhBloGkx5JIG(j*7 zSI|r}-GRR^$IHgkBa4;h@eV~!g~jxs#vZfqwGF>IvpLqYN%04n0M6I{y%-mZYQ{|1 zL{nC^{4V+MXNlQ;JMHSa{J1ap;NHFsa;A6^A6k8R{>T227fj`0fzkH&a!LHv3{1fFqSigl#*OuvO8q0T%jX`P)q?DB>Sfb#E*j87ccm1DQfXI z;uG-E=D2|W=&Rn>`3 znWOiLew@EdDB_3!7o7rHRih-p?jNar;PR#$*AAObvhKL@mrq62c{J(foNAzJJ0?Xw zaD~*cZaiY8d(vmNAf{ZDr6KW$+4m=D3X+`nf3h(meI}Og7sd8?$bWbGUhCg;K64B%AWD1H{*BV_-* zVrsbf+d^AIxoIpzojMBbcK|x}?X(l(!^1=D*|)APVZQ#v5YJ$2yrsc4T$%|#3)@D( zV&oE)7mdblTiD%%uc9w}W%18MACGSp7Xrn3qvSe{vhsjpkrZNr3MNBMRA*N3RyMno8c*RSUQP9#USPu># zh}`B&m7F_%QgF)KN^MSiw30Q>F8_H4dFe30nG7SQhQ}WK(@OU*^hW%S343kLV7t?e z>b)xkG*T(!-5mZXy!hEd8xmnbeoPZ!&ve=k`_9z{a{GHYR^3}y4k`uKubgBZ6ZM>4 z9jVU!Uqj7x4((1^X8H~a!G5oI1+Urn+Ewp1!-qvQX|EU4Is9hvj5Pz7!UiFzdl|yH zd*~AOzDvU@w~9a~z2r68%A@SxT}L_CEMrzm!#8HCv2<-CJkX|0FhUmw5+Lgr`e#x6 ze=zqlpan2Yd>^w>pLoOerAf5w<-uGc79F5R*ZHxAzGtK5C=u=p$ee>P&L`#%s~|%7 z6+jsB`JV<4>GN0aI}{&x+jA`nKN`t(D`o$UsSI8v)-QKzt@36V}J#!}y$Mx@OK*Zk)pD#vwCEdsoEm;Tw8RBIwU zETFac@9yIGY&BP$OLr(osff?&!Sgwtb;|Y?n?9_xOIqsLwnimnJflKLD~h3Q=rtak 
zO5+3VIwG2WF~JE!L7mC{ApO@01WabFJjF!$3niu|e0~?iC}cd*qGa4{8prNur!r5? z?Je<*?YEu8;EG{|3V}8xq}*l`annu>vRCu%yA)*n?ghA8CLE7HZ{0sB=C?(eYRRtz z6Ib-E{fpW;*vx;tvYN134U;!V^366?vBZjQ1^;ZkN3@uFP+2clvf)$ZAvra)jk}XM z{!?OZ!poE`!$-Fe_Rj0sRg`4ymLPzy%l11Oxqfe@nZsM!>5G8q8_A|5{GLQFnS)Oe z3_Q3!w9aIIeY))NWRjgf`1N2to-i8r3i1Qg*>s|KpH!u*y})A8M$u>sse#E|Na%>e?UmO~XQ$X9xRBF*%JsE*uU@L2zYsa%Y z86o?*SU#uInysCvjgW6rfHVuz)WExa0@s8+RcnK|f?-{HJ8|1RQeEE37G2J0S$e*O zeV(lkvL+FKrkGu%q0Wav=9p0DTM5pFFB5L6{(%fJB0Rn}&z?vVq8N_r*z^45_9%Iz zHDoL!c?JfCSJIk_gTtABQsv!GPe8}n3^k*uMMyJ`bO>c%%4zns`J)(Q?SOLG$u#g` zNI=xlmQgEdh1)UlAhhj(n>@J>S^Ii})nxERe*V4p%iBA`UK(q{Da=?W45}7XmrW$|LYOt3g`X{NI`bi`-#>-Ne|y(71^xe5-$ zY7IBu=%$Q%UKW`Rr?*Aa7ZRM~VO!~MkIwM>d*ef*3Y&SvoK97py}khfes^Q_ePZ{m zlG`XFP93J8rAZ2;IE*mk7(Qz>Z-F#34#ENsQ^BqM%I^cF-GBeBy>O!M{4Z*cGeO_( z^FK_(8&&`nqn~+WBqr9^wzSEBKtRxmDqWXRx1gjgB|WQ$#98NvtWklq+z${^|Jlr0 z#-d7ovhuza89@dEODN?&KTS_@4CQsmz}TxC-n)-F5E!aL2^f)qrbFuP-r!CGXaCdn ztVyV~Wo}NZVy*(VKsV6+11E;Qd1$7(dy4rkagwOCxL}pDU=l= zkJr&f%Zr3P+Bs8SX`8yPs=)d{rvpiE;fbxY&X~WJ@2N518}Z4oS60q zhUaP)OUc&cs};TH=kOJ5!(!k_YCTI{Ywe;azrSB_E)sd32XT$2fb)lSD^{W!l0sd3 zdfBKHcTzWZJ!|>ay%JYnig*tYZ=tufwd1!AeJpCZYa1R#!ruq*j!Kq_^YTu_UP=AG zmlm(J=v(r;^8a|vh3E6^3_XiQ>#2z-EgM~lgzBXhPC$%zX4KIqu++n^H{MM5)K1B^ zT3-)%EK&Xpsb~8Wk_((;91CJ#=k$D^<6Nl^2qJ818?%r-;VNz5bC-!jE!7WA`dh|> zk-4xL=zQCj=qT<9jT9n5fj)}s}(RFZ!! zBK*hDBTi~h! 
zxbCIqBAQ#0_c|yHMxT8>w}>~o*gQcr$=nFE_IusPr}^HYQdbp23=I`TA9Nm;pnPh` zn6y@#I2tDetl90Gh2@~lv4M6gWNyuGe^!OkM3@1zi@HNhkm8<< zTI_~>I>>PzdD!mLWaYIvx(1PuK6q!QbPJIu@V7W`nW6;bX1A!+?W?X!U7)o z{KZ&CZJaz+5b$B@!TdwOC4%g~3U-5VUnITPDR^u7{lcm-@sfk=l&6N*eCp!eupStV zKHQ0@Mw!eU8I!<2`%<=Nv&*(Dj86eT0mMu;w_?Kb$$JNJEP-FMI`hCx_tt`NInSX| zIl0ALcTwD!-G?N~VYx%qv(@OUCrn1(aD~)Lnn0`98cR)nVPLY~!&}$CJWR=$``yO( z7qjkL)buL{n=uo5NK;02Ww-7}McC6sPM*DZ;{0z>7r?~k@FuEhAzt#6BXcqVdW1+5 z>T5mQU`!FAEOnHX9WM{!mCN_lIHPU1sD$WZ+mn})D`Zgn$a6|D4$`vrzz4ojQF&tXx_w+Zd*){82Opug%VKc!q)V=J4(wTo-)d}Gg_tLJ`+peQnlb(UtgHAkyYo&N&(C5GaPlfK(w=rP#!pGP_B)z zUvJ%)YUiC5{hX*70w~*NoYu>FL`l)7-@=`S6_P%iOYo6RpghbrXMcTNs0(!6$vpeP z&0n2r=UK+glsao^uc!WFA`IqjH-vKHt^-I;uif0Ba2ZgDDYCcIQ*yE13!~oN;kobgNF}6 z!!nG|ZZ&5HmrRFXd@xO^mNBkx$1f+o$2^DUCowvOD=*Y=S5X{eH~ZgT%#{y?TT6nO zU2B}@DK=JA+zoV=(J9z#8wADbdJ=FUF4VgYtm{oudAxX)yu3Z%D~0S>u-wXqb7W;V zlKyJsfl{l4L0VVM>rUkSIoIUkm*K$cXxuN?pX=dhZaQ$3VFHov#d1z38)X*xc~$=+ z0zYJ4I^_*~<*TASHzl-}^#OW?mV{L11HX`ie*T+#^NWl(*PF2v`=Wl59^{3T_0@<9)en35 z)o?3gG^oCfObuImAq4?I=H0cqwY{~~J+>_dl?;ZeU{vg4R2o}-dsg>arL73UPy`;0 zC4dv6RuhPS5%w9k&EZj1jMjC|N`w)oYi(@A%1$uYvkBlA+%#&l0y;%g1_WHFaALbxAz`4W zy5@TFK-H}t+L4F}F;g3fQuB7a`s>#*bL-K2GwZ%$mwR)sV&ts|oIzIvzQOdRU!>8=l_r zaiAb4|49xb$#kan1GH40KF!4zCbna`ag$LADb~I3(`1^zw8l22ATxD&XMX?xhq579 zPYh;PG@9mLnYrzJZ65+H<7dF8w&yY<0BKDz37%X}NAD9(j8@eC;Uw;!+?15{(VVvK z6f-$aUR03}3U)EB2Ye_Nbg4hrrv9N-n3jf_)HXeS_k9f5&GxerwEDbkISu@-gwd+A zlM@Uert=pU;ADSLu54esbZfh>KD+E*5vVw!Z==l&OfF^{u@PS(*eY5k5ndT=Ha@t0gyF5u=?W1c}Fbqt$kgmO{vqQtZEj20gBj!>Z`v`A^V+9|XVgQ>MwX_M43CD-(O+}<4gwi!*tN9s zV#5Z+hmqsl6S?vI(1NGlezu`F718;z08#9ijeL%+;5kY_$1D;a)Bq;objvG`rghPc zjkB%2Jq*fya*Ivre1F=?xq&=9u(8}lFApGQ)62SIAe{F;r1d6h<(j-eqm?xwa!A7> zNDW+s=2Q9=i}O>lU<-Y2uC-u%y%E!8B)9K)bRwsbfUd=AO%3fQHPB~TM&<1Qoll>o zcLmmV8|L*>u)UmW=Y7)|lkxSrpr0+^OPrqGD?Y);qLHl_^S9(Rn5n8O9;Mw1fsTr3 zKu1n5?{}N06#oEAuLCLIRM5FoS4-}!F-2?6bhFI{Gk^?ZIW%?W=(nQy&lg@Y{u+)! 
zi6l3t;&gs`1|0R}kJf}vMII(z`~RDn z4~eJ#`1HQK`}Yed>m4HR0D#SXyBGQvmnWT!6_w!)WiCNv#m-xE1tYbifaH%)(ILVa znaTk5k`JPBNbX)si)CJ+77bupL(y*dE6+@mzLGdaN=)iWD%kFI>TYj{n)jk|Sm8?| zo})Ogux_WoyLFs~6zXpof)z~8dmn)_`_F)#hkcf0aBjQ>IRUuzSvY;E#Nko4?NFjH zANs4#rXf8&xRMrrHch}_n0WFEwY~d9q(%;Qs>WsIkHRNnYQdm?*zWg6o;7ESI9DYi zk|&7s$zYpAP#W0Sjc$?|jRN(}!p-jg%*>H1kWp9bC-fb5JRL+JBz;8;T=Yl52sHvT%1Pc>=@Z;!1q;pRNof$XKy;+iKqzvK-{;mpcfaQ3l1|N-m9Rh z=uS~>Z{0olPEAz+`@^&)0`o5e^>SVg`ArwNc?{@7%eeTvvu>v_py+ zS9Q03ZoZs$6DRm243z57#BxJZdG2-N;qU+Qt@S!;^gXKeS|$c(Y>$%x@V#MZWQl!f zb>xAHz41Plee280Z_(!h#8s>ikM~VsPk|M`zIx=^1Tg+DxQ}+3ROCb>#ULTsD&oL& z1D~6eM5rGDef&QBDW!=q_&ANJ;p%8;D|gtSUjJGh5KwVnuT1IHT3Tk|ohDy>eN5gk z>8GjTst5nAio^#3;f!tEv75!zs6G%fMf-fDFIoO>ofZbAsux+jR3PjG6}P2)k*Q}? ztc_H&HOk9T*z3p&4$qoR$)}+uxwRN`?D)0a+ZpI{^PFHVQjZyph*ezqD8c|>ohFGp zo!Ycs+~3?Z^4}Al)=W4dkEWFE2+&^swYO-IEGp{A*f+qP_5Gizx(PmIz8TB~?Dp-x zz9zxjNGOj013)860^5~1xNLVL?@LT7F~?}K0K}W)N{9iVqBbo!J%+59r?ldli()V1 z)c#?%Q;XT)&}cFVZ3Ae4xyg~%OHYJM8e73v)otQFpo$RDvO#e6I^^xyC|j8HjX)EQRYao|ic(+2Dm&)w&yY@{z`+>boL(JSGjbV4Z3 zu)xMJ1CF70YRP|&TYY*sTHvp^A1=m&5C%+Ws>@qE$*^ryL)A`W=p8xXP^Y$m2&=2M zp;HqClDWKiRS-Ap)I*7kvf*+p{;?2Z)*-*8FroaqVXn_LE@B?$-aVt-2EkRh$>5rm ziLY6990=3}n?Cytn4arAcU|7wI=*H0SGM)6`EMf5+uitqcptEo@gpt!6>C~>wxpSY`ldaf#5W!}*}ogN_O>X+1wPQPiu7L;)Xq$$T`Tux@J zb4Le56a{0i`~P?6Y8l&2$J~@(9Fr||v zxhidX)aQ5p3+b3Dv1X@{5oc=_C?#)O7{Lh&WbkEwLLz@c3|Fo&kaGt@9cNO?%8~?? 
zoOCb|*>_;>?d=tOtoNa;BpEh)Uv9O`{EFx_WMZs-C#G-}Aw@IZNhbs@%+?mVe^b|(37k~LgRjb_*2iey zy*JZ)kO{(8w{O#5^o{KgwGVDhT;I)E3$^0Q(cjH0OS5NTW2_EF>4x(SU zDU5l^*?x^65$je){o?X)k|4wkH-eYF&8Rcm`*!idwfGC&aVMlzj_h^4621#XLFpfA z4tW7awYBa<=*q#&YFrA&`PBS%aZCK}$MV;Ei&BNuTbc3Zj z(EoT3icJ|W*mo#?C2~bOAidUJf0!h_u#kP_oaC60>E{C@GRCAhE^ z4~~`EX5jg>(^&5%asy=7<$&w&znyNax6BCg3B$z_J9p5tW@z!Q9)j*S`uoe&QkM2E zPwD==_&}wy-ksoa_DDvC>Xr!zW(vEC8y*(@0^0o>Ht=Y3vvlM3_Hi8iZnE3`bI*EhAd zXtHz@J*PBkR`JWkO>h|HE{CS0zOK!L+w^($CRrAqcVSB{E1)cV3N z3vO<|zftT17|8(0VC{ScT2@Cf4XUiBR@?YWh<_;fo4UFHCV2Q*Kk^ZlyKZi4D6dPW z-iCUn@I1*4*XU5xxLKa@bWnX=&O&ybxNkeQb!Y0I8{s*ZhhiMUfbJimY{{eip5{=Y zD&^?mh)Bt_idJ~+<>5I)KQSy#%bIcoD94{|Wy>nm+?A9Wlr_8&3dy|{*xO``RSdOh zhD7l-d@jp!Xrl0Nst_~&!9SO_O>dB^^i!5D_DN8u=T>ap&wB=78qz3Agw}BIy*t6Q zg%*t({NRMXPG&1@X&Nk^d10Roz;cTDa6mmIdr9-##VLSlBt|Xg*O9w|HWa`_KZH@z zZb3)TH`sj_T0%sz%mPsafSd?p(j`b{tO%B#+^QE5(6wa6_+npmHmO{GhQ}m0sQI)QEaoJ)qpdS| zqj0Cc6Tb9FwW^rro9>T&yJ`|l&|BZWc5OM_g|2GcV4ojf;9hA_94(MH>jPp&?Px9^ z<4r%?(z8STSHb-ja=5Np#HZfS0IJpEGEUkhJWveTYc^>iL>{xP!M@5^IYiyB_sMe% zuI;iX_b0|g5#-1|Z)lP}hKIfuUSiCT}}$v83srG$N(BedW-%W1>NHXq`}HkYg;jI%R`m&}wgJ82ip)lJaSfRmkqQ{2CQrZ-9q?7^4|Q_|l~x z;1?yk={=%7_vB1A%mg(02rt?noi!$HS3N~;jWF^+6W-#(+t_AhoQ?*|o+_I!FlK*n z0d}&G+qBcNFV>lADq5(h>ae}`5 z)?>}5HXOMRib8RV$JDUZQovINKOG?IoRdC<$P~l+L2LL!w+1f-sL` z8uJy`+{I0n>#d27_UK0>Oiaxe3cx=?ZdxXx8;ix8;Gr6vS$3D8{AVraMVDXP@*pp@ z`jhZ@nPUw6Zmy8QU^fwtd#v?NC#Q=`iu;~7#P8^|KK>sIx+&HOqw=r`YE+}2Wth^7 zyJ^;)+8{PKN`SBOryLRXH)XcmajrZEyjogvG>j&>Hb#_+)6{~Poa|&lONGG+b~CRn zSdk|(Yk1z*7vm;cGOC>C(A%#mz!-(O29Or596Z_R-@v#i>oM%)Mk$lB(`UwQuaC#$b$wu( zTlRrBDWcZbjv_h^HMN&QbR=^ci~d+boQpxYhwMh2%xWTeN1`E)e zanyayK6S_5JfwYp+>LGP+MISW(B`de!&PlIo^PF%r6e6qbj{$i{S^I)<#e@!7W;*W zejg;vX~yKF-1PU%biqLtJR!lxQvT_LWyPknT4eGe(_h3fW>-?8Ts={(>YpI%s73nW zhAb#m2Q>IJwEM7X9-wxmxW9T_6IdLs^#?$vr+yZW%zg9M^O7JQ9&`6k-w ztBe4!!sI6>CipO=qeM``KcI9HZ_L$Ej`B4onoI#J)A?)o$8Ey}-D5kJ4!0Mo>+gRH ziu-lsvg-jWJsSJdiiqCA;vJbVZkE6E>;GSmWvFqy3SZIxGN!1lokn7hVtu|M*k&K! 
zQ(3jwdJF(Qw!1lLfwJ=_XuF*EtfpRBWr72~grs>%;$H0;p-r{dwlxoMpRprM@Nj;E(9)zXfyF5E+{X>VvR>$~l`O;Kmqt*3y4s$vS&cKpx4fpSv0NC!Jx8IDi zFtBBk>taGDgBKgrzg0E(YimxvRCkV-I!v_>tYsXiq~?rH%SWXy88&12B3ZN1UDFCN znG3~jjO%+u#RPXfSazW)*u-nh%^is5!3D24BIVQUr)hDKA}T8Kl5IH5vX2i_Ylf%N zEpNL+2Jp9S4d$m*f~U9X>1hlbH%(f`FWHZ9Nr%1+Jm3!HI}A-T-lqWCREc>}BtQyAyg^WypB~%}W^z1!LK#9|iJ)4JUr%^+ z?g1^OtegaOZ8k#K%YA!9lr<+lbVt!7Fb^%2SZCWoonwwD?`uD&v|xgY8lqBmT5(j#^{hMFy@W6U0l)Z&*P;F^_CR-iNQ=?9GBqIP9KmFxEDHsWf zq>QX|BR4rsp9ivQE9amNOd;WrER}QnF>U$^jCC-#Oc_j6ngGPS|8#cp>OzyU#-LTm z+GFNk=XLn^q#z%1E5j(+R`i5=Zr1?(En+nlY-(v7yPcH7PmzoGrm=Bp zbx4L+Rt5=sL%@p9jtaLCO8_@@59-^4fsI=FFAd-JsAoO!KSfz#Q*+euvhOJDmq*7x zG5?ol{x2&<5${6}mkLzZP7@~gDk{cqhZaqX1^^-V1v#yi7?F@x zMTAG3eWc4=qlZ11y=85feBKv$gHYec} z6Lztd7Tm7zM4}>4I*!A;`BqsLP9vaVSkuWxtX+^iF;rQiO|ljtpYt-<$>l8=b##oE z=dgpk0R-VWCX}s7YI%~uQD>PJY?{p~DQ%

=B^)xu9M8_woJUrMyG+Q=l_qo?}IW2>dI+}C@2|aCtrY@IET-Xy< z-(1>Sm;pJMGKU?Dbb8;e!DY9nZOjae_@L#h_Mv^QROxA@g}Uve z8&W<>gTHjX)r?fNGw0{$^8-;`Hm4R}7(xE1k2L%&`sFhCPr=H3Q?V1-LQ9RqGSK=? zT^_gXw^06QGg%@DbmyEsK4L%j_jDxxmdPU(SCrb5+K{-#oe1o03<6+G#HBGa{}wVQ zXIC0Fd{VcA@2Fl3ZywH^TGkw|I+?DNZQG`Wfm1PtO-XpBGEj?mL2>p>PBA{|sD>06 zwrc}J<;bQ==i-ZXCq5nF!iI}TQMU63*!^!+pTyP~&Z)a#xoM=qsYP9lu+g~4Z(eZ= zvkIkv&m`~QzMfUUzZ9;ULro?B2}?iSubCH`YVhYIp4b_UW#K~n!9Q4>fR;fK|30u= zmaMHeW6QGk=tr@%&~_gt>kzFZ4J`54e4XOQjO_??Qlly4^W)4x`;GX5+U(uY)6^FT zIR3LRVk9xk;qC|50MSPg;NP7%TQjbow+<~Ge>)_- zNF&`CMDUul=kW`%_McCoL07qweWgK@hManb5klid<+le`vxh?KV9oVu{89z3Nh%Ai zDiZXDrI#-9L*+@*h0Q5`wuz@Qr-V{>yFdCQUpbfuYMEwGs8AhFphq9K{^-wmbnxJQ zN4<&9tl{J$iFfFIc?52~&7fdC`v!V6f`Q8}XV}QXDSZYxC0|Bsyxx6czU#UER$DGS z$P5h>OXklp9Mvr2%D~Z#9I*(`EQ*THDR|meK(V=(7dSsg3G~jh)z7nD!c`0Yy+9np zIydXJ_KY9=z<4@f4RDJvgZ|ACn^R(LWE5bqL~sE?Fpq>w)lXm-mDuwSwh1>5dcJxa z1m9WUw>fAT2h|7c;;a>dGLOLyShz+6-&M$1a$A~zb^fp-`SzIa9Qa#J2y>4cd41nH z%eLp`-+6Op>Lt?3G6$;O13>uP{CQOkc%u-3V!K=I#b*e=^pl$reR%i3)raT+0y`w& zg;LM)Fl-}!22qM@%pxV9JqR6kwEtx@j!O7Wv#KVdIh<6A1SV;(n@~`{x;9sbkqDT| z^+7hYfk{8MtkNgy-}Bj8htH|$zd6&CW5rrHYZ|O=zMTp1Cwk{hf2(fRiC9sMu&+E9 z*`eE%o{<#vp!~LRPKdkLG{IfdpbSok8oiDBNBn`VZpzI00?2*1GIy6I2$T4r%F}=L z%sF`1CE80*M2>`QV~Y^|18Q+7NQ$P4Wt=K2YGO3o=3BIvQNE(lRn& z1c`NPR$}Dz{CZYf#SvMkS|=Cg2;oB?e+m(EQd5Nb=5=Kgc!g*iXihCP`G#hgi@ELQ zx$7ALS2coe1yS+zj}_20eUqot&;l2YrIsQp7X3U8xa|BW`CI3T?T8CNIyt%XB171% znu+4YUgJ&FuPYs>p&=abmVAlR80h-B7YIzw{L0{q9SvU+F3Jv&MP-lzZ2AP{R%K2+ zW7fRjA@v1s(#ggK&QF3{BP%6zlU zrDbGp+JOsKDXI)lPm}iM{@|rbozzV>5H1ksbUE4gV&pn6-)GA9^cd9J3;;`x=4HM3 z<>smS40CYW+2cchap@R!Kb6?4`>cVrPos;C>s#!b(^YnF5l15Heqtc*Q@K&vhlS2+ zJ1v*pFheDU?McpG#n`WX%|zt75p0SI-MYWzV$SF_3KhHS&UEAyMP@hYba%+Nayqh~ z7`8C9;S7@Oe0d-{+(9v*8-HB+J?HH2@87sWfgOdlPm7#VKjjD62>C>`lN)!>oQhka+-Tcf+)k`4=u!PY<-O zD(4;d`eXLRDX(4J}B)lUJ>D3<9XNU!NM~hQx>Het~hJH zBT77gyj#(IpL2-Vp%=<;ZzMR@fid7)$S3rm{Hy7G*AVQZPHhY|@}`T6i*s!Mmby$m z)o_SzeyeIHf@ulQf5;8JW*65ZSM&hEfgw!v?tklz3JuW3q{80glvYrablOYFr;++X 
z-ONKWoy#J^BAO-E6Uh7m`#Fi_?=(c(>m`jlabM?K@#(@07OUgd75?0PJ%V{hc}>_` zI!^J9|2M4d>=UdoZC#j)~PW6WV>AF5|5cD4E`DI>YW z_-Z2x!{o#zfv=&es=AhpP15GW18EW_m4DpA)7t{~@L?1V-+3R}C_53^R;vuZBbB~; zE`k_L_+YTb?}>wf*%n#(ZucopvNN?W9^qi@opqMcEy%X&2^S(TJ(8~7)2r2N2yJEMs0RbOX z&P&_3HW6d8jG`JLBynVes~IsVAADS3UbDCNa+$odA)ir!1iBUPbxyf@`Wf*PG1hPeeed{+1dxZOdpj(elorh@t znWZRv6qQwm!33NG0rT#Nk?G#)PWA{rjqFY>A$zc#U@1fMb^FMqB>i-qM>0&yAwHe$ zEA(vA3eJ9yFx{MuUnTbQncvxa<~;~i7Q?zphf6Z{DvS6H{hm4wJMDiEsz9a?W%%oj z(8zc5$fA_7LJ~@PB=b|t4+8|?*l-dEv`E5#U%GD^?V2T}m&$m^*Pvj3%}QlI;$J^< zoFVXp+$j;Gl%)~@n41dk=qY4VHk|o$jPnn26sfMtQe^UBY2ltSgy+1}`j=j*mPNOu z-xovJM=WKQ-#G8aP58HAHZgtU6)H%rG0ZBmTi34AR$K@?s|Sqqbc2~PeNl-ZYe4J5 zhl`PKiIkk<0wInF*QQ1kiFuL*fE4vk`jV!!`I-2Z$Uv;>2rWi1O0 zukEA=xM*w4UB|!q@rv%pSM%O{WU>f0jK5D*GBzz`PZ72U6md5Ao=YH)?GhO6GqPZl9%+Y z)CL(@R_M2omOa!j-$gYI6c z;IZ;wTF}sbM3hfQlb_loZab-D|55|udmX)W%0^$QgYOH4O&d6sx#fDVumyeA;{23O z-fU8%lQVAPly6&$<>28wUz;mT7x>v){9b6>MFfg+Z+uLldKww1;%1buz&z`o%)gHG z*9LQDUAmLJ{8Ao(K|`QSIxW7Gsn^j|!O36V6H*AP{i|n2)Y1<<7vLf@mc(_ijtJNZ z_?xoIh@}>?gzEP9J8tR6tK(46oUON11M+)R2`%`m6>DQ?#!dbC!#{gHrvxYeCIEmi zV0M-h8Kl0$yVWOZZp6edAmBt_S?kZ~BYG?a8D-l;Zqtm`W6i|pMkXi74OpKJZS&sv z14G?@rBwA1of^avZvtT^(DbJVpX2J&S5BnB_ieHd(84e6^#GSuSJt=p_E4BqObT}l z)``?c+m2gCSNr4Q+;@+cnj5lxDkKNiqZvzZnQ8>ME=Ws{e=-#~!_6kau4)7B0`&5Ggsyz8>Z&TtZ^5QHtz5 zo`9|5gEzOE>Q_JIG-1u_K&l|m(_r76)>ScQuywhkJhmDPkufWmlpP^@I>c`zok=!{ z%QWkU_E!L=hb2{TqHJ#?+s`oev)942cZQ(Qx>}*w~WsV?+lsx<|Bt{q5T~(~)6b>@wgP%KZ1sT-97fsMA7f zs@dC)e$oS%kQBHK?1?g4yHCjoBWL zx9O?PL;w*Ao?9pNlaA(4omhU8`Tk#;y6w$!t7(698^1YIu!YUmdpB+fANy5S&CzVK zh@Tq>=C#PBi-i?EM*m6lPB{iEsyL!1-I1WRtORsTT%JF`=kT-=5|Uj zAaC#WWzoL+9yfe_bF2-yl8Kh1LV=-HD{yT7_`1~s*$Jn>6|#jGH%ZU{a{RsL_I_50gcLWBpbSH zjL-_c#rdhFv--usS3iFpl7v!Y$3afiK)bJs&{TR!&7xxc4S=xfP7h{)7}qoUYQhR% zLyp}JAKAPL1+epeF+=pz>SJE$YP#t4t41i|2NU^v?P^J5Cap9Uj8BqMs&ivs*PIwE z?=va-5Uga>`0|X6*6canwWi&3}=*& zlgUL_)Q+nj5iX9zkAD-*Y!Y*Yq&zGU54tA8}2VZga=9^nbSj@CDOJeH(b|d9l>xx4%@Q4A;tt?tL 
zw)wSobUYqkDHz)KS(%J7&o#bW(-QbABqbTHVRvzh_kKv18l5_2G8L#in!r@%N@ck5hO82VxhqIp zPgW6nVuz@d%>7N-pmHQY=L>KGtI}>+kPtk81?ZfH|BO2cwGKRDGMfm7zvH_EGk1I8 zA@^ME^vs<|?2wOPD+Ne>AuT4Tj4C|gt-TJ^f+4zDQVOf5N zm|1-h+jN=t4fEFDNKykof4NL_vW-NZ@Jr!cq)PSIv?a2VH5QJasdg>!(>QtGnQT8!tpJsV{d_uHAdF7dyjA&9Jn* zlYUN7lE(%inH5+G`(g66RMOY|43 zVQH?4DZ-;N78E(Yd}RT(W)7vp#fw~2eXoA*R_K^p(>hWl-k#K6D|Lq(&vZ$_r$_li zUfny=gNKw*T}u^SslT*bU34)_l&3H>HQ9g6JL@pXsvlQ`+9pz+orS%h$#P0H?Fq;l za&sx;@bYg^gwlhV_n+PGcS8u@xap$zX;&CwAydl9E@AY zrQe4CPos)Qe00M!@nphmpja$-E1s{J^{NH#Sg9*eb8awSqGRTNKwvz5Z>1`dcKLo_ z8zoa7<>pFu>44x*&d!OHX&L6;B7c)<+=U(^v z+`(Te)J46z`e%&)5+LYyJ)&77uwK?q4H5JVhvipBkmOU|f+?wphMpgG4#wTv?@f0{ ztI25I^J;s2JZHRLFv~;j%f<*h-!d?qY^TwYDZHdgt9W#NI;(-Obn1&+eh`RJNc?=; zEu)y(=LZ`hp-F0^JpGPvr}7;p1wugGbjsC0vlF%TSqACBvJh$o>W!=2bG{PRd=MZc z+jn2Ked z;@N91I^jU0SyIbU#}Y=tUUh=exne!=Hy>{tRam!J(t!xxW((pCo2J=sY*aiGI?nrB z2)u5*O?e0!$p)1wm_bPVzO=C5>pN3N>N6Ca*WNJ4mS%nKF+;FYtlg~WJcrp%g3NJf z#9hdH8G;C8DP@HWHA|j@`&Rwxv!G+H4b4TFI6+bHKQ5#Jcq})DvJ82fi z;85H*6zBH$6Sz!o9T+t({3Ps(`OFi$dS~nyn;eF|7O5@0#*Mk`B7d)YhheK68oG!Z z4r`j41*3C2a3W=)aW+d~4j62gN<8NLhz4<8`ZXZJ>|dW{M^%vK_(`wNoc`v=PTl@0b&FV{_T0~d8^kKTs;F`X&?;8Aa)Z`B_WP&? z2eZZ*O?oRq9y89;7pC$l+s(ul*~zQN?v;lt(yT@ws3^IbtpAGSce#uD5{MR~ym#Lz zrdxQxGR}S}6nWjbWYsa9$%Qw$ap(s;b}DSoDCu!5dcp=}AXd zxns!khOE?u6t(N61S{u@aw;kGsa9n7enJ&19)o?mkZ`-B5Ff1Cqu8?CEHSM7D*Q1Q zzn(X#%eGR|PETOmJ)Ee~m#@P2)vZEV)N;-VAhzEDtWQAxc)6#iX9uLR$26Ou#I_{- z{X8d>4#3!3QY~1qW#nyO)CadvuhWRk8GW0_H4ox&>FZ4YBQ1$pogZE+ispZ;z$8!d z*}jopApnS9x?G-+4H!kt-4D9XiS~YP>p+rp`nzZ*7K`FrQhrdCDpiyPKNCsOu&g+H z3JnfHF(S0>SdwNQkB6p6OVV9h>h%rjBrA$4LB9Yl;cuWEU6*}ODMMp!QcNBEnjR-! 
z6@CqJJzq^HT9;rB6Y@P&mkEv6^-KdZuA08T9G9w)G!&i2Ch%nq{$N)6?2J~VKK#I0MY30KT9V}^b&BY|-OdAMWWbBK(EW8v8j0tJ^TENR zr(U6Q3X6(`vVK#Fa%veOEWwjfqNW{D61m`#AD>+3D z8KluI0fay7nu2lLWP~F%*L=o^BnDKNo3bJT3Bv#1wgBea?|u>!=osVmAW-;e73vT` zDn|N=eD{d)t&RS@`eBDADME@9b=#{+A{NQUow(lZbMwlZ+~0~AAiUzLG>G7@N6WQX zMwqqjvb^(=%~(o;0KZc*T5c&6>e>}LIJj1Vt4=oOReIdcwpFA4qX;?e>mRp)!;EebVD}A4AWkBq zY-5@JBvS9^2CSOZiG;V8J$aS8|NH9u607_eV^rB|PT3|6-9I3l=I`#zmv^(5pMz&<**4Sorf zv;UY8RyD*{-C3;v>nvXMt!D*PletVoxZk<~RbQ78+(3}a(2rEE)5oq7+usa&Pn`(i z6gnvSXJJ%^gZ*5OR>jG)JWhv^&n30*Qi2;J(f~PMj-ONk zY&)UVy%Kk@{y?!jp%ap%*7?!hKppN}hmDAx6Mvm+E%bM7vs!%m4-Wdnn{Pjomcc>g z(97Lx6Fj}h-B2>>&hR3auMnmUw?3D$k^)31DkJgX9hQ7T5{gv?Osy037gf`&;V8y5 zS6;VP<&e;gkAcRuy=@9ytyNkYgoW#@keErR&b;G;pKDQAvmf0(S74ldkfuQOB}YJ? zB{s!nKP)#K_KB{^@B76L{1Ca|q>bx?0wcI@m~w5*~=y&c>v9Dk|PcOqPoSsBhB1kX=SU57n(hDnke)3{%|bYhX|GH$tLWPNg+gHH;*%i4Bont3-2R+pXJTn6W3M4&{kn=5#+5TfkGpp zunnpjR+2N8!dG(K1iv}LT%v4#&Lg&*o%c}; zm%ZEW_s-ok5=u#3^W=g9!I&(RP*zpZK2aW1D-$lpps3EoMkSOH&5^!yCwT$rb)qxW$nm%7{00F#FoY=7mYDo%OYXYlTmr49VFV zO}?T7niEV`b2Sp6IW4}ljdcF%{1-g2a32#*JF5J(6(#kM?%$nd;1gc;;)0vUcuUW; zV-pS*4~f3sCrl-8i(p96moQ)c6ijGwPm68A`?o3$ouhg9f6lEM{Ox}_C--->lu$d# zz8Bwsj3y5xI`C&{JnIshq@y;ZG-|f-thaReD=k4kKOy76S*#{jsPoyCryMsfbk0=FhWnO*g6&1ael80g8aT}_M^t$#%wH(@o0cNcYJ_;-Q&us<3eEUr>VUL_NN7b2kqU%y<=vzO{P?(2 zH4H0`iA_z^d?`t4@D_l7`H@;Nl*^uIeU!_9X9Ai|lXcP56X5!q=e3i8eB-IfcCz054 zZpI63!eXj%R)9@AA{8=jXT9v}&{C4)>(`%0P+a?s?3VXi$4UVv2{>W7rS_$dvNBpo zvsmYF{{CX@YuvOb7GWcIduh4u@`5#KcQiVY29!f!=L_ zKC1T#VCa#1d)#)JIH`zRrFzx48F1aUOAozLPM}BDb7h}~%)Qmdh+N&1e*sP5CMX#} z?s{R(IU6aFZ|ga-YfxAnV{NRWkvmjD21PRwJlNW!@qZ}LP9*Q2O)rY3zqvW3za!;j zMtsDCt^3?v*v{fSIxG(7+$uA%A>;$1usAX`ZWRwwe(0v_)x|Kg~BL;gXG`wy0qi!@4@G2t4@(U#%d z5av8*@v1&yVX|`CoU5xn!+;iwwn)Cvauv!h2T`W;T^C7Vy{oJj8#QyY`Jc=0$GSVviSSyexcnBR|Zb71{O#Mo3 zNh$#3J{&~mZK1us$<}uGxNHPC0NWL7C6^wRE)?{+H@`)vq>vchrv>X1RA6=qBl$a< zZ(kUqus?-FO@<9b8DtPXugFYqy!P)YeQ5mfyHd;F>`Oy4>n^gY=n&c~e%pGf_{e?K zt;@qCMwG;pc2-e5+LLy6l;ZUiCcOWy(%x}7c;kudnLvxJ3X@d%z 
zP67xcOKp{lSKe98%F=2>@27F!yYg~=XCd!cG+GbJ!+pfS42CJD3gkh(NP-#{;ny*5 zN=x>{mxN+F(s@apqLCTPjct_yKyp1JRRVPc3$iW>LOm0AGMTmk-gI}MK6h!f&Kdc8 zn|7U@?Yb>~%a9~+7dUNk#xdnIUT3$+={FSSLLMq^ve;x=I5%%jnyHYGq!$QCn(ne- zMV`|jk%N)U#UzhKgsd;*)ECR~t8^ zhCO$`N9KWI9$x3F08@%@i_UK*Wzn}io0W+Rj+R9#--6LXdSFjjl?iEzD#bn!{aL!2 z`SVi)=90S8`mJC>EtTJat-GZp)|V_O+nuKEYfZ^;h}`}*pYk;m_Sbp+su*8#fI*bO z!~8C-{!cUx43{KTUY$PxgBZ-fp1PXGB9?MEn&eYyrJ8OyP7xzfUY)4PytTD!b$;Ic zY~3v`2en&@ckw%A@w=@w$%iPwfEtgn91HT};i6PxEtB-`^o zNEaR=6Ofbay^1l-G?qxD+rla898{wg5xzz|y1=bIjJChGv$lF3_%a-mbBz97sS*aF z%rX*SJNBHR6s9>o6{Kxq76&QJ5=krk??B&5UngK`nF69tv^_x}YWB9=FY?Iq>B&<` z$m=|!0DDo3M34D%XBkiaEtIZ_XqS|5 z+UIAe!@67?oii7EKY`9+8B(kk(QW<5J=nwFn4bw(9%ko#rX5Uv_bU!+(^Z8;W5ifq%I!J1y%eD{{nGHHYS55)x|w& z#!mstGc;B8Bm2A5A1c?komdx8y!OwEGs`IHWAmNDs0PLJL0=!*^lBsOWRSq16WoYB zKsvB|@gv3j%X%XOCbMz<{+t3|@AovN_x4ZK-zdtOCVgWfZ?4~(?Vcy`9Gd=o8u#n& z_Vx5!^?iECm_SRo2p4J+u=tq2y2gE}5A32(t*`%qc$#vdH#{}D3IFu{gt8quPCe$- z?Rek*O_J86D_N?LuE}Ny+uNMV@XudKK&Nc8;!_(be;r z#`O%ZL}1Z@8tU0rn};XG`&~jpr{mj=S1xUeo89M%B%cEp<1GRx`wwhK(prSp=o5sl zAm`Vs0;NAgB-~)`JxWAoW?Wv30=4NO_#vsXshW2CrM8Dqy@}J8pV8yY+I{U~cC~%E z;;sZK?>@zlM0UiJH{Nom9KNfe)nZeoynlV$(wSV37*7#i#s`GC&$ZIT7;xDZ6W(DT zOQRyc73HF({e(1CCwN*LjfvX^o=ozR*JL5jkoMe~D_=!puCT>4-MIeS8-~;4WG4*F zZ*E`yBu(h&7D(l8uF%kTgQLsKf&*@%Q!x6XD6;(DOGEE3eJ%}P(LC*!$2o)uS%|2p#_MD$)U-`t=`5^@P|m=Muw%fw9#$P-{sB zKr=YIOQbp5xSy{L?SAs;rirGhKMOA7$15Ii1~m+Lkq`FVki%`*&T)~{2{WBP3BVm2 zwBrS*gl`L=%JgALVuVoxj$6%lUQZpKRIIx^(pR@-zS)vw9E@rjq0>Hvogm~q=hfpY zZ_vS%R38=WoJ`Jcp^k8gD>uC6;CF{Qu2L|q!bp-nKEIGb7WWMhH8N277H(mbdh z#1JB#-I?{y+{Z4GC;g;-{m8uq=mb%TKrOJ3$JPX`#_*vPCc;o7N5_k&&YQ)dl5GC} zlp$$2sJK$`4&met59#7D`D6u6{T+rC=eOMdopcHLC3>h=YK9OBug2jS^(0Au5n7I$ zeP(Ue&;Ez?Zvc4m(+kxpbHsWde;a&?tp42*ZCzUhGa?<+ZGz?qSU7br2g`pcB5JYFy(mO_`6>rd;;%8ad^OfBTt{Om@!B;meBRC7c9dX&=wH*rTI z1LWLfb&`HD(n{%Rwp0p-06P#1qwE1xHv9!6z z6gwbVphUr=0fQB$l-hQ6X{11>&j1F=XFvnhGdEqrGhHFdtfDxye2XJI9K)+12yI|;Y9a)7 z1q(7h!|Fc zzjlwLd zu-&JB8{#z)TfS(@*o&KWYyon0lb8~dwGs;%af;eBL 
zK}_r>I$bU#u110(#pTMz)m%nD%M3g@-Oi4$v^mZ4NkL1 z1Vh1$PhATL6S~->`B*Gw8g`#)}K|yK<6OWhze>JXx+A*PmDC8chl=xMF6#x;G96 zV}&C0vpDe}u=fNnm&Bq+N;m7T#EnD;@cz33&$N(gxdR5pf!u1w32yegu}{==7?m2x zI3i2`r&$jGaikUNK=KqUyz{gQP);Lt4LxPS=EI$+&hf=~#c8suT=rcYp?9w*w6mPy}s%)m+3^Lz^Gbi*x5J~2lcfW>fU*A@>k?%5tjdzj<*g}BWi~HNKfo5|W zaufAm-beiE`%@n#=4ZdV9{H3OnlI0t0M|!Sru~b8Tcn&@+*Fnn<)McpX$zjeHvY4x zNjRAIMYkY5+Q7Lvx3f8r-4hijsqbRQh7h5efvAsfx_f&L`QUImBH-;K=TR16fn;k1 zEeHoJQBgq4IZoW8zf?1n_A+7@gaXp^xLT^)@ zqk&Op36x!PmEqmZX*t4v*K7!T1=)eVB#XnZF98wHmWv+p$%rjoywFH9XI-d*jl{>l ziSiH|L@9Tv44KRyC`<+y6t7_F!ddcTj1QYW=p5{sF_)?c*39le4KlHT0%Ws<&Q}Pw zU@s@Y7XV#Jn*bQ`9~WW;nc?UsB@*svzJ{)lab{a{8=9n>KhxTiAkYpV<=VvS>+h>y z?j)*eN=S5c9}uC6w@_Np!s4XCQlmBJF{uo|8M%ws4lO?d)T*1=yeENpcwrtmb$-tSR($@9b_DvhNi{Dy?rf+EbXFQNd;2D1htf?)}a>}3`xms(_;klJ6w4$ z!*TJO3YCiJp-|mxBMD57>0zYu^V3arE{ZNRCMaMHVlx5xIOC)LHCC=9-TJ50DsT6HBWm>wm1az6?^m`1XRFQ)}?%_k23n z8PAl-Le`s4B%%ophsc*K217)!-91a2DRM8Ka3Ei}y0RIrr_F=ZhF2 zfxdKR*9vJTlVbgYKDQypO!cm>tn9aWr2`PAUbm(yQtakr-%mTDa$ufW@)z`hw$OU! z+kFBKO&+X8FbBS!zl#nCu16 zEA{PFGXsZAQJm*;zGwP$opjp0_R*MtYzVT>D3!TT{<{a6;ecqIKh;spYCl~PL zru)3-wCAShGeDs{+P-pLJ-HV6RN6!m6Y@i3Dki~&J%>hpB-R;_uS+5Zs`)ko=hl9E z0|(_S^ee1$fAK);@5$G)o3$+c0lNQ=qF9l1&oz@gpiO#m82Xah#klyh>lcJW*ncB! 
zo`~k#UXHN7R`KXeaTx`~Z#)v&ku z24tl=h7 zEJ`%ecRpUYuqC{2ij5y@K)dlxb>24x+(8Mx?WRxwE}lcw#|mzTx>MD!A*!0 zXcW)r&BH<;apYcF`YqZ8S@h}yoCP;#Y}K`T?FCb_&=NfPj?1+;eaNaJG6-9V;ypLe zTORxPr)}PDer;pzx_xab~ZKh*5DWkY9tocONAs^IWP8b_BG%8fxGhQJ(BR5Uhk!v z`~k|mX6}~TImC2+KWFR)CxFcZ++=P5q5o23%L2oWvBV6wX%tY+nG7cozl&*$Ffa*fZ@0hu(J zez#6kZZFjzDmR|4*au!AoxB0xGX-2`8>8w$G9={pas~S6;$M9{-{{*Z_WHv&(fgKg z(@xiH1E=l4`;DeG8ElSHkqy&gIA*x9b3u(!1fzCc(DMs=y~0Nh325Z?+DPOr%f5Z z$4w1uR9(jsqPEUfcSf(B)C`;65B;lBY=2w~!k{Ky46kSBXbcJ2%~!Dp8Xsle8*X~J zGXcK!$>=8J?P(SG4K9Ci#y6z(6eu^;#+-tdTI!W4XIn09BQihlWXhOBlUaWfOjvdl zhSd7?^NL=lzh|ZS^EbVCMD1N4GlB``s{{tE(1~u4UhEz>#9rBA?x?YZ&w6Y>a zGx#2eCLtY=v%V4U&&by1Z%K`w(9>6ZMNJu2a^F78HaWzOhQF5o)24zC-M_kOrjVbH zN+j(+59{$<5gHiaQTXGRS+*2Zjgw@|crN0VnXwc|K+|Ern``maw>q6(GE1jkAz>ac zD3r5>=zVnw)FdTIS}5atETw|7hE|dO3>+PfTLM{@_94KzNG4?8CN{VSS15-Sp!W2m z{#$Gq@;_a)_@i^9>_|tvtLGjZ7dn>sXyn0M+`)bPeOX8y&MJ2r#s~$+1VkgcwWp?! zxK*fYDfC%#i#n`T%2PQtxGSOkCjG?fuQY}^*wS6?(?K&AWBfZ+a9NU0p ztss@#KkqJfm#hx$qirSG1WTvIYLLfJO}J-lM3R$d(lK-%*!eDvd&$`C)frd!^C@4* zu$~jBYapH7Ru@(<9@|PGiK`!c%UkLa`o#eD)*k9ywBy>Ij_&bW!>OYVo!{-nkyr)V zW9#S}44^(KN($ZytVeL@j>3q+zGP0N7jRL&Ozt>8xLp7$SMz#Zf9O%Q1>F5JdjwWg zPXYG7;zRVyQ@Iq(-NIJ`TBd*|aT z1l3UUV{Y6Us){k8Cv~QX@(jt`I2mowVHO=?&rw!DvwkMtZm$_|IKs~dxTHLF1}CPr zDKAw^5qbBK&Gjs2}(XeW8E zS8$R~78zVi2t5!c;*JD--OeFge#TuY$c1xo`)P&*cy^{HIu}P2ZOWi(v!v$xlX%@y z_)CD1bXNrVec$D7;d%V8p6G1p>FI-%@DIpFPBsdtg>YaaBHKJu8x#N}1#og^L?+Wa z^N|Z%3{AFC%4Cc{`dIRiuOd=q7|P;jg|5b@(10EoVC24_YUa^X#&3aP5u0!cPq7(wUh|NqiS}5E zt#uAHeb2JB!e+9GHz%hi3+l;G=OK}D%fKV91LFcrr}%4!lm^Nz@51gx1%BxW2ck-smMM^q#?JSa>4txKGDejIs7 zGXT6Iz;m>}-l|L^`JD;@3`GOT<2}=58g2Q& zDxINU9cW!4C+UNBVdQbQhNAq4XwF>vO|It#-|U)*E9Y$}EcD#Wgq5{-qESi;Y3BL{ zDtz=rWd@Qj4~7eL3X5vru28}vdkpn1#mb>QB?tFemtJlbUH37Nahh`!d8K6YY^=M~ z>3|cn$cdiSpJ;J_Xp@G*j?>X5D>$*TeO6}5#bN2!8^~p&&a&l#uFe?wNcCSMOI2=+ zsV50Sh&fPfnpwhyChc6XhLD}CukK7&cx->|)|kaH$PDe}d?v1W^X?Sk2LI4&1gP*i zGiIHw8}Jf^jM10V86n3=&5S6wEP2JJJ67NMqk=D%N0+}wpEzAiGIPYviDL4+IDJ1= 
zBg_G=+yOTXp%D8mP9?!32hGR}{xI53c#yl*eoSIuBqp!}ysag+ zGF5Yt-B^V4H!uo{N*xcUUc+ewPTWCzNnmTu!Ff{-+LTUo7tQx?SQY_H*yA{g$!COM zl4j#73z_~AD?dBt@bd7#M_mrqX&yjyRwWZra!c1_-+#S#XAWn;l?|L^-rEAcTs6Hp zr!$Rk`d@HWZp(A#E@IRCY=JS-F70qczv=BeplY{!myfeUOU0A ziyG(=CR6p3z(M>qq4QlQ_qMv#7vsBcS2xy1acIP|D;SVfCx2yQ87JaY*!iyjhw(PX zsncP&04CelD+Sp7<2t5dbwKl@qzb}_Y}g1A0{VCDlxB~~!J(y_D;p7Nh{xMT#+Zx~ z?sVrj>EEmzCPtQTE+|}$9mlpEzicq|NoTiP09mrnIapJ|7fi=IpheMzWvjd1kFANyV z(>f-jB4EP9^z9rfY6&jtNR+;Yo!q*js1&T?{nkMpuS64z$_(5J6o_`3eyn$3O>lAb z39FCTRY=*8BC0F1^4<4!_>mMDnsaOS>MC87<@*o^Pn5AS;zJu2N8IJcp|G5EHmI}g z)04b^KGfFZ)jpyKRxcp>nb##mFLwLf?hizLVM_NR3CTpaRzrvi?FH2d2l)n+>F6v+ z?%kn!{*86oavOVx3WX@RHyqq`G4q4BX(F;2ie(MV;s6a?g_6LWjG~)myB+X{$*Ly4 zaXypjCG@edM(J|yBkaY-gH90;ar|=;KGw4Z>(#pS_m(jLpRtvcN~q7&So88-2LtoE zK-%95Z1>DRxF>*}3QCP_bYX-V7@_`OZRL?&V4uyck;rqLsFN6I05VVS)*0DR5=n3^ znzEFVRUbqv_+wB*%V@PIHasHyAr;f54$rXtR*}u>TTzRaH{I{#zG9{w7R6{#;+$Ql zVA?Ez;u947d3A+-e0&THGZC5-vLi`vyg9sE|Lk<(X()u)3f(;j|9H4n1Xc} z&y|#|d-2*86&qVwl9fi z5xL4#2wU46a+)m!|cEOf(9I^9HN%^?<~=H{y1k>`-Jv)hUBBz9zEK! zk8Mly7X=2H^m^RXne4gBlHoC0x#N7`a@E=H(Lu6N3`k9UH3cBzA0_T4pJtD1{qD*M zm_MFdcdQ^}s=oNA8Z{1q(mxo1i%{4P@IZ|Y9&K-Eq-Bj~ z4zhb<|8U>sZtnvL&L|im4PtF_-TnzznEQDinDnkgHj6~S(L&;Q&}`9dWVK`IbS&ws)HVVkDC`-bHCC$+g83Byj6eGy6rK5XVR_Fh=m&MPmZ<;A%P(dh2G zTeq#*`-m6T&-H8QA8icsL$pEiz1)$-3>jj*G*Qc-T4dyPlu4+}h3; z6N!p$iIdM0K>DwLZ9}|1b;jE?ui5?MIX$bEp|tpvkQ6Vh6>Fn>22!wVw4^&1XI zz%4?H5!u?q;s7Z`#e*(a7An zNsDb-THl}m8q(7&gu;I9HS96OXvh=F!-UAq!6w)|Mw%@0kfANbs`x2HBN0$xebBs(n`EF@Za4z=ZzP!K9xMOZt{3dGHR-Fqxn_e z;r^09ZsaizaI7EP6UitHRXX%~rkB`aO2cyLLv(oFjtI+M7R&mTU_St5^xDS#y}$V3 zpmWtO_9WfE1x3v(iN^~9Va#MyqbS+LyPKQV$5ZF_M=llm1USUZj;p3Eno$v9()2cX z?NzYXBzQw;7KgRmm2Ypn2N^Mz!$kH4Pt+E2?fU|>x3mTSYl_?v^NuTyGQN)XlcMNaC(tr z()JnyWpoP9{DjBD>`7gJAm9M-UNT`5p!^(CJxYBEa^X z()nWlK_#)LRhJzM)e|;_=%KLm?xkvVs7W(sw{ra70wayGAtceq4~Dgsssx08%)eM? 
z`o&kMb6tP%{SU#+(4}>-f-&m%;~bsF@JO*Rfv9|Y!dC@ov|}_$P8Rj;6P_wlB$|a> zEXhPNLNrlaIHXa7lG%ccMd*7JH?c&UuV9%18n2-hfGby#5P=DjgE#e9<2Sk1^AMVJ z*p@F45IE07N_5wSaW2f+oR|F8Qjv|2T=6r5K!`B{I;$6caOZ|;A{Q}^nd2cg1@5HJ z05khttEmn$rA}zD9w*aZJAyx^iF4)a6hWt*Khvf01P6Eq8g&Dc7o_c25l1^d;#Q}= zZ~Yzen!Ma;=XcJXnyz1s(fUiQWvxZJsC2m7m4G%PKccHBa0?5J>BV(OloJ#kC(@1!a~P zY=0sMsA$n7EhAsG_{whSB^dEDIaB902AQ3F_BM68+5aOOlZ{~Ko3(;bsD`2)K($r8 z-NY;D*>hoE8W8n56QK+G+QHxQY|7D1e5R0L!-3*LSR(rIp;1l(Lx?wy@C&;Zs3^c< zf+)B=A3t=<~F$e+3kU=^b}BN0uXjyaXMZZO*TRG&?a&6LHf zwRB4!#q5G@=Ha`nusz~eNP&ci1D#?mdUW2dUjFbwo;u z<{b|94O~mus?z^T^Of=(!jY4wKAcwkTdU$h&dyg@`4bd3q*HQq3;h{i9OmTZsIr;yPp(~e{(xpZeSn&p0w&Y z$(0+Hsw0TPzoXQ&m*(6jFjyK7;vlH zm_WF0A28x&xQ6^k<#SWacOsP?S+Mcz%AMez8)IEqU$G(xqK-#HHT)*0E+Rgwlt4$u zMwBj2PTy$7IwU#fe$>3Fe=X)mT(&0s`?N44xc|LKZ#1%f)izgb5$GLZc_lf0*R7vs z;{JU>``4w9*B&imZxS`d2R)xUR%l$VL{6RlaR^3}Y#%IgN8t%R znci^9HlHGaA8cYxGA6qf>(%M`aWglFfIKenkFURQ>pqBZ?RMI^B#%L0{OYXXUx9FN zue4Ppw`1(jK7^WUQ?xX!7lP1lv!tEpgJKErF^tRR7nvHqK>b28O+}Z+Ui6T2v_qT> zM~WHsCOGkVxce))&4UZ?dIhYc^&c+T?gsz1doTyeiIcj;gL;J8Gq9?o1CEp_D#>tU zN2*z?73mcUgvtv3XT_%s$+YSZY2Hi|=*$MDibw@uU|K8FCYlX-o_ebe9CPY%q-rvTMwcip6kf*_43*|X$|CQM37;^vA z`rQ^s>~uUXWpH78Htux)kwBf*+eMG$k5=~4F4!%jF>6!*&-&_iILtlHgNXrIv}}?# zZqo*)Z7ZGe8~}m&N+r(tq>>7^bxmSa5&*1JIV#6uy|c!W9hK2$JVQR)CvLU*3C%Z} z)r&p7&6oL3E^|!G-nyN^Y!1LEGa8lKHjyh4SY^&RfoHqvCuwm=x}f(fJJ`%EPbx7s zSTa3sA)_*nsgpjn<@f1S`rpfg?NzJ!F1lY=$M53mI@|8~jS~Rg@QPbg^Lcg$3{$4y z71?~kfb)qEOdY!4&;%zYw(hR-a3Q^O>@@bs%EBT_axY{^b*^$Xmf(3-`KQY8#G|IB zw$Y;V|2$V%_|LFI+`V&$pg3Uy*Me35A0sbKt4~NL4>TXESzAC**6|>$?hOPzTJ^4v zwCmxXXiyvx4=+3B-yhUYlHu%nWBgM^DEj2Z+0iGYgzyi1Cqn2R!YQXJOL&7x-w<5U zKK~%1Xt;9OON2=!xF_Q4tRxiuGK7venj;4iG6iaW=;gI67nM8l_oylwt^gop`#@5+ z?7qQt@u&4j!qn}g>hCU}0wDf^i{u=JwuPXnSe()ZB~9*VaiDBlmiNOE_j5fY4G7yYPUE{ZYu)2^&vGxO{N4APg=-F^KP@S|;an`5xt`(zdgA!AUxL*g9*d;q z_VLAftGOGB zH0bGFrR}Q-RXDuz7E?}QhWU%t^Wr;05B~ezEA4LF5?02?+%Xg~{E7rzfHCIM2n*k_g&pACm(uXFsXdaQT~@0EdGw25*nETCw0QBFcnb|er;%$sOfgK 
zsYVK{7)b#c_Qt>MDw?r%=UE0gt%0r|EKRa{TJc%>^iUyZ&xCFbB&6+9)E5!|?@EH@ ze~vl89GHI@hqUVX8*iO%&?{7oUtivE9X>3=gkEbIqUYSS!GcbqVk?97Wn3~{4&5wC z?Y&n`ZfALN18u(P0ew`J$S4qHGu(*v7aKSwqOAD{=XxCu$OQMr zR9nC5;(9g<=*~z_`Uw9iXgX>V-U_S&DpbezRpV`Y_dQ75-wy$%B#kM{_~Ln6;+DDh z`IN+;`_R2T1?Ck4L3JFv1Rg~uG222)3hF_^5uO#3n++qabxVkPNu=SQEQJbr6R$}0 zf0Z_xz;dG+%nU*DV(Jd)GuESM>gSWY$De0{dlI!oXh<*{YUe>9Mi2p$bW(TUokvJM z_aVe(3M5RG@SZb5t2!K)4K`0!8~yYvNu1cznJ1*J=+jr4!OLxT4-ubRo(!iBUYt(e zxGuUBFnFgn+-?wdD(dL zFPtCu2u}T;`JR6mf>}H1wJDtA6Kc6OdeEP}P!t|m)alS8bNv_#P?j(|bt>aZW0mbY zsI&P>@-$%^_Lz}JszLMjeRQ$Ki2A}wGrF8Z>akU%L!s^*OHZZ8`8wAy)`C+;uu$VG z7-f21LACY!zW*zx{wvs9LBaYA8Ac-;K!m%@b?wuoLir#O9>*Mff&B6NR|70J#tb>9G1310}f;z2ZwW)`H|zo%m|+fq4t z^7^<(T7r$ACW;mIp~emdx?hNA-d)@HeIK)PFK@m+gE+IBvIi*+4OHVZceTbOSq{*) zgDpAnFBS<|Vs?16cH+t>gJSa*eq?+SKD^%cYF|@ z@`)YWc#3P(@bT!>)dN27-YcC~>nrAl9O?IhdB>vHpJV|jQOK}d7#-R4r;SNgsI7PS z)YgmE0Q~>DS|mssK+j7z*jHC+#K;676lVmue3s=~dBjs+Ldmp){Ru+l2YY0!j~t1G zjDy*8vrLu;gRigKb6E11Mu|W%Orj=~KYD%*?W~b{3(;lvrM;%|{PnVDnry+7KUy}y-+P#!Qi5~|2H2P`#a~*?ibhp|IaaSk5mc-6rKRdxWWppnGB!& z{k!+!jjXV3Kf}1$R&9mqrdw~AT{@Fkdbh~A;QZ?s+R9ZfH@;{s1k=kOPBFnlLo%lt zW-6ZOwWu+1CIVsa3yKMe9P@JGBW+TiV?68nb0B?Xcfp!okW-q!8()Sby1o0vXkQ&o ze<*RneWmb&k}{Wc9u^(=AQ-m$2c>F|oG>n|%9#cQU;FAl=ZiT^e1;Z?PI<|pCk z?bh{UcnLw_9AD;%mvt<~l3QiLN!Ym6+U;#@AR27X?n^DYQsNu)SESJx6wA@{*2t<3 zW~0P_u$cYg{~_wDqT1Y|b#X1O#T|lck>Z}ai!44k-X)*F9Y*Lml!NqN{xUWfsOFIpT2~X$IP22u|BJ#w|Je2lEGC17V3Y-?0q4*9QPuBXj^jTZk5ifP zTULGA=B>JWa^g`+)J&jhB60Dq!PhsjJYq#NQO#+=fKW#PSGsF{0oWpL6XJ_%<&jH@ zZM930i=MD94SFetrV??JD=rXdlpg)DKkw!76{V0E@I8#1w4@bX8ivuExI zA>2uw^Y!D3Gc2Zo!F!o@6onuXWyR6ya4^c0Io|0Qez90h%XmhQwJ?HoSG zZ@L}dw6a-c@IHLEu21sUDQW)=%Mq7YY6F^9iGBH2r* zEuGARl8#CoXb`4>D|Uu=lis%6prfcHFvj+Vlek2?{P?yo{vZ86imEAKPyekz$FL{X z;CII9Cbshi`?vO09^$BbqVQTMTeGw5TFO(iiVyG`d#Q{TE~M4-pv|6MqPlT#ju${9 zUj=PKbpaiOWl!l{W4MJa8eQSL`uV_2d22~v58#y1h@pKJWJqKptJkw55YL5q4~?A9p@;#Y3Io_qP{ z-&t5swYTlEd`x0{n=nj;oXFd3h(E|ilsxJ_LhV>@d6|LkAdM+cg3++b&VtbUuYdgn 
zmL#p?{yV7g)PL_T&TltvghgX@e@wI0Js=ztOj+3kW=7Bb@lj0wI4#p#^;{vQ-4&b2 z;5zZSOW0lPXgVMb|I<|mQYJ5IP(pu!awi_zALu^TfIlW)pJFI4+W1*QePxv@ zniRh0k5M1Zw?y(mb9n?EkMokpG!<{L=C&0x15m@@Oc6LzSvj@pl+0fbaQ6gYp~E)T zb3L8|-T82!?vS;WxsZo%Cb31y>drS^hTZyhsXx8Q$4n+I;r)^M7i+%Ftq*&d>i6P( z`6Tu%-CEc47m?IuVR`lE!_C7!Wm{(#|4@}v^8DZo#aNXe4jdssD?;S*aQr#*`_^Pc+`V|WXD zXB1^vK02f=I;sI#yExv8>U@f8X(VNwHHs)S4Go4Xk@i<+BBp=56GfF!s$q_(-8+Q;4pIH8?+9i0qwg@O#RKZ&-qD%2q4snvXG~6Iwe+xR{wkCY#mh zGO8MP7gqg@!zl-Q-nsp{*=D#?k}Gfc`m|y|sM|`G1@xiikNHI^vIvD8epj0>nqn`- zan(oBo=!GkMeIMIXIsV<9+0af@!QXb6|Fmzewy-i^LTUL%bj1I5M4<&Axp-_sK4f{ z9K$~n299hdO!oCEHlY6mLl=bBKi8Cgp1MQKxlEJ;_inTQ`G`^IE*P(W`Nbk~p=ht9 zg$p`>lg(L+&+3^kC;N2AWwO2aNYf>?mtiU$+AEq`aV^M=cTqaT8O-xK43v*UrtQTM z!Sq3YOJ{OB&gAW9bv}7=r`zukPh$r1dv+(X5Srz>BThT%>#60m~n4KD7_^+8ghZK5QFxtv{ z{0r4=YD6(8Lc+zx!`u6Qck~kt@D!kTC0ETnVARg?&m+=8;dteK*eIU#r+=~FZ+R%< zm8xfFrezI4`I!3;K$cHE0gVnyN`*?+W*2`6cGRcdLq#;I6a24HffhR`^;^BQI2%7p z1fdubIw*2PVh6j!jv;T(vj*p~c&Ma<5(ePPP!gp{B;r?bURx3}7{!}xH`^nL21ocB zu1}uvj(PBY2u^|h8K3dpk-O7tk8c*^>h#xCJya0g6?kwGF9c4*1@Gs(h|G1ktB9=< zv9^2k{%*&XXfNTuDr?!5;71y3xx|81(D@vLHZXFk9|{l9QT`~?Dp+&lJIo-NoY!a4 z6FTkd={9ISPw9-|DQxLYu7*a)`rwp8VD#GBO)7=+6%1i25{j#$v4&PGZB9|x$u#Wa zDo1X4zf3HD`uWTB1*MaB2VM39eQ8WqNZ9bRjSI!?+~4Z?h4^EABT`>gL18`w1}imW zBc~1H!xMet7;_Qy1gPLoe6rYu2}vLJ_d&UJFleOe_EI%}u&*+G zu#cFUYI1PZfaSxI_-k($kV#Qm1ug@p)y5C$4URam9sPinu|oq(EPvWuEvOfbt5+xyvcs~8)0iej{lt=z*~Y%KTDkX0(8 z=tQXFj(9KL4r7Fd@VLzEyTO%yF1H3`1(U`#%*sy=lJ7&DR$ww2Os90Sc5X#TRj}hT zoXVZ^b!!KwF7vXP8dpiFIu1&{6E;#6R`JAmY~bq}caUcR9MU9@aak9x&?@B?F%iZ_aJU4G28wd7!q4 z#MP>f0YrV$wgKdBhsahJ5_lL@Nheh|djSvvHL?}0T0Nkq3;EJ^ zLiFLR(5e!61q7`!#d!}@@1cJuScDW0ehsQdXAeG7rVGS>c%~g1wIG<9OFSv~r+(UQ zgK0eGn&XG>-VA8;3FGzN@I zHo{N${kOh`egeM2Tq2@nD5qKwsy9yqTIvk9Dq&vl^r{jdW42~C&uhEF}DRgEsx?~a46fVd9$&ng8jX&B{wQCYpRr8IE zWd_tkpCV!h;30tF1-r%_bh|gHD(|H^5Y#_sn_$X~{KpnCj|4${;h1n!JM|I$%N8I~ zsiSOcgv@-ckD3uLW~KCIu{e?fp-lc>%(YE#1qu~3iQ5Wb`ZeKtgH-QA2T+M`OQ8u0 zd{JzL<*kl^_ND}7V$ZKwGhW0RW}m-C|5r7^UG5$S5to$lu!!*4KqYl>7|)WY#fY09 
z|NI5=?`0fLtZF{7e`U8P(mHcV#tsU-=sD4o4#P`?OX{+HAw1BW z#Mz6~;FE~F>q2QHN_p$NJ^xm{Q~GH7eCQ&DipNBuBayYGrxp#Sw+%8FWhH()T%x_#wGxeo7U#e0 z{`4zlIx;~Sn@_?LAQJ-%HP=J`8LgBG9m)UmYY;5QJaH{`99CwAQPoniE|$4|v@dap zXW#ORKhQu0k40uvCqR0A9@ZO#``S^s?Oe=iKD!jI>Kiv;V|pL0!X4NJ7D<{P4iBE4 z~&7NYJxrCXldbR;J4nnzOMGxtqVY5JK)|0m5ny zxa%JOA!BQbV6;nuUukR@qI|sw+dF-G@Vhibt6^dsAG_a1_}Gx){-cFh3b{E{-xlk8 ze{}D1p})yEmOFZ9&{PjCbj*f^5jr4JszgT**p@=fjYwvIYQROCqRHq{K}EilQijXu zyVWsJbM`KTKhl*CYu5ejyEbKIq%%VW-(A>vf2ui+Iu7E9%K`vzeb#Y+Xt)a2g#4Q_ z)z^;haxJy0)BKZ9PF zHiF8pePa!wD;ixLc%h4O-GU`W%=&vPmiG~r@e4~ZJVULAF6_C-U&)=^eTlB?L~Vly z)){di&Tt0Gan>(?_N!@gn!0x6Tbub9QLY;d&8XdoHiri(mW7U3j@0QVt(u-q9Gsxb79-Pi-I%aFyn`n zH)~hgU3$O1y4^ou=~oRC;K}Evx)?E{cDRjL555$2O2oE^Z9-F5Hbg91VU+}|6^RvO zXu0bLKU$^z(%g7H9w*jZzss82kF9gmB1G)EXmp`K%)i)ZC}&q`xw(@QE7@j}ppFlX z=0c26PF4z(<&1hy^!v1(aE*D~FFXDIczn;P!(hd`QXrh`l2X7@Q*=b$xk#RTdQ%hPrhYbf8qQ?=P(X=2?2l9iuh3{)!Ti z=|66UW^f_lbcy*lw1jrqn~}08nWDssWN7NIJPjj|OD*yrnj%9#H~x^Kd9^Ye0iOck=l9K^vZe?euX7vk0(DP1Gf2W;_;ln>X>|~Un64k@e>3RZ?uxq7Nbjz=>3D3~E- zGlsYJ;Ivr+OIiXQbY*=^it(wtRw6q@0Ta7_Pzcn};~Y^^1U$HUOAz_l=sig zB2W-?m;{M(sS;5;`kLlTle1H@|0-${kID3tZtFyU`b8B7vbtv~8%N`uBnY8bm~x8) z18Wi0UCX{*CV`wl;h#B6DwFK6Q)lgSGkdk|?I+PrCP~szO5P1WboRwF!eqUwHxHo< zu8ArnMV_b2ip^d+HUY8&O>8m77d5Aww4&(BGRGgCq=vk1p#ro&j^~b?cICkzEhueq zxS_fW_9OxrqRhd`F-&t%THCe&(ycvvfqA&~yY98qs#QA=2#BUnH2C5Rpt%5UqaNdy zPav%-{MWZlJlnHGYcoO$;cM*rg7`eebPBZA6{!!?&XA;OYZAF8%KOAi)Z#G@lgweG zcJ4=G%R#O|1Eo@+3>y7mXTZ9pz|!2H)Y``8XBmFE6x*+$9ukBic_I1J$;$KAgruFO z7cVZQowo1ozshf)gtl*1-oj~j8zJoOQPP1)QA(<|t12J*c7^=u_D@c?eZQFC|2y4D z&AqNwok7SI^`QxIQ%K0KQ}>vzRg`x}$+$S!O+Zw4EZm!* zyfg+I1#B@9&n!H&cZWq5Ls54bbrKf6eEOTFC_<({L!|Sl&f^anST(EA^>IJaY-4MS z&$^v_=(2C2#?bHiu%v70Nf=HaSHc>5RUqn8d5ZFvlgq}4ksfC4E&tVf*b)kpmnl8q z5eNm$#`qp_|M7j)sCW>a8J1x>su~cKM2OYkj}ptuK-4VsLFo1WGIyCaem1q1Z3{4Hbm}uHn@N{-(^3~`(8;$dtSuco zAGIm}n$R1Gsb1;Q4-hby1_UoAaB&rL9+NW>{sMD^BVe9&adi%;-fkBQoW|LI{;HPt zrZrjqQCcjEEDV)9L4(9Fek-}TL>S!teV6~cklo(oMAYs0-rgltSg2*mmQ^P*9L(YZ 
z#h&1AuMYlXIXXC@Mj%kT*P(V}Dh{P}3BCWrzCbY%lbJ~h_&Euah~|x5ZIwxmguBPW zEPK55YghpzF}_?lI{SW@F^RoDm6B11`{) zK<1rULm2TWQNk`rf%(b6%H+UoB^kv z%H6?i^~9rwrWfnGsP`^$xDZPw;QQQl08p??Rg%mv_3&2EIVt*ftz6hGw;pjrm&c_ds( zQ0kzW!*>yP9EAIH#zFTh=@q;~4#ifF7tra7@8ffN)+-cZ!3+ged>uN#S}F)J3+REK z!>?ZZ1}T~e)1M$z13~^hEv|~;p00SwkZ;cvk(^W-J9TaN$eGq*yX@HbL}R}I zIL}NwEfUv=J%Y@p(9HqqBsJFW3q^!n-ko`=PIGcFPwkuizSV5pAy{w6>04hPwliGy zt1u+V`D>n(xIq4(PrL}c@k1*p%}nA2N}K?fiqm12ybhEjkMSOZ&D^S8EqNcUF?6QO3| zr7gYX!1(D|`{w=P)qguQJ>Xf87qhUa@C|e;)-*_pceDQTcG95`Yc~3W0-x4o;&xb0 z^K|*?F9=De9HSuVAxgxCoOxd!p8o+ZJ@MJv)Qjj$b-fo`8QfYYQu%eg6xsIC3APaTeQK&X1N{ ze#Bf=GE*#x_Cs390qz9XX(X0*BXUSXs*gFN`l?#HNG&IUh{SKmjiAcY+KO$jAF(8{ zjH;+#3|ZlFQq_+_n8zR|{WJQvBqif_G^90`?Z4R~{p4}ZdUd_sZZO}*Pci2BOXnqE z%CaM=01@c}oxtOXN-EqRkr5Fh-eLO2>>yc!veZ&Kdi0L8#cRG}^Gj3K9T=N0LES~% z-#K~2kJ8}^Q&J_Y`mpi-C!SD_A4RqR6}EZQ(f5sP+T zd(7NDUU6%GXSIZ~k0?Pc9|QBK2r%K~LkiE^>dt@WpFf$dJ1A!=`4uQ=C|5vbU~$O% zvlL5vjaYSYOwhPh%i(~VB@BEQiGB!QZ)`-(g66IlY;@IW7?> zjx>`;Z6?*{C$T|&ha&+ttk%;LQIIL%fjq`K+$vc<`n3r#U~k&m20h@;_75=Tn(pNKbwyEt?Fb~aU)_HSrk zeUAwLbN*sZ92(t4Ar5~1y8BYHP_o(BT-Z@NVON--p=z!Y5+>;9Lv8ultK^EaT1blEvG)jHBUEhy!fi)M{ zopNBmVTDOb_HpSQM&To);w#5@DV6G6eVQjmI#3nJaX z_odoU6uH9f=s#GPQ}IbkL` zatNno6(fDwP`P}WN<5j)wY#9WHS>-{87aH5-yULDL1|wv+V9A2wRubHv}sehX!Y!T zqyt)5$cN1F9!jXVB`KyfQIFaL{Z@t}YW@Q=zj3m{M(&Rnq1f-a=99exr7&lxco)w= zKOm0P^O<@!F~tLI_XCmdVtR;K?{Fm@0W4UpiWSHdhHPw-D2>=ue$|?rIj%hRr3Cdi zk!TnuRoN*Nb@X%X+=Jis8af_0OaNi#E881q zH0y$1$dah0k{KhY+tP{qiUlG-Egl!|39<6nJhBCcuE>aNOyE(w7&)8@pq-J6g$5R# zFz))Ke?GQ-W(sS#v_%a7A;Ltto#ivfn@sxnc@SoFhy+5OwWpxiUDyK)yVOamVFV&# zD-o2)spT(mv+qv631?qC+^B>{{1Y9lpRe=7j8((>sBu(7h7jKlOqywNmS0_k>?)hQ z_gCqe+phxNSSaru1AzMBRhkH3YIu;u7^~8|r3-v-u?4)V zS`x3VdR`|_BtlhPBsnMgvatS08!j{S!H+bghRm75o$&j(Ob-{;E3!g$~A(&x>418#=>@cBV zPnrg%pupsw+Hj{p!8BAIS{#%3DBWlaEr9o=@P%<{8Ii3o>qQ#P z`S~VpZ<;ZA=j6f@1_T>IT;-+_e|7m1Cl1bfmbkmzx46pu=lgcpjz+2-K}~~~hpsYa z3u!jrANg%@yoRjVH!o3OhMigtoeChS&Q3h3tQZq}z5Sh7|B&`uO?R>f6sM8oEJ_=w zx@D@^(F7U(>)Afg`jY!of6i%I 
z$U7u`PZ-BvdX|rEdDh>Ahc>AHO`UgeJ`&mxZ6Z88?|AdZi5%yFUVCowa^5#laV2Y| z4;IO-7p82tX4|-b4?9%^hmxiDBzXcuKbzkC>^xt0@L2J9T6@j@@VGHKKn@Ez`2E@z znTPYocF|uO4I5FtJwek8ze3YDG?7Zsf3j2T^(vGl_mIaQV)TJ+jxHyJHK~}--`~H) zH6061_N9_W`{=`qzBqWj&A5Y3VvbHhX|~#iq|6M%GylUN19Tm<8+&$jRlj22TzoDh zlBV|2CIH+0@=X9D8LM(wT;`TpzlHB`0c8GacH7gI$zTjI*?z~Jhi}0%$YDrinM}$U zmqdPfcNPN=$x5g!Ok4caJfQ&s@1jrv)Q2euH6SU;dlT&NvHjc0#@565Apc5}b+J)Rj7&`~Yj}@h^+XWAII-^+tOc4s*F2_g8r+Zks5MmAf z)bPwifjKfVO%e0uCb&dh#1vzr6oj~uVBUKn%MfYI^t_oD|BUGv#QWTqrO(`)AkUJx z&VV-k4*EW%*UlOK?={FOuBKsglni_Ix_S7>u}aO`>)YE48bdVLSd=#_t2K&xhoK_I zSlFaE4OW=W$~e+;kxpxz2W;^Szdb3!0*0~gUoY+tl@@N->^*d@w(yKjFKH%COmbI* z=o=qw)(v+w`*VK6KW(vr`4XO@NXQfL&dG03sSv4qP-l-2j+SU55U6hJT*7{~IU@~p z&RyyvvRiKz?m9Bb$D<|<*HbKoUp(_;r)7U!;u+|N#+T2~veLVj9?2au0ZkmRO~!|z z=3;D5(i;+Y)x)AvIpss6U9JS*4Q~H_>}Nk)?Ei_008_Wm&SGpbQ|T@2`^)QnjX~FA zM=VodMvNF1P!pM3-WoFoPB{ljS^MSBr)#y(sq8y!2VX~7l2FmNB#<@iC}FmgIGqtNBN`KOLqMomYTnuZesX8k ze*mq$N$4msQX?4nAhiXo?E_0>%Eh7e-eLU2jhn2Z}6xH9>NrXX=J z{u^|+2@C9)MKM%_#tHoirj7{njn?C+qwS;bipLruzI_#K+^jqr7$IR9yDY8&Ru%f% zP?N%{7SUR7DEOdq-oL4p>4=(+Onng6iM;?hj|bqMp|- zks-Do5i`kN`9)QZ+XTtNep~(9Dpgl=`pw_uef3|Nt@~{zQazDzn9+Dz-<=3p|AzB> z2rT$PFU2#kVe#X4$W$R25^j7<5~Q~#Kr1##5GgKc{$t^|c?0UPWK29ZvDC+KKfoRO z=VtC8LxaD%j2H9h5EUOEJUpBBC?b(8uD_oe{cMPmAW3z~1oQy;2!~KC?7Sr6@1;fo zhq{U*7i`wH9e-8&*d|`9Y7Anwz{gO^K#* zbH2e}e?<0{+U-fp1a1@?>XL9QWX4*=1QLLTY68jNREM9TI2eQ~0DXlsBTb)$>ErM} zB?xUZnuVShFTs80INEOKYjH^Z=6VUIQP`DwJ_mV+f9GKh=kkFC4o&qaB(o`Is7g-> z1Wy+M_jRw2R$~CRDYW*3>tK{f03?cHRu2feyWYos6lPT>{HJg9e%0Cx7*=6$Oo}D) z@;h9EZ8&YS-+%kzCLv{!h?bff{QLAk|A1nLi_D;Goo+`XyA#LAb5jB)#Cph{FM;h@e!i=2-DB|fqJ1t(p z0=`~{`IM{wh7b*UN?&&t=}I=yzx$zW^6TB9SH_wvSrk9UH@(R9fTZoxToPHKSj6P;QAA!JaqMVXH94Mp6FH~26`bL6BU~!6|XsM5+Wyx`U>3L zFNc=lrOcMNxE!!dAyh1R|1@%orXLKPP@s9SAibIiDClk#%~yz|)HsuGjFRG=yIC1z zSpFq1_Y|_m^D1x0e;a=Y`j0!xwc6 za@a!q*pivk?6-4hIn?ZTXVKOA`MX7_(<-shyI=*oBtQLKGG^<_Vg>MV-Z2yAvwG}j zO7b5slINtbsB!+@Cro)A!5|~bIDS+u8z~oPp8PyXz#0=;Kvx7WKcZ931OuhaCItJ$ 
zm?EObRf7|jZJUeNZ57wugih?1{SiqIS2W@*)u^e%5j^jWG!1oBz zGl|ROKa4j-&Xd2f`w;GdjG!*g=R_Nm=YKYPNtmuB6;EsixmY4w5E8CzGz;nG6wkaZ zSbQIEURSM%v|X-of71u$Md;0pYtRfBlnOSZB+%5)iMJf>hlip{zbOW__E8c6c8)4-*L)tuu?NmIVsg&wF`QnkAn&3t-$c*pK7`HxnruvXUEN!*d)U$l zoG4V%uP^e$N91+luziO%?sR8ZMv2a)s9K7V*f`>Z@oL+-rfKP} zfCqEq<}=6j0GyaH zoF5db)-f@ABu#)Oqz9u)B_QXIiZHKCc9+}LRZsF}7EloWl8{MK>1FV%=d*(9I!Df( zuzRildGQe~^it>U57MZ|qaB#Pd%CaE)#=R{;n&wGe64yr!lEyY81-T<5dts3={{*t zu;KY{t)g`AULyS62xa$mp3FT6ixlAULJdCJTUIZT=MX`e@cCp}Rx!|n_jGi({3X#Ni{la-i&I1{ zBQ&Ak;bP3+5kZFpsWxlvt(^Xpm0Yw?Rj#ghzISY%^R-?tn>J&n=-i74TM8Mt_w6(j ze}ACRxt%~I$==+k^Q`q zwJ9@B-PGp240qv6=mDEENwU__?{2)$&J-GMmS(ope0{ZRElfZWjVr=)3~vfP1r7UR zOsm1lAE{ynFrWv|ZF_tB)f(ShmS06;3qEf%BA@V05^AuAT(iM^;^qDjs8Hnu2&|nk z3En-CFzyvOM*M*0;rQBfAHz7`f15UjX_53D1a|UUsBc!#le}wwoqywnm!Fb&LG-O* z+lHsCpBC9vla9K`>!GIyWF|pcoU2RaMZTYYP7&gQMqTUumekjlj*gkI?fB=NKSSuB z&(%+3N{r99LpuCsW9*-X>||}AV)u7D42PBe7;O`#ylSQz6%!*owP!c9wBfQwOt+|X ze8xE!0WqlO@dMu9hFa11Ay_f$kqk&GEhpqDPsV_)fg>g;d2;;YJty2K0Fc~c^cO-l z0k8|nJ!nvg-#|zUltC<~&s!$>YNz&45vh>NIsTPiHut7>I_p;#>i4>y{8(=?IH4kP zY;0!vzSD1dwZ}q`(P+Pe!Ow4*>o098b+Alo886FSrHhen%(DkJp;WV$FE24))hvsp zrbQ8key5_L(=<&e6k&u>f1y^WqvoDv(GUl;TToYIYS-B^JB2AG1RgqLT546fN-py{ z>ej~nVjXlS2_G7#^lB_8m7Sv`u*c9nUQhT!!~aU%fi(9XBOQoh1{3MiXG2vHZx+Q{ ztVW5|fszw=sKz;t)4ArN`hs_}DjXQMAA(>31ElAcW!8K3%?b1yvCTDeY77dR z0!UGD5zHZai+y<9F11=amPJP(n@O3#fSD=WJm%phB0SC?Y%RmH6>BYy{)tl2*Q`4~ zDr8u(Wpt)u>7}(i&u#lW+lvqLtgcx47yk=YKRFWY5)y86J!dKyR!{MSs9}9AiZ{&L zJ^0~S#o*4t&*fu#8~Mf7j-%A|nhEiY)0*K{&+y-eeh}O+z1x1T*CImwwG{>r+4N)k z5OSqnZv9^~kl{x1xJEbJsj1L*u_RGb6*l1eD~rRu?%dc9?!$kpdxLtBJQ1MciJ1=6 zJ}#+T(t?l*&wPV@lrM(qF^XaNLpuTjir~7hqe7A_?E$4+y(SNF26g? 
zV#iz~FBP(=6Uz>Ts$03R?}_pObS87^9-{rqBHFI|Len(Lpm;y!n6yt*YKgEL(sHOX z@_hvini;`pJ@FX;aYEVE8-Yi|5;c_HCr_36rM#C$`U>O1rs3%CaLVl|tT+-W(5K%` zl|k~Pei65L<%}MEGvT1qv&|U6-!^2^tMn9bmLtkz7Wn1$@G#DrC=&Jh8t;#-J0m;~ z9UYZx8kfJD@XJ|Dn&<84$?9cw;rU_r;Apqp!}dN5Xz`%d9*Q!NZPg)7G72 zx7f)Joqlw5eQv7lB9*(n<(1`(#?iA@4f#PKibd{7U+>Rz`5;&nIK;M6TkEwDIYXS; zXDOy44yYYGa4aSH0yu8`cSyW=UB4mW_xA$!tta<&nJgx3Twu9Fs{O*Ldv{Xo<#}1R z{f?`z2&rQR#-|*k6y#BOLk2r)N;S$=`5-(dWpzZ=?bTjEhQvMuN7k%SEtPm|WVKA_ z%o_m2jiamZY8Fv$FY)OhNDzsA2rfz%d9WxW6*)?|I*Ix2kh9Et>GBX_=ws=#_es&H zp3i?ym%>^^tSOI5YoH;|T5XHtja+O&XEO3Okwk~*(6fVUhhwy%uR}Ti^15SY(DrfM z-S6=9Hgloo^@j3QghmdMaqycAm+_nVM|j5Mb{SK0BuT67wx+ z;7X~$QBi&gS%9caPAAM9;Y6AC?)6WVSeBo9Lh$NeId^E|bkp6{o<}**nH`PcXtt0o z|D-`Yji(CY%Zul}&YPeA<==ns#RQI7bo_D)s*#M~dQ#$~%Y$Zu?<8+ ziR<%YwHc8?8G_4#A|#8L!km;Xi{d7s>` zos$PK$&;Vw2)o3a(|G0hbmi=ue2-3(nQGU7hb-5$SdNO>t(l&=NJ_-u4J1T_fqh@3 z5#=(g7f#vN8)L060Xo?|6FE|Y-AeU*d-4ZUKKUT9-bVlYc)9K$8_5ioAFr#X46VCg zu%gw52mbQ9leEpsD9x0$=0r$}G}D@$a7gn#!IlRNfDuOR8j_6y_sTMMo0OgRc_~QS zRJ*~y{c+=_4#tVpRS6;-c2z15+h-L!uwm7pCu{@l?eqZmdpSeD3D@p!)D*X+LAa(7 ziP2X=OId>YNbEtU0_Ye-W}H|Z=df6N&>9Yyo=0a!4O0Yn;53h_CY~`~BLYk9G%#Sr zgpaOk{i;q|kbze>zE-ExV60iA?Xq;WKh(5Mg9n6K>>g9{PHB2HgsQS3sByK&O6U5C z$J5L1tBIeFcS}CuaiJKdE%~p@z(tORl&-JG_))?S1Rq!VFvcKs1bO~x;^0~@cGELv zzAh=NNEPoObi2~Hj6-AVF}vo(R=UmJ+5UQ;T3>^3T}m!wE|EC+wy%Askm$bVi04*5 z)f%q47M=aEQoa4j9wZd1T@>cLAFrOIzkicfMa6)?j)TV7cuyf(LVAA~6_%Ij)70TW z#&ad=@g%8&aINP>o4=*MCD4ktMQ{ENcr8UiK_nCXB^?_1=b}Ahdvw}bE_gE-7CVxe z&L!w^3{_K%>EJmxdL26)sa!*xTNo9r3e(O+`U7|QQzfP1hZ}SzGb&ZN(7)aNZT4C) ziai5*(eH?U97o8xl;f&6w|=MKt6{H^&-q8vKQ2&)hmBy16T_#p$!>;`WDV~-G#jjY z#maXM&r=e;wu}tPl&l*Ne|4L7G=^kn>YvKkpexy9K3rx%V<4!W`9F#cb z{cjPOYKx9P?F{ZWD89v$FNXs~7n9~`B}fw173&q3%jYr-;c0#I)8t|M!0uWtF)_lS zbd4XTM?g4uwaL(g)~^@b&xhmBj2#B9|4zpnL%e9(e#JE&Vgvl9W0$jF5Qz14OzJhp z@bH?MFNjf*oHEeEJC+0$ zM7^oHnC6m>_qYTA#V7j=M{VM=g`fUv*~WvDE@;IaWS(r%A~dWa^e7W zfXoOd?!P;IZ+F7>p!K&+x#z=u4=>`uXcv1GX7DRk4W>sJKh#16ja?pymXM&)H)hzq 
zpl%tidGIxNnit}$xxuWkyFSZ5Y~WG#r4_%Na3LyHxXe3zPORQv`~f@U27Ye#O@NPr zeZeEc1^m`POewLevZs(TsY4a|b);Y|7FMoG#1ECKUqU06;zhHRGyj&E0H7UiHwlvv z_tJSC@B8NY^}tgXKAL(G_YbXHe=v&C(K3chz@8ugX!q2)?$G>mblm*fnpoN6)wsDF zNk01v;N`ieVrQ5l{s{yE^C@7zAhR=6pc3QP-$~p3sP}DfT}-0nY+VuNE1v(Xp1fvA zHx=YHsF{hIvLQ@(#3LSA0o=5mg3&zHL!Au(Ib ziOq*`{V0!s(J(kg__VXgAed1WM~XJ{3}ZS}0=y@g1s==>m1BL zj6O@Gs@nsRVU#4vHxFADx6v)$_KgyArE&bsk9>zJ-PUk0q?2xhzl*mh@CO!u-E-6g zVCb)!kX56_19A5-(W;^o@JklFzdXujl;-BMF@}hq-gXkIT1Uul=jXyt=%rr1j${1J z;AKElkpu9758H%|jrerjQE*-GdgKv{gpSiu^M z&~or(@GI-<;LoQKxM4XCM@I^lE+Y_cU%yu~LJIEFJ9F#$uy^jnchMY ziTKKhNK&Kb*-ND#odN^=dFK;3ucx>NghjEO1@t?l6aXRsN+@8zEKT#VW`c9~lhYeW z$`U~9%hBs}Cx_P-NZJa190+kxjL2VdlKu~qDN@$;8|kUZyp zZo=7}QY8kShTGGWAHfM=YL+|_Gx8R>O-;?#xI6cPfE>UY%y)Vxz)%0HM0G9;3h+3YBxi%Xr44XZ#4>1e-4@fo}r9=MJ6mWUEjQj}f zf_Z2VluO&2y1`O6Wt?O6i76i(=cn7arrDH;6Evqb235$sxoMmI`hDhT^>KA}g8~d7 zqOi-7_$iL^&Wtnw^3{Y3p|-4VZ5JfiPirYNnOY)T<{4Z=t6i(;$NCwWj@V`ipV=%n zSq2hcV^fnV5|ji|okXoDFP8v|07k(fMM}&^3@}CVkNwP&3a#wLyx3#{<$-o?FhB{) zwBXgs1Nxq>#pX?rxaMKT^Zz62E!f(OmbGEri&I>SLve@V?xc7rP^?ICFAl}s3dJcJ z+@(lxT3mv=yE{qX%iiZb=XQ5Dk|MLAAy!XI_vP$5xf;Vp0&Ya_(R75B6N<`{C3q@c(Yxs0 zAtyqe#9;oa3PCRzfXceXAcCd!G9O&{irrw5QS%+7tq~T3&eJyV7;!3ksoLLMIFE`M1)5 z!uC4aKfN8PMGwLbOl+K*9Ny-?7#aZSw+`=Cf2J3!(&M<$e9EZ8O$;Ibt9o^C76~-mHq#K-wT~mu8LAG?K1WecaHe zL7GP*SA1n%C?*{nJELbYonV63jdW64)z`_@Dh^emI1RoEb(DfMkme^_6lFNQqwK9g z754{U{;hYxmrwfqsO76uIB^zad?1J9U}Z^ZeTN}l=-v@5Q^iX z*@;k6kggNyI8;K8J7}z7e(N2OZ?=0BA8I68ymWXkjsCSg{{9IkpeoXft3Vs}sgFc* z%M$E8d$bf~*M@d2F?Ipv6mL$L`Km~pb%#Id;e_%3j#O<=0A3^1ap6rI1v@(b?K7_| z8M9C+7Kp*moI5IJyc69ft7oCfh!ct{GiKsrQ~9#GLUd&sHTI?SJ_jfg!rRYJ=P_6m z8Z&tPZIzTML|^7irmN@cubU9>Yd*Ho6hHG>-VkO-ZTPHQ-he{|vq0}ZG@$yauhG&h zyy=I^dHQ)z9lGpvg>`{WW@u|8XYtdDJ8xW$c3N|pnScewYrz@BngMce40i&*VWpW~ zY|p;_xcRFfSiwzxu4~9e1s1?QxTu_LXC8h5{xGUWau6; zN6RZ&b7WN8y>gZ^B%DA9$H|48+%#OnA8GlF&6#!f%hms;qgRJc7M+*C*km2)YI zkd&Jse56PW`WgK`-Q^uF@(r*a2;X|P>RT!JO1cM1xV~YLCa1CttFl6xt{}Io<43Oa=;g^iy^PK54GyTvbTPqK 
z9nY|jYtz8^S6s@V^Y2J<&;S0=@80CU?$_z42V&3E?_0qD?*3;t(A*%G&|Ztj8g^!T zU{Y=Cr!JUFPxULqCx^MA*RF>JKQ(DKKoS-^0EOmbFo^^eGn-Lfy*0aa4|pU=1xvHG zf8peme&L=58Gl)u%b=rdgU6o@*v*=GAn#>H; zPTm{!#YhhK%I}witrppiJ?ZMB$R8ftQ^Vcoc6@b+XBmQzYMP10cAg@o(@EpW%GgS> zmPuUmI;#Cj!>ELT^%4FRpoPV03UC4zeR>wy^4BHfejvd;8EPpyXu@)6nFr?>zN&M? zSUiM)U~8B*NH-%R!#obrCjZ7SXzcVY#JHa?#E_m}@W%P+)b~3QzqT*tbTlXXeMo__hCPz%QLzA`j7AK5?VpqQbv1YRs4+&);V6G3MaV z%JBc*^Jm*x!~9Or=liUNXX!-@a>>uXTi8tg|1RiuvW9>egk`Mu8X5+tqd%~D`jwh8 zQ8ycg=PoAr12InIo+TB4g7_qpA8MuY6)5X!lAC!zd2vQ&+J=LpgRw!I@Ip`d#px1;@)T1eH%~H)HQtjzV z8LrZ#HHsyLiOS-4Ub7r{*3Ln4L%q@FAlcr?j;lpdW^!$}2dRq%bF|^^khAaB{{kVs zSsjReuK~g`?^Op!4V?d6I4j*PueEasZ_+5OilFDdi;=v<)tJ)8d_<6qW5(ia0c31Q zfzjy*E$+Wb+)794Q(L7=&U~#{leemJGAyU&F>?k1zWjE$Cz$B#DgR@m?iN?Vl`e@y zgG4#i5i1_K@!Iox9_T3Sawy(*B0$w=N`9cL35AQ9$9=0;EG=&b?FM3d+p(o4xnW{j z=D^XS5(s$00zt-$i};!D_Xh7w=kF2X+JW#>=Z^AcMJDPo)o;-RZxPuQA%uSrEt}O| zC=nLy?87t3R#f)r1;K5xbR*AVVbc=Xi{fZr9$M2sZ*w#cV^Lq<~}sSrdXtrVA5*e_M}AC3TYLJubt9-{GH z{2kjr-EB>_U)1N%@Gp8niF~l&q5c^7m|(D$`#M=Frn}@%-T)cZ&qL5w5#4t><`oX8 z79Z^MXyDQ6EyJ^}LGJGEc5%$s7mlnrk$FP}6unnl!t2<4F*}UIuReNW4=T|#JTO+3S(FEyR+h!d0Z+wZCbrJ{7A9unG z!MX=I*jUNKVH+D#t267GZwkmU;2L^(=dl_gRL}r&o@rZ5+-Z|L5Tc)F3hf(cUHIAW z(?9QTR-?qn6At{@gwMAFuS}1bGF>)@_u6f`WJ|sBLh~ISga?$pi@&Bo| z?>li?R!m|0tso{@L?jtUcvx0?^ZhbJHKT0S^&p_Ax>^3`{q2-k)M&TSeQr$F$E$zr zc!^KOJ$bdIdS>^qsiVn!7L&`{@C31O__0p!gN1aJX zz6yCaK~hK@+jTIaQ;EQ4p2je@qS=6{Rua4acI|X(Yd*m%NP>oYqbun3crxgrV^vG7 ztGQJ*t$3h&CQjZ2od;i;ouwNqst<1~Oe!I;K0z9Y$r!Fc!DVxP^t&fUbz2fBpdi|_ zO1jA)UGbNH!8mN{5@pY?(hmLaJQ`;r>y;Oh@*az{eG|7dFlVQ~SU%h+oxL*8+&3pr zGB*Ja_aXG%Ie$-Z)I5cmC&dOuPxj6ft^OeD~R-Hi)QulefmrxjF zZQUQVw3QvEx9~z`LdT{`@R6e47!9^yEQ>0ljU^iEVLua!%bdgx|HV~2U$_`_-iTT# z`0hr##K0&N9$LC-5UY%v{oN2b_43|e$6MLq$dEPiJS(ddl@+ZVg8GHa2TLF~(PKCe7c9;FG}w?kgZ6das4cS-+%7GIoaqFjZRY zzuBqhIh_H};wPtlt9x+}J$R5T1AY+KAyAKQ%iX z44l8Vq`m2!*%nE#{AKxhA^ci73-bQ&QN4NX%@~~pXcJ*v%ul4`?X=xNj_bsIzsNVO z;o0;1^ZOawt_SR`iPua@5Y{uj#qHdlbw}PmXUN#u{yuXmC2BY(Np?B6qPXcJJzOZA 
zCcPl@6W8?%P_|i`$G4_xkuILhl5^W%g+2a*Bk-%vpN20TpHVz45mk9!nLAxkWJ4Yz z)%8;v+VLr0OLg`K-Ir%hT-$d2EW&^P@m@jJZId16yl;;atU40mq-14s|72dNJ`aN&zz z&6Qo#pRw2$wmwIz9h^2lq;qKw6Fn(X0<7*=2k$wJSWfB)GY z-5O!D3eMaON@u{vQ_uVk&!>Q*(l($j<@_Ux4uGlf&t2=@K$s{Lat4sw;8Dl0E1IDq z2m$~PNwq{8W#SEG=RxH&S=Kf^i0f3VY<6yZe0-~{yDZQcj(5tH>6qx)>T4Sk#7(uG zt4=zSe(b9PSR61>f{R7u5P;KWv@A)wdFjUKUcQQ+Z$poGR@`w;!Ni)7Gy0GuiU}Pr z$51s5c)AXRLe(@Fja1p{()F8`}3CMmU-e^;V5!X}gNw3|0P5YR|1)=G_5WXDBxfP7_ z*;48os-0zKS0PZ2xfAOZU=Hp@VYp9!v8B_7SQ9EA$6a^hR!r2k6b5d-S{@^IOC+C)h_K? zjk3BvzSVN{%wufV$y4=iOvvSpinzfl78HvonjW|I-6ld+-q4plWw)+Y$1V*<69cs!X z;=mGjx_dn()|NLw_;k8TLj&Az7bHT3CPi}}*z*mxh@mRuGszaje79hbhbeD4;>GwhK8%B!*kCM2+$XDYS#>dD0C5R0ZFrNC%s{jT&Pf9Ot zvHL7VIvVw1XYj;D;u;o^IyA7JurmP(clA$7(KV+IpZnk@di!~OrA(YGKmc8>`A#qx zviI&2t?(WRJe%8o>7YO$G=c5_uK6WU9`X2J+;T5b?Pe%e6ap3;hF4f8%BUrR+A8;j z$o{QsfgG>mzt6kU035pTU;d))=7lPJ$e(4oM3A&h&ZD&^Qg;S;RmT-DbT#zLuy_Ii z8+*;a8;yQL+7+#KUqX9PE~iponOD8>x*RkMvjhFgpO3)|+V*VRe?zqK5DheZe<@ro z-qioA`pT`I3inOrtR?olc(!$k2k=BZ+X3M+_$&-1#)P+!-;H>4zS8CkT68g00`5o} zssN!1Wj3nlD*>R&R@&!KM#+M&*VH{&`^koX>l2>AEeZY+b8+mQxJEQ++64fFGeWrs zg*u`LwTQM{Q-yI91UJ5#a&_w;%q|B>tx^EwYIl0Sy1PUH zo%Ei3ah&*D+EINr=KFtM(Bq#iH4B6$&24|)b~Fe5sUHe9A; zrPzSO-!SC2xXqqcoT2-dLu8NLCwG)X1fc>%qx~(eoGo3 z$~79H>X2Adnrkw@g>fhYT6H{_ao)hq_XLd1)2)=qCh%1jfpAdv6YefEiCcmXUrWqp zk))5Uin73)Nx>h*PYyu&6!vH@q?mU(!=yl!FEOL=MHs-U5HFM>5$`|C@PQZZWXx-kfpKLoq-8KYy9o4D$lzW{~RuBWqttqIFuze0*X=Fa0-V3qL#8zx0%}=_bv^Vn)4t zpDW-0D&Yq5N^~a7sBzKxapW$jQnL2fX)_KlpIt=GVK8xr#uV`E)i0JkpS)RZ70-_) z%kj9)cw*&KMZihcwhukQ+??@YPiPlXweB1wR{|BHzo@0LliXFxwbY_x19sC4 z@2g`$wT4~qkuS)S!q~&P?rqTt-J3U4^P|mHa8Q&?$_J!LPady@e~V|M$0y%}XAlM|x>CO@nW2B^cUd;MN!< z<-&L6_~-(KGdcwFh?dg5eAw4P?c_8|8~ncvCwfTLiNHU!fAum-m2sY@4rrJ#_N{un zIDH5}ldegvv7rO1>ewSGDHL+X!63Y^6e|y5Gh(?YtvC6{Gu(8wMSaqS!-w)&bpA-k zyIRlHpu|P1CVM7zMp3%^^iwwr1P)c#mo`eOb*H!$2%;aBDgeuI*DB1Ijn#3+x<8M8 zW2IPFqFG_a&=~PBr5uOn?I9F)As$~_^$wNIbLXOie#&y>hKSMj*6blzn?4uRY~iiP z^LtlPlvX*VRkiE76?Bua;ehO{fnFOQ;$VG?YTtUr(u+a@L`p(J)f?w4Tpe}GdO*$f 
zmrzYRMroZwWG%7tl`q=PE2LEVFgoi-0`y-4M#Bvq6qc3R{D?dZBJAwi8lJg0Eulw* z9!q9C$R&MZcrQ1yx+#F~mu4?u&;7cU%nnBo+}wes7VD){MfmF3@P^e7i~)*c`Wdx)nR`R2>k$s=nD#F4Q~@YS*XU;%9^%B_ zQ2YHC7kMa0U+8~S)&G53EWmh^DevGVxG;jgAWiPaDSsMqtqcOPYN48ri?5aDxu*R` zgdTfR_4ioy*Ii{&oFp>l_FO22Zd*$nJVFZdrG`D93Fy+fG1;tp;z_5k{5VqU-(c8)*A3-GOYzZvevA`MDh2b~ zq!cg@&b^PAg?0)O9bxwRqekzKbZzeHuMIUHEGe0jbSGAHfPDUrss_Dz|>-5rEQ@RV@%y6iSx|BtKDb@6(le zo&6FnH(&a6Gi!sT%h9omQekoOPDmPv#EID4G`41mlEN3*bzbec@`cK4cqe-P5+~(2 zJ-FDkDp!-G8opv^6(t1=;i-!FU%GPr-Fr$p;vkfs50ZAk$(?qRTTbl?2~u4K#@zh{ zoZ=`?mtu$XzX(YdbZnKy5&!cy-BZ2yVOA$GFzL4dtLSXy)02`;TVe*%6yNE2;TD zVc6)#q%w%J5(qT7{jx*$_D+)sj8>`|yd!zlm%R7}IR1$08pl)sKCRtVd_0Nfi7X!u zYqoD9%V1t;n%fE zWFS6%=7@7a%Z<_o+k@FlqST%|;@8ZuILZ<95K_S$}698$qw(+hi-$78Fjdm9&T z5Qs98m53Yx%^bGvLq+h~1%iE=gE$7MMLw8tL0$LEu0T)uuf;M}Lnp7tQV$uDqg1bE zLGk|dTnn#}uO}!$K0$n~=w*pvvqdGy&HVf6uUDp!{73QEy4IIzsmI}07qZodFg@q0 zP>$DD>d%Dq**USrUBf)F57k@_DD3B2EN~4^D;$^xO%0evlflvrxG(JUzYa3Eyt6q}!C|2$NoWEUDx` z_-p?HCWy|K_>GX&Cmldrn5b|>#3E#5PVgN&E$eDa&&M;zzinx|lcp0n?|>rSb7t|< zsH@g~u%wSVd2R#J@iMgeF!Zr@l!|+tG6*`cz42HSKj!&=Es_F%t`U}O0H}GXOwE%N27s!j=Flj+4785Mx<(YQ&qvJx9Ui4q5 zgRb-4l619yT@LSE`TX&iKZmg{!Y%rNNbKCzUL{7psE2W*`QDPIE;?VCGD43fKEV)^ zDr?RMowzEeB`@ie``_V!)Dv-BkVEOXGD|PsTKVjT5+FYYL zYONQgq7PjdOxfDKg34eSJQ-=<57h8$TJ1f=c@c`#EG+EtZQ}Q4gN2cn%nqqUX`3dH25V`w-DN{(4qE87C~u+xWUEchRrE%I6eUZe^?YI}lPs zo9VutLn9Ur^yWR%MI3BJ%LMz-EisH!qki8xj2v>KOATu=vvOAag#BN?rT%TMiHi;x z6Os#6zYr&)+d%s+GN+G`dTbXJ_xYQqeUFOjIJWzHIloU-i0k0h$DO&m(-VWSaH0;u zM4wRyn_&IH&zk^>tmpe8GR6~k>g8yXo?^cZ_M=7-J_pb0v^IWV8G7!xKZ!RNOrM}2I z(w8gS$!UPI(wI4Kl5?tx@JxL2#@@o(xY17HUtIP_)g~86c`PeLO2I?~W@7uC zDlJHXAjf)GLW=LVet$E0u?)+7My+vjcIJj>X{}#e=m;XF*MyASxcm_2s6j0G2@TZ` zzmN=`1Gw*5^jjw<+e;qIoc=Hj*0P7QUFm5I`Mc{0bx=sL{Cv1K8jn7}7M!k?L zR&$3P-|asie-DzN!o;a>UU#NZ5gjoGSez-U{g9o2fHAbN8Bx{8F)b>;T~E`gpjw3! zo~(Ah?f-jf=%82a*70_(N>9cup!9m$Uf;Gx6*r%%C^kU78%=}^tDz$&>I2Wi;NFTl zYjDD!Je`4HwRRQvn4Py)F-6v;-YKx@rmH}quMUo zO^MTTe`{SC-@mI|2#6{BiNZ24(B0;VR)?g;s_Gv?Q6&coVuOE3*|g{#1g?(8m%%H? 
zz5(WQbd#kNm+wPPrh<*v`0AI8*n`0W(UolDY@ZMotvh6X#038MQCJ}vgof0CAg>L> z1Z&1`;tR&T1+L^7rZolipi!T~MolnL z|1hKHUEja<_@LDiG|v(e6UyKd+}jh=e!o{LBxReuWSPMe52{&L<%}ocNA-M36vm}w zjL282PoQ;^$7Vk%u~j@FDpI9N(|pcA;or{_uV><*8qi%LoRkTPRwgC@PZ+uO&SfXr+jF|m67CQpzw*?tx99Enx%@?R+ugKbX z0H!KcA=Fc`1zLrDY^0BZs7NYLn40KMAZeUm!E$BnNoiy5{|AHo-V2geFu%&4jubKM z&7_6>!vw5Pk?x$T8w8hZ>aL=`;ok8#4}_m-N!R59M=1=)MeiK zm@*LnwGjoVvviBA8rQTrRIE!=nD9q^?LY9=qnak5OCGGAl`NS0ME*#hb66bp7n)oh0c@q0x}F^Pa+zA{%!+`F!Vh$5Yrq*w8WCwmtKrLPJu$o1jlFA^%fslG#^A1k$iT{J2&mqQLWHR-bEs z95gKdA~~AQIprN&+UH#IcdK~w);phurOlxbVRk;5gSY2FbSR&_RtuB`eQ4|{v)z{qwJ>DREbaz*wUc6z6I2woiSGp6aAwWlPW1nU2nfSb zsh_la&PmZlP-c3&(-yf^+%}ZvR}rACb*zg@9yg}u7n-1N;lZ9DjUo$=-<;q^T@Ya~ z92lRjz`iIj@uH~ln_8eOGUW)n_#RHn6H?G=nG%vnPNv`v73!Kj$lH-%T`E^`+@A@` zbBH`1p|-Px-rPB3{Fw zS9_2tO^}p{<%Zw8uz*p8;3Y=LgOy`k8I6<*eDClOx?+@{DQg4Ig*#%Me6yJ(ZwhC8 z=yXgQ)1}}nDK~Wpz}8bc#qEXgXH5HcQ*?lSKd>+~=KaDE-CX&CQmpei8sxd(c4IaE z93E*T9C-fRet{?b>hlK|Zs55sYQHIJ^G1PB?ARDwGzQLQm(LBenTavYy7B3dRhmPd z1>Vc(U9PfSXPen9B&Rc{M&*vE797-FpOKW(8%w<<0%wE>H#Sh!e~{BnZ5W6-lEOwT z)}$6eRQZof?7z>45p`YEt@ezhAo`@V2bH70rzaSzkwa`7WB`FUy9EALnUbn1teaI| zo7SI2AX(#j3u=z=o|I73o94oa{M`e0#m-_zctmgl#o*9%iiIb$_div@!w zDLKRB%2shx;N)Sby2UT|C+rx5uca}n>_jpuBl0DfCkJSg%_`kz^in%FpEYASzM~0J zD5l_~a65lC+s-sr8L?twBS|mTMN{UDPp|NGcC_1#*JY~k8Q6@ROP;EU{fHS%&{03i z6dL_)Ad1p5qn=iCnc$}Sbs3&SfC&HGCI{il+#hCq6&7x7XWrmIOA%Y8=7JNz>%*O& z05QcPJ}KX;4*Ih~i9zcx;@J*Fwn6nXHBZAhdsBJtL4u{^%QUXEo+Dd=JRJCmlGx7qWUC8s9d25r{zMbmS&1gSxl5sp zf$T+5@rPrBoarAXu+DK<2e~sWHjJRymwyu>Ny9t7wB+rf`plH{OhCgbkBQ@6G9=}* zj+&L{$VU-7(2`RNIivNGE(#Dx8?HU=g_n4zM4fdU=C3SX-j#=K=+7;gnVtA`;3W&#;zn@=}#Ruq4 zp$8Mo3LAIv7Tmuad59fdFZ*FTL+?JtAJ2>3O-LQ&2E9xwXQ+0mVWydCAg}>)+f(V; z?tKG9%fOu~!*)n2g^jv{8cMnoTQ?|=g?*i@?6gsWSt7pP7+M!N*2Cd;iP~PZjaU!6 zJ)dG>o8Up$fs^&allX{-iE;ykeYx5>zc}gP6@egU*FS+p%n`ddNi+FR{X{&CHw&z+ zOQoKBBSGiFFE7Gmi5G`6Da&I~wFzP0Jqy%=auhw!g666}DxaoznKcW)aCqYf%aI$V zaovcW*bDxG(lUOjOryVR^$3~Lw^DmbaZ%i~pzGBuxu%P$>uQpmJvnZGm?L)7h}sb; 
zvkT|%5na%6=o}R@%$0m)+`#^}?1n+5fVU6`-)jpHV^>k(g)1>BNj`3OPS61_pjSim zfx=?`ehudx{fFHh)MqQ=K}V$5xwLVgqy$?6Hz8_g3#hBl1jnD#dQ8h;nL>f;a@yzP zW4A^GT$3lkxaz%z08^|H0R3S;7SGAdq}sF;vJr`J-MYfHNjOqcs2+1VFmL%WGTlY5 zh!vHSLc@V*O9pmyX~xZ|T>prD4i0&nl=k1E_@CojRK>67{^egiVPnHXK5R?YonHRe zK34Y+L;jX6qamRjSU~(6t_JebPWz8O*`^H5Pl6PqHz@M##MeI+O^;QyJXBOh%qSwc zLW@rqAqUn>Y}NB~2$G6S%Ie<)E1cy1DoHrMVVy&Iw91k6c1SM5U+K(kw4o{rO)EL^ zDi}J-rBBk`Nd8VE7_GgXOi^>wnEPEsTJ@BYhGhKCp1Q58V^oz$)PVN-`*8nNR;F&X zXUCL0wVP*!BK3|xV)k01>e|L^U6Xw8wXo=pyEJUFD8f;l8?8Y1+wb$ZyKX@C#|*(+ zo&GaBPKqdw9Uv2kW5+qfg38Jcnls((1tzSF8slVe9pCO2mPZ0VcFo{F;F2>l(% z==Q;DcPuIsD-t(*#W0{2KraWA}F+Y;xgqexPBB3~5+wMl`sULJXwv_zz1 zvRp4x!r=LBOI124?D1m3WC(yl8T2sc2nv200yh$l37e_~%BT~EcrSRg-y?-zcy$Q4OiPh(?#BD5UZHestYdG8qfU84 z+14LNFJOCk%IFoeP_2iN+5?0BC^$H&J=cT}U#>=K?Wk=v)QXu;QM5zzoyzVg-8A{( z5KTQ z!lt($&`-J_!d!RIE3ak;3ve^CYAB^pZ3_!VaCE=`lF1+8xX4f-$}Lx#7|aHNzd$!H ziRrSI%Sahd!)idens_ZyR8XD82a>hh$1?kN$Cf3~B@tab*-F}Gboclg zx0U^|SqDW-`n=SsLn=ynH&1_PBNww55Kp3%T)%HNmOZ| zbl99?zTu@boAH1-l2@%71QvC4$mwToYJh7smEhr!eh%+>xtycGtf#BLN-|RwxBI0u z^{s|c$^XvCSNq&4u3;zGQ_xHVLDKDH7enFx%VpaI1tzu{0eauN$Ww(Wj9ZDqGq39C z*J)LjsL;&kzWIkyE4rDi!n(e2t2RxIh2oBPLi4(u&RvidkMP~0f(*BP%0ShH1cZ30Ps zo=gZrTnhQGaMIL@(U=fv7qS~T(6#aU@Ok5c^>r^GzS=t-o*Cp6@#RqV`#ZvXxzn5J z%QP2&Hk60;^A!OdYO<2#5cbQTcVD?Z0~Jw$z*bD7K~&1^2ViB4ch$Rz3GtiPeH4+T zzn5*e|8l&I`%-I<)n&g>X!itlkcst!zoLJnH4*XHn>4Uu96NAM6aqC66RnbR=oVRp z*V(he3K1(;iYkw&sK395kJ3Dk z^}nJ*iP%(@Fp}I*Zq-D&Iid6-+0=9L&WA6qz#oQ=C@y({)!(_sTJWj>sJ@!|pRWln zdlBda^$StIFqvasFVCf+_DJ_vsGmM)jp!-65TD`zMIKeoFKBoL78$PI+_C7wdVzub z73VO_j&ocMsFlp|k1Tiesps2#*jWbw1A7RHR=L5_nV8G#5U6d?JvJkAG37`^LIfiv zt6S?O5a}`(=_e@+R`{`-6{ywt=NH9PntjHyMP3%@cV?4a$P-Zle+|XJGqLRq>pRKd zW1RAJk!JFoW$N5z%1W-Pp{}5@K(M`5#YdJRE+@t2k1{Ekga9ST5{Ufe2ISDMs5K)0 zL*cUaphkEEc2isR>~fN3=1qUCI*fWq+eN$e3OOnc@)JJxRw{EwE5mzt{Q7(Wb-E#~ z5hBf?bgvhD{w?hC1%YdQEx|dkvtDYIYPA*@`Nf3_j29M83qAN#Iw>^srGIbFb}Lk( zc60kA=`LUU#J9RO7)PZ=FgUB>cnl@(&1;DoDJ#Qj=sQ!j9>u-xqx-bD@TM9E5|Ro4 
z-EZa|VonsaOem}EJ%jlvUU+@Yu~x9X)JLMcjS`!d=?y9Sp48}@uU0pzR6_NGo8c?3 zFNuLaQaw>eGp~~Q{pH;ccmXE|w=2_T&pYB50v>kcWk97xM+}T}fp|I3QsY<^+a@+r+e6yl;o%(vF z8JI5W<(lxd;6H@v|32rFAhOiYGci#k;<;g-ULNzPb-FmAmNzadOou+jD*{?`ypdUJ zrkZ#ybnJOQ!^hJ2S>Wr9$13(kSBJC=BtlCKJjqCkhg{voXuRPKh~KM#J!4U*J(^l? zUt|Dlc1L7#fN+gnT=%e@hgD=@(XFPe^xEC~_&K`xV2OgLN+!8UjHZo}iaI=eR5q zaZB3OgGduEV@@qZT=f$HBdPUDKpgYAXUP*GQ&GC^fS(hN9k zR)r$8|K^-=)sUZF5CdK%vLUt5`%`DNi(y#AAIt$%Q%59i`wt$0i4R2Y7H0CyKRA}W zd!v0%`yY^1PwEi#nA}7w`yyFsg^rX77d|^&$eR!tj?kh6Uj`;)ZAM8)OpH=r-qsDd za7hfZ2Qm@bFsL!;Vko+iu{!Dy{h|sIH3!d5VJrym`b=+qtT^iDcC79kB(W#Nay9LK zj_L*0R!=!gqkIyfwos3Zwn7fU#vRzGuR*maTJ0nj=Nd#j@f`5mju2Gv)2wR~(PH*0 zL1E=3*60A2o|mDp@jAmq7gP8erz4`2?m@7%3oo{fB(t7>B7_k8cJ7b62yG*A4aE$5 zNXn||xL&qMdy4}xM2VPnT>3v9^2q=)t}aA8Z%I(hSBol=alR2*WR+|LKRjtVCuj@K zY2}-6bD3DrvTAfXE$4_BVl75yGd9V}a&vm<+z*%+!-WngxgpeIZ36^V;!jI~MPt>2 z>p|ACI`)>V9ca%M*i+6(SmLDEXkGznTkQ9Tm^Gup)U$?Sk2_A_!Z^|Hs4x)){9};(C1&!;^8-V&S)7ccW%k3vNw9(H{|`#du|V{tA{Z*5fS(RMuQ83HksX z08ZF;@6E~VGii=vJdsb#gr?hxtUE%(u3h7|>#G6n+p3nVdFM<`R4^aBJ2F5s0OG7- zRax~K$a#??7J|c{uv9qaiRy4d_smN!PP9H6i(5Q1{EXnvcbLL-wRx)g*|7nb+CwFfDd;J z&;Cp*`yWFS4T#*h+%9v_V+yBy{M}SCivdZxT9$1^$&!oB^@gX2uAkAXkxS+w`lh~O zpWpRh>r+U$y;&wn>=|@pHc@}+8+`3lx9wTnzFh7CsHBmwmXqVG2|3;z?^W)fpD!}s zwELPnabHWoVjC5`KHuG+t=Mm}AFbk(Uz%LyXbK?;zH?+?TtUgKk}R`UeLKaWJWOCh z@sU>ZPa9RFS^+q9wcM9C{|*82@a_$94gSWQ2KD?bv(Ef%3^5y^SULGC_r~kZg>NdK z6)g(C6<&Yr7phvjpviB57DIfP(s=#$kG{Q2oUXQ{3>133eMNIrrZ0 z{f51EcXf4j75^F!Jm*D)zoGCY3!>5=y&a5LWoNa~i#x(ya^I`Sven>E@#wY}63Xm< zh1YJ2-r7G>dkEY>QbV|oj3Y+^D|a5pf6rG zlUOgpOFq>sf&AQaG*OE3M(Lu#K%i&UZCl-`_@mPD`^&R=^Hx7cs9+-V?^-HHKf;I8 zpj~eKxu15UndW(n)6^{J1yVydT((NWDVMR^-J)pWtF|%t4w2l=6nb18Iq&U2G8Ej7 z^sm8s)nCzskd>nLHvXZVVyA{ktPk7}qttkqF?c1YO&?Ck2%HfUP;W^2W^!!&{Q6C@ zT4yKD&UtXdRumoFksX!mF@th(gru$22KsL!@)=VCSKpqZgd4BNQp6+as~rbMUl>mHlO~fm|MjN zoE6LAm{q$!3?KFt&a;@?Oc1TAEjqp%VG)g`hwj6OFY#$u5_4z?oCU3p-O6Jd<~m@{ zZQf2r!;LL*6&1dP03%1pAx!_)r2c52D|(lUq?3fdB~Ka%$I6%pnn>?M+8|_cOH|sC 
zZ_F?XsE6qh(HKzwcHUZ#9fosaU6lKV(p;DHUc?PiLU%#?mon9XA66%T$rl*SW+_J~ zG{S}4A0K3^N0Ag-dKqOYoR$PN2F}ENIS;X?co zQGZQa+S9)6nvjsicBk{vfH@7s$6qubz~WJtsjc45U$A%`khRq}dzw?+o>+aVWWDZT z8$u2!dT3dg%V%Ptp?6iaO(gp7gQt~`$M?@@#I{!jx~PN~s)}PkQ%+m#F|<{=^1B=V zqJ4~K#rPP`3TT8GLM|bk6r0ESJe_#p?Z^)J3Fk>UiTYVE_KU46fUfrikEho@3JDqq zX-6KtaqOM9*%N4g6x+VJ<=DeXPlYm#pDL9$`m`crVy5&n)4B*Pi?ukDHoFTwI-D2Q zbfS``iPmwgZPf*uT~m3At*rJnhvZgP9g&A^rTvrq($zb@FkJdXpM|&)0I-OExZ~^1H=t z@fK57uBq2tY_69j2_Yu;9jitucAVnuzBD5ABPNiUePTvh@Q2~L)%l+9^vj*DCr}N5 zh0l*ZeV*K9bguo$YhG{;QPv-~_FAa!z!^P}X9T_sWBTsszy}!wC)oF#d5R8paCWvd z?9Jc=CRqjToK|AkU^gm>PZY$CxA!VnB5Q*-oDY3TiC(ROIroZ{(*FC=My?OkY(Q3u zVP4t9^gg`m3o7Dj%Qj?xVfUk7UKK<>U_CX{7yGIAdd2fxf6mdUx_(S@xM4kJKvG&1 z_@#Bnzbc*NENk>Cyj%EUr2@u@pA>1ZfggpYe(A4~2FVaU?W#&p6lQ09dHnkr&pUyw z<-yeFcO)NqsA3H!jrQbCmK9CT3d%~;j;E~63zQ$r;WwPSVK8uVe3xvQ?$?mua7!}$ zyqMkJ*)=$C32+uMi9Pt2B3nGZBNGo9Wt+$35MCfV$SA((4e<5N9~p}<*Ah8ZJPsK~ z4lL5i#5q+38acQoeC0Bq@XrP1RWV3KF|KS#nbprVu&B!xva^OvxWuoPMvJYay;_%}$K6I< zIk%nmV=gX?s({+9I&=ruy@UK;4i2LXxv!P5w8S2XEIT9%`8l+d7;cn4s-&D79ZbcL zbXcY6a^x!D($+%J4^oNK3KJ4ghL8=wuzYtysN`514X1U}AMvvQRSi#(5`1^=B>k!T zqs)1QS4lagf&X{QN)Es!DU`yogu}F99qv`_a26#=psCzdzFxPj?YKmYOzt2XpZZUc z_*Bn;o>c_C!A^gLx@kUw%&BKHDqhkQtJPy6p%{XaJqlF_(w_%=T`byso?U!9xtzKw zjG}ARs3jj*62mffIodFTIF!_;?ogaW&Al8j->dy7_VGzwEe~TQ@UCghnx2^MnArXG z^C3KXU|;JqQGQUUK|;wTh*iK$IW9MTuf5~whhN79y*RAGxak?p8>G3?b& zMZs`81I8(H>HBFjUq&2i2rZU$LIKXi3Ljy_#*@_-SsZi0BQ}ZISEkUm{huF#1-LJ* z)ii;o>2Hi%0niWH!d~D@8X-QbYKVYEo!kD50W@&}3`KDg`N-1{j_Y0)itu-gZ2s_r z1&%(|Yz`E>Bm#D#hU6Z$u*VSyewSCQucnC7Oz#AWpc7#^U zl=_~f#q3K7&&>pVNo=-5=GVqCTlpwMl18XQ{A!D97u`mPg&u4g(O^2!lha(t%+VDm zEbu5?yJ;6R$oCK^r6YUMj^t_G;>c7-E$V37f~5*fsU6Lm>B&{QMA0SCgxI&q^hP0X zgFXKca{4afhqy`usrh*UEV`I{j3T6XlQ{tZ@SD8%BH}h#@S5>E4d7**LGgDl=RizZ z{TGpMSDox_jwCo7O3Ja3fRmt8i)M-I%1E|X7|cq)m?V!bReZg*>QgHnY^|GKv0P;tpR4COr zG=Y?_aAu;BJW^zqCT?Czb@h`JOB&8wWPVCYBDeUTdoh)Sw}to3#i$NT;{0=OQc={b zf|VyUQc~}*pDiy^W_(wPVJ;g>eJy>74`M^~h%kr46WaBZ+;mA{=$md+Dg_%wIQ?mV 
z71IM8GOF5?E5&>K7SsNvka8b35T!zxw4DC*ek`kkosk0a1$ol&p@Vpmzfd=zArV&A zs7um6*5&;2F&ymL8okZLw`!fG?kMqx8zhwb^o!vX|5qb=!5%Sh!Q@oslcNj&{;zl( za`i(NI=FS^&;tdJ4Dq4Hf`6>CxM{o;9TPbACn}Ea>?L`L4hr~t#hz6R-mYh%TFPjE z2T~74x9`K<9wX9G+Q(J$5^`-mNFdS|1nnfG)toboa#bK9{xQ=;t~?s=VjPLDF_PN* zO7s$^5O0lH05$6;G%WIHvW#{vXcIotW-$-COs2PYD&cq1lV|Zj%4Yy zWOgH_+QcYTTujxxE1fE)>R88C%r=*Ii zrEU@?qW7hmWW%a@x3O6vexN77?TQrD7m*$oW^9Xf(!iS|PtVw9^|vXuG-nfYbz;`# zO!yLuj>KjyHmkkGPIlKt=6d;K*>8c&Q=Hh7cmA`?9I(Q=ztolQ;B@t9VC)Im3P~(J z!wa+4lgHR{pao!#cf5lAIV-WcSYfJdwi9i*V9fPVWJ;Mca%gj-J)0S*M7Hi&(b~BGJ8ZNk?yDonyLz~?QkUrf@hi{B|Gjqk_-=jHMHKE) zefU|*m|#-%iePd@bWqcM8HXB{g=!u%HVm$P{StvEuHhe{OIvZfVu;aT^H_S3+{8V- zUCbN%=X@p!YojviS{)wFJ$6Ey!MkF)h)nw_t{_h7@Vf{sD?3mCl~R=w6n3r3h;?7T z5f?AWvi*zNs2Pj;n11s6I|`jrRb9T?21K-WI#XJe1Uypm-iz_gKN*kWZ#p&%%ceg4 znuENTg}hgYYoc4?z*2Y_MzgAedU+NlJp*M|;kC#0%_u8rz&A1WgRQV*=LFihUqdh% z$p9yy@*3Kt+QO}C7r^?Y>~e*ZmY76&hltf1g?eb=N}tFu$^-V2_4N_rn{Nd-%4>^GNgMWGF`jZ%8rLSQbeQYJKi*9&6D&&cdr!+or`ZZ*=pfp4p-^x}i+>PTgiql&0vv8iR85AgXx~%T zBZKBBM8KjCe~YWv+Ibt)PWb%%v^+Ln5E*!QGKuq%L1|G&=I0K%zPep@Dd8G1X^R52RpGe^(^5$& zjG>PW%{QdflmL$94+Bg)2Z=y?=hWjG5}{Qd3w(Cx_o)fGk1L!DL9$k%_`D$*_KMPY zd-@V1hi33O^~4}>5H=PLssoxol1y?Qz*zj3Thz`AQWLo*cR~W~tKeMRIH>=FhZK5h zNuVo&vtoZIkJ`jXFIBUjxT;7) z7TOVq)CigfQDc>cC`Q=am9nMb3=eeLl$wy~o8{)sg&7P*X-Qv#!ThV44=|3>}WZ+X$4D>mm4eukbn7wmyb!A%>gYPV&L1R$|9ExlS` z^<3kJPGs^#ME?B@llN@g5#Y82?zf3{cfSM-kc_AN*9$HO&qWf?MQ1Sw*qXk9V|?d0 zfF!M$eBPB5i}?=Co@&0~?%&wR+;$F%^j?JXDZXKNSn{K}fAXs7%qn9CagY#=Rj^!% z!A%!v-8)s-k{6ZWKwE3Om9XQo-{S6Gwwo9|m6oBR_C}s@UC5C*plLE0-rH#KxS>9f zoAUtL?f#3n{tLY}+1BqxhRlI`5b}#a9}&tJM^bSmw9b9y{w6n?QhWNXMQtCvdGXW%WyjR zv9{#XvoJyqu1(J-;9^VYGY|F198djDdvGUOnW3YQ??hS=Kt38R{gdzAotXYlJo^YE zRer~{)&PgbcZR$?@(!DTD*-rqn3D`cT>1zwxcrJsPr(2CS=+iRsWvA7Qe<*%+=Wp6 z<%xxqYB}3WN7v}v2h)AhEUm+Ndx69svkscV~oiKn}3LQJ~C%Ej36IM zRc1`H*I|lm(c+upSAP*^Njk%O5+6j={%9Jdw8U_@-0?;$?DTmN_V15USX&)}9I(?2 zA>irv6W`pS%0ZYe|I_Ax*1p#*_k2mkQb#&2tEIG7bS~J*E~&itTVk~-ie&;@4-7D? 
z*$Xph0MigwTJjrmErG5fT-p4?k|O@WYWfj~^!Nc=|KLg>M>si7u)*thy2K000u8GEP<2 z9{KF^!?NJ=g;`I)wq0e-nYI&kezo*P1WO2ND^Ah~_x@B_VEgN{cp zD8M)MviM76J}+6r42`5vbDku^_@A6U%2#9i>-xto_Y1dnfxcu|=&Qo`*{~xcLvl%5 zDZ(S!mC?)Jeh4ph62$$LLFxqbHDts?)ZC!=+;_k>%;(Ui{P;>02qrU<{~@E?S|M0h z?ua%#5m;}Y!R%0&B93st{*XX!VP2s_V539&?KW4Aj_Kqf9b{FbiDALAct-dIJUS4e z+5e+=u10voID*d_bDR)P}N}chyZ| zL5sAU*az>m+c*~C$W^LCqxvRx$`X-lsF|YR9Wm0o%vH$v%{?$I6W0QRMDv$3 z#Z%Fmx2`_^ziUQ0rtnV<9}b4GrhN5f-m~8qddk$&dUetM2EK0=t#3cW(aQIJajZ%N zumTBKJ-ifj{go7|2KD8rp z@wmkeS=?t9vbmz2OUmNgUh03fr}ZUJaO^_8^k4GrurFA!_ba1FJ0aOT!C9nI(tVV}F=PUcQ3#}A zhrGu?js|zQXk>6A#tlz0hz&qD^q;Cebp3jvshQ2ng*G4L$od#Gc@CS={1D zf&)<%MC2aYMy)uHq8Nb1geYZACNf_Ne>`{9P%X>9K05)!`5&U0R-V$4=JD~z)K~dl zk=2%%24`Tzi}wjr?erKxPI+89)+u%VcHaSrH{p>)a81%7f_%YbIXN~|z@x}v%=b;Y zyB+t|S|*&v(Mi299GyxPMD?MB=~sg9UwA-m^*F~;BVFB*lc4u6Li!fvD!b+lNXV1# zyCuW*6ld4F`^)J&kJ;sbp7x+}%}w(|wxmJfrHkZa_>6Xl_HnkkNqz6iu!r7SypdW` zdPVyl)cx82=02KeaJazxSHi$`8WZO|#GX2Pbf53O1kgI*i34Qely+p=NzikzryW9_dLpn51Npcx_rCRs`wYf zRn7KV4xtHN*|onq7>`olLJyl}T{fxk9CjX`B{<=;U* zc*r_?fAs@eocAd<(zO@(m?Ybgi!)fnEB}|hqio8$xfOh0dS`t5*Y|jlDNtSxZ{!^{ zgJtt<^6=`BQ|{=@M-*;pj0$e%82UMoH1qqhkdBP_3tU^S$raIk1ipJGP9ns0p_}3P zP;Ii-b<;&4U$ATAJn2@76&r+WX+D(aC&X0AL|vI_50?!73H7qx%J+b!wmqt&yFkYt z9E3?6tjNywi!8dE(|AM;^yae)85u3Q4gBC0`4e36+9>XH-Q4Kt#GTv<-*mTlr@_}|558rh!2WZOR0-0b*p z64B36R?l!*?$nh&>W$Z@p1UwxCX-&5&A!?cPn72mN#9%TwhL7T%$HE`pGRw%d=2>G z6T-9g6{)+d(;a-s(}IVQJ`iTtB|UY!I9dA^E7CG{lf~EJDk?DDty|wnrj?lYjxc+h z+}J}!?nV*CbeR~mX=<;u;&+uyKY+6zOL+QT-KJ9K?cNF z9tCuZIX-@xy#yh>@r(EM*e1`HkYFy6%G%6pH(t)ZxZN`C#z_|2tEbRQelr!tD6N>-}=DDiqdUi^nK+HJal|Q7o?!hd0@IetmR$CNeY|b69Tz;x#J`!CQ`2 zQACd#f?KsR1@HLHSaKh|5#Rrv++sD!$~|+SU3BwXsu4utbX=!(Ud=d#THZZTf+XT$ zBl^32IzC$}Wv+DVqxn8)~)0 zecnef0_PC%+Z+&{3GWvTqKN*_zpNOkK%YgkEfcRML$+TDcwl+7P5q=E=KRAmSxLU+ zA-t3bg7-6!WK7D7`j`FI?BIX=%nzMy_PE(5%;rRz`{31SKDFnCE5FyD?!iKz<_LFo zuQ9N8Y>e+dj7q0%rM)}}N+OK><0`spTRm$NgkX*CIf^D*Pr|V5;8Mx(?{>1T7|C1* z7QT?O4W*0li0RCcY*v?FVK5_l@)KGk+Ngh!w)@)nT9}ZWhUu{-Fir6~-FOEFdg{12 
zu=gE@@oOxqy)zbierY1r8$bD%LCXNe-latq;HC0~^mAg29oQ6EU0piy#BuHek0|N< zZQDt!nLh$IGA%TfP6I4=ku;Zl{9!jkUmJ|d>~HeWJ{HT&qk-B)!>ueZ;K4Ba!Q>UE z7a_0tqTEd0BxyB$ME;xC>r^2b36DWP*Q=@?e01G2o1|E-H^R^h`wNy4?5WH7Uy(T$jY*B>=`iS&+YfpI`mcanXQb}n4F&?!6b`D(lZ zj>ms=np)r)O2B`Vc~$Rv{VDk`=;((!=1+r#g1&E^MY*2$&|oe18pIip@iOFSwR91`u6 z1+xFDlwA(zZ9sv5$4@%`C7Tb1jMzi6L-hNyoBv@=br>x!t_Yktw9MgxNh%ywtjlx1 zr`4O1S+}cw7s-QgX}rXYnr|Au-5Xj)k~2q5Y7ZcWP}sXQ9aWfcAyTLQ>i9ZA>_VdB zfAZt!dizBHA2C`iMNglu6i_~xyOK~DV)v$@KrFBBrcJ}E$h|%_PEGE3`P-4sp;$&O zoyy(+tXBY4OJBr~HVKqt2roxZHcldeLsXQq3%b+3XATuEqR1u)`=yNjO@&BdObMwe z40C&i2o!AV=kK~77+e2=Hk=KN!@{I1Kp|6*>p+>HNONorlL;1)~7ZYn}vpT~TaH$#wq@81;ZD)e7s0vaM*$zYL)3ur4 z9OM-?ILbnFoNq54eRY)L&`K~1FX2{?4qZ(u?W9D0Eup%l=;7}lvKf};W!|Gj3J#*)h*&PpDd)vUN4S1A=UzwEC;nt3j9Zr8nE``S@HnS#%@f_8Py<4cVt+ ze*IKlwDg)v%@tH?jnIh)ZGlQ0$zk3uNkTPL?gHDZTX56uwdk(|dW4bqRVvGn;aST( z$?v9%J{5Qv%w-)-^trpb+FI)u4+S1ePky>o1pYzIPm+vYB-8Xr>_(%6i@No%47m@E< zX^YYden5@w{64NnFY(>omAhiAnO=yL83mMNayZ!$X?gZc5Z1A!+V0*|Si=N#taG>q z%0Wq}VI$P1R{A>CX!zIR?RdIX8Q*+YGH9}S+7Egy$?2LeOL%G%pTdcb>wbv{`!UXq zt`W#fNP$~~KZIcd;x%2gHJECB6tU~mVZ@tvmvs3ryGJlYqF;#2VzxQ-_4s7iR~%%n z*7~E<4;{rea+orA{*uLk{m6FdrnNpSiL#+K!LkT?gMNOmIBmb!+(;m&Ud--& zv>k)S{Y_n9xY$EEHzt?yq77YsUiqr5+~Ji%ugq7X1UHU>xFY()?I*$Be!kg}6#7qp!M=+ zhS_4zK7TbkmS2qki-t+PelApF{Yb(kHE}4j8nhHQ z;w+~+U!|##(+ThL7>9t%$dYpwfU%8@oeK5$jCBDTQmCgj!z-`*iU(#OE3MucO=>xF zw-93tGdLglN06~ z={TFPMSy>*nH^fbYAw^T&uFK6FqTd@BDIq+vJf^)Z6T2qaI%xOdA)lfTM-AumbA!$ zD`L^188x5lry3!?lzEhgmb{L(dFIqnw!ArS?{@rJ3U?$xVE=srE|v9vrv2coA@Dd) z3gKe_vUHOq##cvDtan!hSML-odLNDC19AVas;-FzHq1=wdG?Hz!uL2|AS!X{{`?)o z$|EQC*VeoOv6)_yx9~J~OWc(S78rh`n_VB7F_q;<`ECQrM9R}Lodok;zfXvZYy~76 zas{cLcf#ub>Osm<9?&}0uwZ0)8dBoCrQXq5zT5cR zZk>wHq~%+}1qkxTdxHyXTC%^9h+O?kk`gnCWRa#p=c8Mu3LN+-xX1N*LCD8}eWg)9 z9?Fs1q-HEBIC8}=Q@J837zCPp8~d3Pu~YO6uPRDa0)M#c>D8{B`#H;Lxl!p+26UAd zDJ1Lh2+kU#N1UQZ4179U9^Jd5w}aZxA4Uw)-yx!`y0fI~zyQw5fPrNWZu;@BM$g@9 z27EEC!AP_bk~s z{UzYB5b 
zb2u7Ddd4PJkR%9OP13Nr&*L?JNguiK?uus0K%3B=iiMk--a?ZYVXZ7j4MI1`ls9k=M+?H#h==I?CzSLaox+ zVGdiEE_5E^Gxr&eztFaPAv^X>h>~17dkmJxO*yObn_#oAh57 zM8XI<(?Pk5B$>T2V)alglV8Xj|xn@MOBR!)Alx%So3O!mNTDxmPU^HdOm zE3pOT^;=+OSL?J7sgz)M@D;(O&ppRd1oRd~&+rG^#-HK!OY|ggxkGR=JFuxMA z?u9p~LJ&TiuMMM&?j7FZ%LOG0*Q>%%OIf7hN{^CgB=SXM;tGKR?|YwG5S8RN$S_!X zC86`8UP|~CL4Cy9u$bp$xHxSr9Eab8qIr*aX2ZSZrbV2k5@Rlj6&czVOGxZ%?Uz4D zFH1BW#38iepC-@IH3rL)2p3See*2U>A4@ddlOU&tGkBS&)$UX`xU3EqCOLFY_t)*gr;hq?pUDF~k|jt@ zd2JPf!*r+1B(FyM(WdqAtkB!FspHPdvN;cMJJwbty$fQn{F{VfZB&Cm^uL;0TI@%? zO|SvIW|n`;7?caT7W#eVZNJtwXX3w6yrcYg3OA&_wHFVI8gYS-rOD9QeDY@_Z%hs? zJ9@xAYB_HpzMTv3Fmdu&b9nLlm`DUI3J9kYEe-r)+X{@wn>J$5 zU7wZQvC2#R%}ck$##+}&RmKdtxhv&-eT4|!Tc?a-3t@}x_-4F43|HUzzti>(Rq`Zn z@i};mHxe!vHzH}H+K}mmklY;^YtU%wZnZj-~mj(%NqDfp@U?|k2&qMMd!Oie0_XS zRDlKfA66LSv0kmpA1XjXW(6ixaN61Xxf;~@nAMFP$>jGw%RUvBp?( zjv>1Yw!p}mF^3x76n0tW16?$tJ&h{!z1${$(7?Zpk5MF=tdiBnC6VkAUcYV(s^Of1 zxY0o+ZDo@8`+cUquQ>gMVJz1{)JL>4a$+b(apd$q@~^=fH&1%pFr+p~E)Th2D%hz% zg|MIcEfB6P;q-di=ng@uC5zZp2KJ z^sSqM@z&#_`&^N9onC4-+g+WT~YJ>~G`={>gjI@e6l z^(f_sKmH=^=M$X?wRgE_j~M#v(|XH3t_HK|e7`rgNJx#B7>XOUH5OXFR_b3)F~Ahz z#QxKqLZtjF(s((5maij_z+@a5rTw*&h4?0z_h`7p4PoWQ*chXb)?Y>tqA zI0reolrR+vA$3?i)vsdSw*_S&%BpBJyz8YZD3bFXJ=vSQ0vM@0CWsj~f6^z)a#DaEvINPRZ@PfH|EGx}jnxA6}x>YV-v#|b! 
zweOPaR;F7Nm<$Oh4@!CJh_5G|5`{eF!O+8Az75i(70CM5pltl^0pZUTAfwf3N9N{oDyT9BlQfNG(Iul0llrF5UKz;r_q826A;iw^wV?(d(Rh*-dFj+!Doq!0^3jEAthjG5vs+l`ol`@WR8g>Yl* z(SG;e;X8pSQ#=Wk3B(zd&_;@-Fx&rXKW|MCS_1>dsDLTYBSHtv=*egnENx{4Z9WCT zh0N0n^s9gH>$S*}b)y?mkNr@pP)v?Qs6QVKu2gp(4Ls2>GfoHHca}ToteDqI@ z7(e?NNpZ!wErTW*lZGF1wlAgqI6BKQ$o~+2-8ZsTa(|w~5me?**I9JUJ9%rUb~mTU zTW)Pq1=2{DmU^kSBPBAk|KDsib6txw$!D}*O@(5KUcdFS5;B0ze8hhuzv;SgoP5C?F;P1~C0m%B&}NdMjSP>r-EX81~lV{9DXO{S{rx z`l!i`k&nig?F(A-NUPE2o(z_CbY!YreuwI$+!0Sc(`7H1Xf_+3NgPg?$&j z)FyAc^#^<_8Ot9vU48vbU@~AL94xe%ZI9D&QlnuE2qQ4wmR;QNhvOGBjIjNi-RT3p ziRQCnPlwobb%xxX*d`9dK>?X}^>ybv{?OuiTY(lHv6MkM`}TJY_N}02<=Acq;`P1j zjV|Af*$0Y8rer`B5I*TJ%;lLI>A(|CJ%MlJ7$2VqRB5r%5l{vDsD#-Lqf};&)eE!N z1i6SJcUPfjAb7O?Tkop0ZkP%Y!suDHRpdT&!cT<|QKv6Cs{)Zv;HMbny}EWgdk*C7 zM{!AJwS?gFUL=*EI3 z=?F?NKoG2U!6rXXI)qPcg9}5EX7RPX@1eSMne1Ecd^(hj+^tg-es>BpR{mIbTkkqO ztrqNiTJ-IEEtPyKy%^_wic73tzU>Mr*?JEsC|{QBeLJo|zCEbuWS{EGfy7r?aZ)8a*nXIWyasBX@5SUON&%z8jQC?}&*avl&bmylk&~TRVY0~-c zHLvkm_ggG?5ca1mlk5oRTyV=$G`iNU>5NE(C$nS#RW(bncbYk{O26LhD@p77997?l zLKI3gMcNvV49;K{XJ;I80O|j>j*@n4HKifl!)4x*2`U z+rNH<4ELkMHd~kN_H?lRpIx%ZF!|u({SKG> zP3z5mAJ&+R5`SLX*w0LPeJkny=x%ugAlAIqXV9D<^LZvD|D)#FB9|YW564LpLJ6pC z^Oxk|m(Qw1fr-F}NrM%kS(UNzLkpQ9&%?PSi<5}K|M{WS)uv0>`gYR2sY_?-FeA}) z>;PX$(M=jNmbMuXp64PGF$NIPZ zm<$m6ENJ|FH{@MNVao~_MbpSDwQns-RTk`BHcp6sI^_0>x*#{kYi6cuMIFZQvB3s8 zx*fgl{%O)M_xGwl8oc-$T7{6d-5g1F9w{`~IVq>|j@Wf-yQRa7bV(c0$^m@j%kP~8 z>P~fJvj_-;=Cy}~9ANnWXNdamjYEwyKT#etaQ_sv_5Qlp%KVb>J`fU{Kh4P17bEur zYInUwkwU>-#>x_wpy_JTc&8F2HNSsUfoRTkb)HVDiC&yxex$^MmMy>KS=MB7FHW;F zyX?tjk~-{xAsb&MgHBm|;Q-Dku;>VUJvcL)N}#5p!7D6wi+`!Rd;9rBSvV^%x&Lx# z?nlFNX+Nbx)ij$(b+abP8J6OgUzi=#{3O!P17=SeaLV)M#rS#eebNAoZ?49M+=HJ# zS5>t=Z@AqEzGX}9M4ogT5wuFaU%VL0w+Go>|HPK8!3yoW)_>0{vHv}5(q4AeTfyW& z@@K0z=%MN2$=ME1)>@#AdTjzm*j~uLyH09|>7@N(74wk$Y1Dw!OMWd?4xz3CT-(UP zt`t8amom}_aP?h>NmaJ?ESm|D{xq&9ep>OC3jXUDOz4qttX_YDlWq=onDB-Eaj|_1 zJdUH~nEG+gQ$(-o1dx8GP?;_*b_^&oF_fE(XQL-ZY!Jx0YFZ3UX+@G8jO6$fB+9G! 
zHH51^m|{z}4z6t#YtN%Yk}^ineGAijD(%NH#j~dgPKhho-N7{3FBbXhD}?E16ZwLd z;t4lse~~wGO_5bB!qW^qFV`y;(uaTET_%DN`?#N4cuE3L;g{?eS@L#N!+oa(6wv zeRw;I_8*;Vy=ocZe*oqo+xs@vODP}n#DTPrslKa#>m>IVxdqVcj8sh4L2z3ff#CIB zCbH7N{7|TU^#_j~0_s(Mq0|j8+ULW%jTZO+=$N!km#0XFU4*7c-M!G>FR_E)|7E}J z_L+<5e7GulsdyJoq5H-AC(`wvqT^C#{UPY4tx|BG?7Z_|YU&F{}I%Wp?1$}z0hb6}&u#@gLMz6kJnkJbBlu|rDf>^u$JV>1Uu zrwUqRC!hWheWfpV!2IGZ22m#c`e;Z;te*(iTsJYcSCtrhbwM#QdQR%*Mc>AP)G7x% zFloUVVJAly)Ea4Q~y4^~L_aBRKO z=hicS^6m2%;t-WHMba)&SGihD2pGo6PJjoc^@9oQ3Y-^h3HNDf=x%34hIL+MiZi!N zB0;`P*{uIxS-9jpEm54fWcJ5N)u%xh?kOsQ0pM?9UKWC7y6sE4M=H?H_bFmq6~gVQ z3wqp8?LxAB@50u_p7UmOwH+hze*-&Z)UfK{KA9(8f6d68fe*B+Bj!b+rPQ-VW&OhC zSV<~xLPS&H@`O@Gj5u@V)<^m|ut2qN8D3iRL5~^OCV%+BAOL|= z|CkH!Ey9Sh;Vi_x$II(a^>al$6FfSh#; zq4aJ;)A5X8^<^Um*4G7Pm0GFt1OZ!@|9|fHc+|AoLNk&#V&Nt|VXDi*Q06k$w9#Q* z^B_hfF0J9w{T=r;w&l+-cMfbBf>~@7ioP@g*A5!8Cm}TeR-4Uz#tg>pN>4q%4kkxe z*6oJ?R=xYRz zUwn*Bu!O#f+(D%}>&bg9ydx&x2C^Ztz5Wm9=lNaF&-s?bSZg_$cj)RRS(8Iyqi}Sb z++q^<&3*z86@RbLYcVc`k}~Rjkv#Aj2^=wr=)I^}#1j`FYUvVyRFhct8a1TyxDxoj zmI1hn>Anz)rwA#KjCC~LE&HWTTz9A;tnw>QdZF5Phdeu;SXq8uisK)6NvEQu3!)+p zD)prhQj6X2lq>*ub!ZlB@(_rWP=D2p$~i-PGb3D;368#u&SRPwp%JoFGwK*z54|Z$ zJ>TjJ^c9_pQ4qOIzFiZov@q87DTVM8%{CMG9sAjl-y(96J5;R%&B#W*J}vj@MU$j{ z5dL;0m2!#b+UNg#fA*iidL^c_qj8=UQ|YEQtZ_VrMuU)>n^O@XCqT|i@G@-*W=)9Q zM}!prok7JUhK3p~hPSBR7SnEn?fGUg0WyWzl`l!fPTG|C70;;5&<<%G>*Jyz<(=2lo^;HIJDwuF zd5#b1YAd43e}=?9LXBn}Jy3Ypj83&;4!XFkt?(YwE- z z1(0<B{h8sb6x{?kVOZ`2`2>&0qSiG5 zqs!kPjaNU8(^;74$M4il*3wZrL*9zd6kw-lJrq~BuVns#IAT89cK+>YzpCw(3x0d|Dx{!&LC9RJ* z+#=z3g;A2H8-Z2)U^miTOy~982`)9(f~A59_J-PnFDDV1mG~ARB%;T&Ut(4uld9;r(@uM%qmAZZxonUw(n z$$;v5Ft*lBio#OS44qf@EFkIGji+lbQq&Ga+UlpJawX#le+7g!OQ&j}!daUe%Q|pH zDpihX7SE8W=jIw57=#TYdD+AB_=G$ZQQ|T{IQIDQFbk$>=uxPdPLk)BG#DQr{m8P# z7kHpa^J!Z*{(7Qn)@slQIgTLP#!%*Z)nL_XNwT_nCr8vNtq`4qnLwO*QHi5vzL*Ue zWaQQMf2<>vpMPX-S~rZo@YWh4ECn@f>F6#hp_1{q_(~{P>&wd&e3lSt2Pe3zw0Yn7r?TxO-LC zSjgo52RHY{OU!4hAh`ls39%JUIi{;xCnw)+V^9hjWeTfF1?_*zBPCv58}c7T+RaB` 
zu}j6fkdkF(07;E+K6uWjQ;vB4Jr4F!*8N&^3ZI~3nEnDpo(v2F`k_AV8i4Crh&;G5w*1A=|WxK(y5zhal z7U>)+EBrKeHdeqM4@+0ng%)(8hxQk0VYQjp&!&|Ii z)ymNyUR2c7_}3aCUmh3d;*&Enf261C{hvT9uiZs&)K3FUx<1xKtB5n(O+(`$Z4GE? zZ6Cv;%bma1BuJTjxCfR&j-LC+(9@hqX>3;Jg7WqE`kXx_m*Gjc5jUt)ro!x^(1{QR zJVifCB$BFIUN}@m+nVa5@V)dbB-nPN=XB1-oLfq)Td!;%`S7Kaai<%OZ3a6lExov% zk{t{u8~5B63nO@r!6?(OUJSbT@BKW6oI6HMnn_^y!_fRNPA;O@3|6O|BpKca+jw|T zqgE&03n4+c^GQ@BsU#_{jNPQq1fEGFzONJ1WFb~rPI|s2AQ>K=?GIqig`n=EK%dpfhJ+xYB%4*!WzYrZ zmsP9E@#-mwo(wrkabskaPSGT z9Ws=tDw+abi39N`KC)CSOVYzVD`9J zm>3AV3G0^W00Do++7|>|N3d&cZrijSvMtUBv1LNGMkIK2By~^Uv*anx;5EkXYWsh& zhSX55=Xq?C^!Ai~_(JdH16O7H_#fJ*BN}p1f5Gtlf*PH(_?iEmCV`Kau5L-{{B0Qt z0gs1S8oE!l5P3>>ka3=}J(gl0uemiohz2s6eDofVS#cGkGL?McXGaob3%w-qk3D^R zXbGCJa~_d2u2b7e3=*?#KcH*#wZdlV7&ss`Lm04DE)Fii&qe&|J8P{>b{o8wk&D3W zHQ}km7khff!VaUzfceD-we9WSOVZzn&|YAO9xTjbG*qFAn2`nA*l`3Gt}kC?Do^12 zt*kH(Seltd48$z1lk(l-WoQ~R7M*SF=oJJ7q+4;t?x%7dg|ZxN6cG2Gc3cV75|#Ztrxs-!s@Tzc2&!HhgKkyRi(Y2lVE3pKU{qqYZCo3kgnt+P{YkekzVa^6||QNv(v@a@QhuR9<9GV6$gF-6Lk2m=^#U<%9h zI4CO}6OWHn|JNl*J2!nmy>IT>BWY~d!>F%OT_(;|?a6lWKKrtSOwR%6w=GTK?d6;Q z;&-7y#qd9q_5W;RT0}VQz@%A}0`CeqO?&OKOV;N9_7=DIQe+PV17lbP^muJdJOeB) zilNhVkRp}+E<=R@F8-7a$lgil%^Nc1t+pz6uL@M=l;vo%bZBO+8=*cIO_s*uzTuIU z3NZ4LjHGPRJiRQU^uka-ejFKfwg=4f2%Yt}8BY*k)*+3%i=f413MRa5pTryZA@@Zw z?Wc1RN=3b!C@hLX9;hV0(Q~C%aQn?IHABkSx@Y&1Lyv3Hil;{TxM)F3`!>2e7i44t z+t?-o^eCGB?D%&&WJt+U z4j^R=sA57>(%e`*?O+5t+9-h}uDBM+up_o}XqU@S9zcc!Nm$z57AFbVDVWzTuRgHy zg#EmLVnbnBSYw7ormFb9@sj9{Q$S@(g&@7>~EhK1!FSzD-9NY@&5NKQO4I%LkI$MW<)S=dWT z)YE+`P-aSA`T{W~Btbf58R9&nCl& zG*ksfI|OFf3g|+VI2q6m8|P}&g}xB&K#1jm0ZE``Da>wuKzo~km4k^K`zLiXK{1Tu zi^`*jXg~}=C>SOFm!Sjd3;#(U93r|AS54y@I}lj-YN)AgvLjPX8zg&fyy3+1&{?7G zRe?#f8q83pUW*Rw_!#**Nm~<})F4vQ7=Qx8zUOpz*-6dTV}11iiIwNCEz2BjoCJ{u zg+UgAJzO+Wv>)*C;OiJKo3B&9-bAV(|Lz1HN5sGPlm=B1IoqNk8_weIuQuy>FUR6)U;QjoF1QS_ zX$D1hqOKy&{jakvZU@wMXHTM|AzGsCy!0;xBm>ur6~)9~yMBDc^%$rF?{LMO)YPm6 z|2%{pJz~uRdyuHQ*6dkI4&3>vc!(6EUfn0h`|pw3_r?*uVn9B6{`_(Kx%#anG#~^$ 
z8tq4VwPb`}H!6>iH4N!sW-p3k`NaZ(FA?@Lb26!Ot%t%&szs9Z21v}wHQG_32vQQG zd7XVhHs3yO+VbY5^K}1r`D|X;oVc}ZoPZIG?Phv?F%GkuTs;ZBMV}w2Uzv$Hau^4J z+qtt4S?HaH;q4!nPXn6(ky9yxC@<)>u+NR;yX{)Kf9m4D!v3%^Xf%3`Bf1dPZ$LgV zJ?%;zf++-8Rs^!RI)Kpb(h(9G!aGawe|3LJCEU}zVQ+{MmU#B7$c!9#!qZ~FlZ_91 z63}uyxjf+?IhZx@WTaM@kV}~n&7J0)Do6@ZziTgK~4Y--7nm5ABx*iz7)e8^6pfE`L02_S)B7}*vLr=)!Qr*A}jIKZC*I~Gr8_fY61cG0E-~E1DJV% zmbYiF1S6^Jg0&soF&NE8j0PY4ezaLoLQGYev>6R=m04YAQ4>V=(e)1;mP3Pr9mB(T zmmuPgH0&+T&&*P=@}J5O69&KE*SdWV3Q(XZ%Uf0e^Y7#^$ZA~h-~KuF?&{O*Yf3Y0 zh?Bf{$}q3)I-Q9oA*GDmS=gX>w2<%4!Mfqj1tChw&z-O@C}-CB@8SS2scu*7Etc~FDWMVR*GmA@Qb_dD2;i_?bDG{#WL&XQLI6?Zpb{=(EJ&|M|r>m?n7%U*cPD11z57yoOrY z!+P1?xopRicM(vNQcJK}0%!-n;TAxS#Ho&)X-XA-Y`8hn1Uv4GlGCrIp&gqjw_rMq z6H&0@Q~9iWYz%qSsp&e8)AAK&6kJwS9>mDSn9P&AZ9-ULUi4Mj_|AxgV`Imn{Z6Tm z6{-3Mhlcg2pY%78*(GXh&Wbeqw^`D{f!|sZri8{{P`9f!^#8oT{!;@rdS4mMk`P#M z)K=vGv|Kg={!!B>W5o4Q!hcW0QX$1kr>gV%*U+37}4m1v-@Lm7= zc|)d0C$N|EoOl)MG0X2YGppz0DT{R6amP0cxu+dh855f7IMMh`SjprjKV@CftAT{Z z92Fx@nl^_O3@G!XbG9?jA_kxtt-v7KuqUfd>nYS+AdBPevzFTixCK-5sB5RR1(}jgnqv z4xMInHEkhh@yz;ZLZ#2#*>2(SW{(_J6?K`hO_cSzRMeV(y6XAzxiU=K|4e8>#U-to zN~-Jm!a+X|Y)Ff5SpZP#YGN4#JLg#|++{E=$u`r9PgA z9ep(hq1fPqP-*9SlTE)B_loP+k>G+FdMSk4Y(E^7MQ+;+iUl@lwREsKN8+?e)0Zlz%qSEo7toPv^W0ST@O;8rfiiVmz=srGy8~)OD!pOWe z&oaadEBKl8x5wN4Y0vBH(cl&PYYsh1EbIc}h#Kv%^SVbq6R5D|0h-!t%5(?`lT&c# z^K-jT%=~mA1cthp2Nu%7G$GOe#5j6-x8$`bqTe}99+mBe6GEY8WcF z=0zU(Wm_2`}XgU~Z;p?`PR(9@o>JE!TQd$v4Q$wy7(Lgs*4yF=B;$ zJIR8$Z?XI~v0*}s>zmmzwKzrQj}H*U|v_FPKbO%pcH>{PosvmOYBbu0z6+~`lbKyDPDy6c8=!ObkHYCR^M+5C9B1AmY?683R5rJX@f%dG zPn*%T^7(Ol$pUgxj5Wlg5|p};7-wo8Dc+?Ul#&Sdx)Q67AO^IcJEH5s_*(ZZXe*XD zGO?W(gwP1B>FN}dx7_sHJ)W*Mqpy3va&2$mxyKQ9$cz87%j?a119H9~y4u@m2rn~{ z!c$S>UD5rYcO`GHHVKgi!IrSOt(G`hMlHBGkAgNxLJQ5y$uGPY($Sns*%*oJ$lP?% zgJIY5KD+%-qRiU%a03;!-hcOg`6Cw7{wOZ2ZqTvQRuoaxmeXEzbJDS{5eV-j!fp8m zNtULp#D#$x^Pz8$7P5bT56&3Uw%cOm*t!|9=|;C+@;TtE@I2s$D6tv#E3{mM3eH0r zTQp1wKBZM{*c9L;>HAqh9F`4Q)%o4Ie_d@0&d(D#SG~8hpAX17k5j?+Vzg6#jpn`6 
zZ2RaFBS$~}wHEUf_X@$KeALeV;FkAZAnTOQW@waCuEhBp*aDFlAiGm0JL>^o=4C)E zMK{LS>bd8(6})H~4cluBp70O;`O^%eo&FLOBhw`+ea?P%2+s5oYt- zb|4~p(%7n;DdChbv37TNqU)_c%p!@uWyz}2i}85o-*GVgHB{oInv&N)eM~k@8|+UX>74hegWC7jFHtc43YWLNqt}~_)6&5u z#qh`@H&JIk{yYAMw@dC=(?xlXm2Ec00*^m03n2jfUC{_)#@CRfsDsd7Dj)H#GP=g z^>T>{>arAJd~_GDbs7b<)h4%$u_1xQnicNfIX>y_Vz>DBapi5&6u3Jg8~T^!-7mG; z^1o+pzopr}JMaZh4*&UsY5M0nYV+>Tgj=gLqH#G-Tp$&U)U7s&Yjoi2AL`VczMJ|q zOfwjY9nZ}hjNT?9QQgK--}E7IN749lO)R*6NmcWyUhm-*VL577K$z}9;E0Wju3SYb zz_$Hlf;mRxYy0a>k*5YV%eAxHK<~h}hL~@e3nSjypx3aRD1s*R8`twrfZhoU^;JJO z@CuLO>ZDl*UMiVPb%fCaaZ4R5RP?tu?v@NzteDi}DAW>^#Vq8^84n{kUZO3b7{YIt z$R2JKB^RZj)EzMMuNY`TZoc3lAqLo7LK$-05vP9I~Cn<{S@lq!(5b^4PudnRj_#?&>h%1rg@70`bMUFo5zheT)w6$~WPvXH58=+_i z*wk_mAq8UU$#-FWNfRp*E=G!OeH7Dv&nOR&yP~o{6p}y`@-58w9@U8?s8K@J)&e+t zgxsO@8Ny{20>-y3hZZo$FDYw9nVt5<>#$4Y{3CeqOENSBcK4VKmKBFB8%abSvnVQL z2#TcOGWJ@Tk85NmYceB(N_`dcj<4h^LjN@}+@pu|V~7lUrx8q3R!h~mz!sK@#;-^G z#DlayaTZ)SRY_3Qh^b?efn>xMOv#`FLeW}v3gWCHqx*>dKEc$8De`-W8riNC&emv! z+{&Ts!b7%e?hC^OhG5_Vzhg})ET4f3y2Hj=>rfY3@3jd&`Gr^vA#qApq>Gueg_!K) z97IFbPLfE+$@v!c9bnZWR)I*|eQ}TQ0u_%OQO?bzg@+vWvzhPeSkBA^r<~3MiFE#f zpGmqB1grNPE*DwsEAzEy9zW6r+$xp33fSJo1mnR6y6VtDx#RT>|G!d>%-Pk}9_W}8 z!{W6ECCaL@)|%2(J!-{kZL5HkOKRY*DzgwM^Y*T^p{w4^Hp5cadE^Qng37i^L&py0 zd9>TW2SJa9wF|L=Tv87))NTmRSR{g)S!L4v$OErqWwLIB;|7wL98G6)jer{%lIC@# z0|?JMyxe_VExY_%v`q?JDrdEcXgksM)3Mi+WY+BT2$8Ul-G&2`-ua1bL{L%YkvyIf zx0dl3GNQ_V7n@jct;5-}N=*g+mF^Qm>-95~D-2^EhD{IaTK+YX4pxpNpV(0e)XjmwrpRiJ%7{xM2 z7F63`N3DTIxhA0?euCd^Ma3CDn5Ve4S#7hOm40pMIJMUn`*8uV+Qkc6<8^saLs?ym zRzTHAFw67IN8S)dEzm4WSw>z>Emlo0-c@bBrz4%@XSu0jEsC06=-I^4z`OoIG#e}t zmr+gE#s1?M*kIO(8vng7oq0kbTB!UP_N|eoon6iC=jWLElu#o+mGkD}qOXQVDHeHx zKnO^cF1}BRQZZ&I)drJ-@C&h=I50XVp~^!y>FYKm&q3dZq3(QY*vTT#s@nwSn{VF0|)|6RO)x`!*hpRxa1 zP*o^E=7?c;1q!8$vAOqwHbyP(YYIs%aM1=T8O}LB#v^&}AacVY#g+xI{SyIlGT^rT zw!%c?SZB0q53YLgE>lA;YoM2pREI`&#FJR0drX5Q39)Zlz;J95YoFa!0)Ji7+|&$g zOG)F8C6fA-j!h%XJY_?WY;bo%s;@c{)GPvdu^wvjdl!&;05T3z-+|Szd90oLP9EYL 
z8J3O=o43{2n2gM@4PZ(g#K{|+uI>E_yk6{%e}YO_Q}S9YIy~(-qSFm5Bl2Jh0FF5P z(iNcHQjTvTDOZow!83QuD+%;sEzE>sKRH1+U4Ib5#Dg!OhIQALEM8Gz_(~GY=KQ@)h(s9%_9m3LcKeUwPH<>%tw_34 zv=G?Zob(OUCB(Nt5)-Y~&NwIvN)a1m&QT5=|hgy+4Ul!MTUJN=d_xU*A@9K%(Qh(kZ zw6{_th0;(9MTDECilO|tg-l3zMj2xWv25GLLSq-kD0FbGvlN0IClrc+A)IW8t^1x5 zOK;<#rmW|AX=pvRJ3jsJk3q-MQDN+eO=^*jJ91~XU-(=U}!HvFK zgz!>XIv4IfaR&8JgQz12c9&Be@&~E&HNs6l(d&N-NF*Y)6iqH! z;8^X?OefPof11r&gW22nC}a^`J^iS+aM;DCITfOLr7_-%BoZv&?aSt6D%d1z&mwsm zj+0(e3wSvWRyd_04ETH2;7EV_Ept%fG7+(7> zgfvu4o=J!_W=mk#c}wa3I3GYiJ_Z+jO^V_v6oB$jp}%7GJs4=*9BGqRTRhzFDS>T~ zs8WdLk0m`%Q?ddFE!`&tz!jlEpmMsndvc3Xsqu?59IixduTMNmJY01M&+ti){{a(x z(5Y>QvDCd>l~!U#_f~Q#@W|2V3v)#PRT7#gw84|~%TqA|C*_ejKpXMjS7i@Zy6r}; zTW9y@Rp)hI+y}VadVQ9@a+xu!#Xp=Eqze!3z&jPBggx!uWkMz(t(_%dC-7I-{CZV~ z0$W+IG&wFwv5hn29@2E?|41Ya)BVE9xexT2!VYNT zIoBQIEjlN>3QoycvAY9Qm4zl=%-}lA8hV?UO7{xu6|T+gk)9~i8rtJ?V*ycN})W1A`KF2eN~K7`adlllmZBmBtRe2orTRE6_Y0 zUEz>OP+eqa5kfe%_vs7AR&Ve<9oZLwnhITk?yIp?WG}_bQi+AKY}X$d5?MEdmuq5Q zLJO^=DEZC{RQ8@r>>pPh3|%6>7aW~Bq}hNB>oL}ysWl3k^QypT&)>VSE=ogBaz0H> z8kUf8U3>bNzB0o8QWprHam~xyx{>|d+=fm12RW;tZ%g05rysPchUR{610%)%D8a`` zqMqNs3L*iO2Nq*FazcVFO1#ZE*h@T6;ZU&w(YNYTNg3&+p^HnIM-L8U@V=L4MZmdG z%$!K~=VKqLnSvQRq{NKH$4HrBk=jC}sK zlDzjS&qRNP=znvG*TR6EV_t?H-^G2`(p&W+?l}TrIYSe{0e96%8P3YES^kpqU3x+Q z(FX!bWH!iPW}id4aBy(iC%EUk@_r8rJ!M1?Jom(Bo${~wvF4qA05ce+p=yR>EBI{C zcf-Orb`f%B%LnM;OeH(a#Ww0Nc`!zob}_J69A6?bogYyNe?^TRlMNlV7&V>do>5tX zG}fZ@hh${Lui3z5#8HvH@`XpEKbZ(WmuRKI@9zP~9}SZFv)T+-met;L#n95jRra_- zx9<#CG|{O=@8)BvKN&wVC1x?JOINBTyzjN=ge`)xUG(QOv>qbpoC+z{>r7T<@Q-u# zHcIc<5cgJ#x+n{t($f2_gRX zcS|OzCG?b#=I&)Ja$lc%@1jyp^VUIi9p8u1B65UQS}Wizp>u5jN)N93T}w!|{3J5B zg==J_`su#2n~t1M;;s`jbDrgYOX`0ved##Q$ObmHPQ8A$+(rvXs_fP|ADD$-nN`An z(&H`zC{&TLG8Y>Nmo z)ld69Kv}}Hug{rTw@rb@k{WcTArxjQpvaM)jc?&Sh(c*PhXuYiHySEw2YDTa-OKaqZeZ%SBqS68c(1uGiY} zTLU+}()iM%E`JZ|&Awe2RhX1IVtVWmH6kX#MpxfsVL+An;10FM&k!A&!)7o~;dD>Y zU+)zbpxoGKqfMe^`xwGMr4B96`^fOyy5NXJ-T_=58SO5TbVFUp^p;zbDzCx4geefH7LK~Rt?w9)iX*NzcxEZ 
zt@I=V==qVc@*Lejudn1ck>4s?G@bTu#dWczdAWq0{*8}XyaHEh;z>vzmdNLVeaJLk z(3U4m38n9;Qfk=5AD0LxxsEb!Rdd(^%nm^yMeU_+&E|@ciBUj8n)dkS|7vO7rX3fw zw4?4@(B3-T1TicO%SV+i*|o~i+WeGb{vW^`8Xc)Z%9+R!r7#c-L(5k@m>*6r$J4;zulOs z`P6HIEo3B!3WZ`EMJ*VSdoQx`4W8)kaB8~r3~~UwC9Kif ztKZw}Cv;maBCl-yzPU=QF|6HP7C3;vkq>tk3!|T!V9e^2D;zp)0Zj8|P(cQ! zK1ekqV%&+3b3T~CI?^=ATfPLGXqbbJhIBebjMWZ|J@WhNVILqDgAv9Z6iBAbH3)~0 z!Hp51rDbfB9hZd_c6{a{|A6l}B10V@L4RV@d}%vZxCE#|5SJ7VuLCQmV|FNl2~hR9 zDBNhyfCR3O5s#eI*~|`22#1a`F|PkcsUC6L&fF*&4bM#l&Q;i3o*Me~#Dyt0dMgg8Z5PjC)0@AJew6mULR#e}g& zKBqgTU7SfDdPu#t=XlUNRQhBF1N_0we}9AUb`0Tq)kN$Un!OH#eBor{)$AfBRfL3* zWs#C$mO>aVSlqi^jr^H~XkP(wr8l=83HEJTdMf=pl~;o5=`s(D6wiz0?Eqoxm5-?7 zjG3fl(do%;Ls1b|TS1~4D0E)sBSoz#k3XCbuo0$79Dn|$V50;{sH>&S{^e-nfhJYm3$09t_ z>yO$wNG0oMgtX=)n$u>`i!^RgNiL4E{lq?+O>}j_<=x5nM<8q8FW2J#^{=4&`!D{| zZO|HU=-+3ZCzq_O@g5gU*lpTH(xfd2R21W$!dqW~ME8(Kgw5{M{R+Bc+c*zKFcV96` z!XGPjTy@$pr20^&bW~QH1^sijV+MWs$qK_c(SndV5H#}1Yuwnig?DID6I1BMa@n@9 zL%A6(pONh_tb3X>NDTn27;b8HkNgyjV{o18^O@g7gXe7P^USOZIC$IFvYCy(=K6e_ zXhD7Gp>3Kd@I9<{hRw}I&GQn;vlan1uB*h$_Yj2LCxQBR!ZPhOuRW;kfqRW?+I7C@ z&9#K`6-&vYkLWZq@*dQo*cXUz zko!Q8-X2h4lAsETwD4*as5I6Nx6|EjeV9@q8tx+9P!j`qr2hS}oXCkWk@OBH`63;72l4H%(B%&2 z+B0x~B*;Z1ud*?#=DLf^L;lw~BHnhIP+}^O+Rvb|kTCXZ0?82+nZY)OK<28PaSyeF zLNH1);Jl0eSQYv)eh4qPMI^yvKia4*GDuS1Q9)c&Q*#B{uz1e?rIUX7MDrPlk097E z|KHn`*Gd>)nwf+#+}5hMqK9wGXo+cnSV|84otbvaAOiBVUwadxE@s7$^~(f%OaP!Q$3; z&~4$%hZDNb5)D`jt3C*Zpl+rPV3CS)uF0;s^r_X~cRC?m@%2-hc`LFpZAxpXiHSS=U1u=H~v?OK%JLm&VH{QeFtZwzykAvQhDaUI9sh1!ZbSSAgL zh>pYm6SobS67HVb1pQNNqOT#16e=G>XegXFlqA-S<*B)NF)@kEXOfFY5!1p(C4fRI zsYqe`+4Dp3Sb4!jqkj81mJ8nptiGuoX8%g~xscM>7Cmw=7CmbZMIC{0c`oyB+0`j^ z%Bs?RgSh?}W)4Oa**63PABN12Ln9|fafF0_5%LT}P*!P|YpZ$fj2{n6q}Hm>8~#o< zR|-ejCD`%nLBM?Z6rk=Xe+LgiCC0E&`7!$CYZ)o~RbSaFD_Bt*hNT+t-XR-5HG>st zk8M^|omIGW)j0b}9O#h(LqFrqeT}VZ7zRUDOM1vZlTSdwuOxH-jrwpG)9L1?dY?oL zS_TbQf0-`sI}Vf*Rosd_+I!mpwAeFV3q0u9_)v|Za=kb?xA`#rG(g5qNvQBe_=!=f 
z0fEmmIB?!m3v`;et$sqF!saRjOT%v$SdU7h&a+DhQJlvZ@eYyd5Jsoc;Z$?-s~jZyA9_lJxa&hvm4C#%z#`Ry%oVQ%)KG8KU_X) zYhQ<@unzwYqg5#Lb4s7i7Nb%Ip;lo0Zcq*_o%ze6A_eCmh3CaO_lKwtDdA!#q_PQ^ zfhUZI!y1a=(l4xXF0H&Bc8FMiSx|OJQ3Jk$pVIprl3-}$Fc^()OlJ`}EsWHQzw@th z#~tdT(4zU{m+2`6?LxJrGa7TVkxmI#sR{5OD8JhzwYop5?U~|8%^Ieww*I#>`A_F7 zAil){)4DM&@xF&$n@Lkc#U_!Rt5y2-dP@H7ZLr|H10A{rdAMpyVhlNK1T91p6t^4L z{XM5UF`2Vh!1Zqa2PT)I{M&xQvnrDqA=I zvT1b^My1W%vA)$6NeZE_g{KC_Fut7--K24~_oXnv@jHio1Rv92D1lD}`C%X+Cw@g4 z^H*B-?id&bjkG5z0ZS60!Z!9%WU;(RH4ICiW`8o{U1|-~A&PDp^e*}~$EP2vj22Ps zAq#+)twQi;Em2=7gfTP*4s2QY-{PGzC~CF31t??)Lt;rke`jlj^q^F?5Rd#7A4EXS z7#G%j+i>W{njZ|Cn;KCnkdrh3s1JrjLSvK)bog`LQ|PG=he+NMV2lHgRrD~wGWEgx zCQzqZRv%48{1p7#?fo?pc67}}N}MeqJJ6*ds)u&cO>L!g+T3~7W*L^N$8&q0lh+U- zX(G+mQ8?BF8j+c!#0BiNJV`YU**{44=;7Qx1Q2T1S@UO|;5S6+%jfh))<3K(8mScE z2pEGW3yzK$#Q0uUJJ<~*=>!TK|9)u5 zg>|sGsA4yXzlVG+$V}(G=O-la8|l5K>D#-{3@KdJax-=?0N4gR@BHK2Te4bh?jK@T zyBTW~9_^GJ1QtCG=QngO3s+p)Es+?t1TZir&fu)@jojBotD`-g;E}=s5qU8L7Xl_&`E+xkEg&= z-V?6#L$I4fgG{uJ8Wn?JC}{Ms#5FY3I`yj+v9ZtjheIUeZh4LiuBT;gKE7UiKYFyP z)#b1tneQ{Tbs_>md!^KMCWGFrE63GaNB=K}!mdJL^&~(8L%yZ*gnGag%$YwzFR~1a zsF2)0-Jy^@+bYFqt4+H|(+^#z5EBJH?@y7Lgd>u>QAsB3CNtdya^WXH=pfePb>~-|HgKeXq6}&qDD+|hBkD0kmigGM`IJ~T?;W~e_U=bku7N zCf^_w)};A3lgcKf>N$~HP6vy>igSqKSIvFusxHXUJ%fcRg*NzAiZUzVXe-4b+S1Bt zp-O7tb$WpmeSeqwW+H>Ych{kK^M z4SwuWgQHN>gZ&GfL1AA`Ju1Nxpq3CzQyD(i`wJa%f;6KDewV9EAky0A-k)nlY6TmX z6;MQAEH`xK7_orKwwe1)0n?B&Uv?KW4Qme8`0S^t%hy~d3s{k9QjAAL2y#i%is*_k znjNH9wE&XepO>`n`eUF1*n|n61aS{zwpG9B`4ayzyG@yn7AAsclmq^r;jzp5Ww`4j0+3pW;B@Gr9 z^nNq=Jdx+)8QFc~`n-vK_qO+X_|UHmhxt=4`RDOsP|UGieM60gt2hWb7FM7IZ+KV_ zkqM>S!*gNJ?AMU2Hj_bv6#jOIe6Y8Eq5mNRKw zXP_Xlt3AR5zotsXkT5*ff?w&6ZK8;50%g@HTI>_snNBJU7El+EKI6gJ z3vGY?JhF_jNli(wP)c`I);G}8)BT$9fAsERDW#Ik``C`R_dmD)xV|C{Vj5cG-8cC7 z<=^ZMT54jWhs8k7V8DPxkwTfGzRFEuS=y_Kv&a!{=cc8ZIQe8lJ_ z)Y6NxYwa^d0X%Y(GYe5HSaB zyy6D)?^M55TO0N<-1qKrdgB{?Wwvd;jb*hUwc3THFVp9};+JaVi#}P8-v@7e2l3ig zmuKUF>yEqsH50b{;!+B_5@V2t3y=Pshli^>h(hPVtFKbiQ0&-K=xDH)kH^vfPZ%Od 
zfU9PkYiFN>XQmImZ;9LQcQ4C+NKW_4AXcvVs@;_`t8oX7BZk zl?a=RBco?NO4c@k_YrZ&9M7a~Z*Nl-vZb)j+B3%nU z%z8cmv_(n!5US636Vssr=lU)w=5_%vZsG6_@tt^s<$7tQvlzVEZK!=$qeNib@5xxcrK_x-&oXl4|aY0n;4)X zCIZY~P;@*&FoKxS4>%^C0BoP?r8ZJ}q|Z^v7O(}axP2&&xWGTH@UXRJRiq)Xk}qF) zPi18eSetSi2z+C*3OTCJJvi|CRDUHcEG>-tUvGX7%mxHFwP6MD!-)%u_0#%lFAiS_ zxlyam!)wi}-tc_p+l6lVP@%dKE1eD;*BZ+&THc8_CDKuV`y_ zKqEV1Oe>#|G?m(DT~Xu~0v9GedM24SOt-sq^FU)*WqxWj5OUSdBOF$n6b) z74Q_Nj=S8HO7jx(mZ`MCO$h(I2^8i-|86|6rr8Xl>GpIJizurY5~#}Ir;yy@=6Xzd zYqoB_8`^Q0?0f@aoJ+BtAWc`do+r4;b-W8wu+yWIDDz- z8OLV1vFpU@G*Kha6zCY~3EPDS-ODalF`C^x8OX>4ya#C#aKDX2mHWjjZ?l$%&bK0d zd0oUxIR3%^%h|2f%>N=}o6!$I78W^xBdwV%7dQl*2#gAsBwQNpbI%YMHW*<`VxglN z#XNz3r?LN*&8hGD4NbbWTXAfkM;W^KZTy&~y%$;UMqmQI1hsP#T z&lamntZV-QlM@!33$Y;mTi$>HgPEe)?%GlSZn6++vT5?KX9vH{_wyRBZG>;Txt^*H#ajxjL{h}-zahaGV6cB+1-6S*)tLmyJLqB8f~yoi*LAL=U5cM4PEGmScFJ48+IRrrT<%HIeugMlegsDN)&99=s%G!9VGKL&7vP@)r)(#mXwWRQ>36zq$W z>=D(*{!}bK_4@gvX;GYb@;LSW3C?IjFrHeb0Sr*SX+}s}Rb6DVub1gR6<^qNL#6_L zDPwJjU}%^r|Fz*fPiMFAxeJ92=Okr3GJmW0#l@cX(VJh!(>lDV+qNeratFbY`cJ<8 zmCb(QepiQsgZmG+E*&SYe)4|B7;cZFV&=vQ%#;gwYs@na0Ir`P(!N(L3arwwcZ4DZ z-7}GAI{ETEQbGdKBV&|_T56|PvloV7RV42Qb_zls{WQHP_!7 ziL{07nU|&GCNV#~xY94TwY4j${Y?GCZS0{KI9m!sG^QD;AfWNtFVK_4j2j_w3h-9G;P339-FE0`) zR;6ix?HZe{K#~T1&Yej!wf%B^de_4Sq=U}Y!tX0-- zv4er2jl4BG^&d^E;fc9`KM;%@NHPH3yF{|Mff(jM_TQsWH6i2lSy;GYmV3~!P+lz1Qvd!_&B;KR`MZGG*e^1slm~YO{%g}FBkVQ{8p?Y zS4$PCrb~$Nq8sDN<7EdRh#M5=i>_Cjfa8Ueo{rK*SiIR_odIb6rZruWvO;D+i|m0V zY3DU*alvp}vVY9hEv2i8dF8^1Nr-MU;Ls^O)C@;QQ=4em$rcj5=LgDaK8K% z)R7qXs42T7g&8@OlSl&d(FaHqZeZvMOVq&qOd=KtS@8}RRIABY=37%V$k9T|IEpSN0!&xZsnn-AHJS$4~afevVF;wi^ zg^G4u3q<4~PXD4AkHD~8cgyuF`{^UOzr=py396S!PYJ?aQ% zfFn4}nGng=a>2K-=Nj_5=$xDf;Vcr8+1iThX8C|!Ob+#|z{wT? 
zwP|rvG5We@APfZoj%n3be|}~RpyRJcA^G7J?-8?*q(}la2#;tSqF&7%ISx1_CswoK z4}eaxC-{f*4~Hi@P3O9v*-7dcB{nte@ICc?se)N*7O~xJGs5ZBwvO zNXjFO$3Dh}Xk5xUjtNJJTSH-`_mjZZr&Bv$KVQE+!er~!_PxFWsf zrp)^H8Noz;f&y`)6`I3%4h^S~Z#p;0 z-u^!TWX4I!K#+Wu9Ep*?n+P1ma?`@)`n$t-P04cSrs_MS)w^nIT z1Y$oE9oaAbbS`Ih(6uV=A0FzaZMGDjaX#{buIa1Y{%}j{`=BIcOidT5@aucr#{^^E z3-231LM#v}j^fWxIK>bjcm=7am88z8NKMrvA*C+#8d>Q1C3r5=d-P-t10u=xec$bz zn!-t>jl%d5b&zb?vSrJbE!$@n7je!-v3=2XeMRxmijFnK0h?3X)fObx_az1StRw{( z$ww_fxy7s>>bd+V+-oGHt!pU7b&YDb*=+6$RXk2=8(O4{`Acz6j(@U=rLjnAf=`1dags(%aL^0+gl>;IuDcrCr}`kBy1P@50~%{eh6u6q>F;(b4U;Qc%~Gkj6+M z>8IGN(DYjHejdM=zFbv0v5jcu|Xo zN{M%3Xu#Sth(dwr=Sa9F5he=Bj7%S9a z0C5hnSAZYIM0YWa+iqlf_6PS}M|C)u_&R}$sZx611OPq(a>f2JP=4e1`Mo1?-p=X% zSTecr1AQAH- z`B(QV0L*D=vt|2Q>nvhW1bZ_qVOgD$6w0aE)sw4TK>IPmNY>JJqiYZ}w66m`i**L63|>2z~A9AE5q`xm?Y z&fccKl|${u6}E=~jgi_(EPMh^ZVEA~72#Nq_wxb9dX@$08HaT;=mAs0fuh{m-3B*8 zeX(Dlhf0YU7&V!T=u1#Tsm}iU;54dqQ547Zx_NlGzsehY&(NbI#6S@YAQOaGt0ZM~$xV5~d=#`#zfQgb6 ztpyY+(Auv1sy!Vg{T92TDDPLR`a&tW8sgkhNlJX(^`g%rSCKpprhqI$?HWRk3(-CzBy%}XyU%(dCwa`Y_i?dL5< zyo-4tW!qSg6!-0VC8;8>>)AfFebmO>H<$L|v67TkCwgdvDR!`-e=$JnK&wKlSs~5g zh#l{un`*|l^P)vBAtrh!zP=ex!jYBKPZ}vAB`5|sgn1WuCLz2Q`kCkVT8#*eJ1~JO z;W&NK!MoBT8${|+nEcl(#Uvc6Fx5Z^0D`Ba<;QXxPD^^>bHd~k2T44P?i+qQbQ{Ys zpfiL~Og8P3WG-4$3fajgV{btO&Lv$`L4+Su1a38JT$Z44$XNVys6 znI?|wvg8TGO*(##KhQXR*D});4Sn4T=nNLp%B(W+>SR&GXZX#%k`#~RUJ43*heD+V z2n4pr332l5YeNER@ZbSRXDj+C84CExFSr0JME9KGF1e7mfQZqf=ydM}zZ-y3B2GpC zliFXh@;X>dO`M$T>|r-twzBJP2~NJ$F3gcmx%Du&Hk z<2kM%O~OLsmRl?&4*Sz|Z3oG`zAQyk)#9*OiTkUKc(*E2kQ9%ERh5&MXJ%3a1t|`i zS1y3QqT>Z!!CN|B((PNioG%Q6q8KD)VB1*06mB~dYb5{1n_C7Aox{PPD712JmdR3i zSR_u?_A&j3GkGus5aM!ns$q*hgV z*woeTVqNdK(TEEe^wJGIu2oWKP!e;YB3rg>*|Pma#m}$7#>l`%;gD;W+4B5Ps|GVd zH$0wBrmpK=se(5}8Vb8lboZg_`hziO0a9(-mr~lQCx)gy5liKWxbiT2D@-RTP9!3d z96Nd3kefrY{{czPk8$FIp~*X8*d9xh=&@IhXbwGp1K;NSZDVsc5FJBT`o8z2i5MWI z^04}B{r~J>K!~Ah%Ny$sAa%n>-|O1}SkSzrIq>puIMQ)=skNyf)RGXQjX0PV;oKM+ zm84LNYElsApizD)Jdnx^Q^|v+ECbj8kg}v|;N_r3#h|F8HqKjP3M7bl%52xHOP>LD 
zys4>_Vl{yWWAyj?!{Mn7zMPMKhOXS#g~vS?jIi+Y0#Kq%5swlal%#~MFG-E8sHc?V z`LP8Hl7cDc?kG^6>iF!r*0~)JR-9Q9KAS!+`1(H^iRJ#9`=VLtIW_}EJ#W?B`Mbr0>-r@gnh(WJU9O3oc7ZU{)9MTrJe?* zF!QUiuM5JUDM0G%k1-Yh0f_f7cp!6g*Qyf(K&tP$O{1hLisHU3%eOR!w<|L4hjZgM zW}eR^vp-MH;(?Zu#&1FSNWDMow_Vp=(Y3tM#=Pv>{)+PORo8bbqjgQ=ph%g3qCKsV zscFbviYt$KB(8 zN48n7cFqrYg}F>N`sFSLm{;3u&o1!HAmnV>J|-p_<04i)Z`-zPnub-QYFEqM^css^ z8%0OKN>YYFQX&OYgqoQMJ%3{m{hUGsPF^fXsx6ArlD}MCT|LxQwN%?!@;Mn~ob9QU z){X@lS6WKN`(I3$=z6m8k)}-5G+l9O+FGfmb0DdN{cwquae9WO+3xe#q;b$eq25!6 z9CL`I_c_hw7d;HZB1gPWKbV|e8(mWl#{GWW_E)q@@$YdVh8|C^B$e0nY@gIV3MBPs zU)NJ5DgJl&Cp+x_kGFV<5^oHMqh=*35L%DgCc>eY91En~>hv6ZgOrQcA{oA5FcB#Py3o(=}GeWr`_RbAs{l>i|b zII=nRx}^CGEh6hauB0}dUAyAa_H(?_UUZE|b5BR-K~hFUT8}Qc$2l&^jdT+$tT+$? z&Z?rWc#6Jp(EV;BXbMxb_2?g9RL2|UvJ5^%M2blwD6i1;bYE>?H^qf8X;gj@n?zWE! zWU3$qk+|7(w{Fs;I6eqMEdnE8cuZc13uy;LaY1~~Quc$RdecOuNrc-TNM)5ag$F4P zXU#5Kfoo%Kk^p>4Z;*BR9`y$x*vqytHfGc1aoQ-#)XV{#%}4+{}E zIyDqTob95(B#s{eQV{C17^Ub4V!ksIF&$udKm?F-#9B~M24R+M9J=*&t|*Toxh>$`?~A;Z@R91-L~yFr&IG<8&gAE zPZ~f=XW4+ga?}WT^57K^n2c9fU-#qOE5-YN!}wl7MF5u20~q)g#hm;Iqmn-YjOF-y zDdmxl`=Thmrw4DE(m=lu;P3X4;JGX+si8x+p{`{P6IIjC=`86w_#94n5?ATO&jL;jjoWSWu* zi?-X{ix;<^Z8AY7YM~Wteo3`;SK}?a`ae;BijNlv~N8Plmc;{ea>M5VmAb8WX>K#F{$5F{p}6OCcL#?*{^S4W@t^sp|LiUrzcJu1+t*Xi z%9ic(8lxYAv(A<++u3+X3?QkZ>pH6>Mdy{)14xQNQQA< z8gec@^vsD~(!W<+7Z(FaihDdOSNqB9dA3h#AGI;JszwfBxHI*Mm&utzI`Sgh~IRHO(=`W>qf__~DKez$iT zh@TPx3{XZdViEZ zH-G7`ws-VuU0p)Isbqh#0)4ZV{Wn*t|BwHtod#}7rbE%ODnb97|I%7-syVbK_f!1@ zLEf3)_&@p^4Z7ft7a$J!Y$Hl%7-|B74y8aArId9gaSaHC6L6Cb&LRz_69Ue}hIlqE z{}tj3goW3Fv{?ru`6t^}FD*z44Wto#f>3~%8(oaRC76qk&-)}n1Tv{f@$no{*aSzA zq7HG65RU=hv6qX1m(H&y7w1g*GEQ6&(m*HtgczyOOc_#fM54pg-+`FInMC5G#`kc5 zkwj%sMqrXyX9fe?6%J2jJO?c}NQ)24a~j5Ll}a26CHCZl9_Zh9biAdt>pQydjT`H` zD8a$(i+;vz;mmH-@BCe%j#?;dQ|b>eJ@niKWvq5Hxhd~^cIj4ce8zW60i0x zzB=~eRjcu~GvbDhtCNOxYhc|Wta||~THPx;O20OmF`*w|Q8F38n5tVwW74g$X&e9; zt;(sY+W0UGaZBPAAaZRZ`y8geHD;21Di5DPm=oU&(MAr$Or8ONiEjTHMT#GdAHrvx+Rf7DqFT}*|L4|EbGH-l3(iA9|S-gvu&S;M7AG2KR6f5eNMTY 
zq{MT(-Olb;N+BJ-zUmv{9|7d1C<;k`E9to_7!=$Mz?u3AE;lo$WC{y;L6n~-ZHEc7 zTf~+g|KzxwYQ!HZsA79%*ho_&0(EJ_jFg%;Vpqr0Y5TY@3@D&{UOs|^NBSKe%Cfq= zy1Ksjul}3=;oyq1@uB~%f8$@z@%+zZfB*mbBM}NVp?Uf4?s5J7_irzDyWM5ewAX## z-x{M|y?b}}^6_zZb2^=_x~|{y#=C7CMcxn-A}(AZOf(0gc|GQp80DClro9P%Zv<0K z`w=nD&vDA^4p9aTQJ#vBNXt`(EH!UnvhthcjZD4`&@=NR&0|e-+VJ5P$S=-Cr#JD7 z{)XZlaaFV^!K9WNh_PHH#0^#&|3%mLt5ef#cf0-dn>Tko`9AbuLB1_{6Q->6zyBZp z4nJ2$b)i26%42 z|NhO(@4kQg?csR5ZQJg;*M@yp#f|Z9Uu2+W#Aq(1Rhxxt*T1jNYuDEN9Wqx`{Tt)05`-r^yAXs zczxk@1>G_7hfc4ZzbtSjVf(NU`QuYqoJQvGM5_D*<^?TS-ulM6X`1zZcW4V$H1xSQ z3|OSGS1T#YdR42pZ{Ny=fsMDfw|PK)Y%2f$fsu^1fAP=#tEMc=X&$rFy! zYXKc47+f0^SG~d8Q`>*@5C8E0-Tm(G{gAxuM?;ol`8Nre`aPCn*JnX;K8eh@zg4Xt{zSkSdOKU@GJl&`4 zI?sEM=!L@~_l2;yy`LzM!epP!`(QjXz3``4DASEe{#w%ID=v@G?%#X*&b+2Ktb9%X z?kHc^uV25Mf}`+rtG{H+mhD6QrFegyku6)cFV24d@BE*fKNZtsdwIF7?jF`{*RMpc zDONJ2HfWWjj3xZ>q%bq4gIO5s3D=W?>=@IF^GPC1Qp1mSP%x`+;N*n@S%OO-;`aqs z#Kn%j`-zStJ#<$V#XZIBk6?2hXy`E=#YyIuzx?)B$*Zz`#rgGL_>2ERjJIHr6xYrk zX{@a%?q1W=PTX@ps3IVy`7bVJ=isW&F(HkZ3N%DHHi{1{_e_7Xv z!KlUpXQTE(*a2dn5W5G6a1h{HdfnzKLMVp?BScl(aIfXcVjqfrUn7zycju7z8!2#m24gp&`Z(wkM^=Al=59whsV4HC%=& zHO4?DKL-ZUx^UZW;mL3qNbovig%Hz+;WklWTTC87=J?)=CibtRO~dPmn@Oa|0(^e6R$AoOF2!5#ZW!qO<7DrweRUka$Am zDnub8f?HaUlo^%mq*Iat0VB8+Bma`hWR4)O=#z&SFsNJs%r85HZKok+8bm?+0U!m( z3dFJGGapJlR)xB&lw_d(9r=QL21n7sYH{yiP)9tnnpEU;@?6iBEnBv1**=;1jRv1q zieH7|He0q2$q$5hrb_0k6{j+`F6NsHy_)yBEL5qatR=wO0WFMlF}2TtG07_?^}^|o zb1v?mmL0(>Tcjlrn7qKOEQ7Foad6sjh%sViC^j)js_)9CQ!90{%~`^rVG!A4Q527M zGg20h1)W78HR+=>iaCp8wx7rzc6$#rh&R~F!|}Aazkj&6ySuw$)s?nwZ~MM~wcG6( zBz1i}HWyvjuZ%Y3sKXRDAI79Vp5Y=P^AKkUi&8CtNoE`rUPP$7Coh9h6%Zhd{wFuN zt&+N7(4#73S18pfC3`v!++0Y1YiK@qG?sgAKomha;Qa-_XSV)-6-y8L&F+ccgQ*|H zsFV|$vt$3MEn9D2pH9uiqdKq}kD!0kP?DfjAXZTGUP3b~og=uJ?z-%e(vgUmQ=(tFG5qbluiW+C;8P z4mI)tMU>Rc#l-VWm7wAe7y+hcK&dC^+Qb*a9CZ>Z?l@b=)A%6N+Ie-faPVj^kO2)z52n44ePAjPm1xRQ?xcs{h zhwz~Er63KL*2TC7ZQebWfQM7{6F5H_7ou>4B^1^q zj~!Mhx}fjBVFH1&EUJ=%L^pq@>uJtEe*)XA*SL_!JTkbAE++las?dP!4L3cNDG8+; 
zl`uuf#r1@R_zqw~2od?y#1GD(^1}B)j*s;uc$3$eCOPx)50`)}5Oue(zhqSW${<{7 zBW~MHAKu+R^nd>!{NFW=UnYQfq%r)s-EQQ1z0wQuU>?7pZvD5n-`(98-+lLH-F7X5 zoVTpvNekEZbZWmf249mOx;0u{_1diaUR!lAe>fF181YRp*&#;ziB36kqyq5-q7YGx z!jv%iL%~sj`CUvkq8EB4CtQikw}ZkZ193U{?25u2oXZR#sRIN16w6yzmFCChahfgL z7m~Ci=2@IA+gE6J_m94Q%RN?=KW{b{7xikrIi8vobbW=Cf_i|ILC64rL@0_e$(j)&(rK8hOhs=LSG`8iL;AF!PT-nzAgG@> zN)z`KX<^T!iWNF=6sC(g0E$#FJNjI4Q6k!Dg_+Yx>$8b}nK7^~K}g~U&ii^X2__c6 zQ3>N){7oWN4EI3Ws^ESS148uxHT}*PrWe>XLXiim$(3IA29Lcq6d;&mr}5a*K`*n{ z1`ZTK9LTTj>6$Tv)WJD-e+D;>+9$qbwx6eZI8~AoZoV^faiIiMtRl6NVneg%q5!Nq zgC&iHlFrzpWzLduvkr_6`8d1U>8yZ}@he#~VczV&tgsf%Ki^L8fl}62oI3`96ymZT zvxwPYlLF)%76cf>941A0v4mHQqmM{wMa-+2v(^m}w%Y+y6a`Fbln|k@VsI9m3E44H zz6qbwDeO2|m!v_(F`SN@7YXMX2AoHrSetCz;_MbW0#0;bDG!huiptSuZ@dzT5gLcm zf~2sg^{6Wp_GQ6m*q4feOo{jOJO)j?K?X?;w|CAzjY?A4BYX}nX?DA$V@(HBv9sLj z1zo6ZXW-)4<3-b&i;j=Jzog{#qDQ!N#}xylD3zcyY z{ln)-!CQeX8AJ?_Pt-kQM)1^+pm#apKx08tQnU^nwOb_9wU0KL?^tX44 z=C%Y2j$#nxBfSkbR*3wbjqTefW3vSs_xBDe)5shsO)`_%b?cu(Q%v$ySLY%cUf zHMY5szQn35N@mGtfD|+5cQWEH7@D&%PJ=@g0hWlJ#F<*h{D%aHbaP-qZ2%YYh3Q?3 z#5S=i1_mZa%NnO;r*$dYmO){>bt8Mz1N{b9Rb5@tqps@p`bw(e%GDJ;NWd48{pLuv zpTXL;i$PMA%h{{0>o`H&?cpU^u6;$<=Oy{AmkonGwZ5juZ45F~edkhfGwqQys ze_0Z>Ocvc#tndM&lGG5RMPcwb{%~nBz@%{nW&o5~!~SHi?QAU>o`^SbX-2}C=soZm zBU+FY#}PIAMx~C(=y{L~l9KB}sTIYC)#b&7o4ez0_GPX?pKBho-A56ss_v!L=p4l@JXT2mQndc8sAHWFX(gg~kg^h8slV8a{N$ct(5? 
zHHS^6o7?tO^f}fvD6eR(c-d>yAC8TX_YW(nqILYn7;{6u=7z@TH)zCbW5f$u+nM-z)Ay#d`3WKeq3z+b0i0g^I7Cd= za~#Q6VX}sX?Kzz1@@KyWp6NaeXj5F6KJNuexe?3iQ4Ef%s*-Y;@>J7VDYbKX%!zz& zTUI5LJxl>od5mVu_PlcRh@-)4a%any?Q1fMg*dL^x-EmF*8ZsK)n!@LTUC@RkZnbA zM==7V!g=VYM4Wah&)iMiRqcFEnhqwcEjZ=}an6co>mq4x>H+MY%qYH%uK(NT%7LK6 z){#T|_`nO@8^l%{ed>A!Np(9)W*#Uu+F9T@gQO_AsjAA{++08FJ*jLzpRLzxkGE(< zARDQRqS(+Nx)f+mG;Z5bNos7dLzlYZj1e)}tpH4GlMy)3&&p)#30$riLfXG4CS(uu zeZVutpk%s^`#ytTF-F?Fo>B~E15yf^T%{_MsH9R>N$UFQdhLFzUId&9=K5l`A8H>9 zlJd!~-+Z=(T0;Z3lw!ZF@IisN?+Uo+jabtvQk)vmHm&GedRB)*bw<<}0d@jV3L{`a zG^uAhmVpJxtII)(5DH7Luhd5h5EJNt7#i;#KnmrcB87vLl9yf|8~Ovy(9j?pK_=(U z#C={^Dm_eeB47?yfXMfxt_deS4>&1EPYNXnXaDAz3}HsBDF)~?Ga5?Z4WF8>G4@)> zwiERU#fI){jlqTk1v`CRtmlhMz;h>ZV`h!ral@+l`4#-Ibp_|np|g?FuTFT zRm12#7ZM_f;9hQ>5Tp8E@?K-N!Jsl@m86iBq!=JoD$!O2wk015_ezS#f|aBI-?5Su z9pBNb-nk&u9~mU|d08A7cqOSzI&R!?%>XEhFD_5LxNJIe+49lr3oT%)5jJ$*(5mjw zlzqrT3qyx23=Nm5k`#_Wsc0RI2@fHx<(L2*qoythz(j;Iz|QGCFj){2V4)BR-RpvH zoU+72{JF#wC^e4@Ozd+3k{#~=T9g(^ja)H6|2vq43l_7NVpI~54j=_1Knmdu)6BT- zf)*SF_RK<;gOXGUaI6cp+m!0zqEZi(HNBHkzEw)SEed#BN_b0;eM1M+n;-4k2I4?o zqyxxvJX^MG*|KH(F@m;XA7TcQ%9icN$Pd!;*8KM8_F0q9b$Q904|63c${^~hDmO|= zX3TE}B`Mx37=rmRu0|2|O6Jtivvhz2cKe&d;c(lw?JGKNnxQt>GAs#3`?hOuG)sac+=J_Uxw z@$R7(IWngQ&JRCgJzEm^wpq7#0FmMW#?#H^{ZeVAioPt%wyvvYRaeKdDi2^uI-mgw zomHArUnoclk_`R}TW)Z1*@5`KNwfE)PK5Iz>-UsdNvdZc09REEs$QE_({}6H0i^hm z-F(){6*8Yzi8X;W!H6A zUDsdgUSHAmSR;d^a1>(*jLP`_R_Am*jVk90x2I1$2gM7ca?%>@un8Eg!Q<;k<2EHy z^Z^G1z*H3Ik0T#k)f0sUNok{*ahX9ZN3*4?d9A z6O?X?E5)T_)JPVX`o-=0#h`E@{)7oHSTs8g8p*K%#fB?x zERamPsx>`FUeFr4Da(STZM_h_3`#QD%v+6Udf^t6EsmI&=&nybK59(ysz@!a@B9fTEeL9vn) z-OoP~v(byT>q`1<)=H|XD0|-G&{fr)wAPPqEi%zK&SUtKm8OQ<^gFY1=bEm=CEdOy zAM*{xU9afcd_$N0l~t0WeA753DF$nzfUyxMCQ}`KR-(Fx0UQ-66^uhZQBaF>4lyAh zjCmtcRmiCmjg|No6)WRu5$$4z{X!gDs95Q?C{$Zj<%#0YBdy_wq9}IscWwwi0h2u_ z)kjKFLS&#+wrtPIW(h5(PAFTpY+s3Wy^fWn7<|B+6Wg+^F6h`OMY$HpilPJtNyz|? 
z5`d|4#Jt!7IF)8i_XQx1qbBu>aX&(MCYDLbWUSXFIps9;^R?R)Y!#>3=vpfsK#IQI zpweuEu|Y{n>xOcfW7l($C!Tx`$PnX+Yj(rD~>kd!AIVZynh zD2pwP!vhVclU0&JqeGDpg2FpY72F`I({V0pLhaA*9b(7>;ypwFmte zQ;&r-B*{g@G}?esk6s!llSLmHjQ!}clprY=6SV?W*Hyh*(*zVlZ)l7!$VyV#ez5&e zB`JTlQunKV_r8-Be8X*2u9zbx7VBff z=2BQe$3mExFk69Jj3@QBc-ugDAcuWe43r8CEMc3&H~Gp4#Npe3WK<9dpC>4~oP|J~ z!7$+rb}(*xz?sV2xsAJp&TAi9j0!UtG1`F(bcSGr3wpIip@fE-zIF$ioHk%@S>kXQ z17bMyzwhal-_g?H-))#OVtR0Cvd=L3)H5zD`ZSK9YXnn-<`faJ_Xajr z;kCfHs7s-gl7c}}eW{Q^Qm0jkO`&ivK|Ipm?pa-i61z9FUVT?6+howYvF}mn8GZhF z*i^F3D@id(>IEGy>3Bug@~WqUe9~2;@$%H0D+WqwUccxt;LgKQ(O{8AF!dQqiW8Uu zO-^(B0F+9o3q>k?gd**#!kOM%%r?F7*V*!q6DW6>z7bd(aa554kt#{W;8ZbP{-cdR z6w}*cRCo#%ikNrSt4P5Jl!`MS7$kqknVL#qW7v8D40=;87EI&6Stt-INzu~7Xk6%N zv1uzQkE|fIt%`@6O?6in@-6wIHvsZ`Ddihg!1uMpH%i$glPS)5jx!T~S7wk@wrtt5 zWy|)XMQ=k0(dXPg+fR`vh|jo|^c0v&+}_+QH5XP=$hxYki$bY`1ZWu~<{=ffr&z43YfEI4hx?c@tbLN)Q1r zN6N%dt}we@Os+HBT`x~{BRIlV+fk7+4S)f14^VpZ&!+84dv4>gWJ z+!yvxLJPOY;~`FIrlJ~a0+?c86#2Yfv+71cbE59MzHgf1qF-zK92!r8?vI+-M%5Hw zvsc**efYXNx!?OPuYy<81J}LQmu=U*Xqx7mi)mMT$9z5Wst8Uyy$iF|FB*6p9YP()GZC5r;y8y57d-9Ky>9~io6Ug*mXlZoa+;gR z4T*7=(xRf2OUMV1RMAycbz*?j<;BIWs!CeN;UGAV5ztX?)GH;Yc}yOs*|I&?vezXy7tht4aY+dH#gUN z39_R&sHb?T-)z=qy* zv03Z`M1&%|?}k=UZLHt9%o7qYA}&`<7g7=kt;ykIodHS-m!rE(C`nNGbJ=MYx-6?zT~%vWZ}1;QPx9w%**?mCC`ii0Q{lTu(O=cDzpTU~4vQ%x z`KnVV)SsYLYbkUu>Q0JHD?lgJ%5fJSCpZM05ulDc4qo^Waj`()&jUcpHRMK}#*l1G z4Z=oTm>957W^^9I!=uJ$80yoQFa!DyAUOdhz+C0#OqJ#XQk7q!26X;ql8u%aO&t|0 z>DS;wvqS^6SIN~pmy%?I<*=beIQTgVNgA2Zv;8Z- zt~^M}OFY`^^WXoy_NWjxHK>bH;uT8qKv!(n8oaRM1hwjEo_By6AXR7%P4gqdF@f8p z10Za;_@Z_SkiuEy4N0dCjJQS{^col%`HcriMTd4obKt>S!#JLAXTnEK+)v(j59j*LHm0meNmu1y>bmxX3H~| zz%g$*JnWF(a4(B=N>Y_FZC$`=UE#4%$bi2`dd@u^?`Sc4b8O+efBDyo$9R?b0ggA@ zr#Dz$*%Ocyt4MuA$1mt&T{BRM687uP;B}|*s_D%YIix}h%idSeZF}Z*4b$DV5Gzbs z4912mmt@E6(c=!8akIokij(hO!(&CZq+tPI9@<0<@i0}LM5lF~&WSO9K@>AqiJ*!Z zftdn)dWxzNUP03zaRdO#X1s|w9x(uO=)AK z(4u63lr|b=FGZ)ij^6yLEX%828&>VO06& z_x(lFwpXW9b8|c%Z|Qi|_xjQb1T$mcaOQMYinuO_OLSnK*awm^zO)k!3vj9A@A3Wv 
zZx})lI5Tce1@K^~Q2_+O3dEQsO|p`dDOA~)Ro$*u>(h3-J#IEDuY49j0x1LgGc^2x ze|zrBPN3Y2#~FygQVU~~he()eF{yRreUUAJx5y>8p~*P5pNGhN@m;)ZEH&=_S{ zVFHb( zF>Dtyk)koZ?se05-ANd;|Fzez_s;KiF3!nHQtz=ss@RzZpp+5=K*}jeNe__nU^NWw zXrb{7;=Z$FIz%d9?lCAL9?fW61)?i>&rC~4VGPupV`BozSH#g3)TB5rP~;oQ(e)M} zMPp0ToRmtc3+_py&%3J1l9k_z$Ok%R)7+f)Q`zn9i&)Xs=P{4WK7Ty5teUhcilSxk z#UL{soDEC|#Ef_j0|43i(L~X6OffLtP6$A$GjRz_oH3rcmE=T|AvOBvggQI!^|YRm zH`&*2%ci0=W23dcp>e$7?h1O`snF_?av&bh}Bc?#9L`f>BIO0Xvj0T_FMg^?I~6m*DnDWZTKCLnT*dc)@`Qlb75=M4am#9J78Rnc~YyCzl`R5TbQ)oR^0 zP1_s~$K&quYWwid{?>0EDGoH!5?@I}F_T%_B7mBxoGkuKXaQ1^`jc0FdtK!9+<7d4D6+hZv9~qm0vL1+XCfFdfa~ybSXWC!~ww z2}P-ykhAN#ba_1GXvVeo8XuFCWoat9rveL-Dpo~VdL=1ee<)lHB3rf}WFG~R`WJp< zS;FW)`5$>!+9a7b{vO?;suTlffFTM7=(a2Xi&7dcV6t?Yq!Fw%&>{F?pu7-Bn4Iad z7AAO95WqB?A3sz?e~LjCLWo4^C9eL4Zd`!@G!Z})lfqs63b@oz{7l!v*)s0GlxzNG z|I2j{3nnuxRPNLx+;4BN(i+xnFD?#^XzwIyIUrXgMHR*W&)%EH+O}-xK{cvov%1}! z?%w-4ch89(gF^&~NR%H)5duZB5`o|kKM_LwAVMId5Fy1*r^rep5y?cLL?((NOGtr; zJ_%p~g2XtDO=4{4+2J*Qw>$To-K=hQRf92VR?S&8*Ic_f_ndR@es9)Mow@ev*PK`oBQP2~E zO;i}$9l|3ik;A{oYM?MQ&^Q`{+&Ky3?Ky^?Rs=LLLZXN^Mr3yzLyM1HNv0ikP#S~{ z^(*?Dm7-WZ>Ny=hq9?zwU@0N?^jP!Tzb8d8F<`yfgd5^$wEExw`g3(zHmWj4X)ZS4 z;t+xA$N#UdCm-)ka#>=ssZFwLFex-9Us$f5oaFh_d6GO$rF@!DR{-ameQ~lc>yxr> zmbEtXnhQ)2Q@Z}jK4<;C?I0iHNIueGUiC69gJjBieQt`Y{kGY=(+$LJXDtMDT1Qyc(zX144Z&4E?kaYZ@rn zS>_7=DJVj{IR6z`-;-HV8^s-EqoimuC3dNj8zo^){fe%~g_Q7uZeP%AuIW8CG~f4$ zgld&w^UpqwSVf~`$<(GcwW&>QYE%1kZl36~DlQB})Rr1YPCoV2K5g7^Tcn9cj-Qvg zaig=TlB5lI`;8IKsVZ_(Gh@t_&c%-2rXXLqRI+6({_Lt91psfcB^He}-S8>bl9xvRdwT+p~*{i~Tpg z@r|acDo#o+$oHSqImwemrSo}~{O3RYmomDa!anK}OrjFKS}paz`ul$DjVeSRizkkZ z<9ol)YnVH)l4%;9OMmUZ{Tm=!{+g)&`q#deUtV3E(6u^Y1s0<%2F8% zcDvnUx^Da!gAWqBC(wfYY$%CcCASEQ27|mkjHw*=gK7?A#o;s^wE0`VdmY5uH|G^xn`>!z;llUP*{q!fvinB|%H zqksI*q^#5-rPB0VozG|b=@;K?{)ONFd(GII`IXOT{JjplcuyZetk)aywSV#rxmj-{ zolj^QEwwiCdc96Q{P3gYZogmFb-mPDFUdYFudc2ieel7DU!d1LD~pN?+E=XbWmr{$ zK~XKZQ#x=bj02)>sFAth91LyNz%&ik%+L~sWR3%nsQ*3?kW%8|X~SWf9|xdPY{#AC 
z!k|G{ifmqz6e)EYrzJhTq;p1NR(TS>MtF89fm3X7lV7M*2#)f5q799S1{i2!alFVK zX}q!$UCf3Z0yf3rv0p2Ly%!vgi$hS=c<|RV0%K6f-+jigf^P7UUU|Rs_~=;P{Q$Y`+~BbaEI-8Y z4j|Jb7be`AM7YbAr`v@=PZs0I09d_zbi9xTMAUG||mt@|U-$P5BGI`2RFN z@yEWglpss80Y#do*jZTZQToy$!uc|uNO+#zF zQ6kz*!;V-G+CbnCC^vHcrIb#YR>n5i7AZQyMI2C*j$J= z{cB|y@T3x{rE0-XbYDraOr=_)hnnMXqfL|NnFWe-XW|m_mCUUU*wm(WbKb|3jqqN9 zWhbYYaYwzN*oE(_oQiAH63fXpm~Poe>~Qw+E)JBLG0?N{`5_qDTe&IMaeMyu)rUO} zL@@?p=1fz;`GgsL>xI_lJ9fntn;qM>ZQHgwwr$&XI_UJR@1FaeasNTp7<=z` zt@+OROg8WwlLZqAL;1_tLK+%Qh@xa0vvh2&W%8fV*TX{g2oO0CcsM-9_3+GM>ToeZ zfNe+~6S#Ob`5S_wOdmU*zLa|r#hJ^LG9&Sju-%TpyPl)pGA}LS^CR24I8u+tD+Em) zBg47e;pe}#5KY%*&c3?lJ(ZSEimRXAYB>~|!Ba14fy0EfWBH*I-(gn%z)H^YaJO*p zqAaaqMo=qjF!cGrCK{Qp!HnB+bm)#FO?zcGC|b=XpQrK-p(op4p?{!#Y@YYV9^ilmjQJSG1Gh?L{hL>(NhK8qIaM`Cs4h4=Z`BMzzG3+yB&8FU^6lv6K zGCqUf6H5e=5Q}+eUN+yA_ooN3co%~n(s9YV9dbD2*t~tQD2Jb?1TT8|IB%;hI+)6j zUww18_(28(LN-h$dM}PErJIrG9#5WksUh8f0?bHcOtLC5QWvEXj8-I&q|{YPdY_oG zZ~oF-eA-jI$|7aS0FKa6hi(sb$@@Rk4)hiX$aV2NxMcLGi96)batj-4h}gG9S84VQaaBmxj_Up2FOc|tT{JI?E*;3;ztNLBg8ywDCx^)Wy= zhlB{L!3KTqtZ733WXy^LlamDINOH?{C>Dt?J2N`yl1mOnsEz);5oKy5TXIoTif-Gy zo&|GPhAUXRRZb}v&!hyE^frP@a`#Dd+LBa|5KQb^)$2HL?m1)3yuluAu08^Zi_Wl$ z{9|bsDr--e_4Ts#_4V^^qYkQTD<3;7W>zn!ld3MO+#-GM!@mRyF2@P4Ddw+#a@+(D*785@$D(HREb_2yBCuN?Yq znyGrzr#O_uyi1C5mRhLaHjFrcShCM;W% z9LK>Fi$5X@^9xEy3vx{>3{E!Ke;491Dm%2N4bPPKZfYyfu3jSR=-2(yRMRP+ zp~r|&O`7BD?dah1xqqO-h0IA6_fT7XKKJeQbN2B0dt=+?1-P8`SUjVu8P+lRuI*n2 zEg@_*Dnf0HhtG_6j3f$koHe?6-+;3d=JTM?e$2k7fCyHGR`m!iEk)MBKST1z(cSaM z9&#(Ra3Uh7bK)Ast8QVUPiCojBBeDX13|Bz@C2lf*2r*0=>AA5CGlN!zSDXc< zMk!#&sYqiI#J>L#uVO!Jlz%&2Ky<+RTF%TBB~hbq1}+f*WEEG}|GZuA+z*#ruG#ky zvrwguu_yGZ@ynj0oH|Fw0aju@K+leIdp~@>A+~zxwK&|`a=%8?>;(UVwHpIfuXqpa z`!gcD+6iSEw;hv>e|6 zEE=o{`Mk|Hfgkx;Fw*|HVC2zBq#N&-KuBZJR_NW8CzKE>lH`ZTgY>kf^()YTB3mEC z8kGf|K|yb2>5Kp7WE5f@=CoOr<@C^q&7Bw*SZ7Od>6q%PY}z~@08 zgc8#M3@S0}OD{zoE=nKJa0+eJ#!!g%Aa^qn)u0NlgdmcE(9dK6_hGz>L)uc|U8JW7 z<-myogm0xyZt4Z4&8}W)Dee{*v6qT^XCFanB~-r~AiK 
zEl^rZL>7LeqT9$Thm=kbkNf6kqmi~S0f?94Kh9R+RA65Uwm2aqfyra{`eJk37`c=a z3hu)b5HyTBfDGG5n-LOLxuGioKCI}vT{VHn%?k-lhhF>7J>Co6rQ&nb=X6SHHtBFi z8tNB1q*PQl4!~-Rfl1bMDp^>QJG}s%7yl__#l5hF*N$LtF9i8HN232itkpPgY3FzC z7H$R@FmIn!3OiFX5L;J)X+e|X#|$lSklvwX-y?}M9se0y|0Px*-T{pI?h#ZoVQcxC zuB?tE1SHu4rR!_8?US$PAWIlYD^>GED2h&v75C!yLyyU#_rTwk<}${EbaOXfhGJpJ z|5Ttsb9ajHFBz5V1O=^$bRhGbZ?)pkE|c(k7Ik;!st5KvsBtzg!K|3X>ES_sTkyFB z#qAOIP>jB75+e`MHeD?3Xb{@!1zS@&xgGUzY$XsG45@lyO%~3}u(Vqfv=pBiWGK-n zfU>C1UFBK4LB*Cfj*)l^FudkMsId@c1Yoc`f+PQ8q`0HNNQuBoLkE9p+_A91=R-mc zkcCfF9U_zaq8Vf2!?@s~`Q_Ob@vX)82RV!1>I)+!U-&B(wvGCR2Hf2w;9rdlE1S9J z8lLTXeBb_ZKux(YP)&?tISaxP$^lcuH^>0T0$~p^e#>L&NW)iSj5@tT^T&ZJZ{ko= z0WuF;p!zKI6#n~wYX!ya+mwAf9_x#-2LQ*LzvQo7M$tw`4o~rs{GL9>TJctBUS)1e z*)QFo{rQi1slxdiUhv@i|h@%6+G zPp+pQZbC_loK&Rc2|@%RxfF?reEN7 zi$;HG@~}i<({ri4jZXRTe6*_a>qayIe+pb zs$Z1Jn_~|9)6!2-y){f>wOIe7*PO<~>L)advBk*LHu=IBYeF)8%qUr1$`t3b)|r_c!1va)zoCo4q+cDZeA34 z0S;l}tM`*yS&Z0gBIFk?6Z~?P9or0_}Vr1grcF!AY92RW8TbE(^P|r7%UD&-nE)u7W1~UxA@lmlol+eWr z=yaB%%7hoBW@+k{nNc=&Ec{;aKV~gSMJ^m!*w-0 zUq8o@LA=n_Z%nPp7KwNB5XFq^LG`~qZ1RhO9=mK=j&cb%<`v2`Al=~RJo7O}N;{n( z_oc3!u0D7_kZ?4tnk}1OnQQnqMCd#cX;llAq8&{JqI51z#kroyr5Dq#7*5}!@0ClM zz;{T3Tfs@j+DTOVie&~#Uk8ANk9<$UB%@3Ut<@c;e|RxO?=^p+T&$bGR^~lO#L=cX0MH<4Mdrh7Gi4jN5yA}$CEMr#2vQr zv9v2TVVvEb-?dW6Z?d0Iz=6jcy1O^3s5BGNsar`?OU-vT=MxXl($|ay9)13&jBnqUcLk(3Z2fIcrzq36#|=KJC}& z_jXnNA{MGFtMoF^ONRSZ>=|l~*2Y68DsgsrHJdlm`E8phNVoxh$d#9fyC%+j>)3L+ zNP|o^l4Oui@#DVSLoJcM8U0csDs4%y7`8V>3Ng_LY&bOccLER#p`$Q>F5Kg>h_j0y zCXw$xhiL>~H-X*ygKj;DU5kAN4~z==Brrn<`~ilPLV=f=Fh^Z3X}OyGS2MKqISwWC z8H?-MA{xqz@4w1&W zSJywc2CBDoY zt)`tY`=u&CKWV9|x-4CkmBLz0`3ux91+uoL-t+pa40SPi7IGjh=iP&T0|{-&U%@Nf zn$+m&(dtGDv%wpmzh*{xkFRfcOek#;v~sFBkDZG&sK_6yKr_Pk9jTl&h1JABgH7-gG37@TdcLg_X+h$*e zzcf|V;;K4$EeM#hTM^v5=<-@xC+W6$IO*&)V!ej0sxrNL0}_xad`wpjSDV> z5PEFnX9b1BaPS;P?gw)LHV1W+GMymwBY>TUUS^SPb}`ZRcIOJjiPYtQ9ROv%l z=`{lz4lFPO7bUOpCAPa)I&1w??P!UZC}RAIIwq(gO*eZ+?CPSrdVoe+VteNhDzGQ4 zI%DfL?li_vzqRhRITEy_-1T6AyCiT%?0)av$L3(f~gB 
zY^ARU<2TrM7gH^AjX_-g6jLb?GhL0QW0$@pGS9aFI6+W}Q1w?XV}y~M1y;9<-%lml z)H z&CTMa#2|4QLHgxC%6Pwp3XxcKm9>_zy0>UdG0)7%i^eY%hyVDmX5c2YY28LbiM?W) zlcwsS#jO_NB=My<;vY>~LKZvM-LBP>C0elqea_Yi%X%5m8#7A0&2ob*jgxQOfRMr; zR7s<5&Ez2(;C&;;3W7{FG1fHxtVvF$8qQ1sR__f0fcb5k-yiWb$5GFuGX!TnS}S~J z2!ozD%TfHKcUN+7!h@>JvA0!~Dbr5Uhv|C0+(SJAw_peNsN2(B&_3?*Z z@vvA1SFyblv{eC!O~8ICc~w!Sut-G;WMT&>@VIVXyTzP|d$k%tNXuO-H?Kk@0#w2_ z5V1=z0;c89=vCK_c+))!rxv7imN87bTI>|1W70VwfVq<_j9bK%7M;j`5`RLMeCA92 zBSih2wIo-B#vqqN&*D%x47Wms?sNxq6`%IXg?7OUAd!5`ILnAaNGO*BOUORA=R`-& zelo;V6Huu@ra$(O@YkLvB<_?_M(H+ck>*%_aCd+FS`*(~r+XZ~GIaE^w0k|+bj`F% zVo+xPz}>#YC5~w(FAFIZ2h>5OvtyM$0DxcLSrobMBLv{`Cvx$*WX}wH>5Ni>KfR6p zqtzoJV3R@BskgalXgLeXxWVi_p28h*33&AsMt`13auEq}_+kPwCED2TP;kGYdgCGG zq%^#a9zw#481HO<3)0x^bum!?@L5734H>AA9=CN9`s3WFjN6iyeTC=|WA#3krutog zGV5tt940TkRnZ^2m|ol`b)Z@4!&UOK-YXtUhA+>NRQ@me^XS?O%{BV95@C@g7eOg8 zg7ZD)cZ|jr>Xz-Z;sRsiv9|o3TA~7?UtQ0kK|()vNH3!ZL{5Zezr}=k+dxtR10KqcT`{f3K<0VU9xGmJO`w^4+&VuUd!k|}42(F?k+Ip3LsxrDJ(`_l!rrLh(&SWo z4S#%73NKf2Ar(cYFvGp=55vQWEihY$xq`2~dQFJs+L2T${nsTx3khqm@1^lZama1# zpS&_1m_m0^xj|T?{2pVOYSsw)xQaZn`bagaAU}lXNpzpP{E8ewaJc_IowF%`DiIs@ zV33UpCA?Z=Gy zOt{KYO#~!0Pdrv{NB7BTXE>;Br?{{-H=k2CJ%f(!mlFy zGs0viIeZBcpn2$_26vyEK0*R?M5*yqi8485kHQjZzOO}C z@$QSs|NBlzFo{H8ltxdmigj|9__aA_=J^n2fXJaGKr-R(nRLi;^}UWds2J_kxiO>u z6+Y+YwcIa(ukN-?PC1V)0CISvO!lNt$o%9_-^=qCT=xX)i)$qz;v6~)Qvb9e<6+({ z9KTV%%H1%!_crE8r6)O~@73$qz%PL#{}FTr3Pv~*v+M@JSk-CE3pHta>BP&$#iX>` zp2wMDO3wcsP8yrHuL6EppruZeUvXBbw2|piUFpDkox=$PY(_LB=QX6_tD(lSDoeqv z)0U8wzi}RPkzd>?S0EbcpIRVviV^RQ|Zq%A_{5F4LV-*VoaxLbOzqMsW8N=8PAD%)}JQ&Jd2J+E&X4_21dY zpI<((>qZpB!QSDZzyH{am7tjG1S`730VJn(&(KM)Zzv%s%X`e=)ualXTZCi&&I*xvtRfy`M7+C$hsk+-Y^1|J=3rAPI_=RBHkjS z!+qr;OD2TiiN>H$+DikWl$%$8yFs_&?w@RL35*@aC0FfL^9+S4 z;wrQUxGXk5 zK#J4hNfWitU!G98({&p-vB--+RVt8>QhEKp&|9Bi(0?YirSs&5{Q+HJe@g zrSt=H0px*&O=g>k&2lvp0480m%Wm(^F-UskW9b6)sj-sXHZItAHroKeh+rJblnKY& zekS$$q7QCB9Y4_mje90R4Jk1ho7i3gzvGlY$J 
z+!YKy8?2wLi6eIivmx3uY3kMI+>655b8mv25`rMI8l7&ETUFzyk!G@fLhst?{Z#s8~Xv_KAP0+&1LhcSh52G(+kt*=i|$`J$EEQ#VfWyK@?oj(;Sae$dm>S zOG<}IdKS@BFLP*+Pr5AlPf^;^{6`F@#fz&5d+6)5)*VUJAKGCt=XgqZ4yUTZQQo?_ z84v&DT}cn~kfpqlnOF$4R&9B77uQLKz5A~te1gaDtHs6Aid_n?=aS0G9-S0yT$QE= zEX*GOt2QK0=q{<}Dh3WTyqE;ItzbOn&K(luhrVlSGE+D_V!1XLN|&~5)gePB z<)_~R2F$4`#p&azD|;ZhG3u>4eflA%$swf4X?c0MMV&2cgI_C=_M3VL0N9nRMMY;Y zAcE`g#yJiV30YsKZbgin-Jy~8djor6e70OgrU{7@D~t9ZHO$%;Q#k;IFCnBVhAs8r`H?0WB{#8Sd zV8ivaf#|8n)tEY&pflyx8Mzc6!d`OOYEYtmp>VF_8xXrI`j<85l8QkL*wx55X{m(e zWlE#IVmqs4hV^(Ak6C01PXhq!~xnK?8(g_|5ei&WwREK+4@64jYg$=EFp{69W+7|`Z zVCb72gPqspQ7h3=`%61#mS%63n>J|{B-884$GnoPaa(?kul5?IPL|+Dg@MFAb&G#n zb|3RBr;Erpuf2c2a!<_q@5zFmlao2wgixa;R+2;@VQkKINCnX8J>#a^0|5Me^Xose zo5G;vDL^Qt&A0~-qTY`&w>^m$c+>0;GB5iDju_MYK539HD z#5=oSRg+4gdy4d#0~%`TdAO<_48$-X>-iu%S=q-F&DX8 z2w4|g&SG&;&%u-iux0lOJQK~TA<^sO;XiuTDq4hQE!+`Z--M_4H%WZ3eP{Y}UisBw zrH|m4V~Iug_sfJ-z(jBII=fytt*01nrY=Z%4Z!>T{6y?t1!Afd%JoYk)|xP;8AFr| zu==6iq5D8Cud1|h8ZTLHPU;Vs#fk7_oX=qDFJ4^~7zzH;st3F)8}HA? zq0voXwrYz;MyNMzBiX{lSRg~DF*aTE3?ypaNF^ZPXbo&#(f!47XUTrMazz;RW_aF) z@+aO4TBF#YgI;1o1@_y!sJ)TwIZLWs`0APztQ#p|n6OcQ)h(GTAt~C-M!Yc2yA5`9 zBvpg*STgb8OT1U7B>8g^eflXHgLXi8hO79VQwy4;MY?JtQ%V7Jply+~5Efm!j3Z3# z7+rhehlPJXcJ~$L#ka*M=Ck2y5>@*K|KR@BdqJnn!~eVS1gUx5{b-30?j;Y5a=4Fg zpHK5|>qU%{J8w>c&$z$gpj{RX7R@i5s|h~`1@DuD0pPZ=LU(w(KH)e1u>^=W7*x-< z@IeCtSO?Q)>frkN(a(29YSF(6&U*9y82f3Ji_PZ)#0n`ll=OV*Gh+UC50>-HE2fdM3+S=O+Zlges$ISmec#_#&O&AC>!1WaFFA+^S1Ps zY<#WljuyT)Aj}k8@}{z+*;=6QY?$n8i^QZl*plC#zuOzH<(O~=p%rngme|73csf-E zpDexQ0k>8U8IaUqFEw<8!u@ZDXGM?^>lVKWk~6P%&H9graBjX&oK#tws`6P!nW`#? 
zhxhkJ0 zb?Bo}Cbx|~kY8=v@w#t}>pONOcb)BP2&@AGUxdB)gb$~_U;pIK(!0Dp60EthO!Pr- z@v?P~Ms~4AOkOx`6N3{i7RGU2X213s233XaDLLBr?$tuUQo~#y!-L zm@}25O^sy(!P@y4=+Q^b0+C4Px)dMx7$2xhNh?&e`lO^z!xbfXjO%&& zZ&l=SiQS2b+*hqno2!`}XX~q?@mMI4Ui+=TtRl?>2U{*^alk|k%FYzo$y3SVutSFm z5>-rOf~=5P?hv)oLazLPtIyF_yB7}+k&pSFy}LoeNptO!E%PAnhnl!*?!V)_FBvPx znfARRGT0gcO(w#J`3u2kvwzv9YC4Tke3QU#4aO(+o=>X3MgN}S(i^Oo7t0jvCa@06 zbP;b;oIZc}DM0;}IOeNDsMP=W{7<3#-xusf5-u}V4q?lBwW{v&`1;%1xVL~dsD8@^ z_wNmM4Vl?D@4Noaw6rt@_wN-|mhH=yjcFsMZNJP-e%nvLF;$d2jvtJ-#f=dO?cX zrxaFM++AtTG2$7<)EX1iz%$gec0I82b^KANp;xzxy;)z$Skpz{4xQyjf{ouUcz64+ zKs@!-Qy`18jE3fYy~%9)Khu2W-&EX}8oz0;Y15Anzq*5{ZPC1EC!9*+GhcC2aut3n zF;sa5+SJWo7Off6rmIiEh!4y99402~f+j8kH-T}@uDs%6tc-;SDSt%RR)jr9;L4p1 zAO{<0Y}=Yd(PRbvY!5E{6A*VX1K-`FR`94H((-MtQZ;_@L@t^mXDzx5lsG-L&zo9t zsQI0q`p$e_tbX0|=8V$k9WNWP*3Hb$RKE7y*}61pv6^djwyQZYK3$y~`!F*7zU}b- zhPH93^%&=KGW&J)4|IH$DnqE~LcQz#JkFEe`FG~w=+Q0T*6E{_{0qP;)uC%gg59~D zmfqfPKHYcfq}8oPRrB~u^Vp?`eX&vogcl+K%SP=1q?r*gV-3kCFu!~o^;O5C`L=Bq zyoN-)HYmV^;+;eijG9nrL0Qyyj~GLEBeEFLEUD$iWKpYktm!J}yO248#gK7*#`>+- zJ@6_b+WtRQ579?6&?vv%c(L03dWmNoc=>lG_&K=l+B7|Wo6ReP45%Y5=n^W;dh zpez?})^33|s?OCs~2O`sZn7)d|R#-*W$UOAFT4dUq zdGr))T6rp!g--fnzDY=`7M1E;FX@%g(MdX?YNpRl=ep+V6Lz@i+ga6m}*#KsI3n~Hpv zj!#r(-@0~QTwDNeU)NJf^qTkR5*+v;rZsuqb4q{1*D+K)=d9_qA!&Ar8;GOi7Y|3^ zV74EP({FSKC!T-+!AevJ&Zf7<$FQNok-?sTd9H77`hf!dp>P76cAi9P!L${zk&c5=U7T`DQdrPvODvcQATLY3v7!E#qXn~{P^@KL0BnRPynui6j1@PffnTG>5 zb>+ta<~?q&yycPNLu=83tDQ2CywA|)72?AHEVhb}DN6sUFz^&A;B*PV^gCv{Ss|h8 z_xEW?Rx0bAp*^r!IJM^@!Ud>3dS%B1h9nCKTChwQ`X#k9OZyhBthB<{Gsh@;|F{Cv#I z|1Q9P403riid`v(kfl2Uj=FwR=hzu?6M`BMSn4=uqF~H^Tu?nnS_gAY@qM4MFjpLv zT$()}$hNa`XssHy*bWEf6D<;?7UeEikPO1SOa_@_#HW9g9YFp7MF#%g;IwlMj zW(%tPSmyKfrG4mT(y_EBBN*xH?RTuPKzS$c|M5d{wQ#4KAV#&}Q>K(ybbziri^q=l z>D~`!m(Ql>efZAx(WmZS5og;)Nw+L{U!6IBc_Z;#Y=;Ao(tM@2D_hry5*erlYyaTg zzJ2XI;OkxIJGVjdzRkpWYx}3}o8e_DP|rRBxu>a>#!Kekd7DmGRkzJ!%;bz`Op17M zOsT_(IxwvFi`%tZ9n)B2Q~(_y;$^EJQe(kuhdX2mViv9oEj!D-s~{vDH83>+N#`u<>JS) 
zXOx59<;tZ`-W2@Kq1K@`p0;GKwkqfT>n+Cb%V{o6{kqf6ZC^dr9zRsq6FNh)CGiwjBmgj7UDV2wGLjEaNhlqU9XX8E zYcJsKw9O0!2X4LAstZr?nP$c6cGA`U&HlcK7ZEW^4~b5uQa*)&LVdN&Ht-q@*M%FW z+>1O+3ZB{7)J>nNLnLQKe&zZDMylaqM}*L4;p`-PWEZ<8HS5h)z#|@ z-#Uz-3Ha)!;=Mh6eEzz3Zu2um9!v%PAj~;mu?=WwtrE8}g7Zi8eMZ~Z2~w7I8?vA7 z96aY${0bPnKSV5IbsgF49v_@1IHe!$0~-JSr)RGBgy4NOW-QIk=*3V|!))H|Bm9K` zSuB7q8^0b{>>hnSZo1W50ov$PNZof8bCwoxN^W-1CCLa^)#ATP`&e>A0lIH2B4BI9 z1y3MKZ|jK;g(AwfH&L-y`cCf*P1X@#UMze;QllnT6f!&gqRw|2*LzBkQzil!Z(5N# zYX(n@U#~m2#WW_#SX;LWiafi_bD+arbLAlk9*+|!$9uwAz03LD3vZEoa&B$lN3+Q# zZd<3;ofGKHxBNE0cuyWo@2EZL3Y}LzMM5czs?=in8E$V(2_b4IRKvRF#5X}`od2bH(@hra~x59@PfjDzs} zT6ghdyFMdMXHrtiGmV10f~$0*n$14-(vjCng|wB8EZ%I-O~Z@Y>U+Yhe4xwjPAO*E zDpuK*Tj#oKcgFnhOk8o^BF?{E^i^)-YF(g%w=&Syni@q2kOxeN-6ANF#`7y{l~`ezSa9ufB(+G+4N!G_gbFf zweJ?rgTJ@eZQDTUo}??BLphwMVtfWUX?TP^72gqu3sG}S3Cm(CyQk=&d`)_Kt*llg zPYCVBsR>|xnUXfuf(QvdCj}-J{IvT#=}8NEhas)0(b0L~k#cWZ zP*99Yna^CMgoi)YlkWhU`TRhG?4%F_3t;L6jG@=~Ko64kMqlghcYQZSoAD4XOkdfc zoORjA@|))Vyv=<<;|7Us=j>c>b_Wi!*~_m&@CU6i-WcQ*%6!KQ=iUi4Z;WcZbKq07 z>LT?+mX)b}Eb{3S5IU_%%#^bz&wRiH;Zp0c!}c!ucPoR-qmCTvs3s$-^30Wg^eKf?#fE|o zHIzD~q=M35d5e|z*|p?B-@NP+Ialm%<}{G^z+3Y)*R@X=4i zI3WOr=CiA^Qh~rr-)X}7kIvW>J&+oPFg(1W7?$Sw5N-rSb{B&DxrKlxoM>JzHe>|3 z7Q-GautBl__B`A!3^!dY$3)f#AfZ>9>eyicNf@Cg)(NHOdeXyL=Nxv=p)>H1#Ts(- zP$lj-FZ=F+7TbTk>txTsvcoKaM)j9aO1@4UodgwH(N!-4p`m~mKqs>{}s+wB%OEckb zCEgaOy0g05vOkB!Ac6uBOe})gnY2=)o2FiuJG+$im4~tCcBv{h$C(F4>^y)W0b#OV zl_*viPejWLDxamAQoILK4}45vf$^_-LweK12O_ItV&>JzU+qN7uLlBjT4^c{15hO! z?(a5TF+dpc`YG~$>L~&tN?{SS4Fz9Txa5e!UEtkI`G!iq0uDy<@a^6Ri@~VE#IR>= z{;VyeK7ad#prbL3FQ*(04POuj}R1OM016X#CqRnE70RndKM_CI{?sJ+Oy-R?xN zQa(GY?nK|fVQ23TTPT={{zWoQx<ZuvG6p5}h}T7I}nvn-=_KoZix>rpND^pjCEOhp$M(xH6x*Hwv$5 z2D?z?mGHzxR_d2f>rwSzjmtV4C}jw^)XjSq`>~aoV`MPM!bBEZls7a{yV>msr7Q9w zH0k5ORMA_gSahDYx_p=7lXfbZB>v&)kDrsEuVmoJ>aW~Wbe))mvVc<<4|!M|{XK<$TL6+#2N(1^uxR<6q$q zxfJ68Eb~hN7#|;d>HByRwKq5*{^Up#DIKT~%^EgQ%*J0BXdn#4AD$gI5(t{*k^lR1 z^Q&TeE|Hrf<9fVpN)3aDwa1~QnaXa77F@fEbofC@LaK@=1caYZEJ4roxVtJ9Kp36? 
z);yrS@Klh5anO3)T|k9psW<@DT%mgkDg$=OTdK=kQ%nq!Q6e)oKP0eKJA&~-wf~?h z%OSv&EYQMC<=(iK5n%qEi$ zEdl;!8A@^qGVl&HI*qD4(N@=&56dR~inx_;~)+Rv-X8bxRT9)>JK(Jjp9`TlAjj ziAEm_&&8?>kryUWE<*HGC7!$bcDFyp6k7LCUARzuF8Qo5$@UJFu2K!Czq}sgFrB5u zsSqsBPyi)a7CN-U`2|BV``v%tGPzx%@z`5Pj>-HI$&#s7MK5^qPf1c4Qd(uttV zYuVDb#qAKTQh#A3ye<;bMU9h}p3ARGdS5U8{w+6QHt-ua-Jgw2rGqb##E0oWy7vEy zNM<@oXWTgw+b~HZKS<{dYn(wuXr^91K0Y>PPHQGmJ6ZnpY+7GYRlO{%P64ViX`(|0 z!@yM*^d|re^>vQ>rC^~L_QM9&^q{x6-o)|LgcXHgQ1+L?bnv{x@a_Uh$zNRX@=Eq# zLY5@T<}_Y-c6bb7ro@$!kruL*Ch4TIspvB-Y)nV?WC2cw{i-nO4LKPruLtp#x74kl z#(Vy(Az)~2xLdwVCju|a-utB<16jvYloL~zPuZhSHshsHqff6Mi%SfAt5uD~ddmz1 zaY$SSd;MM4JckqnvUDfvH3wOTIEZbFqW)OHeF*`H$uyJw0i{^3!Def?_#%qx+cBXy# zQWe%GJ&jKcOP;q1wyAMHAW@~9GC)bd%$@UtHSQ3`&IA+xJ8d5&*mSg2xyxd)s_Kln zEYlgge0p;Hjw5PfTz#2C$KioxqsOYr=gLziXKNq-F6JwI*Ntlb9}eTmp|K86+>ci+ zLfqsiJ!yKUHwiPpsS3!*oP}kcXG=HhqA|7a_;AA^OL9f{P#q>hG~2n{9ftPH*{e8+ zi5NMaSnZpV^iBrevhKzhrKR>;FoXGf9&j*sdo*|-pT=)fX9v$>Nvw(phc>D_8bQR> zoPWf}0&{ix>|C8sQ&jrsf!}fj`9AvAI~e8}q~$`E>gMpS*zY83GO>7lViGG8Fc=jh zV`VR69?&$2K+M8@7VtBSOVRVCp#ASCRL6JW+L*XU-!Kki=qp?fcTd;7SP;T}x5Mm(NJ*;%`Ehh zNzp?A-VB2Z{yH8?xPph7Tk%qc!yS=SUWZ70VkZ})OffyN%7OLT^?s1%UgYh@{#xu& zBfkxiVG0JXEhspzIi;4C->eV+N_CWQYlJu`Nxop4T#%s-H&=~E8s?DDBuz1SHt@pX z^a-&9vHB}q-fXb*-vahu39d7DU3FMlw&=wzE@A{Ide2n2GnYAL#{{xhv0XZ*h3I1u z$3tlsF?gHT*tkl0REXe~i4Hj+pe)_UBxb_MzQjinf=5SG!V#K3{K&iv%Sd=h==S=a zNa_cX(84}%01%_hzvP{qS@UdCqj;wmuL4&xKoPpS@zmNB0G`f8O7T;^LIVgM@v=<< z_3abcK27ZFKWFK-okE?u$1<$9^o;op2vF`L+bMS_aRM*Nu}9j-GpP9N z2guqOjDML4GXxdgF&USq(=6s-fz#=W zCk5j*>GSsQ5H{y%48)?0fDv!Ai1t?q_Mn~-dlLUHG34RJFZaT|@RK3k^|S|&vJhtm zoKa?pK~IrrDDvpkOY{8ey_)Piq>BtdU@xljC1+d{URX|pKPB7Dj3~}2g=ya?0VM*q z86;bTLiEazAeFeyZ~~{8`D{1*6WX^|oE6JKnYQmZ${U7OjGqYNpOvs2Ui(bX0caql z8zw$*)!dG>T`ve(AE_U!AOrymqMtqx-w_E>@q|xqIqF!)`)()Wb7c|pjkWucv->@B z=UppsZC2U(=9F3XS{Z7aOe&I_2?r^@@GJctvk_gvd`6(4>QAsWOcmg}CTFGP&;SW1 z({+cx$*IS81Bw7^mJL-)Qf!ldD9NLJ>c_%T6?9(y$#7g8P@&o9YogvchbZ7d7k902 z#g>MBcdko6p2ay#k+$LcG5Gw^AhtepDe@45>~N;{S>JS`cR73`Au~mgk_XB?jp}*f 
zloTmO{DE*c886fXnTsy;atSg`c??@8iR%{ofj9rez91SKuX7_HKNHe#l?&^Q1~=27o!f?i)nD&#r1z~ z6-IPV8y#^RtsCfds^&wzx%*sw{$1ytqkZDu>~S}@Zj<~T1>kXLUlG=f=?OGY8D48rJ7Pb zev^GBF?(h zusA0oeDi4!&KR;3wI%sT&(xOlwq+v*Z*ts94>_;gL7fA5?=^&{RGhAE!V{zlnIDK4#kOqYe5QxYPnx2wn4d0RsF&z=$!{ zRcQxq|MuGS@#{(GnF4@SNUF)PMU3XaFg?m5M{+MKPDrbf?QR(ocn`=yAbe-k_ zU;8r16#mtQ@Aa;27sRt^7!h@gW)+efK;`Y5)Ut_3ma~{)4C_DU0AN8Vw;Gju*MOLv=)8d9j8+=HK2 z>8wdD(@2k}62kE+A#VY#;J#W%yrkCzpT>yOC~IFOe7b1HV;t=|?-h0Kf+#D;?atl` z{=GI_j8`;nQctcRr=y7!wCqrRlyH4dRKumfH0lhfs{6}h-WGPk`L>*qB7V$&wNhGd z`-(lBS<7k>s%v5|s4?g~2~%)v(`+W;4sp&v=Sjfa+0kL*lu7iFnXJxTDBFD(-4-rN zOXYcm<{a#X$+u1~2GrTFgIvxh9(-GpKnSdDG}JRUBIDeHKD5KksV~cq$CB)n(70)Okj|T@tb|fdbeQ%B znsSY@wlX8|74{bm>fcfG_#l4wqD3qoztI+X2c=nove(PQF=wZ@h{&Y`ylrE=Xyw=u zS=KT6K8d)y3X`djv zc?5C8@S$_o>dO^$m(f06v4!$nE#clY0xJ>AMR!Csp*8MnmVA)j5uzrPiWVMbQfH%=ZjBvX_CyaA%7kJ|_t8K3pMO{*r$ z`%s{9g}?`3^7t0$--|!@zwdDwn=5dqtW;*WMaHvwT#@T)nbs-!SZ9ia<(Oa zo4`b&@Bx5;I_){@5m)do`&d$qlmVnQv_F=u9|J<&ovT5Gaz7TqTNEMe?MP(a`}qp# zO6HaHxGzLhV7(qUtf}GblyiZ_U0%%}GIQunrj;~~8}LJ5*kX_ZABbyB(8A?tcUbWo zK%ggRF)1NANsEh(BTPYPfg|zjs~Z0RE;bId4!gQH)t^msF_ht6B6XtgzFUFrRj!;2 zq~0&@-U!n_b-6$8+dgkoJG)%r3D}^F{$B-a-wO}fuR=@I`pOJxT>0E;3+5TPX4u{x zNLIosM^>~Y;SlBd>dC)^sHHD6#G*ZiKF6YzoUup`e>(-A1pu9$Py;+)$Nef89Ls{S zh<;F8`4o`v1IZuNMRyz1@}4mT)jI@b3?xe7w5hDm70e9^R>>BOs9xFTuLtumg0BgT zlaAjHhVlX>mj68Iw%W{^_>&sthbdm{{9kZ80~OZ|nh~F4UY|9hq9!8bV@wpoG8`h~ zUef4L`b#)UANWfx8Ov+OEOM=b6~}IRMKy1+ou8jN z%N}kt=y$XuI!p)j43vS3JWVJgp`%1NJ#cN62Y8~Ag4_jiLBJynh93ev1bA_#{ANuf zKxT9zdzw>vn99~L%x1;#!3fBq1Y8DhG+`5&IUJfpt@_5|kYO;vsb_~uTVgbJ^zGY^ z?jZ-)eM7? 
zKj0RifMAM_%#Ky0y<|o+P&sriH!xj!>bMc2WF=N^tY*om&O<~AC%+)|Q^xFpk-=`E z?>~lazajWx@Srk6q|LiDhc>F;dpEr)ZTGZ5g@K>D-(sfK8JFW9l-t#YicMp5%KP3e!xg?sv{Or@N)C(Nb3%!d|_z4*Oi8TRR!&RKO`aa z`?g8rdL(~#Xhqe~i~sG*IN@v0ZIYKlseF0Jm8Z-Dsdg;@OOZbo#>)Gl%5r);@9jH| z{m}!rAQNp6+l{!ootob}#?&|$q z_Wq2nct}Ob+o&LVkq@rT2n#|vQk`Q#()TI`ok&Hh3{GOk$30(aAfyckb-FCEAV~lV+Hm?(M!gt@FA@3x= zhjPUm;^3&z5<3zncD~Dhu1dvIi~DT2%^JmRC4tYHO}Y&Zmw!*^g8wzEG%(PM@P8Zq zW(TYlsU~&er;**%qX>Tr{ZYQN{Xcf)Frv6=(`L?GFsdrntb9dU&a$pnR9keLs0504 zU!tQ*-#iw-U^VvhZh07WT8_H5D{Yo9O@T@u;Ws{f^_X43;Z?CS)fJk6Q7soNZm+;%9vsF(kDu}M`20JXAN@J#ee@PE z6AU#_H@>XB`_K6RGmUQa_Qk2Fv9_ckJrPvI_Tj@XMZ<;;3WO2?y2)~}7dG?Q*Ncm4 zkKgJzSb!M~E%UX*2jPf=@xUb*a1BA2ZIZPag<+t)fHBEbo&h|XCKQ+Ovv4ONKw31G zYyA9>Ww#rBqBw)X8%AD)qyMY_=V{sPx!Lf8efbRHVrAxhauW9S*zgPe{^h$Lkil?C zBCpz_+R`{X`ZBMps(AfWKgu=?kV*bX9}T{KMdW_6`Wv{0df~T0g8RY>8;YrvZgzZS z#`@nOezoP@#+WS*U6_&Q2>6op+8xtvk5-{axhG~Yo)HH zqj8y5`Un)@TdLwVP^l=)ZsBJyS>X!jL|2k{pY}&U;S-^V3fM9yO*!-^)s-X9Cy^u4 z3WwyyYY;fGx>n4U6&u62U6~9S1p&W$cPncBzb;!`!%l!~EX??+amAA6?jpBtlQ}#=ithu(pXfFNLs8|cYul>_>sX(k7eDdrDh=%E;H;im zy}HB(BuZBRal~K5Z<=*8#xGnT$uYOV25s7V^Zd)wT)SRg$v2B6d)j&tY$O32;`yYH zE^LotXppP*1^Uk50ZsCGwqyRpc?PJkv4K_(uj-g6>{9UEb9auY5vC2$*LeSK=%C1S zaqi%0&!c3H*g-{e%oX%+|IqKf^gvsj;QL>@)1ciIZ)6(UB#KT#Z+b#wT2ie!0A~U~ zV5SFH^T;$k`(QfN!Y3Ku01qc{p~M0eBiK~$Go3OMnMW-P@IK>~S}lSWhk#2%Q;<+6 zv8~UZHcl5lNUj19N-BxJyjfJQ=a&p@c{y^vw}18!-|>`=M~LEv<+snG4=U)^F|$Cc zb!XslyM+F}1p5izI(W;~AE**G4oGFOMvKP=6j%Lfue85{8?`SF3GTGp(Y4JCQ?qid zXkJ2VsXqI{l%rbMU@~)~_Hyg?izs5N?cKNU^&4mmr}v8bOAdzR@~@=Gt0CVHc0M*0-^d0_FLca8`$AE)n8nj-sEf)o#=%U|ZwiKAMyteuu}npQdDsGaLbT#PZrZF_y`qnTznFE6ual&`HHIH2wnO z(ufwRBmu|3GY?d8zYsR6^|m^>Q*|9ivs<>)a^W;aha+Gf*MS8u#_Xq5pdvcG*( zOWdF$L6B*uAM3Xw{sxT(C4!Q{6YTWR?*|5qir}<;NVjdmhNH8QDhAxnpqWQMd5BNo zOO=B6ypqf@an*@WA$-o)mvAEyJQg&>8Vmv#(VZjfP`f}U3L^k4uC1&#;@ZXL>r!G~ zm~{i2eEbIDeW%*5t+z$Vn{4tmmR^3vxW2-IXI16xXHyPDR025mTG*t{fohzqBd=eD zPT~;>L4CSCR!F>efMoS{X4^s@dZT zzj}=NC^$M2+|*CSmFa3$S1tXIE!|fewQEiry&AO`GLn*#PCh^NY85mDEU}(W85yRA 
zm)R5b2l-~5xO4a4)~RRCTH10mwliC1g;6v^=rnVEVT1OXAI z-djn{Nhg>XDWG7e|4kCEPg#=QF-|U~*KEp%>oxY~L94qouf0tgrFs0H9dGWmK}T9j z6s(7bF>d91VVPI`kNI%5=K2~Gi#=n zm0gPHNH-Un?y|TvXWLeWW7*gao}e;HCSRxCoPg0}tnN^BbeN_;wC#tuy1J^w8Dp1` z5tUocsf0$JTzs9_0~#*~jclo7v^q8$()*9~=lI{}r(L@q!x-9Zs&*>9wp;Md$Gk(Q zZxdsr%kNs3hPTR!8Vqyg6LD=8)n7V*_4L0qY;2J#h*UMEsYZFcn*3z&O^|$Jp#C62DP~-;s*Ua)nKGwSnbdoO-Odu3pr@Y7Y7?qag zM}Wr68jtHEj}Ue266+(Y!unMoa(_Dg{hu^{HvGG9K6zVQ=5{pP0WR||_Tc7M^;Ju( zSL^-V9weJHZye{cy?=qAkn|(=$GPDII`Q!Tt`sdz+2F_@BL1QLwX%*WXXuY5`|Xe3&&VoiwP9x%kCW)#&L zgmLCNgl;O@4v?@r>B?A*1;YZn;G%h@Ly;kNj?1%C%R@dw|Ying8-60Z6P!`n^5aYMlo<~NWyLmn!j%>*Q zlZ{kOO%3{2PRIe*699sX_yF)9Ck@-ROf>bwxncle8( zM)c2dKSl4vnV8DgGf(X71__*?eKATxT16}81#Vn-!xfbuLTc^A2L=DDWF3IKbH1y3j!P&>pxjya1Qj zQQ(~3@H-JKZ9NMq;MoijNx}$NdQunxVq^^dkiTVq$Q4O4#-##pmz*$ZCF~HJOtHq) zrPMyobkYmzBXw88UIjy20_t-j3DQ zVmd<+GWc``OLKIP(nGs%3J5o)i_D9-=1P^ zn|JcPyK&ci*6;%JNe?TV{6>nG!@lZ$HPpj{kt6=($G{=93|FZyk;2M2j%<;ZpJ@iYCB{AZqqG@}G@(J~ z4zM8!LOv5PC`+j<0bX4WRY_JH}w`uj^t_MkIx4}Nf(z$CsW zMUe(c@n(^n;@->1WM>E$hTgx2-RbgSrjlX=%zrpdtE%m;jUkSyAAr6+mM%O*)h8j%=SxFFo-LiVy=}5f|!HcKN&2pgK)y zucd0j!*`ceD59S}#(WBbT#1>w-OHzI8ZHx$Dy32~CLlqt$g&0qj&TT9Mc<&W$*}K@ z`PPxKcQ~Z=m$SZllP~G9^Bg^B z0PoqhgTvNBXH`ajUb3fvwWlblr^wWB-n}qpC+qB%%^>Z#ryyUQ%fd^_RP5d}M?Te@ zjck5e0Y+K80uLM*h4UFQOB?QAF_}7L^yem>RyGu@{XMoyu;zE_>zX zwZ)*{r8mz#e|M&I7mp5T^Q%({@n>1z#)3fLt1Z<_c%Tjy;~oMt1zs%vAE%kXmQ#~0 z1`+emF4`{^V9@)xdKp@ge}XzLPrVH0tD9szl9zjA4$xbTJ7~4_QreB4o4$W+Z}WrZuDI#jpM6wUR%}UW+jZ1*mw0_kJY8h0 z1mjjOvrk`N_7uM5t^mE?kbg-qq(Ru?yvoMB$v`dA3eKeEDrkR8vE zzh}p3rL;{%W^QL-km#?2acH$8sinTfzTSuER=Zl}Fn-}MI?NJr9D@KsrUQw!^WF{9FY4MX zX${3a+Y9fj&ij=M(%-NiUE2j_22g^Pevp@651LHrBGb8w;r|O@wltzW%hLpPF25 ztMHCyka`K#V?@*%#pMCEJtVQGX)#C^sP_&64KNSsJsn@aVwH^kslQm1YL-z!9}TeBMUu$TXd>T;+OKK z^iq~1ub$ZJ3Ton@wMoFB-W`}mIFQ9_e0XE4v+TeiSe}{vxOneW{IN*#qhJ|kePWL7 zfN0*}qCodwIXYfjFkPslMARCnGCImN0i8Q8#q7nq08$f}FrwZkmSAfvc;)2r*v1QR zVusQUv#2VuxGfwXlywpz-Ygj;K0~W9e~srwy?kmP2<^*3M#Br`YEkK 
zikk=qA&@7%LL5%T>#ft2yqt|nP%nuq0aW@I9-Ts@pssFIp;|-?xD<9O7`phXu_(pl z9|N9_ZUN|>5bhQ7t<~nOYNe~O8-zdc&~qR*v4VPziC3@*8x5 zCIAzUvXsnRn3g`KRe`kH1PZ)TN_!m(UeClNoB*^$0-f4gIOM`V@5J=ZNkQQ($L5Z^P_px-qwRQ(FAtl^U*CAKTjNiA(<@ZAfEAvS|+3Y zFCG8T(k_dS8Hw@po>Y~yhJYHHylNQd3K_0>+n2|U8iwJ)4z%zNqkv>S^j|>I;E)!= z(f*{gG-#T@+>2V?r5VD?n?8$FH3~SjR(X!zrZrXsy$cZMZd*8=f9PX|V>YAU*IQCb zVxGynHQveHSjLCs9I*n6h^iJ*C$k&&l5cmLla zue(t@B@hi&qC``DS#!y~AEXyC5CBG9tZ*4B6wr3&*zXg65M;~R6A^b$Z2XS62YI28 z{eiHXunAB-av=37Lo*FzJg@}h%%MZbdYner-`1z->Rp*_-TzM>BpDbHBG38-u#!30 zlPm*JZ|&vp#TNXexD?-7@{%g*+{i>VJWFN>c-GoRkd+xLUrmkU>xqhQ*MF`y*nI=N z(a5{^6yo9|I>Wiw+H&zT>9Z^BQ$#1X% zL_P1wS-Z4Is7jVSiUy{~D#gncT;pK^e__3`phkcc88UX5@{)QE!+K~14QzFAt!fT~ ztO55BdL~;rD1=oeowN~kd`)?IcS&EXjQ5Us0)Hu3Jwpx0#V+IM2-3q+vLh z8Am?t(j>;(R%_WCV3Dt+hDytGe4#*bdumA^SDq1z^pJiHH8Cnz{asoP~{CNS@Ka8g|wL!ZUBYf^W2!^0wYl z?{fmyZzMbN7*J`&_gB1!VeSBzuJQCIe!FOtZKs`hm$1ie&PQrogVm3o-FWgwH~&F; zUw)lkXVDh8b=J=`ju_omPWYIoh3YqrHw_bxTH{!T>6BoQM?XHtZY+WK1bpA`x zxLr~UZ^9Fy1Zw#aUz}l|TO44OK`yh+XgdMzvJ_7U`&Ga6YHO8|Q& z{_}7U{l$lxITonmOoeCqt3WbD!N|#-)ZG>t*}mO{1sBMRu;aK32o)h&yk3`GUMZ31 z4>(&-(`~ywXC}}qG&9Ie9DI_fw6>jfSB?ZZ^`AfQEwITz3K2-8=FLU@sVCp<&F#RNMnnJq)N%>PL{POB$Eczq@s6Tk&!+NYCZ6 z6!ka4i(3U{b8kOX5@lG9!QGBkM54e2q#%DEw+q7O*B?8(*Dv?)~&_#qZR z${Lai6;h`d1I18vF(uQ|rEPA3%|#Os z9iKaO}`>jBd`V*~p63-yIMD5}!u5az0#9;BY=8Z_R zrWyJ!x9T*5z$h2avCYe^a`FZRW&xZP(O@VW5NM^ zZ*U?FZHRi1AFn(E-w|E^PbDlqa&^v_o*+6oZ5+6i6j|~l%*VH5|6@mS1e9a zLl>(;*Ulo1oZnckQC134Kw`rQ!Z*KC9bvczjE@~9?h+)9eGlJvg_rscKKu6Xa>ZtizHCHD22fAJ>%#g=4tSGc z#4E91sa06qt5>dll(0&V&=`q5-kCJZ$9-MvC^J{2sTcv-u(z$Iw^1^r+A$IJK3egX zDfMwV-K2-xwo}5ZO=ovSGW|$!^6Z}Gt_iv;uFF2jTtfvxOD6kU-@hKs3x3SlW_wDH zPRP+5$7OCd%wSTG87i27v{|j8K{uU%v@!wX3(OcA72>ylPjXLfMYxnR8ajBeg^4Mo zIymUVLQjc4D?QLe%pHKH;Ch~Y!=^Zgx;mCkFx&HxM8qPJ4T9b%QsF_U)I6v--0JYO z7>WTI6bY}H_gN;hTSJeraB}TlKjC|zvz3)9-RD_?bEQxD5*_rZwS+t+X zL~a2wI116Z?@YvT@DHXS&%XQAp_XZEkn9a3xDG zhuK?8qaLM+X?undE!KS0uaYp%_Z{~2LSw_l+avC%Wil8)JN3%2HXO6SJ8YB|u!zK< 
zQb0yvi72;vIbomz=^1rZW)dBg96&&S3TG}d-w`EEcav}f%k)4mYPSTkr0v#N%zaZw zk$L!T)41ud+PcK)0IR+ASA9I-I^T25(iy9R;CT}78QH$u;ec;5kIc>H(o|W8ynKgd zPJlSYbN4ENPDbiu7j0h)(+Oj1WCGQR#?@! zhFT%Q)WUAmPu$r87H&T9sxcz=wm`S4arht!-xQ&W!{;SFNziI~qT6En>d8&rU~#Io za^hg_slu$7tRvh(LR>wd-m6TuIXS7vg<64$p_8lZMmRDy16=0#!x&-KUMX!pt;_Q- zOJBAAVD&};Irx6F9xxipT6dDrx16{)gGDz(kr{g)X}83Kh%?=_27$GKfXa`*(B))`V>4ToB&mj$}4ft3?3Ct)yv!A8U|FHmwvB1?#^7P-lAh0 zhupy0i|s#e&Z9=AJ1uUHKxJ@o9tHYB4=#=XXr~|oyfpNct$Dm;)55;ONw2;4CWVe zgjK5Kd==O98* zM$Ci(l7{tHYGB>(KDW`@Kh|y}WZWTMd1~+dR3_|Ma@dqJ>TqV9)b}e^z0NR;j~SoB z4UCFm=-QPwt=o1WQZy97+8clw+uhd}omLt=UTt}aowiqh(UX6@tr`p-)Ye!oIM(tr z;nfSIGNafIcZj^eQ{m#rgie6dZ^TZY8K!MWl9K69QlVzN1xr?b)@H?-lXlFP3~o5=KQ8gCkL$H-5hrBxkMVyxNsH8v6%M zy0@=BX$1PC6qJ8r{N3UZQqXn4qoS5tZbA=1W!QT8f!N_q9MNt-)(;s*%f)szQX5qD zkAJ{49rc}JT7_%llfj7&K+K#DEu#~L?3nKxVge6GgNyYN{1s3`6`ih##PX19>5jd4 z@XzSJ%|(Gb7Q1mnCm(PL^#>!$$MprVD0>5(+@gzOfa%zcNWp13M$96ZvPP&%`OWM= zVht=YnqS2Ka5ne9803=ya<#$BP1&R)XFY>HS9JDo%$1(zzTYQ0yLnu*xffVmlO?ib zG$LKDg7o7sh>Bq#<3%Kfq$`QgfmHaw%Ku`s)>Jr~khA=jd?kVNWuFzzyRPRNlY*cI zK*-sn-`agfRPBhzU{I;o$BxGgLLV+IIF=|WFf1h03#ix!(bB*}=4UeDTAVV|vC}Xf zWm>E0gxHI(R#W)wFHQWrX$Uk@x=P>tG^Os9Fid)DhU&qbH`G=*Uf*(`!r~96M>Z=09u>i? ztO&Ef1I*pe(j!!VeTSSh4Do@HA1M0NE}7q;1Eh6$HRE86?(RInr2FHs6}%8f5wZJL z&SFpHH)P}uTC)?EWDbmV)A+Bo=`Z;4zcY58$glksHh&~*wrttus)$oHR{9VNgXf-BCVWWoF3@Hd6x+ zUTn&OghgPlqPR3oY@^U-^RUhG6|jT5%sFqcMKHu_IH*hL8i;*)N{xE$ib})TJl1F? zC4aiI#GlW*x>4+~2rXtJdX&*C0>$ZPwPG6Z)gdC>cx+O4w1m1y69f4vti!i#HEI8P zAnJ@H*V!{pS}LP7{00y24=rU$$M}cR_fsrEFEHMP|jBf)i3SRe3EeY{v z{U7@eQWuuJ*RhALP%_Udg5k~Pm;J*p_OI_#*T5X*Uay+u-#af~jV-q|#z3?};xANX z(v9zpVx>FBDOx5r2p+f5^mkL}6Csc?{g1k%ealn|f+JUuEC?Y)>2R1J9o)D+E9>+A zw@P|3@r|VKo;W8qbis@@WsbiR_6KdcJACgC-ZFPT*A)#H2_5@`iuSam)~i=-AEdV4 zn8_oCSHH+iYe*kmJt062wPAv)IEy&s5t!spchICrNzg$uH-jGC@YI5h$bdwR)z!D) zY(ShQK-PhmB1e!nEfJbR%B)<$15i)5&}b{p3an%`W?0t>)`QE> zd;P{cWbc+z*ED%T=jGgoI)KZgg5Y+Zs&fx0-FIw^gQs2R4o!Muy&BJ! 
zE?XuAd#M6T&%PGEQd&zwB@+oqQyecfy^PKai0O}+Am=qm1UC7wj}~R}lv^7O z(OkPB@y|7(-v<2okonE~Sj#$7m=_kSUXeqTXVlrDVvha|nZEKAx)XkC5LhVj_My#{v4n^kjl@`Ix>b*y(T>;A z1EN3?dGkA+k!T2w>S_GAHL8GVp1r}w>hsrT4p(t6y{t5c)mK#&48-HrWQ`?IJfsvf zL|a`pee@s8;i>TG7k3aA=rZ)gc3x2GQ5YX$Nz^w=7ohTZ%~DG1T_>7^k=J{KC@^oM5d09Ps!G8(qSE&Fc*pQ9TOBX9diaps=Z#Jz5d<6p7+j5M3eVyGI(**XOW5ocK80L>h8K$ zE&NbI6;<2wda$g^9CZ3_7yEygTD(NP zj7=zG0GaFA0o}$n$ylRxj}$CwwZh<(86*#-SPRkxgC}`%@N#*zZppo z3YL_E;K97;Bex*-8BdW=iWfB>Mg8HH&05hATn-GL7l0PCOtO-J2qQUr7&1jV?91a1 zk3KO>!{WdyR@DoD#l|%zR$IXM^--5q3^Eb`&?UmCTh%M;RHY!(Zg4L7+id(5cr7kx zbkm7C+%J0h8S#HqUrk0?O&O9`jj+6g>20e3pbZhvZd-aUAM+T%zngnso8Mm$8@@_^ zOnEsQ2mD_oyFGg^jXjhO)@+;$fHxAPwQEyS!EHLkHY&oP>Y$ZPix}YUEIgAS=!7Ob zpQs}C>C&VV`}Fw7pm_R>l}~a=Bqijf)Tgkbi9q*^l;T(g#vhs_ZpmS4xglh^KvPkY zid1K){v???bF7DATK-+jt=G&3?dt=g7S)LABvkP<&mg9bX`!k~HmU%5SJ!sk?BLq2 zsu*mcPFsJI3U7ARQ)~?Tev{7p`8TT*)zjo*$O23uqiu7a`+3eNX`yLo)yP2hJ-z#W z(1?BL8D?-x_(!~VlBA;!|h7$HNhTJ7FL7FJzrN`*}(mHQo{aP!1)>~ z@`>DiEy*_TC;VZ__TzBgOq&tc+Kr)`fuki|5fLab5FAtlxw8=yg%gSug(JzqkNl4g zDm_T8ge(Q?Ex(Q$L^2ef#EA{8Qg@1LYP}J!h|NQ;YsC0bUjl!mtj}0VLl-ImQs$WG z#+h+0>kZ(<$I2#vs`NCQI7n9)tOtU4qT-YQa=BB5^TA;?V%~xw%Hip=^tz&?vY=-G zQfvad#r=c0IdH|X;IwdqZ=aRyCBOOCb&K))(Els9^J8mu_kRxWfWKo-fUooodb_*c zRsj=Z^Cypq;$Qm}RvR_LQHC_r`>(aNag$gd-X3U+3TB_AcBRO;P&ES`F6+KB& zj}d3}Q?-UoVG-a6TxhF}5k81~b8`d#f?TXN|I*Dzpy=f79rd&M^Dg;1>i^hl>;JsA zJGA>D>o1$tov<&r!f!JSNV#xVe%4g!a0sv_PG! zwj1kjvy}2wi}9pwT)uqooqHFJoPzd(V!-EiZjsj;QmD>Y|0C;ivz_U$3(#jS!>j+u zT}9#v0mQ$RDxQhH-^H%-r-jpO61vNj8o|FRl*WL3Dy44G3gc4>t5m?Ul`*Sg#UIcG zEsVsFr~8or#sYCLR`JXxBAyxc=v6@j+(l}Nnp6Vqc*;wwG(<`5th1hzs+x3Q6wq=X z@*gcGwhB6mx8`*AZ)Ly?Xpdc11EY~xaXDzw*j?&&{7fnM7If&=`e+|?Fr6T*^7DE7 zsn9zGx2piyCp5~aeE7PU0fFh?=6x23j$!ZrlVRzk6ImjsoK)ixJT77GYpr)GHsomq zHwie%qrw&f;Zo)>XR_))m#ieQ`GPzyU%UEki>@95<=r3PCD?mUFiud!|8MK`gqtC? 
zP`k?9!r9=#%fS`b(7W_x_#un8baTMekuy-U9^TvfDyx_#14>rZB|o>7M&s&;-yZIt zqyXdB%#0!G3M8D72DJdQ(GqwsrJ3hP8|YW&&x|1>+=Zn%b#4-@1P2!?>RznfVPvUo zwK9#p%zNqci~Six{EH8;Xi|9u2I9i`_ZE#LaY9{N(!{yE6ZhVdt+yh^?iT;Ci;eg_f+ zcK*`Jb#QPP>V4MyS-xof-pXaPt}biIlwYsXK!2kt24`z5ASKy0Z6dP9fn|RqZEl48 zZ3{c+VXsi?E?V17*13ni4pw{C-9+rjX@@&^Sy`ibRakA+c+mnyQo=$21;<=DHY_*I zigqaPqoN65>UcU|ho~JmfK3V#&}NKl{u}Y^PXIy>^TXRjQAJ5aBRZZQhEmdOI?`=9 z%Dg#mhWve<^W5sX_EA#4^Lg$6;@E{sEgiM}limBpDOiZ^34QaQiV5&C_0x*E;(jIs z30wh6B-y-(yb6I|$Aq^>*TxBzGFt8LiLr$|eGG=2gptStR{%lzbDkYc%kos#2=p+N zBiQOtdPJyPjrDHG8Zz7Fp3;N;5lSBnP`UBP!*_nT2!5>_6u}I_TKQP->VDi~3c?6H z3b75;)@n?{uirva*cJ9n-b3LZ;0KDxzLHoyQ$8@GB!!zP=e7%T&MuhxV}f9URY$ z#aLNlEPwEutCRX44bIvSoejKt+L@Rn@gV&!KU zHT$^BuW`Lz|2f7RgtpD4vKoyb=wNkyquQxHFpgGwi1^OCz-w!xpaNr+g=Ts}9o3y% zw>PJS;{*asUgt)G@mvk^Pl%Y!Tq$aU1^#xG60H{*4xRB0E2>b7&qzh`2&FT^edO>O z(1b2&yeona;@V_|zf@>^QkH)qhgo(_U-?ogdCmt->LPPDtmaE6G<2|THwI%c>d+gwOE76&w2L@z+ic-AYcJ*mR-`by+JgSZ9(5ae z8~G(g6FS2jEn^FtX$+5@BzJg0I;7X&F(a8U1ytt0Lf< zxD&2y<*rC2et=Y};{w&pBa-6StPuQ+eN6UA5XU*6T6^xfeecbcq)$Q26--Ad=bzjCMlg=n9J zV7<|&l1oli8XtiRi6R9nE&z&UE$l#tV@>)%s@XdY`a1En;T*mZet1I&FH8=lOIJ>+ zIV_=@yzHpSNg(v$%2uHN>h|^Yw7R{$*=Tj#ZGQB#IFsN(m@IJh-iS0Qpo@3B7Z-;x zz>kx>1NTNsoa+8R0NFq$zfz!jCHh`5NK2?*+fJ}Q9AwvZ(Dl7!c2EY%8Dr%C@u&Z& zdG_pPb#-;sY&Pp=MXju=6`kXX-hZXEzA{$n*>EE1>hAXLX1_mPo!V~GE8Wl~D?RjM z4AUi{Aei^OXNm*Ez0bGtJs3-g4QPdtJsroWmP`P*K@Cd8Dgq*kD-T2?t~gm_vggz2 zz<@s(m4GBj;|dH5?Z8pOFgR+pYVN5|+|zw$7WN(WjQ|+nTn00ac3CX>+n!fGp)gY3 z6MmTMVqT=-iddPsQz|Y%Wd$ahH+Zi+aJ+PiMut_4O0Q~U=id>)lO#>fT-g~lzBp#Y zAYviBs(3mPBf=RQ2xcO+A_xKUJW?p7O8`{~Pew{}gDg81ASk)$xvR>mp?RgFF`(Qv z7&n`fkv3s(T;g8%JqtQuVOaTzmFCeYLGhmEH`@A&fm1Y>**$Iidd+{M?^E^mHC-tS zxiW<|Rv}8dwQanPBOTfp|AOdJ%LombD@R3gbHlndPE%1LA3tO`hs6_9^#{O-qS#pb zoW=x`Qzd4rnJ6z#)@T%Y_nPg~36R7;XMhyl%a&f>(^0IrcuVJdL+c_F8ojqMJAfk{ z*EdaVzHM9Qb8cl(g#35_NU^=gDr+mPG&5YcTC1Mcam|$?@!T}py5oQz?7Xb@B^J4Z z51#oUi908_0D(zgj-u+xGCx1JrFP;uFdrgLmU2rrHZUaTAes=(M`uoWvx24~55!T1 z=1}8^6$IH>Y|a%`QApZ~p6RS5y_3ZwMwSctBP- 
zLVQEdS9B|SnsaXGM=B2lqBRIsFjC&=LnlDeCvVq6L_X%R2g}J#{_#J6+p#rfXZ>(nilU`6)r+Zqo0_ zFB1RNKlq2`cDpGz>vhSlN7FQAQ`gkL>#D44YdnzMV0Uz{*+#19BGnE!WgoL0u-ff* z&ku*gb0&5;we2;1DOQhaIt#AWC9nNf)z;B!Mufj}`RwujK$ettd9EP%IR!+EiBBB) zvY6>J^Lb~P}GnL7yL~hCxnW!;@>?->(4D#I59QHsy>b|P$ zho-LH(!BSY`onv=89TZqN1o3_3Xu9N*YNWxW_V&V?b4REw52U=X|8?wYdY_YkExXZxPhMVM2bnoR{@ z7HfVi(iF`pIK^i{EMYYk9Y_}&6D#t8x8&>IeD(V6i?-`S?j^o%pI^`Uu-CrFkN?DD zub-Iz86?;b(RZ_fVOq2-K&mO3JxOX>zf5P~)Eiv&C|1<{*9vh3F7ZE0vhA=`3hPeey6y4i*qN96 z7VnI~2ZJ1t6({7ttV?1C>CAi|$&P*I%kpSQ$6JHE(HQi5W^`<{B18KGKT;+9xRCH8 zTBu*r7{1mhH@zuptz~VDENSo*9PgI?o(@K=6iEO(JaDQ)kebMdiAs@s3CH2?v5?$z zOaMZ}IgBSC`%8@zm|K=Yz&ASx;xUOI6D7?42y@S~?-Qx+h5HizFcda1h)treOktEP zviul(Dizs>4RgyG1z4hu>N7y9I101cAzt00dD%*HPiEk;km9&9;-C#2sY@KAaXlmh z!~&2Ck9io@VO)nXPHXFS*Xe6kj-t8rrl(CA+8c30Yw)!~aib7!H1hZs*R&?n7{{ig zjgBjsz?&>{uo__l#b2bx==))uTs1-@g&(Xn78fndrr+mm^hCN0BR!+M|6{@c2ovXF z%4|7&zk$gd#HHbcI2Ir#)WVnwP!GjoS#%*U7L3KRl6i8>8Ag{NeE%_nf;d-G9P+pr z6=96uCFpO0mG2z(F<{H(8;!k;9BiN=D-N_Bd0)`Jl(4S=?;3#nTEZQT(K|YpK~nGO zEbr(f@0o#{*=TDCak0e`h*;XvmbSE|E$zveml)ODUZ~JK>svtT=A2`v`bMl(XKID< zYy8Y78U+0Gx()aO??@8^d<+OJ84*B4C>E)nJD7gNEHHQ%7y~CDx!+^a5%{_~ZVZyb zZ!apSi&eVE;Atv83m@#=UyZo9T$E5+(9ScYg!9QsXq0;cSL@ehH7 zC`$~AH%{RVMg=AQ#lWwmBq#ubsuM!WdWdQNnorW@fT;;XNYv$FPD|c9wH%i!3MOll zeNjj*lIDWxzAWW|F6>>?H1}0i-qU#7(XHS4@fc#uF-Yo%?#B+dAMl5~GDeBp;g%K* zl4@yuov1JPtzvbON^8|4H$0-FOB?<`ZLZon! zEBeJl1*Y*@gak=!B}-V5g;n{4Li=2!EP7R1aDW2|G(}PNMOi2Y(|7<>w%Jsi6(5H% z_qk-hBWFTP$eCDSiq%z=RZ|%iR%lN0kL|v4d)kauTq%ZY886&9F{5EbMEFzp-& zcaFU{2SuGDYXUf<%M+Ow6R?U9Nepfpdvt>qYLS3Fy=TWDDLVQs-TOCm+FM}7JSmT5QFd3?SLb+Tml)nhw6e14@Ejz? 
zsgspb{XzBWL}R5lG^sF1Dwm^6YFGf0*m~O6@xnZf36MD_=1P1hoG}z;G+A??)I5L$ zhW>vRD3u*Q*6qyaoI@P|!r&X&}*q&69$Q6(!gKiXf*< zrcZ8+k?q7tv=d#6JzWC^Uvln=mKe?JOE1BKLp(rAXiN4mbl0I6Gf@hRK~i%*f36gi z^ev3%+?jba@1Fyy<9v7n6Xtw8>uEtUW+TZVJOk&ClR%@N!#0Fu+n;6&ucXVWw|d>v z1tlK^PRCQZ-|w5d`}++8X6SeJZZjP%@VxJ7ZB^XpJ#N=2)$s{vvGtp_ds>ji8Sp<} zztibd(EI804h0(w^bfhC^j}rc1Ku+{%Dz%a`YXMS`?%X%yFl>T&MHyYecxa81G$6R z(7sU&rq;&V0Ujh3T??2U7%(>xow*P3aWJ!@A!5d%hzY`D}YJC=M7TCFLz$|Jzm5+^*Km8yaU{(HhSnsfX37InjO57kt#u zFqU~?eqAh^r7i8!i1=*O2bQ+9r_$cNdlxH7ZD_lu?fJvQ{)TMDE!lW0GD$oivP^@b zaRLKHx4_ksaE1Z5IGi5lAYjrdrz4zH56lRzzz3i{{jx7$kLK(i}9# z_EWILZiZ{|Y_bpnE#Tk)!i4FxA;|nB%oct4u=INpkdA$BmSJu;!X{V{l7z*v;+j{l4#Y7_YJV)@M6tao)lCPOk?%!K+<<0{1B{ebIiv7x8lr z;uM9@b;-FJUck^a6=18dJ!!n6{gxw)pluevGGTEJiIRbZGsu_Q)T~`aM~xYX0>}`N z7h_VuJgU`+2nZCGbu%U`o}kw})`X;iIe-YnfaCKd_y}3}DPl%)$)#y@FPIhfRv-@h z0S^_bBqB&fl+%W&M!isE#7T{-*2U$d(O9*uEz)0i8n1hW`&Q#YDRH1Tu~g=s-o2+| z-D9^sM2|s?puMHJL?#=`L5}sx@58pCYq>>dWN(cTbj`(cW8irq<#V#GUr;}|HbSm@ zl$AD6QJ*L+ULPTX?BM)#4N=kq#snWk3+tFz90Q~n8RJGoz??jwhw-T$Y}cPtn8V`0VK7K z$2VQ-o7Uj7gTm*>UOzu6d`5HX^WIoc)J7p}c&}+sV~ig+blq$1)?(VAh%vMf-|W$( zj!pB31uiL<@&T?RGi8P|yn>_EA4^dS z5pm&(0lX!|gdBL1a7IWACvZ-}7ROJ37%&wj`W21p$tkaG?e26XL!Koj( z7Lt2CDIuD1>mexAWLE!JSZ90|;XC)eF6o$^t%C|rin2tJ2K`;~gugtqKayWuNO@J3 z#Z6t6M;eK>lDY(Pjx9wr18_tXG`Up&gO>;7$daAD^Fyy9o+%<6)x^$Sz(`9f14Sjr zMun3Aqbw1K(AgNTU`nO*>eO}?AjJwMtQ;Uh@)#iytsLptj>`yrEun2j>z)At7T(0* zC`>?6I@(n9I0jLT`yp=a6~7!{N@4&^*@8gpvxkj~YrR?xmL4!wc5PePeb<%6p>UOn zMN!c)jtB)E(Gka0=rdMy+^VOo>-x3UrlB8FcuyI|#J3lJ&*MQ6;fIIG3_j-w*oXmA zGAm9+=S$|;4TvIE0661G13n_3YCO{0jtI`cF`!)rZ5mue2O{crxcKrDjLQ8HwN_Nsu(yq<4|0S zaVXwRygbh^D2l$IEx6XkNLE*I#Uw>F00r!GX#r6q7z*6{1_>ZE**p9pvlKH>!Lk+e zW4<2Eba~$iub4Y8ZrY2q7F2CkMJHh?-^G(Bu(M^H7kNK z(E31!?`SQ0Pj7rnef>3EzXv*&RVj{jQ}-{QKM&xym=kJ=-F;+h>N<>&4_oZ#cM&CCDO?Yb5yEMdL9fl zr|2?K_Q~?kDdsVaZTtQ|8WuDcD=?)1<#`o&PC>Gj)L^fHx7m1afuB4+xCn`h>S)jN zU^FFo(c8hQiANqp7zS^@p|l)<_BqO?Y3Q~p9%~OYrdG$}v1ZVyRgn@F8)d8dCX@Up 
z9t@bX6}HpvVNIA21xihCLa}Nvd(&R5QC#2>Cbb}=|Sl<0!vwtl(E#3c8*}c zFii{W%ZRQgqoZJul5FT7tBOnSV1SfDV-(Hl+CE030uIXfz4Cj@AcB?xg8vD!bw)R_wjRsoG}C&QsiS-2+Y0lz@QM;12Yi}wDPeqXS%Bz==LohRCmHr-ei zsbCjGN>U<@8Hl_V528>_mh;Y4%f&fhCJuRYOlO$kEpoV*@p?RyN1U-Sh6sp^)%W6k zsJ$-YeVy22KEK$*{CLN6o_`##Jg+d7C`e6-cRbW`IG%&K6%(y!W43*#uJ`-H ziDebUm@DPnO!@itAiNy%`{C6;<(w)SK#Gx6tQlk3#?=nB5$ht4}2a}EDC+5#!aQ#KZUgUvG9>9b%plOVvL#cX{6iLw#PNQfLNqnKb zs-qeej@FJp((#Fg{FnQOBig3!UA&H#0#h1=D+##Kh}X(s*Bfk=5iRwNjt)Q23GR`L zvV~Q!2>X>c7Nr;y5=XHpS%fYU3~2^XJg4oMaMG;tc1;)QhQ{ZW(6FUtd8+}o0CG#C zZcA%)qlGM(h?4GMiNY#LLBcdJYO>C=&k2~YEqY0@0n8J_3<7!@BzQ(xo-lk+8*utIGCWw=VH*8mdIh&jNy+lK#IfTlDYN3DM?8z=!R7^ zMl4^Sczi#j3-Vl}c+na2@~H8}QJI$~WnQqtltyUiZ*{y!o7~`Q49rk&nGZ$G z)h+^2?U=2_#1t13Sn`+-3o`)tB1b7eJbvdOE-H!6l8lp?3nDPB$(pdfB@@%U3z1Um zWsV0xI$MSL2r&XFk6=Ooi1EkcP>mOH{~;z6dqm)8I6EYLg3YtGW8}DuEG-y5-4ouR0MeA)N#r?JvZ?9|dZc|#ts9QSlmiBivv~Q`S&~vcN+hA3v z;8+!KdbTRf5_(!`XlYj3=~DO&K0GWJfn*Z52`}oAd|2#n0Z4sx zcbS7m4cFg?h>~p7Q1=4~#vkH>$cTms(m=wf96K^DqbvF{DCM_S7&K>ggScEg{Almx z2USzYaVqqWPr`~I;ffC3N-1y3vV2Z|J%Us#=)nlCgR{UJX0YcXw1^BT@$Hkq;7k#C zUO4zo!=GPAaSBs_9He9t!A}dos2IRA*6M;MeXzeiC`oZ)q0~xO3>X3_*&pVFQ%`RX zfS1k}7HYbNnj9eG_UJ(Zu9z05z^Hu7lC8-GJ_5o;|CX1+1Eg4Sz!%lpB0}kX_sVa~ z!p=z|20~TLD9?Z?`dx*qPm~Nq;1~%1-S6Fd`S-Iri_yB)mYrQ!-!%1UwW<&6RdZN1&3#c6_cX@t>!!Y^-?yV{9Y9hs<7iNl`k}^|J|Bklq3_`k z8*yxW$Y7ziuB!cdy}rM`zP@|@{Mqg4blMz`$F6Nps%=}Oc|}4mDh;~O8VY}%9I>CL z){O~l!d#ZC8O5)Mc@X0LP%w^D0Apff#l&o6 z10Ev`l43G2V}+e0xDfbAdWHe06FDf6ITZBk(1rMeU?x3Yb$v(k-2;s+y8Y`FtAQ#H zlHy6rflDngbZ)?ZYGdr;4SezP1%q)~x>9?WFS56)Z##weyM0M(>WZ!u1EgA~Bn2a6 z6g?V0CYcM&Zf60mk(W6xbpz5^=o}ZVK(zWOTOul*6 zxjnNs|6>Uigdw}%SSX)cqgh~5^15Bq3me*cdQScfdf=1Mu7BG?3j3OCuD^%J;*9cb zHaSEXNd{GW5TGY?uxVZfM>)6@NavQ=mEZa|ZXK6iL6_I~lI}oleHxdeWTx=il0g^; z!g30dJowyD!a2objDrN3t(3Ep&-gPF+iT(!$LlL(^A;wy+#E=1wolCVt=Ol+slPlX zIuWv8;Q^iulG>Bca!3Bl>zijczj*cXYC8udsnh9n)A#+B?D93okvYcf?HM0=m{DFGUx0xj zBDZPd_d#AZmD=QUMn@Xv#msw*34tI}Qip*Y&%KBdCrpTS6uK;nzG5P*s_ZQ(i~~w_ 
zl9i;i?(XjI`zV&169g=6Y2UpyDhDMgAx6MdcXxMxI5LW_?-?Xz`zfoV@z|xKq>FTT zY9z=glyJ^P#ShPX!v`EUBgBGd%>+E}e+}TuFt`Ut938EM^%Jd>RX$r;4%9yiS0dE8?gLD?pL^)LDeGwq*(1kmQozIjWp|;gEhi!G%-A!G*ji! zDAs_Q2L-NJV1%Pt92~Mq9lvQGG?q3YQqZHqlgI@?)W{~wu^iy|OaMIDuj49yF(@8I z0Tmcnf23b~jSIZs6Uqk!P}C&kg~H{%#sd)2pir#i?cO9KkW>U93rQUuCtpbRF26P? zUw|CBz+h1PLUb`Uj-RoRjK{c`N@y+Vo>fL;qdAZ&P;r0Iya`K@q?xag-hHC?yc6lh z>M4~-6{LE_k+_KL<-jUY>V zmpQ8-I4kLZ%|SXbF3!OLRWSqn5d&sVgy18fm?US04TjO_08-Ky{6~O<5DJ7Rcw;b| zYm%UE!ZdXbA|evQB^-fbhO`$_@Dz@I({v>~SGNe`JEVx@cAdZ<_Y8ZF$So6G#$zlN z1`C?WE6oI##*}3HaRJY&qwds!qz$mAy|$oPv8R6anr`iDIx8!jt^9f?MGBBQe#QZu z{N~3>QrCVwzocvTissH&%7|CJ#xGiJUY#_)>NJB;q0lG_W!#>h3!4Oi;Wj$f!x$tL zU`}xYYpU0Xjvp}M3Jmnx(ypx#h)OYs|IC;e@fZjyQt=WdVrM#|n8wsQPYUm%s|aTS zQYjWCrA>zVO78XN&qA$O9L#S9Fikg({*AS)bK zb{>*G(*;<#qBYA$ul&FtyiT*;@KQmX5wRKNXlW5iNev9zTvZD~te+LJOZvy!lnbZ*Xtj4n7E@G<8W z3EmwaivfP@=PvE>o&zzgKLSFy|V*Rw>H(ip&8FfUF*cp`aOK zfJ}d8ixs8fy$46R0>kk!D@NH^hGSXxd%;D6?7#ax7Zq}H8?^-&L;+QsMDoRmLZ7tetFGXqywD0s9Wsmg(*HtY4_>T0{Y+OF^EZ}(El`$87?bj+?j zodQTISJ3%ECe$B+G5+9wZ=`5A6ukOa4lC}obzL7eo6Y@;=g;5$+OPlG+lPnU)&2dw z-tSAamBGN{6jyG}(mt`lV~uAP7!tsf((ZvvY&vgdW6B?IQi7ke1)aG*g$S`Av`Tis z*)R-X%%BlxV?Tae4%_feF^ZT)744WwBXPOEt&X;Z(i}9j7%7*(<6>{)^LWNR7l|It zD7z8VL;~gqMaDktgJzhhCHYO-3UWKQxOpX2-$Kk4yfB&f^4G)t;B%(yQ&!Z2n9Q4) z-R#_cP5{8sI*7C2;&`GP0WAP|ZXo)63|Eb_Yr-nySQV+VU`a&=NuB7{9O$?m85r+r z|K=w@{?Y5AELB}qs%aW^eRZXN?I%ArXQqWEj`tDm<%{P+q|CwA-eWaLT5FD7*PT?a zdZV=p;w}*7(gx)wF{WvAw<_edXD!sM&&GCN3hEeR>!Y2XFcge$gVeT<$J(ee^|(Vd z52(5zu!I31mD;r~MQAKop6wv*vtn@@dgJ$e&=`?r5fljY1HsRuEV zJsgh5R=j<8yOKq5Mf1*cx=5V`q`8Gn`uPeRv&A_75BtQwz!{fIM)x986(qxQ;KTO- zseEov*WC~Kp_uUNUp7D?>BkPY3}*_{()feV`AAkqiv}RSy|QTiHFQ3Z6?Golq#j&R|;r1EGrB?Pkqu_%Hsc-}>`k(TD8lZx3`Yb_{&J z+HRCMLy`J9#xp0^0bwz0mbSESw)`(`X-oSRwZq|%D@ifg{d4j+u6w1{+BhXCCcOqH zYZ-zMfH)KF8+|{Bd^4QJCPrH<;a!K!QA|v?Br6fcbjC>IK>7l4)lvjlhtXz($QNaj zpVE_H)m7a!P1UlJ6a&f~s9bhTMDp{W|Gdlm@MY;=+V|SZqKL`Xl<(y7_3PJ%{eFMy zyZ&U1W&#YWB!$lB^Tn8f2P#vmpI}<1e(NM215u9lgVUKqz-h*-7=2^Rkdc^8GeLb0 
zlX!}Clvsd+iT^A}N-5o+TE>hSn;R7l2pWqeEbaR-+MXm(>Q@4inhAWyK@}4%K(_t; z|Gw`;Ch)+=zAptMV1!ZzloX``Q7M6q2TDmp8!&*%vd?Log+Wo;gfH62j13sbp&OI5;@N>I4|a z^M*)1Ld;lZ^WK*6%aRNU=^+-p3ef#`46Bu~)JTl0aC~Y(3p1iXYH7QoZA0%{DI@Am zi}KW4CL&82rVXmLH{F|Ed-^Z_ryZ`#0yni3*L8_6*A@Qyb%X!;AL49K@UQ*;r+?S~ zmp|R*UDWOmbY=d@8x3!F26iU{#|}l?BYgSh)NHrarjVA+>lt10m-L1&I)y({TD&rb zHWV-EioBwp^2)HWjhu0`B?g@QmBNAAc$xMYR^jnW|f@`RbWF~VjH zYC*n)2fUgT`Z6Fpn0Y=ISZXu{&FdvF&mZ0Rp62zQZmpudmKb!XSS^kjFc_#;Y9nM% z6GYn&A4kiD=*a6kHdT>LH)1)&o}Uo?@sce*RoYc_GCh{GFMaAo4Ty7XpBFj@nQi| zbRU1zY4e3LNSpbhNBn}0|AJrZ4bPn@G+j};v_O~4KIXgXky1=OVnH>Kryw%0jv0rc zzu0CdX~OtFaI~=lOJ?YdI5!Mt<`2yKAIvXbL1k`Ysic~(@lc4kc*P|HC)Q0I3OUfj ze}cJT49{Pll3BuN6nlnsde$$#)4GC=4V zdBQKMvb-mI{T_?rL4sV_dQT2;k+&?ttC$QO7~@+op>T|g#U{QyKvGopVX@!zL(=z6 zncicy@c8)g81RbW{=Ehg^U4RjF7VMH;vF?CM4;txI}e zLFXtngSwbWT{F1LFpU_;zZo3WU;s0Zy^H@Hj8S zEPwz2Qr7mARXu0f8pUgJ%1{``Xec<85`0M~0E|i~5V$~KKw<>uP@IIa9$QENM)fGq zGRmLJfPua&i&IlqhxK~3yT01q-P~NUN;&(o_q1_fb+>!i?f>fU|K-Cnng4Lx7{5u< z0Gf*_s;W9}*6ZE#=g)6{_S2ufefREdXF)i&pVC-1rlNVE7QF>Ea>}8SC^@9b;hiGP zO;csdc&yEVMDxee7wASYr`e#D7{_b6_k&FUCV;R|gfbTL4r@SIE3$o<8`8NRosyFS zNAZTXaN2{F#(ByCmoN|z#_Ba928GQjhm3#ADT&77csYPKUEzEviihl)hc$3~eC~7O zg6aYE5#2`2&MRgLfa2S84mg6@{FZ~2hj?aoMIchK9|#e}pm1+k5s~hPDhkzRYK zRksul%x%0q5+{FPq$Z1PTuccGGA{-%JdREDP{eXE4+@AGP|d7s z2MK6r;$X#}f61>x3uyN1wu$;5roh>h=3tx|&Ny$QujPPcoTrtLc--h`00c+V@7+ZY zctQ_&_P&M%S6g*1%UJ6AF#k4pjBp?+{Hk9A^lV@siv!hzUR{QQVz;CZ==oc%ncY? 
z@}aJ(+wFGq_BVg_8(;n2@BT}i?B+tkqu;l}sZK3xIerUAENy9@_XvXNeu} z`_n)DC$sL?-}*cMUZJ!KF=HmkzM|oI({*aAdexA>v+_z(GQ@Q~z#`VN9<8l8g>09g z!8kq<2P*)@qtgLCk`kHE*i7>jXE@flrb$a1-!1#PuG*$)PDN3~wopg@SO3R<@+azd ze&;vz(xaC4t7`x9U;4cuDx>30{)=CHdDL2+XzX>=w>$EgdQk`^z*f)}!q_ln~+a1dtS4#+!YOfdLiyn=5T}uawg5sTEo~(iyNK z0MO7^DVtRTUw!q}>^1)JkAM6*xHI2(t>q^Pl)B`+l~3fJ#;PBT1WXivxFDW7JipF~ z%C{0|5CBxPDp#eD6&=jL8QRI3w#se5=cwr{l^yor5;39TVkp$M#o7UC6Txi2%v|Q2 zEb{63&8hr&G{FJinEeqJd3IJ1v$@4NNg(R=|?(gv^S`hG!|ok3DB z?~nNGp@ZwAmfOyVhF0X#*mH5UO5yZRK#r(o2nsRo-k1m~Vf%_F>CIK%6PB%DcZ|rfU7--(`z*S>F846 z4BjcF#C1`a)vC~U23vA~$dEF?z+2kFx6^0FIex0s_gvSH_VT1m@$+5V{NkZ&?pn1v z^rkt{yjPdS_Sw3Axv9$+O+_;41cE}57W$!2{sh33am8bi8Y6fP@Ui$M4M?Owa1f9& z#k(8ePGOa!bZG%npey%S(`$C|=;ZhPbHG%hAP_~65( z@w6l#mbSE|Ep2H_duqYBNcrGVo@K<3IRAW+@W-6%o4oJadjDtL;4;K)u1GR5>*FLF z&VX-%G)I=I9|WeX+^vM7D9E8K*bkJM%kganiN6jz^QHIXHK_JEt3T9L-I`iID3G_t zm^Wz5ihg6?D}7D>Tyav+*QV9LtV3~S6~?fb)C14c9EnoNOsWLVIA>|GVJJo$Z%+Ka zSXpYq=$&7Zxv2(3#OjSAs~%3dFJcAVq*~pB1;7T2qgut37nAG$xbJWQCx;Kvsp|sA z7D{EHFSRUtRkUSamTg%v_(r;-bStpV&-t-u;*?g+$rXXx$J&^JK~hHZ-fq&+bGoN> zulkCMX_cbCTXHZl0HM$;8sGqU7}P@`yHI&R_ydP8Tznqgx2TgC7e2@fig74y6rnGM z`c^KYoTkOyZ4r6o3IoGdhQ5QOc07u14eRu0esS`G;llwI(mMp%JNXx4Xm0*w(AutYSf3B zz=8-&DlZ6hPW>p5t$^uox`6HlYavk#h-24>V6!G!XLZ7+o7Xf!?Y?D+; z0<(BJPHh)4M)Gr=ksK%@;@bpifIL3ul)DhK^=*u-j@(O9&>W{23`%|X*wpp0X&TWR$PBaNe@B?A>s z1=3-D*p7(Q)*|ULNGQ3Io|Nd6h50Dyoo7@=W1Bv=0b?@GDF>Y&2?@|3w<}_MacToj z=f`sjs53LWzdpuVU>;yjH}_EPg4^H4X<{i^TZ(<^W2`Ih|`I=&%0x7OP2F)*eE zFffdHVlnBf^J`zemRL}}XU|7D-tU+kfY;hfX=GDb2*8pMfDgZV<6ul)dX z-q*DD6p~RmS}!oFEYEtTkgJ1(C-{pT)Zn^-8AylGC8Yep03$Jp$Bf4mvctTwv98Ri$mx3 zXinn;B8$xqwpFmiaV{t^KX@lG{tI3NOol5=Y`&N`!3(82LJVmOPFZ!QV_+27Mf=sN z*_TDh@!Y+)jk+bSyR=WaQJ*zhvt9_>@yLDlK#zCiAFq0)YT8%EN{Dct$`t7UiNr{R z6MYG&#_{pFRd%eayrh&YW@1Kj1A8i(3j?7^Oeho26ULSd1N&Q`Y(A{7+O}&7c~tC> zmSwS_vv3ZykrJws+E0J_YoYF4h*{b3&mpAvoyK^c!ZpB4bsJ9&Bo*Y+LXemNQUN%1 zM}vk{ZmtUnta!5)AZb8|6-XW!a%E7ijiC+sG0 zd~)Cpn6S=1?VLbTupSo?3{yZ?z9bVZM!7R7;!^l51?crGAS%p= 
z6~D$Xq#5)z09}DS#COhOWE4W(8L^fM6q`Quo}z3`qqN!3VshQ~`o*bN_s7ocX#DJH zV|4D3_hQPn6YupCF{FZ#Tt*u&nn89oYz^{w z#Z{~1rbB3ykcEK|=93{RDn=XG-6;cRye|-q8#MlonMuzacxN&1u`sKMo_hs=9(&_4 zI3maR{6Ln+^8l%t5uX?=lM<-x@jO6kAPtV@2$2(X$A1$(uq$y;5F6fap?mx{uxY-r zULfW`l`>kJxjsfppSrT1A5HKyvKh>?rapT`16SQ@fo%y;O2E31xGIDx>cU_RWK?47 z#e8fRiN1aB$95Rs!jEtK1aCRaIKxG>IS3kxMj5>7Xlu2(K527xQs%1Dcths%&AvA` zby?h0g}f+WGzJU>7CtG46v_x8!uhOToFYm9QZTcs;#64) z#ACHwaT!ZqNHD<)jX)^z2zyC%0!Z*taFKm;a3U~ONgQq*#ADa5xBe`41;mKE7(IxL z^H780Wt@|W&IM6{2_`V;$$nV&1&D}nfrF zANrm-H{#s!V_>%|RkLbN^kW}%sooo7*&qx0iK^>*E~HiTQ<=x2rftKVA^tRjP0ZsQx)4nfRjm?9k z=u`}h`ih=2`Nuo&+dlX>kyKok!foAIg`FasFJNw5o&c&?aLIhszh zFcI7whFpj*g?HnOkj890+S!N375LgE!2+d3q+lg5TEbxzJp?fU%|50yGY2?|kBtGO z$$gkpXcFha`Dg7@I0Nh-7Lb&HF#o=}T1~>G43Rbl4A^fmQPIJa{48jeh1z#kk1B1k zP`PWGddI40>-GAM=ES#B7PmAPKCD*Fv99ZLAgTQE-1DVYQTNi_k1(0f1?x63?Dw7YY6TdZYn%`$WMed1)E%o}lzTc#iPh}he=Z+B}}_kCZ> z_5&+@otnBn@O(qI-;V4821*ruk0sdwWv^IqlGR*>!GrlYKgTC4?LJJksFaI8C&If4 z@Cvg)durboUXT!<1Yd=M=r!i%;iVeD*pPyKzHLssMJ87Jirkz!>Cv#DwAuU0p- zZoa3!z3JL+bv&K+$K!EJzRb2gpHAt1El40UW&kv)2pGXJn1W>dTER+TN+s;p zAm;SpJx;SHf3^yR?K;8REtF_QoWu?>lhiq=+0`>2Q~98o=BE)iBLhD{<}U>mriplm zT=(7T5^-kXSy&$gqWu8lF*2<4{>cDlM1B#x1=Hhg#%4(};95-p8F37d!o-wOQ?+xs z6F;F7mCQvr3HTPzND0*_rA9oa!LLD_n(QJFJnClIX@k~l@?XkB#enCE_EmMH{dHYc z9~v4{w6@BOdc7t$xaU54 z8fw*%YwYfir`Es`dhYH_)~9S8sHqZC97l~96v18bpW{g%;~+s2>Ldhn^YLSx8|q`O zM=_^_n#CCNfl>6sh7Z_jF)VaoNdv1w9~+LzvlCqCVngR#lTXyp7_XbADK54fKM&%( z8SquC3-Hk(&0}La@VxF`Bnz9q>hc*)0$xeV2G3LkNZoOgUvuXl_8YC$wkY+sC~&I_ zeW5k&wYumGY}*bl+MYMft6hNxn-Kp26GkJt1vsclq2vNGZd@4=p_yjGdN!j3($NA^ zV%!G{JswFWi;{$x&ti{NG(vGgm``CO|AC=yK?LnEFDg%?(5ZIN@DZ4l=Nl{3K0HSs zh~{83)YK2wYJkEOol!KI;)W*J%dR&&G7g8$;Eta31AWvzeLN??<@VsVQVHPRGnyQ| za@3%zHf#)v+Pdw+9~)GcZI3UH#=LBedetJlqZRfgf?Yh145mSw$g zDm6Yc)h3RR07rpLs`-#ODhZs-6)<5^+X!RtbEq|IIBA~>g77y?^J`hQO>0l>41@i| zl!>5+4##Jx=nShC)y09UgnBI60<;B4m2~P__g%qM&(P7Q9(~tP7#v7&ASw7TSA+9-5Y#G+v^VS1-AW$?$@^Laa(F$L-mGUv!O9h8?I@*uW7Dy 
zASnQ8(E6JfA0Y^|87jy?d!jrl<*kG{*fECUhzOS)vkFYb;~-I7n+8iMm9wX(26%eT z2srY|4JFpeBx$T7pecL`2&V#bRV$f@hGYzeH5X!WTTqe`tcfU)TOsiiE(|h5HcjHW zz=mgtLGYy;^gC9NDgip$P7IK$Xax7+ z`+kMSlw5>H->*9!k3DVtWe0$i!anLNiE7QfTHE_48AB3cUU-^1Uuhxg7$6mkxFJ!T zlWG``9fQ8m4Iq{VGR#cx)x8$kzBK}r;~VIn;SLxgyUW1@} zqybIWbC2p7m&To|9DcBy1x6{yxvm^Y%DKS-y9d61TBRxkRueFO>x~6rF*Be+!!d2U zw|@(&Q2{KagpvfLE+C}R%He^-bSIku5uBSF5mTD?RDBEs>wXqPmrk$}MYNcAPG3x! z0VW_NB8mMgC|V@bjJ*?vmDY+uJ*q3Kf(y~P!0^5(3i509JH;x=0Kcc7|3GiLRfBK4 zC`o-i3s#_@^kE%Vl7c~@ucbb8q``h@n&zS3ZrZkMJL-@}R(h$c>S5p1x70rmJxfcq zEwlg?^}UTkmtUGt(hFlCEB4NpfAyHzBN&IfZ5nw5a~k? zZpK6tBe@J;h>i(Vbyhv<9B3(5lJa{@)NOKH^ZE5iJMOM?%2t`l5{uO3v(4_Y1V~9W zL?NV?JW>ge0#TJP6a+=JK3}9C;+V>&X>b6CmWR5o?^mnU^>(}caCLS0p60}Np!E%d zMeDjc(BGy&Qh5(rz)_#cHBwoe^msU0#k#U8k2N;Of}Rh`j6w@L0CO%WL z!SsH$YVI#Cwzp+je4s^i#i~)e-R?wl6xVfMU$(DizCziR3aw6@Q=Kbl|4`o|}>sxZ{%@{9pl6j1;W3R&}AFDK*w*1sAJEUsaWQQI+tbDAZM<)r&)`UhGb) zKMW=Atf*1}>)ScXgtp>IMCgXw<#;V&W$8I+Sc#QgVa@eQcWWl+|FDQ2}qM8(p zf=!u*Wz+%@ft^@94vDNd=D|z>q{93&e36UDxkS2K4oPYEN!pzkASoL6kSP;6-QDwlh%mhQ^Ws8c({S`&syc4jbKft80vm zM)09Q2SOaEq{94^f&?U)ZiGgl{5?d_1db9d4jjcypUU+TP6lGiqKD5IHI=#unSvC~ zh+pEzR=ElFsN~rF4a7TPW>fjG14n5)^M+#&qQdO`A%u=n;|@SnIPu%`bjXdT*<&JgF5I^G1_*CiiYH$M}<;zE#RRU5YX8 z!H^epi(b<9Eg96mq`Un~x*^{hqh8T*(<{?xqgGnezSk?dwY*N&JjRV(Godnr`5d5m zaWq1}zXpIJ3TtCw!!D zrP9I$j74Cw@DoBopBi0DV01_ZCq@TfDkQp}1M^=bfS`SHE-(Q4VhK#??u2g6lw&|_!ORWyPk!`l8t2offJTR&i z+Y~#?BhqtATiVi=wzQ=!?db*M;88KL&jm30DU_s^_A~Yv#01R{+`cI63g^8aNM4+> zhNBcvO~ZDg#)-9+ih`_MD7=zX4wU*_1LC~v{ljeF@b`=DrhonF)uC&<`vD+jdD>g8 zi*C2u!G5=|$R=k6sVlEu^>iNyZpcbVWIpvh2 zz`4EXK~mw9AvUGNu~mdH2*ZA0_~c_7zd#pF8TLcii!WnfOS6TS1}Bxjrb~Gtdw5eS zO(MAlnY;rEw8jK*Aa|5u6kp1eyrhgv1``Oyqbdbrjph|um>84}9l1-0Xbgat-YH3) z>auFHO0$xb`i{==yQZ#x=i)?uTa=`}_y&Ra$KjbA3#SD=_N?y01s5Fm zSg)F0Speq|PEB3y*PGSD;c%#rhhxJ4snhWY$K#=<_iQ*GG8F8ZAteP4)S1s!nZiL- z#fSyqkSW3Cajx;i=VC|MI1LIDLOCm415zk*F64)2Cz?2{aY6x5Ws~>R@~wmtgq&xR z;^)!YbWTYsy{B@j0+>F|&n*jZ*XVc^FsM(cve0c+7JKTi_nY3x75e4={0v;Omwl`<{+sl 
zduB1PKaWG@_x^sv!@~m?cG22YpS0FTvI7oW@ZR>_PV1I)dWsb#>7Rn0H~nwiTmXWd zH}8~bACqE{!&_h?pERc?3KRQoK z$baj*p2Zfq)(bmkb*DMzL4&?&H`^8ctzw(^=H{jwOktxLG}X54#wU#-ki+?B55qGx zg4PzNRmc+j97HO$*}40Ap!iZ=&GGjvx4mE^rx>OZzf{R^z&`l6=P+xW>z{a#UBJP6JSi?4p`WCdoV^ z-*G6LM2QNIQMq8c4@DNT6wM zE9r}GXne2fpdr79X1%Jcs!Ed0WXd;R^g8hKebUpsld*H#bzokC{<(pqyo!fS1amz@ zRj8t=R6*mvprJ%R>*8{&FE2L5>!ya+WeKke4X+EWjyHSkt{=Gaff+X~N_qo5_BI=( z74v^;j3^<9Qj8*7VlW0_LIuCaZh?p@!Oz|wKZ^wg15&_Cv3QUay2;cR7hs6-2^B4r z$I&3MS$ZA9K_N)b0U>n8jTy0+V)%@}++@uoqe3l#i{aS0+-7QM}idQICKw}0)FXD+%PE>9g^9y&!EUbY6-hH*zU9BFMT42$_F&G*I9-)gTu1fGD{ z8>q(KTHcHW9GYwjlk8cAiuzztn`T(tbFfsL>tf6k6TrrhZLk*|z&MGx$}@9`=beHX zfD}&sJfR*1;#i@|K~q6Z%5mP=;4Y+7W4!5VtR`h4Nml+Tz>-j>Mo?Ci70G{v_8p*v z%ngjm1Jx*2JQFY*#tE?QJKXdIu!dA$0EE9V=&d0^NGWhbydxA7QpzX$Tco`FQm2;P85UToPC)oNcjS+JRcGi zpM0=DB-0>c!B3d{Aigmu?x9KUhvcX82xpLUq(vf%wKV~1870$ol5O8ji(IN>O#nU% zbqX_pr1Z&xq%<68W2MDwgYcax)NeXe|KA4jH}?j=Z)l8dcq^f6O4X5()~T3)$+vSs z(4{SHX-iw$(w6p&+8HJDGw=EIW&g)2NiAThKeF>O$d`)p?UCxx;}YRshhju>fYEP& zsd&6olG25yJ%gkqK#LXy8D~I{+sUR=BB>C zzu#yLFEpql7xmjR%osXOJpZX8&!q4V^J4XMJdTpk& zW1>+2@|>e@B#62_gS$VLuX8}D@CM_-Qkj@JS~V#%x?KY(3Ov<|U;{!Qa1IXNP{8i= z*tIL5qo28`&jF-3Ap|(=9gxbwsgys8>QO;Oir=4L2c#c7Lq8%O?A&xd0Hi>G(LBYu z4;atQu?TZT(&)heYE-Nu0)&9BkP@XjR%GH21)B&eCpJmh*996=DoHUw>e$rvL2GT5 zqzbLy)4RW;*Zl6yo7cZpDFgeGO#jl?DkT*PR7@Lbu>FP(Tl)D(edVy-Zgv{Bu-$CR z)9F~8PN%YMTe}?&d$r#^)VtlohTeBYr|O3Ij&_CAGCl;!8Wt5T;((OLo~A5l696RT zC4zWLNwAVU4`Q5IRr3CbZ}gxniFd%M6$(ZJ*L{`%N{NDEsU#SOdIW35*U0O}N#!QF zUL;^6H3l%rbLF8vSGk*2U-wmA&4Hwr zINndw62Sh4>l-fYvLLCdDi8E%_w;=CZQJeXP_g1<+x0fj$JH}KVT=y_D+l_G&?{cG zEh3}@fuS5QAo1wo1dtfpX-`PhbyP;YH@ccB;5=j`3K;PL!Y1^<(Fxnq3C%!a%gP+3 zab&1NK3wFkonoBI)QJ$|U66`Dkg=JMRueggi@Sgs`5m*l_k+Uia|B3DEu*%rS1X(1 zD2_*<_0gQV&f3~_RqtQFesh0y)vc)SGW#_X0O0R`U$OF(rZu#p*KdwT-ARts0D^a* zlL%}w!vKPW!&R&!5vUfxxgMy4!>Wdi-e2<{VpAf$d5ix+Em3%ybyBoyK9$$-7J0dVHm zz?7Ib5p_bGus0+SEs_@kjFp}eB>|OI5NDpz#BGF(B65rg#)-PCNuZPfNNMNi8Heb@ zk@D3DjWt$~3U>+_@NAW&7$kLhadG?R^{WprUcPuoulYc}%r$MdRaHIoUFVggFx!^) 
z6NW~YTd=gRwBP?gWr0M+4@Yu>3!rIfOFK7sq>>cZh0*d~F+opl3^rZYH>b9>ic%~h z;R=8?hQi-zb=)la57I`1)EQJlP8B~!XPCrgyf>A>tH1%Lyck2mnw5D-n3N_@C`oC3 zs>*V|TCMJ{UR>R7w_65DdE01Fl3Lo6ZkLyr?3{K(Uq$Y-T)%opeq=%8n%wJCMgADq zfSRGp>p`+OGN*^Qh^P}3mH05>U&7c?{3LF3Oan$Z=d`K`mYLKCbUr@x{eW#@F=}J_ zvhTZ^#`dPrdexOhL$9m)zAwo)@`4Cv;>&!k-Us|Np9b($Z^OCTk7ogr`uF~|ioZcO zy?{2OtnPpRkNb<;BksFi9ass7Ud9wfeNlAS^_^~eDBHfT+a6a%ZwtoJ02W5u_)WnC zEV1}o0)dps4M`=VAq4>#OTghL>uW!)2^Ewkl+2l@$Ojw{WD0R2q>{;kgL@HRih+U< zT1o?dUIAkqHjf{IuBJKwrR*GD4qz_9?~0T2-7xE$0Yz1s5EMlrVl!mg!Gmd0J{`^N4sT|l-HCe59?P!zZ~p3rI#I7Q_SC65dt6}W79*wdJNEDXj@Tyt`}F?Y@VHV-JUuf&bxuITXMCWwF_;U|SckEegd++ARa1 zZVve7p~W|cPJMIi)i?C=Z#sjo2GuCs(zv464O?qe6DmC*-bw#-y&4PvsuT|FgUth2 zDkOI0BR0b%civ&ufK**_CZ(210Q+5fTSv zymksu6r{(Yj4A;^;k2Gnp6UT{-=$`g9&ng4Srz_uARA0(ka(TG#|@{o@3)CTPCD@1 zPUoy?&!9&r0Z(m#r&EvZk(Pvqlj&$iJCQL}urcX3dN03!rG{=cfTRHSMFG3N|Bv4K zx&vGERMLn3#fuvM^4AyB&*?KT+dt4M_J7}Xu%mrT6IVZAcHw_(kI){iEHblw6izUW zzjH}T--`D19Wd;_-}f)~oq9uK_ggy6pBSb8jQ;j3!+=Oen;N?eXbT!PvMxf5#gOEP zDFex{nm|%a%%kToUpwx_W0fZHUKmlJD>Z8{L4G0!kWyKJS;kPC zCpHAsPl@$5Tp8e$50Dha$kou5*M-#-Fc!pwdnJ-yLZRB1iJ=hFvme8tVXnusASru1 z6pfV{cB~v#ae)NwYuXxsTYAeot>Jrm-S4Xce*0oo{L}yHZ-4v!goQ9-8(-SembSE| zE$!KdoA7Z_sITR=2qBRlW&3{0d*_OX&R^%!iJw4knJr0rv_G7v4Nb?pXXTkftrGak zN%*nBWB7UF!r`m@!+-q08#N^LJcAP#Gw+Yb1N`PUzqzQZ@~SM0S4E*8%0lnytrg;+ zKs3b0uo#k~2$OeU!~%gMaMY(vo4Ico$MNIO1S-@Sib7sYJV5|YzLc@k&ANS%%`4gK zAQ4$mp~!tmyh11*L_E_k1i6%mH@ki)0a8POGY+6A2a*au+d^KglfB0$jknZa&uKl! 
zp7)qEB)>0H&upZIe0Hv%c%!pD;R_;&REQ4tzuCJvNJQhsH?Oh`=m02jD`#h@a^ zNu`at9?_~ck~QyH0Wz)VrQUKS(NYo%fI`dznhAL)9;_coUpW?H~{O5a5aQ- zcLqtZ(nU-C=(OFekE_*+lhS}z3B=K9U+SMPrP>%U-fz#G~)-nRoL76aA2dGp3B z^W0C{e)-E^s;L40Z~ohVhl}7_E`(QtIrw-yZCH_~Z9C9KThMc=ho#U1FXciDWA!6Q z$G80VyahKY6y)qJ9NCUP!tKI*2w`SJNdUet%#$FD(YTfD$tnWMtu~v6kRMbqhfNx* z+Jw(PHx_dHNX|Vm*6+bzfKf^62m8%G_1K&{|IBs5s%aE$IXDCV>VN)UPwVxD`ZRK} zV(I;vwr%y@-F>}#cv$VL{RP`-MNv2vMpuIo(LRBl2n4WL5@DqA07%p+KNeS-R5?cc_$-$c`>F(# zhZzPyNad-FrbI=6;Gj%wpCs9l4ALjxWjCzj>+6>HKciPan(LP_xwL29Ztw1LVth-SO?JWxG;n=b(;L(D zy;YQ|$WE#T^H~q75wd^^UFl_RjLeRc+^(KFnv2C?Vo&FOs#H#wYOs*iV#)Z61o=C^ z(SoGt987G)N>UG(mzQ@}SC@;D)Y6_@oAa;#{r~Vk^SM5b2kl+oy*nIFSR0-#%6p4+J$T#%wGPkTS0Rmuq>oDf-^fJhv{& zVpY*RS5+m=dr+4}QNMfl-V?X?KZ9-YWV+B#Ww$*?l-aWfNqxeD^54{}e()r5=L~~E z6@i9NNk7#E%@XAhFl$k*=?yh+9q&=qR%ix&VA2Iw5ksH?V)Xzzeo6#{scPbyd76EmHZ!U8386k1*xa>n348454#4|E&)rEg0Z>tpMOhoQ zt$SEUMxNY&wC9$_QeQvJG%kjcnryE=|mrnIyr zj0J%LOXM`wOr<9z%DN+>VUS{aUf(H+w~P0S*Cpo`Ds(A}Ckk++H;CdGH}1Q58cS57 zqzl8Sj~z^5N&i;l2(FKvzIr(IEkK98ITcMwLtGgK7IC2%?+d4EwILU=D0rih-V_R6 z(m^qdanBko8HDjOKh^wa0Vx}fJG#yfU0*+(Om%2Y)%I91n6vE-ZT-5X@5|rW7+ljB zSwFP>i?@6ACl6im&B>@2J(>+dQ4QZ0JT3D8{}KtI1j>VAw#PKFkUU!XOd=PE8UcwB zGX=qD{GuDZu|I%Nkdu%r(RWd)u~2$sY?O&2P$?)0b089C`6Q`&sM02JV@#}2di{Bfk)P@2Gg<&M|GRNLTPfy)wK&BiujzzoWLLb`m*3uZT z`wG+pgQH5?$jNwsAt}!`*6(OzP!tzY?8Lg3Gtw>P$V*$=(w4Tguc3X_>m9(S0Htsm zi16so`r(h~$9(S6KC`nE)Qkidb5g7{E<{e07F1qnl?CNs4)nnS3&?EyOy9zM?Nc;B z&t%*Msr6p?rl7UnS53?@_r=?d2%V3`W^G_mJ%Nh}l)ypyk**^$F$_co2C*QiTT0 zo@uPw2bg}QBh?C+BNN8tv%{U303yi9f{>h^UKS*mU3BO!vM6?CS=`e8 zt}M#CvMiZ#{RTAro?ics)uiZW8cSt+W`2Oh4*0T7yI5a~JS?q3k5?eEzlipQv6x9# z?88v%%bQB_asyi-Z?fag8zBL2ck=+c(BkeH*S5>MYFfo^fT(eL%pP!Ru&y0Yg zIQLGLuqdIdH#0x0kzF+hrj&|NvEW6<6CoYMD&-(XoCaq`K*aPKG6!5moL7Yqsx?v| zPmRf(QssxJWP!>Oi|yUpn4eXk1TMlh>eFarm-ZPOJtuUw9p@wN-7|Wx zlr9g)6Fr|2=^1JIucm*Dc2bxOwhpo4WI*Pz?Q>l%U$fKWDZ&F#D688hd51IfkHQIi zP-S#xbE4Pm}G|JM^tvPw@E3>o|-CpS}P2YS`Rke9Qqru(p>`DxYE^}3^h08C`s_l+6I 
z4GVJP%Hi0QUZn`d8t28pMj%_RmxSrw>-YBkUP2c2v>yuDdf@xN@E0%;4LTuT#KbXz zlz~A_vvh~tQ%2u_X*x`Hml3C`T$EG*2S)!P^JOserxGx59GtT34kN<*qdtAwTT`FP z0~&ozNn(RQoL5qkO;NuhfTF`Q)QOJ)Fatl!a*8l9-z1o zmf#9a;xkOcBLk^}=DDFKAD;;xdRos9WK)u#QhuN{;63>+Z|QyC(Z*os_rM@2;9MP? z+?=wsr!_AwZD~s%|6JkaF$))*jYG$ZsiiIL*|-1qpZ*gzAh-^cm1H)wt$8!pUy*&V zB_DA`wnI%;OQB-CU&e4CDG5&EE{Pd$D`#UJ*EtE@G#dUx6L2x!BPgF?E)E|HSFPN` zKt{%OPb^=1VkKr4Mxc9g$GH+U)R8mjWKoh@+LLZg(^#;d>#sW>6Cyvk)P-inI0i(v zkW9}^(;fg&RGK3XF3;a5nT|YxoU1r^a|m>4GM< ziu0XyU9Y#>%?0^?m%sVVZ?%h|`OUuR%N^%<&i;YNy*V-89&5{;4fQ?MR(@em-j5s) zlHy6iM;5FcdkGAXQVt*ms-nN1dUbI!uxSz3Fys=nm6~*cBtS1-9T@nynmnitX-66A zi_3Ak-1(Ud9dL5=vDi9MvEct?I7J4e616D*wRKPt@-7EX1-~S_Q?Rho;IS-bhY^}p zF!3q*1TZon1$C(`v4|>^+za*|K!=y7&Q!Dr^|6NyU0TQI=@84h#G=%w%ThsIBWw!; ztKrR4vgJORam=>vGd@CZ1ba|aC5+;g+uLJg9E)SPjgG9s*)fmr7CtKx!D=;ErXW7@vD=Y6>U?NvvFrig;jN-BOfB1prj|U8Wud ziHZ~^ASsD$No=#)Y+6HGj~n{^VQ-TtwATmv`$Neas2weP43e@;6|}u`n+HkB;<3*$9+tMWr7dk~ zOMCK;^s?y50Ow&@!A>$@%eysGO{)wb>7)OMS; zv)i`oI>b&nHDwhG_*BX(k@!2A;(RPNO$+FzR6gd$-&uzITu4J+pMNJt>YFKABshBy z^H+?4QfYl*s?3d(n%ptCs6b_<@h~Fwzhqi}JQCiw|X0e5mW{eY0xb)9e1=KvK8#yB&?CIbxjrK7d-xfiG*R5)7?|>=jE}Yf&@K{=XhV#0+1@(Jx;_C&;ET<-9ded>PkK* zO#{XvSPH{;iE3CWP-+UyHd4OK?ma*TWSJpt1u+Suil7h>$~YcH%zB}j^6vHYC2 z0J_*64p>%4O=}ecq}KGjOi}nmoJZ}!NOf#(wB-xfBp7@GU;sG)Tqs#3z$KUW#Vq}x zGFEYzsmz-n`+i)WRrCx{l_B@772PgJNF2cvY6O&;2WLsOt~}3;*&c-Dn*1C*lYtY` zs=m>Msre$FxySfpX`Vl`m-o10nWnDEB|EmgF-N)&4=yRs#f%j*;M4rHrg>^bYwoHn zidEaS8R1Ialn4IMYc)UM)rL2s}0la~jY1ND&z&fr;5* z7XAjNMKTF!Q#vM6U05p9MSI7S7h_V@n~Wh_Cy)f^oPDIjW0vF>b6pcNB8$X3lup#V zf^Pn=B*6d^a)Gi0#-3YCa}6WOyTy6&+-?;GPt~9-*wWb8(b%}AwfsX-7Vl{8cuS}G zp3cu?hPU*-J9_ty_NSsK7m(DGi-avesiiILbB0ATJ7{T3`)b;Un`^Hk#WkP|khOpB7#(w=-`<{(M5;5W4y`)vJ8Yq|?_&%_W%?oX6**^OaD_{hwZ#os z6$Z4m{*Hh%ZlGs4SD)-yiMckf>J)=#=%Qpek$jG&+wyl1*$hL-F&4)`re#PbaMgNndCKN+9Y=T(yOMsTenC~E7rjX$Oj+_b&AIQD9LGPrGN>x>0R zZH!LyA`&8LmVjM>?AHn~Ja@gPiw`5KA798dBV*^|Wb)jL?FhHstK6D+OM;=!XVh!!o=g}OUdMdIMi@}MN zigL*{eMmh39C6cGkR$DBJkxjF9D3aDTHNkCvpsg^f;JxO7p-A+IDJL0zoJQKLuPU5 
zl(S^UV~TV=`-Na{pP^q%O2tNZM@pHE0Y{kq4nYa!s$4~C>iXmwGh@v`mW;)yOg0)P zN&<9+@*#X=TxN62IFpHI#-XVBN^A;uE=={OL%j()Fe(_Y8fBHEc;fRaQe+CXC8!g1 z?-SWZ$4bMg((0r^?U;4Gu!>Q7UuwOh<2}9ao`(Gco%ey}())t;bXpE1+=@+tq?B5U zCYH9er7dk~Ur=;u>KB-nn=H*+DrZ=vemeCgJPUeSRHL5gxEf@{Ivfr!;An3W7Z$RzR3QcnU=qiHk>BS9P=_@p5PNWmjK^E~02_*)>4VG1Lq(ntnQ{vIaz;bkE%WbslNgN^r4 z3HEu+Za4~s;pF>gKDjJtoKlh+j>pBl`SVSP+(Tag9Hr7T=6DA(5nlxpwV0^H5&(cI z>5IWCvDL*;N&*N0smEs{U@(G~ZurH(X%&9<+W> z;N>aRh7@-CLmTL`LdPz&iEzM3Hy`?p# zzt=-C#vQ%)t|*J!uIX-1$98pU+Xhap{V4idF@$bZssaIGNz0>3f=mwJ<2LM+BGboW z;nNB(^S0F#SSp=QW$bSn;54Nmm8&jH0dVK9cbcU%1v<%MoTwDa4v+#I=Y(;3a&o6gz>o+D_$toLI7)?$ zZB0n+FVr5GFvxESn{7PKC&z1YJB2!riZ)z)4Te0l;D(SafmTHdd|ATd)TB~5qvsJ! z33qJ0USrqwE&X#~g50G@0j}>_d%^n) z4uJ)74O7DV4Jyl?v;i~U1tmeQP(GYgdXxz^6so)#U?{Ts32Pj(4vOiV5*A>UD|Q9e zbDRn6gqjt=nXwMjDLhb0P8w;}GgHeO&7GJM8R8h2p60t_f)^390SpQ0i|n2r0>{1! z1EXlYJ~GIe*75`G?-}Gw#~-S)dQY!;OK)|+J=f0Fa!0_IrS zm)f5{_VZ8C;77c68H-DMw(a48IgJw~sY`Yt$=y`vJDiSoi&r=bvF8d7Adsm!c$NlST$qRAZ`Cl?DSrC1G9wQ8*4FkU403Md2Hj1s{Dj z5wh?Wj)RUk)S277y#vY>^vj07c8{v)jA~9DZmUw&D^6fi2Pn$`KjjncRYZ6At#`kd zlkxF0PYfO;#o&px*sdlvR*TxUJzg9;wT)mYwJ{cW0X6mG3L^-kLIYelQ_y`V>E0CL zNlXR~A!mvb_gOMsVwPBd(Zi2{T*udG1r)8S6nZr&kw66q44T?4rr=?34j=_+9R5vB z0uP?`N;i|FPRy4uCRL6~6no6npD<1fVhhlkTwzK}fR$8}OTkiU;>cg^P;8K34aEmz zVD=jd>O4>?%s1#H@%ZYD7csIa!rItWAr>^x*R3(zLl0NXdJ(1&JFyQPs4UdFD{R+; zx&R*k{uY*EEL#SOnV`z&0805ys|b>k>To>9FOfGyR(-qdl~sScWI)uB#`vMbi+yWo zZ!Q=RX_Q{~2p$NzqWf2&zkBGRXDR?7Az3A4k~3B3OP-}alB{r&T@EBWDT`bX(mwOh!rYF}>}BT*4&fZgh_r=zMV8P@8KOfWae!rTAzn}a^c+zr0M#>9LoLfv zDoH7ST-WvP>gw{IzRC4=yD8hY(|zBWySsy`;c57Ei|ShAPDa`52jaO-AAg0x;#1%27EgSBHxCAclX(J~!f6 zwILx7$v%H>Z?C}k;!rd3z6g*CMbJLVi?N_HQxltArDNEO&!<^gr6*svV?}Y=w`Eyb zkQ6IOtycAS7Z=;#ee=zm-_vK_0r|#-)^}x9?O$A7wrZN4I45pfR+lermGUZf7QE!f zB!i=Z+MB~93|)cKwH;`60;7-A?Kt*-q$&2UEQ-6;YIVozWKG}Q^}V^>?e-n)4}1a( zkm@>BMUbKd`KnC#9g=kr#3T$;N<>+-PUY{NnTO98vcL!wpJN=RkJEJ^=8IIK$n+R8 z@F^9k6ks)_VmJkWnErV>$D3ahj^|MrMW>VPx~A9Mn7$vBqy{i)SC&QB)OB}xxiu3j 
z-!k9*Ozq8^H=Y4oDoK^VlgZtEbzkk)^sE>l#kQRp%sW@}hAOd2rp8dTqvjte&Wy*T z$X||;i6qq&l4k_?PF18v<9trRZfw7(M6CrNV*w1;u7f2J;2B9STTRYaxgDI|J{*JG zvpXyS>Jc_R1o6rC@Jdos04bQ*PuU?)MNR7bBu?$+i%WANCxv?Vq3b$Y8qJCB#GY)$ zqAE)!d03TNuRGe)TDp4J?JSUt7EYko7j!RKiYlSRmf)Dwj+TCBPGc|*9t0m?qF=`D z6j4hkRR@F4lM*t7W4m>FolG7jivETGYIH6X-2^cMN6E}y#>|ZAhwXo zQxeppGCx73J`rfaxhg)}kH$bs!D@(6rOgu*+d6<`2ElBdO@QNLV4KPT&aDl2W_6ZtgBFFBoLvYn!~1)M6hk?Mb$)7gthAiu)}qNtI30 zY|5hGaemNXV?i9pQ-~o2bUtq;**T^PK4*un@c%x2M9++Im~>TScn5=GhSQMi6<4ko z^cJf?O+L`Ns>&_>>5{_=?tQ(ucZ(nN5uXG;iA@P`LVE03*j441_TWTk_W&thSgsqb z+OndRvD8V@x#A&ZjP5(E=nd=Es807%lQE4}aa4=XpeJ?q>)&ivrUYvTh zJ#^~gWE6uYsQ;oK_E6Y8-EA0TT^LWIAxLd033D1iNVDaGm>IKqkWv(jWK_iAxwzn< z2S=d=NMT^@v=|p}`ht!?DIqJQ+@6)C&|y!~wBwhzrj5&6GWQ>F91_SuPfGkORHPvM zES01rND5PBDlxAjBr#%@6XZ#t6oh%n%}qKgOAV@3D(D}9tg9PXiqRuXXlgI$xS`2x zOXusErxk6l9ws#lmU4ip8Z7_`O8P8T9W;T@IZ=c2(x&he7*{z$s8lIPipTf{t+T6M z()GQh3w%N2;U#S^=$bEE#LE-yk3C){Y5jmm}}S2C6+mjPqB!tIKWiC%^pm?&Z~0)pngWJ*#G6 z`~CNCJN>(2AP?#LRoC^~wred&if#qRgFGiuK#oEDM--nqdKw`ol^3#lfE25)3IF!BK;6swO<3M4Z5O8WRveQQuz60y*@N!o0wCOnL#+ta^#cQR>G{s7GZC?W24F zNaeHWM~BeG1Xv&(qX0;W!e&shKwe8GdQd{aD`PYdO!4{ORQ9>)ScQokcdig4eP(|g zeQ1uYn1T4njFn`(B9(kUrnL8|7A*UtRUrbBz6)FI>V0U*?rr#chuMJ0M>WLG;OAJKs3sx27^6yz9(L|F;vio-u*TTQD;O#mkm$3RgBP>^7Sxl70a zQR6vfUU4eUFPJIYsYH1%s?~hl0g!WU1iZ@kcrqnHG?yVGYRq_&&&!`0=*-HR7j42H5Bt4PsnZo0OmpA{Efo2IUDy>9UG z@&dp8#T#7axIbR|^I!eSD@kz?7v~o(3%D-I;{Au~b$xetQI_R_3*q_MSpcUBLNAcm z*m4emu^s&}%Byrf$UzXfk>(6{F|Xu%IZi&6T^vH57iatm7gZO?lQ=vfhyYDkad2*& zr{gu9n-Z0rgeXAF-!KN0x|iA{m(d`tv{IXVhNb-1fWO z;dDHmfR%62^u?;F*JYts#;jMgX0GTqt;_4uuD#^jl&8~)^WfH%8=4*~E27LCrD{oY z&o)sYQo_ZU@>~4$?5FKON+9NIl(dx-cIzZhLpI%7+c;RlQG(Wefa~uW5O?hBU9Yx2f25`hk-QKcMR3lKfsn3K9>6W{rz1>o{Fcsj4H?%6Zqmy6H2iu7raNOmyPA!t z+=NPsvVoZ85+B6-O+kSxXBy3Ni&42NIbTY-i)Z76n4-rLdq9gg2Z+6_v_KIYVc?`y zl5$|@%ZrQq&31iD^98Np@S!Nm_hnVR1+BlQb^DzIJNNAKG)>e0;uqhT37=(YOIzB~ ze(LrA$wl5l<@8NUTiWw(r}pIQL%ovJ0FpwyB6~n8Nfn`N$iuNx}Ff&@FDuL-a(pO 
z(tXVIhvw8LyzfyyNx03~=ML`h5I#o7PmdfxXB-I4Z@M90XmIP+l66UTL`1_go%k$7 zE0j7=U^BeYgsBK;5@b&(Y6Yl8Tm(q*8lgdwn`{6OaUUwG9s%HJQZN=EWFa4l%gl`m zY_z)U)#=o$?$lv_Y)!vAq5(3pY!sIZ(NWPJdX1WXPsS?U*wMT^zt@$dgt-&WKc zn-XzVDZC&P_Qe{pNtNNDeiRq#Ufe|^`%A8vcN-W&aJLO8St|J^_MgZ>x)^z!0j zRbJH09H-&30JvavnRk2j`t}5`cP+d=cJTVt!|T@ItJc7_Gq~w`+V@DuiuMEWivx|u z1?Bu+yJCCo*t7Hqet_Zm$#pd2O&riC0Fgo0uX-~IQn62>gJ}4GZ4ApMg(*e0u>;u%Fcpf3IYP=0E;YzTAhl2s zG|W5P4nb0#GZZ3ee@0()WdGCRXNN}N-|<^%c{sJkv?rtcV~=Ln zA=0?fhAxQ444mlFXdIgV@vo2l-}{GmT|)zRU4hxw8ecTpyxC~{d;jP?HspG)Xxo$u zU(h)JcI7f(7?sfY_^?MT=eSH<4d~M7xcGF8rzo)mg-n#%COQDu0lc}%%MJkWW;^P^jKxo)t<== z(wu#w=_HelE>RMS56*H>%FI$Mrb5T2HUN*njd9GJYD@@QadL&57zs&+D$^GnvE_-P zHXMzp9QhX+1+(v)qw1185{y-?lEEs0Q4$!4V&?UTUof-=Avs2-Bip58fE4dbP`gUw zeXZf1`sc&CRQH=o(J|aK6QO~?LlT!^WZ094`Gx+Nb}Ku5sxoI?VC z@vA>Y5ep6bzxnG|M-5<$^LxvcU|Uxd1wQQd#r|-pj>l7TYTH%SwHvw(J>9#K*(HrK zC9=L2m{NJ2A{p$PV$jcHKj9gV)ill*+|N&|UR_*nDpNK}_W0?A{g3%u4t~PyKN%O6 zbbJVEQawb*OJi(eLT6#-rb__892nr-lY%p2^L%~o@a{BGma4?W!pitn-F0$NEFhwq z3!%eEDIFobH}bWz2->sWLpXMP z9$qHJGhCm=NL!#D^&_-^P%f&bHJ`ztbyYt!b$!<~&CO=BzP`M;xTZI}r|ko6+%NCC z_H_U6{GESmr=}|Fms-LWgAHuZ?8Kl})p{tjg*|QIxx)ERVESwbUIt zR@p(rMI(+t4l(65{LD`?L1M`+JF{MUB1FJ6ral_C9t{2no5orcm?xJah|+^I+&Fc= zf;0Bd>{l4gvsoFBP{+X;yDLkWi^XQ(1cZDBz>nPiCu4)0nQtej!GGbO|Fxlw4<&;; z{wM$Gf6=V!=2q1W7ke@z1Q&kRZQGf{;b5r$1KDz#=D3mzKZkL_NiJ*oX}5&xq=(7% zJmia0w-T}?7cwAm{vFX3n1To1j8%-&z76NsZn-97Le88>7D`7L^8T9Vv66n8^fp1x zprZfbfx!S0sSr~vcY-&LvDjHld+uHLr|ri--r8Bl z*wVhp_VDoFm82SHGhfp7;&eJOP+*H(7d8;=mq}4qkR6p$GMU{tw(24U8AQ3%^NZS< zsR16dAm%`IOx!4e?BI$3AVJ^0G8}l;EqNL;VBE!ZSxM@MX!do}?B0CyhJEUj+dPC)eGw1)R*BveZL(@62S3?xC&j2c_xXHQc0z}q!v84*y4{` z>_7E=`M3`koMH|*22jy=Z-9jb5VyzU;cB-lS`kCrs|6(WDNhPNPA?1RLt;JQ-kr>4 zvFlszN=;!7%x|Ozkd*BXnz{$wr!8=@Q|r1cin^}LdQ1NlL*J8%Q+}IkRld=LU;Xvl z(?wC>1!!}jbwPX0NZE_y5ni+>c+qusyXt$pZTGEOpL(_Gjj9bVAVC=loFbF9&lOOj zPJGmI5_ywBri5}=GGRLPA2n8E$-xDq^FrWT6n4BG7H+Qy764c7vn&F}$P)}e7`Bp~ z5)nQ&$@`?k5I7t8#01PSp`bIzKPPTy7y#OjPmJIw3785SCJI!jQe`PbP&q)#Cy*gh 
z9%l}a8u8-U?`|TnGq4{Ta8r$Y&RV;EKq7oSF)wCyFK;hvhB^41w$DNhT`LkU2C?--mE)gYWhm$ zFk>sngl595ufly-BVZI}{C1e9$4A_Xm;j?DN#4UK#`wI&$sjKqbBJ2N1U8xVU;HG~ zO|B0(!`KH|EdWARayLdX&B{jQ$l&egPyLJRFCyvRQEM?QNme zu>d&Hb}TjT@mLjbSJTGIQPg+u=&iTFwh<^kzN1s#!(c1%cCUs+{6Pc_mVm+1mbSE| zEp2H_Q*$2S(w6opdpuEVh`t}dbZ5dO9&bkBS)A*Gf`B1x+HLC7+3TNlXdv#Vk|oO z-3d|D7>9FCA6cHqFwv5~E1n%b#CZ{?g}N|zmIwcGTx>m{A{FxMP8gO z6zP zM!X7Gwnk0GQhn9;6yXzQBvUX}%NyRRRiQN(Qg>BRw6t$oJ&G3lwyA3df_`97D9v*p z=zB3Rins51Pp7=&!U`@TQgf60S2D*v;i_cBdmwwR z?0Zw#%?x`4gv5jxJz1Q7!XWH2FDMGrYM`m=<0F8DC_o@Wu{5TZWq^MQDVW+#ZZ5+F zObXF9BD=_v59d~fn25w55fDGN{s~fsUcgsO_Thm z@0`Eit4C!km$tM=dqE4ZWC`fcy$*loYgoX9OIzARd;9j?q>|L#-QDHkaM-k6x9+)4 zjC{yYZ3i$FPy(kWql5(vE*=r_>OqhfQ#R>o$0e!Q%40XuXHz0d6!G#OC%ca|ynw96 ztFzSn|E$zt~V?$oT;~CgLw$=Kl(`iTB zq3^mA`NY(xv4>EG4S`I;@p-5;*g6_lXV=%y6X?(Ke!;axmw$uFxDUV?=oVPbXO6Eo zUEgnL&b$2Z{=Yd%M}PafRwtTMRZ*j z&=iGQ6$Pxc+xH!=x*nRor(=aHW5F`2vh*%sAw>cb@Lb{zDAX<@6Das(y$3vK0kLp1 zl7T@9-T0uEkRVTGV^T%^mA0z(PZVSizli7C4Tu_I;U=Rg858CT3UD*Y8a6!xB77gtU z?dkYrRMYmV>5QuAR#;ia;XCR9_%>)pS(c~p1W+o>2#+-*evPS32ml0>Ac&E2l|Aez2Zb7VR--jFUuFWiIpZ#zFBHl(rXc}8 zc>p91y(AzH<1uC+DHMuQVGdC$RhCk|NCpH@1%e1vm?Kg!`v{wy#+}`>H6GySz$gA% z+-G6(nJR?aL8>Ux6k&R-JEb=VpizUIP@}Peux^dIXj!qTQ}+kjT7&m=&3l7zpn2gy z=c3JyZ4WAG#oixJYIMDlQQ;+LIH|%M=t`A0STg0tZfg?w^N7&NZLz~$egB$wX zEp1_c>Uw&wSuse;peZa=%zj@JXJ9m$iWkllm1T}7%&9a%79sOqwGeX4W@pH5K^W~A z*O~y30s-EVeM+e$VLR_K=OEkjN!GN}K~~6sNdi<&luU{ARMOW*w^ezzKB==M6Q}8% z3BJfPnIC<3M-LH1X3?Yve=0#8X#Lq&TJ3Av*BbVPRyU=Fo2pPZ742z_x}h`D#`cgc z40~|)(#|;%UV8ep1Pzw9w52U=X-iw$w0EM^e3{9#%>_UD6Q_UrDL?n~-K|f#_NV`y ze)#v#&F?H{7b69bDn#L#_*QD`Lk3bAYR3Sy9?IO&7#MrS&vqbsFGGT)wEG={J`@9_ znx@9pYSokPc+$E!oKB~!_S9asZF|vn-K7;k2PlRjH7?M%u}+PS=svhaj&LOBhxgBq zolyVs1f&Mz`p;Mt8@+UuczrWP?b!%G%wkGOAA^xX(X=uVPSmKTz`ju&F~n3=>bpty z_XH#$-Efovu2@<-nX|%JaEOqpFr?0DmSjRyQ_4+<y+1Ar{ZJOg4R!tNswzL!b$Q)1)isTU z@9Vn$zN)KtbbL#%W#88_Fe(9*VxZL{rn9B!@Jnj}V4Fs>KqtcWD(KgbsfBDSilTFI z6$VLFHZo^St3jW*pB81I7`bZdx}kHf7}U`9#sVFb!P2-ePdYD5$UDfo8ux?I 
ztdJH9;~Dv)Dc}N*f5yqT$4Yb**B&P!e5?rI2+(v+`RQY-P;=m@kz&Z?Brv7+p&}_2 zSpiA`r(3Vt0G0$ z!{CEETC4YEY5VC9lX_g*Pi@;tkd&Q5y1r+W`#n9wdwTBobzN~B_krxVy6yT3x~^2T z=dMq-oc16fp8*CGu%OVBQB7MNDgmfZjjs{tAOj2mjK@@3?h>I+Be3LTQIDLaUG5+w z?*uvLNviZ5s}any06>|{8l*<<)O;uC0EMvvYHS}%PD04DYKV{VOU?nc@d@{3m^sdy z1WEZ?4hD=ISPj$yq|oejp${~-9on|rbRBQ~rt7;+-x;7WQaQjXNT^@pHwdgzDA){L z&gHcrT_f`~0ne_Tq98QyxA~eehTABwd*;-1JzbuX>Tr;#)usTcu*~{|dIVj?D{)D? z;4eeI!YRQk8UKTkpO!sM7_VyqG&m|K0AZ8hP=Pyw1gG~75_zeyqT|Qrs1ZOa1HaAM z6En7h?9q~BBcD-WG?zwjJNYXe`75k=%WE>P-+QtTcP0BSRdroA)d%|92R(qDb45rV zFWpDh>}4Ha+NVVtmmc$VJm()ZM|?Ky($d?OwzMD6uCK49l2iamJv=;I9goK?lc5=7 z`0quuN#|S)ZvlIOrg3lwl)^k76m&9SE`IDH;}fc^AB)>ah10Pe?WJG`kQCV1FZ5(j z^<`n>z^9@pIOcXjqfX>69FK>iRFLwuPRsnWw5PjHYYUR{HRJ~$4>}wV79`bm?a`RN zrC!{_;Q0Fd3?zIXpwx6ACQ}{%6zRt4u(a@RB;g-q9;T1A=Kv|;<-(XeFf$tL{KtxU zeb;Yj&bz$5y*tp}1Ijo?=0Q?RTbgZOBS=aX2l$EPfd=j^Z5z2&rP?-?+SV0p>G+~7 zU|Z7f4kx%cwCdsnV9Zd^UUwEr!V`!ApZk4|kPDC!8ImN^gACN59uwv?w;91o6o5}Y zZ|Cye7!;(2i4HIe3PWTm@_~0uqFP=VMkq@8;GSeK5^5uWS*+EaTq;QU z^KEXa*{VS*^}=oTm@Z*^Fy`W9^!7x@EvkzSaSOd#(F0n80oucgu4qmBrZ=jx04c)@ z2Ne8JJRB+jr1;8VK+1q+ZbG+Ds+82oR(8wXz@Z7@*!PdX@Z^&o_O=_9fDX|2J5Umo z7sA-u0b!I=m~l*=QGrMRpv3Y!^sAsNmQ&(ByOy#^Vw!}W7W^6~sb>LFj`dQ4r8G_+ z2c@~7BBcZ{3llY|;YbJ9NGeM)%rsR56W1h-t*i`XY`j5|pu&MOJ2Lx)#DEIdHxw9I zlHdVG#jLH0K~lImvLcm%lE%!MUVKUWmtC*+wBNVAI?y_>r*8N_XW;D~2cyexhip5# zy@aBLJ)MU^P8U6mWQOmCzNy_CHU`P* zWp)t9q*n@7TQ|+aC=r24CmE&pQY2D>W`akFtVS?yGTDduASm@oW^lxy^yQN=00K-T zk`DkQ^Lk{drzSK^4J7dvm=5;|V3YrFl#G!Q0z3lur3IR{)DW||&#y<20i(p3BX3Ip zVR@=a3zV`mBr6(%YD*1=TEng>;eK7}hjj%E>iba8Mt{F9wfaD>VfI)qkT}x*Nas1w z#(UpiCBV-g))6CAWQs7$iPwl-NH!(5o4TW`G;6{q{{Mb zU6Z_)4VOp!WJE_R;v$9)7)}VqVs#bf|MB;N%d?fZej}l+7j#|!HqBJ2BiF5 zaXuT?5YAV7W=otCK~!Z~c?Qcn_p6pB)x8Ds6$JxVi>j_0I?ZWbG3c8;9USFxM z2Ue1TDFP`$0tT3m{bW+i_GWCZ;l#3=d`=4X9HpnnlIbHwq)`@OPQ`CxN%P-rZY`0; zG`VueV*paQ(#NN&NKHmVL4ZD{I+a*^&QT3y%6Um#8z9!bYntn-syH=uy<4wV51Y;U ze!JN`&>Z--EQ)t!Me|=(zokCLYG&_PL7w*aTEhc_rO7tvKEXm=3WyKok~2k;@+}7Q|`colH>=KXOAsg5KTVL7Gq^ 
zw*r}6`jM&TO94NeOIz}M#5sw36>B-TfA7g&<2>A+`u(9@tq)z_lM8craX1{f7_^5I z6JVSw=shzm1WrZ??MVT{>DmQKsGTJy=_7$sQ_8t21xt-V!DI6hM-qeL9T*1ygvwM2 zk_zJv65C*=uPTvEgUpTy{x{|-y3ywz-JeJB<^aZpEoe!ZV9!9fwxGl&l&p|kgL1srz*a5+ugRUscL*3L5tERbMt(tpU&u?j+X2sk0 zWG=p^b^blsjvgF!?`+F^XJ>ZHy1lf|;Q5@@x?9>;ZI9+ZW-ido0{Fut;){CJ(w6p- zc6WD|D@k$P=!@O%aCtnP97u}m#K<2^YI!Bud=w-|qfUtV>Hwm^2@~%KM}}+;C7&dq znb6Dik?F_ICN>^yRiw1h79dr0WohHUEpOy6vy#;D)SgZc4-b+I+HZ@Ew6rJNih;DP z6Y(JX^X_4{JCXy;;6I@xHRzl85OD}$Pl6)hT=0Hc*$GoI6D8{XVJgR#MsgqvX-FVd z>YJW7KQWXjJ0&TDjWPX(hWG{D7ar;!AjN_s3rK2dN&DJBQbtT3KFIb!1E`^`romQg z+6t|jLPNc&;qqbyFIJ7ZYD#!fm2g!S>cwqg3y6C|!>&ce!DiDND7==EBW!)9x-?Oi z#jAKE){n1Ha6lKY5Ck45k;Zm|f`}(m1Q$q4YwH8+qQJmNRT-5cBv2sS14*M9&y3+T zQCdVQN(F$F@n9-%4l;ZLEm%YHhJgUaTTC3F1<18zI`syNR+;7mW($D5wjd+B0-=Q) zfySzU?MmrN6;Realm=Nviu;Y7p!eBUsu`vvYFD(q%r|3!6zby8W|x?aML_{r36c93K{AYCRFm=wQZCqyK7j^jD%HZNrr6Wt+yNeJ zmnrt-!5kQBV7MRR;1kinH4Kun-?pTAo)x4{be*j+aN>e88h0)Io_?ojIZ$_W-W$ct zU4B1oQjy1R5*(FmbagN2Tvv3i7xWod2GQ<29@8CS5 zc$RFbBqjUnqaa$-DzYFGVk3n+H3^jR>r6=gpa43F^G9G)LL!@ef7vf-GbRQA-26v9 z(J3xYNMJ(*CcB|%hK*sQrY`_a43cUJc-U0M-L}%VS50wCFMda_dk=Px?=|4NLgV`a zRL8(7-so>q;m`*Qx+Q3^w52U=X-iw$(q{dW^MzgW;PfBs6X7FX_K^VVAI+b`ANTgp zcQ1eDmK8U_M1BlLN(;dTcbwW|(-;$>i%~(Pb2)>Q)WAy?C4;^M6)Au0m84{GZCRF8 zx7}RO&&6r8*&f=id+59V?&kLP;pXP%^l<;cppb&j#EFEBw+WmlUqgy%g>#ysR_ z0!)v0oQP!voFJ%#Bm?AN!KfyMIVd5SiZK+?Ln!$SV+A9F%7j-93Mc8{lj=%IerO z^=`9S-(6i^-n@MI;)c%iz2;5B+d>!L)13G{z5Fe`^GLs=tvFI2JYHOE`%kbwm-h48 zcfB~~`J~+K4;9&HtO9mV&(A7K^?mg~^UBIDvovRzo|dq_FKst9n5n?QtP7We0U(Q) zp94n)s1Z&Ty5wrHa3;H$cDZ4gK?X2N)MIes8ZjvDm`n)|66!I_>FEI=sK(%m{2Fn_ zmCN&42_S5Ox=r9e9NyM*pq-_7hi7bu%;miE>-I`g9w24a80)&;UntnGR_lW?=D=!F z)VB}UH#f)I+q=%r(*~=)>((74B-I0YA0+MxN6*lkr07Ze0_kG5lFf|)m7OT_LH>XC z-X&I+Y)ubZ|5_1|kNr50I=AlazVF-JYPZ|CEn5gAKuAVd2H8eJh#8V4+yhHy#snm= zm;nPM#K;1Kgak4$K}-M%1{OkqEX=?XblXzjZujlmRdt^GnVIoeKmUqYkAFQP_dciU zo;v$ftg1K>nL9HwGh#jd*Y|x+c@U4@CQR2tz@!&>e}Pl=0S8I3vFlw0&pJD6@Brk> zh!_b`&c%CDRl3qmhD=Q>GCWW-wOLI=HmtMK$#$C8N$KkZoEvj29D)TXKi<=PE!q_j 
z1IdCaL!|i|sgywxRE4Y-Si1sWFJel;xBS}znp)b@l6G@*!S5?&POfBxEofDmK$zne&Z5mVGu;1&7dd9 zvHIR8^6{fBJl)jd2|a$QB+w@Ie|!nUfCh35&0dSG9zgdad7vAj+6vi$fCsalqGjVl zf6Rh39t;&(H|{yM_aQlq1!Xv?s!uF3F%n?l8E^D3wxgIR$!7PR)#DkHbTX0JfFl#W zLH4~S1u3-0_Kz|KaEzb{2=_4rG_8^piiX}t{RUS8^n(T2q6}aF3c#sf|NNn|7pL_*{_7t*W&@mG|X?Vf`$ns<-k&9;DIf+ z839rd%uyeR79rxvGS7i9*pq=3Km;82ro%C45#C`JV<>VvK8yw!nHJ142qR+dhozvX z!tn%@5&2fgb~;(2uT&wT*o!`p;(Q$pnCz40ZqE$mzqSrxuRRgMb+r z4;);}0;E6;<_pu15e@b8nx0+v0J_!Vr<)kP=|p#$In`82NWsNV# z=CxwuTZkAOrP%%vo&PC)&{Nt!q0fFw+v7otQv;Oh^f<;xKxAt`Qqz}^Ml=Y=#Gg4P zUvM1Y;|P>s>w9F%8t0Q5QgFvOFQb4GCM~X=5}a`k{rxyzZI~($GO!d=s)MY*qX**o z%odbw`m993F|E!IsJIxdWYQ-|U!MrpE*~qS_Xp-ajIN>Q!?yp704Y7~rx{sJ;|(E* zO0g@7(Fb5y%twF}n)*dk(b&~wyR0g>-PZEz(Yk)~?4%x(E7OK{ub+eHUskkI)Jp1A zC3T}fD;Wg{Fgae@(w4Tgr7dk~OA~RC5zcaJ$M)Y{fz!A6e7sK}7MRV5AJoOecfJ;1 z{k440j%@r0Qj2a)TB^+Mq5Yks)oUN8l!I+WTcSA zBQOm!KoP)KEJTM{5EG8Bje|a=WEv3SI=-M4xu51@12XAxu3D1C01Jvz{A&b75=X-) zl7IxpK?;CeDZm4qAmoRcegVH`;nJ0q7fLrg0X?v}7$%i_rqu0}h%H#jNm0eJ@yLpq zb7TXPx{7N;<1dH)C&xvV7^znWtQPS81j!l`tCXRQBjYrdB~BnIiAF)Hsw%Vfw6(N# z1_)Kx)x~d>11H?eaoo6C;g)Qe&JQi|feuzxSig9*#;FwAF3f*3X~6RCRMOYVcd? 
zZ>RN8KOXe3gE4w1U@)=0L01NynuLRrLJ?oi!s1Nm!NQnC_~JQ_9eb&bJl@YFA0l=1 zvI12O!t${&%x5kTAx`Iyqs(`?9<^o9}%3sUb&tPOtcaUiJmO_6z#VFLd91 zp_J4#ziE2?aDINKfAE9vX;ChdSmJ+Q|2#V|yKcx#o=y(|H zsvL^w?K5P`Io@V*J}Mv6Cns!lSfEG^=M zWEe=Q>@a;iWAd)w;6PGt-5hggj~_oaYEpYzCuxJ-_!@vc`7owL!vF*8zUwz_+nouY zf&pe)2U6Dnvf)ZW&K?ya44Pu#p$rzZuYm$UN-$N`LQfaD%1~l2dr||hl8cPhKa5cb zlzo9*l$Da|&=vEVQ^;ul{aDCUqLSaiV?iZu2#-!n3tWYzR+z4%9ti|B?T0CAX$-E> zE(cq)fPsu7yJ_*eT&!|k0!pPelckXbFh0wk{FR%tv$Kn5 z&z`;d*0WDc?(R8#7uxi5vJ+o)UHkmz=JJgQq=s57Y6wevUxB-&Eq6Eb6A-@QZi08X zTT5Ho(rnw^?wpd8Av!ZkQX@#J?fNtF71#YRtjK<7P;ezF8{6gtLYji5j!$D_T-d~e2OAIW>#FLck|RLMY+7Tx-|r8< z|IYVD^U{{~;kCz)&RrZxi>aV<%<;W=@$%;O_IBTP?ScG*jt0#j$EY+;1)>{03Q|APsb+b=>gZ{hz!2zDHU#MN2Q~X|VQYf7JqBwjhi+ z;|NOzP?{dfu`C*=P>uOtjGVL>|rOvBczt>tGawYCJITczys1wk;f?ISe~y@ zO(ez>+0le4acrjkcuYrB1N7}6hPD^DYf)V9HFP~}l*F4#it9?@=5>Xe=d{U*I>{_$ zV;aLITg4Yw_{p1{cy`l@XZuck>(Gm5-2l%zjoW?@n|`4EcsywT}Q>kg^|jmsKvLt}iilDNK*V*TPXuKKFqO!P6^>U}5 zUbXmi*WuHBFP^poKB0NzNo&;NCdD`!)BONp~xLm1kNU#j@O(tgm;p(03&9QXXiXp(3%c`7%0U#ZgWFDWwfD{(uIpDlZaO6676(fGY^AjBFAekz}|N3|So?{qt;3<SE%XbR6N)>?#(MH=-0<;ju4d#qLb0B~<}Q-y6h)Qy3}}k)Gq0<|yJi&( zv6_2zZ=Cn0%l>?t#AXCOfjR?#dtExJf zt*&cx{Hkf1m#g*q#p&tk%WwbEcV7HU|MFjcVaGr(f`W4Y|h(@ps4$+N!@cT{gOsbJeNl>48FE8-XjnCg**#lL1aepab))!RpenJGcsyIO;ca4 zR;xGb_4?K6$;pc+PaYe^s2B9!muO;u6QS4ZzxnU}n-`^?{SLOq(tg41FZ~OD-t2Vx z{D1ym{#Sdl-ENH{B5jw{Wz0P__gA%T*Hu-gRi!{GNq?4&h*eV=CnQEoHNM5Dcesfh zJAsj69Tdn6TuR>MivzhBM3m&I88J6IG4Y&)K%(2D=MR zY*@&j$EhiVl#%^o7=!Qd{T*pr&!7BT|Ji@djcL!SeS32YUNun3Is+4lis@NR>{_$yM zzx@5*ChUT9F(-gRZD8`dF!Xy*>;djxjnCcB zZKuCwXg}pqIX3@7S>o@GlVhj};ekRF_fXq@yWa#-&i<`hrj^;t>J+EOVgWJ9umKCa znZ@*$p17oYXA%rw&~&h4(G(hQ?_5v%q=@ohbA2-UucBewZ7z;Dh zQIh0&cD*Q=mrJok3_PLieR{8)bh64t12|+8D+(-_ICzwnR|Bt%cjh5ZkSOFbI6lm%Q78kRIP_w3bI@n> z*FlRO3~++}yrWm{=v=on`H#n7txv03>a!R74j;F@c-#%}q}SpxZRc8x?Y@U~I~XZJ zY|yYwj*3xeBNNoOjVnd&C0^^#2H=#wDM@y2zCx1ygD8d^z(YAh zEiy&2N%`xp2pfn*3y}RLKAiFH=on+(Lsle*AmXApjC2@1)D&FFASU5P6oTibr>-t7 
zV3EtadCf0Fr9Ma)WpjIJkA-1Q)M7_(95*nJX|E()Qg6Pb)#-vxWo#CcKrxB)H+Fwx zZJj}sD`%Frw52U=X-oUYw^Rf&Yyo)gVC&x3;>;kK&NLj2?>r>T3(h()$Caeqd*7Kd z221C>`!l1Leccxsze2#=g9A!sm0XxdZCC{*i&n%jE<7Fz^R5iWkq{QkaiNHBSln~4 zG0)aIz2@3oNhx1fm1-*b$EZtaJ(wiRSuf>Q4stULayuxw)mm-z6zi0WUBZ? z7e{X5>l=r51mh}=?wy0C!4$qS3Gkt+WUrJ}jiS9&$|wxnOj~th6r^aorR|TYTYp|D z^@{#Rez7#_0}dd?nb+~2V`ypLkjXL|>flA`{eFHC3gIM-lRjp$1nR>F+OCyUuTkN8 zQt~r>Tb>R}v{o=h4Ik8wi zhsJ86B8gMN9VP&KjaN}htzuquim~WRw1muqs_#;Yx{nIgo#&e2i%O+IqFe%|>jPi& z0&O7JqVtg=3!q5(}A!od{{0K*wbJCl>c(G^5L7NAhXb042QQAsrY9QZgY`X35eAfaVGjIe?s<-i8n0b2I zCRN)5*?harX8m%#YF^fL^-`I2T!I5ey{66N^lokc*UMD!3yDl6-JfO)oN88lJ&>;p zf|B%c?DraOWw2wjVEBcxh4A)*!*!Q7{OAkAczRb8)atCLY|YK@{kPL|*3-t3QfDAn|Rlz~a#? z?c-Rk9JRS}$KU?++ner>e!jWg?bgH4ttPOPZU&>SI_PTXX&X$;zF7N?d~{iG6hIPl z7ZIE)2SY*Z(?L{L3xxm@rXh07aKCpAXt1?eFe$+z*;51AQU-9oYnpmTzQC@k)U~yp z_SSw{z)?$k$m@6%1f_m%hI>za-uS(H+HPsP?uVi2`k{h;s0D2(bnVyo)FGQyOct4Q z69XzCvn4adsiJsQ9LLSmb|6wf7bn2Mh(KhtTO`eOs_XjtpxPDrRC+AFRdNJK(PMf} z)vML2`u_KStVEeX=RsHqg;;d}n24~6B<@fa^$3BlM7tn~6>w-;QAhzwA%Xa*T;;;& zAYys4{JTaZHJGG1Fd;?-FT`OG8az1Hi&YgkUgQy#hju~LtR5kN39MCMS( zlacQvymuy4LcjxJX^{Zyf}tcn^dXs~@VOj#R@(wNQwWxjNq9qsgJKG~B8H&ix+Q7s zA^r}jK0PE>A6np-ck^7tPjz#MHqZ2@5;EP@H=SScQN#X$OFjo1q?XLchx-s19b2(|!*TXe|&z4O*Pio6oeS4PiW{*Pk5d+}$9Iij83eP(o`< z)}#gaD1X@#jG%JB@2C#H!2-m<17gIiYYGwKflX3#$bXzZ3mgb*L_S%!+C@6uIkfiH z^^|LY7-1{~)4yT>t3sL!yG)$P!Vh>Bl1#!ioi<82Vj{i(}d%`1klJp!K3N#jFkJz*W zY+wYgBaQKqX?sU+JEK?bg)vgcz1UCP0v!rV+S*yEr!jtFw^OvoEv+T(s8S_rng=TF zC#YCz2ry?abojyurdUeaV;KMFl0Oiy1{g6SVm=38K~*Yc(Js)rCxCI0cj_dOl&yoI zug6RR$trF>uP;Lvs1P9@jwZhUAua2RIPU{=+eu^anKO}bm*lZTfuV|e8 zo=m%zY>c*&(3xX;;WfSRn)=-po#2|*u`ASYr6gR_b|a;@ruSXjq6oKLi3ucS0tEd- zat4;Rw52U=X-j*bO~SG86xyaIYa5-Foj6MXg`yKknD#?(5g*`^oc*@@24C>Ye%3pD zrVwQbTzKCX3y(s|?rKrd^21b=!sD_3C^!$r7}s5=Mn_h_t35=l){5`F2YlVv+^I-G z2yQ`LDb>_Ij}ev1hV${9LRzKm=fV~a8x?TeSIP_ z=5s6zfskH^!Y)zO5@r#9mf;qeZhRsD?!N#Qz-GuDc>i6CJil|{UXeuO_;(2yj3OCV zQFz(KNa?<6@^gFYvu#~hUES1eQ#YNI@=7XoA*H%hs=8EFbx~FIWmQ!#=ykN!FAYn( 
zDMYkmz>%Rj#R5FGY%KorH?!sO+LAuW@#O>Jw9ObsoK(-MYmM5deJgEH3uB?IdP>`# zw!W&z^j#%sjvfpcRcpJA7ATCB(Q`5Sh#x49(M?kfgxYM+X%qvd(lH7w?w1y}VNyvA z1yc)-*=;D2ir+|jIkp|ClqYtL>;7Xml!+)-BM=mn1XTc zJ5b-))fH`ZJr)Ad0{(KfT0N)j1wH?Q=ED&r1yc_5lEwdqnfYyLAAa-q>g?=Pi>vDc z*6W-BSD`J+%JdOslT5M zTAxzir(;@i4Z#BlIS4Jso`XUHT6R_x#p=wGOM)1BJ>H>W2momaAUYYa&jrlKl(WLI zSRyyR39-0#hS#L3i%?#MnQs%;V|=UGiK{2E%L&C#VxNIA*h)e;hJ<7>a;So9g1-FT z!I)5BQgs`rL}BK4V3H3`+>*rLn8FO0K|AgGjkZ_|M}U-+M*Y@+qOL2mRchR5uD+)A z`_*Q%dA(k*UeP-Kin_I_KQh~u-5f}2Df0a) z$j7Zq29UZpILZ@qB{f$GUJ6S9=OkV+V+n!{RRAI`>(EE)-sBG@oOtK8}g+$YXgTp%p5vH}}FXL+WTkQv0~Hr9BA$&+JV# zU&S`{==^;1#q$@N-EP0ugI;&tKz?68lIo9s-xx!hNHRR$9t+xmm{?kzv8+^377g1e zurwAr`&wr%Ql8iTidiM&SYtQ880a$xj-1_0EmDeJRkE#XwQp9<4fz8%a|9 zVTtK3?PFQTP43S4!Q|_XiWKx?L>iTh=B}9ihQq)RBBaMb)%}LIy--) zM9BM~n9l)ym^U1CWT(d)fH^NF z`us!cjFby$y+2bj|2SBRoam~m3MZP{0+JGQflgmEa1cT2#s%MQ#iYjYn%sn~Q3)~o zRVB_g6`axjv{CY`mUvo8akg*KsQhf{L??(-5N;4!8Dg(7Dl&bvM6l$Xk<2(NG>1YI zjR(D;Oo!|OZ~&Zh6C;3A>>2YBkd7x_+{MVV0TY0fEkbc|1Y4Lep+8*&y9`5t)Icgc zb~7%_<#FyJ^WzRtqz-}sBxT`X7|jMwNlJL&h4A`c<_1a#*W5}?LeL`%M?QvIRms?D1oU! 
zL9|vw7GTMMBy7$Akw?+_VxE-jOxPoqeugN{8Q8Efy8arG#yB+eU~7ilfKauASew8# znhnv7^VW=c+Z6yf8VgK9b9~R5@9F((8sl}36F_R9`DAox3*Km|uE)WDxuwA7^CWo} zm+FE@uYPJvm)EMf*j0h00J8Gnc)pGTx(tqloYhUR^(9x3$|pZ31%+Z(6ckF=&T60m z2Fx9#V5xxq8sUT9uR?Ez1g7~_vAGj-e`OcT*!yj13 zZz$rtP>%NmvdVPkQADB>7!;*kqR$Us0P)RLl5&BqK`JThrk0!aS`S0tN(rXm`m${g zPui|~+V}l_pg+(ijEYp(^(({F2GBRgK{iPU&F=zYMJNSi+6IqeagNJR|c&THDFGeYGuDp0;2{i?VrU1Nc zn)L8_jBc*yxn^BBRX49IRlTX|>dku9Tr{i3fTwnL3}|s2Ahr0$ z-|&|6C&nsLVJV3*@}*O`e4q_k>AJ2Dt5tIiQmCOaVto2QlkCmm(C)il^|dysJFMvS zh5>Wa8$fCg)fl|NVM<%4Ea8E(SXcyHvJAK`JkU<<582=7lyQefu3 z{i$d#n#fKto6_u*R3@X6CB zS`Yfr_1(2q^)$(z{Xq5_^^d-;q|k#>fYnM#vF&>)$(GZ7FDhH)O?I2fQDe~p@RWmG zl?Jdt5(q&t$r&w-a6Due6Aw25USMJ}MDta-cUsKm#D>7sdVqiJOjIgS%8au-(eDbH zpg1>9%5^AFg-ND+GdIF)E+eFHm&)C8bHuhDAZ6yYN-0h2X}>+$8Wr3f&4H#s?Ba0P zKWW?c3HdBfI_m#4KGa}{MhBA#MKwA?=tP*XIOOic2mmI+WjH^v;xQv5_(!SK86SF3 zU``g9>g(}Y{1^lbvl!1k0xgi#2|#(zNhopVGr~IS;%Jbi5+N(GgY;XY1(HWZ8UWlK zh50p@-%D5Q1yCe43ash5ETm(!HUO!S*&HP8r2(@Z3@D0x@7L>9{f5@@*PG4yO-;T^ z)2v>v*K6amydhuajW9XCIFGj|Nqyz<-kB=RvHH^1fOrH6Mz(6mttV^%fe|!G{$duy zNY}=ap&SzrKnW?+<+!an*ni3GOlA=XH>f>~8L0c86uz4jZI}J-$1re=F zah)tM67diVLV)5r;lpBF-Egu8A<|}xi;dj840ky$(Rjh*UhHCME$xB%enJEVDH9}W z#H6U&Y`5D@Rn=>H`FhZ}B7e`s>Km=msK-b>){_~K1VVv_f!k16SP}~L$4U8WsN2n1 z?FFN`!6c%L$A5JmqQ}6Gjhr~#DoM#vNvf^tYEL%Q?a9gJs%aXhBy|u0WWK~WmiAGE z031Q%zNlBL)nL@+wGrXKk)WUYI$le4&_cGF0i^0dLqQoC@*Gn#IP;ijOoflBtT(#z zV!_S4n_NUBW^^8B#@JUKK~j^F)acjS$8=pbw5`eKS`91FtTtG$*PDvX0a`pTGCV{q znJ~V_IO8Qt$yLgVQamx6eBi7kIutMgFh83IQ56)tCKqD4!Gv(d`!iS&?ZDLB47KqS z*9r&~D@jE^WC2OdhopoHu!$HTWmG{L38JCP+DJ)zDOMHjPgn5xtbxay8lS8x@wisv zu>#y*DSa;YLvMO6O+qlhPy^~_GDS=^{upsJTvx$8P06@0G9y@Ymf}9fs&xi6)GjOy-AhE~7F-Z*5F)PHRzj1Fl*SC-jnmPBWmHl*c{x zW`D#qtfy@RW7PCo+CyzX6ZFj#oy@QSsF9N>0&>Klker)23!w^;Emc@n(@E!J;E`Em zOuZas9N$-fueVrnATW>@>Nwr7dk~ zOIzAEyv>Qw3#THL`Uirq=fQ@L%+i8r$lT)x5TGjs+2*e%9>2)=# zA1)1fh?IR@sdm*=`}L-|J=tz{lZuplsZ{k`Rn-@Dy?Vaco;;UQ-Bgvjshj#{yWQMu z)~j3V3#SZ%i!Z#id9#DfYl)H+&Qjg-C?`ZC4^B=Dr=Gs<`_8EM9q6BHnrAN$hqk9? 
z61*lJxTZdR+NprajG%nXgP)KwvClX!cLW+`(g&%69vrzSMWWGc zK7b6A8o?p5^`E0Tb5)E8oQj3-9lbA)VWRuNh6PtOEL^~t?g<%K z3Q#HN>$*CuSIrI0dlzSCXKx-qe)OhN%BV*@SG1qh^Xlhf0!fWAKnStG{*Erbv=7=H zosNI-z3-X{;IQ8xuE{nVX#O~$#w$Ja`~9xMffh)bA50#5TPbo>1&uCH%e8Weaq98@dC8ai?6q=tU zCnuXP9z8mH{`Bc1vxZ+NsV-;@zi67(<*KO-KX28llcI!@DDv!UrvU< z4k}U^+MN@$LKJ`UGkw8G%%4ga(+hd``2=?70KOg}srie=p3wT501($-ASY4ILvFG1u>H&^S{T(o6@vCN#{iJoU?neS>BLKW z;QpW~UK_ztw5`lWZBwagV?a`Rz*XN3tG4Y-ty+!a5;qng%Tf(}n(stEN<%eNRNPli zoj@Y7o)U&(6i+YsO9d-6{)1=67nV(HlAb{KGzdKs$tc-21|+p!-JG7DT&>sZMM-LD zA6rP;8=afYMw36$Pvrc#w|)IuNI9HZ6`C`r9j4;xs}%_Dp&CFH6CwQaz#q?*U$@sB@&XJ;#TwyMOlN{VMn zngthoB@IAI=s~0GdaPT}YdY=Pp3dBRZL!WFF$FBxRbUSF2x-npnigXv9X|4vkt;zV z>)0F^RTyUgQGgDkT(Y(SxrkTMhOC@mwG;uw<$h*J$i?gqF|a0Pb3zZsDD`Z-&kcsM zj3+`lC>dS%%<^*=8H#q{7`;VQ3$NFp^+mK$VQ?DyZPZN>TwF z1s<0evuRBbDFc!k87L?9sF8mHCS@rP%48WJWK4h=UJ8<;92Uy<6yEXNP?l-r7xS*z zTbACMHPnvRDP+lr9JYgZC+J2t4q+6e%oxYDNIO6(^UTmTb&Rrzf2b3#_lqxWbZx zR6%145!hM-F~L_vr5mmlV+t1OikEl?6F>?ki7>}5tP9xoP(T;Pj00pPDruUm-HwK8 zYd}(4g*PV^T#?OT)GeP&kbkTceAZO(+4;8mOaT}M#g5L>E3<*@=_Dn_><1+)yR@Y( zZD~te+WTyBmNb5(7M)VZ7{~~MofQ{PVHyv_*b9r)Lhzw4%Ws)_#INr&5x#Ogr=`8^ zod^|QKHiC$n*%;F07+sFtv&|;W-85qiGn%`MgnXYE^I*Vn-%kP@lH?P@$|`gw^^yK z@4H*Ahf9sPM%1g{|NTF>`QnQg-Szdg*zfmM+jX11?@yHpB$&%*08+iK1{gFSi2>}y z;(tkn5AdQvOzMuZpfu$J> z?>!$D8K&oEK7jeZ&-HaYGBTsE$TZqGo(h@5i;s*Vc;cT}6`aSG?SCMBNHk&!#Qf^H zXqr`$>gu{`SF2{f-K=lVPPaFzsxGQZy{ziy^YwcD+1dH|AAkQxKmLsTg%iP z&7nPrK@TQxXB4DPhhcc4`u;Fg^-sW57(hGtFE4V00-==u@QB1{8p!GN3I(v$aU&{B z;N{Fz@+g{f17~<391v_y3Gqcvd+&>?5gD^6#MDsyeRl6jo&Yixo4F)%C~$@18k5=U z-o`&(tjd4G+}hv(QclrkWJtCw$#ta;>veN;c6NIC?2{+2e)*Sw>4j3tfuf$%uYEz^ z(LFY~M-6ctaA^A^ZfC!8q(uxk!le(aNoNMLI=7Xo}eXcJqK+*iUqMLXoP{ z6Mt<6c!4510q-Nc=N5i(hniHvs}MjuVf~D%moUWwrYOF2+%hUyuE-1dPmRA~LZpaH z8!^d90tadXQ;E&|zSyWr;Y|4&n}9IEy`JSCql7rx;WO1pI2!?Z4n8BQE~pD9-VG+u zMm*+#CcR}+fHd0(lrrl#T9A}c!M#2`*}i`I^zrlWefK**|FvKJ;UCjmuITHUg5E1y 
zxG#qRul5IhBcdXlWgY*@7goTT(TUda)ffU5&iI7#xGpfFBi#aOk=pqjAZ5il5OJnhYH1JHtsU=SNl9wD_i|boof1v;KvI)1oU}#b<}}iez^uF}s(AA2AQW7!P~7DN89WC%fWSDD zgR$|)5mVsfJ))!(8nJC9bP^ceI@*LZb1CRmrP`*ccbo0{_U!ER>f~f=l%$Lu#X(X_ zTiSyUPn)*gZVb}d`NVsx9Di-Vky@kdhoSEJekFSgl8WLnko&HYFNyd1Civxk!ZEY> zUhmbW6kQBPDNhbsEBevclCPxEf`ZZCh}T7&j=PIRcs4L zO5{lnKCW2cjdj3W%Is<{;J^L91-8AwfyG}2jr#Tg-R>azT8W`nI8+jKFR_;(D(XqK z9{1ojHF_WuVoHw4Ka3?jI7?^5I~9rECMl>`dOex^jv+RI%hy=wA7pA75xB|~LKIj| z;Am(%{Z8q93ieoFu+Y|$V>lythbj0dUF>})ZMf^R%($Nrs=x4pH+<`81-`=qHE*M{ zX^pd1MoMKEkv(zzjZd@$yjS)aJnbAQQ}Hh87W4ubCnyg=0j87F3qxt7026XbiX2^B z78scofOi(suTlQw=x4`)SpKb%dG^4RZD7I~0 z5twTx56pwt$N-go-Y8@SV7AY}mmUXi--qArbD}YRpO44)@q2_Qfu(@!PXQLpoYMUP zzZAp_aNMv-nCh@BkuYCkkpi}< zW6sp6LjdFmcHmKlVj0{Rd6-z(r=*fBrY0zIeQ?J3HtH5sD#)9~Nw%zqXi!OIzB~mbSE|y{qvE zcbbtUVI? z9@Wi9;$=D>3u|Q2Da}-w=0b#!$G9SOrczNI_e<}mNH7PG;<_tG-<%fG!%R%}%f>hG z7cr|r%35{Tsj+nJBJv~03$9-K_H?&>VRdwCa zzGl-iOI4{++1qa8@lU?}?WMqQY4;s@ z(Qy(N1&nL+v)=vP@8(&?*RNlTAO7%P9PE7S)G*Zl_dEY!|9ijx2WPjpx2HqjpVAz9 zLVey8dz*Q8Qz^aCW~=mu{irzbC-K7I1MQtBnW?G?TM4ec-J^jE+3 zt3SE<%YW%FVqv~r2KR?H*SwuE3jf$|{t5i`U;7E!U!wumjpT`BbN#LV^uLg=UOqqX zyVig~PO0CV41+usV?C_i(yKT0`d*{q^hA16D>H}qf@m?FCuamk5!WK53KpF7Zl2*a z8)Nu3^YL4%Ox@o?0x=i*7x(?hvXU0BL!Z5T$$9Y0_m2lZ#_@P%R94{lzyDuYn?_Sax)Np+Lo=m_j0Fmgpl|{Sz|8t& zNiq>2NpQnG`(h>u%T_(i3Hk3jZvv1CW;ZJ+0ywgi7nGBD)d{@oFmsB}$*h7UvjU>B zT$qv3$(1KdWRWSXk#XGuWVa!!_G9-GM6Q6xl;S*?c>*?1Fia^`PiuHjYj-D=GW%XB zWe6Ky(mMWfyx>9;<4{u{sks|MI?3VB^Y?|A$d{=#3_Ei2?Ne8MU9T;E+n z(zg_$nBhZ#`+658n_0}-x!gX*!Ws#8phh4IU6&T5sQP9JP=8!4gw!Nt?ov$di+LBp zdA}{PcRY|4BCd;KVU$P&#~GV7B;I%&DclL(FVxOZ$4;#pRXU@ApO- zW{l6%xY<&N-jXkOLgQ^qh^#rNu6b79_TW0Ch6NUkGF53eZB0L)XYxRpxoO`LeU-CP_aAVN}KhX_7{u zjdY1>=)$dZCFmN+byL@y-F~n;e7QkJ3&zV^J z(Z}58gS&4v%-?wwNx6FTa|^Kz0}mcmLdsWQ0ZDx|qtjj7-Y&8xh*8_;2m0qA0d|!X zH?mdc#&Drc*gb6mYZ})Pum*Vw8WQR9)Dgx4yA3iI z2c(!I#&-BDTw7*R!3l-MLILqL zdezCZVEdP+6Npp`W9HpCMKEBhQOH6#nX)rWJVXE~!GTgDQd&xDY5A(B(eV1E0hQenJP1f?)(ljahS; 
zKvGgp0g>@L^0JmF#sSfYPPo3ja#aYkaqwce{$9l``_0Rk{W{D_5NSd(u*yln0;`ZH z41!4TZaD)hrh|iZ=oIKzFquS&Hrm377&)Dc#tX8~_UjSEz+qrzZhpOzg0W|bIMc!L z_mMIj<27=b5E@K?f{lSOsfJpFVBR=lwzQ=!ZE0_7JLQG14kB&I*c?j@ z#OmhGPQn5xT|oA4><=vIy00s+v!qJj(-milGK|vk!JS0!hC+epHu4-2`Y2$YNALoS zV$HOH4VWr=C;^x8#)pC-HH^PWoWS68TUt29 z>gn^fBf_32!zR^~(RDUHFb&a`wu7`vP-fpW^$3bG8}++OCDoMy1kih4(tBUfvGI$I zue=i=96)MVeB!0Ob1U%a@JT9EfxPvc>*ih8dXq<@ad1i73y@G7y%*~Dd!^(l_4iZy zxKmocPU~tgDpRH~rqUX#X>X4al?YRln2Wwgihh~>i$@CWnNkcuAQwvKq(Y)IEoOFw z-+-}jM-#AL^8dF&Pys7Kws;XYl`fCcSpIdMg^&3+NG|rjBul~wU zma+SwXPtW)25t_qrVn(~Z*FP3G==yhz}+mY8n1_1pK27R^w7*9XO-$(!!~YhwaVFV zsC8CQ0Es6IsZAd#&tPt^9g|jjE1T#ZhR(hIJg4@C9h;B}N}c%2!fJQz#}XMZSs}c69wu=&vW_2%bnaY*nRC zsPk`4QK8>9*L$+5KY`IP&Pvl0JCRVqRCG}DT zC_ctv4kLAZEJZ}rLX>5MSzW+gh!Vg%Vlxn}-4RyX-uwA*FUGf8RbW_ew%!rDR6X!yuDZy zquxs!hRnKm@m@Iwi^e7+lATPvFlH6aRTLSzE)jXl6`-QEm8}^d;`$dTD0U;W?*$_; zNO8^c!`c#gf>%HjLZsW$9{#!tWFs1ueIUDUysO%Q4aV{Xr$sP1d5DF`6JQqeiqs4R zFRQRCj#cS&{XJ4chB&X{u!$JSKQRa~6xKo^j%ff!qt%igFKuZLrQzpaJcs>$Z{j@$ z2(_VpenK7j?9jG`3}b760JLP)v|gD+hY*QPnyMNCS3HF*iW97?X&_@*|I%Pjau&$E zAc)8X1RPbMiNwHN2na=>;F|IV;PVHD^aG7JyYA`Q9B8|xXK#!z+%(N4v#*?OwT%3w zeH3lIS~-Fc2a+;<-Nb+@2{aebOK5m>G&}~Q80kh4B4qX`&IW!SOt=7ol=H+nMLt$t zR~22RX1m#}Pj|bU^UuHdtg_duwR?9BA8c5}@wy1X^!*%AJ@t8Gs7R6A1yUv>I2vQZ zB>5Vz5C}8;F;INSNrk*wdgOhW&{p!O+cDCh1tj$q876k`KvFj%0!WEnrNnKc;CiUw zN@#eaB%BE{R>lp?9@YloO($A+y;%1H?FU>B0*x%D5w?|>K^H>B08i4S80$#B9FtQN z3b?#XbHa?oCrpf_?)P0urwfNLQ!YH{HAsK+I zkSSK0Nu&OdMr>^868Sn8#fq3#Q38<4`UXJ6kQI?oBJbfrlQ6aJr+=kib27LGvS6!8 zh#CrI(BXG*pj5OD#RNT=K90tgUGvz08fOX%lyWLkD1-8vL2Q|V6gkReTX2YE`iqHL zPihG9yNFy4+g~p`HGwDw--?5NaYw$^#S3^E-c}?4V^ibB2u>|2U;dI|9a&oF3m71Q zdE?x9I22qk3x|Wb-1qV86&r;d@F;@1Q=tDszPjZm6*9&`uu%Xg6o9~_Hfud-?rG5T$6WmZT1q- zI6a09#W?#HcDu6nh*OVRg4j!2+R~P`w55F+n+eNFbE`*VnBH9?4*;ML-9hHQK<=BQ ztix|htG%?Z#HamA#9o&67BPfKW+ts51d$W==6481hzv1F<_H`GiNiYD_QrmnwglY> z-{5Y*0a78d0mcuJmr}|~Dv465PHBCss_JoF;}g1|H>Lors&wBpgYLS1)%CP>eFM;= z=nV!&D=YbB?>jGBu=8WaJi`m$MSP1=MZ 
zII1YasHi*^=V&hhljDnO&d|#Oa_yj5;e;9y#Vz{7l zUeReCd5BY0<^&>3TiX3rD?XMBKp-39gtt-pfS6L-gQX0xh1MohRBPtlT22gzEuH(C z`urmR15nkEw8qC;>vqt(>uJ92Y2KZ-fnM6nDb&Qc>3lW`GTZhppj0}K&CPFc&*c+~ zS6L)sWJuVB;{*}-xVcldpL=~=93J7JFsc5AR8qcpzeFD3&Xp|Dtx4|MTD6hr3=r2B zF=J-pVN$ZMs(Qc#V(iGh_gTM+g2Ou6B%~5u&kdMv^wIiGBExYJEF7eJwIbWcGzK)OA;rXO^ zaK9%@=gYv_ln$D!7!X)4!JSfV~(e*v+a zOb`WsyUTxE!7)K+a~q@p;7t__2X+3+1a5}t`T#JsNPp9jlQiO}_Ber?2igxxs)5$$EApMM z6z!=SQePZZa4%N|?7UjNq;>tJ0khKU97xI(>v53OvTnbJrZpyUUPnJmE9Quik`84T ztJeT{qUm_dBmhP(*@3LSiT8d;K=>RC#j(gGpp?jmKOZ-vibyn|u|xyRpFx;_G8#sv z-6gO5G1`XykW=BuPM_lmd%TIEU z6dNHBRAdsW=WHdxW4LmhS%mIydoOKiUt4?q<~1A+ts%o1K~gkaPw44$^5f2^M{jAx zT8;4|vv8Rpl6LfA0GJiMLl)1Ds2y@$MIOPx)Z_7rr4Yh3f)U$2wzF4i+rf$mAZ99fKvOptZ9B46uu=S!{1BDQ;IvT3yr0MFM(#KrBLXaVNb_mhx&Odnh!eyC^8h$RW&fTG73#~CJpDLfw8dJx!O z9g8lT7qzXt17?W2AcIZUXnML=fL$eFUjy#RZrCdjw-Vr5g19C_{#vVs3ONf+ngy&v)Gd>M{OpA-cC9GL z1zAKkeqqQnVETy#5P*Pa;mJP|SYA5KeIU$`ny^Aj79@jIyBQ$AKb(jpizANQBO?nt zjLhp`3JZ0m)S;=X8^ZwFteVR;ZJW*dlAgbmN?s~STO}_IXv!4Anj&y8KJv5~HI-Ch zdGU*v_KwZPN%OdHoR)m+IhW79rVw+eCZz#W*ez70I>UrZBkPTl)hD$6Jw;RGfug1T zKy&gybMBVr-eIyqQR)F|ov*kMPnVDZrS2C^3^6S@I-eaQMiNH=4wB7lnPx^jxA20T znb-5l5E!79m#2mz^y6TPGEfTHwakH1&aMYmL=Sx41i`ZdE`m5#{?Po^nS%aR(_F7< z-rH`s7pEs%Q}po4fTL($d|`^LX&rt+Z+&SMqzuWD0ofgzrXEhV+q@Gm?IBDybM`16 zxu4^(r~c5bR;yj#4;Q0Cz%Ur_$F6GYYo*%js;kZ4+QBrfMSJLm{eIt1CL5a9D>2o{ zLRuifhX4mrVMd0C<#~hIWOx7cF>0s!7|fuJz!eyroWRJ>eDZ*h>6TT^>uC)QOh$cF6K{dz|)H^BPWT{S%Ihv5?% z1bgcD{Zw3u>TqbA7TOhcQ-W{=~nYoH<${Jce+?=i{yr|dq zK?hvt`cP1ctU<|ZPb#;f;A6_lyR@Z!sPTE0%>LR8ve*!!I1F*P7QX>XgP>6A2#8h^<560hth*>Zx zwyW7`Fo4>px?!5VwA2P9HIfJoXkum8byHt%Pqq#uZGI+_|9>e4`-6(R<@PDG?SAupwUXHbzN6mnh#C~Jv?f=))_eN z+O-dOZAbx9UPa0R&K+B261Kv`G%Q6O=V48$Ff|8CVG;*JPrl%v6i3dSM9jEsN3m}@ zFGq+N&tjoj3rOm{7$+hqNqK-2ZOy8p326n}jgs4bgcr+Z_{3$ z_F9}>g6O32N3;Mv=(-o9q7>uzl!8MykQpYU=gyKv$c6^$a0VwYJ~2J;G}#f>QsjUs z9t4=a4`3nb?m>dikJg4l*89{@3|p}vBKN}>!*&2i>4Ys981Q7NZhG${LBtFm5CNow zh|6A3#1uHQMNLe-h68G*`;QDD6+@%~ivS=*U>`qTSnw!Qk^*F-%!NcfFAz(Ldw;8!F9 
zL}=c44D#TFy#nr=XbV!)ez{?Bdq{%NkVyCPBAjKszW*VM83Aq_yiK13k;c&C3PielQuCm9gk~)m5LS2G0uRq5+bgXR2do(V~{at%(V(_V0Z;& zlIVi?8Ue-uYn@z!NI_yuqTo_(w4Tgr7i83ht+y@vste%w%hINv$NAzPaZ#dH5C)$1)cqJ+@m(zCB5fBe><4H zQQx6qZhYfjEW{y8`_gtnK?W!t_ixd&%=*KcmsYFmN>HEQt=B6t47#N;aZ{<`wbpnd zr7|GETiRNB>0rj(&<~=zxf$dnWg>@RsC(J3#%K@|mMCC&jw(nMCqzh;V7Nsb%}L0r zVscOfmS@)-cm+_LxkBL^#%h2NuOK3e#ihhC;#A+)E|pOPcFm*0oWUYF^SA zUYa-{&3TQ<@BsDQp{>gM@swfbVaJ^B36`J?BmQrGmFYg)grPfxcuG+#Rc#39)h zkkor<1|Xcwsjj!gj98Aa(^ZOi=mv;raR4^HEO}?ioyY70fphmDvTD`){?nGYAr!4`XI#;NrL2AN;h=}%mYyaXYkl$P(c@G}s$aU((ms}Uetw=R zNg02lSvAdxQu36B)q%!IH|j`fC*+W-}VINxYP>_O)*4#K5M zg4UBMaFnHN!>Nc*7M2rvY(I{_30Rb*-ji`+2NMTK$x5R9?m2F1kZUQb4G6WSJHH`2 zWOLK2^Mh9Bord!s<@tbczVGm$4EA6^IrLa}66y824?eJ0k`g074b#Bzzzr|otgR6P zBl&|X`Zfh27>KV$VCc(zU9Qm<$BP}*&ml9UJuYZDjj(eU(}9SIH5!13g@k*)d4QBv z#Bq|+fRe@Cm!hWkAFJk3R>%_$z!X_r763hxC}Q6gY*v>{#?=Kf18x*-1rG+Wln6$# z15Zf|8tFkfDJQN5L{4lQ5|;M8jx9k(^3ROT2Ldk9tutt~D}$Pw1!r-k>O@m^il<== zUPOkLbwDf6X=L!w?n|1XIH^RT)NJp7QOTa=6C#~13qXlw;XwrK?sWa8NG&p!XN=ZJ zH!n$k4>Ngd#$>d#_r7pNbFN6)T1(9Bz`#VIkMhnBsTXU{iI{U^&rK~*V4i~rtYvza znhG5wuw`I<1D3hv07t>VkAkU7aR+;nNwdF8!n8Snl-+ZBbKq5b9H1;_#M}KkhlVY? 
zCwrk?DcEln-kwx&MMlF#t>ERVf|py`&Kmjh*;c)(rMRZAb=zxk+iJYM?eO;JS3Mp3 z#7Vk|CG5Afr7dk~OIzBPw3{$n&4GCq0ytEPD@A$#8KdBhV79VT#T1@l6c-YKrF}zU z2@!KGFrPGuXg6^$^f5?I@D*My}aG+4yOLr5c zQWv?CBqD8b7Q;!44pH2%f*81K>Jz?k6y_LCfYm03m}f>LGN-##IybQJN9p}#ViArl zisMKSsl$Z0Qi05hJqJ@Z8vUXbs{Z+fiVL_>a8yD(62MY%d<_{<60#yr{~0)Pvm2FV zqavj(6MJ7b^?tox-<+PFTs(gK_~p00_3VoueE)mTo2Gt4_wa)H-J4+;F1oJ0xVU&_ zKvRQK)j;pl+wE4bniW@&T72WBec2T&owIzBWt`IxGnVeTR#jEn566M`=+PtnqaXfY zce35KH@Ca%wr$N^N}FB}gWjPQ2csZm2bgMUUZyc)kg9SR`kLnHRR?3#3iGuR7$EZA zc5j?SO`ea(eV8Y`P7@pRCDu|OS%(;GDWHrM0jFvaDM;O4u>#A1N0An5vnw|9v+sdR zK#qWLnGDHt6I0PO7ag$J*%2%?f}QGmzge%Y&(6-?JbU)+#Seb){V!<#GoYwf^qQCS zCfd{S1e(&TRfC)LMxURb;dj3CDb`azOKTCA`R`#&JL1Sa!@2JLz3=^U_vFc=TT>{e zQ5z=hOX`Kq&wu{;_4Ajn+S}V*e>k)TKsACYuj*@K!fe~C zlf(TlF@yqjU#4NdCwC9HPb6^8*TEXLtr;Zj6>UvZ8S>?WSimS>cf8k?g4hk5ij)ocN{AR@IkqYTNHGu< z3r0jY7@19J0aDzT4-CH?(}WpN3Q|C2Vm@T+BRf|fyO@SPOhBp8FJJoP$JvZ8sz#8M zQIe7)>k&ObC%-;b-2zjf5fSi|_j?o~QWHfMPr`^}BUJzh`cd? z`crK;zGjSzfRDP&`BF^gTX4>NoFl{zD`X^Y0uYb!$mSHGcs|r#1`-n%kO;WI|d}R69FW(C`m2tqiLT!ed<({xsp`1Ua!w+ecz4G8;}&@fXGvQ zQWO`>IaQ$#+luG&1b9s)pm`8vt(07=N^R+SoYExmXk-eC0aEHAyD;|GqF3Q?u_PZ; z0yd>0Q`w9aZG->eaZi|$CqmH=2&gNrKE<35i2NX{|ATl(fHNnUNHfUA$NV=P35F~n zsdtIY&0kMfj6Ap}e&_eDtz4U;!KU0KweJS8`}|gRmj~!>duR_m96Buyl@#q&E%d4q z*eC%t`6rDOmBAlTPpq^6r4ba}qz?H7>6*D&|qCk_wA=e$AA5mFGl$e!8C0gED4D(+!p z65xsByy1+HeG9&PdL^gQ??EVlxqt_LCkR-?tQBY==UGcF^I;M@H{aPt#MddbvWBSFcDog&@tl9 zb7S>w$Pt(JRU3?q%RBRFAzJGoGfdJVCt*d{v2<7G5;*hUEF!rI;|>dqc}_(RqpVq; zL32hO#!OlW@miBb&JxOc|F%%c$r3sk?OkC5$90Z_mjRgQSOM&4?Z28fysjl&tt4Kp zm3U3Iz^e^y=MB7kwt-haK9{df8+pT=`u((Tc7OIq-(DR1r7dk~OIzB~-gj&uiXVa} zRlt%jTXb*lQ{un0rF{bi5^^t}Lu%MTR)l%qL7MIpA_%x=4cW)y4IVF!&*INN`q7VE z(P5C1r|sYW5C4JgNIN-8jF$(oU8f9`MwqxC_+KOxI9yrXl7C`*E47)V52$D&x`A)|!6+lFGaujaBiEomN3=p(rCJ@IIB$5WeVT!KCVlb%< zPEkqOjhj;Kw8@V8PFq*i>!z;0SglsiH=E7#^Yil;-~RTu>3DrXXTGBITuLb~slQ$R z)}Q{3tDpS%2UzOI0Lxmtw69>1Do+m{)Dm>X?_Q>;Kc~ZQ{Ggxw_($Sz{Po`+*t|Rb z?Z5vY{-^!l{k#9qdbi)xoI98~w>I;tYFn|p-5r{`>1Z67Z4{58zny3l3dhaRXSw5X 
zo^@-guku8kn>g9(JZHNC9C>GXp2koOQ=DH55|OM5R21IzF6-gE*>LCFIzvMajP$^f zC;RLM&2=P3pena;f?LEh7Lty})uW1haqOP0gYnlU3PPWgVJbChbySVp_f1{5bzPZt*C?3vG*8rGstg}DcRT_@C-u;2(=MBq=q@E7s~k}H zmNPNjW$c&Ua|d7&<5}wXDv$!7;4OYdn6H4zfO}QwGk1=Cnq?x?BzN-KBfAhCDbPv{2-ZZq3HneWmWQIar)u-gQoX}?WJKFZN-fn2z zNI9wyt5J$r8j^lv#OkpuQ|fU{XfVVP_DdY)2?P!_f_%Ld_f&a|m!rth5k+KC@+ER{ z8Jt-o<8mb6YPT5x`}q0^aQ`vP$J!hKEq39`0I69Y#wWzlk5Bs@jlY6&^ih-nEt~aw zWG0@#1ZHnGJ?@R_qnTLiN|FCNZo6?`SD(||`o($#Nu9j-&Ue1`@}K#q{)tz1UT$r_ z9FKqFH-4pCR<^G-vYXoWt_IupU0zAT<5x#zt?vO-N3V}6#9SsFpnRIURt_jn76qxq z#bErVb@~z@DX6~8^sKy`OWebonscVX0|{qTx{tnqagL@=Jw|7(6-xQ7&c^?G}QT3po_zZz^1TjWC0euV|YWh zK#BRfs?3@TL7fm?;yiia$tZYioa>hc1#amS7>u3Pky$?0VUuBIZ1k}4wA#@{8M#nlm-zTX+m1CxUZ^TOOXzuTAlu)_s{fv87xg_$qc zS`Q~}dpNtix_n&kYks{NBST?g^QBzrhnGi!Odki2G4J_uF zBUx%50Kxot)~I6?DxaWg0z67-cQqZ(e0DoDD9r3Joj|J`bxsk zC^4)n7=)ICQJCm8tOhNb0fpJGhRFz$LAYWv7&N-#5j{B&fL8gLfi6bCiRbaNohWoP zC`v6yikdcB#8iPmv+!7n)Cs*H?gShON%Sz9kgGlC_+D#!o_PZ(1frpXhn8jOfppO}hl z#Ps7nRl8J$>+X=O5V7m+2v#gE0}|?C5Ls~yFg9Hg04US1&|?Z4%pUYK=*;;Q{pb-S zHC366L`Kr7M1@5HA+Y^=)-n5<2pA<&h9Y|>*s&jjHfSQpE9>rMm}4BkD#l8sEMiP+ zcX3I+mQkBAB7!aH@JSL7C~`H;NHUGpf8^ADaNqi^*?Ak;h!5056KK$X?{U4;Y@M=W z9vE{xn@>2L9Fg(WBcnU9EUtiWBNCJXrCc^Ti=h-R8 z!mfLI94n|?kaJnu(%#=vhH*-G26%i&V{cBL2$kQ=-Gv-clnCmz(N9g=Jat|7IF6Qc zN{?LNJz89mJzX&{!X0@piES)$Q8#K2bcib;X62L_6}h+^mnr`!M3-}MWq~zTb_rBb z+9t`FBqRP@8K~SqsFIpEG5_E15==ITzvpYzh2%kDBoZe%YsojA-6NaVn5Ji?JVQrT zoS+Av(b}F~=!|HoGux|Wz{(w6$5-^6@7M;?Yko&B_ey1ORAfXR)a(Y$?1H%1#!ud= zOprw1*GJ8X^KvXm`w6r zq5{2Or7BXIH`$*+w^&L+8&MDhyPo^rjdVeq>Q|TdOZWsMRc*_&5{80o*Jq>c1dvqa zr;9kTU*0E$FsFCE5Zz|V>$ z;*(|dMUaLCJ8N*>3md1K=8QGi)@Q}W!&n?GESSxK+q@~iK&W}{w&{Q`SR=@ch_LiR zR7OPorWKg<2ot82w(hZ|~NsDHnua6l0=r*;6mGltVRP!bvsiwd{sYw5lm< zydA(3MU_{wEVdft4NHvFlK3f`5_@Eg0tZA-wJM6>#SPx=|7Q3{(91r&e0s3HZPXBi zC8+xU;h&CW$jwFk^ER;a>cU?e=6Gx|$V@T_s-f%?(h5#|V`{92c)LE~i4RYLG6HcO zG96`~+?(PC0r}G9A0^?UYguTlH(fYpczNDP{kSX#ug0O3r%|2cS7hve*%o`sy661e z((3;za%7Nnj3p7DAS2}AS>>D5SF7Q(mY3iF;1Goi_XS8GIiRxmeMOdk$R 
zW?opPmFdQ!5bUZPl%G>f3G_+GPDj}2F ze=AG_(Muz0tl@9Fr1&Vgof20kuonnr^^0TNKy<6MK!WKX_eY>DE}f^<4P7F(20UgT zX~-kt*;HnbxK~%k6|7I&%z7stVXxv~womRT(qf21`=s7IC5vP2F4NQ0X|*n^ej)6B?g z0GL;xA)-R+WY*$#gjLzPkU=BZx~3s$qlAy?CYZ=)Uw)%5B6YQE_}1K-+Gs%i62`UV zQZs~245WclKj$mAk|yI+;3^L-7?U7~Qx9 zz;qphwx)vumweVaK;#J+6{PrZVt8J8f{I(OD)#gs9zIYPzKu5j&r0`bh}&xuuv4W4 zWN1fnYC!ep`_uT-G=EVZr|2dLU-{~Qjh|gk6OpRY(l6a%m#pYw8MDMGKW~^Z8a=%W{JHL$?n-l4$=2f-wzDXBs;Tmli#Ne^TO%w+ zW#P5i<#DVrM=X4Czef6A|4$hyN{t~~j}xQG@8G_kHjX|8^vlAsRuSsk9Q4=%5a?t! z1*I76!=US*evjkseS^|>>kpy<&cKx_t&eD!WH@@8WUa}pBF&^s3$kub% z2RGghwvJWcet#LLkt*__;6&b1zg{Gbl9GO@&ylt~Xs2**FYu#uTw`qF2NUr(3HIlD z>$NbyWBWJ1*3+vmkCEpUzG3zC((09zv6JmWQ2 z@dkVGb~~L`hhNr6E~2qZu~6+|ml7>k^=GRcEw-09j8e;wvHWYk*x)vlKO*K@Cq@~T zW+r}aKBrDs-whry+9iG{*byuuvC`XrGi-iN>H;XX=DfVDt2gFaw>17DOiGn3Z!lWF z^cut)S^%)|*f`Deh5krp{RHoaED4O#ugwNd4@)ve1a-^5J8LRp!0DN`I-rd4@wGm) zQFH{vPjW$Lp}=Bv!KHYtUn>c{;se$AoZnH0zmQ&Xo$p8ji%A7fWw3ytLEkiGtLV%r zZEhmV4~QAsOj_%ix?N{4I;^sq*X}DFB5av(f=Ve!1tDEs6xuP$9U*G6C@l1(?Omf3 z*sxrLpPR+8WoEJne)cbiNR=~meNk!1_=CkTY?L8A36DK{H6&C;IW&F$*h}oSYHNnA za`4?}^5X8X0fGeJ$fqD$M4YJwu3qwvFhRwSta6X6--nT@R>Yk7Z;r+vs1z9FG_vhs znMiD9o@XCfUWu_)G8{|;CCD;`#JCEa-T1b|n-{J`biG-udbRwGs3>XbRJ}Bv0uaz2 zau2#fonanr`L`DsKNf@k_);G|qh=z!WNV=9(^eQ^Kz!5Z7d?h8>mcG`*m$3+K%`Ae zEt$KIu)d0&@K4fGXbzR9xJFg;AtVB}t1u1vH5gi=cYpf=#*;YvZ+z?siSjf)Tx^GD z1B%OfI0>i!3=iy72;^}TspH$1Foo~?BLn+n;=tQNY-PULykCIT_HGs zQVDYE()5WZ<@0vD3y*x5zxhG{Nd%eaUZ#q{U&+6v_4p=);qpvZ^ZA*n(=U$yXgqzq zoI3DkUtV3UsMYBj$P9t|I#Pf#3w*7p=RFC7;-uv!O5Nxj8&Huulu^ORaq1#O$g$Rd zgF)wE!G(9^hnNFUbW->glFifxA3-O`*LIYqz|hV#Bvr{&iUncCMTrAR z*#Sl|b=QH=b;gFyD)FbnTWlbR#E@mfa%%}yOCE%d|DGJYj~AhXq)XUX^_HnLK7>7T zUocTGl81v~>xjiLq$j)-Oas+PGf1;p#TJ$Lhe3;7wt;o3RXRne3}hfCoDf!tdohsD zH~458H0ZPj<+ll@3#pHWSVSnUR*=6L(OBf2)P`<|sWVp}n&Ok{rXlR)JX`l1Jrlu+Po3&`z?iQ zyjZAOkPFBdDpGrji&G%Vg2%&V1)Sk-j@eG*F^WS0F7{=AfWAe2uCNQ9^ZOAcr#}dY zsKfn>%il~ua!2OQ;q#J^W@!G+T8j;q?a5q;D)f90SpQc@=44H`uYHsVifgkgemc-! 
z3W2_-!Vr<4+N3=~#+AE&RpiaL_TeB_-Gb(qG)FTIYv_IgMEP_N1A&=-yo#XSgw3Xc zV+NJ7AljXdw*O-)&2l#CC#wtnrr-F<*Zc`xD7CB|S77L4_ndh{UXLvL0-7UMKm z_M&2_&G=0Qmp+qYKb|e5O1;C&!I|)98R}iOZM-s|61zLy7@JulKu8<29B42fnr$Ky z=}jP5rTV?8SNlMlk{TjOoKk!}-1SPQ5a}!sYr^n8 z35wrZmW5I5G1Vk{XT(#bDSLXud(VIvwiosNpL(CZVI8sHEE`&g@ufSOY}}QR8Rz60 z$=sAMq=61%jH#SX4OF4z0*f3H9kP=@21fc?p9#Te=2nW3IG7_e(U=!PuE%a@6%#97{14X*0trvdfpqNDThk2VEOh0%{ z(&YbPWw#GA>?FYx9^MHZ+F^^4?Dn<2;A9+01SK(dc>Bj1Lzmy|CBx(AXKw`t6(}u& zcH~=XeyD!YK|FQEHxW=k(M^z=C_7ux_p9KCq1X`vtkI3E0c!Ke2Lqjp0C7Q)QHFRv zyYHLh?QfQ9o!f)WNAEPelo$3m{~t@=GbwlY$U=`)4(La{s_K%x$#vQ}S=`z!4#jO37$d!UxB~$~Fy_tbq%6W6f!gZ;7bI zUWkGoTt4sorr2pJ<$lNByK5$DG;`A)jdSCXtX(I~ql8*&oxfdMuT~Fb_I6|*Z>pMC z94CbR-I-u&$I-)n1&Wy2l>TV{;U1~n+A-W{MI@+{82KO+mAF7T(*z|sNh28(j!2%u zs3b+5jdDLAflXQoy48MOFaj4;5u$7eo1Cb%${2D*GkQ)NMS%9hTd|{%;(1S^@u#5D zjiNNka9Ts_9JKr3+~#Mg@!vbxewg%li2;1jcoqCmQXd&9MNVZGCDM>H4b;<#WtGn= zh6}&Av}5-?Me5s%F}-RnQX!5?1t5Ah9IW0c+(e3)2JN@L;RcX9wVcbQ&LidM?q7;OfF=I3Oi}4&h~(J0 z4Fe#-77Gk*w*<@|7cn6O6=RS1;fH=Utz8&TnV|cYmujtI&9|#k3Mo0##|$u@Uod{h z6yR$%Z$gk$-$PShJgp^);Yrw|3Up*{c-COMY|Xb65iFVT)W#0)UEkk_XGYNZ$hi(g zo~%-f#7X(`dy6yji6tYeAQx)Ncq7C|9s#L!sDzYIDe(rgchAK1QA80O6?@_&BOS~x zJXkz6Lt~Ry;3bj~ch`jVm`UCVKSI*<1eJ*F+{6UYr;+n{aEuD1&eq5x-QF9e^{GiR35VjuJE*BjM!amLp3B zhZU`(i;l)iW~VG7uuP|!msXuo*+CfGGCQ5DTcA`#K~ITIKUfn=$#Rn}`|JyK-}a!d z%@KdwkNR=@#(8sLYNb9|?35qwj?$pRMJ42~P$p4cXaP=PZQ2#K?Q0BkExFd63(pWI zM;B6NrYu6-M~adK;W`?wV66nUdu&c&Hb**$pGOxDX0PmA{xANu2PPWaAT}cHI0!i`EXI^a( zhVfTDO+*a`dAdw;aJfn~OK^@@&RKWfM5BQqB!s#OL=6(p)sS4!lEnzDrVbRE|D_h^fw2x%ihwL^6GURN0dOWI+U_#XUf8L`CFSu=+t9#!TWYy z$LdYk=SN$ai?7dHi-jhY;j)z}@zKbykq7+GXz(`4uUq&!VM#30fjH+a;W9(~ED;$= z&!)fCNOvI+!#W#htdq--`=JUZVx(&`W@<=%edjIEOD{eb+q{DYa0!ZOwI z@p)P&03TLlMvS0uFXC_C_c{SFC|)1|)rQPb=?AXnSp+9!J~9=g!taUJ=w-s2l^`U< z6lq4Ro&>9%zR zU%Snx$<5P(0H~C^qa0_uc3-LLZsO4UjmPd>wIljQo)Kl0w3ViHnq$Cu#qjXYJ(T=g z!Pjs!?b0GTsn>HA?L%Q5+*pm)U5m|{^y+MDYtUV$mmZ(vKW8SimQa9Xgx}_IIzALc zMcL0+0dTqur9Uh#K0;2DBQdM|xlaz8KY4$C{rtRszF)XPgra}8-L6{lowms+!V$ZN 
zbUZedDOAyyL?Z)Lv;rVTO39A;GQ3eiQlV3L_B8cn5|T*VJ88<(3#&G35LG0CzFhde zxoE4+PX-p9?shr(!q9k4n1|-4{0YcUzxd~bBEQULGOgBev3PiMc>22Y`1sguxdzpa ziDYs|XkqJ9=Yu~CrB-a9K)p6G>E=z$bG&#`x-S{z)d;pf$1eb<+yU~;muF2o{DM9e zW+2eHwdA^1qn)s!z)`rxJaX&6=`%>Test%VUlQ8QL0N5^jm#=H30)CE&*4=l7yj3j z^9;yeTLqI6)B2ard-P+PU-o2}&s`6dY3l15H62uAt!h4wD8G z@5goO*+3fi(-wrA|Js#SV~0_=F5?6opvJ|-S-mN@0ql}3M?YbsyAaM zjJW4vEbh&s#eD^JEBes;NpH;(`(rDZo`p%84t(e~Kff?xAmi^D^#V4FAXcvCFqt5% zk|T|vDi7O=cL+0Q+0%P23=wE95_z6-d#<|w03827Ctn%=q&BuNolbBdb=Sz0At3r1 zw+1vTpG>rV$aAuO+{?F^^DtrQ+*{3qY|T^rX~Vt+FIFF{H^Xa7AYqbh`K+#JxWTJf znZrV6AaWdK9slV*wW~`$|D=ix<03>Jwcs0gt)-*}@|(E~g=Y7Nvs*BwU1`1zlK5l1 zY)A*mpgi1HD5qVoLy*FibSJ>zdxS*j8p1nr+W&VTra5w9t~=h$6tqf8usna4aDWE5 znhQWyWg-+h49W-0808RW$;ufK=#k1@aH)Kn$cjOcb8{`yN4heB&V$yKG<*l`@)xKz z&YbySBMh2voOtNO?X{Bk3{XoZ_~9t&X2*3{OJNzm+|ygE-ajRvf%Zwg3CDWwj|+kl{f1R?KFopfxUycB1W ztj#9o=qTe0sL&ylvq;#q0u9D>=8>175JizTEr*e)Xl?n= zSdIK{C}aINIG_mfyjwFNlL+U!4!~_4GW(W-8X(M z%);LZI&79cWOqv!h?)Er%>ACA|IK!5h0iQ%j-Fm18L1l`g{2?!xkf|t8wA#Tg~B4A zSx7{9=#@#pWL4)$5-?owAziiAnDUGAm5%U1#YPTw%j60pIt_$^L1&n7@PACXV2f<4 zW{_RVNePiZlx$<&p|paPqU7=U(|@xC(;t=`-KZ;K8rSe;^VsCIRLiXj6cSJ?F6AW0 zf{J<_h@j%ucCqFXtmGPP4A8s8sL-JGBxwr6Nk-6w`@_GvP)_V1XpA&Ly1@WrEsI`# zQy^(Uwx4V1;!&EvmkR1Y{i|EPZihKx}K~EBcV=j)|6!;DNn>)IFe31cuz2cc*}T-*-s-mHe3|j`@vs~%K&)%M>Gfbzs^|S>sdL@!+TiD|=`x+{U-I}ie- z$cgZ$c^|C_g*>7O>EXEa=wxXQ;N zJcxV9sORhn65J))3ADkP`bDq78iuFzln2HWxEN^=dCz|*#`VDkfpK%u`cB*sAKuF_#>tax#un-0_{tX4`eaYhdH z_1fg=F6SQ&b2t#2>u`~QZJuu_cES`E8{Qho@>yShSk;CXs)90P$e zch_IvY}xX;doW&ZVX(-pw|e>M8@m=ZJiO8KuI|sp%Fv^1;3Cp!WQ$R0xV>txi_1=7 zqE?M~(wrPitJ%HPS+3b2p1LBMz2p=LD7?Q#Ea-?o;+VP1KD|Ede9r*Ed;*4Czu{#KK& zw}|nzSWq`8!t$2i+pRf+emC!?05AO8hF-!~iF~Q^(NTo@1P&B7K*1K%FhD20V$-k3 zyq#7tkm*p*T#rv`NSu?U^ys4~SgRb)HU&a7WekrKPCDDpDd3;+w6_KF!XY{kRj^#A z8d{3)NBzTm=G~Rk&y*t+4}4lU))lk6O8t+U#mPxFT1j2Dn6?rwN4D6#hf4L@9xorS z*YCE+$F(>@(3Y1l&W>eu7aF}S-H=gtYXDjyvPgk@quCb6Cgt*%`2d%~bWK|`AJszrDLxP% zKz%u`9l*4}+-cV+1&Z~Zh8$s(w&&Ji2)QoK-BWaTWs&V8e)e4X&9K{giB;vRZkA_N 
zBYboIY7#il^ZPj4;&t30s6ME2v#zaaNj1VMH>#^6^CgwdDw%p1xr=j2H{eNU#z#X8kbR_QF?iG8*Sq!gzkSCA^2;4lD-8DafB_Wj+C z8&&ju6(ind132jECg|Y`EI~nUs1xk^Z&K#vr z9z$Db)!whLA_Nu0-v9Z*_vyh+DF3yq|1PSs=||81pGvy+$niDAP(*W~x$iY`;P2|SIO__EB z3er76?TNJ=Fr}DN!d=3;@=@^xmbzoOxf5eK7`fq1mdPSSF@K!0O!4PHi zG>Ufui0bG0&$Lcb0*A3S}#Tn`ICxgKZ{-097#3Iv*@HQy*z?LLr++UXB71 z0D+($z;)Bjttw$#O;g(co><~?X$M!FavNgzPjH>cMY{WJqdcw`g2-^_)&2`2K^Ky> zr5os}Y4Ez-Yq+O8Oe4Ng6;>php&`NOHa&U-hibBfA$3KV9}@`G04ziBV-|- zb}ZWC%n|%XHogH;US>&1rm?L6Tz<4RTDCmy9CZ4L&yNQA$U${zr98&rC92-;!M~Gk zAjMd!qmC5pj1W87Hxh1?ynmoF6MexQD8eFRPuHzDU9~Q>Ph-@=h6oJhpZHt=-P#=2 z8?JC$zFl**epjoVuUTgGl=(GsvMs*Rb}yaND2lv_+O(2Z>e zA}FTs(Bo>R#KjLkZn4MwnU$TJ55TcVXPC>s&fkMxizgCCF^X^tw#F^`Ck{OAWXs`j z^1Sm-D1!efkCwCKNgh@;Cb5=&i(?TSQ?EZYMEMwHa7kCcJoAzt-e@6QJWY!fp8rddzN$Yv*=<%keO8(l%*W>2>0Ar$binxpK_kHfg1^QTz%?#~^&N>_Cn ztW%JCZ3>nCq+@SqHe&Z;?3>KHZqy(NCR2Dt%Rq4@sHjk0or{KdA|#&_G2;U|jihC4 z05+)LRL%v9gZufhLghH|`(GN*t;MI0kFB0AR;eNG&=>u$KVN)mv4cHaW_FoS<_&8; zIT|#P?^>KmdESMtw}PK^Z_Ufpp|dte4`MGb27>srOj2=H5}E88{g<2iwnO&gQKmtEdM=> zIB>680*7o7W5X`Ji_gpNr03ax78E97H!R2;a|5yJE^^gAz z7X1#g6Yq_MT}?}6i=~?QZ|+;6E~^F3ni2crwXs&-*uiYPJTW>^8`jaFX@%0o1rC-^ zvSi@MpVjH?Ql2EbRSc}?Iv5E*&NoZ+B=)KWPn~Kts8`*`v1JX>!Ku&BOTTBNuPy=o zew$zx~%5*(*LoVozZ2oUsA8UvD{%^JiUr;SWW0R?mxnbXQ3xK-Q@F;Iw>C_mkk1DF9DutSG z`q)0|!URO19)zf1j0RW@v#UCMo~uvD{xpvwMGr%g%WrUQOlc>2DsVt}?U}m=l@*Sq z-(!p#cn~#iOeeLMSf!jvRvhMz1cStp;E~nHFMVP-?Ouz`ali(vfsC}Y+aCY6m0%YR zGJDb78p_6|OXY1h;#gCYKJoyqwoKykCg7h!_+f|K>gn!$@i-?qcR~-iT6s#VOW{}b zT{H6Ane@i~@C|N#c9BB>yj003{ef6gbbI3QwI@V!veED#2~AmkxP>Eye;HLXz$9Gb z-VEIzFbi}Qy@DnXBz$Y&0c;|lHsJ8_O2e4fGgiFcL)k8UJiO0cJqzx?!Y|NhIV%QH&nhY1%_-KHX=6p(6yv-nx<$Xgd#XZQ1rsnaM5<)j~k5t2gu?;P099xpBX-&Me6T-^Wd)pWSA0e$l2?b!{}e zHbOhgP}Q+f8-6KI;qF@X!;2h$s17Cj(krQmnrMA2(-g~Y1Mw5&xJwK9k-RcO3B29QQUTrZCrSfUC7pyfrGn3vpf)c{%pSD97`qNElqVcdh zUp5Q2!A#X{an2kXRxF~A+*6L{kJ#KZSjj$D?oy46m8FyVxKHTpJwNyl5`1@3F}LV5 z8|2pcbOqhzKFBk!()P|OA?ddpIe1L3b3=jeFljnBHDZSyTm*IXwYhO`fuCS_g$?>P 
zTBgnLo96B2Db&XIk%y-hmLo%2CLQe!7Ca3_KVRwzSnOVn@XjtOX?I&fV=2JORfL>M zK%zEe(pTBiXKUoodo^dLI{FwN+UH3b%I-i=Sv;uM3L=+tnATCSL&yzaRre?FKe+>I z)7(HHA&`K=P(~n=z?p=lN)Tu^xszD?ppq@T1RHGErq8nTQ3sT+S)_ZN(`&)2FX+un zM}O4ED}@8-qT9bl)2WaAnI^>SR#MM=p=>J$EtZ8g78A9Wowa!L@f|kHtkz5UuF_pQ zHO-d0wqmNZP(aeRKK9WF4NifWiwKy{ESNuN7Gyxyf%-@%M!$0|5ycrdxex9a#=mp! z(i9M-j(7_$Hk^BAkZ-D3k({B*099^cF07}vyAn%mpFN>{LLdt^coDnS)Z6dYtAXh- z?9yYai&X0i`l@nu{EpV=!StWH^R&2Xtm3>NnjM}X1S`IXbe=J`RrAXy6?1$x3`(6Y>P0MnSN$Gcw-tVNI+cY0?4Qr-A>H+sRsMtmM$ib%WsG zV?{9qt8P&_a^53&VMs3b+SoQ#-4x-Apkfu9ruGq7r;oLas7kHrZMHO4FGpg*TX>k-71t3 zHl(zJJkIx90}e+ui}_d)786?h=Xj`?qicCTh%!RLdvGLlbRKTR=6sRVx!E(k3`HLq z+Ib~GN6iKcW0&gufz1vJhnt6jsbL7BpokugfZD6Jj~R;hcvH4y$Ovcm7-m%9^0CzIPXoyrvVBsA!_fGsT^nI$v zEYSGCQuNH}u{!xt;-LL07ESQ`E7u*Y9eYErSqpLeVbHg`2ozxB%{W-%E9)9D>a-MD zIoBvEs1ipn!9vPxT|qaSm~E4^(m-m>y&1M6kJfs$+D!s{fYscifY+Gn&R(!C$qqsQ zelpfXb@H(g{Fu=3H!U1Q8F(OO#CGpY!cF$VK6=QJ_6X-414DHX0#r0$P8~TEsqGFq z0h`c+iY za3gnFX_M)1m5b}<(COrf*L9iZP+Nc#xxGpeZyR0eN40?foOT7F)cEzkf^cvSYe~`J z84JzOKxn-4%@(}+uL87E%8X&+wG5PqT1>a`QrmqFY?20bm;%V|%%m{jow1%o1^*Y=cwisx?SsEMC;qzrAW zmJv)oh=X?@7tCG@p56VBO=&gk2bP!Cbqj>}sbpCCDvTc<{jVbtt_7@7=L z?1b8?0afariM~tEU)DN{VOpg{Wu3%jwMpNI*{y9B=+7kVAr$Mtsn%6?7nc?vF0}M? z{)bB&dkhONk~(9LJ-psi`Vc@R12C;YRjN4Mg%7|xy)5P? 
z$@fwloc7}h!bXr#cBlXl_n~>wpST5qv~>HjCmJ*3y`?`MlGuG%#m#8Tm}IVM zh{Qkz19CSK{J#`*B)R)BLsiT%89V#FM~^y~cH&uh-VhRwdZ#?EI?E-t8C*%W=M;4o zi3EcWoUwS9ZumJdrf=M_Ek$pcvr0t1rS?@K-9InG)U?l1AcoHWM!y(UQ9T~{7p|hk zg>ZALo*O7Qx54Apau_PBaoapfEh$DXiWbaFffs4)43>A#%$7&S6WFBZEHX8<8?WUa zJDS{`_wBZd^ITRFAl+wOYK8=!V^v0Oatw${1>~q zx^LH++<|-rQKVa?>4)w7eaxBJwE^{GE&Q9BG22m^=hsdqge0{pjkF{XJ!!`gzOmo| z*r$k>iPmXm7tL<1^|tIH%T=@E+2@Ej%v5QJy|l+z1JExA6|o?k%|^Mc6bA45A~*IuI;%d*0G&(^ADV>j+}Ehz4h>wXftdk zz`KZDrG_8}^!mtir;VELFa@1koPDelFWO_(fCFf$u-~U;N*S^Qq{W!0GQxd#8B380 zg0X0>@Oe)mtiyxx5#_rI7ld2tq)iP*@^r_&Ba^>jg)s!tyKD0?X@rwipg|Dn_L!+2 zw3?!i7|gDV8ox3Q(=9_$d${_Onaax6d;i+Tl6QrGm>bkH$`EkT87e><*^K{P&aUvH zIK}kQLM;q=m}f$VKdTEHNKsVlepf1KYLYK}7&fLe*yp~af4ng3(yU%p9dJwy72`^T z`GZy7_9$@JiN=mxKUNEDr+Pwn+5;^MR^ped@&}_+ao=mShD1faGbaE5n4Iin=Cn$kJ6>>*ZNZ52Dn^VfMy}SY;f5Q2T{`1-gO+kZ`qK zbBVE}!e_m(>M}wdDx*T6xiVjB3QAL2C$jj$W)R;4!#;TG{OES4sSKebC*{IARUr9G z4VkM3G-0B^g{?>I(<4=e0Z|s+7}rNIr;0hnCKuT@;+MC2W_Dy^VRi&D#vya15mXY@ z|Gr|CFePPaF`-Pt@nEU52wI5@*o$EK+kR(lXt!SC`p&aE%wsRtidWjn@onY;@TY?C zRH~)5hJT@k)(1har1S;9ODHBlR6+j}MXAJUia4h`*=CI>qWMz_tAMV(o&UTCsGHAL z*P9eeVMIvVj~u1PU9$T@2Q0Q22@?iRCkVVa6V@GE<$zjgfEDMfi4Fy#LeOvXOP}mp zP1gPfH{C3iShfhznyka;c(kfh5rz{3s$?Suf)F|{%4~}%AU7Hd&C0M0{0yRM;&#^} zPy--Y?=6_+iow;kJ>~1=i9clO?Xy)Yze&DEp}F9$x>mM)YgrE|t2Nx6r^jw_rzT!< zk8@!}=|VV5=+mVHMN(^)G%#^orHDqxP(r7}qHLq6%o!mi4Rh#nn?Ec6Vw~95C$LU? 
zeaDxifBiH~Ud*mhp9CS37hGi1ZB8ze2%?5DsT#+So(GvOzW7c6GFQMz*29D)7p@At zAdR4?f?ELoq?%jg4nE!qXXUW1OaYgiHR{}AJvEf}{k_Q7bxZJp>3&bgUBM$_{Rdn; zAhOC?e0G|qLe%V=LmB)(8{ufkiE|sr=*$4~gHVQ8L^_xYsdbVnQJ*&b3MmD~^)qN< zG*KUn@6$?e+`aB~m~RD6ERZNsKBNJ<+=#pNzPove#xMHi$7S#+5>5cEp)}##FPQW8 zkmpKhN8f7K>bf5*RX9Y0uBp_9(Rz|&at#~&l#l}F{XRHii2nQ5F}u8&cr+=J`J5D5 zj-^|kzhf?Oja#=#g`P=`W7 zYU1s4M4V9R6F0GD5ql77Q%;Yx`UH2A(*m2p`YTc*=G@d#%OsJf|vlH}WgX zO#JEUWYzKJ%kSIz!FhSY-M0C}2VRUQyVe>g0#LD>$-Ybdpqhemh*bRsjHwi3lZhTp zPmYtP>aBV+MpHr8qeaAyn4dT~;5 z)?#C9*{wIb?#iBObFld01gww&5x%WkBRY-Ue9tQYH2UBUIKhJOJbqM9YDNpIGL$5u{xFE4)n6LE%Y-RY|vSDPYu9lSy%SG4Cuj|t7?7>UiZZ*~h(Os|BePBjlwt0nX>0v0FbJKG5QO5B&1H(}?+o@{m z&8jN;yC%NuQ;Z5Y`yHgK0Qz|yHK79yi|qM29ohv_f}vf*PnpFqi=t)qQV19b*fYN?p1&p&v2{P#Mg z4IAwk>8ssbm==)iFQn5Zywg z-lO`#%mCC>{mY$S7UZF9&;(fCMx){~bQsRzK`NlpA*uH3&VzsXaLnAIOL2>)ZRZI` z^VaPDWR*U6H7YBUmz!2=t;$o|62PHyBI35U6kIDQ^0IJ)@c<;bAj?7GaRHO{<{afy zbcNOOuWw5{de9gF|8dN?C9?ec5e*KK^r8?wyp<)~Ut=mtt(FA)Hp=YR0TJ(&3wK`h zPMU6-TqO&ncKO~OV~2E(rsE`Gq=eb`^XeZLDG<(zqaoF9T!vi9${NG=B0|XRDGcd= zM(u#S)27xStq?Xa>9hTC*1PfHX2Q1mmeHc&H6g@ec3MlVR|4HbwcLDKt${7J2HqV^ z`nuKk>XmB#qJMg<;C&Y3PVXJ+iul_!0%zm0W&tJ0(PYRZQD%H({bGp;(8+Tehhg(( zs{bCHEPlzf>SmH$RtaV)fzy1NHMrCMu>Y=lOMo0+-&C!GUG zIS`?iUYqk&Bbaeixe!dUp}Cb3Rj!sjtrOxB=UQ&1%tg`~BxtjL6aAkpR<_bt z`pf-P+VC+vexcvVV$j*%9T=4{d#e6jkjhuloQpssy~_yK1uHTu2B4hrt~8Cn!;Y9P zg$+={&nq(+0Q_$TutCZ#g_IERmt$6So3Eb>m1{Wwdh^P9vqe2cfBZ$QS7{@; zo*!eYWTyLIx)7l#9q-7%icJ0s{@!wWsv9*g@Uxoe!=<`TLhXxSF|1vQMm$Q zq(_y)6q>0W!pp-!<@)_#U04wwbBdTsqRnYMp3w|K_8(~U4gkEERggf~w>%VKbF_78 zQtc=q;O8M{)&eN*X8dt`OTF8MRdrU2tq$mSQg4@D_0qELH>Fh8c`~PFeR6*O(#@T- zlany+&(De(V@Uo93>}yU#~gxg$~-OgvLcf zb=~!<+xObfVqGqQyEOpl(qzVRmW-Xs&?*jwZ?_LkDLl+IPVk=xH26;Vj-;- z&Bg~O;*$x~)!V0!KR3bU3Hg%=t#?Dx(WIGg$#BjI+cZmyy$X569N4#`{H?{Mhz3+? 
zHuOC3!eTd}Qqlt9iq#>2u-Gm$HGp_|wbf6Qe}m0}c#YYO6^q*+U%kk6_|k{FdSt3< z^k0>C3rD+oez=pUf6(1x-(qe1&)u?fiU6fDsRZzm!6ruV7&^~Y11lRwj0p3C-iX~8 zI#T%iJaSH7L@JCYwCF!1BI9Fdi06hH$(G1*O6=VMX=7_^9}NS+c!GH6=1Q3+2ogk&Z!NHR^VEXKwOo#}d{Lc8x1D zT3x=aZQXwP)_Y6Vr${argaEz9usd)q@51FB+WC3Piw`}*Umz-x$`!8K6|tJlW$S*3 zFOtjHWRMA1WQq1h6lstdVM-}2a=&9hO>^N`CZ;2^pvmC`LCD%_(BeAoM1CIX2-C3+ zKT96S^#0Y1m(W73!%|WEO4q({AK zL-o8`;4q_L7RbyXXM~$b_j-fFQiY8zbbY=BNnU>V>+|vR^z`(^szW!qIQ21UN^5*{ zO4|ti>Ar-?-1|E1}}NG!)s^nDtYukUPz&$m~ab$gr=d8O%{v;3%bT_Ee4a z=UKXc|9Q2}#PVk&eSdKL+w*zRxBi4ae$SfjowH~;+Pmj#1I1Hwa-(i;>J7b^%7h#6OE!)x3D8YEQLFz6TJX&jmfM?GBV3$f#)^3phiXl2MivVqg(3a8-rm%Nv?CO_T}IE2=y>Rq>|T6SkU2sp;^->P zlN~Ak%Z4^3e@u45e1St*4A(gnSM@hV7}p2wGI}z6Gcl`h!TY<0pK}iqWNygRq-}3# zYRi&fkJWaot)Y8IU|62*{e~>m)~vEeY)`RySFz^Hg{J~T9N+mUAu%7Jx|p_BBZ|ST zHN)uP|2&(6oKQD&M7*N1$;@$5O7WxE=wzozbdP|#{{z!NEWcoNz#^Sis!3rj?)f>n zo$v5-!z5_@_}}YYy~vOFLYTRj@q0e;gJue*H!>?IB+c>|lZ1+Eq2!zyuOdh)R+dOH zWvq(X6Wc?KMO=>9Vr17+)?#G$1L*ket8Kh$?B=XLS$=*?TN<7qS=xCRI?7!f32XdQu3Clb-f|3pJ^?F(z-1 z{ed@HDZohqZ&8SnlQsoVDsHA2&B`gms%uZ1_4>HoZuXna`f0sd-ETIVyQ{11-RqiH0rC>N_AZa43whI17r?# zvc%C#d#?sEgQ5gD3bD#-=EW@*1v?}6{;}N=*f7>MR>ico*kAgSKl-=2U;XNjZ(rQp z+|c~Gr8#}m7<2RZ^z_wlfBWtJ{{H^-^z=0D_XkCD)*KFpz=>xaKx!QII1JI;VIjsR zl`KFY2IPzl6!&G`zW9}*DvWqSE*4GsXb8pPv9MbQGPJ1tgyIcg@^56t1T1|E=OIMq zOo#-5;X7D*58D`4)H4HvJn8p4kqz84IO_WP`u@f3i@UF#{oAm*{409=D?T;`Uah+3 zb=#T)z1&D+XCU`uB-=Rly&W&vzmMm)E$t`PtXNa~85>gcU;W9Sw7>l2FWZ~z>vp|f zw_V$H%)a}y-`yNerx)z24dZxA>;4NG&=ndP6HI_2v)&_=uK$wp9uJ))g~G+9**dP+kG z_|!Zh%MKa+AjBpro7#fQ_u8lcr~yJ6Juucpv%tMGlSk^xb}UP?F*~ zXFSQndeu?vvfka^Tt9sE)mN{7{iz z<9Q%kHQZ6``IrCdF9-CDNOkOi7vhm+-hL)IOC5qskYL$4H7Uqj(;g%%=>f0^1;S`f z@igW-4aj4dZ|d47jUPhGd4-61M<0Y>kIDecK;{rgVe+TZ2?U9eYy?Z5m6WHJ<@L@H z7W>!BTJ)t2{CLgtFt$lK3~~vHf4MSe|3afMTo zBDaK9zBOi$6hu+POp`6CW-zMbV!8h)fxbfaJC#FXHK}AVSz0=zxVV&~LN$?oE4-y5 zyb^I4mdaIJ;Lqyb6e}5X?z;BKxtN=q>!;hBn+TGM%G?V`YH2@}bajzg4QubPT6Jfd zXlH|UdT&h6k` zmbTHhF*q8ndU6_A4*W>t{)&qZJW!C1oPV}TlZh~+rlUqqz!@51dgRwb5Q7g=h7AO} 
z!jwazvI`KvmRRQ0{eL!iZF$$uLf+&`j|)M3cDW*Rqg8%aJrtxZvHC|0r^ViDP;M&r zVGCEbSf(zrXBYQ_5c-yItjQV;(hDej|Ag7`AX4#Pxtz-EDyA?3BW()02@-~5*hw9c zy8?ftPP2=A47_hNJT#h*>Y)L3-)dHv!QaxeUeZgwrulfk?eOt`{0A?d)C^eSfB3)d z29;UL;tz_4UO{Ut(8{gowVApMTbc@1tF?fBgxSQakq!EA%$@CY;@r8f-ULC_02z;l zaAA&%bt*+l^*K(42h>~j;_u6h{W!B0=a>~G)eqvl*D24S949gnCey1*)g@voZ&Jz^ zfD0;8SigQHs)_S#jY<@%%_}0LVSfzwcZ( z@RB^~?>eJjt{crDsn^u+=&L-?OYc@&*#Con{};zOPxdENe7UrxEp2H_TiWNGbBsx{ z$gX`3{`rnp^?UmJcjReo=wWS$$qfPu2Ay0WT8LW;Rvh7tVd+8Tgwq@HW8c@MzToVC z@Lb;8ea?yV_R+?|J9zWCl4<+_QoC42v7lx}o8ItQJ_^efh{|Qib0lnTL)@1nq&|L;HlSFqddvG033jF!`!J*!7OJU!a|{xHz_`P4Qi)0?9< z=FwQo8yp>?<;~)bUeU2@8e@$ejTw#A^eXf)YtZct_-d$7Xb&pA<+Uz}_5JnL_Tj~g+xy$w zn+L7+lhNj>Z5vjXdiqEpM-s0G^%+5;Sdb@g3COiinx(3I?@W0Qn}_f!0fiCr8c-r4=O znwh>_@w1NSeB`=i^eU0)&#jGiOl6w~X)~9e7jl2#i>bZgq^DVCZ&uc$nB2Kj(lh4F zU29r(v~~(m)c5qeJT#_xXq)!FTXhec&HCZ`>gtYc>QDag@z4M4 zkB`ybG7B>IrfKZ;^)-Hs^?GSb=Hlr9yB2`M>-N`|`V2 zcDLV+$Kz=@lLI%5!-?5?X+ErIzPJvW6{yszZ`w`Y^c(Ue)`RhOg~mMD8EC-)N1y@6 zlCJo6Q^`hUK0d95$$?;j!g+G;pqNmBA0hH61Ygc#Ru-^n6H`r~k?K zT2KG#uO9g4Hz6;S`mcZUoBsd%%fEn!hX*{Kj;zW!&|KhadEV_-=R?~b$v0*{dQW3u zNAqJ#>k1dWu4u98Xkl2XGnq^3dDU8KoQ2eFM`T5 z1VrQmnX43Y2oeT8gjh$wq?1T`A|*-F!Sjkb7TvG~=?MsP6EHAJn}OyPcb;cF_XhgmcJ$lP+HCG=-M#O+_Kw!;dnS0G+jni(-EFq( zyRX0g>i*CEoj-l}=I7r;mB&#W$3ObzFTa=bw#?7Z#?z1Eh|5MeH#DNZ?yM%Zr+!ZZ z>?Pg4R$kHL2~;-nAd+gaY(y}|*4yfkIe9aQW*{pGEY~3TaNSGlB;XneOjM9pgCb)r z^+2F{e<2Mb>gG$`*1hNlx|fd+)Oo-AUU_xNlPU{Gm-hL_={&{{ZhR}`Yy4|E(in$L z76TTkD~xnjMNVucZ;yFY<}^3XB=n(RzRvS7_e;fNSj@eU6Gy;{`v_q_2hpkjp*v^s zdH21)rGCfrI*f1oedF}S)-UZn%~>n_klw`zz+wq-M^o~OMqLMv_FQdnQmBcLMBnDb zPQst3vTKuBohk(dXgJC3BF^T@QS5Pv^O77t^4rKsjV0z-m8yxZaX#fW`FpR)$9v5n z_qJg&n)X##ubxy^lUhm-m-a=sufP6!cCG&}|EvFcpxE&k<^!*9TaDPV(l^CJk7Prx zs9y)ac3aNZXKcc|@jZTcTK>{)nA|BDG*#kE;qt>W8{bjq8|ujtkzeW`=BJDjX@Z2$GC9c)%??A1>A;uX-QkCFJh16HVy?8wz5e{Kci2|?o6Ov`=JtE`7=#-#S6j@;hQoV)0qDit1%H zDukoT+``4kNnk7pvP%%CQ-X>NgpEvP!J(`jFzQrxIMG87D+7--(HO zZ0oyf=sUF`I~{(qpeT=`8mIgr;}uLPHWWY@RsMpR<|`q#ghHIWmg<9OP%f=ZhR%AF 
z^Y_iy&q^?BT^E9t3oW^_RQAXM{7@#6#Gt8NO~?}+1xFELnJa@jDTNudt|VJ+QR5Pn zq7;lYuLgFA-3E%^#H3&XPc)gR!d+ci$;d_@w3;+ZX@+{oB`QQzK(r z%yn{+f(eha;yRszy>&SjdNLD2b;d9w9D}2DXEYZXjxAbjthEgj##xI7`P}DBa$O*V z`a?)k!;V-gp2q|idZF=oWVMf6sxXywZV848xN931|ond77HrK zpr)Wh6VEY{_k{5si>cze5t)Bw!IjHl(c?ycbg`}*>#q0TpftuA zIB*(64MKd(xh^<@a>cv6k#tl!A6<8zTd~OM6t1V8nNKh?7Ut$%%@x0}w12ee`c~`v zmvLPnlp#W{CsbY`N`=DZm>WwQda-byhG34i28jG+IcDgjgVKGOqzxh*|{ z!3_+OS{bd^40_O>akBxf4H?Xaeh*dybjLu-ZxqBM+fJwGr&Y9<9wem|PL*nct<9|X zo`uro>a|m4*Q|EIK&f<&K_fEQvV5)}v!Z7}w9oCk*8=Nv{r)lII3ji4PjyGW&D&5- z!yu`Kx<1o~=;@N5!|${cHc#5X$Izn<98?BK`8@`$Rok{d)|+42(mu3dlmJUq_WspdsR%gE}_)&d04)k&O_?r4ldg1TrlW>nk z=;Ui9-zoZUOZ(Jjf>7V6=0c2N_Da-ii1@{?uL`(u>6*sWmR8WNK@X0iv9U6ZS<$l^ zKdyOLXSv-NpP(R^;N>xtS-3UUvp474VLdI5;ki158ajL>{}>e?8`tfiYJ*8U1Q3(k zZvDQR=d%rYumlDUU97heI8}D>TZVNTY|zC?Q!Qbkh1!kEL=Wigjt3(uCJv70{#wMf zxjx3eQ@OBqn8^*iiO6ATpLZ@I<{Grh#nn7t$u7gUVSPg%1GCC?f-L3TY>}@51^7(d z3#I=JVZHHjz88{%P-d9;d#m^t0uV7L)~Xyc|`Hk^9jywn3$Ms5E756p3`C=e}QbEW&3;Q9$Gx6%&d*L&2u8f0l=KMLU zn_nXqtIThqNabbO??Zl=^Tc;yd@nxN(msVbI|_d?Rx@=p&{sjPd<>u|20W}XDi%Rq zEO-}iCwM-uRCAky&}@V*P!;fJe z_sd*f+E2K}*yldvUZP?+ul1YixK{6W5A=POlR;fVyk6`i}Z7 zZ)`Py>jr>2uIO)TdeVACwK-bco*7+8M`p|7;sGyQ$-~zqcmsGyQ*ec>FgB4*5rrf= zX7#fWwqpU1iPaAfvV}g5(V>{kQ4mHcXd%yz;^k$)gK7roy0rJd775j_bYUyyu*9P- z)TCk}>CK}(0;Q&FQpQD<`6Z~x3oxVb!YkMSw7@7LDBmvTirg2n=pkTk15+NU|rz5jDv|2rN8XmdR{Cm=n60Y~5KdKCpy0oaqO{JsCNp%O_pn2SP zMx9m-?CG;UY5$sAnm6>8s-us)(vCe@bpyhxw`%P>u4taGX_}i*xv5oD;zcQ_TN5_G z%x^zk{}Y2bDj`MhBP4m5pnys(?Gu^J&)kNxJXHb|3krBFRsV#Up3iVWT0%f6^;}_R z0pk}-fS&v80d}As>ykZn=cDq4&;Rf#HMc>}is`B75FlQQSnw(PKIE`Y;uwn!FBQnt0I9ix!{-1}%Uu1q^Y)9) zktKMPtU`3I9I&2t;*2DF`Z3K>;csA=(&7o7?exWI`l=Pzell=4N_$IuTK?BHHy@+7z6E&)>Xn{fvVaPS zKKy3U$8w-5wt362`=sXssnIOwH@X>z?EcX_7kEH8)hZ8^0>7T}ePK=Yz+{eD{JLu6y34UiSdU{oLmx3783<)u zJY%h~F}4XRycx6{OU*n`ipO&RK#dU3`RK;EAMdCa>xP{cZb5=^ChsR*yzHn4g- zz;ggfrK3Wph8twx!nvEpiWY5q3Ng)zen&o@tF^g4%Jo{tEHW>EZ}8QY_51Uiw=lD= zS-Gx(S5smYDONXR@ZQN4q?3P)BAth&+m18JIdY6lWoUEZBF7a`i7f-A=EfMPi4r|z 
zpwwI}A*vu#HoE=|{MrNl2>$#S@@8Cccpv7;q9pZs$L=7mH^+S6`P`R_19#3R%kqMW zQAov6Be<(CiCkXD7vP1lS=-O=lZyorNP$Ew&g6pQ9GCX}DddJ|oV6S~DnIdA1?H?e6(XGe|0j)50D=iZc=%1!NA%$K66a-RGipd}JU}O$h-pY2yU- zR9rT21GxmYq_mI$+R{GA5)=UjSa{R`84QJS{K%yWGn{F%t_YIN__PR+DpEHfKSMOW zl9)I8+nGGXqI@lB!KEUSQYblim;~6ZFs>+NY#5+q;M_LU4V;u$iMj)$?t$hUAGJ{r zv>H7%0K0~q^Nu<_Hv%X2;#kX*Uvm13InGKp43OG1aG>Avu>*DAH_Tv!TdmcGKF>yT zqCK3SN3}f;cm*R4YSf%kHlt&mVAe6nn%4-Jvv@LiDm8Rv;To0%Mg?oP40|G$O;VKN zXX9)A1g7${w;@H>Spfm0&shxE0w9H0(S;P0S^*#hNlB_!fht6$gcOwoOL2x;*XBTY zXIRD5xsWtE%T5xF|FcFbno4k@b@E8-Cv^vqVxZ$A{f&Lp*-Km6(w4Tgr7i9Aj=SRqh1{%Ass>0oHedyLlUqB06qjc)6N%*rJ93Sn zfJr4E22eqzj5*y)EWjT>p1KyYh|g!`v3?OB2H{5}Fns0_e+b&@BLrckb6=`(ko;U{h{*JDCF2F?$=dOUC*`OE*WjsFq?g52#}KY)v5Hg)aZ#V zIB-`-akHMwEhN6k!DVso45kO@@0HmU_1*06C zJq&gcgR&IcM+S1TZF5H*&siZ}1?865-EFU`7XVU0sRbDuHN-|{kp6LSDOTMhn@P4T z9Jv6yvUlgo7M`l}F#%qET!0Lca-txM3dPbs&)8V0DL_OjfgXY#FwgWx9Nl>UNO{4bCoQ$r6T+e4ka6hqdj@kkO7j|RT2ACbDT^jXKoA1!@OOCR?@_wVR~@97incu${W zlK}c;d*#4Y{Mf}0`efr5S)hcNH%d>c1SKDoFGA~p$t`((b?0u4jq9iD^onafI>kmF z)LPS!F^z)&YU4o2=;IVOcfxt&K~IX~lmtfRcqYUmZVe9Lr*ux^yg`3oIv=@@ql>Fvk5A?ZRyx?L7!@xi@xcgl!;bWZurm`Mj zrEO4G8$}-+Pzu#R@8^$BAu=7u0F+8pW%8ow^5H95P2|+#9G|_ zJagsl>^#ci8oy+TJ^7qNi}#^IoZ&u=O6-u-;KF`?+Cz^@Y|c+s;CoO`m5Qm0lGK-z zck9D7)#99sYTH13oYXqmeRAWHff!*!1LqZ+-X^zm={eu8KpTp1J>CIW)F-*RLKHr# zoZlE?-v^N+-mA+Xsl^Xl+J}s@7ix>KR;cyt+iR9 zNs`Y=r+}pdjtawGtW_LW<#oq>us}Ps z0iNiL>$-i<sBYV; z6~)i1&Zw>*)T%LRyJ_K;{&v%9bxU*M#sJ*z&-j%(;9!pyN3^E5mY!!vPO5Qe4oT~h zi^w$;`JM>zc7xuAzeos#E%F4x&k@3$I+~^ACCMTgEzU7K868!SQ@e_CKjBqC0HqeB z)Q4$dc>(EQ%jE&Gn~f8uf~2=Ms}PkU?F_j{WrWps1|%Ble2k!r45$lZM>$@pDc6&v zO~43n^^`o?<7^#B%4nzHbY3@bT(z**THVu{^2#*oyOw&lQZKtsy`~r0)A!ra8}8MJ zJ2hD9ZjQ==k+ZtYrW+TyUX_JGhUV}7y4Cv*)YH(aE1C;e^p&pYW5#|>ANY#;H9hfa z-@}bQ1c>E}iSSIOPhx-t32=X(*1)`zqG_M8d#uTbbP)>m; zJPx~>RWWh+3qn#t)FUl=eW&rXHn68TPMrbAz>18l81XLKwZdqRPrDpisx(E%=u&^Ix z2`6t$o?_ec^Smv8hrf$xI&$q34x#a&Dybug7e1LdD12%|gu&P)j{`)gXQVz>0-59 zE*8>l-*?(C7S`AW#SV#FK>AMmA@^daLPk%AzGI>9#9#W3gmIdJ5US|9u5_JD4|&)5 
z`Mx7z%3Yf0Ixn6IoeRBSDt1z&D46P}A&#!@1yR1UpnM)uXS3-vKXfb_fq`D}|gDr~Q{e(l2bCi*?cf3E+nXumP_lMnXS8q0(`s(UhJ~w4q zc2!w)ybWi=NbzZ;y>wm7^J$vCXd2tU->fL;)=K2q1ycDKmtnd&>>zzse{?>`ZV>E_ z-&sJf4~zcl@!6BSc&zINPFBnN929)OJbK4Ex6A6<8s^0cK1s)Hu2_qbE7cQUFFNnY zw_sKbSGy~&beCK|tayP4nekeb{s zqCm>^dF)6{q2BX#Ul5TRTXUVqb(`c;&-r@K3qyUvAQI&|vEllwNu990s5IoQbh2io zgSAMb8vd_tn%3(^y=JQA&vji(uj}1zU-N!8Gj(8>JrC?A&j)h{DM`6de(ETtb(`{( z@7n;zolWt`JzH|;#7RDzd{QYLy!d99R<#(yTgJj8yqMgM-GcYg|P2mdB99|tb}z5E5=$+_NlTu*ddS;$Ql-^W?veI(vT;{Ek$5%QYvTx*`LdB1C}pVB)+mjomt zuHt3r5w8Kt`-z9t$0#c+!@FF6P4xmm?`CVob4M9Gca{<{ zvKLAXiPtq=upHYgay>4k&5}M#+d~F(NCGIC7uqSV&z zU1L)F#YAzHv|;?UI|wxp8V=Vc2&XvDudzt++Dl>qS#vvP$8opYZnyjE>-Fy9_2o_m zdziU4AG=;ZbnujEOlpjNdhv^sYnn;rAi6vs#QBfQ<}1z@WT2QS7{jaRQME8z8v2_* z8A~g>FZ7)kyWrH<-;EH3|kJU;u1<0qwlQ`5dyNrV+UgM<$C)b|tU zlFMowY{frcfSW=>6-j-%&)o77Dd+SW1_vo|3|scXbU&DbxODoCwc?KM2mcVKWO=7S zW4C0yN1WZK{d=#vbL4b`FnXIoG&myAc=TMFO!tFQPf~Zserg@Be293x@eWc3z2bYR zJ+~j$i^Zbidmb;YZ<-eQ1_;{L0Z)+cF&~9e&^LIpzP6KUAzcly?=u#AgT?Sl0$!;X zY@C*+72hAq!xR49E`}Q&kL64tmF*X%IHlM}E4=T#)zu^j#>8nixbpV*jxY?;mG1d8 zPL(f}B$Ar;TS(7l>}2{u*Wf1y##%1B--WDyXtKeSYK7~HgHLGoysV@&KnBgE+wP%0 z#xa`?=#cuSq7Nhx~|-}W5aD?^$rG+Y%Z3a{bdzNefqfUVxI~JezipNqZ9PaYHspc z^bg^{)~-9L3BKay@(W(^1>b6X#l>>-Y7I?y3DgqtGHzj8cgl_GE_5BIVAA2IO>~mV zpBbd~KzeRKG!}q6hz3xD4^3aSMDGtwUEI^QvtWuNG!EW)&Ud=+q$5mB#@po?-%;Lx zQGwKlzxed$jBq+`D26bxT;FHSS|3o^Aq^) z+voV*kH5y3j$lWQwjHOsal5N^v;FzsZ4XBp!?bH#n#&^NnVc=?!Iw=QzrU!R(0~j!c=yj#f0(FaB*#D6z&{j=Ii*5s}n>WodU- zz~;2{E>W)+1-?8j@YS=0efdi#_T{%Ld&%oo7ZLIxE2QqB&ku*+lw>8hew4^h^Dg!SK8}3n z=b$(m>FVwOHo7@Q^sr-yAN-(&hmqF8VzJ;xK>jUf$ zwSN*7mgiJe$jL7sM!(|n--S*jDK472rh{x_&+G5^#0hr0UD}zv)6;ZxFuia8xREs# zO0_WKB|e@0)nEA)^B4Z{e{AvX*IzGAPF5n6@*}eI))t4RUbIcS;IhG~t6b+69p_wb zyzr@u9Rj5TKQB`@O=)0DiODtzZtnl56JWqs~NQf+73@EbuW)ma;+ zMF=3dn4s}=2arTj)^`xJq2s?5ag-m8T|`bYUvhCH=yI`?v3?}cgE2s zUwqeip>Nj`f9IOIp~FFpsEB6HAssodG&z4T^O+u0nJCKpzURRFNfYz~b)ktaQl{_U zfq7?+FmPoDfBI*?pcl{2-}Q#)F-NXW_p{xR`gIhSzw#@;Y<}g(zigjBJGZNomF4=? 
zmSrjGgC+mvC%lNH(~#o{R~#$8-(7M2u&nE5#dYVB>%~=Hw>E96TT8BMs&){X7K89q zgjmsmFXTxiWuizb6OxJ}ym3WQ?s>oVT-O~!UAK@rFA778L&-URU&}nso8)vMb!zC~ z9qLX#qw>bq<>giP-S^+iK&0T?E}Zx#l@`?k zDsBVW;3(%re9oWg?EK7kPM5a1EYJ63aI#v_v-2}8bx$=I$iJozuR_P8gSOLnF#h=q zm*>^W3ysR6#1k&(zxlJD7C-&zuPvWFJ70)Ysw@j>$9Tc9Z-;b_+b`U9aa8fVO0Jd!=4#BP{3mHs7uCq-gc~v!WUdBIg3+BE)JE~(+;CPL`O*eIwGk{bxUS_ct8%~AoSdxo9Glmo+?DH6-89B^UDgvKdwAgo#0t-9 zhi6l5=qxyc)bZ!%=XiE@3TG#$z`uojuMHfpzy%mK+b!{ZCtP2z>FVm*cTAV(X7U`@ zKg;#cj!Tq|jvVKHJ&s-Ab~@C&FX1_JDV-xzjxz0^C-={hIq`ZTe39js%O&wO7@j?U z2G2R2pPkaovokI0pY}7iL>;eH8_3>ga2en-c@C$ir~ci>YIP#>KnDKU?e_kC2A56$ zK10eS(T6V%(tMKgYW=$mn{wZ*YP(7KmKbz&>=@jqe7}!DOBCh4&ujWJ`25*(^IJdr z8UFZ3zijwE$8gz}!MMzBzX!fn&=p@R-n_Y@i`Q@9hnKH>X!3nnxEA;rDN$l22p#2G z72hew_b4LNmG_b1J)w=UW$~2j0d9}THBhb*DDP$XUQg-*{#|*mxq<7eYr4FABfo#- z!B%<)83vLp;_pb%`p)|C{ia;UVu$_N?T8ze~<)Ui@`_t%Vz*Z!Iz@8Tb_FNGh_ zsEqBAp^wQ>mp7qZBJGT}`A}^+2lMayN2o~Z6X6cg&y5TH#f3b(wH*u!P$_xl=dT_8 zVgtptMQr%n){l>cU0EQN7O=EvDozVd&Z2cjoM!xZ*4DrF3$8Q%fim%Y|AGo+T0~Ud z*4;l1jAN7pd6i!+NzpTLq5qT2Y`vUJX)rxzaDz3Te=JQ1!+&OQ5mIr~lWJz;%=5(Y zv*P^a@v*jN_G~IMHo94EQfa~LH~JC;H2Q@wZIDYSLL7lo-n4@M%&T-UIX^P4xSBg? 
z3j;fAU>kFvqJzv2+)ntRvhc%FL=;QB{LwR%mcX8WKaM_P`9`EX{M*0#V)iQVAKb5E z@A<>OzfJ*^H<+4>hX1C91GA;hIBJZ8+BiILd!v!T>v`WD$2l#WcaM^H`K!)B*#cG^ zKUKU}uu-)VU8FE`zs+`qXyh6fhkWRF32(GUa5S_a#dAs8O!-Km8lxrGwI*df(TqXV z#0M^nCNdGk;f*^*%2o)~IQHufodWW3p1hBsETl-K@7}n@xo!D37c}>fB)Ycd$F%CzTY!H{_&6RbMEgv2Y>WO ze`3(*A?DzZX&@@%k_>O=vH8u&HykM#g0;jrY3h78F$=bHyg-YId z$%UcB@EpAyQW&}2kKWG7NHV7I9dV`qADK}nU3tE0NS>+s4!r*PD(|-qoo~aa%_z^G z;^@s`G-vM`<=sxA7$6f!#gW*p7g$M30;w(uqr7~LbTk5>@-oL6}g z7pnaEG~Ez557Ot61-gGy_T3LJ;kW;hf9S6Juw!-LoXgrf@vr_X|I&l%*VnIK%kQR9 zM$>yH^WFD9bgwVpxZQ5wiQQ7ybt0s!cRZsgwp*@$mPJu7>)I~rx>+VC46f4(G;N6u zikn919W{`rxjqEwdPghrV;T8p*D)8dlpjIaWiAwyM55vxB}wY6%GY=0VX;`$NgU-n zZdX;!bzogpi>43(YLV(nambTtt}m^Pox00Jkx2EPLwWN2)^Gjfw(I#jmEkN#MlbxhZo%!gF?B7@oKhE%NzR;H^;o`- z^8$3Du6L!Aj)QI6WsaVHz86JNFY=)Da$0H{^7moJ->rEuW^}D16Yk3>q~5)=O+xmv zv~5)ty@*}z+rqtA;kXWw{I0tGw!swjM(Bz1lb<}hSNrp>=Py71`On+Bu4UW-5#M#` zTEo}Q!~Sq654CT@h%hP$q_~a3-xakNIkiRO2X^2#O_K(0uzWS^QrpJ*Yc2TeEVo@y z2GjL9UsubRxE*6l!_(4^8Cv5%it2p zH9Ebs+x_0}{?7iN`HO#{eT;U-%)OfmoIj=e`Mb~eG-m1F_^ZD!lidUBW4h1pUca1u zZhWu$1Rn0N{HV+SXaDJcqy_5JFZcOCa6K>SVec~@Qjb1VP<2O}>fYt`Jv>v=I$wEj z{0o2iFCQyoX^@)^<_DWP)}FY_3GF`5b&p-EM!)%vG5gTiy7RjB@BO=fO$(Ir#qrnw z%m4g~5edFS|Au?rt3M_F`QQ6z?`j*2xcGkbY;9VH(|zmjcR2R+v5)Vu-&K*+7f?4q z&?6+0#k)KpGVy!qEKdGunL${J_f#owypSB(m5i4n<9Gpip_I4Za|C7Qpl}XKpNRN# z-c=sa`9ZnlVe{OL6h&u}esGFWOy(k~zRNp7>>Ljxag>SMGI2RX1bue=5lR^j=WhJw zmU`L5?RUJ{zrAZ%uvkq38|tM_(7RVr|EfB=SrPPZR=r4v2KSkMUGG8#j*EM147HM{ z#0wrcYHD6ebqQ@yYz3MWY)BY&KLrA)v?nW!j;H|fqxyx8q8vFMpL+s(;rJPKpCQbRV zC@KYU?^x!Y$il4`Mq5HZs^f4Sz$lLb-?0p&lNoeII-7;*7Rg@+eL~5J4ieLqG3(0W zrm$WhwZ@>lmw3LKB$>aB+ejlaI4#MFq9}@@+|pXQZ->&B+eW(Ad;KR$5=h18Bds(DiDsXh$ny~qIQdTaS#Bqvi!4n)PrtJ>+cGoxNMI0~ zuLi<+ALQK&#v`6hQK(XQvcr?9N4*{%i=mHkt)7m|nCg~%0I&4kX#wH6c$KBOI@L~+$)-mxt=aT+ zWXLp~*Kc{A>au*goz##9An+_5%}y(K^96xU9!t?lG`+f0@@*3RiB+vq0i<=g9yq-JQ#VC&!iTTAcp+|9+L z#;Eex`0l>2F2@UHd3vABAd z>y)A>de8Eq`&4}D_jB}~j{TE9#wwEf{0n=^^A}k1kSGbH;&Nj68zW+=LPi;CJF1$FmTd=%*3q)-{McK@iL(^>QT{~Sf{wT3VlRBm3~4aC 
z4MUii*d>8Q?;J%FO(TNw--kaTMcl|pN^glicv17UDn;?p=x+9S9oyHIqwzL}e<=!U z27y$_rzd(=ASJzI{11?qQ_T6R`WKrBZE5IG7^q82oJ25zTOhMVpVI~%@-%D<3%kPL zzAPjULMaF@eXp~$j!EX{FH^hs&@fMKanqMMBY_mBr58ydb(O(R1X2Yb7@n3MF!i2z zS?5F6cmX8!$CNXJ!+U6vB!-{BC|(PqMR(+yN^zY~E_$&|(pvRGNqxTd@oo__H#f>Y zQ!aR%y18L?7e!h8j@!rK)MoLDkh%UrFOmYt#8SP0i~2;_blh^L?FyDsI9yJAP2gWa zhV46x*jmF0p|O56J7{?RCM4=}-pq#MdtDmbaBQr3osB`1Sl-06olNI3-n@qxR}@82 z6y@`M5cSXpOUK&!@8kJ_wx*Sj_3Y|x%PKw6_K&@QoO=nVJhehZ$we62Odyqa)SGq+ zoC=ucI_V!vbIZ>OqSEu4$!8pW=R48n&u;OC2w+YaHP?}MDyDL9v$JBJ*E4ClTnBd# z2!coQ+pdAP2^=-fl&5zGH&aUA!!T{fgi>(N7hjWBrf>QmL*1evg63qPOH2LqMILhbnFo6_3;8-Z1R~tB8N2da*sracl(q6f1 zfQGx;Ip!X1onvj~?vB*LO;O=o`^Ldd0rPEXuH6&w1+ex5C2|~=g?58d9zA9s!vW(l z?hBvV^2+Dg0{#@r0e#}zSCqMD3v>B>+7owekM8*#uYx?r_ zu?uE;@H-vBE*D7o7t#i(u23ALfJ7iAwmRbWww6)uL}|hwIOfYv1XbxE`{NjdQk0&D z_p#Z#`cV3uo&4TY&I??Sypxnad(k!s<93px9BRydocp2C?Fx+oB{@k!HW2QO4j_2* zUWalOTqbbtv4e$kJwxw8Dw0d+(i_TR96^mN`r^eVqF-s z=Xki{5O^cWB5{%&WS$pG@iZs&ISh_bX;kf$Y8ZE(P$wxxQ4~c{<{k{)zv=U^??1$| z1VwqD-~-DfzC~H3ci6AF2gu4(tZb=&KGON|a>ApB&(4CpAVB= zoFm_~?woeK_IW0da%EZ2Vo|_5+#k$!Fw*;QMVXI}+k{f?mb&AvItAdU1or4?fB?4( z)qD4-m?tNx+hY46oz<1kx6F6hbS{vZ6A|C1En@Dt=03E2V{U1~yIaznDEY4T%R|!4 zy52!boZVV$ty?Ur_tBoGTj~(y%ZS$}ylx829lmv-z`B=zM~0x1BJpdtyR{EMp+ zO>b>h8Hg#(ZY4X7|QorcWA_5 zs&ihCg4QI1sKHGph$ILykp&DL_0x`H|GB?QLK6j2gPTe3c7iID`Y3fPO+`?H;9~6e zbnI?-;yHx~G*d$dSEA6hgpwl-WllAa38Zk)@6b$fkcXu0(jf>y7`IU(wL30J$eyc zAk`}iu98Dxkt5THNgG<}J;cG?wNSPQ%Lb@TrcdMrj?%-(1V<^f**SG#LWF&FP+R@F zbz8Jpai>r`xD%wfI{}JAfl>+-cZy4kYw%#fy|_bhhvM!M+$j!U-gC~q=e*y&GdGhz zGQ%b_*=w)$=B+8Sf#mHYYy3q|ner-i^^;}p%7l}ahj#Dv@j7ca8wniyxXxy^= z5NqfLk1HVjT&Y?r5L)CG$91-3X8Ue7{Ma;0)*ruNSY08v-Xz>1*Tz^mxve`i+~nqr zcI1*8Usu!A9Bz;QffQ0QHvd1lW*9Njj_fxIz@WbNu zhuVX7CCyPTgKY(;FGCN$Frq2O5r(4`7pFG2`|j>OP~I;w4QjMiq>&Yo+cg^Z=^sn) z7|E|}BRXo!#*w+mdak~^cAn9T2@1L1(s-*TilO)2?U znDmHm%m~Rv?wHk$x;L>it26Qw`YyEuCyjE?h{a2R3W66Q+Q&^*)Dcih6@{Dic{Jaf zZSI@(`ZiYMvmG~zj<%yOzF$myD}x`OFP+?gxOOM^)bXNU+K%X=Ge!FT_-$N$_+x16 z^@7AP-?FHbtX}v$xqhf2h3 
zP3)d6yTODv-(095jhfvTvg(CF;~h3zbO}M_-ZiT%Yg6n(uCeE{emc+h=m^RgkDw1V z@!#-~rpxI)fhlSDlCP+VcD_+&ItM zWZ1PBic}-!q+F)>rS8Y)?1aWn6Ng%^_y((9SfewRO78)!OJ7=-QWg~-2&!N$)aBH{ zJA8)EeT~=G&0LmRRSSG00o#*Jg6jq!H{r(i#N|y!=at8Tcz(0&@{bdpOp%>4PiBTkGne@sbs`SKtZg=S-*#}wxPmF9IcP~LKhdD7AQ@coHd zoyl9Ll)i?#aWlITMl>BZCLPu!d&jB(4V4r*v>L+-FX*g@^ey>No+~;Cyjkg>RO9pY z_xB%PuMLr{Jt$o|oC}Z5#=w_%h47zz6z(P?4ax)<`$k^B_Vc-`c#bUJUPRwRZQG`m zT%ULEdsj|M*WJw=G7SBRTnE|j1Zn75H3tZu8%~(`i&cGnsgl?SY8ppLLGmm@Y7$>S zw2aUkG?e}BpmD*k>{s8zVcy0-x=Gn06Y0t=bk2S#D1~%m(AOCcyJ{emgZkXUIl)DnI+2m34S>^PmJ$ zPh|39#8QlEV1K}&+RwYq;jOgL2lIcPh)vvIyOdFfB!6-Uaw6*%+4=F=4Lk`FS@xyJ z$jmWFj0+`CaZWg$DAb(GBCr4s+)DO2>B%Sb*jU=@_w%AVSbGKV!q{&whC0_uCN_Gi ze%g&DB@T%kx;xz`?^BAFq_*F|GVAp|{zH&GewPtKk8|LK$Y_D{#MrtInZWkPMc?BY z*t_e8<42;>Gj8q{>t~T*&$~G%vDQC~nXn%Jogk1yZbr=7dzYI#oyv=8ZR7X3BHlHE zf8Mza-HY%!7~48UKnuLE2)4QHBt=v1FH2WC;lmNi@M^r;Z9HcF?0)-V?ag#;I>_Ln zJaL};rLq0_k<98V(_r&*2|05G_n5}c9qz;r#_Kn8efrU%z4`DsuXW~Io}08ZNI?RO zjwyz(j_5=bHrc}yn%qe}J)@>UICUD3F1~@tAoo)W1%0KHtJstU(I4z=K~y>Q(d(E( z5$Yu&xGW2cdq}7Lj7$?EFD6MNOZH5{O~kYvxLR_pmv`u$9SURj^yzT|uR0dE+-tlJ z6hq_6#z%&coLHGbjXI^h!Qgj9rFI;s*;3f%pHdZ5UILQl?mkiN%*h#%wy=~nvtSzN zp(pkJCR5ZUdu`xznPkb%4*G2Fbl-%nxRgj>FF7iH^ntE9dEALN3>Lk~A!s!=Nokt+7zX zg&7dO^th+qfu*#^7omU^>Nb(Qj!oSVZ8--@_`J%9pbj&ueW}35x`<4yHZJ-1un@?FhkZDd zcFvj(=D48CfnxoWEkXbv2}bxPxRy6*5uI(g0zI%o5rN2n!}vmm&W2uIihw%Ks(MSL zlFVg(l`-U8FVBFdcBjj8pzja(IhR|cs(^4kx9TSoIuO7juSYPG&eLBM9G^&=ZGAiJ zDwY>6AxAVu5Lp@+P&n?cn-)$j8*wgof+UqCQS?UEH95Zjel$R-wZquiK9Ga~un+qY z90dW9dOdrZfTP!wn;E^(Kza^Qan|T04gr$8E@1CH>gVMS>N0eMS@N;jJl@$n&e`6z zUed1D{KCdkAP*(g$*6km3ss&t#_kX!rGJ8lzm5K_`{h1uo)@I*PvavispO!A#?MKV z&hZ!Oj}0I9=82cQ2C}^}dN$r&5J=w!uv3IVg&PlQ|0*`r9H-Q@eE>I`ZQ5j1`k)1N zEYG-3G>OBX2@f7k+C17gMy6$QI&A=sbc!XH++t^wt9Us9&LEVEp-m*%na%CDcpqgG zRZLvePKRjkx1nT?pV0azv7liRv>z)c;;P&P3gg4exREC@yE~5XO1cdNBB<*!%l`sP zt%L6iqour>^l$dnW;qjFH7VbB5$57vKLRLIPyviZ+}-}jd6$i&s;BS`GE+J@Cof@A zHm^TxLzk0~j*D?!!zT_26PDj4qa$7qQnhWc8tY{2lo@qb`6ds<2Wp~ZX82KuLOIe1 
zJ2bK7Ub4`U1R?nvv9V;NgYDZ`_Po@8c~~y%TCRJAUw(aac__AqrD961h%$3F&&l@Z z4V4@N1=PK4(*weQ!cpP!TEuJ@VWyB|r;n<{G+M+Ty7(CrS}4wlB#-VPMTHbN$TUs4 zCE++~{g$rJK6DKoS&*3;uti>(qwX#K_^}Q7iycn6B0@zHk`V;#?x05?ej3uSLy0M1 ziYtFq5Gu8W{u^W;bFKoqv~27F{3H7a2L?-(f9*piwl`-%OTFWDV^0iP9(W9BdY|o& z`!wLPL(BVk{BBonLG^y_3ycCzy1KUCf)f8^^P+m#`R_m7W~g}?o-HHqt;hd~r{+KK z98$tnl%JP-1%S|7H$7GzHVrmKc~!WsO1@?F#lA(V{Rm9JGTDo1I)(lwJ})M;dxxLP zAy4Y>KrGW8dd48pHm5@@-O6Rf!6HuKxM5bK9}f(RQ^fu>s+yvvkjQm5MOUdQI`J7> zuBZ0iNg>4H5v){`?4XWElmIfo9}mwhbW(zC)=e3t?}JH*`9Ie1Gpsg;dAFVx zn{IW@V0#o~?-|0NU%a#b@-X4=!TYb0EeG5)dqPnKY>5+f@qaPcNIe!DXV2jp-;`i6 z6bZC*mfy$a{NM@Z|I|dPVDWMO-IR)89iE4L_;YL)Oj`bja)VjcXjzh$OD6s`h9m}=1b^e!kAMHR^=T7tu5t^WF~IsVF||OGQ~?^QuQQI62*fF|@5WuAXN;Vai;m zKPMGRI)ol%9SR!MhPZ5w6^BseFyjyAp8c^G$J~P^(t+_VI|qq3h-m@2q(yIroDKAw zT5I1EVGOiZ5Xq^}P<+~2f_T3bkPjA=lxQH54gFYr;DzLGkP8%enkA4k^q&j!D$ZGl z5){AmMVco!4nCjwED0c0A#IA!FJ2h=xF1*nE^0US-p<(Q_iVjAAqiZLI z6ermsfBuuZBNXw10?LzFwReO0(u)21jS3f0pwVyI)ZjHzf=^1+iMZmBaL1RW|^zOs9@FHnVnluuWq-Y$x^x42075Jd2p?+reVj~2D6kk zNm0c*kZ~gv{VAOvQsI#!lwtd{>HAo4>?Ex5JSzHlP<=)LS+@ORcG`Yt{9I!xxNxz) z64PyMpFktB6Q_su=RLKJ$K65@klj1NX=9GfpkJ{vdE-+fi;iP2c4*>6m^WAsM&-=i zKh}J%t~U<^UM!=YrI26D(fOqnm(Y6lkfptXdO1yLUx|YlHUya>EYYn~C+w>dHTs31 zi5j!9*7~E35^E89M)yLQ*ghS>2PlAe@(b-VQo3?W>O*)k=I|-p_sqJ$!r-5Zs19x` zgCZO;>WyC7fxtiTl&81Fi3Aaq%+TInP6tJi=UqXdjC$H|j&c4K+PKs};j4@PoaMU& z{Kc?rIK$5CLTHq67x&kdN362W#@tLySGd(H6*nk3`T#wrSwaS%s+x#YBTG z;2^>Pu?8-LhJoLuz_X5xR!Syh2)6oEe_Khe|`%H)@=PLb3$=m&QRQ1X*~XAen!xD#r#yJ-9n*q zBiMT*LJt_CKGyPw*Kz)fF7Vq~r&gn-IvKQrL*e8Ku1Jg zOa<(acx)~rISOo(Xd@~lFUWepSO*E^0e7f{eo%vq3hsSNE%s`E=tUw+ofOhdLvP^+ zaasaCpP=~nG%=l)OXk@r+gw#Y*jaSFzPZpwB7wxogVpbo!}F16i&yMaWR|24rKoy5 zu;q2X6^5DCGlo4vSzL#Tm)u6n$YyO)%k>wrWv zO>nlP`A1{j>JpaY+Y7Sroe5G;L(-;F7DG1li`1dTx}UB?=63oCT#@?3OF6Vh>qD+_ zgQ|LhFKr?m`pqC?7rmqnOe}q_FH4Zz`wF$R-u3Z|R3nMhBh$5Ip6z+p3H`do`pNwX z7*j-5gHB|aTQyn12BN&mrF4inKd+5_KZSX)`H`M3!KIEMV#tIo2|BKtTdc1loP<#> z&>yNcXs^rtrHL_aK|6Svwb*nB>Fc+uQohkf{>THiwDNBn^+$O{XH}bHbbZe7Al-h$ 
z`Zt!Omz|k_iu_<@Snhus0&smUr2*q>=!2hV+(&Hry`0f&*ENEF*ognU>=>R}8C;}h ze~c^K!XApNMINmBWvY8-(K(^VSd1HXp>c28SYxb}_gKt0JUg>7LoD24q|w!Q6FL|5 z(~6?3r4l0o2{h`~fkNJxewqGfy;mn-U-4pHZiA6M=ybAoi~&Gx1YGa5bl7xC>Lvt! z>*cUT=uC-5a7Ly;4yDhj_EWaZGQw}rPiS~wWUmq8E(@Xl0`{h>30k>A^q)E? zp!3<45c|HxrJnP}&mJWfHNNXh=A3M%00(eYaEGI=={ELIr$nQp8Xs0keDG~C%gAhS zPep`(#ZB_iSs8n)*q|E$VSC8SN7*NO<~mN()y*pIe; zHN38}Tuf@dJDo7SFRs#IFN50z4R-WNdjaKRb=$a_tiL_`cCzl86yK4;{oLXt2;~0u zr+>(jFwS4RAb5T{d8|TZh;xU+PSiAq{ANWjZ=cV7J7?40do9-4AEDMs{e(>sZ-!`B zmIYQFf22DW3o+8#tjAkn8jPSy+;85scm+_frNb}Hor>F$fcF4NSM7A%yPyv zz~i8z2Bb=1GfyR-Gjg14&&r*X_swlcSsfogP=M=2a6Wa0(NCAmr<>$fsn$kG=L5de zL{`Ha=pAk<<%12?30zhQxL| zjvb%WELK)q#XxH|25TeL!O_&DgiFDEbQQY|VH{a`5^GZdP5zX=&=UhNwMA`IpXUgy zJmQb3*W04a7e{Eh`CL=ZiB_}=0<@Dab&UjQ!Q_}plzZw_$fhz1YW+VtxgibuD~Xep zI@z~XQT0J&BS<=JJ$=5G(I*ajd#U^f3W9psYBLD5-cSRp;6@bk%P?IGQ$`~49AHuu z4cCbDuNTwEKkLL$L~c>g-G4%~31`VSXT9Dv?6{O~-~CE!+PP%vy>Vf&8_5-B`cD%) zn1hqyW05&|?*~@rQan*l_-C5?Z%PYRw47ZJkTeWmB{19f%dz5=UL)3cir*^|__ zMB<;d?0=ID9;8IWD^K9-j)rG%DgpWJ0JIa9Z6pwCK^#Ws>w?LuUs@(--FJx0T>`$o zj)iBXGAfu5{)soMCIJ1MOKYGOk@+#boS9jnk06l>MjRTcms%E)T)!~X=-KZxHY#l; z{+90prn>a8D=#Xu=MjO7TVm`NGi;mf_s^U{6X3RVv{pwO;te?tE;%X=JlFA4eI6Y7 z-&?ZPt0c|iAMLwIt#zm<_2WH5iEfcN8-{ph#71C`YQ!7lSO$cyEk(R{jsfq^ER1aR ziP_^8!agd9T<}TJ)*>~;o|xFjDo_Im)P+?dBqJ;Y9(kXuC3C4_&NEFY5kcj;y2U+! 
z5Cqp%(uLDdom#{pLi=ePHV+zR9FaOH>4U7by($+?sdi2|1tAV0-rjcfBOJtQ=DVnFA?!0_SyieV(3)O$K-hY8*ryzo2oG)z#c64fZEBsxVF_;cbsqQh*K?sfvp~?xtE&NG-5KH>(yY$~t2QuZ``-$z-I7 zaYU~hw8Cp^AHLWv+}O=KPnqLT_Md{x0a?>_(dT%!=DH-0E&oO(abnA+)%4R}7gp_wX~ zs#<-0pick#LIq?5G%;ys{8-OuKeoM*n&fTvqM~#wL|eL_U)a|g$4!_u0DvP&Ck_V6 zP|z0zc8DuTgQD?ZIk@!Y>JMWXV+l@jym1h+1u)|)fc9dQ5KH8uTcRt)c)qLq2fPeE zsRGt@l7c$A-F+-(1Wn$kp ^V$X6a6~I>kwB2cmHrx&z<)ojPiWpB~sY8JCi225( znh{k*t=(gvUIAJf$tDeC{$aq`c+H&5Qv2MiK*$v?)jN6u$MNf=66*vOWS z&Pkv0&GGWBm=%-&1InSQMi$~yAooI+zWNStbBDe&uaQbthaUx7Qz`|6;_5k7 zUjdj0NLdunrL{~e_kVJTNt-L*9&ukhIGbd!D|)FDp0@nF37GXcLx@Gq^&@5J@yK_X za=}9O2a~w^V{`3E9FCqODQ-}NbTXzkWPP+#PI&kd0%aP+lmM`Ya0Le9$-G3`h$vC8 zb3wwZPh*@38&xz?{&fL)_$Ci+tZ`^q7#i3P*R-qTRGMF)RpqlSx! zI&Q;oK>$Txtc{e^7Bc2^M)pFL6Q)wtA)-kOz@vz|48*%H zz((P}f44?UFDoJk+zjNwWJ#3^0KJbZ3GnRMhPX2VP1lF(Ia?S+w797UL{1(4us7e5 zp!{edD~Z@l_?%wWoA37+*NTXCXBI;luQ}<_%79!()>psDHKB?82R*P^%Reo?^Q&TD zQl7tJ)YUAy3Imc+M$%3#=jRb-yVpm(Bwe9=%J#8?W)I2S5VXGs5wd1GCK}Ik={p!l z|1);@r+mw9TL9x$w{o$LXgX2Ddv@tIwiAab9~`c)%D-0#p%@DUR7mTn*V||Hntb~e zH(C4=mm#$m$UZEGC6{~&q)|tdIiQE2QhV})D+4}a%cgW7)3YfJJTQ*%V|S~MMm zwfR2jt$?aN*5=5NWMy-yL%VI|yvy$+p&&_^qQ+2~lgiUNbUeEn;*?BQ0d@>|w{K6r z<4j9$dy*1!utRUXuI!ILs$0|3191AL$SD{9*w3Sx)6jc16Q#2HaDSZIdltPqH+p*D zw9#Km7$Tw8@QyK3LQcVj^gFTJ_|zB)l|_J$mX!04i8WixR!c?tB4#a`Ev`8B=Z|P zX$Dq4{Y1iQ2yNf^siOP~F`j;JPfvcPSaYdtsoys@w_i3g(suE=&qA}es1P~?(6G66 zW#&qN1V{#!PG--^x%{N)xmCszLBsiBqtvTVZOAW0$KwXn%9AXv{2xq+FDm>D8*_(GF<=}ZvK}Bs2)Ep~f$fqfi z{k`5=Nahw5!z?*MV48L+dqA?a(H&&$+0taKhE{kx&NJ+b3Yshf>&JN<2EQ@VE9l`R^8 z4w?bg65TXIy<#JVe;oZu%-bV)v=ef3SY@;Ds@X#wG6SRCqHMn2(AHD?@Nh*=fsuQ7 z6->P-OvynJ?V)>IJGh&R#IPSaGJ3>IYk$M!Z%X|y3;g-7{2_SPMj1Ps8?{z?P5>de z-#KFYm31Yq|5O!MofFphi$vBstRiP>h!{I$jhxx2z+P%Y!b~3W<MgIW_WLAg{ zX_D&4pt3f{Lcyg3GTD^oiohT`4%8knSJ0goK=m%3O6vWG2PO5Y?Pv6bw}U;XG}%&h zlqr~)qZCbFcbvW9#uCG~MXFb4+G}y#xDcwrkl&bd`lf|7#+zydrb6&kr@>Wdb5ibY<=w&bnr+I9k^*ug3$ zd#8)zIB3xgL*mD#(t#R;5lChn8eJdT5J7@#qtve6kUj(Q9CxBWGNWZ%KBU*d9g!iq 
zK$|!Ci4wCZDQmr2mq>~Ad6vU}|d>7>0U&O%Of(yox5T?&p*pEa!;}*X^ zp@RkXf1lz36XSl}jHSKz*XSYelz*KmDd}Q)rlz;H)uU#F-_=~deNqp(sF^1nl_u6A zW(UW4QS@*x(f~qpXtiJVOh)WU;mYT2%=M0;08oUr5kZO>MvekN0EUQcHw8tp0sfMQ`0Az#(>ohv#(ryDX8I3Kw;$enDm2zqY+g;@v6CHnle8Jp@dZgyxHJC zrSN}zc4UU5lz}U5MMJ1YDEO*09bq#dAWK?u#LDDQSFu;8TUxq&FUwk}*czoEGAF*b zLw?8u=H3>6QuI~7SkG|^na;04jzl5#`^SCf$p$3uc`1Aalankixy?|-b6tmCxt4G>NS|f8nPXR!iI46zaXie&gwXMc^GF-<^sLhL6>iaMSWHr< z%54;aOVXw$!?ts$Tg9vx)3}8ahr%6K8OUK%`rWryG}}fo_)vi`GQ%Af zq9=v!n!I(6n-PleR+FNKp+)G;=ir3bxsLGG&uJR4V&>`Ro}|~k-)RyUV9}_V6^W`J zXzjf$+b8=lBMm zMiWLlH+oo!-~JS8LsWw@(bvB1;WF7_Vn|kFV5Lfidtb;FlAX0Odgfx&)`Oep}63*Qo(ry?R50%i6yk`dF* zSjx#w68-!Nu-lr8LF)h?6`q^ED@`ze5}ZjwSx16{7g2{(k4vC5VE7}hOIcGEC&I84 z8GdNUwCEF+v^blzIvt&CJwZ~+;D)VM>d5@7nW0w|Xn_^z=t=Mn*rZ?&Z_qlOZ`;mA z)%HcB4Ta6Ys7-zC;`y1piu&Xh)ofak-v*%11;wlqjBjU#T}I3HCZD#3oUHDr4y(dN zBmqBlZmT%ZUcefdua8d&bnG|J{TfSXO5K;;5Hf0!|K=@u^ZS=Uz#+7|LT3&7XoU;T zCo)J*3ppcNo>IRz?M0>}n@kIk8L7TeoGPIQqa(sT&Q|HoJC*vGI5XMe4rw$_^ucjX zD&pr~Yhk=0`9JF?$m#-h4L7T3esstWifKDWuw%Uafb>Qmv!xggq1N_aQA++U4S@S#|_^1A%^PR`&EYBjEoVi-bBj)tnAO zF^yvpkk5CIX@~48OSxQ}TJUnjDnqIqr{}n)0m?VUO5niQhUn8C6fr|c2XO58O z&SV>NX>#dV3j(9e52v2$zj}(hwc#%3{(;8DKBc<}yHV*OZv{<2sG*Y2UFL(6d4c-5 ze6NEFZaW5@8#Z&~C%5OLG2?_)8!4C^FZ*On!)u4%wgHT|`&h%-M_~vO1JWJ?%tX*> zoLKz6_-#3?fgdr^+4z&ho0e5M+{L1Ufed&uSB*zGoKr~ZTf|*&UVCZ-4)dbTP#V2f z?3Iw8OVoq#esOyLUeisQrUGuQ11;25ir6^j-+Ziloohr*$kco@a7Nk->(Ez}0%_`Z zdt2Jq#}L;yh_IfeUeHE)der@7Zx;XRIPk0 z@d@qr;5Di0aP!%2^G{E2B?|7XjZPyjxCbyhLwW7>^nmMhs+b${CUEP{6d8z1LD(+fGBQ&9+Pc$Fv@5-nkjA!JiN(knGy5L?j@Oi zi-%I$63_@ZPi^Yhjsl_5$ZOrqn;<(-Ob<|+Z$)#9B#lw{E<5gHP{F3o9cb z-9Tou({8;@C$?|QE4x+j8UEy~g3w|*?b z)SMVcJD{B&LG6bWAeu%HW-Mg5?6dIt4id;)1-=s}Dpw)gd>e$jA4x0AhvyDOyo7QFw z$Eb8p>cmb2cAp+=qcfQbcbw6)L8>xE^t6eMrVZgRM!qwHqt9`WzAcWNS7GezGfvjk zTNp{?x$v`I;`U7N;N^8v1>TSVv2anmRqR!KjSo5!vYS!q1(0*To#pJ)-Ykg_WT)q2VDC zVt2C-a2ESle$zw=beBNFH#RCQkXRJkp+XX;s$Fx`ak$mThbRPUS$|Zg&VYQ_>4E-a zXHv=IWH0S7JlLH_ZsNu|Cd4E(fsT_ufkdn(-@s*r5ojTM#q$Pwyuv8c0 
zPX6g5qkE^c`m~NGczDZxi(I-W-*2fE?Q=KiJzgyS6|Wq=!id#tjUz6R&g5xH(E6t+ zhX=Q~O|!)ha+uoCZz5~=gA;8Sulkrp%C)7H3>uAemDud37(`Oj-<>WlNcGX_OLq8-{057LgPsdk zQ-cGv&^GRw963c3R}fKLBZf2??$!{FfB3#3BJ&y)M~9>ws12lZOZ^1IOV4H8t)hBj z!MsD+BBz4$ZZy*>VEVd3CdC@%DoB|9vidE z0JF32#K$LxeSNjI}7ucMao>ad_!C9tYwun*ZK20!L=?C7(lk;8OO7RxKU z7xzL8Wb=`jNV9@zn5dfCJbdf0Q*;k8kQ{#gyf`md`$=EvztxUwLWg+t$uSJ~3O#3qc-K1B&hYxn1XddIs`$6TiL=o9gWN~N?iqI6|IF9PlIyU)iN*Pd&6_Xrz^ntJr zT~Rx)W7g||oe-(bDB;hQhdDcwb@}b7Ep09N=8w8X1O754hJr#%NHdK?`TwGLe*^QJ z@vmI8$)B zI$H|qkiOy}c9uZyRv!#?{Uz0i=ZQj(BlqsgmNO8adM_i+8yz`^aB$h23zk&q@+))} zJ^zG}RY~0_0jo>rwfOu#_|bDN2Z$j17Buhs#5`|D^u+bd-%q)uN2@YN3f_z9HqYmo z3y~{^HxbZE6deq99%u!#s}h|)&RX1ruinsIShW9%^uQEfqqfYo!A@!Z$mYWVnA^4V>7_qV{BPC+8kQMvX00&;V2K#72nz}9$7b?+?Yr^W`N zch|>G!54`jacN*b`s5o2gDh1MBMii6#-LAq-+}!_AIWp3m>H{eJ$T-H*eXL0-3R`RMNsGJAZkaTRII|nx8qUul;o}ZT0m>Rrtp3dc*VLQab}^WfM^+Yk<>;0@7+M zh!+4qqXqFVEdfk&5RRL9M52OSis@z{ll+!_%*?n)h0BJ0Y2rqPspqkk)=LG)GRv1n z#!^PEM~=6eC24_D{Bpy|Tql%%h@yC$CjFFtFHJ+HUcJQ4L7rL`v1v9)@*Wrt>Ctl- zh)5r8n(I4WXnqhDDVZ7U59tq4^U)2|sAxlZ%>I5V>trCC(cCX(T%JoNb41AN8d=;? 
zcu&947U{LhQ#5SOV>!!LX}vg?@S~dj{?OE`&Ti=DzQ>*S{Q+rMQ)b5V5{cX_vCn{D zh1F@LD?`c4bkYU*`SE+=`c{&*_}DhKi&^u<>htAg@Wc|2O-fpeO==5(!Ma&kiaKYg zR#8&}FM*eAx_fG%IEp?&uVOjJ5k8HvSJ2osY6wOo;Py@M0GFWekG;d$A5)J*KzK@D zW>jPRl*6iUWDVp;W=!GcjgO(*cD426aj#&M;Zd8G?Wrx^?Q)ay$zUXWUW5P1;J1@} z*qs%rsM6-q^TmI^ep%4a_9T75QkeF@l5bUZ60>@ceXN%hV~ToNRIy0^ z-Y&JqP zT=7Ni7E$q0P2ovnh8>}A=Jbgk=3eB>esD(8l}q~2^<9%!X2k~^&zZWtVy}wK+S(bK zvU_kEo9txL-BGlY(RSOE%Q3$|felHTiPB!qqr6ALPA+lb_4o2p2Y$0KMcCQ0X;ei5t2kX3GuP%m^wQ&4`6ybY;ToStI(jHNJQ zQ=sBqk?3u{6P_Y_L|bb$ksYy2Kk?WU%5`qo*vxMezfz~{Q*E_zRIF^Saxxgal0b9P zE0ExUDlu)LkAQbF0T5Q7y=oPpq7&<=P+zsYGn&pCT@v5tXf0puDlw62Vp_c}0pV)f z6kXfIXV@)XdbKLd9`K18^FChSkL0V4XUJ$sf^3TRO1x){PwpE;z^a1IjbByGSBnf6 zV_Z|i8fQL@zA3jaoD(Me!%3B}W1!gr%TG*)o_mN{?b7n{W)5xI$z%koKVnBPxx5d` zrN)e`9DMI))0m!bxxb`rm}8W5y*kwx`QXW0zB=XiDeb&jup(#F8&f`#M!M)qaQ}`t zMvzuq?%!yv#bU>8DDt_*HBE_I={L*NnwKulk>7F2Hnr(pv-6ca^%PUcE?yeTN*%FG z&%|hkR4;V#emqqC$`YZGPp!M%|Dkaq)M$9S@_E0*e)8Nx3j% zXa>#uLwa5mN|?*z({zjxCSUo%7Z6D-pK^DTS2R2zE~u!O-Rp5pFb1DI!sRJE#A|xm zUNcuKGgZrYzh%_T(AT?K^+&VhsM`-sAtf6u!;Gd*)_s9^gRSS(ItQd3(LUX;9EO-YDe|FGdF z)8lgv~#wmZ4?k+M78=IHXOGx07c+hl{CE(#W%AMX#`Z>I%Y zS+1TOCv#V8mEo%=#+s_Ei7NML1`Ps1NBdW7Z9cq&E&^?fhxg9?8ZWp^$MLPOlrarg z^3g|}Fk#O7GzluV{f#^qQ-Zep!UP&Xrs&rt0IF%2^C5`$0d?vwJttaHt$5~QO1Ze? 
zWzjhmRtf$O7w~w^50~bJ3hfg3a`vUtv`f#)UN>!b$dfjhlAX#(SW9&3+?~3IyqFdJ@G_NYrB2>qH~#&LIshx zQ?r%=PeZ=?AY49KLGuK=spfQ)M!tLA*m`~37?t6JOhBcG)6W09Wn z0p*krv6D>@Yaw5;jCkxmB~5buf#Hs@>=F%3^(jqRwwz2drK>JZZ&Cz;k)L!Ef%JTm zAnGykQsF-OsM|>539SNFkA;Ho0P0q2YPjNg*4u6_yC$p{R&0)ZPH0;!pIjAf53}=i zqgZ` ziICuJt0=IP$i3u-Glr$CN7?JAnX<@LUs4ykTz+uh)Me7ESuZsfh5&q}H1NbV0z3@Nx zj!}z_%lzAOi**sOw1!(4`(oGph03O?{*CFYf0HdbECwI)wC+2?Gc~GFZ;2|7VQ4vV z>A9=MJ#uk(bZL}soN#7C!ag)N%n&s>V2xb7gJ#7Cb*&D|ClOrX(Kt2PrQ#m;7TjUE z1CVf*n-P21QsS9W(ZI)`eK+FK;QMrF!rjdR&8VPsJ+LlF>F3O^Xxdw_GlSUW&+@*w z>T(vnaBp>g_VJW4oZ$Ulpbc}%+PA$cXSeopFYMlB~9?u+sJ25loM60gz0H@6R)u$O>; z-y%O*dAYFFs$0s(G2MJpw8C{i(yZhzJYG#TAxU1MUMYyYC3`g* zc#zcZR-k64oU)EELH@>wn&QZ;ofu7J9)=so>O$=%E66>wLIA`lT3K><-hCn$*Y7K&1`2w6o! z6GCVbg02D!ND%2Q)P#UiR6rmEU7AZ36QqNRB#;O}iUC64%V!e(et!RE?wothd)|BQ zopr38xcww!vdL_mE31i5f*7Edqg=X-E$Q#5?mAL&? zTGJUiQD$ll;TEtdoj{9gj4h|_II^LtD`BXtIf@}P!jjlCt^@naAg z*@+Ro*%xWhXISd+0j~A1&E+BW*Vf#hYzh3gqFNC>s3nhG;=p5Xt;T)r3WfH+C)I1` z(!Y6A6(~*8dZx0?pr^Uys__?7FMOP+PDkj{sTG$h)Q>sM*h62ygw0JB9ptF`LZzGX za`p~RO4uE6P8--~QTGu0rJU}&Rb-~AK1J9PQ*~{KuOu zFfXUhNMUmkLA>kzN5Rv>2~ZYk_U%lNat*;SrbFXaXPqX$NMR z?;u2fvZXI!1m1F1X*&cLzAkF#2oEn7x3dh39R#e#uRLsszK>aN?4 z3mno8XGlMnW*Lt$L?c?qacs8g0wdUkId3PbOBTIAYLSy%Ij>!Jzz*SU9DWZ6ro$Ih zeYb8Gb>^BNhjpv&Vi%gs%bcO4wc0-XXj-ufaDrT0HLm6V+$Gar36l_b_7(oIMCj+6*LAW-J&^e^{leDGsedb@?>bFdH zfa2wpGMcdQ=@CTql&d`<45F1 zx+b1c-^ggdCrwcOyKhWtFbCUJb3#0#6+#Cyh8H5QCLhrw_LTrk-7M<1_9rRCE)LOIBlWaQrGJYjzy?gd1t^95!F-cYKeWWw>CzY=p zPM+upw7Kj)roUH^Vxw&6CI_{l7A3B?qTtQM7J5|^$YWMFcr3%98b-qdTnS`t6KO9o zJH>-R~Uh&`ZUo?_*1O}5|;~Js8=W?rdNH>m&Y~+9t z@xC;sm5yFGcQBW!#U z6et!Kh0tCducFuc=Z01~O;o?{V@Jv|#}{A?Ew0mAgEBesNHkeBKLTC+eUI+6~_061Y}(_S(;n0n8{R^%8*qW?yTxgw&b;mtaW*1-%k zKn?_yu9n!?t(0TJvaV#xg~kq^6auvrc(aQ$p!Q*X#aBpP$gFRZW$NuOvO5X>4c8b_ zbxnD!)tWny=j()xmy1RZJvQ_0SwEw_k}@^h9}Vd#B+ER}&hu{$>5Zd=4`dmGbqo$z zsq6x?AFK!hsy@OqY2l*kDjlB~W72=R+S1oFG6XwU&Osa2`{v%YrI3Sdyt}?J8YwsI zEEabC^zI=eue6TH+xt7^phLq*sN4%}!)+w}THc)_soheYTiHE2it_u`br6J!(5yey 
z+R#dD?bE;=$hZx(adGOxCNs(MdS3je{#Q+cjOaX8kJzS&yt(=(&#?(p9FABY{$A4xRxJMx3>G5OnG+Yb0_wEZ+`_a z{gW5>2Lf&ff$(?yDBOk^MQa|!|KGq_jvMe*rXqFg@i%llX2&xTI`d`C!HIDUvv?~a zOzJ(?TU3A%n0w?=FIE<@JfMAUISrrSiP7%+&12-Rqb1-K*@6O#sDDbhz&`-MGr#e= z)_zhXZqMV;_#j99Br&88Iv~$iz)$r4!@=`B6%^BTS&^%UV$?j`uTc#*{em_RiX+L7C?JL*SSHD1AWZ%1fTqPWPG#B6?qC6yg?git%Se7v=%^UylwCVTPE zl;LtS#Ft4VgMl&wq zR~7)E8dOb>Olk#$^M?m>*oYxDu6bazSn+qo?!*tI^|v7Lly0SYUVSg!`q6tM+BwI2 zzd^^w97}&s88~m*7=G(?Kj>`1`k~yvz&JbdyeHpQUy={sqcDD5V!+@(+AhEy6Ue*|FT^cQH`tTCI_BnaN2^KHb zA)aOdy+^~B{_S-nC4m$vYHp3yQo!h%&!6HIY&L)X-?oSEH=pg8V8Smd+vJzN8*!o? zPPQOLUIWz@SQ`HVYoY|@zf%VQtJDx(zw^p%J&8ZoFXxc?rN#Kk(&M&wi?#|MlaI@! z7l5Vkd$ppQcH~8Yl#+ML=*`gWAStCwJn#dTe}KoPID!Dy7VMEr%CV}PEf8gj9a&ul zNDNoj!N)O!513igp?Y|A;Zrg^)a88MQ-Y%p-jFsj-1%zArmhC=Otzwl3@VdOrcN-e z4U8{J{Oa>`qdQsny{f5%z(K^J9hJrKJ}0f>b>W1T_46;M(y4{y6bY|drR<3la1tjcrNoY#9v7K@mma`FZ zz6pRxP0;yMT&Lr0<~&lc*mj@ftl{z>s(xromKCHFW&`{30O&6h=`Cw~-QglE4=QY- zQl*aiv?lDxn`!&`X=E7)i`G?YYSNv)Fc)6R!z?NoJ+u*%itMh#X%&}BY9EosQ+hl*$gBekv2`yqV2FSw;Ohj=tvr+yqA5 z0wF_^WGjegQ11)L0PI)e(Qm2`Hp|Ym=2uKrpeGlW;@9D+-${(}x49|jyd%X{OsemFwiI#LUq)lz%kgjEZz@6y)nb#6Ss;<@DTgW5T z*r3Oqw;g3!8ljJAhRc3?c=96pBtMh02Exmi+ljHIIj4gA`-nc96$37 z987jCi`$td4J;g6^G$eu=O`{Nuw$bvgB{Sr*_C&1HceL_;7l;LUN#PNl{+fYE_~0u z{mtfOPUDNtl}->bK|FCFFb8`k*V)<)=znN1?KkUDoQoXmjQe<8yCTMCPC^2>rLns- z){y=tLZ%K)u2FT^sP=(rKN&SSG$YfsXwUZgd_tJFu8i{6m70(Pn;L_qsFmuSArLbH z)758u5YK=UHb2I#E;LNfay}r}YKdYG?^^YTAD-F1x0yczw; zV7y=K4b%gx7*=gKTMUppyj{(ecrjf@JcS_8k37qMY`DCs^zS_B9$1$r)%3miM4jyz z-GqKDRFl^~A;3$p9mvyH0*_zTB(HAUSpH+ebHDa2wIxd{KEF+-6sM%_9=Py&xx>&nuj3 k-G)KD$K9yV9gi460Ry|I9GM2P`>& zZ@+im?w&n+cDH8e`ns;-{ux;=GyoPS&cu;iuXqyacM0DyzNfX5Bc2RR7DNL5`$ zT24{wZwDyJ-ps)bo&x|J9Nk^jWhAJyb#$qK|3oHcZqDMWs*3-K^>?ym}Y&_ z^Yh&(>d6Cz~=Cmo;*Rb zaMaR(UCI9bxdG~c3?Kne0j7WlU<23#?f@(7>Ihr2|G6CJUpZyK2_|C(Yn=ffz!fH8 z1z5vmU%ijp)v;+W}VQWmZ;%wq- z^4AU+)`Bc80pPq00MK;+02j8_&-DIp`~O*QnC>V2K+z8X(0mI3iemtfl??z4FngH# zk4pdqKmfzT!-ElE4FUoJA`%)h5{xiWQBlyaF>!FPF|o063CIX>@ksEov5BaONXW@4 
zDJgLYY3OJu=*TE2DV~(T!9vF(~k`@hJX}>9H5UMutm*O9I2u03d8QFgDy{ zKR^l#bT9yhd-C941e+lo7#;-cB!Pp`f9Zq=ff3;z7XdVw3>|laJ5_7xQAOCbmwAi4)X)tppdu7S&~{|V$ischFMSE6&E=s@SoHiu z$Xy*4ER95{T;^u68jy@`VM1M8KF=S&lZ}kE>hg&^9&W1MJv4mO3QV)^S(*ab;`cSE zr4HJkAFQl6Deeb7?Zl@bI~dm~n(DKv@T--_IIhFEW0$c|Iyv<`#W2FGIVQ7gGs_e~ zvJwLsMe>KobU{ys4iCVcn=qAj0uC7s;WXs_?r;{CWnT zUek-Re@yPGDRA9CJ{a`Qj=2c17jXx`%Y@eJm3_wByTXLnpY-pyb0qAY@wRrIydvHu zZTF+xeOLL_2ho@MCfz|P5CmY~&^P7%+6WhdG;dZd+qDd@8hvt&)Z}*q$MB3Wtg^4^ zgu$^;4z`a?_jyrmkH|J`Ee>Y14)uM+Xe;ST_#PKqrc?SlR>$q@-kPi}|5Y?wV~|1< z09at1yS(rFj^w3ZnxppRW`ry{ryD@l{~`JP@aA+?z^mA9de8Ins%T~GfW*^mZ$09^ zjJa zcq=pk9WnIgQ?a{2>|z_V&amvtw7H2ajKk+>QgkwpRhXlm1pwmGjBLj&G*)q)0|LLr z+Cn7QcIJG(@O;&5r>{AD?o#TDk6l5mDgb9V&MkEQ<30NH;vezak@`jYh1wmG0Fz%u z2#Tz!qzg1tmr)*K0HRIFPEIKVRrR&+Kt6BPui(9I04;`Bo^Sr9?r7cPi<*<6?l03B z0C>|dhAGsz8|r@5m1K8*=o#&4cN3AH>HEA#1J(TNmF(v)rsOcI*)~3O?*oHm5BW-Zv!Vyy&vgqM2!G|e+Y$)v-+$p3)gOu+_#e#8f5=5Dchll z;n3`zKO0M7W!H|>1OUryLz^~pjz$Elk2+A>&M1mp*y3$6QauFbV*F`OgOW^qc>M_l zPYggtJ5U<^*o_X=-H|aFfJp2PZzp>7Wqlj)wGM86W%c!{{N0~0`5-`qQlx?D)hw9cC0HsMSoa>$4OLPK^Cs~X$4{vRQJ%jb81YZRm3}1eGzcAzCG@j=# z-0>h1nBnE^hFKeZBtl{KZ3Qh`jJ86MQ`hF@{v`Od7>NT!D%C2Pw;KSW(_PmSuA6Lm zZg?1fj44uIT==Yt{37U3R~JTifD^^ZN8YDM$gli8GA)5|5_B!{N5lyqV^q`=pJaX4 z>DIvGz0gPZ+tuukPF0%B7iXhYR8JccUI|rW)6ib3MD=9&G(M|a zPP)ba_C~-Dvtapb`qCR}d{X5P9TZrHZgv$wPa3}ZA+ZMuWVCNBy>9bOVU1LV-5y|s zWMp!ls<`NH**qaJN)+F;^yR+)A?z2*8w9|eUHR2Y8r)2#$5Pw+-|Spu zHG==K`rpO=uR$OzMYCc7aPV;OAXo!N_^S~f4g^M^0uZq{xOuQ~Il07e@bIZ=>0bsV z!Ll|oET6+6fF6N983B^;WpRpzl=lUbs!VF z$PMqK4G%aIB;od@kwh%jXJ_j$1WGU$hTC7+q7-H2R|ri?^c@>M2xxsj1PvumG%(V? 
zrB0VM#BV1rtrQRy{}vUgU!P55V-=t}KsTO{;9+8_%+e}(GsX8o{v3G{pH6yKN@@tg zNG$OwC;@s%g^e!g`h!kN3o%=cTL@chOt{)jrNpk0TOC5f_d(5)zUMUri9QnLxrP1a z1=xz4geWS?%3v(xi9Zv?LRO2iEy|f} zE!E0EF7m4drQxd8*Ech*UKG4dJLwifrbnZ2Sq_(6mAoGcVVZYPOtuN^U6*2?K`q=1 zOv~pfWD0x6P0Lld7r;_iRc$I$K{QP2rXRAFe?D+yCEhI|uQ({h{>mY6=GW&dp`*aA z(np}XmvZ3i+GPvs=Ecvq6rQ`OlU9cUi>qErk3evkQDS`00SD&5UMp<)DtqDhyjx@E zan2(EM(&~{4V^QljGlHo^jYdOlFF~!wew%*`-6xbAi{_Av6~&Ybax)w?AIA9YVv*V zuFdHljpp9Z5mP6~kWp#8e%JBFpCF(wull$9>NSzn_4QAR2B1SZC*SP*^7-wHhsalY z$9r9;9Is@ z+W&gQJ_z$D6J_J#=N*OTNyeldY>LnF-;~~nE}aGk7;ie)4mg6#x*vECpl08Hp8K+E ze{9(DDW*_mHK8qC?)ujC?0^v?bR6%H`xm?dxa8%1edTWRJ%gjBExCPkiI&!`LvxvH z&-tj>=UfLwdjvj49aE?=KwA2EXc}G=de54>U%+uLe6({1ycRtxZMMy|h4CNx7wXDqy zRH{Tus+*)2WkPE+HmlKjtS~&+qxbnLOGx>Qy}iRjZzx%wlH}}6JaALmS1`U2pwu&EKKUUK1xbK=s_ za=1vS89V>YUSK#lQMgB-hs*={hKTE^mr+j>>>r~l+Zw*F#2DA>!JEhtiBs>MzgPkv zo9X*@idvqTkH_Y+xu1(SlEc~NWjS>RBLlg=V@~;Ov*tpZqQbt|`Vc%>;>^XqTG$Sm zk0WVzL0WC_Gda2X0#&Sv47_|C7fs%h96Cr`byrdqt}<7@!Z`0S2F7nP^s=G8X?eGs zr)~z~WH3t*Yro(kfygf($hVgrvZd{*hNAAw3ZZ%fH4mN4FTs0?kXE6-0U_KA!V)9T@<)Rk7tSNyf+uzr0CQ?xm7 zVj8N+yi%~TTji2tNWtDdaV!mF<(^Av#Hm%Y&b6l=VaWEnahO?g=S zMl7qeWxrvD-Lx#Y|0pkOl&dSWIC$;YFwXGq`_D|ZYBAn1m7RO;=Sid(^x1WB$%zy+ zYaExReX9^%P!dgT*4)QSIVLr@$(FlBZ_D@KuF;bF^UdaGUR#-Jafb8aX+uPc8_L<~ zD-ZD-DLP68c@e|4mK+}=Z0=t!BQZ98sm-cc65SFuG;G&PLSt#WudN7HFeJHe$u`JS1u~a z|MX3bI2Kq4X`=RS8_4@r!)JpX|Hef|vdhm?>gd-Co0nhc(k%ERNi;0`wX&pI-^25; zMO+u1JA8SF+$OVn&pg_mkpEe-BZOpl_+fxro5OrjqyT5ED7e*Cu{Iec*+k?6z#)1d z3X5m9zwrzPgAie1{Fgv*uy{tm!lt5riNhsM!->nmE#VSOYhvu2{5PaQ|Ae#z_i$Sa zd~k6`#|BwxCn@wUi78m2opai7JLo0% z(0n|O2(G`s_R5YaN=lGB*;Vy&29i%?@z|FeCHv3H8M6JKgSfg_yvPw#>FT*OX%*Q7*lX!z8)zw)+e|PrW%cRRi!Izm9-4)OQH+QQs|;%Y&ooxC~Ps{ZW1It zIB(LDTsE~Vr)_ZFEZx{jKS%cP#u9IoN(?*x$-3mdq#ws6#8_|NT+Y*M znC0FFxo%Y2eB&#`kR9kaeT)@D;}bU^Fy)f3RfTdit>XT!?e%nC+SH)6gW7G#H1%Sm z!cik*!(F3U@U+S?Yv~l4%h3 ziP9>f4|hh$=EG4q>bc~lmdjl}IXKJVoI7?6VvUHRIkzvr!RE;bEq;S_fcT@0)H+xH z8HAJO)%(-vSmw>l%=gNq;ra&|c`k`ZG=?6{Ejv5&`K{Er1h6l|#P-Fddtq<7eR=FC 
z@`u9=UuV*#v`9q%l#ijY*vX&;z8dS}h<-FgA+xno6qQ?zf(mytcvys>i)_!nY zthau$&*ztua!ukv34>xN3+?plM%YY35%~9S;&$`T$6RS3Uk< zG`y#|YBoe_R(a2YF%{lM0@BZBlOm3Ut$6Yphs1zPmOuUoA4jed{w8Xuskf;Gc2d&l zcs++gSTFD8cut_#xWCTFUAf#bbS;^Qm@2a*q|JSTr)h7Bh6-;(YO^s==2=&8(eOoE zc4vTuBguty(E>-VfAO1F)X>_J?ZP^t?&y&PeaRGjWR(7-T+}U5DlkC8yVz1^v+}Y0 z2W>GvORAKLYHF(ZJ{1Q)sdlfVWoZ_?(60#^#4n3Y-b!Ok%eAyl2oCJ<5lfo5FfY21 z(^01sqt^$D-Gh?XnS-cy27H;`o#tfAL@{aD)rEfcUF%U;Vb4!Ed!;XNm5KS1I2K4d z!M?LuW){7%*=oBfqaa{bi{_uh$7Ef_c33p6q4lS?g*~sgYqR*UY~#Rc`^8cF zLrjS(yqoD4qVCi5 zFY|@%EyB}y!UfYupkv|@m>N09D=Uk+P4v~jLS^iHKg0fM;1TdCcmzr^wwP2@@GmJY zS`P_S{DsSnEMosaA&-Cp`;hwk_bmIudv5bD-rv4e?HnO|*opj;Zj@9aEc9mP?bL3a zj_}1*fqy#t-5BP>i!N$;U+>!T-zDS5SD)VQ9ZB51F#4TMd8-p3@$TZxNbN+SrpNsE z^kVoG%!z_?A5_{I_0X$Cd^gpSd6;`EpM{R4P90&mfQc{_5!A~}g_tTm=e@{qOUeGB-m4gmS4qA6CgGf)_ef8> zcvVNPW#6JrYma+iDn^ANCQh!H<+gp17pKg-Bh(4MD$GD#{5tKkz2=dbdKj^CM);s$ zVT$-Dhja=9nZ#AzRBt46SJI)K%E0uXmI)I!!w!ll@myu$neHj*D(%&0bM_h!^hNev zE9S=Me#|x$rNm_DZf!bpHr&bERk4UsFY%5kz@!tR)rooh8iy@5c^e@zyiAi}MNuo$ z&r_5cTJG7-G?1*ZKvOd1&&cL$rI%DNSV2^F%au&9om+pt;+&7Zf4P?Y7O%+tB<)Ir zF3E?xO?hI2@bmE;9tmCdONIDGt5D@j3+Yz!;_`FjFO1fwYZHWeN2Sj;2+%rZPEt;0{e2_;86ewB|tPnNpNw^SKDO zm&Ae6LBMo^H#sb7gY{EpRn!A_m8>d1LB15n9V(uXoE#A;GUBr-KI;)!K!aow7L9AD zrX}26P>Bs_gieji+6Vr0R=yc}W|Qlrkq&dn1`&rgS_v;Y9 zps`ct*7}Qg$;Hf6yO`|uBf#DDbpFPIm43lUu&VA;9r%gh0Bq{NncBo9IJw}oXA(=S zemyAZ>|gmAPLvv!pQVe5s;I(rgwxcIzvDTW-Gpb|n52RsCbV5;)` z4YjntMg~E+1TP~ie&TvZv%J5eT+XT>{_>Tfzs4P0Qavj8l@wehFolj@O81qs{!6G^ zbZT2sJhg)<^Oys_yuiHkS5Sj0Wdol|Do3GvnbqueTYhsRp=_$q;jNHpI0#Lvml0%J z=ij%T=h{lQ)N2U~Y=0`K$f?Ae z>MC2HaXG7zRd4X|Na1Ma89xmNc0`1a2?|^C@v#ut0#`CuHCPn%d2AZ&kHLn#8ge{@|#8qjouKZ#ibfxw&-JI zXE*KAeN`4B9^}+L*dIPZn`6*=Jn@tD*W9m_ z`%SXRKj^6U3cgfCf(i0u4JD!EA*hN1yB_tKuW<>tQ+(g8gqNafhnU-KD;M>lg3k#O z4}MY=L@7Et%hP&Bwl5&(2Pre7&6d5{2dx8i1cPww=5zz8uSW}7s5cl+U0J#pbl(;m zDa7N$pYcbh3TGUHf}*1q)Uyv;^()=UL;71%>}Auhx_&wO2)!pTA9W)lR?!CNARYej zL&U5mySH`4)#NdJLxVn5j4pwK<@9OJM&k*oAbh#G(!Zc 
zM}hhMPX~RUVXef$%!1j(&(`1B^QQq`m+NdKm%fCCr6Uv(+v6Q`gQ3u_S`_CWRTFKO zyxJFhZ2mh$A-U<<448wm^d>SlhbvxmyU%Nj!=+);ry!an1Xy$*c`J$9Aj6RY~ zwQT&&w$9@ba4cnu-x($hc`r(Z&`o}LB<|gY7`KIq5d3D@QB_@rUZr*ZELC|@UM1y_ z@3{*%dE8;MeXAIF6iHBqd~swSnUQ7tYMx<-$_;Ez?|c!8Z;ti0Oewf0m&-zN_IvM` zyz1NWjqN`f`XX-^lWFy85g8q`P|YuT)Y5>R-iVh;Y}u)(+R>G>pQ`Rl4Ub%S8yr}>Nm{%2t=hb z#r^aS#9h-644-2BZYf`*7cnnp9Ma>>EQeD#oGc^Q6D~9qKsBIGBR}Ry*)k2NfMcyv zT(MtDKX-QfNP>k2J0!9nF=4j6BPGO8tf5osB^vq2YCE8zXA^?_wK{LNEeK!)*E<0C z&d6DeIEsO4R1g7E=&fn7{i`rVe&u*?(!P-!EVlr60aH7!=7(M?8a2{kxcQc|kBYi0 z>^HY#XYja03~owBo4<+btl2<8THQ1q?W;3(on$LUqq7AQf%DnUY-Cyb-_2gjywFIt z3oUilxUvv5-*N7rT^ad}_7hLC!U9$&@5#lnUEP?Pdnugu*$X*)GHKT16Mh#pj(VVP zo;JxKkbGBshK{7IYksT9RlyR$C`S^N{97-!q)bsVvvY{ms`{&~c&v-*5idLg@H<I7$Foa0F+UZ%Z=Qb{cNBNI+`rjuFKMv!e z<37-Fou^?v`gg}@2@ChooQ9)uzpITW*}Efk=*a!=E|G?Du+{1-7-#wb#^*WzM0WcK z2tj-A8^(psj5=T)Z^k?Bj$r_F-X#Pb2!L_Uj6|R(l`w$%?H)Rge+RQM;CDCPSv4+n z%Fgm6av}udLr+BRV4%CB3pU^p2!nOFjW9g|`>-jQ!Wcr(?|*K(JI;(uUY&$Mdqttc zFm2d&P949h^X?vj5Ps+?bfF3+1s!g>JNa|Bkb6FU|9c2F2&P@A&av|@v}0OS_%i?L z&6S8<^s(hgHOirQNKYsDV@vl~xl?-4JT^$htx z6Q@(bJWNdi2bAcTv1|G>#E^WI+;TO#TNeDe$jVHvlCvvYtUiw%ijnb@>C=IU!3Y)R z;8Too5~=R8wLZq3Gm!3}oa>$#mAH1`NR1|UYEF;>V?$DQc^mKnG>cM>@+I4fD_(q>bxquYJwZYd=k?+-(eoduF5d z>Zm@v%|2>QbUpIv5D^;`ba7-H`+5q#b!2Le-@Ow)p5Fc$dUluV6MRi6Kf4f{E|n?G zELT=BKOHy(bO<6|P3dK<&8@Wd9ojD~tzQ{9)c%MRTUk_ay(d}v!S5fR^VUyfq`L9; z^cU%poTE{wo-ZS2eS?!zIB4go+g^nb6v4{rhE}V< zO>yh|Sy@F?wEf`&c{U-1fJZy>W>IzCKymlBabd{L#?K**q*dl#0#4FhcVuYy@_K+P z4|NM=c2q;F>Kut&zV16tRp*kS;t7S4ZUu>-G8Z$UxNMCf1YNeN(7Zs+64tz6R&L`g zkDCvGn9GJ7aXhqmn{M<^?~#HEUW2)_CJ)c=D7ZdHr8u_f)@v zg&n*n`1m&Ox)Hu@F(NqNu*3Z{U*@nqp&% zMj%^Q$EJI)l-TzSH4zBp8=%4qmc{YKHH7$(qA9&Kj*8kUi?^#0P6EFqp%P?pmRdQ? 
z1HaqSu4-u6qTXnUYUsw3=A{`T$3$2$=J|@!@95J0+d_B~i;yXQOHoqCZA_I4+yosp zgxF`d$OZ+?$S&BpJAjI~=@;*)zWU~cJ&RTRg~i64|8)z8@?eyn!cT{aD~QZJNVIz6 zdy)fsY47*54LmQGXIV{Hv+rN*31CeuOraNip4V9sl1MnF*5K_WX|R$Tiir3hM&}4B zOLFtNhgv)E5_lzI6}3lcj@3ncJo`I(NXB>(Df4I6;tu+yMK2L0*8AX=_or)L&|edN z1N$An0DhgKtp{}i&Pc$u8pv7$z_2O_{Js(hrg}qCr&sYk-)F9QAI8Rrh>i zgT|Gg%z`9ZTh^KmF)dbb+{WfQ=y@=eKHi2UKZ&QZ_Os_3^YO1tbB5M^qQr-lQ{#Qj zqd62v-woj^e$6k(K}ItS!{SQd^OU4fa%cv8k>aqf7`pB5uoS_W?tlqgO`Vihef!Bo zl%pbIogBy_%)&l+Sh_5o&`&H$pF#2b5m>b>ec9zgSm=(#a5PzwY0Px&oB_JVB%-oM zCBtlpk`Y+<_1hBpW92FSE9CP{Pi_44=e0W4Dg ziB&jkfEre1GI62e2rfuI#j5WSoBa2a&!<3L!@X&m-96i^x|Xq>Q6f*^3f{gRPcGdor9v$Q?=42S;XcrFk4fMaVfh81 zICvMUQi}QHE%=7!Cv`_ehuBY>pn*CRee^e?*Wlqs6>Z2OH<(Jr(g_a420WR)l#(bI zD8}(lvcQqT zr=Njmc|*21_Af=hp7xF5;WtUcRW>H;DCL_`Lb6?bVw<|Vd0(;gS(uPZmM5!n7}eZQ zg?KkdO7?pO4mG}Ch%*4=&wo*B{e%=9$vP!2{LT1b^GQ84n|lf$ya5%?Vh0$+8TGU)WGJ!?iU-XfbVjm& zm>uG?RqgNBrJ3%(saUI8n|RpA5u?!GxO>U$NmSE|vpD;-ie&aai7K1}O;l}%96>umcG(fjQ)n1mG*g!g-7 zdAyV$*MS-7IxMeLTz4qoaAO`n?Smth}I@+_KyM`|nlT$1?&12S8h+4~!%NKipIN$&9GR~^ z#??>dl9kAA)ao*ITgctCYOe%;f;S_e+M17v@v>v@3sN|L4#{+;%hhpx@cE&~JiA>QJ#!#? 
z_l_BYW2OjC#okLy5OvF(F{tD7(0bE0Z$kuQs)p;btam2y*nMWsJ7R5yp%meqUX!I4 z!!4K!1d}PdX1=Oit@}dWld%vdsychh$l|>(W5)G6xg1D%7QSz!dA-*0w`ej->rb(* z;ym|KH=M+lI{Vo1jLN@X<|RqsYU*2Wykr6Q>U|}zF#Zg}o)olf&1nAYPjohKgFH&m zEt&_Xfr(-f#ivIpps6nB{q6_$gQp=FvwaHQjlH~$xR7-IgBT!&i073JRzc?xDoLw> z#1sso?-oRxNbvaW&f_kGp1BNsBNph4RxdW;NaeDkh#k&2#}oX?3~4wW6x02Q>E5y0 z@F{We5fBvf0UlV`d-nTg{#+DYS6^S5#t_&{xXSsr9`d_KU+T^tKD_r0$cI2L76mmA zNE_ci!0D{c=L$;RR@-hb+zDMgh(zx0efC;^`v6|OV#G)msnTur3>X&{RVlc+bs#@A zd<4XPKScKxt5nX_800sMt4#k22*{D~BkYq8dyyTWdEIT-DSqU%JWEtw-SdNH7jJol(%)WQ5(% zBo}EjNadt;PJZdF5~kj39o#y0$P_gM6V$5AFh0g27stmmP019DpaAk|yx90_4zQlU|oI+JkDc!%&3C-uj{CGznH;#C& zux7G8Q~wo>9-7PP0*L_m6K1v2lS@W9@6gffi@R|Ui^%$2kUoFbSedFiwV+PEu}!As z;;U@RgiD5$)s;rDmz~s(V5W^`c1DJCrc8h;QZ%-lQF+>rjSWv0|by zOMW?ykoPAeThj-5)OpsHSuIo;`fXu3g+Mf7v&%FFWC0ZDGY z!fG-J#BEjC%@tkN)#Ad`GcM#KQA!>Rc+jv{O_iiYCYSVOd>WaV*L1k4TZ zhDIe3xLfe>2m*g&1nKR3qHt{L(BuSrHC;gl_9yk@MQlDq3M}aB=kP6p473fBDX#%U z+jZrLsO;l~8mi?oFVM5^p}=M>2v@PGCgO;jB<$F_OXBAQk18_I{mhE_RYhit)D}N_ zzGiRG;y4tDY1ox5mtVkR2KNa`013(hhf;Pg3XXt1YL}jw zHGI_v4Hi{+NT;Fe5u^c+ZNJ_yun!+M{+oD`POH7i@Np?`-@bJG)v?KgfTHa$$Ejdb zsdP@qc7i5WIX6R>^^x8?4J&tK$$sSxD8iQ5QewduoF-G8>me7J8#xPcpg z7$Ta>*;Eu>e21(ALGq29dFbih4!i#4hT8C*6<*`py6_5Kd88sVsn$BNA}VG9qx_oz zD@mIqV+&Ya&4;i=3jlSLdy$b`SjQ%=ALvr*g!sKzZT(3Za3!7hk{RVp&|SWu?=u(& z=hRQ|L%H1visAW~pRws5Ob*we(`FQ{cdbX3!M7^LF*!Xs)}+W-6>+#FsavCgwP4ImkL1XLb@$A$j}06>`I diff --git a/api/core/model_runtime/model_providers/leptonai/leptonai.py b/api/core/model_runtime/model_providers/leptonai/leptonai.py deleted file mode 100644 index 34a55ff1924cf8..00000000000000 --- a/api/core/model_runtime/model_providers/leptonai/leptonai.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class 
LeptonAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - model_instance.validate_credentials(model="llama2-7b", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/leptonai/leptonai.yaml b/api/core/model_runtime/model_providers/leptonai/leptonai.yaml deleted file mode 100644 index a246ff5356f723..00000000000000 --- a/api/core/model_runtime/model_providers/leptonai/leptonai.yaml +++ /dev/null @@ -1,29 +0,0 @@ -provider: leptonai -label: - zh_Hans: Lepton AI - en_US: Lepton AI -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.png -background: "#F5F5F4" -help: - title: - en_US: Get your API Key from Lepton AI - zh_Hans: 从 Lepton AI 获取 API Key - url: - en_US: https://dashboard.lepton.ai -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/leptonai/llm/_position.yaml b/api/core/model_runtime/model_providers/leptonai/llm/_position.yaml deleted file mode 100644 index a85e8e65ba403e..00000000000000 --- a/api/core/model_runtime/model_providers/leptonai/llm/_position.yaml +++ /dev/null @@ -1,6 +0,0 @@ -- gemma-7b -- mistral-7b -- mixtral-8x7b -- llama2-7b -- llama2-13b -- llama3-70b diff --git a/api/core/model_runtime/model_providers/leptonai/llm/gemma-7b.yaml 
b/api/core/model_runtime/model_providers/leptonai/llm/gemma-7b.yaml deleted file mode 100644 index 2d69067a23c092..00000000000000 --- a/api/core/model_runtime/model_providers/leptonai/llm/gemma-7b.yaml +++ /dev/null @@ -1,20 +0,0 @@ -model: gemma-7b -label: - zh_Hans: gemma-7b - en_US: gemma-7b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 1024 diff --git a/api/core/model_runtime/model_providers/leptonai/llm/llama2-13b.yaml b/api/core/model_runtime/model_providers/leptonai/llm/llama2-13b.yaml deleted file mode 100644 index 307f1ea88f1b10..00000000000000 --- a/api/core/model_runtime/model_providers/leptonai/llm/llama2-13b.yaml +++ /dev/null @@ -1,20 +0,0 @@ -model: llama2-13b -label: - zh_Hans: llama2-13b - en_US: llama2-13b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 1024 diff --git a/api/core/model_runtime/model_providers/leptonai/llm/llama2-7b.yaml b/api/core/model_runtime/model_providers/leptonai/llm/llama2-7b.yaml deleted file mode 100644 index bd471e59cd2d2d..00000000000000 --- a/api/core/model_runtime/model_providers/leptonai/llm/llama2-7b.yaml +++ /dev/null @@ -1,20 +0,0 @@ -model: llama2-7b -label: - zh_Hans: llama2-7b - en_US: llama2-7b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 1024 diff --git 
a/api/core/model_runtime/model_providers/leptonai/llm/llama3-70b.yaml b/api/core/model_runtime/model_providers/leptonai/llm/llama3-70b.yaml deleted file mode 100644 index 9c20eb6cdb15ca..00000000000000 --- a/api/core/model_runtime/model_providers/leptonai/llm/llama3-70b.yaml +++ /dev/null @@ -1,20 +0,0 @@ -model: llama3-70b -label: - zh_Hans: llama3-70b - en_US: llama3-70b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 1024 diff --git a/api/core/model_runtime/model_providers/leptonai/llm/llm.py b/api/core/model_runtime/model_providers/leptonai/llm/llm.py deleted file mode 100644 index 3d69417e45da72..00000000000000 --- a/api/core/model_runtime/model_providers/leptonai/llm/llm.py +++ /dev/null @@ -1,40 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from core.model_runtime.entities.llm_entities import LLMResult -from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class LeptonAILargeLanguageModel(OAIAPICompatLargeLanguageModel): - MODEL_PREFIX_MAP = { - "llama2-7b": "llama2-7b", - "gemma-7b": "gemma-7b", - "mistral-7b": "mistral-7b", - "mixtral-8x7b": "mixtral-8x7b", - "llama3-70b": "llama3-70b", - "llama2-13b": "llama2-13b", - } - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials, model) - return super()._invoke(model, credentials, prompt_messages, 
model_parameters, tools, stop, stream) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials, model) - super().validate_credentials(model, credentials) - - @classmethod - def _add_custom_parameters(cls, credentials: dict, model: str) -> None: - credentials["mode"] = "chat" - credentials["endpoint_url"] = f"https://{cls.MODEL_PREFIX_MAP[model]}.lepton.run/api/v1" diff --git a/api/core/model_runtime/model_providers/leptonai/llm/mistral-7b.yaml b/api/core/model_runtime/model_providers/leptonai/llm/mistral-7b.yaml deleted file mode 100644 index f2b46ff91781c6..00000000000000 --- a/api/core/model_runtime/model_providers/leptonai/llm/mistral-7b.yaml +++ /dev/null @@ -1,20 +0,0 @@ -model: mistral-7b -label: - zh_Hans: mistral-7b - en_US: mistral-7b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 1024 diff --git a/api/core/model_runtime/model_providers/leptonai/llm/mixtral-8x7b.yaml b/api/core/model_runtime/model_providers/leptonai/llm/mixtral-8x7b.yaml deleted file mode 100644 index de788ac2568494..00000000000000 --- a/api/core/model_runtime/model_providers/leptonai/llm/mixtral-8x7b.yaml +++ /dev/null @@ -1,20 +0,0 @@ -model: mixtral-8x7b -label: - zh_Hans: mixtral-8x7b - en_US: mixtral-8x7b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 1024 diff --git a/api/core/model_runtime/model_providers/localai/__init__.py b/api/core/model_runtime/model_providers/localai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff 
--git a/api/core/model_runtime/model_providers/localai/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/localai/_assets/icon_l_en.svg deleted file mode 100644 index 251a37fdc7e124..00000000000000 --- a/api/core/model_runtime/model_providers/localai/_assets/icon_l_en.svg +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/localai/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/localai/_assets/icon_s_en.svg deleted file mode 100644 index 9dc6e6276ea166..00000000000000 --- a/api/core/model_runtime/model_providers/localai/_assets/icon_s_en.svg +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/localai/llm/__init__.py b/api/core/model_runtime/model_providers/localai/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/localai/llm/llm.py b/api/core/model_runtime/model_providers/localai/llm/llm.py deleted file mode 100644 index e7295355f627db..00000000000000 --- a/api/core/model_runtime/model_providers/localai/llm/llm.py +++ /dev/null @@ -1,674 +0,0 @@ -from collections.abc import Generator -from typing import cast - -from httpx import Timeout -from openai import ( - APIConnectionError, - APITimeoutError, - AuthenticationError, - ConflictError, - InternalServerError, - NotFoundError, - OpenAI, - PermissionDeniedError, - RateLimitError, - Stream, - UnprocessableEntityError, -) -from openai.types.chat import ChatCompletion, ChatCompletionChunk -from openai.types.chat.chat_completion_message import FunctionCall -from openai.types.completion import Completion -from yarl import URL - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - 
AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.utils import helper - - -class LocalAILanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - return self._generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool] | None = None, - ) -> int: - # tools is not supported yet - return self._num_tokens_from_messages(prompt_messages, tools=tools) - - def _num_tokens_from_messages(self, messages: list[PromptMessage], tools: list[PromptMessageTool]) -> int: - """ - Calculate num tokens for baichuan model - LocalAI does not supports - """ - - def tokens(text: str): - """ - We could not determine which tokenizer to use, cause the model is customized. - So we use gpt2 tokenizer to calculate the num tokens for convenience. 
- """ - return self._get_num_tokens_by_gpt2(text) - - tokens_per_message = 3 - tokens_per_name = 1 - - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - if key == "tool_calls": - for tool_call in value: - for t_key, t_value in tool_call.items(): - num_tokens += tokens(t_key) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += tokens(f_key) - num_tokens += tokens(f_value) - else: - num_tokens += tokens(t_key) - num_tokens += tokens(t_value) - if key == "function_call": - for t_key, t_value in value.items(): - num_tokens += tokens(t_key) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += tokens(f_key) - num_tokens += tokens(f_value) - else: - num_tokens += tokens(t_key) - num_tokens += tokens(t_value) - else: - num_tokens += tokens(str(value)) - - if key == "name": - num_tokens += tokens_per_name - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(tools) - - return num_tokens - - def _num_tokens_for_tools(self, tools: list[PromptMessageTool]) -> int: - """ - Calculate num tokens for tool calling - - :param encoding: encoding - :param tools: tools for tool calling - :return: number of tokens - """ - - def tokens(text: str): - return self._get_num_tokens_by_gpt2(text) - - num_tokens = 0 - for tool in tools: - # calculate num tokens for function object - num_tokens += tokens("name") - num_tokens += tokens(tool.name) - num_tokens += tokens("description") - num_tokens += tokens(tool.description) - parameters = tool.parameters - num_tokens += tokens("parameters") - num_tokens += tokens("type") - num_tokens += tokens(parameters.get("type")) - if "properties" in parameters: - num_tokens 
+= tokens("properties") - for key, value in parameters.get("properties").items(): - num_tokens += tokens(key) - for field_key, field_value in value.items(): - num_tokens += tokens(field_key) - if field_key == "enum": - for enum_field in field_value: - num_tokens += 3 - num_tokens += tokens(enum_field) - else: - num_tokens += tokens(field_key) - num_tokens += tokens(str(field_value)) - if "required" in parameters: - num_tokens += tokens("required") - for required_field in parameters["required"]: - num_tokens += 3 - num_tokens += tokens(required_field) - - return num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke( - model=model, - credentials=credentials, - prompt_messages=[UserPromptMessage(content="ping")], - model_parameters={ - "max_tokens": 10, - }, - stop=[], - stream=False, - ) - except Exception as ex: - raise CredentialsValidateFailedError(f"Invalid credentials {str(ex)}") - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - completion_model = None - if credentials["completion_type"] == "chat_completion": - completion_model = LLMMode.CHAT.value - elif credentials["completion_type"] == "completion": - completion_model = LLMMode.COMPLETION.value - else: - raise ValueError(f"Unknown completion type {credentials['completion_type']}") - - rules = [ - ParameterRule( - name="temperature", - type=ParameterType.FLOAT, - use_template="temperature", - label=I18nObject(zh_Hans="温度", en_US="Temperature"), - ), - ParameterRule( - name="top_p", - type=ParameterType.FLOAT, - use_template="top_p", - label=I18nObject(zh_Hans="Top P", en_US="Top P"), - ), - ParameterRule( - name="max_tokens", - type=ParameterType.INT, - use_template="max_tokens", - min=1, - max=2048, - default=512, - label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"), - ), - ] - - 
model_properties = ( - { - ModelPropertyKey.MODE: completion_model, - } - if completion_model - else {} - ) - - model_properties[ModelPropertyKey.CONTEXT_SIZE] = int(credentials.get("context_size", "2048")) - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.LLM, - model_properties=model_properties, - parameter_rules=rules, - ) - - return entity - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - kwargs = self._to_client_kwargs(credentials) - # init model client - client = OpenAI(**kwargs) - - model_name = model - completion_type = credentials["completion_type"] - - extra_model_kwargs = { - "timeout": 60, - } - if stop: - extra_model_kwargs["stop"] = stop - - if user: - extra_model_kwargs["user"] = user - - if tools and len(tools) > 0: - extra_model_kwargs["functions"] = [helper.dump_model(tool) for tool in tools] - - if completion_type == "chat_completion": - result = client.chat.completions.create( - messages=[self._convert_prompt_message_to_dict(m) for m in prompt_messages], - model=model_name, - stream=stream, - **model_parameters, - **extra_model_kwargs, - ) - elif completion_type == "completion": - result = client.completions.create( - prompt=self._convert_prompt_message_to_completion_prompts(prompt_messages), - model=model, - stream=stream, - **model_parameters, - **extra_model_kwargs, - ) - else: - raise ValueError(f"Unknown completion type {completion_type}") - - if stream: - if completion_type == "completion": - return self._handle_completion_generate_stream_response( - model=model, credentials=credentials, response=result, tools=tools, prompt_messages=prompt_messages - ) - return self._handle_chat_generate_stream_response( - 
model=model, credentials=credentials, response=result, tools=tools, prompt_messages=prompt_messages - ) - - if completion_type == "completion": - return self._handle_completion_generate_response( - model=model, credentials=credentials, response=result, prompt_messages=prompt_messages - ) - return self._handle_chat_generate_response( - model=model, credentials=credentials, response=result, tools=tools, prompt_messages=prompt_messages - ) - - def _to_client_kwargs(self, credentials: dict) -> dict: - """ - Convert invoke kwargs to client kwargs - - :param credentials: credentials dict - :return: client kwargs - """ - if not credentials["server_url"].endswith("/"): - credentials["server_url"] += "/" - - client_kwargs = { - "timeout": Timeout(315.0, read=300.0, write=10.0, connect=5.0), - "api_key": "1", - "base_url": str(URL(credentials["server_url"]) / "v1"), - } - - return client_kwargs - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict for OpenAI Compatibility API - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - raise ValueError("User message content must be str") - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls and len(message.tool_calls) > 0: - message_dict["function_call"] = { - "name": message.tool_calls[0].function.name, - "arguments": message.tool_calls[0].function.arguments, - } - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, ToolPromptMessage): - # copy from core/model_runtime/model_providers/anthropic/llm/llm.py - message = cast(ToolPromptMessage, message) - 
message_dict = { - "role": "user", - "content": [{"type": "tool_result", "tool_use_id": message.tool_call_id, "content": message.content}], - } - else: - raise ValueError(f"Unknown message type {type(message)}") - - return message_dict - - def _convert_prompt_message_to_completion_prompts(self, messages: list[PromptMessage]) -> str: - """ - Convert PromptMessage to completion prompts - """ - prompts = "" - for message in messages: - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - prompts += f"{message.content}\n" - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - prompts += f"{message.content}\n" - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - prompts += f"{message.content}\n" - else: - raise ValueError(f"Unknown message type {type(message)}") - - return prompts - - def _handle_completion_generate_response( - self, - model: str, - prompt_messages: list[PromptMessage], - credentials: dict, - response: Completion, - ) -> LLMResult: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: llm response - """ - if len(response.choices) == 0: - raise InvokeServerUnavailableError("Empty response") - - assistant_message = response.choices[0].text - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_message, tool_calls=[]) - - prompt_tokens = self._get_num_tokens_by_gpt2( - self._convert_prompt_message_to_completion_prompts(prompt_messages) - ) - completion_tokens = self._num_tokens_from_messages(messages=[assistant_prompt_message], tools=[]) - - usage = self._calc_response_usage( - model=model, credentials=credentials, prompt_tokens=prompt_tokens, completion_tokens=completion_tokens - ) - - response = 
LLMResult( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=response.system_fingerprint, - usage=usage, - message=assistant_prompt_message, - ) - - return response - - def _handle_chat_generate_response( - self, - model: str, - prompt_messages: list[PromptMessage], - credentials: dict, - response: ChatCompletion, - tools: list[PromptMessageTool], - ) -> LLMResult: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: llm response - """ - if len(response.choices) == 0: - raise InvokeServerUnavailableError("Empty response") - - assistant_message = response.choices[0].message - - # convert function call to tool call - function_calls = assistant_message.function_call - tool_calls = self._extract_response_tool_calls([function_calls] if function_calls else []) - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_message.content, tool_calls=tool_calls) - - prompt_tokens = self._num_tokens_from_messages(messages=prompt_messages, tools=tools) - completion_tokens = self._num_tokens_from_messages(messages=[assistant_prompt_message], tools=tools) - - usage = self._calc_response_usage( - model=model, credentials=credentials, prompt_tokens=prompt_tokens, completion_tokens=completion_tokens - ) - - response = LLMResult( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=response.system_fingerprint, - usage=usage, - message=assistant_prompt_message, - ) - - return response - - def _handle_completion_generate_stream_response( - self, - model: str, - prompt_messages: list[PromptMessage], - credentials: dict, - response: Stream[Completion], - tools: list[PromptMessageTool], - ) -> Generator: - full_response = "" - - for chunk in response: - if len(chunk.choices) == 0: - continue - - delta = chunk.choices[0] - - # 
transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=delta.text or "", tool_calls=[]) - - if delta.finish_reason is not None: - # temp_assistant_prompt_message is used to calculate usage - temp_assistant_prompt_message = AssistantPromptMessage(content=full_response, tool_calls=[]) - - prompt_tokens = self._get_num_tokens_by_gpt2( - self._convert_prompt_message_to_completion_prompts(prompt_messages) - ) - - completion_tokens = self._num_tokens_from_messages(messages=[temp_assistant_prompt_message], tools=[]) - - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=prompt_tokens, - completion_tokens=completion_tokens, - ) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - finish_reason=delta.finish_reason, - usage=usage, - ), - ) - else: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=0, - message=assistant_prompt_message, - ), - ) - - full_response += delta.text - - def _handle_chat_generate_stream_response( - self, - model: str, - prompt_messages: list[PromptMessage], - credentials: dict, - response: Stream[ChatCompletionChunk], - tools: list[PromptMessageTool], - ) -> Generator: - full_response = "" - - for chunk in response: - if len(chunk.choices) == 0: - continue - - delta = chunk.choices[0] - - if delta.finish_reason is None and (delta.delta.content is None or delta.delta.content == ""): - continue - - # check if there is a tool call in the response - function_calls = None - if delta.delta.function_call: - function_calls = [delta.delta.function_call] - - assistant_message_tool_calls = self._extract_response_tool_calls(function_calls or []) - - # transform assistant message to prompt message - 
assistant_prompt_message = AssistantPromptMessage( - content=delta.delta.content or "", tool_calls=assistant_message_tool_calls - ) - - if delta.finish_reason is not None: - # temp_assistant_prompt_message is used to calculate usage - temp_assistant_prompt_message = AssistantPromptMessage( - content=full_response, tool_calls=assistant_message_tool_calls - ) - - prompt_tokens = self._num_tokens_from_messages(messages=prompt_messages, tools=tools) - completion_tokens = self._num_tokens_from_messages(messages=[temp_assistant_prompt_message], tools=[]) - - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=prompt_tokens, - completion_tokens=completion_tokens, - ) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - finish_reason=delta.finish_reason, - usage=usage, - ), - ) - else: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - ), - ) - - full_response += delta.delta.content - - def _extract_response_tool_calls( - self, response_function_calls: list[FunctionCall] - ) -> list[AssistantPromptMessage.ToolCall]: - """ - Extract tool calls from response - - :param response_tool_calls: response tool calls - :return: list of tool calls - """ - tool_calls = [] - if response_function_calls: - for response_tool_call in response_function_calls: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call.name, arguments=response_tool_call.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall(id=0, type="function", function=function) - tool_calls.append(tool_call) - - return tool_calls - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - 
Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [ - APIConnectionError, - APITimeoutError, - ], - InvokeServerUnavailableError: [ - InternalServerError, - ConflictError, - NotFoundError, - UnprocessableEntityError, - PermissionDeniedError, - ], - InvokeRateLimitError: [RateLimitError], - InvokeAuthorizationError: [AuthenticationError], - InvokeBadRequestError: [ValueError], - } diff --git a/api/core/model_runtime/model_providers/localai/localai.py b/api/core/model_runtime/model_providers/localai/localai.py deleted file mode 100644 index 4ff898052b380d..00000000000000 --- a/api/core/model_runtime/model_providers/localai/localai.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class LocalAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/localai/localai.yaml b/api/core/model_runtime/model_providers/localai/localai.yaml deleted file mode 100644 index 864dd7a30c3a6f..00000000000000 --- a/api/core/model_runtime/model_providers/localai/localai.yaml +++ /dev/null @@ -1,72 +0,0 @@ -provider: localai -label: - en_US: LocalAI -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#F3F4F6" -help: - title: - en_US: How to deploy LocalAI - zh_Hans: 如何部署 LocalAI - url: - en_US: https://github.com/go-skynet/LocalAI -supported_model_types: - - llm - - text-embedding - - rerank - - speech2text -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - 
credential_form_schemas: - - variable: completion_type - show_on: - - variable: __model_type - value: llm - label: - en_US: Completion type - type: select - required: false - default: chat_completion - placeholder: - zh_Hans: 选择对话类型 - en_US: Select completion type - options: - - value: completion - label: - en_US: Completion - zh_Hans: 补全 - - value: chat_completion - label: - en_US: ChatCompletion - zh_Hans: 对话 - - variable: server_url - label: - zh_Hans: 服务器URL - en_US: Server url - type: text-input - required: true - placeholder: - zh_Hans: 在此输入LocalAI的服务器地址,如 http://192.168.1.100:8080 - en_US: Enter the url of your LocalAI, e.g. http://192.168.1.100:8080 - - variable: context_size - show_on: - - variable: __model_type - value: llm - label: - zh_Hans: 上下文大小 - en_US: Context size - placeholder: - zh_Hans: 输入上下文大小 - en_US: Enter context size - required: false - type: text-input diff --git a/api/core/model_runtime/model_providers/localai/rerank/__init__.py b/api/core/model_runtime/model_providers/localai/rerank/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/localai/rerank/rerank.py b/api/core/model_runtime/model_providers/localai/rerank/rerank.py deleted file mode 100644 index 2b0f53bc19e8ec..00000000000000 --- a/api/core/model_runtime/model_providers/localai/rerank/rerank.py +++ /dev/null @@ -1,134 +0,0 @@ -from json import dumps -from typing import Optional - -import httpx -from requests import post -from yarl import URL - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from 
core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - - -class LocalaiRerankModel(RerankModel): - """ - LocalAI rerank model API is compatible with Jina rerank model API. So just copy the JinaRerankModel class code here. - """ - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n documents to return - :param user: unique user id - :return: rerank result - """ - if len(docs) == 0: - return RerankResult(model=model, docs=[]) - - server_url = credentials["server_url"] - model_name = model - - if not server_url: - raise CredentialsValidateFailedError("server_url is required") - if not model_name: - raise CredentialsValidateFailedError("model_name is required") - - url = server_url - headers = {"Authorization": f"Bearer {credentials.get('api_key')}", "Content-Type": "application/json"} - - data = {"model": model_name, "query": query, "documents": docs, "top_n": top_n} - - try: - response = post(str(URL(url) / "rerank"), headers=headers, data=dumps(data), timeout=10) - response.raise_for_status() - results = response.json() - - rerank_documents = [] - for result in results["results"]: - rerank_document = RerankDocument( - index=result["index"], - text=result["document"]["text"], - score=result["relevance_score"], - ) - if score_threshold is None or result["relevance_score"] >= score_threshold: - rerank_documents.append(rerank_document) - - return RerankResult(model=model, docs=rerank_documents) - except httpx.HTTPStatusError as e: - raise InvokeServerUnavailableError(str(e)) - - def 
validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke( - model=model, - credentials=credentials, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. Its capital is Saipan.", - ], - score_threshold=0.8, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - """ - return { - InvokeConnectionError: [httpx.ConnectError], - InvokeServerUnavailableError: [httpx.RemoteProtocolError], - InvokeRateLimitError: [], - InvokeAuthorizationError: [httpx.HTTPStatusError], - InvokeBadRequestError: [httpx.RequestError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.RERANK, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={}, - ) - - return entity diff --git a/api/core/model_runtime/model_providers/localai/speech2text/__init__.py b/api/core/model_runtime/model_providers/localai/speech2text/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/localai/speech2text/speech2text.py b/api/core/model_runtime/model_providers/localai/speech2text/speech2text.py deleted file mode 100644 index 4b9d0f5bfefd33..00000000000000 --- 
a/api/core/model_runtime/model_providers/localai/speech2text/speech2text.py +++ /dev/null @@ -1,89 +0,0 @@ -from typing import IO, Optional - -from requests import Request, Session -from yarl import URL - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel - - -class LocalAISpeech2text(Speech2TextModel): - """ - Model class for Local AI Text to speech model. - """ - - def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :param user: unique user id - :return: text for given audio file - """ - - url = str(URL(credentials["server_url"]) / "v1/audio/transcriptions") - data = {"model": model} - files = {"file": file} - - session = Session() - request = Request("POST", url, data=data, files=files) - prepared_request = session.prepare_request(request) - response = session.send(prepared_request) - - if "error" in response.json(): - raise InvokeServerUnavailableError("Empty response") - - return response.json()["text"] - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - audio_file_path = self._get_demo_file_path() - - with open(audio_file_path, "rb") as audio_file: - self._invoke(model, credentials, audio_file) - except Exception as ex: - raise 
CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [InvokeBadRequestError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.SPEECH2TEXT, - model_properties={}, - parameter_rules=[], - ) - - return entity diff --git a/api/core/model_runtime/model_providers/localai/text_embedding/__init__.py b/api/core/model_runtime/model_providers/localai/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/minimax/__init__.py b/api/core/model_runtime/model_providers/minimax/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/minimax/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/minimax/_assets/icon_l_en.png deleted file mode 100644 index 5066b525f99c3f36d2f96b3b905aaead8cb263ef..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5767 zcmV;27I^82P)4ihG1%Y>eBeXACCiemV`emGcfXhK^^C@{4j)LiurJe z57IMRIGse1^XBn#@=X>@-U}3639uDo?_MV90Ji-oautAjXSX+bh3qw1BG=s>(#d#; z5xWJuYw)p{L_J1nl&j^8J>ci^s>t16E2X@g(Q;A&+V$3+fNilEjQB+WmM)3Ci&Oqc zGX67Y(q(|8fHpkWVPX!|f7i@)ig{)OgmZw{VkYDUXwY9`_m8pfDSW2{$(goruJ)#R z73JlOmXoVN)1Cl8SP2bS39v;qo4s55N#rk-*8a={Er)UL#HWZcHohZad2;m_)uB!Z zpibcTR_=(m{RPG%krHp9ohD*ZL$QfWqX6aQjFyuMP|d4*V5SS;JNmF`NkN}7THplN zz8JQI+yn4-0f1g;(bAw1&<^*CRY)nZpGBl(jbrca 
zqfDo~oau6M0h*V*3{mm`&C6F>@&L`tS6cD_&C6F>zEqqc@1Cz6T)#f(_`6L*aI3;B zjndkmCJ?uLC9*p5?z=V8+e*pcCMASJUQd*Mw>XZ#DxY;a&E$jux@*EeRko~Kcdq32 zWG$fwkb6wFSmAxn*^g~ovwiN)*>q&e@>EOY-ug0IYn#p_pUi|PCR13Fto_b?mv57E zliS&gYe%$yc{wpTA%GsNe4@Z++MSY|ZzIDq34CP%azP2OMf;IAUp{_{?*-b1ee?)t25CjzvK8^ z9C*EU^3b6}Q5r;g_gmc?)%T8$4s~z(^}V#TG~hUnCWK#8_XU3(l#TRlv$nR@i~W98 zPHh(y$T;qItmpV~1GLnR283=r%p@yeQgi^E&uNTpKpuTE_T2|?o+r8den;KozmyF! zda6MhZ7m6Sz5F4PP8l?N1PuTGq7R?oM%Dn3_W+ZZVvHW4$1nabDOX1a18F%#NcL0I z3!*^W7TTJEf{Hg{vDTKpW%uqqaGtJv>veDc6&R0+Pm$Vni_9BCGPcj>2j3wW8!s~X3fvq z+Z&sztGA6!+x!urb^;VaJdtwNVxV6F5R;b0uQh%0P8=&BS=bbhr~ZoO-oYo`r~4du zsYf5LGCZsf?IcJVs_aVx-IqY1CTJM)3EhC@%n0;=_xNuT&FA0 zk1}*W42*n?{l6o3g5`vDb;YL`W&y^B)u|nhBj2v9tbD5nq{j_VPeSupI?c|W{}0j< zzb)JQtKGY6AFV3zn#h?;wUDDkc;$tAY9Clddnie#P>Kq5Xe%*Wp^i&FU$dP5VXrUU zDIdJNh@{p6?Rh@5X(5a0XVXn}8;KfH1GOWBUcxxL6+g#jmRGSWyfeB$GttHYAw)a%0F=`em>$=vZZqOx(!@l} zi|_SlSS871QfJI_0VdMdQ8tLSn=tvFQO6QC3u54|$IlCK4yBd6u~_Ukne&S2IN$B~ zS*j+DkaR(?q9WSS+WLO)>#V4#m@b6&2rS=?*iw8vDN|lBr!8T) zCQ?L`%0m*AHJRwj$Z@N%%kDSSNJ8To6Yld!ax?x}ldkC}3Ia4hQ{Xl{T3A?Fp>|YO z*38CLLd!}uI77T%#?|0KlYA#wRdo&xuc&JcFec{cdcl2g8{EY;9*^&4+^UN)dX;qS zNr9qf5iWCf2Jo)*Sdwzwn>uLYn!c6b6Q=;s6;NIuU{hHJ4;rfP?zS#fzIA!|_8GXR z*Wl#m0mMp~G?U=J-nDC&@_}>iSDvUpwx|IN;?EWEac@@|z!BnMY@drmCJl=yACsmr zhkb{J|0xFrs^$^8;j55IHY`fFM_x>A;n7D z)>aQxzaKy`oo?4Se^N;60R%=hG^$0>WpWKhK@Es<+)mYo9xS-vG;N(B7+1Dl%Ao@V`N(u}LVG<2^Ku zF9z|x5B0vlN11IEjg|m27Y^1A0;Voa6CYqQetbm#ByGWsT+-Ioyk@YvgF49)u(p+p zG0Fu#mTg&b3!KC!vFQ#nZuDmOT`x#Yd?p5YrvJDFD3T5XXDEI_Sq;O;`3B4?2>QdE z0t>5>V|FGDH?jgyQzc?3~3lz-InU-+4WiNpGEP(k7Amlj^!#@Wmj^#T8 zmtrz}#Iv)T3C8%!!w0Q3oXP#rf&iB+M%x!gBB`+hHB?qsR?V1xC#>>iN1lLYFpb|3 z3XL07`}X&9kbPioB7{OZcS_!^d(1q?j>5`Wh+V5|&2{)8L#Q($rKHsj6yv z6T!=3_=pEPnhz$uW!2+om;}K{1lg*WHACD7P=?&pA9Goh;fUy^&!_DLz4N?dIC{9H zWk2>gp-}ap6%<#QPSaE{pJ!(oZeX1Dm6wm5={6{xQmN9rOlf_lXt#X`bxnX$yMtwz17+H`!}S=W{7^Ls@%x_Q;> zl*To%4vN}R*5ClJ^lKctOHp5#gJW>)YF)S0I)Fb>9|PSVpqy>QOxX<;IvqOxdgO{R zgG3UuWhQ{BLyNA8Yw4Yns*4jNx3KPv6s4!qAjRKw9IklQ^If)JWP~B+pMumev>f&L 
zl`0*1L9Rf*=tOz}=JP3&_Eg+3uBWUJgaKRxai#K8vjvx<+6-i_fVrM!IyDOpFi2GiYK1l;llkA0+56eDd85k2pFB}R zeOOPlw6uJR0sIT-mu4Ed_fB~b1kJ+P&&qBmF=6)LdtIN;1{FAEk{NIY7@b{QTv1b3 zSEoSAN=vm`0Q4Ftyh})7jZueigbq?0F1Exr=H$H74W@_!_a z3jqq{i=J|-4?#^x3gsKRgg&~#_F*190~CJLyBug9zpg87FTRgW^%^1{{PSz0(desF zrc8nF8NLepE`=q4yCUOgX=z7C^N08-4ZATEs#=cg9}gPrBA$7h%mWiAPW%}8_#E{$ zAQ?Q4o=zZ7uu#&xMGe7ruP#D7QP!TA-0*CQ@-pnA^@-Hsx9vdSyAXq?g0LBMibV)> zJlCYsp5Viog&^jDu*HX6oh_UCmMt%@hP7r_$qYat>=p!jZ#z!pEXMiQNcpS?-;wq| zAUXYtG6@eQw$SA=E;P6iQeC5(h_{AgsoAx)wJX)yR#oVz*$PJU($taVsJdua#lb%{ zD%s^xlu7NL1Sv2NuVhl9v7>5##ynwItfd%~GE!+yA-6;#zEoz}<&k1pqEj>YCeSa7 zP}9N49u7hT?ZbeqgsnP=4jclV@wPIfl8gk$A|M8;;7yX6CY42dy3fv$`z}7*D zn{GRtHfHXT0ByvSi`N41MZnv93{G^{KyAe1&UUX?dj(*0+*O~Td4%xjXj|pyyWE$= z5$i-8v$Uh7`G`f?oh>b!OG?Jh*L4-!WF{gPa(pZn%cVU>?Cmi=&HG8pN&Oi&Wed#7 zUmJ=#<8(5lwmwZe-`&i(_hm`WM3UW`>6wN!Jv(smLx<+H7F2xrg!;|mToe_l3{1;_ zi?74sbM^;=pTC|ojD_m444yP{etB(e)ry9OhV`~-dXz`y6C!tgb~^r?dc;BaY|XIJ zsgjbCWLqC@0APz%R#v`*lfSCkW*{BATh2iM(MVILPHo(@Y15AqocmQD)ngxf+A?b> zJsQ0iaU9natNsKlecf$?@D39u)T~7xx1~(;1-0GNM^mTei9~#rN+Aw%P(NgEx6KkcMM(ggTqsTJ5qr+9L{nYE)weHiS zIbJW;>saQwrEpJXnlL~EH7>J{J@l%Y>DIDk?(<7g%C&G=36ACE z6t(@wb3WhV#W22B;$M+ot}@Zm6rJ@FDNTuD?$TF(bKs zX$lz-bSC%kcGJ~ZKfUBPhQSo85SS3Z0jXEbxuzUl1rg0p~z?| zC^5Axs~B)P2oH53)9mNZU9cek;;y`$kPK54e~$a758 zl(yspp3y49#)D?GBgps9-BqH;Tq&ySb~893^+>FGElsUt>90ouR;ig3A)(0`2)4j* zsywo`j~)5Ksl1FL88M>1&`bNGaq)>@Ts?O%n@jNk^zd~KlVx18v54>wB1iv`ZKa1H ztjf#jDkJvl8@BkBSlUUw4iEFU(4sqJCN!U<9>7AB)dcowAyT~W1)@!RDKDc|Mm+jx znstjmV3tmFbmubU(yOIL7E}KVvduOXmD`V5zK002ovPDHLk FV1nFr8|eT5 diff --git a/api/core/model_runtime/model_providers/minimax/_assets/icon_s_en.png b/api/core/model_runtime/model_providers/minimax/_assets/icon_s_en.png deleted file mode 100644 index 30c71e9bd383ca475e64ef6843b2bfedba6905f4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2007 zcmV;|2PpW7P)}@2+j^P#drfu?WT_Ev9K`2+$N4l$1ouqd!d5q^;T?AyU*tN@<%YQia<} 
zRV&dFl^>NVHA+iW+E`IQk?N{V6a%SCqEa~l93HkD8)F{E57r*ve#&1suatRvdsSrTX(nP1?Ho& zqxUu}WY64Tv|pt(r=Z5+*y_4AZfrE!zIRp(=)e7u<=W7*qReV$6HX&eA>?~&pWP3_rRIkp=FFU&F{OA(ive(mkFstVeV(9qf{0e zDQMo?X_(r^XERGEOXO^Tr!0+CRe|G%B9!rUV@047xmaa*6i>dxkXUfKV9bzf`PB6J zjK4!JmH8yjTA!Q^__Q%+z^4uC;{>F-v7pB|(P&}9ZE3GhZlNtceZr82Tf*d)#|Gm` zbHnjBl6;sZ^UAaaoLlgtxED1m&xJ($MeEUo*><4l=Mq^Dc%rv z)!-_l&031c;$n`Z{987hkUD9h8#kT_GWV1Q1gqZ1_Iiu-V+h%65kHEIq9h_qS5lcG z|BFxVL*N^xJVLhK7IMc1zQDeE3Rz<yo`3IwSl^Uq;JoM8FzN3a z;=7QHIZu=GU3u*Ru zXbXC<+K;6}!jon6{rbq1256Efgi3LHkv%z5(e<0|G3B?xs53AqAeak!J1&JNkBFO) z1=lIVN)L(XBhlRlx+vuw1Jmt<@?eWHkKANV*~sq4;nV+WdY zE?gvYW!-F>5pYH`!DX@@2#5#0+vw0Nz_QJliJNY)$qrmS73GoHsp1w!T^h0bpzJBI z(z7&_^gp(wyc(3W*&{>iieK`Ldm`;Qr;7IPN?-8@pqAqq$-Ga|HQ2ui)|JKc$3r>q zYEH>_);DADH2Hj-n%eEW7?5$lRtnC~jLr~vWv03_LW|_VP4lUIIFY`@H^NPdG2BZ^ zjZDtH`#(MS=$UxVivd#oy@KJ2^a|GD6jH8Pu-C?aer~B#54M1UFE+0sy573Tr-p)Z+SuS^$&lmf}F$!-qV!q`W`WwA5 za0Ze={7>!Ue<}i{R`REL;EM6Fi z%U2pw7dr%{CrWJ++jZ1fAghZ)7O|;>J*cgD@nmmw8gO%SV(ftjp21?ghP7FXLMnzi zJIV46MkD4Rx3qYas{z$*>qgGj{`VbhZ33&k$ua(~oRgnRL-kGL{Z2ZRTs|;JZ;i`? 
z;o>iVV6$d6e{A2r(`ci-;Q02dPTEqTLVGMe+%wG09apSTW^4eXp>;_=x!X~4GakPQ zZ+b|^yRjO+bAT=P*HukTIfY?vY6^H)R@^JAlCrq{w?oFEpL)hTg?{uuHS=rO?wyK5 zKX^OZb&|((*Tlqd;SAf=6%%6;>{O=CMcAAjmYY=+3c7L9As1=c6Ms5yxqE}LkUi>@ z-kP?**3>eYtRk+L;*c7`VTs@zEE_FKE|vB7cPE5bAvBhuO>)ZkDV&$aoQ=pjjAmMj z9hr!7VREqJ!!kvFLBfvMt>9LD*n@gRmOUfkxUIz0VuCoD9k0iQ`#!j1SBt!KC4xWo zhP-qu{^|QUVs9rRR3$X6MNCI~()@+Q`!l_|cZg;Ic`YM@gk~?I8aI@_Jz}Y-vHTN4 p&Rd25T6K}BF+VJC4s-a7;XSA0PRj&!3y=T+002ovPDHLkV1hX)(k1`^ diff --git a/api/core/model_runtime/model_providers/minimax/llm/__init__.py b/api/core/model_runtime/model_providers/minimax/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/minimax/llm/abab5-chat.yaml b/api/core/model_runtime/model_providers/minimax/llm/abab5-chat.yaml deleted file mode 100644 index 2c1f79e2b77052..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/abab5-chat.yaml +++ /dev/null @@ -1,38 +0,0 @@ -model: abab5-chat -label: - en_US: Abab5-Chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 6144 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - required: true - default: 6144 - min: 1 - max: 6144 - - name: mask_sensitive_info - type: boolean - default: true - label: - zh_Hans: 隐私保护 - en_US: Moderate - help: - zh_Hans: 对输出中易涉及隐私问题的文本信息进行打码,目前包括但不限于邮箱、域名、链接、证件号、家庭住址等,默认true,即开启打码 - en_US: Mask the sensitive info of the generated content, such as email/domain/link/address/phone/id.. 
- - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0.015' - output: '0.015' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/minimax/llm/abab5.5-chat.yaml b/api/core/model_runtime/model_providers/minimax/llm/abab5.5-chat.yaml deleted file mode 100644 index 6d29be0d0e8708..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/abab5.5-chat.yaml +++ /dev/null @@ -1,53 +0,0 @@ -model: abab5.5-chat -label: - en_US: Abab5.5-Chat -model_type: llm -features: - - agent-thought - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 16384 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.01 - max: 1 - default: 0.9 - - name: top_p - use_template: top_p - min: 0.01 - max: 1 - default: 0.95 - - name: max_tokens - use_template: max_tokens - required: true - default: 6144 - min: 1 - max: 16384 - - name: mask_sensitive_info - type: boolean - default: true - label: - zh_Hans: 隐私保护 - en_US: Moderate - help: - zh_Hans: 对输出中易涉及隐私问题的文本信息进行打码,目前包括但不限于邮箱、域名、链接、证件号、家庭住址等,默认true,即开启打码 - en_US: Mask the sensitive info of the generated content, such as email/domain/link/address/phone/id.. 
- - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: plugin_web_search - required: false - default: false - type: boolean - label: - en_US: Enable Web Search - zh_Hans: 开启网页搜索 -pricing: - input: '0.015' - output: '0.015' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/minimax/llm/abab5.5s-chat.yaml b/api/core/model_runtime/model_providers/minimax/llm/abab5.5s-chat.yaml deleted file mode 100644 index aa42bb57395d68..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/abab5.5s-chat.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: abab5.5s-chat -label: - en_US: Abab5.5s-Chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.01 - max: 1 - default: 0.9 - - name: top_p - use_template: top_p - min: 0.01 - max: 1 - default: 0.95 - - name: max_tokens - use_template: max_tokens - required: true - default: 3072 - min: 1 - max: 8192 - - name: mask_sensitive_info - type: boolean - default: true - label: - zh_Hans: 隐私保护 - en_US: Moderate - help: - zh_Hans: 对输出中易涉及隐私问题的文本信息进行打码,目前包括但不限于邮箱、域名、链接、证件号、家庭住址等,默认true,即开启打码 - en_US: Mask the sensitive info of the generated content, such as email/domain/link/address/phone/id.. 
- - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0.005' - output: '0.005' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/minimax/llm/abab6-chat.yaml b/api/core/model_runtime/model_providers/minimax/llm/abab6-chat.yaml deleted file mode 100644 index 9188b6b53fa08c..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/abab6-chat.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: abab6-chat -label: - en_US: Abab6-Chat -model_type: llm -features: - - agent-thought - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.01 - max: 1 - default: 0.1 - - name: top_p - use_template: top_p - min: 0.01 - max: 1 - default: 0.9 - - name: max_tokens - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 32768 - - name: mask_sensitive_info - type: boolean - default: true - label: - zh_Hans: 隐私保护 - en_US: Moderate - help: - zh_Hans: 对输出中易涉及隐私问题的文本信息进行打码,目前包括但不限于邮箱、域名、链接、证件号、家庭住址等,默认true,即开启打码 - en_US: Mask the sensitive info of the generated content, such as email/domain/link/address/phone/id.. 
- - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0.1' - output: '0.1' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/minimax/llm/abab6.5-chat.yaml b/api/core/model_runtime/model_providers/minimax/llm/abab6.5-chat.yaml deleted file mode 100644 index 5d717d5f8ccd9d..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/abab6.5-chat.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: abab6.5-chat -label: - en_US: Abab6.5-Chat -model_type: llm -features: - - agent-thought - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.01 - max: 1 - default: 0.1 - - name: top_p - use_template: top_p - min: 0.01 - max: 1 - default: 0.95 - - name: max_tokens - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 8192 - - name: mask_sensitive_info - type: boolean - default: true - label: - zh_Hans: 隐私保护 - en_US: Moderate - help: - zh_Hans: 对输出中易涉及隐私问题的文本信息进行打码,目前包括但不限于邮箱、域名、链接、证件号、家庭住址等,默认true,即开启打码 - en_US: Mask the sensitive info of the generated content, such as email/domain/link/address/phone/id.. 
- - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0.03' - output: '0.03' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/minimax/llm/abab6.5s-chat.yaml b/api/core/model_runtime/model_providers/minimax/llm/abab6.5s-chat.yaml deleted file mode 100644 index 4631fe67e44cb7..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/abab6.5s-chat.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: abab6.5s-chat -label: - en_US: Abab6.5s-Chat -model_type: llm -features: - - agent-thought - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 245760 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.01 - max: 1 - default: 0.1 - - name: top_p - use_template: top_p - min: 0.01 - max: 1 - default: 0.95 - - name: max_tokens - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 245760 - - name: mask_sensitive_info - type: boolean - default: true - label: - zh_Hans: 隐私保护 - en_US: Moderate - help: - zh_Hans: 对输出中易涉及隐私问题的文本信息进行打码,目前包括但不限于邮箱、域名、链接、证件号、家庭住址等,默认true,即开启打码 - en_US: Mask the sensitive info of the generated content, such as email/domain/link/address/phone/id.. 
- - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0.01' - output: '0.01' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/minimax/llm/chat_completion.py b/api/core/model_runtime/model_providers/minimax/llm/chat_completion.py deleted file mode 100644 index 88cc0e8e0f32d0..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/chat_completion.py +++ /dev/null @@ -1,166 +0,0 @@ -from collections.abc import Generator -from json import dumps, loads -from typing import Any, Union - -from requests import Response, post - -from core.model_runtime.model_providers.minimax.llm.errors import ( - BadRequestError, - InsufficientAccountBalanceError, - InternalServerError, - InvalidAPIKeyError, - InvalidAuthenticationError, - RateLimitReachedError, -) -from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage - - -class MinimaxChatCompletion: - """ - Minimax Chat Completion API - """ - - def generate( - self, - model: str, - api_key: str, - group_id: str, - prompt_messages: list[MinimaxMessage], - model_parameters: dict, - tools: list[dict[str, Any]], - stop: list[str] | None, - stream: bool, - user: str, - ) -> Union[MinimaxMessage, Generator[MinimaxMessage, None, None]]: - """ - generate chat completion - """ - if not api_key or not group_id: - raise InvalidAPIKeyError("Invalid API key or group ID") - - url = f"https://api.minimax.chat/v1/text/chatcompletion?GroupId={group_id}" - - extra_kwargs = {} - - if "max_tokens" in model_parameters and type(model_parameters["max_tokens"]) == int: - extra_kwargs["tokens_to_generate"] = model_parameters["max_tokens"] - - if "temperature" in model_parameters and type(model_parameters["temperature"]) == float: - extra_kwargs["temperature"] = model_parameters["temperature"] - - if "top_p" in model_parameters and type(model_parameters["top_p"]) == float: - extra_kwargs["top_p"] = 
model_parameters["top_p"] - - prompt = "你是一个什么都懂的专家" - - role_meta = {"user_name": "我", "bot_name": "专家"} - - # check if there is a system message - if len(prompt_messages) == 0: - raise BadRequestError("At least one message is required") - - if prompt_messages[0].role == MinimaxMessage.Role.SYSTEM.value: - if prompt_messages[0].content: - prompt = prompt_messages[0].content - prompt_messages = prompt_messages[1:] - - # check if there is a user message - if len(prompt_messages) == 0: - raise BadRequestError("At least one user message is required") - - messages = [ - { - "sender_type": message.role, - "text": message.content, - } - for message in prompt_messages - ] - - headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"} - - body = { - "model": model, - "messages": messages, - "prompt": prompt, - "role_meta": role_meta, - "stream": stream, - **extra_kwargs, - } - - try: - response = post(url=url, data=dumps(body), headers=headers, stream=stream, timeout=(10, 300)) - except Exception as e: - raise InternalServerError(e) - - if response.status_code != 200: - raise InternalServerError(response.text) - - if stream: - return self._handle_stream_chat_generate_response(response) - return self._handle_chat_generate_response(response) - - def _handle_error(self, code: int, msg: str): - if code in {1000, 1001, 1013, 1027}: - raise InternalServerError(msg) - elif code in {1002, 1039}: - raise RateLimitReachedError(msg) - elif code == 1004: - raise InvalidAuthenticationError(msg) - elif code == 1008: - raise InsufficientAccountBalanceError(msg) - elif code == 2013: - raise BadRequestError(msg) - else: - raise InternalServerError(msg) - - def _handle_chat_generate_response(self, response: Response) -> MinimaxMessage: - """ - handle chat generate response - """ - response = response.json() - if "base_resp" in response and response["base_resp"]["status_code"] != 0: - code = response["base_resp"]["status_code"] - msg = 
response["base_resp"]["status_msg"] - self._handle_error(code, msg) - - message = MinimaxMessage(content=response["reply"], role=MinimaxMessage.Role.ASSISTANT.value) - message.usage = { - "prompt_tokens": 0, - "completion_tokens": response["usage"]["total_tokens"], - "total_tokens": response["usage"]["total_tokens"], - } - message.stop_reason = response["choices"][0]["finish_reason"] - return message - - def _handle_stream_chat_generate_response(self, response: Response) -> Generator[MinimaxMessage, None, None]: - """ - handle stream chat generate response - """ - for line in response.iter_lines(): - if not line: - continue - line: str = line.decode("utf-8") - if line.startswith("data: "): - line = line[6:].strip() - data = loads(line) - - if "base_resp" in data and data["base_resp"]["status_code"] != 0: - code = data["base_resp"]["status_code"] - msg = data["base_resp"]["status_msg"] - self._handle_error(code, msg) - - if data["reply"]: - total_tokens = data["usage"]["total_tokens"] - message = MinimaxMessage(role=MinimaxMessage.Role.ASSISTANT.value, content="") - message.usage = {"prompt_tokens": 0, "completion_tokens": total_tokens, "total_tokens": total_tokens} - message.stop_reason = data["choices"][0]["finish_reason"] - yield message - return - - choices = data.get("choices", []) - if len(choices) == 0: - continue - - for choice in choices: - message = choice["delta"] - yield MinimaxMessage(content=message, role=MinimaxMessage.Role.ASSISTANT.value) diff --git a/api/core/model_runtime/model_providers/minimax/llm/chat_completion_pro.py b/api/core/model_runtime/model_providers/minimax/llm/chat_completion_pro.py deleted file mode 100644 index 8b8fdbb6bdf558..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/chat_completion_pro.py +++ /dev/null @@ -1,191 +0,0 @@ -from collections.abc import Generator -from json import dumps, loads -from typing import Any, Union - -from requests import Response, post - -from 
core.model_runtime.model_providers.minimax.llm.errors import ( - BadRequestError, - InsufficientAccountBalanceError, - InternalServerError, - InvalidAPIKeyError, - InvalidAuthenticationError, - RateLimitReachedError, -) -from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage - - -class MinimaxChatCompletionPro: - """ - Minimax Chat Completion Pro API, supports function calling - however, we do not have enough time and energy to implement it, but the parameters are reserved - """ - - def generate( - self, - model: str, - api_key: str, - group_id: str, - prompt_messages: list[MinimaxMessage], - model_parameters: dict, - tools: list[dict[str, Any]], - stop: list[str] | None, - stream: bool, - user: str, - ) -> Union[MinimaxMessage, Generator[MinimaxMessage, None, None]]: - """ - generate chat completion - """ - if not api_key or not group_id: - raise InvalidAPIKeyError("Invalid API key or group ID") - - url = f"https://api.minimax.chat/v1/text/chatcompletion_pro?GroupId={group_id}" - - extra_kwargs = {} - - if "max_tokens" in model_parameters and type(model_parameters["max_tokens"]) == int: - extra_kwargs["tokens_to_generate"] = model_parameters["max_tokens"] - - if "temperature" in model_parameters and type(model_parameters["temperature"]) == float: - extra_kwargs["temperature"] = model_parameters["temperature"] - - if "top_p" in model_parameters and type(model_parameters["top_p"]) == float: - extra_kwargs["top_p"] = model_parameters["top_p"] - - if "mask_sensitive_info" in model_parameters and type(model_parameters["mask_sensitive_info"]) == bool: - extra_kwargs["mask_sensitive_info"] = model_parameters["mask_sensitive_info"] - - if model_parameters.get("plugin_web_search"): - extra_kwargs["plugins"] = ["plugin_web_search"] - - bot_setting = {"bot_name": "专家", "content": "你是一个什么都懂的专家"} - - reply_constraints = {"sender_type": "BOT", "sender_name": "专家"} - - # check if there is a system message - if len(prompt_messages) == 0: - raise 
BadRequestError("At least one message is required") - - if prompt_messages[0].role == MinimaxMessage.Role.SYSTEM.value: - if prompt_messages[0].content: - bot_setting["content"] = prompt_messages[0].content - prompt_messages = prompt_messages[1:] - - # check if there is a user message - if len(prompt_messages) == 0: - raise BadRequestError("At least one user message is required") - - messages = [message.to_dict() for message in prompt_messages] - - headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"} - - body = { - "model": model, - "messages": messages, - "bot_setting": [bot_setting], - "reply_constraints": reply_constraints, - "stream": stream, - **extra_kwargs, - } - - if tools: - body["functions"] = tools - body["function_call"] = {"type": "auto"} - - try: - response = post(url=url, data=dumps(body), headers=headers, stream=stream, timeout=(10, 300)) - except Exception as e: - raise InternalServerError(e) - - if response.status_code != 200: - raise InternalServerError(response.text) - - if stream: - return self._handle_stream_chat_generate_response(response) - return self._handle_chat_generate_response(response) - - def _handle_error(self, code: int, msg: str): - if code in {1000, 1001, 1013, 1027}: - raise InternalServerError(msg) - elif code in {1002, 1039}: - raise RateLimitReachedError(msg) - elif code == 1004: - raise InvalidAuthenticationError(msg) - elif code == 1008: - raise InsufficientAccountBalanceError(msg) - elif code == 2013: - raise BadRequestError(msg) - else: - raise InternalServerError(msg) - - def _handle_chat_generate_response(self, response: Response) -> MinimaxMessage: - """ - handle chat generate response - """ - response = response.json() - if "base_resp" in response and response["base_resp"]["status_code"] != 0: - code = response["base_resp"]["status_code"] - msg = response["base_resp"]["status_msg"] - self._handle_error(code, msg) - - message = MinimaxMessage(content=response["reply"], 
role=MinimaxMessage.Role.ASSISTANT.value) - message.usage = { - "prompt_tokens": 0, - "completion_tokens": response["usage"]["total_tokens"], - "total_tokens": response["usage"]["total_tokens"], - } - message.stop_reason = response["choices"][0]["finish_reason"] - return message - - def _handle_stream_chat_generate_response(self, response: Response) -> Generator[MinimaxMessage, None, None]: - """ - handle stream chat generate response - """ - for line in response.iter_lines(): - if not line: - continue - line: str = line.decode("utf-8") - if line.startswith("data: "): - line = line[6:].strip() - data = loads(line) - - if "base_resp" in data and data["base_resp"]["status_code"] != 0: - code = data["base_resp"]["status_code"] - msg = data["base_resp"]["status_msg"] - self._handle_error(code, msg) - - # final chunk - if data["reply"] or data.get("usage"): - total_tokens = data["usage"]["total_tokens"] - minimax_message = MinimaxMessage(role=MinimaxMessage.Role.ASSISTANT.value, content="") - minimax_message.usage = { - "prompt_tokens": 0, - "completion_tokens": total_tokens, - "total_tokens": total_tokens, - } - minimax_message.stop_reason = data["choices"][0]["finish_reason"] - - choices = data.get("choices", []) - if len(choices) > 0: - for choice in choices: - message = choice["messages"][0] - # append function_call message - if "function_call" in message: - function_call_message = MinimaxMessage(content="", role=MinimaxMessage.Role.ASSISTANT.value) - function_call_message.function_call = message["function_call"] - yield function_call_message - - yield minimax_message - return - - # partial chunk - choices = data.get("choices", []) - if len(choices) == 0: - continue - - for choice in choices: - message = choice["messages"][0] - # append text message - if "text" in message: - minimax_message = MinimaxMessage(content=message["text"], role=MinimaxMessage.Role.ASSISTANT.value) - yield minimax_message diff --git 
a/api/core/model_runtime/model_providers/minimax/llm/errors.py b/api/core/model_runtime/model_providers/minimax/llm/errors.py deleted file mode 100644 index 309b5cf413bd54..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/errors.py +++ /dev/null @@ -1,22 +0,0 @@ -class InvalidAuthenticationError(Exception): - pass - - -class InvalidAPIKeyError(Exception): - pass - - -class RateLimitReachedError(Exception): - pass - - -class InsufficientAccountBalanceError(Exception): - pass - - -class InternalServerError(Exception): - pass - - -class BadRequestError(Exception): - pass diff --git a/api/core/model_runtime/model_providers/minimax/llm/llm.py b/api/core/model_runtime/model_providers/minimax/llm/llm.py deleted file mode 100644 index 4250c40cfb94b1..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/llm.py +++ /dev/null @@ -1,271 +0,0 @@ -from collections.abc import Generator - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.minimax.llm.chat_completion import MinimaxChatCompletion -from core.model_runtime.model_providers.minimax.llm.chat_completion_pro import MinimaxChatCompletionPro -from core.model_runtime.model_providers.minimax.llm.errors import ( - BadRequestError, - InsufficientAccountBalanceError, - InternalServerError, - InvalidAPIKeyError, - InvalidAuthenticationError, - 
RateLimitReachedError, -) -from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage - - -class MinimaxLargeLanguageModel(LargeLanguageModel): - model_apis = { - "abab6.5s-chat": MinimaxChatCompletionPro, - "abab6.5-chat": MinimaxChatCompletionPro, - "abab6-chat": MinimaxChatCompletionPro, - "abab5.5s-chat": MinimaxChatCompletionPro, - "abab5.5-chat": MinimaxChatCompletionPro, - "abab5-chat": MinimaxChatCompletion, - } - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - return self._generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate credentials for Baichuan model - """ - if model not in self.model_apis: - raise CredentialsValidateFailedError(f"Invalid model: {model}") - - if not credentials.get("minimax_api_key"): - raise CredentialsValidateFailedError("Invalid API key") - - if not credentials.get("minimax_group_id"): - raise CredentialsValidateFailedError("Invalid group ID") - - # ping - instance = MinimaxChatCompletionPro() - try: - instance.generate( - model=model, - api_key=credentials["minimax_api_key"], - group_id=credentials["minimax_group_id"], - prompt_messages=[MinimaxMessage(content="ping", role="USER")], - model_parameters={}, - tools=[], - stop=[], - stream=False, - user="", - ) - except (InvalidAuthenticationError, InsufficientAccountBalanceError) as e: - raise CredentialsValidateFailedError(f"Invalid API key: {e}") - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool] | None = None, - ) -> int: - return self._num_tokens_from_messages(prompt_messages, tools) - - def 
_num_tokens_from_messages(self, messages: list[PromptMessage], tools: list[PromptMessageTool]) -> int: - """ - Calculate num tokens for minimax model - - not like ChatGLM, Minimax has a special prompt structure, we could not find a proper way - to calculate the num tokens, so we use str() to convert the prompt to string - - Minimax does not provide their own tokenizer of adab5.5 and abab5 model - therefore, we use gpt2 tokenizer instead - """ - messages_dict = [self._convert_prompt_message_to_minimax_message(m).to_dict() for m in messages] - return self._get_num_tokens_by_gpt2(str(messages_dict)) - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - """ - use MinimaxChatCompletionPro as the type of client, anyway, MinimaxChatCompletion has the same interface - """ - client: MinimaxChatCompletionPro = self.model_apis[model]() - - if tools: - tools = [ - {"name": tool.name, "description": tool.description, "parameters": tool.parameters} for tool in tools - ] - - response = client.generate( - model=model, - api_key=credentials["minimax_api_key"], - group_id=credentials["minimax_group_id"], - prompt_messages=[self._convert_prompt_message_to_minimax_message(message) for message in prompt_messages], - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - if stream: - return self._handle_chat_generate_stream_response( - model=model, prompt_messages=prompt_messages, credentials=credentials, response=response - ) - return self._handle_chat_generate_response( - model=model, prompt_messages=prompt_messages, credentials=credentials, response=response - ) - - def _convert_prompt_message_to_minimax_message(self, prompt_message: PromptMessage) -> MinimaxMessage: - """ - convert PromptMessage to 
MinimaxMessage so that we can use MinimaxChatCompletionPro interface - """ - if isinstance(prompt_message, SystemPromptMessage): - return MinimaxMessage(role=MinimaxMessage.Role.SYSTEM.value, content=prompt_message.content) - elif isinstance(prompt_message, UserPromptMessage): - return MinimaxMessage(role=MinimaxMessage.Role.USER.value, content=prompt_message.content) - elif isinstance(prompt_message, AssistantPromptMessage): - if prompt_message.tool_calls: - message = MinimaxMessage(role=MinimaxMessage.Role.ASSISTANT.value, content="") - message.function_call = { - "name": prompt_message.tool_calls[0].function.name, - "arguments": prompt_message.tool_calls[0].function.arguments, - } - return message - return MinimaxMessage(role=MinimaxMessage.Role.ASSISTANT.value, content=prompt_message.content) - elif isinstance(prompt_message, ToolPromptMessage): - return MinimaxMessage(role=MinimaxMessage.Role.FUNCTION.value, content=prompt_message.content) - else: - raise NotImplementedError(f"Prompt message type {type(prompt_message)} is not supported") - - def _handle_chat_generate_response( - self, model: str, prompt_messages: list[PromptMessage], credentials: dict, response: MinimaxMessage - ) -> LLMResult: - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=response.usage["prompt_tokens"], - completion_tokens=response.usage["completion_tokens"], - ) - return LLMResult( - model=model, - prompt_messages=prompt_messages, - message=AssistantPromptMessage( - content=response.content, - tool_calls=[], - ), - usage=usage, - ) - - def _handle_chat_generate_stream_response( - self, - model: str, - prompt_messages: list[PromptMessage], - credentials: dict, - response: Generator[MinimaxMessage, None, None], - ) -> Generator[LLMResultChunk, None, None]: - for message in response: - if message.usage: - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=message.usage["prompt_tokens"], - 
completion_tokens=message.usage["completion_tokens"], - ) - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=message.content, tool_calls=[]), - usage=usage, - finish_reason=message.stop_reason or None, - ), - ) - elif message.function_call: - if "name" not in message.function_call or "arguments" not in message.function_call: - continue - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage( - content="", - tool_calls=[ - AssistantPromptMessage.ToolCall( - id="", - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=message.function_call["name"], arguments=message.function_call["arguments"] - ), - ) - ], - ), - ), - ) - else: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=message.content, tool_calls=[]), - finish_reason=message.stop_reason or None, - ), - ) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [InternalServerError], - InvokeRateLimitError: [RateLimitReachedError], - InvokeAuthorizationError: [ - InvalidAuthenticationError, - InsufficientAccountBalanceError, - InvalidAPIKeyError, - ], - InvokeBadRequestError: [BadRequestError, KeyError], - } diff --git a/api/core/model_runtime/model_providers/minimax/llm/types.py b/api/core/model_runtime/model_providers/minimax/llm/types.py deleted file mode 100644 index 88ebe5e2e00e7a..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/llm/types.py +++ /dev/null @@ -1,30 +0,0 @@ -from enum import Enum -from typing import Any - - -class MinimaxMessage: - class Role(Enum): - USER = "USER" - ASSISTANT = "BOT" - SYSTEM = "SYSTEM" - FUNCTION = "FUNCTION" - - role: str = Role.USER.value - content: str - usage: dict[str, int] = None - stop_reason: str = "" - function_call: dict[str, Any] = None - - def to_dict(self) -> dict[str, Any]: - if self.function_call and self.role == MinimaxMessage.Role.ASSISTANT.value: - return {"sender_type": "BOT", "sender_name": "专家", "text": "", "function_call": self.function_call} - - return { - "sender_type": self.role, - "sender_name": "我" if self.role == "USER" else "专家", - "text": self.content, - } - - def __init__(self, content: str, role: str = "USER") -> None: - self.content = content - self.role = role diff --git a/api/core/model_runtime/model_providers/minimax/minimax.py b/api/core/model_runtime/model_providers/minimax/minimax.py deleted file mode 100644 index 5a761903a1eb12..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/minimax.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = 
logging.getLogger(__name__) - - -class MinimaxProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `abab5.5-chat` model for validate, - model_instance.validate_credentials(model="abab5.5-chat", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise CredentialsValidateFailedError(f"{ex}") diff --git a/api/core/model_runtime/model_providers/minimax/minimax.yaml b/api/core/model_runtime/model_providers/minimax/minimax.yaml deleted file mode 100644 index 0a97ff9bb98758..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/minimax.yaml +++ /dev/null @@ -1,37 +0,0 @@ -provider: minimax -label: - en_US: Minimax -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.png -background: "#FFEFEF" -help: - title: - en_US: Get your API Key from Minimax - zh_Hans: 从 Minimax 获取您的 API Key - url: - en_US: https://api.minimax.chat/user-center/basic-information/interface-key -supported_model_types: - - llm - - text-embedding -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: minimax_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: minimax_group_id - label: - en_US: Group ID - type: text-input - required: true - placeholder: - zh_Hans: 在此输入您的 Group ID - en_US: Enter your group ID diff --git a/api/core/model_runtime/model_providers/minimax/text_embedding/__init__.py b/api/core/model_runtime/model_providers/minimax/text_embedding/__init__.py 
deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/minimax/text_embedding/embo-01.yaml b/api/core/model_runtime/model_providers/minimax/text_embedding/embo-01.yaml deleted file mode 100644 index 33546eafd3f54e..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/text_embedding/embo-01.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: embo-01 -model_type: text-embedding -model_properties: - context_size: 4096 - max_chunks: 1 -pricing: - input: '0.0005' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/mistralai/__init__.py b/api/core/model_runtime/model_providers/mistralai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/mistralai/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/mistralai/_assets/icon_l_en.png deleted file mode 100644 index f019b1edceac20bb838c9be16985b4e8767344d8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7064 zcmV;J8)xKFNk&GH8vp=TMM6+kP&il$0000G00030002<{06|PpNIwk#009|-Y#V^i z-}q;U=zp5>aRdTM`-_hd5vl9Awr!=k7pDpq70?6Yw3}fHavk}NeE$tZOuz(q!~Xx! 
z9Qpqq&)2)E5hr7;cgLR{+laHaZQHhO+qN;z*lUe4o-~q9U-f>yZdBEUPIvnF+>3|_ zxNU&{OKE9=1QIm=_QP_b*U;oWZ&om`7xVp?@5Q`N{C|JU`!pYryu#-Jxoh4R^S*f^ zNjz-_X0;r(q*?_i^;EUV@Bp(M4QBnQz^se#=pm^Vxtk=wy$cFKKmODt-R&RGk<*>=H|y9 zm92-+*jGG`JT6;J)x-@~U!O&EvHsq!(57$G<-_)fNE^>JZ&~}3PDfm%G+8P9N$Kby z5!&|hI~AZ9{lJ1idHM1z0KeO~ix@7VLE8c%O;mm32~P&N-%rhB_;b4fTXxxE9Q8qZ zUA9>P<{RF6FwNJ0?mZJqokzTY-tXLFn)|<&E^puX8I&O8Xa=66G?&j{u#2fLwSEO-ovNtH6G0rjqC@EB0Je5qpoRd(9l-p}uOkP6 z&|a^<=mlrPSf^6}Y6u|gW?-UI72qMPSh42oKYG%gikjt<7f&nX<>}u@RSmek<6evt zv_3TqHTu(1H;o!${7?4SSPF8iby-uO~ETc;)qh|ysnPXLkb z4C3U5>*#MJ@BI{X(Ii z9Ohnp`W z?egGvEr59JBLc8|^uD37cfHHvF30@V@*LuWC^rA=P)>p4m-4rfyVYRZ4FFRRY~Dn( zv47-bSAP(T@r$2#Lity77{8YK9RSnU(X1K-M0MlGKK`=GblxJdswb89-r0MFc=0#Lj!%NQ%%|m;(|}RAo(Nu0_x+2@3xM zDcE^<0#s=YaM5+w93!nMJBm324I++=_3KCrwED4+h!NIA~WDU(@G@Sn>$ck8HFqe`(OAa?#f9Q6SB zV~M8$5cbj^o)+0~-|5-ge)pF1%m*Xx@-b(pgAag^z6>b#IvR1_R^52@;c)PM{wC&3 z8wBii>89QHe+d4YlL!V9;~U=djDoBpe0beiuXvyNfL1&kz%6p|!SgMX^6CW{E=$8x zXTC7<(Ap2#+%ZtGV(RBl_E9docJ~1h+WOohpY`CSc%*Q9?~}h^J_v|ulw_#*(n}TK zv7RWZpX4q43UgMG`y?O~PpMFVB5i-Lh#z59`3o%@6d}9d$#;9q-S~(?k{*t5@(Ppe)4=8 z|9qkcXV7XKxNbpi0Wkx`j1C`6T@Y=?X=RMQSApG_j}b;OyLYv4Z9 z#BAvVT{;mpr4SH+5}`sk7ajJ#$0RaUNV4*8kG{sd4Tp$sa3TP*{qE=Ms+1pp)W-W; zmjCUq1mK)Ewm5RJ?o|SUKxw9hW{G5lQd_d5LV(HW)<{6;md79Sj8%Km_|LC>=yr43 z&5}`cW0s5rAie4Af;9B5HEUIb&g!HLgl@m&WGlH}*-=P|?NH2$&b#)UPLr7coOkUx zowiMp`OQ(eE0iY*LOSnByWjVqy+)`0b=4(zVoqzt=Z=|{O*+v~@wsEBWs{j8v+>Xo zNAUDvkD>qIG{)oh=t%AaPg$0?htRALTuYb1wKN8|UqD~OO`CpAb8@FW*mOQ!2b<2P zOI=Ch6*NXea4qeBXS)=$qzS?szy#rdte5Z_IHo4gbT0$KTR;YcH-rfYHvt~Bnt*V? zv|WT|38!nPHF7WAI(?2zkCExtX>@vY+MVrE%wwZL0}3=#0wQ*_iiY03MXHHjBU-E{ zfl=;Y-7Zj!3`KW^wXm&*sfSW6-f2ipswRT4@z}9ahQR3B(;wa>0#ZKxwIM*!Rp=eR zP8meJ?ldl&@?;P4Lyg`EJO4x!f7_C2Z5v=t+`jJ!M#nXr)r)PRYSK? z^)$z7u2Jm%>eBI{@BB99n)xA5tJF={*4Ixd=8(`Fld4tEqyCF^K?4Uon_h>QQ^g$X ztjbja9kbJI3Lx0laRUj6AT$>?xvDb;evJ4PY02zYplg2Q-TAIIM;G*j^7Cg1qcX6 z7dUulOUsqNARzAami7i1EsF%EBp|eK7YL2p066fzz{cQwtSVN%S?mBNngt+C01*u! 
zqaL)X!GpTOPy!HuFzSi^8*5UPxIl4sRb*Bx?+6S!azp=vK89idnKfWr&qe?E_kF~X z`(JIoqch8z-{W}1Mc2Xf@1E(oT=^kmJ{r+KRt{(5Za*&Awhzf!OP-eu&{;`{yWEeu zjN^8@NimO-?{)@>oE=b+bSURSdkHGqw^{pO>d9#K zEKnw9$cq&~NtvQvb*2}R6SJFb$t zlBC%R09H^qASw<30PsElodGHU0a5@y5d?uigBz^?U?7|Ur!&~`1H3kI@IT)_5Irkf8;;7{>cAq`UC!J^=sW%_mA7ZaDUbR?f=t%U_Yn# zEdHhcC)5Y<|K@+_f1>-o|D*p&{a4(F_J8vq=6%3Fl>a~VsQ*9gQT-$Rr?5ZqKk0s# z|MY*v{2PAK|K0yZ@GbnS`j_7)$OoVYsORtw$Pe(o5x&I!J^tJETKhZAnWZ0;{FnIM z+kC$+AN@c4ALVzT2lqdW9`E-W|E=|t!gJ}~f`8FJ-+z4eUH@0j&$|A_|IdHd{lItw z^b-AF6TAz=s_Go6Y++o;YDItb4ka$tk>^JL)J+TK(JCfKaBhSjCAh?_!mN_9Px}+O zFY|&4*0QOW83D3ie}|K=VURrFeOAOKPp--N2Z1jg&(WN!XY|$^2bESH7Hcz=yb?m=R)ZOkbzB&jZ;myMoxr}PTdCz`qMdB0^iaED=~QQ^)12nx%Q&$9(`r0n z>IemHpaA~=>Z6-e?-J%7>VhQ+IX_oU6z(T5g5sti3w^jZvn2vw2lM9VG*3w%Ev7a0X@f0X70%x`p5m|R)|A3)CyV;YO9Hj<9Z zWbcRUdgvleJ^#@nYb`39xQ3zR^;CA<{8Y*_p#S+{GzaIiI9+)*zYuZz%tue>{Mp~` zxARhEW&@u zxNBE?#kzvFj^xa6W=@;qnBUKD@3p@cL*G@@G=3?^{%JdVar(^uWJx_}FXxkrWA}RN zWQ53Vp%>LiRvXizJ~GTN?&v7ZlWWur#e8IPI-EdowBSfhKpnSGtyKM*%@zJj|59A~ zC%Bxt2eNPgE2lypqs-

))uNIgkUz4SeV%*w)t7;E=L9;#MYzrRnDf9*ERS#xuA{ z(Mv)oh(<3y_-{Os3A5BdUl|?yC);ju?7i5Iua2(y8KZ}qB)jaxnrD0ovHLA zDzSF)7QM(<3tOKEz=ZLI=Mw+fRBM&8OQm0*HO`5qWPOu=3sG5^{|n5SyZ{rUb2Lha zeTCTyFQrEuR}ry-NrqSug4nRpIE7d}12*Kb@zt0r$R-=d59l(YU-XlvA0c#EL3+2m zy6X?d7>UE`$s5?iy%3Y0emqmHHX#ghA3wU%463yGnKvp zhL?yjKGbHApOIMW>yVwIyn;j|@EBRy3Dlsi!rRtgYiJ;MBU>32XZBpYW<%lKZX&sG z<`QlL*@ZoPgL~OZ-Up>l{6{1eg2bx3HH9>K`s&^c2x^WhSntG2AwY`QQNa09HNx9wp3X32dfc$l$M-K5N7yR#SB7l#=4?M!k56HHe0>CklLZxv z+knoC)Y~-s%#JByNtzv^ETZWTIF>)J?I<&R{vIl+`@F&zW8w^a$_1-$HPdy?#66xD z7X5~q$@A?koJer+j=qGq6X^`WX#l)Ki$gnPIcftK1m%s;X8hdHg&9~rs!Mp?3W@68 zZRfxo4~Q2}ve6;o8(-a=Sm9_3{~TW@_NTv9se$L8RexAH&J~tPeaGu`YX~XL9YV|W z3HE^UX+g~AjhjH^=6({;T;)0Zy`f3}f%*kS9w*q4%GPlpXmhnPvVRTnIqH|K+2j7g zUi8l2^{N|V&qv0Sd-$~Qt4e$PbPJ%720FrZQ0>1p4ObUiA=G?KY*4^iE^SAgxsSE2lsi z0u+7MiLE{iT2?U=j>pzkWqiHz###UNm*GOxrZFV1+}>-K6OXu0@Hbqb@N6AnN?rd- z)p_oML=rHS$4`Tq)2n(N$Y8g`tu?Gv-4K4tVGlZz2-A!(-miGi?4T z&!z|X8fz6800T=+Vag0R+a@}$v6Lm|BdPS@&^U&7RO%zcK9o?UiQHVbzH$8LJo8d= z;hNKC+dX&!9T?xpE(2xSr}z%NDydQYBCoMBhixKlq-@$GHaxtIM;FE|MzKSWr3jF$%eBj;xMXc8}YGJb}^}I zn_5*pFkjLzmcWJM(T5na_>!VMIhn4U|;zdE@?M?yBt63GI|bm653*#CL!FE7;vWV$$C}`DtbsQeEFvl&d=bRi!4o1TCCL8QD7-9z@pY|U#Qx;wagw1 z)Sa$<{)JvAW9>kS!}kM`PS!)8CYP`12<_ajhaNcB6O)b)^i+|Y;s5QjZ43Y62Kmqb z%NchqlB!eU9S<(v1ROf8uuCxgYyxZ_H3HvK?5q%rp z&c@Oa@)TQ$qIP(rl84EsaBSOKG{=m6#S&M**l$R`;udYw^nG~f#FXWg)Z}%8^>lxe{Q0{{3nymu(+y;^)B=Jw4}Sjc*}@AC9$IoqY9- z#YeFu2J!#nDB^`MYv1GFnrrmj&*pU-dg~ubO^l9?0oJ=pdd=W*q+8leyVi{JH=4Qc z8$t%z&A8(T@Y=SF`1;+wC8{#qFY(paV!cVO)6Zwr}#su{^LJlblmhr{gC0?VPU{>1Zw) z+*$hzRuxU-2T$FM9>xAqz`V~tzL^{_u3lg%zu@1-n-fmv1&7dOpwPigf{D=lm+N?w zaG5xi81MsO?mbIuaYzrW#S&{MF;c8ZB>b;e$aF-nGRSiZzD`5QOoRUw(wiGYBBtspP0`D6{BUQKkgr*-bG&*jM= zx@*cwB2-Ph+JD;ES?epzQ5Ljvfv{VET`aBHy&$fC z2nfPnf=@2Q8fHQ51#xnA7xWSV{KXJ_^8dgb0P4ReFh>!9zKS}vlnd0FnwOoEof9C6 zNli^H32 zt89`QB1TDhFr7Lf$|#aO9i9`Oq`3JI8U~)F1ReTw0Lk+xHYa_A=X5BV;20egbh?xg zq!*z;UJ%k29^+utkyoovao$f2o%_f2Qms#Atq0dR=_0DTu!Tx@Rlk#qoCUo-)89Yp 
zCslLYt@bb<2Pf_*^xpf9JZ!oM{ABIAOv$D?YH0u(Cn3nNjJYP{6?sl*?WMX|AGa%+ z>_1Kfe171mS@Tg$P)&L{;65qx)6yUa`*H--n)M+Yg9m?P}-&0IpMvwl??Q@M&JT{D!p4RllkK%>B1n{(sST@w}d`9&*o1%?lO&qi@_aNq!^wCojcqyctz4ry?#zaBzEy-iI@X}e7$Y`XD$lx) zc2w7SnToAgksg0=EfkR_;4U@aH+K%%<$CYnkRU1tFua?-+Nc^Xf|#3Vkb&n{G~g8(0A+GOkVBL;Y0e#T)+5e@POc~qk^2#WkZ1b6;pXt=rL8? zYAP_J&1e}o_O%>)e`AiP_`IQlp;Nmb zfmy4?6uP`KGCM5j-eRjH*Tpe|LPR{F+aGM&pWEL1H8^>KRU8aTTb>t$A4p$c3F`y7#Va1lnYT)(znGhJpdpMLTN;Kf$7 zZ`mO}K*j$%gew1xQOi?z#R5;j9YdrBh-;$jL_6itEoq%_M#R{MV4_$6Oy;ciZht{= zds$&dh4^hc#voZSK5S>)K+%N+a$C3(Xmtv+a;P%3P(N{`DfA51cAZq)GcE4quSDjS zF4DW(Z0R3PISzMUXj=8PI*tYgh)~fq`NH|3&OAA{U^jt}bdvoX6^pP0=E?>8x%}Vy zox<1tS-Z>0o3dZf&e_;|3ky~ov^Tp}0>)IFA0`izZ+qHPIJX?*SI^+_a?9I3husfT z#J~F=xTobpn()eBNkOR3O4$7#>z2E?+sQCr=r;8uS$5K{;AxPURWP)`&(8`_Hun}eu%nq zxns5LQcph{&~#sGEQf(b+&`5vlQ0lh$uk^(S)=eoHDM$uE70MBO0O6u%D9f|vj^oN z8x>|h?ssT?yk=xO0n7R-SaRY+ne8_@O~Gp%JgTqRAPMADI^;olSsf&9FJqve3v(68 zv2PA=#ZBr`$p~rkxZwbK{Piv3-g5JVub{##MMlmEtYj=DzXPMNMABojZBA z!t@JXCxzo+v=BC&O?@ti=ul%7k7HDiCDW;{J;>ofo=fy3o-HaUaeQD{Y&?p7~CJNvD2C4JRUMJc0_DCjA-@3o{W) zXM9p`>lxQXj~{cA>c6#-RJULfZ*{=Pl|92@YvXi5FlL!AZd@+$7{l`Uh zLtozwsEjLy<{mpw1KAq7F4MTL%8n@4^suMY7E5>K>-|;gg__i!Lnt@g*0D*jLpza2 zapb+Q{H3H+Ob4ka(fX1RN{2GnRyUk}N@`re&U>C^F{tKEkvR)LxeiMYM=w#l_;ZFj z@IZJGnR!r!<``!M-B-pzIb=_fy#qy*0%sj6#GVS|g~8SGB8RKUQFXSjG-xKp^D{*P zF!CIB;~0Ka?1)x&PAyG416w7b4g3ZnZP(KtUBMtt>nSGR3wFd-$wtFUR*SZZjA(k& z!{&zyqorjnY>@eC;%Y3!ioKC{ie(FEfGY0LDEraj%Y1veT&sN%^PAAJ)RNV>T;Jre zv;eXx#5#JXP4?=qq*-!$EQ241l5z|)+40joXEuXM*vFk)5@J zi_$Krk5-tYJI|)K`h&r9cqHM8A*`RmqpV+2qDZ;u_0M>C$`<5Qm+7u6GdJ>@pOw9) z6o0L^X>i$zSmSY}Gn}VUbVl6LI39cdx!xL9FHU}Bs`LSZMF-wV(=g4wjU}OmzB<-v z>!1|P8Bv!;qJgU@`1qgck7ZjWD@7g{?x>?x;eulL>+rLub@t1A<|*z>MASI#bsFHB#QG7icD92=M$Vi0NW9f zd(WfESbC@WcoYrD)A)TGA`uwrxQm$wcGCh|TN`^T+|{dZS&(L=C<`}Bb+f`K_`d;^s`SmPgO66gQBqhzX%f|_ zge2#_&=g4PC;L^ajs_wNg7Og6=~i`XPDA;EWnU_MH4Cx$8I(NG_IC`H{y{BM#8>e; zXYsVRyNq3@&s`oF3SPHod!Znr=SmUF%@)j`8@YeF&75re!P92KGS^$x3Jlbkx)gwf 
zbhR+Wa2MMAXdnvuj>|AIJkYFlj1L!}l`TT6(>`JuxjRMbCRm!ymS87wY%Hc%Xp&I6 z`vx6N4x*DPfzikS71O*8EmKGjc03h5Pv`x*GA7iim}0zh(%bC$>gBz+;&Eu2WaJkn zvxPhe3ha$^zSdZ&LE}5OLf0V9Y%6A^?|5RZYb*bnvq7M}q zn7ZMOeTdpEdED5b`1E_;Z!Jxny!H8UdD2RW^e3?IHjX~;1mE+ncVU~_USy^z{YDbyGolJMTzyb-Yyw_+^*akXvcCK5rgD@q7k^iH#!VeI$8emKTJ#ISFeE~qMs zXOt^zts@nSZ+Tlz8aR(PpQs1t;Z~B;DmPUf0I(6a*Hf$Ef^w|66y%c*x3)$`-V+f# z0D%(L5WbwrmC*RNBeU!8dTz*)@a~cY%RUl^lJCKGAfx=Al{zrG@(roS=gqh6(|U4+ zmR2d_YQ;{ysSZx8ixGI1+0R7-soMz?_2|&?TE}0M@;xh71%DfQIo9M1uo~cW;LvQ- zlwFVx<+9f_msKEq4GApfP17bbR0@i8jI$5L=dwjrh3kIrfh5{nyUC}6F02!EepYEU zsG3ZVb7m2}pUqv%5~abwQnKuIj`J28bD{@|yzzwb*t)h<5$yoT!6H@W#K$85HVu(}jsf#RiE%K@p@ zGJ<2yYjO;#3KJ}i@k>TU{u7_N=|=`oyqVoiZ9&3);?UZhM( z!4^(`f6AcirG(S;!txn?gKOAPi};qT$;qZ&b6s>8{X^ZqhUj658pN*f;WEtF$NfybDtkqnH zD(};ox1!~+GmhXmJ5ZdtA{~!ro|N?qw)#cmfp|r){z-9TK%zlgb1GUQI$C|#AR ztl`3H)I?o{OMD+|$IEHk&A}V;h9*PNTO-S4x%(@zBD%otNHXz{F_!&pl*|Lpvz54N zhiaAZ#;CF$K(4&B#bLy2!{IsT^T6PZd3!j%<7rkPS5L-ORi*zR9;!d(&%33s8oL3U zVJB#C1ld`Ac!rEiaKtp1={t+YdK3xaZMp(R`2;3C40rpDcD;+%M~V@xP_~g7ARomsR$AR26hd4Q#xJYHgS+(s;vJRwB33$qc4}Jt zIxqs051cPGXn7_$R-%XKp!^on&`}y2TkGUa0x{JCjuSeN<&g-k(#8^cL>{QNSMAg* z7dvadx^sJ5d{U&el&E~8Kh>c**e>q+e&wo!y)6e%jkc^%KZ;r2bB=^XWV|X{tr_O- zn6(*}OA;3UP2mRW|HFoHXyl~~1=7ok4)NW!bbcL&c~m)cqQblh6OTmueu@Zn9;?h) zg0?VY;C?=N%M%lacBUefw4;qn2)3@) z{-{@d(rUY^k@<=iUH;&@1EY)?4Rq4`5V?A1@a}%v@l)|{SaWjG>_D%ue6AppG7pYi z+m7w;hmD_|o2v;bd>%E7)r_uzP|zEy)%R`dolQp06i3VC3TJ7xsarXbjesd=-f5he z2KLL*x8J5~QVQm&09V=%vOR`rcytf8$$bMxm&>7inB&O&DknJF(n9*d$vK*Qq<24Z z^6r0JUQF~?zHQ`MZ>#tEb)8yHfO_qwC}xfBm1XYd9aAj~tU4{W+MHR$PkDXaq;7<& z!r26-`;r&OFtT%$!y)o};iQ}V@VXfJCw%#MVZ9ml2FmI4f%;nY>-}fQOnKFAM5sJR zNNd8ld^_Tl+w(DKG6`0>cMhJiLzxy=;#QJTKj{~BLn@Lbq$D!-e8MFaGo^_O(i2=# z1HwDI*l9MWf^KOQR;#IJoQuOnizD9$)S55%9VY?~Q$1L_ZmHOPcHw#YzQ4?V`#TOQ zeFaXA1&7lo9rM)+R@bedHSl9m^31XXpR3nVGLWcz*ome24o)`Pk~b59F8(}f9#F#W zfB5~Nf8HMcEW6tW1zU*=eGoU9`0ak_vm_bHinbB! 
zkZ414=@7iRme2yW`LbcxaBmz~Gk_#}Z;N)*_0G%}!`ADa0z8dp!%VI{ySg(a;Cox% zb~$!5)di)|1rl2k$oLoOZ;MRif{VQ3J|-ol-+ZT?I41!$-EWZ3cV;^7p=W2dvOB(} z&C^UkS*>>64&D3g4!y{k%51#GT&yb+3ded+3o8LK!L*ZKlx?s3WI1_M*{n=b&Qm^p zaUeaEIW))8a54-gjkwyB*^di-`qaWx7ga+vD07Otb^BmvcW-T%`Nh#|CKd3AY2wm) z7+!KES8COvBD7j9drB2H{f541-z#}h482r<(G*kQYdQBw{~Lc%Lj{i=DbiL-UR}wm z3r;eK`MNn>)p`SY36oQ7qzE8s;e48KuDvE4Ja0@ion9W&y6Y?3*OKFk>C!ZbEF4Nk zDwwsmHd1~!bO;FMW94;?SL*YzUFoS{xKKT6u_Rl{_~kwAfwL>2CoG=eOzxvTb+{Sz z<;a2ngx<0i@+I0qq)*3;Jh*c%z*BP!zGHDPItFh)#9-Grq}c#ex3*|IdA*_SJiLe} zJb|?14vI34k@e|rJJy-z^BLWr9;NK?^0Aa+umc;z2|vcQZAT6)a3{W@ZwG}kB6FNg zXOQFQBGG8B{JMRHJpV11i|xc2bAH0ne@atd0WF@O<=lg-k*n_Va1YxURcj~+sf8CV zs{l%o#gx^|L2t-=-5p-%xr6#{_$_-+TNzrLI3gU7aUiJ1c23&tNmgS2)y5y8jrV5i zLce;p5MP`slD^mnmW-!7wMpIPT5QfHVZ}SZj`5I|kYFC6w`ThQckXbCWuVC!)_8h0LK9X^xP|#;X$zP<#nB3J=51 zAD?V}o0e+X3Jfz=qu>+-+D*_?yP4DmOx?b|0K84wLeq4rN|dP!!@2i}!y)18`e3Yu z1IW;3$Y53>#B8^$))edIIbIl9*UqeO4CM)CMfo<8FL0J1kR7G91++rp7>SSozE@4i zMn4Mbpt##+J}<6_x;?{;KY5w`;ke3ub^AbJD)PLjUu2Qresj`|b6^bv@r8yIdKI?3 z41B1~Q1`8ggKv(XR)q}6`EIn@uV?p_-dmmscPULcFgi>P{S{@qo9*89zL%AUtKosL zkutWBqpw(Q?5k%!QV4N_qXN~!BYAV1U`=gCz@h)_5~bSk?@FLt9)P6x4k=7pAEv;! 
zg;w8Z)W$fFQ%J2`PwmmWu$4{7?~2pcPXS#F7xLgI{c-LLtw(Z}H-TrjYKK5999DUq9UKq6;MLf$CYod(tPn+V{ECIyZtIZmu@|P@$(tG$XyAWiO z$^S~NVf@D3SU;!8PuSwqlw<5IbTwcRUoaWh`X#ra%wHx{Yvj$=Gvk*i!Jeh$_iji8 zyH^YAzh3CK9?+THa8z~O%RL-)cN80ie|HH*qsRwTcg5-&F$Jr!ut@7)%`xgFbd9Ou zbRIA^$VPqQ!x7dMOlQkc0;KIS3amOVuL)k)rN}K0zfe)o*XtY@c`HgpC0az4Nu_;b zF-h*1I>=C-#NMI{Qd?m@kqgH*!H4Fq6U9$#5l+mVV6k?1!3RLv9%5#o_*KJaG^H~s z%@|giTftl9O0Djo7CVye9tQU%^UhNHcX9x$-A&W8Z+srwH7Kt;C@`bqLG!1)oUl1= zHtHmGwvU})kj*Yp^;wIHKjO8ucw)3~^!w@ur>QtAPf*%w{@u3QtutF-%e>sLSu3fZ%R4~} zqP&-WS#$nlr9xHLQ@&2$DdEDgWkccqB)Ufh2Be1NVb_Whn-Lx=xJ^2K*$q0}i8|%2 zOGsC;o#9jy=0(Nt7Ks@8`F;pzPc)1~w(1W>m1)GkiE9gu9TQ)knv1!&m^j%_##`pZ zKj?^2~XBTBosUlPr@GImOHJnmCk!DkKA50<) ze@J-tXg+%hXzb=pTRZt#=q_98I(D_CWB)BV}+amz8aNbLo^tsgePD8+*hpdy$OYp7n7LW5{gp9Sy<~+ZPOY zZmuRVxY&{Y7ik1c`}+_V)c}}1sB{oLqV@f+0$E$HkcAoer%wV9MP=n@!P~2tlZXVS zoe?b8GF0$&Flh12%4~^ zKB(hKE4<*mIltVs^ggG1b~H0?N+ zmEe{A2kJHcj(hLc+Qt!s+S`n7lCGOFDx5iTS^ugQ<=uMlPoUW1=xWrYZ5>iX*eL|b z%Lx;A)v0>(9mIMbDJN42)R0c7pcD{t?OTbx&VtVY3?w+SR`UOROKj?Ki=EpTNL4B6 z{UB(I;9@DE8B1o`=E3$b%vxPIblwtlN&EfkUDX5;P}Z*b7edg`JnjP0d4CZfuNOA2 zJ+ckc^PW2iD!^U-VVBmG|CEQX@`=z4(r?_O4mw|eJ{unCAs7hB!ng{<6uvF|* diff --git a/api/core/model_runtime/model_providers/mistralai/llm/_position.yaml b/api/core/model_runtime/model_providers/mistralai/llm/_position.yaml deleted file mode 100644 index bdb06b7fff6376..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/_position.yaml +++ /dev/null @@ -1,11 +0,0 @@ -- pixtral-12b-2409 -- codestral-latest -- mistral-embed -- open-mistral-nemo -- open-codestral-mamba -- open-mistral-7b -- open-mixtral-8x7b -- open-mixtral-8x22b -- mistral-small-latest -- mistral-medium-latest -- mistral-large-latest diff --git a/api/core/model_runtime/model_providers/mistralai/llm/codestral-latest.yaml b/api/core/model_runtime/model_providers/mistralai/llm/codestral-latest.yaml deleted file mode 100644 index 5f1260233fe97b..00000000000000 --- 
a/api/core/model_runtime/model_providers/mistralai/llm/codestral-latest.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: codestral-latest -label: - zh_Hans: codestral-latest - en_US: codestral-latest -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 4096 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. - zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.008' - output: '0.024' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/llm.py b/api/core/model_runtime/model_providers/mistralai/llm/llm.py deleted file mode 100644 index da60bd7661d597..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/llm.py +++ /dev/null @@ -1,36 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from core.model_runtime.entities.llm_entities import LLMResult -from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class MistralAILargeLanguageModel(OAIAPICompatLargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: 
Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials) - - # mistral dose not support user/stop arguments - stop = [] - user = None - - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - @staticmethod - def _add_custom_parameters(credentials: dict) -> None: - credentials["mode"] = "chat" - credentials["endpoint_url"] = "https://api.mistral.ai/v1" diff --git a/api/core/model_runtime/model_providers/mistralai/llm/mistral-embed.yaml b/api/core/model_runtime/model_providers/mistralai/llm/mistral-embed.yaml deleted file mode 100644 index d759103d08a944..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/mistral-embed.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: mistral-embed -label: - zh_Hans: mistral-embed - en_US: mistral-embed -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 1024 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. 
- zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.008' - output: '0.024' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/mistral-large-latest.yaml b/api/core/model_runtime/model_providers/mistralai/llm/mistral-large-latest.yaml deleted file mode 100644 index a0d07a2bf801bf..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/mistral-large-latest.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: mistral-large-latest -label: - zh_Hans: mistral-large-latest - en_US: mistral-large-latest -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8000 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. 
- zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.008' - output: '0.024' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/mistral-medium-latest.yaml b/api/core/model_runtime/model_providers/mistralai/llm/mistral-medium-latest.yaml deleted file mode 100644 index 7c7440894c0c9c..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/mistral-medium-latest.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: mistral-medium-latest -label: - zh_Hans: mistral-medium-latest - en_US: mistral-medium-latest -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8000 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. 
- zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.0027' - output: '0.0081' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/mistral-small-latest.yaml b/api/core/model_runtime/model_providers/mistralai/llm/mistral-small-latest.yaml deleted file mode 100644 index 865e6102262196..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/mistral-small-latest.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: mistral-small-latest -label: - zh_Hans: mistral-small-latest - en_US: mistral-small-latest -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8000 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. 
- zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/open-codestral-mamba.yaml b/api/core/model_runtime/model_providers/mistralai/llm/open-codestral-mamba.yaml deleted file mode 100644 index d7ffb9ea020848..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/open-codestral-mamba.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: open-codestral-mamba -label: - zh_Hans: open-codestral-mamba - en_US: open-codestral-mamba -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 256000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 16384 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. 
- zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.008' - output: '0.024' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/open-mistral-7b.yaml b/api/core/model_runtime/model_providers/mistralai/llm/open-mistral-7b.yaml deleted file mode 100644 index ac2922695999c0..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/open-mistral-7b.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: open-mistral-7b -label: - zh_Hans: open-mistral-7b - en_US: open-mistral-7b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 2048 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. 
- zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.00025' - output: '0.00025' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/open-mistral-nemo.yaml b/api/core/model_runtime/model_providers/mistralai/llm/open-mistral-nemo.yaml deleted file mode 100644 index dcda4fbce7e82c..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/open-mistral-nemo.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: open-mistral-nemo -label: - zh_Hans: open-mistral-nemo - en_US: open-mistral-nemo -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8192 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. 
- zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.008' - output: '0.024' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/open-mixtral-8x22b.yaml b/api/core/model_runtime/model_providers/mistralai/llm/open-mixtral-8x22b.yaml deleted file mode 100644 index 325fafd497300e..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/open-mixtral-8x22b.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: open-mixtral-8x22b -label: - zh_Hans: open-mixtral-8x22b - en_US: open-mixtral-8x22b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 64000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8000 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. 
- zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/open-mixtral-8x7b.yaml b/api/core/model_runtime/model_providers/mistralai/llm/open-mixtral-8x7b.yaml deleted file mode 100644 index d217e5e7e98c42..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/open-mixtral-8x7b.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: open-mixtral-8x7b -label: - zh_Hans: open-mixtral-8x7b - en_US: open-mixtral-8x7b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8000 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. 
- zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.0007' - output: '0.0007' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/pixtral-12b-2409.yaml b/api/core/model_runtime/model_providers/mistralai/llm/pixtral-12b-2409.yaml deleted file mode 100644 index 0b002b49cac8e0..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/llm/pixtral-12b-2409.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: pixtral-12b-2409 -label: - zh_Hans: pixtral-12b-2409 - en_US: pixtral-12b-2409 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8192 - - name: safe_prompt - default: false - type: boolean - help: - en_US: Whether to inject a safety prompt before all conversations. - zh_Hans: 是否开启提示词审查 - label: - en_US: SafePrompt - zh_Hans: 提示词审查 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. 
- zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: RandomSeed - zh_Hans: 随机数种子 - default: 0 - min: 0 - max: 2147483647 -pricing: - input: '0.008' - output: '0.024' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/mistralai.py b/api/core/model_runtime/model_providers/mistralai/mistralai.py deleted file mode 100644 index 7f9db8da1c1ddf..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/mistralai.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class MistralAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - model_instance.validate_credentials(model="open-mistral-7b", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/mistralai/mistralai.yaml b/api/core/model_runtime/model_providers/mistralai/mistralai.yaml deleted file mode 100644 index c9b4226ea6e573..00000000000000 --- a/api/core/model_runtime/model_providers/mistralai/mistralai.yaml +++ /dev/null @@ -1,31 +0,0 @@ -provider: mistralai -label: - en_US: MistralAI -description: - en_US: Models provided by MistralAI, such as open-mistral-7b and mistral-large-latest. 
- zh_Hans: MistralAI 提供的模型,例如 open-mistral-7b 和 mistral-large-latest。 -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.png -background: "#FFFFFF" -help: - title: - en_US: Get your API Key from MistralAI - zh_Hans: 从 MistralAI 获取 API Key - url: - en_US: https://console.mistral.ai/api-keys/ -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/mixedbread/__init__.py b/api/core/model_runtime/model_providers/mixedbread/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/mixedbread/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/mixedbread/_assets/icon_l_en.png deleted file mode 100644 index 2027611bd5e8b4c7d06a5d00e515a0db70e67a17..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 123637 zcmb5W1yo#Hwlxd{hv4o`;T8z)?pj!I3YXv#+}+*XJxFl(Ai-S{2o{{+@)x(eU%%Jy z8@cZv;}lgKYMisrUNYyJYj47p6{X%H5+FiAK)jWa7FUIUKrn=WfW(7`1)o`(ohSys zK{}~Qi9*y&5*>hl5eI3>m?WJgjm4%hTF$%H=ngTLNTxEE& zpdhNmuL3Wd9BYKMqhC!EmQTJuDhkrxtT|0xWSt5)r8?z25LNF>LqLChQ<2x&&AZNJ z)iz_5#cl;%Fo?<~n`}`WfzBL&+Ortip#m2TTt)Ypm^tJGRVqlNmTA^0NkQ?*7;a*N z3Y%20C6@~utEH@pL-B2IR3vYDPfL)yu}OUxxCrdeRxPHRJ;yCBX|7vzSE|^UsE=To zD$2)YxNWcy;voL7#||CjKH~H)ga1(J{HQWbE4(ZvuAc}@2SA(p&{YI|7vH4PJOZww z_?~e-!NUKm5>z_H*@-#qO`Q&b5R^+rBw7E5+kAvEC}OE5AmLNgvAu`ch{;1)AfwN`SnzDQ5az-vFwv-#E~x?@vjZh!e>o`f|#N&%s5I= zGL&a0#hp{rDQ%F55xK(o#WD!`U*yVzHLBn;Y=3WdGOKVp>=}z2pdzM))SP?s@N^^<-Kbru2dn!rr(}$m5dY$yBSw&ROucI~6&-RJd z@5Zqo9t~d3+c!#v#X$3mZuMYymaaBGvnA#I1ZfrN6wV8@74LV$F;)1h;t>D;dfvs? 
z=RGnn%e7WvJBT=61$ZM|J>hm%-gGw!VHUBA!X#H4{;_W>g;RCoeZ+#hwl`NpN^ z?z~7zlfZ(uE1Vjw``!NeiutSt?GZU72H8Ije{lr)OfK7~1y7s{^{HiZBAn0~_uTLL?u-BIs~h6l0Ef!hK*lj6Hqv^o%%pJIh) zXAd3u%AVm8eqZJ|lZXGr%hlODYi#S$J0@N4nAOuI_wd0q=}8V?@cTwbEgzpx zbosFMf%)jk-9dSM1ccD(8UjH5*FF8vdd8jbX|Se-kQ(K^a^QS1u6LD3$n{5kZh`CcW;s?Kk6-R~&$R!Znyd0ru>D84U{LwF#UTLtE>CUQL$Z{N-?1Bvv7(j*lsq4nrcHez=UJyL-ic^Z z>9+e&GzmD&x0@pcD$rF&2PgW8ZLLospBQnz|6VEfaH$^}uz8NHAjs*ox$7+?cpHAh z^+`{EObUuh_^%_56uLw)n-`O%Xj8;mD9pCtGzn8>NW3G=7iJeGFYfnyt)ctcw}f0( z6Q~(B)c=}~e;M$9BphL8w6m2N=hmKgl(yX0pInUdYIP8;1&uxVGS@i$d7GB_ceSnc zmIo3VTt&CC$M2&WG6-4I&G~D+uosdN8xd-jd_9(_jU12@{_FC>D+o!cskx9a<$H$7 zhA>Gbg3yqC$+akkzC7PkjUO;N0I{0=-Ef=825(+?A0+|0)#7c`@U;Q8!ESowP^4ma z*2;^YRdYZ2>Hf=6Au;xY{P-i?M~SZ!ShpLtw2@6rHXP$*a^4>NhZ!6*{i1O98j(m@ zrw&4XI{qNY1=5>F4l z*2!WEhw#ZTJBQYq`BcDW3$y9QnR^T#gUmmRN?BW*r=z&ISQF=c4GE6T#)$D9ohKrK zaJV^g6%lrET|^Sj+vQlBl}1Y?S%!1vZO5)W*55*DczoU2TDs`I5a(iOLMT2sea8un zCn)05UPlQ*lksHDEdAp}<@}}DP%M_>y+>kYx+ydr9#8kbSE+Jh{w5`NNP`oRK0kwA zT={{e3;j>-=V->|GIflNnbBv_Cajc>NxG*ZPrgTpeyTVCI0fFT z_{%B%G!d{w;=k92W73W#7&}v;QLsAw{E7iE5C|gf3U%*R(o*(p!yHp87&xIfu++)>%HjJcRdn}_U5T1!`+H_85Iw8WqmM$!t$_^SN8GCU7q zQr=Bp2x~4Kq^`kNPM4R%u2hb6MJ<~`nNR6&hY8QbS9ZKv=vSrp9bo*Usm*P(7_?}?Qo^Sn6eHCN_pi1*%G zy1f5xXN5bZhY^<9lYi$e)wsX)c_fY@rM7Q120{Arc7x$wp78p1`+XlOt2`7{+-K;D zhAPmh@J9cccOI+5c%$X)bBEn}8*W3@Tden^|HyAo+h%k)vizsyjHDRyvW}c|3@8^Q z3fkKw^zJi7GTwo`TbCB62NXDy8pM4p{pO-~@{GkAy^)+%iCYi7Q+|$1LU}6-y0O zXT@;EzQ|);DLc&rpUox?VJcmWjW~+Df^9jmYOU%7DEpakt+k@R)O36wVIz^%QX>0$ zvy7aJ2UPmw*gs}Lz*T*&qUDyAF_>hpZ>OyRcK4|UT_p1nBRGeHpGI89!7G&=Wd7Y! 
zAj8v_k58g>4m=mFiV(7d9N&g^75S8|9-dw@k8I5z3NX1nul9Yn_vsWP=J!d>C6y*9 z(<%I@DR4zc%lpc^33ThJ=u=8NwnFkZk%P%`#$wkp)$J7_656BX)^P=As;-=b+zGd6ZGhVIc(e6_Z zsu*^5sobY-B+&29VA1J5G43|iLA|2YJ_%%fjuXU2 z$}i=UFQ{V}lvMJmq|l}ZB3ev{2A&@obOgDDbaDl%2Tm{4r9YKYI$$;zZ0*cnz9aCq z;@iQLV07s2!6+>hC4Z*;pZV~=&E#LG=#Pw^2L%l2acf#BDV;anY0a;^Yk23Kh)~wO ztYDSU%kHRC`7rUO;+@>8X#K2okG&|)_YPj|G+B(je#C`PHEqao8t@(bq2aS)I^?n+ zLCSsiOnqLk3J?}tpuw5I&Crg5wZgo2p9wOxF-k6p{2$0w@}+|}_vMz{J?YFx0OM55 zm>%1*jN=KrrT>Y-{{hOX<}~_NI$a?KEcow`H99TRu)jLQbL8os)u&W@6>3)o80EHY?SjjuxU%TG?pi zIVxa4$z%F`+#zZ7b?r^+bIgH=Gs~%hK+X$0SuA2jFo8~9x)=z zI_oK?$V)!piz{K`y=CCECPF~6UG=G|sF1}1>^sH@=L&k89`sJLyDA^XUrrXo3ApFv z!+X$7PRUC(+TKP-Xq4MH8do2=pyM5L!vX6`K#6Bb$$w-kWNz>~HL6N6S(KMahvrH5 z>M`$1#W;T9G<~&GelpswbYj5M(Q~?bZ)F%NiWh$a-B^6NdO!aluvO4f`)mH?NiXEp zr+Rdu^T^n65lO6x%W3B3=br|2F;p{5(aOro(qf#vE7cazvo6s`0jVIWTnH?3q2rW@ zOZCt?*PG9#45y#r`b|6O8NZVKkq?7eP&7CP%!IUk!sgUI#Mr?)QsUXI`?}SxW%n`L z3h2Z-F-ti)lzfjg$LUZ4YTzeVcCysw$?){adDrsU>>laO;g4+U?+&k)8=-;({6v;| z3KfS9>PC*_+ykkSEM$LxYPcesw7#7QWGM{k9jy*7rVfN)oW4_;98|@b)F$@YBNeAA z{JxV)?zkvduT0Ty`uuoz(vUh@8&3UilKroY^>42QpAp7_Qtyd?8e;fXhwM_UX)2RJ z#AMZK)u$RL>Ea!ZR>&-uLBt54gfBCLm5BnY_rc+l(Ye<~>;6-$zAejewd{fN>+@nJgJxFfC@ zUNo#A_lwB<{QOBm#IVV2?jQZZ4|+xz%w8@1*4F6{b5zCZ9t|hu$$AdwA+fWLw3f{@ApN7l$nwuyRXPOhzOOtY!YQOdsv9F&7V;Ma z+xAzPt)mPv)PId_41NTo!Cv;DF3DL%LSk@F4Ty=P$m0*@qBuG!O=#GyhRISebc{96}hB3u5NQP+lpE z#T1i*k|d-zbQ$Qw1ILg(2KQUzRyp)*WZ1yiuem~-O?f*C;dB~sP`kUkvuOBTS6c|g z7;Jr!(4vU3bst=pBKus#O5P~uV1hYUM00*-H4G{cI9_xE!L<@9{JyBTp8^GC{E83`3_ zHpN4w6*eiE#L!W-4(wD48K9J z^7=Br-EfW^8S2B03nw(Y4@D znFYQe7%>aL!)t#>{J@i6HD|>$7Ck;=TM31;V{dDa(j093Hvx?Q8o*aUMmSVQSao?` zO>;|DW*KoPvT6cVLxs?+{jLpC5|=?*TWUWML;!zDrOy5kSHeR*i`?2ZbSy00bx6zU z@1mi>1i35nkcLO2rr*_-pwe6;9FrRd7{tPu)g+=dX{|vO$Zl>SGh1bg4D)ZL>;GlW z5nk{dUF_2ZUWhBjM&PJ0&Q_(Q`i9s*2|cjjbQ2yrqtSqgBe0FSf=FlWNAs##VE%>)H-;!qV00 zx)^B<*F*;JhA@}=oE+rTk`X9jdztGM-2Mp5uo)LKp7*}N$!~+eP^X#() zKV7-w`rw^E&>I%V*>hpNr5?o0_i|xQs-uspJyWRmj;wdy%eRRWAZ8~3Ma-(89=)w%qWv+1s&Gb7}!a;z^z#@vI|z+V%= 
zT=j4NG(>Q@CFX~DwIQIH)8ZElf=e|My`y&?iz;v-J-uFcmcc1UB~zU8$4F7qe#V`p z4x>F{YuiY4Q~|uQov>IxtsQijcr)5NB`|#uWbUexPo{@TPEJ-+zV?;AQ+AD~-UyLS zW41<6=SzqLY2UP@R7ay*>(|8)IPzsPz5nybsQwkV%$ih*F*z;;Uf=-^eizYUG6;sG zLqy_8Gl%}#Q~{x2r)NeZdig+qoLODf*CctTAM=s_rqJ2KnEz&V6bB6YJM8}F7Hf{B z{>4m8OtC->jn?svUB`BTWiD;c>E`2LG{uu%#Bp)Eo)9@y#He2`jwplSeLohI0iRwv zp}swY-zuYg$n2cR+xhc=mu0~OheW_>+G^!gbloKEdC4&*d%~TS30TF65 zL};D7w;0YTPfl26RO2I7aWSR#MiGIg&v--17bCmB!w@|%;3YH9=v56bSQ+4cE~0K- zFGvB|EcZ4u6~ReO{MKAond;OPGQEpP2SjC zLtwS}DBk>ZTfguI`Wc*e!@w4g&O?ahxaQk#ar{mJ6tvDLkOp1lNZ&<}9GgfBQw+a$ z`oJ~z@rZ0M-MiO9z?q7bXz@rvu@QBAOMu;ap-P`Pk&OoXwPXkEF^Vs5%c17PMYL&| zc_P>>)fut)=(5OED-RKbM$-yGga?l4OZn6%wKz^>aH@;Oz9C5bbK0Ux1?PZ-aSqGk z?SYQW#=ZoVIF)t{;p~6{_EIm*f@r%$%70{aDX;kNn4% z(X%Tr1W64kc*Ur$GmRT*`I**LedlAEU*N{H#*?r65ra0?Wx5!7s&0@yObtkwTGFUX zgN?x+PbF%I!ybwyQAfPKwB&9;5zJ=)2%Wd&$fv60;AqnWOgcYs41$-{y;;mx# z2qPSF+Fd;084m^_-t^ZBXFbAxo?EWQjMrX)h){TSzj*j8s3WeL;G8L-y1Ey;D#Z|W ztgwg2ohj=}idq5S8YO5di{0lUV<=&R_6#pz*c5k64tt3x;Ui9~_kLk=Oo!AP{~k(@Hhc=$JmN+oPkxFx zktGGa(c*_){0){j$}>cD!5Lt3JiVe59Fz?(Zo=V&&*w10wkE==Pq7Bs+6p94L%w^L zvy1n+h!ZREH5_yhu7U$bNPY#^?7<89?Vg7;lYq#|p~1xVQQ1a2Ow>VXP*3B?Who0_ z)V4-m2NqA?aJQh!MAjXLZ_cMMT4(cLCPjRWd&1Riw((#Y-zoj_=Y+*03QkxXg7KLg zOgWY+<3^#@K;GTDXkG%<4l88^*T@Rdb#x}Z-bW~KdF)}#ubz6Hd>h65SP8tJm>lS1 zeBv&y##bKjoTv|**T%W|qIH`F@`AxM!argHm@;v$v|u=VT8G|PKg8{1Af$ekxBP?P zlf-09%hg1EZE*W$DH7jMrDqmO#g5Gcs%5k1N7?_R`s|gmOAH0h(?b656e!Sr43rIT zmQ?pCv`lY!3)XLv+5!RsawQdqv|bVD4~Mh>m|_*@I8bnoTv(<%Np@Ohh>brLOTd23 z31QUlz1CzcatEn*Yhha}Fd*08?p(0hKG0ZboS_%Wg#z>t88l#C29k^vs<_0^kJRhK zikV)x!1)@J!FJfhliQ4HkK$BbYmfl88y8V~JQ-9i(rtT^CRoVks+`#kQilTml_u$^ zASB3C(9j2w9QOJU7ZipAA9ER@40#bi-s5yiDI{2+_w;Q%!j86|9exb0Tg>|M9n*(k zEYvVs6!QDZY&vh6cehy0)4PSwFrwvB{JDDFA%Jx2;ayxwCOl?4jK#N+{%oo4Qr2y* zV_G-w+OB`>gjIV#puLCSP-4aY@QHNn<1Im4}|6YPBL-0 zp8{Ci;KkQ;A+tisHY3UR4jEDVv$AIKIre=azf9|l z5l0>Fz@Q9SIztL>Rl11s!i?;;fwO9(JvskL{g-xPupIg&=)f_f2X(q_eMS_(GdTb| z#g*gQ&~bGT6GRBs7`$ssi;F-0jngWg1hC^jhwOu4y 
z+`F*dk+)8roc2vS6l3Ykq4b!WgjNC?W=OD8e$?*!Hd7v~YW?U1nf=OdLHu*4LzpS)zYBS3}X92sxc zoTw;>*Xs4HKtjYkm+(*i3%=jK#JubxkXID>VHDs?ISx=Ep`kM<-cuMfH&8&;E<2s#q5`cW=U zl;)!CS0j6HgM9uu_PvoFQiN8zcE+^ULT)=FKgTPg3FBb+fl7GL2p4bTGF3l(@G8ap zS&|7&PfxEXwqVb<+k86%)c@et3HGjQX7?S5%iPPeT0++4_AvaqK3?5E!==-X<5A!`0SjNcYn&X`uK5>7 z;k!e^xEo8i+`nsgf~7*RlAGW|ezfC%?rsjljgk7P*|A1xgvU1hP#i6&$%Tb+gxI!ygWRDh#k{fMHZp0bp!#)vOq^Q?rZI z39(FGs2y7+lG{-1c~>@3R!V^{abLZYl!&aYCFu#j)Y~JYs6Z`wbeTj)@@5>!(nGN@ zelBeo6~Y-?aG;g*@$JR0$ar%KM%(BH#!cg*a5GK@N!OdWoInslOJR+eoxD}E*=63a zb*_TKGUHzYc8454=k9v}ws_k1&ySp^j zPG}q^9V{CwtBhEk_05D14q(1PRqsL*cPev!g~~U zay|5zZK&d@^ZG4}cm(?P=S)sD%>?FGhd9m1rtF7Yb1PX58E>p`e-T|tYu^Eu?35Ym zWil?3N37mq*9D0Q@~COOo1Ez~Pv)v^v~CG;VF2u4rx0p1h*%p`fne9Gyp!m%K+XrU z#V+cRC;+#lW-m46pOMM~yuZv&R)DCBKqI!K!E0wG1OD=f5=f>drNP7m%fH-T3gJq! z{knfu@xceBQc;~E;)m(!TI(gDva^6 z{iuq&%7AQ-tc7?OT$bY}SpjL4??v~aQ+o^sZ*bXgDgUl8>wisq+n8wfzXwyx7UQn> z&Pz^Zd5P7J43o$7777`(e<-b1=?^6BuibJu=ga_HrtQ>!|17P>jA1s2n zWZG048*k5Cxf<=3;7Z#SS;BSjB3r|mG6{wsbrq9^E47b-n+aiqSFjJqh)P_A*jgla zGt}w{bIT*mm`=oF<(6EwquwBc7P-cIu@Ka1{my%`zQ2?1wq&5Pk}h^E!x`1%4c7fB zPsIgSj+aslg?}Hpo^Z2U>2^l!zSSE?blBPF54Ti#0sc;dK&()jTRHqb>BK6*jK+Ny zd~u$JEjVh4^mFe@9bh!^84-~%AFD|V|F^z7U%l6sI|#e)Q0H61*{!ru5+5?6ZYlY( z@mB#c8TtqgtGtW)F#&zwMvN&G7DmSfjTwfht=)(6zgNI!<(Xy z@B^cv1RPs(+k?k}FIGlI+ZJMXxOEPe&~{;ihO#$wRkZ(asrZwYOr07BGa0 zkKi(l*#R$_+A60p7%W?2M9PcNV154@Zv4Z03$tKE9B<_QradgxS+(Ks9` z`RL4|aEQ;l9MMR7QZ=c9rZ+Yam%)W&m6$Z<_9N|>P60Pvgr<;Qt0QRSJJ1~}xb-C?ylVpo*kdF1%yVdX&jV;;PUnGgoJ~@|&4Q>VEKSllZxwV3sC^k4L7bG?G?q_k`5qgWdqF)x4B8%w=P(AtBAp2E0;XvOP*tT}tS% z1nN<^Z1NRH8=0BsS9V1S?@$#*@2dt1QjrZAWr9z17vH#zqj`~$K7yObDC+W};AwJ$ z9{NsFw_@o+&UnoambIwIwwzJ?FD@?5;h8_Wg77!l-PbbqrO5=Llw&-l7>$h>fU`wH z>rgL@UYfb#rrJM;&oiGGoBNeFOmEw_RvB5MP5w3@EunmrtE$MoVAcjUwh)Dv@F06mbC#i{;~0!Ilb!PPY^inLDGP zmEFx$sr|}3mEG+hdp9%(m~E8BCyY=vlMel9=}nMA!(S3GAr}v3Q#0St>t*@7v5XTGt5{LfqP0D`;e(mv|yfhKQ9LH4V>4K3{^X6Valn=5iv;3|1H>!lmQ?SXr7_KGhVg%- zCR?;_Wg!YUh4VE5H#rUJoL7X}?F!H!1_ 
z`{TTT;RY7!LjWt`q$)K2?@|q>D!J@fv3Yb9Hv$mc+^RHIgV(g>_Y*1$)dC_%N4JJH zk0=$mV-qX$4-v+a*;=b=*?T{Cxmw`UU^ILH271f&8f{v-5>Ck)ywKxF%)y@OVxjL= z=oh>CFaXT976K*S9F5enTIW24zvYV~QU~Oj7Q`6ks;ZsmlMV6vF-&ZkQKtljm$!8OPky2r@YG0m@eFrcxk~o5)p*s)GwH+8%Q|#zmv7W z32s^sD(m9`GY{$xLzn!qjU})lY2_h;`g{aiUDt>f5s|vwVFjprjFVi(864;u$ctE< zKKngcRrm1FeU@F%6`lLbdv{2xl<+Hy$yz3@_6FG-GJi(U?1JU|@C9y8Nu&Nx64s&j zZ)9~@S|76Qn(RqZkk^PPiIMBIX{~HKaVZs})TU`HefGa}hdzu_<-y4s1CP(d{M zOCgODmVz?wJd>dH)FMN3&ofEr2#?=;4&90@9#odrjh+C2@#}x}Xa2Vg`p;$a{m{Ttaky7b2S2bM*I zb#r5tlm&oyvia~}xjTfRCn3nGu~?T$+j5srtTMno^+IgnyI@^|3u6dlV13eTC0ooe zcM(Ircr&*n)LRDX`wM?1B5&3UhoOH6Cj(@xFvA;;F$N#>^?fv=8hJj!A3xI*&>~uX z|8lkEfb#fvU2U)f$h;EvBEzpXiTsi{F4J?|=$SIWZI zzmklZ5zOn1aJMpmmYO|z+_A>#xynghs7Yr%b9;I#S<);61jZWcH^P*31m=PaeLwsqbUUyCPGUJ%Ay@E;3Slp@vaF07=J;8uv=fXU z28#TvGJot{y`u0x+op5CA{G_@gvfc&P#H@eO}c5+egI_hjFTPQoL?CBFurBRoUuuz zwgXM7smOeuzVdIY4j}npt@TE`SoVDIqx_)*R5O=QNZ%aM>earv{5cZZ`vrDyMQdkY zlVlXNF)NKmMC+2`N0?n@t+gwkEKXf*iSdoiSoE1Lu(IDWBu$?9pvE@X%?HdV8*cQi zQSI6tj?ENmwBYdQ`O(xMkM)n86~Z?N@}j^S-c1HPWI=Y{XD@@B$nlD-i(kXVaGwv8 zRQ^scB=fhSj8g?=*#yQTU8H;{@F8xTgVu_>HwnFlp63P7Wp9uRYNJcqO5M@AwQy@q zd5%JBXsq2u^O`P}SG&qdZ&$crVB>rtS!)+g9@0a>^h^<+;3T~RFltZWKz?iSv{d_; zQvXTVL3YuZx$&ghi<%=@;|pR+B}$W=f$1Hh6#2>T1$UDPe~g8Mg{OVUaRB4vftG4` zLRBc}?t5IliCwjP{>9xxVRY)-`}J>1D4)LX`^S{yiT!28>9?Q#=s`ua(ETIZxIe-+yw;7H|V;=y`cygNF|<$ z?eLY=5x;CF*<{Y;oTMO)>)EFZ3EGFU_* zWDOeT_8ve|(wO8G%NE`(Leq}0sYFr2p(=k7Lp(^KhSVNXr--`~a3R8g!7#JhT0i%l zTY6s*lanG!s%TC}>^B#>?B6Z^1z)WEHJ0Z-{uVDE#H|#<#djeHYJEF?&otO)YhI+x zY63UEWS2t9`nCcE(WLYHR|D&q@|!p=D{p0=*4+^(PJzjbw6B5CDA1v6$n}cRj2ZY< z3g=Xy?+sePHK6F`ssK*n@tJ-`1Al-_XKhy5lX>6D!Yc7>bM&_gL+&c|rp9R7&*u3X zX>!=jGSzdNjH@~Px8I&xu63_A;$4To$o*y4I3lP*<*&S@Wl-Y{B*DP|m7xg<^QOi| z{-6+SxgUlw*4SLMGHHLeZQ!pWw-tH-0|M-q)2TsNawiquT1;2gF4R>J-Uuszb$uFE z0(wT`Tf`WJC|7=9how4$15rvSubXm|B{GRdd%U9~ZF#+X)_s$9UdaKB(HLj8>31W0 zH13bU8R^%Qzf_nqJQ;CZG0cxXfd=h4>pBiD>X^Qb$DNuX6?^rzY(FKcJ~X}7dfCvNXbK+-_#|e zE)~T_rqr=$&KC-Y{2x^1DbgdKY79B`0tpQ%P&qBM?ehug#_GbB?-am=U6|O 
zbi*|t=bH52JV`SEzl-gmAFT3^Q`A0qeZm@c?H~78Zv}TB-p7|F%AgkIQTp$o)qO-2 zguK~O0oakSH=YU91T*;SD?%ZNm95;JE~B~^mEL`jFM*k9_>0Fadxt8 zQNQ1i`JmPqeL6fkx&b$*G-|+m4C3UWaDaPwF*j9e(~Cp-Q_-T^o!h#bi)f4YD75Ei z5@%A|v0F>%JJy#QA-CCIeC-ELK^$3E=55I6afFO|Bxw6j+SDwLZs1)NpAJb?_P!t7 z68y?V+GL=cut(5FK5T%(sWPkXE!6oirWrRo)DYP4YYpFF%!{CgzlHKRfP0oB9S;6i z+hK)qd)I;f_+|%6gvRGQ)}>LGG0oy%Gs!6~Rcj$KRd@<#x?SH*XqEshpV$pI4jZS0 zmK&i55L5V*=++t)dl`(-L_7eIhlVgnmd5zKsy~{5nP=^Zz3Crm*%|$@gaW?LJ8i?^ z{X_Hw_ntoNDTy+{ZGj-pd6CthK;S)fz6#zG2jxa%NpuA^kKUzR_~LOQs`y03|18`6 zJ1AK_E_O&HBqTW4(0CxDNhAGb(~}6)^z@jf-mv45I)=Fg<}=NN!HjY_81WTE%^y-_ z7sc-Eq`C9kk3j4zE>e0+vsIRt%fL$ANR5qiV}wZxV^zBzuW-H{rp>pHY?$W`QWEJ- z311X#5o+F)^-#HQ;%7mL|8bn1MDD>#PAV?65pzZ}U_0PyA|o>zu{)kRP6YvWV`NB= z+`b@d>igS+{Uf#MI=b8Jw~oF_4)4^H)OPeZcGkQu_%LX7!^P6`y&r2($m{g@V>&~^ zzZ{pn6l+uhJBej`e5v9@q4s^>OrK6lb@QRFjtvb-w&sfalpO#<*Z5#vqfnx2Lko&? z3u7dcI$@rkJ?1XGz{Q#VhYt7pD2-_se_;)cC!#&IpDM2X-6*Gms}T+B49bA@U~XQFeGJIGHvwwj z_GtR$d`W_zxe&Ki;A4ASih7QHRqL)47K4Y|9oU-lHK6)prN&Q(#Ns8e!D}O3}$prBR{+#O&`L{+A^^*(c#xBrixnZ}{73A<<0{W4TK6TJD$uBBhxUoC(y;6DK zkR&G|I_c1J`kApt#RM6}^$*87zj@l4AX`k%Od1QumgvYfbJDi zmyokgI(6=#?+DvIT|S<<>OUHgedISIiCq+nYF@B388?*`jzh^FSrlDtyxfZN9 zckZ!8l>W`oXC1tdfd_kexn2+vKn?B{UDbi*QtI;Pj^Q}{B*=A^7Z3(#nESyvy9a{- zaC|tUVRK`22~P0c?2r0R`NKhRZ_N8sq4<{@WoIu~oCcJpsR8fT0=2G&To#xcRE66+ z?ry-}&E)(PH;AVq>g=oOnEUY*oxPmq2KGGvhQo%Er+&>L#Qjb$Up{zuXLsX_;Q9ro zH7C>;5@>!_%|?tsr3i78=>H}6L-_M=nX4B3C7qem0v;kvBs;jRH}K=$VqMZH$OR+J zH<1(rq4#_G2aqQ$xP?2M~fhx z?X@G(nt0Q~fsS)jr8hVm#P9gxcKXyn+SB#bI&8+3{5o=wl5vr~)od{B6khL#xQDP; zC8h*+)?y2KJv4~v{NYJ1FRrPR6Y>oe1Qdt)Bx7%T27p^BIQ7%1I{%_#Uq08E%D$N1 zHuaM(V^ixQZL6frOG>o`pPUxr6=gK3XOs z>Ynji<+HCaa`fXh!^DY?A|a!dG5g7w%5^p7>{R%oj$5EUsTXtBUdf)igP9(sGsgms2<=#GSF zzyhZ+z8d`8^SuiR)hTbFcmS+u375Y1WOI6(cKQ@C*+p6q{7%_FeBP6Wgcl0L-HCN+ zXN@%#@4iD^xlohJs$R-LB(>i9N||6DjK^M2ftkRLf4RTXWG$44t*R#dm*(#PWilMY z1YR2c0FmK=Q!{n>ku)pfW7VfzZn z*M5L*3QSx$1pe34=rhBS_wd`3V=1E*(>vHSG^x{wh!#8f{SbG`?;bu}WlnDi^fi!9 
zj20za9eGRps^4?cEP@R6hxSa_*NzwAhjzucm2==dWrO23HyF30Sl=pKgbdG5F?o3ys&VN_9klm?19V#$u#K(vNdKL=rvbw zPlV*g9uGui8TalCbwtVMZxzBaT*ZB$rC?07_?9LAr8>};P{kEWZhrb}rlllqa)7Gva#gx=J?`S)m{{0s~-MGDYj=y3r zFsEgOPv!9b4aMnj+Si6_xJw%Y6Zt*)Gj6!!l%s9=TP-Kd3uJ~x8XkMcsg#G0=-+oX zDEB5iZ-zaYB^$yNbKdNH7;~~Ee*6SzM}LBwWys%cEjAc&0=nv?NOjdj`;WI7GMSfg495|>d)(lZ_H$3&Z7ZeJsmzI!3x0f0`+ip;FUqmR8)1)T zB3RNhfwFJ_u$RsFYqot>*yr}s}@rm@jD8-M5d1%Lytlk7QxwiR2WRT zcb92?+7O?_Y0LcGle7GWf5`|tWVk!?E?=|1wGzN;8{-oTdSvNSUHCOQBf*Oz@LcSg zeJT&=>j>GKx_sEg)uCroRUHDQ2~Q;F-W!CS4O$p$0Y9dnFHZDed4#6+b9p#l3MJb; z8fJcR<>OttdhaE7O)lR37PV5{3cRWqs8wUiC@<#lIak)(7l+nCWS_3mOO)g%K^kkWoEMQMxlT=p#$-!^T zApg4LI-M4FVn^c!RT?6}+?Y01ZMZaBsLy+9+EQFe(U){M`dwi@F;e%xTrB#zkP)Al zKIz7N+4*uBUL41uG6OpEE=&T*ZG^#~6GBCRyMwZ6f)iUA!}l zYldERSaQSWq|j)9iASY8HaSqX3a5hjF(cNat-mjNwaAh;bh%wf=hhM9^LaQO?J(B4 ze|s07mH32=-s4Rdjt~4}`Pp$cAoCHD{TXd)vEW?-h7?RyjZraFit24Nc7Mdzc``S` zM;UbZF}{KiP7hiRwCG5*yc-_S5zXV{mEpA%e;$6uf1C>u!!&&(SclxS=XB=wAWBxxckpJ-M<2iG-G zDmH;a$SmDVt!|CJB%kmdACWH@#k*1H1j!U8`G!@;%%_#l3k%+HhusoyQAR8{Dqw^Z zz$esv5~?`d+To>}YUGTQt7;4@NR1K+HwM0=y=05+i!{Ed{D#3M*pBcujm?CW@z==G zjK@B%hD9_WS=?Eg66d;Ndv~KWUi#!y!4~bQZNn)i?Gyq8Zb<0S2d(Z{ zyC-gi(=Q$S>K5M>3BEjT{KKy)JKIQdd z(XOm~JTMb@(L+O!tLJ!2V_B(pl!zNTEuPL^C%ZGd_x1}d8S1?=3Ate&q;Fm0jpjMz zU@R~0=!Plw0?P2ZFo{(`t65>JZLsQG5{w&v&9b5w~r7YXX9i3e)+jr}o=^En+6rUG}=0Oq2`m{gj(Hs!9gA~_QOLIC~M z2ZSyjYe*!Pffw-y!8m@KaykU|4S#=sm~y&KlkzV#LVlZbO~kRL8uG-j-io0Yk}g1* zY*V9e!>3{|t>mMFCGFhlTWKMiJ=O;`+_S%p0<5r7R-WGNe8G6$fxj5>`|=Te5Y5A$ zA4X6QbFZvoRS{OeZL4EUvo07uHaHb>=30?v^Eu`n>f*w}^TF>$1AD7nzlUWRh(#Ke zq8F&5ZbYAtLBKHMC+4hANC!o}BSlG?Sz3^(;ed4IH26&)<(J&B2S}PMed*>N@cIUV zZ(hJ2(N&53=kc?ao|cyQLrY%I{R;*G9@@Gs3`@$HR zuvpH?4Q5?cH;H9q%$>Nq%)~x9$w!RFPI`Xk#lA!ug~Q0*JZ6c9TJcZjk`}puywTGa zvW@M5gm6F=aqeD3+>(iky-3b`qI5}PK7O!U8s4&UKgP)6Ns%W>d~?i|5bA{YzIP1s z=IQ|KG`U?cQ%1NIeEfjSLOLYv3uj~OU(aA(^m$?)kVwV&fpHgEw}_2 z+}%lV_u%gC?(R;4ySqzZ2oN9y8Qk6d&3E6uwX5dm)TuLj_U>M*yBBrqtKnW}M<{Gz z`Xk?Yrgqnh-0e#Q0GV*h8>YeI3-x7REWNikH9P%BFF8Ki5 
z4szP<5e?YTXVu_eOT>K<`o}7~weF`ekdleQk1fjt9bbN`R*Xg}MDae6$YFh~Q8-9; z&$U7hf6-;BJ&An=u46KS|0AH-xb3fhqHV`^;F1CNRy!QI zs`3iMBMbkCyVJ5Df599&V|z4#(XIR!EL?vq{-jEGJZ%zOF3&L#(DI2)sU-?sF^73j zJt*3nF^19W^w#JjH5iyCSB!RqkA?U+Gw85K11_OU&;Wd~@ z242}h<_LoFYMo0%S59gEPyO_2J=n`|{aF2tx+D1s4Gig%Qd`U;`Qa}^l&`az8i zh(~D7f!*rdFSeTXV`F0izaQ#ljF8D~jWFo0nVtEcqZLuC=I?^-TrP0hp$(rsX?rlP zIO|hW0TqR@3n6M@+YqH%^jahhN1qE<|}-l z^nn3PQ`&x=LAP+i4dhV2cEAa6{Kl8~&l!ZbN2gO~@GM@2Vbe1=P@Lq*u#2k$7?{2( z@q648ww)ck0(Q{FhTLMP@E?^)If;>0o=`y)fe?pvDt)2o-{n7mv}XfBnSlCphcC-3 zl-1e<|IlrK5oWU&Ht`IVvvqgVnF`lQs~dM(tAc@tbepdMBB*o*q0RmlYl+H5`%|`K z@U;~pJEV~aStiBz)Kj0|DjO5OQEG|Z<{f{R9G>2-~5P_KMK9Bh9F1BVHjMfbG zY2R{3(<0O}IgZ(n<;*!D0{_o@3(=K^R5&US$v(}(8==Jh9BfS z;;qc#hcJS01m}_#7p_%L>Hw=ms#h|>n_fo5Ln<6m5p5Vl7m>TLP-Yp5g*C~r!A=o^ zDXr^YzT(h~v57R-ji%@yuFoqvAR zbNZx`Kv4ooFc|Sp1Foi)mPIGASe5MhFoCYNM<;LIn81`~avBgAs+p$hIB<_}>4&`K zo9_wqj;Of7gw{_LR)h4FV58jRJGA*KoBS#mX6|LYV1};kvXzu^>&=xV^BAlZ`qSST zm2q1OMY}IVU;4uy+5*WF_dmQKNoGuj>WY~zFeSedO3Hs3eln{mcH_&SdiPzU@7`Zk zM<~OlyAPXwG=Ie=b@wr$z&gjraSmQa{^|!+D8`lpeT{*C^=AG1J1)qc5C4A^KBkyS z(B#=+nf2(SKHF!iEsy^q&`C6@PTeCP1Xx0=IUgyN&FfMi6;B%30qFowPWr);lnkL# z6q63Ro{-?i+_@YhAWO<1-qTsDe}y~1=X>ahgv;-CjNH@6=n(VkBy+QG@Z3J~SD*^qg<=R^FZOFw=& z6A{}4!7^J7NR#>%A6Hyua4G*6apL7phx9stf^+MA+z&d{{%i!uPr&(swDD%3fK%q^ z7?Rm1gzhiERHu^kB_lPOR;>e@-rhF2kWRoU6_WJQkada5+Nb)^?ABo?GPryYzO-)x zx0HKcUR_R>YJ(3)uVnd=!Nk-Vt_}mh9>879ykkcyE!XC#vv(k>jw7l_)`5vyo6E~v zt}*3>Ye@sw#Fh)fJqT*E-t0w*q5D+`M1nCm-1c4W`i~A;1jx{YXgn zS6M${LhEJNO#Ez7RKaDHtNWqwxWtulovs(5LgIcVDy)a|dgZSJC(_St17 z+9gzxwLd1Hd+d8&_~o5y!g|EP-^2sm(7y%0FtdQJmLj*6k>({x*d0?^G7W4ykx~#S zbw(eDm44@d8=zA1RTF_tvvI>km&X46a|W;qA@c*O5SzXrmy{ZcwlJ7)+$He|&?9g_ z|3Dz}+%Ir+@uXQwZYC?jOPV2)ANlf*-QzjwjjYv|;#|pMU9D&{eW6(+ybJjEH*NV~}N9t>AQ-tbm75dvde zg;gZ8Sowpo_`bqx4yiI5)s8OtXXeJd9n2jRj=KvLc%L7(Udpy3gP(=44!|Hj0@E++ z-}+KqX)f05ho=P|?7W_$1S&~anWkR3z!ra3(oN#KQ9!BnQ(j8iq@f&09T;XDDVLbY z<_V(p|Gp^|!XNv%Ey{Itz0qdS4C-#)c*N$AFe!zZ9T4C?^OE=c1qDwcIi=u*yH;Wt 
zxGuQ&?ZY(cIw9X$z7S1|=;~13#F14EhWmH5`G8>@$xfZvnTJ;1U6mRhRoDyK23VPi zxWI!}feCo2xx*>!vj1D2H=v|oQ>hCXBJ@iRbkyRk6kst#urPf_RPKdd6G6qgFV_Ps z$-q2G4;p?=CvSPva*)6Zz}f0M z?~80Nv8Kge+FfNmw^fD0waCqLBb*Q*d1$SD6cUO-NN^=48d=Jh&G5aIh5W=jaOR3C zeP4xO4G}`ttq2KodGL%GLm#HiVKB!JIVGVEjN#~N0wQl35f1i9wa~((u;k8ZpBCFy z;3MT9)2&5>OHey;E{!s1Ut#HTBpb66;JTzl>B7uSk+SPuP?P#1oN%6V^$}eiQhwQ6 zt+YXH>0CCgWeOH{?NpUy-N9b|g;v(0RLqosb2`g|q9yl6;G6!gDfIk`61Mj~KOQ|D zgZ|7?fz|X6BD3+&W6(EyS{UWS*mC_w^1Er~Qe%tW!jnmZs#lO2sb2TsnU=gTiUj(D zsi)h)mF|7-DU;P7j`{Wg6iY?=+{|FuNscm+;#0DwA9~|X*S51juRgoDE2A)Soz9?( zY@{~gvi`TDu72$1O5^aMPQ24M?jJ7JfbR|F?ZYPcnPvGHb*Kt!RL6TP?cv-HNNzSq zZ#6V6aAN4?4147pi2HU8Tsu+b|1|pX8|l&#FCJ}2NUG=o=FVml{#cw@!;2Q~#F&Ut z`{1S9<~R9i#a!sO0Z-SNKSBkyon4hLCG#^SS+zpJZ_Dd^cvD(z+h<1vs=FBk?>ow7 z@n+S=aleHgKf7F|pD7{|j8aI+^`S75TD>fHZI#~@rt zSsMDK472B}emudt3XQID%yq2&yHTlKb{+veY|Ya(HlYpsN3v)IQjKnF6@I8mX7PK{ zwn#wiwfoAw;^acO&)%C+UsD=w-N<@^t^O>V=hnl^g@m3(h|yYXN1N6s44)T~=XA8^ zcw9!xo5EthJ1$3sHKC(jTN^a+8;t!cfjr+I#?diN-1?2M%;;UUOSeYkiJMsc*tTGc zNAR0nHWb`WziKBf&%jsGQKOS{m;&k`3uN+O1H{gi=p~DZY=o?IfScZvJds|%1MicC>VpK~!xJxVrOALxk? 
zI)#MPfEQ`eFwp-aba}&NdXR0S=Z_V$U%7pPBx6Op#gnvIr&wpSyPI@kvsPuzAf*9y0dTF_d!U*DwwxyO`VNsHf5GLqC$+p`D zL4febL`Y$Z1#W4exWth^eR5Y5x<{aMPtwhOlsi%oxYKIB9=B;b~lPF>PFjdc`Gk`k_le6Z7t^jjx+$v5gAly`0~<$+HJ zoQM$rxf?-G2PgXrb=~@=*h(*57~ql|S(1t(!%|mQS97c2N$tQ3bBh*fS~6LYaIgCW z8S2~R_<$K>8>JM&>n!VtkO^v?1|e$Yv?dJX$%&doP|3SM;GP(mHU_!42#|ba05Ngo zB+u5KZrFjY8h#gQ-$-&GQya-|I`5B(3{OlMHx;`GF%)m*Bu~C4_ThX;tWyQko{kA5 zl1g}Ffi@dzC1daddpV6jwdJ3u#ZR%$#0h`RF{+!vD)W@dv2S^o8J_UoR!N?S&Mr!w z;Y$}qAbhUj)L z$dmti`szx}GUni5(qZiN*#d&hI;vX45J zwh1zLy6N6E`N3d-?J5z6)s9pN|Kv_s^B6&40d~B(^gzdP4K+1pTI!HpXs>Ee}V(JO}$5IR(Jr zgsCXzaQ}2P9p&=mhEcW7f)7tpGCunpxQY}jKH=oms4l&n5v0hC?n*d`MHKp?;^e1W zhledD1M$>HU1Rq38O57b@{ey0c?dku3)%rWPDP!0UL_wm6cB|L84bh%eu%7Kg_?}BfeUaev9N4D~xcs_EM%}m==!Px&E6s~Fd zb<^R|{c_tJ?Np??x>_shT8|;(BSjqQAq-VDi2=iK2S4hanY^$!TA1|#>&ZHu^$u<~ z--PDX^wHAGnE)3T*KX_v6q$H&9asW4pnjg~V$eCug8JsiI0Tz#+trgf*Q;U-Q-{c@ z*-W_MQPLk{d(}*8iiKy4KIB@>;n!fkp|Y#OaGjwr4X;PnM5hHaD(;ZBTJ z`Z*eddP#=4`~i%3-`034w5nZ?7Vs+TPS&w-&VjZdyt-a(+)!+cw@ROL zOGdgH3$1X1@kQ(2$H}PK5fVVskKwg!>f&zf|lA0I^T`offGR=*hUL;@e1WFKHcci1|ld7(sM&3zI5tr6$X8eT1BT#GL6#2(mwOo#od z`JY4q*`gD;%aSi-ogPEey=Q_wY3mITMbO8QkDMH+n1krUHtksXbB$=n=cCpJQHZkN z3km#Z!ViBrR!NpQD&fhRI1!(BI^nTR%oKH z042DP##e-f-&XaNi@0R(SP9;!%6QD)r)0U}wTX(LYO z)*e|x;4;inGcA%49IE%u<<{W1u^T6~S)#MttRxX-kY{s!Y2pMTBVi00P>qKSnGMA2 z_8WO3>x830FJg0zR4(ESy~86vo))=&fGbjPVct6r>iGjEZ@8Y{H0BF2`-5vzWMkao?5tF{}Pk{lDa2aKJrpb{Jo?863|Bvq+7ZKF^Ese zbh;VP8wYRM8$bD*v$#R@8SkuN-Xifc8n;qa12*W-WpP`#AtD4jsJ3|zJjbehuMi_r z&_w3UhHYf}^YMw51n(6oL-g0;bsW1tr0S%>B}fITLtm{F{~h1~VQ5p6JRss+@rio_ zH4miVFS9Wxy>EleS=6}jPk zA^w$@uV#6qs#jE}IP>LA_V!cPys*W;oy!Wgiev-c;cJ2(6GXlHEu6#Dby=&Ik)YqD zj(TG$>az`Rr=3O|zPRSvtOE;-Q**kG8@IE85V_9WQN^uryT28j~_Kl9InOWW5rR!_B}P3FNqhC$bY;c#qU zrYXu2zgFS^G{3+{WbG8EFXzb1I{}vz_eX;AP#0v&rTu{*ta{`F7q0ttE15)jvVLuA zg+n5achYRRXixOUfd`t$>5*nWHIK|a)xCCizinpRnLtJginz_7`iMd}Bb57xt>-(n z{>{L zmcUcLa?`=6Nnh6P;CG$|CO2c*iyJ!yqvGRZkqEj6q~&IL%wPtgd_;@98)iL!-;ai- 
zhkJwK!tkM~!ei9L*Qc`ZDvrTxftihqjSK>e4Q)P|?=R%C%&^h<-9e5z4S%)%$cf|F zU6YMze5@?F5$};$uaLZbVb=I@Z?xn~knr}mwWzl=(RBPA&~aScr9LNRzesd9!Gj3m0g@rC}9fo35MB*rvuaV2pIQJ)Jg zSI|>npw)f$T6U-BG0VYP_wqqAh}9~yb?GnUV|k-{ICpNU+sN zr^msL`6%AZVn0Y*Y805XF*ET?K)f8mH zEU$^za5NlgxvrmksO_^Zudom>k*^;_5ye@5DevX5m4wiL-+8X-! z%_FO}uSXb9_AT%(5kcU{DFWkhd2FFhP7G|`KoSCVp?q2&hOuyOG9vya3McZA^L4eS zSVR#)nEuKo~NP&*^%%R^|QoHL8_Z@8Ccs*Dt7N2LswE)~p{?f~28(J4;4f z6+%f*=bo#D`AW&QfGl(b&s-O7Y5jqATh-SY_ot_c!e=D8ze$4!0KHN;PXbyRMMi~& z?$B=gSeNnp3Lv?3(0lMv))ir^5&0I1wE^f}XABjkeGUOIP+xxuT9;UKk*fUEA8EvC zSUatQgn_7@UiszVaM<&iy>j$@;nxmN1md4d(;>nGsD>A#itSa%FGIYa$0wZ|cOgVcE^xwaq#oGs`=QMlz}QO*^95Jr8kV2z zP(Nbwz@z%8(|aAbK`l4l`w=wm*^@K9{j#BPbG&ccl@~HJ%RB|_C~sO@w^%Tn)J(58 zHO%M28O_>t~H0Ut_hcye%!gF^m?qWyd z`b_#!lFjhmO;G1SbrUi0=_L(_?qEeBJT3>Wvs+J7hWNLg7M6$Z0-N%cNPuhc{Bb z=)~>rjo{w(q)%m@7JQ&a$kO+dr9TZm{3BBQ??(jMj7PDA-V@WITH#1>*E^m-@du#j zSy);g5oSh`b=$2K!_R1h9BI?Xg>x=7=?Y6CCJj;qhk>WSv8xqy7NswWec-pZXaA5F zS(CZh6a>NiB4XBNnZWrZrG{p|kK|R!qcP;iqT3t&2hS|Mek3Vj_?_kqS;R_RrIjLO zARvvOxLIDC%*rNB!(~Xqvu^cwhz=y165prnht*5UtiN{rhq>i|-i)ui3#eAN$b9P* zWli0w0TTu=x9OL5af|fhkObM%>~@BEHxbtPJ-$$N?T=XUEAMIp!K;8+L+i~j^c(7* zEZ!$T&I4I=H}Cz&b^jq;#>bc=B`ji#nB~7Vdy#M>tqMtRK>uz5Vr7_)RR^y*(r`hpgw;}Q(RgP%lSp%t?~EK-q61nICFU1 zk=fxh0~E5Nkba*Xpl^FOs$)NVYZhYSHPCU6An+_KjjQ6`TkP!qTxfNyoxg8TO2PKC z8WHEnV|!`NhcuTsJJmUJXSswKd{I9j4(=rTP}c*uQV0E^=g|p;G$HL$ zh+gL&NZ)u=Sf_Q*@g>=Yc0rW44qcx#b{rOci(74#U}OwGf5Lpt zcUe?BHPw`ff4uGRQn9cq!cXQYshP7T`u&){UI+(={noIP!imDFFHhNMoJH9E9xrz? 
zn01x3~`qsebtAvXuakfQVwD*2w#FdQSBt zxZlVTgw(TAmB&I;!CM+5=ik*27pn(5s#h~Y-<+ba&c>(-oKU6vYwW;Ajh8g5SF-blJ*^#EFCUFlV)lCpoS=r{cy z>R!TTmDp+wgt&h|`NICs17>y~g|%#AcK#r0i#NIJggv6%#aV zb*}B4af1G#_%Xngi-CD%MMN%HqPi5GGtXCEb(-Gla5vhQn5O{8wEA}v;e5-iq_9X( zc3+YC6E@`#VldNm4@a#6c>*xvgxAUyd+8|8x@Lv~KgS6Q_#1iAF&+}vOVAUMtd}dy z>{bB#gXHn%OY3mToop|dpk=sGU2~`jMLWMa3IwWG z`&92&*L9%K#~$#!9YL^<%p&(xqR`hi!!9d^DjCN&Vl7k-S=lPLyZRNv8FZH|_bkQQ zdDM_+XS`%rsBkBh?ZP@wti^cX^DIVg(%S`>MF4si&q2d9yjYvT{O?40Zeev;$E~D+ zxGE$5&F9VIF6kPlWc6(>4ubo)H`@lhrAZnxzh?9`+OvP;LZl9Kbqy!-x`?z4^qNZ+ z^xhBD)pgDvx}xVj3Bc2~#ZLA0?v#(gv_Ax+C4D1*$YMGoriMmTPx1$wE}i+G_X}7}Y1!8IeHYC> zSsOq$dN%5^NY0zJ{FCqKPt?<4x3if0PJcAwcpUifuyJfx!PC13;aGd3b-)2pJye=5 zu3l%6?30k2)Q@1?(#BY2t|-{VI1*lmksj}xoH%Hg&^Voe_u|Q3O+Mq8@1!4e?gwRS zPN01oV3G)+sHGuhv`XOzD3H4xFlrcor6N5^PItyCCoc~&p-M`9&YG&O#c|QC}!RFbast%8+ zl!JsG0-Ip;zR?l?#+vE@(jP^B{)tCfHfRbqjfEVBd`RVIj$ocx;|_V!vMdDD3bO?6 znw6N|UvbTfNv%`^!NK4kO0EhV>arq=^d?vSn|cEm~z$ww6)Q_B}08PDHTNd+=ql`Bcl`1r}! z*eJ*Afdw~0OS*vrb5|U%7-|!tBi>N_jwX*@7$^ds1OL7g;>Ps+EbAqeYF{YcaeJl5 z6pm_m2G9~|9IPR!j$Lm$*WGUQ4A?;Tm``8m$*xW@wV0ccCye$!&An(}z`g`jr$3VU zoEi9h2QI(8Ax>#IrVhGiu`?QdDV}^1oFCN!Vx9E4XR6rVujfd%8UBA%BkmgX)K!Z7 z>m^*8MrKb%bl|>cQV&Oukzp4$Pm<~i3LYM3_-_ZSn4Itw$el<%X^tueZW^B%kqQJ2 zhZ-0X<%>qC?w}p{o!f~0V#rd?nZmjsBrSXW)IhmHEX-i^vleaN7W-Lp%RDY~kZXme zB|l)lrd%P@2GO$&8onS=e85c+f+`5xpBo1WQMa~(c4hd{Z7am8Ngqo6hEImWExj6o zd5sWqKL+8Ncw2eR)L;2n%u`_(CntA#*LR5+O!CubIPDC#cA#RT)*)kIKEyWQtYO`tFx-0k1&U`e=+$U%!2O%QF3g$H`^1^-;=A(10iiOKCieB?8pT?LMh3>AWYKlaI~SFZ!o zo>denH1wl=ap>qzT6S$I+er_V?nBvA7?HRi0`_-PRWeAYU`UNOJm{vhRCaEckB%e` zz!`ZANsT)#NfrRbFpy28ngW=Q2-J=7w5`>oFHO_5P&O(a25dO5F)Ur~|Le+Xw%|)i zjmaYuT#{Hidb7Ri>MPV`BmZqlAnffzSr`qc!Z~1ZZ8st2{PlRSsDyUoRPLNbFWN<; ztK(Dq;H*+62E8(^I+y!e({S9T#Ta|bZ-H@SxVfK$j1Kg20sg9G3j>u`VZ+g9G>WKc z=s_V%=Ljlhk@F6=q~?AJyhzIli=KHr<%5~J!@@Lvymtl~ytVu9O2bVjiR55+_j;!a zrP3I40v`RhS{&YQ_rB5kj-!$$ygh`f6*!I9(W@2=3I3jcT9>8a9k(0?T?Sl=-1v&f zG(rr~YW5Odw;NTIQW0^Gl;$;=2>C%~R1e{FYE9=-s%}Kt0Tbr02p)~d`hVCkIBvhY 
z{0E`0Nmn4LV|SR1bC@OYILPEf0BqUX@yCziFq%AuFBGS;h>t|Uvqxk7Xuf`ztRwdb zkACcrPcB5yS@pQX4&OYo5+XB1M74_Ys7AL+*>wsZQY*txohXzI(n5yaQppPEIzu@o zK$pB33Ry_yHouqh&>dSsd6Qt3#)UXW6Boo2_*9kp7L+#nur-Cpwkrf}_p)JIZq&`T^ zZ?`R;%0NI?@0}OI%yBD(s}0DR`@<((j78W*05}tc2{pB~Z-M%`x>!0fO<{aDZCr;M z0SX?%Y(H+eICfwHGh!P5mTf`#6~JB4B7Q48XENb%?B-TlECGBzhSuWw|nDu(YVyL7B_CftCx|s&&&Y*=Ze^u%1^AGt{dM8cVd+Ogm=HL@cL-0K@lTLZr znRxSqLyXa^1iQP-9A8~}KC(4SBCRGhe4V1R0A@kh{mt`+s|xY6KvJ$8Re5>5*2XLK zxO@4QV9yQ^kGmW}qus6gU`dz{!iQ&pID4-29? zumR%E?9IccfS)n`#XK4zN|8bQ#qM0K#g+?Q{e`-tTf`2jVpwLmWj^504Jvq5e#Ul) z)#Qu)Oe7pdsSpJGH4hptmPaDd6ryl&nVDi?p z@x3r^+HL(_!byCG<9_Dpu6xymuAUULzcLpa$1haISw@S_2B}fg6>wRPhg$@}Czto5O?7!LL5}Rg3_*dWz;o!}&DGAmAmt>E zsn4NGa{T^^=f=VdqwqUaRV+J)~1AWjZmS|LeIgeGu;y#!zkhQ;eoa|rZ^J4X)BM4>u;B^(;zA&?q#yBdf0TFWV` zw9@DAJK1W&fa9?aF~=E!2l<90+kY0wbj-f;^T(1CtMB7p8Ofy{#EE2j!(?YJ_YT4U^@RA*@%prv;Ow);AF0LeczoV zS#{&&Hpp&a;0HL_&vzrigF@Neg5D+z@Fg@S4D6ut=}k7c-Tb2_*WLeCB#+M4vm-n; zTz|3nF4m$gx|7*Dx0jgSZ~++!WfdFFrx3X(IskoqN8MpT14R;~!&>=DiNL~qI`nvh z^$seO3&PX%8&rx>_=C{D+^E8F&aS7hWjj}4yE8*@C^337AwGr20*j7y)1WfO-rIsH z29O1+?vQ@aFt65;r(J(@WFEPXBmf!15G969{2@dQ-x2*HC8E`_Z9nO~J@9FjoP{RB zgEWO+7`KU#Wj^zYY>9+swoIg>ISDD<_^%_ogWylNBUYrzuC~~PjWQ=PhU%K258};p zEzB^x;{3e<6Z4|#wonP23iiNPH7l-ob)Piki_mvD1S`hU?|uD&Y1R!LZP~Dqn66)> zIS~fiRRpg_uI^^%y{-dK_EBd3xeetSv)X&9ew;|0azT#BkME(DyRCCcxcC^zpYY>( zoQxqgoBl)ojm>D?9`GqgM*v?5x4#N#S@U`o<)JLE~5dLu6YmL_KL zJA*~f0S!?XA9XtT)HSFYP)DI01L-&hbu&#sb)q+=+@0LV;}6`gE}Yi~JE%IZV3TY?>sScFO-6vF)m0oP&UY zZ+G)6U-b@5@Ym__kupQ@rn74FAi?{EV#W<(+q8C!1iB1@h+`q*W&c1!&Fr^?hk*h- zx7zq3A(3;4c45Ersqh|J?eM$W9w!0t;VG%Mf_Qm-geYY$(QvNiAv3@Vrv*t1i#^Am zJC#Jlo8wEZtLl<1m<9mSwABc)v1RMwx;R##{WvK<8|8gCom7MH>hr46zt{$j;ql-d zQMny@u{HPg%|qN4A4dpnZ%@Ce|M>Ko7KGgslB{21{1%hc08S*MpYQM2hL|#jlEaf+ zMW<-=L7w!rBY`0{-V?uP5PTq`jL-F49Y7bc_p_EO2k$d_le-aPPkeg4y%yX9%KU@b z89=?Seb;LvcFG;{fAVVMJEP5rr!Gw)ST{`IO7FvimEaS~ns)Qc4>Hh=XW{e34E4s3 zuti?n^J4J~Xs?CRBc}1I7THg(`kw46f$Hnr$}u8I>pu7&Mf3Ss>9Dl-sxx6AoTp_e~blFj8$3 
z3s%Stq}Efx5SDgcv{5M4VFYrveZSIO@S?m`fm&WneXoc>8|Uj@AV;7#%CcTHy8{*@ zv<8EH8j?odg{HLn(Y_4TQfL_%tcM%?QNdxsbg6PU5iqY7v4e+w;U6c>5`AA#B1&J} z6lqG*;5JL)xm4y{fW$K&;^fJGexBZKS9Jn>4H|?#SD;S*zbt?(43vauDT)))qTXz= z3Y$>ouuxc;8>PkZWn{83cF&SDUrRn!CPQX$vpIAz`KIL!sw#-H&dR(w&&|8{?fzeX z(W`Ol4Z^lH%h>1YGBeHFp@gkKAH0@Sak2-4xbGr(LY%7!9QLnCtwCTS;vP z#%9u4u*4$p*T>XZ^?@QoEiH(5Mj8hqcGx|k{uC8|`F(vkTb!H6g8AM~l6TvvY=P1g z)YUW1vN(YI4<*Q6fx@^=W7LtZ28!!s^uDU4}b0YdD38;?BcCYpy$yJPH@1h}@{Q z*rNPKgyae%-Mrr6?N6;?;CaCu-O(jv3Wc(Y4OWW2d;qyx!7ke#;#Ix2F=DC-W5yF| zW=FmE(_SX+M7uFib&(Y8Cdyc1=oq!QW+|dbqCEbE5ydS1Cefx(!z||GeWlTimK_uk8RDJ> zgILznJ&Eaqr-fyyf5D1idC9g3MMglZE7_jr{X*E^Iy4A9Q(hk0KudbK`zcm*i|T>) zFZSvTbUnXj+j(t4BIR-CkM-zS+F$~KZX+E~e_f7?6T9{yrbP{@Q%|ZZ{8G$r!H9HP zz1yv^#tH<3qioA}eirhYovzcXSnsu9#Ofew(iboExfI05&Zi7sZqHx&gdh}39RB6X z$@EWE76EyUv2yqEzg>4Uct&0&!ovzg(4D%o9rok`>);rAe!EIB+Pj@YN>TRmUC)$d zFc}h<^w&>7>>ZixD#K)zZT*?9j?Rl!9U1fSpd@t(iLW)N*T}x(IHMt7d5%!wr1?L@ zgADkU%j?L3I&CFpH`k|d^mIo>WB7I%jO6edklOd2^Se|AvDVnPDQ+i9O0cjENw*fAsr$^~HWFq4oYyRemxlsfD#nRT zg7{ASr4yl-|2Vjj#8*Ud@X98fZF&H>0`*mf-1~kg9F+t#t~*I#SFc9c7a@G&w*_y1 z(%90yhyol<+JNUr++Sn!P}APC}LR%mROH&xbhWNa;q(hZ46~ zX;VB3VL=Tj*a#*=8}%}U3uL9DG0qE0>XYH*Z1?X2b2*-|97`m)RWl0sF2A77`BD!L z0&QY`N4VO5R^2o$p5PnsTxu^cnK+Jk0oid9>zkX8-JkxwDV*DNEC?ARaLzpT(P{Kw zy`O9DKaxp{P)J;hjJf+P&Zw>q>bJ^jMtt%AMX}Sa*}+$IQ6kKv&4xnYp)(c%?~7`( z8x(l`Cs-&j_ED5xHjTUCqN}UK@1;~$J#UuX^gldI>zOGeHQsztC7MdvC8MC~yQu`N zUMG*rN^j)t(7C~j#mx^2)*UWdiyfu)bAT(*y60&s!VgCTi;|L406B`(5WEDJe^c3+ z$*Sepyo$@~*BZVwG^r7DXTpfctJSun8`>dV2`sDnG;ZF#-wAPrxCq$zYE)`^Kp48r zeBh5#Z(KQkfX)5+2KQYBI4qosayrH2zZR1efLXgil-}QO0bD2b_l;e*=Q9r>Vd6OK z9w*|%@Ngu?#kv?(pc4NX5*B-36xmsK3|P#->l0_lLeED1j~|bwSVd=+wPnmmCt2q0*OqNj zr3Rq;8q*qX^Woo4j*s6JTz~mE-WD>H9um)Rq0^F!)-PR#K<+N&7Fu={g6SW&r91>K@-GMIN>Hsud z4e==5V%6uhu4do&qJ{8Gpl52N2Pa%ZZ0?teQl}Bt<6oW>zJ{iCI~m_E&rqL3N8$`J z1RAGs?w^21Q?ZG%S1+?(#N{;=&Vszfqu&RmxobZk=f4zOdDUdYWTcjGx3XFGPRG*b zf>-$vV)*qmBuIbS5mDH4!8d>nVuS`<*E2O<&)mBlsCa67qjSa{SbJOiQnl_7hg2M& 
z>%P7n?yWZ-P36uqetTwF&w$N<-P(epCw8vJcB)EK7b>emxQ6ZQ8i^N0>VGvewL^dH|<)cD^aN@zJU zHP*JzFry1}*an@nv{Wqeez~cMIWwNOtmF;;FZa0&^jU!{FokkY4DG2FJ(ADEN}mD` zxLXAHvyH@36N$grU?P>VewF*7*TAo)YMG|Fwf?=oiv2{2=f0fz(yxk8XF&W^V$MCN z_zbv86o11URtTFa_WSdB%X|J%4^nmDS=5qT0|#XPtd0(X8wSD7L}tS(_{5b|pN&3# zq=^l^*5ku>t92cP4BdT%X1zxg9T09xnmDN8zOqqZahoLUrz(WOp5t_C`sb{vA?7ji!2iAKqbVV05Mh|3ySzkdzNk z_*F_Q^fFbjLy*i+edqbwXKuB8TZt%L6Ezo8-Wp*0L8d_H$bd|HU;K|tj_WxnbGpwm zBs&Fq@d7*jkZTCSwCCKh_OcL+@mpf55xY|t0>mif_1VmP;NJ)C^CY>#tHAkznuG&%4v7^<}$s!g)trxohfVYA`6Z`-BnaO@YhXDw(ROaNEGDVeo8Gy6}zzy zJ?87GBifSbmLNvZ9 z1Bch?NCrD_F;?U9*xM=3+R1SGkNv@pa=v%A*6#+l8C9y}Iq`2sp;ye*g0TqD_|vm4 zqLl%CIOap`A0SBVVynfCW)jUUkNfNAq}MRa@V@l_N7Fm7$JMs|+p(Q=;>K!h+g2Mp zX&T#_*tTukww;EJZ5!`g|ND7Az^rZ7+7`~^hyBzjIA}(LM=2GQ5`$RiC&fJ!D6Oom ztSf2>fP7n!fN}JqVd7J=`Vi)Ii@PZ0`6X9O?&p!zQpYdD`7~exG=@Fs4%a>t1jqzH z^0W%ctgnkRSg%BlUXub(38)Cn8&q-^nedjtSb-PIbcByGHn zSyUd+A zR@vUgZ!+&MokqhjrWyE==k=+RLDQQ;R2b<|Briy<9Jk)sn3LmHV@9-a*c;8Cf~=KI zBWl&o?79;pQWc z0wbGScB*VMmnvh6&c@5u^^>fvjN(l%4r={04j}$0|KT(dhe8%d@fH8V{4&i1+5hY= zK0AZ4c#4cwbl#w`Sjw-Yl~Z+WCZH&8AEk^S7wc}`^l@#uD6y5KWf>41ED`$z8Jd~bhp4zK7}Cy;f_*TZSAZKr{c-! 
zVrWSk^sY$m@2vT6x&mXQackm{KvLn;5jM%)2uiR2n3LMEv8!7pcPWhl01XE zq1g!Hi($K~)FJn&faVgDuKm#^eENOluz`or8kK@k{gG{bMjI?>ftG)F^ z0NN=nop_c(^>+ezWWUUO7@mi&h|i2-FHV}tv-0#TuRk=i%R9v<&Z}F$80q^>YjUth zehjE(38Jxr8TUElu5jhLeCke`Mpm!M7k?ry0zXjF?Z!oe|rCYW)CxG3;yO2Sf)@Br`%ID zWw%CM4%8oa6W87%(Y|$EI{V&QgAa14yFm2Utx&=G+}CN>_cjvW28mAgR!DAflM@Tr zUEa_)fh|YgM$64&o*eT^tzq7#D@bO%60K28SQ#UcSSmf2_S7ZS_9xM)BXlQ=MQ{zN z89$cx{;g_$&z?OPzPbj{{=wPn1`Y%d(v@mZ4sgTTiB`rfi7C_EDU})B7b}eDH&rPdI9Y zzj}A!D*F~Wk!9Y+kq$ZjslYp*c0%IiyfiUhNmQHsOV;T8?ZOd$<=Ls}ebw>D=I**V z#Hw>wd!RG5$Kc_-M9@N1=B3fAbtjkX7(<56hxaU$Fha=w!Pl^3_i0 zk{Itb>~9NbHB;dwRE;XzO^lU{TRsH%mDZrX0m`n*IV+S51>q2BzMY9%wj>0*SwX+XYBkGP7NBbBf7tv1!@ zb6p>FO)%&N)vo$qjZIvK8eztb3C)|1>k{s@_D}uea#fv4oX4X$)4{x{{9h0vO>q7Y zgY{&-engn5sKzu^lkA2sZ0#i2X+_jIKKmq`Z2|r{!ZvoHMDa55EGa!`-_p|yJ&;%W z8ML`?jWQ})5OkJbU18X=?(A;ar>dyv1n7X)U4Fk8_32HV`@Z{Dap0$E@t01Qd?Nbs z)&{t(q@Q(0#EFb31SLEg&^4uq!_NMlB?7g#acOn>R<4i4B}*wljCJ4sro=IzNoc(P zfyG82UjdHUFWy4?^x;CS#AS`2cP&0RW7{6aT98$^RRd-^odInXSPMnwj-~_2qQ57_&fh=LidKPn?%TXj71fraQ+2ILB|!vs z#MuFS*2#qXzN*TPXyfV_7TRWMkZ>NDPb~yR2Q6&;J@VL)-o-_F^8QlpE#%<~ zB`%RKGnC)J^I}}bK4dpHOOTBJ{O5he+R6tA*STs!vR#1Xz1b=R&Na_IaBj?!0bGHK zR6>gay9sN2K8CUjSUJ?+no%Q(cGu(CUFQ)%dP0cP#z?n^#IA zrCRZ&z+?J8e41}(lm#&C7&zR|217#3_Zl79N__KAH4tKBm5BRPkB=rBFJ=aa;2WSE zJ@1cisyzO%<+T66_P_&_=Nj=#c)KT(z~4sXS;9PO{q-uN6SP98UEUvHiA_=5$t6&Q3Beq8xp|uJ_=|E9G{!v)A zPLoq%lM{3QXT4+hIG)n>ZY(lr%%v*}RmG57i#^F*ACcoEQpeeb8 zW0S#UGlJ{l4iwz=a`?`Xx@05{Nje!kXj2dvWGcMwcjE_WttKQRqVGX`3;^zWYgdVy-@~9`&v@R4_Sh68Had+%41NR8 zO$m9&7aWyWWR_>i7C{xLr^(Xxn*sk6o5>Rq+$rMA!5@ofoWUmI#R*mBaur5n`@L|{tRB`XoZL6g1^|b*3WnZ{&H1VpWRP-_|I<8KmoaZI`bChp5l)avI`T+aOST_WOKlC<=NQYijO2qkuEUKDM48qqZ~=Tp{^o|H#`{-$sw}9)V&0EF2^SI z+?LLDg&u-g-(^j=axtv-)jdKUPb!PO>ol~9>Pft#RXGnKOqR!SMSe~YbmRA|6$yk? 
zWSq2r$cyv%j*QHjYsjKu*?GtHb=FG#*LlrwjIW#KQZ&H@nkbSrOG83AAByL|f)EaM z~gO`Hd7Nh3O}Y)D*Mksaj;L>P+!{eCHQ!`8BrGUy6bw-ApHyeqvSpM)IH5M$JSGreQTt5@_*#%p6sWotwU$Gz>X zK3r)ogjZ9coY#OKhpjEfK<4;Fjlzg*$i=0MvWiperK6tEZ;`77QXRvk6M5#;qeO;s zJUoll=6u)L%o%k{fJ@4Y@3*sIf}d{5wFWxtM6OK5ODEiA#O^7t-nf3l@D}QOIAbsU zd@0afrnz;`>o`$+OOl`As`I9n16Y@(zJk&0qoI|rBcdMQM6HX{CBeNA#JlM8-L+?RUU&|HzBH++k$$aOZeH7$#l(WyHx4VDR7I&HXE$ z35TBpOwRNQrC(HMqI>}EGI`hcOgB&ieD5^8mr{!5F?L>qawSI$53OS7BAEf>H&|BXFXoz^^hg}!mZQOi*d;brRKM)A>8PPSDt7Os$*aN7xjVlc3*tqR6~2dzgny~7 z!XX$bW~ANrJ!|P}9VB0w7{C1})(89Qcs0h`W=+L8Zeod#l~d=iu2nwqP>hlCP~ zK-nuvf@ezWK9jWw=6xW+BhriGPqpTH{>(_W>?_D>_1Wq#bnpkkh9q|jflL`ke4nqM zcy4zP2SHjd2s+FDM+R2Fbm(q zE&GK2 zg&$VA)6z83>5<%|zx&;xwXFM=a*1e`I-y{|`wa1yeJh1rJl!}ECdMP_)$8^a)mJ}g zU?Nr|m}S>#C=1b=wB||HkX_Mt|DwwX`W()z8ep051?a{!7!8FB?{jCfEESI;A#WO^ za@vepEqxttaSaEGPy0)Zbr)3<4^2j-oqhUOz%6BK)e^uV!t!9jU_@LoE-fz7X}3F>%O|*5Kax(IhTonj(-><-y*0vM zRdPx8DhG$kBw}$NPAGEJsX`(?w(WR*aUFNcY`#*vGFH z9p~YnvOk%y+TA*rlNbD(A2kW1&d!JR_U8Kb2cJumz2uyVCqK5wt4K;>fyzi&z9d5Z z9bAs|8*hNi^8tlyveh(9-sCzP3i6?~Rt)ukREhzFL@n9^^nw#ZnJZvz6r6cUB#(QW z+y@N7Jjn(jN^5lDN?m(r6DgY032vtSwrAdG>z{?zjOOv+kELg4Ma(DXnJA*2i?dLC zaZo4SLQFfb6(5+QS0fB|;C2JKaM85?DheNg4{qoeTdUl9BQ@n5vR9X|dE;8j8L`)T z=~uUyfy{4<{BMOsSgd54ztAug9m=$}tG(S%K|Ba*oxX)O`^hAjP7C?*KSffN?lVG- zYz^*&%kD9_tc9=ji+}4)vOR@k67%>FyZ80N(-x|;C7$p2%9xVrn_S@5TRzH__A^=h~ zeNg_>q6a9-W* z99PiZ5Dlz|s=~o#9AA%W?-&RZ&q7(3V=LPOZVE^GUr;W!W|TwLV^E2vJQ4_LC;O;S zAqZ^I3;h9r2e#-m3GHi*=5>hMDcoy$1&TYGAHKE?@mIMOIQejRiqn0^_k<5*WuTjE z$(R@ka6H@BGs-!Z`sf?!9UR;zu63W zTpT=3FbPehqWA+U2n#6xG zSjLa!IESryK8zrpND{eSWI@F3M-Tv(%-vH+ebKD$89)NdRn&kaegjJ~MKq$KkG-9Z z!>m%2n=kJ@5&-;hnsM`vdx!nH*<F|01{I3#%4Z@+&Q>-_c&Y5r&;HnX^_v5gICf$DVqpZkDQP`>p}g01?=2r?APzjBrg#s_itgiZ5)EJ73HR} z22d3frEBL%R&?gBv@pJ}=M8e8lL=picx^H-wk+0IaU1`?!cwd&N-h4QJ*5K1sL!{8 zA#C~QSHt|~QpE8zKj}r{O`E~{-?vWC3F{?k8v+qZ=RW-&MoLj$F;7d((?!knm^R2XrN;9S&9ko;Y>14k{^*MmdC`X%ptxC8OcNKVan z=9k7Aswk632pH`H4OYl&C(59!&GNqsS 
z4XX*c;Kd6pcISi9={@u7QWJvW=On(;p&G(l0JHtDNQvJB#r8dJ{|{z7{GY}g=o|sA zS~y6b;F)=WqDa-{_#bA7HvQeqiK!gI<8;F^(!H>NS`MKJ;=QQ&q6CITeKT-sUp%up zqjOu3VgJdwr7r4ldciZ^ou%sQ)9Ur&K(AA#XI58&)JbV*+Xofi$pt#+{Xj_GQrn0! zVA&q*2fknlu!DDt>L-I%Lt9iLs!FK8M~#X=n&t+ zXD_P5MJ6&ni^Wpm?Kk&;J%nrQoC1jUR|gI*OP!TQy{+&KjlC6r2z)0O-`+izH67ph z@uh7q?oYkc@2x={`&jc0aIX%TdBJO7yw_hCmzEH~QR}g>%e?MqN8^o&UiJFYQa`M1 zCuir=mBwhMF>0R%&!QpA*PAg`gp?t7?}UUgQAyzV~wlt zxoy3_ac!Y-SPa!o0JVrN1yfAJR#3A_J^acs6p9?vYNpraJ9w?h7Br9 z{xVG2b4sYjN{*Lbz!N+&9sYandBiahgK4e0c@>d1eeicFTFlQGPa>r3NhHs+heFx;bf1wn2%VGqbuo3UcgkNRKTFBTTX>R7YWfVYm}P$N)7c>nWX~A~G-smO!@3jN+-9tc2_;n-)u+uEu`G}V7+_!Q zU@N(;s9r@KCM^8u0Xxh=2YKM}wz+NgKMeq(D1(tQ!q3vxhTI5-pSW~zg7U2yx7BD0 z*nFSEJwP?mI4-#FTQC^w$fWI?jx09x%~ec>c}=~)yioSRmhHSp4D-Y}pqT;2O}@K1 zD_~#xy<+BbGKp4wlS9W_1peq!zF(GDJHblJf-x_$-AOfGbd#bbV15HcrDs(ATM_Um zCVE|VIbx}wOaCa_o9JNc$NZMZtc(+GK%x$BGLO41=r`#YkNt>@mH8h%QNTT^&w!a* zy^Ahlzd1LN65{D$G30h%yV;)4*f!giQgnkGdI1zA)R^FMTl{es!>1uO{VQg45mWkH z!UalB`e0bDv+;82Mj~UgneXE*x4(EkXS3$U@eb)JdvRVt@S_OH~_XB14+ zDh|8cw0d#;`4wu--QIRbk0{a#)IT&GD$|GkPYlO%EyhUKpQiH{_k2fJ(dupfXE0Rx z%Z0WH@XbS~yV~YrRhl&;rz-+ z6^zFdC8G>8BG}9o*Un?4;inyI-T%C0@e%NgRO5JN=zj9hxPHGt7A>(K9JBc=bfSsF zFoG1dGl_~5KHR8Zb0rWbe0ed zp)2lt+B(m=ouxASdG5=-S0tb&Kzyt1e3U4L_(l$}v4ube{-4PqmDLtxKWA|LMnahi zl;xHh*Hkt`B!|O3zV{_sb&;x`SIc+$Yzs=GIFheygrRp^Fh10j6S-^qASOeuZi78oP`*gf+%P3<{do0T zc;@^hBuo2U>I>Zasu{dT+-HKel9{IrSp^ z!O`f=0s9sR{zP&iqnkP^{mdPZFX`8?FXf-q+9Sm4!g3)&Y+Me+j4#>u+IA%h*AVRk z{xGXzEhP-3AA;N=jC7o;xYoLUL&-|TlR#>F>_F&bK)2bZ5r|3RJqN3H>{TxLL}6{U z$*Vpd9K=-mbo+7s3{u-YdEFEOP^`&6L;70qFK=)@8Vo}mu8|Uv-tIxy$6*Eikmy{5 zaE?GT*QaR1Rs1MVz@Gmw5i9Y_-xJIE#0!)|%z&L6dFP~RCqmw`g`O$A9;S!pI>G29 z=?VJFjb_Uq9u-H6!j%5uQGLr$=hczC#I1jUGZj{nsj+L0Rnt6ZoJ5p|%blO(_2@<| z?_Sp$D3R9Z_<1mt8Ne!ez13=?yE?nbm6==mFMO(RifL6sve|t<5p9GOlhD>!#*B;# zz0ms%@`$TA&DX69ZW*dVetW1BvujOD-t;|~n;rePImTa`>tgu|1M-(97pg}1dHd~A z5Y%HXKN&8E3X9F*b+2Yf`iy6d7P(oT%* zbFu#4>dR6Lvx{L;g|rET58(1RKTtt zGW{-~j@sUU=fM&KP94sCL(%^CEcL4&FgOj)Q7f~i5N^7~kCyaj4?p3|7yZF*(XAvD 
z+ep%G^E2?_zbK#>)Rl_F9pP!(@SRHi{II|1i;FQ+0^Bl=3B)0JB$inr8XJr!!PVl^ z*-xYo$0d+}2}F|ElX}pMf05b|`8q^gsbF@8tbXH*>K8CU&_Q6pyi;=dQPfOiT^2RPNkcJ!CAxs!PTEC;R4!#0R7+GP%*e>u91eG;};o{IjIlrwsBzTPrtl ztEs5oeDES4G1ay!n12>+kOY&rPC=+=u}~f`@9@Jyo73d+kpL5Yf%dqEPG+s&I+y;2 zWskkLu=Xp!x;>)UL2boAU)HW=YI!lYLf?I#3X4IZt(L!GdDyY*hfQ-mg%lk3I!;KX zprlJoR&YAn%7k%PN);Nex$(oYz&4_5?Dun{y}CFfu-^_q^dOSycn>1@t?#hvP#=%C z;}Wo!Tovhlkdy2AvtSO7oS(Bh^|JBzPQwUTRrN$!omNZ4*(>2?G!kyX;-ZE$;!rii z$=heP_wtg8&0=u`VrVTndBX7XPs#uGS33h1U7O&?JVY#$q(WmY%X@3 zArElVtl4ml-c5V;vBSGF5BGCdx=F*mtfM!oL~Y-a&-5K|S*JoKu$LyB{43fX8P z++5HR<|voEqM-#b^J7#w8%ZACNqvg=^AbVdmMPt&s|#G}@Dh6A8V!nqXoDnSG-_XI z$D7H7pTCgiy=zeiI<4OS))RpI44uz2P4aRM__J$$S=~3bWk+iZThpNMnu<3w)-e5jS{6wBGK2g>VFz>Q0MJ5dHa(qKX5=Veo4@68W z>REa^o`+LX8s-r6on$+ew+WmBuWrsWTjae#!{;WzgO;p0;tNW4#lMGDVgKx) zK`PzVa%`HNkdoGc|0Y&H=fv*#_N2TA9d3JmOx#WvHLUVEteIxWb;F zEwyAh;Y7~@B~wVVw||0+T}R^cANXGAO%8=tUq8UJDeprn1#6CO zF2t$linO(1UoySE<;)Oz_ib}}E2Hq*oc)N|rQm!UYUg&|G#PntqO*0BQe*r2%5>{F zM2ELG(u%Z!dC*dw@_&LJJPUHjK9a#n|IT*>C$=;L%lI$#O>Y~;3T1wm`rJh^bhx#N z{ear-Bf^WV)S+vQ0aVSz-(^1XccH3>XL0O}@bgs5#a%t zNT+_>Uv-mxqJ(SC#(s0rf>3oHw=q^J!oVmXc zfB9H5!rXvaA#VVaDJaE1h2^ek@jOkYmNvz29KZ8(KqL-uPLO9{E%At=jOffnYr zUUx9a zaajx?J!jh3#aC{}GVLrwVbkPG?WlyO`d2mLmn=kY48rp|KM2qHsPyFc56=jTN97gb z@s+%PHWlnd6VlDm5V)u0|nVpDpVcnj!N%N_n_J@eSSE9 zCZr;R4&#CMTkE0h33sxom>K3S^G2H%8MdAyS302udAt`KHSM0~vWfi<%J0zM0qYZf zCM=Kc^Np4*I$I+t*4j@m0Ti8bHP7Vad@cm;VT3Dv#KL(edDR3#)i5NTL?PM`p4?N$ zjMwoun0U~h-+AkIh5yVSf1~m$n^S;XECztl*CGf8pIt213bN>WadMLb6r}=MGuirt z^6cd}U-anW%u+b`J#imh&ny%V9v`6<^52tuP&ezk?2D`3ICMPkg!Odkjw`hk4U8Lu zAb%fZ-!wmYb4Ynho_AM8^M(rUBurY z0-K9z`KAdUC;fJo_^eeK7b{Sin~>+ko>?Oo@exI;^c-d{KOqFQs~&bTTe1MZ)-^^m zLuAE%;M{l5d>|0O58Zdjuou%Q-jzP!${lxgi!~V8k2Fj-V8&@D#5p}H6p$Imhoq!M zA9TXK4E3pD#Ssm+!M*}MgA5`Y3BO971&c$#Tc)>b?RNU<3v>F%@&)~hE$z>~W>=){ z6?y}4A7PIS&OMYs!etxrn^Gz|1Na8VwY*-D;9MuEGq^U@!aLz>jsfeEAU3_$y^pqA zo+WEo%K#3=+={OEFV^k-F#&2pSz*~_?D8nV?_@(Y24f~*HfdHOnUmcq@rEr}t~gnz 
zXIn6JxmTW9bz37iu-%vvCcOTb;mZ}rSI)l=$QGe#D(-s;Lfs)9u_%A$z9NWdO<-iF zAQST3yXaQoz>u4(eh~^JBVt=6r)L@W`>Mkbkdu{iTs``q{V82uU>%IIs!c>%^>RRI z6o5y!SdSj6>?iPtCo@C3gsjhq*?CU72_E;)*R~;4v6RUy9nm%JJ85Jh{K zc0FlT@74mEN4+)*WI0=?Z~VHzD+I}OosQ?m}apKGdUK=p?mhtU?$qTpw^7=p7#?`^Enhb(e4$Gd4N*Qp%HAB!p3?b=D8JMNK zq5d(yh(c29dxM+6;0GQWhUmuD3U;+$O!i)bo9=SLB{?U>4UM5KlUQ z8WS$u+a{bH{fEOnc|re#USvuvyh=gY6qUB|6V7Gfqbf?d2;DtqiyjeVFYKl!X%hI) z<%#%pkxlMg9}__Fzu&RmL?-RV z35B|1>-z&tC=RMBA?{)w$Cz*ej>*%F>$QYjSQC)syuCrZ6Qtj*V?%{r-)bXldm=b4 zD_GeW~lJ>})6nYF~QdU!LhLS1v6#Fe?gnl<8*3V%lGW&xh0 zo7qRtch_{LsRGWDZq&55YcQhj2&`HWSn6EFnp|Bka97CbJW&@S5yxjLb_GnU6jFL? zJ!WeCCDp+Mmv_P1Zc_-1*$T_wDI8+8(J&|)MZqv?`wKfcd>@fIf96tij4Am&4rZj8S%%H24m zuCl%c7hCQ%;uw9fI2Rl!tcBTpBwmbzX7;r6O#eyBaQZz~p&AiygRz+RFwf8*vz_hj z?KDFUQQ|z^83{@XN4mrteCf7@s{SIdjoQe@oPpEoAxM3;I9~TN!MZOprPIAqZNF+^ zhDOKkkr|gfBhKJ;Rlq))^2({24?07J4`mMuP$ema`$V+zsgqwoZO0d+2`?ZHdAns9o4c~8^p45NzH81 zX~>w1qFFW9L$k>3Qqr#eRLfl$R4k}b)tK}93s$=Biqqe|llKhFc3DaMD+W>+=B!HgpeO&CSA>pnff%-J1D3sC_-ODZ|BO=&t5|1Du& z(gCmJ$!NW64MCndJplV^i?3E7kQQ#imC43R(C9g!x6wQ+m~i6wmwQcdlrcnCAyp9* zywP|a`J@eup6&%21Us>fAlGL0Bjx{-$O~~7Hk~+pGPq`&>hp{LeaU7xG{^nljo1es zXxGAO2mEWo45tkt3c?okdUY!^J!;1IUW?4qrVnM>)vXz+o)YBBzoYX#XY2$F?=psD zQl+a(2}{pr4b5&D{7K}M@{Ke&O$kRTC7d(zM0FQIpzWhoLfdP0Khg%X zBtlar5i#3iCpxEe#Q}dWOl0}HUHtOilhpsMwcPT)twi|BGe7Hieng;`sV_AR`EC?eLioSnUt4=jG;()s>b z54jN&B}z=oZ}hNwCCGf>r-hM)kJk}&4j&C>WkF2zE`}eyv#mtUujll`L^2UBm97Xz zeh^kNvCfCFkawfnGU@~JxE8X&-m(TsrIbry^R9mm9tnyHl`nTkJv5z4v(IqQ_r@sp z)>H_uq~@?0_bNVcj=&nsKael)3~Ti<3(GZ3t=LsX@|d9dfdD?l*YD89;&5mZ@@F)= zc{e#G4Dor3tzW&c&SGCuX-c_K$QOKAd|S}sK)sl^Kkr+v@JB|l4J5}7$M?%tc^^z0 zc^0;+-F+5&m;4_0W;?ie+xqy3SN`s?Yh$^Lh{)0MXquB>ZuRgLkWVwD6B%3N3d-&$ zHMW7dR7lF{SIY)k+)%@V4Cu54#EoLdGrVbM(-nAY$!F)c(He1B$E`NCI4h>%TuUdv zOwSNotFNx~SnALYD)yB7d5Yz6-KH}q$JLSiEp*|Jp6CfY;Q?})rX37snVj7Az!J$d zqb~38pC_AFGT6rZ^GrD~&pSQ2P)Wm5k({qM*&WWw_6 zGD)n{6oTRza@-}+q@XzqlB$WnJj(okZYL*-8y!TBqIr5HqC$SoqDV!Mot@A)I!5$M 
zZ%Z2#_)dDSt@VG3bfu$^^)B>&IZ)9K;p^Ys!}P3xB6!0xk_)DCy@fgWfxzyAx=!HF zhFXaULX{OnJ&bINIn`_aK#K)$^+6()*gU-~GL~nQD??D8&kgwksKH>cy^T zb4Bp0PWKYl?So>7FZP~&Px^%ePg+hjsoTWh|9L=TEL&F4$-<4<9G2*ryEf;$|0{WAk0;QrqYt2H3-2a)o(ebIBZxhr+il% zQ}XGRa0cd@Q9G-(-PV+Vkfe^c%ceFD#86ui)30$?5`o*zmeB@CP!wif+n&r9vERS^ ziF40wtZ^ouf*pX-g$R6ZsfOPOs_!FFT2am7u2phtRJqi-Ca~PC{K3fBQf944*@i{>Us zWMNo+_WNpMY2VhA>WNE-ipz0JbTX_7YK(T$e} z?~in4wX9`&%fHo;u3xQo;Y*&(EnZ$col4yvQP+`QWageFvL#G2=PC+co3tg{Sw4K6 z?k3*%_sri;$-7>me~wJhMc!MWQd+P$GT?4bYA?uSF( z9ZqS>^12DUoDu!ZDCL&QEiBKi_2W=7Q~o(F~7WNxRoH@+Hm8&8Q2+7D?z0h z2en%pf+zFOoh_>!Ycz!nbbiJc0AoUlLpIr_uB?f+*1L^1g**eZP>TzBX} zTr!cS%Vr*X9>sqx%i$h0!taK;Vd8BAap74y0@0pztIvE9!Uzs59oKqr36c$4C?{yh z+v8hGKkl)ocMcR^4r9JycY0CEfQ!n5Catrs_ z_Czl(>MC83?>LG*rcbzrazm2{3i{b@0fE|T$!AI8N}#lLgYjWg&iA2EJ;)REWg>kU zj;uJH>B5a}6%0@IoEx+)x{pHBIsU~vyZxvQI444)$>tW7Rj0qrB7Z|1uG7lU&S-$( zO|Bbmv`sC{0YOhNL!9U`DvMMIv90=$9&~p+T5}f!p^NYfa-Y-uZPhXt&ukNv=S=|b zN19{2+G(?&_XxN7K%0?vx7Z(?kF$BAw0-i9;C&WuxE9>#To&@)KoUegIiROvSHp9T zN};&>6&@&#H!8}A)aagc8xA$7~1^mnTKA_eOefbZ7inak%>nR^M)n#3>&;|i;bYd#w&J7 zV%2t2&vqhZI`SzOY$N`-|Nq8uYVO`Zam0Vf5fQCL_YE|Oqsee8kz}SK7Ir*S7*K7T ziEt;$aua_3j$nT8Tf)1rFmc~kc$vH7%i$3-*aJC4FIscshwdfID=Lb}cOm^Pr{pi$ z_2A9fsSp*CS^DzoK{%9j6Q;GnGLxg@!ek}G6?+pl?2mBiZfqXWid2!TG~{eaIQ5Mg z7PZ#(ZC!_C^m74gH~YdJ`PApxh<2=VUw$~(Tq{j90{ItRxt*(TP+=T^@fNS>(h+JN zw;8_o>wy|uB?zs# z0|3YfLcV$;!E2@JU>Z5F0V}sM&$(2-H=HTAy=cUL$x09EhxBer0B^a65%oFMjSh+z zV-M2jUCk<2*iDX$@*8kwpfIl$QW$jo1sK^Cq+8RDEWEySMXgY~^Zxqv!<-?D*spC> zAOvlIlw-QK(i#J)k7QDtVfG=B9XLHl8qZJ~;CJg;%%J|F*aMbiU2)ydgg)WBWpR+? zD!Li~kIL~9BmrIlno}^hm$bPC1~}{b`D|u8tsCv20iX;4du+vGNMf%lmD61oga4wz zhu+=FldSK(nlzH*koC70GYPv&2sIl)ouSv;^4tJhRcUHzXk^i|8-4}vQd)f+Sv68G zSEmiy#sE*|+-qJ5q^1K(vjgwQK9z={Z1VsoqAg`2`w z>RDauHew6Q;r`zxW;46S;{{XSnI!9ID^iD|ee6z?e`R{Q|6(=dr~YbKU+iks+Z69n z{_!uN{BiLs+maDs=zGudD{YB%toi@F022wAA|e#=_EFgUQ=6aG^9p~|OP!3D+`zo? 
zQ>$i15p$xwN+wfBv>pg0a{12h0k4UsnYoAp>{#tW;*so@E?b0V-TI>md2Qg)#O*#T zH!z4T?M(n`sYhc(z6A=HkKpYLhd=9;Xzid{n};NDF9!w_lr)HgaKAd9yY2YNSMZIM z)&ej#zU_!Gey!L!tR^M4%TnNPdc^|iSW-rTj!d>6_V||*tRs^j_-mPW@-+P=JHTo0 zu?q&_2zILUpB+2v6yk;Fs|=>K#BWfYZlmOQzS@XP0GSNPt7}Cle0a+;%s&j8^(Kxq z^?_%rdO?@#;=Eq7hucXGAz0+7$w=yUOjKCE-Wc9SDPhU4^rPma`604!So{_+zm%@% zqo_)BFHu1NlcvURv~?l{O2nBeLM8m>x?}3uJKDM+rrh}j&6Vu*7`vLFuUSvd(Rvn= zA_T=}?X`$;12Rln#Hz+(5nAX4h@CIv>C+`lc{Br#;3p~j`}X1$`U)4QAJJj*yLIuv z6H>1?rOJ<2teN8x+97JTG3ENS(^vBP+P9ZbMEmS<1h@0_nTk=f*=ltYZ8yy^F|~zX zmPC*p`h+R1iJOrP0$KyQ@*P;ngqVCc6Gf1C?2(p~HvetJTp`S9M}D9cKzDq&FYMs|2Jn9SY>gn?K<;D)_?{yanhJ@Se+lmu7$@#{84 zg5-{w`|REl$>O7i$y8$@$-F>RCaT{Y;eqqWUq z)0iCgM*|T2a1iT5Zc$;WR;1y#dquYAhFO-;6^$91ZWdM|(VA`7Bv|S(tBQ+Fe{jFl z6Vi|F>Lq4nS~Do`gE<@UyTP(d+s+fs#E=rM_NS==E{SV|m~Y587=P1V5$vme+aH5C zaQ9nZ)7^u}68jfT8#{xOFt||_mMXS$%8iWYB=3|=8%WIrsbKuBD{Qw>;^+?yfX1lL z{6Y1#rygSvd(S6mHIJIP?5#{o=0r5#iEu`8l?x=yKbmqu>~gy?T>-<`SsL3s-42uL zVs4q<2Hv;?*gL%dA9GFWc^}MhY@G6#k!*D>0yBlGU%eL737J8a{GNg|3HLLa;2g#| zkkmg3|9w~EA`o1X@v=FLn7Ou~9dXkO*zQUq?{w(`himxq^I1k}Y1Tm;#wD)f9zeuA zKOcSHjEij&w=+^$4u;ba4W0jsj|>9bp&IJz-U?NdAT_=&4EcAjsp7D7#(w_3Q^J~J zQ?h1F{52R5CN*JBporHLz%-z_ggZhQhRMxsS)U+>0@VX>TXtwJy5&IMYRTyNY1ZuY zU!zAX0@9Rf`>4;OrB4UZtj||KYfubZ`{s=)kC6mHB=9X?HhgHqfq0ETD70*Ws4(@1 zK}>)vmrJT^3L%6KBcf(563LP+vtY&3aD3}hvn%r_lU9R}MWC>p?Wnlu4YO*&=F~b28nrg!%iz#VhGzR>QW(Qa?O?6Z@>byUcC40eso zI-7?2r<&U7Ryi8R^?86l>4L*fO@Kaej)t1Dvc2WRT*OxumO1x>!rr{loKi+Mn*xny zErN#o`@J3?Ggr(|l9h7i#$t7!Wc>kVt%Z@z3*SgpCXy(mpYFQ5cFHvf^JdPf;J>FZ zL5L5DjXcTfyPSnhPpGUt@+Og>HDf*7J{?;^#MgVXk$n3LRRbVmt&~m|6~z_QE)7uWTv{djojRu3c&q9rJa(H z&F>boXMqp`Mdmi2-d1v@_fZ>R4$l9^Q4Ysm9jaTzgk01|P>yZyh66kHBNWqIR*!Af zqWbSb>mPIhy0@X<^paB)lxZ6-wQYxp;%aGzNd-*Zf4tX z4wp7m5DEHqi18n#4YXATmxNr37xN~uiOj>J2t;vO#g`orprro@XBx=9Gt!%F()owW zuJA3n@{|fm3}?y4zX3vv=57Z)!{?rS9^1R(LCP=xrsyF%COaHU@Qu*#^ZKRuNAQ76 zA@uuf8`*w~^{q8pT1A7w@C8u0dwksFs#|;bd$@O@#HUSm$-u8iGlge*rzcEf4mq=* zKfLy#^F35DsH%KXDJ7e<*%;aN*`YpM4sdKj)_2MVyXfXX<}IBR82$Yr!g^bdhVWN6 
zhs*s)uZUmuTp5&mcLaBrye_^kc#?lRHNJt$an9tlRtlxiM>mRYR_Y4hk+pA{DoK6VJ+;gHi26%4G_?g*s;d5wIcrLI z5yH`$XqF&2kyqT12siSm3ELNfll}MkjCzu(H!+%V`O%{IrM;ld-+wzAhIDKL!&Rp zPf}C!Pcg|@X)BfxviIK<6JiJtQ0L!AYg0uNODd!%V0r^fo99(D%&rE+Vj2V?76gcX zC+&AEx!f_%37(sEYhzyc27p<%cYVsnvP~%cvIIUd_13^7p7|TcDzN|I75g#3uv6;i z;q_I2dni{sJg8uuXEm~HP=I&HF_&Z;;8^$`T2FIqrPqax&v)3PlD&GVFHm@2AQhJ@x#4*wl#0I^j-q4$bVUw3qJJywIpzkgibj zf@b7;(%w0;qf;yp?YPdU-+vdV$z1mE_oC@BFXJk67p#v$r+P4BN_TCZ&;XHR}$;= z@Ds8Aa^|Eyc=v87KKH?lR?AGlwR2y-vlDLf`&9@fn7uhPj^rB*ye<(CAsA4I6IN2? z-}Nx1u=F6}-=XI({mbp)_IBYG@|=L(&$5n_g~~w9C7%=Qb5ZYVjo1Q0unTO*3F;?z z)7qqQAt#+h*lIH;Xf%QCZe+Yr6BxF2hmSL{CCUT!rS`sQ z&G)c}Cte+w-V8p*`(scqc#4&9E(9j0d#|(bLeAg*5UzFrE0t)n>o>dudQa0d(KM?| zn^7s~!y9g4p~I(R8%6y_7`pRa>}lmpA_!y>Jo|CD&5nFdGhAki{yDCpB^)bhH>H*I zP+c+QTRy?i@F&OGD(_XdFY|XVnV?Oh5{KZeCH_@|Bi8I7ctFqHBTH)f;_hRhP-Ed4 zybKL{U%6tol=&xWWC?RG!d*LS6{V6NF{oZMyqEs5TlY32C9ueaxgVWo(02wc^=mq4 z&f{q!sCcc30Nlw!#vfcKcxMj^!MQNLaq~uQ+fQmb{2T>` zhO+j6AxsNa`u)pd|C}DQzp=E*G$URZZ6WJg?+53;vj^U3MOaK19CB>c=0z-_^5q_m zP7PyVtN6p+Xuc3F(3rMp%0BGY#@X=dDdF;EQ#v6L&QZ3|nX+$cS^ybU)EBI%Z)HEB z!`_!dNW0RxcO}VyuzdhS9+6YrghUlYy#VnI>jYd3Nb&sI7;QFYF6HCq>Jaa=i<uywLsKJFZ zn+%OZh(c^-rth5end0w+X%-Kt;h+DGXmM{xef;Y0yZy)L$z`NF_)VP^&3VBSw@dj3|7__ggT3 z(n>^Qm+DlUG=z=U3$qy}NASb_%A=TRVc2(R<(KuvY&iSIqm8rk2b7l@@CR4Ml2t&A z^H_%|_Nf)lTtKe6mRlT*BiFG_v+v21xlUui7ipT)Nvzt2C%j9StVhu{rBJM>Vc4P( zDPxezA{UzK>G4)b;_m?ewwh1Yy~jv-3%-H@HJ#hv@&{(f9_wbK+lEaKG);I3{{5WX zde|xG<%}B$xXnSuXEUL%ScO9fn#?;={~kfXwS#=C@|-m?ZG;B2Ike5UD!#E{Sr5%y zy1Snv*7{SgQ%64N`HmHf!HGUdh(KEV#n2e>N(Z{GYRl>~}lfY9Xf( z{olx6Uggoh*EW=TUz)WxZmvSf!*${IPX*<|3lOJIm@bom8>b(om&Z+h@3C3ENakk* zl7KY)yRkps(q#>{Rs9=K#sst8bR^jdVs2#t<2)4!yJ^Op8DTL80)mcxp1=DeBoX1^ zjy^~d(D>>qNLn6Tqd|(d#coC4%5@LGtDF<01op$)z?H&zm+m;3mYg)9){69vR0W#gyu9fI?zth6Kt@(n zc!~22`ZksCuPNWp(@_UBaZhTQVZzwz*DqS_LzF$AOhGR>-lqCma z8b`$sG*~ixX}QoWUf!=+A<=gf23Q@ukO&1ZYjk0y4>z2e*cug=immxm;}exofAd!m ziU}9t$5AMQ6PLSK?)3iU6jS2AuK5(xpaRtCB6D|7+q1**hlXFfvAcYEgZQgr);UCz 
zi7xpSFzZBVw+zhveOCv&ofyJNAiYHA4{hIwA6hsldQ0##%LMTxjCKO5N&&bhJcdt# z;hACbF@GRm4hqGWRw>)+t4e?>S9SRms=51zg8fkyh(20a{U?G20q0F0c&(Abl(Hx)3+9@DUs$wrLLvev>6efn0-H#)TrQy6_`RpcsLQP{C-&sR`jtVSgbo^Ig9&%?-FaA<2=k4%rRt}o6 z>l&9H(~qh%wA{rO-j`XHbLsZOMG?DfC-f8DOUYAe3si}5{hVws|4N=2){}>xqP#$O zrg#{fNWBf==n5BS6~^QP|S(9a-VA_A$#Z=S_MPuhFr&>y&u8Mc|9Fb8Q(Pmmvd zh1PqP5~Xu>^-vBLf-h(jNCfT?a(~1nwGh!UFT51z33CtcG19_x--n3!CcwQ5Fpk)q zqd)n*kIdZOY8K4Eb~f@mAOv*wn~e7rFPifjY$*{+Jn-WxNm7<6=L;jRy1yVm22}Fn=6W58|r9OFaQ>tZO42KO6_+UC;1ZCH+iML~52QJ=2 z>g-Rxqpj^zrN_GMAbM@;E9~mEaxis!#3h3toMMZ!1eO*LJGqH?mSh7+i`WJZ^Hx^O zFa^i3tpLNec+yzEqz!TxWtxeXft82ZPZeeJrvi^Ua+pz@h1) z;I?i^EgL@t&j#d{3gX%lrMn|}w8|G%Yb=zElt22y)D-97M-kMVz{6PQ9wGuT-TDmr z;`bHLk&P*$3j^D+5N7WQ`nYIBEAvO^>Yn%8P8PN+ZQ0zvLKQ)g5yaTQnb=f z?-g=*{SO7&1GhbxNI*hi&j+;^B^2!{(HgWbeKl$XNLIGMmU!$=d1p?#a-R&#FP#1B zcsjE*M$$i|0CvG8mP0PstoFBXwaC10RTn*!*8TXrxzoCHh1rUH=&3>F2CLcSYk!gf4>~hWVvwpbJ z7}qrhebw)pJ8&a_f|Q^NSSW`-~wzP&*E(J3AqH?9zT(VU7Wqr1X3hM~;*yN@~aFC;6aYhbFBv?Dq3cQa77z zh;}o&P#G-bdU>9=!TKJN)o~s?d3zx(!EvP_LmA;X!?Tx`zcENIYM-Q@Yw(6=zgJ_> zjjr-fT>k+*Jj|DmxaCs~vA`m_+>10}hx1_N^UEF#f)fcxNgHCrvm`Bjf4@|9s@rp` zT?)r_H=b_ax9Yj=%pyrOw4bo_PQVaulOYto@)1bJ&yokt)3AoFPJ-n!eQ;k{B{Rg; zRae8;r#!yqjD$T{@w+c;z`Ixx1j9-L2(xWmcbAlSXnOs}u70VLLY?p)@QYQvb^PYL z(-+nX?at+}M4U|9+6F6oe=0ZEQbDB>{SwhA_*yhQ-Xn~z;s}7z6h9Fz`Xda#$cy^9 z650E>G{a^>i}OFZaXX@#!2s|*SD zQONdcbWvf}%}#cX!=cKdX8uncOge!J(iwj}_HuU4-ZNg4X`|E7{9$)~0#9R-v&@S( z(Oiz+&cR5)$;I)b8d4f2{Uez~d9;}(fhKb7ElJeHYtnL-HN$av0D-Cw{S zrDe!9BOKI(vuAI|>XQIgtsa^SDfODGK(vLmddm7Njt6}8`g%kY*>MU*s7V_?q#M@}ab502H>A}b@swExc7 zj!)Bcy%F)d(M`Um?swd?3sFy2EinZV8z`y0{jsN7N zI;8%f7;|D0f=Qqvma~^}cqPB2q|HP?Zh4AHj#cRnCr?h=cs>9c@6A4coe`{TA@ue% zty8Nljon5RUuh7-KPXL`q`zYY_+jOXy0d(>4XEtTw#dlUb5>FlnxAKFZv|pl`J5eNQsG%J644NK^lB`!&Sq zwR#dM%75+}7^Hgirlb*#GP!;Pg)sJA{z;)#e)kJHB2I;rXD;y|+@IvjE}S|I#LZR% z8Wy0OVgya0j^MSO`qL1C7Fn>hapJUwM`+so)=#i4-;$e%%M`e&e0`+C1>#uqYTNA%So 
zb^8hJiA?s;o}KGQQW&F~9I}_0o()4`uba-Z<73^R^;KV#UYU`0rEoEGJ!n#%i=0ORzoNrX>I%Wqe^@G||D}roBB8bZx~GQ#M%XPwXwEg!O}`IX z3ZEm(k|zx3TKy`^u(!S1P$~bp5CqfLJ{e(y(C7tk%8cpr(*}LYDRT^F5GRZaz|ADD z^aV?ctCLGkR%^T-y?@mL-n+W2Qz&ijv?D~#A z({%IJ8MJXAo)x@d?ubC-M{?L%_-!%ly?joKWW(It&{gRT5I`)V_%i4JGP$cz42xF8gO6Tvp`87Uk zl7KWM!{>F?s~+lG;lRXkv2;SZph`K)03kcD#!G-bK=Uq#$I-8*-ZmA+suUSWr(d@? z@F@dftPZkq3X3ARmF>_f`mPdOmU69RqF3b9l7#xOe~BD-P=qWN@dLP;HIt`lO((5l zLRocmy>(UKrYW3%d#XP@Z8?8FTgS|Ys3T!AGai)}&kNQ_I0?dCQ7 zT^rgIsJwM<6Pw~_LjaPf_o1VCSMTv(IH{irzaQ@J-;MCP34_t3v)fKSWROm3rM3kS zNt<4li$_yyxxfvBKpl1*C3)cseSmx!b4w^fpHU0>S|piKLAV6{T;rwAtWJMtff7fxx} zDPxaCF=9Cnh4fUOkiP9!Y;+rl;iDb@y8T?!`+}P+*R|-HrX=j`Od{FXlFUklO2$h(zh{+qP=Rai!88_fCz@?|q{KQrw0@h|mZcmKRK zCSuHE$7-%eM{cl!Ee zAM?j}E&oSbKJDMk)>9Vk)ebde(=5ZZ{)K0u+lX0Sq{n4Qss7&@1_ZY9+i^d>5=Aa) z-Oh&YJTSkm!F$JMj+_iu}zEDee>Si8KNg&)o#SSK*jvNLj)I z9VZ-)vWO=$jzD+lk$rVVZ|bWQZu@+g-BwdM-t6#|>ZR?7IcI=`pU%$`R0Pf(6q}O_ zOfKohNZ&sZc?QG{dDvx8@XkZ*P45aAzq73hV>T~L1=Zz7(?9CIqyZ^|Vtn2OaES4> zzJb+}OtsNKHtR)UE}!;K+e7dWQ5O=2skqc;H_zSt8hzAS!)@}xA$8>qgJ6sWa z^x|xB{5^<>g75D=Jl~6Ll?)2m;7xMZ?BkGKC%xu;ZuI%Po3N-)8xGmfkh55`=>QzfmKy_8!{*7(>Uu7=f2Z2RzxK+G@ zDola`5n%7#C^mBy=DzXp@Nn50Y@1w}d3rubN9NvGwMmJ$Xc@vLmnz ziS$wgfjNFmq{$2zVTlWg9Lp*bu|D#bW(JeZ!tPryi2%_#3akG7n{;Tz!Fn6nOXR2A zp4b5I*=0++9cX^r{O1AVh=RbBGn=S=l0C)mR~e^`vQo_||46znjO;(ub+<27x8S-@ zM53p?dhM3G)d^2}W?!X}zMdCeCvI04bN}0o;S|Bf{Opafm78EbA~M#T1Ne?1`tcWb z>hPw1EOsJaBp*Z&$%nk$zstgz<#p-b;aAAKRe_+7G6cIwdvv+=4RyYe#k+QnTSF;? 
z{nYq~0^bnT5ENV}A{_8caT-Ob-=859hPV*g|>jbuqMUEeb208VrID>Sk0b} z0LT&4|MGVMIoLCpLou4hyy%O#R4$kV?C^}f94}jp**#{~@HPcOWohcBcjv4i-5NS8 zHy{cnIuIKBQ)p>;O&8cLx!YwpNLta92a>i(d$cZkPc%7F(>#(>NwSR(H%Wm49WIfl zna;Os=l_TWlflH+|=xU7X$PSDL2z$!f}L}9M@Pp6)T!WH)HQ@Ps5OTO+`*B7)fDPm z3{TNVJ890~AfEgKpv5&f#UVdHgo^D8O#;ly{?T;hv>6sU)PnOIa^b}IaLj4Ilw2=Tc|!zG&3oRR$nngkdz z<7sAL$>^g5TDJC+YxMJ3q|&2V08YZD_ZtIKM=<(jz?KAXD3PeFuUdD7KJ37FoL^2@3 z{XEfeg$|$NVjzH83GE4spMeTCxJZ=t+6BhQGAe-lc^^m6O0HVv6Hug-hRU|&j@SfD z>k;mk@hGBod+Wrhrw_N|FKNnkiEXn(m11`~#UEOXdA#(>&JR@uO6OJiSWj#={8_~*}n3!NKT`k+U?|4>~jAQ6wlG8cpEU`Wa?MO9 za@}Uc4V9hZH)%-441L+^JsyXgg+=|hF>as-xky}8>;Bpdh8d4D3BjeVL|Fz25H;%} z*EA-hBO(+gJ@GV#zBNv-Y4l{`EnCqBlz$G$%^_tg1`SkPyi>5+NvjD;Qxsy7VDP_6 zN^Q6IR=O19g0zvI8rL}TFy|)R3XRs_R<_kTCTOUDCFHV>CE-+kv$x_wjdzM}IzGrw zl&ru1;OIK=)Rik*b397vv0YR#iVR`GDcb;FqVWaQIuy;Zcn;A&S9kWFSdUHD#sp#Z z7O}ruJXA`Nrmo4L34GGL+qjAC>cg0+F?LBKDDbNj_aH^cqmG9Hfx@bXF(MwF%J|Or zy_5)*j#XqC?`%(q#PJ0imbe_Xg<3uaA0!;n=$*+>{Pq|`Hi9IlN2%rYa<8|47$DUK zkZ@#HXF$<#3MSH#zxvdxGZ|*J{H(WK__k;t6>S69I%iw*Pju3ZnXK7QDUAf!XR$vvVGNRSy!dzy3@*Gh)c|ooT^vj3T^_t>dNeH>G717dXBmy*#<3UREtxd{ zv|8o*>mj?Ns67bW_U;I8Y8V;(R%4R-d{J&v*;B5#QO23MTnmRQpBwvWxc}Aha(*uo z1rl3S#E`>lUyQRI<06zt@&>uk_ZSmfb8Cd7$Amcb>&vt1bo8_q3WOH4&Ik%L@-w=e<68bdUJ7d-OnYlPMRPNqpotQ zfeKPw9}vjN2EXBdqW)zwTjXJ|+YC5OLQ=`oE@aJw%>WaDmdXkYKJC%LlWg-T;$BUK z7@-;Xq=#?e`h?`yzBVrjUnzVBj#0tHj@-NN@A3Xd%eaTOoy=V4dQU;;e6oEPe*wGY zl0E%SMwgWo!yJ@m`zIuZ;&3ZciH9y*VkwJ#cIBg z`eT+oQwk;oEt*Z0fj$}~P>CM_vttfDkF{RRx!4gDRw3@Jq9gST zFK5KPm^Ln1_8?v&YuKa0CHP2=upR)^!Ix7JZXUobp{93`rDV3xk@-QrmElJo7tWD! 
zhJBnKpKN+bk)5oZ;cNWJlHoL6UdL1lToEGr7bbIJ7^6+f@#ce>v4;t!(kQ}KANsc`Z=`aIh$H-OicfEUA}BxFg(`|4$ND}t|vxc?^07p z-z_)Ln6%&GpF~KmsU4BQ;+M$+m4u^d7-+`klgj+)xeE>($*B-@4oLDb$5pzZ9Bq>Uzw?@Fi;{~zTI zfXXTOaFLfErvUrFtoA{C&1Yy>>;a1d4Khg0jfvKE{$xELScK}$5!8#@SDio|H#2Q% z2}{Xz#RiOmb{TUlen-+{D;yqg8~;J0QJ6DMn=Q95$+eWK5qQ@)m^+600Hj%45*NIG zARQi*?|^P?q^+7szQ$c{dt(yI-(4WHrGC`lQj{+n&)S%%>cg8snt{trb&v;DqAwL+ zldg^udargzSR(+xFq=!eFi23&?i81j*bnH{Kr-r>ar-QxHA96VGC$%%ms9E0^&nlu3!F}C55sZ8Ntk` zV5QY~IvM0XS=AlFdlhu9>UOc&^nhkkob&9PGkqE)7NQc=tq^X zz#Y?AQzh5aIvufx{zg?JI(IpTj)wD_*=bo`$PE8X2OYaOy7S><)B&aQ1TMHPI_59D zx?eJRjjRdFR)QEgL%>D)gsN~?Oz@jb>Wle)!%dB!lVUyF_6_e)UWRXa_qLbG#Mms- z(>bPu-V%Z>Ggf-j#OUGSZjW&J<=hi9dN*N|RXCw(iA&UBf$Q*73 zX%qr?*rI7RD9HEAX5YhP{}zv45Yv9R-7Ojm1b9FKrQkPu0oJVz^j5sXysRO=i3_ts zGB01g1W%e)c-jom18={B0A=h)m{F?it*k_vtVffi3E*UxagbzPhPR6C;L7Q*HMeB1 z;ptBJtBl-ehm=GX<}w!@@R3wT<+}o|c^V!B>e7b+QU#U9#(y)$%%T$Wo|7sXwu|1f z@-)5!fERbISFyNo8Xqdff5vXufs~Xs&GH&Om$$Z{Mw8%TD?EMb94q0bMZXkDl zho4ft$W&};j!8XI$+f-7iCh4iH3rF3`^)unP=5o6zQ}PVi1cv~+Uqh8#E>Oug?~T% z+nco%KXS#s2a5xptVfOT`qJA}2(2LK8>Wr$jyLvTHpUZiE#<$nFI0pIZLqc<880K{ z);7*Wii1s@bjm`wuJemlvzwd*{o052qDbGf8rLOjgV^~#|F0Lsg#yCB=R*{|yx+|g z{@Dn6U^i#H)2=&61mN*`mmr)ePI}#G^^a%%8i5Ci1C|nmCO3uQdBeCQWpD}4dV$*sj1?7xFSz*na0pS%PzdRuE2H+8 zHh5kA?Soipwn2+)_dvm5wm-A46Pd#K(j^BC>2>PEWPlEA^WAA2;PNIz1z^(^Qz#Or z4>!Dt$lyz{sbG&5NSoXt(PGG6l5s@ER_1!>XaimS#TxgAIaKwdeCw@vIr@#luNsto zovK}HMqPUGnTn#zWZsdCSHCq=x-NYb>PJ?@;wJUz`?g0xnd^2RN`)pcewY@TMC;iwpJdGAlbC#t)38c zfN`c$=|@|2>3*eyMa4Omz-;oCh4>?4f3_V!1!1b;g4_#F#-yH!J>d6$$aByz{O#?n z9}xQW4mD@x}-mfbf zg6+rG!~NjBE3cF__9zQO@3kg-WZ6;`)8HW<*9YXPf1u>hNbZLdMC%UMmPrGzf6HTR zMQ8)6_MTmHzts*~;f0{(cw6q0ZPyAF8?Q}P+)4u+!wKi-cG|_>wys5_$c#WLa2uf@ zd|x9R=aZ{)*x)L@84q`B2oxv0_on1>Negz$hWSvs6_MLM0lpf4cNyQFGpgbxes8d~ z0;wpkdlQ{}Pj6t@w$D_Ts)L+=Hnsd1XN>MFj`M?Zvk{7Zc3!QMX`F3Pqa}15pku|S z8dNcmmRSWaO0=3e_t#fg-jD4XFzx%DdLqvT)N`qkc#*7>OpB|lF|~c24ofNuZ;eKh zE-+}Am4@R{k|}mECkU}zjM<-SM&={#FIP@OQoqXO8s3$I#ASl0C 
zv)Ze%I<3cF84s-M&wa8-OX$LnKEzR92_Z1V3_kqr{e3rH&N;c$EN*gH=g4S2Kq7>M zE29C=EwM$p_)R}M$WJpH#`hPZ7cr3Bv$#O7h&UeuJ`^xad+tMZVquZ3S+H+7n&2=+ zOp{RhH%NLlmSTYL^2Md%1W%8oQc>OB+}*IQl7W)=$CzqOe)kwe?9(eO33C>|cJR|W ztMvpnd;)?UrMmp2ss59OQYv<^(C&UZwrQoR&2pO2s$F&Z{kgu6o)dU4=u-k0$td(b z=tb_v4G`D92p+C-A6<~TrT6S5*|SItqi^o_=syq2{qEY0*{b%&Px{Lg^h_D#<5uqj z?5RD)lKXZgHpJMmgVEbgOagyIfk^j_dl5ld&1!<2L{>HOb%7$PsUBfqb&VHg#NP(Kqi@O~R?9#ZXk&K5}yko(i5KS`*hP1$Dg$|5IV9+H17K=LHx@-GCpKK#fuZ`hZC$==NLyh$q zz5EmNs$pP4GSP%F29J94ksSqrQ6hmErE`vk;w4LP@--_G<5yxB_*XLr85rf9Hl2t& z_2{XHf|ux4W4S@;<*Z|oDN;mXV2``v;90ev56O2j(cwhCx}PT$g?>U~hZgTp#^gxQ zvi6-5o)>sec}fyiAAlNzlC~;}i*O5y_k0-WyCZz1AEE8}xb$sQe_wBtunv_FmcoRw zAbMfQB*Y1F`TKXty; zo+<|lpjIg-ttSEoPiFI`^p^Wq5PjQhhR_V8Fn(>m>Yv&6a0+U?R|#pi5pj0(my6l+ z?=0d#MmEfmT**fa20F@qpU=E7Y=&Cw5{^}9v0O(<#I+>~uk2vBtS&&QA4pmVt@@*k zXX+_5OzpuH(2D~1!^S+ZLC&B~_i|F-+BE!9SB+tW!JB3|npaFwcuVVUy&Muj38)E4{0sMf^ny%1 z>7I!K!s!fs7E(vCW1s?UNaDnTVgbY(>OJ&;pwbr8Pnam_HY`gyPns6H#0j-IJvR?EnbLRa<))!~az)!_3u z0LW<~;>NBZe{B$kjRuHDG3(ZIyK8s~QyvHFR|y#paK zRBCR^`oC)jFw@ak?OX`AiZm>{)=jNy)l*e`_3&>j5@dPRkVlr*e7g8fp;}3iF`~ai zS3BkGlz_tL;#8`W5`n|$AK@~|**uP`WCom%Eo9AD-cL*h?XywWl24~${y1lIeyvC* zXC8WJgUB)Uhp5ndU5I{dD?PueD&re=i z$2O>H)jqgs`oIl$ognsVmL^=p;hxOkqFG86#m~!!}7dQjfTz7_Za}7G?aqx>}$@OSx7JWG?+Z%{ZY7lbs}eSIc`P z9`NO|X#J14u&Rc??w5rh3dHaQ`CngX9{;>D#h0YyMQluM6V1AsrwrSlw>Ou+Df#@+ z7|Ykjs%e?7z^?nTS>?(c_p!nTbe9IaEy_kl&AMCD5JLkA>o&I*7n`9Sukb`eK&!uJ z^2})SlFHgpbrvIssPZL>M4q;5lK#Z+Xi|BYajDGcsr!S{-KktTn(|^U6K**w{jR#&Ho(WW5$>aNxh9E#qmm0%%nyTEY_NE^Vq2 zZyhy8l87C6P=KD+BShF?T!1mH(yX-35(MjD6x^dDjkJ)f=6HjZ|2wnsAQh!D zhwG5IGahyCPbHd7#$AD0F-i8a3eRgO;bo6?Ui8VOP6R;e;5q^+K=M6?!tb*2qh zf>q|UKt)|gy5`REa$S?c!syxrT|YFP=6{@!9j*rjMkyyXAix?cjd4nWlCoepm%!j{Q^l7F61YHZ)LLS%%T-iJQ(`6Ib4UwHc2U1AbOcekMm66!W4 zV7i)kWAcomP_1!H&>Oz2PEj&IS=9<+;#JaFHMEpmq$nGWr&zuxVMDub>ji_Q{}BnL80*5%1T1=hlppq&!ul|D)_@@nd@m(S}%!L1Q>(&-Oq!n z_IMpDEP%>%nq(>_%zzx@SB*6e=;{DRxbDw~ftErIj$_&Q|JH{oc+sOEF2aD}og|vT 
z#!Tt*PQuBY!992U%;xZ4J0HstC`5$1nbotG)&T^;5&sE>-*FvBv~yySgXJ9y?}>;q z>#8F)znvA24k+g4=Kh&FBDW!>$B<@~WjUn{O&0W2F7WX0_&DmH zi5T-Q)WRxUcw>J;$BTIxX_CE(zalkBY9i}U^7gTi|1FDb`}m>j%}8+$Q3Ct5%^&$y zkOu|YEa`LaTrRZl+7yKt(R&hMHJ6{1+jQ?vcH}(p{@?e<&UH3Un*_5g0u<; zQX-Fg9yZ490tyDa@ocKXX3;MqL2!gW|HY|`sFHc@DQPf9WO%$4PbtWlI@aNDB9K`+ zL3O^oMR@jGhEKft_I`G{EZ(?odfb&q!mlMKwjKSYQ<22l0s4ChKd+fU>izY29kzH# zkC1PJ12ju8Nb^rK0wrMRe|63feYw?jw}))Um@mp0@u97qO--^Wv)3WGYmnUaQ&?TM zJ`7*{804dh(Eu*#H=%}riQmGueR|~r*gB1_cx^i$46C<7?Ki}{fxSbE*rLbcY(pD+ z@!4of2@ru+kWVE~I-DuiC=TXr1j0_sDgW>n$Vra(5{BD8JLunCT*&pkQ*>)~!QFne zI$8)|Y-XbvqJ^0KO35M<;D%tYn{H=aaj~TOdDa)aOj30eVx0S^Vkz~zDxg8B`Y6M} z*SAB!NVH?eDgJbX{@a2AWRG)M_rzx6NZDODJ)o)G;}{;u`&b^TNNEBo zpiNSucQ1PJFPF?YFJP2`HQpt{j4$&eKB)rE@;)NxN=}%DMT8bC&Q|F`C|e=iX*xWU zT(!+uBaGqp*AImtF#N9^HW4^lhhm>q^dIB>W~||SQ_cqDC7wlk!dYCf65xOIh*Yti zYoloi7Cnxk=!>YIcGk%<|9rUYjiuGKq5*xF2K>Gy(k0RnS?hGoXp`nM$e1qLBwOpf zOhC2Cjs|v~{jy9fAo;(Ope!7ym9;frB95MMC=+rTs47DE*gu*0MyHn@c;0Qr!Cux{ z|G{pQAf=qoSG*=8!pG4b`!J79cyn<1=bg}w57Kxn21ke%wjjqY-76=pA%W=%&nZ5C zM~IL#jS)F0e7SD!A1fx?HsVJ6QJ#A@&3F|kr;*7L;Exuptb) z)s!yy+^f?;W)s-C`}ze*ts{l#Ope?XSxj`#J;_Rza3Fle4r%_29Ix&m&6%OFg;yq% z;jR43!fngIAPz+nDTPuYtX>&;ytvn~+SaigK&si(V8vL z*tXfRZM$RJwr!gob!@X^?<5^N-LY-kZ=dts@4k26@0~IB9$BkuRn3}n78+#gV#~4f zyMuUsO4+*{$tz@Va6cksBJEe&tqF{=fG&>_J84PsxVYGu*ih=o5Fl?4z^m5tpage` z8Iey%9Qx+~=U;XRI}Dg^s~u0wR3CnSoj~Qlz75N}E?&@wKqf|&Z3atXqeEdgJIL1L zKU!3)2oNmOjDm|dzDNe0sM2sVcQOq&hnYrjLF|6PLw?-&>e$O*VpTX!IMCGUE^Wvf zl~bOsIAH-);T|{amYai~$pdJntne{dkaH#`@261JJqGLa>7B z`S*&z^4F(Xnmge4zsXS>0w^2{00n!;(XbD42nkeKk;dy0=abIrG;q=K0msVtk|ck0 zGaA|edVp2Mxo^jMTRA|icSiZ(E?A@ikIDcZ?RFCW1$7~?2C@T;4_FPOR8N}p zX>&RFj#U&3^A+MDDOIQlJkRc`N4}zUwx8dCS#uYz6L1B`LB&#^Qyw8|6t#=>z*;2+ z8Dq9^nZx6HSf)}*C2~ANVDjzy7U$Io&(0w%Pk!AO?Pcp z%HwZ_%r=TnVoOkZ#r~ynU#}5~ddLe|8(|^SnAX`4MviSMz7!M{;8wUYMn5Z6Z1xO) zYOQy6SdI?#U?XQ5w^OqM?s=G19>kEKnY&b|I?{D?YK9oHrU`_fRHQX%h{fyP_xp|Y zkR@wO?KHo$-vnTurSM*m22WQ^v^3;7e#2T52A5S~vn;? 
zAwkIkLEz)y%(X_oUi0gW*UFYADBQ2pmgf3SG{8-LW(ckQl-D7Vu?@B)%taMU%R!rx zIlIvkHVe3b_;i`khojbKgtpOi2$mB8?6NQ9wSSbpBy5@!!!vj;ZF}Pwc%Y z)hZz#EI>>l!fepGMzz^2jGigX-HFbeqgSWOXXQ6UGt#MJLq|Q>#^SWzBKXDyloMP3 za0Xmzj^_PpGA4*-ZA@>yM^uM80dY_#{GHKXpt9XzoKNNr%`~c+ococ#-xz(5Ngcs; zLcX7jkf6Q4_KUixC~%ow(GQ+(T8ysm0mizvm2i0f2I_&X8}GYhjOlr#Hr(bGMk$rJ z1;nmw`WIqIZa^PtmHEjY`#FgNdtW06v@l^s#Ti2qzO@|sso=MYh!t{?y;3AYD(2vh zj;5xj2FJxDdy8n`U%H8bP??;}M&m(W4iUI^I`F^lAF+n%UWM8B^S^I~!((@rC;QJ! zn=u>hz;%*@GDJ^)v#i=d5!T(S%?Mr;#ba!ez+u$-%Y+s}{2~$5cps}K7AAWyDYs6c zbZvhe#gJ~NzxU9EyfO&m1%&q^`8)v6>1T1+1dmJTpq>P=9-7%NRKO|*PU#c52F*hs zUg(AkGabk&c7#yFuiJYrA?EZuuovR4di^nR(9CpI#d;h~pu;>Q7TRNu!rktNp$~g^ zWFDgS0k@~1vb4sY#z|sC-q~u{z-^Qh&|jNVf*4&FfE@cdri5FeP;3s z@5_LQs>jfl>c4~`V>O9#D=ZWe@HP4ld|vEDJ!W!Z$$EKR4irC~E(4vmc02?92=-$v zp%b;qB7Pgqk25shZb%`L8?uQ={A}YW0dA0;G$lpOz7Qx@z6_lRi@8cekwvb1F6bPl z?RDd!`1eTI#f;7tYs(uNo;VvyUwP2nRx41mr5Y>5Lb`0|zcPTItyQ81PQv{IPME@2_oE5GO*Iez#+iZw~*#FG%XkjOhMegmkm83_&#r-801w88SB-{3uZg$u`kzO_zGfdS0&hhUl1 zx2=L)8kF0Ig!1OSG~9V~&=%`rDo@z$@6D~<;43&v6+$nQPRZ)UBc8D_65*U1`|6ym$2N4FKcq_yg|xA$biIRd0zInL<=p3 zF7L4y(ZS^_7{aatb8;U zMs^q`%AiYiK;Tw!+oo#M9l~xl@Zqlss~`D9Z{=vMVG0zwLVPONYLZB-A=j5*8rPWr z%KAF!kJLl~bLNNP^^I{P^zNDUyP<|u9=Ek*0S?$BSEz^n$PMhx*))w)LHv>Z(0AtF zY|Rq#Y>SRraS;{JbKc`-Eeu1=-voK_TBI)Y|wy=fCqTfE16rdK@f%yySj zUqBaTXot)X_4T!l(H7|M?9gwwgi;6&1S@G^Br^~`qx4YL~=itxE8qODQEtDE>-Lr=h9SMI1A%#rtUhjElM zfn>ojOhtgyYVLGb06$IPV zv+JJ3Bvdu${ERr3U_;lsOJZ)QE6If2&YI_*_mu7Y7A(MPE}11DiYtqkPvr6S!6z#5 zD9+n`1bAa{)nUBrDUVb0!Cah7_coB)9cp7?K@)`(SAtn7_#-fvRV-hab*&08IOzjI z5=9igq+s0|kOSQh@lt)){o2@mueyqrmj7Ya&Q8J*05`=Ii$x;kA|o8ftasW*+{k^p zT=%&?oGwshcldE%{R6Zkfx>)akeYl_v7`5LD-`I!*enp!iK(mZH%2c;Zv|gmyyq|< zPYnmk#-9w_Lp7(2m4+`ZFkL3Ww5F8LU7jbKclES>SXqzsN3F<{fGj3=R zAP^2SMN^s`E^lfNLHd#f2{6lda`%?Nq5>udQ;*M!YxP4nH>!MR9#lnV!fX)cI4Wr4 z>W|hCo5Xws(CNp6)9CvMODd-pq%p+7#q`_GlLIA0Tr?p3L~q; znnQ^U7>6A{Oy+=BpPoT{71g<9O;D?s*gDLwuMH`k^R`I@ESRc3Q3brirUJ1LJ*Hgr}A-VZ8yo%7cFYBy>AsUo} 
zqYud14}I{3TPF)ul^AF!^8$_P&sF-#SDfR1OU=kDTAgQ`D*6pZUn9qT|TMC zv|}K1%X1N?!k90XQc^oeyg`vR<>d{SV8DnXYbn4shn9eitsN+Rvu!6Fmp50eH>pt3 zDsW^ewW3*gC_omGDQ5(s6ydP-cVpwC6)_mrIZPCy`ywvkqKGai+))#D7V9whiw(>G zLb^E7UdhtSQ8l>VVFOjzzj=-AD)XB{8AOYuSc5Udy*S8LOjer& z2XY*5@uqinf@wv?9X`B(BR*_k(VO#Pka{t#`?Y??)c8^D6E4|TLy|zf6l`PJbg?pRh z4rhteuruV{$`anE%p4q}L^XkgVQDl6JqsKWC2@ zl*AX^@s308NB z8FqcA;|ZRYYl68|OBV#(=8W>miNUPi`AoE$^O=v-e)??R4(-@a2OJk_Z%(1~w6#6i z>FRD72%N^XDcVPzyAp9of;=e0rb@DwA*QIq80sfp=X>rd>2~6CX8FrTQuD&`?)A1iMQ{Gz-<=h!3~iE}UHMzJcC!n!LIakaTp#8wYB2qsG6Bsn zsAK9ur6dpK7C{DI(_KE-kY1imfX4A*{x`>5+7c6y1Gj7dmiZk>dhSjb*PcclPuEpM z$CUzZWqz<4J2)4YT z+buC27O^?U$oRu0O69$C^H8!?9Gc`MnsrIBz88uG2`D|A{e@_N6vl+Beg2qO!nHaD zJ!I9eC166n?cXkp8^&nx6W2)=SS0_$1VxMm39Gj3W2s3m;Z$u zJDJ=HfJ~LyfAIwxZ{&0UjW;q*0?~0bb%oInel}GJfnADn_D)G-Z8jG8Vl^W)+b7Rf zgND^{;$8FBK;37jd>dc$brIYr(^eHNS1sFU(SgxAe*HKN%V09Nwod}##pQjjet!ff z445@I0u)cdz1xs$Krd!O%cHiRR#i`5E;kMyBDqJd`N~gJ;;xALPtN0D4D(S|@?a!* zoaK{{wqSRIeo}BKW}+KGR7nBhK6a{-#BxVrC4r3!>ULLklzneq@rvDoR9*t%Gglf8 zq`0VV4Kuak8cfxApoO0O523il0sYZo4*R9=EJjp1$h_XVuAu<|1Yzo&OZ-EPiM_bZBw^}3uyjM;5YN8f%NKHnhO^fVo2Mlf8YoC0`Me*AZt~eY zM{&jX91(R&b=sJYy<>}%?C-)oXMelx0F7uSn;Q;ubS-LdsjjwImlcNSM@F4`cXqtj zdLQ7nZT2J<)=3Mi$Kd12dtNs*DEw^r7lHprWB9MsaCRAHGEd;meYBq{S1;$cD71d? 
zB^)@r$K|+7ABea9K^RWn%u!&o-^*>;YJ*JqA7I1)1~zf2V{!V-Aa+Fn`q08Xab`{9 zO~J&lJr%~+hv?nAvy|D?C?ItP{VAZzvu1cVWI?QM8Im{l*OZ!W*K1o)%mS!{W5?8m z>vu7Du(Py6K)8zLpZS`M`|u)r*y|s)y%&|tYgYjjVLtpWs2Yh_td??>l5a(d0IaRw z#n(;OX??UJt|qJK6hm;4Mo;~h5gSNv1m{@G@_9HFx<9 zfe0QP19ic&&eWHx4dS-=^mCVC=HL)P*=<{z16sVsd$fN=v~Rl^ic&z+FS`)~7>>?S z%{kgvvn$XPXmbbK6n|RC?8%0|03q3hbesm=sUzSO~@Sz ztvJGDbXIA9Iwo}b3`-Ty`boeN`aBWBlQKU)evdX`XCD|nS}laNC46pHtk;ZDy%ZQ8 zbS2gE1wnkzblLIi`+-(U^(0bIyTm*c7IUMbaXeoq-Q@)`VRsgQ$8Dhc?mkyeY0;cN zoV^y)dl30A{P^#|{~M%0ie9~>vFRBRvmM$Wt@G88#s*)j3S(BZ6y^Pl;r^LH1JJMe zTt3fF!go5O>;W%qF6B$GuWYidoUsVuAWlIYWh#B+8rlwx)y*gaBu;&bo{Hiw_?J;O zrhTa}#s-25(xG-AfE5Ol!r+fX-faEUBWf?JyC3ooFw5C`fJe255*yLrRcrHZ{KivU z;5A@@sHkf$*sxN*kZcrwk3)^Z1*K5JTgYrBdt0%4%!XlKgMCJLogwiR!aU&;4l>=o zQP)B%D&mm<8Vc2VD!mE2f^??vgJ+C~J?@XGf2GOJ92@BNKnzh_EMivI|qp2FQ zF~sEX5|A|{C2V3*pxYB4XM%7J@UCmQ`${cU^jWRle2BeTno7HYg~MxNaTg3WvRATgVI<3ay3Z=@8}4!4)s3>%fDmLSxnFx$EYt5S8QGc!Gte)a=(LpZHCsovM;Usmq{QJSm(M)u_+R; zAumraIpUA=?dI79s#+8vND#^UeamVA%6?5m0wRESsxBM11P3he^28A6km3Y0E82_x zgHy;m^cz8(dwlp+zkoKJJ&}4%@*)R*CxyaC&xe552q-TLY;P(cB=`o_`oeM$=2-+l zv{bZFVWIe2k(VF>6(%GIjhdm8iy$m1zrpor+8=Rs)>BohUEitV>RVyaH@vJtp?JT# zkT-66>(rvPGHKJpv<7%1Ylp31W?5tGLjO!;SvacWk8jJdMpA(JUOhc-<=ldvnc{}= zYZ+>2*wi0wTjfCeH3W5pLdKhJ`C#Qzc>0CzcxnnfO`;MtZSC&4$Xw>$s7r^4C+$K* zJTR3}`(x_P49wox`#xsI$nT}(>7?lT!GlInB1JRjeeRQ^g!@AG! 
zOWF1QNJlNwAdb>IZ{NS1VZ}YQ5VbPpEvwF3B1IZbRFWB22y((nroRYUZY@34$0N6% zT!!H8W)rXjqrfWzEe;gB&9XutMVb~c1$WC!(hP0zG|Jg^kMTtx2k|;!?%uH*hQUFb zPZ7owp`4Nc#Ty8oQxzD>usz_|Yi)QLaHt1&avo{Y+QzH+)@Gec!tn1*$X{<$u~L5b zQdC7|vDW`0qyLlQ0WiNqL9i@^f>S^L%Ggusxqs&*g1VP6C(|l~T%%~9p?`nJS>l}3 zYqE$>i1kk{THM(1FR86{WCEd**KJtFrTM6cDwIPC;?fkRpzV~CvV}Mwl%al0fkzW* zAA$N5oc&c?z&O`dyS5|lNlN_QSfG?Nz<`cr?!dGU{%3I9kdnJ^9tAX2zxrs+9JB^z z+zQBiZDz42?uBk`ZLozfvy}wnzpTb}rp#f7Kq412HL6TFvsnyowC-db@mqiZMSCPg zAIoe_RJ++qNJzLJ`$5%Lw^{VD4*sS0AOw0Tvu{jCuYV?O`ZCtQw!jS5(CC`m_1JT}W|Iui3gUxET zz@df(g;3XcWFQ=VWZ$LZ<9Vh`2y8RJaHMAhVY!r-9mGB8$o^xZE-UamNNjI1;1|Zi zHv-S~EKewB%L;=N!glxZeBpv!)82XPN^>+(Em039iDy~}>G3j3Gun@u{_iu=A`YUHXGQu5uYlJK z5W#o4#MGL;vv5f_x;clXZ#IaPGn@x{dg-@2%J72dwmAquigcTf9x?9=8Q6D&0w5jT zL^C=60-XI-xgX4Icu43r2k|9`DZ|Rs9c0dgBO7>DKWnB$ZSG|#K$h_OJzDt z!MT;I?9DKk7F;x~o)sE?WfAxq!79OnCO$TQ$0{=0&$zaa#Rw@2aao9CsF-S_${s4v z$Od2VC>^#x=dN{Gpc7$*Dkpy{6G5OUw%f2i!(jmjB(GIRvrGu?w@?z46o?n4y)a#b zJBH%j8OF#)^v)n)%Fs^N$0QI1K(|Z^n(Nq3@}OU9kuIu!$2tZh{o5c~d2N*`Rh#Or z9K7`I#t-}Vr2sH3)YPfqHOgrK^;J2^fiERE1J6W>RUnwC7CZ^K+7B0$Uyw20($bmn zqQW|d#1eunIA3d>>50pxHT>_m4~%e6MTUY3^M^~sUUw;cN_B?8TU)!FKH0M;vTp`^ zvvV6!F9y~n`iPZMq~GqBOfUPcd@tXk`1RX1Ze@(_c7O_X=itfw3)Dk^CJBUxbDt;0 zXMwAM-TrMZS?(z{YRRrCDFaN7jGpqW;)U^$7X>&>Q&CU~O3JD96l8;(Dp4v!k0zNT zC{^KY)MSo7q+g0tHtSM8f!9gfHS|N)e@gxTUIt-ul7@e1@!Y}QOkPtceVHybLpWJT zpd2lrNPj>A-@kIX95*Am?3S}4-+i3P{M#Jr6}kSi6|vm7jY#(pd^SVH%H=B_J!qzgjR4!{dqd%E2 zvk}#-9^O?#=MiEcS=^LXt57~{B}kA2ds8k<^yl?KAn|)+%>bgH1!b?TE~y4s{!UZ9 z=ap~xppGk2d8$&`^Kg;O);6~-8CX2zb0{TKXYt3?(tHmD%jOcJmm_cPUTs8-D^y(e ztKz2CoQMe^VF!=Kmt`)>f374c&<4~+Mfn}!K@Fm*tEP$qk|7(u2mMYA1_VFp364!_ z_k}O*WH)>oo&Sd_-rlQh$KJO&sP=xwvxxY{3Sk z5Bm4nntL3b2B70hQ%YwoM(?{X>D>YNwmq+u%AXc4h90IO3wop!lt(TD4xO2KKrUzL zEAHQ>KJJ@R-8STxzXFD*Pa$9bTT}jf8?2&%a~@>aSw+&r2BQw7*fe=2zXJ>m!j!^z ze4Xpkv*%s}dE-REm;E2vxlc0g)H7^JhV%5I@HrejFd7tImt&e%mtG@4eBh-jf)>=i z_1Nlc{m0*SJn`MW1e96PPcJQ9_u#v!-Xe6v$=FAj*-VX}V%NviK?j$Rg?aS= 
z<3wo8K_Du)KoV4aZsjO{@$`xMnP79gK*Om2Ar(1T(boqRz=ZGiux0A?sJpziy%xQn z6u|}hXd4<-{eot0&$|a|pGhtWSb~kL-vPR(;EQJswU{*YXZAlVyQS?(o8Y$-LG~i1 zhZ>!C?uPdlIa`Tt1LV<1gr851y~Oe-gHdKVBIaJb;!y}8={_u4*Of6hu>Wupl>VR-SiF~LFTF5eV*8-9~9~!kwL8;q-D>@ zi?6!AhO=M{H$JCJ+a0cj0IFjem{(NNX9(`y-yrr!N^12S+VX=a2Df{UQm(*y0lbEl ztb$TB7vz&2vxRUgvaeJ+*&*1OD*={crpO zO$wmF+8r)E=&ja8*Nv1z72{_dSB6b`9|A5^WDm4n}Fl`>A;?J5{F#-?Gs{(@b85~G{f zB8c-D9FE8D8yL`p%Qfrl>FKE_vwU18G_Nl$;6B81%dN48Lcn1*cv(S=6b330F=;7R zL9ulo=&c#$9hSp{x38yEBeZDtxT1J@#P2_q#eMRaeG&_jZz>VivNyXog!?UlLXM~? znMD?QRO^-6_$52&O}Qp)N=^1>KY=|o**0G?R2P~YDeFZIkAEd44HBUl*e~&2FV4x1 ztOqtwQsptdz7Xe&WjXRZA4;gw8wX%y_TYa((+C#8UO*c0>wX}gf?1l?(%ykV{1N24 zJ?i_khG>pqbva(ttxuf~LD4x#z;HjYArJJ4PsZ~?j0!HjmBd+WsYvA`C17v#8%1+u z7$x{=o;|P!@{Gb8CYH%U2qned$Op+n+rEp@Q2a%8aYPn4n^Z|mhTJ@Bt5YToi^1zh z^OwzPZ6MG(N3tw$*w^oB7fg0^$4hccQ06N|c1uouppk5$3z=ULj4)7At7|g7>3%~) z=Urkkj+o%0v-=XHeUC=+u7H-r`4qGgHy?{ZPKLh+f*w?1xend)2 z!|(a(+;70}VPoB4zPPtvh0B+Sd_I!R&^ku$IAeD&h#~4_Nv%NPH4G0n^}kFYk#EEQ z!+7@;sgSX5vwgy(`4qk+Bebp%xov&{0KE$U*KiB1BHLyptDQ-y+&Vp|!Yl5D^i)BK zvW4a(;0)l=TbY#wV^4-pf!R7&r8r?n=B0PJypops2r^%+haWp zBi2hMvf1e08NqOl{BSy&sN5-MJ({k?{e#)Q+F$* za9)?R`w?pNU^YT}oiefPLNnbs!1f-z@15`+RsGF1y!ttNG zvlZ(%gS&NIRyUlSlxP~PUJrqkVJ1GtC43V(A*=}Uo@|kK-+-&;umhkcaCd7X_um_1CPI}UI7p8V4 zc&)^NEoH;HW_dy}-8~h0khzE}>gA_x+JQ*t-N*7(7d=ALOFe9Ffl932Dso(JJI^M# z_c`AFkK}Ry#E%4!TKn0BwC2r{I?FkRnM<7-a;tVpE-3tH;%3V_DEel6Q@m);!xka1 zav>uG&DDPizQKKYVmT{zUS;wLYjq<-LY)2-9F%98f&b`-LK=OwIet z+G1V7HU>@h@iyDV&QXxE2q}wXoN>bHFv3dVt2b)|K=-i6fhw3`?4N?O>J73+3pnlA z?}6x#bU^CU_ihkIhz2Oy_GHU>u(HozYg9q zG}h-S7mIbMhF`C+m~vd2HCiHDFUljm5KqyfL{GKtrhuSLP*p_h>G1b=y||UbLcH0h z6c)CXqa8-NIMrRD`^DUqh-VRq)~LT$R`;(DH}yT9W3!2Y845Lx|6~97Umyqs)J`h^ z)^5^SYX#wn7jb$1CnQ?;Dh!m2E^GJhG$cIMHppdXE-xEl1Q(3Y`A|p(A*KVCH=kzB?UD|B> zHd9ez;ShKFAa$&|Tc$>9?gdy8oyTT$09fG2Uo8lSU9Rnk46HuH5RiE5G~kK>W!t27 zmWUxx3sWxoi^y4^gTg=etsvJA`Lqjdzwl!swu%f#aUXsA#rt}5N9GdsStv?OQU1?} zn37vVRwOw5Nq^BvPX+N?(3l*Eh^STxa7X*57XpY+c$n9w2F3O>mh@eB(Qe&G1+3e}2OaD1?g%8ZJLNYbc# 
zK6-FZIBS80wY=3`7}iRXXlJSSV;I=q?{keFy#~GZ*HP(yHZ;8>NHCQ8iQ4U1+^bh~ zA24(qu)&PKAMA%s1zvA7fz{fda607rJHqOipkg9D`K6^~59(jT)1L0lh}E#R!h%;RlqN<^zDUM~A{hL`AxxO5$DU2^hF2r}vOsQ%%f_`K|2+Y-n{Hcx zxXty^HlL%Ojm!=FzG>#qHY6OHJD%r-fy{i2mFdn#x5=?pS}B4R@$YS zdCOrwm>S?+I%-MG^KziO#r*u^iGZFkZtMSB`2jJeU=W3&Uepu6Ntsx7;vY)N<1aG` z))0p#s8*O*pH_^582G$GC&$NH^D8SS2Iv~RHex+1XFFJ`VhmG|)Ulz|v2#w|P2VTT zY+%ZcmC%sdSHPgZc{vqY=ykstF&2XUg24T z=(X~4tJwvP&hJ1yDb#M3S=;myB~MwF5rp)Q)ySOpWWdWk)F_?xNfDjs5np#+DZlXb z=^G-zw}f8w`MWh0R}d)4oI5R%b?XOO@#mMKv5ZDI1iTE0A(fbYI|ELH%bvGx8b#=9 zJ^Rz#YAPR9T}n;soHodWN(emzrufCk@Hy8WMHPxrnPehL zvzfhin{(r2t^l^%Umy5GaJuT&W85+U&SS_5^AI;j2^^gcxV%z_2erpV4g*ar2e(N5 z`SKP9^t9Mzzq^!Ce0ywZ4&Qn~jN)w9ePxWUzbfcbsfXG#-Qkx>s z`OY)5xd|K&fo9^LG7)|~arxUGO9f)a&YGZn za?}5JNcGP%Iu!!Kxq1w7zLEDXh}@^Me$8*;$O6%j>A{fa7*0``A5Nu^^9;7$7c3tK zWV%jH%qB&U@*gHYw(R;?Ae!I6ag$F;WQd_K<_e2&3j`}-X_kDQ`=$zs=yQxi{?cJ_~~I2?{Lauz=9(Rw4aIW;GFKB z1*-})vMaXfil0wUPMp^+R!fG}(4#;uz>`20wc_tQ7I);gvr9g$Lx;OrZSlxKg2m93 zex5f-KUg&mpQG6>Pq4XI?NI?Jn!rV+0~5}m@-N@k^!fnC4W~bL$_K&irb)LizPhfp zxIpRgs=I@3y9m;c?2pD78XSQ}5gGd~Ji449T9=S@K!PyN5QOA$=f|lcu$44S;>ZHY zvz)aMT1ozk}AX{(M$9iGmj8i`^{L z11X~0-B0@5v@}_2eO((C*j@ke%%+v{Io^LzTo@C=1Ik;U{Hl8|km$_jg;?~^Q6zai$nsu4hnaQ5O|*iRRBz7ZkTm(U zhj44Xma5NrFt@|*)RF1A8d`b{>_19}|E>0)QGzHP?;@oWI7yF*dZZc)8rggI(UZ}p zXd(eA5xf0>>mR@s?LYwxB7Q(d)H*3T5RJST+Wm;VHb{7Np#f_6wZRRkf`o3;dE8I` zs%RS4y}VI$o8Tia>U&zx$@r`iVoVP7d-J~eqbMW~J-iGd z4w7{Uw4gXVP}?R3chMCpV@qomi>|Zw88$8L0w?BV#ZAIR58(!HbLE+wYqpSfT zO$CQRO-&L(LMBsx!EAeJ1WL zGv_5he2qWhfifvlyaqJIe5>{O_z0a*UDa5%G3RufqPwISn%BLax1}DoSoZ>dOyw>5?mgTQ=s4p4C1H^aKBD#3($pIU8+p{Q7S(b!PO|Msl&PnjsWHs!@CvKw?)LM;XbrGH z0GPV=#b0kv)-g^mFM?FZzqNkMJ7e=jx2M)0wW1z04Az8jOgNjuBk|3xf^Kz8#J*4= zbog&@ggOR%rY#mNn-a1y-{*m^MQi{E>ybI_mZ=%{OxNfvQ}2uCk$Ph&t?zFyXV`=v z3%SI2wYYN{I?Te=;~N2UU3Is6pG^+)9lx#ly)cwlIVtZyY=npcqquF7O39(4Uur+{ zJXZ>I18%;cw%*y^x=RLi=$xVSp7u6A`fl#F?k^0TA+cNx$zq@YtXK&3Sls6BO&tvf zTxL;MubvHfi^aZQPx_UJ6LEJ9*#G~7p9K5`8Z1wKY2c`?=~eh^V19DV>4t;dI->`} 
zqA!>2NjyiWtEsJ*4f1NjiN2^nR~xW4%Z-$6^U_h+g{B8L<7hQoB_nW$ET~o7Nz!`u zm!?&M_eW7@@YYbTIP4U6=c8h&RWp?I@^wnQ$o3>vf^(D_Eb^y1+GHML`PBwFX4!!$ z8Ier9F}uH!{&CXbk6@iGlE=4yu*6$U2AO}7FTk78T{p5q*YP=kjTK~0qk8Hs=#C(?OoBM-+-}A~aa1iM4;(Xj`GJOt^dxfUB2s%%oqF9lUhB!DVxG-=HH#0UpqNOwK;8(|0lS$pm@gsVX)}0O`=JwF3MeU!yF@? ztSf#7y-~j{!@#S~?tUs${n60n%2Wbdbfp4H{=*f@EGG}>-}0)eXBSZ@V^Zh);i#$k z$SHE8xfv)!akp3Ep@7CTO>^uO+kA|Ws_x&Hq5hZ!Ef7ES@Ug@ZA*`2pSH@d#f>(w* z&Q+aRkaHps(nZQFdAh+&{B4X>%XPl*<&2YoA2EYsKG%X3#AO?$E^X`AS_)rQ*zMvN zHOj%t0%}FJ0xcopAB)G_KOmi^5gHe7^rlZP=?o>F5QSI)tKZwb(Im%TE{PCULH}ME zqa;^5f9Sc;nid}Ws^(6Fb;;`NJTd811~MEV#a+wxyf1>#bq~@H?Ok@-oqInxys(8G zx$O?$fz@t@p9(CI#h08t3&}hSj39Kle+cR1GzGej0+QZ5HV-KX7=~WTr>XF(ZwM7N z4pTsJu>Gknxs0ORC)+xkNYeuk_HQq+bH5NZpmiaclm}@^WP}3di7&YpuSmQKUbc;c z_g`x478aYX(W2Fmdq3)siJT|&RMQ|M(<+vc?q6@G>hIrnFNj?}_UYqxdDXny9o~8B zP5ST%)U8unLbwh2l$4*&^YRV70`P9^G5$Aa{~O7{2slP-Ra+^wOWf=LLwC8>=_gMJjSFW!?L7gzzoS)u$xT#D;MC|K6_f}UZl9ZDv|}(k z5#a!wtuV$r4=;38ik1t^3-qBX#FW1@#y8|IBpD}VY6(77G9Ol+-NtIoMgfpCH}W5O zCBn`9?6^2G-BZTvu7kB1^WA?X(ZT(;wz38w1_=&fakBGQQlDD9g&D|9akYAUT80VO zqpEr9pCXdMT03EI-2EjnT6@h&fM3OR6ydC%p&)ZVWg zLH}1_;dm70OLOKiX=A!b=A=$c=f6<=-$aAItSoDe!h$_t9Zb_v1|qako}~LHc2#1VSe9Z}M?dYxzkbi0bsZ}aa-F?6 z6XH8hC~#h0xxaWIWM+PT;xi!RbsXvP&a5Y~Dd4`^P8n2qOXwPZn3xef58JyhfAYyi zXl&JYn{=8a(_^7YL z-jny~!3HGjk!|9}XSz`H=0QF`sz#Ap$>p9_8ROb`CT&sH#uBau{A9`KE}hMSD2i45 zRclgVz}MI0jg_Hlblax!=*_$gb^W-=_nr_`#~j^O)ss=Ziaf72gHh!JT$?L1sKMvP z>!g%=tq7(oisltC8`$F1I0`>dU*9RaL|HM8Fy zdJ!4e_V>sAOsimK@O~p{KNL5ehf1!Hxn~{OpuqY_b5)nk+-mmHCB84({XnWCpW2~E zyZd$a_T}g)WH~U)wtDCiKtR8&mkG!NG zW!U?F6ct~sd~O!c9QL|a+?&1SG+8>n0aiR0d=cv#;Fc2!+CIe+d@FX;s!J7y%Xy=R zA#3w&d6x&}jVcb#$j=tVHp?T{1D4*P)_tls@H+|TZ=Judp*_CM+vo2a=*hoMvCV~a zukTkkR{5_J1xgQlcTN9MIM}qQ!=H(<FKt$hpR+q-%TWcx7r>OCp*z++`B>-kTq z_df-IuqF#&k+nQ|33>2+mzw#n$6^h?s>DYyo2XOvH3y%D3*}`viP<*tc%L>Pg;m?d zPhVeOhB~E^NmiX?y!`Xh7A~98NGyN;Ti6!^obDED5+isGN=0onSRqmi2Mv9U3JS5F zx$-a$p^ek<_bh}_60FbrgmkOje5kG3fA7eqvNB9Qad>S$7C2E(F5&Qcl;bXKip(k* 
zRTxfwb!BCxY_%C@ig^Jf_}_;Dd3>UTxGde@w9i{VDz4_zbz0?huh*3IK*q<%?*%R8 zD37}&{H@U8=h(o)gD;L?Cb&1hqk|ubi(fM`-((Z?(`6zvO?_|*r(z>Wzu}gju5Hc- zsQGfH1)rYUJa}&vny#P8dyzYBh-UGtB5+4CE8}b%o3hqh$paU%_$%$w_*CwF& zs(u~mZN;||*)i4W2-~$B2FyO?AYZm0XJ#09zVp3nCF%kz2nYWzY&<`e2s1uGEdibr zFKnemro*pj2BI|9rD}ox#HZCT(#5F9m|ZG73z2!hCA$0#9e#~@-T@i*8hIbga^#*n zMtf_k(jbKUuX+1Euf2$gMIVLR03(9_p0xvwixmmNQ9pIat(7aa^E$HJLrHGqIJ^&6 z+#8-JfPp1(waEstYj<{acdhKk1ebQHEa+9w)at*j!GG?{9j+1V8_{G@H*`rd=^J`j zHKJrMqMb4y>zMFZq;jWKX-2lweKGT#XO3h}Weg`GN^i`MU-w)86chCa6B%3I+pQ=F zYF?$af-`}`8rQ`d|0o{&R64}7)~46;{B z9^CQ<0~vVLO8?%$G*t2)uriM;Y0;GtNWIy;$6(T(KTEkS&0ky3SYtL~wvV$HVY7ZO zYHW0NYZtIka8mNT7eEMmDy5vsq-Fx69ncoT_v?)&fsZ=57|(ud%YCES2uZ_2XfRo| z$h(UtAc9q_{3ym^fq#9%aH$_rcOjk`>T4~qilM&yczenRUk@PN&$yNf+evx)z6dhW zH+|#L!~cLG^z(14j;i;NHRsA$z$~S0pXsIdfu#w1QR3@wdfTA5dPYLGH@3xRT3Ikx z|CL!^gRRHcoc<%Hqp$C)7kEn@0p~0T4HlN6!S{(`AFj1)$2lLbzKXoCdnM)1Ul^FX zm0F=)Uf22gTN(15`p#3f#NNJ4ZG|_4_qx+tmwZQsPy~lioq$DGONyF81?>wsDrJiB zo;bVJbgx1BHvY$N!--i30Ts{F6TP|~C$oH4>rTh8Z}mzD2+zB<99@pjqiRdeSI%^v zviLdaEGDwu4%!#2BeKm;^bdtG`gu~;-Ftvq)BIx4rVy-?7zoD8)LH_km+=jT zKP_Jj|EFQqggQMt3rsDFQqOPM=#?+@77cEN>{bl}|B^&tz(Vm$QSaWI;CYzipz{+v zTD4_6y%ik;VO=DN)(D9YzOw`p9(-MHc87-R))!EfQ;l(Rq~_u_pg`lY`+3m{{Auav zI416iQLWBACqdTzL$%6~{<32Sz2z{Gv(z@+QuB8sbA=!n_7jG)$u!&iVdF;B>5*8Q zWVU+hlmU?+8W8xf3X6Y>Bbxpm&7#!rpJp8H7DdHM?THoY%F5j~4+@S`;>R2V7THtO zVBEXu8Vri{^lUuXpv^7u8{}e8T~2O}5eaYa+>zOhlAH^6_~uJ;6wgQ^tYqho@s0i} z#&kYP=a_oJ#oDiQKz7GLG?=-*o#hlI}!QNSX1k)9Yd#-<2F7RPh%Bq2L76+n_%yKK z^nuek%lQ6Wc`0to$V;A-#%3JeM_86%bcCGl?CTZ>CO%w??&ICJx2R*o{|{Af*%eom zt&Ktmg%uLC@Zb=f;7)M2;O_1a+%34fySux)yE_C64#BBgJ;r^<>HY=#%i3$s_2@9Y zHWP1GM7oj2L51nJNkm{;i#M3|iu)|9t~{j2PX=)_`NtAXMc$OpuL+E_x02?rJ?=V5 z?FE>F9;9o=Yl$^Rr+AP_mC?zbsHXhWM@08=l@t1ni*U&O#@`PQ4I|2DV?5WH%u^Wr z_-*PP*=XaY>I3z?h>!|_OsvxAJY0^=GQOuy4B8sokw4{0bQl;qj32Q%be>wYRLZDr zK1LUqJ)I*zRvy&7t2LUr>JA~du6qqzw`K8Qq#&4ayUyWpX2!Q9#u{I9q5 zY!#^2{M5TKsTvWf3qx=Cb9Ns&Z*+i(>F=wxtq#|TuOjSe4nkH^a{>z!_II2vjB%{fl8U=NZbD{@WfA;@vCcHX_K 
zPVMU;Jt;V_YHm`^|1Qb;#PrjzSB*@7_(xmOsIOjXvonf^86HKVFBK=Gr9)Bi#^)^u z7j{;V3FsCHrCQjcCF_>y&qQA+$I|OGM@A7w3oj6Ny93ZN6SV2)+M{X0Sn^?(Yi)UV zdqhzC^jPWuMW35*@Sp78CXhJ;>@_ZDRci<83Y=MbB&-6V9LDNS;48|s6dthlXrlpPEH7gKeN7p^-uJ+XJAs9B$JQ7YE?+ICdD;Wff~p>VWrGKa9}yetMxG!!Rdo1?`r zr|&Pe?;8?D!>)!)7dk@8SaM_`>_rUmM>LQRgtw|_FSNNxCfKf0FB;B3YuGpY}D`-hT44(H z$f*~_v`894sd=uip^C~9@*6lZOpvGlr?Rp#+#{HDyJt_X*Jg;BYlL&|48%f!C&l;j z6^1EiK`_ICdg}B21w3|0G$Za)wR-Vu_}#&cn`{u<3jg?asA9DE7=W?hB!-6HMGMAi z{s#V}=b#cAo4k?A#Ka^;v83R&t);8=pSI$6Cg^)D`vC_h)5+AEQwR?&fofcY3|L+N zYb}SoV6jbaqwyQcMq?E#T8Tp}Q}@fKT+#yWVYE!k0l{TIGm+n?bF(vHYLJzCxG-(b zz_1ukziu=L!{E)cPEKKb4B09U{@q6Q*GnwQ-J9LPbIN-F3%y;nJx>CmEB6`}kvmrS*}V zwriu7YV)f6V9_e}{I6g~Z=X>c2okOn59H+X+Fo9_!9jIL9e|4PccP1i_OBo5Bmn7^ zx%sdxgO+~2%PTvNeF23L`0rT5rr*DbyAf^YGs<7~8#+ zneYaCAzbfp{ePWwmf0rYq`}ZJGCupJY_$+E-q4UykeMrec%-9ps4?7lQg107*dwa8 zNycBe2U6*K+GxSr_&t;8*^G6mWUil+yeA2~u z@rR%QA=`H^?8&k1Xw-JK9QxCdH9uK_ZDN1Qf7`O8BBZ%E-tz!V9P6$|xkq;Ud;i-?ers(1h`JET186s2^6SJiF)+LlGSupBj0jSG=!6KG z?WOSw9Z)lMf2~CCZ>e1$RFnmWpQot!8*^gczqSgTRzK+7CHKj2<0x8B&RIF7DbG=mQIJNcucrIwf9cFbZ{iKbD3M4O@A|yO(?p~+8z6p+n!VR%pvY;jW zj@E><*zH?=aH2yTcqMZ-@N^16qjd;XdK-$L`a zj8|iZX2Qtusc;^Acet4F57%{99xkT=`DlRfIzXio;7l<0z%uzGo{X^NLxaIuKwziG zmjc!}_saL}sAY4_bKSp}TlrOuNB3atV$gzrNXq7%w}xS%Z{VUjBNA|z zzFk%fy%}-^HB|H!I*c$WJ<>zdV~E9ca-et`HMQV@vn)~ zuISTcx<$0%ZRT^N$WZhscihOnzG{aVqC^92l&P)4d|Vh5*|nM$fzVadRN2ny9_6@$ z!!|B``bu|qvQJkwm=tEAdwK4`SgmX64M9#BvzYN4W#oNNHwYM%_cJ!P6bX3utiGsh zsV>fAiIdoc^Z?_-x1VmepMlq;Fa^!~uEz&>fu|&&+~jloe`!%HZ`$j`z zXskB}Hlw{plz17W8N{+qZY6B(-DkODcCktQ(mOJ+vSg4*_+&|&M#+K}w%z1A!N$2h zHWk6!D{A~Z`P`a2iKsO{SoPQJv2T-dSkmkeK8I~*;(jI`wxqZ1hNs;CUTYSbE z;0)($(wz88gj$?HQJ$LW#gO3{Na8f>8yrTp)7)+bw^nINn?S6$%vdiw;OZ8{c9O7GwC!n@Y;a< zD%nu4pI%?MqBYgRx8?efXIl;L_Kn_4$Iiec5U)y8x+*>#A63~Jg9R$5p$uDTRN*@B zU+$<8dq+^r_LAJZd*?6PoFTZYi+BE=>hL8>vwuPVKWF@(weHzKumU&MvD`mq{l~}= zRZC`J`d2L6UU3+|9iUlJO9rRIr63muFcZyzJiKL;sdVcc(FNUW!G2Ck-OMRR2}?RS zAjzRSkt7|n;NQb{T&h_Y999txMfq;*;3{8gn8g9dr)OQa#p93SEjw=3qPGtiHP+Nd 
ztD$hLh}#VO36hi~$sLRnmEDoXB#iw13cWYyrxFg@#j)6aDI~zE?%fsv9tP2}tKrWG z>VI|?EcWC>@qJm5w2gRDa>Fc%x@DuO%trU!;GgQ*FrDX=AkF5LUKgp~7h#Z!za{+e zh5m0WfK;?EO%^78A0d5^ASbzNewgEwzvVs`+cZrUfz5PRA7%*n9Bwmwk&!h7>$Y4u zKLy)f3z;zQYm`4S&xo=|%1`jJjRyonFr(eIxq6HdfQ^P61At8V$T5)gRCa?@< zl`VEuOA1*I^B3bhX4x!m7j}?o5GQNiW#bQ{Zz3OAmyLfrE&ttT!8FrDzs=as(S`g- z49-?xV{QC)ew);GHEhaRSFfhARq-4c(6_cUVC&7Tlz2#Mm0?SS`#UAcJMvHqZ`mZN z;5^yP`g%@ROue0h<8VD<<)h8mboqE}P=sa5CrrG>r}=NA56Da1%E9uRNq5i3;kL9o7Of6U1GeO9&;Jardc$4pYVL0kD&$SV zKTgF^$A0eM@Cz`yz}X9Zpq#icgG%bW;y3=J{!CAfiZ3c2{QhEGdtOppd zBqu0na3&+DqD!=*vb2T@1`0!THQ#l-gw4%}bo7UzVJ+FxDx0Q5UTm!Ow$4B=^|pn4 zutwJ4<3AXk1e=>fB@K;dl35fC!&c3Z6?ub&&(?VI2*deq*~Co9IYY{wKqem+;(Je( zPiM<@fRw|N#K*O;%J{o0fDt{2l@cpc_76@l6npPl8Z6Ahui>c7@b&d`97f%@9~+7d zJib-TM=tX_4n25Ep|CTN7g>Q9pi^Fv2WHzMgY|sU3#)x-ymlB{+Wy_ z@e%X&Xy{>@%&j6&P|bdpiE&581^OXahBU5~odSo0VzVkh17H8S643uEaHlvxGLCr; zZJC!*RRb=$lg2@N1^*%`g;?C`l6VnPC9~E2!Y({+YhXOq8|TDflkd(@@gj$?fy#gJ zTBqNer;JbpxrEfSe7T;(+@#_yU=zl~bd+;c50^>ZAIv&z-@eEd@kV?whVWmnglb0E z#$chsbk#iHW$`j9C3A$WT$gIKJyxFBg?6ifF##SLlf!p7JktaC}#!Ivsg=0`+lsObDpxH8M_!qw_xi?5%_r)sEk`*<5U+Uo5lKpe>?mcZ-{bjr zwNBIot(W~vxRanpk#Z+9=5(G60QPSx$Dw*faN(zmNNUM&JD&p}AaF23K}JmMqw&!UpLRfr5H9Uj z5Oj7F31$-4O5Zzp8$xsCL+{x33vLWSh)xgGr{`ZFGD%MFE{%<`KS#g{OhR1B&ED`Y zcen9jvW;3SXtu6I$m{30tH$5H@D{Kn179S(X1Wd3`nd1qf@xpgdASkwco58rcVZZ( zYkEFe3+-b)~4pj%&74D|g{7Q{k_^R7=A18b209>(P8$g>S6@u%~B zGIqtep%%jDF$Cv=`nq{5=J+^}&4jM62{LEAy|hxFzdvb2!s2}PnEsx0`^{+WeXIfS z*Ue-(uo97UC?&(;lY3{c>$sdzQ90FZYOsfwif!c^ee{(ijcpMo$o||%hM9Xs;d-KA zEu70JD3rFs>>druhvmQ@YDF^p5BY&KiX_#50OV9)k5nw=4oay;Y32N#dny7Z)XpKB zd?mW$_g{AEQ2tdrT(#=@tDT9DgKOsj&$Jj{fGzucNu~JXiu!MRR2O&n!ai@-PHS?2 zt^9pgM}Bp)&rsj%i<}i>KA+itLlR%L03h@1-~Icqdk@G@>Bfl`8kDMU-!|YIgN%R`LAyU48CA27Iaf+Yil(w1e*Gi(1wl>Tp$Vvm^BI~G zj-nu0r07oyi@u!xDzyyXk@ZP#?oZ|pA`ThJu*XO*&lihxHF_FF(Z7&V5^doIJDnzo z=Oo_CEW%2Ne}R9bvFs5GZO@uHYcknv;GuTYrxRB~vx=`T3K%(Dn8^`{n9GUD^R2#x~1h#T9EZ zb&Dg5XI59dyjLQ)S*WGPgDb~w=|0_v{N&8vbnLpV!JkyW^u65w_KJsY-S|$a-q0Ja^@tAl`pJJd 
zBq%=*&urGOpEc%rPO}L7`ud(!k0P0hTWzZym3}{HVj{-D%-%IBTesTe%@s4|V4(}4 z#_n@Uvo;>%Sr0bM|7Uc~8wyr%=k{hwAC9IVXQx!l-MPpsG!JO+~HEMp`;AQuH{yC9U z3uJ+Eko7OA)_T4z{mt4s>Job9Mn`e=CD~t_S+MH`#^M5Dfd^TDsdHN8g-h<(d>#o< zT4a=jBEAxyE(fUKpt!Cv%Q-Nb{vzaf80Zeyl(~H5aJh@rU(h zlRWt)(!~>|62frlP4H!)D|Mt_Pj1d`b}2!)-m(Ml4?%UyF4`RSL}(kHC+pv`!>$6% zm}V*cXFEM$oKXb~s>LfG^cpSs%n1pDri=21VJf~y#mE@3a6#qZ@$HEy6-lm=7-fF) zFOOYUM`Q*Wrh6cX`$%y5WNF!1r-dxPg zzU;HeS)R;5Cu8=7?+#gvnMYQX9WRWcaH(+aVoy|b%hhMnVeUvTx6eBdDF680>@&pK zJ8a)u*=L#^`kl@Kg{Il%T7IRPE%ZF>@_8?o)*AlL9YY1IEf0)L`D8?ii7kDyF+DEX z|12#cRU*+=niigNP=6U}lvo$&=t~+Tt=5kl@LeOX;#c`gtV8CZ7%~wn>=an1-MOrJ zMjb?&71T5Kb8&a^n|j3$uabegrDDU2Z`ubVF#>qHjW0`qmTf7fI!H55 zgl+QO@0fe1(}|#|5Gb0FNcey|nWr_(fHYw~4%b^N9472;c>zMxCL~*y%X3#~nGDet zCZ<^H#Xqr$dp=uREtgW{=mG(u+oYf8r0^}jr%_wt0Bp40__{nFeP%^sv6UUZqv8G9 z!Amwo+d)cD`$Y)dZQgkV-!DLC`;RAvh^*1ktu-t0Tau<5IQq`A*aL#;wiF&RpVH$N zW@>li`yQTW$PhY}eLhzFBe6qCC<4A5^rt^J08Z|Xo?V~dLTa7BR|AI52{0fk-Jy>C zKm!N8;hBlSU4pqXPi$PJ&6*{8$HZ#cVHSIM7+#E?UBPQ_DgxXZ0`!qYf!&M%Bw+uI z04WTLaxE78h;Q80Ak$Tnz2Pl+p?TzhnaXzQOuEzh?$B!Yu%&4RLmmNtgje_Ccra0!|jwF=3S)<#@4 zua$c7pFW|g|B(;v%jxm8Q$0|mqF$=hiXmNwJtkBG`A=JCpuOhHMz#M6TiZ_+mG|gO zITR24~ z$G_9m*4`6>)wud5E648atRXaWx$j6qqmwzhy&m)dA6*lL78@l^kfSf8D8; z4Y%AMom-c`$i8wlc6WesN^NU4-4UIU@Seq-eU|ATcA(Ze*2)b%MHtvPe$+1Day(@v zL0cX5(-}B8@$G8=eSH`zswbfeW`)bvOQ{+5u1gH_VXU#6ulk(Pervel`_#lEo@#CX z@=coV3Jd?t8C#RntM>M7>$}~5kE2{NKw++s21kmUcYmwWuFT()_D?x_uz%7>LUgEg z1hS=hl zi1=snuqzS$faMm)nI1#x>CWE$jwcIrAK%ngV7&PL&zUOcY=6UJhtHUm2o|&BQ7v^Z zgQe38N(405*7pUIESV_V&p|k7L{>~5O%eC}3D(1crasEEADV#M0Vtcn3?-%QWOR0h z(YuO1OG*s5e20`*2=2vqkukUVy=f-E4r9*@o|ury@Q?i6D(LY_?Kw!|<#phmjeqqh z0cV~%1L*5YOQgPpda2s~%j$g%@kt2LI};HFPTKw~Lvsuo#=Z=JI&M^d$(RetULNV| z6;qRY`_d==lP>-rX##n-WIPV_qSPxnmkhe%pWT?{iQ_SN!p;7=ukZ~=sqkmedSxGo zeV1`eDwG{z1$c=E5?_qzHWO|hJlW!~(#3mkz{Fp2%}8d7Y?jU?Uz*VGKL>i32r(+q zoGiq(-@Y6&E`*sMU{b-rFx0HcKAvO`%)Lr8cbB_%4qAlfNMFHgz1rM&wmZ`oaq#GN zow=EYi*P)*o)0^%F?~Gyd%nm@)&#L9{}+&#>~9L_jtHQ!6y_aJq_LMthVe^KqL`)f 
zd$%+Nv(pwh!De0n!Qr21)m0*RjT0GdWs*sDw>}H%eeHY82*aQPAZomGXr#7P_-GZb z5*xFq)~fW{H1y|ySD4)H3+JbnFyavayUPhFL=?mw`)&H15Wms~YTM4~YGIa` z_!JpJNK~_5vHmG|euQ{(Fj}0Z=LIAFxJ|EQe-;u5F%E+0GkFmK~$Rt|d^+kCi18}QKr9-bg`eZBhHdRtL&C+iO4Bf`y3M*+owxq>*vMmb)8!-lDp250s#>RY+WJu9n(ATtrJR@Jrnmdm19kc@3;xFc z$p`%}$Dt4-oM>K;2X3Ve$cBnAIVmGvUKPc)Gf{G@U*!Xw(@FN$bB`jrNT_;W4pkgao=0LIwKJ?Csppbxj!ew)H|v z{Tky63tktsP8C=FDF1V&syc}eKvcJ*qlRLE8vqA^6f{`voHWhVZH^epB24xqyTmVt zI8U=|L>Yn)fU23YT^|@G2+q)(H6>31#hBx!6R}!=epfwK5J$BCtZzM*f*fVtA8ZR- zr2=}zp?{OUhwg4xsrl<->A%hE37|Fy(ROW{vEHM`^y!=D_4Og`@Ce(eJ&4i^5oVN3 zo$Rg7e7&o@bx?o7eJ$7cfP*Q7Mu&sp2T~H;i6|5dZ3NWf7K;_XCMCBvc>zToq`Kpw zz?gT%{-7w<-hj{*;Yh5bqa!EM&Z56YUM`pwzB46}7(eAMrzm-&_R8h7#^_;oV&V&` zo9d9{F$;D-=Dw=q=PL*(zl5P;5~vVC*M7SAc{w34ns1gGb+4arn`$ygu~iJc933;o z9qi)etK>{!2~$g4!JCDpl5KV1tQp$&@vo6|T|#uKcXy}f{+pAemh#^fZvKk0=epg4 zbpZX7TU>#n)+}HNm7?v&Ei9<-?GB?ra02D}-fGQ$w$V7tro5#rMi5t}(G_!)*OrC;5wHhB76*L-Sim^gZJyo3b zi129W9oLVwPtH{dmD=Y|COWo#r#w^1hxA`f{;jyglW9|RGc{DKhi7G;QXNEWHNy$) zC)J73OjkO?0X8)wF)$8zbbjI-$@~?>-S`wONkxCQbCA6PqRyQUBOY|Xt#z`hW1OBk za}51+zS64uE-@qNQ+(N7?}Wl<2G;m*%LHc-G>%a?4=0O=xJ8CU91cyM%c#g&AbQ!F zVB~dD1PZa7yGzxp;{ph{h?jLSeB?#VsAQ+$<;m1&MCgi!Y$p z$}=58LRSlOPGLW1%5PZ@x1Kqw zL}edmXp5G5wC`{mqlHU^IZ)^Zu1tPTUqPtvh}6lZYPCkMN8m;#6Ti46%ARoVz%kB; zXuzioK2#Yhhfm!EmxtE=Q`#ugSbmb z4|`_cBP`;EazVJ&FYsn7e)o|qfM67 zT5p6m$FEF`iba{0VVS+>$x>XeJEaAzt~Bh4VY>+Em6Y2y4X++UHXeTogsr3f_Aw5} zWQ%%(<}30g{lAT^a)#+S)}=ql1lIpYu?>M8!9#Ss_!t^vX1FG9q2ql{VD4?`P58`HQk|PPl2I)+2toB1A`j9ThQiS4 zmNlI#w^By9I;o#*n3V7)$OQ7DO9foFxS?(eK+pw+l}wZV5bfeemPeDiaT&y!J@QInArbF?wTVR5ov;8%PN~F~^`sVG?*CA1HQCKn;O z{*!&yeXw=`hor297*8waHo+5jV+%}4_SDTvH14!qh!-2F@ZEBGC$Si^jZQ3dh#h^{ zEuIP2iD|7Hb{eI`0^cBajdw3*HX;8Ha0HJl_?+Jz3=|>P91@%_82kQD4aVa84`;1w zfsG`Il}Zh=p~s`c)6nPa-*zv8;ZGi zZ(LbSU$@mh@7{lISA(&iHWF$X8s05)?2bzyvREanM`iLM^JNRemE$NDg&P!`>*WEL zH3ZJRbQTQbv%&ohn&nUpe99Prp<;sXEKOAt0m8N3BNJgITYR4x{zwGx8IbX9MT|a4 zg^&v{p%=lb%mZhOlky(GLBW5A?(VFaF+rIN4v@Ivm)~AwJ6dXLC68+~Lw-GMk$KP) zO`R!A(NWtL_|zFAU?a_r;aVf{q-4QOy7 
zjrvub`)|C>&)UY3qO(%nAo$lY4O$PDv( z-aZhec8m;!zz2qvQJ$BWFP_#clkYG&dEK zgmrF0r7{(Hs%%_>NY!-2HdihJSa<41Cav(2}mUcs7>a<*OTQOt^S?wJk~ zxPM%>!eU~I)hVS0(oV3@tsJUX*H5j^xiyktRBODBoxX!@tD{|tCvBKIRvZ4viG`&1O<1w5_N#m z6GKtS1`#@Y#;;t}gIO~*?E?!+%v%&Wei64m2?YE9vH*~uWbK!zp>S?FQiElmvaH?j z31&8n?8KM_SRru`pz$gBo6Ih#bz1vAKcok9eiJ3)QD-CBxJm@>1Wc$$^yZe#l3K|h zTdfGZET*x))lMa3aBy{|hR3K6a(hD_ykM`ABV^!m!ejd29;QrIJssS%UxG$xHt4(( z;BbXom%qpQ@9CdKL;(Cf@4U=K2Z?bO&X}#aI0Ni@-+7VncEeS<6Pd+)GrPxzz9K>4 zHsKTJPN@SgnhbZB3@0<8ai0JOzP!pWm)7IFVbwjKDi?$+=T?tsI%ia``Cr*-OGU1N ztKpJGqxJcTVX&j{-*Bo+;&bU(%<+PFq`=52HoFP_{>B^?rMf$qli;iIBwCIH=`P27 zxszAZdIE%29g^M&^ORq>^3PvkjS{vK_rwdUu4*Ey$U90qERB91@Mb1480HS04dGSn z%!_KcvS(PL$9CJ@R^9<<4=JYpzsiM;>zzk_s99e^xWB5Qz^Lrzn|48n6 zWrN!yM(h6;wApu%pv4WX6dwy+o2l|N-6w3_&jpEx@p^O`^@n-I`a(WEQq|}EnN9Jj zfM*%(sn!(egh-@9UN8~N7k`8BvRn=3U5(l;f_)yuHU0CZyVnUfG)x?lH z+*u+s50X-RGn>^~h!qG7ti-cUkyz_Nw(Hn`3FUj$F zL58x>S0&AwxDzUc_bElf6IssvTa0#DxA^W}eykK5Iudyg$>Zb*^6;SzM3slSX7w-R zt=T)}Q3mb^3pE=uz( z9a(fg9H;5RZ<~!Ae0-XQbBGfvei3Ht#JSIANOoOE(vJd;bWeOM?|9l#i6<{8-SL!B zGf6k$Kl^UGyWU!TOynE&3bvA^8)NDEq4OueQw}1%Z%Ev0@cVtuNToeeM-%*MIqMEt zjWIg%<(?rjQaQpO8`u8w&+}}D2uq@eGk$xR=w47_aRJ~X8Ulkr%>*{@Xs0VZKA>pJ zx))~DLX1YC)OH?flni7awJ6mzMGJhZ(Y5Pg54^jr%o=~o6qnQg;yT{NM^$Cjj!30I z3+RUYm?l&@L2r*1ASDKQ_4r_0RNTZG(WIMvb<)1P%+_8PD@V3oc}&IN;S8)oGQmu0 zzRvSqi(1MVg;mNgqVG=7pQSFDQH)oq0ezO7lz{Ejo~dXWi3S%t+!5M9u%pIzB2)i|cm^0||an zV3V}$WRmW!^niKOrwH0r|MJF*FnqH0uf_g|TiL*uSU;{V`{mb6I$|T1wh-TOD=^I~ zOO0=YzKCHf0KdYhc=WU+XB$UGkD8HjWp|*cHCG;Wn}QSB_Eg6f63K!DU|m*>@Kcn$dE$h$K(a{UR6>EuW*dpA$@j~KyT zw;K}ti&7hYhxB9e_Fm7X+EUwi@yD#WVup#wZ+ok&?JnQ-CG)nGNQ=+;k7qAT)`f7( zG@Ml+tcL}G>B0Ed2xDotl;cxAUNzc2Ut(J~iCa0ZQ(;#Bv(oF-2gF`Mh|0j0bfdJU zILyr*v@)&L$ExuMl(~K6?-Rg2|C6$iZr7VD<*`^X#G4n6&z*>WXxu10*O_e^DQ)gf zG4#7fzx?kdct%pWE|-;uEJl!jd4*6bXw6NffqAeTNp;XMBSjh*+dH*z?|fb9Oa<7t zeWt`1kBA%Zm1&-sglaf`b+ME=bSrx|wb};eu9k|<_-*(UgaU>cMj)(TdlPlsnW>55 zwpdN&9&+ROnO8A=XGe}x*NZc-XkIqKPAHRtM#bKE{_NQAAv);!cw!uA)GG@)0^7nGK5E{t_^){ zlZkxA?{4uGtuoHt7H=pE1?{hbt 
z>FFFBf@sKdoXkF08>p?j>-~?-+pYjZ=K@lAd?B)qzj7zHF*sg=+ShWw8^>I>?Qqsn z#;DB{d6-pJ+QXoRt?OnZD=Vv09Olf<|M;{=joiOKZw`Hs)+*{(iR|C4rPZ%25JiBm z$OC14HKK=dxfAHF{afqZpUU1+O?;>6I`ILq4{Am)^KrsV=~gv|H2ZevUSIw!b^8e4 zVS=o-lTkphEm;B1AjJrw_Qu0FDIk>^K z=Q<5Dr@5O`UG?;vP5-`dbvaSKjRwBB`*TnIM0RL;nZi-(DZ>q=rg6bZCq2u0ahnI{ z6ry`=--xT$Hs7{qe#W^FtY|Hgf@Fdz{_BS?b@NjoeYPg7D)VyGRyjv5fC7vcM|EGv z_l82)BX2So)iIfx7#`hO4$F^alQeKfO54o{&+4!1sn#iyN2LHIcCG*NH|FBA+Zn(L&10v@{_D%pt?Rrn2{kUMQ@p@AN?jZ?zP0*h* zM-B=*@xsAh61Bi1<7lqUU%GZfHKGmx%{DL8g3-faT2+*efa=skkzUJPoz?i+pYl~-mPD%wzb1__YW1oc$~biB zIlKrh>>E6P4uTx$ZQ#2b@%SJ4Pjz@D^f#;$>eevyLR}$Xsxsvff4!MX)xr6$)al-p z7@C8eCotMZ2Hzxmea)#?_veB51GnaTpKi3rUzw2`q>6CwG_~e`UYO^EAc+{agMtEQ z>41)V+ocZg$lJYD%xoNQU%Op=rj+A-JBYKz%NmlWfy|wQx`#`5jL(#1c3@Ljqhg?0 zO*9M7V8g+K&A~$qYX3U5b;vCBtPoOji#+={8E%rh3%suKjWxU0gW?K3mbs{T7JbUJ z*eK6S6rw#C^V5DO_SO@>0eg7i3Rd;zjI@cLpP#23pJxr1CktbQ4U*rc$!s5giC*_H z-l1c3@*zB4Dn?$Xm@*6h3iXewm#8xWVWtFS3sOYu^_^&MVA4b*Vm+8#<=|;J0)iHYhE~&+Bcd6`J$Gw+b`}<(&%Y z!hMI9xauRUG5hWzha9l>C2`ImXlR%C)5n)RU)-e+M8Zi_>Vq6EoQI9(pyRv!C}M#R z^Yy;tn`(C)_iVV3pZL&CvLSODfYvN6xPexAfJ?}K$>8S|4 zc3gBV(}LgiUV!(d73wav^+J4~W*pwTtY<#lVG%;u!ZSd^rdu6R zl}*XJ2MbIu(El=?Y(y^U%P~Ypt&I~(02D8p<)IQxnnybjvw}X@FQWqMoly5g^R(pG zIPxPm2mi4#TEW6bx6n+gnk0RM`xKV+0oo26654dd)Dl3Mc(cPp$eDWMF5X;>Fu8_F zqKRa9ep;wFJ9Nnbey`uJ<-u&JiIhc8KH67?eZ+f(1ZUqXvddAVcX$v=%+y@2SjJ0_ zH6FZ>!Nimwh?tj*5Cdogt!m}!bX}wgM9-J>{_?P(|F5i&@u2_Ce(sz3)TzUV%k)IA zqDyD{a}Y9S0Q6GWC8d8HX|S%G&f=L@dBy&oM1Rgf+Y#NxCxTkOdXR$LQ`O1NDIeixAt z8O5A)jqyA24{(6p-vZpC)v?hNltlnv%gK1wBoF#tJk+lKIIlH zIu15$POy-WPJovC8l^m@{2Vs2%QNa5FjXHB+nqv{aP{|30XxJ~CD#N|Gag;yQN%5REr}wZv#8--o#mIjupd6an(U#I(^@eHJd2(~kEjI{3+_S( zMSMkdKZfVjgWF7F8p)NmAn6nn_qMLd|6&G7O`s@7I{bZH;Pt{I$sAbVnKhd8{SXo# z{UR~Cjknre9*Ut&?d_Pu7q0@Ztn?G=f5LnbPYY>)%|>{u=~Fp)$(>D~5WGkuL=Hl0 z`HDWx@=lZH3*b-)7kFY|hpGH|Sy;GqMpbiARm6LCE{#i!OFn9Uc9G`;3}f7 ziuH5=>4dDL=dHX))Bjv{@#2H|c~u<=Fd8*e3pQ0@|J;vD7M)Z(#2kV*>0di%d0Z%6~29t=NSS8g*q z1A1bdUJuXLdLOS%6qYjVub2qhnac#nL-7KT?n*f+uf=Gz2Qp|B-F$L9`Yhr 
zpd3e?0bgWF%{m|mqMcZ$_d2hEXn61yEO(LnPurF`sz9PU`dr=>I11H>+f;ZGE03B= zW4=v{O%$2y-6(;Vj$_z1noTzUA5-5LU02t39jmcz+g4-SPGj44W1Ed_G*;urNnuTZ{}hsa@bCtq z?-kW;kt;QQY=Q6YK`3U|>vy^y#X>U=3?DYM=e`n6xuiOekoH#Mw19VESID`u*Awzc zC-^=KzJDS-(G<)g5l)r;6uf2+{llXLV{(l4r=!zCXnW+gVvvK!a?W&c4zFl14PSxQx3Efs>YVZUIk$_c9{=!S}B5{;#@`>b0vFrXVG1gI5an|cN6 zs)(0~7kR$wJHvi(5510_Zq>Uh7wu@5x2`80^Y#&0y*W6brk(Dw8}4R|%?ys0N|)n! zWj?o#FTph}5aHeAGhzCP?ZpnsHjR62$>?TkYE6^^rTz_e6@c@! zH_#ZTwdiT~TI%s@&l;HtG1yovJ_FVsZ;-V55%B+HTW*mHw@@5>X)$;?n=bX&&X1k# z@fnExhoB#=2h3{09?BHq*&B#8IEGXhl3OCIt}G&aTR=m2*_^zcTwVmljQq!=uRM|c#$M}37vjzROZL8zxL8ByvJvyu2Qw!D-Zj%sOEwttF6<& zAb~-FOdZu{xuOKBpA0I#f-Oh!DnY{Fv$lT2V+G-)aS31=5FPjxUafIO=CxV$AJIHQ zZQIhbQilw#w!DD-v_|5}qU&laP8Zq*QXMuz)^97!kUa{MyQn#!7j!!AwR- zh3%hFA$9-)=F!t^VSn%=5w=P7sycHz_}Yjb}&R*HEr zpwz<7L~xL@{}CJWZ;_I4g*Zg7d%CF$)Q!7j3h8Z`UOHM0?}FI*Ulq_0_5c$`+5$#) z?QZp*jj_RoXyj@RbO*?1alV2nhxeeAMlE}N4K1zqw)*<3c+ky}o`;@PU|sZ>?X>v! zAV!+yZJ~_DzUzYtX@>Swzm1&A_^zCL_Q*efZ0A25nd*fssUdfu|CI4=d9{fY4CRA< z{=9@DAN)b_47C9t;DPe2a{IWdxT{VUI@LXimkH_1qD}7ELRcvoIJ-cNXUu_?;HWJ? 
zmthG*DH~r(tBVZMzcHI?<0Z5Oa8A#nC6bpBim!_IB~lx)hpM@Lm=2Gd*YUu0Oc5%k zH0?ka>Vx9^x&c?)M>QKtPJ@!_Mp z<02)eDyP0-i>A;al9XZ|+lAQ+^#Tv9iiT0QjEwNb1ICW%h-zJ|k7x-M7BjjHvebMp zab};SO@HG~X_lO|4|VMw0Ol|0(rp5+(!l@B6D|hVPVOXIvtDr3`#>9W)VHlCYE1}) z07rzu7F5?RUlvS7cMe?WMpW~+heV&gzE>*M2+jW?b(}2w+L*kf{qxbI?CUC3tzF`< z4MC>=nfpZXc?7#nqu@ezuiUCm*j>HV*k5aMNP9yMt{Ty*t>VY*xQ2qE^hq!Ij~I;u zr8ZULf!@KEs>iC=QlXn-Py40vWm{c6fX*#|IJLn`hiPoda?UneZ(7FTZ;A6D_jiL5 z^)kv?m2!?@2(AT`XKQKwBrnN5i=~*51--k4jP|p0Y_m6wosH~W91Hfql)=8Z&*j1t zYxLiKywo_gbCaHb^-iTP6#?0rmWIC%fhJ^lFX@{UJ>rrp9pqg`H6(T;WQxe&wSL)C zRSTK(Ln=J-$@E_yY$~5ByALn-sTE3GSE!-32^~GmXi*tQEl&FTCQg-0U;CzOek^G* z2#%>s&d%8J)}3?}T9LB<5(>F#UHZrP^m5c?(PF9G!(%x}+r|5;!}1N#U%HQ-QLZwZ{xUReh%U zT!Sh9sJ0q}6{8@{g`II>Y#l5{!OG7CFZ9my@Hg22J8dfiTECEgaO5P%z(%!qpjy-3 zQjS&Z59<}@(=3Gpd442CW3tvtoEgQaCZ*j-eZkc(3hpNESpjdwxsuF-(OgiKLaQSs zk62UZUH*bJw;dVkBRKg>`%7qfn8F(NH>ziiivFm@f%vqbY69K1wayvyc{vEZdyso{ZH_K?!_3pNzk}jus5d2fC24bUHP054cpcZx& z?C@(%OwBUW;X~fkgpB;Fnb4c7uhc`mP>fv;EWg^h$Z@R>OiXf!>C%De{F^|T()kDV zfx&6dVXJLq-xj;k_ku4%7_UKu6$!BmyFqkQQ)%juYv?49@$%0w_D-*MK}?XA>;s;z z-KRVTlWwo5Z~#I(k7S2%3otcX@`DTvP1?y-$F+4|E6J5MkG?I5*mHC9B}{-rU+~@{ zeOSzmK;t`@_C&9D%p3a#seQIsw3fo+)4osMdV%zT)V`jU23`yz9FdXUf+%C1U>{L^ zT0z0({7<~CB4YL5S7Z+xpV5K)(rJ(Pyo#i2af`h(-h#Py3fMxB)%~taRA(Jk4tlZb z{fYqNmC68m5S@ABa4Psw<_&Q`3`cH_psq);Fd4u0hq0uXxY~^Y@$)R^4DUZ_+Q|%kUyk%!EI3-%e5iI;T$JBiRrq!6AhKQZ; zXF4j8fb~zt*I1BVz%3%HGZ0QnN8~R7kS{MzpkP>XbY%3P1I%p(tbx^dBiT-pdO~o$ z6@<;_+-|I{dc$5WUWb+w=HX9i2Q|gQAl#3FhW#^QC3ez0coJ~SDqeuHgj1h!RISLcw+!Ez#Nht-UF;52=34_~)t z-e4Rcjp;ahdqUTS?u3Sdo6OXt#Yn-8n%Ek6WIsQJZ!Y23CS}T~5%nAV*owZAOzV&kW4|dQ zYG5r3l+F)S#6JA5>cH`L-tz3~htA}5uc047O39}A7TAhc zMtODv*MMAVrCMCZGIn_VeHTH^&VDC6S6&OcLhp|ACclBJxfVR3?>zaKgNhjwiQ2r5 zYM`w!db`a(lI@Wa~{;&FGk%KhrW(I;RK@0q`d zQQd{HWy6^FV`ED*;MzujYx%YR-|!Y|BLdFp^y4D)&#b3%Ey|{Ec(nL1f9mwLQC7b= zv%tGs2bc8areBQ&YdyqkDQX2>6oV*wl_)lOtP|%O!7;Dx_%v|@unQ2A;otx!PYc(O zct2i95S1)2r0b zCJ&f&L-(1ouXJWs`}Q&-zV+W;Km38alw*mr$`q1sF7jceYVkw+joI{NJY9~4VF#}? 
zR@2ef7x%qgE3AF|WapeiUvvlU>TqgS5QXVh@5%d)ZQCRNDFh;y6hfz7{ti3x+Ru3*ouyyky--!Ln00RHhy=iMja`Rb1ymI-sG9LZdyfISlUe;8P(Q{jB1a$ z<961;;Tkvj{-GWXY<;)4I>H)rAe4brtf4wob#0c@Cca%O{m0z;cw`8OIG<2KbQ^Y~ zR|iq#sd{a{aJFxU9sKr-byiwO@~oVBMx!4UPD7GjGDXbhp zD!mJ&MkF}>ariDYAWRaEI23Db+tihU`WMw9(1d|b%kkT=s1JnEstOUVIR7=~lFc$ge3fJt#g>YFr6%y{v zO`jelp+apy%SO+vQ^mzp{y4)g>INDIe9q6QO3>IGCTc%L$y^9Qa5^9)s4os8l|4F{C?v>Jsu7M20jQ~bS$!V~iw-^6wmJ(D|5o`KAEJ;J!FMUEV z?F&(O<3cFZ@_RcH{eS%~4NWHb&Z?=gKeW7Qhgln0IG}S!l8GTVJ6<+Bti|Mgan^K* z*atXzVDmRSB{s{)Z)!EWNX~D>2nN}cq+ta0us-gy0_i9n4h#CrMD5wq$hNKjs72T( zdrJK9x&Qdp*(B30wTh+1 zr4}=p3r8*3woIxZi0K9@@Nu?iu){ogqq4tAZy;X{jXi+)d=;+h@9|SE^Hk@`zOFWrE@V=)f)VG>FEUZ>1 zWzrt@4@~>t^YuSsCy$op{LJ9$xxwST~$>VrjV6~C|`PU;5m;3v)d_`_A#5DVUMSWDuUI^K%XD)s!yI_ll%Lx|VN44)6 zh0{(_r{!1FOV8S49dkoxK7i!};4hr^*%cFr<)R-f!5 z-Po)L8oWR}Qb;!kaoR)>SM&giu8gzdc%PBGg&%ajFh4uwX~Pl0!|K*wfE+Me{rV5k zQ$=7sBzCkyvxL9r2fuy@bdHf z_pi6*drmeR%jYvx8keR+s@Xspon*~WfIIV@h8--ZMx`yyViBe7M9=WqgIK) zh;K%|Ub}zD<$R_%J16xTn_;qUh}GVAWxL*-)2+*{rLFTHyz?YCm~zEjZgh*8hN18^ zo7F8jZ4be!>jNNl&~Xg5`d@uwz}qM9a;@ ztq!Lhl#TCJzxHx+a=PyB?z|wVbu8kn#KOQnTxRjW=R!{qA#=*=LP7`Nl5P|QwI$o+ zM6D1CeVLB>61zTLZgAZI@9J>+AJX))mf>8q*#oEfPG#P>ykUM?hzy$8C?%cZT}gf% zObS;==Ik%XaT=b7%s#m(76Xhhl{>(%`H8PynymrUcRR@2xgNRAUrTOV`!=V3?2T}N zqz8_xNKzHQMf62_o*EB0>wiC>^~ZbCVao&u#3qhr3pPy}JhDrS-rp)Szvy`5HMn>o z?4@wsO(YSg7EfgXkqjD%YGnspBP3yT=!-xKmnPYT(++vfQQhE>=?Nv-I#SW+G@b;| zv{2%Zk4qBlXw$4bXvHIGRxmoUwjJlomv`ShDy(zAuE@vHOzF_HIHa*Mss}b-a}f@QSQEta%pS2Fntja}J9Jq1M+9?6gG{ASW>%&|cblkp$~y9Ymo-ca zuxd_DF(Xy#0;^m1;DSVovS+A2C?l^mxCd?sV6X~QvtMg(T`5_?Ha9}eqoQMrW21`& zgV`9sIK#XTkdgC$M-H2c`mz(pe&4zG^EV>Da7hAWC}|ji%qkS73sg=x7<{BXCncpR zjVShbfsxbnb&xuifNaI$R%eIPmLKr?zUN6}xsD%DU<@Lfp5=ASyujP|@G2$tePfA0 z#x%?oSK%9z73Q#6D0S;#DZlr++Uz9AMm|7%-X_tn6fBqeJ965(s^t+fK=_y)`gc3i z`sXL{)1pN!O^K)nSK!*+Lc;oUitr&solX#nYc&x-I|6tYx@ID=wzgh1xZ%Kggfb@e4FIFg{%dvOiAwh#5Mx&&OxsH$ zzeH?C#|pmw^E21)qvqD;!E{vBAd2RTzn=7t2LwnDaO_t}cc9GaWmRPFa+#IqwXT9y 
zCsCs>ir3l9QkocSEv@x(W=2)sC?iH&Z&FvRb(n9wC~m73v}xL`Dhd9Lwl5n(?isEo zL!vR9L__#jw=WbXNS9o^x6$mXqD|k}z%f|=y=XlkIJfm2`;B{!P6av5n1-qNqL6_raX=i4AGxCYRSZAR7>agMZ%-6|Vmc^$m>} zsc?*eSm74j`n4#+nq5)vmZ=$fi+Z5fJ?FZ@(Tw32cr}MmeDw_FT4cero9G53wF(-P zw{%x%*R%HiuWA&4#-B?{Jzs6(gwpH%HCIvl*R73qhqt0_eK{670eyvhcSwEV(7!NA za~x?$G@jd2pq9SqohQLAm0&MQ#eqeHxmgj*&G(FHhpb6^4E_oQD9gB!%}vB7YO#Fi zo~6diz^-s)4qH3(9QmXF;v`Nv8_45t877#r43G~1V@-mi$Zxr`e$IL2&ZhcrJe0O0A)-`{S#*437XwMZHSXpzBhS)x&Np0Jdu z?rvq;1Cx}05^59kMRH5H4Tg%wu^Z*{l>xY*gPDUyGS~g)CGmqh>%QbIV zeT>N3EiKuj_DMDi+61pdv`*-j&46r$MIA!isAHxDM=8vRK=?Ci_G?E7YLkDhDstAq z_h(2Og*RxaqdW{9N0XOE4ZO7`HUjHBATP3*~o0SS8J{WHsYRxk}Z0%&X&`#c) zl`Qx%p7Ibl6y$}&4JK%dX*e#6q+jHZ0nM7dqQS=>L8>JE-_?)u>i3W12@MWwtz~IK zG_%_B&dd9tqhIIy2H$ogKm$6R=UE$i)FMmEL0|RyF=#48ro?pv`xH{NVV5a6auPz8s5+5nvR;r>(I(Ruu~xo2`K6g9HNM%G zRF|I`HDXN*$<(3B9K!TQZ9Rnq+F?vuHL2atOLm=itzesK&g#kzI zWRJgiNUDx-$B7gII~KVuBYr7VX5udFP+MRBXJs&_`fD)@$&p~D=V@3-z&uma4`WmM zAvI@C74hR8vwM+Zho;L4)an*ni=Hb|Dkxe4;=#0%jdV1mGzayckrVDIbWNOwmIKFy zdk-BtZ6)g1k**pi^8zkiYTD?3IKLeUX3-GUUP;hJPyMM%p5P)JFd`OeQrGYS-qU>( zEZuT5VWu$7|3zCobstDAZ4E&s1vZv%?CO*kwMJmJ$hGCbDKlHX@XcWl@za0~9K z^e{NDTit?|QFs5v=fSR&McawSnB7mQk>r)o_S6u<*3LV{uCc^%@^0j_sLYE+Oxmgh z@fmGpfB1a`6WCy5fzz1R+T!k>)p3*jfgxn23$)OVH0&Ky#AtQG(w3aNXakH9WGpZUy>Ncw)M0td@%QGxsOuXnlGc$ ziL}?pK>Zb7FQH|Q(X9Ep(#08C#AbuBVoek{X#Q%<;lKL>P04DrK&fcvCFLJ;3{Qh| zR~Exj|x?+WG1$}i23QH-Gz2CY1Y%T&>qvznu?r>9rGpxRh_@+UnQ+_?)p zkmT2r!GX{eO$(zZy%S>u#SORW6dR?_><-+U8-f@A*rq(x)nEQkY5#iLA@#Xe@2#(< zaC5*cx1n_LiK1uBelmX)3gc5eQ&6G|2190kLRS!5Hf6yZ>Mija^yPe64VIKohC9ix zCl*2`3YGi&Bh{PaMclCtR1uVG_k5@PWLEmCa(C>Uv4t}4w_<1!whdc8TCq9A#hdE| z9WY=e3l3gCa%q6krg$PmUNplOA$ng04v6(0B+{`9`xZ+1eQrg6o; z4;mBFmCma<@E%ibznE>t)xT3V!BVY0kU*wLS=kH1{|)>Zu-s3jKsxo|&i6RAC-g$! 
zm(U#DP9eaHx!ct@8PI~;Cy?i?`w2fAa<8I{X8^XF(A)~|Y+Sq9Xc{IZ(#Nk9z21C2 z2lJo(V!@b;qTe@;vQ(K^j*pM8aGf^V?$i71S7?w)A(*8q^P+1@JBCH=f%GWM55`o= z@y@X2+>1W>{>(Ys;=rGf+t0FTL*%9m=d5UXzwL}LHhC*o3{Hj|eV!w`#qkssbSGwl z)Kp2BG2DE}v`)Drz2^V)xP13HP3R6Nxx4af=}BE1Ua}a5_4O`AVW5Cm&OS3Jql%El zS9%*xwP|8NSUj!Fz!pybCe2+d*VlR?IAN+Ept9_a@2Cwau+wA(v>e*KC0&#Vv#;Ol zwn{z(RzkldF0kbU!)N__Vde~gj3i;^W!%0TC%1@+n&tGB3#m;}kxn(a=-3}GD+|Av zk9esp{`901+(yU9P^O|ympX-CeNDNbMkx^TxjUS->BJ15AKNZf|6pl+le?H6<*wOt zf9&ElPYIxd_mY^nNEj2%uWr`gD7F(n9M z7VU`PCY_>%ke!O>@p8k9JVChAEA%q=J(n7yhevR6eVyKFeXYsxgO*3hOgF&GvpoLC zA(@UGVYM+a##mWMO(xHQb$D5edc^Ft8}`Im zcVX6&+CE~XupZCd-x0RI+BA4WK1qzqVq*Gawo-fWBA60kP1UywJXcpPO>w8AheoS@ zrJhr6tvug(`s033JGtf~&!aw*4@prFpjCFn}A8R;`X zkiuD)!uQ|a#jg%iE#^k!`(eK0*#e0|Gl?A>1G9N*CoVt~%yQAwB8i zj((Gl8OqPXa}M94A&X?(kV=X_BK|(JT zM;mZhm*}~!GH(l`Ge5%YCwIRbx5l?|8J#-NU>-%#Hflvb%9L%t%EmyuoVi*sXUCuh zU?KVBg!i*!JTnOCP5E~=hn0HI^jA4}M!nV(uKIK--ChWc)W>vL#;NS~Z-n6p?mj7R z;trKjBgbw_;x#zm#`3!Igs;kig4kK3ixC zY_vdcpOVJn+)+%*`}E0C$hImV-Gf~ZCN_f^eeMK;Wl*)`3+44o$I_oaeThBiXvtl0 zO_*Mb_o2;a8umd(kEyW8=7zIhw&e9&rx%zYe6i=^@)#1`i}G*|S*ukK3+7>B;j=_n z;8mpROytD)jDQR|=K~dS3*E(1zfxM{?*_qRGyG9^`Q}6gl?PwUhs@Kg6f1m@iQbM& ze2qI;Q^;QKoxpD;H7t51Jk)zpWSE2~MU7G^p1-*Cwki`&18l$W+Hj_F zoq%tBFShPpFnK_r*%9I1h2>as+G24*H{e9Z9g$u)F%Yv4XWfN$tmGnywg#I6 zy(-VM!68#;Oa2V=o)*+dxHnhQ@dIj?TTHBbcG$C&4vb#;tI^PzK<_3*imEL8tnA?6 zfdAw6(^GYP&BEmU_mj{cWCawQ%)vWZcg8WhG<;d?54OeL-dwAZ-cF{^wP>)THx$!H zV(vaPpbO?+Bd;-ag02A3LAnrnQpufILB8uBS*c#z(4!+~;jpLb;-~qmc~{^(9r!Yr zOL_iK+Xt~gwSnqeQeNY0qfVlu7*jsRcN6Yd!w1#;Nt+35+`yQ(1N-$L`a6o!I9kF$ zvjB=hk$1VN48oB1%C<}D7S@pPft&29jJ-`Vl(z@jC0{}*2f6~&d4l2X^_@iz>}!b9 ztKT2O&9Bj<2pMX|*+k*-3VUm`W1zuu&NQo4wY&zYT_OFs`v+bmQ)jEhM8N+qrFxZ2 zK^)uXb01c4BAn}DRkWbE`$renWsPub4Au}qRGpvBd#-2NBt=)g$n!^1mlTAkBKIqe zhBGTyzpjPPj|j;t9w|(#d>xc_UnsxOsM9?6RQSdIxF91nt|k&!vYUX_{YYn-Kc07g z^tvKrl3ah6dsp&BJYE^YivDd*=F_myvaB`)b*LcUdomSg?#Vylttz|ssUl)VvMK)EiWTDEWFn&T^5Jk+R0a3|tl zxtW%qism&uvOIM9;s(icS8J7Re*0hOCWm_5aO$sN<+3qRmu{5Or-h!!M0B-IZf*8Y 
z#!qc;c=ghT;B4L+Y}aok(z+(qDLO(=`oRg;7n#pTgKyt&IhjTW-Qa~2It9vE8z+J< zXz}^d=snPK$E(*DEO|sq$rESs7*3@lKI$H|Trsi16duad_#*TE@!SOGBtQgA`^9J)_j7t7kfS}AF8Mac&aHw>Wd=f8b z$UAyb%Zrh4pbt+|Akr58m~bfnfIHKeh30-55%97I5<+?S!lm7=^hXu#j?u9>I3lPt zhflMpe$wj_1=DIk!RM?*F4t~+<0gU_qnI~a<&IrmUbkF}9+1EN@I{S#P|7*{%Ga6? zw}@9+(2>DTJ|OMe0&4CwRy6SX$duMm?}GENS6b@NulsH;O$x%I^Ipc#&$bj7;yx*! zMs4~;LP=A_hh~%P&I+l$d375OSrJOk>G?jC{KlXnwV6Jk6$hvIK{bMY$=^N6X407T z>L9IBx3^Y0>bZuxzqY>_ybrW7CBCp{my5$imEw-gzRj-wjoimHCe5|{?}lH(E97vV z+W!%0aNz9fRSYiiQYizZ#S&`&S$O`z0-)R}bMgJ$zZlK~10(+;EAdt1Q4)9+4OSv* zFd8)R9lxU4fd2QPbp?#ugbAUbW4@q|>-6LD!rSt9bIt|Bq1Ny-^G*~+!lq4|<(DJ> z^#1eC>lL(^t)HIVA_I?A&L0dKN=zB#XeQ=y0NzCZvaYUIIE81n>QS2s(-Y1Ip9fV= zm$`3E#pcqG!c|J;*yzDg&OiAQtk#rulqR7ZIy7iQeD43&l?dj3y%UL{zGVEMJug?c zIv;NLZ6&YZBilOL$POb;ur)^kFY~%+#=66v+roE16Bc-G>D+MV< zywc_vfNAB`vil>4sZO$az{O2-=)Iksbo}>R-d;viLlSg_awTV{P$v=9hYbQG&E%=pynR9CTAV|Kh#x z;Roe<7OSQSys~n*=m}<7wBb~J(n;6K)j9PI^pe7^vEA^6u+prLzGAL^5D>h(prLy5 znnbH$hVQ89tf6xw_If@#(dUM!@1TV^Th3vJ&-3uVHau?>msG`{$q#O)v;I6N>b^YP z>YkH7u-_ZoULR*zc=}@NfIB_-f{^GQ50R;7lMs4#UHXTg;QtN*bt6Uy$j60HZg_Au zHJ(vq?2c^0H%A;_;4m&VZoMoyZ|TNq?F+b6i1!!I%=QfjsHw7rEKh{?>S0asL*=jX zecKbQ;mQlvUfi+^C*Xc(t-hhH!lS_Sv)bYYN3O03e~p*61VMjPQ*xBSV@%!xS?Sn5 zxwg{q0nl~H&S957anwW!>uLXd`)ccaco)ba;8{0MpZ=PJ>-V{CSx=8TWM6x7#S!D3 z5uxJ)VSCnq64W>W4@XYaRc^oZ%8Bx%5MR;Ck4b&N34?X#RBq`T8af?~()ySya=vU( z;(GLK#Iib=(@@Q~ zLSDzgc2!YdD;RHCf@KaaDR1sdjaR*k8ZJO9G6Fyd&V~abekJ*br!}rj7L>YWUPBv&IV_9UOoXOO8EF6zX0h)%6%H%!o> zpVIU^4Jb#SEHAAMqKZu0vlx0kTxFf zOxQw@;U65%fF+~C`YgwwDXXhS76Oj7S61u#jZ9kKqgB?QKXzU=jIPq%UMP>0ii9KJ zHH6fVq_8o@1;FtGjCP?NEw(APqu`ogTiTV08d2uc7JgLg{Hgf8o4;-R#Y8`HUN%s+ zM;wc1tJpzQ zoyFiy*x))ZpAiUo`b-gj79`WV=f{@B$!L)&KbA-WTm~VgQ#zlB)o73!-1#zk@U9dH zzb!hEU=DP(kNrAh`o^`6MKMl=avlnP%8T(d2XbsQ9BpfVwv}dqQGDzD?t{azzu(Vx zw$ag9e%xG7-~*P!`>l7p`N3Ml*dQ~uM@Fnvx_Iozr3D&epIo-T@msu*j_`N0Ge2ho zBb2>BMA_Mg9;vSD?g-7B+$awg-VuH1+7S zW7hPp^{=C{H2Dt=)6>&9E&hD`FzE`UDD+Fc^j5VnKb- 
zKJl&fq(CC6p#;r+9i@fJ2a(@E9#9vwtvg`jcO?4h>$>nQqErAF;58zr06rt%9bP|g zRiTi57a)bH8T1TojaB_7Hq#37=QujCeju{$9Iv4?hvrAvYO6JxY;PDo_8bbJt3Ww? z_T@xU%T%WDv>TX-92BuA-u=jgfbv`-xIQbdcW!1nrvX?)KETNxGKdno~^36CV2G3u&F-ywu0Ff zpR|8Lc=}MBOH59kRNV>9N&YW zzynRZoZHNVYo^^N)cH!&P90}pb?PrQF{l$Ae{44q_f40vD#*KUbmtVR6XQC|s(giZ zJ=wrzGCxW^TZi^D^JO>L8MYj1H_COR0f{Bi7buP5I~U~Y8TO+={MIycq!(GD zkviNDM^2wdDjVxVV7JRjPYC~RnEh|+Al{2SFa_l+e-xk{#0wd!fQx~my~t7TJ)u~y zU*(qgth>#AT$!sc+tY*ZUB%>S@<|^~hJ>1gPpq3ACqoKmH6nrX&lLBBAxdoCmfR zvLU<8d1BY&TN=_193Bnkm;o*y%q~QZGrROt|{B^+nCWX@;_(w7^q0xYNap2cg zZkF%Wc*@%G-4P zD?o}3BiU#^?&1GpqY6bB5S*bk^>^_~o^;Xu*1JA&kxSQ^;Z1ZL=#B)q@qf{`?*j|j z7rX3czSh;%@wm)oSK@s_ z%4W=pA2Ak7y-nCtj8FImW+qGHuVqZp)LuWky)jDyHKos8*JO@|$-=vUo&ky42cHh5 zds2i_e=)bd5vbfjWiaZJOZNk3IrcXWYfzG}nJquuHJ+q{-0|txLaQ${hzVoPvem-M zWz#Z_Wx}LDUhc3&@hWVEAC5%_?-{%wHgaJOAY9@kOElwkW9!|6=P11(*>}5ewzvMc zl(0#|^3qvlKPVm6bwFdSjilPDntybW-WTW%@29Pb;zUL`)Rk9X-qcewoh!Q&V83Lo zje?Oe-91$guXykKUH};G>gpyVdphcdii47?iIU#|@ON{Fe>9nEl5)p= z2RyOE6vO4=J!VS*;`m5s#a{C9aK5BduJrO(LD+7^$FoEy@q+zl@DySY@>Woue)~4Z z%Y}6WSvC#_Q>8bC`9uavKuJ^Qjhn)mS7*Mm2^%`X5DyX}p`Q-=%VIN0)pAmp8bTzI z0a94ZM=rljZ_**1jGwnc3&&_^zd*!yAQ2N;v}nwTb9nb-g}Dg*82JtgV-(?nP&k9hl}wETndFgDM+YMi7TDVH5YZdGft;Nk2_N zT~tHa_8c*8r7AD4y(kLu<)!$Y-jzZY>z)dK^m38mf>_A}eR;c{@j^6h^DPFA5~298 z`a1u&ZpDb1RjfFKh0RMy`HNr-5Y8ntd3ZnZ78HM)-^;*x*H0+6z5hD-LlUXf_4k5| zZg)W5c)M5(Ka^I9T@zKTqAM6L#AU1}rEPBj!UxmQQhf7R=(Wv$`avuljdTGE&j^9we+Lq+9#fQ_X@!RmM=O~YS|q_eiPTpY9tjhAMMwaHP7O8%lH$1--^ z65NX_g})6wvR~M2T8G!f#a@_+b&`K!g;W9g#caV4)8LzBiozC;XN_r7(?CqEC zMV7@1bQEEa*EI-i4(_Ca(d+?eBR&$9{Fm+;AjTl&REsAxV25Xd0uwrUdkgS;<-UVw zvhI=F!SV>{Le;N`&=3ZrTB*iaxj987aEk58VWa&rS5C<23O+jIl zhdBeVcknT@^U61n?wSsz596{DdHipWd_`$^z#-`x11tBzLppMr}ySQfy= zn~A*q)MfAKfI_cCyR150`a)!q(=*73gk<0(fMYxiCl%+Yr?rAVZ~d%5++$Xw#MenZ zptR#Ie5N0P0BiDVJ4Xd8k}hxV+u>+=d3pBj_ZGqol0CyG>-}wviub(Is3vD7p83Cb zawsS}E~^CyMJ8JnXESld7aszTjMotz8_ir}gbyvSQ{TRS-t}pm%5D*m;B#`LoXx z_;3|e<~o%@n#xyyZOp@@T;{<&`F~;c-`tbQq+osoYf8b`(tOPGac0VvbhGUuG&Q2U 
zGl>OQOy98vOFyoSqq*YbmeMmF84U`Ii9tmrmW6U}!;Ph^wV|f<^5>6IrcZ6tJ!Jrd-@*Wy|Lo)J_!R`DbEsr&Bf35<^(R?9t)H zvvD4tk$Mgb0>3QyLs;gsjsi$L);#%FC_2voV6QX%NuRi(^T$o}Nl8g}<)g$Z^onRp z{MGnr8e8Kjxd42k_N7;Fq$ zeF)cuqP%DM2Xk~E^y=f!ZT@Oa$2uu4i&LqDX`we=?d&ps(YIy>y8aY?QG&OsceS;$ z1J5)=!rUbr&Hm{*)9u99@xWgd{0`c}CTjj}D$6!OQ?%k!I_+u5A;146u#Ug-rl24U znIC*L-w^!4Y4KQ7S&*MYtgWP0?ML5my4G>toqkuWF)MwPUfNfs@}fGcaikb;yD9Zq zvO%?&=pEH`NmNxU5#tUjxRf+76dPjh%1`36Wmrw(x|&FNY!l<)saJmhjhe2tf1 zu@!L7Jw+en^gqf_Jn(@5h~_T^&2-k z5#O}p=+rK_wzmSV@P#O`@T;R4&F&Rm2*$8K^B}O`du~zm(}tz-!uKke06L`&zs|oj z^!l&QDC1}?Hfp-r9t!}Tcj`1;%ZdG0zV%+{D-=yZ;kt0xzXJx|s_gRSkHT8baC`A# zrt88fx;+fgy#6{or37;5J-oO62ipAK6EiwkO3U8PP9S=8IMv!;<9PYD5}#?2+V;di zH0;EGy8~E?>XYDb*4UUe;0VPj2jlbR{wah1D1F_oe#M*if2m>|JYCxD#ZN?$RtJI~LC@9E|abe+}vi+e|0v=MYnE|NvZTY$8a z@f4|DwPKILgqo}A$yKX$(#3bD9y4`S$F2aqWoT#ATxGtl)fp|JZ+gwJ@gN;P6X5@A z?_9&#%(ggQn!1##5|e7kn6`>^TsnfHl*TCI(rHPFju4ktOQK3cqG()Gt?1~u-#VC7 zDKS)>YD?USpe|8F8;rXWZ4gD`?j1o4_IECQ5LAv8op=8%tje%}O50}}V&usW z3s;vY1Pwlu%r}ySpwtRE2VakFW}QkMo)ujLDDibg6M{%mTtvyxYficwr34A4s{(&U z4aua}K5w?4xU@(4M|@z|FJ>ES(Tu3jQ)8(-PA`>)8RG^unv~spMR-pgX^YW2w0C98 z7P$3>ZVD2*3lE0ar`mWfXba?I^jX30r^oJg_k9`Po1zwAerFS!v6A2~Ny|}+ zR5P;Y<*mKNedr?pYa)q21ycR2c~1Nb__4qpNk3bjp6&ZK&-&&JQ@6%{F*`%#`DX@i5#RiosrxqwTU|81H zdF|XNqn47~8}fxQi}37`Ruakw+Mr~wUi{Ekgl;YY8bxPjcV_z^^!P2s>?_U_dlIgJ z#MYG0p=tB4Z*eobc2|?J(~wc7pypQhIEc@Q+6Ik|BbJbDbNa-IeHq7tCWI$SrvN-h zMxJG?Kr1T6B%s_8oQ}2?oOdc(DsDT{sglmNR;J})sPZq5ZZ6hv{B`ORLos$!%(n@; z33o5awLEd{>4Ep|Y_~fAJLM#`=8NPyxxE}1%$jiHo<&o}-q5@bWbBo_XEItb25Z{6;lxge}SFH8rwj@B1ET)$Pd>la7Q&azDh24EOj1lNsxY76w) zc1Ez?VWA*`^w&z69v9fx2qgEk0}jB<21GX8>LF%?80TH9_hfjhC^3O$>BImUO*;m zT6(0!@kqpRZR=Gu;vS`I;2B5#D>QJ_c@x(u*_gZwY}e=szWnF)$%`|0up*G5b1S?c zpAF;Fac@cz$(?l$DD_ChK-`ZORQkqVHAVW&j5tR~lWx zB|(}MWj8I)lWD)q>i zTO6Fx$^{+wmtE=(!n2Luv^^#@yudc|p@kba2gXNHU3p~;=JP6~^1+;G5h}c}5 zb8EGHWD-`BOYOUkfOce!hw41lX!jwvi7g1dicDn-(rd$Bf1#p8Wvul;Ff*k(3Op=c z-9mVKquWnlnX$)U&v?Nz>%9DSn%9KU6-km0AtOsqSxRg2^UdnEFQ0k|V{Ki3EP`5Z(*YK$${hSGD$(%Ino 
zy?fQ;Dma%TB^x6ZrqjBMJ}0+*#h+WN)oRc7t2l3;lxW%bM#Jp>OUKc-x59D&MyFY< z?V)Wr8?_h)3N-k05=GLBeZ=uWbOm~iP6Q`%xZ?d&OW+TTE7y4XqbT!yQRi%DwUh@+xMPEudA+sS;KYs_qxohX9gTcwaYbK!9LLoC2nq1$VT=9 zkKx6rKs_Q*p0u=kEihn__lsU)-A&`2gyF+^U-iCH&eMvx_?XF^3f>I)Z@K$vLaSSe zikGWx)=T$ue6l7J)mm(Ft$9A_M?1rQ+enxF7b8Xg;7i^*a5 ziz&4m3f9dVU|ExryVgO}NBtxh%Z&H79A$^j-FTgk)a?IO|2+ioZs}&KC;qoUp_c_> zo05F7vRATDuh=_$d$RsSZ0Xg5_`}&9qih9?u#4nB6W9gMAvXdNPkFNNZO=-fiQPT1 z4a#Tba})Pf<6%YWyZy_1tPGAWzpcfu=kh#yyhq6l03p|chcVF5gQcqc+I=dyJy~pg z0li-!CfO3cVb#Tt(~q9sYF=>tTxt2EA4;@nP^lPc3Ll@St&8y=q39Dj=I-!<5BCGf z(^1!RxLsOyxnsKVXDh@t%9Ubok5pzNB~c#9`nOsDjAVLAl?I@A&h}I_!H#j8?BZt4s&~bJK2d7flGf_ zAU@|=A<@fK2h0+SG$hCM6tK0Av3~w9M(WF;^PuX%6`8`p5P(4jX(Ls=iRA3Y zqjQjdAdY|qa}&bVfgI%l$hQ}q@(0V{hJnYoT0vlTAiq-D`5WTw_J9N!Ne#153|M1w$R3W&k&>G_2%6c6Q6yueWZpx& z?zA3qlo)lCFuD%emK4IWqjUDcd%|N^upO)RuQmU5^qGkshpT1;v(s5PVU^m~*L|+k zB_|k^KJrEqjCMnnYS&}VW+48QZ1l9u(Ww3|XYo%~X_pQrMHY;Y$(Eof1N=xN#}B|y zKeWBT|EnKAU0^bDn+g8zlcx2lCxuEN)LX=}U`2eK24DKH-F>sZeU z(6<`MwDMqVB6NfW>;}C?FHaKIrU2fV?kgVRcEX}9AO$Ce?XifRMcf%;XFBW7Uvzp! zEyow?MgG?%YMu;=XVliDLQ}*C9pN(sF%hjOmIJxB5ylrOcx{;7s zEAcE@y_JR7KP~^IC$iYl_!Hr|LaElsfQ2Rs^EdEtItgt2$)klIE>vBW)B937%jsOk z=%!i}k*3H;cak2~@0xzSG=)0F)J3eiUD$h2!Jmj=_oba$`4y3((N8suQeVhRA4q_u z@VkQT2H63rjA72b20XOTtLGA_-+zvX%-&!Xb(HaAQLTFOZ}jzw zgiOy_Q*G?gO=NX1FJJQ3u&0g*LRI?IU(dZSjgkkATOezXGSqniTrFR?B#(a8FuD%H zmGaVkvS5P_N9qAp5R+VPZD8Z2GxpuySA!L)ryU*SO4e^;RkB{@dufE`lH5aS%jLJJ zD!x}cO1BBD(pql1aHuQIo_iu%cd2hyw(>6%qeadN9(KK+12HGEDb$`o0NMzA+*cUa@1Sz= zZ;9zeDh>xyQ5V%q(Y~;S|I?|?#a w{bSIksD4HJ__9z1UXH|stboLk~^m(z={`Y z8fa5Fe@+Nc=@Sf5qaoJ9`XF5LEz^V*k~k6r9*Gn2l{|TzZxlY2@Rq~C*!5a%n3?F6 zrsK;~3;2bWL9r290$TzZ*I-+k6c{whhof5nCwFU?^u3hvN)W`cXv^T@>$ZE8-! 
zYDG|cqx1PIj*?Zh{-RfI+qUG;n{HWcs-UF+U2-<4sL7aH9i<;L3wZ=7LW zopR1I%Mmof7^6-S9u3_m8gSa?iwZkuS7XC1rGfhNGi*f*sI0?>S*TT4=V6_WP8NyP z2La)@q7ozntXB#Jyyl`x%d%(A%5a~gl%QmT*ufHhVKEm$``sde~q z#GGEOnQCKq)##kXZ`w55)n&%;*iFn;8tQ7<|LH>8kE8))%PZ;jswLS%Y~8>iTq+X~ zc?cYUWrU}?gfxir46d*EIEgM;=B&bGtvW|4C1}wcw#s{q6v!F4i=y2k=Jht#wt;wc zO+s+MFj;Cltqd4E3o6mTlSnE&3^G+o0GpXgWb5~g4V8iA0zb-8F4rGsgBB2$WragdJLq>KA6&A*;w{Ln&fe^GMwa1!7^(Fr}V&Y(IC<{ z%n=Ls%hAXyLR^F*+dcTyr`8?(W-keVdp)R3#7Y6Aa7I!nL^&^=P+O$2xIV`(WimUJcG+?Ahs^c&Q4gmjjZ1Xu6D8 zCRjAVkj?eoU)%T)bke3ACDjlALLf5MVWq)rB`XPtrrY)@*X)9TZ@r_{>Y>o=aV|hr zZG7|T(Qgj4eIgJh5CWIcvdp~EvWI6IRYt!0gQ8lHD)l{~=~A;iu51b53gb$}w-jhU zVA57@<#F%n!2)(-r!(JERwM-$Rx&s|H8Y|Z;zT-_h5f40Fr=n8bAvp{J=erJLMp%_!iR=1uT*fleMbusiQCY8V#|W+=rEueJu+#Sjx> zWVY)q!A576@(bACD&O9$4`F;r(yx+F3!w)~b2JM@DrZ16w0bAZ@Wl#>sF^QfVIuG4 zgD5tR2eIUU^y&}FSELA9&xQtg+8Cl#c;;WvY17Or`T($S1%x*L(CAmC`A4N*hHe1UMr zvXyv`5Krlbrr_>ea1qYy26}2L5vg4%rPVa`;pz9VtK;OLCSU?>Nv4c>DDBq9GcD{6 z^nzK#a?a|;*vrbug1iuyVu(iF*B(A+X`x`jHrw^XZMK#?>T0c!A1GINU9` z0*HJ*el)`*5f}4?=RDOsMBo$~{}#C9a4Q0S9ATnS6~rWY)h?d_?T8kr-Pu_M2@W{z zsEc@FY~}XXY=^{=t`*#a6|Ivs?tJDBewgX8R3?t5y^hN97;*%7Tkdm<-H=O{&PoIJ7547S5q1KzwFZleJ%F~^ z_<+u5{=EX~l>L3^(o-^vJy&uYcU%GVn_U-)aND&it*cE%bPsvHK+A=@6DL*0BfTEh z{klIFPKu5eQ}91Y;=t`IO6W?Uqvyp^2V(XHwZAQnZ?g2AVdyi0=mTkTNGb;M_r}5cGf-_}SRH5r%1!w1^^NCZvy6KZ#_=#+*143mqp~UMe#F^L-QFQ|zl+_Tshg zXsc4HOseoE1T?@{uA@NK0C@QZNFk|?*~7YThMn+$e9gir=F%EZy>5UTXb58X4M65j z)h8W;iQQ)&gu`)Uj#w5#lz7wOTv0+WLJBnWB3ZtNUWy>**cf=hHO{pSIclIT$hrf$EK$mgcb7#{gI`rvXvVV6krJvEIT>(A*> zQ)tchn~&J2r6CL{P9itz(N64st3O+LU)CP5B-&TjCXaXZ_ zC5c+d1t7VpLN6#>&RIAy2S~SI6$;*Ebu?PH+B@kD8k6E0BzeCVwE-ftK?yrL_FerF z6ST7)yqh&^1{eR}!uT4o5VnobU#XbwSlmpouKj{g?Fp`%Nrpi_7i@Y)73U{ZZH=&;DpxP1i6a2gLI^LYj?HIw# z5CTG9CQ3p*%_+a^F%b18~ErgUJAJxT4JG%($8EjLVQD4cm}r2NjP0=rY0= zqHP%NG@{{;)&s*Si#>Fk7pf`s@7vS{@>+CY%Agf3dfe3CwofelE@J(miqOH1;ShTM zc&!h(>@x?4e}FB(kb*Iu>R_@~-DXz#)H`4^1A2}Zk-eUC>dCC-t@CKU9kX`O%s5Nv znOLdGyj19o9wZgbIokpt{Bm825I)jjAL9H2Lt+SwGwFs>V}QZ4mA 
zL{m75!LriYe;EhXXs7e{`9u_gl1~(I9MK@yyOY?q2c}Xg@kP-r>T;CsXiy+1C$C&H zubCmRR&gc^&f~Iwuqfp6a8wM2X-;$(lzJE=f8{y1JUFND^feH@8K=(SuiMfHj%_aCE1IZ=#vMOF zB~)>hBE^65S}5hPFv7pOXX2EHnB`15{z48z3gEX!%-4w2GKjX&i<5a{9#pcgVb_I^ zN_($E;7YPNnr7BxG6yy)1a9-Jl-_gt5hE;rBqR0F%^{3!mWRfgq zH8o@cI4b=U7<-7p@xO`U=*yK9VuSk&o^n~FA@oR%#>Wv$STEsSYZ_j>Sdo>P5-@!h z*Pn7`4DZl;0m73NkY6bQ?dgTFbfa6roPA;`4ATkFD-j`zq&|@z>={2#r4n1amlI`%7GNG6m*H{3dQu(ox3%DQQdOxS7=-{2Yt#p_6U)5|J&yG?cYVM4-xR-AioTVvGGN{f&{_c5_BxLpK&LIV| z+~dBy4>XwVG@LY>UTPP3jbOH`kHO?^t~}yJWe4nkwvNG1&WA}Be7;{k^D+8Ej<7Dx zL3+(C#XD!?D*A#%ZYBp{;2rZYc=!$MZ@f>Uh8N)>KnR zV}^kcD5z9bzSXvLOROGyAF4q_ei@chYD^As?YNZnFF{~kP)Zsc&m%neigN$Jnjl7F zUuqAu1qi4k;q!Ka@`WcHWnpoBeA8C*%(nPIVv-GYD_{YxYD1BD^z)^p8fxj#JMhzj zksKMF!cZ&lj_K{vmbs_Jr`*vKPq;eSjm&fjuZI$L3}J&Ut$Q>qrm_+gX10MdJ4Hga zA$;Jh4O1IB+7otTQfUZ!YLZ?rYxKcM06kr8NG$ZM%t$uDks{`!v#kGRuhxjiaESe) zT^tKk@N9P^E%Dtir~wdE%V^(SbmRznOg9Ug`t67?UX9*?>K-KCgE7NT{c&Ad@5DNG z0w!EHqBII33wK_|4Q#i?TL_aLL0K2+EvA*TaaxR z+G71{K#P$0pptgpzmmmyU?ms+2x({0{4F&7C`AMj)WFS1!FPXBKCA|F&@F~a+&hq0 zxyeoM^tZAqV!HnTi01A5P5=5zw6-_-6ecM8neitqY2KYw)zM}+kGKHQ@*Q!?6=z$X zrPos$n8^92_owP%m_nk|g`SK3(8oD1tstK5kW$FTG#h%5>}$*}^$9mdHB8=w_E;Sq zR=FFRk$Kq#&$Lk)#^v!c>#5JyZ8CGDmNC+l@#{L(H|wK+wLM**EiuK z-Jc#p8v*tB^WaU9%vO$!>Vm=l{kEcbnM)LIolX=%=JyR1S`#qLhu1|~D6P%oE#${P z;)C3@v;H^;-4X>nLHTi%kqXeWc}o)Nvn!knKK9r(oy)bekGE-IwboRI?;t1L+spI6 zIK#fehoeTgdWTnQVxH0ovANhn!JpL(ijRMq5RCZOw1F?2@(Svh8Xttil*k$RBD9}$ z?*4p)3>Sd9(<(VIt@|!S*gJR;Q%aOY0ocdQji$k(efeSzxj}=i;zD_xmXVlQemSi} z!mZsx1|uEvlU#*`H9waR`03b(7W=`}8vw~?ysR8mXc0V0CpSKhygxG!0ozL)p^!Dl z{7HFdR659(cI84Q!0XCX)h&7xpt%}8HxibTtdttFUN zNs_I<0awMfuH%O$I8ao^@Y4ZvO%W_dGy4hDQ|&Wl0+i(jZ21*YT*>99P4_;> za4sc^06J5rG!C=(c#M0YgL^}6bgCTu1O-7|kSU+HCFq!Q1;j@U%78d#ZT5uiR1~AC zhND756>f2%j+i=GD6XmN)Q?rfgSby-^lK>Qrh{Ll&@oY%ZsoypkbMVgM(vIgg$Q4@ zw*9=rel3%dFB6}ZvBu|}JS>F?w31-1hcG16+tm~h0A7NbA#M?LufJH0{I0MqbQk>| z6l~g(WL%;M`F|6O#c)#`t;}z?UiOs8Pn-~Tf;x2++<#c{&CA$FlFXe_NiOiK(`>8N zFZ^p(>1%$`+b^o*%M%D_HpY>7#~>fpf*8&5KVxmvlhUF|6KD2mxAYXEgx~imd{8pv 
zKe~!IObU!h-|oHCA5oYt*JxEI4`j5R!0Nx;eWX{VE9)rw?uA;`r@2Wx6H_NyK3Z}9 zK0z&sELpW0X3)!*3XiiJvR`+86V(+&N38d3zFzN&a4v+q^r0n=Nl~X~eI|`u)1NJnT%J~4No`?uMH!-xm< z*_&codr+d4Y(83KYWAJ{)lLj^I1Q;Vx&d7MZ?i7Ep9iwglV$yIL3Oz!wihV@zrWGfm!IK%J#3x07H^;o zJX+kZZIX9BGg7qP~_3Exrk^&YW~jTL(E4v9diFBq*q|{V%>+% z6^dcbb>n@b2{@Vxv_vWhgKnu?cl(WotBbSb4{*JB!9VwcGgq+AXUfxW(^GwlormIC zk*}pYd@}*mz>)$`xr}^}t@$a=rmhpv$ z+Giv(s#Lpbe1(Z&1BfQql{-pk?6b4!z`TL$1HEgu%7htGi`fcAI~x#1I3q~&D1nwb|h)qSp~IckbK03p0}@kQ8B0`3iPM#S-rZmI>xV14oW zTSNpXc6JsIQZKxFlKZs2Ula~OcT%Phj>bCMMA-j|wc6?J;zPA>ADdBqa2}#pJryS58Y2m?ZVh;gTY>-S#L0yO&95)h*L^ zCp~d?EK2)Etc2D96ari&)5{cv4IUBQVIip#e9f+GdsmJfERSk7B^`|M0C)d)LnG`F&A}TH+TlMUNKX%P+VuWVvQjuS~oZo18&SDl; zJxBoZh78^4Ift-hh`*-p217pEeh|g8!Auz7f}K6+s5fJn0)DnWJ`{F6o1<`}gCHoSSJ1XZS%Suud#EM(TLS zxJmK46hZ|-XdZkmyLP|iJ)`s~sg$Ik9>3mO&WOq}7v{W#O!?K^eI6YsqqA*(lSsXk6;p2{t=NO0VAmH&eug@oZIl&WBFJu{i#xdzq+!^qw? zMIJQDLj$Gw8*CK^$kodGWvzuSpNSdpGI+TY4gJ?t;n5=@A&GdLWz&f}M#d?Ipb&dx zLehvllGB{%h{J*h?{LI63&E+Fk0W@FJ48E-FNNeKKepZ%9-M-@s3c(t+3y*S^5c(c z4*__k)ek1#?1VoL^av@ZzgRyU$1^8)O7>dp!Ed(?Ms?sXU2p#<(_GX5j!6$g=LbvR z#5FiKenLKc=n7%siUsAdC*Q4BoBpK4b|5g~(4F(B5b0fd{>>0LVCuKvGVg*Bcur5V zsABxz#|{uin2hR!dH4Qd4A%XQ9j02~Xum!58vnTU-l*5tF?|&?QNmq$5C7A8?!N$` z!U2&YJZhOvSf~5$A3&MhZI(u8tesUK|F|+0_AF3_AVtK3Xxp99?<%&FU9%+T?mU0d z+G`qJN1FxpP03ELoCeCN;kAkjOV&h#V$l@V0R_&tsv|BYk1Q7Idc0P+zO z`gm#d6)28ZM)EgIZO9FDHaUOn6r=f-lzW}yjTq*g+h9QUOIN=2{!5u=uXqffDWBxm za;j#lb+%#PS-h}CMZYp-%Jn=*#+7j20B7;yR0C$g=4JOne|3+*P&JR0eYi@G>+RLm zb0;{1dJVY}1}gBaP!nQ5YJ5OC|88J>9-9lPnLV=*rFrioTxbc54p|1_ms_G2>JzU) zrFY`BA0C+57KyywesTAl*XR}(+^DpZ%%<$aUH_3i4wLtGh`6B@2P#SAiw%AM&Jk!6|rD<7d6&$B3g4M8BvNSue{=%Uaj&G^+xc6II5G$oWISo+l^Zfyz zVFvi^ZY^+Dw!jP1*phmGxnGturaqy0Xm&1VQ;4?9Sg{aVSf6NX_-Je^yEh{H%5<0> z`s*A>izwUVkPqNaszi0fEeLU!d~L{Qk+xNn0-(+oU7ifLO@$*JBOb%4j5q!wiY^0~ z#l$)|zuF_42Wi73CJXu`VIQq}Dd^dv+_uGj*z9XAbN%^!Yu;mrGrdeLUr-`Twpo1x zf9=Yca!#YNTWERIJ(_Fx*F8J4EsFh>?vv2r^W@3HvyeI<^FPAI@n-b{Qqq3vIFt2L 
zKy;sVyt_%w%st;Eb<RF^frehea&f=`}tF0p2?@dl8MmkHgk^<$L4i6RTKxML@O)B&f{%`TpWgG&vyk&Lp{UPTZa)cMr<@k5cKnW0L*Qv_oC@xIWEG z^&_YrPKCU#BP~E_P7HOK>h)⩾!yGZE4c`i(M)g0d#49`e0ztyfP1Ot+<6qyC}b zO!LpuleD~Ttlv(;A-=L--MdHh`mVq41=gnEH!-ZTmGHZu4_UEMM;{= zf^4&?+sK3I%Vr`1>F;o-wuD0KOCSeQ?8~*_bYc8@*bb+=zvwB^I%{ExVjoUMXTvvf z3u#_S75hFeGs=VM#U)2wE5-`c=4H@9g`MK#+AboSn&q%ZDf5AnTYt1axZm3bl*X#Y zqv}`&)fJh}NZ+woNm0o2j|URn^fPz#OM$$h$b6*&zydmvBCR~`XX2O);UWV^d`R0z zt7|{@S6*C)9gjA1pOmv#At|=4vn3%e(X-#B1pAO^O$ZH9q!1^#W7K3)(RthVlV{PD zJ|2GV+OlI2z8D(Zl$8?A?jKR+Le+cg>;b6EM$Hp z9Wk8mk5+%I+&g`_9xY(xi<*+#UU&&*JZc+;X%tsP*6GtbKv_l2+bYkN?(r9LSf{Bt_Q!L#~f zZzLRO{gm~`Z+r_ApH zN3tj{ONyUW-{CRPBO+~PUf2#f)Z;d%ftwsW+`NHx4GY`}6-0BBNyXNqJ%&%aHwrX| z%??kW{9|*WA*yYo4@9R)BOyWa3#9l^$OhH&jABTm7l**<#^91?Ok6x(AoqL|3f4%B z@c^r*mAefUkXcvdsbm}U)X+t}Z9vdB(`dS2 z26ARm&{xk$3;XCA@L^E@w3f!Iw^FSz@F~FlJFRc{{!jY5%xCV&{$0K8Rz7N_4{+Yt zLAbuqeZO|)@G+NO8G3a9vJl$+&7)ev?_17k-NdEJkrAz# zuDY!DdeJk7OYx>0RsGwLTm@1Lx-8++u6U`5%9{TyTFG`~sQwz}Jq3;Q=8x7?L4TIi zN|TS|^vP7JWxF|1k%HW27gk=UR>YkWMJLTvPcQGQkBOX&ncVMf-!iGHyCYMVrN({gDfI zD(r}_?(-jv`JuR*0rG$817w!8vx<{Lq)nqgDHLCOU(z zeBlhRWB>*So;gy`4{&z2c;;Zp=mZ^Ga_#jbZ9xNu%*pL$-^F>5Xe47T<#Bf!;>~XZ z9}Ly_FvM;7((EAOQ}?+2v46;1X9|F#TCDU?)UB0K$!A>WbWxiTdoX;A{2(r*Qu4Ku zRBhf4yFULiT~6Vtdh|It8;3D!MzU$Tsr!y&3*6*jarwb2*Te@Lj;jy3Gi&gmS|h)~ zr2u2(7^MEjTz^4{x;rfTwdJ5JR`bvrKAU3ku#nLEQdCd<8h5YljW`pZ3$w@mky(Y7 z(U>&B`%RQt%s|N?X(t={y&TgZ?1q1;nW5lR5>Esv3hzY-;mwRE=;G)W&#|gfIL!%0 z62xoOz{3e4kRJc>%Ne53dU8A9ffnJ%+VXwiN+WrFGuVkm2+}nNmg$!EU|v4NYv620 z>HkL&X`gY&(qT3lg@Vj)>40a}Icoh0U0NwGW&SO9Nh4n=rcYpxHhHdhm)*p-&92-% z>5#GMBA-q7ug?TA2JMck+Am47MB$-Xr)iS8roIkIn73bhys;jA-a>q!;jQ$f-gr1m z295MacH%Ddz=SZXRpESdJqr%7`n@IgC21tcv+j%woF`n*jTGhdJB9<_i;yfwA2LfY znh3t$+fV zn^9CLEbT)OXwh0{NROs-pqc4#(v1Gl*fc{nsqa$*b%K-{zSI+f@5Z(lcJLD z9w|n>v=al{KEri*SSt|xVmIePUOhYd*%o8L&}OZB(>|jw7c^__LX**!8S;#9DES2_|C2%U zA2ns7(h?Q$7ZYE?#nUWMut1_sdfro;qCwxwCxL>V17_7^(w` z>DO#nX~?h9-hgODEDqoV%F65C!D>=7a6LVtr272Lp+T7J@ 
zcCc&&;tmfB9sLmm`lTABIjBh+txkEIKk%@u4h5x4MTqBXSg0uzrjSbS0dE4`qoZLO?@_4)*lq97LsVV)$j)MB-e#oSvtYQb>s{?7V;n}#^H6k z(h*1Yrym+H1&!u@fjK*S88ue#M6-TzXj4{Iqh046`kMMbZ>;b1AU`itcY%xaHLRtFHGdy5S6olYyst3m{7r8)9Q_FwX$Usg+n7ca4Iv=YU({C zNPeHIk$~~QHJU#)DPIi8FwW@09}B+}sQ)GfKHc-8dN)(+FVM-W_2*LRE`Arv(~!A( zEnJeb+3>RCHM7XZzj%>PbTnPz0kNWk$FaBkW)nD8TX(699`8*(l~o*bdlG1S?yaOZ zGn8zL%%juY5dSO!Cz_r(XFVZ9Ybc50Qw*iyTwvZ-wx3N(6yfgqKJg+X2mf5`*=UbR z-{i!g3(AMrr^S|r1}S5BB!5CQ@^g)sZ7Df6#R8$~UtP(@x`9c~3pXm}QrA!F%r-`- zYW5U;*}87%yRfP`+C_O$Fa*!^_X6*ehMP#(2W{>DLK#81qjAHgS;pZXg!Cod=X4Rn zGDZ@F;!ffGFEpAKIu2*60EtUR)4g?|x;qBpFxY+aBVVAmBU}noN-F=cQL!*eS8R*v z4;xg?S1&zt?cvC!XR4ER)TeCQ;K;L>88tjNXoO$PY(`0~5i_jUV188EiN8pWjnpVw zIs4NTviP0Y2+LC=Q+*vX;$#HzxRu4Kvi#ilRUkSl1-k4A!8>Ag!Ms}{=&PFo+UF)&3E0$M7MR!jgR8vY! z`isN}gjWTq&>>yKtT*j*Ds^$48elRVkj{{emy0sY?azJK`g^H_9EEA;kHuVqAF?=@ zh9BvwBy@^<$&$I=B*&3FnOg>xfcDRTuScoQL5zA;6am61*new|1m!Q4m+x!KBP{Y8 zP&>tM+gN+$fW0FRL*G9TMJ~q}g2yRDzM(&L_RpIQ*+1{J8u{u*!nNCWoj(KPWGVG> z*+d4r;M!o)8Cy_J6yfN);)YwXhm(Jr(eP0hEd%c$9Za^eSDC6u%kqIG&Dq+>6xBqD z+8J(4O*oOfcIin`ELFrOVSjiV%vHHUqO7iNrW)+5DI(!c<@Ct%WiJsF#?0DuJn4*} zL#oR&^7r@IEQX$meEWEUjz4z4MD~s2Kq^V(!LCEz6=!-G@K$Nlmnt15DE`|gLGhB} zw|}WdUZ7Z^k}gOd@LWW6sl#9Y7s?7#>dxMmPw54@dcr>eV=KmKLp9$hejE(HLJO(& zPOT5VW7Q?;fCvU> z2$fiC)V`+{9?aOy1dR%Z2}aN?EXmbhxW4dCA5hp$7qTArFl4h4S*FI$_QWx2Mr`+7 zT3wb!rwI$E)wX*~E|tNd6BoIEZO07UsHY@#WmDM5+D9@aJlaIjbTU&T?d7z!E6J%h zwzbd3%73c%X+i#bV-fKjuW&5Rk-LhvvZSTw3bjqpp{36#)f2pF35iTA+kIcBD zBqT@XSB|{tcN$d1TzW$35tbNc9FjvKJ&0Xi#N!2jOB5^wG%#A}yp^jv{DcmdMP5?s zHNh&((5D1tM{|;f!%3VSKjRks!S^?eM91K3UXHPPv^)Q|!}})M2&j=qo3_Kuo%;#j zPW+=yNXr+N9xii)dIjJ=y!@{W1np3$FX&a|;1hpfEaAObZ?=ryF+FYf5nc~2>L&-h zj`af}6x<_vlcf+*oIpbw;!{nucLp94*fIwWq(PW&8%I(G9HaRO?hNCQmr{oLg{X1I zR~W>_QCb2lobBHhiPBrCdap({BNT0#ix&2z@7g_rXy~6@Pq)*hS$-_j?&LWb{9Sq5U4gW546PqU2b?2LqedeXcACsQZ+|!fW+N_NB zidA1d$GgTW27pTb0h%Da*+jPd<%33NT7jvBM-zm#KLT)9Or#Y8uuE~R#X-bQvGL1#hKNxC^!`cCAwZ(76I%#HZrah*_r1cl@k3cWTt(M 
z1Msbhtnx>&!%!_i>JhgKD%WN@0mO1!S;HwzZo;+dyQmew)Hxi#LuY>8lh#qoCHL9B z0a?5q-!+LOdUN?#>tGXTRqr_03{Px+fO+7ZyALbzu6zzd2vgkvg*m`(QO0SE1pL6Q zGZrtqm z<`u7lyJZ+7p01EE7^w9dsnCfNC2>}p(Zo1oKl|GLOgdZPZ$H*G!HiQyw58py0BvVy z5ms=!HK9@EY8k-EWYE$#h!#jH^9{O5VV7f@9CQrDJ9i%GQUFekZ3zs>h zJSy_QCb29(QH549em5#uNFsvA=;_)Eb5=-5{d%cph{Krv{BUNljd#XRZ!9N>C8@Dq zX4NZ0uxCq>=D{zBKpei@rb4|4iu6ETj#R{+8#VG>tE)z}qKeXD2S9vL(;B^-sH#;n zQbAO+^pK0F`CuAiGi&$1@K4EsQIJz$)Z8xjm8+Hipumgth|Vv zaBTKSLvm3hE3fRK-3r3_vZd5b!CV+jA*U=jDI6I~c}6z2AQhZIh%4nKNX8uKRCChR zI6OldVpsIpMRmfC&=ceTKxH3+{8cQY)v*2C!Qf<E=QXV{;rPQ= zlg)q-1AAcSS<-=gYk=u503T>hP}q~QLd z(V|z&Qx4blCHrjDpAQ9V`KodGx6?pa7&nL-_O{Eg`cbkptEP2&<7ObP!Bx^iUdxm5 zV}H$nhbjPHuplDdvffGo6RQgvE2#S8nc!7*aSOFC=F+gM09~AgBEM&0IHi`%p5x9H zUEToUJ>mi@HVZ!mkTOV#hp!dv6AHZF&-b~YUr|F33hj!DR!U19ar?vsnNOv?p>jgq z*{uK^a9C%W7PA+pLwidw#Sv%C?AgVcDl*s{6$$Sf-}%NpAYU6~U;`lAuY(Vji6eV1PZnMFR58 zAoN}oTNbg6WRjO~-5gyD^I)pk@Vtgh9_Lc@i5s65t7)T>BTFt z)92pj&;qZUJ~Pb1kAc5^z3D*@wa_%J3KhNJ1nF^k7o$1aHa#{aLV$N}YSAN^yKd3PyQwojeLwp^sBv0hq~W^ zF(2*w$?OeGuMJgUw$v29rNDX{RTQ4bm0RQCNrrY#2i0TYw#uK4LF0aoKzXWhTK(wh z(O6gwIho=etTN2(LegX1?+rQRKzuPHx z#&$A$LoOs;^cQMkhQ0jx+aar0JbpvxI6%Q4zR{i%_{aCAU7!Ol?u3z4 z`$LTA(09gfG1q@@5%1=Qj7b?%5ssu2tpt&Q=i#bWsxUzX9v23H%CMXLA-zUQdyL@7 zl`((TNeG33AVL?{*6;$u&kf4Rj-*0+>Jb_bR>Tt>Q)7k1d@8zD>58;i9uG$7sJu<< zquC;cE!6rCNoys!6;Gyuxu?jB;11*z+%yI$1tr~onNS_Xaf&gq(t&AmJ;59G!vf`O zNEn2Yr)CooePBv0q0A|aLN|2JmHM9j!YS8nVqDYh2c0NflDUjqFXsm>!TEw<=lRk1 zXAl=$h5{ZB`1j`Q)aARvqKB7QCdK=*nqhrv!Bu9*lkJ6y+FV4B z)F(@0_iobNj&1Bq?)Nld+s0V2#85_DWaI0=Y!L%B;vlV(AU6tZfm_iWK7Kw8a~Gof z7kcwX#>6xfCyK(_Yqg4lVE#B!e-FJ2q)UDU=xgq*u@S?5NTL>zP4dRrhGjdES2m0% zMdnj|s8jFaL}>m_K>#r;)X4to46^R}Q@dG<$bFgS1*!7AL6s9xy=MHh;9~(T-;+d{ z2f%UwO?5EyuL%$14cJR~w#yHj0joy@@-m~aR|N+J^V`MRq*JV@lCkhRK*?C>#3tQp z$S&CGW9`Imbtp_E8(;8N0F6S#2zH|$ZrhY2L}L=RL|{E4CsfVQmq3$wWZ zQ)?M{c3ivEc6rJ=3M0)p&LmYU(rKQagz2(|Nm(s+ZbV{sa9!Whw=#38&g>mY165w)o0x4b;R4?*2y<$*=kg6og8lm ztsXpO83*bi!#n(6(r-A`5!dYiCo@5#z#)nhw7iPMquB5?&Ebj9Gb77wg)`(r`WwK< 
zr0)z6Ttz9Y@zq{fiK91F?THe(#ICdmW1?~iXvshWf)g#TQAr;QdOuR1jS>FkPo*@k z!H|_hz=w-%!D8XG)|Fqf`3E+-Qh~ijW?ZOLJ;mp%IH0%QY@y_a5g@@E~8lC@SXCw}9 zgSe37mj;uuhtC+RE|I@;?BeKwrPe1r5vW08B2D^mD!-TEK8phb)O_ zn)n>18AZU>T4opsJ}X3r!&Ix}1F~qU5>{)DO;uV}8=-z? zhMz)GjoENHp9AHaf8;F5Q<2iwUhRes1>ns z5f*PpnvGR(ZNoCMp0@>ZtF)|BnHby)OY^x5uuO%V(VTvZEm46cQt8-%?!w^pB>eeV z8xKUd8i4Wra*R)?2L7v=M`Zz*^67ClE;$7+Wxu1ZuRt$ut6v|FK&L{c-}APwu?zvQ$^N z7pc-JV50Q3axd;`q#X)4(9%GS47(zRp%icl9|M9~>|cZ>xncgD^fUD zIl*Xq5@A#RT-cnihkw~ME&vH9Jd9gtdMmDg2ON18@wMEnX2e*fx$2?FF?mQYSl2y- ztid*?1qhg9ix{M@KA~eB(9x4k!K6pq1Z~tcFv~5`g|*^ zUBr(cl#06x!DlOBv)fwj$i;jb<^kKX@N=lp5#i<+4Lo~qgYrwv2ONUZ3i3Vh(N$%E z1Wb@pS1t&LFO=QY%6ql(9YHv=U!I8y;DV;m zvslkP$Zwx;9KV9U7B6_X2eF07t8jUg(rk?oMCDLmP1-7FoL77pP`dEf;eZ;i#!-gz zVU2HegYnbWXY>bo=eTF$^3!1u7)p1^2SHV^N&>k^Ajpp)7y1e(MFQB@s1$IDJc@Z~ zhC;qAU_DO@7!#{?jEBw$G9D;vsq{Px;5?Tt;DU}%HiV#qq10{Rv6gJ>UPh?Z6e!^r zaG1f+k%i728QdI{O5B#dGg^8J8S#T4(kmVAVA&&CMkx!Qi~Yinp~tr{Vd=!`yR^XZ zcvVpnD2@b_0xphRZK>i7s1&fRWC~vyvMk>Um<*AR3wnT2z1-jGpy2GsM%?g~eMoT4 zHApK?eRvHI*77XmK7PWkM<`%tj{u4HVhuTat5OCbD5N7PIw)P%xfYrd3hnWWjG#qV ztK2W00C333A{OAt!x^p7J^ti)!3Zq~LW%nsbQ|8RTD zIh#jXfV|Huv+MoT@ICzIn|)i*I7+a@7I5|$;5U$-ia?XAY$~coNx)tLN&(x;r6{!_ zp_O_@UzBlF15qtTA-{a$$h({@V7!&rVB66Gzx^cPG)*2{;3x0}mKS-Z2?WqcCL`iA z;cd>gQi)tGX>29-zdcA;XTVkaOmaN_F9Io41eOJY_s@oZNO%=IBh~#=t1L)-Zw7ft zJgPNBbDU6B)_AIs8>N6t_|zKj*el&{S=LDjSr|24L8r!W!sb7;R&V+ZGO!#o##&XoPa60W0V%aksV!YFZ3LNBqv@og`=TdmYX zE=PaF#mKs-2LlGT-u3`VHe(XCowGVvy&Cc*2QQq3WM3fTufPR5%@dOIw6TJCwgo&y z#7&_Y=qu=6-zj`BDgz6BrEtWHey!(NQUJnwU7(3k3TnYF;pxaZKA>PsRE1MShADQWMj=z7HlegWP+NWdhh$1*eQM@ zqhmS+Q@}=jPQh`$`K~cQu8SA>n87F6*ntTw9BadjNG>b*Dwgm%mCF|y1r$&MRX4i} zIKGMl*NiccIc)}G-N?YS9~%$r5rliZaNDO@aigewhBU4PQowNABB+WTZ$C1=4$EXy zq3k>(lI`*1wd-)$kaYj~F0kV9{qQW(P1~3SCD=AN8q;3DNOf$IoBrqGka%iJ&QHmr z=T#xBHZHWsNz>=sf^xQX>TiA%X!)hLkD24soa?rfi(4tgOxX6T@!b$|;Q%h_2CU(q zh)#Vxn&PsSppH0!H}r|b65>35V7Au%?{h!I%z5GFTMtCE_kr<;mk=~(FJT||(fN}u00qN(Nw13?1m$~3Uet7@fb)`Hj>yUd 
zoulA#N6X%iiI-C=*Fy6(t^Qd|tbIq7=FY=tz|?|lt6{MaJqW2JU7m>8#j~ixae%7>GGz&DJNW}|q z9p;t^cXV_GaMIk}==(1L=)^%AqjiD27(wah3XkXMJDEtXefYvN#5!I-cnFklwgg_M9&z=f2iKU#8q&c1MdqNdf=K^Z@851?xON&t$+Q+cJ*I`IEq z>xNVDaaf2Ok=YSeZ~X~Snn$I=PJy6ETe2Jz&cT7xfy_Ikq7%s{>~Mufn~w8>3>d;qNXuwoM4a3cStBmQaB!F`%-W zK1(xXOC8rc{>7Tv3V&J?H$J9w{7+IFMXAn;5*{_-&-Q{dI z3@h?0#_JEjI%yl4Vdxa5V0|qxEzb1K=-t!u24&k*jwI*U+dCZrS!q@lsc77u2EfIRw0oNyPS$C#^wRNc;JmBo zBqZ#}NttVAOin;t$LHODF(A0J7poZEjnFiC!@y%aY%WBV=j3bz&HD-yHKl;P#Z+(z zrGN`AQfGn|H3U@(b%>i;q#Jde2-sTVOj)&LOMqr%*SXg;J+$r}K&)_w3fu}krq2qB z#Wmfh;^FRa*tiTOyd|xFn8s=MHa7qWGcw<>X$o3}&}-|DL6PF{BxaB`u#;PZD;UN8 z39DRM?w-8o)R%vx7#Jvr0^W}-R%^LV3;eSh0Fsk;!j}N#6sSK5CdKfNgReVT*PZ&RLh7Y(j5U$t#0Tq2|mHFIgIb|5KpjHp{(=D{` z$TlNq1P&{>C(#UrHZF1P@d>ZICA|W<7tZd<6&ZZa$;8EY5#284Gb?ryvj*M-Od+iF z%x>K7b(|GE8*cTjWLd@kEpoO;8`D3CEM%FYM{*D0fX`tx$*tCl5n>lb#!yO##RHke) z!D|p8DFfUJx(;i@1wr}ZyAY3Nw`neD)r7svzElp^dO-(KZN!uUF6k4aMFr?i=R6`` zy5(@c6@dK`k`UbM1#Zg>au$ED0P+>Y>n7TE;8n>r-+RH4K*XOr8IgRjg5wGps~A(@ zvlq(_?Z0Tg|CW6YZLZa{bZG=*^D|+c$gS99EYrO0O0XD>RNQkKw7S8S zm@UbyTO}!Gj2~W-j#MW}2`B}e6h(0_h9jUIQXK~Fdkz9?p9qXK$`BAqJTI3ax7`2y zCRks;%>_4!!!p-vO!gNM@j*g z=mA+;cmcZd{bKcEOVbRvHu+vJxKZAolLD49Gr9I%d^qm0^^hy_@`in_;o8w5u1&YA zx2xlv{93>=d~X~k7V#}X$O}DSN6(?I4odhlC1=q+bN*9{gt~HpmK)bZ>XTjt|OO+BzptKLTgafOzuJD35yP!Pb z#2c6P(CV~*sQIK$LF-JHSK}t-p0^=nal7v*Abx`Jn~ugr7^x6%IhHC{USQY zH!J{&cwA|~a-IBVVjv==8D?S-s!`rhWn4=+TFN?sg{I%K7QOYq2PnS8IZ3kuy8_6VPb3^r0xsA4Yx z9tyE@?3=u6g+<>6RIn{HobMnWKE<25Jp`U-AWM^JJeZYk(J|=RgylRF>NrqeI4oo# z)`LBjFqG%_oA0`7yZs2gYdb%EwcKuVfH!0|1o&HkHQ^Q6Ek&pyHnyNtIqkryMxBt~ z+=9~ePi#*2wO{t_eeq9LK{`o5DPSkb6tKSg&1q+W+<^ronIIL`cwNpfx&)D0{=_8Z zyj;gO^`RF!;ko!iFnmKQHGkR8YzgbZf|guzLEDmNY%E^zugf;Lp@UA|;NgueLTX8= zgs~&YuF&S)`}X|UuElSi#(5z3b0%G47WNGfzJ#3AMfDJ2Z`UqZsv8jv9c+@8$*{cF zX@fpy0ADxCDQ>`yWG;1hz#q&7PvPVy0i}R*lUpDx%*vMpLI?niy}_qqtIOH|D5DFM z=-jvmBF9m;JowylXsYGD&^cevxN*8KSVth;X9A8P0#K%=BPoXl1n+jqCU~QLZ%0Zc zg}uRx@b?7jGW&-9;GaJ9p@-~GXi6pGat$KE-Oriyh>m6yJ&uWi*O1ZcF~}*Hry!G_ 
z%{?-6wK3B?l+0K&4xU(H1LCg=M+$2715*(+m{X!2f1UV)!gjR}`-4a z3yqG&&C*Tv5f_lWn{a|s7(^T_lDx@RJAtOoO%vzmGYDalw1RzDIbrz>@UG9f><9J{ zL9nq^CwL$b(U9wu*pY~b_PymPjW4q1LkYit;r5Nx^B+xKe&;_&?t@=-f8xt>e{G?5 z3&>x^;Y^qdgf*}m*|Ul^Q6&poyK*Uqauz;mfFL;4)BSg_6 z#Y|A3iIW^0wh?c}->C&3XmElt7K;6o)%EYwjM)2xFuxI;gu=*00!jhrA~l~l?Ux>K zEI?2837#)l1NNSdM-**$zJzKJKg>)$?}<>$2`_vN>vRvM$_EzcRw!TTKFrI@fiQWg zyaHPuNZJux0iz>e4eysFnf*xMFXtkNT|42v`|?(r-EX&l2JsS$g%xeV`s&J@8FE3y zeC1%5=A4@_4T=Rt<`MI4-~^s@1?|Tb-Y}ow_5JT~!#oRc2S`|0!sqS&-sg{V#Z!$1 zk$_Ub1reh?5ksR(>|2Uci-4?Yh>Cs-)bo0w7Kgj?QpFKApP|M1B|_oL5Le5%+)nn7 zq0#W2LJPwMoWBc^nDz?jNJSp7;B}cpK_J%DVYG>yiJWtbV*+wr>wWkAF$WgXP`WQ! zIv(!fZw*(DWtO>!iP&n~X9jwu;pK2M73;z~a=>#(;S}8Z*RAR9M2pzUsgp$pp5>v~ z>F6bEQo;p&vxvPb(!&x^3b;sLg^rIOdYnb|pC%r87+sW1Awmhy!_kHE#idG$1Yjmk z?+8!GYkP|%hXfsc(fY{0z)gA&*4?pZQ=6I6P41Um@5u?7Fek{R5FG}Y$Tepa;UmCo z2r~VD{@~=Jj{5iXy4>z49A7`iNk=0EYxideGb(w^O*SK>(yxTMDZL{9JIsQDZ68DM z6>Q&G`S+Xy1unMmx`6>?@^VW(`Qh&GeBsC(s4J8J5>N^_fUx|6;>Cug9r|A=Ls%Ea zIm8>D0&cHg{Cwe%rc(Z}b9*u8+j%A93;LFPHulzPh9+YqwT+&0U7q1)CLC4GbMnr=6M*jUN3T_fm3fN6LIkt5!Ipiw5 zO{HrmUf`xX9ZGmJUM9oqZi$xSd}2LnIucL}TQ|k&qbjL#c5KH*;|sDZAT8mp$Zly- z|Dh1F$W@CW6D6`H(aX8;04p8=RBScAmvS9Az}{WDt9eq>`={J`Ye6QJf~+5)f5#vu z@HdNFW&2k)Q3g_AqMUzs{bDBf zP3K*XT)sZq5}9@ZPUE>HUt$`?toP~TzLv6j32XWORa;_Y$D4%7<@jHTA%AR(rTW7fUn&4K-+bSB-@w>ZHGL!*fnR> z=>qU2EkdgsSc^`BxRo`Fv1?1p7|t7Hdc0f> z9y_wOby%C6V%wl9XEucU_|3Ug5Iuio#~_i%dyl<)~mSgU-(yMXbrTto}D7K{nP zAuMK2Z&cfs!e{P;{S@~S;I2`B}eo7`-%CiF14 z(&EYPZQErfG8f@TP*?mQDbD6#YfDL7-j0K<&;}@yZ|uCRHLq<$nL}%R;PL;~7)h6O zP60!32M=V@^m`AhCSQww0uFL`tWU>n@fb%(*eu& zb)N)u64pX4Vk-a-y_Zj+(~!+;Lp~G|xOjfka9uCtlDc<6(0gu&O?Z>Gfe6P|ay_TB zg^7g+p5%sjp!m_Tp8Z1#KbIa-Adf&`2tB& zw7cM>8`#>SN~Q!z@U?uv`Wb`|-Bq~KeE$;jf)ac_6BZN3f0Aw;-Msw1n={+;x$k^)!}Y-bprirM&>>{?wjeIbw?!%hwJ|I9 z1kn8`JlTz1!JNs;X~JYjOx7V6ws&xsM(^O4Kh=pwLF*0k8LpdW1f_`y*C3DdJ;k}z zCchiTpuU`zwdLb!3ChB#fL98*0FSwS$-&29pXuEo<@<%snN-5{PXfjJ2b&|0IIG<^ zKRI?$b!a5Cr?_Rz2jiD@E^hj6p$9PSfydr%z5nrZ@LWFw+WsSB5i*}MFT@M;JGP>! 
z6T;n45}#_l_ujKx@BZT(IrCn^Mwx0c?~#KYvwtY6JQa( z02<3F6r0;}-+i~_NiPe%W6325xT62cY8a}RBE~;MLziH zeQ@%8*32+V6CfFiZp1_|&{{L+Vh6A5+lxtp^yH)!e9x}vx$d&BrB$k%ydRE0@H-uVkgWGvC34IFe%(NF0aw-2}=i;Wb ziZSli`=4&d+VE#BxNu=j$ZSzg?$TZy19wODV2k;%H+0{F1!Z2@CA_9`;lU?7fPZJxpMn3(}iAw z+X`<(c(3IDm=Mc*Y_-6Q0WTQ?7h%$P81)fUh2!@K*%+#$I?!kYvP|$GJp=MHgugNs z-oaz~Ao!f_z`$N6w1utrb*yt}FGzLA2Tq$UxOO#U_6ZOin;;5L$P37-b$sGJ_^0*~ z;;^zTmH>1%U}EyE$^ZA@-FRjNG^j^7=(_B(xx~fesF^rY>wE>7JjwJirvd^jd6t-_ zhu(p5IM@@?4uJS7W<(a+H4WG8g;2V%O3;OLnKFZwTw2% z1Uw{@U1&m+`7E$Rpb4C`fltDH!(cDhtvP7N@10K+oF?HrA%FW)Hu6*cX7VzQwD7=V zq?7ax_$x9yR=B?oWAHR4Ad7yC@65)p-ukpZ;tI-10)D>SPPn7CzGundO@o^86D|fm zj4xyIMVgmD$XbC(e_I|j`hmLzf7`GnaU0a+cKq7636sCUcz++)taiiD+b8~Z&z?N` z$QgC}mD3vZ{k>B(;?of+I-P5Lx=a`Xf-~_a-NFaquNcK-v5*W#JH~_fc8gi3OAjgk782rZUkUG%DC~~+zIETfGzy^|16kxQ_CM8cm&r)QM__Y z?VrA=Rf}r!Wj!Ov7`YbuC*yVgyc3-No$H}@1)#qh)5$H^fBlx609i&@Xo7b^2wl*Z zT{<(nknOCi+1%S*s6Eeqd6N<1dj|#@nPE+`$Z7>&hv4@-3x9_|fzN`%KLo!WM@5O* z^G@vfm5MyD>joGkwBKunxJcX~I`;-5dZ%fo4bmzdKceNTBnR$4^6|cp2q- z1ZKl4p)_Tng5NK?b5|T$vZ%KXufYaqpt$P_>pJuzUfeHXM!p$l>1IodZf^N)=S~;A zif`(^WYYK;Gml_`9|e2+Sj@1cMIx=byuPDk87p9cwD}akTbjnsX}-H_gHQOp!8$)M zf1Iuv3rsT3!?#RELE8Ge_#CpZty&up-{AJFvhb|}?Z$u?p;;ktPZ-ksOrws{?j^-Z1F z7OEFpnr^(YCka=D7u%Oz+0YPe!1>172}tR20+i>8cz4L}K$u6OE0Jkykmh zL0IR)SV5L=<(nYgDHy$f>hkV_WL!!R_4~Q`<85^fyR~_kcN_<0c`PPa$HLM$3i!_Z zmWi)pJd(8V@x9pvcjsF;<>)Oa=Pi)kTAk>bNq>6iRmc#{r70a3Um(|f|AgM;5=1-G zja?Yi+WzX5RjplsKe5o=A$O*d8bq|{@)SlDP-aYQCw-zpv+;8VK00;T0;zt7AmmhTw zX3}>7Hfs$FQJW8A+uS*R=(xe)&A66RFV>RiVQ#U%HXXY2Ilip;0UK^SRZ?5$rH%8* zqRX(>cMj^g$zIS*L%y^lZg385QM|maX}P!S$E8$Q4!lT>$-6%Co+*|UJqSyR zGw^-GC$EPx!@G~~q!4?B%({J=rFBl8vaOTf{9Me5R;lVcF1p|XcxZl){zl}_X%p5* z!8aWch!4VK!f2?&9M|rdZ-v!+i$>b7n|^Z>6tyY|3G{sDGpAt<`ciyHK8CucM1A5D z%Tg~Q(T1Gd<+Z<;hkJiVeDJ2mtN&bU>|FI0_O0Uieap3q{-|Ec}yt3~lM$)z8~s+;nH6iW~nQwo z@t9An;hyahNV_nTo7_07?M;{Xl`WgLYrj3hFpOU_rDil#uW+n0PgqhRiXyhi_{|Bd za+Q~;QnLn*#8VrlJ}loCE11&cx8MJhAykP@r2g))@XWq34Qa+A<_o<`)Uf#8#V^0@ 
z!e)to9B|ib&t+sF2~tO>5|e`nwB2+D%$)oY+@+YD(_o|%5?zd72AQ2kIC-9_{GaQe z{ZyLG(g13=h~H1x?sbE%Ft~aZwiMlNx1K#Zc*^qN6H1`!k^1L1m9+DYz5Y6*>M@aj z|EekYg_Ve7`x$K~{_1vcImU_;Xy2r&DrQa?Gi6NOu*3gXr{a5H)U;{gjMS1KBLZSp z+;JfM6Z{|k6~vBra$wfG5VK$5RA*(rZh`N{orCUreO*RjDwx2|!(UPGh%H7@>JxE- zCuZ4kn3JPxpoB+T)c+hC-tTCf_UT_O%4K$r2<)0ve-pN1{T6JT44$l7)fo3Lg*&eJ z*tA2r{8pj9hw7R?+iz8&lJeUoe)~kMNk4@?kB#p)wwZlFde&`8nh0O)b)qt15>1z0 zVv7E=`e$F11J!gWWm(qsU$>0VnD!v5UvHa*5?jvvohO4{EC-}ZQ561WKs5MEdQWpG z9bu)GNd#*s%4py2y~FG%GjD!@IIYXY|K|fArcLX62QfSMds1oZ(y7J@`gEc=zBWF^wDA ziFhC!u1%#rLICy)oG_yu2>TVn5{Ee-<&+n=*Eosc@bOU|W?X zh9Rs6FU3~7zNk&WbdHbtZmpd*O*->r1lT;J zizj9YHM-wwA3}e5{2w;KG%Zsx5ZG}|{dtHcGzV=T;h=q(m;+sKC=mmwv~LTfJwj<<96qDjl+m04n*J=+GrmNRQ-^0Rjm6uN_e3V{DRNmO4YPs zFKznNQDqMi0|2`F+NRM6RrxJE$8bm_CnSr9_3)iss|jJobaZlA%Dw8JhtQJaQ}PdG z0J8<0`F}j@x#w0|(a8M5A<+Kd%}tCLFQdhJOi=dHVEYa{+ow8~fFovhVBZexIX>Q^ zu85)ivO?I@Ap7zoI(bXjg03vH%T@skuh_lY&O4bK212ya^YG#QJN%>L*JQ2#p&XxT zJ-QkMXIm4cEs@GMEj-03+%>Ub06z3$9V+eMigINhwia6|3tQ3~!G*B8o(Sc%7`kjR z9*D(Clz!Oo+WHP#^@~9C)s0`Tq0!jon2CN9bNDMEf(>5?WjWDq8dlrou7Gz9iNc;hGxvRiCA|n%AdP zkQqfsVBe$PtW^vqd{^vEUg9NY+5+m|e@T3n#q|NIcESwJhgp6o`T!qtbf`}Pig`+1C<`M4<@-Tqx8317Y zbqo6Sb&wx}KfGgUlSWuTZ1)+Jmdd1%>cJLHlqQp+q%0bPXY@i$jhfJpJ*vW+>SnyO zCW%PidI*8g@2@&iQ;1N)!u#1)!m`C0Qngo@orS+Z`b4#NG5h_y^hEKcdGjNYQ#-r=FkBj3-aUh7%?q@i9aZ;O@TopaJtFTPE zBTT`2wv89c@jOC}b8+<7IWPkriF+tc_YyTii?JWk&ibCSM!E4J*R^v|iDjyYe%;;p z$2G!vK_-tQaBxl75V32;v#3EywYs8w!Hjd?yOjGKd{C^x;w<Dmt3qtehM7AY`Slh8L4GcE7hpxlM7Ne&p&hP;hX z+#g}k`yov2i-JGc`I#k`JQd_c0rx1~9b+fpXu0oW_INd9w=ylb)-9a;vYP855i8d9 z#0`BOF)|+SH48J}Yg}(!GRzwSJFjUx6I;*b!sNUhrsu4cT(W?aSAE7g6EjX1Qc#?R zm4!k))HGFA;G`t69AhhCmhd@celaM?;@iMriK?nc-AprWERB$ZfFM4mw4 zz{6MFhKJ&*48n9%|ADR8YGlu3&9u3cg(hU+nOMxaKBUw*KQYtljxVN4*zUw^BNXxL zIIsNWstvn7panPxCySTNs-uL6qtpfj^qVna|3^?AXHA)>61^Lyte&IW+;70pTHw&t zWWjg?QKIJ}o^yvqxy<$`0=p&-yM}Pzi*AVr1-6+Q)uY_v2j%$|j4zjG&yRKzRwaBP z?7!>G{}ITA)yH`arPUNoimp;ij6?WAFi5-|Lk=Za&V${etpZ#Z$`Rb%Cz?gjX4`!Q~eD>Bct&y&%rs#FH}wH 
zSdgxbT*_E3m3!+7C#c<1#{KVseq$???_yz&*oAU;~JOs@YjwTUUpLhwzFV%4ov z@_ng%T}Qki3>%wwz64Ey^?&W>-%D6`z@hTRI-s!vb!{nMAtPqW zL&dL?L`e+JiDWoly&vI>r}?252Y%T7(tz&?%dQhXc4%{ExaaoqWA@bUT!O|P#$SV- zM)O8P)${SS+LSN;IECe6gwET6ZB2b1n;3BXX!x|%tMk>SBQYkp_HNtw(l z0R#r!{`ziw^M0BlDvd3mYDZH_zIg`LeM4KZ?mN=F*fQ7PtlR$Nj>9>N7^hfrGb^C{ zQ|uBrBdO>-^!eq;NduO2zc|R&>^`Z!MHXX#X4@~JpCA7a&I6xPHZ6YE-MAR@6JnVcW)o}XN(H|)k@gJW(s$QMTpZdQ(chH7BR~X zQpI;E_DxDtT0Vg;BySr29hwuKk=ZBi>(0Kic};u?vU*1oxN)O4;oim#oElFfh9)dq zStrgSml76U3I;1+HTu-3irtqrGg;A72xtns1MdE9MD1>&{*DQDg~K)=L_~{@wM!pQ zM_D)!f1l|Z@uzDt(Ik~)u&5oMl$U%fl=H3OP-J=ctU)#%!ID|@-#dP}_ue-4jF`@r zLJDz~r#BM~jE1didp?Cj|Nl62`pRwjY(w%c90HBaZJpS=`0K1&#GntSCJE;wR}(jg zFK33-I2JtOW}X%^K@jScKrgT(4k0@Pd`joC)hR^t$D`tMvc4THj5vaZM)h#5@ z{xDTTO|~0mWGPp=?9Y@F81p5VHQTqeQdnlG?r2vGqVO!kYjRXVv(`7qQiCrDy*TKH z)+Cur94X+Pw_kFm!RTBlpHZ1v7mos)uxqFi6zg7?tmY-@8fQ71Eb}6nWSTVsdbRc> zD(`p70A~fSKp+a&m9LNkXGiC*fH|*$>FhJ7{pz9T+BnQ~a z%LvT2D;%D`&4hJ`*=05keu!Y&opEbhoGe=@jU=z*#ub}iG7_=v+;xZp-c3HO`-Oq` znsdyYYg+;Dxqa*@3TJPFWnE^r9-yGgX#Ce&^CJnLIc(~Z>^^Au5lC`%0fFH&cH$go zGR+YyPBv1bY7|C#xhqPOhOuAP=|{nYix`RWuS^RvXX4u#9RpvF#hFRXDX@0--%!H& z8Vs8^Ds%IWz`!XBvBpZUWi3hz#Xm8 zB8bxx1e$ZKw9!81d*s3JO9LN-)fHWImTW2Dw!gpR1cbp8TQwS-u`ez)WmOje9`TU4 znQ(|W{gs;xB`@zFK$#)7(b-DSC|eO7RqRO=e|bsp;taaN*wC{uLHD(+Fdjn$J7ZN} zj@=7Rb~G6E&)X%Jbftv=24>STNF0Vj{1i&`=nQn`#T>g6SV%0P*h`2%jj^6HM@UY( zu`Zp{#1hK)-q1?}es536eyWTLxb2pU2g37LM64O!U#(=Wd7tn2tAL?e-k=^31uM#W2G*Z0Q~Z_E-qV zs|lI8CQ@(~U5**HeI=h*z3&e1>Rs@Vp4^vV<07L1W(s=(d`_^z%bO$=C%q_vPuvm? 
zLGK-pa@=ZL@+lAkoV=K!r2{ZK?}uma5gF)QC>Dp5n2d3T4jD=H=uQacY@Rt$sj5{*} zQ)521%Ux{FGOOH2yIvT4uHBkx6>!__I7gD0Mx**@Cx@SIG=mHX}jX&7{^yk z!S^X7_Vf5Wt7}UIOc;L%mE3Uvv1+}b`80szg~b8s`?GBpQ2}RVTUXXAEd=0H@m7+= zAz}p9BCt-@>sd+sY~RAUa+hu5G=vDQaWo}b#`WzqEaHlcvE0ybj2mM5nkOxw#DwpN zOqGBD4PkJqpP6%4DgOxj##NYwZ8=im`J2yJ?CXx&rFdp(2=hyKtV@Y)^A^e?qY$(D z?(i)2vc$5~J5vR`d&=1V1ylOhQprm@2NAN4B1!<}LlSe^6U-boH%$F#z0-Pf*XdRx3G`GT{sSod|Zw;1Eu$YFlq=h~VVi_)S{sDS2p&$ zLV`iqvK;!d4~lCU9InQL2R^m7QVdR4yWsika2JBnN{DXTNy?Tac<@EsbE?F|j5l8X zW1}_ded&0lDBuBp{Vs4Am9A3Bm%%9uXQXb1nQ^<_z98=dfj)Do(;U>{porv7P!_Kof{(aQ4jK!7pdD~N+p2ZiOj zqlP1d4Bb&wAA7v@Gbq>R+oQEl#kqbS)gwpa$ic1lYe~vdM}XeGR4nVQancVgVJw8n z`AEVY9!p3Om5oN7E|G@e*d=^`?08k%?FN)@Y%ylPQw_9H+KXC2|5P#G86E*5^cr zpMtCJk%3w7B1-j(Gd51hon5oZVP@REy5 zz-Bf6+xU^Gk`U0SZ;KcDi;q;DljRZzGBeJ@BSA0B(Jm&s!9`}0RjDA*bDEH~uuz`x zJ~rOR{@%|B36Ha;v@D4+HQJ7Cd3BZ)CI$F*ufrO%9YtwU+cXQhKtt3m&F;xgennSr zFyqvxfcM-sZUS_0&f&Ps8Y!JTTGulpX4N&61r}f}3yvj3$3Jj&>GVNL>J|bDqo2FQ zD_1^@@-K4bVOpQ_1{}8VCCbE_w@V2p-^f&P2*8?RwoKQBwOVWy3aw#kXDwx27ZIkN zlm$3C!b~nfd>p5dwKS~zissZT+BC0%HDAiKvF3$Vx%4Kpl0F4Y7@cfJP?>o&faqD5 zP*yXVE7jX6Ro|;j94foljm;l-VyK&74JjTXh9j?iM{;tUv&uyfdv_8BuAUjVxYHxa z#SQ{hVZVs9X7tcfegH}edty7I*w;H2!7_y6#9St;B$V$5P~2HKQ0WV??JgUIZz?FZ z1kTH_=4)CHnWsi)`BI$5dK9oYZ~+$9?^)HX)B z)1bV{!K35x?Of%~@yn9Hx-pfuIq-5aEH~tdd(Y3=ZctnJN(hs&rHnCDa6mqTC0J2($q& z6ru zMP0#WCe}y2kZQ2TwlU9wD4d6|A5&}1r5&B9S%-VgrCnLK_z2*1`ixsT?MMm7!<9Hk z7`{0AadzmFX`p!L!}HmVnC(#v0+ztz*{;J?vM6!){IJN&{xjk-$iW#f;opeQl-G#} zFsXnO0jZ!n*quBw0~T)h+?ANn8UB3=0&2iR728Czbj%uF+kG&CScZmVarq$tZ+)w+ zoVGhlnbFFW$SF>}oqHR=d$IusLS!>*?q$*Yt0T_z3T!pHus0*;Uv4DmTGoC?)6rmB zfOAcA7QJW;bc5#40j6lZk`3EIfOEq`>zTTHo}8IFZ^W=iIJyj4zatZAQhA6z*%Oo@ zDO=uF1_Cq(1Y`Djxal87VU;2?_&#V^W}Xv zxL~>)Cs@RDEW>L#11`gVs?9E82J9JgDh2}u9Vp|pqw@$4x8Y;9#U@;WOwN8CfK_g! 
zSSHEN1kHIDXR(#=c82oJI_<`~Zv@JFc-9;aRTSRNFgxcWikCL3%#2*{)Q;Hi0TfvaC$dl!hLk0N&0H_xYybmRRJE`F*GKW-){DQ+sdZuLS~C zp#p#OEaZ^c1xEl%&r8b>_|A~e3JwC{gE{X$2YZF**=XA{76TiyN)ne5z_CD1P0*Vu z4a3oiXTMh$%Op6K@GB_a<8$o}X7FpV{(L(4^!P{R%FnDmXY`ofd{|e>(B2#QYl;A` ztN%dGfK8z(|4gberyrC&gQ6E}Fvt)wYdq)CMKJNDF*8+G@Q45a5eZ2|K~z#X1J3gk z;AQqWh(67;ktjKqHkL6ZJOs)&`=u2qU(BAjU=RDriQKcg+YXOsgni}JS=F{(sehJ^ zmx|=LQQCH^rApXE?>`jXrBr($<<1%b=F_}11IBj-+xd%_aNOV+%AYuc zkW3XSyxgf5XobU7UftJX1Wp~^NidPs7h4)c)ie+`(%VhC}hm6G4XJ9s(;B~gF|JX$ETcf639v8j5u#jPps3^!GNdlda1vQVy+#= z$*Fkuk=ywSOa70(th-ks1N~u<$*7Y)xjvocv{57Wbq4TQj)Z zBw~A$_+(@M(k>C@Wwu5O@T&9FA0iUXZugIc{(W&)!cJ(dPP*VWq=&>@oAK0Q8iqST zKv3L8Ctqil%6+N%#{1c}cwQCfQx8kY!OZ|uN;_x1ESNUPnjMYQ2^O$XBr2~+P5srk zCf<`!RK7=c9i>vwIB|>gDwq1HSmj1ntFMCOkt@Xo=ilnt(u$)ou%i)ibvNhj|9l~ zNDj&6S1r1-&_e5nMt#;b0?I*46MFvX5dph8ezi*alU|}PLU~&Z&Z?IX#kI6J4z_4< zJ7`+p(``L$4kbO^cX-;T9>x8FB>kBRaJDZucH$S8L*r2ZsZI@5k5CZ9~F( z8#F%?y|^JB&$dY-zd&KU31hU_sPdz(Gu~Om=ki5TJA`^cWM30m7TT28X;i-u)Puh{ zPTTITur(=eyvV3I5H;BgTUI8Qh-}_0h=B=(xe0RdzoOy=dc2ZGp?Z`4c8Z^f<6;}j zH9@%0!o>~b1>4@WzSPN_yM2n&E{6xswOgz^T)cqZB@Hzs^5)^+Wl>W=VGIm0%1 z=DQ8sN0Z5XF^*cBW^<*_Cwk=f{yt|(TuM4#HY-mF5f{_6;QY#DFcZ33QX7lAE^Z^r z#cxPutG}Z2Wk_qK^oTRPvIzW}z*KouSdCGqf$}Eesn-Gt+==DpLs~}CN%c|QdN1mc z!D`x)jxHE0KM#IQPGtJP=$%ej+EaE+_!qtJM+?YVl8a@mF}xR$ME0hs+FZj&Es4^m z4K6s@46;~`V*cKHemPBXceS78SEbX8Skpp51&Vp4Ks?Z4^thy%`x#Gw??c=h*F{?* zIh?B8r3Rk=0`P*XzT9v~za_#>udH2U=9K|2)cZ^kQ`EPxRXat8;>lo&6w}4s2%Ch@&+lyrFv_{L2=E0JSd9~)Vkaw zc;qNF);4#f$lR?jHshMA>;0q8GLHtji&~ZRtr{&jvwZFeq$MBKE?9|WLD2Z%5h6(v zE3A9mwV&Db@@WrTd1Y$Ywm5e=SDOALKu!y>+vCg9_pL$Hq|7~gu5hKoT!QwsGMjNc zBhMR$VH+Zzl_?%T>?Lu!lBKp&l+6vvyytZAC6>Cat=^c$7`ek+&dbx@gK6L_+Yq`@ zsL6PL9w8zz7MKocSh3coM4Q5so38`QTde zg1q2%P(o4EmG9I0W=r`eKM}^MCl1ZwCwD+n3yuOe__rXkhief0&;;_pS27FOAG8!}r?bIlvFjzt@?(&}E1Rtf^*m z-?f^-Ma6DX%WtRk)gTRtH-(7PhJ}?C|KD}KI$bPvzp^oY0W03O8qoMsG(29LCU8-x zU$|O9(w}Gr8827G2}5YKnB~-MAYceNat5ilX7a0lM_w$?6rsoUym;(^0Ta_-0-s%+ zClBslqJl1QH5i}Q%~yQ4B{e!;OQtb*OP(Uf1QWI%98-aho!o^L+19M;)=1a4 
z%6?B7z9w|ja9Au4Q^6*sze3tYjJ20DVB{s`dtuclf*)kXUWSRWu)o;p7Ubaxp_eCM zZ|%z-;mVTTpD)d)>VD7z)G{f+7Xz^zc5h9SHvbau1U_qe`b8m6OILgD;YBLHBDT1U zB%B|s>UT43YwZKRoL!yc2rKxN$|DvLV={7XH^AZgm(z#V!CmKW1E!lINf90i0WRz(Llrnm9^_Em)=Y2jDEUGM zr{?;`%u5y|Is+GH565u0;(?)+V$wkuLuA&bH{I*0R^#O&hF{&OF*6*QLme16*T+mS z$UJb9jGDG<$5?f&JF3xu1F*HdRiQLIv-mk|>2R(0P)*E%xkbOH$fkn*!uB>+bjwSd zj4arBJ<;o+VBhSX+!0}ahXbNLs4gX{$qy&9kYMPH!C%hpw=zk`(D=!1XUHTV!$6bZ zx!Y96IJaS0&IYY0<)Fp;cRt5~)~^90w|es>iz0p%v)r2>yDgsRQQz=W?*xl9)4f?M z3+JkC4gua&t^~1lk11kJks~>CDGQ^_I%i$vsJ9-)#}eKrXLXsGuP?RbOIb@}!WN%T zpf9vs->rRw`neKVB*}KYENp#m|Cn!hy}B8;sO7fxN*qAj}62Ow(P0VGmY`}Rpl2Zgi>%V(Ej5jXcOGT?_vd1ZCgFv;`k&W529g+&Cxk}EZo~dzNCKot7=Er3k)AC zI-dEzOd5az#&@E8g8IGtIYUtYH9(c6Z&p%AX)Tz8JSpk9#Hjy(zzU&pSUC0QYZg)+ zuIxfNg}Nkm{~28Z*=06%=l+~#=^$0*eOOTARgsG%X~wmyrLFn$(Q<%V@xLFJX6icJ zARxPMHG7*EczduQ{i_a}7k^D;DYLxxWH!Ac(gF%+#BcaWLxr{LG?jKtSvZoX^6lAf za{>&9F1Vp4c*>&b`lq8O>xXJX3czhhpI^1qafr-2n`}M9Lz4r<_rFniYIeP9Q7gBD z)0vqM6cM`>1C5RrhLsY)i>YV&PYbqoH)RkiaXxZUU;nLd4lEXc7TUPss7?G|;5#)|H2XPxQH1F^%7Zg?Y#Hu4qa^|ERZ!{AB z96F*J`^?O$Xnhn?GS~r#*N!D7gEddMZ8!p#fSnY_)wN`r@gXTZGs&9M$4T?DvY`e& zyV7JLZ@_i^#RNFO^c?2&TD1pR2%}+t&Ur0(XfhgbuOZY%)SWct*7Cex;T*eoSpv#! zl^qKeHUjsY4o9XoJrv?W92^DmBo;`(pHp_4n5x*q3x1`H)wo4ax6AB-4`x91vHr?eYZaE$vX%IW>ga1d5qSAl zz#=n7n+ggfjKRu(w8Jf0kAxlvJ2tgglVpnneU%)%4DQPR97cD&;27n{KyZ0KtxNB? 
zM3OwagBT{3J?b8`b5@-TMR=mFi`)k~mYA^+?yGigabY(g1?<^1qkIn%Dw7AwRitNk zG;4C&R_^0I%?u3GtHgaQPeWZ8-#rHMCMIyX&u5| z-5TKK{>Xn~_6eESUKSP@z2a>)3(k2aKU87L`9mquTnc-JC#`|z{Pjd&yAkO%5BNwJ z@JOPwTFINN64LB|9FAcaT^7a_Y5qBRm^R}IwCP+(ojpijp|m!h{>kCHJaHs4-hjaWH#zqgnD6&U?{7o9wb^9EzqW0SVY|4j% zYs5XPgKHDoPgg9YCw5x=C1=4$6))O(MCGna4M>Sxjy= z0vV@C5q|R0`o^HR3KBZ3f+Eci*S*@iV$6xLq$i#Am9;eNHbCmqt6a-T#5HZnnneaw zS;zg;L6Qbp;eD@D()yJt1)%eORcv*M+`>bHGD=`?NFXZ*16C~eDd%P&e;ceex_@@k z@fZnb;b)j*tBR@g=*S(&Rh%}PKr!eGI^FiC>U2&P)==h%6vD>IioI)jI&n`uCW6~UH z?3Ror16b{?Dv@XvA0)9PhXOk;Oj&`M5&Qx43djohe`+})z None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.TEXT_EMBEDDING) - - # Use `mxbai-embed-large-v1` model for validate, - model_instance.validate_credentials(model="mxbai-embed-large-v1", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/mixedbread/mixedbread.yaml b/api/core/model_runtime/model_providers/mixedbread/mixedbread.yaml deleted file mode 100644 index 2f43aea6ade2c6..00000000000000 --- a/api/core/model_runtime/model_providers/mixedbread/mixedbread.yaml +++ /dev/null @@ -1,31 +0,0 @@ -provider: mixedbread -label: - en_US: MixedBread -description: - en_US: Embedding and Rerank Model Supported -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.png -background: "#EFFDFD" -help: - title: - en_US: Get your API key from MixedBread AI - zh_Hans: 从 MixedBread 获取 API Key - url: - en_US: https://www.mixedbread.ai/ -supported_model_types: - - text-embedding - - rerank -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - 
label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/mixedbread/rerank/__init__.py b/api/core/model_runtime/model_providers/mixedbread/rerank/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/mixedbread/rerank/mxbai-rerank-large-v1-en.yaml b/api/core/model_runtime/model_providers/mixedbread/rerank/mxbai-rerank-large-v1-en.yaml deleted file mode 100644 index beda2199537450..00000000000000 --- a/api/core/model_runtime/model_providers/mixedbread/rerank/mxbai-rerank-large-v1-en.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: mxbai-rerank-large-v1 -model_type: rerank -model_properties: - context_size: 512 diff --git a/api/core/model_runtime/model_providers/mixedbread/rerank/rerank.py b/api/core/model_runtime/model_providers/mixedbread/rerank/rerank.py deleted file mode 100644 index bf3c12fd86dc35..00000000000000 --- a/api/core/model_runtime/model_providers/mixedbread/rerank/rerank.py +++ /dev/null @@ -1,125 +0,0 @@ -from typing import Optional - -import httpx - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - - -class MixedBreadRerankModel(RerankModel): - """ - Model class for MixedBread rerank model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n documents to return - :param user: unique user id - :return: rerank result - """ - if len(docs) == 0: - return RerankResult(model=model, docs=[]) - - base_url = credentials.get("base_url", "https://api.mixedbread.ai/v1") - base_url = base_url.removesuffix("/") - - try: - response = httpx.post( - base_url + "/reranking", - json={"model": model, "query": query, "input": docs, "top_k": top_n, "return_input": True}, - headers={"Authorization": f"Bearer {credentials.get('api_key')}", "Content-Type": "application/json"}, - ) - response.raise_for_status() - results = response.json() - - rerank_documents = [] - for result in results["data"]: - rerank_document = RerankDocument( - index=result["index"], - text=result["input"], - score=result["score"], - ) - if score_threshold is None or result["score"] >= score_threshold: - rerank_documents.append(rerank_document) - - return RerankResult(model=model, docs=rerank_documents) - except httpx.HTTPStatusError as e: - raise InvokeServerUnavailableError(str(e)) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke( - model=model, - credentials=credentials, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. 
At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. Its capital is Saipan.", - ], - score_threshold=0.8, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - """ - return { - InvokeConnectionError: [httpx.ConnectError], - InvokeServerUnavailableError: [httpx.RemoteProtocolError], - InvokeRateLimitError: [], - InvokeAuthorizationError: [httpx.HTTPStatusError], - InvokeBadRequestError: [httpx.RequestError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.RERANK, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", "512"))}, - ) - - return entity diff --git a/api/core/model_runtime/model_providers/mixedbread/text_embedding/__init__.py b/api/core/model_runtime/model_providers/mixedbread/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/mixedbread/text_embedding/mxbai-embed-2d-large-v1-en.yaml b/api/core/model_runtime/model_providers/mixedbread/text_embedding/mxbai-embed-2d-large-v1-en.yaml deleted file mode 100644 index 0c3c863d06b89a..00000000000000 --- a/api/core/model_runtime/model_providers/mixedbread/text_embedding/mxbai-embed-2d-large-v1-en.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: mxbai-embed-2d-large-v1 -model_type: text-embedding -model_properties: - context_size: 512 -pricing: - input: '0.0001' - unit: '0.001' - currency: USD diff 
--git a/api/core/model_runtime/model_providers/mixedbread/text_embedding/mxbai-embed-large-v1-en.yaml b/api/core/model_runtime/model_providers/mixedbread/text_embedding/mxbai-embed-large-v1-en.yaml deleted file mode 100644 index 0c5cda2a72a99e..00000000000000 --- a/api/core/model_runtime/model_providers/mixedbread/text_embedding/mxbai-embed-large-v1-en.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: mxbai-embed-large-v1 -model_type: text-embedding -model_properties: - context_size: 512 -pricing: - input: '0.0001' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/model_provider_factory.py b/api/core/model_runtime/model_providers/model_provider_factory.py index e2d17e32575920..1370676f0e1d85 100644 --- a/api/core/model_runtime/model_providers/model_provider_factory.py +++ b/api/core/model_runtime/model_providers/model_provider_factory.py @@ -3,61 +3,116 @@ from collections.abc import Sequence from typing import Optional -from pydantic import BaseModel, ConfigDict +from pydantic import BaseModel -from core.helper.module_import_helper import load_single_subclass_from_source from core.helper.position_helper import get_provider_position_map, sort_to_dict_by_position_map -from core.model_runtime.entities.model_entities import ModelType +from core.model_runtime.entities.model_entities import AIModelEntity, ModelType from core.model_runtime.entities.provider_entities import ProviderConfig, ProviderEntity, SimpleProviderEntity -from core.model_runtime.model_providers.__base.model_provider import ModelProvider +from core.model_runtime.model_providers.__base.ai_model import AIModel +from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel +from core.model_runtime.model_providers.__base.moderation_model import ModerationModel +from core.model_runtime.model_providers.__base.rerank_model import RerankModel +from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel +from 
core.model_runtime.model_providers.__base.text2img_model import Text2ImageModel +from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel +from core.model_runtime.model_providers.__base.tts_model import TTSModel from core.model_runtime.schema_validators.model_credential_schema_validator import ModelCredentialSchemaValidator from core.model_runtime.schema_validators.provider_credential_schema_validator import ProviderCredentialSchemaValidator +from core.plugin.entities.plugin_daemon import PluginModelProviderEntity +from core.plugin.manager.asset import PluginAssetManager +from core.plugin.manager.model import PluginModelManager logger = logging.getLogger(__name__) class ModelProviderExtension(BaseModel): - model_config = ConfigDict(arbitrary_types_allowed=True) - - provider_instance: ModelProvider - name: str + plugin_model_provider_entity: PluginModelProviderEntity position: Optional[int] = None class ModelProviderFactory: - model_provider_extensions: Optional[dict[str, ModelProviderExtension]] = None + provider_position_map: dict[str, int] = {} + + def __init__(self, tenant_id: str) -> None: + self.tenant_id = tenant_id + self.plugin_model_manager = PluginModelManager() - def __init__(self) -> None: - # for cache in memory - self.get_providers() + if not self.provider_position_map: + # get the path of current classes + current_path = os.path.abspath(__file__) + model_providers_path = os.path.dirname(current_path) + + # get _position.yaml file path + self.provider_position_map = get_provider_position_map(model_providers_path) def get_providers(self) -> Sequence[ProviderEntity]: """ Get all providers :return: list of providers """ - # scan all providers - model_provider_extensions = self._get_model_provider_map() + # Fetch plugin model providers + plugin_providers = self.get_plugin_model_providers() - # traverse all model_provider_extensions - providers = [] - for model_provider_extension in model_provider_extensions.values(): - 
# get model_provider instance - model_provider_instance = model_provider_extension.provider_instance + # Convert PluginModelProviderEntity to ModelProviderExtension + model_provider_extensions = [] + for provider in plugin_providers: + model_provider_extensions.append(ModelProviderExtension(plugin_model_provider_entity=provider)) - # get provider schema - provider_schema = model_provider_instance.get_provider_schema() + sorted_extensions = sort_to_dict_by_position_map( + position_map=self.provider_position_map, + data=model_provider_extensions, + name_func=lambda x: x.plugin_model_provider_entity.declaration.provider, + ) - for model_type in provider_schema.supported_model_types: - # get predefined models for given model type - models = model_provider_instance.models(model_type) - if models: - provider_schema.models.extend(models) + return [extension.plugin_model_provider_entity.declaration for extension in sorted_extensions.values()] - providers.append(provider_schema) + def get_plugin_model_providers(self) -> Sequence[PluginModelProviderEntity]: + """ + Get all plugin model providers + :return: list of plugin model providers + """ + # Fetch plugin model providers + plugin_providers = self.plugin_model_manager.fetch_model_providers(self.tenant_id) - # return providers - return providers + for provider in plugin_providers: + provider.declaration.provider = provider.plugin_id + "/" + provider.declaration.provider + + return plugin_providers + + def get_provider_schema(self, provider: str) -> ProviderEntity: + """ + Get provider schema + :param provider: provider name + :return: provider schema + """ + plugin_model_provider_entity = self.get_plugin_model_provider(provider=provider) + return plugin_model_provider_entity.declaration + + def get_plugin_model_provider(self, provider: str) -> PluginModelProviderEntity: + """ + Get plugin model provider + :param provider: provider name + :return: provider schema + """ + # fetch plugin model providers + 
plugin_model_provider_entities = self.get_plugin_model_providers() + + plugin_id, provider_name = self.get_plugin_id_and_provider_name_from_provider(provider) + + # get the provider + plugin_model_provider_entity = next( + ( + p + for p in plugin_model_provider_entities + if p.declaration.provider == provider_name and (plugin_id and p.plugin_id == plugin_id) + ), + None, + ) + + if not plugin_model_provider_entity: + raise ValueError(f"Invalid provider: {provider}") + + return plugin_model_provider_entity def provider_credentials_validate(self, *, provider: str, credentials: dict) -> dict: """ @@ -67,15 +122,11 @@ def provider_credentials_validate(self, *, provider: str, credentials: dict) -> :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. :return: """ - # get the provider instance - model_provider_instance = self.get_provider_instance(provider) - - # get provider schema - provider_schema = model_provider_instance.get_provider_schema() + # fetch plugin model provider + plugin_model_provider_entity = self.get_plugin_model_provider(provider=provider) # get provider_credential_schema and validate credentials according to the rules - provider_credential_schema = provider_schema.provider_credential_schema - + provider_credential_schema = plugin_model_provider_entity.declaration.provider_credential_schema if not provider_credential_schema: raise ValueError(f"Provider {provider} does not have provider_credential_schema") @@ -84,7 +135,13 @@ def provider_credentials_validate(self, *, provider: str, credentials: dict) -> filtered_credentials = validator.validate_and_filter(credentials) # validate the credentials, raise exception if validation failed - model_provider_instance.validate_provider_credentials(filtered_credentials) + self.plugin_model_manager.validate_provider_credentials( + tenant_id=self.tenant_id, + user_id="unknown", + plugin_id=plugin_model_provider_entity.plugin_id, + provider=provider, + 
credentials=filtered_credentials, + ) return filtered_credentials @@ -100,15 +157,11 @@ def model_credentials_validate( :param credentials: model credentials, credentials form defined in `model_credential_schema`. :return: """ - # get the provider instance - model_provider_instance = self.get_provider_instance(provider) - - # get provider schema - provider_schema = model_provider_instance.get_provider_schema() + # fetch plugin model provider + plugin_model_provider_entity = self.get_plugin_model_provider(provider=provider) # get model_credential_schema and validate credentials according to the rules - model_credential_schema = provider_schema.model_credential_schema - + model_credential_schema = plugin_model_provider_entity.declaration.model_credential_schema if not model_credential_schema: raise ValueError(f"Provider {provider} does not have model_credential_schema") @@ -116,14 +169,38 @@ def model_credentials_validate( validator = ModelCredentialSchemaValidator(model_type, model_credential_schema) filtered_credentials = validator.validate_and_filter(credentials) - # get model instance of the model type - model_instance = model_provider_instance.get_model_instance(model_type) - # call validate_credentials method of model type to validate credentials, raise exception if validation failed - model_instance.validate_credentials(model, filtered_credentials) + self.plugin_model_manager.validate_model_credentials( + tenant_id=self.tenant_id, + user_id="unknown", + plugin_id=plugin_model_provider_entity.plugin_id, + provider=provider, + model_type=model_type.value, + model=model, + credentials=filtered_credentials, + ) return filtered_credentials + def get_model_schema( + self, *, provider: str, model_type: ModelType, model: str, credentials: dict + ) -> AIModelEntity | None: + """ + Get model schema + """ + plugin_id, provider_name = self.get_plugin_id_and_provider_name_from_provider(provider) + model_schema = self.plugin_model_manager.get_model_schema( + 
tenant_id=self.tenant_id, + user_id="unknown", + plugin_id=plugin_id, + provider=provider_name, + model_type=model_type.value, + model=model, + credentials=credentials, + ) + + return model_schema + def get_models( self, *, @@ -142,7 +219,7 @@ def get_models( provider_configs = provider_configs or [] # scan all providers - model_provider_extensions = self._get_model_provider_map() + plugin_model_provider_entities = self.get_plugin_model_providers() # convert provider_configs to dict provider_credentials_dict = {} @@ -151,16 +228,13 @@ def get_models( # traverse all model_provider_extensions providers = [] - for name, model_provider_extension in model_provider_extensions.items(): + for plugin_model_provider_entity in plugin_model_provider_entities: # filter by provider if provider is present - if provider and name != provider: + if provider and plugin_model_provider_entity.declaration.provider != provider: continue - # get model_provider instance - model_provider_instance = model_provider_extension.provider_instance - # get provider schema - provider_schema = model_provider_instance.get_provider_schema() + provider_schema = plugin_model_provider_entity.declaration model_types = provider_schema.supported_model_types if model_type: @@ -170,13 +244,11 @@ def get_models( model_types = [model_type] all_model_type_models = [] - for model_type in model_types: - # get predefined models for given model type - models = model_provider_instance.models( - model_type=model_type, - ) + for model_schema in provider_schema.models: + if model_schema.model_type != model_type: + continue - all_model_type_models.extend(models) + all_model_type_models.append(model_schema) simple_provider_schema = provider_schema.to_simple_provider() simple_provider_schema.models.extend(all_model_type_models) @@ -185,95 +257,82 @@ def get_models( return providers - def get_provider_instance(self, provider: str) -> ModelProvider: + def get_model_type_instance(self, provider: str, model_type: ModelType) -> 
AIModel: """ - Get provider instance by provider name + Get model type instance by provider name and model type :param provider: provider name - :return: provider instance + :param model_type: model type + :return: model type instance """ - # scan all providers - model_provider_extensions = self._get_model_provider_map() - - # get the provider extension - model_provider_extension = model_provider_extensions.get(provider) - if not model_provider_extension: - raise Exception(f"Invalid provider: {provider}") - - # get the provider instance - model_provider_instance = model_provider_extension.provider_instance - - return model_provider_instance - - def _get_model_provider_map(self) -> dict[str, ModelProviderExtension]: + plugin_id, provider_name = self.get_plugin_id_and_provider_name_from_provider(provider) + init_params = { + "tenant_id": self.tenant_id, + "plugin_id": plugin_id, + "provider_name": provider_name, + "plugin_model_provider": self.get_plugin_model_provider(provider), + } + + if model_type == ModelType.LLM: + return LargeLanguageModel(**init_params) + elif model_type == ModelType.TEXT_EMBEDDING: + return TextEmbeddingModel(**init_params) + elif model_type == ModelType.RERANK: + return RerankModel(**init_params) + elif model_type == ModelType.SPEECH2TEXT: + return Speech2TextModel(**init_params) + elif model_type == ModelType.MODERATION: + return ModerationModel(**init_params) + elif model_type == ModelType.TTS: + return TTSModel(**init_params) + elif model_type == ModelType.TEXT2IMG: + return Text2ImageModel(**init_params) + + def get_provider_icon(self, provider: str, icon_type: str, lang: str) -> bytes: """ - Retrieves the model provider map. - - This method retrieves the model provider map, which is a dictionary containing the model provider names as keys - and instances of `ModelProviderExtension` as values. The model provider map is used to store information about - available model providers. 
- - Returns: - A dictionary containing the model provider map. - - Raises: - None. + Get provider icon + :param provider: provider name + :param icon_type: icon type (icon_small or icon_large) + :param lang: language (zh_Hans or en_US) + :return: provider icon """ - if self.model_provider_extensions: - return self.model_provider_extensions - - # get the path of current classes - current_path = os.path.abspath(__file__) - model_providers_path = os.path.dirname(current_path) - - # get all folders path under model_providers_path that do not start with __ - model_provider_dir_paths = [ - os.path.join(model_providers_path, model_provider_dir) - for model_provider_dir in os.listdir(model_providers_path) - if not model_provider_dir.startswith("__") - and os.path.isdir(os.path.join(model_providers_path, model_provider_dir)) - ] - - # get _position.yaml file path - position_map = get_provider_position_map(model_providers_path) - - # traverse all model_provider_dir_paths - model_providers: list[ModelProviderExtension] = [] - for model_provider_dir_path in model_provider_dir_paths: - # get model_provider dir name - model_provider_name = os.path.basename(model_provider_dir_path) - - file_names = os.listdir(model_provider_dir_path) - - if (model_provider_name + ".py") not in file_names: - logger.warning(f"Missing {model_provider_name}.py file in {model_provider_dir_path}, Skip.") - continue - - # Dynamic loading {model_provider_name}.py file and find the subclass of ModelProvider - py_path = os.path.join(model_provider_dir_path, model_provider_name + ".py") - model_provider_class = load_single_subclass_from_source( - module_name=f"core.model_runtime.model_providers.{model_provider_name}.{model_provider_name}", - script_path=py_path, - parent_type=ModelProvider, - ) - - if not model_provider_class: - logger.warning(f"Missing Model Provider Class that extends ModelProvider in {py_path}, Skip.") - continue - - if f"{model_provider_name}.yaml" not in file_names: - 
logger.warning(f"Missing {model_provider_name}.yaml file in {model_provider_dir_path}, Skip.") - continue - - model_providers.append( - ModelProviderExtension( - name=model_provider_name, - provider_instance=model_provider_class(), - position=position_map.get(model_provider_name), - ) - ) - - sorted_extensions = sort_to_dict_by_position_map(position_map, model_providers, lambda x: x.name) - - self.model_provider_extensions = sorted_extensions - - return sorted_extensions + # get the provider schema + provider_schema = self.get_provider_schema(provider) + + if icon_type.lower() == "icon_small": + if not provider_schema.icon_small: + raise ValueError(f"Provider {provider} does not have small icon.") + + if lang.lower() == "zh_hans": + file_name = provider_schema.icon_small.zh_Hans + else: + file_name = provider_schema.icon_small.en_US + else: + if not provider_schema.icon_large: + raise ValueError(f"Provider {provider} does not have large icon.") + + if lang.lower() == "zh_hans": + file_name = provider_schema.icon_large.zh_Hans + else: + file_name = provider_schema.icon_large.en_US + + if not file_name: + raise ValueError(f"Provider {provider} does not have icon.") + + # get icon bytes from plugin asset manager + plugin_asset_manager = PluginAssetManager() + return plugin_asset_manager.fetch_asset(tenant_id=self.tenant_id, id=file_name) + + def get_plugin_id_and_provider_name_from_provider(self, provider: str) -> tuple[str, str]: + """ + Get plugin id and provider name from provider name + :param provider: provider name + :return: plugin id and provider name + """ + plugin_id = "langgenius" + provider_name = provider + if "/" in provider: + # get the plugin_id before provider + plugin_id = "/".join(provider.split("/")[:-1]) + provider_name = provider.split("/")[-1] + + return plugin_id, provider_name diff --git a/api/core/model_runtime/model_providers/moonshot/__init__.py b/api/core/model_runtime/model_providers/moonshot/__init__.py deleted file mode 100644 index 
e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/moonshot/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/moonshot/_assets/icon_l_en.png deleted file mode 100644 index a411526d3d69117dc37e7859666e639d3cda433e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 13654 zcmZ`=WmH>TvxeeO-WGQ$?(XjH6eqZA@!(Qiiv@?`rC4!_Ly_X{THIX|AUAyf?~jvx zva;6Mv&YWNGf!eP)Z{VIiP7QU;4l>xWVGPm;KyM1W~j)p&u{Kh<*)_qi-Lg%9NZ^? zw+mht#AyZxM+v7WBl#JadzR;)Yiu0|-JHx?rwKnjN5uLTej09sopLJo(S3ED1^zc} zc-#Qf2ApeYh|IroGb6Uqup2W=R^jYxZ1}y%c<(~O7tFY13 za@OuciNY^4AdnEt{(k5SK<}9ouz2_CiKNlGRn+pkhB?uNI>h}>t_R-O4X`L+t9Q|d zSF0@hCD)P1<7zN%E8AM&tj)^0HYez)l%QqdI`9{4+tJXWXe5u`q6xJ$kFG5?X?;qn z#}6_x5;3imlwb-OV+r5M4M+IPq*>x^F)1OXBEefA{ZPFO@k23emwZalg$(AQ~tJfsPG|ubS-!hl`17D^`G|UUz=IH z?musa6r@y0n?3BOE#E|SPijpG|1rR@9Xq4;8Bai9eM&3tW1RN>y|sSmpqTnn0IRME zyQC9$BXKc)`BBi_P;9F?+N#V>u8r_Vnyiny>X`yD{0|*<6}b;%Zw`WjFAI~_*7o|Z z{ZOEETGc(^yGgA5p~+pU8`Y}Cxwdwrv4?o?=Ag24Kb14yca;-eBnu++yhY1zsJl3B zN62oPrt#8sw@5;ME}=a>3+lPwNLLyThU+%~Zw^Sxf$KT!?d?tHb5VUERlZ+cQqf{WFlxZJM{<0%MSo_F-+E!*L-#6`2hXcr18sI(Y{< zq%iTn3rp_2dsD}90fU`6IoC_8?SPk&G-qh^=xj7W_Ja9Gi1rE~&K61PbBD2XzsiGi zklP)Ll%9wxX80)1($<$hBQ$im;KJJ&BxjYn&%Gy^NU=n#h_qvBDnHcE$cPQKUyNMe zEcg=B5$08r0Cobu?gr6&EnKWLQ^d^E>X^n z>h98S!HbA1ZaLp5^2so~r;)4p&rGgXJzul&@dg|9Hr_r-V>hIOzW^n>sKtvIC%izI<#+=tS9W&y6a#u*`x5 zgS&)QdBL2j=GDzTF}@xpd88~MXy68rCHb=StJ+>MmR@t)cJf#+PEAlY?`mQ%*9~kR zDxlk!NH#g!VvUg`UKiRmZe*b-ifBR98fm$R@;hR?bE=XHFuWnz>xgA~*_11k*s$WY zp2O4y_doOdH%9z^oFkB-*~18-^0Z`%7XYjQPU7q-HEc)%LgNknqPQEdoKZVxxK|WP79oM@_>}VskiU0K?wNsS5m#J1cJu z9Kn9Q_4UI)qcF*iSubi3`xY(klY9<8+(zBXb7B|0X3H6EPp3}7um4Fv>+sq)K|yu? 
zB*B1ASSR=gJhM7ZL&-0%@3G!AwKUap*7C}0o7>f~6(a(@G5xi-d;k+7X{p0Hu7;$t z@sbVD5j_xxUk{v?01ih*dL^Lt;uy5Y$6Ir5hu0bzq zW$ME9ML4S+dj@ZZ;T z=r+KTm7zo1{%N1~Mq$_8ov{3_7Shsh4+7!*;Cy;M5ygvrRx2)HgsW_~IZYNo{gOY6 z`Cxo;>^<+d;meJOuB7Uhmw#Sa78U$*F4$qdxxU!B5*VeQvXB1#GtYUngl&sL=e>kQv8yTKOqJ2 zTF`=b`vgl5B9|@2JZugd!_M1Au|ZdG+APH&9Tzz#8WMboaKQ%KtEoV!fW>INu^)u5 zArRtC?*EMD=jVs@yhfUFU#e{HXr$zCL~?fK+{^j(>(^Yjh2>kAf3Mxn`8*8V1*zlhjdXv)c2AO z%8q%q5gr!|q%{mIjb|?xt+%1=Rh=YSeNp*FC4znId za1h3@6^?DLFDTJ|u-dMhoK42K@zoS%`nuhr`7 zqNO?tSlLVPmwEhTDc<8Jvz<0rF+6D+ar3y&98!x=n{-4p|8c0w3FnZ8Q0kw>LKH8K z*~hc{+v>VVMs48r5yj0jdl07$!QAZXs37x1x(c18&6(WVP>`{=nOTVQYBSQ}j%tOb z4d=r<3&zBq$5HLL?#X!DL;cJj*WfdBe_I`r#~ zG@q>s+7A5Q!c*ZN&@@%fT%*qaO=|TJtGCAxJs&yrSfh&?t_d#?gGf~br@c^O;M)wqcqZfcT+BI`CWKmf4Tt69WM>{$ zfNEuxgdTm36H49%UFG59`?dGG;CfD5eEZ4tIgGv_C=yd$472CfepbKn(rQBJ<~@i- zsB|Aq66U=(q%iv|WJ3=(M{yE8wA-@L~Z=5GA;toWL*ANFOsoox#i zPVlXVefGwlg>0m^&y&Hupo5^kNcsYs(T2&C<*a}Hn^1VFzU={wET2Bo36Y)mu!PIA zjbZQLP*mHJlb>(m=jVTTeqPvKJ~S49Cz-|=o@_<8SwAQ29*6x=`8HHSAm)suoMvjd z!eiPeu&J$*=Ql#DA1)Zy@9gB%;x79LULyS$Qu<;36bXYiSo56+(uWw4j^H#`U$FqF zC<~M2K=Syc*bq#jBG()I{NUFE$uL!EX@r~o{2l|_d`-8zd<(0FueZ1Nzr4?cz!1n+ zpf$@0e@d&>pesg9VHVy=d&#(S@Z%qfK_TU4T>GtJ+{v-Z@T?wU^vQ%+%LXe}K4m_u z?unE<4)Z=V*RCfUM&&!3tukUv0~Y0bPBZI-!QsLQ6&H9aoEutOf1VRuvx!^->loUe z6j=;|db>|>?LHS}uA>ddG4y;>El&nuKG(7qbtqKO0qhB4F_BrFp|#ezV3grn?*GlK z=YFpA@{0NX{jZAloF>P`dM`)uqg#w#%#aYSa4zgro`D%n^gRJApV}P(oxDxLly~{a zuW~Xj8cK>W_X9w2p*n^Zss6(+3>>tR_lqGJP}IQ(#gxr6z|D=FgG08EfPnt#%?sJD zc9120;S{7BW69{GmzbFNzreeBJ&!?HEde~6*E3zuD0l!4_}+~3UoaQ1(}!MY3ln_# zBzJA3i_KfPXN9(2AE|*nYOY9rqTJ#E_@`|8EJ6SfxSpwK^P9x4^;4(#+U`SL!%XS$ z+TqJ`*WXa>Y`<|VIzP3!Ia#Kf1H7H)_4IOnZ_#um2zvFHb`EhT{rK^=F4Es@927Mr=JtSCy-b6q>M z+RLmSsdHBz)`NBQf8#9={%Q`5fyO2PT*SBc_rn06 zO;+X!QFY>Sa;Ajg76r>|)|qc2G81C9r2(%Xqtf>F@-j=8wN_2tS`*g9W)8rR8%?r; z0sxRVx45Vr7}%+j$stLnl1Yg}ua*{rLqAfYm})gsqUb&)i88!9oN9f&J6!A+r5&z} zs+GkY^c*>2kyKtBO!<@SVufo+kSST=`FreZa9mP7X!Y&K0@s?xoq{ z!D7|btkmQe{W$6+-QD7~I!6Rd8cK!Cg5h;4duNcBi{S1}{Y=iT@c$+Vei|8k2vB46 
z%EN80A1NfoxUuZngmWe96|mo;?%~!UZ6|D{0NgYhn#dc83}919VC?63GE-o&zlMxdivlly$khqrc6N^FPo5yB-@6#>KC*WSmGc^X5CgFmmp7JAGiZPtx zcUPjOdCj{&myFAy8_B0nMFb@crYQe-=C?)oa*sL$Q$V6aDTO>@zgI#SVx&GctknzZ zi*NB<+}0k610>=CW?UFs*8MO7MRS(wItwBrk>N~+yQoeUst<}OpxpSZCSz60^q12~ zf$^~H!O6j)F%Qfe1<9uPQI%exBD5y&QVhjdjh~krBRrD`rSeVuJESN~jJ*-OF|(BhV=GIieJe+&B`JweCVKjSoaYp=!C43`aH92xLq?E zdyMlstu$7gz8Vp*L{I$ghiV@oQdm;R?3*UGrJN#^6|WOlCOA{Ty&~9cqU;32x7G{> z1|{1erC0>KBZF4%3g)idh~5afh}6lqi;Ppj@KFB|A>77= zjLZd#$yN8V2t*4u1}HHc7}DZ?kY{TCWM7`FK%?IkAn3EP^#U zmg_cxWc9fhI3skp(e(g0Vp2~rXm>ps(=I*jArJ;}3~$26Sta#}ozyuxMoJ1OoUw70 zWMJUS>h!{y-50b09AUW}?oc26C7+n+`1{~}c7$TnbLI?J&By%%n%lMA>8-DyZ9~sK zsJwf1KpgTZ5`}}1B|d^^J`SqF*Ba-um(+Zc>Y~i6+ZGY?J+u8^`_$>bX_da|eo}4L z8Z~}{){>xr0U!N02_#re8a4LwIOOMl`l;u_eD553j$mhSjnOB)r2w*%L<)9@ zCuvI_;6{Y+^cyZt5*x|pb^JmawZCrzVl^84sZ*&?s^ws2MyaxY#bKEfB9214#6O`J zmoMc`#*0rsb=+`Mv8jY+PI6zLGwkXt{q9OYr7 zkERk51j;POimJe|dVY@t{EP_s5^N-^)YNjt{&j zP0)(TML#sn)oqrv%3k+!fj{1Us8Ou5r+wX7r>!At`~o_rXJf0tJ{wICC68b;!xHbB zvek7kjQL)jCUv77m~tpIj0DkCI*TUbCK7dRn0xfme7@--C1fe4@W2WiKD6Syxhkhk zG|fNAoNkPdb5jlVxTUE5SQAoTC$q}dK}*M6qTgLCR2^Py+wvD?ixi=td>#sMz3imf zu%pQakz+en(5zOMJ~_QS9|iM;-@|~Q1)qQbLy{{{tH$t!QSd5l!2J+wc5jNhY}n{O zaV`8@&sbx(`T6TD>7>6s5@j~;RyH@;O`neS_HJi48C^wBIaon4LNKk{7kZLPGQLpi zvEU?9d&S5k>VqeHNtL1h&}=9{n3yw5OAHAK2{UtZG{aU}a(4*KW3;$X_p)nJ4?>~T zU%3=d0ZEv+%_Fg71O#Ij>2~4@PpQf5Y&#vU(PSQqL_=x9>Rxo0-L&9Xbq_|`%&QyL za4D!6?#ao_5@&_Q8QIX2dzK}H)YW*>T6QI!I&;qkx!`kAfu6;gxZ&W^9zKR?$h{x# zNo8g!(#p8;>0|1MNXGO&YNJol((euH#tVjNUDBc7MX@(a`X!Uocw=)cv-P*tqk^G2 zzH`$J2&313_bvC{E@$q>G+slVt1d6uLHsr#}WnFiuS$>EAQbcY!>`j}E z*}Aux44_LpDdRSdbl+iF{d!nENJ$OQWeoGn+Y*aV36^yUmd+wXFE1|_N>E)9A_|$O zMhQcxy#aFA^3)dJSwYy*jpi0qiIgHZ%^iv-yns7~_^Uw%qO>^IUMv#~wcyiL64fmV z{5^|vi$sfE%7QG}E*GeTX7e-b-z3ZQBK=Oe@=*+UQ>uM(q#8a85g%lyt;E3(5(G`+ z(1IQLR>Ta8&U~0iA@SHRolMu3u(>>7dG>A+n~14H+339n{8+@uA;wDCL0A!!%$o#feXt9D`u21Xw>< zXQx~{SF*{|M!K+oeOB^Utl^6!hLD<$Q%4r*AIu0a%hm21B>B z^nRhpvNA$Hh7A{U0E6J!KRNJ802`p!H08r#igjAfkSpnUE#w#E@By3~98vT0^Y{pV 
zDLR4iR}=XHA0WceJizn=;IHLH}(T;Q~kpU%)4 zzg{K5Ppz&Ffrkpdx0K6!`S{GPuaC1z)Ym1K!cZ}v0rdTrvGS(Uwqwy@tCX8b@q(Ml z#nFV)S$jU`mHw3`$D@l2`qW`W=gunqUj0>UR1ixHJyZt0=H#p>CmzyYAoWZDR=XEM zug&shZ#y2#Cr(^7wtzb8+&&QtX%=T%3n8$kDaLNTKyl{hOJBqWkURtn&G2{l@e(19 zo)(I(a; zoQe`dJlsAJ6%~DRo&5ZKiL>{k+71ki#S{4=;#m`jh6ns}3 z4u_XMB=lvL{tOb)v+fpWtPAD?yWB2epOM$NKi6PG9NWXde;+>yLGCAiiUiRW5^U+- z-X>ENmtQ$iTrJOow6f~vgNBq@)Adx{M}g-54eU&R7nO@Zyx_W3)hR3(`U5+rJduHH zRbW`Y?zW#c7p{`A9%N>1iHTNhUNV zgl9prQf7S%8mh5gaJ0?tkfH;X;@Y9rp zNJ@a##qge2sTP~nlSL=UNsi3g7-b-t(g>XA%hWK1-mq5Sy>k$#VUjp6{j;H=p`E?G z`TTEYgLdz{3u@#>>!oflX49t;JuEkah3Q`3g93F&<+z#|ZCWxiGEYy>Urx(h8#nn6 zyUsZM!U=uI5;Vx{oP^tOaJbKJTmZk2Z*oJjL)q>YlurzXUwM)hI7CF=gT?(hV+SPT z>bhA|+DKk}U1f#@D?N5(6yY+u?^hh@i`H9f7rsv>YaQQ^S9<{}^9n>3Sm3XrgpoXj zm0@_)Iu^m$kGOlb;YnzT6b*Agwx#Hth0RhZ{vw_mpxe;Rq2|cIHAC7;Y?_TLo z9Gl71{5)-CZI>a4U0kP3EpG&g!1$YXXE`dQ2hj$e+RBmD`)y8qMSp;<0G3`qHtMt#dF)#B$ z8?(x%wF}yu3%Gevm1$sgPhCIT82O*^bv2aGqQf2JyrXS)bTX>}Vl)Y?mMY4~hdS3A zkfwO=`MGL1a*gKA0_K=`KL&r877XI60Qz@q10 zJ>ZH(-%_zQKz4~0Pn&5_=j?d5G}Z-D+P+Z&l)H$kI2XSvNL$)8k}ZJ5|NHlk*RJLH z(;>@5w+#D4I}$*qXy%oztntSwMm=t~LCmB!YeqLiU2vlmaq)0gkL&<(^k3G)DOFY% zxDS#>Sv&ah&lNP8+VZ!!IKeAX8$@0A;mTkzAGg2=uNA-ZaiTuxpKA_V=mFkWf=y@A zBz_w%qVx6(F}Aits{h2Fzo>~lS=Dsj8@9E#-z4&LaP-`ttri>9b=k#!C#5e(^|$h^ zUFk+b5h^{7Gm+SovD`mDFE0HPmVVZomPOygBq2jr* zS$T+&<0)R2Xkz&?G2xSiKt{>Otthsg!l-U}8(ZG2 zrP68LLRSOu0d|E8#_mM&ZOg#J*-}?tZQz+>uJ3-9ir66~44zf#dcCV{s5T8Q@b~xk z07qB(60K*EG&Hg;D?N2%BmKkk%QyS2YZ8m?(I!J)`nUd zzgPFVlKcaPKxxhz1sd#&;{I4FD)-{6>;ISv-lZoYkJSEbnsn={pfSmvx+_bHcC;Ek zJ}Z-WIz^3%pc z=m###ealHYMVvOuqy*VW((DmGc3W*r?Oi`9~+?ZqUPyUsHI44AFJ28Tn+@?w4k5n ze@*Okma(0}Z@5m~)%_T&9fw$C_^t4kYB1`%YgN8vt#`_GGkxg5q0|lZtj(n&U2A48 zDD-%G9~+BNXNbQ5?~&EPQjvM2cCTH%B8~>~h$gOi!fLoT^y76WV{FultIFs`8qN;_ z2A0dRZ@b?MN3)s_*jU5?s5y5OGjv{h@jsUktN>yW%BqcqpchNehueDCB6*kp`R4t~ z%F5xPU3qaQ_*nF5WYZT3b*|ZYmFmvYau?OXDm$7H-edvv@PW?oCX^(DX^d!*HcNMH}4lP!tPpAD%rt(xdxJzw%)L2KA@ao2HZJ+%a z>>B<==5O(IeT@w1PiTm9A|=U;`pRZ4liLiH{BM6IA(e 
zKgZCmN29iEcnc?08xbFgm~`4wrWV5N<>k0oS5Y1Xxqz}gtOrw|Ha3pI&hS;IQg?b~ zMa%1sy!&|_Mf?ZOHS93>pD&N@%HkwAKF~7xT@@j|HveenC!VCS@HMO>CDJR}NxLA7 z=<1d}B3#`p&Ts_ZXc7ja>`*Ju?Ft{hLn>v7HJloMG-7#N?O{9w|83ewa?s|rH{I4K z`XWJ<40Hkl9|?E~Pk3SI$7!2O@6&IpYx?*G*)r+~3|P@-$^b`&w2XE<%CzZq(q^dO z4*|EHoV((aRrwCS-+*>KT>Yi~aFb)&{a0JvWy{sIJ1_Cx8wRCa&DRE|&#H;%NpJcQ zmgjjB^KS-l!&(mNwjK}&zoz3_DL>#^F$fAiaa#Hn%3Rx#0s?_@WymEyj{!*7lQP(! zP)cLfb{o=E@kJAMGkDJ*R@210PtYAQs4i}pJwM%F3HZLyHGV6XcZ=sKbxcmg%g}~Q zLN=$wZy`or1t*N0rItWo>rClA`gYs-dYbp_MO@DBdbcops=>5m4x6KY8I!4Y?`MPW z_odul?h33|@2?$MeLNdI=F|1AZ)er7y1QR6)x}?U^n$mU42||@=jR8;`8UU|_sZS= zeu_ez_iuI~%TjLd?95{i_=n-N(UApc_jY@+6({K;+8?2jVO0r6k1@B8Wh_%$LvQ*# z%I4{`v&wtuh?*CF5JKaeJt4QBkCs9-@1hi{C6{ zKJAD)hJJk|5n2!CO^*xto+@R-So*WcyWtRc8WCSWQ()2YKHz<)-`M&g(tv!Q$?aUc ztpEKfQLTl&J9FDsf3p@Q#-$Qfa%;Z_^=plHd6O&ftZDHzce`#>G%erO2vc1%K(+|& zFalnQb>^4)UxN>`!3nKYT_jCvD~Ps!2Xurn(YuV0Si zcp;FOh%gjKaw_Z%mjvT02tk7ILV}U$@RlBKxFEoOf8#LkqoxeI3-&Jo%%$|@8GqPi zt?Sz7I0Cp`cUOctj~ni?g9$QT6qL9TW%)wNAT} zhBY0rAdD*b^P=c>XV`S7eFM0YAhoF%qBlZ1d60qo`U=gz?tPn3_~tP?c@>e`7XlzB%N8W-V$;j%^;WJ|NcbN0z#z?n*`g`Yk&s` zOmhW`Fj{c8HEu zhV=5<&D#vs>w&_W#cmkJH*cfADYTWo+L5w;ort@e?9ERWy_=b5jnC{ zW;Qk^M(~*J99`+NRO&4T(7jKWu$mYD#JH2<&Bb93a2;;qYExo2yl;+k3cOnE4)WI_ zaC+nPxWx(GI?o12{-XRSTH0UmZB@|of99RM zE=g6hfK7D-@jS0}=7)um`t5drSyd4u>$3fmnYt<3hU#$^3^Tu9yv|@P=UgoN!W-dn zC%eB_n;1VXT2Ftaf#d7CxuQrn3wg*%TNt+MiVaWYg3*yon$)LDrF*=E8Y6k_`j`cg z^JxoGw|*>@Y@Qe=K9aO5ZB8s6zM}fzO4}I7) z*-yig7QmxSEcQnWZ;^P2&COf$7f2DYa<6qPGNf_h?>pfsSE$eEFYk7G&y=O9b{zys^Cm0=- zswlT7XzU`AS7srAjNq?`E>3pcy?4xu)7u{D53@9wvt_F+ce*XBo!5V z$G81N;q$G-;v}1=>pkk9+uQ;I3NYN|U?R_oi)b^D#RmReG#!Ruq?g_K4HQ|!gknQ3 zxZoOsnRs^}mmh5NKW^Daq;Dd((Q4lkqASZ&etfs7N;Hrv@yri8sx_oC7nx1489BD) ze-Nn6vtRZNqtPx`)4{-J`QFr&{+UVE!^4ASKWq3fR{Q1U1-7N1zyETlzt3(MmD_Nu zWBQuPa;5~+oB11Pvx0iGn-q4C|Q(r5>u)b?O5d! 
zwz%MPfQ{?ED4#+N`bI*)b1%CuvE446V43@vo$jXhlQF*Tr5{P%6$ej7a5zg0+jllV zuhx5VBUP=Oz4~yy<&AG2B>kOFX?T1)D?KQeQ$sY#tlcNNRW1PAnbCYt5+gF(tbU)`w?~{!Ijf0MDn65bFUMRT7S6-{*bUz|=H;m#Vpvw- z(;gdElVYtu5nk^vxb~c(YtMvm%5Kc$ZRDUO?k;qtHEUwGbiY7g7%k^0_mEV)Z3ERd zfeOJhu#7!AvdP{sGpIYXa{hun34_1+jQk`PMrr{ei&uyIBBSrYY2Pj3f9Qv)t2wbVlh99gV*uJa zPx`Gez-f`~rdR1sY6?V{MwAJ?1~l6g*kC9978h&2wy4vubI|eXZy`k!6+7rJgL0rKkrzz zE)1TuRj<=W;!w+dmbSi$J9Sh65`*nJRxheEdY3+wQGS?b3omQO0z zXu7;;J2~$dlI_COJAe{u#xp?Yu~b}tjiF2eDs`Nw{cNuJ&eU6_?mgm|$22~! z@(rRCr*iUoxG(2 z*DJzAj@xyqD^psr4cKQ1H24?Ts8n8sQaRn?jl%c0r))~|^{Kw$x9S%}!o_}yPf5K) z*MuL}7E*ui?&JdIYi#DIm8(~uIyb-h?5mpE``O(D{tcid@M4NOrp+%xfu8WhiYbiC zC!c%|)jHXV{HnT1fmD+}Hr8RynK;RVcnGdcSm)_1Z|PaJp}Q4`ylP=_nm0s0dvxoi z(=qv`*5ye?!awoa{PtSqJ0m{EZrWRmlpGiYK<^*XToGtRHRI8!`0fdQ&MH`OL%7s| zAMv!ilm7Z52)LROxANqv`J_XF3}88LucyeV_qL+4vRzXK@Xlp~x88=9 zykL&K&w1lSUxh;;Cc2V0$f+v`9ktp3g^w!S7ja>YPG)j_HAA*S7$%l#dYAf}Sc1QP z=yQl_(O~C|c)8*xr)e<_)wBFdN$98T#7wUDaC79L=U%XLG>o$`O~@?Dv+Y1nPO=C%d^;02n#WpH;mO^n|72FhfGl8_RupUcc_lpFf*wgeKd z-L;VNjEjvI@z{m=mJS3G-4h(d#1ePR$X$)&5Ftf7;H;MIkUqDM$vhIU5xg#?GppiT lmUyGQ|NjdJAsYGt@t1S??kX-5?2l4#in3}lHBx3_{|BE&Bme*a diff --git a/api/core/model_runtime/model_providers/moonshot/_assets/icon_s_en.png b/api/core/model_runtime/model_providers/moonshot/_assets/icon_s_en.png deleted file mode 100644 index 58ba4b462376265f9575769b69521f9c9ee8606c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7419 zcmZ{JWl&sA)ArfL78Z947TnzlZVLnn5J<4#?(VW!kR-TUAh=7A8+UhiLV`O4g1-Cw z{C<3OYEIXgsh-m{-PhGUGtnAv6|peLF#rI-QdW}FLP#A%(}kcS-W{J-t`P#+N?J`C z0BXNLA3vcWzG=*qwA28=mk|Jh;Q(-t=nCEk01s{eI5YtO(KG-can5Sh6h{o8nyM(u z0Wbf1^4g0N5k2V6O8Ra9Kr8&e29i(VL8t&|?#gQNX!~Fk3@qlqn!Wx2Kyj!nC#~(h z(r;+zY%m}2lFha%jFqs!f^0vWV4u@;gMoZh+cqQ9g=24?sEC&E75|dfrc+ANJmUw) z7aMZxG}Sx;`~({&Def<-R0~L}!o%K0hRTfpf;|~zjrlr0q^C0$dcU_RMqS<8#JSCK zd)ZrSld0Ri9vhn3@H=g}eVjOZdE>=V6I#iasJs<46-w@&BN;>`H-{+&1SchE3(*#7 z2f@HJG@$6Mw4{Xp@^C=7yuy$!BpPKHOOM#d79o8pEy|cjf=8mpE&Y>t-c?5u^Q_20 
z=hNSh@7SWl^iF~7^>j@5&s>@Z>3w>-eQI1hP^c0ZS>qfhiXO-hET1GC6KUq$S4jxn zltY#(;h7gewle_ehhwGX4g()^?p^=JTC$H|uoNRtNId*qb>f2VLcxvyvsU6P5LW`A z*%qfXVMHE+gO;6OvmYt}ka# zxd(=ye?V4mt`gXIo_(ahJU(}B7~wa^pf>V(*eT#X0dQiD402|S7ref7d@+4gJ!FlA z$a)M7)w<=gcspVMSxKOS{59p`o2nPVU!(!mdl1}jFH{qo{yz-@Mnd!@2jTT z+#wqP;biCm$V0cgh`_z`k<+uj-7-=)bSsmVN?-K@hBNw>y*2|pXxGX<2QelpkL7;M zM$wF9b96CkDQc-I9o5Rjn$4TpY*(4uIrb)UUK#M02o*X#5#ycg`Gy}UKGE=E1Mjb+ zz^{fXct~WS05l306WdhSD0U9MNYR->jbkVwLG}7`AQe<2SOU4i2UpZiw^}(%(o3)a zgD!6)t@R`@;kpJT-&3Rz8Sfha*Jgv}%fXR)yYNQ-;ao8Fvagydf|7Z;LoGuI56^{f zKd&3_1HB7JGZ1;)6v$DI0Sr^$TnS(077g%Ptk8ca9Q60N(;1T^dnJ9iA*x=WI;=s~ zSU!?>Z-W=VHqc^-H9y;TSgN6q{dq;tX6m-8;3zIo=$FoP{+NgBAUd7Vgs(!5hEVFJ z-`TnvFhZ_4DRXB(-Yb{1RbbJ=%p-aDM~){rLRa{%A`M$vt2UxOtRhmtz6{JvV)Z)^ zPbd^s<41g9y|V&!IpYnffxHclEbMbHNQPdw+UTiL?}w-7W=6=@!8M|Kf9X})CPr%j ze2ZK`^d+YwGYy-T*#HlJ2E^H$aCcUxu`zVj$+0}GbSs6=bP=QAEM;;gS$D^= zm??aVUKnWn1zAHpm+fr%g`J zHBYGL+C2}_mevYl+F13sJ5$@QNpsWOG4U|-6tG0DpgJ+XPFZr(61#NG@@wgF#V7rw zVE>%E=5jLC@ik*k>C=WFRYC^yCO%+6x0b3#QX~v*WKdij#rd6?v7-8-nhI*&2e$F? 
zZHg~J1phA3$5LRi*<&>c$l4Pbz-TRw{F}q4W_(Ih0;LSVpKgU-)Q>Nl{z8-o_f;x5 zKUWzUKH2gcZ84_c~5hX>>IOipo1Im`2Vw@*IYz3wKp>@kMfiM`2LR-O^fj>3u5mO{}LX9 zx)E3eCJo@x*y#N@-Ps%e(WzAQAA!?<(Zj#0w}+;UO-x;Rqpsv@gokq+uid@!i`$^X z8#z$$Z5Nblhb`zvCyO&x`Me}nqIh%#m-%B+KMI8m@hU4}1 zHO>5L+`_^FLroKW^>z&uIdj z#@xSmsv}c1T$N<)c$rHa#Wx1hGR4Bv37DMkhRTQ3RSUE;_z3J*n(>A=sBUIse*B1o z#tVF8-9BxT3@erCHZ>JD`&1}I4(lB=gmj18$gir|xCV%jMZ51@+pDHzNh~ql;%r?E zb~Lebm-9M0V*mO@Anbkzx%@@I{GQdar*gH4HXF9{cfD9n|S(hUv9R>1Rp_XAY83j^hp6G}E+VQB)@O3+NKH-;$ot z+C;cskN|y66p7}#OcBX{t(YKN<;B4cOEpwnPv>YV3;AsGtjRvA>gvD#kr?%bAfW+i zag+re9oVJX7)XA%d6ku~qU9t!kJpIy_1=?vJcoYDbKwPCx#w~4Q1V)p(Wz3%yW*(J zJz}L3Meg8hTwn5PKOJaJ!_FWP5etJiH#f}0F%kT{&Oyj!UsaJx zU;e^t`iy~+7XN_PP|f)sz4G3^tijQ;>S0RsyuULlF3T3F+X*TQTfM2q&o5wW&fLL4 z$qSbI6Q>}yg0kU04FGTCH*ero<|aI_1a_k{I?F3Hd?lWL%@UnQ_QcxcU-%IfiMD3N z1W=UU{A9UQ+|)#Tbw$JkYsD6Gd?590ZRKSCVQkCxR{eGQA=J|3nzp}?ZQcO+>H~9y zW_X)ng%S&)8%oZePb;-g4io>m>8N5WOGfE7-6R%4nod#(S6S?0es^wORTXh!^%J?` zmuE23hChK@@dmGphnXx=Ynx+FRkqmFYI{I`oL-Su_le)Bl#6eM50_wCKJ~b3A%-y3 zKx>)$(t^Z-urvh=Elt(gF>l1kO>Y@FC;T3r#{cry!^Pcq&D(=M3MbRcM&s?Fa>>En zi(e_^2L}f-M++QFEoNv5?dxo)j-08AoVmSAde~+y2G(*6cp-4C(+8D&5Ki#9_z|CJ zEKx8p=RKHXy$??Y!=_nre?yo$b3=bl#o+jv&>(XaXlv^#a5yAtKjwapILq zd;+oUZ&#ESe{5OVUS>j!0U8e7qJGeTq+Lpa-*IdF2_eyg@Zd&OhOI}9#b~3j_}wgn zx_4k8)vv@sm%DSkSanK8n=13u#@brEc8PT*20s!a29Boji3wA8drVnb*|ZGSu3L5T z1l-j(^zf-uYh(>|2&72m#$^%;Wd@a;2;sdSK~-gvb*wJf$WyudBlJv_dw-XGCX{>Y zAf3ctT2Sbs{bw)CltQVYv5Bq}cw3ZEt2qcHgEhz;q+OJcI*c7y>(5cx^9sr$MbeVW`==|< zXrOrqG;B%;jJ}YL=IS_D(l6n0@9pQkoCn${$jQmc{+WuIi3#xZ^b|wh`MsWx|1G(m zTpV%poA4j>@Mhmt#S)m|Fnxv(heBBo4c>O&T=k1dI`g`vmCDk}3b9MHcOl{4l3l^| z?XB5nN5KC6z7%6(sp}VggMfLR-T66)!*2;Rb6UBs4_FnK02~+hy~9bN#&x4n_^XFe zjq1zbyxLljgyCwf+n@sp1MIV%@#9SH_P@Pc93x^=^vfmH_T<*xT}UvD9)HN3cj>1S z_5H)c^vsOuX}#q*IlfF^s}5fIw^uYI^QlLc0VTF#}Od zKMjnWQYK%oOYAI5uv97GZzfX|XpUaG`K^1sR@TOczU*h>n2%LZW%*^b)>vN$L^%vGsv0YvGys+S~3I?oJpMCY#2p`iVbN zKtBl8JgX{pSMC=OW8vbARmvAN0yy<>o2T~ zOr;O}g8W;Hi7ggtqPf(AL;1Z!d#=nS%+Ub-;o(iH%K{I?BNr 
z-6J)ZBH45I$F3=Xl^S`~o}LXnuuIx9d3)wW$d%#rft^^B`K_6^Hz|TSQ-anuHsU)v z`2S*I=6(N;6fI{ee<~s<7_q(mDK<7XPn)%$vEZMRia^KfTJqq)b{)tjL;uh$p#JSK$$herf- ziNhu@&rjc&ZE87EN9FnML7^#r{XuT8_YTm03oz*^d@c<(;T?F4Q&%<8mI9gB+w-$$ z+pTjcw{w`{v#?g+6(C?_Z4J3y_oA-qxVJU%nAf@MA?XP2?v_p({Vg9hQm>vuZgTL3 z<30rgpdlABv?>|F!q*4d!g+N{H!9L^B_i&{v!R@j}=!7~Lt6;C$) z{F$lIP+iUC+PS=}-_X>g@T7y4wkKa!?CAb``7{YrAY?!ri5w*8{qw~w$d}s4|3Hy> zeB6*w1Ig1SBBEu3dl?PNzp+zsH+Ie6@pR&3wl|S|_Z`O{X!AVnd#5fDUa*3aHo5oO zERA|H%#GKaO^lKJLzU*hdVB_Z`DBka=Vu!9QavzTLzAQIM>{spvz2gn+usF+GWk){ zwVIez;!%i%n&6>Q_8n~vXTPIKLWZe>B zA4C4G>9<0tOwN8P#%afqx<*E}Y@m2`SyeZrvh{~5$h-dJIf?G3*rYa?mzcG#uXkL<7Y~{%E;VHH)kPX5#~-@Mr3cA zx8PPP4F7`^4we~zs_Z{^GT(X!KR=55nE+B!QYl2HM3~!eIvt$#zA6El1l2IuND42u z#GKS2yT3}N5bFONq_-D6Szh;)HJ}-IePrC58*~s8lXH$Jg2GKfvSG5MuPFF_fuw?r z$pZoc_Rr3YzcDNSG~kT<_6-m5UqsE@R>9wPD}3&3t4%qi_u5n5ET&X;kZ2;bPA(0n zrZcYRzXr>W8b+ZMdB2aV2}~b3Q#F~1c?xIT-@SXMa6*rNgP3#h`F|Lk4w;ykAObaO zm2Qb`{==s7JLUo2vf{+{>(o8wc+xbSl}$vmk!M6RsqSb6d@IDLSVRviD?glVaCyym zX*TLv?7unqXFRj=GBU?nMFomT3EUS?ZI^Eyb<3=RVWEcv958YU; zeLCTdc1ddqjT->FIL=NlMMgzs5{P@-6XqBBne%<`h0Xd-MxT}Bm5u+|L2)X|VXMcH z250JaZ9sl2$L`YY%}sPj2&nKKOM{oY+yH_omfF0yEPoV#e^Z83lB^fvkW@BT>3^|( zL!pjeA|HmYtSh$sGW5p;%ULz}>)WF9%TX@FbI$hhOd&-W&7Ys9C z&T=G_ie{DQ@t5kYa4an?kNc^t00DTa|v3%#e05n zsXV6(UgZ)d57P`XmLste%Fg+u8YLYo2Q#lpiQ6rwmp|29Lmx;mP-06Br)FnonNvqy zJihDY&Je^?RS;iAE+Rjd>7twpU~ge+Kxj{iqDLu2?cn}~qM|(4AGb*v2KH@FNxt-$ z937_ovGhN6yWQ$Wqc-5J`OhWTt;K*jegy@}WgL=aRB=5bX-F8BR@Is=Q(bDYQ7g?0ronQ z2oi!Yl>9`d0P%}Hf~Eg__#9h)6N^6_*QaGC1b#f@c;bz6Z)a!gZ>XY4o~z? 
zozK=hU&2ZTrc`zI;F&cGUHIJAZ>BRo*&{c6~)Lnu9(#ieAOh1#1 zLw`b?elUrOiK!LNt*zNJyuU_;w3E^0PMQSLyw-gUhA1P6p=;9f_uN>a3wEELbbGI>RCuVPTp9rBT( zM&h&Wo8pJ>iDRh1Q+f=y-G@Q4i`e0S&^fR0)(U8@d{;|u5#y4Z*uXK~`e5*f=k>EX zIn%l45u6jR5e-EJ(lN6jU2$_zsBZ5ryH=yC;?O&@;nK4Dvskk(t#rP8$MFG@tj75Q z`tCEl@0?j?2z$;HrBH(Fg>QXulhrDs-PW4UjE%#zb&#J)v_)(%-dozTfky6%wz54T;MeLc9)7 zGlpSf;YW{dI^BKrDfuxHXHnmuV9l==dakZ_AdH8I>x22QWSZ?UtG#tUXwm9?3!48X zbtMKXkQHz{W@pE#--azE#1jz#*e0#Fri9M=KJgN6>+u+G-I;dX+zBc=s!}%QrB?<* zPb~YxD~X$JS9~=Mh`?KTo42_Y1BXfvU1)Y+KtrVOF$86?oX6x%?-vMBTd1ygjFd`t z*mxy**8&PE)?_GLULS}II+X9gT(T+6uMMk1M4nmwZYs_mZU;W?<-^*{J;t-a{J&4Z zb0)CUAPiZHb{KJsTAjOIZ)fybcH-$2S zqN}d;^yxt0%>ip5J!Oj6*slURf7QtgCk{qYAk%Cjv~=$dx_R3fdTX=uB)Z>Zd$JPC zbbyv%9z)03mB_Hdj)(qyj;h)Plj;K*tG)EgD=d&=Z*f{X6GK@^-K*o;-ezBc{5Ni& zj!kPm@Ry*j>6BB^;opMm7%q*mM9R{V!A^X9BX5fJh6Y_oLa5!!!G{K9 zGU5jh&%G+$*28&a3!xbqv0pD~>1^?2a&Zx5K~a)_Vg^3}_Y#vwu}EAyt=evpWqSK*9gm|;1*TYij_cbo zq|t@J6}YBziM&U-qREEXG;E-l)!*G!PaBmU5>F z#DNX{ai#E~@A=WP!BAHHncYU>B@mLQaE8=%IA2~2+>!JQ*`B-rsLfEcwCzg7og%>C zF0bcq{>j}!)XdcaApl-(UVaX4UJh;nZ60n>9$ryCAvSJqQEqOv*ne05kAj1fxs9dI W|6f6TT3Q{U04U49m8+653H?93{_?s2 diff --git a/api/core/model_runtime/model_providers/moonshot/llm/__init__.py b/api/core/model_runtime/model_providers/moonshot/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/moonshot/llm/_position.yaml b/api/core/model_runtime/model_providers/moonshot/llm/_position.yaml deleted file mode 100644 index 1810ec61d68e89..00000000000000 --- a/api/core/model_runtime/model_providers/moonshot/llm/_position.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- moonshot-v1-8k -- moonshot-v1-32k -- moonshot-v1-128k diff --git a/api/core/model_runtime/model_providers/moonshot/llm/llm.py b/api/core/model_runtime/model_providers/moonshot/llm/llm.py deleted file mode 100644 index 3ea46c2967e19c..00000000000000 --- a/api/core/model_runtime/model_providers/moonshot/llm/llm.py +++ /dev/null @@ -1,327 +0,0 @@ -import 
json -from collections.abc import Generator -from typing import Optional, Union, cast - -import requests - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContent, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - ModelFeature, - ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, -) -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class MoonshotLargeLanguageModel(OAIAPICompatLargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials) - self._add_function_call(model, credentials) - user = user[:32] if user else None - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - return AIModelEntity( - model=model, - label=I18nObject(en_US=model, zh_Hans=model), - model_type=ModelType.LLM, - features=[ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL, ModelFeature.STREAM_TOOL_CALL] - if credentials.get("function_calling_type") == "tool_call" - else [], - 
fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", 4096)), - ModelPropertyKey.MODE: LLMMode.CHAT.value, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - use_template="temperature", - label=I18nObject(en_US="Temperature", zh_Hans="温度"), - type=ParameterType.FLOAT, - ), - ParameterRule( - name="max_tokens", - use_template="max_tokens", - default=512, - min=1, - max=int(credentials.get("max_tokens", 4096)), - label=I18nObject(en_US="Max Tokens", zh_Hans="最大标记"), - type=ParameterType.INT, - ), - ParameterRule( - name="top_p", - use_template="top_p", - label=I18nObject(en_US="Top P", zh_Hans="Top P"), - type=ParameterType.FLOAT, - ), - ], - ) - - def _add_custom_parameters(self, credentials: dict) -> None: - credentials["mode"] = "chat" - if "endpoint_url" not in credentials or credentials["endpoint_url"] == "": - credentials["endpoint_url"] = "https://api.moonshot.cn/v1" - - def _add_function_call(self, model: str, credentials: dict) -> None: - model_schema = self.get_model_schema(model, credentials) - if model_schema and {ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL}.intersection( - model_schema.features or [] - ): - credentials["function_calling_type"] = "tool_call" - - def _convert_prompt_message_to_dict(self, message: PromptMessage, credentials: Optional[dict] = None) -> dict: - """ - Convert PromptMessage to dict for OpenAI API format - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(PromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == 
PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - sub_message_dict = { - "type": "image_url", - "image_url": {"url": message_content.data, "detail": message_content.detail.value}, - } - sub_messages.append(sub_message_dict) - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls: - message_dict["tool_calls"] = [] - for function_call in message.tool_calls: - message_dict["tool_calls"].append( - { - "id": function_call.id, - "type": function_call.type, - "function": { - "name": function_call.function.name, - "arguments": function_call.function.arguments, - }, - } - ) - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - message_dict = {"role": "tool", "content": message.content, "tool_call_id": message.tool_call_id} - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - else: - raise ValueError(f"Got unknown type {message}") - - if message.name: - message_dict["name"] = message.name - - return message_dict - - def _extract_response_tool_calls(self, response_tool_calls: list[dict]) -> list[AssistantPromptMessage.ToolCall]: - """ - Extract tool calls from response - - :param response_tool_calls: response tool calls - :return: list of tool calls - """ - tool_calls = [] - if response_tool_calls: - for response_tool_call in response_tool_calls: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call["function"]["name"] - if response_tool_call.get("function", {}).get("name") - else "", - arguments=response_tool_call["function"]["arguments"] - if response_tool_call.get("function", {}).get("arguments") - else "", - ) - - tool_call = 
AssistantPromptMessage.ToolCall( - id=response_tool_call["id"] if response_tool_call.get("id") else "", - type=response_tool_call["type"] if response_tool_call.get("type") else "", - function=function, - ) - tool_calls.append(tool_call) - - return tool_calls - - def _handle_generate_stream_response( - self, model: str, credentials: dict, response: requests.Response, prompt_messages: list[PromptMessage] - ) -> Generator: - """ - Handle llm stream response - - :param model: model name - :param credentials: model credentials - :param response: streamed response - :param prompt_messages: prompt messages - :return: llm response chunk generator - """ - full_assistant_content = "" - chunk_index = 0 - - def create_final_llm_result_chunk( - index: int, message: AssistantPromptMessage, finish_reason: str - ) -> LLMResultChunk: - # calculate num tokens - prompt_tokens = self._num_tokens_from_string(model, prompt_messages[0].content) - completion_tokens = self._num_tokens_from_string(model, full_assistant_content) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - return LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=message, finish_reason=finish_reason, usage=usage), - ) - - tools_calls: list[AssistantPromptMessage.ToolCall] = [] - finish_reason = "Unknown" - - def increase_tool_call(new_tool_calls: list[AssistantPromptMessage.ToolCall]): - def get_tool_call(tool_name: str): - if not tool_name: - return tools_calls[-1] - - tool_call = next((tool_call for tool_call in tools_calls if tool_call.function.name == tool_name), None) - if tool_call is None: - tool_call = AssistantPromptMessage.ToolCall( - id="", - type="", - function=AssistantPromptMessage.ToolCall.ToolCallFunction(name=tool_name, arguments=""), - ) - tools_calls.append(tool_call) - - return tool_call - - for new_tool_call in new_tool_calls: - # get tool call - tool_call = 
get_tool_call(new_tool_call.function.name) - # update tool call - if new_tool_call.id: - tool_call.id = new_tool_call.id - if new_tool_call.type: - tool_call.type = new_tool_call.type - if new_tool_call.function.name: - tool_call.function.name = new_tool_call.function.name - if new_tool_call.function.arguments: - tool_call.function.arguments += new_tool_call.function.arguments - - for chunk in response.iter_lines(decode_unicode=True, delimiter="\n\n"): - if chunk: - # ignore sse comments - if chunk.startswith(":"): - continue - decoded_chunk = chunk.strip().lstrip("data: ").lstrip() - chunk_json = None - try: - chunk_json = json.loads(decoded_chunk) - # stream ended - except json.JSONDecodeError as e: - yield create_final_llm_result_chunk( - index=chunk_index + 1, - message=AssistantPromptMessage(content=""), - finish_reason="Non-JSON encountered.", - ) - break - if not chunk_json or len(chunk_json["choices"]) == 0: - continue - - choice = chunk_json["choices"][0] - finish_reason = chunk_json["choices"][0].get("finish_reason") - chunk_index += 1 - - if "delta" in choice: - delta = choice["delta"] - delta_content = delta.get("content") - - assistant_message_tool_calls = delta.get("tool_calls", None) - # assistant_message_function_call = delta.delta.function_call - - # extract tool calls from response - if assistant_message_tool_calls: - tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls) - increase_tool_call(tool_calls) - - if delta_content is None or delta_content == "": - continue - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage( - content=delta_content, tool_calls=tool_calls if assistant_message_tool_calls else [] - ) - - full_assistant_content += delta_content - elif "text" in choice: - choice_text = choice.get("text", "") - if choice_text == "": - continue - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=choice_text) - 
full_assistant_content += choice_text - else: - continue - - # check payload indicator for completion - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=chunk_index, - message=assistant_prompt_message, - ), - ) - - chunk_index += 1 - - if tools_calls: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=chunk_index, - message=AssistantPromptMessage(tool_calls=tools_calls, content=""), - ), - ) - - yield create_final_llm_result_chunk( - index=chunk_index, message=AssistantPromptMessage(content=""), finish_reason=finish_reason - ) diff --git a/api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-128k.yaml b/api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-128k.yaml deleted file mode 100644 index 59c0915ee9b610..00000000000000 --- a/api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-128k.yaml +++ /dev/null @@ -1,40 +0,0 @@ -model: moonshot-v1-128k -label: - zh_Hans: moonshot-v1-128k - en_US: moonshot-v1-128k -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 128000 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.06' - output: '0.06' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-32k.yaml b/api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-32k.yaml deleted file mode 100644 index 724f2aa5a29f96..00000000000000 --- 
a/api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-32k.yaml +++ /dev/null @@ -1,40 +0,0 @@ -model: moonshot-v1-32k -label: - zh_Hans: moonshot-v1-32k - en_US: moonshot-v1-32k -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 32000 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.024' - output: '0.024' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-8k.yaml b/api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-8k.yaml deleted file mode 100644 index 5872295bfad1ef..00000000000000 --- a/api/core/model_runtime/model_providers/moonshot/llm/moonshot-v1-8k.yaml +++ /dev/null @@ -1,40 +0,0 @@ -model: moonshot-v1-8k -label: - zh_Hans: moonshot-v1-8k - en_US: moonshot-v1-8k -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.012' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/moonshot/moonshot.py 
b/api/core/model_runtime/model_providers/moonshot/moonshot.py deleted file mode 100644 index 4995e235f54c69..00000000000000 --- a/api/core/model_runtime/model_providers/moonshot/moonshot.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class MoonshotProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - model_instance.validate_credentials(model="moonshot-v1-8k", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/moonshot/moonshot.yaml b/api/core/model_runtime/model_providers/moonshot/moonshot.yaml deleted file mode 100644 index 41e9c2e8086e83..00000000000000 --- a/api/core/model_runtime/model_providers/moonshot/moonshot.yaml +++ /dev/null @@ -1,89 +0,0 @@ -provider: moonshot -label: - zh_Hans: 月之暗面 - en_US: Moonshot -description: - en_US: Models provided by Moonshot, such as moonshot-v1-8k, moonshot-v1-32k, and moonshot-v1-128k. 
- zh_Hans: Moonshot 提供的模型,例如 moonshot-v1-8k、moonshot-v1-32k 和 moonshot-v1-128k。 -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.png -background: "#FFFFFF" -help: - title: - en_US: Get your API Key from Moonshot - zh_Hans: 从 Moonshot 获取 API Key - url: - en_US: https://platform.moonshot.cn/console/api-keys -supported_model_types: - - llm -configurate_methods: - - predefined-model - - customizable-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: endpoint_url - label: - en_US: API Base - type: text-input - required: false - placeholder: - zh_Hans: Base URL, 如:https://api.moonshot.cn/v1 - en_US: Base URL, e.g. https://api.moonshot.cn/v1 -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: context_size - label: - zh_Hans: 模型上下文长度 - en_US: Model context size - required: true - type: text-input - default: '4096' - placeholder: - zh_Hans: 在此输入您的模型上下文长度 - en_US: Enter your Model context size - - variable: max_tokens - label: - zh_Hans: 最大 token 上限 - en_US: Upper bound for max tokens - default: '4096' - type: text-input - - variable: function_calling_type - label: - en_US: Function calling - type: select - required: false - default: no_call - options: - - value: no_call - label: - en_US: Not supported - zh_Hans: 不支持 - - value: tool_call - label: - en_US: Tool Call - zh_Hans: Tool Call diff --git a/api/core/model_runtime/model_providers/nomic/__init__.py b/api/core/model_runtime/model_providers/nomic/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git 
a/api/core/model_runtime/model_providers/nomic/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/nomic/_assets/icon_l_en.svg deleted file mode 100644 index 6c4a1058ab9c70..00000000000000 --- a/api/core/model_runtime/model_providers/nomic/_assets/icon_l_en.svg +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/nomic/_assets/icon_s_en.png b/api/core/model_runtime/model_providers/nomic/_assets/icon_s_en.png deleted file mode 100644 index 3eba3b82bc1e3fcb6b27cd9038786e19b7c2adcf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 25814 zcmce8c{o*H`?qc#G9K{_ZDQ;ou+Q{u_24=Hr{Y2>&m?^_GR7 z`1k~x9X4&V0E2h$-VF~A9~c;DY;3HotYk14=gysriHW&(?OIh;)$`}i-@JKq`SRtr zZ{IdGHPzJAE zO=o9k5n*8~E32fWq>mpzDk>?l+3YiC&gkjsMMg#<2$GhTW^QgiciucYo&M<2qoJXp zg9i^vN=T@xsumU&E?&I2y1M$ri4!MJo@{S#-??+=yLayn95}#Yv0PnU@87?F<;s=$ z^XD5G8Ch6ZJbwIm=B!yGBO^zT9t{l*6%rDvudmnD)g2rh%*@P`m6c^OnGgg;M@Rel z`Ptdog@lBxSh2#!#>U;<-O|!hO-=32ojV^sd=M28xpe8$g9i^98XEfg`lO_!UcP)e zIy$PUsX1%*>_dkRJ$(4^#*G{Q{PRyjLPAPPijR-a_3PJ%hlkg!S<}|mrlhP~P*8C8 z?AdMGw#CNAmXwrqc6R#u`l_p|8yFarm6Zhq1UNZ4xwyEjTenV9Qu683r!6fl$;ruK zVPQHtI@`Bz-?eL3U|?WkVxpj+U`|fX#fum7^Yb@u+}P34v1-+-O`A5=*4DPRwq|5x zWM^lanwoCdut8ZxB`z+GLZJi)2XEfIIVdRT`0?YrckfP5PtVHAl2=ezzkdDfIdiP7 zttG_8U%YrBCMxRh?>|po{`KqEMMXs@ist3ztz5bC!i5XX&CR!N-I_CZE)2uHy}f(( z>{+^W>5?T&jvP56ARyr7<;Bm>udS_ZW@aWVuD=o3;-@29?6&g#{@wqZ_n*4oWq;^LcUAUq_vZa67j00{( zd|LNnJnF6&mE3uB#kT~0Q8V0OxAN+uAK4+ox=lZ58BR05T)cVjw9F2LrT!LD zX>kp=a?J==Mp5R|VYe1mOgKBFzV5g3KS^{{i%N47S~8fTH^;{476;!ilsPtej^)uL+RE-Z8X_a|`auUSr+RpEgcUro*5gR1BbyM%C}bPd9}i7dkv3LBv3 z(p(1E!tg>iTd=R;p7JESJNb<{GF)oe*j9|it;WKH=_kjPkBsQD40UI0f=Z;hWiHlG zpVsf=Lk2-*PinRsMtF0Kc4auSs_uW@M~~8tbGwEaERGP*VhvFAktA%*YW^oC=c4S2 zZwYULG+M@(g>aJn5=d6-^~EG#`o0;LFh!-&J=s+6(x|QWQ3nUST%lvItvfQox4cP# zcEum*QTfq7^TCY=vXX&O-laDOCLuQEF;%uuZKPU9-kM9;3)AT2d@7pm{Pn)<>t4iJQBZ%qb3N6Msewul zEKq$os%n>8D3b^O@^j4SWO8rD&x1ckjemdaLoX8b<$U_$BWl}!yZ)xFH57FT{p|Yc 
z+fPMjewNk5KBg{Oa%gRfujzTo&H`9>2Q(?^3*KGlZ{N!1GcZxQ4V@j?xkrbAY5((X zTwZQ*9Hy;(46d!N#yZ_mxHhO+h%G&8jikM})(_hZ&ieU00)h0F*>6R9%7;2e_ctPG z<}B_LWbLf)3#&D}>57>GbQc-t=3qe|j@!Z-bAS7U*5fuEn5Kp+LvSU+=~;C zX|{jU+Y^nx2jx0ljq9mLfOKr{O}KqO{=1?#+Aa&JJ#lexqneEny4+<@o%n7SdQ2I1MPc+FP%?dHra9eNp-Zmn zq)+^}JwGj(PGe(>H&1_KeouW>NrG<(w2sB4=a?Mu;%r}3dT{1rr=A5jV8_$YCw!|q zjs9B4r-LDB>sVbH>60xdu{#c*ki2yh58iI`M_DU-qpYA-_CGI9(s!*c%|#_T*(usZ;=8vxIv&WuB6P+b&$+uFBXf)4;ngAU`8F^0l?~mWreN6K zz4!)fc^di-_EZubKX-dkvCh5wNNr-0r;J?g4J_zs_-4KFEy-bvy-H&TD5k(N=a#=2 z{0!}{54ak84>67*Uc8POXs)I#Pg+W!5JC@ZS>U!(+`i!=_Ne;9tIy8+A1=L^eh-1n z7TJZOk+RqI-$fZZz4SVP-BJG7P^GT?WnZXyD_j2is|cxa2@lUQ?8jAAxSMw23R1i1 z_cyKnqB%iV81V6j_3rL=_Ald^hMCT@MlY$9x+`xy<;yWxcjXgO+c#RqSlIcVzRCjH zF8f<>weB9XR<=oJ&!T}ig|Sul+5YJB`8ndHaM$47D;A+)`+hiXWmbt(UDhm#ezgJm zQnp5ZrNaF^q1BjCX5rZqxM$3*aou-&726h9!Fu>2Ft{#&Sg}ZdJD?(Tvi{kddfJ6! z$b{uD-L%a^9L|ClQr*nbt)eEC~WNy$DhBk zt$#IaqjD_Nhvjlr_4`EzJQ0?p)xT72!yet>w3d%j?fM=O$h31N+=hV5W~Ry#iU|(UMXmH8tAD_4k{L zS(AEKtBlU-qrza@SZx1oEMr5Bv$46GMlDyv+9J~*pQmXSxQA7BZ@+l($2V0`+v&fL zuG{{!D`;+F9<|fMOK<1m1A2Nc(2`91zJv6#p*YozA>yHCWyx98NAt|4}zKp8mjo_X~d%Rv&P5xgw|i!K{Y1&LS5pCpPuFmJ70=tFn@hk2yZw&$5t7kl!1`j4)h`*iE0(z}13xRM3un?z|X zypGMxxB*-L^WmiTgc;;_K2X1`0w zg2N9z)3w`%!t?If*lTnhR}2>ylg*dc*bH5ZhRy%& z{xc}0_R6LU*zpfw#wDay>h{_}&*170+jm5}DU4oE+lxL_8C_(iacHrx?6~+<`(F?o zeSFE6m9;Lm>ef(cP|VFUmyonmeVG?H_D#a7Pd7kM1In@2GTbeIsa;V22wHby?hw?+ z`JS;5{WdB3QI{1xIq&NitCX*NnKQ;*im0*kcS3ujp(jAxC9vQ%% z`Q9A4D&;p?(mDOUvJY?7j^50cNU^{v2<5 zFqm_)o&BWwd3Sc1KrkvY1VX&P)z1VHZ;2qygG8#Xi3-ty&X{VyZ|>%PSj+& zc)0&Q#j}XChg&zY4=S_>!k&e!Yb$Sq&|-d`t-Y4uaW-3V2$^4uC@ zSxQ?ILKRlq42^~t#}vRi>cNlRMsk`5eeUjU@zz&BBv;ARn9&s(f-XD@9?*B!?mARF?xYpsc0Y!0bMLkd8x#qeZek4Hd_P{91k9*y@i zSzPse`IsHzV)r`PT7a@6$7XzZ%S}$Ve(R<4+F!?mX&A+dzPhh1yd`JO*xlZ*p4dMN zeK>~)jo!&?WMK*U5sLfP9T$_gy7G05Iqy()sxFdt4>3P(XmG4_0lloNhk3;^3;R)# z9&movW@v){GW%eg#Q67!-=2>iH?**|Kc9x8(F-@@FBieD_zwk@c?ZsJ{&*Gf>$nY|j3H0o_sz~4%3m!Un)BDuV$XQ;P=%`X=s*~o##mn-zLsD=J8B; 
zei0uh+FgqWT-g9GWIyn>h9<+pPH3%1-hBJGcq?@CG!z07a{SEvC20ES zti9^3lu{?d%ZTKv-bQ8A8oFp?>&-cqsU{gBzCjJuZD@Bb7;t@0F0~Od{2{dS4D<1D zhwTk)zBG3wK#7z^c3Y7zlZ$Wv5058q+9#aa0Nr-&d+Vdq?5`6H;PB?x`nmA0=cd+k z)zHt!0+WhhT0PtgFp$;RwiK-R&RG+=x9YoFDN%mtLxoZFa;>m;{EYnNsLM#wUVehu zZlvc#lB|17@A3`Md7}f~oHdyXB@+n7JK1zN{_?ujgH^Yj6EOKhNGltg+NlKo)p#{x zLC9)Iw3V{J&Ka4wYJWkPB+r*_Z&#PN7r#=2At*k(iaejTAyQ zvEFp)Oid5o9GwP#op6$+Ja|!9)_HY;qf%I{9{x#kbP2Qje0Jq&LN<}uKT_N=Qe++o zRAhfqmr$bwbDl?asu`h1#WvU9CYf4aZYg-_pt_pPu4e{Z?~&5Cg*?CSy&TV5YMm5U z038&X8|p*nIL|;n(jH)N~v%%v_KAr z87Vw?xac!J>pgRjo4Uq3zN;v0J{p#sbTg_nDTuFZ}mg#4Zv zTx@?S#6;={cyYFO=@0)>FH)BI@Z+%iUvsGYOAkI}IziP=I2hCfYh}E;mPlB@KBS1+ zyG)T#$#d`0yV>@N-tpZW1S+YT8oA?YQ!QC_k#b~(pa1T zF!0OPG=#8+b1Wx%wbL5X9@^P{nI>I)CBUX1B1Hzh%SfAE?nB?1X+p^L-;X)-d5_;A zMXLZCS9jxVY=-QTln7M^pdE^%b4Zt&%c_dDr|=$6X1(y|Aa1iYa1+?o?i#)?)nr02 z9qbEM((hfSO2!!eIoRltsmE^q z7woQul5!}759}TzJW?L?lU8YbTv%z)upAdeAvM!{LA=l!8%Rov87v7#4(jqo;9qwr z~Q5xB&MZaqO7kR@_>O{n{daQ&xR zxGP+Tw~l-vRHKHN-`E*9EBO4o^DFT=HUc2oQ+ON6QPmDzuBYy0M(<6*PCWRWwE>#^ z@{F8FLnbm?x>iI(bzYqC9QgF>qnq>KsOMjl`Hv)H@-)O;8>IY%7WRc!SMx)+HQU(8 zdy;xJG+k=<1Eo>*Y&a>GH!uJBpGD2bM(ds!$Ey_2w}I9YYv%JdbjMR6-UKAq%b4WF zoJ)AZD+9LJ<)Ezdl-c#FaZQseZL4=Ye;&Mn?1QbE{WxYF3j3uKiWg~f%^|yOis#-~ zT*G8oF9;2TmQuKt8*7<{KfSAE^RFrmlts)_@#W2HXY1ANOWiEsCbBRj@6Rap;MC~z zx_ymX1$1N=-n#W?^s=hrElTq?#DmXA=Y_v4p2(#ie$H}$CVOH@mKlhoUaufXJ_K8! 
z^ZAGrlLKm@FnSFbZRs^5*NZ+)$NUH)qkMFp`pc^D*4YM?>XO7q`N*F2`@QJV2a|o5 zoDj*Gk7Kr=lc|R?aw9T;UF?8<8~j-F$X`|-o$Pses_sq>tOk_Qk@2mG!L+*j&OCy} zRq0>%dw{YMnRxLsx$ab}Yl4yMY+w3~1Eg>5fZ_$_{gA~qn(g5vFmEHFonpB3<$J=g zBUvCWJpWl%)!aQzf`*NHsLo!5nZG@!!Q49IefvEwy zY!7L$)!2L~?kApB&w+l(6MZ7XO5XqplZrTQG|1JFLtD}?1NUJGsbc)+Fpps$i6Z>XE1xP$-~S@6et_~aim z1O-p`N)i&B!+d19TX=T6qB-NkhbAJXFTr!A4IVjwMck)BwL9PMoa3d5>TYOpMaiy` zuV&ml5u~<_x6%bHPBjb-irZPG)c5PV#XP}@3%FN;xn|5~q!e=Cn24Ck7y z5YZ;w7QB$hi*5_OIg9%Lxr^%=@^0yiCNvyEpV`7SAnLw6G~bAuj|GYe;Gft7?Wn z{McV7kSB>*sKPI`jnK2&K8x#^k2KyHAbh#3gfQ_Hot>{o6dhQb6h(&~{?(pX9%O-`SfS9+^Jro0!G86s`Ib@qe^TRn* zk<|$FZD>(tJ=LAb{aEN5L0V9xm+Uyd(R1Dgao7vt&K)v%NV~Ol9=|UU6C;@1mX@v^ zDm<^whLa;!rPRY$c?7DVVItKHQwzMDKJr{ZyQXh3NqhJNqt>7PJ+*O;)bBP#9Rm zEY1O*^*(85x7Mvp!=sb)vAQ+xmkGCKU@}75wNK_Z#n2W2x#eL;>)~fSeb7)9(cK1* z=aW*uf!#4ZcKP`Xp-iQ|q2Z{`Kb*Hy#!g0jSl?4%+sMn%0v(jGL&byAnteB7j`>?3x@}|fh9s*H){*Clyo;TiJugL;8_~kf zjdBv;DZZ03+`K^zh)B&1dL`_bLYRE8sE)8Bk;VCa;{&}GHiHT)p%gBPNY2F(UvNi% zZrw&AMY^NFvQ1jm_3Gmt*qQ*GOe9YIvUR^)8iGS`O0jio0|4c}_l#9bM;;py+ zqQ-fA`nW*7n}VyIa7pe0aPR!#gGvdWja`~xYKF$#4a#uO z{o%0%@c8y#Ze#=nY`+d2m*zhGJ2a*jvdvqK5lxi%4N2IpHt=XpZig5Ht_HtyP$x{#Qcm_vPZEs-V(Fn<2jI(e1V*i9pUn2uDHTu7UG6%nzcf%c^#15bvP>W+iKj_yS_!QB1{LMql5jg zXGsY1F9$g!7x@CpXxzzDULOTpyiE`F+M~t*y+Fne$czI2a#FZ;d{vF#;Fl&6EZuv2%-xhu(4L6Sa$ZMWM$vE4uO+51X2ZsiRinbjWW=@ zfe}TlZ(0wx;xTTEvQ8^AT3Im!+Nnki?S{1fJ7hIAM5%{Y)1K=baZJD!Z2|CA=F~G2dRRo~woRandwiaVQH$IoBmPS}kM}g^ShQgTZm$>v2RWsKemW4g)HnRup~ubNm5L%Fyp~UjzdM{xjsDm=Nh>J zl(t$wAUBIf6ZDRcjKs7BH|9ul_w&!u8P--O@cVjHJt|Q&Mu1-$>?49ysE#J%AQG1t z2*|Oa|7=!cL^2=Ot%K_PIgA_!(O7Mxe?ZOyg_)!%nyjKIt`BTb4Kb##mmVgHm*-~` z2l-c#^6bc%AcY=z3i~D2UTvh__K*gf#TAg7Z$s6iErU!`J!aFw0DY&h1oXC^dJ^8% z^IZ}*uMm$T&qB6+pC9pqXdwktOCD7*y?&aI-xknd-ksq#Q>*~=;Q&aaN6fDn z*noE$aQ2US(X%hm@U?-`r7h=lD{7t~e7!UKoFCupL>#+=!yz_L%!`okbpyQddS9y@X~@sDVI>2u5a$4}5`A%OH)Oe-2%JT-(%{cUKjAOp01>nx#o zzU|Ef;NQD<4r%7#tm|HHiJF{)E8^jvO>G>=c*0{^8-eI+)$oZRJ{4v+j;ySaScBJ8 zwdZdHEG^R^5&}Sp<1LbyrkD_(@8rEXFzs@;gW6uE=+CHSgkF!Jt5p@nw}Cd3JUAR= 
zJj-xkP$M@M#cMC%4FkzjE~|fg0@J&*F`)hl&IOi%&zA*)aP$O@*K?l0 zwI>oTaT*H+77Nm`R58+c)CW>@YBQe%K_8Ia!74cBHxtn-a6K5-tiqrEPc znoVH1`Q{)zFUI)W*YZ6C%H5AtD2!gFe%eIrat6!0BObA69GC6P6_yu`8b|Js#%2ZS2>4POrG?r-4^#~iw3q#k4sFaP5MaJmSf*S*60U>kj>hF^a*+3aQb3TnYi)eOG@r2swzJ~~qiMiA!z(Ffe zt2vr1%N^lqyrGq?*3%dbB{hA66ojIAP?^4 z(O`on%W$ReY7M03hPI41ykJp?#Zg>}e5*yzV{w+*BwzMW9N;S%ESiT*2s!wD$BimX zAa*`s&n9>zp(M@|D*)X!8ncSBYh)ecu)1StE>t=*1lj55I74k()M9i~tnpz8ik{iqxfcZ_^=I3U51!{hbToyWlqt9cQ z-PuE{90=GR;|K7@TuRCeG0kNl%yqf4{hN;Ql;%g|o^*aADMm5i$`#aaE}tga=7xoT z&2n3qOQ!EiV9d>5^aS|#0lUMHMi3}?*>T`Hi~~%3dk77iQE>G2#Z?*tzIQHNTt~!_ zanSD7pGg8ZZhp?WE`d4u1nOs5FZm_3?IMwSKrtZXxL-)%IuOkWD^;&o5AXluG1d)7 z#!dO6#E>$xYLP?07;f+fF382PfAKS_wp0&~kjUv25f*Jknw9ckKFX#kT58`KZIM4=jRa-PJ2=ZG1 z3bm05C5I!QaT!c5Uv+|*dzF=X9C#M!?Sh)9p*>57&*Bw4^f6e_>28AJzy?MEQM@if zCLRste&7W~ZKPIQv*svnC|g0H3O8t`v7SFLG*vBwdHRZIN`?ig@&A!#e!iA4lVe+UAKkp@+>`)KnlK zeByfOgKDFRRKbtRQ(bqYs|_ZrQzj#?6CMUij5lP3SO=;)e-1}dXL4f5ri~XHHCr$J-)W&Ipmp~8>dOj2` z&4V82n1hFa(7{Xnxzr=ogZK#~uC2>hM0OdrL5e^mN4+`AXno_H>XL}zI~Q8q2$$H0 z95&LQSpfR0pIX_vV7PR}nBdV(mf=Kby6BCArYI(jW&yZN#hM2)+IW$29|DQ*fRFD| zV`Kqaw}oEfR)D6qx$>4ZKfbYPlO*m`AanNU=6{N!4U{li*ioy|U?B4cpP8D66iZz(`SqEMmZ5rf!>36_K{~Q06sVUqp?s}KmvHT9NL$$T_MC|`PiFa zA4hIa;QH5gcxx$LKt^7o-Hai3%_iHrcp*uYd}Jv-U*-S5Q=UW`nxaBwEl=N1&4riizL zR`gLHjwqP@De4)Uw6D&LZ@lE4k2zXau0D~h2NA_^)()r>_})8EUSuG8@?_5jlo#NH zVka%~B8|2hsJjSbpB$-3GJID_)W;S#u^U1XBNyewG(jU8RKNiTKR-?g3mU8F?bHx_ zNYv<>os@u4@279nA_D-uPAD}VEHHT3`M;aqK!#l+{FqUa<;LXDZs-v&7Tf{1TDrYN zyY>Z>yWYai17vy~h%S`H)!Un`Qk>AYlMvlITy!Qe@A9Xohym)6q~p zfOGNEHULG1wpGt0U-|A+>QCNVz+K}OC<5ERe5TP>0ah!pU*|`TXokO_YvI91G7g(5 zLvDJNRi#zlc1oT^!0X@1KGt9tUG93^gV2?{R}%)(_$A;CJ0aLMu|#H1 z+lf1bB39rHj)lLX4e?QcfwP6bnZ^1Aie+jKO=vo};iO6mqUx$A5k(MlaZUPiLiZM# zZ}>i|8(ihaPaGK39$;Pn0IAYZ^c0v2&G|~UW2~X?K*_p*iz-awG_;79sdsoaedYO& z3sh=_tw}i9WNu5M_PK~2U;} z#8{wvm=>q$O;j=9jKf8`eTOJO#TP(m_AL$OEAQr4c4#LXC4blc)WsG}crMc~XyHP( zF|w$QJP!#+sWE;*d`)u$2_C=+gceZpMxbOl6yDa(uMj8o`eW7&9ex#}WCb;(7DG%? 
zOcPN2smt}fe<^;Ap9K^(3stvh&Xi+ZWIm2yz;yN6^hfrMS2hyo1M8uD0Si|?0D1uv zLTE|sNRXm$>ZoKy2XfVrG-u@ProDHQj4GJ#c7s+LPL3r~frl%w9m9*W0Us=%K8X9z z90?&%fyp~CyCw|E2gE~btKaNq#PDXDn+n#Lf>;U`==b8x-djJ*D0lk!pVj-l=ySl} zj%kDI(LKPUPXW<`k8a}qjtYFbtYv;#@(5Ai-`@!B=lS~?%yB6_3Eb{bMZY-;ARCAy ztz2pau#`2x+Am=26C+1w(3j9P367VcC4%jd6cHIvxXLFmqeIgnU8jqN7l{sDGM1q5 z{M-gLMjSAQ9N0HOMd$UEvD2jLS~I$V>QsR0tRa}I?(EtyA@zeiN!kQCPEixIawy8A z=+ITV;@2MWA)>W+>M~Xg!U4GDDzyXYk?tN_7o;~toU}^n0W2$_9vHC_keY%8t*^YD zpdvlBYCPX5ipWR4eLx6W=preu77w4SXk%}oU4tXn&=deUwh-MCGkKJ@9Gs(pM&*5y z8hp|jETz)xh+z4k?!hvzD*fgSM5HiA6?jT-W(FJMgH_;Y5D#1A$TlFEW}wbFc+5Fh-+3|0kWy`HVMgk1(9wKEHZ&eugQ1zcocqZS6#c*wSfq{a?P z6-FL&zir(E|V*w8X%K(%|L>N70?E3xtv7Vwi}}@>SQx9Lzz8 z=wQn%a7NubHu7ef-`V@Mpsj?<1rJYdVtL|t63nG_vMbYLD_OpOy88+|A`@6u`#rN)p^79ij*tj&p*et*y=X~i zx7Vs#OVaH+GVTGl1D7KCcS7m))OsF}@?%w*IvjZL>TQGsFuL_nh4jRM))voM^`;en z&b5o+t&u(p0Pjly?=KN3E4yjd=`d1lMbs#uHhgL&_CDzAW#5-%W_yGz1IQ4s@Z5b% zx!uhY*ffPgUJkMDOF?DqCbOV;Xn#%uPT0X-ItPs5{RuEOk)U0h_TRB$&?f|AZ3)`Om#2@xeC@`p zB0kW)$4jh@Jt-Xdmv2=nZEpyX?dMZ0%u+RS)$2O~)qv`wixKy+Co52MKxlAdkN>{h`vtph#l3BkQbmI1J^1GFRy7;xm@F2ggH zp`_+Z76l)nF}b>LGGwKPJ|`TCkLR5)-ro%k^9h%Z?vT|2oU}Ky0|bw0dG?y~R_#&M zdEf$i5X}T7oCeM&ztkwT3Wl3PSlB@ z>8~PkQ5n?TWuR^4U=68ewD;Fz(uB|k7;rfd+Cy;Gu@Onr?>)>rjk|9SSxjZU&fp>A z&#PIPhYIeR@?wAq)9DCU&=D+X13`8Lck|7|2);qbU@hbQyZX6;zQDd%OhahhWVg&W z0^m!sc))fD`6qdp~2K5s{SRtGbnUXoKqe40fpNC z*|5Eh{k=l##u46tDmsT3`9P`hEdSf9$LD_c<$vGDyDjpm#TCRka5Nmh)&hk+i@!sm z=SZflpu3{iWp+vwgGzW?JKNObA@}nVax+yloo^ew|I-%tTwp|5*j(k{J>d?IWYlEJK6d!}t|qyzglR3G<$>Vg+~IMuJy&kY|`l*cfncT#$0;WJ@9U z>=&QRp=}fKJSzQLU4H>8*8wVR(dXfxY99*@4-$tDk{YOr@I{r|Gk1Whx}Xrwc%w}U z3-m2MnCeLH7T3b*?_l@*pnqAqoRF0UdVueuN+>WAP$)ZWhd85mdy1HZ(_LkzhLg`h z6VGq6IjKI2;psHe8sbBIu z0fcS~>F}MhS1FVE&$}E7co%rsK9D`UJ8xP05S1|2+Np$57f_ z--Cn*&OHCFOt!I$K!$0hoN~_)pZi$S45jjKqm)VQ042{9UEjSq>lkC1c{+lQVBska zjw3y)1t$FNnX;LQvjfu?1G$dQ@ncltEM+XYwVltrrm0Uhn4oK?+COk!224bvQKs_*{ z4BWI(LpO^NjrC9MY-I+<1k+a$2x3`4)}`|Kyx=8=hG|q_duXUPeT94M|6|7@EC3G% 
z(|5dq7|+l&4U{ee*9oR}1X(Vz_qD9RL!HcWksSa_f-?;%zfBRkt0$*WljWG~@B*h%tV?1^N-5<`}|R zu>G8N(Wl`n1HlRQUqdK?@i+1A*^~=aVRt0kGnstV1kjt+Wlxfnr^Y0PRhoRD<#d(WO(fA9o!O57uh8p;YhZG3lQw2nkUVa`hIf1nT7XkNX zr<-P2Rq?|sw3vrZg5ZkHU*?6tAXc-eBfvm7ORCg^=RI5fF2lP{FRzoIi_QdP(QRliy?FL6kQv)lQRZ7hs zEf4Eh(q{02^ zzjOfC_4H1vk68Q$=m&ovG^HPbba8-}E*M^*13&hAuB!xIDon)%Bg6)@?W;kK<9(Y*94<^5e;js4;Kfrq)e>y|}vGeBK*bEWH=XYhQH%+6E;my%xwEP^7Xq=wo zuV6CZRZy?u?*gP^1Z+P}kZ1e0kP=_)d)XP0|C$1NnwO6V$i(($Yv89!3;ra-es5Xa znKw?`v)`Mez}RY!EWkcm{>iuZ+Snen{J~u6Kcxpjiaa&z;q14c$Q%Dh5$AbB%wqh@ zt=K=12k@iew~IgBRp(!usrkqPr0_4F1pOUPM)JQVpD2K-k&eW7WoIx7u%vfuc_-I$ z;5*-TZUO!&+Vriu{B}_#m^H5W$7@p*uWhG*TFQSqXt>u-@%MT=NPI9h<+z*B1L9vV z;CHpSZptaplT63YhPzsf*Qgo1*=-{LlHOtNl8-bw#{A<;?{MMDY@t zo;)2`;0RxQ`V)Uvuxj%GePH41A+vrUO;Mh~g1~QBaZriB&vJT@1h1Qtum8!X!cNfP zh=bYFqewM4cO5bS-y;6?#vnxn7-WQjJ^#-2ZZtl?5)ewWCAOAe)GYF z)7@?-)X@EG!Sr1kkTglA61dwrE&R~U>W|ZE6Q}Rvhw?{T#sQQBJi!x$-Jeptuq17uL4Tp{K&Yz>4G_fvG*c}4lq0nr_0$55*1Xboi69^ zh_UR@O8|C3@*W2QRI_R)j#=+@xsDSo|FHxMVKiNUIWirPf;5zVpwTlTC4KtNbHD{E zn^)5{w|OlTEV$fw`nF5GO20R6AUcVvhq5k>E#UFi-t79}U)zF+Y^KEhsn=WqZlq2z zoOK%07C9CKQ0!D52d$#hv?lQR_*5E;QAD8GL)}1;fVT?3n~5X|Ouzu>PEFSva0l<< zye58Iy8`kXnJEHOkXP5CIzQH%H}}ueePe%Rq!jG(>LDM$_}ITT$@gKG7e0FG^bBngf5q4 zs*t_<%V3wQ;#0f4DyJJOc^S|CL)4{1DMJ3pu|;T~5HSOI?usau)1l#HyeR}eA_foC z(2keKmZM>kL&j3XytPQt&2PJq9?|I!0ztG29GaNJ;H)6RhvNV+|2e=!C<|UlwX=7V z&s!ixn*PuXjPq$@o7HocyQpvL*|ja9Lv}bA)pKFMm{2MUr>`G;`WFr5Uv||XQ@Sg`glG7gmLF9jDzA)t(YXauu2WMd!0=r~MKm7AR zgm?gApa?(%2l)UR(0%=E6Ivq8q=3VXKXSJ~)A^@A0K`{)XPU}-QRV*9bPgGY=L}KB z{yw>a7z+~*fH_0JoNwXxiGZ-oSyc<0A-TVTUB%)GGPr7l-4d#qfrho^2XYSn>!GiY z4tKh~t|L7ZFH^q1`&~yS^qNEYz`Ee>0tmM!Oa?dO;wwaI<)^<0ghR)?`-b~O^i?DN za=9I)O8n;mXl9U>zX4iR>FNYE@%QDE7xFbh1ssik+aZ5we=&AYsK%beh`3-?TG(Nx zC4+~k&0+YVqIW!DJ%Oy?HUD`~rSqQ$slm930PS;txW=gG4yvIUGN3TMxa$vn>QNJ@|kh{G!T4 z#ILPnc*%hkol8r=XN~`PurKmI54vf;3(3IH>Ka?J0O`4x|4n{yuQD!15cR1*Y7JY% zdF!*IBlxzGag*5K1}w|RpGU$Ff=3r<;=d0b^G|;x&Wp1ovs3btq|l`kgxszGuPMeO 
z4N&_Nr14LZCCvQ}mhjx3?CJ3SjS0@&^sfg&qSGG$Jyx2Q%nJ}paK}$vd3yZDs#-uk zSP$6)D;pCc)Z}Rij(~jB?l1gn#e!a(RN6`Fzw!`mX5akR10bLW`QMCW`DQHst6B;T zDiS0f0EsOF6073bStW)E%-=+IRB#Tc5!f-h(8^7JU7C6V`1te(fCY`PT{^F=)#}&3 zM0#Wzn31+c(h-h5;0Iw&;lLZK6n-7rQqc98RvI09%vSvb)+w@b)5#LpCUTQ;Hxm)} z3M{lO+KhuoZ%!NSs%Pvm6~3=)o)vV>p6|txB9@(SQfpgdP}0f2YqAKu3oc-#<50h4 zo}LqQSN+aM@VhjTc0#C15-1Fvp$6l_bJ84(9(3O!_D>Ody?bw)PdKAP$8VRdU<=#8 z4xabfQSE`yOT_%Wilv!mL+y)k8@)FK39=!k&tnZrS0 zuT8+O_k^gAR(P+9l|!5b#dP^AF}Y>oeR&=6k&Zc;*6X3h!vMa8UUfFf1mD+%e)33j zr1S4mm<&IrsLl*Fl>0{jTqgYz_v_f)bUVv6i4<=wP%GuZ9T7&8Nw&KtufLWAMQ}9W zED6*XIJwz(Pg`hgC>wM>qDWLF7(=Ha?Pq zp5?@?90CX7pyi9MRn42owa8XVv~w#T(vOe_mzZt|$GH2--m;qft?PBtc|J;ngCsL2K89pe6wOSRTAL|0NQ|mY7s&oVXqFbsP#&6$9KQmhUK6E-M92|66G6 zEBJjU!AaFFv`|Tx8LV_f_R%$8PG;gZkyqP#XMXsSF=h&4_bD%ALNOu#?!J)Bwgl7P zyV;&A&S2V~eeRq|ON7eAYOvAuTasl>;!gf5KYZcdX^=7{T9{Q@V1~_S!RRvXkMz05 zitDA&Z|wNqlZS?o9=UVde^&JhUNaDN9udRAyDh}!3;8iwva!dV^G$krkp3C)jfrH9 zq~tYUS!iUkNTT+(RNy%4NgSwmz}j6C%5nwYyQm{D`&Hw9n>O(*>QHI-n`TxQq;hdM zvV3u7UEbda$|U(;H%-|vV8#hB{*{}+{0hEf_c&Z+}rpI@t4Px z_J#Xa*v8@v*g_x%+{VCfEp;-&E8pE%Jj()Vh6dELq^{i#$F%$CgoewKl0^Ur*)!G(X# zD7!QGYIc?v-SeyK*CH>@PW_(6JKvd|O8H%I j#hd$5Bz3DL**g^>vTwt4UwW3_V z9g*2R>O21oCZBnk?{}ph__eriGFJ8I<;}{+$~)xER=s-Z0Cez~87+G?fIZxBZ>{9Z z-cXO2v*0_c{@Tq}5a(1|?(f1t-_T*p!7ZS?oh{(QQBZ3Ypjhk3bd8#M+^w)!-@?8Q zx`v(2uJ?D|iC>g*TuKMUI!5@(mobju<+%wzhOfz#%t=L`l?M(cq33a-SMsFoeZR;`@M#t*)#b`jq7@u+jXr)bhU8noYUdfC4+(5|EITW|A#6K<7A_< zMN8STjYJ}3Q#x5O)+LqLO-!G48J9_oq*+~D<}hw;YE?t`*v6tr%0x4U#v!6wng|(& z&QOf4=`_TgL&MCz{)zqheLm+rpXc*F?{oXU-*?)hna(B5r>}uTes#IS+xpwSROg31 zOoLCkjvGU@i0iH~Y|p*etrRW8SjVTR9C1RYlp|3-B~K^z8e;J*)Gb{av4``)a<(dY z^L1+Ciz|)-u2SLrM<0Xb1v{H{b48pd4S?&BDbIq>&Quiy9 zxEaxyTGi#ZC1=MNf^3(#ENigg^TaODWDsvC&Hmy+|9RIAgb~y9&||e^U+5xE$ztqr z!fsvu66j8&L@+2|JqWf5_pO%ce=hLk5vvGe+7kok@=^rdw~!mXiczWv0x}dZ?r2Kc zu0BqPz%(J@;Qq!^e8}g=;7P2qqj@@=&Q9Runa9op4`lI6a$hcN*5fY#M9Wh(fjy+- z!X16tjR0`(5sE#?A)jr%V!sI1zm2?otqRx&BQ$+#M~YiuWEpVyT8S_7UELZzd%wQm 
zwb+_W^hUk}R?;RGc!*PUJ5KTLj1R?s>jfX)oH@mGDTYc*@h<-{1dUnm5UV@T+iz8jT>j`~`_2;7NvKD4LkZ=HaF^=O+pbeLIBk+F<^y`X(X2&pT^fdvME4;m5cJ9mrWO(yMw5T28?}RWT3{D*_2)NHjWQj zVR`hOFw(8j@)G_1?D-1Nz4!KgWO=f1K@1mCHMUbHGiHfTk^unywv@c4GQ!xzR|gh_ zW?RJ*=$dNl*~sj$;h}Mwc@DdHT>F{V?lP9vNFR! zJEKpLFEI^H2U0ee689*)bb7+~bD46bkuQvth!Jv5Z4Mc)^Ow zHI7+{^Mk=Njw+FR#CgqcLdhnOITq#)tndf&H@E3!GgybP3wEIA+L@0tk);B?b!=Ix zw7gailx6Q*>z36;YJ->^Uy`*%synCt*|dtx2fY}dB3GbmHp_64YoqW4*N;Nxsh-8p{~bnJJUYSp)SZ{xD=Au)hy9vq(sbd!`!dQlMcj(4<=<&jVmPYjcDH$22;RV`KF{;)-h%9}ACh=h`Y=U2n0 z7aOK6V4Iza!(P(XN@z!9_al<4lZHPCQkTJ|^ z=W+^HPlQM{akHN={{93IH0WXQB)I8G@`J^C?fY8_Ny2y)L{ekrGf=7~t^Rn4K5e#! z47xqdMwX+v|J) diff --git a/api/core/model_runtime/model_providers/nomic/_common.py b/api/core/model_runtime/model_providers/nomic/_common.py deleted file mode 100644 index 406577dcd7e701..00000000000000 --- a/api/core/model_runtime/model_providers/nomic/_common.py +++ /dev/null @@ -1,28 +0,0 @@ -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) - - -class _CommonNomic: - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [KeyError, InvokeBadRequestError], - } diff --git a/api/core/model_runtime/model_providers/nomic/nomic.py b/api/core/model_runtime/model_providers/nomic/nomic.py deleted file mode 100644 index d4e5da2e98ec97..00000000000000 --- a/api/core/model_runtime/model_providers/nomic/nomic.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class NomicAtlasProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.TEXT_EMBEDDING) - model_instance.validate_credentials(model="nomic-embed-text-v1.5", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/nomic/nomic.yaml b/api/core/model_runtime/model_providers/nomic/nomic.yaml deleted file mode 100644 index 60dcf1facb475d..00000000000000 --- a/api/core/model_runtime/model_providers/nomic/nomic.yaml +++ /dev/null @@ -1,29 +0,0 @@ -provider: nomic -label: - zh_Hans: Nomic Atlas - en_US: Nomic Atlas -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.svg -background: "#EFF1FE" -help: - title: - en_US: Get your API key from Nomic Atlas - zh_Hans: 从Nomic Atlas获取 API Key - url: - en_US: https://atlas.nomic.ai/data -supported_model_types: - - text-embedding -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: nomic_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/nomic/text_embedding/__init__.py b/api/core/model_runtime/model_providers/nomic/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/nomic/text_embedding/nomic-embed-text-v1.5.yaml b/api/core/model_runtime/model_providers/nomic/text_embedding/nomic-embed-text-v1.5.yaml deleted file mode 100644 index 111452df579f8f..00000000000000 --- a/api/core/model_runtime/model_providers/nomic/text_embedding/nomic-embed-text-v1.5.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: nomic-embed-text-v1.5 -model_type: text-embedding -model_properties: - context_size: 8192 -pricing: - input: "0.1" - unit: "0.000001" - currency: USD 
diff --git a/api/core/model_runtime/model_providers/nomic/text_embedding/nomic-embed-text-v1.yaml b/api/core/model_runtime/model_providers/nomic/text_embedding/nomic-embed-text-v1.yaml deleted file mode 100644 index ac59f106ed2928..00000000000000 --- a/api/core/model_runtime/model_providers/nomic/text_embedding/nomic-embed-text-v1.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: nomic-embed-text-v1 -model_type: text-embedding -model_properties: - context_size: 8192 -pricing: - input: "0.1" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/novita/_assets/icon_l_en.svg deleted file mode 100644 index 5c92cdbc6d8466..00000000000000 --- a/api/core/model_runtime/model_providers/novita/_assets/icon_l_en.svg +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/novita/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/novita/_assets/icon_s_en.svg deleted file mode 100644 index 798c1d63485221..00000000000000 --- a/api/core/model_runtime/model_providers/novita/_assets/icon_s_en.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/novita/llm/Nous-Hermes-2-Mixtral-8x7B-DPO.yaml b/api/core/model_runtime/model_providers/novita/llm/Nous-Hermes-2-Mixtral-8x7B-DPO.yaml deleted file mode 100644 index 7ff30458e2ff0e..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/Nous-Hermes-2-Mixtral-8x7B-DPO.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: Nous-Hermes-2-Mixtral-8x7B-DPO -label: - zh_Hans: Nous-Hermes-2-Mixtral-8x7B-DPO - en_US: Nous-Hermes-2-Mixtral-8x7B-DPO -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: 
max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.0027' - output: '0.0027' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/airoboros-l2-70b.yaml b/api/core/model_runtime/model_providers/novita/llm/airoboros-l2-70b.yaml deleted file mode 100644 index b5994184613141..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/airoboros-l2-70b.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: jondurbin/airoboros-l2-70b -label: - zh_Hans: jondurbin/airoboros-l2-70b - en_US: jondurbin/airoboros-l2-70b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.005' - output: '0.005' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/dolphin-mixtral-8x22b.yaml b/api/core/model_runtime/model_providers/novita/llm/dolphin-mixtral-8x22b.yaml deleted file mode 100644 index 72a181f5d37193..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/dolphin-mixtral-8x22b.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: cognitivecomputations/dolphin-mixtral-8x22b -label: - zh_Hans: cognitivecomputations/dolphin-mixtral-8x22b - en_US: cognitivecomputations/dolphin-mixtral-8x22b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - 
context_size: 16000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.009' - output: '0.009' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/gemma-2-9b-it.yaml b/api/core/model_runtime/model_providers/novita/llm/gemma-2-9b-it.yaml deleted file mode 100644 index d1749bc882bfea..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/gemma-2-9b-it.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: google/gemma-2-9b-it -label: - zh_Hans: google/gemma-2-9b-it - en_US: google/gemma-2-9b-it -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.0008' - output: '0.0008' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/hermes-2-pro-llama-3-8b.yaml b/api/core/model_runtime/model_providers/novita/llm/hermes-2-pro-llama-3-8b.yaml deleted file mode 100644 index 8b3228e56a6726..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/hermes-2-pro-llama-3-8b.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: nousresearch/hermes-2-pro-llama-3-8b -label: - zh_Hans: 
nousresearch/hermes-2-pro-llama-3-8b - en_US: nousresearch/hermes-2-pro-llama-3-8b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.0014' - output: '0.0014' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/l3-70b-euryale-v2.1.yaml b/api/core/model_runtime/model_providers/novita/llm/l3-70b-euryale-v2.1.yaml deleted file mode 100644 index 5e27941c529e8f..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/l3-70b-euryale-v2.1.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: sao10k/l3-70b-euryale-v2.1 -label: - zh_Hans: sao10k/l3-70b-euryale-v2.1 - en_US: sao10k/l3-70b-euryale-v2.1 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 16000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.0148' - output: '0.0148' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/llama-3-70b-instruct.yaml b/api/core/model_runtime/model_providers/novita/llm/llama-3-70b-instruct.yaml deleted file mode 100644 index 
39709e10639b69..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/llama-3-70b-instruct.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: meta-llama/llama-3-70b-instruct -label: - zh_Hans: meta-llama/llama-3-70b-instruct - en_US: meta-llama/llama-3-70b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.0051' - output: '0.0074' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/llama-3-8b-instruct.yaml b/api/core/model_runtime/model_providers/novita/llm/llama-3-8b-instruct.yaml deleted file mode 100644 index 9b5e5df4d071fe..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/llama-3-8b-instruct.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: meta-llama/llama-3-8b-instruct -label: - zh_Hans: meta-llama/llama-3-8b-instruct - en_US: meta-llama/llama-3-8b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.00063' - output: '0.00063' - unit: '0.0001' - currency: USD diff --git 
a/api/core/model_runtime/model_providers/novita/llm/llama-3.1-405b-instruct.yaml b/api/core/model_runtime/model_providers/novita/llm/llama-3.1-405b-instruct.yaml deleted file mode 100644 index c5a45271ae94aa..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/llama-3.1-405b-instruct.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: meta-llama/llama-3.1-405b-instruct -label: - zh_Hans: meta-llama/llama-3.1-405b-instruct - en_US: meta-llama/llama-3.1-405b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.03' - output: '0.05' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/llama-3.1-70b-instruct.yaml b/api/core/model_runtime/model_providers/novita/llm/llama-3.1-70b-instruct.yaml deleted file mode 100644 index 3a5c29c40fd7c7..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/llama-3.1-70b-instruct.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: meta-llama/llama-3.1-70b-instruct -label: - zh_Hans: meta-llama/llama-3.1-70b-instruct - en_US: meta-llama/llama-3.1-70b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - 
min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.0055' - output: '0.0076' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/llama-3.1-8b-instruct.yaml b/api/core/model_runtime/model_providers/novita/llm/llama-3.1-8b-instruct.yaml deleted file mode 100644 index e6ef772a3f0784..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/llama-3.1-8b-instruct.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: meta-llama/llama-3.1-8b-instruct -label: - zh_Hans: meta-llama/llama-3.1-8b-instruct - en_US: meta-llama/llama-3.1-8b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.001' - output: '0.001' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/llm.py b/api/core/model_runtime/model_providers/novita/llm/llm.py deleted file mode 100644 index 23367ed1b4309e..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/llm.py +++ /dev/null @@ -1,69 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from core.model_runtime.entities.llm_entities import LLMResult -from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool -from core.model_runtime.entities.model_entities import AIModelEntity -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - 
-class NovitaLargeLanguageModel(OAIAPICompatLargeLanguageModel): - def _update_endpoint_url(self, credentials: dict): - credentials["endpoint_url"] = "https://api.novita.ai/v3/openai" - credentials["extra_headers"] = {"X-Novita-Source": "dify.ai"} - return credentials - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - cred_with_endpoint = self._update_endpoint_url(credentials=credentials) - return super()._invoke(model, cred_with_endpoint, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - cred_with_endpoint = self._update_endpoint_url(credentials=credentials) - self._add_custom_parameters(credentials, model) - return super().validate_credentials(model, cred_with_endpoint) - - @classmethod - def _add_custom_parameters(cls, credentials: dict, model: str) -> None: - credentials["mode"] = "chat" - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - cred_with_endpoint = self._update_endpoint_url(credentials=credentials) - return super()._generate( - model, cred_with_endpoint, prompt_messages, model_parameters, tools, stop, stream, user - ) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - cred_with_endpoint = self._update_endpoint_url(credentials=credentials) - - return super().get_customizable_model_schema(model, cred_with_endpoint) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - 
tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - cred_with_endpoint = self._update_endpoint_url(credentials=credentials) - - return super().get_num_tokens(model, cred_with_endpoint, prompt_messages, tools) diff --git a/api/core/model_runtime/model_providers/novita/llm/lzlv_70b.yaml b/api/core/model_runtime/model_providers/novita/llm/lzlv_70b.yaml deleted file mode 100644 index 0cc68a8c45082d..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/lzlv_70b.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: lzlv_70b -label: - zh_Hans: lzlv_70b - en_US: lzlv_70b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.0058' - output: '0.0078' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/midnight-rose-70b.yaml b/api/core/model_runtime/model_providers/novita/llm/midnight-rose-70b.yaml deleted file mode 100644 index 19876bee17ab4d..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/midnight-rose-70b.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: sophosympatheia/midnight-rose-70b -label: - zh_Hans: sophosympatheia/midnight-rose-70b - en_US: sophosympatheia/midnight-rose-70b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 
1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.008' - output: '0.008' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/mistral-7b-instruct.yaml b/api/core/model_runtime/model_providers/novita/llm/mistral-7b-instruct.yaml deleted file mode 100644 index 6fba47bcf09731..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/mistral-7b-instruct.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: mistralai/mistral-7b-instruct -label: - zh_Hans: mistralai/mistral-7b-instruct - en_US: mistralai/mistral-7b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.00059' - output: '0.00059' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/mythomax-l2-13b.yaml b/api/core/model_runtime/model_providers/novita/llm/mythomax-l2-13b.yaml deleted file mode 100644 index 7e4ac3ffe02d0f..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/mythomax-l2-13b.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: gryphe/mythomax-l2-13b -label: - zh_Hans: gryphe/mythomax-l2-13b - en_US: gryphe/mythomax-l2-13b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - 
default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.00119' - output: '0.00119' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/nous-hermes-llama2-13b.yaml b/api/core/model_runtime/model_providers/novita/llm/nous-hermes-llama2-13b.yaml deleted file mode 100644 index 75671c414c7424..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/nous-hermes-llama2-13b.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: nousresearch/nous-hermes-llama2-13b -label: - zh_Hans: nousresearch/nous-hermes-llama2-13b - en_US: nousresearch/nous-hermes-llama2-13b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.0017' - output: '0.0017' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/openhermes-2.5-mistral-7b.yaml b/api/core/model_runtime/model_providers/novita/llm/openhermes-2.5-mistral-7b.yaml deleted file mode 100644 index 8b0deba4f7889e..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/openhermes-2.5-mistral-7b.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: teknium/openhermes-2.5-mistral-7b -label: - zh_Hans: teknium/openhermes-2.5-mistral-7b - 
en_US: teknium/openhermes-2.5-mistral-7b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.0017' - output: '0.0017' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/llm/wizardlm-2-8x22b.yaml b/api/core/model_runtime/model_providers/novita/llm/wizardlm-2-8x22b.yaml deleted file mode 100644 index ef42568e8f627c..00000000000000 --- a/api/core/model_runtime/model_providers/novita/llm/wizardlm-2-8x22b.yaml +++ /dev/null @@ -1,41 +0,0 @@ -model: microsoft/wizardlm-2-8x22b -label: - zh_Hans: microsoft/wizardlm-2-8x22b - en_US: microsoft/wizardlm-2-8x22b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 65535 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 2 - default: 1 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 512 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 -pricing: - input: '0.0064' - output: '0.0064' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/novita/novita.py b/api/core/model_runtime/model_providers/novita/novita.py deleted file mode 100644 index 76a75b01e27e01..00000000000000 --- a/api/core/model_runtime/model_providers/novita/novita.py +++ /dev/null @@ -1,28 
+0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class NovitaProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `meta-llama/llama-3-8b-instruct` model for validate, - # no matter what model you pass in, text completion model or chat model - model_instance.validate_credentials(model="meta-llama/llama-3-8b-instruct", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/novita/novita.yaml b/api/core/model_runtime/model_providers/novita/novita.yaml deleted file mode 100644 index f6341979891e65..00000000000000 --- a/api/core/model_runtime/model_providers/novita/novita.yaml +++ /dev/null @@ -1,31 +0,0 @@ -provider: novita -label: - en_US: novita.ai -description: - en_US: An LLM API that matches various application scenarios with high cost-effectiveness. 
- zh_Hans: 适配多种海外应用场景的高性价比 LLM API -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#eadeff" -help: - title: - en_US: Get your API key from novita.ai - zh_Hans: 从 novita.ai 获取 API Key - url: - en_US: https://novita.ai/settings#key-management?utm_source=dify&utm_medium=ch&utm_campaign=api -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - required: true - label: - en_US: API Key - type: secret-input - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/nvidia/__init__.py b/api/core/model_runtime/model_providers/nvidia/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/nvidia/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/nvidia/_assets/icon_l_en.png deleted file mode 100644 index 5a7f42e61792b7b8be0529f6ae0ad4f3ba5fa7f9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 112528 zcmeEt2U`?bw=S_!Nh$+KR%k>7LCK*3BhY{_B2ls=k<{cIT2w~Kg@8!<96%%^NKTCi zZA%gua%zI)&;-fh?!uY#UC%l9{({e^9<0J$d#}CLyWSOc`Qmp?75Y=`r^v|2=+UaT zwaLhs{vacxtT=%HzfnYSz9%Cu*GJ#Jp>vLHu|cbKZ0kn<>ekHO{@@!aW=6-?X)noE zL$J*!Kk4N)H=-{woG!PMyeOhNSvG{sxHkRQ6;_^0ci&vQ;4)}8^8Oo+87dAcVabO2E* zOb-%hR+iFWvHtzXUtRs*5C3(S_v!3^-T(V0w}|TB5C7cZE(??YbN}zZi%$J#TC)Er z0RZAZO8Acwe)7V9knkTQ{09j?HSixK{09mDLBfA&;Q!+e7vU_n{k!K(SMJiyAI3-D z?nk69f0r1__uLEG%%KFiC+SIN)BimaS#a(k6-hT>-IQuQX-K+YhmJ;wW$*XLTSm; z-297{p;W)wIuZ8>Kca#{XW!nnpgX{aFF)D zFX27XPen#LJ94Bdtt(yHZCaAlJZqkwl#`USq${o9m(9W6GfSP`p}4(%piv{NxI3D< zNUi10Y|FDWz2s1sviW97a)?9zfb@E)Z7_E;r}n?kAKW(#lq}SqV(&AZHJCKCDW&e` zR#u+xC7fO>J7oW{&%rr-b9(Xl;pi-NiL+xFN8t;a(Qc+5rU2TlBMWDf)McW9oVSlV z#z9_Y-aI{|gh5K)U`dxIw;YrUB<(g&%3oG2c+5nG8IjrYrb(^)Ydn)7%GWlXr_jmC zv%92J+D1D3-q3)4#N&3;t9v=Jd+p&O&5r{%+7AnMtLMW^nhv7efyx~J8n>RqCvl~E 
z7>>#D;ZN0SLk5A$3O{*VPW5ioCet6@-`Wnaj&cpWy_d?m1ItCcQ28_pSs(&qT^nt~_O?GivyKh(h z_l7#(TF-s+EK{2Q{iyn2>5=o?LK`veM^e^a?tEd+QD@(FoQiMvR?XW3m&ya>y|zsb zuAI%>gZV4JP71^~1M?c>yUq9wiK_!~-*~(q2=sVA=sfhc@}qc;to_EHXY#(T)Rc?r z2ALlC0EyFs+YvazR=fRo1QGN2Z+Ef{L=}Ym*S0>IQYvw*Zs(B06bHcWk+xoNc>f49g=s zW0n?sugP^SE`hx-NDa~dVzY{zyJR?_Y5k6^GD`JwhjK1Ecl8p!j=)Nz=GoBQmuE<4 zQSN4}IkY{tzMYAoLqh}ZR=xR_?pE^E9^sJ^ety491|CRqCBROG{m2HIebF{rs=HVj}bND+*O6NJ`G}GmfA4zk|F!F1=ESKv0F=(R3dqZq)%t}m zpYs^X8=B6$_Sm!O{h6_@*@q-{U%(qWtT`PUf4W6^5feqdjD|FY)soGE;4)|ArEKucU7a@gH(ZZ5bj9oVN=D4B+YdB4bc2i!Ttwyk>KY@Q(mZOa zY6n)8oq;*Fms|fAZ#AA#+@1?`FkSBVba!ZtR5!&&HG#? z$|}_l+u6x$BzP*zMRm;HD~>Yr$c*LpEJZ}>mrKi!&PM!qx5IgvGm#nJYDW!^7SHv+ zPcMno#g}<_yEnbJArsA|(b}@@Nz*<0Af!mj4t!|3y#0QNgR9V~Q0jloQ*OaTRuh@x zS3}tmkS#wmo2u+3C2z|0`h#40Sc3<#A#Fh`S1JE6KtI{1qBbS7+bt{2X>)mg8pIG^ ziK=tIY!RcIL`L+OUff}DUf2+;S{YCu8JK-u_YhGe$2ijWY~Hd5Up29qK4M(!x4hBa zv-f7?*Y{x+Jz^ru8y?UqS4vyCozB$5RWd4>!&WF%G7`+~t)->IldHIi)%sDZo-*7^ zxF#>VUFa^Dzb6 zM9Yy6Tq${)dICvYbiYghGtk$IWCmD9g~Os-K7jl78khwpZ8|9DZb zjk7%XD=*g4vsi9mw)qz!z(d7F2Z~_*FO_(CQjo}x68Ut$nMHSxiZgw=olU_BO(9#G zmGf>rw~p3UJ;yeTSM0O=l4=VFfmiiDa=-S=WW*c*-ZB3Lg*KpI^Cf_{w0y-qp{R{<=@s|K?aOQ)Ih`MVc^NzyMBF7K-A#u5 z$9TTro>x?-iJPZaN#}{+zU(m{XR{pcd~St3v%Na0l*jNSb{mtw?g3E#8<6;!O%6{k zDXxEC<}RsDt^I2xeYA(!4S|!K#%}$P?pe{VKUzqerK#J0t{IVe-_5h*JW1K7ed^apDOyG=G;K&xZf z{|lkxJSO3&*tVFOlA;On7eD2LN8;WdfT~Ts0v8SxbPqg#3?1!u9YbgS>K<*DC62 zOlrpa#yDvtH7Cn94VgYsTYz4hd9ia{Fo+VG<<4kn3iOj&exHipkcq^C!4 z^P!}F?~%4mK#D2 zlx%L{7C$izKcQa4UD>86vm~eOkL2p-C%A4X*4z^dU})rIK@w6|ZxXC3={dkFR)Vh^ zlM6kgYi+^EXI#s!Or8$~oIDe_`kzq?gH4;0)50@bRV6AevcbfTEw+8Jz?}{H)Os1qz;kVrc0b?0Vy+2Uoo$`C&N-V@ z%@y>lW4w^0zs-4g>iD+kK4iCk50=nomAA$SLSeLU4M3-U{epyyA$S6L-#wZ_F= z;_yY`IexU5OM7zp$)w(ZZ$tRdB%asOucdu6V9g~ex3cw^Gea!XLFSao3zeRSNo1r0%NXh^=KE=?EakqXf__56i-+O|I(Zw^#mkHY9 zVSPk8G-Pe32U(%Fv+23lG1K&VmfT~CYVW>0(uDRYZS%^QoevY~4-Z79L)q3F9|cZz zg4e}Nl-;23)|wE~Y>bISg~In$(GL~Tr+xD1f0+*nQwIf0tG?^l8W*g10HLl5pf9%c 
zRG9wM*t?csN=&d{8ST&q${`m@py)D78c#LYf_}Cvg)vrwzE|3T1>=DQxwRLh=93MJ}F)HA`q2sPVl_V7EGm)uFEmorfzR=f1Te`+zCE+Y8v3!a$V z)>wWZS*UvPD+SgLTo$lI9N2H|{02S41FJxrB45rnOV4N|dO1vd+OZ*C+Jrbfz^utB z<1&ST%~R~&29*1Dl?p|UuN=BkL6`Qqr-FhO8s_1_IrgiFnba|Ne9q!01e7a9#%aWd zArmi_13>tFnk32Ot@U_X2$fCTub|Mkya8*>u84i?B0+nTO9qh!Ar18aPBtt0OqVRi zHhpqgRo25=8AAjjfDS2ttVFKcZkMEu&JGVWya-EYW#ANr{)0g&GS6U7cle%pP4HdD~3MjoC>`<#H>@fB$n1XLr8I%O7(+-hS~yVyeG5{B%y z0{%Sp7IT%MS#a4^99Xh~p>N{B@CA4?+CRiNFA(Bx6U(l_1#&tZ6Znm-Y_?p=>0 z6oYu=TA%ERrKAg7jAT=KLdn0=3~0^@)W@SN4nC9ofy-yScCYOt831UYOpBuPhNja1 zWCN*@S(B0=2q``fTfH!hG^|q$$#n^~+U1N4<0sT^NuchD8IB4NK33|21#?oL3XCJY zhOj~yK`9GB#t@S6*#YJyGIlC%n{AjET`UYNGi+a2JJ`s*E*;6knqrPYaSIV@frB6! zpO~S(QndTDP#><#Og@_=-cCLb&ye4lY~^DJo97+=P=-xlQp%q*Vbax~(+eu!HM7UellV;(Zrir+M(1jM3ez+s#p zkK9Q$FK2q*As=Wp3RV+#vE`hp{|S+$PD&Dn_=f zCO~uLY(Wvu2^tBFBn7a@&a@3|6A_pLt+Hi*3!M-s3KoJzh}*7#MerNsyCzCNJ_zwL z&q5OZ0_jAhN|p(+D3LkRY_mx$KBLb&GC@E#$4**$O^ID6@Gg99KD8Y; z^rk#Kd%XW3WTK{#i=ZiQqDWMO#qs))IHeekNckc|czmV_8t=ZH^End@d->;aZ3Yk>lmZt*<{!M1#T*7!Uh<-;A>8z0< zOfFAh2?o{yDvD770-DW)Wmm~Sp87~GXwLNlkU>#pLP;HRUOqvv+WD;jvSJ?rys42Z zFpl1~@EUl2xBrj3700B$H^~oa6eo=PqoHhRA@rITOz5n2v#$~xw8GPG(Lsx7z}m&P zlRpgVIhX{!2=cXz6?_^P6*%^|nHCXc97Z#lMEO1ey~QMK7nY zssb1s6M{R<`1u;{Y7Dj11j->6ygW7fx)F#}99zT#^3cS`;*eFM{1QcIN?m7t?Lk;U zGTXiYCodsY8h}x~bvJ!c&c1ZLIVDv-Qj@FlI;{T4V<-X_Iecq7$x6!d<~fv2y@{Tm z77)Hwc)-l)*UH4DBqG;9+>XIBz+|OSyY%c(VD#WVkJo6UQ;DZd=b#JbPuK|H@@upt zJ*+t=J(0eBNu(htRwKSm<~W;df~Hcru+DKBk!{-*@W}4R{JrM%PEGvqKo9ukedm(b z30CZ_lS>u%A*;u2YLKSZc}s4e)TwVf?aLCo0+KGK4~0Giy`+!$EUWv1_a>TIKJ(MUMtlb zujY@n6#q#9mGWeTpK>t@iZ9hk_lAYR1`-3uB7D&A2<-l%hq*|C&YKwI{pq_2{VN-^ z2mR%Woixpzdl9A~cmNz9veB~Uy^aS<%#%d^$ysTHQv$4lEeDX}4KUR)6*jI+My>iT z@)0gapixspCmAzHeT++HTvup{ATx(&$b9yTd5f&jQ8!8A&nQ( zW{c=7v)Q{PW@}iNjkIu6V0$Y@kUJj|9oJ1I=}aGzoYId3!OQ5n-Y0Mb1JdpL zc3|C#w95Vx&H8Ug@+CuH$Fj;DAWJGvNO8%dZI|oKZVWHo*4jSC+msWi0tln7rw{q1 zh>R|0ZTK1U)j^rZ=KTh_^O(jW&dSxH@uJUp1fk0djyE(;ECSq`>`WQw=3-36^nIRh%N|y1~l?s{t55=;itqoc-zp|8qQ_;AjUzoownK08#_& 
zC~6~q^t-R&$i&lGj_zGmVgCM{wLFEN()klPVM3)#4oFr9crs&1pJCsZgR72{$LTR) zbW6+S;%HP_(*=kI?__*AQ89Fq08NR|jGrJ(ZBreC8R^$dOA_%v?cAgUJCVk7AO9$1 z_u`A!3CKh_pxKi~K`-LUgF16V89|;DPZdnHb17FA~OE#Z3Fr=Hv>p$3T zN*7jzoQbVIL!>v|(%Y3T7%6~gcx%v9gDad)c`GRfN2zZXv~9By+;^lnBkQA)d7sMF zFXC;6Mc{#(dABepOhZAUy?uHaR=M!qbx4@Tp2V;ik4$8TrSa{iSt@_nTVFA$S;&Fd zQy23X-MF!*;LxjxC{d=_E_P`?Z2U7WEF>U&bKP28Xy1&lMA;4D_F7U7XR_I-=r#O; zEy(#JD?MsLv)6Hhxw@!PpU&x1yM_CZOhr>dMD++!2vOb|?!;lc$jLG?C4`V?VO{bc za`99Z>j4!ly{H@rEc7MqSLL+foqY$zHq!7^9j#J?^n~=PTmQuYd8vmq4AB}1GsJq; zvyrbf;@7RU&LP#uVB+NVk7}|fu^We&eR6&X1KDa$=A3|9F(+r3?ZjgcH$grZAKfi2 z^N4ew$}}ntfNTg8+rY4Rwd$m_k$X2TvWaake`vOT{Q6PLY!KUit;_R<1p32vnW~{j z*@;cqx`*!n66FXOm|dY0KOB|WmY)o-m(DXEQ8xQ9LcEyH1*%g+Fb#u)S!GuwC~Ei# zxBZ4(3UK$FN6iCmFo@$|Z1o22A%Hy7X7zD$@L01SDHKvzsr8Npn0*-s8U?Opq6{Ds z*<!r_wMd6y5q0p=SH$l~Y-#3msP*VtP!1F`Gwkkw(!c>1a@H4QJkcD%cA*|!5 z8rI{o1SP?Mt3kF9*imdj7tmtNHvEKJvmYZg*%3;WG5`gkpW3mGd_zT!oB>Yfh9!in zP7di8xOamz<0DO8zoS!f%-+ly8O+r6^m=LYb8IR$HiqS$pkcI7G23lpT>hmufZJ&Z zjDBTB@W#6wveZ`{o}C#zyi>@7I6AclGK)l;pWfW%a(?v513hZE^2cfNOETYOhpO^9p zu=5WjA=1RFEI3E5iuWrrIGtk|DXM6pH}ef&s>eK)N1gC2EV?TGbyPC+7Us*<$(1}D zm)Gu)2ZymzYrj-v0S}^2RHQ1Bo^Po_DM1WL?%;&~|+OJ7#gk$&Gcu^h@y=TxLy=>R01UK_eMjyZTW7S^>VSjqZ* zY!JtHiU$nHh|w9i&s0Se@KgkHnmA()CjMNo<*^wOKWhFV8jw8#p)R#6_L>p%o$J2Q z*s|36bJqA1<>BSQ^2elQh z1$5L{XDmRH2U)H7icIqxMoxb~@PL%my95-4m_cO$`h5;gF*R4E5gQCw8CwARP;*0> zJP<{c+q&Uw`NQ%`7v=c)&`Cd)x<2AcYim(OlGd`l#mKWV2ICUHPJyR@5uJiSH)t_>Q28CViV|<+L?Dw=m?ON8T1$Fv(8cuNRpdpBtpc!{t~IN-=I=wLeD)D~ z>aNa9&3X=819HNHaw3COG7I&oZh4}wf3Mazc+F<*+RDZy<=WpG9Mdhet8Q%Xp5IU! 
z7FM8O8)1Jyg2T=0VcW*e6MN9qDLqT8ciRlk^a}gU zRiPMHVy$H5)Brv>(;x-AzhiTva2C;^hIOUsJ1nxojElh)f~%(l!zU>;^g>Dqy;<~w z12rAlsqPPwYmz^Dd0;ju+<^;mrMIb}wtJgW-_)8=SRd`298-oG{t8?Lnw5CohcENMIs$ z@Eba9HoSlcF3e7^6zrmf1K_?&B5EvBnwU1OG%oB>m$O6zf*$7h}nl(Atw{BxAqYQ zO{F%*VfGzz&H2!?M}+nX*Dq`f2y z@Uh75NixnCQK0jl{6`!B?~Tom_lh@`Kv&z!HUBfmYKXUVdlM^|0Ce%f_)iKDmDC`O zkA~^`D)C#=}e%#Jz>!Y)(uN;1(=N*mTYN zR4Kz6Bv@%HU&N*b0Y_WC27(A5GImnD|2Rv!*e$-Q0Y+8)4&HDbG*)A=2SX*rxF@2#wE)V1r_DGiniXlw-QS@3e zw>SAfer0ThF+fVBp=KR5;9yJf!{e+#qb$J!45X^d2b+s1@ckU2 z_~B5}Q*DhtWz|3Ed3o~YnIs#V8rP|~*Kqv|uBiqLogTa_k9u9N%_5E*nsbqXJt96t zSLU{u>~Tj4n!}JC@}M|oCd%Qr#)d7gg5p{#YUPtHz++b)vNEC9Wa^}{eQ4)kOrr`x z?CLhunbq7W*%)qbQ!%Zc4l9usT4W2MbRm9GR~bI^4ssnt=+g@sm&)vZ9>7;ALw*}& zk|NDbAocG2l8ecM*HLeP!MIdMeMBKJnMF#13OCt|Bfz=N8yXrMt zo1(r4SG|Qm4+IkC51}Tk6$b-TTgmn_)?;nnY1osE$DjcTP zEIOHm^s62o-1Af`eubjeN*s^y8n)}JqBXg;?7512SJGRd0o9iuUaBVXjC#KqGv491 z4Nr$#UB~7IHAUZK_O^A6zH5T|YTvdeVMB&43dhQyXw!kaOqiEU zR*eV=w}wMBk9%ha8pCZT$MS||?>Cya$n+OKEWe_pw4#jOpSZ_Dn#^)0?M)Knwk?2o zjHR7F>q7-rt)}~{n){`~UPd3-?D@MV4P`iaVy)ife@aKjj$br}d+_M^&{BhEZmUkl zaMRd=J1P!oW=}os@)k+>oEV2ZYS~rK8{Xj;;HY)rY*T;Nm_S$XarUy1vFxI2Polj; zg4F|*C8t8;tl1*BgOS(UE0g9etxiTDTfbY}nj-7mxUwG^RuK5{ZX+>H2C(w6`f44O z_LaOZkG_Z^xR%3$eZT6j0oi#zwQ@CnsRUCfvIRf6T1aw84m;w7p9YjZ)gUWv?Ma>v zpftKv0tx#~K9X?^iCwAaZXw+yWN;iLvgw1B-WZO56^~lQ1EkQfaY=-miZeJF1F?Y8 zDu&`qrUTSK4`?IPb8-Z<2>%3yqU_pb)Sds5>q_cNuZaX}*`aW78ubo84r!FEzy|$E zfbm&8pWR3rHI!+vV|;(a4Y~~qey4hgfNGem^&sn|k5SuiyG7TE6WK)LGzvH(3fK|) zA|h4uMtW7Q2lt#nIQ6UPRL6I!hc=8ARDX-dUYUH<0?s*&i|J~YWaKf;b37mHl2_Gg z&VpcftgFSlT21n+I{RXW8u95`7UG((G@=?wNMlig#Diy){ITQfKfxw!ChpCb8t!GD z4&D=%UWcZVlJM1lSH9*UTe^%y+=rL%2rz~uuqhae8P@$F4YQ+ol7K|bK%%d69Gnsi z?FO9rTnGZPvW>WJQ6ATTJl*)K7CG{Y`gTnkdZX?NA^oYG@m5DZ{}Vrg(*1;b?Mp7&*j&-IO$S_n47SQRy3!R;&PcWsRr04)dj z)CjpVR!8HEzu`@z2u{b5-A>$N7tnQ;p%&c#NcO2J4CQyO*1_0aY%^BlxqXqf)%xNo zqd-s?#lI-^d-g2xUh!X?!P0M5n>{!Jh97nFCV|4|PO(OE>7Y-RvuoUlCYLO86hLvY zbT~|ENFUDj=0h18X@cwONr&dgfGao|@hOl-EKIDi<9a{+a4qOf1IMaW4J8N2cB~%c 
ze+Gcx>Lo61gLARluj8z8QV!236;*s{5%vYeKY{>BMIK^onx|blXsRNde^pX?>^y`{Q8b!NKma)dJDa&<;Gbc5qxdFleP#-4zahYtoSYW zCpRRhiQR>RfsHY&8_?yeA)^`R@&+eikVd)>ryzQ{HtQjM;2p?{w^dCn4g{;cMb9-E z?}4knpqQ@eeguf=;l~{#Zi^OBQvoJQiQ~g_a7NXGj}P9te4NviI+&>8;J?3^*Q4k< zXf49w|fZ>)5fWw5kq5CL3%yp}%X70glM#!+xH6k?v(zhvs`A zpjRNfr?L(B6Y0t)kLx|i*#@EXB80@?0E66)q|>Sn?KyFGG^&7;kQzhuyH8rq8%rfM z4_(Uj&-vE_*bG1V*c9F-0X$y*JD1l^|} zr+pUabHvDMO3FO=TRXMmb}}4$lbnl?T^XASU?+=h5r5E#D>~%{5sKK-qG=j00kse$ zyvZ3p$&)l;6}hi(920 zj|vpo?;71gJKb#cWgJJ8ljx)(svUiY*wf-Q5=*8JHmkynrG(`?V(!z_-TzxG1xKZ1 z?12k|=a-LFI$|c=p^d;g?;3#7T(V)H5dwZFc+01Tmrsg=H^g4(nVUkxLF?Y~AYHRU zC&gJkO!0hsi>94h1$3d=MO(>l}`Gz$zy^JpcoBq8r0gz)5+~@KVCD zP47?TS^Bdqj1_nKgoSvqaeGq+yp0f1-@@C{b3(W}hVE zOMMtK^|riwb|U6(qZ4b|4~jmuL+sBOZC7?yfKe#9#wjEM!NaY$O@hA>A-s~YEX`N? z;euD#m&#&Kd1(s-t1Jg!6#&f)b96NGJftuCDj~;s^gv7j94sO_niU}ehQ16811-(d zp+v@2UgL}4DRxQ)9QAI=B@3X7GuqU|uJt;SK3b4o#^{Hq3#f8GPH$;$cxTbsWSj4j zh$NUGazFSEFX8(fH^wbGLpzKoEg?Y?tx5&uivN13N!`OrgG82jmSy=d*BGgU$o9Ed3AC@WWyp% z5hcFRXp0(~;b&T2WUT=09H)>Lmz)OBhXd;XcFJg6)k9J6MBBLvDq?w}LC^ynfGz{Y z=+9hwQBdm>o43SaEf_BBu`Z(uzNU%#jDo*tuxe8)wgM&7*P)e-mlhkRIQe~M{aKR2 zMMf3+-P<1rT|v8oL($nxgs6$Jds9_}BB~Xb3%_kicq(r{x{~bqw&i(ewhXft?AGA2 z((+*$eW^l3C9-&#iw9m8Q(%a-g7QBBh+_nQ_HhSdHyhddS#8a;j?xq=uq9O@rxo@O zQe=td_rpzi#lH?jT+xQ$^-{_wBqta*Kcy3gn))ne;?QC&M{4Yl73qIny$E2lf+g?JJ(nLyj%FY_HwgkAkN6P(V1f926ws z`v9AZP2)qWWe7uOv(uxzmWSKe$TjceL~`M5h^gA<>u;#{&!M{x!z3u%K720Tq~Q2c zWN|Z|KIIA>U#3n6#UXXYpZN*SR${_0VoKn{YH&$BV4xA9VIWhCdfap#b4`2cHU0ic z5*5_-h(X(z4$jKZmqS-^K{TcacXBOoLZ6XgNH!Id(dmYara45QwRx>$UR~m<)ri3M zTytgD_CV1MbA}BMmP9+Mre_qWTDG2_+hQOe)q1hDGTheaSZnM>Bp>b{_#~3?YM#)| zxKYm9uS$H(8w=#wO~IXpW!mh~A-WR5ch5Ss2h3o+cceohlAr4sR^KFwU%Of&k58(0 zGwy*!gDzVupD?a=ldgXxT2z*vkgY1%n=;uuZtKHW#C@z7Bs%@OBqJH&4Skb5kpbP_FR0zsh#W{ zpRTr*+X)BZ2usVYll!N$!(V#PS@S}s27E;*8<~Dx z5Fp*k5X%+n`Xi&3DUqs%r@#N6`F;GwNENm00&0IJZ${da!{8K~eTd=~+=^qTXTr8% zqyF`ggR1O&Ph>jQ2ASDroUw5ZKI(;Z(Cb(s?0q1UcIo$+j5RxI(`QIQmtxhNicJau 
zN?XdT3%aT&=w7Kgobqs3OzmKdzMN-(|NSDb-BaEvLbS6GMjzp1%UJGpWg>_#^9!sM zjGhYi@B}Lv3|mXBj5Br)c0fJZ{_|L&VB27iOZkV)-X~r^*O9j&-?|`pr5GouI#)zL zd#5M*hFTpLtPf)wax=2glMXOmdalxY(?B|k;=z?TUnKekbVs|%{WqB$DqBiha+(Wq zWA~|~;$J$vWRo(Dn_)wuvwt(c_waEc&EyBe8^~rS>Nt!E)6UP*D`IWa^4yE&`7V$&etB$3}OJ>*-wK&zMU31L){$Hu*N{8?|u+ zxWBw)jLhC2rO#7=UR^Ro#*Jjw*5te%&pRh8j~iFDX5ky7!$x=*N}2lbj@^KCY8oA z*#;vr2EUz>Ak*R#q<<6BHOZiJg)L)h2n!EfI!(NPovVleysE3FP(+2kT;Ft^sfZb^ z{m?2}M{g-0*^tgZRaF`zqrI~)_Nnc5y9|kp#dD@&;w#e*1C;T=Ln0tv^UlEx33fdY!xi8!8vS>3HAcmEi$g< zEiMx$j^n!Ih`_qRE=6QP@{eekivWirJBozI2nvc&s^PxF0u8UJYJZ=?YQY?+_LL4% z?pX_DAEM2R#Nv5{Z(dHja;@0mlZ1Wd6Hc7`D^cC;+Yu*GJUs(HURAQcCk4>toYMg=Ym- z6xqR4iI73qZ77^Hrb+Hfv2@v@aGLj_slN$j%xq~BYfhLs2w_Z&cm#MeuY-Te384)x z#fihd11|P#7NRy$_0`1?)lzFI2+BM88XzMCR}X&~hF1Gi7{i7UgvU9?Vo{KePd<(l zcyIKbE=402Ef0`pB#()Y%=})zNQZ3{L(F{vaEwunLIEY|Q07QTTZy-Uj3(orb z$=RFWKAoSjh8mqylR%!2qf~`*L_#voEG%Z#)m~RQDW$S~{#&p_6?aq&GgEW-aO`AP zk8ItuSFRp+*kg3=9?=`Hh!2m9;^{%$yrWjJj0*B1db2#k%>Z_rV?B41-H071Jf~ob zOrPBF6(Wj!OeKda|2icE&3k9Kd$Vt!$|SymdN@Vq@yrHX^2GY zCfw*UHS*{1aI6&`s7#8$J%;sio0xmeb$R(qUMgxq&d8fRTlu$zb2qimTD?eSqNH4+ z?kyN>b;zJva2TH}psrD(8r7PrR)sP)UP|yrTsijLF+tq-OIPXu&(34-sH**$Kqem3 zHNFyTq2u@uu|fZ0+NOokUH=Aa8Iv(DdrpQ6t=(fC{ZU_!`SoTN(;Pjvp87$wa>vyo zMV3V1FewT?P6kOc396Fs3RL+ZXzH}NPxhBS;9Mq^*pQgBdG_RKVf>>EFm-3%H$MSe zqNj+$;CYw%>3%Grd_KV!(-h7()k1F^ThdPKOvP_9G+@|L6zba_NL_vIV*xgv$n9l2 zixyQ;DUnTIh)6ZTRsBBEQRODQZ1^Qa6~O5SZcQUzVDO)Hb8IgRRk^hS8{$NMLIk-K zK^goVck`9|7$qY5v5NTbp&1#8A+H#es1e5zIJ4~nAeD#k_C*;e0A>6rr6`F(>gpoU z9UyoJ%6qM!`$ldRql_0ZH?e!IG}r#&&)m3tuz0|~7a!H))4us*%25AQD}wv)@8`Nk zy%mZmUPnSsr?WjdDO;7byISu6qA=$G%78K?cEyQ&I5co0yNorQFfx0W9XLov&e!vE zvU-~?#(q4~R25v0ZnD#3dCpXb{RY%-+4LsX`>Y+EVDGi384~Yl;-kje0+y)d26q#m zyt;1BLL!l_W<9+wO%eiOd=20htfO;x9c*9y9z;(e9|N`zK~{*XfFOR-L*_tIusU`U z4D3?<$crX)p3=Ap=|~zP4}y3^NN*g7$z(I9ckX9&exNomOBh$zZ=c*#gP+E$Yol;jpV5u+70GI)x&pVe4j?-{&Jz` z`GBe#REfi?61K$6f98dQZR8@S8ejkSJxGM>aNGslPSW7 zORqR1fQtFM@xdsy;=P#1_?!PCN29MX3UVEidsFb=K?v@60;0&sc*E$={s{I?Q5!|l zDWss`Xb3X7Hkxw 
z<5-9o4S&VJ@jLV?8gk|Z0;mct74>XqG zfpxp|f$H`1k4M~*MX6^O{2uIF0Qp0zFR|wIBS#^O+aJTjDKmbmCtVIY7s(H+9GRk? z(zOKNhcN-J4Kp96B8qKWqQE7)I&wOSE2^b$*(#XPJyNVPzX|RHlK;$?b0k^Gw8)R( zQ}YA;HQWbP?X?=tFO-0!bzBTHuD#F4+2PLe(#C9#dGY3T+^+j8WyS@tU_dC90UKpL zSHQ3iJHAcfio-3nKT`FUE8WGTehc+^g%cw({~bPdEd?l#sPf33-|IRS!a^#VTun9S zoviBI)f4;2yrF&N!O49g3_TEg*D_yK+qINJpR)coq;n5QU}Y`pOrUBfk0+y7-Qy!U zng@I-s`KMkx&vigyNKI3H}`e%ZE0;nlEC8lbbIWMq&9`IY*bcoi`=d^GJgB~G4!F! zxgeruOjV!u{%xC1?M)%bV|t!j2KP}KMeE$n;>~Qdc_r~r%GLERZUagb6$(xW18vV@ zd6HuRA4v|p)_@h{Yse-2B#G~tjUbk>LCnvp)wkUk7rdRJHYu{J#MyTeX zueow!7reM}3QI{cEP^+xSV);CX)t2gBtS?#%y2Au4(`-K!>~S=?6}G60AXt0)RrtI zyAKc|4YmeUT*QYjX&y5Ml|QARmAzu)omY!fk>AT!QTt<=v;6s&By1>*&aZ{A zH8y0JWqA18lrsih&i)>i@tDAuc}rC7&woOwUC9Xp_=^G?fL!ghyA+xLk@wg#*#GKn zflR9BB4TKu_q@A6w1eIrrZ3;ehBQ}OtQ z>Ob6t|nZkuz_Sj&_skvQyIO+MZAzP_&Z|_CAtSAgP{TuoUASadjIs5KNApE zoBaxf2>2|`&Ox3DtrkUnoe#=*N#N~?E;Ee;undb?=d+65)FldCb5++${Q)9#0ITdZ zsz_&13@4itsG|;4bv^Gy3IsDdQU}r*Q)$4TxPhxH7n_M|#C80BcAxzmRittbeH)H7 zM#s(#`$ClBlQP}J?$)q+zwy+Y6O@RHEx=iv3grH=Ii zc%!O|(&4ZnZ&S_5AmEHMruHPS9;4DO?+v*y%^MhR@?vm|NYf);LZX>t?x)aM$;*hm!LXI^mGr%m+3G2SbsClA5Jz>p`Afu@7VRc65*L~tlr7%19$g{+K1C>SV0d#Qzz zHacU&hx*&8eM%^!xaEB2??cLO7*9A*i2ym3omeqF1*HL!TQ5kxHX^H60O|)cGpH_3 zpnVGv=0Dm;92{1@yMtA~u6s+h2wot)I03gJ(gnI;<7P-0p4yjHdqQqbtL^xjz6ODg zI~`o9N+_rK*(b;td%#e`1jY?YF@ySMPLlP}9Z1KM4b)18=J&Eyv8qPF349U5jc;!k z$m4hHS*`|WyvzYj3Zz2)4waLWC`Nrq`Q3TlPQ2DsS``Zh9bAIV0*;ir_3Vr0(rou*?$qh!8tc@i8KZOf@8a-4 z?i`aa00BQC5;*3vj=Q1u9?bYQKY^CdUxX^2VlrBJn)SBtAIT%Jwj=XC`ylfZq>m(1 zl&p(c(BE|7)Mox6Oym~xKT&bYo8_ky*e%L(Ji5~(bz=XwY$*v!a`bml&h7C2^gMJ& zDV@Pf7FBX#P|f$>?%sake}$?5VRJs>_u$dfK1N=to6D#u-t75# zPz|8@&d7>0RptWOVQG0s&|6w$o~)T`}iM?D{X+?qI#zMMp-{ltAU)=?CLZY7<&%4j!L z_KG*(MD_t((Sul4{rq%=?+o`cDEip7nLTDIBq_xyQJ}U5=7S{4YFK4BbcfD^#!Ezo z>l<1P&{OzXQ2Pd|P_zUm09@#BFi-=ETasac5hul#@pN{Uzut|}jH;LHhbrHh?Hj>m zU01KzuWj%Ox`1}w1@zwKF5{Y7Og5U(?ubBV^Wi#7;uNniZ&6(>?oE5(d`yn{5~rue89 z1{qOBr~NE{(Ng&jhZEmUaz2ruxIjv@#f#U=<4g5Lrwoyq$}p+geWcb5$Jndz=zDE; 
z2kTJh&bW*{T}{>`0(S3Or=`0PaRn{|dfqWow7q}8O<5PJ!vVTjvdQ7E{D*=m)WYQc z?x;0C@%$G!!pzvnrfI(eJ?Wd9v4rQDBPn%3=4a(qbCQ}nm zE3!2nPJK$L9w>5UTDg_;q@Bgnqix1m&6soC765Jrc0tgS(#yNkv+;j;x(;|K`2T;y zIa~JLXBx4T_>G*`vE;&pWazDc3j&$!gFwP#NhWGOwi+rKq(3pVRaE zzdWxVJ$dr^e!rjb{=7f$@0S~#x=eWv^YIX$mxpI3zye+n917xIb%ZRyJZyz2Kjs^9 z8r`bt_Xzv%+hdv|Uu-*QJAzchgk(2=OLgXIo%?ihe5H~TAAO9hu(qEJA%F4s@O%?f zNSHP{`B2wP!QXY6pZ8bu5=I37fYb0HInw-nt#I?z(0CiKOp-aB6=J~Cs+Ow;R3_UUGscGIDQ+o-l20lwvA*8PM@;r2rtgg3%+mUe39JN7J? zjI>6+m?!uRTAViC^Dav-2Zr+9jOi z`j0mZH?$IuHvk)FP2|dReM7z>|00%?o=Xrq6LF{o(?9*^%2ZNPmfnW6j@G zaffbijipRgrt^Zu{Lp2MyDjq=`^19|#Xw5$Q=J|N3ql@-zkz4OMIM$MTPqx|R$JPj zHJnjQOT^`8@RKD-pHtAwGM>f%fA9>JBd;-^Z4`*dlM=olZD>EKAK?J32n=^W{_v8R zCDWFU!VjcZiDY}u)K6`SUmqnZuIf?e&q-+r`?0u8MA@FW=*V#$Z`ayJ;C6lE3jEkY z+#WdH@H!r(WPLrwubff)T#0SOT)Kpn)d)8~i5k+VE*{%K zR=Q>3O3s=d;}^k>`ssg`K7!+K9d^r5pzTyw(%ihPRbl;S>oNY+2EmKEhfGWa_d1fa zPsy6T8cz%wE`FoP6aq9duZ{U|DQ%p_A~4MTF}z-c;k!=^D~tY%_8V)XA=NWWQm@j! z5XQDg7co9!*@zV<%P)wg{iLb!BG@Z|tH=EO4&B-HaZ>abDqe;Z^NQA>PE?c$V)_g| z+jzI$uC9PCOn0A|mH$3U!%{Zv5xz^{MB6EllSKAoTP2=}{o1N`<#PFsB)uj(sraVO zsz{iK6-!1cRWKfG+PK>j@3QN>>#jbE$$}(mv-2x5+4GnVH1K33|$# z8Oa>>EAS}&W_C4oU7IW5l@&vW3!YHNsXd#{iK48SODFP~zlRaP%yc?&%l<{8^$cn~ z)ek<`-mJ>{^<&wk5OsofwWzj6rd$kaR`{(@`q#~X^x{6u@RJoe1ilZVZl#CYnq-K( z9+)(Wj;g{cY>B&-8nsGB#rCTVG}>+6eG!T5|(i8aIDjAlJEcrWMtjc&N^!wmN3DNX*yh&jXXeVu!)md9gg4G0Wb`ZQxn2 zt)5mA1vPp>9p-PrTmn>S9)ZDNK8`V>I5Z;Rv)xww_rZjo$YyRDy_iLcz{T6xW(8t3M{mBM{y+=5_wPegH zl=CZNywK;7UkJOlz3M~9Mkn{^47}s<&qDBPcWeHB!I%j#W!~ky2p=Bu+|_fK+u^8N z+k61AnlM|2eyS0^eKjPuOzMSAio+V((_-+Y=B1~ECNLAcmf_IJLf_OQJxAu{*eNh4 z8a{|Jkpjw|w~YKvNwoEz6st{z&EKjD=zexq^-d)HI(?JN0#8t0S_}M|PXx_jnM~_H zE{=jDE2dNA{ywNhJ;^ay&GvCLqnHn7nSb@lR`?J#ey>No?oG~sYzAGQxkp31HF9^@ z#h2b>UOA|>a9KgkyLxU=w^G&4dcMQj=ai&oA{+Qq3z+moGZ+?qj&u(9`(J{t`-uQ9 zMj8&{U6Qd0hINyl`f=gcTEu#K?Hey{DvtP5JGZ%PO!($%_fphH7sZOLyI%dMvi(i) zECEmAEPuLLFR+=$s$jqO)IOg9znWMrsA=Akc@{@^FOk0#y&u_%*#3H*zb*UN!M_6% z73?K}C$w4D0(FWB^jH}pWw2_cG2!6JQ!kNBq1mAOt;bVt+PE4CQjic1O@MoN=kZJe 
z!a~TGn8rr4m6?Z=R+}gV7BcAoCLKLnPa}Zuhp+Or?`O}lW#Dta?Yz*YPTJn;Xa5Fb zYxuE{GXF1V-&0E6#)Kiq>fRY!wbyklX=Bg*+A7--byltQ&j`jU>)f)FD!lu_&qc+5dd6tD~xQ%aE}lpeHKMH z@VGz*^Ru-|yD+a&WWGxpvE!8s19&j9nxl1VMf&7FF^XZFp zcTLLHWABp=O{6h%t}lD*g!V)S3Ml*QAB-1JZjD}-8OkNs;yGEQb63-MaN*;Ri-+;% zTzW+X`!Qbue;7)tD8g+wNvE;%a@jt;gBy6WJ3qokgH!XEDwGj_2#jJ{bpG{@TjFqy z^2?UBOs8rz{&$#GIP*Lr8VcLLh<^AnANKw(Z|^r6EO;_MUssCHl511gixqRNEwEj{ zlf_~88E%QHzz^ip&S{uRg&VuYYf4=!unV5<$oiYlY}(R74-na7WhiBpcmFw`m=;Bz z9?&hIp`Kq`Z(E2iM7J#{&37oPn)c;U%;~TIM}GWdh$L!$cS4E>ZQu1n$to=8>(RkC zYT3p)(0g86aSABw{UJLr|~i4;ujK@iwHGKJ*q^g9dR-H;C!LfdV&R7p$W7W)=Nw2>15&`;6g) zR`u|qn6JgZzo>@YlQes0dK2$nx!%N1iuLBN&@X>n1@O#r=@2{KLAJplnx+}kDv1h+586Wm-uxn(Ytn1F@gyLVZHx;cb0m@tZQNn|~zU?)n(XKULssm%!ae zV0R&h&+up0G9O7=0fm4& z_b-Kx2yu(qM!xtqw3@r;Eh$SVkARyy_O+H%z13RL?Sx*x=qh1!k94(?X>Tn z$iAr%zc?^?GhL+kdp*8SzsXr>hNJ3 zNlU(EIb27%ELjK~NO)m&Nvzv{GJ>T{Bjf-ghZhIkhq-~w&*S&*iy+%jZ`1+uHu44L z;==DW?d{w8tS4{N3t@@#TKSFw!Dc&@4Q#fKrZXkB!hp>zs6d={ZOx(m#}r&60>}dS z;ggHskqc<<7@W}QXZ<&?vu$?b#JxaB@zm?p<;ae@xd)31Orf{?cgRt?9-_qeaAg54 zF*y1rymt5ySX)_H1qyDfiHw4gT@Z&pr$ZnoJM%V$8vsazImp#H)={4=Bo8T6SQ^|UIUk_yPAH1u20@jaKi|1 zazpF`zZh?>77oKfpdDjPDdr2SYD;XW!gNl9!m=qN#0z+#4^A!B|ItG@yOB(qF<>TN zg*{rwr**3D-%pVU#Ckc`7JO%Wah-5PEr#1JPA%%H=YkV=Ka-v{A@7--xWQoBog*Ca z{$-I!Uk-g2HHf{2y*?{%1KpkZ=JC}cEDb9+?f&Mp*Q;X)UHIrx*YvKr{G=?>fWG9cgY41N9yeL= zgEL@!B$DM4HHxYHIDASN;sRnj=r33|D`~TY*A^lZ_c|5$qcBFUwN>H>aZREk-B2-l zZ0*4i!BV7$#(c-=4EBX1*G}wfaH!a>1@l8FrRZ1_gQo{Y=)1D|a!fG9r1|w9&kIb5 zPE7K$(;lle{N4y}0TYmBYU*}rZcs_7d%p#S+4|KsJ?Kk9&1IUma1=2x9R49SokG{4 zGB#<8pyQBxG|!EjR|}5jFyCAifE~xQ0;(=a{)qnIrsX59+^1HF+WozQ=!m`*0KYr1 zflmjynVLpRqq*Z)wnpeq9($FLf?)khr0G73JB{Tn=^IK8xlxNawlBy$?C_GQ1;4#O z@+wP2_{T{F&C?$qKF(!2P-epl48_b0!Yp-u3X5&5w}z#msfr8tPCoT^zCj<_wl*CJ5%c2LK^M~=uy>P9O%%#Sc)!JJ z5R(q@FW7?!1*R4+McD@luiL7AFWQk2mSQaHceF+7-@B?42*Nfw0kyw>G9|9m>@rF) zx_)u))k9KmNlx34A301$YCEB$&fKDZ>2v<6F5Ct4$!(kPDSa1i&bhf@Hn@cD3EJwI zXwl2bxsh4*mYH<1T{*&-qVV;e{rNpvlF0GyJ?mROe%DE(QGQfB 
ze6Q>W3r4~@*;>Mnv=@iY)1SW;2#cH^K)SBNK-;cQxUqLf)pleXx~=Sod`qZ-uDgP_ z&?e{uM5W1x`P3eWEquN9ZLtdDaaDrVEj#mFhH~&qUjywDOj`xv|DI*=1{E7^(1BU+ zUK??B$mXP`euRBGQ{@4wN2_bAOkiGRmjH8lJP(JTO-29W5A4nDMZTndVyCj-)6i5j z^$LM=`X{212W7^}kvC;ot!t5SjY+M@4)zX0ezg7q5l6IHom;tuRX9-^d2&5Bn0uUH z^HB*IHTN)DiS$q=U?HN_EI%upZO(Q4v)<7~^1g;>8cBF74L-)NCu}V_<5(?Ierzc4 ziCI!3^Kzf=jn9T|wP^#kDqW^0Xlu!8IAYFKM=|_1eMg>rY98B%EUiu)g|U~ni)SmE z7J&FGM|||n8neZO9z^iE2DP*@oJS4#^pkm!j2Bfyi6eAxVqx zyqgK@@`)?kFj3CDMV&S>db7rTK?b!8PN*JE2#&||QVmu6Vu(Lz_q50NQ6I1;35lI} z!lyPxSTAc$s?aZKy~W@Lqz*!gD#+&Jtwuhb877D9LB~N*o8J_2RAqfllvkbRPo%vp zQoMk$rQ*Ub?Dp9lVZ2B~ypfDI<9O$2rmAu`Z~T+@-XVJJco7N5cXoZRm$1YW{vBKR zo=|VO-_DTMj1!nSC>AUg`$NMzLe!_+D}g1{WI>-xc4?4=VlI!U-V{Cv22S_&K$D5fG$J* zCNM&LWi5`A)N0zIUS3~iDHXo=X=J9AaM))(QjT<)S-wb5>o=1VS%SaH1cqH*F)T6b z&$cLARPxTKb?ILB6xjRKbA3HDC?V$`Hh+zpIV>HR>AOqi$?T~lhr&Hy$%3#>57r8Thl!Ck#{f_<2e;yV-vV=CE9sl+C64iy8 z&DLPMk_{P^T?vq;7&y_w6q;61#K*N6BN5y;zymApGN)Km>=;bB<4zlD=8-y)3fwCz zOZkuxfjh9xq}YQ_B1eg1WqN~s3>WznR8dAq9d*{Mq2AzR#hwB3@c?81N{RmNef6qO z3lpYSi{6oMKi)sFPdp4dyp;mLc=1|~`x-n6xGEg?GU3PO%|$N`$nZ+TAv@#*I+(v1 zbpYedXHAu17ZW_2UV?=%rBlUlV+buR5^s0J|27!R9SQ`|OsuD@b_lC*t`s>2YE(nc zz@-fKL_I3Es4_Tdos{#vpVVSOFGo_Xp=D)0`xi;=^yu~D#V#4*kZgxNjL^L~p)+wa z*Cbipo~!p`W~6(AMVrA@c~*Aab>;5mMqgC*N8jTAgC!jbg=~G7kk~EFj875@&jilZJ?J(A3-aNCyRoQOa?gCcBpbQMQ(*~MVQ^;7$L`Xu&*Uf z+5U!RufJ{l`}-r#)2(K3qu<{xPJnG-cBDSFmB6#SZLn zZy`+$Xb=&cofcL(UHX6nvNOGM!#^JZGEbCMV$ zOk;Q|;9~wcwmY-i8mN)nht>Bg$2ZWjl4`Ox7iFj){n`5A_FMrfPQ#+26)Qjev>(%@ zs;bzxAc{Jz{(_)sdhrV6jWwStviHBQygjLs;GN5F#TO{xTNGiKW2x(E7@h7IL0{oHyCY`)#J6MO5X4nrV!f71fgXTy8YLCpBAIh8&#_&?q1MA9x_ z>Nz%^O=CY}tFRmB(g8u-U^tQ3N8yB)Z+Z)&5j8@|f~+eAh_7yprK6^L2R&1E=Z4nz zx*F=5$cDf1qa~;$6a+zK&blGzBLOs} z*1Nvk;A%NmY&U(!!EqrYN66y-Pm7*63;1ggNi}?jco|$$x!~PvAF>Q69Gp45R2Jb= zt780$C}!hN4Irv@O;nZz7SvZ2Nn^379ss+|`?k=2(Y-R^j5_`9d*ZqWvy<5h)tsAGCrL zh@ZZ!=5A&CfoPt{X0h!F5lnH!2(JvqwwRPnnI1OrR!A+fF#^xX^)q zyNlP9x;-&Jp3vLXYx%lz*XS`{ms1IyKJQ+ZtR4^aCMyEHwAmFyBq>K?o@&{RSZ6#6 
zB-@&Z$&2StAD6^?-dv+po$yyQr=L3(^;p8SL$19hM zd!YetqhXNThNjLs8pEI19B_d84|2=^-J6*tn9c@y5X$U@X4yWdgJQg|$KZGH5YEnuudB7b`|xc? z+6#C&K9KWq@@%hHn3xRWPNqo|(~^oZMt}lTiSF%luDt>t**ZcP$k&VlS#VMG*(qG-m$@${7l@;sP#+)l1t6pzH0{l^5p?39!k4H6eq)W^Eo;(BK(#B5Vu?8{j9Egn*`tGx)mOXLxhmS%m^Ib~ zo>$>jDbTupcH(-%{`m@9F?U~7?;6qJYH-ci>5i)L78G?8Y@omrN(QIhAHsfqG2{b~ zu5qZEBF^k(wg6<%qm@Db<^IpVl^O*sQI;wIXIj3Ru86*32Yk8xgqFBI2?UdU>sY$eAddAFyRi7SD=f7Ga3I8St4o>w>%k`d|4hc&zKYBpYQbs<# zaEVtql6F1}t7goyJZf zZ;H3e5nG0YZt`ZGa_g|HuOM+FrhGx?(iQhFC9Wb0brCzwEFnM4T4ddh^fo2cC7$cbR5t4hIHQo#UI5yZsUZSC|*L#4m7L#GPx$mL{(q z6s+T~CFJBn68neBb~BeMgz{W++IUkdV(qzkwj@`Y%g2=^n=vw;tl@0MW*;(Nn|*ZL<>`&|KX z%R*?EDU(cniz>$(z#C(^CR{bDIBMFNqRsdf+|;4DCTLG! z{?2)B&;1Pm%^xRPrT`Th>aU?NTnkH@M*DN*H=%JyqWPNn4JNTtPUH6(m{J5*C8BSNHxj=$~o^s@ubiF&Yp#3s7x@JRKoAa zTvh@t%xsfbyf{T@Fpc?q!H8$<+-|ZhX@2$jD-RtVBkG z>&4(~X_x35_~x=8dz7j$%?#QbQqJm#Gn|S&QPS})eBU(YuRoSB41Ygk3!AIg7}e-0 z-1tu_R5@}8fR3z*TwpdcQ3(l9A=H8g6g&%ApsxPZD#SxO`G;*5QpxrUM~a$l!caeX zL;E*E0J#o0isugtL(cX$px_wHzokww77cxM0Axf@=9wQhIGa^0EB)3zhyFq+WSe1h z>Xp76uX~>n80H({S4E_{8us-9+n!bNaRP_6<}3G0A56J&Vj!VoF5lNhxFBp-$#b=A zXweuME-Nh@FYV3tu$`+hu~K$`2WKJvtNX{ zo?HUg9yec`LgUG@mV#f>ilOUKVsBAwz_PBm^CbGe+eB&$9ewAX-%SG%AwOqu(vuw3 zJ;H}=w-Aba$`6Xh-^cVB7)j6E% zen4|u&rChCWGY_&p_lrLVfWn;JcP*kkj1NvVt568!Wcc1CTW7PRDt2wmIo;8 znZ%%slH}r|%ISC@rsHqOopbwFP#9I9F*DnQkVW}HW#3QcQM->49Z2($hz~ldo!Dg9 z=C9cE>ck&uBDJg+LZ5G`am7#IP~^f9WDo@-fpX% z;+sqJO*)OOz1OpikRK-rUvj^?GTb2AC@tCaXiZUu9{VM7CRX-sjwXtfmdw2|_c&yR zxwFEihP@3y=K3O>=!pT_-Xm=l-#iosu$`Ia=Bs_~`9&I7sp~N+L{`N-p1y~iC!SGdpFtH3f4J$wmx9rCiL@~h|Bz`JWR}NPeQ~yENo0H_wmuTSz2J#@lf=VO0D0W@2A|2TOTf zj{i)G@lVn9=n1ht4NbU>pc9oW_glrReaAfkZ>%ix1U=TDywm)?-+Zr?C@Z7hDq}-Z z{_fPQw>-kxSO;La44kYuO>+=6QNVouS)8rp(=m}-vR@0X=v0pAk=K%L#?+WU(CnHH zgRR`o5f2vcc>_E*}z? 
zkV6JB&*TIKO?E#}!Ca>+Ru6f!wx8VGCbF=J@3c5APE#}!YHr97bPhZAR0-dW6=KKH zW2fG`WIU2Bi`FNJ9^dlzV4hXmiz>&!z8cgkOkJ z>1wM*H>08jf60fx+5LdBE2$#Eaiuw!@qkNrNg(%8SgFbVZ&bOy+{4v!#2dRLYs*@>AZplVD@lu>YKNs$u0b99{IFVb9wKYIfja|Q1ZO!tDK zoFA|izN?iVmrT#?0?%A9Za=y z-S0eRB`4i$X!M2dR(qx%wT{mcbCFkb9ge$rW(YAddX1?d0!J#*&3$)K*VQBj2`y=9 zkMCyATvek8kdh-YA6&zWArdo~uGns8my%_jRtby#MpY=ZQ35-LlxloSouoQ>N@RpL;QEOJ*WKvF5GUKO3qb z%}^5pBI+IO#o)t_0BdIYx@=1*bzeB6_d<@qSSo|Ye3rxm2#k!a>nMf`+BnhS9T!7d zfmYFmpYJ{sEdq40;C{F8&HrVrZ9bR*>>t5p1;(GD6A*YX5{gEch_b$R3MQY3-zt1q z%VH}0pSj@%iyJjwSXX0!0d$jClx=R}V~H7cl$8Nwzoqg4*C+sOI{sjN<%61S3HH)l zG~ZLO^qbfn$N&ij(3!vbF0v_yE4Y%ZIC0cPDrim%n5QfRN2P{)BOs-UDCZ3Yp9ky$ zHHm?q!%%-zdP%^Fhq^GFKlJX#VTvH zHwDgYVk*8Xf6(uU&snyD+yqhE25tU#_DE5)nHWk3YLw_N8`7b9K`-YBRV->V{l)uf z+6t77@c)L8L$+zmDrADNjI&$3n(CYMSFlc9A(6IS%i`$?nm-PI&UEjE7CZK`{=|lR zZmzqFVx9OVlk#8L1-$vb7E%nvl<}M9IYOtx?Z0_k6Q+C4MfLE&ZKw7`?xE!>yb2`S zafAin>Wa$0<5o$qZ%ZkPrh>5L$7Tr`FqbnM#kB<~>3m)rxFmKZ;}iOK=`WC&%UdvY zyio|rEQzmO;4L42(P6y>9bSw}Q?=+34cKMt2McK6c|HjdNS>jC-r*m^5=&nHWVTs;*1(A17Ik)<3{IP!6wSJg#{Rpq@RlGC{nms)tOPUpEnJoCr0{)5kc%x)VA=wUt2h$K3dm3+U{YO1Ditc$N3&RL)GI>h+p z9B}tc#qh1Sfy`R3wJcNJ8 z({P*(X9-G$e#L5;w;rAicY*I=FvH^XO@vrq zOHsdZz5d)-*g?_znnZlDlZ|h%Q>saC3pYA@as$rii~=IqBWtRb%^S*v8=h!J5JUIg!>sxrbc3mcf-1}pacz1)6u&3ve* z7I!V#oKyePJ1ahJkHNm8jK}9DZ=UTqk^3z<2U{ajhOQ}MXcoDUCS#hL?8kCnx^QdQ z#`HhIYmc7D4K;GAJp&x-F_DkLG&Q*5eqm*>XE*J_PUeQ-ezVZD#}`9XF)+md>?27! 
zv;lO^T_gJ3OkU6IdBcWQi2j>XVh=(%h-iyTC{jutB6XwbZ(W#O#9xM|Iq+_#Z^eOG zEa1Ne&x|1dD29(j-e>>39tKgg!@RApZeHS&%ROgJF?Y{OfIW(5U#i$X^W)|;o_YqP zpu;-;z(FA02^K>ZX*X&hY$3zUuGD6t&1Vx6wRS1uqxjVn_1X)5ycrk%8m}*kD2WB3 z25z=c7OjR&%_%W@N#&*L{#M~J zUd^fX_jbW4YxDS?l@4ASTZr6)0MSC;Zy&qKf|C6`fc$m?%{f(C-8_z78No5$yT zuU5lvD*)=#qQ->=?bTEq^#FbUCE7=*%L*WG_skZ-1AkLdk+qLjLAddzs_1zW2BYe% z$loQS9B6ozDzaT*Aqt2W{!^ILrK{72La5?WpBe~{q}=+ZoAL{R(@FQ$QZf7PDUf`U zUHET6_2?~@H>_G}cG+YXiBSDtdf~m`@nWASPY4T~O}zzsjiU_S?mRdVY~aj(NPwvR z=WChWO4vSk21(P!QzpO!(0GqCcMDe(Y>P#e#q`5{`WxXP#z}2u*Gxn{yTQ-~9+xoB zE4`UJ4@Fo8^(LJiK(8{U%$8nQ9@P&0BgAqCAQ8%^hUO)hp_40smUPk=Dy^~BZ2rZm zQqX*eFke|A?`w9AnBqn=R>sQ2FRhZxhy^~9bsqo9_7!WBmXu@nP!bElWNrbw(_Q-DLtBRx2 z+czRtFd_5SoGkYx+qj6c!xWHYg*4T!2x?p4#^`wEw{8(P{mG_qDO`bczN3uid}82y z>)a?W$@iX}ll?)Db-B`aCeki>t!oKdnL3mQC85ikl^y{NK@A&AYau*}ak`S}wwU-# ziRBJ6V5!?ye5pA&)-?=o2ok=>j{h)f+SRk642i8yOcV%AF9|NDrWG-E9U$wIk}soH z?v3L*8c*;7SdR651JC<%1CW;qUmm+e&w_nx909X}eY+tG?yy-;RUq8J=d|w_BG^w@ z6d&;zb31464yc*Gq=sGLn&%qf4`sj0--C9-loHHWsP&u0&cwDwZ)O!5lJXC~x|Yul zhZ@YBk@Y@}@W^S%Ec&xo{NI2hg82v$N$5YI z*eDL$sNjY9>QUR#!Zxk!RblJE2@69R*@(zj2s1NsCHa~rtT}D?t zpiB{B481Znzt*d|dm562XfUn$CrUzXdLJ&D9_w=u^91|h$c#SWc}&e`10X{3JvbHbPVRYmZj1t703F{L(z;C$p7NQqN5=G)Rs*hb^>)S zp7k#Qy_hQJN^f@naQpkQ>OVeE8e4|!fEHT^>f-EIbOCIe{sN}*i6k=~?nUtIy8f(tKpLY^{=gZFro-e5EJ~ zxaEN4f;DsbI(9Z_9DW0>C|<1AQ6eyV=;l{!2q?cio4#od_W)N=`9zDY$m~ABdt0G0 zFhghfE28+Ugsq5|t0KP!Wk&!!%WyvP77Qhh+kfnY%_B zl=~V+Z_a8L9~H;UthAdzITO8zhWRFAyqn9C;hDII~}(!NelLjrli@<$O66vr@e87BJ*Tuvo)EA z6O?M({_?3hd)8}F4-!6EC|dHi4wXiDH1=OTdolBby@Aj_yB7c{>C`d!0n1d~y;0k- zs!kC%+mM@!m^#PL5T$Q*|4H+c@$l8CLb56}ZT7#y==@BFrj++q%vNV=`DP3%R(TnH1aV%o!Uex@g zm-u)f;a)i5u0|tGL~Z_Iv=+zv2caUMr3{~*b%F+0*ja)xudc1mX~#hU*qFOa5$L{0HQ;RG4Yko@CBc~*T>-+7g0YE3Rr~(S zaXB@*R82ta!S1LP!*s;+oN7CIXjnp~F={Ja@gt6G%NROliUYyN6PJ4o0U^TO8bwW` zxs$i|&|^slt)A2lO2J!l#|b~8Aq`uk7uar3dO+~b;o39*N@9#ukpY^+o8EXC!38R! 
zXML6s>AG(bLnD)k8$%>Z*UsGj<<0yp5xtKT{v+u zGFSy!W-#hW`{RoydJ^gIyACunWxkM0qvKhL@ciY5!-D?+eds1F`F zWh^$5@Tp~m$5%{UhdRJFz-y1uLDe~sU5=s_0055jEfpDcuzwaQHXz_ej3hBaGf48{ zkVkAULYOLCl(Er*jR*chbN*PXsjfRZSPteAwvmXZ?h@~vxo;k;D`k&1+TA^W@!|RU zno&p5{*KcJHF`(q)OVrt#3go(U$@d_|Bz^m8mg)@ZrS8U+If7-Mbw+Nn8g~s7m4b* zoDWBLR{R;TU8G0s9#T!F^dhXduHZ6YUfXJRKYN@e2uXYUkiaF{5oAbViB~c0C;dgJ z5GmOXRR=u|ObDhC3l6k((DGN_JwbJ$9^m;hbO>s8>~Dm!5bheTBeT1bPny~V$?5^a z1Pb*56|&$aM@9Q##D&AJgL0NMc~j*%g1zX;tf6>6?mON~cU>yWbyF?-ODd;Z$w-$M9t z!B?*ui6s9{nLa5ZSkT{oK9+Ljo?@$0nx@-{qPK^x7g@R77dq{{KK4xa5x=-EQ|+eF zI!Vu8hPP}8<5&6`A*VMb>84ad9z>Y!F`X(Fc}zw+-Ilay8|?78uy%|XHz!hT_NL?d z=p_P`X!@q_qus}E3Gt={?dZL3L2;4a)E(vwC}@&O$!Fj7GEX;p(a}aA2*w9T!Yd3J zO!IN{gvgSXMcYWtH4`CQ#4jC^bqr!eGySzBlQN0T)3O^+W-h2eWl#U`p55YzVI`at zO_m`tq_01r8PFAX={_?Pn!^mZS0?>Mjuzl7=*oDG8JqC{9}BMwgn#vw3Y;FfEyk1^ z+&1%B?}fXhtd>QHOuS)Ee53!%_xC1_yh~O!cwK?(C`GL=J|_GfPvk5;Fl ziTMX47XwAbnp-K=B^Pp-+$A~r3Evq}3wcCowO6iYYpl5QcA8SrJNVNrwUjUSeE11G zeHN*_o4S^{(4v8#j{?rj$R#4g7(Ijb@ToaYy95*QVfH@S0rm9fwp>FdW!}lUwTAF~ zE9lof)9m0G3^bwtc^hpbFqY13NO)&xdKWw3{&^%fg%JI*4WzhOI#Y+PrJaMW^WOOZ zF-8yDXhHjuqB7^0sH!&6^&(h!9jdHY-NN&DRthc|UlF!Elx|CUXhvN7N-|K)uyCi* z8{HYC)MhW)Uga!J)w<#3u#yGt9SrYl3Yb=H*V542zBQUwJ@S_HUso6EuLvLujKCV0>6(s=%)`jbb1cyRaP_B9wR5eH=L<(5!jx5sQ6|y{_mE zMU^5%Di=e4Q&;)lVV>(!0|8qsbLj?LsVU+A=opa)adQ8B5-p8QVqW`l34{%41XA&R zI;smW=aqn;<_`JwxWK?F2H-Dc;8#V{YT{8c+k2Lc_h#L+f(E^ zln2z?A}#Xd&-jD3-T{))qJqLjVUG%_r0rOnqAHh-71Oo1)!Y{{afBENx8!xib_ZI6 zj^H=7?=~O)=Eg;r=$P9TUa>qA0xnyV)mwor-u&xUmuVqRu<+g*@c=2+Yn^GjMMJC3 z*}fbpN$9bCi?pXU8Oq&$TBWh2t!{@BNoA=av^j8Q()E|1AV|g14bFgO5I6iJmaMXH zps4c6rNLkY2~LAi+8hchL17IGYDK6HjHg!h^WQtEy^!aB_9&+k*5_^T}j)v zP2U!${^i)Q9o*KaIS0iiG8V49Kwy(>o@M%CJ5=)a>o3-Cxt9Wt!H_pV7tww^u{qPq|TFyocLrV+d5jmn|Aji~TaZ9%oS1{534n*DWyu@8Oh z-S7C}2v>D?ZYRAoJ9vj%>RsiUU#hjkqS4xOnR2CXuk^vV)n}3;gwx=72(`JL-k#gi zlykJA_zvk^+LEoHCpP$O{DD2wdUWD8?FFJt!VJNCKLIv=Z6x+7+^DuVeQs~PGM_uu zfX)fmrWkW3tkv+rNwgy?7iJYNp?+yE?0|baa*!fxQ4~ee@85PN$5JEY+oi)l#Gu}Q 
zmX1L((lI&l&@eZ8PIm`}Gk2rDfh|ht88l;KE_i)15e(4iFK}UdoIPfiG_r8M6SXdK z_7ze3=vUnHQxX$v)^s_@W?q!;UD~gIRfKr8{uWy|f}&tj&{TK4d>j0&F15u|-d~3$ z>k+)(t-oU}^RT=N$?Urnv2M-@uQcI!^ z@QuOVYm9+c{iHS2oc@6`$u|EM!e8gasC4vlOyqB`<`+k-FAAF$?NCYPn=LERf0Yxu zR(H(}nl?dq4gmV>IjvEIZFbY#S-lrCq;!9#^=wDh`(nitke#%4ih%!H zJiI_lBlN#fHZQZ!EeTNp!&j5q|2^?EGZjOaZH2o4(u}oS60>{Re3RR)y{jzqq%%Nr zftNGO^prW&r4fa$cdIRYp1!)y_%%!Ow-*HQw~}WvZpk8h-K_14U6ZXn+b_1I1uVr< zCQDIRpC{|dR)eSaJ^$Hd8h7~kHKs2>^Ovnq9$WMcXlT9OfeOoqj^ImR?0cr`y|p~I z>Hg<73Vt*Pj|lr)v>A>I+d<>p6Dyd{D}HQYA7Yx=3EJi@-6_yaKOh11q5ms#yoj?; zZ~+<=lHQbKsFr3YW}I-r8!>zqqa?Du;`iYSlV70(vFpDrvY{r?^YNu53s3QRxhCt{cp+y37N=b<|yS!USgcs$nl& z3ibu?vm|AgW_YT6rvpWl)2HGkId!WsJ>Y9>@0- z64Dycq0KY%k8SNS&051QchEj}l75bu=J5G^Q~$VLGZy(qqrY#J#K-IlR<|11J?j&6 z9@p`*$FoNB>}xSQbBw>t&7}N|Dc|nGS=4{6aN2trtr_JO6uaSI*59MPx^M`WwQtJB z^AR}{guN~gOV=n!-gSm+%H|HYhl0EuD|nt zdw;V2BR4yJ@w*Ed`>WkIl{OUbV3LhvebVfPn!*Wg7q&b)WE)a2GZw-; z9DxBJ8Td|kuDIeqca{>kCZr#R*ivZKW=KeQ++)|AyYVX|SspKPiv7NbDV-Azxvn3e zrlT0kzIN#C+}68kdW+XHEcfip@gTMf&0<_loQR0yw=G&iI2)zItdHS%1cWhZZ6B*P|vfhiu0l2mdrA zCH@wJ4bG50uTRnC^!+aq7b8HCfEylJ$!d7vD31tLV~^;0^+q}TsR86z z(YufRY)PicJX~P!pOF5Cfh!+epc3IRIW1tk>klJtr4PxEQ}rveQ$W)7A!9)AeHk;C7I`-ZF*6>_{*}b*$-h}m< zu9w@=q>Gje&)E(`7RSa`*F_Z0T)E+UMuz2`Xwz<=vuw#r47gAig?Is$)yWKJo}s01 zMfKeQ8B+V#6(F@k34Y!{i9L*A88e>9tfF@fLLz4>tOZ11GpBj-mC&80TJ! 
zcn3rH60>bPt+2-Hy~6I zFHH_iQ;Y;^fQLSh7W9~qEUoFxlI)7mf1qt6*@|T6w*UP+S4$5_>v}IuRd2Et=gJ0@ zT0(95)gM9=z7IFM3H2U7SkQ9=n;_1pI#Y7tdSm*f?9t37{PUyPH4_l=E1MWBy7K-f zki-olzsyKKPGvZopyoV0FB~U)`4YiRW}4*x_e|#ValvE>{I=5nW9m!bp!i zfB)C(zFzmfaLwa+&Uv5rd7tx~hvy0Kf+Am@;~J(9C9Q(*L;v{>JP%UTmR?r?Iq7A< z)I*!M-xqnmU`P+B6}}OA>Jl@hT4vH;vrW6V907ducM6Weep|8eVev14okdrK&bY->yv+i=Dc4(h)D%yCCn_ zvt~!f?Za<6HvDEpo*cGk|A8f{>fSCm(sRZ=KkR0H_}daLkn;cb;TyK6ZC;PiueGr9 z-9#2`25#m`he!$E`%^>@o}zwypGs|CTu=Fz*cmHK$S44`>e|VdCP>(Z%%O;84D)t7 zM;G2jJ$#mj*UBCX4dJEHrHz}bBQwPSLE--A74N#e->On(97j2SUeFb3Wj7Y>dLQ6ODvRvLSVmm>+$353}MW;k)j535-t_S~)w zD}-^~4_>28x*?(o6!i(U06M9Ba1sN(JX=HJqCM0jCvZ@cI{jb>S}H&8vdQ!BPc_c< z(n*$RGR_->Yj3u_xM!`6V7OKg&Mo!xv;^|Q+hQ+1%fUf}y|5`>A&w{9+c0nNM#Iy! z468wMhy^JP=^O!8c!d;#0jn)`xQZrJ2t!doai^njZv{6~{V^oE8D8vA0HF1jQmoEao#Ftw-R3t%#kN+L3O|- zx^NR}OVV0B=6cQS9Wt|uI0ApDr#%3>|Acsm78#8b+P17RRJ67}l3IJzXIdB7V5DL} z^zBQjV&4raZ1Vbsjz1LonJxg8DIiaZj;iv`0WBt#Jt`|qAf`aZHXP|S6K8O*h> zqwrWQW*Bj@MLO#1B?sYNLGBHH9HHWqv)hSvh@&GQRG<0g!#}os(=NjIS7&-6iz@bM zaF}|4MZ@z1Vv^27eSRDPx+P#U?MUt_|Kde=>*y`ruh@^x7}NgL2SW&`le77W3W&DU zcO50~I(W2k-zfeXSRP^1mj+;T;#~YAFMP*VGODcU%-vNi<9lfuWA_3Vd|vZC-~n8kmIFF4C2-IrI2oK^?U3*Ux_trfS1yssP*zw_4+ zdRb(AIO2)X>>p70hIuQ9WKL+rIk1v3Wf^H8AP*_^f!EI>a8t8cqkr84gB5dI2j;V> zH7;d4CRJmy95*`hY3&JE*+ERItLfY9rs%}fQyWV^{*#EoL<{-xAUSIB_7(lDz)r$l zv$p?A4K+hb-qo#24w8m4TNUDteb}Y7?7aWHwD~6=v@7K%f|q`wqu=KA$sc8!nQX1( z`Q#|vS<78zh|j6;_~K>AU0sgAm8^@mG2rxG6X@9smLvT;ks3<2eW(|;9_bqSE9(ZHGvJw1a^O`?WJ6)p-UuVXfoc`n@ z>jRi|THKZlq-4*THAnbYHDH{DQdY0;+#a%*dCmR)blL z_X%AOy!DXCOyBXZN=%-nU2)U7WYy^*|1)9;Wf*XHfXr+Kgt#%3OAoH+UE`oJSbzv$ z&P3fr#l*fgcS9yIXfS0>)kNkmb91p!moPt#pmgd|`TyXND+7 z-kA0oNLKaJ4|<7@ZMf{~l}7Ztbn6d#oimX)b(jA%xRUUHGLGX1|{w;>Na2YKNzE8y4@9lZU?Kqa_ZTW>W!Gm zcP;AKp9LW=6R|@B+U*d61*$?DF>IV8Q{3crG0%1f;m+4oMrMai?kLZNJOxmU6><4X zX$L8rs-NPIPv-5=eU`u2qJkJ7_yIM^=J<>1CIo5=^$;>M63y8F!2%2NgCw~EG~oW? 
z1cf;6-A&k*9UO4TXh%5IoKki}X41SNWHvKgzgVykorWqKG?_5=rNQ0#=)&FSg6*=f z#p6tvO7CUd>7^WEYK*pDO&$|nbFv5Hr2}czzMSj)1ETM!A-9juUIq>4I+!5M(8s)=oldQ~w;Pg3 z=krISkD-}65Yr+>i%Y22_G@Cxq&Vrs$!6&^F?OD{8o~V=Ivue+#zYflNrSx94bARd zZJwMWE~+_xF)73rdxj{`u+=BSATJmz+;l%&ZCmG#zD2WR0mpZZ*hw4O?|bz{WNX^} z32|}tsJ51ejm`cOZm=52QoM`4oz;)l4vlPZSk2a7z2(uk=ZJVUGX5ShW;8U!*7m}Z zvY3J(hYB~MCLbX99~{E1(M21UpwfX$=^#@rOipv{{o`(VL{RZV7_z-_C)GXCnw*7; z`|?G_CZ{YItiSch&&+V5QQ*c8(=*iTT-;M#t&%_*E5qs!G`UHd*2Y7Yxq|L!hp3$uy;;?hXEaA zi(do-kLQ?ejxFK&2K)#}U1n(Una`o(yb#b+_XCQasq5c1kr0}4v=Nd_zEr<@i(7Y@ z>a>q~`2&Ia3p*Hv`_5;tM?LhC-8N|ZD4iNX`o^OzZ3c13BoQLwvmD073!s2(^gi+x zom@R5xm{&K-!`|tK|1aW+HAR^mmZ2!`TGY6Tp!@_H zg2*AEm9PI~Z{8&JQ?{*9$n@XPQ@b>;qy00RE(<5^o(y^8+8-}ZDoWT5RD@ADdFMp0 zoO`_5vz(@LirO|gCL+M@3q$Q7r-pOun&gFuc-UdlBZQBzKNx+{-s%81G|q-0-*2$A z1?Db1EDhihlV%KoyIi9We;?-9w|m>uP^%V{W!vK9jEXm_)wI?Gc|(Taae*6vI-z|} zzDU|C-TJyJDRD;_cPSt4oRE2Lqe=|1WHA^Is}@t5Bqnjm!%u{{Gh)D~gC%%Mv}$`a zvFD{=Re4Dn-8{Y;f8%uD&?i`k0&-Gb@Kw`?Cn&I#KWKSCab3VrEcN7jFV;E*=b>Vg zpXa@cz9141tY*i(2iVt;TWQ?OdTbIq0P+ASF$&g`TUr)$_35$a@YK)@{zA^CZA@3H zC%K1bdR^Bc#IPaH{;d6tLc$4z{9hq+RNsc#VIuG@0{nN>Xf4bU5@*RpM(ffraFW=ICT zwteKn(0O5NFEk+Da`vL)r7bsZf}tl#yU7F|J;{r>7h_c~H&u~_i@W=S^Bukz%_cJz zCwXMGx`I>*?h-s<#OC_{EIM(GQ8BiyAJS6Km!X{XAWrCd*FZ9@kv$fTBd8nqjCBhA zqzJS1^EgKz3>_(ZX&QduXI0hcI^0E7HU=;{-hCXx0n7$T;Gvqk}my<+7!;kOWcwYV=>BE z20Co`hYC!r#Z@Xs%ib1P94aICsBe6KuMIysTys&$qwdDpRI!7;dftvd^7i(&obhU* z#qCc$^`x}S9;8^^g#pj@gYajC#ZoqDR4@d8<4D&PN$lx-dH7VQ#)vK%l5greXejYk zI=EFGBd%)}w!jc<4x*>I5xQx|dh$RpUpd@UvGNvy2?t}MQ1B52eacmrhCOmp&Z{C+ z$tW)=;e4O}A7Jn<9#Oz{NDuZi*V1=`I8f^Bjb{~~%QWTvX`K?4w` zC*K#@dR-jsU&2ds2O5D;T=7$9_8elokF1X~AVe~=#_D1E{J z(@z0|n{<+a4hF?BYvvvkzpp4TIz04vGI$=`&Mdh+W)vsKethHdwWIsb5o9p_+p?DJ ztywMj(Yow~8|Of>CAwuP8>2gTeYI-6`qi`dHmdnU-O}q4>qV{27RFJi+T`FT93kR% z6?021w8|77$Rcwb%nt!s;0?}NY)4}9zr^M%|JmK5Bs59y*a~i8! 
z3b4iNC?d$Zp4uCX93|@F58X<)9)M#6bblbqvm5{9JgNOBLjT~CVSLc)g#m6#hMiRX zCrtUO2-N%ikW)q$t$QYtDsuEB@~B3cEf-I5wlh1x&RpRDrD4Ca7Jmg}%CoVRpLrxS zy6*gCfZ;Dr{I_Sf6(4Dtb{m@Xe?=WE!4@YI$vmME)d1nMIQCq=MMhHowKH{n2*m5z zuVyvP@m>C~dNv*&X=)!;+`La9ARl`j1m_+}a~G+t6lKa4*cLl@U=%#EnwWGfRKW}jygs;4*^&HlvY>+jRahYk z(|h>{F_92yiu-Mn8zV~bIvvFp>%HFa#;M-TC#t643A0&*>lO%SOy8X9nMNYrIyVln;|52j&gy6tX# zH%N{ml_&X*>9ssuxPkVEUVGS@A95ucwHuX$-v~CU5%&8+CxKpw5IM~y^A9*YxMPnu zM~V^muRJt9(UNQs#=`f@RJbFRvV#%#O9vpn>&(e9$rslZCM|GrQNXoLvq6}G)QcYF z{Xkc?rSUR+D9MA6|J@5ov{2wy0-<_PNd9puu>FStzqn>aH933e9riVe+u^a?!lmbn ziQA#E+{gPv9rQ%k_E<5(($|qs=9GT?AI5)G2Uvo?p{&6-1Wg{Bb~^K2ne2brfi~gw zG=6~nxKdEnv8>^6$I(skyly}bE-_u`$*0>cj%-7m%(PW-&#k(r2@K24`puSkH)I}Z zEW4A(N+7!eKnEd@#suR;EN44>(9M<_lO+`SM+^GBhV5%@;>an!PmorTXiZZG7>yOs z%30QM0TSVd^5{U4 z)^zs2l<6J>x`OZ&lr8bD`k{MJy9Ph^k60(91a5x*E?4-V9r%P{rc&88Ic$y;4C0(j zG-j{c#-qPMxTk_?L0)#(SSfrx`7OJoO1|z1Wu&e?xJddPYn11(sK98vtwJi(GVYx0 z^BE8*)add;oj9(iZ{N@dv9}r%9ss>AklFz|3%D7QKZ6Jx0+UG2Ptar55_^*3nzFyZ zqSC2F_RyJ3DtgY1#_XOKFb1TyvjH#m5JBr3f|I9wK~<1!xbs&tq=&EP`7KKbLfk7N z)Dj+}9gA^2^TUMf|5iZXo;G3sG#;uZm^O{B&F!NHE6-+<>)|sH`w1WG14oDP&Zp@1 zJ3{ljh+@xdAMA}U&ZR6+E2AQ4-JAhq@ni`*lrK$`u~YdFG2{ahde&`k@O1YV1);b z$o?QmTS_LPKu=UZw1IKT?RpES@U;$5_c(jx68?!x*rRVi8A(4HPAc8XvA{Hl1CuQZ zK{Cx4waKXv3|mM~ywM7p#2&%#hR=x@eNCJ+R;%qO;o>l5v1Ne*EmSMI_>YEIjqE8C zz#0((Pb6TC1VsaT^9}PG6;Sw>HLl@asOK}2UzU(Gd%7nOgo7bY;wEJ2;TT%f+eQ$b zi0q!$heFmHWdchhNZdlnC1!4EvBsf$*#^I)*g~&1M<^Jl@?2VRrBgr5X2Tdsuq*!n z(e{N@5ZYgC)}vPH2R|CnOnLH2PxJxknpw;MsA*P86U_b;n+Z_!6ayP0P+++MbUHDc zJQE+|Sm%2MTCXydS?8f{{2d{ZINR5>Ji?w0W&A5?O4BXRH5qAmvuPAXfxu z2|ojGg^jUN9p=9Scl8&%%L;jhUEEGIQ)KW{2jEF5f=xQ5lY2zVeZw#xSJ+3i9B^iz z;a=`J;SC^0v4wtF(faI-bu~!Wg`(=D4_w$qP4s>?x;H~?NrQFv4bvKQ!u-t`MBVP< zutHCL)2cXJxk31btldVa-i96F7>Am@rACTQ@+}TMnHp0*Cz@rn=Vj1aSfDE&-OmT zkf^(6m(zodnrZ8VUrmU7Hy{$~vmPLIE3=Aeyj4Ivz7? 
zP3-io@ci)c-y>_;LijB^JraIZK?r0tIu(L3F|^#3U0fp1IBZllW%W00-N!}UvyU{xD-QDMQFck1x{DYJkCyx9Pe@Cr+ z-6^X&(nIxRVp6ZBh31!_|m4y8v61}eis$RUVuZYC?7S`ys34BTDkGTvdGpChRdox zMBR|zgcyOhMbHi^74~1N3cGp#DuT+y8IxBsOR6(yT{_D8U^8 z>H~w)oCL;00=gH!yAqW{ZExcB&%~M9KaDxEgk_6r3w{4H_9sjtnDb6w#8KkU-B_I+ z6rzArSxrmbThH^#`7)+*n-5Vdw5zM7q6+wB)_h42Z4V~5h?6x2t3!9o#BRYBspEYe z*B))1D&E;jcC#Ibu2ps3s&!VpvGH`*r`z?mueLis3Z_-W4#!UXvDwZs#_yp_dA$~% z9e?(&{GQKu4YEs}u!+}e(kep*`O!(DL|6z$w|Hj!C@+iHOXNJ!7&*iWd{{bhj;t|r zWv)%qLXAWoLTNL)+4ihlY1*RKddzPz!hvo0k~;!$ulUX{)j0e|r(M+1=2ajWrF>QI zh$Vn0-klX=SlB|LY;mgFmDRnCDapF926T~*F(Q=4$O7bvdlShS!q>&3QYzP4K5q8v zD0resO- zuTM~EpOpKpLh$?p#BA|GKJ!Uw(}E1gjP7?(dWMVMg2L4cY)N@OO%E}?Av?ELx)<_ z1fqikgYt0%wf}GTW`FY&4tUW0%r(Y0DI}MP*-H@&k1~hV!*@xLe;tRcdr{<1KDPco z1qF+|`-`_xlwe)8VQ;DdIjB;o{0OeqpJg){_y9uz6-vtD!e^agm*?lpJDgBj=PC+4 z@9jNtWlV>`?{)I*;a7vT|C$y%s$7Z1$e<3E;%sE21iA=s@9b6a)0oMjO}bw)jBNVeB%ECKf;gwZdxtAiL~XxQf~~J*NKppM2HdAZO6^}{PSzg#PHTO_JYy4y8=SCdL>!DC$9gv=Xwo~YHuSS;p(Xm7R^ zIsByd@|HGjLhOdJicR8Qy*9j(_;-uv#tgqPYteKD@{ycLKqswR% z!*k2=w7^LWcO4gt>1$khR(sd`i`noK?4T(I)={^xpOK~l6#vJja2PTd!&ja2szZ_- zRLsvJl0|Bt)~xPl^Vl6Rv3`=CN_M$=OLaU@FsQLMcNdir6El4)@7TGp#g`U_2J^{q zNh|_hwW0N{2Cb>A=C?|@Jxe@9h`SXHFIEmDG8y14BOu$VLy@u6M}g6;5KbmD)Ru(E z<>Ek8RmAC~P0b<$*|3J$5@P)123jCKyi%KTmrtiOLc=4?GJoq)C9ldNwFJq76kK@y zN$gTw_({XQulp{P9~Zd82XhFmYcxg{FHH>=nYf-lf3X@~MgL3S^@oIPGO^d3@jZL^ zKZ04n;PvD~T*CnKMAlIVFEQu$B6hL1QJP?F+UNNRFb_EnB~o-LeQ(>1rz*@Z#O6eo z&nd^x?A$#$62ZyEU%C14Tbw41en}>5RZ#w`z=OvM$V;Ly&gN^d@%7@Z)QO!w$7DR@ zbzpcBj`ZEzft4#jTb%tQa3BYJEzA3@n8x^%&!|lzANy&3ROy0uA1bJ5e)UKaL(pAj zXSL^i6ZnMJ>&XjqAAaj0?+`jO0))K=*C2dZ?(9ZgULGY?d+F}be}H0WD&>94#H*p(|CF#b>dvA z^%L6RZ<5IM``_9oRI80>f|?tVeP@}66n*J4?=9+lxOQ}Ek4t{f7oteye|R!{pb&hQ zNb4I67QmJ^C2B?csX699DTVGohAc6v0+f#Kd5Yc|m*+QcpN{1p$eaK8!Nb(kIWWYB zU-4Y;#k7z$GJUc?&X*@V)%hJZ+HiWcdT zpDXLcG3HUPTR(P(h>XM8wSUWRBgzI2mT_Zn1B+w5>HRQuqWapy zcvAam;s+o&a3%(fH$?K(i_-b=Ql?(tWw;&ZlK;ejDabX|!J8rkPrwfHdp&fI1Itrv zNhsRyAVyT9qZR`@S{6w#AO}22W4{|CHcN6O(yuj(I`8@%d|5!p+fMrzFB`QQ;!(wi 
zS;@3r;63z>|5{tFrsoOqE{3vM$v8ys)TlP>n>N2H(P&5o!8r^!CwNir$G@q0jvJ#KeV)XYX21 zr!T3pl@|{b>lS;=@}hLrNzv;ma7~fKIt^Q7hPbk@g?#I2N0>V_r@-wzGm%Tx=$I75 zkv_Wr5y5MP*9&CuwgSd~L%jKy0L`R?9Qa}0=R0X<0-CLloBhZ#a>ibYXs`FKyyO^Y zpG|N(m#BUF>SrC_vq9%Xn-61_c4m6mgSF_O7sbBLMlmeUiTb!pj9$rXF2%Lh9|jHb z|5h`93l410OP;$0*b}caS1|6zb&=~51Uux&BK6QE8{o*24_RkwFs|FkeAqyn@OCRu zNs#n|4#0@En3zLi_`qEKi3QwH(R?(T))JRbqDc7VOgeZ^-O8)!)L9~D_ENt(3I-Vs zbQKX3y%g#!%XJ45TUb0dC-!}>aN3-%@_(;EE9G}`<^>1d)zoCLewN|0~>20hRg zhaYC91I>ZfU#s3H)TVXfL0MKEV#K+<#@yYx*mQjE*}MJ3=F(XK>?Pn(tCn!cfu+Ww zU_HKt9&yI`zT1n&j-s|>iXC6A_Y*( z^U1!9#95+fu<#L8j^B^XJ23dyZmFj5z5nvLTFE&wME8m(svq@)`nBh$c<@tgVq%>4 zv6_e9=J$l4VJUKWByqVNg}PPBi0Lbx50{yB>NBhqz6efb{x=RS$IC$!a;7o5ffe|Q zCDW=aw=dW%TE5<3Y>JKOC-N%;7+V9AhmR9zl zn4dYf=(pF0mwY3J&?3BxdorC{@Nk49mx| zA<9v3FSt9qYK(}zB)KHXs%MstrCUgm0*RDW`%)9^0^^SDxcOsOhA3Q4__Kdkf<&b6 z%eBQO8;|YC4_g(3MyhdrcUCN>?pqjW>NQM1>il@Xxozcz*s4uh9V6go8+V@2#V2S(`t5vOJco;FjGV4>R-{#h^|DV&b#y z;}uL7F6fdge<_n#-94FBQi^%46 zd+bkyQM5%SS$y3ap8puc9dvD%^ET8b)bk3Fb0C?b9`Go8W^VLg%xj9DM$!8>s@)ZC ze*BEJ;oEoZ_+XyN#w!_W1wYOQ3Bcsez8ffQSPI=09-V61f`m$Zk1QHIe12O99}*YX z6o5U?Tul=BXSxj%Dl6>8l5c7-mKJ$RoM__qZVw^Wbm1UW4vM7jQqDcQ_`7#+CNqV; ziSp@=;wDFy7dRH%@sgO&ZB*;#@ROrV!?&wKU`{`qSJ0zP^!N?wC~@)a-65#cR9te~ z3J%v)X^{rcqlY)JO13apc?DnMEq2Y{sItTh>!C5w@oQGR{A)kjMhaLS=k>W>G2UV4 z{3pE>Roo;GLqC)8_%(FcDlp-0Jrw#wrw7l?~^{Z<9MM*qZVIq zQBo%Dumr;W%d&uH@tLhK?)i`JiR3n<>2$mkqBq?S;CCH2`IxkaDp0*6T&7+=R@$`1 z@z7@!oJ?;PP9>Ah--z%_3^hX9qT$NJIlNo!`ifiV4D7fM=K)0peD2X^iU9lmYT0V` z-GbVk!D5>&qhbzCWgaaRAIjB1%V~A_swrfLR6lyXW7*r9UY#Dt=WZzTGeN?Q zXHP+TG&<{`v)8iBe0fW^xRKzZx=J-qzYf3Qt+S_ZQ6tVEX*O91dEyv0oBVbe>@!$p z$&P&BBHvA~x60^?8<3MQBnKgsoBVs|?`HgajDjMkr0{-IUv8I-6M^)D2W<6#TY&oM zTi=zWVSJ8&#(2r<`$=c;y9o<}(BE`}&bTd4(1J)!>?Q3@LrYOThdaRefW1eW9NQh``u1*aqE|~9<=dGI;qvPZ zEa3Pj#{I5ZtmyKZh@5f!@m{%cw-&P zFdA{I3GZXK_3U8;0F#-%l0ZO4-m7|6Bb^ZQ(Qs^Ui=dcrAgRWN0C3td9uWfVJmAk6 z-&NsXEHZ&b(`48gg11GKljcE6gh03TA51D?X#PJMBxW zvd`~v<4i)GtX99#;LS8oibmJ8GWL?m>;d{lA?0-lBRBAcG{wZa-raMaY_b{z2J{}D 
zZl^M}qaM)mv({X_i#_s0!JQKdWgSlkbVDM)R!%mT4EVO{y8QGgPU5PdNKk&UYYlVaq5t%CyOyM zfX}SEr9=vp-_Cy^pRTscTNTG#S2{Z14^OxJZ)X@80}#4r-am&ILQ;~wbQwrpx&-Aw?x`PBI>}l zIVKj|Dw}1rZF!3!20`GUl_c_tODQAU2lL61RdNiU0&!q1Y6$Xa(6H}#Xm->ef#V4m zLr@0q3K?-yZL66HDr4OjVQtsBCew$jwQe_0lpXMZl@W3q$y%oM0jU@$ITtg)@e)(sI ze#JpP8?=(o2JVn1*f~Sb(VB3M6mhVD_aJncI6^LfeFg-a?B-;4;)WLUXKCHFf96z! zOU`Z%c!H0UVO>~fcs>D37C|;HOv zQxyGpluB_?2mfx%Cr6sBuL@zkFRH}#Y9LxU4_kuCltMO!bP4*ssx;w*N?f61Fy;j{ zB35qJ6xkOT?AT0|K4sLA%-q9>0!a>W6?|=+7Ud1Tq%uI%MuL?8Buwyj%lwX;AMBS+ z-=4&Y3RuWS*=YwI^_*+tib7c)jy6!!)riwT3=`L|3bEP?;_m9>_q~~J*b6TZ3cj6d zwSB@ziE{)b5a7|BTH)`-t8+}!zwyGx1c*Umi!9|*{aPCMelICaj}UnlzwK>TKXFN) zRZs6NWjw+O-Ch4%zPlzXMsc)szo}X8KdT!>qYk{G3cPK*k^5Pri*r`b)l~P!VJ1@E zh2>NrC*;2!QKzm7cYKILPH3gB(#pb-Gx8x6*s+L%=Xw9Y-Rhjoe_?f6$TH60Myxo) z)Vk+~*RA=b?0OcjO;MuQ;!_E+R?D5B-1AAG%DROA6|ESHqjj{S_m)*M@rnnm8ndc)4_V7%|)%p)p(yU)P>@2sDfHfoV$eq zDwSIX9;^Y2QZ0s-S=LK4#*LqvAluDZVxXy@OI_Om1l%XgdTL!MvpK)OQUQUw3g!tf zJMHpZlODu?#hP|)Z z?B8N`n7#|GoMqA@y_6t$J7Q{IidI)h$dOK4qk9+_TSYV3zCwZhYKZLi3uwf-@(XW?_*jUkSNK5M+=6}x394kA964hj$dYUmcoB22`Sy*RK*r>k!q4OG*xJXn zLxO!?zj4Z*obN<(jC3(>WQg_N;;>n--?*tz41XoXq{o5tk|$P4e_f$TmdES8%>cEoh5iKlFp)aN{)P6K{&f%-vWXFl9?#&>!kb zrLQ)S-trb|d1x_Ld(+?ED7(sdY)0R-f;~}{*gCvMSBYGa%wFEzj9APuzTeXR#U_7T zqUK*gY_%`j0AhHTbH))SjFmJQUJ>CD=(B)aJ4}1l0$X4TC!wlk&ux$;uQmX#$=wws zKLO`PuRDwdS28K0AK4A8grmopxR-}_+qNwTJ?*l$FZmfCOj``4Hzki5X(wD> z>+W~(*lrSQD>X2Tu49{350e!YHs=JaZ9AdD=ZUCQ3ujhnRsho1 zl-5ZJ-{yC2XgdB_a^TSX$5zO!sf;Ck>z#)R9$$Ssqp$S(i4k1h+*QbCL~fKY*XpVU zjq~BxrUE9+X$8FhYrEaca6)!P;8V`VOS?G)V*Gba@_|CFl}eLDD~4dVk%}V>a|Og9#WCNDepA zZ@Fy!K_}O2i=}}UQ40(c#h&Y!zI%WnFQEIt9ec3gSW-hyk`BSFrCBA5$D<7g9pZ*>*TD z^;tJ;?fP_Jeq|Z5DdIwuN-%a~H!+C>t12d=^nYMeom8$lN&2Tz|%9U;99CaCTBAcYB5(FW*Yzx12pY zg%|DnuxUgK$j!?o5^eU!y*-y*%Qg&qzN~Y$6Nk%H4E>de)$>)I6n|af93=d1E!5*Y za3U+*hMoQz%*fdf_x}gdw zUAElxJ3$=5#iM)b(wvj#OY5giohlDQ1Y-nL^ZHP{ip<*6{qZtvQGpzMz0e61G#DU% zyovQHBH4!P-v$g*c*=Qh>2LQfLByG{o~_KY!IR>wqJ8UJ#)q=Ji#H#>ZIQ^Ob&kq|fS`u7}|p 
zb{A-p&-}&SHT3*i(v>-}yfCR_Xj1QT+`f<9y&&3i6Ju6NY~!BS#sq zTG+u1D+LQhg7s0RYw5)B2`DX$F35+daM$$DXRRf^I4$;_WSJ0jK0Dkt1AU!$oJ^ZN zQ~>2;oHL$zW4~$T#(xA-66X7`TR|=gjFb?HeZ#(QO^q<}8t-7%@)aY&`qE2oLD$3@ zadu;&3}J%W&CK^_vYUhnhTH8~Rq>1^1Bh&s_k>{p|J)5VK;%ec7%|ay8FS!BpJJ6$ ze(yd`2;_*2caN<7)*Uz5E5|lLmA~FPW`V7pzy z)#e=fZWx^>D@>snOvyWy-D8S-iq78SE8ak3WK&mUb2AFABb*P&&e`nQbY?Uo?HAgM z%P{EK_cI+=V%2va0Z#{z9UGE!GSxIX-aAzn;+FI2<%2`_4m-$Lg!TD#4GiduY8C*9T5&ehqm$|$cT zHWDg*;?50R-EsVR{#<;{s?ic_qSL>Yl=ok1I9vt^rM!p zeDmz-*NzYtqHrF;1!r55ZEum%BtTl6XWvmQaVg_Q?U!5a?C$Z1g!HCVE;3c$iioPW z`tMA59C8XOlSW=+?a-iV4?#yZa3#w{FlG}%LC@%`h=DNCIrBF<`HDN{_hLch+bA*n z8t9fr>sv_}K)j@ku)e!=n$M2>YrJE`pHy7=@QQuFQ1>xlpdLq?p^^k*A_2|&1C!EN zF8XCpWI*$yR#N+SqJb<*P83m!09?#+m@){gV6#>vt7cmw)Ry24i1{7bEVd#C>TAZR zU)+qF%v}xC4#!h&Q3Cc4mVkPUV4m{pUO2fd@ zh&V{Oq0oa^?E7@6TdVoHrJ(sD9y;;yuv`O!-(YaXyW%qQ44|aaem^QJU_(F?f+3w zj5Qy@QwTPvbAj4qDil;iujoOis&`C&C$bL`A73>HzoUS9P5r9DcbqTHIG;V?vxMOU z0^=wzc1p*&@N0%t^QA1e*iH@WdoW3DjU#O%$38XD$0-Chm$ zC4a+8m2#R70P`Sb+J%+t<0bnNGdHC0UcVfv+A(Bd%o zL!MmjiK;AQ6rZ3a<|;q9mHAm?#*sElyQA1ygP#d_`kErZs`+EudZHP`BE*cg?kaE& zZnwDrIq&Ny7~0`$0v4iha-2i=RP~6xpxngBrb$rD;nl>8E1avR47Texzx)PP?iPFA z)>l9P`?!6G;qWGgg$Lv^c932oI73KxggK6L-whxGS6QR|UT>GerZP|^iul?fkGXN& zW#>E;21%^!e_sdW;5w#8L4u5|TxmAQ5h~B3QzWm4G(J6sETj3Km^Js|r4tyfaa5XY zXF_ER64Xg^ig1MW=uJ+SUKU|Xyg34ljH&-5k^yOkMKThs;1^xGMVX>DeUpJgh($#% zYz|w%p|l4_x$E-kgig<=3r)No(MnKjsnI5tnrn8ogQ!LRdeeb*`V?k)%RaVdm3kbC zEbu>bzO!bw@T$;d4JcIMIt;xWKI*T?CC$z7{93;srAlM`e$=^R#k|GN`JMHd@r(>5 z$Nx%E=G4)}RdCG1kC}7C!JBZ=C{Vs1Dv9!(BwTU}-K=pO`zcB%LsZ^%pM_8qYR%=% zMDHYl^re){DKA6q+0oy6pv$FIyoMw=jt8pDr=TQCuw>M&>t(=y@;Tr0w38XS}tI^H~$B~74iz9-^4K=kbdB9O5 zfD(S)eSf*m`fJYN^(xb6 z;Cz2q*sN!U<7{Al6&x5N#-x;Sy$G^jH_p{_TfouBJpW4f4i>@$&u>t%96GQHYN!_k zrX2z;{)j@6xdG!(VH6)@yU-4o@TO|cIF=>6oQfS)$5wRdE^L!-L*!6fuW=lX;7fUL zcqs8ymPfMc3%_v50b%9rzI_ssOTG7!gYsc5+kWv1#iA>Lh$2xS1@>d;BNI>lr7CvO z;nJ)UBrs`xlt0{t$0!YW7gF>;}6&><>6A%ktU0W10Stp)>iW=^= zxNa|z&%Rf%l*kdMuNNv2o)u<9AsLLp#R3GCLGomC 
z2CdqbQ`pNoBU9W6@uzIc|Ik^VfG7XZ8u`axC@foBibSa)BvjVdubJ8Lv>MtmI=?0C z&Rk2DmHs3?n{#iipQcAmtgf6+97Jjw!)8UxuQV-XEJ4n@Lxy_xdz&IbbvJaNFG(M89wI9_dBWa^m^A12Joh z3)$5z$`Oj&w#?ncpOTy}P>IA_`6VSDWWRF*T%MA<7eJJ#0q~0&ky(}I6Mjt6o&cix;9G!)D`7Bx=~~(j?9O+=>v4)z32m#V$JK zcM7ho6@;qWpIm7S3j;Lp^qXSmG&;frnr25VdfP`~wAxv&E3fUGSot zeVcXWC;|qyul~(o858eDoQJGXeD%Yx?9Me`_8)g$oOyQ%o3`G|iRHlAjxJw6(DjA* zGcNwva7Kv4X8{la$<`~%C~|ZekU8AfGpTy1-Z8f4KM%7MCIr_24X9V;Sb&>nLM`a3 z+yIz)C$BB>qXl7i^hWb4KD&aJH53YHi+H!L!`IM9@g)ahg@cP1cOeQ&sMJ7T>MB>i zoB&$V+;7}S7oWHb>mi=9AaLu3Vw9TRef?AJOFL;u-8ouLNDPjK9_aybi3>VQ1}58K zL1BDLx?B$w8OJlyieTa8ut#O6Xy~UveR707q5|+RTCU<6g-(LD?popzCk0F+dB~;R z6n`c78n^TjypfIita3U;-a?6BU1Ksi$iCl2{Q2xwMR?dM(N$2ChA?S9Isu?_Q|iYUy!k|I}^r`~Lj2COP)jyCuaq`VH%>x;^^k(#}_-9QAkQ zJ|X#XUYBp5FT~_39q|an+Tz*o_*`{w?oh)@V(UJ5QFNkAa7NDWMf9Rycv|&ey(wcE zwCi-+-})C-=ckT=qf?XEWpB z51m8`b*MN=Q^t1tJ4`qFo6!oI{Q@+Y9B%cai~W5 zhWNHV=&l z9PSHBQLp`M4?p<(35=Sa0b^dgE!8k*N1rR+Hs&Q@Yq?JC0l67ZVE?c%O({f5wQzR@~PoE+wXGS{q~3IjS**3B!c z*%1D2XTq-e&0$3wOTT_Pbj(rt5Ur&->q({_i=iB}MK#P0q}nzy%STrX8+oJF_=+Yv z94X{Znr1|JJF{}&7sG&uZ>_P3Z2!amJPVSN|3P8e<<36n*{oggXm6Gv6%(=X=& zUoyA?!FL zQ3tT?ZA#HT@BHKjGF#+U=cFZy|K{~SLg%MUQ4F}M0lc5_O2{2HUwY9=A1&_ac0Xlh z_yjv>pKK!!!2@PBAUgm%|5B?q7E9QsjcYAbHmEL-%zxwz=brGnBB=&vV38>ya9u5Y z%W%H61z1d=2dA=Iyjb3*(tw!$-27>JBIOTer>ooWW`l%J_7d;=>e7TL=_@H1jcrPy zi@o`f=buk6j)$8&2rx~s*Ojtc#ZJ1=7)i>!Sa>da3b`@<5`e)IWLIOLY9HVOc^i^+tQl!+`VFM@#rA<4+#E&nYxLz>q)xFNti*r`eQhcBgxi6JZ7uyC~ zVR~f~?7Q7;;Rk$`O>=s0b5FrU1B$@5|wHu@q;$6F^moiq2vO%X9laE!WEO#lWXQS7~vOP8VIMe#7>_@LL@M_vj2TRKH zbn!3SMnOVy>5LSs!rJl*nEie)Lfo#4@iwI}sO^;IHsYmPE}9^JLoRX|?}}2I;VU^2 z%2+8oUWB~x(aCKXDWPw$j46lWj!u!m0eY;De>#v}T+?8|ijFf~QvQ zf`QN;?LCG%act8+)+LU$kNhzCw~hNOa$&P3`%zmMFUkJ_&TBbp(@4*=I~GDGOXxYb zGK+{^yZ*wBP_V#L0K9_`<`laHU|7f07(DipZic=Fu;)RHj=r)?>P6!o21E70Quf^y_)c z)ps9!(Rk+g4yJ_Q2yCcEfCUwq^ZpVf?0>U!yS}qqCv^6LO<|RD=xw=FxYUviUAhmI zHn1*xGm>N|@6{Wk#&>!z^WPE75N){QC|F)X+uXhGSk^a`557QLkn)Y*>3<~9ZDqFx z9E!{(u;1m$D&xklQuRlcr7rVS7SivCY0Uqij4k1QdjE;fNUVOuf97EVbi@U2MJB@| 
zEBdBx1TLn{T=y(&4i}<8wt}n`zER8ZVzeNZe!~w{>=1qN3QCJOXew-zNw2$~DNQT$ zEtY*bc<*PX2l0!O=jW}RrFatf5p;?-eH^q-f%#7)R$URqB>Hs>0!#n{qY)ejMvVNM z!Fx1_rKhx)wLg8F=4dl(nILW;OM%FvUK=a*tVp#WoGDVuo-JYiw?p}VcSr^?_H&xy z{p-}!0svKvd;#=Ib` z!dKnyogqf0!(^6;mm-;(NTh^KF^9+{4H9b^xmeb3a-^AekNx@Bx#Egn{BCUUX2H+S z0WVZv?R}r^Xd~8oc+%`VvX(KQ=7|sx&)?16Wo?b;@k99# zqYrQ=!$9S|LWr?wzwn0#W9`Jp~#RRI?` zKvZQmqwc8VqTHZ-2v0-hqYaqp2pKnD188D8bJ6Nb?2$3=-qVP;h4-up|qD7)*D3X0GG$*G>q!g8kku4;$ zL`nUw&(!(;zR&A<{)Fe{yv{kVaXN1Iecjjfe!s8lzAn`6?7r6-iajJ>Ym)3^D#)C0 zlz3BdeCTfKETpSqh@hiq)bmJh2~wP)rsbAQVpLU(uFC(|JQ4F;D8#;O<3a;?>y#Wa zUY|3OJ)j?pzId_Ykdw>Z(0m;g{O*$5UytvKkDVKQ^w}~0x~1b(9nEea++lA?IYH1DlaBx5pMwjhDc>|dOS`-I{Rtq4=A@}%HT ze_ScuW5t$UHwzT7dD)x(_&(RF2x)N@J+Dnaa+S4jznxgFo|;y7>l-C@5?@3YP%Tvm zQ_@ebnSONse(c7epqHU!8ofL%QP*+EbmvDHWE%f^@u$LlFFBl)5(J)m-kENiPv35P z$a&W1o#YcPX_a|56{Cv;DN=ALHo#5_cIxh^F@32O<#}p4j&mMMH%t^;l_Ukf`5Tw- ze%X=T_e~es#zL;2c+%*Hlvjyi5URIQcftGfvFE=lNlZ@kYh1qwQ=oVDBY!S^MPhXq z)7)1yfOM|lwaLHGAf56h=?tTbNRy*Ydmi5CG#*4w16_zsgfq%*$vty)&7UE`@nuf; z`R+T1qtCqu)BwxMsr$VR#%&Yd86=01d-cKYDbifWGLi4$IZ{!&j(={1g#$ASJp5At zRf{svP@U+;wbP#D_T>67>B>h=CL7)Sz*u(-|01l5iY-D_*u51uH62@BY!q1JhYN)h z`O@v+At=6TNm6KSFDMAUDLTSV|MyYn0ID?)-FoNZVv&EhFYWsDmI8^9orV=MRC0k9_NoK|@1A_R!AMC_Pn^S#awgriFWQPw-z_Gad`&0$*MedLp> z`m=BM;OA)+Qwv*}RXupH@ARK}X^#@7)(ptK&VZ?S-)FuuHwPrn4W$oxt^D|;-=Muj z`RVPn4QiQzQNPedF`f-tH@Vv zvFzWSdd1wrG?;1Hb`rUlbL_v2p&q!eF-~^-UGe5jwZj2aE)5PHKBK)H?@!P}Se7q= zmqV^+khT~yDeRliHM#W?zhnmjWi`QxJiv5E<7Tn?z!DNydqHjUS2Zl58qc11j~>hWgSrBQbf675SWVzI`wOxLe18tSSJg zB1^5yO+N`(t$1z=EW2eA#fsEHVONru2~!$uaWH&FzAU(^Qj)VF(O;TQ6XGmvrMCxdYfkY)9DRw1UiBfySfJ*&6_+8#ezEjp>3?h1(M_ITuuseobdwhlzf@hJRoAnxd+ne>0i#v^)<(*D5f_x2f`Xv7bB0#0kt$0A-r^t_DDJZ+| z+r;)aU!k!VUeSzocvZoWeTIA!&e3FMlx!yaP{+Yu$ z_x<`xlV7T^z5r=a)A93lcjW^)sKn%Uny3`>9eccHM>Xe*eIF~R=y1+w&<7VTSLI*W zTwijSzNB#a*pxe5M9xIc(q%0h7G$P-9MvNIfMx~A;KBvwCBVB`{81)UN{v2br4AAfl2m5#~2+qxvLA#-01F z?tHSNHo&~&J{6zq{(y@g0YY8n4HT&2czh>CHvE%r1hjK##c2srcZiuvun-~#7rgtd z{3!P+#yiP!^Jm?Kik+j+8iKwyFWBxk2|_XM 
zeE~DizI`8c=;l8L4#$J0ayGlqL;@E+J2&z^vJhXl1GuH^r;&5grY9H(gGPSt1AU0g zVcxssprBb`l%dA@u+&uC^F*$|Z=hrui??h*;&9CNe%Xjbo>p#kEWQ>tOYZYX8wx{M zzS?{s4NLS`v-~9pb!;`Smog%Fy67y_dXjw=W{wU{qvf&#J>x8POvz2Oj(PHF0a$6| zE^xyyY*{)*j$idYm}h&gg~pJ|e&tEiVJ(38lzm=;UZ>4gxINgL_XrLMHL_R|@gDUS zNHQm(gv`{zmyP7NE({=S&rJeFQyErusHl4mDj0Eng@uJrzY6A-Yi5q$s<)B3Wcya4 zIb_^C)A904V;30ehfVqOJ-e zZN_}k^&G8H^i4YbF*ky}04!)*PeP%_xa-n=epubG#so688>m|oX1Yk z-%9-TwM*NSiYu=S)WMO8feS!ZU3^3A6IhrtRQ;)x>ZK#Y55n0?>(@0m0UJ3Dk2){s z+O36D{oA7)VL(#QTljKzbMF>)FTROt*(RZYx0<&8N8`>#=gP2ff9=7o<%fyKsduuc zJsR@^DL! zpsYrRzO=6LaG!v;U097iwFLo- zx}f8z5Iu?+KfE)x*iwcX4g&jhz`P@h*t?6CWISCpq5Zk;g)!o}8t ziKBVw4K`6FBqs0_ka&3Bq|w(m?sN#!-a~l16DhlFL_A4L?m(m=@F$K$q0)No|02W! zC;js(9=x3+o2hVVbxE>Z_R`h|sJ7(X1~*!+nb}g4_NNg8_6obQjUB2N`M(rNe#5=b6D7L%X^7dqB+{u?KMh?FWNtKn9wn zRIkGl8#;^7n%s$E&O3bQe5LhdTFrH&!TJZTL?L{r(+l%sLZ)cEK#^VFf4e9V#}tM& z0`%-@8^#a`87mY3497yPs`&7Gp2CRpTs*8Y4^waY=zg+BUm@`&wRUICKw-z1CxSnc zd(>;O@`Y#1@mbZvsw`qf`Ta%|d&`}d|JXFSgckToVgKRVUxgS?2yY{T4|3fSl5Cb! 
z9fuHYy_4B>boQK`2hv(nU!0ykRIta; zGq;^JCGVh{_YU${Eo0`#*7rb3=Y*wBb#A1dr1dpw+cwvpjOvPOxbA=ViJO9YRl0n~ zqfR6HW0~|MslS){2#6iCUN$Co_XAmeOhu;IfxzG(m+kc5@?Zy(;JrYgtOId3&niYG zz){d@|GLA_4(49^WpuBA+0f}6W6zI)zS>>)OdiE8-SDq>d3jpJN(2D85Oax&79$ws zb{v|MnH5+9Q~5AIzw?cF1kf77DXxPe9>y> zla}K#!wlz62kyIs}X<5vgUg`S^3Sxa3e#D{?L2n)-}t>8lZ^wF0;;gZrOhIIdnv}UsS-3_QK zz>lIOs3#FW=)C-1=p45AZW_J2^ZNRY2dN^=aN0CCHD+b#2^IpaN>$_J)G{gYh~UnmlE9Tw=#`3 zITBrE>G+JRL22s6eW#UL_QPvJ($=RaP< zRw+6s)GFdbRegDK^p#V;U3n%rAl$@9*|DeD_?MvS-3OhHK?PYg6<6hpyqhzj`G42F z%F~q0PrA@d2(h$mKYmkbJRkaX%(KxvfHfRO82~7VwBYFI`9z4BkfA;HpIkfj+8VPB zTYN_#R;K4}T5ifR;YU5kpVZf@&wKendD2FpL2WR;iXXBc@CLaKl8z0)vTT^W*||Js zb;G%f+aAfzDMJZ@47T98MfdKVedcEqv78lKyka7{zfy;e%BSb(eQ=a;jt^7Q!v;$mA3Sbr7-_Z z228zfU;{OFRz3fmkiQI-T zCdClCiEP03v>t{32m`oJZz1I!|4#w|?!7{RBQFa;9{i8`5E)UlnuEyWm_qEl zA)+w%`RVb#KbhNr&B?iFS9`-_bPs!5YTd2Z-vk%ca_gv~EB3KcL7YJKluZ33s*r zYTi?8%Ahbes2Ub+#2_=?1peapbYnmnF06Zd1bWImzQ4gUi{5cSR2p%2pBzbjk%v6^ zC7UfAtt>d`AdPTZfGGL#-J6r=LXX5tcHQZ14Vw>;)Gk6H(*<5(> z2zy03bt@IfU!~;!2#tkrRP3k&KZe%m@7_g5GSx z_8w+S0wk1By_TG7b%lzA35#@wD+1x6yJJU?ILKZVw@?l$s@kc6J((2iZOnMORqF}a z$s2UYNhBmN!;^Jyl<$^C-}#XOUGz!BFwCzqlCL&N;Z91N0PRk)0j*}_A5?gy_@~wy z#&M;d2vA3UWQ``(cTVZeAH(iup*v?%x*GJb>Z@lrJuy*(AbFMxB#~^pN?aIWz}yrZ z=*@a3=t*C*An!9sWB>*zm(Mo>$91saBk5Xt-L1w7`H!Btb(*~I_wo7gZ@W>RU``cU z&a9H|Im5F2-#fT{UcUW@0{MkBa!1$Ef;}g%_>HMUQY=KkXz2MqsI?s4U)5=%Vbx;A z&_oES!{3y_+j#|~-PWv)Nl$*bZ{7I~WU!o8sBGb6B5!_4$7%s7edL6)!)xlw^ye zH#dq%+x2bZ1O0#-G%pz>u@dkSh*7>m2dWo>nMJZ}&bR&yAiaZov*`Zg&&$`o;>t5< z!L-0M+RdPs1!r>_nR~5KJ{kJD4>hsnpIc8pBIL+wuv<$7f#G!;A!^*>t=U;02MA>6 z(x|~v)W($!9eW5_ZpVd+($5!&peMze;K2-HTg6c2R;M7)eaoyMIXC6caTniEXp+X? zKZXR@QA-&AjSjQz8+GLfb0Y2*@jOLWLDuxDblZ-(KnW0r65uSFD^tf<;G{a;EA$g{eaS+|<(7-V8|^oA?tNl@pWe zRz5GyvPP4)QZ-1mJcu6sSCu1y<-@!;Z|>Yff=;;Nkua71S_OO(7XXC(o+8ZI?3$U4 zpwaxkijvQK72Al~Ou$9-%zd;{kAutgxRcj+V^>=z?fm^! 
z=A)Q-EyEcE^p@7!SDbY#F!l83F3kiTc&PfUoNZ{W93JzG250-$KE*#u@*AfM-+c#W>K?W)*-+6+0z^_d;X zSopa1rX0uyI$_~C@nx@V# zORno<+Mk%B%;4KzEqME1C6-8zwNyt^zsI#~rG)}&3LxeuWS3Xa(Ur_F84M&Oni30Q9 zOnZ9B8niQ*J+2Ci4csLHLmug4JKI4NRA`c=>^M7U(R2gWLG%VbtrM|7S@&k(;RVp3 z?;T3Jb;FAV)OtSCPAU{kDvCl`T%=Vz^OI9a8x{C!&DC59Ct!8`^C{)wE%d#T4&7JhLfr-_p~!mg7vbJbGDxvveH z+&lnD&RMlL5VBUOW_+ut`FPm4h=aGkHvB?wa3B&ON4n0@^^#`5oyZM?^+@SugtyQI z4z#nL7%Irq%V?(1lag@)rHRiuE70OkwUN=u^V9&s+Z7b--@b6r5=-^AvkTgj^dk># z2Z1!~_F)-NriU6M6?z??%;+vm?Ht{6`?62dwT!3FkG)3pru-M7Zueu49;asE`dt-o zwuWEoXRF}an*!BcTA!Flp<0I~dNkmSuFz7%7VQW_>iukz)L(k_%^vbXdwSU}WV;}~ ztC9MLVnxlOn1;TJq=ry-Xvxy|Ycb>+2ooefEsk^o(NVDH@-OU7o6UWNl8=qUbdV&s z3FPvak*~Jtd}d+Atiat!y%F{N&~fZ;+VrQ^^9~l#2GCc!D--Yg2+|wCMU?{>zCtSk zOW9cwqR3-^#LQ>I#R!xcwS>LwT0oJ-VD19)@x&knG5$F&h9UeXn;n>&;6vG4r|AA; z&kKUiCMGiuW(TGpnS1j@RbA|(iUhMA_UDaB_rA)OtGBMLA3(TX{-e#OI-Q{e$zky9 zkn%GgGi#?Y0>}gL$+h6n&*W|aVEc&SYX1YBD2F%ts}pvW@W4y1DR0|vS{#xHphni{ z95q|YfYekwC%j^L0FNd>@#Trd=j3GDMNI?$R}5NW12W0$GidSfKyFLdo$ql6xx(Sa zbWVbR8J&TGH;9S6v`Ma}KT}3jtX#ow<#TB%TNH94=Qj#~ z64|i)t-Yx2c0r1)KzNJ$2z>Y|R+Z^Y$d5A1Jw+AWkOkf86U;+85P|J_O+k-xDOJJ zt>{+X`K_6hY+1BuF}Zk5Do$0lk!H@BoojF5$LzXiUb9C#WQ%$70CuuNd`0+jYOefd z9-4c`R@2SH-NGb^>CT!~Tzjegki>Pa(gG|1blC4)pLMX@ykNyA)xw}ud|$><43Y5B z_X-+6+m{JGKs7+^!C0(WHi&GcmXc^QmApN??&+rb4-ZnU=}CK@54Q8>aw~YXq02c6 z9s6-LHw3MZy^a6H?p&s@f|{EP84)Y;?%qNBHtM&surRRK91T@=ig;zLr8xRxw12bt z26bikX4?x7H2*PEuHi*yd>%PuOH5&d#Tm-}=17z(jl9Z9cTU;tr14-2GfXApu`(fV z4i8scv>G=?6@5Nc z+k+``)Zm@LcN*d)0-F68cAQ3N{HQwW%}B8|?>^PttO* zReFL^flhoe*#7K#nPUUA@#%xpUgBsIR6F4%ep51eH)nRvb)*qshKC7hJfBDF=0Y*m zL^*F5#)`mNE3-9*p%E2br3>Ri==U?oQE@4(1>@JK6r=iOlRfZcSM%L(_E+76D|S@v!v+YYRIMI5=U8DM7D++LN8 zf&ccfPYbPMU#2r`mqUM|Cl{*;|NZz;>1+p*SMknAl=6exRf-f{k)ij4?Od)030lN?TXqK^!J|@kp>T#ql!nN)4 zewAwDcE8j7Td@UfAx=4lZ~te{24KJh?AnfT3yIR7$Ji~;(O>+KAdOqV^Q~N!z-;61&lIOzfUI6=W_>=^Dq5a@i?j!Xpo48bM+b;jpx5>=cqRcpI z>+{zGJ!c4{J`Uqz<{R6T`tN}$G{I}FR~)!zp-Zp=;YrO+@k_(BQDc?an+EMx8O!nl zKAik46)%TA`7(>#Jb?+nR-BL8_w|k)?_W%s=SfjWci#f 
zwbL4;NmBIV&b91sMh2W;x>sH#i(ZFAp)<)$9&)H4&LJk425k-eWzilc&zN*0nFeJA zo3^YYyJ<#xkJcF>bbqhSEmubGd%rnB$8?z(kUO8d%1&^DfPpwA=)Ck672;I3ZJCj% z@tVf4r7z5|66<80t;vhoWxXEE%>WlZ*07o(l*xOsHmUp>|H>(toxWxscu1(o@l3T^ zdxPmi8HQ4su>t1&+rf6C3f>tl=k~u0YpA_o&;2;=LtK4?Cw(tGcYK+4ixYLe*;jvV zBd>OO8m^nmn5DJm&|BBJo+I1j+AN9Ib=0W*je_6{OVvm{NnK2a8Yt`CP@57th8&xg z8FQJSaaO!6#Gol3DyzPU;^u<27nF0QuWo%Sj+PRML%Ux&lH6M-`JMc3T|WI=Fg=q^ zC2!wY(SflWG(xmXN+^LV`?vmbXseIBTcU!gAl|FN_2bFY{1?Zs$T;?0I;s;OhL?0| zl3v|Nej*S|3LDy8fSIjD~##!{xGTd_zrQk%AU3-Q7lRY)o_QP5e8(+Yh{7zDf)G=>!r$O^}}mvHVC-^qoi z8AVEYuX|wv6S8wJXY&}R!BVG}tbD;uxOAVTvaY~XFZ{D)53lOOe|}$i2O}@)D*d8o zC*0Isf6e0WVUJv&7B~BP?ZZ#I-88oszr&HEb!qIt)EN)d(xdQKvI{#p(to&a@{Si2 zj_C$aKh<<#h$<=IDZG57co&0(X^oHU{OHHn)E8AuefCXp0uDCT;K89w|2VK4-v~0= z$T;{e{mA$l!EJQf@sMrbkvEFir{)HU*O1-YYZEM_Nj=U@HNnB*cBHk`-#q;;+wkta z?ITuZfi2Ll}tI zqN5VL8zTi+$ai9qHQqQf7Ax#|wyyHBUc|w+EY4hM@ThVJvoQ_VgBpTfhuzy8sSBN! zOAXk4UU>GtQ1BYSwI*l1e_H+xyOP3Kw!x74*u#QvA3WxCVV0nAB~$C2h5nfO*D&-B ztn-gOE`%qWK)=a(d`2fnf&zw+ zFRC1wa866xf7QS3vNB!j`Owqmsgs^kBRzyCwN?)wotE?%1FIuBsE@54*pu#t_K`xL zB&>;1Y|FGUgeT9D6Mj5%ob7>4*|rYUaYDdq6n$Y$7CzerkYp$9Sn+pLc+K)-8Dj_X z=P?5yOF1g4?LP1X(&dUcu>cT+hwKE2$wP1Duzv3`rl9W1E7NxMgb)gD@wbC)ZLfxU znqOVJCz79S_%_-xcDK`ol_P;ucJXZAtpgiYB9qR&CyvF^Pw~vZ;VyV_dBj}ByCuUy~If46v#BF7J!#!Go$#c>lZ&t&r6lQyiWwDhS3WhRL}ScMp(8Xq(g zcP4?CHVh@H)9zjPYOh``FYP$^T6@;2d&`*`h#2CA7+BkYu-p5=p{L8=t@{XltNf^4 zh67Rd2UG56J6?yVdOv#c(7AGZ;tfWqz;gG#D^)1T&}`52(?U2Sb|e;V5N@XX@k~ORoTs~Kx)~APrye-Pd(e4G8~(#f`Dqq3 zjn}2Q!&v&l?kxJ5|FJ>#+T>nTRCbUP_cVam^r)#~1c4kCOg2^Na!@9mr9G_%(45jpa2H!pmq`MxP3v|`!<10^} zPayD!F{?#PTeHUvl!a-zrhB$d&34aclE#u4sIpKh8+}keIhP;7?G$V@`U4@4A{e1H z9;F^(Ns1F-6H=yNkDVrehc2utdGC&6DmVgh{h4zp3qS@xpJ8Mn@DK}MQsY%zn#GAN zmpBgSaa}f%Ldi$${mi_NzA*Q^;cxzqzBps?yekGaSuVfb%qyrIc}jMlfSuM zM$u08GzR!eJ@e8zqLrFY_jQPyF~fJxrW-2vUr?j_v;2-& ze-`<&Y_bW-+6hzXpivph_-)%~FMopky%0im-n((e{p_HpIJhx-n#d|k+>%X{C|$)n zKT2GfKg6FXEo~cLaS|&&m~iQ&=&kJWr@65kHl>yAV?Nw;vag5onCiCzCU 
zJw>L#qjMpKa1*Z54e6%M4}+i@e@up7)kD0d+^6=^@hznOPd*PzY*QAg;{YJ*H~w!B z??T>O#Z{`mE;D}KLF0i1&CvCMZCPR}7;0Al=lm`Pkv_Nx)|@d+VfcVe=gi0>+gZj7 z%nR=TPg8b@QSUD%b=$-7SJOnze1-c@m?Q802XsYyqE+Pms0UE+0>Aa;mI?AwnlElJt}K|Mx#^^B8h(+h%x)K%I}uNkK?U*L$(aQUbDqW-L#)RG-AwbXKX5ym^-67 zP%mkAR>RfARxC%*KM5ZZ>BJ|6puIlz@wG#TH2WNw;XDN=a<|#(MQ*u%I`67kTNXW^ z*6TLi9pNXxpOUOdUjNS{w!#*^wk#P&UsEh?`2$w-HFmM>!uD5yBkEfXpzZ<$o15sR zltIUzw{=$??)FV<5sWGq4S14{6BY{8i$1m(7f2bglI;_%$N&0k&FgO>0`Vy z7mn0daZvcZU+aax4pZdxV589e2lw)J&vi!{&N+?mRRKMro z4{ZPme;}3{Fh8)c1{nx58#(+TIaDTy5RH;;yzW;aXyOvt-<)3ZnG*gKq+P?VN&*Lb zV2s3^ynUgduzJDUpez0aIIKPd+`{QggR6Kl^~f zCMo=N^!rOE+z_&%vhIEGf?Tq}nM4KmTmN&Aq5z#QPq1}tO4`e&WusAI<@X1%Bh%{pSZ!m6?-(WS@|JX@_w=b zR^DN3@E#)x;)*hraI`&xHghOT#LPe+SYmEfU`^9O(@rPKJ#^?jOv zR;o>%zrnwz_l|Xh#BoyM`dYmv25Bl8TLAQBEEm`_ z;XsguI8AqfvQ`Df4HipiO2T^u>xF&`s`vU|{V9AD6jtb}Cc)ig)(U zbk2*T%PPU)lsTj-^S8XC@7Kr_?2CUScND~+GZA26+8^*9(OAAi8 zoaT>F%5YLC|1#1laNkP`r%B5RP15=*b=51(G3POUxSOeataFX}H2Jsv6A1+A{Rc-)GOR1LD6N)qbg7iDH!S?S+~6Kx|E?UGJ#X|R*nJyN)kK*v?s%f`zL z0QY1(*`OjcP>FMhlOH!bQ{I5Rc+-Byr|VhYCb;3`ci4le=D+Iic8-6ymd!T2oI*|b z$i2v?4Q@AL7YI-E!^j1?|82fUxJmKSncR?fTdCh!CtuQ;&;O3}oIeD4>@p%oimsDI6iYWXg^<$;UfUJlEkrOW!9a@(da#IvzZi--|-5#?%piaI{a+d zq%uKI4j)WGG9B|>67fEM7ZG{Z*q;ika^dYS$dqqnQy{DnQ*Cwy(^S=80NyD17GXWV zndWOeU)+F=XEoW#RKw%ezKjP3qZWgG#4iewq!igOuj?aVFnQ5 ztfpYvIIK|Ss)-TAz&QE7FdCXBe4Rb{xcRon-Ml6xrl!dG2z6hiM$FCD+Ch&#UW zVa;oJs0Ul(^?@5MWsQvIkY}&+X?}&PTVv%ZC#->U4#qbdF{G%0^s#q)c>(xU8UOZ< z#yP)4w7yctMXJHh8umZSDUDvjyrXIK7NHg|CGt_OLA`{u&cHS#ky3$cXZbqz#_uR51K*w)Vhbi>$VrK z2e$4@FgcrD*O*{?X2LdQluzbMrwMeK+S%bwL(&nmQduHEvD3Na!nyzN_JZ0&Q_2b+iSvk{e zjb;V|ISGfIlRL7%NqW@WG49p`f(+r&l&lUh74fjjmeWu%%*o4Y*XsXKIANS|f>nbPR5u^&8 z%_+ItXoxHYAy^wiFh%ivFlQBj(jU~{5Juq5!Rc<4H#InUlxFD7I`$gI4E%~^R)F`? 
z4a+E}RmI;W#Nn$hbZ$_xdhNiqo>PR0%RK~oLULB4R9uZXMF4=9m8K|ni`B#3@6zAP zp!a<_G!jN16vm0$UE3&uuXs`=KoL!uDp6;@uZHv5h$BZNt$IA4 zKjhHWR89x&b>eFj@j6?#L8KyRW+H?vVXpsP8ha44L&<8F!hELm_tXaLO63>jrz&^k zuJe@Y^Ml}8%#brT7~|DW3`B&>^S2>f6RCTdVho5p;nGc?neoqb{69H?34k!?M)`t` zkR`S;5T!mxrT@YPH0kh>6G#rFN8H`Rmf-_{V14B@ApPYxxlHJVC%3e#MO4Hnvf2-9Q%v=I8K z+)Nw=!O{q5&DqoE?6GaiEVP8B%L;3{a$<8_C z1Y^`@j$d-5fXiGzt>>fECX5(x!3Gh%Nre*b!r=Agh%n4iJAgwr^^dAwKf8k}KkucI zv72|w#60pdq5tsFbC#z7HCCRb4h$#`ohq|ceu|rtsdI$E+i~os>77n?3j0Fm#xD%) z9XdgNf`*^Ckrrcgp;J=d$F;Lsgpytt9W}9qD{427HKv8F)I8CIar!j>hA*8pFi=)) zcBM8=d>7a1e>%y?8uXb67)Fd2g#5HQ)G$1YtA2tC zN2)-)R9pzMQ!(XU-FCB9J_`G_H_1&k=i6O7x@py%3@hca7{9XS;$wDUx4SJ_REJMe*e$+fIFGnlc z(w4RKvbRc~vjW?9ifol~!2hW7QvtA!V9gp{Pdk;2vR&|C`v1y3K{2Qz&OSn(Dxc3k ztHm(8q$}7er(|#&y=w&;8BQtKNJW_DsJ(m*F)T!oYbXVN+d_A)0^TJo#ix(3=Gyu7 zQ3JM;V-WE2Km|0*_qbUGSlwy2iG}mwjS?K#?QDSoY1nA4KuIQIx~cPi@@II32JLC#~Xj3cOE$;G1Y);fdrSK=5x z^hqLHu!q#!fxHoGvXOc1(w@GKc7glfNvE{>-mLHE*5bO*n@s(-_e(l1QTbeUCZ1mX z2A!yC|3%&ZZ6=u$&?5jIiFv(iY^C=;^Q#Vhpp63D$*=!5zXe?+_&hnt>HlFfg8Mit z9Lp?w7?UzZ2H9PA>U;PNUquChT1pUmDUO>{ePrU7p2c>D$C4Xx?+#yF zgjbq8LIIWAH(Jz$IBA?5z}}hzbO~>HIO)@ouML*v|EyUS@#ec}7IsWE05WA^{|VP6 z?b~4jz=AxcL)g5x0S*%vnDM@Pq-7cUzZ|FK z_u1!^cm?Zroc{2hr`r*JID%?JQ8k3T0G&-6JUiYXa>OxTWWsL_d3`Te0Z##sdkqwMJlEvrz63e`WC|!K8lq`QFY2E3*5&nx*nD z8i^}d-7+0(Rb-8NxBb!di-PbWUORUZ5Br=O{M?3sA_J(Cl zyNzsVO*dvh&lu8uf7k{2!*{6g_iXeSE3gSMGG)3Jf%N=<}xx$ z_XEYt1t8~g^IKTvaNYq`+R7g_fgX-ohxbEy0oHy|>{TdmH-dk}gno)hgj$3ejQGiG z$3R!i9K(167SNBku4Id3OlBJfQRuEeejfh(WaULeuuwQwUx*S;jTzXI)Rr|4u+P=K zIbLQn+WP%a&%mXR@xE$(p6r~`ry;GOStZeAHtZDtNk;OK4X3HXrIQ*THc@-*75P=u z1q4C>m!BeE_f9a=`8@K28`9t}znMO~Z*j4P$2mU`JZdKO9iZz%i9fk7l~B}n$1<(O zgc6f>on8SL9t z^!f5$qNp8dAzBW`@{z)Fi#_~U_a>0h{}tZ_zP*%qnz7HMo7tCWa1x?a*9r|{!47** z4mt>cu)e;zX{Rzv18N-O=DBD8gI(OS45-MLBM9g^OgY$hV?-}8l=Wiz!(hP-qAcYc zlF&&Il6u6Dg%){9SNlEaGgBfu502q`ua&**e|Vu`&Fs9W6lM~a;YdHjbMFVako^nh z%joNcIMT_D5#)QBuN_S_E|_Yt@8xTeb!niXvYg&XVv)a1$F?ZXGZKuY;oU5=40i 
zW<6u%P~9)S*aF6A^}og>%e{@Mp$PLN@btG8?h^4*#LcNJ$kH_=(2c1gVczFQm615G zGCxG}Ri(juY{X*d7(ClL;|y{gp0o4M|CB^mf#_cMt`+ zgqm)S3Cst}2ONa)eirjk?GRx+VSQ<-xYrb6hT(#|u;6B$OtmR*C%RE-ildmOc1^QA z>8_OzR7NS!I&ml7t53ZSr4cvF`r*W8%rk{3ZT4gKbl&q2Cf=Y!_Uj@`>)j-G$qHTT zM%^WRNkD$PzwxFkW3t>;VsEzi9z6FmP1J&_z!n`n_J&G+ly6h_`Q+oB42YVLe%KD6Dp9|rEP=ye9*Q)#+ z)rheTVy^+pP}#i=A>3ra7}T9?ji>0s;jfG$@LnQs&cNgHzj$vUUN1r)`PwAq?0f{u zvom^NoP-L+ayv?rO$a*`2S>C;xm_Z(Ret-J4m zEjCG&@*Wv`!}i%%d?Cq@U*NDb)sWWyHtW*Is+~8e!)cr3eK$Uk+;7U*e(WZ$LlFE- zf>6|fYka0x)5x>Yr$jXoH32!8u)q%UFx47v=e|XX?VoZ!vfw@r4?ujX3@xVfw>frS zPSW=fdy-kVeOclTs4`p%MoWSZxP?i@;hz+uegWchtmH98n(byd;i~v!g`ImfGYp-H z2~Ao?n6v_@M>xV;5vr+Qn>`{(?uRfOur9BX6Ahq)*ssc2LVhH$ z6+HU|r`V12qVyoi@2U}7H6oto%fCY+_T0^psShoi9j$G~j0k=#5%~Y2?eTedtO9tK{VpLez|e~zI~9*82ob3oq@iJpwHDNk3+c8NatyB7L*4#u5=WE2c+ zHavGh{E{nw!S*qJU-P~;jHcPRarm|o;Goq)3J9S$Z;8P?IQT_1u5QK%as;9csCLwKnxW(}Dj4|E>tdE%H7GWRu3=oH)wxmV z`(H&GX6fRelTSI@2=vXJiHJykDmU=*U7{9;;e=7|xo=bZO5!7M^2#TST$cN&h4dqtGns`mj$+= z3rFiSp*CIC3Q5yD_jTs;%JX)!2clEa;af31hpWS7L3`>#K9!3>%sNqyQERthTLSMb3 zXpXNpV<~mp?1^oYK+-;yuyJ7ws4Hx>$vlKI`+e}73*{@9)u5sFi>M9r zekvIk?h>AuK;2bz$Y!P$1J4LxxY$U|M=@_Pf|x{KolR^I;ZsdX5=gb+xrMnxq96+Z=M{H#R}k|JPSpHhpB-=o*mS=nvJ5 z{m3uebQKyD(>G^3YEZX$dUJ%MbsPt2odrY3+9IdSty?WSaJLVP1xuqUZU>M4BBw8= z6`XinLf~{Br2ZtTEwyEwM;FNB=A6oul;?HXJ^01FsuQgr{Mlf$r6{&daum9QK1env z$%^qT!wu$oW=FiR!e)YYqs9?#P96gR{U!xVTChr0nQaR)B#R~ETL#EVJ>jh~mpcCs zSziJU^&b5_i!t^k`_9-Rd$NU;FoRTsLPS|=h(va2=86{UOp*|~hN2QmmeB9EA(W;> z5>t^>BviD%=SThD|NFcT&waX{+jZuAzu&Wd&gbmkgsH6c<1G+!bznITpp~7yBcdJv zpJ2a?H8B#7m#17|g6QK7ty@Oj+>g299ykuwiB?iQ{GLAWEG>E%;<44|>b3E;gY&!B zeI+g!Me?!O#`>fG9g*>WH8z5DZOqOgOHDkm=sfX8k)Tm^C;!s8VW5=E#syh;EpVLR zAS(A`4@vf5%%{`&PNz=1x;|6?<*jm)}Nym{o_h|AF8mi_A*(~wcv zzvh{*ZO^V-H(R$X&bxu=kpLIJwJ2u7IEiCiBG~_8V6Es0`Y0 zABxe&E?p7o0|JMfm}N@kKa_&~eJx07ld+M`t?#+9s*WR@+12e?n;U}`MXD_}H6*Jn zq`a`2-b}D{h584({%aR;FlqRTg(a_>fn&CPwX45i?Y}zRZKyT65-hFr;C$yI7Pq7u ziZg31!-3otg)8A2v6j4doXVLCjo?ZsyI9w^WWmTanu|-v~8amx@Xza5&>;bY` 
z)<3}w*3u*uY|1~^qE}OC5j9~GWRJSpHS-N)oku%A8P^M(x&k*`O7mvnp?9#QxbI#g zZkXVq?pe8mo~G|z;{@iTE^D7X19r81qziHOo(Rg-brGc|bfWf3<1Kc2gL6JV$Z-@9 z>3KX4)ip;oP#OK^++iw$B3~E{=^|A}5vzEdJqq%$D)r)4csp}-*k_AMb)))OX4xC0 zF6(sLe9xS?jr}FRV9R3Pxql_3^c!wsK6~Oi#$OHkI4c}d6kkgG{0>*e2gsdPv;+ws z-ZpygJmr9TB)T^qzIBOyWY#Zt?HLqX0pGXZ8vt2}##Boq4$VM41ioXv(SxB$uGzB~ zn(2!ZQk`<;7VPG_WQDsrN$LCB`@8G^eqV8*N06bgHGzL#mFC(Vmm9G%(&wYawCIc! zNsy`QLEnFu^eo?}rtKSFBAidCZ^!V#!s}v9B*&5TJW1F#jQ(oQ`$-(U|F(ZodKL8b zes@Y*3%#%2+u8W57l^a0n$}%-w(u4C+7?y!wo`=zI7+%k%aueVrs6v-67)I)Gx(lO$ zfNJ6bUn!1$6&ne(+;T}sjEA03>2YZNwT^*|Tt=e#cu*tIhAB|I6ewp1{b3OXy zLdsKUBP!HPvC|)oWZ7s^d%7mF!`JDLnU5cqxS2gV_(m8N1&JD9Vtu#Vi@fj@| zk4OQ*`iRF%$dfqMrTO1t?;Y(t&x|D@AHnrs(V@f0RYPEPDs77f4 zNf|5MAw~atTi%-^yT^FAJUE($FlN;))m4QuGe#_r4X33o$^>j($C7c6|I|mkKG~EB zLPC0KvXt@nkq*EeH8nlCREd%omQyAE5#({Qp3CWLt8H(Mlh=M7**#XBj`Qqoxa9b) zq0*;aH2G%x*#@;J-bgJYWeO*P=G$@2>n?vAQ{vbY)k#mRvREVxuF+xW>G5=rglLW1wazXa&(byf&hdqkiWsXK(0!~)fCZ9n+w4=iwc^pjGsG%wH~C!{tu4_AEX`nc@3YzFr`}lKX!|q zfNO#=A?tO76gr!opscf~1y@sJYVa)LgFe)+I49J+J&Kj5rn#tVdd$^X$I%!SIKnJ-@vsJMQ@_= z-rFVLa$PafVkVp|aLUoScD&)ot67{2aaZz@=v=tErFS|fpOa5Nlhq_$&qkJJ{sgAAr-!D6uS)islt7{JL30w4TPrW($;@(tE*tO%9v7b*HcMPF7I;I?QKT1kGZbr zWLx`X`aUVFfaDI%ogi3ISF0E5RL#=-Yfo$5;moknx$OIW!UzAV=NH}!0ulSufF}n1 zu{*H?*x%V63=TP1)&KU>bTb2u896SVeC0~rIpoNCuj{S({;E?O-*32%kr`-=Ud@!i z7xT3$Wo5!;DzDsSb8;mMA7v*JFUX1=n?N?5J8|mrT)Y5Cly2q*AMP1=A(|fCAvi2Z z&_c!D;@+rIPMwp0|7Pg`U)Bk%ggM{7VAS2Q6h(p0oc|8R~q+o5mDL63c?HuD^$2LzeCmtuM%N`}H0~7fz}r4v#*tJX1^V zMY7b&EDuN9IH7f3QKBw^%ugG$*C!sDSlxLi-7Ft`p&Ca4WDMkX#Dpz|dP-?KT8I~< zX`DLqZPN#Z#4hnJVG)-4EK~<=)*WM>N@bktj&21}$hQ-JndN>qX%~{>%F)9E^DSG`HK3jDz)1E zw-fW%sB~+w{rXzy(Chma8?lMtmYJHI3%F*Tm+}L%(*>lCjIm?T#!^VIMehHH-FDp| zXf^uW$<_O9_H?X&pa!KT^h7jh9f=G#7<8uXd(Wf1G?%79d-#)Q>DDxgqIh2l#Z;~n zJL*tvL}QygIjE!v%VbwJElqmb6GF>xhpkjW4^+Aj=JmI8G3!`9_y*(;C%`DIaCeeK8Hkg zW*6IQyMKT;JmYqC|tGtB_6%8fw4mPS&0tZ65-k`d<<$PZQunO~#iS#K6^{jVJ zze1v@huKgUt(2;6v2*YHMrUu5?85F~jtt(xG@94E_GR1jo@P_zJOUS_^aw; 
ztbSLB>uC0j(mRq$2o2JWscPVDs!p&~3H9rg)~MR}ZouT?xwAwH(9=mSUvKscFo{`y ze`2bs<||NArf{aaaPCY;C}UtL^43 zp#{RoKQDmt&X+BBQ=x^qbW$2qXWXEE#~GYk1)&~#Qf!+!L&UiZ;FswUhN>BPLJD*P zNZs0CR{YfYMvNs}0J@Iq5Z;BZoH5327qb+a%+iN&A!p~i;Ad-8%7`>kUvQj}hklv0 z&%e@;#x#q0+Xd(j89)s9KJQCmsL?5KbsnR-0Ka%K08H}6zF9)Nq?{tH^ zjjZKr7~^D+6EA~FMMb){HiiEn_M;u^UcMW5*RmGB^f+@NNFK8B=E|~z2EmZrbA;x$ zbujEZ^H=kvn{s41zTNLM3BNXrThVpM`!%+#OHuhn8vXAmBw3EEnQp`GjT6*Dz0yI-%c&EUkD)WOA>a-@rYy zd+ei5RJ2QW2gH=LeG~H*ooCc)jh0=nMpn}vx&c=e4C`B-_GNW)rf=}0hV_P_;15f% zVNeqZQ?KQzvL2_$;YHQ64$#=f)tW9&ehT<11OthNLXi}A?lVDTU)8t|)D8>BiHNBD z)96D7=mbyCvE4z~nGlUESY;6@i&s>_J4swntKYtnW7M2RGH(oOCZZMtXUWY3rgt&E zWlKe=cuXihs*$y{dIpYos$FI2g_d-YG;gnlZeGF;-%-ci@_VsX5Xpu?|xE1DiYB9Fr{52;A+PqLn?7a zDB|^rr)dC1bygi+|LyMSCZqXiOCti9wHjils-{0`T27seK}Z@^tv}US_q+bO18FREd0BLV`<>1%kP{kI zdk!%SuRWO!hIIby!;pZvu14nXIiYABs(nBNU0uhCDMUSUgPqznnG!-QP{Ln5-Xj8e zn7WqZHqVNt;AhECA@7O&GFz8h1a$kdDt~&-SBrGswaE zJ_BC$EBKC*F5GV^9XT`13lLP6#|-DBhI$V^;AAw`&GJ|e1!kv35!>M#S&b{sq+VM^ zIxNRchYD+WO|TC2_=^;QRD?GGZ+J26yp`0VYNWi;*(h=Foc0jvQ%{i;+@EkB9zgDN zc9U3+PXq6EYVWvu4bGl=Gv8Zn(<+BIV!5Wne=ABhpJn^_i!(J&oj3Sjx$jz$HmHCwFRvq~ z%nQR)d|I1r$+o~+h!m{n@FeE0cn2YZ+xaf4L-)wGU;6qN? 
z1u8rax zSF9!1jcp9k`8lYQ|MXzod?A91fdR3{T2F50(a0)U>mJ!%zXBs%R+uubwd8loCla}dkV+_fbFV-wY6~lm@`TPJj@-TM**xF| z|G`Jgq^Nr~Jq%tP&s z4_}Il5Jhq#PHOD5cge`GG6{C8uJ?v@r8--+uEou>EUE*6c}|!-Oc;) zQBWtnS76HWx=#0hvv$E_)NDiM4`EJ?0Gsrrg46n))nEK;@pXc<&db8kc=>qw1uH{{ zp0E9fw0dx5RRx1&(H@St$ADYRN)v6KY@QJDGtvp(^keKJZ?Vwxv8Z`C0JG>>wP-u# zbDafyfd}4?<92Xb^?y5lx=y2bWh{h}vE)hU zksO@I$g#(|m;Be~O##v@59Ri3cGTxp?6IPy(Rh|WP8bP+t7AK%s_CRK!^6sh;@(%x zT<-nHLR>uJw{r3e?+AaZ1mG{+xPC~HTw&FZrw3!F4?s)R34t9A^z$nvXwdD5)-T<+ zvX5v=67Kb&9(;1;_0{jL0W|oOkUrMakHS~Qq6o)u-|K)AJ(rvh#Ao*KhyaFH6N*DR zYQ4o6CtP&m_bM$1D~vdW-^`AiZ-GcbztNH9=+&M&pJ8}l*Zy^8yW4vNavhwrg~?Qt81!?%-V;I1N0I>|k&=;;GZD@cQ^<41 z$!F7+Tw5;euA9N|=R0y#_SOVb>=}pg@<7Wl)pC%0AlQH1)0Ros{K1v3E!`_McBjXg2$g(Mn0v9n1mOLSA#7nRi zV6^Gmr7t)-r~k%GJFr+Z^*+%|d)>{J+yL&@JRTa?f}4tv0!Up({1ze>*;C4dY2J1r z>rJ=V%xm_&4SH7$XAaU&Asg9h$09>6yeeohgMK0>-89|zko7~%LkuA$`~dL`zoGGJ z-Vp3j00LC~sDYl0h-porkFi4xJr%F0yh|jP|Hn2v>NCE5nRq>#H$)Cp{wFeCPXxPh zJ$BT3$S;-zzI|EHLZ0G$-HnSDSKxc+=xI87u~IDAV}qC<7#I>K(nZRotE_+W=Mt11 z%B$qP`1E)}P9_uaM7#_>45(J*%Z~Dp)oJG;R4m`6wBy1n1JX%mdWf@bZOu8m$0M(s zNdC7m9NCyn!Zrwe{qXteQ%SwsgE@-3dJLMWOYOYt#;K&$?u?tHEt_DcomQ>iMylL% z-%LH)WweOKfI>BjRf!4R&UHCVH?yVvG_|~W1sjoA0g#j2UnY=ll8v9tl$+_65y=lV;*0> ztX3z`aL*keV&4qJ4Fj`lARBros3#Dqxn+s77vfO^ZVHTz>zq+0{CNOF%~^JWGa4ZN zO+6!i9k~_;Jc0Ufo^OCm7@ypW!fQ|6H%6-9Dj9cB!88Y?3=&$JoS>hp+XLfV{wu;!bxSpjg(J zkz}5)u8yCLo?AG)k_mlN{nVrbVh$n#%v3%_$(LnfmMeKH*$WMP;*`L?xZNi_>q@UQ zfV*Lt{$rp&Q`yF4^JzO+h)~9L{(9iX^<@$kE8xNypbH<++cZ?Cg4IV*g2jb4xlS;? 
zyxdaYW=i6Fi-{tSuY!-3b!`$EN4)I{>u6d=5({BmU9NNheTB0+?KE0UTNHWl_QkzW zQZzGtIu~PZCTS7>@xYUDfB;mhdZq^LK5^DE7hxcZq?^VTe8Pkhkr}Te9rQi)h!Y2i zZzJx1GL_%DD54cHwa4d}tFVzikBESg`mK3V51+$z!B*XwdOExAGI8O@$ZM~XE9XQh z^1A@bL1?Py>nH2(|KGTRs^QDLP?Tg%8fcul@e{@f(bwXUeI2JlG=s3W6F_z8d5Uom z%AHf{R6BJgn(kOe%sSGAIMH)(ht~%e@LuAP#`@!)Qb{z8sd@-A=adwmKYzFmb6#z# z1Ug|ow0*DVp+TcMndO>cB-}Y$N)=f6Lth_`YqQS0kfePI{Zjca-2=7*+g zug^FJ=`v|*GkXxz09^){C)b)+!GIhiS8q9ld^XH_eRV0W%PNBk^oXsS`dv-CHIQkX z{~sKJ$8-u`vW$fg#W}x&;lzZ0P0LF@2}eh5PQZ2Ii{RYGis5tR;GybpA3(=~D683H z9v}^n>5J^fst|1Lf~~NyxCNWcKX{AJMV&^TT_c1!Z!Yej63yegqJv#&k< za$Xg9e64vVwo=MHi0WYCP>^5!s#n|uc_Ma3Q4Q}pF2$U}U6Eq4x6fnEC5d|@&_WlD zXM8RB>*oX8!0(*gy~J)0XBF#U*hf%I*U*Ic3o5^8jzQ`X3H?NWZt?Nq^%fS3cjYYe`zT zi}KGCMvt@s*-g5wMphneW2M7R=>$*mdTub5Bu2U;Ise7pp0WKP{t!=#eNdQ?waeHu zuQ5W(W}$=I9e5X%#CdTCS8q(!(b^_;rsv~IUab{p6saEK1@~hg$2$b+=JIOcB_@v+ zyz;~z)8Vtr{NcEf6-!Zxv_C+9vy@mmZfQ?F=&!Qp^OtB#oytE(l6Ikm?gBCIjk>W%nD^MP3m*pGM5Ept05hkh1Ix+|u!>!2sQ6dYvb zxp9v|yMSt9O*pt*6rA7j0y)npf{-X^iWtOXz5b3?JzDqiln}o-==xg*tQnFZ;v4ag zbuQ~85=bC&5|O^1Bt3XP)}3K$gtg#X1|~_Y%u1!dXV8!p0rXuEHYn2y9|{YH|2Sc% zKI6uIAr`P+=*Pe}+`!bF>cGUDBt#~<=fvW*93yg=&x^tmFckUqrMffPueoelOMVN| zpP~^>)P{HvdsLOu)yMxsX!%l2rHJ?v(6XDlAyJW$r}!1pQEI}XZ6CH(r7Ag?A%_I+ zjVtE4#G_?0`;DH3nA7)TUwmjcHIlhNkj6fiOE`_|M<$W0LgkXIFJ964S1P%Z0fLbN1?_os_-0)+Js_ zS5@^K>4~!B-p=WNIR?`ABXIxxBmJMW2u}B-hT5|iL01irh-n->-*)n#qnvK=av0NSnrX=3w<*q^+(U5kG$*LeyoX^{`4YgK>^>@*Uok+W%&+1>Wl(G4c=-F40jRE zSmmeLtu>OPG_2aP40`b7TmkwS+K12hb<30U{Vk&ye=JM{vK%)n4~ptoCeqhy=$uW# z%#$j1;QNlm4iJ#ht#YD(>n-K^CaX}y2&3Po8{%`^Q%=&0==Lj9g;@8i!v%`2#uQ7# zvddG`cRpBH?YLo~pfjHS4qAQktJP-ofWf?>oGCcrx(CnlP`m7>I6QPW__hBVbU((R zDCGo7Gze-tWk%P?Y6HX){F!=r6$n-X%$AR?YUesRXP;(Nkf|-7)hhgzr?Rg?$f7v@ z*)-BBQqo99cK3&$l1+*hihGIIsXG(6S^@sfc@ETPezvDd8x6P_K?w{y0QHDu&s@$o z(CR>huPUI~0VhI$u_tisfo${Da^^UhN@y^6!*@c_|Hx~6Yw91+Ec?+Kj5Yp4jz(6N z@bVa^Mpgb#fhM6Q6yLmEV&(vQ%u7Jep2fK=v!FjiBv1D9w5lxd51qPAKg@15!<~jI zR;q?O7U&J-XEeFK_xG%b)HV{94LV{%u0Hmqe8JDDL^?p4nviUZ{QJhhZKLzKrPty3 
zcpJCy+?4C|-mzQRj``{oDb8^3SiZq@mV>m@!g~z#3t}Zfzvyk#4YvUrfvmGBT*@Ea zecR4mfmR~r4<&%HUeDruJZh&xZJ81sdsLIGxS*R`kt~hV26vRIG0Mxe=IU{s@*axY z5yfQB>9KUH%bBZov^?9umMS>raiX&w_;My+0i9#|vxN4XycPB_k|yF5YFZj1VH~yD zIO=1u^+kBS&5@Q#*-4Do{E8|Wikr(sD2X=AWsnigbRNGN8lI1qW)djXp6dt(c;N3~ zKl<8Xr>tX{f|h>!&xtNbQd`zw{fVT+hOgMwj2x=%Zy{fT_4;o=Bx;qL#P+hSfAJ1t zVCE52*OblME>7CBV(^%Ua;YGr=BY!Eo%8V})#=jkk)NL0EL_cYG&pl|&l;8oHoPrp zhxCD=rqQd#NH}s_ZOc)OHl`gEbwOXIkk5UF(&BSIp=EW3K9WE*5tPKlY!+FY%PJ>& z0Z!)VPS^P<>X4$*_Rp z5e;FdKSo7f_#rH&a(=;?>V)E>>yK~<0BCyoea^nLbW>@do;o6GM ztO4b&ZlJmKskPvlmdhND4pzUpfu$R?&72r~LQ#d+(%=^3Pxrt%70X=>GQC?ibNz)< zfG9@i#$pTUn;{s#iTb9PNdnyCMlEg|;=YoYoHjcsRLrYa-qMP>hK0BfTb~!A3v^t1 z>1rA5tudbJVGnn7IwTEz@;!0}u_af02L#oPIHi+2TAI$G50k~NmL$!cU?dIRw^8bt zO+qbkOjNv9q&4$3U+daGo>Rx+8^a&Je?7~wLn{EYbg~{Uy!3ARU6AhjEKs82tpTiY z+Ln77*}>$z$Eb>0r`7O&DYteb=GR4EM!xF+U2OM{qcN?!@lc{M9gmi&Rf*KExPfUB z_PQN$pG14&m!x5c3DIm<}9}XQ;~_N<|(ckL}jg@_60u^dlzAT4Zp77)6j6 zq<%=(W8HIg!jFbdzG}v-R!>%}vi@a(3|w#T8Oy&r+-IBxWH;<7n84?z@_zrO(KZ*ZSS7C9&F&5$!d8MCAhtKw@!!!xS^T1T>PUS%rc(y`%4nXmv z61jrfGS0B>qfE6F&U#_4FNOpb=9-44TN z`3}X`gK;@Q+#5(25)WWo9DzN=vInRhs=;l*YUTLwYp%!Tkt2D@kZ1rR-kZ0W-x_8z zyCSWq_0~MrM>RapHzV8F2L+M3Cp8bT#H}r9h#n_+9oLlm9W28Ep@*t4qmQZhF31gE z5_GtDa^EqiuwMbn1d*oQ-LY0XtQPe+vhrw*F!|XbMU2bA9E~xBZJAk@99X*C3>S;q ziVVa331`NGnxs%r(KWJ2)pf~Sjv5*(RQEdqqShlJ18h$9HbYg-QJy6mRD*XTn2{({ zlQCGN#tK$rd9<0~zmh6Owdr5CB-JAhc{P|~(My_VzhG?f!-B&+LC+55;3n`zI=wHE z^Mk_B`oHyow*((@J$BYtsD}qI63rVcR}>6!?mm%i3Xa9wz^UIk*2mv-zOyx3-5XR& z4%05pWCZdOEwy+?D1T6q)EET}i~;C|{dq#YX835jB_a*Xe%aPyxvW6VT1l8oZUdU1 z$uG95qRFX4QSwH|IcIU!n_>#EiWo)FeL-I@VcRjo&a`%(WQD=|?>k94TZiU+`-gg? 
zjZGvhWUE?j6-e$yH<~I$BAOur1Ig;d7ybBp)J+cmK6EDTLC|&ZcYRO0ZFlT^BWt3l zq~3u^O8e`Phm0`$uwOQ^bOYBe(`&%dzXuV`nHoCYBf?8|v6dfXJcL|Do-;X@`4@N_ z#$EDiMBlPo6nU-r6h%P79vF8a8b^J{9_IJGjY@c6NMXV4J*gx|?ia6xd{yg+GWQk9 zHGFa`f5D`&$2NZyQXt3fA-*|#rMaQ=bwZTt+^2*ODjf&r^TQSG0Z_8^j$!`2#EF3| z6Q@*Xru^yVQ`aO)7T|{=;^;a_9LdSk0~7f=^As}Hg3ivM z=0+4eaKkxKou4Ll5)I`H&LNqmqIWT7k!CkO-az73_G;4|cREQ^j>p{9PN=_p`ix=t z>t?Yt6d1=iwPVbEW99XO2V8`PO>Qtw!C^){6X?-4GUR;|=Nc#h0iV7fdW4eDpH2w- zbIn-4U=W<|0+aw&;r^JH&}cy;g#~ed|2+0kMYRNEdmakk=>C}BN;)qJ=~wXU%3H?i z?m90#J#Kb0cF-?>C|SjD*XcbNfV}~n=l?cr+O;~8*1fx=ywerBHsj`43q$0|Q@4mN zME`g0L;@IuV@HsOIwm(|3(RjY?~B!3UOG4X z6o2$;(mTe__`BpNWr`(kD;wo>GT)}^xXL)`3_62L#lbzGDX~XebWkd)5 zOXm8KPl+ws30)KYFVp+1uZ8wzZR(jP!psagjjEx`#P+(M!R?3oAMJz7KlabgsMn1L zwuwIwH4>*dr`lnwu8-w%&Y`a)^goOd0Z8hJObgLZe=7~yri zx8X$9C=J2JGv4g^wMwYyLe8uCdUhWa!o43n6qewi@_QG_NWwy<(th!zmqi4iGwL`-b?n*H&Yp#scm2ITJD86=;tXppOG}=5fYOA(W0#6#aGjr zaOc%${W?{3Jl~9Y`>o5CQ;W2rY7$I%LXIG}@(q5qUp{>(*Le%-YpS&vS$0V)qqZdB zODFdRO!p_GWN3IIm)vGA`}O2FBr*i~TOr9T`Wr584J$}YvQ8TyU47ANeKsw zWk4ka3}@6J+|eyf2@qQfdh)gv(QgtT5S4-B@!&!!#Mj=5^fhE)wsxo%N#LIKIG^L* z;>LNOScc3@bKLs-Y*oN4E3(d9=$gQ`LNM4yEh!#3k=J{79Hk=ir6(D;bPGr z`n8Z@a@0jX!1y5``-b|7)~R_ZDwx6xZbWLK1LEkoIeT_U!2147+u4^zE;QRNw;*O- z=+vfp_{jUTZZ{i8Sy9K8B)KDy0cl5dl)P>(4;M%!?+>_;y#X4ZlPe}qFw#8obkTO;5+sxwQxMM1~>jy@Ag;e^p7dr?#$s*>_A#@v0v$@J!#H-nWTNYam7lErDKT6TBZZlv*}H|01~X9k-$=Wpbo?6 zd-;-2kV-=*-yGR8mi@&VYRgms!rL51nmsdZ` z<|IBlFh~URj&ZrwS(@Tm5hx#*Tvw!hlr(`o%rNyKl^XE6?VTRW-qP{as`3Cpxiv>O z2L>I`vi~YbkUl*37}#-j*$KvnB~Q3&8=WPgi1TVc5gjD~kgY1e2XBV^4*lk<$^|)W z9}}$3A{5VIuErZ%jJpIkcfOFXI5SUA0j1-$5wym&%IN$I!JNkIs1hM=DNM+0R~(NK zn^8{xXr9?~8#$0U%s{>-oYG@eKUim4TN!RU(RPivIUQ++*4b9EvTQ_0a?vBqJWj}A zy5Mmq4h|((Y*0}>!@ewOZ|>db?6LD`9YL>>NqT(TmG}i>d{(GEK*=@zQ4DK;;YZvA z2petA3hmW`#w%*AW3P!ci8qNaKpnl}`2>?SaRY$@^6v=oNO>?Jg2~cNjCV5~d2e<( z`ZH$lXXh{b>hQoo^!DeHW)P8@T|Akr6)ED7jjM-fZ>@mja@mS9vnl{n z0EzoU%#6a4KB+5Shz#=qgp3-~rLOsq$|Yz}t~?bi2AlWUPWi3oi>SO1eC?S>rwJo> zXIce2aT1Iw*0yHKqM`zqxXKNm=A_Lzf^&A=mwr 
zZ$wCO2Te`rp|`durMW^hKue(C6>nK9?*+3XUhhe^K9|&0ND166@M{fO%auM&hjo!= zpwK_j7CPDi>Wl}wT-_Z6cF!6TgxDuUYiu$`=Vu{3jPaR0u+e$WG%%1fv-;lCpLuU+ zP&C!ZU>BO4XA;8O=EI*oXxJE{nw8Zkw&;&o>c8j_w)Y(xxnt$g|1Q1rE&zNYiui}6 z(icX-jH^+ko<4Q?ayNYJJ-3)|QLKvu;8q6e;sNfi!wc=TpG@|PIf-TFL|7kVjyK13 zGr?|-Ga6MjT~q)}M02_c9ZXTxiqWHV>zQ!Q#S)I>g-vaUP2L-VbF~MRCcHAP;l^wfy!hM2{;+v+m|)UzU>W~3@^Q9 zrBQXuz~@fs(|+b?eO5BBtu)Nn?$9{uu_KB+xl}%F*lSa(2ty`AHaWPP8>#w*^yM_O zT3>3tCG z7iPX$T780GAwavt^M&y1AJXAZoYMNTijxU3-fw>1)?U_|FtjG6@awjOiym4ULJxZ7#)A87Ji6}W;COFHauSZ zQjC|O*6+iDhZ{Pz26>GTnVgS-NCzW@GufM`MNN8x`MRY=%`rj|6|%CyyzfQJG3`Ju z1vi0voqKN!y_`>Rjp-flUcVkexi>McuZB?0( zF^2yk`U_e$%O<9}7gMWTa7?4Eea{~TbdJ|Eksd+>ArX$*4AQneIskLv)a*kK84}-} zb`pJ?t@^hX+J!d@E|9ha?Q6?Y3SxT94HbD2d-hGf=@PRvzeQx;8|lC$XOc?8rgz&i z60R!1khjN_8NLmhc+_P60WCn7Wh&5aTH3f@#mUxAc`vEn{I9nKY@iUPbUkaQ|GHXq zP#BSyu=h#&XQUB&KkM(mc5sYX z4XUF2-i%gUrpLN+U;2_tfifeGxDW<1O@)l~iOmJ3Cu}#HC9!~DA}Vv9_pLB<}LfG$zM=XVU7ZqGw=_6Hh%$NW2OAhlw4ZG$NI2&8a3I zl+@1$nG|nePrNdC6Ml3NW_D`+Uv3oV*8(=U4TzmZ`iA!b#5oJPgbrjbIAou=r{0nC*JNa@O2_kz+K&W`-)Bh^;{e1lu$HJm3uVOWQ zUWhdptYdaOcx%~c#jIU z-mv+AVRTeyBrnr`H(Yw!wf{-S{Inx+tZUJJK&Q$+etpKkR7F*P41vgLcUwiri3v8OvoEqBW|}7v+dn01if#9^!LUtDc5$aBk;^ zXshDketHso!H^s0=6A?3brK@bol^4IYZ-#fYJ2sWH39g^PYTX+{&H)>KMae_NTu&F ziByhUQ$8`3IqdQQ_R_R5u9qk9H&w=nf4yrJL5kv`;s|_5op|<(`mE3Te#w_DI)^PE zKcnCCT{?W~=OHniVET#Swy*KuhS0P_K3FEsxQcJjMvE+x#G*MA)pzrtp&$f(8sB6B zFaSMDyqv<-S+Y7*OB>>_@Gv?F!`i7Y+A5)w03apd?a*VR#w_sV5+k&1O3s_ApwTIW z_czRl3#$UwrBCb!%F3+FY*FK=n@jh>yurL~`d@?W`?sDMM~N`Ls%s8x3~&n(_3>%( zTCuCEPt~QS@WI1J1+vxUxyQ-X(hONr_la6PB=x|mZmaC^ukfV8cHa4JpUxZLBQ8(> z1l>=azkazN18}c-pFd$vR0404LKD#oYBob#l~5C;f^`8;v|XiE04irzrdacn$~Uno zysbsd??y9x<*Pbs!l9i~6Br)_l*xh@S{w0)J3DQLBj0D88A$)t7Vopii-lon zX&31`PMc_9Su|et)IeIoi#mLZ^$abkQ*M9Sa3mP39CR#rY7?}x?%8$k);dDAuOr)Q z-M%K9p#8^YvZ?d<9dsLjM}V1L4jwBKaZSV~1+?ZV=rMc zW00t`IHK^fZR0Q9Cs*4mGKyA5K1ZvpQrpcp8ieHL*%HdG%1-V&MnJ6vMPB{#Sw%7Y zo-IL+M~y;MuSz^_+0aujGLq;w7-;R$oaN+P35H(1p9o|| 
zBKvWL^rPe)73@)?sKdO&9dGY{`o_yx#b_1+7@HiTr8{BdJcs&6ie|ytiI%4h)^19B zIy#{qYAn7GCBAee0uJ=lvIFIF&Yf7k5zb9~X+oDU%ExEOgAYzX*H4LM>S&Nc|0?x0 zz-J{#3@>bF1L-<6LPK5tq&Ac*Uc9Cx$`t?}?==3C*5oO!g5Lfp;X0%hu^qNBV zIlxlGmd3n?k+=Sr(RK<(WGicX+ONEz7U8`(x8V3}dx-&)roRmQigQd564Rb*(^b+V=;yplhk zgSm+6jzq3_fP?XD=ZN79`T6;0=^9ye`3{QGcwx2MrX)aEEZ=DPP2KdA00CC0-?|yY zWiv8xmH#bI3*8OPxjAG=aA}a>Qa{6{p#F0HrnH0c1st9LliGhCxd(jJXOg}V`Kx^}Z+-}B)vd$mYHtti#%r}&JM+U3t=!5N)lez%BJD%b9q{w@te z(#q)*j{!5PJRT!dk&ufDEoHR!Va(+WFR}Mc$2-Nzld*Q4GjZa-YAT9?>pH{jpEtj+ zE`N4RLvw()?27Cpzk}5tm99h$9krpvs>3veMa=n#XZNJCYjvHPk zlFjQRCySRHb6o@WZ3gpKi{0o_e`ZBxu=vq5s|SPiXJ6ycejGKXTdl$#?y<(?Iz2)O zuLubI4GSLro9JI(J#Nl7j}?WuFbl11UnD-*=v?_cSL0OoSNb;gy8~J?QUSSRJNS!l zceV*zNG@1*9aQM!aSe&LCCp<7sLCqDN|UH{yxZy|fMnoS$V78xb5X~JF-7h}aG8gW zl$$WhF9e3U$aLFt)lTRguIro)%NZlg%P+Ut$Hjr}`JJq{@THM3>x<7TjX?dbVKSx1 zm{mNLxWg;dp=~SG1^b$Cx%STOXuq8k_JSW6@qHRF;0S?vA6z7hJqwgsK;)#%1AM-K zR*{3rxCF2n=%>t&JY^O_?XP$ z&2$-5`=$aR=6#P_=>0+7QmczM1s!I;+bWt&azU{%DEdw>dr`-=_{`KAxY6) ze`E@~jp*fhr2mZ2#pmZg;Wc#5`N({FRIp2zS>;e5k3+SOep7vfFf!^@(&6}Js87{( zYhG05{3T)rd40e#yo2E0?K$*CLE=X|!_Kpp@lSClWt-)Y3WS$6`5ak9G}8C@a|O79 zd0NCDniUCjKe*WC?4K)BhzGA3?DAB6r3hQZLnRl*X4G&KLA;B>OR7I-bZ2%|VZ1CQ zG0;%lTM2hDqt;5Oz9rBqM~%WCdqC#o2~^>Z1p=+dG5;g)eId*$Mi;qy4Z4Px6NdH) z*ze|yW>itSI~1JZ_*@cwkvDlE>NvH-aK?fCj!8tVTyiv{TecbaQjIe1cN-_C+7Tlq zZpV>oG4XU}7;jYOaAJTZG&NX`-ru#oIZs_i^TVxiKWw(-c(jcOD5|J;bO>ELSSRfz zcEWm#|HFD{HB;4>v+mf4Rg5(ow{3hoYL?v|RtX+(Ma|{Y-%ej=ibgt$&+@#3g3$K= z-8kihwejfTwbZMgLQMbLTV%O{Hh*s$?koN)HV1Iwhont3>>NicZ~SHx#_28t_d z&kpv*E8hqeP++npE%vMJA6nb}#4G4semDSdQEoFpOnB@NfIGbixH71nc^gJzS)mk& zP}sjOL6(l@!HJi95#0c(S^+P@4<4AAmfTO2&t*+U0PJX1iMs;5PRU9O`dwdE_6Zl? 
z>1@MW@W&qu8WI`iRpkG0d!`s{z-YjN8+OEj){>wEIf76xtqis{RT#GQz;Yu(u6?x> zkiUJ*V9M2H|Cb-JO>9#BpYJvM#tu}wE`_qyHjJ!|`Phpag6=;3bg30Zf9M2<#6)k= zidqrrlj*7fya+RYL)mMx7&N_O`#cgwsqWxIP0o{0*)Q>I!>$8tqo${_g_ur43hcjT zdFUKMG|Bz11MMUt72rgQH`3gAr-_N=$^__XTMCWJ*%{98+_)lck;btWI5=koftGAo z(Jj@;pFNAi4IS|{$Cy`hLpSGl?m~9O%zV}NyCldU7|(7wklT*N;%w7F*8pLrl(_P) zV^UXhf^1lM)Ly|evhND8#dc$=OX7iq4gbP?3z1Z{Z8u8LzlhwU zQ1rRQ_q@4jRQC?f*V?^k`llch3!|5?=Nb?eXRjxxN(t2do}eCgwd zT7|~Ot5v^E`wuIY|BmNg{dqp_SJsz?f0TS4bM^_iITDK!%O35itRhxF%zU&*MENb_ zd3*CwoEM?y=KpE$+rOGR)BQJ6OIwc8IaW~-I5RyR$HM^i2!RqJU0UI^7*InJE{14P zt_D;Hm&5?tv7A=HQyoFM7E=PrP5^}vNlbw8D5J{aopi>4?$2)?sWm&>&jqJDyKYTI6GD%76*jMi~CgN}LF9lgQ% zg%#s_3ON|d3BjzQ6G3#V2>r~pLPG5)w`s0Q`jm5bPyzb7vOEWK68J^TS?+s@=8uJ# z7V3%vPUS!zeV$b8AzIaTEMy_x;LMuy!77lzk#n>@{i(gfx`VaeFZ&7y=3e<=j&bOP z-pGNTTb>5S%Bb|Z(@E=u8Ekm2Q-7hWixZC%r!QzTb&8rAh;n&yd!OZD5~Hjzc38cZ z)LphBe>9=_lk(_wr!L&w2SLwX&BtJO(DHmFHH7(jey8MP;fbnMkbjh`&~Md_pOToRs!t^K(eiTytQI zO#xv#B7uj6#)e<7TyJP3;<-ey3V4+YmH#P-O8Z#o<^s3H%4;v0hRF+|%&3rw0l~0H zr@8x5g?PXv<|mvOF<06aWO|88QKqZhOs+D1c*)`Dzh-wLoJPw?VHPIpW`&4t;&^4ZB{zGB7!ipMA@BQk`^2E6iLuoQqoKk8L#dBV4sAVoZ zE}nV+=tq%*N-1Y+rr{gq^|#$l#1pmQSS9<=f-FQzs2EMjZNB@x3kIX}q6hTHm;1Ee9{8Xu>Te?Q*kE1xUGTShiGSQIE*kkh_K z+`U z$%zDCCkn4>RsI@7!lI6`@@i@8g_{y;Ta?J47s9#qtgo>H^eGxao@&%rP_1<22G^`Z zzaxA&4TbHK>_=vj=Kz4rqOQp8{J2LOkXd9l?q$L4(o(mlBcLEONZTw3Kam<#RPa_w><-`|G6t-XpsYJ7Cwc`CUO@*!=c+x39DB)O0iu)IRnyoBD2cJ=>SqXU{ ziCb)bI+PJ2Zi{wl)K2lYgR%kDmJ*Qf5TF<)LySPZTCFS8e$}@@@oD7cObQ;vAv+DT z??7J+A>9?5GO5FKxtuGxYB3{Dlm!JcCKLFPCj(e`cz|_&+^+mdYOB=jWz>H+xB6uZ zT8L=#M-Rr*_J9j7*or9ZLF~C5X--jFRq8YlX&VieyDF{tdX2n8U(K62t5qfT`e1lZ zNrJjE*{l^Cq6Y7GK>k-i2y&i$8Plm316MQDQXbejP$v6f%n*&?7|Aex0Gx#!$iog` zd&Ch2zMC+&ja8?m5hB2~9%}zVBG#@K?hbN+!V73f4|?hV_8@{GG8&&e`wmHg&jhea z$U!s?6*GqAPNQ>zw&APk0Dmig$${W@km8iI)~CkYD@rG#*mvYBZJ+vkzL%+SZy|ri zwa!IiKES0R9HMnJ*aapM8A2Q-W%LkGVV}5GSvj`TY7}PrX}1Nld;H}?XJWMOQ24{g zMH&OX4|b6!51(u)jPCzrQK9lbnHpMF3UMYUpw7j|2gJ3VQ~cEwCo&r@5cHDS0y*&2 
zPDGa+&iowwfnng(#Yvjovy9n?KqKaC5o!!u?EsrZsc`IlWlW4U5o00@WZ&l6X@Nx$G)Wh{$Wx?ir#C4c{3n zMTrR66>k#FM};c`^T~P*EQ>o9l4Zj8mBA7+$mL&bWJ6FOsNj5f1|KcU%X2C2Z_aHQ z+(B|F32`KP-qU1^`-tn6-}7)0lY+tut($PhT&Fayb|JBAl{*Q^Xf@gt*GedeCp5>m z$`PYAB}U2#4YhmL+-kHJRaIzO9_le0j&$3Dbi)iCcOG>>m6z~>p^m%P{6@aYsk2NX zJKB@4)&{I^|7mES^-$wA!`X{dn+5a_4jg_!a-BM35V@ge2oZ)=AP6kUw8m?k!1*iR zA>@v`&6}ahFqxo)Jbi%q#e+Lz!{lCJt}t!_4~N2SR8I57Gyp%W1aW;_LKenDJ1VdI zuKho9jRdWaG9)XX!TrbM~IRK7J zNyDxsCxjey0~?<9HPkD{EV6+B7waA-rG3kz@e}||!Y}=XhN4t@W??L%9wQ_Jy8GrD z`D$(sIf=6IEwMD(Ub%#ohW%6t@>vTmvMU=fLoRqgl3V+h$F4E5IgvSYw)#8TfL@E< z&tH>}{CmiM7m+mTnPq;F^s*4~3u{tb<*Q0p|&!MAA0!%@KE zS(p_uR=QCmhN(2=^Q#gI9_hrt9}KFeg!9XUt7S)j7&irWToLw{3d#PV({}A!zI{%Y zWG-Ch#F2(0IinHYj!+mQYw$R8_+Q_65CnNBsg#9AY4zxYlYlM+ZNV+HM9`acLf8-+ zO{2)0SxLU8SFaG%oxGrkI|NkISDhfWk%nH}cD4Tf=^T6X64pj_wCn5z@E|=bbcR#?-9*2udfb{?uaBXM&;IeC#s4YN6xbcsFdt8ej18GonP8YdLuCtWIMCtNq==?6go^qAH~^N` zt(oK^!$P;I^<@rUnzDD{;R?SK*YUTj>lqmg%-0TTAHDu)C(TFG+!|F@yOXvZp}X2G zc@`x6$RwHpq`xHVTOsTv@Jb#l6ATFQD1-;cJ_j}P*d&>A#qz&IcVkgfYYLnKC#;0p zw;0m$er|~Lmv=X51Boa<$UP3K{ox@#mE8Y+VUiz;MjFd|)^(Y=r$BV5Q zSRCS=QQmDo5-Od&Ypgdao>uCH)swX>4g?9K(}r|84z*tH+-?V_%pCFZoss${1OxEG z9#Gi64Me|JXG=JG?cyLcA`Kx&7-45s`(HgP>gu2 zfJS~fKI9M`L?0M0mc9Ootqnx4@i#7W%+dEu^)1wfhI$cBVhP+Qmsqk80%6s--LqeA zOxGCTdF*jfZt}vsf?0~Spu&9bG!V3bMBR1n>1{J+RFlCZHKewlUhQHBZN9fibN;zy z%6G%?{4Q)18Rpl+xR!vyy_`I8Q!VgN<@sBlu0kF2CShpXY9^y8mQLIB>=kd_iu^T) zp4)S7eVUN*@RIb1pzHQYuM|yF*$SxAi{!Xu>y4#T4XF*e0@h(nfRYhum{(iuP!y-m z66w(%2o>EGHR#(S7N&S?SzN#aabdk^z7}}6I#nub;<27alo(uSI9`vb@pu2+@8YXY zrHL9uwni|+uptB#A<+@{hTJQxG;!P7L+`b3#fsT;DXNb8ir{{V_rJV~!>6@M9qseu z4n5~o%(tHQDkIWx!6ige37NhYYTP$;tPA0UE_p=xM;dYo)f9mzqj1(z7+PIy-o;&zijR~Qdq`pp$amI-K9 z{%Q0IdIP-&`iHC92!{TL0lJ}nCV~y{#MXMu1$+%M=(^N!kt4OdTxEWFArAFLTF|w2 z(6j4`EH<63RUa<&8psMSA(y;3hZUCDsOH zk)#0ujRU2HhS1U!a8-H3Q(u+31)W?0MIKJIMD`lfY5&aDNphZ+Li<;!{p+dj0ofrUEfD!k^(k#of12nUaAaBag|atI zLy=okIZOaF;~1XRU+Ue=h)#{+!uF8=hx_xwy~|zE^YOI(w0*R}gP$(71vK(QsYCmZ z(?ye^f6uRBWq~W^bfjGgW$}YIYHQgO_|3g4%7Rslagw#+XCgs 
z*?7#GYvjK{M(>-erOwb{cYf{@o=kR4$s{pWQA(Bjpg;ql1bK((&V|0Tt>_quc6EP> z3y3cNB9QKHih)9Wobt(|KD3Hr3KpBG!{f6B`L6gXOt-bLozu)G4f=v^v}-z~ou)$fCyh4RUu;9gVB z-`(d7NRxL`hO+JqFUtGvU~RnLZGQlfE@s|lLDls~#+G}*{kyTd=pYa>$}@0o`bZPi zyHqCg`LTe!d&?3A3flqP{Mn#X?1~iKa<$&H&dD;PRat(_q@!2P0xEmy`utADrEb}` zXJfY$85PMS644G49^lseQxWiLQI{=;kA=>`S!9#+Ae&mqI?N`(#h?c6F@nLk;MOAgZ zpnfBFVkNXmyhv*U`p&q}9_arT>>S37U$P7ErT|FjeBJ|o1x((+8pg~o|HiA|$VArD zz95Pm0b9^rlc)2Pr&a1RWzuIO9bQK?bmKW-=Iv2idz8S}nau+lek34Oa*@ z;Ze(qg4(OnRRyxfF|Tr7?4bT{W6IM+0ym^^sRS^%5#~V8{{4ovG`H0+Dnqx!yM%0e@##)y!*l61ydcL01aOph~1^DvJr^+9U;<()w5m9`4^v71jTb0 zOEm+;uP+et57W(qx(`ZZ(C`T=J*bR%tIiWCMx*T7RZqS@XTJB`0qo@UR*Qw-)6i4J z(`3E{o+D4Ug5hqy`wbo)&J|fE{01N1j+dw2V=+i2o&1BqM5-r+_-KE_G%bMVRnaXy zWZ7SWOR2U2h5zs00Xa}uV+{RaB|3x7xEPPbHO2bH%N6`k`+RQmbuM#mCaVKv`B@y* zIpgQ1jR*k1r7QnpiW?MIn94cK21IxuO{??!v$rJYEY1(~BpYB`U)TAdOkzf+1JVTP zwE=1BoVnE*sZEst0uSuE{0>Vu?if75tP*S|Tc^v6$*LE^ZZ0mIRMx4Vv>*9v=4b(% z(p7;+Z}5BI9#Kyj3fug@bRB(B<^T>A!0C={l=yO$$#l6Zp*aVq-Os4UQELG0Z8C*oBrjF+ZSjo~AX&tv~|9X5p#c~m)3I?3BnvFl%GhDe;U$I#Gv4<*g zOa4t;s;)+IxQc`79F_h)PZMqjH45HUw95b2HlJJ!>X)H}ID-z54X;Y{F}PZ<{wDyv z@_jnXykgw3S6_a0@XT#tdMQg8;T>Jy;7W)`8-f4ARxTsiw#oxhe#lZZa}e){w!VY? 
zd3CGeG`9-9D!W;CBAopI&~^?I-KSTI%(R%`+ z$Qs>q0T>f1EVHZGUGfU~jlo+)KurRXTijqM@ZcGIsR{q!On~}KDKm`ZE$F$7?oz2Y z6wdc8cLNlD3*^qO8^0!?Zs?%j5W?HZ^+@2R*;?geuK_}b{fA8+q3?^7V%g786IAD^ zxgE|vV#Qv7ckOEcF<9c0#f<(@I$N4a4T`m97tsCz{Uuatgri;gA5x_-u^_&}51tlG zhg3*RUrq@5IZgBGx3*^ML`R(DqiYSqOp3%i_veOJuQ$-H2V#$dopIP>^mR508lK8b z+gPQ5*W<=*3aJ>j*o-D*MQjI#yO5vy?gW;&_5z0DE@@!6vk)vbz*ZcnjnkcigiFoab2c ze$F+^p}Z%e0lcYC4Qh)&s4QLb>&k8R{wGskAMM@ffM@4u9>$?l!3k9Fz=#At>|StQ z;zmOJ@+@or249S8KMqRt!1m(qebh&g8J6lf_UX(5O>A_z*!u{5?-9{*+5Dl zPf#Wz62#!^qCzqECMb)I$KMs>M|Tf`Gd#2iEm|E@UTTm@(qoK+D7b> z-YH-JZ{{eReOS5NiF5pkaFTc8%hwzS)_fhv!2AoyJL)Y|bmo@wI5vqim_z1gHscLZ zPFcoMemb1)Qh_H{#K`|`IqWWQY9~!p5Pv>df3Ez^g=(im6=IO@YD9)NdyPC zhjpwpdei=1+IIEyZGEe!c`&$FR5_rY-YBXPELPc(Pu>|~YZJghHylo_GWK47G)hRH zY3c3eB-gC>TMres&me&#?SS)6-KA)p=;>oI7&Ub``o+qK~c)-sHN)))#~jZtQpJMIZTM z&)olPe&!^<*4w!^XLv4pdLfXTP?Z_&NF)G|``abf%zDp1hZERm*9j8NMPEW6B0I3D zjfo0yG=iTR5i+O8G+7T`2(hLy z4fBCd%FX6M?P`}4RxIGD1BpA&l>)z5BN*jWQbFNHS6s5-wmy6}Fwv&dpuc=`*?J1d zzm=iiOG*H{3fkIjA#uHrkIpQ#5&VNiD?ZAHuIF9&LrujNQue~OsBB2|-VfV%?r8p> zh6jKD!?p|5b6;-wTLk*8GA%dB d|DUzq)X+0`B)<7$*tXrTcZGdbzvIBE{{viFZSnvB diff --git a/api/core/model_runtime/model_providers/nvidia/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/nvidia/_assets/icon_s_en.svg deleted file mode 100644 index 9fc02f91649a87..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/_assets/icon_s_en.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/api/core/model_runtime/model_providers/nvidia/llm/_position.yaml b/api/core/model_runtime/model_providers/nvidia/llm/_position.yaml deleted file mode 100644 index ad01d430d61c79..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/_position.yaml +++ /dev/null @@ -1,17 +0,0 @@ -- google/gemma-7b -- google/codegemma-7b -- google/recurrentgemma-2b -- meta/llama2-70b -- meta/llama-3.1-8b-instruct -- meta/llama-3.1-70b-instruct -- meta/llama-3.1-405b-instruct -- meta/llama3-8b-instruct -- meta/llama3-70b-instruct -- 
mistralai/mistral-large -- mistralai/mixtral-8x7b-instruct-v0.1 -- mistralai/mixtral-8x22b-instruct-v0.1 -- nvidia/nemotron-4-340b-instruct -- microsoft/phi-3-medium-128k-instruct -- microsoft/phi-3-mini-128k-instruct -- fuyu-8b -- snowflake/arctic diff --git a/api/core/model_runtime/model_providers/nvidia/llm/arctic.yaml b/api/core/model_runtime/model_providers/nvidia/llm/arctic.yaml deleted file mode 100644 index 7f53ae58e695a9..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/arctic.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: snowflake/arctic -label: - zh_Hans: snowflake/arctic - en_US: snowflake/arctic -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 1024 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/codegemma-7b.yaml b/api/core/model_runtime/model_providers/nvidia/llm/codegemma-7b.yaml deleted file mode 100644 index 57446224a8a811..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/codegemma-7b.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: google/codegemma-7b -label: - zh_Hans: google/codegemma-7b - en_US: google/codegemma-7b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 1024 - default: 1024 - - name: frequency_penalty - use_template: 
frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/fuyu-8b.yaml b/api/core/model_runtime/model_providers/nvidia/llm/fuyu-8b.yaml deleted file mode 100644 index 6ae524c6d80913..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/fuyu-8b.yaml +++ /dev/null @@ -1,27 +0,0 @@ -model: fuyu-8b -label: - zh_Hans: fuyu-8b - en_US: fuyu-8b -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 16000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.2 - min: 0.1 - max: 1 - - name: top_p - use_template: top_p - default: 0.7 - min: 0.1 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 1024 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/gemma-7b.yaml b/api/core/model_runtime/model_providers/nvidia/llm/gemma-7b.yaml deleted file mode 100644 index 794b820bf42175..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/gemma-7b.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: google/gemma-7b -label: - zh_Hans: google/gemma-7b - en_US: google/gemma-7b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 1024 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-405b.yaml b/api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-405b.yaml deleted file mode 100644 index 
5472de99027643..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-405b.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: meta/llama-3.1-405b-instruct -label: - zh_Hans: meta/llama-3.1-405b-instruct - en_US: meta/llama-3.1-405b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 4096 - default: 1024 - - name: frequency_penalt - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-70b.yaml b/api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-70b.yaml deleted file mode 100644 index 16af0554a1ef41..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-70b.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: meta/llama-3.1-70b-instruct -label: - zh_Hans: meta/llama-3.1-70b-instruct - en_US: meta/llama-3.1-70b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 4096 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-8b.yaml b/api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-8b.yaml deleted file mode 100644 index f2d43dc30edf89..00000000000000 
--- a/api/core/model_runtime/model_providers/nvidia/llm/llama-3.1-8b.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: meta/llama-3.1-8b-instruct -label: - zh_Hans: meta/llama-3.1-8b-instruct - en_US: meta/llama-3.1-8b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 4096 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/llama2-70b.yaml b/api/core/model_runtime/model_providers/nvidia/llm/llama2-70b.yaml deleted file mode 100644 index 9fba816b7fd21d..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/llama2-70b.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: meta/llama2-70b -label: - zh_Hans: meta/llama2-70b - en_US: meta/llama2-70b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 1024 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/llama3-70b.yaml b/api/core/model_runtime/model_providers/nvidia/llm/llama3-70b.yaml deleted file mode 100644 index 4d591d42265825..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/llama3-70b.yaml +++ /dev/null @@ 
-1,36 +0,0 @@ -model: meta/llama3-70b-instruct -label: - zh_Hans: meta/llama3-70b-instruct - en_US: meta/llama3-70b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 1024 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/llama3-8b.yaml b/api/core/model_runtime/model_providers/nvidia/llm/llama3-8b.yaml deleted file mode 100644 index 01395666745b69..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/llama3-8b.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: meta/llama3-8b-instruct -label: - zh_Hans: meta/llama3-8b-instruct - en_US: meta/llama3-8b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 1024 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/llm.py b/api/core/model_runtime/model_providers/nvidia/llm/llm.py deleted file mode 100644 index 1c98c6be6ca72d..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/llm.py +++ /dev/null @@ -1,247 +0,0 @@ -import json -from collections.abc import Generator -from typing import Optional, Union - 
-import requests -from yarl import URL - -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult -from core.model_runtime.entities.message_entities import ( - PromptMessage, - PromptMessageContentType, - PromptMessageFunction, - PromptMessageTool, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import InvokeError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel -from core.model_runtime.utils import helper - - -class NVIDIALargeLanguageModel(OAIAPICompatLargeLanguageModel): - MODEL_SUFFIX_MAP = { - "fuyu-8b": "vlm/adept/fuyu-8b", - "mistralai/mistral-large": "", - "mistralai/mixtral-8x7b-instruct-v0.1": "", - "mistralai/mixtral-8x22b-instruct-v0.1": "", - "google/gemma-7b": "", - "google/codegemma-7b": "", - "snowflake/arctic": "", - "meta/llama2-70b": "", - "meta/llama3-8b-instruct": "", - "meta/llama3-70b-instruct": "", - "meta/llama-3.1-8b-instruct": "", - "meta/llama-3.1-70b-instruct": "", - "meta/llama-3.1-405b-instruct": "", - "google/recurrentgemma-2b": "", - "nvidia/nemotron-4-340b-instruct": "", - "microsoft/phi-3-medium-128k-instruct": "", - "microsoft/phi-3-mini-128k-instruct": "", - } - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials, model) - prompt_messages = self._transform_prompt_messages(prompt_messages) - stop = [] - user = None - - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def _transform_prompt_messages(self, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: - """ - Handle Image transform - """ - for i, p in 
enumerate(prompt_messages): - if isinstance(p, UserPromptMessage) and isinstance(p.content, list): - content = p.content - content_text = "" - for prompt_content in content: - if prompt_content.type == PromptMessageContentType.TEXT: - content_text += prompt_content.data - else: - content_text += f' ' - - prompt_message = UserPromptMessage(content=content_text) - prompt_messages[i] = prompt_message - return prompt_messages - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials, model) - self._validate_credentials(model, credentials) - - def _add_custom_parameters(self, credentials: dict, model: str) -> None: - credentials["mode"] = "chat" - - if self.MODEL_SUFFIX_MAP[model]: - credentials["server_url"] = f"https://ai.api.nvidia.com/v1/{self.MODEL_SUFFIX_MAP[model]}" - credentials.pop("endpoint_url") - else: - credentials["endpoint_url"] = "https://integrate.api.nvidia.com/v1" - - credentials["stream_mode_delimiter"] = "\n" - - def _validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials using requests to ensure compatibility with all providers following - OpenAI's API standard. 
- - :param model: model name - :param credentials: model credentials - :return: - """ - try: - headers = {"Content-Type": "application/json"} - - api_key = credentials.get("api_key") - if api_key: - headers["Authorization"] = f"Bearer {api_key}" - - endpoint_url = credentials.get("endpoint_url") - if endpoint_url and not endpoint_url.endswith("/"): - endpoint_url += "/" - server_url = credentials.get("server_url") - - # prepare the payload for a simple ping to the model - data = {"model": model, "max_tokens": 5} - - completion_type = LLMMode.value_of(credentials["mode"]) - - if completion_type is LLMMode.CHAT: - data["messages"] = [ - {"role": "user", "content": "ping"}, - ] - if "endpoint_url" in credentials: - endpoint_url = str(URL(endpoint_url) / "chat" / "completions") - elif "server_url" in credentials: - endpoint_url = server_url - elif completion_type is LLMMode.COMPLETION: - data["prompt"] = "ping" - if "endpoint_url" in credentials: - endpoint_url = str(URL(endpoint_url) / "completions") - elif "server_url" in credentials: - endpoint_url = server_url - else: - raise ValueError("Unsupported completion type for model configuration.") - - # send a post request to validate the credentials - response = requests.post(endpoint_url, headers=headers, json=data, timeout=(10, 300)) - - if response.status_code != 200: - raise CredentialsValidateFailedError( - f"Credentials validation failed with status code {response.status_code}" - ) - - try: - json_result = response.json() - except json.JSONDecodeError as e: - raise CredentialsValidateFailedError("Credentials validation failed: JSON decode error") - except CredentialsValidateFailedError: - raise - except Exception as ex: - raise CredentialsValidateFailedError(f"An error occurred during credentials validation: {str(ex)}") - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: 
Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke llm completion model - - :param model: model name - :param credentials: credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - headers = { - "Content-Type": "application/json", - "Accept-Charset": "utf-8", - } - - api_key = credentials.get("api_key") - if api_key: - headers["Authorization"] = f"Bearer {api_key}" - - if stream: - headers["Accept"] = "text/event-stream" - - endpoint_url = credentials.get("endpoint_url") - if endpoint_url and not endpoint_url.endswith("/"): - endpoint_url += "/" - server_url = credentials.get("server_url") - - data = {"model": model, "stream": stream, **model_parameters} - - completion_type = LLMMode.value_of(credentials["mode"]) - - if completion_type is LLMMode.CHAT: - if "endpoint_url" in credentials: - endpoint_url = str(URL(endpoint_url) / "chat" / "completions") - elif "server_url" in credentials: - endpoint_url = server_url - data["messages"] = [self._convert_prompt_message_to_dict(m, credentials) for m in prompt_messages] - elif completion_type is LLMMode.COMPLETION: - data["prompt"] = "ping" - if "endpoint_url" in credentials: - endpoint_url = str(URL(endpoint_url) / "completions") - elif "server_url" in credentials: - endpoint_url = server_url - else: - raise ValueError("Unsupported completion type for model configuration.") - - # annotate tools with names, descriptions, etc. 
- function_calling_type = credentials.get("function_calling_type", "no_call") - formatted_tools = [] - if tools: - if function_calling_type == "function_call": - data["functions"] = [ - {"name": tool.name, "description": tool.description, "parameters": tool.parameters} - for tool in tools - ] - elif function_calling_type == "tool_call": - data["tool_choice"] = "auto" - - for tool in tools: - formatted_tools.append(helper.dump_model(PromptMessageFunction(function=tool))) - - data["tools"] = formatted_tools - - if stop: - data["stop"] = stop - - if user: - data["user"] = user - - response = requests.post(endpoint_url, headers=headers, json=data, timeout=(10, 300), stream=stream) - - if response.encoding is None or response.encoding == "ISO-8859-1": - response.encoding = "utf-8" - - if not response.ok: - raise InvokeError(f"API request failed with status code {response.status_code}: {response.text}") - - if stream: - return self._handle_generate_stream_response(model, credentials, response, prompt_messages) - - return self._handle_generate_response(model, credentials, response, prompt_messages) diff --git a/api/core/model_runtime/model_providers/nvidia/llm/mistral-large.yaml b/api/core/model_runtime/model_providers/nvidia/llm/mistral-large.yaml deleted file mode 100644 index 3e14d221417178..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/mistral-large.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: mistralai/mistral-large -label: - zh_Hans: mistralai/mistral-large - en_US: mistralai/mistral-large -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 1024 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: 
presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/mistralai_mixtral-8x7b-instruct-v0.1.yaml b/api/core/model_runtime/model_providers/nvidia/llm/mistralai_mixtral-8x7b-instruct-v0.1.yaml deleted file mode 100644 index d2c4dc5d934a23..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/mistralai_mixtral-8x7b-instruct-v0.1.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: mistralai/mixtral-8x7b-instruct-v0.1 -label: - zh_Hans: mistralai/mixtral-8x7b-instruct-v0.1 - en_US: mistralai/mixtral-8x7b-instruct-v0.1 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 1024 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/mixtral-8x22b-instruct-v0.1.yaml b/api/core/model_runtime/model_providers/nvidia/llm/mixtral-8x22b-instruct-v0.1.yaml deleted file mode 100644 index 05500c03362f0e..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/mixtral-8x22b-instruct-v0.1.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: mistralai/mixtral-8x22b-instruct-v0.1 -label: - zh_Hans: mistralai/mixtral-8x22b-instruct-v0.1 - en_US: mistralai/mixtral-8x22b-instruct-v0.1 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 64000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 
- max: 1024 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/nemotron-4-340b-instruct.yaml b/api/core/model_runtime/model_providers/nvidia/llm/nemotron-4-340b-instruct.yaml deleted file mode 100644 index e5537cd2fd9dc8..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/nemotron-4-340b-instruct.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: nvidia/nemotron-4-340b-instruct -label: - zh_Hans: nvidia/nemotron-4-340b-instruct - en_US: nvidia/nemotron-4-340b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 4096 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/phi-3-medium-128k-instruct.yaml b/api/core/model_runtime/model_providers/nvidia/llm/phi-3-medium-128k-instruct.yaml deleted file mode 100644 index 0c5538d1350613..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/phi-3-medium-128k-instruct.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: microsoft/phi-3-medium-128k-instruct -label: - zh_Hans: microsoft/phi-3-medium-128k-instruct - en_US: microsoft/phi-3-medium-128k-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - 
max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 4096 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/phi-3-mini-128k-instruct.yaml b/api/core/model_runtime/model_providers/nvidia/llm/phi-3-mini-128k-instruct.yaml deleted file mode 100644 index 1eb1c51d01157c..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/phi-3-mini-128k-instruct.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: microsoft/phi-3-mini-128k-instruct -label: - zh_Hans: microsoft/phi-3-mini-128k-instruct - en_US: microsoft/phi-3-mini-128k-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 4096 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/nvidia/llm/recurrentgemma-2b.yaml b/api/core/model_runtime/model_providers/nvidia/llm/recurrentgemma-2b.yaml deleted file mode 100644 index 73fcce39305347..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/llm/recurrentgemma-2b.yaml +++ /dev/null @@ -1,37 +0,0 @@ -model: google/recurrentgemma-2b -label: - zh_Hans: google/recurrentgemma-2b - en_US: google/recurrentgemma-2b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 2048 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.2 - - name: top_p - 
use_template: top_p - min: 0 - max: 1 - default: 0.7 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 1024 - default: 1024 - - name: random_seed - type: int - help: - en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. - zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 - label: - en_US: Seed - zh_Hans: 种子 - default: 0 - min: 0 - max: 2147483647 diff --git a/api/core/model_runtime/model_providers/nvidia/nvidia.py b/api/core/model_runtime/model_providers/nvidia/nvidia.py deleted file mode 100644 index 058fa003462585..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/nvidia.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class MistralAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - model_instance.validate_credentials(model="mistralai/mixtral-8x7b-instruct-v0.1", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/nvidia/nvidia.yaml b/api/core/model_runtime/model_providers/nvidia/nvidia.yaml deleted file mode 100644 index ce894a3372b757..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/nvidia.yaml +++ /dev/null @@ -1,33 +0,0 @@ -provider: nvidia -label: - en_US: API Catalog -description: - en_US: API Catalog - zh_Hans: API Catalog -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.png -background: "#FFFFFF" -help: - title: - en_US: Get your API Key from NVIDIA - zh_Hans: 从 NVIDIA 获取 API Key - url: - en_US: https://build.nvidia.com/explore/discover -supported_model_types: - - llm - - text-embedding - - rerank -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/nvidia/rerank/__init__.py b/api/core/model_runtime/model_providers/nvidia/rerank/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/nvidia/rerank/rerank-qa-mistral-4b.yaml b/api/core/model_runtime/model_providers/nvidia/rerank/rerank-qa-mistral-4b.yaml deleted file mode 100644 index 461f4e1cbe47a4..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/rerank/rerank-qa-mistral-4b.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: nv-rerank-qa-mistral-4b:1 -model_type: rerank -model_properties: - context_size: 512 diff --git 
a/api/core/model_runtime/model_providers/nvidia/rerank/rerank.py b/api/core/model_runtime/model_providers/nvidia/rerank/rerank.py deleted file mode 100644 index fabebc67ab0eeb..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/rerank/rerank.py +++ /dev/null @@ -1,121 +0,0 @@ -from math import exp -from typing import Optional - -import requests - -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - - -class NvidiaRerankModel(RerankModel): - """ - Model class for NVIDIA rerank model. - """ - - def _sigmoid(self, logit: float) -> float: - return 1 / (1 + exp(-logit)) - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n documents to return - :param user: unique user id - :return: rerank result - """ - if len(docs) == 0: - return RerankResult(model=model, docs=[]) - - try: - invoke_url = "https://ai.api.nvidia.com/v1/retrieval/nvidia/reranking" - - headers = { - "Authorization": f"Bearer {credentials.get('api_key')}", - "Accept": "application/json", - } - payload = { - "model": model, - "query": {"text": query}, - "passages": [{"text": doc} for doc in docs], - } - session = requests.Session() - response = session.post(invoke_url, headers=headers, json=payload) - 
response.raise_for_status() - results = response.json() - - rerank_documents = [] - for result in results["rankings"]: - index = result["index"] - logit = result["logit"] - rerank_document = RerankDocument( - index=index, - text=docs[index], - score=self._sigmoid(logit), - ) - - rerank_documents.append(rerank_document) - if rerank_documents: - rerank_documents = sorted(rerank_documents, key=lambda x: x.score, reverse=True) - if top_n: - rerank_documents = rerank_documents[:top_n] - return RerankResult(model=model, docs=rerank_documents) - except requests.HTTPError as e: - raise InvokeServerUnavailableError(str(e)) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke( - model=model, - credentials=credentials, - query="What is the GPU memory bandwidth of H100 SXM?", - docs=[ - "Example doc 1", - "Example doc 2", - "Example doc 3", - ], - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - """ - return { - InvokeConnectionError: [requests.ConnectionError], - InvokeServerUnavailableError: [requests.HTTPError], - InvokeRateLimitError: [], - InvokeAuthorizationError: [requests.HTTPError], - InvokeBadRequestError: [requests.RequestException], - } diff --git a/api/core/model_runtime/model_providers/nvidia/text_embedding/__init__.py b/api/core/model_runtime/model_providers/nvidia/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/nvidia/text_embedding/embed-qa-4.yaml b/api/core/model_runtime/model_providers/nvidia/text_embedding/embed-qa-4.yaml deleted file mode 100644 index a9b5e25c3c695e..00000000000000 --- 
a/api/core/model_runtime/model_providers/nvidia/text_embedding/embed-qa-4.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: NV-Embed-QA -model_type: text-embedding -model_properties: - context_size: 512 - max_chunks: 1 diff --git a/api/core/model_runtime/model_providers/nvidia_nim/__init__.py b/api/core/model_runtime/model_providers/nvidia_nim/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/nvidia_nim/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/nvidia_nim/_assets/icon_l_en.png deleted file mode 100644 index 5a7f42e61792b7b8be0529f6ae0ad4f3ba5fa7f9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 112528 zcmeEt2U`?bw=S_!Nh$+KR%k>7LCK*3BhY{_B2ls=k<{cIT2w~Kg@8!<96%%^NKTCi zZA%gua%zI)&;-fh?!uY#UC%l9{({e^9<0J$d#}CLyWSOc`Qmp?75Y=`r^v|2=+UaT zwaLhs{vacxtT=%HzfnYSz9%Cu*GJ#Jp>vLHu|cbKZ0kn<>ekHO{@@!aW=6-?X)noE zL$J*!Kk4N)H=-{woG!PMyeOhNSvG{sxHkRQ6;_^0ci&vQ;4)}8^8Oo+87dAcVabO2E* zOb-%hR+iFWvHtzXUtRs*5C3(S_v!3^-T(V0w}|TB5C7cZE(??YbN}zZi%$J#TC)Er z0RZAZO8Acwe)7V9knkTQ{09j?HSixK{09mDLBfA&;Q!+e7vU_n{k!K(SMJiyAI3-D z?nk69f0r1__uLEG%%KFiC+SIN)BimaS#a(k6-hT>-IQuQX-K+YhmJ;wW$*XLTSm; z-297{p;W)wIuZ8>Kca#{XW!nnpgX{aFF)D zFX27XPen#LJ94Bdtt(yHZCaAlJZqkwl#`USq${o9m(9W6GfSP`p}4(%piv{NxI3D< zNUi10Y|FDWz2s1sviW97a)?9zfb@E)Z7_E;r}n?kAKW(#lq}SqV(&AZHJCKCDW&e` zR#u+xC7fO>J7oW{&%rr-b9(Xl;pi-NiL+xFN8t;a(Qc+5rU2TlBMWDf)McW9oVSlV z#z9_Y-aI{|gh5K)U`dxIw;YrUB<(g&%3oG2c+5nG8IjrYrb(^)Ydn)7%GWlXr_jmC zv%92J+D1D3-q3)4#N&3;t9v=Jd+p&O&5r{%+7AnMtLMW^nhv7efyx~J8n>RqCvl~E z7>>#D;ZN0SLk5A$3O{*VPW5ioCet6@-`Wnaj&cpWy_d?m1ItCcQ28_pSs(&qT^nt~_O?GivyKh(h z_l7#(TF-s+EK{2Q{iyn2>5=o?LK`veM^e^a?tEd+QD@(FoQiMvR?XW3m&ya>y|zsb zuAI%>gZV4JP71^~1M?c>yUq9wiK_!~-*~(q2=sVA=sfhc@}qc;to_EHXY#(T)Rc?r z2ALlC0EyFs+YvazR=fRo1QGN2Z+Ef{L=}Ym*S0>IQYvw*Zs(B06bHcWk+xoNc>f49g=s zW0n?sugP^SE`hx-NDa~dVzY{zyJR?_Y5k6^GD`JwhjK1Ecl8p!j=)Nz=GoBQmuE<4 
zQSN4}IkY{tzMYAoLqh}ZR=xR_?pE^E9^sJ^ety491|CRqCBROG{m2HIebF{rs=HVj}bND+*O6NJ`G}GmfA4zk|F!F1=ESKv0F=(R3dqZq)%t}m zpYs^X8=B6$_Sm!O{h6_@*@q-{U%(qWtT`PUf4W6^5feqdjD|FY)soGE;4)|ArEKucU7a@gH(ZZ5bj9oVN=D4B+YdB4bc2i!Ttwyk>KY@Q(mZOa zY6n)8oq;*Fms|fAZ#AA#+@1?`FkSBVba!ZtR5!&&HG#? z$|}_l+u6x$BzP*zMRm;HD~>Yr$c*LpEJZ}>mrKi!&PM!qx5IgvGm#nJYDW!^7SHv+ zPcMno#g}<_yEnbJArsA|(b}@@Nz*<0Af!mj4t!|3y#0QNgR9V~Q0jloQ*OaTRuh@x zS3}tmkS#wmo2u+3C2z|0`h#40Sc3<#A#Fh`S1JE6KtI{1qBbS7+bt{2X>)mg8pIG^ ziK=tIY!RcIL`L+OUff}DUf2+;S{YCu8JK-u_YhGe$2ijWY~Hd5Up29qK4M(!x4hBa zv-f7?*Y{x+Jz^ru8y?UqS4vyCozB$5RWd4>!&WF%G7`+~t)->IldHIi)%sDZo-*7^ zxF#>VUFa^Dzb6 zM9Yy6Tq${)dICvYbiYghGtk$IWCmD9g~Os-K7jl78khwpZ8|9DZb zjk7%XD=*g4vsi9mw)qz!z(d7F2Z~_*FO_(CQjo}x68Ut$nMHSxiZgw=olU_BO(9#G zmGf>rw~p3UJ;yeTSM0O=l4=VFfmiiDa=-S=WW*c*-ZB3Lg*KpI^Cf_{w0y-qp{R{<=@s|K?aOQ)Ih`MVc^NzyMBF7K-A#u5 z$9TTro>x?-iJPZaN#}{+zU(m{XR{pcd~St3v%Na0l*jNSb{mtw?g3E#8<6;!O%6{k zDXxEC<}RsDt^I2xeYA(!4S|!K#%}$P?pe{VKUzqerK#J0t{IVe-_5h*JW1K7ed^apDOyG=G;K&xZf z{|lkxJSO3&*tVFOlA;On7eD2LN8;WdfT~Ts0v8SxbPqg#3?1!u9YbgS>K<*DC62 zOlrpa#yDvtH7Cn94VgYsTYz4hd9ia{Fo+VG<<4kn3iOj&exHipkcq^C!4 z^P!}F?~%4mK#D2 zlx%L{7C$izKcQa4UD>86vm~eOkL2p-C%A4X*4z^dU})rIK@w6|ZxXC3={dkFR)Vh^ zlM6kgYi+^EXI#s!Or8$~oIDe_`kzq?gH4;0)50@bRV6AevcbfTEw+8Jz?}{H)Os1qz;kVrc0b?0Vy+2Uoo$`C&N-V@ z%@y>lW4w^0zs-4g>iD+kK4iCk50=nomAA$SLSeLU4M3-U{epyyA$S6L-#wZ_F= z;_yY`IexU5OM7zp$)w(ZZ$tRdB%asOucdu6V9g~ex3cw^Gea!XLFSao3zeRSNo1r0%NXh^=KE=?EakqXf__56i-+O|I(Zw^#mkHY9 zVSPk8G-Pe32U(%Fv+23lG1K&VmfT~CYVW>0(uDRYZS%^QoevY~4-Z79L)q3F9|cZz zg4e}Nl-;23)|wE~Y>bISg~In$(GL~Tr+xD1f0+*nQwIf0tG?^l8W*g10HLl5pf9%c zRG9wM*t?csN=&d{8ST&q${`m@py)D78c#LYf_}Cvg)vrwzE|3T1>=DQxwRLh=93MJ}F)HA`q2sPVl_V7EGm)uFEmorfzR=f1Te`+zCE+Y8v3!a$V z)>wWZS*UvPD+SgLTo$lI9N2H|{02S41FJxrB45rnOV4N|dO1vd+OZ*C+Jrbfz^utB z<1&ST%~R~&29*1Dl?p|UuN=BkL6`Qqr-FhO8s_1_IrgiFnba|Ne9q!01e7a9#%aWd zArmi_13>tFnk32Ot@U_X2$fCTub|Mkya8*>u84i?B0+nTO9qh!Ar18aPBtt0OqVRi 
zHhpqgRo25=8AAjjfDS2ttVFKcZkMEu&JGVWya-EYW#ANr{)0g&GS6U7cle%pP4HdD~3MjoC>`<#H>@fB$n1XLr8I%O7(+-hS~yVyeG5{B%y z0{%Sp7IT%MS#a4^99Xh~p>N{B@CA4?+CRiNFA(Bx6U(l_1#&tZ6Znm-Y_?p=>0 z6oYu=TA%ERrKAg7jAT=KLdn0=3~0^@)W@SN4nC9ofy-yScCYOt831UYOpBuPhNja1 zWCN*@S(B0=2q``fTfH!hG^|q$$#n^~+U1N4<0sT^NuchD8IB4NK33|21#?oL3XCJY zhOj~yK`9GB#t@S6*#YJyGIlC%n{AjET`UYNGi+a2JJ`s*E*;6knqrPYaSIV@frB6! zpO~S(QndTDP#><#Og@_=-cCLb&ye4lY~^DJo97+=P=-xlQp%q*Vbax~(+eu!HM7UellV;(Zrir+M(1jM3ez+s#p zkK9Q$FK2q*As=Wp3RV+#vE`hp{|S+$PD&Dn_=f zCO~uLY(Wvu2^tBFBn7a@&a@3|6A_pLt+Hi*3!M-s3KoJzh}*7#MerNsyCzCNJ_zwL z&q5OZ0_jAhN|p(+D3LkRY_mx$KBLb&GC@E#$4**$O^ID6@Gg99KD8Y; z^rk#Kd%XW3WTK{#i=ZiQqDWMO#qs))IHeekNckc|czmV_8t=ZH^End@d->;aZ3Yk>lmZt*<{!M1#T*7!Uh<-;A>8z0< zOfFAh2?o{yDvD770-DW)Wmm~Sp87~GXwLNlkU>#pLP;HRUOqvv+WD;jvSJ?rys42Z zFpl1~@EUl2xBrj3700B$H^~oa6eo=PqoHhRA@rITOz5n2v#$~xw8GPG(Lsx7z}m&P zlRpgVIhX{!2=cXz6?_^P6*%^|nHCXc97Z#lMEO1ey~QMK7nY zssb1s6M{R<`1u;{Y7Dj11j->6ygW7fx)F#}99zT#^3cS`;*eFM{1QcIN?m7t?Lk;U zGTXiYCodsY8h}x~bvJ!c&c1ZLIVDv-Qj@FlI;{T4V<-X_Iecq7$x6!d<~fv2y@{Tm z77)Hwc)-l)*UH4DBqG;9+>XIBz+|OSyY%c(VD#WVkJo6UQ;DZd=b#JbPuK|H@@upt zJ*+t=J(0eBNu(htRwKSm<~W;df~Hcru+DKBk!{-*@W}4R{JrM%PEGvqKo9ukedm(b z30CZ_lS>u%A*;u2YLKSZc}s4e)TwVf?aLCo0+KGK4~0Giy`+!$EUWv1_a>TIKJ(MUMtlb zujY@n6#q#9mGWeTpK>t@iZ9hk_lAYR1`-3uB7D&A2<-l%hq*|C&YKwI{pq_2{VN-^ z2mR%Woixpzdl9A~cmNz9veB~Uy^aS<%#%d^$ysTHQv$4lEeDX}4KUR)6*jI+My>iT z@)0gapixspCmAzHeT++HTvup{ATx(&$b9yTd5f&jQ8!8A&nQ( zW{c=7v)Q{PW@}iNjkIu6V0$Y@kUJj|9oJ1I=}aGzoYId3!OQ5n-Y0Mb1JdpL zc3|C#w95Vx&H8Ug@+CuH$Fj;DAWJGvNO8%dZI|oKZVWHo*4jSC+msWi0tln7rw{q1 zh>R|0ZTK1U)j^rZ=KTh_^O(jW&dSxH@uJUp1fk0djyE(;ECSq`>`WQw=3-36^nIRh%N|y1~l?s{t55=;itqoc-zp|8qQ_;AjUzoownK08#_& zC~6~q^t-R&$i&lGj_zGmVgCM{wLFEN()klPVM3)#4oFr9crs&1pJCsZgR72{$LTR) zbW6+S;%HP_(*=kI?__*AQ89Fq08NR|jGrJ(ZBreC8R^$dOA_%v?cAgUJCVk7AO9$1 z_u`A!3CKh_pxKi~K`-LUgF16V89|;DPZdnHb17FA~OE#Z3Fr=Hv>p$3T zN*7jzoQbVIL!>v|(%Y3T7%6~gcx%v9gDad)c`GRfN2zZXv~9By+;^lnBkQA)d7sMF zFXC;6Mc{#(dABepOhZAUy?uHaR=M!qbx4@Tp2V;ik4$8TrSa{iSt@_nTVFA$S;&Fd 
zQy23X-MF!*;LxjxC{d=_E_P`?Z2U7WEF>U&bKP28Xy1&lMA;4D_F7U7XR_I-=r#O; zEy(#JD?MsLv)6Hhxw@!PpU&x1yM_CZOhr>dMD++!2vOb|?!;lc$jLG?C4`V?VO{bc za`99Z>j4!ly{H@rEc7MqSLL+foqY$zHq!7^9j#J?^n~=PTmQuYd8vmq4AB}1GsJq; zvyrbf;@7RU&LP#uVB+NVk7}|fu^We&eR6&X1KDa$=A3|9F(+r3?ZjgcH$grZAKfi2 z^N4ew$}}ntfNTg8+rY4Rwd$m_k$X2TvWaake`vOT{Q6PLY!KUit;_R<1p32vnW~{j z*@;cqx`*!n66FXOm|dY0KOB|WmY)o-m(DXEQ8xQ9LcEyH1*%g+Fb#u)S!GuwC~Ei# zxBZ4(3UK$FN6iCmFo@$|Z1o22A%Hy7X7zD$@L01SDHKvzsr8Npn0*-s8U?Opq6{Ds z*<!r_wMd6y5q0p=SH$l~Y-#3msP*VtP!1F`Gwkkw(!c>1a@H4QJkcD%cA*|!5 z8rI{o1SP?Mt3kF9*imdj7tmtNHvEKJvmYZg*%3;WG5`gkpW3mGd_zT!oB>Yfh9!in zP7di8xOamz<0DO8zoS!f%-+ly8O+r6^m=LYb8IR$HiqS$pkcI7G23lpT>hmufZJ&Z zjDBTB@W#6wveZ`{o}C#zyi>@7I6AclGK)l;pWfW%a(?v513hZE^2cfNOETYOhpO^9p zu=5WjA=1RFEI3E5iuWrrIGtk|DXM6pH}ef&s>eK)N1gC2EV?TGbyPC+7Us*<$(1}D zm)Gu)2ZymzYrj-v0S}^2RHQ1Bo^Po_DM1WL?%;&~|+OJ7#gk$&Gcu^h@y=TxLy=>R01UK_eMjyZTW7S^>VSjqZ* zY!JtHiU$nHh|w9i&s0Se@KgkHnmA()CjMNo<*^wOKWhFV8jw8#p)R#6_L>p%o$J2Q z*s|36bJqA1<>BSQ^2elQh z1$5L{XDmRH2U)H7icIqxMoxb~@PL%my95-4m_cO$`h5;gF*R4E5gQCw8CwARP;*0> zJP<{c+q&Uw`NQ%`7v=c)&`Cd)x<2AcYim(OlGd`l#mKWV2ICUHPJyR@5uJiSH)t_>Q28CViV|<+L?Dw=m?ON8T1$Fv(8cuNRpdpBtpc!{t~IN-=I=wLeD)D~ z>aNa9&3X=819HNHaw3COG7I&oZh4}wf3Mazc+F<*+RDZy<=WpG9Mdhet8Q%Xp5IU! 
z7FM8O8)1Jyg2T=0VcW*e6MN9qDLqT8ciRlk^a}gU zRiPMHVy$H5)Brv>(;x-AzhiTva2C;^hIOUsJ1nxojElh)f~%(l!zU>;^g>Dqy;<~w z12rAlsqPPwYmz^Dd0;ju+<^;mrMIb}wtJgW-_)8=SRd`298-oG{t8?Lnw5CohcENMIs$ z@Eba9HoSlcF3e7^6zrmf1K_?&B5EvBnwU1OG%oB>m$O6zf*$7h}nl(Atw{BxAqYQ zO{F%*VfGzz&H2!?M}+nX*Dq`f2y z@Uh75NixnCQK0jl{6`!B?~Tom_lh@`Kv&z!HUBfmYKXUVdlM^|0Ce%f_)iKDmDC`O zkA~^`D)C#=}e%#Jz>!Y)(uN;1(=N*mTYN zR4Kz6Bv@%HU&N*b0Y_WC27(A5GImnD|2Rv!*e$-Q0Y+8)4&HDbG*)A=2SX*rxF@2#wE)V1r_DGiniXlw-QS@3e zw>SAfer0ThF+fVBp=KR5;9yJf!{e+#qb$J!45X^d2b+s1@ckU2 z_~B5}Q*DhtWz|3Ed3o~YnIs#V8rP|~*Kqv|uBiqLogTa_k9u9N%_5E*nsbqXJt96t zSLU{u>~Tj4n!}JC@}M|oCd%Qr#)d7gg5p{#YUPtHz++b)vNEC9Wa^}{eQ4)kOrr`x z?CLhunbq7W*%)qbQ!%Zc4l9usT4W2MbRm9GR~bI^4ssnt=+g@sm&)vZ9>7;ALw*}& zk|NDbAocG2l8ecM*HLeP!MIdMeMBKJnMF#13OCt|Bfz=N8yXrMt zo1(r4SG|Qm4+IkC51}Tk6$b-TTgmn_)?;nnY1osE$DjcTP zEIOHm^s62o-1Af`eubjeN*s^y8n)}JqBXg;?7512SJGRd0o9iuUaBVXjC#KqGv491 z4Nr$#UB~7IHAUZK_O^A6zH5T|YTvdeVMB&43dhQyXw!kaOqiEU zR*eV=w}wMBk9%ha8pCZT$MS||?>Cya$n+OKEWe_pw4#jOpSZ_Dn#^)0?M)Knwk?2o zjHR7F>q7-rt)}~{n){`~UPd3-?D@MV4P`iaVy)ife@aKjj$br}d+_M^&{BhEZmUkl zaMRd=J1P!oW=}os@)k+>oEV2ZYS~rK8{Xj;;HY)rY*T;Nm_S$XarUy1vFxI2Polj; zg4F|*C8t8;tl1*BgOS(UE0g9etxiTDTfbY}nj-7mxUwG^RuK5{ZX+>H2C(w6`f44O z_LaOZkG_Z^xR%3$eZT6j0oi#zwQ@CnsRUCfvIRf6T1aw84m;w7p9YjZ)gUWv?Ma>v zpftKv0tx#~K9X?^iCwAaZXw+yWN;iLvgw1B-WZO56^~lQ1EkQfaY=-miZeJF1F?Y8 zDu&`qrUTSK4`?IPb8-Z<2>%3yqU_pb)Sds5>q_cNuZaX}*`aW78ubo84r!FEzy|$E zfbm&8pWR3rHI!+vV|;(a4Y~~qey4hgfNGem^&sn|k5SuiyG7TE6WK)LGzvH(3fK|) zA|h4uMtW7Q2lt#nIQ6UPRL6I!hc=8ARDX-dUYUH<0?s*&i|J~YWaKf;b37mHl2_Gg z&VpcftgFSlT21n+I{RXW8u95`7UG((G@=?wNMlig#Diy){ITQfKfxw!ChpCb8t!GD z4&D=%UWcZVlJM1lSH9*UTe^%y+=rL%2rz~uuqhae8P@$F4YQ+ol7K|bK%%d69Gnsi z?FO9rTnGZPvW>WJQ6ATTJl*)K7CG{Y`gTnkdZX?NA^oYG@m5DZ{}Vrg(*1;b?Mp7&*j&-IO$S_n47SQRy3!R;&PcWsRr04)dj z)CjpVR!8HEzu`@z2u{b5-A>$N7tnQ;p%&c#NcO2J4CQyO*1_0aY%^BlxqXqf)%xNo zqd-s?#lI-^d-g2xUh!X?!P0M5n>{!Jh97nFCV|4|PO(OE>7Y-RvuoUlCYLO86hLvY zbT~|ENFUDj=0h18X@cwONr&dgfGao|@hOl-EKIDi<9a{+a4qOf1IMaW4J8N2cB~%c 
ze+Gcx>Lo61gLARluj8z8QV!236;*s{5%vYeKY{>BMIK^onx|blXsRNde^pX?>^y`{Q8b!NKma)dJDa&<;Gbc5qxdFleP#-4zahYtoSYW zCpRRhiQR>RfsHY&8_?yeA)^`R@&+eikVd)>ryzQ{HtQjM;2p?{w^dCn4g{;cMb9-E z?}4knpqQ@eeguf=;l~{#Zi^OBQvoJQiQ~g_a7NXGj}P9te4NviI+&>8;J?3^*Q4k< zXf49w|fZ>)5fWw5kq5CL3%yp}%X70glM#!+xH6k?v(zhvs`A zpjRNfr?L(B6Y0t)kLx|i*#@EXB80@?0E66)q|>Sn?KyFGG^&7;kQzhuyH8rq8%rfM z4_(Uj&-vE_*bG1V*c9F-0X$y*JD1l^|} zr+pUabHvDMO3FO=TRXMmb}}4$lbnl?T^XASU?+=h5r5E#D>~%{5sKK-qG=j00kse$ zyvZ3p$&)l;6}hi(920 zj|vpo?;71gJKb#cWgJJ8ljx)(svUiY*wf-Q5=*8JHmkynrG(`?V(!z_-TzxG1xKZ1 z?12k|=a-LFI$|c=p^d;g?;3#7T(V)H5dwZFc+01Tmrsg=H^g4(nVUkxLF?Y~AYHRU zC&gJkO!0hsi>94h1$3d=MO(>l}`Gz$zy^JpcoBq8r0gz)5+~@KVCD zP47?TS^Bdqj1_nKgoSvqaeGq+yp0f1-@@C{b3(W}hVE zOMMtK^|riwb|U6(qZ4b|4~jmuL+sBOZC7?yfKe#9#wjEM!NaY$O@hA>A-s~YEX`N? z;euD#m&#&Kd1(s-t1Jg!6#&f)b96NGJftuCDj~;s^gv7j94sO_niU}ehQ16811-(d zp+v@2UgL}4DRxQ)9QAI=B@3X7GuqU|uJt;SK3b4o#^{Hq3#f8GPH$;$cxTbsWSj4j zh$NUGazFSEFX8(fH^wbGLpzKoEg?Y?tx5&uivN13N!`OrgG82jmSy=d*BGgU$o9Ed3AC@WWyp% z5hcFRXp0(~;b&T2WUT=09H)>Lmz)OBhXd;XcFJg6)k9J6MBBLvDq?w}LC^ynfGz{Y z=+9hwQBdm>o43SaEf_BBu`Z(uzNU%#jDo*tuxe8)wgM&7*P)e-mlhkRIQe~M{aKR2 zMMf3+-P<1rT|v8oL($nxgs6$Jds9_}BB~Xb3%_kicq(r{x{~bqw&i(ewhXft?AGA2 z((+*$eW^l3C9-&#iw9m8Q(%a-g7QBBh+_nQ_HhSdHyhddS#8a;j?xq=uq9O@rxo@O zQe=td_rpzi#lH?jT+xQ$^-{_wBqta*Kcy3gn))ne;?QC&M{4Yl73qIny$E2lf+g?JJ(nLyj%FY_HwgkAkN6P(V1f926ws z`v9AZP2)qWWe7uOv(uxzmWSKe$TjceL~`M5h^gA<>u;#{&!M{x!z3u%K720Tq~Q2c zWN|Z|KIIA>U#3n6#UXXYpZN*SR${_0VoKn{YH&$BV4xA9VIWhCdfap#b4`2cHU0ic z5*5_-h(X(z4$jKZmqS-^K{TcacXBOoLZ6XgNH!Id(dmYara45QwRx>$UR~m<)ri3M zTytgD_CV1MbA}BMmP9+Mre_qWTDG2_+hQOe)q1hDGTheaSZnM>Bp>b{_#~3?YM#)| zxKYm9uS$H(8w=#wO~IXpW!mh~A-WR5ch5Ss2h3o+cceohlAr4sR^KFwU%Of&k58(0 zGwy*!gDzVupD?a=ldgXxT2z*vkgY1%n=;uuZtKHW#C@z7Bs%@OBqJH&4Skb5kpbP_FR0zsh#W{ zpRTr*+X)BZ2usVYll!N$!(V#PS@S}s27E;*8<~Dx z5Fp*k5X%+n`Xi&3DUqs%r@#N6`F;GwNENm00&0IJZ${da!{8K~eTd=~+=^qTXTr8% zqyF`ggR1O&Ph>jQ2ASDroUw5ZKI(;Z(Cb(s?0q1UcIo$+j5RxI(`QIQmtxhNicJau 
zN?XdT3%aT&=w7Kgobqs3OzmKdzMN-(|NSDb-BaEvLbS6GMjzp1%UJGpWg>_#^9!sM zjGhYi@B}Lv3|mXBj5Br)c0fJZ{_|L&VB27iOZkV)-X~r^*O9j&-?|`pr5GouI#)zL zd#5M*hFTpLtPf)wax=2glMXOmdalxY(?B|k;=z?TUnKekbVs|%{WqB$DqBiha+(Wq zWA~|~;$J$vWRo(Dn_)wuvwt(c_waEc&EyBe8^~rS>Nt!E)6UP*D`IWa^4yE&`7V$&etB$3}OJ>*-wK&zMU31L){$Hu*N{8?|u+ zxWBw)jLhC2rO#7=UR^Ro#*Jjw*5te%&pRh8j~iFDX5ky7!$x=*N}2lbj@^KCY8oA z*#;vr2EUz>Ak*R#q<<6BHOZiJg)L)h2n!EfI!(NPovVleysE3FP(+2kT;Ft^sfZb^ z{m?2}M{g-0*^tgZRaF`zqrI~)_Nnc5y9|kp#dD@&;w#e*1C;T=Ln0tv^UlEx33fdY!xi8!8vS>3HAcmEi$g< zEiMx$j^n!Ih`_qRE=6QP@{eekivWirJBozI2nvc&s^PxF0u8UJYJZ=?YQY?+_LL4% z?pX_DAEM2R#Nv5{Z(dHja;@0mlZ1Wd6Hc7`D^cC;+Yu*GJUs(HURAQcCk4>toYMg=Ym- z6xqR4iI73qZ77^Hrb+Hfv2@v@aGLj_slN$j%xq~BYfhLs2w_Z&cm#MeuY-Te384)x z#fihd11|P#7NRy$_0`1?)lzFI2+BM88XzMCR}X&~hF1Gi7{i7UgvU9?Vo{KePd<(l zcyIKbE=402Ef0`pB#()Y%=})zNQZ3{L(F{vaEwunLIEY|Q07QTTZy-Uj3(orb z$=RFWKAoSjh8mqylR%!2qf~`*L_#voEG%Z#)m~RQDW$S~{#&p_6?aq&GgEW-aO`AP zk8ItuSFRp+*kg3=9?=`Hh!2m9;^{%$yrWjJj0*B1db2#k%>Z_rV?B41-H071Jf~ob zOrPBF6(Wj!OeKda|2icE&3k9Kd$Vt!$|SymdN@Vq@yrHX^2GY zCfw*UHS*{1aI6&`s7#8$J%;sio0xmeb$R(qUMgxq&d8fRTlu$zb2qimTD?eSqNH4+ z?kyN>b;zJva2TH}psrD(8r7PrR)sP)UP|yrTsijLF+tq-OIPXu&(34-sH**$Kqem3 zHNFyTq2u@uu|fZ0+NOokUH=Aa8Iv(DdrpQ6t=(fC{ZU_!`SoTN(;Pjvp87$wa>vyo zMV3V1FewT?P6kOc396Fs3RL+ZXzH}NPxhBS;9Mq^*pQgBdG_RKVf>>EFm-3%H$MSe zqNj+$;CYw%>3%Grd_KV!(-h7()k1F^ThdPKOvP_9G+@|L6zba_NL_vIV*xgv$n9l2 zixyQ;DUnTIh)6ZTRsBBEQRODQZ1^Qa6~O5SZcQUzVDO)Hb8IgRRk^hS8{$NMLIk-K zK^goVck`9|7$qY5v5NTbp&1#8A+H#es1e5zIJ4~nAeD#k_C*;e0A>6rr6`F(>gpoU z9UyoJ%6qM!`$ldRql_0ZH?e!IG}r#&&)m3tuz0|~7a!H))4us*%25AQD}wv)@8`Nk zy%mZmUPnSsr?WjdDO;7byISu6qA=$G%78K?cEyQ&I5co0yNorQFfx0W9XLov&e!vE zvU-~?#(q4~R25v0ZnD#3dCpXb{RY%-+4LsX`>Y+EVDGi384~Yl;-kje0+y)d26q#m zyt;1BLL!l_W<9+wO%eiOd=20htfO;x9c*9y9z;(e9|N`zK~{*XfFOR-L*_tIusU`U z4D3?<$crX)p3=Ap=|~zP4}y3^NN*g7$z(I9ckX9&exNomOBh$zZ=c*#gP+E$Yol;jpV5u+70GI)x&pVe4j?-{&Jz` z`GBe#REfi?61K$6f98dQZR8@S8ejkSJxGM>aNGslPSW7 zORqR1fQtFM@xdsy;=P#1_?!PCN29MX3UVEidsFb=K?v@60;0&sc*E$={s{I?Q5!|l zDWss`Xb3X7Hkxw 
z<5-9o4S&VJ@jLV?8gk|Z0;mct74>XqG zfpxp|f$H`1k4M~*MX6^O{2uIF0Qp0zFR|wIBS#^O+aJTjDKmbmCtVIY7s(H+9GRk? z(zOKNhcN-J4Kp96B8qKWqQE7)I&wOSE2^b$*(#XPJyNVPzX|RHlK;$?b0k^Gw8)R( zQ}YA;HQWbP?X?=tFO-0!bzBTHuD#F4+2PLe(#C9#dGY3T+^+j8WyS@tU_dC90UKpL zSHQ3iJHAcfio-3nKT`FUE8WGTehc+^g%cw({~bPdEd?l#sPf33-|IRS!a^#VTun9S zoviBI)f4;2yrF&N!O49g3_TEg*D_yK+qINJpR)coq;n5QU}Y`pOrUBfk0+y7-Qy!U zng@I-s`KMkx&vigyNKI3H}`e%ZE0;nlEC8lbbIWMq&9`IY*bcoi`=d^GJgB~G4!F! zxgeruOjV!u{%xC1?M)%bV|t!j2KP}KMeE$n;>~Qdc_r~r%GLERZUagb6$(xW18vV@ zd6HuRA4v|p)_@h{Yse-2B#G~tjUbk>LCnvp)wkUk7rdRJHYu{J#MyTeX zueow!7reM}3QI{cEP^+xSV);CX)t2gBtS?#%y2Au4(`-K!>~S=?6}G60AXt0)RrtI zyAKc|4YmeUT*QYjX&y5Ml|QARmAzu)omY!fk>AT!QTt<=v;6s&By1>*&aZ{A zH8y0JWqA18lrsih&i)>i@tDAuc}rC7&woOwUC9Xp_=^G?fL!ghyA+xLk@wg#*#GKn zflR9BB4TKu_q@A6w1eIrrZ3;ehBQ}OtQ z>Ob6t|nZkuz_Sj&_skvQyIO+MZAzP_&Z|_CAtSAgP{TuoUASadjIs5KNApE zoBaxf2>2|`&Ox3DtrkUnoe#=*N#N~?E;Ee;undb?=d+65)FldCb5++${Q)9#0ITdZ zsz_&13@4itsG|;4bv^Gy3IsDdQU}r*Q)$4TxPhxH7n_M|#C80BcAxzmRittbeH)H7 zM#s(#`$ClBlQP}J?$)q+zwy+Y6O@RHEx=iv3grH=Ii zc%!O|(&4ZnZ&S_5AmEHMruHPS9;4DO?+v*y%^MhR@?vm|NYf);LZX>t?x)aM$;*hm!LXI^mGr%m+3G2SbsClA5Jz>p`Afu@7VRc65*L~tlr7%19$g{+K1C>SV0d#Qzz zHacU&hx*&8eM%^!xaEB2??cLO7*9A*i2ym3omeqF1*HL!TQ5kxHX^H60O|)cGpH_3 zpnVGv=0Dm;92{1@yMtA~u6s+h2wot)I03gJ(gnI;<7P-0p4yjHdqQqbtL^xjz6ODg zI~`o9N+_rK*(b;td%#e`1jY?YF@ySMPLlP}9Z1KM4b)18=J&Eyv8qPF349U5jc;!k z$m4hHS*`|WyvzYj3Zz2)4waLWC`Nrq`Q3TlPQ2DsS``Zh9bAIV0*;ir_3Vr0(rou*?$qh!8tc@i8KZOf@8a-4 z?i`aa00BQC5;*3vj=Q1u9?bYQKY^CdUxX^2VlrBJn)SBtAIT%Jwj=XC`ylfZq>m(1 zl&p(c(BE|7)Mox6Oym~xKT&bYo8_ky*e%L(Ji5~(bz=XwY$*v!a`bml&h7C2^gMJ& zDV@Pf7FBX#P|f$>?%sake}$?5VRJs>_u$dfK1N=to6D#u-t75# zPz|8@&d7>0RptWOVQG0s&|6w$o~)T`}iM?D{X+?qI#zMMp-{ltAU)=?CLZY7<&%4j!L z_KG*(MD_t((Sul4{rq%=?+o`cDEip7nLTDIBq_xyQJ}U5=7S{4YFK4BbcfD^#!Ezo z>l<1P&{OzXQ2Pd|P_zUm09@#BFi-=ETasac5hul#@pN{Uzut|}jH;LHhbrHh?Hj>m zU01KzuWj%Ox`1}w1@zwKF5{Y7Og5U(?ubBV^Wi#7;uNniZ&6(>?oE5(d`yn{5~rue89 z1{qOBr~NE{(Ng&jhZEmUaz2ruxIjv@#f#U=<4g5Lrwoyq$}p+geWcb5$Jndz=zDE; 
z2kTJh&bW*{T}{>`0(S3Or=`0PaRn{|dfqWow7q}8O<5PJ!vVTjvdQ7E{D*=m)WYQc z?x;0C@%$G!!pzvnrfI(eJ?Wd9v4rQDBPn%3=4a(qbCQ}nm zE3!2nPJK$L9w>5UTDg_;q@Bgnqix1m&6soC765Jrc0tgS(#yNkv+;j;x(;|K`2T;y zIa~JLXBx4T_>G*`vE;&pWazDc3j&$!gFwP#NhWGOwi+rKq(3pVRaE zzdWxVJ$dr^e!rjb{=7f$@0S~#x=eWv^YIX$mxpI3zye+n917xIb%ZRyJZyz2Kjs^9 z8r`bt_Xzv%+hdv|Uu-*QJAzchgk(2=OLgXIo%?ihe5H~TAAO9hu(qEJA%F4s@O%?f zNSHP{`B2wP!QXY6pZ8bu5=I37fYb0HInw-nt#I?z(0CiKOp-aB6=J~Cs+Ow;R3_UUGscGIDQ+o-l20lwvA*8PM@;r2rtgg3%+mUe39JN7J? zjI>6+m?!uRTAViC^Dav-2Zr+9jOi z`j0mZH?$IuHvk)FP2|dReM7z>|00%?o=Xrq6LF{o(?9*^%2ZNPmfnW6j@G zaffbijipRgrt^Zu{Lp2MyDjq=`^19|#Xw5$Q=J|N3ql@-zkz4OMIM$MTPqx|R$JPj zHJnjQOT^`8@RKD-pHtAwGM>f%fA9>JBd;-^Z4`*dlM=olZD>EKAK?J32n=^W{_v8R zCDWFU!VjcZiDY}u)K6`SUmqnZuIf?e&q-+r`?0u8MA@FW=*V#$Z`ayJ;C6lE3jEkY z+#WdH@H!r(WPLrwubff)T#0SOT)Kpn)d)8~i5k+VE*{%K zR=Q>3O3s=d;}^k>`ssg`K7!+K9d^r5pzTyw(%ihPRbl;S>oNY+2EmKEhfGWa_d1fa zPsy6T8cz%wE`FoP6aq9duZ{U|DQ%p_A~4MTF}z-c;k!=^D~tY%_8V)XA=NWWQm@j! z5XQDg7co9!*@zV<%P)wg{iLb!BG@Z|tH=EO4&B-HaZ>abDqe;Z^NQA>PE?c$V)_g| z+jzI$uC9PCOn0A|mH$3U!%{Zv5xz^{MB6EllSKAoTP2=}{o1N`<#PFsB)uj(sraVO zsz{iK6-!1cRWKfG+PK>j@3QN>>#jbE$$}(mv-2x5+4GnVH1K33|$# z8Oa>>EAS}&W_C4oU7IW5l@&vW3!YHNsXd#{iK48SODFP~zlRaP%yc?&%l<{8^$cn~ z)ek<`-mJ>{^<&wk5OsofwWzj6rd$kaR`{(@`q#~X^x{6u@RJoe1ilZVZl#CYnq-K( z9+)(Wj;g{cY>B&-8nsGB#rCTVG}>+6eG!T5|(i8aIDjAlJEcrWMtjc&N^!wmN3DNX*yh&jXXeVu!)md9gg4G0Wb`ZQxn2 zt)5mA1vPp>9p-PrTmn>S9)ZDNK8`V>I5Z;Rv)xww_rZjo$YyRDy_iLcz{T6xW(8t3M{mBM{y+=5_wPegH zl=CZNywK;7UkJOlz3M~9Mkn{^47}s<&qDBPcWeHB!I%j#W!~ky2p=Bu+|_fK+u^8N z+k61AnlM|2eyS0^eKjPuOzMSAio+V((_-+Y=B1~ECNLAcmf_IJLf_OQJxAu{*eNh4 z8a{|Jkpjw|w~YKvNwoEz6st{z&EKjD=zexq^-d)HI(?JN0#8t0S_}M|PXx_jnM~_H zE{=jDE2dNA{ywNhJ;^ay&GvCLqnHn7nSb@lR`?J#ey>No?oG~sYzAGQxkp31HF9^@ z#h2b>UOA|>a9KgkyLxU=w^G&4dcMQj=ai&oA{+Qq3z+moGZ+?qj&u(9`(J{t`-uQ9 zMj8&{U6Qd0hINyl`f=gcTEu#K?Hey{DvtP5JGZ%PO!($%_fphH7sZOLyI%dMvi(i) zECEmAEPuLLFR+=$s$jqO)IOg9znWMrsA=Akc@{@^FOk0#y&u_%*#3H*zb*UN!M_6% z73?K}C$w4D0(FWB^jH}pWw2_cG2!6JQ!kNBq1mAOt;bVt+PE4CQjic1O@MoN=kZJe 
z!a~TGn8rr4m6?Z=R+}gV7BcAoCLKLnPa}Zuhp+Or?`O}lW#Dta?Yz*YPTJn;Xa5Fb zYxuE{GXF1V-&0E6#)Kiq>fRY!wbyklX=Bg*+A7--byltQ&j`jU>)f)FD!lu_&qc+5dd6tD~xQ%aE}lpeHKMH z@VGz*^Ru-|yD+a&WWGxpvE!8s19&j9nxl1VMf&7FF^XZFp zcTLLHWABp=O{6h%t}lD*g!V)S3Ml*QAB-1JZjD}-8OkNs;yGEQb63-MaN*;Ri-+;% zTzW+X`!Qbue;7)tD8g+wNvE;%a@jt;gBy6WJ3qokgH!XEDwGj_2#jJ{bpG{@TjFqy z^2?UBOs8rz{&$#GIP*Lr8VcLLh<^AnANKw(Z|^r6EO;_MUssCHl511gixqRNEwEj{ zlf_~88E%QHzz^ip&S{uRg&VuYYf4=!unV5<$oiYlY}(R74-na7WhiBpcmFw`m=;Bz z9?&hIp`Kq`Z(E2iM7J#{&37oPn)c;U%;~TIM}GWdh$L!$cS4E>ZQu1n$to=8>(RkC zYT3p)(0g86aSABw{UJLr|~i4;ujK@iwHGKJ*q^g9dR-H;C!LfdV&R7p$W7W)=Nw2>15&`;6g) zR`u|qn6JgZzo>@YlQes0dK2$nx!%N1iuLBN&@X>n1@O#r=@2{KLAJplnx+}kDv1h+586Wm-uxn(Ytn1F@gyLVZHx;cb0m@tZQNn|~zU?)n(XKULssm%!ae zV0R&h&+up0G9O7=0fm4& z_b-Kx2yu(qM!xtqw3@r;Eh$SVkARyy_O+H%z13RL?Sx*x=qh1!k94(?X>Tn z$iAr%zc?^?GhL+kdp*8SzsXr>hNJ3 zNlU(EIb27%ELjK~NO)m&Nvzv{GJ>T{Bjf-ghZhIkhq-~w&*S&*iy+%jZ`1+uHu44L z;==DW?d{w8tS4{N3t@@#TKSFw!Dc&@4Q#fKrZXkB!hp>zs6d={ZOx(m#}r&60>}dS z;ggHskqc<<7@W}QXZ<&?vu$?b#JxaB@zm?p<;ae@xd)31Orf{?cgRt?9-_qeaAg54 zF*y1rymt5ySX)_H1qyDfiHw4gT@Z&pr$ZnoJM%V$8vsazImp#H)={4=Bo8T6SQ^|UIUk_yPAH1u20@jaKi|1 zazpF`zZh?>77oKfpdDjPDdr2SYD;XW!gNl9!m=qN#0z+#4^A!B|ItG@yOB(qF<>TN zg*{rwr**3D-%pVU#Ckc`7JO%Wah-5PEr#1JPA%%H=YkV=Ka-v{A@7--xWQoBog*Ca z{$-I!Uk-g2HHf{2y*?{%1KpkZ=JC}cEDb9+?f&Mp*Q;X)UHIrx*YvKr{G=?>fWG9cgY41N9yeL= zgEL@!B$DM4HHxYHIDASN;sRnj=r33|D`~TY*A^lZ_c|5$qcBFUwN>H>aZREk-B2-l zZ0*4i!BV7$#(c-=4EBX1*G}wfaH!a>1@l8FrRZ1_gQo{Y=)1D|a!fG9r1|w9&kIb5 zPE7K$(;lle{N4y}0TYmBYU*}rZcs_7d%p#S+4|KsJ?Kk9&1IUma1=2x9R49SokG{4 zGB#<8pyQBxG|!EjR|}5jFyCAifE~xQ0;(=a{)qnIrsX59+^1HF+WozQ=!m`*0KYr1 zflmjynVLpRqq*Z)wnpeq9($FLf?)khr0G73JB{Tn=^IK8xlxNawlBy$?C_GQ1;4#O z@+wP2_{T{F&C?$qKF(!2P-epl48_b0!Yp-u3X5&5w}z#msfr8tPCoT^zCj<_wl*CJ5%c2LK^M~=uy>P9O%%#Sc)!JJ z5R(q@FW7?!1*R4+McD@luiL7AFWQk2mSQaHceF+7-@B?42*Nfw0kyw>G9|9m>@rF) zx_)u))k9KmNlx34A301$YCEB$&fKDZ>2v<6F5Ct4$!(kPDSa1i&bhf@Hn@cD3EJwI zXwl2bxsh4*mYH<1T{*&-qVV;e{rNpvlF0GyJ?mROe%DE(QGQfB 
ze6Q>W3r4~@*;>Mnv=@iY)1SW;2#cH^K)SBNK-;cQxUqLf)pleXx~=Sod`qZ-uDgP_ z&?e{uM5W1x`P3eWEquN9ZLtdDaaDrVEj#mFhH~&qUjywDOj`xv|DI*=1{E7^(1BU+ zUK??B$mXP`euRBGQ{@4wN2_bAOkiGRmjH8lJP(JTO-29W5A4nDMZTndVyCj-)6i5j z^$LM=`X{212W7^}kvC;ot!t5SjY+M@4)zX0ezg7q5l6IHom;tuRX9-^d2&5Bn0uUH z^HB*IHTN)DiS$q=U?HN_EI%upZO(Q4v)<7~^1g;>8cBF74L-)NCu}V_<5(?Ierzc4 ziCI!3^Kzf=jn9T|wP^#kDqW^0Xlu!8IAYFKM=|_1eMg>rY98B%EUiu)g|U~ni)SmE z7J&FGM|||n8neZO9z^iE2DP*@oJS4#^pkm!j2Bfyi6eAxVqx zyqgK@@`)?kFj3CDMV&S>db7rTK?b!8PN*JE2#&||QVmu6Vu(Lz_q50NQ6I1;35lI} z!lyPxSTAc$s?aZKy~W@Lqz*!gD#+&Jtwuhb877D9LB~N*o8J_2RAqfllvkbRPo%vp zQoMk$rQ*Ub?Dp9lVZ2B~ypfDI<9O$2rmAu`Z~T+@-XVJJco7N5cXoZRm$1YW{vBKR zo=|VO-_DTMj1!nSC>AUg`$NMzLe!_+D}g1{WI>-xc4?4=VlI!U-V{Cv22S_&K$D5fG$J* zCNM&LWi5`A)N0zIUS3~iDHXo=X=J9AaM))(QjT<)S-wb5>o=1VS%SaH1cqH*F)T6b z&$cLARPxTKb?ILB6xjRKbA3HDC?V$`Hh+zpIV>HR>AOqi$?T~lhr&Hy$%3#>57r8Thl!Ck#{f_<2e;yV-vV=CE9sl+C64iy8 z&DLPMk_{P^T?vq;7&y_w6q;61#K*N6BN5y;zymApGN)Km>=;bB<4zlD=8-y)3fwCz zOZkuxfjh9xq}YQ_B1eg1WqN~s3>WznR8dAq9d*{Mq2AzR#hwB3@c?81N{RmNef6qO z3lpYSi{6oMKi)sFPdp4dyp;mLc=1|~`x-n6xGEg?GU3PO%|$N`$nZ+TAv@#*I+(v1 zbpYedXHAu17ZW_2UV?=%rBlUlV+buR5^s0J|27!R9SQ`|OsuD@b_lC*t`s>2YE(nc zz@-fKL_I3Es4_Tdos{#vpVVSOFGo_Xp=D)0`xi;=^yu~D#V#4*kZgxNjL^L~p)+wa z*Cbipo~!p`W~6(AMVrA@c~*Aab>;5mMqgC*N8jTAgC!jbg=~G7kk~EFj875@&jilZJ?J(A3-aNCyRoQOa?gCcBpbQMQ(*~MVQ^;7$L`Xu&*Uf z+5U!RufJ{l`}-r#)2(K3qu<{xPJnG-cBDSFmB6#SZLn zZy`+$Xb=&cofcL(UHX6nvNOGM!#^JZGEbCMV$ zOk;Q|;9~wcwmY-i8mN)nht>Bg$2ZWjl4`Ox7iFj){n`5A_FMrfPQ#+26)Qjev>(%@ zs;bzxAc{Jz{(_)sdhrV6jWwStviHBQygjLs;GN5F#TO{xTNGiKW2x(E7@h7IL0{oHyCY`)#J6MO5X4nrV!f71fgXTy8YLCpBAIh8&#_&?q1MA9x_ z>Nz%^O=CY}tFRmB(g8u-U^tQ3N8yB)Z+Z)&5j8@|f~+eAh_7yprK6^L2R&1E=Z4nz zx*F=5$cDf1qa~;$6a+zK&blGzBLOs} z*1Nvk;A%NmY&U(!!EqrYN66y-Pm7*63;1ggNi}?jco|$$x!~PvAF>Q69Gp45R2Jb= zt780$C}!hN4Irv@O;nZz7SvZ2Nn^379ss+|`?k=2(Y-R^j5_`9d*ZqWvy<5h)tsAGCrL zh@ZZ!=5A&CfoPt{X0h!F5lnH!2(JvqwwRPnnI1OrR!A+fF#^xX^)q zyNlP9x;-&Jp3vLXYx%lz*XS`{ms1IyKJQ+ZtR4^aCMyEHwAmFyBq>K?o@&{RSZ6#6 
zB-@&Z$&2StAD6^?-dv+po$yyQr=L3(^;p8SL$19hM zd!YetqhXNThNjLs8pEI19B_d84|2=^-J6*tn9c@y5X$U@X4yWdgJQg|$KZGH5YEnuudB7b`|xc? z+6#C&K9KWq@@%hHn3xRWPNqo|(~^oZMt}lTiSF%luDt>t**ZcP$k&VlS#VMG*(qG-m$@${7l@;sP#+)l1t6pzH0{l^5p?39!k4H6eq)W^Eo;(BK(#B5Vu?8{j9Egn*`tGx)mOXLxhmS%m^Ib~ zo>$>jDbTupcH(-%{`m@9F?U~7?;6qJYH-ci>5i)L78G?8Y@omrN(QIhAHsfqG2{b~ zu5qZEBF^k(wg6<%qm@Db<^IpVl^O*sQI;wIXIj3Ru86*32Yk8xgqFBI2?UdU>sY$eAddAFyRi7SD=f7Ga3I8St4o>w>%k`d|4hc&zKYBpYQbs<# zaEVtql6F1}t7goyJZf zZ;H3e5nG0YZt`ZGa_g|HuOM+FrhGx?(iQhFC9Wb0brCzwEFnM4T4ddh^fo2cC7$cbR5t4hIHQo#UI5yZsUZSC|*L#4m7L#GPx$mL{(q z6s+T~CFJBn68neBb~BeMgz{W++IUkdV(qzkwj@`Y%g2=^n=vw;tl@0MW*;(Nn|*ZL<>`&|KX z%R*?EDU(cniz>$(z#C(^CR{bDIBMFNqRsdf+|;4DCTLG! z{?2)B&;1Pm%^xRPrT`Th>aU?NTnkH@M*DN*H=%JyqWPNn4JNTtPUH6(m{J5*C8BSNHxj=$~o^s@ubiF&Yp#3s7x@JRKoAa zTvh@t%xsfbyf{T@Fpc?q!H8$<+-|ZhX@2$jD-RtVBkG z>&4(~X_x35_~x=8dz7j$%?#QbQqJm#Gn|S&QPS})eBU(YuRoSB41Ygk3!AIg7}e-0 z-1tu_R5@}8fR3z*TwpdcQ3(l9A=H8g6g&%ApsxPZD#SxO`G;*5QpxrUM~a$l!caeX zL;E*E0J#o0isugtL(cX$px_wHzokww77cxM0Axf@=9wQhIGa^0EB)3zhyFq+WSe1h z>Xp76uX~>n80H({S4E_{8us-9+n!bNaRP_6<}3G0A56J&Vj!VoF5lNhxFBp-$#b=A zXweuME-Nh@FYV3tu$`+hu~K$`2WKJvtNX{ zo?HUg9yec`LgUG@mV#f>ilOUKVsBAwz_PBm^CbGe+eB&$9ewAX-%SG%AwOqu(vuw3 zJ;H}=w-Aba$`6Xh-^cVB7)j6E% zen4|u&rChCWGY_&p_lrLVfWn;JcP*kkj1NvVt568!Wcc1CTW7PRDt2wmIo;8 znZ%%slH}r|%ISC@rsHqOopbwFP#9I9F*DnQkVW}HW#3QcQM->49Z2($hz~ldo!Dg9 z=C9cE>ck&uBDJg+LZ5G`am7#IP~^f9WDo@-fpX% z;+sqJO*)OOz1OpikRK-rUvj^?GTb2AC@tCaXiZUu9{VM7CRX-sjwXtfmdw2|_c&yR zxwFEihP@3y=K3O>=!pT_-Xm=l-#iosu$`Ia=Bs_~`9&I7sp~N+L{`N-p1y~iC!SGdpFtH3f4J$wmx9rCiL@~h|Bz`JWR}NPeQ~yENo0H_wmuTSz2J#@lf=VO0D0W@2A|2TOTf zj{i)G@lVn9=n1ht4NbU>pc9oW_glrReaAfkZ>%ix1U=TDywm)?-+Zr?C@Z7hDq}-Z z{_fPQw>-kxSO;La44kYuO>+=6QNVouS)8rp(=m}-vR@0X=v0pAk=K%L#?+WU(CnHH zgRR`o5f2vcc>_E*}z? 
zkV6JB&*TIKO?E#}!Ca>+Ru6f!wx8VGCbF=J@3c5APE#}!YHr97bPhZAR0-dW6=KKH zW2fG`WIU2Bi`FNJ9^dlzV4hXmiz>&!z8cgkOkJ z>1wM*H>08jf60fx+5LdBE2$#Eaiuw!@qkNrNg(%8SgFbVZ&bOy+{4v!#2dRLYs*@>AZplVD@lu>YKNs$u0b99{IFVb9wKYIfja|Q1ZO!tDK zoFA|izN?iVmrT#?0?%A9Za=y z-S0eRB`4i$X!M2dR(qx%wT{mcbCFkb9ge$rW(YAddX1?d0!J#*&3$)K*VQBj2`y=9 zkMCyATvek8kdh-YA6&zWArdo~uGns8my%_jRtby#MpY=ZQ35-LlxloSouoQ>N@RpL;QEOJ*WKvF5GUKO3qb z%}^5pBI+IO#o)t_0BdIYx@=1*bzeB6_d<@qSSo|Ye3rxm2#k!a>nMf`+BnhS9T!7d zfmYFmpYJ{sEdq40;C{F8&HrVrZ9bR*>>t5p1;(GD6A*YX5{gEch_b$R3MQY3-zt1q z%VH}0pSj@%iyJjwSXX0!0d$jClx=R}V~H7cl$8Nwzoqg4*C+sOI{sjN<%61S3HH)l zG~ZLO^qbfn$N&ij(3!vbF0v_yE4Y%ZIC0cPDrim%n5QfRN2P{)BOs-UDCZ3Yp9ky$ zHHm?q!%%-zdP%^Fhq^GFKlJX#VTvH zHwDgYVk*8Xf6(uU&snyD+yqhE25tU#_DE5)nHWk3YLw_N8`7b9K`-YBRV->V{l)uf z+6t77@c)L8L$+zmDrADNjI&$3n(CYMSFlc9A(6IS%i`$?nm-PI&UEjE7CZK`{=|lR zZmzqFVx9OVlk#8L1-$vb7E%nvl<}M9IYOtx?Z0_k6Q+C4MfLE&ZKw7`?xE!>yb2`S zafAin>Wa$0<5o$qZ%ZkPrh>5L$7Tr`FqbnM#kB<~>3m)rxFmKZ;}iOK=`WC&%UdvY zyio|rEQzmO;4L42(P6y>9bSw}Q?=+34cKMt2McK6c|HjdNS>jC-r*m^5=&nHWVTs;*1(A17Ik)<3{IP!6wSJg#{Rpq@RlGC{nms)tOPUpEnJoCr0{)5kc%x)VA=wUt2h$K3dm3+U{YO1Ditc$N3&RL)GI>h+p z9B}tc#qh1Sfy`R3wJcNJ8 z({P*(X9-G$e#L5;w;rAicY*I=FvH^XO@vrq zOHsdZz5d)-*g?_znnZlDlZ|h%Q>saC3pYA@as$rii~=IqBWtRb%^S*v8=h!J5JUIg!>sxrbc3mcf-1}pacz1)6u&3ve* z7I!V#oKyePJ1ahJkHNm8jK}9DZ=UTqk^3z<2U{ajhOQ}MXcoDUCS#hL?8kCnx^QdQ z#`HhIYmc7D4K;GAJp&x-F_DkLG&Q*5eqm*>XE*J_PUeQ-ezVZD#}`9XF)+md>?27! 
zv;lO^T_gJ3OkU6IdBcWQi2j>XVh=(%h-iyTC{jutB6XwbZ(W#O#9xM|Iq+_#Z^eOG zEa1Ne&x|1dD29(j-e>>39tKgg!@RApZeHS&%ROgJF?Y{OfIW(5U#i$X^W)|;o_YqP zpu;-;z(FA02^K>ZX*X&hY$3zUuGD6t&1Vx6wRS1uqxjVn_1X)5ycrk%8m}*kD2WB3 z25z=c7OjR&%_%W@N#&*L{#M~J zUd^fX_jbW4YxDS?l@4ASTZr6)0MSC;Zy&qKf|C6`fc$m?%{f(C-8_z78No5$yT zuU5lvD*)=#qQ->=?bTEq^#FbUCE7=*%L*WG_skZ-1AkLdk+qLjLAddzs_1zW2BYe% z$loQS9B6ozDzaT*Aqt2W{!^ILrK{72La5?WpBe~{q}=+ZoAL{R(@FQ$QZf7PDUf`U zUHET6_2?~@H>_G}cG+YXiBSDtdf~m`@nWASPY4T~O}zzsjiU_S?mRdVY~aj(NPwvR z=WChWO4vSk21(P!QzpO!(0GqCcMDe(Y>P#e#q`5{`WxXP#z}2u*Gxn{yTQ-~9+xoB zE4`UJ4@Fo8^(LJiK(8{U%$8nQ9@P&0BgAqCAQ8%^hUO)hp_40smUPk=Dy^~BZ2rZm zQqX*eFke|A?`w9AnBqn=R>sQ2FRhZxhy^~9bsqo9_7!WBmXu@nP!bElWNrbw(_Q-DLtBRx2 z+czRtFd_5SoGkYx+qj6c!xWHYg*4T!2x?p4#^`wEw{8(P{mG_qDO`bczN3uid}82y z>)a?W$@iX}ll?)Db-B`aCeki>t!oKdnL3mQC85ikl^y{NK@A&AYau*}ak`S}wwU-# ziRBJ6V5!?ye5pA&)-?=o2ok=>j{h)f+SRk642i8yOcV%AF9|NDrWG-E9U$wIk}soH z?v3L*8c*;7SdR651JC<%1CW;qUmm+e&w_nx909X}eY+tG?yy-;RUq8J=d|w_BG^w@ z6d&;zb31464yc*Gq=sGLn&%qf4`sj0--C9-loHHWsP&u0&cwDwZ)O!5lJXC~x|Yul zhZ@YBk@Y@}@W^S%Ec&xo{NI2hg82v$N$5YI z*eDL$sNjY9>QUR#!Zxk!RblJE2@69R*@(zj2s1NsCHa~rtT}D?t zpiB{B481Znzt*d|dm562XfUn$CrUzXdLJ&D9_w=u^91|h$c#SWc}&e`10X{3JvbHbPVRYmZj1t703F{L(z;C$p7NQqN5=G)Rs*hb^>)S zp7k#Qy_hQJN^f@naQpkQ>OVeE8e4|!fEHT^>f-EIbOCIe{sN}*i6k=~?nUtIy8f(tKpLY^{=gZFro-e5EJ~ zxaEN4f;DsbI(9Z_9DW0>C|<1AQ6eyV=;l{!2q?cio4#od_W)N=`9zDY$m~ABdt0G0 zFhghfE28+Ugsq5|t0KP!Wk&!!%WyvP77Qhh+kfnY%_B zl=~V+Z_a8L9~H;UthAdzITO8zhWRFAyqn9C;hDII~}(!NelLjrli@<$O66vr@e87BJ*Tuvo)EA z6O?M({_?3hd)8}F4-!6EC|dHi4wXiDH1=OTdolBby@Aj_yB7c{>C`d!0n1d~y;0k- zs!kC%+mM@!m^#PL5T$Q*|4H+c@$l8CLb56}ZT7#y==@BFrj++q%vNV=`DP3%R(TnH1aV%o!Uex@g zm-u)f;a)i5u0|tGL~Z_Iv=+zv2caUMr3{~*b%F+0*ja)xudc1mX~#hU*qFOa5$L{0HQ;RG4Yko@CBc~*T>-+7g0YE3Rr~(S zaXB@*R82ta!S1LP!*s;+oN7CIXjnp~F={Ja@gt6G%NROliUYyN6PJ4o0U^TO8bwW` zxs$i|&|^slt)A2lO2J!l#|b~8Aq`uk7uar3dO+~b;o39*N@9#ukpY^+o8EXC!38R! 
zXML6s>AG(bLnD)k8$%>Z*UsGj<<0yp5xtKT{v+u zGFSy!W-#hW`{RoydJ^gIyACunWxkM0qvKhL@ciY5!-D?+eds1F`F zWh^$5@Tp~m$5%{UhdRJFz-y1uLDe~sU5=s_0055jEfpDcuzwaQHXz_ej3hBaGf48{ zkVkAULYOLCl(Er*jR*chbN*PXsjfRZSPteAwvmXZ?h@~vxo;k;D`k&1+TA^W@!|RU zno&p5{*KcJHF`(q)OVrt#3go(U$@d_|Bz^m8mg)@ZrS8U+If7-Mbw+Nn8g~s7m4b* zoDWBLR{R;TU8G0s9#T!F^dhXduHZ6YUfXJRKYN@e2uXYUkiaF{5oAbViB~c0C;dgJ z5GmOXRR=u|ObDhC3l6k((DGN_JwbJ$9^m;hbO>s8>~Dm!5bheTBeT1bPny~V$?5^a z1Pb*56|&$aM@9Q##D&AJgL0NMc~j*%g1zX;tf6>6?mON~cU>yWbyF?-ODd;Z$w-$M9t z!B?*ui6s9{nLa5ZSkT{oK9+Ljo?@$0nx@-{qPK^x7g@R77dq{{KK4xa5x=-EQ|+eF zI!Vu8hPP}8<5&6`A*VMb>84ad9z>Y!F`X(Fc}zw+-Ilay8|?78uy%|XHz!hT_NL?d z=p_P`X!@q_qus}E3Gt={?dZL3L2;4a)E(vwC}@&O$!Fj7GEX;p(a}aA2*w9T!Yd3J zO!IN{gvgSXMcYWtH4`CQ#4jC^bqr!eGySzBlQN0T)3O^+W-h2eWl#U`p55YzVI`at zO_m`tq_01r8PFAX={_?Pn!^mZS0?>Mjuzl7=*oDG8JqC{9}BMwgn#vw3Y;FfEyk1^ z+&1%B?}fXhtd>QHOuS)Ee53!%_xC1_yh~O!cwK?(C`GL=J|_GfPvk5;Fl ziTMX47XwAbnp-K=B^Pp-+$A~r3Evq}3wcCowO6iYYpl5QcA8SrJNVNrwUjUSeE11G zeHN*_o4S^{(4v8#j{?rj$R#4g7(Ijb@ToaYy95*QVfH@S0rm9fwp>FdW!}lUwTAF~ zE9lof)9m0G3^bwtc^hpbFqY13NO)&xdKWw3{&^%fg%JI*4WzhOI#Y+PrJaMW^WOOZ zF-8yDXhHjuqB7^0sH!&6^&(h!9jdHY-NN&DRthc|UlF!Elx|CUXhvN7N-|K)uyCi* z8{HYC)MhW)Uga!J)w<#3u#yGt9SrYl3Yb=H*V542zBQUwJ@S_HUso6EuLvLujKCV0>6(s=%)`jbb1cyRaP_B9wR5eH=L<(5!jx5sQ6|y{_mE zMU^5%Di=e4Q&;)lVV>(!0|8qsbLj?LsVU+A=opa)adQ8B5-p8QVqW`l34{%41XA&R zI;smW=aqn;<_`JwxWK?F2H-Dc;8#V{YT{8c+k2Lc_h#L+f(E^ zln2z?A}#Xd&-jD3-T{))qJqLjVUG%_r0rOnqAHh-71Oo1)!Y{{afBENx8!xib_ZI6 zj^H=7?=~O)=Eg;r=$P9TUa>qA0xnyV)mwor-u&xUmuVqRu<+g*@c=2+Yn^GjMMJC3 z*}fbpN$9bCi?pXU8Oq&$TBWh2t!{@BNoA=av^j8Q()E|1AV|g14bFgO5I6iJmaMXH zps4c6rNLkY2~LAi+8hchL17IGYDK6HjHg!h^WQtEy^!aB_9&+k*5_^T}j)v zP2U!${^i)Q9o*KaIS0iiG8V49Kwy(>o@M%CJ5=)a>o3-Cxt9Wt!H_pV7tww^u{qPq|TFyocLrV+d5jmn|Aji~TaZ9%oS1{534n*DWyu@8Oh z-S7C}2v>D?ZYRAoJ9vj%>RsiUU#hjkqS4xOnR2CXuk^vV)n}3;gwx=72(`JL-k#gi zlykJA_zvk^+LEoHCpP$O{DD2wdUWD8?FFJt!VJNCKLIv=Z6x+7+^DuVeQs~PGM_uu zfX)fmrWkW3tkv+rNwgy?7iJYNp?+yE?0|baa*!fxQ4~ee@85PN$5JEY+oi)l#Gu}Q 
zmX1L((lI&l&@eZ8PIm`}Gk2rDfh|ht88l;KE_i)15e(4iFK}UdoIPfiG_r8M6SXdK z_7ze3=vUnHQxX$v)^s_@W?q!;UD~gIRfKr8{uWy|f}&tj&{TK4d>j0&F15u|-d~3$ z>k+)(t-oU}^RT=N$?Urnv2M-@uQcI!^ z@QuOVYm9+c{iHS2oc@6`$u|EM!e8gasC4vlOyqB`<`+k-FAAF$?NCYPn=LERf0Yxu zR(H(}nl?dq4gmV>IjvEIZFbY#S-lrCq;!9#^=wDh`(nitke#%4ih%!H zJiI_lBlN#fHZQZ!EeTNp!&j5q|2^?EGZjOaZH2o4(u}oS60>{Re3RR)y{jzqq%%Nr zftNGO^prW&r4fa$cdIRYp1!)y_%%!Ow-*HQw~}WvZpk8h-K_14U6ZXn+b_1I1uVr< zCQDIRpC{|dR)eSaJ^$Hd8h7~kHKs2>^Ovnq9$WMcXlT9OfeOoqj^ImR?0cr`y|p~I z>Hg<73Vt*Pj|lr)v>A>I+d<>p6Dyd{D}HQYA7Yx=3EJi@-6_yaKOh11q5ms#yoj?; zZ~+<=lHQbKsFr3YW}I-r8!>zqqa?Du;`iYSlV70(vFpDrvY{r?^YNu53s3QRxhCt{cp+y37N=b<|yS!USgcs$nl& z3ibu?vm|AgW_YT6rvpWl)2HGkId!WsJ>Y9>@0- z64Dycq0KY%k8SNS&051QchEj}l75bu=J5G^Q~$VLGZy(qqrY#J#K-IlR<|11J?j&6 z9@p`*$FoNB>}xSQbBw>t&7}N|Dc|nGS=4{6aN2trtr_JO6uaSI*59MPx^M`WwQtJB z^AR}{guN~gOV=n!-gSm+%H|HYhl0EuD|nt zdw;V2BR4yJ@w*Ed`>WkIl{OUbV3LhvebVfPn!*Wg7q&b)WE)a2GZw-; z9DxBJ8Td|kuDIeqca{>kCZr#R*ivZKW=KeQ++)|AyYVX|SspKPiv7NbDV-Azxvn3e zrlT0kzIN#C+}68kdW+XHEcfip@gTMf&0<_loQR0yw=G&iI2)zItdHS%1cWhZZ6B*P|vfhiu0l2mdrA zCH@wJ4bG50uTRnC^!+aq7b8HCfEylJ$!d7vD31tLV~^;0^+q}TsR86z z(YufRY)PicJX~P!pOF5Cfh!+epc3IRIW1tk>klJtr4PxEQ}rveQ$W)7A!9)AeHk;C7I`-ZF*6>_{*}b*$-h}m< zu9w@=q>Gje&)E(`7RSa`*F_Z0T)E+UMuz2`Xwz<=vuw#r47gAig?Is$)yWKJo}s01 zMfKeQ8B+V#6(F@k34Y!{i9L*A88e>9tfF@fLLz4>tOZ11GpBj-mC&80TJ! 
zcn3rH60>bPt+2-Hy~6I zFHH_iQ;Y;^fQLSh7W9~qEUoFxlI)7mf1qt6*@|T6w*UP+S4$5_>v}IuRd2Et=gJ0@ zT0(95)gM9=z7IFM3H2U7SkQ9=n;_1pI#Y7tdSm*f?9t37{PUyPH4_l=E1MWBy7K-f zki-olzsyKKPGvZopyoV0FB~U)`4YiRW}4*x_e|#ValvE>{I=5nW9m!bp!i zfB)C(zFzmfaLwa+&Uv5rd7tx~hvy0Kf+Am@;~J(9C9Q(*L;v{>JP%UTmR?r?Iq7A< z)I*!M-xqnmU`P+B6}}OA>Jl@hT4vH;vrW6V907ducM6Weep|8eVev14okdrK&bY->yv+i=Dc4(h)D%yCCn_ zvt~!f?Za<6HvDEpo*cGk|A8f{>fSCm(sRZ=KkR0H_}daLkn;cb;TyK6ZC;PiueGr9 z-9#2`25#m`he!$E`%^>@o}zwypGs|CTu=Fz*cmHK$S44`>e|VdCP>(Z%%O;84D)t7 zM;G2jJ$#mj*UBCX4dJEHrHz}bBQwPSLE--A74N#e->On(97j2SUeFb3Wj7Y>dLQ6ODvRvLSVmm>+$353}MW;k)j535-t_S~)w zD}-^~4_>28x*?(o6!i(U06M9Ba1sN(JX=HJqCM0jCvZ@cI{jb>S}H&8vdQ!BPc_c< z(n*$RGR_->Yj3u_xM!`6V7OKg&Mo!xv;^|Q+hQ+1%fUf}y|5`>A&w{9+c0nNM#Iy! z468wMhy^JP=^O!8c!d;#0jn)`xQZrJ2t!doai^njZv{6~{V^oE8D8vA0HF1jQmoEao#Ftw-R3t%#kN+L3O|- zx^NR}OVV0B=6cQS9Wt|uI0ApDr#%3>|Acsm78#8b+P17RRJ67}l3IJzXIdB7V5DL} z^zBQjV&4raZ1Vbsjz1LonJxg8DIiaZj;iv`0WBt#Jt`|qAf`aZHXP|S6K8O*h> zqwrWQW*Bj@MLO#1B?sYNLGBHH9HHWqv)hSvh@&GQRG<0g!#}os(=NjIS7&-6iz@bM zaF}|4MZ@z1Vv^27eSRDPx+P#U?MUt_|Kde=>*y`ruh@^x7}NgL2SW&`le77W3W&DU zcO50~I(W2k-zfeXSRP^1mj+;T;#~YAFMP*VGODcU%-vNi<9lfuWA_3Vd|vZC-~n8kmIFF4C2-IrI2oK^?U3*Ux_trfS1yssP*zw_4+ zdRb(AIO2)X>>p70hIuQ9WKL+rIk1v3Wf^H8AP*_^f!EI>a8t8cqkr84gB5dI2j;V> zH7;d4CRJmy95*`hY3&JE*+ERItLfY9rs%}fQyWV^{*#EoL<{-xAUSIB_7(lDz)r$l zv$p?A4K+hb-qo#24w8m4TNUDteb}Y7?7aWHwD~6=v@7K%f|q`wqu=KA$sc8!nQX1( z`Q#|vS<78zh|j6;_~K>AU0sgAm8^@mG2rxG6X@9smLvT;ks3<2eW(|;9_bqSE9(ZHGvJw1a^O`?WJ6)p-UuVXfoc`n@ z>jRi|THKZlq-4*THAnbYHDH{DQdY0;+#a%*dCmR)blL z_X%AOy!DXCOyBXZN=%-nU2)U7WYy^*|1)9;Wf*XHfXr+Kgt#%3OAoH+UE`oJSbzv$ z&P3fr#l*fgcS9yIXfS0>)kNkmb91p!moPt#pmgd|`TyXND+7 z-kA0oNLKaJ4|<7@ZMf{~l}7Ztbn6d#oimX)b(jA%xRUUHGLGX1|{w;>Na2YKNzE8y4@9lZU?Kqa_ZTW>W!Gm zcP;AKp9LW=6R|@B+U*d61*$?DF>IV8Q{3crG0%1f;m+4oMrMai?kLZNJOxmU6><4X zX$L8rs-NPIPv-5=eU`u2qJkJ7_yIM^=J<>1CIo5=^$;>M63y8F!2%2NgCw~EG~oW? 
z1cf;6-A&k*9UO4TXh%5IoKki}X41SNWHvKgzgVykorWqKG?_5=rNQ0#=)&FSg6*=f z#p6tvO7CUd>7^WEYK*pDO&$|nbFv5Hr2}czzMSj)1ETM!A-9juUIq>4I+!5M(8s)=oldQ~w;Pg3 z=krISkD-}65Yr+>i%Y22_G@Cxq&Vrs$!6&^F?OD{8o~V=Ivue+#zYflNrSx94bARd zZJwMWE~+_xF)73rdxj{`u+=BSATJmz+;l%&ZCmG#zD2WR0mpZZ*hw4O?|bz{WNX^} z32|}tsJ51ejm`cOZm=52QoM`4oz;)l4vlPZSk2a7z2(uk=ZJVUGX5ShW;8U!*7m}Z zvY3J(hYB~MCLbX99~{E1(M21UpwfX$=^#@rOipv{{o`(VL{RZV7_z-_C)GXCnw*7; z`|?G_CZ{YItiSch&&+V5QQ*c8(=*iTT-;M#t&%_*E5qs!G`UHd*2Y7Yxq|L!hp3$uy;;?hXEaA zi(do-kLQ?ejxFK&2K)#}U1n(Una`o(yb#b+_XCQasq5c1kr0}4v=Nd_zEr<@i(7Y@ z>a>q~`2&Ia3p*Hv`_5;tM?LhC-8N|ZD4iNX`o^OzZ3c13BoQLwvmD073!s2(^gi+x zom@R5xm{&K-!`|tK|1aW+HAR^mmZ2!`TGY6Tp!@_H zg2*AEm9PI~Z{8&JQ?{*9$n@XPQ@b>;qy00RE(<5^o(y^8+8-}ZDoWT5RD@ADdFMp0 zoO`_5vz(@LirO|gCL+M@3q$Q7r-pOun&gFuc-UdlBZQBzKNx+{-s%81G|q-0-*2$A z1?Db1EDhihlV%KoyIi9We;?-9w|m>uP^%V{W!vK9jEXm_)wI?Gc|(Taae*6vI-z|} zzDU|C-TJyJDRD;_cPSt4oRE2Lqe=|1WHA^Is}@t5Bqnjm!%u{{Gh)D~gC%%Mv}$`a zvFD{=Re4Dn-8{Y;f8%uD&?i`k0&-Gb@Kw`?Cn&I#KWKSCab3VrEcN7jFV;E*=b>Vg zpXa@cz9141tY*i(2iVt;TWQ?OdTbIq0P+ASF$&g`TUr)$_35$a@YK)@{zA^CZA@3H zC%K1bdR^Bc#IPaH{;d6tLc$4z{9hq+RNsc#VIuG@0{nN>Xf4bU5@*RpM(ffraFW=ICT zwteKn(0O5NFEk+Da`vL)r7bsZf}tl#yU7F|J;{r>7h_c~H&u~_i@W=S^Bukz%_cJz zCwXMGx`I>*?h-s<#OC_{EIM(GQ8BiyAJS6Km!X{XAWrCd*FZ9@kv$fTBd8nqjCBhA zqzJS1^EgKz3>_(ZX&QduXI0hcI^0E7HU=;{-hCXx0n7$T;Gvqk}my<+7!;kOWcwYV=>BE z20Co`hYC!r#Z@Xs%ib1P94aICsBe6KuMIysTys&$qwdDpRI!7;dftvd^7i(&obhU* z#qCc$^`x}S9;8^^g#pj@gYajC#ZoqDR4@d8<4D&PN$lx-dH7VQ#)vK%l5greXejYk zI=EFGBd%)}w!jc<4x*>I5xQx|dh$RpUpd@UvGNvy2?t}MQ1B52eacmrhCOmp&Z{C+ z$tW)=;e4O}A7Jn<9#Oz{NDuZi*V1=`I8f^Bjb{~~%QWTvX`K?4w` zC*K#@dR-jsU&2ds2O5D;T=7$9_8elokF1X~AVe~=#_D1E{J z(@z0|n{<+a4hF?BYvvvkzpp4TIz04vGI$=`&Mdh+W)vsKethHdwWIsb5o9p_+p?DJ ztywMj(Yow~8|Of>CAwuP8>2gTeYI-6`qi`dHmdnU-O}q4>qV{27RFJi+T`FT93kR% z6?021w8|77$Rcwb%nt!s;0?}NY)4}9zr^M%|JmK5Bs59y*a~i8! 
z3b4iNC?d$Zp4uCX93|@F58X<)9)M#6bblbqvm5{9JgNOBLjT~CVSLc)g#m6#hMiRX zCrtUO2-N%ikW)q$t$QYtDsuEB@~B3cEf-I5wlh1x&RpRDrD4Ca7Jmg}%CoVRpLrxS zy6*gCfZ;Dr{I_Sf6(4Dtb{m@Xe?=WE!4@YI$vmME)d1nMIQCq=MMhHowKH{n2*m5z zuVyvP@m>C~dNv*&X=)!;+`La9ARl`j1m_+}a~G+t6lKa4*cLl@U=%#EnwWGfRKW}jygs;4*^&HlvY>+jRahYk z(|h>{F_92yiu-Mn8zV~bIvvFp>%HFa#;M-TC#t643A0&*>lO%SOy8X9nMNYrIyVln;|52j&gy6tX# zH%N{ml_&X*>9ssuxPkVEUVGS@A95ucwHuX$-v~CU5%&8+CxKpw5IM~y^A9*YxMPnu zM~V^muRJt9(UNQs#=`f@RJbFRvV#%#O9vpn>&(e9$rslZCM|GrQNXoLvq6}G)QcYF z{Xkc?rSUR+D9MA6|J@5ov{2wy0-<_PNd9puu>FStzqn>aH933e9riVe+u^a?!lmbn ziQA#E+{gPv9rQ%k_E<5(($|qs=9GT?AI5)G2Uvo?p{&6-1Wg{Bb~^K2ne2brfi~gw zG=6~nxKdEnv8>^6$I(skyly}bE-_u`$*0>cj%-7m%(PW-&#k(r2@K24`puSkH)I}Z zEW4A(N+7!eKnEd@#suR;EN44>(9M<_lO+`SM+^GBhV5%@;>an!PmorTXiZZG7>yOs z%30QM0TSVd^5{U4 z)^zs2l<6J>x`OZ&lr8bD`k{MJy9Ph^k60(91a5x*E?4-V9r%P{rc&88Ic$y;4C0(j zG-j{c#-qPMxTk_?L0)#(SSfrx`7OJoO1|z1Wu&e?xJddPYn11(sK98vtwJi(GVYx0 z^BE8*)add;oj9(iZ{N@dv9}r%9ss>AklFz|3%D7QKZ6Jx0+UG2Ptar55_^*3nzFyZ zqSC2F_RyJ3DtgY1#_XOKFb1TyvjH#m5JBr3f|I9wK~<1!xbs&tq=&EP`7KKbLfk7N z)Dj+}9gA^2^TUMf|5iZXo;G3sG#;uZm^O{B&F!NHE6-+<>)|sH`w1WG14oDP&Zp@1 zJ3{ljh+@xdAMA}U&ZR6+E2AQ4-JAhq@ni`*lrK$`u~YdFG2{ahde&`k@O1YV1);b z$o?QmTS_LPKu=UZw1IKT?RpES@U;$5_c(jx68?!x*rRVi8A(4HPAc8XvA{Hl1CuQZ zK{Cx4waKXv3|mM~ywM7p#2&%#hR=x@eNCJ+R;%qO;o>l5v1Ne*EmSMI_>YEIjqE8C zz#0((Pb6TC1VsaT^9}PG6;Sw>HLl@asOK}2UzU(Gd%7nOgo7bY;wEJ2;TT%f+eQ$b zi0q!$heFmHWdchhNZdlnC1!4EvBsf$*#^I)*g~&1M<^Jl@?2VRrBgr5X2Tdsuq*!n z(e{N@5ZYgC)}vPH2R|CnOnLH2PxJxknpw;MsA*P86U_b;n+Z_!6ayP0P+++MbUHDc zJQE+|Sm%2MTCXydS?8f{{2d{ZINR5>Ji?w0W&A5?O4BXRH5qAmvuPAXfxu z2|ojGg^jUN9p=9Scl8&%%L;jhUEEGIQ)KW{2jEF5f=xQ5lY2zVeZw#xSJ+3i9B^iz z;a=`J;SC^0v4wtF(faI-bu~!Wg`(=D4_w$qP4s>?x;H~?NrQFv4bvKQ!u-t`MBVP< zutHCL)2cXJxk31btldVa-i96F7>Am@rACTQ@+}TMnHp0*Cz@rn=Vj1aSfDE&-OmT zkf^(6m(zodnrZ8VUrmU7Hy{$~vmPLIE3=Aeyj4Ivz7? 
zP3-io@ci)c-y>_;LijB^JraIZK?r0tIu(L3F|^#3U0fp1IBZllW%W00-N!}UvyU{xD-QDMQFck1x{DYJkCyx9Pe@Cr+ z-6^X&(nIxRVp6ZBh31!_|m4y8v61}eis$RUVuZYC?7S`ys34BTDkGTvdGpChRdox zMBR|zgcyOhMbHi^74~1N3cGp#DuT+y8IxBsOR6(yT{_D8U^8 z>H~w)oCL;00=gH!yAqW{ZExcB&%~M9KaDxEgk_6r3w{4H_9sjtnDb6w#8KkU-B_I+ z6rzArSxrmbThH^#`7)+*n-5Vdw5zM7q6+wB)_h42Z4V~5h?6x2t3!9o#BRYBspEYe z*B))1D&E;jcC#Ibu2ps3s&!VpvGH`*r`z?mueLis3Z_-W4#!UXvDwZs#_yp_dA$~% z9e?(&{GQKu4YEs}u!+}e(kep*`O!(DL|6z$w|Hj!C@+iHOXNJ!7&*iWd{{bhj;t|r zWv)%qLXAWoLTNL)+4ihlY1*RKddzPz!hvo0k~;!$ulUX{)j0e|r(M+1=2ajWrF>QI zh$Vn0-klX=SlB|LY;mgFmDRnCDapF926T~*F(Q=4$O7bvdlShS!q>&3QYzP4K5q8v zD0resO- zuTM~EpOpKpLh$?p#BA|GKJ!Uw(}E1gjP7?(dWMVMg2L4cY)N@OO%E}?Av?ELx)<_ z1fqikgYt0%wf}GTW`FY&4tUW0%r(Y0DI}MP*-H@&k1~hV!*@xLe;tRcdr{<1KDPco z1qF+|`-`_xlwe)8VQ;DdIjB;o{0OeqpJg){_y9uz6-vtD!e^agm*?lpJDgBj=PC+4 z@9jNtWlV>`?{)I*;a7vT|C$y%s$7Z1$e<3E;%sE21iA=s@9b6a)0oMjO}bw)jBNVeB%ECKf;gwZdxtAiL~XxQf~~J*NKppM2HdAZO6^}{PSzg#PHTO_JYy4y8=SCdL>!DC$9gv=Xwo~YHuSS;p(Xm7R^ zIsByd@|HGjLhOdJicR8Qy*9j(_;-uv#tgqPYteKD@{ycLKqswR% z!*k2=w7^LWcO4gt>1$khR(sd`i`noK?4T(I)={^xpOK~l6#vJja2PTd!&ja2szZ_- zRLsvJl0|Bt)~xPl^Vl6Rv3`=CN_M$=OLaU@FsQLMcNdir6El4)@7TGp#g`U_2J^{q zNh|_hwW0N{2Cb>A=C?|@Jxe@9h`SXHFIEmDG8y14BOu$VLy@u6M}g6;5KbmD)Ru(E z<>Ek8RmAC~P0b<$*|3J$5@P)123jCKyi%KTmrtiOLc=4?GJoq)C9ldNwFJq76kK@y zN$gTw_({XQulp{P9~Zd82XhFmYcxg{FHH>=nYf-lf3X@~MgL3S^@oIPGO^d3@jZL^ zKZ04n;PvD~T*CnKMAlIVFEQu$B6hL1QJP?F+UNNRFb_EnB~o-LeQ(>1rz*@Z#O6eo z&nd^x?A$#$62ZyEU%C14Tbw41en}>5RZ#w`z=OvM$V;Ly&gN^d@%7@Z)QO!w$7DR@ zbzpcBj`ZEzft4#jTb%tQa3BYJEzA3@n8x^%&!|lzANy&3ROy0uA1bJ5e)UKaL(pAj zXSL^i6ZnMJ>&XjqAAaj0?+`jO0))K=*C2dZ?(9ZgULGY?d+F}be}H0WD&>94#H*p(|CF#b>dvA z^%L6RZ<5IM``_9oRI80>f|?tVeP@}66n*J4?=9+lxOQ}Ek4t{f7oteye|R!{pb&hQ zNb4I67QmJ^C2B?csX699DTVGohAc6v0+f#Kd5Yc|m*+QcpN{1p$eaK8!Nb(kIWWYB zU-4Y;#k7z$GJUc?&X*@V)%hJZ+HiWcdT zpDXLcG3HUPTR(P(h>XM8wSUWRBgzI2mT_Zn1B+w5>HRQuqWapy zcvAam;s+o&a3%(fH$?K(i_-b=Ql?(tWw;&ZlK;ejDabX|!J8rkPrwfHdp&fI1Itrv zNhsRyAVyT9qZR`@S{6w#AO}22W4{|CHcN6O(yuj(I`8@%d|5!p+fMrzFB`QQ;!(wi 
zS;@3r;63z>|5{tFrsoOqE{3vM$v8ys)TlP>n>N2H(P&5o!8r^!CwNir$G@q0jvJ#KeV)XYX21 zr!T3pl@|{b>lS;=@}hLrNzv;ma7~fKIt^Q7hPbk@g?#I2N0>V_r@-wzGm%Tx=$I75 zkv_Wr5y5MP*9&CuwgSd~L%jKy0L`R?9Qa}0=R0X<0-CLloBhZ#a>ibYXs`FKyyO^Y zpG|N(m#BUF>SrC_vq9%Xn-61_c4m6mgSF_O7sbBLMlmeUiTb!pj9$rXF2%Lh9|jHb z|5h`93l410OP;$0*b}caS1|6zb&=~51Uux&BK6QE8{o*24_RkwFs|FkeAqyn@OCRu zNs#n|4#0@En3zLi_`qEKi3QwH(R?(T))JRbqDc7VOgeZ^-O8)!)L9~D_ENt(3I-Vs zbQKX3y%g#!%XJ45TUb0dC-!}>aN3-%@_(;EE9G}`<^>1d)zoCLewN|0~>20hRg zhaYC91I>ZfU#s3H)TVXfL0MKEV#K+<#@yYx*mQjE*}MJ3=F(XK>?Pn(tCn!cfu+Ww zU_HKt9&yI`zT1n&j-s|>iXC6A_Y*( z^U1!9#95+fu<#L8j^B^XJ23dyZmFj5z5nvLTFE&wME8m(svq@)`nBh$c<@tgVq%>4 zv6_e9=J$l4VJUKWByqVNg}PPBi0Lbx50{yB>NBhqz6efb{x=RS$IC$!a;7o5ffe|Q zCDW=aw=dW%TE5<3Y>JKOC-N%;7+V9AhmR9zl zn4dYf=(pF0mwY3J&?3BxdorC{@Nk49mx| zA<9v3FSt9qYK(}zB)KHXs%MstrCUgm0*RDW`%)9^0^^SDxcOsOhA3Q4__Kdkf<&b6 z%eBQO8;|YC4_g(3MyhdrcUCN>?pqjW>NQM1>il@Xxozcz*s4uh9V6go8+V@2#V2S(`t5vOJco;FjGV4>R-{#h^|DV&b#y z;}uL7F6fdge<_n#-94FBQi^%46 zd+bkyQM5%SS$y3ap8puc9dvD%^ET8b)bk3Fb0C?b9`Go8W^VLg%xj9DM$!8>s@)ZC ze*BEJ;oEoZ_+XyN#w!_W1wYOQ3Bcsez8ffQSPI=09-V61f`m$Zk1QHIe12O99}*YX z6o5U?Tul=BXSxj%Dl6>8l5c7-mKJ$RoM__qZVw^Wbm1UW4vM7jQqDcQ_`7#+CNqV; ziSp@=;wDFy7dRH%@sgO&ZB*;#@ROrV!?&wKU`{`qSJ0zP^!N?wC~@)a-65#cR9te~ z3J%v)X^{rcqlY)JO13apc?DnMEq2Y{sItTh>!C5w@oQGR{A)kjMhaLS=k>W>G2UV4 z{3pE>Roo;GLqC)8_%(FcDlp-0Jrw#wrw7l?~^{Z<9MM*qZVIq zQBo%Dumr;W%d&uH@tLhK?)i`JiR3n<>2$mkqBq?S;CCH2`IxkaDp0*6T&7+=R@$`1 z@z7@!oJ?;PP9>Ah--z%_3^hX9qT$NJIlNo!`ifiV4D7fM=K)0peD2X^iU9lmYT0V` z-GbVk!D5>&qhbzCWgaaRAIjB1%V~A_swrfLR6lyXW7*r9UY#Dt=WZzTGeN?Q zXHP+TG&<{`v)8iBe0fW^xRKzZx=J-qzYf3Qt+S_ZQ6tVEX*O91dEyv0oBVbe>@!$p z$&P&BBHvA~x60^?8<3MQBnKgsoBVs|?`HgajDjMkr0{-IUv8I-6M^)D2W<6#TY&oM zTi=zWVSJ8&#(2r<`$=c;y9o<}(BE`}&bTd4(1J)!>?Q3@LrYOThdaRefW1eW9NQh``u1*aqE|~9<=dGI;qvPZ zEa3Pj#{I5ZtmyKZh@5f!@m{%cw-&P zFdA{I3GZXK_3U8;0F#-%l0ZO4-m7|6Bb^ZQ(Qs^Ui=dcrAgRWN0C3td9uWfVJmAk6 z-&NsXEHZ&b(`48gg11GKljcE6gh03TA51D?X#PJMBxW zvd`~v<4i)GtX99#;LS8oibmJ8GWL?m>;d{lA?0-lBRBAcG{wZa-raMaY_b{z2J{}D 
zZl^M}qaM)mv({X_i#_s0!JQKdWgSlkbVDM)R!%mT4EVO{y8QGgPU5PdNKk&UYYlVaq5t%CyOyM zfX}SEr9=vp-_Cy^pRTscTNTG#S2{Z14^OxJZ)X@80}#4r-am&ILQ;~wbQwrpx&-Aw?x`PBI>}l zIVKj|Dw}1rZF!3!20`GUl_c_tODQAU2lL61RdNiU0&!q1Y6$Xa(6H}#Xm->ef#V4m zLr@0q3K?-yZL66HDr4OjVQtsBCew$jwQe_0lpXMZl@W3q$y%oM0jU@$ITtg)@e)(sI ze#JpP8?=(o2JVn1*f~Sb(VB3M6mhVD_aJncI6^LfeFg-a?B-;4;)WLUXKCHFf96z! zOU`Z%c!H0UVO>~fcs>D37C|;HOv zQxyGpluB_?2mfx%Cr6sBuL@zkFRH}#Y9LxU4_kuCltMO!bP4*ssx;w*N?f61Fy;j{ zB35qJ6xkOT?AT0|K4sLA%-q9>0!a>W6?|=+7Ud1Tq%uI%MuL?8Buwyj%lwX;AMBS+ z-=4&Y3RuWS*=YwI^_*+tib7c)jy6!!)riwT3=`L|3bEP?;_m9>_q~~J*b6TZ3cj6d zwSB@ziE{)b5a7|BTH)`-t8+}!zwyGx1c*Umi!9|*{aPCMelICaj}UnlzwK>TKXFN) zRZs6NWjw+O-Ch4%zPlzXMsc)szo}X8KdT!>qYk{G3cPK*k^5Pri*r`b)l~P!VJ1@E zh2>NrC*;2!QKzm7cYKILPH3gB(#pb-Gx8x6*s+L%=Xw9Y-Rhjoe_?f6$TH60Myxo) z)Vk+~*RA=b?0OcjO;MuQ;!_E+R?D5B-1AAG%DROA6|ESHqjj{S_m)*M@rnnm8ndc)4_V7%|)%p)p(yU)P>@2sDfHfoV$eq zDwSIX9;^Y2QZ0s-S=LK4#*LqvAluDZVxXy@OI_Om1l%XgdTL!MvpK)OQUQUw3g!tf zJMHpZlODu?#hP|)Z z?B8N`n7#|GoMqA@y_6t$J7Q{IidI)h$dOK4qk9+_TSYV3zCwZhYKZLi3uwf-@(XW?_*jUkSNK5M+=6}x394kA964hj$dYUmcoB22`Sy*RK*r>k!q4OG*xJXn zLxO!?zj4Z*obN<(jC3(>WQg_N;;>n--?*tz41XoXq{o5tk|$P4e_f$TmdES8%>cEoh5iKlFp)aN{)P6K{&f%-vWXFl9?#&>!kb zrLQ)S-trb|d1x_Ld(+?ED7(sdY)0R-f;~}{*gCvMSBYGa%wFEzj9APuzTeXR#U_7T zqUK*gY_%`j0AhHTbH))SjFmJQUJ>CD=(B)aJ4}1l0$X4TC!wlk&ux$;uQmX#$=wws zKLO`PuRDwdS28K0AK4A8grmopxR-}_+qNwTJ?*l$FZmfCOj``4Hzki5X(wD> z>+W~(*lrSQD>X2Tu49{350e!YHs=JaZ9AdD=ZUCQ3ujhnRsho1 zl-5ZJ-{yC2XgdB_a^TSX$5zO!sf;Ck>z#)R9$$Ssqp$S(i4k1h+*QbCL~fKY*XpVU zjq~BxrUE9+X$8FhYrEaca6)!P;8V`VOS?G)V*Gba@_|CFl}eLDD~4dVk%}V>a|Og9#WCNDepA zZ@Fy!K_}O2i=}}UQ40(c#h&Y!zI%WnFQEIt9ec3gSW-hyk`BSFrCBA5$D<7g9pZ*>*TD z^;tJ;?fP_Jeq|Z5DdIwuN-%a~H!+C>t12d=^nYMeom8$lN&2Tz|%9U;99CaCTBAcYB5(FW*Yzx12pY zg%|DnuxUgK$j!?o5^eU!y*-y*%Qg&qzN~Y$6Nk%H4E>de)$>)I6n|af93=d1E!5*Y za3U+*hMoQz%*fdf_x}gdw zUAElxJ3$=5#iM)b(wvj#OY5giohlDQ1Y-nL^ZHP{ip<*6{qZtvQGpzMz0e61G#DU% zyovQHBH4!P-v$g*c*=Qh>2LQfLByG{o~_KY!IR>wqJ8UJ#)q=Ji#H#>ZIQ^Ob&kq|fS`u7}|p 
zb{A-p&-}&SHT3*i(v>-}yfCR_Xj1QT+`f<9y&&3i6Ju6NY~!BS#sq zTG+u1D+LQhg7s0RYw5)B2`DX$F35+daM$$DXRRf^I4$;_WSJ0jK0Dkt1AU!$oJ^ZN zQ~>2;oHL$zW4~$T#(xA-66X7`TR|=gjFb?HeZ#(QO^q<}8t-7%@)aY&`qE2oLD$3@ zadu;&3}J%W&CK^_vYUhnhTH8~Rq>1^1Bh&s_k>{p|J)5VK;%ec7%|ay8FS!BpJJ6$ ze(yd`2;_*2caN<7)*Uz5E5|lLmA~FPW`V7pzy z)#e=fZWx^>D@>snOvyWy-D8S-iq78SE8ak3WK&mUb2AFABb*P&&e`nQbY?Uo?HAgM z%P{EK_cI+=V%2va0Z#{z9UGE!GSxIX-aAzn;+FI2<%2`_4m-$Lg!TD#4GiduY8C*9T5&ehqm$|$cT zHWDg*;?50R-EsVR{#<;{s?ic_qSL>Yl=ok1I9vt^rM!p zeDmz-*NzYtqHrF;1!r55ZEum%BtTl6XWvmQaVg_Q?U!5a?C$Z1g!HCVE;3c$iioPW z`tMA59C8XOlSW=+?a-iV4?#yZa3#w{FlG}%LC@%`h=DNCIrBF<`HDN{_hLch+bA*n z8t9fr>sv_}K)j@ku)e!=n$M2>YrJE`pHy7=@QQuFQ1>xlpdLq?p^^k*A_2|&1C!EN zF8XCpWI*$yR#N+SqJb<*P83m!09?#+m@){gV6#>vt7cmw)Ry24i1{7bEVd#C>TAZR zU)+qF%v}xC4#!h&Q3Cc4mVkPUV4m{pUO2fd@ zh&V{Oq0oa^?E7@6TdVoHrJ(sD9y;;yuv`O!-(YaXyW%qQ44|aaem^QJU_(F?f+3w zj5Qy@QwTPvbAj4qDil;iujoOis&`C&C$bL`A73>HzoUS9P5r9DcbqTHIG;V?vxMOU z0^=wzc1p*&@N0%t^QA1e*iH@WdoW3DjU#O%$38XD$0-Chm$ zC4a+8m2#R70P`Sb+J%+t<0bnNGdHC0UcVfv+A(Bd%o zL!MmjiK;AQ6rZ3a<|;q9mHAm?#*sElyQA1ygP#d_`kErZs`+EudZHP`BE*cg?kaE& zZnwDrIq&Ny7~0`$0v4iha-2i=RP~6xpxngBrb$rD;nl>8E1avR47Texzx)PP?iPFA z)>l9P`?!6G;qWGgg$Lv^c932oI73KxggK6L-whxGS6QR|UT>GerZP|^iul?fkGXN& zW#>E;21%^!e_sdW;5w#8L4u5|TxmAQ5h~B3QzWm4G(J6sETj3Km^Js|r4tyfaa5XY zXF_ER64Xg^ig1MW=uJ+SUKU|Xyg34ljH&-5k^yOkMKThs;1^xGMVX>DeUpJgh($#% zYz|w%p|l4_x$E-kgig<=3r)No(MnKjsnI5tnrn8ogQ!LRdeeb*`V?k)%RaVdm3kbC zEbu>bzO!bw@T$;d4JcIMIt;xWKI*T?CC$z7{93;srAlM`e$=^R#k|GN`JMHd@r(>5 z$Nx%E=G4)}RdCG1kC}7C!JBZ=C{Vs1Dv9!(BwTU}-K=pO`zcB%LsZ^%pM_8qYR%=% zMDHYl^re){DKA6q+0oy6pv$FIyoMw=jt8pDr=TQCuw>M&>t(=y@;Tr0w38XS}tI^H~$B~74iz9-^4K=kbdB9O5 zfD(S)eSf*m`fJYN^(xb6 z;Cz2q*sN!U<7{Al6&x5N#-x;Sy$G^jH_p{_TfouBJpW4f4i>@$&u>t%96GQHYN!_k zrX2z;{)j@6xdG!(VH6)@yU-4o@TO|cIF=>6oQfS)$5wRdE^L!-L*!6fuW=lX;7fUL zcqs8ymPfMc3%_v50b%9rzI_ssOTG7!gYsc5+kWv1#iA>Lh$2xS1@>d;BNI>lr7CvO z;nJ)UBrs`xlt0{t$0!YW7gF>;}6&><>6A%ktU0W10Stp)>iW=^= zxNa|z&%Rf%l*kdMuNNv2o)u<9AsLLp#R3GCLGomC 
z2CdqbQ`pNoBU9W6@uzIc|Ik^VfG7XZ8u`axC@foBibSa)BvjVdubJ8Lv>MtmI=?0C z&Rk2DmHs3?n{#iipQcAmtgf6+97Jjw!)8UxuQV-XEJ4n@Lxy_xdz&IbbvJaNFG(M89wI9_dBWa^m^A12Joh z3)$5z$`Oj&w#?ncpOTy}P>IA_`6VSDWWRF*T%MA<7eJJ#0q~0&ky(}I6Mjt6o&cix;9G!)D`7Bx=~~(j?9O+=>v4)z32m#V$JK zcM7ho6@;qWpIm7S3j;Lp^qXSmG&;frnr25VdfP`~wAxv&E3fUGSot zeVcXWC;|qyul~(o858eDoQJGXeD%Yx?9Me`_8)g$oOyQ%o3`G|iRHlAjxJw6(DjA* zGcNwva7Kv4X8{la$<`~%C~|ZekU8AfGpTy1-Z8f4KM%7MCIr_24X9V;Sb&>nLM`a3 z+yIz)C$BB>qXl7i^hWb4KD&aJH53YHi+H!L!`IM9@g)ahg@cP1cOeQ&sMJ7T>MB>i zoB&$V+;7}S7oWHb>mi=9AaLu3Vw9TRef?AJOFL;u-8ouLNDPjK9_aybi3>VQ1}58K zL1BDLx?B$w8OJlyieTa8ut#O6Xy~UveR707q5|+RTCU<6g-(LD?popzCk0F+dB~;R z6n`c78n^TjypfIita3U;-a?6BU1Ksi$iCl2{Q2xwMR?dM(N$2ChA?S9Isu?_Q|iYUy!k|I}^r`~Lj2COP)jyCuaq`VH%>x;^^k(#}_-9QAkQ zJ|X#XUYBp5FT~_39q|an+Tz*o_*`{w?oh)@V(UJ5QFNkAa7NDWMf9Rycv|&ey(wcE zwCi-+-})C-=ckT=qf?XEWpB z51m8`b*MN=Q^t1tJ4`qFo6!oI{Q@+Y9B%cai~W5 zhWNHV=&l z9PSHBQLp`M4?p<(35=Sa0b^dgE!8k*N1rR+Hs&Q@Yq?JC0l67ZVE?c%O({f5wQzR@~PoE+wXGS{q~3IjS**3B!c z*%1D2XTq-e&0$3wOTT_Pbj(rt5Ur&->q({_i=iB}MK#P0q}nzy%STrX8+oJF_=+Yv z94X{Znr1|JJF{}&7sG&uZ>_P3Z2!amJPVSN|3P8e<<36n*{oggXm6Gv6%(=X=& zUoyA?!FL zQ3tT?ZA#HT@BHKjGF#+U=cFZy|K{~SLg%MUQ4F}M0lc5_O2{2HUwY9=A1&_ac0Xlh z_yjv>pKK!!!2@PBAUgm%|5B?q7E9QsjcYAbHmEL-%zxwz=brGnBB=&vV38>ya9u5Y z%W%H61z1d=2dA=Iyjb3*(tw!$-27>JBIOTer>ooWW`l%J_7d;=>e7TL=_@H1jcrPy zi@o`f=buk6j)$8&2rx~s*Ojtc#ZJ1=7)i>!Sa>da3b`@<5`e)IWLIOLY9HVOc^i^+tQl!+`VFM@#rA<4+#E&nYxLz>q)xFNti*r`eQhcBgxi6JZ7uyC~ zVR~f~?7Q7;;Rk$`O>=s0b5FrU1B$@5|wHu@q;$6F^moiq2vO%X9laE!WEO#lWXQS7~vOP8VIMe#7>_@LL@M_vj2TRKH zbn!3SMnOVy>5LSs!rJl*nEie)Lfo#4@iwI}sO^;IHsYmPE}9^JLoRX|?}}2I;VU^2 z%2+8oUWB~x(aCKXDWPw$j46lWj!u!m0eY;De>#v}T+?8|ijFf~QvQ zf`QN;?LCG%act8+)+LU$kNhzCw~hNOa$&P3`%zmMFUkJ_&TBbp(@4*=I~GDGOXxYb zGK+{^yZ*wBP_V#L0K9_`<`laHU|7f07(DipZic=Fu;)RHj=r)?>P6!o21E70Quf^y_)c z)ps9!(Rk+g4yJ_Q2yCcEfCUwq^ZpVf?0>U!yS}qqCv^6LO<|RD=xw=FxYUviUAhmI zHn1*xGm>N|@6{Wk#&>!z^WPE75N){QC|F)X+uXhGSk^a`557QLkn)Y*>3<~9ZDqFx z9E!{(u;1m$D&xklQuRlcr7rVS7SivCY0Uqij4k1QdjE;fNUVOuf97EVbi@U2MJB@| 
zEBdBx1TLn{T=y(&4i}<8wt}n`zER8ZVzeNZe!~w{>=1qN3QCJOXew-zNw2$~DNQT$ zEtY*bc<*PX2l0!O=jW}RrFatf5p;?-eH^q-f%#7)R$URqB>Hs>0!#n{qY)ejMvVNM z!Fx1_rKhx)wLg8F=4dl(nILW;OM%FvUK=a*tVp#WoGDVuo-JYiw?p}VcSr^?_H&xy z{p-}!0svKvd;#=Ib` z!dKnyogqf0!(^6;mm-;(NTh^KF^9+{4H9b^xmeb3a-^AekNx@Bx#Egn{BCUUX2H+S z0WVZv?R}r^Xd~8oc+%`VvX(KQ=7|sx&)?16Wo?b;@k99# zqYrQ=!$9S|LWr?wzwn0#W9`Jp~#RRI?` zKvZQmqwc8VqTHZ-2v0-hqYaqp2pKnD188D8bJ6Nb?2$3=-qVP;h4-up|qD7)*D3X0GG$*G>q!g8kku4;$ zL`nUw&(!(;zR&A<{)Fe{yv{kVaXN1Iecjjfe!s8lzAn`6?7r6-iajJ>Ym)3^D#)C0 zlz3BdeCTfKETpSqh@hiq)bmJh2~wP)rsbAQVpLU(uFC(|JQ4F;D8#;O<3a;?>y#Wa zUY|3OJ)j?pzId_Ykdw>Z(0m;g{O*$5UytvKkDVKQ^w}~0x~1b(9nEea++lA?IYH1DlaBx5pMwjhDc>|dOS`-I{Rtq4=A@}%HT ze_ScuW5t$UHwzT7dD)x(_&(RF2x)N@J+Dnaa+S4jznxgFo|;y7>l-C@5?@3YP%Tvm zQ_@ebnSONse(c7epqHU!8ofL%QP*+EbmvDHWE%f^@u$LlFFBl)5(J)m-kENiPv35P z$a&W1o#YcPX_a|56{Cv;DN=ALHo#5_cIxh^F@32O<#}p4j&mMMH%t^;l_Ukf`5Tw- ze%X=T_e~es#zL;2c+%*Hlvjyi5URIQcftGfvFE=lNlZ@kYh1qwQ=oVDBY!S^MPhXq z)7)1yfOM|lwaLHGAf56h=?tTbNRy*Ydmi5CG#*4w16_zsgfq%*$vty)&7UE`@nuf; z`R+T1qtCqu)BwxMsr$VR#%&Yd86=01d-cKYDbifWGLi4$IZ{!&j(={1g#$ASJp5At zRf{svP@U+;wbP#D_T>67>B>h=CL7)Sz*u(-|01l5iY-D_*u51uH62@BY!q1JhYN)h z`O@v+At=6TNm6KSFDMAUDLTSV|MyYn0ID?)-FoNZVv&EhFYWsDmI8^9orV=MRC0k9_NoK|@1A_R!AMC_Pn^S#awgriFWQPw-z_Gad`&0$*MedLp> z`m=BM;OA)+Qwv*}RXupH@ARK}X^#@7)(ptK&VZ?S-)FuuHwPrn4W$oxt^D|;-=Muj z`RVPn4QiQzQNPedF`f-tH@Vv zvFzWSdd1wrG?;1Hb`rUlbL_v2p&q!eF-~^-UGe5jwZj2aE)5PHKBK)H?@!P}Se7q= zmqV^+khT~yDeRliHM#W?zhnmjWi`QxJiv5E<7Tn?z!DNydqHjUS2Zl58qc11j~>hWgSrBQbf675SWVzI`wOxLe18tSSJg zB1^5yO+N`(t$1z=EW2eA#fsEHVONru2~!$uaWH&FzAU(^Qj)VF(O;TQ6XGmvrMCxdYfkY)9DRw1UiBfySfJ*&6_+8#ezEjp>3?h1(M_ITuuseobdwhlzf@hJRoAnxd+ne>0i#v^)<(*D5f_x2f`Xv7bB0#0kt$0A-r^t_DDJZ+| z+r;)aU!k!VUeSzocvZoWeTIA!&e3FMlx!yaP{+Yu$ z_x<`xlV7T^z5r=a)A93lcjW^)sKn%Uny3`>9eccHM>Xe*eIF~R=y1+w&<7VTSLI*W zTwijSzNB#a*pxe5M9xIc(q%0h7G$P-9MvNIfMx~A;KBvwCBVB`{81)UN{v2br4AAfl2m5#~2+qxvLA#-01F z?tHSNHo&~&J{6zq{(y@g0YY8n4HT&2czh>CHvE%r1hjK##c2srcZiuvun-~#7rgtd z{3!P+#yiP!^Jm?Kik+j+8iKwyFWBxk2|_XM 
zeE~DizI`8c=;l8L4#$J0ayGlqL;@E+J2&z^vJhXl1GuH^r;&5grY9H(gGPSt1AU0g zVcxssprBb`l%dA@u+&uC^F*$|Z=hrui??h*;&9CNe%Xjbo>p#kEWQ>tOYZYX8wx{M zzS?{s4NLS`v-~9pb!;`Smog%Fy67y_dXjw=W{wU{qvf&#J>x8POvz2Oj(PHF0a$6| zE^xyyY*{)*j$idYm}h&gg~pJ|e&tEiVJ(38lzm=;UZ>4gxINgL_XrLMHL_R|@gDUS zNHQm(gv`{zmyP7NE({=S&rJeFQyErusHl4mDj0Eng@uJrzY6A-Yi5q$s<)B3Wcya4 zIb_^C)A904V;30ehfVqOJ-e zZN_}k^&G8H^i4YbF*ky}04!)*PeP%_xa-n=epubG#so688>m|oX1Yk z-%9-TwM*NSiYu=S)WMO8feS!ZU3^3A6IhrtRQ;)x>ZK#Y55n0?>(@0m0UJ3Dk2){s z+O36D{oA7)VL(#QTljKzbMF>)FTROt*(RZYx0<&8N8`>#=gP2ff9=7o<%fyKsduuc zJsR@^DL! zpsYrRzO=6LaG!v;U097iwFLo- zx}f8z5Iu?+KfE)x*iwcX4g&jhz`P@h*t?6CWISCpq5Zk;g)!o}8t ziKBVw4K`6FBqs0_ka&3Bq|w(m?sN#!-a~l16DhlFL_A4L?m(m=@F$K$q0)No|02W! zC;js(9=x3+o2hVVbxE>Z_R`h|sJ7(X1~*!+nb}g4_NNg8_6obQjUB2N`M(rNe#5=b6D7L%X^7dqB+{u?KMh?FWNtKn9wn zRIkGl8#;^7n%s$E&O3bQe5LhdTFrH&!TJZTL?L{r(+l%sLZ)cEK#^VFf4e9V#}tM& z0`%-@8^#a`87mY3497yPs`&7Gp2CRpTs*8Y4^waY=zg+BUm@`&wRUICKw-z1CxSnc zd(>;O@`Y#1@mbZvsw`qf`Ta%|d&`}d|JXFSgckToVgKRVUxgS?2yY{T4|3fSl5Cb! 
z9fuHYy_4B>boQK`2hv(nU!0ykRIta; zGq;^JCGVh{_YU${Eo0`#*7rb3=Y*wBb#A1dr1dpw+cwvpjOvPOxbA=ViJO9YRl0n~ zqfR6HW0~|MslS){2#6iCUN$Co_XAmeOhu;IfxzG(m+kc5@?Zy(;JrYgtOId3&niYG zz){d@|GLA_4(49^WpuBA+0f}6W6zI)zS>>)OdiE8-SDq>d3jpJN(2D85Oax&79$ws zb{v|MnH5+9Q~5AIzw?cF1kf77DXxPe9>y> zla}K#!wlz62kyIs}X<5vgUg`S^3Sxa3e#D{?L2n)-}t>8lZ^wF0;;gZrOhIIdnv}UsS-3_QK zz>lIOs3#FW=)C-1=p45AZW_J2^ZNRY2dN^=aN0CCHD+b#2^IpaN>$_J)G{gYh~UnmlE9Tw=#`3 zITBrE>G+JRL22s6eW#UL_QPvJ($=RaP< zRw+6s)GFdbRegDK^p#V;U3n%rAl$@9*|DeD_?MvS-3OhHK?PYg6<6hpyqhzj`G42F z%F~q0PrA@d2(h$mKYmkbJRkaX%(KxvfHfRO82~7VwBYFI`9z4BkfA;HpIkfj+8VPB zTYN_#R;K4}T5ifR;YU5kpVZf@&wKendD2FpL2WR;iXXBc@CLaKl8z0)vTT^W*||Js zb;G%f+aAfzDMJZ@47T98MfdKVedcEqv78lKyka7{zfy;e%BSb(eQ=a;jt^7Q!v;$mA3Sbr7-_Z z228zfU;{OFRz3fmkiQI-T zCdClCiEP03v>t{32m`oJZz1I!|4#w|?!7{RBQFa;9{i8`5E)UlnuEyWm_qEl zA)+w%`RVb#KbhNr&B?iFS9`-_bPs!5YTd2Z-vk%ca_gv~EB3KcL7YJKluZ33s*r zYTi?8%Ahbes2Ub+#2_=?1peapbYnmnF06Zd1bWImzQ4gUi{5cSR2p%2pBzbjk%v6^ zC7UfAtt>d`AdPTZfGGL#-J6r=LXX5tcHQZ14Vw>;)Gk6H(*<5(> z2zy03bt@IfU!~;!2#tkrRP3k&KZe%m@7_g5GSx z_8w+S0wk1By_TG7b%lzA35#@wD+1x6yJJU?ILKZVw@?l$s@kc6J((2iZOnMORqF}a z$s2UYNhBmN!;^Jyl<$^C-}#XOUGz!BFwCzqlCL&N;Z91N0PRk)0j*}_A5?gy_@~wy z#&M;d2vA3UWQ``(cTVZeAH(iup*v?%x*GJb>Z@lrJuy*(AbFMxB#~^pN?aIWz}yrZ z=*@a3=t*C*An!9sWB>*zm(Mo>$91saBk5Xt-L1w7`H!Btb(*~I_wo7gZ@W>RU``cU z&a9H|Im5F2-#fT{UcUW@0{MkBa!1$Ef;}g%_>HMUQY=KkXz2MqsI?s4U)5=%Vbx;A z&_oES!{3y_+j#|~-PWv)Nl$*bZ{7I~WU!o8sBGb6B5!_4$7%s7edL6)!)xlw^ye zH#dq%+x2bZ1O0#-G%pz>u@dkSh*7>m2dWo>nMJZ}&bR&yAiaZov*`Zg&&$`o;>t5< z!L-0M+RdPs1!r>_nR~5KJ{kJD4>hsnpIc8pBIL+wuv<$7f#G!;A!^*>t=U;02MA>6 z(x|~v)W($!9eW5_ZpVd+($5!&peMze;K2-HTg6c2R;M7)eaoyMIXC6caTniEXp+X? zKZXR@QA-&AjSjQz8+GLfb0Y2*@jOLWLDuxDblZ-(KnW0r65uSFD^tf<;G{a;EA$g{eaS+|<(7-V8|^oA?tNl@pWe zRz5GyvPP4)QZ-1mJcu6sSCu1y<-@!;Z|>Yff=;;Nkua71S_OO(7XXC(o+8ZI?3$U4 zpwaxkijvQK72Al~Ou$9-%zd;{kAutgxRcj+V^>=z?fm^! 
z=A)Q-EyEcE^p@7!SDbY#F!l83F3kiTc&PfUoNZ{W93JzG250-$KE*#u@*AfM-+c#W>K?W)*-+6+0z^_d;X zSopa1rX0uyI$_~C@nx@V# zORno<+Mk%B%;4KzEqME1C6-8zwNyt^zsI#~rG)}&3LxeuWS3Xa(Ur_F84M&Oni30Q9 zOnZ9B8niQ*J+2Ci4csLHLmug4JKI4NRA`c=>^M7U(R2gWLG%VbtrM|7S@&k(;RVp3 z?;T3Jb;FAV)OtSCPAU{kDvCl`T%=Vz^OI9a8x{C!&DC59Ct!8`^C{)wE%d#T4&7JhLfr-_p~!mg7vbJbGDxvveH z+&lnD&RMlL5VBUOW_+ut`FPm4h=aGkHvB?wa3B&ON4n0@^^#`5oyZM?^+@SugtyQI z4z#nL7%Irq%V?(1lag@)rHRiuE70OkwUN=u^V9&s+Z7b--@b6r5=-^AvkTgj^dk># z2Z1!~_F)-NriU6M6?z??%;+vm?Ht{6`?62dwT!3FkG)3pru-M7Zueu49;asE`dt-o zwuWEoXRF}an*!BcTA!Flp<0I~dNkmSuFz7%7VQW_>iukz)L(k_%^vbXdwSU}WV;}~ ztC9MLVnxlOn1;TJq=ry-Xvxy|Ycb>+2ooefEsk^o(NVDH@-OU7o6UWNl8=qUbdV&s z3FPvak*~Jtd}d+Atiat!y%F{N&~fZ;+VrQ^^9~l#2GCc!D--Yg2+|wCMU?{>zCtSk zOW9cwqR3-^#LQ>I#R!xcwS>LwT0oJ-VD19)@x&knG5$F&h9UeXn;n>&;6vG4r|AA; z&kKUiCMGiuW(TGpnS1j@RbA|(iUhMA_UDaB_rA)OtGBMLA3(TX{-e#OI-Q{e$zky9 zkn%GgGi#?Y0>}gL$+h6n&*W|aVEc&SYX1YBD2F%ts}pvW@W4y1DR0|vS{#xHphni{ z95q|YfYekwC%j^L0FNd>@#Trd=j3GDMNI?$R}5NW12W0$GidSfKyFLdo$ql6xx(Sa zbWVbR8J&TGH;9S6v`Ma}KT}3jtX#ow<#TB%TNH94=Qj#~ z64|i)t-Yx2c0r1)KzNJ$2z>Y|R+Z^Y$d5A1Jw+AWkOkf86U;+85P|J_O+k-xDOJJ zt>{+X`K_6hY+1BuF}Zk5Do$0lk!H@BoojF5$LzXiUb9C#WQ%$70CuuNd`0+jYOefd z9-4c`R@2SH-NGb^>CT!~Tzjegki>Pa(gG|1blC4)pLMX@ykNyA)xw}ud|$><43Y5B z_X-+6+m{JGKs7+^!C0(WHi&GcmXc^QmApN??&+rb4-ZnU=}CK@54Q8>aw~YXq02c6 z9s6-LHw3MZy^a6H?p&s@f|{EP84)Y;?%qNBHtM&surRRK91T@=ig;zLr8xRxw12bt z26bikX4?x7H2*PEuHi*yd>%PuOH5&d#Tm-}=17z(jl9Z9cTU;tr14-2GfXApu`(fV z4i8scv>G=?6@5Nc z+k+``)Zm@LcN*d)0-F68cAQ3N{HQwW%}B8|?>^PttO* zReFL^flhoe*#7K#nPUUA@#%xpUgBsIR6F4%ep51eH)nRvb)*qshKC7hJfBDF=0Y*m zL^*F5#)`mNE3-9*p%E2br3>Ri==U?oQE@4(1>@JK6r=iOlRfZcSM%L(_E+76D|S@v!v+YYRIMI5=U8DM7D++LN8 zf&ccfPYbPMU#2r`mqUM|Cl{*;|NZz;>1+p*SMknAl=6exRf-f{k)ij4?Od)030lN?TXqK^!J|@kp>T#ql!nN)4 zewAwDcE8j7Td@UfAx=4lZ~te{24KJh?AnfT3yIR7$Ji~;(O>+KAdOqV^Q~N!z-;61&lIOzfUI6=W_>=^Dq5a@i?j!Xpo48bM+b;jpx5>=cqRcpI z>+{zGJ!c4{J`Uqz<{R6T`tN}$G{I}FR~)!zp-Zp=;YrO+@k_(BQDc?an+EMx8O!nl zKAik46)%TA`7(>#Jb?+nR-BL8_w|k)?_W%s=SfjWci#f 
zwbL4;NmBIV&b91sMh2W;x>sH#i(ZFAp)<)$9&)H4&LJk425k-eWzilc&zN*0nFeJA zo3^YYyJ<#xkJcF>bbqhSEmubGd%rnB$8?z(kUO8d%1&^DfPpwA=)Ck672;I3ZJCj% z@tVf4r7z5|66<80t;vhoWxXEE%>WlZ*07o(l*xOsHmUp>|H>(toxWxscu1(o@l3T^ zdxPmi8HQ4su>t1&+rf6C3f>tl=k~u0YpA_o&;2;=LtK4?Cw(tGcYK+4ixYLe*;jvV zBd>OO8m^nmn5DJm&|BBJo+I1j+AN9Ib=0W*je_6{OVvm{NnK2a8Yt`CP@57th8&xg z8FQJSaaO!6#Gol3DyzPU;^u<27nF0QuWo%Sj+PRML%Ux&lH6M-`JMc3T|WI=Fg=q^ zC2!wY(SflWG(xmXN+^LV`?vmbXseIBTcU!gAl|FN_2bFY{1?Zs$T;?0I;s;OhL?0| zl3v|Nej*S|3LDy8fSIjD~##!{xGTd_zrQk%AU3-Q7lRY)o_QP5e8(+Yh{7zDf)G=>!r$O^}}mvHVC-^qoi z8AVEYuX|wv6S8wJXY&}R!BVG}tbD;uxOAVTvaY~XFZ{D)53lOOe|}$i2O}@)D*d8o zC*0Isf6e0WVUJv&7B~BP?ZZ#I-88oszr&HEb!qIt)EN)d(xdQKvI{#p(to&a@{Si2 zj_C$aKh<<#h$<=IDZG57co&0(X^oHU{OHHn)E8AuefCXp0uDCT;K89w|2VK4-v~0= z$T;{e{mA$l!EJQf@sMrbkvEFir{)HU*O1-YYZEM_Nj=U@HNnB*cBHk`-#q;;+wkta z?ITuZfi2Ll}tI zqN5VL8zTi+$ai9qHQqQf7Ax#|wyyHBUc|w+EY4hM@ThVJvoQ_VgBpTfhuzy8sSBN! zOAXk4UU>GtQ1BYSwI*l1e_H+xyOP3Kw!x74*u#QvA3WxCVV0nAB~$C2h5nfO*D&-B ztn-gOE`%qWK)=a(d`2fnf&zw+ zFRC1wa866xf7QS3vNB!j`Owqmsgs^kBRzyCwN?)wotE?%1FIuBsE@54*pu#t_K`xL zB&>;1Y|FGUgeT9D6Mj5%ob7>4*|rYUaYDdq6n$Y$7CzerkYp$9Sn+pLc+K)-8Dj_X z=P?5yOF1g4?LP1X(&dUcu>cT+hwKE2$wP1Duzv3`rl9W1E7NxMgb)gD@wbC)ZLfxU znqOVJCz79S_%_-xcDK`ol_P;ucJXZAtpgiYB9qR&CyvF^Pw~vZ;VyV_dBj}ByCuUy~If46v#BF7J!#!Go$#c>lZ&t&r6lQyiWwDhS3WhRL}ScMp(8Xq(g zcP4?CHVh@H)9zjPYOh``FYP$^T6@;2d&`*`h#2CA7+BkYu-p5=p{L8=t@{XltNf^4 zh67Rd2UG56J6?yVdOv#c(7AGZ;tfWqz;gG#D^)1T&}`52(?U2Sb|e;V5N@XX@k~ORoTs~Kx)~APrye-Pd(e4G8~(#f`Dqq3 zjn}2Q!&v&l?kxJ5|FJ>#+T>nTRCbUP_cVam^r)#~1c4kCOg2^Na!@9mr9G_%(45jpa2H!pmq`MxP3v|`!<10^} zPayD!F{?#PTeHUvl!a-zrhB$d&34aclE#u4sIpKh8+}keIhP;7?G$V@`U4@4A{e1H z9;F^(Ns1F-6H=yNkDVrehc2utdGC&6DmVgh{h4zp3qS@xpJ8Mn@DK}MQsY%zn#GAN zmpBgSaa}f%Ldi$${mi_NzA*Q^;cxzqzBps?yekGaSuVfb%qyrIc}jMlfSuM zM$u08GzR!eJ@e8zqLrFY_jQPyF~fJxrW-2vUr?j_v;2-& ze-`<&Y_bW-+6hzXpivph_-)%~FMopky%0im-n((e{p_HpIJhx-n#d|k+>%X{C|$)n zKT2GfKg6FXEo~cLaS|&&m~iQ&=&kJWr@65kHl>yAV?Nw;vag5onCiCzCU 
zJw>L#qjMpKa1*Z54e6%M4}+i@e@up7)kD0d+^6=^@hznOPd*PzY*QAg;{YJ*H~w!B z??T>O#Z{`mE;D}KLF0i1&CvCMZCPR}7;0Al=lm`Pkv_Nx)|@d+VfcVe=gi0>+gZj7 z%nR=TPg8b@QSUD%b=$-7SJOnze1-c@m?Q802XsYyqE+Pms0UE+0>Aa;mI?AwnlElJt}K|Mx#^^B8h(+h%x)K%I}uNkK?U*L$(aQUbDqW-L#)RG-AwbXKX5ym^-67 zP%mkAR>RfARxC%*KM5ZZ>BJ|6puIlz@wG#TH2WNw;XDN=a<|#(MQ*u%I`67kTNXW^ z*6TLi9pNXxpOUOdUjNS{w!#*^wk#P&UsEh?`2$w-HFmM>!uD5yBkEfXpzZ<$o15sR zltIUzw{=$??)FV<5sWGq4S14{6BY{8i$1m(7f2bglI;_%$N&0k&FgO>0`Vy z7mn0daZvcZU+aax4pZdxV589e2lw)J&vi!{&N+?mRRKMro z4{ZPme;}3{Fh8)c1{nx58#(+TIaDTy5RH;;yzW;aXyOvt-<)3ZnG*gKq+P?VN&*Lb zV2s3^ynUgduzJDUpez0aIIKPd+`{QggR6Kl^~f zCMo=N^!rOE+z_&%vhIEGf?Tq}nM4KmTmN&Aq5z#QPq1}tO4`e&WusAI<@X1%Bh%{pSZ!m6?-(WS@|JX@_w=b zR^DN3@E#)x;)*hraI`&xHghOT#LPe+SYmEfU`^9O(@rPKJ#^?jOv zR;o>%zrnwz_l|Xh#BoyM`dYmv25Bl8TLAQBEEm`_ z;XsguI8AqfvQ`Df4HipiO2T^u>xF&`s`vU|{V9AD6jtb}Cc)ig)(U zbk2*T%PPU)lsTj-^S8XC@7Kr_?2CUScND~+GZA26+8^*9(OAAi8 zoaT>F%5YLC|1#1laNkP`r%B5RP15=*b=51(G3POUxSOeataFX}H2Jsv6A1+A{Rc-)GOR1LD6N)qbg7iDH!S?S+~6Kx|E?UGJ#X|R*nJyN)kK*v?s%f`zL z0QY1(*`OjcP>FMhlOH!bQ{I5Rc+-Byr|VhYCb;3`ci4le=D+Iic8-6ymd!T2oI*|b z$i2v?4Q@AL7YI-E!^j1?|82fUxJmKSncR?fTdCh!CtuQ;&;O3}oIeD4>@p%oimsDI6iYWXg^<$;UfUJlEkrOW!9a@(da#IvzZi--|-5#?%piaI{a+d zq%uKI4j)WGG9B|>67fEM7ZG{Z*q;ika^dYS$dqqnQy{DnQ*Cwy(^S=80NyD17GXWV zndWOeU)+F=XEoW#RKw%ezKjP3qZWgG#4iewq!igOuj?aVFnQ5 ztfpYvIIK|Ss)-TAz&QE7FdCXBe4Rb{xcRon-Ml6xrl!dG2z6hiM$FCD+Ch&#UW zVa;oJs0Ul(^?@5MWsQvIkY}&+X?}&PTVv%ZC#->U4#qbdF{G%0^s#q)c>(xU8UOZ< z#yP)4w7yctMXJHh8umZSDUDvjyrXIK7NHg|CGt_OLA`{u&cHS#ky3$cXZbqz#_uR51K*w)Vhbi>$VrK z2e$4@FgcrD*O*{?X2LdQluzbMrwMeK+S%bwL(&nmQduHEvD3Na!nyzN_JZ0&Q_2b+iSvk{e zjb;V|ISGfIlRL7%NqW@WG49p`f(+r&l&lUh74fjjmeWu%%*o4Y*XsXKIANS|f>nbPR5u^&8 z%_+ItXoxHYAy^wiFh%ivFlQBj(jU~{5Juq5!Rc<4H#InUlxFD7I`$gI4E%~^R)F`? 
z4a+E}RmI;W#Nn$hbZ$_xdhNiqo>PR0%RK~oLULB4R9uZXMF4=9m8K|ni`B#3@6zAP zp!a<_G!jN16vm0$UE3&uuXs`=KoL!uDp6;@uZHv5h$BZNt$IA4 zKjhHWR89x&b>eFj@j6?#L8KyRW+H?vVXpsP8ha44L&<8F!hELm_tXaLO63>jrz&^k zuJe@Y^Ml}8%#brT7~|DW3`B&>^S2>f6RCTdVho5p;nGc?neoqb{69H?34k!?M)`t` zkR`S;5T!mxrT@YPH0kh>6G#rFN8H`Rmf-_{V14B@ApPYxxlHJVC%3e#MO4Hnvf2-9Q%v=I8K z+)Nw=!O{q5&DqoE?6GaiEVP8B%L;3{a$<8_C z1Y^`@j$d-5fXiGzt>>fECX5(x!3Gh%Nre*b!r=Agh%n4iJAgwr^^dAwKf8k}KkucI zv72|w#60pdq5tsFbC#z7HCCRb4h$#`ohq|ceu|rtsdI$E+i~os>77n?3j0Fm#xD%) z9XdgNf`*^Ckrrcgp;J=d$F;Lsgpytt9W}9qD{427HKv8F)I8CIar!j>hA*8pFi=)) zcBM8=d>7a1e>%y?8uXb67)Fd2g#5HQ)G$1YtA2tC zN2)-)R9pzMQ!(XU-FCB9J_`G_H_1&k=i6O7x@py%3@hca7{9XS;$wDUx4SJ_REJMe*e$+fIFGnlc z(w4RKvbRc~vjW?9ifol~!2hW7QvtA!V9gp{Pdk;2vR&|C`v1y3K{2Qz&OSn(Dxc3k ztHm(8q$}7er(|#&y=w&;8BQtKNJW_DsJ(m*F)T!oYbXVN+d_A)0^TJo#ix(3=Gyu7 zQ3JM;V-WE2Km|0*_qbUGSlwy2iG}mwjS?K#?QDSoY1nA4KuIQIx~cPi@@II32JLC#~Xj3cOE$;G1Y);fdrSK=5x z^hqLHu!q#!fxHoGvXOc1(w@GKc7glfNvE{>-mLHE*5bO*n@s(-_e(l1QTbeUCZ1mX z2A!yC|3%&ZZ6=u$&?5jIiFv(iY^C=;^Q#Vhpp63D$*=!5zXe?+_&hnt>HlFfg8Mit z9Lp?w7?UzZ2H9PA>U;PNUquChT1pUmDUO>{ePrU7p2c>D$C4Xx?+#yF zgjbq8LIIWAH(Jz$IBA?5z}}hzbO~>HIO)@ouML*v|EyUS@#ec}7IsWE05WA^{|VP6 z?b~4jz=AxcL)g5x0S*%vnDM@Pq-7cUzZ|FK z_u1!^cm?Zroc{2hr`r*JID%?JQ8k3T0G&-6JUiYXa>OxTWWsL_d3`Te0Z##sdkqwMJlEvrz63e`WC|!K8lq`QFY2E3*5&nx*nD z8i^}d-7+0(Rb-8NxBb!di-PbWUORUZ5Br=O{M?3sA_J(Cl zyNzsVO*dvh&lu8uf7k{2!*{6g_iXeSE3gSMGG)3Jf%N=<}xx$ z_XEYt1t8~g^IKTvaNYq`+R7g_fgX-ohxbEy0oHy|>{TdmH-dk}gno)hgj$3ejQGiG z$3R!i9K(167SNBku4Id3OlBJfQRuEeejfh(WaULeuuwQwUx*S;jTzXI)Rr|4u+P=K zIbLQn+WP%a&%mXR@xE$(p6r~`ry;GOStZeAHtZDtNk;OK4X3HXrIQ*THc@-*75P=u z1q4C>m!BeE_f9a=`8@K28`9t}znMO~Z*j4P$2mU`JZdKO9iZz%i9fk7l~B}n$1<(O zgc6f>on8SL9t z^!f5$qNp8dAzBW`@{z)Fi#_~U_a>0h{}tZ_zP*%qnz7HMo7tCWa1x?a*9r|{!47** z4mt>cu)e;zX{Rzv18N-O=DBD8gI(OS45-MLBM9g^OgY$hV?-}8l=Wiz!(hP-qAcYc zlF&&Il6u6Dg%){9SNlEaGgBfu502q`ua&**e|Vu`&Fs9W6lM~a;YdHjbMFVako^nh z%joNcIMT_D5#)QBuN_S_E|_Yt@8xTeb!niXvYg&XVv)a1$F?ZXGZKuY;oU5=40i 
zW<6u%P~9)S*aF6A^}og>%e{@Mp$PLN@btG8?h^4*#LcNJ$kH_=(2c1gVczFQm615G zGCxG}Ri(juY{X*d7(ClL;|y{gp0o4M|CB^mf#_cMt`+ zgqm)S3Cst}2ONa)eirjk?GRx+VSQ<-xYrb6hT(#|u;6B$OtmR*C%RE-ildmOc1^QA z>8_OzR7NS!I&ml7t53ZSr4cvF`r*W8%rk{3ZT4gKbl&q2Cf=Y!_Uj@`>)j-G$qHTT zM%^WRNkD$PzwxFkW3t>;VsEzi9z6FmP1J&_z!n`n_J&G+ly6h_`Q+oB42YVLe%KD6Dp9|rEP=ye9*Q)#+ z)rheTVy^+pP}#i=A>3ra7}T9?ji>0s;jfG$@LnQs&cNgHzj$vUUN1r)`PwAq?0f{u zvom^NoP-L+ayv?rO$a*`2S>C;xm_Z(Ret-J4m zEjCG&@*Wv`!}i%%d?Cq@U*NDb)sWWyHtW*Is+~8e!)cr3eK$Uk+;7U*e(WZ$LlFE- zf>6|fYka0x)5x>Yr$jXoH32!8u)q%UFx47v=e|XX?VoZ!vfw@r4?ujX3@xVfw>frS zPSW=fdy-kVeOclTs4`p%MoWSZxP?i@;hz+uegWchtmH98n(byd;i~v!g`ImfGYp-H z2~Ao?n6v_@M>xV;5vr+Qn>`{(?uRfOur9BX6Ahq)*ssc2LVhH$ z6+HU|r`V12qVyoi@2U}7H6oto%fCY+_T0^psShoi9j$G~j0k=#5%~Y2?eTedtO9tK{VpLez|e~zI~9*82ob3oq@iJpwHDNk3+c8NatyB7L*4#u5=WE2c+ zHavGh{E{nw!S*qJU-P~;jHcPRarm|o;Goq)3J9S$Z;8P?IQT_1u5QK%as;9csCLwKnxW(}Dj4|E>tdE%H7GWRu3=oH)wxmV z`(H&GX6fRelTSI@2=vXJiHJykDmU=*U7{9;;e=7|xo=bZO5!7M^2#TST$cN&h4dqtGns`mj$+= z3rFiSp*CIC3Q5yD_jTs;%JX)!2clEa;af31hpWS7L3`>#K9!3>%sNqyQERthTLSMb3 zXpXNpV<~mp?1^oYK+-;yuyJ7ws4Hx>$vlKI`+e}73*{@9)u5sFi>M9r zekvIk?h>AuK;2bz$Y!P$1J4LxxY$U|M=@_Pf|x{KolR^I;ZsdX5=gb+xrMnxq96+Z=M{H#R}k|JPSpHhpB-=o*mS=nvJ5 z{m3uebQKyD(>G^3YEZX$dUJ%MbsPt2odrY3+9IdSty?WSaJLVP1xuqUZU>M4BBw8= z6`XinLf~{Br2ZtTEwyEwM;FNB=A6oul;?HXJ^01FsuQgr{Mlf$r6{&daum9QK1env z$%^qT!wu$oW=FiR!e)YYqs9?#P96gR{U!xVTChr0nQaR)B#R~ETL#EVJ>jh~mpcCs zSziJU^&b5_i!t^k`_9-Rd$NU;FoRTsLPS|=h(va2=86{UOp*|~hN2QmmeB9EA(W;> z5>t^>BviD%=SThD|NFcT&waX{+jZuAzu&Wd&gbmkgsH6c<1G+!bznITpp~7yBcdJv zpJ2a?H8B#7m#17|g6QK7ty@Oj+>g299ykuwiB?iQ{GLAWEG>E%;<44|>b3E;gY&!B zeI+g!Me?!O#`>fG9g*>WH8z5DZOqOgOHDkm=sfX8k)Tm^C;!s8VW5=E#syh;EpVLR zAS(A`4@vf5%%{`&PNz=1x;|6?<*jm)}Nym{o_h|AF8mi_A*(~wcv zzvh{*ZO^V-H(R$X&bxu=kpLIJwJ2u7IEiCiBG~_8V6Es0`Y0 zABxe&E?p7o0|JMfm}N@kKa_&~eJx07ld+M`t?#+9s*WR@+12e?n;U}`MXD_}H6*Jn zq`a`2-b}D{h584({%aR;FlqRTg(a_>fn&CPwX45i?Y}zRZKyT65-hFr;C$yI7Pq7u ziZg31!-3otg)8A2v6j4doXVLCjo?ZsyI9w^WWmTanu|-v~8amx@Xza5&>;bY` 
z)<3}w*3u*uY|1~^qE}OC5j9~GWRJSpHS-N)oku%A8P^M(x&k*`O7mvnp?9#QxbI#g zZkXVq?pe8mo~G|z;{@iTE^D7X19r81qziHOo(Rg-brGc|bfWf3<1Kc2gL6JV$Z-@9 z>3KX4)ip;oP#OK^++iw$B3~E{=^|A}5vzEdJqq%$D)r)4csp}-*k_AMb)))OX4xC0 zF6(sLe9xS?jr}FRV9R3Pxql_3^c!wsK6~Oi#$OHkI4c}d6kkgG{0>*e2gsdPv;+ws z-ZpygJmr9TB)T^qzIBOyWY#Zt?HLqX0pGXZ8vt2}##Boq4$VM41ioXv(SxB$uGzB~ zn(2!ZQk`<;7VPG_WQDsrN$LCB`@8G^eqV8*N06bgHGzL#mFC(Vmm9G%(&wYawCIc! zNsy`QLEnFu^eo?}rtKSFBAidCZ^!V#!s}v9B*&5TJW1F#jQ(oQ`$-(U|F(ZodKL8b zes@Y*3%#%2+u8W57l^a0n$}%-w(u4C+7?y!wo`=zI7+%k%aueVrs6v-67)I)Gx(lO$ zfNJ6bUn!1$6&ne(+;T}sjEA03>2YZNwT^*|Tt=e#cu*tIhAB|I6ewp1{b3OXy zLdsKUBP!HPvC|)oWZ7s^d%7mF!`JDLnU5cqxS2gV_(m8N1&JD9Vtu#Vi@fj@| zk4OQ*`iRF%$dfqMrTO1t?;Y(t&x|D@AHnrs(V@f0RYPEPDs77f4 zNf|5MAw~atTi%-^yT^FAJUE($FlN;))m4QuGe#_r4X33o$^>j($C7c6|I|mkKG~EB zLPC0KvXt@nkq*EeH8nlCREd%omQyAE5#({Qp3CWLt8H(Mlh=M7**#XBj`Qqoxa9b) zq0*;aH2G%x*#@;J-bgJYWeO*P=G$@2>n?vAQ{vbY)k#mRvREVxuF+xW>G5=rglLW1wazXa&(byf&hdqkiWsXK(0!~)fCZ9n+w4=iwc^pjGsG%wH~C!{tu4_AEX`nc@3YzFr`}lKX!|q zfNO#=A?tO76gr!opscf~1y@sJYVa)LgFe)+I49J+J&Kj5rn#tVdd$^X$I%!SIKnJ-@vsJMQ@_= z-rFVLa$PafVkVp|aLUoScD&)ot67{2aaZz@=v=tErFS|fpOa5Nlhq_$&qkJJ{sgAAr-!D6uS)islt7{JL30w4TPrW($;@(tE*tO%9v7b*HcMPF7I;I?QKT1kGZbr zWLx`X`aUVFfaDI%ogi3ISF0E5RL#=-Yfo$5;moknx$OIW!UzAV=NH}!0ulSufF}n1 zu{*H?*x%V63=TP1)&KU>bTb2u896SVeC0~rIpoNCuj{S({;E?O-*32%kr`-=Ud@!i z7xT3$Wo5!;DzDsSb8;mMA7v*JFUX1=n?N?5J8|mrT)Y5Cly2q*AMP1=A(|fCAvi2Z z&_c!D;@+rIPMwp0|7Pg`U)Bk%ggM{7VAS2Q6h(p0oc|8R~q+o5mDL63c?HuD^$2LzeCmtuM%N`}H0~7fz}r4v#*tJX1^V zMY7b&EDuN9IH7f3QKBw^%ugG$*C!sDSlxLi-7Ft`p&Ca4WDMkX#Dpz|dP-?KT8I~< zX`DLqZPN#Z#4hnJVG)-4EK~<=)*WM>N@bktj&21}$hQ-JndN>qX%~{>%F)9E^DSG`HK3jDz)1E zw-fW%sB~+w{rXzy(Chma8?lMtmYJHI3%F*Tm+}L%(*>lCjIm?T#!^VIMehHH-FDp| zXf^uW$<_O9_H?X&pa!KT^h7jh9f=G#7<8uXd(Wf1G?%79d-#)Q>DDxgqIh2l#Z;~n zJL*tvL}QygIjE!v%VbwJElqmb6GF>xhpkjW4^+Aj=JmI8G3!`9_y*(;C%`DIaCeeK8Hkg zW*6IQyMKT;JmYqC|tGtB_6%8fw4mPS&0tZ65-k`d<<$PZQunO~#iS#K6^{jVJ zze1v@huKgUt(2;6v2*YHMrUu5?85F~jtt(xG@94E_GR1jo@P_zJOUS_^aw; 
ztbSLB>uC0j(mRq$2o2JWscPVDs!p&~3H9rg)~MR}ZouT?xwAwH(9=mSUvKscFo{`y ze`2bs<||NArf{aaaPCY;C}UtL^43 zp#{RoKQDmt&X+BBQ=x^qbW$2qXWXEE#~GYk1)&~#Qf!+!L&UiZ;FswUhN>BPLJD*P zNZs0CR{YfYMvNs}0J@Iq5Z;BZoH5327qb+a%+iN&A!p~i;Ad-8%7`>kUvQj}hklv0 z&%e@;#x#q0+Xd(j89)s9KJQCmsL?5KbsnR-0Ka%K08H}6zF9)Nq?{tH^ zjjZKr7~^D+6EA~FMMb){HiiEn_M;u^UcMW5*RmGB^f+@NNFK8B=E|~z2EmZrbA;x$ zbujEZ^H=kvn{s41zTNLM3BNXrThVpM`!%+#OHuhn8vXAmBw3EEnQp`GjT6*Dz0yI-%c&EUkD)WOA>a-@rYy zd+ei5RJ2QW2gH=LeG~H*ooCc)jh0=nMpn}vx&c=e4C`B-_GNW)rf=}0hV_P_;15f% zVNeqZQ?KQzvL2_$;YHQ64$#=f)tW9&ehT<11OthNLXi}A?lVDTU)8t|)D8>BiHNBD z)96D7=mbyCvE4z~nGlUESY;6@i&s>_J4swntKYtnW7M2RGH(oOCZZMtXUWY3rgt&E zWlKe=cuXihs*$y{dIpYos$FI2g_d-YG;gnlZeGF;-%-ci@_VsX5Xpu?|xE1DiYB9Fr{52;A+PqLn?7a zDB|^rr)dC1bygi+|LyMSCZqXiOCti9wHjils-{0`T27seK}Z@^tv}US_q+bO18FREd0BLV`<>1%kP{kI zdk!%SuRWO!hIIby!;pZvu14nXIiYABs(nBNU0uhCDMUSUgPqznnG!-QP{Ln5-Xj8e zn7WqZHqVNt;AhECA@7O&GFz8h1a$kdDt~&-SBrGswaE zJ_BC$EBKC*F5GV^9XT`13lLP6#|-DBhI$V^;AAw`&GJ|e1!kv35!>M#S&b{sq+VM^ zIxNRchYD+WO|TC2_=^;QRD?GGZ+J26yp`0VYNWi;*(h=Foc0jvQ%{i;+@EkB9zgDN zc9U3+PXq6EYVWvu4bGl=Gv8Zn(<+BIV!5Wne=ABhpJn^_i!(J&oj3Sjx$jz$HmHCwFRvq~ z%nQR)d|I1r$+o~+h!m{n@FeE0cn2YZ+xaf4L-)wGU;6qN? 
z1u8rax zSF9!1jcp9k`8lYQ|MXzod?A91fdR3{T2F50(a0)U>mJ!%zXBs%R+uubwd8loCla}dkV+_fbFV-wY6~lm@`TPJj@-TM**xF| z|G`Jgq^Nr~Jq%tP&s z4_}Il5Jhq#PHOD5cge`GG6{C8uJ?v@r8--+uEou>EUE*6c}|!-Oc;) zQBWtnS76HWx=#0hvv$E_)NDiM4`EJ?0Gsrrg46n))nEK;@pXc<&db8kc=>qw1uH{{ zp0E9fw0dx5RRx1&(H@St$ADYRN)v6KY@QJDGtvp(^keKJZ?Vwxv8Z`C0JG>>wP-u# zbDafyfd}4?<92Xb^?y5lx=y2bWh{h}vE)hU zksO@I$g#(|m;Be~O##v@59Ri3cGTxp?6IPy(Rh|WP8bP+t7AK%s_CRK!^6sh;@(%x zT<-nHLR>uJw{r3e?+AaZ1mG{+xPC~HTw&FZrw3!F4?s)R34t9A^z$nvXwdD5)-T<+ zvX5v=67Kb&9(;1;_0{jL0W|oOkUrMakHS~Qq6o)u-|K)AJ(rvh#Ao*KhyaFH6N*DR zYQ4o6CtP&m_bM$1D~vdW-^`AiZ-GcbztNH9=+&M&pJ8}l*Zy^8yW4vNavhwrg~?Qt81!?%-V;I1N0I>|k&=;;GZD@cQ^<41 z$!F7+Tw5;euA9N|=R0y#_SOVb>=}pg@<7Wl)pC%0AlQH1)0Ros{K1v3E!`_McBjXg2$g(Mn0v9n1mOLSA#7nRi zV6^Gmr7t)-r~k%GJFr+Z^*+%|d)>{J+yL&@JRTa?f}4tv0!Up({1ze>*;C4dY2J1r z>rJ=V%xm_&4SH7$XAaU&Asg9h$09>6yeeohgMK0>-89|zko7~%LkuA$`~dL`zoGGJ z-Vp3j00LC~sDYl0h-porkFi4xJr%F0yh|jP|Hn2v>NCE5nRq>#H$)Cp{wFeCPXxPh zJ$BT3$S;-zzI|EHLZ0G$-HnSDSKxc+=xI87u~IDAV}qC<7#I>K(nZRotE_+W=Mt11 z%B$qP`1E)}P9_uaM7#_>45(J*%Z~Dp)oJG;R4m`6wBy1n1JX%mdWf@bZOu8m$0M(s zNdC7m9NCyn!Zrwe{qXteQ%SwsgE@-3dJLMWOYOYt#;K&$?u?tHEt_DcomQ>iMylL% z-%LH)WweOKfI>BjRf!4R&UHCVH?yVvG_|~W1sjoA0g#j2UnY=ll8v9tl$+_65y=lV;*0> ztX3z`aL*keV&4qJ4Fj`lARBros3#Dqxn+s77vfO^ZVHTz>zq+0{CNOF%~^JWGa4ZN zO+6!i9k~_;Jc0Ufo^OCm7@ypW!fQ|6H%6-9Dj9cB!88Y?3=&$JoS>hp+XLfV{wu;!bxSpjg(J zkz}5)u8yCLo?AG)k_mlN{nVrbVh$n#%v3%_$(LnfmMeKH*$WMP;*`L?xZNi_>q@UQ zfV*Lt{$rp&Q`yF4^JzO+h)~9L{(9iX^<@$kE8xNypbH<++cZ?Cg4IV*g2jb4xlS;? 
zyxdaYW=i6Fi-{tSuY!-3b!`$EN4)I{>u6d=5({BmU9NNheTB0+?KE0UTNHWl_QkzW zQZzGtIu~PZCTS7>@xYUDfB;mhdZq^LK5^DE7hxcZq?^VTe8Pkhkr}Te9rQi)h!Y2i zZzJx1GL_%DD54cHwa4d}tFVzikBESg`mK3V51+$z!B*XwdOExAGI8O@$ZM~XE9XQh z^1A@bL1?Py>nH2(|KGTRs^QDLP?Tg%8fcul@e{@f(bwXUeI2JlG=s3W6F_z8d5Uom z%AHf{R6BJgn(kOe%sSGAIMH)(ht~%e@LuAP#`@!)Qb{z8sd@-A=adwmKYzFmb6#z# z1Ug|ow0*DVp+TcMndO>cB-}Y$N)=f6Lth_`YqQS0kfePI{Zjca-2=7*+g zug^FJ=`v|*GkXxz09^){C)b)+!GIhiS8q9ld^XH_eRV0W%PNBk^oXsS`dv-CHIQkX z{~sKJ$8-u`vW$fg#W}x&;lzZ0P0LF@2}eh5PQZ2Ii{RYGis5tR;GybpA3(=~D683H z9v}^n>5J^fst|1Lf~~NyxCNWcKX{AJMV&^TT_c1!Z!Yej63yegqJv#&k< za$Xg9e64vVwo=MHi0WYCP>^5!s#n|uc_Ma3Q4Q}pF2$U}U6Eq4x6fnEC5d|@&_WlD zXM8RB>*oX8!0(*gy~J)0XBF#U*hf%I*U*Ic3o5^8jzQ`X3H?NWZt?Nq^%fS3cjYYe`zT zi}KGCMvt@s*-g5wMphneW2M7R=>$*mdTub5Bu2U;Ise7pp0WKP{t!=#eNdQ?waeHu zuQ5W(W}$=I9e5X%#CdTCS8q(!(b^_;rsv~IUab{p6saEK1@~hg$2$b+=JIOcB_@v+ zyz;~z)8Vtr{NcEf6-!Zxv_C+9vy@mmZfQ?F=&!Qp^OtB#oytE(l6Ikm?gBCIjk>W%nD^MP3m*pGM5Ept05hkh1Ix+|u!>!2sQ6dYvb zxp9v|yMSt9O*pt*6rA7j0y)npf{-X^iWtOXz5b3?JzDqiln}o-==xg*tQnFZ;v4ag zbuQ~85=bC&5|O^1Bt3XP)}3K$gtg#X1|~_Y%u1!dXV8!p0rXuEHYn2y9|{YH|2Sc% zKI6uIAr`P+=*Pe}+`!bF>cGUDBt#~<=fvW*93yg=&x^tmFckUqrMffPueoelOMVN| zpP~^>)P{HvdsLOu)yMxsX!%l2rHJ?v(6XDlAyJW$r}!1pQEI}XZ6CH(r7Ag?A%_I+ zjVtE4#G_?0`;DH3nA7)TUwmjcHIlhNkj6fiOE`_|M<$W0LgkXIFJ964S1P%Z0fLbN1?_os_-0)+Js_ zS5@^K>4~!B-p=WNIR?`ABXIxxBmJMW2u}B-hT5|iL01irh-n->-*)n#qnvK=av0NSnrX=3w<*q^+(U5kG$*LeyoX^{`4YgK>^>@*Uok+W%&+1>Wl(G4c=-F40jRE zSmmeLtu>OPG_2aP40`b7TmkwS+K12hb<30U{Vk&ye=JM{vK%)n4~ptoCeqhy=$uW# z%#$j1;QNlm4iJ#ht#YD(>n-K^CaX}y2&3Po8{%`^Q%=&0==Lj9g;@8i!v%`2#uQ7# zvddG`cRpBH?YLo~pfjHS4qAQktJP-ofWf?>oGCcrx(CnlP`m7>I6QPW__hBVbU((R zDCGo7Gze-tWk%P?Y6HX){F!=r6$n-X%$AR?YUesRXP;(Nkf|-7)hhgzr?Rg?$f7v@ z*)-BBQqo99cK3&$l1+*hihGIIsXG(6S^@sfc@ETPezvDd8x6P_K?w{y0QHDu&s@$o z(CR>huPUI~0VhI$u_tisfo${Da^^UhN@y^6!*@c_|Hx~6Yw91+Ec?+Kj5Yp4jz(6N z@bVa^Mpgb#fhM6Q6yLmEV&(vQ%u7Jep2fK=v!FjiBv1D9w5lxd51qPAKg@15!<~jI zR;q?O7U&J-XEeFK_xG%b)HV{94LV{%u0Hmqe8JDDL^?p4nviUZ{QJhhZKLzKrPty3 
zcpJCy+?4C|-mzQRj``{oDb8^3SiZq@mV>m@!g~z#3t}Zfzvyk#4YvUrfvmGBT*@Ea zecR4mfmR~r4<&%HUeDruJZh&xZJ81sdsLIGxS*R`kt~hV26vRIG0Mxe=IU{s@*axY z5yfQB>9KUH%bBZov^?9umMS>raiX&w_;My+0i9#|vxN4XycPB_k|yF5YFZj1VH~yD zIO=1u^+kBS&5@Q#*-4Do{E8|Wikr(sD2X=AWsnigbRNGN8lI1qW)djXp6dt(c;N3~ zKl<8Xr>tX{f|h>!&xtNbQd`zw{fVT+hOgMwj2x=%Zy{fT_4;o=Bx;qL#P+hSfAJ1t zVCE52*OblME>7CBV(^%Ua;YGr=BY!Eo%8V})#=jkk)NL0EL_cYG&pl|&l;8oHoPrp zhxCD=rqQd#NH}s_ZOc)OHl`gEbwOXIkk5UF(&BSIp=EW3K9WE*5tPKlY!+FY%PJ>& z0Z!)VPS^P<>X4$*_Rp z5e;FdKSo7f_#rH&a(=;?>V)E>>yK~<0BCyoea^nLbW>@do;o6GM ztO4b&ZlJmKskPvlmdhND4pzUpfu$R?&72r~LQ#d+(%=^3Pxrt%70X=>GQC?ibNz)< zfG9@i#$pTUn;{s#iTb9PNdnyCMlEg|;=YoYoHjcsRLrYa-qMP>hK0BfTb~!A3v^t1 z>1rA5tudbJVGnn7IwTEz@;!0}u_af02L#oPIHi+2TAI$G50k~NmL$!cU?dIRw^8bt zO+qbkOjNv9q&4$3U+daGo>Rx+8^a&Je?7~wLn{EYbg~{Uy!3ARU6AhjEKs82tpTiY z+Ln77*}>$z$Eb>0r`7O&DYteb=GR4EM!xF+U2OM{qcN?!@lc{M9gmi&Rf*KExPfUB z_PQN$pG14&m!x5c3DIm<}9}XQ;~_N<|(ckL}jg@_60u^dlzAT4Zp77)6j6 zq<%=(W8HIg!jFbdzG}v-R!>%}vi@a(3|w#T8Oy&r+-IBxWH;<7n84?z@_zrO(KZ*ZSS7C9&F&5$!d8MCAhtKw@!!!xS^T1T>PUS%rc(y`%4nXmv z61jrfGS0B>qfE6F&U#_4FNOpb=9-44TN z`3}X`gK;@Q+#5(25)WWo9DzN=vInRhs=;l*YUTLwYp%!Tkt2D@kZ1rR-kZ0W-x_8z zyCSWq_0~MrM>RapHzV8F2L+M3Cp8bT#H}r9h#n_+9oLlm9W28Ep@*t4qmQZhF31gE z5_GtDa^EqiuwMbn1d*oQ-LY0XtQPe+vhrw*F!|XbMU2bA9E~xBZJAk@99X*C3>S;q ziVVa331`NGnxs%r(KWJ2)pf~Sjv5*(RQEdqqShlJ18h$9HbYg-QJy6mRD*XTn2{({ zlQCGN#tK$rd9<0~zmh6Owdr5CB-JAhc{P|~(My_VzhG?f!-B&+LC+55;3n`zI=wHE z^Mk_B`oHyow*((@J$BYtsD}qI63rVcR}>6!?mm%i3Xa9wz^UIk*2mv-zOyx3-5XR& z4%05pWCZdOEwy+?D1T6q)EET}i~;C|{dq#YX835jB_a*Xe%aPyxvW6VT1l8oZUdU1 z$uG95qRFX4QSwH|IcIU!n_>#EiWo)FeL-I@VcRjo&a`%(WQD=|?>k94TZiU+`-gg? 
zjZGvhWUE?j6-e$yH<~I$BAOur1Ig;d7ybBp)J+cmK6EDTLC|&ZcYRO0ZFlT^BWt3l zq~3u^O8e`Phm0`$uwOQ^bOYBe(`&%dzXuV`nHoCYBf?8|v6dfXJcL|Do-;X@`4@N_ z#$EDiMBlPo6nU-r6h%P79vF8a8b^J{9_IJGjY@c6NMXV4J*gx|?ia6xd{yg+GWQk9 zHGFa`f5D`&$2NZyQXt3fA-*|#rMaQ=bwZTt+^2*ODjf&r^TQSG0Z_8^j$!`2#EF3| z6Q@*Xru^yVQ`aO)7T|{=;^;a_9LdSk0~7f=^As}Hg3ivM z=0+4eaKkxKou4Ll5)I`H&LNqmqIWT7k!CkO-az73_G;4|cREQ^j>p{9PN=_p`ix=t z>t?Yt6d1=iwPVbEW99XO2V8`PO>Qtw!C^){6X?-4GUR;|=Nc#h0iV7fdW4eDpH2w- zbIn-4U=W<|0+aw&;r^JH&}cy;g#~ed|2+0kMYRNEdmakk=>C}BN;)qJ=~wXU%3H?i z?m90#J#Kb0cF-?>C|SjD*XcbNfV}~n=l?cr+O;~8*1fx=ywerBHsj`43q$0|Q@4mN zME`g0L;@IuV@HsOIwm(|3(RjY?~B!3UOG4X z6o2$;(mTe__`BpNWr`(kD;wo>GT)}^xXL)`3_62L#lbzGDX~XebWkd)5 zOXm8KPl+ws30)KYFVp+1uZ8wzZR(jP!psagjjEx`#P+(M!R?3oAMJz7KlabgsMn1L zwuwIwH4>*dr`lnwu8-w%&Y`a)^goOd0Z8hJObgLZe=7~yri zx8X$9C=J2JGv4g^wMwYyLe8uCdUhWa!o43n6qewi@_QG_NWwy<(th!zmqi4iGwL`-b?n*H&Yp#scm2ITJD86=;tXppOG}=5fYOA(W0#6#aGjr zaOc%${W?{3Jl~9Y`>o5CQ;W2rY7$I%LXIG}@(q5qUp{>(*Le%-YpS&vS$0V)qqZdB zODFdRO!p_GWN3IIm)vGA`}O2FBr*i~TOr9T`Wr584J$}YvQ8TyU47ANeKsw zWk4ka3}@6J+|eyf2@qQfdh)gv(QgtT5S4-B@!&!!#Mj=5^fhE)wsxo%N#LIKIG^L* z;>LNOScc3@bKLs-Y*oN4E3(d9=$gQ`LNM4yEh!#3k=J{79Hk=ir6(D;bPGr z`n8Z@a@0jX!1y5``-b|7)~R_ZDwx6xZbWLK1LEkoIeT_U!2147+u4^zE;QRNw;*O- z=+vfp_{jUTZZ{i8Sy9K8B)KDy0cl5dl)P>(4;M%!?+>_;y#X4ZlPe}qFw#8obkTO;5+sxwQxMM1~>jy@Ag;e^p7dr?#$s*>_A#@v0v$@J!#H-nWTNYam7lErDKT6TBZZlv*}H|01~X9k-$=Wpbo?6 zd-;-2kV-=*-yGR8mi@&VYRgms!rL51nmsdZ` z<|IBlFh~URj&ZrwS(@Tm5hx#*Tvw!hlr(`o%rNyKl^XE6?VTRW-qP{as`3Cpxiv>O z2L>I`vi~YbkUl*37}#-j*$KvnB~Q3&8=WPgi1TVc5gjD~kgY1e2XBV^4*lk<$^|)W z9}}$3A{5VIuErZ%jJpIkcfOFXI5SUA0j1-$5wym&%IN$I!JNkIs1hM=DNM+0R~(NK zn^8{xXr9?~8#$0U%s{>-oYG@eKUim4TN!RU(RPivIUQ++*4b9EvTQ_0a?vBqJWj}A zy5Mmq4h|((Y*0}>!@ewOZ|>db?6LD`9YL>>NqT(TmG}i>d{(GEK*=@zQ4DK;;YZvA z2petA3hmW`#w%*AW3P!ci8qNaKpnl}`2>?SaRY$@^6v=oNO>?Jg2~cNjCV5~d2e<( z`ZH$lXXh{b>hQoo^!DeHW)P8@T|Akr6)ED7jjM-fZ>@mja@mS9vnl{n z0EzoU%#6a4KB+5Shz#=qgp3-~rLOsq$|Yz}t~?bi2AlWUPWi3oi>SO1eC?S>rwJo> zXIce2aT1Iw*0yHKqM`zqxXKNm=A_Lzf^&A=mwr 
zZ$wCO2Te`rp|`durMW^hKue(C6>nK9?*+3XUhhe^K9|&0ND166@M{fO%auM&hjo!= zpwK_j7CPDi>Wl}wT-_Z6cF!6TgxDuUYiu$`=Vu{3jPaR0u+e$WG%%1fv-;lCpLuU+ zP&C!ZU>BO4XA;8O=EI*oXxJE{nw8Zkw&;&o>c8j_w)Y(xxnt$g|1Q1rE&zNYiui}6 z(icX-jH^+ko<4Q?ayNYJJ-3)|QLKvu;8q6e;sNfi!wc=TpG@|PIf-TFL|7kVjyK13 zGr?|-Ga6MjT~q)}M02_c9ZXTxiqWHV>zQ!Q#S)I>g-vaUP2L-VbF~MRCcHAP;l^wfy!hM2{;+v+m|)UzU>W~3@^Q9 zrBQXuz~@fs(|+b?eO5BBtu)Nn?$9{uu_KB+xl}%F*lSa(2ty`AHaWPP8>#w*^yM_O zT3>3tCG z7iPX$T780GAwavt^M&y1AJXAZoYMNTijxU3-fw>1)?U_|FtjG6@awjOiym4ULJxZ7#)A87Ji6}W;COFHauSZ zQjC|O*6+iDhZ{Pz26>GTnVgS-NCzW@GufM`MNN8x`MRY=%`rj|6|%CyyzfQJG3`Ju z1vi0voqKN!y_`>Rjp-flUcVkexi>McuZB?0( zF^2yk`U_e$%O<9}7gMWTa7?4Eea{~TbdJ|Eksd+>ArX$*4AQneIskLv)a*kK84}-} zb`pJ?t@^hX+J!d@E|9ha?Q6?Y3SxT94HbD2d-hGf=@PRvzeQx;8|lC$XOc?8rgz&i z60R!1khjN_8NLmhc+_P60WCn7Wh&5aTH3f@#mUxAc`vEn{I9nKY@iUPbUkaQ|GHXq zP#BSyu=h#&XQUB&KkM(mc5sYX z4XUF2-i%gUrpLN+U;2_tfifeGxDW<1O@)l~iOmJ3Cu}#HC9!~DA}Vv9_pLB<}LfG$zM=XVU7ZqGw=_6Hh%$NW2OAhlw4ZG$NI2&8a3I zl+@1$nG|nePrNdC6Ml3NW_D`+Uv3oV*8(=U4TzmZ`iA!b#5oJPgbrjbIAou=r{0nC*JNa@O2_kz+K&W`-)Bh^;{e1lu$HJm3uVOWQ zUWhdptYdaOcx%~c#jIU z-mv+AVRTeyBrnr`H(Yw!wf{-S{Inx+tZUJJK&Q$+etpKkR7F*P41vgLcUwiri3v8OvoEqBW|}7v+dn01if#9^!LUtDc5$aBk;^ zXshDketHso!H^s0=6A?3brK@bol^4IYZ-#fYJ2sWH39g^PYTX+{&H)>KMae_NTu&F ziByhUQ$8`3IqdQQ_R_R5u9qk9H&w=nf4yrJL5kv`;s|_5op|<(`mE3Te#w_DI)^PE zKcnCCT{?W~=OHniVET#Swy*KuhS0P_K3FEsxQcJjMvE+x#G*MA)pzrtp&$f(8sB6B zFaSMDyqv<-S+Y7*OB>>_@Gv?F!`i7Y+A5)w03apd?a*VR#w_sV5+k&1O3s_ApwTIW z_czRl3#$UwrBCb!%F3+FY*FK=n@jh>yurL~`d@?W`?sDMM~N`Ls%s8x3~&n(_3>%( zTCuCEPt~QS@WI1J1+vxUxyQ-X(hONr_la6PB=x|mZmaC^ukfV8cHa4JpUxZLBQ8(> z1l>=azkazN18}c-pFd$vR0404LKD#oYBob#l~5C;f^`8;v|XiE04irzrdacn$~Uno zysbsd??y9x<*Pbs!l9i~6Br)_l*xh@S{w0)J3DQLBj0D88A$)t7Vopii-lon zX&31`PMc_9Su|et)IeIoi#mLZ^$abkQ*M9Sa3mP39CR#rY7?}x?%8$k);dDAuOr)Q z-M%K9p#8^YvZ?d<9dsLjM}V1L4jwBKaZSV~1+?ZV=rMc zW00t`IHK^fZR0Q9Cs*4mGKyA5K1ZvpQrpcp8ieHL*%HdG%1-V&MnJ6vMPB{#Sw%7Y zo-IL+M~y;MuSz^_+0aujGLq;w7-;R$oaN+P35H(1p9o|| 
zBKvWL^rPe)73@)?sKdO&9dGY{`o_yx#b_1+7@HiTr8{BdJcs&6ie|ytiI%4h)^19B zIy#{qYAn7GCBAee0uJ=lvIFIF&Yf7k5zb9~X+oDU%ExEOgAYzX*H4LM>S&Nc|0?x0 zz-J{#3@>bF1L-<6LPK5tq&Ac*Uc9Cx$`t?}?==3C*5oO!g5Lfp;X0%hu^qNBV zIlxlGmd3n?k+=Sr(RK<(WGicX+ONEz7U8`(x8V3}dx-&)roRmQigQd564Rb*(^b+V=;yplhk zgSm+6jzq3_fP?XD=ZN79`T6;0=^9ye`3{QGcwx2MrX)aEEZ=DPP2KdA00CC0-?|yY zWiv8xmH#bI3*8OPxjAG=aA}a>Qa{6{p#F0HrnH0c1st9LliGhCxd(jJXOg}V`Kx^}Z+-}B)vd$mYHtti#%r}&JM+U3t=!5N)lez%BJD%b9q{w@te z(#q)*j{!5PJRT!dk&ufDEoHR!Va(+WFR}Mc$2-Nzld*Q4GjZa-YAT9?>pH{jpEtj+ zE`N4RLvw()?27Cpzk}5tm99h$9krpvs>3veMa=n#XZNJCYjvHPk zlFjQRCySRHb6o@WZ3gpKi{0o_e`ZBxu=vq5s|SPiXJ6ycejGKXTdl$#?y<(?Iz2)O zuLubI4GSLro9JI(J#Nl7j}?WuFbl11UnD-*=v?_cSL0OoSNb;gy8~J?QUSSRJNS!l zceV*zNG@1*9aQM!aSe&LCCp<7sLCqDN|UH{yxZy|fMnoS$V78xb5X~JF-7h}aG8gW zl$$WhF9e3U$aLFt)lTRguIro)%NZlg%P+Ut$Hjr}`JJq{@THM3>x<7TjX?dbVKSx1 zm{mNLxWg;dp=~SG1^b$Cx%STOXuq8k_JSW6@qHRF;0S?vA6z7hJqwgsK;)#%1AM-K zR*{3rxCF2n=%>t&JY^O_?XP$ z&2$-5`=$aR=6#P_=>0+7QmczM1s!I;+bWt&azU{%DEdw>dr`-=_{`KAxY6) ze`E@~jp*fhr2mZ2#pmZg;Wc#5`N({FRIp2zS>;e5k3+SOep7vfFf!^@(&6}Js87{( zYhG05{3T)rd40e#yo2E0?K$*CLE=X|!_Kpp@lSClWt-)Y3WS$6`5ak9G}8C@a|O79 zd0NCDniUCjKe*WC?4K)BhzGA3?DAB6r3hQZLnRl*X4G&KLA;B>OR7I-bZ2%|VZ1CQ zG0;%lTM2hDqt;5Oz9rBqM~%WCdqC#o2~^>Z1p=+dG5;g)eId*$Mi;qy4Z4Px6NdH) z*ze|yW>itSI~1JZ_*@cwkvDlE>NvH-aK?fCj!8tVTyiv{TecbaQjIe1cN-_C+7Tlq zZpV>oG4XU}7;jYOaAJTZG&NX`-ru#oIZs_i^TVxiKWw(-c(jcOD5|J;bO>ELSSRfz zcEWm#|HFD{HB;4>v+mf4Rg5(ow{3hoYL?v|RtX+(Ma|{Y-%ej=ibgt$&+@#3g3$K= z-8kihwejfTwbZMgLQMbLTV%O{Hh*s$?koN)HV1Iwhont3>>NicZ~SHx#_28t_d z&kpv*E8hqeP++npE%vMJA6nb}#4G4semDSdQEoFpOnB@NfIGbixH71nc^gJzS)mk& zP}sjOL6(l@!HJi95#0c(S^+P@4<4AAmfTO2&t*+U0PJX1iMs;5PRU9O`dwdE_6Zl? 
z>1@MW@W&qu8WI`iRpkG0d!`s{z-YjN8+OEj){>wEIf76xtqis{RT#GQz;Yu(u6?x> zkiUJ*V9M2H|Cb-JO>9#BpYJvM#tu}wE`_qyHjJ!|`Phpag6=;3bg30Zf9M2<#6)k= zidqrrlj*7fya+RYL)mMx7&N_O`#cgwsqWxIP0o{0*)Q>I!>$8tqo${_g_ur43hcjT zdFUKMG|Bz11MMUt72rgQH`3gAr-_N=$^__XTMCWJ*%{98+_)lck;btWI5=koftGAo z(Jj@;pFNAi4IS|{$Cy`hLpSGl?m~9O%zV}NyCldU7|(7wklT*N;%w7F*8pLrl(_P) zV^UXhf^1lM)Ly|evhND8#dc$=OX7iq4gbP?3z1Z{Z8u8LzlhwU zQ1rRQ_q@4jRQC?f*V?^k`llch3!|5?=Nb?eXRjxxN(t2do}eCgwd zT7|~Ot5v^E`wuIY|BmNg{dqp_SJsz?f0TS4bM^_iITDK!%O35itRhxF%zU&*MENb_ zd3*CwoEM?y=KpE$+rOGR)BQJ6OIwc8IaW~-I5RyR$HM^i2!RqJU0UI^7*InJE{14P zt_D;Hm&5?tv7A=HQyoFM7E=PrP5^}vNlbw8D5J{aopi>4?$2)?sWm&>&jqJDyKYTI6GD%76*jMi~CgN}LF9lgQ% zg%#s_3ON|d3BjzQ6G3#V2>r~pLPG5)w`s0Q`jm5bPyzb7vOEWK68J^TS?+s@=8uJ# z7V3%vPUS!zeV$b8AzIaTEMy_x;LMuy!77lzk#n>@{i(gfx`VaeFZ&7y=3e<=j&bOP z-pGNTTb>5S%Bb|Z(@E=u8Ekm2Q-7hWixZC%r!QzTb&8rAh;n&yd!OZD5~Hjzc38cZ z)LphBe>9=_lk(_wr!L&w2SLwX&BtJO(DHmFHH7(jey8MP;fbnMkbjh`&~Md_pOToRs!t^K(eiTytQI zO#xv#B7uj6#)e<7TyJP3;<-ey3V4+YmH#P-O8Z#o<^s3H%4;v0hRF+|%&3rw0l~0H zr@8x5g?PXv<|mvOF<06aWO|88QKqZhOs+D1c*)`Dzh-wLoJPw?VHPIpW`&4t;&^4ZB{zGB7!ipMA@BQk`^2E6iLuoQqoKk8L#dBV4sAVoZ zE}nV+=tq%*N-1Y+rr{gq^|#$l#1pmQSS9<=f-FQzs2EMjZNB@x3kIX}q6hTHm;1Ee9{8Xu>Te?Q*kE1xUGTShiGSQIE*kkh_K z+`U z$%zDCCkn4>RsI@7!lI6`@@i@8g_{y;Ta?J47s9#qtgo>H^eGxao@&%rP_1<22G^`Z zzaxA&4TbHK>_=vj=Kz4rqOQp8{J2LOkXd9l?q$L4(o(mlBcLEONZTw3Kam<#RPa_w><-`|G6t-XpsYJ7Cwc`CUO@*!=c+x39DB)O0iu)IRnyoBD2cJ=>SqXU{ ziCb)bI+PJ2Zi{wl)K2lYgR%kDmJ*Qf5TF<)LySPZTCFS8e$}@@@oD7cObQ;vAv+DT z??7J+A>9?5GO5FKxtuGxYB3{Dlm!JcCKLFPCj(e`cz|_&+^+mdYOB=jWz>H+xB6uZ zT8L=#M-Rr*_J9j7*or9ZLF~C5X--jFRq8YlX&VieyDF{tdX2n8U(K62t5qfT`e1lZ zNrJjE*{l^Cq6Y7GK>k-i2y&i$8Plm316MQDQXbejP$v6f%n*&?7|Aex0Gx#!$iog` zd&Ch2zMC+&ja8?m5hB2~9%}zVBG#@K?hbN+!V73f4|?hV_8@{GG8&&e`wmHg&jhea z$U!s?6*GqAPNQ>zw&APk0Dmig$${W@km8iI)~CkYD@rG#*mvYBZJ+vkzL%+SZy|ri zwa!IiKES0R9HMnJ*aapM8A2Q-W%LkGVV}5GSvj`TY7}PrX}1Nld;H}?XJWMOQ24{g zMH&OX4|b6!51(u)jPCzrQK9lbnHpMF3UMYUpw7j|2gJ3VQ~cEwCo&r@5cHDS0y*&2 
zPDGa+&iowwfnng(#Yvjovy9n?KqKaC5o!!u?EsrZsc`IlWlW4U5o00@WZ&l6X@Nx$G)Wh{$Wx?ir#C4c{3n zMTrR66>k#FM};c`^T~P*EQ>o9l4Zj8mBA7+$mL&bWJ6FOsNj5f1|KcU%X2C2Z_aHQ z+(B|F32`KP-qU1^`-tn6-}7)0lY+tut($PhT&Fayb|JBAl{*Q^Xf@gt*GedeCp5>m z$`PYAB}U2#4YhmL+-kHJRaIzO9_le0j&$3Dbi)iCcOG>>m6z~>p^m%P{6@aYsk2NX zJKB@4)&{I^|7mES^-$wA!`X{dn+5a_4jg_!a-BM35V@ge2oZ)=AP6kUw8m?k!1*iR zA>@v`&6}ahFqxo)Jbi%q#e+Lz!{lCJt}t!_4~N2SR8I57Gyp%W1aW;_LKenDJ1VdI zuKho9jRdWaG9)XX!TrbM~IRK7J zNyDxsCxjey0~?<9HPkD{EV6+B7waA-rG3kz@e}||!Y}=XhN4t@W??L%9wQ_Jy8GrD z`D$(sIf=6IEwMD(Ub%#ohW%6t@>vTmvMU=fLoRqgl3V+h$F4E5IgvSYw)#8TfL@E< z&tH>}{CmiM7m+mTnPq;F^s*4~3u{tb<*Q0p|&!MAA0!%@KE zS(p_uR=QCmhN(2=^Q#gI9_hrt9}KFeg!9XUt7S)j7&irWToLw{3d#PV({}A!zI{%Y zWG-Ch#F2(0IinHYj!+mQYw$R8_+Q_65CnNBsg#9AY4zxYlYlM+ZNV+HM9`acLf8-+ zO{2)0SxLU8SFaG%oxGrkI|NkISDhfWk%nH}cD4Tf=^T6X64pj_wCn5z@E|=bbcR#?-9*2udfb{?uaBXM&;IeC#s4YN6xbcsFdt8ej18GonP8YdLuCtWIMCtNq==?6go^qAH~^N` zt(oK^!$P;I^<@rUnzDD{;R?SK*YUTj>lqmg%-0TTAHDu)C(TFG+!|F@yOXvZp}X2G zc@`x6$RwHpq`xHVTOsTv@Jb#l6ATFQD1-;cJ_j}P*d&>A#qz&IcVkgfYYLnKC#;0p zw;0m$er|~Lmv=X51Boa<$UP3K{ox@#mE8Y+VUiz;MjFd|)^(Y=r$BV5Q zSRCS=QQmDo5-Od&Ypgdao>uCH)swX>4g?9K(}r|84z*tH+-?V_%pCFZoss${1OxEG z9#Gi64Me|JXG=JG?cyLcA`Kx&7-45s`(HgP>gu2 zfJS~fKI9M`L?0M0mc9Ootqnx4@i#7W%+dEu^)1wfhI$cBVhP+Qmsqk80%6s--LqeA zOxGCTdF*jfZt}vsf?0~Spu&9bG!V3bMBR1n>1{J+RFlCZHKewlUhQHBZN9fibN;zy z%6G%?{4Q)18Rpl+xR!vyy_`I8Q!VgN<@sBlu0kF2CShpXY9^y8mQLIB>=kd_iu^T) zp4)S7eVUN*@RIb1pzHQYuM|yF*$SxAi{!Xu>y4#T4XF*e0@h(nfRYhum{(iuP!y-m z66w(%2o>EGHR#(S7N&S?SzN#aabdk^z7}}6I#nub;<27alo(uSI9`vb@pu2+@8YXY zrHL9uwni|+uptB#A<+@{hTJQxG;!P7L+`b3#fsT;DXNb8ir{{V_rJV~!>6@M9qseu z4n5~o%(tHQDkIWx!6ige37NhYYTP$;tPA0UE_p=xM;dYo)f9mzqj1(z7+PIy-o;&zijR~Qdq`pp$amI-K9 z{%Q0IdIP-&`iHC92!{TL0lJ}nCV~y{#MXMu1$+%M=(^N!kt4OdTxEWFArAFLTF|w2 z(6j4`EH<63RUa<&8psMSA(y;3hZUCDsOH zk)#0ujRU2HhS1U!a8-H3Q(u+31)W?0MIKJIMD`lfY5&aDNphZ+Li<;!{p+dj0ofrUEfD!k^(k#of12nUaAaBag|atI zLy=okIZOaF;~1XRU+Ue=h)#{+!uF8=hx_xwy~|zE^YOI(w0*R}gP$(71vK(QsYCmZ z(?ye^f6uRBWq~W^bfjGgW$}YIYHQgO_|3g4%7Rslagw#+XCgs 
z*?7#GYvjK{M(>-erOwb{cYf{@o=kR4$s{pWQA(Bjpg;ql1bK((&V|0Tt>_quc6EP> z3y3cNB9QKHih)9Wobt(|KD3Hr3KpBG!{f6B`L6gXOt-bLozu)G4f=v^v}-z~ou)$fCyh4RUu;9gVB z-`(d7NRxL`hO+JqFUtGvU~RnLZGQlfE@s|lLDls~#+G}*{kyTd=pYa>$}@0o`bZPi zyHqCg`LTe!d&?3A3flqP{Mn#X?1~iKa<$&H&dD;PRat(_q@!2P0xEmy`utADrEb}` zXJfY$85PMS644G49^lseQxWiLQI{=;kA=>`S!9#+Ae&mqI?N`(#h?c6F@nLk;MOAgZ zpnfBFVkNXmyhv*U`p&q}9_arT>>S37U$P7ErT|FjeBJ|o1x((+8pg~o|HiA|$VArD zz95Pm0b9^rlc)2Pr&a1RWzuIO9bQK?bmKW-=Iv2idz8S}nau+lek34Oa*@ z;Ze(qg4(OnRRyxfF|Tr7?4bT{W6IM+0ym^^sRS^%5#~V8{{4ovG`H0+Dnqx!yM%0e@##)y!*l61ydcL01aOph~1^DvJr^+9U;<()w5m9`4^v71jTb0 zOEm+;uP+et57W(qx(`ZZ(C`T=J*bR%tIiWCMx*T7RZqS@XTJB`0qo@UR*Qw-)6i4J z(`3E{o+D4Ug5hqy`wbo)&J|fE{01N1j+dw2V=+i2o&1BqM5-r+_-KE_G%bMVRnaXy zWZ7SWOR2U2h5zs00Xa}uV+{RaB|3x7xEPPbHO2bH%N6`k`+RQmbuM#mCaVKv`B@y* zIpgQ1jR*k1r7QnpiW?MIn94cK21IxuO{??!v$rJYEY1(~BpYB`U)TAdOkzf+1JVTP zwE=1BoVnE*sZEst0uSuE{0>Vu?if75tP*S|Tc^v6$*LE^ZZ0mIRMx4Vv>*9v=4b(% z(p7;+Z}5BI9#Kyj3fug@bRB(B<^T>A!0C={l=yO$$#l6Zp*aVq-Os4UQELG0Z8C*oBrjF+ZSjo~AX&tv~|9X5p#c~m)3I?3BnvFl%GhDe;U$I#Gv4<*g zOa4t;s;)+IxQc`79F_h)PZMqjH45HUw95b2HlJJ!>X)H}ID-z54X;Y{F}PZ<{wDyv z@_jnXykgw3S6_a0@XT#tdMQg8;T>Jy;7W)`8-f4ARxTsiw#oxhe#lZZa}e){w!VY? 
zd3CGeG`9-9D!W;CBAopI&~^?I-KSTI%(R%`+ z$Qs>q0T>f1EVHZGUGfU~jlo+)KurRXTijqM@ZcGIsR{q!On~}KDKm`ZE$F$7?oz2Y z6wdc8cLNlD3*^qO8^0!?Zs?%j5W?HZ^+@2R*;?geuK_}b{fA8+q3?^7V%g786IAD^ zxgE|vV#Qv7ckOEcF<9c0#f<(@I$N4a4T`m97tsCz{Uuatgri;gA5x_-u^_&}51tlG zhg3*RUrq@5IZgBGx3*^ML`R(DqiYSqOp3%i_veOJuQ$-H2V#$dopIP>^mR508lK8b z+gPQ5*W<=*3aJ>j*o-D*MQjI#yO5vy?gW;&_5z0DE@@!6vk)vbz*ZcnjnkcigiFoab2c ze$F+^p}Z%e0lcYC4Qh)&s4QLb>&k8R{wGskAMM@ffM@4u9>$?l!3k9Fz=#At>|StQ z;zmOJ@+@or249S8KMqRt!1m(qebh&g8J6lf_UX(5O>A_z*!u{5?-9{*+5Dl zPf#Wz62#!^qCzqECMb)I$KMs>M|Tf`Gd#2iEm|E@UTTm@(qoK+D7b> z-YH-JZ{{eReOS5NiF5pkaFTc8%hwzS)_fhv!2AoyJL)Y|bmo@wI5vqim_z1gHscLZ zPFcoMemb1)Qh_H{#K`|`IqWWQY9~!p5Pv>df3Ez^g=(im6=IO@YD9)NdyPC zhjpwpdei=1+IIEyZGEe!c`&$FR5_rY-YBXPELPc(Pu>|~YZJghHylo_GWK47G)hRH zY3c3eB-gC>TMres&me&#?SS)6-KA)p=;>oI7&Ub``o+qK~c)-sHN)))#~jZtQpJMIZTM z&)olPe&!^<*4w!^XLv4pdLfXTP?Z_&NF)G|``abf%zDp1hZERm*9j8NMPEW6B0I3D zjfo0yG=iTR5i+O8G+7T`2(hLy z4fBCd%FX6M?P`}4RxIGD1BpA&l>)z5BN*jWQbFNHS6s5-wmy6}Fwv&dpuc=`*?J1d zzm=iiOG*H{3fkIjA#uHrkIpQ#5&VNiD?ZAHuIF9&LrujNQue~OsBB2|-VfV%?r8p> zh6jKD!?p|5b6;-wTLk*8GA%dB d|DUzq)X+0`B)<7$*tXrTcZGdbzvIBE{{viFZSnvB diff --git a/api/core/model_runtime/model_providers/nvidia_nim/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/nvidia_nim/_assets/icon_s_en.svg deleted file mode 100644 index 9fc02f91649a87..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia_nim/_assets/icon_s_en.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/api/core/model_runtime/model_providers/nvidia_nim/llm/__init__.py b/api/core/model_runtime/model_providers/nvidia_nim/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/nvidia_nim/llm/llm.py b/api/core/model_runtime/model_providers/nvidia_nim/llm/llm.py deleted file mode 100644 index 6ff380bdd99c8b..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia_nim/llm/llm.py +++ /dev/null @@ -1,13 +0,0 @@ -import logging - -from 
core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - -logger = logging.getLogger(__name__) - - -class NVIDIANIMProvider(OAIAPICompatLargeLanguageModel): - """ - Model class for NVIDIA NIM large language model. - """ - - pass diff --git a/api/core/model_runtime/model_providers/nvidia_nim/nvidia_nim.py b/api/core/model_runtime/model_providers/nvidia_nim/nvidia_nim.py deleted file mode 100644 index ad890ada22abc8..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia_nim/nvidia_nim.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class NVIDIANIMProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/nvidia_nim/nvidia_nim.yaml b/api/core/model_runtime/model_providers/nvidia_nim/nvidia_nim.yaml deleted file mode 100644 index 0e892665d7e114..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia_nim/nvidia_nim.yaml +++ /dev/null @@ -1,79 +0,0 @@ -provider: nvidia_nim -label: - en_US: NVIDIA NIM -description: - en_US: NVIDIA NIM, a set of easy-to-use inference microservices. - zh_Hans: NVIDIA NIM,一组易于使用的模型推理微服务。 -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.png -background: "#EFFDFD" -help: - title: - en_US: Learn more about NVIDIA NIM - zh_Hans: 了解 NVIDIA NIM 更多信息 - url: - en_US: https://www.nvidia.com/en-us/ai/ -supported_model_types: - - llm -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter full model name - zh_Hans: 输入模型全称 - credential_form_schemas: - - variable: endpoint_url - label: - zh_Hans: API endpoint URL - en_US: API endpoint URL - type: text-input - required: true - placeholder: - zh_Hans: Base URL, e.g. 
http://192.168.1.100:8000/v1 - en_US: Base URL, e.g. http://192.168.1.100:8000/v1 - - variable: mode - show_on: - - variable: __model_type - value: llm - label: - en_US: Completion mode - type: select - required: false - default: chat - placeholder: - zh_Hans: 选择对话类型 - en_US: Select completion mode - options: - - value: completion - label: - en_US: Completion - zh_Hans: 补全 - - value: chat - label: - en_US: Chat - zh_Hans: 对话 - - variable: context_size - label: - zh_Hans: 模型上下文长度 - en_US: Model context size - required: true - type: text-input - default: '4096' - placeholder: - zh_Hans: 在此输入您的模型上下文长度 - en_US: Enter your Model context size - - variable: max_tokens_to_sample - label: - zh_Hans: 最大 token 上限 - en_US: Upper bound for max tokens - show_on: - - variable: __model_type - value: llm - default: '4096' - type: text-input diff --git a/api/core/model_runtime/model_providers/oci/__init__.py b/api/core/model_runtime/model_providers/oci/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/oci/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/oci/_assets/icon_l_en.svg deleted file mode 100644 index 0981dfcff28c78..00000000000000 --- a/api/core/model_runtime/model_providers/oci/_assets/icon_l_en.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/oci/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/oci/_assets/icon_s_en.svg deleted file mode 100644 index 0981dfcff28c78..00000000000000 --- a/api/core/model_runtime/model_providers/oci/_assets/icon_s_en.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/oci/llm/cohere.command-r-16k.yaml b/api/core/model_runtime/model_providers/oci/llm/cohere.command-r-16k.yaml deleted file mode 100644 index eb60cbcd90f5cd..00000000000000 --- a/api/core/model_runtime/model_providers/oci/llm/cohere.command-r-16k.yaml 
+++ /dev/null @@ -1,52 +0,0 @@ -model: cohere.command-r-16k -label: - en_US: cohere.command-r-16k v1.2 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - default: 1 - max: 1.0 - - name: topP - use_template: top_p - default: 0.75 - min: 0 - max: 1 - - name: topK - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - default: 0 - min: 0 - max: 500 - - name: presencePenalty - use_template: presence_penalty - min: 0 - max: 1 - default: 0 - - name: frequencyPenalty - use_template: frequency_penalty - min: 0 - max: 1 - default: 0 - - name: maxTokens - use_template: max_tokens - default: 600 - max: 4000 -pricing: - input: '0.004' - output: '0.004' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/oci/llm/cohere.command-r-plus.yaml b/api/core/model_runtime/model_providers/oci/llm/cohere.command-r-plus.yaml deleted file mode 100644 index df31b0d0df355d..00000000000000 --- a/api/core/model_runtime/model_providers/oci/llm/cohere.command-r-plus.yaml +++ /dev/null @@ -1,52 +0,0 @@ -model: cohere.command-r-plus -label: - en_US: cohere.command-r-plus v1.2 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - default: 1 - max: 1.0 - - name: topP - use_template: top_p - default: 0.75 - min: 0 - max: 1 - - name: topK - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presencePenalty - use_template: presence_penalty - min: 0 - max: 1 - default: 0 - - name: frequencyPenalty - use_template: frequency_penalty - min: 0 - max: 1 - default: 0 - - name: maxTokens - use_template: max_tokens - default: 600 - max: 4000 -pricing: - input: '0.0219' - output: '0.0219' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/oci/llm/llm.py b/api/core/model_runtime/model_providers/oci/llm/llm.py deleted file mode 100644 index 1e1fc5b3ea89aa..00000000000000 --- a/api/core/model_runtime/model_providers/oci/llm/llm.py +++ /dev/null @@ -1,469 +0,0 @@ -import base64 -import copy -import json -import logging -from collections.abc import Generator -from typing import Optional, Union - -import oci -from oci.generative_ai_inference.models.base_chat_response import BaseChatResponse - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - -logger = logging.getLogger(__name__) - -request_template = { - "compartmentId": "", - "servingMode": {"modelId": "cohere.command-r-plus", "servingType": "ON_DEMAND"}, - "chatRequest": { - "apiFormat": "COHERE", - # "preambleOverride": "You are a helpful assistant.", - # "message": "Hello!", - # "chatHistory": [], - "maxTokens": 600, - "isStream": False, - "frequencyPenalty": 0, - "presencePenalty": 0, 
- "temperature": 1, - "topP": 0.75, - }, -} -oci_config_template = { - "user": "", - "fingerprint": "", - "tenancy": "", - "region": "", - "compartment_id": "", - "key_content": "", -} - - -class OCILargeLanguageModel(LargeLanguageModel): - # https://docs.oracle.com/en-us/iaas/Content/generative-ai/pretrained-models.htm - _supported_models = { - "meta.llama-3-70b-instruct": { - "system": True, - "multimodal": False, - "tool_call": False, - "stream_tool_call": False, - }, - "cohere.command-r-16k": { - "system": True, - "multimodal": False, - "tool_call": True, - "stream_tool_call": False, - }, - "cohere.command-r-plus": { - "system": True, - "multimodal": False, - "tool_call": True, - "stream_tool_call": False, - }, - } - - def _is_tool_call_supported(self, model_id: str, stream: bool = False) -> bool: - feature = self._supported_models.get(model_id) - if not feature: - return False - return feature["stream_tool_call"] if stream else feature["tool_call"] - - def _is_multimodal_supported(self, model_id: str) -> bool: - feature = self._supported_models.get(model_id) - if not feature: - return False - return feature["multimodal"] - - def _is_system_prompt_supported(self, model_id: str) -> bool: - feature = self._supported_models.get(model_id) - if not feature: - return False - return feature["system"] - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk 
generator result - """ - # print("model"+"*"*20) - # print(model) - # print("credentials"+"*"*20) - # print(credentials) - # print("model_parameters"+"*"*20) - # print(model_parameters) - # print("prompt_messages"+"*"*200) - # print(prompt_messages) - # print("tools"+"*"*20) - # print(tools) - - # invoke model - return self._generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return:md = genai.GenerativeModel(model) - """ - prompt = self._convert_messages_to_prompt(prompt_messages) - - return self._get_num_tokens_by_gpt2(prompt) - - def get_num_characters( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return:md = genai.GenerativeModel(model) - """ - prompt = self._convert_messages_to_prompt(prompt_messages) - - return len(prompt) - - def _convert_messages_to_prompt(self, messages: list[PromptMessage]) -> str: - """ - :param messages: List of PromptMessage to combine. - :return: Combined string with necessary human_prompt and ai_prompt tags. 
- """ - messages = messages.copy() # don't mutate the original list - - text = "".join(self._convert_one_message_to_text(message) for message in messages) - - return text.rstrip() - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - # Setup basic variables - # Auth Config - try: - ping_message = SystemPromptMessage(content="ping") - self._generate(model, credentials, [ping_message], {"maxTokens": 5}) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: credentials kwargs - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # config_kwargs = model_parameters.copy() - # config_kwargs['max_output_tokens'] = config_kwargs.pop('max_tokens_to_sample', None) - # if stop: - # config_kwargs["stop_sequences"] = stop - - # initialize client - # ref: https://docs.oracle.com/en-us/iaas/api/#/en/generative-ai-inference/20231130/ChatResult/Chat - oci_config = copy.deepcopy(oci_config_template) - if "oci_config_content" in credentials: - oci_config_content = base64.b64decode(credentials.get("oci_config_content")).decode("utf-8") - config_items = oci_config_content.split("/") - if len(config_items) != 5: - raise CredentialsValidateFailedError( - "oci_config_content should be base64.b64encode(" - 
"'user_ocid/fingerprint/tenancy_ocid/region/compartment_ocid'.encode('utf-8'))" - ) - oci_config["user"] = config_items[0] - oci_config["fingerprint"] = config_items[1] - oci_config["tenancy"] = config_items[2] - oci_config["region"] = config_items[3] - oci_config["compartment_id"] = config_items[4] - else: - raise CredentialsValidateFailedError("need to set oci_config_content in credentials ") - if "oci_key_content" in credentials: - oci_key_content = base64.b64decode(credentials.get("oci_key_content")).decode("utf-8") - oci_config["key_content"] = oci_key_content.encode(encoding="utf-8") - else: - raise CredentialsValidateFailedError("need to set oci_config_content in credentials ") - - # oci_config = oci.config.from_file('~/.oci/config', credentials.get('oci_api_profile')) - compartment_id = oci_config["compartment_id"] - client = oci.generative_ai_inference.GenerativeAiInferenceClient(config=oci_config) - # call embedding model - request_args = copy.deepcopy(request_template) - request_args["compartmentId"] = compartment_id - request_args["servingMode"]["modelId"] = model - - chat_history = [] - system_prompts = [] - # if "meta.llama" in model: - # request_args["chatRequest"]["apiFormat"] = "GENERIC" - request_args["chatRequest"]["maxTokens"] = model_parameters.pop("maxTokens", 600) - request_args["chatRequest"].update(model_parameters) - frequency_penalty = model_parameters.get("frequencyPenalty", 0) - presence_penalty = model_parameters.get("presencePenalty", 0) - if frequency_penalty > 0 and presence_penalty > 0: - raise InvokeBadRequestError("Cannot set both frequency penalty and presence penalty") - - # for msg in prompt_messages: # makes message roles strictly alternating - # content = self._format_message_to_glm_content(msg) - # if history and history[-1]["role"] == content["role"]: - # history[-1]["parts"].extend(content["parts"]) - # else: - # history.append(content) - - # temporary not implement the tool call function - valid_value = 
self._is_tool_call_supported(model, stream) - if tools is not None and len(tools) > 0: - if not valid_value: - raise InvokeBadRequestError("Does not support function calling") - if model.startswith("cohere"): - # print("run cohere " * 10) - for message in prompt_messages[:-1]: - text = "" - if isinstance(message.content, str): - text = message.content - if isinstance(message, UserPromptMessage): - chat_history.append({"role": "USER", "message": text}) - else: - chat_history.append({"role": "CHATBOT", "message": text}) - if isinstance(message, SystemPromptMessage): - if isinstance(message.content, str): - system_prompts.append(message.content) - args = { - "apiFormat": "COHERE", - "preambleOverride": " ".join(system_prompts), - "message": prompt_messages[-1].content, - "chatHistory": chat_history, - } - request_args["chatRequest"].update(args) - elif model.startswith("meta"): - # print("run meta " * 10) - meta_messages = [] - for message in prompt_messages: - text = message.content - meta_messages.append({"role": message.role.name, "content": [{"type": "TEXT", "text": text}]}) - args = {"apiFormat": "GENERIC", "messages": meta_messages, "numGenerations": 1, "topK": -1} - request_args["chatRequest"].update(args) - - if stream: - request_args["chatRequest"]["isStream"] = True - # print("final request" + "|" * 20) - # print(request_args) - response = client.chat(request_args) - # print(vars(response)) - - if stream: - return self._handle_generate_stream_response(model, credentials, response, prompt_messages) - - return self._handle_generate_response(model, credentials, response, prompt_messages) - - def _handle_generate_response( - self, model: str, credentials: dict, response: BaseChatResponse, prompt_messages: list[PromptMessage] - ) -> LLMResult: - """ - Handle llm response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response - """ - # transform assistant message 
to prompt message - assistant_prompt_message = AssistantPromptMessage(content=response.data.chat_response.text) - - # calculate num tokens - prompt_tokens = self.get_num_characters(model, credentials, prompt_messages) - completion_tokens = self.get_num_characters(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - result = LLMResult( - model=model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - ) - - return result - - def _handle_generate_stream_response( - self, model: str, credentials: dict, response: BaseChatResponse, prompt_messages: list[PromptMessage] - ) -> Generator: - """ - Handle llm stream response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator result - """ - index = -1 - events = response.data.events() - for stream in events: - chunk = json.loads(stream.data) - # print(chunk) - # chunk: {'apiFormat': 'COHERE', 'text': 'Hello'} - - # for chunk in response: - # for part in chunk.parts: - # if part.function_call: - # assistant_prompt_message.tool_calls = [ - # AssistantPromptMessage.ToolCall( - # id=part.function_call.name, - # type='function', - # function=AssistantPromptMessage.ToolCall.ToolCallFunction( - # name=part.function_call.name, - # arguments=json.dumps(dict(part.function_call.args.items())) - # ) - # ) - # ] - - if "finishReason" not in chunk: - assistant_prompt_message = AssistantPromptMessage(content="") - if model.startswith("cohere"): - if chunk["text"]: - assistant_prompt_message.content += chunk["text"] - elif model.startswith("meta"): - assistant_prompt_message.content += chunk["message"]["content"][0]["text"] - index += 1 - # transform assistant message to prompt message - yield LLMResultChunk( - model=model, - 
prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=assistant_prompt_message), - ) - else: - # calculate num tokens - prompt_tokens = self.get_num_characters(model, credentials, prompt_messages) - completion_tokens = self.get_num_characters(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - finish_reason=str(chunk["finishReason"]), - usage=usage, - ), - ) - - def _convert_one_message_to_text(self, message: PromptMessage) -> str: - """ - Convert a single message to a string. - - :param message: PromptMessage to convert. - :return: String representation of the message. - """ - human_prompt = "\n\nuser:" - ai_prompt = "\n\nmodel:" - - content = message.content - if isinstance(content, list): - content = "".join(c.data for c in content if c.type != PromptMessageContentType.IMAGE) - - if isinstance(message, UserPromptMessage): - message_text = f"{human_prompt} {content}" - elif isinstance(message, AssistantPromptMessage): - message_text = f"{ai_prompt} {content}" - elif isinstance(message, SystemPromptMessage | ToolPromptMessage): - message_text = f"{human_prompt} {content}" - else: - raise ValueError(f"Got unknown type {message}") - - return message_text - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [], - InvokeRateLimitError: [], - InvokeAuthorizationError: [], - InvokeBadRequestError: [], - } diff --git a/api/core/model_runtime/model_providers/oci/llm/meta.llama-3-70b-instruct.yaml b/api/core/model_runtime/model_providers/oci/llm/meta.llama-3-70b-instruct.yaml deleted file mode 100644 index dd5be107c07570..00000000000000 --- a/api/core/model_runtime/model_providers/oci/llm/meta.llama-3-70b-instruct.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: meta.llama-3-70b-instruct -label: - zh_Hans: meta.llama-3-70b-instruct - en_US: meta.llama-3-70b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - default: 1 - max: 2.0 - - name: topP - use_template: top_p - default: 0.75 - min: 0 - max: 1 - - name: topK - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presencePenalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 - - name: frequencyPenalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: maxTokens - use_template: max_tokens - default: 600 - max: 8000 -pricing: - input: '0.015' - output: '0.015' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/oci/oci.py b/api/core/model_runtime/model_providers/oci/oci.py deleted file mode 100644 index e182d2d0439d77..00000000000000 --- a/api/core/model_runtime/model_providers/oci/oci.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class OCIGENAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `cohere.command-r-plus` model for validate, - model_instance.validate_credentials(model="cohere.command-r-plus", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/oci/oci.yaml b/api/core/model_runtime/model_providers/oci/oci.yaml deleted file mode 100644 index f2f23e18f12073..00000000000000 --- a/api/core/model_runtime/model_providers/oci/oci.yaml +++ /dev/null @@ -1,42 +0,0 @@ -provider: oci -label: - en_US: OCIGenerativeAI -description: - en_US: Models provided by OCI, such as Cohere Command R and Cohere Command R+. - zh_Hans: OCI 提供的模型,例如 Cohere Command R 和 Cohere Command R+。 -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#FFFFFF" -help: - title: - en_US: Get your API Key from OCI - zh_Hans: 从 OCI 获取 API Key - url: - en_US: https://docs.cloud.oracle.com/Content/API/Concepts/sdkconfig.htm -supported_model_types: - - llm - - text-embedding - #- rerank -configurate_methods: - - predefined-model - #- customizable-model -provider_credential_schema: - credential_form_schemas: - - variable: oci_config_content - label: - en_US: oci api key config file's content - type: text-input - required: true - placeholder: - zh_Hans: 在此输入您的 oci api key config 文件的内容(base64.b64encode("user_ocid/fingerprint/tenancy_ocid/region/compartment_ocid".encode('utf-8')) ) - en_US: Enter your oci api key config file's content(base64.b64encode("user_ocid/fingerprint/tenancy_ocid/region/compartment_ocid".encode('utf-8')) ) - - variable: oci_key_content - label: - en_US: oci api key file's content - type: text-input - required: true - placeholder: - zh_Hans: 在此输入您的 oci api key 文件的内容(base64.b64encode("pem file content".encode('utf-8'))) - en_US: Enter your oci api key 
file's content(base64.b64encode("pem file content".encode('utf-8'))) diff --git a/api/core/model_runtime/model_providers/oci/text_embedding/__init__.py b/api/core/model_runtime/model_providers/oci/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/oci/text_embedding/_position.yaml b/api/core/model_runtime/model_providers/oci/text_embedding/_position.yaml deleted file mode 100644 index 149f1e3797850f..00000000000000 --- a/api/core/model_runtime/model_providers/oci/text_embedding/_position.yaml +++ /dev/null @@ -1,5 +0,0 @@ -- cohere.embed-english-light-v2.0 -- cohere.embed-english-light-v3.0 -- cohere.embed-english-v3.0 -- cohere.embed-multilingual-light-v3.0 -- cohere.embed-multilingual-v3.0 diff --git a/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-light-v2.0.yaml b/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-light-v2.0.yaml deleted file mode 100644 index 259d5b45b7a2f1..00000000000000 --- a/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-light-v2.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: cohere.embed-english-light-v2.0 -model_type: text-embedding -model_properties: - context_size: 1024 - max_chunks: 48 -pricing: - input: '0.001' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-light-v3.0.yaml b/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-light-v3.0.yaml deleted file mode 100644 index 065e7474c0bb97..00000000000000 --- a/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-light-v3.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: cohere.embed-english-light-v3.0 -model_type: text-embedding -model_properties: - context_size: 384 - max_chunks: 48 -pricing: - input: '0.001' - unit: '0.0001' - currency: USD diff --git 
a/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-v3.0.yaml b/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-v3.0.yaml deleted file mode 100644 index 3e2deea16a1d0b..00000000000000 --- a/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-english-v3.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: cohere.embed-english-v3.0 -model_type: text-embedding -model_properties: - context_size: 1024 - max_chunks: 48 -pricing: - input: '0.001' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-multilingual-light-v3.0.yaml b/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-multilingual-light-v3.0.yaml deleted file mode 100644 index 0d2b892c64290e..00000000000000 --- a/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-multilingual-light-v3.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: cohere.embed-multilingual-light-v3.0 -model_type: text-embedding -model_properties: - context_size: 384 - max_chunks: 48 -pricing: - input: '0.001' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-multilingual-v3.0.yaml b/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-multilingual-v3.0.yaml deleted file mode 100644 index 9ebe260b32875b..00000000000000 --- a/api/core/model_runtime/model_providers/oci/text_embedding/cohere.embed-multilingual-v3.0.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: cohere.embed-multilingual-v3.0 -model_type: text-embedding -model_properties: - context_size: 1024 - max_chunks: 48 -pricing: - input: '0.001' - unit: '0.0001' - currency: USD diff --git a/api/core/model_runtime/model_providers/ollama/__init__.py b/api/core/model_runtime/model_providers/ollama/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git 
a/api/core/model_runtime/model_providers/ollama/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/ollama/_assets/icon_l_en.svg deleted file mode 100644 index 39d8a1ece60a10..00000000000000 --- a/api/core/model_runtime/model_providers/ollama/_assets/icon_l_en.svg +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/ollama/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/ollama/_assets/icon_s_en.svg deleted file mode 100644 index f8482a96b99cbc..00000000000000 --- a/api/core/model_runtime/model_providers/ollama/_assets/icon_s_en.svg +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/ollama/llm/__init__.py b/api/core/model_runtime/model_providers/ollama/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/ollama/llm/llm.py b/api/core/model_runtime/model_providers/ollama/llm/llm.py deleted file mode 100644 index a7ea53e0e99c5f..00000000000000 --- a/api/core/model_runtime/model_providers/ollama/llm/llm.py +++ /dev/null @@ -1,726 +0,0 @@ -import json -import logging -import re -from collections.abc import Generator -from decimal import Decimal -from typing import Optional, Union, cast -from urllib.parse import urljoin - -import requests - -from core.model_runtime.entities.llm_entities import ( - LLMMode, - LLMResult, - LLMResultChunk, - LLMResultChunkDelta, -) -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - DefaultParameterName, - FetchFrom, - I18nObject, - ModelFeature, - ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, - PriceConfig, -) -from 
core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import ( - LargeLanguageModel, -) - -logger = logging.getLogger(__name__) - - -class OllamaLargeLanguageModel(LargeLanguageModel): - """ - Model class for Ollama large language model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - return self._generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - stop=stop, - stream=stream, - user=user, - ) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - # get model mode - model_mode = self.get_model_mode(model, credentials) - - if model_mode == LLMMode.CHAT: - # chat model - return self._num_tokens_from_messages(prompt_messages) - else: - 
first_prompt_message = prompt_messages[0] - if isinstance(first_prompt_message.content, str): - text = first_prompt_message.content - else: - text = "" - for message_content in first_prompt_message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - text = message_content.data - break - return self._get_num_tokens_by_gpt2(text) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._generate( - model=model, - credentials=credentials, - prompt_messages=[UserPromptMessage(content="ping")], - model_parameters={"num_predict": 5}, - stream=False, - ) - except InvokeError as ex: - raise CredentialsValidateFailedError(f"An error occurred during credentials validation: {ex.description}") - except Exception as ex: - raise CredentialsValidateFailedError(f"An error occurred during credentials validation: {str(ex)}") - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke llm completion model - - :param model: model name - :param credentials: credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - headers = {"Content-Type": "application/json"} - - endpoint_url = credentials["base_url"] - if not endpoint_url.endswith("/"): - endpoint_url += "/" - - # prepare the payload for a simple ping to the model - data = {"model": model, "stream": stream} - - if "format" in model_parameters: - data["format"] = model_parameters["format"] - 
del model_parameters["format"] - - if "keep_alive" in model_parameters: - data["keep_alive"] = model_parameters["keep_alive"] - del model_parameters["keep_alive"] - - data["options"] = model_parameters or {} - - if stop: - data["options"]["stop"] = stop - - completion_type = LLMMode.value_of(credentials["mode"]) - - if completion_type is LLMMode.CHAT: - endpoint_url = urljoin(endpoint_url, "api/chat") - data["messages"] = [self._convert_prompt_message_to_dict(m) for m in prompt_messages] - else: - endpoint_url = urljoin(endpoint_url, "api/generate") - first_prompt_message = prompt_messages[0] - if isinstance(first_prompt_message, UserPromptMessage): - first_prompt_message = cast(UserPromptMessage, first_prompt_message) - if isinstance(first_prompt_message.content, str): - data["prompt"] = first_prompt_message.content - else: - text = "" - images = [] - for message_content in first_prompt_message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - text = message_content.data - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - image_data = re.sub( - r"^data:image\/[a-zA-Z]+;base64,", - "", - message_content.data, - ) - images.append(image_data) - - data["prompt"] = text - data["images"] = images - - # send a post request to validate the credentials - response = requests.post(endpoint_url, headers=headers, json=data, timeout=(10, 300), stream=stream) - - response.encoding = "utf-8" - if response.status_code != 200: - raise InvokeError(f"API request failed with status code {response.status_code}: {response.text}") - - if stream: - return self._handle_generate_stream_response(model, credentials, completion_type, response, prompt_messages) - - return self._handle_generate_response(model, credentials, completion_type, response, prompt_messages) - - def _handle_generate_response( - self, - model: str, - 
credentials: dict, - completion_type: LLMMode, - response: requests.Response, - prompt_messages: list[PromptMessage], - ) -> LLMResult: - """ - Handle llm completion response - - :param model: model name - :param credentials: model credentials - :param completion_type: completion type - :param response: response - :param prompt_messages: prompt messages - :return: llm result - """ - response_json = response.json() - - if completion_type is LLMMode.CHAT: - message = response_json.get("message", {}) - response_content = message.get("content", "") - else: - response_content = response_json["response"] - - assistant_message = AssistantPromptMessage(content=response_content) - - if "prompt_eval_count" in response_json and "eval_count" in response_json: - # transform usage - prompt_tokens = response_json["prompt_eval_count"] - completion_tokens = response_json["eval_count"] - else: - # calculate num tokens - prompt_tokens = self._get_num_tokens_by_gpt2(prompt_messages[0].content) - completion_tokens = self._get_num_tokens_by_gpt2(assistant_message.content) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - result = LLMResult( - model=response_json["model"], - prompt_messages=prompt_messages, - message=assistant_message, - usage=usage, - ) - - return result - - def _handle_generate_stream_response( - self, - model: str, - credentials: dict, - completion_type: LLMMode, - response: requests.Response, - prompt_messages: list[PromptMessage], - ) -> Generator: - """ - Handle llm completion stream response - - :param model: model name - :param credentials: model credentials - :param completion_type: completion type - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator result - """ - full_text = "" - chunk_index = 0 - - def create_final_llm_result_chunk( - index: int, message: AssistantPromptMessage, finish_reason: str - ) -> 
LLMResultChunk: - # calculate num tokens - prompt_tokens = self._get_num_tokens_by_gpt2(prompt_messages[0].content) - completion_tokens = self._get_num_tokens_by_gpt2(full_text) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - return LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=message, - finish_reason=finish_reason, - usage=usage, - ), - ) - - for chunk in response.iter_lines(decode_unicode=True, delimiter="\n"): - if not chunk: - continue - - try: - chunk_json = json.loads(chunk) - # stream ended - except json.JSONDecodeError as e: - yield create_final_llm_result_chunk( - index=chunk_index, - message=AssistantPromptMessage(content=""), - finish_reason="Non-JSON encountered.", - ) - - chunk_index += 1 - break - - if completion_type is LLMMode.CHAT: - if not chunk_json: - continue - - if "message" not in chunk_json: - text = "" - else: - text = chunk_json.get("message").get("content", "") - else: - if not chunk_json: - continue - - # transform assistant message to prompt message - text = chunk_json["response"] - - assistant_prompt_message = AssistantPromptMessage(content=text) - - full_text += text - - if chunk_json["done"]: - # calculate num tokens - if "prompt_eval_count" in chunk_json: - prompt_tokens = chunk_json["prompt_eval_count"] - else: - prompt_message_content = prompt_messages[0].content - if isinstance(prompt_message_content, str): - prompt_tokens = self._get_num_tokens_by_gpt2(prompt_message_content) - else: - content_text = "" - for message_content in prompt_message_content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - content_text += message_content.data - prompt_tokens = self._get_num_tokens_by_gpt2(content_text) - - completion_tokens = chunk_json.get("eval_count", self._get_num_tokens_by_gpt2(full_text)) - - # transform usage 
- usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=chunk_json["model"], - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=chunk_index, - message=assistant_prompt_message, - finish_reason="stop", - usage=usage, - ), - ) - else: - yield LLMResultChunk( - model=chunk_json["model"], - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=chunk_index, - message=assistant_prompt_message, - ), - ) - - chunk_index += 1 - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict for Ollama API - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - text = "" - images = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - text = message_content.data - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - image_data = re.sub(r"^data:image\/[a-zA-Z]+;base64,", "", message_content.data) - images.append(image_data) - - message_dict = {"role": "user", "content": text, "images": images} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - else: - raise ValueError(f"Got unknown type {message}") - - return message_dict - - def _num_tokens_from_messages(self, messages: list[PromptMessage]) -> int: - """ - Calculate num tokens. 
- - :param messages: messages - """ - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - for key, value in message.items(): - num_tokens += self._get_num_tokens_by_gpt2(str(key)) - num_tokens += self._get_num_tokens_by_gpt2(str(value)) - - return num_tokens - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - Get customizable model schema. - - :param model: model name - :param credentials: credentials - - :return: model schema - """ - extras = {} - - if "vision_support" in credentials and credentials["vision_support"] == "true": - extras["features"] = [ModelFeature.VISION] - - entity = AIModelEntity( - model=model, - label=I18nObject(zh_Hans=model, en_US=model), - model_type=ModelType.LLM, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.MODE: credentials.get("mode"), - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", 4096)), - }, - parameter_rules=[ - ParameterRule( - name=DefaultParameterName.TEMPERATURE.value, - use_template=DefaultParameterName.TEMPERATURE.value, - label=I18nObject(en_US="Temperature", zh_Hans="温度"), - type=ParameterType.FLOAT, - help=I18nObject( - en_US="The temperature of the model. " - "Increasing the temperature will make the model answer " - "more creatively. (Default: 0.8)", - zh_Hans="模型的温度。增加温度将使模型的回答更具创造性。(默认值:0.8)", - ), - default=0.1, - min=0, - max=1, - ), - ParameterRule( - name=DefaultParameterName.TOP_P.value, - use_template=DefaultParameterName.TOP_P.value, - label=I18nObject(en_US="Top P", zh_Hans="Top P"), - type=ParameterType.FLOAT, - help=I18nObject( - en_US="Works together with top-k. A higher value (e.g., 0.95) will lead to " - "more diverse text, while a lower value (e.g., 0.5) will generate more " - "focused and conservative text. 
(Default: 0.9)", - zh_Hans="与top-k一起工作。较高的值(例如,0.95)会导致生成更多样化的文本,而较低的值(例如,0.5)会生成更专注和保守的文本。(默认值:0.9)", - ), - default=0.9, - min=0, - max=1, - ), - ParameterRule( - name="top_k", - label=I18nObject(en_US="Top K", zh_Hans="Top K"), - type=ParameterType.INT, - help=I18nObject( - en_US="Reduces the probability of generating nonsense. " - "A higher value (e.g. 100) will give more diverse answers, " - "while a lower value (e.g. 10) will be more conservative. (Default: 40)", - zh_Hans="减少生成无意义内容的可能性。较高的值(例如100)将提供更多样化的答案,而较低的值(例如10)将更为保守。(默认值:40)", - ), - min=1, - max=100, - ), - ParameterRule( - name="repeat_penalty", - label=I18nObject(en_US="Repeat Penalty"), - type=ParameterType.FLOAT, - help=I18nObject( - en_US="Sets how strongly to penalize repetitions. " - "A higher value (e.g., 1.5) will penalize repetitions more strongly, " - "while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1)", - zh_Hans="设置对重复内容的惩罚强度。一个较高的值(例如,1.5)会更强地惩罚重复内容,而一个较低的值(例如,0.9)则会相对宽容。(默认值:1.1)", - ), - min=-2, - max=2, - ), - ParameterRule( - name="num_predict", - use_template="max_tokens", - label=I18nObject(en_US="Num Predict", zh_Hans="最大令牌数预测"), - type=ParameterType.INT, - help=I18nObject( - en_US="Maximum number of tokens to predict when generating text. " - "(Default: 128, -1 = infinite generation, -2 = fill context)", - zh_Hans="生成文本时预测的最大令牌数。(默认值:128,-1 = 无限生成,-2 = 填充上下文)", - ), - default=(512 if int(credentials.get("max_tokens", 4096)) >= 768 else 128), - min=-2, - max=int(credentials.get("max_tokens", 4096)), - ), - ParameterRule( - name="mirostat", - label=I18nObject(en_US="Mirostat sampling", zh_Hans="Mirostat 采样"), - type=ParameterType.INT, - help=I18nObject( - en_US="Enable Mirostat sampling for controlling perplexity. 
" - "(default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0)", - zh_Hans="启用 Mirostat 采样以控制困惑度。" - "(默认值:0,0 = 禁用,1 = Mirostat,2 = Mirostat 2.0)", - ), - min=0, - max=2, - ), - ParameterRule( - name="mirostat_eta", - label=I18nObject(en_US="Mirostat Eta", zh_Hans="学习率"), - type=ParameterType.FLOAT, - help=I18nObject( - en_US="Influences how quickly the algorithm responds to feedback from " - "the generated text. A lower learning rate will result in slower adjustments, " - "while a higher learning rate will make the algorithm more responsive. " - "(Default: 0.1)", - zh_Hans="影响算法对生成文本反馈响应的速度。较低的学习率会导致调整速度变慢,而较高的学习率会使得算法更加灵敏。(默认值:0.1)", - ), - precision=1, - ), - ParameterRule( - name="mirostat_tau", - label=I18nObject(en_US="Mirostat Tau", zh_Hans="文本连贯度"), - type=ParameterType.FLOAT, - help=I18nObject( - en_US="Controls the balance between coherence and diversity of the output. " - "A lower value will result in more focused and coherent text. (Default: 5.0)", - zh_Hans="控制输出的连贯性和多样性之间的平衡。较低的值会导致更专注和连贯的文本。(默认值:5.0)", - ), - precision=1, - ), - ParameterRule( - name="num_ctx", - label=I18nObject(en_US="Size of context window", zh_Hans="上下文窗口大小"), - type=ParameterType.INT, - help=I18nObject( - en_US="Sets the size of the context window used to generate the next token. (Default: 2048)", - zh_Hans="设置用于生成下一个标记的上下文窗口大小。(默认值:2048)", - ), - default=2048, - min=1, - ), - ParameterRule( - name="num_gpu", - label=I18nObject(en_US="GPU Layers", zh_Hans="GPU 层数"), - type=ParameterType.INT, - help=I18nObject( - en_US="The number of layers to offload to the GPU(s). " - "On macOS it defaults to 1 to enable metal support, 0 to disable." - "As long as a model fits into one gpu it stays in one. " - "It does not set the number of GPU(s). 
", - zh_Hans="加载到 GPU 的层数。在 macOS 上,默认为 1 以启用 Metal 支持,设置为 0 则禁用。" - "只要模型适合一个 GPU,它就保留在其中。它不设置 GPU 的数量。", - ), - min=-1, - default=1, - ), - ParameterRule( - name="num_thread", - label=I18nObject(en_US="Num Thread", zh_Hans="线程数"), - type=ParameterType.INT, - help=I18nObject( - en_US="Sets the number of threads to use during computation. " - "By default, Ollama will detect this for optimal performance. " - "It is recommended to set this value to the number of physical CPU cores " - "your system has (as opposed to the logical number of cores).", - zh_Hans="设置计算过程中使用的线程数。默认情况下,Ollama会检测以获得最佳性能。建议将此值设置为系统拥有的物理CPU核心数(而不是逻辑核心数)。", - ), - min=1, - ), - ParameterRule( - name="repeat_last_n", - label=I18nObject(en_US="Repeat last N", zh_Hans="回溯内容"), - type=ParameterType.INT, - help=I18nObject( - en_US="Sets how far back for the model to look back to prevent repetition. " - "(Default: 64, 0 = disabled, -1 = num_ctx)", - zh_Hans="设置模型回溯多远的内容以防止重复。(默认值:64,0 = 禁用,-1 = num_ctx)", - ), - min=-1, - ), - ParameterRule( - name="tfs_z", - label=I18nObject(en_US="TFS Z", zh_Hans="减少标记影响"), - type=ParameterType.FLOAT, - help=I18nObject( - en_US="Tail free sampling is used to reduce the impact of less probable tokens " - "from the output. A higher value (e.g., 2.0) will reduce the impact more, " - "while a value of 1.0 disables this setting. (default: 1)", - zh_Hans="用于减少输出中不太可能的标记的影响。较高的值(例如,2.0)会更多地减少这种影响,而1.0的值则会禁用此设置。(默认值:1)", - ), - precision=1, - ), - ParameterRule( - name="seed", - label=I18nObject(en_US="Seed", zh_Hans="随机数种子"), - type=ParameterType.INT, - help=I18nObject( - en_US="Sets the random number seed to use for generation. Setting this to " - "a specific number will make the model generate the same text for " - "the same prompt. 
(Default: 0)", - zh_Hans="设置用于生成的随机数种子。将此设置为特定数字将使模型对相同的提示生成相同的文本。(默认值:0)", - ), - ), - ParameterRule( - name="keep_alive", - label=I18nObject(en_US="Keep Alive", zh_Hans="模型存活时间"), - type=ParameterType.STRING, - help=I18nObject( - en_US="Sets how long the model is kept in memory after generating a response. " - "This must be a duration string with a unit (e.g., '10m' for 10 minutes or '24h' for 24 hours)." - " A negative number keeps the model loaded indefinitely, and '0' unloads the model" - " immediately after generating a response." - " Valid time units are 's','m','h'. (Default: 5m)", - zh_Hans="设置模型在生成响应后在内存中保留的时间。" - "这必须是一个带有单位的持续时间字符串(例如,'10m' 表示10分钟,'24h' 表示24小时)。" - "负数表示无限期地保留模型,'0'表示在生成响应后立即卸载模型。" - "有效的时间单位有 's'(秒)、'm'(分钟)、'h'(小时)。(默认值:5m)", - ), - ), - ParameterRule( - name="format", - label=I18nObject(en_US="Format", zh_Hans="返回格式"), - type=ParameterType.STRING, - help=I18nObject( - en_US="the format to return a response in. Currently the only accepted value is json.", - zh_Hans="返回响应的格式。目前唯一接受的值是json。", - ), - options=["json"], - ), - ], - pricing=PriceConfig( - input=Decimal(credentials.get("input_price", 0)), - output=Decimal(credentials.get("output_price", 0)), - unit=Decimal(credentials.get("unit", 0)), - currency=credentials.get("currency", "USD"), - ), - **extras, - ) - - return entity - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeAuthorizationError: [ - requests.exceptions.InvalidHeader, # Missing or Invalid API Key - ], - InvokeBadRequestError: [ - requests.exceptions.HTTPError, # Invalid Endpoint URL or model name - requests.exceptions.InvalidURL, # Misconfigured request or other API error - ], - InvokeRateLimitError: [ - requests.exceptions.RetryError # Too many requests sent in a short period of time - ], - InvokeServerUnavailableError: [ - requests.exceptions.ConnectionError, # Engine Overloaded - requests.exceptions.HTTPError, # Server Error - ], - InvokeConnectionError: [ - requests.exceptions.ConnectTimeout, # Timeout - requests.exceptions.ReadTimeout, # Timeout - ], - } diff --git a/api/core/model_runtime/model_providers/ollama/ollama.py b/api/core/model_runtime/model_providers/ollama/ollama.py deleted file mode 100644 index 115280193a5ed6..00000000000000 --- a/api/core/model_runtime/model_providers/ollama/ollama.py +++ /dev/null @@ -1,16 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class OpenAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - pass diff --git a/api/core/model_runtime/model_providers/ollama/ollama.yaml b/api/core/model_runtime/model_providers/ollama/ollama.yaml deleted file mode 100644 index 33747753bd9f67..00000000000000 --- a/api/core/model_runtime/model_providers/ollama/ollama.yaml +++ /dev/null @@ -1,98 +0,0 @@ -provider: ollama -label: - en_US: Ollama -icon_large: - en_US: icon_l_en.svg -icon_small: - en_US: icon_s_en.svg -background: "#F9FAFB" -help: - title: - en_US: How to integrate with Ollama - zh_Hans: 如何集成 Ollama - url: - en_US: https://docs.dify.ai/tutorials/model-configuration/ollama -supported_model_types: - - llm - - text-embedding -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: base_url - label: - zh_Hans: 基础 URL - en_US: Base URL - type: text-input - required: true - placeholder: - zh_Hans: Ollama server 的基础 URL,例如 http://192.168.1.100:11434 - en_US: Base url of Ollama server, e.g. 
http://192.168.1.100:11434 - - variable: mode - show_on: - - variable: __model_type - value: llm - label: - zh_Hans: 模型类型 - en_US: Completion mode - type: select - required: true - default: chat - placeholder: - zh_Hans: 选择对话类型 - en_US: Select completion mode - options: - - value: completion - label: - en_US: Completion - zh_Hans: 补全 - - value: chat - label: - en_US: Chat - zh_Hans: 对话 - - variable: context_size - label: - zh_Hans: 模型上下文长度 - en_US: Model context size - required: true - type: text-input - default: '4096' - placeholder: - zh_Hans: 在此输入您的模型上下文长度 - en_US: Enter your Model context size - - variable: max_tokens - label: - zh_Hans: 最大 token 上限 - en_US: Upper bound for max tokens - show_on: - - variable: __model_type - value: llm - default: '4096' - type: text-input - required: true - - variable: vision_support - label: - zh_Hans: 是否支持 Vision - en_US: Vision support - show_on: - - variable: __model_type - value: llm - default: 'false' - type: radio - required: false - options: - - value: 'true' - label: - en_US: 'Yes' - zh_Hans: 是 - - value: 'false' - label: - en_US: 'No' - zh_Hans: 否 diff --git a/api/core/model_runtime/model_providers/ollama/text_embedding/__init__.py b/api/core/model_runtime/model_providers/ollama/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openai/__init__.py b/api/core/model_runtime/model_providers/openai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openai/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/openai/_assets/icon_l_en.svg deleted file mode 100644 index dae73f58d71908..00000000000000 --- a/api/core/model_runtime/model_providers/openai/_assets/icon_l_en.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/openai/_assets/icon_s_en.svg 
b/api/core/model_runtime/model_providers/openai/_assets/icon_s_en.svg deleted file mode 100644 index 70686f9b3b58aa..00000000000000 --- a/api/core/model_runtime/model_providers/openai/_assets/icon_s_en.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/api/core/model_runtime/model_providers/openai/_common.py b/api/core/model_runtime/model_providers/openai/_common.py deleted file mode 100644 index 2181bb4f08fd8f..00000000000000 --- a/api/core/model_runtime/model_providers/openai/_common.py +++ /dev/null @@ -1,60 +0,0 @@ -from collections.abc import Mapping - -import openai -from httpx import Timeout - -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) - - -class _CommonOpenAI: - def _to_credential_kwargs(self, credentials: Mapping) -> dict: - """ - Transform credentials to kwargs for model instance - - :param credentials: - :return: - """ - credentials_kwargs = { - "api_key": credentials["openai_api_key"], - "timeout": Timeout(315.0, read=300.0, write=10.0, connect=5.0), - "max_retries": 1, - } - - if credentials.get("openai_api_base"): - openai_api_base = credentials["openai_api_base"].rstrip("/") - credentials_kwargs["base_url"] = openai_api_base + "/v1" - - if "openai_organization" in credentials: - credentials_kwargs["organization"] = credentials["openai_organization"] - - return credentials_kwargs - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [openai.APIConnectionError, openai.APITimeoutError], - InvokeServerUnavailableError: [openai.InternalServerError], - InvokeRateLimitError: [openai.RateLimitError], - InvokeAuthorizationError: [openai.AuthenticationError, openai.PermissionDeniedError], - InvokeBadRequestError: [ - openai.BadRequestError, - openai.NotFoundError, - openai.UnprocessableEntityError, - openai.APIError, - ], - } diff --git a/api/core/model_runtime/model_providers/openai/llm/__init__.py b/api/core/model_runtime/model_providers/openai/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openai/llm/_position.yaml b/api/core/model_runtime/model_providers/openai/llm/_position.yaml deleted file mode 100644 index 7501bc1164dc43..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/_position.yaml +++ /dev/null @@ -1,26 +0,0 @@ -- gpt-4 -- gpt-4o -- gpt-4o-2024-05-13 -- gpt-4o-2024-08-06 -- chatgpt-4o-latest -- gpt-4o-mini -- gpt-4o-mini-2024-07-18 -- o1-preview -- o1-preview-2024-09-12 -- o1-mini -- o1-mini-2024-09-12 -- gpt-4-turbo -- gpt-4-turbo-2024-04-09 -- gpt-4-turbo-preview -- gpt-4-32k -- gpt-4-1106-preview -- gpt-4-0125-preview -- gpt-4-vision-preview -- gpt-3.5-turbo -- gpt-3.5-turbo-16k -- gpt-3.5-turbo-16k-0613 -- gpt-3.5-turbo-0125 -- gpt-3.5-turbo-1106 -- gpt-3.5-turbo-0613 -- gpt-3.5-turbo-instruct -- text-davinci-003 diff --git a/api/core/model_runtime/model_providers/openai/llm/chatgpt-4o-latest.yaml b/api/core/model_runtime/model_providers/openai/llm/chatgpt-4o-latest.yaml deleted file mode 100644 index b47449a49abc2e..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/chatgpt-4o-latest.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: chatgpt-4o-latest -label: - zh_Hans: chatgpt-4o-latest - en_US: chatgpt-4o-latest -model_type: llm -features: - - multi-tool-call - - agent-thought - - 
stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 16384 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '2.50' - output: '10.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-0125.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-0125.yaml deleted file mode 100644 index ffa725ec40f4f5..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-0125.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: gpt-3.5-turbo-0125 -label: - zh_Hans: gpt-3.5-turbo-0125 - en_US: gpt-3.5-turbo-0125 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 16385 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.0005' - output: '0.0015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-0613.yaml 
b/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-0613.yaml deleted file mode 100644 index a1ad07d7129568..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-0613.yaml +++ /dev/null @@ -1,34 +0,0 @@ -model: gpt-3.5-turbo-0613 -label: - zh_Hans: gpt-3.5-turbo-0613 - en_US: gpt-3.5-turbo-0613 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: '0.0015' - output: '0.002' - unit: '0.001' - currency: USD -deprecated: true diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-1106.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-1106.yaml deleted file mode 100644 index 21150fc3a6df61..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-1106.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: gpt-3.5-turbo-1106 -label: - zh_Hans: gpt-3.5-turbo-1106 - en_US: gpt-3.5-turbo-1106 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 16385 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model 
must output - required: false - options: - - text - - json_object -pricing: - input: '0.001' - output: '0.002' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-16k-0613.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-16k-0613.yaml deleted file mode 100644 index 4e302792842415..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-16k-0613.yaml +++ /dev/null @@ -1,34 +0,0 @@ -model: gpt-3.5-turbo-16k-0613 -label: - zh_Hans: gpt-3.5-turbo-16k-0613 - en_US: gpt-3.5-turbo-16k-0613 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 16385 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 16385 - - name: response_format - use_template: response_format -pricing: - input: '0.003' - output: '0.004' - unit: '0.001' - currency: USD -deprecated: true diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-16k.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-16k.yaml deleted file mode 100644 index 3684c1945c7d29..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-16k.yaml +++ /dev/null @@ -1,33 +0,0 @@ -model: gpt-3.5-turbo-16k -label: - zh_Hans: gpt-3.5-turbo-16k - en_US: gpt-3.5-turbo-16k -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 16385 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - 
name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 16385 - - name: response_format - use_template: response_format -pricing: - input: '0.003' - output: '0.004' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-instruct.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-instruct.yaml deleted file mode 100644 index ad831539e09d1a..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: gpt-3.5-turbo-instruct -label: - zh_Hans: gpt-3.5-turbo-instruct - en_US: gpt-3.5-turbo-instruct -model_type: llm -features: [ ] -model_properties: - mode: completion - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: '0.0015' - output: '0.002' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo.yaml deleted file mode 100644 index d3a8ee535a9d16..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-3.5-turbo.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: gpt-3.5-turbo -label: - zh_Hans: gpt-3.5-turbo - en_US: gpt-3.5-turbo -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 16385 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - 
use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.0005' - output: '0.0015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4-0125-preview.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4-0125-preview.yaml deleted file mode 100644 index ac4ec5840bd771..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4-0125-preview.yaml +++ /dev/null @@ -1,56 +0,0 @@ -model: gpt-4-0125-preview -label: - zh_Hans: gpt-4-0125-preview - en_US: gpt-4-0125-preview -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. 
- required: false - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.01' - output: '0.03' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4-1106-preview.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4-1106-preview.yaml deleted file mode 100644 index d7752397701f9a..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4-1106-preview.yaml +++ /dev/null @@ -1,56 +0,0 @@ -model: gpt-4-1106-preview -label: - zh_Hans: gpt-4-1106-preview - en_US: gpt-4-1106-preview -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. 
- required: false - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.01' - output: '0.03' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4-32k.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4-32k.yaml deleted file mode 100644 index 8358425e6d2909..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4-32k.yaml +++ /dev/null @@ -1,56 +0,0 @@ -model: gpt-4-32k -label: - zh_Hans: gpt-4-32k - en_US: gpt-4-32k -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 32768 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. 
- required: false - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.06' - output: '0.12' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo-2024-04-09.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo-2024-04-09.yaml deleted file mode 100644 index 0234499164abf4..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo-2024-04-09.yaml +++ /dev/null @@ -1,57 +0,0 @@ -model: gpt-4-turbo-2024-04-09 -label: - zh_Hans: gpt-4-turbo-2024-04-09 - en_US: gpt-4-turbo-2024-04-09 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. 
- required: false - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.01' - output: '0.03' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo-preview.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo-preview.yaml deleted file mode 100644 index 8d29cf0c04a1df..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo-preview.yaml +++ /dev/null @@ -1,56 +0,0 @@ -model: gpt-4-turbo-preview -label: - zh_Hans: gpt-4-turbo-preview - en_US: gpt-4-turbo-preview -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. 
- required: false - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.01' - output: '0.03' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo.yaml deleted file mode 100644 index b25ff6a81269fa..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4-turbo.yaml +++ /dev/null @@ -1,57 +0,0 @@ -model: gpt-4-turbo -label: - zh_Hans: gpt-4-turbo - en_US: gpt-4-turbo -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. 
- required: false - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.01' - output: '0.03' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4-vision-preview.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4-vision-preview.yaml deleted file mode 100644 index 07037c66438dd2..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4-vision-preview.yaml +++ /dev/null @@ -1,54 +0,0 @@ -model: gpt-4-vision-preview -label: - zh_Hans: gpt-4-vision-preview - en_US: gpt-4-vision-preview -model_type: llm -features: - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. 
- required: false - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.01' - output: '0.03' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4.yaml deleted file mode 100644 index f7b5138b7df366..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4.yaml +++ /dev/null @@ -1,56 +0,0 @@ -model: gpt-4 -label: - zh_Hans: gpt-4 - en_US: gpt-4 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. 
- required: false - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '0.03' - output: '0.06' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4o-2024-05-13.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4o-2024-05-13.yaml deleted file mode 100644 index b630d6f63075c2..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4o-2024-05-13.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: gpt-4o-2024-05-13 -label: - zh_Hans: gpt-4o-2024-05-13 - en_US: gpt-4o-2024-05-13 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '5.00' - output: '15.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4o-2024-08-06.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4o-2024-08-06.yaml deleted file mode 100644 index 73b7f6970076c0..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4o-2024-08-06.yaml +++ /dev/null @@ -1,47 +0,0 @@ -model: gpt-4o-2024-08-06 -label: - zh_Hans: gpt-4o-2024-08-06 - en_US: gpt-4o-2024-08-06 -model_type: llm -features: - - multi-tool-call 
- - agent-thought - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 16384 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object - - json_schema - - name: json_schema - use_template: json_schema -pricing: - input: '2.50' - output: '10.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4o-mini-2024-07-18.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4o-mini-2024-07-18.yaml deleted file mode 100644 index df38270f79b1c3..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4o-mini-2024-07-18.yaml +++ /dev/null @@ -1,47 +0,0 @@ -model: gpt-4o-mini-2024-07-18 -label: - zh_Hans: gpt-4o-mini-2024-07-18 - en_US: gpt-4o-mini-2024-07-18 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 16384 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object - - json_schema - - name: json_schema - 
use_template: json_schema -pricing: - input: '0.15' - output: '0.60' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4o-mini.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4o-mini.yaml deleted file mode 100644 index 5e3c94fbe255c0..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4o-mini.yaml +++ /dev/null @@ -1,47 +0,0 @@ -model: gpt-4o-mini -label: - zh_Hans: gpt-4o-mini - en_US: gpt-4o-mini -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 16384 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object - - json_schema - - name: json_schema - use_template: json_schema -pricing: - input: '0.15' - output: '0.60' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/gpt-4o.yaml b/api/core/model_runtime/model_providers/openai/llm/gpt-4o.yaml deleted file mode 100644 index 3090a9e090c2c5..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/gpt-4o.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: gpt-4o -label: - zh_Hans: gpt-4o - en_US: gpt-4o -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: 
presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '5.00' - output: '15.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/llm.py b/api/core/model_runtime/model_providers/openai/llm/llm.py deleted file mode 100644 index d42fce528a8f30..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/llm.py +++ /dev/null @@ -1,1182 +0,0 @@ -import json -import logging -from collections.abc import Generator -from typing import Optional, Union, cast - -import tiktoken -from openai import OpenAI, Stream -from openai.types import Completion -from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessageToolCall -from openai.types.chat.chat_completion_chunk import ChoiceDeltaFunctionCall, ChoiceDeltaToolCall -from openai.types.chat.chat_completion_message import FunctionCall - -from core.model_runtime.callbacks.base_callback import Callback -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, I18nObject, ModelType, PriceConfig -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from 
core.model_runtime.model_providers.openai._common import _CommonOpenAI - -logger = logging.getLogger(__name__) - -OPENAI_BLOCK_MODE_PROMPT = """You should always follow the instructions and output a valid {{block}} object. -The structure of the {{block}} object you can found in the instructions, use {"answer": "$your_answer"} as the default structure -if you are not sure about the structure. - - -{{instructions}} - -""" # noqa: E501 - - -class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel): - """ - Model class for OpenAI large language model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # handle fine tune remote models - base_model = model - if model.startswith("ft:"): - base_model = model.split(":")[1] - - # get model mode - model_mode = self.get_model_mode(base_model, credentials) - - if model_mode == LLMMode.CHAT: - # chat model - return self._chat_generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - else: - # text completion model - return self._generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - stop=stop, - stream=stream, - user=user, - ) - - def _code_block_mode_wrapper( - self, - model: str, 
- credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - callbacks: list[Callback] = None, - ) -> Union[LLMResult, Generator]: - """ - Code block mode wrapper for invoking large language model - """ - # handle fine tune remote models - base_model = model - if model.startswith("ft:"): - base_model = model.split(":")[1] - - # get model mode - model_mode = self.get_model_mode(base_model, credentials) - - # transform response format - if "response_format" in model_parameters and model_parameters["response_format"] in {"JSON", "XML"}: - stop = stop or [] - if model_mode == LLMMode.CHAT: - # chat model - self._transform_chat_json_prompts( - model=base_model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - response_format=model_parameters["response_format"], - ) - else: - self._transform_completion_json_prompts( - model=base_model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - response_format=model_parameters["response_format"], - ) - model_parameters.pop("response_format") - - return self._invoke( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - def _transform_chat_json_prompts( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - response_format: str = "JSON", - ) -> None: - """ - Transform json prompts - """ - if "```\n" not in stop: - stop.append("```\n") - if 
"\n```" not in stop: - stop.append("\n```") - - # check if there is a system message - if len(prompt_messages) > 0 and isinstance(prompt_messages[0], SystemPromptMessage): - # override the system message - prompt_messages[0] = SystemPromptMessage( - content=OPENAI_BLOCK_MODE_PROMPT.replace("{{instructions}}", prompt_messages[0].content).replace( - "{{block}}", response_format - ) - ) - prompt_messages.append(AssistantPromptMessage(content=f"\n```{response_format}\n")) - else: - # insert the system message - prompt_messages.insert( - 0, - SystemPromptMessage( - content=OPENAI_BLOCK_MODE_PROMPT.replace( - "{{instructions}}", f"Please output a valid {response_format} object." - ).replace("{{block}}", response_format) - ), - ) - prompt_messages.append(AssistantPromptMessage(content=f"\n```{response_format}")) - - def _transform_completion_json_prompts( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - response_format: str = "JSON", - ) -> None: - """ - Transform json prompts - """ - if "```\n" not in stop: - stop.append("```\n") - if "\n```" not in stop: - stop.append("\n```") - - # override the last user message - user_message = None - for i in range(len(prompt_messages) - 1, -1, -1): - if isinstance(prompt_messages[i], UserPromptMessage): - user_message = prompt_messages[i] - break - - if user_message: - if prompt_messages[i].content[-11:] == "Assistant: ": - # now we are in the chat app, remove the last assistant message - prompt_messages[i].content = prompt_messages[i].content[:-11] - prompt_messages[i] = UserPromptMessage( - content=OPENAI_BLOCK_MODE_PROMPT.replace("{{instructions}}", user_message.content).replace( - "{{block}}", response_format - ) - ) - prompt_messages[i].content += f"Assistant:\n```{response_format}\n" - else: - prompt_messages[i] = UserPromptMessage( - 
content=OPENAI_BLOCK_MODE_PROMPT.replace("{{instructions}}", user_message.content).replace( - "{{block}}", response_format - ) - ) - prompt_messages[i].content += f"\n```{response_format}\n" - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - # handle fine tune remote models - if model.startswith("ft:"): - base_model = model.split(":")[1] - else: - base_model = model - - # get model mode - model_mode = self.get_model_mode(model) - - if model_mode == LLMMode.CHAT: - # chat model - return self._num_tokens_from_messages(base_model, prompt_messages, tools) - else: - # text completion model, do not support tool calling - return self._num_tokens_from_string(base_model, prompt_messages[0].content) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - # handle fine tune remote models - base_model = model - # fine-tuned model name likes ft:gpt-3.5-turbo-0613:personal::xxxxx - if model.startswith("ft:"): - base_model = model.split(":")[1] - - # check if model exists - remote_models = self.remote_models(credentials) - remote_model_map = {model.model: model for model in remote_models} - if model not in remote_model_map: - raise CredentialsValidateFailedError(f"Fine-tuned model {model} not found") - - # get model mode - model_mode = self.get_model_mode(base_model, credentials) - - if model_mode == LLMMode.CHAT: - # chat model - 
client.chat.completions.create( - messages=[{"role": "user", "content": "ping"}], - model=model, - temperature=0, - max_tokens=20, - stream=False, - ) - else: - # text completion model - client.completions.create( - prompt="ping", - model=model, - temperature=0, - max_tokens=20, - stream=False, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def remote_models(self, credentials: dict) -> list[AIModelEntity]: - """ - Return remote models if credentials are provided. - - :param credentials: provider credentials - :return: - """ - # get predefined models - predefined_models = self.predefined_models() - predefined_models_map = {model.model: model for model in predefined_models} - - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - # get all remote models - remote_models = client.models.list() - - fine_tune_models = [model for model in remote_models if model.id.startswith("ft:")] - - ai_model_entities = [] - for model in fine_tune_models: - base_model = model.id.split(":")[1] - - base_model_schema = None - for predefined_model_name, predefined_model in predefined_models_map.items(): - if predefined_model_name in base_model: - base_model_schema = predefined_model - - if not base_model_schema: - continue - - ai_model_entity = AIModelEntity( - model=model.id, - label=I18nObject(zh_Hans=model.id, en_US=model.id), - model_type=ModelType.LLM, - features=base_model_schema.features, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties=base_model_schema.model_properties, - parameter_rules=base_model_schema.parameter_rules, - pricing=PriceConfig(input=0.003, output=0.006, unit=0.001, currency="USD"), - ) - - ai_model_entities.append(ai_model_entity) - - return ai_model_entities - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - stop: Optional[list[str]] 
= None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke llm completion model - - :param model: model name - :param credentials: credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - - # init model client - client = OpenAI(**credentials_kwargs) - - extra_model_kwargs = {} - - if stop: - extra_model_kwargs["stop"] = stop - - if user: - extra_model_kwargs["user"] = user - - if stream: - extra_model_kwargs["stream_options"] = {"include_usage": True} - - # text completion model - response = client.completions.create( - prompt=prompt_messages[0].content, model=model, stream=stream, **model_parameters, **extra_model_kwargs - ) - - if stream: - return self._handle_generate_stream_response(model, credentials, response, prompt_messages) - - return self._handle_generate_response(model, credentials, response, prompt_messages) - - def _handle_generate_response( - self, model: str, credentials: dict, response: Completion, prompt_messages: list[PromptMessage] - ) -> LLMResult: - """ - Handle llm completion response - - :param model: model name - :param credentials: model credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm result - """ - assistant_text = response.choices[0].text - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_text) - - # calculate num tokens - if response.usage: - # transform usage - prompt_tokens = response.usage.prompt_tokens - completion_tokens = response.usage.completion_tokens - else: - # calculate num tokens - prompt_tokens = self._num_tokens_from_string(model, 
prompt_messages[0].content) - completion_tokens = self._num_tokens_from_string(model, assistant_text) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - result = LLMResult( - model=response.model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - system_fingerprint=response.system_fingerprint, - ) - - return result - - def _handle_generate_stream_response( - self, model: str, credentials: dict, response: Stream[Completion], prompt_messages: list[PromptMessage] - ) -> Generator: - """ - Handle llm completion stream response - - :param model: model name - :param credentials: model credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator result - """ - full_text = "" - prompt_tokens = 0 - completion_tokens = 0 - - final_chunk = LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=""), - ), - ) - - for chunk in response: - if len(chunk.choices) == 0: - if chunk.usage: - # calculate num tokens - prompt_tokens = chunk.usage.prompt_tokens - completion_tokens = chunk.usage.completion_tokens - continue - - delta = chunk.choices[0] - - if delta.finish_reason is None and (delta.text is None or delta.text == ""): - continue - - # transform assistant message to prompt message - text = delta.text or "" - assistant_prompt_message = AssistantPromptMessage(content=text) - - full_text += text - - if delta.finish_reason is not None: - final_chunk = LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - finish_reason=delta.finish_reason, - ), - ) - else: - yield LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - 
system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - ), - ) - - if not prompt_tokens: - prompt_tokens = self._num_tokens_from_string(model, prompt_messages[0].content) - - if not completion_tokens: - completion_tokens = self._num_tokens_from_string(model, full_text) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - final_chunk.delta.usage = usage - - yield final_chunk - - def _chat_generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke llm chat model - - :param model: model name - :param credentials: credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - - # init model client - client = OpenAI(**credentials_kwargs) - - response_format = model_parameters.get("response_format") - if response_format: - if response_format == "json_schema": - json_schema = model_parameters.get("json_schema") - if not json_schema: - raise ValueError("Must define JSON Schema when the response format is json_schema") - try: - schema = json.loads(json_schema) - except: - raise ValueError(f"not correct json_schema format: {json_schema}") - model_parameters.pop("json_schema") - model_parameters["response_format"] = {"type": "json_schema", "json_schema": schema} - else: - model_parameters["response_format"] = {"type": response_format} - - 
extra_model_kwargs = {} - - if tools: - # extra_model_kwargs['tools'] = [helper.dump_model(PromptMessageFunction(function=tool)) for tool in tools] - extra_model_kwargs["functions"] = [ - {"name": tool.name, "description": tool.description, "parameters": tool.parameters} for tool in tools - ] - - if stop: - extra_model_kwargs["stop"] = stop - - if user: - extra_model_kwargs["user"] = user - - if stream: - extra_model_kwargs["stream_options"] = {"include_usage": True} - - # clear illegal prompt messages - prompt_messages = self._clear_illegal_prompt_messages(model, prompt_messages) - - block_as_stream = False - if model.startswith("o1"): - if stream: - block_as_stream = True - stream = False - - if "stream_options" in extra_model_kwargs: - del extra_model_kwargs["stream_options"] - - if "stop" in extra_model_kwargs: - del extra_model_kwargs["stop"] - - # chat model - response = client.chat.completions.create( - messages=[self._convert_prompt_message_to_dict(m) for m in prompt_messages], - model=model, - stream=stream, - **model_parameters, - **extra_model_kwargs, - ) - - if stream: - return self._handle_chat_generate_stream_response(model, credentials, response, prompt_messages, tools) - - block_result = self._handle_chat_generate_response(model, credentials, response, prompt_messages, tools) - - if block_as_stream: - return self._handle_chat_block_as_stream_response(block_result, prompt_messages, stop) - - return block_result - - def _handle_chat_block_as_stream_response( - self, - block_result: LLMResult, - prompt_messages: list[PromptMessage], - stop: Optional[list[str]] = None, - ) -> Generator[LLMResultChunk, None, None]: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :param stop: stop words - :return: llm response chunk generator - """ - text = block_result.message.content - text = cast(str, text) - - 
if stop: - text = self.enforce_stop_tokens(text, stop) - - yield LLMResultChunk( - model=block_result.model, - prompt_messages=prompt_messages, - system_fingerprint=block_result.system_fingerprint, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=text), - finish_reason="stop", - usage=block_result.usage, - ), - ) - - def _handle_chat_generate_response( - self, - model: str, - credentials: dict, - response: ChatCompletion, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> LLMResult: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: llm response - """ - assistant_message = response.choices[0].message - # assistant_message_tool_calls = assistant_message.tool_calls - assistant_message_function_call = assistant_message.function_call - - # extract tool calls from response - # tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls) - function_call = self._extract_response_function_call(assistant_message_function_call) - tool_calls = [function_call] if function_call else [] - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_message.content, tool_calls=tool_calls) - - # calculate num tokens - if response.usage: - # transform usage - prompt_tokens = response.usage.prompt_tokens - completion_tokens = response.usage.completion_tokens - else: - # calculate num tokens - prompt_tokens = self._num_tokens_from_messages(model, prompt_messages, tools) - completion_tokens = self._num_tokens_from_messages(model, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - response = LLMResult( - model=response.model, - 
prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - system_fingerprint=response.system_fingerprint, - ) - - return response - - def _handle_chat_generate_stream_response( - self, - model: str, - credentials: dict, - response: Stream[ChatCompletionChunk], - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> Generator: - """ - Handle llm chat stream response - - :param model: model name - :param response: response - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: llm response chunk generator - """ - full_assistant_content = "" - delta_assistant_message_function_call_storage: ChoiceDeltaFunctionCall = None - prompt_tokens = 0 - completion_tokens = 0 - final_tool_calls = [] - final_chunk = LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=""), - ), - ) - - for chunk in response: - if len(chunk.choices) == 0: - if chunk.usage: - # calculate num tokens - prompt_tokens = chunk.usage.prompt_tokens - completion_tokens = chunk.usage.completion_tokens - continue - - delta = chunk.choices[0] - has_finish_reason = delta.finish_reason is not None - - if ( - not has_finish_reason - and (delta.delta.content is None or delta.delta.content == "") - and delta.delta.function_call is None - ): - continue - - # assistant_message_tool_calls = delta.delta.tool_calls - assistant_message_function_call = delta.delta.function_call - - # extract tool calls from response - if delta_assistant_message_function_call_storage is not None: - # handle process of stream function call - if assistant_message_function_call: - # message has not ended ever - delta_assistant_message_function_call_storage.arguments += assistant_message_function_call.arguments - continue - else: - # message has ended - assistant_message_function_call = delta_assistant_message_function_call_storage - 
delta_assistant_message_function_call_storage = None - else: - if assistant_message_function_call: - # start of stream function call - delta_assistant_message_function_call_storage = assistant_message_function_call - if delta_assistant_message_function_call_storage.arguments is None: - delta_assistant_message_function_call_storage.arguments = "" - if not has_finish_reason: - continue - - # tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls) - function_call = self._extract_response_function_call(assistant_message_function_call) - tool_calls = [function_call] if function_call else [] - if tool_calls: - final_tool_calls.extend(tool_calls) - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=delta.delta.content or "", tool_calls=tool_calls) - - full_assistant_content += delta.delta.content or "" - - if has_finish_reason: - final_chunk = LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - finish_reason=delta.finish_reason, - ), - ) - else: - yield LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - ), - ) - - if not prompt_tokens: - prompt_tokens = self._num_tokens_from_messages(model, prompt_messages, tools) - - if not completion_tokens: - full_assistant_prompt_message = AssistantPromptMessage( - content=full_assistant_content, tool_calls=final_tool_calls - ) - completion_tokens = self._num_tokens_from_messages(model, [full_assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - final_chunk.delta.usage = usage - - yield final_chunk - - def _extract_response_tool_calls( - self, response_tool_calls: 
list[ChatCompletionMessageToolCall | ChoiceDeltaToolCall] - ) -> list[AssistantPromptMessage.ToolCall]: - """ - Extract tool calls from response - - :param response_tool_calls: response tool calls - :return: list of tool calls - """ - tool_calls = [] - if response_tool_calls: - for response_tool_call in response_tool_calls: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call.function.name, arguments=response_tool_call.function.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_tool_call.id, type=response_tool_call.type, function=function - ) - tool_calls.append(tool_call) - - return tool_calls - - def _extract_response_function_call( - self, response_function_call: FunctionCall | ChoiceDeltaFunctionCall - ) -> AssistantPromptMessage.ToolCall: - """ - Extract function call from response - - :param response_function_call: response function call - :return: tool call - """ - tool_call = None - if response_function_call: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_function_call.name, arguments=response_function_call.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_function_call.name, type="function", function=function - ) - - return tool_call - - def _clear_illegal_prompt_messages(self, model: str, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: - """ - Clear illegal prompt messages for OpenAI API - - :param model: model name - :param prompt_messages: prompt messages - :return: cleaned prompt messages - """ - checklist = ["gpt-4-turbo", "gpt-4-turbo-2024-04-09"] - - if model in checklist: - # count how many user messages are there - user_message_count = len([m for m in prompt_messages if isinstance(m, UserPromptMessage)]) - if user_message_count > 1: - for prompt_message in prompt_messages: - if isinstance(prompt_message, UserPromptMessage): - if isinstance(prompt_message.content, list): - prompt_message.content = "\n".join( - [ - 
item.data - if item.type == PromptMessageContentType.TEXT - else "[IMAGE]" - if item.type == PromptMessageContentType.IMAGE - else "" - for item in prompt_message.content - ] - ) - - if model.startswith("o1"): - system_message_count = len([m for m in prompt_messages if isinstance(m, SystemPromptMessage)]) - if system_message_count > 0: - new_prompt_messages = [] - for prompt_message in prompt_messages: - if isinstance(prompt_message, SystemPromptMessage): - prompt_message = UserPromptMessage( - content=prompt_message.content, - name=prompt_message.name, - ) - - new_prompt_messages.append(prompt_message) - prompt_messages = new_prompt_messages - - return prompt_messages - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict for OpenAI API - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - sub_message_dict = { - "type": "image_url", - "image_url": {"url": message_content.data, "detail": message_content.detail.value}, - } - sub_messages.append(sub_message_dict) - - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls: - # message_dict["tool_calls"] = [tool_call.dict() for tool_call in - # message.tool_calls] - function_call = 
message.tool_calls[0] - message_dict["function_call"] = { - "name": function_call.function.name, - "arguments": function_call.function.arguments, - } - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - # message_dict = { - # "role": "tool", - # "content": message.content, - # "tool_call_id": message.tool_call_id - # } - message_dict = {"role": "function", "content": message.content, "name": message.tool_call_id} - else: - raise ValueError(f"Got unknown type {message}") - - if message.name: - message_dict["name"] = message.name - - return message_dict - - def _num_tokens_from_string(self, model: str, text: str, tools: Optional[list[PromptMessageTool]] = None) -> int: - """ - Calculate num tokens for text completion model with tiktoken package. - - :param model: model name - :param text: prompt text - :param tools: tools for tool calling - :return: number of tokens - """ - try: - encoding = tiktoken.encoding_for_model(model) - except KeyError: - encoding = tiktoken.get_encoding("cl100k_base") - - num_tokens = len(encoding.encode(text)) - - if tools: - num_tokens += self._num_tokens_for_tools(encoding, tools) - - return num_tokens - - def _num_tokens_from_messages( - self, model: str, messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None - ) -> int: - """Calculate num tokens for gpt-3.5-turbo and gpt-4 with tiktoken package. - - Official documentation: https://github.com/openai/openai-cookbook/blob/main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb""" - if model.startswith("ft:"): - model = model.split(":")[1] - - # Currently, we can use gpt4o to calculate chatgpt-4o-latest's token. 
- if model == "chatgpt-4o-latest" or model.startswith("o1"): - model = "gpt-4o" - - try: - encoding = tiktoken.encoding_for_model(model) - except KeyError: - logger.warning("Warning: model not found. Using cl100k_base encoding.") - model = "cl100k_base" - encoding = tiktoken.get_encoding(model) - - if model.startswith("gpt-3.5-turbo-0301"): - # every message follows {role/name}\n{content}\n - tokens_per_message = 4 - # if there's a name, the role is omitted - tokens_per_name = -1 - elif model.startswith("gpt-3.5-turbo") or model.startswith("gpt-4") or model.startswith("o1"): - tokens_per_message = 3 - tokens_per_name = 1 - else: - raise NotImplementedError( - f"get_num_tokens_from_messages() is not presently implemented " - f"for model {model}." - "See https://platform.openai.com/docs/advanced-usage/managing-tokens for " - "information on how messages are converted to tokens." - ) - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - # Cast str(value) in case the message value is not a string - # This occurs with function messages - # TODO: The current token calculation method for the image type is not implemented, - # which need to download the image and then get the resolution for calculation, - # and will increase the request delay - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - if key == "tool_calls": - for tool_call in value: - for t_key, t_value in tool_call.items(): - num_tokens += len(encoding.encode(t_key)) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += len(encoding.encode(f_key)) - num_tokens += len(encoding.encode(f_value)) - else: - num_tokens += len(encoding.encode(t_key)) - num_tokens += len(encoding.encode(t_value)) - else: - num_tokens += 
len(encoding.encode(str(value))) - - if key == "name": - num_tokens += tokens_per_name - - # every reply is primed with assistant - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(encoding, tools) - - return num_tokens - - def _num_tokens_for_tools(self, encoding: tiktoken.Encoding, tools: list[PromptMessageTool]) -> int: - """ - Calculate num tokens for tool calling with tiktoken package. - - :param encoding: encoding - :param tools: tools for tool calling - :return: number of tokens - """ - num_tokens = 0 - for tool in tools: - num_tokens += len(encoding.encode("type")) - num_tokens += len(encoding.encode("function")) - - # calculate num tokens for function object - num_tokens += len(encoding.encode("name")) - num_tokens += len(encoding.encode(tool.name)) - num_tokens += len(encoding.encode("description")) - num_tokens += len(encoding.encode(tool.description)) - parameters = tool.parameters - num_tokens += len(encoding.encode("parameters")) - if "title" in parameters: - num_tokens += len(encoding.encode("title")) - num_tokens += len(encoding.encode(parameters.get("title"))) - num_tokens += len(encoding.encode("type")) - num_tokens += len(encoding.encode(parameters.get("type"))) - if "properties" in parameters: - num_tokens += len(encoding.encode("properties")) - for key, value in parameters.get("properties").items(): - num_tokens += len(encoding.encode(key)) - for field_key, field_value in value.items(): - num_tokens += len(encoding.encode(field_key)) - if field_key == "enum": - for enum_field in field_value: - num_tokens += 3 - num_tokens += len(encoding.encode(enum_field)) - else: - num_tokens += len(encoding.encode(field_key)) - num_tokens += len(encoding.encode(str(field_value))) - if "required" in parameters: - num_tokens += len(encoding.encode("required")) - for required_field in parameters["required"]: - num_tokens += 3 - num_tokens += len(encoding.encode(required_field)) - - return num_tokens - - def 
get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - OpenAI supports fine-tuning of their models. This method returns the schema of the base model - but renamed to the fine-tuned model name. - - :param model: model name - :param credentials: credentials - - :return: model schema - """ - if not model.startswith("ft:"): - base_model = model - else: - # get base_model - base_model = model.split(":")[1] - - # get model schema - models = self.predefined_models() - model_map = {model.model: model for model in models} - if base_model not in model_map: - raise ValueError(f"Base model {base_model} not found") - - base_model_schema = model_map[base_model] - - base_model_schema_features = base_model_schema.features or [] - base_model_schema_model_properties = base_model_schema.model_properties or {} - base_model_schema_parameters_rules = base_model_schema.parameter_rules or [] - - entity = AIModelEntity( - model=model, - label=I18nObject(zh_Hans=model, en_US=model), - model_type=ModelType.LLM, - features=list(base_model_schema_features), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties=dict(base_model_schema_model_properties.items()), - parameter_rules=list(base_model_schema_parameters_rules), - pricing=base_model_schema.pricing, - ) - - return entity diff --git a/api/core/model_runtime/model_providers/openai/llm/o1-mini-2024-09-12.yaml b/api/core/model_runtime/model_providers/openai/llm/o1-mini-2024-09-12.yaml deleted file mode 100644 index 0ade7f8ded9d4b..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/o1-mini-2024-09-12.yaml +++ /dev/null @@ -1,33 +0,0 @@ -model: o1-mini-2024-09-12 -label: - zh_Hans: o1-mini-2024-09-12 - en_US: o1-mini-2024-09-12 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - default: 65536 - min: 1 - max: 65536 - - name: response_format - label: - zh_Hans: 回复格式 
- en_US: response_format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '3.00' - output: '12.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/o1-mini.yaml b/api/core/model_runtime/model_providers/openai/llm/o1-mini.yaml deleted file mode 100644 index 60816c5d1e4d93..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/o1-mini.yaml +++ /dev/null @@ -1,33 +0,0 @@ -model: o1-mini -label: - zh_Hans: o1-mini - en_US: o1-mini -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - default: 65536 - min: 1 - max: 65536 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: response_format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '3.00' - output: '12.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/o1-preview-2024-09-12.yaml b/api/core/model_runtime/model_providers/openai/llm/o1-preview-2024-09-12.yaml deleted file mode 100644 index c9da96f611bc91..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/o1-preview-2024-09-12.yaml +++ /dev/null @@ -1,33 +0,0 @@ -model: o1-preview-2024-09-12 -label: - zh_Hans: o1-preview-2024-09-12 - en_US: o1-preview-2024-09-12 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - default: 32768 - min: 1 - max: 32768 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: response_format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - 
options: - - text - - json_object -pricing: - input: '15.00' - output: '60.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/o1-preview.yaml b/api/core/model_runtime/model_providers/openai/llm/o1-preview.yaml deleted file mode 100644 index c83874b76582e8..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/o1-preview.yaml +++ /dev/null @@ -1,33 +0,0 @@ -model: o1-preview -label: - zh_Hans: o1-preview - en_US: o1-preview -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - default: 32768 - min: 1 - max: 32768 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: response_format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '15.00' - output: '60.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/llm/text-davinci-003.yaml b/api/core/model_runtime/model_providers/openai/llm/text-davinci-003.yaml deleted file mode 100644 index 76b5d848753da5..00000000000000 --- a/api/core/model_runtime/model_providers/openai/llm/text-davinci-003.yaml +++ /dev/null @@ -1,29 +0,0 @@ -model: text-davinci-003 -label: - zh_Hans: text-davinci-003 - en_US: text-davinci-003 -model_type: llm -features: [ ] -model_properties: - mode: completion - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 -pricing: - input: '0.001' - output: '0.002' - unit: '0.001' - currency: USD -deprecated: true diff --git 
a/api/core/model_runtime/model_providers/openai/moderation/moderation.py b/api/core/model_runtime/model_providers/openai/moderation/moderation.py index 619044d808cdf6..a83248c0c25b41 100644 --- a/api/core/model_runtime/model_providers/openai/moderation/moderation.py +++ b/api/core/model_runtime/model_providers/openai/moderation/moderation.py @@ -1,15 +1,25 @@ +from collections.abc import Mapping from typing import Optional +import openai +from httpx import Timeout from openai import OpenAI from openai.types import ModerationCreateResponse from core.model_runtime.entities.model_entities import ModelPropertyKey +from core.model_runtime.errors.invoke import ( + InvokeAuthorizationError, + InvokeBadRequestError, + InvokeConnectionError, + InvokeError, + InvokeRateLimitError, + InvokeServerUnavailableError, +) from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.moderation_model import ModerationModel -from core.model_runtime.model_providers.openai._common import _CommonOpenAI -class OpenAIModerationModel(_CommonOpenAI, ModerationModel): +class OpenAIModerationModel(ModerationModel): """ Model class for OpenAI text moderation model. 
""" @@ -111,3 +121,48 @@ def _get_max_chunks(self, model: str, credentials: dict) -> int: return model_schema.model_properties[ModelPropertyKey.MAX_CHUNKS] return 1 + + def _to_credential_kwargs(self, credentials: Mapping) -> dict: + """ + Transform credentials to kwargs for model instance + + :param credentials: + :return: + """ + credentials_kwargs = { + "api_key": credentials["openai_api_key"], + "timeout": Timeout(315.0, read=300.0, write=10.0, connect=5.0), + "max_retries": 1, + } + + if credentials.get("openai_api_base"): + openai_api_base = credentials["openai_api_base"].rstrip("/") + credentials_kwargs["base_url"] = openai_api_base + "/v1" + + if "openai_organization" in credentials: + credentials_kwargs["organization"] = credentials["openai_organization"] + + return credentials_kwargs + + @property + def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: + """ + Map model invoke error to unified error + The key is the error type thrown to the caller + The value is the error type thrown by the model, + which needs to be converted into a unified error type for the caller. 
+ + :return: Invoke error mapping + """ + return { + InvokeConnectionError: [openai.APIConnectionError, openai.APITimeoutError], + InvokeServerUnavailableError: [openai.InternalServerError], + InvokeRateLimitError: [openai.RateLimitError], + InvokeAuthorizationError: [openai.AuthenticationError, openai.PermissionDeniedError], + InvokeBadRequestError: [ + openai.BadRequestError, + openai.NotFoundError, + openai.UnprocessableEntityError, + openai.APIError, + ], + } diff --git a/api/core/model_runtime/model_providers/openai/moderation/text-moderation-stable.yaml b/api/core/model_runtime/model_providers/openai/moderation/text-moderation-stable.yaml deleted file mode 100644 index 5ca180916782d9..00000000000000 --- a/api/core/model_runtime/model_providers/openai/moderation/text-moderation-stable.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: text-moderation-stable -model_type: moderation -model_properties: - max_chunks: 32 - max_characters_per_chunk: 2000 diff --git a/api/core/model_runtime/model_providers/openai/openai.py b/api/core/model_runtime/model_providers/openai/openai.py deleted file mode 100644 index 175d7db73c46d4..00000000000000 --- a/api/core/model_runtime/model_providers/openai/openai.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging -from collections.abc import Mapping - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class OpenAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: Mapping) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `gpt-3.5-turbo` model for validate, - # no matter what model you pass in, text completion model or chat model - model_instance.validate_credentials(model="gpt-3.5-turbo", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/openai/openai.yaml b/api/core/model_runtime/model_providers/openai/openai.yaml deleted file mode 100644 index b4dc8fd4f2dfad..00000000000000 --- a/api/core/model_runtime/model_providers/openai/openai.yaml +++ /dev/null @@ -1,89 +0,0 @@ -provider: openai -label: - en_US: OpenAI -description: - en_US: Models provided by OpenAI, such as GPT-3.5-Turbo and GPT-4. - zh_Hans: OpenAI 提供的模型,例如 GPT-3.5-Turbo 和 GPT-4。 -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#E5E7EB" -help: - title: - en_US: Get your API Key from OpenAI - zh_Hans: 从 OpenAI 获取 API Key - url: - en_US: https://platform.openai.com/account/api-keys -supported_model_types: - - llm - - text-embedding - - speech2text - - moderation - - tts -configurate_methods: - - predefined-model - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: openai_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: openai_organization - label: - zh_Hans: 组织 ID - en_US: Organization - type: text-input - required: false - placeholder: - zh_Hans: 在此输入您的组织 ID - en_US: Enter your Organization ID - - variable: openai_api_base - label: - zh_Hans: API Base - en_US: API Base - type: text-input - required: false - placeholder: - zh_Hans: 在此输入您的 API 
Base - en_US: Enter your API Base -provider_credential_schema: - credential_form_schemas: - - variable: openai_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: openai_organization - label: - zh_Hans: 组织 ID - en_US: Organization - type: text-input - required: false - placeholder: - zh_Hans: 在此输入您的组织 ID - en_US: Enter your Organization ID - - variable: openai_api_base - label: - zh_Hans: API Base - en_US: API Base - type: text-input - required: false - placeholder: - zh_Hans: 在此输入您的 API Base, 如:https://api.openai.com - en_US: Enter your API Base, e.g. https://api.openai.com diff --git a/api/core/model_runtime/model_providers/openai/speech2text/__init__.py b/api/core/model_runtime/model_providers/openai/speech2text/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py b/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py deleted file mode 100644 index 18f97e45f33bd8..00000000000000 --- a/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import IO, Optional - -from openai import OpenAI - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel -from core.model_runtime.model_providers.openai._common import _CommonOpenAI - - -class OpenAISpeech2TextModel(_CommonOpenAI, Speech2TextModel): - """ - Model class for OpenAI Speech to text model. 
- """ - - def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :param user: unique user id - :return: text for given audio file - """ - return self._speech2text_invoke(model, credentials, file) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - audio_file_path = self._get_demo_file_path() - - with open(audio_file_path, "rb") as audio_file: - self._speech2text_invoke(model, credentials, audio_file) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _speech2text_invoke(self, model: str, credentials: dict, file: IO[bytes]) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :return: text for given audio file - """ - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - - # init model client - client = OpenAI(**credentials_kwargs) - - response = client.audio.transcriptions.create(model=model, file=file) - - return response.text diff --git a/api/core/model_runtime/model_providers/openai/speech2text/whisper-1.yaml b/api/core/model_runtime/model_providers/openai/speech2text/whisper-1.yaml deleted file mode 100644 index 6c14c766192d6b..00000000000000 --- a/api/core/model_runtime/model_providers/openai/speech2text/whisper-1.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: whisper-1 -model_type: speech2text -model_properties: - file_upload_limit: 25 - supported_file_extensions: flac,mp3,mp4,mpeg,mpga,m4a,ogg,wav,webm diff --git a/api/core/model_runtime/model_providers/openai/text_embedding/__init__.py b/api/core/model_runtime/model_providers/openai/text_embedding/__init__.py 
deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-3-large.yaml b/api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-3-large.yaml deleted file mode 100644 index 9489170fdea0b4..00000000000000 --- a/api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-3-large.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: text-embedding-3-large -model_type: text-embedding -model_properties: - context_size: 8191 - max_chunks: 32 -pricing: - input: '0.00013' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-3-small.yaml b/api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-3-small.yaml deleted file mode 100644 index 586ba2b28f9041..00000000000000 --- a/api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-3-small.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: text-embedding-3-small -model_type: text-embedding -model_properties: - context_size: 8191 - max_chunks: 32 -pricing: - input: '0.00002' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-ada-002.yaml b/api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-ada-002.yaml deleted file mode 100644 index ef1c49b017dc34..00000000000000 --- a/api/core/model_runtime/model_providers/openai/text_embedding/text-embedding-ada-002.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: text-embedding-ada-002 -model_type: text-embedding -model_properties: - context_size: 8097 - max_chunks: 32 -pricing: - input: '0.0001' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/tts/__init__.py b/api/core/model_runtime/model_providers/openai/tts/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git 
a/api/core/model_runtime/model_providers/openai/tts/tts-1-hd.yaml b/api/core/model_runtime/model_providers/openai/tts/tts-1-hd.yaml deleted file mode 100644 index 449c131f9d10f9..00000000000000 --- a/api/core/model_runtime/model_providers/openai/tts/tts-1-hd.yaml +++ /dev/null @@ -1,31 +0,0 @@ -model: tts-1-hd -model_type: tts -model_properties: - default_voice: 'alloy' - voices: - - mode: 'alloy' - name: 'Alloy' - language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] - - mode: 'echo' - name: 'Echo' - language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] - - mode: 'fable' - name: 'Fable' - language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] - - mode: 'onyx' - name: 'Onyx' - language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] - - mode: 'nova' - name: 'Nova' - language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] - - mode: 'shimmer' - name: 'Shimmer' - language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] - word_limit: 3500 - audio_type: 'mp3' - max_workers: 5 -pricing: - input: '0.03' - output: '0' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/tts/tts-1.yaml b/api/core/model_runtime/model_providers/openai/tts/tts-1.yaml deleted file mode 100644 index 83969fb2f7a129..00000000000000 --- a/api/core/model_runtime/model_providers/openai/tts/tts-1.yaml +++ /dev/null @@ -1,31 +0,0 @@ -model: tts-1 -model_type: tts -model_properties: - default_voice: 'alloy' - voices: - - mode: 'alloy' - name: 'Alloy' - language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID'] - - mode: 'echo' - name: 'Echo' - language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID'] - - mode: 'fable' - name: 'Fable' - language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID'] - - mode: 
'onyx' - name: 'Onyx' - language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID'] - - mode: 'nova' - name: 'Nova' - language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID'] - - mode: 'shimmer' - name: 'Shimmer' - language: ['zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID'] - word_limit: 3500 - audio_type: 'mp3' - max_workers: 5 -pricing: - input: '0.015' - output: '0' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/openai/tts/tts.py b/api/core/model_runtime/model_providers/openai/tts/tts.py deleted file mode 100644 index a14c91639b88fc..00000000000000 --- a/api/core/model_runtime/model_providers/openai/tts/tts.py +++ /dev/null @@ -1,118 +0,0 @@ -import concurrent.futures -from typing import Optional - -from openai import OpenAI - -from core.model_runtime.errors.invoke import InvokeBadRequestError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.tts_model import TTSModel -from core.model_runtime.model_providers.openai._common import _CommonOpenAI - - -class OpenAIText2SpeechModel(_CommonOpenAI, TTSModel): - """ - Model class for OpenAI Speech to text model. 
- """ - - def _invoke( - self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ) -> any: - """ - _invoke text2speech model - - :param model: model name - :param tenant_id: user tenant id - :param credentials: model credentials - :param content_text: text content to be translated - :param voice: model timbre - :param user: unique user id - :return: text translated to audio file - """ - - if not voice or voice not in [ - d["value"] for d in self.get_tts_model_voices(model=model, credentials=credentials) - ]: - voice = self._get_model_default_voice(model, credentials) - # if streaming: - return self._tts_invoke_streaming(model=model, credentials=credentials, content_text=content_text, voice=voice) - - def validate_credentials(self, model: str, credentials: dict, user: Optional[str] = None) -> None: - """ - validate credentials text2speech model - - :param model: model name - :param credentials: model credentials - :param user: unique user id - :return: text translated to audio file - """ - try: - self._tts_invoke_streaming( - model=model, - credentials=credentials, - content_text="Hello Dify!", - voice=self._get_model_default_voice(model, credentials), - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: - """ - _tts_invoke_streaming text2speech model - - :param model: model name - :param credentials: model credentials - :param content_text: text content to be translated - :param voice: model timbre - :return: text translated to audio file - """ - try: - # doc: https://platform.openai.com/docs/guides/text-to-speech - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - model_support_voice = [ - x.get("value") for x in self.get_tts_model_voices(model=model, credentials=credentials) - ] - if not voice or voice not in 
model_support_voice: - voice = self._get_model_default_voice(model, credentials) - word_limit = self._get_model_word_limit(model, credentials) - if len(content_text) > word_limit: - sentences = self._split_text_into_sentences(content_text, max_length=word_limit) - executor = concurrent.futures.ThreadPoolExecutor(max_workers=min(3, len(sentences))) - futures = [ - executor.submit( - client.audio.speech.with_streaming_response.create, - model=model, - response_format="mp3", - input=sentences[i], - voice=voice, - ) - for i in range(len(sentences)) - ] - for future in futures: - yield from future.result().__enter__().iter_bytes(1024) # noqa:PLC2801 - - else: - response = client.audio.speech.with_streaming_response.create( - model=model, voice=voice, response_format="mp3", input=content_text.strip() - ) - - yield from response.__enter__().iter_bytes(1024) # noqa:PLC2801 - except Exception as ex: - raise InvokeBadRequestError(str(ex)) - - def _process_sentence(self, sentence: str, model: str, voice, credentials: dict): - """ - _tts_invoke openai text2speech model api - - :param model: model name - :param credentials: model credentials - :param voice: model timbre - :param sentence: text content to be translated - :return: text translated to audio file - """ - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - response = client.audio.speech.create(model=model, voice=voice, input=sentence.strip()) - if isinstance(response.read(), bytes): - return response.read() diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/__init__.py b/api/core/model_runtime/model_providers/openai_api_compatible/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/_common.py b/api/core/model_runtime/model_providers/openai_api_compatible/_common.py deleted file mode 100644 index 
1234e44f80c40c..00000000000000 --- a/api/core/model_runtime/model_providers/openai_api_compatible/_common.py +++ /dev/null @@ -1,43 +0,0 @@ -import requests - -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) - - -class _CommonOaiApiCompat: - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeAuthorizationError: [ - requests.exceptions.InvalidHeader, # Missing or Invalid API Key - ], - InvokeBadRequestError: [ - requests.exceptions.HTTPError, # Invalid Endpoint URL or model name - requests.exceptions.InvalidURL, # Misconfigured request or other API error - ], - InvokeRateLimitError: [ - requests.exceptions.RetryError # Too many requests sent in a short period of time - ], - InvokeServerUnavailableError: [ - requests.exceptions.ConnectionError, # Engine Overloaded - requests.exceptions.HTTPError, # Server Error - ], - InvokeConnectionError: [ - requests.exceptions.ConnectTimeout, # Timeout - requests.exceptions.ReadTimeout, # Timeout - ], - } diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/llm/__init__.py b/api/core/model_runtime/model_providers/openai_api_compatible/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py b/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py deleted file mode 100644 index c2ffe653c8b70a..00000000000000 --- a/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py +++ /dev/null @@ -1,828 +0,0 @@ -import 
json -import logging -from collections.abc import Generator -from decimal import Decimal -from typing import Optional, Union, cast -from urllib.parse import urljoin - -import requests - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContent, - PromptMessageContentType, - PromptMessageFunction, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - DefaultParameterName, - FetchFrom, - ModelFeature, - ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, - PriceConfig, -) -from core.model_runtime.errors.invoke import InvokeError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.openai_api_compatible._common import _CommonOaiApiCompat -from core.model_runtime.utils import helper - -logger = logging.getLogger(__name__) - - -class OAIAPICompatLargeLanguageModel(_CommonOaiApiCompat, LargeLanguageModel): - """ - Model class for OpenAI large language model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - - # text completion model - return self._generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: - :param credentials: - :param prompt_messages: - :param tools: tools for tool calling - :return: - """ - return self._num_tokens_from_messages(model, prompt_messages, tools, credentials) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials using requests to ensure compatibility with all providers following - OpenAI's API standard. 
- - :param model: model name - :param credentials: model credentials - :return: - """ - try: - headers = {"Content-Type": "application/json"} - - api_key = credentials.get("api_key") - if api_key: - headers["Authorization"] = f"Bearer {api_key}" - - endpoint_url = credentials["endpoint_url"] - if not endpoint_url.endswith("/"): - endpoint_url += "/" - - # prepare the payload for a simple ping to the model - data = {"model": model, "max_tokens": 5} - - completion_type = LLMMode.value_of(credentials["mode"]) - - if completion_type is LLMMode.CHAT: - data["messages"] = [ - {"role": "user", "content": "ping"}, - ] - endpoint_url = urljoin(endpoint_url, "chat/completions") - elif completion_type is LLMMode.COMPLETION: - data["prompt"] = "ping" - endpoint_url = urljoin(endpoint_url, "completions") - else: - raise ValueError("Unsupported completion type for model configuration.") - - # send a post request to validate the credentials - response = requests.post(endpoint_url, headers=headers, json=data, timeout=(10, 300)) - - if response.status_code != 200: - raise CredentialsValidateFailedError( - f"Credentials validation failed with status code {response.status_code}" - ) - - try: - json_result = response.json() - except json.JSONDecodeError as e: - raise CredentialsValidateFailedError("Credentials validation failed: JSON decode error") - - if completion_type is LLMMode.CHAT and json_result.get("object", "") == "": - json_result["object"] = "chat.completion" - elif completion_type is LLMMode.COMPLETION and json_result.get("object", "") == "": - json_result["object"] = "text_completion" - - if completion_type is LLMMode.CHAT and ( - "object" not in json_result or json_result["object"] != "chat.completion" - ): - raise CredentialsValidateFailedError( - "Credentials validation failed: invalid response object, must be 'chat.completion'" - ) - elif completion_type is LLMMode.COMPLETION and ( - "object" not in json_result or json_result["object"] != "text_completion" - ): - 
raise CredentialsValidateFailedError( - "Credentials validation failed: invalid response object, must be 'text_completion'" - ) - except CredentialsValidateFailedError: - raise - except Exception as ex: - raise CredentialsValidateFailedError(f"An error occurred during credentials validation: {str(ex)}") - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - features = [] - - function_calling_type = credentials.get("function_calling_type", "no_call") - if function_calling_type == "function_call": - features.append(ModelFeature.TOOL_CALL) - elif function_calling_type == "tool_call": - features.append(ModelFeature.MULTI_TOOL_CALL) - - stream_function_calling = credentials.get("stream_function_calling", "supported") - if stream_function_calling == "supported": - features.append(ModelFeature.STREAM_TOOL_CALL) - - vision_support = credentials.get("vision_support", "not_support") - if vision_support == "support": - features.append(ModelFeature.VISION) - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.LLM, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - features=features, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", "4096")), - ModelPropertyKey.MODE: credentials.get("mode"), - }, - parameter_rules=[ - ParameterRule( - name=DefaultParameterName.TEMPERATURE.value, - label=I18nObject(en_US="Temperature", zh_Hans="温度"), - help=I18nObject( - en_US="Kernel sampling threshold. Used to determine the randomness of the results." - "The higher the value, the stronger the randomness." 
- "The higher the possibility of getting different answers to the same question.", - zh_Hans="核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。", - ), - type=ParameterType.FLOAT, - default=float(credentials.get("temperature", 0.7)), - min=0, - max=2, - precision=2, - ), - ParameterRule( - name=DefaultParameterName.TOP_P.value, - label=I18nObject(en_US="Top P", zh_Hans="Top P"), - help=I18nObject( - en_US="The probability threshold of the nucleus sampling method during the generation process." - "The larger the value is, the higher the randomness of generation will be." - "The smaller the value is, the higher the certainty of generation will be.", - zh_Hans="生成过程中核采样方法概率阈值。取值越大,生成的随机性越高;取值越小,生成的确定性越高。", - ), - type=ParameterType.FLOAT, - default=float(credentials.get("top_p", 1)), - min=0, - max=1, - precision=2, - ), - ParameterRule( - name=DefaultParameterName.FREQUENCY_PENALTY.value, - label=I18nObject(en_US="Frequency Penalty", zh_Hans="频率惩罚"), - help=I18nObject( - en_US="For controlling the repetition rate of words used by the model." - "Increasing this can reduce the repetition of the same words in the model's output.", - zh_Hans="用于控制模型已使用字词的重复率。 提高此项可以降低模型在输出中重复相同字词的重复度。", - ), - type=ParameterType.FLOAT, - default=float(credentials.get("frequency_penalty", 0)), - min=-2, - max=2, - ), - ParameterRule( - name=DefaultParameterName.PRESENCE_PENALTY.value, - label=I18nObject(en_US="Presence Penalty", zh_Hans="存在惩罚"), - help=I18nObject( - en_US="Used to control the repetition rate when generating models." 
- "Increasing this can reduce the repetition rate of model generation.", - zh_Hans="用于控制模型生成时的重复度。提高此项可以降低模型生成的重复度。", - ), - type=ParameterType.FLOAT, - default=float(credentials.get("presence_penalty", 0)), - min=-2, - max=2, - ), - ParameterRule( - name=DefaultParameterName.MAX_TOKENS.value, - label=I18nObject(en_US="Max Tokens", zh_Hans="最大标记"), - help=I18nObject( - en_US="Maximum length of tokens for the model response.", zh_Hans="模型回答的tokens的最大长度。" - ), - type=ParameterType.INT, - default=512, - min=1, - max=int(credentials.get("max_tokens_to_sample", 4096)), - ), - ], - pricing=PriceConfig( - input=Decimal(credentials.get("input_price", 0)), - output=Decimal(credentials.get("output_price", 0)), - unit=Decimal(credentials.get("unit", 0)), - currency=credentials.get("currency", "USD"), - ), - ) - - if credentials["mode"] == "chat": - entity.model_properties[ModelPropertyKey.MODE] = LLMMode.CHAT.value - elif credentials["mode"] == "completion": - entity.model_properties[ModelPropertyKey.MODE] = LLMMode.COMPLETION.value - else: - raise ValueError(f"Unknown completion type {credentials['completion_type']}") - - return entity - - # validate_credentials method has been rewritten to use the requests library for compatibility with all providers - # following OpenAI's API standard. 
- def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke llm completion model - - :param model: model name - :param credentials: credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - headers = { - "Content-Type": "application/json", - "Accept-Charset": "utf-8", - } - extra_headers = credentials.get("extra_headers") - if extra_headers is not None: - headers = { - **headers, - **extra_headers, - } - - api_key = credentials.get("api_key") - if api_key: - headers["Authorization"] = f"Bearer {api_key}" - - endpoint_url = credentials["endpoint_url"] - if not endpoint_url.endswith("/"): - endpoint_url += "/" - - data = {"model": model, "stream": stream, **model_parameters} - - completion_type = LLMMode.value_of(credentials["mode"]) - - if completion_type is LLMMode.CHAT: - endpoint_url = urljoin(endpoint_url, "chat/completions") - data["messages"] = [self._convert_prompt_message_to_dict(m, credentials) for m in prompt_messages] - elif completion_type is LLMMode.COMPLETION: - endpoint_url = urljoin(endpoint_url, "completions") - data["prompt"] = prompt_messages[0].content - else: - raise ValueError("Unsupported completion type for model configuration.") - - # annotate tools with names, descriptions, etc. 
- function_calling_type = credentials.get("function_calling_type", "no_call") - formatted_tools = [] - if tools: - if function_calling_type == "function_call": - data["functions"] = [ - {"name": tool.name, "description": tool.description, "parameters": tool.parameters} - for tool in tools - ] - elif function_calling_type == "tool_call": - data["tool_choice"] = "auto" - - for tool in tools: - formatted_tools.append(helper.dump_model(PromptMessageFunction(function=tool))) - - data["tools"] = formatted_tools - - if stop: - data["stop"] = stop - - if user: - data["user"] = user - - response = requests.post(endpoint_url, headers=headers, json=data, timeout=(10, 300), stream=stream) - - if response.encoding is None or response.encoding == "ISO-8859-1": - response.encoding = "utf-8" - - if response.status_code != 200: - raise InvokeError(f"API request failed with status code {response.status_code}: {response.text}") - - if stream: - return self._handle_generate_stream_response(model, credentials, response, prompt_messages) - - return self._handle_generate_response(model, credentials, response, prompt_messages) - - def _handle_generate_stream_response( - self, model: str, credentials: dict, response: requests.Response, prompt_messages: list[PromptMessage] - ) -> Generator: - """ - Handle llm stream response - - :param model: model name - :param credentials: model credentials - :param response: streamed response - :param prompt_messages: prompt messages - :return: llm response chunk generator - """ - full_assistant_content = "" - chunk_index = 0 - - def create_final_llm_result_chunk( - index: int, message: AssistantPromptMessage, finish_reason: str - ) -> LLMResultChunk: - # calculate num tokens - prompt_tokens = self._num_tokens_from_string(model, prompt_messages[0].content) - completion_tokens = self._num_tokens_from_string(model, full_assistant_content) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - 
return LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=message, finish_reason=finish_reason, usage=usage), - ) - - # delimiter for stream response, need unicode_escape - import codecs - - delimiter = credentials.get("stream_mode_delimiter", "\n\n") - delimiter = codecs.decode(delimiter, "unicode_escape") - - tools_calls: list[AssistantPromptMessage.ToolCall] = [] - - def increase_tool_call(new_tool_calls: list[AssistantPromptMessage.ToolCall]): - def get_tool_call(tool_call_id: str): - if not tool_call_id: - return tools_calls[-1] - - tool_call = next((tool_call for tool_call in tools_calls if tool_call.id == tool_call_id), None) - if tool_call is None: - tool_call = AssistantPromptMessage.ToolCall( - id=tool_call_id, - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="", arguments=""), - ) - tools_calls.append(tool_call) - - return tool_call - - for new_tool_call in new_tool_calls: - # get tool call - tool_call = get_tool_call(new_tool_call.function.name) - # update tool call - if new_tool_call.id: - tool_call.id = new_tool_call.id - if new_tool_call.type: - tool_call.type = new_tool_call.type - if new_tool_call.function.name: - tool_call.function.name = new_tool_call.function.name - if new_tool_call.function.arguments: - tool_call.function.arguments += new_tool_call.function.arguments - - finish_reason = None # The default value of finish_reason is None - - for chunk in response.iter_lines(decode_unicode=True, delimiter=delimiter): - chunk = chunk.strip() - if chunk: - # ignore sse comments - if chunk.startswith(":"): - continue - decoded_chunk = chunk.strip().lstrip("data: ").lstrip() - if decoded_chunk == "[DONE]": # Some provider returns "data: [DONE]" - continue - - try: - chunk_json = json.loads(decoded_chunk) - # stream ended - except json.JSONDecodeError as e: - yield create_final_llm_result_chunk( - index=chunk_index + 1, - 
message=AssistantPromptMessage(content=""), - finish_reason="Non-JSON encountered.", - ) - break - if not chunk_json or len(chunk_json["choices"]) == 0: - continue - - choice = chunk_json["choices"][0] - finish_reason = chunk_json["choices"][0].get("finish_reason") - chunk_index += 1 - - if "delta" in choice: - delta = choice["delta"] - delta_content = delta.get("content") - - assistant_message_tool_calls = None - - if "tool_calls" in delta and credentials.get("function_calling_type", "no_call") == "tool_call": - assistant_message_tool_calls = delta.get("tool_calls", None) - elif ( - "function_call" in delta - and credentials.get("function_calling_type", "no_call") == "function_call" - ): - assistant_message_tool_calls = [ - {"id": "tool_call_id", "type": "function", "function": delta.get("function_call", {})} - ] - - # assistant_message_function_call = delta.delta.function_call - - # extract tool calls from response - if assistant_message_tool_calls: - tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls) - increase_tool_call(tool_calls) - - if delta_content is None or delta_content == "": - continue - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage( - content=delta_content, - ) - - # reset tool calls - tool_calls = [] - full_assistant_content += delta_content - elif "text" in choice: - choice_text = choice.get("text", "") - if choice_text == "": - continue - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=choice_text) - full_assistant_content += choice_text - else: - continue - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=chunk_index, - message=assistant_prompt_message, - ), - ) - - chunk_index += 1 - - if tools_calls: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=chunk_index, - 
message=AssistantPromptMessage(tool_calls=tools_calls, content=""), - ), - ) - - yield create_final_llm_result_chunk( - index=chunk_index, message=AssistantPromptMessage(content=""), finish_reason=finish_reason - ) - - def _handle_generate_response( - self, model: str, credentials: dict, response: requests.Response, prompt_messages: list[PromptMessage] - ) -> LLMResult: - response_json = response.json() - - completion_type = LLMMode.value_of(credentials["mode"]) - - output = response_json["choices"][0] - - response_content = "" - tool_calls = None - function_calling_type = credentials.get("function_calling_type", "no_call") - if completion_type is LLMMode.CHAT: - response_content = output.get("message", {})["content"] - if function_calling_type == "tool_call": - tool_calls = output.get("message", {}).get("tool_calls") - elif function_calling_type == "function_call": - tool_calls = output.get("message", {}).get("function_call") - - elif completion_type is LLMMode.COMPLETION: - response_content = output["text"] - - assistant_message = AssistantPromptMessage(content=response_content, tool_calls=[]) - - if tool_calls: - if function_calling_type == "tool_call": - assistant_message.tool_calls = self._extract_response_tool_calls(tool_calls) - elif function_calling_type == "function_call": - assistant_message.tool_calls = [self._extract_response_function_call(tool_calls)] - - usage = response_json.get("usage") - if usage: - # transform usage - prompt_tokens = usage["prompt_tokens"] - completion_tokens = usage["completion_tokens"] - else: - # calculate num tokens - prompt_tokens = self._num_tokens_from_string(model, prompt_messages[0].content) - completion_tokens = self._num_tokens_from_string(model, assistant_message.content) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - result = LLMResult( - model=response_json["model"], - prompt_messages=prompt_messages, - 
message=assistant_message, - usage=usage, - ) - - return result - - def _convert_prompt_message_to_dict(self, message: PromptMessage, credentials: Optional[dict] = None) -> dict: - """ - Convert PromptMessage to dict for OpenAI API format - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(PromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - sub_message_dict = { - "type": "image_url", - "image_url": {"url": message_content.data, "detail": message_content.detail.value}, - } - sub_messages.append(sub_message_dict) - - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls: - function_calling_type = credentials.get("function_calling_type", "no_call") - if function_calling_type == "tool_call": - message_dict["tool_calls"] = [tool_call.dict() for tool_call in message.tool_calls] - elif function_calling_type == "function_call": - function_call = message.tool_calls[0] - message_dict["function_call"] = { - "name": function_call.function.name, - "arguments": function_call.function.arguments, - } - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - 
function_calling_type = credentials.get("function_calling_type", "no_call") - if function_calling_type == "tool_call": - message_dict = {"role": "tool", "content": message.content, "tool_call_id": message.tool_call_id} - elif function_calling_type == "function_call": - message_dict = {"role": "function", "content": message.content, "name": message.tool_call_id} - else: - raise ValueError(f"Got unknown type {message}") - - if message.name and message_dict.get("role", "") != "tool": - message_dict["name"] = message.name - - return message_dict - - def _num_tokens_from_string( - self, model: str, text: Union[str, list[PromptMessageContent]], tools: Optional[list[PromptMessageTool]] = None - ) -> int: - """ - Approximate num tokens for model with gpt2 tokenizer. - - :param model: model name - :param text: prompt text - :param tools: tools for tool calling - :return: number of tokens - """ - if isinstance(text, str): - full_text = text - else: - full_text = "" - for message_content in text: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(PromptMessageContent, message_content) - full_text += message_content.data - - num_tokens = self._get_num_tokens_by_gpt2(full_text) - - if tools: - num_tokens += self._num_tokens_for_tools(tools) - - return num_tokens - - def _num_tokens_from_messages( - self, - model: str, - messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - credentials: dict = None, - ) -> int: - """ - Approximate num tokens with GPT2 tokenizer. 
- """ - - tokens_per_message = 3 - tokens_per_name = 1 - - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m, credentials) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - # Cast str(value) in case the message value is not a string - # This occurs with function messages - # TODO: The current token calculation method for the image type is not implemented, - # which need to download the image and then get the resolution for calculation, - # and will increase the request delay - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - if key == "tool_calls": - for tool_call in value: - for t_key, t_value in tool_call.items(): - num_tokens += self._get_num_tokens_by_gpt2(t_key) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += self._get_num_tokens_by_gpt2(f_key) - num_tokens += self._get_num_tokens_by_gpt2(f_value) - else: - num_tokens += self._get_num_tokens_by_gpt2(t_key) - num_tokens += self._get_num_tokens_by_gpt2(t_value) - else: - num_tokens += self._get_num_tokens_by_gpt2(str(value)) - - if key == "name": - num_tokens += tokens_per_name - - # every reply is primed with assistant - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(tools) - - return num_tokens - - def _num_tokens_for_tools(self, tools: list[PromptMessageTool]) -> int: - """ - Calculate num tokens for tool calling with tiktoken package. 
- - :param tools: tools for tool calling - :return: number of tokens - """ - num_tokens = 0 - for tool in tools: - num_tokens += self._get_num_tokens_by_gpt2("type") - num_tokens += self._get_num_tokens_by_gpt2("function") - num_tokens += self._get_num_tokens_by_gpt2("function") - - # calculate num tokens for function object - num_tokens += self._get_num_tokens_by_gpt2("name") - num_tokens += self._get_num_tokens_by_gpt2(tool.name) - num_tokens += self._get_num_tokens_by_gpt2("description") - num_tokens += self._get_num_tokens_by_gpt2(tool.description) - parameters = tool.parameters - num_tokens += self._get_num_tokens_by_gpt2("parameters") - if "title" in parameters: - num_tokens += self._get_num_tokens_by_gpt2("title") - num_tokens += self._get_num_tokens_by_gpt2(parameters.get("title")) - num_tokens += self._get_num_tokens_by_gpt2("type") - num_tokens += self._get_num_tokens_by_gpt2(parameters.get("type")) - if "properties" in parameters: - num_tokens += self._get_num_tokens_by_gpt2("properties") - for key, value in parameters.get("properties").items(): - num_tokens += self._get_num_tokens_by_gpt2(key) - for field_key, field_value in value.items(): - num_tokens += self._get_num_tokens_by_gpt2(field_key) - if field_key == "enum": - for enum_field in field_value: - num_tokens += 3 - num_tokens += self._get_num_tokens_by_gpt2(enum_field) - else: - num_tokens += self._get_num_tokens_by_gpt2(field_key) - num_tokens += self._get_num_tokens_by_gpt2(str(field_value)) - if "required" in parameters: - num_tokens += self._get_num_tokens_by_gpt2("required") - for required_field in parameters["required"]: - num_tokens += 3 - num_tokens += self._get_num_tokens_by_gpt2(required_field) - - return num_tokens - - def _extract_response_tool_calls(self, response_tool_calls: list[dict]) -> list[AssistantPromptMessage.ToolCall]: - """ - Extract tool calls from response - - :param response_tool_calls: response tool calls - :return: list of tool calls - """ - tool_calls = [] - if 
response_tool_calls: - for response_tool_call in response_tool_calls: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call.get("function", {}).get("name", ""), - arguments=response_tool_call.get("function", {}).get("arguments", ""), - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_tool_call.get("id", ""), type=response_tool_call.get("type", ""), function=function - ) - tool_calls.append(tool_call) - - return tool_calls - - def _extract_response_function_call(self, response_function_call) -> AssistantPromptMessage.ToolCall: - """ - Extract function call from response - - :param response_function_call: response function call - :return: tool call - """ - tool_call = None - if response_function_call: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_function_call.get("name", ""), arguments=response_function_call.get("arguments", "") - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_function_call.get("id", ""), type="function", function=function - ) - - return tool_call diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.py b/api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.py deleted file mode 100644 index ca6f1852872fed..00000000000000 --- a/api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class OAICompatProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.yaml b/api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.yaml deleted file mode 100644 index 88c76fe16ef733..00000000000000 --- 
a/api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.yaml +++ /dev/null @@ -1,162 +0,0 @@ -provider: openai_api_compatible -label: - en_US: OpenAI-API-compatible -description: - en_US: Model providers compatible with OpenAI's API standard, such as LM Studio. - zh_Hans: 兼容 OpenAI API 的模型供应商,例如 LM Studio 。 -supported_model_types: - - llm - - text-embedding - - speech2text -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter full model name - zh_Hans: 输入模型全称 - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: false - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: endpoint_url - label: - zh_Hans: API endpoint URL - en_US: API endpoint URL - type: text-input - required: true - placeholder: - zh_Hans: Base URL, e.g. https://api.openai.com/v1 - en_US: Base URL, e.g. https://api.openai.com/v1 - - variable: mode - show_on: - - variable: __model_type - value: llm - label: - en_US: Completion mode - type: select - required: false - default: chat - placeholder: - zh_Hans: 选择对话类型 - en_US: Select completion mode - options: - - value: completion - label: - en_US: Completion - zh_Hans: 补全 - - value: chat - label: - en_US: Chat - zh_Hans: 对话 - - variable: context_size - label: - zh_Hans: 模型上下文长度 - en_US: Model context size - required: true - show_on: - - variable: __model_type - value: llm - type: text-input - default: '4096' - placeholder: - zh_Hans: 在此输入您的模型上下文长度 - en_US: Enter your Model context size - - variable: context_size - label: - zh_Hans: 模型上下文长度 - en_US: Model context size - required: true - show_on: - - variable: __model_type - value: text-embedding - type: text-input - default: '4096' - placeholder: - zh_Hans: 在此输入您的模型上下文长度 - en_US: Enter your Model context size - - variable: max_tokens_to_sample - label: - zh_Hans: 最大 token 上限 - en_US: Upper bound 
for max tokens - show_on: - - variable: __model_type - value: llm - default: '4096' - type: text-input - - variable: function_calling_type - show_on: - - variable: __model_type - value: llm - label: - en_US: Function calling - type: select - required: false - default: no_call - options: - - value: function_call - label: - en_US: Function Call - zh_Hans: Function Call - - value: tool_call - label: - en_US: Tool Call - zh_Hans: Tool Call - - value: no_call - label: - en_US: Not Support - zh_Hans: 不支持 - - variable: stream_function_calling - show_on: - - variable: __model_type - value: llm - label: - en_US: Stream function calling - type: select - required: false - default: not_supported - options: - - value: supported - label: - en_US: Support - zh_Hans: 支持 - - value: not_supported - label: - en_US: Not Support - zh_Hans: 不支持 - - variable: vision_support - show_on: - - variable: __model_type - value: llm - label: - zh_Hans: Vision 支持 - en_US: Vision Support - type: select - required: false - default: no_support - options: - - value: support - label: - en_US: Support - zh_Hans: 支持 - - value: no_support - label: - en_US: Not Support - zh_Hans: 不支持 - - variable: stream_mode_delimiter - label: - zh_Hans: 流模式返回结果的分隔符 - en_US: Delimiter for streaming results - show_on: - - variable: __model_type - value: llm - default: '\n\n' - type: text-input diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/__init__.py b/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py b/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py deleted file mode 100644 index 405096578cdd5d..00000000000000 --- a/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py +++ /dev/null @@ -1,61 +0,0 @@ -from typing 
import IO, Optional -from urllib.parse import urljoin - -import requests - -from core.model_runtime.errors.invoke import InvokeBadRequestError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel -from core.model_runtime.model_providers.openai_api_compatible._common import _CommonOaiApiCompat - - -class OAICompatSpeech2TextModel(_CommonOaiApiCompat, Speech2TextModel): - """ - Model class for OpenAI Compatible Speech to text model. - """ - - def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :param user: unique user id - :return: text for given audio file - """ - headers = {} - - api_key = credentials.get("api_key") - if api_key: - headers["Authorization"] = f"Bearer {api_key}" - - endpoint_url = credentials.get("endpoint_url") - if not endpoint_url.endswith("/"): - endpoint_url += "/" - endpoint_url = urljoin(endpoint_url, "audio/transcriptions") - - payload = {"model": model} - files = [("file", file)] - response = requests.post(endpoint_url, headers=headers, data=payload, files=files) - - if response.status_code != 200: - raise InvokeBadRequestError(response.text) - response_data = response.json() - return response_data["text"] - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - audio_file_path = self._get_demo_file_path() - - with open(audio_file_path, "rb") as audio_file: - self._invoke(model, credentials, audio_file) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/text_embedding/__init__.py 
b/api/core/model_runtime/model_providers/openai_api_compatible/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openllm/__init__.py b/api/core/model_runtime/model_providers/openllm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openllm/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/openllm/_assets/icon_l_en.svg deleted file mode 100644 index 59bb57992ca511..00000000000000 --- a/api/core/model_runtime/model_providers/openllm/_assets/icon_l_en.svg +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/openllm/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/openllm/_assets/icon_s_en.svg deleted file mode 100644 index d25d627020c26d..00000000000000 --- a/api/core/model_runtime/model_providers/openllm/_assets/icon_s_en.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/openllm/llm/__init__.py b/api/core/model_runtime/model_providers/openllm/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openllm/llm/llm.py b/api/core/model_runtime/model_providers/openllm/llm/llm.py deleted file mode 100644 index 34b4de796206dd..00000000000000 --- a/api/core/model_runtime/model_providers/openllm/llm/llm.py +++ /dev/null @@ -1,264 +0,0 @@ -from collections.abc import Generator - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - 
ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.openllm.llm.openllm_generate import OpenLLMGenerate, OpenLLMGenerateMessage -from core.model_runtime.model_providers.openllm.llm.openllm_generate_errors import ( - BadRequestError, - InsufficientAccountBalanceError, - InternalServerError, - InvalidAPIKeyError, - InvalidAuthenticationError, - RateLimitReachedError, -) - - -class OpenLLMLargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - return self._generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate credentials for Baichuan model - """ - if not credentials.get("server_url"): - raise CredentialsValidateFailedError("Invalid server URL") - - # ping - instance = OpenLLMGenerate() - try: - instance.generate( - server_url=credentials["server_url"], - model_name=model, - prompt_messages=[OpenLLMGenerateMessage(content="ping\nAnswer: ", role="user")], - model_parameters={ - "max_tokens": 64, - "temperature": 0.8, - "top_p": 0.9, - "top_k": 15, - }, - stream=False, - user="", - stop=[], - ) - except InvalidAuthenticationError as e: - raise CredentialsValidateFailedError(f"Invalid API key: {e}") - - def get_num_tokens( - self, - model: str, - 
credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool] | None = None, - ) -> int: - return self._num_tokens_from_messages(prompt_messages, tools) - - def _num_tokens_from_messages(self, messages: list[PromptMessage], tools: list[PromptMessageTool]) -> int: - """ - Calculate num tokens for OpenLLM model - it's a generate model, so we just join them by spe - """ - messages = ",".join([message.content for message in messages]) - return self._get_num_tokens_by_gpt2(messages) - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - client = OpenLLMGenerate() - response = client.generate( - model_name=model, - server_url=credentials["server_url"], - prompt_messages=[self._convert_prompt_message_to_openllm_message(message) for message in prompt_messages], - model_parameters=model_parameters, - stop=stop, - stream=stream, - user=user, - ) - - if stream: - return self._handle_chat_generate_stream_response( - model=model, prompt_messages=prompt_messages, credentials=credentials, response=response - ) - return self._handle_chat_generate_response( - model=model, prompt_messages=prompt_messages, credentials=credentials, response=response - ) - - def _convert_prompt_message_to_openllm_message(self, prompt_message: PromptMessage) -> OpenLLMGenerateMessage: - """ - convert PromptMessage to OpenLLMGenerateMessage so that we can use OpenLLMGenerateMessage interface - """ - if isinstance(prompt_message, UserPromptMessage): - return OpenLLMGenerateMessage(role=OpenLLMGenerateMessage.Role.USER.value, content=prompt_message.content) - elif isinstance(prompt_message, AssistantPromptMessage): - return OpenLLMGenerateMessage( - role=OpenLLMGenerateMessage.Role.ASSISTANT.value, content=prompt_message.content - ) - else: - 
raise NotImplementedError(f"Prompt message type {type(prompt_message)} is not supported") - - def _handle_chat_generate_response( - self, model: str, prompt_messages: list[PromptMessage], credentials: dict, response: OpenLLMGenerateMessage - ) -> LLMResult: - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=response.usage["prompt_tokens"], - completion_tokens=response.usage["completion_tokens"], - ) - return LLMResult( - model=model, - prompt_messages=prompt_messages, - message=AssistantPromptMessage( - content=response.content, - tool_calls=[], - ), - usage=usage, - ) - - def _handle_chat_generate_stream_response( - self, - model: str, - prompt_messages: list[PromptMessage], - credentials: dict, - response: Generator[OpenLLMGenerateMessage, None, None], - ) -> Generator[LLMResultChunk, None, None]: - for message in response: - if message.usage: - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=message.usage["prompt_tokens"], - completion_tokens=message.usage["completion_tokens"], - ) - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=message.content, tool_calls=[]), - usage=usage, - finish_reason=message.stop_reason or None, - ), - ) - else: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=message.content, tool_calls=[]), - finish_reason=message.stop_reason or None, - ), - ) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - rules = [ - ParameterRule( - name="temperature", - type=ParameterType.FLOAT, - use_template="temperature", - label=I18nObject(zh_Hans="温度", en_US="Temperature"), - ), - ParameterRule( - name="top_p", - type=ParameterType.FLOAT, - 
use_template="top_p", - label=I18nObject(zh_Hans="Top P", en_US="Top P"), - ), - ParameterRule( - name="top_k", - type=ParameterType.INT, - use_template="top_k", - min=1, - default=1, - label=I18nObject(zh_Hans="Top K", en_US="Top K"), - ), - ParameterRule( - name="max_tokens", - type=ParameterType.INT, - use_template="max_tokens", - min=1, - default=512, - label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"), - ), - ] - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.LLM, - model_properties={ - ModelPropertyKey.MODE: LLMMode.COMPLETION.value, - }, - parameter_rules=rules, - ) - - return entity - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [InternalServerError], - InvokeRateLimitError: [RateLimitReachedError], - InvokeAuthorizationError: [ - InvalidAuthenticationError, - InsufficientAccountBalanceError, - InvalidAPIKeyError, - ], - InvokeBadRequestError: [BadRequestError, KeyError], - } diff --git a/api/core/model_runtime/model_providers/openllm/llm/openllm_generate.py b/api/core/model_runtime/model_providers/openllm/llm/openllm_generate.py deleted file mode 100644 index 351dcced153750..00000000000000 --- a/api/core/model_runtime/model_providers/openllm/llm/openllm_generate.py +++ /dev/null @@ -1,198 +0,0 @@ -from collections.abc import Generator -from enum import Enum -from json import dumps, loads -from typing import Any, Union - -from requests import Response, post -from requests.exceptions import ConnectionError, InvalidSchema, MissingSchema - -from core.model_runtime.model_providers.openllm.llm.openllm_generate_errors import ( - BadRequestError, - InternalServerError, - InvalidAuthenticationError, -) - - -class OpenLLMGenerateMessage: - class Role(Enum): - USER = "user" - ASSISTANT = "assistant" - - role: str = Role.USER.value - content: str - usage: dict[str, int] = None - stop_reason: str = "" - - def to_dict(self) -> dict[str, Any]: - return { - "role": self.role, - "content": self.content, - } - - def __init__(self, content: str, role: str = "user") -> None: - self.content = content - self.role = role - - -class OpenLLMGenerate: - def generate( - self, - server_url: str, - model_name: str, - stream: bool, - model_parameters: dict[str, Any], - stop: list[str], - prompt_messages: list[OpenLLMGenerateMessage], - user: str, - ) -> Union[Generator[OpenLLMGenerateMessage, None, None], OpenLLMGenerateMessage]: - if not server_url: - raise InvalidAuthenticationError("Invalid server URL") - - default_llm_config = { - "max_new_tokens": 128, - "min_length": 0, - "early_stopping": False, - 
"num_beams": 1, - "num_beam_groups": 1, - "use_cache": True, - "temperature": 0.75, - "top_k": 15, - "top_p": 0.9, - "typical_p": 1, - "epsilon_cutoff": 0, - "eta_cutoff": 0, - "diversity_penalty": 0, - "repetition_penalty": 1, - "encoder_repetition_penalty": 1, - "length_penalty": 1, - "no_repeat_ngram_size": 0, - "renormalize_logits": False, - "remove_invalid_values": False, - "num_return_sequences": 1, - "output_attentions": False, - "output_hidden_states": False, - "output_scores": False, - "encoder_no_repeat_ngram_size": 0, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, - "use_beam_search": False, - "ignore_eos": False, - "skip_special_tokens": True, - } - - if "max_tokens" in model_parameters and type(model_parameters["max_tokens"]) == int: - default_llm_config["max_new_tokens"] = model_parameters["max_tokens"] - - if "temperature" in model_parameters and type(model_parameters["temperature"]) == float: - default_llm_config["temperature"] = model_parameters["temperature"] - - if "top_p" in model_parameters and type(model_parameters["top_p"]) == float: - default_llm_config["top_p"] = model_parameters["top_p"] - - if "top_k" in model_parameters and type(model_parameters["top_k"]) == int: - default_llm_config["top_k"] = model_parameters["top_k"] - - if "use_cache" in model_parameters and type(model_parameters["use_cache"]) == bool: - default_llm_config["use_cache"] = model_parameters["use_cache"] - - headers = {"Content-Type": "application/json", "accept": "application/json"} - - if stream: - url = f"{server_url}/v1/generate_stream" - timeout = 10 - else: - url = f"{server_url}/v1/generate" - timeout = 120 - - data = { - "stop": stop or [], - "prompt": "\n".join([message.content for message in prompt_messages]), - "llm_config": default_llm_config, - } - - try: - response = post(url=url, data=dumps(data), timeout=timeout, stream=stream, headers=headers) - except (ConnectionError, InvalidSchema, MissingSchema) as e: - # cloud not connect to the server 
- raise InvalidAuthenticationError(f"Invalid server URL: {e}") - - if not response.ok: - resp = response.json() - msg = resp["msg"] - if response.status_code == 400: - raise BadRequestError(msg) - elif response.status_code == 404: - raise InvalidAuthenticationError(msg) - elif response.status_code == 500: - raise InternalServerError(msg) - else: - raise InternalServerError(msg) - - if stream: - return self._handle_chat_stream_generate_response(response) - return self._handle_chat_generate_response(response) - - def _handle_chat_generate_response(self, response: Response) -> OpenLLMGenerateMessage: - try: - data = response.json() - except Exception as e: - raise InternalServerError(f"Failed to convert response to json: {e} with text: {response.text}") - - message = data["outputs"][0] - text = message["text"] - token_ids = message["token_ids"] - prompt_token_ids = data["prompt_token_ids"] - stop_reason = message["finish_reason"] - - message = OpenLLMGenerateMessage(content=text, role=OpenLLMGenerateMessage.Role.ASSISTANT.value) - message.stop_reason = stop_reason - message.usage = { - "prompt_tokens": len(prompt_token_ids), - "completion_tokens": len(token_ids), - "total_tokens": len(prompt_token_ids) + len(token_ids), - } - - return message - - def _handle_chat_stream_generate_response( - self, response: Response - ) -> Generator[OpenLLMGenerateMessage, None, None]: - completion_usage = 0 - - for line in response.iter_lines(): - if not line: - continue - - line: str = line.decode("utf-8") - if line.startswith("data: "): - line = line[6:].strip() - - if line == "[DONE]": - return - - try: - data = loads(line) - except Exception as e: - raise InternalServerError(f"Failed to convert response to json: {e} with text: {line}") - - output = data["outputs"] - - for choice in output: - text = choice["text"] - token_ids = choice["token_ids"] - - completion_usage += len(token_ids) - message = OpenLLMGenerateMessage(content=text, 
role=OpenLLMGenerateMessage.Role.ASSISTANT.value) - - if choice.get("finish_reason"): - finish_reason = choice["finish_reason"] - prompt_token_ids = data["prompt_token_ids"] - message.stop_reason = finish_reason - message.usage = { - "prompt_tokens": len(prompt_token_ids), - "completion_tokens": completion_usage, - "total_tokens": completion_usage + len(prompt_token_ids), - } - - yield message diff --git a/api/core/model_runtime/model_providers/openllm/llm/openllm_generate_errors.py b/api/core/model_runtime/model_providers/openllm/llm/openllm_generate_errors.py deleted file mode 100644 index 309b5cf413bd54..00000000000000 --- a/api/core/model_runtime/model_providers/openllm/llm/openllm_generate_errors.py +++ /dev/null @@ -1,22 +0,0 @@ -class InvalidAuthenticationError(Exception): - pass - - -class InvalidAPIKeyError(Exception): - pass - - -class RateLimitReachedError(Exception): - pass - - -class InsufficientAccountBalanceError(Exception): - pass - - -class InternalServerError(Exception): - pass - - -class BadRequestError(Exception): - pass diff --git a/api/core/model_runtime/model_providers/openllm/openllm.py b/api/core/model_runtime/model_providers/openllm/openllm.py deleted file mode 100644 index 80148021446944..00000000000000 --- a/api/core/model_runtime/model_providers/openllm/openllm.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class OpenLLMProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/openllm/openllm.yaml b/api/core/model_runtime/model_providers/openllm/openllm.yaml deleted file mode 100644 index fef52695e3eeed..00000000000000 --- a/api/core/model_runtime/model_providers/openllm/openllm.yaml +++ /dev/null @@ -1,37 +0,0 @@ -provider: openllm -label: - en_US: OpenLLM -icon_small: - en_US: icon_s_en.svg -icon_large: - 
en_US: icon_l_en.svg -background: "#F9FAFB" -help: - title: - en_US: How to deploy OpenLLM - zh_Hans: 如何部署 OpenLLM - url: - en_US: https://github.com/bentoml/OpenLLM -supported_model_types: - - llm - - text-embedding -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: server_url - label: - zh_Hans: 服务器URL - en_US: Server url - type: text-input - required: true - placeholder: - zh_Hans: 在此输入OpenLLM的服务器地址,如 http://192.168.1.100:3000 - en_US: Enter the url of your OpenLLM, e.g. http://192.168.1.100:3000 diff --git a/api/core/model_runtime/model_providers/openllm/text_embedding/__init__.py b/api/core/model_runtime/model_providers/openllm/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openrouter/__init__.py b/api/core/model_runtime/model_providers/openrouter/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openrouter/_assets/openrouter.svg b/api/core/model_runtime/model_providers/openrouter/_assets/openrouter.svg deleted file mode 100644 index 2e9590d923b8ef..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/_assets/openrouter.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/openrouter/_assets/openrouter_square.svg b/api/core/model_runtime/model_providers/openrouter/_assets/openrouter_square.svg deleted file mode 100644 index ed81fc041fc3e2..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/_assets/openrouter_square.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/openrouter/llm/__init__.py b/api/core/model_runtime/model_providers/openrouter/llm/__init__.py deleted 
file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml b/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml deleted file mode 100644 index d9497b76b894c4..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml +++ /dev/null @@ -1,27 +0,0 @@ -- openai/o1-preview -- openai/o1-mini -- openai/gpt-4o -- openai/gpt-4o-mini -- openai/gpt-4 -- openai/gpt-4-32k -- openai/gpt-3.5-turbo -- anthropic/claude-3.5-sonnet -- anthropic/claude-3-haiku -- anthropic/claude-3-opus -- anthropic/claude-3-sonnet -- google/gemini-pro-1.5 -- google/gemini-flash-1.5 -- google/gemini-pro -- cohere/command-r-plus -- cohere/command-r -- meta-llama/llama-3.1-405b-instruct -- meta-llama/llama-3.1-70b-instruct -- meta-llama/llama-3.1-8b-instruct -- meta-llama/llama-3-70b-instruct -- meta-llama/llama-3-8b-instruct -- mistralai/mixtral-8x22b-instruct -- mistralai/mixtral-8x7b-instruct -- mistralai/mistral-7b-instruct -- qwen/qwen-2-72b-instruct -- deepseek/deepseek-chat -- deepseek/deepseek-coder diff --git a/api/core/model_runtime/model_providers/openrouter/llm/claude-3-5-sonnet.yaml b/api/core/model_runtime/model_providers/openrouter/llm/claude-3-5-sonnet.yaml deleted file mode 100644 index 40558854e2a7bd..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/claude-3-5-sonnet.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: anthropic/claude-3.5-sonnet -label: - en_US: claude-3.5-sonnet -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: "3.00" - output: "15.00" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/claude-3-haiku.yaml b/api/core/model_runtime/model_providers/openrouter/llm/claude-3-haiku.yaml deleted file mode 100644 index ce17d4123e5c13..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/claude-3-haiku.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: anthropic/claude-3-haiku -label: - en_US: claude-3-haiku -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: "0.25" - output: "1.25" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/claude-3-opus.yaml b/api/core/model_runtime/model_providers/openrouter/llm/claude-3-opus.yaml deleted file mode 100644 index 68a92219eb0e52..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/claude-3-opus.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: anthropic/claude-3-opus -label: - en_US: claude-3-opus -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: "15.00" - output: "75.00" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/claude-3-sonnet.yaml b/api/core/model_runtime/model_providers/openrouter/llm/claude-3-sonnet.yaml deleted file mode 100644 index ede88459ca0ccc..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/claude-3-sonnet.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: anthropic/claude-3-sonnet -label: - en_US: claude-3-sonnet -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 4096 - min: 1 - max: 4096 - - name: response_format - use_template: response_format -pricing: - input: "3.00" - output: "15.00" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/command-r-plus.yaml b/api/core/model_runtime/model_providers/openrouter/llm/command-r-plus.yaml deleted file mode 100644 index a23eb269d1084b..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/command-r-plus.yaml +++ /dev/null @@ -1,45 +0,0 @@ -model: cohere/command-r-plus -label: - en_US: command-r-plus -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: top_p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 -pricing: - input: "3" - output: "15" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/command-r.yaml b/api/core/model_runtime/model_providers/openrouter/llm/command-r.yaml deleted file mode 100644 index 7165bf29b0c22c..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/command-r.yaml +++ /dev/null @@ -1,45 +0,0 @@ -model: cohere/command-r -label: - en_US: command-r -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - max: 5.0 - - name: top_p - use_template: top_p - default: 0.75 - min: 0.01 - max: 0.99 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - default: 0 - min: 0 - max: 500 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 1024 - max: 4096 -pricing: - input: "0.5" - output: "1.5" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/deepseek-chat.yaml b/api/core/model_runtime/model_providers/openrouter/llm/deepseek-chat.yaml deleted file mode 100644 index 7a1dea69503daa..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/deepseek-chat.yaml +++ /dev/null @@ -1,50 +0,0 @@ -model: deepseek/deepseek-chat -label: - en_US: deepseek-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 1 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - type: float - default: 1 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. 
- - name: frequency_penalty - use_template: frequency_penalty - default: 0 - min: -2.0 - max: 2.0 - help: - zh_Hans: 介于 -2.0 和 2.0 之间的数字。如果该值为正,那么新 token 会根据其在已有文本中的出现频率受到相应的惩罚,降低模型重复相同内容的可能性。 - en_US: A number between -2.0 and 2.0. If the value is positive, new tokens are penalized based on their frequency of occurrence in existing text, reducing the likelihood that the model will repeat the same content. -pricing: - input: "0.14" - output: "0.28" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/deepseek-coder.yaml b/api/core/model_runtime/model_providers/openrouter/llm/deepseek-coder.yaml deleted file mode 100644 index c05f4769b83354..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/deepseek-coder.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: deepseek/deepseek-coder -label: - en_US: deepseek-coder -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 4096 - default: 1024 -pricing: - input: "0.14" - output: "0.28" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/gemini-1.5-flash.yaml b/api/core/model_runtime/model_providers/openrouter/llm/gemini-1.5-flash.yaml deleted file mode 100644 index 0b2f329b28ea49..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/gemini-1.5-flash.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: google/gemini-flash-1.5 -label: - en_US: gemini-flash-1.5 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k 
- label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format -pricing: - input: "0.25" - output: "0.75" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/gemini-1.5-pro.yaml b/api/core/model_runtime/model_providers/openrouter/llm/gemini-1.5-pro.yaml deleted file mode 100644 index 679ce9bdcd443f..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/gemini-1.5-pro.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: google/gemini-pro-1.5 -label: - en_US: gemini-pro-1.5 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format -pricing: - input: "2.5" - output: "7.5" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/gemini-pro.yaml b/api/core/model_runtime/model_providers/openrouter/llm/gemini-pro.yaml deleted file mode 100644 index 9f5d96c5b82e64..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/gemini-pro.yaml +++ /dev/null @@ -1,38 +0,0 @@ -model: google/gemini-pro -label: - en_US: gemini-pro -model_type: llm -features: - - agent-thought - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 30720 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 2048 - - name: response_format - use_template: response_format -pricing: - input: "0.125" - output: "0.375" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/gpt-3.5-turbo.yaml b/api/core/model_runtime/model_providers/openrouter/llm/gpt-3.5-turbo.yaml deleted file mode 100644 index 186c1cc6636049..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/gpt-3.5-turbo.yaml +++ /dev/null @@ -1,42 +0,0 @@ -model: openai/gpt-3.5-turbo -label: - en_US: gpt-3.5-turbo -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 16385 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: "0.5" - output: "1.5" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/gpt-4-32k.yaml b/api/core/model_runtime/model_providers/openrouter/llm/gpt-4-32k.yaml deleted file mode 100644 index 8c2989b300076d..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/gpt-4-32k.yaml +++ /dev/null @@ -1,57 +0,0 @@ -model: openai/gpt-4-32k -label: - en_US: gpt-4-32k -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - 
name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 32768 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: - 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: - If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. - required: false - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: "60" - output: "120" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/gpt-4.yaml b/api/core/model_runtime/model_providers/openrouter/llm/gpt-4.yaml deleted file mode 100644 index ef19d4f6f0dba4..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/gpt-4.yaml +++ /dev/null @@ -1,57 +0,0 @@ -model: openai/gpt-4 -label: - en_US: gpt-4 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: - 
如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: - If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. - required: false - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: "30" - output: "60" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/gpt-4o-2024-08-06.yaml b/api/core/model_runtime/model_providers/openrouter/llm/gpt-4o-2024-08-06.yaml deleted file mode 100644 index 0be325f55bdc90..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/gpt-4o-2024-08-06.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: gpt-4o-2024-08-06 -label: - zh_Hans: gpt-4o-2024-08-06 - en_US: gpt-4o-2024-08-06 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 16384 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: '2.50' - output: '10.00' - unit: '0.000001' - currency: USD diff --git 
a/api/core/model_runtime/model_providers/openrouter/llm/gpt-4o-mini.yaml b/api/core/model_runtime/model_providers/openrouter/llm/gpt-4o-mini.yaml deleted file mode 100644 index 3b1d95643d22aa..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/gpt-4o-mini.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: openai/gpt-4o-mini -label: - en_US: gpt-4o-mini -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 16384 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: "0.15" - output: "0.60" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/gpt-4o.yaml b/api/core/model_runtime/model_providers/openrouter/llm/gpt-4o.yaml deleted file mode 100644 index a8c97efdd64b12..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/gpt-4o.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: openai/gpt-4o -label: - en_US: gpt-4o -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call - - vision -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - - name: response_format 
- label: - zh_Hans: 回复格式 - en_US: Response Format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: "5.00" - output: "15.00" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llama-3-70b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/llama-3-70b-instruct.yaml deleted file mode 100644 index b91c39e729eda3..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/llama-3-70b-instruct.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: meta-llama/llama-3-70b-instruct -label: - en_US: llama-3-70b-instruct -model_type: llm -model_properties: - mode: completion - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 2048 -pricing: - input: "0.59" - output: "0.79" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llama-3-8b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/llama-3-8b-instruct.yaml deleted file mode 100644 index 84b2c7fac2c0f0..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/llama-3-8b-instruct.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: meta-llama/llama-3-8b-instruct -label: - en_US: llama-3-8b-instruct -model_type: llm -model_properties: - mode: completion - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 2048 -pricing: - input: "0.07" - output: "0.07" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-405b-instruct.yaml 
b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-405b-instruct.yaml deleted file mode 100644 index a489ce1b5ad384..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-405b-instruct.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: meta-llama/llama-3.1-405b-instruct -label: - en_US: llama-3.1-405b-instruct -model_type: llm -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 131072 -pricing: - input: "2.7" - output: "2.7" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-70b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-70b-instruct.yaml deleted file mode 100644 index 12037411b100bd..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-70b-instruct.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: meta-llama/llama-3.1-70b-instruct -label: - en_US: llama-3.1-70b-instruct -model_type: llm -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 131072 -pricing: - input: "0.52" - output: "0.75" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-8b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-8b-instruct.yaml deleted file mode 100644 index 6f06493f293f9d..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/llama-3.1-8b-instruct.yaml +++ /dev/null @@ -1,23 +0,0 @@ -model: meta-llama/llama-3.1-8b-instruct -label: - en_US: llama-3.1-8b-instruct -model_type: llm -model_properties: - mode: chat - 
context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - required: true - default: 512 - min: 1 - max: 131072 -pricing: - input: "0.06" - output: "0.06" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llm.py b/api/core/model_runtime/model_providers/openrouter/llm/llm.py deleted file mode 100644 index 736ab8e7a8f6ce..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/llm.py +++ /dev/null @@ -1,106 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool -from core.model_runtime.entities.model_entities import AIModelEntity -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class OpenRouterLargeLanguageModel(OAIAPICompatLargeLanguageModel): - def _update_credential(self, model: str, credentials: dict): - credentials["endpoint_url"] = "https://openrouter.ai/api/v1" - credentials["mode"] = self.get_model_mode(model).value - credentials["function_calling_type"] = "tool_call" - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._update_credential(model, credentials) - - return self._generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._update_credential(model, credentials) - - return super().validate_credentials(model, credentials) 
- - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._update_credential(model, credentials) - - block_as_stream = False - if model.startswith("openai/o1"): - block_as_stream = True - stop = None - - # invoke block as stream - if stream and block_as_stream: - return self._generate_block_as_stream( - model, credentials, prompt_messages, model_parameters, tools, stop, user - ) - else: - return super()._generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def _generate_block_as_stream( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - user: Optional[str] = None, - ) -> Generator: - resp: LLMResult = super()._generate( - model, credentials, prompt_messages, model_parameters, tools, stop, False, user - ) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=resp.message, - usage=self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=resp.usage.prompt_tokens, - completion_tokens=resp.usage.completion_tokens, - ), - finish_reason="stop", - ), - ) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - self._update_credential(model, credentials) - - return super().get_customizable_model_schema(model, credentials) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - self._update_credential(model, credentials) - - return super().get_num_tokens(model, credentials, prompt_messages, 
tools) diff --git a/api/core/model_runtime/model_providers/openrouter/llm/mistral-7b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/mistral-7b-instruct.yaml deleted file mode 100644 index 012dfc55ce18a8..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/mistral-7b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: mistralai/mistral-7b-instruct -label: - en_US: mistral-7b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 2048 -pricing: - input: "0.07" - output: "0.07" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/mixtral-8x22b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/mixtral-8x22b-instruct.yaml deleted file mode 100644 index f4eb4e45d95cb2..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/mixtral-8x22b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: mistralai/mixtral-8x22b-instruct -label: - en_US: mixtral-8x22b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 64000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8000 -pricing: - input: "0.65" - output: "0.65" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/mixtral-8x7b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/mixtral-8x7b-instruct.yaml deleted file mode 100644 index 7871e1f7a05c17..00000000000000 --- 
a/api/core/model_runtime/model_providers/openrouter/llm/mixtral-8x7b-instruct.yaml +++ /dev/null @@ -1,31 +0,0 @@ -model: mistralai/mixtral-8x7b-instruct -label: - zh_Hans: mixtral-8x7b-instruct - en_US: mixtral-8x7b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.7 - min: 0 - max: 1 - - name: top_p - use_template: top_p - default: 1 - min: 0 - max: 1 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8000 -pricing: - input: "0.24" - output: "0.24" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/o1-mini.yaml b/api/core/model_runtime/model_providers/openrouter/llm/o1-mini.yaml deleted file mode 100644 index 85a918ff5e0ed5..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/o1-mini.yaml +++ /dev/null @@ -1,40 +0,0 @@ -model: openai/o1-mini -label: - en_US: o1-mini -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 65536 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: response_format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: "3.00" - output: "12.00" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/o1-preview.yaml b/api/core/model_runtime/model_providers/openrouter/llm/o1-preview.yaml deleted file mode 100644 index 74b0a511bed270..00000000000000 --- 
a/api/core/model_runtime/model_providers/openrouter/llm/o1-preview.yaml +++ /dev/null @@ -1,40 +0,0 @@ -model: openai/o1-preview -label: - en_US: o1-preview -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 32768 - - name: response_format - label: - zh_Hans: 回复格式 - en_US: response_format - type: string - help: - zh_Hans: 指定模型必须输出的格式 - en_US: specifying the format that the model must output - required: false - options: - - text - - json_object -pricing: - input: "15.00" - output: "60.00" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/qwen2-72b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/qwen2-72b-instruct.yaml deleted file mode 100644 index 7b75fcb0c986a3..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/llm/qwen2-72b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: qwen/qwen-2-72b-instruct -label: - en_US: qwen-2-72b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: "0.59" - output: "0.79" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/openrouter.py b/api/core/model_runtime/model_providers/openrouter/openrouter.py deleted file mode 100644 index 2e59ab50598b8b..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/openrouter.py +++ /dev/null @@ -1,20 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class OpenRouterProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - try: - model_instance = self.get_model_instance(ModelType.LLM) - - model_instance.validate_credentials(model="openai/gpt-3.5-turbo", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/openrouter/openrouter.yaml b/api/core/model_runtime/model_providers/openrouter/openrouter.yaml deleted file mode 100644 index f7536609ec5193..00000000000000 --- a/api/core/model_runtime/model_providers/openrouter/openrouter.yaml +++ /dev/null @@ -1,105 +0,0 @@ -provider: openrouter -label: - en_US: OpenRouter -icon_small: - en_US: openrouter_square.svg -icon_large: - en_US: openrouter.svg -background: "#F1EFED" -help: - title: - en_US: Get your API key from openrouter.ai - zh_Hans: 从 openrouter.ai 获取 API Key - url: - en_US: https://openrouter.ai/keys -supported_model_types: - - llm -configurate_methods: - - predefined-model - - customizable-model -model_credential_schema: - model: - label: - en_US: Model 
Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter full model name - zh_Hans: 输入模型全称 - credential_form_schemas: - - variable: api_key - required: true - label: - en_US: API Key - type: secret-input - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: mode - show_on: - - variable: __model_type - value: llm - label: - en_US: Completion mode - type: select - required: false - default: chat - placeholder: - zh_Hans: 选择对话类型 - en_US: Select completion mode - options: - - value: completion - label: - en_US: Completion - zh_Hans: 补全 - - value: chat - label: - en_US: Chat - zh_Hans: 对话 - - variable: context_size - label: - zh_Hans: 模型上下文长度 - en_US: Model context size - required: true - type: text-input - default: "4096" - placeholder: - zh_Hans: 在此输入您的模型上下文长度 - en_US: Enter your Model context size - - variable: max_tokens_to_sample - label: - zh_Hans: 最大 token 上限 - en_US: Upper bound for max tokens - show_on: - - variable: __model_type - value: llm - default: "4096" - type: text-input - - variable: vision_support - show_on: - - variable: __model_type - value: llm - label: - zh_Hans: 是否支持 Vision - en_US: Vision Support - type: radio - required: false - default: "no_support" - options: - - value: "support" - label: - en_US: "Yes" - zh_Hans: 是 - - value: "no_support" - label: - en_US: "No" - zh_Hans: 否 -provider_credential_schema: - credential_form_schemas: - - variable: api_key - required: true - label: - en_US: API Key - type: secret-input - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/perfxcloud/__init__.py b/api/core/model_runtime/model_providers/perfxcloud/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/perfxcloud/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/perfxcloud/_assets/icon_l_en.svg deleted file mode 100644 index 060d9de3a95c30..00000000000000 --- 
a/api/core/model_runtime/model_providers/perfxcloud/_assets/icon_l_en.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/api/core/model_runtime/model_providers/perfxcloud/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/perfxcloud/_assets/icon_s_en.svg deleted file mode 100644 index be0c2eeb1ccc48..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/_assets/icon_s_en.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Llama3-Chinese_v2.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Llama3-Chinese_v2.yaml deleted file mode 100644 index bf91468fcf56bf..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Llama3-Chinese_v2.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: Llama3-Chinese_v2 -label: - en_US: Llama3-Chinese_v2 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. 
- - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3-70B-Instruct-GPTQ-Int4.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3-70B-Instruct-GPTQ-Int4.yaml deleted file mode 100644 index 781b837e8eceea..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3-70B-Instruct-GPTQ-Int4.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: Meta-Llama-3-70B-Instruct-GPTQ-Int4 -label: - en_US: Meta-Llama-3-70B-Instruct-GPTQ-Int4 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 1024 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3-8B-Instruct.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3-8B-Instruct.yaml deleted file mode 100644 index 67210e9020fe6a..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3-8B-Instruct.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: Meta-Llama-3-8B-Instruct -label: - en_US: Meta-Llama-3-8B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3.1-405B-Instruct-AWQ-INT4.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3.1-405B-Instruct-AWQ-INT4.yaml deleted file mode 100644 index 482632ff0673d2..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3.1-405B-Instruct-AWQ-INT4.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: Meta-Llama-3.1-405B-Instruct-AWQ-INT4 -label: - en_US: Meta-Llama-3.1-405B-Instruct-AWQ-INT4 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 410960 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3.1-8B-Instruct.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3.1-8B-Instruct.yaml deleted file mode 100644 index bbab46344c3a12..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Meta-Llama-3.1-8B-Instruct.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: Meta-Llama-3.1-8B-Instruct -label: - en_US: Meta-Llama-3.1-8B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.1 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen-14B-Chat-Int4.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen-14B-Chat-Int4.yaml deleted file mode 100644 index ec6d9bcc149fdf..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen-14B-Chat-Int4.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: Qwen-14B-Chat-Int4 -label: - en_US: Qwen-14B-Chat-Int4 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-110B-Chat-GPTQ-Int4.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-110B-Chat-GPTQ-Int4.yaml deleted file mode 100644 index b561a53039a78e..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-110B-Chat-GPTQ-Int4.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: Qwen1.5-110B-Chat-GPTQ-Int4 -label: - en_US: Qwen1.5-110B-Chat-GPTQ-Int4 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 128 - min: 1 - max: 256 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-72B-Chat-GPTQ-Int4.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-72B-Chat-GPTQ-Int4.yaml deleted file mode 100644 index ddb6fd977c5a56..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-72B-Chat-GPTQ-Int4.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: Qwen1.5-72B-Chat-GPTQ-Int4 -label: - en_US: Qwen1.5-72B-Chat-GPTQ-Int4 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 2048 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-7B.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-7B.yaml deleted file mode 100644 index 024c79dbcfd76c..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen1.5-7B.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: Qwen1.5-7B -label: - en_US: Qwen1.5-7B -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct-AWQ-int4.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct-AWQ-int4.yaml deleted file mode 100644 index 94f661f40d5c90..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct-AWQ-int4.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: Qwen2-72B-Instruct-AWQ-int4 -label: - en_US: Qwen2-72B-Instruct-AWQ-int4 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct-GPTQ-Int4.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct-GPTQ-Int4.yaml deleted file mode 100644 index a06f8d5ab18f4c..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct-GPTQ-Int4.yaml +++ /dev/null @@ -1,64 +0,0 @@ -model: Qwen2-72B-Instruct-GPTQ-Int4 -label: - en_US: Qwen2-72B-Instruct-GPTQ-Int4 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 2048 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.7 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct.yaml deleted file mode 100644 index cea6560295267a..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-72B-Instruct.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: Qwen2-72B-Instruct -label: - en_US: Qwen2-72B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-7B-Instruct.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-7B-Instruct.yaml deleted file mode 100644 index 4369411399e72a..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-7B-Instruct.yaml +++ /dev/null @@ -1,63 +0,0 @@ -model: Qwen2-7B-Instruct -label: - en_US: Qwen2-7B-Instruct -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: completion - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-7B.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-7B.yaml deleted file mode 100644 index d549ecd227dfb8..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2-7B.yaml +++ /dev/null @@ -1,64 +0,0 @@ -model: Qwen2-7B -label: - en_US: Qwen2-7B -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: completion - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2.5-72B-Instruct.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2.5-72B-Instruct.yaml deleted file mode 100644 index 15cbf01f1f66da..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2.5-72B-Instruct.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: Qwen2.5-72B-Instruct -label: - en_US: Qwen2.5-72B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 30720 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2.5-7B-Instruct.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2.5-7B-Instruct.yaml deleted file mode 100644 index dadc8f8f3275e5..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Qwen2.5-7B-Instruct.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: Qwen2.5-7B-Instruct -label: - en_US: Qwen2.5-7B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Reflection-Llama-3.1-70B.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Reflection-Llama-3.1-70B.yaml deleted file mode 100644 index 649be20b48abef..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Reflection-Llama-3.1-70B.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: Reflection-Llama-3.1-70B -label: - en_US: Reflection-Llama-3.1-70B -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 10240 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Yi-1_5-9B-Chat-16K.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Yi-1_5-9B-Chat-16K.yaml deleted file mode 100644 index 92eae6804f61fa..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Yi-1_5-9B-Chat-16K.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: Yi-1_5-9B-Chat-16K -label: - en_US: Yi-1_5-9B-Chat-16K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 16384 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Yi-Coder-1.5B-Chat.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Yi-Coder-1.5B-Chat.yaml deleted file mode 100644 index 0e21ce148c39bd..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Yi-Coder-1.5B-Chat.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: Yi-Coder-1.5B-Chat -label: - en_US: Yi-Coder-1.5B-Chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 20480 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/Yi-Coder-9B-Chat.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/Yi-Coder-9B-Chat.yaml deleted file mode 100644 index 23b0841ce4ed65..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/Yi-Coder-9B-Chat.yaml +++ /dev/null @@ -1,61 +0,0 @@ -model: Yi-Coder-9B-Chat -label: - en_US: Yi-Coder-9B-Chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 20480 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/__init__.py b/api/core/model_runtime/model_providers/perfxcloud/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/_position.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/_position.yaml deleted file mode 100644 index 37bf400f1e3475..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/_position.yaml +++ /dev/null @@ -1,24 +0,0 @@ -- Qwen2.5-72B-Instruct -- Qwen2.5-7B-Instruct -- Yi-Coder-1.5B-Chat -- Yi-Coder-9B-Chat -- Qwen2-72B-Instruct-AWQ-int4 -- Yi-1_5-9B-Chat-16K -- Qwen2-7B-Instruct -- Reflection-Llama-3.1-70B -- Qwen2-72B-Instruct -- Meta-Llama-3.1-8B-Instruct - -- Meta-Llama-3.1-405B-Instruct-AWQ-INT4 -- Meta-Llama-3-70B-Instruct-GPTQ-Int4 -- chatglm3-6b -- Meta-Llama-3-8B-Instruct -- Llama3-Chinese_v2 -- deepseek-v2-lite-chat -- Qwen2-72B-Instruct-GPTQ-Int4 -- Qwen2-7B -- Qwen-14B-Chat-Int4 -- Qwen1.5-72B-Chat-GPTQ-Int4 -- Qwen1.5-7B -- Qwen1.5-110B-Chat-GPTQ-Int4 -- deepseek-v2-chat diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/chatglm3-6b.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/chatglm3-6b.yaml deleted file mode 100644 index 75d80f784a71f5..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/chatglm3-6b.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: chatglm3-6b -label: - en_US: chatglm3-6b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of 
randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. 
The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. -pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/deepseek-v2-chat.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/deepseek-v2-chat.yaml deleted file mode 100644 index fa9a7b7175e9dc..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/deepseek-v2-chat.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: deepseek-v2-chat -label: - en_US: deepseek-v2-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. 
- - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/deepseek-v2-lite-chat.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/deepseek-v2-lite-chat.yaml deleted file mode 100644 index 75a26d25051aea..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/deepseek-v2-lite-chat.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: deepseek-v2-lite-chat -label: - en_US: deepseek-v2-lite-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 2048 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.5 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 600 - min: 1 - max: 1248 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
-pricing: - input: "0.000" - output: "0.000" - unit: "0.000" - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/llm.py b/api/core/model_runtime/model_providers/perfxcloud/llm/llm.py deleted file mode 100644 index 89cac665aa5a08..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/llm.py +++ /dev/null @@ -1,116 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union -from urllib.parse import urlparse - -import tiktoken - -from core.model_runtime.entities.llm_entities import LLMResult -from core.model_runtime.entities.message_entities import ( - PromptMessage, - PromptMessageTool, -) -from core.model_runtime.model_providers.openai.llm.llm import OpenAILargeLanguageModel - - -class PerfXCloudLargeLanguageModel(OpenAILargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials) - - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - # refactored from openai model runtime, use cl100k_base for calculate token number - def _num_tokens_from_string(self, model: str, text: str, tools: Optional[list[PromptMessageTool]] = None) -> int: - """ - Calculate num tokens for text completion model with tiktoken package. 
- - :param model: model name - :param text: prompt text - :param tools: tools for tool calling - :return: number of tokens - """ - encoding = tiktoken.get_encoding("cl100k_base") - num_tokens = len(encoding.encode(text)) - - if tools: - num_tokens += self._num_tokens_for_tools(encoding, tools) - - return num_tokens - - # refactored from openai model runtime, use cl100k_base for calculate token number - def _num_tokens_from_messages( - self, model: str, messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None - ) -> int: - """Calculate num tokens for gpt-3.5-turbo and gpt-4 with tiktoken package. - - Official documentation: https://github.com/openai/openai-cookbook/blob/ - main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb""" - encoding = tiktoken.get_encoding("cl100k_base") - tokens_per_message = 3 - tokens_per_name = 1 - - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - # Cast str(value) in case the message value is not a string - # This occurs with function messages - # TODO: The current token calculation method for the image type is not implemented, - # which need to download the image and then get the resolution for calculation, - # and will increase the request delay - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - if key == "tool_calls": - for tool_call in value: - for t_key, t_value in tool_call.items(): - num_tokens += len(encoding.encode(t_key)) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += len(encoding.encode(f_key)) - num_tokens += len(encoding.encode(f_value)) - else: - num_tokens += len(encoding.encode(t_key)) - num_tokens += len(encoding.encode(t_value)) - else: - num_tokens += len(encoding.encode(str(value))) - - if key 
== "name": - num_tokens += tokens_per_name - - # every reply is primed with assistant - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(encoding, tools) - - return num_tokens - - @staticmethod - def _add_custom_parameters(credentials: dict) -> None: - credentials["mode"] = "chat" - credentials["openai_api_key"] = credentials["api_key"] - if "endpoint_url" not in credentials or credentials["endpoint_url"] == "": - credentials["openai_api_base"] = "https://cloud.perfxlab.cn" - else: - parsed_url = urlparse(credentials["endpoint_url"]) - credentials["openai_api_base"] = f"{parsed_url.scheme}://{parsed_url.netloc}" diff --git a/api/core/model_runtime/model_providers/perfxcloud/perfxcloud.py b/api/core/model_runtime/model_providers/perfxcloud/perfxcloud.py deleted file mode 100644 index 9a4ead031d018c..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/perfxcloud.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class PerfXCloudProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/perfxcloud/perfxcloud.yaml b/api/core/model_runtime/model_providers/perfxcloud/perfxcloud.yaml deleted file mode 100644 index 10ee691ebd0609..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/perfxcloud.yaml +++ /dev/null @@ -1,42 +0,0 @@ -provider: perfxcloud -label: - en_US: PerfXCloud - zh_Hans: PerfXCloud -description: - en_US: PerfXCloud (Pengfeng Technology) is an AI development and deployment platform tailored for developers and enterprises, providing reasoning capabilities for multiple models. 
- zh_Hans: PerfXCloud(澎峰科技)为开发者和企业量身打造的AI开发和部署平台,提供多种模型的的推理能力。 -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#e3f0ff" -help: - title: - en_US: Get your API Key from PerfXCloud - zh_Hans: 从 PerfXCloud 获取 API Key - url: - en_US: https://cloud.perfxlab.cn/panel/token -supported_model_types: - - llm - - text-embedding -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: endpoint_url - label: - zh_Hans: 自定义 API endpoint 地址 - en_US: Custom API endpoint URL - type: text-input - required: false - placeholder: - zh_Hans: Base URL, e.g. https://cloud.perfxlab.cn/v1 - en_US: Base URL, e.g. https://cloud.perfxlab.cn/v1 diff --git a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-large-en-v1.5.yaml b/api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-large-en-v1.5.yaml deleted file mode 100644 index 5756fb3d149d4c..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-large-en-v1.5.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: BAAI/bge-large-en-v1.5 -model_type: text-embedding -model_properties: - context_size: 32768 diff --git a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-large-zh-v1.5.yaml b/api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-large-zh-v1.5.yaml deleted file mode 100644 index 4204ab2860248b..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-large-zh-v1.5.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: BAAI/bge-large-zh-v1.5 -model_type: text-embedding -model_properties: - context_size: 32768 diff --git a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-m3.yaml 
b/api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-m3.yaml deleted file mode 100644 index 55488e56885d10..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/BAAI-bge-m3.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: BAAI/bge-m3 -model_type: text-embedding -model_properties: - context_size: 32768 diff --git a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/__init__.py b/api/core/model_runtime/model_providers/perfxcloud/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/gte-Qwen2-7B-instruct.yaml b/api/core/model_runtime/model_providers/perfxcloud/text_embedding/gte-Qwen2-7B-instruct.yaml deleted file mode 100644 index 03db0d8bce8500..00000000000000 --- a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/gte-Qwen2-7B-instruct.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: gte-Qwen2-7B-instruct -model_type: text-embedding -model_properties: - context_size: 2048 diff --git a/api/core/model_runtime/model_providers/replicate/__init__.py b/api/core/model_runtime/model_providers/replicate/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/replicate/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/replicate/_assets/icon_l_en.svg deleted file mode 100644 index 63c09470d54059..00000000000000 --- a/api/core/model_runtime/model_providers/replicate/_assets/icon_l_en.svg +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/replicate/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/replicate/_assets/icon_s_en.svg deleted file mode 100644 index 527316edb6f738..00000000000000 --- a/api/core/model_runtime/model_providers/replicate/_assets/icon_s_en.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git 
a/api/core/model_runtime/model_providers/replicate/_common.py b/api/core/model_runtime/model_providers/replicate/_common.py deleted file mode 100644 index 915f6e0eefcd08..00000000000000 --- a/api/core/model_runtime/model_providers/replicate/_common.py +++ /dev/null @@ -1,9 +0,0 @@ -from replicate.exceptions import ModelError, ReplicateError - -from core.model_runtime.errors.invoke import InvokeBadRequestError, InvokeError - - -class _CommonReplicate: - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return {InvokeBadRequestError: [ReplicateError, ModelError]} diff --git a/api/core/model_runtime/model_providers/replicate/llm/__init__.py b/api/core/model_runtime/model_providers/replicate/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/replicate/llm/llm.py b/api/core/model_runtime/model_providers/replicate/llm/llm.py deleted file mode 100644 index 3641b35dc02a39..00000000000000 --- a/api/core/model_runtime/model_providers/replicate/llm/llm.py +++ /dev/null @@ -1,305 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from replicate import Client as ReplicateClient -from replicate.exceptions import ReplicateError -from replicate.prediction import Prediction - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageRole, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - ModelPropertyKey, - ModelType, - ParameterRule, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model 
import LargeLanguageModel -from core.model_runtime.model_providers.replicate._common import _CommonReplicate - - -class ReplicateLargeLanguageModel(_CommonReplicate, LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - model_version = "" - if "model_version" in credentials: - model_version = credentials["model_version"] - - client = ReplicateClient(api_token=credentials["replicate_api_token"], timeout=30) - model_info = client.models.get(model) - - if model_version: - model_info_version = model_info.versions.get(model_version) - else: - model_info_version = model_info.latest_version - - inputs = {**model_parameters} - - if prompt_messages[0].role == PromptMessageRole.SYSTEM: - if "system_prompt" in model_info_version.openapi_schema["components"]["schemas"]["Input"]["properties"]: - inputs["system_prompt"] = prompt_messages[0].content - inputs["prompt"] = prompt_messages[1].content - else: - inputs["prompt"] = prompt_messages[0].content - - prediction = client.predictions.create(version=model_info_version, input=inputs) - - if stream: - return self._handle_generate_stream_response(model, credentials, prediction, stop, prompt_messages) - return self._handle_generate_response(model, credentials, prediction, stop, prompt_messages) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - prompt = self._convert_messages_to_prompt(prompt_messages) - return self._get_num_tokens_by_gpt2(prompt) - - def validate_credentials(self, model: str, credentials: dict) -> None: - if "replicate_api_token" not in credentials: - raise CredentialsValidateFailedError("Replicate Access Token must be provided.") 
- - model_version = "" - if "model_version" in credentials: - model_version = credentials["model_version"] - - if model.count("/") != 1: - raise CredentialsValidateFailedError( - "Replicate Model Name must be provided, format: {user_name}/{model_name}" - ) - - try: - client = ReplicateClient(api_token=credentials["replicate_api_token"], timeout=30) - model_info = client.models.get(model) - - if model_version: - model_info_version = model_info.versions.get(model_version) - else: - model_info_version = model_info.latest_version - - self._check_text_generation_model(model_info_version, model, model_version, model_info.description) - except ReplicateError as e: - raise CredentialsValidateFailedError( - f"Model {model}:{model_version} not exists, cause: {e.__class__.__name__}:{str(e)}" - ) - except Exception as e: - raise CredentialsValidateFailedError(str(e)) - - @staticmethod - def _check_text_generation_model(model_info_version, model_name, version, description): - if "language model" in description.lower(): - return - - if ( - "temperature" not in model_info_version.openapi_schema["components"]["schemas"]["Input"]["properties"] - or "top_p" not in model_info_version.openapi_schema["components"]["schemas"]["Input"]["properties"] - or "top_k" not in model_info_version.openapi_schema["components"]["schemas"]["Input"]["properties"] - ): - raise CredentialsValidateFailedError(f"Model {model_name}:{version} is not a Text Generation model.") - - def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: - model_type = LLMMode.CHAT if model.endswith("-chat") else LLMMode.COMPLETION - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.LLM, - model_properties={ModelPropertyKey.MODE: model_type.value}, - parameter_rules=self._get_customizable_model_parameter_rules(model, credentials), - ) - - return entity - - @classmethod - def 
_get_customizable_model_parameter_rules(cls, model: str, credentials: dict) -> list[ParameterRule]: - model_version = "" - if "model_version" in credentials: - model_version = credentials["model_version"] - - client = ReplicateClient(api_token=credentials["replicate_api_token"], timeout=30) - model_info = client.models.get(model) - - if model_version: - model_info_version = model_info.versions.get(model_version) - else: - model_info_version = model_info.latest_version - - parameter_rules = [] - - input_properties = sorted( - model_info_version.openapi_schema["components"]["schemas"]["Input"]["properties"].items(), - key=lambda item: item[1].get("x-order", 0), - ) - - for key, value in input_properties: - if key not in {"system_prompt", "prompt"} and "stop" not in key: - value_type = value.get("type") - - if not value_type: - continue - - param_type = cls._get_parameter_type(value_type) - - rule = ParameterRule( - name=key, - label={"en_US": value["title"]}, - type=param_type, - help={ - "en_US": value.get("description"), - }, - required=False, - default=value.get("default"), - min=value.get("minimum"), - max=value.get("maximum"), - ) - parameter_rules.append(rule) - - return parameter_rules - - def _handle_generate_stream_response( - self, - model: str, - credentials: dict, - prediction: Prediction, - stop: list[str], - prompt_messages: list[PromptMessage], - ) -> Generator: - index = -1 - current_completion: str = "" - stop_condition_reached = False - - prediction_output_length = 10000 - is_prediction_output_finished = False - - for output in prediction.output_iterator(): - current_completion += output - - if not is_prediction_output_finished and prediction.status == "succeeded": - prediction_output_length = len(prediction.output) - 1 - is_prediction_output_finished = True - - if stop: - for s in stop: - if s in current_completion: - prediction.cancel() - stop_index = current_completion.find(s) - current_completion = current_completion[:stop_index] - 
stop_condition_reached = True - break - - if stop_condition_reached: - break - - index += 1 - - assistant_prompt_message = AssistantPromptMessage(content=output or "") - - if index < prediction_output_length: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=assistant_prompt_message), - ) - else: - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=assistant_prompt_message, usage=usage), - ) - - def _handle_generate_response( - self, - model: str, - credentials: dict, - prediction: Prediction, - stop: list[str], - prompt_messages: list[PromptMessage], - ) -> LLMResult: - current_completion: str = "" - stop_condition_reached = False - for output in prediction.output_iterator(): - current_completion += output - - if stop: - for s in stop: - if s in current_completion: - prediction.cancel() - stop_index = current_completion.find(s) - current_completion = current_completion[:stop_index] - stop_condition_reached = True - break - - if stop_condition_reached: - break - - assistant_prompt_message = AssistantPromptMessage(content=current_completion) - - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - result = LLMResult( - model=model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - ) - - return result - - @classmethod - def _get_parameter_type(cls, param_type: str) -> str: - type_mapping = {"integer": "int", "number": "float", 
"boolean": "boolean", "string": "string"} - return type_mapping.get(param_type) - - def _convert_messages_to_prompt(self, messages: list[PromptMessage]) -> str: - messages = messages.copy() # don't mutate the original list - - text = "".join(self._convert_one_message_to_text(message) for message in messages) - - return text.rstrip() - - @staticmethod - def _convert_one_message_to_text(message: PromptMessage) -> str: - human_prompt = "\n\nHuman:" - ai_prompt = "\n\nAssistant:" - content = message.content - - if isinstance(message, UserPromptMessage): - message_text = f"{human_prompt} {content}" - elif isinstance(message, AssistantPromptMessage): - message_text = f"{ai_prompt} {content}" - elif isinstance(message, SystemPromptMessage): - message_text = content - else: - raise ValueError(f"Got unknown type {message}") - - return message_text diff --git a/api/core/model_runtime/model_providers/replicate/replicate.py b/api/core/model_runtime/model_providers/replicate/replicate.py deleted file mode 100644 index ca137579c96f2c..00000000000000 --- a/api/core/model_runtime/model_providers/replicate/replicate.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class ReplicateProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/replicate/replicate.yaml b/api/core/model_runtime/model_providers/replicate/replicate.yaml deleted file mode 100644 index 9cad6d4f0df595..00000000000000 --- a/api/core/model_runtime/model_providers/replicate/replicate.yaml +++ /dev/null @@ -1,41 +0,0 @@ -provider: replicate -label: - en_US: Replicate -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#E5E7EB" -help: - title: - en_US: Get your API Key from Replicate - zh_Hans: 从 Replicate 获取 API Key - url: - en_US: 
https://replicate.com/account/api-tokens -supported_model_types: - - llm - - text-embedding -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - credential_form_schemas: - - variable: replicate_api_token - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 Replicate API Key - en_US: Enter your Replicate API Key - - variable: model_version - label: - en_US: Model Version - type: text-input - required: false - placeholder: - zh_Hans: 在此输入您的模型版本,默认为最新版本 - en_US: Enter your model version, default to the latest version diff --git a/api/core/model_runtime/model_providers/replicate/text_embedding/__init__.py b/api/core/model_runtime/model_providers/replicate/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/sagemaker/__init__.py b/api/core/model_runtime/model_providers/sagemaker/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/sagemaker/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/sagemaker/_assets/icon_l_en.png deleted file mode 100644 index 0abe07a78ff776018db04d84b7cc0ceb1c9ae81f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 9395 zcmai)WmsEFwD$uo5Ue;9in|tCpm=e2cLK$o;0{Gv2<{YjFD^wxDG&+;iWj#6MS{ED zlylDeKF|GfKP1_EXYH9aGi%nKJ->gXnu;to1{nqb0Kk@)lhObHkV_HAAAzWdZ?@^f z7laapHGjSiJ(WauH znX3hb#6i<4EOjqRsbqeQBcRqg-{O0hg~XB)v8o&}06oFbhAUveW?OQmb{1VDzrAYr z6ERHf>-n?AX=4wiA_}iM8%xE$W>Zhiv(>B@pt!9iw~;$OFRL!9=lJ&f3!+;&ej^w6 zBL}1Tp`lLz-CkXhqyS3*3c#HDTR&wmYw&S+){=G;Ork3^hX2daA;qixA1^w?CFrS~{(SY!O*+De_ zG5^o5kLA&v=0Cf&;I+jes~NWTt(ovxh3EyNZEq?*Y|kw;u`i+!J(bDKYHq+dWxM9 zJ)fyv9BDh!8&Y8Q6e-+C0$A z(I6z@hXUVrS)s+kBjtr>Jx{IG{~6yk9&x45#e8h{NU|nAYN2^aut|DTwqYHue`>?( z&^g`w$)0!q#aWkJf3pk<_J&%=OZ+572 
zuUXe|*`u#L)e-!6Xygx#IB^2~F)j-)5(!G2_FICdyn-LDYvBG|RVsTS_YJddn4< z1}hCtTXrvYHNc-9q^9M2N#25-0(IPUf6z*%{Tm(VLvFT0G%y1)M(Q%dwfie+ymBfu zL^l<`0mf*Hu(AGiaDeMjVot^8u6;E$z9Vs&N|ZF4vpnfn(D{#~^`;BU#6U^204PKC z%?95r&0|c16_9}wHCEatC5PPTA54Gi%;(4My)6jhnyGnczWisl zap#y5q!(grKKexSHE)9lAj_C2q+=v*^!JX)+~|}_q=74k`k#%vyv>Atw#;)kOX9lf zCs&-mmcruSXOzK^^o+aeGd%;+IHdm8AU1?*MPM*j%CBru|HIy)KQbxabaVzQIOO&> z;XJa~lVI#T)V%4&-Aso*t)&UlKC5=+b|SbX67Mm&kX5A&>%f{)XPaXOFT8-z!{b5?~vFEu_?HyG%&}8dv zOf)$$++#;r$R3OE-BM1x8GeZmpr(OAuCz9J=ve$l8?T!u6Z}Q)hpXUgRw#IxZfz0WlIDsyfoX(?b z+0P4^J7lnaT)~b(C*GODE0(G;==_yrsFC z7;k*Mv^ns!_LSJgt=MZ8163~TTDia+DNuHxwt$X0im~!p{X3NyWg*h>^y{gYS@3E_R4n?iUje@z2oxDS>iBsp#`#M$P;0V@QvB~t*$jE^lu$DdA8NSU%m(_wF0 zEU5JNSGm*5Yz|z|Yx7)I!P+Gi1D;FRK6?c-B4CjxLnlI~4ge@x`rl@-Ia zyCzBX+D~fMiRhGu0Lno&<#rPdhbI6SGB7S%N@1MiCT$B9%dQki8{76aPX7f7fk)>PbN^MQn4yPvh< z2o=yTf=^Ql_He{&Jg~aaNZE9;lN4amCQIgqOHU>kM%jJ0z6mfUoxtR!-wK%}HN_!a zGMjHGpaxhPHN$t2a^5RTX-UhU#|$COQ?tQ>`R{g0H{6N~$kzo|N0$>AW1Re8DHf|$AJ zc*gxTD!YHB*OjI-hy*^pgfnw5#lm`Fq&Pf2IyUVc?n%U(<_ zEGogHZEKQ{bnpE%-~yCacl(D)jHmBCOD5U8(GR^=0|Ssu+u{4TTsuCqEbJpJ3(0d` zS#s+_Z%Vqd54t$4!j_p^$ooP^Y1RBNP9IPu*}B@dT@=}+A+nyEgIH;#^~ z4H@rIHn~|eZfH)VUm_ZtMPs;{<4RnAL27}jfS?e?P{Q-+1AKPQ! 
ze^FM5w%3hUERvXn@4ovaBB$juL|pJADvmQeO3zsob$aQDB;Xm{$MB#Xst19?(jauB zFBrGq>ksRcdn$I4UEsx34x)6+)OVDp6>H1cSat^GTuu(e^*X}1JU59L{nA5Wpw77d8515k3gYL-E4xbd@qb~Mt z??UdRJF&VL;tWG0#uq)%cOfOUzmn#ssmF62$o3_@R=`!yn{W7yQD;eKRRX6OIKlNS zXiAzd?HkowPNvZ5XOlt$r>2@wt_)KQrAf!wp9m&w@aj@8MDHaHils!S48d?6jXwC` z4y9)Wa}HJ^6Fs+3KlJ&qa96)M@r=P6jkfj1YS4{ z#}&N3qxw~@eP!PyRYwKhS5|s2D8nQHle2P4RVS?mv+yTP*#dmzz@+B2_ z6hon2VGqVOvHooF5@Q;)g~nf}Ve7|Go}jq=a=R2t`n%gDrx)4IxvtcwiUYaB zdaa*;Cvv_r8*kuk9B9#KkN|HP`?mACNS7hupg+VTWID;pwy#cwoThhMN~ZCLyE7AE z0h-ufj2p-Xkh3=J*X)ezx$<8IYJf)%XhHk`EtS);nF%j57#9c{t7``gU1ccRn z$g`b^Iq@2VSE4UiQ{w?$A z=b_IBkUSq6F3S}X&g@un{G{<&BH3JJ!9=U0tfHS#6px2k95**NZ_l0LuszjREA<|C zU6*5jOSaid+MK+t;3&jU?bama^$+I<^iU>~GPx z{hRzy_5dAe$1;VdPA7xQ5(yNb=|4g|C9bSOR!Y@XvlX|~?XFTTeMYR0%?~EJspat1 zzB17+N+- zm{jlf7&;9GC{PnVdd2)fK{m%i$$W~G1t1}y{36}G*L)ZD&+b>XyNb^fH3TZFm9R-nt-oA%6RYs&|f4eFBY;Ba} zN$au58_>8z7TRW4KSTDjm@_xFwhpb8bHERjtPZmbt6bn=1U;(?4zO6bn~&&Kp}%A3 zU)SZmrojbssip z)P&!^zmUvG!4M=0yS*l#H->>`37d* zasjF-(_hNLT7(7T+~OWT_KxfPWQz6&Z?qGLJ-3QSid>b^G1ywo9IuQ9rHtl&#r#>S ztSSvGXr5W0;px%U#ZX)_Wm)pMQeN|+6b8>N=^C<8+=fJ3jh1PuE_S`VHPL=s!n7G& z+#-cLDH6AFl}QBdAb@V;mx!J%EkzX)&{|FgI`Ucwh4& za-fYw{`u2F%G^+uRvGOOyK$lXHkQ7x^+Q`~vdoCbI{G`$+yb5#+E{(I>^c)X(I6z| ze9=(nClg6p8Y->o6lNJBOiv3*tT`cA02SIG&gmNTv2p?z51lc@mJ<+f+g~Ys`-o*# zfy?hYa_D{1kD%@O=*zhDo?wd7nI4|p#P`Mr_ys4vY-3XYdb$n^@g`H+^^9DoeCd2347Yq5 z-?YZZP~+h5*2LS)Tj>0~JA1vjzW1>Jz|y4H*tb4nsr)XySpWSMWMQ=iy- zuXWcb_?f#>5|srsDu@u5totL!^bOS;IdwkIQ%&Z*PFufm7`ox6dvK!B%S*)$Qe07r z%RGh0Lzkmhvj#KPYVn$;CS7jlk}vp>rAG?K^3Qeq=X$`36!doZqT1Nk8Kj9O5numQXHP2o^;NIxXjgD z0VC8>qKeL_)e-}+n+a#ObXm#>rlfS%*Jp{cN#d$G=~(kOwF8vTD?ch@WLDQpzeL6aIWcejEpo`!KFlbbS@in0;N|S<|vOhQ4a=x6{#< zL9~5+I4lPD2l3th=@`VSKPWFwS6d@2A?{(C#bJAck-Ls-ub{lb-kdOn!c?kLV16HPYcD>FqTGnVhu@u zzBy}9>0@j*YE=pV@$j&fUaE&b)Gt2oJqFKi8Nql_@87%K#m|UP%D4{&l=Uv?4Ayg& zVYqiB*11}84t+L4^Z&)@qKXrpuT)ENF{t@5(f$lk}VQU z$#mx^@9gLN1f^1;oEB;+6V`shHi^U{{#lm@KxG45-#~IOSgw#YA*K}o=-{%`wiO8Y 
z?55iJ+btVcDOi>*qB$Wvfb2=OSASP#%J(<#j^b64e9B{-d_n20VFtT$haeA$<#}XA zYwOgwA{r#bQSF+gCle7(iTZt`6i<~)S=%IZx#8?RVHpw2K9qTA8{c@n+AksPrqaB& z>NYU7w=Oz`jTe6Hns!uKHrB6>Ice8F*!g+P`sEPt9s>JSZh(Fr*gweoyV+BAIX!{X zbG|*1WobBW zfjJA16}Li?UBxl{k5?6BBrKJWkaF>7odjCIn!Pb&I3#O8`{q_6xbG()ntH7Yoj*ef z8>Q0|-k5b~#Hz5Ku%5dfB%@7Q0TiipCa1O1H5h&S(6}3=R2RCA zeqOI1sDjv^?!wY~&yv*2QdxJz@1}qME;G1}5mIQ;)#7RX{{8*_6JXS&JC%tw9v&05 zS7ap_)uUt&{RU5C!**IbxCo0c^`)9x%PT^YP|E=&%TUmA>En?$0Q6lKytV=biW|q>PG`Nb#Jc z8ltN8ReThhKJsFsUYfVE-@qL%O8-ZXs|TW)&37nHL~F+QmCsbCiwhf>7jhCH6rzOu zZIB4tYzG*Zg1dI1AhiY?7mgDFLOV&NR@%4lX>x0an72OWab`PH=MWK1buLcwcv`d% zJ63m%>GlX~fPJA1?12@3t~9s8e@+fBe-dG;s&)bm5x5onuzqee>dkZ;S1FOqBDgiS zL;_z@eTgkXSZZkIrlhc1KvqEjweUSa;;vEnoi3G@q)X zPAV9Cb$MvzU%OKeL;DBKmkR z{c&(vDo5%zNf!#a&^8^GD(2AEFr=j{LxfHt!M8X=@HNf38S5b9R_Wb6CjsZ5MS6W$J~MR+2d7j%oIYc?uzPtS+zYJfE(E4I=KTDVsOQJdNBY zE9Cs$+4T1;|s5JeRC zkqV2H}CU97-UmLq2$0cAYCG&3uREQ)wc? zR(tO~R;hCgMGPhF!?YyjeAMn#D9*gq z$^GbvZuK!|wNN z;H5~rvAMDoESF;kezEm{di#gd_vE*-{&;Ocqr?2Y;ueSt_$wn?Gs&fyK9 zJ9)`f3*qQq6Rpkb=(L~) zmO}O+iuL}HGohN&*hv%ealt!s;>PR!I@8(c&ABLGcUk%VgLifi6QAx{&2#C$)UpmU z1PwAAi(RNk`%QVzN9DjDL)ZrWK@F;)hS&`EGC|Fu8k={nq+zdaO9yIFzqKz?7^nYC2=}Kns1G&B06pOhyNXy+gmB^f>_fEH58mJ}69SG}~T{j|QO&Q`Hh7 zLg}hTSYY!TMZyS6gtnXO$a1~^O`BH=2ZA<y2uP2<**)v_3SuX)YRCebg!HdW~0P3u>}f@YgFJHReP`L`pw!!?DO18@j46v{tTKACAFo8Tgcv_FLNr750v3X3W`fTn|~p3D6)VT z()?nUp&_76zz@&+RCMeMrLc{9I-+0Vk0O6m$^-js4St@n4d)Q_Xw4wcJ3`yS0ZN}9 z22HQ~5ZpL21r?PG=`BqFhf2`#MRy$Z02;$85XAIwA>gv>+{W3IDRZ=oKNEj3TLl9%bz zzY4;N-1VU^+P>dX>@OgbRqoX1TpVOX8XKp7M4;vmGI`U~igupKA5K{p+tBcaL7{Tu zeFMl6*Xmh0f{2yc`3KzMw)-5gzSes6k`w}yr>GHwsR4ke`hT?m5=|I+gQ?UOz}|A! 
zKigxm5#4dD!16jqscMpwsHKprO^nd|M}UZGkkx$Aj;Xcy`&QlUrcLgI$4}25t+4+vZN3ci~0^e!r5H$(0(0nSJkQ5yxbC#DHp$387r%c~PR&!E^J3 z^rcY=0Vp%=4Tb$~XnTP_>+U$gU;L<+W#`rmygg~!*G|Z?bQ5LV>U}HdmPaYX;xFZ_ zGwwxzVBr^|4cw2|zDOmD@)H2Tz}kAtp2;$WF`p@7dbraX&s4aL`~`mqERTNZg2umk zau+GEVt;_1JKmoG1=)Jav#(!ZGH(4?Vg090#%}m-_nz&UCxU~IHX2F=o@ZT}q*x8E zF4tUieYGA1ULY^Jbp>Q+KjMAD(mFRNN>wLYYEvA`&d)e<;)H8x6R;|X#2c)2X+t?? zR1znMW{3NjMRh@y-$L8hvh-~s|9XTmIpuZwITEIV%n2=rKAd*}^LC2Qz!q9YnXxIq z1h!0mJ3dl#QeW^60*K>fQxzYS4eCyPWs}>v)Vg%z@(}tdV*S_gc=3&&gOzA#wu1TV znmyysc|wf&20>f=^DlZJwr!33DI=^q>%GJOALdJPs8D1r5>rE}UIK94;6$Yqm_(=|Xy~)Yi{Jq(=Wzv* z$+=liHlfG8kZTvF^c6VTKUxU6@eZyCk~6jIr2em+G4~2+(tjt|xT-LW@lmz%|CA*E zDO3I#kH4J%u{ps1Lviw-kmWzS|AZ`I|0+|klK2T>7!&u zPPNyTv@tQEbT;W9nGC%?*msrn4bpEGwMfPEP0N3UH9A?y-QktBNO}MrW{r;q$<=^b hi=clrH(3v84ipNJtpSyN2o(}QURp(}TEaBse*pS#84mye diff --git a/api/core/model_runtime/model_providers/sagemaker/_assets/icon_s_en.png b/api/core/model_runtime/model_providers/sagemaker/_assets/icon_s_en.png deleted file mode 100644 index 6b88942a5ce27cb57e27ce876cecedf3c5dfce3b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 9720 zcmZ{KWmFy8vh~J&Gi0z?1+fFdU=ss3iQ-j+5z?Ax`=Z1wk>fwmS?5(5D0 z6Oo?GVBYS@EM(P{002K~03aj+0C;!{g&YC^o~!`CktqPcp9KKmx_sdk|AI{^>`fIrpVJb*j|;lF%!2-<%zkN`lGEdcr-jQ*SbbI87} zKb*fU6d2;)6~U1ILTiDc{^e``p|x}Jn!XtX7g;@b000T|&w>Ete8d9)AV+OAbv<;I z6!|Tj9huGEIh$KD`#8G%VF3hv_}^SdOAj+LA4dl#cYYrs&|e7tH~&wV1w{52#KT?) 
zq^qP#CgJR6Nyg30%FGH9MkFI66LfoL#jh?Y{SW=^P6%Y<;o-v1!s6}i&Fsy=?CfUE z!p6tP$HL0a!p_e0hG25{b@DLtVRCY(_`Ase){(Syw{Wv{@vwDvBKuR<%-q@2LkI-= zBlNH1?>;?jt^Td#V`i`LFPQvvmKR|F_J42LGF&;%004_8|Z2 zi|wDb{xkL;_&?L}tJ?ZlI_OH;I$Aop|Cxo2omG(K|IGY1Q^MK7*-gX6%)(OmP3SMm ze}w)U{g;m3f9m`%>Ay4oFa=ruyzT#Z`+sk(zoKt@OBhj*L6 zG<_gW0)3OTmV6IZP6O>(s!%3?;uz6VjN-C3P~?MI)Ypp z=@sSjKN7PwClsk0=ZooG%>_24sT6FaG8~cs5a#HQAkpaN(9%MaZtqt2$)_&&EyqqL z{jyvqRrbgLs=nkPK|IK z@j>&b7kN33i8?>3vdnN_-|K!YnFwYx8~-KZz#OO6VjOM!TV+MSPx-9zY$Z@TGXJ*Q zd0b(?k#qX$oApXyhDc>**W8zHXYo@HnV-DVyus{xd>z0vf;Ah)f(=58mBCYrDDIQj z5pR=#GnJ*QGoddpB24WT*S8Yjnp9nPX4NxMBZ9K_wJCMeb~;@5%@L$1<)<&*?fy)! zwZZeQEm?$IV#mwpSTk#~DA*8HzSM0u5$c&W`OlbYs!UWCJ%mnB{F<~PBU8P4aXa$z z+!eL~dZs2I#AnL>lP`o406Yu^Sw-_6QD-PW3tFHd;&O+P?RINx!6qwY)54ZAMO=@g zstlfLMU9$JJHkS+SNcGRO+nN4bD> zyW7Kfju;T&312~>cw8uyd4z_Sz7SruOa1QAP?UYtBJC3>Zr0;oX-Y`^h_F%6jf-J zIX)|ymUZXoR7ewHH1*gtH!(hINc>$c>p!kO$r6WF%#yD2drE^{N|oHs<{G@t2uk#& zksyc-^M#9cZ&inWLY1o8V26!C9JP&*ZLzBoTiU7~SX$69Z7=LkJ$PN_HH2KW3;Xa* zTCr%iI99-WcUJq~bk?{JpWcJ@hk&B4_@3GVD7ikL%a{Vc&2iui6Xicfq^5mT%B%d^ zV#~h#+*0!^fN3OZ1H4*lpwP@itl1h9(osj5oyjxvVe7W($*Lc1CCu`HG`X~AsWR@DP(bsYxI3| z#muRdGFz-$%>-Z1q)z;+G`n0-IBHO;;YK?%ohCComoIs35s<^N#tm!J0M7bBJ}Tx3 z68Akdj~=I(-FLx zJC;tH&3^jYuO8V&jtU`>T7)MX;7_+Ea!(`Pv4<}#6)xlN#6r*yzb^M7jddUTVxFO0 z-0xBw-evVsf3sCrN0cGvx7rD1V)LoLkXADL@nYSXAHyLD3{-7WgE(AI;mYSZA<^5o z6el9~#ZB7+6_0NS8jro+dWSJ1*9~IC#EFiUag?v_CG3N~p5Z6P=Y9{t(8Gj&`1y%{ z`SOaAuX!rXjVVk?RIfGYywHEQPFxM}6+DQ* zq5VLWQ~4=3?$=S1*|9E3MlZ(sXsEajy+ZKveSPun`BHFZnY)ZbOb@ybGlDqz-i70; zfd4Ed)2`37ptNjS=(~{H59KXC`*PV;2Fz@}U%{EGrWkJD%TiVrW5mP+a}O6hx3kQF+&CwQq3~uQH$%er31v5z&_uXlUM8m;nmOn(-H?tu@Zz%Pf|pIZ3{R^ z7VT;0WtQ@g*S*h$!hSy_8qN839Q}oz^lvkhg?)PrdoeX*N2Be8eoO?arY{AoeL`Ud zEhb+(w?!ipY92+y4ssgDwmNqadPS_xCCDiRS{cGbrw8ZlF;Lh69>@p# ziyqNU#r-6NPX1gmUfs!9c6o;N-+oHqXJV3ejQ*b8N?q_LQD?%7BiniX9QBDd27gbt z=pE1O@9@z=k|IvBBV+XUdKSB`bNCG0a&x}l$|?_b_$C8qi^%RZ5rfxW6`nA^c$^=S z`Ush-P+F?iN-|t{@;KgcL)zYf7X`%TSK4Cp`JBExz9!u%%@xmbRb53+RJ3j-%vRCH 
z!ybC_k7Twv5RCQ3eumfVYcOkSn}9`?WYRRqA8W+RHIEm?n0*+q^6E;Cpt;2>c}mNf z;Z=$!=1@C7RmeEmK!d}y{y;R+JTDq{8vl-fP1Wc#kx4y-~bAaq{SY$7cA z@8EITtPk{w{u7Qroq`>;P!^U9-g~$@wwMXxv#CbSGMPs@32`%@6apc!O|f;OCBP0k z$;aYQD_n{jTwr5UUMBrH@CNM(Ksk8_ytT@uQjqIxF0; zb9o?w%;{wqbV0EOtz3qTTv``zC%S6?frPPxQXSot#Ou62*W+T@i&)%#w)w*~AN-Pi z*y492OCW99Gcr)?L6PyLnq`cF9=i$h2?ts;5FZ*SO^4S zZofWK4CLH19*7AI&u>Q>2mClm5-kZ$$tq@$W-(;g!MG5_dw<1AJCui3>bX-~KJ&ah z0&4a*Z-!|5o)f$yfaJ(JxSOA1Dj3`1a+5u3^$uevXBLSSUu-4z5FM1v2>Jap?%4USmKURzz z7hksBmi2={!87zzxvzEajn7JZJh?k9T!#=afB6agklE^m)sUR9NnEDhF1} z4hEu5l1%FpG6$-2UX*5%`8$^ldt&fLjUzPQ56QJ(HB}4WeKB4ajLba3WVD*hRI1+E zUGNSf(NQ^JgLxll%8HrM%xg188cML_aJlVX(c*%A;|{STBVQ6Z#Of7H+W!AnRxo* za6~XNIBpN7A`v&YUBtF!+B#viZFWTP(m9abA8K*M{(%syOi~FQl0;hR$FG^%J?QL- zqY(baO@m<4`p#od2yLoq8@q&0z=BQtCrmhx+0}2mi>`;PC(L+4N@&OnJDc(RPZOvX zzQ2St22wN!`Wq57a^;47uOyvHCqoOc{L#|~r76Ju{*9;UrPHH!dDfzyBpQ^<$!*Zs zY1GcDF-KP)K58*@G;FcYSjw~#$eEihwyKnW#hn*p32q4V>q0|o z(BOfC4ZN?4F}{n9_V#WFaEyWdtjBx1BcrQB_JtEt3fNU~NL?j4+lM&)DH zdJWQjY=kDPXj=^piD5gREF7DzF)T9c*&C|E4}IX3q`KAZyqzl!coz%I3{=w+E$qrv z5i0em+;%He%z5Yv>fP?2A>%#3L?Kiv$6 z?v0)XL#PEu~@H5pypH_4Ik%!PQ4=MM>%JmK9yg!!t~su^*)!_Ti+DLoP-Wi*~5 zU-C#4P}8zQ#CI-3Frd6gJTdtb$b9R7IKnq5EcyI8I7K;JLzzF1G7d0(_mS6Wz;?A{ zgF}xF_j{AHeuu{u9fz)odlo5zeuYqdE_OR~)K$SnL0`OmcBq?LH6p$}WymM}uCeH%3zF zyafu%;Ns&#P5jEbTb|R4{a+0_%H$Q*;Z-y-UmjU(1RN8pdRfI==F^}cN9SbX%DrYO zi+j|@(xQh_)@a(YJ|ON@sOes=$jiCMj~~>DO37@LiuC|gV^QEkQDQdl8#1twk!Q$= z#**F}E%um5a*AS#LPap!Sd>-paY3x{=cm}M+_PaO>GX5!?$Z&<_biahGrac1er8H) zo)Ae4TrKZ}6a-`E`acfU2i=Vs54O98Ov%7;^-HJU=9a_Q(J9S~T$*r+>1REIf6Hdv z-9cfR@HxA;UrxW9u9`eHawgIYi_XBLBVtoXIFaikuC@2|Yi0f+gQOgZ5Mo}vm?9Okhwy?vAHK=*auU-3FrBD^1Jf!x{+Su|(XNp%KE-Z9*Z*He9 z{jbCYomYC0L>!sCu=g-d!D}O}P4;jq^i$h$98*scXk;hzLbhlcp22ceTrF^qp)1tq z!0W2AvifCWs%U$US%b`#Zmi(im3N5{4m+fX;>Qz$k^k$pB+;4!;(B;+xlzwP@>-AY zCu=6$1Zz1k%{!?`a(m) zFS#ja0ywU$Ht05~M~A|1O1N5gwWF-$VMFSqJ};NPUn9OH5x(E}FkRj%(W6Slc=_!R 
z4hlRFP8>3^>w}P^>0;-PB~CKAFRvN>!dhom`qm%{lvD`H$d(UMqPpn$#B&EO(*L*0LCVUbat5Pj~rIig9ff+uL8{@XbWs&T>2U=Z&Gh zeBRP1z~UFS)B0Q?A`1c*Yt%nIU3Yov# zQ4(;9geO`eeSjgxnO`}!*MGMZ@oc6RQPkA`rmI?^WJcXkxFm1@$pdPB({+&siX+tz zY5f*!Zc}gtMTj~G#iToKw`nP@pr^)!%5b;jLc`Z_sWv5lgM~l@m{*abVWaCg$a><# zPiM+t3zm@AyZZ{!=ZY92^hE(WHea>EHg20W0QxbgWNg_~lB6c-o8(i10}>@HzM^?~O#W}+h;4pa z0e3}QEq6xuW?&a~o1P+FdutW8z+!gkGhz6cO2C=`dtknPG$L_K$Lfq-$(&>qH(@xf z`1I+u-RYx~D%eU&G}P`Gi2x}}hcJE~OZ%m#k%Vwz(mpph#V4Iv`&Q_1Hn z)!6L-gI9g0RosY3fK9-ESn~7DO2F%63P;_d*~z@+*N#Iu6_qZ96um+6#4-$HJF^tQ zI(n7icS3~g>6#ivo;*rrDdyJH{Z-5S+KRIFWwpNV#D@dwF0AW2UHb6LYm*skmcE0< zO7x2JIA_MU&ck*4uoX|S=C+kDPK`_V4!P9=AK_d}jz4OT6{KE#d^Bq&*xj2q*2>6e zQM!HRH!Sd|pB_jV>i*QnhgxRX^^sY@P5>s%Nq8i8quskK)tJxD{VT7(|4F(aB0uNI zB};k!ph_9Yye3D3Zcxi9;bdk{`C?m{Zd216(KDW35dxz@Yj~sf@N#f2k|TNo;x|4;+OTh!x}6SW>`5ZTplyg)5Z(C&R7`_XPk@9d zv|j{C&drz+cvCj zN=NF6ELR{@DnW$9F~q%_RU8(z8KvvFLal-8&Mufra^_{&ilrJo8u~gobbCm`p0C)h z(5x=S=`&DBzdAfjqVTkIs~3hDJW{+eOOXsD$Xj`_GTeCn0Acc4%%Yi}5?}q^9DQzY z#=GcF;j@zC^VR@PFV?;}A<29v+V2|ge|q@&X^I|BUN#$_%*v?wJdlC35Xo*}wE-Eua-G#n za@Tl@egRgswoVoHk-uGdZFRxw{pBwx74Jh(Fcf5BnZvghjqgd5-{Wa}eU>o=jNmFc z@~!uJGp7rGO`im=*3Ii(f0+CF8?M&Adv(I?g*KlzihR1_!2R=$`g4)@@`}I`_oZtU zuJe;(Vd7a&%?E7)u@k22k%TB8{7Ll8Yr1=3I=1C6X5sQEGa--OPxNuUc{GYO>&&jw zqYdR(mQ(#xz3OjYt}BGeVdZ234iSJBc=UjZU0$NdYC|k?FDk8pyr>Lu`sgMid{j=> zA!b17ZudYSdM-?$h<4ZT$_w$A7RWRxZzu2%`$puGk?0V4C4P2Wo7_8+UujN?=*l&W z`V?1yCKX9UMp?FBbr*2Qm_b$|Me1m#2`FfWc!Q<_WchmS>b2PKe($9JEMq(hqxp>X7XpV>*&%KY#e5G5`XXsX1-o~c1XkG)a< zkPNBIDC{m7Rfl|mc}B>O`W_5lghRN%Uwugj4xrQC_S<6b-`swRF?4+GN&{x6&=fHa`FwTO%Iof7egAA zh9KO}01bpMjH})|^F6(;Ga#{%fwT6v>8lnIfXHnf8*zMJZe$^a%)#91Se`E58JA^V zr;)s>6rjb2pU3KlyYGX)EjBzLr)>B4)fTWfCIX4r<6 z&UsfgeMg^=Z!v&=;-%nXBf+yzLNAmug;V}WlU$};SYwifOf33*!fEyJTofrWfOUjG z2&fnuCEuNrE!_$KkbwS5Sx6ApMu-?Ac7M3yOR=ML|G~E4Co`6kI)XAXqlUgHqj;s#T64N9S4qF7FuTSQs zy;01kPBl3c+NE@VXT*^1|(PiK$T=9b~O`bIIXUQ*sy|8TJdr0Gj)bobN_{ zmGG}WxDkQg88JRoSf3hGQ^79DRQ0iP8glML8N|V@RbEk}(Opw_l%&TpIP%8#-25nH 
z1f)vNtZGG&WA79^G$7&%(`(r_Y2*?vnDECCldON-Gi3x7ktTCGsFvdnqyef{FH?(s zUZSg+{Pe%+PM(eng(irFnU{naP%1*&7k=wlUT}a0jChy~Y`26KbYUF0=B(=Q^;V#b z!SmTc20J;cKoZG^&kZt!@ubu(BoQs^j;dPL+c~gX#K30+_F7ddS zBmZz%Il!Sc!MigfT7k5lJt(7J>fI7^*1Jgy_K&=&sr(45pYEtnS#;^Xd$UkCA5rCg z-a-JQL}hdv9I;kGZ|7p3BZh;150HCskQD% zOFka)1C8bFKzCW)yD-%W&I>z5*4wfAN1=%&IG*gr0<~1t+!HBA$GiC`ueb?aUZJF1 zJ$#T)E)&ZFl8zTVo$nUK)c~zzf;u z$ys zrty(&H^c^J@vyyYDCv#sB;+%r>gcFcS87j6{)b2#xNF?{x{K4L6vO7+AQ>vp&xx#A zo!yFT-$ibdfkMxerZJM@Hk!I~7wcM_E~wUkBL)8bw6vAJyV+F9405}iJI7rs^DnN; z$eR&9eIB8BJ@c26J?bFr*2?}x8;JpR@A}5yA2-8qrAbf4_r?`sj(B0cLNzwaJ!9SC ze2!o|C4KE)ol4U}FuhpOK{=yq&yhC?KW#atCbC7QhJRt%Xl3@)7OX!+J{m?eSGbvO4B2? zKD+GR2V02>1R+5PwS_?;-~BAUX*ZOxt;0#f%Pv7-AB?x&Cu-2H81DUJ%Ou4>0^k~k2xINDO)wL*EG z{5cR{_OTfD!M*#^qV4tqZgDO;6;#c9@cmT;SM(A2!X1k17)A{%dsnuLik`9uR>BhK zvgh61uFt|VP5JI*ef0PMNRN^cQk<8~EFrJ?LwXdYJh{-CTJF+|zzo_coWCzb-LqA! zlMf8cPTh!bdYUW85Zvs0kd1P6f)e;;3loVZzD?D4#y+3IAjJDE*625BtSI}hos9=X z(LBjqgdnr;Zr(Cd#3HSa&Kn|^Fe#nSi2*2q!Zz{!>cCppGm*<{47Fb;!3jVz!$b%S zSv@zh#qM{vdxyWSre+qlwqBwz{ezR<|1SJRsyJSQU;JECaEVIu5-cwf3H#B0(e2Ow OOmb4nk~QL{VgCa)i~r65 diff --git a/api/core/model_runtime/model_providers/sagemaker/llm/__init__.py b/api/core/model_runtime/model_providers/sagemaker/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/sagemaker/rerank/__init__.py b/api/core/model_runtime/model_providers/sagemaker/rerank/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py b/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py deleted file mode 100644 index 959dff6a21c6d5..00000000000000 --- a/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py +++ /dev/null @@ -1,173 +0,0 @@ -import json -import logging -import operator -from typing import Any, Optional - -import boto3 - -from core.model_runtime.entities.common_entities import I18nObject 
-from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - -logger = logging.getLogger(__name__) - - -class SageMakerRerankModel(RerankModel): - """ - Model class for SageMaker rerank model. - """ - - sagemaker_client: Any = None - - def _sagemaker_rerank(self, query_input: str, docs: list[str], rerank_endpoint: str): - inputs = [query_input] * len(docs) - response_model = self.sagemaker_client.invoke_endpoint( - EndpointName=rerank_endpoint, - Body=json.dumps({"inputs": inputs, "docs": docs}), - ContentType="application/json", - ) - json_str = response_model["Body"].read().decode("utf8") - json_obj = json.loads(json_str) - scores = json_obj["scores"] - return scores if isinstance(scores, list) else [scores] - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n - :param user: unique user id - :return: rerank result - """ - line = 0 - try: - if len(docs) == 0: - return RerankResult(model=model, docs=docs) - - line = 1 - if not self.sagemaker_client: - access_key = credentials.get("aws_access_key_id") - secret_key = credentials.get("aws_secret_access_key") - aws_region = credentials.get("aws_region") - if 
aws_region: - if access_key and secret_key: - self.sagemaker_client = boto3.client( - "sagemaker-runtime", - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - region_name=aws_region, - ) - else: - self.sagemaker_client = boto3.client("sagemaker-runtime", region_name=aws_region) - else: - self.sagemaker_client = boto3.client("sagemaker-runtime") - - line = 2 - - sagemaker_endpoint = credentials.get("sagemaker_endpoint") - candidate_docs = [] - - scores = self._sagemaker_rerank(query, docs, sagemaker_endpoint) - for idx in range(len(scores)): - candidate_docs.append({"content": docs[idx], "score": scores[idx]}) - - sorted(candidate_docs, key=operator.itemgetter("score"), reverse=True) - - line = 3 - rerank_documents = [] - for idx, result in enumerate(candidate_docs): - rerank_document = RerankDocument( - index=idx, text=result.get("content"), score=result.get("score", -100.0) - ) - - if score_threshold is not None: - if rerank_document.score >= score_threshold: - rerank_documents.append(rerank_document) - else: - rerank_documents.append(rerank_document) - - return RerankResult(model=model, docs=rerank_documents) - - except Exception as e: - logger.exception(f"Exception {e}, line : {line}") - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke( - model=model, - credentials=credentials, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. 
Its capital is Saipan.", - ], - score_threshold=0.8, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.RERANK, - model_properties={}, - parameter_rules=[], - ) - - return entity diff --git a/api/core/model_runtime/model_providers/sagemaker/sagemaker.py b/api/core/model_runtime/model_providers/sagemaker/sagemaker.py deleted file mode 100644 index 042155b1522fef..00000000000000 --- a/api/core/model_runtime/model_providers/sagemaker/sagemaker.py +++ /dev/null @@ -1,41 +0,0 @@ -import logging -import uuid -from typing import IO, Any - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class SageMakerProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - pass - - -def buffer_to_s3(s3_client: Any, file: IO[bytes], bucket: str, s3_prefix: str) -> str: - """ - return s3_uri of this file - """ - s3_key = f"{s3_prefix}{uuid.uuid4()}.mp3" - s3_client.put_object(Body=file.read(), Bucket=bucket, Key=s3_key, ContentType="audio/mp3") - return s3_key - - -def generate_presigned_url(s3_client: Any, file: IO[bytes], bucket_name: str, s3_prefix: str, expiration=600) -> str: - object_key = buffer_to_s3(s3_client, file, bucket_name, s3_prefix) - try: - response = s3_client.generate_presigned_url( - "get_object", Params={"Bucket": bucket_name, "Key": object_key}, ExpiresIn=expiration - ) - except Exception as e: - print(f"Error generating presigned URL: {e}") - return None - - return response diff --git a/api/core/model_runtime/model_providers/sagemaker/sagemaker.yaml b/api/core/model_runtime/model_providers/sagemaker/sagemaker.yaml deleted file mode 100644 index 87cd50f50cbb85..00000000000000 --- a/api/core/model_runtime/model_providers/sagemaker/sagemaker.yaml +++ /dev/null @@ -1,193 +0,0 @@ -provider: sagemaker -label: - zh_Hans: Sagemaker - en_US: Sagemaker -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.png -description: - en_US: Customized model on Sagemaker - zh_Hans: Sagemaker上的私有化部署的模型 -background: "#ECE9E3" -help: - title: - en_US: How to deploy customized model on Sagemaker - zh_Hans: 如何在Sagemaker上的私有化部署的模型 - url: - en_US: https://github.com/aws-samples/dify-aws-tool/blob/main/README.md#how-to-deploy-sagemaker-endpoint - zh_Hans: https://github.com/aws-samples/dify-aws-tool/blob/main/README_ZH.md#%E5%A6%82%E4%BD%95%E9%83%A8%E7%BD%B2sagemaker%E6%8E%A8%E7%90%86%E7%AB%AF%E7%82%B9 -supported_model_types: - - llm - - text-embedding - - rerank - - speech2text - - tts -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: 
mode - show_on: - - variable: __model_type - value: llm - label: - en_US: Completion mode - type: select - required: false - default: chat - placeholder: - zh_Hans: 选择对话类型 - en_US: Select completion mode - options: - - value: chat - label: - en_US: Chat - zh_Hans: Chat - - variable: sagemaker_endpoint - label: - en_US: sagemaker endpoint - type: text-input - required: true - placeholder: - zh_Hans: 请输出你的Sagemaker推理端点 - en_US: Enter your Sagemaker Inference endpoint - - variable: audio_s3_cache_bucket - show_on: - - variable: __model_type - value: speech2text - label: - zh_Hans: 音频缓存桶(s3 bucket) - en_US: audio cache bucket(s3 bucket) - type: text-input - required: true - placeholder: - zh_Hans: sagemaker-us-east-1-******207838 - en_US: sagemaker-us-east-1-*******7838 - - variable: audio_model_type - show_on: - - variable: __model_type - value: tts - label: - en_US: Audio model type - type: select - required: true - placeholder: - zh_Hans: 语音模型类型 - en_US: Audio model type - options: - - value: PresetVoice - label: - en_US: preset voice - zh_Hans: 内置音色 - - value: CloneVoice - label: - en_US: clone voice - zh_Hans: 克隆音色 - - value: CloneVoice_CrossLingual - label: - en_US: crosslingual clone voice - zh_Hans: 跨语种克隆音色 - - value: InstructVoice - label: - en_US: Instruct voice - zh_Hans: 文字指令音色 - - variable: prompt_audio - show_on: - - variable: __model_type - value: tts - label: - en_US: Mock Audio Source - type: text-input - required: false - placeholder: - zh_Hans: 被模仿的音色音频 - en_US: source audio to be mocked - - variable: prompt_text - show_on: - - variable: __model_type - value: tts - label: - en_US: Prompt Audio Text - type: text-input - required: false - placeholder: - zh_Hans: 模仿音色的对应文本 - en_US: text for the mocked source audio - - variable: instruct_text - show_on: - - variable: __model_type - value: tts - label: - en_US: instruct text for speaker - type: text-input - required: false - - variable: aws_access_key_id - required: false - label: - en_US: Access Key (If 
not provided, credentials are obtained from the running environment.) - zh_Hans: Access Key (如果未提供,凭证将从运行环境中获取。) - type: secret-input - placeholder: - en_US: Enter your Access Key - zh_Hans: 在此输入您的 Access Key - - variable: aws_secret_access_key - required: false - label: - en_US: Secret Access Key - zh_Hans: Secret Access Key - type: secret-input - placeholder: - en_US: Enter your Secret Access Key - zh_Hans: 在此输入您的 Secret Access Key - - variable: aws_region - required: false - label: - en_US: AWS Region - zh_Hans: AWS 地区 - type: select - default: us-east-1 - options: - - value: us-east-1 - label: - en_US: US East (N. Virginia) - zh_Hans: 美国东部 (弗吉尼亚北部) - - value: us-west-2 - label: - en_US: US West (Oregon) - zh_Hans: 美国西部 (俄勒冈州) - - value: ap-southeast-1 - label: - en_US: Asia Pacific (Singapore) - zh_Hans: 亚太地区 (新加坡) - - value: ap-northeast-1 - label: - en_US: Asia Pacific (Tokyo) - zh_Hans: 亚太地区 (东京) - - value: eu-central-1 - label: - en_US: Europe (Frankfurt) - zh_Hans: 欧洲 (法兰克福) - - value: us-gov-west-1 - label: - en_US: AWS GovCloud (US-West) - zh_Hans: AWS GovCloud (US-West) - - value: ap-southeast-2 - label: - en_US: Asia Pacific (Sydney) - zh_Hans: 亚太地区 (悉尼) - - value: cn-north-1 - label: - en_US: AWS Beijing (cn-north-1) - zh_Hans: 中国北京 (cn-north-1) - - value: cn-northwest-1 - label: - en_US: AWS Ningxia (cn-northwest-1) - zh_Hans: 中国宁夏 (cn-northwest-1) diff --git a/api/core/model_runtime/model_providers/sagemaker/speech2text/__init__.py b/api/core/model_runtime/model_providers/sagemaker/speech2text/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py b/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py deleted file mode 100644 index 6aa8c9995f68b4..00000000000000 --- a/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py +++ /dev/null @@ -1,125 +0,0 @@ -import json -import logging -from typing 
import IO, Any, Optional - -import boto3 - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel -from core.model_runtime.model_providers.sagemaker.sagemaker import generate_presigned_url - -logger = logging.getLogger(__name__) - - -class SageMakerSpeech2TextModel(Speech2TextModel): - """ - Model class for Xinference speech to text model. - """ - - sagemaker_client: Any = None - s3_client: Any = None - - def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :param user: unique user id - :return: text for given audio file - """ - asr_text = None - - try: - if not self.sagemaker_client: - access_key = credentials.get("aws_access_key_id") - secret_key = credentials.get("aws_secret_access_key") - aws_region = credentials.get("aws_region") - if aws_region: - if access_key and secret_key: - self.sagemaker_client = boto3.client( - "sagemaker-runtime", - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - region_name=aws_region, - ) - self.s3_client = boto3.client( - "s3", aws_access_key_id=access_key, aws_secret_access_key=secret_key, region_name=aws_region - ) - else: - self.sagemaker_client = boto3.client("sagemaker-runtime", region_name=aws_region) - self.s3_client = boto3.client("s3", region_name=aws_region) - else: - self.sagemaker_client = boto3.client("sagemaker-runtime") - self.s3_client = boto3.client("s3") - - s3_prefix = "dify/speech2text/" - sagemaker_endpoint = 
credentials.get("sagemaker_endpoint") - bucket = credentials.get("audio_s3_cache_bucket") - - s3_presign_url = generate_presigned_url(self.s3_client, file, bucket, s3_prefix) - payload = {"audio_s3_presign_uri": s3_presign_url} - - response_model = self.sagemaker_client.invoke_endpoint( - EndpointName=sagemaker_endpoint, Body=json.dumps(payload), ContentType="application/json" - ) - json_str = response_model["Body"].read().decode("utf8") - json_obj = json.loads(json_str) - asr_text = json_obj["text"] - except Exception as e: - logger.exception(f"Exception {e}, line : {line}") - - return asr_text - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - pass - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.SPEECH2TEXT, - model_properties={}, - parameter_rules=[], - ) - - return entity diff --git a/api/core/model_runtime/model_providers/sagemaker/text_embedding/__init__.py b/api/core/model_runtime/model_providers/sagemaker/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/sagemaker/tts/__init__.py b/api/core/model_runtime/model_providers/sagemaker/tts/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/sagemaker/tts/tts.py b/api/core/model_runtime/model_providers/sagemaker/tts/tts.py deleted file mode 100644 index a22bd6dd6ef4f8..00000000000000 --- a/api/core/model_runtime/model_providers/sagemaker/tts/tts.py +++ /dev/null @@ -1,275 +0,0 @@ -import concurrent.futures -import copy -import json -import logging -from enum import Enum -from typing import Any, Optional - -import boto3 -import requests - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from 
core.model_runtime.model_providers.__base.tts_model import TTSModel - -logger = logging.getLogger(__name__) - - -class TTSModelType(Enum): - PresetVoice = "PresetVoice" - CloneVoice = "CloneVoice" - CloneVoice_CrossLingual = "CloneVoice_CrossLingual" - InstructVoice = "InstructVoice" - - -class SageMakerText2SpeechModel(TTSModel): - sagemaker_client: Any = None - s3_client: Any = None - comprehend_client: Any = None - - def __init__(self): - # preset voices, need support custom voice - self.model_voices = { - "__default": { - "all": [ - {"name": "Default", "value": "default"}, - ] - }, - "CosyVoice": { - "zh-Hans": [ - {"name": "中文男", "value": "中文男"}, - {"name": "中文女", "value": "中文女"}, - {"name": "粤语女", "value": "粤语女"}, - ], - "zh-Hant": [ - {"name": "中文男", "value": "中文男"}, - {"name": "中文女", "value": "中文女"}, - {"name": "粤语女", "value": "粤语女"}, - ], - "en-US": [ - {"name": "英文男", "value": "英文男"}, - {"name": "英文女", "value": "英文女"}, - ], - "ja-JP": [ - {"name": "日语男", "value": "日语男"}, - ], - "ko-KR": [ - {"name": "韩语女", "value": "韩语女"}, - ], - }, - } - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - pass - - def _detect_lang_code(self, content: str, map_dict: dict = None): - map_dict = {"zh": "<|zh|>", "en": "<|en|>", "ja": "<|jp|>", "zh-TW": "<|yue|>", "ko": "<|ko|>"} - - response = self.comprehend_client.detect_dominant_language(Text=content) - language_code = response["Languages"][0]["LanguageCode"] - - return map_dict.get(language_code, "<|zh|>") - - def _build_tts_payload( - self, - model_type: str, - content_text: str, - model_role: str, - prompt_text: str, - prompt_audio: str, - instruct_text: str, - ): - if model_type == TTSModelType.PresetVoice.value and model_role: - return {"tts_text": content_text, "role": model_role} - if model_type == TTSModelType.CloneVoice.value and prompt_text and prompt_audio: - return 
{"tts_text": content_text, "prompt_text": prompt_text, "prompt_audio": prompt_audio} - if model_type == TTSModelType.CloneVoice_CrossLingual.value and prompt_audio: - lang_tag = self._detect_lang_code(content_text) - return {"tts_text": f"{content_text}", "prompt_audio": prompt_audio, "lang_tag": lang_tag} - if model_type == TTSModelType.InstructVoice.value and instruct_text and model_role: - return {"tts_text": content_text, "role": model_role, "instruct_text": instruct_text} - - raise RuntimeError(f"Invalid params for {model_type}") - - def _invoke( - self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ): - """ - _invoke text2speech model - - :param model: model name - :param tenant_id: user tenant id - :param credentials: model credentials - :param voice: model timbre - :param content_text: text content to be translated - :param user: unique user id - :return: text translated to audio file - """ - if not self.sagemaker_client: - access_key = credentials.get("aws_access_key_id") - secret_key = credentials.get("aws_secret_access_key") - aws_region = credentials.get("aws_region") - if aws_region: - if access_key and secret_key: - self.sagemaker_client = boto3.client( - "sagemaker-runtime", - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - region_name=aws_region, - ) - self.s3_client = boto3.client( - "s3", aws_access_key_id=access_key, aws_secret_access_key=secret_key, region_name=aws_region - ) - self.comprehend_client = boto3.client( - "comprehend", - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - region_name=aws_region, - ) - else: - self.sagemaker_client = boto3.client("sagemaker-runtime", region_name=aws_region) - self.s3_client = boto3.client("s3", region_name=aws_region) - self.comprehend_client = boto3.client("comprehend", region_name=aws_region) - else: - self.sagemaker_client = boto3.client("sagemaker-runtime") - self.s3_client = boto3.client("s3") - 
self.comprehend_client = boto3.client("comprehend") - - model_type = credentials.get("audio_model_type", "PresetVoice") - prompt_text = credentials.get("prompt_text") - prompt_audio = credentials.get("prompt_audio") - instruct_text = credentials.get("instruct_text") - sagemaker_endpoint = credentials.get("sagemaker_endpoint") - payload = self._build_tts_payload(model_type, content_text, voice, prompt_text, prompt_audio, instruct_text) - - return self._tts_invoke_streaming(model_type, payload, sagemaker_endpoint) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TTS, - model_properties={}, - parameter_rules=[], - ) - - return entity - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], - } - - def _get_model_default_voice(self, model: str, credentials: dict) -> any: - return "" - - def _get_model_word_limit(self, model: str, credentials: dict) -> int: - return 15 - - def _get_model_audio_type(self, model: str, credentials: dict) -> str: - return "mp3" - - def _get_model_workers_limit(self, model: str, credentials: dict) -> int: - return 5 - - def get_tts_model_voices(self, model: str, credentials: dict, language: Optional[str] = None) -> list: - audio_model_name = "CosyVoice" - for key, voices in self.model_voices.items(): - if key in audio_model_name: - if language and language in voices: - return voices[language] - elif "all" in voices: - return voices["all"] - - return self.model_voices["__default"]["all"] - - def _invoke_sagemaker(self, payload: dict, endpoint: str): - response_model = self.sagemaker_client.invoke_endpoint( - EndpointName=endpoint, - Body=json.dumps(payload), - ContentType="application/json", - ) - json_str = response_model["Body"].read().decode("utf8") - json_obj = json.loads(json_str) - return json_obj - - def _tts_invoke_streaming(self, model_type: str, payload: dict, sagemaker_endpoint: str) -> any: - """ - _tts_invoke_streaming text2speech model - - :param model: model name - :param credentials: model credentials - :param content_text: text content to be translated - :param voice: model timbre - :return: text translated to audio file - """ - try: - lang_tag = "" - if model_type == TTSModelType.CloneVoice_CrossLingual.value: - lang_tag = payload.pop("lang_tag") - - word_limit = self._get_model_word_limit(model="", credentials={}) - content_text = payload.get("tts_text") - if 
len(content_text) > word_limit: - split_sentences = self._split_text_into_sentences(content_text, max_length=word_limit) - sentences = [f"{lang_tag}{s}" for s in split_sentences if len(s)] - len_sent = len(sentences) - executor = concurrent.futures.ThreadPoolExecutor(max_workers=min(4, len_sent)) - payloads = [copy.deepcopy(payload) for i in range(len_sent)] - for idx in range(len_sent): - payloads[idx]["tts_text"] = sentences[idx] - - futures = [ - executor.submit( - self._invoke_sagemaker, - payload=payload, - endpoint=sagemaker_endpoint, - ) - for payload in payloads - ] - - for future in futures: - resp = future.result() - audio_bytes = requests.get(resp.get("s3_presign_url")).content - for i in range(0, len(audio_bytes), 1024): - yield audio_bytes[i : i + 1024] - else: - resp = self._invoke_sagemaker(payload, sagemaker_endpoint) - audio_bytes = requests.get(resp.get("s3_presign_url")).content - - for i in range(0, len(audio_bytes), 1024): - yield audio_bytes[i : i + 1024] - except Exception as ex: - raise InvokeBadRequestError(str(ex)) diff --git a/api/core/model_runtime/model_providers/siliconflow/_assets/siliconflow.svg b/api/core/model_runtime/model_providers/siliconflow/_assets/siliconflow.svg deleted file mode 100644 index 16e406f030225b..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/_assets/siliconflow.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/siliconflow/_assets/siliconflow_square.svg b/api/core/model_runtime/model_providers/siliconflow/_assets/siliconflow_square.svg deleted file mode 100644 index ad6b384f7acd21..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/_assets/siliconflow_square.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/deepdeek-coder-v2-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/deepdeek-coder-v2-instruct.yaml 
deleted file mode 100644 index d4431179e5656d..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/deepdeek-coder-v2-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: deepseek-ai/DeepSeek-Coder-V2-Instruct -label: - en_US: deepseek-ai/DeepSeek-Coder-V2-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '1.33' - output: '1.33' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/deepseek-v2-chat.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/deepseek-v2-chat.yaml deleted file mode 100644 index caa6508b5ed2a2..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/deepseek-v2-chat.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: deepseek-ai/DeepSeek-V2-Chat -label: - en_US: deepseek-ai/DeepSeek-V2-Chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '1.33' - output: '1.33' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/deepseek-v2.5.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/deepseek-v2.5.yaml deleted file mode 100644 index 1c8e15ae52afa7..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/deepseek-v2.5.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: deepseek-ai/DeepSeek-V2.5 -label: - en_US: deepseek-ai/DeepSeek-V2.5 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '1.33' - output: '1.33' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/gemma-2-27b-it.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/gemma-2-27b-it.yaml deleted file mode 100644 index 2840e3dcf4b113..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/gemma-2-27b-it.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: google/gemma-2-27b-it -label: - en_US: google/gemma-2-27b-it -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8196 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '1.26' - output: '1.26' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/gemma-2-9b-it.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/gemma-2-9b-it.yaml deleted file mode 100644 index d7e19b46f6d6f2..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/gemma-2-9b-it.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: google/gemma-2-9b-it -label: - en_US: google/gemma-2-9b-it -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8196 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/glm4-9b-chat.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/glm4-9b-chat.yaml deleted file mode 100644 index 9b32a024774d06..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/glm4-9b-chat.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: THUDM/glm-4-9b-chat -label: - en_US: THUDM/glm-4-9b-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. 
If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-7b-chat.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-7b-chat.yaml deleted file mode 100644 index 73ad4480aa2968..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-7b-chat.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: internlm/internlm2_5-7b-chat -label: - en_US: internlm/internlm2_5-7b-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py deleted file mode 100644 index c1868b6ad02b83..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py +++ /dev/null @@ -1,31 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from core.model_runtime.entities.llm_entities import LLMResult -from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class SiliconflowLargeLanguageModel(OAIAPICompatLargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials) - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - @classmethod - def _add_custom_parameters(cls, credentials: dict) -> None: - credentials["mode"] = "chat" - credentials["endpoint_url"] = "https://api.siliconflow.cn/v1" diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3-70b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3-70b-instruct.yaml deleted file mode 100644 index 9993d781ac8959..00000000000000 --- 
a/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3-70b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: meta-llama/Meta-Llama-3-70B-Instruct -label: - en_US: meta-llama/Meta-Llama-3-70B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '4.13' - output: '4.13' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3-8b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3-8b-instruct.yaml deleted file mode 100644 index 60e3764789e1f5..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3-8b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: meta-llama/Meta-Llama-3-8B-Instruct -label: - en_US: meta-llama/Meta-Llama-3-8B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-405b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-405b-instruct.yaml deleted file mode 100644 index f992660aa2e66f..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-405b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: meta-llama/Meta-Llama-3.1-405B-Instruct -label: - en_US: meta-llama/Meta-Llama-3.1-405B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '21' - output: '21' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-70b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-70b-instruct.yaml deleted file mode 100644 index 1c69d63a400219..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-70b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: meta-llama/Meta-Llama-3.1-70B-Instruct -label: - en_US: meta-llama/Meta-Llama-3.1-70B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '4.13' - output: '4.13' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-8b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-8b-instruct.yaml deleted file mode 100644 index a97002a5ca3658..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/meta-mlama-3.1-8b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: meta-llama/Meta-Llama-3.1-8B-Instruct -label: - en_US: meta-llama/Meta-Llama-3.1-8B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/mistral-7b-instruct-v0.2.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/mistral-7b-instruct-v0.2.yaml deleted file mode 100644 index 89fb153ba09ac9..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/mistral-7b-instruct-v0.2.yaml +++ /dev/null @@ -1,31 +0,0 @@ -model: mistralai/Mistral-7B-Instruct-v0.2 -label: - en_US: mistralai/Mistral-7B-Instruct-v0.2 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/mistral-8x7b-instruct-v0.1.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/mistral-8x7b-instruct-v0.1.yaml deleted file mode 100644 index 2785e7496fb060..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/mistral-8x7b-instruct-v0.1.yaml +++ /dev/null @@ -1,31 +0,0 @@ -model: mistralai/Mixtral-8x7B-Instruct-v0.1 -label: - en_US: mistralai/Mixtral-8x7B-Instruct-v0.1 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '1.26' - output: '1.26' - unit: '0.000001' - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-1.5b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-1.5b-instruct.yaml deleted file mode 100644 index f6c976af8e7b6e..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-1.5b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: Qwen/Qwen2-1.5B-Instruct -label: - en_US: Qwen/Qwen2-1.5B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-57b-a14b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-57b-a14b-instruct.yaml deleted file mode 100644 index a996e919ea9f27..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-57b-a14b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: Qwen/Qwen2-57B-A14B-Instruct -label: - en_US: Qwen/Qwen2-57B-A14B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '1.26' - output: '1.26' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-72b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-72b-instruct.yaml deleted file mode 100644 index a6e2c22dac87c0..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-72b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: Qwen/Qwen2-72B-Instruct -label: - en_US: Qwen/Qwen2-72B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '4.13' - output: '4.13' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-7b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-7b-instruct.yaml deleted file mode 100644 index d8bea5e12927e7..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2-7b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: Qwen/Qwen2-7B-Instruct -label: - en_US: Qwen/Qwen2-7B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-14b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-14b-instruct.yaml deleted file mode 100644 index 02a401464bb8c0..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-14b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: Qwen/Qwen2.5-14B-Instruct -label: - en_US: Qwen/Qwen2.5-14B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 8192 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0.7' - output: '0.7' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-32b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-32b-instruct.yaml deleted file mode 100644 index d084617e7d7366..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-32b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: Qwen/Qwen2.5-32B-Instruct -label: - en_US: Qwen/Qwen2.5-32B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 8192 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '1.26' - output: '1.26' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-72b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-72b-instruct.yaml deleted file mode 100644 index dfbad2494cdf74..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-72b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: Qwen/Qwen2.5-72B-Instruct -label: - en_US: Qwen/Qwen2.5-72B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 8192 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '4.13' - output: '4.13' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-7b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-7b-instruct.yaml deleted file mode 100644 index cdc8ffc4d21041..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-7b-instruct.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: Qwen/Qwen2.5-7B-Instruct -label: - en_US: Qwen/Qwen2.5-7B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 8192 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-34b-chat.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-34b-chat.yaml deleted file mode 100644 index 864ba46f1adfdf..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-34b-chat.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: 01-ai/Yi-1.5-34B-Chat -label: - en_US: 01-ai/Yi-1.5-34B-Chat-16K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 16384 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '1.26' - output: '1.26' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-6b-chat.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-6b-chat.yaml deleted file mode 100644 index fe4c8b4b3e0350..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-6b-chat.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: 01-ai/Yi-1.5-6B-Chat -label: - en_US: 01-ai/Yi-1.5-6B-Chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-9b-chat.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-9b-chat.yaml deleted file mode 100644 index c61f0dc53fe6ec..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/yi-1.5-9b-chat.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: 01-ai/Yi-1.5-9B-Chat-16K -label: - en_US: 01-ai/Yi-1.5-9B-Chat-16K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 16384 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
- - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/rerank/__init__.py b/api/core/model_runtime/model_providers/siliconflow/rerank/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/siliconflow/rerank/bce-reranker-base_v1.yaml b/api/core/model_runtime/model_providers/siliconflow/rerank/bce-reranker-base_v1.yaml deleted file mode 100644 index ff3635bfeb1273..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/rerank/bce-reranker-base_v1.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: netease-youdao/bce-reranker-base_v1 -model_type: rerank -model_properties: - context_size: 512 diff --git a/api/core/model_runtime/model_providers/siliconflow/rerank/bge-reranker-v2-m3.yaml b/api/core/model_runtime/model_providers/siliconflow/rerank/bge-reranker-v2-m3.yaml deleted file mode 100644 index 807f531b084892..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/rerank/bge-reranker-v2-m3.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: BAAI/bge-reranker-v2-m3 -model_type: rerank -model_properties: - context_size: 8192 diff --git a/api/core/model_runtime/model_providers/siliconflow/rerank/rerank.py b/api/core/model_runtime/model_providers/siliconflow/rerank/rerank.py deleted file mode 100644 index 58b033d28aa90e..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/rerank/rerank.py +++ /dev/null @@ -1,85 +0,0 @@ -from typing import Optional - -import httpx - -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate 
import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - - -class SiliconflowRerankModel(RerankModel): - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - if len(docs) == 0: - return RerankResult(model=model, docs=[]) - - base_url = credentials.get("base_url", "https://api.siliconflow.cn/v1") - base_url = base_url.removesuffix("/") - try: - response = httpx.post( - base_url + "/rerank", - json={"model": model, "query": query, "documents": docs, "top_n": top_n, "return_documents": True}, - headers={"Authorization": f"Bearer {credentials.get('api_key')}"}, - ) - response.raise_for_status() - results = response.json() - - rerank_documents = [] - for result in results["results"]: - rerank_document = RerankDocument( - index=result["index"], - text=result["document"]["text"], - score=result["relevance_score"], - ) - if score_threshold is None or result["relevance_score"] >= score_threshold: - rerank_documents.append(rerank_document) - - return RerankResult(model=model, docs=rerank_documents) - except httpx.HTTPStatusError as e: - raise InvokeServerUnavailableError(str(e)) - - def validate_credentials(self, model: str, credentials: dict) -> None: - try: - self._invoke( - model=model, - credentials=credentials, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. 
Its capital is Saipan.", - ], - score_threshold=0.8, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - """ - return { - InvokeConnectionError: [httpx.ConnectError], - InvokeServerUnavailableError: [httpx.RemoteProtocolError], - InvokeRateLimitError: [], - InvokeAuthorizationError: [httpx.HTTPStatusError], - InvokeBadRequestError: [httpx.RequestError], - } diff --git a/api/core/model_runtime/model_providers/siliconflow/siliconflow.py b/api/core/model_runtime/model_providers/siliconflow/siliconflow.py deleted file mode 100644 index e121ab8c7e4e2f..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/siliconflow.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class SiliconflowProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - model_instance.validate_credentials(model="deepseek-ai/DeepSeek-V2-Chat", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml b/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml deleted file mode 100644 index c46a891604c480..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml +++ /dev/null @@ -1,32 +0,0 @@ -provider: siliconflow -label: - zh_Hans: 硅基流动 - en_US: SiliconFlow -icon_small: - en_US: siliconflow_square.svg -icon_large: - en_US: siliconflow.svg -background: "#ffecff" -help: - title: - en_US: Get your API Key from SiliconFlow - zh_Hans: 从 SiliconFlow 获取 API Key - url: - en_US: https://cloud.siliconflow.cn/account/ak -supported_model_types: - - llm - - text-embedding - - rerank - - speech2text -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/siliconflow/speech2text/__init__.py b/api/core/model_runtime/model_providers/siliconflow/speech2text/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/siliconflow/speech2text/sense-voice-small.yaml b/api/core/model_runtime/model_providers/siliconflow/speech2text/sense-voice-small.yaml deleted file mode 100644 index deceaf60f4f017..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/speech2text/sense-voice-small.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: iic/SenseVoiceSmall -model_type: speech2text 
-model_properties: - file_upload_limit: 1 - supported_file_extensions: mp3,wav diff --git a/api/core/model_runtime/model_providers/siliconflow/speech2text/speech2text.py b/api/core/model_runtime/model_providers/siliconflow/speech2text/speech2text.py deleted file mode 100644 index 8d1932863e09d9..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/speech2text/speech2text.py +++ /dev/null @@ -1,30 +0,0 @@ -from typing import IO, Optional - -from core.model_runtime.model_providers.openai_api_compatible.speech2text.speech2text import OAICompatSpeech2TextModel - - -class SiliconflowSpeech2TextModel(OAICompatSpeech2TextModel): - """ - Model class for Siliconflow Speech to text model. - """ - - def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :param user: unique user id - :return: text for given audio file - """ - self._add_custom_parameters(credentials) - return super()._invoke(model, credentials, file) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - return super().validate_credentials(model, credentials) - - @classmethod - def _add_custom_parameters(cls, credentials: dict) -> None: - credentials["endpoint_url"] = "https://api.siliconflow.cn/v1" diff --git a/api/core/model_runtime/model_providers/siliconflow/text_embedding/bce-embedding-base-v1.yaml b/api/core/model_runtime/model_providers/siliconflow/text_embedding/bce-embedding-base-v1.yaml deleted file mode 100644 index 710fbc04f6ad12..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/text_embedding/bce-embedding-base-v1.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: netease-youdao/bce-embedding-base_v1 -model_type: text-embedding -model_properties: - context_size: 512 - max_chunks: 1 diff --git 
a/api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-large-en-v1.5.yaml b/api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-large-en-v1.5.yaml deleted file mode 100644 index 84f69b41a08c13..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-large-en-v1.5.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: BAAI/bge-large-en-v1.5 -model_type: text-embedding -model_properties: - context_size: 512 - max_chunks: 1 diff --git a/api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-large-zh-v1.5.yaml b/api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-large-zh-v1.5.yaml deleted file mode 100644 index 5248375d0b507e..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-large-zh-v1.5.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: BAAI/bge-large-zh-v1.5 -model_type: text-embedding -model_properties: - context_size: 512 - max_chunks: 1 diff --git a/api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-m3.yaml b/api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-m3.yaml deleted file mode 100644 index f0b12dd420ab2b..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/text_embedding/bge-m3.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: BAAI/bge-m3 -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 1 diff --git a/api/core/model_runtime/model_providers/spark/__init__.py b/api/core/model_runtime/model_providers/spark/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/spark/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/spark/_assets/icon_l_en.svg deleted file mode 100644 index 521c68cae52a78..00000000000000 --- a/api/core/model_runtime/model_providers/spark/_assets/icon_l_en.svg +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - diff --git 
a/api/core/model_runtime/model_providers/spark/_assets/icon_l_zh.svg b/api/core/model_runtime/model_providers/spark/_assets/icon_l_zh.svg deleted file mode 100644 index 71d85216aaa691..00000000000000 --- a/api/core/model_runtime/model_providers/spark/_assets/icon_l_zh.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/spark/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/spark/_assets/icon_s_en.svg deleted file mode 100644 index ef0a9131a48e43..00000000000000 --- a/api/core/model_runtime/model_providers/spark/_assets/icon_s_en.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/api/core/model_runtime/model_providers/spark/llm/__init__.py b/api/core/model_runtime/model_providers/spark/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/spark/llm/_client.py b/api/core/model_runtime/model_providers/spark/llm/_client.py deleted file mode 100644 index 48911f657a52e3..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/_client.py +++ /dev/null @@ -1,163 +0,0 @@ -import base64 -import hashlib -import hmac -import json -import queue -import ssl -from datetime import datetime -from time import mktime -from typing import Optional -from urllib.parse import urlencode, urlparse -from wsgiref.handlers import format_date_time - -import websocket - - -class SparkLLMClient: - def __init__(self, model: str, app_id: str, api_key: str, api_secret: str, api_domain: Optional[str] = None): - domain = "spark-api.xf-yun.com" - endpoint = "chat" - if api_domain: - domain = api_domain - - model_api_configs = { - "spark-lite": {"version": "v1.1", "chat_domain": "general"}, - "spark-pro": {"version": "v3.1", "chat_domain": "generalv3"}, - "spark-pro-128k": {"version": "pro-128k", "chat_domain": "pro-128k"}, - "spark-max": {"version": "v3.5", "chat_domain": "generalv3.5"}, - "spark-max-32k": {"version": "max-32k", 
"chat_domain": "max-32k"}, - "spark-4.0-ultra": {"version": "v4.0", "chat_domain": "4.0Ultra"}, - } - - api_version = model_api_configs[model]["version"] - - self.chat_domain = model_api_configs[model]["chat_domain"] - - if model in ["spark-pro-128k", "spark-max-32k"]: - self.api_base = f"wss://{domain}/{endpoint}/{api_version}" - else: - self.api_base = f"wss://{domain}/{api_version}/{endpoint}" - - self.app_id = app_id - self.ws_url = self.create_url( - urlparse(self.api_base).netloc, urlparse(self.api_base).path, self.api_base, api_key, api_secret - ) - - self.queue = queue.Queue() - self.blocking_message = "" - - def create_url(self, host: str, path: str, api_base: str, api_key: str, api_secret: str) -> str: - # generate timestamp by RFC1123 - now = datetime.now() - date = format_date_time(mktime(now.timetuple())) - - signature_origin = "host: " + host + "\n" - signature_origin += "date: " + date + "\n" - signature_origin += "GET " + path + " HTTP/1.1" - - # encrypt using hmac-sha256 - signature_sha = hmac.new( - api_secret.encode("utf-8"), signature_origin.encode("utf-8"), digestmod=hashlib.sha256 - ).digest() - - signature_sha_base64 = base64.b64encode(signature_sha).decode(encoding="utf-8") - - authorization_origin = ( - f'api_key="{api_key}", algorithm="hmac-sha256", headers="host date request-line",' - f' signature="{signature_sha_base64}"' - ) - - authorization = base64.b64encode(authorization_origin.encode("utf-8")).decode(encoding="utf-8") - - v = {"authorization": authorization, "date": date, "host": host} - # generate url - url = api_base + "?" 
+ urlencode(v) - return url - - def run(self, messages: list, user_id: str, model_kwargs: Optional[dict] = None, streaming: bool = False): - websocket.enableTrace(False) - ws = websocket.WebSocketApp( - self.ws_url, - on_message=self.on_message, - on_error=self.on_error, - on_close=self.on_close, - on_open=self.on_open, - ) - ws.messages = messages - ws.user_id = user_id - ws.model_kwargs = model_kwargs - ws.streaming = streaming - ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE}) - - def on_error(self, ws, error): - self.queue.put({"status_code": error.status_code, "error": error.resp_body.decode("utf-8")}) - ws.close() - - def on_close(self, ws, close_status_code, close_reason): - self.queue.put({"done": True}) - - def on_open(self, ws): - self.blocking_message = "" - data = json.dumps(self.gen_params(messages=ws.messages, user_id=ws.user_id, model_kwargs=ws.model_kwargs)) - ws.send(data) - - def on_message(self, ws, message): - data = json.loads(message) - code = data["header"]["code"] - if code != 0: - self.queue.put({"status_code": 400, "error": f"Code: {code}, Error: {data['header']['message']}"}) - ws.close() - else: - choices = data["payload"]["choices"] - status = choices["status"] - content = choices["text"][0]["content"] - if ws.streaming: - self.queue.put({"data": content}) - else: - self.blocking_message += content - - if status == 2: - if not ws.streaming: - self.queue.put({"data": self.blocking_message}) - ws.close() - - def gen_params(self, messages: list, user_id: str, model_kwargs: Optional[dict] = None) -> dict: - data = { - "header": { - "app_id": self.app_id, - # resolve this error message => $.header.uid' length must be less or equal than 32 - "uid": user_id[:32] if user_id else None, - }, - "parameter": {"chat": {"domain": self.chat_domain}}, - "payload": {"message": {"text": messages}}, - } - - if model_kwargs: - data["parameter"]["chat"].update(model_kwargs) - - return data - - def subscribe(self): - while True: - content = 
self.queue.get() - if "error" in content: - if content["status_code"] == 401: - raise SparkError( - "[Spark] The credentials you provided are incorrect. " - "Please double-check and fill them in again." - ) - elif content["status_code"] == 403: - raise SparkError( - "[Spark] Sorry, the credentials you provided are access denied. " - "Please try again after obtaining the necessary permissions." - ) - else: - raise SparkError(f"[Spark] code: {content['status_code']}, error: {content['error']}") - - if "data" not in content: - break - yield content - - -class SparkError(Exception): - pass diff --git a/api/core/model_runtime/model_providers/spark/llm/_position.yaml b/api/core/model_runtime/model_providers/spark/llm/_position.yaml deleted file mode 100644 index 73f39cb1197b48..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/_position.yaml +++ /dev/null @@ -1,11 +0,0 @@ -- spark-max-32k -- spark-4.0-ultra -- spark-max -- spark-pro-128k -- spark-pro -- spark-lite -- spark-4 -- spark-3.5 -- spark-3 -- spark-1.5 -- spark-2 diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-1.5.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-1.5.yaml deleted file mode 100644 index fcd65c24e0f60c..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-1.5.yaml +++ /dev/null @@ -1,34 +0,0 @@ -model: spark-1.5 -deprecated: true -label: - en_US: Spark V1.5 -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. 
- - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: 模型回答的tokens的最大长度。 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). - required: false diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-2.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-2.yaml deleted file mode 100644 index 2db6805a2e2af0..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-2.yaml +++ /dev/null @@ -1,34 +0,0 @@ -model: spark-2 -deprecated: true -label: - en_US: Spark V2.0 -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. - - name: max_tokens - use_template: max_tokens - default: 2048 - min: 1 - max: 8192 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: 模型回答的tokens的最大长度。 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). 
- required: false diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-3.5.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-3.5.yaml deleted file mode 100644 index 86617a53d0d4fb..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-3.5.yaml +++ /dev/null @@ -1,34 +0,0 @@ -model: spark-3.5 -deprecated: true -label: - en_US: Spark V3.5 -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. - - name: max_tokens - use_template: max_tokens - default: 2048 - min: 1 - max: 8192 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: 模型回答的tokens的最大长度。 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). - required: false diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-3.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-3.yaml deleted file mode 100644 index 9f296c684db78d..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-3.yaml +++ /dev/null @@ -1,34 +0,0 @@ -model: spark-3 -deprecated: true -label: - en_US: Spark V3.0 -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. 
- - name: max_tokens - use_template: max_tokens - default: 2048 - min: 1 - max: 8192 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: 模型回答的tokens的最大长度。 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). - required: false diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-4.0-ultra.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-4.0-ultra.yaml deleted file mode 100644 index bbf85764f1c8c1..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-4.0-ultra.yaml +++ /dev/null @@ -1,42 +0,0 @@ -model: spark-4.0-ultra -label: - en_US: Spark 4.0 Ultra -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. - - name: max_tokens - use_template: max_tokens - default: 4096 - min: 1 - max: 8192 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: Maximum length of tokens for the model response. - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). - required: false - - name: show_ref_label - label: - zh_Hans: 联网检索 - en_US: web search - type: boolean - default: false - help: - zh_Hans: 该参数仅4.0 Ultra版本支持,当设置为true时,如果输入内容触发联网检索插件,会先返回检索信源列表,然后再返回星火回复结果,否则仅返回星火回复结果 - en_US: The parameter is only supported in the 4.0 Ultra version. 
When set to true, if the input triggers the online search plugin, it will first return a list of search sources and then return the Spark response. Otherwise, it will only return the Spark response. diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-4.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-4.yaml deleted file mode 100644 index 4b5529e81c0602..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-4.yaml +++ /dev/null @@ -1,34 +0,0 @@ -model: spark-4 -deprecated: true -label: - en_US: Spark V4.0 -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. - - name: max_tokens - use_template: max_tokens - default: 4096 - min: 1 - max: 8192 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: 模型回答的tokens的最大长度。 - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). - required: false diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-lite.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-lite.yaml deleted file mode 100644 index 1f6141a816b8e1..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-lite.yaml +++ /dev/null @@ -1,33 +0,0 @@ -model: spark-lite -label: - en_US: Spark Lite -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. 
The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. - - name: max_tokens - use_template: max_tokens - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: Maximum length of tokens for the model response. - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). - required: false diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-max-32k.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-max-32k.yaml deleted file mode 100644 index 1a1ab6844c69c5..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-max-32k.yaml +++ /dev/null @@ -1,33 +0,0 @@ -model: spark-max-32k -label: - en_US: Spark Max-32K -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. - - name: max_tokens - use_template: max_tokens - default: 4096 - min: 1 - max: 8192 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: Maximum length of tokens for the model response. - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). 
- required: false diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-max.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-max.yaml deleted file mode 100644 index 71eb2b86d36ac4..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-max.yaml +++ /dev/null @@ -1,33 +0,0 @@ -model: spark-max -label: - en_US: Spark Max -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. - - name: max_tokens - use_template: max_tokens - default: 4096 - min: 1 - max: 8192 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: Maximum length of tokens for the model response. - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). - required: false diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-pro-128k.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-pro-128k.yaml deleted file mode 100644 index da1fead6da940d..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-pro-128k.yaml +++ /dev/null @@ -1,33 +0,0 @@ -model: spark-pro-128k -label: - en_US: Spark Pro-128K -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. 
- - name: max_tokens - use_template: max_tokens - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: Maximum length of tokens for the model response. - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). - required: false diff --git a/api/core/model_runtime/model_providers/spark/llm/spark-pro.yaml b/api/core/model_runtime/model_providers/spark/llm/spark-pro.yaml deleted file mode 100644 index 9ee479f15b0504..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/spark-pro.yaml +++ /dev/null @@ -1,33 +0,0 @@ -model: spark-pro -label: - en_US: Spark Pro -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.5 - help: - zh_Hans: 核采样阈值。用于决定结果随机性,取值越高随机性越强即相同的问题得到的不同答案的可能性越高。 - en_US: Kernel sampling threshold. Used to determine the randomness of the results. The higher the value, the stronger the randomness, that is, the higher the possibility of getting different answers to the same question. - - name: max_tokens - use_template: max_tokens - default: 4096 - min: 1 - max: 8192 - help: - zh_Hans: 模型回答的tokens的最大长度。 - en_US: Maximum length of tokens for the model response. - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - default: 4 - min: 1 - max: 6 - help: - zh_Hans: 从 k 个候选中随机选择一个(非等概率)。 - en_US: Randomly select one from k candidates (non-equal probability). 
- required: false diff --git a/api/core/model_runtime/model_providers/spark/spark.py b/api/core/model_runtime/model_providers/spark/spark.py deleted file mode 100644 index b3695e0501cd8a..00000000000000 --- a/api/core/model_runtime/model_providers/spark/spark.py +++ /dev/null @@ -1,18 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class SparkProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - # ignore credentials validation because every model has their own spark quota pool - pass diff --git a/api/core/model_runtime/model_providers/spark/spark.yaml b/api/core/model_runtime/model_providers/spark/spark.yaml deleted file mode 100644 index 3b07b30f2472be..00000000000000 --- a/api/core/model_runtime/model_providers/spark/spark.yaml +++ /dev/null @@ -1,46 +0,0 @@ -provider: spark -label: - zh_Hans: 讯飞星火 - en_US: iFLYTEK SPARK -icon_small: - en_US: icon_s_en.svg -icon_large: - zh_Hans: icon_l_zh.svg - en_US: icon_l_en.svg -background: "#EBF8FF" -help: - title: - en_US: Get your API key from iFLYTEK SPARK - zh_Hans: 从讯飞星火获取 API Keys - url: - en_US: https://www.xfyun.cn/solutions/xinghuoAPI -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: app_id - label: - en_US: APPID - type: text-input - required: true - placeholder: - zh_Hans: 在此输入您的 APPID - en_US: Enter your APPID - - variable: api_secret - label: - en_US: APISecret - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 APISecret - en_US: Enter your APISecret - - variable: api_key - label: - en_US: APIKey - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 
APIKey - en_US: Enter your APIKey diff --git a/api/core/model_runtime/model_providers/stepfun/__init__.py b/api/core/model_runtime/model_providers/stepfun/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/stepfun/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/stepfun/_assets/icon_l_en.png deleted file mode 100644 index c118ea09bd8a84d7d43f03f6f5bc15c9a0c67e95..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 9176 zcmd^l`9IX}`|k+Zq9T-uWG%8YBC;=oLAGd$EW=E^?7Ol?A<7y}H1_P&7=)CvFWHwF z`@T)G@8*p6`}6*M&i8T7=llcb^~20-xvuAZEzkR!`+i*$X{fKoNY6nJ0)ZHz+UiCi z5G6o9Z=t0mf9kRB-zI-u@zgN)GUY!KW~z}f*}gs`^rb8AH?khN~0 zOw2vaVS4hmE`X@@sf?&E;7Zm8ffST{U9D{$5uV&O2nUq2BF|b)Jr6g^PLb!n3``v6 zs)j(KwEf)?#{T*yw*HQ`a&|mQcexdO<;eg5!qb}D7jSa+koQ&O`3qN`e13XcjEDO# z5l=@&p1+MUhZ%CKxws>^);^|=x%2(Z=|mIui?m7iabb9Pgi*{F&`fvQ6DK$7k39S2{}1AF>y&T zNl6j1gouZqv!}JMh_eUpKM3jw4_kMXt0&6Enfnyc+Q!ApQ;~;k?B8AhuK$L0_V`z% z$T1W1wRRPg5EVc5>K{QE?Efta0RNWu@H9gF$KL-du!o7CD?-c&;o;)tZcEOcJ@4sI zuJUT`2y0IlcM}&Er+)%zh;;FE@j$w`a!X2!igVw9S=*wVPw(9PO9cj#hdO(BT07e! 
zpz4Y|WDrpl%1&NFT|!MlT24wzO&lU2p`ocFqb?&SBP)4ZUFP;}Nfp(9bk$vKy#R!> z=Rdl3|IwBHuezs%09?r?>>#9K=jah6si&m3MST`8M*ggC=!7kT0nz||I=g!O=;%nmo5bZx zO8PXPUEUgyx!Bn^hHIK_>z}P@nXpdyy|l8r_Gd%ZX9-)NJsYNluB7he@je`&(Moh30lkcz(apS2>*F8;BJB|IM?F`-C0 z0Rw8G8$Y_H3%|{)gdYieZe@P%JvcZVn_9AqpQ&r?9~@t8>Y0T`Y~#o0TUK{hR)164 zkfvvsjG|Tp(EUC56%psTn@)?NS)+qvi@SUKJG*-d&(_b{k!Y<+VL8*wtLuJfLTq+B zr|lf*0ZH0#Yh!a;KXSt&cC~kCg2Q2p--86QBbi3d#pI8=CH4hk#(WYxKnA4IiMi={ z;*+GQ#`dA{+21xvekA$CGYU*8-3p|*Z znrE~j4Nnq-KMW*&UJ1?r9g#mbIkOa-*H8aw8Jec43vy>WK0&DL8l8DP>;MZy>K~@RHs3^uijYC8_k=Sl95PWzrrkiuAFfYh-ll z%tI2N`$2W{RC?v?2kgkm#=&{SA;^*hf|EdxNT7!#kQGV)S)aJeOk{e~4dmFb;RQzZ6x@ul=Ag()$4A-h!UWUBW%W(J=v)y{F7bfh*Lsmk;|_D52U?xyk_s3 za9K?0rot3@b-{e*JmUqDz5`p@I?FrwyLj$}+d@V=)B&1|p8rmCg$vr4Lz3cXRSD(Nh@uSrJ?;P%%|wmbha&UPTJfn$UqL zXbvPga~^+u^o!~|kS^@d_&Ddm)vU?DJiurK_pDU$EwN!>1;-WVWH5Snh~&ti(%a$G zh|2t=Z55>`rHs2bu6AJTLgMTm%q=ljFuxK<*EdFU<(#rscTT{2@8$@f%BwInYY}eXP78>6R`WFCH{#y0D@Q{s1v+Oo)v8J)Qn4p3Yc3 zwOMBj3!2__s;csxJ<-s8jAqn-3tl``ql_alzvFwg7a-_JhXuW8h1&kf)K5 zZ~Q1+oFAr`SY351>FWzxQp7%;@uE?=aPG(CW!L54kKpls&c$4vQ`APm^>O2LYj5a`J*?}vgot@1f-_K)n zs?6znww><981fHCWqxC9og1gU4l2C z{()uVWtLZu_te;EM1e$rj!cUEMBKwv_&~-)-Pgcl;gw%mRTm#oCPM_}=*e^7DHMGC zZZ?gHQT=-XbfGsYe%GdOEP0LcE$;i@{trjT8!r*>@w5!UKD@qu@8JT?EkK{}@YK)i z05qQeP{^Nm{-krB;cd4->FPjmPVp`3Q(P^u=?$+B1PgP4N}Qj5W>u;)6&G-+@l!NU z?yp`|aO<+D(H+3DC8p?`fzdmNW|*9*rPAI_a_I-`9b}#99@wMjREY zRng1M^iwWtFhibDdbb5@^Dkr$kaMkmJlekICP5-{S0p@YH^z@@UT?q4 z3!I3}Km{-(rNn_W+H7LNAJb2UnVWGUI?eWmkS2dsG}fa&4?Cci{PL za?FW|TqihC;EvL7>}Jv9w$`FgUN6&*)-i0Fl?05w#a8oG$}>khuYXRGvul2pa>Qc* zqbFi!RftE6|;@zK2^jhfp z-#UX2eS?$!*_XoVF{u>Ah;Qp{ytb7~y}3Td%7HaSZ@>LPho!y_yr#b(*jm@HrB5fR!WqhvrdEYowPLs>f)XB_)-?PBnGdAQ z4r`TVYJppjP!X$xdpoKfVUFRfYfh4IgnVq8pD4K3X3`D2X4*I(tMmt>GuhNDD~`M4PuX;k7etigjLN5 zw5xhKs4AB_ix63}yKF))TW?cf_ah+Z;kdjIve~)_Vl4gXrU0?<1Bzv z@`qAUao%lXSAongysK+dws$M5s5fHKR?Ak%FRLz;_;VTrr$82UZ|CEq`=Mc3!=IPA z{9_Cw(Sj0{M=%12oCXhF_$_AbXyPzE8Ut?Czkk^8;MhO46X2P8+&DEYZ^cLs0Rhx|ABL-VCT^@oUPXr{y*);1dO*qB 
z+B*#sQe>oqmL%6x_+KjBOQTZynKaGcD_$IZfa&zXZp)8O(8wQ%DWlKeHF-H>LS+~~YU z7f-%XY%q>$AU5(fQC$OAbmOBv&Bl3OFk$x}Dx;-=ma-W6UU4V1i4bx`Zwh~{&L9|k zoHvwo7{?`CPr9N4brjH3-Eh-J*r!;ewu&b1J?*y1Qo40#P=J&1Xvh8<>iBnAueM{ms&bvo%@?be|M(f* zvZFFv5oaB3KNPjF9Gu|6jyx%DrJwD}=4T>j)>w^jhZg9kw8XR&jA0Yf#+MNT=?)AB zYjf8IqmjJNNBVm&?2P`Dk1YH|r-coAs7@Av1RffIvyeRel(n@C=&eN*tbynP-L z+lJ#KBlhWozra0>JVz`-;x6?zff>Gb>--tcDKUPq(G`U4yS}Bi)a;+(o={Igu7uA} z49#67k(Ld*EaORiNf6PUiRp^jXCLq2Yp3S)Pfk~kTR9^8x~skujXE5ci?OIn-Sc#S zY3tW+A2UqnF}*VNAcNE#CGcGXKB0K=ZuhrqXMo=*LVO6sdCL_8y9o%)>(z;hM}@D2 z8x}Ay{>neBfZ!j?=DQ(h>{H8|fIHo}uW++>_!)!vmF^b+Ez#Y_Yd2u#y<6D|7?Ez& zOjmCdhb*SkFVZ*tB(M#Cd%Ml>pE0Pe5 zdy^fbcK#qI7f&uBTxC-S6@Xx>vdMRjDe2YdF}xHFnPSCV5ZU8+v{}wnS*r;Z;6gd-*$YA ze^Q@%vrv}w3P2zJsdxA?AUn~HKwUBl@4R=qP)UJ!K8U}1#T;85b&u3yy31zUs z{2ch$sMmO)!f+_6#9^?ONO;4iqxmu(+xTq5uCB>@_8LTeEqyop>z9PwM4Zya824~- z;Gw8d){D6-$&ibiL96p6?9|V0dcSfQT-83y1ay9BkUg_T_276e0wC*6W!FCkBZ{B41UlW{V6lBe-Z z!(r9!CUKH4ku}#PTWrqz+H}ZS=>JXoq10rtm~S~ic?)+m2dsfheZiiJwRngh@H#Sc^rfOq7+ znt#Q-&|senCp4=k**PncJw}D_y4LJQ5j#@t7p~Tj`4)EjjXMkW{osS{AR(1VEx-wYKJXNs$yi_k{hioV@d`_bUdX zp7M>0i~-$3;q-@5R=kV9qgZgu(Yu@HR0&J0z?y0-C0^kjKlA~@1kc>m%-CS}HubJv zl7uZbm&Kw{A0{c;T@JXZHu3mbNqC5u=0t zGM3Hi&cacIYa$A^D89%E=K>fElCF<27L0P;z>b_OY|5HG@u)q)J}RDa5NZ(80uyAF zkt^<0`K(C-in;s_ygeExP2pDo+2rZX+Sz4ga<&u}D64Y-Nu^4??8$={wep&^EZkUA z3=OA?d?c4dLrMSRRy12@k49)1PTxZH)=Rn&RX96uH`|MIuG-(@`QNq&O_Au&;itQE zV-&hrdHHoxner=V^`AfXe)_V<41NnQM|vGZg-`5@h{)j)n;zQSbMPpur#*-7&h1Ra z27_Y)-K~rLb-z9zZIHZ)Me2&NBBmRz1w-brrWvst7KL_N%0FKPV*sO@%Y_vgC!d|L z+MSA~aCgceSjf_kaAp9-%NF^?m&Io$Look6F1F=7#Q27`(xQA$Z4>x8?jn-n!W3hD zCvrwpt3bwXLn6>Qk)-jf?K~AevqL)o zw&r{!UBfJLr(!*LTvgz~s3teP0RF(#?pQ5MIB0PYd7u>Gh~Z3l{f#|??0TCzt4hzF^l}AHv`f;UrdEo&FxeIN35nF zar~a!qw5!TJINFgQp1W$;SzYP&f>FC1a!=O=JH&kxn=R+v8!RWvy!?v0m4aUngoKBGv=n=ygqI1FbVMHf8tTTCs{*6l0%(SFs{5?WiO4m`eIH?p=!0$4P?7%neKj+Ui=+)xkvT0Jc0o0WnP6Ls{9V7Isot#;$ z_7`HH`tK8}9SpnKlCCc+s={-%^H5ntFPD_-7f=NDRPF%@gJ{F6-o0Vj2ifg@t)U+Ba=!OMAJTtnK^+ 
z&gKDU=C_ocFFefwn?bf_+9Z9uN42x?pv%^u%29-9*k_|;zXq{68+1{fTEA621STJyZ?oUh)UNSMq_S#L({>UPVwaZAFE z1VCUw5SHIXpVGWY_vTZIx!8X7tIQMw?tS&xikbPQhbxBZ&xDB)+<2u?mZ z`{R5#YL!X$&u~lp`ggX;O<>jT{3_V^-LhE^a$GXY*tFG*YmFD*0I7O(WMhAy0uVx~ zsh;%a#>PW?#NE7>PI5Y-JoR79sRr?imsLH>x3(abcYoRtcgEa#x{9j;wTouY7fkm< zA-B=7*lm0k=Dyz1Wu)?GU>A!4Ot#Mc~hIgE(mF!&r4;&aVALh$C_Dk$X>Lk2a0Z zdd6|8usiG4b<400xLP{S3K8v6A{R=Uzt`ge0@_fHLf0ZH7$Xy4kJ0F_Z`@Zb+7BR-Mx86-@R{SOyut0f=Fl^R(0o~Nt zT3W*m11rXFvI1Rgn^Y<_oGP;$Y$+^%uFY!8GOQ2%!IkxzFt=eFuZgg%Xh_C-ZHK44 zcIo07q9B8`yYbukc*zpeRJ9ML{8J%$@qNWNbyp6&W3Q~OB+l0aOuE&|6k#>OMS;3Z zjlx@g_4$n-LcuHUVi=eNfU0q>`QWJb4x(dWU(uU<6nelv{Uy1CxJD&x$+X*-H#kzI z%b2w1{Ib`qqW6}(qN<+D&8;c(`;&vm;-`yeo#$tF?S0(FGl^S+VKuUr-H(svIqCLu zd`+(>nk&sHXwNdtBK~|}hGkDJyPW305%$(-5Wv91@S1`cDdx>u zEqE+>$NkRBoTtX%V|~!O3lM^RPVN%ljXWJ=Ut~|RlxZz{wbtVo1H;|L^5qQOx#xj( z-MDe;n%GS)pdwZ1Anp=0z5MBMPen@Zg{7U5P#HN7u{oTau&a+li4Ws-d;ycZI$)ru zblq34$LQ%IgcI5K0(VhcCi~JgiQ;gGV}vSR8`pJ7qI&HeKLDi`H5YnTmd>1=_i#)# z0;e{TIYy7?`+{e&b$57zfw2JhyDyJFpQitaqQ->X@wj&hR(8mHc8x=kts0R;>qIkP zq2)^>=r+p1f%TG!q}ltGyco6HI3hg;!Ykwft(sfvxp)06bBaqxzr|Q&^={$txo>Yu);F(oA?x* zF7mvhN_BII0A7c$LqkG(>hkTa#>8D>rfvIv=sk8hbFa^dY&S;>^z2_A)n-3~kHQO? 
zT+|!f6%~s1^&3eGSAZ_7`q84zOh#~>f)?J)!_C2YgBR0jj4yNjF?+A82HP058fcHi zJ$^Rz^0fm#$B32g(csc6-|d}?)vuAi_Mb=gh(4{X@N<;H8lTcEXLoKg4!V0Uptt>YX&)p4xSU;X*ZZSu#y`e1 zaXErJSuAVpg;J{0n5xCkI(~zcg0OdxwjRZix_q0g^%T(`ArK;#Som zsJB_x;CEGbZ-^%oxGeIwm5q(s0D+GLNK|dMXGcw}GW;hy`x$_%BYt$|_+??taju_i ze)7)CfxfgV7h^bdXTGZ2k&vP{$8~#ftTu;or-e0zf8B}tvDL~dczcd+rZ<@#x;*en zX3-|hfqJuyf!!Yfqb0P$GxRGP?RmV-WO#g`H=f0l3lB*aPfh$-MbUXfltYHeBe|* z)W8=>Rj>{L)~G-L)vxy0J*%8nCeG^8?<`;2zdlohpV%udgjF75oQBVI$zn%nd9{BU zNcLFkKRM8qw8A|Nv8s2kK8aT|K~!_Z49Te8N)}s73`k)2^n0<6T5&5fK*!!(yIS(@ z2X*j9gwyUoLa*Sfjd}svAxv?_2ZJYl3lmdAmHk$Mjv-a695MI!@kCS(u}00sS7m|_ zV*sX|F!@hEi`*&utGg8Pci)V>`1yV^iZZ?vD3M!(g z9aA4|@c~t-Xr+LIFs*jVIHfk!_j77|j*lXWU{tVH@N|PfTL=5McV_qAd*=JT^WA&S z?3Se@O%0-l(h&p+QY9!f)a>#;0lw6Gb7^J`HH{^e>0~ONP3CI}43Qad9fqn*+8j)S zX$_7At=MD)@nsp)(#dpnqC}6II9e};V>ekSHiAr!v0JqI`51}nupFaV%IrGW%tVa_ zDRX9|ny0qNv0P(90fD6!B&F#K=Ig}%(P9oiny(i`XKQsIq5_^kz~za!{3y0SAc=^S z2>Iy1U{VPQL$*Yth#yEt?WD|HlC(&;-2D7}PJRRjCvv!au~^LI3Ah3Qn?kUy4l}8> zv(479K?Mb7)e}YwX~fN_S5d3OZKRY*nSFi=lVwQOY#q2M>dv@!t%b|y@VuuQ1gh2l z4KaUo`iCvX$me`<+w7%$P0 zaS`!6xrove4)5hT*YHv=BIQfHa49ofZ*Bik>%kD!K;Gq0^HQ6k_Q%Xr&l6O0?|oDr zgdjc(QWDdY$ZUAp3-9ECa1r1;K)n{)J@DNrKye712e&%GF9SZX!sT{2aSfzP;l@4C zZw8+^u>UyJAA|4(;GYFC%VFATV3DwTKa|$PdjyD}-QD4CPdyVmrr7hS zo|WL&xasrLGj8v(`_Ng}*(-_N*OvNETt!=5%)FC@w_hg@A4+sz5IWAZJ!u**XvGEP zmks*fo`hEzXlya!LsNtrgqqGKyQ^^LC8fu?flkl^7{}Nw*L08b2>rnf)Q~$lrG0Lk zEHnA?#80!QhRbTl0-hLBb!J3}FRiMl^UK(f^=NV;8ZkY#!q#DXyj1V(Y<+#xt|FE! 
z%OUi9Pj~&;w{}|R35QKw`AsDwcyHL2mb%7b#K>~h&P4ZSF5TKguNvu!ac;}|ssdZH zP?kBS``)sQ6RzzNxf?3}s0gk8S$?27xaEfD`e+ZM!6q4SJ|f`Xi&1{FNCpw+-oe(l z%3KdCkGTuFf0Mbo3vjO6FOcR#dyy~@sgaeQ!@Ae3%u^oe3kjZ375BAftjD?Bm)4TO z$U0oz(D3$aGWuug=HsWM$1S@_0)yYh2qzU0;Q=F7NuA=l*z=c0&T?$P8|QZUKaM-E zJlgRlx@7!wZ%qFOCoek_I>vf9$z6DUea Union[LLMResult, Generator]: - self._add_custom_parameters(credentials) - self._add_function_call(model, credentials) - user = user[:32] if user else None - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - return AIModelEntity( - model=model, - label=I18nObject(en_US=model, zh_Hans=model), - model_type=ModelType.LLM, - features=[ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL, ModelFeature.STREAM_TOOL_CALL] - if credentials.get("function_calling_type") == "tool_call" - else [], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", 8000)), - ModelPropertyKey.MODE: LLMMode.CHAT.value, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - use_template="temperature", - label=I18nObject(en_US="Temperature", zh_Hans="温度"), - type=ParameterType.FLOAT, - ), - ParameterRule( - name="max_tokens", - use_template="max_tokens", - default=512, - min=1, - max=int(credentials.get("max_tokens", 1024)), - label=I18nObject(en_US="Max Tokens", zh_Hans="最大标记"), - type=ParameterType.INT, - ), - ParameterRule( - name="top_p", - use_template="top_p", - label=I18nObject(en_US="Top P", zh_Hans="Top P"), - type=ParameterType.FLOAT, - ), - ], - ) - - def _add_custom_parameters(self, credentials: dict) -> None: - credentials["mode"] = "chat" - credentials["endpoint_url"] = 
"https://api.stepfun.com/v1" - - def _add_function_call(self, model: str, credentials: dict) -> None: - model_schema = self.get_model_schema(model, credentials) - if model_schema and {ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL}.intersection( - model_schema.features or [] - ): - credentials["function_calling_type"] = "tool_call" - - def _convert_prompt_message_to_dict(self, message: PromptMessage, credentials: Optional[dict] = None) -> dict: - """ - Convert PromptMessage to dict for OpenAI API format - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(PromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - sub_message_dict = { - "type": "image_url", - "image_url": { - "url": message_content.data, - }, - } - sub_messages.append(sub_message_dict) - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls: - message_dict["tool_calls"] = [] - for function_call in message.tool_calls: - message_dict["tool_calls"].append( - { - "id": function_call.id, - "type": function_call.type, - "function": { - "name": function_call.function.name, - "arguments": function_call.function.arguments, - }, - } - ) - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - message_dict = {"role": "tool", "content": message.content, 
"tool_call_id": message.tool_call_id} - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - else: - raise ValueError(f"Got unknown type {message}") - - if message.name: - message_dict["name"] = message.name - - return message_dict - - def _extract_response_tool_calls(self, response_tool_calls: list[dict]) -> list[AssistantPromptMessage.ToolCall]: - """ - Extract tool calls from response - - :param response_tool_calls: response tool calls - :return: list of tool calls - """ - tool_calls = [] - if response_tool_calls: - for response_tool_call in response_tool_calls: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call["function"]["name"] - if response_tool_call.get("function", {}).get("name") - else "", - arguments=response_tool_call["function"]["arguments"] - if response_tool_call.get("function", {}).get("arguments") - else "", - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_tool_call["id"] if response_tool_call.get("id") else "", - type=response_tool_call["type"] if response_tool_call.get("type") else "", - function=function, - ) - tool_calls.append(tool_call) - - return tool_calls - - def _handle_generate_stream_response( - self, model: str, credentials: dict, response: requests.Response, prompt_messages: list[PromptMessage] - ) -> Generator: - """ - Handle llm stream response - - :param model: model name - :param credentials: model credentials - :param response: streamed response - :param prompt_messages: prompt messages - :return: llm response chunk generator - """ - full_assistant_content = "" - chunk_index = 0 - - def create_final_llm_result_chunk( - index: int, message: AssistantPromptMessage, finish_reason: str - ) -> LLMResultChunk: - # calculate num tokens - prompt_tokens = self._num_tokens_from_string(model, prompt_messages[0].content) - completion_tokens = self._num_tokens_from_string(model, 
full_assistant_content) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - return LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=message, finish_reason=finish_reason, usage=usage), - ) - - tools_calls: list[AssistantPromptMessage.ToolCall] = [] - finish_reason = "Unknown" - - def increase_tool_call(new_tool_calls: list[AssistantPromptMessage.ToolCall]): - def get_tool_call(tool_name: str): - if not tool_name: - return tools_calls[-1] - - tool_call = next((tool_call for tool_call in tools_calls if tool_call.function.name == tool_name), None) - if tool_call is None: - tool_call = AssistantPromptMessage.ToolCall( - id="", - type="", - function=AssistantPromptMessage.ToolCall.ToolCallFunction(name=tool_name, arguments=""), - ) - tools_calls.append(tool_call) - - return tool_call - - for new_tool_call in new_tool_calls: - # get tool call - tool_call = get_tool_call(new_tool_call.function.name) - # update tool call - if new_tool_call.id: - tool_call.id = new_tool_call.id - if new_tool_call.type: - tool_call.type = new_tool_call.type - if new_tool_call.function.name: - tool_call.function.name = new_tool_call.function.name - if new_tool_call.function.arguments: - tool_call.function.arguments += new_tool_call.function.arguments - - for chunk in response.iter_lines(decode_unicode=True, delimiter="\n\n"): - if chunk: - # ignore sse comments - if chunk.startswith(":"): - continue - decoded_chunk = chunk.strip().lstrip("data: ").lstrip() - chunk_json = None - try: - chunk_json = json.loads(decoded_chunk) - # stream ended - except json.JSONDecodeError as e: - yield create_final_llm_result_chunk( - index=chunk_index + 1, - message=AssistantPromptMessage(content=""), - finish_reason="Non-JSON encountered.", - ) - break - if not chunk_json or len(chunk_json["choices"]) == 0: - continue - - choice = chunk_json["choices"][0] - finish_reason = 
chunk_json["choices"][0].get("finish_reason") - chunk_index += 1 - - if "delta" in choice: - delta = choice["delta"] - delta_content = delta.get("content") - - assistant_message_tool_calls = delta.get("tool_calls", None) - # assistant_message_function_call = delta.delta.function_call - - # extract tool calls from response - if assistant_message_tool_calls: - tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls) - increase_tool_call(tool_calls) - - if delta_content is None or delta_content == "": - continue - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage( - content=delta_content, tool_calls=tool_calls if assistant_message_tool_calls else [] - ) - - full_assistant_content += delta_content - elif "text" in choice: - choice_text = choice.get("text", "") - if choice_text == "": - continue - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=choice_text) - full_assistant_content += choice_text - else: - continue - - # check payload indicator for completion - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=chunk_index, - message=assistant_prompt_message, - ), - ) - - chunk_index += 1 - - if tools_calls: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=chunk_index, - message=AssistantPromptMessage(tool_calls=tools_calls, content=""), - ), - ) - - yield create_final_llm_result_chunk( - index=chunk_index, message=AssistantPromptMessage(content=""), finish_reason=finish_reason - ) diff --git a/api/core/model_runtime/model_providers/stepfun/llm/step-1-128k.yaml b/api/core/model_runtime/model_providers/stepfun/llm/step-1-128k.yaml deleted file mode 100644 index 13f7b7fd264aa8..00000000000000 --- a/api/core/model_runtime/model_providers/stepfun/llm/step-1-128k.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: step-1-128k -label: 
- zh_Hans: step-1-128k - en_US: step-1-128k -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 128000 -pricing: - input: '0.04' - output: '0.20' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/stepfun/llm/step-1-256k.yaml b/api/core/model_runtime/model_providers/stepfun/llm/step-1-256k.yaml deleted file mode 100644 index f80ec9851c451d..00000000000000 --- a/api/core/model_runtime/model_providers/stepfun/llm/step-1-256k.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: step-1-256k -label: - zh_Hans: step-1-256k - en_US: step-1-256k -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 256000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 256000 -pricing: - input: '0.095' - output: '0.300' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/stepfun/llm/step-1-32k.yaml b/api/core/model_runtime/model_providers/stepfun/llm/step-1-32k.yaml deleted file mode 100644 index 96132d14a8251f..00000000000000 --- a/api/core/model_runtime/model_providers/stepfun/llm/step-1-32k.yaml +++ /dev/null @@ -1,28 +0,0 @@ -model: step-1-32k -label: - zh_Hans: step-1-32k - en_US: step-1-32k -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 32000 -pricing: - input: '0.015' - output: '0.070' - unit: '0.001' - currency: RMB diff --git 
a/api/core/model_runtime/model_providers/stepfun/llm/step-1-8k.yaml b/api/core/model_runtime/model_providers/stepfun/llm/step-1-8k.yaml deleted file mode 100644 index 4a4ba8d17825d3..00000000000000 --- a/api/core/model_runtime/model_providers/stepfun/llm/step-1-8k.yaml +++ /dev/null @@ -1,28 +0,0 @@ -model: step-1-8k -label: - zh_Hans: step-1-8k - en_US: step-1-8k -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8000 -pricing: - input: '0.005' - output: '0.020' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/stepfun/llm/step-1-flash.yaml b/api/core/model_runtime/model_providers/stepfun/llm/step-1-flash.yaml deleted file mode 100644 index afb880f2a40bbc..00000000000000 --- a/api/core/model_runtime/model_providers/stepfun/llm/step-1-flash.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: step-1-flash -label: - zh_Hans: step-1-flash - en_US: step-1-flash -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8000 -pricing: - input: '0.001' - output: '0.004' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/stepfun/llm/step-1v-32k.yaml b/api/core/model_runtime/model_providers/stepfun/llm/step-1v-32k.yaml deleted file mode 100644 index 08d6ad245d2dd6..00000000000000 --- a/api/core/model_runtime/model_providers/stepfun/llm/step-1v-32k.yaml +++ /dev/null @@ -1,28 +0,0 @@ -model: step-1v-32k -label: - zh_Hans: step-1v-32k - en_US: step-1v-32k -model_type: llm -features: - - vision - - tool-call - - 
multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 32000 -pricing: - input: '0.015' - output: '0.070' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/stepfun/llm/step-1v-8k.yaml b/api/core/model_runtime/model_providers/stepfun/llm/step-1v-8k.yaml deleted file mode 100644 index 843d14d9c67e80..00000000000000 --- a/api/core/model_runtime/model_providers/stepfun/llm/step-1v-8k.yaml +++ /dev/null @@ -1,28 +0,0 @@ -model: step-1v-8k -label: - zh_Hans: step-1v-8k - en_US: step-1v-8k -model_type: llm -features: - - vision - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.005' - output: '0.020' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/stepfun/llm/step-2-16k.yaml b/api/core/model_runtime/model_providers/stepfun/llm/step-2-16k.yaml deleted file mode 100644 index 6f2dabbfb0e308..00000000000000 --- a/api/core/model_runtime/model_providers/stepfun/llm/step-2-16k.yaml +++ /dev/null @@ -1,28 +0,0 @@ -model: step-2-16k -label: - zh_Hans: step-2-16k - en_US: step-2-16k -model_type: llm -features: - - agent-thought - - tool-call - - multi-tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 16000 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 16000 -pricing: - input: '0.038' - output: '0.120' - unit: '0.001' - currency: RMB diff --git 
a/api/core/model_runtime/model_providers/stepfun/stepfun.py b/api/core/model_runtime/model_providers/stepfun/stepfun.py deleted file mode 100644 index e1c41a91537cd1..00000000000000 --- a/api/core/model_runtime/model_providers/stepfun/stepfun.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class StepfunProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - model_instance.validate_credentials(model="step-1-8k", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/stepfun/stepfun.yaml b/api/core/model_runtime/model_providers/stepfun/stepfun.yaml deleted file mode 100644 index ccc8455adcf3ca..00000000000000 --- a/api/core/model_runtime/model_providers/stepfun/stepfun.yaml +++ /dev/null @@ -1,81 +0,0 @@ -provider: stepfun -label: - zh_Hans: 阶跃星辰 - en_US: Stepfun -description: - en_US: Models provided by stepfun, such as step-1-8k, step-1-32k、step-1v-8k、step-1v-32k, step-1-128k and step-1-256k - zh_Hans: 阶跃星辰提供的模型,例如 step-1-8k、step-1-32k、step-1v-8k、step-1v-32k、step-1-128k 和 step-1-256k。 -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.png -background: "#FFFFFF" -help: - title: - en_US: Get your API Key from stepfun - zh_Hans: 从 stepfun 获取 API Key - url: - en_US: 
https://platform.stepfun.com/interface-key -supported_model_types: - - llm -configurate_methods: - - predefined-model - - customizable-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: context_size - label: - zh_Hans: 模型上下文长度 - en_US: Model context size - required: true - type: text-input - default: '8192' - placeholder: - zh_Hans: 在此输入您的模型上下文长度 - en_US: Enter your Model context size - - variable: max_tokens - label: - zh_Hans: 最大 token 上限 - en_US: Upper bound for max tokens - default: '8192' - type: text-input - - variable: function_calling_type - label: - en_US: Function calling - type: select - required: false - default: no_call - options: - - value: no_call - label: - en_US: Not supported - zh_Hans: 不支持 - - value: tool_call - label: - en_US: Tool Call - zh_Hans: Tool Call diff --git a/api/core/model_runtime/model_providers/tencent/__init__.py b/api/core/model_runtime/model_providers/tencent/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/tencent/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/tencent/_assets/icon_l_en.svg deleted file mode 100644 index 63c7c8f988a904..00000000000000 --- a/api/core/model_runtime/model_providers/tencent/_assets/icon_l_en.svg +++ /dev/null @@ -1,13 +0,0 @@ - - - - - tencent-cloud - - - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/tencent/_assets/icon_l_zh.svg 
b/api/core/model_runtime/model_providers/tencent/_assets/icon_l_zh.svg deleted file mode 100644 index 63c7c8f988a904..00000000000000 --- a/api/core/model_runtime/model_providers/tencent/_assets/icon_l_zh.svg +++ /dev/null @@ -1,13 +0,0 @@ - - - - - tencent-cloud - - - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/tencent/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/tencent/_assets/icon_s_en.svg deleted file mode 100644 index a3299b920198d2..00000000000000 --- a/api/core/model_runtime/model_providers/tencent/_assets/icon_s_en.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - tencent-cloud - - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/tencent/speech2text/__init__.py b/api/core/model_runtime/model_providers/tencent/speech2text/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/tencent/speech2text/flash_recognizer.py b/api/core/model_runtime/model_providers/tencent/speech2text/flash_recognizer.py deleted file mode 100644 index c3c21793e8eb39..00000000000000 --- a/api/core/model_runtime/model_providers/tencent/speech2text/flash_recognizer.py +++ /dev/null @@ -1,164 +0,0 @@ -import base64 -import hashlib -import hmac -import operator -import time - -import requests - - -class Credential: - def __init__(self, secret_id, secret_key): - self.secret_id = secret_id - self.secret_key = secret_key - - -class FlashRecognitionRequest: - def __init__(self, voice_format="mp3", engine_type="16k_zh"): - self.engine_type = engine_type - self.speaker_diarization = 0 - self.hotword_id = "" - self.customization_id = "" - self.filter_dirty = 0 - self.filter_modal = 0 - self.filter_punc = 0 - self.convert_num_mode = 1 - self.word_info = 0 - self.voice_format = voice_format - self.first_channel_only = 1 - self.reinforce_hotword = 0 - self.sentence_max_length = 0 - - def set_first_channel_only(self, first_channel_only): - 
self.first_channel_only = first_channel_only - - def set_speaker_diarization(self, speaker_diarization): - self.speaker_diarization = speaker_diarization - - def set_filter_dirty(self, filter_dirty): - self.filter_dirty = filter_dirty - - def set_filter_modal(self, filter_modal): - self.filter_modal = filter_modal - - def set_filter_punc(self, filter_punc): - self.filter_punc = filter_punc - - def set_convert_num_mode(self, convert_num_mode): - self.convert_num_mode = convert_num_mode - - def set_word_info(self, word_info): - self.word_info = word_info - - def set_hotword_id(self, hotword_id): - self.hotword_id = hotword_id - - def set_customization_id(self, customization_id): - self.customization_id = customization_id - - def set_voice_format(self, voice_format): - self.voice_format = voice_format - - def set_sentence_max_length(self, sentence_max_length): - self.sentence_max_length = sentence_max_length - - def set_reinforce_hotword(self, reinforce_hotword): - self.reinforce_hotword = reinforce_hotword - - -class FlashRecognizer: - """ - response: - request_id string - status Integer - message String - audio_duration Integer - flash_result Result Array - - Result: - text String - channel_id Integer - sentence_list Sentence Array - - Sentence: - text String - start_time Integer - end_time Integer - speaker_id Integer - word_list Word Array - - Word: - word String - start_time Integer - end_time Integer - stable_flag: Integer - """ - - def __init__(self, appid, credential): - self.credential = credential - self.appid = appid - - def _format_sign_string(self, param): - signstr = "POSTasr.cloud.tencent.com/asr/flash/v1/" - for t in param: - if "appid" in t: - signstr += str(t[1]) - break - signstr += "?" 
- for x in param: - tmp = x - if "appid" in x: - continue - for t in tmp: - signstr += str(t) - signstr += "=" - signstr = signstr[:-1] - signstr += "&" - signstr = signstr[:-1] - return signstr - - def _build_header(self): - header = {"Host": "asr.cloud.tencent.com"} - return header - - def _sign(self, signstr, secret_key): - hmacstr = hmac.new(secret_key.encode("utf-8"), signstr.encode("utf-8"), hashlib.sha1).digest() - s = base64.b64encode(hmacstr) - s = s.decode("utf-8") - return s - - def _build_req_with_signature(self, secret_key, params, header): - query = sorted(params.items(), key=operator.itemgetter(0)) - signstr = self._format_sign_string(query) - signature = self._sign(signstr, secret_key) - header["Authorization"] = signature - req_url = "https://" - req_url += signstr[4::] - return req_url - - def _create_query_arr(self, req): - return { - "appid": self.appid, - "secretid": self.credential.secret_id, - "timestamp": str(int(time.time())), - "engine_type": req.engine_type, - "voice_format": req.voice_format, - "speaker_diarization": req.speaker_diarization, - "hotword_id": req.hotword_id, - "customization_id": req.customization_id, - "filter_dirty": req.filter_dirty, - "filter_modal": req.filter_modal, - "filter_punc": req.filter_punc, - "convert_num_mode": req.convert_num_mode, - "word_info": req.word_info, - "first_channel_only": req.first_channel_only, - "reinforce_hotword": req.reinforce_hotword, - "sentence_max_length": req.sentence_max_length, - } - - def recognize(self, req, data): - header = self._build_header() - query_arr = self._create_query_arr(req) - req_url = self._build_req_with_signature(self.credential.secret_key, query_arr, header) - r = requests.post(req_url, headers=header, data=data) - return r.text diff --git a/api/core/model_runtime/model_providers/tencent/speech2text/speech2text.py b/api/core/model_runtime/model_providers/tencent/speech2text/speech2text.py deleted file mode 100644 index 5b427663ca85b0..00000000000000 --- 
a/api/core/model_runtime/model_providers/tencent/speech2text/speech2text.py +++ /dev/null @@ -1,86 +0,0 @@ -import json -from typing import IO, Optional - -import requests - -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeConnectionError, - InvokeError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel -from core.model_runtime.model_providers.tencent.speech2text.flash_recognizer import ( - Credential, - FlashRecognitionRequest, - FlashRecognizer, -) - - -class TencentSpeech2TextModel(Speech2TextModel): - def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :param user: unique user id - :return: text for given audio file - """ - return self._speech2text_invoke(model, credentials, file) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - audio_file_path = self._get_demo_file_path() - - with open(audio_file_path, "rb") as audio_file: - self._speech2text_invoke(model, credentials, audio_file) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _speech2text_invoke(self, model: str, credentials: dict, file: IO[bytes]) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :return: text for given audio file - """ - app_id = credentials["app_id"] - secret_id = credentials["secret_id"] - secret_key = credentials["secret_key"] - voice_format = file.voice_format if hasattr(file, "voice_format") else "mp3" - tencent_voice_recognizer = FlashRecognizer(app_id, Credential(secret_id, 
secret_key)) - resp = tencent_voice_recognizer.recognize(FlashRecognitionRequest(voice_format), file) - resp = json.loads(resp) - code = resp["code"] - message = resp["message"] - if code == 4002: - raise CredentialsValidateFailedError(str(message)) - elif code != 0: - return f"Tencent ASR Recognition failed with code {code} and message {message}" - return "\n".join(item["text"] for item in resp["flash_result"]) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [requests.exceptions.ConnectionError], - InvokeAuthorizationError: [CredentialsValidateFailedError], - } diff --git a/api/core/model_runtime/model_providers/tencent/speech2text/tencent.yaml b/api/core/model_runtime/model_providers/tencent/speech2text/tencent.yaml deleted file mode 100644 index 618d19ac7c3b38..00000000000000 --- a/api/core/model_runtime/model_providers/tencent/speech2text/tencent.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: tencent -model_type: speech2text -model_properties: - file_upload_limit: 25 - supported_file_extensions: flac,mp3,mp4,mpeg,mpga,m4a,ogg,wav,webm diff --git a/api/core/model_runtime/model_providers/tencent/tencent.py b/api/core/model_runtime/model_providers/tencent/tencent.py deleted file mode 100644 index 79c6f577b8d5ef..00000000000000 --- a/api/core/model_runtime/model_providers/tencent/tencent.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class 
TencentProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.SPEECH2TEXT) - model_instance.validate_credentials(model="tencent", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/tencent/tencent.yaml b/api/core/model_runtime/model_providers/tencent/tencent.yaml deleted file mode 100644 index 7d8d5a186684be..00000000000000 --- a/api/core/model_runtime/model_providers/tencent/tencent.yaml +++ /dev/null @@ -1,49 +0,0 @@ -provider: tencent -label: - zh_Hans: 腾讯云 - en_US: Tencent -icon_small: - en_US: icon_s_en.svg -icon_large: - zh_Hans: icon_l_zh.svg - en_US: icon_l_en.svg -background: "#E5E7EB" -help: - title: - en_US: Get your API key from Tencent AI - zh_Hans: 从腾讯云获取 API Key - url: - en_US: https://cloud.tencent.com/product/asr -supported_model_types: - - speech2text -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: app_id - label: - zh_Hans: APPID - en_US: APPID - type: text-input - required: true - placeholder: - zh_Hans: 在此输入您的腾讯语音识别服务的 APPID - en_US: Enter the APPID of your Tencent Cloud ASR service - - variable: secret_id - label: - zh_Hans: SecretId - en_US: SecretId - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的腾讯语音识别服务的 SecretId - en_US: Enter the SecretId of your Tencent Cloud ASR service - - variable: secret_key - label: - zh_Hans: SecretKey - en_US: SecretKey - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的腾讯语音识别服务的 SecretKey - en_US: Enter the SecretKey of 
your Tencent Cloud ASR service diff --git a/api/core/model_runtime/model_providers/togetherai/__init__.py b/api/core/model_runtime/model_providers/togetherai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/togetherai/_assets/togetherai.svg b/api/core/model_runtime/model_providers/togetherai/_assets/togetherai.svg deleted file mode 100644 index e9d918b15e1abd..00000000000000 --- a/api/core/model_runtime/model_providers/togetherai/_assets/togetherai.svg +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/togetherai/_assets/togetherai_square.svg b/api/core/model_runtime/model_providers/togetherai/_assets/togetherai_square.svg deleted file mode 100644 index 16bae5235f5de0..00000000000000 --- a/api/core/model_runtime/model_providers/togetherai/_assets/togetherai_square.svg +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/togetherai/llm/__init__.py b/api/core/model_runtime/model_providers/togetherai/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/togetherai/llm/llm.py b/api/core/model_runtime/model_providers/togetherai/llm/llm.py deleted file mode 100644 index b96d43979ef54a..00000000000000 --- a/api/core/model_runtime/model_providers/togetherai/llm/llm.py +++ /dev/null @@ -1,170 +0,0 @@ -from collections.abc import Generator -from decimal import Decimal -from typing import Optional, Union - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult -from core.model_runtime.entities.message_entities import ( - PromptMessage, - PromptMessageTool, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - DefaultParameterName, - FetchFrom, - ModelPropertyKey, - ModelType, - ParameterRule, 
- ParameterType, - PriceConfig, -) -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class TogetherAILargeLanguageModel(OAIAPICompatLargeLanguageModel): - def _update_endpoint_url(self, credentials: dict): - credentials["endpoint_url"] = "https://api.together.xyz/v1" - return credentials - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - cred_with_endpoint = self._update_endpoint_url(credentials=credentials) - - return super()._invoke(model, cred_with_endpoint, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - cred_with_endpoint = self._update_endpoint_url(credentials=credentials) - - return super().validate_credentials(model, cred_with_endpoint) - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - cred_with_endpoint = self._update_endpoint_url(credentials=credentials) - - return super()._generate( - model, cred_with_endpoint, prompt_messages, model_parameters, tools, stop, stream, user - ) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - cred_with_endpoint = self._update_endpoint_url(credentials=credentials) - REPETITION_PENALTY = "repetition_penalty" - TOP_K = "top_k" - features = [] - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.LLM, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - features=features, - model_properties={ - 
ModelPropertyKey.CONTEXT_SIZE: int(cred_with_endpoint.get("context_size", "4096")), - ModelPropertyKey.MODE: cred_with_endpoint.get("mode"), - }, - parameter_rules=[ - ParameterRule( - name=DefaultParameterName.TEMPERATURE.value, - label=I18nObject(en_US="Temperature"), - type=ParameterType.FLOAT, - default=float(cred_with_endpoint.get("temperature", 0.7)), - min=0, - max=2, - precision=2, - ), - ParameterRule( - name=DefaultParameterName.TOP_P.value, - label=I18nObject(en_US="Top P"), - type=ParameterType.FLOAT, - default=float(cred_with_endpoint.get("top_p", 1)), - min=0, - max=1, - precision=2, - ), - ParameterRule( - name=TOP_K, - label=I18nObject(en_US="Top K"), - type=ParameterType.INT, - default=int(cred_with_endpoint.get("top_k", 50)), - min=-2147483647, - max=2147483647, - precision=0, - ), - ParameterRule( - name=REPETITION_PENALTY, - label=I18nObject(en_US="Repetition Penalty"), - type=ParameterType.FLOAT, - default=float(cred_with_endpoint.get("repetition_penalty", 1)), - min=-3.4, - max=3.4, - precision=1, - ), - ParameterRule( - name=DefaultParameterName.MAX_TOKENS.value, - label=I18nObject(en_US="Max Tokens"), - type=ParameterType.INT, - default=512, - min=1, - max=int(cred_with_endpoint.get("max_tokens_to_sample", 4096)), - ), - ParameterRule( - name=DefaultParameterName.FREQUENCY_PENALTY.value, - label=I18nObject(en_US="Frequency Penalty"), - type=ParameterType.FLOAT, - default=float(credentials.get("frequency_penalty", 0)), - min=-2, - max=2, - ), - ParameterRule( - name=DefaultParameterName.PRESENCE_PENALTY.value, - label=I18nObject(en_US="Presence Penalty"), - type=ParameterType.FLOAT, - default=float(credentials.get("presence_penalty", 0)), - min=-2, - max=2, - ), - ], - pricing=PriceConfig( - input=Decimal(cred_with_endpoint.get("input_price", 0)), - output=Decimal(cred_with_endpoint.get("output_price", 0)), - unit=Decimal(cred_with_endpoint.get("unit", 0)), - currency=cred_with_endpoint.get("currency", "USD"), - ), - ) - - if 
cred_with_endpoint["mode"] == "chat": - entity.model_properties[ModelPropertyKey.MODE] = LLMMode.CHAT.value - elif cred_with_endpoint["mode"] == "completion": - entity.model_properties[ModelPropertyKey.MODE] = LLMMode.COMPLETION.value - else: - raise ValueError(f"Unknown completion type {cred_with_endpoint['completion_type']}") - - return entity - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - cred_with_endpoint = self._update_endpoint_url(credentials=credentials) - - return super().get_num_tokens(model, cred_with_endpoint, prompt_messages, tools) diff --git a/api/core/model_runtime/model_providers/togetherai/togetherai.py b/api/core/model_runtime/model_providers/togetherai/togetherai.py deleted file mode 100644 index aa4100a7c9b4d8..00000000000000 --- a/api/core/model_runtime/model_providers/togetherai/togetherai.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class TogetherAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/togetherai/togetherai.yaml b/api/core/model_runtime/model_providers/togetherai/togetherai.yaml deleted file mode 100644 index e69471b15def73..00000000000000 --- a/api/core/model_runtime/model_providers/togetherai/togetherai.yaml +++ /dev/null @@ -1,75 +0,0 @@ -provider: togetherai -label: - en_US: together.ai -icon_small: - en_US: togetherai_square.svg -icon_large: - en_US: togetherai.svg -background: "#F1EFED" -help: - title: - en_US: Get your API key from together.ai - zh_Hans: 从 together.ai 获取 API Key - url: - en_US: https://api.together.xyz/ -supported_model_types: - - llm -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - 
zh_Hans: 模型名称 - placeholder: - en_US: Enter full model name - zh_Hans: 输入模型全称 - credential_form_schemas: - - variable: api_key - required: true - label: - en_US: API Key - type: secret-input - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: mode - show_on: - - variable: __model_type - value: llm - label: - en_US: Completion mode - type: select - required: false - default: chat - placeholder: - zh_Hans: 选择对话类型 - en_US: Select completion mode - options: - - value: completion - label: - en_US: Completion - zh_Hans: 补全 - - value: chat - label: - en_US: Chat - zh_Hans: 对话 - - variable: context_size - label: - zh_Hans: 模型上下文长度 - en_US: Model context size - required: true - type: text-input - default: '4096' - placeholder: - zh_Hans: 在此输入您的模型上下文长度 - en_US: Enter your Model context size - - variable: max_tokens_to_sample - label: - zh_Hans: 最大 token 上限 - en_US: Upper bound for max tokens - show_on: - - variable: __model_type - value: llm - default: '4096' - type: text-input diff --git a/api/core/model_runtime/model_providers/tongyi/__init__.py b/api/core/model_runtime/model_providers/tongyi/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/tongyi/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/tongyi/_assets/icon_l_en.png deleted file mode 100644 index 94de01136a64b6c8fead0003f048fcb6058f07ec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4741 zcmV;05_;{4P)eqno2SD{W9>V$jL)$f8N0RaOXy(=anVJ>6aR?*CWyqpG{Qr)Q=) zv^n)bbyeS2Rd;{)zu*7gTQvX<95`^`;KG1G0kCjEa6NFj7!Os(7~p?k+}QBVVHtD% z!=_CGJ_QF33JebY#Qgc~S8GBp5HWa13Xnob5CW2$6mkq#qO0$|Te`u4g94*p1a4@M ziWZ5j6dLRU&Sps|Pe>xIlpqcA>U$)0gQ5ig792PzF#1K{>ayS@r0FeDjjeq0$?Bt^ z!M_>AO^}xv#}Wo;z*d6;2L(o-HSmlXpNuFgso1Ybw(V!n4SI~y8XIM)TW$Lgb?+#% z4(6QcXW|OnRicJp&TOq5pdvH10=_us^NkFH0|!~qX9Cau-l>NqXLCeH_x0QNU2-Oo zKJTB~CZp}$gU%c`lbD=4Bybn!&r}qkt3mok^gO^;fiu-*&76Ati 
zY@k9PFl+j!qcy4B36kGuCNCkPg%2v=4sb3<0a;#Qq%*GT;W7cNOpi{G#7OoZsf!s& z<*158B1^q@EsT|f{s)s^1_uso&?gP7NwG$V$lAIMlyM#uDly*)zf5>PBPxg?{C#%7M0~_?3z%!2D}MB&6GOp)gEZJWad*`J_GTRZ z8yHiB?r8xrozaLsm@|Hhi)q*F*mLc1bDf#fKfM|?@7oO2fk-&4rV#N=kqET z`~Hk$^Ud{zc zAoiwlYac2<0R=+!MOQXL{}2eCJmQD!?R)SGn~dYiNc>P$@#If%^XIBZ7&B%Je100n zf+EJ6lBr%GTb<#<>!3?(ug5Ph%CmUQ z@8i*%cIWhXybuhZr1Bn1MmW4L7z_lVcb`kIH+HNi*m~roCw?7ywP}e$pax}={^`y8 zkUFD~KrP2VA299Eq481313O+E`4sM+f&ydiT7n-O0bfF4u?_kMe|)n5 ziR;DPN}wNt>9LjcA}g`XG2E|0ay)C-@aLft?Em5VcsmB9Btl&WSrBt~F$nWTU?zYf zR^h8HfvY`Zq3Nj?SAx6jUToazC^KgMJk9ZI4Wu7HeE`M2`zT(#Y5U|g$s48Za<`3kxbAJ)m%O7iuZI&oevlrhh=@xPk6mvy3Uc%)kPh`hn=ja zC{H79N3qVVL_#3ohcZ^~$3treWb)o@pJymqU!g^)EptBg@Kw1+mRv{@xUTMZE&SlQDH!dvjsN9{x+ z>S!KIhC#wrsF}xsri|0CHQl=EzBJpKI5+8MsH$qipJt=A6swQRs3fvlUm>!}jeL;O zz*_hbWQa<$V@!`Y7<;^!r4uLaeD<~1rWIO$=ERB>p-a29=);;KSFC@u>=QT-$Tn=@ zmv7Zuj0N);;_USFY^>L8dLO{ekL|Z$r`T(;fX2}Ka!<}(90|hr>c@{;Uk@J6vSg69 z?hbz^-BCh~>_-PG0A?M|SPg1Y>V!Uedv9YsG;Mr#Be>nvrYBJf3`^@zR5Pq^B)yY1 z2Ty&x5Bh;5fzkSoVsYV~A~D4s*a#-eS77pzY>hND94xb@TnhPM*RD&uvgLK6j!~ox zaiP@W7fe0>d|2RuAZ~rVY00b7$eRRp2COp$t=@Yk{YgVFMl3R&LR<8X@H$M^*9;2# zOtJIwgMn5U>KU^t8WuN!_4EB*;()9$TGzJdmx5LsZZ(zhk`<=wBWF>B!|7I^e$Rfd zP)te)A+!fkHfXwZpe-RIL=4FtnRDxey>;0diA9usv2HaaBJPlk4hS(Vh{Wp%sn8X2 zu~^8h$WWQeLjy`mLKW@-p~|xI(7^KYU*@{7rnML@W3UFVY{8;s?UzF_L$Z3C)aG1O zDpVf8&z9x@QOGTC5E-}jMoNUC3yiRy1Q=E<#MRCv#4H_EM73k>oKJt zg92DzLXyC*zV-b!B=8eL;(&pc*N8-Dx&_vWEVTL(rOnRLfUB(h5M$y$jXN<6;zFFr z=6z!BSOD9-ZCmZHd%8HEueL)nzJWzzYv;e zA>wL8T#>NA`c{V48LjZPj4y!}^+>KhidS|DOcppr-njKO7Csz>m{^-{RH5U{$G^au zZ)uFL_4#B86eDCt9JTLGr#;D1C@-Pd?J83WY3ClJ65ZYHq!TmBq!Wo|^DtEQU7Bdg zu^qLG^~UAKoV2i7dbvRG&mT2GfsnSC$0sz|^%#=%3&R4(bPH_AiWm`ayaE-|?HX!n z`Nn6+$qhPriMWuG3@I|}Vq#143qy|BwXtwvaN2?eXDlAzmM!C>QYy>Qjk}>s>zoE( z1<3Qa{F59jv~)yZe=?CG18(h1w_Vb zw1Wp1wo1^pOCqfhhK$gOEObi@OrS8|d68XXE_daB;O;0$vkp#KU=+)(JQRM~uI`<8 zel|!7xsh{aMMHzt(r#E+d6)_I9JY1;^5q@Fp!g85yi2Wn$X>yy;mOF*v|tZa?JXU~ zUZY*wqHJ|)h?r6aGBQd_3)~@r&Lw84$1mwvbtb6aV9At 
zMktIewyTFyqC=DX5n-5LFgjeyp+iCRNU2j{_($(j|kprwZp(JtB}X6Ba!Eht{6zlBMGnRpw5Ss%8{kJc1_dL%cEQ<10}iwiCG)J1A|`v ze&Ic5GW2e#Qq_#RBZ?YX0mX!Hc%RLXxiVK3jP*2v)%sCy@73^{#c9e4;o^4Z@aF1l zZEa;Vb#0XYi>p`?jQ6nWPDp zyI$OV`ETsXR#u+65qFc5iX&b_VaiVy-FJ4!;-zPIEP0^)`K8O-UkF9dHDms8{08fx zh@e}e%pucTJ?x6TcHX+as!Ou;1up@xv0Ax6*3vbIlhc&flyVk*N>i?T%qz_jxa5*C ztV?vzb(-^f-OI2w~2IvVUdya(mVEor(e7_>Ttd>E1qWfo1 zYzT*&&>^x0?9s78vJjE|`lN*souEmNv1w7nJ})VxS+HTlu4(Ru=p<}2XU8TPG3aYI zw2Ag0YG+diVo(FLw3C)^vOy>r!UX_`;2N zE+o>vQRXgbw5#9Ja@}Dp@g&mzC%boFVH-B%T1^A0ked@$7ji|06M2jskH%b&L6IS@ z32CY|V9B7FFUE|8>5MJPvV?JUe@L-l!4wp1;W?1Uqy%Z_GG0O2+3tKE{TdDV(`d6! z7#&CW4iph=34Hza>n{>o>;Y+-{*{TlE(uoS@!)OVy&k)EIzq}%!;v9d?DFou`^-SJ ztSd8`ONTWRXO_jku3zIm3Pp&x1f}8ij#z(SOJJ$dIt4jczn9JIrX_|w)9{C!9t1AsL9n@R=JnUF?R@2xi$iwj z2nAsYR=yZ|0lbz&-7$41E8>-DaSuMRR@iS_h%_jzF=q0*Ld{CiRBnZeQ@JwhB!cxi zaYAJ5l^6NISf@4m5=PFLhxPUX-qg-a{ zCyc9)xfFK?-GNE}=*CS@cy}~9a4%_J$OAKHe*V{rP!BSuRMTC@JpOp8#9hgWJCQBN zoONRqc<$qCsMKyal>Q+fnu{_1mk8)lsAfI$OcUbLbNlGD_m9#uyx|lRU;uk z#^867<1q%J>A->6{km~;=FF4jOq2IxvnhSWEc?@FVc)^?M~z4Npex4&Ic~oW92vL+ zzG8?Izz;=^!{`h`mYY9|8MXwDYNhL4YHS)>$kE0V*D3nI-i0u{1pS1FbWc!}k~?fO zrT$Earp=oNt~*b+5IOh@gRKT`ZCxBx6?H8VS5g=a_aSuTo;+}9Uca8n(9rOwD#rOT zJghWsuVP8-t7~lzM>$uXamec+Uu0iXz~u`_u=kCp>+oNlJQdvu{e%mYJD6nOGTxcw zcY?%1KD>EzAWyJdnciaWvW=#EX99IS$K&xBMnPPP#J?~hF@AU-{)fj960C&3Ql19%SAHUx(T)O#T_5~-}H)c9WfV-e*xt}LsB}> zUg3!+P#(v(|F~t#pbx--gI=QG1SU+c|6nwRFMi1s^&U{vh$591ByMSnz)Et00(rdd z>7Nf;4h|gj0q5n;hL`sJ&7W{;4Xv=UiVIkerzzD>#f*uO!~;|?TK31TXlMgCaL`Ab zw_9_hBDUik`YDtP*XYmtWn9Bo4Uc2EMKGyN3b}0icIzLBap1s#0|yQqd=>EjIKQm{ T^40aD00000NkvXXu0mjf!iXSF diff --git a/api/core/model_runtime/model_providers/tongyi/_assets/icon_l_zh.png b/api/core/model_runtime/model_providers/tongyi/_assets/icon_l_zh.png deleted file mode 100644 index bd8f2762d18333f9c9f945abdaeadec98cc4e8d0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7052 
zcmV;78*}7|P)HplMH>+hRmGg&stZnELf{lUx3y!YnKn>XM0zT+KB6hQY3{)f(R}%REa@q#GdH`Hn`R86%%L8sjwMCfad&C+v=9b~{8-g#IK&rH7*&g&&+> zT0Bw0`LPLfGB+cmX(2@r!9_x)8D!|a$Nyg{;z2FHsLTANjdQH{;lDZE7qa7h80(6i zN!WM?gA^zVog+OS*H)Tp81~Fae66xvQy`B7{mtn~cse6P6jKBdTm(3%5_n+Xq2^xc zeTby;l$|*k#ooeoDn%bq6xobea%e`pP;`4FRwEIsd$(8x^YN$Dl|9M%_!bF9kvjK| z@I<*jMf3+0fsTxf3|g^bMXbl;>F08}mh9TK>nG89q@|_N{{8#gI-SnF>({R@pbLab zNs#-}4=e&DCl_gXcV=d`E%kvO9DM2#Fu*KBYWPVCOOohPgG!`^?OaiyZ0303*GLy7*?g7=ziw5JUjvcHD#B(e?CP$zuh zN9;D6LD{_@OG5g5 zoYtf&Kx3ab(Z9a<;)|CkbO;cJ!%qC|+O=yABKsJp&CRGhRaGC_v}seO|IOUv?%TJo z4(iYa;hXK-x9_JggBjrFn{RgH=H}KwXOIA?Ruj+SHfsn%zJiYk4RGENd7I$;N-EJ> zKE=EJs-kCHfBp4TSXg)~zR^3ltjBTOY2q*(494F~rlzLS*|TRGqAl7`6@a>X^jnB~&7OY$JwHrQ6vwkLKS&Rz6k(}AqJ-e> z45FAeNFWAMtaw33y{`QBQ>HXHL194W&Ykba2YZvukA?GXX!Kk0!A|lu2(cyzaR(V6 zxJp2|h4_%M!R7~3B;#-!>U0N)Vh8S>W~HGQR#CTykv7w+JIenSgxPD`wrvOOR}wSP;~7ygF)3Nd@Yn+8)>NjK0Jr;0)lEw;&*xs-)yOw=V^p#R5f@||GzeG z-h6mwW~P@T0@@%0`fa*J^U5?KR86N)ViQ5ospD#GMQGP4qF|Uma||AMsI4ULZ#9i2 zhcs@}6y3Jq0Yimt5ZD&1Z16`Q-#O{_2y7EF2HuT0wn21Pu`Y(Fz8aUv4e$(w0qE^A zi;Iiz#c5B{pD8*ZJQJ?+?ZCSRt^S}VbmZmbt-)yv)F%e_9&O#a^=p=$fu7WhJa0l} z>4nR_X8A4Ab4~z}47W=gFcUM#b$rU@Av6bJvnsi;ygH<(r}Mo`=! 
z^=p-hUXL>xQg36EAq-fVnJ=tP z?Ds%qU$kn~Di`)iPka;ydO7_boFj{j0)aHfaUYJp4g%+m5H||Jr!sJ6-18N}bHUat zJ3HHhI(&)Kwq{g)hW5=`(*;UD7Cw`}PgR1diDy{2#A z@CJn$!-h#`?S^?DE2Ow`YL(yHgaU~69i2LLdIb&ssu>l9_kXo}_wIK!~_;+YAx<_+}Zr)t)$ITRmgB$e0&j00S5K|#Sn^!_0+H13;+^6usy(~KGc zd*WkD>9E$Y`=c}XxXx5uOsa~ zTwY^Fw*y^2jN^0|=PcE3LAr1L8|uCs>dM)QohOBgrcHkjsjtBi0#c=;jhdXOq&kd&0qof@5d1o}@yLHZa48)%c#QWvkv%udgzi-6G?N9zfX zcpBp;bEZBY^(}=NP-zMwMOX!5hG6C!1tMG7qD717TD5AG8ruhfWwBQZfkk|6G9*TG zzw{X(mTzz=p(-^1Q)&1JC;aWT^1u=Y_p<(zux<@3WYq}bB{$Dd$9OB z?!og{aXK6f0yfqVTd`d4#lbwERT{RNCQX`z$7Dz zbEn1iD!}(y1J!I6B*xyv#KeDFlC`?vd$T%qPX5xXd4>wzu_a91Po(5UCGv;)16>Tb z6YkZ77Vq>r{$RIK!U;%`Y^X}FqBmT)b?esc9M`d9$9wQ0-m$4U6#^fxFY@=b8>uLR zKKS4RwhMYw2!J}W5Gu~&Wc1*Sd!bgn3=h<9D-Rp5EnBu+0V0`)OE-FQ9uLVeU7pvY zcWq}Y@&~-w`Fym`1CSUi=mMaG^}U4?B&!e193;p( zqP0f%?78@_AAg))W#c#$j2q`_Qsi}y_o{N-w3lKJQZ<342vvjk$^wY36NLyiUeP=M z1;mhHGHEOrU@VS9ZRK(gTZkU?$+~sx7P5hF-mCoVXFnV6SL9n(`sEJ1_f>r4ah`(* z57uE@FqHeja5<2suYtk74ko{kK_ESvS-oEgldyAFG!!y^9HK{+RY+ReIzK;u6AA{I znN7+x1kb?;RHbw&np#iy4~y_f5V#DvH!n3x4(Z{BS78BBwWq)J zEpaR`)vzBGPMkQw>K%{I+!e}w?3zZKust<8nEXB!0)vRpYMR@NKzi_+N6iD~DSq&_ zoBokfRw15Zb*ZPw;nIquUD6%r65cZ)?0lDl3S3U%aYd`r73)x3aZypOgy=X|Vr;xC zDL(#RNE1-ejveI$B zfsK26It4KDLj@c8DA(RQI#G}5@UkVAbn= z9?ypbs$2#Bx>+~WbQ!{(BKwyya;hq$Qd3txn4NvodDG9Omtf6l-JuM`lWyd3@1^0W zm`5%Nj#^$w#U)E}X~I*XT9X)4yeO|QYLL?@tZ56DJgksC1$1D0e2qO<9CQI7 zqHicRr3D{@e7I)N>6?1YdlduWH(+oshRf1bEL|k=aJbqpND z4&Hk>51UJcrQlPA~s zf-XE1G;FCU41OcXoseEFvyI`=%7;og0Uvmw`El88I}UMQ!bs1y=Y{le!n4E9lp40^ z73a>KTNli;;3*PdCT6}qR0Z%HES^N^7f0n?li_9iEsj-Xkgw1#zq3bh#{qS$8|w6H zs>A}pQa7wzePFrf)v`2*1lS~~H3H7f5DPU7QsnI#{)O<2Ua2@upQu#UGEytZ}yh51_uVXm;i38DdrE;JU{pJ?~2?+^*2SM3AHucb! 
zGvI0S%@c!%i$}npMtpZ$zEikw`kp;|!a8>JBSFyitRF{ZV0^G(DwyFU50q*gG!D!l z9wzY4DkTgIR8Uq{*Y82|({<)x#;}+|4;HiVnKozMyLG+^RK|!AXVM>iG^dn>KW9#7 zx0LEw2%&B+%DbJa2pG8=H*em&iUSz~UloBHb1kaA+zPlpU`yu-jvxD_AE7XT)g#zP zBXIg(TXZ`x_#ZL49}foN`BCh6c*Z_S5cQag-gHxtkKwTsKCsioRf%8+t8?c<3}^6; zL5=cXXq?w0uUj`_+l@EAF(35&6l;f4pDPF&(Hz9{tndjYq^2&;%Fa$dZ=}>2o%m-@ zVg7K4o@N%K7|tEV@E(wNo8t1!4pfnc9{Ro(%;DD+Ma@b}laFO)maHP2J|%J2n6Y^a zq({Cmc5ME}$&(W*oWm0?NCRUrM&cVsfQmB>aXfjF)rB26?+@eLM<~$0c|zd56ejRY zT|Dy%Q`L$xqwfS!Prdr;t3NcS%OqR*xlXWEUZfC^lanK1?@YqG%n9W_Fy|HK#767c zhfvSaI0n6bhx4L+x6&nufGP{r(JAgC&mfS;6#_=OF`Vi(CKkf&s|pA2fi=eeKB!vx zeSW=Z0>sh+Rhr8JZE6;n>&1CXn+9w+hmtrN4P771cwd7Cr`V%ZN_TdP;#ZvHOqWse zcTlw^W@Ly9GLU5~9Tvktp!SJ>sn&^UkdH;W&kzQIbO?u$ZU>~n5-=VQ{_%`f%lLg5 z&%t*pS+{~~o^v&H=gyrW&3@oHUclF82nvBl(orXj;}xy1j;SUW0|AYe?CjwurI3>? z`-9z7(y}ddiQpa9Gj(x`K=&rsj9v;NUuopGt9o)xp|&Y2&^-@7kkoN(W>98LHkQ`iTPVQX5k3MteAZ{CE$<^MuYyiS#nPLTlHK$-CjY8E-)n zysnRD$P(8rO9w@&J#V|^@3$iBC;s=IIDK|)&7_34;36Jw%vX__?=87{W1#0GCLZqr z>!q)?NXqgrs6u`D@u$xA)tBMIUoIRZT%KGuNE@IcUFgDrnVQ|Fd#wmGcyDuNuRh9- zI`Je$`>cQ=2;C^m`1adxo3IqYvu_BLjyl{6qBsVFcXmZ92Y-w(+hsL{5oIz+O3LhH zugt&Sut6}MA&r4QVNx;TU@SUb=+R^G*43-iOD%=PV4|mQ-!Eoj7^n}gwDm}JdZ1_U z8g@lJ}o5zA42*t zWvW=JdUiO}q7>iqKC@h7Ji~g9>>3*#H6+meSy`QSA;&DR)yqqkv<&FhRlGIRprzg7 zt8#f&x6*_-?DjaPQWep%WlNs2;af4s(=&Ld6RIRIlHMyXFOOBAy=bG3wsI@I@g2Ky zQSP0|$;oR`4sTjeiaGEc-n4+H>-)FwFAz3@NfzLKUNsqBkHIAZw6zy)z#A+C70kTs zJlGNJGXhGGE?uV8*Swx*Sg(=h^bli;h_P8!X+oCI_DcOn=8Eh)5BQgp?^6E|6;&r7 z*^eBVlazl(n`jkR<`*n|w+0pO_tRd8-uFsKPR!CUH)UYgsBeB5-m}@hefxL;(qE|( zK(Dcx9-I~_%v>)u743BX6YT`c@2xI*=m$XC#`Y12KB zt`rOcGKloLpTQ@ot?!p$|1{R~gnjM})gq_*EKVOKKXU%$lWWUdBI`Osqi}PhkUBRh zHx+pti$xV?_VSiL9*>8ojtr*KyJRq4g!;6>v?mUcAz?k{@=`n4o|?fYysHQX>xx0B zC%|C5$(=R+QGm2n=JrBC0TQDgY>a!@b&F?qKydnH;rHGQ=6HqOzUDO(#lg01+ujGJ zAAmG|H~SJ;W;UJ`+>bYe08z~caqt@V%YMT-bCZf~q29-9X&40s1>CVknHy3BL|_IH zqGt)&8jGj&>F-h;#9^u;)O;xveKI_MBKWzAQd3u|Y264oJVCUhKN8|&km%JeZ55_%*-uXI(w_+iELSCsc1(P_J_}>iX 
zn*`?j2=|`=vF^hMI2nYotUwLwif6Ii!rT4u6p}zq!ga{N$9xxKYTO3CPQn~>DTWQS z2u!dN*H54vW=#8s=k0m!MCggEBS)}{&h5cTSAnmo)tA|!;V}`+lkH!lxMxljmQk^JuJi$}Q z@qD-O+X+zY1aAr?*2lyoeL|H)>(;GV_23ciU;Ea*Kwrc80vub563#kkt<0Nblag$DBHKYB8QS9{2O! zSoYs$iNkdp#B&(Hu2A&E!A!ZitijB2`t<2W_&%fX{)u3eBrpdLyteYonmcIFAh)^J zCNP7zy)o0A4o^CI_~yFw&^Lk;o2iigMpcAXTyztp+JJzG#x_B(&77GuCEQDFL0oRA zPdh;r{mdwK-_C+w{v?R6K8T42F|E~q-1G9Ze_g*7`lfe^wb+L2Tg;eixXIct%9?*> z-#^NF9`xT0L99n;oVwRt)SzXFQjxXZ^W^t-z)|Ph%;<`%J~4+7oQF zZrr%>FeJd;i2FBHAJ~g>-ik$+%f(X4x1j>s#wigWqUVjcd;VCe~GU%B+h1@r?&l@6i@7}#X zh>G>}!BC@qVVf8j4xGeu&tni?^UAtkKTCFTl9;+?nS63OXF(8bBbN$)&+F-@uy3(_LVS5xqihqhScv_QP zmu1)QXFQ&T@upJil*+Y_Q@hW-&Lg$YR3&POP#oXzexiylbR7CoaYT_r0goX#6eFG# zwb-GFMel#q{h+<9p+k4qEk5V`T2URf6~Qxu9lVdFzHhH$Xe_H|&#n1OA*yHhl2FmO zapTx}_3E+C%Ih!mJ!io*y!Jj0y>UFQSta5P+f>ub<;J-_eVe^t(%47Jg}iwP`3Pqr zseYhKV7HKJfWH^0TDRh<3a+~9s_3}5xE5fPLUc-bIOg*pZ&~@vU4T@#;U}Aq9_^I@ zioJz5Do~AIzR`c{L-$~jrvFo={yT&$b(nRoln%8f^tfm3x?IsYWt1Y6+GLhts9092 za61&5c&RW4A?D_t( zYxSuPci*+MqoT&GhSZ3Mix|?wiJ+Q0xbvD;n%kgB%||ZO6!r)r2o_;y5dP&`>E1R2 zwo9Y@ul_qQp<}+sNuA$?I+f;At@x-a)=wTB#^vpE=G6L%B8cD;L$%D&8QAZu=5T-g zOQ?>=NOcseQq2Nm#FQ|I>ieM_@X3_fZ`K-15kznaBII2jg2CUGw%bcWg%nXIDH{K% zi4vnfO4zqGfRQMkc@&3jQ_vP~d0000+~EOR*HiVTy>BvS}iV2_$(*-n;kw{^#7>_g-G^OCXDkGyNwg_rA0K z-@l#zoD1+J-CJVN3^Z+O=^EfTlU@zR7|nDNA z7NYdq&;ryX0O`Kc=ZMYzA&V{9v?=`pSn!t3xWhcN-w~25V9THdsL?(!a^$5BK5xn) zfo$t9cC=ot>c)=6!~~<{GqU^6(SE==(<^ZS6TL>&dv#|^21uz}n*uj(`~44BK}!G_ zH|p9#lxSn;FG|7 zIyxy63(F6+F{A3Ih9Ch!E!}CRH}HF^elH#`jq(8t<#H*5XDfr(55%@HGhrAp+F3^- z3u_`#Br1`^16tZIlyf^i0ukSdI6om0wAezwAJd_^0E`@V{wWL0XURgA6%=%>rP{G$ zzeq=vi^7c6Fe9UxQxVExSb@MO5Kdck0qhy#w_j)$0Fx$t>OnA<5Jx+^eBZKNlr~)d$6ONbPAFDN`D4 z!x``IVb*)=YT>9zWIL|=d&H?}SS<(V!tO`^UwdUy z%G?G43>|vnK^c^X8R!|VG95C6)|CdqWWk<&nLmvk_x#8&rjSz)a~KzD44E)D7_yov zl!prPPNwU)JuQobTE#pUXh$rYdE^ugC{5`?ebPvp-B&J5E{Y3)Y>++o{SVjD(4Wu* zg;$G`Mw~j;BLw%5CQU+5n+|2aBOrm>>B+-3&S2Hi?X`U1*+GBkj?&CQw3xKa0Ug_x z41jw~8Ta4LAvyPw(la}jc|~CWnVFw@B!s3&azaFtqJyOEAC_ik<)^?sqS^l9c4TaG 
z$f@&I`MrO0>k|ghw(X5)ENPCDG6a?t0`-(=WtMqUC&Im=P|7F@NXf!7-T=L|)uo6D zSyhWk@`fbo+ZwQ+6xe1D9GKr4{>PM?8|XxmTtu7Z>nm3HF2(B?P9;eCot5`1Qd>OJ zVN=B&)`4MiE>+KpH%WH(abNA4PH-nwP|&F|URwo8Pap=k6zxcoDp{&u15#S^$!sib zI;uEkL?;zX2FDNRyDu}|VrEe6H>-_+3?>9HloViADJ@(cFrYHvVP+sfV8CbaK$6!R zNJ&Tvr23KrY01eSKs~g6C51}{%Zan8FU~7_BwoKT0H0|dpa}FK0U^MavVUztsGl+& z1xaCCVy$4#B9c)21xYN$sH&o-LrLTyN{s( z^84GBBjals;?M;PN}s3~fTBZ3F1{}P0EC%Rpvu+6>Z-0{{SyAD?y_!P<_BX(??p(mbfRRolRXxF!)0I_W$V-+a6-U*CyRl@6Vrgujbt~FjVQ6ijDaQxQ>aW!cKoMgVTS=vVvF%^hgx0V$(@hTR zRQ4J>ZpV&p6^7wiMo5FUBD6o;B6FNwP%y%Z?N8?70};w4#@f}Kz|_C*Oq+4Dj=d*} zR@>-tP8h*lr!MZH{@Ag3WxacCc!wnUCMu0lp->st0!J}$K)+x2r>+NM$6hbFbu1$- zyM-k6an#3>bAQ<07BfFOlr?`7#kvwTN&A8h5a0< z;pziuQJ%t}jXujpyQ#BzMs1>msWo`Se0rT4yWGKp-6|js1xhKuLF{-pmxIL1scO^6 zXHl7{I{?N8My6%@kgd~eWS8i%nlqtt6rF4!2xTXsD&qZ(B^j*lD};Br zYa{AAq4gU#rmecuWY5)!PoJJyY6xQ`F-|*`zUK*$BX2r#YH|@YfwHqdZ^byDN3W@l zX*$x^zQ*AK=xWb3;fx>UE?1WUkl|!6UhT&INdVzwtq!gz5_zyqKz9< zufttXz5bK+^p;*G%8vy1?qS?Gr>@Jm#%$;(H@uXDkrh%gxLr@zX+cA>clZAG=~@Tz zapV4eU$DaSS1t^?o}}?KhN=04eVs^jWb>AcXBfM4=>s(Iec-+h2Tv9k5AxHD{b=I0 z!u~R$@A`EF(x)p4qUKVTw5Fha1r3qm_xtBlkdPgDPH!OJ=~#Z|pE)Vr%S{jT*hS`&9X1{H6x_LizDhs8mzUT23^an8 z9e|>oA!oW!`1*heV+K&}D^L^?eU(){@{TE7=vLgs#BhLT;002ovPDHLkV1h8+JI(+A diff --git a/api/core/model_runtime/model_providers/tongyi/_common.py b/api/core/model_runtime/model_providers/tongyi/_common.py deleted file mode 100644 index 8a50c7aa05f38c..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/_common.py +++ /dev/null @@ -1,55 +0,0 @@ -from dashscope.common.error import ( - AuthenticationError, - InvalidParameter, - RequestFailure, - ServiceUnavailableError, - UnsupportedHTTPMethod, - UnsupportedModel, -) - -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) - - -class _CommonTongyi: - @staticmethod - def _to_credential_kwargs(credentials: dict) -> dict: - 
credentials_kwargs = { - "dashscope_api_key": credentials["dashscope_api_key"], - } - - return credentials_kwargs - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [ - RequestFailure, - ], - InvokeServerUnavailableError: [ - ServiceUnavailableError, - ], - InvokeRateLimitError: [], - InvokeAuthorizationError: [ - AuthenticationError, - ], - InvokeBadRequestError: [ - InvalidParameter, - UnsupportedModel, - UnsupportedHTTPMethod, - ], - } diff --git a/api/core/model_runtime/model_providers/tongyi/llm/__init__.py b/api/core/model_runtime/model_providers/tongyi/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/tongyi/llm/_position.yaml b/api/core/model_runtime/model_providers/tongyi/llm/_position.yaml deleted file mode 100644 index 8ce336d60cb396..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/_position.yaml +++ /dev/null @@ -1,51 +0,0 @@ -- qwen-vl-max-0809 -- qwen-vl-max-0201 -- qwen-vl-max -- qwen-max-latest -- qwen-max-1201 -- qwen-max-0919 -- qwen-max-0428 -- qwen-max-0403 -- qwen-max-0107 -- qwen-max -- qwen-max-longcontext -- qwen-plus-latest -- qwen-plus-0919 -- qwen-plus-0806 -- qwen-plus-0723 -- qwen-plus-0624 -- qwen-plus-0206 -- qwen-plus-chat -- qwen-plus -- qwen-vl-plus-0809 -- qwen-vl-plus-0201 -- qwen-vl-plus -- qwen-turbo-latest -- qwen-turbo-0919 -- qwen-turbo-0624 -- qwen-turbo-0206 -- qwen-turbo-chat -- qwen-turbo -- qwen2.5-72b-instruct -- qwen2.5-32b-instruct -- qwen2.5-14b-instruct -- qwen2.5-7b-instruct -- qwen2.5-3b-instruct -- qwen2.5-1.5b-instruct -- qwen2.5-0.5b-instruct -- qwen2.5-coder-7b-instruct 
-- qwen2-math-72b-instruct -- qwen2-math-7b-instruct -- qwen2-math-1.5b-instruct -- qwen-long -- qwen-math-plus-latest -- qwen-math-plus-0919 -- qwen-math-plus-0816 -- qwen-math-plus -- qwen-math-turbo-latest -- qwen-math-turbo-0919 -- qwen-math-turbo -- qwen-coder-turbo-latest -- qwen-coder-turbo-0919 -- qwen-coder-turbo -- farui-plus diff --git a/api/core/model_runtime/model_providers/tongyi/llm/farui-plus.yaml b/api/core/model_runtime/model_providers/tongyi/llm/farui-plus.yaml deleted file mode 100644 index 34a57d1fc0c9a5..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/farui-plus.yaml +++ /dev/null @@ -1,77 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: farui-plus -label: - en_US: farui-plus -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 12288 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. 
- - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. 
When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.02' - output: '0.02' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/llm.py b/api/core/model_runtime/model_providers/tongyi/llm/llm.py deleted file mode 100644 index 3e3585b30ae33d..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/llm.py +++ /dev/null @@ -1,593 +0,0 @@ -import base64 -import os -import tempfile -import uuid -from collections.abc import Generator -from http import HTTPStatus -from pathlib import Path -from typing import Optional, Union, cast - -from dashscope import Generation, MultiModalConversation, get_tokenizer -from dashscope.api_entities.dashscope_response import GenerationResponse -from dashscope.common.error import ( - AuthenticationError, - InvalidParameter, - RequestFailure, - ServiceUnavailableError, - UnsupportedHTTPMethod, - UnsupportedModel, -) - -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - I18nObject, - ModelFeature, - 
ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - - -class TongyiLargeLanguageModel(LargeLanguageModel): - tokenizers = {} - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # invoke model without code wrapper - return self._generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - # Check if the model was added via get_customizable_model_schema - if self.get_customizable_model_schema(model, credentials) is not None: - # For custom models, tokens are not calculated. 
- return 0 - - if model in {"qwen-turbo-chat", "qwen-plus-chat"}: - model = model.replace("-chat", "") - if model == "farui-plus": - model = "qwen-farui-plus" - - if model in self.tokenizers: - tokenizer = self.tokenizers[model] - else: - tokenizer = get_tokenizer(model) - self.tokenizers[model] = tokenizer - - # convert string to token ids - tokens = tokenizer.encode(self._convert_messages_to_prompt(prompt_messages)) - - return len(tokens) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._generate( - model=model, - credentials=credentials, - prompt_messages=[ - UserPromptMessage(content="ping"), - ], - model_parameters={ - "temperature": 0.5, - }, - stream=False, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - - mode = self.get_model_mode(model, credentials) - - if model in {"qwen-turbo-chat", "qwen-plus-chat"}: - model = model.replace("-chat", "") - - extra_model_kwargs = {} - if tools: - extra_model_kwargs["tools"] = self._convert_tools(tools) - - if stop: - 
extra_model_kwargs["stop"] = stop - - params = { - "model": model, - **model_parameters, - **credentials_kwargs, - **extra_model_kwargs, - } - - model_schema = self.get_model_schema(model, credentials) - if ModelFeature.VISION in (model_schema.features or []): - params["messages"] = self._convert_prompt_messages_to_tongyi_messages(prompt_messages, rich_content=True) - - response = MultiModalConversation.call(**params, stream=stream) - else: - # nothing different between chat model and completion model in tongyi - params["messages"] = self._convert_prompt_messages_to_tongyi_messages(prompt_messages) - response = Generation.call(**params, result_format="message", stream=stream) - - if stream: - return self._handle_generate_stream_response(model, credentials, response, prompt_messages) - - return self._handle_generate_response(model, credentials, response, prompt_messages) - - def _handle_generate_response( - self, model: str, credentials: dict, response: GenerationResponse, prompt_messages: list[PromptMessage] - ) -> LLMResult: - """ - Handle llm response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response - """ - if response.status_code not in {200, HTTPStatus.OK}: - raise ServiceUnavailableError(response.message) - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage( - content=response.output.choices[0].message.content, - ) - - # transform usage - usage = self._calc_response_usage(model, credentials, response.usage.input_tokens, response.usage.output_tokens) - - # transform response - result = LLMResult( - model=model, - message=assistant_prompt_message, - prompt_messages=prompt_messages, - usage=usage, - ) - - return result - - def _handle_generate_stream_response( - self, - model: str, - credentials: dict, - responses: Generator[GenerationResponse, None, None], - prompt_messages: list[PromptMessage], - ) -> 
Generator: - """ - Handle llm stream response - - :param model: model name - :param credentials: credentials - :param responses: response - :param prompt_messages: prompt messages - :return: llm response chunk generator result - """ - full_text = "" - tool_calls = [] - for index, response in enumerate(responses): - if response.status_code not in {200, HTTPStatus.OK}: - raise ServiceUnavailableError( - f"Failed to invoke model {model}, status code: {response.status_code}, " - f"message: {response.message}" - ) - - resp_finish_reason = response.output.choices[0].finish_reason - - if resp_finish_reason is not None and resp_finish_reason != "null": - resp_content = response.output.choices[0].message.content - - assistant_prompt_message = AssistantPromptMessage( - content="", - ) - - if "tool_calls" in response.output.choices[0].message: - tool_calls = response.output.choices[0].message["tool_calls"] - elif resp_content: - # special for qwen-vl - if isinstance(resp_content, list): - resp_content = resp_content[0]["text"] - - # transform assistant message to prompt message - assistant_prompt_message.content = resp_content.replace(full_text, "", 1) - - full_text = resp_content - - if tool_calls: - message_tool_calls = [] - for tool_call_obj in tool_calls: - message_tool_call = AssistantPromptMessage.ToolCall( - id=tool_call_obj["function"]["name"], - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=tool_call_obj["function"]["name"], arguments=tool_call_obj["function"]["arguments"] - ), - ) - message_tool_calls.append(message_tool_call) - - assistant_prompt_message.tool_calls = message_tool_calls - - # transform usage - usage = response.usage - usage = self._calc_response_usage(model, credentials, usage.input_tokens, usage.output_tokens) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, message=assistant_prompt_message, finish_reason=resp_finish_reason, usage=usage - 
), - ) - else: - resp_content = response.output.choices[0].message.content - if not resp_content: - if "tool_calls" in response.output.choices[0].message: - tool_calls = response.output.choices[0].message["tool_calls"] - continue - - # special for qwen-vl - if isinstance(resp_content, list): - resp_content = resp_content[0]["text"] - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage( - content=resp_content.replace(full_text, "", 1), - ) - - full_text = resp_content - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=assistant_prompt_message), - ) - - def _to_credential_kwargs(self, credentials: dict) -> dict: - """ - Transform credentials to kwargs for model instance - - :param credentials: - :return: - """ - credentials_kwargs = { - "api_key": credentials["dashscope_api_key"], - } - - return credentials_kwargs - - def _convert_one_message_to_text(self, message: PromptMessage) -> str: - """ - Convert a single message to a string. - - :param message: PromptMessage to convert. - :return: String representation of the message. 
- """ - human_prompt = "\n\nHuman:" - ai_prompt = "\n\nAssistant:" - content = message.content - - if isinstance(message, UserPromptMessage): - if isinstance(content, str): - message_text = f"{human_prompt} {content}" - else: - message_text = "" - for sub_message in content: - if sub_message.type == PromptMessageContentType.TEXT: - message_text = f"{human_prompt} {sub_message.data}" - break - elif isinstance(message, AssistantPromptMessage): - message_text = f"{ai_prompt} {content}" - elif isinstance(message, SystemPromptMessage | ToolPromptMessage): - message_text = content - else: - raise ValueError(f"Got unknown type {message}") - - return message_text - - def _convert_messages_to_prompt(self, messages: list[PromptMessage]) -> str: - """ - Format a list of messages into a full prompt for the Anthropic model - - :param messages: List of PromptMessage to combine. - :return: Combined string with necessary human_prompt and ai_prompt tags. - """ - messages = messages.copy() # don't mutate the original list - - text = "".join(self._convert_one_message_to_text(message) for message in messages) - - # trim off the trailing ' ' that might come from the "Assistant: " - return text.rstrip() - - def _convert_prompt_messages_to_tongyi_messages( - self, prompt_messages: list[PromptMessage], rich_content: bool = False - ) -> list[dict]: - """ - Convert prompt messages to tongyi messages - - :param prompt_messages: prompt messages - :return: tongyi messages - """ - tongyi_messages = [] - for prompt_message in prompt_messages: - if isinstance(prompt_message, SystemPromptMessage): - tongyi_messages.append( - { - "role": "system", - "content": prompt_message.content if not rich_content else [{"text": prompt_message.content}], - } - ) - elif isinstance(prompt_message, UserPromptMessage): - if isinstance(prompt_message.content, str): - tongyi_messages.append( - { - "role": "user", - "content": prompt_message.content - if not rich_content - else [{"text": prompt_message.content}], - } 
- ) - else: - sub_messages = [] - for message_content in prompt_message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - sub_message_dict = {"text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - - image_url = message_content.data - if message_content.data.startswith("data:"): - # convert image base64 data to file in /tmp - image_url = self._save_base64_image_to_file(message_content.data) - - sub_message_dict = {"image": image_url} - sub_messages.append(sub_message_dict) - - # resort sub_messages to ensure text is always at last - sub_messages = sorted(sub_messages, key=lambda x: "text" in x) - - tongyi_messages.append({"role": "user", "content": sub_messages}) - elif isinstance(prompt_message, AssistantPromptMessage): - content = prompt_message.content - if not content: - content = " " - message = {"role": "assistant", "content": content if not rich_content else [{"text": content}]} - if prompt_message.tool_calls: - message["tool_calls"] = [tool_call.model_dump() for tool_call in prompt_message.tool_calls] - tongyi_messages.append(message) - elif isinstance(prompt_message, ToolPromptMessage): - tongyi_messages.append( - {"role": "tool", "content": prompt_message.content, "name": prompt_message.tool_call_id} - ) - else: - raise ValueError(f"Got unknown type {prompt_message}") - - return tongyi_messages - - def _save_base64_image_to_file(self, base64_image: str) -> str: - """ - Save base64 image to file - 'data:{upload_file.mime_type};base64,{encoded_string}' - - :param base64_image: base64 image data - :return: image file path - """ - # get mime type and encoded string - mime_type, encoded_string = base64_image.split(",")[0].split(";")[0].split(":")[1], base64_image.split(",")[1] - - # save image to file - temp_dir = 
tempfile.gettempdir() - - file_path = os.path.join(temp_dir, f"{uuid.uuid4()}.{mime_type.split('/')[1]}") - - Path(file_path).write_bytes(base64.b64decode(encoded_string)) - - return f"file://{file_path}" - - def _convert_tools(self, tools: list[PromptMessageTool]) -> list[dict]: - """ - Convert tools - """ - tool_definitions = [] - for tool in tools: - properties = tool.parameters["properties"] - required_properties = tool.parameters["required"] - - properties_definitions = {} - for p_key, p_val in properties.items(): - desc = p_val["description"] - if "enum" in p_val: - desc += f"; Only accepts one of the following predefined options: [{', '.join(p_val['enum'])}]" - - properties_definitions[p_key] = { - "description": desc, - "type": p_val["type"], - } - - tool_definition = { - "type": "function", - "function": { - "name": tool.name, - "description": tool.description, - "parameters": properties_definitions, - "required": required_properties, - }, - } - - tool_definitions.append(tool_definition) - - return tool_definitions - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [ - RequestFailure, - ], - InvokeServerUnavailableError: [ - ServiceUnavailableError, - ], - InvokeRateLimitError: [], - InvokeAuthorizationError: [ - AuthenticationError, - ], - InvokeBadRequestError: [ - InvalidParameter, - UnsupportedModel, - UnsupportedHTTPMethod, - ], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - Architecture for defining customizable models - - :param model: model name - :param credentials: model credentials - :return: AIModelEntity or None - """ - return AIModelEntity( - model=model, - label=I18nObject(en_US=model, zh_Hans=model), - model_type=ModelType.LLM, - features=[ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL, ModelFeature.STREAM_TOOL_CALL] - if credentials.get("function_calling_type") == "tool_call" - else [], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", 8000)), - ModelPropertyKey.MODE: LLMMode.CHAT.value, - }, - parameter_rules=[ - ParameterRule( - name="temperature", - use_template="temperature", - label=I18nObject(en_US="Temperature", zh_Hans="温度"), - type=ParameterType.FLOAT, - ), - ParameterRule( - name="max_tokens", - use_template="max_tokens", - default=512, - min=1, - max=int(credentials.get("max_tokens", 1024)), - label=I18nObject(en_US="Max Tokens", zh_Hans="最大标记"), - type=ParameterType.INT, - ), - ParameterRule( - name="top_p", - use_template="top_p", - label=I18nObject(en_US="Top P", zh_Hans="Top P"), - type=ParameterType.FLOAT, - ), - ParameterRule( - name="top_k", - use_template="top_k", - label=I18nObject(en_US="Top K", zh_Hans="Top K"), - type=ParameterType.FLOAT, - ), - ParameterRule( - name="frequency_penalty", - use_template="frequency_penalty", - label=I18nObject(en_US="Frequency Penalty", zh_Hans="重复惩罚"), - type=ParameterType.FLOAT, - ), - ], - ) diff --git 
a/api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo-0919.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo-0919.yaml deleted file mode 100644 index 64a3f331336bc0..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo-0919.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-coder-turbo-0919 -label: - en_US: qwen-coder-turbo-0919 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. 
Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo-latest.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo-latest.yaml deleted file mode 100644 index a4c93f7047ff58..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo-latest.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-coder-turbo-latest -label: - en_US: qwen-coder-turbo-latest -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. 
- - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. 
When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo.yaml deleted file mode 100644 index ff68faed80810b..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-coder-turbo.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-coder-turbo -label: - en_US: qwen-coder-turbo -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-long.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-long.yaml deleted file mode 100644 index c3dbb3616fb961..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-long.yaml +++ /dev/null @@ -1,77 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-long -label: - en_US: qwen-long -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 10000000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used 
to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 6000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. 
The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.0005' - output: '0.002' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-0816.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-0816.yaml deleted file mode 100644 index 42fe1f68623bc4..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-0816.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-math-plus-0816 -label: - en_US: qwen-math-plus-0816 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 3072 - min: 1 - max: 3072 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-0919.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-0919.yaml deleted file mode 100644 index 9b6567b8cda4d7..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-0919.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-math-plus-0919 -label: - en_US: qwen-math-plus-0919 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. 
, the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 3072 - min: 1 - max: 3072 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. 
When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-latest.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-latest.yaml deleted file mode 100644 index b2a2393b365fcb..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus-latest.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-math-plus-latest -label: - en_US: qwen-math-plus-latest -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 3072 - min: 1 - max: 3072 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus.yaml deleted file mode 100644 index 63f4b7ff0a0879..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-plus.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-math-plus -label: - en_US: qwen-math-plus -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the 
degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 3072 - min: 1 - max: 3072 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. 
The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo-0919.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo-0919.yaml deleted file mode 100644 index 4da90eec3eddfd..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo-0919.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-math-turbo-0919 -label: - en_US: qwen-math-turbo-0919 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 3072 - min: 1 - max: 3072 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo-latest.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo-latest.yaml deleted file mode 100644 index d29f8851dd3909..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo-latest.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-math-turbo-latest -label: - en_US: qwen-math-turbo-latest -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. 
, the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 3072 - min: 1 - max: 3072 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. 
When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo.yaml deleted file mode 100644 index 2a8f7f725e9366..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-math-turbo.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-math-turbo -label: - en_US: qwen-math-turbo -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 3072 - min: 1 - max: 3072 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0107.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0107.yaml deleted file mode 100644 index ef1841b5173bc5..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0107.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# this model corresponds to qwen-max, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#cf6cc4aa2aokf) -model: qwen-max-0107 -label: - en_US: qwen-max-0107 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 
用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. 
- - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.04' - output: '0.12' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0403.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0403.yaml deleted file mode 100644 index a2ea5df130f379..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0403.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# this model corresponds to qwen-max-0403, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#cf6cc4aa2aokf) -model: qwen-max-0403 -label: - en_US: qwen-max-0403 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.04' - output: '0.12' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0428.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0428.yaml deleted file mode 100644 index a467665f118a68..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0428.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# this model corresponds to qwen-max-0428, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#cf6cc4aa2aokf) -model: qwen-max-0428 -label: - en_US: qwen-max-0428 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.04' - output: '0.12' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0919.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0919.yaml deleted file mode 100644 index 78661eaea065f2..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-0919.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# this model corresponds to qwen-max-0919, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#cf6cc4aa2aokf) -model: qwen-max-0919 -label: - en_US: qwen-max-0919 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 
用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. 
- - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.02' - output: '0.06' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-1201.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-1201.yaml deleted file mode 100644 index 6f4674576b4426..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-1201.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# this model corresponds to qwen-max, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#cf6cc4aa2aokf) -model: qwen-max-1201 -label: - en_US: qwen-max-1201 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.04' - output: '0.12' - unit: '0.001' - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-latest.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-latest.yaml deleted file mode 100644 index 8b5f0054733455..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-latest.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# this model corresponds to qwen-max, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#cf6cc4aa2aokf) -model: qwen-max-latest -label: - en_US: qwen-max-latest -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.02' - output: '0.06' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-longcontext.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-longcontext.yaml deleted file mode 100644 index 098494ff95012d..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-longcontext.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# this model corresponds to qwen-max, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#cf6cc4aa2aokf) -model: qwen-max-longcontext -label: - en_US: qwen-max-longcontext -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 
用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8000 - min: 1 - max: 8000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. 
- - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.04' - output: '0.12' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max.yaml deleted file mode 100644 index 9d0d3f8db39c23..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max.yaml +++ /dev/null @@ -1,87 +0,0 @@ -# this model corresponds to qwen-max, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#cf6cc4aa2aokf) -model: qwen-max -label: - en_US: qwen-max -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: enable_search - type: boolean - default: false - label: - zh_Hans: 联网搜索 - en_US: Web Search - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. - - name: response_format - use_template: response_format -pricing: - input: '0.02' - output: '0.06' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0206.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0206.yaml deleted file mode 100644 index 0b1a6f81df80c0..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0206.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# this model corresponds to qwen-plus-0206, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#bb0ffee88bwnk) -model: qwen-plus-0206 -label: - en_US: qwen-plus-0206 -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 
用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8000 - min: 1 - max: 8000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. 
- - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0624.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0624.yaml deleted file mode 100644 index 7706005bb535cd..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0624.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# this model corresponds to qwen-plus-0624, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#bb0ffee88bwnk) -model: qwen-plus-0624 -label: - en_US: qwen-plus-0624 -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8000 - min: 1 - max: 8000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0723.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0723.yaml deleted file mode 100644 index 348276fc08f98c..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0723.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# this model corresponds to qwen-plus-0723, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#bb0ffee88bwnk) -model: qwen-plus-0723 -label: - en_US: qwen-plus-0723 -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8000 - min: 1 - max: 8000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0806.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0806.yaml deleted file mode 100644 index 29f125135eaa3f..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0806.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# this model corresponds to qwen-plus-0806, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#bb0ffee88bwnk) -model: qwen-plus-0806 -label: - en_US: qwen-plus-0806 -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 
用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. 
- - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0919.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0919.yaml deleted file mode 100644 index 905fa1e1028bbf..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-0919.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# this model corresponds to qwen-plus-0919, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#bb0ffee88bwnk) -model: qwen-plus-0919 -label: - en_US: qwen-plus-0919 -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.0008' - output: '0.002' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-chat.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-chat.yaml deleted file mode 100644 index c7a3549727ce8e..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-chat.yaml +++ /dev/null @@ -1,79 +0,0 @@ -# this model corresponds to qwen-plus, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#bb0ffee88bwnk) -model: qwen-plus-chat -label: - en_US: qwen-plus-chat -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-latest.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-latest.yaml deleted file mode 100644 index 608f52c2964ea3..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-latest.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# this model corresponds to qwen-plus-latest, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#bb0ffee88bwnk) -model: qwen-plus-latest -label: - en_US: qwen-plus-latest -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 
用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. 
- - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.0008' - output: '0.002' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml deleted file mode 100644 index 9089e57255bb70..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml +++ /dev/null @@ -1,87 +0,0 @@ -# this model corresponds to qwen-plus, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#bb0ffee88bwnk) -model: qwen-plus -label: - en_US: qwen-plus -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: enable_search - type: boolean - default: false - label: - zh_Hans: 联网搜索 - en_US: Web Search - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. - - name: response_format - use_template: response_format -pricing: - input: '0.0008' - output: '0.002' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0206.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0206.yaml deleted file mode 100644 index 7ee0d44f2f2834..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0206.yaml +++ /dev/null @@ -1,77 +0,0 @@ -# this model corresponds to qwen-turbo-0206, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#ff492e2c10lub) - -model: qwen-turbo-0206 -label: - en_US: qwen-turbo-0206 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 
用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. 
- - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0624.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0624.yaml deleted file mode 100644 index 20a3f7eb6460f3..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0624.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# this model corresponds to qwen-turbo-0624, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#ff492e2c10lub) -model: qwen-turbo-0624 -label: - en_US: qwen-turbo-0624 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0919.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0919.yaml deleted file mode 100644 index ba73dec3631fb5..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-0919.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# this model corresponds to qwen-turbo-0919, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#ff492e2c10lub) -model: qwen-turbo-0919 -label: - en_US: qwen-turbo-0919 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.0003' - output: '0.0006' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-chat.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-chat.yaml deleted file mode 100644 index d785b7fe857878..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-chat.yaml +++ /dev/null @@ -1,79 +0,0 @@ -# this model corresponds to qwen-turbo, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#ff492e2c10lub) -model: qwen-turbo-chat -label: - en_US: qwen-turbo-chat -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 
用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 1500 - min: 1 - max: 1500 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. 
- - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-latest.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-latest.yaml deleted file mode 100644 index fe38a4283c2d1e..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo-latest.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# this model corresponds to qwen-turbo-latest, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#ff492e2c10lub) -model: qwen-turbo-latest -label: - en_US: qwen-turbo-latest -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.0006' - output: '0.0003' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo.yaml deleted file mode 100644 index 215c9ec5fc96aa..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo.yaml +++ /dev/null @@ -1,87 +0,0 @@ -# this model corresponds to qwen-turbo, for more details -# please refer to (https://help.aliyun.com/zh/model-studio/getting-started/models#ff492e2c10lub) -model: qwen-turbo -label: - en_US: qwen-turbo -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: enable_search - type: boolean - default: false - label: - zh_Hans: 联网搜索 - en_US: Web Search - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. 
- - name: response_format - use_template: response_format -pricing: - input: '0.0006' - output: '0.0003' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0201.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0201.yaml deleted file mode 100644 index d80168ffc3fb55..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0201.yaml +++ /dev/null @@ -1,49 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-vl-max-0201 -label: - en_US: qwen-vl-max-0201 -model_type: llm -features: - - vision - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: response_format - use_template: response_format -pricing: - input: '0.02' - output: '0.02' - unit: '0.001' - currency: RMB -deprecated: true diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0809.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0809.yaml deleted file mode 100644 index 50e10226a5f5c4..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0809.yaml +++ /dev/null @@ -1,79 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-vl-max-0809 -label: - en_US: qwen-vl-max-0809 -model_type: llm -features: - - vision - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: max_tokens - required: false - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: response_format - use_template: response_format - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.02' - output: '0.02' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max.yaml deleted file mode 100644 index 21b127f56c47d9..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max.yaml +++ /dev/null @@ -1,79 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-vl-max -label: - en_US: qwen-vl-max -model_type: llm -features: - - vision - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. 
For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: max_tokens - required: false - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: response_format - use_template: response_format - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.02' - output: '0.02' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0201.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0201.yaml deleted file mode 100644 index 03cb039d15a7dd..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0201.yaml +++ /dev/null @@ -1,79 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-vl-plus-0201 -label: - en_US: qwen-vl-plus-0201 -model_type: llm -features: - - vision - - agent-thought -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: max_tokens - required: false - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: response_format - use_template: response_format - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.02' - output: '0.02' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0809.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0809.yaml deleted file mode 100644 index 67b2b2ebddc616..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0809.yaml +++ /dev/null @@ -1,79 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-vl-plus-0809 -label: - en_US: qwen-vl-plus-0809 -model_type: llm -features: - - vision - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. 
For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: max_tokens - required: false - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: response_format - use_template: response_format - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.008' - output: '0.008' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus.yaml deleted file mode 100644 index f55764c6c05500..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus.yaml +++ /dev/null @@ -1,79 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen-vl-plus -label: - en_US: qwen-vl-plus -model_type: llm -features: - - vision - - agent-thought -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: max_tokens - required: false - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: response_format - use_template: response_format - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.008' - output: '0.008' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-1.5b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-1.5b-instruct.yaml deleted file mode 100644 index ea157f42ded914..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-1.5b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2-math-1.5b-instruct -label: - en_US: qwen2-math-1.5b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-72b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-72b-instruct.yaml deleted file mode 100644 index 37052a923317d9..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-72b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2-math-72b-instruct -label: - en_US: qwen2-math-72b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. 
, the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. 
When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-7b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-7b-instruct.yaml deleted file mode 100644 index e182f1c27f7ea9..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2-math-7b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2-math-7b-instruct -label: - en_US: qwen2-math-7b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-0.5b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-0.5b-instruct.yaml deleted file mode 100644 index 9e75ccc1f210d9..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-0.5b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2.5-0.5b-instruct -label: - en_US: qwen2.5-0.5b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 
用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. 
- - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.000' - output: '0.000' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-1.5b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-1.5b-instruct.yaml deleted file mode 100644 index 67c9d312432af7..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-1.5b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2.5-1.5b-instruct -label: - en_US: qwen2.5-1.5b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.000' - output: '0.000' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-14b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-14b-instruct.yaml deleted file mode 100644 index 2a38be921cf3fd..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-14b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2.5-14b-instruct -label: - en_US: qwen2.5-14b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. 
, the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. 
When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.002' - output: '0.006' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-32b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-32b-instruct.yaml deleted file mode 100644 index e6e4fbf97808be..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-32b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2.5-32b-instruct -label: - en_US: qwen2.5-32b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.0035' - output: '0.007' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-3b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-3b-instruct.yaml deleted file mode 100644 index 8f250379a788ab..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-3b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2.5-3b-instruct -label: - en_US: qwen2.5-3b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - 
en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. 
The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '0.000' - output: '0.000' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-72b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-72b-instruct.yaml deleted file mode 100644 index bb3cdd6141f1ea..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-72b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2.5-72b-instruct -label: - en_US: qwen2.5-72b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. 
It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.004' - output: '0.012' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-7b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-7b-instruct.yaml deleted file mode 100644 index fdcd3d42757edb..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-7b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2.5-7b-instruct -label: - en_US: qwen2.5-7b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. 
, the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. 
When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.001' - output: '0.002' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-coder-7b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-coder-7b-instruct.yaml deleted file mode 100644 index 7ebeec395393c7..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-coder-7b-instruct.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2.5-coder-7b-instruct -label: - en_US: qwen2.5-coder-7b-instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0.001' - output: '0.002' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/text_embedding/__init__.py b/api/core/model_runtime/model_providers/tongyi/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v1.yaml b/api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v1.yaml deleted file mode 100644 index 52e35d8b50afd8..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v1.yaml +++ /dev/null @@ -1,10 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models#3383780daf8hw -model: text-embedding-v1 -model_type: text-embedding -model_properties: - context_size: 2048 - max_chunks: 25 -pricing: - input: "0.0007" - unit: "0.001" - currency: RMB diff --git 
a/api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v2.yaml b/api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v2.yaml deleted file mode 100644 index 5bb6a8f4243d53..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v2.yaml +++ /dev/null @@ -1,10 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models#3383780daf8hw -model: text-embedding-v2 -model_type: text-embedding -model_properties: - context_size: 2048 - max_chunks: 25 -pricing: - input: "0.0007" - unit: "0.001" - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v3.yaml b/api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v3.yaml deleted file mode 100644 index d8af0e2b63565d..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/text_embedding/text-embedding-v3.yaml +++ /dev/null @@ -1,10 +0,0 @@ -# for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models#3383780daf8hw -model: text-embedding-v3 -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 25 -pricing: - input: "0.0007" - unit: "0.001" - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/tongyi.py b/api/core/model_runtime/model_providers/tongyi/tongyi.py deleted file mode 100644 index a084512de9a885..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/tongyi.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class TongyiProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider 
credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `qwen-turbo` model for validate, - model_instance.validate_credentials(model="qwen-turbo", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/tongyi/tongyi.yaml b/api/core/model_runtime/model_providers/tongyi/tongyi.yaml deleted file mode 100644 index 1a09c20fd93f61..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/tongyi.yaml +++ /dev/null @@ -1,87 +0,0 @@ -provider: tongyi -label: - zh_Hans: 通义千问 - en_US: TONGYI -icon_small: - en_US: icon_s_en.png -icon_large: - zh_Hans: icon_l_zh.png - en_US: icon_l_en.png -background: "#EFF1FE" -help: - title: - en_US: Get your API key from AliCloud - zh_Hans: 从阿里云百炼获取 API Key - url: - en_US: https://bailian.console.aliyun.com/?apiKey=1#/api-key -supported_model_types: - - llm - - tts - - text-embedding -configurate_methods: - - predefined-model - - customizable-model -provider_credential_schema: - credential_form_schemas: - - variable: dashscope_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: dashscope_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: context_size - label: - zh_Hans: 模型上下文长度 - en_US: Model context size - required: true - type: text-input - default: '4096' - placeholder: - 
zh_Hans: 在此输入您的模型上下文长度 - en_US: Enter your Model context size - - variable: max_tokens - label: - zh_Hans: 最大 token 上限 - en_US: Upper bound for max tokens - default: '4096' - type: text-input - show_on: - - variable: __model_type - value: llm - - variable: function_calling_type - label: - en_US: Function calling - type: select - required: false - default: no_call - options: - - value: no_call - label: - en_US: Not Support - zh_Hans: 不支持 - - value: function_call - label: - en_US: Support - zh_Hans: 支持 - show_on: - - variable: __model_type - value: llm diff --git a/api/core/model_runtime/model_providers/tongyi/tts/__init__.py b/api/core/model_runtime/model_providers/tongyi/tts/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/tongyi/tts/tts-1.yaml b/api/core/model_runtime/model_providers/tongyi/tts/tts-1.yaml deleted file mode 100644 index 4eaa0ff3612bf2..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/tts/tts-1.yaml +++ /dev/null @@ -1,139 +0,0 @@ -model: tts-1 -model_type: tts -model_properties: - default_voice: 'sambert-zhiru-v1' - voices: - - mode: "sambert-zhinan-v1" - name: "知楠(广告男声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhiqi-v1" - name: "知琪(温柔女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhichu-v1" - name: "知厨(新闻播报)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhide-v1" - name: "知德(新闻男声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhijia-v1" - name: "知佳(标准女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhiru-v1" - name: "知茹(新闻女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhiqian-v1" - name: "知倩(配音解说、新闻播报)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhixiang-v1" - name: "知祥(配音解说)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhiwei-v1" - name: "知薇(萝莉女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhihao-v1" - name: "知浩(咨询男声)" - language: [ "zh-Hans", "en-US" ] - 
- mode: "sambert-zhijing-v1" - name: "知婧(严厉女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhiming-v1" - name: "知茗(诙谐男声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhimo-v1" - name: "知墨(情感男声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhina-v1" - name: "知娜(浙普女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhishu-v1" - name: "知树(资讯男声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhistella-v1" - name: "知莎(知性女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhiting-v1" - name: "知婷(电台女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhixiao-v1" - name: "知笑(资讯女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhiya-v1" - name: "知雅(严厉女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhiye-v1" - name: "知晔(青年男声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhiying-v1" - name: "知颖(软萌童声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhiyuan-v1" - name: "知媛(知心姐姐)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhigui-v1" - name: "知柜(直播女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhishuo-v1" - name: "知硕(自然男声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhimiao-emo-v1" - name: "知妙(多种情感女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhimao-v1" - name: "知猫(直播女声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhilun-v1" - name: "知伦(悬疑解说)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhifei-v1" - name: "知飞(激昂解说)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-zhida-v1" - name: "知达(标准男声)" - language: [ "zh-Hans", "en-US" ] - - mode: "sambert-camila-v1" - name: "Camila(西班牙语女声)" - language: [ "es-ES" ] - - mode: "sambert-perla-v1" - name: "Perla(意大利语女声)" - language: [ "it-IT" ] - - mode: "sambert-indah-v1" - name: "Indah(印尼语女声)" - language: [ "id-ID" ] - - mode: "sambert-clara-v1" - name: "Clara(法语女声)" - language: [ "fr-FR" ] - - mode: "sambert-hanna-v1" - name: "Hanna(德语女声)" - language: [ "de-DE" ] - - mode: 
"sambert-beth-v1" - name: "Beth(咨询女声)" - language: [ "en-US" ] - - mode: "sambert-betty-v1" - name: "Betty(客服女声)" - language: [ "en-US" ] - - mode: "sambert-cally-v1" - name: "Cally(自然女声)" - language: [ "en-US" ] - - mode: "sambert-cindy-v1" - name: "Cindy(对话女声)" - language: [ "en-US" ] - - mode: "sambert-eva-v1" - name: "Eva(陪伴女声)" - language: [ "en-US" ] - - mode: "sambert-donna-v1" - name: "Donna(教育女声)" - language: [ "en-US" ] - - mode: "sambert-brian-v1" - name: "Brian(客服男声)" - language: [ "en-US" ] - - mode: "sambert-waan-v1" - name: "Waan(泰语女声)" - language: [ "th-TH" ] - word_limit: 7000 - audio_type: 'mp3' - max_workers: 5 -pricing: - input: '1' - output: '0' - unit: '0.0001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/tts/tts.py b/api/core/model_runtime/model_providers/tongyi/tts/tts.py deleted file mode 100644 index 48a38897a87e9e..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/tts/tts.py +++ /dev/null @@ -1,152 +0,0 @@ -import threading -from queue import Queue -from typing import Optional - -import dashscope -from dashscope import SpeechSynthesizer -from dashscope.api_entities.dashscope_response import SpeechSynthesisResponse -from dashscope.audio.tts import ResultCallback, SpeechSynthesisResult - -from core.model_runtime.errors.invoke import InvokeBadRequestError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.tts_model import TTSModel -from core.model_runtime.model_providers.tongyi._common import _CommonTongyi - - -class TongyiText2SpeechModel(_CommonTongyi, TTSModel): - """ - Model class for Tongyi Speech to text model. 
- """ - - def _invoke( - self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ) -> any: - """ - _invoke text2speech model - - :param model: model name - :param tenant_id: user tenant id - :param credentials: model credentials - :param voice: model timbre - :param content_text: text content to be translated - :param user: unique user id - :return: text translated to audio file - """ - if not voice or voice not in [ - d["value"] for d in self.get_tts_model_voices(model=model, credentials=credentials) - ]: - voice = self._get_model_default_voice(model, credentials) - - return self._tts_invoke_streaming(model=model, credentials=credentials, content_text=content_text, voice=voice) - - def validate_credentials(self, model: str, credentials: dict, user: Optional[str] = None) -> None: - """ - validate credentials text2speech model - - :param model: model name - :param credentials: model credentials - :param user: unique user id - :return: text translated to audio file - """ - try: - self._tts_invoke_streaming( - model=model, - credentials=credentials, - content_text="Hello Dify!", - voice=self._get_model_default_voice(model, credentials), - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: - """ - _tts_invoke_streaming text2speech model - - :param model: model name - :param credentials: model credentials - :param voice: model timbre - :param content_text: text content to be translated - :return: text translated to audio file - """ - word_limit = self._get_model_word_limit(model, credentials) - audio_type = self._get_model_audio_type(model, credentials) - try: - audio_queue: Queue = Queue() - callback = Callback(queue=audio_queue) - - def invoke_remote(content, v, api_key, cb, at, wl): - if len(content) < word_limit: - sentences = [content] - else: - sentences = 
list(self._split_text_into_sentences(org_text=content, max_length=wl)) - for sentence in sentences: - SpeechSynthesizer.call( - model=v, - sample_rate=16000, - api_key=api_key, - text=sentence.strip(), - callback=cb, - format=at, - word_timestamp_enabled=True, - phoneme_timestamp_enabled=True, - ) - - threading.Thread( - target=invoke_remote, - args=(content_text, voice, credentials.get("dashscope_api_key"), callback, audio_type, word_limit), - ).start() - - while True: - audio = audio_queue.get() - if audio is None: - break - yield audio - - except Exception as ex: - raise InvokeBadRequestError(str(ex)) - - @staticmethod - def _process_sentence(sentence: str, credentials: dict, voice: str, audio_type: str): - """ - _tts_invoke Tongyi text2speech model api - - :param credentials: model credentials - :param sentence: text content to be translated - :param voice: model timbre - :param audio_type: audio file type - :return: text translated to audio file - """ - response = dashscope.audio.tts.SpeechSynthesizer.call( - model=voice, - sample_rate=48000, - api_key=credentials.get("dashscope_api_key"), - text=sentence.strip(), - format=audio_type, - ) - if isinstance(response.get_audio_data(), bytes): - return response.get_audio_data() - - -class Callback(ResultCallback): - def __init__(self, queue: Queue): - self._queue = queue - - def on_open(self): - pass - - def on_complete(self): - self._queue.put(None) - self._queue.task_done() - - def on_error(self, response: SpeechSynthesisResponse): - self._queue.put(None) - self._queue.task_done() - - def on_close(self): - self._queue.put(None) - self._queue.task_done() - - def on_event(self, result: SpeechSynthesisResult): - ad = result.get_audio_frame() - if ad: - self._queue.put(ad) diff --git a/api/core/model_runtime/model_providers/triton_inference_server/__init__.py b/api/core/model_runtime/model_providers/triton_inference_server/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git 
a/api/core/model_runtime/model_providers/triton_inference_server/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/triton_inference_server/_assets/icon_l_en.png deleted file mode 100644 index dd32d45803ee423f506fb3dc021e118ceccc0cfe..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 79751 zcmeFZ`CHQI_djkq*_7MN)TE^8%oMq6?zvY^nc$KO?qKFZDr%x=xHqLvSz3sY3!0Uh zxN9!BR#pZGnEP%eh~`${65{fC_Wpdo)$4VA|AO!J=DN59evos{eeQFg^LRXNZeFmn zlH7lIzley4r1kl~9YsWbtrrp5#rjzc_#}ptR0I6kcjdf$gowzVn_r*1LN1dwfDd;? zI9i<%sbtE}0dIanp0+(LB2tqfzUi|^WS615_1~wRukKoy*!v8Qi8uem9XHJVHRa!b zMR&;z-nsSjNpDa6eN7(0!lBV1_~i?~-ne`1^-b~mpov)f zwKs9gf*rfi9@)tSL+X(Nd*PKXxxd!)B3NeY?d!rk)n|4XCbHH$7Q6k6NQ~nEGm}fY z%NGSq`r+~HiN~%V-u})G&IdZ(-7~`VnN>(*2hQ#_NR#(y^nwfZV7B#p^^u$_9?eYI|9EI-{AHJ4C^dDyo zA$dJtY=Saa?Aw0JLH<%27BOLP4!NIF9K?}cR872GK^-^>t5h_^$qgMFnu>G^-CXaB z@NIY-D6SLv?#qIe|A&J2tK4@R$r z4vm-`9Z`ZECkPXD6VFL?OGQ1O<0NWRqTmg0Kw+#eW=n+9Y1e?NLYFlUCu9e2Dt64p z+G9iu6Y8o!e-1+vd0o=~zL2~43(P;`=l`_e**TT^Yqn*~CY|ZER@DQ?CH&W8FjAGC zV9b&e%t#%!;8srNJQ1(qfc?8=>n@PM7VPN=En@Y%62hIYxAaR6Dgix^?nDtvF?PAxLx{ zgdkX}xy(pmNS0)mB`2lC%(93pg13uS=g`8ZI)RIdtwl{Ugcmu+o|UxoF~f7Do$cct z2%>-E=EChp|PSk<2@pg6G^_&j{Q6wR;EAF!UxoFiyzebUjCUV#M^nZSWG6aWvdDaHl>+`n8YX>*lKe}qjcMJce$oahZj*x ze_e7YaH8p0rssj$=wOUga3h^piFS4`msY1o?bMc+l~?TBJ^Sm?^n(BTLW5Tq3`F1W z5!VfGq#8Xy+-q^IkiQ2uiaSec8K@X9f+08EWQI@>xc@Oxza~hF*Z5P-4pnzYot>}# zwWqQ*7xjbFZb(((_7VoJu+Uf`+UQS_i6G_R-T(PTUsts-jNzVhecsrrnE4bZ94bR| zokz;-Jc4&l!DHmJB(o0orcNA%GbnEy3^gu+eus}doyDr?_dLhy@2SpGeCgr@n_V|Q+qJ8p~6h;Ag49yw*LBDkN7vwuq> zJ!xIK{nLN`*Q7g+L6-I>Lr$kiW#i1|Z&F-ik99HKaaN5U6mJvI249ifdw^^_-RV+B z?&o}vp65|;6fL2Rl&t$bDPegD@1N<3&a1*bX%t&OT*G_jL-4S<}AxHm;Bl|Ydg*LiMA}8%L#5nbK)?pvX2dH zVAOx~;CNgs^;4~lM~&%v_k{q$n7yV&tLI>dmB?6G>$#DP|7x{RKoN?f$Y{_JPA=SQ zM;C2&Z0j3tu&bL9jUg^`7WLeTbOTcjHQKA8+}15x>-(+_R=nplugMt5lISYj=Uu}t zN7w#)Z{=Q6&V^t40?&T>Aszd-8Lq^+EMnz6X{?0y#rQ-6)6}$#{wm8B(R{K>&Jf>8 
zi;9lPGA%`g?fvimE=jwnKZP{&Dy^1vLshyIm+9(^jWfe-_85l%nbK8F+BmJ?5Q@gbU*N)u@o_YluDZPe=VJD(zNTeff7Qu zI>mPa6)o!G$9E<7?3s+YF@EbL%8#iz8yCfnD?IOy{h*>**XpEa! zoq4nTdNm=+Y1@(4h=#jld39(GWsO%aq5Wr-fbt>b`GLR{X$z^sO>|z)8Tz-ZHbQ^- z^)jLO;oa2T1>!n5pu!Z&rtl9S@ z76S^_>+!EEtB-5B({EH0#8ttd-QpZJSmaR=cw=_#rS1EeJ%)9f^ocT6mMV~Ds(ZT< zejD!jck6}0$yz^`i}U(L28;)sRXMNiO18b+W6JVqWC}m`LtelY)ZMa!EpOAH2!)21 zsnGljy;5ytl?&KJ>P;WLDosJjf|qUS^AhNq^t=#7)t-}FR?Gf1#?~ox_~2oKu5iYM z#E9F22jA-@pKO$`tMEo$-D*zczQ#0$Q3vMaxfNdP%Ttc&v-LPe;ADp89mhJ!wo3Nz zK6mvvS1goAOcrdythsF`zAeK&*+IV7@zo<`QC#Q zo3-o`umvJ|WPID!*s^~iE)9anVaE9Pr7MpdC52>9_AfLpDW2=YdwBiQoQ&HW^>ktg zZpT;)oOBw0ipcpVw^)RmF<)dfC_GCI8!DpM&qMd1n*^fS|vJiF0m543JU3_#s3g;OnMnm|tG>E48eD8SR_=Wr|ReR;J^*`(5 zl_Z`9dA0V3mSs2Q+vPIQU5h!Wfj+zDD@#HPy3LZQC8`0Nn(V70aJ~D+N zEtwmUii;)2X|9m>;ujRp@mF!nDWNsTd*pI<7kj3!8WbUxYmM54Z({;)N#$;_Sc6+A ztm)pQ=O+r;RIqv9s36o8eTgdkevq6h<2X z;)H2y0hSWC`sr16$0f%A=S9=H@;B48TY0}-eAi#U^~FGT-$Kkl^8$kR4nsNX6xr80 zx#MjNHnbgKH1>k7hNjl^s&noCE@cp6BJrauwN~c}=U&I1jLa-Nl~_91g4%yYp;~j^ z(}7nOnZA>7f!Bv2+UD?b2X@VBF_y2S|J-thAJczuzlO5=_md-O7Z7m`r;@|%>apXd z_&INQUPl$#^lC_9DnBN)ZSMm@i;#fp44_-2E>ai{xl{*=wsBUyLoz(Es8qED$ z+f6ITJlM$mb^w<8L1e)rvsYy#nD+*=uHTiY2CdW(1W}QYb}t?SCkGvTsr^S!OFn_w zo&opupgspriGRzNXGso0oyR4$=uNpUGv*aw>?qGg!3jo*ptR^buUQPTEHw+eIy!o~ zI4sLlxR9J}qK7XTz~twsrv5aWXc@e|>r`Sehd69ZpqDGJGm;jP zQ`=cQTWv&e|21jyL_Is($2Dkwat`Zv1n8T|{}XVBwu?T;ms~h6Ib}TQ3RP*9=*5Pp zZtGk|R9TMkPaChXN;6#??B^UK;*9slaICZYb1lv2DXmjGhr>6YO5ziGq>tk_8O^4; zq+Q%%lwTkUv9*T%L$@t)*X1Iq*-k&$XX(UzZR?fBLNE75 z-o3~aW<*%B0=e{yVQ;ZlmwQ;Ef7>^B(V<=B_q~ zf{Tt9?+z}kC}MC`h_~67i?)_O5Oy=bBzQ6dJ2mN<^UvK(pxEb(XQYJIsXiMVK1#xM za`ADk>_)IS6yb=j!rk08tH9uD+e=Mc+`(Dx`4-KK&t%h&c6J5=eVdTwj@v0u;T4gy z8v9A_Z>!#>(A;~3qn`HNG)iCH72eCdak5U`g1^Nf%dsBsb2^%+il_%W`dzFYXCO`O zvsxS2l|Fhvsi0SbrL;zrR&XtN`9tUU*Kt)hLZ@~251ZS9zU%{s*>F!*s~9DmrIJLe z=Yh<4UMQ#65f*#tChPZa@JrIQ5A;k?)K&K=?{-xyEqla;dj1>q&(*g&d3cv)HCMU^ZJ#{m04YJYBoFmivU`R_5W z-p(DAzY;8iFPEA)_g?=t$qb^E){ypXW6~&iTSuuO`^+g4=3p_sEq{oXC@af9gK;NB6L-uS_vhJVdrd1R;wmT^^C 
zoA%U|t6@Jam}WxrLCw^BaK;ZCRz&-0)<%=q@qUhtIU#O1i8BOwL25`wzA;1V?xx|Ti^eX%QOzss_0 zGh-@i;n25CO4?P}^O!Y&ArrNzZRZYgM__99<(XTyAC^5}wY3!a?Wp7GJGRG(n5Uj) zB$d?*ly>8mqAcpbg<|K$g?!^!{A$CT57#)DX53b@X=X^E=cba!#}Q7Bnq7@({+kP- z`Eg#MvX#A3E#sd)jjn#WgT`(!tk|E(v=RR!=FKSC1G`sl@LJ)Z2Et!wzoeCdwaz1} z1-{@N7?7N&Os$(L^&XCwxX6LlKHLm5*fHx+-I452C0c0g zyw;-aOjgkpH#zvR8R^s`gnNB;Yudw(QU72#2HuG3KkPSY2&0GPRYQc;gznLhU9&;{ z7A6N92t$XO>f4~(4eWhjB0+)}XLx-zPH+W3in=+2;eJuBpWDN@LY@|Qs)gAOEpg?g zBTH1DRbDwy&BJktaS!}Lsan3?`)=QfC72$IEJG6F9{bsTkIhD&W3IXGW~Gfs_{DVh zcE80_FRg3RyF8pcBH&~%7`US<7kyu~B5a zrRweB;ewru_3%tJ*hKmA$H7Fd@m{=8VR-W$p_;Pk6wp{z;V_E5^`~9 zw2n;ymk>9R)UiI5M`FVY8Cuzgn+NWJ*Odhw-{Y`il&lp0;prYw^E?9k@|JN``MkWt zQjTncJt>O3Sxx3MnzPhAF$vYRO#Ar(fm2&-Xfrlrd?mfJ&e*dI683UqR_3#UZf?7$h~y$ZJ1QX_ALD99LacYzzNk=buaQfbrc zcPLNTDLobEc1K>1hXfEe;l^G6>^SmDL{M$}=31*B$Lc2s*nYn2DEH0J2g@q;bKzQU z_ix5ZaE7CJ5APXw6_x(w6I@|~JkKV=vX<+uc%;FmxFQ&*rOHCHgC*;!kBf1PdGQ6; zmfH_oa@NcUt>?W!yR#5^iY3e;`YKDho|icAa`RnA3nzNanb+wpSP!Jsc*R}0>T^g_ zk%%PDCwdcBpYLk6TC6`bT)3u&^*0s`)lw1oS$8P-W;JS4p0aBKgAR@0P4MF_>;r|K zjN898zSU##Gk!|a7{Cv=J$x*%d17>Y^TNUFyF5#bG}}C9qwH0(iOl7aZ=Q*45FB$A zFt4*f9YZjqQV8M%PX53=IxWk;f$SAmMT!f5!jzd;okB@+T-RHS6OFebk;+rM8P7f! 
zatcxj^uBW+``z=)))3jt3`Aa)GlpQ3b6-o!o?h>hD_L-Vp8hHqu|4Ni9e~{~RA1(1 zjm7@-zz`dsToLU?wE`M%)HZgbj{`O&E*vr_QgM#GiXV+EEB<9=_(V$)h7;=088XmA632jrrIdj zkvmGJv79Q7Y=7}9BWblK1SI6b{TlTH?>NsX!2}&+cut%%CBYfad8OvUc|8K<37VNN z!WZL${Jav(cfNSZG>pzG!&3`qST^qQpCUt3!(MruBa3 zsC=lVsyk_$mlm1wG($YM54E>Sv(&PY5<=a2aXII&VmV7g&jSxU^$R8}9Fy|~UZ_ReZkic(%CNl=x z4x9We2!Ua!pSVp=g?GiY&3e`e71OI#B{OD7E{ z9_8PSA|UVTjV-|EjiWWx=G`m8DL%{(^~n3?iGH@`ZDaV&z^ggBonbfMlTjwrWG#3e zxWbqf2+M{wY>Dn(a9-3$q+Nqwxjo}SdSb@G zT{wuB^n#(!ipZ+1s|rq0Tc9)eO%)LU24>$3_Zo%f?0(pFu2k~d+Vpe*Nm{x`TxX>| zo|jam2{(DrBcW+0H7|r`d5wfJHxOKYrqYbCv#<6qvx^K#P>|%3ZWTi2t@tL z>Ma-rz4jrlJjnlZ>oh;J+Wr6HES`vjKuviNEw9$J07nB|^(g^4zTC5*!?^On3`)Rn zMo>MWE=B8G?QAGm_r;9fvgFRA$5Imyu~ngCM<$R^fb@Iq=v2EA4PIBlo;L2fAaW1{L zmz?;3Kan$)75A6O!g*n_ijm!tH?J0Fu}9W9l^`z@<50#<4z!!S%Nqmu-~Z>4Wd5kX zSiGBZEEXBJU1OI|G;U-(DC2jLX~{5Nehj8vcm1)KXXwz}f#7pG{rhvnOK>tzKjt}1 z;0u@t{CZ_2Z_<$QMyvD5_SVX{+Yq+Xh{!$48PREEhu37J9`RKUeleKy%SEfpw{{_m zo#a2i#8nkX$9J$}!O{}BI_-WQK(?mMtS-Ly(IXPaWn-R`McT@Z46fQyEg|_{yhV}g zv!B5&sR&FY)eO@99nb^p{&Z0$XO*&WPc9+|&iS1)^Bm-b0ot4Th_j@)%V-m#1W@6_ z1n*l{h~bu?@MeaFK}~3TH=Ki+raJ|!eNaEIww&E+>MfTL87h_N4;S>=Gq1+x8?V|K z)eq3r%GgU7E~@I88|dq}Dgo_G$6TgbLGoRABP+%CaN>{p1E8ilq;(!-FsjY67rXs= zD5tTNcznyrZj6hUK9ksFPX&YRRRm`iuKV0wa6x+*7nQ6O2OKdtTVIvFW%&OtfJhJW z;E{#hB^qyf%3m4PH>*I&s>^TQ9U3jKtc?QYIaJUrk2Bu*a#`Jb2eqEgSpmy%u;Eod zj_bUr4mVdx>t&g)#k5CG((G*AdESFW7~%QDs4PA+H(xQ}nRA(B7Am85(MEOWBpG$y zN}~Wb)a{;USYd5mEtKK$-@kv_SwJ$>=unft+@ot}Cr zWdL~WgJ@bW3&N>G(Da{kbtvp%Ty!#g@$rcqpffZUS=cWuj<{WI1q=xI+A}+`F4QkQ z^HOR?wSLy$f-17{FZvFNU6d9cI(X^wVBg_p^t-Wz#(Vdu7m zcQTj-I?r70$S#fTS}QFxZB(Ffr6$!BO_AXW0Mt1BqCgVpYBHFoU3XPb zEe+F359Sx$c^2Rdn`L+B+P_fDd*Ikq_}p-0e(+|!9Mt?bk*IoWaFuGd9QaWWE8nHo zb5t2_fe*E9@SB+L@= zEJ+X2dEh!uVOxUNA`b4C*Tj>rCoshXdC<8CACXyuL~k9#Qyth-)OgE|z+1P9 zzcU&S>~D+v&MZJ9S_(g76U=Zj#M~C-6)#tq`*0<{fRAK5abk`b1o@qP?dqeHeP1<0 znJ#=EL16nXMon(?&GWOx?O5l<#;;^WQJTW>-ru-p}u zbsaM4gm$DZ_sVH1V^8C&L+zY`bZh+lhewv+=u^Wz`rrbowt#n2ik%{tCo|ff_?@Me 
zK@WynCSHXYFYlezO3gBT{2yB=>CfldCrtAO%U@y71uJ_$nQyIC$UBoO=g1l7H7T0T zeKB1f&vq~F3E;mT5)(#WBbyp3de)=oo;&eB0~n2Er$=WhcgEV4?Qe;(Q)O`#e;O`# z@&6Pp=N3>MZ1#Zd4Lx3D7+3l-oI0~*d$4RlRoZ^pv>>Xu3cqQ*a=5_Z*}JRO)=saN zqIdTQ*VK&kR~`i@Zk2yqWRaDd;Xb+g{sw9ac|u?)Nvt$Z{$;C5r6Q2K4pS4?!5Okl z{oZWOXCSb zL|&&m%FRk4iKfPF?`v@|p9lq+sD?4(xEAV(N*(3c%3no?PPpZ~;3leoZnNqNE%aH_ zB{vcpJgHMZq^7iT{RjlvYBr73qN&hbIk(Uec2x7Z#Evo#qcCOcvvClX)0!=CfPrF0 zUCM}Z3(JRLp-$U^UVxq97(sdC?lS%v5>4-8{Rb$zJPZG$X2tAGPWGhKC)baJp=TlQ z>ZO}Im%uw!?01nJVfal`6!T~w=&B#r$ebX!oWnYw@Jq}+Ur$+6C&;8bnkCXU+dh9L zF&0IAw$uI{_nVD}gfzTx7mpa8+{@WSuRa1t+wQbzVkVs%aH-uU=5+`Q-44mIkPo9f zCG_$3O1|R2}9^8?sL{0x@Xi~H^%|DWLt379NtfOJk{QcR=i zNyIS;#r#~ZLcvhOm=Q%itzeH^S!Gsch-i^P54<=lFMCN#r%4%QS0`1HN-9zjxq5!M zM-p7{a~r^6N>l=KPz4}wXuA|d4fOfNMi~qFzAa(^_{UbZvM;Ai&q{KD^0<<>(ND46 zaLX!xVtoBk#(k5ctdn6e32v~Ip03d^R6-z8&XxCMEhjEvu($O?d74m&?J#1PzZ*Zm zySes7-ZeC#b>ZmXPhwsJ247kvTh~rUgH(ZIZL4p>+?AJFZDd8X*>BSMD>ybqGQN%N zTIg!jko-LA4Ap&C%&SuH-jvhfm&(JwnfKsp*!~@X>SA8?=6TjfxRXu)+G;J&`H*Am zU786pyu;oHRK)%M1}4lLxk0I4`xeRL#bkcY=L7tah(lbqL=#^jTd}HH(_Y!t8&<1x z;!O(PWLLaT0d9x5q&eGUPU^KaSj^X+ZK|_@wid{}>Rn;R{#1Yw*{m-f7jVEOduF4i zq4`I|s0eVP%r~==6pH%;M89qXprPsxI_BDsVUjZ^`Z_+U3LJovJu%LF3)rO_X6E4j zO3Dztl1^R_U3zP0ZXO#x>A5DE8j#c`SWz!9JQ~plirh#LX~^3xGHaJgZ2iNQC1bM`n)J4Pi2?+XK&) z((;TwG=EJ$@tnDNsBM5V^~E!hAXl-(#%um!6QwHASAD3@=MdhS0?crpRqBxn&`L+_ za8ER}J!{e-RZ?(!_vP_+wj@||D_bVAW~OcI0OWY;>AqHPiyzcgW{^_bw+EfOJ@TBa z&J9Z0n71wEU2F5!M>_1deUvv}|I*6S(B+R+J5_jP4ivf`xd0CHY*$q@NBT_3c!%`P z+tFCA2%-~x09#sXdg7GF;%(_uJ53=}wZ66@F;`-o0g72Ox4d36anXbNvbrz>U%pRd z>m42^^VlhWS0vF4svsxkz&G-9y3Ague2dp9`@>(fPBSU$do{eXqG2%e3!#%LTZ@TK z+!g8p^yp@`AZi%wk)^A!{xoPYfsbFiU=CQ!JdC)z4|E~S+>D-xh9ex-)L0PYJ9)7) zCZK(u^mmexjY>sMw|ei;LJiXf;kDsHqN(1(dHelxLFqTmagz_9ydRVw+dUt9mMSV5 znF|8%O+JmeWV|f+W|l|{^O=-vLAd{GmV2I3e??rwcwErOdZfvM>kte!W1oLF5Wqhk zz7J$RY*&-CrG4y?UMnlhYrGVsX@U%2au~X9B#(VQ2g{!bs<1!D6be7{JLv?tOS6LAB#*p6qrr& zb??`wB(-_hd-R+4_DjFEyje+s=wJEfWHMY?%|1s|U}`G>nxb!Ti@MxA}= 
zkfa*ttgSaSi|hkjb!tQkQF7@S>uX%y4=K4)_xdX1%w6mE*1Zfex%1O(loj68U}8@C zYl$`ei}cT5uP1Ng3abn05SL^`N!%~PJx3w=zdrCBuVNR1Y%J_bl2%>cQ^1+uE&=ytzqTnip1#X)KQ}h7nLF+b z6WCJLa?#U|l~m>;r93|i zD&=*BgO8S>F|k#fpT{4C!kz0Lbt7_0D+Y|N_$lvgAj`WA89jz9xktCl#$OaHk5#F3 z?ei>GZ>cqo_;6m$+cef1(LDCW>!fLgsp8)5Pjl`tO=fQWNedPFbO>i7MD)Qa$(UDD zcvA|SG-qC*N@?~hsMfX(=Qub7=?>=LdX9FcBslOGEBUIZCr6SzTi{eRXnwB5B-gX& z?3yMPCFWGO97sduIKD`OY!~`Din;QIz7sEkQMyp`CGxEnd4-CY0y z4$9eTm9DH-liNkY$12-?mQYCO{;gAQrK`2=&zCHGN(1`$)GT@2^ZtX%3H}qSRm8YP z_BIHm+lLL~(Z83Fm4Dkc>#-vb<$Dk~BMfhy6JR0zz;KU&_}BrSG}n$^^+HNFF1)xr z|9-sHsL`>C`d9kvQ2yDEWoS2(?AkSHn4Ha0*a-!fp$U@{|0C`?n>j_?^fne{1k*za zRWwnuvDHw8X_M)*z4#VsN;l;UyJxg=7l$|2JV_KTIu z;>*}Te1FI+X0Fw>1R!`Gc|%K1#rJq+G575mw76->PxvGRkFCUJoA;_;4;Yi(_bnQy zq-BZby7Lbg-sa#VXajYW$OOl^$rrz@5U zNk>CBycJ6)SNrW@&mbp|sUn*Lkx_tNBE%?S0%dt=6PM{mD_Nlro+6;QS+I z2(5v?OjpcF&5yt$bk>{d=^+okB5bLt{13Gf3C@JB*4hx8eJizV&!kzXv}@|C|0qNi z;4&fR`AfXa$ofVl})o zQ(=pFZUl}sCd(u>AaP}7?fjB{J`JQSBTrVs?Tw^iq1yuzSwKEhnA(IqTZuIvc|5=(DA^ z)5{R^dl>GeZ>ugc8xXJUeC73+Zo+a4+r3n`(QkRBYDS!F@nqgKe)4u)5joBW6HEeH zjj0)Xifv04h%@tOUM|A9g*FMpYA%!~yAOJMS2ey53>FX7)X0c6t%TC&y3lc<8_ zuIRh^yJ8r>f)ky1j7TCMiXbxoFneUu#I6LyPGgT$0`DsrtnhJFsR<9SJZ^+L>g2&o zJhbE{L)RYJ3K-kqz2)>p$dViSym1%E_Wj}w42aE2Ylb_y>6wtu1Q%heBjn-e%VK@Y zP;&|(nq1#8APwP9uf2&x*eCw_Eiy@(L;UM%b;KFFK<=GV{{5BpKbSOQK~-U4YtT|S z<>PpD5u){vOeH&pPbu1Kt0Zr{5?#>D%nwd%QhV($=Jz)eKG%B(Of<-@Q{M}Kg!j=q zeYC&6;1bHI3QTIES~wdb|+?Fb2Oq;5%ul(E9^GT;4{QPqa zYYA+xE^uj{H`<)XsX-JJ3<5H6spm==Yt2pou$4l{TH(dYb{%UR+>dH8&P_m6oJSUX zzV47P7*WL^ihV{3 z@o2HJ1V9_H8T%~;BF$b}(v{9YV|EipV&UgA>=P58uQwZGKjQZZx{7;_qXs`+@jz-( zw*s_i9kzKnJ7r~Y4VAiDuyj>Anzi5&tdlfjOJ^}j*^T9+N3CE~?NWX|4hFKxq(yy^ z4&;8QBUYF}1<$f$9TQFjo(Ukte|dFnQunU#^P*ppd`#bgUX{kVtWsJse3**_k0=q?&C^QzWP5P#CQL@GV7f+hou>PV)Ud4Wns!UKq6N(pE5y&C`V@2=cC{)q_z{`YYT4h=(v z)%9W<_0loJ&%qfg*sx}LEiM-zWaJ_usrX33fw7t4ljai_oXkIutQgF%3`!*Tg}C51 z|MRYZHDNzL%f>JidxAz=eg*uz+JTW3Kg(L~TI9 zcq>xi_dNj3ZY)XJg1Qr_8j8eM7?S(k@|oO$Qr=o~7Kndv6xEwYNB1ne%C7TN^A2@& 
zcWL;TSeYp-hNgOwM|&6=Ge-&v0Krtm@gYE+QrA&ou#>!+5P6}6nOsLGKjI>L@+)#z zwoVnN*L@h^%+N%(f{vZ4ih(qE+vPzo!8*)I?`oiQxj61gI#Ia z%FC-Bi#ENDI;Og=#Izm3P_p1%*lSKWh6GTnFGzQvS4WR1PwK~pFKOdi*Cch(PGaZl z3gi$33eY@`*2>Rhao^xi$ppjJrpGQ`l4VFk6!6=6Mx>$7W`l4$#B%$L+jcs*QH>H$ z-31on7uuG3uf#_isghP#>5&<~gP{-wS%YGbfdz$~=HqGF#C}lS9z}gM7oR>I8glkQ zw4Q%L|htmpp4 z42|vht{(X08uLNCpp#xZbPI;{rEK7vr&1rz$92Fspok0>@kK?q`+eaMl$ZBA%+4mu zq~Gl1iWe++?-kis;sEfD4&VOtigHh{FLhpK^>3Hi?RL{DquoVgL1_e5& zXw&eB(SJ6D@HJ*tS=2TV+21T(gT^w##ObK%O8dmo(JuW`zm7;Tzh!r{n{hJOmP#7B z0WVX@4W>D4AD!?&O*QgyvQx$S$2{Dqn>PgxKM;N^Xx0jHYS!QOIYSPzUBPYwNK3na zaW9XQ3Q*f%@Xjw6%k??2jPTh9?FNa>Y_8`QWQQxSK9Z)qcO(R&0OvJ)Pxr{u%5C%l zP`BtAZ_J2QfFj_Pu&CqCV`j-mmwfZ+n1lnyADy+D)MITbdH<4W;`6E*Ua`^A8UBQw zsO$Q<+MBcEUYeR3?k8g%?4Vii(OaQ&{KCi0b@mC)BV2gG8ZIwta%J;`DEptjAp#;U zlG*F!ov*|t?m_3Ck_j5g2usuV(Mx%=bLp&c@;+PhmdP#KLYe~@Xx?X+nDa}%+(0q4 z-1nb$Y@*GGJB@75xr2(@%`HSg2(s5FSg&)V>xN`}F`K+%4z_Qcxq2g^M;?m6F_G2` z?ddW|diwP1Bb$N8V{%RowvBXMf`ls1^tYzIdE78Yw}xA zk~9Y(L=Vr)5q|lZL5m|GVOvjtEsP_%x`bVz><`&q_)32PNSmp@vUjD2z-{pN(Us=f zN~;$HGC(K(Lv@Pyd&JYMY<$~6tTWydbBMUO^F-nS(A@LL%F>qMt8E zHWV>}Z!G~VKb$Js2Xahspb{Z204@pIZ}Gdl}r;61Yx%aZbzONSBDa1 zWM}0TzSsttmbEn6kUb?8@6B@+3)b=A=&K{Y%n({Ct#?|WqZGfdz_5ydmXvLH)Vm@D z4WVpooocL6L@umY8a-n^HD!GE;c0=p@HT2ji4$7{PI|qo@%8r;#VR9L3^wirPg)+4 z11osWzX!J@uj&g&z`JIT*t++*Lh>DXO}K*BpFaR2{sD_ZzcWcSy`}{~Jzveo{XSwm zr6NEpcR)Nnx98&(zxmpe)f}qaqN11W#=%9Iurv%)d#4f%oVlO5%K*Fa>_*LlV~ z)*(RCo0K70&wE8?*3J8^uN00F(;7#BK4hYG4vU?jb&-EMV?z_f<}++#@6f&5>@+ogC~q90OV)@AB+KaNSgGP&Y4eX3J}Bi zRKmiq4bFTJwZ_L%QH5eG(pI9<5e)gHfVZ>e)%%p|b;+jwB&%i{I1d0ehD5LX6+`%} zHZ45`cl5&cZdtJtWd8o@xHY@|K}8$DMQ|8w7sfZS8m#S&CKc3 zKedxcEymn|&^JgAn5HW9!(;S1d!?p!Y2Z&(1~1MC7`E1P;(7hGdt+@V+?F|C3<6(iM074^cGn^A`KufIo)tgb>KRVrHhkc)EuTA4zv*g`*gvb;kC^kcR zM0bLdg0oMtHG0BU@PRJ6pd*KS{^?=KXPo%C5-48O&OQ~2kI)uUa`Cs}1jh1ZOV#{e8 z1IVsfNk;20H_7@qE7z34x!)EE|8u?Kkwc_mRF3aLK~`%M`%kb8@oHsyg;}qP;-Z0~ zbZ+kgQUTP^uMHNatZp^1p>vA+y+Un$j$VHV2W}!bL1(hmrN`z$bM3vBvu7LCjQ+e{ 
zrgFh6J0t>5EhlDDJ0mA4_ZB&K{US9EHtbW(L5P4$azFjG!A;?@Q z)Gp^izXA3Nwe1J^%s^_r_3ZN?VuAE?-%{(5tejFX*AmaMM92 z_~}gj%-38HjFi?_#W}lj16Z?$6Y6Ho*GvzMTIWnJ?wl{V!J+$<%fob3CW_pL6s2;} zSE+ycL`0=M-;~tn1QUxoAj7R0P5NC85)bvkiW)`~cCL@7{#hTTb3U@DjL%@C-xatU zuLV`022J=iCuEND!(^<};&*rLwwi7N6PKs&$eD&^nUw6nT_rioGj=Sw$?%zrByHZ; zi>PZ4^ANV(B~IKePGxMWzE7%byQ>Q$_nsku{+PlJm2{+N?L4F%rJq}QYXptBBG&!s zsn;0dUP+e!mD3xTt=`qq{T>akzMMFW{B7!Uy9@dZb?gJiS6%NFe(z(U`o9lv*#=Io zTbQEkoVNS!jICEjMo>Sj$h;D7Y*0PXl*8);&EZu~5{3`#83eB>iA$4|goxHX)8>tr;4bfu(oKFZtx=^(`bHW+^V z1lBe`Ghh~#v{n|GK}(49R6YPA`92~&0$mYyMNTFv|2Aq42(5v0E*ESeor^}{u_)%# zt9Pa@IIVq}?^yV^hh}#B$+~CjakZnWXX|o#XFdGdc zvu{3$!2L=R+@sbWj+ZIW&T>BX>B@>?EFdNs1%|q5I;Rlr$5MgoW9#^ec_2Zx>vxq( zR_rqw{|apYfFp+U(HiVpY%$Sd+M#I@>)spAm1fo|0K^xYKJ{C!);<7D7nh6i-pE*&YAo*F2h1+{9=!L; zXNNr)$juDNY7MFL+`9SGt2L36^gGE_MTv=ftMzl@(SWeP=m%FWmEiC6W5^?EXb2f7 z^PQ2`-w8`2K5ic1Q6&D2FwX=AO=j*8Pz2x{Yfj~7A3d?xQ4az6%`398E^V6|!ZD(# zZWYrCD^z7jZO7K>NgzOzm2ZmUX067YmMEz`@U=C+bVk;8r%e;n7=Z04WEa&eGa74R(_ow-#lvX|V^FKEEb;poJ9>V0_(}I7Nc3d!a z`4+lUZ$$0`sxxh*L;e^dUyGQlr8=U4@(LM_LdNpDz2X`6{152Wurl`goJF)Ox$aXQ z7(gN0#e&X9e0k|*K_PMRWs1D{<4NEH{jGQJ2D$O{hGtZ>hd2+B#op+ z*kGIp1F-FiaZ_a!B1_+=Bo??PZk8Beej)!+hO%LmO+jg<=tpBkBcwz6o20;7A)yM? 
zYz_tEa8c=U1|TC4v-HuUHAUT-G1`2MoB1+eJRDRJU(uiY8Q^ zFY)s{XxqaHx84XYkoKH@DdIW34VI?9i$PM|cJkR>U~j2AGZ@s=_a%wEcElx!e=FJO z*rIxQW*0GcT1SM;?#skkbu8TzU^d3flPR6R#g0VIM;vbN>*|fc;X;2$)aW=!F!?pf z4y<+HwpC?Sg=1S|FwOkBlU)JNT33Pfz@Ev64Ny*2KdN8o0_kFgVJ^g0GMiVek!~Fw zlQ@h&3Jp~>cxo^4A#65fWqG@1|Fao5oR^0yNY+^W7q|o6@OOl1?}D$`D)4&)%9Inn zRVp8a7cT?XAK5<|U!zE?bYghqO}k3_2O3*OTrXsPus(qP_~~}N_nSq}PH`QdwTGMW zfNYw^(1jjPsc=*g8Rbj@1Fe0#0n6T`j=s@kwDp)4LbI4M6!umgF_`lCib#}Gb?L_N zgTvQ6)~0*3LRGN|RISdz=EUYcC1`@8K{0m6!VEP#i=+C)PpJzWM8w8`a->OmsAaHR zjT03`-5krxyZantOz?NN#K*LbtnoilnWNe7TV4RPkp9k%*3!@lGvB$YUMreG)G2*) zaR5S1^?TM0+<3TZDkV$8 zU22bSl}bG$2%V{$!_lalZBT`S7oqarI!`*h1JkuFJbIbw;V^3Indxib#2@sba8x(s ztL?qF^s(_U?joiZDfV}kmyTe@6#G?#8&57Dn*cztvU8&XKV$EvV%=`u7>`wj54q?= z`E0@2$fd0g;e{+~6Qq7Axm!h!S_VL38pLH)3*lA%a;snX(lGv1GCUMuj-HffX3_`3 zN;3K}Yg%I*s{jq!b=`~qhphJuYjSJahEWt15LB9M1Z?!CR4D?Ai1gk|Kzf%NLkOsV zY^0YES}4+cFQEhh0YmSQgrfA0v;;zXuf3n|y?yTY`vd$)IA*PD)|zw9IcF{abQgdA zyqC1+f2w1tRFD9235tD^@>C$nCRKn}QqCpvOogCMbfkDBgo$E_*}N8-FXFACw2$qK zwSw`f$ljvBXnlxujN+Nqsd@~=(G+3@ndw)aMYgf8m^>pwHMTOv8XF+sF#T8jD0bqH zxA_)6RxU<$d(!hZlrH}Rc^(+M2uy?aHDx* zhx$_NMV{G(6YA8n7+!9xkCxc&G6Ne4?;&?`*w|*P+OUoMqX@x1WLI==bg!4)KRs01 z=1V7~zUZeqOZX+pgDvqA$r*LbBs*fR)!##drO*okiJB4kR+jg}6}S!?6?F9uzr?L^TniR>T5EF= z2g*P3Z(JnBwX{3X@bkD^@1ceooH1EeFvG-C(KBbQYd<79?P)6d*=X#Yc1TE^J=X{=v;)~Vg``surgd4KzYf14w~ImnD=t1jGw zzkFjPWTgUATJA>|5_(KGq%>7Cxbb_eqdgq<0)0I0GEe6)WRRJN)4fd&NtC|56*Zqo zBF-_Sv{rG+o&1L-jCiDv*^C3jsYCjcjXd8;b$kLc)rF(in2rty&V+jmx#IQDIFVb) ztqo7kTq|T}ri7zht@Z=7h07P4pBYRiC5NpATm_Yk(=Ky=KV**YT zXzY25!T+hmSA@GvBJ2lI{IsP-X6bFbHIDw`#rtFR=~IXh@o`l^ID6P{JW;myMKnY_ zD0AQqhHHIsr&uab#L<>b*xfH3ydB}+z|(a%{AH?~X*FiW{5WIq<5J+(ZT-DdU*v_d zH0UoCv;-ij2^^SDlLG2mPi0(IU6hgn{mN3kUDM4c+NKW-FLh)sn4l~t;-+QEmz&p) zt-4mm_U@g-EK6$WYh1{CUA!#@n^bidf5R(s8*87GNxEIUn@XVjs<*+IAAC5YRV7OO zSNEd|+wYHGVdJvxvavgXE zr87P4dME{oUMAB=l0LH-;4WFt4X?(hbwY-ZvH(>_v-|i(P!yjkT6?c&Pk!fVpk0(S z`1Lt#w(r#8Xc#3a{o^RMl=p|@MB8HAGK8?`>-oZ2C!l&h3*}9SYqkL};RXQVWh0V* 
zjaIM!-k5NsE9X`Xgpd^B9Ys>??BCy}9(OOZZ26|^l@Vruj-l)#`b#cB9#*2xFC|cw zi|UmYbz@d*7V0mCX{6J4$Z2`8LZaQBN}ms5LelO-?+rcfft@MPR=uL#U+m!}l&QiZ z-@4Q^bsglAQs%$>T-)a!V5y%=~jjN}jlX zlDGpvr0DfqYj0OdvVMA6;B0GM?-`ch!#%dvPOMu2RFV*pC!2OIQewY$D5XqL1&+cAPn>eH1_#}w}y%))6G*#v#A`sgQWT<`VA1GJ3cal z5<5S=iCWW29eFb1ebgJy1Sq@mMRj1SSO)vIL}$W-3ms4MsU&g$##kzZCdfPta`uo~ z%9b8g2wQz?^@jpIJ<{K%%mIE6O!8&(-Z_YP^SrvU``F5t*1n*9lcrK^Lo(u#P z+VqrqgVI|2yJ&NgKHMD`L|}e%>F;BaeG)`zC1vwvaWSjt6lg_!$>ZD_=}&ToE}a6< z(q>9le;ok)0mfyT0=>k~2Vqr_G6Bcq$MwIiCdXV7S)&4w+q~G*z8+tPNs7R%K>_|_ zjj6OV6}Qyw+LA?5#Zy{AtaMN@Hh`Uef*8m#M;-J7`e;fDB!qhG(wEK=N-;Gw2d6f} z9Qn=0BXXq zbilOE)=;FIhW*chjql5>3EPL+aqgj!jwne!_aoCa^EH7S%~*AB%tPT=uV`8hR-h7Ke7@VRys#ew5!pNcXnoC;G_a+jzA< z_a_;9;;G9n`pTp!PF`Wy>(=L1LF$z&zy2gIQy6h$C$E8dnVd3DW8|qi#b8mD_dxm_ zz41ZVuMq5hTCQYy(SDg{H9M6?86SRwg)4H=eOYr54?yHD@2tjI6o>TEv2?Ckb0!<2 z9C3KjAX4TnKHw@AXeY=k-JfrWIb6Hu(`BeXE4s2*8^l1qG@b3KfbEtmG&^&$KQoaj=?j=3UMX_kZ9rkhSKfE*X0{JQ8X{;dL?XPb2)iWCv*0AGDluLG6?MHTKYQ z%Hl%1N;qN}%^@2Y9JT!Zq>y>~WhaY@Y@+!Jm;>fD@?}f3aB+;A^-pm1t%{`C2h>>c zI3HOH4sc&)p$_5#+N)WHksjevh!MJYwcYH`&0eJ`bzRIBDd2E(>4#VTN^S?2 zFZkCR{jHR?;i_#~m-=eUo#OqEl8B|u3Uz?_QNm%UM!A9lZLifsU+>KcweS-$ACd$8 z%beAJ==N?*3z3^<>GM3qQc&2vo}`&6g3$gRsE`D!fKQeipEO=e+)QorLU@1PIu7qu zZLFeCM7}&`a!D;u@P8Bmj1y_P>L9w4%$D9Zp3r$N+GMZ~mCb1NmMOEz_>WVPZNAEe z_l+*8ckD#WtuIC(0n ziG-S=>BQ~1eKZ72+wy^kk=-#-tTULAPSo?U&`Pug`7@(6Xzot-0H_*y%B56(F#oLkZ+tjwz4L0LyRZ}*RZeJcF3G&N6ix2tTK=PNvrD| zWtEP!?)K-zL9aN-JU}jNzc_5$BzYbUCi+W=Zkq`U{8^6lFzBYIfKf~q&~4Z}2C+wO zrnh-__ADxPjQSokxz^*~Aty?5kMRcZ=y4w!wLqR?o{QSN0od}ukM3EIJgehUZ9|k7 za6tcQ{RuQr3bQKK(_d!?WnAzZt{iGt_Gjtu&2{urxX;Of{bWJWi&vM{i^!yVwC+Z> zk+vtOc>^E1s0bzC_#-Ee0O=ov>b{@1juA+A_7F}Th2|x(K7whlQaAlLQFWuqMu9(I zI0k!vHVV#Yr%#=h?Q~=f-=k_Rurd3yAm2DS#J^-cC^ZETmL+<6Jy8_|;vRo2R|~|h zOd}fF)~;rTe(YYd-rQ0$`g~NWXi-U@k4x^I-++Hd#OVWC4q|I z5(2cX{8*G;7PAfVHzX+{*MM^Cw5Q~|pmbf%rNEx-^Jub7GoMvofVHj)#2k6o@03zy zLtkOsnD2pkQHDk!k)KU6WG>-z-MjN$GWWk7#7Wz{LxM>AiC37Q2StzuV0I&n58z 
zxjL9DeoeC<>$9i1cAoW}b!pv?TQeolKOmNkIIoDculInsr-%qnz-nYSYVBz+-Z*nB z^-IelGAtxhkM7Yb$FsAl-Q1J?%z!tC@4nqhPZr=AVW6vz_vM<7IG(hGLOmaS{1zTV<>YnUpDerGdq(Ua?}91)S<3SJTan9bypK{ftpb$ zR1d;lZ9*p}H|NmIfYB?e0d(Tt^>NA{+kCp?tVJ49tott1i4H2hvVXblCdI-2TwE1& zr>O6FWq>rDTyoXWu(Wvzs7Qg!<<1cTn6vdPexB(0-v`llNdIiHgls@UEFGF}t3LIh z=TpFHiWoqGw%@x`*ON}?sg62&P3oRy#!!jK+X?lL#g0q1Hq%TCe2aQ==e0;dNL-7J zK8C5iOEiSLDfa}1T4b`aj^asI3$=D+p$pUe3tg;t@Q+XYLoo z`X)~3Z>ODW1JPmhubxLMKqW00`Kt_Q_x@d?dg3IF%leF1^6fRWePO;bb`TA^8ENy3 z{B46^dB`J(DmIXf!BRZ#SQ zC2WQTEk^4jMMCorx3SC{ey&;lbUI7VInG4?ZushwCX^y7JD3ig#sRAL$f|*foYZj? zvBBf`NR?@&&MF>I0>SpQx&YH`;iK!=Sd16M^^Du&~?gw+o+wdzYDRWP&_P$aYj`mCIeV<8$S89qwaDaumbM^?_viEPMEk+bi7 zFc-jFtnON9j)Z^qVz-sGhFTyC$Wz5=ZY0k6a+H!KzrRrbN^%J>s&6Ut+fPRo`oTU+ zsC{Govtc-YTrNM7B1Vi#JIg}uXmuaqttf1%=-^`49_I&f_b1wt)mHxs> zn$^RQq5<%_Zxr(4huH+ZK!|ysj8YVHy%eB;j&IL^ub7G1JDxw-GBa|Um4)TV{9-Jf zeqPR;urgbs2mxdZ&4n^kH!k^#D#~Z;5=N!bw_cA@^X(vK-&sp>utjVy#wo9X)Ne06 zu!61FKRHhOHPO5hx>aAa_@yFC`YOdBt7_E1PZ9*v?p-hRy5BmKQ<1L;BNKccHkT7;}_z0&6V=Id&Jq)iW~2W&vQ z5glD>>h2Ed?%}Ev!Nd2o_P!mIFQ}68sC}tVzb<+)t3@fhm9kp$^8+0hluQ13KQfsQ z;$p%_c<8qb`cK-4p{Al7YSGmHIn>{1#&Ex*!ix7}J$H)_P*M=Raz-CD=8Vb2Ywt8j zZ{VkcvQ0|~lh1iuY0Ps=Qbi0)Fry+rf#$3ejfMv+h+?miKypb5IBZ^b85xsdOV($S z0!`c^!Pbxu{8z65T=%0aLw~*=Epa>ILDXR4DL`)@9ON%gweZwx+wCy{0`CFKD%NRW zbmLtX&==qU%;ed;wXBfg2y(W?gpQtFMFUo61i#*g%;^5FtCW1EXv_|%6lCzSzsV?$ z4=uR|wCOysH%s0HMtq1luH@RX9`?R_-z1Y*KNs-Wen6I2BDhR9osX`^goFS(tPK{CQpaY7Qny(>nd~XJ_pKDKBe#g&WHIfsOCqy&e$>vL*X6Uq6Bd=G9p`%E|?5 zQA`q5(P`V0{%q$L?E-d*_x_iHgEdENy=BWkQ^gH9MMX+`2M2!xN?O9X?tlgyksDaT zgU8ANnUYQLob*!fw442h2gb>9}mXXY4LQOI! 
z2-xZ;&)R&-exRo|c=$k7`Pco1y2W}RI~p-1p}+Y^1Ek-FgA4$%vb*he#|hcITScNhfceK-r4NI zH(77prRGZ9gtz%PVuVJd4HEsI?K00vfw`0a*XS}<%j{fnxkf=m?)H9l?*ua_@!LXZ zs08|6qcWUd;KC?=dVdyVYp>r5jYSh!w*|m{2zUssrd>Z zSUft&suDuwZ62mScQ*-0lm+8HQR_=A3E0A9#h)0Mw0KO*Yd(<~HwEM&Lsb-Iu#8wh#oK>V;s9V?_DMSN4l?Js0>M<9j@7?~)^v)JTu^>McS2tHPzrk{qGHRCn zse@2uj?s3KAPr%megY9vRuO*s6cE;X%V%CZmkT=cYZOJEfrLnd@4_+v)xZhZ`~vqY z8P!;{hdyZH6>^}wH9~IUyeC%(b$`-aw7>7mw?gfgrJe!V-t2_vk>vNEndm1b6ob{- z`A+s}HBw?GX$;TI1w8;)DSh@(Nbsb`@cRc&?{MCTE=!pEu@7dwB5QZ;d4J`QQb*T4 zf!}oR`uDR1%91qEGKeyZtJJv9E25FQRR)JeM>#2-^xzT4?MMcfO zY{UJC5&zsR)d(8{lwSY_y=Lzt3n*$VTA5UVnSVjVKDmT$5-CTbr)TvA+|`+dJLxa; z)_Zi+Us6>7C_h#_xI+t!Sb6OQ=r^CVZ2thtDFW!fPAWcsZ86n#BDc=x@j22r%Fs!@ z7@UQXO=-i=*f089@Dk5IYQ0p_mRIz<{Xm{qQIU>;<b5j;U8+{dP_>s#8yIV$!gSZcQS@li|>&Ca!AePG6eHN?#v@ z!V&y=B)~#)R!)FT%M4-kZPm4 zQ}r7A**6OVzRq^7d~?5IUxUl^mjx&rIjuNVGwVZcs0EDRgvDFOhjyFlX9L z*w;t7ThSSn?$s(!i{A~}Dzxkr=dU^DPDdz`v^cZfafsT>B&`=cRPrc4jW_Xg*R`i1 zpXlJ57?i@;A@fkKqPemw85wNH)=D+(*&Gf1ntXV+9@@Z!UuW-Zd|uhAnofZ5XL9bp zrpgOp%W}jGqb98>Yvk9&IZKpZzl|b65B0d8XXf1SY3B%sNR>;~mCtg#38`u%%zw5~ z(@Cy;b0Q((7(1TziesKfrQq=D+B6^6y1PoXR$E`E)T6%D)H-wLD+iTo@;a#xbEG~R z(N+j?Xs*Y9hbK1){1w7&4d1LgDP?>V(tqx-Tk*b+T2X|0EYxHR1PnM}o zaz-CsP+)}-FczumsJ0?m^%jZkKR4~arjs4SZK;&qzCtOIg)V$DL+4I4VxQN?cNPXk zrL~M}NpL$}*2ngoW%ihd;Kz0B=hQMCt&B$Kt^_fMi_OmdnjsvrBm`_(Wl}9iOtPw= z*1`|n@WcQo&;->gZO9l0*9L9Yt&p%o%zkqWr`yP}Trl^EX|p!wP|!+Mj^U@$9OJ6_ z<65kNB-fd|A1YXVNB7us1>%;V<5jxQ&FI>0w_-&7C&c}Kn?1FipJO3u5tv;0N`hxh zFL#oKz*@9RXw>2;t21eJa~pTLiqluVvs%Kl@4anUejMMJB^w(Ak>z=~(a?+7(XC1u|gxfQItA_`|JtQ2a?WvCUpZp~c(^i7&jQe5l| z-R)$OD7vl-^zO-~t&|6zIWMJ%IXl5`C9CCWX>0##0auI^;M&qrS65FhE&g$6*}ZrguPM9csx-_|cCni; zcUIU~SXgMHcvsf{y%!7yTdX|)Mo!yjDm}D8WFRa(g_O4toOa=+Fc(9W1DldthV0-O zpGgARNE24u-y0@9J0;6z%dQ0Li+mpa3R99E|1h|W>8VdAhV>Mle2R#J)XTbKt0|y=!frtDu|?WSE5cc zrG+@#l$V|T$P-lM3oBrS!dzd?;A$k`Xuyot1W+KBfgFm$nwY9Sm&n zx~vdN6M2s|Er7HJ*}&8LM2;g*s)_W*E1N6E(sni>Mho51#MwuV)~OX!uGR~FJ7RZO znDig7J+qy>Y63dJO}?%J*SUu}u7SvYY6T(Ib~d-m1NlAOIf4H0jR<^K2wT+ui9y2pZ+v<1c 
zi9!w7^7^4xbU9FjyAo`6{<@2iLGDrfoYRsLzNO z#2VGR5TKI*`0YE#5ZAMDlVtZ#E}?Klf3OZI@;Tk4`@vS=D*N{Pw)M3eqRQ>wCrUwCyo4=`&F=t$I zM_hXLZQ%bKD~D(}AU`7YmBfqP`P6oT3$*@6@hu zcgAR5SbFtQ3-A%n3*Z00!CZdNY-+c)Eo0qwG9aO^i@q=A74_G365&s>YLv1v!+3So z8IW}LqkKaJC882w1h1&Po#2E=a*10#veZ94iDi8-q*<0U4$gt@jU8}EiBXFUqVGMh z{9v@g!?(#jVJsS#{Wx0n-XwpHdeBsv_Vn?m_=J-0Hrf19cf_Gn^G~f*ALH-_p=1Y~ zq;Px@Lkyc>a(}2k+Dy3ln6Xp^xa^k^?TNAv@5KMN?H|~1zjDNN^LaCPR)uq+BfWxF z7AriL#Fr0Qpb+gF*3z@~EB$Sf=ZB4| z={?_%FG!WoD6*jkN*`vGstOE?Z^y8?S5KKQIU_q=Rmx2TEbfTo4SDCT>HQCi`oBLv z%vN?NyQZy&k*Zs!OX#U`z2r_j+6-gj6vo7%w>mj~Hk;C(TBai*^I0AdM|dc?kN_%Q3Hiae)j zx-!;}Tj-S)NQG@O8qb{*iO*9F<}Sz5EAI+giPVFH?$o`G%W&1dF7IxAy6>cD9z>=R zqiPUX*Mr{?NH_39y_%4?ZYpJX!1Uj{E>BB;Cc2y)?qk-beyZxe_xF>&)oIkifdafi z)ooHRx#Zl|)QgLwm)5C!quDI;Q_l|$FqYuN7oHFM{327RpRN2tpS@RLC}z0?{dQ|O zw3IDj@Kv&w&@~1rYXT3cc!XE)$%2OZ^?g$A8;77fR<3wGXZ$ji^xM2nc>BcktBSMW z$A$Gr;hi7b?cAEZqvCeB!{x$kbhQO58%%NsBSYD-2b62F;gyV~?RH}8QiN?@7Ys+4 ztO@4l9WnJ=bq8{cl38OV6?ZwF2_{eB1>DBJd2}lvzQqL07Mok`UbRwcZkvy0RM1N| zgUOD+%^K;CylK9d?$l=N{EJO%w?Iq0sJNJmo-*X@dFS@*P z)<&q?T%=lw3x3(rnVi%jC-(mLPi-_IyR_65@q`)-d?~{a2c1=CGuSShGlYCGXC+QZ z;6p)S$1T0nrs2HR#A@HM0&P`TFZyUcoa4~)(;0EDm9&Ha5oT8`PdXkmfZ1PJi>I2e z^SL-WLcUp~BwT*(h;RvnjDY(J3JQ$SRvn*1LySDmqNAdsg0>p`E52B6Tm;}lVTXqd zc=7TA#Nqf;Y)S24RiB~5A)wI@OOIffU(bf%CgSU94NGLFmu)-hu$19H7Q(iOLN3z{ zyDu2Bs;c(1@Laypyww5n$WDg`iIGso$%%=we>S1Xwm(HIlJ{kDr1T`ICr*5Ov+KTd zYd49~wW;x3cIBn@lz5=kRE1UG0lamuhp$&Zm_#C(m&o}9mrvJ4I&;Am1Q3}s65IX* z0YrA0UdO}rah9CLK7552`!(aar=DeTomH^H+muCql2!p@-o!IU>DVegV^ z+kmJ^*te=5LPeY4ZK-Aa1gGS#0(i z_dfu9_N=LV$bA##Mv4?NZhhL~W$qUy9cr`i^~Y2A;nruIWDe|kxWO@LLSy<^8oaG} z4uJ4Ugmup}!EHaG)=2v2mb3B6i4Z86HQ%ghv@BpObY-VUIesgY&p)J&CcM$!LAtIV zn!xX|5X@)@ma#4Mv}Qo6K?UCAm&gh!Z*`y*9E@UZ2qWpBN_45q}#`!v=m- zrm%{nJG!}{d+RexvtK z^Ui;XyRhHOdpPGV$t>%C6p5~CY;5d$+s!OqCO2bfhwti7y_>W6ZpGBUo8$}17^ytl zH|C3;ZY^as^H_{k@{3jk&%wYv$l^;qCc~POcxdqXULWs_Rm;awxW=|b4s~^-McVb3JeYm&`G-%2AD9?(5L9n~@6_gW zQ*h(t#^9UtV(k+HdctwIdz>0c?f^HDrJnFLUONBo+3ifC$ED5jy9$p4%vu}F=y1=X 
zi}|oXo1=26;EQirpBCKI{PG*`SY`RERUT#L_eB;ys{;=nUV};KpMU>akRuKtOF+checKywKpW%IHoV<4o~>+V?X6H`Df2EF=&gpdP)BFhctst_P?)jT!>S zK1}tp!^T=xQlJj70ZY9IS|CGJF+7=myYZyz;uqT$E1z{a^wrV}aA_@FEM{?{#CQR> zY$0dwwln_x)$XJ8w+PYwn552`N_59QG*HrKeXPOD5yJ8Jq2$6HA=HMjB8P)$VEumG zpb~>oi^oO*uz`etxs?qs+#tvqpFc%36)yoD+k>!Okt{PZ@ULCWb=xi0N_g>5qhmG| znw^dpW3>VEaqInvm3!5-Aw;?JqlH*&8E1u=xrKC#%vbc!+m22<#x1V!LV82@mXcIc$Qu-rg=>-n84DdjbkE@xLr8T)M9XZh;=$jnwew8dPR zZap4#b?MN1G_$3dvm!K0!^v^Ha9us?6Oe!;41zVr`0Lf8StvisO4(PrRTCuObi%F~ zHvr5ZDx8J3pQB;adQ15rJ}dlN&Va+FDn>yk)!R#x{EZ$XF<_qaH(UP5ayqy0i>W?a zufSK|2w4DyiUtHm)6vsLF*fOMf-t&#`k3 zNq%k3T3BZAYCIuVwV<}RN7ec6`sg7sS0dR+P|t4pjx*g$STBL~4vuWC&BifD@x|j> zWf8=c#T!=6+s$o8@H&{@Y99OT;zI`wAt#7VIel5S z`lIZM+~}}?NB?a$_seK^?hG-DK&l)NU+yP)?ENt=A)`zuc*8pHa|fYt_!Fyw=mUfH z_{R?Ag1up9<;8#HsCuE&YbBW^3eMX)8)f)coe8cjJ-x3P4w6xHhYF_AS97(t1@&u0 zSMMQ)x_Fh5AMU_!Bz@Dq5{G+b4N}Pye^4rJ!cYITdHn6)DwE2L9CCI$T##^-5Xy=58Qy z-qOpgAjG&fuLr7fy<%&(U*1VO?ut}qqeXt9nx0Ls*F&YeOfQqD(Nw)0)$I{MhLPy4 zUS{4_E}44#we;fyKNyI|XO_Ik#cfj5nHGIpMgOvju?0hW)LR|<8a+-PKJu-XCa4?X zHyK&mCkR;jT&FJ;*hh@CRI-oXVHq);A7+2$Voa8rVZy1Ed&p;hnc;rp-^Nb8(&7E{PpKi|next9j@iGv0=Y4gL zX4B>VBryw+p~}*>PG=W9#VTVo1zV+R6#Te{lOJN{Xj$bgifgv{dh<Fd!zJY19fR<3UOB<6 z{#_ z|NVkg_NHXU7hgJHSm!ieG@Ptv=b)E7o@})jZCzYyQ5gOHWQrhh4SVv}CJ&ocWw<+* zM>mh32=C93lM@RwbY<1It;W7U?*XV82e5?FRi!l<;AsF~r72MOX~JtOXGyEY;R7SN zs;F&hGV)Llt~mY}do|k=d*q?|>rVbAJ#J->1(KNaL__7tYC+Y@tS5bSO+=D6d(=kD z)@q)(cSXKSLZopEacQgd;=F$%@paTp=}jNvgME31#}i>(>8r@rV}M}YbUUmCSjPa_ z9-QwSs>+X#{cq^-%$Xf}P}kaLMp~mz*qvo)Y)tR2xQo%^N@YgB z7m|viO%UyQ0@+_3&YmPCnxp+8@n06Y#L%`)(!@<1?c8qjUS&61ahP%46jA!V*_=#X zgK%EPGD(K@X={ClA=-0Eg`~pm<3aq1B4mv;Sx$=eQ)*(lH5Iq;rWK*(( zYzwH9i&7Js&gL-d{%V?5qSS;ia%3bkezR9vuRTvgKcMU3QV9esf#+O)?QGh5&HY|c z^b}&w09RT)0FLY##}nC$lnEsKmbvBU@urH*9X#TspwH;4TXm0zV?PWj@g;v`lBs|c z;%n~M9Yv)-8?O*^KO`osRX$l2cBf0YzH441oR-oQn%^m*@28<>IKt}9F~hs&#Ym<_ z@%e`7BMeX6rbGB-p+73sG9YVj(d_`GY_1&Xw`Jngj%xhLR#5qhY>3z_(Hv@Z)YX7_ z1`yJ^FxzAQ^Z5S${_)d!WNlFWG(Hu;+7Iw 
zza2!a6ww@n9m-O;7@nGHcun@<;ow=N=M^*mR3{>vc zV{#<$^(Whl(-paB0)xp+gOlz|n2z-lI!7GF{WyOW$ZH>bbjCSKD(?hmMa+< z6${6o3WO2Y$L1T7IyYcf>!y>s*#l3tj?Z3TJmbpm=iT&~u7gY)I^f-sYvDqaZ#dn7 z7i(MlSZmIGG9XkYRWbP=wy*c~mP7iT^L`9+tUr7}Kcwj=@MuLpe|_oxXmZTs+T_A^ zd4yR-=0Qe{H1yMUO=8Q78`e9=)gnv|T)Adhua{20XP207pzONXMVp%flX`hXtSblV zD3%O)JA%bI-80Tq#pt|COT+a5)EQl{9zqH0kNPWv;f8C>{Y!MlUtT=SbWqz?5k7U!g*@Bj!+-s!BY zw(x?$#wGOPUOvA@ziR%`!u=azeC5kIYCTN&d|F-#==veGGR8f4eLl9o1Y)^WFE+Pj}kRm6^*9dH@hh{NPH%7h`sND=EXJS$JkvClP zgocL_nGPNm6IY56(TegQDq*DYvr6kXG`AxOLdi)9sR2Y|89tZNuK6o)MhSywucutX zIw>wme^$oG?>q#CM6Aj^DzvcnsOrJHGDh-(Hu#sROr?hBYyWgU{cHS`MvGJ|q1d`s zykLkr7d-Fjb9X&l3e!L_zWyP!ASIKP}X*KRf|_QeYcBUoVchbxp+MfAS(%C29ZhRG!B zB)_96uhKThyr?J*;}y7OyvgI@n?1w|0K$Ja9|^SoU4N5T$QYzI8^C$`3AN zdjC%(bji0+cUlmwU9$@zAxC(J!bX|asNoIMFPx~z`>TZF;qn|qvjQJwOX-~lg{}H< ze`|+OnoHWISDBt4?Sq}l*QI9YRqV0##_$YAytH77h_=^?ruvF*i9P=?I)rA%1%D*^ z6msVV!xQGlyCotyH z125LR@rDi2u)xaePBc0-fB$3tg_WxPmtf8R1|yczhkiPMK-s`KX}IM6n;kkMaz|3< z%_76FR1uB9nyued9BOyh<3UCxlia0AU6t5mpo)VR4-{#6(e3N)kOhOp^?2x*7u4O) zR;4j5F6RNZylj%Ud~Rw27EVU?MlcSS5PHIIKxvA{e@*Cea%U26>int>Tts#C6=DOQ zABNFXzYprcU)<1W>MRctV0G~b{%;G(>J)(V&|^7jbf>*SL{d@es?^u7nHZH1_s8H_ zH3rIe9P~_IjCxAybB7c;!x_@<>kc72!H!cvk-P)l+CP8DtMIXAz(Q6GnueD2DcV!X z0)Xq^FGIGe$y>DL+ZhrD{3tD(fGUJcb3xg6@%OcVh3hD5y^rZky^`PYuw})P?7p$@ zojRNRJY}b#S5o+|I!hZI;hpVv^(LZ}sla=aSN;qO{nv6!=>zqV77?{dV=r zNPpRj7?nZt7q?{C^lyFs#cLWGT|`9fLiE*+H#XfTUolJ#(pC_gLi79124eRrU~6WO zx=7eS#^mH=`4`^mRzrZO>!5^RIw=XPKl1LATPybBbN|gSJKFl5pTp7bUkZ2}j&J}sHJ)w=2xO4k>17p3$)fbAS^fQPBSS_|B?lv2ZY@aS8SfkEz$+lOtaLfC6;H^4v?o;&F& zm7A;aSa=P;Ro^D@J>jAxx~v=iaA(TnN6Z6t-xlMuK4kI1EUguOsTjR4_I$A=|2tvp z0YO+4P&V#Kk9-}dE^(#`eV>S5L#?ErB;~8s*49d3jTRu6R+(V&V_jw*Rf+1(Lz5*} zNpYm*<+_iAyx8Ic59nE=ijiP``vFHvcC)OdsZ~YXLp@it0;pyC67pR@7#GdU=qrR) zT|M+)Ta+7EFRoR)TrEMpJjcqsDTEy58CQO994ozad)Z@lM;Y16Qvsbov%I`@6FeGJ z$Oq}YWury<04k*xhncMDz#_kOpDxj2 z=iKym!cX}5wyKK~uDtl^CK{!tW4p0}Qjh0=nob~Ux_Ia*%aFV9CL5+$2|v8OwuD4a zW9rX`(H?cv4ku0H>2s#ybUi=`!(w1%-y`0Yl*3x#!#;T-Wd_WDzr?`6pn32sC*=q0 
z)71VlKHpEfd1qb)`F$KslH7J4JNw%^U*6V6&!{F`XE^!V)?r@TwfvKFN@?_)mp$bUn%4EQ19gn0g zy}Qnp12a=o)5y|`tfv=`-OP5n+B(zYjct74ugj zn@P9lN1Q`*b5m1yv4Le}WrCgO)%9}?%EkvQTmDn>v(`q!>i|s0;qkP;;qEEl%?nvq zq9$j*b8nxvsy~j-SC)J=%|Y;^*x@91?&b? zfWrLm2A8%vowV@AHOW5HxnXtIR3hEVw68fo;eHS7%Bf6?e|Pp|Cv$=jb}@xEVk9n$Km$CliBqzwa*_3T-lI;T9 z$fZog;Tm00Y5gH96!MzKRw+(Z5H*u}qtd32*&ey2CCL%NY8M3zhh0&xlwUX6aY32S zrKC&*iyM3lO5|vKLbn?FxCSDIC$O~3XVf0|8M>g9(>_1fc*#vZO|8Fl=4$K=1=XC`zjU`_L3< zebl`v*s#FqBm2y)9S7orMmfs7$8OaL<0+auA(8BffjV3X{XD+;2aEUAPyax#{ZX(dk@Kb zqTtaLdNZm++wH~>Boh+i^NM5`GaNVbx<3D`MI4hRLWvHB7T<8APh}li;Lb2$a!r_8 zPE(mmfA(xx0-gMh!JYb2q9z4}Y_cU}$;X2t@BMsL=L zHPH!LDubO927|<|jpvYH9><6-UW?Yw4yARf^#77>|8Ti=yd*^vrx+?hyYEwt*E#3>FdKd z`Tch(E#Hq4Y$y+dx(L-ZlMN*NI!jgbWSaf`2cXoXYq1yYJz7}KOJ9UcIw*bi(M&(R z2T~S%zDcp=_lzr+d6L{DqF(;8eIE+8Y^H;Fh%g@gcnjI6#UPab^HXy$@6bo|Wx@~F zKOBab3*c*;X-wVDM%D?FYgmhx@~9DcOMO6bs{`mRBPCwx9bS!T^Q~2qyavtO2SbRa zFo&Yq<@;pAHubzLX=xvt667#ISolJw^=m}2rt?PHPhn=dB`#wb9f*O83a>ru)5Z0y z3-d6I=9T%anmm0%(isMp=RiY)u$|j)OlYldyZY2j69<=;fKysR2j46e=whzI*eUR| zV{}hm-;O=1>Z)n9;PT(xWXWSj9(cuiTU4_e0fMqGR0h*q_6dW*C`58u04vs>kRF6^ zRq} zjvqZNFy46=#P12urgQxx?w=Va6sIF#RB% zKk1-@+f@mw4nxwj}U$F&)399oIY4rABvc};meP#>RQr9 zkkxfLit`^7zZq|gGJjRvLg%dhV#ftFobKBK{2E_-{Gd@91H89J@RUqduF&1sejfYEbcxXT;?Y(-8{6r~ih+I8T$M)`maDiAm?T(T?UT$kdqBG8-amec zeZ};Sox;*5@p91oB=lv642wS6_!PUBEGrnT|qvDP2Sy&u$*S1hAUyp0#G?|9L5_zD4@>TKZG zglQ$R$C_aR;X}sAo|+mwq(Hm{6GB?_*jMxO^INDsi(^ZXa-BwZs`uyoULDopTkRK1 zsBCU?`ka))3yrMvt@PH_t@!BOlK~$uCE8%#0ZM|JmpMiQiqQscg0Cm{*3}FIq5h^6 z_8*$*o1-lV@E%`}G2n>#Dr6y5(6~|9top4dkpO55e!J^m03s&zji*-ajlrQVrZ2FC zIZW`oFN&zzLNQY}%S{|u9tqn5mwrE`5bfGM7aG5H*ccibdQ-fyhtG%)uLDxsPJdBu z>+(m{n+t_ZVarjEpR;R=`?AgfNI>E9B^M> zH_eMiRn$=ag1x$*csDBSZlj5ff?*ZRX(yS72fyY3OPmRqN*9>8^bRf0_de~sLRItU z+w*QMapad?o@C^4^u7sZvNStqh1}DE<7r-qtVsX}+MX&>=t=&^C>iTaDNXQvNj_7b zvR&iybR9T}TOE=%cucnH&1KMMNH#q;_}kdtLu~4hX2*^|!%=trPSB$*F?)>({sk6* zX9;R!BWj&?{Q~{4xtRQkoo-S0Ym1QgG6Qn%C|a~VOR4l-DDS`Pfw$vznlN9-8AccX 
z48%gAKU%@ADE3SJfj2f=p=Upapk&+^6%l`JpWMt?ACK5$GZ8Je5~vYRVmE%MbU&DrnEQ^WT6~U@{>K?3%7fx z@y0PJx-g|u@k1@dlY&ru?D?(h>FYgnc~&V@M3b|SstYvCoo!Jv;&ciCq=FfuRrZ#( z(%DF8v{Byn(>z!%*&`)h(0_#uy$V!FqTP!Q{Qs%WJWH1OCr8=qE>UgYE}47d`d}+g z$mdbMMET?yI(drd3o8(z8wV^jYQkyI2EM{3fccpI98$Wy$ zJ7{GsG(Tj)VCME0s~MuxJwhs3NNM&TeUWqPwfq#RW;vFo{XCzDKLl|TWMG@y^5;!` zi=*hyj1u-ZHYIw5F};`tWEAf>jkNKdV!ef3ATp=wJ5*6Hn#}mzoxC4 z1Az7^GpLgz?S)UeIo;gyjV{UCwI^AK4pWnp89&^-dqrA6WQ(lL>SoSsU@;?Ca9+?U z?NmQ)a=VFdhaTvHUX>SN+6h$f%@*(+Z27a?vr`Q_C7r2ubkRW*B_VC$`IU7&FN9& zyi#(JLXy^ef}Tmp6e-0o6^!|FV91(Y)mDH5>FPptJhJAuGi8FdVEM-V947O6$B!cH zFlF)z=-mcii0_pbE$*?#(@OD8H$`Q8&CLrzyRv*7-m!p^JjU)WsWXq(iZwe|f|rO@ z1#a1L|6N{RL@+RK)ciZw!{YsaFUk4e-1)?E(eoY*FPNjxeAaFx#IzHe=P}OlKbij- z-9a$rzGaMTN3wetd|oR@_$gPk5cRDSZ`@OaC+W9UQR-my&@NcVygVPbi%IJHfF*@` z;}PVcG9dn&r$$(RVoz>0)q^;0t+I`ol?c2H0v)8i5#?Y)21Y)PCNJFpN`Fh1cTqkd z11&wXvpaMtE@_Xm!Y9{jC3RZL4TJsY6}f^p zxEqvS3T_S13&-dSGF?Ph8Fi=?K#KC4^QceLXhOWm_ttk2q02orCMxq1Ivue$uepAXe6UN%sqcW zazl7>l{CX&COWP!WqDH9{dpBnaWBw!iX7F{pO>_}tMv$!{8jhhZyf4#--m0hH@zBE z5CggoAezr_ixO_Ud#vPT&GUpRq+j=klI`2Q#wYHtCO63Su9aQw#fxAX6j>>@2^Cx? 
zn&Tolw8DH&nbQ*3547Nxw@l3}E#>4dN0#(Bf04jy=fa(;J;A^|;qO)$WlHx#B(#Qq ze14^Y?zx%4RSWF5AYlbP)Nwir4HOVLtu3vsXC^1RNg5o#<5_|3OmEWICU;!`W(>-C zdUY?hb94JGAkgzwehMYjQkE;%o*#*Pg}VAvl8822KFGy{->KaerlvCh5l8u1h;oD%@e^)~pBI2BQs7B-KQ|E`dh-6>rf>df;*8<=?O<(talUBCK9NsPgS!BIDS zZ`&p*(Y-96w z^PA*Cy(e$ZsF^b#!n?doXh&@F&wr_Av7$VDJdZAXN2@Ck18lY#3YJH6Cfoj<4u@h2 z7Ztfcpm7W~h7@S8O(_0rzhh0{S@nwCPek262hxgG;Lmuw-+TUtTt#zAchPQpaSXh& zYcapJI7T2|LFsRKJuevYGU4Bo4J31GyjW(m0O#+=C2Dj#x>EGXa%K;YOjUrU?Ah~{ zHxVxL@YB|0zVV2($vMAPK5nQ^z1I&Mdr0w9ijC@c?W#*&VeiF9LwuW2w58I|Z0=Td z_SFs!K6Z}M69h+`D7=y0IS|&Hr4ur2Z*XsSj|_#+Td~8eiA|&;*Hq+*e>k(stm}0x zA-b0|1^OVKF(#IT%hvEL@~mutA~OgYlO|isk`RhTHd2iH`1^k&Jq1>K$n^^b%tncO2e3N=Pm0gtqh774*k>MO6O|%`Kc;6ZPBQeI2 z#FV@+4zH3o@gp-G6>_PN zPL~SER4`TxbOhOaKQ~9qEp z7Swa(2`nVgK~RLzAz!Sc$rASbHI5r0J7%#g_S%;$yG;M{UW6im*RtCXazE9j95o-wAM?ZQEC zH3RQ;%|=kyJKV5%^AnrA`~yQPS$Z=E!0v;B=RA4$bCs+$ zMk}Bzc6SCollHJ7yWL3y7oAJaLWM<%w(p0##`J*y{bU9-iv|{*of{Q6KJ+wANrC%Z zt6yuSUWu4NTS!IiwGKXaKHcb=U5`@mmVa7(ehynO6JIm=*QTdgnT@n<@3(dNyazcP z1(Djs=>p~%E5_G3DRT2lc6e`~SGcJ+4b^gKtQ+Z*TRqs_{Y}otkMUdnoeiB8x`86m zB&XnI`Ls2-7w8jp!dhSG%_n<|^;db{@sLz|?q$|UDK6oHuC^~d3ZoWSdV>nS!I?h& zTFq0y^o!_H>D~s1#BaI+aGRg45(`ph!U6p6u@>HFejmKJdCe%RJ1cf~I6X~o0BpYF zs1=SIGlxeX2vYV0d3DN1zdlmR{-+bC#{4>PNh03qIk-4X>ie_4w6ZNbpkGKFhwroJ zXjWglj=Ui>{ueETAu%1)8#&EVpR|`660fM_xGbMOp103XPNM>)HU+Q*(O4@OicLHt zX5EvGV+o=K83?IcWefi4a%fOdb_b{Mhx`aEBLDB70g3F; zXF0L)eyVBew*MoB9ambD5`UV=_hPvV`4^h_whYtShWWQLv{|-|Gn9K9y>V$G+{`;? 
zVz6%A2bX^pgFjzaU;NK+tTn2jPjh|R7`iP~!c zZ%_)+BiP`LF^j)XR&0dAm!v}uw#WeaGpm*cbk*X3GqM6ogk$c~^G#fqqH{%;?b}I5 z%CCBfE8Lv5_DVtYFNAiiycnnxFni+aY0mdElHtEBjE$$Pf@?+1Oie{w$M$T#uTc6^ zXD}T%ubVzq;w*NubR!-UdsM77#4^f=JqoRy&WgkBetVoGkureg3q9_m6Mx^;b6g^h z5Ntl0HeY@J^itUA*@El4zhIK>g@cQj0o!`y!LsB!KT0Gw&XN%Dt|Fn`hH%er0W>q< zmA*ft;Zw&L&IlYl^*BDfWB)m7+^o)Q!TIzRF7K-d{A08?wmnM}$nXD3qL>3)(vk>$ zm6TVHy4@mt+#K=t+qxrJWYHZ4tMTym=G-{$eieD|(!H$y&q)Y9Tr8xBu?cY19PSb6 zJz$<^(H~LWxNoE{0Z7_8_&Wq%c-*@hg0y5CzhUY^A*fdk3%wRq_!@c?YUpexK1-`y z$?iY?$aD-s*?npktgSt)H{0{4Utng>FU|=2yJ?=%gRU6~`DFfaY;(Epo+bRn+d5A` z1NE4zv7W=7L`XXJPot${zxm;3FL5~Ie?k7Y_abvS4pS9*L6~}(r%JH}z<=^qKg_S2 zv9h$h6|AHTxU~n~F+apy-_qp;e{QSQ0)tz((@E=>((ccS5CGG*jb52$>R;UxbKU8} zTUW`2-J2e%&9QGHNW~tSz64Dh@ym$OJ1U}|ra;Mb`S4fNP zFpq3w^TaC-#C6lBhvm_%5qmoh{j$q+06Da_%kU(&M@B1iW*}g3dZ0nlnUEP>7q(K{ z$<7Z_?sde*P(N!z41(>0y3>L;b>|_wqZo2O?Ux#C(Ao2xxPks}V~XV!;-uA{+w=#6 z#FhY)Zb3mME%+3Zjh2Ll-Z^a+>6gw)m6YJ3FCUx^p zmZ0RKiVDG1wmeTpuzHLKcb^x!)B*UYUOzV-fbVpI6|^d$mzD_Rr&Tk2hmf7hv{c^wpFLM4Pw^G#{8Sp%3sScq$bH+gCLP-LRn3#mf-J_5 z{ra~Pgrw%O3(`pUUHerQpQW4jK64c-ef^u&ZrG~S;ID3x^Lv`LiMEd*p7hS^j!7u~ zs1chinNf#`kTISKGkLLvmAdF9E^}m(LjQv{*wrj|e>sPL2GBl#l1fK`9D+e~O1Y(Dvxc52g@Ttbu#rt#2Md%jZBk1x$m zB};T9x46WQZ;Km9qNwt2_(r$IzUCL(AH~775L#91#Qmpk3b3O`K`&BTFOM#ZoN^yr z>|ezcj?wRaydAr(62!r15c1lpOBY0*5@hN855bgLldR*uI;}Sji#Sj%`7MZ!P>;TV z*atkQW>nwVt2Vy`<~UogIP)grl?O~raq0iik!j)aGN!a8+y2Ud8TFE8lEL%=YWZWzK#+O1sPFl}UVgF3_@g`h5WkZON}S^4z8!RzerkQaXRFu1ZMwof3zv1 zL>d1LWVs5_fbX@f$He*n@ zk%6=1$i3?^Lzl*-e7Rlo4_+!0b{~eH1|pyA{@-l9e}<6qg3xx><+@u{Y4-VkyTj@w zQ~af(nmUCs&u3pO*4D%eFI7b6pd3nfg1+cEQVpSZVZv zkU(^)=mainpZprpk)moZT!25{JL+^eD9rz?y?(LwVTm6C>c8!9@_*5mi?aXNLf-A{$!Y!_+KKoe z-J?ROPOu7&D|=xseAH6JjKn_C&j8RdTlnFOIXz&D&Fgg#ue&Uu#O`xw2;CgIX9;~c zZu>HzMINv?=O~PulUnK!c)60JyUCz&;r{oxjB&zjZ+*(xnQjIZn$JU!@+pIVQRYA{ zihljc2w2Ql=_pRo113jAJUl!+3=3Yw*|N&@y!`d~(xR+b+7V9Uo0*A;T09Q6=n7-6 zd;;5hAB{gk(kN?|*VcNq$hR*UcP$OA5YAP9IQu)z<*|IUpBmTf_f8{t$i&!qKNW6g 
z=nNR&{PBT?n+u?W4qCP%HPT(rmyIbcoZj)4NHf$RrJp}OjalKa<>^Vm)ulca`?$lH zmwx3{A{Pg}-vdl{0)`vF(rUfE=3YeSFl`y5 zMs?Yrb&qHO;oEiUvvfZ5{>x~+M&q(M zbb_sTK0Np?Et5mUnje^@y_lQGEaS&dM{hiqjMg#w&1yBdAyWYM@eQ_i)8@K*aKT*P z#cEg7e;@~b+!;Q3|Lck(l|&++#jaP#}_#Q0y;PU9Sh1Xq4!?J)uD;k zUOTIi9~+a9ySBM2pUq_ETdQYK#%r5sP%;{%{JzCwE`(p}+SfKI0PngnPKTwo$K-_) zgo#^QvKptwAuW5$RjZ+==kznGz)Dc0r{&sl%0Cu#{2+gxNGu!)`)=Zl>uU`aHs$e9 z%ESk0kchiPX<+32nJ$CO8~}`_8Zuh@gGfcA50(tr84-d>n`PB7PMG9SQ#K`MEMy2f z|J$5p>~Y%F+Yg?-x?6G8fJ!}q0}69YmDg{8f7`-1vILzpJM;^%OU|K8fF|7Zq>%}e zVLcKB&u#1IKrc#3ppAD&(}D-wXF287LJ@nS4L$Q7g+sfWb%(s0Tk)c;-)&EC zo4TARa74YEeh1Y0B57lrcC1Z*e$HO|gH8b80I`@kKgqBlMEiTo2&2TeG0ni?tK!>E z3Ta(RtAJ~K`c~{=!*~hyNq7~1c|~cZ-{gj(R;99IWS3WG@uRZ!zSEjZFjFmbN^SD! zV#~qDnL0muwz8(l%<+w;*k)YwMW{C?DTML?Xj3)xAGSwA4Kk!2`SnOQFC61Oob^cV@m-vq_>+GCRjlgH>B13tVcEP`{GOj4eg6~>)vfoaTTcb)_9B`|IQoZ^XqB% z#cs{Me=J`jU)1?ZnbXkxgNvu&xZSI2K3nNW;CD2e-9kaHWkzq84bRlJqC-v#PTqPh zg2H|M^Inj*X8WsNaErz>X4_dc(|{xru~S~PP35e}H`-cf$>EbfYa`mJPaZ3sFe=wQ z5BQB(A^#^x&Y52lh~&t0U%alhY({$_LcKgcSn4$Yr`Fp#opC?Y3pux*j0qNahTV3; zod}EKI|9+7x2r=9GS(|b5n|0Xw|5ev4IDRhrXQQF)_RBwJpR_pZR`fWo=Bhrs?(9WD|oL>wl!I@`M1V zkMqmvk;>v0FT0SsOdEPS#I+S|;^b$`ZJFqf@1%11pQhExu=+nbup&qZUaf|OY~$*i z<kU5R8mFB4c?PJvV{Hz-q}=QU#H9N5&tL}Qa$13m%ykme3*6>7MXkj4 zRsS%C9aYl05U*u^Lxw@n*t2oXc8TDCBJ7394zBSa521i>yEGjTV9W?b`?gkk^*bP{ z$4iwkt&Y*=bn^U89fQ66=+Iwyz>`RG;!eOchmvHWrr&UvDS#@ zmI=1kUTs;$_fn3^Xh&6IuTqgYG-2Ve_SFfs*eVgBDExXqIE=wSzEWiLdbrX31qbv0 z7N1GqzOpy*ySaUw;XhyT*o{fRII#x;ONuv%{GMB`5UK3CAY)nUgb}@RfB^_bEHC|a ztz5hxJMH1VPCk=M!rXwLV~desz~DW0v{B8meYtnE!`XX-`jelo%*?mJ@JNmu7pF~A z4ozT1dcslUj|fT^NbN4rT;m{E1f|!Pyb=kVi(eR6xf-cW+olHiE>`00msvJ#1E-5v zhN)BIQ$$Iuz!x{-x&QBJ0lWZ`W|N=Ztf&;LM?4j@RKl4lQuQhw72<5=Vvg@2b$4OZ zM@2nt7qJOB(rMUrE!9+$0(bn$*t6Q%c{jQM(*Kmu^1^(Hu^XhNAB-RWN{ei*e3`}d zR2Zk3Yvmk;3u#nO91CsvV!&@`G4%+0)kox)u#%n}HjEt2OwZK zw*$WuFQi@rFXdKJ+nqmeG&D1ERORs(D+DT8xfyWysx@iQ>9(-2@N$(ckjjwOG0Ohf z-OWvI!qy$|-u>YS_ENm^g>XvXCQWV#Qck{SNO>QMW(}`XADj=$?Vo#tACxW$#0($? 
zmWrWY{D`f(s2J=i`DAMqLD+FZ7Z9=^8>teZ&g=@aU^}Y^?(JIE(ocyr{ z@?XRsr!Ab$O<=KqBg5}EZ|1^n3BqvFg6mP7Ti%NzaMCNfCU>p5V zT3TAH)cicZ+c1}MHuaZ%nzde<*ohe^cN>nKT zNz>)0Us+k%mRv!}8pER6I14%AJlQ>OP?-lmNu7cqp5Fm`)NUTywSOoB1=?ZV72GMBhjK z)bACyNEhPAO9-bqa$?E2=r3{G$1Vf5e`aw!oV$)aDLnfn>ft!+8~oyM+zTB}FZ^iJ z1>Yo64`T}^jD?;$X=n^^0zqw|L1uH(xF<;1HFM7T09mcv(@#JyBB}N0@b*}t&q*xd zv=rka4j;1Jb$L0-e}qPF`X8k$t_nh?9qK-!^nWp1bT;G44U2N)h1ecEGoSztTx%0* zh4jF;-J9S16uq+g0L^#;o!!#T#ZY&7Nv?FZ9=j6A%~0#0pF-e zxjy{xQ%iv@Y^G!gE?blAXvKt{pXdA7^W;*TMM;)wIzv>H4CJgKz;6djLzbt3X-#2& zhcuLG5gQzj2Jo1#%Xd;}GOk<&yXEa z&s-z|003C;G$nje(&|7o!YCjW74aVjpYz>`LFrK?8@#=Go=JI$R+*sbmsI1v@9cHf z_b;@6|Jy+uK)(xlMDdYld4!HL{PP;nt_jIx~gU^q75B5bdLUcbSyeqr8C zfski0k1Gch&%Z}_0&KVeA-=Iqm#2AKFg;N;PIEI!$GhQ05h%6sI*TjqzsrSUTX(+w z>RSakymQZ_TdPD>o*g!^tBl1~+On6vxb*^5AE(=s-!$6w**5Ip2g4!+k+hWBsm-i# zAeU4A!GKeV((8hr@>;fzhMH?t@Lr_V zi)mA)!#6hZQ8JnzvMP}atA|j^o*zpAV{xMEiuZNK^se%}aemO`=1<*-GhaZ@n?0dpZoM8k0a6i^2NanGpEh=@}#o>Bh=mZh@_?g_HyH zyUe??9~QmeLfq|aoh6@W8Yk7BGJ*2v79EnB${FvZ%YM=|>yq18&bI$7Q6a2Jqy-kh zNn4iXR#{6L?Ru*|LbHW%{)6no|E(Q%IViX&$BpoF%&&iLab12n$e~cZ3p#|%vDU>4 z)+L?UerOB*s~(&FGMZ%BHE%eBSpo)Lx$W)S9swC#76k@>o|O+p5-qclBR_>U=gXfK3wxJZ(Hu2h)h=-#NPk?xeDSrGqtICAuVtjMpNbBjM zPXImduxd}FrwpZ8E8!@!5taqrb&g8@qHtP2W>n_CFtsx>n%fw0JIKtxsajtyP65g( z=LPLD8)O7X>y5iNXOU+p^7*6_@=Hjsh7Q6%U_(0hek+mFRlEjBsR~V}XO};a9P?%O z0CJ{E51!g2h|VmW*8AxkGw?p)OS!c!Tvc6N?FNG}JG5A41?AO^qc_;9UwZ2|C~R=7 z3*u_Wn99(k3HzSDHb`b??Nf2l+?6=wuKRMM49jVIU3EtX`SX`+3L?ID8mIIWq5*DH zfAZmw4j2}W3*#yEewMfX$m0@P5hWhv?=SG^;sWIMT>9c;#Fv?wZ>OJ1_hnI|hQ5f3 zYh%Q23ip_-vM;EJYM8UO8_zF{I>NanspMc4c3&9JmY#updfY6N}YT#{cz|2WUPTS zS!?SOz+L+yF^!&-8W2=7+HJlp9ySlrUklcw9N(_;TY{%l$IT0XPWS>+8pS#5_yOZR ze|2zXS=7~oR#gTT|DU0%heiZ3Xck+Q+9A2_WB%qYLVsz#ln7uh^Y)Uu;Aj){Z_@b; z10uG~q7~YuJV#$S(~XQn-YR#cstUqhYwZtt9M-J(qfD3G{ZGGBfbG|enuDejo{yn# zY|||(&Kl%@ctN=2UfcNrh>ZJ`bCdqm?Yqn_f6cyp(Sq8K{0P?CSv!n(Vh2J`PevE7 zko9A^sR!7J`bOrBdF4roIQl#xS*+R>5TeW$%CxEE-OxIo70R9epe?jaVOgzDh)WJQ z>$@`W$1>*l&pvp!A6LDPxo) 
zyl2a6)AaF4X=Xx^#%`y@0QN#xAh;In%|_Nf}WSh4w$>FCMW{d+d#m_OkQbx zS`3_1$ze8Q$JLYTj=5PhdPlb$XtvGE{tDJ_$oho%SXRyN zw}(AEv;xY+IrK+o$FaKsQ%CSg^YMv^27e$Da!i07i_{#+HamJwjj8RCgKFZmO+B^V z?M#_#!pQG1Vo4dOYEDw(;qMQCzd~ljx^tR%a+yu4rk>=b3b;YinMpk>EJw&-V+tqhrz05t?$#o;Y{(yk`FRIVmWeJmiTf7L8HI{(hEfbT5L&(2?mHDsELP zRbh@}!3&5jo?QvmUm&+NNQ9rJRq~Gj%f$^*<&^ejtV-9Es#o6Zf~%%1{-b8 zyMC-%$;&lq^z5x_Iw34Byl0VV-PQ+sq`TPjAv4BIjndtDpT+K~P?lFoA)1L3Kc!9O z{YWoHmb1r8@2s&r`}UoeoR^=k^g5z6il;edKN8jHL70i=Y1T}L^_sJ_PYZ?Fh3aVP zJ}Vksa{|bVPh)Yate34rs{XPnFEY{F^0%?QVxjwo&5CXZSK;RWLX*ZzTibc4k7xz1 z5@lJ#kFJrRxQ>Uns)>Md9 z46~U+JCj&5?p9-r@SXT~GGQYN5-^RInANtw^Lx zbF-{wf_0vb?|jPDYLhSy#aoC!@nkH5m@$MmTRBO7DZG(q3udsH%IegKp2adkz28#=b328L8b5P8iT&p7D4J^}5)$ZyWO|TXX zi}C13kAkjxO#u)S^r=#)NrbRgjfN+io!Qp-gnev$hLj;IH257sXaG?0S+{jZU>%Fd zqcOZSBl_#*;sBP-IkO!RMs)E&YVeSkO|`hytDq)C)}JVuYfYi&K$xGLEW>~f>zmiA z_7}^mn8nq`Szh5}N^pGuruV%6KJU;ZjhCZhj_%83)&p<6MAhRbHqPO&LHbME-fs6o4%*3_<}&NN zUfoXC%7EON3GH1By3zL5qVW9b{-#d8zKvXuAC;_;Lq8+lL-cb?;1!6fAdm{p|HE-0 zkxt;Ye&VIcVMY5>z1TV0tnR55xb=x=6e=rveMhS+yvLjYslLqOcE5jRktMHrUSrFn zwztUus1;)v2QDi!wy*hlb^USuPs?dqGr)+=bymJqtGn6(G<%e z#i8(ZS!r6g>lU4%uV*6aq6K+cHX86{2(~@olc*6CNxq+rjLV?Cs9V zxK8d?k1ylS@?Cw~&6R$h-cD-;%)HX{b8Q=&he{;8y~>e_qSx>3g_LQoVs%%FX-!6S zS7~%@w^l)Z;^^1~&{n{aubjj^EHz2gavhHc!Ev;6tJWd&HrD!b+{Vp)of&9*qy7R{ zbXcSwXv~>Tw-|#{QV`Yp;9~cv({S78BoK&pk7J${yigsl1eExc4B}ni_X^tPHVD6g zRMu3Ehlx}3J$R1Vzb}B+5=ELtqw2EP9s8f1L6y?!!zWX%yJ_aOf`7xXc4-gUS(bWY zT8h|Gxk#biu#x~a>u8P!$X9odH{8PjTytNz6AV;qbMJuyb{%YS8eDYCx<}q+p{>Ux z(0Eq@-s|Y_?&#%848Iwm$%bx_Q#Em6)gyt9#7zuIvuI z@Zz!;x>&*Z{{H@-lD%__6auEU;Y11}Cj8Qw(3L;Q>lJ3@hPx+{Nk$Hhnwby$M6hy`j_SaMmK)YEIO7WH_S{%Re0mrT|LwMa2Ef2Z|nO_q6-= z`BIGqr{Y0XkZc82wlrnqvcSNnJ!i)#vW7-EQK%&5QS|^Crr|jy^LMIrJea=Rw@e-X zfxg8LQa2HJumli}3HkYoyQ~BbmS*e32?I>)r8m%&$DOOQEo{%)_3ClIdOe9iwhb#? 
zL2m2vVKxQbaanxE>Cn^5b43hE%91d7zRlR((w?6YNV9o^IoP-K`wh?D6Q2UrbXx#p zWifIj-(#Ono&eX_W`Eom=ATXfu||n6g1zt8-Ba#pPjRPvY?L)(LwwqGnl;@IWk8hu z=yz(pe8S7^DJsc2a4vvXIEZF)$uL!{!A*z_s*Tz!P7OjQAW0 zo=fXx9gT>fUV6HHtT^H zw#uPA(OK}=1-{|RU7y2=zbad!nYxHA0k0L`Nj}SvlroF6jvGYPz@rBBB@ai;`q54% zHu(j~a#rB+?tYsGZBI-LFjvw&rtXc+z%n0XWjelQ56Wu^tsjC01RNq}!NDSx-$^Hn z!03msu%x>h)ye_G_EBdSNefNpF!?6aH|?@M+$Od zU;ng%x?ybfLT~Xobbf`5(m&r`m=_;uO`6U zqMw&}d#7x^fy~Pri;>YENEJ8s5rQdRjTa2^a|(LF(W?E-vG+*EE?8H0gcS(gZ547k zPS!dAlEp827qa3K#G1y)sdd<)r8Qwc*av)U5|Dem9H)5ZjRWFA&PH-Q8v!3n)%qNou}u^m@M3ni@;Ll zOlQw{Qb>1##pHSBug!Jh=!p-x#ErlH_=XI6yh*h2rW`ztH8YiFe$Z9uJ{2c-M5e;MnZ=_MBC&sO^YDIM3 zjaG|MmhYPATZ|(8SGbxMsM1^zvmXec-^wgA4_MmzYgt0v?;Pad^m3a*)3I#DE`O~4 zTDm)|J1ZYeUOTubJ>TkwPw6&uyG|m6rn6x(uJ$@san{XV>o|RB509dCdGM_F!+VJ7 z^_^Fwl%IX1XIE8Dv@D@AK-dAWbhx~-of8>pJU@ab^K{Xxkc{#6)625D`P6-k^k#%(fE4V$rahy^CoLkIpeNaxy`&78RioE zWYj2C;zhzBnlSjD{;lJ`@hVV-VPHz*7|>B;U8)+cob>>}raEnd0{@E3s)*Vi718$2 zgLPdR{bWG{S&a_o4XQ=!@^SM>10s!a{n+b>J(YV4J#EHqYOLtcKZYI(>NFVs>HQ34 z*6Q08n7UROizm_lbfJEw$zyiDhyX^4&X-P1FFo01b<|=$Tn}&d8{|6^NJe{Z@CkA_ z{!T?%*q3yW=$~={30>z=aoqIC7e%N7viFKq`okBA^I+`^ZS)cAx$|RCVOed0&eibd zj}AWVsF45EekKB)fMua&pfZ@uh)}tJuY90aOqY69b>x}X9D2WY3S6tCw4ll3qC5L8 zc;BCNUiF}pYdjsV;h6gLoF*)p@6X>HwCHld|Hsu?1~eJA?H>~X6$JqS0fSD-Nry_O zfOL0GItMBWDy7uu4(Z+)F+>q7MxWX_o!>}o_R7`O zowu!Y2Nz0ybu&s{{4AAcFjLPBOsD++KkDrb06frA+CXI3b8PO4TLGjE{*znZnAd7v z;@SIk_OYs6!_TJnBTwW#L^!fwrn|<#{!^k9u%mZW;TB4R>30z#%NBM*a`I^RsYw0m zcBCuruz;goZDny6ooVU^3@VjOP~qtd!*6O}n_(3LR%stQpWD)DN^nnKS}K^s%>dqh z-3}D=GBo%Ae(@^1hFX%>Y$?S_6zdEGYajLc4QLh@b@8)mjKw`5Bj^AB&jE8HF7MW*(g5hVhHX zkPb&3i+s^8I;+pL`xnr)fee9@PpKs#!|Dq@5b7k?g2DHP6@e#oR-QUDJYJ*(#ygCz z{jp=*&&ugu`}$;|VxI4O7!G9Gf(NStYrum6CYmYG9wn&J%PCi({vj?G)Qj9>fq4sF zI;TtIC0gZMr$YabzApR%0_VjKCjqD}5a8iy1f2F01_z){V7~5bi}!fjNqQ`8VYtz9 zp^K8kl20*+oCLd4QuXv~*W?OA?ZBE^bD3bJqt7;ccGt|CZP{U9VDSW~(Ow5Coties zz45G87geyw5a<|k#8B${LD!(1(*R*tCW-pI1R^Dm7ac)CM^Rsf))sW) zZ1P@cZ9ZrK{Cd-Cm>GaQkas2*#=OPH?ncsf%pivoVmKm>9CozK&%5J=Ao!D9xjTsO 
zpc;4{lpJ37!U?_nT=q=zXrNk`Y)T~-e)I}#fdHJ`TYYO!X%&w0Ejo2_aiu`ZJFL$K z&_}UC4@ttXL}$yRv}OhK_}Zb93UC8Vap&Hb{n15R5to5cSq{|VsK$YB905QeyH9@2 z3LmVoMEtBSdLh#XA3u4qDD|y3p|hrKDs5y(eM32(d;jJs&2jgsjHb+q15^np>s^b$a@&y z?`}-}hugim!!p1i%DeKKfAbARJ%fK(^Vh6PnwkKfTmrsEfqqPwhaCsst%$zS)NL*p z;cnHZ1-uiBK2sYm^R*Fd+05&$u}$E_J7=Bui`ghCHXcL2{o(pDbme?WIDEW}f`ap- zY|z=e*KY_l9vy1^S>ZIcBm}EfZJntA1-YPFpZD3~221qsHNb1I#Q*)a+C77Md(@{N ztk3d7*UF)rV8*cI#WZie%@V#+rP;^C#o0`+6s$zTPKZ-`5k}B(|l^86-#CC+N`iTyzle{sKLkQ5blj z^knvZsO~S1jCC&8BxmZ@& z$i!@Lhr9dq{C2_yjVZ))wN&Jn z7hGzJXs%~wdC{JN>s|8QEK7Nf!z;`=KwfDRJLdun`4y>RVi^qf$LAQ$Xpa>Eg*!GF zd8BSLs7#am$4OnR{Kc!Ti^ij-#UF>OfxI;TTJp;GvE3Awo@@Jup32}nab?1nF>ZNb zgjL&ytdWVaxuoqv6n zjD9C-<}!ZxKjthIo$|r8#m8gS)c&Pj$q&9KP%SlF9JRKohFeQ(+)&}q4$9#NN_Z1u z3^^Zj6+A@MXJ5WVDWFQSiF(c|K;q_Tc z&9Bn~jx;@M85>bO+yB$(@BikQTRoIHmXZD2fl)}42ULDaZ`|!q-s55cT=Q(jFhug+ zdcIYklh{c%d4(!9{r)d*%$vuX!toj7`7~LT`%3BK@drguh4|mIrNY^QmAS`S=I{Zl z@ILpb&(z|q{*!+@Bx^s`dgMeQ_q1IQzhPO z|59-CWh(`dTtLg@@(|&glJt{)oX-FGk3Xuu$f~{!02=TCMb#O*m)wee@>S|@)2a-& z?cI!jNjtIc{}_*nm`OD0y; z_i_H|>8yQ{{>YgS-KFcC0N9!T8U+y2WGOUHof2JzGW+hl{=Vre6YT?{kH)MEkm>i| z3P+ujPCj@jui`XhW)1S*SNcK?Z!JLFO|HO-~!+)e?}6A&Kd=uV3PO)~xIF15-MTo8Oh}8ver6jV|0zkQG4!#n9Oq z`>J0pp&_3>Gh&;cn0>&lk?R@ZR7$viF#_K0%+kwMH8l}@*WSMWhcpP!JDsUrt8SM8 zAOwU(0D}9%GV-@~EaL?({=88T`fO9y#Bi@Ul#~-Co}v9m+Km(v5EkaEYXZPl`AKq5 zjqe>QTi6fSwhhiVdw@=;0iLIK!@lBA!|=G)6K^0Id9G_GYZ3@Q0~@_*-i$$G@92B8 z1K_Qht*~_mC#R)RdqxfyqAIo!7^Hm3gvARqwzlL^7K1Zss zn&{R;ZvYd9dC{}6;CqC%dFGw4*|mkMdUKy&unNqEdHFen>6wk=bxGOHNS}^W;;qXR zkcZGglw^Yz@ba|&$j^H>Y)&4i{ZY=ftY7@%ce9F(&9N`51xtF@LVNSlkF_}Tc{_o) ziCNqBR9xf(G#Mdge$eipH81=fkpD9n5*u%jrk2_Pq=)I1S9)`*0KG}-buIFg$)XOn zR@VwL_;o4pY`_HBPzzQ@3&;F82dSY13Cov)u_F34RUM?p0Lq8!Ma^cl^-#QAY^&eQ zey*4a0TZ(Q!*BWEF!LV;DpEDoERXBogazW}wUlez4Sg+V;mri!6Zb*Uie~RorlnO( z@wW?7{uWQXzKmqccF7pNc@YLu@@|-(8w_W)#!j|AqxWh#rE=O(x*M1)xb$(8Dj6`c z#Is7YHoU-l;UPn?Ute;fA6um-Y>q154Rs} zS#^X;Ces)UU~Z%$+t@xS)H(%>aJ?-%5N zNo>V!bem;O+AZ-X1zCbOt>P*2!96!9Oq!aW1%&`th8T;m!N&!W9(dN-=CP 
z>2(Qj7(uEbKNARF`dz9<;yy|faBJ^D{Zad(nBxh%TrsFW7oZq`tUyp^3BRtfc81m` zN&eDsZEZ8m{*eewxmgQw6$ug znU31?6u<#M;9*fhpjE(dx!gxtp?}gqLS))yBR|snuD(que{u;RDDMpC-15^C&bJ;y!bP zvkx$K15~1w-|l_@yz3HXUc|6iK9xJr$+bs}dG!D1sJNHp;mHI2Rd8SpQF|jyUCdZw zV#Pbmj7+QJ*w!0BL}rcXY_UhAXtu(O=dkTCSNCj)G7$j0N((0dt& z6FPD}|Cplj7?@#A+v20}?_2%Sp;}-2s^UrNUa8!GGp#dejQbd9x4HM)cYwZ5$-gv2 zy4A<3yH0piC5$C(KcTzuTmX;`q2FyVjbaHThLvv5sSkKgUutwlQnn z{}%wZmT}3>HD;bm8N(Ye5#;m1w2W@rV6}H!s!N|8uR$E6s_}!33@2w*hE1C4?UrA)ciCiMOCiHBa3bBNP9Etyl`jC$e{3~c`3Dea(Ke(Rpu54*Btzrat)`U|F z)@e=JI^>*)IgclU6gy2Rmr5o)m$aMmI)tw`HGUrnXeI~lVaqbAV`G^-$_A8b;rp_` zyY$O07cAO(eC7UQHU{CD47KRnrD15@sYsSR1an&ciuWALi)y|)vnSSlRvk6NxcF4uLUGxL zcdEPp_nfg34v3e#hERHMsjCQ zc;`!``S)oeTz-#;q--ABq;Lk6u0y^%E7tK|a|?)4cc*u0N1ZY1Q7hp9XFK3p>o;MK z{;e9Tj6^MUE?PU(mz8M)Sk?d_=icoTf0kk-vW@9AUht~Xt;<^lT3<9&t99={ys&9( z?ww&}|6-8zYv~#Ry8ld5bF)+7z{?s-GoB!w_9xzKFrf2y;zr+^+V40$e|BdsVR`qM z;h;gxnb_i3^2n3*86&_x=ksB@TqjwX2(;{`VYPLw_{zl8Pl%bRHJmU~jFhwWI}v%J zE`LS;MJsENxdrpgxdVpk*(?w#-%g9FQ|M`ATn8@tNCe5N19Ba8_y}YAz&G3#lKh)r z^uStH(7b41gAA(--UoCGc`<-r-Wb;TOd-1&fj~^p56Bx`wxAT6ru8PK!ke1pg!11l z`69HnGdvs&iGLqdR))}Qcb|X9>2ms#o-4aUWnVq!UDY^03V&W)7ZjV`8?vK#bJ%72=;B( z00cLmBV}0hlT3Y~46hX5l9}+!ZRpCXp*l7?Ik_0f?mF8VupW4LYJF&(OZPhI zTk*!jCEpyLG;q#hKJ9F!mZ!M38-r_0GsY(D)rW&Dzi;QBo#B~VeRs*LW7FM+cG3B_k`O?# zUzug_A3sH^>JI8X`2Dfe#@_X19jeUFuf$pRWMpqarjc@LRv;j5&vCGfZTorlvxHtp zgM1)`i2!EO@ESnTGe$CMDyz=20M)?}u~;A<;tnv*7J2-t%dE1-7F zvz;ozhJO&0lL?8Hp5Z$&UmHB7*5|WdwCQ<9?Po|GQE%Sc6Gw`9a2$N$d4Q~rxwMfP z6?D%f@a63daI4SK!o|P*J_Lmg^Oer3h1PMyb{mjG-)-c@N_@;eNoBp>DD`bgp+-dLR{@5F3Wwp5z+3yEmth(>$ZNPVXyM~_63(&9p2+wzr5}wYgywoxMDG;`= zq!M+(cAbp1%QtIIZIuSSWOb&|I6RhZHc^WyWhy@8AEuhpG<{GkFK{urq^Mrx!cuh? z?nQvk(9>43CNBe!+XKwgzivw>2xHFGlvWW+vm1wua=-Z&F!5V7LJIef+J}KiB(^q9 zgK88XBM0os3oAX2gMIkA6l(boC|Qb((eVs^VWpAaUH7ZM;lI?$>t~P3bwtwTHG=eP z3Js0NYB$m3RPVm%D+|)q%7kjRa*GNw2INKgH&L@^jVw=G&j6HCcmvjla`JSpC)o8d(?uSN5s*u7=^&Mog<7rH=m? zZoI4!AZts_C}I}gT>1jrM6Q3XE&+Y)-U|l2V`|7lSQiqN&&0rC*? 
z{RhLAFd{H=kWEEPt;@#QvT(tl-#xb z#@m(q$;_QoO><>#@pV?ti4zlmm8+V~ICK%hadl1qr9ft5sME5&ylj!O9{(gw!Se@| zQ2zP-7XkTDzvO0J@&-V~V1H3Bep(?;_;|>~>D#=DZy+!P$}pb(;bIEhIrhrK za-Dg3FCuM$(PQagi;xSvCc?ydC&td(`kA`Tce$Tzu?#&dxYxqzu+@I%t#T*GiUf2S zwF~c@)O&}EcHtrgZ<=f?t``j8fc8}LQ7+0d7BEfz|3CMYfoCJOM%s8#*s^${@*?>s zQvSInvG`EQxWK=LKVY)thRpgWVkqaeXEpCP$8wG9PzeTh0$wIz3sb;F^qAfx-5{OH znAeCGE67kP^3xnZD!**_`N?EMq@n37;-Gq<1Ldg&Sowa zE3FBdz|F+e+g-QSQa586FTs5+KqD+h!4nNrDBej;xc*W7(>KReSM4gw$D=s=l8}uL zVS+60Unu`WLvmamFswvr$JWNeZkLqL@g(MJb$EadzSn%Uf-vm(^b;L~Ufuh(oV3Rcf zJc|JDH;Yr*Q&YYgHb28B0%k+HmsocSB0!LOU&H0q)?>qrrp263{LO?m3$5Shof*yR zs?+)6{X@c5&qIx!Glb}^0P1CrR1OW{(LsCuSU{Plive~NqMy`qAnaGco zu*SYsw+E|lS7Y~jomY!qnqeKLe2VpbzyG6>0fgU=uII2P>gts?G!(CY;SL8g|rK1Ho`fByT6(EkbSD@xzw=B@7`C)&f0 zt9jq$=(XQSaefmxpl*W>O6S(=)X7W#M}Ik$NzpiFe{)u^K)XU!@!B8fG)8*&!uoi$7eq4Lrx-J=H9zKq#l zk-GK%Dqf$Ll#Rer0SO+>HdQrLZt74AK`ClrwSJ9ES=;%fUUA_u7zk_*ZmXNy1h6F1 zJ|E2iw$C5#4^qAM?Wo$xPE}KktB>hW=W!Xzi4pGJ*HkHxF>V=~XBs|&b6|{mbmmo! z3KC=y@}~Q&^pCAg9%r-(0Z<3=Kx(PFk(okXWLC$$4U0+p;{OrbhJdRbV6y>EoGp=q z9l&eg3J~iT-+2KLlfjK84#08ceZ0Se*30cFRlcR&fECg+E1Jq+ouX8YXOWyWao#7= zzgMw~eBN0GnF>L`cq8FUkJdx(MvKtM+mptv4tDasHUPP&3!Yi*42>=8*jG2$SGcAV zTl;Z`0MEaM!%hSoXO>M$qbODh!jAyhlkMYyyFnLay#Wyc88SK8XdIQAH5~%ExP6H4 zTP#U2MTR5V)OdF?b)~W(T_>*RAIwf~*}SPPlAjD93u*#PtdU(I+2FswY-+4KT4(5c z64~Wj7v~3m5JLb(lX0-4Fk0w+YO>Ld#UQ@K0MS`&l2mtXyJKiM<^c|ASRJuuFinUQ z4K}Oni-eI|L;<5=f?8!|B@@1#byW%vAG$B^qoa}1m` z;3?mMoi>C3Q$iT|mSs+cuIc2qz`8}l)8hE^+&ix~ff0%Gx~a+^$750nPv)&#J+{MV zuF=9qG2ct)`M-S}S0*qU!so+VvZ}hlBIXMdr z3}S8tqthi-?C;#I0?uq`*575}*3Ose^y^|P&cLC~f{DKkHE+o(zB^Gk+40os|Ta}NuCk1iY6{TRx zFA%x#b(iZ)L+e+heE^0uxDZb`fIs4)Mvr<_TcYZm(3{(;y1tD8O1?Ynz*w`px0{<8 zgR%2Eymwk2&yt@9DYyuqfQ!Ifg-g; zZ!3ha)1Q5PR<$ZPtS%qdauy?k_;u2@TSZ0CbC^98C~d+ zd2T7VIJD1q&Yx1GXd&lO5C7%&7G=$-R~{KYhU*qxvT05)ui=z!<5PHG(6Wm4im(q( zhIP1&S=m^7`edWRV?obP*+FyXEU`qo8N2^3SCxD$mLfZjl(!O+WF@811>4a-`S@vN z=Um8vQ!*V8=O~~GsGLas?cfziMA7s7*c_GHb@$f^jluxup`=EWo z$zlzoO#)x%xP5eM?ya^M)B>ZAUcUd#YYaWw(w_BD0h>^O1zrDxiX#ode@ 
zzuh^=HF{o*n)+{9mVX%tqvWs(XviHHo+uaU_J`bEgYd`fb5y)|qbu|-6o9@!l=n}5 zW8nU}rMwoyti}eze$Kat7yh0e-X9B)Jg@DLebZPaNc-+Q+d|7^TO!uJk?*1KT?{Dh zA@AE~q6S^hcAX}l42Acnf7HtF$|x9OdUQ)%yEj9FQ`eq5BVEZp?p7V^ZOg};Z|(0G zF+O(!XO_S#)+$;FVT(1~uswM=d~K`M`22kPaI#zmoYR&aw&;gF%-&USxqoJV^lq3* zVxma5lk+4&;r`8>><{9qrNXXQtbD1%eIwql=%YJAyiF>)Gwz2}}}Q~wFNOr#UMfPTlmcIY&Ne>@?)MTkn1~Tk5EW$yP2Ofw)o|_he8~4}B)Ppr{L=cH zz7!a?ENTw-J5J<^_!p^uSd*7sU@I3sUR3fvdnoD+Gnj_m&#*VX%klt~uBoruSmu~b z7dG7xcMdP`l*4btNs2;pGkr8-2<`m%NJ+unp17d@O&=Qq!1)`Oy8C)c3u=3xBMz`H zo?}@3G#Ew)4^PXlsW z^C85g`qM<3JlD%5qzv#Ps=LT?&P0;Er_Hg#;w?jv?3|2hp&xEI zgt;?|D4rM=cg~nIDggDX=Hi>h{bxB|Gxsi}AAb4$dzk4_tsJT1iS*i0V;lj68Mjf( z&YXbebnq+qY!PsUSb3){&ch5-CWw$Ec&oy}(8$M-f!N9OP+@mGdBLrD2J(09iKG3) z#S~I=kV(Kji)Cg}slk2Jv%1EUx4PGLFToC)<2YRIvG27i(mM0^=fEYIzj=!0`ndIi z9jmBTgrb#}MS{`{MlN9+ zr`q4dl`xNv9D$g@ACJme-=s8voWeaSLxP+vvwf^%2*IkoequgQ_fyLZrd2=!PFl)~3c&y}C}x$;m++p(_O-fByCD?b_}3U0o2jUEaXv;dB)% z6m&;qc&9ALtesQaLq_;u(h{~R4Z3;#xUIB}%b z%>cUe5K4-G;c`m!f?v{3P z!1}b;(8(;ZH4SsV*pYK`w0l7ut>j(CmYDnp2H873-c5{^zK^yG5;U!|eOlHqRcQh} zsXcFCL)-+r@}rr^BW+u)mD!C8cf_@9ka-E)l2|+r@d69XdIH%+@#MHFzY{w2VN{i$ z&5F_I{p!6-i<3@s??ttGWdR0Vs>k=tlYOUiA5SPs%WA{Jw8p+YLk{deW+wgF26LjKqS!{S{1L-DI5>!Q z3y#bLI2!);oKMu%*7p2yI8O`{TtDvx@-8`g1$|Cd%MO>2o%r~L+Rn2RLQ3GXvW6=9 zrFK0y-h6amWj30(uV2b*-ruy_wi%bdr75%4z>M|Z>I3WB=mOB z+nNSz%*3Qxf5Xld&`&3#uR=9Gf=?0ZD}Um1Be6|}#IswZd|)uJJJM7y43}W|Lz=r4 z*1NzQB$cwje8<=jTmM60il$+~&vfWQEThl~w6wufF-1m_-48Pi!pemViRS3tR!H&f z41i2DAaZijlds%BEdX3mkmu~*hSKj`T4~`zBoOVDIurHk`b4W&FT&S5@?vMMr^Vi? 
z4E7u1G8P_7U})3XdYypVdK!4Ei_Wk+pSM>asOt5drOHlNDdh9=qu?xWVS^!TkD*1{%R7cu^{QZ}xO`?2qe$?V_e0uAxwQ}WT->wZ-`_ajaM`8)y zyri-?whoHMz2^hc1${Q^hw7?`u@=sH{!6shyDdA=zDZ(tHA(bb-r>167tgFYLZ8?S zaNruBA%4}?tS2!2ssS!Ki-I1lW610naJ#Em$9?E#uxEZn1?x_|EcZ{hW^*UzD3!Ug z<>9QL^-`TI_D!QS^hy`P>)aiR8hoG4Hmb4YTaq(AIn7HRe9AH~@T(h~@cc}wifOoge zO2bCuZe#M~)zVS*>Ar-SlHokX^DRsyXj)CfYvn>o2i70Cy@Ej!dL%RE&d_x}_+FB1 zs*nY&YU9$L4eSvq$HwD-p1@1Aa<6cJjWyc`NIO675FR)=Xh^@*fLzGiuSO4RaW&5+ z6eY;4QPRr&{kD)ZpsJ=u%go9e*5o!P1nD*t-LiLeU8(u<<;!5}=^&{!B~<}3(ac67 zgob)SooImuN*}i0X?xx1GpVYfGt^!}Rsrh8a?M5oUNC#*xV^IILBB~F`R}S;WN4q? zg;*w$n%a(o`E(7&77xXRH2H|ajq;BDl?3!YQF|C4aa+&fXgS5a);<^-8 zF;E$qD~UKI-#{H=BG1WT{BEdRePn0r=B4Q+QVzq+p<|dEMC0?%B^yrQ^8-Wtd$~Ya z{!f>#(_9QM`UPCU`gqZ6UL-X%RKKhNbGGHamgi}3ATOT`7{KYGbfVg_azSviY*e5B zCJ#Yj`wQ@eZqKIQl3YLjV->U@Nl2bA?oXO6HB>&xU?1>WIuRaBW99VMa-R^b*V&>O z40s;zwJmpUesVWy{+snfMDO)i<9x)izj?hPiNcB7(}qDmt?v10u*JBnAJe|cS9;J;1t6k`ZY64h(@g6qC*V))JRS8w0s{#}YwyF_u2@H3)C8n5Ua{vbc7gR7M` zILAYQReZaqeS@_GY(SR_RBx=9d(P|A&w9L^`1m zzDrjJq(4-PFbXpLwERM(VPhvu#-0DKh3aus(_3b{qa0D0y+oo-h5IfFjZH+Xq5=fY zLLGT)tci%=T`uU*h~sTCM3i|baJ@k|vcjs*ZND%Wb)_~Qn+D8ocbu<1!BwUDSj*p< zF}@9*He`y^auqkx(R6R5nS_osQN8XHh{9nctApgz(tn&mocW{Fn>AogVec0MHG0=o zR9HhT5Ei4dZ~Ia>nW2}p`5H``xsK()xremw6SF<{^!vS zLI59hl@dSL&2x9Q|GG(EZ&f2UA?dH<jrguS-Eb7m+~L4$TdyQ9w?ri{e_u`n4?Z&H8RZ_*7vmN3 zH$91Syxs{GXw^$*j=8L(`64|CWppur)pM6)>T%~D8ObVJlYL&zIxyg&D1)E8>2wG- zJ0N;vc7mty%TO3OS={@7*+A(JIGZ!x2#0%N#82mrO1``~ivNnk#39nv3oJhz5A=T+ zRFwi&?Ir38?Iy>J<>~jkiR_?y;S+QBMNeNfW0Z^UNqM~*J}q|S!K`N2)IrJ1`_0m^1PLY)R#c$Apc$%$uX=5e_&2fW_@vuffWCRxNM|GL$S@(}U@j4dsi@yQ0LaRablUk4(eGx>=q zmQX+>uR()+dyH4B8N`d zImGhoeTC^lxv9jYU!p-SmaKKvii1Z}W4W8o=joca!XmU2edYY@o|>&TZfj#}Iscw) z&Pvldo`yU%lUAGOE7y@u+`#;=>=y^p&creAc~JW%py7B=9 zTN)7K>?dMCpnZ_>X4jCwPb332LJ2?R=cJv-3>i?AK?fL1NycPh?h>p<-tqy`{4dF6ANpr*r;t z5g;zNkuWjjjK1}Wq)wkj79CMZkNNIh*Tlp`5St=Lb%ncE+1u6{_)Lwf95>dzIeNzj zDs=}Mdg%I1p$SMw=YxDdY*9JrR(!fb|)8s~i~48UimGY?eK4KP2o1%3aOd 
z1qoWWw?I$!t!Dly*=2%MF4j*6<>A2CFJ+3CO3!mc@G1BCbs!Qn*%Tx)y};MCrcRrz z60xZ;txCJD9rE<1c`M3lU_iR5bh~e}S*;B?7zyj{GB!xjP;EFw$vkao;pCV6qfYu0 zUyjEt8x3f1XNST{epKeX)*~Hq^22qV2}Jd9tlFUz1nxA`cinL0y+ms2c44+eDm0+3 za;876Z=>bB-z!osSR0O)bc!@V-5aako?XI#EcW*IJ?@kMl zhXP||hjV70cD#mo>;p10GeL1tH1g~;NJ8j|s3>tfTdu62<+`^A%aNGK`egn1#;hP% za43`^Oi4rdj9mI1C@|;F83i8wj!9ey#>WebVL{V1ruDdU35JNHZjJ~IU73WdBapvu z5IORauifecH|HfePR3$dr}`qXPQ*vzH>qdgj1mk@=uJ@Hy)W4Kpv1Vi@D4+>ni#Qb zN5Atcfg2v^<`@RJ*}bgXkIFvv84`@LuxI5$=pplHd+&YDJDzDoE=(@jY*E8pPJygX z@LKGGBzUp?e6q>b+k*hy(XeyB#y4g_+&Wv+wWB8oZ!DR3KzE>aeDQ!Lz=#)`%|t-p z4GWJKg&ueTDtQjr^nv_?0Zm);9<@0i*uQK$gnSe}_jMnlJI;uxY|>--bk2p}71i>W zg*DlBz4aB|X3ZIBVBgMLUwJydVaBWzRZS`i*&w_P$1wE?&pr(ZSCgAG`s`Jx*xyoB z;3;@mH2##)8QpFnpQl55-L~5iQ90OoYE~zo)sXnp=Mo1C%ZZQfS<_%cXGXov)46un zCue!2*H0TFe+J9R=N-})6&1zz;SS#e&p$eIpn=0=AalAq0)4EzwVcIl;?MtsoOB&( zIIvG25BZHzJPjSXJr~qoRW8`st@%(Kt<5A7Iv&e@x(w=^JIbjCuI!Pl!(N3AwH~cL zQ%RVc*aiKUYdce&=cLYa^|A)QUy|tG7IBo|5jPmTqtvvW%OpWk% zrTtQjUIGWDJUIA?d67u`qxF}k81){sML_%gLyiW^1})$7U-o@DJb4x_S1T?6A|uQ9 z_LQZr>%7(g^WtbU`9ynWvxeR;qo?}uZc5LU8Pw$K0}luO-)uvwOAm#pWFiVJ+d&?} zD~|K23Vo@}=MOTsdk2iNI;)B*9S%t@PRFG*hG{(`d|q36J(CNtyd>iC&C&fVi!&Fs zn%>Po(UG_|2loG*=6+elQu511wXu;vaVR*TGNEz)_IA9?a^F1@sAq)nspeFIxGD$* zGO3D-twbJ?65C7tN>3qPh63F2g;{15AHsIk`s2b^$CSd!%TvU^B`$YoL9FeEm7vuxBL~oB}ah|+^6kV6; zl*T7BLtQW|N!DWUcD^P*_(4Zp)DRbJqs`#aAA`6oftpWxJ*JkTO`=;Y3Xh~v*3&E>A82?TwDiZl0C|tj zzw(-n9duSvc~px1E9vv=hiQAeI|{uX*e-e)8KwHVzHfi?!1fx&?CnFxa3TDWEw=z< zpp%AT7U}z=@VdmgW^bQ(&G22?^e|1|GaG8y!D?Lm1+)qBDKeQjl%GZDF~UJT>hJa^A%HdVzvEGj-B6yP`X&-m#?cttv3gpLWz?h#M3TPBfXm3@y5qp zXQwNrPoyiJGzs^&7w_7G%YjtU3$m&=uFQs%%)AOwcNd=^8}=SXSr_Ou2tk}@cO$ZW z0(uGJ$aJ5euVF)8A~&g|VNz*6X5SL{sI)ZcifOMdG>M%-HX>eRS%|X_JSl!U=OM=E zR291^KqmD?9ztgh;PqZfwG-kB%)_B7$b=eV3>afw=yY4d781eP z(4S*5c za!;-r_EAPWy2mp+&&CY8ve9D9?BaC&^1`WQ@0a>84Rw&CWd?EUrc(@16vxxrlTPf> zWNT^4V>EN^;4HZE7}yIu_?~plP=-VSSy>qYuS5~bp$ku+KDBXgHpi|Q!GLNNboq~v z6{F29!;iHxA>O?cG7OUekQp2*_PogsVG8$N=)Udk(XAjE_~Sv01dMMR$o4*eEG->k zThzd? 
z3Ayb29x}c?1lic1+dgfJUfPq*7*@UcdXYfdtpJJ$1x3A)W!mp(p zPEL`BJkhnRFp8d(%*P_e>H`Jp4FFl|PU;ichyg2*bguYaI z0P%zC=cPeEvD$)`?6$Q)&subFYs^jvNJ@@irQ=m@Z)^Jt=X#)26B~2!$V3FPsSB1vT{2$@+M5>VEV zeO{F<2sW2GbP0Oahz6?PdmR)9EgrQUuqJsHuwW|rgPsdyq1JFNZ8t~6&9v!Wnc1xV z%QVQ~?5t@Ya_Q*k2-HoK6lm!n*SLSj0b1t{pN-L{+7vj&=Mqis*9@ZOc>6tvht z*I%Z~;s(;MLcPYcn((j^`HO9}oK<{0@beI@a7p|@9CN@9yYq)K=j=P4o3-}n9)>X; zL|w-oLmq>_;niC?xjxDpx#!G={T?zo=&(p_uDUiC*+c_1LhN7gFt3K#BB|xr>2p`D z%jrztu%cupJxxj%c_Hc2x2k_yz8>f`BwVrp2HGvkW+iuh$6t2ig}veElhGkpI5r~8 zA%I7$+Uw4Laja6W7qkA6`( zHJ^@NiMoM$WR;fsm?2Qd0kZLeDK@@9;iBI+#;FIScmun7v=cANgy21gxv(2 zYc2UD06|PQnU3w)Y zEZ_M2^yXYJa0lN0k~Q)F+WYRern0SJ9IkR_9AyLnGc*+trA3NBs5U@Envl>tNC!gz zgFu4ffIti*C?FkCN&-Q;w15JM)JX531c(xfAyNa$dvv~UeDvOb;CM< z8-8c~WJ4aAN8vw?c5D!ud_%$}`-g4Ei{)|xPe*wJd{J(lCEXT1_Ia1$8L7h##EXje zz&@Ngl2p8u8_iVlt&65o=7Z;0zOL(I_}LwpDE`z0*B zl>ljv$c6C^ZUtzBG@1j0wy%e15>?TXsAH`5o%j5&_r==;02(sg@o4O!4YII7S-ey4 z^82w6L=19NdhO5hrmbE?f0J?tDr)&m^jz7$4gGK60^0|juiuWPo}B0xyOU*MlK%Lh zWLG&p$g>#iZZ9Nt0+FGw&VV%JP%07KSdZOoFFhz|TIr$i`}F$Ba~1J+h{#D;?Q9BBhNxX} zJfPvj%N`-7vcX`Tsm5kJa@-x=ctd5DwwCa*tt9YL%$-*yPnY|iXp6p(=saGdbiS7u zyif;}Xv@mEQ71E)Z!-D*m-NTa_5lTUb+N^gsDLn9WPv#HRKP~ad4@qK zqvSiclkV%rFWjrO@`wO(+5ca2K9Dqc~VT?jue2D16hR|>d(s;)^v~b*0n1bwod>!#*L$CAvfA&Dyj?r)s+c*LXlPiT?aEk0f#P+9hw0My(akOV`*K#{WkRR}|>V^$f_|Q;XTW%wKaV zmDo6}x-J^+V8RnUWv*~`D$qzveuO5h>IB#LO;JgZ4lBU~jzA;8i!bwi$fC;{D6^5e zYXZJ&&4LE21WrqvSn1tF-L012S33M?xxtUIC1`6Tlq4TRD-HF;@r;C1X#C@6DswwX0`r>(T8e%0zuhU^# zY0Tj&<|<5+){_v+vuLdg5%e9Ybz3HzoZE`MA>OEgJr+m?&ux6(nIz?;R&JEhxfW8y z-R=3crW5Zvx`(s9XAW(|P~1htC#I{jB5B~ejur>lsB;BL{cHRjz9wqM2?*1#MzP?) 
zjnw-3_i(0u%{4WWDc_LasFHknXx9wDVLt%aUE^SF7*|NueGpKK{YLa?!&O&$P$sML zmD^Rmcunq52gAE-jm`#-B+DC;2Mb<;saYU2e-`U#m)oN9b`~bV73WZ)iR}h##BE+W zf@WBO-Ndx3G2dzhC@1ZMBPPGZpd4h)W@^litWl!xHoYetG+9APld&;nae2nc<-x96 z8@`9yk~Z8;=+@V3Ya|=I$-B3G%vwzGuFr4<2UaYXj}_yK4|*Z$%eBSmOvPb++2MAi z*6sJgh+hVt`Bmogmp2BGJL*J<@{7o@EuxEHhd--Fh!{|~TJsf{^FaNVYf?#1M&{9M z2(>ZoDo*uxP1>Irryg}_x1ui2<9k_U4{{{Am%M>wAEy^LdeQ@ECZ>4$ZI`8IKvK?* z_q8HbRn-k@+SPlb{DNS|h?coIck<<&lwpFveSz>8Q+f+11P04Lc1$x;o^`A55kUE7(-bB_HZ~Op~85Z^%P3?(RP*7!vWi zqoaNM_KYClP?nuoHO}f>TMxK86ECI&7~2Zf9?#e8B}JGTkQCm`>kL`e+avMPf~&pyo=VT{tiHrDE5j}j$U*?1WNoEze(c;Pl|Q3%t*KE)+5oT@s={iY^{W}QqR!O* zvAVF?x9PgG{E(s*2~KUQwzvSUY2Emvga+;j87hD9kREy=W%*NeH3CW~M1NRaU42>B zz1{!^Vn%q|6-F)gdhJ*6+jWGGmIX5qDRJhCUk%5_@+k%w1B+kdziyI0z;+CgrTyb+g|mq$g2_roKM zh$9|#Vu#}#5Z*Q68mLfS-QAnsm6*2f3d!0hSn{*V2Q^NF1h))9IkII%WAUmZwoY}* zy67{VoWBhh$Hwek*O`edh0%#79^2Z?4?qzfI6~zOYOi3F6*swQ*#Pf-6B3LbD9_A9 zqiZ(He(AjJBVwA&YLGynN8p;>8~y!qnlD}Yi1uXGt+3vp{Fs8(j+L#)wj0ns2}^N^9#~G^f0HsaJDVj+NCh+OYA4(NHI;%dD=OLFX20O`K}D&E2u< zB`D`F(@l-zD1^Q`EMk}CO%Adh)AcUFk69tDh=o#s8|9^At=rZVk^y(fT8Mt%7s_qc#X|WuA zpmlQw%~5ce0d5y?@{#GHEWF$GOW%0RZ!X}V~X+ubJ)O*e&r z!P3Nl3qv_c#u=(1B1dhZLvz5PSD2?|8psip%3~2ECw|&^YSpv|-J=he($xb#$Lj9B ze+v{96Hczq3>miBJoh;3)^u3Niu6Y4PcmFwx#V>ogd^0ymw*gsYk9VJZe5;=M^uJ~ z)!#cBeoZ!){Mg-Eyk2f0u{(iQgG0KlfE{y~*|6F|C5Yz5_7d1w;#c!bw>d{Gf0EHS zS?v?n%6Zf*N7(~mKt`Y|^F+xT56EF50)h9>-bZT;?rX}Itk`Z5fFqsIjqr9T z-1b)z({_0jy!o?y_`eo#ur7kgm%*H|tN@aoGU^0X*HcTjV8)u1({6b&BWaXW?)~>< zw*h~v%I`WSP6e&47g1?wUysa5+-&4Fi(%gG{DO{4`!(&=G-pixMc`S~DO*M*tt38~ z#iBkpUs2i_ecqomc9v{B-1k@L=|X){@IslALf6(jW(0`gNw3=J7un42*6M5=N3$n*Nhffzi`^5dm3<@FhV(cqIbGTJ-M~zNJc5 zyTm#K+hgYDIumDp%i*xok;ZIxHpU9r#87L2KA{(hUqVcvml&8(O~;25bKVm4S-m>= z;R7!slgCZJj^yWNK{B?RbpvsODBZ}br1u~rSDB6;YZ3{=Qbwi0jY!-;#{09fvcQU| z0Dz)BUYh?K_+z&j5Q<~ggk;Qy7FNLUL%nayVr`BCQlUPRyfO}j8h?>zK!~)n=SRaA zUA>Jb4ji`TrN2GUR0CUOYHFI5Zp5SBsLM&sI;iAKsAf= zbEZL;krUfx38vpvxxdyfy3VuoG{NZ@6|!B(2dGuOIhpr4)&`B|IH>Da(*DI7d?KEB 
zhx>5C-YL9#&0+$-0jym(XhYFP*^XgN@TmTwo+H;NH-JZ5z^Tq?&1_sPfin7FqJgCn z(;%rk`j=&+nr5}F+LFScA_F7wR#r#Al-paGnPMW=U|H?0VAq){uDFD`wzqGr zwW@KmQtLP*ylPfpxdW-sp8CbPtaxIB`fa?gPg7TXTE#*b2i>%!Q*%&;_F-GE)q`)I zHSw>dazj?%q~+s!!u*}djhssOyLV=d1PAh&d^g7H$*2_CfvU5e1(Kx)miVDv{}onQ z1#O8_h_zOXZp5k-qvO?c25xt+8CMC;)>m*^3%qaC>S0$6U(A^+QMn&d~P^CST!rpuX zRBD7AP@58>1emrlpaNjg2gm>i@>-FbIhXRt7mvYr?Y!Cnk4j1=T>sj_Tr5Z&%&_D9 zMp>pOI(5uASs923R0gw3@I}!OT6ZG3Wi6i?Ro;QL1*j7!5BK{v8~~Unb!{95uq%P( z@5TjK{KIFe-N%|Y5pWBW01f&owbAE-h)^=4zG(IkWc3UP3OGacpMjrn$AIHrmG4oD zyvPUFxY2ObA_X*d)WEZkKS&~NJlnGe_AMsnuPHtOsX=F{_FE+D)a1uf0RI#aCR693 zqRmO@W>sn~hWNpM*%@}`XFo9qzJ!JK_9N@=B34lNYp*sVdK~h4)zf&}o!`@-LMd6VFxz-*_r> zDjMXumHt6Ikj$@q*msDrG>Og5wi`#_VOoer?EDdnV&6rwU{MX5JI!Lzi|4yJxVRoj zR_D?D4TBKAarIbTTgJ9^zZKbHJfuAI%T!Inq=tj(@BOg;p$C5>;fTmk2eSD#NL*X-h^d#J9DdWBPvQscMAhkE7hZVu{=FB?eLZE1hS8N{ zQtD3~UKUQI)zXoA0*ob%*^dxbF6Ix#sg3}g*M80A&qhyI)9zSnRh&+lqXk<-$;m(( zL45!jLHA+OLfqLpX2Ssvrt-|P0j@Ot>OJ-Er#!EgG=9GR)4i9n(o#BhY<5ehwfb8j zgDUA+?@}Vtb_(k`e1z=YXW=R{)}tU@Up#01(Pt~(T4`9QOL_u+Wk|>X8Y1#gcIkzj zMuenmSQRy{^nUw1dcwolB(U?u_j7S^g(g2!ef!G56nh9#=V5eo=ws?-`a8Wo-J@_X zC$v^Gfw~P9FzxX+3`SgL>*%6$grLLtdSBe*(C!PV_I#1GB1gG>^<8f=oV&$y?q9Nn ziX_pO|Fl10P`%g_7oDxc2fxA&NB0KK-}dX^`d7F5(B$M_3g0(4mQEW2jCn9M`$#fG z9!^eq`MLb{mt7jVERuO{@f{8;Z6Io}urx2_Vk7`qFxu}xR&z6Jt9r@tHbo`Tp>6C- z%hsyXLR~v+o0)rUnECA(-iC6~*!BYTn-`o!Mgz$EN@Zd?fszFo%3S`H*Ku5FPsV|p z!M|_868`^Mx)35d6sz#yj{4~X;-U$!UJisX7TW9fqUv@k8dWCt8%~>%FnsS(8G&Gfe`J*Ewi>TnqE`gYuG@>QBS7tu9?na*bz#-3eN8Ec!iU-XldX6 z^0PbCs8G$rCARSMz1qi}So^kr$~NO$uNu!NH0=)Kp;6^J_w!Wv9WS`pFXs&F+?)I; zlr4h(fzfjXOBNqm@(^#7JN)DrrlE!z$Zw-5-to(hnFCp3dvj+u(ydpxquC&+ULnX9 zs%TRXGN%IF4ri6mNE~32-=)m3*2uf1VrUnI%F`S)J7q@5efw^`=)pBbp#={3O&H5# zY?td@VU;aoAu;o4&bLIRzc8k;$m?9@1AbtvSTTjBCu$rnkvJUTr@vRGi&843=QtJ- zG(2%T#UA3iXuK{C*BZfvx~hGI`F&;?SCl@G&h|r#@&@j6|DlhIYXJEE{-d)1;KBdV zc@5Aj{-YB(g8jJHTwG`UzueLJLvtSgv7!<1LQ54;1^tH?xuU!QrRc}be*palqu - - diff --git a/api/core/model_runtime/model_providers/triton_inference_server/llm/__init__.py 
b/api/core/model_runtime/model_providers/triton_inference_server/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py b/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py deleted file mode 100644 index cf7e3f14be03ad..00000000000000 --- a/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py +++ /dev/null @@ -1,280 +0,0 @@ -from collections.abc import Generator - -from httpx import Response, post -from yarl import URL - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - - -class TritonInferenceAILargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - """ - invoke LLM - - see `core.model_runtime.model_providers.__base.large_language_model.LargeLanguageModel._invoke` - """ - return self._generate( - model=model, - 
credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - validate credentials - """ - if "server_url" not in credentials: - raise CredentialsValidateFailedError("server_url is required in credentials") - - try: - self._invoke( - model=model, - credentials=credentials, - prompt_messages=[UserPromptMessage(content="ping")], - model_parameters={}, - stream=False, - ) - except InvokeError as ex: - raise CredentialsValidateFailedError(f"An error occurred during connection: {str(ex)}") - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool] | None = None, - ) -> int: - """ - get number of tokens - - cause TritonInference LLM is a customized model, we could net detect which tokenizer to use - so we just take the GPT2 tokenizer as default - """ - return self._get_num_tokens_by_gpt2(self._convert_prompt_message_to_text(prompt_messages)) - - def _convert_prompt_message_to_text(self, message: list[PromptMessage]) -> str: - """ - convert prompt message to text - """ - text = "" - for item in message: - if isinstance(item, UserPromptMessage): - text += f"User: {item.content}" - elif isinstance(item, SystemPromptMessage): - text += f"System: {item.content}" - elif isinstance(item, AssistantPromptMessage): - text += f"Assistant: {item.content}" - else: - raise NotImplementedError(f"PromptMessage type {type(item)} is not supported") - return text - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - rules = [ - ParameterRule( - name="temperature", - type=ParameterType.FLOAT, - use_template="temperature", - label=I18nObject(zh_Hans="温度", en_US="Temperature"), - ), - ParameterRule( - name="top_p", - 
type=ParameterType.FLOAT, - use_template="top_p", - label=I18nObject(zh_Hans="Top P", en_US="Top P"), - ), - ParameterRule( - name="max_tokens", - type=ParameterType.INT, - use_template="max_tokens", - min=1, - max=int(credentials.get("context_length", 2048)), - default=min(512, int(credentials.get("context_length", 2048))), - label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"), - ), - ] - - completion_type = None - - if "completion_type" in credentials: - if credentials["completion_type"] == "chat": - completion_type = LLMMode.CHAT.value - elif credentials["completion_type"] == "completion": - completion_type = LLMMode.COMPLETION.value - else: - raise ValueError(f'completion_type {credentials["completion_type"]} is not supported') - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - parameter_rules=rules, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.LLM, - model_properties={ - ModelPropertyKey.MODE: completion_type, - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_length", 2048)), - }, - ) - - return entity - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - """ - generate text from LLM - """ - if "server_url" not in credentials: - raise CredentialsValidateFailedError("server_url is required in credentials") - - if "stream" in credentials and not bool(credentials["stream"]) and stream: - raise ValueError(f"stream is not supported by model {model}") - - try: - parameters = {} - if "temperature" in model_parameters: - parameters["temperature"] = model_parameters["temperature"] - if "top_p" in model_parameters: - parameters["top_p"] = model_parameters["top_p"] - if "top_k" in model_parameters: - parameters["top_k"] = model_parameters["top_k"] - if "presence_penalty" in 
model_parameters: - parameters["presence_penalty"] = model_parameters["presence_penalty"] - if "frequency_penalty" in model_parameters: - parameters["frequency_penalty"] = model_parameters["frequency_penalty"] - - response = post( - str(URL(credentials["server_url"]) / "v2" / "models" / model / "generate"), - json={ - "text_input": self._convert_prompt_message_to_text(prompt_messages), - "max_tokens": model_parameters.get("max_tokens", 512), - "parameters": {"stream": False, **parameters}, - }, - timeout=(10, 120), - ) - response.raise_for_status() - if response.status_code != 200: - raise InvokeBadRequestError(f"Invoke failed with status code {response.status_code}, {response.text}") - - if stream: - return self._handle_chat_stream_response( - model=model, credentials=credentials, prompt_messages=prompt_messages, tools=tools, resp=response - ) - return self._handle_chat_generate_response( - model=model, credentials=credentials, prompt_messages=prompt_messages, tools=tools, resp=response - ) - except Exception as ex: - raise InvokeConnectionError(f"An error occurred during connection: {str(ex)}") - - def _handle_chat_generate_response( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool], - resp: Response, - ) -> LLMResult: - """ - handle normal chat generate response - """ - text = resp.json()["text_output"] - - usage = LLMUsage.empty_usage() - usage.prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - usage.completion_tokens = self._get_num_tokens_by_gpt2(text) - - return LLMResult( - model=model, prompt_messages=prompt_messages, message=AssistantPromptMessage(content=text), usage=usage - ) - - def _handle_chat_stream_response( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool], - resp: Response, - ) -> Generator: - """ - handle normal chat generate response - """ - text = resp.json()["text_output"] - - usage = 
LLMUsage.empty_usage() - usage.prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - usage.completion_tokens = self._get_num_tokens_by_gpt2(text) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=0, message=AssistantPromptMessage(content=text), usage=usage), - ) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [], - InvokeRateLimitError: [], - InvokeAuthorizationError: [], - InvokeBadRequestError: [ValueError], - } diff --git a/api/core/model_runtime/model_providers/triton_inference_server/triton_inference_server.py b/api/core/model_runtime/model_providers/triton_inference_server/triton_inference_server.py deleted file mode 100644 index d85f7c82e7db71..00000000000000 --- a/api/core/model_runtime/model_providers/triton_inference_server/triton_inference_server.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class XinferenceAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/triton_inference_server/triton_inference_server.yaml b/api/core/model_runtime/model_providers/triton_inference_server/triton_inference_server.yaml deleted file mode 100644 index 218678b8835519..00000000000000 --- a/api/core/model_runtime/model_providers/triton_inference_server/triton_inference_server.yaml +++ /dev/null @@ -1,84 +0,0 @@ -provider: triton_inference_server -label: - en_US: Triton Inference Server 
-icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.png -background: "#EFFDFD" -help: - title: - en_US: How to deploy Triton Inference Server - zh_Hans: 如何部署 Triton Inference Server - url: - en_US: https://github.com/triton-inference-server/server -supported_model_types: - - llm -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: server_url - label: - zh_Hans: 服务器URL - en_US: Server url - type: text-input - required: true - placeholder: - zh_Hans: 在此输入 Triton Inference Server 的服务器地址,如 http://192.168.1.100:8000 - en_US: Enter the url of your Triton Inference Server, e.g. http://192.168.1.100:8000 - - variable: context_size - label: - zh_Hans: 上下文大小 - en_US: Context size - type: text-input - required: true - placeholder: - zh_Hans: 在此输入您的上下文大小 - en_US: Enter the context size - default: '2048' - - variable: completion_type - label: - zh_Hans: 补全类型 - en_US: Model type - type: select - required: true - default: chat - placeholder: - zh_Hans: 在此输入您的补全类型 - en_US: Enter the completion type - options: - - label: - zh_Hans: 补全模型 - en_US: Completion model - value: completion - - label: - zh_Hans: 对话模型 - en_US: Chat model - value: chat - - variable: stream - label: - zh_Hans: 流式输出 - en_US: Stream output - type: select - required: true - default: 'true' - placeholder: - zh_Hans: 是否支持流式输出 - en_US: Whether to support stream output - options: - - label: - zh_Hans: 是 - en_US: 'Yes' - value: 'true' - - label: - zh_Hans: 否 - en_US: 'No' - value: 'false' diff --git a/api/core/model_runtime/model_providers/upstage/__init__.py b/api/core/model_runtime/model_providers/upstage/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/upstage/_assets/icon_l_en.svg 
b/api/core/model_runtime/model_providers/upstage/_assets/icon_l_en.svg deleted file mode 100644 index 0761f85ba64958..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/_assets/icon_l_en.svg +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/upstage/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/upstage/_assets/icon_s_en.svg deleted file mode 100644 index 44ef12b7303098..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/_assets/icon_s_en.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/api/core/model_runtime/model_providers/upstage/_common.py b/api/core/model_runtime/model_providers/upstage/_common.py deleted file mode 100644 index 47ebaccd84ab8a..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/_common.py +++ /dev/null @@ -1,54 +0,0 @@ -from collections.abc import Mapping - -import openai -from httpx import Timeout - -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) - - -class _CommonUpstage: - def _to_credential_kwargs(self, credentials: Mapping) -> dict: - """ - Transform credentials to kwargs for model instance - - :param credentials: - :return: - """ - credentials_kwargs = { - "api_key": credentials["upstage_api_key"], - "base_url": "https://api.upstage.ai/v1/solar", - "timeout": Timeout(315.0, read=300.0, write=20.0, connect=10.0), - "max_retries": 1, - } - - return credentials_kwargs - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [openai.APIConnectionError, openai.APITimeoutError], - InvokeServerUnavailableError: [openai.InternalServerError], - InvokeRateLimitError: [openai.RateLimitError], - InvokeAuthorizationError: [openai.AuthenticationError, openai.PermissionDeniedError], - InvokeBadRequestError: [ - openai.BadRequestError, - openai.NotFoundError, - openai.UnprocessableEntityError, - openai.APIError, - ], - } diff --git a/api/core/model_runtime/model_providers/upstage/llm/__init__.py b/api/core/model_runtime/model_providers/upstage/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/upstage/llm/_position.yaml b/api/core/model_runtime/model_providers/upstage/llm/_position.yaml deleted file mode 100644 index 7992843dcb1d1d..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/llm/_position.yaml +++ /dev/null @@ -1 +0,0 @@ -- solar-1-mini-chat diff --git a/api/core/model_runtime/model_providers/upstage/llm/llm.py b/api/core/model_runtime/model_providers/upstage/llm/llm.py deleted file mode 100644 index a18ee906248a49..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/llm/llm.py +++ /dev/null @@ -1,603 +0,0 @@ -import logging -from collections.abc import Generator -from typing import Optional, Union, cast - -from openai import OpenAI, Stream -from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessageToolCall -from openai.types.chat.chat_completion_chunk import ChoiceDeltaFunctionCall, ChoiceDeltaToolCall -from openai.types.chat.chat_completion_message import FunctionCall -from tokenizers import Tokenizer - -from core.model_runtime.callbacks.base_callback import Callback -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - 
ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.upstage._common import _CommonUpstage - -logger = logging.getLogger(__name__) - -UPSTAGE_BLOCK_MODE_PROMPT = """You should always follow the instructions and output a valid {{block}} object. -The structure of the {{block}} object you can found in the instructions, use {"answer": "$your_answer"} as the default structure -if you are not sure about the structure. - - -{{instructions}} - -""" # noqa: E501 - - -class UpstageLargeLanguageModel(_CommonUpstage, LargeLanguageModel): - """ - Model class for Upstage large language model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - - return self._chat_generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - def _code_block_mode_wrapper( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: 
Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - callbacks: Optional[list[Callback]] = None, - ) -> Union[LLMResult, Generator]: - """ - Code block mode wrapper for invoking large language model - """ - if "response_format" in model_parameters and model_parameters["response_format"] in {"JSON", "XML"}: - stop = stop or [] - self._transform_chat_json_prompts( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - response_format=model_parameters["response_format"], - ) - model_parameters.pop("response_format") - - return self._invoke( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - def _transform_chat_json_prompts( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - response_format: str = "JSON", - ) -> None: - """ - Transform json prompts - """ - if stop is None: - stop = [] - if "```\n" not in stop: - stop.append("```\n") - if "\n```" not in stop: - stop.append("\n```") - - if len(prompt_messages) > 0 and isinstance(prompt_messages[0], SystemPromptMessage): - prompt_messages[0] = SystemPromptMessage( - content=UPSTAGE_BLOCK_MODE_PROMPT.replace("{{instructions}}", prompt_messages[0].content).replace( - "{{block}}", response_format - ) - ) - prompt_messages.append(AssistantPromptMessage(content=f"\n```{response_format}\n")) - else: - prompt_messages.insert( - 0, - SystemPromptMessage( - content=UPSTAGE_BLOCK_MODE_PROMPT.replace( - "{{instructions}}", f"Please output a valid {response_format} object." 
- ).replace("{{block}}", response_format) - ), - ) - prompt_messages.append(AssistantPromptMessage(content=f"\n```{response_format}")) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - return self._num_tokens_from_messages(model, prompt_messages, tools) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - client.chat.completions.create( - messages=[{"role": "user", "content": "ping"}], model=model, temperature=0, max_tokens=10, stream=False - ) - except Exception as e: - raise CredentialsValidateFailedError(str(e)) - - def _chat_generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - extra_model_kwargs = {} - - if tools: - extra_model_kwargs["functions"] = [ - {"name": tool.name, "description": tool.description, "parameters": tool.parameters} for tool in tools - ] - - if stop: - extra_model_kwargs["stop"] = stop - - if user: - extra_model_kwargs["user"] = user - - # chat model - response = client.chat.completions.create( - messages=[self._convert_prompt_message_to_dict(m) for m in prompt_messages], - model=model, - stream=stream, - 
**model_parameters, - **extra_model_kwargs, - ) - - if stream: - return self._handle_chat_generate_stream_response(model, credentials, response, prompt_messages, tools) - return self._handle_chat_generate_response(model, credentials, response, prompt_messages, tools) - - def _handle_chat_generate_response( - self, - model: str, - credentials: dict, - response: ChatCompletion, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> LLMResult: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: llm response - """ - assistant_message = response.choices[0].message - # assistant_message_tool_calls = assistant_message.tool_calls - assistant_message_function_call = assistant_message.function_call - - # extract tool calls from response - # tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls) - function_call = self._extract_response_function_call(assistant_message_function_call) - tool_calls = [function_call] if function_call else [] - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_message.content, tool_calls=tool_calls) - - # calculate num tokens - if response.usage: - # transform usage - prompt_tokens = response.usage.prompt_tokens - completion_tokens = response.usage.completion_tokens - else: - # calculate num tokens - prompt_tokens = self._num_tokens_from_messages(model, prompt_messages, tools) - completion_tokens = self._num_tokens_from_messages(model, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - response = LLMResult( - model=response.model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - 
system_fingerprint=response.system_fingerprint, - ) - - return response - - def _handle_chat_generate_stream_response( - self, - model: str, - credentials: dict, - response: Stream[ChatCompletionChunk], - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> Generator: - """ - Handle llm chat stream response - - :param model: model name - :param response: response - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: llm response chunk generator - """ - full_assistant_content = "" - delta_assistant_message_function_call_storage: Optional[ChoiceDeltaFunctionCall] = None - prompt_tokens = 0 - completion_tokens = 0 - final_tool_calls = [] - final_chunk = LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=""), - ), - ) - - for chunk in response: - if len(chunk.choices) == 0: - if chunk.usage: - # calculate num tokens - prompt_tokens = chunk.usage.prompt_tokens - completion_tokens = chunk.usage.completion_tokens - continue - - delta = chunk.choices[0] - has_finish_reason = delta.finish_reason is not None - - if ( - not has_finish_reason - and (delta.delta.content is None or delta.delta.content == "") - and delta.delta.function_call is None - ): - continue - - # assistant_message_tool_calls = delta.delta.tool_calls - assistant_message_function_call = delta.delta.function_call - - # extract tool calls from response - if delta_assistant_message_function_call_storage is not None: - # handle process of stream function call - if assistant_message_function_call: - # message has not ended ever - delta_assistant_message_function_call_storage.arguments += assistant_message_function_call.arguments - continue - else: - # message has ended - assistant_message_function_call = delta_assistant_message_function_call_storage - delta_assistant_message_function_call_storage = None - else: - if 
assistant_message_function_call: - # start of stream function call - delta_assistant_message_function_call_storage = assistant_message_function_call - if delta_assistant_message_function_call_storage.arguments is None: - delta_assistant_message_function_call_storage.arguments = "" - if not has_finish_reason: - continue - - # tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls) - function_call = self._extract_response_function_call(assistant_message_function_call) - tool_calls = [function_call] if function_call else [] - if tool_calls: - final_tool_calls.extend(tool_calls) - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=delta.delta.content or "", tool_calls=tool_calls) - - full_assistant_content += delta.delta.content or "" - - if has_finish_reason: - final_chunk = LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - finish_reason=delta.finish_reason, - ), - ) - else: - yield LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - ), - ) - - if not prompt_tokens: - prompt_tokens = self._num_tokens_from_messages(model, prompt_messages, tools) - - if not completion_tokens: - full_assistant_prompt_message = AssistantPromptMessage( - content=full_assistant_content, tool_calls=final_tool_calls - ) - completion_tokens = self._num_tokens_from_messages(model, [full_assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - final_chunk.delta.usage = usage - - yield final_chunk - - def _extract_response_tool_calls( - self, response_tool_calls: list[ChatCompletionMessageToolCall | ChoiceDeltaToolCall] - ) -> 
list[AssistantPromptMessage.ToolCall]: - """ - Extract tool calls from response - - :param response_tool_calls: response tool calls - :return: list of tool calls - """ - tool_calls = [] - if response_tool_calls: - for response_tool_call in response_tool_calls: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call.function.name, arguments=response_tool_call.function.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_tool_call.id, type=response_tool_call.type, function=function - ) - tool_calls.append(tool_call) - - return tool_calls - - def _extract_response_function_call( - self, response_function_call: FunctionCall | ChoiceDeltaFunctionCall - ) -> AssistantPromptMessage.ToolCall: - """ - Extract function call from response - - :param response_function_call: response function call - :return: tool call - """ - tool_call = None - if response_function_call: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_function_call.name, arguments=response_function_call.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_function_call.name, type="function", function=function - ) - - return tool_call - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict for Upstage API - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - 
sub_message_dict = { - "type": "image_url", - "image_url": {"url": message_content.data, "detail": message_content.detail.value}, - } - sub_messages.append(sub_message_dict) - - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls: - # message_dict["tool_calls"] = [tool_call.dict() for tool_call in - # message.tool_calls] - function_call = message.tool_calls[0] - message_dict["function_call"] = { - "name": function_call.function.name, - "arguments": function_call.function.arguments, - } - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - # message_dict = { - # "role": "tool", - # "content": message.content, - # "tool_call_id": message.tool_call_id - # } - message_dict = {"role": "function", "content": message.content, "name": message.tool_call_id} - else: - raise ValueError(f"Got unknown type {message}") - - if message.name: - message_dict["name"] = message.name - - return message_dict - - def _get_tokenizer(self) -> Tokenizer: - return Tokenizer.from_pretrained("upstage/solar-1-mini-tokenizer") - - def _num_tokens_from_messages( - self, model: str, messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None - ) -> int: - """ - Calculate num tokens for solar with Huggingface Solar tokenizer. 
- Solar tokenizer is opened in huggingface https://huggingface.co/upstage/solar-1-mini-tokenizer - """ - tokenizer = self._get_tokenizer() - tokens_per_message = 5 # <|im_start|>{role}\n{message}<|im_end|> - tokens_prefix = 1 # <|startoftext|> - tokens_suffix = 3 # <|im_start|>assistant\n - - num_tokens = 0 - num_tokens += tokens_prefix - - messages_dict = [self._convert_prompt_message_to_dict(message) for message in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - value = text - - if key == "tool_calls": - for tool_call in value: - for t_key, t_value in tool_call.items(): - num_tokens += len(tokenizer.encode(t_key, add_special_tokens=False)) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += len(tokenizer.encode(f_key, add_special_tokens=False)) - num_tokens += len(tokenizer.encode(f_value, add_special_tokens=False)) - else: - num_tokens += len(tokenizer.encode(t_key, add_special_tokens=False)) - num_tokens += len(tokenizer.encode(t_value, add_special_tokens=False)) - else: - num_tokens += len(tokenizer.encode(str(value), add_special_tokens=False)) - num_tokens += tokens_suffix - - if tools: - num_tokens += self._num_tokens_for_tools(tokenizer, tools) - - return num_tokens - - def _num_tokens_for_tools(self, tokenizer: Tokenizer, tools: list[PromptMessageTool]) -> int: - """ - Calculate num tokens for tool calling with upstage tokenizer. 
- - :param tokenizer: huggingface tokenizer - :param tools: tools for tool calling - :return: number of tokens - """ - num_tokens = 0 - for tool in tools: - num_tokens += len(tokenizer.encode("type")) - num_tokens += len(tokenizer.encode("function")) - - # calculate num tokens for function object - num_tokens += len(tokenizer.encode("name")) - num_tokens += len(tokenizer.encode(tool.name)) - num_tokens += len(tokenizer.encode("description")) - num_tokens += len(tokenizer.encode(tool.description)) - parameters = tool.parameters - num_tokens += len(tokenizer.encode("parameters")) - if "title" in parameters: - num_tokens += len(tokenizer.encode("title")) - num_tokens += len(tokenizer.encode(parameters.get("title"))) - num_tokens += len(tokenizer.encode("type")) - num_tokens += len(tokenizer.encode(parameters.get("type"))) - if "properties" in parameters: - num_tokens += len(tokenizer.encode("properties")) - for key, value in parameters.get("properties").items(): - num_tokens += len(tokenizer.encode(key)) - for field_key, field_value in value.items(): - num_tokens += len(tokenizer.encode(field_key)) - if field_key == "enum": - for enum_field in field_value: - num_tokens += 3 - num_tokens += len(tokenizer.encode(enum_field)) - else: - num_tokens += len(tokenizer.encode(field_key)) - num_tokens += len(tokenizer.encode(str(field_value))) - if "required" in parameters: - num_tokens += len(tokenizer.encode("required")) - for required_field in parameters["required"]: - num_tokens += 3 - num_tokens += len(tokenizer.encode(required_field)) - - return num_tokens diff --git a/api/core/model_runtime/model_providers/upstage/llm/solar-1-mini-chat.yaml b/api/core/model_runtime/model_providers/upstage/llm/solar-1-mini-chat.yaml deleted file mode 100644 index 787ac83f8ad92d..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/llm/solar-1-mini-chat.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: solar-1-mini-chat -label: - zh_Hans: solar-1-mini-chat - en_US: 
solar-1-mini-chat - ko_KR: solar-1-mini-chat -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 32768 - - name: seed - label: - zh_Hans: 种子 - en_US: Seed - type: int - help: - zh_Hans: - 如果指定,模型将尽最大努力进行确定性采样,使得重复的具有相同种子和参数的请求应该返回相同的结果。不能保证确定性,您应该参考 system_fingerprint - 响应参数来监视变化。 - en_US: - If specified, model will make a best effort to sample deterministically, - such that repeated requests with the same seed and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - system_fingerprint response parameter to monitor changes in the backend. - required: false -pricing: - input: "0.5" - output: "0.5" - unit: "0.000001" - currency: USD diff --git a/api/core/model_runtime/model_providers/upstage/text_embedding/__init__.py b/api/core/model_runtime/model_providers/upstage/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/upstage/text_embedding/solar-embedding-1-large-passage.yaml b/api/core/model_runtime/model_providers/upstage/text_embedding/solar-embedding-1-large-passage.yaml deleted file mode 100644 index d838a5bbb1bbfd..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/text_embedding/solar-embedding-1-large-passage.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: solar-embedding-1-large-passage -model_type: text-embedding -model_properties: - context_size: 4000 - max_chunks: 32 -pricing: - input: '0.1' - unit: '0.000001' - currency: 'USD' diff --git a/api/core/model_runtime/model_providers/upstage/text_embedding/solar-embedding-1-large-query.yaml b/api/core/model_runtime/model_providers/upstage/text_embedding/solar-embedding-1-large-query.yaml 
deleted file mode 100644 index c77645cffdd8f4..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/text_embedding/solar-embedding-1-large-query.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: solar-embedding-1-large-query -model_type: text-embedding -model_properties: - context_size: 4000 - max_chunks: 32 -pricing: - input: '0.1' - unit: '0.000001' - currency: 'USD' diff --git a/api/core/model_runtime/model_providers/upstage/upstage.py b/api/core/model_runtime/model_providers/upstage/upstage.py deleted file mode 100644 index e45d4aae19eb6c..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/upstage.py +++ /dev/null @@ -1,27 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class UpstageProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials from defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - model_instance.validate_credentials(model="solar-1-mini-chat", credentials=credentials) - except CredentialsValidateFailedError as e: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise e - except Exception as e: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise e diff --git a/api/core/model_runtime/model_providers/upstage/upstage.yaml b/api/core/model_runtime/model_providers/upstage/upstage.yaml deleted file mode 100644 index 837667cfa9b41f..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/upstage.yaml +++ /dev/null @@ -1,49 +0,0 @@ -provider: upstage -label: - en_US: Upstage -description: - en_US: Models provided by Upstage, such as Solar-1-mini-chat. - zh_Hans: Upstage 提供的模型,例如 Solar-1-mini-chat. -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#FFFFF" -help: - title: - en_US: Get your API Key from Upstage - zh_Hans: 从 Upstage 获取 API Key - url: - en_US: https://console.upstage.ai/api-keys -supported_model_types: - - llm - - text-embedding -configurate_methods: - - predefined-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: upstage_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key -provider_credential_schema: - credential_form_schemas: - - variable: upstage_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/vertex_ai/__init__.py b/api/core/model_runtime/model_providers/vertex_ai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git 
a/api/core/model_runtime/model_providers/vertex_ai/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/vertex_ai/_assets/icon_l_en.png deleted file mode 100644 index 9f8f05231a6733a138502ba7a254e2125ae8d02b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 18078 zcmb??g;N~e6YVbU?hrI2xVyUqcemi~Zi@wn1oz+sceh1?yGyV@AUMIDH{b8o`ww1G z%ff8c&fLD;xBHwv9j&S?i;hBq0s?{1<>jQ*K_D1q;BN~g1mL&Jzuq_y2pn!JDXA(i zDM{(#=4@r_UWh@u3Q7%WpA0&OUv+lWz(VabKDDfhj_Gt@(2yFp!@cWwTNF7YvidPcjD z(se6=Oss}>Evn}jEFUTMPl!QG?@l> z?qz$`FvBkMkbX&N)7hvo(daSbxMCT9)O&(@IBXb;1yvb%p5IMJ#kz9yLH)y`+mF(2 zqk9aJRKqY8GnDN?b)xqh)mj-j7fIB}l-IxcC?v&v2yK?$9XzeLW_=X9Aa0vmf3<_n zczEBvx%639Y>UUmbz^Byc-sx}Z!pJu4Yv|Xr5aVx=JeleH?eDizj{}G_NTko;1N}w za?EFu>5+m7kZPC@^tJH^UgicTlD!R1EyU|?GiJ}bydf!_Z6$?q;p2Cg8x$mzL* zK&ZI?y}+QX93tQ%qKCYaG~!=)JX{t!a10a)xJ2wBt?MD_?C5CeL3Be!)fha-pQsSCEYp1!^rldcYzHBuzj?yZ#Vj*FD`b&!ko320?^8P(m zOo#${3(cr8?ZS)*8$F%|Yq4wtQ~OM12v*Wph~#)l#-=|qeKGH90_|zec_i6yzPV2g z?^`FXb~o#m&Zed&rh44>C-@;!)ZgUZ<3#`edb9PX`<7g3_v{awfn>@CNd=LQkG6De zsWF<{)YpnaIzeQJrreOsMQJ_fQK$cV>-SVUnp(T(u-ghCe{OyzP)>5PquwvDv<55a zJM2p9>Xo6UQ3|(?7ykc!$WJf_1U$G(>$SY?VM`IsnKo;$LU-f;@4M7?Jbc1;WxZxP zi12pqg9gr4?UIb7|92yK>QE@z4xi^t$308`zwZPpMxU+WTqxid;uKyuu-Ck>m0=Z!h|*CMq!asz%cXbs_j~EaMjtyf|1+}yM>JogDCYJY zTku*^vg6?<_Wz6{2r&R#!0N)A%6O_RS$CQb-=7qIgZ1NCFwP=Ciqq81DXo)J*z;2i z&o2y`DxNUOUcHGhH!;3133&P6{zYgY5-4{HDzFM~HQg;K5oYq)w8Fs-sQcDJ>>iar zHW%|SS6|k(J3FQ|vjSV5lmMZw6m!cin(1;|pY)1?(d4U#1k0Ljr3}j$rDk-16sGbEAws{|`23Nt4hr5Pz3H zk_Hdtj-Rnr-c#u=_~p&-j9AC=_0qDQ;reCm_iM#WCrleRrz)@0YLo_rZhGp?UsqP< z6pa1R>!A3mUa9imYlE+Lk4Z6e{@D?eui>I6wQX@=Jah z*U|XT7jl*O;Hk+75ejYJL;UL+OYGqwzu(7}HiCs_PDCWZLJlrdbRb@H>)zTvO2uP` z_}AePst7~}!>d$zI2C*EcS%3RZTr6PrscXHEjY_2){W?B{_kt0`2tCI+Ggk0AFvZl zlJ*Wr-tF9%AROf2@7#|}&8>VlVe1%gaDRQ#EDg?9F#EP@JT5;WhspJ8#eIGM?v2Ft zsOOS9^*T&O*V`9Wt_?G8szomkC zUwcAl$%8DsUWF*~a6k52<4+~Se;bU!oa5HOObQafhAM&-z+FhDa>^|wYuinPG*MY) z$=tZV;U$L_fjCIcuTVJ8RyIg{eqLzY 
z@*cl>gEl({kb8t{2loc5e$f1c8T&!zwe`XCpOG-bAA3~Y(*)xVXaUMUG4QnxbOSC~ zk7@z6lo{pU_t5$(6NDaLpR{pSdqu#)Ns`%nsg;j|TexxJprw5agCLc~z$u1XPwQ<{ z&~Zg<1KK$!@D#$KNY|O6g$ISB8v=&9p%hc3OTCp_cX@i4@cLfg@-`0v7 z8)}Z$Sq!<#&$~8GrJ3nw=E7iTJcj6Crfg1@>#^Q%jg7Wc{r!PIyBa<}T{JZ{f^xs99TD_d);`wqrFN)0a^K7zcXmNaXfgBVkqY(W z%{sIC&dSMJem)c(%@94`AIJFKSu8_Zm0?`i-J+{%10s}d&H#NGcwtuB8UFN(mSS8# zN$&Hr}G*!2RE;mL#wP zw~Z2Ra@5I@nYW4oGkzs0JDP@L-q zbODBx_6QTAG2iAI7^U3ESUb(svXTXfr8&>XX>JWWbFji*FtP_#^BVBr=w^5dhh-pU z+-!?sAuvh|cG)*Z)A%U%5*8;A1)U=}&JG&aOwD;c@C+NHgWs;uo;&fK{h6TL4?L&) zZi)6SWH_{)xV-UZQW7c^Hrl1DlR~Itaie>EPH1dkb$xtX!z`D5ag15^gNcRXID}D)%1`$3 zv_VGpLRWg;%j4(s2)}o2faA_yx3H7zwM!+es759%BK3N~R z*`#=A*N_Ke>XB$;EEAjaa+z|G7%8>sfwDimW%h|-f)K$(@78jwohKXZSaD`^+!C*n zea64qFZRH@_%sfPYWo>QjuF@io^?4n+~IqsDRlzVN?>_VGu` z@&z^Z@ykl*(aY|AQ96A^ZzFMtyRI?zi zYb(!&(BO6%>L6TZ@{2B4FmqtPP9MAeY&6$O{FhPe8BOow6&s<{b5u(< z3IVune7k9jn(S?LJLfy_wmBk%bAI(-RupPlv6iOGw@x1#m<ng6sou0ctj@H2%~i;Otmzys8memTjAw-9P zala;GH>kghpTTG^=U+|prq11M@zT@Tt`;N~!-n*MmMEJlYXtD|)s0fLA`@DK?^=*6 z9v|ME6q0P#I%tc=Bf|EX%ZK{!)YX3t9P~WA{KI4wDexZnLQLry&~N$S&$XI&EuYit zUbmp4Ls>nKqe<>Rb+p2AWvXBGvK_%oVU1(@WIU!{=8Z5-vnJ2zFj4Zx#9nWE>BAos zKhS5r&y5K|MuFDZ?s2984vqFS zLM1h5q3W4yU}}}W|EJt+aNK0000DcO1Z`J#K}^mnZDX-bk_U;?J7?}%`-|=o z=!4qeoHK|a>Tf%6LX`c$CGRv3rn>(Gia(d@rHI zzf=U8;w9??3xl4cAF1q3E3Lc|Y1QW7DtNZa4mx|aPG_kC!V`y3@B%Ch8-+gR6{K3# z-wCF?K7g=2lBGlE(TS{Xkm7{nt`_}?N#w)bWXe82u0@BqWgtcF!WC|3g5T(kqQ3MQ0b(au3;*riVb=sE_qT1z}| z-#_!Dej4IE<%t$p>=ynxJ+v4lqg-zMj!zks9P&H=OIOM!OtFJ z&?}R`8z_rSvyTx=O48OT3W5v%0+ZBHjp%?Z(M zDmUtbRb84a=6lO3`HBWl4O7P&Qqs}_s3OJWtp^nQ{NZ-zq9ozjn2qqg)7mzey?)ow z<>G_btgANuHDEnUT&>t!{eBY8!=cX-pa)=?-3oVHuz*B;Qk>MhM|g{>4#-EtEI=2) zc8*M0M<`(ZdAfx(LZ9P2Cb~+cmZI;>6Ojhdw?dASkm_s}YAcO3Rf-OiJ5G+b8wFJ9 z`I;XwwbmWJi}5f10dH4n53s_d2k(R^@ux2?g(Dr5=k13-_`MU~S6F{S{g^@Vr9Syc zJA$oZ@Khf@S_+5ImuHwiR1I%u`UYhu=~Jz}1a)CKy=BRi)(4HXL!?tEhLULX*|pE7 z7z=y39lQiN!1qX_N>S_VsW%)zm_$*f8eqQqM%+^^(~jvG5Vk0yG+)p81%3A7?HVLg zb`j%nU-!#Ia!^Q;adwun#7!yMIEsCGQ>v(w-wPDEylpsC-oLBF<>Jp^c(UIkzBkk9 
zrt!L6)uihWQ$q*C<(VlN%=spM-=-*&02LkfB?I-uU+2V@dN`+tj2;o3 z`6|>l_IB!?))#1<$u4|drFC2_S|_*(2!2rbc&&1KGw-=&u|^x@Y?z^K{qE&oo&WN? zRr2-NzU4l?p_MMT#A90kJG9NhpE)X~H;h-xhBGn>P0^Njc>5kI=He+R1EpEM12TP1 zoD0l-r|%ZLOY3Yad%tT@395U}Fj$5dVre!;uo^IS0zVUg7)}9YK0Sl&g0nT)KfjAx zZFOvR*{5cM z9cPagM9qYv0#Fsv*@l(oq6u|ZSrA!9p_+6Jqc4|p#Nw+soruv=7$zE8yVD44@Pb~8 z!d7_6dAwk+*5Z8^{_Y(|WL` zR-S6woo@MoTigu>lH0q!#A&ILYnrYafxTQBb$*tM?1X2jSATvNd&*&G!pR9U_Kg=G z#{6kFyQ?{F_xb$YYlo?aR;&nQA)VqMPN<-u5Oh`r8^0*RBFToS4uuVi%Y%c{>PMh{ zZj=4+N|$$rQ>{l{Ju_P-elP$&p~;v-CR4?)QjO|iRI1l_t?}GlQLq*vh85334S-uqwmU8}dm}-=b-pgD2HNmGdYTdGL zU-3BG=-Vvwoa&Ll((>2(9Z6>-Ugv&Hc`KchKXEyy*k8fFa#fgYpAO?Hp+xw2zwtot zUwsWJM+$ zOu^W~=Gv3aX=xL3kC7ku&WGSZENC(BEN3%Y?g$;`iQBbLnIpC>IfQl81QbAGd0K*L zkdOlJubv3lA(@$)AN*STcMZQ+@Gvux)$+jID1Z#HXKosOW&IdHzjT9!S||~Ll4xDZ zC#yinXh%LbYOK{Hokfb$a8vU~xWSlEqhMfLd}#kMUN1Qr%mx1hUkF4Je1dJWQnjzw zj1YdQ_X^q$Ob%_|RA2ti+&=BOTk|oL5*3~f2^W`0fH8yJT3Z75yTK4)1vtYShnpev zdBuTn_$@C{fpCHl=bj89K^-X)?%99?=9PJ*(Ka~bA3F9X4|cDvzW$|bC2HAHzvE<% z=61C^uYPD6=if$kdjxY{nvJd<`Q!#)Ie&&^iS@Zo%jegzES*H5@-6tS_a_U_j~j>% z0+l|;2l0-%Y#mAaLQk9VjGH+t?#84#scH#Ty-$`UI^JnPcFnet2*loq@GRnJXP|hB zEqAa;tN3u7P$ds~v5@voW^zh~A#keNSmjY?B(Afp6{eI?Kn$&>ZV+F3|L8S8;c4ve zTGA#S%KylI@fb1}xRQ=yF5A}&btT}_O>v$_0gk*k_I!4%n2n)zR`K{Z1xZlX_ZY9q zMq3?VD-h9PM9;1r&9)t^a?qoG2V{FC)>yPtc)#&Jq)B5KBIO^m;0kW9pAjFr&qmj; zHZ$e2Pf?&54nrXy0^4kZr^M(qhG#*qxOf?P){C2(_(FTkp56GZSsWWv0YGJa@bFQvh?G zB5o6bDm{iip8rby$)1ypF!V}U1!Q9Vbz|eNleOn2DNe>gYEJ=y(!8<)WPF99QkgLQ7z@lxWk9d!cNa-u~U^jz2fMb?}*G zW*-@qCdjCW&i7WywD*@z8F)IEKGIz#GG~VP4k>xqDkq<3ZW&6! 
zEiYuw3!K9FS1CQIh?$Z=e0YzrU}C0%H5KI-B-t`Qn(?um^A|HTCbYc1YW z1h_V543^&r_hpkfd^EOAlI7ZqE(uPp595ixzFatYUhdtq z-rH&`j7qC`9KHxtbqPB#y%@Zw3BgqSiAxu*1;D4m3xeqW?y%4;@oTsr%sWGtBA(w3 zYTkLT23t=?+mtMMu=6vRM#rCDI=lreX zar@iw9`iris}8+BB~KBJ>Ed)99mV9S@0SO=?qw&fh1|#T)rJ~E4-a@zrKO(iK+bwG+VSkhk@R)K z9mQ3umN5%STg|_!BE*}KdL2M+O9d?XRd?mJ%* zw#7kZGLPEQe$!}YxVG|BLPrTVTyG(p)jis;T7NQ)ChaD3PwVF&!vZ&-uJ8Jal-(!p zJafz<=U(j6M_0~t*Y&;OHF?OMQ@whRBOyIz<19bILufwp1+ybf>*3WP*IAZG&rSvz zk&VU8gW{q;bP>$}oKq*@W1QcD%$w~u!eCgbAKi3?vGZt6d*u79#d(~0H1Z9^w&|>L z2B4{sf)yWAi8jSDLJOP zk$4D=LBFZ3dEi2DA}*3DW(XaQ2#QS|^~L^v4Dm&S2w~`*4KyF<#Ta`FqBY12TF`D^ zBpiJj*(lWbX1!R$InsB4s0u)L{JgqObrS&O*?Z=QZ4dKx-Bb7Z*i{AoGP5jkUurq% zs9Srt;n_DWJx2;B)R%9-*gdsq_G$>bt(R?-jfL0t-5bN(BKps3G*VeQjjnT?#F{9g zX#mmBr{X^TZ&l30#5q^g5Pq?m8@(x_B#PCrK#(iMw*Htn1uC5WlLZ>P;!0a6|M4+}yrlSZCxWUxo?S&hm1TNTapjc)p%X4V+)ud^iX$)Bi@ zE${?0)68qF_;4Vap|wjc(`U6WNIjyhM)s{$<<=_icj{iWsJE#Qgf(|Ozl^6}`}MqZ z_-BSKyNr}~$=Y*W>t^rj=C;q&86i*Or2PAn$3Hnw;2=q7I3Z~6xTcv~55o4^VRuPi zJoD){I5NzG$}MqA^d4)DdjN%n$i%a1xM;09@R^jM|MQrt zGu5CXD82RIaRy#=#BAt2(AXfuC)crUviJF9L<;7+tV-Y|e#VP~bvcP;c#q7k@*87`h^7A(zs}&RwZo3g>KrF?UnnMsr$b2cS+0}1+$3OO}ohl`;CMwwN ztb_kcCiLGHUEv`=IEPG}>t(>BOoR2V?H9w22^{)V1%~ z!;b`#U8pZCxD2%IEMOs=pRtwRbTL1bMWfVKB4Jkn4w42$#foEx zlr$H2N&j(xJXM@H8NU>Jr?o$M)0sh}i4la{HVa~3^$iE}N&G#$}h7402 z#Nw z)(cZurkQiPmiKu-+&uZc?!B@y^;8}x?dLm-O2Z8e_{bBwdOZGG za!`n<+q{>Icv*XbZUjil6B@|CvZ2&p+~yI#vM2f*z3QH;?Yq9@W68j*d{)(o;OQ*# z&qxR&MPvF4r&x2*VKsC$0i1M=c$+92*F&91?a!gd_$ZE6(M-RQ-stbzJ|}Gap=W*| zmr5r>ZnrRP7R0bH%@7w(3wLT#{C`>iEx%m$c1%P5Gbflfiqovu3i{=^X8uP>9PG}G z*+iORdvzppz%{<>yBRf>92>!D%3|5IwisZK3Q>jbSU6=4b9d4%1F6E8Qeq=ASXIMG2cn=@b&r)zu8ja5nZOe9Yh+O6~DxnviwGvR2I3Sn>B$ zz)1}2ZRflrnv1Q6hHEC%KHN-s*5H#vfn}y2Q>~;!8bJ8U!#K-)0n~r#0okQMgCmfc z=-h>F%H%y$D8DFJzL&Y{Psrro_i%6~17-m+$UfZP_^infX!?uuvBW4Rqz<1AD~n|P zD^dOXd`f2PX1iEI8Pp`Q*@!#@y{gnpwE5)K!{n<4S?j@U-9ZY0Z|;vdZ!fa{a#{}} zOjx&c{YO^8-OHjB{=s$A{wB|^6C*`6zMU=1#}=C@FF}Yd0wKXF*ZQ9Oc}4n_zbZn% 
zn{^p_ySc%U1j|4f;WV)?+;-o3aR%zsk$vC`_!Ohq8aLWrwtR1oC(+_Kuwv#Jgx&Rp z=&3bySR>{nrPQ+s&)y$-z9>pvcp|6fI;j*JGozewink{DAJkja`aW zh53&Uq31ZN-pYPbFv331wRR^1a$qUy{$l7(oaH?9Rs%O9<(?YpX*2ha0W|7sP4wSB zsoGO|&ld@(g4t_I4OVOCigL$HBwK%rqgLkdg97z(%{AXL_JyM2)`2eTqni?xi4uWM z(VkUccJ4P}E5L<<1aV-2R`M)xL?>D3T+BQL_=mXgLIA24fvp+vy-5N_Bj@U9393`x zwrZo6;s*EUX<<9e`_+#4D+Hq&h8u-2zw{D{XSe(Yj|K~tY#R4k|!e2r9I(ZmhXfj&1T)@;2>1=b%OTe3*X6tDvTkynSlqjgjT)tPZAYgz zJ?}z1JiZi!T$;3v!rzDc!)I+Z&};8*H*C%!_N&9FX~ox%VvYE}8{n)^kNbT~zlH?s z$@UwwZJsROxEy8$m0$3QDvpK6UT|I2Ol^~`HNOV(f4y|(jH)33qfy-b^mMQ zniF*;iS@FZnQO%dD{Co9%AiVVn1p}k(8C^+_F{qj1ll3rXSpnc_>c!JKKwU!&*{@9OJ%9z99D5HuK zgTp6{RY3t4Q^-c0ZR?a0Ns7W)*vFg`q zBvB*lo$2k_K;5&B`gEY*JX(2_(hIlii}4UXM4bf(uEjQ62-&!XW$2!g9TTd#VXj4e z{2-A{wn-eGY#JyUj`@<@6cCO=e=hZRMYd^x0a@^YOKtBTH+e_KKXFA*g0K~SVli5X zh*Fg%2E-09pzM$y3wkGlrUv-EU~Uw8r&EX)=oiJbG(eW=0S5XA(&X|D!iU5|=@mi> zzvg|Nj%877453HGxTh?H? ze2R@@GI(Ln?3U~PM{ssr!;hE;Ya+Np+=-X01xM#FR(tiiJFE+hy*qq=$G5v9f$MPb zG2+zw*ui52laj(0*f?SI!5ur$NUNwxl5~FTwFFK}bFWLwc zv-Gnrh%B{JO+Z}Xwza{Dj0*z&PFj+oRVVlb(}B5F{yqu^4pfweLJfgy27zo?EJ7;z z1|78Yg0!AP!PxCBR=0Qw!!M z$ZH+Q@j8=dj64{$-uU9@=IET0Ul;ZJNr<1p?4LcgX#2{`8~h?-V^g;lnYLO^u&sP# zqI2iE@LS0-HC;+9MLy`@gol&(D+&W)YA z+86LO2eWM@E^BOuh}Oo!w)4}0Uh_FT0BTso9h}G&p`9o@W5S$LZ$LbMju>ud%7phy zpTMBTzucn6nEFkF$}6P`g%-V-x=gjYZoW}CBszZpFblXKLs=%w6eamh zJyy^`?0mJ(d&)Z4lot}h8j^M{he}7hp;%vCYS%}w})kSKGm4O2x zgl4hDJK%9OGR4VV(tMiY?`*9}#9`}2F=Z%>H2(g*Lc1|zNl!nhI%NLJF>7hM=j@u)jjKJ_Yxu!o%U18&#on!69>Kbk{_Gw z@WYATj_1Y<37}@tj4&3SWNst$PzphWS{GvlX zrK?j_vq(;JCnec26O~97$;pBeatDO2_rwcS1-qu17%CrMcz_RL0j&ZJ->6RGUrQ5< zpw|dh{j_s16WFC-J&SlYwg+gkF@{7^Hu;~Y%Jg}C24H|7Hlm&L#vxn)1|Op=#!MH%K4WKxLn1waAOH|Df_h6riGhqi7$r{ltda=RS7_SZ-_2H4i^^=PCaL$ z_Gu0o5nv#;L_u;5uTPcLTi=7Vwa^#kApT>PaN;o0HSn>}UuMV(W!L>mtQRQF@U|*pp=M$>u;Bv(VvVfDF2H} zY-PEz1kt6SQdYjQe#M6*7ienw9aK3V*F(fHzZ`R-#f>!bziRBbiX>tn(T|(&;88gx~aVfn=p>C{DEaGpOJRLr}gK;Wa7Bi@h_#7BNQtc?I- z*3cPksBVjR9{lz=pv{W9{Y8)3)eM_Y=M0;HCuvj#W*c*y?4nHt{5YJ=QPCoSV{iNX 
zwOLR%2PsGQ+L^#WzjGorkDoCr4weCCMG`4UaBE-gFzqNHn{&;CE8S_&^)S zx2TC-6x2>OYQz=A@JQ<^vvAiYrzMJg92?DuWlzVokKK=P|dltk>!MuzqhKzQ|v@xh9*7rtka9=Yz#uNZo}P~d(IHz!{m3;Yz)>SP+y z%EQTDSbi1&=>cIWP!6_&(7X&d37dw-%TUKvqI14~YdMUWV#%87_Wwg4x)9znj`4a0 z{k~p!(!UTWApIc5UPs6o{MCAqMZUw7AxF$t`;?$B15@fYm2x{5=v)3YLb#eq;HBA; z_0L#_Z_9tw2i{3yg=XkEXnYJ6foKKoPV)=8N8-q&AYP+SB3 zG{4*rAUR8hIGd$99Y6;_5FZBUyMEO3OlCDi%6Ty{@2#;e@H zr|N)%_?M4}vIh@PbhhxY+C*mU5A@${V8>Jvoe`i8C~DK_9U5Hs5EAgsfd|P7gyzF} zv8`H}0l6fB%?56HEb=bOBpTj(BR;U$6f%ME$8|SCNF%r^h6T%&5v%~nsF{tm=`WUy zs*o&c0di&yX#Ql|-9753#QvzOno4Y$m3WBv5Ws(^t2qO@ zK$W^zzg_QdpxgHfLf97$;k&!;)zNw-$N@TSD{^oFKzrtuq_np-+O%1qT6(u_${aL! z@||n_&!u!mlc)2HLEab=i08f>tKfuklu%izN<{%g7Qf=PksCEr(>bnBnUx?-H@bhj zAz%gK#Yqv=cc~7`{6~eAtZss^?a|78CDO{>sFfRa5SaOrxgAGv?xL2mb{!HJd;xNj z_-HZMeavE^&MM~uz3*cFUNC6XK$3q2k}^uOG>x0O7oM)bUq4yPV{SL;nPjJ1uJDBk z?J<*@l5KgHny=-TgY6EJl=V-0*D6NNQUr%-lZkG|?umTjogj)h|5>wqR2OblgrvfqDO`(5O>m^HGfH`Q5tys0W|`Rxk-ctPjjgM2H|P45OPQ zr`MzvtI%G>F*zDzf^+AqfJV)tt74v3r!b)K!=DhPvPeoKFABdf%^bFDd#df)V@Ggd zMC%t|cTo>Zm>yo^8py4PfY-Om)5;!<^ET~5Yo03(-`X)abbXbEF3`bT`w_g-@^o6Y zRjI1xqXRKD&0#Kp_S}nyfhEfL`4`f7QO0e*N2^U@Q)B~P+E zsc4Z(BhBirN1@mgzvt`K0*0MPbZn*??R(9akdRk6u}?iGsm)tq&}Da z+6~Cgdf<0r4Kq>I%A3U25vVp?Zv!T4JO{BaIBf}{fjo$el!<4qmJ29dRg_z3E0%*o zT5I5Tmfy7k`hpE}zFtNNWWL(ypKeIoR#<`?=ABKp#T{!AFJ-!l#~(g=;mb(6Bz}C= z51wfosPP+)#F!fK*G6|#?d*wyBg1Y@CjaHJ@;nL~WP>@KYVS#gc|j`nOuRwAU>&@# zI@1Tt+<=9NiNe$1>)cDc4|!`WhJ@K~-h?W(abiuxcIOs3rS4Y>FFsjiJXet^ZqH}9 z!o#YiQ)~5)a3tEB4SH|XSl+{n?9w3{)9-z6k3w<>KQc)!@-7~K;>B>hqUeK0M3HZM zW}s1~LIh290w-V2S;3i?mX~z&C1e z^e8fVz7PWHMKxs&^Ir!4P(^lf9;v{s^!&`mnF#-NSWKtLnv*`DrL1Xh*J!*fFv3XE zJ3t306U{DawA{kaiNGd|;m;h2<1YQV*1by;)IOvVq=o%@UhraNI)1t5q$#^*7)n|F zN*zLn3^krrID>J{h>oQ_a<}bTB>rxc%G7_~yNbYneEKoFC>o zC0u!jPIu_dQ64ecme>F2?Yz0zZA4qwwAKvk84~fU;aXBY1nDkw!sw%KPU7INLpR6W z`KTLjqMgRHkJS#)6qH&%X5kS&HrSi%6@m2Z5f z7ENiEI#Mzq@$xT>(|SUH50crVz4?xm0k6J)qpb59;IYI62EoJYzvV+qLR~pYQ>X}b z7FI-^t+&O)PynV64g*^wXaI9$DwZL{w0n2LZIvzNAIfdkfpO$ExbH(%jo66b5) 
zeXDx!atqx{jQMe~A_||97{B3HzXr;Az0s4(ii@o#pU<_nr%IFw&9+AH+g3w=sy7z# zLYsf>`nAx0j3f*G`P+wy#ueG9{~D;=hBBmoC<(F?U8a7D@VTrEX9QAA1z#0~HblWd zQixWAZ-ZyEqQ;R;l#xh&Yjrh4xh#L~m_flZaOE^Y_$4$C!9|&|M`i0r_^c)QG6g~* zi_;EX)(<`mn5$0E#P_G_61bngx?|jzK3Y0h9Rv}8$a&fV9e{+(}<%?EsrL*-3AH30jQuzpmXox-_*3g zeVw%_%*npfRHepMZ3z?A`8PbrPg=!Kk9h&Vs<;4FHJw=Z+%XZJ@v5x_rj2^5wEI2L z{0xc+GaM6DX!Wo8OGqky9zQ8i{bx@2duTvseIoBJrD_;W&s#I;N|JFv z<#&DIGuNQFMI+3%)2COH@uAg7@pBUE4@1b`B_Y%}UVou==|zflCjFMIuQf6303U!V z`?hJo*Ma>9Hk8ivhgy1ks^(2Tzy)KX<)E2*p^0u0LOTK8MX*8yBm8FE#elOHcqstb z{Q;rykSjbi;wN>M2-wBYA72SHbMQQFj0X4AGE+=ml{GGaPWXx<|GNUcV*2@$!t6ltwa#vzbEeRf;N8u7#Avn-W!y8g`k@ni~0m z4h&ZY1m(S1cQ)jw6@1wEE}oLophkoQA1;&?8PA+kj0XJJ(ERw@-2Ind`>^FlRS~p) zyurRUp#5g0?h+;8gEDK5)e^p}Sy%)-$~vR^`>d>(kIA9QNIpWy z>FaXg<3Q(zlp^O(e7mt#dRuEqiW!phSLZ{)8!ov?rM=@lx6Uw$Q#Z3+7AQgQ0g4&b zH#e{L<#!rG=(MTm0j@2U2EY`aes~Tuuhc_3GW3Nqs0bLD6U}>C5o)#ra2E&Uh{);? zZJQnR2EeUg=}xPd3-0S4VKaqf3;+4r zNRw!*nmZ=ZJaeVxMwc$R3SzlyE3K3Y1Ep{Y)C>z|)Mz5%ig`%258o(L|MC5G{7Zeu z^AD^C2ZUFjuP#Um`WIl}+R7GgbV7e$!p4Rc(WN*9NaoyJwg9Pg?l@rC>EK)I@%&No4-<5O7N<^lV#*A$2WIF{ zi(cxW2Ccl1vXBfImugrSC3|U*3V|o;6DwQx($6lLUL&q*Tj|-9=HF^`>yJ&xeq7OC zo~3d9bv!gz)1BSP;4WOOa=_ab7)m&CA4C8eoo+_t;^}g zeU!$3pi)N?6S~=)ptM$&5kO%ehN;E}qG>e%*$50z2{h6PRW&v3O_mP3RVQeGF*>DE z3u0Ieo$ii&#+;f3ACdRjCy3y;@e)or8!!q=whx|4V|6Gp|2XN8YFwTym*65!5yZJ8 z-4jOs#vy61g+sb0fWAf5wRvatZ1*-Q7^L8GEm!2?><3*yi#vg4K}lt>7CsoR&okZxAN)+UfJZ zjBbmYX#3>`4{IdUN8sWu9Z$iz#3S8Ii^ zBgW(g+KNVdIj(nc+*M6>#Os|d>bfQu`$ikG1-NF+Z9E&{P?4a56I<$UC7ctWjHm#( zDoc*Heb4DCdrV6}M5=;v{dyyS-t_hF5Cntrb0X_AG@yqCXNv2%&L(iZY{XbrK1PlW6 z(0phQ+!PA>ucpzenOb3{=eq&+$B8CH2QMWp1awQ?yWPq!0w{7K@55A<7i| zyg7b9|KNmr_~jCMGCnvl6BvWgs)a24=cIy|-ZDW2b&`n(eWl~ysM{t|e=#X@L;PTP zaA$P@RlKM{Be_$jHjN3$f8m%*j&>Gb3^wf5DkF%AF&AHPZ_TkG0nk>97R`Rb^OR@# zv77vH^Co=}!R3QQU&9eNph?B~DAlU;AG?qw7IXtd4xHAW9;z)96h1*6zLShnU^|tx z(geyJq%KA5*OQ;P{$YgBUXfDmr{9U>gHGBTVu3f#eWsReucb#tH1;JBwrd4z2Ja@LGqr&@g zN!vA$h+z7+yycpRe_YtR8D940;1xaUyezleymG~-xZLvnGh5XfDL 
zO_H2?VmY&O&`z`!0$Vk}k+Dc%_h7<+j6kuy7}P5k+)IVOukxXvL6Jy9?d#(`eZwP7 zSaaTT|7XmXwlgrW#DUHQ;Bn<;Py=?Vimj5HOYOgZk2XJN?UAzdf9376izbd;%L*2) zJh$Zf{O`w>*iJjKYr+<@$@v>DZHoF_aIoM7)8fkGDqH8k`r=A4{gnK4`D{zRjrY+#g!{rPKN39yamRhU%MP|;}^x^-ezUt5C*4)0o$N05V zH|O%8$q|OL%l~|1XHYK<1J-%-p8qYXjy=4u&;D=wkNlfO9Md){Rew4+f5KC-y>%xR zdHx0N5Zc0dI_vCaSBI+Wu8zJ%4J}bdNIOhgUS_ zZ3AxaUB@FN_AkCy+f*0z3nF0!KfntNB135fEZ8j!Y!}>3id^=4 z=l)%vuF2LNELyl`(%lO7Bagnu-Tr4&_i#zw{FApPY!TI3DB`f>tcF_Jo(cJv^?JWn zRfZj(+jHWY8q>V*)2yC8WA9vIy3hFS#sJ`c*7b`GOPi0jq>JC1SL*utKF{v^=3eQ0 z{+(}J@FK`|XY}27*BYE{!@qth&8(0}SiN9d_W5mPeCi+ALVx7HJ5dPS%hg>oJ4{M7 zv{T`MWaz<^{LG5>k2bR(rx=JE8(eu{#W=MfW#;j(lh>4Xs(hVsD|6?+&Q+&n?wRtg1UzoMQHy4>P~!8ne83=sNjV-n8{w zmh4@y*~qa`;?w~~!Ma5T1)%{CP5~#kH5>dTwlFe?Jle~e%wa4qaCq%MZP&>zmuB6c zTXy0?(tPFvbKbJPndBCJe(%!L!MFe0eNw#7_)o#^f$YUHao}!l-sxvK=fqXsbYz^o zf%}nmGZ%lx+TYu@9qo*$NlvPtx$1h=bN`$9-8q$>)hg$oS+JdXU)ZwZrKiM!>={=S zr_Q^{yJ?&1q`MIob3@;}K6oIXp~_5blCGM~`2&|0^wmmkFkI|DIk*V8$|r8x*`3DA zL*vROS_@`ADwGLKa!LF#w_5Mit{Ah_60UcfqPCSho%j0N zhqpU5WBp$l;Hd#=H}80JI%>U5*oQPyNJ&gU7W&NLj4yW)QT@No);4vi(p00i_>zopr01C**NB{r; diff --git a/api/core/model_runtime/model_providers/vertex_ai/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/vertex_ai/_assets/icon_s_en.svg deleted file mode 100644 index efc3589c07f831..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/_assets/icon_s_en.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/vertex_ai/_common.py b/api/core/model_runtime/model_providers/vertex_ai/_common.py deleted file mode 100644 index 8f7c859e3803c0..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/_common.py +++ /dev/null @@ -1,15 +0,0 @@ -from core.model_runtime.errors.invoke import InvokeError - - -class _CommonVertexAi: - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type 
thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - pass diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/__init__.py b/api/core/model_runtime/model_providers/vertex_ai/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-haiku.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-haiku.yaml deleted file mode 100644 index 5613348695dc20..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-haiku.yaml +++ /dev/null @@ -1,56 +0,0 @@ -model: claude-3-haiku@20240307 -label: - en_US: Claude 3 Haiku -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - # docs: https://docs.anthropic.com/claude/docs/system-prompts - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. 
- - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. -pricing: - input: '0.00025' - output: '0.00125' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-opus.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-opus.yaml deleted file mode 100644 index ab084636b5dec7..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-opus.yaml +++ /dev/null @@ -1,56 +0,0 @@ -model: claude-3-opus@20240229 -label: - en_US: Claude 3 Opus -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. 
- # docs: https://docs.anthropic.com/claude/docs/system-prompts - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. 
-pricing: - input: '0.015' - output: '0.075' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-sonnet.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-sonnet.yaml deleted file mode 100644 index 0be0113ffd98cf..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3-sonnet.yaml +++ /dev/null @@ -1,55 +0,0 @@ -model: claude-3-sonnet@20240229 -label: - en_US: Claude 3 Sonnet -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. 
- - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. -pricing: - input: '0.003' - output: '0.015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet.yaml deleted file mode 100644 index c64384e6a2f153..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet.yaml +++ /dev/null @@ -1,55 +0,0 @@ -model: claude-3-5-sonnet@20240620 -label: - en_US: Claude 3.5 Sonnet -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 4096 - min: 1 - max: 4096 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. 
- - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. -pricing: - input: '0.003' - output: '0.015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.0-pro-vision.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.0-pro-vision.yaml deleted file mode 100644 index ebb276b8af18f5..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.0-pro-vision.yaml +++ /dev/null @@ -1,37 +0,0 @@ -model: gemini-1.0-pro-vision-001 -label: - en_US: Gemini 1.0 Pro Vision -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 16384 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - en_US: Top k - type: int - help: - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_output_tokens - use_template: max_tokens - required: true - default: 2048 - min: 1 - max: 2048 -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.0-pro.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.0-pro.yaml deleted file mode 100644 index c325973846f5f8..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.0-pro.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: gemini-1.0-pro-002 -label: - en_US: Gemini 1.0 Pro -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32760 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - en_US: Top k - type: int - help: - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_output_tokens - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/text_embedding/__init__.py b/api/core/model_runtime/model_providers/vertex_ai/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text-embedding-004.yaml b/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text-embedding-004.yaml deleted file mode 100644 index 32db6faf89940f..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text-embedding-004.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: text-embedding-004 -model_type: text-embedding -model_properties: - context_size: 2048 -pricing: - input: '0.00013' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text-multilingual-embedding-002.yaml b/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text-multilingual-embedding-002.yaml deleted file mode 100644 index 2ec0eea9f21b78..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text-multilingual-embedding-002.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: text-multilingual-embedding-002 -model_type: text-embedding -model_properties: - context_size: 2048 -pricing: - input: '0.00013' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/vertex_ai.py b/api/core/model_runtime/model_providers/vertex_ai/vertex_ai.py deleted file mode 100644 index 466a86fd36a181..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/vertex_ai.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from 
core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class VertexAiProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `gemini-1.0-pro-002` model for validate, - model_instance.validate_credentials(model="gemini-1.0-pro-002", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/vertex_ai/vertex_ai.yaml b/api/core/model_runtime/model_providers/vertex_ai/vertex_ai.yaml deleted file mode 100644 index 27a4d03fe2975b..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/vertex_ai.yaml +++ /dev/null @@ -1,43 +0,0 @@ -provider: vertex_ai -label: - en_US: Vertex AI | Google Cloud Platform -description: - en_US: Vertex AI in Google Cloud Platform. 
-icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.png -background: "#FCFDFF" -help: - title: - en_US: Get your Access Details from Google - url: - en_US: https://cloud.google.com/vertex-ai/ -supported_model_types: - - llm - - text-embedding -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: vertex_project_id - label: - en_US: Project ID - type: text-input - required: true - placeholder: - en_US: Enter your Google Cloud Project ID - - variable: vertex_location - label: - en_US: Location - type: text-input - required: true - placeholder: - en_US: Enter your Google Cloud Location - - variable: vertex_service_account_key - label: - en_US: Service Account Key (Leave blank if you use Application Default Credentials) - type: secret-input - required: false - placeholder: - en_US: Enter your Google Cloud Service Account Key in base64 format diff --git a/api/core/model_runtime/model_providers/volcengine_maas/__init__.py b/api/core/model_runtime/model_providers/volcengine_maas/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_l_en.svg deleted file mode 100644 index 616e90916b2810..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_l_en.svg +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_l_zh.svg b/api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_l_zh.svg deleted file mode 100644 index 24b92195bd5522..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_l_zh.svg +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of 
file diff --git a/api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_s_en.svg deleted file mode 100644 index e6454a89b7ccdf..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/_assets/icon_s_en.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/volcengine_maas/client.py b/api/core/model_runtime/model_providers/volcengine_maas/client.py deleted file mode 100644 index cfe21e4b9f4617..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/client.py +++ /dev/null @@ -1,216 +0,0 @@ -import re -from collections.abc import Generator -from typing import Optional, cast - -from volcenginesdkarkruntime import Ark -from volcenginesdkarkruntime.types.chat import ( - ChatCompletion, - ChatCompletionAssistantMessageParam, - ChatCompletionChunk, - ChatCompletionContentPartImageParam, - ChatCompletionContentPartTextParam, - ChatCompletionMessageParam, - ChatCompletionMessageToolCallParam, - ChatCompletionSystemMessageParam, - ChatCompletionToolMessageParam, - ChatCompletionToolParam, - ChatCompletionUserMessageParam, -) -from volcenginesdkarkruntime.types.chat.chat_completion_content_part_image_param import ImageURL -from volcenginesdkarkruntime.types.chat.chat_completion_message_tool_call_param import Function -from volcenginesdkarkruntime.types.create_embedding_response import CreateEmbeddingResponse -from volcenginesdkarkruntime.types.shared_params import FunctionDefinition - -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) - -DEFAULT_V2_ENDPOINT = "maas-api.ml-platform-cn-beijing.volces.com" -DEFAULT_V3_ENDPOINT = "https://ark.cn-beijing.volces.com/api/v3" - - 
-class ArkClientV3: - endpoint_id: Optional[str] = None - ark: Optional[Ark] = None - - def __init__(self, *args, **kwargs): - self.ark = Ark(*args, **kwargs) - self.endpoint_id = None - - @staticmethod - def is_legacy(credentials: dict) -> bool: - # match default v2 endpoint - if ArkClientV3.is_compatible_with_legacy(credentials): - return False - # match default v3 endpoint - if credentials.get("api_endpoint_host") == DEFAULT_V3_ENDPOINT: - return False - # only v3 support api_key - if credentials.get("auth_method") == "api_key": - return False - # these cases are considered as sdk v2 - # - modified default v2 endpoint - # - modified default v3 endpoint and auth without api_key - return True - - @staticmethod - def is_compatible_with_legacy(credentials: dict) -> bool: - endpoint = credentials.get("api_endpoint_host") - return endpoint == DEFAULT_V2_ENDPOINT - - @classmethod - def from_credentials(cls, credentials): - """Initialize the client using the credentials provided.""" - args = { - "base_url": credentials["api_endpoint_host"], - "region": credentials["volc_region"], - } - if credentials.get("auth_method") == "api_key": - args = { - **args, - "api_key": credentials["volc_api_key"], - } - else: - args = { - **args, - "ak": credentials["volc_access_key_id"], - "sk": credentials["volc_secret_access_key"], - } - - if cls.is_compatible_with_legacy(credentials): - args = {**args, "base_url": DEFAULT_V3_ENDPOINT} - - client = ArkClientV3(**args) - client.endpoint_id = credentials["endpoint_id"] - return client - - @staticmethod - def convert_prompt_message(message: PromptMessage) -> ChatCompletionMessageParam: - """Converts a PromptMessage to a ChatCompletionMessageParam""" - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - content = message.content - else: - content = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - 
content.append( - ChatCompletionContentPartTextParam( - text=message_content.text, - type="text", - ) - ) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - image_data = re.sub(r"^data:image\/[a-zA-Z]+;base64,", "", message_content.data) - content.append( - ChatCompletionContentPartImageParam( - image_url=ImageURL( - url=image_data, - detail=message_content.detail.value, - ), - type="image_url", - ) - ) - message_dict = ChatCompletionUserMessageParam(role="user", content=content) - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = ChatCompletionAssistantMessageParam( - content=message.content, - role="assistant", - tool_calls=None - if not message.tool_calls - else [ - ChatCompletionMessageToolCallParam( - id=call.id, - function=Function(name=call.function.name, arguments=call.function.arguments), - type="function", - ) - for call in message.tool_calls - ], - ) - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = ChatCompletionSystemMessageParam(content=message.content, role="system") - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - message_dict = ChatCompletionToolMessageParam( - content=message.content, role="tool", tool_call_id=message.tool_call_id - ) - else: - raise ValueError(f"Got unknown PromptMessage type {message}") - - return message_dict - - @staticmethod - def _convert_tool_prompt(message: PromptMessageTool) -> ChatCompletionToolParam: - return ChatCompletionToolParam( - type="function", - function=FunctionDefinition( - name=message.name, - description=message.description, - parameters=message.parameters, - ), - ) - - def chat( - self, - messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - frequency_penalty: Optional[float] = None, - 
max_tokens: Optional[int] = None, - presence_penalty: Optional[float] = None, - top_p: Optional[float] = None, - temperature: Optional[float] = None, - ) -> ChatCompletion: - """Block chat""" - return self.ark.chat.completions.create( - model=self.endpoint_id, - messages=[self.convert_prompt_message(message) for message in messages], - tools=[self._convert_tool_prompt(tool) for tool in tools] if tools else None, - stop=stop, - frequency_penalty=frequency_penalty, - max_tokens=max_tokens, - presence_penalty=presence_penalty, - top_p=top_p, - temperature=temperature, - ) - - def stream_chat( - self, - messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - frequency_penalty: Optional[float] = None, - max_tokens: Optional[int] = None, - presence_penalty: Optional[float] = None, - top_p: Optional[float] = None, - temperature: Optional[float] = None, - ) -> Generator[ChatCompletionChunk]: - """Stream chat""" - chunks = self.ark.chat.completions.create( - stream=True, - model=self.endpoint_id, - messages=[self.convert_prompt_message(message) for message in messages], - tools=[self._convert_tool_prompt(tool) for tool in tools] if tools else None, - stop=stop, - frequency_penalty=frequency_penalty, - max_tokens=max_tokens, - presence_penalty=presence_penalty, - top_p=top_p, - temperature=temperature, - stream_options={"include_usage": True}, - ) - yield from chunks - - def embeddings(self, texts: list[str]) -> CreateEmbeddingResponse: - return self.ark.embeddings.create(model=self.endpoint_id, input=texts) diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/__init__.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/client.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/client.py deleted file mode 100644 
index 266f1216f82b29..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/client.py +++ /dev/null @@ -1,123 +0,0 @@ -import re -from collections.abc import Callable, Generator -from typing import cast - -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.model_providers.volcengine_maas.legacy.errors import wrap_error -from core.model_runtime.model_providers.volcengine_maas.legacy.volc_sdk import ChatRole, MaasError, MaasService - - -class MaaSClient(MaasService): - def __init__(self, host: str, region: str): - self.endpoint_id = None - super().__init__(host, region) - - def set_endpoint_id(self, endpoint_id: str): - self.endpoint_id = endpoint_id - - @classmethod - def from_credential(cls, credentials: dict) -> "MaaSClient": - host = credentials["api_endpoint_host"] - region = credentials["volc_region"] - ak = credentials["volc_access_key_id"] - sk = credentials["volc_secret_access_key"] - endpoint_id = credentials["endpoint_id"] - - client = cls(host, region) - client.set_endpoint_id(endpoint_id) - client.set_ak(ak) - client.set_sk(sk) - return client - - def chat(self, params: dict, messages: list[PromptMessage], stream=False, **extra_model_kwargs) -> Generator | dict: - req = { - "parameters": params, - "messages": [self.convert_prompt_message_to_maas_message(prompt) for prompt in messages], - **extra_model_kwargs, - } - if not stream: - return super().chat( - self.endpoint_id, - req, - ) - return super().stream_chat( - self.endpoint_id, - req, - ) - - def embeddings(self, texts: list[str]) -> dict: - req = {"input": texts} - return super().embeddings(self.endpoint_id, req) - - @staticmethod - def convert_prompt_message_to_maas_message(message: PromptMessage) -> dict: - if isinstance(message, UserPromptMessage): - 
message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": ChatRole.USER, "content": message.content} - else: - content = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - raise ValueError("Content object type only support image_url") - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - image_data = re.sub(r"^data:image\/[a-zA-Z]+;base64,", "", message_content.data) - content.append( - { - "type": "image_url", - "image_url": { - "url": "", - "image_bytes": image_data, - "detail": message_content.detail, - }, - } - ) - - message_dict = {"role": ChatRole.USER, "content": content} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": ChatRole.ASSISTANT, "content": message.content} - if message.tool_calls: - message_dict["tool_calls"] = [ - {"name": call.function.name, "arguments": call.function.arguments} for call in message.tool_calls - ] - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": ChatRole.SYSTEM, "content": message.content} - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - message_dict = {"role": ChatRole.FUNCTION, "content": message.content, "name": message.tool_call_id} - else: - raise ValueError(f"Got unknown PromptMessage type {message}") - - return message_dict - - @staticmethod - def wrap_exception(fn: Callable[[], dict | Generator]) -> dict | Generator: - try: - resp = fn() - except MaasError as e: - raise wrap_error(e) - - return resp - - @staticmethod - def transform_tool_prompt_to_maas_config(tool: PromptMessageTool): - return { - "type": "function", - "function": { - "name": tool.name, - "description": tool.description, - "parameters": tool.parameters, - }, - } diff --git 
a/api/core/model_runtime/model_providers/volcengine_maas/legacy/errors.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/errors.py deleted file mode 100644 index 91dbe21a616195..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/errors.py +++ /dev/null @@ -1,156 +0,0 @@ -from core.model_runtime.model_providers.volcengine_maas.legacy.volc_sdk import MaasError - - -class ClientSDKRequestError(MaasError): - pass - - -class SignatureDoesNotMatchError(MaasError): - pass - - -class RequestTimeoutError(MaasError): - pass - - -class ServiceConnectionTimeoutError(MaasError): - pass - - -class MissingAuthenticationHeaderError(MaasError): - pass - - -class AuthenticationHeaderIsInvalidError(MaasError): - pass - - -class InternalServiceError(MaasError): - pass - - -class MissingParameterError(MaasError): - pass - - -class InvalidParameterError(MaasError): - pass - - -class AuthenticationExpireError(MaasError): - pass - - -class EndpointIsInvalidError(MaasError): - pass - - -class EndpointIsNotEnableError(MaasError): - pass - - -class ModelNotSupportStreamModeError(MaasError): - pass - - -class ReqTextExistRiskError(MaasError): - pass - - -class RespTextExistRiskError(MaasError): - pass - - -class EndpointRateLimitExceededError(MaasError): - pass - - -class ServiceConnectionRefusedError(MaasError): - pass - - -class ServiceConnectionClosedError(MaasError): - pass - - -class UnauthorizedUserForEndpointError(MaasError): - pass - - -class InvalidEndpointWithNoURLError(MaasError): - pass - - -class EndpointAccountRpmRateLimitExceededError(MaasError): - pass - - -class EndpointAccountTpmRateLimitExceededError(MaasError): - pass - - -class ServiceResourceWaitQueueFullError(MaasError): - pass - - -class EndpointIsPendingError(MaasError): - pass - - -class ServiceNotOpenError(MaasError): - pass - - -AuthErrors = { - "SignatureDoesNotMatch": SignatureDoesNotMatchError, - "MissingAuthenticationHeader": MissingAuthenticationHeaderError, - 
"AuthenticationHeaderIsInvalid": AuthenticationHeaderIsInvalidError, - "AuthenticationExpire": AuthenticationExpireError, - "UnauthorizedUserForEndpoint": UnauthorizedUserForEndpointError, -} - -BadRequestErrors = { - "MissingParameter": MissingParameterError, - "InvalidParameter": InvalidParameterError, - "EndpointIsInvalid": EndpointIsInvalidError, - "EndpointIsNotEnable": EndpointIsNotEnableError, - "ModelNotSupportStreamMode": ModelNotSupportStreamModeError, - "ReqTextExistRisk": ReqTextExistRiskError, - "RespTextExistRisk": RespTextExistRiskError, - "InvalidEndpointWithNoURL": InvalidEndpointWithNoURLError, - "ServiceNotOpen": ServiceNotOpenError, -} - -RateLimitErrors = { - "EndpointRateLimitExceeded": EndpointRateLimitExceededError, - "EndpointAccountRpmRateLimitExceeded": EndpointAccountRpmRateLimitExceededError, - "EndpointAccountTpmRateLimitExceeded": EndpointAccountTpmRateLimitExceededError, -} - -ServerUnavailableErrors = { - "InternalServiceError": InternalServiceError, - "EndpointIsPending": EndpointIsPendingError, - "ServiceResourceWaitQueueFull": ServiceResourceWaitQueueFullError, -} - -ConnectionErrors = { - "ClientSDKRequestError": ClientSDKRequestError, - "RequestTimeout": RequestTimeoutError, - "ServiceConnectionTimeout": ServiceConnectionTimeoutError, - "ServiceConnectionRefused": ServiceConnectionRefusedError, - "ServiceConnectionClosed": ServiceConnectionClosedError, -} - -ErrorCodeMap = { - **AuthErrors, - **BadRequestErrors, - **RateLimitErrors, - **ServerUnavailableErrors, - **ConnectionErrors, -} - - -def wrap_error(e: MaasError) -> Exception: - if ErrorCodeMap.get(e.code): - return ErrorCodeMap.get(e.code)(e.code_n, e.code, e.message, e.req_id) - return e diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/__init__.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/__init__.py deleted file mode 100644 index 8b3eb157be5cfe..00000000000000 --- 
a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .common import ChatRole -from .maas import MaasError, MaasService - -__all__ = ["MaasService", "ChatRole", "MaasError"] diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/__init__.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/__init__.py deleted file mode 100644 index 8b137891791fe9..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/auth.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/auth.py deleted file mode 100644 index c22bf8e76de36a..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/auth.py +++ /dev/null @@ -1,159 +0,0 @@ -# coding : utf-8 -import datetime -from itertools import starmap - -import pytz - -from .util import Util - - -class MetaData: - def __init__(self): - self.algorithm = "" - self.credential_scope = "" - self.signed_headers = "" - self.date = "" - self.region = "" - self.service = "" - - def set_date(self, date): - self.date = date - - def set_service(self, service): - self.service = service - - def set_region(self, region): - self.region = region - - def set_algorithm(self, algorithm): - self.algorithm = algorithm - - def set_credential_scope(self, credential_scope): - self.credential_scope = credential_scope - - def set_signed_headers(self, signed_headers): - self.signed_headers = signed_headers - - -class SignResult: - def __init__(self): - self.xdate = "" - self.xCredential = "" - self.xAlgorithm = "" - self.xSignedHeaders = "" - self.xSignedQueries = "" - self.xSignature = "" - self.xContextSha256 = "" - self.xSecurityToken = "" - - self.authorization = "" - - def __str__(self): - return 
"\n".join(list(starmap("{}:{}".format, self.__dict__.items()))) - - -class Credentials: - def __init__(self, ak, sk, service, region, session_token=""): - self.ak = ak - self.sk = sk - self.service = service - self.region = region - self.session_token = session_token - - def set_ak(self, ak): - self.ak = ak - - def set_sk(self, sk): - self.sk = sk - - def set_session_token(self, session_token): - self.session_token = session_token - - -class Signer: - @staticmethod - def sign(request, credentials): - if request.path == "": - request.path = "/" - if request.method != "GET" and "Content-Type" not in request.headers: - request.headers["Content-Type"] = "application/x-www-form-urlencoded; charset=utf-8" - - format_date = Signer.get_current_format_date() - request.headers["X-Date"] = format_date - if credentials.session_token != "": - request.headers["X-Security-Token"] = credentials.session_token - - md = MetaData() - md.set_algorithm("HMAC-SHA256") - md.set_service(credentials.service) - md.set_region(credentials.region) - md.set_date(format_date[:8]) - - hashed_canon_req = Signer.hashed_canonical_request_v4(request, md) - md.set_credential_scope("/".join([md.date, md.region, md.service, "request"])) - - signing_str = "\n".join([md.algorithm, format_date, md.credential_scope, hashed_canon_req]) - signing_key = Signer.get_signing_secret_key_v4(credentials.sk, md.date, md.region, md.service) - sign = Util.to_hex(Util.hmac_sha256(signing_key, signing_str)) - request.headers["Authorization"] = Signer.build_auth_header_v4(sign, md, credentials) - - @staticmethod - def hashed_canonical_request_v4(request, meta): - body_hash = Util.sha256(request.body) - request.headers["X-Content-Sha256"] = body_hash - - signed_headers = {} - for key in request.headers: - if key in {"Content-Type", "Content-Md5", "Host"} or key.startswith("X-"): - signed_headers[key.lower()] = request.headers[key] - - if "host" in signed_headers: - v = signed_headers["host"] - if v.find(":") != -1: - split 
= v.split(":") - port = split[1] - if str(port) == "80" or str(port) == "443": - signed_headers["host"] = split[0] - - signed_str = "" - for key in sorted(signed_headers.keys()): - signed_str += key + ":" + signed_headers[key] + "\n" - - meta.set_signed_headers(";".join(sorted(signed_headers.keys()))) - - canonical_request = "\n".join( - [ - request.method, - Util.norm_uri(request.path), - Util.norm_query(request.query), - signed_str, - meta.signed_headers, - body_hash, - ] - ) - - return Util.sha256(canonical_request) - - @staticmethod - def get_signing_secret_key_v4(sk, date, region, service): - date = Util.hmac_sha256(bytes(sk, encoding="utf-8"), date) - region = Util.hmac_sha256(date, region) - service = Util.hmac_sha256(region, service) - return Util.hmac_sha256(service, "request") - - @staticmethod - def build_auth_header_v4(signature, meta, credentials): - credential = credentials.ak + "/" + meta.credential_scope - return ( - meta.algorithm - + " Credential=" - + credential - + ", SignedHeaders=" - + meta.signed_headers - + ", Signature=" - + signature - ) - - @staticmethod - def get_current_format_date(): - return datetime.datetime.now(tz=pytz.timezone("UTC")).strftime("%Y%m%dT%H%M%SZ") diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/service.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/service.py deleted file mode 100644 index 33c41f3eb331a3..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/service.py +++ /dev/null @@ -1,216 +0,0 @@ -import json -from collections import OrderedDict -from urllib.parse import urlencode - -import requests - -from .auth import Signer - -VERSION = "v1.0.137" - - -class Service: - def __init__(self, service_info, api_info): - self.service_info = service_info - self.api_info = api_info - self.session = requests.session() - - def set_ak(self, ak): - self.service_info.credentials.set_ak(ak) - - def 
set_sk(self, sk): - self.service_info.credentials.set_sk(sk) - - def set_session_token(self, session_token): - self.service_info.credentials.set_session_token(session_token) - - def set_host(self, host): - self.service_info.host = host - - def set_scheme(self, scheme): - self.service_info.scheme = scheme - - def get(self, api, params, doseq=0): - if api not in self.api_info: - raise Exception("no such api") - api_info = self.api_info[api] - - r = self.prepare_request(api_info, params, doseq) - - Signer.sign(r, self.service_info.credentials) - - url = r.build(doseq) - resp = self.session.get( - url, headers=r.headers, timeout=(self.service_info.connection_timeout, self.service_info.socket_timeout) - ) - if resp.status_code == 200: - return resp.text - else: - raise Exception(resp.text) - - def post(self, api, params, form): - if api not in self.api_info: - raise Exception("no such api") - api_info = self.api_info[api] - r = self.prepare_request(api_info, params) - r.headers["Content-Type"] = "application/x-www-form-urlencoded" - r.form = self.merge(api_info.form, form) - r.body = urlencode(r.form, True) - Signer.sign(r, self.service_info.credentials) - - url = r.build() - - resp = self.session.post( - url, - headers=r.headers, - data=r.form, - timeout=(self.service_info.connection_timeout, self.service_info.socket_timeout), - ) - if resp.status_code == 200: - return resp.text - else: - raise Exception(resp.text) - - def json(self, api, params, body): - if api not in self.api_info: - raise Exception("no such api") - api_info = self.api_info[api] - r = self.prepare_request(api_info, params) - r.headers["Content-Type"] = "application/json" - r.body = body - - Signer.sign(r, self.service_info.credentials) - - url = r.build() - resp = self.session.post( - url, - headers=r.headers, - data=r.body, - timeout=(self.service_info.connection_timeout, self.service_info.socket_timeout), - ) - if resp.status_code == 200: - return json.dumps(resp.json()) - else: - raise 
Exception(resp.text.encode("utf-8")) - - def put(self, url, file_path, headers): - with open(file_path, "rb") as f: - resp = self.session.put(url, headers=headers, data=f) - if resp.status_code == 200: - return True, resp.text.encode("utf-8") - else: - return False, resp.text.encode("utf-8") - - def put_data(self, url, data, headers): - resp = self.session.put(url, headers=headers, data=data) - if resp.status_code == 200: - return True, resp.text.encode("utf-8") - else: - return False, resp.text.encode("utf-8") - - def prepare_request(self, api_info, params, doseq=0): - for key in params: - if type(params[key]) == int or type(params[key]) == float or type(params[key]) == bool: - params[key] = str(params[key]) - elif type(params[key]) == list: - if not doseq: - params[key] = ",".join(params[key]) - - connection_timeout = self.service_info.connection_timeout - socket_timeout = self.service_info.socket_timeout - - r = Request() - r.set_schema(self.service_info.scheme) - r.set_method(api_info.method) - r.set_connection_timeout(connection_timeout) - r.set_socket_timeout(socket_timeout) - - headers = self.merge(api_info.header, self.service_info.header) - headers["Host"] = self.service_info.host - headers["User-Agent"] = "volc-sdk-python/" + VERSION - r.set_headers(headers) - - query = self.merge(api_info.query, params) - r.set_query(query) - - r.set_host(self.service_info.host) - r.set_path(api_info.path) - - return r - - @staticmethod - def merge(param1, param2): - od = OrderedDict() - for key in param1: - od[key] = param1[key] - - for key in param2: - od[key] = param2[key] - - return od - - -class Request: - def __init__(self): - self.schema = "" - self.method = "" - self.host = "" - self.path = "" - self.headers = OrderedDict() - self.query = OrderedDict() - self.body = "" - self.form = {} - self.connection_timeout = 0 - self.socket_timeout = 0 - - def set_schema(self, schema): - self.schema = schema - - def set_method(self, method): - self.method = method - - def 
set_host(self, host): - self.host = host - - def set_path(self, path): - self.path = path - - def set_headers(self, headers): - self.headers = headers - - def set_query(self, query): - self.query = query - - def set_body(self, body): - self.body = body - - def set_connection_timeout(self, connection_timeout): - self.connection_timeout = connection_timeout - - def set_socket_timeout(self, socket_timeout): - self.socket_timeout = socket_timeout - - def build(self, doseq=0): - return self.schema + "://" + self.host + self.path + "?" + urlencode(self.query, doseq) - - -class ServiceInfo: - def __init__(self, host, header, credentials, connection_timeout, socket_timeout, scheme="http"): - self.host = host - self.header = header - self.credentials = credentials - self.connection_timeout = connection_timeout - self.socket_timeout = socket_timeout - self.scheme = scheme - - -class ApiInfo: - def __init__(self, method, path, query, form, header): - self.method = method - self.path = path - self.query = query - self.form = form - self.header = header - - def __str__(self): - return "method: " + self.method + ", path: " + self.path diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/util.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/util.py deleted file mode 100644 index 178d63714e9cf1..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/util.py +++ /dev/null @@ -1,44 +0,0 @@ -import hashlib -import hmac -import operator -from functools import reduce -from urllib.parse import quote - - -class Util: - @staticmethod - def norm_uri(path): - return quote(path).replace("%2F", "/").replace("+", "%20") - - @staticmethod - def norm_query(params): - query = "" - for key in sorted(params.keys()): - if type(params[key]) == list: - for k in params[key]: - query = query + quote(key, safe="-_.~") + "=" + quote(k, safe="-_.~") + "&" - else: - query = query + quote(key, 
safe="-_.~") + "=" + quote(params[key], safe="-_.~") + "&" - query = query[:-1] - return query.replace("+", "%20") - - @staticmethod - def hmac_sha256(key, content): - return hmac.new(key, bytes(content, encoding="utf-8"), hashlib.sha256).digest() - - @staticmethod - def sha256(content): - if isinstance(content, str) is True: - return hashlib.sha256(content.encode("utf-8")).hexdigest() - else: - return hashlib.sha256(content).hexdigest() - - @staticmethod - def to_hex(content): - lst = [] - for ch in content: - hv = hex(ch).replace("0x", "") - if len(hv) == 1: - hv = "0" + hv - lst.append(hv) - return reduce(operator.add, lst) diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/common.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/common.py deleted file mode 100644 index 3825fd65741ef5..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/common.py +++ /dev/null @@ -1,77 +0,0 @@ -import json -import random -from datetime import datetime - - -class ChatRole: - USER = "user" - ASSISTANT = "assistant" - SYSTEM = "system" - FUNCTION = "function" - - -class _Dict(dict): - __setattr__ = dict.__setitem__ - __getattr__ = dict.__getitem__ - - def __missing__(self, key): - return None - - -def dict_to_object(dict_obj): - # 支持嵌套类型 - if isinstance(dict_obj, list): - insts = [] - for i in dict_obj: - insts.append(dict_to_object(i)) - return insts - - if isinstance(dict_obj, dict): - inst = _Dict() - for k, v in dict_obj.items(): - inst[k] = dict_to_object(v) - return inst - - return dict_obj - - -def json_to_object(json_str, req_id=None): - obj = dict_to_object(json.loads(json_str)) - if obj and isinstance(obj, dict) and req_id: - obj["req_id"] = req_id - return obj - - -def gen_req_id(): - return datetime.now().strftime("%Y%m%d%H%M%S") + format(random.randint(0, 2**64 - 1), "020X") - - -class SSEDecoder: - def __init__(self, source): - self.source = source - - def _read(self): - 
data = b"" - for chunk in self.source: - for line in chunk.splitlines(True): - data += line - if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): - yield data - data = b"" - if data: - yield data - - def next(self): - for chunk in self._read(): - for line in chunk.splitlines(): - # skip comment - if line.startswith(b":"): - continue - - if b":" in line: - field, value = line.split(b":", 1) - else: - field, value = line, b"" - - if field == b"data" and len(value) > 0: - yield value diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/maas.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/maas.py deleted file mode 100644 index a3836685f1fbf4..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/maas.py +++ /dev/null @@ -1,198 +0,0 @@ -import copy -import json -from collections.abc import Iterator - -from .base.auth import Credentials, Signer -from .base.service import ApiInfo, Service, ServiceInfo -from .common import SSEDecoder, dict_to_object, gen_req_id, json_to_object - - -class MaasService(Service): - def __init__(self, host, region, connection_timeout=60, socket_timeout=60): - service_info = self.get_service_info(host, region, connection_timeout, socket_timeout) - self._apikey = None - api_info = self.get_api_info() - super().__init__(service_info, api_info) - - def set_apikey(self, apikey): - self._apikey = apikey - - @staticmethod - def get_service_info(host, region, connection_timeout, socket_timeout): - service_info = ServiceInfo( - host, - {"Accept": "application/json"}, - Credentials("", "", "ml_maas", region), - connection_timeout, - socket_timeout, - "https", - ) - return service_info - - @staticmethod - def get_api_info(): - api_info = { - "chat": ApiInfo("POST", "/api/v2/endpoint/{endpoint_id}/chat", {}, {}, {}), - "embeddings": ApiInfo("POST", "/api/v2/endpoint/{endpoint_id}/embeddings", {}, {}, {}), - } - return api_info - - def chat(self, endpoint_id, 
req): - req["stream"] = False - return self._request(endpoint_id, "chat", req) - - def stream_chat(self, endpoint_id, req): - req_id = gen_req_id() - self._validate("chat", req_id) - apikey = self._apikey - - try: - req["stream"] = True - res = self._call(endpoint_id, "chat", req_id, {}, json.dumps(req).encode("utf-8"), apikey, stream=True) - - decoder = SSEDecoder(res) - - def iter_fn(): - for data in decoder.next(): - if data == b"[DONE]": - return - - try: - res = json_to_object(str(data, encoding="utf-8"), req_id=req_id) - except Exception: - raise - - if res.error is not None and res.error.code_n != 0: - raise MaasError( - res.error.code_n, - res.error.code, - res.error.message, - req_id, - ) - yield res - - return iter_fn() - except MaasError: - raise - except Exception as e: - raise new_client_sdk_request_error(str(e)) - - def embeddings(self, endpoint_id, req): - return self._request(endpoint_id, "embeddings", req) - - def _request(self, endpoint_id, api, req, params={}): - req_id = gen_req_id() - - self._validate(api, req_id) - - apikey = self._apikey - - try: - res = self._call(endpoint_id, api, req_id, params, json.dumps(req).encode("utf-8"), apikey) - resp = dict_to_object(res.json()) - if resp and isinstance(resp, dict): - resp["req_id"] = req_id - return resp - - except MaasError as e: - raise e - except Exception as e: - raise new_client_sdk_request_error(str(e), req_id) - - def _validate(self, api, req_id): - credentials_exist = ( - self.service_info.credentials is not None - and self.service_info.credentials.sk is not None - and self.service_info.credentials.ak is not None - ) - - if not self._apikey and not credentials_exist: - raise new_client_sdk_request_error("no valid credential", req_id) - - if api not in self.api_info: - raise new_client_sdk_request_error("no such api", req_id) - - def _call(self, endpoint_id, api, req_id, params, body, apikey=None, stream=False): - api_info = copy.deepcopy(self.api_info[api]) - api_info.path = 
api_info.path.format(endpoint_id=endpoint_id) - - r = self.prepare_request(api_info, params) - r.headers["x-tt-logid"] = req_id - r.headers["Content-Type"] = "application/json" - r.body = body - - if apikey is None: - Signer.sign(r, self.service_info.credentials) - elif apikey is not None: - r.headers["Authorization"] = "Bearer " + apikey - - url = r.build() - res = self.session.post( - url, - headers=r.headers, - data=r.body, - timeout=( - self.service_info.connection_timeout, - self.service_info.socket_timeout, - ), - stream=stream, - ) - - if res.status_code != 200: - raw = res.text.encode() - res.close() - try: - resp = json_to_object(str(raw, encoding="utf-8"), req_id=req_id) - except Exception: - raise new_client_sdk_request_error(raw, req_id) - - if resp.error: - raise MaasError(resp.error.code_n, resp.error.code, resp.error.message, req_id) - else: - raise new_client_sdk_request_error(resp, req_id) - - return res - - -class MaasError(Exception): - def __init__(self, code_n, code, message, req_id): - self.code_n = code_n - self.code = code - self.message = message - self.req_id = req_id - - def __str__(self): - return ( - "Detailed exception information is listed below.\n" - + "req_id: {}\n" - + "code_n: {}\n" - + "code: {}\n" - + "message: {}" - ).format(self.req_id, self.code_n, self.code, self.message) - - -def new_client_sdk_request_error(raw, req_id=""): - return MaasError(1709701, "ClientSDKRequestError", "MaaS SDK request error: {}".format(raw), req_id) - - -class BinaryResponseContent: - def __init__(self, response, request_id) -> None: - self.response = response - self.request_id = request_id - - def stream_to_file(self, file: str) -> None: - is_first = True - error_bytes = b"" - with open(file, mode="wb") as f: - for data in self.response: - if len(error_bytes) > 0 or (is_first and '"error":' in str(data)): - error_bytes += data - else: - f.write(data) - - if len(error_bytes) > 0: - resp = json_to_object(str(error_bytes, encoding="utf-8"), 
req_id=self.request_id) - raise MaasError(resp.error.code_n, resp.error.code, resp.error.message, self.request_id) - - def iter_bytes(self) -> Iterator[bytes]: - yield from self.response diff --git a/api/core/model_runtime/model_providers/volcengine_maas/llm/__init__.py b/api/core/model_runtime/model_providers/volcengine_maas/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py b/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py deleted file mode 100644 index dec6c9d789c135..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py +++ /dev/null @@ -1,388 +0,0 @@ -import logging -from collections.abc import Generator - -from volcenginesdkarkruntime.types.chat import ChatCompletion, ChatCompletionChunk - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.volcengine_maas.client import ArkClientV3 -from core.model_runtime.model_providers.volcengine_maas.legacy.client import MaaSClient -from core.model_runtime.model_providers.volcengine_maas.legacy.errors import ( - AuthErrors, - BadRequestErrors, - 
ConnectionErrors, - MaasError, - RateLimitErrors, - ServerUnavailableErrors, -) -from core.model_runtime.model_providers.volcengine_maas.llm.models import ( - get_model_config, - get_v2_req_params, - get_v3_req_params, -) - -logger = logging.getLogger(__name__) - - -class VolcengineMaaSLargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - if ArkClientV3.is_legacy(credentials): - return self._generate_v2(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - return self._generate_v3(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate credentials - """ - if ArkClientV3.is_legacy(credentials): - return self._validate_credentials_v2(credentials) - return self._validate_credentials_v3(credentials) - - @staticmethod - def _validate_credentials_v2(credentials: dict) -> None: - client = MaaSClient.from_credential(credentials) - try: - client.chat( - { - "max_new_tokens": 16, - "temperature": 0.7, - "top_p": 0.9, - "top_k": 15, - }, - [UserPromptMessage(content="ping\nAnswer: ")], - ) - except MaasError as e: - raise CredentialsValidateFailedError(e.message) - - @staticmethod - def _validate_credentials_v3(credentials: dict) -> None: - client = ArkClientV3.from_credentials(credentials) - try: - client.chat( - max_tokens=16, - temperature=0.7, - top_p=0.9, - messages=[UserPromptMessage(content="ping\nAnswer: ")], - ) - except Exception as e: - raise CredentialsValidateFailedError(e) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool] | None = None, - ) -> int: - if 
ArkClientV3.is_legacy(credentials): - return self._get_num_tokens_v2(prompt_messages) - return self._get_num_tokens_v3(prompt_messages) - - def _get_num_tokens_v2(self, messages: list[PromptMessage]) -> int: - if len(messages) == 0: - return 0 - num_tokens = 0 - messages_dict = [MaaSClient.convert_prompt_message_to_maas_message(m) for m in messages] - for message in messages_dict: - for key, value in message.items(): - num_tokens += self._get_num_tokens_by_gpt2(str(key)) - num_tokens += self._get_num_tokens_by_gpt2(str(value)) - - return num_tokens - - def _get_num_tokens_v3(self, messages: list[PromptMessage]) -> int: - if len(messages) == 0: - return 0 - num_tokens = 0 - messages_dict = [ArkClientV3.convert_prompt_message(m) for m in messages] - for message in messages_dict: - for key, value in message.items(): - num_tokens += self._get_num_tokens_by_gpt2(str(key)) - num_tokens += self._get_num_tokens_by_gpt2(str(value)) - - return num_tokens - - def _generate_v2( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - client = MaaSClient.from_credential(credentials) - req_params = get_v2_req_params(credentials, model_parameters, stop) - extra_model_kwargs = {} - if tools: - extra_model_kwargs["tools"] = [MaaSClient.transform_tool_prompt_to_maas_config(tool) for tool in tools] - resp = MaaSClient.wrap_exception(lambda: client.chat(req_params, prompt_messages, stream, **extra_model_kwargs)) - - def _handle_stream_chat_response() -> Generator: - for index, r in enumerate(resp): - choices = r["choices"] - if not choices: - continue - choice = choices[0] - message = choice["message"] - usage = None - if r.get("usage"): - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=r["usage"]["prompt_tokens"], - 
completion_tokens=r["usage"]["completion_tokens"], - ) - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=AssistantPromptMessage(content=message["content"] or "", tool_calls=[]), - usage=usage, - finish_reason=choice.get("finish_reason"), - ), - ) - - def _handle_chat_response() -> LLMResult: - choices = resp["choices"] - if not choices: - raise ValueError("No choices found") - - choice = choices[0] - message = choice["message"] - - # parse tool calls - tool_calls = [] - if message["tool_calls"]: - for call in message["tool_calls"]: - tool_call = AssistantPromptMessage.ToolCall( - id=call["function"]["name"], - type=call["type"], - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=call["function"]["name"], arguments=call["function"]["arguments"] - ), - ) - tool_calls.append(tool_call) - - usage = resp["usage"] - return LLMResult( - model=model, - prompt_messages=prompt_messages, - message=AssistantPromptMessage( - content=message["content"] or "", - tool_calls=tool_calls, - ), - usage=self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=usage["prompt_tokens"], - completion_tokens=usage["completion_tokens"], - ), - ) - - if not stream: - return _handle_chat_response() - return _handle_stream_chat_response() - - def _generate_v3( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - client = ArkClientV3.from_credentials(credentials) - req_params = get_v3_req_params(credentials, model_parameters, stop) - if tools: - req_params["tools"] = tools - - def _handle_stream_chat_response(chunks: Generator[ChatCompletionChunk]) -> Generator: - for chunk in chunks: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - 
delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage( - content=chunk.choices[0].delta.content if chunk.choices else "", tool_calls=[] - ), - usage=self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=chunk.usage.prompt_tokens, - completion_tokens=chunk.usage.completion_tokens, - ) - if chunk.usage - else None, - finish_reason=chunk.choices[0].finish_reason if chunk.choices else None, - ), - ) - - def _handle_chat_response(resp: ChatCompletion) -> LLMResult: - choice = resp.choices[0] - message = choice.message - # parse tool calls - tool_calls = [] - if message.tool_calls: - for call in message.tool_calls: - tool_call = AssistantPromptMessage.ToolCall( - id=call.id, - type=call.type, - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=call.function.name, arguments=call.function.arguments - ), - ) - tool_calls.append(tool_call) - - usage = resp.usage - return LLMResult( - model=model, - prompt_messages=prompt_messages, - message=AssistantPromptMessage( - content=message.content or "", - tool_calls=tool_calls, - ), - usage=self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=usage.prompt_tokens, - completion_tokens=usage.completion_tokens, - ), - ) - - if not stream: - resp = client.chat(prompt_messages, **req_params) - return _handle_chat_response(resp) - - chunks = client.stream_chat(prompt_messages, **req_params) - return _handle_stream_chat_response(chunks) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - model_config = get_model_config(credentials) - - rules = [ - ParameterRule( - name="temperature", - type=ParameterType.FLOAT, - use_template="temperature", - label=I18nObject(zh_Hans="温度", en_US="Temperature"), - ), - ParameterRule( - name="top_p", - type=ParameterType.FLOAT, - use_template="top_p", - label=I18nObject(zh_Hans="Top P", en_US="Top P"), - ), - 
ParameterRule( - name="top_k", type=ParameterType.INT, min=1, default=1, label=I18nObject(zh_Hans="Top K", en_US="Top K") - ), - ParameterRule( - name="presence_penalty", - type=ParameterType.FLOAT, - use_template="presence_penalty", - label=I18nObject( - en_US="Presence Penalty", - zh_Hans="存在惩罚", - ), - min=-2.0, - max=2.0, - ), - ParameterRule( - name="frequency_penalty", - type=ParameterType.FLOAT, - use_template="frequency_penalty", - label=I18nObject( - en_US="Frequency Penalty", - zh_Hans="频率惩罚", - ), - min=-2.0, - max=2.0, - ), - ParameterRule( - name="max_tokens", - type=ParameterType.INT, - use_template="max_tokens", - min=1, - max=model_config.properties.max_tokens, - default=512, - label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"), - ), - ] - - model_properties = {} - model_properties[ModelPropertyKey.CONTEXT_SIZE] = model_config.properties.context_size - model_properties[ModelPropertyKey.MODE] = model_config.properties.mode.value - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.LLM, - model_properties=model_properties, - parameter_rules=rules, - features=model_config.features, - ) - - return entity - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: ConnectionErrors.values(), - InvokeServerUnavailableError: ServerUnavailableErrors.values(), - InvokeRateLimitError: RateLimitErrors.values(), - InvokeAuthorizationError: AuthErrors.values(), - InvokeBadRequestError: BadRequestErrors.values(), - } diff --git a/api/core/model_runtime/model_providers/volcengine_maas/llm/models.py b/api/core/model_runtime/model_providers/volcengine_maas/llm/models.py deleted file mode 100644 index d8be14b0247698..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/llm/models.py +++ /dev/null @@ -1,142 +0,0 @@ -from pydantic import BaseModel - -from core.model_runtime.entities.llm_entities import LLMMode -from core.model_runtime.entities.model_entities import ModelFeature - - -class ModelProperties(BaseModel): - context_size: int - max_tokens: int - mode: LLMMode - - -class ModelConfig(BaseModel): - properties: ModelProperties - features: list[ModelFeature] - - -configs: dict[str, ModelConfig] = { - "Doubao-pro-4k": ModelConfig( - properties=ModelProperties(context_size=4096, max_tokens=4096, mode=LLMMode.CHAT), - features=[ModelFeature.TOOL_CALL], - ), - "Doubao-lite-4k": ModelConfig( - properties=ModelProperties(context_size=4096, max_tokens=4096, mode=LLMMode.CHAT), - features=[ModelFeature.TOOL_CALL], - ), - "Doubao-pro-32k": ModelConfig( - properties=ModelProperties(context_size=32768, max_tokens=4096, mode=LLMMode.CHAT), - features=[ModelFeature.TOOL_CALL], - ), - "Doubao-lite-32k": ModelConfig( - properties=ModelProperties(context_size=32768, max_tokens=4096, mode=LLMMode.CHAT), - features=[ModelFeature.TOOL_CALL], - ), - "Doubao-pro-128k": ModelConfig( - properties=ModelProperties(context_size=131072, max_tokens=4096, mode=LLMMode.CHAT), - features=[ModelFeature.TOOL_CALL], - ), - "Doubao-lite-128k": ModelConfig( - properties=ModelProperties(context_size=131072, max_tokens=4096, mode=LLMMode.CHAT), features=[] - ), - 
"Skylark2-pro-4k": ModelConfig( - properties=ModelProperties(context_size=4096, max_tokens=4096, mode=LLMMode.CHAT), features=[] - ), - "Llama3-8B": ModelConfig( - properties=ModelProperties(context_size=8192, max_tokens=8192, mode=LLMMode.CHAT), features=[] - ), - "Llama3-70B": ModelConfig( - properties=ModelProperties(context_size=8192, max_tokens=8192, mode=LLMMode.CHAT), features=[] - ), - "Moonshot-v1-8k": ModelConfig( - properties=ModelProperties(context_size=8192, max_tokens=4096, mode=LLMMode.CHAT), - features=[ModelFeature.TOOL_CALL], - ), - "Moonshot-v1-32k": ModelConfig( - properties=ModelProperties(context_size=32768, max_tokens=16384, mode=LLMMode.CHAT), - features=[ModelFeature.TOOL_CALL], - ), - "Moonshot-v1-128k": ModelConfig( - properties=ModelProperties(context_size=131072, max_tokens=65536, mode=LLMMode.CHAT), - features=[ModelFeature.TOOL_CALL], - ), - "GLM3-130B": ModelConfig( - properties=ModelProperties(context_size=8192, max_tokens=4096, mode=LLMMode.CHAT), - features=[ModelFeature.TOOL_CALL], - ), - "GLM3-130B-Fin": ModelConfig( - properties=ModelProperties(context_size=8192, max_tokens=4096, mode=LLMMode.CHAT), - features=[ModelFeature.TOOL_CALL], - ), - "Mistral-7B": ModelConfig( - properties=ModelProperties(context_size=8192, max_tokens=2048, mode=LLMMode.CHAT), features=[] - ), -} - - -def get_model_config(credentials: dict) -> ModelConfig: - base_model = credentials.get("base_model_name", "") - model_configs = configs.get(base_model) - if not model_configs: - return ModelConfig( - properties=ModelProperties( - context_size=int(credentials.get("context_size", 0)), - max_tokens=int(credentials.get("max_tokens", 0)), - mode=LLMMode.value_of(credentials.get("mode", "chat")), - ), - features=[], - ) - return model_configs - - -def get_v2_req_params(credentials: dict, model_parameters: dict, stop: list[str] | None = None): - req_params = {} - # predefined properties - model_configs = get_model_config(credentials) - if model_configs: - 
req_params["max_prompt_tokens"] = model_configs.properties.context_size - req_params["max_new_tokens"] = model_configs.properties.max_tokens - - # model parameters - if model_parameters.get("max_tokens"): - req_params["max_new_tokens"] = model_parameters.get("max_tokens") - if model_parameters.get("temperature"): - req_params["temperature"] = model_parameters.get("temperature") - if model_parameters.get("top_p"): - req_params["top_p"] = model_parameters.get("top_p") - if model_parameters.get("top_k"): - req_params["top_k"] = model_parameters.get("top_k") - if model_parameters.get("presence_penalty"): - req_params["presence_penalty"] = model_parameters.get("presence_penalty") - if model_parameters.get("frequency_penalty"): - req_params["frequency_penalty"] = model_parameters.get("frequency_penalty") - - if stop: - req_params["stop"] = stop - - return req_params - - -def get_v3_req_params(credentials: dict, model_parameters: dict, stop: list[str] | None = None): - req_params = {} - # predefined properties - model_configs = get_model_config(credentials) - if model_configs: - req_params["max_tokens"] = model_configs.properties.max_tokens - - # model parameters - if model_parameters.get("max_tokens"): - req_params["max_tokens"] = model_parameters.get("max_tokens") - if model_parameters.get("temperature"): - req_params["temperature"] = model_parameters.get("temperature") - if model_parameters.get("top_p"): - req_params["top_p"] = model_parameters.get("top_p") - if model_parameters.get("presence_penalty"): - req_params["presence_penalty"] = model_parameters.get("presence_penalty") - if model_parameters.get("frequency_penalty"): - req_params["frequency_penalty"] = model_parameters.get("frequency_penalty") - - if stop: - req_params["stop"] = stop - - return req_params diff --git a/api/core/model_runtime/model_providers/volcengine_maas/text_embedding/__init__.py b/api/core/model_runtime/model_providers/volcengine_maas/text_embedding/__init__.py deleted file mode 100644 index 
e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/volcengine_maas/text_embedding/models.py b/api/core/model_runtime/model_providers/volcengine_maas/text_embedding/models.py deleted file mode 100644 index ce4f0c3ab1960e..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/text_embedding/models.py +++ /dev/null @@ -1,28 +0,0 @@ -from pydantic import BaseModel - - -class ModelProperties(BaseModel): - context_size: int - max_chunks: int - - -class ModelConfig(BaseModel): - properties: ModelProperties - - -ModelConfigs = { - "Doubao-embedding": ModelConfig(properties=ModelProperties(context_size=4096, max_chunks=32)), -} - - -def get_model_config(credentials: dict) -> ModelConfig: - base_model = credentials.get("base_model_name", "") - model_configs = ModelConfigs.get(base_model) - if not model_configs: - return ModelConfig( - properties=ModelProperties( - context_size=int(credentials.get("context_size", 0)), - max_chunks=int(credentials.get("max_chunks", 0)), - ) - ) - return model_configs diff --git a/api/core/model_runtime/model_providers/volcengine_maas/volcengine_maas.py b/api/core/model_runtime/model_providers/volcengine_maas/volcengine_maas.py deleted file mode 100644 index 10f9be2d08e70d..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/volcengine_maas.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class VolcengineMaaSProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/volcengine_maas/volcengine_maas.yaml b/api/core/model_runtime/model_providers/volcengine_maas/volcengine_maas.yaml deleted file mode 100644 index 13e00da76fb149..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/volcengine_maas.yaml +++ /dev/null @@ -1,266 
+0,0 @@ -provider: volcengine_maas -label: - en_US: Volcengine -description: - en_US: Volcengine Ark models. - zh_Hans: 火山方舟提供的模型,例如 Doubao-pro-4k、Doubao-pro-32k 和 Doubao-pro-128k。 -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg - zh_Hans: icon_l_zh.svg -background: "#F9FAFB" -help: - title: - en_US: Get your Access Key and Secret Access Key from Volcengine Console - zh_Hans: 从火山引擎控制台获取您的 Access Key 和 Secret Access Key - url: - en_US: https://console.volcengine.com/iam/keymanage/ -supported_model_types: - - llm - - text-embedding -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your Model Name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: auth_method - required: true - label: - en_US: Authentication Method - zh_Hans: 鉴权方式 - type: select - default: aksk - options: - - label: - en_US: API Key - value: api_key - - label: - en_US: Access Key / Secret Access Key - value: aksk - placeholder: - en_US: Enter your Authentication Method - zh_Hans: 选择鉴权方式 - - variable: volc_access_key_id - required: true - show_on: - - variable: auth_method - value: aksk - label: - en_US: Access Key - zh_Hans: Access Key - type: secret-input - placeholder: - en_US: Enter your Access Key - zh_Hans: 输入您的 Access Key - - variable: volc_secret_access_key - required: true - show_on: - - variable: auth_method - value: aksk - label: - en_US: Secret Access Key - zh_Hans: Secret Access Key - type: secret-input - placeholder: - en_US: Enter your Secret Access Key - zh_Hans: 输入您的 Secret Access Key - - variable: volc_api_key - required: true - show_on: - - variable: auth_method - value: api_key - label: - en_US: API Key - type: secret-input - placeholder: - en_US: Enter your API Key - zh_Hans: 输入您的 API Key - - variable: volc_region - required: true - label: - en_US: Volcengine Region - zh_Hans: 火山引擎地域 - type: text-input - default: cn-beijing - placeholder: - en_US: 
Enter Volcengine Region - zh_Hans: 输入火山引擎地域 - - variable: api_endpoint_host - required: true - label: - en_US: API Endpoint Host - zh_Hans: API Endpoint Host - type: text-input - default: https://ark.cn-beijing.volces.com/api/v3 - placeholder: - en_US: Enter your API Endpoint Host - zh_Hans: 输入 API Endpoint Host - - variable: endpoint_id - required: true - label: - en_US: Endpoint ID - zh_Hans: Endpoint ID - type: text-input - placeholder: - en_US: Enter your Endpoint ID - zh_Hans: 输入您的 Endpoint ID - - variable: base_model_name - label: - en_US: Base Model - zh_Hans: 基础模型 - type: select - required: true - options: - - label: - en_US: Doubao-pro-4k - value: Doubao-pro-4k - show_on: - - variable: __model_type - value: llm - - label: - en_US: Doubao-lite-4k - value: Doubao-lite-4k - show_on: - - variable: __model_type - value: llm - - label: - en_US: Doubao-pro-32k - value: Doubao-pro-32k - show_on: - - variable: __model_type - value: llm - - label: - en_US: Doubao-lite-32k - value: Doubao-lite-32k - show_on: - - variable: __model_type - value: llm - - label: - en_US: Doubao-pro-128k - value: Doubao-pro-128k - show_on: - - variable: __model_type - value: llm - - label: - en_US: Doubao-lite-128k - value: Doubao-lite-128k - show_on: - - variable: __model_type - value: llm - - label: - en_US: Llama3-8B - value: Llama3-8B - show_on: - - variable: __model_type - value: llm - - label: - en_US: Llama3-70B - value: Llama3-70B - show_on: - - variable: __model_type - value: llm - - label: - en_US: Moonshot-v1-8k - value: Moonshot-v1-8k - show_on: - - variable: __model_type - value: llm - - label: - en_US: Moonshot-v1-32k - value: Moonshot-v1-32k - show_on: - - variable: __model_type - value: llm - - label: - en_US: Moonshot-v1-128k - value: Moonshot-v1-128k - show_on: - - variable: __model_type - value: llm - - label: - en_US: GLM3-130B - value: GLM3-130B - show_on: - - variable: __model_type - value: llm - - label: - en_US: GLM3-130B-Fin - value: GLM3-130B-Fin - show_on: - - 
variable: __model_type - value: llm - - label: - en_US: Mistral-7B - value: Mistral-7B - show_on: - - variable: __model_type - value: llm - - label: - en_US: Doubao-embedding - value: Doubao-embedding - show_on: - - variable: __model_type - value: text-embedding - - label: - en_US: Custom - zh_Hans: 自定义 - value: Custom - - variable: mode - required: true - show_on: - - variable: __model_type - value: llm - - variable: base_model_name - value: Custom - label: - zh_Hans: 模型类型 - en_US: Completion Mode - type: select - default: chat - placeholder: - zh_Hans: 选择对话类型 - en_US: Select Completion Mode - options: - - value: completion - label: - en_US: Completion - zh_Hans: 补全 - - value: chat - label: - en_US: Chat - zh_Hans: 对话 - - variable: context_size - required: true - show_on: - - variable: base_model_name - value: Custom - label: - zh_Hans: 模型上下文长度 - en_US: Model Context Size - type: text-input - default: "4096" - placeholder: - zh_Hans: 输入您的模型上下文长度 - en_US: Enter your Model Context Size - - variable: max_tokens - required: true - show_on: - - variable: __model_type - value: llm - - variable: base_model_name - value: Custom - label: - zh_Hans: 最大 token 上限 - en_US: Upper Bound for Max Tokens - default: "4096" - type: text-input - placeholder: - zh_Hans: 输入您的模型最大 token 上限 - en_US: Enter your model Upper Bound for Max Tokens diff --git a/api/core/model_runtime/model_providers/wenxin/__init__.py b/api/core/model_runtime/model_providers/wenxin/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/wenxin/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/wenxin/_assets/icon_l_en.png deleted file mode 100644 index fb50487cceaa78bd265acc25d84cf797ff402b04..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6615 zcmV;|87St7P)U900009a7bBm000&x z000&x0ZCFM@Bjb+0drDELIAGL9O(c600d`2O+f$vv5yPJK~#7F?Ol6t 
z9L1Ucx_frDdf6+#pLmUpA>dSONG_KO6|eKCTnL91h`9g>2onhA%3!$4qb|f=QO7+Z~GF3xrXAc~O6q67lM2OP{ zwIhs_GR{b3&W-^47!t)5O34p0&KW4FdtqDv2z(>q9jyIR1@ga#P8fge`4Ss~5FtXG z78pNaD8@>xN*wrcC1n&Gez5-oZB;ObFf>7U-se6G zrqJRT082iLA2ypA*wuY4OGAhdAx=3|MHq>cilry_&x$JZS`fxHU>IkBW!dzQvm(G_ zRq#A9;9F4kK3&f}$h9~Og9?TiQ@^m3^_`fAy!6WDECV4zgg9kTAz>&sM-izSPa}YD!9iLAyVMN;=&kE4AQ7|v|w=*MNkF~VK9dgm_mFKp2X(qz=rFg1nL&(a)pXu?VZhXmIDW2UmvxXK+)fR74c&4|{RX!xDyn z*4#I9&nqdPD?w3}sr?IhUFj>h@yC_v1w0D>z(K7==VYEMY2@7$o_ANYJ|YxaH6|AHDhoF6y^`Gp zAwq;WsVE|hS^wPi3R(;A0cH8DiM&<;+BIQ{MWQE^%I;QD{Cm&H)L%iW!f+^B8q=pA zYG{nsEoCb5b!_;P*k6Qs@-_LULo0<2$c6EIhga{Ghq`~kPNDG!QmG$II zfD;bYwgwJXs>O#f>aj;=GykQa@xI=#H&>o;mD*{{nsRhWM8sBsC9YRYoq?TdMMQDC zGv;=xh@9-sm9#}tW?EPe>MS0ci95zk7_oC&6Bx@$ddnhNxud^(M}JWL#rb=?wMSrzcc0k% zSVvG>$z%$i?d>dT&pqbcb63M#Stz((-g8VO+u-p26G_En!rgCWWkmwf>0b8deZeC@vPzWX|$0^D@dO^{Bf+x*AfaKjB?Syro3DlUZR zy!YOFJ13$aFR1zyPCEuJ;vXclyXOVdwbCsC?X667LH-= zaD+W@m9r1YBBK;LfLHD*42M46lb!txRJ!sMt%|wzN9XVu{{-XWv&>>EP{(LQ;?bd1 z-Jepg@67QCRLaB6JyzmKZ=K-i1)+YQ<6Gl1)aYN?YP#~ab@n=_-8sPezaB#EYe%t9k zBFUz+F1rT5WHPshss31^b$QSre&OI>m|^i-@O!NH6yq(TuBA5zqK!!K$8j*RjqXkf zf=FHy#*qUfc&k}$C~aTk?l&T0C%$|XN?;^Ah@mpE1#g3d+niEx85vC9quo4aV9J4d`q(do8xL z-QSNF2M!!yNKz@Rr>_qlL-*vZFbe2g@t6mkW4hYf+RhIe{9YV9co6V=taQ(bhz4Ja zj^V|eJxIIty{I-QpB=+<*W5qka!hKXebZ;QQ^P^@Dv-Kl*%- zEV@hegLUX7cMl=aUxS0cs=qBBv(PY%4NxN#5r#JXl|6v<#O`eP#fFhT9y|2fAAWuK zJLe35G8ukK{>dqB#3U$`md^k)kfCX&N$taDhC zhPzMY>(>$(eRbllfa17m7|Vj&1%LT2m|)=5OPEnH2Ft~UCNu?GeZ{S4 zxg@;>FZaAk*2NRPdVN;bx&y`s47RZiXw@WO9H4<2kH^WH*BodR}v4rVJ#MX(V10LPFe>I-hyrbeg@?r*-A5GEiAVg z8CgkmsC7KEw2`$^c&^M>+kW3bYuJA~LEPIFJDc z{#KgB!S-@D2i2oNPXm)?aHr@FG~v6ULiqc^*X`r&r>Ccf+Hvc3;P#-?_SH~5uuHZb z-K7MK6U7#WWAcN?4RFgWH`A&VQYjAa+dQ{n)l**@{=YrH$@cYs&B|oXl-X>I)|}+r z3Np21o-7pOCX&iQDXj-HBj%^O*EW8;rKkB#3e*&yk8+MD4FhTh5;^^4=ocr8Yx4I& zAcD%RBCEs7SY<5?yy?1Wz?IudiLrj*EWL58vo!U=EuXK9D>OH!Zm|1>*pn>w*Of`f#wg*1|HJ{fmfn|Ni|9ul;Z6Jr2k&6d+Sj1^oSx z8-V?!M(YRP-&LwT)Dz+kv5>~m4mM#Bl2y7TWMNh0E>(hJ!f@OXrTp%~Xl?+8G#-s# 
zfJ1HT(A#f*um6SJXCB%4YZ!Q`6Rg1#1@45V9qnQuVKU<|zWFU=(`#jR(%PO|<1dg= znwhef(Uk7GN6I}n_k3z-VCX@pG>dyss6S|3z^`BAszS0zH(-7pM0M6^_N^3g6cESk znhd=0_zvGS(XPT>?VWKm%5U*mPxwdSmqN|KHrP?Av18MwP2dQ4w2_su!8_ncfFyUt zbrHaEVaC!AY6TtGjyw6>?48kr`Y9zuk^t#-J8VH7IsTv;ZkRu8HG9j5e#E``wu!8&wi8Lsxwv; zHm3@$4y7hszbL{u1w>O=q~3dP>XvMT1kilcgP?`Z5~1aBfp^viSS$)aDCHjW5vT$f2HxQus|j;Fu1Ro|#1koj zoW`I-7NuK?vU7lm58$_|%avj5OCeE=^z?#4@=&@vft19ax1{wimNajui%hwvXZ_r_ zgPQbOJK7zQ9^IqXkeb2sr;anI_ePZh3Uvxr7gH`M5;gl#Xo7A_cQ(_x@sVFdOA?Mykv}jhlN&zk4xpJ0o&Z%+=T$<+IQ{gw{kTHwzJ2@16}j0Xbly>}4ct%ll3-g1T9BVNZ(i;_j3TW1F=x&k z>ho4dVR3Fv^Ra4A7(wAxZxWN&VzB-7$G3XxqYksmeILmRzZqRoI2FT}T}?|a zQzf^vF55X;8z`J^^|=z`a!p8NEfPTU(57`QT`eV97n7_YLV+V?y+2A*E$+GJ9%yK2 zAd)9bNYAuh0TN&Du14rA*H0`KqkcM^em0{;lfYNk(km;d9yxMECy3E|8sMm>Xp$h8 z$Lp5hzEw~m0<@gvK^jXX2oOE<7KQS>YSGgw?URU^vl*+41@CKW$|%eHrLx#n!zTv6 zv0u*o)1lOi6EF&-EIb$GxF)uHRQq2l@P}8C;`G2TvKZU}spNzCx_OI4ahdcreNx?# zB3EJ@u1P!*@AnU27n(T7lXDRa#iPt0+GUEX?c;S#rcRwo1b3VBaV5^QFX#@(Aas_) z^y$;7AJ>XlUQIv4!^1=dH#;9Yy!(*ws9k+sJdu`)K!k(scHrh-*A3NJDJ}vm4C!bM z?m)FaQ8Rn<8iQO3WPom3}{F!g2uC#Lfy>S5S=m|>gLXergN7; zZ1())<`iE2a&%J`pNm5rwTzOuC=RbOJB!m7@em`X{-YVDN#?D`jbSUC`UL}Xv~a2i zJ4iq*wG|1oW#B|P3iSuDs&Y*zn=_s6U+5P(Js=TuGt?MlLeFW4^!Xz%?)ZykrIQ2Vo_3_n=!7?bYpDGWhni=cXTIvCmfR&AkRk;wT zO=KG@J^n9xt%%rO?oyKoiu%GswN80|<&{SvzO3yKtMlzqaEW-1$S~eiUwUKQ!wu?BMV`)bt3-rTDNZ9i-UuMw;^#5*^>bL z6o5aiAQ*wE;dZi=EU++C1x|!3NHQc29-&|fv)81IF{+{6Bg=I~3f-g$FfeTRW^wFu zy*K9o$+Nv+KUX>3L)WuJ;edQM!l@33&`^IUbWMEOr=>vIoI_cw%@ zs_BWax7`j}R^#V-%C#3t#PkliCMLLaD3$id2ce+)c zCGe!DOpJ{|0}Ml$baNIlE)5vJ%RmBSm{cG$W1`~f^tDfG$*n1 z?s9quEyKrX%5t!CHIkHwRmn3i{Cy(2*Q{A{hvS;G($f9Wjw^m1hGh?Kdcu{`jKOT8 zQ1r+=$xF6m>9C}eg{f?^v7hay+h8Nr5k}gK>4$RY^PxA~eW(uTg|a5Ey@ug}!mHkA zG!?BFtQdC_*Rn~Mg)jVX zV_-|BmD4_Qu7gtfRP$<7&y?cBeQGHlKg~^3&z+0=K3n@dj2wzruWDdu^1b8&IfWq`NQ+v{uiu2*O{p zYl1U-LGHz9Q5&)j8X6ktI&?ZkH^Ug=OPe44T>S;!@MXcWaw#VpM#Jd1)qBRT%UcCxs+>7Gy 
zs*NhbsV4=4dmp>2l$Auls03uv!$qzL1Y~orZ%t4Z=vKb~N(#loV@bkt`^QjimuG=mSpe!{+S^wDZA~9$NBJl&yRSR^==UqAtdbVRUg`!1s3^`tHJB2oWO0NkwpMGP$#< zzBzIcj^FFiJUAZ-f>yl<+eLxAd*HeX?vE%to(p(g8QUefFKZ5cV{owhXiLjEXDiNE z3t?P|2cAh3rBl$sJ|@>Tw@(jAf_fgpKHfxE<>9O}AL+aE%zaS7Lx@9!2vJ*UJts_v6N9Yj<~QVP(Ir79vE5nq&0rrwg8oMCTk|5<&V%c<*df>yWAVePnj)VbIbH;(TPMlY8tO_pdA7BP{0J!`Wet0+o zBY!@=W$94}5hBF9iptX^KmEIwdS;!&x%e#V8&@GI%+$Z=u1NztZVJnZlsOlJWAvO0 zojVF8trIL4^ux|#1XYg?B#nDVVB~)Ww=O*xx*s7zoEoT{FdR%q;>Za2I1cPv(QSA? zx(yNLj%OZVVfcPbxuDFkP{_fYHpMb* zwP2Ck6gSRd%8G)+7kR?q9%0n!$|04UP$~qiGU>yp%1;by7LP)U>I7(HN`#?m!wXcyHnK_fCE2eD)hAa&9lGd4QW zN9;@h<6<%bu!qK*IMeA);Gv7uO!nPL$>X} zA#Noo$!hQf5U@zn`AzTV9pqUQ(^MO}8Iv#B(*9Y{@U2``%c68(g;8A{qeY8&pRJ3g zBe1GFfCK|Br1=w0W0&~ioBUx(Q^=Gqs=IMK|r>hGH@0a%I7qT*S#YsaMea@lVN@1Dw_zFrbuNo<+a=XbSPe zO2}q1zJ2nlcJh5?8LfP^iw=a;)DkW0EhaO33NC)G-X^%9k76rm`t%O~=+706zGlrD zBODGN27nwRgpinUPl?51FX5*R~Y zHtye9d&1oDm_lyHpiU*3be%b0K{9d;eezr*9S8~JFsht!6qe@WQ{`Msied7(lSJ2D z)bL6>eU7BCu#nB1IrCJX&-WzsrW7D)#P_ug4Gjwy?c-!Is0BIv&bLEWN5z+c%FDRn zJ535}yjD-4`eP1d>$*%5ldD#FrPyTj!fkrjj>%tR`HhW5SsMrpRn>#LelLJgel+tGThjsD2_}XgB5#|);_hIrrjuM$MRhi3r29gLhdez0gXyB^1@g? zz-pmOTCakQgs!BVJ0QW5Fxn~0a zM6ywJpH;-##UK@%@(R@aMC5ovZn$={#@flWkW8ZGG#&rn7jC1%C|E$l8?U;0rHBc$=n#~=ygW8{?%YZ! 
z_qUiv|R%V#T zkY$CSCER1CBox*Q#GU+~bc1W&PL};j%xB-x6KQW~`QiAs0lP-#98Tf-ni{$B-P3#C z;(jp~(E2u7Ih?tQrrLEt=zx*ogaIH^a$^iL*yY?uXGbl%6MNiE=?If2V(x&42TRk$9vVrKA0J5)n9_Fqd@@4sSYXn^Q0C>#A3B*3YCSAYEXti~8od6G4i zid!!tGa8~pQ2c&B!q8YT6#X11_heWDN>^Ytpe}+b!E(82nzKFkJ&iVc(1v2b1_p>B z?O&1fO_s4}f|6CBz>Q)$@FsU1|9t*VDPl{dMZaw7Eqnz4NQs`UpF*-&c5Kg<0mLUh zC@w3N()J~2V0F7iUrJa-$%1?UWCcd?lZlT-o?m@MXg#GWQeSbU?7n@B+q?$z(`W`k zo6Lb0_$VFOy?x;l1-INn^u$tu^(N}8M|oriMmbiEW#T0FoAmbfD&_0~NIt}ORLXrH ze!5mj33M<53)JMT_XCKqshY_cB`d4v{zw&wI|zy3Y}SZa^!J$P9^U$+oYWJ1bqmeX zf?d@2 zYirjc;J&`KwKYW(pLq!eg96qo;;r`^h$I_2%Uw)3iW)cal7MXX8T{NApc2nC#me5M zR4a3lbuSO?8qI_JY{rd?n9WXu-ehYRf_t&LbjOfTH7D#z%qCKSMCrN}p|nJZ56R?r zLy6ps3|uEhwP>&`X|Hzt{s(-2rj}onm}@ z{J5%`Jo58Pu_;YnLoJy_g2vf9`rbkr2H^5(bI72&c@{m{n+1z9 z3uCy7Sj^Oy(O~OnoN*&SvIijUe{o$Q6<2PjgGl-#RVY9Rmz_#33-Q^Q=@(nJyi-QI z-rDiAiJhsM@#4DerJ?K{7f90p!@#E@Yn~@bCR`}VoG&R7)GuNiwa#qW$K7JCVM%-X zYk1~kqUk@4ua{N(DX(P=EXr&r@KNn6OF)07X!i5e*q9pd%ff!j$@(55!wci>AghCx zUZzg@-?X_Ij8X?iT!kO&6QE;OfTaH@^f}v|5^Mk|XNcaCKvE>(5BH5{nCZhs%O|VE z*{{3VqF_ZBM;(VB-QXP)f{%{C!a)U)WT~sGqm3Ik=3pkDBvggQFbn#yKoYFJ`|i6{ z?A%AE`&$qdhjr zXlvlkxp9l-0Me4ScppQr!ahYTZc`-D-utFd&P53YZ2~`@Tk{w6H9N5jgPTN?sH+P} zrbVfhyTra3BG!2%#m`C)vI7Wyhc-`XO=R4c4KyU)hXhOK)c6sYIaasqMiCfT&!vbB z3xYVsv|}{xlb!HMF*8dKAfSUqB^KVKgH-Oj?>;tb)~qTh?wyDSiZT(8>`Bl^TSrI7 zkETzbzIgY%Nq-BXi5qfeaaSLiauIC1hugF{RdAI3d{}BhnLB%Ko@>dK%F%|{xx=Q4n3Yzr+ zYNpN5GL&88zk%BKgeY!zS z;uPXWS(1Adg}3wug#w@L@mRZ;6H>p91&P<*r06*+<k_iE1Fh5%s?~j!gE0?v3j%=-$|9a-Tt2m18Npx|(=)II*Yj zTr0H7&7DyGl@#mwrI3ZMG~IyN?s{n1J!{cfF@7=}tL9P7(j^TtVn7rpK?{hB88|gZ z$rzG=0pl!kT#!A(u%S4Yve|U(mJ@G}5EsOJOX!o>p0n99sAh=1{!`1<*U4Xet1p$~d6o8~JoeaQ{kHXYv25uOIPSxM8YRM=<-{HUa^xK)WSHNF=Fh|# zo66XTo)~p~x|WRWEaFv1V4~%Yb>dprBTQwMh4sf6>siorefh?IZCU_>DP3^7bg!a; zIb}|`d<#-IXB8Mu+aw>OVRT2;o#nY3hs=?0iwWh%PqI?^78VjsXe*I_acb04I}odq6?o62`psp@$w)6V#rAbj@OU&VHZL zbamVw?zx{$!sK+kzUiJQHwt-+-lnY-8eKs$TM;UTg5W>kPW=HtKa&+GYTWd1Y{0T) zZOt^RCf(UkH7S$4nG8rYdnX0DpFrEk=KVKrq3f>yFcV4n_o`%2$i8>l6~)EnztTnRbJujmx^opVgJL 
z55UYuQLJxC%IejtSvVXn$83HjR)@FNtgj%2YrGGTsPYaC5LI?EXU?1|xK{rW2n6zn zJ4i?S9r*fRuxJ0kX9yNcy0a+XJ&1N75f$y!O{=!bi^|rM9a7L5Q`? z)swLg_7|@2s4kFLOVal;7_gyTC7}!0vJ%+Rk{&?J9!wj)-K-7^knPU8#c<;rgNTW! z%S~r7H~SjI!Tl&@C);(s!OF;ztn5g@p+VP=k!`d0N*ZTMR6ZM*Qo3ssQ58A|&o zzH@09MU`!#j|lqO0Gd|-Bz=A#z=DW4d*y(`b+t5eGPsMCJK<>AyJZvgZ2FW$tk>-v z6Kzfo`3p;)GkeVMH~p+^Bc&<21m24~bb!v|6miBx0bugXKarhv5e9v#8;HPFWS0^T zF9YNAOoH)&_3*7a0b{ugD?w+FDsyfvq3o_Fz~rT-Sq9=XX$Jb92v5CC{rvMKy>Hq5 z_-@I3Na#APPMHEC``sAj_}XwvYW!|37=S6QKD1_ATbrCef4-_gdYlfhAQn(x3NSyU z11$Z0CY8B%OUVwR>|`K zq#v*f(?KdK4o0wSHZ?U(fcsM=rOfor_C%G%+i)vCZD?p%hdf9>6EI+@iBmx6m3_Ez z2+@%AsyJC+)G|oXV7h?pR|?nvUjIeEXIa`s$UOUxB9}{FbN-llReQ* z9;!Iu63PnY0qBSx*{nq2pmJ~IOJw_g2@8`>P^3;O8z&M>%A~8=Xm8%cvD?gri|w&k zTC#DuWH<6tmP=2n3yGP_8mjK=fXTdh^JvA26``V{qOU=LZ{#Y1_a3*%0Hc-M4whp8 z;e(ZxmG@z_b@zP-07w#Je?22pm{CQISYr6cc?FCAVQar?2Tx`z>v<##DVJO zM20JswdNUQ*DwAl&fz!cvBcFs<*(1i3~yjC-ZfK$S`OvApVt0veR`cJO9TaFMsko2 zMYh{0ec5exMNUR0C9GJnGPwx;6|3PX7vdjb?AKgJx&q2s#qS~9`(o)_b4zGHKP?$> ze-~`ZdZ?I8x&9$okpY!-;94y8?L8jO-Iu_*uMizf;GZDwbST+AW7Bx?xZ*TL69(Mv zRwk9it9U5WK}cNSe5`q$+Q$DBAiNFht}Bw1t#+Os+XS8aIkmF)i74G0r}Q{X1`8%_ z=*4;}^Vh!UZZxczmpdI`K)2Rn2r=ZjkQoL#pZ*?p=*>uss)&EK=Qq|1Q0 zF_6lE^ri=p9xJFHO3~vX*X8pv-LPsFA1=2PvTT_})m6{oB#y$^uj#M{v^7^tgL|HCr8J%5k;@oLO0tGbcYw31GP>t}JshJDmX z2Q5H6fdbq3tlzLnDy8rO0Y*J+@+~>`HF!}vwG4` z#T&|6c&wAU`L@eCr4yysNgipDQ*nHeK2|1;#aoUIQ=MJW`8h}&9tq{02XM?D&^V~s zm&tRH(fSS@QFAm+JU>l;zFEya`MX{IP6XY<@FH(@?e!CK$t;$d0Rt3hN$(Nh11K!R z*-OS!3ycR2(wP&GC#oQHY?8~kaYc@_cZsV$e6j1-Q?@xYmn(`(;Z8!&dj^rg6_#A4 zQ_;8U1zrKChVvuAP~B$Qv2(MpTK-JnyGvf$b?=ws z)TFH`SU0ifB^k+eB54SzlV(w*@nnO^3!yJxW0IA_D)}60mnB#=Rqh_I58FpkCEpv1 zCqqxCp@V5S>1pVr0^e?(Qk+ii25?@3a{*c*7#{ved;--sGjY5(M2*q6xk4_TsG`a z>t#xyhl-BXPaE#5+I4{3w8`f zI;P@#6RaKQqsf&;WE&M^$S=}o?0E9=FJYjgNxhlgE5(S5?zViLmJlt zte4_!i{mGzt6$}DP{gVDo&nRjo+k1YBx;tD$yZV=Ya8YHT4~us4zr*hi8Ui7P8S|k zh$LaabljjW(1AIa36h-vHRDSzcT4Qs~46chMUtsj>%FdrCvj{ zjSkF=sDOzALmLWU-Fo~`|#S0bdUUT+eSFdsv&(qjhi!mQuh~qAT!XaLf 
zc@A|-PT`uK@olwUh`tZUKBF)RJVC0)XmdS+=4o@NtS~Wcj0#z_>IDZN>rU>XNgpgH z@~N`RUu5OY^OsVuu|U5qMa3Sv^d-&6i)mL%?8A#L4yd@y zmCB;q_`DR0#a0t#gqG%Lx&pb7E4-xPU^@={g04lUcSfl0>A=j>mvb&=5%bM~aw%3P zK*F;il$6~T@z(=MmjTnKp}hcw8ewuC9G9Ov@f?`^YjF5KNmaH%mNVJLUl`}KspB5$ zy}pVXkQ7`fWMF=Fj#%1QH-0bwxQ>P6%*`01vIy<^JypcELT^8TB7cM|=PJr7`YawN zq$;fdN-MpLi2qd@Q!s`CK2XGnZ={5ei`a%C8_!r4K24ZSQg8di7q(IL%sY^uT!IU7 zI<93YITgk!YP-He&ilE@NUsR;Adu$fX1k)IVmtKeBUKK9O^>vP9YyfYklmkD8H}s5j+~0lv(e2ef9+(Mhaw+c5WGJ=C+;`^=_*bMnT2_5? zjgn8uFj~NlRs%eR@q6sO_QxxySIg>;eR7pOXP?2>e+4U#N2&LN_PeI;+g}W-o>@+| zzZ%vIKoJ0l)lT^(I{}E{_#>joZGR<;ibDky4Hm#MWr0E#WOuKk)i3OYvVyrPi{w%u zl>QqBgI0`6TiOn_zZrr=o>5RxP#B3s)J!kK;qaz=?zv|NWBdL}RmFsD+qP{~XJ_a5 zP$&e)#Mjou?AX#648K!cndSQ_k0VMwi`sK~8UrgOj#12g#VMGpjNjsa62cSd^Itz+? zJ@O)7f+_dKLCpuSAS@1&{@Yv&XGUS92#3!=ndlQLA5Rtw_TCr!Ke!5xntznVs5 zL=FH;i<1vsxOQl zkrCNXvd;j~GHpR9tElu$#3ok&2&aLVD^d@SdnF+Ww@bY&L@~mw6&djlY}=~$`-0E! z`t{V!BXP!v9E_3~fM`Jge)d~sSpj*R^qXI2Qk;*(URYN(sE!Oxz`$yl??_=kNq+u% z=ce|xpp-vXf01=W4lT(bKwPO?Xa-mM%elqQME3kTVB%P&9#-eH1vcWD7_;s#hy*t^ zz4)c!pjSs^L^3Lw2Z$%gp9Ctp$HAex0^ZhmLmCeS$@=?_7fx0W7>&47BXUT{{{wB$ V9l%)Y#!LVJ002ovPDHLkV1ij9WcdI9 diff --git a/api/core/model_runtime/model_providers/wenxin/_assets/icon_s_en.png b/api/core/model_runtime/model_providers/wenxin/_assets/icon_s_en.png deleted file mode 100644 index 923919958a156a0a470ec0bcd228226b70500217..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3350 zcmV+x4e9cUP)P#o}7F8c2CbTvpc)2w`z8} zd%EvE-#OoT+|!FN&SU-qG{LX+&Jix&frzs~z{9R5HZ<&k_E$3K!8i|uaT*Ozwxz4{ z6K4und=C(pvG7f1b|D53)D}Mv4?B+Bv3oakKn4|z(>Q(r8FEgTlRL=;yo3c`L8Q(> zArMHJ*-=OU$p;D!^4-r8s2BQsowr(V!G0KrF?Ik%*t~9F771}Be^v;fFY#=3Kt7sj zh5oC*gC$T3Z4IA~_JZ=C2jM)m$bnrOm!l8IF~$ZULt^%{zROYIEv)VH8O;<{G0Tg= zgoyT|Er1XKvey|*7az1JpbdRe{IP90w!;|5r~%Brujdq!>bpXUl`P0aQYryf-rx@y zt)m5C1dbg?=Msr6(gVJFlTX%qIoP)EM$Ez}M@0bUWGI!bJKC5a??#Z$_24)sfbR$5 zMp}lL)3J_NErgas0}h~wxB{fP9EPyRhA>F)Y)Ip(0kF#-U>X?J}-&2jM}DdbHo 
z`~^&vBu5piQ4uIz#Hgc%6w(*c=^=zH`^qG~oM5L$umI2k2rZ`lgFzFU;Uk$vP4}RI?n@i z7ZZ8&&3!|khl0gG13}EPUbZX3$H0SaE!V&yzNjh&FmvtpDz9hOxk5PC3fH}mlsC^T zptY`bC#_S)om5Ibm4oQOTYerNM@PQ8zdrR};0V+FNJpI#Rg3tpn^*};__4_*#6rw( zV&7&hp^YVu@Y&lS{0$;2U;OBLJTd|R!bzD!vl5_I3MX+b(_|k1oYIR$+zojQb}=0q zrAy4;7XD?uf`R91I~#U?m>FVv9kr$$oh_27#enKM6q<^Z43!co!stZ=&oB_4-{W}? za_-ooeDPQ^0dEhbRRmz_o%=7467NFkEMlRncmaV$n!!R8(GRfmv9`LI5t}{Nzc)M3 z-O~Bfd0jBfShkGhTNh_%iv)a!^Trw`)6CE%3bYahdKv$c#8&tCLL2$8wq+&$wj_YY zpY8pSwRs9Bq9NKYa2*VlkkC!^66JT0lD}^qoU)lqgUZ4mTVm?e_Ud#Zxe&>{lPA8M z*K;wJH;VitCWBEJQIGI)U;cPi)!uMQCp=I-EI`V~Vm8?DXKcX_8@mp0RN1Zaxjzx4 zA8os{w(OghT1&aU*AL%wQZHD0 zj5MZZK5wi_AHIql`3>gwd7O3Yj9Nmf5C&hQ3e;h$=yUY>PE-XtTf+=3oC+3T0AZL8 z3$Q>FG@umxx&4zjUWSd!D=NI^-%81B)BjEfNk0IYxB!%z!FeF6G>kTFu!$@lD9j^Y z?PP5>^F8$#*@OaEp^lMD0dFcCDt|33GRQRUfbj=zwM`J3||J<$5Yx_3|g zCAfW4do4l=XR;da@8AFFgGYDnXg;#_W$4-U z9{7E|(OhOeWd-1HLFgRjgI6J+U8C|7*0kO>aVJYixydb>=?I}bp=_Z%#RiB`$k*mk zhW}7@}cKPBc6q2o|^wkp#L;nPx3?$EH)XC_YrU}RG z!PYjYnm!9CUBgpJt)lmsE`Q;lH&rL=_Z|2?4wjlyYZ{IADOlQ2j=~bEG=RWNV7G#C zCkuR|>_FtQ1FE0|)0F42UUUdlU2K>qg$GZ2O8mP$qe(u@;IKy+6p&%XYNDiBuN(lM zVqq={(}qe4%I7TE(1fb97btt|9$nVdxvi52H^Yyrq(UT9&8Kg!!k_;K2isJ+KE6L_74Q|!&McTxCq^3S!E*Cy zE&6D7k`(Phf?6$)vK)Yz$A@9draoT{6GlBSF*`_jLFm%+7$M{s0qB4tZQ_a)X>&rc z{6lIXh-3)%00A-p98P`@qzd=U0FlEOJYg&zEdm`-gk6?mCATi8hpmNz1V&*D4R#wk z7nE0}4?!7j7r?M+16wW+jt)T9OPNiHkq0cL!$88U@I>)^1;H#p!)N=ke&H}37Q%J^ zN|#(w@02v(wYKdu#`HSxnou|jFXqk9KqoEa=j+Y06q6-9&U>W+P(fgGb5uYnH33Pk zswIC#`i2i0ZL%l~y@gg}OZIS~SZ1$mVEr8*)lSPDxQwZPJ2R+JDj(BN z=2q#qm?Bqj%U}ju)eS-?7NTOjy;BMR*!>LHof(PP{#5BcC{5uK_)4zu&+)nvmsel6 z^TGc0XS}21lZdKD3etU86pm0dG^}o!rg}fUiIug2Bi4!JsT4$|Ls$t1=lUH3ml&Mn zr78_T;Wo}lmvR{yC6KTdGS)ERDWBkt|4eY?)x2{(zjoET>qzB}66iv-&~2k|qbSNm zup9QKrcU&hatm?|$IG+1UaE;|b`dzgv7*3Ob40SDy!L!oc7I6#KGJIGXMe#j9+vR! 
z6Seb#&B&n;L2lq{IX}sin$MqKq0cRH^3sGsTO|B8MWA-XSlpiUlRb5!C%=GX?#GJ6 z*LVv#0sO3~oydn+$fW{}S(V($8(@m>B@15Hg2T-h^i?-HO)w)Y~zMENnXRyF^ z#&}_y9EbG%!=@}kq0bpMdxJ*rqj|6Y=Y4(qIwnmz`6PamvrI~7B|q4#Rc1;SDDXU5 z$5_wNEBEsnpRyyb&ns_3=iSW*U`R)#gn=_6x%A?B{F;kfA?`Q%{zW`*vPejTV>;8( zo)_$9$N&&euwvJ-gDzl!Zf4=W!oRsemm3SOpF&3!Rwtp>gP&age8)!mp=9Bd~7<|9`zJ(ny%osUMUh&UKRp_5AxU0tgaSaRE5OoNOXoZCr z9U&zIzrf=Sv8ijzT92@}}2h(8R%;A?&NFZ>)TXrNV`=j#|qPQsk3@U6Ce=z{eUtl0 z&p^Fd7I2VtyeW^~Lz4$m2U=e|Yh3r&vobNKni gx|wxcbSX9d4@ZukA*`%*ZvX%Q07*qoM6N<$g0pl#g8%>k diff --git a/api/core/model_runtime/model_providers/wenxin/_common.py b/api/core/model_runtime/model_providers/wenxin/_common.py deleted file mode 100644 index d72d1bd83a7e25..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/_common.py +++ /dev/null @@ -1,194 +0,0 @@ -from datetime import datetime, timedelta -from threading import Lock - -from requests import post - -from core.model_runtime.model_providers.wenxin.wenxin_errors import ( - BadRequestError, - InternalServerError, - InvalidAPIKeyError, - InvalidAuthenticationError, - RateLimitReachedError, -) - -baidu_access_tokens: dict[str, "BaiduAccessToken"] = {} -baidu_access_tokens_lock = Lock() - - -class BaiduAccessToken: - api_key: str - access_token: str - expires: datetime - - def __init__(self, api_key: str) -> None: - self.api_key = api_key - self.access_token = "" - self.expires = datetime.now() + timedelta(days=3) - - @staticmethod - def _get_access_token(api_key: str, secret_key: str) -> str: - """ - request access token from Baidu - """ - try: - response = post( - url=f"https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id={api_key}&client_secret={secret_key}", - headers={"Content-Type": "application/json", "Accept": "application/json"}, - ) - except Exception as e: - raise InvalidAuthenticationError(f"Failed to get access token from Baidu: {e}") - - resp = response.json() - if "error" in resp: - if resp["error"] == "invalid_client": - raise 
InvalidAPIKeyError(f'Invalid API key or secret key: {resp["error_description"]}') - elif resp["error"] == "unknown_error": - raise InternalServerError(f'Internal server error: {resp["error_description"]}') - elif resp["error"] == "invalid_request": - raise BadRequestError(f'Bad request: {resp["error_description"]}') - elif resp["error"] == "rate_limit_exceeded": - raise RateLimitReachedError(f'Rate limit reached: {resp["error_description"]}') - else: - raise Exception(f'Unknown error: {resp["error_description"]}') - - return resp["access_token"] - - @staticmethod - def get_access_token(api_key: str, secret_key: str) -> "BaiduAccessToken": - """ - LLM from Baidu requires access token to invoke the API. - however, we have api_key and secret_key, and access token is valid for 30 days. - so we can cache the access token for 3 days. (avoid memory leak) - - it may be more efficient to use a ticker to refresh access token, but it will cause - more complexity, so we just refresh access tokens when get_access_token is called. 
- """ - - # loop up cache, remove expired access token - baidu_access_tokens_lock.acquire() - now = datetime.now() - for key in list(baidu_access_tokens.keys()): - token = baidu_access_tokens[key] - if token.expires < now: - baidu_access_tokens.pop(key) - - if api_key not in baidu_access_tokens: - # if access token not in cache, request it - token = BaiduAccessToken(api_key) - baidu_access_tokens[api_key] = token - try: - # try to get access token - token_str = BaiduAccessToken._get_access_token(api_key, secret_key) - finally: - # release it to enhance performance - # btw, _get_access_token will raise exception if failed, release lock here to avoid deadlock - baidu_access_tokens_lock.release() - token.access_token = token_str - token.expires = now + timedelta(days=3) - return token - else: - # if access token in cache, return it - token = baidu_access_tokens[api_key] - baidu_access_tokens_lock.release() - return token - - -class _CommonWenxin: - api_bases = { - "ernie-bot": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-4k-0205", - "ernie-bot-4": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro", - "ernie-bot-8k": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions", - "ernie-bot-turbo": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant", - "ernie-3.5-8k": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions", - "ernie-3.5-8k-0205": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-8k-0205", - "ernie-3.5-8k-1222": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-8k-1222", - "ernie-3.5-4k-0205": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-4k-0205", - "ernie-3.5-128k": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-128k", - "ernie-4.0-8k": 
"https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro", - "ernie-4.0-8k-latest": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro", - "ernie-speed-8k": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_speed", - "ernie-speed-128k": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-speed-128k", - "ernie-speed-appbuilder": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ai_apaas", - "ernie-lite-8k-0922": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant", - "ernie-lite-8k-0308": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-lite-8k", - "ernie-character-8k": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-char-8k", - "ernie-character-8k-0321": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-char-8k", - "ernie-4.0-turbo-8k": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-4.0-turbo-8k", - "ernie-4.0-turbo-8k-preview": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-4.0-turbo-8k-preview", - "yi_34b_chat": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/yi_34b_chat", - "embedding-v1": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/embedding-v1", - "bge-large-en": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/bge_large_en", - "bge-large-zh": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/bge_large_zh", - "tao-8k": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/tao_8k", - } - - function_calling_supports = [ - "ernie-bot", - "ernie-bot-8k", - "ernie-3.5-8k", - "ernie-3.5-8k-0205", - "ernie-3.5-8k-1222", - "ernie-3.5-4k-0205", - "ernie-3.5-128k", - "ernie-4.0-8k", - "ernie-4.0-turbo-8k", - "ernie-4.0-turbo-8k-preview", - "yi_34b_chat", - ] - - api_key: str = "" - secret_key: 
str = "" - - def __init__(self, api_key: str, secret_key: str): - self.api_key = api_key - self.secret_key = secret_key - - @staticmethod - def _to_credential_kwargs(credentials: dict) -> dict: - credentials_kwargs = {"api_key": credentials["api_key"], "secret_key": credentials["secret_key"]} - return credentials_kwargs - - def _handle_error(self, code: int, msg: str): - error_map = { - 1: InternalServerError, - 2: InternalServerError, - 3: BadRequestError, - 4: RateLimitReachedError, - 6: InvalidAuthenticationError, - 13: InvalidAPIKeyError, - 14: InvalidAPIKeyError, - 15: InvalidAPIKeyError, - 17: RateLimitReachedError, - 18: RateLimitReachedError, - 19: RateLimitReachedError, - 100: InvalidAPIKeyError, - 111: InvalidAPIKeyError, - 200: InternalServerError, - 336000: InternalServerError, - 336001: BadRequestError, - 336002: BadRequestError, - 336003: BadRequestError, - 336004: InvalidAuthenticationError, - 336005: InvalidAPIKeyError, - 336006: BadRequestError, - 336007: BadRequestError, - 336008: BadRequestError, - 336100: InternalServerError, - 336101: BadRequestError, - 336102: BadRequestError, - 336103: BadRequestError, - 336104: BadRequestError, - 336105: BadRequestError, - 336200: InternalServerError, - 336303: BadRequestError, - 337006: BadRequestError, - } - - if code in error_map: - raise error_map[code](msg) - else: - raise InternalServerError(f"Unknown error: {msg}") - - def _get_access_token(self) -> str: - token = BaiduAccessToken.get_access_token(self.api_key, self.secret_key) - return token.access_token diff --git a/api/core/model_runtime/model_providers/wenxin/llm/__init__.py b/api/core/model_runtime/model_providers/wenxin/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-128k.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-128k.yaml deleted file mode 100644 index b1b1ba1f69a0fb..00000000000000 --- 
a/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-128k.yaml +++ /dev/null @@ -1,37 +0,0 @@ -model: ernie-3.5-128k -label: - en_US: Ernie-3.5-128K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 4096 - min: 2 - max: 4096 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. - required: false diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-4k-0205.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-4k-0205.yaml deleted file mode 100644 index 1e8cf96440e71e..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-4k-0205.yaml +++ /dev/null @@ -1,38 +0,0 @@ -model: ernie-3.5-4k-0205 -label: - en_US: Ernie-3.5-4k-0205 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 2048 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false -deprecated: true diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k-0205.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k-0205.yaml deleted file mode 100644 index b308abcb323f3d..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k-0205.yaml +++ /dev/null @@ -1,38 +0,0 @@ -model: ernie-3.5-8k-0205 -label: - en_US: Ernie-3.5-8K-0205 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 2048 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false -deprecated: true diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k-1222.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k-1222.yaml deleted file mode 100644 index c43588cfe11ea7..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k-1222.yaml +++ /dev/null @@ -1,38 +0,0 @@ -model: ernie-3.5-8k-1222 -label: - en_US: Ernie-3.5-8K-1222 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 2048 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false -deprecated: true diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k.yaml deleted file mode 100644 index 145844a4fffe85..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-3.5-8k.yaml +++ /dev/null @@ -1,40 +0,0 @@ -model: ernie-3.5-8k -label: - en_US: Ernie-3.5-8K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 2048 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-8k-latest.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-8k-latest.yaml deleted file mode 100644 index d23ae0dc48c12f..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-8k-latest.yaml +++ /dev/null @@ -1,40 +0,0 @@ -model: ernie-4.0-8k-latest -label: - en_US: Ernie-4.0-8K-Latest -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 2048 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-8k.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-8k.yaml deleted file mode 100644 index 9ebb5c8c4fe36b..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-8k.yaml +++ /dev/null @@ -1,40 +0,0 @@ -model: ernie-4.0-8k -label: - en_US: Ernie-4.0-8K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 2048 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k-preview.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k-preview.yaml deleted file mode 100644 index 16df54022021b1..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k-preview.yaml +++ /dev/null @@ -1,40 +0,0 @@ -model: ernie-4.0-turbo-8k-preview -label: - en_US: Ernie-4.0-turbo-8k-preview -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 2048 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k.yaml deleted file mode 100644 index 2887a510d05157..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k.yaml +++ /dev/null @@ -1,40 +0,0 @@ -model: ernie-4.0-turbo-8k -label: - en_US: Ernie-4.0-turbo-8K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 2048 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-4.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-4.yaml deleted file mode 100644 index f352787aecb279..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-4.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: ernie-bot-4 -label: - en_US: Ernie Bot 4 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4800 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - required: true - default: 256 - min: 1 - max: 4800 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false -deprecated: true diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-8k.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-8k.yaml deleted file mode 100644 index fa4b7dd800c11b..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-8k.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: ernie-bot-8k -label: - en_US: Ernie Bot 8k -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - required: true - default: 1024 - min: 1 - max: 8000 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false -deprecated: true diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-turbo.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-turbo.yaml deleted file mode 100644 index c94aa2db88646a..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot-turbo.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: ernie-bot-turbo -label: - en_US: Ernie Bot Turbo -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 11200 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - required: true - default: 1024 - min: 1 - max: 11200 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: response_format - use_template: response_format -deprecated: true diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot.yaml deleted file mode 100644 index 13985b74831e0d..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-bot.yaml +++ /dev/null @@ -1,39 +0,0 @@ -model: ernie-bot -label: - en_US: Ernie Bot -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4800 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.8 - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - required: true - default: 256 - min: 1 - max: 4800 - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: disable_search - label: - zh_Hans: 禁用搜索 - en_US: Disable Search - type: boolean - help: - zh_Hans: 禁用模型自行进行外部搜索。 - en_US: Disable the model to perform external search. 
- required: false - - name: response_format - use_template: response_format -deprecated: true diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-character-8k-0321.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-character-8k-0321.yaml deleted file mode 100644 index 74451ff9e356e6..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-character-8k-0321.yaml +++ /dev/null @@ -1,31 +0,0 @@ -model: ernie-character-8k-0321 -label: - en_US: ERNIE-Character-8K-0321 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.95 - - name: top_p - use_template: top_p - min: 0 - max: 1.0 - default: 0.7 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 1024 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 -deprecated: true diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-character-8k.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-character-8k.yaml deleted file mode 100644 index 4b11b3e895be9f..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-character-8k.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: ernie-character-8k-0321 -label: - en_US: ERNIE-Character-8K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.95 - - name: top_p - use_template: top_p - min: 0 - max: 1.0 - default: 0.7 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 1024 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-8k-0308.yaml 
b/api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-8k-0308.yaml deleted file mode 100644 index 97ecb03f87b623..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-8k-0308.yaml +++ /dev/null @@ -1,31 +0,0 @@ -model: ernie-lite-8k-0308 -label: - en_US: ERNIE-Lite-8K-0308 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.95 - - name: top_p - use_template: top_p - min: 0 - max: 1.0 - default: 0.7 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 2048 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 -deprecated: true diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-8k-0922.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-8k-0922.yaml deleted file mode 100644 index 7410ce51df00ef..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-8k-0922.yaml +++ /dev/null @@ -1,31 +0,0 @@ -model: ernie-lite-8k-0922 -label: - en_US: ERNIE-Lite-8K-0922 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.95 - - name: top_p - use_template: top_p - min: 0 - max: 1.0 - default: 0.7 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 1024 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 -deprecated: true diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-128k.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-128k.yaml deleted file mode 100644 index 331639624ccbbb..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-128k.yaml +++ /dev/null @@ -1,30 +0,0 @@ 
-model: ernie-speed-128k -label: - en_US: ERNIE-Speed-128K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 128000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.95 - - name: top_p - use_template: top_p - min: 0 - max: 1.0 - default: 0.7 - - name: max_tokens - use_template: max_tokens - default: 4096 - min: 2 - max: 4096 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-8k.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-8k.yaml deleted file mode 100644 index 304c6d1f7ed8de..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-8k.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: ernie-speed-8k -label: - en_US: ERNIE-Speed-8K -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.95 - - name: top_p - use_template: top_p - min: 0 - max: 1.0 - default: 0.7 - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 2 - max: 2048 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-appbuilder.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-appbuilder.yaml deleted file mode 100644 index c254ae02604c7b..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie-speed-appbuilder.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: ernie-speed-appbuilder -label: - en_US: ERNIE-Speed-AppBuilder -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.95 - - name: top_p 
- use_template: top_p - min: 0 - max: 1.0 - default: 0.7 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie_bot.py b/api/core/model_runtime/model_providers/wenxin/llm/ernie_bot.py deleted file mode 100644 index 07b970f8104c8f..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/ernie_bot.py +++ /dev/null @@ -1,245 +0,0 @@ -from collections.abc import Generator -from enum import Enum -from json import dumps, loads -from typing import Any, Union - -from requests import Response, post - -from core.model_runtime.entities.message_entities import PromptMessageTool -from core.model_runtime.model_providers.wenxin._common import _CommonWenxin -from core.model_runtime.model_providers.wenxin.wenxin_errors import ( - BadRequestError, - InternalServerError, -) - - -class ErnieMessage: - class Role(Enum): - USER = "user" - ASSISTANT = "assistant" - FUNCTION = "function" - SYSTEM = "system" - - role: str = Role.USER.value - content: str - usage: dict[str, int] = None - stop_reason: str = "" - - def to_dict(self) -> dict[str, Any]: - return { - "role": self.role, - "content": self.content, - } - - def __init__(self, content: str, role: str = "user") -> None: - self.content = content - self.role = role - - -class ErnieBotModel(_CommonWenxin): - def generate( - self, - model: str, - stream: bool, - messages: list[ErnieMessage], - parameters: dict[str, Any], - timeout: int, - tools: list[PromptMessageTool], - stop: list[str], - user: str, - ) -> Union[Generator[ErnieMessage, None, None], ErnieMessage]: - # check parameters - self._check_parameters(model, parameters, tools, stop) - - # get access token - access_token = self._get_access_token() - - # generate request body - url = f"{self.api_bases[model]}?access_token={access_token}" - - # clone messages - messages_cloned = self._copy_messages(messages=messages) - - # build body - body = 
self._build_request_body( - model, messages=messages_cloned, stream=stream, parameters=parameters, tools=tools, stop=stop, user=user - ) - headers = { - "Content-Type": "application/json", - } - - resp = post(url=url, data=dumps(body), headers=headers, stream=stream) - - if resp.status_code != 200: - raise InternalServerError(f"Failed to invoke ernie bot: {resp.text}") - - if stream: - return self._handle_chat_stream_generate_response(resp) - return self._handle_chat_generate_response(resp) - - def _copy_messages(self, messages: list[ErnieMessage]) -> list[ErnieMessage]: - return [ErnieMessage(message.content, message.role) for message in messages] - - def _check_parameters( - self, model: str, parameters: dict[str, Any], tools: list[PromptMessageTool], stop: list[str] - ) -> None: - if model not in self.api_bases: - raise BadRequestError(f"Invalid model: {model}") - - # if model not in self.function_calling_supports and tools is not None and len(tools) > 0: - # raise BadRequestError(f'Model {model} does not support calling function.') - # ErnieBot supports function calling, however, there is lots of limitations. - # such as, the messages should be ordered as user by assistant or function... - # so, we just disable function calling for now. 
- - if tools is not None and len(tools) > 0: - raise BadRequestError("function calling is not supported yet.") - - if stop is not None: - if len(stop) > 4: - raise BadRequestError("stop list should not exceed 4 items.") - - for s in stop: - if len(s) > 20: - raise BadRequestError("stop item should not exceed 20 characters.") - - def _build_request_body( - self, - model: str, - messages: list[ErnieMessage], - stream: bool, - parameters: dict[str, Any], - tools: list[PromptMessageTool], - stop: list[str], - user: str, - ) -> dict[str, Any]: - # if model in self.function_calling_supports: - # return self._build_function_calling_request_body(model, messages, parameters, tools, stop, user) - return self._build_chat_request_body(model, messages, stream, parameters, stop, user) - - def _build_function_calling_request_body( - self, - model: str, - messages: list[ErnieMessage], - stream: bool, - parameters: dict[str, Any], - tools: list[PromptMessageTool], - stop: list[str], - user: str, - ) -> dict[str, Any]: - if len(messages) % 2 == 0: - raise BadRequestError("The number of messages should be odd.") - if messages[0].role == "function": - raise BadRequestError("The first message should be user message.") - - """ - TODO: implement function calling - """ - - def _build_chat_request_body( - self, - model: str, - messages: list[ErnieMessage], - stream: bool, - parameters: dict[str, Any], - stop: list[str], - user: str, - ) -> dict[str, Any]: - if len(messages) == 0: - raise BadRequestError("The number of messages should not be zero.") - - # check if the first element is system, shift it - system_message = "" - if messages[0].role == "system": - message = messages.pop(0) - system_message = message.content - - if len(messages) % 2 == 0: - raise BadRequestError("The number of messages should be odd.") - if messages[0].role != "user": - raise BadRequestError("The first message should be user message.") - body = { - "messages": [message.to_dict() for message in messages], - 
"stream": stream, - "stop": stop, - "user_id": user, - **parameters, - } - - if "max_tokens" in parameters and type(parameters["max_tokens"]) == int: - body["max_output_tokens"] = parameters["max_tokens"] - - if "presence_penalty" in parameters and type(parameters["presence_penalty"]) == float: - body["penalty_score"] = parameters["presence_penalty"] - - if system_message: - body["system"] = system_message - - return body - - def _handle_chat_generate_response(self, response: Response) -> ErnieMessage: - data = response.json() - if "error_code" in data: - code = data["error_code"] - msg = data["error_msg"] - # raise error - self._handle_error(code, msg) - - result = data["result"] - usage = data["usage"] - - message = ErnieMessage(content=result, role="assistant") - message.usage = { - "prompt_tokens": usage["prompt_tokens"], - "completion_tokens": usage["completion_tokens"], - "total_tokens": usage["total_tokens"], - } - - return message - - def _handle_chat_stream_generate_response(self, response: Response) -> Generator[ErnieMessage, None, None]: - for line in response.iter_lines(): - if len(line) == 0: - continue - line = line.decode("utf-8") - if line[0] == "{": - try: - data = loads(line) - if "error_code" in data: - code = data["error_code"] - msg = data["error_msg"] - # raise error - self._handle_error(code, msg) - except Exception as e: - raise InternalServerError(f"Failed to parse response: {e}") - - if line.startswith("data:"): - line = line[5:].strip() - else: - continue - - if not line: - continue - try: - data = loads(line) - except Exception as e: - raise InternalServerError(f"Failed to parse response: {e}") - - result = data["result"] - is_end = data["is_end"] - - if is_end: - usage = data["usage"] - finish_reason = data.get("finish_reason", None) - message = ErnieMessage(content=result, role="assistant") - message.usage = { - "prompt_tokens": usage["prompt_tokens"], - "completion_tokens": usage["completion_tokens"], - "total_tokens": 
usage["total_tokens"], - } - message.stop_reason = finish_reason - - yield message - else: - message = ErnieMessage(content=result, role="assistant") - yield message diff --git a/api/core/model_runtime/model_providers/wenxin/llm/llm.py b/api/core/model_runtime/model_providers/wenxin/llm/llm.py deleted file mode 100644 index f7c160b6b47b79..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/llm.py +++ /dev/null @@ -1,316 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union, cast - -from core.model_runtime.callbacks.base_callback import Callback -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import ( - InvokeError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.wenxin._common import BaiduAccessToken -from core.model_runtime.model_providers.wenxin.llm.ernie_bot import ErnieBotModel, ErnieMessage -from core.model_runtime.model_providers.wenxin.wenxin_errors import invoke_error_mapping - -ERNIE_BOT_BLOCK_MODE_PROMPT = """You should always follow the instructions and output a valid {{block}} object. -The structure of the {{block}} object you can found in the instructions, use {"answer": "$your_answer"} as the default structure -if you are not sure about the structure. - - -{{instructions}} - - -You should also complete the text started with ``` but not tell ``` directly. 
-""" # noqa: E501 - - -class ErnieBotLargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - return self._generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ) - - def _code_block_mode_wrapper( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - callbacks: list[Callback] = None, - ) -> Union[LLMResult, Generator]: - """ - Code block mode wrapper for invoking large language model - """ - if "response_format" in model_parameters and model_parameters["response_format"] in {"JSON", "XML"}: - response_format = model_parameters["response_format"] - stop = stop or [] - self._transform_json_prompts( - model, credentials, prompt_messages, model_parameters, tools, stop, stream, user, response_format - ) - model_parameters.pop("response_format") - if stream: - return self._code_block_mode_stream_processor( - model=model, - prompt_messages=prompt_messages, - input_generator=self._invoke( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - ), - ) - - return self._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def _transform_json_prompts( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = 
None, - stream: bool = True, - user: str | None = None, - response_format: str = "JSON", - ) -> None: - """ - Transform json prompts to model prompts - """ - - # check if there is a system message - if len(prompt_messages) > 0 and isinstance(prompt_messages[0], SystemPromptMessage): - # override the system message - prompt_messages[0] = SystemPromptMessage( - content=ERNIE_BOT_BLOCK_MODE_PROMPT.replace("{{instructions}}", prompt_messages[0].content).replace( - "{{block}}", response_format - ) - ) - else: - # insert the system message - prompt_messages.insert( - 0, - SystemPromptMessage( - content=ERNIE_BOT_BLOCK_MODE_PROMPT.replace( - "{{instructions}}", f"Please output a valid {response_format} object." - ).replace("{{block}}", response_format) - ), - ) - - if len(prompt_messages) > 0 and isinstance(prompt_messages[-1], UserPromptMessage): - # add ```JSON\n to the last message - prompt_messages[-1].content += "\n```JSON\n{\n" - else: - # append a user message - prompt_messages.append(UserPromptMessage(content="```JSON\n{\n")) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool] | None = None, - ) -> int: - # tools is not supported yet - return self._num_tokens_from_messages(prompt_messages) - - def _num_tokens_from_messages( - self, - messages: list[PromptMessage], - ) -> int: - """Calculate num tokens for baichuan model""" - - def tokens(text: str): - return self._get_num_tokens_by_gpt2(text) - - tokens_per_message = 3 - - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - num_tokens += tokens(str(value)) - num_tokens += 3 - - return num_tokens - - def 
validate_credentials(self, model: str, credentials: dict) -> None: - api_key = credentials["api_key"] - secret_key = credentials["secret_key"] - try: - BaiduAccessToken.get_access_token(api_key, secret_key) - except Exception as e: - raise CredentialsValidateFailedError(f"Credentials validation failed: {e}") - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - instance = ErnieBotModel( - api_key=credentials["api_key"], - secret_key=credentials["secret_key"], - ) - - user = user or "ErnieBotDefault" - - # convert prompt messages to baichuan messages - messages = [ - ErnieMessage( - content=message.content - if isinstance(message.content, str) - else "".join([content.data for content in message.content]), - role=message.role.value, - ) - for message in prompt_messages - ] - - # invoke model - response = instance.generate( - model=model, - stream=stream, - messages=messages, - parameters=model_parameters, - timeout=60, - tools=tools, - stop=stop, - user=user, - ) - - if stream: - return self._handle_chat_generate_stream_response(model, prompt_messages, credentials, response) - else: - return self._handle_chat_generate_response(model, prompt_messages, credentials, response) - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict for Baichuan - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - raise ValueError("User message content must be str") - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - elif 
isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - else: - raise ValueError(f"Unknown message type {type(message)}") - - return message_dict - - def _handle_chat_generate_response( - self, model: str, prompt_messages: list[PromptMessage], credentials: dict, response: ErnieMessage - ) -> LLMResult: - # convert baichuan message to llm result - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=response.usage["prompt_tokens"], - completion_tokens=response.usage["completion_tokens"], - ) - return LLMResult( - model=model, - prompt_messages=prompt_messages, - message=AssistantPromptMessage(content=response.content, tool_calls=[]), - usage=usage, - ) - - def _handle_chat_generate_stream_response( - self, - model: str, - prompt_messages: list[PromptMessage], - credentials: dict, - response: Generator[ErnieMessage, None, None], - ) -> Generator: - for message in response: - if message.usage: - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=message.usage["prompt_tokens"], - completion_tokens=message.usage["completion_tokens"], - ) - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=message.content, tool_calls=[]), - usage=usage, - finish_reason=message.stop_reason or None, - ), - ) - else: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=0, - message=AssistantPromptMessage(content=message.content, tool_calls=[]), - finish_reason=message.stop_reason or None, - ), - ) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs 
to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return invoke_error_mapping() diff --git a/api/core/model_runtime/model_providers/wenxin/llm/yi_34b_chat.yaml b/api/core/model_runtime/model_providers/wenxin/llm/yi_34b_chat.yaml deleted file mode 100644 index 0b247fbd223dac..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/llm/yi_34b_chat.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: yi_34b_chat -label: - en_US: yi_34b_chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32000 -parameter_rules: - - name: temperature - use_template: temperature - min: 0.1 - max: 1.0 - default: 0.95 - - name: top_p - use_template: top_p - min: 0 - max: 1.0 - default: 0.7 - - name: max_tokens - use_template: max_tokens - default: 4096 - min: 2 - max: 4096 - - name: presence_penalty - use_template: presence_penalty - default: 1.0 - min: 1.0 - max: 2.0 diff --git a/api/core/model_runtime/model_providers/wenxin/text_embedding/__init__.py b/api/core/model_runtime/model_providers/wenxin/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/wenxin/text_embedding/bge-large-en.yaml b/api/core/model_runtime/model_providers/wenxin/text_embedding/bge-large-en.yaml deleted file mode 100644 index 74fadb7f9de60f..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/text_embedding/bge-large-en.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: bge-large-en -model_type: text-embedding -model_properties: - context_size: 512 - max_chunks: 16 -pricing: - input: '0.0005' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/wenxin/text_embedding/bge-large-zh.yaml b/api/core/model_runtime/model_providers/wenxin/text_embedding/bge-large-zh.yaml deleted file mode 100644 index d4af27ec389a66..00000000000000 --- 
a/api/core/model_runtime/model_providers/wenxin/text_embedding/bge-large-zh.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: bge-large-zh -model_type: text-embedding -model_properties: - context_size: 512 - max_chunks: 16 -pricing: - input: '0.0005' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/wenxin/text_embedding/embedding-v1.yaml b/api/core/model_runtime/model_providers/wenxin/text_embedding/embedding-v1.yaml deleted file mode 100644 index eda48d965533e5..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/text_embedding/embedding-v1.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: embedding-v1 -model_type: text-embedding -model_properties: - context_size: 384 - max_chunks: 16 -pricing: - input: '0.0005' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/wenxin/text_embedding/tao-8k.yaml b/api/core/model_runtime/model_providers/wenxin/text_embedding/tao-8k.yaml deleted file mode 100644 index e28f253eb6b861..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/text_embedding/tao-8k.yaml +++ /dev/null @@ -1,9 +0,0 @@ -model: tao-8k -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 1 -pricing: - input: '0.0005' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/wenxin/wenxin.py b/api/core/model_runtime/model_providers/wenxin/wenxin.py deleted file mode 100644 index 895af20bc8541d..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/wenxin.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class WenxinProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider 
credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `ernie-bot` model for validate, - model_instance.validate_credentials(model="ernie-bot", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/wenxin/wenxin.yaml b/api/core/model_runtime/model_providers/wenxin/wenxin.yaml deleted file mode 100644 index 6a6b38e6a1b7ab..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/wenxin.yaml +++ /dev/null @@ -1,40 +0,0 @@ -provider: wenxin -label: - en_US: WenXin - zh_Hans: 文心一言 -icon_small: - en_US: icon_s_en.png - zh_Hans: icon_s_en.png -icon_large: - en_US: icon_l_en.png - zh_Hans: icon_l_zh.png -background: "#E8F5FE" -help: - title: - en_US: Get your API Key from WenXin - zh_Hans: 从文心一言获取您的 API Key - url: - en_US: https://cloud.baidu.com/wenxin.html -supported_model_types: - - llm - - text-embedding -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: secret_key - label: - en_US: Secret Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 Secret Key - en_US: Enter your Secret Key diff --git a/api/core/model_runtime/model_providers/wenxin/wenxin_errors.py b/api/core/model_runtime/model_providers/wenxin/wenxin_errors.py deleted file mode 100644 index bd074e047717ac..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/wenxin_errors.py +++ /dev/null @@ -1,54 +0,0 @@ -from core.model_runtime.errors.invoke import ( - 
InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) - - -def invoke_error_mapping() -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [InternalServerError], - InvokeRateLimitError: [RateLimitReachedError], - InvokeAuthorizationError: [ - InvalidAuthenticationError, - InsufficientAccountBalanceError, - InvalidAPIKeyError, - ], - InvokeBadRequestError: [BadRequestError, KeyError], - } - - -class InvalidAuthenticationError(Exception): - pass - - -class InvalidAPIKeyError(Exception): - pass - - -class RateLimitReachedError(Exception): - pass - - -class InsufficientAccountBalanceError(Exception): - pass - - -class InternalServerError(Exception): - pass - - -class BadRequestError(Exception): - pass diff --git a/api/core/model_runtime/model_providers/xinference/__init__.py b/api/core/model_runtime/model_providers/xinference/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/xinference/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/xinference/_assets/icon_l_en.svg deleted file mode 100644 index 8109176543385c..00000000000000 --- a/api/core/model_runtime/model_providers/xinference/_assets/icon_l_en.svg +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/xinference/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/xinference/_assets/icon_s_en.svg deleted file mode 100644 index f5c5f75ea817ff..00000000000000 --- 
a/api/core/model_runtime/model_providers/xinference/_assets/icon_s_en.svg +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/xinference/llm/__init__.py b/api/core/model_runtime/model_providers/xinference/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/xinference/llm/llm.py b/api/core/model_runtime/model_providers/xinference/llm/llm.py deleted file mode 100644 index 286640079b02a9..00000000000000 --- a/api/core/model_runtime/model_providers/xinference/llm/llm.py +++ /dev/null @@ -1,816 +0,0 @@ -from collections.abc import Generator, Iterator -from typing import cast - -from openai import ( - APIConnectionError, - APITimeoutError, - AuthenticationError, - ConflictError, - InternalServerError, - NotFoundError, - OpenAI, - PermissionDeniedError, - RateLimitError, - UnprocessableEntityError, -) -from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessageToolCall -from openai.types.chat.chat_completion_chunk import ChoiceDeltaFunctionCall, ChoiceDeltaToolCall -from openai.types.chat.chat_completion_message import FunctionCall -from openai.types.completion import Completion -from xinference_client.client.restful.restful_client import ( - Client, - RESTfulChatModelHandle, - RESTfulGenerateModelHandle, -) - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContent, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - DefaultParameterName, - FetchFrom, - ModelFeature, - 
ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.xinference.xinference_helper import ( - XinferenceHelper, - XinferenceModelExtraParameter, -) -from core.model_runtime.utils import helper - - -class XinferenceAILargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - """ - invoke LLM - - see `core.model_runtime.model_providers.__base.large_language_model.LargeLanguageModel._invoke` - """ - if "temperature" in model_parameters: - if model_parameters["temperature"] < 0.01: - model_parameters["temperature"] = 0.01 - elif model_parameters["temperature"] > 1.0: - model_parameters["temperature"] = 0.99 - - return self._generate( - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - model_parameters=model_parameters, - tools=tools, - stop=stop, - stream=stream, - user=user, - extra_model_kwargs=XinferenceHelper.get_xinference_extra_parameter( - server_url=credentials["server_url"], - model_uid=credentials["model_uid"], - api_key=credentials.get("api_key"), - ), - ) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - validate credentials - - credentials should be like: - { - 'model_type': 'text-generation', - 'server_url': 'server url', - 'model_uid': 'model uid', - } - """ - try: - if "/" in credentials["model_uid"] or "?" 
in credentials["model_uid"] or "#" in credentials["model_uid"]: - raise CredentialsValidateFailedError("model_uid should not contain /, ?, or #") - - extra_param = XinferenceHelper.get_xinference_extra_parameter( - server_url=credentials["server_url"], - model_uid=credentials["model_uid"], - api_key=credentials.get("api_key"), - ) - if "completion_type" not in credentials: - if "chat" in extra_param.model_ability: - credentials["completion_type"] = "chat" - elif "generate" in extra_param.model_ability: - credentials["completion_type"] = "completion" - else: - raise ValueError( - f"xinference model ability {extra_param.model_ability} is not supported," - f" check if you have the right model type" - ) - - if extra_param.support_function_call: - credentials["support_function_call"] = True - - if extra_param.support_vision: - credentials["support_vision"] = True - - if extra_param.context_length: - credentials["context_length"] = extra_param.context_length - - except RuntimeError as e: - raise CredentialsValidateFailedError(f"Xinference credentials validate failed: {e}") - except KeyError as e: - raise CredentialsValidateFailedError(f"Xinference credentials validate failed: {e}") - except Exception as e: - raise e - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool] | None = None, - ) -> int: - """ - get number of tokens - - cause XinferenceAI LLM is a customized model, we could net detect which tokenizer to use - so we just take the GPT2 tokenizer as default - """ - return self._num_tokens_from_messages(prompt_messages, tools) - - def _num_tokens_from_messages( - self, messages: list[PromptMessage], tools: list[PromptMessageTool], is_completion_model: bool = False - ) -> int: - def tokens(text: str): - return self._get_num_tokens_by_gpt2(text) - - if is_completion_model: - return sum(tokens(str(message.content)) for message in messages) - - tokens_per_message = 3 - tokens_per_name = 
1 - - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - if key == "tool_calls": - for tool_call in value: - for t_key, t_value in tool_call.items(): - num_tokens += tokens(t_key) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += tokens(f_key) - num_tokens += tokens(f_value) - else: - num_tokens += tokens(t_key) - num_tokens += tokens(t_value) - if key == "function_call": - for t_key, t_value in value.items(): - num_tokens += tokens(t_key) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += tokens(f_key) - num_tokens += tokens(f_value) - else: - num_tokens += tokens(t_key) - num_tokens += tokens(t_value) - else: - num_tokens += tokens(str(value)) - - if key == "name": - num_tokens += tokens_per_name - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(tools) - - return num_tokens - - def _num_tokens_for_tools(self, tools: list[PromptMessageTool]) -> int: - """ - Calculate num tokens for tool calling - - :param encoding: encoding - :param tools: tools for tool calling - :return: number of tokens - """ - - def tokens(text: str): - return self._get_num_tokens_by_gpt2(text) - - num_tokens = 0 - for tool in tools: - # calculate num tokens for function object - num_tokens += tokens("name") - num_tokens += tokens(tool.name) - num_tokens += tokens("description") - num_tokens += tokens(tool.description) - parameters = tool.parameters - num_tokens += tokens("parameters") - num_tokens += tokens("type") - num_tokens += tokens(parameters.get("type")) - if "properties" in parameters: - num_tokens += tokens("properties") - for key, value in parameters.get("properties").items(): - num_tokens 
+= tokens(key) - for field_key, field_value in value.items(): - num_tokens += tokens(field_key) - if field_key == "enum": - for enum_field in field_value: - num_tokens += 3 - num_tokens += tokens(enum_field) - else: - num_tokens += tokens(field_key) - num_tokens += tokens(str(field_value)) - if "required" in parameters: - num_tokens += tokens("required") - for required_field in parameters["required"]: - num_tokens += 3 - num_tokens += tokens(required_field) - - return num_tokens - - def _convert_prompt_message_to_text(self, message: list[PromptMessage]) -> str: - """ - convert prompt message to text - """ - text = "" - for item in message: - if isinstance(item, UserPromptMessage | SystemPromptMessage | AssistantPromptMessage): - text += item.content - else: - raise NotImplementedError(f"PromptMessage type {type(item)} is not supported") - return text - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict for OpenAI Compatibility API - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(PromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - sub_message_dict = { - "type": "image_url", - "image_url": {"url": message_content.data, "detail": message_content.detail.value}, - } - sub_messages.append(sub_message_dict) - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = 
{"role": "assistant", "content": message.content} - if message.tool_calls and len(message.tool_calls) > 0: - message_dict["function_call"] = { - "name": message.tool_calls[0].function.name, - "arguments": message.tool_calls[0].function.arguments, - } - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - message_dict = {"tool_call_id": message.tool_call_id, "role": "tool", "content": message.content} - else: - raise ValueError(f"Unknown message type {type(message)}") - - return message_dict - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - rules = [ - ParameterRule( - name="temperature", - type=ParameterType.FLOAT, - use_template="temperature", - label=I18nObject(zh_Hans="温度", en_US="Temperature"), - ), - ParameterRule( - name="top_p", - type=ParameterType.FLOAT, - use_template="top_p", - label=I18nObject(zh_Hans="Top P", en_US="Top P"), - ), - ParameterRule( - name="max_tokens", - type=ParameterType.INT, - use_template="max_tokens", - min=1, - max=credentials.get("context_length", 2048), - default=512, - label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"), - ), - ParameterRule( - name=DefaultParameterName.PRESENCE_PENALTY, - use_template=DefaultParameterName.PRESENCE_PENALTY, - type=ParameterType.FLOAT, - label=I18nObject( - en_US="Presence Penalty", - zh_Hans="存在惩罚", - ), - required=False, - help=I18nObject( - en_US="Number between -2.0 and 2.0. 
Positive values penalize new tokens based on whether they " - "appear in the text so far, increasing the model's likelihood to talk about new topics.", - zh_Hans="介于 -2.0 和 2.0 之间的数字。正值会根据新词是否已出现在文本中对其进行惩罚," - "从而增加模型谈论新话题的可能性。", - ), - default=0.0, - min=-2.0, - max=2.0, - precision=2, - ), - ParameterRule( - name=DefaultParameterName.FREQUENCY_PENALTY, - use_template=DefaultParameterName.FREQUENCY_PENALTY, - type=ParameterType.FLOAT, - label=I18nObject( - en_US="Frequency Penalty", - zh_Hans="频率惩罚", - ), - required=False, - help=I18nObject( - en_US="Number between -2.0 and 2.0. Positive values penalize new tokens based on their " - "existing frequency in the text so far, decreasing the model's likelihood to repeat the " - "same line verbatim.", - zh_Hans="介于 -2.0 和 2.0 之间的数字。正值会根据新词在文本中的现有频率对其进行惩罚," - "从而降低模型逐字重复相同内容的可能性。", - ), - default=0.0, - min=-2.0, - max=2.0, - precision=2, - ), - ] - - completion_type = None - - if "completion_type" in credentials: - if credentials["completion_type"] == "chat": - completion_type = LLMMode.CHAT.value - elif credentials["completion_type"] == "completion": - completion_type = LLMMode.COMPLETION.value - else: - raise ValueError(f'completion_type {credentials["completion_type"]} is not supported') - else: - extra_args = XinferenceHelper.get_xinference_extra_parameter( - server_url=credentials["server_url"], - model_uid=credentials["model_uid"], - api_key=credentials.get("api_key"), - ) - - if "chat" in extra_args.model_ability: - completion_type = LLMMode.CHAT.value - elif "generate" in extra_args.model_ability: - completion_type = LLMMode.COMPLETION.value - else: - raise ValueError(f"xinference model ability {extra_args.model_ability} is not supported") - - features = [] - - support_function_call = credentials.get("support_function_call", False) - if support_function_call: - features.append(ModelFeature.TOOL_CALL) - - support_vision = credentials.get("support_vision", False) - if support_vision: - 
features.append(ModelFeature.VISION) - - context_length = credentials.get("context_length", 2048) - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.LLM, - features=features, - model_properties={ModelPropertyKey.MODE: completion_type, ModelPropertyKey.CONTEXT_SIZE: context_length}, - parameter_rules=rules, - ) - - return entity - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - extra_model_kwargs: XinferenceModelExtraParameter, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - """ - generate text from LLM - - see `core.model_runtime.model_providers.__base.large_language_model.LargeLanguageModel._generate` - - extra_model_kwargs can be got by `XinferenceHelper.get_xinference_extra_parameter` - """ - if "server_url" not in credentials: - raise CredentialsValidateFailedError("server_url is required in credentials") - - credentials["server_url"] = credentials["server_url"].removesuffix("/") - - api_key = credentials.get("api_key") or "abc" - - client = OpenAI( - base_url=f'{credentials["server_url"]}/v1', - api_key=api_key, - max_retries=3, - timeout=60, - ) - - xinference_client = Client( - base_url=credentials["server_url"], - api_key=credentials.get("api_key"), - ) - - xinference_model = xinference_client.get_model(credentials["model_uid"]) - - generate_config = { - "temperature": model_parameters.get("temperature", 1.0), - "top_p": model_parameters.get("top_p", 0.7), - "max_tokens": model_parameters.get("max_tokens", 512), - "presence_penalty": model_parameters.get("presence_penalty", 0.0), - "frequency_penalty": model_parameters.get("frequency_penalty", 0.0), - } - - if stop: - generate_config["stop"] = stop - - if tools and len(tools) > 0: - generate_config["tools"] = [{"type": 
"function", "function": helper.dump_model(tool)} for tool in tools] - vision = credentials.get("support_vision", False) - if isinstance(xinference_model, RESTfulChatModelHandle): - resp = client.chat.completions.create( - model=credentials["model_uid"], - messages=[self._convert_prompt_message_to_dict(message) for message in prompt_messages], - stream=stream, - user=user, - **generate_config, - ) - if stream: - if tools and len(tools) > 0: - raise InvokeBadRequestError("xinference tool calls does not support stream mode") - return self._handle_chat_stream_response( - model=model, credentials=credentials, prompt_messages=prompt_messages, tools=tools, resp=resp - ) - return self._handle_chat_generate_response( - model=model, credentials=credentials, prompt_messages=prompt_messages, tools=tools, resp=resp - ) - elif isinstance(xinference_model, RESTfulGenerateModelHandle): - resp = client.completions.create( - model=credentials["model_uid"], - prompt=self._convert_prompt_message_to_text(prompt_messages), - stream=stream, - user=user, - **generate_config, - ) - if stream: - return self._handle_completion_stream_response( - model=model, credentials=credentials, prompt_messages=prompt_messages, tools=tools, resp=resp - ) - return self._handle_completion_generate_response( - model=model, credentials=credentials, prompt_messages=prompt_messages, tools=tools, resp=resp - ) - else: - raise NotImplementedError(f"xinference model handle type {type(xinference_model)} is not supported") - - def _extract_response_tool_calls( - self, response_tool_calls: list[ChatCompletionMessageToolCall | ChoiceDeltaToolCall] - ) -> list[AssistantPromptMessage.ToolCall]: - """ - Extract tool calls from response - - :param response_tool_calls: response tool calls - :return: list of tool calls - """ - tool_calls = [] - if response_tool_calls: - for response_tool_call in response_tool_calls: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_tool_call.function.name, 
arguments=response_tool_call.function.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_tool_call.id, type=response_tool_call.type, function=function - ) - tool_calls.append(tool_call) - - return tool_calls - - def _extract_response_function_call( - self, response_function_call: FunctionCall | ChoiceDeltaFunctionCall - ) -> AssistantPromptMessage.ToolCall: - """ - Extract function call from response - - :param response_function_call: response function call - :return: tool call - """ - tool_call = None - if response_function_call: - function = AssistantPromptMessage.ToolCall.ToolCallFunction( - name=response_function_call.name, arguments=response_function_call.arguments - ) - - tool_call = AssistantPromptMessage.ToolCall( - id=response_function_call.name, type="function", function=function - ) - - return tool_call - - def _handle_chat_generate_response( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool], - resp: ChatCompletion, - ) -> LLMResult: - """ - handle normal chat generate response - """ - if len(resp.choices) == 0: - raise InvokeServerUnavailableError("Empty response") - - assistant_message = resp.choices[0].message - - # convert tool call to assistant message tool call - tool_calls = assistant_message.tool_calls - assistant_prompt_message_tool_calls = self._extract_response_tool_calls(tool_calls or []) - function_call = assistant_message.function_call - if function_call: - assistant_prompt_message_tool_calls += [self._extract_response_function_call(function_call)] - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage( - content=assistant_message.content, tool_calls=assistant_prompt_message_tool_calls - ) - - prompt_tokens = self._num_tokens_from_messages(messages=prompt_messages, tools=tools) - completion_tokens = self._num_tokens_from_messages(messages=[assistant_prompt_message], tools=tools) - - usage = 
self._calc_response_usage( - model=model, credentials=credentials, prompt_tokens=prompt_tokens, completion_tokens=completion_tokens - ) - - response = LLMResult( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=resp.system_fingerprint, - usage=usage, - message=assistant_prompt_message, - ) - - return response - - def _handle_chat_stream_response( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool], - resp: Iterator[ChatCompletionChunk], - ) -> Generator: - """ - handle stream chat generate response - """ - full_response = "" - - for chunk in resp: - if len(chunk.choices) == 0: - continue - - delta = chunk.choices[0] - - if delta.finish_reason is None and (delta.delta.content is None or delta.delta.content == ""): - continue - - # check if there is a tool call in the response - function_call = None - tool_calls = [] - if delta.delta.tool_calls: - tool_calls += delta.delta.tool_calls - if delta.delta.function_call: - function_call = delta.delta.function_call - - assistant_message_tool_calls = self._extract_response_tool_calls(tool_calls) - if function_call: - assistant_message_tool_calls += [self._extract_response_function_call(function_call)] - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage( - content=delta.delta.content or "", tool_calls=assistant_message_tool_calls - ) - - if delta.finish_reason is not None: - # temp_assistant_prompt_message is used to calculate usage - temp_assistant_prompt_message = AssistantPromptMessage( - content=full_response, tool_calls=assistant_message_tool_calls - ) - - prompt_tokens = self._num_tokens_from_messages(messages=prompt_messages, tools=tools) - completion_tokens = self._num_tokens_from_messages(messages=[temp_assistant_prompt_message], tools=[]) - - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=prompt_tokens, - 
completion_tokens=completion_tokens, - ) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=0, message=assistant_prompt_message, finish_reason=delta.finish_reason, usage=usage - ), - ) - else: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=0, - message=assistant_prompt_message, - ), - ) - - full_response += delta.delta.content - - def _handle_completion_generate_response( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool], - resp: Completion, - ) -> LLMResult: - """ - handle normal completion generate response - """ - if len(resp.choices) == 0: - raise InvokeServerUnavailableError("Empty response") - - assistant_message = resp.choices[0].text - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=assistant_message, tool_calls=[]) - - prompt_tokens = self._get_num_tokens_by_gpt2(self._convert_prompt_message_to_text(prompt_messages)) - completion_tokens = self._num_tokens_from_messages( - messages=[assistant_prompt_message], tools=[], is_completion_model=True - ) - usage = self._calc_response_usage( - model=model, credentials=credentials, prompt_tokens=prompt_tokens, completion_tokens=completion_tokens - ) - - response = LLMResult( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=resp.system_fingerprint, - usage=usage, - message=assistant_prompt_message, - ) - - return response - - def _handle_completion_stream_response( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool], - resp: Iterator[Completion], - ) -> Generator: - """ - handle stream completion generate response - """ - full_response = "" - - for chunk in resp: - if len(chunk.choices) == 
0: - continue - - delta = chunk.choices[0] - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=delta.text or "", tool_calls=[]) - - if delta.finish_reason is not None: - # temp_assistant_prompt_message is used to calculate usage - temp_assistant_prompt_message = AssistantPromptMessage(content=full_response, tool_calls=[]) - - prompt_tokens = self._get_num_tokens_by_gpt2(self._convert_prompt_message_to_text(prompt_messages)) - completion_tokens = self._num_tokens_from_messages( - messages=[temp_assistant_prompt_message], tools=[], is_completion_model=True - ) - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=prompt_tokens, - completion_tokens=completion_tokens, - ) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=0, message=assistant_prompt_message, finish_reason=delta.finish_reason, usage=usage - ), - ) - else: - if delta.text is None or delta.text == "": - continue - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=chunk.system_fingerprint, - delta=LLMResultChunkDelta( - index=0, - message=assistant_prompt_message, - ), - ) - - full_response += delta.text - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [ - APIConnectionError, - APITimeoutError, - ], - InvokeServerUnavailableError: [ - InternalServerError, - ConflictError, - NotFoundError, - UnprocessableEntityError, - PermissionDeniedError, - ], - InvokeRateLimitError: [RateLimitError], - InvokeAuthorizationError: [AuthenticationError], - InvokeBadRequestError: [ValueError], - } diff --git a/api/core/model_runtime/model_providers/xinference/rerank/__init__.py b/api/core/model_runtime/model_providers/xinference/rerank/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/xinference/rerank/rerank.py b/api/core/model_runtime/model_providers/xinference/rerank/rerank.py deleted file mode 100644 index 8f18bc42d2339d..00000000000000 --- a/api/core/model_runtime/model_providers/xinference/rerank/rerank.py +++ /dev/null @@ -1,189 +0,0 @@ -from typing import Optional - -from xinference_client.client.restful.restful_client import Client, RESTfulRerankModelHandle - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - - -class XinferenceRerankModel(RerankModel): - """ - Model class for Xinference rerank model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n - :param user: unique user id - :return: rerank result - """ - if len(docs) == 0: - return RerankResult(model=model, docs=[]) - - server_url = credentials["server_url"] - model_uid = credentials["model_uid"] - api_key = credentials.get("api_key") - server_url = server_url.removesuffix("/") - auth_headers = {"Authorization": f"Bearer {api_key}"} if api_key else {} - - params = {"documents": docs, "query": query, "top_n": top_n, "return_documents": True} - try: - handle = RESTfulRerankModelHandle(model_uid, server_url, auth_headers) - response = handle.rerank(**params) - except RuntimeError as e: - if "rerank hasn't support extra parameter" not in str(e): - raise InvokeServerUnavailableError(str(e)) - - # compatible xinference server between v0.10.1 - v0.12.1, not support 'return_len' - handle = RESTfulRerankModelHandleWithoutExtraParameter(model_uid, server_url, auth_headers) - response = handle.rerank(**params) - - rerank_documents = [] - for idx, result in enumerate(response["results"]): - # format document - index = result["index"] - page_content = result["document"] if isinstance(result["document"], str) else result["document"]["text"] - rerank_document = RerankDocument( - index=index, - text=page_content, - score=result["relevance_score"], - ) - - # score threshold check - if score_threshold is not None: - if result["relevance_score"] >= score_threshold: - rerank_documents.append(rerank_document) - else: - rerank_documents.append(rerank_document) - - return RerankResult(model=model, docs=rerank_documents) - - def 
validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - if "/" in credentials["model_uid"] or "?" in credentials["model_uid"] or "#" in credentials["model_uid"]: - raise CredentialsValidateFailedError("model_uid should not contain /, ?, or #") - - credentials["server_url"] = credentials["server_url"].removesuffix("/") - - # initialize client - client = Client( - base_url=credentials["server_url"], - api_key=credentials.get("api_key"), - ) - - xinference_client = client.get_model(model_uid=credentials["model_uid"]) - - if not isinstance(xinference_client, RESTfulRerankModelHandle): - raise InvokeBadRequestError( - "please check model type, the model you want to invoke is not a rerank model" - ) - - self.invoke( - model=model, - credentials=credentials, - query="Whose kasumi", - docs=[ - 'Kasumi is a girl\'s name of Japanese origin meaning "mist".', - "Her music is a kawaii bass, a mix of future bass, pop, and kawaii music ", - "and she leads a team named PopiParty.", - ], - score_threshold=0.8, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.RERANK, - model_properties={}, - parameter_rules=[], - ) - - return entity - - -class RESTfulRerankModelHandleWithoutExtraParameter(RESTfulRerankModelHandle): - def rerank( - self, - documents: list[str], - query: str, - top_n: Optional[int] = None, - max_chunks_per_doc: Optional[int] = None, - return_documents: Optional[bool] = None, - **kwargs, - ): - url = f"{self._base_url}/v1/rerank" - request_body = { - "model": self._model_uid, - "documents": documents, - "query": query, - "top_n": top_n, - "max_chunks_per_doc": max_chunks_per_doc, - "return_documents": return_documents, - } - - import requests - - response = requests.post(url, json=request_body, headers=self.auth_headers) - if response.status_code != 200: - raise InvokeServerUnavailableError(f"Failed to rerank documents, detail: {response.json()['detail']}") - response_data = response.json() - return response_data diff --git a/api/core/model_runtime/model_providers/xinference/speech2text/__init__.py b/api/core/model_runtime/model_providers/xinference/speech2text/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py b/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py deleted file mode 100644 index a6c5b8a0a571e9..00000000000000 --- 
a/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py +++ /dev/null @@ -1,144 +0,0 @@ -from typing import IO, Optional - -from xinference_client.client.restful.restful_client import Client, RESTfulAudioModelHandle - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel - - -class XinferenceSpeech2TextModel(Speech2TextModel): - """ - Model class for Xinference speech to text model. - """ - - def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :param user: unique user id - :return: text for given audio file - """ - return self._speech2text_invoke(model, credentials, file) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - if "/" in credentials["model_uid"] or "?" 
in credentials["model_uid"] or "#" in credentials["model_uid"]: - raise CredentialsValidateFailedError("model_uid should not contain /, ?, or #") - - credentials["server_url"] = credentials["server_url"].removesuffix("/") - - # initialize client - client = Client( - base_url=credentials["server_url"], - api_key=credentials.get("api_key"), - ) - - xinference_client = client.get_model(model_uid=credentials["model_uid"]) - - if not isinstance(xinference_client, RESTfulAudioModelHandle): - raise InvokeBadRequestError( - "please check model type, the model you want to invoke is not a audio model" - ) - - audio_file_path = self._get_demo_file_path() - - with open(audio_file_path, "rb") as audio_file: - self.invoke(model, credentials, audio_file) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], - } - - def _speech2text_invoke( - self, - model: str, - credentials: dict, - file: IO[bytes], - language: Optional[str] = None, - prompt: Optional[str] = None, - response_format: Optional[str] = "json", - temperature: Optional[float] = 0, - ) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: The audio file object (not file name) to transcribe, in one of these formats: flac, mp3, mp4, mpeg, - mpga, m4a, ogg, wav, or webm. 
- :param language: The language of the input audio. Supplying the input language in ISO-639-1 - :param prompt: An optional text to guide the model's style or continue a previous audio segment. - The prompt should match the audio language. - :param response_format: The format of the transcript output, in one of these options: json, text, srt, - verbose_json, or vtt. - :param temperature: The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more - random,while lower values like 0.2 will make it more focused and deterministic.If set to 0, the model will use - log probability to automatically increase the temperature until certain thresholds are hit. - :return: text for given audio file - """ - server_url = credentials["server_url"] - model_uid = credentials["model_uid"] - api_key = credentials.get("api_key") - server_url = server_url.removesuffix("/") - auth_headers = {"Authorization": f"Bearer {api_key}"} if api_key else {} - - try: - handle = RESTfulAudioModelHandle(model_uid, server_url, auth_headers) - response = handle.transcriptions( - audio=file, language=language, prompt=prompt, response_format=response_format, temperature=temperature - ) - except RuntimeError as e: - raise InvokeServerUnavailableError(str(e)) - - return response["text"] - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.SPEECH2TEXT, - model_properties={}, - parameter_rules=[], - ) - - return entity diff --git a/api/core/model_runtime/model_providers/xinference/text_embedding/__init__.py b/api/core/model_runtime/model_providers/xinference/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/xinference/tts/__init__.py 
b/api/core/model_runtime/model_providers/xinference/tts/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/xinference/tts/tts.py b/api/core/model_runtime/model_providers/xinference/tts/tts.py deleted file mode 100644 index 81dbe397d2f10c..00000000000000 --- a/api/core/model_runtime/model_providers/xinference/tts/tts.py +++ /dev/null @@ -1,228 +0,0 @@ -import concurrent.futures -from typing import Optional - -from xinference_client.client.restful.restful_client import RESTfulAudioModelHandle - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.tts_model import TTSModel -from core.model_runtime.model_providers.xinference.xinference_helper import XinferenceHelper - - -class XinferenceText2SpeechModel(TTSModel): - def __init__(self): - # preset voices, need support custom voice - self.model_voices = { - "__default": { - "all": [ - {"name": "Default", "value": "default"}, - ] - }, - "ChatTTS": { - "all": [ - {"name": "Alloy", "value": "alloy"}, - {"name": "Echo", "value": "echo"}, - {"name": "Fable", "value": "fable"}, - {"name": "Onyx", "value": "onyx"}, - {"name": "Nova", "value": "nova"}, - {"name": "Shimmer", "value": "shimmer"}, - ] - }, - "CosyVoice": { - "zh-Hans": [ - {"name": "中文男", "value": "中文男"}, - {"name": "中文女", "value": "中文女"}, - {"name": "粤语女", "value": "粤语女"}, - ], - "zh-Hant": [ - {"name": "中文男", "value": "中文男"}, - {"name": "中文女", "value": "中文女"}, - {"name": "粤语女", "value": "粤语女"}, - ], - "en-US": [ - {"name": "英文男", "value": "英文男"}, - {"name": "英文女", 
"value": "英文女"}, - ], - "ja-JP": [ - {"name": "日语男", "value": "日语男"}, - ], - "ko-KR": [ - {"name": "韩语女", "value": "韩语女"}, - ], - }, - } - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - if "/" in credentials["model_uid"] or "?" in credentials["model_uid"] or "#" in credentials["model_uid"]: - raise CredentialsValidateFailedError("model_uid should not contain /, ?, or #") - - credentials["server_url"] = credentials["server_url"].removesuffix("/") - - extra_param = XinferenceHelper.get_xinference_extra_parameter( - server_url=credentials["server_url"], - model_uid=credentials["model_uid"], - api_key=credentials.get("api_key"), - ) - - if "text-to-audio" not in extra_param.model_ability: - raise InvokeBadRequestError( - "please check model type, the model you want to invoke is not a text-to-audio model" - ) - - if extra_param.model_family and extra_param.model_family in self.model_voices: - credentials["audio_model_name"] = extra_param.model_family - else: - credentials["audio_model_name"] = "__default" - - self._tts_invoke_streaming( - model=model, - credentials=credentials, - content_text="Hello Dify!", - voice=self._get_model_default_voice(model, credentials), - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _invoke( - self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ): - """ - _invoke text2speech model - - :param model: model name - :param tenant_id: user tenant id - :param credentials: model credentials - :param voice: model timbre - :param content_text: text content to be translated - :param user: unique user id - :return: text translated to audio file - """ - return self._tts_invoke_streaming(model, credentials, content_text, voice) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> 
AIModelEntity | None: - """ - used to define customizable model schema - """ - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TTS, - model_properties={}, - parameter_rules=[], - ) - - return entity - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], - } - - def get_tts_model_voices(self, model: str, credentials: dict, language: Optional[str] = None) -> list: - audio_model_name = credentials.get("audio_model_name", "__default") - for key, voices in self.model_voices.items(): - if key in audio_model_name: - if language and language in voices: - return voices[language] - elif "all" in voices: - return voices["all"] - else: - all_voices = [] - for lang, lang_voices in voices.items(): - all_voices.extend(lang_voices) - return all_voices - - return self.model_voices["__default"]["all"] - - def _get_model_default_voice(self, model: str, credentials: dict) -> any: - return "" - - def _get_model_word_limit(self, model: str, credentials: dict) -> int: - return 3500 - - def _get_model_audio_type(self, model: str, credentials: dict) -> str: - return "mp3" - - def _get_model_workers_limit(self, model: str, credentials: dict) -> int: - return 5 - - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: - """ - _tts_invoke_streaming text2speech 
model - - :param model: model name - :param credentials: model credentials - :param content_text: text content to be translated - :param voice: model timbre - :return: text translated to audio file - """ - credentials["server_url"] = credentials["server_url"].removesuffix("/") - - try: - api_key = credentials.get("api_key") - auth_headers = {"Authorization": f"Bearer {api_key}"} if api_key else {} - handle = RESTfulAudioModelHandle( - credentials["model_uid"], credentials["server_url"], auth_headers=auth_headers - ) - - model_support_voice = [ - x.get("value") for x in self.get_tts_model_voices(model=model, credentials=credentials) - ] - if not voice or voice not in model_support_voice: - voice = self._get_model_default_voice(model, credentials) - word_limit = self._get_model_word_limit(model, credentials) - if len(content_text) > word_limit: - sentences = self._split_text_into_sentences(content_text, max_length=word_limit) - executor = concurrent.futures.ThreadPoolExecutor(max_workers=min(3, len(sentences))) - futures = [ - executor.submit( - handle.speech, input=sentences[i], voice=voice, response_format="mp3", speed=1.0, stream=True - ) - for i in range(len(sentences)) - ] - - for future in futures: - response = future.result() - for chunk in response: - yield chunk - else: - response = handle.speech( - input=content_text.strip(), voice=voice, response_format="mp3", speed=1.0, stream=True - ) - - for chunk in response: - yield chunk - except Exception as ex: - raise InvokeBadRequestError(str(ex)) diff --git a/api/core/model_runtime/model_providers/xinference/xinference.py b/api/core/model_runtime/model_providers/xinference/xinference.py deleted file mode 100644 index d85f7c82e7db71..00000000000000 --- a/api/core/model_runtime/model_providers/xinference/xinference.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class 
XinferenceAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/xinference/xinference.yaml b/api/core/model_runtime/model_providers/xinference/xinference.yaml deleted file mode 100644 index be9073c1cab1f4..00000000000000 --- a/api/core/model_runtime/model_providers/xinference/xinference.yaml +++ /dev/null @@ -1,58 +0,0 @@ -provider: xinference -label: - en_US: Xorbits Inference -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#FAF5FF" -help: - title: - en_US: How to deploy Xinference - zh_Hans: 如何部署 Xinference - url: - en_US: https://github.com/xorbitsai/inference -supported_model_types: - - llm - - text-embedding - - rerank - - speech2text - - tts -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: server_url - label: - zh_Hans: 服务器URL - en_US: Server url - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入Xinference的服务器地址,如 http://192.168.1.100:9997 - en_US: Enter the url of your Xinference, e.g. 
http://192.168.1.100:9997 - - variable: model_uid - label: - zh_Hans: 模型UID - en_US: Model uid - type: text-input - required: true - placeholder: - zh_Hans: 在此输入您的Model UID - en_US: Enter the model uid - - variable: api_key - label: - zh_Hans: API密钥 - en_US: API key - type: secret-input - required: false - placeholder: - zh_Hans: 在此输入您的API密钥 - en_US: Enter the api key diff --git a/api/core/model_runtime/model_providers/xinference/xinference_helper.py b/api/core/model_runtime/model_providers/xinference/xinference_helper.py deleted file mode 100644 index 619ee1492a9272..00000000000000 --- a/api/core/model_runtime/model_providers/xinference/xinference_helper.py +++ /dev/null @@ -1,134 +0,0 @@ -from threading import Lock -from time import time -from typing import Optional - -from requests.adapters import HTTPAdapter -from requests.exceptions import ConnectionError, MissingSchema, Timeout -from requests.sessions import Session -from yarl import URL - - -class XinferenceModelExtraParameter: - model_format: str - model_handle_type: str - model_ability: list[str] - max_tokens: int = 512 - context_length: int = 2048 - support_function_call: bool = False - support_vision: bool = False - model_family: Optional[str] - - def __init__( - self, - model_format: str, - model_handle_type: str, - model_ability: list[str], - support_function_call: bool, - support_vision: bool, - max_tokens: int, - context_length: int, - model_family: Optional[str], - ) -> None: - self.model_format = model_format - self.model_handle_type = model_handle_type - self.model_ability = model_ability - self.support_function_call = support_function_call - self.support_vision = support_vision - self.max_tokens = max_tokens - self.context_length = context_length - self.model_family = model_family - - -cache = {} -cache_lock = Lock() - - -class XinferenceHelper: - @staticmethod - def get_xinference_extra_parameter(server_url: str, model_uid: str, api_key: str) -> XinferenceModelExtraParameter: - 
XinferenceHelper._clean_cache() - with cache_lock: - if model_uid not in cache: - cache[model_uid] = { - "expires": time() + 300, - "value": XinferenceHelper._get_xinference_extra_parameter(server_url, model_uid, api_key), - } - return cache[model_uid]["value"] - - @staticmethod - def _clean_cache() -> None: - try: - with cache_lock: - expired_keys = [model_uid for model_uid, model in cache.items() if model["expires"] < time()] - for model_uid in expired_keys: - del cache[model_uid] - except RuntimeError as e: - pass - - @staticmethod - def _get_xinference_extra_parameter(server_url: str, model_uid: str, api_key: str) -> XinferenceModelExtraParameter: - """ - get xinference model extra parameter like model_format and model_handle_type - """ - - if not model_uid or not model_uid.strip() or not server_url or not server_url.strip(): - raise RuntimeError("model_uid is empty") - - url = str(URL(server_url) / "v1" / "models" / model_uid) - - # this method is surrounded by a lock, and default requests may hang forever, - # so we just set a Adapter with max_retries=3 - session = Session() - session.mount("http://", HTTPAdapter(max_retries=3)) - session.mount("https://", HTTPAdapter(max_retries=3)) - headers = {"Authorization": f"Bearer {api_key}"} if api_key else {} - - try: - response = session.get(url, headers=headers, timeout=10) - except (MissingSchema, ConnectionError, Timeout) as e: - raise RuntimeError(f"get xinference model extra parameter failed, url: {url}, error: {e}") - if response.status_code != 200: - raise RuntimeError( - f"get xinference model extra parameter failed, status code: {response.status_code}," - f" response: {response.text}" - ) - - response_json = response.json() - - model_format = response_json.get("model_format", "ggmlv3") - model_ability = response_json.get("model_ability", []) - model_family = response_json.get("model_family", None) - - if response_json.get("model_type") == "embedding": - model_handle_type = "embedding" - elif 
response_json.get("model_type") == "audio": - model_handle_type = "audio" - if model_family and model_family in {"ChatTTS", "CosyVoice", "FishAudio"}: - model_ability.append("text-to-audio") - else: - model_ability.append("audio-to-text") - elif model_format == "ggmlv3" and "chatglm" in response_json["model_name"]: - model_handle_type = "chatglm" - elif "generate" in model_ability: - model_handle_type = "generate" - elif "chat" in model_ability: - model_handle_type = "chat" - else: - raise NotImplementedError("xinference model handle type is not supported") - - support_function_call = "tools" in model_ability - support_vision = "vision" in model_ability - max_tokens = response_json.get("max_tokens", 512) - - context_length = response_json.get("context_length", 2048) - - return XinferenceModelExtraParameter( - model_format=model_format, - model_handle_type=model_handle_type, - model_ability=model_ability, - support_function_call=support_function_call, - support_vision=support_vision, - max_tokens=max_tokens, - context_length=context_length, - model_family=model_family, - ) diff --git a/api/core/model_runtime/model_providers/yi/__init__.py b/api/core/model_runtime/model_providers/yi/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg deleted file mode 100644 index 9ce3baddaa94ed..00000000000000 --- a/api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/yi/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/yi/_assets/icon_s_en.svg deleted file mode 100644 index eb0395a21c5dd8..00000000000000 --- a/api/core/model_runtime/model_providers/yi/_assets/icon_s_en.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git 
a/api/core/model_runtime/model_providers/yi/llm/__init__.py b/api/core/model_runtime/model_providers/yi/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/yi/llm/_position.yaml b/api/core/model_runtime/model_providers/yi/llm/_position.yaml deleted file mode 100644 index e876893b414985..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/_position.yaml +++ /dev/null @@ -1,9 +0,0 @@ -- yi-34b-chat-0205 -- yi-34b-chat-200k -- yi-vl-plus -- yi-large -- yi-medium -- yi-vision -- yi-medium-200k -- yi-spark -- yi-large-turbo diff --git a/api/core/model_runtime/model_providers/yi/llm/llm.py b/api/core/model_runtime/model_providers/yi/llm/llm.py deleted file mode 100644 index 5ab7fd126e3082..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/llm.py +++ /dev/null @@ -1,127 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union -from urllib.parse import urlparse - -import tiktoken - -from core.model_runtime.entities.llm_entities import LLMResult -from core.model_runtime.entities.message_entities import ( - PromptMessage, - PromptMessageTool, - SystemPromptMessage, -) -from core.model_runtime.model_providers.openai.llm.llm import OpenAILargeLanguageModel - - -class YiLargeLanguageModel(OpenAILargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials) - - # yi-vl-plus not support system prompt yet. 
- if model == "yi-vl-plus": - prompt_message_except_system: list[PromptMessage] = [] - for message in prompt_messages: - if not isinstance(message, SystemPromptMessage): - prompt_message_except_system.append(message) - return super()._invoke( - model, credentials, prompt_message_except_system, model_parameters, tools, stop, stream - ) - - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - # refactored from openai model runtime, use cl100k_base for calculate token number - def _num_tokens_from_string(self, model: str, text: str, tools: Optional[list[PromptMessageTool]] = None) -> int: - """ - Calculate num tokens for text completion model with tiktoken package. - - :param model: model name - :param text: prompt text - :param tools: tools for tool calling - :return: number of tokens - """ - encoding = tiktoken.get_encoding("cl100k_base") - num_tokens = len(encoding.encode(text)) - - if tools: - num_tokens += self._num_tokens_for_tools(encoding, tools) - - return num_tokens - - # refactored from openai model runtime, use cl100k_base for calculate token number - def _num_tokens_from_messages( - self, model: str, messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None - ) -> int: - """Calculate num tokens for gpt-3.5-turbo and gpt-4 with tiktoken package. 
- - Official documentation: https://github.com/openai/openai-cookbook/blob/ - main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb""" - encoding = tiktoken.get_encoding("cl100k_base") - tokens_per_message = 3 - tokens_per_name = 1 - - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - # Cast str(value) in case the message value is not a string - # This occurs with function messages - # TODO: The current token calculation method for the image type is not implemented, - # which need to download the image and then get the resolution for calculation, - # and will increase the request delay - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - if key == "tool_calls": - for tool_call in value: - for t_key, t_value in tool_call.items(): - num_tokens += len(encoding.encode(t_key)) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += len(encoding.encode(f_key)) - num_tokens += len(encoding.encode(f_value)) - else: - num_tokens += len(encoding.encode(t_key)) - num_tokens += len(encoding.encode(t_value)) - else: - num_tokens += len(encoding.encode(str(value))) - - if key == "name": - num_tokens += tokens_per_name - - # every reply is primed with assistant - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(encoding, tools) - - return num_tokens - - @staticmethod - def _add_custom_parameters(credentials: dict) -> None: - credentials["mode"] = "chat" - credentials["openai_api_key"] = credentials["api_key"] - if "endpoint_url" not in credentials or credentials["endpoint_url"] == "": - credentials["openai_api_base"] = "https://api.lingyiwanwu.com" - else: - parsed_url = urlparse(credentials["endpoint_url"]) - credentials["openai_api_base"] = 
f"{parsed_url.scheme}://{parsed_url.netloc}" diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml deleted file mode 100644 index ea3d8f5dcee376..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: yi-34b-chat-0205 -label: - zh_Hans: yi-34b-chat-0205 - en_US: yi-34b-chat-0205 -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4000 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. 
-pricing: - input: '2.5' - output: '2.5' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml deleted file mode 100644 index d91f984d7f006f..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: yi-34b-chat-200k -label: - zh_Hans: yi-34b-chat-200k - en_US: yi-34b-chat-200k -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.6 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 4096 - min: 1 - max: 199950 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - type: float - default: 0.9 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. 
-pricing: - input: '12' - output: '12' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-large-turbo.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-large-turbo.yaml deleted file mode 100644 index 1d00eca2cafebb..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/yi-large-turbo.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: yi-large-turbo -label: - zh_Hans: yi-large-turbo - en_US: yi-large-turbo -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 16384 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 1024 - min: 1 - max: 16384 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - type: float - default: 0.9 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. 
-pricing: - input: '12' - output: '12' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-large.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-large.yaml deleted file mode 100644 index 347f511280b9ad..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/yi-large.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: yi-large -label: - zh_Hans: yi-large - en_US: yi-large -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 16384 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 1024 - min: 1 - max: 16384 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - type: float - default: 0.9 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. 
-pricing: - input: '20' - output: '20' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-medium-200k.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-medium-200k.yaml deleted file mode 100644 index e8ddbcba97b415..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/yi-medium-200k.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: yi-medium-200k -label: - zh_Hans: yi-medium-200k - en_US: yi-medium-200k -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 204800 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 1024 - min: 1 - max: 204800 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - type: float - default: 0.9 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. 
-pricing: - input: '12' - output: '12' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-medium.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-medium.yaml deleted file mode 100644 index 4f0244d1f5e7b7..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/yi-medium.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: yi-medium -label: - zh_Hans: yi-medium - en_US: yi-medium -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 16384 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 1024 - min: 1 - max: 16384 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - type: float - default: 0.9 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. 
-pricing: - input: '2.5' - output: '2.5' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-spark.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-spark.yaml deleted file mode 100644 index e28e9fd8c09e82..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/yi-spark.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: yi-spark -label: - zh_Hans: yi-spark - en_US: yi-spark -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 16384 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 1024 - min: 1 - max: 16384 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - type: float - default: 0.9 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. 
-pricing: - input: '1' - output: '1' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-vision.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-vision.yaml deleted file mode 100644 index bce34f58364f24..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/yi-vision.yaml +++ /dev/null @@ -1,44 +0,0 @@ -model: yi-vision -label: - zh_Hans: yi-vision - en_US: yi-vision -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 1024 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - type: float - default: 0.9 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. 
-pricing: - input: '6' - output: '6' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml deleted file mode 100644 index 461c68583fe669..00000000000000 --- a/api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml +++ /dev/null @@ -1,43 +0,0 @@ -model: yi-vl-plus -label: - zh_Hans: yi-vl-plus - en_US: yi-vl-plus -model_type: llm -features: - - vision -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 控制生成结果的多样性和随机性。数值越小,越严谨;数值越大,越发散。 - en_US: Control the diversity and randomness of generated results. The smaller the value, the more rigorous it is; the larger the value, the more divergent it is. - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4000 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.01 - max: 1.00 - help: - zh_Hans: 控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。一般而言,top_p 和 temperature 两个参数选择一个进行调整即可。 - en_US: Control the randomness of generated results. The smaller the value, the weaker the randomness; the larger the value, the stronger the randomness. Generally speaking, you can adjust one of the two parameters top_p and temperature. 
-pricing: - input: '6' - output: '6' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/yi/yi.py b/api/core/model_runtime/model_providers/yi/yi.py deleted file mode 100644 index 9599acb22b505a..00000000000000 --- a/api/core/model_runtime/model_providers/yi/yi.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class YiProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `yi-34b-chat-0205` model for validate, - # no matter what model you pass in, text completion model or chat model - model_instance.validate_credentials(model="yi-34b-chat-0205", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/yi/yi.yaml b/api/core/model_runtime/model_providers/yi/yi.yaml deleted file mode 100644 index de741afb10990c..00000000000000 --- a/api/core/model_runtime/model_providers/yi/yi.yaml +++ /dev/null @@ -1,41 +0,0 @@ -provider: yi -label: - en_US: 01.AI - zh_Hans: 零一万物 -description: - en_US: Models provided by 01.AI, such as yi-34b-chat and yi-vl-plus. 
- zh_Hans: 零一万物提供的模型,例如 yi-34b-chat 和 yi-vl-plus。 -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#E9F1EC" -help: - title: - en_US: Get your API Key from 01.ai - zh_Hans: 从零一万物获取 API Key - url: - en_US: https://platform.lingyiwanwu.com/apikeys -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: endpoint_url - label: - zh_Hans: 自定义 API endpoint 地址 - en_US: Custom API endpoint URL - type: text-input - required: false - placeholder: - zh_Hans: Base URL, e.g. https://api.lingyiwanwu.com/v1 - en_US: Base URL, e.g. https://api.lingyiwanwu.com/v1 diff --git a/api/core/model_runtime/model_providers/zhinao/__init__.py b/api/core/model_runtime/model_providers/zhinao/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/zhinao/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/zhinao/_assets/icon_l_en.svg deleted file mode 100644 index b22b8694419bc7..00000000000000 --- a/api/core/model_runtime/model_providers/zhinao/_assets/icon_l_en.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/api/core/model_runtime/model_providers/zhinao/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/zhinao/_assets/icon_s_en.svg deleted file mode 100644 index 8fe72b7d0928e6..00000000000000 --- a/api/core/model_runtime/model_providers/zhinao/_assets/icon_s_en.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/api/core/model_runtime/model_providers/zhinao/llm/360gpt-turbo-responsibility-8k.yaml b/api/core/model_runtime/model_providers/zhinao/llm/360gpt-turbo-responsibility-8k.yaml deleted file mode 100644 index f420df0001b3a2..00000000000000 --- 
a/api/core/model_runtime/model_providers/zhinao/llm/360gpt-turbo-responsibility-8k.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: 360gpt-turbo-responsibility-8k -label: - zh_Hans: 360gpt-turbo-responsibility-8k - en_US: 360gpt-turbo-responsibility-8k -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 8192 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 8192 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/zhinao/llm/360gpt-turbo.yaml b/api/core/model_runtime/model_providers/zhinao/llm/360gpt-turbo.yaml deleted file mode 100644 index a2658fbe4f5c0e..00000000000000 --- a/api/core/model_runtime/model_providers/zhinao/llm/360gpt-turbo.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: 360gpt-turbo -label: - zh_Hans: 360gpt-turbo - en_US: 360gpt-turbo -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 2048 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/zhinao/llm/360gpt2-pro.yaml b/api/core/model_runtime/model_providers/zhinao/llm/360gpt2-pro.yaml deleted file mode 100644 index 00c81eb1daaffb..00000000000000 --- 
a/api/core/model_runtime/model_providers/zhinao/llm/360gpt2-pro.yaml +++ /dev/null @@ -1,36 +0,0 @@ -model: 360gpt2-pro -label: - zh_Hans: 360gpt2-pro - en_US: 360gpt2-pro -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 2048 -parameter_rules: - - name: temperature - use_template: temperature - min: 0 - max: 1 - default: 0.5 - - name: top_p - use_template: top_p - min: 0 - max: 1 - default: 1 - - name: max_tokens - use_template: max_tokens - min: 1 - max: 2048 - default: 1024 - - name: frequency_penalty - use_template: frequency_penalty - min: -2 - max: 2 - default: 0 - - name: presence_penalty - use_template: presence_penalty - min: -2 - max: 2 - default: 0 diff --git a/api/core/model_runtime/model_providers/zhinao/llm/__init__.py b/api/core/model_runtime/model_providers/zhinao/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/zhinao/llm/_position.yaml b/api/core/model_runtime/model_providers/zhinao/llm/_position.yaml deleted file mode 100644 index ab8dbf51821c0a..00000000000000 --- a/api/core/model_runtime/model_providers/zhinao/llm/_position.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- 360gpt2-pro -- 360gpt-turbo -- 360gpt-turbo-responsibility-8k diff --git a/api/core/model_runtime/model_providers/zhinao/llm/llm.py b/api/core/model_runtime/model_providers/zhinao/llm/llm.py deleted file mode 100644 index befc3de021e1f2..00000000000000 --- a/api/core/model_runtime/model_providers/zhinao/llm/llm.py +++ /dev/null @@ -1,31 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from core.model_runtime.entities.llm_entities import LLMResult -from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class ZhinaoLargeLanguageModel(OAIAPICompatLargeLanguageModel): - def _invoke( - 
self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials) - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - @classmethod - def _add_custom_parameters(cls, credentials: dict) -> None: - credentials["mode"] = "chat" - credentials["endpoint_url"] = "https://api.360.cn/v1" diff --git a/api/core/model_runtime/model_providers/zhinao/zhinao.py b/api/core/model_runtime/model_providers/zhinao/zhinao.py deleted file mode 100644 index 2a263292f98f14..00000000000000 --- a/api/core/model_runtime/model_providers/zhinao/zhinao.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class ZhinaoProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. 
- """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - # Use `360gpt-turbo` model for validate, - # no matter what model you pass in, text completion model or chat model - model_instance.validate_credentials(model="360gpt-turbo", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/zhinao/zhinao.yaml b/api/core/model_runtime/model_providers/zhinao/zhinao.yaml deleted file mode 100644 index c5cb142c47d4c3..00000000000000 --- a/api/core/model_runtime/model_providers/zhinao/zhinao.yaml +++ /dev/null @@ -1,32 +0,0 @@ -provider: zhinao -label: - en_US: 360 AI - zh_Hans: 360 智脑 -description: - en_US: Models provided by 360 AI. - zh_Hans: 360 智脑提供的模型。 -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#e3f0ff" -help: - title: - en_US: Get your API Key from 360 AI. 
- zh_Hans: 从360 智脑获取 API Key - url: - en_US: https://ai.360.com/platform/keys -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/zhipuai/__init__.py b/api/core/model_runtime/model_providers/zhipuai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/zhipuai/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/zhipuai/_assets/icon_l_en.svg deleted file mode 100644 index d32499917d3c72..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/_assets/icon_l_en.svg +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/api/core/model_runtime/model_providers/zhipuai/_assets/icon_l_zh.svg b/api/core/model_runtime/model_providers/zhipuai/_assets/icon_l_zh.svg deleted file mode 100644 index 067ea2c4272ee8..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/_assets/icon_l_zh.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/api/core/model_runtime/model_providers/zhipuai/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/zhipuai/_assets/icon_s_en.svg deleted file mode 100644 index 016f97ddab8dab..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/_assets/icon_s_en.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/api/core/model_runtime/model_providers/zhipuai/_common.py b/api/core/model_runtime/model_providers/zhipuai/_common.py deleted file mode 100644 index fa95232f717d78..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/_common.py +++ /dev/null @@ -1,41 +0,0 @@ -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - 
InvokeRateLimitError, - InvokeServerUnavailableError, -) - - -class _CommonZhipuaiAI: - def _to_credential_kwargs(self, credentials: dict) -> dict: - """ - Transform credentials to kwargs for model instance - - :param credentials: - :return: - """ - credentials_kwargs = { - "api_key": credentials["api_key"] if "api_key" in credentials else credentials.get("zhipuai_api_key"), - } - - return credentials_kwargs - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [], - InvokeRateLimitError: [], - InvokeAuthorizationError: [], - InvokeBadRequestError: [], - } diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/__init__.py b/api/core/model_runtime/model_providers/zhipuai/llm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_lite.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_lite.yaml deleted file mode 100644 index 9778de1a2eeaa0..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_lite.yaml +++ /dev/null @@ -1,22 +0,0 @@ -model: chatglm_lite -label: - en_US: chatglm_lite -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.9 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. 
The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. -deprecated: true diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_lite_32k.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_lite_32k.yaml deleted file mode 100644 index 7836d964c64cf9..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_lite_32k.yaml +++ /dev/null @@ -1,22 +0,0 @@ -model: chatglm_lite_32k -label: - en_US: chatglm_lite_32k -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.9 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. 
The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. -deprecated: true diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_pro.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_pro.yaml deleted file mode 100644 index b3d53c812bd8fd..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_pro.yaml +++ /dev/null @@ -1,22 +0,0 @@ -model: chatglm_pro -label: - en_US: chatglm_pro -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.9 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. 
The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. -deprecated: true diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_std.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_std.yaml deleted file mode 100644 index 7d8b9520a0ee47..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_std.yaml +++ /dev/null @@ -1,22 +0,0 @@ -model: chatglm_std -label: - en_US: chatglm_std -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.9 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. 
The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. -deprecated: true diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_turbo.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_turbo.yaml deleted file mode 100644 index fcd5c5ef640206..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/chatglm_turbo.yaml +++ /dev/null @@ -1,51 +0,0 @@ -model: chatglm_turbo -label: - en_US: chatglm_turbo -model_type: llm -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. 
The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. 
It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. - default: false - - name: return_type - label: - zh_Hans: 回复类型 - en_US: Return Type - type: string - help: - zh_Hans: 用于控制每次返回内容的类型,空或者没有此字段时默认按照 json_string 返回,json_string 返回标准的 JSON 字符串,text 返回原始的文本内容。 - en_US: Used to control the type of content returned each time. When it is empty or does not have this field, it will be returned as json_string by default. json_string returns a standard JSON string, and text returns the original text content. 
- required: false - options: - - text - - json_string diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-0520.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-0520.yaml deleted file mode 100644 index 7fcf6922023fbf..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-0520.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: glm-4-0520 -label: - en_US: glm-4-0520 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. 
It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. 
- default: false - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 4095 - - name: web_search - type: boolean - label: - zh_Hans: 联网搜索 - en_US: Web Search - default: false - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. -pricing: - input: '0.1' - output: '0.1' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-air.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-air.yaml deleted file mode 100644 index fcd7c7768c7179..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-air.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: glm-4-air -label: - en_US: glm-4-air -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. 
- - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. 
A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. - default: false - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 4095 - - name: web_search - type: boolean - label: - zh_Hans: 联网搜索 - en_US: Web Search - default: false - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. 
-pricing: - input: '0.001' - output: '0.001' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-airx.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-airx.yaml deleted file mode 100644 index c9ae5abf196360..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-airx.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: glm-4-airx -label: - en_US: glm-4-airx -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. 
It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. 
- default: false - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 4095 - - name: web_search - type: boolean - label: - zh_Hans: 联网搜索 - en_US: Web Search - default: false - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. -pricing: - input: '0.01' - output: '0.01' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-flash.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-flash.yaml deleted file mode 100644 index 98c4f72c7237f7..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/glm-4-flash.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: glm-4-flash -label: - en_US: glm-4-flash -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. 
- - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. 
A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. - default: false - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 4095 - - name: web_search - type: boolean - label: - zh_Hans: 联网搜索 - en_US: Web Search - default: false - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. 
-pricing: - input: '0' - output: '0' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm_3_turbo.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm_3_turbo.yaml deleted file mode 100644 index 0b5391ce2f83fd..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/glm_3_turbo.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: glm-3-turbo -label: - en_US: glm-3-turbo -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. 
It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. 
- default: false - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 8192 - - name: web_search - type: boolean - label: - zh_Hans: 联网搜索 - en_US: Web Search - default: false - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. -pricing: - input: '0.001' - output: '0.001' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm_4.yaml deleted file mode 100644 index 62f453fb775b9b..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: glm-4 -label: - en_US: glm-4 -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. 
- - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. 
A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. - default: false - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 4095 - - name: web_search - type: boolean - label: - zh_Hans: 联网搜索 - en_US: Web Search - default: false - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. 
-pricing: - input: '0.1' - output: '0.1' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4_long.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm_4_long.yaml deleted file mode 100644 index 350b080c3fc11b..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4_long.yaml +++ /dev/null @@ -1,65 +0,0 @@ -model: glm-4-long -label: - en_US: glm-4-long -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat - context_size: 10240 -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: top_p - use_template: top_p - default: 0.7 - min: 0.0 - max: 1.0 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. 
It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. 
- default: false - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 4095 - - name: web_search - type: boolean - label: - zh_Hans: 联网搜索 - en_US: Web Search - default: false - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. -pricing: - input: '0.001' - output: '0.001' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4_plus.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm_4_plus.yaml deleted file mode 100644 index 2d7ebd71cf26e1..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4_plus.yaml +++ /dev/null @@ -1,62 +0,0 @@ -model: glm-4-plus -label: - en_US: glm-4-plus -model_type: llm -features: - - multi-tool-call - - agent-thought - - stream-tool-call -model_properties: - mode: chat -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. 
- - name: top_p - use_template: top_p - default: 0.7 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. 
A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. - default: false - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 4095 - - name: web_search - type: boolean - label: - zh_Hans: 联网搜索 - en_US: Web Search - default: false - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. 
-pricing: - input: '0.05' - output: '0.05' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4v.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm_4v.yaml deleted file mode 100644 index 3a1120ff375c19..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4v.yaml +++ /dev/null @@ -1,60 +0,0 @@ -model: glm-4v -label: - en_US: glm-4v -model_type: llm -model_properties: - mode: chat -features: - - vision -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: top_p - use_template: top_p - default: 0.6 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. 
- - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. 
- default: false - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 1024 - - name: web_search - type: boolean - label: - zh_Hans: 联网搜索 - en_US: Web Search - default: false - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. -pricing: - input: '0.05' - output: '0.05' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4v_plus.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm_4v_plus.yaml deleted file mode 100644 index 14b9623e5a8c44..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4v_plus.yaml +++ /dev/null @@ -1,60 +0,0 @@ -model: glm-4v-plus -label: - en_US: glm-4v-plus -model_type: llm -model_properties: - mode: chat -features: - - vision -parameter_rules: - - name: temperature - use_template: temperature - default: 0.95 - min: 0.0 - max: 1.0 - help: - zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. 
- - name: top_p - use_template: top_p - default: 0.6 - help: - zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 - en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. - - name: do_sample - label: - zh_Hans: 采样策略 - en_US: Sampling strategy - type: boolean - help: - zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 - en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. - default: true - - name: stream - label: - zh_Hans: 流处理 - en_US: Event Stream - type: boolean - help: - zh_Hans: 使用同步调用时,此参数应当设置为 fasle 或者省略。表示模型生成完所有内容后一次性返回所有内容。默认值为 false。如果设置为 true,模型将通过标准 Event Stream ,逐块返回模型生成内容。Event Stream 结束时会返回一条data:[DONE]消息。注意:在模型流式输出生成内容的过程中,我们会分批对模型生成内容进行检测,当检测到违法及不良信息时,API会返回错误码(1301)。开发者识别到错误码(1301),应及时采取(清屏、重启对话)等措施删除生成内容,并确保不将含有违法及不良信息的内容传递给模型继续生成,避免其造成负面影响。 - en_US: When using synchronous invocation, this parameter should be set to false or omitted. It indicates that the model will return all the generated content at once after the generation is complete. The default value is false. If set to true, the model will return the generated content in chunks via the standard Event Stream. 
A data:[DONE] message will be sent at the end of the Event Stream.Note:During the model's streaming output process, we will batch check the generated content. If illegal or harmful information is detected, the API will return an error code (1301). Developers who identify error code (1301) should promptly take actions such as clearing the screen or restarting the conversation to delete the generated content. They should also ensure that no illegal or harmful content is passed back to the model for continued generation to avoid negative impacts. - default: false - - name: max_tokens - use_template: max_tokens - default: 1024 - min: 1 - max: 1024 - - name: web_search - type: boolean - label: - zh_Hans: 联网搜索 - en_US: Web Search - default: false - help: - zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 - en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. 
-pricing: - input: '0.01' - output: '0.01' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/llm.py b/api/core/model_runtime/model_providers/zhipuai/llm/llm.py deleted file mode 100644 index ea331701abb78a..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/llm/llm.py +++ /dev/null @@ -1,486 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageContent, - PromptMessageContentType, - PromptMessageRole, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.zhipuai._common import _CommonZhipuaiAI -from core.model_runtime.model_providers.zhipuai.zhipuai_sdk._client import ZhipuAI -from core.model_runtime.model_providers.zhipuai.zhipuai_sdk.types.chat.chat_completion import Completion -from core.model_runtime.model_providers.zhipuai.zhipuai_sdk.types.chat.chat_completion_chunk import ChatCompletionChunk -from core.model_runtime.utils import helper - -GLM_JSON_MODE_PROMPT = """You should always follow the instructions and output a valid JSON object. -The structure of the JSON object you can found in the instructions, use {"answer": "$your_answer"} as the default structure -if you are not sure about the structure. - -And you should always end the block with a "```" to indicate the end of the JSON object. 
- - -{{instructions}} - - -```JSON""" # noqa: E501 - - -class ZhipuAILargeLanguageModel(_CommonZhipuaiAI, LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - - # invoke model - # stop = stop or [] - # self._transform_json_prompts(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - return self._generate(model, credentials_kwargs, prompt_messages, model_parameters, tools, stop, stream, user) - - # def _transform_json_prompts(self, model: str, credentials: dict, - # prompt_messages: list[PromptMessage], model_parameters: dict, - # tools: list[PromptMessageTool] | None = None, stop: list[str] | None = None, - # stream: bool = True, user: str | None = None) \ - # -> None: - # """ - # Transform json prompts to model prompts - # """ - # if "}\n\n" not in stop: - # stop.append("}\n\n") - - # # check if there is a system message - # if len(prompt_messages) > 0 and isinstance(prompt_messages[0], SystemPromptMessage): - # # override the system message - # prompt_messages[0] = SystemPromptMessage( - # content=GLM_JSON_MODE_PROMPT.replace("{{instructions}}", prompt_messages[0].content) - # ) - # else: - # # insert the system message - # prompt_messages.insert(0, 
SystemPromptMessage( - # content=GLM_JSON_MODE_PROMPT.replace("{{instructions}}", "Please output a valid JSON object.") - # )) - # # check if the last message is a user message - # if len(prompt_messages) > 0 and isinstance(prompt_messages[-1], UserPromptMessage): - # # add ```JSON\n to the last message - # prompt_messages[-1].content += "\n```JSON\n" - # else: - # # append a user message - # prompt_messages.append(UserPromptMessage( - # content="```JSON\n" - # )) - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - prompt = self._convert_messages_to_prompt(prompt_messages, tools) - - return self._get_num_tokens_by_gpt2(prompt) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - self._generate( - model=model, - credentials_kwargs=credentials_kwargs, - prompt_messages=[ - UserPromptMessage(content="ping"), - ], - model_parameters={ - "temperature": 0.5, - }, - tools=[], - stream=False, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _generate( - self, - model: str, - credentials_kwargs: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials_kwargs: 
credentials kwargs - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - extra_model_kwargs = {} - # request to glm-4v-plus with stop words will always response "finish_reason":"network_error" - if stop and model != "glm-4v-plus": - extra_model_kwargs["stop"] = stop - - client = ZhipuAI(api_key=credentials_kwargs["api_key"]) - - if len(prompt_messages) == 0: - raise ValueError("At least one message is required") - - if prompt_messages[0].role == PromptMessageRole.SYSTEM: - if not prompt_messages[0].content: - prompt_messages = prompt_messages[1:] - - # resolve zhipuai model not support system message and user message, assistant message must be in sequence - new_prompt_messages: list[PromptMessage] = [] - for prompt_message in prompt_messages: - copy_prompt_message = prompt_message.copy() - if copy_prompt_message.role in {PromptMessageRole.USER, PromptMessageRole.SYSTEM, PromptMessageRole.TOOL}: - if isinstance(copy_prompt_message.content, list): - # check if model is 'glm-4v' - if model not in {"glm-4v", "glm-4v-plus"}: - # not support list message - continue - # get image and - if not isinstance(copy_prompt_message, UserPromptMessage): - # not support system message - continue - new_prompt_messages.append(copy_prompt_message) - - if not isinstance(copy_prompt_message.content, str): - # not support image message - continue - - if ( - new_prompt_messages - and new_prompt_messages[-1].role == PromptMessageRole.USER - and copy_prompt_message.role == PromptMessageRole.USER - ): - new_prompt_messages[-1].content += "\n\n" + copy_prompt_message.content - else: - if copy_prompt_message.role in {PromptMessageRole.USER, PromptMessageRole.TOOL}: - new_prompt_messages.append(copy_prompt_message) - elif copy_prompt_message.role == PromptMessageRole.SYSTEM: - new_prompt_message = 
SystemPromptMessage(content=copy_prompt_message.content) - new_prompt_messages.append(new_prompt_message) - else: - new_prompt_message = UserPromptMessage(content=copy_prompt_message.content) - new_prompt_messages.append(new_prompt_message) - else: - if new_prompt_messages and new_prompt_messages[-1].role == PromptMessageRole.ASSISTANT: - new_prompt_messages[-1].content += "\n\n" + copy_prompt_message.content - else: - new_prompt_messages.append(copy_prompt_message) - - if model in {"glm-4v", "glm-4v-plus"}: - params = self._construct_glm_4v_parameter(model, new_prompt_messages, model_parameters) - else: - params = {"model": model, "messages": [], **model_parameters} - # glm model - if not model.startswith("chatglm"): - for prompt_message in new_prompt_messages: - if prompt_message.role == PromptMessageRole.TOOL: - params["messages"].append( - { - "role": "tool", - "content": prompt_message.content, - "tool_call_id": prompt_message.tool_call_id, - } - ) - elif isinstance(prompt_message, AssistantPromptMessage): - if prompt_message.tool_calls: - params["messages"].append( - { - "role": "assistant", - "content": prompt_message.content, - "tool_calls": [ - { - "id": tool_call.id, - "type": tool_call.type, - "function": { - "name": tool_call.function.name, - "arguments": tool_call.function.arguments, - }, - } - for tool_call in prompt_message.tool_calls - ], - } - ) - else: - params["messages"].append({"role": "assistant", "content": prompt_message.content}) - else: - params["messages"].append( - {"role": prompt_message.role.value, "content": prompt_message.content} - ) - else: - # chatglm model - for prompt_message in new_prompt_messages: - # merge system message to user message - if prompt_message.role in { - PromptMessageRole.SYSTEM, - PromptMessageRole.TOOL, - PromptMessageRole.USER, - }: - if len(params["messages"]) > 0 and params["messages"][-1]["role"] == "user": - params["messages"][-1]["content"] += "\n\n" + prompt_message.content - else: - 
params["messages"].append({"role": "user", "content": prompt_message.content}) - else: - params["messages"].append( - {"role": prompt_message.role.value, "content": prompt_message.content} - ) - - if tools and len(tools) > 0: - params["tools"] = [{"type": "function", "function": helper.dump_model(tool)} for tool in tools] - - if stream: - response = client.chat.completions.create(stream=stream, **params, **extra_model_kwargs) - return self._handle_generate_stream_response(model, credentials_kwargs, tools, response, prompt_messages) - - response = client.chat.completions.create(**params, **extra_model_kwargs) - return self._handle_generate_response(model, credentials_kwargs, tools, response, prompt_messages) - - def _construct_glm_4v_parameter(self, model: str, prompt_messages: list[PromptMessage], model_parameters: dict): - messages = [ - {"role": message.role.value, "content": self._construct_glm_4v_messages(message.content)} - for message in prompt_messages - ] - - params = {"model": model, "messages": messages, **model_parameters} - - return params - - def _construct_glm_4v_messages(self, prompt_message: Union[str, list[PromptMessageContent]]) -> list[dict]: - if isinstance(prompt_message, str): - return [{"type": "text", "text": prompt_message}] - - return [ - {"type": "image_url", "image_url": {"url": self._remove_image_header(item.data)}} - if item.type == PromptMessageContentType.IMAGE - else {"type": "text", "text": item.data} - for item in prompt_message - ] - - def _remove_image_header(self, image: str) -> str: - if image.startswith("data:image"): - return image.split(",")[1] - - return image - - def _handle_generate_response( - self, - model: str, - credentials: dict, - tools: Optional[list[PromptMessageTool]], - response: Completion, - prompt_messages: list[PromptMessage], - ) -> LLMResult: - """ - Handle llm response - - :param model: model name - :param response: response - :param prompt_messages: prompt messages - :return: llm response - """ - text = 
"" - assistant_tool_calls: list[AssistantPromptMessage.ToolCall] = [] - for choice in response.choices: - if choice.message.tool_calls: - for tool_call in choice.message.tool_calls: - if tool_call.type == "function": - assistant_tool_calls.append( - AssistantPromptMessage.ToolCall( - id=tool_call.id, - type=tool_call.type, - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=tool_call.function.name, - arguments=tool_call.function.arguments, - ), - ) - ) - - text += choice.message.content or "" - - prompt_usage = response.usage.prompt_tokens - completion_usage = response.usage.completion_tokens - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_usage, completion_usage) - - # transform response - result = LLMResult( - model=model, - prompt_messages=prompt_messages, - message=AssistantPromptMessage(content=text, tool_calls=assistant_tool_calls), - usage=usage, - ) - - return result - - def _handle_generate_stream_response( - self, - model: str, - credentials: dict, - tools: Optional[list[PromptMessageTool]], - responses: Generator[ChatCompletionChunk, None, None], - prompt_messages: list[PromptMessage], - ) -> Generator: - """ - Handle llm stream response - - :param model: model name - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator result - """ - full_assistant_content = "" - for chunk in responses: - if len(chunk.choices) == 0: - continue - - delta = chunk.choices[0] - - if delta.finish_reason is None and (delta.delta.content is None or delta.delta.content == ""): - continue - - assistant_tool_calls: list[AssistantPromptMessage.ToolCall] = [] - for tool_call in delta.delta.tool_calls or []: - if tool_call.type == "function": - assistant_tool_calls.append( - AssistantPromptMessage.ToolCall( - id=tool_call.id, - type=tool_call.type, - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=tool_call.function.name, - 
arguments=tool_call.function.arguments, - ), - ) - ) - - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage( - content=delta.delta.content or "", tool_calls=assistant_tool_calls - ) - - full_assistant_content += delta.delta.content or "" - - if delta.finish_reason is not None and chunk.usage is not None: - completion_tokens = chunk.usage.completion_tokens - prompt_tokens = chunk.usage.prompt_tokens - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint="", - delta=LLMResultChunkDelta( - index=delta.index, - message=assistant_prompt_message, - finish_reason=delta.finish_reason, - usage=usage, - ), - ) - else: - yield LLMResultChunk( - model=chunk.model, - prompt_messages=prompt_messages, - system_fingerprint="", - delta=LLMResultChunkDelta( - index=delta.index, message=assistant_prompt_message, finish_reason=delta.finish_reason - ), - ) - - def _convert_one_message_to_text(self, message: PromptMessage) -> str: - """ - Convert a single message to a string. - - :param message: PromptMessage to convert. - :return: String representation of the message. - """ - human_prompt = "\n\nHuman:" - ai_prompt = "\n\nAssistant:" - content = message.content - - if isinstance(message, UserPromptMessage): - message_text = f"{human_prompt} {content}" - elif isinstance(message, AssistantPromptMessage): - message_text = f"{ai_prompt} {content}" - elif isinstance(message, SystemPromptMessage | ToolPromptMessage): - message_text = content - else: - raise ValueError(f"Got unknown type {message}") - - return message_text - - def _convert_messages_to_prompt( - self, messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None - ) -> str: - """ - :param messages: List of PromptMessage to combine. - :return: Combined string with necessary human_prompt and ai_prompt tags. 
- """ - messages = messages.copy() # don't mutate the original list - - text = "".join(self._convert_one_message_to_text(message) for message in messages) - - if tools and len(tools) > 0: - text += "\n\nTools:" - for tool in tools: - text += f"\n{tool.json()}" - - # trim off the trailing ' ' that might come from the "Assistant: " - return text.rstrip() diff --git a/api/core/model_runtime/model_providers/zhipuai/text_embedding/__init__.py b/api/core/model_runtime/model_providers/zhipuai/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/zhipuai/text_embedding/embedding-2.yaml b/api/core/model_runtime/model_providers/zhipuai/text_embedding/embedding-2.yaml deleted file mode 100644 index f1b8b356028e25..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/text_embedding/embedding-2.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: embedding-2 -model_type: text-embedding -model_properties: - context_size: 8192 -pricing: - input: '0.0005' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/text_embedding/embedding-3.yaml b/api/core/model_runtime/model_providers/zhipuai/text_embedding/embedding-3.yaml deleted file mode 100644 index 5c55c911c4bdd1..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/text_embedding/embedding-3.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: embedding-3 -model_type: text-embedding -model_properties: - context_size: 8192 -pricing: - input: '0.0005' - unit: '0.001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/text_embedding/text_embedding.yaml b/api/core/model_runtime/model_providers/zhipuai/text_embedding/text_embedding.yaml deleted file mode 100644 index b9f5bc6397a36b..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/text_embedding/text_embedding.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: text_embedding -model_type: text-embedding 
-model_properties: - context_size: 512 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai.py deleted file mode 100644 index e75aad6eb0eb53..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai.py +++ /dev/null @@ -1,27 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class ZhipuaiProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - - model_instance.validate_credentials(model="glm-4", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai.yaml b/api/core/model_runtime/model_providers/zhipuai/zhipuai.yaml deleted file mode 100644 index 303a549128a89c..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai.yaml +++ /dev/null @@ -1,31 +0,0 @@ -provider: zhipuai -label: - zh_Hans: 智谱 AI - en_US: ZHIPU AI -icon_small: - en_US: icon_s_en.svg -icon_large: - zh_Hans: icon_l_zh.svg - en_US: icon_l_en.svg -background: "#EFF1FE" -help: - title: - en_US: Get your API key from ZHIPU AI - zh_Hans: 从智谱 AI 获取 API Key - url: - en_US: https://open.bigmodel.cn/usercenter/apikeys -supported_model_types: - - llm - - text-embedding -configurate_methods: - - predefined-model -provider_credential_schema: - 
credential_form_schemas: - - variable: api_key - label: - en_US: APIKey - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 APIKey - en_US: Enter your APIKey diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__init__.py deleted file mode 100644 index fc71d64714bd96..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from .__version__ import __version__ -from ._client import ZhipuAI -from .core import ( - APIAuthenticationError, - APIConnectionError, - APIInternalError, - APIReachLimitError, - APIRequestFailedError, - APIResponseError, - APIResponseValidationError, - APIServerFlowExceedError, - APIStatusError, - APITimeoutError, - ZhipuAIError, -) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__version__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__version__.py deleted file mode 100644 index 51f8c49ecb827d..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__version__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "v2.1.0" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/_client.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/_client.py deleted file mode 100644 index 705d371e628f08..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/_client.py +++ /dev/null @@ -1,82 +0,0 @@ -from __future__ import annotations - -import os -from collections.abc import Mapping -from typing import Union - -import httpx -from httpx import Timeout -from typing_extensions import override - -from . 
import api_resource -from .core import NOT_GIVEN, ZHIPUAI_DEFAULT_MAX_RETRIES, HttpClient, NotGiven, ZhipuAIError, _jwt_token - - -class ZhipuAI(HttpClient): - chat: api_resource.chat.Chat - api_key: str - _disable_token_cache: bool = True - - def __init__( - self, - *, - api_key: str | None = None, - base_url: str | httpx.URL | None = None, - timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN, - max_retries: int = ZHIPUAI_DEFAULT_MAX_RETRIES, - http_client: httpx.Client | None = None, - custom_headers: Mapping[str, str] | None = None, - disable_token_cache: bool = True, - _strict_response_validation: bool = False, - ) -> None: - if api_key is None: - api_key = os.environ.get("ZHIPUAI_API_KEY") - if api_key is None: - raise ZhipuAIError("未提供api_key,请通过参数或环境变量提供") - self.api_key = api_key - self._disable_token_cache = disable_token_cache - - if base_url is None: - base_url = os.environ.get("ZHIPUAI_BASE_URL") - if base_url is None: - base_url = "https://open.bigmodel.cn/api/paas/v4" - from .__version__ import __version__ - - super().__init__( - version=__version__, - base_url=base_url, - max_retries=max_retries, - timeout=timeout, - custom_httpx_client=http_client, - custom_headers=custom_headers, - _strict_response_validation=_strict_response_validation, - ) - self.chat = api_resource.chat.Chat(self) - self.images = api_resource.images.Images(self) - self.embeddings = api_resource.embeddings.Embeddings(self) - self.files = api_resource.files.Files(self) - self.fine_tuning = api_resource.fine_tuning.FineTuning(self) - self.batches = api_resource.Batches(self) - self.knowledge = api_resource.Knowledge(self) - self.tools = api_resource.Tools(self) - self.videos = api_resource.Videos(self) - self.assistant = api_resource.Assistant(self) - - @property - @override - def auth_headers(self) -> dict[str, str]: - api_key = self.api_key - if self._disable_token_cache: - return {"Authorization": f"Bearer {api_key}"} - else: - return {"Authorization": f"Bearer 
{_jwt_token.generate_token(api_key)}"} - - def __del__(self) -> None: - if not hasattr(self, "_has_custom_http_client") or not hasattr(self, "close") or not hasattr(self, "_client"): - # if the '__init__' method raised an error, self would not have client attr - return - - if self._has_custom_http_client: - return - - self.close() diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/__init__.py deleted file mode 100644 index 4fe0719dde3e0b..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -from .assistant import ( - Assistant, -) -from .batches import Batches -from .chat import ( - AsyncCompletions, - Chat, - Completions, -) -from .embeddings import Embeddings -from .files import Files, FilesWithRawResponse -from .fine_tuning import FineTuning -from .images import Images -from .knowledge import Knowledge -from .tools import Tools -from .videos import ( - Videos, -) - -__all__ = [ - "Videos", - "AsyncCompletions", - "Chat", - "Completions", - "Images", - "Embeddings", - "Files", - "FilesWithRawResponse", - "FineTuning", - "Batches", - "Knowledge", - "Tools", - "Assistant", -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/__init__.py deleted file mode 100644 index ce619aa7f09222..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .assistant import Assistant - -__all__ = ["Assistant"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py deleted file mode 100644 index 
f772340a82c4be..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py +++ /dev/null @@ -1,122 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - StreamResponse, - deepcopy_minimal, - make_request_options, - maybe_transform, -) -from ...types.assistant import AssistantCompletion -from ...types.assistant.assistant_conversation_resp import ConversationUsageListResp -from ...types.assistant.assistant_support_resp import AssistantSupportResp - -if TYPE_CHECKING: - from ..._client import ZhipuAI - -from ...types.assistant import assistant_conversation_params, assistant_create_params - -__all__ = ["Assistant"] - - -class Assistant(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def conversation( - self, - assistant_id: str, - model: str, - messages: list[assistant_create_params.ConversationMessage], - *, - stream: bool = True, - conversation_id: Optional[str] = None, - attachments: Optional[list[assistant_create_params.AssistantAttachments]] = None, - metadata: dict | None = None, - request_id: str = None, - user_id: str = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> StreamResponse[AssistantCompletion]: - body = deepcopy_minimal( - { - "assistant_id": assistant_id, - "model": model, - "messages": messages, - "stream": stream, - "conversation_id": conversation_id, - "attachments": attachments, - "metadata": metadata, - "request_id": request_id, - "user_id": user_id, - } - ) - return self._post( - "/assistant", - body=maybe_transform(body, assistant_create_params.AssistantParameters), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=AssistantCompletion, - stream=stream or 
True, - stream_cls=StreamResponse[AssistantCompletion], - ) - - def query_support( - self, - *, - assistant_id_list: list[str] = None, - request_id: str = None, - user_id: str = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> AssistantSupportResp: - body = deepcopy_minimal( - { - "assistant_id_list": assistant_id_list, - "request_id": request_id, - "user_id": user_id, - } - ) - return self._post( - "/assistant/list", - body=body, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=AssistantSupportResp, - ) - - def query_conversation_usage( - self, - assistant_id: str, - page: int = 1, - page_size: int = 10, - *, - request_id: str = None, - user_id: str = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ConversationUsageListResp: - body = deepcopy_minimal( - { - "assistant_id": assistant_id, - "page": page, - "page_size": page_size, - "request_id": request_id, - "user_id": user_id, - } - ) - return self._post( - "/assistant/conversation/list", - body=maybe_transform(body, assistant_conversation_params.ConversationParameters), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=ConversationUsageListResp, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/batches.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/batches.py deleted file mode 100644 index ae2f2be85eb9b4..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/batches.py +++ /dev/null @@ -1,146 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Literal, Optional - -import httpx - -from ..core import NOT_GIVEN, BaseAPI, Body, Headers, NotGiven, 
make_request_options, maybe_transform -from ..core.pagination import SyncCursorPage -from ..types import batch_create_params, batch_list_params -from ..types.batch import Batch - -if TYPE_CHECKING: - from .._client import ZhipuAI - - -class Batches(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - completion_window: str | None = None, - endpoint: Literal["/v1/chat/completions", "/v1/embeddings"], - input_file_id: str, - metadata: Optional[dict[str, str]] | NotGiven = NOT_GIVEN, - auto_delete_input_file: bool = True, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Batch: - return self._post( - "/batches", - body=maybe_transform( - { - "completion_window": completion_window, - "endpoint": endpoint, - "input_file_id": input_file_id, - "metadata": metadata, - "auto_delete_input_file": auto_delete_input_file, - }, - batch_create_params.BatchCreateParams, - ), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=Batch, - ) - - def retrieve( - self, - batch_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Batch: - """ - Retrieves a batch. 
- - Args: - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not batch_id: - raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") - return self._get( - f"/batches/{batch_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=Batch, - ) - - def list( - self, - *, - after: str | NotGiven = NOT_GIVEN, - limit: int | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> SyncCursorPage[Batch]: - """List your organization's batches. - - Args: - after: A cursor for use in pagination. - - `after` is an object ID that defines your place - in the list. For instance, if you make a list request and receive 100 objects, - ending with obj_foo, your subsequent call can include after=obj_foo in order to - fetch the next page of the list. - - limit: A limit on the number of objects to be returned. Limit can range between 1 and - 100, and the default is 20. - - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._get_api_list( - "/batches", - page=SyncCursorPage[Batch], - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query=maybe_transform( - { - "after": after, - "limit": limit, - }, - batch_list_params.BatchListParams, - ), - ), - model=Batch, - ) - - def cancel( - self, - batch_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Batch: - """ - Cancels an in-progress batch. 
- - Args: - batch_id: The ID of the batch to cancel. - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - - """ - if not batch_id: - raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") - return self._post( - f"/batches/{batch_id}/cancel", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=Batch, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/__init__.py deleted file mode 100644 index 5cd8dc6f339a60..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .async_completions import AsyncCompletions -from .chat import Chat -from .completions import Completions - -__all__ = ["AsyncCompletions", "Chat", "Completions"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/async_completions.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/async_completions.py deleted file mode 100644 index 05510a3ec421d0..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/async_completions.py +++ /dev/null @@ -1,115 +0,0 @@ -from __future__ import annotations - -import logging -from typing import TYPE_CHECKING, Literal, Optional, Union - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - drop_prefix_image_data, - make_request_options, - maybe_transform, -) -from ...types.chat.async_chat_completion import AsyncCompletion, AsyncTaskStatus -from ...types.chat.code_geex import code_geex_params -from ...types.sensitive_word_check import SensitiveWordCheckRequest - -logger = 
logging.getLogger(__name__) - -if TYPE_CHECKING: - from ..._client import ZhipuAI - - -class AsyncCompletions(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - model: str, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - user_id: Optional[str] | NotGiven = NOT_GIVEN, - do_sample: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, - temperature: Optional[float] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - max_tokens: int | NotGiven = NOT_GIVEN, - seed: int | NotGiven = NOT_GIVEN, - messages: Union[str, list[str], list[int], list[list[int]], None], - stop: Optional[Union[str, list[str], None]] | NotGiven = NOT_GIVEN, - sensitive_word_check: Optional[SensitiveWordCheckRequest] | NotGiven = NOT_GIVEN, - tools: Optional[object] | NotGiven = NOT_GIVEN, - tool_choice: str | NotGiven = NOT_GIVEN, - meta: Optional[dict[str, str]] | NotGiven = NOT_GIVEN, - extra: Optional[code_geex_params.CodeGeexExtra] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> AsyncTaskStatus: - _cast_type = AsyncTaskStatus - logger.debug(f"temperature:{temperature}, top_p:{top_p}") - if temperature is not None and temperature != NOT_GIVEN: - if temperature <= 0: - do_sample = False - temperature = 0.01 - # logger.warning("temperature:取值范围是:(0.0, 1.0) 开区间,do_sample重写为:false(参数top_p temperature不生效)") # noqa: E501 - if temperature >= 1: - temperature = 0.99 - # logger.warning("temperature:取值范围是:(0.0, 1.0) 开区间") - if top_p is not None and top_p != NOT_GIVEN: - if top_p >= 1: - top_p = 0.99 - # logger.warning("top_p:取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1") - if top_p <= 0: - top_p = 0.01 - # logger.warning("top_p:取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1") - - logger.debug(f"temperature:{temperature}, top_p:{top_p}") - if isinstance(messages, list): - for item in messages: - if 
item.get("content"): - item["content"] = drop_prefix_image_data(item["content"]) - - body = { - "model": model, - "request_id": request_id, - "user_id": user_id, - "temperature": temperature, - "top_p": top_p, - "do_sample": do_sample, - "max_tokens": max_tokens, - "seed": seed, - "messages": messages, - "stop": stop, - "sensitive_word_check": sensitive_word_check, - "tools": tools, - "tool_choice": tool_choice, - "meta": meta, - "extra": maybe_transform(extra, code_geex_params.CodeGeexExtra), - } - return self._post( - "/async/chat/completions", - body=body, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=_cast_type, - stream=False, - ) - - def retrieve_completion_result( - self, - id: str, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Union[AsyncCompletion, AsyncTaskStatus]: - _cast_type = Union[AsyncCompletion, AsyncTaskStatus] - return self._get( - path=f"/async-result/{id}", - cast_type=_cast_type, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/chat.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/chat.py deleted file mode 100644 index b3cc46566c7bf3..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/chat.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import TYPE_CHECKING - -from ...core import BaseAPI, cached_property -from .async_completions import AsyncCompletions -from .completions import Completions - -if TYPE_CHECKING: - pass - - -class Chat(BaseAPI): - @cached_property - def completions(self) -> Completions: - return Completions(self._client) - - @cached_property - def asyncCompletions(self) -> AsyncCompletions: # noqa: N802 - return AsyncCompletions(self._client) 
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/completions.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/completions.py deleted file mode 100644 index 8e5bb454e6ce7e..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/completions.py +++ /dev/null @@ -1,108 +0,0 @@ -from __future__ import annotations - -import logging -from typing import TYPE_CHECKING, Literal, Optional, Union - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - StreamResponse, - deepcopy_minimal, - drop_prefix_image_data, - make_request_options, - maybe_transform, -) -from ...types.chat.chat_completion import Completion -from ...types.chat.chat_completion_chunk import ChatCompletionChunk -from ...types.chat.code_geex import code_geex_params -from ...types.sensitive_word_check import SensitiveWordCheckRequest - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from ..._client import ZhipuAI - - -class Completions(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - model: str, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - user_id: Optional[str] | NotGiven = NOT_GIVEN, - do_sample: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, - stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, - temperature: Optional[float] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - max_tokens: int | NotGiven = NOT_GIVEN, - seed: int | NotGiven = NOT_GIVEN, - messages: Union[str, list[str], list[int], object, None], - stop: Optional[Union[str, list[str], None]] | NotGiven = NOT_GIVEN, - sensitive_word_check: Optional[SensitiveWordCheckRequest] | NotGiven = NOT_GIVEN, - tools: Optional[object] | NotGiven = NOT_GIVEN, - tool_choice: str | NotGiven = NOT_GIVEN, - meta: Optional[dict[str, str]] | NotGiven = 
NOT_GIVEN, - extra: Optional[code_geex_params.CodeGeexExtra] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Completion | StreamResponse[ChatCompletionChunk]: - logger.debug(f"temperature:{temperature}, top_p:{top_p}") - if temperature is not None and temperature != NOT_GIVEN: - if temperature <= 0: - do_sample = False - temperature = 0.01 - # logger.warning("temperature:取值范围是:(0.0, 1.0) 开区间,do_sample重写为:false(参数top_p temperature不生效)") # noqa: E501 - if temperature >= 1: - temperature = 0.99 - # logger.warning("temperature:取值范围是:(0.0, 1.0) 开区间") - if top_p is not None and top_p != NOT_GIVEN: - if top_p >= 1: - top_p = 0.99 - # logger.warning("top_p:取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1") - if top_p <= 0: - top_p = 0.01 - # logger.warning("top_p:取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1") - - logger.debug(f"temperature:{temperature}, top_p:{top_p}") - if isinstance(messages, list): - for item in messages: - if item.get("content"): - item["content"] = drop_prefix_image_data(item["content"]) - - body = deepcopy_minimal( - { - "model": model, - "request_id": request_id, - "user_id": user_id, - "temperature": temperature, - "top_p": top_p, - "do_sample": do_sample, - "max_tokens": max_tokens, - "seed": seed, - "messages": messages, - "stop": stop, - "sensitive_word_check": sensitive_word_check, - "stream": stream, - "tools": tools, - "tool_choice": tool_choice, - "meta": meta, - "extra": maybe_transform(extra, code_geex_params.CodeGeexExtra), - } - ) - return self._post( - "/chat/completions", - body=body, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=Completion, - stream=stream or False, - stream_cls=StreamResponse[ChatCompletionChunk], - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/embeddings.py 
b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/embeddings.py deleted file mode 100644 index 4b4baef9421ba6..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/embeddings.py +++ /dev/null @@ -1,50 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional, Union - -import httpx - -from ..core import NOT_GIVEN, BaseAPI, Body, Headers, NotGiven, make_request_options -from ..types.embeddings import EmbeddingsResponded - -if TYPE_CHECKING: - from .._client import ZhipuAI - - -class Embeddings(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - input: Union[str, list[str], list[int], list[list[int]]], - model: Union[str], - dimensions: Union[int] | NotGiven = NOT_GIVEN, - encoding_format: str | NotGiven = NOT_GIVEN, - user: str | NotGiven = NOT_GIVEN, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - sensitive_word_check: Optional[object] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - disable_strict_validation: Optional[bool] | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> EmbeddingsResponded: - _cast_type = EmbeddingsResponded - if disable_strict_validation: - _cast_type = object - return self._post( - "/embeddings", - body={ - "input": input, - "model": model, - "dimensions": dimensions, - "encoding_format": encoding_format, - "user": user, - "request_id": request_id, - "sensitive_word_check": sensitive_word_check, - }, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=_cast_type, - stream=False, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py deleted file mode 100644 index ba9de75b7ef092..00000000000000 --- 
a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py +++ /dev/null @@ -1,194 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from typing import TYPE_CHECKING, Literal, cast - -import httpx - -from ..core import ( - NOT_GIVEN, - BaseAPI, - Body, - FileTypes, - Headers, - NotGiven, - _legacy_binary_response, - _legacy_response, - deepcopy_minimal, - extract_files, - make_request_options, - maybe_transform, -) -from ..types.files import FileDeleted, FileObject, ListOfFileObject, UploadDetail, file_create_params - -if TYPE_CHECKING: - from .._client import ZhipuAI - -__all__ = ["Files", "FilesWithRawResponse"] - - -class Files(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - file: FileTypes = None, - upload_detail: list[UploadDetail] = None, - purpose: Literal["fine-tune", "retrieval", "batch"], - knowledge_id: str = None, - sentence_size: int = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FileObject: - if not file and not upload_detail: - raise ValueError("At least one of `file` and `upload_detail` must be provided.") - body = deepcopy_minimal( - { - "file": file, - "upload_detail": upload_detail, - "purpose": purpose, - "knowledge_id": knowledge_id, - "sentence_size": sentence_size, - } - ) - files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) - if files: - # It should be noted that the actual Content-Type header that will be - # sent to the server will contain a `boundary` parameter, e.g. 
- # multipart/form-data; boundary=---abc-- - extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} - return self._post( - "/files", - body=maybe_transform(body, file_create_params.FileCreateParams), - files=files, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FileObject, - ) - - # def retrieve( - # self, - # file_id: str, - # *, - # extra_headers: Headers | None = None, - # extra_body: Body | None = None, - # timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - # ) -> FileObject: - # """ - # Returns information about a specific file. - # - # Args: - # file_id: The ID of the file to retrieve information about - # extra_headers: Send extra headers - # - # extra_body: Add additional JSON properties to the request - # - # timeout: Override the client-level default timeout for this request, in seconds - # """ - # if not file_id: - # raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") - # return self._get( - # f"/files/{file_id}", - # options=make_request_options( - # extra_headers=extra_headers, extra_body=extra_body, timeout=timeout - # ), - # cast_type=FileObject, - # ) - - def list( - self, - *, - purpose: str | NotGiven = NOT_GIVEN, - limit: int | NotGiven = NOT_GIVEN, - after: str | NotGiven = NOT_GIVEN, - order: str | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ListOfFileObject: - return self._get( - "/files", - cast_type=ListOfFileObject, - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query={ - "purpose": purpose, - "limit": limit, - "after": after, - "order": order, - }, - ), - ) - - def delete( - self, - file_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | 
NotGiven = NOT_GIVEN, - ) -> FileDeleted: - """ - Delete a file. - - Args: - file_id: The ID of the file to delete - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not file_id: - raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") - return self._delete( - f"/files/{file_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FileDeleted, - ) - - def content( - self, - file_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> _legacy_response.HttpxBinaryResponseContent: - """ - Returns the contents of the specified file. - - Args: - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not file_id: - raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") - extra_headers = {"Accept": "application/binary", **(extra_headers or {})} - return self._get( - f"/files/{file_id}/content", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=_legacy_binary_response.HttpxBinaryResponseContent, - ) - - -class FilesWithRawResponse: - def __init__(self, files: Files) -> None: - self._files = files - - self.create = _legacy_response.to_raw_response_wrapper( - files.create, - ) - self.list = _legacy_response.to_raw_response_wrapper( - files.list, - ) - self.content = _legacy_response.to_raw_response_wrapper( - files.content, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/__init__.py 
b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/__init__.py deleted file mode 100644 index 7c309b83416803..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .fine_tuning import FineTuning -from .jobs import Jobs -from .models import FineTunedModels - -__all__ = ["Jobs", "FineTunedModels", "FineTuning"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/fine_tuning.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/fine_tuning.py deleted file mode 100644 index 8670f7de00df84..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/fine_tuning.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import TYPE_CHECKING - -from ...core import BaseAPI, cached_property -from .jobs import Jobs -from .models import FineTunedModels - -if TYPE_CHECKING: - pass - - -class FineTuning(BaseAPI): - @cached_property - def jobs(self) -> Jobs: - return Jobs(self._client) - - @cached_property - def models(self) -> FineTunedModels: - return FineTunedModels(self._client) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/__init__.py deleted file mode 100644 index 40777a153f272a..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .jobs import Jobs - -__all__ = ["Jobs"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/jobs.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/jobs.py deleted file mode 100644 index 8b038cadc06407..00000000000000 --- 
a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/jobs.py +++ /dev/null @@ -1,152 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -import httpx - -from ....core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - make_request_options, -) -from ....types.fine_tuning import ( - FineTuningJob, - FineTuningJobEvent, - ListOfFineTuningJob, - job_create_params, -) - -if TYPE_CHECKING: - from ...._client import ZhipuAI - -__all__ = ["Jobs"] - - -class Jobs(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - model: str, - training_file: str, - hyperparameters: job_create_params.Hyperparameters | NotGiven = NOT_GIVEN, - suffix: Optional[str] | NotGiven = NOT_GIVEN, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - validation_file: Optional[str] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTuningJob: - return self._post( - "/fine_tuning/jobs", - body={ - "model": model, - "training_file": training_file, - "hyperparameters": hyperparameters, - "suffix": suffix, - "validation_file": validation_file, - "request_id": request_id, - }, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FineTuningJob, - ) - - def retrieve( - self, - fine_tuning_job_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTuningJob: - return self._get( - f"/fine_tuning/jobs/{fine_tuning_job_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FineTuningJob, - ) - - def list( - self, - *, - after: str | NotGiven = NOT_GIVEN, - limit: int | NotGiven = NOT_GIVEN, - extra_headers: 
Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ListOfFineTuningJob: - return self._get( - "/fine_tuning/jobs", - cast_type=ListOfFineTuningJob, - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query={ - "after": after, - "limit": limit, - }, - ), - ) - - def cancel( - self, - fine_tuning_job_id: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # noqa: E501 - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTuningJob: - if not fine_tuning_job_id: - raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") - return self._post( - f"/fine_tuning/jobs/{fine_tuning_job_id}/cancel", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FineTuningJob, - ) - - def list_events( - self, - fine_tuning_job_id: str, - *, - after: str | NotGiven = NOT_GIVEN, - limit: int | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTuningJobEvent: - return self._get( - f"/fine_tuning/jobs/{fine_tuning_job_id}/events", - cast_type=FineTuningJobEvent, - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query={ - "after": after, - "limit": limit, - }, - ), - ) - - def delete( - self, - fine_tuning_job_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTuningJob: - if not 
fine_tuning_job_id: - raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") - return self._delete( - f"/fine_tuning/jobs/{fine_tuning_job_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FineTuningJob, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/__init__.py deleted file mode 100644 index d832635bafbc6f..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .fine_tuned_models import FineTunedModels - -__all__ = ["FineTunedModels"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/fine_tuned_models.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/fine_tuned_models.py deleted file mode 100644 index 29c023e3b1cd5a..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/fine_tuned_models.py +++ /dev/null @@ -1,41 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -import httpx - -from ....core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - make_request_options, -) -from ....types.fine_tuning.models import FineTunedModelsStatus - -if TYPE_CHECKING: - from ...._client import ZhipuAI - -__all__ = ["FineTunedModels"] - - -class FineTunedModels(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def delete( - self, - fine_tuned_model: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTunedModelsStatus: - if not fine_tuned_model: - raise 
ValueError(f"Expected a non-empty value for `fine_tuned_model` but received {fine_tuned_model!r}") - return self._delete( - f"fine_tuning/fine_tuned_models/{fine_tuned_model}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FineTunedModelsStatus, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py deleted file mode 100644 index 8ad411913fa115..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py +++ /dev/null @@ -1,59 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -import httpx - -from ..core import NOT_GIVEN, BaseAPI, Body, Headers, NotGiven, make_request_options -from ..types.image import ImagesResponded -from ..types.sensitive_word_check import SensitiveWordCheckRequest - -if TYPE_CHECKING: - from .._client import ZhipuAI - - -class Images(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def generations( - self, - *, - prompt: str, - model: str | NotGiven = NOT_GIVEN, - n: Optional[int] | NotGiven = NOT_GIVEN, - quality: Optional[str] | NotGiven = NOT_GIVEN, - response_format: Optional[str] | NotGiven = NOT_GIVEN, - size: Optional[str] | NotGiven = NOT_GIVEN, - style: Optional[str] | NotGiven = NOT_GIVEN, - sensitive_word_check: Optional[SensitiveWordCheckRequest] | NotGiven = NOT_GIVEN, - user: str | NotGiven = NOT_GIVEN, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - user_id: Optional[str] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - disable_strict_validation: Optional[bool] | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ImagesResponded: - _cast_type = ImagesResponded - if disable_strict_validation: - _cast_type = object - return 
self._post( - "/images/generations", - body={ - "prompt": prompt, - "model": model, - "n": n, - "quality": quality, - "response_format": response_format, - "sensitive_word_check": sensitive_word_check, - "size": size, - "style": style, - "user": user, - "user_id": user_id, - "request_id": request_id, - }, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=_cast_type, - stream=False, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/__init__.py deleted file mode 100644 index 5a67d743c35b9b..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .knowledge import Knowledge - -__all__ = ["Knowledge"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/__init__.py deleted file mode 100644 index fd289e2232b955..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .document import Document - -__all__ = ["Document"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py deleted file mode 100644 index 2c4066d8930342..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py +++ /dev/null @@ -1,217 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from typing import TYPE_CHECKING, Literal, Optional, cast - -import httpx - -from ....core import ( - 
NOT_GIVEN, - BaseAPI, - Body, - FileTypes, - Headers, - NotGiven, - deepcopy_minimal, - extract_files, - make_request_options, - maybe_transform, -) -from ....types.files import UploadDetail, file_create_params -from ....types.knowledge.document import DocumentData, DocumentObject, document_edit_params, document_list_params -from ....types.knowledge.document.document_list_resp import DocumentPage - -if TYPE_CHECKING: - from ...._client import ZhipuAI - -__all__ = ["Document"] - - -class Document(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - file: FileTypes = None, - custom_separator: Optional[list[str]] = None, - upload_detail: list[UploadDetail] = None, - purpose: Literal["retrieval"], - knowledge_id: str = None, - sentence_size: int = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> DocumentObject: - if not file and not upload_detail: - raise ValueError("At least one of `file` and `upload_detail` must be provided.") - body = deepcopy_minimal( - { - "file": file, - "upload_detail": upload_detail, - "purpose": purpose, - "custom_separator": custom_separator, - "knowledge_id": knowledge_id, - "sentence_size": sentence_size, - } - ) - files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) - if files: - # It should be noted that the actual Content-Type header that will be - # sent to the server will contain a `boundary` parameter, e.g. 
- # multipart/form-data; boundary=---abc-- - extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} - return self._post( - "/files", - body=maybe_transform(body, file_create_params.FileCreateParams), - files=files, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=DocumentObject, - ) - - def edit( - self, - document_id: str, - knowledge_type: str, - *, - custom_separator: Optional[list[str]] = None, - sentence_size: Optional[int] = None, - callback_url: Optional[str] = None, - callback_header: Optional[dict[str, str]] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> httpx.Response: - """ - - Args: - document_id: 知识id - knowledge_type: 知识类型: - 1:文章知识: 支持pdf,url,docx - 2.问答知识-文档: 支持pdf,url,docx - 3.问答知识-表格: 支持xlsx - 4.商品库-表格: 支持xlsx - 5.自定义: 支持pdf,url,docx - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - :param knowledge_type: - :param document_id: - :param timeout: - :param extra_body: - :param callback_header: - :param sentence_size: - :param extra_headers: - :param callback_url: - :param custom_separator: - """ - if not document_id: - raise ValueError(f"Expected a non-empty value for `document_id` but received {document_id!r}") - - body = deepcopy_minimal( - { - "id": document_id, - "knowledge_type": knowledge_type, - "custom_separator": custom_separator, - "sentence_size": sentence_size, - "callback_url": callback_url, - "callback_header": callback_header, - } - ) - - return self._put( - f"/document/{document_id}", - body=maybe_transform(body, document_edit_params.DocumentEditParams), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=httpx.Response, - ) - - def list( - self, - 
knowledge_id: str, - *, - purpose: str | NotGiven = NOT_GIVEN, - page: str | NotGiven = NOT_GIVEN, - limit: str | NotGiven = NOT_GIVEN, - order: Literal["desc", "asc"] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> DocumentPage: - return self._get( - "/files", - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query=maybe_transform( - { - "knowledge_id": knowledge_id, - "purpose": purpose, - "page": page, - "limit": limit, - "order": order, - }, - document_list_params.DocumentListParams, - ), - ), - cast_type=DocumentPage, - ) - - def delete( - self, - document_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> httpx.Response: - """ - Delete a file. - - Args: - - document_id: 知识id - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not document_id: - raise ValueError(f"Expected a non-empty value for `document_id` but received {document_id!r}") - - return self._delete( - f"/document/{document_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=httpx.Response, - ) - - def retrieve( - self, - document_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> DocumentData: - """ - - Args: - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not document_id: - raise ValueError(f"Expected a non-empty value for `document_id` but received 
{document_id!r}") - - return self._get( - f"/document/{document_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=DocumentData, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/knowledge.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/knowledge.py deleted file mode 100644 index fea4c73ac997c3..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/knowledge.py +++ /dev/null @@ -1,173 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Literal, Optional - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - cached_property, - deepcopy_minimal, - make_request_options, - maybe_transform, -) -from ...types.knowledge import KnowledgeInfo, KnowledgeUsed, knowledge_create_params, knowledge_list_params -from ...types.knowledge.knowledge_list_resp import KnowledgePage -from .document import Document - -if TYPE_CHECKING: - from ..._client import ZhipuAI - -__all__ = ["Knowledge"] - - -class Knowledge(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - @cached_property - def document(self) -> Document: - return Document(self._client) - - def create( - self, - embedding_id: int, - name: str, - *, - customer_identifier: Optional[str] = None, - description: Optional[str] = None, - background: Optional[Literal["blue", "red", "orange", "purple", "sky"]] = None, - icon: Optional[Literal["question", "book", "seal", "wrench", "tag", "horn", "house"]] = None, - bucket_id: Optional[str] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> KnowledgeInfo: - body = deepcopy_minimal( - { - "embedding_id": embedding_id, - "name": name, - "customer_identifier": customer_identifier, - 
"description": description, - "background": background, - "icon": icon, - "bucket_id": bucket_id, - } - ) - return self._post( - "/knowledge", - body=maybe_transform(body, knowledge_create_params.KnowledgeBaseParams), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=KnowledgeInfo, - ) - - def modify( - self, - knowledge_id: str, - embedding_id: int, - *, - name: str, - description: Optional[str] = None, - background: Optional[Literal["blue", "red", "orange", "purple", "sky"]] = None, - icon: Optional[Literal["question", "book", "seal", "wrench", "tag", "horn", "house"]] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> httpx.Response: - body = deepcopy_minimal( - { - "id": knowledge_id, - "embedding_id": embedding_id, - "name": name, - "description": description, - "background": background, - "icon": icon, - } - ) - return self._put( - f"/knowledge/{knowledge_id}", - body=maybe_transform(body, knowledge_create_params.KnowledgeBaseParams), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=httpx.Response, - ) - - def query( - self, - *, - page: int | NotGiven = 1, - size: int | NotGiven = 10, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> KnowledgePage: - return self._get( - "/knowledge", - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query=maybe_transform( - { - "page": page, - "size": size, - }, - knowledge_list_params.KnowledgeListParams, - ), - ), - cast_type=KnowledgePage, - ) - - def delete( - self, - knowledge_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> 
httpx.Response: - """ - Delete a file. - - Args: - knowledge_id: 知识库ID - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not knowledge_id: - raise ValueError("Expected a non-empty value for `knowledge_id`") - - return self._delete( - f"/knowledge/{knowledge_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=httpx.Response, - ) - - def used( - self, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> KnowledgeUsed: - """ - Returns the contents of the specified file. - - Args: - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._get( - "/knowledge/capacity", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=KnowledgeUsed, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/__init__.py deleted file mode 100644 index 43e4e37da1779f..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .tools import Tools - -__all__ = ["Tools"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/tools.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/tools.py deleted file mode 100644 index 3c3a630aff47d7..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/tools.py +++ /dev/null @@ -1,65 +0,0 @@ -from __future__ import annotations - 
-import logging -from typing import TYPE_CHECKING, Literal, Optional, Union - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - StreamResponse, - deepcopy_minimal, - make_request_options, - maybe_transform, -) -from ...types.tools import WebSearch, WebSearchChunk, tools_web_search_params - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from ..._client import ZhipuAI - -__all__ = ["Tools"] - - -class Tools(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def web_search( - self, - *, - model: str, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, - messages: Union[str, list[str], list[int], object, None], - scope: Optional[str] | NotGiven = NOT_GIVEN, - location: Optional[str] | NotGiven = NOT_GIVEN, - recent_days: Optional[int] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> WebSearch | StreamResponse[WebSearchChunk]: - body = deepcopy_minimal( - { - "model": model, - "request_id": request_id, - "messages": messages, - "stream": stream, - "scope": scope, - "location": location, - "recent_days": recent_days, - } - ) - return self._post( - "/tools", - body=maybe_transform(body, tools_web_search_params.WebSearchParams), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=WebSearch, - stream=stream or False, - stream_cls=StreamResponse[WebSearchChunk], - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/__init__.py deleted file mode 100644 index 6b0f99ed09efe3..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/__init__.py +++ /dev/null 
@@ -1,7 +0,0 @@ -from .videos import ( - Videos, -) - -__all__ = [ - "Videos", -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py deleted file mode 100644 index f1f1c08036a660..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py +++ /dev/null @@ -1,77 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - deepcopy_minimal, - make_request_options, - maybe_transform, -) -from ...types.sensitive_word_check import SensitiveWordCheckRequest -from ...types.video import VideoObject, video_create_params - -if TYPE_CHECKING: - from ..._client import ZhipuAI - -__all__ = ["Videos"] - - -class Videos(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def generations( - self, - model: str, - *, - prompt: str = None, - image_url: str = None, - sensitive_word_check: Optional[SensitiveWordCheckRequest] | NotGiven = NOT_GIVEN, - request_id: str = None, - user_id: str = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> VideoObject: - if not model and not model: - raise ValueError("At least one of `model` and `prompt` must be provided.") - body = deepcopy_minimal( - { - "model": model, - "prompt": prompt, - "image_url": image_url, - "sensitive_word_check": sensitive_word_check, - "request_id": request_id, - "user_id": user_id, - } - ) - return self._post( - "/videos/generations", - body=maybe_transform(body, video_create_params.VideoCreateParams), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=VideoObject, - ) - - def retrieve_videos_result( - self, - 
id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> VideoObject: - if not id: - raise ValueError("At least one of `id` must be provided.") - - return self._get( - f"/async-result/{id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=VideoObject, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/__init__.py deleted file mode 100644 index 3d6466d279861a..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/__init__.py +++ /dev/null @@ -1,108 +0,0 @@ -from ._base_api import BaseAPI -from ._base_compat import ( - PYDANTIC_V2, - ConfigDict, - GenericModel, - cached_property, - field_get_default, - get_args, - get_model_config, - get_model_fields, - get_origin, - is_literal_type, - is_union, - parse_obj, -) -from ._base_models import BaseModel, construct_type -from ._base_type import ( - NOT_GIVEN, - Body, - FileTypes, - Headers, - IncEx, - ModelT, - NotGiven, - Query, -) -from ._constants import ( - ZHIPUAI_DEFAULT_LIMITS, - ZHIPUAI_DEFAULT_MAX_RETRIES, - ZHIPUAI_DEFAULT_TIMEOUT, -) -from ._errors import ( - APIAuthenticationError, - APIConnectionError, - APIInternalError, - APIReachLimitError, - APIRequestFailedError, - APIResponseError, - APIResponseValidationError, - APIServerFlowExceedError, - APIStatusError, - APITimeoutError, - ZhipuAIError, -) -from ._files import is_file_content -from ._http_client import HttpClient, make_request_options -from ._sse_client import StreamResponse -from ._utils import ( - deepcopy_minimal, - drop_prefix_image_data, - extract_files, - is_given, - is_list, - is_mapping, - maybe_transform, - parse_date, - parse_datetime, -) - -__all__ = [ - "BaseModel", - "construct_type", - "BaseAPI", - "NOT_GIVEN", - "Headers", - "NotGiven", 
- "Body", - "IncEx", - "ModelT", - "Query", - "FileTypes", - "PYDANTIC_V2", - "ConfigDict", - "GenericModel", - "get_args", - "is_union", - "parse_obj", - "get_origin", - "is_literal_type", - "get_model_config", - "get_model_fields", - "field_get_default", - "is_file_content", - "ZhipuAIError", - "APIStatusError", - "APIRequestFailedError", - "APIAuthenticationError", - "APIReachLimitError", - "APIInternalError", - "APIServerFlowExceedError", - "APIResponseError", - "APIResponseValidationError", - "APITimeoutError", - "make_request_options", - "HttpClient", - "ZHIPUAI_DEFAULT_TIMEOUT", - "ZHIPUAI_DEFAULT_MAX_RETRIES", - "ZHIPUAI_DEFAULT_LIMITS", - "is_list", - "is_mapping", - "parse_date", - "parse_datetime", - "is_given", - "maybe_transform", - "deepcopy_minimal", - "extract_files", - "StreamResponse", -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_api.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_api.py deleted file mode 100644 index 3592ea6bacd170..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_api.py +++ /dev/null @@ -1,19 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from .._client import ZhipuAI - - -class BaseAPI: - _client: ZhipuAI - - def __init__(self, client: ZhipuAI) -> None: - self._client = client - self._delete = client.delete - self._get = client.get - self._post = client.post - self._put = client.put - self._patch = client.patch - self._get_api_list = client.get_api_list diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_compat.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_compat.py deleted file mode 100644 index 92a5d683be6732..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_compat.py +++ /dev/null @@ -1,209 +0,0 @@ -from __future__ import annotations - -from collections.abc 
import Callable -from datetime import date, datetime -from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union, cast, overload - -import pydantic -from pydantic.fields import FieldInfo -from typing_extensions import Self - -from ._base_type import StrBytesIntFloat - -_T = TypeVar("_T") -_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) - -# --------------- Pydantic v2 compatibility --------------- - -# Pyright incorrectly reports some of our functions as overriding a method when they don't -# pyright: reportIncompatibleMethodOverride=false - -PYDANTIC_V2 = pydantic.VERSION.startswith("2.") - -# v1 re-exports -if TYPE_CHECKING: - - def parse_date(value: date | StrBytesIntFloat) -> date: ... - - def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: ... - - def get_args(t: type[Any]) -> tuple[Any, ...]: ... - - def is_union(tp: type[Any] | None) -> bool: ... - - def get_origin(t: type[Any]) -> type[Any] | None: ... - - def is_literal_type(type_: type[Any]) -> bool: ... - - def is_typeddict(type_: type[Any]) -> bool: ... 
- -else: - if PYDANTIC_V2: - from pydantic.v1.typing import ( # noqa: I001 - get_args as get_args, # noqa: PLC0414 - is_union as is_union, # noqa: PLC0414 - get_origin as get_origin, # noqa: PLC0414 - is_typeddict as is_typeddict, # noqa: PLC0414 - is_literal_type as is_literal_type, # noqa: PLC0414 - ) - from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime # noqa: PLC0414 - else: - from pydantic.typing import ( # noqa: I001 - get_args as get_args, # noqa: PLC0414 - is_union as is_union, # noqa: PLC0414 - get_origin as get_origin, # noqa: PLC0414 - is_typeddict as is_typeddict, # noqa: PLC0414 - is_literal_type as is_literal_type, # noqa: PLC0414 - ) - from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime # noqa: PLC0414 - - -# refactored config -if TYPE_CHECKING: - from pydantic import ConfigDict -else: - if PYDANTIC_V2: - from pydantic import ConfigDict - else: - # TODO: provide an error message here? 
- ConfigDict = None - - -# renamed methods / properties -def parse_obj(model: type[_ModelT], value: object) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(value) - else: - # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - return cast(_ModelT, model.parse_obj(value)) - - -def field_is_required(field: FieldInfo) -> bool: - if PYDANTIC_V2: - return field.is_required() - return field.required # type: ignore - - -def field_get_default(field: FieldInfo) -> Any: - value = field.get_default() - if PYDANTIC_V2: - from pydantic_core import PydanticUndefined - - if value == PydanticUndefined: - return None - return value - return value - - -def field_outer_type(field: FieldInfo) -> Any: - if PYDANTIC_V2: - return field.annotation - return field.outer_type_ # type: ignore - - -def get_model_config(model: type[pydantic.BaseModel]) -> Any: - if PYDANTIC_V2: - return model.model_config - return model.__config__ # type: ignore - - -def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: - if PYDANTIC_V2: - return model.model_fields - return model.__fields__ # type: ignore - - -def model_copy(model: _ModelT) -> _ModelT: - if PYDANTIC_V2: - return model.model_copy() - return model.copy() # type: ignore - - -def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: - if PYDANTIC_V2: - return model.model_dump_json(indent=indent) - return model.json(indent=indent) # type: ignore - - -def model_dump( - model: pydantic.BaseModel, - *, - exclude_unset: bool = False, - exclude_defaults: bool = False, -) -> dict[str, Any]: - if PYDANTIC_V2: - return model.model_dump( - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - ) - return cast( - "dict[str, Any]", - model.dict( # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - ), - ) - - -def model_parse(model: type[_ModelT], data: Any) -> _ModelT: - if PYDANTIC_V2: - return 
model.model_validate(data) - return model.parse_obj(data) # pyright: ignore[reportDeprecated] - - -# generic models -if TYPE_CHECKING: - - class GenericModel(pydantic.BaseModel): ... - -else: - if PYDANTIC_V2: - # there no longer needs to be a distinction in v2 but - # we still have to create our own subclass to avoid - # inconsistent MRO ordering errors - class GenericModel(pydantic.BaseModel): ... - - else: - import pydantic.generics - - class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... - - -# cached properties -if TYPE_CHECKING: - cached_property = property - - # we define a separate type (copied from typeshed) - # that represents that `cached_property` is `set`able - # at runtime, which differs from `@property`. - # - # this is a separate type as editors likely special case - # `@property` and we don't want to cause issues just to have - # more helpful internal types. - - class typed_cached_property(Generic[_T]): # noqa: N801 - func: Callable[[Any], _T] - attrname: str | None - - def __init__(self, func: Callable[[Any], _T]) -> None: ... - - @overload - def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ... - - @overload - def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... - - def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self: - raise NotImplementedError() - - def __set_name__(self, owner: type[Any], name: str) -> None: ... - - # __set__ is not defined at runtime, but @cached_property is designed to be settable - def __set__(self, instance: object, value: _T) -> None: ... 
-else: - try: - from functools import cached_property - except ImportError: - from cached_property import cached_property - - typed_cached_property = cached_property diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py deleted file mode 100644 index 6d8ba700b7b1dc..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py +++ /dev/null @@ -1,670 +0,0 @@ -from __future__ import annotations - -import inspect -import os -from collections.abc import Callable -from datetime import date, datetime -from typing import TYPE_CHECKING, Any, ClassVar, Generic, Literal, TypeGuard, TypeVar, cast - -import pydantic -import pydantic.generics -from pydantic.fields import FieldInfo -from typing_extensions import ( - ParamSpec, - Protocol, - override, - runtime_checkable, -) - -from ._base_compat import ( - PYDANTIC_V2, - ConfigDict, - field_get_default, - get_args, - get_model_config, - get_model_fields, - get_origin, - is_literal_type, - is_union, - parse_obj, -) -from ._base_compat import ( - GenericModel as BaseGenericModel, -) -from ._base_type import ( - IncEx, - ModelT, -) -from ._utils import ( - PropertyInfo, - coerce_boolean, - extract_type_arg, - is_annotated_type, - is_list, - is_mapping, - parse_date, - parse_datetime, - strip_annotated_type, -) - -if TYPE_CHECKING: - from pydantic_core.core_schema import LiteralSchema, ModelField, ModelFieldsSchema - -__all__ = ["BaseModel", "GenericModel"] -_BaseModelT = TypeVar("_BaseModelT", bound="BaseModel") - -_T = TypeVar("_T") -P = ParamSpec("P") - - -@runtime_checkable -class _ConfigProtocol(Protocol): - allow_population_by_field_name: bool - - -class BaseModel(pydantic.BaseModel): - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict( - extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) - ) - else: - - @property 
- @override - def model_fields_set(self) -> set[str]: - # a forwards-compat shim for pydantic v2 - return self.__fields_set__ # type: ignore - - class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] - extra: Any = pydantic.Extra.allow # type: ignore - - def to_dict( - self, - *, - mode: Literal["json", "python"] = "python", - use_api_names: bool = True, - exclude_unset: bool = True, - exclude_defaults: bool = False, - exclude_none: bool = False, - warnings: bool = True, - ) -> dict[str, object]: - """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude. - - By default, fields that were not set by the API will not be included, - and keys will match the API response, *not* the property names from the model. - - For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, - the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). - - Args: - mode: - If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`. - If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)` - - use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that are set to their default value from the output. - exclude_none: Whether to exclude fields that have a value of `None` from the output. - warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2. 
- """ # noqa: E501 - return self.model_dump( - mode=mode, - by_alias=use_api_names, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - warnings=warnings, - ) - - def to_json( - self, - *, - indent: int | None = 2, - use_api_names: bool = True, - exclude_unset: bool = True, - exclude_defaults: bool = False, - exclude_none: bool = False, - warnings: bool = True, - ) -> str: - """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation). - - By default, fields that were not set by the API will not be included, - and keys will match the API response, *not* the property names from the model. - - For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, - the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). - - Args: - indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. Defaults to `2` - use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that have the default value. - exclude_none: Whether to exclude fields that have a value of `None`. - warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2. - """ # noqa: E501 - return self.model_dump_json( - indent=indent, - by_alias=use_api_names, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - warnings=warnings, - ) - - @override - def __str__(self) -> str: - # mypy complains about an invalid self arg - return f'{self.__repr_name__()}({self.__repr_str__(", ")})' # type: ignore[misc] - - # Override the 'construct' method in a way that supports recursive parsing without validation. 
- # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836. - @classmethod - @override - def construct( - cls: type[ModelT], - _fields_set: set[str] | None = None, - **values: object, - ) -> ModelT: - m = cls.__new__(cls) - fields_values: dict[str, object] = {} - - config = get_model_config(cls) - populate_by_name = ( - config.allow_population_by_field_name - if isinstance(config, _ConfigProtocol) - else config.get("populate_by_name") - ) - - if _fields_set is None: - _fields_set = set() - - model_fields = get_model_fields(cls) - for name, field in model_fields.items(): - key = field.alias - if key is None or (key not in values and populate_by_name): - key = name - - if key in values: - fields_values[name] = _construct_field(value=values[key], field=field, key=key) - _fields_set.add(name) - else: - fields_values[name] = field_get_default(field) - - _extra = {} - for key, value in values.items(): - if key not in model_fields: - if PYDANTIC_V2: - _extra[key] = value - else: - _fields_set.add(key) - fields_values[key] = value - - object.__setattr__(m, "__dict__", fields_values) # noqa: PLC2801 - - if PYDANTIC_V2: - # these properties are copied from Pydantic's `model_construct()` method - object.__setattr__(m, "__pydantic_private__", None) # noqa: PLC2801 - object.__setattr__(m, "__pydantic_extra__", _extra) # noqa: PLC2801 - object.__setattr__(m, "__pydantic_fields_set__", _fields_set) # noqa: PLC2801 - else: - # init_private_attributes() does not exist in v2 - m._init_private_attributes() # type: ignore - - # copied from Pydantic v1's `construct()` method - object.__setattr__(m, "__fields_set__", _fields_set) # noqa: PLC2801 - - return m - - if not TYPE_CHECKING: - # type checkers incorrectly complain about this assignment - # because the type signatures are technically different - # although not in practice - model_construct = construct - - if not PYDANTIC_V2: - # we define aliases for some of the new pydantic v2 methods so - # that 
we can just document these methods without having to specify - # a specific pydantic version as some users may not know which - # pydantic version they are currently using - - @override - def model_dump( - self, - *, - mode: Literal["json", "python"] | str = "python", - include: IncEx = None, - exclude: IncEx = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool | Literal["none", "warn", "error"] = True, - context: dict[str, Any] | None = None, - serialize_as_any: bool = False, - ) -> dict[str, Any]: - """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump - - Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. - - Args: - mode: The mode in which `to_python` should run. - If mode is 'json', the dictionary will only contain JSON serializable types. - If mode is 'python', the dictionary may contain any Python objects. - include: A list of fields to include in the output. - exclude: A list of fields to exclude from the output. - by_alias: Whether to use the field's alias in the dictionary key if defined. - exclude_unset: Whether to exclude fields that are unset or None from the output. - exclude_defaults: Whether to exclude fields that are set to their default value from the output. - exclude_none: Whether to exclude fields that have a value of `None` from the output. - round_trip: Whether to enable serialization and deserialization round-trip support. - warnings: Whether to log warnings when invalid fields are encountered. - - Returns: - A dictionary representation of the model. 
- """ - if mode != "python": - raise ValueError("mode is only supported in Pydantic v2") - if round_trip != False: - raise ValueError("round_trip is only supported in Pydantic v2") - if warnings != True: - raise ValueError("warnings is only supported in Pydantic v2") - if context is not None: - raise ValueError("context is only supported in Pydantic v2") - if serialize_as_any != False: - raise ValueError("serialize_as_any is only supported in Pydantic v2") - return super().dict( # pyright: ignore[reportDeprecated] - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - - @override - def model_dump_json( - self, - *, - indent: int | None = None, - include: IncEx = None, - exclude: IncEx = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool | Literal["none", "warn", "error"] = True, - context: dict[str, Any] | None = None, - serialize_as_any: bool = False, - ) -> str: - """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json - - Generates a JSON representation of the model using Pydantic's `to_json` method. - - Args: - indent: Indentation to use in the JSON output. If None is passed, the output will be compact. - include: Field(s) to include in the JSON output. Can take either a string or set of strings. - exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings. - by_alias: Whether to serialize using field aliases. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that have the default value. - exclude_none: Whether to exclude fields that have a value of `None`. - round_trip: Whether to use serialization/deserialization between JSON and class instance. 
- warnings: Whether to show any warnings that occurred during serialization. - - Returns: - A JSON string representation of the model. - """ - if round_trip != False: - raise ValueError("round_trip is only supported in Pydantic v2") - if warnings != True: - raise ValueError("warnings is only supported in Pydantic v2") - if context is not None: - raise ValueError("context is only supported in Pydantic v2") - if serialize_as_any != False: - raise ValueError("serialize_as_any is only supported in Pydantic v2") - return super().json( # type: ignore[reportDeprecated] - indent=indent, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - - -def _construct_field(value: object, field: FieldInfo, key: str) -> object: - if value is None: - return field_get_default(field) - - if PYDANTIC_V2: - type_ = field.annotation - else: - type_ = cast(type, field.outer_type_) # type: ignore - - if type_ is None: - raise RuntimeError(f"Unexpected field type is None for {key}") - - return construct_type(value=value, type_=type_) - - -def is_basemodel(type_: type) -> bool: - """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`""" - if is_union(type_): - return any(is_basemodel(variant) for variant in get_args(type_)) - - return is_basemodel_type(type_) - - -def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]: - origin = get_origin(type_) or type_ - return issubclass(origin, BaseModel) or issubclass(origin, GenericModel) - - -def build( - base_model_cls: Callable[P, _BaseModelT], - *args: P.args, - **kwargs: P.kwargs, -) -> _BaseModelT: - """Construct a BaseModel class without validation. - - This is useful for cases where you need to instantiate a `BaseModel` - from an API response as this provides type-safe params which isn't supported - by helpers like `construct_type()`. 
- - ```py - build(MyModel, my_field_a="foo", my_field_b=123) - ``` - """ - if args: - raise TypeError( - "Received positional arguments which are not supported; Keyword arguments must be used instead", - ) - - return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs)) - - -def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T: - """Loose coercion to the expected type with construction of nested values. - - Note: the returned value from this function is not guaranteed to match the - given type. - """ - return cast(_T, construct_type(value=value, type_=type_)) - - -def construct_type(*, value: object, type_: type) -> object: - """Loose coercion to the expected type with construction of nested values. - - If the given value does not match the expected type then it is returned as-is. - """ - # we allow `object` as the input type because otherwise, passing things like - # `Literal['value']` will be reported as a type error by type checkers - type_ = cast("type[object]", type_) - - # unwrap `Annotated[T, ...]` -> `T` - if is_annotated_type(type_): - meta: tuple[Any, ...] = get_args(type_)[1:] - type_ = extract_type_arg(type_, 0) - else: - meta = () - # we need to use the origin class for any types that are subscripted generics - # e.g. Dict[str, object] - origin = get_origin(type_) or type_ - args = get_args(type_) - - if is_union(origin): - try: - return validate_type(type_=cast("type[object]", type_), value=value) - except Exception: - pass - - # if the type is a discriminated union then we want to construct the right variant - # in the union, even if the data doesn't match exactly, otherwise we'd break code - # that relies on the constructed class types, e.g. 
- # - # class FooType: - # kind: Literal['foo'] - # value: str - # - # class BarType: - # kind: Literal['bar'] - # value: int - # - # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then - # we'd end up constructing `FooType` when it should be `BarType`. - discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta) - if discriminator and is_mapping(value): - variant_value = value.get(discriminator.field_alias_from or discriminator.field_name) - if variant_value and isinstance(variant_value, str): - variant_type = discriminator.mapping.get(variant_value) - if variant_type: - return construct_type(type_=variant_type, value=value) - - # if the data is not valid, use the first variant that doesn't fail while deserializing - for variant in args: - try: - return construct_type(value=value, type_=variant) - except Exception: - continue - - raise RuntimeError(f"Could not convert data into a valid instance of {type_}") - if origin == dict: - if not is_mapping(value): - return value - - _, items_type = get_args(type_) # Dict[_, items_type] - return {key: construct_type(value=item, type_=items_type) for key, item in value.items()} - - if not is_literal_type(type_) and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)): - if is_list(value): - return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value] - - if is_mapping(value): - if issubclass(type_, BaseModel): - return type_.construct(**value) # type: ignore[arg-type] - - return cast(Any, type_).construct(**value) - - if origin == list: - if not is_list(value): - return value - - inner_type = args[0] # List[inner_type] - return [construct_type(value=entry, type_=inner_type) for entry in value] - - if origin == float: - if isinstance(value, int): - coerced = float(value) - if coerced != value: - return value - return coerced - - return value - - if type_ == datetime: - try: - return parse_datetime(value) # 
type: ignore - except Exception: - return value - - if type_ == date: - try: - return parse_date(value) # type: ignore - except Exception: - return value - - return value - - -@runtime_checkable -class CachedDiscriminatorType(Protocol): - __discriminator__: DiscriminatorDetails - - -class DiscriminatorDetails: - field_name: str - """The name of the discriminator field in the variant class, e.g. - - ```py - class Foo(BaseModel): - type: Literal['foo'] - ``` - - Will result in field_name='type' - """ - - field_alias_from: str | None - """The name of the discriminator field in the API response, e.g. - - ```py - class Foo(BaseModel): - type: Literal['foo'] = Field(alias='type_from_api') - ``` - - Will result in field_alias_from='type_from_api' - """ - - mapping: dict[str, type] - """Mapping of discriminator value to variant type, e.g. - - {'foo': FooVariant, 'bar': BarVariant} - """ - - def __init__( - self, - *, - mapping: dict[str, type], - discriminator_field: str, - discriminator_alias: str | None, - ) -> None: - self.mapping = mapping - self.field_name = discriminator_field - self.field_alias_from = discriminator_alias - - -def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None: - if isinstance(union, CachedDiscriminatorType): - return union.__discriminator__ - - discriminator_field_name: str | None = None - - for annotation in meta_annotations: - if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None: - discriminator_field_name = annotation.discriminator - break - - if not discriminator_field_name: - return None - - mapping: dict[str, type] = {} - discriminator_alias: str | None = None - - for variant in get_args(union): - variant = strip_annotated_type(variant) - if is_basemodel_type(variant): - if PYDANTIC_V2: - field = _extract_field_schema_pv2(variant, discriminator_field_name) - if not field: - continue - - # Note: if one variant defines an alias then they all should - 
discriminator_alias = field.get("serialization_alias") - - field_schema = field["schema"] - - if field_schema["type"] == "literal": - for entry in cast("LiteralSchema", field_schema)["expected"]: - if isinstance(entry, str): - mapping[entry] = variant - else: - field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - if not field_info: - continue - - # Note: if one variant defines an alias then they all should - discriminator_alias = field_info.alias - - if field_info.annotation and is_literal_type(field_info.annotation): - for entry in get_args(field_info.annotation): - if isinstance(entry, str): - mapping[entry] = variant - - if not mapping: - return None - - details = DiscriminatorDetails( - mapping=mapping, - discriminator_field=discriminator_field_name, - discriminator_alias=discriminator_alias, - ) - cast(CachedDiscriminatorType, union).__discriminator__ = details - return details - - -def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None: - schema = model.__pydantic_core_schema__ - if schema["type"] != "model": - return None - - fields_schema = schema["schema"] - if fields_schema["type"] != "model-fields": - return None - - fields_schema = cast("ModelFieldsSchema", fields_schema) - - field = fields_schema["fields"].get(field_name) - if not field: - return None - - return cast("ModelField", field) # pyright: ignore[reportUnnecessaryCast] - - -def validate_type(*, type_: type[_T], value: object) -> _T: - """Strict validation that the given value matches the expected type""" - if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel): - return cast(_T, parse_obj(type_, value)) - - return cast(_T, _validate_non_model_type(type_=type_, value=value)) - - -# Subclassing here confuses type checkers, so we treat this class as non-inheriting. 
-if TYPE_CHECKING: - GenericModel = BaseModel -else: - - class GenericModel(BaseGenericModel, BaseModel): - pass - - -if PYDANTIC_V2: - from pydantic import TypeAdapter - - def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: - return TypeAdapter(type_).validate_python(value) - -elif not TYPE_CHECKING: - - class TypeAdapter(Generic[_T]): - """Used as a placeholder to easily convert runtime types to a Pydantic format - to provide validation. - - For example: - ```py - validated = RootModel[int](__root__="5").__root__ - # validated: 5 - ``` - """ - - def __init__(self, type_: type[_T]): - self.type_ = type_ - - def validate_python(self, value: Any) -> _T: - if not isinstance(value, self.type_): - raise ValueError(f"Invalid type: {value} is not of type {self.type_}") - return value - - def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: - return TypeAdapter(type_).validate_python(value) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_type.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_type.py deleted file mode 100644 index ea1d3f09dc42ea..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_type.py +++ /dev/null @@ -1,170 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Mapping, Sequence -from os import PathLike -from typing import ( - IO, - TYPE_CHECKING, - Any, - Literal, - Optional, - TypeAlias, - TypeVar, - Union, -) - -import pydantic -from httpx import Response -from typing_extensions import Protocol, TypedDict, override, runtime_checkable - -Query = Mapping[str, object] -Body = object -AnyMapping = Mapping[str, object] -PrimitiveData = Union[str, int, float, bool, None] -Data = Union[PrimitiveData, list[Any], tuple[Any], "Mapping[str, Any]"] -ModelT = TypeVar("ModelT", bound=pydantic.BaseModel) -_T = TypeVar("_T") - -if TYPE_CHECKING: - NoneType: type[None] -else: - NoneType = type(None) - 
- -# Sentinel class used until PEP 0661 is accepted -class NotGiven: - """ - A sentinel singleton class used to distinguish omitted keyword arguments - from those passed in with the value None (which may have different behavior). - - For example: - - ```py - def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ... - - get(timeout=1) # 1s timeout - get(timeout=None) # No timeout - get() # Default timeout behavior, which may not be statically known at the method definition. - ``` - """ - - def __bool__(self) -> Literal[False]: - return False - - @override - def __repr__(self) -> str: - return "NOT_GIVEN" - - -NotGivenOr = Union[_T, NotGiven] -NOT_GIVEN = NotGiven() - - -class Omit: - """In certain situations you need to be able to represent a case where a default value has - to be explicitly removed and `None` is not an appropriate substitute, for example: - - ```py - # as the default `Content-Type` header is `application/json` that will be sent - client.post('/upload/files', files={'file': b'my raw file content'}) - - # you can't explicitly override the header as it has to be dynamically generated - # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983' - client.post(..., headers={'Content-Type': 'multipart/form-data'}) - - # instead you can remove the default `application/json` header by passing Omit - client.post(..., headers={'Content-Type': Omit()}) - ``` - """ - - def __bool__(self) -> Literal[False]: - return False - - -@runtime_checkable -class ModelBuilderProtocol(Protocol): - @classmethod - def build( - cls: type[_T], - *, - response: Response, - data: object, - ) -> _T: ... - - -Headers = Mapping[str, Union[str, Omit]] - - -class HeadersLikeProtocol(Protocol): - def get(self, __key: str) -> str | None: ... 
- - -HeadersLike = Union[Headers, HeadersLikeProtocol] - -ResponseT = TypeVar( - "ResponseT", - bound="Union[str, None, BaseModel, list[Any], dict[str, Any], Response, UnknownResponse, ModelBuilderProtocol, BinaryResponseContent]", # noqa: E501 -) - -StrBytesIntFloat = Union[str, bytes, int, float] - -# Note: copied from Pydantic -# https://github.com/pydantic/pydantic/blob/32ea570bf96e84234d2992e1ddf40ab8a565925a/pydantic/main.py#L49 -IncEx: TypeAlias = "set[int] | set[str] | dict[int, Any] | dict[str, Any] | None" - -PostParser = Callable[[Any], Any] - - -@runtime_checkable -class InheritsGeneric(Protocol): - """Represents a type that has inherited from `Generic` - - The `__orig_bases__` property can be used to determine the resolved - type variable for a given base class. - """ - - __orig_bases__: tuple[_GenericAlias] - - -class _GenericAlias(Protocol): - __origin__: type[object] - - -class HttpxSendArgs(TypedDict, total=False): - auth: httpx.Auth - - -# for user input files -if TYPE_CHECKING: - Base64FileInput = Union[IO[bytes], PathLike[str]] - FileContent = Union[IO[bytes], bytes, PathLike[str]] -else: - Base64FileInput = Union[IO[bytes], PathLike] - FileContent = Union[IO[bytes], bytes, PathLike] - -FileTypes = Union[ - # file (or bytes) - FileContent, - # (filename, file (or bytes)) - tuple[Optional[str], FileContent], - # (filename, file (or bytes), content_type) - tuple[Optional[str], FileContent, Optional[str]], - # (filename, file (or bytes), content_type, headers) - tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], -] -RequestFiles = Union[Mapping[str, FileTypes], Sequence[tuple[str, FileTypes]]] - -# duplicate of the above but without our custom file support -HttpxFileContent = Union[bytes, IO[bytes]] -HttpxFileTypes = Union[ - # file (or bytes) - HttpxFileContent, - # (filename, file (or bytes)) - tuple[Optional[str], HttpxFileContent], - # (filename, file (or bytes), content_type) - tuple[Optional[str], HttpxFileContent, 
Optional[str]], - # (filename, file (or bytes), content_type, headers) - tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]], -] - -HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[tuple[str, HttpxFileTypes]]] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_constants.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_constants.py deleted file mode 100644 index 8e43bdebecb61f..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_constants.py +++ /dev/null @@ -1,12 +0,0 @@ -import httpx - -RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response" -# 通过 `Timeout` 控制接口`connect` 和 `read` 超时时间,默认为`timeout=300.0, connect=8.0` -ZHIPUAI_DEFAULT_TIMEOUT = httpx.Timeout(timeout=300.0, connect=8.0) -# 通过 `retry` 参数控制重试次数,默认为3次 -ZHIPUAI_DEFAULT_MAX_RETRIES = 3 -# 通过 `Limits` 控制最大连接数和保持连接数,默认为`max_connections=50, max_keepalive_connections=10` -ZHIPUAI_DEFAULT_LIMITS = httpx.Limits(max_connections=50, max_keepalive_connections=10) - -INITIAL_RETRY_DELAY = 0.5 -MAX_RETRY_DELAY = 8.0 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_errors.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_errors.py deleted file mode 100644 index e2c9d24c6c0d24..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_errors.py +++ /dev/null @@ -1,86 +0,0 @@ -from __future__ import annotations - -import httpx - -__all__ = [ - "ZhipuAIError", - "APIStatusError", - "APIRequestFailedError", - "APIAuthenticationError", - "APIReachLimitError", - "APIInternalError", - "APIServerFlowExceedError", - "APIResponseError", - "APIResponseValidationError", - "APITimeoutError", - "APIConnectionError", -] - - -class ZhipuAIError(Exception): - def __init__( - self, - message: str, - ) -> None: - super().__init__(message) - - -class APIStatusError(ZhipuAIError): - response: httpx.Response - status_code: int - - def __init__(self, 
message: str, *, response: httpx.Response) -> None: - super().__init__(message) - self.response = response - self.status_code = response.status_code - - -class APIRequestFailedError(APIStatusError): ... - - -class APIAuthenticationError(APIStatusError): ... - - -class APIReachLimitError(APIStatusError): ... - - -class APIInternalError(APIStatusError): ... - - -class APIServerFlowExceedError(APIStatusError): ... - - -class APIResponseError(ZhipuAIError): - message: str - request: httpx.Request - json_data: object - - def __init__(self, message: str, request: httpx.Request, json_data: object): - self.message = message - self.request = request - self.json_data = json_data - super().__init__(message) - - -class APIResponseValidationError(APIResponseError): - status_code: int - response: httpx.Response - - def __init__(self, response: httpx.Response, json_data: object | None, *, message: str | None = None) -> None: - super().__init__( - message=message or "Data returned by API invalid for expected schema.", - request=response.request, - json_data=json_data, - ) - self.response = response - self.status_code = response.status_code - - -class APIConnectionError(APIResponseError): - def __init__(self, *, message: str = "Connection error.", request: httpx.Request) -> None: - super().__init__(message, request, json_data=None) - - -class APITimeoutError(APIConnectionError): - def __init__(self, request: httpx.Request) -> None: - super().__init__(message="Request timed out.", request=request) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_files.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_files.py deleted file mode 100644 index f9d2e14d9ecb93..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_files.py +++ /dev/null @@ -1,75 +0,0 @@ -from __future__ import annotations - -import io -import os -import pathlib -from typing import TypeGuard, overload - -from ._base_type import ( - 
Base64FileInput, - FileContent, - FileTypes, - HttpxFileContent, - HttpxFileTypes, - HttpxRequestFiles, - RequestFiles, -) -from ._utils import is_mapping_t, is_sequence_t, is_tuple_t - - -def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]: - return isinstance(obj, io.IOBase | os.PathLike) - - -def is_file_content(obj: object) -> TypeGuard[FileContent]: - return isinstance(obj, bytes | tuple | io.IOBase | os.PathLike) - - -def assert_is_file_content(obj: object, *, key: str | None = None) -> None: - if not is_file_content(obj): - prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`" - raise RuntimeError( - f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead. See https://github.com/openai/openai-python/tree/main#file-uploads" - ) from None - - -@overload -def to_httpx_files(files: None) -> None: ... - - -@overload -def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ... - - -def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: - if files is None: - return None - - if is_mapping_t(files): - files = {key: _transform_file(file) for key, file in files.items()} - elif is_sequence_t(files): - files = [(key, _transform_file(file)) for key, file in files] - else: - raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence") - - return files - - -def _transform_file(file: FileTypes) -> HttpxFileTypes: - if is_file_content(file): - if isinstance(file, os.PathLike): - path = pathlib.Path(file) - return (path.name, path.read_bytes()) - - return file - - if is_tuple_t(file): - return (file[0], _read_file_content(file[1]), *file[2:]) - - raise TypeError("Expected file types input to be a FileContent type or to be a tuple") - - -def _read_file_content(file: FileContent) -> HttpxFileContent: - if isinstance(file, os.PathLike): - return pathlib.Path(file).read_bytes() - return file diff --git 
a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py deleted file mode 100644 index ffdafb85d581fe..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py +++ /dev/null @@ -1,910 +0,0 @@ -from __future__ import annotations - -import inspect -import logging -import time -import warnings -from collections.abc import Iterator, Mapping -from itertools import starmap -from random import random -from typing import TYPE_CHECKING, Any, Generic, Literal, Optional, TypeVar, Union, cast, overload - -import httpx -import pydantic -from httpx import URL, Timeout - -from . import _errors, get_origin -from ._base_compat import model_copy -from ._base_models import GenericModel, construct_type, validate_type -from ._base_type import ( - NOT_GIVEN, - AnyMapping, - Body, - Data, - Headers, - HttpxSendArgs, - ModelBuilderProtocol, - NotGiven, - Omit, - PostParser, - Query, - RequestFiles, - ResponseT, -) -from ._constants import ( - INITIAL_RETRY_DELAY, - MAX_RETRY_DELAY, - RAW_RESPONSE_HEADER, - ZHIPUAI_DEFAULT_LIMITS, - ZHIPUAI_DEFAULT_MAX_RETRIES, - ZHIPUAI_DEFAULT_TIMEOUT, -) -from ._errors import APIConnectionError, APIResponseValidationError, APIStatusError, APITimeoutError -from ._files import to_httpx_files -from ._legacy_response import LegacyAPIResponse -from ._request_opt import FinalRequestOptions, UserRequestInput -from ._response import APIResponse, BaseAPIResponse, extract_response_type -from ._sse_client import StreamResponse -from ._utils import flatten, is_given, is_mapping - -log: logging.Logger = logging.getLogger(__name__) - -# TODO: make base page type vars covariant -SyncPageT = TypeVar("SyncPageT", bound="BaseSyncPage[Any]") -# AsyncPageT = TypeVar("AsyncPageT", bound="BaseAsyncPage[Any]") - -_T = TypeVar("_T") -_T_co = TypeVar("_T_co", covariant=True) - -if TYPE_CHECKING: - from httpx._config import 
DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT -else: - try: - from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT - except ImportError: - # taken from https://github.com/encode/httpx/blob/3ba5fe0d7ac70222590e759c31442b1cab263791/httpx/_config.py#L366 - HTTPX_DEFAULT_TIMEOUT = Timeout(5.0) - - -headers = { - "Accept": "application/json", - "Content-Type": "application/json; charset=UTF-8", -} - - -class PageInfo: - """Stores the necessary information to build the request to retrieve the next page. - - Either `url` or `params` must be set. - """ - - url: URL | NotGiven - params: Query | NotGiven - - @overload - def __init__( - self, - *, - url: URL, - ) -> None: ... - - @overload - def __init__( - self, - *, - params: Query, - ) -> None: ... - - def __init__( - self, - *, - url: URL | NotGiven = NOT_GIVEN, - params: Query | NotGiven = NOT_GIVEN, - ) -> None: - self.url = url - self.params = params - - -class BasePage(GenericModel, Generic[_T]): - """ - Defines the core interface for pagination. - - Type Args: - ModelT: The pydantic model that represents an item in the response. - - Methods: - has_next_page(): Check if there is another page available - next_page_info(): Get the necessary information to make a request for the next page - """ - - _options: FinalRequestOptions = pydantic.PrivateAttr() - _model: type[_T] = pydantic.PrivateAttr() - - def has_next_page(self) -> bool: - items = self._get_page_items() - if not items: - return False - return self.next_page_info() is not None - - def next_page_info(self) -> Optional[PageInfo]: ... - - def _get_page_items(self) -> Iterable[_T]: # type: ignore[empty-body] - ... - - def _params_from_url(self, url: URL) -> httpx.QueryParams: - # TODO: do we have to preprocess params here? 
- return httpx.QueryParams(cast(Any, self._options.params)).merge(url.params) - - def _info_to_options(self, info: PageInfo) -> FinalRequestOptions: - options = model_copy(self._options) - options._strip_raw_response_header() - - if not isinstance(info.params, NotGiven): - options.params = {**options.params, **info.params} - return options - - if not isinstance(info.url, NotGiven): - params = self._params_from_url(info.url) - url = info.url.copy_with(params=params) - options.params = dict(url.params) - options.url = str(url) - return options - - raise ValueError("Unexpected PageInfo state") - - -class BaseSyncPage(BasePage[_T], Generic[_T]): - _client: HttpClient = pydantic.PrivateAttr() - - def _set_private_attributes( - self, - client: HttpClient, - model: type[_T], - options: FinalRequestOptions, - ) -> None: - self._model = model - self._client = client - self._options = options - - # Pydantic uses a custom `__iter__` method to support casting BaseModels - # to dictionaries. e.g. dict(model). - # As we want to support `for item in page`, this is inherently incompatible - # with the default pydantic behavior. It is not possible to support both - # use cases at once. Fortunately, this is not a big deal as all other pydantic - # methods should continue to work as expected as there is an alternative method - # to cast a model to a dictionary, model.dict(), which is used internally - # by pydantic. - def __iter__(self) -> Iterator[_T]: # type: ignore - for page in self.iter_pages(): - yield from page._get_page_items() - - def iter_pages(self: SyncPageT) -> Iterator[SyncPageT]: - page = self - while True: - yield page - if page.has_next_page(): - page = page.get_next_page() - else: - return - - def get_next_page(self: SyncPageT) -> SyncPageT: - info = self.next_page_info() - if not info: - raise RuntimeError( - "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." 
- ) - - options = self._info_to_options(info) - return self._client._request_api_list(self._model, page=self.__class__, options=options) - - -class HttpClient: - _client: httpx.Client - _version: str - _base_url: URL - max_retries: int - timeout: Union[float, Timeout, None] - _limits: httpx.Limits - _has_custom_http_client: bool - _default_stream_cls: type[StreamResponse[Any]] | None = None - - _strict_response_validation: bool - - def __init__( - self, - *, - version: str, - base_url: URL, - _strict_response_validation: bool, - max_retries: int = ZHIPUAI_DEFAULT_MAX_RETRIES, - timeout: Union[float, Timeout, None], - limits: httpx.Limits | None = None, - custom_httpx_client: httpx.Client | None = None, - custom_headers: Mapping[str, str] | None = None, - ) -> None: - if limits is not None: - warnings.warn( - "The `connection_pool_limits` argument is deprecated. The `http_client` argument should be passed instead", # noqa: E501 - category=DeprecationWarning, - stacklevel=3, - ) - if custom_httpx_client is not None: - raise ValueError("The `http_client` argument is mutually exclusive with `connection_pool_limits`") - else: - limits = ZHIPUAI_DEFAULT_LIMITS - - if not is_given(timeout): - if custom_httpx_client and custom_httpx_client.timeout != HTTPX_DEFAULT_TIMEOUT: - timeout = custom_httpx_client.timeout - else: - timeout = ZHIPUAI_DEFAULT_TIMEOUT - self.max_retries = max_retries - self.timeout = timeout - self._limits = limits - self._has_custom_http_client = bool(custom_httpx_client) - self._client = custom_httpx_client or httpx.Client( - base_url=base_url, - timeout=self.timeout, - limits=limits, - ) - self._version = version - url = URL(url=base_url) - if not url.raw_path.endswith(b"/"): - url = url.copy_with(raw_path=url.raw_path + b"/") - self._base_url = url - self._custom_headers = custom_headers or {} - self._strict_response_validation = _strict_response_validation - - def _prepare_url(self, url: str) -> URL: - sub_url = URL(url) - if 
sub_url.is_relative_url: - request_raw_url = self._base_url.raw_path + sub_url.raw_path.lstrip(b"/") - return self._base_url.copy_with(raw_path=request_raw_url) - - return sub_url - - @property - def _default_headers(self): - return { - "Accept": "application/json", - "Content-Type": "application/json; charset=UTF-8", - "ZhipuAI-SDK-Ver": self._version, - "source_type": "zhipu-sdk-python", - "x-request-sdk": "zhipu-sdk-python", - **self.auth_headers, - **self._custom_headers, - } - - @property - def custom_auth(self) -> httpx.Auth | None: - return None - - @property - def auth_headers(self): - return {} - - def _prepare_headers(self, options: FinalRequestOptions) -> httpx.Headers: - custom_headers = options.headers or {} - headers_dict = _merge_mappings(self._default_headers, custom_headers) - - httpx_headers = httpx.Headers(headers_dict) - - return httpx_headers - - def _remaining_retries( - self, - remaining_retries: Optional[int], - options: FinalRequestOptions, - ) -> int: - return remaining_retries if remaining_retries is not None else options.get_max_retries(self.max_retries) - - def _calculate_retry_timeout( - self, - remaining_retries: int, - options: FinalRequestOptions, - response_headers: Optional[httpx.Headers] = None, - ) -> float: - max_retries = options.get_max_retries(self.max_retries) - - # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says. - # retry_after = self._parse_retry_after_header(response_headers) - # if retry_after is not None and 0 < retry_after <= 60: - # return retry_after - - nb_retries = max_retries - remaining_retries - - # Apply exponential backoff, but not more than the max. - sleep_seconds = min(INITIAL_RETRY_DELAY * pow(2.0, nb_retries), MAX_RETRY_DELAY) - - # Apply some jitter, plus-or-minus half a second. 
- jitter = 1 - 0.25 * random() - timeout = sleep_seconds * jitter - return max(timeout, 0) - - def _build_request(self, options: FinalRequestOptions) -> httpx.Request: - kwargs: dict[str, Any] = {} - headers = self._prepare_headers(options) - url = self._prepare_url(options.url) - json_data = options.json_data - if options.extra_json is not None: - if json_data is None: - json_data = cast(Body, options.extra_json) - elif is_mapping(json_data): - json_data = _merge_mappings(json_data, options.extra_json) - else: - raise RuntimeError(f"Unexpected JSON data type, {type(json_data)}, cannot merge with `extra_body`") - - content_type = headers.get("Content-Type") - # multipart/form-data; boundary=---abc-- - if headers.get("Content-Type") == "multipart/form-data": - if "boundary" not in content_type: - # only remove the header if the boundary hasn't been explicitly set - # as the caller doesn't want httpx to come up with their own boundary - headers.pop("Content-Type") - - if json_data: - kwargs["data"] = self._make_multipartform(json_data) - - return self._client.build_request( - headers=headers, - timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout, - method=options.method, - url=url, - json=json_data, - files=options.files, - params=options.params, - **kwargs, - ) - - def _object_to_formdata(self, key: str, value: Data | Mapping[object, object]) -> list[tuple[str, str]]: - items = [] - - if isinstance(value, Mapping): - for k, v in value.items(): - items.extend(self._object_to_formdata(f"{key}[{k}]", v)) - return items - if isinstance(value, list | tuple): - for v in value: - items.extend(self._object_to_formdata(key + "[]", v)) - return items - - def _primitive_value_to_str(val) -> str: - # copied from httpx - if val is True: - return "true" - elif val is False: - return "false" - elif val is None: - return "" - return str(val) - - str_data = _primitive_value_to_str(value) - - if not str_data: - return [] - return [(key, str_data)] - - 
def _make_multipartform(self, data: Mapping[object, object]) -> dict[str, object]: - items = flatten(list(starmap(self._object_to_formdata, data.items()))) - - serialized: dict[str, object] = {} - for key, value in items: - if key in serialized: - raise ValueError(f"存在重复的键: {key};") - serialized[key] = value - return serialized - - def _process_response_data( - self, - *, - data: object, - cast_type: type[ResponseT], - response: httpx.Response, - ) -> ResponseT: - if data is None: - return cast(ResponseT, None) - - if cast_type is object: - return cast(ResponseT, data) - - try: - if inspect.isclass(cast_type) and issubclass(cast_type, ModelBuilderProtocol): - return cast(ResponseT, cast_type.build(response=response, data=data)) - - if self._strict_response_validation: - return cast(ResponseT, validate_type(type_=cast_type, value=data)) - - return cast(ResponseT, construct_type(type_=cast_type, value=data)) - except pydantic.ValidationError as err: - raise APIResponseValidationError(response=response, json_data=data) from err - - def _should_stream_response_body(self, request: httpx.Request) -> bool: - return request.headers.get(RAW_RESPONSE_HEADER) == "stream" # type: ignore[no-any-return] - - def _should_retry(self, response: httpx.Response) -> bool: - # Note: this is not a standard header - should_retry_header = response.headers.get("x-should-retry") - - # If the server explicitly says whether or not to retry, obey. - if should_retry_header == "true": - log.debug("Retrying as header `x-should-retry` is set to `true`") - return True - if should_retry_header == "false": - log.debug("Not retrying as header `x-should-retry` is set to `false`") - return False - - # Retry on request timeouts. - if response.status_code == 408: - log.debug("Retrying due to status code %i", response.status_code) - return True - - # Retry on lock timeouts. 
- if response.status_code == 409: - log.debug("Retrying due to status code %i", response.status_code) - return True - - # Retry on rate limits. - if response.status_code == 429: - log.debug("Retrying due to status code %i", response.status_code) - return True - - # Retry internal errors. - if response.status_code >= 500: - log.debug("Retrying due to status code %i", response.status_code) - return True - - log.debug("Not retrying") - return False - - def is_closed(self) -> bool: - return self._client.is_closed - - def close(self): - self._client.close() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - - def request( - self, - cast_type: type[ResponseT], - options: FinalRequestOptions, - remaining_retries: Optional[int] = None, - *, - stream: bool = False, - stream_cls: type[StreamResponse] | None = None, - ) -> ResponseT | StreamResponse: - return self._request( - cast_type=cast_type, - options=options, - stream=stream, - stream_cls=stream_cls, - remaining_retries=remaining_retries, - ) - - def _request( - self, - *, - cast_type: type[ResponseT], - options: FinalRequestOptions, - remaining_retries: int | None, - stream: bool, - stream_cls: type[StreamResponse] | None, - ) -> ResponseT | StreamResponse: - retries = self._remaining_retries(remaining_retries, options) - request = self._build_request(options) - - kwargs: HttpxSendArgs = {} - if self.custom_auth is not None: - kwargs["auth"] = self.custom_auth - try: - response = self._client.send( - request, - stream=stream or self._should_stream_response_body(request=request), - **kwargs, - ) - except httpx.TimeoutException as err: - log.debug("Encountered httpx.TimeoutException", exc_info=True) - - if retries > 0: - return self._retry_request( - options, - cast_type, - retries, - stream=stream, - stream_cls=stream_cls, - response_headers=None, - ) - - log.debug("Raising timeout error") - raise APITimeoutError(request=request) from err - except Exception as err: 
- log.debug("Encountered Exception", exc_info=True) - - if retries > 0: - return self._retry_request( - options, - cast_type, - retries, - stream=stream, - stream_cls=stream_cls, - response_headers=None, - ) - - log.debug("Raising connection error") - raise APIConnectionError(request=request) from err - - log.debug( - 'HTTP Request: %s %s "%i %s"', request.method, request.url, response.status_code, response.reason_phrase - ) - - try: - response.raise_for_status() - except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code - log.debug("Encountered httpx.HTTPStatusError", exc_info=True) - - if retries > 0 and self._should_retry(err.response): - err.response.close() - return self._retry_request( - options, - cast_type, - retries, - err.response.headers, - stream=stream, - stream_cls=stream_cls, - ) - - # If the response is streamed then we need to explicitly read the response - # to completion before attempting to access the response text. - if not err.response.is_closed: - err.response.read() - - log.debug("Re-raising status error") - raise self._make_status_error(err.response) from None - - # return self._parse_response( - # cast_type=cast_type, - # options=options, - # response=response, - # stream=stream, - # stream_cls=stream_cls, - # ) - return self._process_response( - cast_type=cast_type, - options=options, - response=response, - stream=stream, - stream_cls=stream_cls, - ) - - def _retry_request( - self, - options: FinalRequestOptions, - cast_type: type[ResponseT], - remaining_retries: int, - response_headers: httpx.Headers | None, - *, - stream: bool, - stream_cls: type[StreamResponse] | None, - ) -> ResponseT | StreamResponse: - remaining = remaining_retries - 1 - if remaining == 1: - log.debug("1 retry left") - else: - log.debug("%i retries left", remaining) - - timeout = self._calculate_retry_timeout(remaining, options, response_headers) - log.info("Retrying request to %s in %f seconds", options.url, timeout) - - # In a synchronous context 
we are blocking the entire thread. Up to the library user to run the client in a - # different thread if necessary. - time.sleep(timeout) - - return self._request( - options=options, - cast_type=cast_type, - remaining_retries=remaining, - stream=stream, - stream_cls=stream_cls, - ) - - def _process_response( - self, - *, - cast_type: type[ResponseT], - options: FinalRequestOptions, - response: httpx.Response, - stream: bool, - stream_cls: type[StreamResponse] | None, - ) -> ResponseT: - # _legacy_response with raw_response_header to parser method - if response.request.headers.get(RAW_RESPONSE_HEADER) == "true": - return cast( - ResponseT, - LegacyAPIResponse( - raw=response, - client=self, - cast_type=cast_type, - stream=stream, - stream_cls=stream_cls, - options=options, - ), - ) - - origin = get_origin(cast_type) or cast_type - - if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse): - if not issubclass(origin, APIResponse): - raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}") - - response_cls = cast("type[BaseAPIResponse[Any]]", cast_type) - return cast( - ResponseT, - response_cls( - raw=response, - client=self, - cast_type=extract_response_type(response_cls), - stream=stream, - stream_cls=stream_cls, - options=options, - ), - ) - - if cast_type == httpx.Response: - return cast(ResponseT, response) - - api_response = APIResponse( - raw=response, - client=self, - cast_type=cast("type[ResponseT]", cast_type), # pyright: ignore[reportUnnecessaryCast] - stream=stream, - stream_cls=stream_cls, - options=options, - ) - if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): - return cast(ResponseT, api_response) - - return api_response.parse() - - def _request_api_list( - self, - model: type[object], - page: type[SyncPageT], - options: FinalRequestOptions, - ) -> SyncPageT: - def _parser(resp: SyncPageT) -> SyncPageT: - resp._set_private_attributes( - client=self, - model=model, - options=options, - ) - return resp 
- - options.post_parser = _parser - - return self.request(page, options, stream=False) - - @overload - def get( - self, - path: str, - *, - cast_type: type[ResponseT], - options: UserRequestInput = {}, - stream: Literal[False] = False, - ) -> ResponseT: ... - - @overload - def get( - self, - path: str, - *, - cast_type: type[ResponseT], - options: UserRequestInput = {}, - stream: Literal[True], - stream_cls: type[StreamResponse], - ) -> StreamResponse: ... - - @overload - def get( - self, - path: str, - *, - cast_type: type[ResponseT], - options: UserRequestInput = {}, - stream: bool, - stream_cls: type[StreamResponse] | None = None, - ) -> ResponseT | StreamResponse: ... - - def get( - self, - path: str, - *, - cast_type: type[ResponseT], - options: UserRequestInput = {}, - stream: bool = False, - stream_cls: type[StreamResponse] | None = None, - ) -> ResponseT: - opts = FinalRequestOptions.construct(method="get", url=path, **options) - return cast(ResponseT, self.request(cast_type, opts, stream=stream, stream_cls=stream_cls)) - - @overload - def post( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - files: RequestFiles | None = None, - stream: Literal[False] = False, - ) -> ResponseT: ... - - @overload - def post( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - files: RequestFiles | None = None, - stream: Literal[True], - stream_cls: type[StreamResponse], - ) -> StreamResponse: ... - - @overload - def post( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - files: RequestFiles | None = None, - stream: bool, - stream_cls: type[StreamResponse] | None = None, - ) -> ResponseT | StreamResponse: ... 
- - def post( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - files: RequestFiles | None = None, - stream: bool = False, - stream_cls: type[StreamResponse[Any]] | None = None, - ) -> ResponseT | StreamResponse: - opts = FinalRequestOptions.construct( - method="post", url=path, json_data=body, files=to_httpx_files(files), **options - ) - - return cast(ResponseT, self.request(cast_type, opts, stream=stream, stream_cls=stream_cls)) - - def patch( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - ) -> ResponseT: - opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) - - return self.request( - cast_type=cast_type, - options=opts, - ) - - def put( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - files: RequestFiles | None = None, - ) -> ResponseT | StreamResponse: - opts = FinalRequestOptions.construct( - method="put", url=path, json_data=body, files=to_httpx_files(files), **options - ) - - return self.request( - cast_type=cast_type, - options=opts, - ) - - def delete( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - ) -> ResponseT | StreamResponse: - opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) - - return self.request( - cast_type=cast_type, - options=opts, - ) - - def get_api_list( - self, - path: str, - *, - model: type[object], - page: type[SyncPageT], - body: Body | None = None, - options: UserRequestInput = {}, - method: str = "get", - ) -> SyncPageT: - opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) - return self._request_api_list(model, page, opts) - - def _make_status_error(self, response) -> APIStatusError: - response_text = response.text.strip() - status_code 
= response.status_code - error_msg = f"Error code: {status_code}, with error text {response_text}" - - if status_code == 400: - return _errors.APIRequestFailedError(message=error_msg, response=response) - elif status_code == 401: - return _errors.APIAuthenticationError(message=error_msg, response=response) - elif status_code == 429: - return _errors.APIReachLimitError(message=error_msg, response=response) - elif status_code == 500: - return _errors.APIInternalError(message=error_msg, response=response) - elif status_code == 503: - return _errors.APIServerFlowExceedError(message=error_msg, response=response) - return APIStatusError(message=error_msg, response=response) - - -def make_request_options( - *, - query: Query | None = None, - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - post_parser: PostParser | NotGiven = NOT_GIVEN, -) -> UserRequestInput: - """Create a dict of type RequestOptions without keys of NotGiven values.""" - options: UserRequestInput = {} - if extra_headers is not None: - options["headers"] = extra_headers - - if extra_body is not None: - options["extra_json"] = cast(AnyMapping, extra_body) - - if query is not None: - options["params"] = query - - if extra_query is not None: - options["params"] = {**options.get("params", {}), **extra_query} - - if not isinstance(timeout, NotGiven): - options["timeout"] = timeout - - if is_given(post_parser): - # internal - options["post_parser"] = post_parser # type: ignore - - return options - - -def _merge_mappings( - obj1: Mapping[_T_co, Union[_T, Omit]], - obj2: Mapping[_T_co, Union[_T, Omit]], -) -> dict[_T_co, _T]: - """Merge two mappings of the same type, removing any values that are instances of `Omit`. - - In cases with duplicate keys the second mapping takes precedence. 
- """ - merged = {**obj1, **obj2} - return {key: value for key, value in merged.items() if not isinstance(value, Omit)} diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_jwt_token.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_jwt_token.py deleted file mode 100644 index 21f158a5f45251..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_jwt_token.py +++ /dev/null @@ -1,31 +0,0 @@ -import time - -import cachetools.func -import jwt - -# 缓存时间 3分钟 -CACHE_TTL_SECONDS = 3 * 60 - -# token 有效期比缓存时间 多30秒 -API_TOKEN_TTL_SECONDS = CACHE_TTL_SECONDS + 30 - - -@cachetools.func.ttl_cache(maxsize=10, ttl=CACHE_TTL_SECONDS) -def generate_token(apikey: str): - try: - api_key, secret = apikey.split(".") - except Exception as e: - raise Exception("invalid api_key", e) - - payload = { - "api_key": api_key, - "exp": int(round(time.time() * 1000)) + API_TOKEN_TTL_SECONDS * 1000, - "timestamp": int(round(time.time() * 1000)), - } - ret = jwt.encode( - payload, - secret, - algorithm="HS256", - headers={"alg": "HS256", "sign_type": "SIGN"}, - ) - return ret diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_binary_response.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_binary_response.py deleted file mode 100644 index 51623bd860951f..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_binary_response.py +++ /dev/null @@ -1,207 +0,0 @@ -from __future__ import annotations - -import os -from collections.abc import AsyncIterator, Iterator -from typing import Any - -import httpx - - -class HttpxResponseContent: - @property - def content(self) -> bytes: - raise NotImplementedError("This method is not implemented for this class.") - - @property - def text(self) -> str: - raise NotImplementedError("This method is not implemented for this class.") - - @property - def encoding(self) -> str | None: - raise 
NotImplementedError("This method is not implemented for this class.") - - @property - def charset_encoding(self) -> str | None: - raise NotImplementedError("This method is not implemented for this class.") - - def json(self, **kwargs: Any) -> Any: - raise NotImplementedError("This method is not implemented for this class.") - - def read(self) -> bytes: - raise NotImplementedError("This method is not implemented for this class.") - - def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: - raise NotImplementedError("This method is not implemented for this class.") - - def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: - raise NotImplementedError("This method is not implemented for this class.") - - def iter_lines(self) -> Iterator[str]: - raise NotImplementedError("This method is not implemented for this class.") - - def iter_raw(self, chunk_size: int | None = None) -> Iterator[bytes]: - raise NotImplementedError("This method is not implemented for this class.") - - def write_to_file( - self, - file: str | os.PathLike[str], - ) -> None: - raise NotImplementedError("This method is not implemented for this class.") - - def stream_to_file( - self, - file: str | os.PathLike[str], - *, - chunk_size: int | None = None, - ) -> None: - raise NotImplementedError("This method is not implemented for this class.") - - def close(self) -> None: - raise NotImplementedError("This method is not implemented for this class.") - - async def aread(self) -> bytes: - raise NotImplementedError("This method is not implemented for this class.") - - async def aiter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: - raise NotImplementedError("This method is not implemented for this class.") - - async def aiter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: - raise NotImplementedError("This method is not implemented for this class.") - - async def aiter_lines(self) -> AsyncIterator[str]: - raise NotImplementedError("This 
method is not implemented for this class.") - - async def aiter_raw(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: - raise NotImplementedError("This method is not implemented for this class.") - - async def astream_to_file( - self, - file: str | os.PathLike[str], - *, - chunk_size: int | None = None, - ) -> None: - raise NotImplementedError("This method is not implemented for this class.") - - async def aclose(self) -> None: - raise NotImplementedError("This method is not implemented for this class.") - - -class HttpxBinaryResponseContent(HttpxResponseContent): - response: httpx.Response - - def __init__(self, response: httpx.Response) -> None: - self.response = response - - @property - def content(self) -> bytes: - return self.response.content - - @property - def encoding(self) -> str | None: - return self.response.encoding - - @property - def charset_encoding(self) -> str | None: - return self.response.charset_encoding - - def read(self) -> bytes: - return self.response.read() - - def text(self) -> str: - raise NotImplementedError("Not implemented for binary response content") - - def json(self, **kwargs: Any) -> Any: - raise NotImplementedError("Not implemented for binary response content") - - def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: - raise NotImplementedError("Not implemented for binary response content") - - def iter_lines(self) -> Iterator[str]: - raise NotImplementedError("Not implemented for binary response content") - - async def aiter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: - raise NotImplementedError("Not implemented for binary response content") - - async def aiter_lines(self) -> AsyncIterator[str]: - raise NotImplementedError("Not implemented for binary response content") - - def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: - return self.response.iter_bytes(chunk_size) - - def iter_raw(self, chunk_size: int | None = None) -> Iterator[bytes]: - return 
self.response.iter_raw(chunk_size) - - def write_to_file( - self, - file: str | os.PathLike[str], - ) -> None: - """Write the output to the given file. - - Accepts a filename or any path-like object, e.g. pathlib.Path - - Note: if you want to stream the data to the file instead of writing - all at once then you should use `.with_streaming_response` when making - the API request, e.g. `client.with_streaming_response.foo().stream_to_file('my_filename.txt')` - """ - with open(file, mode="wb") as f: - for data in self.response.iter_bytes(): - f.write(data) - - def stream_to_file( - self, - file: str | os.PathLike[str], - *, - chunk_size: int | None = None, - ) -> None: - with open(file, mode="wb") as f: - for data in self.response.iter_bytes(chunk_size): - f.write(data) - - def close(self) -> None: - return self.response.close() - - async def aread(self) -> bytes: - return await self.response.aread() - - async def aiter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: - return self.response.aiter_bytes(chunk_size) - - async def aiter_raw(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: - return self.response.aiter_raw(chunk_size) - - async def astream_to_file( - self, - file: str | os.PathLike[str], - *, - chunk_size: int | None = None, - ) -> None: - path = anyio.Path(file) - async with await path.open(mode="wb") as f: - async for data in self.response.aiter_bytes(chunk_size): - await f.write(data) - - async def aclose(self) -> None: - return await self.response.aclose() - - -class HttpxTextBinaryResponseContent(HttpxBinaryResponseContent): - response: httpx.Response - - @property - def text(self) -> str: - return self.response.text - - def json(self, **kwargs: Any) -> Any: - return self.response.json(**kwargs) - - def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: - return self.response.iter_text(chunk_size) - - def iter_lines(self) -> Iterator[str]: - return self.response.iter_lines() - - async def aiter_text(self, 
chunk_size: int | None = None) -> AsyncIterator[str]: - return self.response.aiter_text(chunk_size) - - async def aiter_lines(self) -> AsyncIterator[str]: - return self.response.aiter_lines() diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_response.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_response.py deleted file mode 100644 index 51bf21bcdc17a8..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_response.py +++ /dev/null @@ -1,341 +0,0 @@ -from __future__ import annotations - -import datetime -import functools -import inspect -import logging -from collections.abc import Callable -from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union, cast, get_origin, overload - -import httpx -import pydantic -from typing_extensions import ParamSpec, override - -from ._base_models import BaseModel, is_basemodel -from ._base_type import NoneType -from ._constants import RAW_RESPONSE_HEADER -from ._errors import APIResponseValidationError -from ._legacy_binary_response import HttpxResponseContent, HttpxTextBinaryResponseContent -from ._sse_client import StreamResponse, extract_stream_chunk_type, is_stream_class_type -from ._utils import extract_type_arg, is_annotated_type, is_given - -if TYPE_CHECKING: - from ._http_client import HttpClient - from ._request_opt import FinalRequestOptions - -P = ParamSpec("P") -R = TypeVar("R") -_T = TypeVar("_T") - -log: logging.Logger = logging.getLogger(__name__) - - -class LegacyAPIResponse(Generic[R]): - """This is a legacy class as it will be replaced by `APIResponse` - and `AsyncAPIResponse` in the `_response.py` file in the next major - release. - - For the sync client this will mostly be the same with the exception - of `content` & `text` will be methods instead of properties. In the - async client, all methods will be async. - - A migration script will be provided & the migration in general should - be smooth. 
- """ - - _cast_type: type[R] - _client: HttpClient - _parsed_by_type: dict[type[Any], Any] - _stream: bool - _stream_cls: type[StreamResponse[Any]] | None - _options: FinalRequestOptions - - http_response: httpx.Response - - def __init__( - self, - *, - raw: httpx.Response, - cast_type: type[R], - client: HttpClient, - stream: bool, - stream_cls: type[StreamResponse[Any]] | None, - options: FinalRequestOptions, - ) -> None: - self._cast_type = cast_type - self._client = client - self._parsed_by_type = {} - self._stream = stream - self._stream_cls = stream_cls - self._options = options - self.http_response = raw - - @property - def request_id(self) -> str | None: - return self.http_response.headers.get("x-request-id") # type: ignore[no-any-return] - - @overload - def parse(self, *, to: type[_T]) -> _T: ... - - @overload - def parse(self) -> R: ... - - def parse(self, *, to: type[_T] | None = None) -> R | _T: - """Returns the rich python representation of this response's data. - - NOTE: For the async client: this will become a coroutine in the next major version. - - For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. - - You can customize the type that the response is parsed into through - the `to` argument, e.g. 
- - ```py - from zhipuai import BaseModel - - - class MyModel(BaseModel): - foo: str - - - obj = response.parse(to=MyModel) - print(obj.foo) - ``` - - We support parsing: - - `BaseModel` - - `dict` - - `list` - - `Union` - - `str` - - `int` - - `float` - - `httpx.Response` - """ - cache_key = to if to is not None else self._cast_type - cached = self._parsed_by_type.get(cache_key) - if cached is not None: - return cached # type: ignore[no-any-return] - - parsed = self._parse(to=to) - if is_given(self._options.post_parser): - parsed = self._options.post_parser(parsed) - - self._parsed_by_type[cache_key] = parsed - return parsed - - @property - def headers(self) -> httpx.Headers: - return self.http_response.headers - - @property - def http_request(self) -> httpx.Request: - return self.http_response.request - - @property - def status_code(self) -> int: - return self.http_response.status_code - - @property - def url(self) -> httpx.URL: - return self.http_response.url - - @property - def method(self) -> str: - return self.http_request.method - - @property - def content(self) -> bytes: - """Return the binary response content. - - NOTE: this will be removed in favour of `.read()` in the - next major version. - """ - return self.http_response.content - - @property - def text(self) -> str: - """Return the decoded response content. - - NOTE: this will be turned into a method in the next major version. 
- """ - return self.http_response.text - - @property - def http_version(self) -> str: - return self.http_response.http_version - - @property - def is_closed(self) -> bool: - return self.http_response.is_closed - - @property - def elapsed(self) -> datetime.timedelta: - """The time taken for the complete request/response cycle to complete.""" - return self.http_response.elapsed - - def _parse(self, *, to: type[_T] | None = None) -> R | _T: - # unwrap `Annotated[T, ...]` -> `T` - if to and is_annotated_type(to): - to = extract_type_arg(to, 0) - - if self._stream: - if to: - if not is_stream_class_type(to): - raise TypeError(f"Expected custom parse type to be a subclass of {StreamResponse}") - - return cast( - _T, - to( - cast_type=extract_stream_chunk_type( - to, - failure_message="Expected custom stream type to be passed with a type argument, e.g. StreamResponse[ChunkType]", # noqa: E501 - ), - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - if self._stream_cls: - return cast( - R, - self._stream_cls( - cast_type=extract_stream_chunk_type(self._stream_cls), - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - stream_cls = cast("type[StreamResponse[Any]] | None", self._client._default_stream_cls) - if stream_cls is None: - raise MissingStreamClassError() - - return cast( - R, - stream_cls( - cast_type=self._cast_type, - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - cast_type = to if to is not None else self._cast_type - - # unwrap `Annotated[T, ...]` -> `T` - if is_annotated_type(cast_type): - cast_type = extract_type_arg(cast_type, 0) - - if cast_type is NoneType: - return cast(R, None) - - response = self.http_response - if cast_type == str: - return cast(R, response.text) - - if cast_type == int: - return cast(R, int(response.text)) - - if cast_type == float: - return cast(R, float(response.text)) - - origin = get_origin(cast_type) or cast_type - - if inspect.isclass(origin) and 
issubclass(origin, HttpxResponseContent): - # in the response, e.g. mime file - *_, filename = response.headers.get("content-disposition", "").split("filename=") - # 判断文件类型是jsonl类型的使用HttpxTextBinaryResponseContent - if filename and filename.endswith(".jsonl") or filename and filename.endswith(".xlsx"): - return cast(R, HttpxTextBinaryResponseContent(response)) - else: - return cast(R, cast_type(response)) # type: ignore - - if origin == LegacyAPIResponse: - raise RuntimeError("Unexpected state - cast_type is `APIResponse`") - - if inspect.isclass(origin) and issubclass(origin, httpx.Response): - # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response - # and pass that class to our request functions. We cannot change the variance to be either - # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct - # the response class ourselves but that is something that should be supported directly in httpx - # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. - if cast_type != httpx.Response: - raise ValueError("Subclasses of httpx.Response cannot be passed to `cast_type`") - return cast(R, response) - - if inspect.isclass(origin) and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel): - raise TypeError("Pydantic models must subclass our base model type, e.g. `from openai import BaseModel`") - - if ( - cast_type is not object - and origin is not list - and origin is not dict - and origin is not Union - and not issubclass(origin, BaseModel) - ): - raise RuntimeError( - f"Unsupported type, expected {cast_type} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}." # noqa: E501 - ) - - # split is required to handle cases where additional information is included - # in the response, e.g. 
application/json; charset=utf-8 - content_type, *_ = response.headers.get("content-type", "*").split(";") - if content_type != "application/json": - if is_basemodel(cast_type): - try: - data = response.json() - except Exception as exc: - log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc) - else: - return self._client._process_response_data( - data=data, - cast_type=cast_type, # type: ignore - response=response, - ) - - if self._client._strict_response_validation: - raise APIResponseValidationError( - response=response, - message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", # noqa: E501 - json_data=response.text, - ) - - # If the API responds with content that isn't JSON then we just return - # the (decoded) text without performing any parsing so that you can still - # handle the response however you need to. - return response.text # type: ignore - - data = response.json() - - return self._client._process_response_data( - data=data, - cast_type=cast_type, # type: ignore - response=response, - ) - - @override - def __repr__(self) -> str: - return f"" - - -class MissingStreamClassError(TypeError): - def __init__(self) -> None: - super().__init__( - "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `openai._streaming` for reference", # noqa: E501 - ) - - -def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, LegacyAPIResponse[R]]: - """Higher order function that takes one of our bound API methods and wraps it - to support returning the raw `APIResponse` object directly. 
- """ - - @functools.wraps(func) - def wrapped(*args: P.args, **kwargs: P.kwargs) -> LegacyAPIResponse[R]: - extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} - extra_headers[RAW_RESPONSE_HEADER] = "true" - - kwargs["extra_headers"] = extra_headers - - return cast(LegacyAPIResponse[R], func(*args, **kwargs)) - - return wrapped diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_request_opt.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_request_opt.py deleted file mode 100644 index c3b894b3a3d88f..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_request_opt.py +++ /dev/null @@ -1,97 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable -from typing import TYPE_CHECKING, Any, ClassVar, Union, cast - -import pydantic.generics -from httpx import Timeout -from typing_extensions import Required, TypedDict, Unpack, final - -from ._base_compat import PYDANTIC_V2, ConfigDict -from ._base_type import AnyMapping, Body, Headers, HttpxRequestFiles, NotGiven, Query -from ._constants import RAW_RESPONSE_HEADER -from ._utils import is_given, strip_not_given - - -class UserRequestInput(TypedDict, total=False): - headers: Headers - max_retries: int - timeout: float | Timeout | None - params: Query - extra_json: AnyMapping - - -class FinalRequestOptionsInput(TypedDict, total=False): - method: Required[str] - url: Required[str] - params: Query - headers: Headers - max_retries: int - timeout: float | Timeout | None - files: HttpxRequestFiles | None - json_data: Body - extra_json: AnyMapping - - -@final -class FinalRequestOptions(pydantic.BaseModel): - method: str - url: str - params: Query = {} - headers: Union[Headers, NotGiven] = NotGiven() - max_retries: Union[int, NotGiven] = NotGiven() - timeout: Union[float, Timeout, None, NotGiven] = NotGiven() - files: Union[HttpxRequestFiles, None] = None - idempotency_key: Union[str, None] = None 
- post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven() - - # It should be noted that we cannot use `json` here as that would override - # a BaseModel method in an incompatible fashion. - json_data: Union[Body, None] = None - extra_json: Union[AnyMapping, None] = None - - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) - else: - - class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] - arbitrary_types_allowed: bool = True - - def get_max_retries(self, max_retries: int) -> int: - if isinstance(self.max_retries, NotGiven): - return max_retries - return self.max_retries - - def _strip_raw_response_header(self) -> None: - if not is_given(self.headers): - return - - if self.headers.get(RAW_RESPONSE_HEADER): - self.headers = {**self.headers} - self.headers.pop(RAW_RESPONSE_HEADER) - - # override the `construct` method so that we can run custom transformations. - # this is necessary as we don't want to do any actual runtime type checking - # (which means we can't use validators) but we do want to ensure that `NotGiven` - # values are not present - # - # type ignore required because we're adding explicit types to `**values` - @classmethod - def construct( # type: ignore - cls, - _fields_set: set[str] | None = None, - **values: Unpack[UserRequestInput], - ) -> FinalRequestOptions: - kwargs: dict[str, Any] = { - # we unconditionally call `strip_not_given` on any value - # as it will just ignore any non-mapping types - key: strip_not_given(value) - for key, value in values.items() - } - if PYDANTIC_V2: - return super().model_construct(_fields_set, **kwargs) - return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] - - if not TYPE_CHECKING: - # type checkers incorrectly complain about this assignment - model_construct = construct diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_response.py 
b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_response.py deleted file mode 100644 index 92e601805569f3..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_response.py +++ /dev/null @@ -1,398 +0,0 @@ -from __future__ import annotations - -import datetime -import inspect -import logging -from collections.abc import Iterator -from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union, cast, get_origin, overload - -import httpx -import pydantic -from typing_extensions import ParamSpec, override - -from ._base_models import BaseModel, is_basemodel -from ._base_type import NoneType -from ._errors import APIResponseValidationError, ZhipuAIError -from ._sse_client import StreamResponse, extract_stream_chunk_type, is_stream_class_type -from ._utils import extract_type_arg, extract_type_var_from_base, is_annotated_type, is_given - -if TYPE_CHECKING: - from ._http_client import HttpClient - from ._request_opt import FinalRequestOptions - -P = ParamSpec("P") -R = TypeVar("R") -_T = TypeVar("_T") -_APIResponseT = TypeVar("_APIResponseT", bound="APIResponse[Any]") -log: logging.Logger = logging.getLogger(__name__) - - -class BaseAPIResponse(Generic[R]): - _cast_type: type[R] - _client: HttpClient - _parsed_by_type: dict[type[Any], Any] - _is_sse_stream: bool - _stream_cls: type[StreamResponse[Any]] - _options: FinalRequestOptions - http_response: httpx.Response - - def __init__( - self, - *, - raw: httpx.Response, - cast_type: type[R], - client: HttpClient, - stream: bool, - stream_cls: type[StreamResponse[Any]] | None = None, - options: FinalRequestOptions, - ) -> None: - self._cast_type = cast_type - self._client = client - self._parsed_by_type = {} - self._is_sse_stream = stream - self._stream_cls = stream_cls - self._options = options - self.http_response = raw - - def _parse(self, *, to: type[_T] | None = None) -> R | _T: - # unwrap `Annotated[T, ...]` -> `T` - if to and is_annotated_type(to): - to = 
extract_type_arg(to, 0) - - if self._is_sse_stream: - if to: - if not is_stream_class_type(to): - raise TypeError(f"Expected custom parse type to be a subclass of {StreamResponse}") - - return cast( - _T, - to( - cast_type=extract_stream_chunk_type( - to, - failure_message="Expected custom stream type to be passed with a type argument, e.g. StreamResponse[ChunkType]", # noqa: E501 - ), - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - if self._stream_cls: - return cast( - R, - self._stream_cls( - cast_type=extract_stream_chunk_type(self._stream_cls), - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - stream_cls = cast("type[Stream[Any]] | None", self._client._default_stream_cls) - if stream_cls is None: - raise MissingStreamClassError() - - return cast( - R, - stream_cls( - cast_type=self._cast_type, - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - cast_type = to if to is not None else self._cast_type - - # unwrap `Annotated[T, ...]` -> `T` - if is_annotated_type(cast_type): - cast_type = extract_type_arg(cast_type, 0) - - if cast_type is NoneType: - return cast(R, None) - - response = self.http_response - if cast_type == str: - return cast(R, response.text) - - if cast_type == bytes: - return cast(R, response.content) - - if cast_type == int: - return cast(R, int(response.text)) - - if cast_type == float: - return cast(R, float(response.text)) - - origin = get_origin(cast_type) or cast_type - - # handle the legacy binary response case - if inspect.isclass(cast_type) and cast_type.__name__ == "HttpxBinaryResponseContent": - return cast(R, cast_type(response)) # type: ignore - - if origin == APIResponse: - raise RuntimeError("Unexpected state - cast_type is `APIResponse`") - - if inspect.isclass(origin) and issubclass(origin, httpx.Response): - # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response - # and pass that class to our request functions. 
We cannot change the variance to be either - # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct - # the response class ourselves but that is something that should be supported directly in httpx - # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. - if cast_type != httpx.Response: - raise ValueError("Subclasses of httpx.Response cannot be passed to `cast_type`") - return cast(R, response) - - if inspect.isclass(origin) and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel): - raise TypeError("Pydantic models must subclass our base model type, e.g. `from openai import BaseModel`") - - if ( - cast_type is not object - and origin is not list - and origin is not dict - and origin is not Union - and not issubclass(origin, BaseModel) - ): - raise RuntimeError( - f"Unsupported type, expected {cast_type} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}." # noqa: E501 - ) - - # split is required to handle cases where additional information is included - # in the response, e.g. 
application/json; charset=utf-8 - content_type, *_ = response.headers.get("content-type", "*").split(";") - if content_type != "application/json": - if is_basemodel(cast_type): - try: - data = response.json() - except Exception as exc: - log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc) - else: - return self._client._process_response_data( - data=data, - cast_type=cast_type, # type: ignore - response=response, - ) - - if self._client._strict_response_validation: - raise APIResponseValidationError( - response=response, - message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", # noqa: E501 - json_data=response.text, - ) - - # If the API responds with content that isn't JSON then we just return - # the (decoded) text without performing any parsing so that you can still - # handle the response however you need to. - return response.text # type: ignore - - data = response.json() - - return self._client._process_response_data( - data=data, - cast_type=cast_type, # type: ignore - response=response, - ) - - @property - def headers(self) -> httpx.Headers: - return self.http_response.headers - - @property - def http_request(self) -> httpx.Request: - """Returns the httpx Request instance associated with the current response.""" - return self.http_response.request - - @property - def status_code(self) -> int: - return self.http_response.status_code - - @property - def url(self) -> httpx.URL: - """Returns the URL for which the request was made.""" - return self.http_response.url - - @property - def method(self) -> str: - return self.http_request.method - - @property - def http_version(self) -> str: - return self.http_response.http_version - - @property - def elapsed(self) -> datetime.timedelta: - """The time taken for the complete request/response cycle to complete.""" - return self.http_response.elapsed - - @property - def is_closed(self) -> bool: - """Whether or not the response body 
has been closed. - - If this is False then there is response data that has not been read yet. - You must either fully consume the response body or call `.close()` - before discarding the response to prevent resource leaks. - """ - return self.http_response.is_closed - - @override - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.status_code} {self.http_response.reason_phrase}] type={self._cast_type}>" # noqa: E501 - - -class APIResponse(BaseAPIResponse[R]): - @property - def request_id(self) -> str | None: - return self.http_response.headers.get("x-request-id") # type: ignore[no-any-return] - - @overload - def parse(self, *, to: type[_T]) -> _T: ... - - @overload - def parse(self) -> R: ... - - def parse(self, *, to: type[_T] | None = None) -> R | _T: - """Returns the rich python representation of this response's data. - - For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. - - You can customize the type that the response is parsed into through - the `to` argument, e.g. - - ```py - from openai import BaseModel - - - class MyModel(BaseModel): - foo: str - - - obj = response.parse(to=MyModel) - print(obj.foo) - ``` - - We support parsing: - - `BaseModel` - - `dict` - - `list` - - `Union` - - `str` - - `int` - - `float` - - `httpx.Response` - """ - cache_key = to if to is not None else self._cast_type - cached = self._parsed_by_type.get(cache_key) - if cached is not None: - return cached # type: ignore[no-any-return] - - if not self._is_sse_stream: - self.read() - - parsed = self._parse(to=to) - if is_given(self._options.post_parser): - parsed = self._options.post_parser(parsed) - - self._parsed_by_type[cache_key] = parsed - return parsed - - def read(self) -> bytes: - """Read and return the binary response content.""" - try: - return self.http_response.read() - except httpx.StreamConsumed as exc: - # The default error raised by httpx isn't very - # helpful in our case so we re-raise it with - # a different error message. 
- raise StreamAlreadyConsumed() from exc - - def text(self) -> str: - """Read and decode the response content into a string.""" - self.read() - return self.http_response.text - - def json(self) -> object: - """Read and decode the JSON response content.""" - self.read() - return self.http_response.json() - - def close(self) -> None: - """Close the response and release the connection. - - Automatically called if the response body is read to completion. - """ - self.http_response.close() - - def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: - """ - A byte-iterator over the decoded response content. - - This automatically handles gzip, deflate and brotli encoded responses. - """ - yield from self.http_response.iter_bytes(chunk_size) - - def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: - """A str-iterator over the decoded response content - that handles both gzip, deflate, etc but also detects the content's - string encoding. - """ - yield from self.http_response.iter_text(chunk_size) - - def iter_lines(self) -> Iterator[str]: - """Like `iter_text()` but will only yield chunks for each line""" - yield from self.http_response.iter_lines() - - -class MissingStreamClassError(TypeError): - def __init__(self) -> None: - super().__init__( - "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `openai._streaming` for reference", # noqa: E501 - ) - - -class StreamAlreadyConsumed(ZhipuAIError): # noqa: N818 - """ - Attempted to read or stream content, but the content has already - been streamed. - - This can happen if you use a method like `.iter_lines()` and then attempt - to read th entire response body afterwards, e.g. - - ```py - response = await client.post(...) - async for line in response.iter_lines(): - ... 
# do something with `line` - - content = await response.read() - # ^ error - ``` - - If you want this behavior you'll need to either manually accumulate the response - content or call `await response.read()` before iterating over the stream. - """ - - def __init__(self) -> None: - message = ( - "Attempted to read or stream some content, but the content has " - "already been streamed. " - "This could be due to attempting to stream the response " - "content more than once." - "\n\n" - "You can fix this by manually accumulating the response content while streaming " - "or by calling `.read()` before starting to stream." - ) - super().__init__(message) - - -def extract_response_type(typ: type[BaseAPIResponse[Any]]) -> type: - """Given a type like `APIResponse[T]`, returns the generic type variable `T`. - - This also handles the case where a concrete subclass is given, e.g. - ```py - class MyResponse(APIResponse[bytes]): - ... - - extract_response_type(MyResponse) -> bytes - ``` - """ - return extract_type_var_from_base( - typ, - generic_bases=cast("tuple[type, ...]", (BaseAPIResponse, APIResponse)), - index=0, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_sse_client.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_sse_client.py deleted file mode 100644 index cbc449d24421d0..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_sse_client.py +++ /dev/null @@ -1,206 +0,0 @@ -from __future__ import annotations - -import inspect -import json -from collections.abc import Iterator, Mapping -from typing import TYPE_CHECKING, Generic, TypeGuard, cast - -import httpx - -from . 
import get_origin -from ._base_type import ResponseT -from ._errors import APIResponseError -from ._utils import extract_type_var_from_base, is_mapping - -_FIELD_SEPARATOR = ":" - -if TYPE_CHECKING: - from ._http_client import HttpClient - - -class StreamResponse(Generic[ResponseT]): - response: httpx.Response - _cast_type: type[ResponseT] - - def __init__( - self, - *, - cast_type: type[ResponseT], - response: httpx.Response, - client: HttpClient, - ) -> None: - self.response = response - self._cast_type = cast_type - self._data_process_func = client._process_response_data - self._stream_chunks = self.__stream__() - - def __next__(self) -> ResponseT: - return self._stream_chunks.__next__() - - def __iter__(self) -> Iterator[ResponseT]: - yield from self._stream_chunks - - def __stream__(self) -> Iterator[ResponseT]: - sse_line_parser = SSELineParser() - iterator = sse_line_parser.iter_lines(self.response.iter_lines()) - - for sse in iterator: - if sse.data.startswith("[DONE]"): - break - - if sse.event is None: - data = sse.json_data() - if isinstance(data, Mapping) and data.get("error"): - raise APIResponseError( - message="An error occurred during streaming", - request=self.response.request, - json_data=data["error"], - ) - if sse.event is None: - data = sse.json_data() - if is_mapping(data) and data.get("error"): - message = None - error = data.get("error") - if is_mapping(error): - message = error.get("message") - if not message or not isinstance(message, str): - message = "An error occurred during streaming" - - raise APIResponseError( - message=message, - request=self.response.request, - json_data=data["error"], - ) - yield self._data_process_func(data=data, cast_type=self._cast_type, response=self.response) - - else: - data = sse.json_data() - - if sse.event == "error" and is_mapping(data) and data.get("error"): - message = None - error = data.get("error") - if is_mapping(error): - message = error.get("message") - if not message or not isinstance(message, 
str): - message = "An error occurred during streaming" - - raise APIResponseError( - message=message, - request=self.response.request, - json_data=data["error"], - ) - yield self._data_process_func(data=data, cast_type=self._cast_type, response=self.response) - - for sse in iterator: - pass - - -class Event: - def __init__( - self, event: str | None = None, data: str | None = None, id: str | None = None, retry: int | None = None - ): - self._event = event - self._data = data - self._id = id - self._retry = retry - - def __repr__(self): - data_len = len(self._data) if self._data else 0 - return ( - f"Event(event={self._event}, data={self._data} ,data_length={data_len}, id={self._id}, retry={self._retry}" - ) - - @property - def event(self): - return self._event - - @property - def data(self): - return self._data - - def json_data(self): - return json.loads(self._data) - - @property - def id(self): - return self._id - - @property - def retry(self): - return self._retry - - -class SSELineParser: - _data: list[str] - _event: str | None - _retry: int | None - _id: str | None - - def __init__(self): - self._event = None - self._data = [] - self._id = None - self._retry = None - - def iter_lines(self, lines: Iterator[str]) -> Iterator[Event]: - for line in lines: - line = line.rstrip("\n") - if not line: - if self._event is None and not self._data and self._id is None and self._retry is None: - continue - sse_event = Event(event=self._event, data="\n".join(self._data), id=self._id, retry=self._retry) - self._event = None - self._data = [] - self._id = None - self._retry = None - - yield sse_event - self.decode_line(line) - - def decode_line(self, line: str): - if line.startswith(":") or not line: - return - - field, _p, value = line.partition(":") - - value = value.removeprefix(" ") - if field == "data": - self._data.append(value) - elif field == "event": - self._event = value - elif field == "retry": - try: - self._retry = int(value) - except (TypeError, ValueError): - 
pass - return - - -def is_stream_class_type(typ: type) -> TypeGuard[type[StreamResponse[object]]]: - """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`""" - origin = get_origin(typ) or typ - return inspect.isclass(origin) and issubclass(origin, StreamResponse) - - -def extract_stream_chunk_type( - stream_cls: type, - *, - failure_message: str | None = None, -) -> type: - """Given a type like `StreamResponse[T]`, returns the generic type variable `T`. - - This also handles the case where a concrete subclass is given, e.g. - ```py - class MyStream(StreamResponse[bytes]): - ... - - extract_stream_chunk_type(MyStream) -> bytes - ``` - """ - - return extract_type_var_from_base( - stream_cls, - index=0, - generic_bases=cast("tuple[type, ...]", (StreamResponse,)), - failure_message=failure_message, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/__init__.py deleted file mode 100644 index a66b095816b8b0..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/__init__.py +++ /dev/null @@ -1,52 +0,0 @@ -from ._utils import ( # noqa: I001 - remove_notgiven_indict as remove_notgiven_indict, # noqa: PLC0414 - flatten as flatten, # noqa: PLC0414 - is_dict as is_dict, # noqa: PLC0414 - is_list as is_list, # noqa: PLC0414 - is_given as is_given, # noqa: PLC0414 - is_tuple as is_tuple, # noqa: PLC0414 - is_mapping as is_mapping, # noqa: PLC0414 - is_tuple_t as is_tuple_t, # noqa: PLC0414 - parse_date as parse_date, # noqa: PLC0414 - is_iterable as is_iterable, # noqa: PLC0414 - is_sequence as is_sequence, # noqa: PLC0414 - coerce_float as coerce_float, # noqa: PLC0414 - is_mapping_t as is_mapping_t, # noqa: PLC0414 - removeprefix as removeprefix, # noqa: PLC0414 - removesuffix as removesuffix, # noqa: PLC0414 - extract_files as extract_files, # noqa: PLC0414 - is_sequence_t 
as is_sequence_t, # noqa: PLC0414 - required_args as required_args, # noqa: PLC0414 - coerce_boolean as coerce_boolean, # noqa: PLC0414 - coerce_integer as coerce_integer, # noqa: PLC0414 - file_from_path as file_from_path, # noqa: PLC0414 - parse_datetime as parse_datetime, # noqa: PLC0414 - strip_not_given as strip_not_given, # noqa: PLC0414 - deepcopy_minimal as deepcopy_minimal, # noqa: PLC0414 - get_async_library as get_async_library, # noqa: PLC0414 - maybe_coerce_float as maybe_coerce_float, # noqa: PLC0414 - get_required_header as get_required_header, # noqa: PLC0414 - maybe_coerce_boolean as maybe_coerce_boolean, # noqa: PLC0414 - maybe_coerce_integer as maybe_coerce_integer, # noqa: PLC0414 - drop_prefix_image_data as drop_prefix_image_data, # noqa: PLC0414 -) - - -from ._typing import ( - is_list_type as is_list_type, # noqa: PLC0414 - is_union_type as is_union_type, # noqa: PLC0414 - extract_type_arg as extract_type_arg, # noqa: PLC0414 - is_iterable_type as is_iterable_type, # noqa: PLC0414 - is_required_type as is_required_type, # noqa: PLC0414 - is_annotated_type as is_annotated_type, # noqa: PLC0414 - strip_annotated_type as strip_annotated_type, # noqa: PLC0414 - extract_type_var_from_base as extract_type_var_from_base, # noqa: PLC0414 -) - -from ._transform import ( - PropertyInfo as PropertyInfo, # noqa: PLC0414 - transform as transform, # noqa: PLC0414 - async_transform as async_transform, # noqa: PLC0414 - maybe_transform as maybe_transform, # noqa: PLC0414 - async_maybe_transform as async_maybe_transform, # noqa: PLC0414 -) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_transform.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_transform.py deleted file mode 100644 index e8ef1f79358a96..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_transform.py +++ /dev/null @@ -1,383 +0,0 @@ -from __future__ import annotations - -import base64 
-import io -import pathlib -from collections.abc import Mapping -from datetime import date, datetime -from typing import Any, Literal, TypeVar, cast, get_args, get_type_hints - -import anyio -import pydantic -from typing_extensions import override - -from .._base_compat import is_typeddict, model_dump -from .._files import is_base64_file_input -from ._typing import ( - extract_type_arg, - is_annotated_type, - is_iterable_type, - is_list_type, - is_required_type, - is_union_type, - strip_annotated_type, -) -from ._utils import ( - is_iterable, - is_list, - is_mapping, -) - -_T = TypeVar("_T") - - -# TODO: support for drilling globals() and locals() -# TODO: ensure works correctly with forward references in all cases - - -PropertyFormat = Literal["iso8601", "base64", "custom"] - - -class PropertyInfo: - """Metadata class to be used in Annotated types to provide information about a given type. - - For example: - - class MyParams(TypedDict): - account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')] - - This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API. - """ # noqa: E501 - - alias: str | None - format: PropertyFormat | None - format_template: str | None - discriminator: str | None - - def __init__( - self, - *, - alias: str | None = None, - format: PropertyFormat | None = None, - format_template: str | None = None, - discriminator: str | None = None, - ) -> None: - self.alias = alias - self.format = format - self.format_template = format_template - self.discriminator = discriminator - - @override - def __repr__(self) -> str: - return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')" # noqa: E501 - - -def maybe_transform( - data: object, - expected_type: object, -) -> Any | None: - """Wrapper over `transform()` that allows `None` to be passed. 
- - See `transform()` for more details. - """ - if data is None: - return None - return transform(data, expected_type) - - -# Wrapper over _transform_recursive providing fake types -def transform( - data: _T, - expected_type: object, -) -> _T: - """Transform dictionaries based off of type information from the given type, for example: - - ```py - class Params(TypedDict, total=False): - card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] - - - transformed = transform({"card_id": ""}, Params) - # {'cardID': ''} - ``` - - Any keys / data that does not have type information given will be included as is. - - It should be noted that the transformations that this function does are not represented in the type system. - """ - transformed = _transform_recursive(data, annotation=cast(type, expected_type)) - return cast(_T, transformed) - - -def _get_annotated_type(type_: type) -> type | None: - """If the given type is an `Annotated` type then it is returned, if not `None` is returned. - - This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]` - """ - if is_required_type(type_): - # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]` - type_ = get_args(type_)[0] - - if is_annotated_type(type_): - return type_ - - return None - - -def _maybe_transform_key(key: str, type_: type) -> str: - """Transform the given `data` based on the annotations provided in `type_`. - - Note: this function only looks at `Annotated` types that contain `PropertInfo` metadata. 
- """ - annotated_type = _get_annotated_type(type_) - if annotated_type is None: - # no `Annotated` definition for this type, no transformation needed - return key - - # ignore the first argument as it is the actual type - annotations = get_args(annotated_type)[1:] - for annotation in annotations: - if isinstance(annotation, PropertyInfo) and annotation.alias is not None: - return annotation.alias - - return key - - -def _transform_recursive( - data: object, - *, - annotation: type, - inner_type: type | None = None, -) -> object: - """Transform the given data against the expected type. - - Args: - annotation: The direct type annotation given to the particular piece of data. - This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc - - inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type - is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in - the list can be transformed using the metadata from the container type. - - Defaults to the same value as the `annotation` argument. - """ - if inner_type is None: - inner_type = annotation - - stripped_type = strip_annotated_type(inner_type) - if is_typeddict(stripped_type) and is_mapping(data): - return _transform_typeddict(data, stripped_type) - - if ( - # List[T] - (is_list_type(stripped_type) and is_list(data)) - # Iterable[T] - or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) - ): - inner_type = extract_type_arg(stripped_type, 0) - return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] - - if is_union_type(stripped_type): - # For union types we run the transformation against all subtypes to ensure that everything is transformed. - # - # TODO: there may be edge cases where the same normalized field name will transform to two different names - # in different subtypes. 
- for subtype in get_args(stripped_type): - data = _transform_recursive(data, annotation=annotation, inner_type=subtype) - return data - - if isinstance(data, pydantic.BaseModel): - return model_dump(data, exclude_unset=True) - - annotated_type = _get_annotated_type(annotation) - if annotated_type is None: - return data - - # ignore the first argument as it is the actual type - annotations = get_args(annotated_type)[1:] - for annotation in annotations: - if isinstance(annotation, PropertyInfo) and annotation.format is not None: - return _format_data(data, annotation.format, annotation.format_template) - - return data - - -def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: - if isinstance(data, date | datetime): - if format_ == "iso8601": - return data.isoformat() - - if format_ == "custom" and format_template is not None: - return data.strftime(format_template) - - if format_ == "base64" and is_base64_file_input(data): - binary: str | bytes | None = None - - if isinstance(data, pathlib.Path): - binary = data.read_bytes() - elif isinstance(data, io.IOBase): - binary = data.read() - - if isinstance(binary, str): # type: ignore[unreachable] - binary = binary.encode() - - if not isinstance(binary, bytes): - raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") - - return base64.b64encode(binary).decode("ascii") - - return data - - -def _transform_typeddict( - data: Mapping[str, object], - expected_type: type, -) -> Mapping[str, object]: - result: dict[str, object] = {} - annotations = get_type_hints(expected_type, include_extras=True) - for key, value in data.items(): - type_ = annotations.get(key) - if type_ is None: - # we do not have a type annotation for this field, leave it as is - result[key] = value - else: - result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_) - return result - - -async def async_maybe_transform( - data: object, - expected_type: object, 
-) -> Any | None: - """Wrapper over `async_transform()` that allows `None` to be passed. - - See `async_transform()` for more details. - """ - if data is None: - return None - return await async_transform(data, expected_type) - - -async def async_transform( - data: _T, - expected_type: object, -) -> _T: - """Transform dictionaries based off of type information from the given type, for example: - - ```py - class Params(TypedDict, total=False): - card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] - - - transformed = transform({"card_id": ""}, Params) - # {'cardID': ''} - ``` - - Any keys / data that does not have type information given will be included as is. - - It should be noted that the transformations that this function does are not represented in the type system. - """ - transformed = await _async_transform_recursive(data, annotation=cast(type, expected_type)) - return cast(_T, transformed) - - -async def _async_transform_recursive( - data: object, - *, - annotation: type, - inner_type: type | None = None, -) -> object: - """Transform the given data against the expected type. - - Args: - annotation: The direct type annotation given to the particular piece of data. - This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc - - inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type - is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in - the list can be transformed using the metadata from the container type. - - Defaults to the same value as the `annotation` argument. 
- """ - if inner_type is None: - inner_type = annotation - - stripped_type = strip_annotated_type(inner_type) - if is_typeddict(stripped_type) and is_mapping(data): - return await _async_transform_typeddict(data, stripped_type) - - if ( - # List[T] - (is_list_type(stripped_type) and is_list(data)) - # Iterable[T] - or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) - ): - inner_type = extract_type_arg(stripped_type, 0) - return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] - - if is_union_type(stripped_type): - # For union types we run the transformation against all subtypes to ensure that everything is transformed. - # - # TODO: there may be edge cases where the same normalized field name will transform to two different names - # in different subtypes. - for subtype in get_args(stripped_type): - data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype) - return data - - if isinstance(data, pydantic.BaseModel): - return model_dump(data, exclude_unset=True) - - annotated_type = _get_annotated_type(annotation) - if annotated_type is None: - return data - - # ignore the first argument as it is the actual type - annotations = get_args(annotated_type)[1:] - for annotation in annotations: - if isinstance(annotation, PropertyInfo) and annotation.format is not None: - return await _async_format_data(data, annotation.format, annotation.format_template) - - return data - - -async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: - if isinstance(data, date | datetime): - if format_ == "iso8601": - return data.isoformat() - - if format_ == "custom" and format_template is not None: - return data.strftime(format_template) - - if format_ == "base64" and is_base64_file_input(data): - binary: str | bytes | None = None - - if isinstance(data, pathlib.Path): - binary = await anyio.Path(data).read_bytes() - elif 
isinstance(data, io.IOBase): - binary = data.read() - - if isinstance(binary, str): # type: ignore[unreachable] - binary = binary.encode() - - if not isinstance(binary, bytes): - raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") - - return base64.b64encode(binary).decode("ascii") - - return data - - -async def _async_transform_typeddict( - data: Mapping[str, object], - expected_type: type, -) -> Mapping[str, object]: - result: dict[str, object] = {} - annotations = get_type_hints(expected_type, include_extras=True) - for key, value in data.items(): - type_ = annotations.get(key) - if type_ is None: - # we do not have a type annotation for this field, leave it as is - result[key] = value - else: - result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_) - return result diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_typing.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_typing.py deleted file mode 100644 index c7c54dcc37458d..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_typing.py +++ /dev/null @@ -1,122 +0,0 @@ -from __future__ import annotations - -from collections import abc as _c_abc -from collections.abc import Iterable -from typing import Annotated, Any, TypeVar, cast, get_args, get_origin - -from typing_extensions import Required - -from .._base_compat import is_union as _is_union -from .._base_type import InheritsGeneric - - -def is_annotated_type(typ: type) -> bool: - return get_origin(typ) == Annotated - - -def is_list_type(typ: type) -> bool: - return (get_origin(typ) or typ) == list - - -def is_iterable_type(typ: type) -> bool: - """If the given type is `typing.Iterable[T]`""" - origin = get_origin(typ) or typ - return origin in {Iterable, _c_abc.Iterable} - - -def is_union_type(typ: type) -> bool: - return _is_union(get_origin(typ)) - - -def is_required_type(typ: type) 
-> bool: - return get_origin(typ) == Required - - -def is_typevar(typ: type) -> bool: - # type ignore is required because type checkers - # think this expression will always return False - return type(typ) == TypeVar # type: ignore - - -# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]] -def strip_annotated_type(typ: type) -> type: - if is_required_type(typ) or is_annotated_type(typ): - return strip_annotated_type(cast(type, get_args(typ)[0])) - - return typ - - -def extract_type_arg(typ: type, index: int) -> type: - args = get_args(typ) - try: - return cast(type, args[index]) - except IndexError as err: - raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err - - -def extract_type_var_from_base( - typ: type, - *, - generic_bases: tuple[type, ...], - index: int, - failure_message: str | None = None, -) -> type: - """Given a type like `Foo[T]`, returns the generic type variable `T`. - - This also handles the case where a concrete subclass is given, e.g. - ```py - class MyResponse(Foo[bytes]): - ... - - extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes - ``` - - And where a generic subclass is given: - ```py - _T = TypeVar('_T') - class MyResponse(Foo[_T]): - ... - - extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes - ``` - """ - cls = cast(object, get_origin(typ) or typ) - if cls in generic_bases: - # we're given the class directly - return extract_type_arg(typ, index) - - # if a subclass is given - # --- - # this is needed as __orig_bases__ is not present in the typeshed stubs - # because it is intended to be for internal use only, however there does - # not seem to be a way to resolve generic TypeVars for inherited subclasses - # without using it. 
- if isinstance(cls, InheritsGeneric): - target_base_class: Any | None = None - for base in cls.__orig_bases__: - if base.__origin__ in generic_bases: - target_base_class = base - break - - if target_base_class is None: - raise RuntimeError( - "Could not find the generic base class;\n" - "This should never happen;\n" - f"Does {cls} inherit from one of {generic_bases} ?" - ) - - extracted = extract_type_arg(target_base_class, index) - if is_typevar(extracted): - # If the extracted type argument is itself a type variable - # then that means the subclass itself is generic, so we have - # to resolve the type argument from the class itself, not - # the base class. - # - # Note: if there is more than 1 type argument, the subclass could - # change the ordering of the type arguments, this is not currently - # supported. - return extract_type_arg(typ, index) - - return extracted - - raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}") diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py deleted file mode 100644 index ce5e7786aa2937..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py +++ /dev/null @@ -1,409 +0,0 @@ -from __future__ import annotations - -import functools -import inspect -import os -import re -from collections.abc import Callable, Iterable, Mapping, Sequence -from pathlib import Path -from typing import ( - Any, - TypeGuard, - TypeVar, - Union, - cast, - overload, -) - -import sniffio - -from .._base_compat import parse_date as parse_date # noqa: PLC0414 -from .._base_compat import parse_datetime as parse_datetime # noqa: PLC0414 -from .._base_type import FileTypes, Headers, HeadersLike, NotGiven, NotGivenOr - - -def remove_notgiven_indict(obj): - if obj is None or (not isinstance(obj, Mapping)): - return obj - return {key: value for 
key, value in obj.items() if not isinstance(value, NotGiven)} - - -_T = TypeVar("_T") -_TupleT = TypeVar("_TupleT", bound=tuple[object, ...]) -_MappingT = TypeVar("_MappingT", bound=Mapping[str, object]) -_SequenceT = TypeVar("_SequenceT", bound=Sequence[object]) -CallableT = TypeVar("CallableT", bound=Callable[..., Any]) - - -def flatten(t: Iterable[Iterable[_T]]) -> list[_T]: - return [item for sublist in t for item in sublist] - - -def extract_files( - # TODO: this needs to take Dict but variance issues..... - # create protocol type ? - query: Mapping[str, object], - *, - paths: Sequence[Sequence[str]], -) -> list[tuple[str, FileTypes]]: - """Recursively extract files from the given dictionary based on specified paths. - - A path may look like this ['foo', 'files', '', 'data']. - - Note: this mutates the given dictionary. - """ - files: list[tuple[str, FileTypes]] = [] - for path in paths: - files.extend(_extract_items(query, path, index=0, flattened_key=None)) - return files - - -def _extract_items( - obj: object, - path: Sequence[str], - *, - index: int, - flattened_key: str | None, -) -> list[tuple[str, FileTypes]]: - try: - key = path[index] - except IndexError: - if isinstance(obj, NotGiven): - # no value was provided - we can safely ignore - return [] - - # cyclical import - from .._files import assert_is_file_content - - # We have exhausted the path, return the entry we found. - assert_is_file_content(obj, key=flattened_key) - assert flattened_key is not None - return [(flattened_key, cast(FileTypes, obj))] - - index += 1 - if is_dict(obj): - try: - # We are at the last entry in the path so we must remove the field - if (len(path)) == index: - item = obj.pop(key) - else: - item = obj[key] - except KeyError: - # Key was not present in the dictionary, this is not indicative of an error - # as the given path may not point to a required field. We also do not want - # to enforce required fields as the API may differ from the spec in some cases. 
- return [] - if flattened_key is None: - flattened_key = key - else: - flattened_key += f"[{key}]" - return _extract_items( - item, - path, - index=index, - flattened_key=flattened_key, - ) - elif is_list(obj): - if key != "": - return [] - - return flatten( - [ - _extract_items( - item, - path, - index=index, - flattened_key=flattened_key + "[]" if flattened_key is not None else "[]", - ) - for item in obj - ] - ) - - # Something unexpected was passed, just ignore it. - return [] - - -def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]: - return not isinstance(obj, NotGiven) - - -# Type safe methods for narrowing types with TypeVars. -# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown], -# however this cause Pyright to rightfully report errors. As we know we don't -# care about the contained types we can safely use `object` in it's place. -# -# There are two separate functions defined, `is_*` and `is_*_t` for different use cases. -# `is_*` is for when you're dealing with an unknown input -# `is_*_t` is for when you're narrowing a known union type to a specific subset - - -def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]: - return isinstance(obj, tuple) - - -def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]: - return isinstance(obj, tuple) - - -def is_sequence(obj: object) -> TypeGuard[Sequence[object]]: - return isinstance(obj, Sequence) - - -def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]: - return isinstance(obj, Sequence) - - -def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]: - return isinstance(obj, Mapping) - - -def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]: - return isinstance(obj, Mapping) - - -def is_dict(obj: object) -> TypeGuard[dict[object, object]]: - return isinstance(obj, dict) - - -def is_list(obj: object) -> TypeGuard[list[object]]: - return isinstance(obj, list) - - -def is_iterable(obj: object) -> TypeGuard[Iterable[object]]: - return 
isinstance(obj, Iterable) - - -def deepcopy_minimal(item: _T) -> _T: - """Minimal reimplementation of copy.deepcopy() that will only copy certain object types: - - - mappings, e.g. `dict` - - list - - This is done for performance reasons. - """ - if is_mapping(item): - return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()}) - if is_list(item): - return cast(_T, [deepcopy_minimal(entry) for entry in item]) - return item - - -# copied from https://github.com/Rapptz/RoboDanny -def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str: - size = len(seq) - if size == 0: - return "" - - if size == 1: - return seq[0] - - if size == 2: - return f"{seq[0]} {final} {seq[1]}" - - return delim.join(seq[:-1]) + f" {final} {seq[-1]}" - - -def quote(string: str) -> str: - """Add single quotation marks around the given string. Does *not* do any escaping.""" - return f"'{string}'" - - -def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]: - """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function. - - Useful for enforcing runtime validation of overloaded functions. - - Example usage: - ```py - @overload - def foo(*, a: str) -> str: - ... - - - @overload - def foo(*, b: bool) -> str: - ... - - - # This enforces the same constraints that a static type checker would - # i.e. that either a or b must be passed to the function - @required_args(["a"], ["b"]) - def foo(*, a: str | None = None, b: bool | None = None) -> str: - ... 
- ``` - """ - - def inner(func: CallableT) -> CallableT: - params = inspect.signature(func).parameters - positional = [ - name - for name, param in params.items() - if param.kind - in { - param.POSITIONAL_ONLY, - param.POSITIONAL_OR_KEYWORD, - } - ] - - @functools.wraps(func) - def wrapper(*args: object, **kwargs: object) -> object: - given_params: set[str] = set() - for i, _ in enumerate(args): - try: - given_params.add(positional[i]) - except IndexError: - raise TypeError( - f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given" - ) from None - - given_params.update(kwargs.keys()) - - for variant in variants: - matches = all(param in given_params for param in variant) - if matches: - break - else: # no break - if len(variants) > 1: - variations = human_join( - ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants] - ) - msg = f"Missing required arguments; Expected either {variations} arguments to be given" - else: - # TODO: this error message is not deterministic - missing = list(set(variants[0]) - given_params) - if len(missing) > 1: - msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}" - else: - msg = f"Missing required argument: {quote(missing[0])}" - raise TypeError(msg) - return func(*args, **kwargs) - - return wrapper # type: ignore - - return inner - - -_K = TypeVar("_K") -_V = TypeVar("_V") - - -@overload -def strip_not_given(obj: None) -> None: ... - - -@overload -def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ... - - -@overload -def strip_not_given(obj: object) -> object: ... 
- - -def strip_not_given(obj: object | None) -> object: - """Remove all top-level keys where their values are instances of `NotGiven`""" - if obj is None: - return None - - if not is_mapping(obj): - return obj - - return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)} - - -def coerce_integer(val: str) -> int: - return int(val, base=10) - - -def coerce_float(val: str) -> float: - return float(val) - - -def coerce_boolean(val: str) -> bool: - return val in {"true", "1", "on"} - - -def maybe_coerce_integer(val: str | None) -> int | None: - if val is None: - return None - return coerce_integer(val) - - -def maybe_coerce_float(val: str | None) -> float | None: - if val is None: - return None - return coerce_float(val) - - -def maybe_coerce_boolean(val: str | None) -> bool | None: - if val is None: - return None - return coerce_boolean(val) - - -def removeprefix(string: str, prefix: str) -> str: - """Remove a prefix from a string. - - Backport of `str.removeprefix` for Python < 3.9 - """ - if string.startswith(prefix): - return string[len(prefix) :] - return string - - -def removesuffix(string: str, suffix: str) -> str: - """Remove a suffix from a string. 
- - Backport of `str.removesuffix` for Python < 3.9 - """ - if string.endswith(suffix): - return string[: -len(suffix)] - return string - - -def file_from_path(path: str) -> FileTypes: - contents = Path(path).read_bytes() - file_name = os.path.basename(path) - return (file_name, contents) - - -def get_required_header(headers: HeadersLike, header: str) -> str: - lower_header = header.lower() - if isinstance(headers, Mapping): - headers = cast(Headers, headers) - for k, v in headers.items(): - if k.lower() == lower_header and isinstance(v, str): - return v - - """ to deal with the case where the header looks like Stainless-Event-Id """ - intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize()) - - for normalized_header in [header, lower_header, header.upper(), intercaps_header]: - value = headers.get(normalized_header) - if value: - return value - - raise ValueError(f"Could not find {header} header") - - -def get_async_library() -> str: - try: - return sniffio.current_async_library() - except Exception: - return "false" - - -def drop_prefix_image_data(content: Union[str, list[dict]]) -> Union[str, list[dict]]: - """ - 删除 ;base64, 前缀 - :param image_data: - :return: - """ - if isinstance(content, list): - for data in content: - if data.get("type") == "image_url": - image_data = data.get("image_url").get("url") - if image_data.startswith("data:image/"): - image_data = image_data.split("base64,")[-1] - data["image_url"]["url"] = image_data - - return content diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/logs.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/logs.py deleted file mode 100644 index e5fce94c00e9e0..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/logs.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging -import os -import time - -logger = logging.getLogger(__name__) - - -class LoggerNameFilter(logging.Filter): - def 
filter(self, record): - # return record.name.startswith("loom_core") or record.name in "ERROR" or ( - # record.name.startswith("uvicorn.error") - # and record.getMessage().startswith("Uvicorn running on") - # ) - return True - - -def get_log_file(log_path: str, sub_dir: str): - """ - sub_dir should contain a timestamp. - """ - log_dir = os.path.join(log_path, sub_dir) - # Here should be creating a new directory each time, so `exist_ok=False` - os.makedirs(log_dir, exist_ok=False) - return os.path.join(log_dir, "zhipuai.log") - - -def get_config_dict(log_level: str, log_file_path: str, log_backup_count: int, log_max_bytes: int) -> dict: - # for windows, the path should be a raw string. - log_file_path = log_file_path.encode("unicode-escape").decode() if os.name == "nt" else log_file_path - log_level = log_level.upper() - config_dict = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "formatter": {"format": ("%(asctime)s %(name)-12s %(process)d %(levelname)-8s %(message)s")}, - }, - "filters": { - "logger_name_filter": { - "()": __name__ + ".LoggerNameFilter", - }, - }, - "handlers": { - "stream_handler": { - "class": "logging.StreamHandler", - "formatter": "formatter", - "level": log_level, - # "stream": "ext://sys.stdout", - # "filters": ["logger_name_filter"], - }, - "file_handler": { - "class": "logging.handlers.RotatingFileHandler", - "formatter": "formatter", - "level": log_level, - "filename": log_file_path, - "mode": "a", - "maxBytes": log_max_bytes, - "backupCount": log_backup_count, - "encoding": "utf8", - }, - }, - "loggers": { - "loom_core": { - "handlers": ["stream_handler", "file_handler"], - "level": log_level, - "propagate": False, - } - }, - "root": { - "level": log_level, - "handlers": ["stream_handler", "file_handler"], - }, - } - return config_dict - - -def get_timestamp_ms(): - t = time.time() - return int(round(t * 1000)) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/pagination.py 
b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/pagination.py deleted file mode 100644 index 7f0b1b91d98556..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/pagination.py +++ /dev/null @@ -1,62 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Any, Generic, Optional, TypeVar, cast - -from typing_extensions import Protocol, override, runtime_checkable - -from ._http_client import BasePage, BaseSyncPage, PageInfo - -__all__ = ["SyncPage", "SyncCursorPage"] - -_T = TypeVar("_T") - - -@runtime_checkable -class CursorPageItem(Protocol): - id: Optional[str] - - -class SyncPage(BaseSyncPage[_T], BasePage[_T], Generic[_T]): - """Note: no pagination actually occurs yet, this is for forwards-compatibility.""" - - data: list[_T] - object: str - - @override - def _get_page_items(self) -> list[_T]: - data = self.data - if not data: - return [] - return data - - @override - def next_page_info(self) -> None: - """ - This page represents a response that isn't actually paginated at the API level - so there will never be a next page. 
- """ - return None - - -class SyncCursorPage(BaseSyncPage[_T], BasePage[_T], Generic[_T]): - data: list[_T] - - @override - def _get_page_items(self) -> list[_T]: - data = self.data - if not data: - return [] - return data - - @override - def next_page_info(self) -> Optional[PageInfo]: - data = self.data - if not data: - return None - - item = cast(Any, data[-1]) - if not isinstance(item, CursorPageItem) or item.id is None: - # TODO emit warning log - return None - - return PageInfo(params={"after": item.id}) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/__init__.py deleted file mode 100644 index 9f941fb91c8776..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .assistant_completion import AssistantCompletion - -__all__ = [ - "AssistantCompletion", -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_completion.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_completion.py deleted file mode 100644 index cbfb6edaeb1f19..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_completion.py +++ /dev/null @@ -1,40 +0,0 @@ -from typing import Any, Optional - -from ...core import BaseModel -from .message import MessageContent - -__all__ = ["AssistantCompletion", "CompletionUsage"] - - -class ErrorInfo(BaseModel): - code: str # 错误码 - message: str # 错误信息 - - -class AssistantChoice(BaseModel): - index: int # 结果下标 - delta: MessageContent # 当前会话输出消息体 - finish_reason: str - """ - # 推理结束原因 
stop代表推理自然结束或触发停止词。 sensitive 代表模型推理内容被安全审核接口拦截。请注意,针对此类内容,请用户自行判断并决定是否撤回已公开的内容。 - # network_error 代表模型推理服务异常。 - """ # noqa: E501 - metadata: dict # 元信息,拓展字段 - - -class CompletionUsage(BaseModel): - prompt_tokens: int # 输入的 tokens 数量 - completion_tokens: int # 输出的 tokens 数量 - total_tokens: int # 总 tokens 数量 - - -class AssistantCompletion(BaseModel): - id: str # 请求 ID - conversation_id: str # 会话 ID - assistant_id: str # 智能体 ID - created: int # 请求创建时间,Unix 时间戳 - status: str # 返回状态,包括:`completed` 表示生成结束`in_progress`表示生成中 `failed` 表示生成异常 - last_error: Optional[ErrorInfo] # 异常信息 - choices: list[AssistantChoice] # 增量返回的信息 - metadata: Optional[dict[str, Any]] # 元信息,拓展字段 - usage: Optional[CompletionUsage] # tokens 数量统计 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_params.py deleted file mode 100644 index 03f14f4238f37f..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_params.py +++ /dev/null @@ -1,7 +0,0 @@ -from typing import TypedDict - - -class ConversationParameters(TypedDict, total=False): - assistant_id: str # 智能体 ID - page: int # 当前分页 - page_size: int # 分页数量 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_resp.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_resp.py deleted file mode 100644 index d1833d220a2e3b..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_resp.py +++ /dev/null @@ -1,29 +0,0 @@ -from ...core import BaseModel - -__all__ = ["ConversationUsageListResp"] - - -class Usage(BaseModel): - prompt_tokens: int # 用户输入的 tokens 数量 - completion_tokens: int # 模型输入的 tokens 数量 - total_tokens: int # 总 tokens 数量 - - -class 
ConversationUsage(BaseModel): - id: str # 会话 id - assistant_id: str # 智能体Assistant id - create_time: int # 创建时间 - update_time: int # 更新时间 - usage: Usage # 会话中 tokens 数量统计 - - -class ConversationUsageList(BaseModel): - assistant_id: str # 智能体id - has_more: bool # 是否还有更多页 - conversation_list: list[ConversationUsage] # 返回的 - - -class ConversationUsageListResp(BaseModel): - code: int - msg: str - data: ConversationUsageList diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_create_params.py deleted file mode 100644 index 2def1025cd2b33..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_create_params.py +++ /dev/null @@ -1,32 +0,0 @@ -from typing import Optional, TypedDict, Union - - -class AssistantAttachments: - file_id: str - - -class MessageTextContent: - type: str # 目前支持 type = text - text: str - - -MessageContent = Union[MessageTextContent] - - -class ConversationMessage(TypedDict): - """会话消息体""" - - role: str # 用户的输入角色,例如 'user' - content: list[MessageContent] # 会话消息体的内容 - - -class AssistantParameters(TypedDict, total=False): - """智能体参数类""" - - assistant_id: str # 智能体 ID - conversation_id: Optional[str] # 会话 ID,不传则创建新会话 - model: str # 模型名称,默认为 'GLM-4-Assistant' - stream: bool # 是否支持流式 SSE,需要传入 True - messages: list[ConversationMessage] # 会话消息体 - attachments: Optional[list[AssistantAttachments]] # 会话指定的文件,非必填 - metadata: Optional[dict] # 元信息,拓展字段,非必填 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_support_resp.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_support_resp.py deleted file mode 100644 index 0709cdbcad25e1..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_support_resp.py +++ /dev/null @@ -1,21 +0,0 @@ 
-from ...core import BaseModel - -__all__ = ["AssistantSupportResp"] - - -class AssistantSupport(BaseModel): - assistant_id: str # 智能体的 Assistant id,用于智能体会话 - created_at: int # 创建时间 - updated_at: int # 更新时间 - name: str # 智能体名称 - avatar: str # 智能体头像 - description: str # 智能体描述 - status: str # 智能体状态,目前只有 publish - tools: list[str] # 智能体支持的工具名 - starter_prompts: list[str] # 智能体启动推荐的 prompt - - -class AssistantSupportResp(BaseModel): - code: int - msg: str - data: list[AssistantSupport] # 智能体列表 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/__init__.py deleted file mode 100644 index 562e0151e53b48..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .message_content import MessageContent - -__all__ = ["MessageContent"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/message_content.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/message_content.py deleted file mode 100644 index 6a1a438a6fe03d..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/message_content.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Annotated, TypeAlias, Union - -from ....core._utils import PropertyInfo -from .text_content_block import TextContentBlock -from .tools_delta_block import ToolsDeltaBlock - -__all__ = ["MessageContent"] - - -MessageContent: TypeAlias = Annotated[ - Union[ToolsDeltaBlock, TextContentBlock], - PropertyInfo(discriminator="type"), -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/text_content_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/text_content_block.py deleted file mode 100644 index 
865fb1139e2f75..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/text_content_block.py +++ /dev/null @@ -1,14 +0,0 @@ -from typing import Literal - -from ....core import BaseModel - -__all__ = ["TextContentBlock"] - - -class TextContentBlock(BaseModel): - content: str - - role: str = "assistant" - - type: Literal["content"] = "content" - """Always `content`.""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/code_interpreter_delta_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/code_interpreter_delta_block.py deleted file mode 100644 index 9d569b282ef9f7..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/code_interpreter_delta_block.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Literal - -__all__ = ["CodeInterpreterToolBlock"] - -from .....core import BaseModel - - -class CodeInterpreterToolOutput(BaseModel): - """代码工具输出结果""" - - type: str # 代码执行日志,目前只有 logs - logs: str # 代码执行的日志结果 - error_msg: str # 错误信息 - - -class CodeInterpreter(BaseModel): - """代码解释器""" - - input: str # 生成的代码片段,输入给代码沙盒 - outputs: list[CodeInterpreterToolOutput] # 代码执行后的输出结果 - - -class CodeInterpreterToolBlock(BaseModel): - """代码工具块""" - - code_interpreter: CodeInterpreter # 代码解释器对象 - type: Literal["code_interpreter"] # 调用工具的类型,始终为 `code_interpreter` diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/drawing_tool_delta_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/drawing_tool_delta_block.py deleted file mode 100644 index 0b6895556b6164..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/drawing_tool_delta_block.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Literal - -from .....core import BaseModel - 
-__all__ = ["DrawingToolBlock"] - - -class DrawingToolOutput(BaseModel): - image: str - - -class DrawingTool(BaseModel): - input: str - outputs: list[DrawingToolOutput] - - -class DrawingToolBlock(BaseModel): - drawing_tool: DrawingTool - - type: Literal["drawing_tool"] - """Always `drawing_tool`.""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/function_delta_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/function_delta_block.py deleted file mode 100644 index c439bc4b3fbbb8..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/function_delta_block.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Literal, Union - -__all__ = ["FunctionToolBlock"] - -from .....core import BaseModel - - -class FunctionToolOutput(BaseModel): - content: str - - -class FunctionTool(BaseModel): - name: str - arguments: Union[str, dict] - outputs: list[FunctionToolOutput] - - -class FunctionToolBlock(BaseModel): - function: FunctionTool - - type: Literal["function"] - """Always `drawing_tool`.""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/retrieval_delta_black.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/retrieval_delta_black.py deleted file mode 100644 index 4789e9378a8a39..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/retrieval_delta_black.py +++ /dev/null @@ -1,41 +0,0 @@ -from typing import Literal - -from .....core import BaseModel - - -class RetrievalToolOutput(BaseModel): - """ - This class represents the output of a retrieval tool. - - Attributes: - - text (str): The text snippet retrieved from the knowledge base. - - document (str): The name of the document from which the text snippet was retrieved, returned only in intelligent configuration. 
- """ # noqa: E501 - - text: str - document: str - - -class RetrievalTool(BaseModel): - """ - This class represents the outputs of a retrieval tool. - - Attributes: - - outputs (List[RetrievalToolOutput]): A list of text snippets and their respective document names retrieved from the knowledge base. - """ # noqa: E501 - - outputs: list[RetrievalToolOutput] - - -class RetrievalToolBlock(BaseModel): - """ - This class represents a block for invoking the retrieval tool. - - Attributes: - - retrieval (RetrievalTool): An instance of the RetrievalTool class containing the retrieval outputs. - - type (Literal["retrieval"]): The type of tool being used, always set to "retrieval". - """ - - retrieval: RetrievalTool - type: Literal["retrieval"] - """Always `retrieval`.""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/tools_type.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/tools_type.py deleted file mode 100644 index 98544053d4c83a..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/tools_type.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Annotated, TypeAlias, Union - -from .....core._utils import PropertyInfo -from .code_interpreter_delta_block import CodeInterpreterToolBlock -from .drawing_tool_delta_block import DrawingToolBlock -from .function_delta_block import FunctionToolBlock -from .retrieval_delta_black import RetrievalToolBlock -from .web_browser_delta_block import WebBrowserToolBlock - -__all__ = ["ToolsType"] - - -ToolsType: TypeAlias = Annotated[ - Union[DrawingToolBlock, CodeInterpreterToolBlock, WebBrowserToolBlock, RetrievalToolBlock, FunctionToolBlock], - PropertyInfo(discriminator="type"), -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/web_browser_delta_block.py 
b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/web_browser_delta_block.py deleted file mode 100644 index 966e6fe0c84fef..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/web_browser_delta_block.py +++ /dev/null @@ -1,48 +0,0 @@ -from typing import Literal - -from .....core import BaseModel - -__all__ = ["WebBrowserToolBlock"] - - -class WebBrowserOutput(BaseModel): - """ - This class represents the output of a web browser search result. - - Attributes: - - title (str): The title of the search result. - - link (str): The URL link to the search result's webpage. - - content (str): The textual content extracted from the search result. - - error_msg (str): Any error message encountered during the search or retrieval process. - """ - - title: str - link: str - content: str - error_msg: str - - -class WebBrowser(BaseModel): - """ - This class represents the input and outputs of a web browser search. - - Attributes: - - input (str): The input query for the web browser search. - - outputs (List[WebBrowserOutput]): A list of search results returned by the web browser. - """ - - input: str - outputs: list[WebBrowserOutput] - - -class WebBrowserToolBlock(BaseModel): - """ - This class represents a block for invoking the web browser tool. - - Attributes: - - web_browser (WebBrowser): An instance of the WebBrowser class containing the search input and outputs. - - type (Literal["web_browser"]): The type of tool being used, always set to "web_browser". 
- """ - - web_browser: WebBrowser - type: Literal["web_browser"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools_delta_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools_delta_block.py deleted file mode 100644 index 781a1ab819c286..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools_delta_block.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Literal - -from ....core import BaseModel -from .tools.tools_type import ToolsType - -__all__ = ["ToolsDeltaBlock"] - - -class ToolsDeltaBlock(BaseModel): - tool_calls: list[ToolsType] - """The index of the content part in the message.""" - - role: str = "tool" - - type: Literal["tool_calls"] = "tool_calls" - """Always `tool_calls`.""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch.py deleted file mode 100644 index 560562915c9d32..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch.py +++ /dev/null @@ -1,82 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -import builtins -from typing import Literal, Optional - -from ..core import BaseModel -from .batch_error import BatchError -from .batch_request_counts import BatchRequestCounts - -__all__ = ["Batch", "Errors"] - - -class Errors(BaseModel): - data: Optional[list[BatchError]] = None - - object: Optional[str] = None - """这个类型,一直是`list`。""" - - -class Batch(BaseModel): - id: str - - completion_window: str - """用于执行请求的地址信息。""" - - created_at: int - """这是 Unix timestamp (in seconds) 表示的创建时间。""" - - endpoint: str - """这是ZhipuAI endpoint的地址。""" - - input_file_id: str - """标记为batch的输入文件的ID。""" - - object: Literal["batch"] - """这个类型,一直是`batch`.""" - - status: Literal[ - "validating", "failed", "in_progress", "finalizing", "completed", "expired", "cancelling", "cancelled" - ] - """batch 的状态。""" - - cancelled_at: Optional[int] = None - """Unix timestamp (in seconds) 表示的取消时间。""" - - cancelling_at: Optional[int] = None - """Unix timestamp (in seconds) 表示发起取消的请求时间 """ - - completed_at: Optional[int] = None - """Unix timestamp (in seconds) 表示的完成时间。""" - - error_file_id: Optional[str] = None - """这个文件id包含了执行请求失败的请求的输出。""" - - errors: Optional[Errors] = None - - expired_at: Optional[int] = None - """Unix timestamp (in seconds) 表示的将在过期时间。""" - - expires_at: Optional[int] = None - """Unix timestamp (in seconds) 触发过期""" - - failed_at: Optional[int] = None - """Unix timestamp (in seconds) 表示的失败时间。""" - - finalizing_at: Optional[int] = None - """Unix timestamp (in seconds) 表示的最终时间。""" - - in_progress_at: Optional[int] = None - """Unix timestamp (in seconds) 表示的开始处理时间。""" - - metadata: Optional[builtins.object] = None - """ - key:value形式的元数据,以便将信息存储 - 结构化格式。键的长度是64个字符,值最长512个字符 - """ - - output_file_id: Optional[str] = None - """完成请求的输出文件的ID。""" - - request_counts: Optional[BatchRequestCounts] = None - """批次中不同状态的请求计数""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_create_params.py 
b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_create_params.py deleted file mode 100644 index 3dae65ea46fcbe..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_create_params.py +++ /dev/null @@ -1,37 +0,0 @@ -from __future__ import annotations - -from typing import Literal, Optional - -from typing_extensions import Required, TypedDict - -__all__ = ["BatchCreateParams"] - - -class BatchCreateParams(TypedDict, total=False): - completion_window: Required[str] - """The time frame within which the batch should be processed. - - Currently only `24h` is supported. - """ - - endpoint: Required[Literal["/v1/chat/completions", "/v1/embeddings"]] - """The endpoint to be used for all requests in the batch. - - Currently `/v1/chat/completions` and `/v1/embeddings` are supported. - """ - - input_file_id: Required[str] - """The ID of an uploaded file that contains requests for the new batch. - - See [upload file](https://platform.openai.com/docs/api-reference/files/create) - for how to upload a file. - - Your input file must be formatted as a - [JSONL file](https://platform.openai.com/docs/api-reference/batch/requestInput), - and must be uploaded with the purpose `batch`. - """ - - metadata: Optional[dict[str, str]] - """Optional custom metadata for the batch.""" - - auto_delete_input_file: Optional[bool] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_error.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_error.py deleted file mode 100644 index f934db19781e41..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_error.py +++ /dev/null @@ -1,21 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from typing import Optional - -from ..core import BaseModel - -__all__ = ["BatchError"] - - -class BatchError(BaseModel): - code: Optional[str] = None - """定义的业务错误码""" - - line: Optional[int] = None - """文件中的行号""" - - message: Optional[str] = None - """关于对话文件中的错误的描述""" - - param: Optional[str] = None - """参数名称,如果有的话""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_list_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_list_params.py deleted file mode 100644 index 1a681671320eca..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_list_params.py +++ /dev/null @@ -1,20 +0,0 @@ -from __future__ import annotations - -from typing_extensions import TypedDict - -__all__ = ["BatchListParams"] - - -class BatchListParams(TypedDict, total=False): - after: str - """分页的游标,用于获取下一页的数据。 - - `after` 是一个指向当前页面的游标,用于获取下一页的数据。如果没有提供 `after`,则返回第一页的数据。 - list. - """ - - limit: int - """这个参数用于限制返回的结果数量。 - - Limit 用于限制返回的结果数量。默认值为 10 - """ diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_request_counts.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_request_counts.py deleted file mode 100644 index ca3ccae625052b..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_request_counts.py +++ /dev/null @@ -1,14 +0,0 @@ -from ..core import BaseModel - -__all__ = ["BatchRequestCounts"] - - -class BatchRequestCounts(BaseModel): - completed: int - """这个数字表示已经完成的请求。""" - - failed: int - """这个数字表示失败的请求。""" - - total: int - """这个数字表示总的请求。""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/async_chat_completion.py 
b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/async_chat_completion.py deleted file mode 100644 index c1eed070f32d9f..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/async_chat_completion.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Optional - -from ...core import BaseModel -from .chat_completion import CompletionChoice, CompletionUsage - -__all__ = ["AsyncTaskStatus", "AsyncCompletion"] - - -class AsyncTaskStatus(BaseModel): - id: Optional[str] = None - request_id: Optional[str] = None - model: Optional[str] = None - task_status: Optional[str] = None - - -class AsyncCompletion(BaseModel): - id: Optional[str] = None - request_id: Optional[str] = None - model: Optional[str] = None - task_status: str - choices: list[CompletionChoice] - usage: CompletionUsage diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion.py deleted file mode 100644 index 1945a826cda2d0..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion.py +++ /dev/null @@ -1,43 +0,0 @@ -from typing import Optional - -from ...core import BaseModel - -__all__ = ["Completion", "CompletionUsage"] - - -class Function(BaseModel): - arguments: str - name: str - - -class CompletionMessageToolCall(BaseModel): - id: str - function: Function - type: str - - -class CompletionMessage(BaseModel): - content: Optional[str] = None - role: str - tool_calls: Optional[list[CompletionMessageToolCall]] = None - - -class CompletionUsage(BaseModel): - prompt_tokens: int - completion_tokens: int - total_tokens: int - - -class CompletionChoice(BaseModel): - index: int - finish_reason: str - message: CompletionMessage - - -class Completion(BaseModel): - model: Optional[str] = None - created: Optional[int] = None - choices: list[CompletionChoice] - request_id: 
Optional[str] = None - id: Optional[str] = None - usage: CompletionUsage diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion_chunk.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion_chunk.py deleted file mode 100644 index 27fad0008a1dd4..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion_chunk.py +++ /dev/null @@ -1,57 +0,0 @@ -from typing import Any, Optional - -from ...core import BaseModel - -__all__ = [ - "CompletionUsage", - "ChatCompletionChunk", - "Choice", - "ChoiceDelta", - "ChoiceDeltaFunctionCall", - "ChoiceDeltaToolCall", - "ChoiceDeltaToolCallFunction", -] - - -class ChoiceDeltaFunctionCall(BaseModel): - arguments: Optional[str] = None - name: Optional[str] = None - - -class ChoiceDeltaToolCallFunction(BaseModel): - arguments: Optional[str] = None - name: Optional[str] = None - - -class ChoiceDeltaToolCall(BaseModel): - index: int - id: Optional[str] = None - function: Optional[ChoiceDeltaToolCallFunction] = None - type: Optional[str] = None - - -class ChoiceDelta(BaseModel): - content: Optional[str] = None - role: Optional[str] = None - tool_calls: Optional[list[ChoiceDeltaToolCall]] = None - - -class Choice(BaseModel): - delta: ChoiceDelta - finish_reason: Optional[str] = None - index: int - - -class CompletionUsage(BaseModel): - prompt_tokens: int - completion_tokens: int - total_tokens: int - - -class ChatCompletionChunk(BaseModel): - id: Optional[str] = None - choices: list[Choice] - created: Optional[int] = None - model: Optional[str] = None - usage: Optional[CompletionUsage] = None - extra_json: dict[str, Any] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completions_create_param.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completions_create_param.py deleted file mode 100644 index 6ee4dc4794b201..00000000000000 --- 
a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completions_create_param.py +++ /dev/null @@ -1,8 +0,0 @@ -from typing import Optional - -from typing_extensions import TypedDict - - -class Reference(TypedDict, total=False): - enable: Optional[bool] - search_query: Optional[str] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/code_geex/code_geex_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/code_geex/code_geex_params.py deleted file mode 100644 index 666b38855cd637..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/code_geex/code_geex_params.py +++ /dev/null @@ -1,146 +0,0 @@ -from typing import Literal, Optional - -from typing_extensions import Required, TypedDict - -__all__ = [ - "CodeGeexTarget", - "CodeGeexContext", - "CodeGeexExtra", -] - - -class CodeGeexTarget(TypedDict, total=False): - """补全的内容参数""" - - path: Optional[str] - """文件路径""" - language: Required[ - Literal[ - "c", - "c++", - "cpp", - "c#", - "csharp", - "c-sharp", - "css", - "cuda", - "dart", - "lua", - "objectivec", - "objective-c", - "objective-c++", - "python", - "perl", - "prolog", - "swift", - "lisp", - "java", - "scala", - "tex", - "jsx", - "tsx", - "vue", - "markdown", - "html", - "php", - "js", - "javascript", - "typescript", - "go", - "shell", - "rust", - "sql", - "kotlin", - "vb", - "ruby", - "pascal", - "r", - "fortran", - "lean", - "matlab", - "delphi", - "scheme", - "basic", - "assembly", - "groovy", - "abap", - "gdscript", - "haskell", - "julia", - "elixir", - "excel", - "clojure", - "actionscript", - "solidity", - "powershell", - "erlang", - "cobol", - "alloy", - "awk", - "thrift", - "sparql", - "augeas", - "cmake", - "f-sharp", - "stan", - "isabelle", - "dockerfile", - "rmarkdown", - "literate-agda", - "tcl", - "glsl", - "antlr", - "verilog", - "racket", - "standard-ml", - "elm", - "yaml", - "smalltalk", - "ocaml", - "idris", - "visual-basic", 
- "protocol-buffer", - "bluespec", - "applescript", - "makefile", - "tcsh", - "maple", - "systemverilog", - "literate-coffeescript", - "vhdl", - "restructuredtext", - "sas", - "literate-haskell", - "java-server-pages", - "coffeescript", - "emacs-lisp", - "mathematica", - "xslt", - "zig", - "common-lisp", - "stata", - "agda", - "ada", - ] - ] - """代码语言类型,如python""" - code_prefix: Required[str] - """补全位置的前文""" - code_suffix: Required[str] - """补全位置的后文""" - - -class CodeGeexContext(TypedDict, total=False): - """附加代码""" - - path: Required[str] - """附加代码文件的路径""" - code: Required[str] - """附加的代码内容""" - - -class CodeGeexExtra(TypedDict, total=False): - target: Required[CodeGeexTarget] - """补全的内容参数""" - contexts: Optional[list[CodeGeexContext]] - """附加代码""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/embeddings.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/embeddings.py deleted file mode 100644 index 8425b5c86688dd..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/embeddings.py +++ /dev/null @@ -1,21 +0,0 @@ -from __future__ import annotations - -from typing import Optional - -from ..core import BaseModel -from .chat.chat_completion import CompletionUsage - -__all__ = ["Embedding", "EmbeddingsResponded"] - - -class Embedding(BaseModel): - object: str - index: Optional[int] = None - embedding: list[float] - - -class EmbeddingsResponded(BaseModel): - object: str - data: list[Embedding] - model: str - usage: CompletionUsage diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/__init__.py deleted file mode 100644 index bbaf59e4d7d17a..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .file_deleted import FileDeleted -from .file_object import FileObject, ListOfFileObject -from 
.upload_detail import UploadDetail - -__all__ = ["FileObject", "ListOfFileObject", "UploadDetail", "FileDeleted"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_create_params.py deleted file mode 100644 index 4ef93b1c05acae..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_create_params.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import annotations - -from typing import Literal, Optional - -from typing_extensions import Required, TypedDict - -__all__ = ["FileCreateParams"] - -from ...core import FileTypes -from . import UploadDetail - - -class FileCreateParams(TypedDict, total=False): - file: FileTypes - """file和 upload_detail二选一必填""" - - upload_detail: list[UploadDetail] - """file和 upload_detail二选一必填""" - - purpose: Required[Literal["fine-tune", "retrieval", "batch"]] - """ - 上传文件的用途,支持 "fine-tune和 "retrieval" - retrieval支持上传Doc、Docx、PDF、Xlsx、URL类型文件,且单个文件的大小不超过 5MB。 - fine-tune支持上传.jsonl文件且当前单个文件的大小最大可为 100 MB ,文件中语料格式需满足微调指南中所描述的格式。 - """ - custom_separator: Optional[list[str]] - """ - 当 purpose 为 retrieval 且文件类型为 pdf, url, docx 时上传,切片规则默认为 `\n`。 - """ - knowledge_id: str - """ - 当文件上传目的为 retrieval 时,需要指定知识库ID进行上传。 - """ - - sentence_size: int - """ - 当文件上传目的为 retrieval 时,需要指定知识库ID进行上传。 - """ diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_deleted.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_deleted.py deleted file mode 100644 index a384b1a69a5735..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_deleted.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Literal - -from ...core import BaseModel - -__all__ = ["FileDeleted"] - - -class FileDeleted(BaseModel): - id: str - - deleted: bool - - object: Literal["file"] diff --git 
a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_object.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_object.py deleted file mode 100644 index 8f9d0fbb8e6ce3..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_object.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Optional - -from ...core import BaseModel - -__all__ = ["FileObject", "ListOfFileObject"] - - -class FileObject(BaseModel): - id: Optional[str] = None - bytes: Optional[int] = None - created_at: Optional[int] = None - filename: Optional[str] = None - object: Optional[str] = None - purpose: Optional[str] = None - status: Optional[str] = None - status_details: Optional[str] = None - - -class ListOfFileObject(BaseModel): - object: Optional[str] = None - data: list[FileObject] - has_more: Optional[bool] = None diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/upload_detail.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/upload_detail.py deleted file mode 100644 index 8f1ca5ce5756aa..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/upload_detail.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Optional - -from ...core import BaseModel - - -class UploadDetail(BaseModel): - url: str - knowledge_type: int - file_name: Optional[str] = None - sentence_size: Optional[int] = None - custom_separator: Optional[list[str]] = None - callback_url: Optional[str] = None - callback_header: Optional[dict[str, str]] = None diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/__init__.py deleted file mode 100644 index 416f516ef7bf1c..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from 
__future__ import annotations - -from .fine_tuning_job import FineTuningJob, ListOfFineTuningJob -from .fine_tuning_job_event import FineTuningJobEvent diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job.py deleted file mode 100644 index 75c7553dbe35c6..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job.py +++ /dev/null @@ -1,51 +0,0 @@ -from typing import Optional, Union - -from ...core import BaseModel - -__all__ = ["FineTuningJob", "Error", "Hyperparameters", "ListOfFineTuningJob"] - - -class Error(BaseModel): - code: str - message: str - param: Optional[str] = None - - -class Hyperparameters(BaseModel): - n_epochs: Union[str, int, None] = None - - -class FineTuningJob(BaseModel): - id: Optional[str] = None - - request_id: Optional[str] = None - - created_at: Optional[int] = None - - error: Optional[Error] = None - - fine_tuned_model: Optional[str] = None - - finished_at: Optional[int] = None - - hyperparameters: Optional[Hyperparameters] = None - - model: Optional[str] = None - - object: Optional[str] = None - - result_files: list[str] - - status: str - - trained_tokens: Optional[int] = None - - training_file: str - - validation_file: Optional[str] = None - - -class ListOfFineTuningJob(BaseModel): - object: Optional[str] = None - data: list[FineTuningJob] - has_more: Optional[bool] = None diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job_event.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job_event.py deleted file mode 100644 index f996cff11430b0..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job_event.py +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Optional, Union - -from 
...core import BaseModel - -__all__ = ["FineTuningJobEvent", "Metric", "JobEvent"] - - -class Metric(BaseModel): - epoch: Optional[Union[str, int, float]] = None - current_steps: Optional[int] = None - total_steps: Optional[int] = None - elapsed_time: Optional[str] = None - remaining_time: Optional[str] = None - trained_tokens: Optional[int] = None - loss: Optional[Union[str, int, float]] = None - eval_loss: Optional[Union[str, int, float]] = None - acc: Optional[Union[str, int, float]] = None - eval_acc: Optional[Union[str, int, float]] = None - learning_rate: Optional[Union[str, int, float]] = None - - -class JobEvent(BaseModel): - object: Optional[str] = None - id: Optional[str] = None - type: Optional[str] = None - created_at: Optional[int] = None - level: Optional[str] = None - message: Optional[str] = None - data: Optional[Metric] = None - - -class FineTuningJobEvent(BaseModel): - object: Optional[str] = None - data: list[JobEvent] - has_more: Optional[bool] = None diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/job_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/job_create_params.py deleted file mode 100644 index e1ebc352bc97fd..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/job_create_params.py +++ /dev/null @@ -1,15 +0,0 @@ -from __future__ import annotations - -from typing import Literal, Union - -from typing_extensions import TypedDict - -__all__ = ["Hyperparameters"] - - -class Hyperparameters(TypedDict, total=False): - batch_size: Union[Literal["auto"], int] - - learning_rate_multiplier: Union[Literal["auto"], float] - - n_epochs: Union[Literal["auto"], int] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/__init__.py deleted file mode 100644 index 
57d0d2511dbc14..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .fine_tuned_models import FineTunedModelsStatus diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/fine_tuned_models.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/fine_tuned_models.py deleted file mode 100644 index b286a5b5774d3d..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/fine_tuned_models.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import ClassVar - -from ....core import PYDANTIC_V2, BaseModel, ConfigDict - -__all__ = ["FineTunedModelsStatus"] - - -class FineTunedModelsStatus(BaseModel): - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict(extra="allow", protected_namespaces=()) - request_id: str # 请求id - model_name: str # 模型名称 - delete_status: str # 删除状态 deleting(删除中), deleted (已删除) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/image.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/image.py deleted file mode 100644 index 3bcad0acabd215..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/image.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -from typing import Optional - -from ..core import BaseModel - -__all__ = ["GeneratedImage", "ImagesResponded"] - - -class GeneratedImage(BaseModel): - b64_json: Optional[str] = None - url: Optional[str] = None - revised_prompt: Optional[str] = None - - -class ImagesResponded(BaseModel): - created: int - data: list[GeneratedImage] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/__init__.py deleted file mode 100644 index 8c81d703e214a3..00000000000000 --- 
a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from .knowledge import KnowledgeInfo -from .knowledge_used import KnowledgeStatistics, KnowledgeUsed - -__all__ = [ - "KnowledgeInfo", - "KnowledgeStatistics", - "KnowledgeUsed", -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/__init__.py deleted file mode 100644 index 59cb41d7124a7f..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from .document import DocumentData, DocumentFailedInfo, DocumentObject, DocumentSuccessInfo - -__all__ = [ - "DocumentData", - "DocumentObject", - "DocumentSuccessInfo", - "DocumentFailedInfo", -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document.py deleted file mode 100644 index 980bc6f4a7c40d..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document.py +++ /dev/null @@ -1,51 +0,0 @@ -from typing import Optional - -from ....core import BaseModel - -__all__ = ["DocumentData", "DocumentObject", "DocumentSuccessInfo", "DocumentFailedInfo"] - - -class DocumentSuccessInfo(BaseModel): - documentId: Optional[str] = None - """文件id""" - filename: Optional[str] = None - """文件名称""" - - -class DocumentFailedInfo(BaseModel): - failReason: Optional[str] = None - """上传失败的原因,包括:文件格式不支持、文件大小超出限制、知识库容量已满、容量上限为 50 万字。""" - filename: Optional[str] = None - """文件名称""" - documentId: Optional[str] = None - """知识库id""" - - -class DocumentObject(BaseModel): - """文档信息""" - - successInfos: Optional[list[DocumentSuccessInfo]] = None - """上传成功的文件信息""" - failedInfos: 
Optional[list[DocumentFailedInfo]] = None - """上传失败的文件信息""" - - -class DocumentDataFailInfo(BaseModel): - """失败原因""" - - embedding_code: Optional[int] = ( - None # 失败码 10001:知识不可用,知识库空间已达上限 10002:知识不可用,知识库空间已达上限(字数超出限制) - ) - embedding_msg: Optional[str] = None # 失败原因 - - -class DocumentData(BaseModel): - id: str = None # 知识唯一id - custom_separator: list[str] = None # 切片规则 - sentence_size: str = None # 切片大小 - length: int = None # 文件大小(字节) - word_num: int = None # 文件字数 - name: str = None # 文件名 - url: str = None # 文件下载链接 - embedding_stat: int = None # 0:向量化中 1:向量化完成 2:向量化失败 - failInfo: Optional[DocumentDataFailInfo] = None # 失败原因 向量化失败embedding_stat=2的时候 会有此值 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_edit_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_edit_params.py deleted file mode 100644 index 509cb3a451af5f..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_edit_params.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Optional, TypedDict - -__all__ = ["DocumentEditParams"] - - -class DocumentEditParams(TypedDict): - """ - 知识参数类型定义 - - Attributes: - id (str): 知识ID - knowledge_type (int): 知识类型: - 1:文章知识: 支持pdf,url,docx - 2.问答知识-文档: 支持pdf,url,docx - 3.问答知识-表格: 支持xlsx - 4.商品库-表格: 支持xlsx - 5.自定义: 支持pdf,url,docx - custom_separator (Optional[List[str]]): 当前知识类型为自定义(knowledge_type=5)时的切片规则,默认\n - sentence_size (Optional[int]): 当前知识类型为自定义(knowledge_type=5)时的切片字数,取值范围: 20-2000,默认300 - callback_url (Optional[str]): 回调地址 - callback_header (Optional[dict]): 回调时携带的header - """ - - id: str - knowledge_type: int - custom_separator: Optional[list[str]] - sentence_size: Optional[int] - callback_url: Optional[str] - callback_header: Optional[dict[str, str]] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_params.py 
b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_params.py deleted file mode 100644 index 910c8c045e1b97..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_params.py +++ /dev/null @@ -1,26 +0,0 @@ -from __future__ import annotations - -from typing import Optional - -from typing_extensions import TypedDict - - -class DocumentListParams(TypedDict, total=False): - """ - 文件查询参数类型定义 - - Attributes: - purpose (Optional[str]): 文件用途 - knowledge_id (Optional[str]): 当文件用途为 retrieval 时,需要提供查询的知识库ID - page (Optional[int]): 页,默认1 - limit (Optional[int]): 查询文件列表数,默认10 - after (Optional[str]): 查询指定fileID之后的文件列表(当文件用途为 fine-tune 时需要) - order (Optional[str]): 排序规则,可选值['desc', 'asc'],默认desc(当文件用途为 fine-tune 时需要) - """ - - purpose: Optional[str] - knowledge_id: Optional[str] - page: Optional[int] - limit: Optional[int] - after: Optional[str] - order: Optional[str] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_resp.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_resp.py deleted file mode 100644 index acae4fad9ff36b..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_resp.py +++ /dev/null @@ -1,11 +0,0 @@ -from __future__ import annotations - -from ....core import BaseModel -from . 
import DocumentData - -__all__ = ["DocumentPage"] - - -class DocumentPage(BaseModel): - list: list[DocumentData] - object: str diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge.py deleted file mode 100644 index bc6f159eb211e5..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Optional - -from ...core import BaseModel - -__all__ = ["KnowledgeInfo"] - - -class KnowledgeInfo(BaseModel): - id: Optional[str] = None - """知识库唯一 id""" - embedding_id: Optional[str] = ( - None # 知识库绑定的向量化模型 见模型列表 [内部服务开放接口文档](https://lslfd0slxc.feishu.cn/docx/YauWdbBiMopV0FxB7KncPWCEn8f#H15NduiQZo3ugmxnWQFcfAHpnQ4) - ) - name: Optional[str] = None # 知识库名称 100字限制 - customer_identifier: Optional[str] = None # 用户标识 长度32位以内 - description: Optional[str] = None # 知识库描述 500字限制 - background: Optional[str] = None # 背景颜色(给枚举)'blue', 'red', 'orange', 'purple', 'sky' - icon: Optional[str] = ( - None # 知识库图标(给枚举) question: 问号、book: 书籍、seal: 印章、wrench: 扳手、tag: 标签、horn: 喇叭、house: 房子 # noqa: E501 - ) - bucket_id: Optional[str] = None # 桶id 限制32位 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_create_params.py deleted file mode 100644 index c3da201727c34a..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_create_params.py +++ /dev/null @@ -1,30 +0,0 @@ -from __future__ import annotations - -from typing import Literal, Optional - -from typing_extensions import TypedDict - -__all__ = ["KnowledgeBaseParams"] - - -class KnowledgeBaseParams(TypedDict): - """ - 知识库参数类型定义 - - Attributes: - embedding_id (int): 知识库绑定的向量化模型ID - name (str): 知识库名称,限制100字 - 
customer_identifier (Optional[str]): 用户标识,长度32位以内 - description (Optional[str]): 知识库描述,限制500字 - background (Optional[Literal['blue', 'red', 'orange', 'purple', 'sky']]): 背景颜色 - icon (Optional[Literal['question', 'book', 'seal', 'wrench', 'tag', 'horn', 'house']]): 知识库图标 - bucket_id (Optional[str]): 桶ID,限制32位 - """ - - embedding_id: int - name: str - customer_identifier: Optional[str] - description: Optional[str] - background: Optional[Literal["blue", "red", "orange", "purple", "sky"]] = None - icon: Optional[Literal["question", "book", "seal", "wrench", "tag", "horn", "house"]] = None - bucket_id: Optional[str] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_params.py deleted file mode 100644 index a221b28e4603be..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_params.py +++ /dev/null @@ -1,15 +0,0 @@ -from __future__ import annotations - -from typing_extensions import TypedDict - -__all__ = ["KnowledgeListParams"] - - -class KnowledgeListParams(TypedDict, total=False): - page: int = 1 - """ 页码,默认 1,第一页 - """ - - size: int = 10 - """每页数量 默认10 - """ diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_resp.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_resp.py deleted file mode 100644 index e462eddc550d61..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_resp.py +++ /dev/null @@ -1,11 +0,0 @@ -from __future__ import annotations - -from ...core import BaseModel -from . 
import KnowledgeInfo - -__all__ = ["KnowledgePage"] - - -class KnowledgePage(BaseModel): - list: list[KnowledgeInfo] - object: str diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_used.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_used.py deleted file mode 100644 index cfda7097026c59..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_used.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Optional - -from ...core import BaseModel - -__all__ = ["KnowledgeStatistics", "KnowledgeUsed"] - - -class KnowledgeStatistics(BaseModel): - """ - 使用量统计 - """ - - word_num: Optional[int] = None - length: Optional[int] = None - - -class KnowledgeUsed(BaseModel): - used: Optional[KnowledgeStatistics] = None - """已使用量""" - total: Optional[KnowledgeStatistics] = None - """知识库总量""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/__init__.py deleted file mode 100644 index c9bd60419ce606..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .sensitive_word_check import SensitiveWordCheckRequest - -__all__ = ["SensitiveWordCheckRequest"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/sensitive_word_check.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/sensitive_word_check.py deleted file mode 100644 index 0c37d99e653292..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/sensitive_word_check.py +++ /dev/null @@ -1,14 +0,0 @@ -from typing import Optional - -from typing_extensions import TypedDict - - -class SensitiveWordCheckRequest(TypedDict, 
total=False): - type: Optional[str] - """敏感词类型,当前仅支持ALL""" - status: Optional[str] - """敏感词启用禁用状态 - 启用:ENABLE - 禁用:DISABLE - 备注:默认开启敏感词校验,如果要关闭敏感词校验,需联系商务获取对应权限,否则敏感词禁用不生效。 - """ diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/__init__.py deleted file mode 100644 index 62f77344eee56b..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from .web_search import ( - SearchIntent, - SearchRecommend, - SearchResult, - WebSearch, -) -from .web_search_chunk import WebSearchChunk - -__all__ = ["WebSearch", "SearchIntent", "SearchResult", "SearchRecommend", "WebSearchChunk"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/tools_web_search_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/tools_web_search_params.py deleted file mode 100644 index b3a3b26f07ee58..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/tools_web_search_params.py +++ /dev/null @@ -1,35 +0,0 @@ -from __future__ import annotations - -from typing import Optional, Union - -from typing_extensions import TypedDict - -__all__ = ["WebSearchParams"] - - -class WebSearchParams(TypedDict): - """ - 工具名:web-search-pro参数类型定义 - - Attributes: - :param model: str, 模型名称 - :param request_id: Optional[str], 请求ID - :param stream: Optional[bool], 是否流式 - :param messages: Union[str, List[str], List[int], object, None], - 包含历史对话上下文的内容,按照 {"role": "user", "content": "你好"} 的json 数组形式进行传参 - 当前版本仅支持 User Message 单轮对话,工具会理解User Message并进行搜索, - 请尽可能传入不带指令格式的用户原始提问,以提高搜索准确率。 - :param scope: Optional[str], 指定搜索范围,全网、学术等,默认全网 - :param location: Optional[str], 指定搜索用户地区 location 提高相关性 - :param recent_days: Optional[int],支持指定返回 N 天(1-30)更新的搜索结果 - - - """ - - model: str - request_id: Optional[str] - stream: Optional[bool] - 
messages: Union[str, list[str], list[int], object, None] - scope: Optional[str] = None - location: Optional[str] = None - recent_days: Optional[int] = None diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search.py deleted file mode 100644 index ac9fa3821e979b..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import Optional - -from ...core import BaseModel - -__all__ = [ - "WebSearch", - "SearchIntent", - "SearchResult", - "SearchRecommend", -] - - -class SearchIntent(BaseModel): - index: int - # 搜索轮次,默认为 0 - query: str - # 搜索优化 query - intent: str - # 判断的意图类型 - keywords: str - # 搜索关键词 - - -class SearchResult(BaseModel): - index: int - # 搜索轮次,默认为 0 - title: str - # 标题 - link: str - # 链接 - content: str - # 内容 - icon: str - # 图标 - media: str - # 来源媒体 - refer: str - # 角标序号 [ref_1] - - -class SearchRecommend(BaseModel): - index: int - # 搜索轮次,默认为 0 - query: str - # 推荐query - - -class WebSearchMessageToolCall(BaseModel): - id: str - search_intent: Optional[SearchIntent] - search_result: Optional[SearchResult] - search_recommend: Optional[SearchRecommend] - type: str - - -class WebSearchMessage(BaseModel): - role: str - tool_calls: Optional[list[WebSearchMessageToolCall]] = None - - -class WebSearchChoice(BaseModel): - index: int - finish_reason: str - message: WebSearchMessage - - -class WebSearch(BaseModel): - created: Optional[int] = None - choices: list[WebSearchChoice] - request_id: Optional[str] = None - id: Optional[str] = None diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search_chunk.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search_chunk.py deleted file mode 100644 index 7fb0e02bb58719..00000000000000 --- 
a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search_chunk.py +++ /dev/null @@ -1,33 +0,0 @@ -from typing import Optional - -from ...core import BaseModel -from .web_search import SearchIntent, SearchRecommend, SearchResult - -__all__ = ["WebSearchChunk"] - - -class ChoiceDeltaToolCall(BaseModel): - index: int - id: Optional[str] = None - - search_intent: Optional[SearchIntent] = None - search_result: Optional[SearchResult] = None - search_recommend: Optional[SearchRecommend] = None - type: Optional[str] = None - - -class ChoiceDelta(BaseModel): - role: Optional[str] = None - tool_calls: Optional[list[ChoiceDeltaToolCall]] = None - - -class Choice(BaseModel): - delta: ChoiceDelta - finish_reason: Optional[str] = None - index: int - - -class WebSearchChunk(BaseModel): - id: Optional[str] = None - choices: list[Choice] - created: Optional[int] = None diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/__init__.py deleted file mode 100644 index b14072b1a771af..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .video_object import VideoObject, VideoResult - -__all__ = ["VideoObject", "VideoResult"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_create_params.py deleted file mode 100644 index f5489d708e7227..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_create_params.py +++ /dev/null @@ -1,27 +0,0 @@ -from __future__ import annotations - -from typing import Optional - -from typing_extensions import TypedDict - -__all__ = ["VideoCreateParams"] - -from ..sensitive_word_check import SensitiveWordCheckRequest - - -class VideoCreateParams(TypedDict, 
total=False): - model: str - """模型编码""" - prompt: str - """所需视频的文本描述""" - image_url: str - """所需视频的文本描述""" - sensitive_word_check: Optional[SensitiveWordCheckRequest] - """支持 URL 或者 Base64、传入 image 奖进行图生视频 - * 图片格式: - * 图片大小:""" - request_id: str - """由用户端传参,需保证唯一性;用于区分每次请求的唯一标识,用户端不传时平台会默认生成。""" - - user_id: str - """用户端。""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_object.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_object.py deleted file mode 100644 index 85c3844d8a791c..00000000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_object.py +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Optional - -from ...core import BaseModel - -__all__ = ["VideoObject", "VideoResult"] - - -class VideoResult(BaseModel): - url: str - """视频url""" - cover_image_url: str - """预览图""" - - -class VideoObject(BaseModel): - id: Optional[str] = None - """智谱 AI 开放平台生成的任务订单号,调用请求结果接口时请使用此订单号""" - - model: str - """模型名称""" - - video_result: list[VideoResult] - """视频生成结果""" - - task_status: str - """处理状态,PROCESSING(处理中),SUCCESS(成功),FAIL(失败) - 注:处理中状态需通过查询获取结果""" - - request_id: str - """用户在客户端请求时提交的任务编号或者平台生成的任务编号""" diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index c5dced121faa05..554353c6c9e938 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -1,8 +1,11 @@ +from datetime import datetime from enum import Enum from typing import Generic, Optional, TypeVar -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict, Field +from core.model_runtime.entities.model_entities import AIModelEntity +from core.model_runtime.entities.provider_entities import ProviderEntity from core.tools.entities.tool_entities import ToolProviderEntityWithPlugin T = TypeVar("T", bound=(BaseModel | dict | list | bool)) @@ -22,6 +25,7 @@ class 
InstallPluginMessage(BaseModel): """ Message for installing a plugin. """ + class Event(Enum): Info = "info" Done = "done" @@ -42,4 +46,44 @@ class PluginBasicBooleanResponse(BaseModel): """ Basic boolean response from plugin daemon. """ - result: bool \ No newline at end of file + + result: bool + + +class PluginModelSchemaEntity(BaseModel): + model_schema: AIModelEntity = Field(description="The model schema.") + + # pydantic configs + model_config = ConfigDict(protected_namespaces=()) + + +class PluginModelProviderEntity(BaseModel): + id: str = Field(alias="ID", description="ID") + created_at: datetime = Field(alias="CreatedAt", description="The created at time of the model provider.") + updated_at: datetime = Field(alias="UpdatedAt", description="The updated at time of the model provider.") + provider: str = Field(description="The provider of the model.") + tenant_id: str = Field(description="The tenant ID.") + plugin_unique_identifier: str = Field(description="The plugin unique identifier.") + plugin_id: str = Field(description="The plugin ID.") + declaration: ProviderEntity = Field(description="The declaration of the model provider.") + + +class PluginNumTokensResponse(BaseModel): + """ + Response for number of tokens. 
+ """ + + num_tokens: int = Field(description="The number of tokens.") + + +class PluginStringResultResponse(BaseModel): + result: str = Field(description="The result of the string.") + + +class PluginVoiceEntity(BaseModel): + name: str = Field(description="The name of the voice.") + value: str = Field(description="The value of the voice.") + + +class PluginVoicesResponse(BaseModel): + voices: list[PluginVoiceEntity] = Field(description="The result of the voices.") diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 5fe9ea1ddaf878..af40ebc5ca5b31 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -1,7 +1,7 @@ from collections.abc import Mapping from typing import Any, Literal, Optional -from pydantic import BaseModel, Field, field_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator from core.entities.provider_entities import BasicProviderConfig from core.model_runtime.entities.message_entities import ( @@ -39,6 +39,8 @@ class BaseRequestInvokeModel(BaseModel): model: str model_type: ModelType + model_config = ConfigDict(protected_namespaces=()) + class RequestInvokeLLM(BaseRequestInvokeModel): """ @@ -53,6 +55,8 @@ class RequestInvokeLLM(BaseRequestInvokeModel): stop: Optional[list[str]] = Field(default_factory=list) stream: Optional[bool] = False + model_config = ConfigDict(protected_namespaces=()) + @field_validator("prompt_messages", mode="before") @classmethod def convert_prompt_messages(cls, v): diff --git a/api/core/plugin/manager/asset.py b/api/core/plugin/manager/asset.py index df76f56a6dc8d9..fc4a99ad49abb9 100644 --- a/api/core/plugin/manager/asset.py +++ b/api/core/plugin/manager/asset.py @@ -2,11 +2,11 @@ class PluginAssetManager(BasePluginManager): - def fetch_asset(self, id: str) -> bytes: + def fetch_asset(self, tenant_id: str, id: str) -> bytes: """ Fetch an asset by id. 
""" - response = self._request(method="GET", path=f"/assets/plugin/{id}") + response = self._request(method="GET", path=f"plugin/{tenant_id}/assets/{id}") if response.status_code != 200: raise ValueError(f"can not found asset {id}") return response.content diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index fd18b3798e9b6a..1e050427d4e890 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -132,8 +132,19 @@ def _request_with_plugin_daemon_response_stream( line_data = json.loads(line) rep = PluginDaemonBasicResponse[type](**line_data) if rep.code != 0: - raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") + raise PluginDaemonRespError(rep.message, rep.code) if rep.data is None: raise ValueError("got empty data from plugin daemon") yield rep.data - \ No newline at end of file + + +class PluginDaemonRespError(Exception): + """ + Plugin daemon response error. + """ + + def __init__(self, resp_message: str, code: int): + super().__init__() + self.message = f"got error from plugin daemon: {resp_message}, code: {code}" + self.resp_message = resp_message + self.code = code diff --git a/api/core/plugin/manager/model.py b/api/core/plugin/manager/model.py index 4411d76fe10079..e34e9b516efa90 100644 --- a/api/core/plugin/manager/model.py +++ b/api/core/plugin/manager/model.py @@ -1,13 +1,523 @@ -from core.model_runtime.entities.provider_entities import ProviderEntity -from core.plugin.manager.base import BasePluginManager +import binascii +from collections.abc import Generator, Sequence +from typing import IO, Optional + +from core.model_runtime.entities.llm_entities import LLMResultChunk +from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool +from core.model_runtime.entities.model_entities import AIModelEntity +from core.model_runtime.entities.rerank_entities import RerankResult +from core.model_runtime.entities.text_embedding_entities import 
TextEmbeddingResult +from core.model_runtime.utils.encoders import jsonable_encoder +from core.plugin.entities.plugin_daemon import ( + PluginBasicBooleanResponse, + PluginModelProviderEntity, + PluginModelSchemaEntity, + PluginNumTokensResponse, + PluginStringResultResponse, + PluginVoicesResponse, +) +from core.plugin.manager.base import BasePluginManager, PluginDaemonRespError class PluginModelManager(BasePluginManager): - def fetch_model_providers(self, tenant_id: str) -> list[ProviderEntity]: + def fetch_model_providers(self, tenant_id: str) -> Sequence[PluginModelProviderEntity]: """ Fetch model providers for the given tenant. """ response = self._request_with_plugin_daemon_response( - "GET", f"plugin/{tenant_id}/models", list[ProviderEntity], params={"page": 1, "page_size": 256} + "GET", + f"plugin/{tenant_id}/management/models", + list[PluginModelProviderEntity], + params={"page": 1, "page_size": 256}, ) return response + + def get_model_schema( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict, + ) -> AIModelEntity | None: + """ + Get model schema + """ + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/model/schema", + PluginModelSchemaEntity, + data={ + "user_id": user_id, + "data": { + "provider": provider, + "model_type": model_type, + "model": model, + "credentials": credentials, + }, + }, + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp.model_schema + + return None + + def validate_provider_credentials( + self, tenant_id: str, user_id: str, plugin_id: str, provider: str, credentials: dict + ) -> bool: + """ + validate the credentials of the provider + """ + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/model/validate_provider_credentials", + PluginBasicBooleanResponse, + data={ + 
"user_id": user_id, + "data": { + "provider": provider, + "credentials": credentials, + }, + }, + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp.result + + return False + + def validate_model_credentials( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict, + ) -> bool: + """ + validate the credentials of the provider + """ + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/model/validate_model_credentials", + PluginBasicBooleanResponse, + data={ + "user_id": user_id, + "data": { + "provider": provider, + "model_type": model_type, + "model": model, + "credentials": credentials, + }, + }, + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp.result + + return False + + def invoke_llm( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model: str, + credentials: dict, + prompt_messages: list[PromptMessage], + model_parameters: Optional[dict] = None, + tools: Optional[list[PromptMessageTool]] = None, + stop: Optional[list[str]] = None, + stream: bool = True, + ) -> Generator[LLMResultChunk, None, None]: + """ + Invoke llm + """ + response = self._request_with_plugin_daemon_response_stream( + method="POST", + path=f"plugin/{tenant_id}/dispatch/llm/invoke", + type=LLMResultChunk, + data=jsonable_encoder( + { + "user_id": user_id, + "data": { + "provider": provider, + "model_type": "llm", + "model": model, + "credentials": credentials, + "prompt_messages": prompt_messages, + "model_parameters": model_parameters, + "tools": tools, + "stop": stop, + "stream": stream, + }, + } + ), + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + try: + yield from response + except PluginDaemonRespError as e: + raise ValueError(e.resp_message + 
str(e.code)) + + def get_llm_num_tokens( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict, + prompt_messages: list[PromptMessage], + tools: Optional[list[PromptMessageTool]] = None, + ) -> int: + """ + Get number of tokens for llm + """ + response = self._request_with_plugin_daemon_response_stream( + method="POST", + path=f"plugin/{tenant_id}/dispatch/llm/num_tokens", + type=PluginNumTokensResponse, + data=jsonable_encoder( + { + "user_id": user_id, + "data": { + "provider": provider, + "model_type": model_type, + "model": model, + "credentials": credentials, + "prompt_messages": prompt_messages, + "tools": tools, + }, + } + ), + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp.num_tokens + + return 0 + + def invoke_text_embedding( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model: str, + credentials: dict, + texts: list[str], + ) -> TextEmbeddingResult: + """ + Invoke text embedding + """ + response = self._request_with_plugin_daemon_response_stream( + method="POST", + path=f"plugin/{tenant_id}/dispatch/text_embedding/invoke", + type=TextEmbeddingResult, + data=jsonable_encoder( + { + "user_id": user_id, + "data": { + "provider": provider, + "model_type": "text-embedding", + "model": model, + "credentials": credentials, + "texts": texts, + }, + } + ), + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp + + raise ValueError("Failed to invoke text embedding") + + def get_text_embedding_num_tokens( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict, + texts: list[str], + ) -> int: + """ + Get number of tokens for text embedding + """ + response = self._request_with_plugin_daemon_response_stream( + method="POST", + 
path=f"plugin/{tenant_id}/dispatch/text_embedding/num_tokens", + type=PluginNumTokensResponse, + data=jsonable_encoder( + { + "user_id": user_id, + "data": { + "provider": provider, + "model_type": model_type, + "model": model, + "credentials": credentials, + "texts": texts, + }, + } + ), + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp.num_tokens + + return 0 + + def invoke_rerank( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict, + query: str, + docs: list[str], + score_threshold: Optional[float] = None, + top_n: Optional[int] = None, + ) -> RerankResult: + """ + Invoke rerank + """ + response = self._request_with_plugin_daemon_response_stream( + method="POST", + path=f"plugin/{tenant_id}/dispatch/rerank/invoke", + type=RerankResult, + data=jsonable_encoder( + { + "user_id": user_id, + "data": { + "provider": provider, + "model_type": model_type, + "model": model, + "credentials": credentials, + "query": query, + "docs": docs, + "score_threshold": score_threshold, + "top_n": top_n, + }, + } + ), + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp + + raise ValueError("Failed to invoke rerank") + + def invoke_tts( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict, + content_text: str, + voice: str, + ) -> Generator[bytes, None, None]: + """ + Invoke tts + """ + response = self._request_with_plugin_daemon_response_stream( + method="POST", + path=f"plugin/{tenant_id}/dispatch/tts/invoke", + type=PluginStringResultResponse, + data=jsonable_encoder( + { + "user_id": user_id, + "data": { + "provider": provider, + "model_type": model_type, + "model": model, + "credentials": credentials, + "content_text": content_text, + "voice": voice, + }, + } + ), + headers={ 
+ "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + try: + for result in response: + hex_str = result.result + yield binascii.unhexlify(hex_str) + except PluginDaemonRespError as e: + raise ValueError(e.resp_message + str(e.code)) + + def get_tts_model_voices( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict, + language: Optional[str] = None, + ) -> list[dict]: + """ + Get tts model voices + """ + response = self._request_with_plugin_daemon_response_stream( + method="POST", + path=f"plugin/{tenant_id}/dispatch/model/voices", + type=PluginVoicesResponse, + data=jsonable_encoder( + { + "user_id": user_id, + "data": { + "provider": provider, + "model_type": model_type, + "model": model, + "credentials": credentials, + "language": language, + }, + } + ), + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + for voice in resp.voices: + return [{"name": voice.name, "value": voice.value}] + + return [] + + def invoke_speech_to_text( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict, + file: IO[bytes], + ) -> str: + """ + Invoke speech to text + """ + response = self._request_with_plugin_daemon_response_stream( + method="POST", + path=f"plugin/{tenant_id}/dispatch/speech2text/invoke", + type=PluginStringResultResponse, + data=jsonable_encoder( + { + "user_id": user_id, + "data": { + "provider": provider, + "model_type": model_type, + "model": model, + "credentials": credentials, + "file": binascii.hexlify(file.read()).decode(), + }, + } + ), + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp.result + + raise ValueError("Failed to invoke speech to text") + + def invoke_moderation( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: 
str, + model_type: str, + model: str, + credentials: dict, + text: str, + ) -> bool: + """ + Invoke moderation + """ + response = self._request_with_plugin_daemon_response_stream( + method="POST", + path=f"plugin/{tenant_id}/dispatch/moderation/invoke", + type=PluginBasicBooleanResponse, + data=jsonable_encoder( + { + "user_id": user_id, + "data": { + "provider": provider, + "model_type": model_type, + "model": model, + "credentials": credentials, + "text": text, + }, + } + ), + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp.result + + raise ValueError("Failed to invoke moderation") diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py index 3a1fe300dfd311..cb49a6cf5619b7 100644 --- a/api/core/provider_manager.py +++ b/api/core/provider_manager.py @@ -22,7 +22,7 @@ from core.helper.position_helper import is_filtered from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.entities.provider_entities import CredentialFormSchema, FormType, ProviderEntity -from core.model_runtime.model_providers import model_provider_factory +from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from extensions import ext_hosting_provider from extensions.ext_database import db from extensions.ext_redis import redis_client @@ -97,6 +97,7 @@ def get_configurations(self, tenant_id: str) -> ProviderConfigurations: provider_name_to_provider_model_records_dict = self._get_all_provider_models(tenant_id) # Get all provider entities + model_provider_factory = ModelProviderFactory(tenant_id) provider_entities = model_provider_factory.get_providers() # Get All preferred provider types of the workspace @@ -204,12 +205,10 @@ def get_provider_model_bundle(self, tenant_id: str, provider: str, model_type: M if not provider_configuration: raise ValueError(f"Provider {provider} does not exist.") - provider_instance = 
provider_configuration.get_provider_instance() - model_type_instance = provider_instance.get_model_instance(model_type) + model_type_instance = provider_configuration.get_model_type_instance(model_type) return ProviderModelBundle( configuration=provider_configuration, - provider_instance=provider_instance, model_type_instance=model_type_instance, ) @@ -257,8 +256,8 @@ def get_default_model(self, tenant_id: str, model_type: ModelType) -> Optional[D if not default_model: return None - provider_instance = model_provider_factory.get_provider_instance(default_model.provider_name) - provider_schema = provider_instance.get_provider_schema() + model_provider_factory = ModelProviderFactory(tenant_id) + provider_schema = model_provider_factory.get_provider_schema(provider=default_model.provider_name) return DefaultModelEntity( model=default_model.model_name, diff --git a/api/core/tools/utils/model_invocation_utils.py b/api/core/tools/utils/model_invocation_utils.py index 4e226810d6ac90..b3c3292f5d58c2 100644 --- a/api/core/tools/utils/model_invocation_utils.py +++ b/api/core/tools/utils/model_invocation_utils.py @@ -10,7 +10,7 @@ from core.model_manager import ModelManager from core.model_runtime.entities.llm_entities import LLMResult from core.model_runtime.entities.message_entities import PromptMessage -from core.model_runtime.entities.model_entities import ModelType +from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType from core.model_runtime.errors.invoke import ( InvokeAuthorizationError, InvokeBadRequestError, @@ -18,7 +18,7 @@ InvokeRateLimitError, InvokeServerUnavailableError, ) -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel, ModelPropertyKey +from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.model_runtime.utils.encoders import jsonable_encoder from extensions.ext_database import db from models.tools import ToolModelInvoke diff --git 
a/api/services/model_load_balancing_service.py b/api/services/model_load_balancing_service.py index e7b9422cfe1e08..5c5c0c16078ba5 100644 --- a/api/services/model_load_balancing_service.py +++ b/api/services/model_load_balancing_service.py @@ -14,7 +14,7 @@ ModelCredentialSchema, ProviderCredentialSchema, ) -from core.model_runtime.model_providers import model_provider_factory +from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from core.provider_manager import ProviderManager from extensions.ext_database import db from models.provider import LoadBalancingModelConfig @@ -527,6 +527,7 @@ def _custom_credentials_validate( credentials[key] = encrypter.decrypt_token(tenant_id, original_credentials[key]) if validate: + model_provider_factory = ModelProviderFactory(tenant_id) if isinstance(credential_schemas, ModelCredentialSchema): credentials = model_provider_factory.model_credentials_validate( provider=provider_configuration.provider.provider, diff --git a/api/services/model_provider_service.py b/api/services/model_provider_service.py index 384a072b371fdd..630d575f8225cf 100644 --- a/api/services/model_provider_service.py +++ b/api/services/model_provider_service.py @@ -1,16 +1,12 @@ import logging -import mimetypes import os -from pathlib import Path -from typing import Optional, cast +from typing import Optional import requests -from flask import current_app from core.entities.model_entities import ModelStatus, ProviderModelWithStatusEntity from core.model_runtime.entities.model_entities import ModelType, ParameterRule -from core.model_runtime.model_providers import model_provider_factory -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel +from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from core.provider_manager import ProviderManager from models.provider import ProviderType from services.entities.model_provider_entities import ( @@ -100,7 
+96,7 @@ def get_models_by_provider(self, tenant_id: str, provider: str) -> list[ModelWit ModelWithProviderEntityResponse(model) for model in provider_configurations.get_models(provider=provider) ] - def get_provider_credentials(self, tenant_id: str, provider: str) -> dict: + def get_provider_credentials(self, tenant_id: str, provider: str) -> Optional[dict]: """ get provider credentials. @@ -176,7 +172,7 @@ def remove_provider_credentials(self, tenant_id: str, provider: str) -> None: # Remove custom provider credentials. provider_configuration.delete_custom_credentials() - def get_model_credentials(self, tenant_id: str, provider: str, model_type: str, model: str) -> dict: + def get_model_credentials(self, tenant_id: str, provider: str, model_type: str, model: str) -> Optional[dict]: """ get model credentials. @@ -351,18 +347,17 @@ def get_model_parameter_rules(self, tenant_id: str, provider: str, model: str) - if not provider_configuration: raise ValueError(f"Provider {provider} does not exist.") - # Get model instance of LLM - model_type_instance = provider_configuration.get_model_type_instance(ModelType.LLM) - model_type_instance = cast(LargeLanguageModel, model_type_instance) - # fetch credentials credentials = provider_configuration.get_current_credentials(model_type=ModelType.LLM, model=model) if not credentials: return [] - # Call get_parameter_rules method of model instance to get model parameter rules - return model_type_instance.get_parameter_rules(model=model, credentials=credentials) + model_schema = provider_configuration.get_model_schema( + model_type=ModelType.LLM, model=model, credentials=credentials + ) + + return model_schema.parameter_rules if model_schema else [] def get_default_model_of_model_type(self, tenant_id: str, model_type: str) -> Optional[DefaultModelResponse]: """ @@ -410,52 +405,21 @@ def update_default_model_of_model_type(self, tenant_id: str, model_type: str, pr ) def get_model_provider_icon( - self, provider: str, icon_type: str, 
lang: str + self, tenant_id: str, provider: str, icon_type: str, lang: str ) -> tuple[Optional[bytes], Optional[str]]: """ get model provider icon. + :param tenant_id: workspace id :param provider: provider name :param icon_type: icon type (icon_small or icon_large) :param lang: language (zh_Hans or en_US) :return: """ - provider_instance = model_provider_factory.get_provider_instance(provider) - provider_schema = provider_instance.get_provider_schema() - - if icon_type.lower() == "icon_small": - if not provider_schema.icon_small: - raise ValueError(f"Provider {provider} does not have small icon.") - - if lang.lower() == "zh_hans": - file_name = provider_schema.icon_small.zh_Hans - else: - file_name = provider_schema.icon_small.en_US - else: - if not provider_schema.icon_large: - raise ValueError(f"Provider {provider} does not have large icon.") + model_provider_factory = ModelProviderFactory(tenant_id) + byte_data = model_provider_factory.get_provider_icon(provider, icon_type, lang) - if lang.lower() == "zh_hans": - file_name = provider_schema.icon_large.zh_Hans - else: - file_name = provider_schema.icon_large.en_US - - root_path = current_app.root_path - provider_instance_path = os.path.dirname( - os.path.join(root_path, provider_instance.__class__.__module__.replace(".", "/")) - ) - file_path = os.path.join(provider_instance_path, "_assets") - file_path = os.path.join(file_path, file_name) - - if not os.path.exists(file_path): - return None, None - - mimetype, _ = mimetypes.guess_type(file_path) - mimetype = mimetype or "application/octet-stream" - - # read binary from file - byte_data = Path(file_path).read_bytes() - return byte_data, mimetype + return byte_data, "application/octet-stream" def switch_preferred_provider(self, tenant_id: str, provider: str, preferred_provider_type: str) -> None: """ @@ -525,6 +489,9 @@ def disable_model(self, tenant_id: str, provider: str, model: str, model_type: s def free_quota_submit(self, tenant_id: str, provider: str): 
api_key = os.environ.get("FREE_QUOTA_APPLY_API_KEY") api_base_url = os.environ.get("FREE_QUOTA_APPLY_BASE_URL") + if not api_base_url: + raise Exception("FREE_QUOTA_APPLY_BASE_URL is not set") + api_url = api_base_url + "/api/v1/providers/apply" headers = {"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"} @@ -546,6 +513,9 @@ def free_quota_submit(self, tenant_id: str, provider: str): def free_quota_qualification_verify(self, tenant_id: str, provider: str, token: Optional[str]): api_key = os.environ.get("FREE_QUOTA_APPLY_API_KEY") api_base_url = os.environ.get("FREE_QUOTA_APPLY_BASE_URL") + if not api_base_url: + raise Exception("FREE_QUOTA_APPLY_BASE_URL is not set") + api_url = api_base_url + "/api/v1/providers/qualification-verify" headers = {"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"} diff --git a/api/tests/integration_tests/model_runtime/test_model_provider_factory.py b/api/tests/integration_tests/model_runtime/test_model_provider_factory.py index 0ec4b0b7243176..5cb8a6252af20c 100644 --- a/api/tests/integration_tests/model_runtime/test_model_provider_factory.py +++ b/api/tests/integration_tests/model_runtime/test_model_provider_factory.py @@ -9,7 +9,7 @@ def test_get_providers(): - factory = ModelProviderFactory() + factory = ModelProviderFactory("test_tenant") providers = factory.get_providers() for provider in providers: @@ -20,7 +20,7 @@ def test_get_providers(): def test_get_models(): - factory = ModelProviderFactory() + factory = ModelProviderFactory("test_tenant") providers = factory.get_models( model_type=ModelType.LLM, provider_configs=[ @@ -51,19 +51,7 @@ def test_get_models(): def test_provider_credentials_validate(): - factory = ModelProviderFactory() + factory = ModelProviderFactory("test_tenant") factory.provider_credentials_validate( provider="openai", credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")} ) - - -def test__get_model_provider_map(): - factory = ModelProviderFactory() 
- model_providers = factory._get_model_provider_map() - - for name, model_provider in model_providers.items(): - logger.debug(name) - logger.debug(model_provider.provider_instance) - - assert len(model_providers) >= 1 - assert isinstance(model_providers["openai"], ModelProviderExtension) diff --git a/api/tests/integration_tests/workflow/nodes/test_llm.py b/api/tests/integration_tests/workflow/nodes/test_llm.py index dfb43650d20c1c..aff6e1c48199f8 100644 --- a/api/tests/integration_tests/workflow/nodes/test_llm.py +++ b/api/tests/integration_tests/workflow/nodes/test_llm.py @@ -115,7 +115,6 @@ def test_execute_llm(setup_openai_mock): custom_configuration=CustomConfiguration(provider=CustomProviderConfiguration(credentials=credentials)), model_settings=[], ), - provider_instance=provider_instance, model_type_instance=model_type_instance, ) model_instance = ModelInstance(provider_model_bundle=provider_model_bundle, model="gpt-3.5-turbo") @@ -203,7 +202,6 @@ def test_execute_llm_with_jinja2(setup_code_executor_mock, setup_openai_mock): custom_configuration=CustomConfiguration(provider=CustomProviderConfiguration(credentials=credentials)), model_settings=[], ), - provider_instance=provider_instance, model_type_instance=model_type_instance, ) diff --git a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py index 88435c40227371..90c0d54e171a37 100644 --- a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py +++ b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py @@ -35,23 +35,27 @@ def get_mocked_fetch_model_config( mode: str, credentials: dict, ): - provider_instance = ModelProviderFactory().get_provider_instance(provider) - model_type_instance = provider_instance.get_model_instance(ModelType.LLM) + model_provider_factory = ModelProviderFactory(tenant_id="test_tenant") + model_type_instance = 
model_provider_factory.get_model_type_instance(provider, ModelType.LLM) provider_model_bundle = ProviderModelBundle( configuration=ProviderConfiguration( tenant_id="1", - provider=provider_instance.get_provider_schema(), + provider=model_provider_factory.get_provider_schema(provider), preferred_provider_type=ProviderType.CUSTOM, using_provider_type=ProviderType.CUSTOM, system_configuration=SystemConfiguration(enabled=False), custom_configuration=CustomConfiguration(provider=CustomProviderConfiguration(credentials=credentials)), model_settings=[], ), - provider_instance=provider_instance, model_type_instance=model_type_instance, ) model_instance = ModelInstance(provider_model_bundle=provider_model_bundle, model=model) - model_schema = model_type_instance.get_model_schema(model) + model_schema = model_provider_factory.get_model_schema( + provider=provider, + model_type=model_type_instance.model_type, + model=model, + credentials=credentials, + ) assert model_schema is not None model_config = ModelConfigWithCredentialsEntity( model=model, diff --git a/api/tests/unit_tests/core/test_provider_manager.py b/api/tests/unit_tests/core/test_provider_manager.py index 2f4214a5801de1..44284e03d0bfb5 100644 --- a/api/tests/unit_tests/core/test_provider_manager.py +++ b/api/tests/unit_tests/core/test_provider_manager.py @@ -1,12 +1,13 @@ from core.entities.provider_entities import ModelSettings from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.model_providers import model_provider_factory +from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from core.provider_manager import ProviderManager from models.provider import LoadBalancingModelConfig, ProviderModelSetting def test__to_model_settings(mocker): # Get all provider entities + model_provider_factory = ModelProviderFactory("test_tenant") provider_entities = model_provider_factory.get_providers() provider_entity = None @@ -71,6 +72,7 @@ def 
test__to_model_settings(mocker): def test__to_model_settings_only_one_lb(mocker): # Get all provider entities + model_provider_factory = ModelProviderFactory("test_tenant") provider_entities = model_provider_factory.get_providers() provider_entity = None @@ -123,6 +125,7 @@ def test__to_model_settings_only_one_lb(mocker): def test__to_model_settings_lb_disabled(mocker): # Get all provider entities + model_provider_factory = ModelProviderFactory("test_tenant") provider_entities = model_provider_factory.get_providers() provider_entity = None From 47c8824be689b6224ba6e6f2b28f1d1484a9ecb0 Mon Sep 17 00:00:00 2001 From: takatost Date: Sun, 29 Sep 2024 00:14:44 +0800 Subject: [PATCH 061/325] feat: move model request to plugin daemon --- api/configs/feature/__init__.py | 14 +++--- api/controllers/console/app/workflow.py | 18 ++++---- api/controllers/console/workspace/plugin.py | 8 ++-- api/controllers/inner_api/plugin/__init__.py | 2 +- api/controllers/inner_api/plugin/wraps.py | 45 ++++++++++--------- api/core/agent/entities.py | 1 + api/core/agent/fc_agent_runner.py | 4 +- .../agent_chat/generate_response_converter.py | 10 +++-- .../base_app_generate_response_converter.py | 7 ++- api/core/app/apps/base_app_generator.py | 9 ++-- api/core/app/apps/chat/app_generator.py | 3 +- .../apps/chat/generate_response_converter.py | 10 +++-- api/core/app/apps/completion/app_generator.py | 12 +++-- .../completion/generate_response_converter.py | 10 +++-- api/core/app/apps/workflow/app_generator.py | 16 +++---- .../workflow/generate_response_converter.py | 10 +++-- .../plugin_tool_callback_handler.py | 2 +- api/core/entities/parameter_entities.py | 2 +- api/core/entities/provider_entities.py | 4 +- api/core/file/tool_file_parser.py | 4 +- api/core/plugin/backwards_invocation/node.py | 2 +- api/core/rag/retrieval/dataset_retrieval.py | 4 +- api/core/tools/entities/tool_entities.py | 3 -- .../utils/workflow_configuration_sync.py | 2 +- api/core/tools/workflow_as_tool/provider.py | 30 
+++++++------ api/models/base.py | 2 +- .../tools/api_tools_manage_service.py | 6 +-- api/services/workflow_service.py | 4 +- .../plugin/tools/test_fetch_all_tools.py | 1 - 29 files changed, 127 insertions(+), 118 deletions(-) diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 1b0364c2fc9835..8e03415ae03360 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -114,20 +114,18 @@ class PluginConfig(BaseSettings): """ Plugin configs """ + PLUGIN_API_URL: HttpUrl = Field( - description='Plugin API URL', - default='http://plugin:5002', + description="Plugin API URL", + default="http://plugin:5002", ) PLUGIN_API_KEY: str = Field( - description='Plugin API key', - default='plugin-api-key', + description="Plugin API key", + default="plugin-api-key", ) - INNER_API_KEY_FOR_PLUGIN: str = Field( - description='Inner api key for plugin', - default='inner-api-key' - ) + INNER_API_KEY_FOR_PLUGIN: str = Field(description="Inner api key for plugin", default="inner-api-key") class EndpointConfig(BaseSettings): diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index 19c6e14cc43758..22a1fbb563bf57 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -140,7 +140,7 @@ def post(self, app_model: App): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - + if not isinstance(current_user, Account): raise Forbidden() @@ -167,7 +167,7 @@ def post(self, app_model: App): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - + if not isinstance(current_user, Account): raise Forbidden() @@ -209,7 +209,7 @@ def post(self, app_model: App, node_id: str): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - + if not 
isinstance(current_user, Account): raise Forbidden() @@ -246,7 +246,7 @@ def post(self, app_model: App, node_id: str): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - + if not isinstance(current_user, Account): raise Forbidden() @@ -283,7 +283,7 @@ def post(self, app_model: App): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - + if not isinstance(current_user, Account): raise Forbidden() @@ -336,7 +336,7 @@ def post(self, app_model: App, node_id: str): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - + if not isinstance(current_user, Account): raise Forbidden() @@ -388,7 +388,7 @@ def post(self, app_model: App): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - + if not isinstance(current_user, Account): raise Forbidden() @@ -428,7 +428,7 @@ def get(self, app_model: App, block_type: str): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - + if not isinstance(current_user, Account): raise Forbidden() @@ -464,7 +464,7 @@ def post(self, app_model: App): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - + if not isinstance(current_user, Account): raise Forbidden() diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index c3e89321d6bbf5..4001b69a595733 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -17,11 +17,9 @@ def get(self): user = current_user if not user.is_admin_or_owner: raise Forbidden() - + tenant_id = user.current_tenant_id - return { - "key": 
PluginDebuggingService.get_plugin_debugging_key(tenant_id) - } + return {"key": PluginDebuggingService.get_plugin_debugging_key(tenant_id)} -api.add_resource(PluginDebuggingKeyApi, "/workspaces/current/plugin/debugging-key") \ No newline at end of file +api.add_resource(PluginDebuggingKeyApi, "/workspaces/current/plugin/debugging-key") diff --git a/api/controllers/inner_api/plugin/__init__.py b/api/controllers/inner_api/plugin/__init__.py index fd1918e6b26fee..48aad58ec84321 100644 --- a/api/controllers/inner_api/plugin/__init__.py +++ b/api/controllers/inner_api/plugin/__init__.py @@ -1 +1 @@ -from .plugin import * \ No newline at end of file +from .plugin import * diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py index aa1f36d33c7dc4..07249013f9e7f7 100644 --- a/api/controllers/inner_api/plugin/wraps.py +++ b/api/controllers/inner_api/plugin/wraps.py @@ -16,31 +16,36 @@ def decorator(view_func): def decorated_view(*args, **kwargs): # fetch json body parser = reqparse.RequestParser() - parser.add_argument('tenant_id', type=str, required=True, location='json') - parser.add_argument('user_id', type=str, required=True, location='json') + parser.add_argument("tenant_id", type=str, required=True, location="json") + parser.add_argument("user_id", type=str, required=True, location="json") kwargs = parser.parse_args() - user_id = kwargs.get('user_id') - tenant_id = kwargs.get('tenant_id') + user_id = kwargs.get("user_id") + tenant_id = kwargs.get("tenant_id") - del kwargs['tenant_id'] - del kwargs['user_id'] + del kwargs["tenant_id"] + del kwargs["user_id"] try: - tenant_model = db.session.query(Tenant).filter( - Tenant.id == tenant_id, - ).first() + tenant_model = ( + db.session.query(Tenant) + .filter( + Tenant.id == tenant_id, + ) + .first() + ) except Exception: - raise ValueError('tenant not found') + raise ValueError("tenant not found") if not tenant_model: - raise ValueError('tenant not found') + raise 
ValueError("tenant not found") - kwargs['tenant_model'] = tenant_model - kwargs['user_id'] = user_id + kwargs["tenant_model"] = tenant_model + kwargs["user_id"] = user_id return view_func(*args, **kwargs) + return decorated_view if view is None: @@ -55,18 +60,18 @@ def decorated_view(*args, **kwargs): try: data = request.get_json() except Exception: - raise ValueError('invalid json') - + raise ValueError("invalid json") + try: payload = payload_type(**data) except Exception as e: - raise ValueError(f'invalid payload: {str(e)}') - - kwargs['payload'] = payload + raise ValueError(f"invalid payload: {str(e)}") + + kwargs["payload"] = payload return view_func(*args, **kwargs) - + return decorated_view - + if view is None: return decorator else: diff --git a/api/core/agent/entities.py b/api/core/agent/entities.py index b51a1635495893..5287b9a7144fda 100644 --- a/api/core/agent/entities.py +++ b/api/core/agent/entities.py @@ -10,6 +10,7 @@ class AgentToolEntity(BaseModel): """ Agent Tool Entity. 
""" + provider_type: ToolProviderType provider_id: str tool_name: str diff --git a/api/core/agent/fc_agent_runner.py b/api/core/agent/fc_agent_runner.py index 991c542846626a..afdb1d70e22469 100644 --- a/api/core/agent/fc_agent_runner.py +++ b/api/core/agent/fc_agent_runner.py @@ -366,9 +366,7 @@ def extract_blocking_tool_calls(self, llm_result: LLMResult) -> list[tuple[str, return tool_calls - def _init_system_message( - self, prompt_template: str, prompt_messages: list[PromptMessage] - ) -> list[PromptMessage]: + def _init_system_message(self, prompt_template: str, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: """ Initialize system message """ diff --git a/api/core/app/apps/agent_chat/generate_response_converter.py b/api/core/app/apps/agent_chat/generate_response_converter.py index 5f294432c94187..99c95b296af4f3 100644 --- a/api/core/app/apps/agent_chat/generate_response_converter.py +++ b/api/core/app/apps/agent_chat/generate_response_converter.py @@ -50,8 +50,9 @@ def convert_blocking_simple_response(cls, blocking_response: ChatbotAppBlockingR return response @classmethod - def convert_stream_full_response(cls, stream_response: Generator[ChatbotAppStreamResponse, None, None]) \ - -> Generator[dict | str, None, None]: + def convert_stream_full_response( + cls, stream_response: Generator[ChatbotAppStreamResponse, None, None] + ) -> Generator[dict | str, None, None]: """ Convert stream full response. :param stream_response: stream response @@ -80,8 +81,9 @@ def convert_stream_full_response(cls, stream_response: Generator[ChatbotAppStrea yield response_chunk @classmethod - def convert_stream_simple_response(cls, stream_response: Generator[ChatbotAppStreamResponse, None, None]) \ - -> Generator[dict | str, None, None]: + def convert_stream_simple_response( + cls, stream_response: Generator[ChatbotAppStreamResponse, None, None] + ) -> Generator[dict | str, None, None]: """ Convert stream simple response. 
:param stream_response: stream response diff --git a/api/core/app/apps/base_app_generate_response_converter.py b/api/core/app/apps/base_app_generate_response_converter.py index e5b77538c7bba0..c38e67d5779c68 100644 --- a/api/core/app/apps/base_app_generate_response_converter.py +++ b/api/core/app/apps/base_app_generate_response_converter.py @@ -20,6 +20,7 @@ def convert( if isinstance(response, AppBlockingResponse): return cls.convert_blocking_full_response(response) else: + def _generate_full_response() -> Generator[dict | str, Any, None]: yield from cls.convert_stream_simple_response(response) @@ -28,6 +29,7 @@ def _generate_full_response() -> Generator[dict | str, Any, None]: if isinstance(response, AppBlockingResponse): return cls.convert_blocking_simple_response(response) else: + def _generate_simple_response() -> Generator[dict | str, Any, None]: yield from cls.convert_stream_simple_response(response) @@ -45,8 +47,9 @@ def convert_blocking_simple_response(cls, blocking_response: AppBlockingResponse @classmethod @abstractmethod - def convert_stream_full_response(cls, stream_response: Generator[AppStreamResponse, None, None]) \ - -> Generator[dict | str, None, None]: + def convert_stream_full_response( + cls, stream_response: Generator[AppStreamResponse, None, None] + ) -> Generator[dict | str, None, None]: raise NotImplementedError @classmethod diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py index 2da0fd058401c0..06a18864b77874 100644 --- a/api/core/app/apps/base_app_generator.py +++ b/api/core/app/apps/base_app_generator.py @@ -64,11 +64,12 @@ def convert_to_event_stream(cls, generator: Union[dict, Generator[dict | str, No if isinstance(generator, dict): return generator else: + def gen(): for message in generator: if isinstance(message, dict): - yield f'data: {json.dumps(message)}\n\n' + yield f"data: {json.dumps(message)}\n\n" else: - yield f'event: {message}\n\n' - - return gen() \ No newline at end of file + 
yield f"event: {message}\n\n" + + return gen() diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py index cec2217093f1cc..492ae4924409bc 100644 --- a/api/core/app/apps/chat/app_generator.py +++ b/api/core/app/apps/chat/app_generator.py @@ -50,7 +50,8 @@ def generate( @overload def generate( - self, app_model: App, + self, + app_model: App, user: Union[Account, EndUser], args: Any, invoke_from: InvokeFrom, diff --git a/api/core/app/apps/chat/generate_response_converter.py b/api/core/app/apps/chat/generate_response_converter.py index c7e29686e9de7f..f67df2f1ad5df1 100644 --- a/api/core/app/apps/chat/generate_response_converter.py +++ b/api/core/app/apps/chat/generate_response_converter.py @@ -50,8 +50,9 @@ def convert_blocking_simple_response(cls, blocking_response: ChatbotAppBlockingR return response @classmethod - def convert_stream_full_response(cls, stream_response: Generator[ChatbotAppStreamResponse, None, None]) \ - -> Generator[dict | str, None, None]: + def convert_stream_full_response( + cls, stream_response: Generator[ChatbotAppStreamResponse, None, None] + ) -> Generator[dict | str, None, None]: """ Convert stream full response. :param stream_response: stream response @@ -80,8 +81,9 @@ def convert_stream_full_response(cls, stream_response: Generator[ChatbotAppStrea yield response_chunk @classmethod - def convert_stream_simple_response(cls, stream_response: Generator[ChatbotAppStreamResponse, None, None]) \ - -> Generator[dict | str, None, None]: + def convert_stream_simple_response( + cls, stream_response: Generator[ChatbotAppStreamResponse, None, None] + ) -> Generator[dict | str, None, None]: """ Convert stream simple response. 
:param stream_response: stream response diff --git a/api/core/app/apps/completion/app_generator.py b/api/core/app/apps/completion/app_generator.py index 2e99e4ef709fb4..729ff1e1e0b522 100644 --- a/api/core/app/apps/completion/app_generator.py +++ b/api/core/app/apps/completion/app_generator.py @@ -52,19 +52,17 @@ def generate( @overload def generate( - self, app_model: App, + self, + app_model: App, user: Union[Account, EndUser], args: dict, invoke_from: InvokeFrom, stream: bool = False, ) -> dict | Generator[str, None, None]: ... - def generate(self, app_model: App, - user: Union[Account, EndUser], - args: Any, - invoke_from: InvokeFrom, - stream: bool = True) \ - -> Union[dict, Generator[str, None, None]]: + def generate( + self, app_model: App, user: Union[Account, EndUser], args: Any, invoke_from: InvokeFrom, stream: bool = True + ) -> Union[dict, Generator[str, None, None]]: """ Generate App response. diff --git a/api/core/app/apps/completion/generate_response_converter.py b/api/core/app/apps/completion/generate_response_converter.py index 77aa1e37a23202..6f8d0894d55424 100644 --- a/api/core/app/apps/completion/generate_response_converter.py +++ b/api/core/app/apps/completion/generate_response_converter.py @@ -49,8 +49,9 @@ def convert_blocking_simple_response(cls, blocking_response: CompletionAppBlocki return response @classmethod - def convert_stream_full_response(cls, stream_response: Generator[CompletionAppStreamResponse, None, None]) \ - -> Generator[dict | str, None, None]: + def convert_stream_full_response( + cls, stream_response: Generator[CompletionAppStreamResponse, None, None] + ) -> Generator[dict | str, None, None]: """ Convert stream full response. 
:param stream_response: stream response @@ -78,8 +79,9 @@ def convert_stream_full_response(cls, stream_response: Generator[CompletionAppSt yield response_chunk @classmethod - def convert_stream_simple_response(cls, stream_response: Generator[CompletionAppStreamResponse, None, None]) \ - -> Generator[dict | str, None, None]: + def convert_stream_simple_response( + cls, stream_response: Generator[CompletionAppStreamResponse, None, None] + ) -> Generator[dict | str, None, None]: """ Convert stream simple response. :param stream_response: stream response diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index f11fedebc886a7..8edbeee3dd5156 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -42,7 +42,7 @@ def generate( invoke_from: InvokeFrom, stream: Literal[True] = True, call_depth: int = 0, - workflow_thread_pool_id: Optional[str] = None + workflow_thread_pool_id: Optional[str] = None, ) -> Generator[dict | str, None, None]: ... @overload @@ -60,7 +60,8 @@ def generate( @overload def generate( - self, app_model: App, + self, + app_model: App, workflow: Workflow, user: Union[Account, EndUser], args: dict, @@ -143,7 +144,7 @@ def _generate( application_generate_entity: WorkflowAppGenerateEntity, invoke_from: InvokeFrom, stream: bool = True, - workflow_thread_pool_id: Optional[str] = None + workflow_thread_pool_id: Optional[str] = None, ) -> Union[dict, Generator[str | dict, None, None]]: """ Generate App response. 
@@ -189,12 +190,9 @@ def _generate( return WorkflowAppGenerateResponseConverter.convert(response=response, invoke_from=invoke_from) - def single_iteration_generate(self, app_model: App, - workflow: Workflow, - node_id: str, - user: Account | EndUser, - args: dict, - stream: bool = True) -> dict[str, Any] | Generator[str | dict, Any, None]: + def single_iteration_generate( + self, app_model: App, workflow: Workflow, node_id: str, user: Account | EndUser, args: dict, stream: bool = True + ) -> dict[str, Any] | Generator[str | dict, Any, None]: """ Generate App response. diff --git a/api/core/app/apps/workflow/generate_response_converter.py b/api/core/app/apps/workflow/generate_response_converter.py index 989834ef3b0ab0..72357e5e0c3927 100644 --- a/api/core/app/apps/workflow/generate_response_converter.py +++ b/api/core/app/apps/workflow/generate_response_converter.py @@ -34,8 +34,9 @@ def convert_blocking_simple_response(cls, blocking_response: WorkflowAppBlocking return cls.convert_blocking_full_response(blocking_response) @classmethod - def convert_stream_full_response(cls, stream_response: Generator[WorkflowAppStreamResponse, None, None]) \ - -> Generator[dict | str, None, None]: + def convert_stream_full_response( + cls, stream_response: Generator[WorkflowAppStreamResponse, None, None] + ) -> Generator[dict | str, None, None]: """ Convert stream full response. :param stream_response: stream response @@ -62,8 +63,9 @@ def convert_stream_full_response(cls, stream_response: Generator[WorkflowAppStre yield response_chunk @classmethod - def convert_stream_simple_response(cls, stream_response: Generator[WorkflowAppStreamResponse, None, None]) \ - -> Generator[dict | str, None, None]: + def convert_stream_simple_response( + cls, stream_response: Generator[WorkflowAppStreamResponse, None, None] + ) -> Generator[dict | str, None, None]: """ Convert stream simple response. 
:param stream_response: stream response diff --git a/api/core/callback_handler/plugin_tool_callback_handler.py b/api/core/callback_handler/plugin_tool_callback_handler.py index e9b9784014fe35..033b8d423cc246 100644 --- a/api/core/callback_handler/plugin_tool_callback_handler.py +++ b/api/core/callback_handler/plugin_tool_callback_handler.py @@ -2,4 +2,4 @@ class DifyPluginCallbackHandler(DifyAgentCallbackHandler): - """Callback Handler that prints to std out.""" \ No newline at end of file + """Callback Handler that prints to std out.""" diff --git a/api/core/entities/parameter_entities.py b/api/core/entities/parameter_entities.py index cc402de7736a55..0045fbf2b49f88 100644 --- a/api/core/entities/parameter_entities.py +++ b/api/core/entities/parameter_entities.py @@ -27,4 +27,4 @@ class ModelConfigScope(Enum): TTS = "tts" SPEECH2TEXT = "speech2text" MODERATION = "moderation" - VISION = "vision" \ No newline at end of file + VISION = "vision" diff --git a/api/core/entities/provider_entities.py b/api/core/entities/provider_entities.py index 88b16f13afb0aa..53d52b5866c0ea 100644 --- a/api/core/entities/provider_entities.py +++ b/api/core/entities/provider_entities.py @@ -116,6 +116,7 @@ class BasicProviderConfig(BaseModel): """ Base model class for common provider settings like credentials """ + class Type(Enum): SECRET_INPUT = CommonParameterType.SECRET_INPUT.value TEXT_INPUT = CommonParameterType.TEXT_INPUT.value @@ -135,7 +136,7 @@ def value_of(cls, value: str) -> "ProviderConfig.Type": for mode in cls: if mode.value == value: return mode - raise ValueError(f'invalid mode value {value}') + raise ValueError(f"invalid mode value {value}") type: Type = Field(..., description="The type of the credentials") name: str = Field(..., description="The name of the credentials") @@ -145,6 +146,7 @@ class ProviderConfig(BasicProviderConfig): """ Model class for common provider settings like credentials """ + class Option(BaseModel): value: str = Field(..., description="The 
value of the option") label: I18nObject = Field(..., description="The label of the option") diff --git a/api/core/file/tool_file_parser.py b/api/core/file/tool_file_parser.py index 4d113a9cc28e2e..a17b7be3675ab1 100644 --- a/api/core/file/tool_file_parser.py +++ b/api/core/file/tool_file_parser.py @@ -3,9 +3,7 @@ if TYPE_CHECKING: from core.tools.tool_file_manager import ToolFileManager -tool_file_manager: dict[str, Any] = { - 'manager': None -} +tool_file_manager: dict[str, Any] = {"manager": None} class ToolFileParser: diff --git a/api/core/plugin/backwards_invocation/node.py b/api/core/plugin/backwards_invocation/node.py index 4c1d21437e609f..1bd5d84e4c6488 100644 --- a/api/core/plugin/backwards_invocation/node.py +++ b/api/core/plugin/backwards_invocation/node.py @@ -114,4 +114,4 @@ def invoke_question_classifier( "inputs": execution.inputs_dict, "outputs": execution.outputs_dict, "process_data": execution.process_data_dict, - } \ No newline at end of file + } diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index bd5cf86628de14..ebcf5a2575b98b 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -492,8 +492,9 @@ def to_dataset_retriever_tool( score_threshold_enabled = retrieval_model_config.get("score_threshold_enabled") if score_threshold_enabled: score_threshold = retrieval_model_config.get("score_threshold") - + from core.tools.utils.dataset_retriever.dataset_retriever_tool import DatasetRetrieverTool + tool = DatasetRetrieverTool.from_dataset( dataset=dataset, top_k=top_k, @@ -506,6 +507,7 @@ def to_dataset_retriever_tool( tools.append(tool) elif retrieve_config.retrieve_strategy == DatasetRetrieveConfigEntity.RetrieveStrategy.MULTIPLE: from core.tools.utils.dataset_retriever.dataset_multi_retriever_tool import DatasetMultiRetrieverTool + tool = DatasetMultiRetrieverTool.from_dataset( dataset_ids=[dataset.id for dataset in available_datasets], 
tenant_id=tenant_id, diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 07ea2d2b11f1fe..0808ff721d16d2 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -375,6 +375,3 @@ class ToolInvokeFrom(Enum): WORKFLOW = "workflow" AGENT = "agent" - - - diff --git a/api/core/tools/utils/workflow_configuration_sync.py b/api/core/tools/utils/workflow_configuration_sync.py index 7f20605024d1f1..df09609402d181 100644 --- a/api/core/tools/utils/workflow_configuration_sync.py +++ b/api/core/tools/utils/workflow_configuration_sync.py @@ -43,4 +43,4 @@ def check_is_synced( for parameter in tool_configurations: if parameter.name not in variable_names: - raise ValueError('parameter configuration mismatch, please republish the tool to update') + raise ValueError("parameter configuration mismatch, please republish the tool to update") diff --git a/api/core/tools/workflow_as_tool/provider.py b/api/core/tools/workflow_as_tool/provider.py index 2d0d33ffd9a138..dec353ec93469b 100644 --- a/api/core/tools/workflow_as_tool/provider.py +++ b/api/core/tools/workflow_as_tool/provider.py @@ -65,7 +65,7 @@ def from_db(cls, db_provider: WorkflowToolProvider) -> "WorkflowToolProviderCont @property def provider_type(self) -> ToolProviderType: return ToolProviderType.WORKFLOW - + def _get_db_provider_tool(self, db_provider: WorkflowToolProvider, app: App) -> WorkflowTool: """ get db provider tool @@ -73,10 +73,11 @@ def _get_db_provider_tool(self, db_provider: WorkflowToolProvider, app: App) -> :param app: the app :return: the tool """ - workflow: Workflow | None = db.session.query(Workflow).filter( - Workflow.app_id == db_provider.app_id, - Workflow.version == db_provider.version - ).first() + workflow: Workflow | None = ( + db.session.query(Workflow) + .filter(Workflow.app_id == db_provider.app_id, Workflow.version == db_provider.version) + .first() + ) if not workflow: raise ValueError("workflow 
not found") @@ -84,10 +85,7 @@ def _get_db_provider_tool(self, db_provider: WorkflowToolProvider, app: App) -> # fetch start node graph: Mapping = workflow.graph_dict features_dict: Mapping = workflow.features_dict - features = WorkflowAppConfigManager.convert_features( - config_dict=features_dict, - app_mode=AppMode.WORKFLOW - ) + features = WorkflowAppConfigManager.convert_features(config_dict=features_dict, app_mode=AppMode.WORKFLOW) parameters = db_provider.parameter_configurations variables = WorkflowToolConfigurationUtils.get_workflow_graph_variables(graph) @@ -180,14 +178,18 @@ def get_tools(self, tenant_id: str) -> list[WorkflowTool]: if self.tools is not None: return self.tools - db_providers: WorkflowToolProvider | None = db.session.query(WorkflowToolProvider).filter( - WorkflowToolProvider.tenant_id == tenant_id, - WorkflowToolProvider.app_id == self.provider_id, - ).first() + db_providers: WorkflowToolProvider | None = ( + db.session.query(WorkflowToolProvider) + .filter( + WorkflowToolProvider.tenant_id == tenant_id, + WorkflowToolProvider.app_id == self.provider_id, + ) + .first() + ) if not db_providers: return [] - + app = db_providers.app if not app: raise ValueError("can not read app of workflow") diff --git a/api/models/base.py b/api/models/base.py index 1c2dcc40b94f8a..fa2b68a5d245bb 100644 --- a/api/models/base.py +++ b/api/models/base.py @@ -2,4 +2,4 @@ class Base(DeclarativeBase): - pass \ No newline at end of file + pass diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index 58f0e7bbf52098..82bfc8c8e5bcfe 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -161,7 +161,7 @@ def create_api_tool_provider( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.entity.identity.name + 
provider_identity=provider_controller.entity.identity.name, ) encrypted_credentials = tool_configuration.encrypt(credentials) @@ -293,7 +293,7 @@ def update_api_tool_provider( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.entity.identity.name + provider_identity=provider_controller.entity.identity.name, ) original_credentials = tool_configuration.decrypt(provider.credentials) @@ -412,7 +412,7 @@ def test_api_tool_preview( tenant_id=tenant_id, config=provider_controller.get_credentials_schema(), provider_type=provider_controller.provider_type.value, - provider_identity=provider_controller.entity.identity.name + provider_identity=provider_controller.entity.identity.name, ) decrypted_credentials = tool_configuration.decrypt(credentials) # check if the credential has changed, save the original credential diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index a1e2f3105738b8..eec3d26a7b1b5a 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -238,7 +238,7 @@ def run_draft_workflow_node( db.session.commit() return workflow_node_execution - + def run_free_workflow_node( self, node_data: dict, tenant_id: str, user_id: str, node_id: str, user_inputs: dict[str, Any] ) -> WorkflowNodeExecution: @@ -258,7 +258,7 @@ def run_free_workflow_node( ), start_at=start_at, tenant_id=tenant_id, - node_id=node_id + node_id=node_id, ) return workflow_node_execution diff --git a/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py b/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py index d50bba4eccc103..c6d836ed6dd965 100644 --- a/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py +++ b/api/tests/integration_tests/plugin/tools/test_fetch_all_tools.py @@ -6,4 +6,3 @@ def test_fetch_all_plugin_tools(setup_http_mock): manager = PluginToolManager() tools = 
manager.fetch_tool_providers(tenant_id="test-tenant") assert len(tools) >= 1 - From 7b76b1ff82b3cb28f5ef2da024f9a9629449a2cb Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Sun, 29 Sep 2024 13:12:22 +0800 Subject: [PATCH 062/325] Merge fix/chore-fix into fix/chore-fix --- api/core/plugin/entities/plugin_daemon.py | 19 ++++++++ api/core/plugin/manager/base.py | 56 +++++++++++++++++------ api/core/tools/tool_manager.py | 3 -- 3 files changed, 61 insertions(+), 17 deletions(-) diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index 554353c6c9e938..ebce6e6cee649f 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -87,3 +87,22 @@ class PluginVoiceEntity(BaseModel): class PluginVoicesResponse(BaseModel): voices: list[PluginVoiceEntity] = Field(description="The result of the voices.") + + +class PluginDaemonError(BaseModel): + """ + Error from plugin daemon. + """ + + error_type: str + message: str + args: Optional[dict] = None + + +class PluginDaemonInnerError(Exception): + code: int + message: str + + def __init__(self, code: int, message: str): + self.code = code + self.message = message diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index 1e050427d4e890..cb402849e982a2 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -1,13 +1,20 @@ import json from collections.abc import Callable, Generator -from typing import TypeVar +from typing import Optional, TypeVar import requests from pydantic import BaseModel from yarl import URL from configs import dify_config -from core.plugin.entities.plugin_daemon import PluginDaemonBasicResponse +from core.model_runtime.errors.invoke import ( + InvokeAuthorizationError, + InvokeBadRequestError, + InvokeConnectionError, + InvokeRateLimitError, + InvokeServerUnavailableError, +) +from core.plugin.entities.plugin_daemon import PluginDaemonBasicResponse, PluginDaemonError, 
PluginDaemonInnerError plugin_daemon_inner_api_baseurl = dify_config.PLUGIN_API_URL plugin_daemon_inner_api_key = dify_config.PLUGIN_API_KEY @@ -110,6 +117,12 @@ def _request_with_plugin_daemon_response( rep = PluginDaemonBasicResponse[type](**json_response) if rep.code != 0: + if rep.code == -500: + try: + error = PluginDaemonError(**json.loads(rep.message)) + self._handle_plugin_daemon_error(error.error_type, error.message, error.args) + except Exception as e: + raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") if rep.data is None: raise ValueError("got empty data from plugin daemon") @@ -132,19 +145,34 @@ def _request_with_plugin_daemon_response_stream( line_data = json.loads(line) rep = PluginDaemonBasicResponse[type](**line_data) if rep.code != 0: - raise PluginDaemonRespError(rep.message, rep.code) + if rep.code == -500: + try: + error = PluginDaemonError(**json.loads(rep.message)) + self._handle_plugin_daemon_error(error.error_type, error.message, error.args) + except Exception as e: + raise PluginDaemonInnerError(code=rep.code, message=rep.message) + raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") if rep.data is None: raise ValueError("got empty data from plugin daemon") yield rep.data - -class PluginDaemonRespError(Exception): - """ - Plugin daemon response error. 
- """ - - def __init__(self, resp_message: str, code: int): - super().__init__() - self.message = f"got error from plugin daemon: {resp_message}, code: {code}" - self.resp_message = resp_message - self.code = code + def _handle_plugin_daemon_error(self, error_type: str, message: str, args: Optional[dict] = None): + """ + handle the error from plugin daemon + """ + args = args or {} + + if error_type == PluginDaemonInnerError.__name__: + raise PluginDaemonInnerError(code=-500, message=message) + elif error_type == InvokeRateLimitError.__name__: + raise InvokeRateLimitError(description=args.get("description")) + elif error_type == InvokeAuthorizationError.__name__: + raise InvokeAuthorizationError(description=args.get("description")) + elif error_type == InvokeBadRequestError.__name__: + raise InvokeBadRequestError(description=args.get("description")) + elif error_type == InvokeConnectionError.__name__: + raise InvokeConnectionError(description=args.get("description")) + elif error_type == InvokeServerUnavailableError.__name__: + raise InvokeServerUnavailableError(description=args.get("description")) + else: + raise ValueError(f"got unknown error from plugin daemon: {error_type}, message: {message}, args: {args}") diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 225cd0c251f596..698ce3e900639d 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -618,9 +618,6 @@ def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict: """ get api provider """ - """ - get tool provider - """ provider_obj: ApiToolProvider | None = ( db.session.query(ApiToolProvider) .filter( From 635a53ea38cad192c169060c475158b624d8e028 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Sun, 29 Sep 2024 13:23:14 +0800 Subject: [PATCH 063/325] fix: import undefined types --- api/core/plugin/manager/model.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/api/core/plugin/manager/model.py 
b/api/core/plugin/manager/model.py index e34e9b516efa90..30bd9bc0a8e034 100644 --- a/api/core/plugin/manager/model.py +++ b/api/core/plugin/manager/model.py @@ -10,13 +10,14 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.entities.plugin_daemon import ( PluginBasicBooleanResponse, + PluginDaemonInnerError, PluginModelProviderEntity, PluginModelSchemaEntity, PluginNumTokensResponse, PluginStringResultResponse, PluginVoicesResponse, ) -from core.plugin.manager.base import BasePluginManager, PluginDaemonRespError +from core.plugin.manager.base import BasePluginManager class PluginModelManager(BasePluginManager): @@ -179,8 +180,8 @@ def invoke_llm( try: yield from response - except PluginDaemonRespError as e: - raise ValueError(e.resp_message + str(e.code)) + except PluginDaemonInnerError as e: + raise ValueError(e.message + str(e.code)) def get_llm_num_tokens( self, @@ -395,8 +396,8 @@ def invoke_tts( for result in response: hex_str = result.result yield binascii.unhexlify(hex_str) - except PluginDaemonRespError as e: - raise ValueError(e.resp_message + str(e.code)) + except PluginDaemonInnerError as e: + raise ValueError(e.message + str(e.code)) def get_tts_model_voices( self, From 735e57b73a27aa59960ca9a439e755d20124f4a5 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Sun, 29 Sep 2024 13:46:16 +0800 Subject: [PATCH 064/325] fix: transform generic error message into correct type --- api/core/plugin/manager/base.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/api/core/plugin/manager/base.py b/api/core/plugin/manager/base.py index cb402849e982a2..ebfa68b914d882 100644 --- a/api/core/plugin/manager/base.py +++ b/api/core/plugin/manager/base.py @@ -120,9 +120,10 @@ def _request_with_plugin_daemon_response( if rep.code == -500: try: error = PluginDaemonError(**json.loads(rep.message)) - self._handle_plugin_daemon_error(error.error_type, error.message, error.args) except Exception as e: raise ValueError(f"got error 
from plugin daemon: {rep.message}, code: {rep.code}") + + self._handle_plugin_daemon_error(error.error_type, error.message, error.args) raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") if rep.data is None: raise ValueError("got empty data from plugin daemon") @@ -148,9 +149,10 @@ def _request_with_plugin_daemon_response_stream( if rep.code == -500: try: error = PluginDaemonError(**json.loads(rep.message)) - self._handle_plugin_daemon_error(error.error_type, error.message, error.args) except Exception as e: raise PluginDaemonInnerError(code=rep.code, message=rep.message) + + self._handle_plugin_daemon_error(error.error_type, error.message, error.args) raise ValueError(f"got error from plugin daemon: {rep.message}, code: {rep.code}") if rep.data is None: raise ValueError("got empty data from plugin daemon") From c8bc3892b339e0c36e40683e05d870399a011dca Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Sun, 29 Sep 2024 14:44:22 +0800 Subject: [PATCH 065/325] refactor: invoke tool from dify --- api/core/plugin/manager/tool.py | 52 +++++++++++++++++++++++++++++---- api/core/tools/tool_manager.py | 8 ++--- 2 files changed, 50 insertions(+), 10 deletions(-) diff --git a/api/core/plugin/manager/tool.py b/api/core/plugin/manager/tool.py index 4f5fa1fa5ccfba..5981bcb55eb9e8 100644 --- a/api/core/plugin/manager/tool.py +++ b/api/core/plugin/manager/tool.py @@ -7,9 +7,23 @@ class PluginToolManager(BasePluginManager): + def _split_provider(self, provider: str) -> tuple[str, str]: + """ + split the provider to plugin_id and provider_name + + provider follows format: plugin_id/provider_name + """ + if "/" in provider: + parts = provider.split("/", 1) + if len(parts) == 2: + return parts[0], parts[1] + raise ValueError(f"invalid provider format: {provider}") + + raise ValueError(f"invalid provider format: {provider}") + def fetch_tool_providers(self, tenant_id: str) -> list[PluginToolProviderEntity]: """ - Fetch tool providers for the given asset. 
+ Fetch tool providers for the given tenant. """ def transformer(json_response: dict[str, Any]) -> dict: @@ -28,18 +42,44 @@ def transformer(json_response: dict[str, Any]) -> dict: params={"page": 1, "page_size": 256}, transformer=transformer, ) + + for provider in response: + provider.declaration.identity.name = f"{provider.plugin_id}/{provider.declaration.identity.name}" + + return response + + def fetch_tool_provider(self, tenant_id: str, provider: str) -> PluginToolProviderEntity: + """ + Fetch tool provider for the given tenant and plugin. + """ + plugin_id, provider_name = self._split_provider(provider) + + response = self._request_with_plugin_daemon_response( + "GET", + f"plugin/{tenant_id}/management/tool", + PluginToolProviderEntity, + params={"provider": provider_name, "plugin_id": plugin_id}, + ) + + response.declaration.identity.name = f"{response.plugin_id}/{response.declaration.identity.name}" + return response def invoke( self, tenant_id: str, user_id: str, - plugin_id: str, tool_provider: str, tool_name: str, credentials: dict[str, Any], tool_parameters: dict[str, Any], ) -> Generator[ToolInvokeMessage, None, None]: + """ + Invoke the tool with the given tenant, user, plugin, provider, name, credentials and parameters. 
+ """ + + plugin_id, provider_name = self._split_provider(tool_provider) + response = self._request_with_plugin_daemon_response_stream( "POST", f"plugin/{tenant_id}/dispatch/tool/invoke", @@ -47,7 +87,7 @@ def invoke( data={ "user_id": user_id, "data": { - "provider": tool_provider, + "provider": provider_name, "tool": tool_name, "credentials": credentials, "tool_parameters": tool_parameters, @@ -61,11 +101,13 @@ def invoke( return response def validate_provider_credentials( - self, tenant_id: str, user_id: str, plugin_id: str, provider: str, credentials: dict[str, Any] + self, tenant_id: str, user_id: str, provider: str, credentials: dict[str, Any] ) -> bool: """ validate the credentials of the provider """ + plugin_id, provider_name = self._split_provider(provider) + response = self._request_with_plugin_daemon_response_stream( "POST", f"plugin/{tenant_id}/dispatch/tool/validate_credentials", @@ -73,7 +115,7 @@ def validate_provider_credentials( data={ "user_id": user_id, "data": { - "provider": provider, + "provider": provider_name, "credentials": credentials, }, }, diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 698ce3e900639d..0463f84817bdfc 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -59,6 +59,8 @@ def get_builtin_provider( :param tenant_id: the id of the tenant :return: the provider """ + # split provider to + if len(cls._hardcoded_providers) == 0: # init the builtin providers cls.load_hardcoded_providers_cache() @@ -77,8 +79,7 @@ def get_plugin_provider(cls, provider: str, tenant_id: str) -> PluginToolProvide get the plugin provider """ manager = PluginToolManager() - providers = manager.fetch_tool_providers(tenant_id) - provider_entity = next((x for x in providers if x.declaration.identity.name == provider), None) + provider_entity = manager.fetch_tool_provider(tenant_id, provider) if not provider_entity: raise ToolProviderNotFoundError(f"plugin provider {provider} not found") @@ 
-181,9 +182,6 @@ def get_tool_runtime( ) elif provider_type == ToolProviderType.API: - if tenant_id is None: - raise ValueError("tenant id is required for api provider") - api_provider, credentials = cls.get_api_provider_controller(tenant_id, provider_id) # decrypt the credentials From e9e5c8806ab9c84b021fa17fae62da8cbf444adb Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Sun, 29 Sep 2024 17:00:58 +0800 Subject: [PATCH 066/325] refactor: using DeclarativeBase as parent class of models, refactored tools --- api/controllers/console/setup.py | 4 +- .../console/workspace/tool_providers.py | 13 ++- api/core/plugin/manager/tool.py | 21 +++- api/core/tools/plugin_tool/provider.py | 7 +- api/core/tools/plugin_tool/tool.py | 6 +- api/core/tools/tool_manager.py | 11 +- api/migrations/env.py | 7 +- ...ase_max_length_of_builtin_tool_provider.py | 39 +++++++ api/models/base.py | 6 +- api/models/model.py | 107 +++++++++--------- api/models/provider.py | 15 +-- api/models/source.py | 5 +- api/models/task.py | 5 +- api/models/tool.py | 3 +- api/models/tools.py | 52 ++++++++- api/models/web.py | 5 +- api/models/workflow.py | 39 ++++--- 17 files changed, 225 insertions(+), 120 deletions(-) create mode 100644 api/migrations/versions/2024_09_29_0835-ddcc8bbef391_increase_max_length_of_builtin_tool_provider.py diff --git a/api/controllers/console/setup.py b/api/controllers/console/setup.py index 46b4ef5d87a8a0..b66e74aee0cf84 100644 --- a/api/controllers/console/setup.py +++ b/api/controllers/console/setup.py @@ -6,7 +6,7 @@ from configs import dify_config from libs.helper import StrLen, email, get_remote_ip from libs.password import valid_password -from models.model import DifySetup +from models.model import DifySetup, db from services.account_service import RegisterService, TenantService from . 
import api @@ -69,7 +69,7 @@ def decorated(*args, **kwargs): def get_setup_status(): if dify_config.EDITION == "SELF_HOSTED": - return DifySetup.query.first() + return db.session.query(DifySetup).first() else: return True diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 1b49103ced63ed..14edc9ac136ba7 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -610,16 +610,17 @@ def get(self): api.add_resource(ToolProviderListApi, "/workspaces/current/tool-providers") # builtin tool provider -api.add_resource(ToolBuiltinProviderListToolsApi, "/workspaces/current/tool-provider/builtin//tools") -api.add_resource(ToolBuiltinProviderDeleteApi, "/workspaces/current/tool-provider/builtin//delete") -api.add_resource(ToolBuiltinProviderUpdateApi, "/workspaces/current/tool-provider/builtin//update") +api.add_resource(ToolBuiltinProviderListToolsApi, "/workspaces/current/tool-provider/builtin//tools") +api.add_resource(ToolBuiltinProviderDeleteApi, "/workspaces/current/tool-provider/builtin//delete") +api.add_resource(ToolBuiltinProviderUpdateApi, "/workspaces/current/tool-provider/builtin//update") api.add_resource( - ToolBuiltinProviderGetCredentialsApi, "/workspaces/current/tool-provider/builtin//credentials" + ToolBuiltinProviderGetCredentialsApi, "/workspaces/current/tool-provider/builtin//credentials" ) api.add_resource( - ToolBuiltinProviderCredentialsSchemaApi, "/workspaces/current/tool-provider/builtin//credentials_schema" + ToolBuiltinProviderCredentialsSchemaApi, + "/workspaces/current/tool-provider/builtin//credentials_schema", ) -api.add_resource(ToolBuiltinProviderIconApi, "/workspaces/current/tool-provider/builtin//icon") +api.add_resource(ToolBuiltinProviderIconApi, "/workspaces/current/tool-provider/builtin//icon") # api tool provider api.add_resource(ToolApiProviderAddApi, "/workspaces/current/tool-provider/api/add") 
diff --git a/api/core/plugin/manager/tool.py b/api/core/plugin/manager/tool.py index 5981bcb55eb9e8..50970243a11955 100644 --- a/api/core/plugin/manager/tool.py +++ b/api/core/plugin/manager/tool.py @@ -14,9 +14,9 @@ def _split_provider(self, provider: str) -> tuple[str, str]: provider follows format: plugin_id/provider_name """ if "/" in provider: - parts = provider.split("/", 1) - if len(parts) == 2: - return parts[0], parts[1] + parts = provider.split("/", -1) + if len(parts) >= 2: + return "/".join(parts[:-1]), parts[-1] raise ValueError(f"invalid provider format: {provider}") raise ValueError(f"invalid provider format: {provider}") @@ -46,6 +46,10 @@ def transformer(json_response: dict[str, Any]) -> dict: for provider in response: provider.declaration.identity.name = f"{provider.plugin_id}/{provider.declaration.identity.name}" + # override the provider name for each tool to plugin_id/provider_name + for tool in provider.declaration.tools: + tool.identity.provider = provider.declaration.identity.name + return response def fetch_tool_provider(self, tenant_id: str, provider: str) -> PluginToolProviderEntity: @@ -54,15 +58,26 @@ def fetch_tool_provider(self, tenant_id: str, provider: str) -> PluginToolProvid """ plugin_id, provider_name = self._split_provider(provider) + def transformer(json_response: dict[str, Any]) -> dict: + for tool in json_response.get("data", {}).get("declaration", {}).get("tools", []): + tool["identity"]["provider"] = provider_name + + return json_response + response = self._request_with_plugin_daemon_response( "GET", f"plugin/{tenant_id}/management/tool", PluginToolProviderEntity, params={"provider": provider_name, "plugin_id": plugin_id}, + transformer=transformer, ) response.declaration.identity.name = f"{response.plugin_id}/{response.declaration.identity.name}" + # override the provider name for each tool to plugin_id/provider_name + for tool in response.declaration.tools: + tool.identity.provider = response.declaration.identity.name + 
return response def invoke( diff --git a/api/core/tools/plugin_tool/provider.py b/api/core/tools/plugin_tool/provider.py index 4982e7405662d2..b6758df2bb106d 100644 --- a/api/core/tools/plugin_tool/provider.py +++ b/api/core/tools/plugin_tool/provider.py @@ -11,12 +11,10 @@ class PluginToolProviderController(BuiltinToolProviderController): entity: ToolProviderEntityWithPlugin tenant_id: str - plugin_id: str - def __init__(self, entity: ToolProviderEntityWithPlugin, tenant_id: str, plugin_id: str) -> None: + def __init__(self, entity: ToolProviderEntityWithPlugin, tenant_id: str) -> None: self.entity = entity self.tenant_id = tenant_id - self.plugin_id = plugin_id @property def provider_type(self) -> ToolProviderType: @@ -35,7 +33,6 @@ def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> No if not manager.validate_provider_credentials( tenant_id=self.tenant_id, user_id=user_id, - plugin_id=self.plugin_id, provider=self.entity.identity.name, credentials=credentials, ): @@ -54,7 +51,6 @@ def get_tool(self, tool_name: str) -> PluginTool: entity=tool_entity, runtime=ToolRuntime(tenant_id=self.tenant_id), tenant_id=self.tenant_id, - plugin_id=self.plugin_id, ) def get_tools(self) -> list[PluginTool]: @@ -66,7 +62,6 @@ def get_tools(self) -> list[PluginTool]: entity=tool_entity, runtime=ToolRuntime(tenant_id=self.tenant_id), tenant_id=self.tenant_id, - plugin_id=self.plugin_id, ) for tool_entity in self.entity.tools ] diff --git a/api/core/tools/plugin_tool/tool.py b/api/core/tools/plugin_tool/tool.py index 7a4f147cd04085..7c6c4de3e00828 100644 --- a/api/core/tools/plugin_tool/tool.py +++ b/api/core/tools/plugin_tool/tool.py @@ -9,12 +9,10 @@ class PluginTool(Tool): tenant_id: str - plugin_id: str - def __init__(self, entity: ToolEntity, runtime: ToolRuntime, tenant_id: str, plugin_id: str) -> None: + def __init__(self, entity: ToolEntity, runtime: ToolRuntime, tenant_id: str) -> None: super().__init__(entity, runtime) self.tenant_id = tenant_id - 
self.plugin_id = plugin_id @property def tool_provider_type(self) -> ToolProviderType: @@ -25,7 +23,6 @@ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Generator[To return manager.invoke( tenant_id=self.tenant_id, user_id=user_id, - plugin_id=self.plugin_id, tool_provider=self.entity.identity.provider, tool_name=self.entity.identity.name, credentials=self.runtime.credentials, @@ -37,5 +34,4 @@ def fork_tool_runtime(self, runtime: ToolRuntime) -> "PluginTool": entity=self.entity, runtime=runtime, tenant_id=self.tenant_id, - plugin_id=self.plugin_id, ) diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 0463f84817bdfc..fdd7e8385b3971 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -86,7 +86,6 @@ def get_plugin_provider(cls, provider: str, tenant_id: str) -> PluginToolProvide return PluginToolProviderController( entity=provider_entity.declaration, tenant_id=tenant_id, - plugin_id=provider_entity.plugin_id, ) @classmethod @@ -158,12 +157,11 @@ def get_tool_runtime( # decrypt the credentials credentials = builtin_provider.credentials - controller = cls.get_builtin_provider(provider_id, tenant_id) tool_configuration = ProviderConfigEncrypter( tenant_id=tenant_id, - config=controller.get_credentials_schema(), - provider_type=controller.provider_type.value, - provider_identity=controller.entity.identity.name, + config=provider_controller.get_credentials_schema(), + provider_type=provider_controller.provider_type.value, + provider_identity=provider_controller.entity.identity.name, ) decrypted_credentials = tool_configuration.decrypt(credentials) @@ -400,7 +398,6 @@ def list_plugin_providers(cls, tenant_id: str) -> list[PluginToolProviderControl PluginToolProviderController( entity=provider.declaration, tenant_id=tenant_id, - plugin_id=provider.plugin_id, ) for provider in provider_entities ] @@ -525,7 +522,7 @@ def list_providers_from_api( ) if isinstance(provider, 
PluginToolProviderController): - result_providers[f"plugin_provider.{user_provider.name}.{provider.plugin_id}"] = user_provider + result_providers[f"plugin_provider.{user_provider.name}"] = user_provider else: result_providers[f"builtin_provider.{user_provider.name}"] = user_provider diff --git a/api/migrations/env.py b/api/migrations/env.py index ad3a122c04bc2d..a5d815dcfd44bc 100644 --- a/api/migrations/env.py +++ b/api/migrations/env.py @@ -31,19 +31,16 @@ def get_engine_url(): # from myapp import mymodel # target_metadata = mymodel.Base.metadata config.set_main_option('sqlalchemy.url', get_engine_url()) -target_db = current_app.extensions['migrate'].db # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. +from models.base import Base def get_metadata(): - if hasattr(target_db, 'metadatas'): - return target_db.metadatas[None] - return target_db.metadata - + return Base.metadata def include_object(object, name, type_, reflected, compare_to): if type_ == "foreign_key_constraint": diff --git a/api/migrations/versions/2024_09_29_0835-ddcc8bbef391_increase_max_length_of_builtin_tool_provider.py b/api/migrations/versions/2024_09_29_0835-ddcc8bbef391_increase_max_length_of_builtin_tool_provider.py new file mode 100644 index 00000000000000..4b16fe7f310442 --- /dev/null +++ b/api/migrations/versions/2024_09_29_0835-ddcc8bbef391_increase_max_length_of_builtin_tool_provider.py @@ -0,0 +1,39 @@ +"""increase max length of builtin tool provider + +Revision ID: ddcc8bbef391 +Revises: d57ba9ebb251 +Create Date: 2024-09-29 08:35:58.062698 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'ddcc8bbef391' +down_revision = 'd57ba9ebb251' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op: + batch_op.alter_column('provider', + existing_type=sa.VARCHAR(length=40), + type_=sa.String(length=256), + existing_nullable=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op: + batch_op.alter_column('provider', + existing_type=sa.String(length=256), + type_=sa.VARCHAR(length=40), + existing_nullable=False) + + # ### end Alembic commands ### diff --git a/api/models/base.py b/api/models/base.py index fa2b68a5d245bb..da4648efa695a3 100644 --- a/api/models/base.py +++ b/api/models/base.py @@ -1,5 +1,5 @@ -from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import declarative_base +from extensions.ext_database import metadata -class Base(DeclarativeBase): - pass +Base = declarative_base(metadata=metadata) diff --git a/api/models/model.py b/api/models/model.py index 2fad0f54095498..660c7e0a360257 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -2,12 +2,15 @@ import re import uuid from enum import Enum -from typing import Optional +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from models.workflow import Workflow from flask import request from flask_login import UserMixin -from sqlalchemy import Float, func, text -from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy import Float, Index, PrimaryKeyConstraint, func, text +from sqlalchemy.orm import Mapped, mapped_column from configs import dify_config from core.file.tool_file_parser import ToolFileParser @@ -20,7 +23,7 @@ from .types import StringUUID -class DifySetup(db.Model): +class DifySetup(Base): __tablename__ = "dify_setups" __table_args__ = (db.PrimaryKeyConstraint("version", name="dify_setup_pkey"),) @@ -55,7 +58,7 @@ class IconType(Enum): EMOJI = "emoji" -class App(db.Model): +class App(Base): 
__tablename__ = "apps" __table_args__ = (db.PrimaryKeyConstraint("id", name="app_pkey"), db.Index("app_tenant_id_idx", "tenant_id")) @@ -133,7 +136,8 @@ def is_agent(self) -> bool: return False if not app_model_config.agent_mode: return False - if self.app_model_config.agent_mode_dict.get("enabled", False) and self.app_model_config.agent_mode_dict.get( + + if app_model_config.agent_mode_dict.get("enabled", False) and app_model_config.agent_mode_dict.get( "strategy", "" ) in {"function_call", "react"}: self.mode = AppMode.AGENT_CHAT.value @@ -250,7 +254,7 @@ def app(self): return app @property - def model_dict(self) -> dict: + def model_dict(self): return json.loads(self.model) if self.model else None @property @@ -284,6 +288,9 @@ def annotation_reply_dict(self) -> dict: ) if annotation_setting: collection_binding_detail = annotation_setting.collection_binding_detail + if not collection_binding_detail: + raise ValueError("Collection binding detail not found") + return { "id": annotation_setting.id, "enabled": True, @@ -314,7 +321,7 @@ def external_data_tools_list(self) -> list[dict]: return json.loads(self.external_data_tools) if self.external_data_tools else [] @property - def user_input_form_list(self) -> dict: + def user_input_form_list(self): return json.loads(self.user_input_form) if self.user_input_form else [] @property @@ -458,7 +465,7 @@ def copy(self): return new_app_model_config -class RecommendedApp(db.Model): +class RecommendedApp(Base): __tablename__ = "recommended_apps" __table_args__ = ( db.PrimaryKeyConstraint("id", name="recommended_app_pkey"), @@ -486,7 +493,7 @@ def app(self): return app -class InstalledApp(db.Model): +class InstalledApp(Base): __tablename__ = "installed_apps" __table_args__ = ( db.PrimaryKeyConstraint("id", name="installed_app_pkey"), @@ -522,7 +529,7 @@ class Conversation(Base): db.Index("conversation_app_from_user_idx", "app_id", "from_source", "from_end_user_id"), ) - id: Mapped[str] = mapped_column(StringUUID, 
server_default=db.text("uuid_generate_v4()")) + id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) app_id = db.Column(StringUUID, nullable=False) app_model_config_id = db.Column(StringUUID, nullable=True) model_provider = db.Column(db.String(255), nullable=True) @@ -546,10 +553,8 @@ class Conversation(Base): created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) - messages: Mapped[list["Message"]] = relationship( - "Message", backref="conversation", lazy="select", passive_deletes="all" - ) - message_annotations: Mapped[list["MessageAnnotation"]] = relationship( + messages = db.relationship("Message", backref="conversation", lazy="select", passive_deletes="all") + message_annotations = db.relationship( "MessageAnnotation", backref="conversation", lazy="select", passive_deletes="all" ) @@ -578,7 +583,7 @@ def model_config(self): ) if not app_model_config: - raise ValueError("app config not found") + return {} model_config = app_model_config.to_dict() @@ -692,12 +697,12 @@ def in_debug_mode(self): class Message(Base): __tablename__ = "messages" __table_args__ = ( - db.PrimaryKeyConstraint("id", name="message_pkey"), - db.Index("message_app_id_idx", "app_id", "created_at"), - db.Index("message_conversation_id_idx", "conversation_id"), - db.Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"), - db.Index("message_account_idx", "app_id", "from_source", "from_account_id"), - db.Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"), + PrimaryKeyConstraint("id", name="message_pkey"), + Index("message_app_id_idx", "app_id", "created_at"), + Index("message_conversation_id_idx", "conversation_id"), + Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"), + Index("message_account_idx", "app_id", "from_source", "from_account_id"), + 
Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"), ) id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) @@ -705,10 +710,10 @@ class Message(Base): model_provider = db.Column(db.String(255), nullable=True) model_id = db.Column(db.String(255), nullable=True) override_model_configs = db.Column(db.Text) - conversation_id: Mapped[str] = mapped_column(StringUUID, db.ForeignKey("conversations.id"), nullable=False) - inputs: Mapped[str] = mapped_column(db.JSON) - query: Mapped[str] = mapped_column(db.Text, nullable=False) - message: Mapped[str] = mapped_column(db.JSON, nullable=False) + conversation_id = db.Column(StringUUID, db.ForeignKey("conversations.id"), nullable=False) + inputs = db.Column(db.JSON) + query = db.Column(db.Text, nullable=False) + message = db.Column(db.JSON, nullable=False) message_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) message_unit_price = db.Column(db.Numeric(10, 4), nullable=False) message_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) @@ -974,7 +979,7 @@ def from_dict(cls, data: dict): ) -class MessageFeedback(db.Model): +class MessageFeedback(Base): __tablename__ = "message_feedbacks" __table_args__ = ( db.PrimaryKeyConstraint("id", name="message_feedback_pkey"), @@ -1009,15 +1014,15 @@ class MessageFile(Base): db.Index("message_file_created_by_idx", "created_by"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) - message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - type: Mapped[str] = mapped_column(db.String(255), nullable=False) - transfer_method: Mapped[str] = mapped_column(db.String(255), nullable=False) - url: Mapped[str] = mapped_column(db.Text, nullable=True) - belongs_to: Mapped[str] = mapped_column(db.String(255), nullable=True) - upload_file_id: Mapped[str] = mapped_column(StringUUID, nullable=True) - created_by_role: Mapped[str] = 
mapped_column(db.String(255), nullable=False) - created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) + message_id = db.Column(StringUUID, nullable=False) + type = db.Column(db.String(255), nullable=False) + transfer_method = db.Column(db.String(255), nullable=False) + url = db.Column(db.Text, nullable=True) + belongs_to = db.Column(db.String(255), nullable=True) + upload_file_id = db.Column(StringUUID, nullable=True) + created_by_role = db.Column(db.String(255), nullable=False) + created_by = db.Column(StringUUID, nullable=False) created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) @@ -1032,7 +1037,7 @@ class MessageAnnotation(Base): id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) app_id = db.Column(StringUUID, nullable=False) - conversation_id: Mapped[str] = mapped_column(StringUUID, db.ForeignKey("conversations.id"), nullable=True) + conversation_id = db.Column(StringUUID, db.ForeignKey("conversations.id"), nullable=True) message_id = db.Column(StringUUID, nullable=True) question = db.Column(db.Text, nullable=True) content = db.Column(db.Text, nullable=False) @@ -1052,7 +1057,7 @@ def annotation_create_account(self): return account -class AppAnnotationHitHistory(db.Model): +class AppAnnotationHitHistory(Base): __tablename__ = "app_annotation_hit_histories" __table_args__ = ( db.PrimaryKeyConstraint("id", name="app_annotation_hit_histories_pkey"), @@ -1090,7 +1095,7 @@ def annotation_create_account(self): return account -class AppAnnotationSetting(db.Model): +class AppAnnotationSetting(Base): __tablename__ = "app_annotation_settings" __table_args__ = ( db.PrimaryKeyConstraint("id", name="app_annotation_settings_pkey"), @@ -1138,7 +1143,7 @@ def collection_binding_detail(self): return collection_binding_detail -class OperationLog(db.Model): +class OperationLog(Base): __tablename__ = "operation_logs" 
__table_args__ = ( db.PrimaryKeyConstraint("id", name="operation_log_pkey"), @@ -1155,7 +1160,7 @@ class OperationLog(db.Model): updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class EndUser(UserMixin, db.Model): +class EndUser(UserMixin, Base): __tablename__ = "end_users" __table_args__ = ( db.PrimaryKeyConstraint("id", name="end_user_pkey"), @@ -1175,7 +1180,7 @@ class EndUser(UserMixin, db.Model): updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class Site(db.Model): +class Site(Base): __tablename__ = "sites" __table_args__ = ( db.PrimaryKeyConstraint("id", name="site_pkey"), @@ -1222,7 +1227,7 @@ def app_base_url(self): return dify_config.APP_WEB_URL or request.url_root.rstrip("/") -class ApiToken(db.Model): +class ApiToken(Base): __tablename__ = "api_tokens" __table_args__ = ( db.PrimaryKeyConstraint("id", name="api_token_pkey"), @@ -1249,7 +1254,7 @@ def generate_api_key(prefix, n): return result -class UploadFile(db.Model): +class UploadFile(Base): __tablename__ = "upload_files" __table_args__ = ( db.PrimaryKeyConstraint("id", name="upload_file_pkey"), @@ -1273,7 +1278,7 @@ class UploadFile(db.Model): hash = db.Column(db.String(255), nullable=True) -class ApiRequest(db.Model): +class ApiRequest(Base): __tablename__ = "api_requests" __table_args__ = ( db.PrimaryKeyConstraint("id", name="api_request_pkey"), @@ -1290,7 +1295,7 @@ class ApiRequest(db.Model): created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class MessageChain(db.Model): +class MessageChain(Base): __tablename__ = "message_chains" __table_args__ = ( db.PrimaryKeyConstraint("id", name="message_chain_pkey"), @@ -1395,7 +1400,7 @@ def tool_inputs_dict(self) -> dict: return {} @property - def tool_outputs_dict(self) -> dict: + def tool_outputs_dict(self): tools = self.tools try: if self.observation: @@ -1417,7 +1422,7 @@ def tool_outputs_dict(self) 
-> dict: return dict.fromkeys(tools, self.observation) -class DatasetRetrieverResource(db.Model): +class DatasetRetrieverResource(Base): __tablename__ = "dataset_retriever_resources" __table_args__ = ( db.PrimaryKeyConstraint("id", name="dataset_retriever_resource_pkey"), @@ -1444,7 +1449,7 @@ class DatasetRetrieverResource(db.Model): created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) -class Tag(db.Model): +class Tag(Base): __tablename__ = "tags" __table_args__ = ( db.PrimaryKeyConstraint("id", name="tag_pkey"), @@ -1462,7 +1467,7 @@ class Tag(db.Model): created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class TagBinding(db.Model): +class TagBinding(Base): __tablename__ = "tag_bindings" __table_args__ = ( db.PrimaryKeyConstraint("id", name="tag_binding_pkey"), @@ -1478,7 +1483,7 @@ class TagBinding(db.Model): created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class TraceAppConfig(db.Model): +class TraceAppConfig(Base): __tablename__ = "trace_app_config" __table_args__ = ( db.PrimaryKeyConstraint("id", name="tracing_app_config_pkey"), diff --git a/api/models/provider.py b/api/models/provider.py index 644915e781084b..d3c6db9babb073 100644 --- a/api/models/provider.py +++ b/api/models/provider.py @@ -1,6 +1,7 @@ from enum import Enum from extensions.ext_database import db +from models.base import Base from .types import StringUUID @@ -35,7 +36,7 @@ def value_of(value): raise ValueError(f"No matching enum found for value '{value}'") -class Provider(db.Model): +class Provider(Base): """ Provider model representing the API providers and their configurations. """ @@ -88,7 +89,7 @@ def is_enabled(self): return self.is_valid and self.token_is_set -class ProviderModel(db.Model): +class ProviderModel(Base): """ Provider model representing the API provider_models and their configurations. 
""" @@ -113,7 +114,7 @@ class ProviderModel(db.Model): updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class TenantDefaultModel(db.Model): +class TenantDefaultModel(Base): __tablename__ = "tenant_default_models" __table_args__ = ( db.PrimaryKeyConstraint("id", name="tenant_default_model_pkey"), @@ -129,7 +130,7 @@ class TenantDefaultModel(db.Model): updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class TenantPreferredModelProvider(db.Model): +class TenantPreferredModelProvider(Base): __tablename__ = "tenant_preferred_model_providers" __table_args__ = ( db.PrimaryKeyConstraint("id", name="tenant_preferred_model_provider_pkey"), @@ -144,7 +145,7 @@ class TenantPreferredModelProvider(db.Model): updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class ProviderOrder(db.Model): +class ProviderOrder(Base): __tablename__ = "provider_orders" __table_args__ = ( db.PrimaryKeyConstraint("id", name="provider_order_pkey"), @@ -169,7 +170,7 @@ class ProviderOrder(db.Model): updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class ProviderModelSetting(db.Model): +class ProviderModelSetting(Base): """ Provider model settings for record the model enabled status and load balancing status. """ @@ -191,7 +192,7 @@ class ProviderModelSetting(db.Model): updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class LoadBalancingModelConfig(db.Model): +class LoadBalancingModelConfig(Base): """ Configurations for load balancing models. 
""" diff --git a/api/models/source.py b/api/models/source.py index 07695f06e6cf00..efd94227d0593a 100644 --- a/api/models/source.py +++ b/api/models/source.py @@ -3,11 +3,12 @@ from sqlalchemy.dialects.postgresql import JSONB from extensions.ext_database import db +from models.base import Base from .types import StringUUID -class DataSourceOauthBinding(db.Model): +class DataSourceOauthBinding(Base): __tablename__ = "data_source_oauth_bindings" __table_args__ = ( db.PrimaryKeyConstraint("id", name="source_binding_pkey"), @@ -25,7 +26,7 @@ class DataSourceOauthBinding(db.Model): disabled = db.Column(db.Boolean, nullable=True, server_default=db.text("false")) -class DataSourceApiKeyAuthBinding(db.Model): +class DataSourceApiKeyAuthBinding(Base): __tablename__ = "data_source_api_key_auth_bindings" __table_args__ = ( db.PrimaryKeyConstraint("id", name="data_source_api_key_auth_binding_pkey"), diff --git a/api/models/task.py b/api/models/task.py index 57b147c78db110..6fab2a72c26712 100644 --- a/api/models/task.py +++ b/api/models/task.py @@ -3,9 +3,10 @@ from celery import states from extensions.ext_database import db +from models.base import Base -class CeleryTask(db.Model): +class CeleryTask(Base): """Task result/status.""" __tablename__ = "celery_taskmeta" @@ -29,7 +30,7 @@ class CeleryTask(db.Model): queue = db.Column(db.String(155), nullable=True) -class CeleryTaskSet(db.Model): +class CeleryTaskSet(Base): """TaskSet result.""" __tablename__ = "celery_tasksetmeta" diff --git a/api/models/tool.py b/api/models/tool.py index a81bb65174a724..d70c90585193c7 100644 --- a/api/models/tool.py +++ b/api/models/tool.py @@ -2,6 +2,7 @@ from enum import Enum from extensions.ext_database import db +from models.base import Base from .types import StringUUID @@ -17,7 +18,7 @@ def value_of(value): raise ValueError(f"No matching enum found for value '{value}'") -class ToolProvider(db.Model): +class ToolProvider(Base): __tablename__ = "tool_providers" __table_args__ = ( 
db.PrimaryKeyConstraint("id", name="tool_provider_pkey"), diff --git a/api/models/tools.py b/api/models/tools.py index 485e16b2280def..1e99749b243e6f 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -1,8 +1,11 @@ import json from datetime import datetime +from deprecated import deprecated +from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column +from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration from extensions.ext_database import db @@ -31,7 +34,7 @@ class BuiltinToolProvider(Base): # who created this tool provider user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # name of the tool provider - provider: Mapped[str] = mapped_column(db.String(40), nullable=False) + provider: Mapped[str] = mapped_column(db.String(256), nullable=False) # credential of the tool provider encrypted_credentials: Mapped[str] = mapped_column(db.Text, nullable=True) created_at: Mapped[datetime] = mapped_column( @@ -182,7 +185,7 @@ def app(self) -> App | None: return db.session.query(App).filter(App.id == self.app_id).first() -class ToolModelInvoke(db.Model): +class ToolModelInvoke(Base): """ store the invoke logs from tool invoke """ @@ -219,7 +222,7 @@ class ToolModelInvoke(db.Model): updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class ToolConversationVariables(db.Model): +class ToolConversationVariables(Base): """ store the conversation variables from tool invoke """ @@ -275,3 +278,46 @@ class ToolFile(Base): mimetype: Mapped[str] = mapped_column(db.String(255), nullable=False) # original url original_url: Mapped[str] = mapped_column(db.String(2048), nullable=True) + + +@deprecated +class DeprecatedPublishedAppTool(Base): + """ + The table stores the apps published as a tool for each person. 
+ """ + + __tablename__ = "tool_published_apps" + __table_args__ = ( + db.PrimaryKeyConstraint("id", name="published_app_tool_pkey"), + db.UniqueConstraint("app_id", "user_id", name="unique_published_app_tool"), + ) + + # id of the tool provider + id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) + # id of the app + app_id = db.Column(StringUUID, ForeignKey("apps.id"), nullable=False) + # who published this tool + user_id = db.Column(StringUUID, nullable=False) + # description of the tool, stored in i18n format, for human + description = db.Column(db.Text, nullable=False) + # llm_description of the tool, for LLM + llm_description = db.Column(db.Text, nullable=False) + # query description, query will be seem as a parameter of the tool, + # to describe this parameter to llm, we need this field + query_description = db.Column(db.Text, nullable=False) + # query name, the name of the query parameter + query_name = db.Column(db.String(40), nullable=False) + # name of the tool provider + tool_name = db.Column(db.String(40), nullable=False) + # author + author = db.Column(db.String(40), nullable=False) + created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + + @property + def description_i18n(self) -> I18nObject: + return I18nObject(**json.loads(self.description)) + + @property + def app(self) -> App: + return db.session.query(App).filter(App.id == self.app_id).first() diff --git a/api/models/web.py b/api/models/web.py index bc088c185d5a8b..934008a4434a6f 100644 --- a/api/models/web.py +++ b/api/models/web.py @@ -1,10 +1,11 @@ from extensions.ext_database import db +from models.base import Base from .model import Message from .types import StringUUID -class SavedMessage(db.Model): +class SavedMessage(Base): __tablename__ = "saved_messages" __table_args__ = ( db.PrimaryKeyConstraint("id", 
name="saved_message_pkey"), @@ -23,7 +24,7 @@ def message(self): return db.session.query(Message).filter(Message.id == self.message_id).first() -class PinnedConversation(db.Model): +class PinnedConversation(Base): __tablename__ = "pinned_conversations" __table_args__ = ( db.PrimaryKeyConstraint("id", name="pinned_conversation_pkey"), diff --git a/api/models/workflow.py b/api/models/workflow.py index 9c93ea4cea84e7..0b7d255954e10a 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -2,10 +2,13 @@ from collections.abc import Mapping, Sequence from datetime import datetime from enum import Enum -from typing import Any, Optional, Union +from typing import TYPE_CHECKING, Any, Union -from sqlalchemy import func -from sqlalchemy.orm import Mapped +if TYPE_CHECKING: + from models.model import AppMode + +from sqlalchemy import Index, PrimaryKeyConstraint, func +from sqlalchemy.orm import Mapped, mapped_column import contexts from constants import HIDDEN_VALUE @@ -13,6 +16,7 @@ from core.helper import encrypter from extensions.ext_database import db from libs import helper +from models.base import Base from .account import Account from .types import StringUUID @@ -75,7 +79,7 @@ def from_app_mode(cls, app_mode: Union[str, "AppMode"]) -> "WorkflowType": return cls.WORKFLOW if app_mode == AppMode.WORKFLOW else cls.CHAT -class Workflow(db.Model): +class Workflow(Base): """ Workflow, for `Workflow App` and `Chat App workflow mode`. 
@@ -345,7 +349,7 @@ def value_of(cls, value: str) -> "WorkflowRunStatus": raise ValueError(f"invalid workflow run status value {value}") -class WorkflowRun(db.Model): +class WorkflowRun(Base): """ Workflow Run @@ -436,7 +440,7 @@ def outputs_dict(self): return json.loads(self.outputs) if self.outputs else None @property - def message(self) -> Optional["Message"]: + def message(self): from models.model import Message return ( @@ -542,7 +546,7 @@ def value_of(cls, value: str) -> "WorkflowNodeExecutionStatus": raise ValueError(f"invalid workflow node execution status value {value}") -class WorkflowNodeExecution(db.Model): +class WorkflowNodeExecution(Base): """ Workflow Node Execution @@ -708,7 +712,7 @@ def value_of(cls, value: str) -> "WorkflowAppLogCreatedFrom": raise ValueError(f"invalid workflow app log created from value {value}") -class WorkflowAppLog(db.Model): +class WorkflowAppLog(Base): """ Workflow App execution log, excluding workflow debugging records. @@ -770,15 +774,20 @@ def created_by_end_user(self): return db.session.get(EndUser, self.created_by) if created_by_role == CreatedByRole.END_USER else None -class ConversationVariable(db.Model): +class ConversationVariable(Base): __tablename__ = "workflow_conversation_variables" + __table_args__ = ( + PrimaryKeyConstraint("id", "conversation_id", name="workflow_conversation_variables_pkey"), + Index("workflow__conversation_variables_app_id_idx", "app_id"), + Index("workflow__conversation_variables_created_at_idx", "created_at"), + ) - id: Mapped[str] = db.Column(StringUUID, primary_key=True) - conversation_id: Mapped[str] = db.Column(StringUUID, nullable=False, primary_key=True) - app_id: Mapped[str] = db.Column(StringUUID, nullable=False, index=True) - data = db.Column(db.Text, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, index=True, server_default=db.text("CURRENT_TIMESTAMP(0)")) - updated_at = db.Column( + id: Mapped[str] = mapped_column(StringUUID, primary_key=True) + 
conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False, primary_key=True) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + data = mapped_column(db.Text, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + updated_at = mapped_column( db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() ) From 957ab093c909ec15b46c32665617b9a9660faea2 Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Sun, 29 Sep 2024 17:07:40 +0800 Subject: [PATCH 067/325] enhancement: reduce requests to plugin daemon --- api/core/tools/tool_manager.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index fdd7e8385b3971..2f747a317e0e43 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -123,12 +123,13 @@ def get_tool_runtime( :return: the tool """ if provider_type == ToolProviderType.BUILT_IN: - builtin_tool = cls.get_builtin_tool(provider_id, tool_name, tenant_id) - if not builtin_tool: - raise ValueError(f"tool {tool_name} not found") - # check if the builtin tool need credentials provider_controller = cls.get_builtin_provider(provider_id, tenant_id) + + builtin_tool = provider_controller.get_tool(tool_name) + if not builtin_tool: + raise ToolProviderNotFoundError(f"builtin tool {tool_name} not found") + if not provider_controller.need_credentials: return cast( BuiltinTool, From 823637349819d5603fd3a90eb2229411b4769e3d Mon Sep 17 00:00:00 2001 From: takatost Date: Sun, 29 Sep 2024 18:15:18 +0800 Subject: [PATCH 068/325] feat: remove unused codes --- api/Dockerfile | 4 + .../console/datasets/datasets_document.py | 15 +- .../model_runtime/entities/model_entities.py | 5 - .../model_providers/__base/ai_model.py | 222 +- .../model_providers/__base/audio.mp3 | Bin 218880 -> 0 bytes .../model_providers/__base/model_provider.py | 120 - 
.../__base/moderation_model.py | 26 +- .../model_providers/__base/rerank_model.py | 44 +- .../__base/speech2text_model.py | 41 +- .../model_providers/__base/text2img_model.py | 54 - .../__base/text_embedding_model.py | 38 +- .../__base/tokenizers/gpt2_tokenzier.py | 33 +- .../model_providers/__base/tts_model.py | 124 +- .../text_embedding/text_embedding.py | 191 -- .../baichuan/text_embedding/text_embedding.py | 207 -- .../cohere/text_embedding/text_embedding.py | 223 -- .../model_providers/fireworks/fireworks.yaml | 30 - .../llm/llama-v3p2-11b-vision-instruct.yaml | 46 - .../fireworks/llm/llama-v3p2-1b-instruct.yaml | 46 - .../fireworks/llm/llama-v3p2-3b-instruct.yaml | 46 - .../llm/llama-v3p2-90b-vision-instruct.yaml | 46 - .../text_embedding/UAE-Large-V1.yaml | 12 - .../fireworks/text_embedding/__init__.py | 0 .../fireworks/text_embedding/gte-base.yaml | 12 - .../fireworks/text_embedding/gte-large.yaml | 12 - .../text_embedding/nomic-embed-text-v1.5.yaml | 12 - .../text_embedding/nomic-embed-text-v1.yaml | 12 - .../text_embedding/text_embedding.py | 151 - .../model_providers/fishaudio/fishaudio.yaml | 76 - .../google/llm/gemini-1.5-flash-001.yaml | 48 - .../google/llm/gemini-1.5-flash-002.yaml | 48 - .../llm/gemini-1.5-flash-8b-exp-0924.yaml | 48 - .../google/llm/gemini-1.5-flash.yaml | 48 - .../google/llm/gemini-1.5-pro-001.yaml | 48 - .../google/llm/gemini-1.5-pro-002.yaml | 48 - .../google/llm/gemini-1.5-pro.yaml | 48 - .../groq/llm/llama-3.2-11b-text-preview.yaml | 25 - .../groq/llm/llama-3.2-1b-preview.yaml | 25 - .../groq/llm/llama-3.2-3b-preview.yaml | 25 - .../groq/llm/llama-3.2-90b-text-preview.yaml | 25 - .../text_embedding/text_embedding.py | 189 -- .../text_embedding/text_embedding.py | 209 -- .../hunyuan/text_embedding/text_embedding.py | 169 - .../model_providers/jina/jina.yaml | 69 - .../jina/text_embedding/text_embedding.py | 199 -- .../localai/text_embedding/text_embedding.py | 189 -- .../minimax/text_embedding/text_embedding.py | 184 - 
.../text_embedding/text_embedding.py | 170 - .../model_providers/model_provider_factory.py | 3 - .../nomic/text_embedding/text_embedding.py | 165 - .../nvidia/text_embedding/text_embedding.py | 158 - .../oci/text_embedding/text_embedding.py | 224 -- .../ollama/text_embedding/text_embedding.py | 211 -- .../openai/text_embedding/text_embedding.py | 203 -- .../text_embedding/text_embedding.py | 217 -- .../openllm/text_embedding/text_embedding.py | 155 - .../text_embedding/text_embedding.py | 152 - .../model_providers/sagemaker/llm/llm.py | 463 --- .../text_embedding/text_embedding.py | 200 -- .../siliconflow/llm/_position.yaml | 28 - .../siliconflow/llm/internlm2_5-20b-chat.yaml | 30 - .../llm/qwen2.5-coder-7b-instruct.yaml | 74 - .../llm/qwen2.5-math-72b-instruct.yaml | 74 - .../text_embedding/text_embedding.py | 46 - .../model_providers/spark/llm/llm.py | 309 -- .../tongyi/text_embedding/text_embedding.py | 177 - .../upstage/text_embedding/text_embedding.py | 197 -- .../vertex_ai/llm/gemini-1.5-flash-001.yaml | 37 - .../vertex_ai/llm/gemini-1.5-flash-002.yaml | 37 - .../vertex_ai/llm/gemini-1.5-pro-001.yaml | 37 - .../vertex_ai/llm/gemini-1.5-pro-002.yaml | 37 - .../llm/gemini-flash-experimental.yaml | 37 - .../llm/gemini-pro-experimental.yaml | 37 - .../model_providers/vertex_ai/llm/llm.py | 733 ---- .../text_embedding/text_embedding.py | 187 - .../text_embedding/text_embedding.py | 198 -- .../wenxin/text_embedding/text_embedding.py | 187 - .../text_embedding/text_embedding.py | 204 -- api/core/plugin/manager/model.py | 20 +- api/core/provider_manager.py | 28 +- api/core/tools/builtin_tool/tool.py | 7 - api/core/tools/utils/web_reader_tool.py | 357 -- api/poetry.lock | 3013 ++++------------- api/pyproject.toml | 27 +- .../model_runtime/__mock/anthropic.py | 98 - .../model_runtime/__mock/fishaudio.py | 82 - .../model_runtime/__mock/google.py | 116 - .../model_runtime/__mock/huggingface.py | 20 - .../model_runtime/__mock/huggingface_chat.py | 56 - 
.../model_runtime/__mock/huggingface_tei.py | 94 - .../model_runtime/__mock/nomic_embeddings.py | 59 - .../model_runtime/__mock/openai.py | 25 - .../model_runtime/__mock/openai_chat.py | 269 -- .../model_runtime/__mock/openai_completion.py | 130 - .../model_runtime/__mock/openai_embeddings.py | 58 - .../model_runtime/__mock/openai_remote.py | 23 - .../__mock/openai_speech2text.py | 29 - .../model_runtime/__mock/xinference.py | 170 - .../model_runtime/anthropic/__init__.py | 0 .../model_runtime/anthropic/test_llm.py | 92 - .../model_runtime/anthropic/test_provider.py | 17 - .../model_runtime/assets/audio.mp3 | Bin 218880 -> 0 bytes .../model_runtime/azure_ai_studio/__init__.py | 0 .../model_runtime/azure_ai_studio/test_llm.py | 113 - .../azure_ai_studio/test_provider.py | 17 - .../azure_ai_studio/test_rerank.py | 50 - .../model_runtime/azure_openai/__init__.py | 0 .../model_runtime/azure_openai/test_llm.py | 290 -- .../azure_openai/test_text_embedding.py | 62 - .../model_runtime/baichuan/__init__.py | 0 .../model_runtime/baichuan/test_llm.py | 172 - .../model_runtime/baichuan/test_provider.py | 15 - .../baichuan/test_text_embedding.py | 87 - .../model_runtime/bedrock/__init__.py | 0 .../model_runtime/bedrock/test_llm.py | 103 - .../model_runtime/bedrock/test_provider.py | 21 - .../model_runtime/chatglm/__init__.py | 0 .../model_runtime/chatglm/test_llm.py | 230 -- .../model_runtime/chatglm/test_provider.py | 17 - .../model_runtime/cohere/__init__.py | 0 .../model_runtime/cohere/test_llm.py | 191 -- .../model_runtime/cohere/test_provider.py | 15 - .../model_runtime/cohere/test_rerank.py | 40 - .../cohere/test_text_embedding.py | 45 - .../model_runtime/fireworks/__init__.py | 0 .../model_runtime/fireworks/test_llm.py | 186 - .../model_runtime/fireworks/test_provider.py | 17 - .../fireworks/test_text_embedding.py | 54 - .../model_runtime/fishaudio/__init__.py | 0 .../model_runtime/fishaudio/test_provider.py | 33 - .../model_runtime/fishaudio/test_tts.py | 32 - 
.../model_runtime/google/__init__.py | 0 .../model_runtime/google/test_llm.py | 177 - .../model_runtime/google/test_provider.py | 17 - .../model_runtime/huggingface_hub/__init__.py | 0 .../model_runtime/huggingface_hub/test_llm.py | 277 -- .../huggingface_hub/test_text_embedding.py | 112 - .../model_runtime/huggingface_tei/__init__.py | 0 .../huggingface_tei/test_embeddings.py | 70 - .../huggingface_tei/test_rerank.py | 78 - .../model_runtime/hunyuan/__init__.py | 0 .../model_runtime/hunyuan/test_llm.py | 90 - .../model_runtime/hunyuan/test_provider.py | 20 - .../hunyuan/test_text_embedding.py | 96 - .../model_runtime/jina/__init__.py | 0 .../model_runtime/jina/test_provider.py | 15 - .../model_runtime/jina/test_text_embedding.py | 49 - .../model_runtime/localai/__init__.py | 0 .../model_runtime/localai/test_embedding.py | 4 - .../model_runtime/localai/test_llm.py | 174 - .../model_runtime/localai/test_rerank.py | 96 - .../model_runtime/localai/test_speech2text.py | 42 - .../model_runtime/minimax/__init__.py | 0 .../model_runtime/minimax/test_embedding.py | 58 - .../model_runtime/minimax/test_llm.py | 143 - .../model_runtime/minimax/test_provider.py | 25 - .../model_runtime/mixedbread/__init__.py | 0 .../model_runtime/mixedbread/test_provider.py | 28 - .../model_runtime/mixedbread/test_rerank.py | 100 - .../mixedbread/test_text_embedding.py | 78 - .../model_runtime/nomic/__init__.py | 0 .../model_runtime/nomic/test_embeddings.py | 62 - .../model_runtime/nomic/test_provider.py | 22 - .../model_runtime/novita/__init__.py | 0 .../model_runtime/novita/test_llm.py | 99 - .../model_runtime/novita/test_provider.py | 19 - .../model_runtime/oci/__init__.py | 0 .../model_runtime/oci/test_llm.py | 130 - .../model_runtime/oci/test_provider.py | 20 - .../model_runtime/oci/test_text_embedding.py | 58 - .../model_runtime/ollama/__init__.py | 0 .../model_runtime/ollama/test_llm.py | 222 -- .../ollama/test_text_embedding.py | 65 - .../model_runtime/openai/test_llm.py | 313 -- 
.../model_runtime/openai/test_provider.py | 17 - .../model_runtime/openai/test_speech2text.py | 45 - .../openai/test_text_embedding.py | 48 - .../openai_api_compatible/__init__.py | 0 .../openai_api_compatible/test_llm.py | 197 -- .../openai_api_compatible/test_speech2text.py | 50 - .../test_text_embedding.py | 67 - .../model_runtime/openllm/__init__.py | 0 .../model_runtime/openllm/test_embedding.py | 57 - .../model_runtime/openllm/test_llm.py | 95 - .../model_runtime/openrouter/__init__.py | 0 .../model_runtime/openrouter/test_llm.py | 104 - .../model_runtime/replicate/__init__.py | 0 .../model_runtime/replicate/test_llm.py | 112 - .../replicate/test_text_embedding.py | 136 - .../model_runtime/sagemaker/__init__.py | 0 .../model_runtime/sagemaker/test_provider.py | 15 - .../model_runtime/sagemaker/test_rerank.py | 55 - .../sagemaker/test_text_embedding.py | 33 - .../model_runtime/siliconflow/__init__.py | 0 .../model_runtime/siliconflow/test_llm.py | 73 - .../siliconflow/test_provider.py | 15 - .../model_runtime/siliconflow/test_rerank.py | 47 - .../siliconflow/test_speech2text.py | 45 - .../siliconflow/test_text_embedding.py | 60 - .../model_runtime/spark/__init__.py | 0 .../model_runtime/spark/test_llm.py | 92 - .../model_runtime/spark/test_provider.py | 21 - .../model_runtime/stepfun/__init__.py | 0 .../model_runtime/stepfun/test_llm.py | 125 - .../model_runtime/test_tiktoken.py | 11 + .../model_runtime/togetherai/__init__.py | 0 .../model_runtime/togetherai/test_llm.py | 104 - .../model_runtime/tongyi/__init__.py | 0 .../model_runtime/tongyi/test_llm.py | 75 - .../model_runtime/tongyi/test_provider.py | 17 - .../tongyi/test_response_format.py | 80 - .../model_runtime/upstage/__init__.py | 0 .../model_runtime/upstage/test_llm.py | 186 - .../model_runtime/upstage/test_provider.py | 17 - .../upstage/test_text_embedding.py | 54 - .../model_runtime/volcengine_maas/__init__.py | 0 .../volcengine_maas/test_embedding.py | 79 - 
.../model_runtime/volcengine_maas/test_llm.py | 118 - .../model_runtime/wenxin/__init__.py | 0 .../model_runtime/wenxin/test_embedding.py | 69 - .../model_runtime/wenxin/test_llm.py | 214 -- .../model_runtime/wenxin/test_provider.py | 17 - .../model_runtime/xinference/__init__.py | 0 .../xinference/test_embeddings.py | 64 - .../model_runtime/xinference/test_llm.py | 366 -- .../model_runtime/xinference/test_rerank.py | 52 - .../model_runtime/zhinao/__init__.py | 0 .../model_runtime/zhinao/test_llm.py | 73 - .../model_runtime/zhinao/test_provider.py | 15 - .../model_runtime/zhipuai/__init__.py | 0 .../model_runtime/zhipuai/test_llm.py | 109 - .../model_runtime/zhipuai/test_provider.py | 15 - .../zhipuai/test_text_embedding.py | 41 - .../model_runtime/model_providers/__init__.py | 0 .../model_providers/wenxin/__init__.py | 0 .../wenxin/test_text_embedding.py | 75 - 236 files changed, 847 insertions(+), 20331 deletions(-) delete mode 100644 api/core/model_runtime/model_providers/__base/audio.mp3 delete mode 100644 api/core/model_runtime/model_providers/__base/model_provider.py delete mode 100644 api/core/model_runtime/model_providers/__base/text2img_model.py delete mode 100644 api/core/model_runtime/model_providers/azure_openai/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/baichuan/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/cohere/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/fireworks/fireworks.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-11b-vision-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-1b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-3b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-90b-vision-instruct.yaml delete mode 100644 
api/core/model_runtime/model_providers/fireworks/text_embedding/UAE-Large-V1.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/text_embedding/__init__.py delete mode 100644 api/core/model_runtime/model_providers/fireworks/text_embedding/gte-base.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/text_embedding/gte-large.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/text_embedding/nomic-embed-text-v1.5.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/text_embedding/nomic-embed-text-v1.yaml delete mode 100644 api/core/model_runtime/model_providers/fireworks/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/fishaudio/fishaudio.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-001.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-002.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0924.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-001.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-002.yaml delete mode 100644 api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/llama-3.2-11b-text-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/llama-3.2-1b-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/llama-3.2-3b-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/groq/llm/llama-3.2-90b-text-preview.yaml delete mode 100644 api/core/model_runtime/model_providers/huggingface_hub/text_embedding/text_embedding.py delete mode 100644 
api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/hunyuan/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/jina/jina.yaml delete mode 100644 api/core/model_runtime/model_providers/jina/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/localai/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/minimax/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/mixedbread/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/nomic/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/nvidia/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/oci/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/ollama/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/openai/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/openai_api_compatible/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/openllm/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/replicate/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-20b-chat.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-coder-7b-instruct.yaml delete mode 100644 
api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-math-72b-instruct.yaml delete mode 100644 api/core/model_runtime/model_providers/siliconflow/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/spark/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/tongyi/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/upstage/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-flash-001.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-flash-002.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-pro-001.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-pro-002.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/gemini-flash-experimental.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/gemini-pro-experimental.yaml delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/llm/llm.py delete mode 100644 api/core/model_runtime/model_providers/vertex_ai/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/volcengine_maas/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/wenxin/text_embedding/text_embedding.py delete mode 100644 api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py delete mode 100644 api/core/tools/utils/web_reader_tool.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/anthropic.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/fishaudio.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/google.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/huggingface.py delete mode 100644 
api/tests/integration_tests/model_runtime/__mock/huggingface_chat.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/huggingface_tei.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/nomic_embeddings.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/openai_chat.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/openai_completion.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/openai_embeddings.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/openai_remote.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/openai_speech2text.py delete mode 100644 api/tests/integration_tests/model_runtime/__mock/xinference.py delete mode 100644 api/tests/integration_tests/model_runtime/anthropic/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/anthropic/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/anthropic/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/assets/audio.mp3 delete mode 100644 api/tests/integration_tests/model_runtime/azure_ai_studio/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/azure_ai_studio/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/azure_ai_studio/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/azure_ai_studio/test_rerank.py delete mode 100644 api/tests/integration_tests/model_runtime/azure_openai/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/azure_openai/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/azure_openai/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/baichuan/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/baichuan/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/baichuan/test_provider.py delete mode 100644 
api/tests/integration_tests/model_runtime/baichuan/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/bedrock/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/bedrock/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/bedrock/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/chatglm/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/chatglm/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/chatglm/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/cohere/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/cohere/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/cohere/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/cohere/test_rerank.py delete mode 100644 api/tests/integration_tests/model_runtime/cohere/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/fireworks/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/fireworks/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/fireworks/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/fireworks/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/fishaudio/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/fishaudio/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/fishaudio/test_tts.py delete mode 100644 api/tests/integration_tests/model_runtime/google/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/google/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/google/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/huggingface_hub/__init__.py delete mode 100644 
api/tests/integration_tests/model_runtime/huggingface_hub/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/huggingface_hub/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/huggingface_tei/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/huggingface_tei/test_embeddings.py delete mode 100644 api/tests/integration_tests/model_runtime/huggingface_tei/test_rerank.py delete mode 100644 api/tests/integration_tests/model_runtime/hunyuan/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/hunyuan/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/hunyuan/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/hunyuan/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/jina/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/jina/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/jina/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/localai/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/localai/test_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/localai/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/localai/test_rerank.py delete mode 100644 api/tests/integration_tests/model_runtime/localai/test_speech2text.py delete mode 100644 api/tests/integration_tests/model_runtime/minimax/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/minimax/test_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/minimax/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/minimax/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/mixedbread/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/mixedbread/test_provider.py delete mode 100644 
api/tests/integration_tests/model_runtime/mixedbread/test_rerank.py delete mode 100644 api/tests/integration_tests/model_runtime/mixedbread/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/nomic/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/nomic/test_embeddings.py delete mode 100644 api/tests/integration_tests/model_runtime/nomic/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/novita/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/novita/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/novita/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/oci/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/oci/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/oci/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/oci/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/ollama/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/ollama/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/ollama/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/openai/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/openai/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/openai/test_speech2text.py delete mode 100644 api/tests/integration_tests/model_runtime/openai/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/openai_api_compatible/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/openai_api_compatible/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/openai_api_compatible/test_speech2text.py delete mode 100644 api/tests/integration_tests/model_runtime/openai_api_compatible/test_text_embedding.py delete mode 100644 
api/tests/integration_tests/model_runtime/openllm/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/openllm/test_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/openllm/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/openrouter/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/openrouter/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/replicate/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/replicate/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/replicate/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/sagemaker/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/sagemaker/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/sagemaker/test_rerank.py delete mode 100644 api/tests/integration_tests/model_runtime/sagemaker/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/siliconflow/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/siliconflow/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/siliconflow/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/siliconflow/test_rerank.py delete mode 100644 api/tests/integration_tests/model_runtime/siliconflow/test_speech2text.py delete mode 100644 api/tests/integration_tests/model_runtime/siliconflow/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/spark/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/spark/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/spark/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/stepfun/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/stepfun/test_llm.py create mode 100644 
api/tests/integration_tests/model_runtime/test_tiktoken.py delete mode 100644 api/tests/integration_tests/model_runtime/togetherai/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/togetherai/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/tongyi/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/tongyi/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/tongyi/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/tongyi/test_response_format.py delete mode 100644 api/tests/integration_tests/model_runtime/upstage/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/upstage/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/upstage/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/upstage/test_text_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/volcengine_maas/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/volcengine_maas/test_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/volcengine_maas/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/wenxin/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/wenxin/test_embedding.py delete mode 100644 api/tests/integration_tests/model_runtime/wenxin/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/wenxin/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/xinference/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/xinference/test_embeddings.py delete mode 100644 api/tests/integration_tests/model_runtime/xinference/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/xinference/test_rerank.py delete mode 100644 api/tests/integration_tests/model_runtime/zhinao/__init__.py delete mode 100644 
api/tests/integration_tests/model_runtime/zhinao/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/zhinao/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/zhipuai/__init__.py delete mode 100644 api/tests/integration_tests/model_runtime/zhipuai/test_llm.py delete mode 100644 api/tests/integration_tests/model_runtime/zhipuai/test_provider.py delete mode 100644 api/tests/integration_tests/model_runtime/zhipuai/test_text_embedding.py delete mode 100644 api/tests/unit_tests/core/model_runtime/model_providers/__init__.py delete mode 100644 api/tests/unit_tests/core/model_runtime/model_providers/wenxin/__init__.py delete mode 100644 api/tests/unit_tests/core/model_runtime/model_providers/wenxin/test_text_embedding.py diff --git a/api/Dockerfile b/api/Dockerfile index 6483f8281b7fa6..d32f70321d5c6d 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -67,6 +67,10 @@ ENV PATH="${VIRTUAL_ENV}/bin:${PATH}" # Download nltk data RUN python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger')" +ENV TIKTOKEN_CACHE_DIR=/app/api/.tiktoken_cache + +RUN python -c "import tiktoken; tiktoken.encoding_for_model('gpt2')" + # Copy source code COPY . /app/api/ diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 829ef11e521d8e..ca6c5717271268 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -6,7 +6,6 @@ from flask_login import current_user from flask_restful import Resource, fields, marshal, marshal_with, reqparse from sqlalchemy import asc, desc -from transformers.hf_argparser import string_to_bool from werkzeug.exceptions import Forbidden, NotFound import services @@ -145,7 +144,19 @@ def get(self, dataset_id): sort = request.args.get("sort", default="-created_at", type=str) # "yes", "true", "t", "y", "1" convert to True, while others convert to False. 
try: - fetch = string_to_bool(request.args.get("fetch", default="false")) + fetch_val = request.args.get("fetch", default="false") + if isinstance(fetch_val, bool): + fetch = fetch_val + else: + if fetch_val.lower() in ("yes", "true", "t", "y", "1"): + fetch = True + elif fetch_val.lower() in ("no", "false", "f", "n", "0"): + fetch = False + else: + raise ArgumentTypeError( + f"Truthy value expected: got {fetch_val} but expected one of yes/no, true/false, t/f, y/n, 1/0 " + f"(case insensitive)." + ) except (ArgumentTypeError, ValueError, Exception) as e: fetch = False dataset = DatasetService.get_dataset(dataset_id) diff --git a/api/core/model_runtime/entities/model_entities.py b/api/core/model_runtime/entities/model_entities.py index 52ea787c3ad572..09b9e3aa0d44d2 100644 --- a/api/core/model_runtime/entities/model_entities.py +++ b/api/core/model_runtime/entities/model_entities.py @@ -18,7 +18,6 @@ class ModelType(Enum): SPEECH2TEXT = "speech2text" MODERATION = "moderation" TTS = "tts" - TEXT2IMG = "text2img" @classmethod def value_of(cls, origin_model_type: str) -> "ModelType": @@ -37,8 +36,6 @@ def value_of(cls, origin_model_type: str) -> "ModelType": return cls.SPEECH2TEXT elif origin_model_type in {"tts", cls.TTS.value}: return cls.TTS - elif origin_model_type in {"text2img", cls.TEXT2IMG.value}: - return cls.TEXT2IMG elif origin_model_type == cls.MODERATION.value: return cls.MODERATION else: @@ -62,8 +59,6 @@ def to_origin_model_type(self) -> str: return "tts" elif self == self.MODERATION: return "moderation" - elif self == self.TEXT2IMG: - return "text2img" else: raise ValueError(f"invalid model type {self}") diff --git a/api/core/model_runtime/model_providers/__base/ai_model.py b/api/core/model_runtime/model_providers/__base/ai_model.py index 6b04ba2efd9ca3..a28d69ce8084e6 100644 --- a/api/core/model_runtime/model_providers/__base/ai_model.py +++ b/api/core/model_runtime/model_providers/__base/ai_model.py @@ -1,26 +1,18 @@ import decimal -import os -from 
collections.abc import Mapping from typing import Optional from pydantic import ConfigDict, Field -from core.helper.position_helper import get_position_map, sort_by_position_map -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.defaults import PARAMETER_RULE_TEMPLATE from core.model_runtime.entities.model_entities import ( AIModelEntity, - DefaultParameterName, - FetchFrom, ModelType, PriceConfig, PriceInfo, PriceType, ) from core.model_runtime.errors.invoke import InvokeAuthorizationError, InvokeError -from core.model_runtime.model_providers.__base.tokenizers.gpt2_tokenzier import GPT2Tokenizer from core.plugin.entities.plugin_daemon import PluginModelProviderEntity -from core.tools.utils.yaml_utils import load_yaml_file +from core.plugin.manager.model import PluginModelManager class AIModel: @@ -117,93 +109,7 @@ def get_price(self, model: str, credentials: dict, price_type: PriceType, tokens currency=price_config.currency, ) - def predefined_models(self) -> list[AIModelEntity]: - """ - Get all predefined models for given provider. 
- - :return: - """ - if self.model_schemas: - return self.model_schemas - - model_schemas = [] - - # get module name - model_type = self.__class__.__module__.split(".")[-1] - - # get provider name - provider_name = self.__class__.__module__.split(".")[-3] - - # get the path of current classes - current_path = os.path.abspath(__file__) - # get parent path of the current path - provider_model_type_path = os.path.join( - os.path.dirname(os.path.dirname(current_path)), provider_name, model_type - ) - - # get all yaml files path under provider_model_type_path that do not start with __ - model_schema_yaml_paths = [ - os.path.join(provider_model_type_path, model_schema_yaml) - for model_schema_yaml in os.listdir(provider_model_type_path) - if not model_schema_yaml.startswith("__") - and not model_schema_yaml.startswith("_") - and os.path.isfile(os.path.join(provider_model_type_path, model_schema_yaml)) - and model_schema_yaml.endswith(".yaml") - ] - - # get _position.yaml file path - position_map = get_position_map(provider_model_type_path) - - # traverse all model_schema_yaml_paths - for model_schema_yaml_path in model_schema_yaml_paths: - # read yaml data from yaml file - yaml_data = load_yaml_file(model_schema_yaml_path) - - new_parameter_rules = [] - for parameter_rule in yaml_data.get("parameter_rules", []): - if "use_template" in parameter_rule: - try: - default_parameter_name = DefaultParameterName.value_of(parameter_rule["use_template"]) - default_parameter_rule = self._get_default_parameter_rule_variable_map(default_parameter_name) - copy_default_parameter_rule = default_parameter_rule.copy() - copy_default_parameter_rule.update(parameter_rule) - parameter_rule = copy_default_parameter_rule - except ValueError: - pass - - if "label" not in parameter_rule: - parameter_rule["label"] = {"zh_Hans": parameter_rule["name"], "en_US": parameter_rule["name"]} - - new_parameter_rules.append(parameter_rule) - - yaml_data["parameter_rules"] = new_parameter_rules - - if 
"label" not in yaml_data: - yaml_data["label"] = {"zh_Hans": yaml_data["model"], "en_US": yaml_data["model"]} - - yaml_data["fetch_from"] = FetchFrom.PREDEFINED_MODEL.value - - try: - # yaml_data to entity - model_schema = AIModelEntity(**yaml_data) - except Exception as e: - model_schema_yaml_file_name = os.path.basename(model_schema_yaml_path).rstrip(".yaml") - raise Exception( - f"Invalid model schema for {provider_name}.{model_type}.{model_schema_yaml_file_name}: {str(e)}" - ) - - # cache model schema - model_schemas.append(model_schema) - - # resort model schemas by position - model_schemas = sort_by_position_map(position_map, model_schemas, lambda x: x.model) - - # cache model schemas - self.model_schemas = model_schemas - - return model_schemas - - def get_model_schema(self, model: str, credentials: Optional[Mapping] = None) -> Optional[AIModelEntity]: + def get_model_schema(self, model: str, credentials: Optional[dict] = None) -> Optional[AIModelEntity]: """ Get model schema by model name and credentials @@ -211,117 +117,13 @@ def get_model_schema(self, model: str, credentials: Optional[Mapping] = None) -> :param credentials: model credentials :return: model schema """ - # get predefined models (predefined_models) - models = self.predefined_models() - - model_map = {model.model: model for model in models} - if model in model_map: - return model_map[model] - - if credentials: - model_schema = self.get_customizable_model_schema_from_credentials(model, credentials) - if model_schema: - return model_schema - - return None - - def get_customizable_model_schema_from_credentials( - self, model: str, credentials: Mapping - ) -> Optional[AIModelEntity]: - """ - Get customizable model schema from credentials - - :param model: model name - :param credentials: model credentials - :return: model schema - """ - return self._get_customizable_model_schema(model, credentials) - - def _get_customizable_model_schema(self, model: str, credentials: Mapping) -> 
Optional[AIModelEntity]: - """ - Get customizable model schema and fill in the template - """ - schema = self.get_customizable_model_schema(model, credentials) - - if not schema: - return None - - # fill in the template - new_parameter_rules = [] - for parameter_rule in schema.parameter_rules: - if parameter_rule.use_template: - try: - default_parameter_name = DefaultParameterName.value_of(parameter_rule.use_template) - default_parameter_rule = self._get_default_parameter_rule_variable_map(default_parameter_name) - if not parameter_rule.max and "max" in default_parameter_rule: - parameter_rule.max = default_parameter_rule["max"] - if not parameter_rule.min and "min" in default_parameter_rule: - parameter_rule.min = default_parameter_rule["min"] - if not parameter_rule.default and "default" in default_parameter_rule: - parameter_rule.default = default_parameter_rule["default"] - if not parameter_rule.precision and "precision" in default_parameter_rule: - parameter_rule.precision = default_parameter_rule["precision"] - if not parameter_rule.required and "required" in default_parameter_rule: - parameter_rule.required = default_parameter_rule["required"] - if not parameter_rule.help and "help" in default_parameter_rule: - parameter_rule.help = I18nObject( - en_US=default_parameter_rule["help"]["en_US"], - ) - if ( - parameter_rule.help - and not parameter_rule.help.en_US - and ("help" in default_parameter_rule and "en_US" in default_parameter_rule["help"]) - ): - parameter_rule.help.en_US = default_parameter_rule["help"]["en_US"] - if ( - parameter_rule.help - and not parameter_rule.help.zh_Hans - and ("help" in default_parameter_rule and "zh_Hans" in default_parameter_rule["help"]) - ): - parameter_rule.help.zh_Hans = default_parameter_rule["help"].get( - "zh_Hans", default_parameter_rule["help"]["en_US"] - ) - except ValueError: - pass - - new_parameter_rules.append(parameter_rule) - - schema.parameter_rules = new_parameter_rules - - return schema - - def 
get_customizable_model_schema(self, model: str, credentials: Mapping) -> Optional[AIModelEntity]: - """ - Get customizable model schema - - :param model: model name - :param credentials: model credentials - :return: model schema - """ - return None - - def _get_default_parameter_rule_variable_map(self, name: DefaultParameterName) -> dict: - """ - Get default parameter rule for given name - - :param name: parameter name - :return: parameter rule - """ - default_parameter_rule = PARAMETER_RULE_TEMPLATE.get(name) - - if not default_parameter_rule: - raise Exception(f"Invalid model parameter rule name {name}") - - return default_parameter_rule - - def _get_num_tokens_by_gpt2(self, text: str) -> int: - """ - Get number of tokens for given prompt messages by gpt2 - Some provider models do not provide an interface for obtaining the number of tokens. - Here, the gpt2 tokenizer is used to calculate the number of tokens. - This method can be executed offline, and the gpt2 tokenizer has been cached in the project. - - :param text: plain text of prompt. 
You need to convert the original message to plain text - :return: number of tokens - """ - return GPT2Tokenizer.get_num_tokens(text) + plugin_model_manager = PluginModelManager() + return plugin_model_manager.get_model_schema( + tenant_id=self.tenant_id, + user_id="unknown", + plugin_id=self.plugin_id, + provider=self.provider_name, + model_type=self.model_type.value, + model=model, + credentials=credentials or {}, + ) diff --git a/api/core/model_runtime/model_providers/__base/audio.mp3 b/api/core/model_runtime/model_providers/__base/audio.mp3 deleted file mode 100644 index 7c86e02e160909223668c7b21a60b68afc74ef98..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 218880 zcmcG0cU%)$`|qR>0t85?2}MIdYQTghO-T?C6%_@#E?|uKe_w%`b+?x!iKJ%Q(e9v>vGv|B}LC1Iq0`6m9MKwl zQOLqq<%dJL>PQ|d8Lmeek1oD$l{)`(=EI!niFJ=puYcHLhj3{$8WrtKb_;(&JYUIZfDf`YPj9t!!tV+X^-In;Qw`dl@%@0fArnB(okKn+wjEjDj ztawKL&zVY^EbBl{V7?=pyFUHZPb})0FdW9ERrL}%K$<^&i;sgHl|tF^fBIMhiMsJ$ z1rhtp?N1GDq?AMx%npSD?`< z{Mgl;X8OEax*={%#2Zu63G*t(Kg(uK7$H`r!BDZ&UEb;TZ%=+OLcuNljqpey9HXwE z>(X5l8>D+uAGeke$2k~?HX`E^^aiK~raHFT)>Nq<-NmPt*=5NIH`USUO4nM!;+>;c zlTC6RS?DYsB@OS{W5heyQ#2cMrI>6|6rh2i5HTQxZ>PN~4h|eNSO@$MzO%Y`3Nepb zqpo%!W9VsUB61Vx>)W^&L2~*_iMSQvyT^_bK*%SFe3ewn-$$)5UWAYI&*#~ove-LU zPsQB~@SeQzTUF|1t;e7DTo2oGaL^{!=nC!YAMwZ0(z^RmQxRI-DB2H~Gk-<>8oA5+ zv%O4@pVc?|SK%v<-_<@m+iWYT|K;hSzt5N0mi%dOiH;|4Nz z%w4kZZr<%Ln1o$>?v=9=9TXU3H$vwQQt1SqshtTu5KH6)x3ACSo;se)lzDlt_fO+oR5COO zBjH!zo!%fc>>LoUX&2ozsW>YfWn{WRn3kg^)F+G)(`gcpKato^OB4E=Ssr5~dbIQX ztttmDDPcFH34|BI;HxwV>3TTsFDretjKB^V7ZZp&V=tm) z*yn2nqZ-gr!|Y8ZZVk-7Eed?!J6EeN1;n~M{jdG*e*3xh*YQ(tfilEY>F8j+Gt_pky9Q4p%#Ycl${$sz}8`F5@@tH-1 zE)O$%OT5x}Y3VOR0_v~qIcga^QDkEs5-?Fj)}52Cw+yJSf|Q%CjJ}VrDzuE)R0P#B zY$hZ8tGuLM()HHC;(*?+IqCMsQz4!ujIMy1qC)$$RLMJ<+c4rXtPpXTd92dd8dp!) 
z3jOUN>@f|MElBKdXcnX|snN2q20S>vp2|Actxa7xcz&pe!Ul?J*AfV-I<*&ovm%Eovod%yZd*iwRE=Z zUi$0i2XT}h=C!meGQN!AkoCZ!e{lI&PGmhe+1)}G$r))8!#2BxYEa~K8&Xg_ccZOOEq>SYkXwtp6!?B~ z*$#gO;hXXwe4&~oMYX~6TSeF8jNPr*mK%iHo@fhseWT^t;o0m~(^$l`---4wF}L`(BfhT$q2qdqK4rM5v945flZ-qY`GGWua)1(Hp3NCpV4{$r&q)u^O_I_6*)SLU13~E6p|8hC52Sek;lTu*aeSq{4ax@w z<$`2XS{4)pWCfvoQgE4q(4jC7(jd2KbivTI0ucqZHSKeLxs~_-oAH^4o;Mt9T6d=D z1KMe<^(4em5}NO?E^BD%`p)T^_)veC+8Rs!;}o^!hqKXl8$)OiYG@s-YUvtp z?fTTx^{Ew-=^AgTkF`%O>*>3=S39$(YwO-(mC3Y+%<_oQMx?9hjOw zd9VNZVX|TW%UFk`mVv2>HHMi3laUi^N`?5N?dz7rkX}e-V^dP;Kqo(5ab8kr%K$W1 zLMMY{!`^|e*fk}$zbWwj;5>((VkH#*7vHJFYl=#j#D{Ix5L#b`{2MXWES3(ujGT`T zZK+p8r2|tx*65Zl5$>W6b3Z=EL0{#&i4JiNaU1^qCa(DBxoL~$@!6c&wSP&z9Y)Ux z66mGUQgP&1 z|Kd9*)jQ|z4C@XVtoZz_>SCk2QM1nIjf^FOI_s*7cbO%6rnh&9w{hY=57ftGw?1y| zJyiVw5tr3_0D-A~67Wn~kCCfJ502NXJ{;+87OHlOyBmxLjJuobpJjH5>SOw2y2=LX zpY=jvZHy4HHS($B)GxkI5A6FTB5W(exoN`@Te9ixP`#o&7E|caEf%UFcQH1Ekscp+ zJidY5NVeHrmUc1#(=4%wYz#ZL9}M zrQj;O9A;fVgNpo;sVCoJIYJM%*r%cg7kpFs7PH|$6xtvye2XF1C2+wv1!{nA@e)@N zr>GiHNLKcI)=2e4ZId~?pVyu%by&2z(0trxL)eD!1cwEyChgbfprVV;RW;P=SkjHv zZb`8YvCi|<>g^M@S7r9kY#Up89lxJ*J6!5^pw#huspsobvc>%^HC+`JQ_vwl-<9B5 zBTyXBeA{{YbDuLbPXc|UerUGrv+q|V><4>?C$uKHx093& z{+Qu7*QEvMm&qJv!@uJyxPvv4ql>df-;*&FEW(n;Nl!u4@(n|6tL*uTQPeNbzpe*V zUqa*ap8t%`V?DF>$+(I!XzG21zFT?t+cDBdl)L1`gIQrKG{r(i_v5J06!`DlNaVbU zCT5~Zd<2~+{-0Qy4wa}Yo1!ac*%~N59f4&~mC%({Iy!jXazx^BD|6#AF(e)zn}VUU zbnpf$%6u%XLk~xn3W?Ax+{D*QR!qgS>_K#WWoIG22})=7&9b{mjpI6a9~6cT$w!-E zsYo1@O2+e-8^h7PEQxG1iH9Q2W~!kp-;I$6&;>s6lwbo|`mFWilM_+02O z^lR>J5O4J=1>V?-HN5@_c=*D2K#Y6-a1=7kbg|gyGCMiK*`hPTnVn+MW$`*W1PUh| z*(pJhP$ulV@sua`C1a&bk|0XZ96z0ypoeH_Z*>>KUdS05a}7&mbysFgR?MlgE>TSX zvs24P4u#KpJC-^Ys)b69466&$E%=JU(Ts%(J=N%rrERTHqdYP+)Ieq)s;6gW304_r zD+USSmYQe1&yM_2D;@@m4UXItU8`PurN;Su&3w$Wqp4d#$gTRA>H#@!;)F%W&kB9c zaL-;!^`BATD|h}KshEE|Rs`)?z@P_yRrq6}&qER_K5Z$gnX0In6fC`KUb^NvCp!rx zuX);R%Nz#D7EoGTkt&Fi^eo%tHnHmxWOLF2sBd8)la=(yF%`(D5$a1!g3NAu<)`PE z1bWUi^@Yq1a*XgF7kpkqz847cjivgjOUwO+^Wx*Q;UBN0Im?B|q3Q+d3pp-<_Ts?| 
z1^Tiv-sldtG_YsQx|qD;UB+! z{5Lo=O-Cl3xDg+@Ri>6EQ9{_)TmVQWLAQT%zwfviuFU~hd%1Mj_$gxi-UhXGZ?)+A++AvKanFSO zaHc=gt6$bn?kCIoy8v=Ol!KyPS-03x9=6p{f$uq|T_}b9cHomPTf))aFDALwHU>u; zYHSQrZ46@P6p7e5$#5}B1kEi2o3rb?M9(t&?Aw8m&{DWW53kkOy*w-pf_)fRu}{ga zWX85$MGxpy^oBWEO3e3@a>ll7&us&q+xm1Kvd{gesu={qe?>wnKnQRg)B|S2?qL#` zhu=bj2IQ(AetR5~kb+x{P?N|O2+8+wtGQ=|64|UGd5Etj)%uKS9YyI&`X3#Yw00{}`~ z|1iV;BSC)mY5x#;01+qtR#E?Q{rsOZBMzL^?E|{-9Df`UbNBfEhHRouD6h*nL%>|Y%52%sElBpE3Lj&u1eVI9 zvM2?<47(Sv-vFsM4;x&RKJG;l;iDB{@#6M)+3APb{jQf}@n^l_8RhK6GIm@UJN{T$ z-1Cq%qL*v%%Ln1}nxgE^$oP_}_|WrHQG`iFGAXn?TuPntm!yRc7OU{24wK#hQXx@bQ3=!t)Ltl5+*&_~AYBTS?8j1&ewZ3CA`iIj0Y9Pz%4jAypP9W`WW@l=g)}fFF^p5NQjfob)6SU8U!gOBlY4>&mM|<5zI082}nz zhAXMINGTWG%&1(fDak(VDyS6UX#!YUt(pQ~x@EaC8aVMwj^T-_a17e_IsUHF6``d_ zlqP(a?UyT}^Qr{c6(Tg(l}j@&1{f5z;J+WwJZ5o{P~2}TgLr58}O zBy}v!(k07i!`MS5e4qfH`aP>w;^>VhUN}Ixu*n_@FRr)8ecr}f*!O=)CX~!nBi`Ep*_68j|5$9dF#aCyGcORVin!@B|5wT)oR3@i){yv|K zF6+)+j4zkQEW`_7KIoY&rd9Vwk)B0+i>IifX|wiuj-|kv-XA`?ejlcY}&P#11yI3qktia zw@SYXMzJ*Fp1SWN!F}fn7NW<4^vC2ONZ^p^4>FbNaB0&ahmx*wT~hmcvL%b zy;vI(NhFYUkbgE7q%`vq3)f)g#6;=Ys_*2Za z7_J&QBKo7-A>#hX2cyDiuKjoC9nbIThlL+{d3SB2-IrJY z6g8#Z^m#t2e*pPsEO_(Rk-KVhqr2T^4wa7@+Jqk(D1!eYLfVCyuHqeaQuB&KcMK2s zTLFF@!_Nj@92?G72ZrjIU0!|Du)WTWM;hZT5$1c=u=r_rU3}WRwqi+supLzr*XBdOAIaSYfVAVj&0Yr~I7w@qKIuYeTH+P`$R zXQTDz@G84aJ+b#aSG~Dym0w|!;*~4<*}czm;;YyD(cmm(_Zje==+(&IPTM2h`dQvx zo^F@=f8}I{P@f*S+!S))D&eZ}j;z0ockoI=bLzwDpZ##c++X_J`7)uFX}Tlpz`zv; z<47mQ{ic9Ayh<%A80in!4b8F?4RlqKytfspAbj}EMRMN_H$Vcg%5X>UjzCi-+Cj>5 zO;hCTUNSe_A4#Jb5vx@z6WKZ@zAGZ+F)i7?PzG{^2vX1e5@y?qY&GO{oXMQa8yvYwWB~>jvX6NwiMISGO z-SO!T>b`=i6VtE~nhA%%_nOjI6QCaM^{t#xOZUgqsA9CVau}WQIJ{$A8;w+b8TlM- zXq*!%PNzH$38qp$8~LM(FtvOK2F)mu`o48!PCHH3PufAr#Ei4UdOWfo^Bud(WF#gl zpf_#3ldk_(kwcKtK(y&$UCdMdnli!o(SoF_{Ui^n`>~E0)18`I_FYGOf1}rriXKv8 z0JSu%ivd>$r;C7lV4h(e-${nf5KIclc}MI*ZHv|REBXNlE=J#whuVU+$xZTI-xF5( zj+wn9!&Wf13N8|{c3f>Zz=wNHtdPxX3+C-A=FgRwKJ=cGRDhElGfCcbe_?R1bMy55 z=7C2o{Y;S*>jU(fKT$@QJc{mgoE@Z3ODZvm%Bew?uC;MA-=>hP#4CI~55iS^!zNLu 
zp#yC&*CDcA=loq7`C8H0k=$I9xO_hkKnwP@Z(4hG)yd7D%>ol{vhX*av9m-)Qzr4h z?_nd0GUg=t4N7TUEzt5b+q+o57+H)o_@2gf=}_P+anevH0SS9W+0++@bkNF_PcD58 zE**0pIzE?)_ln9b2*)f+jujNEHEk5UZWf965{~V~msvoEsH~tEId4~Dn)>MIM&ZTq zsU|3q+Sg>)*Jh)g*)b<6j~G23@$gv4vT(T~EP-?#-%B{YS5J#A+Zr)a7BN!J8nL0eqlwxnM{UQrjt{YJMxXlduaXA*H%>SV zuXI>b3+Vrp?|*RcH8GX*bGSzLP313>2tPt)x{=1g))M8v{wZ@lG8(sts#>42M>FK; zy0Ind6R@Z5cEXbbrk?Uq4$#8N)sPT%4o!ZgiO?8-Ed7{7r0?&-6Ue3y_V1)0L#jtG zbw)Rnm=qKTP^Hi`we|CV*GfCOf>uc(!190CJ?a)IaRNEr26&><=qa%Uz7;Lyi)k1JFYm`_ z{qcmG0oY(b4N&)|(5P3CU6-(0>IudEzK-_sF3;X*yI;yy{bWBq)W_xEYWF$Nod2b>7V=%`~Rr?#-oOwMO?Ic?;q`#QNB_(O$x z=X$#C1aMlWXm$h-J$76mk54kGRe`f|C4kQU#--mUpYN~42743sPJX5^A`;loD(`cy33RRt&XMInr~H z(wAT6PyV*z93h`z)%SxsIb6T>RMv%zhkc%B-(7yL>A0aNy?Z#wv_Jcs@Y{xF%tCR< ziE~@Rwna9&E*zx6 zZGQ9?U4Gb3ETDRcf&&n6!1F*C97ti3l&9Qf=_W9}-=FLt#^FQ&EQy1*z++Eu6y}~Y z=tn-}#-Wt~0tW!}03{}N3@sqC$GCS6M|rPAq%kF|6yY=s1pwVgDJ;oAUFBOTho&A{ zhXz)0;Frk&27pnM#4cV>+Md{iDDcgvz?Cc;P88%A6f}s&st?ex996}>QUX>9U!b@2O=t4?$PDaP5yf9~n$uSD-~ zWweM*sGpi;i20Sm>|gb%NTaDd=G4P8X#>GBn6?E6lMJJ%c=WxT7iwpba9A|b^mP-9 zL38n9VY%r@5>Dl8-Kj$>MFN_MzLSq18~2H`VQL?`B$;@yVIdyyf_@)LV9@LoU8hUO zo~nmcLeIu<6o4QhWQZ|@7XpBBI>TUN>U#`YoD%Eg*8GrE1@eRQrw11o<#yZTUdbsd zJNxan>j=0io35i@7Tem08zSpTyczV_@`de~;;Yx3saN0WJ@V4w>Mzggm;(Y6Ey znKEyQ3!-0x<01JV$24?@OA85Vz{*LcQ3$M)qe0O`T#)#Q`l<-rAwLn(?DBz2?r`csm)6-C%qR{OKHE3m2l+r($Tb$D<(lc@&=CD|-dL zYnavx(YN321aDiaUpdK1;&Z#)?2Q(B!z!WsR}7O!23cLlRJ$qb!KO5nQ$^Fg%+Fjt zcPhN1qRB2YAfCA*M1zv(Qg0Mi_3+Y$$lSiNZ6nOFq!MLccj%NZcjKg-)4mbLq7S`? zxue&i0_7rpmXRhU&f`AqwZx;|=wRo&UEX3%8)ftR#RWg%9NA+vTWZWtXSGeGO4VC( zt*o4(>sV5y9stJxmupb+?qdI^;kOP21qE+cwGWp}Ty9%reqJGSUb|GsSnCa1uV{ZV zm9%Rz-07qI?16a~(Pp9Q+G&5^sVqy&^`>4aiJNTBWh}7hDk|r~S8J-=D9G$d^KbyX z`UCNjt`FOrvU&|0Itn7;(J9tWmv?NF```1YUWtsVnpt8kBB9g)3=~(ci@^nBW34)jQCGegP%IrNWPAH-ny!xbY%q_}PiF@R z1~!Vx(s6t{vA-uCPt*-M^TkD>Bz=k?jNE@J(iC>U@#6k*nx(a-D3TzGRKh>sA<9y! 
zeN;wjp^1RkNJir@6kWgmZgS4m47{Ty=le-8f!J9K!?`xZ>Uj=lZ%_b^V7gE83x$EFkTW-~!9W!&*g<)y(`yn^J9?mtpbpMHu z>9fM-wyT|XE5Rm%v-5xL2f-()?|v&Bt^4rjRn3V5cdhGVuWsa&>W-=(nAJtfdhJ~(Bc;oTXTUySBiAIJOQdH-nKYlas=urc7Q;=^ zGJA1WJ!wtR8_)CMlq@!dW+sNV46KX%Yd<=S5PK{0#T$xvYCr{r9w612U(18lQsUCQ zTk+UH4ioy}zySrmC#_#Y_M1>x_8@QL&UA=mry zCR6IhExvEoSPOeQ_D`AOL|I*tR9tOg1Ruxr4Iw$;h5g&=P8I6Q&??@}F+74e++`06Q!#A0OhAG;G8 zh`0aw+m~3dsjhlkT|w~OnvKV|zq?~unD^h^r2iA?>i@oc!Ks&i+U&M`R~WKVWN)jy z|7-n~XElqw#Z@I?yidH@)nK*kP;p`NlnS#qoMCOQRJn_lqSOY;Sz^~N#`K|xOtmR% z&OL_GoS00pg%86eP^31GjaRlO!yk$7QGSKOV;~t8!)5Nr^Dz$#T59onuo#A3u~LPc zp;z)GCCbnByizq;Q2@$AM(pRq>;I64?3KbM^_Nr9`M*~2z>?%YCL`uopY2ncUww0H?LPZ2!F>by zH>FHStRzVsF(qY*8FhMUlMyrapOX7JCzeGI-P|G`b?!;5zZbHftS?lWSgoJg*BLQi zcXmL$ACeRAgJ%JVe`T7hpGpQONYaL!hbq%hOr-2-8ub`h7JPeW$k-ovqpN{}9ptV*j<5 zse@3pXOiG3I6z`*!B9#PEL)~Db&}N0^hQP^ncfT-HV%v6oas0B@#tJU1uF#pA%i5b;QjI>L|<0iNsxqbqv&<&-=z}4jRz$*^Iuz(skUS3l*<@o8)S+!N$kqVQ|1h z?|_HF!Pj)jes5d-Iu+}cHa?pk2Khkim5@SQ(96J}mum`qaN8AX=VX@3<^>3Sx>g*) zqYP&u*a+Pr6dDRZ{r%t)J(-Q!pKjIpa#K#?+l`F3S{KVUX}zi59Dd7a=!vY+0yelx zg}wLx2foWd^IyRN@OOL)>|z&l@?y?Z0&#E#K>X7REri9K{S@BKb;uuSXS>EG+#j5snt!J9Mt@kZ$QJ_9Am1HE|Rqt4$rMt68Pib%=5Kz!m2H zImtbSe*b9xAU*#KZD`Ab9cu7T!q?x5gj0uH?}xc|)d{;+mnyMAP}I5_-?~~?*vU=o z{B#2BgT%U|rSMOsq;Ez4+ON4fF(9G&zv~+cN%87pr3wAC31{8)JCx>~x9}R0>-T{+ z`mF=p#DRkF0e)2fKK(xUB6B}AdDJ0%bbg1=ytB`|{s7`cOU0eKz_+_+qK* z6`^dg6v~f4g4E=FYuq+DL~dT=GHRorwNJl)L=h_F%zK2rRl?VwnET0UFWHNY?1g*m z+1CFF7saT5hVbK3+-&+RbBsCxJt(Gw;HM+G0u!~|HfUlED^}AQl&PxKoFbxEv6UCoSE(>~Aw&^}DB=a+p^!t=O<%=teK64i@vk0yn|;-9 zRzCxhjXR^_&(FMrHs9pM8sMDtd5DW|nVBS9Viv)Iwt+n7(IzCS%*a%W+D;{*DQpgj z&PM~`x@bfBJ>~tWUS@#33KCz5&ZTv$0&7k{yRje95Z?)6J-I_%C}?-~`3(2&i=pLh zT8I=yuaJ07v}!Iu1SY6@SsvQGl|oxI3dvbv%d&q3uHL66Swkb9DTt?j@|HQ&kHng<~lZ)U!FWXSdUFVgMH#p|6}VQL)|FG8Dj@h(-|&q-p1KZ zk#!~jz7g{)H2*a>L(ezsc7>^FWfg9+T?0)h;ItmshxuCKB6g!BPK!06Cj_yELuIp= z3{qLcOJXuL`d0t0V-8n_>KP$q8$D6;`-5LTo!{AI>sB(fTh5lUuD@TE;-Gh32$wa) zg2ieWu~@t-WLHF5+tjXI5tlC~O!8jMNux?)V(#22Ian1M6B-4Uuvo=z#-CWluLv=$ 
zP*!m<6fwnpYN{_VH-`GuwhZ0sQ)-)3*l+);?`qb-(PLc(h+mGuvA$r$<`NjY zlfD9%1p36=8z6@jGNq%?!ouS4X$wzKShi>4+rp9^3!FAFSl=-C!l3AF>e9oXujT>7 zK;;pihbvM%=AZhm1G#kk;EnXM783{b@8R!Js$wp-jg6>6yPo$b*xW0xkr(Fc@7X=| zuEeF;%y83hIFpzISnsg=jqAoDik4oTYiPe8owR2gv&2eGJvc_D zj`jDGVI(>R{wf?DF;LEcU67=dN2(k8vqjEUMS@N)UKWR~M^gQNx~+z<$Fx75$fyU!N5&+=0#`4Fjcq778oXvHgtpC|#_hSns^ zLbf}up9mR|^3Mb=Xbh#PFC;HMLX1Y!pD1^p-5A6!gsgj6I(1fa;UlC#nzKauDA)@z z8;|1>Af98G0$-4t{lx&9f435a0|NN(b?g?BVn9&vHP#$$Rj?PwS59wOP+o#87_2<} zHqr35!kl5CCkM)`{Gy;pE*Ll&pbXSu7ZJ2W!AyMgCDe9x1cJsAk5JagOez&Sa;IjQ z2cSJUsL0!KO()zEA2$&fKyAyG(o#Y2&84GrjKcis`8g( zqGrQ9@Pp$n7cb>+G}FpV`R9uu^dFQ1*jVED*Wo97-~V#+_uL(Ke?pB!y@wcmU+PH@ zAVMBQ_47nS!@ImPE7(+5z^+IIWzJtToAM>=bd)pOoeU_444g$BHadL@xL=7>;Ucs) zerr@&Ws0a;y%=^u?5BgeXO7@^Ia*gI6rF+#CRv`fnH%6R=+%WVWl7}GN#=G5R>C_X z!FInnstr?vwNXZ*#Rm3ye!Z*iP_4g>$EUp zzN=CCkROzPu%Uh3N=!Er5m1Za(d<~^AQkxDwX;_v z!4isc@bIKWXn*i0pG*Y7-l0eyy{NKn0L6DS%6gM*e;@t3Z?qY%J2yh0UsTyU4JeyB8ekesZRo{V zNISO{ZK8$t=HZA^0X9P!T1*yL;%Hsd&kZYwtVwENV* zaZ;^j&Bw>-*G<2?2_;RRxs4{-(^%_|1wT9K+M3c`9m5LF^SGf6n`wSq`=%0gCVUcjo>6O_iQy`;R6;a_yMbfD#V~3WH^DtUK zrB3D)^i02FrSBa4Krsp6xo{~k2jq)`M##CTZL?Z&dcMKFdRdy@XqSG{0Qbl0iIrrOqXGZnKBF%3SmeV z5*um8_6?W0M5e=#A!f`Dtbz(98S6-FJ!N91)&^NEle5=4F)gSb#e8IbC69es#g zl>!l|(uoRr6vVX}Vn#dSsVEGjZ-^Fc2eGXnW&~7Bi(ZE}%0?>7#ua2LM?Dk|;%d<% z>_r4WXOtl{ueU440B=WVY(TD0^DP3y7Z(+%aifWE=@?5%We=jEs%gHu@Z8&9+F3Yr#*)((Nr5Xm{*Uu)N_(M%J7g%CX^BVMw;`={Qlz+)S(a3u{p(BAG9#YSKxb$ zd_*z-{?ctwI@t^qOqrdIbgvNoLp<2C$iE*E2E8$3<4*ayzE#tq>4GF-@pNOUWztJy zQT*t6<6=Z_d)C^*(e)2zT}IbuwuAj19uL|Fa$Uqmtnp6POV*#KO^8P%JFlVPy#AtQ1Z8Z-Mq^$x!WD5bJkQ|@ zBG1j0fN;-qxOmYm+@Yx{BEq3^UPF_tOblf-D#3&Smd*E+^lU8_V_KWj^(U)sR~MHk z)+z5d?1#Wtd^+QIj_q1fBf_4m0ih732)L<~rAB1HNAzfoN@b=)XCy=*-Ev_!;EPE> z9mc%GjciDjmdf0h@C*u93+abj4G}74BjvbM6olml=;_R~WdqANnb0N@tA8C{sk$gJHHDb13oZI5-L9&;p@oHTU%u=@5X z-!x{kivBnpdaKUQOlolv39Vr98j=_m{rgsvwOi}YgHziqid8xfHcH(U_{Q7$szLjM zg*k*fi-#cpYVVSQvK^kY1%x(;dOBXqro~&y+WqC*oE}Y!x6Mi2`;hZ{3kHtewU74@ 
zZ?&;lEa;iKYjN)V)(|L@#4KZ27`!{iu+S}NTM}=D&iONZ0%CfX|Eq&+2c@8MLfygs zvaKLlySJy4mjLa8g@1Wi(9^XIIxVYgE7b6x2ixp_n_h1I&+ykBqBng_Jz&n(J5Hl6 z_2nU3?USIk5gW49wOo9#24=LxhY;x(m2IT1MPaW-Ul{Ay-|I;JCn6yH#i5#z-*31n zP@bJrF2~@cyi|l15#uMK(7gR9eiRy>OOz7X-h8Bt1h*MudkehFX9ur@cdz&UoEBwa z;;Lq~?dO*m#*%bZX~JO2x=X$T_QLy9@u|V<2=a2DuCObZc`$3}!(m3~#k1Yk_T9&W zN}9hqCSToL`L-StWU$(1^W|HaDmz9rbkrCAsyS-beDc$eEw^jG$-e7~C+YP-x2Wsf zp5odk>p2)7k6~dJQXm0#xYfYb40hxiE;Ns^Uk_JHV}MiUZ1S#DkX8kt<21n=0b>q& z`db7epVe$ln0w*9?V0Uukae}U3t?R=maVIkGvTJdH;l|vw+3S4%JtTV9>SnE7`xfz zQ5Ll%@r~#8G_qVg-h}(RxIAbVDyM|3U}#Xj2D!OATx=A-7SBO{ubY}QG;N@-gLtmp z>hyU%ME~@M`oI6ROe5C)NTU@D&1LbMK^pEh;WqA&?P+RIW!9OLOFm z;WNAI5cFVZpDC`Mo32ijI7MG|51t?4Z`&pG5HPK(F={icdYeto3AvY>}=hXxJp9!WDPaEiL%*7 z51|jcyyYi(DoOn(dGx6Ivi(|>E1LnaTe=M|saFD(R>Yr{Vmk);&=GTKOy8ymA-5OS zP{Mv|d7F7VhBLB-uWiRrmDINwVhK!KMrfEF1KoiUH+Sox_HdcLR^kXDY%^C(U{CG2 z9g}qzLEa1JjCf#fJFEF|efNk7iP$@b^opD@x0xnK9u=R54{+m5nC9zOlhp+p!K#dr zUVG?0Nl71YTV1G44u$tOu!mvA)e>Ar4DE6@UuT4@Z$n1y+7%9UTw3$Umi$K$28+jL z%0Up??T4n~e$zk#D7gV2(&Hd1VG>|gZwxkM3kz2ZAOp&rCcecuVOE%e8okBG#YKW+ zJwxF$5M4YhP8iEKI__o1iNFq%-L*q|7Et6Cg#tpss`m&ZZCtTX9?~4_e*4;h?DvwnASL~ zR|4l;=+@(I6z__qT~eiqiH%CdBFw%>lsTEq(BD#!Dk9?v>9-q2-2DTVdpgqw_}Ybx z<@bL%Rw-&LmhtU-cStP$Mbc8=0^QEdo47HJT%XIfCTE$uw%JmP?U3wUUXP$|1XD|` z?ui*9i}#=z>REja@J-IE`kOCEk20YG2<$(cH!JmRDz((%`qUt7;l;LAkZ^qivjOTL zK-v&lRnpfx>v7?xxinpuJIQ8rM~o1Tj73!X0ugBuLuB-x=F1V1KGhNKSz=&JW?!+I zxlE~Y`=P~{f+z^V!L4uQ$#3O};NYas{-s+>R~Nn(r7mnh=VqX#Mu^{VFg(eN;)~{} zk7b~9v(U;2i;f?vyLX7GdS2$GDAJ47C{jNurm}s@EUN}(kUG|}8{$LWT%hVLk)Rhb znyw21W5Kb`o4DQk^<|fP`+G4yxoq-mrs7a^AerT@ah*^(&LZ?D${LKHc1bSWz8X}GcnBV>LVC3FTjtT<-E{S0j0FV$7 z0LTA0Kmp<;ChfqbKIJfgqXuZZ>um-Q!0Am7bniNxR+ZopmzViLTWBDp|;?F1V$l3TJ)osu=4yS?4?#+}3Qfp*7#CmwOrT^)iRrJ9l5p2-@JA#5376bi zLxWmDFduLniZ@9h?0bguvt&962E2YBIj1*mcKYWwxIu}5!A&HlZ(CJZ}pW$-99b?(o#8aw@% zuRLGaJxe(Bm2m44;O2Y%Mw?a6f{r(aL|-s zrS&9iH^Ax6wNO!dD1;jlHlpbHF4lN!fXm$_P|vuY0BEBvqNpG7WQ7f)!UI;j4VpG!A){k zaZ$vL*RcnGo*P%5-uU=)N}a)%u0P^4v>5w-U4H(}gXUii;6!kav5|^2zwAwGQ92oN 
z!CS?q{eEZ2Dpa`trU|TBM+Q}`_fO+MCHAHcJ!BG-&ZIMV4JhI^QJxYT<5z%f{d2Vq zmZ8PoiO*XBalX=RuGN7<^&6Lk&nPZNiMx*Nt(&gUIYT9>{~EN%tX@G1LyC%^ zx0qMOU&pe&xfxj0syK?Dw?B!9H^b0!(>Y8L#TTOuLEF7i<;=1D!AJ?%BQm zyUUzP`oQkchhkS%FD!2|w*gu^Bky@VX%cSdsrfv0Wq06G+O?@wtYS=}L*fS7fZ#ah zIA#QLe+B!|cQCI|E+oh~9yf-bg2K2t&|9dG|0{S8W~85Q6dE9vUIfq z3y9tZW!DH_{Ll4pRW^Vb}{%L9i`Wxl?wb8cbIYW{vIn!LxR3%Ocw+6MIW znG6jx?;KMw1Dz+0!O6V+=m$EE`F%3Qz1g?@ueCa-mEFsqb_yH>J~xkaRbN@W!O(Ku zld+Bba@6!%Z^of_qM$UV=8gLz{)l|obvyjeD2fr*^hoRK%PUFhCb@2#>hA`rN~%i2 zH#N$?qUJ4nyc;|^ukifB=k(v4*E)jBXv~*Qy^j<1_0A7Ig@1qR6Ih6-)RBmE{V&4N z_ZhkvBt2M$_UaF>ML+aAFXh@Z6OmJE1(B?M%BR$6TcNdz2#z`BIt&2=aDgN-RvD%W zTM|PLeW}fnGDtF+|aY3lRbx|7$u3J`VLjb1lT! zWSj@OGF2(d{uKwp7X_TKEjLcq4aZ)qe|{dQqeNFSN`Pq;%^C)Q@i+n)>!3IBF9hZu zLn|wnVa@}b$Mm71N{y@`x-S+DVLR^wZw`)tPP}NTY(ob)9l=|?3!x_Ama+`Xk_(v` z256#@K?X~!`J^uvb8#Ic_C&U;AXLW(*acyA9e z;aAEpY_d=c$~8-laSs;`sMG76SeG16i66EH9Do)?)hQ3v1BPdZp+J3|0e_DZ%t3vh z0fKgb>!2R+L{fsvazwQpfRbE_dJ5psqj*`^#q>?|hCcKny*wbQXx#9(Qp8*mOJ@nQ z?G?A?4Tx^uFS7!F&fFROV&46@;0rw)+H+o@UXVO9^#Nyv(Ax&h+dyfyKIBSigK|uC z1>ayb^yu4_0iqoDgYrLS!+fCgp-H`hX9fL3n2INJ61YfPiN&R)(N6#)@221HE)C)Ch;^u~VMah@aKKhA4p6kVQlxj)B6-)LPy2pswFm z;L)^pGVexmJLJ&SaH~~RsAE!Btte7U85(ch8Oh?3RCXl`&6%~)EepPt9LGl+i)Zzs+IHoYJ^UFKp$X<#SJCE{cw@ZO@m)^d1cSTL9PG?Q}S_YFo7<`?zq zPW#HQ)D6IzQ}F1kBLz7=Qr~BGS=W7kH_K=xv_0cdGU>a`^jTNbiX~Qh`f9aLnC~4) z%#sG``-qc@Ma}Q}kZ8?UWpSsg##T4~=l0P97mSD(!oTwQ;mJ47x3=vlV*c#$Gdn;1 z_Fpf;zEkE!FWPQ*-r3D$-M3pl`)uw&1zjCBy{zwDfIeh9xzpJ%^3sk*)u7Az+&4p` zrwdx&o3<8@v4+C4L98$dNo~o*cTMw=_0X-Q<^D{$#ZxjT6#w5jp#htq^E zs!-Tw6cDqV>ZCPP8p`Hq185XuHn$kz;ra~xTLfPSFtDh25uMI!Fa-HJZn4O9V`Isg zcmd4J__$Y##|^|H4CvP29b|!W!<;D`CLKf5y+dSN9bQaCFcSh9XpSuS=1oO>Ftju* zl_WrPID|->y`H{+hF?iiQn@50N0;Tj00(mmQaYO^^HR2`h(szP;ObOL54N#Q>JFPq ziH(w<$Py^7C<;1g9YXs#jn1ZC_SwNF+rQ4<`0iwT!y@ku?f2Q|+6UQnf%v*WpS%9) zgNqI=k``r$`Ty=br%qA9%Wkkq-C(+fTjjNWUzV+KcXm#NdPzqAHFl$VN#|mA#mRR` zw0F)0w@#jaowCuLk%na3oL~R_`A%?Ekhs)-bU5F{hk?x8C26}!#6G_@|8{Yj4D;XU0ayIqry{4lo&zU;@dPiIve 
z-fzZUkFEI6%PUX+7?vt@9r5)OJLVaky&HWa{yS{U(S+Vh!Xb{Z@PnfM2SqU=uoQUl z5edjP6s8>jxg(X_ z%>$sPAS;)>kZnV!r{VEFuzpq|#3Y1`E8huHPD4~mOp9`Xsfxzc(S%rm(2A?T%V3g} zMJJMoBvMwo4<>64bq?|Sa)`0;!bBiYCZ~3{-Da&a5hQzew|1|*W5VYvaYqlI;^%q{ zpUkwUhR!MPu!_autU%~tHjU2Nb&5uN>PBkVqOJP2JCLnnwC$FOr-^N{Yxul*iEj-~ zdCbfcPaZzW@`h4Z%oD#|*=ypOoObK*iFS>Dy2iiJemYPQw#YlyCUs+WSfJI8e@Pwx zJ*Lj2j?5CU2z!tF7i@IGEP?3k&7{pG=u=UGE9pQWS_}gkrfhLh7#c;CdCM% zLHu=i9fDAem3ZxS?G=qz))wg;3AY_)sN0beTuq{((p3U$7(OB}Sc{N#C8pJ%k#Dn% znWr6VsiZkto5ZKGpG!lkQ>!7a%{m?g(_wbLW|dP$GdxLT-9XhnueIhk3=XNsylr!^JfAx$-5^|IR15D2#;&5wV;KDt;^ zAw9w;@#a7S7x#&e>C$Li&AJj5LE$;Ag|f0}=LpeXBDxo3k;K>M?phewyNT-;V!4(G zD2v4(FrWAouJQA*V*4zh5lvdTbec?+g~`Gw@jg~uUHVFuG7ZmW`?-n^TF)nS`|THj z$NE!Fy4Xq}!{JCuft)^~Ba+yl8X#91VuiG9FTo;qsRC2BHGX}1gtGJ-oL710k*$0*4^z27X=_@P?$qM{oGs2!9%kD`C|Y+3 zvU@T9LnUw`^R!z=kc6f^HAHZQG$*6SPT9c;hW3=kP8!lYMzfPGkb1Z7(bmx2YV+e- z*}*r)=)w$A+;Ntvc~Xb6rer~l;ixi`DU_~z8&uZQdNQd4Y*pmelHjwWaw>N#k1Qq0 zu`FzI1}a~@V?J@3_Qm?M^KckE)%DhH5)>)Uh|6;gB{5q4oLT~1SCSc;rTU_DL4|*7 zF_tZ^fhH_2#_He8tN?bP1gK&eWwxv`!=|zoc>XLx*i>eBaYhwuHqKBTqH3c77g$CV z=fQuHE}GPzM;i7^vlXT9G_VzkM@fAgK{PQq23KH>DKzenh5!u3^eZB-0&A}mXNqCj$sa$ld9ITyiK}!+-rs1`zuP{u&wBWYNe^l1R4l?bNz1tvW=rM+M%=AYkIJgu z{xE+_zhZeT6}0ntwhuDa{^7d5+I;;bH{W%>yY%ZXxlU8W5&CILBxMU? zp-P}25lRE3iBRq!+k}{?;^E6T1g(QA8gV6*AcqJAd{r!EKWl=oG|(;68ue$9CXD*? 
z>8)HVxrKGB4gK9JiF!b&A zq#r-q8+V`pCy8pz4sP0G`nci4y2_pT-7|+>n;VXO{nE?wmoLu$mFjeizW*W}`^kSw z#N}MT4m(XnWBDheg;|lL7-6=pwq)7+qu9Pef8HXMp>K_7{ZY7;1WHXyMr()snT~He zk4Gr6WyxjN2yE#R6afzl^9|A21qHAuWP|DE>M9(C5)=jCY*Fr)E@D-p;6`7enS)(M zCy7BcD=r~7C9qBzC7Sm;k2B&M2-5pfqdf_j2Iu?EjpA|}uI~}1E{AoBe8v3P;K^Sw z4S@w7zO{kr4+9IV#u=a;I%;uJ=wuOl23cjd_VwEWhX^0pJX(dyw~sGEWe#e6x z&x!nZj!B#wq&EN-k9_fGD@5ga=eXYyjC(WG$M05sMt!WG=G>T}{-l5B_<7ngu->Jl z!{-bgE~b#)yn}E*pM!zdQg>@M1)voB9X}uW;Gii93QU|5Dm&6}{!LC_ zahAe3HVL~5keP6&+wkbJ#X_PWBcT8fMF75V}&LHNQ*7E+KyBVj2V3Jh)#tEQkZaWH%AD&2?rknmw%o(jqHN;@7~9&5v_yxxU4xFj4(|A;yWz;kWjupE@$m1 z;LvgXpd{9f)`*+QXc?${S2JItYM7x)8D~X=400oYv0S5L?PuvOV;_NF6CK33M-=fU z_#(q}BZ(9QgjSAr*l3EUDjvi+n~;VY0Jz~JZ~=?}&4?w~Xv;T28U*tiwb=H7_!Ie< zors0f;OV>!Vv?us*+QRImmunRB&UBO=J>l+6*JT(S?^+E_!5KuYxDKQBo9u1`R$Au z>bUH723*#YraXyBwyP>+w+~Nj_a;rPfV|fI6GyyZsmkexbSW^1d_rkCZ8AegkKrF_ zYAJjrO>vK`T;HS%UY}sU;`8y&6)R{k1rap9^&<;^e?D)%=-C{d>I5y_ku41tH=WAB z9u3nR)H2R_1)rNo<`X`gs;oR^RVS{q6hn8jWX$x+H)HR*83gZ_|XrQ?`4NQh6u9DYy>K+$$cU>(>qpmxD?hxJA?GM+{0_*nv zX4ByLIil`nzH-B8tpmP4kRrMp*t_1@fwELTcV$9?QR6o*nf=4h<0tmyeeXi-e>n2< zT#HZB3O>c1$kXmMI@;{wcW3TO*{69G7dyM2$3psxo!xGk-4EwRB_yP@+Y5_MO)*w z`i~Dy`|h4CIAB-UhP9+iY3G_bZd*v9*EUU8SDe@Rj^yXO-FAOQ6PkM&v+N~y*BSMt zGRbDkaI>0*>z#WA6*YBSGl6d9wfB4M=cc$Xu_9oK=fCMXnysIS0sDulSXF!yOZ45X z@=lq=oWQWl!(S|k5ZlQ_gKdb5r-Pk5m7rQ{b5pJ=UZRSr#_`uTZ(QFTHj%1!D&vNX zgy>>`i;2X{l+D|C6q~bJ6;tJ47amQ9(Nk+aw0X}^c?1D0Icry=W+R#X%ccnL){zap z6wGty6Are_U4h4jR2nd$o%{tjnP6OssR~TJgm!TMi0FqnDNKY=`D*QhNHer}hi^H! 
zk=I!E;n!u0s&SEJi)zG`t-5%9?;23RO%X5-3*}(ZD{={yyM)F-YtVU=Eh}PnC=NWL zhXEOK*)o7NQZ@BZ{I&Ro-eED=E%hUtbd8AdA!ObJSm+SkF!BXrg4qFMEk;I>0h^8?!VfyZC#&a*?3f#=HvWByH*8d{`z{C6kmq!RAN`N1uP87kLHCX~nZQdkAQhr7`D&E@eq0cDG-99&JYLn;aO z8LC)!IS80ZWLyOCI{6AyF{rwLYgdVVbAs@Z+9pDgCL$D`4f9`$&3LGMOZW%Kv-qqQ(DCm_Oy z0*}&rbPRc^g>gEtkPIrl7ajv3^Z-G)2DrlTkW#>sd0dX|=S`!@)?9qq__C?Yb0#8e zIPk1$7xvCO7Vu`;VQ`+{gH+4;AmaVVei*bz9CVFv<~mH!*eqGN(qO_tCdGEMQWsjM z2Uw@?C%&*s%tjuQl=*bbT;%b{-2Pm8>Vp48Nc1OiSwyUR<-Xq!#!MQ^ca%%EvLoC} zUe%fJFyff|m45IuuS`pp?GrTb<1Ni~unb5@=q${kXS%pZy_^_0FE7t2oY+e=*xFW6 zd<{6s#jND=a(yw4+)wT|!r&jTr5IPoRHwFH_HG&k(y|;9SpHRUja3>|f328qQitj=lqpdjzFPP=+wi|(0kXdzM+Rh2&1RW0%%f# zRWn=5zT5&8oU&?$>YQ_s`wqAWXmPhN7TSx(Ji|#Zj@&L#51V&r72W0?$_{0`#GB^@ zm3ig33KQVRIe&%LFf80fbU^RG`G4j6-=U2EdxcM-N*&xYd|uJ^{!XEe@8espHCTCz zV!XqgcZivp>y*c{V}Ck#Y#*!iu5{ZmH)%=xvnW%;KBrJoLF_=as8!Gw<7h;@#SY=w zF*EyILioh%n%()IN$2gHEH)^%WU-Rv@~z&NGGH!bq!_$Z{iV^4wg~P0DL$X~DxPJ2 zFC(Moaz#Yjrn}na`x?D-4*9XE>?7z=nkz-QX+B$gY zjC4Gnb+V&6o_wFF;XCk?R($1uzi8^MwE2FLGaRDw9dxWA|Ay=h``14uIbyK2wRI>0 zSoGS<)9AalS$nyc;(wpTGQXVx|Ekfq_=6YXB7T~@^u46{f?OUK(Kg-;>9qGRyp3qK ziskqd6NUwktYWQV**oY?=$C$_>&D zx_~|`!d>uag2cZeHtr(wi4N%!91H#X``KB*tYDs^4wTEwv&$W~wPcoW%S+5Imp2e& zhkfo^?fARuUFm_~j_|+6X*VI66caKXwp#j(C(7ean|1En%IS~TN0+zUu1>2zZCsOU z$D(J>Rj|jq;|Nc~3%l{A!39R*auEjKy%5K>rI<`MVZ!mfoBbbyp_!1e^NeKEScEjD z;mpqbnsI4NM$Id!L@G^!aTq$L1K+(g=J#vv;Cmnk-vWy9nV5{s@mI071U0V0iy!>H zy!<2?Bggj#8>)F^kLU<>7JiyV?lp_h@krd~hd_Lpf{7y+(JN zHO<^cfwd*IrF*^^mnw+gZ@xIf-laXiTG(O@qGuCCW6?wjjE)pa4I|pF{UE9NG{3Gn4Xp{ z+iyVvPp(|Tf06HhA4xaxaq2F=n2{2Xc{__TTt)!ma*jq#i*Nlu|djv>Eva< z*Sn=}EtcQn>wH#98c6L8GG7T}4OqJPYYOsfENtvs<=1f1Y&ysH)Q=$>a*~Pt*5XKb?>J zt<{4%airxe?zfh+Bt7wSpc_wYSN(}UM1AL|I;;9K{U8UmspDKZ6Gzfqt)D>Rk=w-m z^AFKqTTdLp{Hgm(b&5KIUIIHi6pSYGux^=`=E{L|iH_PM_=E73n8eZDp70!>zkgK` zpG4$lEjC_O!)>d1Qb*Q`n68x-Su4g7lM$zNx>RPBh`=2WxMe81 zo0AREKDd@dRHjN&W1VbLQzgnI+eNX9(s31Hey}+f6E)o*9Eia;iepKHSR18MlDbhW z_2P-07}yTO@_v?QPk&GbzMn>67$NgEv8XO#MD%&7oU65$Z)e^ 
zd2lfe8Sx@|rOR+!l`ajZLaY&Z&hb~UImW#Lw_M`bM*xW=YbiW_shy2fROM#q?VIzq$yiC=SMii%U z@iO!rw;SnZi-=SeN%s^ISXdiOTVv^|)HI@38n;z|6>z(0(Fl}HHKwxt(2~0axyDqb z0g*^gYZXB4Ed}VW5Wf?_iHTe!n(b$RQ{hN0L@!c{pry z8t?x659=?2r#~zx_9ESuQ}xa6pDygr{UlL5@Z#6s-v31>nQpP!CUf0foAR!0JpGfB zbx{ygORwkXcm3+}j4(i&|CSZK+$E$Xi_zYD0?Tqsp#E`@<-qJUXszsU#g5`Q(jm(#(Zp3&(7$ zRi-i6UVqi>{HkQhB=N(dUEyrbLME(#jjQ6J@`wnZVcZtLxN6m~L|arFcy{vAP*iV# zYBjLts)5c4urti32lXEkY#>x}PXg;SHfPOf&D1c!Tch!SdI4@0_E6k3$U56|KTv6< ze|Zso>1R|iD6ocd(6RjvL19n*ApYklGv%Y-N58^2zq_O8=V*}U-8klVL1A%B1|--u zBzFn6!}jx=>$O1oDm6<#Y?&JW^y60gqn*+lTT}8JGpqIX>-2Aa_uBcW;zrTZ^|h8u&$uo;WM!UbY0$R* zS8PDvhpv^|zf!Yf0T^lbduwgQr(0Um8~2>%aKbqQ>*rc>&V6g{cvZc;WZW=6pP65D z&RpbnPF+%0v8i!WqqJV83a9OeVT}ISNO$=*snHIlc6hqW0aZW~5n-^e%`bVf9w+$@kmsBR$efx7b~5 z#XEA_R{flp+u)e*m=8I$74neQj0}%^!P?rB=TDwzF?hjn&ij&QFF9Gh66=_l ziDL?JCqR|}Y2qO^xfX(vPvirG7NAZ&={JBz=lwcueGO34#}nIrqdtaH+uo!to8{Mz z%rb0;R{?g4rq%<1nzrl^{il|)cdGy&ojnH1mu3Gp6gy2@mY5IK0K0e90q7Cj@V-T5 zZ@F`AwK9SLCjb~UwR1@!my{U54NUt^dBWFc<78`*AbsAwA`uTC&h%bVOHMrJ?{w~v zTAiq_28`rMN3DYu@MoJ*`3CU^Ngt41<{q3=Od8%c|53e`D8=)E_{>&mRmDI~Wu11R z1kR{@WBMJiy6WB_{1UJXjmGrbYB%-sf%~kC8Ed}^x}=wdHRCBFWTFvknw_FuE1D49 zedu7H083t7uecXh#%G`(!D<=s_G-Qv+JqwLZ&*+Zu?)fzVD>hl28)Bu)v2RZfky(< z$*_o?94Zfw3ypCJ<~N_6kpT7$;5{Tqbqe6?N6;>*pQ4~Ke)N+=$#J1_v^d||sfUnV z{53#4DbUrq;c=$hAUv;Jv0go#*B{4Yr-TR`rs7U)nkTGP2V`v(IC@@Z`-b_Cy!g zCumW|l6z61M>yt<_TLt3$QOvrBX?g0rH)Z|L~-)X1k#`PW;zRlX6IJ{d=Ys@9`8Db zzP6OQ1@RRaC5NX)*81%&)od4?u77*=`23GT>{x)TGqX`>%3iw9-I1+LY7Yo*OnvZ& zyQ77$Gm7wA?Qyn4Qdelpk$^odg_CC~r9x_#BWGQ)aY@gY>uPMJuabpfAI^j}cZH6o z+5K?3{zdyA9wq0$6yE)HosHGmfUTjI+(U2Xz50yZ;T{m(HxoCS*;-nYJ8m2%*cZ6yp`mN+$et691MG({yq1ZxO~ChsDeor zKLw}Y0TCi$QE9R)Qn@Fi%E7Hs7E3B6JexzP4LsHY=+qLF)_&9_*jA&RYlt$)ugb0^ zaJQ-`H9Afr<7&8!H>XJ#(q(&vuH@`zXz8j(nUo*X%POR5HI18KsfmDBWfgdBt==occblz0Bqt zBwqJ`8{TlExX@^K#7Mj{2l_#I@EjPn4VwTsEhAKe55w_jFmUoCVE~Otad3;H**154 z?y#&pTmc{!z^2CrnZwOD8)x&BtuQ!{z=FUo&Gi>YbBfEL+mAC;abTy40pL>)Q0UY! 
z^v<&|l)@jFD-XbA0LJDlhAO6pcew__vOsrD0dv=)DwrrhYaonGwszw#blA5ifVsq@ zIb&8mJPcGW&j|vvXW)^^A^3ea`j>tLTMrEBF*W+hLGa)B1Urk6oIfwF95mnfj@H=N zoid7%&c$|aS%0D@LhLf<`azR92`78Y*MF^w6W_-)VEM!=&AND)7)6|_8}?QwGHrX4 znFHa>UPAg3+SRyqgyXUY?WD$tjvD9tZ^NU)_Y8u{$noEpt!WJR3Mov`oI6-;PTJsi zG|V={zPP}IZ9d|$1;5z^d7xqirVgoCI9+^0yatRvaMv&zYy(V^$~P$QqbB4Up44bN z9Mm^~dV?v>DO5jS7Hk3e?h42MBVVI9m^(?J1=V~$K?)YDGD3XK%_#gsxR-Djz=}i( zfT>lv)chIXmCd8Iv#QOeHQ}B4;2_8YHDbZ+4Ye`4U^e zZ`K3SzDs-qG=I$<)HCM}csGC;ZMxtS^gC#BNCBX^Dwg@yGC#+{baiQ9@(M?*iI$r< zgHj`_e%H1b1GYzx?5$m?*O;+ou|bTgBP#F5%C?L1##ZT|X~6!h>ZipjxNPD#l<%{5~tGA zVi9$CZbc`oc44Kdef>C}zuY4&cmLaYDYdD73b&DE|xprV!MUZ z_0H=%u9`PISMBaIyU(PQek|=`o{{W6J%Gwr@AiQN_*MAMf8={6`Ao9xt?Oh<=jVPY zhqfn4cK`7ZJ$HH_sWr6eyfo z3s4eghtP*dYX@Fukeu8B>4XFx{VkcE`>WCN{lWJZMtyodAooX+o5k&xm1O&Xd?wGD zeI-(++MVh<3U7o?Lp(?~>LOwg;EM3^C1Nk>8rj0$RR4k8u&@qaYVgLMjsQGfgLEPO zUH-ZNJkli&SZ^_q5fHX0JnL?i^#ws5AvC7XWX^*a6tu86AP+PMI z)ZnmzjJ4S6*bY^Qu3k1P7bD>u# z6G~IGx~bJHg3-LHfsTOSFb%w@RWebA_%8-=)kuuL_afwZ5zaQ4S}ztIMdka!-J1yV zP5M{9stm6Tq^A&PA7&NC4x?%k?L#_k)bNIr`zdz3Q1k0B2v*vt5!5yj~5i} z1x34ZLJFWNsG<e!E?%^D~Z-Sp+J5=N6NA;uoz{`)A zLt90ghsyWua&N#l{y*|vxZm7E7JF3n%#WB&^JUL-3RZKSVwD5ua6Y+4=3E6A{i@1J zwXI;t+QqTIV=6m|c?=`qwsO_rjq3<`eA(s;6P;G4+Uf?ZRF!_jJoJoHqT1HnN!%W+ z_9R-R(dCRoRNo1;t8hNfCyNaw^VwN!HJa4IDu-xAw8r_VslKF2(m7S7F9|BoCuTFO z)Kq9bF>fB-XZ9e6h87DgQZJdU#22lcCY)`6ICcO>PosNfsj|x3BdxO7Sys91BWPu6 zbo09ZpU~xmFstvPbtwyXk!3^eJ~+^oXK`?hBkuFvHZN`)K`~l`t;P^^gB{h_5>*a` zVAmb2t@n2-1NzPN3yL9|o>MHc?=C(=^1g6Aj36ck#iz*Hd5BdSX(M4^I99 zqaSdw3W0C6HUK~n;AM2=!c;})+q-tO*28Ra(EJz7NMPz9t3-Rn3=gY|Vg3@BWnh

K z03ugV8Ht5QEs__aftLlWD^jE8Msb1mD#{vxAre4^-qkQ%kJiFfI0fV6XbpeqOReOx z{sTb|qv?T!1B4F=DdihKB*47T69!fu;1S==a>v%7G@>EEJirQ>=sXj#6~=m)C4C8( zO{02);QKO|7|~FK0~R;X6rhn6pXd#QwO^CUpy22%%=!wa9Ti zrKe+TDLz>m)~ebn5`?u_y*0y9BMo_G?^C_N4NH7Hk_(PnJoSV33b)SOjqDzV>+d;) zAdfvQ-c8l{9jO^H)GEc5pV{?=`LHLXcQu^w?>VKY32{@n71>MkC1KjIMmB8nXe$K- zgGD~2sGZ-~38x#}3Uha#D<+pppTziE?5PQ^@A^B5HAgxYR^MeW&6R{b2WBgU;NQdX z&WJhKQ>f?JqW$-o^EKjYWN+xd__Gw-mMOhtLV__8v zVUc|d84whPsCr%FRz8zw`{iFO@;g6ej~I53>^E*E1{)XRE{J~#-~{K%dJ^65T8m?l zU$)Te{K|XqJLq3er4#hNJZsIRmMf6e$Hx3JJtOw ze#)dezXG_HW=pLTUFSeKFN5>MzXZGYTL&0JA#ey)v^wLe5ja|4o*qw>P;1k8Ek(rj z|L;!vc2%AGi6}p&(~e`8_p}0<#G>cIVET$Y(1(t;0l~rd#Z*G{<6sNZg zy*{bSAOJzSf_f36D>h2GNY!R(Db!&hsx4!((OXv`17*FI-dC|sRK1o!tqCew2@QRx1h;h{VW*oD)NzY3Vki)nsnECwO@rf z&X!(hxx>JJ1VnfWb`OR9L_xacup_9qGwy=76KoI~U>#{K&P$Auo2%{@xDOimL!K&Y zcnEqhgW5c=qVyfqa?2g`DvsFw0qL%BA!LE)0yZ0Mpf^w}`9`BEbW_2<(q0eCuRry= zKD>9l^WDb24|T>&dG4+ieNT2e?tOh>YU5&v`0!xu#dX)5UOa0}uC28kOpZw2{IGWM z$$bwM;|;HWJD&fm=Pfr^&7)_5xsM(-&e=^TWF61W=6{(izw+bv>K|`?_ifLQy+&RC zd6M$!*b{i`Nb!5}Nb=;r-J5**^83q|%l7X6_>ndD&YRa4 zu4tcZoRU0B*}Cn?Potl9K7uTxpLQH+x^Z#mF8(ss+*{vW`SHg)zh3`wM*U;YOx}(0 zAHVN`>fmIr(HNxubu{zsuAP(LyxqC$?T1~HZONN<%dh|VV~$bhlfUQB5!gq_=VhXQ z^z%>9yiad+K6*U*>5Sx&z2p#Sy!Wkr{ZF05DEG&+RqRL+#KGXpOhD{RUEM5do zJ{TERt}`jdWgietuzn%OAixHglPbemTc@LP(f#8cxWMkHmnD9>)-lW?@T76 z`uPldnl#0rXH-e^VcTGT3q^Gdg|L=j?6ju|dL!r&_VZP!eh(6E6;%f&o<$uR%>U?6 z&6CcN(%@w}svyt02Co6cce2rQM#_6?e$aXgy;gN!We!@nfm$xQ0^9wTTgH>_8$`(V z)yyj9D$v~C&S^Z-T@Ko%)9i57+#cO8;uz4@VW8+k3?fv673Okt9FOz>bZ;0ZI7Zg< z&QBZTTT?ex!6Etm{C?mnP5rZ<$4TK&`uaYk%ik5VPG6}~Rh<6#qhCqd@hZoe7k}J* zlWsz}um2^TmUsH*U-}p8Kdo;?mW7^b-!9X9M}&1v$gHK8U_`2p0Wj4&20u z^2C*y7K>Kb+SbK~{1d@g<2=EOBsURM8i-~fo+xUN0*8*N7y^pZ*C>+-PCfu` z8*-mjV6>l^X#Y}6yo$|BKysK3L~tK7e}T>?SS(ybe1mH1Xzdhm@Qh;Vioy4qtMWv9 zFzBEYp-+I`Is>s2!zfsdN*sS(+l2Pt@A&7*Q%F3=oz2HEsPuJG6qp{Z0p#`_6D7#+ z=L6y^X^i^_JbM%h1~ejQmQj@fFl|RX01L_Tl*z#qm^B?o0icgCCrbI~;;BauXWza< z0TJ&Zur&daeGu4P02V`F5CB?6;`>qH3E01YBLgf&c32EmP#{XmtXaq%uqm4!6dM5x 
z6;z;yz(=Or_5Hk$sF{d$i9WSJey8Qyfg?Yc=Pp>YJUC{VzRp$m%ae-ovs3r*xL>c8 z^9Su$_Kntz?}najojJe`PU7yE`s=|t_aB7hha5MRvqj4sp`#O4v3xt4mHsb_e$@DCla{b!5~4c1 z5(@N(5IzvNTZ@mneCZOpy>|_8@U&&~P`kov766qnTaJZ0U}tz{K} zb5JC?O*fk34wfI7+bMlCrPvLUt& z(rW@{n{pRZg^t`lYzFwG>M36)xf)l%2i0Ky3ybAoi$aJcMzQ7-&Y9oaTtnsCDBMDx z0Q`B-Y#Ku)(}U{38G&ykNA0^lu+VrY);ed@;I@E31CM>HN>Hu(0`o{E{*1d`q1yxt zw(AYL&jG2W@r?(+1@W%+M;%~Jqs-2mfIZiOy$gOEtZe{K&IUFn0FRLAJJdj%G86^t zqvH_18$d`s3RK4Y?Lwo9$zh!U!|m#gNzec?Xh>M;IdCoOz{LqTm+GB=oCf(I0%IT7;a@(Iej@TuNo(-Lma1+D{ z+ae+oSd{W$bDa$V?00g-5dIKz|AKvPWQNrCiJf1o8JPv(wC*del%6zq37 zySW?7BN9{+w8~Ogde8b&jDn7VS||_*z7Qn?055zUs2O;6JdAcE)WCxl0gG@{HJ}RK z6dX0c{ld3qifkBmF_^h+!w(H9-`u#9S;b!8_^Mm~p9*#UR(e#*-MVKsc6&KVPIcSg z@}#R1sh_zfUHE9ZwWN2Kel$u{bpGaKdF_W2;tPe<_pT`$PyPh(EjUBu?)qqp-)+A^ zthEm>d1z6;t?l5GN*wm<#(m%ol<~jPeSrOm`owxG)q~S79{~VrM0^7Ba(+8i}U_5jOQaz=BI*6ADRy_Y-t% z)DsTnfQZ1>0Jj@j3=9w!m?Z!raS4?*KbZw8px>8og=<_?ASg0&05wB@f-P{sPB&+< zK>vpg!!Qm#Hg%A@70Q8x(8g)t@q?qx2Lh@zz>bA#dMN$?S{H-}gYg`7*`e7%#Qzi+ z$AQBFE>vJIss>vW3>twMd~6BV?zn_)y{4mi{(igE%{?%4Um@g!Vwx~;l1>Xl%ZYxUi$q;Hwbl$CSg|= z69CjVoo=UT7sx^yDS-R42s|1@W~f%6CICgws47L@)4?Z87VC=%Jcec+Os}W4ON-zF z1@$rwX{36Q+b1C_G^rRb^o5uZkWOo-URsnTSs=YJBV}|zfQT$RwVfI&zcH%v?169) z5E%lD3J?IoPI^YNbhhY__D*gFl*)o=6q>DQgbB|+FV9|VQI>)vHJye={jZ$pIrC)7 zjMTlci`{i0t#0AM)N^G4;alJ5RV~a5XG=|o2j1(Xx zi1tK33GLaKnlWWPaW*1~L(DAqV~R z!`vhdi!j6BCp^+aSN2eq-Bgu1{r}};@|NkhEIO*r&pq!PgCSfW(smVWJC^DW3TaeM zklW1JJeF@S{Y*5Gs#_DDEW0w$LGC~;L-}CX!Me&2LR5x~=ngAjJ{md3%GipS1<{(T zl}7{x#|;hn_+qR-MDSgxwMQ*NU=6Ct1sg?%DwM(GTeHh6B80NA4nja!nASdad@TRB zeM?4nj^*Ym^1Fg_6dWw1Vy16!gvRSEkhVffoY}I6k{R4 zwCW6Hi8^t>^>6zYF4S(3V+cxk2-5Ko9v+Wqu`=!oDMmn>55b4-uGVsmxuV@z9VR~? 
zh;_<*rjkAKRF-2X?uzAGaawr_wKWUJF6uS0-_R9)JfE1EUgG=&cYGlO6Y6s$+P zAsWmK!kOXO5V8zkk}<~h%BD#cYSBPwRsgimf#_(*>#l!X>Yf_l{p?Z)fbYk2$1aU= zS3&4A2rr>oh3<0scX=M|U&ASgf;6r1lC3Jro}r(z7DSKirMcyDxVve&-j1X`V{27k zVszSKVgN^wJ8}~~xHpNb{kXd^Y~m_XI|sv=h)%OIprF;N;QuY*a^;3+jk=04@Y3bZ&$?4wY zg~s$`nU$QDiYc5>Jf~W;Wl}9lx()a=kRqmvMy`xWT@<|*kEz0>hs8h$C<(-MgBWsf z&n&$lz}&q#@$DtJjO`w`F^)>Iu2g4(Kv<3K#+?2|N`j1vm|~N37LH1);ZReeHc6-C zAridisR}DIE=JgqBq|g39+7mnX!@s)kqul`lq&AUc(R8$d$C5zd2348sSti)3pJJZ`Ia z?oxN@@-gHoRD~2HMUW1PMPE>~0R=4_F`0za2ek(JKE#BF8&%FmDKzMTn9D3A1&wSt zM+fKiqcYEF4FhIoPhzQ=A`Uu8ZOheD2*-jl356@Yeb4t*=Xv`tdcRdTPC%(=WeEPK&UZW zl&WAScCuZiZikK8*ZJ(bEWSI>n5`AW9bOXin2?iEShCN?LI@Y#*0C7M5~Ui#9xX=5 z7?I#<8mnZ_nAGhApY6PkxR0`2@LQjDud!5H-fKpwrx@_4@?IOe!=Fhag^JT0+0)ZT zE6-6a_~QckE$WW2Q(fBy!cxZ8V>iovu`>E<8oh+h5#8>^v@Nf3KV5W!&!V#BX{TE| z4tu4wwhS@aGI`Y6ZRJC}!kRt?+*))EO@aNso2I3TPe6mCtmyl}kD zAhfE3ayazWXSH`<3L#dL&C>c&4;B@uYC_L80~?Vdqpe;a+LLnkwIELBt^l6g9$iuw z-yn2`EfkX5Y6P+o?YP1# zdAgF?U$FUYVAV5D`hx1j?-_5CwqO#G{@9kvX30TI|HWSTuO>5?t;iC+2sjU`S7#H)!Z zEk6O`rPq&HAe@H_V^jmDbBG9^g{!ed*m#Ieijm?qxG@aqjzzNhZy}}-s)2m+ZqZ!$ zRsjhvF25xGcy8~`%K-^lyq(OvY3(xMm7n@{W(l>Av@Vm4P46AwZE1OW?5MEKLJ||W zct;kU?r?Ew`y3l?6wX}g`4zp{zg@84sL33gdjv> z_o^qQRFh#E{TGs(*~V}X%=WCO>~p=n1F>GdBKikU z+!h4jFt&hkuw3XVtDOltY;vf61TTWev=h)sQzk4% z-sIM-JW*AXyz)fGuRO*y_m&nszJp$cOb+rP@!$Aj2r|vyO-jV%NA~fIuPgnIb-x~v z_GD%%GL2`!X<~>D%LJA?h>o@utA$7L+X?U7WQ&Z%!byJcNrPnz>c~WHrpz<9HozP| zxsV#MSz&INY*GLhUR1;;u~=b>X+9!i%p#T!i=^8{(+{T76q!x;=Z6PdiY1Gj?85l! 
zsV8XWC(8GoDE^?%%sg1$6Zv*uCeOGwz%t;do?GlYnPx*B#fWND#=3rmr7vNH6>x#G zWRmHyCo_|0g1ocU$B)J2P?~JM6U$v~VkWjm#qf6Kh4Jg7v2^yr>pDmo9IuF1{K}Ew zb@sg90z4CI4z6@qtv?O%Po{+k9aamMUY~5@?Uv#vBHJT`Xg~Q5<9-2&BHN%EC!!-l z)UrBIW;VadiI-|#TcC}7L9KbRPv_)#wQA+@7ss{1sS7K;fWbIXlAT+SS)+!J(w}w0 zulU`GEWkmPY2wUao_}$`A4oqXOiAJytN8H2fYS*$QU8hrGMtT%Ln`HDQjy$K=!az6 zR_xH{QE8-raMZF$!lE8g?CT&>gLx~{sQh^=Rp@QQIu@EnAXI1Yg7tYKA}Vzw5dOsD z+wXZ<$2fHBwo=0ZLO6#O^c0F?G`VjcpSeivdZ^QE-@cN_o+FuxBaFX8uSDgF1Fqj9 z+(+K*v$LmKk!I%*?UHS}-yCJxyTx>EknYRm%CaPK_)JBztV}3tKq3v*uUPGN)EDaI zs4SdVW)|>Az<$8b%?3azQbP@!%>5faYBaif!bWq4GHxI6NErz?Bhmn!7a{*4D$DdT zCYigNR7A+jJ$<^OdHwrm-7ZmQD$1#Rh96GYj3x2RV0TCjC2X=0{L}G2swb>iFBfNZ@Mf+*)gA~)7ZC4?!@VWegZBfTgjH`ED%+e$F$~eTdNO`_9cUA7xg3Lw5sw zJNjv7twF5u>a0#)Zu=g0>-^3-8@ri4A6^BKouG@n0kcL~DjM{(@%WZc9;9!_sP{w9V2|s&3l?H*fgkaqEWn7Xxq5ZU_ekVS62rderNb zI9cz6t#9ufSNsUx(@q_eorNSAw%%j1XHgKu?LR;FR>AzGAb^MDgy6HDD#d4je18?j z(-!8dT!3%`v!g2R!Z>UE2lvb#|98XD`->cu{)ms^aNJWSv2Af&+fQz z;6QWl^2>*Id+{DoCAZCYF$xd4xg8XGYX+xfzJIaVcW~0R_JiUzKVEOU`n>SFr)SZp1;GSibYIBcP=bXK&y_tG*<%PtvpWhc0v{$C@RlkdDzpw(d z3eh4{8+J>QG=sZ~uVJk0Zrj1gRu{f&suON>XB|W*y6FZw(puM+9SRH0@g5SnF@lu~ zJ%gTU{|0wfQjD*HUpiuAG88mXBbu*O{sMg2QZ4PQM({}qnP+Ve_l#D(2VQ1`dsCe| zkOQhMZ4YM$V`XZ0+twaVs+Q^wCa%_8+vfVfkX{*IU@DSHT#Afjpz}%Xq{%W>jCgYN ztoWO|?A8e8O9&xrj+f~gW(M8TdO zv5AKlrWPXm=Hc_pW1OQ5tiWHk9Ehe#WY)}GDqbyZ5O9LJ0s_@^chI!tst48a zrj$0*>mN{fHvw;(FmVU649`G zb6hocLXlyvLB|Brh9V80Ct~HX*(88l+OW8}ZPbblBx7;Bh#L?{%N%<)mvMzp=Nkwk z^{r4QEj^#IN2kcAF_N`Sag7tepv~yCS}IWbsCy?|_1t*$u=YC0ajR2CH*6e!-1+7v z96LlYtYw)3m)5Q{H7FBIV@%?!KlRr%M1_St`tz>`m4~g?_4g0)H>}?g7W<}mC*%6; z@5hq%Wru{=%_>(p9NM*L|MA4tt))lo_I{XfcyET=YX5}26C$=-x_@f@QMBxz*B(y7 z_glZ-8lCLhV71Yu8jgXEpAjOFWrWGx3S`k#qybw&NWqp9lCMmmgtzpPxwSeokOra# zBeWsOgfsB=x~s<2xpyz(+zC7L6wkXNbph5JZgdSp*iME72*Ct(WK&xv|vB z%yET?Cph7?V4ZG?Pm@%lO%af=T<%19iA4^hDC6qeV?P-`a#7uYHZmHF5K`Joa~bI4RXJkx{yDggd_*o*)@N zGMj#)Y;|I(zpa*J|N1+%>ID`U^6!4MS~x>G3w7*sgwu4#%TB(`rB0r;t&)?UCGh$T z!jMp>hc%u=zpuxK`DM5>gpn)^<>J2kbFyzLrED^38W6QB|P77~;;q 
z%(m}dolr{B{ni~jdV)M#+hdqQ9bH{!esJLmjq6LcJ*N2$9%3v^1qME9K$GDEx~{%7 z9rdRlbnzQnI%w#dy8DH8#N|8?qZ7>-p39OFt++`oi~jp%{y(H5|KEOiPz8ykZZkd1 zSx84V!GGs3Kx=b1Xq^7VEQ#(8%dSpHgNm>UzN(dl*|TJ$Y+I%#E+Z^$+e*O zru$$5M1=_mT`e@7%uM1!f&<|TVvF$aov`IPPn8MCYWWCh$d~}5ePk{|ip*4iSVbV~ zWmd>~y#A(6j5c5(JMO*yPkiNFWVynH@>W9Vm65VB5ZPk(ZN^=x6lsXIAT3qf$6jR} ze{Sm8J`W_YQkKKucjfqwV2>gGdgZbv>UCKYaaclcqh6!8$~@*Gt{fb!dKpcm54PII z$zYft1tr3eEQD&gWgOeiks1*A%c6&mF^PWk9{*D4lPx@U1VNQkhCI_bC;Ny|R{|Kz|ufz#qU7c15qGI0HPEFU0i|Cg!)Qxe*m6+6(axJYk~i_RP_I6i{I3TZP6Ob zPzD-0Q68eRsH6CbU4+i8TVI&ePM!t?!1hKj10+Unm`xE85dB1cy%X1rT$k3hC|#U< ziHv!?3Y>O`gQV~bfPm1fk#QnO@g%W_G+QrFX6{7s)e0>@O=^M1cctBOrWLaB{=EOh zmvEFJUMRGn@Kp;1$ZgppW)U;PdY9;T^8*%PC6c?`ySqg2CsnJJ*UUm0tR0f!9`$aEkm%!=xuajDS>F_|zC6M0^iJmF+#Mo1=IanskGS_IV zo3%YGT+i_D&q!c0SRaSw|TOu=ZL8cf{QL6MSDk7x3(78?o z82bS2G$n%~-x23=qUr&E$LbUIt%}$i@)VWpHi?zA>l(VA7cr|qe*GJDdgnNqX^}q- zEOn#~gm_Fyb``2NiS7x64KU?1UAj)IEvVLNdzE;6f3eqx_-_Aw>3`xoa`#iWcK^`Z zV?(OagF8x1s-Lg=z9XWv&}gvX#|ZZ8iPk*E@jZjV!fyqgn{xYITL3FdB6~2Vv7>*EO%E=X4MZRqIi! 
z2>mZQc{libCZz>*w7yg)*It&mZaa|Ewj!|vZSFDLL7ji^k=gWxXzXx^VmxN%3rj<# z%GqC;Ars58e22JaIZ}2UEytIZBNW98RGIa#4copeYdY0vD4-NSjik=c8i4J@_#Siz z0^n@&WvX^aSgOz+GV`^^Pb1-x-;}cl0z+Woe^$50&r#qeb!>N7>Z=WT$ zHh+5}{#TEW?Tc#EFD#X(_o&$PZtM>&E&Y@BN=@qLjj8a>j{4>o>y9X2ebZ()GnYDk zUd)u3Uj&0+!s>n@f+Zk&SNp#$)oS0`a>f1M8UqewSG2nw^7Y*sTf(zsQa#(AjkJlF zf}s-LmF@f9tr*DqR_u`AAebAPG%MDRS6nq5y)kIdd)6o3wVYpVcd(+?@trN0RNl+$ zPkF?!9m>x}TdU-TmrD9F(|gO;rjRmOehX(#@-ulFSvm3t>v;dN?tu4~*XuPexb%Gj z5@eN(>>%pgkD8N$&~_r$7-^BvC(bjT>lP3zAllI=8YDpFYLe5I_$`Aw6LdvR;LIT3i<)7WB@^wZ((|3PomSq$WS`ni5&BM?)FPEc;#Ekw904pb z*K>0zFYAX7bo6FiKakU!OQ=;*&qf|05hansQMX1^X9w$6r+jA9!N7BGD|lNs?Inn+ooqv!KMiI&m(zX@%S8)b@@SqY~GG z15F*RTCLmXl-j1aL3qWs<*tSF)-))Wao@|Ag+HNaom`8PoACI4b2>-||L^`h{%g@v z@TZhVEXTL|qc`na!GYY%E8KpdP^6)ZHc4DLbZpx=!tO0>OU8J4-Z<SFZ_C+^hU6D#YEVsT#!9dda#x9QHs zWzX4UlS%a&;VNkjfp~pJNv>Wj$IFk%=rU6#MKES%vItCac;lS#-8341ocPp*=B#N5 zEqC{qk6xSk!Sy}1!ACgO^Ftn#bu|YC(bA@x*#ES&=w}HG?Uox(klv|+eyK!1w`se< zkbZ8V+s#p33a1$5j{p9Z3CXdj;3nWn6j>8?fo-H?XeSB1#q;Hn9E!pEp zmOhN+?#vOs58soKJRoZ6R(Gp=z;}uOTQ|+KVwf}_hk8@EZ&39>Ug(sy zX;b}*33+me9ez$)rpDj};WuU{d8R@~eAR%ePa+}=8gjqlgcHB66d^J_pp=5Gr9n0&?85Xv>*SO<5BP!{VTEW0|8GvqFgwa2b4 z*ZE_}>OA0OCije!j`ITnFYP^myQ%BCuWFUc09Z3KDZUx*C)$+drFjp;tAJ;tK1fY4 zYldOVHvr^~>mi5S!PMIfCSF*E5zIC2fOB?-@y=r-o?+DUHhZ;N%#<|o%!U28CT9|a z{o@40bhJFJ?08zb-}}DDY5Ub}U#4ba<)>aL|hXm{dUYqM@+;JlvW=x zu4s!s*9(_uz-gIw_`y96FWU(m0nspdZy@uH#bl3pzur~ z1?Cvnjt%Guz1`{q1A9V4TPbr|gTq5rPQB6T@);(^0vn6Q@}oz&Cq&toxHaXMyh|F( z@%sB=(ptJXVe7^=tBu8Y{k?uv51z*7h{J?(N5_a=z0sE?CQinp@Mx?Y1`V;>Tj#Wf zJma=5%OO_5jOUl>(NzwAEe=h1S-k*;4#C>dX&Eqq-PmcSC~StY@g(UCn0FpF!z2hM zv->Tm3*AjPL`4`pIKkC$h2v>yY?_e3A}EEKL~8}Ypf30NHAVZzVB?#PYJ)GmUwR+E z(7tws4$;A^+BsLcK!3tLJs~=RTSK`;T}tH=-+*p~xY$VC`kU*bMS8Ypi23~08O77g zkGjlEv1{z83QINWp_;ity00 zL__^UH`6=vANA@z2ns#ckTqv+-3POut7T_qedu5CkNXdgclY(?EWMX@;OoSpuTKLv z{4!u`^m%c^iGOO2hi1*^>a&Y~opsO_@;_flvR5SRi+)bab}!PNFn8M-XOk3X^|4oG zK{L$QHgS3T^kIdTin@Gkz`KKFE!k3ZJXaZb$DGs!=pK{aYJdkD$~uFrEG6L16^!GU 
z%_a0odu5>HJ-5)?+Z%hOzsE`6V>YQNe=2pxTW=5X;Mr#3KmO|5k&vMW=pLcZ%5tUO z*!;3$7eiex@ayu7qkzjoBda2;q&+X-`H1nbOX;XoJsL5tSK2ma4OSdA06Q-iS{I*| z*UUZnHx=18;j%)lj|wgaSW3LL0!a6Rw)bWZq0bWbGa6wD{@=6p>c>0X@U8l9w&eX2 zaPyenKNU$f_4cF^GkZ-cx&(}QDPwH7Yt zZd=?W$Y0?jcMJ?5-`!678u-NLZpiFYl>BPNwdLYP(H&J`)gBMOw$amC(b*_XX!}W2 znTQ5J98T1em3-*~G3`SZM8;rHpCcDWt* z=jAl^4>A|^_umicw^jYKT25VhBCfSpRw|S!Xf!8XxGw3KRZ}|Vx2kX@!u4N>ueoSq zC}NNiSaYD}YMJRstxX4kYG^2(&?-}eyG&6j7U{bEJtO>AVZI_fT$sLak**LQR)*Si zz#FB=_R(~OX|HH5POgKPU*xvPZ3hk%Z(ENxYL>U(XZ#WRyyM2l@aF||f?drC3C*p0 zuL|CXFh5N-HZrYfmfrz#(JUCF24l-+uSa=|IRlw85iv=`_e&GMqtUWRtR`kszKBRj z^I?c(XzAe1sF8*!=ea)TB+eZZj-z$_-uf5XQGe|Qnfd{ ze(jy%G&=X|(~)aOEIRg#|5F(JMJ+L3r)`ZH`9*WA+a^TxhW4` z-rvz%@>kf}1zBIi1*8{!OuH%@MVkSs&qf}HKF*Gx*zna)^HEe)_IS@R>7L`|iU4W5 z?(yvCJ#ZH_7bchsi?W5RRU$4$fMgJ5RYv4C_-7-_Fp{OqShX-%RVZE|44pwCe<70v z%%ZLr+M+vqj_*2{fBSflW%Qn16Rlj|wlOVN@7cvXco6PdCOk$H@%A~_6*Be6*3Efl z?qRctwAO;Qwp>73x3#z32u^6rXom&JG*_Y-G`$Et30#DH{8$fZ8EMuk z;q73)1=WTpEqf%aVlY#O9p|LJu9`ctQ8)Ff+nnoRx2{WXPMGa4DV#9V*HRN+xc5q( z!`zsX39~nZ?w=6bur8YxI%9`}wK!#6+o#W$9qc=dygiLG13PM!8$T?2=+wQ(t#j(m zwzpICJr^#&6MtZy)rl8}M;_;y|498|IN#KJI#A1{nQ?^{)}thzaB%BL0Ur#z@$duc_JiB@Pr@K?DA7a5%$uL>7XW=np- zg-c3tKtUGd4+4sc^6IiO-xq00t~ajEJvfz6S11dxR9{Tkb3w{VP|hw=2F0+qWnPJk zc4yN%h;HiKEAg<@q@oFD*{Wy)dbXNMBNy=uB^HI$zN`T5v&MwHIy}BxgyUZ6A=?X< zTAW;ENo4QzTY6<+*Imtd{pcwI=8eY8skxZy*%ymlIi!si%+ADTUji!h+InN)P6sLV ziv_cJgv(b3gyNTXEYyl*VIn&~zUwx@SS(D`QLdF&ERn^ml38qSq-L!p?CRsG8_lf^ z*%k__n%yX3_ZW-0kJhp+#BD9f0qdxIr0o?dQaxg<^&kaIfd|n^vmWt1NEW-)3KSiC z*kYGl0jrKZln}6%VUd*iC;q}H`jlztL1k)eV4J67J78kB9WuGkjHt{sn{}VJvKE`B zzA}3G4lWU0WG&<#3YlRXoiJg&Q`VWRstL&lbwPG^#<1MUNODm3wAE6dfx(u}-(!D$ zhRWLB(NGiXCqbS6#6}dgNBnbk#`LDY!hT=(%iQ}rtnQcGJHGd|?(Qi~(-y}qvA=Ut z(owT0^_F(Qa>oM(Hur+>&l1x1j!x0ucQ{!(dR1Gy;+043`V;SoXAbX=&(7{XqrHA^ ze6A$qQlCZE1Of~!Oh;)b4ap#D_!1<1aoAa;Y`ieXWg}Ha5pC9MVRR=5KM59EUy+E| zM(+CEpM=@S!W@c+a$~~Gfts*%3VHnTtLZwq4gy{%=z8$@hB;;H7!tQ{`fq;2ZwE}; z2&@(#H8mx1hSP|}ZlczG<5R*Kj0y?NW5(C7?;6KFWvSL}hcWi)vV~-dFjOo`f?h$d 
z9H>}ft5UmMuY?Bj9rzVaY(G=FlI>@VX(%+MJbfO6#->pZ@ywITR%r;;dOBvpAfZ*h zOiGkj)2Jq%=4ctFLT3qGHP$;|MPnZRfSgq(^(Pr&nrafw5FX=(!XsEwfP>Huc1Is! zID}@xFB+PiuYX%W#;l9j5t?`WxIa%{k(mf6Q)*=W~lk&n#Z1S=2R6 zX&ZVr^laCswr}j@?96kfG=7?Z-;=b*gdJv@smjT02u$no3H(1_pBThACUTh97hNJL4t7hbirR%Pg%_8Z& zn-zEJq#5l&785yo@fJ+8S(Da$IezA5l{3fWFwD4s4svZ&bjxvxs?{CeM`ol$khORnGi`_U#@0V(``t`FCf?+Lf?%k1hFwcvd&wPHZ@O0F0R zND^-CF)3>;GH)&W%}ZX}52$ch1V(s%_N}4X&x=1xO!i1#)|rHbJB~!GkJR>Lq(9KS zPfdR~v@B8h*);PEJG61VDR@K2bfQuGJtkKo_(M64|IJVSZ!*nVfnj9!$z8?It?TxN zl@*=HxPHyO-J-qxLeuf*hXQZC?c}(X#!o0d6_ix;#xSy}cGUbHRgG@!Af7-kCr}e8 znb9?lRa0LblGe`-l;0AV6Y`Q1$9o_8W$)+v^xo{_a1-|u(qWWAJmmoih_P(N@L2W& zauJZSII#Q3a`3n?NOHhy>LT~_4C{4<^J%_)e!PNwpISw4EKyCamG_Bx#fXgIs+rI&P2n!5 z9l#foxhv!cN`zS{{N->rE1AEXYN}2j$Vz53t)Lw!Hyov|a3#+j5?*Gz5)O29xeX_C zmy=vQ{L=danyIGHVdeWNXucX-Ru+TrN6SYwR5s5a?vaLs3OLc6&gMxCbrdv$q0R`` znM(IdyCz(M|H04(u0WmEB}zEk!$yTsYz|!JjlLcC-}wHo7jguIRdUpe%P}xh)F_G+ z+4Ag6NL(7#?Phs#Zyb93<=z*|n@2Cy%&Iwfyh`_$`^}=yjbXdFiee67RiMGP=**N@pm9*>Ar|f6Uu7-|fjJmFlWpv8rwUw9S@^C-LotAR+Ys zZ{L*tpZs&jvf#RVr|xz(;H#+kU_oGl%>p-WfK{X1M79Rx9ZKNx%TIw>gIc!|*LHw9 z0m%8*K-|Z0HHP@XW)0{y{D96+9FuKGp>meR-yoEDuszt5*pnEONgj#ZHS&WV;7Tw% z8tNV-&rri}cZ)dwhB726>&#HVQ^w;a?i!6fEgADS0I?caNJwI<+sk-@@J5;3zlRcv4r05%r5el);U0WM>HH z)6W_vT@=~VkCMCReernanW>oKA+R_UJiDZ%_`-!~-i=+kf7-&$bKBGQI0~gC`^oN4 z{im9qT)y!~GtNS}y!&J0lM=wGJSk~91xwkZ9n9We`kub%%@}N1e;z<1yDoh=)pX{A z3OFM;aO8Q!bIEs)K0u0`|E*;Ew)0bP6bTMr`3Rf5TDQ(=9Y9sakRxNTmL*9ulV@&W zKW9H>KV`g7Od)TTe|pNs>#x@R9IYSXn=)?cR2G>P^ynFNEsiKAP4V|m^Df)|jl2~F zgAo>A z?Z+4sc_8mo&p7bS0YSI`1>4sf6#%lZP~NYAM(S<;%`YGbPSCS8ZOQ}bp7l65GT#X zj-Rb?H|ybs(|^6`_iNQ2U>)-QNit=f{hB#%yUdoxyHF`iv((VE0>oO@n9KFuKQSAP z=h|e-bIP6;>+_D+=tdVSb=J#;D~m4btWQTj7G<(OD?^_a8By1#dD;{iDQ4%Yii>O% zeiEWZYqI~lN!8OWNfE*@mdqM~3FxtOe3V~4;1wrI(&clFOftcVJzo?riU(J=pwA)0 zA%jf-9}jMfmJZiU`IcJ2)Hahi)%`vvA7&KnbCvOmazq0 zDw#^5r7Qi1gbo3`K+gbMSdC*!gVPj^BV3rf%}Lk5DN2PSzrvNh-bB+A?Z|IjI_0H+ zf5?Ru_|h5F+O)Eqm(KKbb{L19bg^f%s)h29ek%Q8`hyW+S4)e&X%rVtv@%_8x+KG# 
zn{-jVxo5NHea~hEJYl-jrCIY{fsVLb<|dt5>{eOkyX|qo#N}Jh45ya9TFBL3RdS(k zcK=q}xX7r7fA6T4`X5>B7CmW$Tgk1>ZgHEo*V=cc?=!xfdg!+wTi=(?Hq^PLA2xI0 z&i6Jwy$fwBdbcf@-p^>ZPkdUk`M1%-X9|_E<(=#n54S-qH^%Q!d~$9~$jy}XQBt>> zl^fnAp8X!5YPJ6Jk!UwwTr@xNtV`nW)HNq~QekjhLN01;9POzWBhEv4dXikJf1y!P zz-U}YS*V3MFmGW_J5X3@#uAsP1=tK4OOq}j3n-I%f_XFaN;(}D1{Z14ooG^jlPY0( zu)_?RF|Lor7S4me>GV;lKZj_JR!ZrFUXJbgF4G&&KK+y^)PI|EK^hrGoSlhVPMJLgQILXG}jZwBiiCUIjW9085 zMtC-->xGt6Til_t{bH0?EGCl7Rq%nB@FvOoyR_8 z=40bymK57#28MR{*WE!CgCN$7a6tZ;hV4?W8xzA?s=SF)7@pA4^>JxjwiW{0xGbq< z29th@AzX%D9Jtv1$cvjGZDGnE55x|+kx{pypdF0n9y5hK_JIZ<4dE^=bw2DC^zZ)e zLqY!TST9xLriRzqZ{`V2i89e9If&$7S*na@UuKtc8O@q0nkicNNGpfj{HF{Br4G5J zlJ;S}v=?|&GXS4K3m;*~ZP5Tv(q=bfg5k~B-YK62v^l~?iKZupn;Se*?EblcnD21c z{*`Zfu3(N^WIoN5#Ffc$5OBJtk3;9|*AoiJWFDJ_c=}rsS+cOj8poq?YbHB5)B`LK z_{5S6tB{XQ7naEm09rZk+u$YnpC$Hd2m(0@Sd3+|Pee)GhP$9A0O~R}7vLCkKFHCz zK>NUf9|FpSRpJl-;%n0J`dj7r4*Y`~nR(3blv!5D%+M9pt6<&CG5N0*j4}=>{i-*@=UH@H^jMO%((>~N zO>D$M1uijL8-LFLWl|m+lylEPLwA|7dkFlYo}BI|@RDV6x+CRlk#QKQkt1#ZZ7gA> z9Q<_>kW5$u#%7Lzf8_om%j z2mXizYmm9UBDGO~LX$2#2`&oR1A-|)5GGVkeh7S!j{kv@glq;O>tU$1Q42Kv@bxar zcVMmp&1a~f0cabb_60IhYb6d;_GF`z0lu?u1>%SShXZz83EU1`MHslIumqG6vsIE6 z;F!P{fQtZt9u}aBfYkw%3*cHT1XTw31IPReJ%VW3DELG3EEHcqAQN|%0m;dOK3ATI zc#mm35q)s9<~xIq7d)WXIAs#~YGgd|9teB~kk|NQS7kb$$aZ)N48Hl!h>olto(I

Xxpk zdo%p{#s_J-<@aOF$KA(kM~}62-uN+j(V}>-u%gj3TdL3BiMe;~e(JlnU*^6$&=T`X zlrgP9T!Gcq_mvDrO7lAxfSUv*&viH3XKLzBLMF;sFclwkQa}gtqHt9z76ue;0p0Ez zx3K}22#PD}Ht7ZWt5h{nhsg8_mH7B7`YduJI0B-?{82!Sv>p?<0D-#|P@W(ySpoKu zET|)zUJxWCnw%y}01E=O?t5YtX0mY!3um5)q&QWE{skO@5b#(%VB)-f3{`3G9UM5Z zSbiEvD5NL=R3tgVWk_(y789|DpoIqT=H5F%(70X?_#*M5F4y*l&UpQOBG~({f6#(- z4E&NhVqYjAvtUCxI3jVXR%yo=?+Tbi0pN57{x%Rofozdjw&HuD`0OauwE<^&JS7*( zLl-Duxlk#=8+i>}k@=vFL`X6T#5oiK&x-=H$qF~UIjwbs6CSVm=W zyd(tK1X&@1!g$l?!!gxZ5h5jg)xnC0(w>+MR?&<%n2EA>!f(^zMkYHQNmoflxbX5$ zI)y|D;VUJt6hR3)(`9m*Iawm4@}k*92O+KODy`g@>zB*-%M|q((Nxil=?vZ~rC3A~ z=TM<#o)xH;nX5AcF@TJEpJcMAf=rnEiWDX`wQprJcCyr6KCD7bf1{f`IhSB zW!CjB`kE-mwC}PP_rn&68{9-}5+$fkrATnqV1%;>ZBUszO)N_Ake1y>C&l0ichF6m zJ1r2^oe^+M3Jd8e9pk_>g5N+_6_jzOGjLrUc;E2eYFuST)VsBnm1&xm<6}-r5KV%Z zvaoo~@yJydy$W#cxQF&pq%5|{^3i#=91nAL1gci)Zt~7TT%-i_ih-yrdaM z8vM$;^2O3ibamrCZ=uP)x-BBkQF|W|333pZ4x8P+jUEo@(qppS?<7h4J>~f1OPCYn zm^X%g#|h#ab##PDF;r^s464q;dev?xxbi1}Il&BDTfEi|KxOBOQG+7?-+E9hyuznI zoiq?2>N}TMp$g${gE?-;>s{4w=&b<}z6G4iyqn0!l?VUZ^h2lQ$WtYtacN zIK72tqd@lKE{!qH>Fv&Rboy5n2}x1kMdrc?_ggZ7PP-No5rh z`Vei=3|z&R4h>QS3DMI;5V5(0K`fXjK=4Kck?6vEqXjBhQmQqJuVA5Uf|5@bqila$ z26~07V8v)h+5Qe(LQMcol|v;MDQH5{ED=?h$slBM{bYd}0X)Z*mjzNih4N!eVHQo5 zjkgFyTc*D~l_jN>VSMsD2D!HH-98Um%2c29)oV0|FnjH=ZvxY%_EyBm;MIoYkGf04 zzu(r5D(kCKtz7fIZz{LhyE$}OCf2a44!*g@6mV?;adBx=Qd3mi|WB1plHTLmdUZ_pg7DO@*u6Cl?1o@Q- zau(3hX%2f(nAmHc%(dcLMr&H-NsV4Hc8<|iF#BEJLh{F2WegSr31-c~235f)1IZ+Fhwr(CFv8skA#F0m?C9wz7b)Tu$RtQDY5iSLnk(!Y5|=t?Lkm&esF;&BiGF@hkWlG4#49*j>Q8wmY` zegXs~N{~}`u<2+cs^~>IFfecw1S8%k`c>bIj!kkR2(0tyl`EWLxlJ$<}D zsd|=F(W@Z1l(kSyprCA%*wLm(=*kH2cLH8?c-x=|r52YY^HnjRKZ zVui_}<*ZXTO#8lH8c}{V`tHwgksaa%g?ca2p3k`=A2(gNb=J}PT=wMkiVNPqCEpg= z#f5GNy;PwW8h9^TU$}MQ$fsq~X4hPBup1wK=FrDITPzI*N?yDT-KNxgH(#5(a(HIe zZlm$lr(pt0Bp`L}6827Y(Wpp-txl0s)@#nztBeqDcNI}(q^BpW+sEZHT!O_aPw6%p zoHQ}WNms>mghq8@!=&^fPZ>~$jSwTUAx5RaFMMCre#%ZJHV1P;RCraXf-kEiuRw-+ zp|S{_LMS|A-xt(F9(?J%5*b{PM+{pk*K(lpIBinnRgSW@&~geM-y?z}2p-=&%Z`r^ 
z!ZZ1@r2^twPYJp1w)8HUh*_bHl75W*6t+_7!o&tUcmouagE*0EjmX6FK5Ft)GQ0@` zcvT`?!hm{te$KNm&_Iw+y>KFRvVx(FVIT}sAK0F-B!fmlP2{w)Xa;hHFUFXDN~ZX( zQo<#%CJ%|ArgdN8tMk~>u>i|uaB7yxmYOP9nG7+;NDfd!Br^j^!F<9oJ}kgxajHLnG*d$%7m^CN zEcHI+O40!u2;u}bgaadHJJ?M&ypDl5tyYN!L5jtmx(;w z>qcyRHIvMyvr9+JtSzWZ%~D6%r5ZPOe(o3OR57rrGWJN{iYBH@{KR;HAo^|4{z9o= zJSYBWpnM~T6H*Y;N90=)!Q@C5I{Ow9a|p|`vnOS@R)Ix`7q&{Tbp%vVU{sWYeC>Cj z|G7{b^#D!zU>kIlfeE_dF5m(Irltib@FC{46oW3 z3%&?=BS<3%5P($(J$2~d^_zAFLc%R)P82#B^h%WUyDR_gY6I-o3P7OZGg|1L0W)&N zFwjNut~iJl;4&X$$^c|0N4bfRm83j?Tvgh8DpxoF3CXng8C;hpA@*pJOM`GA(L(cH ztx8(9j;hwuYfUA>+a~O!U6d9$*wSjU(^~=-z}4G2({#<1@W6oOruzJsu9;<3C++#s zi-uIb>A#i?t*S^~S$^b{wy!R=rnIDd_q!?D2YWu>p3y&HYHRSX_m()uG5zN(`TB5l z|G2B^U3H>a4;ubFy0Ch|ZPmqtnYE=))tfZ&j{4e>^3-h$_<3`DK zDH1lIZ6u=!46%a5E$tMDoahx7&h%9ljGT(w%{fGmZoT=9_z1epL`egrpeL*eQ&s$7 z!koKsfB0m9s?t1UpGzb#V}Xg<9ft)?sB~*>e^k6JXjxWd(!>}Si3;)?VD+S!MfX;c zT%80|3B?6xD5DETBd`t*PUDmqpO+s1I_-*%SrJzl7d#fm>Pg0~A62+UL;JkZ+>7(& zZ-h*n>C>7>p9=}AxN2ul5T9K@FRYSPg61onu5|{N;GqVYKNg2Gn=45I=xW2O0)7*nvMe^7Xw)qShqA&(3J&NYXKJwIhrY2Ty|x53%IO_ zV{phS;89X=l;mRA)4mwIe8hoItXT_NEr$f!__t>v>q9dJR_TIzNUx3W=R}2r8Bknb z-x}BD3dL(5p0^nftFIXSmtj2iN`N(me*3Y<=EYg}sDI^qMgF|(K3v@Ck(hpO_(w^) zLtpA?%a;?n-)r}nwRXDCy`FAdWtKvHC`{!W&%EtGIXT{zTo_l;EIQD>8cka`>l16` zhOLR(KU&X6Z9Fo^sz`#3c%Ra`kHprt$Ilqv+gT;#n(e<9A9~@ZbUwfC`PJ=1i{dK} zK3JcbWHMFeaH5)e<~SC3ura9}laeCziHR_P9zKYFlTSn08tf#8BXFVM@NV>9Y+dY`HJ2}3ZBHE z;`O&s;DyK6%+V}Gmw|Az8yZygi5wWT&ylD0;x@?qi5}ty5DEZuVkZebKxl=K;SoSp z&1DOK#)@Ns!RdGmXw1001c1CHz#Bb+fA@Ru0>!(A13Xa>zVUEis1~+t*4~W)0(jR% zV#nNmlehH%g{9!IT3l>ass%q}TqS^i$Dq=XVmXEzAb|)zaTOHr%b-gc#ks>!M8>ev zIA0nrxc=)%0}8mdutW(?r~YG1dZn`?ZPyud-?wup`C6iHiFH=dhJs3YWvArIolZuiKow09hRED? 
zSIIJX0gzdyi0sFxn%12YvqYKJ+1BuNn!hhBI+f(8g*t5w>|V}zl7y;iz!vR_!*ZZ1 z9R~-Sak5~I>*yLmgbLWs^3zaQ0TM2D!~$iX3-{9?RQpfGJ|enEt~|tZYj8D+VCqx{ z!ajXSTjTL9cBYb{f8@E`vVj8qUkB_3lz((9A#v-4=a^J#!27PAeZHt>d{5BQ~J;IVi8xrM#vd8gKA!QCRr|?iG z1#7M179m!?0LZP<96Go^TW%Z}KnJ>Y_gtVB-wx%sZ#e3=h#YQzTm|%RXKVQWqGEo( z5hpuo-XX7@tQzp4q-Q^bT&KV0ZGFTAAVK4hcWT2K79U$9&Ka()JAGkQ&O98Y9JmIL zZx21e%3&O9q*a%DBtRXFl)!uRsrKxM=miKzKnasBUI z4uJ5+n{lDZIGSBTmV5+#;RuZ%ZsseOq5L9*jrOD&v*HJ;rGuZsYjUSgD z1+Md(ZTZsn#=-02P2p~KhF2$=$hv-&SQ9D_UcVH5+sR?s`1ZmV?WVEyVwG)0{hL3E zOMMc*$q!gi?OLqc+o>1@WhYhK*x}6Mv~HRF(`5I1z2hcfVL`VgPJ_QILmhrMU#N^A z)xn3u{(^7)507~d!Gh_df8~*2DLv*7>@QMNLvLk>%vshOT(?2J5nak?fb%huE6IiA z0;NILfpug*k{^I^aM&XRkFV0XjW&d=F`sVk-2nM#GFF-W2(CGSCwCqI=U)g+;V`>j z&#e=w%n6PFp(r#0&O8F?h8yoB<7_=fYBGNLcY`3{jrb*dsywtGtz&!9?c~KZ6oJe%p|9Ka!>XI{5|o)-0ua9zc&?}u3Sg-ABx@l6)|1@YY|{IB2YDRQcXY(yqe?vhEHTQ)&L1iDFf zq>ML#zj2Y_w6~4A|Gd5UHjQu*dc_nCW{1G<`|5}Bq>TRQ5pSUfbUfBD0G}S1n6`TRSZ@NJV2#c!OBPr zP0r#gA)Q4Oa1{t-xiPMMr*anbeN9O$NL9tCWdStkAEOx4muksR7SWnu^~pVkY?xCF zX%2!65+1OPOho0ikR$=H54DmZ&SAI50?�{d-h~+9o%GJ7k~@z> z9I(z{I+eP=kSZma7C{F?ualv6U|>-JdTy<_{#uU49S7d?(z;dR4mvpNi5w__ptE+P z6dqzU>8-dS-=s$ry0!ja#JverQ|H${dNLA{00~0~gBpSZLVyGiQK^I(5j6~ApeQE@ zh)5ODDp-|-fHEp5U=Wl+WfC=@wrWw52#A1+Q>)e?sNhgr5p4?=$=!#3zpnqf_gnY7 z|F!N~e+XyJIU)N!@7eF(&wd{05%bEOAlL2`8Pf-us5X@k_&YZ1my%R{(3r#S;*=SE zPATH5CSd?4;c1E(qI1SzIY!%41VE2+H1qM-5d1UCr!6O4N>XKEn(GAMq7ZoMY- zyaus~ttw&gHIM}D3M~SONfHfzMGSP(KCQl(dMJ-t4rO4}E-L7cicYby-Fx;G4as~1 zOdYVwtbiuB9S0k+ShMN2uYOn?*a(!=0Z^s|QlUI5yKcd;V=PigQ@9<-o;lbhtYuL% z{sg5svumKVbfmu}u5C{wDBDH(?zetnws>f{z_*C4T-DzcK9}9nROT?0h1vJ5`}aiP z=MVpvA2=uLXacpu2h%^>x5~Pup{(p9D5*7p44$aB%;m&#w=H(z6TP5Ywo?g0PD+qh z$}^7h%g8ep#{g4s3Fzq*YF0>SpOS47vz-$pYn;1qe^_fcuwlO6b!yX1XTW(DE8C6?rj;Eb%{W|HFbz34x z^nC&R+Q-U3j3s+8+&n9|YnVbTE9t}esI65Hkb8sTf$qWMFdaC%l}zFMvaAqeP#0u# z)}X~gj{vE^Nobh`IU84tSJvi$T9$1Y@LYoq7F$#(wgrOh_&5P-ClO(qS*BDLK1UKx zC570V@*5+7LEH9lja__d?Ga5*PGIH6-SJ71Bd>;kskW>QTbCpeN3`q%rfiYDBur2O 
z0&rf6h(e&-&Y^Tk;YueYVJaZo=GR)o$EKDP?rY*VS^pvlwx`5`ATdDB7~+6AWkmfu9Gw+M)!`w>2Oq=v}IMBZR5O zjdEAxhS7DqRLdPSZt611ohdqQZ7*>HEH;4FpmX5= znL>Z06IY9LfoKDT$>`?MdZgCu^Q+C3}RCTA=t;RqfTfDj8vyUg+>M&)CPtF&& z)9K!f#?%ip>vLA<6ff`oEncx{dH;;>-~2jV5#{y^uYXx#=|g(%&lhrCt)ds7bNl+2 zKmp~-r0cnDxf>t+{62I4<-e|6`xDH)x3o&@O^K?174zxYb1vIIeZ?KZSE_5vbQ2zy z?ZNLSZqBXq8cTI6T6b^XhGmX z%G^vQVz!dvFyMx!4!W^ zvD0bt1hV14_w3^aY&>IWm*^q>g?$;F;RKZ}y>+9l87Q>gB~#kR?3u=5B73F@h-fM} zj7SO(AHs0v;pq&0ICsoU$Fi&axn7QTu9dm1p0tK07FP&KA6X?N;^`5}$xS77($)uBDslKHKR-)}9X^9v^PGdF!YH}b_|TDWp_@72NQ zb&Rsb*5UFuu@~+=8B=^({1EZY_=p!~sSm{UY+JYfk>vKw$a(A?6n02P#mrrY7pGLZ zd|Nq&L$ zW!bzfR4n&aG@Ixf%yufMCm9F$6a{Rc?U@716d=<))%+x~T)U=tXU`%akw8wTah>GUW@%@QQO5>XH>uP1> zB^9%mw9Lt2+qE|yTeMqrs4F28+(G#rM#ML;QS?Y6mYvGpPJppAaeJnG zK2$IVLZJGIe7@}W$1vE=2GlVOuAj-?KO3C(z!mb%y&S?I4wc)9Soy=zjtaZQk`KkA zeQ1p+Z2J0d$FIt}1Lhy5FOJV>t=W@!XQ`t7s5suzZD$-qzp^+WIyTz^ucs!9vbuR6 z#AjLQw~kVr;%;I6bC^TDKG|l=vfbvM25xjp*E{Y-5O@)bK8TfB!**iPc)n+TZTM3& z(*~-0eoteAeQEZtg!8 z?f>AClmG3ltDnF6klA%R%_(l|vgWpV&e$cVg27~3PzNn$jd|DWXn*zA)djUl=A_@9 z3bH;qa)+Jbw9(HeooKZih#`)+^)%51=aKWII;XA=X--F*CzJ*)UtY%QzF(6$(YIe^ zU|A}wz(*bHTODLFw^GC%vt6-w)7Ozt=J}i@OeTxF9yWct@ax1Azct_XpPjkgdCBg( zueJ~idyUWkz|E$52l@S4-GF}=SwD2uaKL6%)kzI@9p`CYF!=4=xU{>noZ$o)i@B5Q zr59TY8(9$StmkP1B-NU(2G^0!>%PV+F~qoObVQ?3iO=OJqf`0kG-@m_fSiDl>dkoyy&#HrWjSsH;i zh55rqs>i9ID?R%<1UvBqbEhPoA)*OV&_E{N!-`II>4ZSkLu?39f>-qwEE`II10nL| z1^%$NH5au~Gnj)jz{XmuK`YgK8)kND(MZvlpTwH$K^_Yn$)yw*We1T*kS+LUfCAe} zW3y^-?`UEc0K3|e1&9%fd(9I50Qj#>fca7Z*3S`_2%t?NK^r~7Lvj5y9BW{0X7<;( zo7ICBYKuv_9n*nwi*i$^amuLc4LjjJ;LwaYeoHRaYw;uIABJ#-CE zU>WH365o#`PGHP!+3R#8@ffHdU?R`RE7S+!m?MK01F$6%i-p}6)^IMi&9aP9cbt$F zEG5sXcV!9fMeK9uhRaJyAg)u%jcW6foj|g>aL{}-r2?9uD zadg}rh-5wmTVbIJ{2_?#l|CRiLIALGD!(7+6oBP_m+iAyRLe27yCnr&ATLtf$09gG zJP}4Uv>@`D=v13jcjG&itl2SbBr6t@fDLaF9`AOJNmLuuKTRYHLIm8IZa73ap9bKtOXTHuQ@EA#?Tm zoe;1lYe>+#5>$O&5GB)!!7eC3^rU)V87V+7kv4$H;*tQ2sg0Ix7;(hK(=vhUFVzvC zo~KDi;TVAQam3grWCXKujU#|K8v*@?0Ep-hm(ys3eQXRv|{al!9&+# 
zEuf85MdL)tlHr{Mhv3>uTg$n%!2v%;XjVKqU$J9xwEVKwv(CrQY<^u;OK}}&JTTwt zlA%FH;HKV=&-v9&X7_Hn9ozk*sQkx!+f8&gpDkN9|Mv3Go(Cn}-|unw;d0QgE0%ly zVftC>9=r|ajrGPWlUq5}pEUkT6 z0tC%+?xLqQVSy+>?_x6WE`a}pku5YPEYzxAZxRpVXOL2ZY$M#&SY{FQ*g;E8r`<4d z4wI0?cG#2P$Z&`vSL&Y~Y9M?K;m0Kqd)5biNk|}hVFAn~t&ot2o%(Bqw}HT1kU0=M zubCSBLcIrs$Y9`%fjd8Ay}>_u$^?AJ1a|pZ^>7JjXyfalA2V6^1)}CxWV^|&5b1(i zg1I1P_Xz^3a0^s}JZQaT`^x+te-GC&*FiIlmpYf+G&p^5FQDt)_|HJkQlD~2s~HQ7 zRqvZ*?xt_2Tq3`cm}i0>B~ZOgSm+4w1fH;X5Qt56@`d!@neWM|t~$AwPNzNsx|Sor zX?}$%bXzgzk9WvkIjVYdvX@34;9ie_>pl2Nt72y_$52|?Yan7puHvFsk7O za0(5NO<>w=*tlRkb)w_oq~x!0{l52_AMSfq48IlrHeWZO)9%;#e74Z(BFB!O%=~gN z!C)kic&g-6*kt`LKj>;s1(}{A?KY;IA9H(d{}E>`xO-N-$YIff#rG|ElyakG@u@Au zYk>?ZSVchr@gr5BX$VLi*os8N$jMvnkllIG|(MLNEXz z_`HW<=T3dFQMrfYO*Uy4aGc>)A=7w7C4mCKJpF8vl^B821cUg0AFl^Y38 z3?c?Z6#)spI)pmF8W>ui?n z;PL>#hlq{JvXw@oD$kbbC{7$aK8dv~X$I^YM;D9XI*~JUAE8B!s=WGb9AKYB$Cpn4 zKv@USUn$#HHyO$N)b!oAFQc8#ujRei);eGV@7*J! zL{_!rp;?7v=zua!sYVCorlYusB>|>E?EFG`-F$>^J%8GKAs}?#NWuOX&S`H5LT6Cb z4ul&&FT1uB;=+6(-y7gF&$;=vcXFL?}CPe2T)&zcov#1Ldi&{!A<~GvE@c&21@td zn=M*Po}pE7s+PwFuG3|iSX}t#NxFqj@5qo|=dRwu$&QWRtnbtw z?Xi7U{n;fk^8;VvxMSmuGmonOus1pTWzq0g&QFdnJ*jK%3=XC*{%Lh)nbp_pj)?+| z`^*dqJmPnhGzx@gJUdhm3<}Sdiu*wATiQo`P*%=^*z#SRm_;jB^sxki1^9a}mAsa) zBBD8uCh)l(wVb(nd1&bJU06BMiFya{JC4sQhhXi3^{Nk?P!NS4y^A}IcYbXu24oFL zJcBpY)d1?F=YAGbluW?cU=Le38aY`Gd+Hs;r{c~RgkZb%G5Z!fTq1*g9gj{od19x$ zuqi8VJ@WJHYvt7w1`-iE@0wDZu09q`1Ber-zrQS}of0c!w`J+F}!jdFp& zFYWGU(ijtfILdj?h8i55LAiAS2z9!fK*TsWdJ{a>4rs6kHwkk5tYT0Joz;Lvi6M)V z&O**>qedLDQlc?%d;6hA1*=@ajj^QXu)-vq$`Q(t&}b-n8R3Bgd=K`i5H0HhOG;2- zPLwbT+b@Rw$oC{A@+av63!G-Dp7aOVda0DIB}iR!PJKhUyqauHpj09BHG9d<-WrF2 z32v(1plM;&TsASxMKcFc-Nbq5$qjLn^fmI4xKB1_uXnO98(dCk(~YdXe-t)Cmjc`2 z0SbitJ7&Oxm3!GpH)!_SlYLna2D5NLc9<*WxI}L3;>ZVA$%Vvs_r-tjYc9YmEXAz~ z8tz14OiWjQaF;6Ux)KwuggP;jw62@yl;0&uWSPBnZzAH!2p&}C0py8|Fa$7?spKRY z-<^G}?D21zfMg(?qov)2EESAk{jw zeX3ps*85gyw~d3_1aAQh2{t+cw+cQ2OYt6(4uB@9aEAd_^(3ZG+?xXaJ$#g#Hw@d?qDvo?Gt)vGME+@X5Ncw%3hhzWJUN-lgo^3`is zr=5ocPc&$_r?oayt@gsfj 
zxSKkLoQg-R?-JkPma%b=yI}!xZi5qetQ1pd4dA{;hn=5FsyYLLLj(XifM+v^u2e`rt$R2wQIQK2py-ht9y!mZveoqS)TcM^lP+E>9 z&wqXTSn!s~279*#ueWab&F%+VyyW&yO?TcjH!o|l;vBf0(pH|Zytbv_)x1yWjq2Mr zGOu6nocMm_*S06OoUdrJC#(v{IebNb)b?xrbw@W?mPdU!yF0<~`i{>2pM|PdWzA;~ z-dJ()uL~}77u-8qlkx=rRgd|q!lKspA1dbFtDJn|{_DhV_B^T}RHnDzu{3f~sB8&c zFbHxT9&;BQ+!!yX%`!9OjlDR96#IX>AiM-U;(lO>Sz(}a|6Qwl{zqn}eRIg>$YOoV zb-l|IgN|C`C_bjwEQmtRf-OH?$j0%B@Gzv~XoXG)zD6XX@o)h~MZ@0mw_8_pmEL6S zy-pmiPXvtqI4L85E+}2Z_cY{tF7nL$)+51q;d;#<4df!+(}waXe!^P!Xmc`hV=H3m zklRVvD?)O$&SlaQ=o|$FZ-pGp;%0u*yF~FRlTcRLSfl*q z*zxUuFZwerlEh|_>Mm8)wH4Vta>&NN1?#yipGhL(DM{SgJ>DgI zgn#{W)c|q9A}I*x0&fCcF?%QD9P();z^yZcz@8|UZ1a+b|F5# z5ETtx&lDV=UEEZgCYb4#i}e$e={?cx;+&o~`vVJT)Mj&=9r;;}ZDsf~rq*9Py$51x zBiA?=x&j-2)}AYNGoHra^%=juHP`yLZKB8i5CT;O1#c4jm1MXmrX6zm>5mVQul)Fy zn{U6}^4fLd7G`k9tk;#>!?#tlvHn@ab2oCt|NdY4gDM31_O(xUB?g{tEp|rg&>a4dBukW#3)V){h1*iaBxYi=ByVkp$-jFqPW2y4u+O4mj zu)+uBcy+%=d>4%ze(oB5x|rVJ4cdU94Y_V9RL;ZC zr+d}XVe#&-g_EKMqm*5DQ%V|lEZy_XQP~}ublD$UENY5E2L`^FnQqLxv$ZklVc$ug zv4!`X%dC!%*PLv5z3@Zj2vHv=Or_KQdcW)Q*XN3N&GXdX)v>8YOf74Rm1;c}e=Z!z z6<$yxC!}-uf#d^9qQF&T;)Le&`TBCQkk2>AZ9#;yd3qW7HM#6dCFA8Bf3}HquG9{% zFu@C?9Q+Ok_WZqqY=#jRpp!x>Noc1^r}xvP02ROw#2Kb(2N3J4cw1I(e$P`eK0`(v zo{cYqEMS0xiJ2^fGKbG$sA_yoO8P19-BTH=x-GW){nUKHf|}cLYfVlrGYxcc^}I1N zO;x3;TZ_-@FI?~HZT;NDwK)^B??v}u12*s@{fpl)c-i-g< zJ+-Q{_b#5>UpU8w&$z+ywPp4f27WY=(GXvJULlnv@L+U4UyIl8r;zv@Q@D-^^Pj~| zm9L46P9gDdR8>9Ewj9|*vV{*_7~r06+VgUY8}~+4fs4SpQ|s#wpNuG2VBNAD4HckN z6r|m-e+w@QR-Hg$=83KRb1j7`d##Cn;c}v^emS{XXq}4DWJy(0RgJ1zjwkxa@wgl+ zokFL93zRuF>9ctvrf$egk*vM{3>t-5VI(2)YNT!=(qGhU5N&xb(m&FH+Zbt|sjjia zo;~AIu&BPA~MGw)JjZoOmd4il!*5P`xcQ$Iymd0>E zAkpW3A$v|8BYWt0<#YR%Q<#9FRUk`?xWrM5$0|~)% zB8Cyf;@l$+LP2KZC~PSg#ckeyeRvJU@?E?2E(StBip&%FbqxX+D;=?MNo~Ks7GxJg zI1EFES#}5qaovDEfBI2d?OV`|P;&jaCX(Ex8k527l>xh};tlGi^t*4n|3qUU-&OvL z-!N7oQp6)$iBHhtI(-C~-E2ixM4URKlgi*$85T@XRYx zbFvf}-?qypB@9>h*6$f@!zif*=utfK=dgj52N|=pqIsg{7(#r&^s;hNm z@8azjQ{&k$FK+bzpHVqe;G7XYs~k5$r_UxMJ*X3zBVrMZ20I4Wid|U(>4u6zNA%ldyT!o7uV9lZ#54e; 
z0G1~~YR-=*`?}DVC1yGbK-k@h2cg&N-f60sZeoo<<{qrkN>EcL@hn#H!;^fMn3lNp zr7E^6zBZS^ixjwo7T*Pgy>THbz|-lx1b*wpxnVwgI&qRLz>%l$byo!TG*jfe{G4@u4ExAouUU(@!Uzq!>vSJN0&<@k=uD~p)`4U06M@e? ziaODa%+B*!0D`{ zfevt}e8fw^Cd@?-7lw;Iwb&QhtI}2J_%31k#YDU?5=E`|Skx&JAI%unm-IsPfX`x8 zDUogj%I{Jvf#$9Ez!D`Hulw|X&!X}93K%7Jyk?Q~UeE{P`4z-tY^E#!EN&|l1Srjg zXK|>2hg_6%Xo^{KJ2tzYBvsK#IVg{W+oGpU;4dW*a43sFpr)`8F%Zm)2?WrAcKzMX zHp8`v5+&e6u`tK6`#~?UCzLYXdW!JbhQ0 zJY(;`RTkm3`hry-@>uf8r+zzaASsvMZ7}#J`qZEcv+v&?Pxa}X?@I6e$G*~yf(s!3 zyVeT5bt+46HuV%^?dA?p$^lMMzxgYuPuyQW0}6f^FG`9&ZG`Gqb^FMzv%4yLZww{A zA60ksKi?B*nK{MB?JnAmQ=LL5vdukXZl}GN z$SyeGQ+!PK!QpuX`xrfp-H4HY6I+(~4J<=q+(;m1Z6X3LIk2y*lYr0vZ|+d~R{@%u z#b_4bI}s@=rFaeP1%Yki7JTY#J_!j^T7h~DO76owl{|jspVo{XcO0HkxB=TnyOL{n6WHqXqJWTDal@)JcJ0c?CbhE4y5ENn|Ny=&6UTux3 z`@HrrWuGINS5P--7DPS~zlDef@^6`isZ8+FQdJ94CJZ^S+YT8RrJmev28)EA_qDnIDCrdZl|6^a5UmVXm*8QT>@mROZpT$&sQ~ogOn@pYJZqb6u zuQMt$=2sN)@4|SqXu=N0> zvwxEf$JoD5Q!}3s+=i>zk5o4V4I_E=vcrk@$m?%-@bB*Au7_>)%>|k>qaj^SHIt*(7wM8+8zAWKd5BD@npezEcQJWg(#Bq-;LT z7jh`Lev&Xk&9)CA;U2dz3lO_V{0XTU-xMAS&?OT#i+8cH{y81$Wpw4F3rfdly2-f5 zu{pAh4x!axD3kGnp9?Ju8?S-z(ZTUXP2rI_2S1>&2kd*!d6WtEy$7y3AqasV-Gp*Q zH>Q$3vErGI(x6@F8X$aq>l|Uln+b0973ML{`p<>9+ofLy-6qM^2S+S-gF|7hR;&F1jG9-eaYBA#rHBS|@$ zkqgR@a?=-&mlUHv#WiwJDOZeNiY!6(1s<2?j~@_tE(dj7yWTvz3RKpGA#&BIF9 zb*rUBRo$~$z8SWWzG;7#hxD^kPAWT=m9%A?iHTcfc4oAd%n2+vuNuF$Fe5HgHy)P} zxB1pl@lnja8FmK@ERpSz|MEYR{ev1PB_fF#ftww{_D6bZ&9w2NeBAM^`(MhQl9G;>PX8NypOLzJT;*Pe`YzfsxIRd>i4 z&LXRfGNHzf&aKfnz^O%Se@^^LTU<%l-0Ywl3sY`!o+)-kdOmXvlPlHOk^RFw;Ix!8 zxd9J5~%@dmRb3s91Of)!QXE%M!S2AFyA(&|B>oqF&Br-&-c zocY)Ii9-+V8AhY)KE5nC?-w?2W>nw8zphu={ruU%OcvB)iZ2~Bqt6D#M?9N~&s0!N z6d{oC#{)Zn^EfK!F=BN?@yz1*DgDshbM0vyWLxXM{WEMY-pDLoGNa=0cC#a0^7flA zE{yCQx%ui-%kSV#J&xRN@pwDS@Z+L=WByMxPTKif+PS~Wr#-N`{FJ_3K21GCD|pJB z1W`HdEv?`&`Th3n)oSxp|0nWkBWv3Yo=lJzsh->%6Fehq$28|g3fkH4$&0jUZd3Ag za&Ju5u9IP@PX;MFNCS2PID=WU4HyQ!Fw+*@(scpG^GVU5x6hzvkSGEkZp|QmFl!Jh zyo)bvHqHH>3#H#Xf##AZ_J8|-$i8yMHyc<;%(;3pf{IBmtY5mjbU~0r>&!rMQ<-H_ 
zDUk?0bo{l#h>&T>xt3{5k?A0%)`iX(CJH+dyo*vV#E&xf6DK+mI2YY0r=#zt)!g^e z16-76!xeg3O;>aF@-UJPk`7R9TkPmgLqOB{1p%m7Jg?Sxwiv;)q+1Vii8swjrw@C*8gp1tw0K3Iz77QI;8k`vv3kyM^&H(KNY`z> zvs1j{#$xORUHg=t-6-hUWkh3}z3Ojxt=Mkff9hMR_~#m`PaVxqv!O$e?S^KSPUb<@qM-H(QmySL~B}KuVQLzotqkOYEOb=P^jq35wg*q5Xy=jL6 ztrGcZ$CFzX?gr8PaAUEgv?)1Idy^-y<-Lgr-gQWVu%rF;NfSJR521i(?pnw@gMP8eI63z zLBXl~q(YRUpdk6Uh@dMsiaQcevtQd=@WnHaakREPB&(1h>!w#8Zg;6-02Q@Y~CA zp76QaTTJ?pYTb}*2!@~2S2#=$pYHn#7Dm-|9L+0e$Hm@5h|qMDfri{NIaA= zb5I@+=}>7#V%4&pTyu@hcc2eBB`3L?K1Zd2$%xC&71HY;7K7^bgaL~?5XU3~gj zR5#!zE)c#rw~gRm^1ukamim=!n(DsWH9dCDzi%sBT!IfY~{C2{=+mEr;IG)_}0Uy&%hl&0tP*T^r>|P07R&V~vHvr3WYJ%m`Tu28jRW2R?JAxJJn*Uc193h6s36Hd3I0(36z(~;k_~^{ za|X#kFkcKL@&oIH`KtVZ#V7F&!p}TP%cx3Ep@(47^3w=5a=WdH8#&`qu|UW)o?p{! zu|Hf;ogP(|Q<1B1QRvBT%O2<{Vo4uzJ!09hIUmF<1w*zOK=n}7fs;Y}9w7iL%+jDp z{4>2%&hql=rawZO9<@q2)Dy{1o4&#*V`#xxuH5HNne5pRx?N$dr-Q$7@pO!G`-Aw3 z*u1xERCOiNxvXF)?k}vHz2clv?q5+Jf-6s`P#WhcXJquU$FbEgnQ~T1RANF!VhRnP zP>GL>V$odM`XAtL-H`G2iFo-BOi=a=cbtsJpOsLeB$dEvlyrUcERU6tg6pO0N5zYI z{n-7hbDM5th&_L&m+HIWk;2PuXmFp4I$n&1wDAf^6;bh#QC4`BMPgJXqJjl@!9qnM zw#6b*WmcZ3D)-M5wzTp1v!i;EbZd%#qMi(-U4KG1%qj*USaKT#*JB{O(NZokJIajG zjR?>m{<$MZu40c=GTTZIxNI)L>19x`1q}@u7fWCA3o=zj6+7xS~7Q)oBoGh&* zRq1X(JN-w-vzb+&e>`!!3_xew2i}hxjr_jX>+d<@sE2>*zWi2u`k3dF*qZ~p$G)=5 zEq#4Bz~<1IiFn4%AD+f%P6qDc#~U{Yfb=GMBq0y@<%aS3pgq8&c;iF;;HB)rlu_c5)zlZ-^M; zlZ!%|n^y)~Bjc{s%f3$?JLLFbq$awkN#l_D*@100M`1S?Wwe<0AxFg|9^Nc#B%dZ{ zc$=CUkh3T?vRn_Vi;tP$ylOYT+G_OqJz-KO)Sdpv+ok-Nuc<}hlba)JFMzj9*uF@6E#0bP96W6|OJI>BbF%C>`4o1RtmZPp5&w)JNoJuEE9jl)BP&w{&tk-j^H{UfRA#Y9a1A~v> zE@;1PzZ?B?IB;8Z;i~fu4e96UcaJ}QTU#-we0;L+X)X zQFYr6YGd3Vg&TJ?V)yO1_)4Pc3J8)eH0(Db6fPnZ^R6PR-NPmBD?OwfK7va=dcUfy zeh3mmZVXOH9j47)pTq8n&enpz}bWCurkMJYW&~TzXp0y7ZhKRVRvh zeWVhJ6qX{R%E|+xSi2J{)(vI#D5EM8p|7#!^>NkEGgPdz8;?qW?JaFCH)QU}Sr>5K zS^io5eeT4**$DIsEdPW<elowY`FZ0?YESa*NJ!%n zR#E#S1>&(5!1DM<=t&J2knvTa+{St7u+ZRfy_dhIW!@geyupT^k^6%+mOJN>YwnJ@ zNsbwK?Wvi~NR|f}Fp|_QmL6_|K%G3uO}^@C#5~`?!3JcFZX|i+?w;KB2C4T)em0Pt 
z)a}XLa5Zo&Nv93z*i$`eX;b62r&_nC`s#|2+k5u!>6o-^*88eiLF7u}qEjlyCnP3s z4rP;_85xJ(p4g*4vZ}j-Rb?HGj%4h+mKZ;cO++LJPAa5UX>3?6O^44$^7%m#h6i@v zj8ZA*0g~tYN*~qB;^^rL<+`S!MbSs^?^NQFJAPZ;44gwSMuK|(c;Kp_etLj?J)`QK znBsIrI%eN2`#%jBoGs_?SlH%7&?y0zvG!1j$9m&R1g6-6bxl-Ntz9{d+t#mC!M>@b zmbQ<)0`67(D!Yn!6=p&uIf@0-*-?B_LJ2%ju*V1X6gbOkS4R0b@Z_~}O zu|KGD!=`zMZ;^Rw(DS~LNRL7{%)|O~W>j{hu&c@-zKx*bWx?BSGNZf;ye8>-a@;fU zog=+K!&}t+VP$@6ltHw8-N|bo?2TK;cAI-lI&Ds)Rpk^^oAW1qiVrYixHWf4THEAko@`s5?5qmMI@ljQ_ zUY6$Uf_3XTW5!jIw<0^PqmP zg?W5E#6FnEAuA-nL73%zF!{f-NzhfC`tVn@I$iYyMgVH?!j zhR4Wz#1l$s88IlX2Z&;n*cklA!B-*438Q<qhhNs8oWaPw|=eoSW zmFDOTR&2?kmW5SQ?>j%YHnrm)tG4KSS-2>SZXbN@WnN51gz)lFnop=iCjGPZ!K@QX=Zu2qe?eFM z!o2If1Lnk`vGBYQ?7At~72%o~jM?|NYXJDq+qaJX$A5N>>lUQP&R@86k;N_7_=m3X zv99rJT+s)v@ec$$VL5ibM=XzaZ5FL-ug48p`h9;<*HFacK^VbSv`r0SVNh!|JH={N zqFNH+U|y6N-`W;=d2&Nv_m%d@4&6}B$Jp3;L!%nq;m^9VGv($tGSZdp8~UC`b`0f= zo`11s&Ag%K=U>Fe@|-iyIGdd*J2PD~{_c$B=y>$_8g=w|w65HI0(-+GeM(A+Sx-(G z@EhPy#XZLObL99zMvbZ#{*<{wzHBk5a-0KQ4$wNIozgyV1L0UuXNlo!6Z*inBS#}A z)H#}2nqQkgB0?gMumqh(!y6_tIBKNdlysIn5+G>zU+bS>=JLo)%L!r6lH15RAr>Bn z7TMAug&ywq6v|3EcTYNg%#KiP`ileZuO1Z4Ac%6T)M7fG+JJ;uQ>PB$0>1&`9BU06vAO;Q!=-27Fn;Qv6jG zXu5-(dpYT7!YIhFgLpe=mHnwcRH=6}rDAvUs)&V6>8G2;DbpewzbRKs+QiKj-%Z7z z@45Y>Pj{4c)@CN1;=fw_YQ}=LZzI)*-d+(m&+xeNZPO9SM0o!Fsz=ETB}5_ zM1qEz#m+0B#ul}+rDHfYB@;y9K@I*nh_MaFI-4ER_4a|nT1FJq%?>?RW78MThYAej z&gS`jPydlp8^x5|5}tprKJ?41Z}vMlXP_OEk!yJ7`CT;|B8ibc4sg@L2sks_N5W9% z2KmhaSso%Kn@mz^vauN@M=eX`*WztIqb1@(~ZqttyER(T5TJ zOff!Jmdfy$*>ULCSd~6fvU{TtFi-p$9FmIkW!%H#R$B1Vur*7GCH$g0-r2k0m5Z;AOwgILzA`J+lk!yW4EqULew{(i z!75Lc2ch3t_^#^|rEm{1NL@NJ1a@isX3}R8LsVl(2u+>t0SpVm_7;k-6lo#CBBDZw z1Zl1hVjyanHoAoPXx)l~krDX^ADxU${%z&S=77^p(H-X#g6@|stWUlY3FU%_!KyDO z|6UbQ6dJapUI$=jAaw+C!16;AEuJkNSN9dZ^SCOX8Xr{)?id(yzRTEcZuPAno)T#N z_S4n(%MR7td}vTseDi7nJ!f6wwqA+4+j^TgVS&l9*9-Z>zKe_&uzZV+_`|WT-}^2q zc38yIQ*6F{1f-oo?OACpc@_A~e_dv|Ady9m zoVe-P^1|~yZ+hhBqZw<@dVE~erhdBX$Z)3rx{7Pq!7L*}@6>ra9LlH?%=P2aXbv{f=-om9fv5S5O8CrxSPtP}RE 
z^Z7~gICs;A2WZB?TFy2!b!(i+NlYp05|D4IMqfu?PAKY#A@$!uW1#xIm z20HDZ>MlKbyIT8P<*C+w)HCewF>;t@>7SnHm)E&Babb|+%?_~(3)h|CVkR%_StJ93b!FA@?wZyQ_5al8_smC4LV|wxetK$|NtM zv>V)_Tw-oDxJB5adWz*5-m$gwy~v*2k@=pm>jHPAAj^sHo%E@8tqT6s`^Qci#5mwj z+qa6%yidZU`2K_B4^d|O?B$W0R~p2cl|-4vDJ_*NyW&?;Ywov3IN;B8MLOV4`iolp zMc9!gk@h)}_FZCdvXgJa!EFxs);%q_R%&ac<%LLLsxV0qWfmIMWY)ZL!op`hMYX`w zf+UG#wu3km8K{K=Qsd=L%qm+SH&AfGyOPDzIz|#e7YWp#)a6Vi@DSU2=>{!;VtA6? zF=30J4;;hFgh4j_sNNhM@Cv6ELgn{_g%^lHh1h0}0e>;9Yg__zglFl}w$=+>#~=I( z3})If6>j9}$k^36W9k=GcU;81O5Rl)wo19Pw@%-zkcuxrqV@>KW$0+tjufaLDV{sB zj{at5)GyB)eyS`#2Z>g>-xR)XrG2+Pg|Ypcqt=B?9@GyGlB8zA9GoF8S*H4Cwb_?| zdnrn@x2gGE((@jlRsx~zrAfY--~p`?@-Nh3K;bx6Zz<73JBn@^hqZs!;KG zj%`J8)F~sY^spkuNEF&{AHP8&!RN?621dK39t#L`*PeCxdeQjWlUe-Qk(v+UsGJJ3 z9B!kq8O{jJ&z?2Ki`<)<|GGIpx0&ITpPyz*^B2kdBE!q9LY178BsNZXZ2Gm=UvEf%@<8`z&TiMF zG?&!D?F+6?RD6FtAxp5p!aZu-EV4gr;iiZ;rqRs~ww~E3-<3SxS7!WpK}oqt?y)iZ z$KHDR8}SQ!WTJ8V*yHx&LszeKK6Y>)KEAqSU-G@C62ldV4-(%TWF?QPVl=N_(CS03 zzjjZk>h^xI)VY7(^!d&n&O_csUJ-KG#j@FRYsTNRNo|GnoH6&M{>K{$_VB4{ED{+ZQ+n zX&`15+y?`!f*_LPBrvn(VYOyZMU5|qfi~0>(Ocv0;K~6-$D-9lp%)K1CG=UusS@&( zDPoS;VmnmQcVx<{dQQM>)deg4Lrx?VPS^#Mr|L9yCLUOfF4EA?9^$*W((r5=pfY{hhW8ZlZ01+oPbxI(5uWj$2zZ$gZwB$aS+q#B+MD*1U( z)Q>VP8PGdN#T4n3tjY`Kc@}Ep#dSFt3;lY@hHJ>sTsF@Yr z{5i+I)q7>u4-Za2N8YsaMA(;%;K-qhFTGt3PB)xR__|KyINOk3Ui$gmH`ELoG|I#Kr1+Gq@*T(9AqJVr-n4{_|#w4!rI-t^9iuKQ1 z9CqvDG5b>cN@AuX{&8=V2?gaxuwy2G%F@w}(E(3wYL`1LB%u##lPc1*30Hu;Lu8W) z6@p7iQ0KIyt#bAnF^Ih69V(Rx6aL{DbuKTZu6C!9q$dV=K-z%xi5#Jz$PP~V=>fbE z8jKSV+W{?THr0^>j2Wp=1%^U0NVX9SAWKB1{i9jdPFn*U3Bq%vGf;H)81a&AngHP> z$R+{P!cC|ogV{BxJ`*aD8CW6-stj+b2>%yp?*Y|R`uF?p^gu#LBnhDhP(%ol03sr4 zXo`x8q8Jc$ho&e>R8&w#5(q__SYUz`8#dIas8~jlP(+HNj3bN_A8%nJMsmYhX2{Ihl#sIRRt;61|iAHAlPN;#yj90 zaJN~wak#s@&2w>+XTkWrX!ZNA?ExoOPrNW%@R>TA@r`a&`}yR|*MEQ8^F8CA@BjR~ z6_UObs)wkgE2)Jp4gX~6v(~20d{SfNAD9(8K3|`#jmVF!n_iRdb4S+G2~V81s0ndPC>3A zkdeiw2yj3x?zvJCZmQ?LWVI+GV1wdrUJLDT}iqKpetY3wVJTLjUNPwFO$jO%p8 zkSXmRKX(B-gC7Ny-=6>v2%*pbBrhWfzI5Wv;S(^FmEt?%j922@T|3r5rQQZhS{bNg 
zjpeCfgzhmFI{a%4X#!b(_YhmC7(VU=4P7CpAU8pvE{?VevDH*V-QHKA7#0Px3DKyr zf>#(41vJ!f-EJ~0$uJD=QW2R2u=XG5yccEpvK?77=>xkcEC=V=P%>XpFUhCp9ix76 zH}u%%P};xQDR5n zdrkp`D>I6*PGVZ-3+gkr5qLEwJyQmfQy@ztOi>dCu!MNE*}%MSKj0vF-AFkB705IM z3eCo~C+l=EB;3EtIxTgD9DDThK&U+=*kkp3#@QG2>DDugc`c8bcvcH)Mx`N;=}u2z7YO~`jFY_A{ELUHN5Bv(^ zvr5ZE6>U_}>5{Zv1p}*ZJapv7|Lo)ulVkGL*RVKKH$jOzn{~n!d4Xt&8bq4{cvs@Z zI`A6OF}<6yRQu0}ohv*7dKTmADj_^~0*Yy7NB~Z!HZ8Ljt`uVG7UR^R$UF_?;``3L z1F7|pBoF!TkYx{-)5=~DU;bB-eBMhC(19ZjLV&Wk61BvaM761kfoYwL`4`&U*AanV z=UL3%sT>hdiHUqnm4lNBQXMdGgPsP22#=-mV^S;_5CFtbfayU1z-*I{Da;`w3OQd2 z=s01nR!+@i5gm9zK09N0tX}yuw#g0JDLh*VjY8wXcc5Ui^_wjX`VHYnTP*2 z^5Aft)Z0-0c~{;AL!bYmL**+BE*a4GK5`414jkVo~8ONuy7{u!$mA zcoG>n0O8q;lQSh^8^o8H5)aiSLke^4uBKBEjLw1${a z#8sxoNjqd_Ab2!}K|r`RAaDd)CTf$1ZSC_hdIBQEO^t@55%?7gtufewW^Dd^)%^w) z^ff#;{t4lsKkXhX^TT3W4gcAXBJH9^HY?v&$loDMLZb zJe-1zI56QXGMNnD3T2=|iD}rV!6U;8jrJRS^BN2m}gFo`okf zvv68DnaIPQpMXfQFAh}-&&CwOlPkq6{J;MF|8va7=c&}JP%|?VRd_XZ|KO1=p@c0a zZFN(_&3FgH=aL=L~NAo_Sqjhb*g7z`kE^&@|Ree&P< ztJoOrl|xn*x2{Cq9FBx7u1S6ycKxD4cx9b!MYXN3#`i9GIR15OXJ=LCl^O{sm6w^o z6SlP#cESBaCBvW0d4r*^j)>v)p-tb&;Gr7C5-TM!noBa7yGW_bj+iIaC zHhIV@p;J_wWu;t^Wks+`6;(TQI=nEswpLUFzcQ4O&^;~_ExUw;ljhI z!D^9Obl2qCM%D)E4eAZW%?*U47Y?1gPWbF3~f$1Ts7if%M3UQx7l#HA=;c;BPJfAQ517E*&a9CI)B*c>8|H!J$NRKcmOO+|6V}{znXaV#U6n}F*gL50rMDY?z zDtjWR+umL{YurVPp<7NMe^?T`hoCd`3^IsCJj z*t>TC#tDNCJEdi1Er6ln-H;M6>u#}S`XV6iw^+jl-|pd!Scsdl&qrr!#3R^ z6K;+Eul4)?ttkr-Ubj*4HtUj}2gTN^0inv0ANQMFYv$zSv57#Th%lj8SEh)Cpt~_W z*fJzkmNHp@>a>b;g{bCTzJwfJOE;kr_^jFN1xOyzHRvMNJh27~_bGUoEE#_jil_QR z3#&o5p`aS}Lk&j0m%}h!gIiJA4df!)>5b4nF9Ej*;6}hN;;UxB2y8PIHx)PY@Kl(r zLF1&fj*Y)nlmEi_g#U}rzo6X@l*Ufy&eMXzAjrakA|0sP$pJd$D$@|h(D^XQlKZLO zVeL*c0j67Ha%R9dVjQZ#I7*-f-~{6*HWe5K{W;#`H2K(64H3qWK|>a6mV995pS4tq z^XcMxP?g+GV313sx`|l~mQYBQ*E{rZ>Eb7J@qJ7z63-99C}jK~XxT9L<`JMy^MW)Q z$YTix!AZ@}Jdi7+^Xna;;cD99R;v6D(BD7*-}?POkH6;y6i)7;NspUS${%+ffhf4N zBbd17%6C_XpWUO_Db$K5C!AV!vC)_f?-7V)LC|s8NBjJvMs{rrK#vmYwDop^R@xC&wYl#bnr^Q-=<=CDz#r#nxq%H0$ZP| 
zO%gceN`s;gUbUg$2P=o-`=+0*r#Ig(qhN2Re_(qV1+tgWG&Dm$Q$KUW^bR?7PUcLO ze%8R85l1N6lQj!UD&SR_4w(*F40*~8`dQtx20SzU=-5X09%ZWOh+~f(0ptDEcxKL| z7|60YK>Rag&17}237EZ&(lQZR8c_0wE zqp-}ZeB1fG*<*o>dl*)ztP-NV-(^DDHOVeiviI;N_E*^b{l95uqjzC>>e2iMigR6c7Fdg9HfLf@WAQccbX22FF zAhQgud;t+#9ZRrHNOw%HvV~`(L5ERAD&_@9Aea!?hqcpGYzG3;M~pALFpL5PzrVeGirJj z#x}&xRGVTJ10hmnY>xsG|J>d~3Cp&Z51ftZJ&w9;I590-nsagnm(=fWUWOx2c2U)= zibK!seb_kLxU`@!+WoV6W@yTo#7PAK$pw~zFywA?<{NWBm;>|t&WM)3Pwk)S?=;z* zH-1%&elB3_9W5mHdB+bF{bnn=Uo3HspUx@u$e#D+aO$i0*t7#KplrbQ=?gu#%KGs6g7w^~19=w{J znX_aXdc4u)hy3z@qi}V{GDnLgr{n48KyRUWj>l;liCz_pX(*uQA{Y5I!0x$40uDg< zvHCT5=NRjmTf&dpcC@}Vw#aP-GC!dBv*$GrqsLKDUl4Ex>tpJZu@bmy<~;6*iN|5o3xB!lD2#GFGmz%yI^u)ZfG6 z@m|y?^s_eWf#@&x4bO6g>;A&3@Th2}G*WwKAB{FzHBq@}vs;^=PxbcCR9wcWS+}Nb zrg?{9aNESUz2hcqA3{$2w)Gb0Ps5tfQvwaHg8uXPBQI-zJ7ckj%zyXtr;(%o40jvh z&iuGD&Cnm;MY)qy`)0~dH-D=XOW!sJW)CaFM8w~6t#OPRf9rtHGtIv~_9bt1NKyp1 zO^&K`twSd4<5csuS@#HnS$;-k1E1-gPCx$;0C2M@}L z!Fxe35ctW0m0%Cll!3TW!#4a1P!!DU0F9>-qc~%|oBT(@E$^^NAS!8;(giv0X&^2X z6`)O_;nl8_iGnC}8jxBKR6hVwDG(v@XlNm|%)?CguNH2%g+wuJb}HYU+| zPdyRs@loknKEpxDZ`hjv- z5OAKm+#JAsIe&qn?+iEv04D1WopEA`E8w#LXa-T35wfeOQuNlo%mz7V>)a2PZ^}~iGhlY3k)}zs-n4~eE>AK!_CA-wDR$i^ zv5mdYDHgqHYagt;wrv2$=|5rYOdY(^r)c!qgr@D;w(;emGYK=gq^}Ph?~A=#wW$~z zfX~dH_PE$~to{BdI#2w9Y*|ulne{;PCB2eqE3uZ0jo2_)zI(i9Tg0r^Ehp+u+QK+{ z$5yfp8_Qw%-eW6i>7Vu(hEcfU#$45>zS!pbrNvVN^Y3$RPlW-zg;78#AYf&#|MeOR zcV2ddi^tv{hSz|dq3HL6Ne4D=qu9s|qr0u28s-{P7sBgAGs-AR2$=jqlc8TjOn1~g zB8-BcNB+-$hT%QLoiN&dG-wR!Jp>=e?nl~V+QXC=bQpiZs@$E?zDKr);OD4$NY&eI zqj&E?A1O1)sh*iL$lDcvcR!+jvC!I*x5Fji;u%!EXLd=9k^Gw3l1C7vIpR<=N4=-V zMkYnf!UO1jIsr zz!z4Z(8hjurq^`-NQnK?Ii*Yh%=Hee{jPI)g4rkCZZo%NXOvM31{4v5RSI-H4YZO> zWM)hk&H6nYO$}E#rOXuom+KXm2vG`ga7Wxl)IqbXfCjir`7+Hfl@F@amfcgp1G3Uv zd;#<+vrmCo;HfUqq>VoXJh~%z%`?Lg2QV$F>e_c#K)tpET6{9TDx!u3I(rUDW4dG} zo0GmJftv#8hqDZrgxVp1&cvUAE7d~MQJ6q{O%Vg`Bou&!NCuD#nV73V90~5g7YMMz zxqJz}9yVSBr%+DN1KYa-rxD29Q1D8inA1rtQ6zz9shumo1~~6uNOA?~P?}q9`K~m> z-+jY&!-hu&^<~31gJFMp^nw-NX7wc-ex7&JVBE3$X>G(OXPGIU5Wk 
zS-o~UN||b$kouvn>J5Vn(H)*wLmT)pSLWu=%~?8wS2HfA2Mntx`8^i9b^La@Pe24{ zbjj)}%n*4%_ZC8NA%i0Hu@+D$RDaz#btVpX99b(?13ErS$#35+2@cXK@ETd2qJu$6 z*VsQ%_Y~(Q*f3NBY80=PQ)c^KQ4M^EJv$fmiB%iP#U#0@DrH*)7(~FQXd~<%)7Loc zb)NbyY>V(0=KLT0MyZU#IypSMv4#wqXfj+sj!_<`Mwz%=N&=3-li?)@?*jt+psYr9 z8X)~(Rq-G5gdf<`Q@{XYB*(#G2+S4D+A)JS*u`Pth6z-U13L%kx8K3&v|!%GenI0~ zgH(aBUEPDNw9qljH-Kv~%RGoiV`t&j#@fM~AiAaiyNQ|tpk$c4jiLky3Pn?c3}l{A z=N#0-cF^%fS&#;PjH+PraB^5s@jwu@v~Nwv z`zhk2arXT*zNvE>_%$SvTyBtU*yn(zx;9f`+RuRAn!A2DJ^5M3WrN}5t{?6`GOYRj zd~nB7LMj<&*yeer)pM8msxkrwdOzzbF!}79L+94A3m8ZjhSzghR~r83EDj zL30P!j2D*6o1`?3U|td_jT19f;xpFv_o<2ddE(RZO4TIP9<^64l!mLqa#Z$}RB6EB zlvfgCDV~$ zg~yF`X_6)CY$*+g{rWLpcQ@1Jn<2SIHCt*4QL_*slg@~?5Eg0ocW;&zl0$41R+>M^ z3t95biY9u=H1XRkXPb&9R>fuly5Dq9~i>a0?BOSb8z6%1Dks_ju(( zRe+3gFr#PG@yCXe>M_J8)?<3Pga9fv8>!R}g&(n-2k>AM1@F;)4%Dk)<#p z0wIlsq}wEJ8Mue_0y(C!0HGOiAg%ze{9b?8S6A^JkGMspMY8Xi>(Kpx>NSMLU~ z7;@xq=$ie&IgDVe*;de1kRt;SE`Z`XA>adZ`s3DMz4cYNXet*FwCxySRt-4WG2rk_ z#BPM<|FjX&_S9&o7S)W4UPjaB4ehpnUhVut;PWqwC#`FK@+Q#6O{`kGvZnB$r^=50K$Ix+woE41%Zg*%Rr=>`Gfib%k(DDg!XrpfV=ln9wdK7Hc6 zhdjh71qSaieT`%x1LFgSZ!{_YiFboaOb0)aD|EvVDGwC_7}l{BI!7*?T-ZHMO+x2W zLJ_kR;6vsic|PDx6zizqecHt`rRGv|$pr9U$#F~qLN^8Pl|3BI%)>4S&xvEAngDyu zs|Y?utVD6>qy>t`6< zx!L7u?dtDthULNNx{nQ;H$}X;Ui_Og|*)-_fg}&kYwLg<`;fq(peqt7xT=Rex&sWf5*{4Uqh*u7)L@2 z14Wa`N%L_nu+qw7!vY1wQ!t`4y+$2PSJOduask9>PD_NCj+0$uLwb z2deP5q{{o4p6d?K`uLAJ2CSE0opflzDd-vsfbs`=-g=KG9q{hL-0<*8WrN1#q(l%G z#sq^;bbNya9`jaW$C{DqCJqF3t;QmTVM4cqgmH^O9)59R0SKD|ta!@W;`3i~fBu1^ zbiTQ~+is-Z-Ta%~sQovt*{J;&#G>pYarNLytyKlt-0-#4X=ka`5fQN{#q$}}vZm_C z=#<;Fw>cC3He?LF^F3fy9^E)=?w>`;I!X+>V&UKaRE*|}K@z7R^6;&MRf}87$P!62 zu?$_cHgh`?vBZ^g_soRM0uq-dfJuEo(NZzKa%qE{{q~h}v$MS{d+7%6Fj$pV0xsK) z53p#!tlzzeIw8TDXNu)`0SJ8GN;HYb^p3NO;$bxz8yPdkM@pXT0sQCWNW2a0dML!_ zh;f$|WMbHM?CWxH=%`QG0RXv!$WF;G(I%Tf1?eI>CHs^c-;4-hK~J2tQJ^Ni_3o9- zq_V>oQ)pA3;RP^*rqKZ4OvCC7>ybBu>`heP`u}`2f-%kaTI`btnP7lbH=F{zJVpb* zUNG;!h|?gm0^gCSrCrdqPsCzBvG*mu46#}uZDKHd0fk@~IcPAa26p*>XyIcta{hV% 
zMu$YLhW>vCCLo#t;a+3)xfE2P;S4Mc4W{Qmf>=rj2(ql@jDqGUz{tCRGH!r4Ft}_d zTv@Cx42A$7op-23T#bi=wrn0H?!!^~)o6?M+M(GGv}KF31jqF8`yhtL4jyD(U9B(L zub)0K&HxWVh>z4ZTk&OPHY}j7raQ)Uw@id$gMLro2%Hb$LfDeVYx|D6O2`?qmR-SUlytTmv|eu~p@ zplG7tz*W+)`z`cFbJzSYcUE{1;ppRL{0VL4&r-|y6WYheA6Om#0_Uub`g3I-)4gWT zgumR+?NY8wYRE2Iu-t)On9++*{@ySx0Rl~*Th;`aO>KnFZJQo z^{yTFEM%5ggfJvv9x7qHiDnY8&l(?irWVcI>(Pjfj>%4n5oa#}}%1!h_MSAGgFOU8t$0 zU3<23&DYL5w6A|0*>1Qr_)mrZ_8ZH7y#r_c)!J2{zth6MW91s}UKhV|RN?W@P|=1X z+iwIkfeO*BIArO{3qNf?_{;jpUEjA~`ub}1)qgJaw{|U!_<%mhl3x}IS!3nI$Zk;w;aiErLJ`{B9KYy?(gh+f?GDka} zwY}amE}bXdongqLjndtF#!1KZj57=gkLjg2rE)GD=1%5(wG!4@TI)oT7ui<9TbWzS zpD)X>z`OXDtgG3&a{KIs=QtC;4Ta8`T`YX3ttC&}7@$ty8~z|}^nqQMz=EPJ#_Cr* zDTc*F;)j2+3FW9jzYGm8dF%fsfJ8UWVfr2(u_8}nCZE4h(x;@6NqEl%o`IH0I$`%& ze(-W(U4&Vj&1uY~qUv!j z4yP>pEDCI&TQ-^)OAm5P7NXB4o5K7Tlm@|M;RqAytf{huA=^KO=Io-srhdex#xPax zZtL{SASc7r7;GS8qJkq9cZTL<4B38%xiqFf=znO$@izS>^-ti4w1LW*a=UVS02~Xm zbjKRUJ3Tfq>Bf}L$Qza0WeFd%rk*E^zvvXQFBt9B=0r5FT}#;{+wNX@H6mI)H?p## z%$mVZZTltkMLH3U!IOpjxZv!pE%E-GaUy=qTN6sQqNNh8!zoVhUnw8s1J9fE%9PFD zQ1iu#Xfz(;+ij#g)N5^ne+xfG-bC3Ix!)t<(aFK@(_}Fn;VqHP;iqRR%*Ig`oZ7c$ zQC9EJjyV~pw)762O4}T%-Mz;`qF1Nww#wYSB&c3{jJx@Sk<;+CJ?XE_+m2K|@B5h5 z3x1Zj4`&>{w`kuS&jZEjcHY~DcjVm*{nMRxVw8GimwN+<2H@i6_Ssbh zy?;%noft3;7j6)=d(+((XA~SAGTd|j(Qwapc*mS;%Drn$ z56(WdBX9hc-eJ#EMRU2EAv-?(wbSO|%sAS#*2XQg=})GQjcEx_$J{^{l61&ZGq@O`J*Cc=S(P!qQM#jNUqd zZQm{JH9`5A!ccsB$V}CtqQ?ggEjh?8dLXai;N?GSLz(x*&f`pG(+N4T8pZ56+Eb_w z|L0-_lf$!DL@JmdECKB|%5{*&M0Rc{w)nu|37qFd8%Xx82BaKbf$y;M(`A88>2SG@QkyzgqA7C_@B|0mWwgKabPfov)JF!dOQq z506;;1g=Ny_cn1ePe$gH;wv3_O|8C$p|(-qp|(#yClnuEqmLrW_Od$NW{$@tvUEWq+Qi-;8$Xp*^Hib2P?1dhb6-!;tpNi6U6 z?ooT{e5TT#Dq?1jDD7#6kwp{t#@D;sQv;Y+QWw8a8a`s1N)9{-_dgDHm6wL)8@W3f*U?w5ZrwnO$^> zIK&5sLx_BwBUZmF-HMHX51jk2|CdmaFFb^t^EeZFZn|UW8$?KmS;UU8A1CWgHXAYg z^R3CQOE{kVVD@)Ar#*^~qB;aQp+N$zXig<4O zRf6%TJu9y+!D>}GV{L+ZuTnSn=;-y7N5akz&GYzSoFEH`uB!GDmV`2086H=+&3l=c z+^|1l0_WBb-_M*medg4u%Ae*g*tV~4){CO z*PfXhLjfG0_8E+lD0LML&YaGFuP7cJnHFjA8t9H+U4 
z$Bm*H$m9t8V)+3$vxz;tyPLFEK7v!AIGK>tcLr9Yu$t9~@J$*6PT;f4m1ub_f+?Q> zwDkayALKBM_&}za$>IxT2)^%H&Zc+O56iny3W=D{A960}hQ85Vh4tnHQXV@hKr>k( z7T0UzTK^yk-=qa~U2WB(8Z8=KT@ldwAT!SRSH+??0!1u6kU(k-9H+cSr+kjwyWawb z3Ri)*2d2Ha%np)5CUTDDqlZZ7%?b(6O(+q$+G(^Ic@-YIo=sesTV@QJ-5>iKrkVTP zy5DoF=+j+i)=vhjH?oaIkMCls+(k_<@=c3A-3~Bq|L&+ComH^ke$KFQ06_UemT&`H z{&n#G8^BTGM<+0O_zuK}9H60BBB|z;xON*SB(;CGN7I3R0^hvSbgd1Uz^9*!gbNtzzqvq z6zKuF4MB~@6L9^}jENSqX`xp%okQ=)`@A#LTF2nYh=Czdc^?U2I(H#O*N&; zyYu!^v*5}MjV3jYKtX5HaeAROLC5#u5#XgwCs4$&RcnAFQHH_Lux8X%v^>8xA{fQe z0FN1zVm{9ak)v!gMW$>a83*7nyja60Qsp5$5>l(rBbrED0L77i1;cM#k>uZV^;xHl z{NJnh-+KOkZ!rASWG6B*8J$e8q|0NZ9%QnSI3^a4|1I`}O4{UC4{ z;>vlu$W%>=n^?5IVAYc#ex4Fp*S7e4E)u{b)X7CukYeZIktxeUp8k48f|n4%PIUe$ z4WUJ>JB`wo|8>DI3gjis=-`Gobi`3<++*LpGc*;qa_sZi^I?St41EHr;k2-RzR3Bi zP{SqAXJhr->9yI2&fZzQ>%Z%lmcw7{{2&v(|M|f4@B_cdG>rkRbvWnxD}p*^bahT- zj#Hs{4rAqz!;)b@;^A^=QIsjJdR2mbfN57#4$^#Yqtd}D)8joa-^aK9xGItAv6^zYeKoB z3aJR@S@c?|M%MHklJu1F=5z=1sNE4zUan%HYj`F|gvzEnkC#(d%bO*?t)go1JVXxH z?ZywI_*lH!%#qY?S4XHJmvmE9mZ#7?6&jq0JYCEr@#=#&1}n2Bsa*90W+EnY^p~HQP@vLhFMa(w`i7)9I7&|smecl;eT4cMiLX1 zN#v~N^C;iU=q7V1@8g>WGp`RnO6+Sn7Q+M z;eWT^sQ7P;*yFq4(5*IfHyirarKUzlioyyQ_X-%>e_psfp@_27WU-A2#l6tYW|0wX zQJ*d}B~|`ehKi)^bcq{bwT-wTv)!{oO_vDBv;c>8^cuB5h+fkbxX6a9LohCTPki8wU_kP0djpN+EfkBg1Nvkn-CUV5YB#{mCT5*&E1zCEtP1fxIGywT4yEid=_^d@+_9q4E ztN1D*->k5FCI7(}S2e(jVoB&L$Zm=%3Hkb3Q35Lp?kt}k=TtP*1UXyEDl`?+004;- zs7$(ZNRWWXr1Qgs$Hh9*NOJ0np_f)eFN%8ZSiLUle8D!==Y>M!W%9V>aX!2-Ih=}D zC~4GeIGJZ|XrQo}!0l)AUcK4bsDCl^T2AC56R}DY;&4o%kp`MXF^ZRmqI>Z{amQFQ zWBB;lsK0_3L9~!CJPhB2{A^h)%MqUUfm`tDnASWpcy>7?#rs%vAlzv zEOXVY<4V;m`Sw!9OX!H$WQsm8XF|o)N@KlUKMA3dP`-+9pP6YP68*CO@Up}xk;HzF zsRNb)??{#vXH%RK`H?q0;r=|Y*Cg^I|WH*9&*^gj99$_l`0_$ z@H}UT$K_-W?}vC?fCI3I5aZ8wnBJ{@1&RoDL*(hI5Q~vna?nNqQG{>gj5(=eyLxQ= zHF#CxsHZZv<=HzHg&_X9eWQwF9Yb)8jQ6LMpOUv-aqRIca>k+o3&-)|MCD#Zo(L|l z z*A4!9-B_#hYVt`sV9?2GQiDiYh{TwnWwZRvaeQYIQd5w3mxx^9vZ8}Ov00nUSqmNZ zvU^?Y!+V__hVoMjGDf}sDYMZXjWea^S!}uFZh3aRsW>Pa>3QsAjKEO7yrLn_ifm%%JcudAdVJ# 
zXwC1s4|9CVw|3{J&73ou5qg+kZ?pT+=$(2-J>N(7P<3_suKJ9MQPz&B4`%qEPm?6O> zFYQ6p!tevD>FL2c%NR)1$>r;?`mJ!g zM`MI#T;FIxIG)F*&VG6HoaI>=5nvvOjI7A$I}O5~V0`bw7%fXk{KXP?#g7F&lppYWFNgMT`~|zGrc>NOCQVFN4fo!?dPV^GtR9CHDeKPV_g# zMFd9VT;j-|>MmK{1kMJ`k~y1}!$!mEp$nYAp<5v#-|ZcT40czU(Wm&|n-G#Ic@fIoa0t% zvNCvOThy){7F*9vdnP#dX2H9%e-2-X4|HQxY?*lJfX5%V_iWny-tGRb)(y9R`jD{a z%NH7~Z|4au5QdPTF%>f8@xrBnRN)cLR#*IpW)w%%9EX2@9HQyuRy@sHn9}mzc4=#z z2~o3(v{chcKsBo=K6PTt)aF#)fjTk8PrH=ashdwgt4uOAR!)r6wG#@`POi>g`$QX3 zC%tBJppEM!v_CVVsR+=RwLm}~w;Z}!d`BzJ6ys-B6W;^ILRN#NKU7=|4RpxIgvJ?)Df?#p>F;TQ^q8 zYUwVPdjQmXs>5osIeY=&yI>;rCp+WE+tT^K@GkgdJ`#Mw+2L64q`Zpg@sq~^WK!qK zlA5#78Y!C<5-kW0T8bUeSpdwl69RLiyc-U^?!A*8I`cC?awD}M#+fZmVoR!UE!6fp z-S3(|x|_^b@npBPb`TR^n>9-}W5i;V@}2q@1jP?Q_?>>why@m6PaT1nJwtZ}X#6TT zBRut<3@X@&Glo3j9OX{^njuf!4BZTfsnZ@aXphnjx>*K@sJCcegFZAUQyn4B?){(! zo2Y4;xS4V`8b@fHnj?$9Vb@6IXGTEMFK$FCcI{u|O^vzx^^8dPOM)yg$FBFTG+;%- z6O$K->)7Nq6RJ`98ggLfJ?SW$9GG1l_r%Lami<3NRi=Eu~@gn<;2EA|pFfc3HWei^lV24}ZZzT=`25tHLmrY0RvUhvJGVfATOzH`lNmGL4ZUKM;S?p z7l{L5COF~%bD;uhcb-q3kWUbcz1U_I6>#G}O1(&UuQoj+v3#n|Z=X_-YWY;YK7zp2 zPq`reC50>?rw}54Hi>DUawNbNR{-vXp>>Zw0r@7u1r|`9=kr^qq=Sy2@!eqM5(k|# z5MCTUaiPYYqkRgfH{g4ql|ANS--Ptwp@Lp_)+RPZaK{GJF*3B zc(8qMam3^`3oNG{2#9I5X#R(zlU1j`E2*DT)*qGIm%1gxvDEltS5x|%y1^5SBVANuUxC;0uC^TXSUH7{CnEx1X^U*-MW z@h8?>a%Xe>Bp?Yhf9St*<`oD+&lWl&(l{Ymh>P3)tWgpr zar1ccDWD?$BAVdn}zs%=P*@q>dVO&XJehVQI z4RG}l#gVT?61e^xm1vAFiLMtQ!EyA}iX*`lF^|^phb(St(2Aj;6$G2COn$de z=0mq;kO^A(D2tMX)8|b$d|D22km`uQ<(mL<1OO!Hp(W=ziU?Q44-X8aY;;*ev=t@$ zS&kL03NXX9aEm7K%vaDZ(*1y8mt;jDTTfQ6L~p+k_T>*FrC)Da^tk?OHle^m#zh~$Z zAO_3v66GfYxPe9-FO9jDbklXd;7=kaL%7w>*9PkE5IqYavlkU-z@p!e7%ht}6K zB({T=6P4HNpxKSCZ!cNrbM{sinQ%_dt4pw)R7RL>Wx`AHim>`@a-aowRsF1d z?od4cvD8vB3Abk(`9mu2la{5q4{7CEeV$<`Xgk@Ix`)glSo=9ZhdJtMXW*nyk*#a0 zYCxg6J6x+@0`ePq6$Q|S9Ry%((bxEMvg=?5x5{C!d4uFU*jO?+ipZQxU=D)}utt}L z$W?Ez9d4~P9KJkjOYtrdOeS7P-wBJf8 zoM|hM8D%9ecXwD$UmIqddXIB#+fP&TXM7#R>|ewOc|N|Sg^#fnh;P>MJ-kmUt|==| 
zc$Np9P1ad%xw7zao-j+LJf676Qpu(Bf1YrptzdWOqmtA@T}bu)yn;|0O5y!H|H7r@ zqz9cK(Ba?Sm_(x|G_I|;KGJ$wX|1W&(QN`o-rG7{!P@U;;Y=D4*}1pa{BvR$kveCk zv|wTOn()=M^oA2pu3$o~VUrY~>Wb$<)j$nFWeUh#s8t0XVzSLEn|M)>OfU&H)4*bi zFIWKIPeWhKYgcY*0c0UbfS@=GGr_!Rm)}oZ%q?#pNW1ql_Af&P^~>`r9Wq_>?go#521_dFqX|Yv&0K?? zN(So}IXVp4>}Ap5ACiuW-LQl*Q1`dWC5-?J-zmz3;+AGcPru-^r@E7sdH zfsxgSM)CU@@+dS44(X0U2eA5GG1-=6Y_xO3VVi5(ROGBn1>-dOUYc&+PpWv2z9*fQ zrpKIMow|HU-SOML7VD2kO3PkbT^zn$ zW|1h3Oe_PmX4!D0G%|S@Rae~{sj5~>BBC}HSHd_0W&oNob!PY;L-+weo=r0>K5Yez zvvB3o#kKl{C)Ahoa(uKBM}__k=|1g`L&?QOZ;GVcsb`p#YBgivo4w2Iiy9L zhDF^+jE`}8D~;OEyYyC@DWaF@iz5_B<2k8(jeNlJgE>^yjcgo?RK4y(F zy`<!!~8BYB0+TVHgiyJZ*aCfWkiuNK3Q$4lz-+8#TB<6B5ru*%SuVbM5nkYU6k zdBkyJk5#(k#!>oBebGJoO}G#69`JZYze(Ki8v6M78pmYi1_<nULc#o!eo+WXAGwd85Vp>@eUze=p!!@zqQ%UYeC^|sQtGZ=}9f4I%qS#5veds^DgFVhXb?)b(l53RhtrgFbg@u7Fci7CHs z5ULi89{TR>d*R!_`E_)c^>14LdGRS;I%wFRA;?d5{zGErIQ!4Q=~p*cFR(5?r~2|J zo&Buh?@GH1xbs%o_vqx0lBaFX5989S|Jb}eFzW01CmDZhJ*&0`o{5h<>+XFcx$^0u zkTcIx7z4AhTMF%2$c>}A<4GhkOMn2o=^3=-9Y;wi{GXK$> zRyJh=+^$G?wS~-Qlf$Uw%dY(y2b{vFP8oUR3@SN{?UZ5HpJCU?r%s5$9UvKpBdQyZS2t%Q=6rETI$ z(W@k<`{2x5IlxB6_8=IcsU(7754!JC7+{GaA+uhy$is;E^Z0RBjY?Mc8^VUqhB%2K zYUtk9L0k!&?9aC#1#Imzijp_E;C0F-q8-+s4{E?zvqWu=!Vn<9g@zJ^&TLIZ_+r;6-{c+HHu!iPP^+m`_s@B>eH3+B< z$C((55TaqMYRDg{J!U<2ia2CO_X^ETMFMFL{!#ae?iKmHcOy6sHJ4n1WeXdmVG+a+ z%b=YXyEaHBiWhHu-Ti1HyuyEsGQOl3UPU!XB+;_#Q$*jB&iwrP_SgJunlk92wP0Ok z;)++T57yYO?>v1TOa&^?3C@){s>*ZHloGtk1Yce}^ig#%x2L2gMl~B>u0(gKcI`fw z!#{Y?iFrkd-sF8O63HJs^A7G)L|(TvDweKKFI=orXo;wR-F= zz{b|MB0}p8SKm3Mb^}Ug(2$s5IOZTPzf?@DS-|6Vlb z^Tg-%uU9y@EZULlhWlL{AH6in`}IFBcG9|kJ9Fac#hfp;JGR2i1Tv4MqpIDe?o&Oc z?%lfF<8qFzx4WDpcV9@oE1v*Ao^Dg*lbmBpEm_vsT4n2PiaRGe9+T~!Mv^+;?bcn# z>D*G{t!hin!)118$c*{6)P=e#uC~-%U22}GcerZzLR;^D`O|G>XJTOf+Ze4!-sFy4 z^StGq2S;(;i`=^xs%FD>o@(Z+X7_u?7AoU(y&jnom#TI(t7hPENevjzQg|qfL!@Lr z#zlht*D{Ba2)tpy6ov;&&t=ltp+*UUy${1Odc(wr!r$8Om^CldA|AFwnem}U`+|Gp z1u>n$4~zkw%Tv-84TRpsFB8aQ>V514c7l82SUN|rnpkMT;GCt2Xc7TOz+sErjF{3o 
z9rfi|>E}F>`5y8?x54Bmg`zzytQ=(dC0nRds^oAE#`7Pt{L(20u?Jmmu`DVorfh6k;ko-Zn6Vi zt$^e1sqce`RM+shETOCQn=PP5NBRNXo%wu@e{}v;6-f{)uDdDd8;a~64hhvf>4hxC zVyI+1n^{;(;^CoL16X(0>*4TkHI5X6q1&-nDI=gI^a9_dpN)Ms+S~NGOVVrK2{MsUTfaUv8I3wD2&} zQUK1qRwYUNaWIX#1grsJkS&d#t+O#n(0ilz#^k1i;2xIb{F%4s|GMaTFZ^N0_(F-} zzUKLzKPOI}Ui<6QqL-Kb8yYYDwzclv-`~l%u4XRki@qM@GW};!`<5%gr=J^y4qL6_ z9SdB4eU3%oy(-Nowki2{D?_cwyDF*7U5ii8Pvst2u~Z&n*}8)Hnf5k6Jz4p~cRlvl zK@+NJ2H_O7$%vhPYu&k+Qg1C6Hva(AB6z?z*TYj&jgIw5)+Bn*(-!)&Epl60ivHMs zE~cu`VzNb)-B)e$m?IOlVjj-ZFtwoGE1Fp!TtAmi(5i{szFEKgJ*-rC{#G|83C_Ts zVDd|3X<7My2CS{wUG+q-PKea3mEsmfnsr7h8~-pZtXhzfFIMTUHIQ{?S0C@^aknU& z^Ofud_Vk=OcjopIx+uolcq&K-`-sBXU>6^#)9SN2L{ei#9+J%Q|atl4vxQ~BRuRC)j>0EqEbc9ap=rxy3>$2?$gky4f%yzgIS=1hbH zDx5jmWnK6XX<_?NC{{Up^%k}fqeUJ!uOYiP%W)U02Ol@Dth;xP6}EuXwhY1uWWm}6 zdFjJqyMS{oLm(aRLe^`n%QUB+P%~cAe7~KQTp|+PIo| z*(CD?dFNU_f8#l1lL=OMD?FRyCY@VeUZC70TahXA*3y!Y!lWtU5ZMATwwPnoSboxO z!MjF~K#Cjb4%w$IKB{M*GWdh*U7{t!cx%x*WE2F|JeGoeQ7y!CiA*~|i>L-uHqZif z$|od=W-Q!tR*O_+hsmU_R_A02Iw@(CWQ?PZGKOwE1+8q%HO{qIy1LJ`U~SBLN($tJ zq^SGqq*rA^kU1ILHuxS!F*DgAGB80hp7Mb3)FlQ;u7j@yLk(pI-$ol^BE#mY4j%$p zU_c3dv!iOW8s#FyxDqKB$FA|SQsn89zc* zi(cFas&#$2+v9ha+i@eW*MB|jr0lPkn82SqeAGMhVshO1yzM78`P8g-W2yISH?m24 zTeI#K`TE-Evl{L;7u(8qK3v9Z^*vC0XIVq;RmbFYL7UBobE>|n7_2#SXW(+d_#+Xv z&5ogKaXj|8EUj6FiK%9^;b=y4v106P0mL0bf_M2w36p4c$?`WivurAsw)LV92T`RJ zlyrL>6Hxgfa=0x!LMMC#(r6SEV<7P#<{>Ct#PT59+sWfjP0bs@94uZ}QXTF3@fb z=KHtC@~lEiFBxC>(s2?z9$~)f-E?QlO~H;|Up>|AaBO;O;9Ls(mw*1s%PMK@)xOpx zKIbKomZtA5H)5XL7EkAE?y&9U9lSTbG3-gyeD9{YC$lxY=|SEd3oR49o617IO}}Je zScnr(rB`#0;L}Y9Az7+_0r|pwJkZi+;8gt(bKJX~=1_V-dV|fx5$QN{Xa0gzDjzH0 zFldCNlt{quNYz-N`-?>As)aN&0af|o?;dFpqspS)fYw3Gk@q(i@?d?+K}f|lVE1$n zU?qUtLCg0yOCA~J#}4Ub4NO3OyLG@r3A}#r5%LLOJ*Cw5a@iy2Pg0RXrD_M@dVYIw z6T1UaF;hW+s3FyK#@`dMk(DoXAAy1};Gt9#!As(%q(YVf{D(kf=-Q28S4WI0pD$*ZZ{G`WR`C>8K_Y$7dFo!Lx;7X)h=<2hN$oDix& z%D@LQa3n_Za=S6~N{1Rw*NHT~#{aXca=T*aW@+ln>g^}iskQW|B4zzwokRb2y>UHn z!8^t6JI`Ku?&#$ze>W|-UUX8!SZ6e>vFA$VcmKpU4o=qoQ~8ufuI<%9^Ub&VUgy?$ 
z@XsqW=ly;?O8u^c@Y~}^*B@jcj~>x$6&*oz``!y@(N()Y#YhqEeKQeGU7d}2C>`X# z%NG`oz83)4jL=Nzz1EQ#s>;{3JFNUT*!}VHi`tqTi}?gyCATupsOUi9lBr~CxYW@vAz7{JbQc@9uppcml)}m0pN!E*}r-3iJS?ioK z>nV*Q5PBotG$7N?Vmb3bj}O2!vmJ&Upo?RvB*s4VQN{ugU@}EQQ|iVfO`r%-CMj7>1h`OC*a_N9A)5_&LIIox z7a>v+wh;}u0Maow2tJcAL?XN&hKQSVcQG~u1?OxkD)mG~pj;s-i6o>lrE-Irs}d1o z(to`cc%=KN2!jLXGJ_F-*Wv+Z>%y3=t^}V9-bO%FN7&A627|*CozOosd2^df>j=Jx)|4zZ%|G`dVEjS$~xIr$4Bs-`Gwn!f-aMM3w_N)8P}U-HtIq1{j=bdBWC7 zf5D!DLP|V^g*Zph<_9BQbU=F-fjX0Seu0O-vxiTO*alW`K{<)p!M`5TTDeNxD*Oy9 zI7go1Brv_-urvU$IX1*wSTJ=aKf?urJkn|tnnmkl(b&4JLJD%yx%C^|$?%d@zje*5 z59V22LZJTYE;0C|EbYuS6Ttd#me=sMQs3HZdSmlD8b-AU8&NJe2vr-;pXZc4EL|^J z4;KKk4cMZA47CW*8X)WbG1JDvV(~*zahl5s`IiWRlDZA;I>XyQ0@@J;I@Mm=N1>2J zw-~GFS~aQzKiZ1-4e#qlb*4A$T$Qsbxeu0{oeECO5OC%~Qw~ek(4?d29hx=v4rm$y zx-(r&(grE2E1-5tgDUP<+(+LRRDcR=ryo(!8zh2}3h;9tg(|vM7hBy1;og+mZ7NDm zQ@|qc7=Tu$m0l?QN?Me(2o`gRd&Y8PXJ9EoM>%Oz zy0E6J`#K3^Lki*{K(Zw7pQcVaD1hp63h3b?Sp6NiKbC_7;LD⪻)P2+Z%Fm%9<)# zMN!o2aX;^Wm|xr2nuU3>jYW_?3nX8Ua-j%?#3}=fID?NxIiWr@gpaAQ_ye_$m@7Y= zvJy0)j{@TR=Sy&Wd>|Apfv>Wapx0Nxp8pg|)N3U^2>eRnTY+!&nBZdEruaTBU*p6SVWZXt!qrjt7swDGM&j!A^sw|h^+;I|EgR|kjn}HoAR&g zf6TuKvRmc4VKhY@)))$obuGtlLUBpybjVwW5`w(!CR8CT@aS4G-~mO{C z1bCSS^X=@)2*!d`)kxsBLq}EuTssiVV~~_Eit_QWc6INi)LE;Oh~^8@sFf?SKEzi> zr3W|TmKR~o!hBR!-OEWtVg(B**i;e@NO!1~9_S^6q*VMDNX0=F^)pO7*x68lVE9-m zs=TKl-UB!UkEtP7Ys`pJI$`e!l8n`r&IZN4h&^Yew2(%K zTq)HrHOH&{I0d+6`HU_e64G!aL<+imqG~dSh`23An@cqjFrh`Dn@0pOpsh#Jxk4fW zV*CjZ#N!zVn@FWXv&|Ke#L#xK*+MEbz(fRC%>qcuLIfbrNkXWq>S}VPf`pR7li@y)z-bbL$71|{)XCyJ~k`RIV3|2OSBmxSls(LVu0Sr=R3xj(EIPe}MNtURB zDrli%gr%5k(@^JT{*^19I{2RfuRi}bnuJ#zEig)f z@IU;93>^jPP7NCAegzD1`$1(7y7y=xkVO}q0oerP(L>EiIlt-*%Ldj8mxz|@pwuyC zsOb&E=Qwv@pp3S?(6X;3fLW7sz9^T^9TX(U@Afdd@5;q$(5tNrb3=>}8u+KD3pp(uJPa&o1IU0Tf?<&?xg=7u zM-8O9UYH;5UEM66ZMIT9PLcUoeXYD_8>zmes&CB$zp2tpF#IvDyXcYV;lAD@d9LU2DDC5et9|`Fj}LoxPGz3;nDk$M zNj(g80-Esu?o7H|Eib9=v?{A90ei~o`>=eMLN59CM|RIK zwf*_Pu)aaGis<*rKa?!s(Wj5wcKGb?ZTH^@qH)DZXTHL#+ZSBx*AEqFD5~y1&Zi%q 
zUOw>lHhwDpS&pLeXK+~T!;#i-eXM#e~98&vxCgFcgEZRQC$NQ%;I#h0W5# z_<1u#8!Xv7a3hq8H>r?RMMxvR7;jG50Z{B4Jj`Dhq4Euk09e;j^8q3yWd}nQ>=gjW zEm6LqELMXdF~U#} zrlS2l%wc#KF`R`OE(jRV-U|?c7;PhQd=}1C=E!8ZyV7SehbRW2zM9WGd zu}PA2q5%WYzeFZXPM~3Rvv4O#=8;5@j1Wu`X##_a$`!g3UEtt6A|f@$0G`{3u8zp5 zSaYgCPBbK4GjLBo%K%ASTGB$sva{f9lC+TKa27mF4(RsE7po0)nUVULhmV2}3JN}$ zRv-1rag&`QtB)pt&B@?wcp03Y2#1LA;C^z5Itq3vz)KiS1oITQYVtlEG#k!Ew-Fx+pIS5H@Oo z#W>*K%c7*is24^%W)*Olva*=zk!v;8cpDm7G;t1Pmq5}xrj~OVKpklNEMpm?{tE$k z{R=&ypP9>9b%xW}hvTEY20BJVp6!iVj6D_!!y?*TM=$SUGqkk^tRxI^V4S2}rvQ#5 zWMu_SixN?ug{xu|_$&3$PR|x?hMamh*B+AW!LQmC#=X*T(jl zXQYaBvR<)_e9ms6i^8`-d2cWha-gOODSj|0o~!*}{R6&9>jK;c{>DxyGyskC&V`uI zzj|-&S}5AmAQbPH1*8$^uUgMTxoN_!r^y7lCThh-NfHe~OlG6Ki^=e6e&&9)n6# zp~Gz?gK`Kh61WjyXajF}t`A1fFrB#zKpZed0-q!>NNPlT-gq)Vv*-ZjLAf?=57FKk z#ws)T70_1!VFfMJGcd^b5j6!RuLI-hdg$L_q6G6QEqcY;(ec97IsV^wI*s3)RTZS@ zbS< zPq*nW%QvV$*>Pdpn!Vh)CFPIzy#D|nm~{x(VgCs^4>(ruU4?o-+F-~hin{mrs-yMU zP2%(B?RsP8X6HP@^{rSEAK!6V_|CF=E2y{y#~wv?dE*1?!~{{TwXTPQTcNt410)IYcgR}a^)SB{s%=I+Apk49pj``<2*=+5^rJ@!vyu6w z+q{-<;25`7xbWr|-~#b{Nqsa-n}u2EU@ITSU639Hdb5FTUaR?ma0~-=mkeJx9WCFx z)?)Bq*rvocP;-f3oDqKLV1qTiw)6vO?~lPnBM@!=sesqFv^1Q)rM$h5PD-+LBpU4k4Sha z+!Pi-vax_W3C3bj-REuq=#3US@lhz?B1^z47v?{wu(!}P{WGX)&W}R#KR6V5+*I)` zXsXq`E^#_F&WAMQ)5@y-;&{fxQmaANaH=+9@0p+EwRLwU#^jmbT#J&_PB`rKvhY1! 
z=F>*C0CPg0uBFh}T>N>wZEdOMshvCj4`N{uk%H=aA=@TLqR~((cwgzwHxAR(f855& z%ue<3UPm>M0;ijbX=a4AQB;PLeaPj=W4|!g!Vv8vmBF!{ZES#TLB|OlofrfDLg>%n z4S2S(Vw8w1+2Opt9|hba#ib0@nXVWG?n8&numJ}Q+z51y+F~B`xB_rPqETsUpo0Y) z4CEzPAk`9r*<+Va^Oc3>0LWh7tfY3!N;$NG1_2|CaE`PPIWRcGk8QHUlA(N^H~(8- z2AFNEBtKb-y-Nmux6m9I-@%wMV0@ zedl4Bvyj_mo7FThzua@lWn0b5$@b-Bb~)MM!<-RXA{0KPQM(CHM=k{$vgLr)4VSDUthQC=OIrk&%ov^(L=?oSdIc`^&n z7I`z*u+OfP7k-^ap}kH^J6|#e+g$bnh&Mj;lzwG|%U;3WjSahaCxci_j_7yKSpfxG; zpK;l6q;?4$*?T%xpg`-d!g(Re=MUIn&@6e2ShOy7I5OVQdEd(A{wE2%=17)OfttfT z*6c`w$XU~T*Y0oK-?G0=Nn(S-le*tD-Cxvs-yuhVTKK}{tk3`t90Pe;5}f#Es`lHW z2A84R_b=VQo&BauN%tlCwr`xjungdGXQp|v;QBj$d+HzO+M$M~`!15i{L_&jPLI^R zyEpWX!woCcANZTg5PWFE$o!JX-3Ln|0ajo8#mc$%jq|?_wZg~9yl3^LOKHY#I5uFK zznJ8f)o!Q~u=+zvWc=3aFE+m%_dWG)1hIR!HOiw%rgYZuM6ZM8mbAs>e?&z)!V;5_ z^}hx%3(AbQtY7oW_)ZBY`G+=c-r%C+q#)(qM$_bwdC0PMH~TNw@mBScJ#*f1lBZ)F zrajAp@4x!C1b6G=UA5IqSQL!M;eV#OHL?Tw7pIJ0SZIX~uBa!5VzclLWfJZ{jNW;g zaMGt;E`|w^UzFzqU+pZls)ts|mgT`~yTEMPFEj4~gJ}tXyUWlthg`{gIv;*;ExbCI zC<9Gw4MCcP;msdgC7gf${0y2GMpNb;F!dJI&aZP>&tegqo&rC5Wr7VM@EKns(UTgZ z<(uzdNrmzA!b9|iy#`cukGJn7d_F{=h&Q3_jFJ$WND{rredAYhA@`L6Q&)%}$xwp0 zAK99%z3ZX&CEoSmOf>#T?xGh0&MpAN#{dR@_(P3K>2RV+vS%S{Oa4b%UTAfw+yd)Jbf>d9kaQsAM70*Ns{o z5N9J*5q5Q93s&XM71$2}0o{6m;0An05NgFXNo0=RjWA9~M&DP-e|FNgm3FU+ES|KN zDbA|r|5~>5v^eODLt>=d?$KG_D>`!fJxPbk241UGHe9Vr*!bYp8}C!xS4`t3L7BZ) z{wdytW|ip++GUrAPe^=b;mW2c9k1ngg{^Nj*mN?~*^@`C6Ed=ugCB?J~9 z0@mO<34@p)prHnuT(RN%dLGzbb_u(B(ejOE6V(u8o8IhMUUgTFpL;K{Xq39ON$WOh zG~-xOh_0ZlL5T0-tXY7KmxTawz=}m+ZE&l951l&nu?+)jhk>8YDK!!p?F6b^8N?Bz z0RRm;$C-{Z4~;j&=zASXkmy=9;0E0~O7?(na$T;t)3MUxQf6k{hxw>%(rNTk`O0~f zq^k0jant3S%RSCPgRtrhB;Y|8tN=gWD zEvSufA5HW4T_?tY?ARz=J0eBxZnMCub8U3_d=(!ISCh|vrS3%3m!%4O^ah$^f@>K& ziTcT_xT$w!sqj2ZEs2@LoHBvjC7hfcldU$EJzHHwl1-wBtmNOOOfDxm0VjM9%*BG_ zi>81B{!CU3gL&!ScZl0i%B-9XtDUZ6*(7@si@vOXho~UA8@YK5t9vH}?ng)+r}Y4^e0Z|sas2s` zobvAHzwf@WaN)^VFvb{qOxKM&QhK2>?AX?nWYqzczr|YinKoQOsIFa<=Jqk$Kk_&x z{xqRLT)w2XQ(~paH78e8{E>cfnT=}F)04D|u!6*g=5E(RCJ4C}kbGAk 
z)Rt>QD!4}wryi}p0jv-eJe2QSgREeU2$DKJWj7uuno^;}?MD-Aat@`vsqYcWpDdbtdVATj} zEmXp}Xyg-`@Z~_G1Wf6QdURTxI588%?1(xB6LM4(iDuQq_?_uC5DpD_{f3>^b|+Z8 zz^(#Ev&_Dd+VkhIbQT@_8HDz4LM<)@1njCGh>qZ2kK&{>H44LT^I6e+)KC{AfvI=u zCecO>Xh6|y+u_RNS3`a7He9wD9?9=CYwF>Jj8#taihcr!s7@rgH$EVDwKjSy>}S~b5xxvD--6?+b{-199f{c4W8(JxguCqK!Q zKiP28-|s`wx*A^1lV9tuB>MIEuH9GheR*}_6Dz;1ud1KepW9kND|9nbPPX=|wLEp< z${IgtoTr}vtiR?T-qxDDnkybFc^Z-{p4ESDx?mZ;{E4>J`mKjIf*VfCx^yHqJ?05J zc@C{+msAyDCrdR+IN681wS{ig$%x;dzH5~*HCH@qpZrl8)0|w5ezwfw7KBE)VCnBQ zEZF+zrkef!ekp>jooD`N@MkGKG2!{O2LDy_MB5})*Z0KEZF%|M<9ZK&c^i8DUEFnx z?{tHIE>w?tK7Qx=d9l^~o%-3KyOHoW2XoEA)m&mZJ-T|3#%S=d+cEI~dHMIeu`f3# zJ|s>rdv<8fy}ab*?>4>6Q@weuX(;Wk^|Z^lZR4T6#cy#-77t<`kaz2_c8tuHQEN4> z;mdwzoIZF&SHR!ZLCSKAJE~1$a?NPwL7ttpL7%;j@@Ls;{kb~xuJt?O~RbN11WgN;fy{Qu#X?rtlQ|nOx)nO^u?rbl9z9`JLJS_TR$?enq z&*A_8TC_*!&jv?cXaD+;PtjSwcTL=UJ~xcB^3v1xo6mNAo*-S_8QpcI;lwa-Jo|2* z*m*UgyI=9E{?hC3fAlY|Khf7ew*k&LeKS$<984_~imO3?-Rm!26!!fOk>OwWKVGT7 zG~Cz!d{N~1Psflcgo4ofDdR!sUk?AU@3gHuh(BR9x8Cu$7dML+MMht^Rqq(D47|My zR~oKHblyC;C-KtAy&xW-N`K9K!eVINdrs{>(Z8p3)gH&)_ow?e+Wm3K(@VejpP=!e z$vsn=dpCVisDAVn?EIs1@vk$VejSVx-BQH7_dWQd@)urj*r6o!**tDXx}(LHo2DOk zs%0)(b46R(>)pji{%u+N)oyIxNMQBAulK8DBHz`<@j>$+i zHae_kH&pEVD6OvW{PnbE30|8@r<#zmbB(e$qU9UL8X}O9J=P@#P1-S_Ka!HVb%{%{ z_U{UrHS55`(hITt&1OFUBi%KQsgVoBD%7c3(#d??{SR)(KQYPq|X zk6CLOBDMCvaK+!-fR;>H{^Vrkm58e9B+=H>)hDkdt7Da@bI6uOYilB)7S&V&=j)I1 z^;U!nN?0bsq4;l25qfL+F9H(1?>Zn4Jju5|2bhry^Mkft(q!got z96^>MnATVyUZ%XsU)y!EdZ^JaQ(UQS=Qop9C0c-BJUFXMOESgRE?J>!Br!;iT9?l; zZq&>R^w*ZG7A?{qs)Za)f9;{FIm}ZB&|J;++J`JNOZ*2JwW@Qq2kzSvJr1dle_tqW zNMgKVPBHVY4!0$I5N4cLosN`T-JpbCNW$6*{aAfgr-FiE)Qf< z_cSxuHq8hmMXJgNr~-S8r;dk3w4YOj3UglYYKA8w)SHL%#3MB!ud*Q0G%E0>6S2}) z!8_DEahS;vM~4|clE3AMvPR7QWfyh5b@^D*vc(?h%usMoQ04Fh5%{H23m1GFhD!=z zENrt9T@rXx#2ia3W>*J6dQR44+Kk=4CST|}DNs7`U^lc0`#x_<$>p55PTM2P4XNRd z?LRTLKTkot9<%F$?B3FXVeZ8<&$i;9FAD$us{i?%tNt%;;zPPV!Kk&$+x@PUDudm8 zf@-MvuxErJ326b7RY|xDoweg}e%d!`U~PTK+&8MT?2hR_{_^;}o$DjrHPv6dJ=T*{ 
zl#Fj!a!069Vqti3vYW@6ByYxpa`Ii^%gXL3B|`8yX5BRT5VLt`_H3i{rOC{B3un)s zZ7A$hX`u};3}#`(v`Z9nEWxUBK}*4iVIiL9s(Z55<>-YfF|mil12j!u$=g{CyBhqRWDqZ}n+#(anj zH6$-J{VwA!o@3#jO&0d?wL7INOuBuK8nm$Xd;5vzNo$p|G13>(1UssV@I~i~;_M=1 zxojRr#;G}Kj!3BjQ>^FW@_hod{)XDDrO;9Sq+Z;0XXt;eqe^rru`?p%N~qrDOWi#iZT+Vm^geag z1o;QtD(*Xe~ z@ZM&-Y<1;M86NjDqljHuxKEQwW00zjqUGyr{{8}!zbF3GzvGkHB>CZ&&V;@I!mIvb zhe&osol(eC>9Xg>pV;*gok0;(w{!O=oLV`ri~eL$L{h+S4td1NjkdxJH{L%dl$<=< zZvr3ad-R{Db~{G2M5g2C4v|6g3GuXrP@3ZY)Yn~I`zH#%?CLZHgO#n6 z%*GqNZSmEq`Ts#<{ZH%3nXO6obz!a&n}{(z%)-T{780uE(R5@JHp^Qem|L+=*e+-! zoIlFRh~kv8@Jt7_uM8XpBBzCBI3qrVVTN4KVoOiO>j(3D7!;96BxGPX(#BMVZkmNb z>aL>mf@`!ewv>(2sIXUSYbdI46%PcbK%N6thrs=2mSlnzHu{6))mI|)la1}&eKovq zdWGo-QTXS=l@G(F354u$jchgnSPHX?mxTBf1gG1;S_CMXSb}S;@S?ELvEw`nEY}QI zvRlH~X!&;A|N6xM{n3B;qWlKC<3JGwfuMvH;u{e<#)?)_jT!1DdQ%S(fOi49`_@lsc}vc!QjQ;4 zS;n;6WoXMOWpChl!-^YBQ<(v&#(dqAFqwsEt?+C{#!{mKw0z}u6$CwGf8~Gpgc0C` z0n1SqEIc9G&h;?E!G%m=uqcHrGm&xBvLs=*x2?r0@sjKwO+^^YNvA!v+g12k2zS9@rF7nU?<65nCT5kC0%BWPmbUyJMDYvCtD>cHfxq|a=7 zwHA3IBcqOew+;WsF7J$HtaG8a+5vBS2hINne|8HD=kOGRjS+KL{avO!jQJcvAP-|* zD8X6hN-*YX?tOwPtq9COwm<_vf)y?u!7ypT?h8}hks1LFLl7!|BXGX3h}>ga8%tD5 zMQI5-g*phAuTzM}#0oa=n65r8BM6f4lx&)^6`s))xYgopK4DmJ43AChLUJ4)X4h^~ zObhb%iZM*1qJwvyTjHe{@h`{SJ;Oq7R%R;dcB;Y0Yl{y^l?8!g`+2UTT*Z~Kq^r~XdtXJM%{l3jwSjIh*~a>TOS^~Gps z=`nm(7a|5@R9z*lu)^z}JkmEj)_e2*`TC}HQ(LkNy?;ISdk_$qeL4j!>1h<;KZ{A8PRSFIoD!n9IhIedRj`vt z4jSwvJ4}!jjohLk74vCcSWLQ^@^P$Ee&!)T?>U|donSACu3bAWbpriw1^1kXnAC1h z>B8&ghQUMqoV#u=`Y==!5%W&Eb0HK+Cjq3wl|@6#ci7Pw;*dY*Z@_&iEt#Z(FR zM*xw>O)MZWooU=$*MW9Jh2rZCC*tT!{Su$p8&0W{ufN{ZLSE@NA96AlAD(>KHu*A> zv|}hq|G_m2QmF?)fAX~!>7{<&H7C!B9f^luA6B#ty!yYF?|-+E{-5Wm$&PG1C;l)h zjg4>ZWw3CJg^;ym22HqW(OKkufT^I=B7~hC!nWX%iyBP|Ew|G#YbAXW{ruDs{Xgam zzfF~!n-}I0^;6s2)rs!EuUh_Q(RRc#q^!d7 z$QF5prDce?Vj-4?2oQo0D^(WYYY_=nz%*Zo4NXc3t#HQj!K2CwGcLi+LKJqs>$J7) zxJy0Z*E%hd>ZC}v~!{8seu^3}4^LeJ7-kqgTfWRevHDw#dmFZz0VGJCp+ZPC%Z z=!lVb>KgnEhtSZBbe6v8@B?H^>Zn1>4ywMW%(FNB1vT0cr(cFO5o8*x>nD3kLxZpB 
zF?SH1^zpDAG3wz%C5uv)4}=M-*p@^sGA0FE8thZbwxp7iuxDviYzr#c&4}*~&y(Gn z?1Es%7 z_<{t3;J$V~uE)>OyHBSJ`!%4RXIt3xF2AqxRaRScF(JjTCrR-3BGUTN;8QqkhxOzo zKv=%@>wy>F!X}hlT0hb*bLac@J1xF%rPJN-PwqKm1=sn(j}Pu=hNZ{zL7T0b4GQgr zs$G}|B57>N-hj5)*TYGfu!_rzElDh3Sv6jIR4>1&ik7d$b{{Z}_sp;Vclkai|7rDm z=ch%{<2PNCD1Gj?fHwz}x`E(BR5~V$AdHDCb>|aV3q@HTLRlCJjHOdJtzv0jr;F=ElJzPQfL7>Nm@{W9<9O-tZcZ&flhA zu3vi3zS1&IztWARy&&IiUD3M34+J~9tD9XuM0|Q)?)+hEN%uFG54%U-<-RD7Q#oYz2~st21~asvS=i-s4Y?#?yg0h z-QOIt-#By+H8{_lfqVH`%o=cvb2h$5Y7j3f4P8WZP$UP8Wi1jF_Y6e5NpOFSF2xN5 zH!*e&+{aOreGP z&9-l3bd5E1kRk&1r6EVIP~xO8xRqm7YPSDsTheyMyFZczdkE`+=B2zfX)Trd55S0N%Uat0)Wyb z$1q!S^*SP8QoZM0h4mGPsW>xI=86+1{UbG<}SH~JiSP| z2)tlG2hYHQTT5F-+PU*jAx~L~9Qj)v=JnQlu3;9})0Zpm4oGrI3+iEj*i8lp|?h{KV6jr$Zk19Z*^P_8Md$0X&$qy56p@y{+Hf#UulUN(tmP4WE&!VyxP*Gw zc+LO=kE5KlSwXhp#!*UATDx5*r(M+^YPizQ8IjTQg?@8_%)`@O+Zx6tvl*lYJvRUHp4tEwX(j(aT(rtR7 ztzl)nmFDzNKVlNFSQ0BFc)Z@yW zHax3a;MNU0yIJCP_%qdI-+obK-wsWJr?enK$~Fr>kza6#&+6G~kTj-0XN%1nI^5r`Brc zKKaU537EmVYlk#smOep69YDo-I_aoFxug>W_>5@%u^S3d?8`s8eSGr2*WU?07Ou66 zeKCh!8^-Q7WGCFV=ac8NHqN)$!Dek_k!FX?-X-`!3y}&*m@bv~&$oU}H&*5g!!$hk z%+7}}=`dlY8`mKc=E>X4La|Q)9xD}xAi{8rTL=Oh;&AjK^;eAM%mMtBi|;K2+S(oU$b7Q z5R7qLX6Y7FhMqE>C?3SoxLOQ76+AJCi)ruWY8e=RA>AC*iLrYvx~V9=8*ls>x96tt z<>vvXxND3r#6@DXeg5LKfj~ueKau>qd|fQ`REX+aY437orW%f0TDoN(%|l64n#+L= zJ3N&O`0h+qb#5@cmofoP5L-CgDUnL0y<6BJREBsg17UDUxkef!_aq!Mmzzw>z#DT& zut9PcQ<<1t5)!5XiID2dG20I1*Ei$76*)HdKPJ)DPqu20#aN51KYEvoU_4 zlP<{`nhS()pyPbwUZFFq@_;f|eGMNE>Z4TMRn-{kr`C;M+LM+Yu+I^;hxIOOQ<%dfOwM zTAOTcdt*mmM7AGx3Qll=ij&!fKs}}}#cu0*wSN&IW$2bz{#799qNwyrBpU#%my!aH zFY|vT^#@t?eGA&W_*Bk#Yf*oH0W0&Qzv z_-hU?sc3ricIti27I@UHI(gs>JPI_wu;vU${rw}@_1l_bt^PCp1PlgXtRQZ$g>J^X0c|ayn#1V|H~`%`+AkCkzOX2-heN*d z?+lCfcL&W(5+nfd3He$dGIks7glzM(VDRuNWb%$;xMG4b#@>JmM#K76otdC0AS(n_ z6NASnC?Y@@3<^&L6-NyCNMZ}To&St4r#6yPG2XIwr< zQ_@n$#wuZ-)$Yci`Scnui>TsQayPw!-rHJKI^ z{bpvB3a8ZR2lD!@rq34@Hw9}aRaiP7H}x2f=Te1Sdu62ERMRKdOYIFa*Ncp8o~#va z)c{(YnOJ$_R6Abd3MQDt_tCzSEiSdkO-i_nhY$2_S@Sa)EnmgLS-+rtF5dC)@*VY2 
z6(PMot3~*WNH0xQq|}1}k~8cOCriXU@s@eB<1L6uob{Y77KC`hmN{IvEu=XdF(0x! z=8-kI92RqLDKo$HBo?zlc>`vH5)^>~c+i+&d>y=mC>Hb2e&bHh)53A!)RRhIgYgm} zpQG41f*1`-LulGwf3E+vF7Y3p!teHzlXtEJ`Q4eW-f-ibUuWEhxDQSrVlqF(P6Mj; z&UE?KUF9q1PgjyIOjnX)Kg24gD=p{4AI0>{Zp)Y%rY+1w#=)J&M7%(pt5O)ET%A$n zS?D8HDpEDRp56DgFe&ASfI_|6J<#!KeYb_jf&W3jspE3-OfbrJ;4F}2jR$tkC@BDGGlpqXQ)X@jRaX=Q_T%CyeOj>>(u z=lA`-?mzeSy7!M8LYvK=VC}v3TF-hu?~nAoma^1AR;b#8dCsd*b{A*^wb$_SP=}wS zgCptiYi@CDgs8jfsBVoPl0Qy4q?}r&T9V43 z78<7=d^C#ch7Z(H4bC>xn{0lRGo5DpUP-4CY7$`~1E%S3AI zF|zLCneVy8J=Q%9sUH_XKmjKvxpXE~GwMs2#BF<(;sdvce!ev_!zm9MKB6}km5t3Y+@-A1YZ_rITg#5HYP&z! zr0dX;$JCE{LCst4ku%)bZY^uP;So1{(suKG>Irr~u#ub0@H0;<8r#0Tt!q=E-2bU< zeU#n3ZcxR@=WGu9cXqc=pi25ogKb>@Kw(Kuj>ieBEPnZLm8(a98EwToTNQ1#ZE*Y2 zSGH|2MrM@z+JQ`uQXhM+N~!1BxW@)n{^_%k+P1ba)&)rUep|j~7KJ(x7N?s%t$-qX z*cE2Q@LSjeK5fp8l7aNLh&jIGNilY_jtVs$Autbc6B_LK$R{zWrRe}^_9%(7jbRU@ zXQhk@V8KINm7?@p5*_x_tv%>VOnB34t1X{W!S~wgFjbOy_QBe6_R}~?AbimV+xZRs z593)IvS0x{u|EOS{3)O>rX7QX+I%LY*?~D5ALJ=Ogz%D{;c=GCBcN6ezWX)tK$lEYky^2sK#ghgWJ36$G$DQwf6bjq6cfI(&kQVf5-lW@0}qnRu_*i z2G1wDvTaeuTU(ROdza#Eb4}c-H5je8e~ztIEpX=<2r>&wUoFcuv_^LbMiO=TYy#hb z5NuQ=9*O++6suVyr8g^Ef`A8fIanGD%!DJ#uj)w1b5`c!rW9~&zd!{OHpdl*V_}NX zUNWDOQ(c)eT{1_ceD_=TsVI>5CU0MGOvw@SM~h@@awsJ-XA+q#LlYI+1%W1utrJ}p zR=9Q-t;H5^{ zCs=u&lm)CiLL?#BEUaBf#Lz2?vgd*gcrY*6jOTLMtDQ-{tJ&*J#LyJ1aIAG>ktL}m zmBk8jmNI-ENDjE{*bu=|8sGK)T$X%1v1&^qILuUeCDjrds++JD^g>n01?jDq8*%UYS3cbuZ`NFn^j?p360C`I#%P zd$u9yk4Nj5Kz`99>QYDm%=JGnm!E^$BlI`<&zdc*luPn6@-Jg^kG#B z<-?bVXSMR!f)M)p7dM=SRV|AReu-s1wWcRroL$X|jNAF`F^1sQwUxVm^_44~`v^Ht ze|QeXOrcPu-7Bm+6D7-64m@uYHA+9V)kdKjNSF}zNMc8fXvL9U)GsVWBDM-8pnT<7 z)(%ZLxK%Mmf!iQ(*FdyDS{{@&9)5&MFB5^g*7pUR#iC08B^*O^mh4}V@&Dvs{iTFK z?P8zNTZAh?)x?!o3zTJ33L1MJN{x9my`_4f1$GEV^p0qr2Ku37Z3hWO(j=-DT? z;)41JG{#CiH8zCDrHb8fOzdzbB2qpmQ7i^g7J~XA=mYX+fh1_HM~bX}e~yUkUqqMt zi?JXw3W{;-Ckfma_=_?7Gm{01a=Zb>(8bhBybd(TczwJilOQA;ipaJE0T00>iBN2U z;4Xbcs*=bT*13T`S%15J;?%xm{#!McW5qidwsPs@kEtyo8mY0}Tg0)vE}7O3P$?{? 
ze&#;?>zVB2sNNrlJ@Y}Wo1-94-8-t+zj-1@E?SITELz-ZBRg542M~VHmO{q5c<;oK zPg*w-7iT1Io$zIj^b$0vUHq^bWtglC|-vH)g;MYZQ@!Fr1$QXK7`xbkuKOZiScnRW&WdK?fVN5on zKluy|RrDVlqCsD(4B8hUEF5f}mj`}(I@z8Nj2=Kf`#>hL{z>C(7;~-xWDY{hA+AMe z4@7}MB7k=xkXGGtdT_`;2f8mpnU$q{0-_(&@ILwS zBr67a&6yHm1Zb7va3bXHep&UcOvB$m)|vA26i^gQALjmK)_3O`HZp(aR^BkLds5e* zyu-Vlq8}TQvzbboNgWMoN*&C2e&i2xUi2@;{;e-sUAK2-6245fy;RrB)3x0*{N>=I z+aaOhw5#F6ch3y5WwZX@H|b-I!WR7H5CGzdA@%-eI=a^3n5QzeN#8XODks**0IktW7E;a{Tl+?HSTH z1^Vjhg1wfIx9a{!2o9^b*MIA%%JZZOKiqGM8@B)PJ#P&N9CGo8R|9@(zDF{7ZDJBWUf!$u=FV7t_=O_O+JYwg9SQeO zH`(ujMBO9>;^fUIkqRdlUk&bSn3y}4_Bx<+Nz|6WznlU#wauO!i}(7L%P_8|o-oVW9j(Lap*@SR>f@jwjnT&8Ib!k*Yss&CDX9+RDH+KQ=s94Yq#=xXu-x=4X2z zS;_{L@%H6BL9o(|4}uT;2a|0zC=--1k9v)l8aygc0`)MOyZEUI3jZ;kfV@@IfxZhR2#mzZFs)DK*JE=q>n4@oE=G7*)`!~CW!TdK?44czarr5w1aDl}-|1`{>j zFJz2Hwqc>742Vfylr zBs3;1EvAOch0Tn^;?}KgtUW7UJz&vjOL|mc3o&VDw*U6_JvVrqyT`iqp$xY4Vn?>- zTW)Afby`ey%o$|;>X@2wIFPjMHKFj|nV5?;??YpbRL4}OwpmzMBjr20Ok3eIYS-@n z`b#34DqgM?pP<#RvPHV;ybF`__?w8P5bT_d`q_ZxHXBOl%|?%5EXX}hyS;*%pCyg# zbdN(G+K+_#75YMI`BoZ)rz53yurfQV*YGMSl*CmJAzYYKhfyXMrUCU9U7`{x&FatU zbdLrPtE@lNL+6K=+~cg~HwzJLs|0!z;R0cPkIir-cIVOA&ag7_L_M|;!|{OW5j-)l zkC)g`I4;9x!zOVsVu!U4HWW?2JlRws8(-|VoQ6xkBP#W@1#6x3vUFTX}a_iAB$eXjP2(p;62rvto%z9YS#9NI*^@4?WEKYC9eUfN?l zbanaAz^z)Y#f<-oo~(x{Eq;7)F_@0rpwaXkL9@G? 
zxo5@Sflz84E;2q3`9&h7kx#WBmA98~4HnVF@RUAoK!W}l_>Fi_GUZ75%I&R{C!l<{ z>mHk!AkjTe!%&@ID-Aop0u<|hF?fn81Pd>ODh`%gMH@EL;i0mn-|oC~Hs>b``4me_ z%<8m)!p?_1!h{viY6o&cq!2F1CnBH2?|v05L*04qywwdVa4ztm=zc>AP6DTj1hy5~ zANdqk;#t8$@L3~rGKgp*S_+;;XuwNrnq?}5GOF3OEWG{5{01XzN^f_d9foaT$+yo*Tlj>ulg+Z+J(4_!If9&k-CO= zk-7=tn7JuW4pJ}p290_Y1R2WQi(X)|J-L^|p*0Dy*qE~`pkoXAy+1)aTgKulMGgv| z{&FLMwq&dZbz9LJi9v4}4KAU(KmMfv@8A#=;5sC(`o(Iq^jz!bb=9gXXpzaxg=HJP1CfR{*9X zJ^wFWWey?b;LZYe1!UilC#(tTxt+`qW=cd2+ms$J4j{~`4?&2Kw*oPU+_Zg!=TuQP+exaa%n zjL8-KinVhs_wJa^-FEQ%#2H`mcg?j(YwBQEbo2ov6{J>9T2tJ#5!^p&V=c~q^->(3lsrak(EHp zZ%+f?N6WQkSWTcG`vWfq&QVP?5dbSdG>UoomSP}Q=DW~2>C|PAEE?!j(qINNFeehL zRAI8g>QA)I}OdptR!H znfKW)&0~TzSR$PQa%|-Ky=2vg2mOA~5uFu(-p(%^V~Ks@Ul(Ie?QR#3@x9T?ttEcx zWV2f3U`hk5kw)MW_%75mWUK3@2;^2VTj-5G*qX-l4WLi z?&H1hX+RNuPouERr$apJva)QmI?q+b)n%QbT3`fEYPv%2-Dp1xXeG!N-UQuLAq^WU z5Z9Gt&?I{{a~ zQrV}zD+G|F2JwE(SztPZuA2&v9tIjy(nDbbrzrySkXDL75W5$c zsUA(lGz4Fd@9_b;4-e?GV`mWtkN7F(qA&vbsK9iBj%&>WIH&!V01oFvBW`1lHyoWZ zaFh^M4DsI=H|OL5(N>_wpt}1;f|?Ay0sqLLE}>S5v<>opOCol#{dDSnV?9`VyY*`q)b zVhuT+>g>^0SuZ4!q>!zMO^kNtAuao8tm7 zN4i_0Si(5{rgV{(X7Lhh!qlBFQz(oC8qx63xdA|&ezWw zER6C@^H%m5tt!bk4B>Dg3U`=@I(dYKW}}v(nTj(K0Q+!FuxZ-9W1i@-Wf?y(u0pobbEEwd?P_`1bR+`(~t~Wtn?_-F@6*T)uhhQ}dfw z9-aQQ9=}LQocXRUJwK=GTTFMmLA~Xd_?E692HxvjZh_A7hw4c!Ki`nmf}39Jj_hNn zO-gq*e$#p`?6juu9B4nLWo=beMNiy$*qWw#3#m<2*#XUODQ1@~g&Yq81u9%lHB_i+ z;!YZV(@7LGsISLkQinzw0&;)2j-;mLq(xP>vD@7QsKrDoNZ$82n?(qGY| z+TBrfgy|&j{y7i8IXxG=w|KdnwQwx9?APQo$-LLd^_ye$=iFbw&snAIs6s%QK5%n< z)s(H6e0n&l3Z{BL=X}n2od-Z2)-SQ&-zL9JhCIv%Hb>yynP7x=b0$_lIGH!)063c! 
zRUeyHRLbrpzk9O!rMsv{FZmQac=E}n-q+p#05azK&~YYZ zFUMk2O5Y)%A7s-keXdiFV(B}1u^Mv)LdKE9KY;l-_5quxyAKFGGZJKRpaJE)zSZ!> zCJ)d)3;$=$6@ch@S5)@zVYWPIngJvaFol3BBnRRS9LNm9>4Ckzc6cQl6W!`VwhK5O zn+LG3Dfo{1!70URld`_&9EW9>%V*Yp@gKGN&RF0@V592ag-ROF?vDNa!>@4>b#m~emPfcSYC_Yj0>vYgP*5JmwHQr1>H1|nrO@sn z2ut?pE3+=W*BtSNXdGCpasBx;|7I9fTOV*P#wU1z^jCX7ro6{XF)0E4hax{5YH&}e zCV@LUIDh?aV!gs3{AXX?W+fjKiiMKJYs5ekX9I5q(DINP^(;0l`0^&>v}_`T{L20L z(Fo%PMld>Qa3=ujLu+e$EjARn0SL^c`@HGU^qOg|28fC>RH{ARV`nX_0DjAPLVXpw zs{=DwbP*UZ#hO4zSt>8qT$=}VC*be^>D&SBI-oH?wc7|j5x{PNO(NLmWE;m}Tm##vyj^fA=U za9e;zC=p!q0QdlPH?kbuv;ejT6m@`#LoC%v4na(4=%&bFKDIH~d!)y>)k>q)-b?-c zCQ{=eQbvA`8{w{}haq2Q@sHINzy!bMVVIcXPPiKZH)sH4CdRtJz4x>#>3sy4KUbt6 zsLL6(irp-&eqdX|@^Hg`p8|}`G+0UvED7glM8LM>tt(aBql0W-^%qMHqWoXn_-Jd!TW*(Yb z`CHx7plTieYHei-`FgQm&=m*PDhjLH%okH_c+R}O1L9fK!YaP9VQt-0Aoj)8ktHs* z1zYU1gaJ8?xs!!szCpl#+D3KJwEjb>l$zC}>{yF-Y1JfzT^LF852p^rQ&X$ul`WOE zKXX$c1bnH!GpM{C*3a5Go|tu}R@uJF^IWadzlxS&}+Kr(%IRJ5S8ZAdKo7 zy0BsBU^Uopc;Y8kBc2>T@bBGFJ6s&#O&;7_UZ9=}wjVSa)kfby-%*z?E!Vkah_N*i zBdhQ-rAsj;>g*hKaxOu(0DTh-Cf#fz_aQ3iwMBh!zBMzD@QRAYUuQzzN+}Ae!{#VKnPyS=zwD+@0$$*+%n>ZI&@?|3b z`xGXz@8$~C`!Ay!Q7`nCZa>nAMZNEXQ6ZIz;yyHmcL6jcDkJ&6BEHBj^8gIg{7 z5Plu`j`}}i3ifFCm91D5QIGk7hvP9vxoSeTxB(}iVS+Io>u#?SH<*AZL-tTof`OXB z2Q~zoA);5oewGcMm~2Wwsa`yL9krD zW}|#AJ>tH_uiTbQqM%_BXX>}Qf@gB3)@ON2#f{`bg^ZuiK1XJ*mUk1@tGtiVc>9rT zQp7?Pqe!29I%q0<-iyaxT|F2RA4V+41heApFD$w~U7KXWad60>8&>iLP&+YazO@rY zCd`@iwY1`X80rOrz^OT(E!7J32jM6R*GM-#fn zvdOk|g7IM=O>ZbNOC17ny7Ux5kEAP}QO0WuMV|duzHO#!X#U@P%KylUjGnPkp{J3+@f-8=1$gI+F4ccel0?sMgl_ELeQa3a z#F}sZHwaH=7EWv}z{@V2ZUidR$18iU?(X>RSi9y(t=F%2{0G=_K2BPlr^**&qPluMIYayAVpPaXy+EtmsK$uZWw83ugTwn z8Oguf;-0ULkt)1)Inn!gF)CBK2rGTs|8ZrUN9rl<>ZIIIqsW}WiXbC$HqR$3JBfpZr z$k4qz$=|<6bgFp+L3PZze}C@vEgg#2$-A}3u6tf=NIZ~FQHVk_u?7DvPe^G0TV{l* za8^tcG}&!)*y~40PR|vk_mAje;-_Nm&n%}uj6U-SE1)`D&sd{GQG9b}ndpS~!C)LO zn0;e39jq*+zewy=Ktz)cMwa~1^X{9>Te2R=!MaHfH*O6cG*u#BwVwq!()9~zB>`4d zrA2vZnC_0=gJ#qoqL)3FI8h#wXHi<>=P_D^(;g`CkCPJbE;8se<{4rpUSU^7ICS*C 
zvU&f(iaM2mw#H1YI%zwC^Nk(GJKJ+}*n~kRUu&JFO}+O`1TA#*Mp%>M$u3T42z(i!JKO| z4Jau``&uGO;3i=}rUjQs$YEDZtxmOJON6eYls}>!_bz6FQoIUoc zV5EG2cQ=oMk2U6v2zZ{$GDSo@ae<8lB>d(mLqp1vN(B_h1BJP3q=+`l>TjfTQC$MU zVt}Fs6VQ|KkbRvLB1Fj&R9ef;c?89KXn`^>0d1nh`$ZT3C;wEy@l(=# zR4*RtQ$@7Yw|?Pf%>9v}l+9l+g}UDl^-v_0^t;)TM31jYbJ$nus#g4rK0%avp1>IE zxll@qTBz=0?PV+Mceqv-g|f=und~eyQc}oa;4M?5mK_>9?s)xU_`3S4*{R6v@V-suV@Nozd)l4iC++dZnc97~52i>}{Z)C`VV7 zz0avz*3N@%Ht zNQ0L)nx0o*O-n@S5EtCkv)$7zAVG9|>+6ba-aEwzNEn5=M`Ul+tV=#2hbTI{i8qlf z624VcEE4tVs<9J4Nmj71w_x8>ilQ8S*iVs2-m9w$2@Dk~_zgx5zpTr7pRWts=+ zpA09KuEM_)-tLbQ<_T|0J4Q7{x}<&XGst|>Ui^yIC4}depiv`UlFKAJ@Un_MT~ErU zt7g)pth!P#+w84KIjc^6xcK+z_XmspqX!edmfdNSzxwO*1CvLtgWVT= zkMb=HHQu|9T)I%{!H@90{m)4u^23Mo%53Bp*8jEpDB4e-y-;Cy5DT4|75}%80|I>y z%m_;X%`+v^omA?N`IVlSyjgmT5K)3deT@eR|6Lgim(Pk*bWMFuN2?clZyC7af*}tO z(1u72tgv1XpsO4;XTH?&SinMsh3kuL^FRLOgo0%9au7-%m)0H^>ELmdMpjAEz23zT9w<;~b1f=^JbjLDyaoy~gf*JKr zbxXygTr3Nf)W0mj#aN)l3*Q%>sK5f?=F{VdMzKxphrOyCo+Jlw2bQ;fbe8OWwM3nB zjjyThPgCd9)IG!O4KLH=@{GFdSC(Y)w+gn`a<$={7$IlZ5<9b{95-6A|XhR(v6;@ECvyZkMR%(n#bnBX)j z?HJpX2p*jdU5{;oXsZ-#jDR*SEh`c2>jwEbPfE|7RaXvd8GY8 zwq$2FnNST6zL=?29~ws2_V4`DBWSJL5KngO@z29f( zpT*&G>U*(zPaetVu9*F@&*Rriv)+3+<@ z#E`ss1J^Pgxqki4`-q09y@$_eMV0O*fxqM%9Li%W!>}qQA-8mIh6ulSP049x5HuA# zV~DpTX;#&Vzfaqcjw|FxvO`kum7`=l+9IC?do>3d?OlGBb7aoK1(Fs>cR3KdbiiEkP zpc$U7407YmM3PcUw=72BmoN2DhIp*cVk>Jwe`=D{0)uhh+_#90$|v2hvqSaY1U!Db>t;&K&y#XV24-#mGEbY=P4snvHRYuJD_tVKb!(D$8{#h zx;@h5X7aIYh)ejzF{KK06})rxxpc1q;w!t9?CF-p({oGy2$_WeV=E(4NKDsZ+ks|( zjRlncyCvp{oloq~4X)q(@^Cu!AmNI<^tW?b_Ge>GojSax*x=&5#M@qa27Unzo4W&^ zmBfuZpGaPR=z0D0uFbuERjXaZnWhg5E0jbA#@`I)Qj0NnvyEx?sCxhf)35JnBp4RJLvsMj_>18)N zkt|$ML&I{8#MBZgUmi0ODc{&mTIIlQ3Y0HD^F(wfFPK;!A%uIzT+E3R+YiL?GdqF6 z%;wUtB)oM_P1NR;w*-)fjMZB0BHM>l(}75Q1_?Jc{x)KIjSMs)Pn@O=I?AiW-q&wg zW;UI6s=b#}gYV=;PT>o?@jR^6G%u{z$LDTXdzqldSQt47Bak#)Zy0wN*WKQ~MB3hd z)&`4OIx4S5yGwiBYcND~wsN~5Z!Gg@GYnXW*fdOumIy3M36Xd{*h3(Jud|pE7^E}- zBa#ApPCCOsT6y08L9Ri9qWqf23hbtgS-)H|7aE99+>Y`K(n6^!rwXJPJl5F{(vyNH 
zv@H#SCi&E%l&MQo^3SUkwjZ$EHlicATW(cVP#MSdE{buA_#x8tvbiIV1l#iV_y{9amTt7Fd5AOXxL*udj@mz2PZ;R0L8Mmk|M4G4`;+)L@Ib#!{VU(J06@v|7!2)(CIt?T6nzFPJ zK1dMpnu4CO3Hz=TY0uav`X>1+2}I=O^87oNYn1YjB7$Q7pH6Sgi1}p(xNet7P@hJ; zYElVh8l0~89jSGsb{_A`s~oi0@;#sR;v{!I*`yF zW<-r>wf{NNlQNMLD3=v*@(NNa=BV3wJ%`kP`}5D=uUY@m0-QqmqfwodRxtZGKO3gC zgYXP0jbgbxVqA{jcom?zEpzHwBE#~|1}%yJtu|v$JZf8bfu#<9&rFS`)HfKHn8;o3 zD|(4R%Osr=Ynvp(^mK1ZN$Xr+$gSQEV8WAF2TdKx`-4|S$(#d?k@9slTduO`@~+*R z^n7(zWKI)+wKLa@Yml0nd<;{mk6$Y_^2_WODD6AAM= zlzzXmNu~aNJrz(L@e=N(XWi?*hdl3lCftLGnS1xLvF=fc(c~PuE`jYp#;{Q%&SD;x zA?EQCdFVkqtbH75pw8_j)_yDDB;f?Sr6SkP zCK2#tQ#Tk)7Vu#VStusLLWUF)gOkBU=1MX+>ZBvPeTd4ahSPI%LOO z-yS->C3HHfIE6*aKIEPpcQusdSs$HM5NRHwYxI>?>{-7pBw)*Ger{h;P7*)Y%c!@_ zsl-KdRUE0dcs&z;K52K(3iC^!wyt3rb$PWdo7vW6vjF>U;+%>%uYSXD|E?jKTJ#po z1+BELUft0fM$awf`s?Mool9}>(efM#s!|Uin{}%NRjIl^?BB1PzNG&zpCf;)d~kA$ z^1X6Rr+wRsqK{J44KNj=Jpst&cWu9 zn6#Q@F)-wnLuU18TDL>~@+@N06b69wPKfKnpzSbXmJ^DDOb=Qq__UC&gS$>04IxaQ zLM&n|L)U>L&r&(~SHe=l9TOJ9Sd^+TniwG;ODRgxzaBBx*l-}`)PdC(gV(efSeRm@_F~{iQ|u!gtt7|a(Uk# zep+lTE-c9Xe-D}bUts|LpBECWdUO{>poMa*;(1)sT^I^kdsKRzyYsN3Zsu^Kr`FS* zje13gO9#zPD{A#8Qdx*IVBW-tvG@4Nktddn@$jPE}6zn2S;^7S%oBPw(5>dfz*?iNLbyGPt^0 z13e#X8GON&DjCN45Oa;3*S)Xvs&b}sUc-Q5A9!!{c!Fgq(HDk&LZhLGfej80uRjxh zXulfVkC+mcLz<%9ywd{WLfDE7_{ie3*UIOZS+6cKvkD@pKQ6)bXKpc;)LOr3BdeI2 z7xB8k`BL$8Y+gwGj-t6xZqWLoqK@nFn?pm)^(P-bZ2$jX82_izle0}rddpwy9qGYV zDVCD-q37SD=mIyaP%cCid{s_34i7!~_?njOvk@ly`>tSV*(&>0Z3Pe{U?(~@=mx7X zLB`Q89NBoB7$Rg2f)CuL83_a|Xt*=6 z>%V6E*7_smn`VwzBtU+h^grb*f&|jwJ67@i?X`id7V8dIBxNrGQUn_z544yz&1;6# z&T2?;gWN~n!cBs$%dyMF`_Yhp_lO7ica4w-8TXX^)U98uiPPrCzeO6Zb9*8@X@}Q= zTsaou0-p8nD>hW3b`G*4QI8>o4if0-SJgMp=qR?SrR%b#PaQ0;@yiKemXdH@o2v@R z2?|!7ckQ6N!UpwCh69w0W_N{9(0O0Wb_+FZwM1m!aoLe(>Ty~7vNC=nrhla(pP8${ zF9h+NPm&lGw%6B$%{V0$xM~QMJKF_jhh@%aSEJ$_`vB|wxVCj|%P$?NL=q_*-Umld zUY`G1r-*4;tt0$63M<$O>KtUK5wVwVV1*`{avLn6MF(?>Pf65R*lg64BD~0{3rxv$N% z4UTIV;0=dV%Bs#LTr3EwY}&LrH>G5B494~+rLXlJBrpUn@9rp8oaTOkV8qV^P>Ddf 
z3wmxfkDY~q*2PGcTmX{DMm8nxqoL;cIdU@(BA&c!?LEWSo!=BV_jM&KG>_;&6JVbc zQ5%I(@`(&cRyt_th)p4ks*Bf+gkU-_4`xs+Fi(Wrab<{qy~gEWrQTqw&|&0M)~mA` zm%}ztt+*Q38kjk^|G5)+;Q5}JVx5AA@3e0H?zeITeV4o`bFE{=o_t~1tkbGR=DHiJ zKXKDZGW$tx#*-gU};s9j(O+)$^nHgbTw7ip9%_muj{3ZbotGj#Ox#P)=0xvYR@m*)?m>^& z<~No>I4&4Uum^kgDV9jGq(No1|5T-f6DCm(EL_jTQH2Mk&`6<36j*3w>78KhNy#wJ zmJeZ5+hA=Lex}X2N+rF!%_GnZ6qvXrwvo0Mw1Wd|`95ti){-Yhh!8Ybo87+ybe8j? z%PI#X8{w4+%oxCBsD7%6nnd}$AsP(EW=vN z^MF5vXw^<@7q%8V8t&BT%JPeT?Vj3|GE|H7|ck#y2J+~}>n`ldWLUI!ECfa`RCt^mm6`=()(({T5tL)1H zZtrEyyPFIvO${p&fwS;Z-MO*NDV5>L5mVe{EY!`2eT-W@2_F`I$leoox#7wE1K((? zpJ09Wg*;z@R5E zVBS|9zRPa*kZo*v=Io5E&x2ogKYy}!;guGv6p^U%>GbWxF`54Dv6GGHx*(G|h8B7z zxSX{lXZ#LOy4nxBLKVr~ZT(N|eaR;4??KOuUngkx-g;`^b+D&>vT+*1u)AqRkkQGq zDVIF08w8zeO|I=0+Xm3NvamV&5O#HJ*v-`|E!-Vdp3F^*<5{g$E@G+UF8;^yqX z-)2$pDHL4nS00B&FkzG-L6>eoXLK?Wc_WED zSw3gPxs}aJWMCO023W?}Y?j_XiM8KOvd72uvtLR-XcOS=4v~sT<%io#IpO34U&3z@ zua(|EK{$cTMJ9FdI>M8->bC0kq@}GUsV1`YAMD5K_UiCPm=4>M%hD$@ZyD~8rB`V9 zWtL}N60bGT3@~wat!On_Wc`?AWuk8UK)l*Dj2`K|c%(I~yluhmN2BWf@YjN0@P=gk z;3L=VZqfaKsgw)4hLIlE{4+IzhDr2DkLPsn(-(f8GCgj1IzsH@`R5Eha-AGcKiYgT zRrAk?AMCqj@*VKLK$gBJWj5k_ikwiAFDGw`A|1ut%W!6Mf~4w?vx~#ZJ*Ahj-m#QE zuI&^R{xkOF#F|y!8=enzWW>vV%dH{TH$D{<2;YtmT zrIr{hlq4{mXJr&I<;?9E9dJ-d3oIx2m9E8a4M#eD0tn&%P_2OEY}qoWK8!sP)0f4>nKOY=zIk+j^$EmOmU2 zc@VS&?Y)+fkCP_D12rN4ZA5zD{&kAsGfw40xk zHws;kl-u7Ub617$*d6qy(|)B{9N%>l!;dQ}_^Vq;336X;cGTJgXi!UiKlCrpy8Fhw zN^X4AeD$QaanM)~OL&MlF}&eG%$^rZ!-rx?ud>nxb39CqR{a1liPO(AxGgi4cnjZx z9sGM7#}JYN>~R@&59*NXccX=`l0N7|Un#QHoAKoem=4JJn9tV$V=ckI$sD6RV?J99 zQD7ei6D?RC47r*;D3B!;qmqRV!9cJ@Jl{_?r9mDAPQXy#9J8#3f;waD;8~~8`k7VA z<0|tC?-B&rTE`4cYEH>Yt=_U^hq5OlcIuNCZSDS?kXzCca}1E+jVN|Rp1N6+z5B5I z&$4GuS3{WcMaK+o&edn#opZ_?O*(PY#5Iuqdt>!c+M>C}Ap7Wbum2fsceUqq4J+tV zw78E1=61`HbW3bzyOUg0e_3$Ou-wnpNNa2HIiUi5fuz3mvW|sf4k7MHDATO%XnNF> zpz@|Xy#i{+zOEB<6(u{VE9B#6yp509#ZJ#{nRNG@TNC{{Zz3uMe!%&Ev$pKXH%rgEM?CK(_C?@-nXb!zGi`HRcKXhe<99GsS)_t`W00ZOAj!Z>dWb?R zPI($)XZL!>z@>3$%wy>F&})zd*sqO|W)~N#Y=H(AqJgcK0G$TO`)90gfsU`A1>y^D 
zT>XuRl<%1(dU84{He`anKin3>@wg4069#ud$Z=kRH~P2^-ne>MfV50HiRxK<`1NrX zf%{Z~k+9gS_?>1*e4tfagJw=pub0XaSx?dgI_d9}UTF^M*rng zK$Ni)#0<)ffK#)Ruvl9!a>JT4XrV#FfRRSL^5Tg^?T`9Q=t-Av8a*3 z@B>VG;vOe>olmi7@R3gruRGl7dMs-!Y$z^mYg>BahXdXZoL;Y=tqq(TGTO9%{p5<) zURS-OgA+MhGh5AX2b|2_qO(1E5BJsUZ3z`AUe_PrVqABBxc&Q-fAJs8rzh*dW9tOa01>{j6)+-rJid$E${?t=4I zc93xa@}}RqPB&d4$L=`!>PJ)Y?!Gd`uG5RMYJo_kfCi%>uh#m3^}dvNU-gIq#KMa5 zZfJ}u2gP?Qt7>y~{ez#mbuYoS9lkHX+pD2J3__Bwc%*!fSf0R9P*M9QsIF995~2Og zZ4AUJ%J-iB^WA}x#?S;IF6S<{7YDiM?cydcF93btGzdj%=JssNy~vq_bjB_l8V%#5 z&Aps$Tyi}_E54W|mm>A518yh=mTprj`TYn-WKEs&fSPRH^#1vzrI90`AB#BCe@n}du`=sgIJ1H))e$VuS1Keeb<(|r^vr+2bQ_VEc35?m{Pt$UR*XGNa_ZyEh4j(;exh-tkwfmtc7tBGmjQ| zcPmsgMPe3iQc_c5FwaNIv;K@D`)%Dn@t^)Lh+O3JVMuCfQB_fD(XkS){VZ0 zXmDVqW65GfHv^CxS3=34hwXvr2y$@0Tmb|za2rcU- zC07o*lVaJ6yG#hn2a^1Fjh3Mg51l$bJQ+(G{i`xZ*62|-ck@)n3*x7L{0HvU-BHW& zDF53m-uZkmDT((oY2=Xo|{RUJXNE3{OfUEQg&%%2h;30D}fG4!1Mmt7W4X}R_h}~xPCW&`%n4C z_u%e}n^@3{ss@b|_v9+*#)D#{Y(80-2rx3UNC;TM^(Jt+7b4J#6%x_JaxjuEfD<;` zE0kxp`>FMqwE7sYd8`Jz#NK~bj5j8mc8Xw$UxLYoeZ$rGp)oXy#%6k9z37r_^yXgo z-tc@$)QDRZtpr0X&A3Jm>ka4)yL*AvTt|zK-^>H^T`U`&om&{z-s_FIS6sZ~Q~73_ zLo^tn()7l9>9+c}^a`^D>5Rx%mw8r630kTC+{@M3)A5iNRo^J|lG~X&;8BX10p>2( zGBEYn)JoQe(-xX$gy%(PYm2iT{z~k7NjVz3F8kH=fn$mmo>rZqeh1cX887QW1>IkN z?wq^tBW;OTB|PxT9$(1MJ^#J|vW}>sxu*Sz+BeSrbz!`OgN7DJebI8n&>OMGXbyr@ zX2>KCEYfk`;eW*eOpi7qQb?xrlrw`Ty2_Nj1B0a{v|sfi#JSXeHCI9n5u3rxvfxXzQUO7NB8tECjl;Tyu26L6hjBjs>p z)5trZq#@tPSWGz-rXiP&_~-1`nL`=TVC*NZ&LDdXd(;O_M{S$00||89;%dpp-NMVfGCKd5CgV^F1m_hUsjUPs|bQ% z0}&~T8bH^!D@iC)6%D=!= zL2s?%PF?xS;h|*L+t5!f1?Lr;+B;(3iC&d-Zatp5{A%dD!<&8I?^>L0{`KbWw{cca z8Y0sEO8Mu~qxJf${;UCW((H=m>$cYR)SOsdR~mn0bzPy^BY3rOaCKqnich0w z2Q;6=)D0}FaC*446|_%=$urZr!OU>#S#O6-Os20n(yNIp)JNZ*_kOvb^e=G*?g-N) zlo@Uk4pTD|7S+1~5H*1uKNB;^e&M4Jw0`TX9F%ks!KWPt!NzQ?PWD7Gzc{BD5+z_g zCW~o7SEtiSWpZ<4^=i3LUMvVU31@QYmQ-u14XKQ%nNF6QqnpQMa$wO~x+UEzos3_V zEC?0Ap7_A7@#DxDt3=1Fsn&G(L^?U0jDrz^4}pEQ0)xqL#e=FRzaIJCa%Rh-+9Q8Xmpqh-hWcYnA{*}SyqeW@ 
zrB~_RFG{pmYlCl__r*FmeyX?287(*x&(Haq=H(io5|_Af$ri8bd#Uub4iDx`>T6iu zvEj3>(Pw(LJX#3u)_EE?eee|VxccvhUs0680^U5=wYL8G>Zj}P*ZpFkVt`K)XFsiQ zP))z1lCFRJ^Fo^0%u2JP-_xE?IaMx{wzaJPc(kXsW&g+TvG>9aUo|}X@96ZlwmFuw+2VP{C^fk)8-{Cc$hW$rt+~;lKg0{H3~?WaU$zk`+JvyIU;ZDlw6m$RdLR%pUn)Nl01gqz@r>?&!f{A3iv9 z(|BeQAAU)Tr(|XE8pRx6+>+MY;}EE@S=2A~ge6rrtfo>kv5X*2n{PT|W7Z^=EsH%m zAtv`I*z`#suLT~f2?8msDp*tkYo$UtD@)K4RcC_mI!hSJ4$2CLl~7y>1)o-lFIV>H zwlV8&>%(i6PQ>b%rNK5!T0*VWygYhmWoJUD*QX1@RsY%{U$!OVr^ohHxCF;}wVqdB z1OEEz*pu!lDQwI5X)x|KxW)Z2iAyLQH(K;H;3@rj8i+w&flh5j517fNdkXKH=}cs# zR)I;}kNIPLhpE@oh#PJMJcolSJF`WRk$O!*UO}O8-V_&Nw*|MvmF^xf!E>8*{Enzy zeD&3=^wqULZsGrEG8}z%$$p{4t77}Tp(i!6Z*1<4 z_0e_LfFlxhJ3MJ1T#8BR1vZHj;7HCTUalvn07=Ap*AIisq`b0=&TGOK8q2&c?96;L zo(&N~PdkZVeD5@WI%k2ZPccqFaVU*JQnB8GS6~E>1~$*8{i5)x?$Q zslo}(@s38X-!`jd6e^Nb`GygTVN+@NiDZrl^tBu|)zUjJ*-7SW%t41zSpw`=&LSwq z*HmsgCYz@oc*8lsNoJ0Z)5m1bMMn(AVzSef#_5np z!GF#k6IY2buiiDwGK<}1nHgP9fbJHpL$l*l({e_1Gn^l5o74KR72?UG@Cdx0JIAUJ_{-he0x%>9`i<1`rx>9ff+Lx8~n@cJBFB3VG zc2R}W`13*%?eEU}!6~Zxatxy7n%seL6-w2my0^ns>3sIMX^HdcT5-Uk^E6yP3*1T} zun(g-B*sbQ7JM`R4A1IV)><9q2{D5lXT?7sYhxTq0X~~%)A=}i|CweAwk#()kEhwR zh8@h)e0{3fKF2uMKF^B3wmAr(Zfr77v$Lu{4A5`lIsO`W3xQ@hFh7`Y%$kZCSVehZ zy*=+vbbm@7kJhxDhEm^Fbpj2J#H;8y)k$kYF>!9M0Tib>32!@q>iR=pHfdXKT>o82UpfF$mbx0{G zoqM8Z+rJ7)Bc=MWmZMK?!TrFT!s$=XrrNsLy!|5e8XblD_w^|yONW5aF!s)!^zVlC zGxF%)E&dteAG`&Q(iLX~LGi;<(xDNOB`ws_D#VvkPG}hA^L+f>P^q%SVbI~htGXvE z9P9R#kl}ibZ!$)}^p*fUwa#tOH=Wx$-!u!V%z0G8i0U-4X%*swIaa*Plpl3WYx)3- zWs?U%968&2Z>AL#laBWlHg8~kg~AbZn>HTv_g$NoYyM#Vp=S#zI)|sx?-PWrs6m;3 zT6riC5Coff_hw|z(Chb(#f&oO%N)*V{r%DMSNgy^9y)e0xeMfz5)gMf9zhkDJWt8N z$ma_wIdBGUfwN1aw_}#J1@7;fYi7_Bkf2~IE5Dzm6LULfd@qXSCdx$FRL-X4TVFRFW~R{Uo|aCj&fycEPql5i z{{7F_KAJnHzUwL87_V5UpZmOrIAgG7DM?qc;du|KV zFU`xx5z8`+hsn%z-QE@;3tF0ToyGBt|(y_gP)*m8#9>GtU=gX4W+ewybf!Nd8vV z#I|+Lr%=d$Fc?}*HXA0d2`)u(uU^_&j=qiBc1U+5t9k1sD1pH^(Rl}!Y>jD*dd_A$ z=dX@#k7^x=Zs$oZZ53>dx<4a-AeUc*k_B6vc<6Sw_+HD*JfE3`&JpaKyMz`lfUf|P>UgBn 
zf%yl3zc>DxIv(S?8DTg&K1^WnjQZDD2V^{bkb6`Q4CXDK&RYz41t9+c^KYzU3(<6} zBiSi%I(jh?l-|`H0?o8>%`NFm6moPZHGRo+(FYU{sK7nHJvJa60J5M$mNS-kl;05= z&B^2BS?W3^N4xrKndFw^sVDf-6%^IUeR~pOGGj9N#E>yzETBJCg!2Q zMyE7oe;EdWQ*KORD5ZX@rGg?98W>@Je{^2uO6$lemkRf~_ z4+70vK;7aI0k#D~GF-0|bZJuLDNZpjgVLZZtQ1|s#=PBEkz2si=ZtuA-OsE8FX&Rb zy$0R>yZy1Rf~u+9g^IWP;aJM4NV_bHj>xG@9U8}2o$l&)!0!O)@%;cBsgPwb`7-tD zPFBD9E*p3@NwPSbR}*d_PBf2BGT&w0uBjZp8sFrn`@#MeE-6>Tf6dTCn>P#3zoeD< z4JqnBr70SYX?-W8K5;KQKe;D7Y~A(AgeTJY*7;;bOTS`1aUjP_@y~aBYT}*~g;;T+ zxshTzf|P7$St;y(HLI`hcpK-= zUG!%iLfFAT0O$!gF(HOWq|{L|0cx;*R0s!n%;Im9X=d?;kR1ff1txNkN7zwer}c?{ zi!bKi0H`M@_QaSQP^)@HdY54hRp689+u-_XL8{QxVO5|H)gP!cH4erm`6+_d??c-= zN(kr&&uITW{RvOXe)rcKs5nG2#YerJ%Y-{zxsV0H#`48G+yNm9=g5EWWFTmhCO6>_ zbvdhpMixSTy0ebAJ7y`WRrR~1#q>4-QEI`VT$a)BN+gG{Kus1*#{eTiZpO^;jv)!R zk&5}m-%RkqCiBeXXkv92l_doMp?`)qAJl;CBIx}om1dt*Gh=7VyVD0KCYip1qMyPp<{)@&l679rWdQMMYJ&0@9fRs^H#_lDO-*5=G&IT@ zuNhWLnUMinQx4>ygJ9!Gu!H7TTwghp6{?e{BPd5>sl2Pz0ag-1xsu&>UiqoV$8j;Z z#SWbXx78n3M;>Prqqv)}14-c8YsY(B^yj{%t_WoZ2l+FHZ-{dC#Z>(CC(rddc{0F7+-2KHemH(_~VBFQ|gq!`jtK}F#CHM67 zpSX(WM8@s&Q4vFYB@~mXE^^YSF`+tU@RVnsvHpPAsl; zwGxZx#H9>OuqN=9;YYnk7Obh=hwl{n%ItZ@r6{07jIFS=D%D_MsC0c<6jk$-%tu8> zzej`)^@;%IEb{@T^Xcj)S`%=agEciFt+jTNOpfj73MR!$v&dw=5NOOi0>|4|vK$XI z=XC)VFmfz&L+iKGc>xF4@39#jO*$3J^!tAGoQ}Jj_?(V|+f@yNZ7U)F#m%ksT%{X8 zo@G81q>cGXBMI*OAIFDm( z*;EuQus>Qz+=@N9jI}?^?RIxpqDDnUQFLr1KZI!il^G@4EQ+f~!8$!#Auyj0#M*i^ z-|4<6^Z8UObh$KSA_1+9h#wPRGkkrmX~{m0KFOl|cJBNHog zhtkov4Le4bU3BmHMekb3@ANYZ_aFIMTU_(I({$L?YX6%Y^;gg$Ux2uIurZ_U3&nWNqtylLeQme$Wvo00V9U*D$xgspS~e?4;Nm5p#G(r8fR=)4K2gN9VTk3Yy$?a z-xWTeAn4j>$lbfZ-N!W)yvr2Mw8L0q)4S1Fs;fu;yLb2Q^?RaQF^meek0z9(EMjT& zgNNU?19xo|FrdN#vw>`?JWyp#E>SM4qzm@8$s~LH@tYw}55Iv9iFws|BuRcwoykWAWFe8N^D++o zx$=Q!sD&_Ovehhd$KS^v|FhkW?b5z2&N5*+^`CX`nqJMEs3Gs2vemaVNv^w4Z0Rpu zp15(=+7GY4j^ckl-{A3)6;pSP-W$f}OpEL6-13T|Gx6~@o|*-;jgfQNswGys_xd?0 zEWlpZhW``l?Q@|_l=Qapg7v(entSA}*D*EzJx!V)j=G`DwVf|GS&Sw5NUpQ4%C6jrs2Hg7zBS2N+ClP*4ilf6TC z89yJLjMiVdjqH!i}ALe(GRIgg^tUlc4<(vT0w8~f!kT$Sh 
zB$**J=a3!VOHg7*5na3poinX6U7$R8!CK~;_oAW6ChueD`j1=0`>fFyTEenI!MAIx zincsBRuq3}?b!+a%8|DBbL`J&wu97fE_7;*|Bi{8>*7lVt!r;Ro7<2qu>2cOUFCRL zv$~pR6m7e=?={(|^JrJnhK;`}G3ysDMX%jvrT$hB=TgCcs8O$z7NU3&xwrNw=qw|6U9o6{f~O* zCZ)Z!5n0^P7tJ)`yWh#6J6?|?u{oy}75 zLF)1hsh{jMZm#3)CeXw&=t_RVluC_CEsg0Uqyy8y(ijfDqzM-B4aXZ~0s_;k~8$sc^rEBW$2P|CqGB3(3N>X*W(stqa-@~(w%d@<`oDFaN_mXb-?`_sB_5)hZ z`uS<$MDI?u*;|&R$Zn4ppC_aiW!@6w7~8LpCF~&2%!1!q9-jJ@*gYM(wh#kj)shW; zgo5T2hNU&TDU!b~JX|EzVkKiG*2kKZMFV<8LJIkOgPyTHA*U33|kNu>#cY*c1O z_CB{MmV)w;+@iWQiB})-fLjTpjF7jn#fC>}20)(V=2l_;0SzqbT$I}i6nMzjU;7ZU z%!H2=U~^P#g@vLPP8y1~0-0S)A-yhbR!CbAeQ1OsQDc~SNZl1GNq+@hG#@j_o`zN$ zc8~$>w`#U1njm2-U|}n^n*nR&Fp_PdK^G-Tm}axplQ~C{6UOe7KIx3zgE5!`D{ici z92qxMklxcDk=o>Tz{w}Q)48G$eMpAR_PptILb?a6m-f}M4Nyi2__T~P2x}|g^Uvth z32E~>Kq#Eoc|Ps+VeGyl<7c_o^brN_gM1nNgRE+S{$|C9cZ>99%cvf9Z1-emZsdxi z3FQG_$3!Kr5^OCE;YQi$9MpTsd}&r}?b}@9X8OQ2jFmaGDa2QEIuls!yPd)Ul5nFTon_+W|265Ut-| zY*neiSBXS)f`(X{q@rO+)X40kSjZvWg(x96Zv}wzPRPX&t%tHhohNE>Q6~+#*Q>*z zrOr5w1vC|CNG{uYi7wfipm`d%KzG0>1NBP-id3A)gh;3Gkwv%en8}zwxY+zKs45k> z1O-RMqDeT=m4HSSs!tIeMwy`tFy6cprX+1}sv_`9(%P&@y(3}a zQ?K@#>aT;RcQyPm_35qW%tU?I$Y=dU3!&Rr&Nm)E^i~!AdYr zZ+Z_+u$7?Ivut`aWT50zdwU@hO?|9AHPPRHAXTEtm%iw&Tc#Q#_pTm~^ILB}J}vT# zz%f{%%+|hDc>C2t*7fpYiCxql6xN0ISvv0|=&yfN`5j`5HkuNvd&bebn}fqt1rVVW z8zoe});hz5P~+smXMmnV>-UgN9q}u&)8UMI(oH=0r+DvMKN>laP<9GplDn!#`7?L= zU*;QBLVERjH(s{qgeW+&PXpr0@JK;}ookyzb_HRz@^~l-?uD!~K@0eC&leI56Z8}I zPXJ^N6e_d%N@b_GyHI4FD~ha}NC13Zq~Zq*@7^rTc*gVS4-585?S$~Bjb^hOug)@y zl^M+xGsP8RtsJf~?+o`xTDL;_-yJGY!j9lvbq1G>9r39meWAaD_n3e2siq5bMY;zu zdBwQV*nMKE+HJWtH8gsGzEMS!d56l=W$Ci~ee&qu(SDI1Chznt^lHo~W}_B!74x^_ zKgTTNmbIO2`&RjwG-B;^X~ngv=e=38I*M<{$-LYdUn|enNu1B0#;kYFq0BETmR}+s z^m*!DvbBOlf_}r;p0<{gUvK6!MHPj`Jc(Vra)2E_9&R8}DN%*1$lqPOKY09wEwFdZ z01lKcSUWs9MPNYdIIlAk2>5x!bpC}tZ8q=n8%;PZqxOv2x*D1R9ZrxE2}x-{9CqIh z=03+H&07oO@&ivI{-uzHCWyXP3@S2-M2io&yWT$lFUpSoAEyg8o4}K_Y_CS^_l)f~ z;9u?3S@nPU8-u0{fQ+*D2(QL&--V{|;Q}(!Onnj6BG{ 
z172__B*uS7Q-BGzfCL8jBf_`s(3J+_-2Jl4qLmW3yW1{-ggr1pf>b@YU8A&4#kc@E{GLW-3Q_$8%*0hX-|zJj$mp{22*=Dg77jEIPVAP+^E zwaZD4!D(a!n=Il8Py6-WjdxdN;$5SseZ)Xj!86F`MXO?%8>!m3I#(W5kbEnq9GNQO z;TQ-JgH+%VK251CI~a?x0f!lRvs?nbO#49%3`}q&Wwv&O&%un0iolf_*5%k>vm5fD zUb&~-H&sFtV9bSL>BGRm<*7(;nl5`TuW-+I@%4V^RvJJ_G#KxCn(ZY=ldGJYQ;7Z=g zj34<|bI#>({idXxG3qyah3Kp(KzCB_J=1557f9ijeaCdX3E!{v!b zZocWu9|&iSSQfrx$)TG9dn`M6Jfp%J_O~4C1z)8Jb~y-RE`YsSCct=22+W05Fv_$d zokkC!`>BxRmS6o@9XNGjhUh#-6Q@q~wr6!rMgQWrre5D ztD-Eo8ta(mBVVaZeI_j@nUkL3li>@1G(@0ECnS5Tsud+>w&yBtK4gi>CTqNM)(>9& zD(<}V;;+tQ^mBeXc8|8|7o;Vmyl(Ds`|`eE*d&|zVcpDW`Q2Nb-TL|;mrGKvsLL0T zI4Spkak4W#dHZ%pz2bc+vb>|7R!G0UCR0A{@$hLR3&B8c^xzrVu1_!4JFQS9rrmr* zG6_?@kb^J@lYW@u&D$2Pb{!K8pImGH&`!WZ#t1^Sl0b1uL;e`q@aX;0$*;$a%{p;r z$6K>|P8~X*G%h_+Q>%*SBT6{v|B9q!sS+k!jKE;Aj#Af+ToJAF&Q-f=OJUKHilH{zBPEl1NHHB9>IpY{OjxjFB8~@-bh8 zhTwz*J_hmj#@O$2?Yv)FdNN4qVSub~ud_qy#-5>=#U#9Hx(gEgcr!;R9PB(tXA1q+ zN||^Tm8iYc{*RiNOlcPpC(H<7;(VkuSN_tF>PNP{=^6}bI{&^R>B7;AIX-Y4l#oQR zG2Yz~qp|RjjHML4qH0Fi3vm%ibtH)Kf=yv>u`404u^*u#TBhMiXIPlE&T%0lBa1Gi zeGVMay&)t7LhL}xlpFr`^hN76;r~Qjyb#|sqI+?D_=J#xT-$eLq4CK@F!5MFvgq0t z#pj@fkrVaTwn!&mr~f+SY(oJ$JxvDf`?pT7U}s^NZs65c6e zZEm*CrxlVj{QiKnPt(~w(_WvyJPq+Epig_=y;tNxJ|gXR`4AVEwrHmd|KJD5;zgf> zN7k>0`qSz2PE%$^@1usJwieZw zIj&cwjBt_#vTN}d8b^$I7DDpri`VuBH~xMBrXyO85$FN4-n)81pAwOT-k)Rab`VsN zgrC0X1#6ihb6JE5{xvWFJaKz{c&2daTBzj46X&=|7s@V>kR<4W=hcV5fWx#t{O!pR zI9Ll@QkI4{@p7Vg1dous;nY%upOpr%u(1e6YT=7S+uU_Jl@d4UeTVnUlKaJD8rv7! 
zAohfDWlWd0;i@?yiJ>vziMgNykS)U6YXSsYXM>%j)NM#M`Y!@352LSWbtWrI zLVP^(UQ}OtaXs&gOOST?xeVbV&6EEePclC*+4FbP9?6I0o8#6zJ1Rf8Q+1t-@6vOr zUYmA%y;eL`EX@79Ua|Ne-zn|?V%X{+<+TTSQdZSVc;+k5zr{+V~SxsMB8{a04x zPH+49zq|LXc3$Cb37gqB_$1VK*5^FQPUB*6bJrm5#MR`wLPFB*onfz)7c#7m7rkyO z4E|7=S}@bR(sSni;>s7ZQ_n-rCoXSuSzY$!$T!8Cgunl8AIv&?$<}iBjNYT;$B!4I z^}E_KOPMA0ik#NpW5vO80!|M*&aQMGFg)R;kt(Pg;0$A1XNFrb8&`w!{d@%(u@qKE zDOZy%*iqJ?F3o)5aiXD4 zIqH(AixEi+jZ4>E3xG@Rc96OgemCRSh^Je8-fHh}D|Dl1p2$A_1~1PpZ^BD+WY)pE zoz~McwdckDJ6k@u z$A~#1(*Af7Uk6}x)UbVvqaB79v`e4fIC%V_Y42X=@woc9RRPA>%3EUhZSTugJ4er7 zDO-OIOA`wuXoC2 zbk%@O&(Rr^*vk5|7Y7VfCQ7Db&t_De%_vkg%kMaP{ui~aU)@#GS&wX#A<;Kweznm7whnf6#hq$nKJ9Ge*-`>RT>X^YWzaud@i4dGH2!bj0Z%qI1bd2J zdNR!Z7sR(3sk?`DSX}eZVi`YH-sKl#;yY$G2JXDEce;WZ*sb2VnS|pbt-l7`8$D^0 zz)nw~Ya*5_eZLDMF??<)((KsLzxXi)+x=^d?NJTF++z8=#eE5jCCa7tO6tUN53-7n?E9xZc^Bz^*Y=+>%zpfy zflK{rP%k#TbEh_9CuN{4#FG1mWBty_vfz(1xBhwDx3f~Q_Vkq~!~W0CZs(tcz>RJG zv5U&}U!pE?ZB9HXYr73|TyA_;TZ3M|YpfP1ESgv3%>T8YT~tJG=efy_N(c5Y*!c7J zE2RAfFCxsona%Cq5?6LCX zJ<_enp%R&fNrl8)KQkbdtP(8b3pBAOV&7WCYLpc_eNgcjVLEjz1&>7J&N(tfupETp zSln4}w?8_AUK7BC1IFX zSeg1~jj)oN`J`%^uFzBAZH@UI1}51os=r@8iNzfB_Ztp0pt)rVzDy+8cpK>Eg0Eq*>X!zdT6zXDM)gIUm-AY2FHhmDDDCPU4t(TVfRb{6 zw{q8#>Pb;vohgwZbd^rOYf=ML{!6v=mlQEFhI{U^@7{x@r(16r(T;c^MS7qE@QxCH59hsx^xQNMCMi7t+S4?H^q` zOs>xC4(#=I`u67An=4l~jp?h!`Mj}mIaK(^I$fEbEAXI zah%@RpZ(IjAehepU0F~to>E!nu(gDzQw&69>_ok-ab{1vJw<1ZlkkG4Q#w({rh4a_ zRtBhWDjmObM)MuL0R3l5p;GOF*hYoj{mw@%yNvgcr!6;LMX%r2&YM+$kL1bIZ(Zqx zV2QR}90wgsK9RxP2%r-MTf6_x7EmNhHp4kG)1sI=p5G~+`;`KdNKin(kPV|m2`2b2 zF)9b&B=`W6;C%E0>D4Bf0RT(X!yG1QKTa@|+B`2&gHsTFUjdLyr3zOgc)0-DdDI)D zSKiZtrX)hJa?2+&YSa#$@nDE(L6j1#aiX^T@ZdnraQ-Kn1rbz`sE^)2++3%ngTm46W5-;e|dLKUGD6nv@6OKzfUVtW~Ux4TNLc^`8&07`poIW*@h-L z&k*0q$&lC9>vFby`nGT&Z(Mm)d0a`G^l5-H{;*7u6J^@5KSO_`>BFH-q>H^%!E$YV z%*hWYJNOM539pCli4^+t&*_8Ik=paSD1lLaqik#!(>ov@+ogt+Vg7Bwujg=-9lxK% z3F|7clSW7v4w15?5?p3SjD3lep2)$;h5|zFVXL91lqw{z;4a>v0EHOV&M_c34~3IRdu7DBULFL?k!HbXVkdnP3(X25$^; 
z9?7|Xq}1g|o|KH%?=gF^Ht@{~&(ISwISi8Djk1du){#!G-uH|1;EkS_AHvV@X*t5* z5@gDyLs>u3nDfx{s_;XiF_@E^%nLC>lh+m}M44S_Oo&Z#mRKO)DW+*v1;DGfNZt z*$jK3w22R%P2N`&ir4p!s-E&po_ja-vc71|nbY@PhZM9o6sVr#{9OQN6#UKO4rAJo zr^s`Z=?GSSXimv6yQ{rpLBf0 zF+8bL=-2V_fP75pI|N~|k#*iTn9-Mo((I7ry@IyO*%Gn{=D4~>z$Gmyu0^JH` zMK6Vc)^8!_iMAQC|I!(qZ9nb7N>(fdXdN4DtN1tAF&`cHH+rK(?O^ylP?KQP9~VIG zIW~$FcY-9ag6+WUZZ+_%JJBCm!KYN%IcSNyHMvZf$)K0A3m#H&IQ4WIhZn`SwBs@v z71UMCXo{A2s)az1%65r6da{rk%*8lDwIJhaX8r2);66un*-jgRv|VRwKZ8&Z8(i>e zUGS^m{=)?6**taA8!$gORL)q@2KSz}zM#Y04Rbr^BG&`WZnI6E{00U=2>4fA{40&KJPpyt zj<5DVxh3BaJ86Bpa;fyzjtf>lJ-B>uaRA|7oLT=*oH@?Wily9?0Nq7wM%TPyBvR2m zM8~;0rd;-my~C~C^T%?vv#ArbLgiC4ySwe5tbE$|CY99qPRH-%l}{Jv^1rWr`un`Y z?-A!e?T@&dNJ zHULGdjPL%*O=?@<;i=p03XBo25yyyGJQimyWPtm|!bu{tvW}T$&c8(fmkbdmhPq}p z8?E1M)`wLr;QH;>{`S`aEG=Mu>og}Sw)>bMhc6J?MeSr0`j00#Nu>~woOyV#Ai!cG zSceVhn7U77EHzl6<9u{pJ`UbU6cC>pW2#1{qLeH^J`KY34>iYrzdYjBkf}EG?&VNah`k7vF zx1rq8W=0-P(al$P@3@9-4?HXM{#Gb!EUF*=$is3Iw2G#{b7=U#`0bMfVipgvU@HY6 z!eteCzt~eFciiu1uae0nO2bq3K^UKk4n*Mme!W6A8J=V6y=b0!F7$x(4PT<0_Z-ZS zf-pnngj}9GS5;9av_KFGUYA9UWtKsDovP466|q2YSuhhpi27*#iaF|Z%d`8lyO}ka zeh8D8sCuH4&!>Ue_5>toBEg8h(@}oY`6@L@u*d^8GSfq}3s z5_3_fd_{o1Dg!N9OGtIa%(4v%um(1e2ZN- zEM3M0du^n>K4o9iY72cw$sP%;7KGpsAr-+mbmJukoH6*}!b%Z|K8J*FFKYKND0q+I zkoHI+fm5m9)txi-^bbNQ0--3Z1T_x$v2W&l^o~dgjn~U; zW8g6e3Q^+M)wG_&A!SR->C_k+UVP;pdKjiAzS zKHTHGu3At}q2PJ;D#Mhzn)vNWO=-udd-(EWob%-1ZHkdj-IwvN+)x^h~jid8P-~EbJscNc@OM(~Sm#%HdC@aCPXw~efyTHSG zDf)j7>0gW`OW}-$`c1N1rDllyd@c3=+Rw8jx#MWBiL^_=4B*nsi6&A%9%-O<>Yh2N zKblz(P0Evd*^uUY9dde2kI7jDO{Oza|B3YCUgLr;R!)-%1eqs<1-ulUs$1>Y`<(ABx4&p1iDCd~BOp_X5D(#Q9qto$z-qH306EeJi4_ka?rG!9x;N#(S#U`wS_Q3V-`E63h*nN5^v}u#Ql7HdqG1HMwBr{kPcBmK6nXZDV|z+sn<^EGAAFTN_U z*Wn4zF@H}U? 
z@YY-faumjQnpnR{aPefpx0pK~lRx=<h%DB;?S@3xo9YW>~k(%$OVd zeNAsLJ>;gpnV=Jst1$v)G|VSG_|FJfnIV;PEI1#NsmZ`DqS$CS^PEsz(zCwX04oGOC^<;v$ho_?Nh1KxQo z8;-SRF6}+O)wjLy`^JGMb{3z%*zalO;@kc^P%}7eYEWKhl2Mg@knp}J6oW+P?6Xmv zOU*M}y~u6weMW#HJm#$G!tOPS09Rub{`=(_^ya@dp2~}N^}*!a7a5HH_BF1cB_Lx3 z=J$Ws#I0)Vm1+>uagM~b51jd9$`{MA4`ivkwspl-@T2yCPi&nmH#+RJsg+2IDkMEP zZy*U$@w8!?ijE1pg5zBc^V&cdN#@R*jMe$a1tJI;e?F!wDt zauS}Rc(zM?AINrAZiU9bH*XPSAYu2X6zM+{gd(IYXEvKL2gbrN z;5}VlUMUdVdal0E1j7?3(6cuSvdoC3g%QT#AU)+Ej!+E{GQxRu+?O#RHiq(DlRSjO2_ECOO92TBoH8dDUo}a>7vpw#5c47xyMjN_V}kA zA8Ywh)+RTFV{Sqz$CAnkG;+RCXdkF&Lu~7Xz>2I!>cUllngyZYi)e|J2sy)3(aP#~ z(qj`90J3vX#t)M<(kxTE%`{lWD!ylJ(oP74u>?#*Akl}#&|I{crB$5P+il48nPt&} z@ER;y+I%-W6;s9DAnK)xeb|;i{H@Mg*HIw8ajeL&Nu?SYutd3SI(d~K&=-#F|i-OosvH@&SG>v^iGmWkb+i3{8vJQtfK zjt5FQe4<$|R_7DDcI@CSas1_Bu8V7Fn$42gB}-f@N?K#IG}+|2JTkMPksm6tnGmgQ zUaqM*b2aS;pP_ou6AE+f{yjT%|I`V^(*`D%IHv5D-Q+(W80dfe_{Hjir%wk4ertL@ zFWEVb3i7c(HR;XV>s(j9bg5h+ItH^9Ws$#Q0ZgJ3W(tiehr|%uSF>X z!PMFVehq%Fs3atZ#)om~G{u5%{z(WkWETIHk{b!t_}lTG-Qg+9TS{Klo16W*O8tTM zkvXz&amM1}b6*ukHLttZ(l(Z8#a;Ox<(a!$d!(#oeV;}D@ASyVmcv<%tGGiB$7{BQ zl#E<;-4Is|{#6mW4JVCAQq&Ic3o>R%3FKJ)(9*Isu-l6Mc)^by(MwF>$@|3rWE{w%eD z_Ep&aJt2A&Wiu>uMFY5`>vmRmf=4^1DN1Pw)^u#}-#OMP?3CmU=1D;_IEZI!^YoMu zt7HK@wi^I@vOSBbM1Zq{PA)1<> zcKej7AI+%X4)8`Yjy|9!iB>i!r;@{iVy4-8#4(#kT3wERg#EsG|`A_x$9y&(xggCf@ow5oC%4nP(Hq2``#cJ3k)i98Z-juDEld;&kHE=%xmdf8Fe4l86hCAbcsB8V zpoX*9Tl95Ox$K$d1o%n$0XZ%w01H#0~WA^Q0 zQ?Z@&E~+movL(G*alXVZa%OZVJMdRtxw+UnirwemGNgrWTcK!I1J9n_(|9}L#v8cB zM42p$C>wk1t{vI9USa9P!Uq~Hs41jp(%g+Vr9J8%{hs=M%~X>5aqp07s@U0MBxFzQ ztIxm8d0cz2zbf_h-)~mg&z_oNzP|T*UnJ>@X~=AK>niHD4c{zGQa(fWM!mHbeY81me0JUG)I*o<;Y5TQ!35O@B( z_#xZYhRt?Ad9m@<7UlsEk^r3>oSB-MymPKau&b zw+xUCn4&#YeWL4rsMX~68;$QS>U-|GZ;MCY#LU#o*~}M4u7@IR=dZC9Gxu$S4g&ls zuRCdMq^8OF0NSU=b1^~!??T?$Wru$$`B-yoS~XO%toM-7(%y3QiY`TLMrOTHnxm4s z+1%NSTdpkRtUX&+_OIL7&GA?Cd!{kIu0AsQ>yFzuX(o^SzW&|zAdnlG#r^%5EiLQk zZ&mvi=_N8h7pl&>wblg|`iNaS)p<+M;!<34T+^fBH~VR`dVJSdV)+~9Uo@(oES+zE 
zk1DaSU;O8uO$v?XyYku1e;5zU}wBx9|1Gb^Be{?RVX7*VkJ!W4sTO*Zc5#zn+iB{n2XAdn;aS zXl6*@+oyZsc51}J3-lA>Gqsp$FXk9>SFM?ff#(ddrB}qlsX{e~5C@~DqSaF|Q_}Y3 z#CwFzgzy>r8RM!s@-4DLI!CS|cKvytPjpH};e5`VWaB8=TP#x~O3iBGYzO){5#8I4 zk8E$BdBYkeeXs1#96pfiL9nHiqfm|BO1x0GCyfVreMUFxj|l#<&bKoxJQS&YxU!*- zD~7v-9(_nn7GXyHn&1HCa7g4(xS)>=)O4&g@$QhfIwnZDZNi3LX3!NI(1gU9WFi?R+r~w3s;x)P+MANshAQT5`R&jCZr?8J=OhLy zb<<0emzu8N4}GnkZhrBu5-hUQPmeh6N8PqavsEowgV3!`$~L7 z#K$+miiaGhA2vnazwLhIT*+?!3)8o^SK7UhraW)zikiKBbh@caG!A~Z{M#928l^?B z$?2xa&@DFO&21cOb?Nk-_#Gj=`Mob5H(cVBmC^c^90I4V3@OvFdkoSC^9=)>+5LXcaa*$;s^!KHhZKf zqv9up+ECT!KjFiR(n4Bc8KIA8ktqz$1doRAY9~5=dnV@%Z0NGo(%0fk}2jqmQ^|A`@Ya;q=_xR^kxQ8LUt< z6d-KCL!$xbOye}5)pSb>k%?A#vUrFOpQo^pPUwU1l%38_<54kDUe3<^g&er|gX21F zg?J5t*}nqSV%QF&(WWYXH*2Ufr!9O!&7!VfPJDihu@KMQg`zkMch>G;U9VIa96z8@ziN!gRxUd^ zM@D)qzByZxDh@X64Xsvn)u+)72PcDXv8`w&H|5T8%AvE=da1Ud55V1@m0>q+4|Lk= zjg0AQ^NI9LlE)xphuTls87e(R(8998(oIlZHOh!|#uMx^15wsgp&$8HIQIMlx58ZR zP3kf+Nwr|g;}&(9WaDG*Pe=|>*J$1$)(-5l6RNB!`}gVd`DNFaT~}jJ-<7%=d>L~^ z^2@@BS5c4{1i#l->>xO!U@3r^lW*?8CY?}zx?AkrCom7fCONy?lp7vakeBiLy56bV zmrUgyV(cKE7+dg>6%JdF#XO&7z^0HGBG~8?F=;gB&mNC90`L@(mBY-Ejj9EI5e%vq z%ramJgSM#~YJ|E%Fk%RRx02G~{#(`~1i~`oK^j$BdT7qFa8m+3q6jVI3G`+e_^d|= zprw+l%7!Gt4jo3iGdb%Kgf|RAXP$F^&+<@vUx&GS>VXi*7iyMPC} z34i^3{7&1`3x`cCjPssf-!>Rb=PWkg^KmBRc+-KCv}Je30&zwAY<(SGe%J0Dy}7i$ zvVY=Lo&A?`^KZu{;vOEbSzcC7WR#)SQuBwmtbi(JsDQg8lqaL-&(Sb|feo#x59&P@P=-RmFxQpf z24m=IpTCmv0gvPI*5dFS3@x(^t!-R>XQ%)-q2h+aDxpS=0M5fRZ$m%4i zzyZet2|NYakR!v&?zkRquE+gz}66ng4}K`pJ3S58RRq-pj=vRX~wq_ zfJZ%;7~MHR^ojmA!FT@GI5^T({7&;8c#A zu-1Bh(Rz2#)-Hek7T5Pj1Ii>O8=r{p_G=F(ByVlu=Ra0=m?o0CDqCx|Fs2 z+R#R0ufW4!tWH}!+5FvHV9S^D8`H)=`^*2h&M0YkR0G*rH|o0+VtrcIeM7A1)j8+A zGo*g!n#&Fu-}=pRcug+t=mJ?NyRjGXw=~{Um*ML~5&Ua?knDOI$#^}j^{9I<9d3ONoUK-! 
z$BQD6_ARsd=@Sn1UCA!S3pW?ww||Aifj0U9zCryq zGXpt+^wZ?Yy)G+8267%R4ENEPo?vHTyJM!K2&g&j7Q(xy0HR|=Jc(d*Mt9!DYNJm&lbx{3 zeaKH>kSnikssma#n{3I+LePHY`aStk4{JefY=V+ynRTaW9osdK>JV z+sB`*PddE(#KBOL*$1B<=TF}kWt)>VUf>7T7CyPWr~``^Md5Q^7&09!$iqIW6iZHo zuq;~HgLm-hHzeDUedwTd5a=7&aCz)Edjx(U#h@FbR*-^&KyUs962pHFJ-uS}+3+_h zln~#bF(9gi?tu_j!*u;N5yNoLZMG%d&se74)Wpe9qkBasn0)=F6C5xwAp~v7Jm34M zm5wGEwSUu*h3#V$493fb185GoN$V@al-b|U@2gi*S{E9=w2kSjSC!fB=@T(5l!m+e zdV)L0z3r!!<0O3lWs1ELrs?~oWr#Eek&Y0HrBq5Tadcrdg^C#@UQ2gA*?tp_fqJZXJqWW3tJ@KDsu-Cm#m+ix>mYBtW})fB#w}bayY%AIbjJ8G^YT@*R*?7fiQGAKWm?Ie(8|mb z8Q(sf-nrZOcFfJ*Oj)nUxHs$O&lEqU;H^qfs}wLgcRwCf87o#Xn8b1>NOSZWR4{&r z1KyFTfVj_`Zm$1i8&~Roc-YW4b8MLAJM#JTCn1QSf^iCse$)cRq6xq1QmgUIy38}d ze!V3wUF7OOY0!&5UB-3n0xg4@j9<_=`u25Ny>zWIEm{fNGB`*9^L?-k>P3#<8a4)x zL+uYdLW^F)AQRUerg30?wsmk&FO6+A9^)#n7F3rY3kAKP*&pk#-t5%w!#iq+*@)5V zq~l6+*$aY~_F+gESNUeO#Ghb6UGK@wN9Tdj$cH7&kI)x%N&*Oyj$XQlY1{#9uc=@< zr`%QaDyPy_@c}0wAI&K<&B!^Gb82iH{zdOFT_1uvuy(-IubB)pbi0y z@MA}h>E^8Bu~)hrSJikzXrE0hc4SJX>P@hPwK)E(Yi*gX$7Zy zRu~y_WCiO@Mt=(0o2U+)_AGzwLK#JowW63Dc}dwu3>WQ<7ci#KC}Y&p;Y%pW&C{8v zB_3Z;dY?_|rj;;mA!vtVZGotGNExE6PGR-m>vkVL^(p$$R8y8TubE&c8fEhzZl`6| z8G5EmFiMQR(T`gw7-K`eHs1xKvAS=I*ZA0_lf+&b4A?l07 z-Uf%I`~CVmQ46ljJ_YzHtug%R9e_gD>}AwYMwpXZG-O z%g;2#HQD9mPT(+QD484FclriH`UKAf^djJenn0s+h+0bY*8b*9gTSMn02KZC@)oWe z*DWY3GtZilnOuaMX+{jeYdY`tcJwdDw2 zl1=H-o9zTzpxsiARwOYJ47tFpbibb8_4kjF0IEdtkmgyQKB)y0%v+IKk{PQGVEN{y9z7C5KKV06S@r2m`ss6&@@Ni2A4dzF!L1H#m| zAL5SXkX^aR3>`WK+uDE2Hf3c(_?WGME;YR%_Y!q|e(t&V`h;YOH<`0g1OE`ag?N$t zJDKm$Y8OG1d^bmaNBoPZZeKQ4q$sL3QBUn`BfcR&c1U-~m^HsNW-?WDi~3AGYu@T0 zxSP?wvVGN<$sxl_Vw0%?+3g5}jH#&6+gO-Mt)41sUqyf(+TNLU8N^A$n}&6~8l^3t ztko&whJjBfVumL`xkskliYezr@*@ePjVM0P8sgA#_+f$EfP&Hva=|2I)pIc8br?)n z?FK!xa$g!iVp+%TomzS5zGvm-lE-XYJO?>00q6GLGuf?c>tyJtORChk8POx&3G`|gV;C=^os<3sGk|It4_ex zm2G=!N#M9-u*WV)JzB`~&>T|s< z3kTgA$rpxM;BE|5YV@kFm|OT@LC1PAHEpUBVo=< z80DLDg!~sjw!QPeWp#DkyUb;YVFPjN%?eWI$_D5^zETgBs(*jEM{=&>;K8}H6&E)I 
z^;zuLaNm8W)i<*0Z3nBgjB)YDp6PV2`{h(XL5t7QLH{icJdZu^etF=*xLY&udcVGI z<)bB*{KEZtnmSqSdQ2XFVJ zM6uF=>C&lmha*D&_S(xDV(C5|ZiL;&~`h%!?n zS$Hyt3WSx|yXY&Ha+wsRFc71O5JNw>BGYpf5>Uo54c;Af+2#4=`~i+}EST@B9+jR! zy!9d72gb#12q}~eKSkB3o=xlqV|>W3vBtp%Dt7}K6qeJ^+`_%HKgEqy}9_&0f?0q4(bO<^B)( z10S^jn9zJ5pbBMGfJ4w!KzFX}6r5@{G>eX$S_8;~MUp_Zs7BB^mG71h=6Nxl%3s$u zwn*$^s4|^#XmwD;s!jv;V#X7l4;N}?=Pcr^w{x*eiM(oNZ)PgVK$sW~vVk#E@hVBW z`BQUXU>rT7@zdyCx09mZcz0E&Bzwn9j*tf(GT>+>s{wT}YyNENsJkohFpiMlslmQq zy$G%gQ?YJUlJC@neblL==?-eb5kME~ewhW2|Kz&Sdvjy}L8}S-r&f=DK%?y*MSZ`D ztymt>enpHzu~Gi!R4Rd3&RroE)A6EC3m>1z#oaC|1=uX%0wv0kr(gv$&*CGs76ZjB zKM(p#JnlDk`fs~bBX5CrLqGsGxkN^8CK>|l>wQ^#!~;(4SQTP7h*6Hoi^@Q?h*Pd3 zr=%z~$Z6Pq9Bx49s?^wsBA`$LriBagfWZjNvt2N;0*jUV_$#^S`^wh>+-(gq4mD6z zfU;N#nni*ODi{?dvv6?P?*@*n5a4dUZ_XhEaRlL>&@$P{cm`E_Au-tMzh$0H(5KGhm$Q!b|r6;kHC8p$3wN7gU z7*JQIOgj~vT>5ei*>Yq}xZcRk`z7JI167yYv_8e0m}=P!?Q0Qp^NNKS-9SZ!oORoE z)rC`&^z1z28NVQIz%l324~f4~lLIadZvT19|4GKEKU0u*Ty?UV%jNw%;lru)(hrYv zrP@cJ;_djbGh`F6ICQ5sSD@??yxBW07|RlgdJk-_UsG>=Itv<~vofmQN+9Uih_$ds zqX)4R4KKbM;>~hH2`7uvHsABp5=LV!%U4nk)JYC8^7Bqw(qzKD3)s91sK_M}x53gH z-EUCBk)RD8clFUj^rimHG<=?@fk}aREl!F4_c=4EMBQal(Yk{u8bvhBGP95tLJJ##DI!0VHTI-jfZFbS9 zy4Fd=)pl#p%+=#QG!pyiF%m`}YBTSnTD4J+P+vFxF2sTGnpsd$nt z_A;_!yxk`l-CcBhr;Y~0#WbpEsOWZ#=!sY~4#O3{^*aV|A18Hq+QZBpF%dOT3TS~? 
z(!CrL?L*La98}oYMb0YH>>>uCWDA1^=Lq5RULM22H<7Ajb-Cj8L>ds2LWB20o~fb@ z3cO$fT3LFU%!gHw`5fep$;hE($RllvmxmsvS8ch4R05{2)MaHQx*{b3-SZu9=KV*% zI<3^2LZt>h{7-%CLmQ_bTp!Av3bd2g@>?z-DB;lHYrPL?7Z8N;rsQy>_+Zop1%~j= z9FCgJ2Y7MU89qPw+TF4S3;Vl+GHxU;ZKo3333 zQ|T$hU&eMC-#PoxW{$i~Oi%YrmINbHJ39qo4@dSx)$y8`E(vxkKs~e((?#-a1e&OqKhq@C~>(IbGE0W=A%cCdW9QI`U+#e?}zSy%p-?fYS2pi7te*8rFPz<{qD zZR3QB=h~&ujf#f~3q|GL{bW9Wph)U63AjE8IUbO@xONmt-@3@x^l8LIbeoz2a_{!e zv_<0)FgnM^ar78wD>C&MOfx}}9J0%$ad9IMSMF6?n}CeBJ32;1d7WbH;<~f%u1~z{153WpFI*ot^1^!raqi73oD+7}WtkgE!3o%UUpZ9R zSQ+=Eu+ixIV}iKu?!pc0yR!$De|)bm-R1Z`%IDklFZ88xN!=rTT3)q|!OjWZo}y;t z_?>XJ#e761r~cy~yy5^H>*^Zi69L!)V?@o$M`ikb*qyb`QER;%;lWc>(}esMHX==m zlPAgJx<~pGxUl+opZB|w4cX~%<3>jAQP;zNcBDTk8N%72z96bm4uu{ogt|j%A=nc? z6+%%&jg%qa2^55hGY;W|q@nts&y z3G1lwlh{e@OnvN-QY5>JT`ozZK7~HBn+bNg*l7l4P1&hBIScDZ_@{kAuk*k0^}*PL z!!a%&JFUaO+&L|zYmi8&Aw7yKo65n`@$Jgd_EaiGgDNxV_uw&D3?l;#Zc5B5vhMcr zV-OM8_h+!dxDYSS5-t(f13p-{Oou@+4Mr)2T-$tQ8H%n$JI!6HHL&nVDeS$W16A^{xP|}S68v~Wda3c_?WClv$&QysA8l%cFO1l|ROw6Y05Kt=7 zlz^#hXA*OWQ)CnYM@ADcRQT8eBA1)X1d0lZ;Z8<*peYhJG4w=KLJ5yb^lLY}nY z=t%dia9n|XvWn~ndY#|ioFv3AUlKv7zZDD^qa4(0=0pS&# z?N)vYNj|Nm87aJgy^F`*MdJo7NsviI5J6ZF$8UZ;Wt2^5lp-_DaY;Uw=f-whsjvH? 
z8VY#}+I7WDoIs=$NdzKQCRr>Ipp`-m3=aMh()s$)yXBF%-yy$y*MFX$y@>WGyEp{4 zZuCHSn4S{Zubt3i^gxyvtO9O(x*fb-ZNy8vE4uGUNP0?sqHypMzP?X9iFHmOJ0iia zPg%*5O`{!xjtu0@M9C(%Y(<_7f?s70UaFG$OjyY#$)+*A2n5wG)*kKPd+s7!*-2;u zygO*f<`rZ(I7^Ezg#OKXkMjg@{9_P|2|zfO1Vnhp|Ck^#mUv2Z(viBHU?kau)mCN0 zR`QTfbQh9mi6=SuTT!+Am`{hIYHEV%hDVTZBl5~}zPOxEFU_hG?4DO1gKm{W&DPG} z2_|O$`r5h)A8D`_-@;xH&e1Oa`B-CN8FJ?A2FG&&oQK*t+HfN0KLMdJpAW;iv>Tw5 z(aGkA#$3#P6B<*L{e~Y?m;Gik`1Qn_P(Fy~p8Y)!x#jZR2ZK$Af+u&?)x(S@y5E3y zIp$*BE@X+K`cOU}+PWgKR`>ki>eFuU$40J7a^fG@$rc#CkM&#q1Mi_68+w&+Sq9gn z0Z2|4)3J2;D}ltWNictrN`xOnMDkGTYe_KfAaeYwohs&jN9|ABVfLK@_(+>xiar92 zA0g<+*@3ps6IoH0EDx4~liC|q9^`Hx{on^*evcqS9&vy3Log(!wwB~>Fb_tkzP>%)5L z4*kWvl5wm~o>~7kYt{m$n~uh#XdOWcn(A3?@e0I&L%5|>KN;`&^NPFEO+3u$!8~6{ zPyhbZo3E-D$BEXTdnrZTzV z68HJY7(xnaW6^zgX(j=+Xbe*y%Sk$JF&mjA%~_bc`>dB^{mz{cZZCth8Xlev&CJ(o zc=U41$1R#~tnoqAl$w&!mb_+QiqWh3d;zc*`Qz!|fzWs`35G-9_LF|h*PW9vwI#us z=KnQ9-*fK@&*&}0?OsD+*8T$7*$MxM0O_@(OEZtMnnKoJJnNmJZ$r~hvi2|G`@gW1 zRqvpjw`R4vBkg;_wuY<*zS(N(+AG|L6cTg7f4%&iwIkn(rXTk#U9M={L8&P$Ey+Lc zNI75G@FWE6>G{B^m7m>l4(;Ba1=T86Gn>_1SaYo+MqOG)J4`G2)=OGb67SX9r&V&+ z+c7xQ`k?hecFm2#gVuO0&%Fyhw?IV79}7J*0<%{5Us>Vb7FeZL`?smp5S(%p-Uuv( zTsN1%sy~eUYqx@v`4STF29a;X?X+V^c;?8M7d(6S21>U2UorCZ2>Js|V}V%<{p(<% zKuK_5Nf5jbFaB5vYlV$?Cc~o)hls_38(g=GrI;8m^mun{_+5 zeCE;XxqWlX!oRldIH}Gt$(ZTxdXn+w^o{4(w%ScEdS}F2Wy)&}CP#jV4_aMe3W)8^ zp>aLqk|-}PD4!NwuuqE|K4sZzn9H29UL7~K(=-k4TcA9Qrte>0M~ zqtB7lW+>}wBFW4&NOZ*+hrv&{Y)C0aSD15(!LFB3}H@{tbN8< zuPF_6;fEf51PLe6!aQ!;^J7`b#qrurC0c5p!UocQmei`jd3k3$41ifR1JjXy9O!mg z^)^g8B!Ajin%rp)-P3gwBu2PqBoH)LKQ!tmfXL~=XZHyWdJHPy;S5_!fFm<27uedt~I>%T(vmxV7Y@1sxIPY;V2v@(tXJ2Du_5x6P8UM5H)0knV5 zXp&c`e-q~vDh9?~0!9PokoG-q8;SI5K6YPG91Y4Z86Th6tbqj&#KUOrVOS=rZ}WKVM+C3P z9A3LHE3s1EIN1BEjsNx~AJf7o>xah7{=!e?zWY@8psl;;{PIuXGuMDKW4Q5;x1#6$ zg<)5?>%U{tp8WPIY#k@y>pJ8^`T# z27#?@WM>rP7;$>*Z30pNzeT6()N|cHJFrZt8w}fIqE# zv2XHr5qlqh?|4)nw*R}!rY?%N@&ntNyI88`8=hj%oxOKUPa>?4v$aWgPdV(o`TC^8 z&iuP3<~tA-T>X{!(0P$u)fTu<4ZVLQt0*hc`{AdEWDv^@Aqu(rCJ5O1yM1pFrs4i~ 
zm%(qck7S0&PZuB!ClOM4R{umIX-7gzjzaNyuT#MxxF!BUJK-7B>RAfbRk&F4$#1B9 zc-%(3FqMz=zy^xps(KI0Z(&>}aEKS+S_+AoaIq(Ys$O)|m}*KjhA=b6*p%H1kyr)O zsOl+el}e06pedtnOJKmzNR>SWJ*10NC+HrXA}mzig^EAY8-re2LAqz0T`~ytZB)?j z;HZfgW?7e^8?mX~qFOJE0Z}_tXR0H4xnxsHge*eNN+3JaPHj&xCYfFc-PXBmY&#zB zYj(i(+0+oX+9hjj<&l7&5#@mXPz&u5hhkuh1*d zva;Jjy*X6NVJ>Rto=MysBcS#CvO6zAwGLexMd-gxbqHCsZjGU^m1fx`-DIiM{;Xl| zSe?PGezC}utB^JLXC0-VhERK@Tl7{>fov+s#&@lli>71>P=MRpxE`3BaAKo97kOqK zkA{<w&Sn=f3C?hZEdNOAQe?oe}Xo3rHvXfLTEDMymXw_>(gtIaKhVB4{O7 z7(Ks_Ho%4ftN9GOzQ=%KJsS26(!NzT58?Q|@DhdepV2%N3NuSOTHj;TJ`CB}Q1U>a zEfWq`>uAy)m_r23JotG0>Qp)XGq@8 zEf-@6)cQLWgfSbhstcYHpZ39)cCKgQjOG(NsSR3t*M`=$Ryk2tQrT{S&XIB#;v!DP z!I4&XG-D;M+=9xW*f5w_e5Sc>b_ZeY2b`Ojz)MlC*5xuN5O42h2>W#wdQgIKa9m@j zjIf&|va!;Yi+ygcBu9JPO;}07TIIP3862np(T_Q!WY;rQc?iowfW-?13YCE0?O&px zf6A(k&k`kefJaYlzfy(IV!2{rAgC9RoUQAPvPdcKNtm zSd6kh6j#Aa08myr{Srg9SuQUz~FT=DqjokKA+~z6ZKdhi%*Ktc@K3F|| zAtQ9^yAy<5ps@;3W4q7ipuY9v5gG99PCOq4`wfn#hwZqG6*o6e$#vM+S%p1gpGmo~ z$pOJi4tl7yqj=We-^(&CD#*X>tTbJyv+nVr1S?3w=oEHaPFkMzKfj2JGt)sCU6-Kp z>nWM5AB0|mOXZhRjqAP;lBu4BM63V#Ybsf#NLo0RdYo`w$HQbQ_PT%~A`-9|FzmgX zZQf?U>|#}^JJ_fe^(+rkRf)=c{f;d?r|0 zz}xD+-5s{t?+4w8BIm8kxsLj+wQJyASP4pB!BmyOd zS*ZJalpZTfWq%=7Z5ophMgS>y6Dc^e8$y z7KVPzs7{px*B#`>_`{xWZ5Z@0s>9E*A%>SHamPmr;sB5)%+LFr2Z2dCOB;G1kyTHa zH~#&yJ4gDaLhO~%IEQ6Vvez=1=ien=sMwz?@|@W&Gpjg4VLeeYFKcG^FV@20`cPJu zOp9!uY{i}j=IX^1R4J=AI;rC8_w|EQwBj!oe@$2XX#eFz^5JE_wCzV)@4znsfxJealh<=nF`85!zp z$yXM|+$jFDa#HF|J)r+uXOEMi=9i{{wBk;4-Y6C3p7@C4R$m{ z_$ZYrt#1_sJ08DXm8$K<5@HD^9q4)j3EmE%JESx2P7U*JVDa6S7Uf~Kk;3cw6pNKfC#5=! 
zw7@H_3>^}Fn=Hik1DUy6Tek4od!BeXm{j5OqsJw+1 zdM_hS|2jnY_-n^&?6Ll-qMq4bI-t|a<$gWZ zDnH9X%zu^|7E~;pYzTQ5wMiSU>($8_0b|w6EadW___EEP_wePPeStyM^&x%0 zMVo4mA5uSCw!Q<4tLS(+5WWU(%u#yteBdosztcGF;9oH8w~59O*O}n9m~TxuUva(+ zKOVWDi4+u6L&@Vylql@dm7AsxzbCo{5P24;rAGe_ey~x1e&HARQab{G@o*sVd&G3cI6>8R0!V}VYAC{&NFBaWOk-E8{6 zNG8-2p_r^V;+63Cnv~Ih(i)cXeTD>uA@=zc*(ICp0V;%5~0twSyy8gL}E* z%i|RPDXxacxsfS>>*J4@rZ>1_{I(5yxY`a4ztBovJJY&kMq^3#&&jIU{F z!9OgyVR$QQ;m739MI@61&JS6M`j$2I1r3pzt&YK;X|w6htn9n*!)U`^pc?&`C#`MP z(_P)IH@EDWT5T3S^SnLK8Y#BPUM%>4@eK4;9{q@sQIy4t+h6(p zd+KHf*K9hiB({I4h0nBRe;P{mhsz!>kyxkKfs!d3-Tw9a4aP#5!Y#4vrK-p~4~rvV zEb;zF)c|UDJ8%LzrwAt=2c-9q-TZZ*P8iTmAk_C0^L(?t%XVrk7VJ;^r@lbC*_Wvu zkUMR8&+KhqA3~Mi;(PSwEDwIN{k8*m=B8uI}-A<7lcKi z3FpHimUQn}5WIl@o$L26&IUmYa*!tX`{&Is00p1ZtXww_cxSlnK7lKhRHSSWEJld( z7CRQevO6dUSe_;fiZ zv39!9j)gABWT85V#9S<8F@wok%wodCLi87+Tq<_KVkDZLVZl_~U}R24LVLs4%yr^s zH6`32@^?EAy1`n?GP5)TA%B!VlX+I(Z8zS7M&U=E|EDzZzfpJkFSC4ky=IPyBIBt2 z7c`}Kbbu=b`$?o4%GrACLabXRQ_5(WB`z_H_9+*v5#~PIQ+DHY#nh1q)KT>n9?!7y zR`T&z?=m4vr0m3O%V7d+ePi>irh(VMi*xJ*^K$dQg>}t&z#o|wQl_(`=a&^BgK|3 zKk%5!lo!xN4lbKHv82+_=tz0JHXYrpNK$B*J6aj$krsJq8_rmj zTM~qrB`iLnKX4Pd%bYAF3~l$yl!b_&Kgk`PVn!W`h=1mI6enf#ExY%>eOIM)SZa$HbN$e30hPl}NO5Q(akjDC5vVB(j={v%zYJ(IbZGwim8$fV) zagG!tZM;ZUEQni7v4e$ieuuOPVggeLB%CpQiXD#ZCZ_OU#qeT&Hb;ghj_M}y^UK$w zy777zQ|vWSa6?s$>RF76Kmj9!K;iY^7GasiXbCC;tIq1ccM#j=8*~nI&SMlyE=j2r z@^bYEXr5BYXHc{V0P8~u1^!$Q5*CAI3j8<>{67cb|GifG|Gg&J;o%WEAUMcqy>?Xe zUza3mXJTgwF~ze21?K@7nnH+$J4!>;2OaTQtZ-Vfh(iRpJt}44RyI0eJQ9@E z=s+j03lL?O0o4c~tj;Tzb7|`FIvA7SPCW;TFi7yAeG@GnYMMg*=%4!Fs(#m1vjZJc zSJ#dX_>0aeZYKWLl{m-W;q=Elb~q>LK*XU7>F{RXcw$%Lc%ph79=ZyJu{@`rc%T0u z*ucQm{3HpUa)KI{0IEJpkYe=A6CrkK3`H|qSPICAKFZ5~48fp(6`@h)2Me>%F`|gm_#6%K zqN9$9FW@9tzs|g(mKYhi0wh8@CZ%E!c!Jf8gDGcP2`CDEeG-vIq(OEcWu!upW6G#%n2Woa9Z{q41BoaI{sRPo!+0Y(XEO0W>`;RS*Xc2Z$J2u0)hVr>fx z8LX5;m6Ff|Rb{9vga02I7$h`MCUh!*;vbAL2G0*;V)@rZaz3&POtM_bDKL&Xs#3w= zB{F=OGsAd4Mpde2JW?PJdVis+ZDG}XGXr~;mZ2tPlO$Arfk)8aa%@p1XtE^%{|(#s 
zf8M@H5UD*F-uS3nuKyfPgydn+k`O0=x3b&<23FO$& zbHxedz`21%i@%RuJ<}b2?P1}aJ;r(Vzh^AEm+>#Q`b|YZ4kqZX|M?8{7v%WevBVm0 zWR<$I`l~X=89@YO{ zjZ$s*BL6n6$nN*TVTD6&BHwjO{C&5#aN72Vuv2ylm*T5u5xd{rxHS$@9g6*TMruI>AWuYifK!*a`8 zuT=~lIBin4?CzlA-7h+yM*2*AnnvVGEd+vwwSziTO_xCH#LtQpDE9Oo(G|;{ot66g z5BXhBi7Fm{Q^282q7uLW5hH1|Q0yY4g1Yra`0ti@*Uj zt^5GB5EsR&S%u5(qs!Mq^Mb42^|mfY*RG>K<+<{?a=HmD{m{%RQ{tS)v#HfHi_#Yz z()~;ZdV%N01y3(?$~GPlC_-7aVOKqgjd54`Hf5wGjY(NYXC*IY)Mgr=NJpQ)GFW4_ zXMt@1MRI%c>!PflHI%b6U(N8nwmWP?NC4!yo^{u3z4w2+_JumG$h{kfsW84D9Ooc| zE@&{vK?V-Y!0=`Hk0#Q@Wd;*c?J}&_LMw(oF?cBlO~Ryz-V{Rdt>%0k8#0S7J~(;l zJjBOtaoC!W?|9BSC4Y<;PFeS-ijN5#^W$T;q!`Z$Om@=43C2TD|G1y>MZp9B!@}xjFvGj7crz#4S zZ`bTgRMY~2;B9cpyL=yLvz zX{EG3(xExTr~9k}BUdr*M3r{#3iRKw$)m=>xVSUH{QK+LUXdJ{55?9l<)(ZG-+)%o zPqDQhg?u<}@o^-(4er(@Cum60oZx0V#wRGK3$N8h_6t(5CyK23ksnwr4z)thFob&S zn4zIzh~Y768RKrQRN)mJ?d1gzNBA(qCqb^lD;Op|%ii80(wt*QCrWb*dLQ>bmP&Kw zxpHYwZhtPkP2HTy> z$~BZ47EwLOUStnK9>vI!7k3OxMb~(W&Uz6F8Fonfs_mNzACdl*$#)|k^$iO;jTur_ zuJ!`1BVRaVPGC(%T6)5gZu)|_A&I}lUzLS=K@*i)68z+h0HD~xL}sh9+?PZws3MjU z^ob>6Ehv9wnJo}4xXB1&1+n}XevDwU6+vI(J-XK7tIg;==I*PGktMGkdvV0?hAFK*ci-ofyWM%>*zZqNt8g3s}&ChemlHoh0@IplUH8#>22ywpY+{6t}fVN%E695aLf<7 ztGKWAekxu;U#ok}Iq)fsxXa>mfeg?O z{U7=?Dqhibz|iC>pS~=8Q2@>)j`4gXsG2=_&qe;iRSkwGSl~d^W!HCXmx1{S<^_9{ zi^56}s2|V?*`sb+U2M1)2S;Q;(?$#(#|H1?w6bb0c-bd`dlR}q3`w6&`97g zMEXQIOQb)QuN*=>K%XIx#!3!CvTh}La?u?1cQP1@)+ts`t$s)R2sR}(k{XItoAh!9 zWUEd$)t^~6@_RLEB)sRv>MH*7ux(}w6}ReV@6J+wX$x8%)@~g9fpTrf)0DVxPdu6% z`9U4N=-OwJ{*B7Uky|d$wsvO!6&~i-z7ne}PQS9_>%yu7aM|QnMBAPJJ&UNIvCRz` zMf;|zBaLErJ~_XBqQTtoX;CoEeueSBV)rPjPY-mjGcPTY?lb11QWQkH^qVTy2V5oz zb=&|$UJzuL&Qt-;JBI`%0FV(Ib0<+svkuB)MQbOK9TvHjku{#!Uz=*_*Gsd{((0}7 zo$xycXnB5DW_G~x3ioqHD?-*p=4IfAnqiekKJ-LTv$TDN_c z_m|RAmvM6STDEa-$+XHHnVPdQvx6F8IdFpa>Z>{WYQ!S`0zLz~h;JFA1)l;e=EjkA zF+PoJt^MTHCDt)o6#;dbS(&$Dyer(ZV#w8eFIg|vnk+Zx#bBI>Bk;ugjaTUINE|fx zi*a`pTa#t>RfMgol3DXGohy<=LIAbaoNq7e#U&E_%>7mdlj}9Sr(#^qcTnXzr361i zzWq(Z^(OJ6`@-II6DwHFdizSlFhjqUBZj;fbP2W&T>_g4GY9I6WWT%3O~V^-I=MGS 
zpNdNNRM;OfTr=afU%WI$`d0hL4DrGl`~72v;7J7!o3A8Rb$tGp>M7~y-+>(f9v!_i z7(HvaX7tXCR|Y~Qo$*RDUrjzkuB280opjZ#`A-_NL`W$>?F%%p!!G04*>@?klv3X? z+0+0-OG#l0!i*_?wLx0+{7h8nbpe`45V9yZh<R4}d;`cnw0C(HG{&uXfoyB+vRKhb%C3_=SveDWEi8Q9eXp za#Gls#eW5F`0?lqk;5W7ggd9W#%6ZaacmPggqxReTAPH{6IV4Mpf#&teWC!l{`0Bu z5Sk>~3i+?Rn)V)je)u2VhW}uO>az+(EJacan`XXrfcx~+MAo~N|-lh`d zgBP>G%w6COa?y-=fP@x^L+Djt5pNq@HfZIX@zt4vM7~BLnmrJML3|GqnDhO85N&0E zEDKVj*8{&3agM_WfCO{cLM?nF_VG*ezVS71iv?0Lh`EKJ=Yoyn^$;ctakWVNH3Zm# z1Pne~S6=hZ&34-(voNF`MXt5fX5+F$4a_prpk-mjC)(UN9jRM-zftWY5IZ0G2QT-X%y z!xeub8I_QO%5^uU`3f+VESYB?DNuNRXuC7-Er0&E5go5d?a} z=$m@J=}6G7YFW4%)^IQhZuj5?9@IEVi1H5{%=Zu-F(iE-M37Pz@E#MkguDrR{5Q^r zF~_4|hXH-!>!$H@uooRn0xc-A-vC%3`UxOJ049L!5wQD|khB6@MjZ7f4hK(6U)c60 zSlu7fSkkcTO1yNRd(+O(CGv#1D_T)(*Lw#-FU;2QST)Thb!_-R)%6ai!ylj;O3@H1 z(4a}Myc<-VjoF!S{@g3~<91$T>+`ca*L*Gi{TbHv;=0Z)JQ?$PWY7KYg-@1b2V4#G zj<(zX;qYkI-Dzck=mI;;#bE|D8uYC%m2VBlW-#m@yelqj%RWo9n1z2Bv2DCczV!`C zGE25$>S2aUFXF?ruE!(i=Rgii3-psNw*Ps5B)n>4w;lZBDXWxMq2FHm^rCXV=qbM$ zpH6>`#3^I8V}_$O{|;AInv8=Vz3X^1=vRB+LJCIKeh%;=_R-6m_H+sV%?U&1gZ5kz z&$|k(VzUyT0ZjmEDkRBy-}Eq)NiN=L);O?TJ;-T_x>8`EhT#n*!0B6Z=^7cIK}Q?E z29C618_0X_(bq_ZJ?|ETupc=^1n%hT%P


hV~u=CwSA|L>-w)5AcB0G03@gt9|O z79v!$)Q7VeG(89~;ZHss+vg+Namw`68ugqgUV+%#A(hIOh88pStq2t-g&8DU6rvopy4?`0rv1K zOQq~XiVE}ddmd~&<1UC&LLXzvIJ4C_tsK;NzR=K5i(EqdtjTnv3Mk%_GFWg0<8tgc zW|tmpOch4PUPSNT1oHnu1sp>_W$dcKOL^)GSzpCW#N>m~$ zQN3 z+*yZO!2!gnuEwtvt+g*1j&uw=p;3o^5GmhZnO<@WO7V>UtY7Ve6LWa%*hc03j}M3i z^1X5*25LsI8C8N}ING3XSa4D_XW8}5sA3*lN-)k93k|2%`pVuw^FH1}b1*_2$e`ti z;eNq5l!B()v!#Iky-W!I1k@%t6%AejC>*`BmQ#X8&MPTFOK2#>^kkVJMU;;#*LTdN zO1U}X1%bpwXZ#H&pUTAPcHwlRuy{xwVx)U8ae5RIBM>LxkkF}+Js>2;TJb?$%;Mls zLVZy>l_S(oXB`)>qme;71vnz1I4>VVjPN#(L=Xe%pRCi~N&?h1q+i)ffANhKhYfm03;{>vY_JHZ z<$)4p6{NAka`YGoNd2BnH3u7NZH-t)YA~x>g`@0;4GrYH(qYiaQgFH9QG?;p0-K3{(2_2oC54aw zQMQ71G+Lm^9q-Z9_C3fIO=<#mm@Y%Xb~LUVbokr_+PifK(ik!Om`GhR#e60@1M1N4 zh%(gi>~ttlQZ~c_MuoQ;`+Dp?-%6%w?3ZEqOzI$M7^I-0GwBI3ze*hd5$TlL4efTI ztn5;&%;(wTKN~c{lgj&I`vtYi(6ReZC99lFtu*Nt3v}BS8Jc7Ez^voIEYq<@1>7Yw z3{)~K4IEpA)|`jxoDnOwN)G0HnYui+PQC=)%!u})%el&9T~eb3#-&zJiY}ZtB@oCB zZ&ZiRXt@4v16*3GKp>z{{gIy%8LuUzGdFx{3G7TjM`UE#ofdKP`3)I04H;Ej_DArc z$N=vPQ;TQbbcoc5TOWxc{9uDhqP1xN>W4pj-|6mh(%l^<^yd&JJkD416yIudh?bif&ndf1FG38Xc53*1FEWlTchz7Sw=vav(<{b#p2tqXKR)1dKA^7Qm>Z&i~^?NCwhIz`cqfh_n<- zLaIA777!&09MN$}UD96{K)C3#V-85hkU9)pdB}T*RCQZKl${O=vXpU%_akxt%B9CN za;eKcMD;C3RA%WA`FEl%wFl#n7*Me4GN&8T=>s91{u;?oOvAv&B?|&ygPIM}=@G4X zC}iLJ@hSf!6N9E;c~iMV!B*gfD1gdS_aAF5J>-~^gZ6V5?rJJ{0BZ{l?R92mX`y&L zP!sFBQ@vNi_j3N&IRu+seowSnY7)Pd4XX{_iU7$5O%X7%S0m~o#U+W6J!7_7@Xp@e zn)PlqLgsFzU?I>7Cw>6RaDFw!WF6pJFY4w_Kv4IcBAmMT7&SS~Z}ga!+6V+1O5^p; zwHAZ=H1~HnroSr(4Lgb+B1;-2Sh%+b{6V{-uKfN73tjwNhtC2{Urk@AIgGJc+gXQq z4)1JdZQqI}le)vyev-?5k@Ee+#z_wJ-OQG{QkdZL_PbF!&Bz{fDxmHE>ak_b7H8)=HiD}ql*mSsbh&uOxgf` z*e*7AR?cLr8qxAL7R|P-JynfoWUN1oxBBH&!v?0OcfBiJjzYR0_x4ss+KM!suA~3k zt34}{vXp?=M(DpwO*6dVczhR|G%5A*9Exqv#<1-sQl(+SUK>Je@P-8j6*_FT5BuMd zVL4zc)2X%2i&dA0bW=2B5MySOY6@PyIXkBr~HEZclj>eHL^6~ z-r#jN@NV4QAQO-NL-Y+68N@25)=XW;<4Q|QENRkdYkfNe2K{%Fxcjq|Cv`U-Ul!alV2nz{}dGC#LJ#41R2 z(AvwQAiRs)W}KOWu5Lxx8|w09ZdPmqOrFk_bzU$FuKv&*NcuT znjpK=lgMIecOxV5u^i|TU0@4Is{tOuRSO|do69*58<5Ver-$4C{DL!pbj~}2Fd=}Y 
zT#EQLs|?7wAV3FSYu`HPOQGMhirf^E)~4=7^QL*N(yJB&oS`vXQ0Hp}TODwgU7(JIUFSI8Z*LS z?LSGw+E;N=xNrkOe(GNx2K5_Qhex!(kNq;Tfped~N_19&i$Z!>tTptlT&>tOUvQDc z*QDVMT&=V(v7x{sizE)iu4!N3@4qz#lzbL`mU(Hw=$1iq%Gf0%z7=M$up`8^RJ2$Oyn1Nov{ z>pV-{Dbd(0G9q4$@KGF_8KX!6wsW~uwsCP$f0s+@!(&<$Fhg(M#Y801fP8U5B=5S&-VS~yEKG-41MAw zcmTtyozRJvP}I2p?#T)32B0%V%J-+8HtsVjKIuQ@yK{1BkcN%80+8)mKxV8u~FpShiUWr)@`Kq($M=)Z$I8Ubh6Ore<0be~^DUcP{1CG)#G z7uoGs9(C8|D7;9%@`l#d|NBgLM$Tr+{BGDSBCzXP*R>(q^#SCy1bIW;opG@E`N0?e zi4XqwtSR}q8PRZyhdpm@_JA_QXRAU2l1hkpQ#U^V;a?J@$e0Ec=MdRO$_GBuS%RT1;?jt2CxJr*QsnpE2_v&{OX zoW}JqmtrH(97t~j)GiM`BFL<_&w(_=mIyz1*Qu?61L=-nqT*}1MAU(>@Usu=;j1TT zifCd-`4%^=y}a$B*BwL>SoQ7G5sHor=m)tG-{G?}Pcx#n1U~i~{*6xi#QyXT>c$^f zI^Sm5?Y|-A`^owW{&Q?Rk}3aB{v@4lx;AIqB}6#b4i|tRKLXy#yvp@|%x+tN+>IvM z&P6uxaY`ua-L)ANpjUian~{-m>~Zs%*BKeLN92A3%ne`!NC;h_1>(fWi0KCC?VFa& zc_he)$pM+)0F(iM3rG&uSQLN;(2ahGd>;-*22&hlIwb|eu2>jFz0`+k4{R_$TACUC zgBAcJK`BBR_+Q`#$jpT29LWD^#a)dN0U971DPKs4nYD);aF~_~x(nh167TB=1C~zo z5i7rL3pE!3a8!3ua)_S}Sd_Wj~tq2_B=5N7}HNp?6zz6{X zp*TRTh5V>T!+}fG1aLD#{;#n1NRAVmH1$uOD)N7Ul<%Bv93JZLf92O9nX26dCp=bN z1|$*I?+Idy0+U%}I*W!0?Ex{2Vj(526<7_HmFf>Ffdc`<-zAiSl&|oIDN+nLQ`nZW zVv(uqQn?_-4Mt7)rbW_DNm>(P%LHW)D5DV==z_#_sc3-!0Yx4S*>|gQY?q2HnA-La z9|@hqLI_DtG<1$0S&$9nBrtnI^7NKSNUbnzMwF_J$X5g1v~<4d?B{fX`&6fWX7s}5 z*?=7R6rBC>;TD-y-&xQ8H5(x6@6ObaFRme4ta(cw>u21uE87m&F_2~7BF zk9mp98swo*gUM~?f&dyfXe9xbG4)Ri361Ij2`zz!G|kXjK6ARh9+~;T1JW|Xgl5)s zsqKaKj$8she}3GOTgWf4{yW>iEdldBn3PQY3!2PB6PZ(BLF%JwMsPZDJ!pEXhJs1UF{he+c|Bfs#Lwfnc&=diI;C zga+S3w!d%N0yGE6(A+X z^3y-Kl&zC|L(?iIPfznqvFbO^{;2qcB0`?|!GtlpZ$b z7X4U%zhwH0JwY{7^I+Ja%zjSbr1S;4axssp$IK~Bb19$;G}(%lmG2Z*L)=)X=f|wU zs4!Fp{CKc8>DIxZUh3CLholoalT%6D z(R^)R_$V+>OEXTI;@*gthZl5b}_U?YpIJBT^r(iB&ZL;uN|6RL5@?fN= z>PxPfrs8$UHuZIq1^#u3rpdA6aT(3m@7Smx(J$>UABhoX*%01B!^N?ybaMjP@Gpfb!Y0rIFQw5<_E+EIY02^0M>_>h@*$N zb;<2&1l2d!ny|^&&)WTFJlzj-iX-r|mK%V~Y&3yFC<}azV?zYW!$4{TLJq~&tc(JP z5%{z85=J4=2f+zQysKS`@<>C#Ve^)cEl6kq`2$dC<2DJznU1CffI$3X+Z1_91^f#9 
zxBa6$pHsOdtsC$I=nZhI!?b}dpbWnN3h+_LbJO)gxV-?PxEG-u1hK_6)*i-~UxECv z>8#Gg#rM^X)R+1%@PFX1p1H<%cG>1q*nm+_<{s{J0&<9QJx?uMuLYACFzGGd`Qk0n~qc7i1NmIhT-6s z`ftX1AmW^pQv78)&D-CS#%J8Hw6L(^v=QX!id1ymMFj&n>G}~?14r7sGz-P5^e{3a zk5x{_79LkGtR|5+Jq63P?OPkx^Zu;YLkXP?zT0`Ww(?+_yEK0Gq;Fq@_>pqM zbbR(VqsFUtK&r`6)UV>ys(aRIc&Gcg`0Q|DWnePWjk3oH8Jk@awtE-t!21ZiXc=)X z+cjK*=s+fS>Jm8>`fbHz03;{2Au`t2!>QkjOdF7{e?&_+Le%PmKhTupe> z)D@*mc_|J3wCYQsVWy=Uf3i|B-b(V^ic!IDC%adRYo$y-Su4cG=M}ap9JR*vN~Yst z>jb{tf2Y!eMy->|cNu@Jq#-8ysRg@#UE z)ivw6k$1tG!VfYGJI8)&^q@7mUx5{$*CJ4WozVAt`t0WHT>GBG>7w%r;_~h&c(yBW zuUW5Kh=vPg-cz($c9v25DRHCvySuc^_**FjZzDH-*_860EA{YGP}|csRmq}yKp*dX z`5#91RW2*xE%E93is5II$_IBrT30PxxPA~8b>XBIrA7%cCDNynNkxO7dWJU z+sEg308n3uypzTXsql|C^Rs3>E570%!)m=c;O6gibzrL$yD9K0Qh#^RT=8?LDANi} z2U2ZOUAQj5;+;pAXf5qiDp7QQj{X~jV*&wpjG6CE|A`B$PGcJV0v@ zItv68T1e;k3$F{?0J;-vI!8&pjv1k#uA>yvLIJkJY6mQJTBLL(>Mu+$N-@@y;|Hv1 zK-6IGpmg8}6t;k#f>KD8K?zXWfT~V4WuZKM+$!(Cq`uxk3beQ}t`fi}jol5vZ55s6 zt-i4#LGAW}!Md^gBCXdz5(lDq=vXNH5^23bYU|j2iRdgmq#xqjjNPBp{y7{9&r^WJ zbBJ^A4btF9aQmPRCmVF)|5&QyxP2@@c$B)6ORxi2I>s#ON)$&+DIM!fR;R0Ei@^Pd zf_4cMITF&3o^;}4PHa^2yr*3#?zy8|bJB&ZGC_VAAXahldW3$rs$|Q_4gx^o>!$S~ z2h$5c`!Y-AOMedK8{>!4%9T$>HJj*W;cqs!N~ignGRTnsQU2R*1STG|J7{+>Vatix z-Ld*Tx5lE9+sC5hLry`)#MZ>6+)W6giy}JY!%4BLKDJE&J^X48m<7~*8=YSDQ2-%x z3_UHPHH{VZ6O%(g8d=CQ(4VL?HAkZFqKH!nW@*a_h!3M1k$eiEhCn+Fk=*00K#~>Pt2(>$xql{ zjVz`wyln}fW%Kn*UCN1JvAJHPusO>CzS^8iIVmjN@1?;$z$m@pn@aF6I{Bs}u5acn zUP;8(e~_+A#s&^a0Ijt28!6eHoj_Zf4(S8|qpLB_MUQ%glnu}3T!H13=t^58X2%A5 z&si7~#Bv!fng=&T&j*fXI|L3DS6Dj@U~<_4UM+u;?oXpg*8`?fPghg^vcAB>WBoT@ zyooLfmQTTwgC%{*xWBwXOw+F}Cubw&yUC;u|HC6n{s}c%Rtdw%-$j>0+T_&w_8oKr zG|isv+i7ckjdWY{l5!~7m*Q%m;Wd8};rf&8&KA^PfOeaA$!1*IiEx(5bp#R2R6Qv+ zTWa@KCnK_-|C7CBr)+#nb5FCB=T^_{u1_Yw~=V=Y-f0 zeTM|@EKWA~6ANI@;);5e8k5d~55>!|iP!`KDcIdzpR0*Y00$RF7G9_^PStl>ZV(H; zEm`>C6X_|aH}DKOYb-o&5bKllG~f;Jb~$4Hg6;06K-{N3#8WK2YUF#j^G~g-l}{JE zc1~=jyQUZ|HhsO$=-g(sE#_NH`ijeC@kOpz|G4#gc~fii^Ix4qd?Ozhm=8z(dA(U8 
zxUfIWvNYhtx^8{r$X=Zp-tTgYK9=VobwmTV(!_7EKV&40P&{Cl1UV%Wyor$PfwBm$ zi~da*I+-%GDi1+QQwbdsnXtcN@}Dh@9)LDg-f+*qe}U}}3^et(S04u}q5+Hsc^k?2 zn|rj={+a*oG1Zs*6BVWAreQgXM>%ez=D4T1T!1s6eNAoEg{$gSa5v;R>cb|JxB65d z-gi+3e1~EB^~pNr4S>{xRn_Hok?M0^$d#@kOJoO8tq8GmmvWxiJVgr{pvs;&qW0 zeu}(!R<=blrLvT*0iGOS!GnN%mXdNn?V52BQh<1mQs%ix>kk7b%Bf*<$kYg@0ICb^ z3mGJkUUT~)>g)yaI8aXEWTPs z`gMH|-^)j*XxYlgmfEHZUOMxcr{4XdxpSN6k@%v`gY%x|k;#=`0z!Z8{<$H?e^OR< z$IisG8Bq|OeZ_Nt9!))KIj%T3U1=9m>eTY$c7;v~ehgg3G&7mcpW zTUNifye|5RH~nOlJrOwQQA06;p{)CD9at`7)eajGk^1XRH(cs}?`SDMjYZYdIJbov z!*pW1+-}58$qOh+g4Srr#jEZB01}06#`eSXKSRpbKTZtY;EIb#v8pNqtO>oJb{Deg zaP7~aNhhe!rR~7b!Mq8e2(E?u+r^j_eNFo-Dsr!F}9IF$iHkEA3 zv?)3UdT$2;Y9~fWLAA_L81!-K1%ru21cEmmc6wY#*WxwlI%fdTc+)3N@f~hh7>f!6 zfS)J9B{9NIi7EoXFo12@N1B-qD>48e9&q&(2i~X6J9MbM7gcOqPB*gZhfAJ<%EYcd3jy2HEZoA5I~X6vEPNiOV59!?LFnxM zT0UgomY`j=rKQ(Tq)x}mduO2xQ7AGGnmaiW7yCY63f;~6JhA*dQoe2`*Kj{j@yqhH zlI2TLzlEb#*t%@D1BBx$Q&(Q6cZTis#iHn@NGrA4H13i?iSjCkfwA;awm%@S@{At(F9@z`;BT22_qqJs>ST6N zIzzu2T-IRdY$8p$C>M+9Xf9b_G>Ko#eq9q@kNh01al4W|dDZAq-Va*+qjJ#KB<5sv zp|VLHd3Zr~)*XGG`CVWAhpKmXl6JX#rGQ=#aW1>-V@7qk>!nImmA$)1f6SziLh@84 zIg+PuAb+_O5?7UQj1<1hc9(5-mLMB1*tHUIZ4u<)@90+>A$DhlWOzebjPJ~kSd687Kv|=!4=Stc0V9@m0#7#xI5k@z-9vTUbh4xC>Ao7f2lBNHlk5U+-#LVtW^QI+VW zt+G@F^4Y>Q)ZAaF&pXew-#V9^SK6vm7A2*@*;!g*io)>GcS)RbkseA6><0z!3q9ub zc!l+6KJPxB)m{(+T)y|g8%*r67}O#83hKN4w%{-MvT28@^&7*42cs`mCokJ#eO_j0 zv}<)(OtxFL*7QcJLQ0TbGNEW!=7BG~EX(%%Q=y#Hzv&m}WjTg>N z&acH>H1oDMTfh3R?((|Fwr*dxTrr!Pd=hsjds zEKhfT{WEqh@s3&G*srQCUt@RY%-t}{ss14TjZ&4cTeVxbp78K6@_{KS))XD7Dm|XK zJLm0`zx3QKbHc;;oZ+h(SIi0HQL5b{@JRcbygP?C?T>kwBOdeb9#1sg=x{CKTT$U!ZN+m|CLB5An5&c9cU4EuOi-~X+f=E| zC=}P%t4B-s2j_=$kF;=(Q>uRLdtg+y9SuU-pfxcv@`!_jaF&R4IHZ3hx=2U0{p5@^ri9_Q(nj zN*64p1pZZECW_|aWA{8QUcFrsx%l*`Le_8g`7PyPTDF6hx$o+8iSH^u8SEbpRqJ&e zcGb70y%jWN^RCh43ir^mzj}cU-u7&9yU*M$GXhT{eqv6jUNncI-lh6AagXZQ-9*;} z{gHpxIrth#Ci__KlcJ1<)^t&9hnT0jg#RNR>4m9EHRt#;aRSG*g*M^kuMx!4D3M$5>LUSk63Par>U4z3JVoD34RzO<+ycY6IXV1UzU8@{|N--N3+ws#}%L2ref-p 
zQW;WP-Z&`r$6F!pM9Lm1+>jvfP5y-sgoe)>#|ItC zXdEh$?PFvF|7}C^(>FLWnVA`jWI3_8!zxexO4#0GikG^LEe60(u-tJA-tr)?;QlF7 zYSxw8+T@xB*V^Atxq7?jTb(Zow2v$7S-xnX_d;aBF!DP;zs~1ORM&B3OY>o?XOmIt z-|{pfe{o#DV#Y|{-{|)$X_N3}?4b;kYCFP?xE1>*JXgQp{`itAaMR`a11OH*;Wo!{ zo(eUNP#X1xLuluc)UtTMkSgHxUBaHW-P&ryRAQQV-~*XzCA}YlelO3h=BB0v52w=A zERvL-;41gCgsgNuT`8)ALom0Yq!%e)f75e0+TicpsMfm08T_4pj>lRAW$GW5~l8Zkynn@L7rk9mlnq_~U&zYzX zGOY7=4bpvwx|bh>G4y8#2jY@*t(|Ec+8i+g)+U#5GmpvRe|uWMj+mw+#|TlUOy9BZ)C+A2P9Edg6O za!(0=w{KuQv8RyFB+hF_)Zg{Iuc^sM6|loTd>}rL)JLa>vKhfnIA`b3%AVwFoUxu{ zwyGM6s8%(f)kn?h6EcHc`Qo>R3PcK3eS>@0ime5@7{6mF?8PzZM(>GLmm=e*U`u&(}A?Do|gp zZ+m@iO%WWgI^I+`LeO1UNmoP(i16PPntH#R5?B&pStXO0^;vH z94q6(=|938kng!w8rR@D#cGu6{e$JhFriO8atUnubc;wJ5+a6_6Px<28KyYN%}rOb zYgn0PxQk|+E;T$4QgixvvK=fE`o1q>b zTOx-U`*dVmO2f4Hv8oJB#yz;#dpxdSfWO+95qn^*FA>mIEb%*I8KQ!FkSfmm>S|_C zrNBOjYJ01K5z7(WP(Pl^bf#V)GQt_Q1Q{#@2wPLBY-;6}z({H((|Jo^gfhE$A-gc} z8H`;94~wr*%M>&QEKTJqhv@X*;0QX@upRqOKjN=DDGp44+zAQ#44=|(%fjT_l8 zdBXjMYv$*3@tZdoWF&t0$*rhDt!Uf0kEH5Mc>RcgUF(juJn7c=l(}5eV0+^4`zCXD zLlTVF=T_swr8l-eQej@uZ%7Mco^i6>Mf9CJR^7lKFTDIN5pTK8%}UH7al%}BCceupXt8jk2kG8)P6q^eRz-*b^oic+<{$+u*Non8{ zMmB00d`v}M`IM?@bZu`}e{6PNqYARXJ?D9%tT^LGUUfa|5-r^loP{Ona#Z%$nwqi# zuX0#EHc>mnysoByvAViE!+kKN^iyGOqVZNIX{WT%;A?RMuGjJ}>S4xR+H^L^J(V`e z+p9(H7Y`cIdwX+_guf8yCMNgt3b${?Nc)VF^P-QCiFA7Jpt!y#KRCQ=e8psLaE+r! 
z8$sJrZ`M+W(H>->JtOp|shxRvy@vW@*k@3eEf(|YHta}_iggXag>qdf6f(gjw$h76 z_4on6`08pJIiq3dN1k!7L~;x|M2WHF^l6EiL;va1{gesjxKqYv740IkwPwB^N&Wp; zwZF}X*#S7qaZmSV+vatGU~hcexy_H+u143=t8=d74E0p{yh6uS2B||h%PPttoTWrm zxFzKhDZ73p`7+FMuK#v|uMbO=eHRzsdU2c^XZ%`m<)GT6E6UgH*OCxWkpQ-2C_AgV$o{%2#>@M^7gm z8RR^E}ZnSQ0?GW0pT!49kFGauro^58>}mUbwMumHEro0B=i}Qm z(+d7|w@chc=jutd-cM0F*Owr+so3h7{735@&bsu${KFTw3JpX0-0pN2Qx6)S8I>zN zmB0N#;5Dx;#fJ>{4r^#;3{FSn8>u!FlCu3C(l(}Pf0Qi^yR}o}(r@kq&vo8=NCYSB zb_O=BSJzrrcADFMp`}c?@vpCism@c&P?$jjIh(R3RG&19VY^`xuX=u}e(EMZ-m{p> zNc}dv3%fs)@X3IP$_x36;W*X(rsmMr{{A{y)nXoTk2!;NV`xKL7ulizVw-ACjGPYD{ z$2YcAN(tH!FO|fTt3}%9GK*z0+Nk)Ve2QF^Gx)=bI46a=jY__<4A%1Qw=;J?e{k_? zNy^FS*{+IQC2c=%o9lKHMCOi%Jw&LJ0>@(X3V7QQC#y<9OLI<1VTj*+EGlpGSMmfK z*}C0hd{shjy$>_lXA}h=ZUz-BAqT}1IM$yC;~XkozAJWflqwOnYn96#lH z(f9P`%-fogqY2Wg*?nJnMadtl=r&}>#JNVTA72AvB&&5k8nnbGOF#eNNR{)m_<>4W zPZ09iEIx)+5U!uPec3FAb?WF~L8_X|L$^l!jPTNa{2q7JS6ex5i5!+3$L&(g=2Xi^W&ge5fBKqGze$xYLw-sy@3bGx9-t^g$BRM1*?B@G`IWc|zB8JK zl<#5M@3#^D)un&uU&&ci?@5gl9={}|UcX@;;Tf7x$eYCL2GA=~Pnnz3WNEUXH{ko+ zWAJ7KCun%!1+Lm;upLjs16#%MG;&liPzD{@fFB&F2dd#yOtL_EQfXO;XyxlY+!c}u z?ut}C-rU||7QcPfjv84X#_1Y!JY7}eQPm}rQm2M=DGFvY+E7U;q`PO&o)D4YG9@lr znv2_|#8q^O%tYQ7k@QHYKxcH;9?~-6GMp}JsZgDPM5s?x#>q+^Bz<|Hxq9Kt?5Wqk z99pwx&1|pYqHf|t$H3pIb{|!V{HOn)iFf}!3Ge^)T(Kt|rQu6Q=?P_zg>5{yJdLfb zi*iQE@=&rId&Zh|bW>L;zJadOniAA3R`^yh^#L;-O!Ai=V$8{)r0W87duL2=0U4Q; zj9PA3sXt}D!!x2SsEWOFkrS&OWUL?ZIU8g$oJ1gQmukw8)4=F1|B+#VR&jo|NHVFY zRr>j0P$eXf^ZP}UiZW^iV2^i5GSBg6Q^+~%fLq9nc=A9H2)TNY97id8@+mgHYKd>w zZvI1F9et?i^1sX1PqG;J5a5NG2Pi4?klEK&)jZa7fRcn{ozV&MM?7qq!(;z?D-<;P zA@o8mf{Zs%n*0F+1F9;3A<8G;S@m`J+1tlRj^NNNl0HZWgFgWso)R#R?aysF3b@0v zVSmUOWDAIg1wjIveWH9}uOe340XSzsIh&$>6_JLsKI^ZROq?(cIX_P^E$L|WFCo~G z=cC*G2a=+*CG^d96uMiB3caN7Cb`6|A^Iwv($}I&FZ8;nqCat5Wx$1ZZ}0zkG5*hT z{q|f6bsBsXjnizEDfoo0IVA#$$JvUy<15vj>d25-o?6F|FubrR6fTuT72u^)tC+t^ zDO>Wi7#iqmp?q=bLtd#d)%e)>DLGwrT?I9^V+lra04>E`p!ICqzzDyBH?*lt6BF6mDdgSx_~{RVUrySYutWtP5PKoa>@4xqb_)il&?7C)Fo~^)TAhXLwh> 
zICG&Z#L~EEu+EDHF9y(%9+=sc0&p|O5~Wb2{&q55WtX827yqaJ@*RsSJDMPKk*=GE zZ1w{+Ca!5b9n=1Cja>EPdoo6kB{ z!e^F_m2ebY_dbqqr%rqyZdvFybWCiwjCjR0+)DL~JTA^kt*^i(UCo~Wdt2Pi8uqei zS1mt|+o4a_9m?e6+*03%>%yUcIRv1gqYBkUS|aKBOCr(WAJ%v?_gY2iRo+}>;+vjt z`+B~-JN)&5CJYZ#()*?Z+tkq`F}`L{wG-M|K=6nCp?%)LwBJbEAaSQ z1&_C-#|b<{!dRh_ln^!5#d-Pm!riX8<)T=sIF;RK#2B%$rldF&yrPqIBop~=pCZrq z4@&52J+FLvj!e`Y$(uaF=uW%Q7{k1o>Ci@ZxSDCZgKp~+Gb4N*_R?gB$64Z$(vj%! zw3~T_hBnS;R@@2-9j_*>n+OUNjtFe5SR(@K8RQvVKD(zo?YgAw#>>#zq_cSwe58CE z%uW)Pp$`35`KCz9I$p-q?BDri@MR3mrbNEy?;{nOFWEK~YJY~lOFDahB6QYfZ4Pcm z)H#6rLbuEweY5C0L<>8+I^l5BD7QR%GHIu6)47T0nJ$N>a^InuF5A(;cVTSO>coeSm#6=o?XzQ!#a?c1jFbs=53u)f`=@+;FbFJ-y zkWW>o2fBIlTmd^;ExE&Y&y4u%XLI$Lzvi2pi(#WCJv??S=@v*`4-UP zWVLJJ_cs2!eeMwWO@Kw-|B{O>WpLIW^ENe=(9O^M_}bh|Za(v4o}2jW+Lj}U&77u0 z*pO)lfGYic*}jkiPq!!E0-q}Y97gzsl@Qk5Pl7!b1b3naCObNO@A!5u!o#nV9luUQ zy^b1q?Ry7AYCDrb@iql=wOWwYL7r&<4Pm7%{h4W|nM_n{FD4gDO57fq8x(fV*ccGk z753d!$#PYdp6kGe`#TMr<-CO@<&1h${r7uvMB%C~FL$eEJpb?Io4Srw-EYlSQEB(p z^wZBWdYD5AJfWG;qokK17ja56xL4b**FEAC#Xvn_B74K8va?p9_bKvcIzlCP_gPmC8kqDFfJ}zE*6IeNb?S`&~#{8>m^TK&c7- zokif^`ObBr=Sen*P6=3-v^HeGIO}5JFdqyQBYv_xdeNma`lw<8fMVVr&)cYDZb;Ln19H3=S8C-Dn z6B`NQElL#p{7CI-i6{!Hwg zc{3y8TsJMrkv{pRWb;Okh4&}ow47_Ea_ZfP%17wLil!Rl_c4Ee8T?djNbKn8QNUM_ zPE{OD?tJFZ7Vj>U3Oy1#T)#hQ-^Sat990Vi^RwcW77AwitJ89vk7{dcCp!#nK;?8j z3O}0E^z$fnb=JbrV)L|vNaxMM_ta%4S2PexY_e5w zV2hKmd7k6ZP3mZT+4xcr9XdI|kD0Zxrdfl5Zdu2M5{-_g8!s07Uu3_4L;GLk{d%Jb z-VwZ7^o^MaUG&|%$hMYP#>Yr#HnY|?G#k1G`Rsh4!9wGU!I6bj8zhhuYns2;=yR&cCEEoKE~1*%^K zYve(^5X<#AzKeJvoKZ(ILAY06Z8MwmlZ-ZPOrp&MNb+MOB?Eqe{w1~5Jv$45n>90- zU9x$(c}{7HPRqj4?m;t8iB6xJq~L-cUSAm$X=?488ZOq5KCR>A%zVn)^b*a$uCF=1 zU-L@9blJ-%clSG%S(F|#spggPmA*IBDgPSsIGy~&%M+9CqM6He(j<=7o+>(U?@R_0 zTOh!?qE(jPRnS|=*oUshM)vLRdy)`~ewdzLryK@iorQ$Hdib|9xDme1N^VOdjuVkF1B*X_wZMH;%2~cUyR>bbtRTIQU!cYTM z#>^vK&tu1^iAn7Jt>;RSUlBR((i8x7umsHg99s}=>MU}{yHh2Q2fZH}&C=MUW5*~8 zh@Ws*b{EK=--Ch)c7u~9XtP0K3M4EV$V=b04ak5B6OwKl--1fAC&h(7AX8is_NVXu 
zbEAHfNxct2K>2{KeWaXi3+CwdNGe1GWF*mnU4c+2{31HqYco=FC`GczId9t*hLG}o zZupw8935lPt4&AWMWJ+2Ao6Vd0&FXgV}_j%D^Zlph=4Mh)RP3b$mtEK65g7$ztWQx z=hVg?;1_{GHD|eK79oo?rSl)>hs=j6W)~I{JCf(cACqszj^7i1?np7)s%=(ocFrO8 zX8ImtO1eiD=_t|h1WvbCl6C^8sh`N0J}%1D$D)Gx+C+4&zSxL?MRC(TjxQ3yr+r*S zf04*I4upm=p?)HBTzOn|&SmT_H5Sf70!~8;zSJ3~l(R_Ey-OWeo<1(tWsa-J#|Gj` z8DevZ*nF%PtH>u4d?I;h*?iIDYM5~h$YQlF5c^lpUd`_pjd|ScJY~4qLanGT?0cDC zL8CU?LWkH7?%cVU?qkNem>wnp?Y_iGhvENy^IY};aNq9cJ!;|R0MYyn%^qyG+2$pL z*C8{16`#%?=ovAm5GMDS&-*0{_E_Ib9Qd7gZKw=-%n_P-*%85B-u%muY)uuD%NhKsv$s=4f@KCqy2iE5!3bK?^N{sDa+rj>CkBjCXrRS53@_ z=l$pXzjBiW9qc`DZ~*k4N&e6iQ&e*Zi+~b@^U=+b^8L<;!z!WU_x9<%N84f;uG}*- zUz^Q~o6juX`{xSH$Q#JDK1Y*3lbcIa`=50@d$#cG^n#!h*v}a=J)-93q-Hq%S1xqO zOBdX;rWkIT4yQ@-hcT~2!i%M4SC+eYde7CHBsOGMm`JkEljknKt#2kXeRj+>LeB}j z{AqvQu-SMN!Ie@9QQU{I0jkmunFekJa?ms7IyI<*2UTP*W*jF$|-G;Z0LpGq4L_OpKNiq$U_ir(=5O zD|N|mVJ&GmZ!ug=H_0EaOHqRH6Mr(57mhdNrA?$OJ387+=^&SbXI3ik07XoP15zaE z9IP&~IWdq81Dj(3YD|b1;v8{~>6|$W9EqQKh0ML&9qb!;ne4fpoUeX+VS9FG&tD5M zuHRx@Kkt(tcXa)*XvaUZDSzMQlNUPTEq0w=@$US|uF}mXYtC$v-I4L~PN}|v9iGR& zxXF3a@b&JSwZD{|insFR?lox19t{5e*TdcW_jxS*9&f*nIDhWS;akl&%NCzWP8E&3 zev~9bd4Fl&-k&cQ%U;g*b#&IHbe^3RT)9seKewD3*!4wC`PZk{KHbx7T)Q{5`8ZqQ zB#uhVTvxNA7L$o1V8%j&Kg728dJZ?z=`s*JcHMK*kODl01p(!(RR9$r4pZPhOew*( znX}$02;X5--wF>RBkQ!D)iTu2n!XB*SX)t7Cu4t+NcgtpP3dRNAf=lbygmw!g3fhG zy%~{O6)%Gw)I|1!mPV_r=)k0cMXBu}uw9sE3m}*EAs1AM)c}ZjHh`3Gm8B~|$rJn+ zsK0DS)+cLcVYPytVA=3&&>!@QkNcm&atDHr$zq<9t~dT&Q9=0zv7EL;QlecHzxYV^H z~q z?^TS>yLzbmw^PbTiq&?X*nBeFqx?ac!(2&x@vB!j1+y=49~qK~k{1^`cQ{N*@2+`# zZ;`ZC@FHez;hCiQN8HB0|Mk}w>c1{s_*jVRgT z%1^q58Jv%rb9Xjsf%{+IC9>Q%UpeUvOb)8T7r(@3h&3;fhxmS2g~s0#qLW1vR)k5s z99$u_4vz=Uga>djdAuYxu;E4MP51gmL?smwns|=dq$yH&CshizvLooq>P)-8l-2B+ z-8`z1Ubk|oKJ!!!)tFh+4N5BRg?sk~RUMxDZ_zA$czm$5{N5R2Y3aS={r3hd=px5; z{UUc{rr;ow9V{;uZ3IW$dui@xj6{y^DsV}H)Z{RpEe*U2Dd`yqvv8Cp1E>tn9k$dA zfIOuoF!2Czr}Bb;un0GuK9LId%hqG-@xWeJQfjnfW}M#Ol`$daRykHB#XW3^9n+Xt zkxfy`;xJfRSyK8$2YYR;x7xajN?lcAqJyoiV;1(VuC>XU9^Tp<( 
z6Zl)iL(}=4a~mABr^8Yt(_u%6NUGJiw&)xTU?5j31+wZ6OU53E&My)t3Q&gV>+`9n z3Cqgbj`tfT`9%aS3|^~Az`{6zB~U5yEyHr@Oot3%Ywm57#gh?JreEA+a>`yBfq)T|$4xsYuf~*6kgd3BRZhx))|Qy{ z{vaVnw%;0QQ-~!g5W}y7f*3@FT>majTY-b%^Q&7RHtVIxP`~yTj{*qXwl{z2?0}GC zHJh6MK!}0j=wGCZOCXQ(muTG`bwd)#z2KpR!iAIz63Fj*^Wn$ll_>#VQiAWV`FUz% z@S`2yYd5}M99`ThNk0AN&B={#KGM%gPHjB8Nz(afTgv;^jjJUqP96Sx#oW3vi6pMF zZ}p-)pYtV7x6hh5XCXglE5vNvds_lIvfuqa9X_zebhPAZ=tdJC&xi!yXL7i7=RrZx zhhGmmUOtf9bY|v3gw}Pb%I9u3ZHygv)OMd)=2;6jUZdvftx#_13={_-^5m@&xGz+1 z^-vk~#^tIT5Pveayk-);aJgP81B9f~^oUa4`FVa|SvG5w&v^z`D`}YqM06~}|EwW_ z*L^W^Q4i&R@#?`#{v0DS$N&Wf$XzzRVE;SVt%k@`5bfY>Q8^U?WwgNyDn74QtxO=G#{xl$jzU51Bk^vc)u#nU*2z4e2LE0p-mo}JvU~{Kd_jdtdrg8NEx<^w7pi7du zrnk*xm*nka30c2Ux!fQ&pTuK0k1MCg9>=H33n7`^IW;C$UYM8eY>>`sBD*B6fD3AT=vXLoFmck$8kT{MpBDfoZNd-JfS&aUlyXC#mS36PK=Xc$Bw0g?a; zNF^X3A}Ru6pj5*QN)?oWtWB(F6&tC9Mml7=Y`DhWm1?|x1c7|=tTkZ4umX*5 zklk&v4zg|UKm0nQp%q3Opp+K02UqDm$hnY1t9`$51^aiMJ)kd;BQ$}T9 z7iROLUA;>9CD_1WpN=;sp>Vg>=6X?c4;EaK+D|`UCl4Iuau~&Es)0+^X|J`1+(tyg z;(BpJf)`wga}rka6I&kM3}Kk*QS4pbo3`)qZiM7U4rDIm8A^K&BVA6|Z4U^wS7ERn zxRi%+0hrM)yE6{H5a>TCwhNq@?oFZ^7qkE$g&;yvwW8%ZRA*~9f~rt6aau#?e@Q2# ziVQ17mfL+=*%p})?(h@>L6}{ImPG3Vkx_s zd49~@h)~pE4Dd~+19+K3#iBxm5Ke=U0w_X#!Z>8#(6Di(Qu>YSsur-m)q%QirA-q7 zv^}7D1WgXeX%Ya!%0>C_(O?do4AteiGws*z8JP%tvw!#-M$xUq@q}t%IyzUk=`M>0Tt zhliMP5HjM?0IrE*gf8VEGl(IpHWHV_$C9x+ZS-?o2QG;l%GE}H_}D=I{o|@Es#Tjd z-(ndypp98Xv;s&`0-`{OX&8yy2%;^)irmFQrr~bFKp84QqDJExczT{oPp|($swEoV z8~l5eY4)~|f8yKyP*LPL!uAq-iT>I)R(JEeNUMjrqOz;)-X5&a02^5&lrY%wRl`$pJp1_I^AzE3W&pj+8`ItOX1^f}WWv|LI zp!xI7jnuh=s#aJpikiJR+bav9Av_E#g(PHSK9H--x% zC&qthUi)cI1|wOOCuVgADHz zkyi1(?%v&cS_+Re5j>co%(Bv7(vRIeo(gob8GhIwnWet~i0l{o@o!BNjwhrg92Ws+ z-Lu)_`RKEoaX>gLLF0Rc_dDoQwg~>=*DZ9qGv3_$9A&iA%f1ChJFw11Qh&KSoIpRG zV2Tcwt=k2ITQ{G==$YBf9zIXP;2G@xbLlT7`c8M8_H$g%!CqW7+_bP%pqQzL6IeIt z<>V<^}$@?m5NFaRpmWZ1qrj z+`Rm5>u5t}2_yXsKyQvbDIW%78nCE|Zatp^Y>if6U?iP+71rEC2GE8G{D@Ex20$}r 
z*c9AFa-APc9stWGCuhSlascRtfZ4HO`6lmcyMfv<)9Ec?cz@xA#{?T3kW%t^g=+epp>0txZ#C6;TcG2LpVT0Q9dcDVJxa5>H@1-#A;57y1?t>_4zy; zyb%PMl#@R9{>SYXbqw1!Co{FB-E33pK|>RFw-!T_gkhgkos}CL`N)nQq-7KXphZTXP{mSHIl{itOeo_QdvKNm)C0(LZY@3xbVZP z47Av+%#qa6+_H7T6s_63%4L{rqD8FA+JE%O}k)P2t-1+ z6gPI4^btjBLXmXg4p1ap$J_u?xLuHC1ONU0C++vY7I6IASNK?A%K&zIz8mGT;S{jX zv4?SouzXz9@`DUpzt*Z-?lCKj`m;F>aXqD*LC^*Q=)=M$^=1{sx4)Jc!mnx+~-$EN5C92Cl z1_XN&aQPJ`zcLQ@13SqWv1s2QZEz`ZDM7WDiieRu%fr_c&qkv^V$_;W+>zf+%=FJvc5e^dj{i+s11s5YGM)&^IAe*bj!IQu`)!!DO=VOYB35mhd}50~v&VnAc<>W`ZHa4StDz zLXaR+ z(7Dn3+T%=$3mPgiE+Qhu?xj*h1=CJyL$InwN)C+39KR=Rz^#SMPU`#A@F#F3gA~p2 zRPj5oDmj5dKU0SxS+nRe*K8W*9T$1rc@5?z?Fl6_J|$&9{S5!*q3^eFNrcPjo!5idoHI@E8;W+BG;_(a)0i}h)X4@g7l@7Tn4XkjROCFX6v}xW^S8|LRyc@~b5VP{7c&{|PK`E%yVd-KGY-B6+8c_adI~+WuvZh6z z-FG+(r{G;ohne4YEY1}f!`+!emJS9i8L&mqsfKm$jMnjeH9)@tzL}E)pU3-m?U1WS zmmKN3`)H|IR%3&nLVG|obXkvnjEP{5?NgA3_DSdx1`?nOfOAauRi!h+yJXd14Py{o~ z5T0R^N?U>`*i86QumRK#_MzgfqSs2yC2iSD8SvOgt;w~6*1zugX*V)=kFHAR6A$6K z1J50=d~>vd31D@xB<$svyAkVOYb0c(1j7@e)byWI8gO{5YVIDL#~l36NP|8a-zP5f zb>RH$^iTh40fvj+hycSK%pJtYMwtjv;m-jM-Z;>Bxk|rcnI3T9D;NkApkjEY=!R8@ za!5oZv7->?Vj`YN&Ba2mR@kZs{a7Xs5s&hyylf_k$5x3|YN=Y9?M1{>*hCx-VXLw* zL@gr1L~=3GEDR|d?P6!Z7R16<1lG|bQqc~wD~~G1NO6ca8&=hVZZTCXR%L;)3=>1z zL&U>M80aipAh~RnG#gRjz+IV%cFNge>_726bqai55~keK%N)e_o@_|X7uis}JPW4R z6jzi`mt(V)64)_pUY3Z)_x*wq;MZ+S`{(>@GdFQ= zHnzo}9TSCDW@-Mc8MLCS zqDSAVuQ{Tlj~+Z6z)TVgj+sGSM&iZl4|clxX^j<7W03*`eyOg!6!dAqY>JE`C8@H- zgDGsh7%!HpWHgBu5%NkTH7*Ma&#`ztJX3)9A%1!^X)7e4FooL%dRkbLDU%n=6c8y= z_!^;GcrdQAiwoQs=WiFc*gMYubyP}Z^hs*e_Qq&@ zBp9jrHbo?!X<*E>(^2fxDb5|y!S1#sb>OAe#)J{{F=tr9Rux+IAoVgyT?u}(%1%TG ztPd#&?JgGN1XeD#2~viBDt@;wND1~u9GleV3>}s?SO;|g-M2&s41#3nPmZiM*bRmL z##y2hkFNpRu%JjzyX}TL8*M{by8sZ5VdzYD*ug?NbkyI-h)WkV!V-3{3uS0GUBiKG zqQ*nZ2+Ig7_#t+oXZbfd!*O7l6g)%IMdPb+t%CmZwvgjsLQuk_3l!j4gtH}vS_=1W7Zk>so2bC&2>gi7c)Jk8XtlHk zmT)iD1$PGQiGHAgr;+g26|@S$6>vc^H^C=r1q`NDTMFI#&|j9&JxK37?hNu0-Gb`n zN^m&>Z=>SbiBz`_?bFYC9}g{mg!w#xk(O(;|77GOUAz*=CT!WFP5X-My1$RFx;e_a 
zo6zR1@o6P>^e20cka9(Bdz>E&Xv`ztcad+ZNV`%U0Fg6vPhuKwmt+J?YhF$ z__4Bk78Y|EJIgenKhwVDdWzWUIrM&ErI%@$mc0#gt*Es4K*4mclZ~qNhJmWBHw(8w zt9OwOzp&R8I(`TZOF&25PUR2FTKD6*)bC4lLS*-fp96FTu%-#00TELH3?bD+Oe)H) zsIh0$qx&&@7bRA_{aM7~_RX!s)mX5t>8GCsfJW3g8G%tWKjVkUmQhB;Vs&&s{n_>z zKJXsL^>BXP;=Y#IrVIXM`ByeHfV~moA^~F@KpxB|9E_#3P%h}S^&G|>7Ak=`CTt6B3!EF3xc(Udr0SF$kEt;225orTh25O)p>t~}TId`D9cRXSpjKBebiE&udv*PF? z=~5W2Y=)0%0hp=O-a(CZ)}VGeV7L+zh0XH{`j%n)d@Z}qcr+8=t<$vnfs643){6R5 zUvCa!WFvwJIK6k}`(q~;uG_FP~6x%Gu@|nYt&&$8qaJ z{rvSZ&)3zEpm^o^tU~kl#CyYuz|0-1#{~WMQzI1J;t5J?^h>c%^?)A!E(mBPvuP4- znRcai1jf5{5!a}0ZY|ewq^Z4HkWZSVhYqj!mQGvkSF{SqG<2ZHT}s9Pw*vaGm@oj& zt+D6P#2$02?aj|Nq9l>)@GB3555(RaV7_Sk-Qo~IoI*CFV@}?}dt6?}a2mEtd0ZSC$1ig0#C$c3EMvKAvir9i4Y1n)640RDWUTY-y2Zywf4 zmo}m-CLPUqKjfS~CNs(*vbHeg5e(}>=1HAIwS_MqO7VyTj!P!mk-q{~PcSzVz7>F4 zafGiGk95eIB8*OnR_6^Fwb}yEVS?{rq2PNG(^!vLAlA;_&eDJ*USGn*k}ELQGQ=VZ z_u{_Ska{gMHLBl;!UENcu{hFGVo4^c_8N^Q%Xr9$ z=r~PTOIew{%HJqy1@^oBTb%YMb4)GN9xhWBH$RkL+qLTSE9VDoG-}Rih4+I%DkoxZ zeb;CI1~EC5Dh**`>%s8|P#23%#H|l0%#qPUBsq6i`4vSrRMh*Q6xrBCl%eqrw_k*v zW^Z84W*pLGQw)|3*0N|s!l>sUF?XuqQyL$kr$F+;td$Mo!ut|tay?*|Q_0D0z9E&aoKlW%BfNV!JiyXk|;s_XD9q#S@DtFC{T1z^7FH6Pr3<4iuxf+bg- zpB1v-cU?&N`!&9+O*9`|VOmJ}su~mDl^;w#xQ6Tp4%DjZFqj93M;+r=^Ye300hT6z z`o#Jz8bH!?FL<@N&hyp$U8for)jOVE?lkpepZxv9B~#ZwxwCeC+>L$Z&x^?9+|^89 zlEwNSFU)yb8#mvxG3fO45%PIGgB?3b-%p*r@qX5)%~$WP(k$`r`|WeuqCBt9(cV8# z^LB07l$(DFMn-&lzbvG5;4`susXV_|SRe1;5ZyY;h*3vjJB*C#2w7}t+^khCp&Xs% zqSN60V0vU4uL*2=)3nDc_83$L?E?QxwO||xzMODCC_GBt7EYJjIy_I zDh|L$>P9Cs%39iUEsZIOG7$yT=YrmNvlY96b%g zsdGO7#WGrkYvHaL*>Es!zVI}|7_7=l4v{4QNo%hjKM#Z>-&$A-@Je-HF>5T{kUV2YIAvwEZ?uM}S>gHef4JOiu*Zt)XY*)?{Efp}FIVHE{n8 zuUZK^2yj;)hXu}MUK7C0g8M7h!8HSH&UV20y%iEr3;^pnLIPwaHIvbj*EoIDIxf(H z7WhJ=|4zTM;OlFf?A<+IF81J^a$Rz2?puIPOXhi>MvLL~pa{097$d2!)PqJinUT3~GKItC*t8S|BrI_<#ap(gBET$ay z9dYn$XUjyqdPDb3T5vJpG;&Ip&#CUVc6FLA?)LL9Zi?@I;gW`%cjAxXdA-M~oe!N% z?h;ksJpSNbx;pQ0QfafzJr9I<9F6aC?j>U#aql3oIdJ|M34)ya3GY=m*DkoR?(F#e 
zxjXWydkxB3Cd6?c-`5BG7TPC1$2aM2Sn54&QGMT|Nf7I*=VE7Y-VSqTvE<;9EfW$S zSDWbF&L!QNvv-8K+P~S?I=`r2<-+=^+b>{`k*h=dPxBm`W0TK(flS{k)TumU-fR=& z|L0`Wj)V3An7+oTB+ajlHP;<);3_sBiaN@(j?p?<{#}^j-jR&sE2g_*&$!eq4L;N4 zob%J%gBb@_T4h?D;cJo(#CKND`J;Q_#jRpZ=F^8C{Rd+A9&4&tT36%!`&WC5YpnUt-^h;q9wr^%biX9!wEA4!LlrX}zgbi0W8rvUk8WA0Kh}7{X1W!8U zpE-Co;wx2q`AWyQjR6r&+2Ib$rmHMfe!kWmjP;Loq2s~Br?xg!ajssj#C>D`+lMCu zvw$Bgo!@E2cldJN@|FdeFM9W_DCJxLuyBw~lWw6ps*v_Y6C#@y(G)ZJ!6w)F#zrZ< zDgVURC)#OF2|nJE&njpW(KO#}{(ShgznxeqB00neL*kOBtJX##GE5-*3qDVAQ;IXp zyMomF-4+|f2o*N><6oQ$$*3?`BWx)!a^GZPJBi0sX$6<*rkoylLv zOq%d8oVepgXdnKo)>qJO~i&HDp^^>gloy}ub4=W+c(>!)w-`52Lm zQfzpMDwVooU&JY|Q#Z7x%!qCQnfG7vLtn16wu`Rs6OT8IFx`&_dMk&r`t)K+xH=XV zd4b%s(GpmBb>3f!oWu)V-B{s&O*HtgxDZGJw=s2;XMvlSVCgcky8RmERJpFBdre7Ibp}sJ02!% z$j5{(NAd-dpv^7@9Cp}}KAoh_5)y-CbD&_tjTTNY6xv~^daxdYRElN42(XewtLe-I zm!$pq{aNc-Mk9(`XK7KO-Vu>SEUCC~)j{)cPFiSwQc!#>Ih_6zYnYhf;5fH@SXR5awEqadqS*W{7B98bR%OK6hT_|QYM zBQnem=uyu{L*2NzlgKy{CrB#1An|^%48u5^fATC`ti{R*s#o_mqT3?1X}*a~o0%8! zjL0qNB6)@09}N$V00L-l_$j+p7z|vwd; z=+>?#X`+!dsTmvgPZ&Ms#FERcyX12*ir&b!0fMSNUK5{O zAJ6TM{=x6iq397LDTKk`$2Moi@?#8nyP!zlV%3blWWUohZ4=# zOmILsvX0Q}BrgXTv*6i1=`v}Fw6TOa%RrS*6kRNFNG4;*7-m`1pdZNyc+V+mTpyjKoPX|;VII~e_r5j)?{M+YmfRm&CDT{ZHf@6{uMS?}J~26rJml@hBiRktBk@Hrv39{`!;#|llaiDKaoAT7ES)xGBq>ri2PLvf{E%>Ay0z$nQ4?VnD?YL@N_tp~=f=sU z%VrsL8sQzJzA{zZaj@l;?!t2;Qf#B|F`#mUA#&8ba+$94M0-Xj0#bllTBo!5#9EXR zZ3n&A1sdo(&4HOP`zWE-p2tc`ItcwC4uE8#hX}nb>ze?!gf37V3VWRbTTv3);X-+_ z064TrhJKZ%BH}-dPyTOeY@pL2+wq2j%~x$!viMh2NLst}agPbRA^cC<)pteeJ{W>c zYcEXcs448Iw)e?b8+C+q;E)b-UagUl-$!-bvML#p+%PNi>F{b228(z}vel_bYqJ&G zJ6V1eE2MJ$2+ZRyir*J1*1IsVFkFpQ0qvCx%5YOb;6eD$!`~M8AB3?`1POH4lVb+N z2tRhAz~77?yKTY!I~=P51i7gdIP)+%`tz}Zm*Sg@<*`Tu92o_VhoSJ4(H|JP;*^Gz zGmmc~Ffe`-p`@ju@f~nJu4PW!me%oi`-Qbf@hZK&F|KD-KmRO8i0;@JiS--nZ?D$H z98dp!1bMwdZv?B1+2boQL@-=LLDG)s3LiM8#~Qf1Pp?>_csa1Ifpzz;#Edx#!|32E zj|aMP7&bvnbVWoMexHGAsDaCRTq?py%wRb&>`d|+AbPKL2l^I-Au1_JigU*^@fKR@ 
zJ%|cJl4c9Xcrf~mWeZi3o8#17=Kia;+-b?{q|=D=Xb?ccJF?ETzrMxK~`OfY9hCbufvwcRmrTmvdP_@P(7KU z&M+=8hGa8&Sh9|{PB%+edfU0#s(GRM4j+y0$bA0KMnO9t?EgEyp@et3lC|hBZ$S4T z9JAnN#KH2`n@(Oq&ck5eAqY*wlkspM+2`Q(ImkNr&&9K{ zP1vP2_@wN_XaCL60QVuMvK(!TxpUVCz4*XX8Y2q^oFpcM3X-tT(Pd50G+Sjfu? z=6{A);LF3nU7k{!jd-5;m}uu&>sbk|03&8YPokc{ixZ!~%|u#<#`h|p^??zzqksJG z`10#aHmmxN90r>+*M9n5&l2L%?vAos^~?5NIojD#{&t1EpQ0hDAZd5`6#55dHT>9{ zwD)8K3@UbaC_2iLdVlOt=$<#(&gIm`2mnp2;uT!Gxh$bm{Q#j8(W0dJRs`=DbokO&7F!jX;8}KH590y6<_@O_WN(+`F}e@ z6Z+&qfC@L&+Ih2pg|yFKw)9mIlsJ$BLrKaue^ww);@N|kO^XcQ8NQqJurz#!qaEeN zX+6hR7;|t2_bOJw#X{jIOW&;`(IvSlR4s&*f`M~_$`x0DyHS0U;!cTcn?TPHs$b!R za!q_~mbGZ83I5ELJmL`E5ysYX38guTaC%NrI2zx*9M-$p$d3NbzvF9D7Hg1qKoZWf z$tlVy3@^?p5`}N~3CAdKSR%Lm@L+UjIHm>@OFZOT=o7x(mm#gXA3^h#v_(*SuMy=m zvFX-*s#{q&-;>{~?yk4T?R)!JZ=uB_J|iWci}TG@TF8ZWqwPsmea<5Jb`yKwip2Ab zA)_zHoWC?v-HnDtPAYX7Rkvv~Go)3IUc)w%E#zoy(SKt%vc!Xz`YsLaA)le}qSVU7 zncvE9hQ3iN#Zj{PpZHL$*NDDNx_9S| zPKJ=-$MBxLJv}9tx~Gtd#j-6YeDM4K&}4Rdu`Dxh2d_l5QY=}Hnt_L%v^UYS|L}0V z8W*tZk?icF-F_R|c2-a3hQIbQgycN7tJ+wYJA0)U&r-WVsy(mzy-aUKWvAO0e64vt zqq6X5$akW z-NnUxJJkDlS8I9uPnAD5#5CyADht20-c+~NH zv+8LN@%e&Tn(kXqL<{Igube@d7~;oao4B*`Z?*3kxOVH8JqwP${9{wyz|r!!tW7NE6R;d>6c-(_V}^*!7n~Z{`?*H6pinjxx>FW1cAQH;!|ZBmel+61^um~OSV!T z)1GKu-ah;DklEnQKXD6i3#7k4fl1f5YF$p5{Toa}U&A^7U;n0Z-hT8fyym&fdje2r zsxJKl2WaumZcAgB(%8t~<2;lV5KX+&aXe*)3MGGxEA8U^epMfLjNQ0w}xHerFbY~ z;i187L{E$9(Jg*^_n2>uG@%5oxd)5F8EQ~ZJJ1gujpe#ojsVYC3mlD}TQg4|&2JC- z$^M|@ce+iF1n**%16m@TG)W#ZN^XbS&oPIHm#lC6WjA4#L@WnsYl%T0X-uFpfYZc8;7x-5>#zmG7EZK z&;bHXF35o4e~Iyoeo!;-n;~xHmT$%^+uXKOPTn#l0~BL8=Z%K*Xa=|umGSVfoZOx> ze*W22S?dJrTP1O=BLuaxC7*%D_Lu9wOpR>0y~6B8!=V?AItSB2Tyt=*TS8WQdD>lG zwWZ<6p<`cH9qxH|?CJ1tPL@Y+F|g*+EdTsiyZKw=mM3?;R)k(DwQVi#UGTMK^!E$D zHGOj8h?mKZ$6|iO>cl4Q_1k}hRHZxj^$-4Fb2CA_tiHc@=kkax7N6=2%UoQ-C;nVK z?|$L*o$~FE4kmnj{3)Yrs`bw;YrnsK=iJ%dCGGb#>l>3RB>oc@{Tn$Qx##AUI(fBQ z<{2G+Y3HGwzkYp!i8IJ=Yr%>&!K5}OV96|#?WYnZYE!e~o#%%+i?gReWv8=zL?IpoaCtSS5n&-2*DOzPNHc~IWRa1T6@pL>I4M{ds=(P*(f3d@8HGQa-AB#n3)00PXYhTA 
zP^8i5y+$Y!a@of~<_pXZz1`VokiJA+0-}^^rV~kv!l3d(f4o&2)$0Ny1ga;rdPq>! zkw_v+7sD(L=yI`W`_;^ITg62-9aUOezAX}-eZ_5=DZfCs7SOl6nyOyagVYv)VsST# zJfZ{QLR8Hcw30&rNf0&@0z7q8^U>k=%1?zY^;o7YK9NrtzaY3^QBxG%d$zZcxJK(2 zEv@0)MKm}cMj07rhppat@JywVDYJQ+=IE94VaVW@Eb7{xb7Rr^MD_*nA`Bmk?t*J) z!!=6yV0$^KVX*zMW-Pk<`ZZek*^|EUl<-}Y@XWD%Q0VK?ywtglMD~XXUo9fL)y8vz zuLawpcn!AA$qw4i*V0@EGs|^E?kTtq>uc2KntAr1$8WF6GOcmvXt9df@j>YaWp+iu zfV3fa!h_TgT9I52WUz{s(HKO@Y$NieL?VN1t=^}8IxK#!1vWO#T3IvI=nU}IPR&wx z=xZrPO~V%Q6Tk0nuSM7Q-EHp0E({k+waTnzY!nuppZ8=mr#EDLgw>Wu-x+W12Pq)9{o%|CDJf&(!38mroBMFHT`d*?(n$k~K8@zb zSJ&#kUccr<<~NSVj%%hSbl0>77EtEsyZPVBkNm-=>9_IOqnWIwOV50|Hk{`e3tY7EojL|; zXsw2s)IeDUm%7|rsNrlodhyA|Kev^Cc)an=(B+>;Jf%9WjIwP5EJ-N(O6FKZ;34^!s6eF7_Ge>EnDiLl;~ab}iTpZpSSo7cY48UIj? zosO0fvgLttwtUhDPYwM1v+C#D6a2@-H{0-EsW$$Z#{8Y6!R&(#z>~}BeS`8V`jelt z%pUQ7CS8W{g^EQBGB(0p=K|vNf-9RevmS5E+^d=O=*dLIl}*nxXWhNDVEwVF>kj~I z!9O0p<9dFEcdzH=Zff4$idMgUD{M*C+c#Nnh~{ORD`J0(UK+CM-tUjEJX^{giCDej zx2NM)%dHOL43>SH@O7!*@*n43KOcHKsbAkw!dD5n4D)jjqvNT+(pI4!8tT8M(S?I$GCx&^k@LqIazU4M# z2`E|4`@{lLmO$5%OO5w}O%=uurCArb`V|cli>l+2SQUlwGB0|?EuzfXK>rp#YvvJ% za&nTOmE1$J^kR4Sq3yRO84C_Et9CAv{fbo^*V*+Ef8R1;k-oL7aUFA*;Bv*6qFWO~8#l#Sb; zJl~d&56ajywQ29$s~xX{EVuHt6GseQoP^&CBYLSew%T?92Gy0eb8qstEEWB7;Mr*K z;v>se{}bQ8`p@GkBW0pM8#&`0OTe7;Cswd$O~}?8zdcEsyWTJe8eipt@$AmGhTBY* z;bWe7-V%aaGyc~$LofC%yv)r65PnWY@IHLA;Vo^pVnu+XF{o6;&SA5-#DZ!QL18jD z_StF_$@Amb1G}^QLTnj0_yk)ijg}lQ6K%pY`rZrUWTZ1U;QMgTl6}A_<$#mQ-;!Ns z$-d{L074*qv>X%=*h8{tVMS1s5Iyw4gan_v3FQoY*bHi1_Do1TVd@KiPtHFvNOXha zg3r4Xg$$NZ{bu-PV+-PS^^h%Blz-p<79G(gjAVIDMo@rA*h)h80Ng=AtNv|28M>{R z!1t{ULz#syApA2&0f(>S69!@3iy*h)&n7+%y*WChnVz_|_VyfO^>{|=P2=LB7c`CRQJgCTR~a84eY>ybZO)0xeU(v%pI7e7nas(mJbdFE@Isjp?)02mcd)=E z2ekH+y>FO#-ynP6d1vO`s(KYAeic&zdxklVdqT{DV#wro(0g{gx!9}NFjOmKd(U&T zC5y>RLcD(2Vn)ZAt;VV|;Gm-W>yNGJ0Ln z19jgki>FTYeeHV|1D@j*N(fSNT_#!{_PkqnEO0c9YqosXb*Lv7po{_JnXP{XP(~$U5)Vjpb2(&yiK|8RDT~Oj;iSXT{D;JZ3$Ge?q zih?!^7gjDj<)6ImUjCdD1Fw74VQoTbQK8_%bO7qoc` 
zzPIhNs(k@Y3mu!6+D@kJK0zzaE%3A8+7$R%@~QiK{i3;~f({ElwV=UI#HTBIuy6=H zSPPY0)9V9E%mkdV>ROsS-lLDBehGTU?Q=e7pQU@eBv!xV!tp8Cn`LTo(msHyalZU?`H`st`*Uk1-}1k%TRLo&8xJ!Lz;R` z$Wyqc*R6voX^QU5&wieo&wiQrSHhFe$aK}Ns+8m#*&L` zj=W9JbtI9Nkw=aT%`sGoORx$_p^3C561z|~IoH@*UntW(vxQCLAUrDa%pA!R3su5p zD?~gTqf`Rg>z3ha9Nc!(CrF^6-;oT4dDh?}22i!`cBrTv;fm2Dc0>^SA|$B;mq9XA#ZRx1 z&oa+b)XcIkstNyRb&W;=;DyXd^cXT{NSO^N>`mRCur^BFVG5Sl8jW+yi8uy7bxUk4 zF2GE2aX>K3BK&&Othae1o=3iXD<3%ep;SJAS=SrR9iZrbcKG`|&HIne)H_5H=x!=- zd;HVa&0n7iEZFLFWui5Ei_YQ|PmFl=j~-w`LJ2w#h@o~RKtqSRkU#?xmz=Mj8?};4 z>n))3Dot;fIqe~9QAYJSSSGK*J*HN0tkzA;7i)_#Vr?msu!5zUE6z|_cDE^ttgS#z zZ7{C~o6&ZbkJbHlF_qnx%Zc33#*`9r+M_A{vKcM44?UMd8)2`6^uvM1xzgET?1rV{ zl_hwXY!HoS7R`Zq#ku3E1Mst-4;v%#Ey!5UUjK4F=&k9wnsL|+(Q3)0cxE*;zAs#U zC+Hx*|402saW0pn;ag<1<#7Sz-MA>)hn~lwiVUa2;E+ISZb9+SG63@7k`nx+sC-;i z{;$Arp;|vIMAJj?=%$Z19T!1yK3aJX<@>?4VQ%42B;Q>ORr4oIH+BT`iwojnN*0!) zh5gZPhwS2Xii<%&qK49XsN>JkLu>IHO}n9FUywto46zI9MhoswlzLxF?WPwCih5}I z&rAKM<8;t1R7SfM7o>KlhkL6-Y-jaWuH)Fn1!R`Mev3i#7^Z!PZRHEE5jJQeZS@W#%#VYWz0UQNa5AxG!Rx|&B+mk6w_+D#%Z6e)zh zIwT|npIR3>4-B8fAS`5n<`tI1 zJ48TjJ!mDN6dvAyDsIqwST>@9Vi1BDiUP>=!&qZ`+Y*PPv!Nv5FFQggTc;m}a!WqQ zYUk4%j2`qEuuT1zTs-{MgvRq~-|2GGH&up3A;(817na{&`Eg>_Sd~L=Y2w<|zrGly zo;#}8a*m1|BP|@wtK7JQ?(=b{YQeTXuhd1OM{Jn0%FZGItxvZTzxjjYM@5*;d2c?Y z)`NW4vo5WZ&!Zf z&T)^ny?2b&Tetq(m8HFPNd@t4;%S9?f8c8OkXIRe^}Y~gF$kdeGvdd|%*Czra*L|90d^>o{=ID(hj9-o% zdpH@9@$TXt%d%(ddQKkht4WI4wdB~KIww2nX~AkRVP&>WC5*4iDeuUf%T$%5D%1y_ z6BY5Si4tOfLgWyGvn|z*tKo{Z_WBix?83$8{U0orauBsv51~Yd^d;0C!Sy~MViT83 z0M~mtJ7~7SJ?R=;1VN;`eKs33s#5U=$wR149!6y%T&XyW#7Lf7a){FMhyle;Xnds( zTd{wDJ|)o1aQN!xL$#5pt9BXXJQUyPhZ31PYjkYvC+$9o=i`e5iY_5uOuP>u5CZ$)Gp*2$yr?mRHbM zSuUFFbz}1DtqvN1ooc2jnW*z*)_mv85rasPw)>*zRXd#%w)oMxmh}810yl9`t9M4L zHzg>veN{ndNH3e+VAa6BDa?Rc94(!j6g;AI6=8S?&J2~I(&?Nf96#r6n;S#ueq2lD z$;!+5#V42*_4)aVih4A@v35E0 zl}p2Ci>zQVnaY|(1PLr?RS_;W3?s585jK_}9DIX;64kGctB6bI49BL2&yF<+TN+OE znj6a?X$1ZptIjKDpI?Bjb#dltG~KWt(Yzkuxrk*=p6IKY#R;*+2g1C<=Sr|#X66Ks 
z$W~e4{e6b0l4L9X!pqXf`TLM6jV!YCTdkNW&RI+!(seapYpLI7e7AaF z%Cj!3aPuqI;GQax@=oqhCg$$9^V~we3_j|9T~M}q&h6js#&yuD>=P?$o9EOF^D3c^64O7 z5)1{}Wd)1wU!uHb1nvC(NYyJ+bO>u~$!dgzDR)g!(GjL$PS9KgjN6RD5&TK~LZ&cp zxxd5W?`HJ-sTS+8uXk+FJvH0#x5vWZ2gIs6vT3(0$2q35_YdTC*QL5jiBN>eW+Ub9 z@>)cJ%Q6XRQY*$~C8Ua<$wm9lGtN|ZOp&iD7x$=-w49zVzw*`a28Z#2!`mJlF76`* zT2}GOQ z#+=X6)MCQ@3CpiBiz*KNS@Ctq*s|YrrB5p`6P9?Xr$#Hqk5nNa!BrNnN4`^g4j$HM zf_{i9d}O9(&V9l>zu1QGgrHOs?w|kI&8e1aUZmE&E{L>Bp ze^hyREoFbOJgdWGImc0B2z!Y-9#|hQrd1QeM7=6{SW8w{l;&+NV%ZRPuXJI)ILnvW zSDEv&)3`}GjBJZg|6KX*(8XadLb}%XOHPiKoVPHhxtKgy8e()g*!n`{@*}?0R#oPF znoH%a@qw>D#l@enP4?ya+Sf1J$nQR%YB;{CD8?%U|C8pABL2OILnq1`0z;ihm5b5# zTf$m|AN$0z-B|PdZ7(9wEk59r_riXO6kIljWthgGj62p#W`1kJ|>_Pc?5Mk__^~8PU(<_W9F2oaF`q zsRz<_cC}qK^>19=P51jFiGDs}f?;C6<3PZ|!Dm-4KO0$FP;BozG3#+^E^` z90-BV;`5znPBsLP7hrF+FSn8~j2Rn>R|`Xh-=CC&zuTHmg4SVZyZzV3P zNbaT;bs{78uFIZd&z-x!;0;yZuyi+e&M&^PWr2&^H4n=ZzQwN1`xmbZaB)hnzoB>P zjKHTZYw5+F1xDZAICOu=ZOtOfZ*MMlamuxH7#-rN_dTD@Y008CS>){wtw-C`ShrCV4CHDpy_PeCBX?!QFkp9c3|rvCrt#s2~pxdCAS diff --git a/api/core/model_runtime/model_providers/__base/model_provider.py b/api/core/model_runtime/model_providers/__base/model_provider.py deleted file mode 100644 index 4374093de4ab38..00000000000000 --- a/api/core/model_runtime/model_providers/__base/model_provider.py +++ /dev/null @@ -1,120 +0,0 @@ -import os -from abc import ABC, abstractmethod -from typing import Optional - -from core.helper.module_import_helper import get_subclasses_from_module, import_module_from_source -from core.model_runtime.entities.model_entities import AIModelEntity, ModelType -from core.model_runtime.entities.provider_entities import ProviderEntity -from core.model_runtime.model_providers.__base.ai_model import AIModel -from core.tools.utils.yaml_utils import load_yaml_file - - -class ModelProvider(ABC): - provider_schema: Optional[ProviderEntity] = None - model_instance_map: dict[str, AIModel] = {} - - @abstractmethod - def validate_provider_credentials(self, credentials: dict) 
-> None: - """ - Validate provider credentials - You can choose any validate_credentials method of model type or implement validate method by yourself, - such as: get model list api - - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - raise NotImplementedError - - def get_provider_schema(self) -> ProviderEntity: - """ - Get provider schema - - :return: provider schema - """ - if self.provider_schema: - return self.provider_schema - - # get dirname of the current path - provider_name = self.__class__.__module__.split(".")[-1] - - # get the path of the model_provider classes - base_path = os.path.abspath(__file__) - current_path = os.path.join(os.path.dirname(os.path.dirname(base_path)), provider_name) - - # read provider schema from yaml file - yaml_path = os.path.join(current_path, f"{provider_name}.yaml") - yaml_data = load_yaml_file(yaml_path) - - try: - # yaml_data to entity - provider_schema = ProviderEntity(**yaml_data) - except Exception as e: - raise Exception(f"Invalid provider schema for {provider_name}: {str(e)}") - - # cache schema - self.provider_schema = provider_schema - - return provider_schema - - def models(self, model_type: ModelType) -> list[AIModelEntity]: - """ - Get all models for given model type - - :param model_type: model type defined in `ModelType` - :return: list of models - """ - provider_schema = self.get_provider_schema() - if model_type not in provider_schema.supported_model_types: - return [] - - # get model instance of the model type - model_instance = self.get_model_instance(model_type) - - # get predefined models (predefined_models) - models = model_instance.predefined_models() - - # return models - return models - - def get_model_instance(self, model_type: ModelType) -> AIModel: - """ - Get model instance - - :param model_type: model type defined in `ModelType` - :return: - """ - # get dirname of the current path - provider_name = 
self.__class__.__module__.split(".")[-1] - - if f"{provider_name}.{model_type.value}" in self.model_instance_map: - return self.model_instance_map[f"{provider_name}.{model_type.value}"] - - # get the path of the model type classes - base_path = os.path.abspath(__file__) - model_type_name = model_type.value.replace("-", "_") - model_type_path = os.path.join(os.path.dirname(os.path.dirname(base_path)), provider_name, model_type_name) - model_type_py_path = os.path.join(model_type_path, f"{model_type_name}.py") - - if not os.path.isdir(model_type_path) or not os.path.exists(model_type_py_path): - raise Exception(f"Invalid model type {model_type} for provider {provider_name}") - - # Dynamic loading {model_type_name}.py file and find the subclass of AIModel - parent_module = ".".join(self.__class__.__module__.split(".")[:-1]) - mod = import_module_from_source( - module_name=f"{parent_module}.{model_type_name}.{model_type_name}", py_file_path=model_type_py_path - ) - model_class = next( - filter( - lambda x: x.__module__ == mod.__name__ and not x.__abstractmethods__, - get_subclasses_from_module(mod, AIModel), - ), - None, - ) - if not model_class: - raise Exception(f"Missing AIModel Class for model type {model_type} in {model_type_py_path}") - - model_instance_map = model_class() - self.model_instance_map[f"{provider_name}.{model_type.value}"] = model_instance_map - - return model_instance_map diff --git a/api/core/model_runtime/model_providers/__base/moderation_model.py b/api/core/model_runtime/model_providers/__base/moderation_model.py index d04414ccb87a63..f98d7572c727d0 100644 --- a/api/core/model_runtime/model_providers/__base/moderation_model.py +++ b/api/core/model_runtime/model_providers/__base/moderation_model.py @@ -1,11 +1,11 @@ import time -from abc import abstractmethod from typing import Optional from pydantic import ConfigDict from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.model_providers.__base.ai_model import 
AIModel +from core.plugin.manager.model import PluginModelManager class ModerationModel(AIModel): @@ -31,19 +31,15 @@ def invoke(self, model: str, credentials: dict, text: str, user: Optional[str] = self.started_at = time.perf_counter() try: - return self._invoke(model, credentials, text, user) + plugin_model_manager = PluginModelManager() + return plugin_model_manager.invoke_moderation( + tenant_id=self.tenant_id, + user_id=user or "unknown", + plugin_id=self.plugin_id, + provider=self.provider_name, + model=model, + credentials=credentials, + text=text, + ) except Exception as e: raise self._transform_invoke_error(e) - - @abstractmethod - def _invoke(self, model: str, credentials: dict, text: str, user: Optional[str] = None) -> bool: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param text: text to moderate - :param user: unique user id - :return: false if text is safe, true otherwise - """ - raise NotImplementedError diff --git a/api/core/model_runtime/model_providers/__base/rerank_model.py b/api/core/model_runtime/model_providers/__base/rerank_model.py index 5fb96047425592..e905cb18d46282 100644 --- a/api/core/model_runtime/model_providers/__base/rerank_model.py +++ b/api/core/model_runtime/model_providers/__base/rerank_model.py @@ -1,10 +1,9 @@ -import time -from abc import abstractmethod from typing import Optional from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.entities.rerank_entities import RerankResult from core.model_runtime.model_providers.__base.ai_model import AIModel +from core.plugin.manager.model import PluginModelManager class RerankModel(AIModel): @@ -36,34 +35,19 @@ def invoke( :param user: unique user id :return: rerank result """ - self.started_at = time.perf_counter() - try: - return self._invoke(model, credentials, query, docs, score_threshold, top_n, user) + plugin_model_manager = PluginModelManager() + return 
plugin_model_manager.invoke_rerank( + tenant_id=self.tenant_id, + user_id=user or "unknown", + plugin_id=self.plugin_id, + provider=self.provider_name, + model=model, + credentials=credentials, + query=query, + docs=docs, + score_threshold=score_threshold, + top_n=top_n, + ) except Exception as e: raise self._transform_invoke_error(e) - - @abstractmethod - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n - :param user: unique user id - :return: rerank result - """ - raise NotImplementedError diff --git a/api/core/model_runtime/model_providers/__base/speech2text_model.py b/api/core/model_runtime/model_providers/__base/speech2text_model.py index b6b0b737436d9c..97ff322f09c2d2 100644 --- a/api/core/model_runtime/model_providers/__base/speech2text_model.py +++ b/api/core/model_runtime/model_providers/__base/speech2text_model.py @@ -1,11 +1,10 @@ -import os -from abc import abstractmethod from typing import IO, Optional from pydantic import ConfigDict from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.model_providers.__base.ai_model import AIModel +from core.plugin.manager.model import PluginModelManager class Speech2TextModel(AIModel): @@ -20,7 +19,7 @@ class Speech2TextModel(AIModel): def invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: """ - Invoke large language model + Invoke speech to text model :param model: model name :param credentials: model credentials @@ -29,31 +28,15 @@ def invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[ :return: text for given audio file """ 
try: - return self._invoke(model, credentials, file, user) + plugin_model_manager = PluginModelManager() + return plugin_model_manager.invoke_speech_to_text( + tenant_id=self.tenant_id, + user_id=user or "unknown", + plugin_id=self.plugin_id, + provider=self.provider_name, + model=model, + credentials=credentials, + file=file, + ) except Exception as e: raise self._transform_invoke_error(e) - - @abstractmethod - def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :param user: unique user id - :return: text for given audio file - """ - raise NotImplementedError - - def _get_demo_file_path(self) -> str: - """ - Get demo file for given model - - :return: demo file - """ - # Get the directory of the current file - current_dir = os.path.dirname(os.path.abspath(__file__)) - - # Construct the path to the audio file - return os.path.join(current_dir, "audio.mp3") diff --git a/api/core/model_runtime/model_providers/__base/text2img_model.py b/api/core/model_runtime/model_providers/__base/text2img_model.py deleted file mode 100644 index a5810e2f0e4b09..00000000000000 --- a/api/core/model_runtime/model_providers/__base/text2img_model.py +++ /dev/null @@ -1,54 +0,0 @@ -from abc import abstractmethod -from typing import IO, Optional - -from pydantic import ConfigDict - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.model_providers.__base.ai_model import AIModel - - -class Text2ImageModel(AIModel): - """ - Model class for text2img model. 
- """ - - model_type: ModelType = ModelType.TEXT2IMG - - # pydantic configs - model_config = ConfigDict(protected_namespaces=()) - - def invoke( - self, model: str, credentials: dict, prompt: str, model_parameters: dict, user: Optional[str] = None - ) -> list[IO[bytes]]: - """ - Invoke Text2Image model - - :param model: model name - :param credentials: model credentials - :param prompt: prompt for image generation - :param model_parameters: model parameters - :param user: unique user id - - :return: image bytes - """ - try: - return self._invoke(model, credentials, prompt, model_parameters, user) - except Exception as e: - raise self._transform_invoke_error(e) - - @abstractmethod - def _invoke( - self, model: str, credentials: dict, prompt: str, model_parameters: dict, user: Optional[str] = None - ) -> list[IO[bytes]]: - """ - Invoke Text2Image model - - :param model: model name - :param credentials: model credentials - :param prompt: prompt for image generation - :param model_parameters: model parameters - :param user: unique user id - - :return: image bytes - """ - raise NotImplementedError diff --git a/api/core/model_runtime/model_providers/__base/text_embedding_model.py b/api/core/model_runtime/model_providers/__base/text_embedding_model.py index 1a5c40ed516da1..beade743623a6a 100644 --- a/api/core/model_runtime/model_providers/__base/text_embedding_model.py +++ b/api/core/model_runtime/model_providers/__base/text_embedding_model.py @@ -1,5 +1,3 @@ -import time -from abc import abstractmethod from typing import Optional from pydantic import ConfigDict @@ -39,34 +37,21 @@ def invoke( :param input_type: input type :return: embeddings result """ - self.started_at = time.perf_counter() - try: - return self._invoke(model, credentials, texts, user, input_type) + plugin_model_manager = PluginModelManager() + return plugin_model_manager.invoke_text_embedding( + tenant_id=self.tenant_id, + user_id=user or "unknown", + plugin_id=self.plugin_id, + 
provider=self.provider_name, + model=model, + credentials=credentials, + texts=texts, + input_type=input_type.value, + ) except Exception as e: raise self._transform_invoke_error(e) - @abstractmethod - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - raise NotImplementedError - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: """ Get number of tokens for given prompt messages @@ -82,7 +67,6 @@ def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int user_id="unknown", plugin_id=self.plugin_id, provider=self.provider_name, - model_type=self.model_type.value, model=model, credentials=credentials, texts=texts, diff --git a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenzier.py b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenzier.py index 5fe6dda6ad5d79..609c11d22f1cb7 100644 --- a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenzier.py +++ b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenzier.py @@ -1,34 +1,9 @@ -from os.path import abspath, dirname, join -from threading import Lock -from typing import Any - -from transformers import GPT2Tokenizer as TransformerGPT2Tokenizer - -_tokenizer = None -_lock = Lock() +import tiktoken class GPT2Tokenizer: - @staticmethod - def _get_num_tokens_by_gpt2(text: str) -> int: - """ - use gpt2 tokenizer to get num tokens - """ - _tokenizer = GPT2Tokenizer.get_encoder() - tokens = _tokenizer.encode(text, verbose=False) - return len(tokens) - @staticmethod def get_num_tokens(text: str) -> int: - return 
GPT2Tokenizer._get_num_tokens_by_gpt2(text) - - @staticmethod - def get_encoder() -> Any: - global _tokenizer, _lock - with _lock: - if _tokenizer is None: - base_path = abspath(__file__) - gpt2_tokenizer_path = join(dirname(base_path), "gpt2") - _tokenizer = TransformerGPT2Tokenizer.from_pretrained(gpt2_tokenizer_path) - - return _tokenizer + encoding = tiktoken.encoding_for_model("gpt2") + tiktoken_vec = encoding.encode(text) + return len(tiktoken_vec) diff --git a/api/core/model_runtime/model_providers/__base/tts_model.py b/api/core/model_runtime/model_providers/__base/tts_model.py index 70be9322a75d53..8cefa63ebfb97f 100644 --- a/api/core/model_runtime/model_providers/__base/tts_model.py +++ b/api/core/model_runtime/model_providers/__base/tts_model.py @@ -1,12 +1,11 @@ import logging -import re -from abc import abstractmethod from typing import Optional from pydantic import ConfigDict -from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType +from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.model_providers.__base.ai_model import AIModel +from core.plugin.manager.model import PluginModelManager logger = logging.getLogger(__name__) @@ -37,36 +36,21 @@ def invoke( :return: translated audio file """ try: - return self._invoke( + plugin_model_manager = PluginModelManager() + return plugin_model_manager.invoke_tts( + tenant_id=self.tenant_id, + user_id=user or "unknown", + plugin_id=self.plugin_id, + provider=self.provider_name, model=model, credentials=credentials, - user=user, content_text=content_text, voice=voice, - tenant_id=tenant_id, ) except Exception as e: raise self._transform_invoke_error(e) - @abstractmethod - def _invoke( - self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ): - """ - Invoke large language model - - :param model: model name - :param tenant_id: user tenant id - :param credentials: model credentials - :param 
voice: model timbre - :param content_text: text content to be translated - :param streaming: output is streaming - :param user: unique user id - :return: translated audio file - """ - raise NotImplementedError - - def get_tts_model_voices(self, model: str, credentials: dict, language: Optional[str] = None) -> list: + def get_tts_model_voices(self, model: str, credentials: dict, language: Optional[str] = None) -> list[dict]: """ Get voice for given tts model voices @@ -75,83 +59,13 @@ def get_tts_model_voices(self, model: str, credentials: dict, language: Optional :param credentials: model credentials :return: voices lists """ - model_schema = self.get_model_schema(model, credentials) - - if model_schema and ModelPropertyKey.VOICES in model_schema.model_properties: - voices = model_schema.model_properties[ModelPropertyKey.VOICES] - if language: - return [ - {"name": d["name"], "value": d["mode"]} - for d in voices - if language and language in d.get("language") - ] - else: - return [{"name": d["name"], "value": d["mode"]} for d in voices] - - def _get_model_default_voice(self, model: str, credentials: dict) -> any: - """ - Get voice for given tts model - - :param model: model name - :param credentials: model credentials - :return: voice - """ - model_schema = self.get_model_schema(model, credentials) - - if model_schema and ModelPropertyKey.DEFAULT_VOICE in model_schema.model_properties: - return model_schema.model_properties[ModelPropertyKey.DEFAULT_VOICE] - - def _get_model_audio_type(self, model: str, credentials: dict) -> str: - """ - Get audio type for given tts model - - :param model: model name - :param credentials: model credentials - :return: voice - """ - model_schema = self.get_model_schema(model, credentials) - - if model_schema and ModelPropertyKey.AUDIO_TYPE in model_schema.model_properties: - return model_schema.model_properties[ModelPropertyKey.AUDIO_TYPE] - - def _get_model_word_limit(self, model: str, credentials: dict) -> int: - """ - Get audio 
type for given tts model - :return: audio type - """ - model_schema = self.get_model_schema(model, credentials) - - if model_schema and ModelPropertyKey.WORD_LIMIT in model_schema.model_properties: - return model_schema.model_properties[ModelPropertyKey.WORD_LIMIT] - - def _get_model_workers_limit(self, model: str, credentials: dict) -> int: - """ - Get audio max workers for given tts model - :return: audio type - """ - model_schema = self.get_model_schema(model, credentials) - - if model_schema and ModelPropertyKey.MAX_WORKERS in model_schema.model_properties: - return model_schema.model_properties[ModelPropertyKey.MAX_WORKERS] - - @staticmethod - def _split_text_into_sentences(org_text, max_length=2000, pattern=r"[。.!?]"): - match = re.compile(pattern) - tx = match.finditer(org_text) - start = 0 - result = [] - one_sentence = "" - for i in tx: - end = i.regs[0][1] - tmp = org_text[start:end] - if len(one_sentence + tmp) > max_length: - result.append(one_sentence) - one_sentence = "" - one_sentence += tmp - start = end - last_sens = org_text[start:] - if last_sens: - one_sentence += last_sens - if one_sentence != "": - result.append(one_sentence) - return result + plugin_model_manager = PluginModelManager() + return plugin_model_manager.get_tts_model_voices( + tenant_id=self.tenant_id, + user_id="unknown", + plugin_id=self.plugin_id, + provider=self.provider_name, + model=model, + credentials=credentials, + language=language, + ) diff --git a/api/core/model_runtime/model_providers/azure_openai/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/azure_openai/text_embedding/text_embedding.py deleted file mode 100644 index 8701a3805002eb..00000000000000 --- a/api/core/model_runtime/model_providers/azure_openai/text_embedding/text_embedding.py +++ /dev/null @@ -1,191 +0,0 @@ -import base64 -import copy -import time -from typing import Optional, Union - -import numpy as np -import tiktoken -from openai import AzureOpenAI - -from 
core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import AIModelEntity, PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.azure_openai._common import _CommonAzureOpenAI -from core.model_runtime.model_providers.azure_openai._constant import EMBEDDING_BASE_MODELS, AzureBaseModel - - -class AzureOpenAITextEmbeddingModel(_CommonAzureOpenAI, TextEmbeddingModel): - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - base_model_name = credentials["base_model_name"] - credentials_kwargs = self._to_credential_kwargs(credentials) - client = AzureOpenAI(**credentials_kwargs) - - extra_model_kwargs = {} - if user: - extra_model_kwargs["user"] = user - - extra_model_kwargs["encoding_format"] = "base64" - - context_size = self._get_context_size(model, credentials) - max_chunks = self._get_max_chunks(model, credentials) - - embeddings: list[list[float]] = [[] for _ in range(len(texts))] - tokens = [] - indices = [] - used_tokens = 0 - - try: - enc = tiktoken.encoding_for_model(base_model_name) - except KeyError: - enc = tiktoken.get_encoding("cl100k_base") - - for i, text in enumerate(texts): - token = enc.encode(text) - for j in range(0, len(token), context_size): - tokens += [token[j : j + context_size]] - indices += [i] - - batched_embeddings = [] - _iter = range(0, 
len(tokens), max_chunks) - - for i in _iter: - embeddings_batch, embedding_used_tokens = self._embedding_invoke( - model=model, client=client, texts=tokens[i : i + max_chunks], extra_model_kwargs=extra_model_kwargs - ) - - used_tokens += embedding_used_tokens - batched_embeddings += embeddings_batch - - results: list[list[list[float]]] = [[] for _ in range(len(texts))] - num_tokens_in_batch: list[list[int]] = [[] for _ in range(len(texts))] - for i in range(len(indices)): - results[indices[i]].append(batched_embeddings[i]) - num_tokens_in_batch[indices[i]].append(len(tokens[i])) - - for i in range(len(texts)): - _result = results[i] - if len(_result) == 0: - embeddings_batch, embedding_used_tokens = self._embedding_invoke( - model=model, client=client, texts="", extra_model_kwargs=extra_model_kwargs - ) - - used_tokens += embedding_used_tokens - average = embeddings_batch[0] - else: - average = np.average(_result, axis=0, weights=num_tokens_in_batch[i]) - embeddings[i] = (average / np.linalg.norm(average)).tolist() - - # calc usage - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=used_tokens) - - return TextEmbeddingResult(embeddings=embeddings, usage=usage, model=base_model_name) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - if len(texts) == 0: - return 0 - - try: - enc = tiktoken.encoding_for_model(credentials["base_model_name"]) - except KeyError: - enc = tiktoken.get_encoding("cl100k_base") - - total_num_tokens = 0 - for text in texts: - # calculate the number of tokens in the encoded text - tokenized_text = enc.encode(text) - total_num_tokens += len(tokenized_text) - - return total_num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - if "openai_api_base" not in credentials: - raise CredentialsValidateFailedError("Azure OpenAI API Base Endpoint is required") - - if "openai_api_key" not in credentials: - raise CredentialsValidateFailedError("Azure OpenAI 
API key is required") - - if "base_model_name" not in credentials: - raise CredentialsValidateFailedError("Base Model Name is required") - - if not self._get_ai_model_entity(credentials["base_model_name"], model): - raise CredentialsValidateFailedError(f'Base Model Name {credentials["base_model_name"]} is invalid') - - try: - credentials_kwargs = self._to_credential_kwargs(credentials) - client = AzureOpenAI(**credentials_kwargs) - - self._embedding_invoke(model=model, client=client, texts=["ping"], extra_model_kwargs={}) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: - ai_model_entity = self._get_ai_model_entity(credentials["base_model_name"], model) - return ai_model_entity.entity - - @staticmethod - def _embedding_invoke( - model: str, client: AzureOpenAI, texts: Union[list[str], str], extra_model_kwargs: dict - ) -> tuple[list[list[float]], int]: - response = client.embeddings.create( - input=texts, - model=model, - **extra_model_kwargs, - ) - - if "encoding_format" in extra_model_kwargs and extra_model_kwargs["encoding_format"] == "base64": - # decode base64 embedding - return ( - [list(np.frombuffer(base64.b64decode(data.embedding), dtype="float32")) for data in response.data], - response.usage.total_tokens, - ) - - return [data.embedding for data in response.data], response.usage.total_tokens - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - 
@staticmethod - def _get_ai_model_entity(base_model_name: str, model: str) -> AzureBaseModel: - for ai_model_entity in EMBEDDING_BASE_MODELS: - if ai_model_entity.base_model_name == base_model_name: - ai_model_entity_copy = copy.deepcopy(ai_model_entity) - ai_model_entity_copy.entity.model = model - ai_model_entity_copy.entity.label.en_US = model - ai_model_entity_copy.entity.label.zh_Hans = model - return ai_model_entity_copy - - return None diff --git a/api/core/model_runtime/model_providers/baichuan/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/baichuan/text_embedding/text_embedding.py deleted file mode 100644 index 56b9be1c365340..00000000000000 --- a/api/core/model_runtime/model_providers/baichuan/text_embedding/text_embedding.py +++ /dev/null @@ -1,207 +0,0 @@ -import time -from json import dumps -from typing import Optional - -from requests import post - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.baichuan.llm.baichuan_tokenizer import BaichuanTokenizer -from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo_errors import ( - BadRequestError, - InsufficientAccountBalanceError, - InternalServerError, - InvalidAPIKeyError, - InvalidAuthenticationError, - RateLimitReachedError, -) - - -class BaichuanTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for BaiChuan text embedding model. 
- """ - - api_base: str = "http://api.baichuan-ai.com/v1/embeddings" - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - api_key = credentials["api_key"] - if model != "baichuan-text-embedding": - raise ValueError("Invalid model name") - if not api_key: - raise CredentialsValidateFailedError("api_key is required") - - # split into chunks of batch size 16 - chunks = [] - for i in range(0, len(texts), 16): - chunks.append(texts[i : i + 16]) - - embeddings = [] - token_usage = 0 - - for chunk in chunks: - # embedding chunk - chunk_embeddings, chunk_usage = self.embedding(model=model, api_key=api_key, texts=chunk, user=user) - - embeddings.extend(chunk_embeddings) - token_usage += chunk_usage - - result = TextEmbeddingResult( - model=model, - embeddings=embeddings, - usage=self._calc_response_usage(model=model, credentials=credentials, tokens=token_usage), - ) - - return result - - def embedding( - self, model: str, api_key, texts: list[str], user: Optional[str] = None - ) -> tuple[list[list[float]], int]: - """ - Embed given texts - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :return: embeddings result - """ - url = self.api_base - headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"} - - data = {"model": "Baichuan-Text-Embedding", "input": texts} - - try: - response = post(url, headers=headers, data=dumps(data)) - except Exception as e: - raise InvokeConnectionError(str(e)) - - if response.status_code != 200: - try: - resp = response.json() - # try to parse error message - 
err = resp["error"]["code"] - msg = resp["error"]["message"] - except Exception as e: - raise InternalServerError(f"Failed to convert response to json: {e} with text: {response.text}") - - if err == "invalid_api_key": - raise InvalidAPIKeyError(msg) - elif err == "insufficient_quota": - raise InsufficientAccountBalanceError(msg) - elif err == "invalid_authentication": - raise InvalidAuthenticationError(msg) - elif err and "rate" in err: - raise RateLimitReachedError(msg) - elif err and "internal" in err: - raise InternalServerError(msg) - elif err == "api_key_empty": - raise InvalidAPIKeyError(msg) - else: - raise InternalServerError(f"Unknown error: {err} with message: {msg}") - - try: - resp = response.json() - embeddings = resp["data"] - usage = resp["usage"] - except Exception as e: - raise InternalServerError(f"Failed to convert response to json: {e} with text: {response.text}") - - return [data["embedding"] for data in embeddings], usage["total_tokens"] - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - num_tokens = 0 - for text in texts: - # use BaichuanTokenizer to get num tokens - num_tokens += BaichuanTokenizer._get_num_tokens(text) - return num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except InvalidAPIKeyError: - raise CredentialsValidateFailedError("Invalid api key") - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [InternalServerError], - InvokeRateLimitError: [RateLimitReachedError], - 
InvokeAuthorizationError: [ - InvalidAuthenticationError, - InsufficientAccountBalanceError, - InvalidAPIKeyError, - ], - InvokeBadRequestError: [BadRequestError, KeyError], - } - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/cohere/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/cohere/text_embedding/text_embedding.py deleted file mode 100644 index 4da20806904ba0..00000000000000 --- a/api/core/model_runtime/model_providers/cohere/text_embedding/text_embedding.py +++ /dev/null @@ -1,223 +0,0 @@ -import time -from typing import Optional - -import cohere -import numpy as np -from cohere.core import RequestOptions - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel - - -class 
CohereTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for Cohere text embedding model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - # get model properties - context_size = self._get_context_size(model, credentials) - max_chunks = self._get_max_chunks(model, credentials) - - embeddings: list[list[float]] = [[] for _ in range(len(texts))] - tokens = [] - indices = [] - used_tokens = 0 - - for i, text in enumerate(texts): - tokenize_response = self._tokenize(model=model, credentials=credentials, text=text) - - for j in range(0, len(tokenize_response), context_size): - tokens += [tokenize_response[j : j + context_size]] - indices += [i] - - batched_embeddings = [] - _iter = range(0, len(tokens), max_chunks) - - for i in _iter: - # call embedding model - embeddings_batch, embedding_used_tokens = self._embedding_invoke( - model=model, credentials=credentials, texts=["".join(token) for token in tokens[i : i + max_chunks]] - ) - - used_tokens += embedding_used_tokens - batched_embeddings += embeddings_batch - - results: list[list[list[float]]] = [[] for _ in range(len(texts))] - num_tokens_in_batch: list[list[int]] = [[] for _ in range(len(texts))] - for i in range(len(indices)): - results[indices[i]].append(batched_embeddings[i]) - num_tokens_in_batch[indices[i]].append(len(tokens[i])) - - for i in range(len(texts)): - _result = results[i] - if len(_result) == 0: - embeddings_batch, embedding_used_tokens = self._embedding_invoke( - model=model, credentials=credentials, texts=[" "] - ) - - used_tokens += embedding_used_tokens - average = embeddings_batch[0] - else: - 
average = np.average(_result, axis=0, weights=num_tokens_in_batch[i]) - embeddings[i] = (average / np.linalg.norm(average)).tolist() - - # calc usage - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=used_tokens) - - return TextEmbeddingResult(embeddings=embeddings, usage=usage, model=model) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - if len(texts) == 0: - return 0 - - full_text = " ".join(texts) - - try: - response = self._tokenize(model=model, credentials=credentials, text=full_text) - except Exception as e: - raise self._transform_invoke_error(e) - - return len(response) - - def _tokenize(self, model: str, credentials: dict, text: str) -> list[str]: - """ - Tokenize text - :param model: model name - :param credentials: model credentials - :param text: text to tokenize - :return: - """ - if not text: - return [] - - # initialize client - client = cohere.Client(credentials.get("api_key"), base_url=credentials.get("base_url")) - - response = client.tokenize(text=text, model=model, offline=False, request_options=RequestOptions(max_retries=0)) - - return response.token_strings - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # call embedding model - self._embedding_invoke(model=model, credentials=credentials, texts=["ping"]) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _embedding_invoke(self, model: str, credentials: dict, texts: list[str]) -> tuple[list[list[float]], int]: - """ - Invoke embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: embeddings and used 
tokens - """ - # initialize client - client = cohere.Client(credentials.get("api_key"), base_url=credentials.get("base_url")) - - # call embedding model - response = client.embed( - texts=texts, - model=model, - input_type="search_document" if len(texts) > 1 else "search_query", - request_options=RequestOptions(max_retries=1), - ) - - return response.embeddings, int(response.meta.billed_units.input_tokens) - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [cohere.errors.service_unavailable_error.ServiceUnavailableError], - InvokeServerUnavailableError: [cohere.errors.internal_server_error.InternalServerError], - InvokeRateLimitError: [cohere.errors.too_many_requests_error.TooManyRequestsError], - InvokeAuthorizationError: [ - cohere.errors.unauthorized_error.UnauthorizedError, - cohere.errors.forbidden_error.ForbiddenError, - ], - InvokeBadRequestError: [ - cohere.core.api_error.ApiError, - cohere.errors.bad_request_error.BadRequestError, - cohere.errors.not_found_error.NotFoundError, - ], - } diff --git a/api/core/model_runtime/model_providers/fireworks/fireworks.yaml b/api/core/model_runtime/model_providers/fireworks/fireworks.yaml deleted file mode 100644 index cdb87a55e94660..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/fireworks.yaml +++ /dev/null @@ -1,30 +0,0 @@ -provider: fireworks -label: - zh_Hans: Fireworks AI - en_US: Fireworks AI -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#FCFDFF" -help: - title: - en_US: Get your API Key from Fireworks AI - zh_Hans: 从 Fireworks AI 获取 API Key - url: - en_US: https://fireworks.ai/account/api-keys -supported_model_types: - - llm - - text-embedding -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: fireworks_api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-11b-vision-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-11b-vision-instruct.yaml deleted file mode 100644 index 31415a24fa8b7e..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-11b-vision-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: 
accounts/fireworks/models/llama-v3p2-11b-vision-instruct -label: - zh_Hans: Llama 3.2 11B Vision Instruct - en_US: Llama 3.2 11B Vision Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.2' - output: '0.2' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-1b-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-1b-instruct.yaml deleted file mode 100644 index c2fd77d2568d29..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-1b-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/llama-v3p2-1b-instruct -label: - zh_Hans: Llama 3.2 1B Instruct - en_US: Llama 3.2 1B Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.1' - output: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-3b-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-3b-instruct.yaml deleted file mode 100644 index 4b3c459c7bf2fc..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-3b-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/llama-v3p2-3b-instruct -label: - zh_Hans: Llama 3.2 3B Instruct - en_US: Llama 3.2 3B Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.1' - output: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-90b-vision-instruct.yaml b/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-90b-vision-instruct.yaml deleted file mode 100644 index 0aece7455d6254..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/llm/llama-v3p2-90b-vision-instruct.yaml +++ /dev/null @@ -1,46 +0,0 @@ -model: accounts/fireworks/models/llama-v3p2-90b-vision-instruct -label: - zh_Hans: Llama 3.2 90B Vision Instruct - en_US: Llama 3.2 90B Vision Instruct -model_type: llm -features: - - agent-thought - - tool-call -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- - name: max_tokens - use_template: max_tokens - - name: context_length_exceeded_behavior - default: None - label: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - help: - zh_Hans: 上下文长度超出行为 - en_US: Context Length Exceeded Behavior - type: string - options: - - None - - truncate - - error - - name: response_format - use_template: response_format -pricing: - input: '0.9' - output: '0.9' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/text_embedding/UAE-Large-V1.yaml b/api/core/model_runtime/model_providers/fireworks/text_embedding/UAE-Large-V1.yaml deleted file mode 100644 index d7c11691cf9bbc..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/text_embedding/UAE-Large-V1.yaml +++ /dev/null @@ -1,12 +0,0 @@ -model: WhereIsAI/UAE-Large-V1 -label: - zh_Hans: UAE-Large-V1 - en_US: UAE-Large-V1 -model_type: text-embedding -model_properties: - context_size: 512 - max_chunks: 1 -pricing: - input: '0.008' - unit: '0.000001' - currency: 'USD' diff --git a/api/core/model_runtime/model_providers/fireworks/text_embedding/__init__.py b/api/core/model_runtime/model_providers/fireworks/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/core/model_runtime/model_providers/fireworks/text_embedding/gte-base.yaml b/api/core/model_runtime/model_providers/fireworks/text_embedding/gte-base.yaml deleted file mode 100644 index d09bafb4d312f9..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/text_embedding/gte-base.yaml +++ /dev/null @@ -1,12 +0,0 @@ -model: thenlper/gte-base -label: - zh_Hans: GTE-base - en_US: GTE-base -model_type: text-embedding -model_properties: - context_size: 512 - max_chunks: 1 -pricing: - input: '0.008' - unit: '0.000001' - currency: 'USD' diff --git a/api/core/model_runtime/model_providers/fireworks/text_embedding/gte-large.yaml 
b/api/core/model_runtime/model_providers/fireworks/text_embedding/gte-large.yaml deleted file mode 100644 index c41fa2f9d32361..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/text_embedding/gte-large.yaml +++ /dev/null @@ -1,12 +0,0 @@ -model: thenlper/gte-large -label: - zh_Hans: GTE-large - en_US: GTE-large -model_type: text-embedding -model_properties: - context_size: 512 - max_chunks: 1 -pricing: - input: '0.008' - unit: '0.000001' - currency: 'USD' diff --git a/api/core/model_runtime/model_providers/fireworks/text_embedding/nomic-embed-text-v1.5.yaml b/api/core/model_runtime/model_providers/fireworks/text_embedding/nomic-embed-text-v1.5.yaml deleted file mode 100644 index c9098503d96529..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/text_embedding/nomic-embed-text-v1.5.yaml +++ /dev/null @@ -1,12 +0,0 @@ -model: nomic-ai/nomic-embed-text-v1.5 -label: - zh_Hans: nomic-embed-text-v1.5 - en_US: nomic-embed-text-v1.5 -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 16 -pricing: - input: '0.008' - unit: '0.000001' - currency: 'USD' diff --git a/api/core/model_runtime/model_providers/fireworks/text_embedding/nomic-embed-text-v1.yaml b/api/core/model_runtime/model_providers/fireworks/text_embedding/nomic-embed-text-v1.yaml deleted file mode 100644 index 89078d3ff69f93..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/text_embedding/nomic-embed-text-v1.yaml +++ /dev/null @@ -1,12 +0,0 @@ -model: nomic-ai/nomic-embed-text-v1 -label: - zh_Hans: nomic-embed-text-v1 - en_US: nomic-embed-text-v1 -model_type: text-embedding -model_properties: - context_size: 8192 - max_chunks: 16 -pricing: - input: '0.008' - unit: '0.000001' - currency: 'USD' diff --git a/api/core/model_runtime/model_providers/fireworks/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/fireworks/text_embedding/text_embedding.py deleted file mode 100644 index 
cdce69ff380338..00000000000000 --- a/api/core/model_runtime/model_providers/fireworks/text_embedding/text_embedding.py +++ /dev/null @@ -1,151 +0,0 @@ -import time -from collections.abc import Mapping -from typing import Optional, Union - -import numpy as np -from openai import OpenAI - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.fireworks._common import _CommonFireworks - - -class FireworksTextEmbeddingModel(_CommonFireworks, TextEmbeddingModel): - """ - Model class for Fireworks text embedding model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - extra_model_kwargs = {} - if user: - extra_model_kwargs["user"] = user - - extra_model_kwargs["encoding_format"] = "float" - - context_size = self._get_context_size(model, credentials) - max_chunks = self._get_max_chunks(model, credentials) - - inputs = [] - indices = [] - used_tokens = 0 - - for i, text in enumerate(texts): - # Here token count is only an approximation based on the GPT2 tokenizer - # TODO: Optimize for better token estimation and chunking - num_tokens = self._get_num_tokens_by_gpt2(text) - - if num_tokens >= 
context_size: - cutoff = int(np.floor(len(text) * (context_size / num_tokens))) - # if num tokens is larger than context length, only use the start - inputs.append(text[0:cutoff]) - else: - inputs.append(text) - indices += [i] - - batched_embeddings = [] - _iter = range(0, len(inputs), max_chunks) - - for i in _iter: - embeddings_batch, embedding_used_tokens = self._embedding_invoke( - model=model, - client=client, - texts=inputs[i : i + max_chunks], - extra_model_kwargs=extra_model_kwargs, - ) - used_tokens += embedding_used_tokens - batched_embeddings += embeddings_batch - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=used_tokens) - return TextEmbeddingResult(embeddings=batched_embeddings, usage=usage, model=model) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - return sum(self._get_num_tokens_by_gpt2(text) for text in texts) - - def validate_credentials(self, model: str, credentials: Mapping) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - # call embedding model - self._embedding_invoke(model=model, client=client, texts=["ping"], extra_model_kwargs={}) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _embedding_invoke( - self, model: str, client: OpenAI, texts: Union[list[str], str], extra_model_kwargs: dict - ) -> tuple[list[list[float]], int]: - """ - Invoke embedding model - :param model: model name - :param client: model client - :param texts: texts to embed - :param extra_model_kwargs: extra model kwargs - :return: embeddings and used 
tokens - """ - response = client.embeddings.create(model=model, input=texts, **extra_model_kwargs) - return [data.embedding for data in response.data], response.usage.total_tokens - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - input_price_info = self.get_price( - model=model, credentials=credentials, tokens=tokens, price_type=PriceType.INPUT - ) - - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/fishaudio/fishaudio.yaml b/api/core/model_runtime/model_providers/fishaudio/fishaudio.yaml deleted file mode 100644 index 479eb7fb85bd76..00000000000000 --- a/api/core/model_runtime/model_providers/fishaudio/fishaudio.yaml +++ /dev/null @@ -1,76 +0,0 @@ -provider: fishaudio -label: - en_US: Fish Audio -description: - en_US: Models provided by Fish Audio, currently only support TTS. 
- zh_Hans: Fish Audio 提供的模型,目前仅支持 TTS。 -icon_small: - en_US: fishaudio_s_en.svg -icon_large: - en_US: fishaudio_l_en.svg -background: "#E5E7EB" -help: - title: - en_US: Get your API key from Fish Audio - zh_Hans: 从 Fish Audio 获取你的 API Key - url: - en_US: https://fish.audio/go-api/ -supported_model_types: - - tts -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: api_base - label: - en_US: API URL - type: text-input - required: false - default: https://api.fish.audio - placeholder: - en_US: Enter your API URL - zh_Hans: 在此输入您的 API URL - - variable: use_public_models - label: - en_US: Use Public Models - type: select - required: false - default: "false" - placeholder: - en_US: Toggle to use public models - zh_Hans: 切换以使用公共模型 - options: - - value: "true" - label: - en_US: Allow Public Models - zh_Hans: 使用公共模型 - - value: "false" - label: - en_US: Private Models Only - zh_Hans: 仅使用私有模型 - - variable: latency - label: - en_US: Latency - type: select - required: false - default: "normal" - placeholder: - en_US: Toggle to choice latency - zh_Hans: 切换以调整延迟 - options: - - value: "balanced" - label: - en_US: Low (may affect quality) - zh_Hans: 低延迟 (可能降低质量) - - value: "normal" - label: - en_US: Normal - zh_Hans: 标准 diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-001.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-001.yaml deleted file mode 100644 index d84e9937e0b661..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-001.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-flash-001 -label: - en_US: Gemini 1.5 Flash 001 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 1048576 
-parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. - default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-002.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-002.yaml deleted file mode 100644 index 2ff70564b2a951..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-002.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-flash-002 -label: - en_US: Gemini 1.5 Flash 002 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. - default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0924.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0924.yaml deleted file mode 100644 index 2aea8149f4c794..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0924.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-flash-8b-exp-0924 -label: - en_US: Gemini 1.5 Flash 8B 0924 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash.yaml deleted file mode 100644 index dfd55c3a949c97..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-flash -label: - en_US: Gemini 1.5 Flash -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-001.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-001.yaml deleted file mode 100644 index a1feff171d48c2..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-001.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-pro-001 -label: - en_US: Gemini 1.5 Pro 001 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 2097152 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-002.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-002.yaml deleted file mode 100644 index 9ae07a06c5118b..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-002.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-pro-002 -label: - en_US: Gemini 1.5 Pro 002 -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 2097152 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro.yaml deleted file mode 100644 index bdd70b34a21dc5..00000000000000 --- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro.yaml +++ /dev/null @@ -1,48 +0,0 @@ -model: gemini-1.5-pro -label: - en_US: Gemini 1.5 Pro -model_type: llm -features: - - agent-thought - - vision - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 2097152 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - zh_Hans: 取样数量 - en_US: Top k - type: int - help: - zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 - en_US: Only sample from the top K options for each subsequent token. - required: false - - name: max_tokens_to_sample - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 - - name: response_format - use_template: response_format - - name: stream - label: - zh_Hans: 流式输出 - en_US: Stream - type: boolean - help: - zh_Hans: 流式输出允许模型在生成文本的过程中逐步返回结果,而不是一次性生成全部结果后再返回。 - en_US: Streaming output allows the model to return results incrementally as it generates text, rather than generating all the results at once. 
- default: false -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-11b-text-preview.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-3.2-11b-text-preview.yaml deleted file mode 100644 index 019d45372361d3..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-11b-text-preview.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: llama-3.2-11b-text-preview -label: - zh_Hans: Llama 3.2 11B Text (Preview) - en_US: Llama 3.2 11B Text (Preview) -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.05' - output: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-1b-preview.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-3.2-1b-preview.yaml deleted file mode 100644 index a44e4ff508eb82..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-1b-preview.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: llama-3.2-1b-preview -label: - zh_Hans: Llama 3.2 1B Text (Preview) - en_US: Llama 3.2 1B Text (Preview) -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.05' - output: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-3b-preview.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-3.2-3b-preview.yaml deleted file mode 100644 index f2fdd0a05e027a..00000000000000 --- 
a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-3b-preview.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: llama-3.2-3b-preview -label: - zh_Hans: Llama 3.2 3B Text (Preview) - en_US: Llama 3.2 3B Text (Preview) -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.05' - output: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-90b-text-preview.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-3.2-90b-text-preview.yaml deleted file mode 100644 index 3b34e7c07996bd..00000000000000 --- a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-90b-text-preview.yaml +++ /dev/null @@ -1,25 +0,0 @@ -model: llama-3.2-90b-text-preview -label: - zh_Hans: Llama 3.2 90B Text (Preview) - en_US: Llama 3.2 90B Text (Preview) -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: max_tokens - use_template: max_tokens - default: 512 - min: 1 - max: 8192 -pricing: - input: '0.05' - output: '0.1' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/huggingface_hub/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/huggingface_hub/text_embedding/text_embedding.py deleted file mode 100644 index b2e6d1b6520c72..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_hub/text_embedding/text_embedding.py +++ /dev/null @@ -1,189 +0,0 @@ -import json -import time -from typing import Optional - -import numpy as np -import requests -from huggingface_hub import HfApi, InferenceClient - -from core.embedding.embedding_constant 
import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType, PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.huggingface_hub._common import _CommonHuggingfaceHub - -HUGGINGFACE_ENDPOINT_API = "https://api.endpoints.huggingface.cloud/v2/endpoint/" - - -class HuggingfaceHubTextEmbeddingModel(_CommonHuggingfaceHub, TextEmbeddingModel): - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - client = InferenceClient(token=credentials["huggingfacehub_api_token"]) - - execute_model = model - - if credentials["huggingfacehub_api_type"] == "inference_endpoints": - execute_model = credentials["huggingfacehub_endpoint_url"] - - output = client.post( - json={"inputs": texts, "options": {"wait_for_model": False, "use_cache": False}}, model=execute_model - ) - - embeddings = json.loads(output.decode()) - - tokens = self.get_num_tokens(model, credentials, texts) - usage = self._calc_response_usage(model, credentials, tokens) - - return TextEmbeddingResult(embeddings=self._mean_pooling(embeddings), usage=usage, model=model) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - num_tokens = 0 - for text in texts: - num_tokens += self._get_num_tokens_by_gpt2(text) - return 
num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - try: - if "huggingfacehub_api_type" not in credentials: - raise CredentialsValidateFailedError("Huggingface Hub Endpoint Type must be provided.") - - if "huggingfacehub_api_token" not in credentials: - raise CredentialsValidateFailedError("Huggingface Hub API Token must be provided.") - - if credentials["huggingfacehub_api_type"] == "inference_endpoints": - if "huggingface_namespace" not in credentials: - raise CredentialsValidateFailedError( - "Huggingface Hub User Name / Organization Name must be provided." - ) - - if "huggingfacehub_endpoint_url" not in credentials: - raise CredentialsValidateFailedError("Huggingface Hub Endpoint URL must be provided.") - - if "task_type" not in credentials: - raise CredentialsValidateFailedError("Huggingface Hub Task Type must be provided.") - - if credentials["task_type"] != "feature-extraction": - raise CredentialsValidateFailedError("Huggingface Hub Task Type is invalid.") - - self._check_endpoint_url_model_repository_name(credentials, model) - - model = credentials["huggingfacehub_endpoint_url"] - - elif credentials["huggingfacehub_api_type"] == "hosted_inference_api": - self._check_hosted_model_task_type(credentials["huggingfacehub_api_token"], model) - else: - raise CredentialsValidateFailedError("Huggingface Hub Endpoint Type is invalid.") - - client = InferenceClient(token=credentials["huggingfacehub_api_token"]) - client.feature_extraction(text="hello world", model=model) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TEXT_EMBEDDING, - model_properties={"context_size": 10000, "max_chunks": 1}, - ) - return entity - - # 
https://huggingface.co/docs/api-inference/detailed_parameters#feature-extraction-task - # Returned values are a list of floats, or a list[list[floats]] - # (depending on if you sent a string or a list of string, - # and if the automatic reduction, usually mean_pooling for instance was applied for you or not. - # This should be explained on the model's README.) - @staticmethod - def _mean_pooling(embeddings: list) -> list[float]: - # If automatic reduction by giving model, no need to mean_pooling. - # For example one: List[List[float]] - if not isinstance(embeddings[0][0], list): - return embeddings - - # For example two: List[List[List[float]]], need to mean_pooling. - sentence_embeddings = [np.mean(embedding[0], axis=0).tolist() for embedding in embeddings] - return sentence_embeddings - - @staticmethod - def _check_hosted_model_task_type(huggingfacehub_api_token: str, model_name: str) -> None: - hf_api = HfApi(token=huggingfacehub_api_token) - model_info = hf_api.model_info(repo_id=model_name) - - try: - if not model_info: - raise ValueError(f"Model {model_name} not found.") - - if "inference" in model_info.cardData and not model_info.cardData["inference"]: - raise ValueError(f"Inference API has been turned off for this model {model_name}.") - - valid_tasks = "feature-extraction" - if model_info.pipeline_tag not in valid_tasks: - raise ValueError(f"Model {model_name} is not a valid task, must be one of {valid_tasks}.") - except Exception as e: - raise CredentialsValidateFailedError(f"{str(e)}") - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - 
latency=time.perf_counter() - self.started_at, - ) - - return usage - - @staticmethod - def _check_endpoint_url_model_repository_name(credentials: dict, model_name: str): - try: - url = f'{HUGGINGFACE_ENDPOINT_API}{credentials["huggingface_namespace"]}' - headers = { - "Authorization": f'Bearer {credentials["huggingfacehub_api_token"]}', - "Content-Type": "application/json", - } - - response = requests.get(url=url, headers=headers) - - if response.status_code != 200: - raise ValueError("User Name or Organization Name is invalid.") - - model_repository_name = "" - - for item in response.json().get("items", []): - if item.get("status", {}).get("url") == credentials["huggingfacehub_endpoint_url"]: - model_repository_name = item.get("model", {}).get("repository") - break - - if model_repository_name != model_name: - raise ValueError( - f"Model Name {model_name} is invalid. Please check it on the inference endpoints console." - ) - - except Exception as e: - raise ValueError(str(e)) diff --git a/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py deleted file mode 100644 index b8ff3ca549a63b..00000000000000 --- a/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py +++ /dev/null @@ -1,209 +0,0 @@ -import time -from typing import Optional - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType, PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from 
core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.huggingface_tei.tei_helper import TeiHelper - - -class HuggingfaceTeiTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for Text Embedding Inference text embedding model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - credentials should be like: - { - 'server_url': 'server url', - 'model_uid': 'model uid', - } - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - server_url = credentials["server_url"] - - server_url = server_url.removesuffix("/") - - # get model properties - context_size = self._get_context_size(model, credentials) - max_chunks = self._get_max_chunks(model, credentials) - - inputs = [] - indices = [] - used_tokens = 0 - - # get tokenized results from TEI - batched_tokenize_result = TeiHelper.invoke_tokenize(server_url, texts) - - for i, (text, tokenize_result) in enumerate(zip(texts, batched_tokenize_result)): - # Check if the number of tokens is larger than the context size - num_tokens = len(tokenize_result) - - if num_tokens >= context_size: - # Find the best cutoff point - pre_special_token_count = 0 - for token in tokenize_result: - if token["special"]: - pre_special_token_count += 1 - else: - break - rest_special_token_count = ( - len([token for token in tokenize_result if token["special"]]) - pre_special_token_count - ) - - # Calculate the cutoff point, leave 20 extra space to avoid exceeding the limit - token_cutoff = context_size - rest_special_token_count - 20 - - # Find the cutoff 
index - cutpoint_token = tokenize_result[token_cutoff] - cutoff = cutpoint_token["start"] - - inputs.append(text[0:cutoff]) - else: - inputs.append(text) - indices += [i] - - batched_embeddings = [] - _iter = range(0, len(inputs), max_chunks) - - try: - used_tokens = 0 - for i in _iter: - iter_texts = inputs[i : i + max_chunks] - results = TeiHelper.invoke_embeddings(server_url, iter_texts) - embeddings = results["data"] - embeddings = [embedding["embedding"] for embedding in embeddings] - batched_embeddings.extend(embeddings) - - usage = results["usage"] - used_tokens += usage["total_tokens"] - except RuntimeError as e: - raise InvokeServerUnavailableError(str(e)) - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=used_tokens) - - result = TextEmbeddingResult(model=model, embeddings=batched_embeddings, usage=usage) - - return result - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - num_tokens = 0 - server_url = credentials["server_url"] - - server_url = server_url.removesuffix("/") - - batch_tokens = TeiHelper.invoke_tokenize(server_url, texts) - num_tokens = sum(len(tokens) for tokens in batch_tokens) - return num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - server_url = credentials["server_url"] - extra_args = TeiHelper.get_tei_extra_parameter(server_url, model) - print(extra_args) - if extra_args.model_type != "embedding": - raise CredentialsValidateFailedError("Current model is not a embedding model") - - credentials["context_size"] = extra_args.max_input_length - credentials["max_chunks"] = extra_args.max_client_batch_size - self._invoke(model=model, 
credentials=credentials, texts=["ping"]) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [KeyError], - } - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TEXT_EMBEDDING, - model_properties={ - ModelPropertyKey.MAX_CHUNKS: int(credentials.get("max_chunks", 1)), - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", 512)), - }, - parameter_rules=[], - ) - - return entity diff --git a/api/core/model_runtime/model_providers/hunyuan/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/hunyuan/text_embedding/text_embedding.py deleted file mode 100644 index 75701ebc54a749..00000000000000 --- 
a/api/core/model_runtime/model_providers/hunyuan/text_embedding/text_embedding.py +++ /dev/null @@ -1,169 +0,0 @@ -import json -import logging -import time -from typing import Optional - -from tencentcloud.common import credential -from tencentcloud.common.exception import TencentCloudSDKException -from tencentcloud.common.profile.client_profile import ClientProfile -from tencentcloud.common.profile.http_profile import HttpProfile -from tencentcloud.hunyuan.v20230901 import hunyuan_client, models - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel - -logger = logging.getLogger(__name__) - - -class HunyuanTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for Hunyuan text embedding model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - - if model != "hunyuan-embedding": - raise ValueError("Invalid model name") - - client = self._setup_hunyuan_client(credentials) - - embeddings = [] - token_usage = 0 - - for input in texts: - request = models.GetEmbeddingRequest() - params = {"Input": input} - request.from_json_string(json.dumps(params)) - response = client.GetEmbedding(request) - usage = response.Usage.TotalTokens - - embeddings.extend([data.Embedding for data in response.Data]) - token_usage += usage - - result = TextEmbeddingResult( - model=model, - embeddings=embeddings, - usage=self._calc_response_usage(model=model, credentials=credentials, tokens=token_usage), - ) - - return result - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate credentials - """ - try: - client = self._setup_hunyuan_client(credentials) - - req = models.ChatCompletionsRequest() - params = { - "Model": model, - "Messages": [{"Role": "user", "Content": "hello"}], - "TopP": 1, - "Temperature": 0, - "Stream": False, - } - req.from_json_string(json.dumps(params)) - client.ChatCompletions(req) - except Exception as e: - raise CredentialsValidateFailedError(f"Credentials validation failed: {e}") - - def _setup_hunyuan_client(self, credentials): - secret_id = credentials["secret_id"] - secret_key = credentials["secret_key"] - cred = credential.Credential(secret_id, secret_key) - httpProfile = HttpProfile() - httpProfile.endpoint = "hunyuan.tencentcloudapi.com" - clientProfile = ClientProfile() - clientProfile.httpProfile = httpProfile - client = 
hunyuan_client.HunyuanClient(cred, "", clientProfile) - return client - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeError: [TencentCloudSDKException], - } - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - # client = self._setup_hunyuan_client(credentials) - - num_tokens = 0 - for text in texts: - num_tokens += self._get_num_tokens_by_gpt2(text) - # use client.GetTokenCount to get num tokens - # request = models.GetTokenCountRequest() - # params = { - # "Prompt": text - # } - # request.from_json_string(json.dumps(params)) - # response = client.GetTokenCount(request) - # num_tokens += response.TokenCount - - return num_tokens diff --git a/api/core/model_runtime/model_providers/jina/jina.yaml b/api/core/model_runtime/model_providers/jina/jina.yaml deleted file mode 100644 index 970b22965b5d29..00000000000000 --- a/api/core/model_runtime/model_providers/jina/jina.yaml +++ /dev/null @@ -1,69 +0,0 @@ -provider: jina -label: - en_US: Jina AI -description: - en_US: Embedding and Rerank Model Supported -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.svg -background: "#EFFDFD" -help: - title: - en_US: Get your API key from Jina AI - zh_Hans: 从 Jina AI 获取 API Key - url: - en_US: https://jina.ai/ -supported_model_types: - - text-embedding - - rerank -configurate_methods: - - predefined-model - - customizable-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 
API Key - en_US: Enter your API Key - - variable: base_url - label: - zh_Hans: 服务器 URL - en_US: Base URL - type: text-input - required: true - placeholder: - zh_Hans: Base URL, e.g. https://api.jina.ai/v1 - en_US: Base URL, e.g. https://api.jina.ai/v1 - default: 'https://api.jina.ai/v1' - - variable: context_size - label: - zh_Hans: 上下文大小 - en_US: Context size - placeholder: - zh_Hans: 输入上下文大小 - en_US: Enter context size - required: false - type: text-input - default: '8192' diff --git a/api/core/model_runtime/model_providers/jina/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/jina/text_embedding/text_embedding.py deleted file mode 100644 index b39712951256c8..00000000000000 --- a/api/core/model_runtime/model_providers/jina/text_embedding/text_embedding.py +++ /dev/null @@ -1,199 +0,0 @@ -import time -from json import JSONDecodeError, dumps -from typing import Optional - -from requests import post - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType, PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.jina.text_embedding.jina_tokenizer import JinaTokenizer - - -class JinaTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for Jina text embedding model. 
- """ - - api_base: str = "https://api.jina.ai/v1" - - def _to_payload(self, model: str, texts: list[str], credentials: dict, input_type: EmbeddingInputType) -> dict: - """ - Parse model credentials - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: parsed credentials - """ - - def transform_jina_input_text(model, text): - if model == "jina-clip-v1": - return {"text": text} - return text - - data = {"model": model, "input": [transform_jina_input_text(model, text) for text in texts]} - - # model specific parameters - if model == "jina-embeddings-v3": - # set `task` type according to input type for the best performance - data["task"] = "retrieval.query" if input_type == EmbeddingInputType.QUERY else "retrieval.passage" - - return data - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - api_key = credentials["api_key"] - if not api_key: - raise CredentialsValidateFailedError("api_key is required") - - base_url = credentials.get("base_url", self.api_base) - base_url = base_url.removesuffix("/") - - url = base_url + "/embeddings" - headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"} - - data = self._to_payload(model=model, texts=texts, credentials=credentials, input_type=input_type) - - try: - response = post(url, headers=headers, data=dumps(data)) - except Exception as e: - raise InvokeConnectionError(str(e)) - - if response.status_code != 200: - try: - resp = response.json() - msg = resp["detail"] - if response.status_code == 401: - raise InvokeAuthorizationError(msg) - elif response.status_code == 
429: - raise InvokeRateLimitError(msg) - elif response.status_code == 500: - raise InvokeServerUnavailableError(msg) - else: - raise InvokeBadRequestError(msg) - except JSONDecodeError as e: - raise InvokeServerUnavailableError( - f"Failed to convert response to json: {e} with text: {response.text}" - ) - - try: - resp = response.json() - embeddings = resp["data"] - usage = resp["usage"] - except Exception as e: - raise InvokeServerUnavailableError(f"Failed to convert response to json: {e} with text: {response.text}") - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=usage["total_tokens"]) - - result = TextEmbeddingResult( - model=model, embeddings=[[float(data) for data in x["embedding"]] for x in embeddings], usage=usage - ) - - return result - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - num_tokens = 0 - for text in texts: - # use JinaTokenizer to get num tokens - num_tokens += JinaTokenizer.get_num_tokens(text) - return num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except Exception as e: - raise CredentialsValidateFailedError(f"Credentials validation failed: {e}") - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [KeyError, InvokeBadRequestError], - } - - def _calc_response_usage(self, 
model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.TEXT_EMBEDDING, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size"))}, - ) - - return entity diff --git a/api/core/model_runtime/model_providers/localai/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/localai/text_embedding/text_embedding.py deleted file mode 100644 index ab8ca76c2f0971..00000000000000 --- a/api/core/model_runtime/model_providers/localai/text_embedding/text_embedding.py +++ /dev/null @@ -1,189 +0,0 @@ -import time -from json import JSONDecodeError, dumps -from typing import Optional - -from requests import post -from yarl import URL - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType, PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke 
import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel - - -class LocalAITextEmbeddingModel(TextEmbeddingModel): - """ - Model class for LocalAI text embedding model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - if len(texts) != 1: - raise InvokeBadRequestError("Only one text is supported") - - server_url = credentials["server_url"] - model_name = model - if not server_url: - raise CredentialsValidateFailedError("server_url is required") - if not model_name: - raise CredentialsValidateFailedError("model_name is required") - - url = server_url - headers = {"Authorization": "Bearer 123", "Content-Type": "application/json"} - - data = {"model": model_name, "input": texts[0]} - - try: - response = post(str(URL(url) / "embeddings"), headers=headers, data=dumps(data), timeout=10) - except Exception as e: - raise InvokeConnectionError(str(e)) - - if response.status_code != 200: - try: - resp = response.json() - code = resp["error"]["code"] - msg = resp["error"]["message"] - if code == 500: - raise InvokeServerUnavailableError(msg) - - if response.status_code == 401: - raise InvokeAuthorizationError(msg) - elif response.status_code == 429: - raise InvokeRateLimitError(msg) - elif response.status_code == 500: - raise InvokeServerUnavailableError(msg) - else: - raise InvokeError(msg) - except JSONDecodeError 
as e: - raise InvokeServerUnavailableError( - f"Failed to convert response to json: {e} with text: {response.text}" - ) - - try: - resp = response.json() - embeddings = resp["data"] - usage = resp["usage"] - except Exception as e: - raise InvokeServerUnavailableError(f"Failed to convert response to json: {e} with text: {response.text}") - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=usage["total_tokens"]) - - result = TextEmbeddingResult( - model=model, embeddings=[[float(data) for data in x["embedding"]] for x in embeddings], usage=usage - ) - - return result - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - num_tokens = 0 - for text in texts: - # use GPT2Tokenizer to get num tokens - num_tokens += self._get_num_tokens_by_gpt2(text) - return num_tokens - - def _get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - Get customizable model schema - - :param model: model name - :param credentials: model credentials - :return: model schema - """ - return AIModelEntity( - model=model, - label=I18nObject(zh_Hans=model, en_US=model), - model_type=ModelType.TEXT_EMBEDDING, - features=[], - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", "512")), - ModelPropertyKey.MAX_CHUNKS: 1, - }, - parameter_rules=[], - ) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except InvokeAuthorizationError: - raise CredentialsValidateFailedError("Invalid credentials") - except InvokeConnectionError as 
e: - raise CredentialsValidateFailedError(f"Invalid credentials: {e}") - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [KeyError], - } - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/minimax/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/minimax/text_embedding/text_embedding.py deleted file mode 100644 index 74d2a221d1b57e..00000000000000 --- a/api/core/model_runtime/model_providers/minimax/text_embedding/text_embedding.py +++ /dev/null @@ -1,184 +0,0 @@ -import time -from json import dumps -from typing import Optional - -from requests import post - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - 
InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.minimax.llm.errors import ( - BadRequestError, - InsufficientAccountBalanceError, - InternalServerError, - InvalidAPIKeyError, - InvalidAuthenticationError, - RateLimitReachedError, -) - - -class MinimaxTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for Minimax text embedding model. - """ - - api_base: str = "https://api.minimax.chat/v1/embeddings" - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - api_key = credentials["minimax_api_key"] - group_id = credentials["minimax_group_id"] - if model != "embo-01": - raise ValueError("Invalid model name") - if not api_key: - raise CredentialsValidateFailedError("api_key is required") - url = f"{self.api_base}?GroupId={group_id}" - headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"} - - data = {"model": "embo-01", "texts": texts, "type": "db"} - - try: - response = post(url, headers=headers, data=dumps(data)) - except Exception as e: - raise InvokeConnectionError(str(e)) - - if response.status_code != 200: - raise InvokeServerUnavailableError(response.text) - - try: - resp = response.json() - # check if there is an error - if resp["base_resp"]["status_code"] != 0: - code = resp["base_resp"]["status_code"] - msg = resp["base_resp"]["status_msg"] - self._handle_error(code, msg) - - embeddings = resp["vectors"] - total_tokens = 
resp["total_tokens"] - except InvalidAuthenticationError: - raise InvalidAPIKeyError("Invalid api key") - except KeyError as e: - raise InternalServerError(f"Failed to convert response to json: {e} with text: {response.text}") - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=total_tokens) - - result = TextEmbeddingResult(model=model, embeddings=embeddings, usage=usage) - - return result - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - num_tokens = 0 - for text in texts: - # use MinimaxTokenizer to get num tokens - num_tokens += self._get_num_tokens_by_gpt2(text) - return num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except InvalidAPIKeyError: - raise CredentialsValidateFailedError("Invalid api key") - - def _handle_error(self, code: int, msg: str): - if code in {1000, 1001}: - raise InternalServerError(msg) - elif code == 1002: - raise RateLimitReachedError(msg) - elif code == 1004: - raise InvalidAuthenticationError(msg) - elif code == 1008: - raise InsufficientAccountBalanceError(msg) - elif code == 2013: - raise BadRequestError(msg) - else: - raise InternalServerError(msg) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [InternalServerError], - InvokeRateLimitError: [RateLimitReachedError], - InvokeAuthorizationError: [ - InvalidAuthenticationError, - InsufficientAccountBalanceError, - InvalidAPIKeyError, - ], - InvokeBadRequestError: [BadRequestError, KeyError], - } - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/mixedbread/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/mixedbread/text_embedding/text_embedding.py deleted file mode 100644 index 68b7b448bfec75..00000000000000 --- a/api/core/model_runtime/model_providers/mixedbread/text_embedding/text_embedding.py +++ /dev/null @@ -1,170 +0,0 @@ -import time -from json import JSONDecodeError, dumps -from typing import Optional - -import requests - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType, PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - 
InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel - - -class MixedBreadTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for MixedBread text embedding model. - """ - - api_base: str = "https://api.mixedbread.ai/v1" - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - api_key = credentials["api_key"] - if not api_key: - raise CredentialsValidateFailedError("api_key is required") - - base_url = credentials.get("base_url", self.api_base) - base_url = base_url.removesuffix("/") - - url = base_url + "/embeddings" - headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"} - - data = {"model": model, "input": texts} - - try: - response = requests.post(url, headers=headers, data=dumps(data)) - except Exception as e: - raise InvokeConnectionError(str(e)) - - if response.status_code != 200: - try: - resp = response.json() - msg = resp["detail"] - if response.status_code == 401: - raise InvokeAuthorizationError(msg) - elif response.status_code == 429: - raise InvokeRateLimitError(msg) - elif response.status_code == 500: - raise InvokeServerUnavailableError(msg) - else: - raise InvokeBadRequestError(msg) - except JSONDecodeError as e: - raise InvokeServerUnavailableError( - f"Failed to convert response to json: {e} with text: {response.text}" - ) - - try: - resp = response.json() - embeddings = resp["data"] - usage = 
resp["usage"] - except Exception as e: - raise InvokeServerUnavailableError(f"Failed to convert response to json: {e} with text: {response.text}") - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=usage["total_tokens"]) - - result = TextEmbeddingResult( - model=model, embeddings=[[float(data) for data in x["embedding"]] for x in embeddings], usage=usage - ) - - return result - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - return sum(self._get_num_tokens_by_gpt2(text) for text in texts) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except Exception as e: - raise CredentialsValidateFailedError(f"Credentials validation failed: {e}") - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [KeyError, InvokeBadRequestError], - } - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - 
unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.TEXT_EMBEDDING, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", "512"))}, - ) - - return entity diff --git a/api/core/model_runtime/model_providers/model_provider_factory.py b/api/core/model_runtime/model_providers/model_provider_factory.py index 1370676f0e1d85..f2c6d6a6502e62 100644 --- a/api/core/model_runtime/model_providers/model_provider_factory.py +++ b/api/core/model_runtime/model_providers/model_provider_factory.py @@ -13,7 +13,6 @@ from core.model_runtime.model_providers.__base.moderation_model import ModerationModel from core.model_runtime.model_providers.__base.rerank_model import RerankModel from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel -from core.model_runtime.model_providers.__base.text2img_model import Text2ImageModel from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel from core.model_runtime.model_providers.__base.tts_model import TTSModel from core.model_runtime.schema_validators.model_credential_schema_validator import ModelCredentialSchemaValidator @@ -284,8 +283,6 @@ def get_model_type_instance(self, provider: str, model_type: ModelType) -> AIMod return ModerationModel(**init_params) elif model_type == ModelType.TTS: return TTSModel(**init_params) - elif model_type == ModelType.TEXT2IMG: - return Text2ImageModel(**init_params) def get_provider_icon(self, provider: str, icon_type: str, lang: str) -> bytes: """ 
diff --git a/api/core/model_runtime/model_providers/nomic/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/nomic/text_embedding/text_embedding.py deleted file mode 100644 index 857dfb5f41e2f5..00000000000000 --- a/api/core/model_runtime/model_providers/nomic/text_embedding/text_embedding.py +++ /dev/null @@ -1,165 +0,0 @@ -import time -from functools import wraps -from typing import Optional - -from nomic import embed -from nomic import login as nomic_login - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import ( - EmbeddingUsage, - TextEmbeddingResult, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import ( - TextEmbeddingModel, -) -from core.model_runtime.model_providers.nomic._common import _CommonNomic - - -def nomic_login_required(func): - @wraps(func) - def wrapper(*args, **kwargs): - try: - if not kwargs.get("credentials"): - raise ValueError("missing credentials parameters") - credentials = kwargs.get("credentials") - if "nomic_api_key" not in credentials: - raise ValueError("missing nomic_api_key in credentials parameters") - # nomic login - nomic_login(credentials["nomic_api_key"]) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - return func(*args, **kwargs) - - return wrapper - - -class NomicTextEmbeddingModel(_CommonNomic, TextEmbeddingModel): - """ - Model class for nomic text embedding model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - embeddings, prompt_tokens, total_tokens = self.embed_text( - model=model, - credentials=credentials, - texts=texts, - ) - - # calc usage - usage = self._calc_response_usage( - model=model, credentials=credentials, tokens=prompt_tokens, total_tokens=total_tokens - ) - return TextEmbeddingResult(embeddings=embeddings, usage=usage, model=model) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - return sum(self._get_num_tokens_by_gpt2(text) for text in texts) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # call embedding model - self.embed_text(model=model, credentials=credentials, texts=["ping"]) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @nomic_login_required - def embed_text(self, model: str, credentials: dict, texts: list[str]) -> tuple[list[list[float]], int, int]: - """Call out to Nomic's embedding endpoint. - - Args: - model: The model to use for embedding. - texts: The list of texts to embed. - - Returns: - List of embeddings, one for each text, and tokens usage. 
- """ - embeddings: list[list[float]] = [] - prompt_tokens = 0 - total_tokens = 0 - - response = embed.text( - model=model, - texts=texts, - ) - - if not (response and "embeddings" in response): - raise ValueError("Embedding data is missing in the response.") - - if not (response and "usage" in response): - raise ValueError("Response usage is missing.") - - if "prompt_tokens" not in response["usage"]: - raise ValueError("Response usage does not contain prompt tokens.") - - if "total_tokens" not in response["usage"]: - raise ValueError("Response usage does not contain total tokens.") - - embeddings = [list(map(float, e)) for e in response["embeddings"]] - total_tokens = response["usage"]["total_tokens"] - prompt_tokens = response["usage"]["prompt_tokens"] - return embeddings, prompt_tokens, total_tokens - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int, total_tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: prompt tokens - :param total_tokens: total tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, - credentials=credentials, - price_type=PriceType.INPUT, - tokens=tokens, - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=total_tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/nvidia/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/nvidia/text_embedding/text_embedding.py deleted file mode 100644 index 936ceb8dd2c60e..00000000000000 --- a/api/core/model_runtime/model_providers/nvidia/text_embedding/text_embedding.py +++ /dev/null @@ -1,158 +0,0 @@ -import time -from json import 
JSONDecodeError, dumps -from typing import Optional - -from requests import post - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel - - -class NvidiaTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for Nvidia text embedding model. - """ - - api_base: str = "https://ai.api.nvidia.com/v1/retrieval/nvidia/embeddings" - models: list[str] = ["NV-Embed-QA"] - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - api_key = credentials["api_key"] - if model not in self.models: - raise InvokeBadRequestError("Invalid model name") - if not api_key: - raise CredentialsValidateFailedError("api_key is required") - url = self.api_base - headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"} - - data = {"model": model, "input": texts[0], "input_type": "query"} - - try: - response = post(url, headers=headers, data=dumps(data)) - except Exception as e: - raise InvokeConnectionError(str(e)) - - if response.status_code != 200: - try: - resp = response.json() - msg = resp["detail"] - if response.status_code == 401: - raise 
InvokeAuthorizationError(msg) - elif response.status_code == 429: - raise InvokeRateLimitError(msg) - elif response.status_code == 500: - raise InvokeServerUnavailableError(msg) - else: - raise InvokeError(msg) - except JSONDecodeError as e: - raise InvokeServerUnavailableError( - f"Failed to convert response to json: {e} with text: {response.text}" - ) - - try: - resp = response.json() - embeddings = resp["data"] - usage = resp["usage"] - except Exception as e: - raise InvokeServerUnavailableError(f"Failed to convert response to json: {e} with text: {response.text}") - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=usage["total_tokens"]) - - result = TextEmbeddingResult( - model=model, embeddings=[[float(data) for data in x["embedding"]] for x in embeddings], usage=usage - ) - - return result - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - num_tokens = 0 - for text in texts: - # use JinaTokenizer to get num tokens - num_tokens += self._get_num_tokens_by_gpt2(text) - return num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except InvokeAuthorizationError: - raise CredentialsValidateFailedError("Invalid api key") - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [KeyError], - } - - def 
_calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/oci/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/oci/text_embedding/text_embedding.py deleted file mode 100644 index 4de9296ccaa92d..00000000000000 --- a/api/core/model_runtime/model_providers/oci/text_embedding/text_embedding.py +++ /dev/null @@ -1,224 +0,0 @@ -import base64 -import copy -import time -from typing import Optional - -import numpy as np -import oci - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel - -request_template = { - "compartmentId": "", - "servingMode": {"modelId": "cohere.embed-english-light-v3.0", "servingType": "ON_DEMAND"}, - "truncate": "NONE", - "inputs": [""], -} -oci_config_template = { - "user": "", - 
"fingerprint": "", - "tenancy": "", - "region": "", - "compartment_id": "", - "key_content": "", -} - - -class OCITextEmbeddingModel(TextEmbeddingModel): - """ - Model class for Cohere text embedding model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - # get model properties - context_size = self._get_context_size(model, credentials) - max_chunks = self._get_max_chunks(model, credentials) - - inputs = [] - indices = [] - used_tokens = 0 - - for i, text in enumerate(texts): - # Here token count is only an approximation based on the GPT2 tokenizer - num_tokens = self._get_num_tokens_by_gpt2(text) - - if num_tokens >= context_size: - cutoff = int(len(text) * (np.floor(context_size / num_tokens))) - # if num tokens is larger than context length, only use the start - inputs.append(text[0:cutoff]) - else: - inputs.append(text) - indices += [i] - - batched_embeddings = [] - _iter = range(0, len(inputs), max_chunks) - - for i in _iter: - # call embedding model - embeddings_batch, embedding_used_tokens = self._embedding_invoke( - model=model, credentials=credentials, texts=inputs[i : i + max_chunks] - ) - - used_tokens += embedding_used_tokens - batched_embeddings += embeddings_batch - - # calc usage - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=used_tokens) - - return TextEmbeddingResult(embeddings=batched_embeddings, usage=usage, model=model) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials 
- :param texts: texts to embed - :return: - """ - return sum(self._get_num_tokens_by_gpt2(text) for text in texts) - - def get_num_characters(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - characters = 0 - for text in texts: - characters += len(text) - return characters - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # call embedding model - self._embedding_invoke(model=model, credentials=credentials, texts=["ping"]) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _embedding_invoke(self, model: str, credentials: dict, texts: list[str]) -> tuple[list[list[float]], int]: - """ - Invoke embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: embeddings and used tokens - """ - - # oci - # initialize client - oci_config = copy.deepcopy(oci_config_template) - if "oci_config_content" in credentials: - oci_config_content = base64.b64decode(credentials.get("oci_config_content")).decode("utf-8") - config_items = oci_config_content.split("/") - if len(config_items) != 5: - raise CredentialsValidateFailedError( - "oci_config_content should be base64.b64encode(" - "'user_ocid/fingerprint/tenancy_ocid/region/compartment_ocid'.encode('utf-8'))" - ) - oci_config["user"] = config_items[0] - oci_config["fingerprint"] = config_items[1] - oci_config["tenancy"] = config_items[2] - oci_config["region"] = config_items[3] - oci_config["compartment_id"] = config_items[4] - else: - raise CredentialsValidateFailedError("need to set oci_config_content in credentials ") - if "oci_key_content" in credentials: - oci_key_content = 
base64.b64decode(credentials.get("oci_key_content")).decode("utf-8") - oci_config["key_content"] = oci_key_content.encode(encoding="utf-8") - else: - raise CredentialsValidateFailedError("need to set oci_config_content in credentials ") - # oci_config = oci.config.from_file('~/.oci/config', credentials.get('oci_api_profile')) - compartment_id = oci_config["compartment_id"] - client = oci.generative_ai_inference.GenerativeAiInferenceClient(config=oci_config) - # call embedding model - request_args = copy.deepcopy(request_template) - request_args["compartmentId"] = compartment_id - request_args["servingMode"]["modelId"] = model - request_args["inputs"] = texts - response = client.embed_text(request_args) - return response.data.embeddings, self.get_num_characters(model=model, credentials=credentials, texts=texts) - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [KeyError], - } diff --git a/api/core/model_runtime/model_providers/ollama/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/ollama/text_embedding/text_embedding.py deleted file mode 100644 index 5cf3f1c6fa87f3..00000000000000 --- a/api/core/model_runtime/model_providers/ollama/text_embedding/text_embedding.py +++ /dev/null @@ -1,211 +0,0 @@ -import json -import logging -import time -from decimal import Decimal -from typing import Optional -from urllib.parse import urljoin - -import numpy as np -import requests - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - ModelPropertyKey, - ModelType, - PriceConfig, - PriceType, -) -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel - -logger = logging.getLogger(__name__) - - -class OllamaEmbeddingModel(TextEmbeddingModel): - """ - Model class for an Ollama text embedding model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - - # Prepare headers and payload for the request - headers = {"Content-Type": "application/json"} - - endpoint_url = credentials.get("base_url") - if not endpoint_url.endswith("/"): - endpoint_url += "/" - - endpoint_url = urljoin(endpoint_url, "api/embed") - - # get model properties - context_size = self._get_context_size(model, credentials) - - inputs = [] - used_tokens = 0 - - for text in texts: - # Here token count is only an approximation based on the GPT2 tokenizer - num_tokens = self._get_num_tokens_by_gpt2(text) - - if num_tokens >= context_size: - cutoff = int(np.floor(len(text) * (context_size / num_tokens))) - # if num tokens is larger than context length, only use the start - inputs.append(text[0:cutoff]) - else: - inputs.append(text) - - # Prepare the payload for the request - payload = {"input": inputs, "model": model, "options": {"use_mmap": True}} - - # Make the request to the Ollama API - response = requests.post(endpoint_url, headers=headers, data=json.dumps(payload), timeout=(10, 300)) - - response.raise_for_status() # Raise an exception for HTTP errors - response_data = response.json() - - # Extract embeddings and used tokens from the response - embeddings = response_data["embeddings"] - embedding_used_tokens = self.get_num_tokens(model, credentials, inputs) - - used_tokens += embedding_used_tokens - - # calc usage - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=used_tokens) - - return TextEmbeddingResult(embeddings=embeddings, usage=usage, model=model) - - def 
get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Approximate number of tokens for given messages using GPT2 tokenizer - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - return sum(self._get_num_tokens_by_gpt2(text) for text in texts) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except InvokeError as ex: - raise CredentialsValidateFailedError(f"An error occurred during credentials validation: {ex.description}") - except Exception as ex: - raise CredentialsValidateFailedError(f"An error occurred during credentials validation: {str(ex)}") - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.TEXT_EMBEDDING, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size")), - ModelPropertyKey.MAX_CHUNKS: 1, - }, - parameter_rules=[], - pricing=PriceConfig( - input=Decimal(credentials.get("input_price", 0)), - unit=Decimal(credentials.get("unit", 0)), - currency=credentials.get("currency", "USD"), - ), - ) - - return entity - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, 
- total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeAuthorizationError: [ - requests.exceptions.InvalidHeader, # Missing or Invalid API Key - ], - InvokeBadRequestError: [ - requests.exceptions.HTTPError, # Invalid Endpoint URL or model name - requests.exceptions.InvalidURL, # Misconfigured request or other API error - ], - InvokeRateLimitError: [ - requests.exceptions.RetryError # Too many requests sent in a short period of time - ], - InvokeServerUnavailableError: [ - requests.exceptions.ConnectionError, # Engine Overloaded - requests.exceptions.HTTPError, # Server Error - ], - InvokeConnectionError: [ - requests.exceptions.ConnectTimeout, # Timeout - requests.exceptions.ReadTimeout, # Timeout - ], - } diff --git a/api/core/model_runtime/model_providers/openai/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/openai/text_embedding/text_embedding.py deleted file mode 100644 index 16f1a0cfa1117b..00000000000000 --- a/api/core/model_runtime/model_providers/openai/text_embedding/text_embedding.py +++ /dev/null @@ -1,203 +0,0 @@ -import base64 -import time -from typing import Optional, Union - -import numpy as np -import tiktoken -from openai import OpenAI - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, 
TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.openai._common import _CommonOpenAI - - -class OpenAITextEmbeddingModel(_CommonOpenAI, TextEmbeddingModel): - """ - Model class for OpenAI text embedding model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - # init model client - client = OpenAI(**credentials_kwargs) - - extra_model_kwargs = {} - if user: - extra_model_kwargs["user"] = user - - extra_model_kwargs["encoding_format"] = "base64" - - # get model properties - context_size = self._get_context_size(model, credentials) - max_chunks = self._get_max_chunks(model, credentials) - - embeddings: list[list[float]] = [[] for _ in range(len(texts))] - tokens = [] - indices = [] - used_tokens = 0 - - try: - enc = tiktoken.encoding_for_model(model) - except KeyError: - enc = tiktoken.get_encoding("cl100k_base") - - for i, text in enumerate(texts): - token = enc.encode(text) - for j in range(0, len(token), context_size): - tokens += [token[j : j + context_size]] - indices += [i] - - batched_embeddings = [] - _iter = range(0, len(tokens), max_chunks) - - for i in _iter: - # call embedding model - embeddings_batch, embedding_used_tokens = self._embedding_invoke( - model=model, client=client, texts=tokens[i : i + max_chunks], extra_model_kwargs=extra_model_kwargs - ) - - used_tokens += 
embedding_used_tokens - batched_embeddings += embeddings_batch - - results: list[list[list[float]]] = [[] for _ in range(len(texts))] - num_tokens_in_batch: list[list[int]] = [[] for _ in range(len(texts))] - for i in range(len(indices)): - results[indices[i]].append(batched_embeddings[i]) - num_tokens_in_batch[indices[i]].append(len(tokens[i])) - - for i in range(len(texts)): - _result = results[i] - if len(_result) == 0: - embeddings_batch, embedding_used_tokens = self._embedding_invoke( - model=model, client=client, texts="", extra_model_kwargs=extra_model_kwargs - ) - - used_tokens += embedding_used_tokens - average = embeddings_batch[0] - else: - average = np.average(_result, axis=0, weights=num_tokens_in_batch[i]) - embeddings[i] = (average / np.linalg.norm(average)).tolist() - - # calc usage - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=used_tokens) - - return TextEmbeddingResult(embeddings=embeddings, usage=usage, model=model) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - if len(texts) == 0: - return 0 - - try: - enc = tiktoken.encoding_for_model(model) - except KeyError: - enc = tiktoken.get_encoding("cl100k_base") - - total_num_tokens = 0 - for text in texts: - # calculate the number of tokens in the encoded text - tokenized_text = enc.encode(text) - total_num_tokens += len(tokenized_text) - - return total_num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - # call embedding model - 
self._embedding_invoke(model=model, client=client, texts=["ping"], extra_model_kwargs={}) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _embedding_invoke( - self, model: str, client: OpenAI, texts: Union[list[str], str], extra_model_kwargs: dict - ) -> tuple[list[list[float]], int]: - """ - Invoke embedding model - - :param model: model name - :param client: model client - :param texts: texts to embed - :param extra_model_kwargs: extra model kwargs - :return: embeddings and used tokens - """ - # call embedding model - response = client.embeddings.create( - input=texts, - model=model, - **extra_model_kwargs, - ) - - if "encoding_format" in extra_model_kwargs and extra_model_kwargs["encoding_format"] == "base64": - # decode base64 embedding - return ( - [list(np.frombuffer(base64.b64decode(data.embedding), dtype="float32")) for data in response.data], - response.usage.total_tokens, - ) - - return [data.embedding for data in response.data], response.usage.total_tokens - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/openai_api_compatible/text_embedding/text_embedding.py deleted file mode 100644 index 
64fa6aaa3c5a71..00000000000000 --- a/api/core/model_runtime/model_providers/openai_api_compatible/text_embedding/text_embedding.py +++ /dev/null @@ -1,217 +0,0 @@ -import json -import time -from decimal import Decimal -from typing import Optional -from urllib.parse import urljoin - -import numpy as np -import requests - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - ModelPropertyKey, - ModelType, - PriceConfig, - PriceType, -) -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.openai_api_compatible._common import _CommonOaiApiCompat - - -class OAICompatEmbeddingModel(_CommonOaiApiCompat, TextEmbeddingModel): - """ - Model class for an OpenAI API-compatible text embedding model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - - # Prepare headers and payload for the request - headers = {"Content-Type": "application/json"} - - api_key = credentials.get("api_key") - if api_key: - headers["Authorization"] = f"Bearer {api_key}" - - endpoint_url = credentials.get("endpoint_url") - if not endpoint_url.endswith("/"): - endpoint_url += "/" - - endpoint_url = urljoin(endpoint_url, "embeddings") - - extra_model_kwargs = {} - if user: - extra_model_kwargs["user"] = user - - extra_model_kwargs["encoding_format"] = "float" - - # get model properties - context_size = self._get_context_size(model, credentials) - max_chunks = self._get_max_chunks(model, credentials) - - inputs = [] - indices = [] - used_tokens = 0 - - for i, text in enumerate(texts): - # Here token count is only an approximation based on the GPT2 tokenizer - # TODO: Optimize for better token estimation and chunking - num_tokens = self._get_num_tokens_by_gpt2(text) - - if num_tokens >= context_size: - cutoff = int(np.floor(len(text) * (context_size / num_tokens))) - # if num tokens is larger than context length, only use the start - inputs.append(text[0:cutoff]) - else: - inputs.append(text) - indices += [i] - - batched_embeddings = [] - _iter = range(0, len(inputs), max_chunks) - - for i in _iter: - # Prepare the payload for the request - payload = {"input": inputs[i : i + max_chunks], "model": model, **extra_model_kwargs} - - # Make the request to the OpenAI API - response = requests.post(endpoint_url, headers=headers, data=json.dumps(payload), timeout=(10, 300)) - - response.raise_for_status() # 
Raise an exception for HTTP errors - response_data = response.json() - - # Extract embeddings and used tokens from the response - embeddings_batch = [data["embedding"] for data in response_data["data"]] - embedding_used_tokens = response_data["usage"]["total_tokens"] - - used_tokens += embedding_used_tokens - batched_embeddings += embeddings_batch - - # calc usage - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=used_tokens) - - return TextEmbeddingResult(embeddings=batched_embeddings, usage=usage, model=model) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Approximate number of tokens for given messages using GPT2 tokenizer - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - return sum(self._get_num_tokens_by_gpt2(text) for text in texts) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - headers = {"Content-Type": "application/json"} - - api_key = credentials.get("api_key") - - if api_key: - headers["Authorization"] = f"Bearer {api_key}" - - endpoint_url = credentials.get("endpoint_url") - if not endpoint_url.endswith("/"): - endpoint_url += "/" - - endpoint_url = urljoin(endpoint_url, "embeddings") - - payload = {"input": "ping", "model": model} - - response = requests.post(url=endpoint_url, headers=headers, data=json.dumps(payload), timeout=(10, 300)) - - if response.status_code != 200: - raise CredentialsValidateFailedError( - f"Credentials validation failed with status code {response.status_code}" - ) - - try: - json_result = response.json() - except json.JSONDecodeError as e: - raise CredentialsValidateFailedError("Credentials validation failed: JSON decode error") - - if "model" not in json_result: - raise CredentialsValidateFailedError("Credentials validation 
failed: invalid response") - except CredentialsValidateFailedError: - raise - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.TEXT_EMBEDDING, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size")), - ModelPropertyKey.MAX_CHUNKS: 1, - }, - parameter_rules=[], - pricing=PriceConfig( - input=Decimal(credentials.get("input_price", 0)), - unit=Decimal(credentials.get("unit", 0)), - currency=credentials.get("currency", "USD"), - ), - ) - - return entity - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/openllm/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/openllm/text_embedding/text_embedding.py deleted file mode 100644 index c5d43309127822..00000000000000 --- a/api/core/model_runtime/model_providers/openllm/text_embedding/text_embedding.py +++ /dev/null @@ -1,155 +0,0 @@ -import time -from json import dumps -from typing import Optional - -from requests import post -from 
requests.exceptions import ConnectionError, InvalidSchema, MissingSchema - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel - - -class OpenLLMTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for OpenLLM text embedding model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - server_url = credentials["server_url"] - if not server_url: - raise CredentialsValidateFailedError("server_url is required") - - headers = {"Content-Type": "application/json", "accept": "application/json"} - - url = f"{server_url}/v1/embeddings" - - data = texts - try: - response = post(url, headers=headers, data=dumps(data)) - except (ConnectionError, InvalidSchema, MissingSchema) as e: - # cloud not connect to the server - raise InvokeAuthorizationError(f"Invalid server URL: {e}") - except Exception as e: - raise InvokeConnectionError(str(e)) - - if response.status_code != 200: - if response.status_code == 400: - raise InvokeBadRequestError(response.text) - elif response.status_code == 404: - raise InvokeAuthorizationError(response.text) - elif 
response.status_code == 500: - raise InvokeServerUnavailableError(response.text) - - try: - resp = response.json()[0] - embeddings = resp["embeddings"] - total_tokens = resp["num_tokens"] - except KeyError as e: - raise InvokeServerUnavailableError(f"Failed to convert response to json: {e} with text: {response.text}") - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=total_tokens) - - result = TextEmbeddingResult(model=model, embeddings=embeddings, usage=usage) - - return result - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - num_tokens = 0 - for text in texts: - # use GPT2Tokenizer to get num tokens - num_tokens += self._get_num_tokens_by_gpt2(text) - return num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except InvokeAuthorizationError: - raise CredentialsValidateFailedError("Invalid server_url") - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [KeyError], - } - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/replicate/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/replicate/text_embedding/text_embedding.py deleted file mode 100644 index 9f724a77ac040b..00000000000000 --- a/api/core/model_runtime/model_providers/replicate/text_embedding/text_embedding.py +++ /dev/null @@ -1,152 +0,0 @@ -import json -import time -from typing import Optional - -from replicate import Client as ReplicateClient - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType, PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import 
TextEmbeddingModel -from core.model_runtime.model_providers.replicate._common import _CommonReplicate - - -class ReplicateEmbeddingModel(_CommonReplicate, TextEmbeddingModel): - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - client = ReplicateClient(api_token=credentials["replicate_api_token"], timeout=30) - - if "model_version" in credentials: - model_version = credentials["model_version"] - else: - model_info = client.models.get(model) - model_version = model_info.latest_version.id - - replicate_model_version = f"{model}:{model_version}" - - text_input_key = self._get_text_input_key(model, model_version, client) - - embeddings = self._generate_embeddings_by_text_input_key(client, replicate_model_version, text_input_key, texts) - - tokens = self.get_num_tokens(model, credentials, texts) - usage = self._calc_response_usage(model, credentials, tokens) - - return TextEmbeddingResult(model=model, embeddings=embeddings, usage=usage) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - num_tokens = 0 - for text in texts: - num_tokens += self._get_num_tokens_by_gpt2(text) - return num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - if "replicate_api_token" not in credentials: - raise CredentialsValidateFailedError("Replicate Access Token must be provided.") - - try: - client = ReplicateClient(api_token=credentials["replicate_api_token"], timeout=30) - - if "model_version" in credentials: - model_version = credentials["model_version"] - else: - model_info = client.models.get(model) - model_version = 
model_info.latest_version.id - - replicate_model_version = f"{model}:{model_version}" - - text_input_key = self._get_text_input_key(model, model_version, client) - - self._generate_embeddings_by_text_input_key( - client, replicate_model_version, text_input_key, ["Hello worlds!"] - ) - except Exception as e: - raise CredentialsValidateFailedError(str(e)) - - def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TEXT_EMBEDDING, - model_properties={"context_size": 4096, "max_chunks": 1}, - ) - return entity - - @staticmethod - def _get_text_input_key(model: str, model_version: str, client: ReplicateClient) -> str: - model_info = client.models.get(model) - model_info_version = model_info.versions.get(model_version) - - # sort through the openapi schema to get the name of text, texts or inputs - input_properties = sorted( - model_info_version.openapi_schema["components"]["schemas"]["Input"]["properties"].items(), - key=lambda item: item[1].get("x-order", 0), - ) - - for input_property in input_properties: - if input_property[0] in {"text", "texts", "inputs"}: - text_input_key = input_property[0] - return text_input_key - - return "" - - @staticmethod - def _generate_embeddings_by_text_input_key( - client: ReplicateClient, replicate_model_version: str, text_input_key: str, texts: list[str] - ) -> list[list[float]]: - if text_input_key in {"text", "inputs"}: - embeddings = [] - for text in texts: - result = client.run(replicate_model_version, input={text_input_key: text}) - embeddings.append(result[0].get("embedding")) - - return [list(map(float, e)) for e in embeddings] - elif "texts" == text_input_key: - result = client.run( - replicate_model_version, - input={ - "texts": json.dumps(texts), - "batch_size": 4, - "convert_to_numpy": False, - "normalize_embeddings": True, - }, - ) - return 
result - else: - raise ValueError(f"embeddings input key is invalid: {text_input_key}") - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/sagemaker/llm/llm.py b/api/core/model_runtime/model_providers/sagemaker/llm/llm.py deleted file mode 100644 index 97b76920443840..00000000000000 --- a/api/core/model_runtime/model_providers/sagemaker/llm/llm.py +++ /dev/null @@ -1,463 +0,0 @@ -import json -import logging -import re -from collections.abc import Generator, Iterator -from typing import Any, Optional, Union, cast - -# from openai.types.chat import ChatCompletion, ChatCompletionChunk -import boto3 -from sagemaker import Predictor, serializers -from sagemaker.session import Session - -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContent, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - I18nObject, - ModelFeature, - ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, 
-) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - -logger = logging.getLogger(__name__) - - -def inference(predictor, messages: list[dict[str, Any]], params: dict[str, Any], stop: list, stream=False): - """ - params: - predictor : Sagemaker Predictor - messages (List[Dict[str,Any]]): message list。 - messages = [ - {"role": "system", "content":"please answer in Chinese"}, - {"role": "user", "content": "who are you? what are you doing?"}, - ] - params (Dict[str,Any]): model parameters for LLM。 - stream (bool): False by default。 - - response: - result of inference if stream is False - Iterator of Chunks if stream is True - """ - payload = { - "model": params.get("model_name"), - "stop": stop, - "messages": messages, - "stream": stream, - "max_tokens": params.get("max_new_tokens", params.get("max_tokens", 2048)), - "temperature": params.get("temperature", 0.1), - "top_p": params.get("top_p", 0.9), - } - - if not stream: - response = predictor.predict(payload) - return response - else: - response_stream = predictor.predict_stream(payload) - return response_stream - - -class SageMakerLargeLanguageModel(LargeLanguageModel): - """ - Model class for Cohere large language model. 
- """ - - sagemaker_session: Any = None - predictor: Any = None - sagemaker_endpoint: str = None - - def _handle_chat_generate_response( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool], - resp: bytes, - ) -> LLMResult: - """ - handle normal chat generate response - """ - resp_obj = json.loads(resp.decode("utf-8")) - resp_str = resp_obj.get("choices")[0].get("message").get("content") - - if len(resp_str) == 0: - raise InvokeServerUnavailableError("Empty response") - - assistant_prompt_message = AssistantPromptMessage(content=resp_str, tool_calls=[]) - - prompt_tokens = self._num_tokens_from_messages(messages=prompt_messages, tools=tools) - completion_tokens = self._num_tokens_from_messages(messages=[assistant_prompt_message], tools=tools) - - usage = self._calc_response_usage( - model=model, credentials=credentials, prompt_tokens=prompt_tokens, completion_tokens=completion_tokens - ) - - response = LLMResult( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=None, - usage=usage, - message=assistant_prompt_message, - ) - - return response - - def _handle_chat_stream_response( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: list[PromptMessageTool], - resp: Iterator[bytes], - ) -> Generator: - """ - handle stream chat generate response - """ - full_response = "" - buffer = "" - for chunk_bytes in resp: - buffer += chunk_bytes.decode("utf-8") - last_idx = 0 - for match in re.finditer(r"^data:\s*(.+?)(\n\n)", buffer): - try: - data = json.loads(match.group(1).strip()) - last_idx = match.span()[1] - - if "content" in data["choices"][0]["delta"]: - chunk_content = data["choices"][0]["delta"]["content"] - assistant_prompt_message = AssistantPromptMessage(content=chunk_content, tool_calls=[]) - - if data["choices"][0]["finish_reason"] is not None: - temp_assistant_prompt_message = AssistantPromptMessage(content=full_response, tool_calls=[]) - 
prompt_tokens = self._num_tokens_from_messages(messages=prompt_messages, tools=tools) - completion_tokens = self._num_tokens_from_messages( - messages=[temp_assistant_prompt_message], tools=[] - ) - usage = self._calc_response_usage( - model=model, - credentials=credentials, - prompt_tokens=prompt_tokens, - completion_tokens=completion_tokens, - ) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=None, - delta=LLMResultChunkDelta( - index=0, - message=assistant_prompt_message, - finish_reason=data["choices"][0]["finish_reason"], - usage=usage, - ), - ) - else: - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - system_fingerprint=None, - delta=LLMResultChunkDelta(index=0, message=assistant_prompt_message), - ) - - full_response += chunk_content - except (json.JSONDecodeError, KeyError, IndexError) as e: - logger.info("json parse exception, content: {}".format(match.group(1).strip())) - pass - - buffer = buffer[last_idx:] - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - if not self.sagemaker_session: - access_key = credentials.get("aws_access_key_id") - secret_key = credentials.get("aws_secret_access_key") - aws_region = credentials.get("aws_region") - boto_session = None - if aws_region: - if access_key and secret_key: - boto_session = boto3.Session( - 
aws_access_key_id=access_key, aws_secret_access_key=secret_key, region_name=aws_region - ) - else: - boto_session = boto3.Session(region_name=aws_region) - else: - boto_session = boto3.Session() - - sagemaker_client = boto_session.client("sagemaker") - self.sagemaker_session = Session(boto_session=boto_session, sagemaker_client=sagemaker_client) - - if self.sagemaker_endpoint != credentials.get("sagemaker_endpoint"): - self.sagemaker_endpoint = credentials.get("sagemaker_endpoint") - self.predictor = Predictor( - endpoint_name=self.sagemaker_endpoint, - sagemaker_session=self.sagemaker_session, - serializer=serializers.JSONSerializer(), - ) - - messages: list[dict[str, Any]] = [{"role": p.role.value, "content": p.content} for p in prompt_messages] - response = inference( - predictor=self.predictor, messages=messages, params=model_parameters, stop=stop, stream=stream - ) - - if stream: - if tools and len(tools) > 0: - raise InvokeBadRequestError(f"{model}'s tool calls does not support stream mode") - - return self._handle_chat_stream_response( - model=model, credentials=credentials, prompt_messages=prompt_messages, tools=tools, resp=response - ) - return self._handle_chat_generate_response( - model=model, credentials=credentials, prompt_messages=prompt_messages, tools=tools, resp=response - ) - - def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict for OpenAI Compatibility API - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(PromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == 
PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - sub_message_dict = { - "type": "image_url", - "image_url": {"url": message_content.data, "detail": message_content.detail.value}, - } - sub_messages.append(sub_message_dict) - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - if message.tool_calls and len(message.tool_calls) > 0: - message_dict["function_call"] = { - "name": message.tool_calls[0].function.name, - "arguments": message.tool_calls[0].function.arguments, - } - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - elif isinstance(message, ToolPromptMessage): - message = cast(ToolPromptMessage, message) - message_dict = {"tool_call_id": message.tool_call_id, "role": "tool", "content": message.content} - else: - raise ValueError(f"Unknown message type {type(message)}") - - return message_dict - - def _num_tokens_from_messages( - self, messages: list[PromptMessage], tools: list[PromptMessageTool], is_completion_model: bool = False - ) -> int: - def tokens(text: str): - return self._get_num_tokens_by_gpt2(text) - - if is_completion_model: - return sum(tokens(str(message.content)) for message in messages) - - tokens_per_message = 3 - tokens_per_name = 1 - - num_tokens = 0 - messages_dict = [self._convert_prompt_message_to_dict(m) for m in messages] - for message in messages_dict: - num_tokens += tokens_per_message - for key, value in message.items(): - if isinstance(value, list): - text = "" - for item in value: - if isinstance(item, dict) and item["type"] == "text": - text += item["text"] - - value = text - - if key == "tool_calls": - for tool_call in value: - for t_key, t_value in tool_call.items(): - num_tokens += 
tokens(t_key) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += tokens(f_key) - num_tokens += tokens(f_value) - else: - num_tokens += tokens(t_key) - num_tokens += tokens(t_value) - if key == "function_call": - for t_key, t_value in value.items(): - num_tokens += tokens(t_key) - if t_key == "function": - for f_key, f_value in t_value.items(): - num_tokens += tokens(f_key) - num_tokens += tokens(f_value) - else: - num_tokens += tokens(t_key) - num_tokens += tokens(t_value) - else: - num_tokens += tokens(str(value)) - - if key == "name": - num_tokens += tokens_per_name - num_tokens += 3 - - if tools: - num_tokens += self._num_tokens_for_tools(tools) - - return num_tokens - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - # get model mode - try: - return self._num_tokens_from_messages(prompt_messages, tools) - except Exception as e: - raise self._transform_invoke_error(e) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # get model mode - pass - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - rules = [ - ParameterRule( - name="temperature", - type=ParameterType.FLOAT, - use_template="temperature", - label=I18nObject(zh_Hans="温度", en_US="Temperature"), - ), - ParameterRule( - name="top_p", - type=ParameterType.FLOAT, - use_template="top_p", - label=I18nObject(zh_Hans="Top P", en_US="Top P"), - ), - ParameterRule( - name="max_tokens", - type=ParameterType.INT, - use_template="max_tokens", - min=1, - max=credentials.get("context_length", 2048), - default=512, - label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"), - ), - ] - - completion_type = LLMMode.value_of(credentials["mode"]).value - - features = [] - - support_function_call = credentials.get("support_function_call", False) - if support_function_call: - features.append(ModelFeature.TOOL_CALL) - - support_vision = credentials.get("support_vision", False) - if support_vision: - features.append(ModelFeature.VISION) - - context_length = credentials.get("context_length", 2048) - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.LLM, - features=features, - model_properties={ModelPropertyKey.MODE: completion_type, ModelPropertyKey.CONTEXT_SIZE: context_length}, - parameter_rules=rules, - ) - - return entity diff --git a/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py deleted file mode 100644 index 
8f993ce6722522..00000000000000 --- a/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py +++ /dev/null @@ -1,200 +0,0 @@ -import itertools -import json -import logging -import time -from typing import Any, Optional - -import boto3 - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType, PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel - -BATCH_SIZE = 20 -CONTEXT_SIZE = 8192 - -logger = logging.getLogger(__name__) - - -def batch_generator(generator, batch_size): - while True: - batch = list(itertools.islice(generator, batch_size)) - if not batch: - break - yield batch - - -class SageMakerEmbeddingModel(TextEmbeddingModel): - """ - Model class for Cohere text embedding model. 
- """ - - sagemaker_client: Any = None - - def _sagemaker_embedding(self, sm_client, endpoint_name, content_list: list[str]): - response_model = sm_client.invoke_endpoint( - EndpointName=endpoint_name, - Body=json.dumps({"inputs": content_list, "parameters": {}, "is_query": False, "instruction": ""}), - ContentType="application/json", - ) - json_str = response_model["Body"].read().decode("utf8") - json_obj = json.loads(json_str) - embeddings = json_obj["embeddings"] - return embeddings - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - # get model properties - try: - line = 1 - if not self.sagemaker_client: - access_key = credentials.get("aws_access_key_id") - secret_key = credentials.get("aws_secret_access_key") - aws_region = credentials.get("aws_region") - if aws_region: - if access_key and secret_key: - self.sagemaker_client = boto3.client( - "sagemaker-runtime", - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - region_name=aws_region, - ) - else: - self.sagemaker_client = boto3.client("sagemaker-runtime", region_name=aws_region) - else: - self.sagemaker_client = boto3.client("sagemaker-runtime") - - line = 2 - sagemaker_endpoint = credentials.get("sagemaker_endpoint") - - line = 3 - truncated_texts = [item[:CONTEXT_SIZE] for item in texts] - - batches = batch_generator((text for text in truncated_texts), batch_size=BATCH_SIZE) - all_embeddings = [] - - line = 4 - for batch in batches: - embeddings = self._sagemaker_embedding(self.sagemaker_client, sagemaker_endpoint, batch) - all_embeddings.extend(embeddings) - - line = 5 - # calc usage - usage = 
self._calc_response_usage( - model=model, - credentials=credentials, - tokens=0, # It's not SAAS API, usage is meaningless - ) - line = 6 - - return TextEmbeddingResult(embeddings=all_embeddings, usage=usage, model=model) - - except Exception as e: - logger.exception(f"Exception {e}, line : {line}") - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - return 0 - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - print("validate_credentials ok....") - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [KeyError], - } - - def 
get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TEXT_EMBEDDING, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: CONTEXT_SIZE, - ModelPropertyKey.MAX_CHUNKS: BATCH_SIZE, - }, - parameter_rules=[], - ) - - return entity diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml deleted file mode 100644 index 8d1df82140b79f..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml +++ /dev/null @@ -1,28 +0,0 @@ -- Qwen/Qwen2.5-72B-Instruct -- Qwen/Qwen2.5-32B-Instruct -- Qwen/Qwen2.5-14B-Instruct -- Qwen/Qwen2.5-7B-Instruct -- Qwen/Qwen2.5-Coder-7B-Instruct -- Qwen/Qwen2.5-Math-72B-Instruct -- Qwen/Qwen2-72B-Instruct -- Qwen/Qwen2-57B-A14B-Instruct -- Qwen/Qwen2-7B-Instruct -- Qwen/Qwen2-1.5B-Instruct -- deepseek-ai/DeepSeek-V2.5 -- deepseek-ai/DeepSeek-V2-Chat -- deepseek-ai/DeepSeek-Coder-V2-Instruct -- THUDM/glm-4-9b-chat -- 01-ai/Yi-1.5-34B-Chat-16K -- 01-ai/Yi-1.5-9B-Chat-16K -- 01-ai/Yi-1.5-6B-Chat -- internlm/internlm2_5-20b-chat -- internlm/internlm2_5-7b-chat -- meta-llama/Meta-Llama-3.1-405B-Instruct -- meta-llama/Meta-Llama-3.1-70B-Instruct -- meta-llama/Meta-Llama-3.1-8B-Instruct -- meta-llama/Meta-Llama-3-70B-Instruct -- meta-llama/Meta-Llama-3-8B-Instruct -- google/gemma-2-27b-it -- google/gemma-2-9b-it -- mistralai/Mistral-7B-Instruct-v0.2 -- mistralai/Mixtral-8x7B-Instruct-v0.1 diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-20b-chat.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-20b-chat.yaml deleted file mode 100644 index d9663582e5ca26..00000000000000 --- 
a/api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-20b-chat.yaml +++ /dev/null @@ -1,30 +0,0 @@ -model: internlm/internlm2_5-20b-chat -label: - en_US: internlm/internlm2_5-20b-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: temperature - use_template: temperature - - name: max_tokens - use_template: max_tokens - type: int - default: 512 - min: 1 - max: 4096 - help: - zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 - en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. - - name: top_p - use_template: top_p - - name: frequency_penalty - use_template: frequency_penalty -pricing: - input: '1' - output: '1' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-coder-7b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-coder-7b-instruct.yaml deleted file mode 100644 index 76526200ccdccc..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-coder-7b-instruct.yaml +++ /dev/null @@ -1,74 +0,0 @@ -model: Qwen/Qwen2.5-Coder-7B-Instruct -label: - en_US: Qwen/Qwen2.5-Coder-7B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. 
A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. 
- - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. - - name: response_format - use_template: response_format -pricing: - input: '0' - output: '0' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-math-72b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-math-72b-instruct.yaml deleted file mode 100644 index 90afa0cfd5b96a..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-math-72b-instruct.yaml +++ /dev/null @@ -1,74 +0,0 @@ -model: Qwen/Qwen2.5-Math-72B-Instruct -label: - en_US: Qwen/Qwen2.5-Math-72B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: temperature - use_template: temperature - type: float - default: 0.3 - min: 0.0 - max: 2.0 - help: - zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 - en_US: Used to control the degree of randomness and 
diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected. , the generated results are more certain. - - name: max_tokens - use_template: max_tokens - type: int - default: 2000 - min: 1 - max: 2000 - help: - zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 - en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. - - name: top_p - use_template: top_p - type: float - default: 0.8 - min: 0.1 - max: 0.9 - help: - zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 - en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. - - name: top_k - type: int - min: 0 - max: 99 - label: - zh_Hans: 取样数量 - en_US: Top k - help: - zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 - en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. 
The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. - - name: seed - required: false - type: int - default: 1234 - label: - zh_Hans: 随机种子 - en_US: Random seed - help: - zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 - en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: repetition_penalty - required: false - type: float - default: 1.1 - label: - zh_Hans: 重复惩罚 - en_US: Repetition penalty - help: - zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 - en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. 
- - name: response_format - use_template: response_format -pricing: - input: '4.13' - output: '4.13' - unit: '0.000001' - currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/siliconflow/text_embedding/text_embedding.py deleted file mode 100644 index c5dcc126107aa2..00000000000000 --- a/api/core/model_runtime/model_providers/siliconflow/text_embedding/text_embedding.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import Optional - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.model_providers.openai_api_compatible.text_embedding.text_embedding import ( - OAICompatEmbeddingModel, -) - - -class SiliconflowTextEmbeddingModel(OAICompatEmbeddingModel): - """ - Model class for Siliconflow text embedding model. - """ - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - self._add_custom_parameters(credentials) - return super()._invoke(model, credentials, texts, user) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - self._add_custom_parameters(credentials) - return super().get_num_tokens(model, credentials, texts) - - @classmethod - def _add_custom_parameters(cls, credentials: dict) -> None: - credentials["endpoint_url"] = "https://api.siliconflow.cn/v1" diff --git 
a/api/core/model_runtime/model_providers/spark/llm/llm.py b/api/core/model_runtime/model_providers/spark/llm/llm.py deleted file mode 100644 index 1181ba699af886..00000000000000 --- a/api/core/model_runtime/model_providers/spark/llm/llm.py +++ /dev/null @@ -1,309 +0,0 @@ -import threading -from collections.abc import Generator -from typing import Optional, Union - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - -from ._client import SparkLLMClient - - -class SparkLargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # invoke model - return self._generate(model, credentials, prompt_messages, model_parameters, stop, stream, user) - - def get_num_tokens( - self, - model: str, - credentials: dict, - 
prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return: - """ - prompt = self._convert_messages_to_prompt(prompt_messages) - - return self._get_num_tokens_by_gpt2(prompt) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._generate( - model=model, - credentials=credentials, - prompt_messages=[ - UserPromptMessage(content="ping"), - ], - model_parameters={ - "temperature": 0.5, - }, - stream=False, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - extra_model_kwargs = {} - if stop: - extra_model_kwargs["stop_sequences"] = stop - - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - - client = SparkLLMClient( - model=model, - **credentials_kwargs, - ) - - thread = threading.Thread( - target=client.run, - args=( - [ - {"role": prompt_message.role.value, "content": prompt_message.content} - for prompt_message in prompt_messages - ], - user, - model_parameters, - stream, - 
), - ) - thread.start() - - if stream: - return self._handle_generate_stream_response(thread, model, credentials, client, prompt_messages) - - return self._handle_generate_response(thread, model, credentials, client, prompt_messages) - - def _handle_generate_response( - self, - thread: threading.Thread, - model: str, - credentials: dict, - client: SparkLLMClient, - prompt_messages: list[PromptMessage], - ) -> LLMResult: - """ - Handle llm response - - :param model: model name - :param response: response - :param prompt_messages: prompt messages - :return: llm response - """ - completion = "" - - for content in client.subscribe(): - if isinstance(content, dict): - delta = content["data"] - else: - delta = content - - completion += delta - - thread.join() - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=completion) - - # calculate num tokens - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - result = LLMResult( - model=model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - ) - - return result - - def _handle_generate_stream_response( - self, - thread: threading.Thread, - model: str, - credentials: dict, - client: SparkLLMClient, - prompt_messages: list[PromptMessage], - ) -> Generator: - """ - Handle llm stream response - - :param thread: thread - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator result - """ - completion = "" - for index, content in enumerate(client.subscribe()): - if isinstance(content, dict): - delta = content["data"] - else: - delta = content - completion += delta - 
assistant_prompt_message = AssistantPromptMessage( - content=delta or "", - ) - temp_assistant_prompt_message = AssistantPromptMessage( - content=completion, - ) - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [temp_assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=assistant_prompt_message, usage=usage), - ) - - thread.join() - - def _to_credential_kwargs(self, credentials: dict) -> dict: - """ - Transform credentials to kwargs for model instance - - :param credentials: - :return: - """ - credentials_kwargs = { - "app_id": credentials["app_id"], - "api_secret": credentials["api_secret"], - "api_key": credentials["api_key"], - } - - return credentials_kwargs - - def _convert_one_message_to_text(self, message: PromptMessage) -> str: - """ - Convert a single message to a string. - - :param message: PromptMessage to convert. - :return: String representation of the message. - """ - human_prompt = "\n\nHuman:" - ai_prompt = "\n\nAssistant:" - content = message.content - - if isinstance(message, UserPromptMessage): - message_text = f"{human_prompt} {content}" - elif isinstance(message, AssistantPromptMessage): - message_text = f"{ai_prompt} {content}" - elif isinstance(message, SystemPromptMessage): - message_text = content - else: - raise ValueError(f"Got unknown type {message}") - - return message_text - - def _convert_messages_to_prompt(self, messages: list[PromptMessage]) -> str: - """ - Format a list of messages into a full prompt for the Anthropic model - - :param messages: List of PromptMessage to combine. - :return: Combined string with necessary human_prompt and ai_prompt tags. 
- """ - messages = messages.copy() # don't mutate the original list - - text = "".join(self._convert_one_message_to_text(message) for message in messages) - - # trim off the trailing ' ' that might come from the "Assistant: " - return text.rstrip() - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [], - InvokeServerUnavailableError: [], - InvokeRateLimitError: [], - InvokeAuthorizationError: [], - InvokeBadRequestError: [], - } diff --git a/api/core/model_runtime/model_providers/tongyi/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/tongyi/text_embedding/text_embedding.py deleted file mode 100644 index 736cd44df8888f..00000000000000 --- a/api/core/model_runtime/model_providers/tongyi/text_embedding/text_embedding.py +++ /dev/null @@ -1,177 +0,0 @@ -import time -from typing import Optional - -import dashscope -import numpy as np - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import ( - EmbeddingUsage, - TextEmbeddingResult, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import ( - TextEmbeddingModel, -) -from core.model_runtime.model_providers.tongyi._common import _CommonTongyi - - -class TongyiTextEmbeddingModel(_CommonTongyi, TextEmbeddingModel): - """ - Model class for Tongyi text embedding model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - credentials_kwargs = self._to_credential_kwargs(credentials) - - context_size = self._get_context_size(model, credentials) - max_chunks = self._get_max_chunks(model, credentials) - inputs = [] - indices = [] - used_tokens = 0 - - for i, text in enumerate(texts): - # Here token count is only an approximation based on the GPT2 tokenizer - num_tokens = self._get_num_tokens_by_gpt2(text) - - if num_tokens >= context_size: - cutoff = int(np.floor(len(text) * (context_size / num_tokens))) - # if num tokens is larger than context length, only use the start - inputs.append(text[0:cutoff]) - else: - inputs.append(text) - indices += [i] - - batched_embeddings = [] - _iter = range(0, len(inputs), max_chunks) - - for i in _iter: - embeddings_batch, embedding_used_tokens = self.embed_documents( - credentials_kwargs=credentials_kwargs, - model=model, - texts=inputs[i : i + max_chunks], - ) - used_tokens += embedding_used_tokens - batched_embeddings += embeddings_batch - - # calc usage - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=used_tokens) - return TextEmbeddingResult(embeddings=batched_embeddings, usage=usage, model=model) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - if len(texts) == 0: - return 0 - total_num_tokens = 0 - for text in texts: - total_num_tokens += 
self._get_num_tokens_by_gpt2(text) - - return total_num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - - # call embedding model - self.embed_documents(credentials_kwargs=credentials_kwargs, model=model, texts=["ping"]) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @staticmethod - def embed_documents(credentials_kwargs: dict, model: str, texts: list[str]) -> tuple[list[list[float]], int]: - """Call out to Tongyi's embedding endpoint. - - Args: - credentials_kwargs: The credentials to use for the call. - model: The model to use for embedding. - texts: The list of texts to embed. - - Returns: - List of embeddings, one for each text, and tokens usage. - """ - embeddings = [] - embedding_used_tokens = 0 - for text in texts: - response = dashscope.TextEmbedding.call( - api_key=credentials_kwargs["dashscope_api_key"], - model=model, - input=text, - text_type="document", - ) - if response.output and "embeddings" in response.output and response.output["embeddings"]: - data = response.output["embeddings"][0] - if "embedding" in data: - embeddings.append(data["embedding"]) - else: - raise ValueError("Embedding data is missing in the response.") - else: - raise ValueError("Response output is missing or does not contain embeddings.") - - if response.usage and "total_tokens" in response.usage: - embedding_used_tokens += response.usage["total_tokens"] - else: - raise ValueError("Response usage is missing or does not contain total tokens.") - - return [list(map(float, e)) for e in embeddings], embedding_used_tokens - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name 
- :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, - credentials=credentials, - price_type=PriceType.INPUT, - tokens=tokens, - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/upstage/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/upstage/text_embedding/text_embedding.py deleted file mode 100644 index b6509cd26cfa28..00000000000000 --- a/api/core/model_runtime/model_providers/upstage/text_embedding/text_embedding.py +++ /dev/null @@ -1,197 +0,0 @@ -import base64 -import time -from collections.abc import Mapping -from typing import Union - -import numpy as np -from openai import OpenAI -from tokenizers import Tokenizer - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.upstage._common import _CommonUpstage - - -class UpstageTextEmbeddingModel(_CommonUpstage, TextEmbeddingModel): - """ - Model class for Upstage text embedding model. 
- """ - - def _get_tokenizer(self) -> Tokenizer: - return Tokenizer.from_pretrained("upstage/solar-1-mini-tokenizer") - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: str | None = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - extra_model_kwargs = {} - if user: - extra_model_kwargs["user"] = user - extra_model_kwargs["encoding_format"] = "base64" - - context_size = self._get_context_size(model, credentials) - max_chunks = self._get_max_chunks(model, credentials) - - embeddings: list[list[float]] = [[] for _ in range(len(texts))] - tokens = [] - indices = [] - used_tokens = 0 - - tokenizer = self._get_tokenizer() - - for i, text in enumerate(texts): - token = tokenizer.encode(text, add_special_tokens=False).tokens - for j in range(0, len(token), context_size): - tokens += [token[j : j + context_size]] - indices += [i] - - batched_embeddings = [] - _iter = range(0, len(tokens), max_chunks) - - for i in _iter: - embeddings_batch, embedding_used_tokens = self._embedding_invoke( - model=model, - client=client, - texts=tokens[i : i + max_chunks], - extra_model_kwargs=extra_model_kwargs, - ) - - used_tokens += embedding_used_tokens - batched_embeddings += embeddings_batch - - results: list[list[list[float]]] = [[] for _ in range(len(texts))] - num_tokens_in_batch: list[list[int]] = [[] for _ in range(len(texts))] - - for i in range(len(indices)): - results[indices[i]].append(batched_embeddings[i]) - num_tokens_in_batch[indices[i]].append(len(tokens[i])) - - for i in range(len(texts)): - _result = results[i] - if len(_result) == 0: - 
embeddings_batch, embedding_used_tokens = self._embedding_invoke( - model=model, - client=client, - texts=[texts[i]], - extra_model_kwargs=extra_model_kwargs, - ) - used_tokens += embedding_used_tokens - average = embeddings_batch[0] - else: - average = np.average(_result, axis=0, weights=num_tokens_in_batch[i]) - embeddings[i] = (average / np.linalg.norm(average)).tolist() - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=used_tokens) - - return TextEmbeddingResult(embeddings=embeddings, usage=usage, model=model) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - tokenizer = self._get_tokenizer() - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - if len(texts) == 0: - return 0 - - tokenizer = self._get_tokenizer() - - total_num_tokens = 0 - for text in texts: - # calculate the number of tokens in the encoded text - tokenized_text = tokenizer.encode(text) - total_num_tokens += len(tokenized_text) - - return total_num_tokens - - def validate_credentials(self, model: str, credentials: Mapping) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - # transform credentials to kwargs for model instance - credentials_kwargs = self._to_credential_kwargs(credentials) - client = OpenAI(**credentials_kwargs) - - # call embedding model - self._embedding_invoke(model=model, client=client, texts=["ping"], extra_model_kwargs={}) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _embedding_invoke( - self, model: str, client: OpenAI, texts: Union[list[str], str], extra_model_kwargs: dict - ) -> tuple[list[list[float]], int]: - """ - Invoke embedding model - :param model: model name - :param client: model client - :param texts: texts to embed - :param extra_model_kwargs: extra 
model kwargs - :return: embeddings and used tokens - """ - response = client.embeddings.create(model=model, input=texts, **extra_model_kwargs) - - if "encoding_format" in extra_model_kwargs and extra_model_kwargs["encoding_format"] == "base64": - return ( - [ - list(np.frombuffer(base64.b64decode(embedding.embedding), dtype=np.float32)) - for embedding in response.data - ], - response.usage.total_tokens, - ) - - return [data.embedding for data in response.data], response.usage.total_tokens - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - input_price_info = self.get_price( - model=model, credentials=credentials, tokens=tokens, price_type=PriceType.INPUT - ) - - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-flash-001.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-flash-001.yaml deleted file mode 100644 index f5386be06da6be..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-flash-001.yaml +++ /dev/null @@ -1,37 +0,0 @@ -model: gemini-1.5-flash-001 -label: - en_US: Gemini 1.5 Flash 001 -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - en_US: Top k - type: int - help: - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_output_tokens - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-flash-002.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-flash-002.yaml deleted file mode 100644 index 97bd44f06b5145..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-flash-002.yaml +++ /dev/null @@ -1,37 +0,0 @@ -model: gemini-1.5-flash-002 -label: - en_US: Gemini 1.5 Flash 002 -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - en_US: Top k - type: int - help: - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_output_tokens - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-pro-001.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-pro-001.yaml deleted file mode 100644 index 5e08f2294e2ebf..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-pro-001.yaml +++ /dev/null @@ -1,37 +0,0 @@ -model: gemini-1.5-pro-001 -label: - en_US: Gemini 1.5 Pro 001 -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - en_US: Top k - type: int - help: - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_output_tokens - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-pro-002.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-pro-002.yaml deleted file mode 100644 index 8f327ea2f3d37e..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-1.5-pro-002.yaml +++ /dev/null @@ -1,37 +0,0 @@ -model: gemini-1.5-pro-002 -label: - en_US: Gemini 1.5 Pro 002 -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - en_US: Top k - type: int - help: - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_output_tokens - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-flash-experimental.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-flash-experimental.yaml deleted file mode 100644 index 0f5eb34c0cdf03..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-flash-experimental.yaml +++ /dev/null @@ -1,37 +0,0 @@ -model: gemini-flash-experimental -label: - en_US: Gemini Flash Experimental -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - en_US: Top k - type: int - help: - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_output_tokens - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-pro-experimental.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-pro-experimental.yaml deleted file mode 100644 index fa31cabb85abb0..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-pro-experimental.yaml +++ /dev/null @@ -1,37 +0,0 @@ -model: gemini-pro-experimental -label: - en_US: Gemini Pro Experimental -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 1048576 -parameter_rules: - - name: temperature - use_template: temperature - - name: top_p - use_template: top_p - - name: top_k - label: - en_US: Top k - type: int - help: - en_US: Only sample from the top K options for each subsequent token. 
- required: false - - name: presence_penalty - use_template: presence_penalty - - name: frequency_penalty - use_template: frequency_penalty - - name: max_output_tokens - use_template: max_tokens - required: true - default: 8192 - min: 1 - max: 8192 -pricing: - input: '0.00' - output: '0.00' - unit: '0.000001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py b/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py deleted file mode 100644 index 1dd785d5454082..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py +++ /dev/null @@ -1,733 +0,0 @@ -import base64 -import io -import json -import logging -import time -from collections.abc import Generator -from typing import Optional, Union, cast - -import google.auth.transport.requests -import vertexai.generative_models as glm -from anthropic import AnthropicVertex, Stream -from anthropic.types import ( - ContentBlockDeltaEvent, - Message, - MessageDeltaEvent, - MessageStartEvent, - MessageStopEvent, - MessageStreamEvent, -) -from google.api_core import exceptions -from google.cloud import aiplatform -from google.oauth2 import service_account -from PIL import Image - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - ToolPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from 
core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel - -logger = logging.getLogger(__name__) - - -class VertexAiLargeLanguageModel(LargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param tools: tools for tool calling - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - # invoke anthropic models via anthropic official SDK - if "claude" in model: - return self._generate_anthropic(model, credentials, prompt_messages, model_parameters, stop, stream, user) - # invoke Gemini model - return self._generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def _generate_anthropic( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke Anthropic large language model - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :return: full response or stream response chunk generator result - """ - # use Anthropic official SDK references - # - https://github.com/anthropics/anthropic-sdk-python - service_account_info = json.loads(base64.b64decode(credentials["vertex_service_account_key"])) 
- project_id = credentials["vertex_project_id"] - SCOPES = ["https://www.googleapis.com/auth/cloud-platform"] - token = "" - - # get access token from service account credential - if service_account_info: - credentials = service_account.Credentials.from_service_account_info(service_account_info, scopes=SCOPES) - request = google.auth.transport.requests.Request() - credentials.refresh(request) - token = credentials.token - - # Vertex AI Anthropic Claude3 Opus model available in us-east5 region, Sonnet and Haiku available - # in us-central1 region - if "opus" in model or "claude-3-5-sonnet" in model: - location = "us-east5" - else: - location = "us-central1" - - # use access token to authenticate - if token: - client = AnthropicVertex(region=location, project_id=project_id, access_token=token) - # When access token is empty, try to use the Google Cloud VM's built-in service account - # or the GOOGLE_APPLICATION_CREDENTIALS environment variable - else: - client = AnthropicVertex( - region=location, - project_id=project_id, - ) - - extra_model_kwargs = {} - if stop: - extra_model_kwargs["stop_sequences"] = stop - - system, prompt_message_dicts = self._convert_claude_prompt_messages(prompt_messages) - - if system: - extra_model_kwargs["system"] = system - - response = client.messages.create( - model=model, messages=prompt_message_dicts, stream=stream, **model_parameters, **extra_model_kwargs - ) - - if stream: - return self._handle_claude_stream_response(model, credentials, response, prompt_messages) - - return self._handle_claude_response(model, credentials, response, prompt_messages) - - def _handle_claude_response( - self, model: str, credentials: dict, response: Message, prompt_messages: list[PromptMessage] - ) -> LLMResult: - """ - Handle llm chat response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: full response chunk generator result - """ - - # transform assistant 
message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=response.content[0].text) - - # calculate num tokens - if response.usage: - # transform usage - prompt_tokens = response.usage.input_tokens - completion_tokens = response.usage.output_tokens - else: - # calculate num tokens - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - response = LLMResult( - model=response.model, prompt_messages=prompt_messages, message=assistant_prompt_message, usage=usage - ) - - return response - - def _handle_claude_stream_response( - self, - model: str, - credentials: dict, - response: Stream[MessageStreamEvent], - prompt_messages: list[PromptMessage], - ) -> Generator: - """ - Handle llm chat stream response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: full response or stream response chunk generator result - """ - - try: - full_assistant_content = "" - return_model = None - input_tokens = 0 - output_tokens = 0 - finish_reason = None - index = 0 - - for chunk in response: - if isinstance(chunk, MessageStartEvent): - return_model = chunk.message.model - input_tokens = chunk.message.usage.input_tokens - elif isinstance(chunk, MessageDeltaEvent): - output_tokens = chunk.usage.output_tokens - finish_reason = chunk.delta.stop_reason - elif isinstance(chunk, MessageStopEvent): - usage = self._calc_response_usage(model, credentials, input_tokens, output_tokens) - yield LLMResultChunk( - model=return_model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index + 1, - message=AssistantPromptMessage(content=""), - finish_reason=finish_reason, - usage=usage, - ), - ) - elif isinstance(chunk, 
ContentBlockDeltaEvent): - chunk_text = chunk.delta.text or "" - full_assistant_content += chunk_text - assistant_prompt_message = AssistantPromptMessage( - content=chunk_text or "", - ) - index = chunk.index - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - ), - ) - except Exception as ex: - raise InvokeError(str(ex)) - - def _calc_claude_response_usage( - self, model: str, credentials: dict, prompt_tokens: int, completion_tokens: int - ) -> LLMUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param prompt_tokens: prompt tokens - :param completion_tokens: completion tokens - :return: usage - """ - # get prompt price info - prompt_price_info = self.get_price( - model=model, - credentials=credentials, - price_type=PriceType.INPUT, - tokens=prompt_tokens, - ) - - # get completion price info - completion_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.OUTPUT, tokens=completion_tokens - ) - - # transform usage - usage = LLMUsage( - prompt_tokens=prompt_tokens, - prompt_unit_price=prompt_price_info.unit_price, - prompt_price_unit=prompt_price_info.unit, - prompt_price=prompt_price_info.total_amount, - completion_tokens=completion_tokens, - completion_unit_price=completion_price_info.unit_price, - completion_price_unit=completion_price_info.unit, - completion_price=completion_price_info.total_amount, - total_tokens=prompt_tokens + completion_tokens, - total_price=prompt_price_info.total_amount + completion_price_info.total_amount, - currency=prompt_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - def _convert_claude_prompt_messages(self, prompt_messages: list[PromptMessage]) -> tuple[str, list[dict]]: - """ - Convert prompt messages to dict list and system - """ - - system = "" - first_loop = True - for message in 
prompt_messages: - if isinstance(message, SystemPromptMessage): - message.content = message.content.strip() - if first_loop: - system = message.content - first_loop = False - else: - system += "\n" - system += message.content - - prompt_message_dicts = [] - for message in prompt_messages: - if not isinstance(message, SystemPromptMessage): - prompt_message_dicts.append(self._convert_claude_prompt_message_to_dict(message)) - - return system, prompt_message_dicts - - def _convert_claude_prompt_message_to_dict(self, message: PromptMessage) -> dict: - """ - Convert PromptMessage to dict - """ - if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) - if isinstance(message.content, str): - message_dict = {"role": "user", "content": message.content} - else: - sub_messages = [] - for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) - sub_message_dict = {"type": "text", "text": message_content.data} - sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) - if not message_content.data.startswith("data:"): - # fetch image data from url - try: - image_content = requests.get(message_content.data).content - with Image.open(io.BytesIO(image_content)) as img: - mime_type = f"image/{img.format.lower()}" - base64_data = base64.b64encode(image_content).decode("utf-8") - except Exception as ex: - raise ValueError(f"Failed to fetch image data from url {message_content.data}, {ex}") - else: - data_split = message_content.data.split(";base64,") - mime_type = data_split[0].replace("data:", "") - base64_data = data_split[1] - - if mime_type not in {"image/jpeg", "image/png", "image/gif", "image/webp"}: - raise ValueError( - f"Unsupported image type {mime_type}, " - f"only support image/jpeg, image/png, image/gif, and image/webp" - 
) - - sub_message_dict = { - "type": "image", - "source": {"type": "base64", "media_type": mime_type, "data": base64_data}, - } - sub_messages.append(sub_message_dict) - - message_dict = {"role": "user", "content": sub_messages} - elif isinstance(message, AssistantPromptMessage): - message = cast(AssistantPromptMessage, message) - message_dict = {"role": "assistant", "content": message.content} - elif isinstance(message, SystemPromptMessage): - message = cast(SystemPromptMessage, message) - message_dict = {"role": "system", "content": message.content} - else: - raise ValueError(f"Got unknown type {message}") - - return message_dict - - def get_num_tokens( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - tools: Optional[list[PromptMessageTool]] = None, - ) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param prompt_messages: prompt messages - :param tools: tools for tool calling - :return:md = gml.GenerativeModel(model) - """ - prompt = self._convert_messages_to_prompt(prompt_messages) - - return self._get_num_tokens_by_gpt2(prompt) - - def _convert_messages_to_prompt(self, messages: list[PromptMessage]) -> str: - """ - Format a list of messages into a full prompt for the Google model - - :param messages: List of PromptMessage to combine. - :return: Combined string with necessary human_prompt and ai_prompt tags. 
- """ - messages = messages.copy() # don't mutate the original list - - text = "".join(self._convert_one_message_to_text(message) for message in messages) - - return text.rstrip() - - def _convert_tools_to_glm_tool(self, tools: list[PromptMessageTool]) -> glm.Tool: - """ - Convert tool messages to glm tools - - :param tools: tool messages - :return: glm tools - """ - return glm.Tool( - function_declarations=[ - glm.FunctionDeclaration( - name=tool.name, - parameters=glm.Schema( - type=glm.Type.OBJECT, - properties={ - key: { - "type_": value.get("type", "string").upper(), - "description": value.get("description", ""), - "enum": value.get("enum", []), - } - for key, value in tool.parameters.get("properties", {}).items() - }, - required=tool.parameters.get("required", []), - ), - ) - for tool in tools - ] - ) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - - try: - ping_message = SystemPromptMessage(content="ping") - self._generate(model, credentials, [ping_message], {"max_tokens_to_sample": 5}) - - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _generate( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - """ - Invoke large language model - - :param model: model name - :param credentials: credentials kwargs - :param prompt_messages: prompt messages - :param model_parameters: model parameters - :param stop: stop words - :param stream: is stream response - :param user: unique user id - :return: full response or stream response chunk generator result - """ - config_kwargs = model_parameters.copy() - config_kwargs["max_output_tokens"] = 
config_kwargs.pop("max_tokens_to_sample", None) - - if stop: - config_kwargs["stop_sequences"] = stop - - service_account_info = json.loads(base64.b64decode(credentials["vertex_service_account_key"])) - project_id = credentials["vertex_project_id"] - location = credentials["vertex_location"] - if service_account_info: - service_accountSA = service_account.Credentials.from_service_account_info(service_account_info) - aiplatform.init(credentials=service_accountSA, project=project_id, location=location) - else: - aiplatform.init(project=project_id, location=location) - - history = [] - system_instruction = "" - # hack for gemini-pro-vision, which currently does not support multi-turn chat - if model == "gemini-1.0-pro-vision-001": - last_msg = prompt_messages[-1] - content = self._format_message_to_glm_content(last_msg) - history.append(content) - else: - for msg in prompt_messages: - if isinstance(msg, SystemPromptMessage): - system_instruction = msg.content - else: - content = self._format_message_to_glm_content(msg) - if history and history[-1].role == content.role: - history[-1].parts.extend(content.parts) - else: - history.append(content) - - google_model = glm.GenerativeModel(model_name=model, system_instruction=system_instruction) - - response = google_model.generate_content( - contents=history, - generation_config=glm.GenerationConfig(**config_kwargs), - stream=stream, - tools=self._convert_tools_to_glm_tool(tools) if tools else None, - ) - - if stream: - return self._handle_generate_stream_response(model, credentials, response, prompt_messages) - - return self._handle_generate_response(model, credentials, response, prompt_messages) - - def _handle_generate_response( - self, model: str, credentials: dict, response: glm.GenerationResponse, prompt_messages: list[PromptMessage] - ) -> LLMResult: - """ - Handle llm response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - 
:return: llm response - """ - # transform assistant message to prompt message - assistant_prompt_message = AssistantPromptMessage(content=response.candidates[0].content.parts[0].text) - - # calculate num tokens - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - # transform response - result = LLMResult( - model=model, - prompt_messages=prompt_messages, - message=assistant_prompt_message, - usage=usage, - ) - - return result - - def _handle_generate_stream_response( - self, model: str, credentials: dict, response: glm.GenerationResponse, prompt_messages: list[PromptMessage] - ) -> Generator: - """ - Handle llm stream response - - :param model: model name - :param credentials: credentials - :param response: response - :param prompt_messages: prompt messages - :return: llm response chunk generator result - """ - index = -1 - for chunk in response: - for part in chunk.candidates[0].content.parts: - assistant_prompt_message = AssistantPromptMessage(content="") - - if part.text: - assistant_prompt_message.content += part.text - - if part.function_call: - assistant_prompt_message.tool_calls = [ - AssistantPromptMessage.ToolCall( - id=part.function_call.name, - type="function", - function=AssistantPromptMessage.ToolCall.ToolCallFunction( - name=part.function_call.name, - arguments=json.dumps(dict(part.function_call.args.items())), - ), - ) - ] - - index += 1 - - if not hasattr(chunk, "finish_reason") or not chunk.finish_reason: - # transform assistant message to prompt message - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta(index=index, message=assistant_prompt_message), - ) - else: - # calculate num tokens - prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages) - 
completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message]) - - # transform usage - usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) - - yield LLMResultChunk( - model=model, - prompt_messages=prompt_messages, - delta=LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - finish_reason=chunk.candidates[0].finish_reason, - usage=usage, - ), - ) - - def _convert_one_message_to_text(self, message: PromptMessage) -> str: - """ - Convert a single message to a string. - - :param message: PromptMessage to convert. - :return: String representation of the message. - """ - human_prompt = "\n\nuser:" - ai_prompt = "\n\nmodel:" - - content = message.content - if isinstance(content, list): - content = "".join(c.data for c in content if c.type != PromptMessageContentType.IMAGE) - - if isinstance(message, UserPromptMessage): - message_text = f"{human_prompt} {content}" - elif isinstance(message, AssistantPromptMessage): - message_text = f"{ai_prompt} {content}" - elif isinstance(message, SystemPromptMessage | ToolPromptMessage): - message_text = f"{human_prompt} {content}" - else: - raise ValueError(f"Got unknown type {message}") - - return message_text - - def _format_message_to_glm_content(self, message: PromptMessage) -> glm.Content: - """ - Format a single message into glm.Content for Google API - - :param message: one PromptMessage - :return: glm Content representation of message - """ - if isinstance(message, UserPromptMessage): - glm_content = glm.Content(role="user", parts=[]) - - if isinstance(message.content, str): - glm_content = glm.Content(role="user", parts=[glm.Part.from_text(message.content)]) - else: - parts = [] - for c in message.content: - if c.type == PromptMessageContentType.TEXT: - parts.append(glm.Part.from_text(c.data)) - else: - metadata, data = c.data.split(",", 1) - mime_type = metadata.split(";", 1)[0].split(":")[1] - 
parts.append(glm.Part.from_data(mime_type=mime_type, data=data)) - glm_content = glm.Content(role="user", parts=parts) - return glm_content - elif isinstance(message, AssistantPromptMessage): - if message.content: - glm_content = glm.Content(role="model", parts=[glm.Part.from_text(message.content)]) - if message.tool_calls: - glm_content = glm.Content( - role="model", - parts=[ - glm.Part.from_function_response( - glm.FunctionCall( - name=message.tool_calls[0].function.name, - args=json.loads(message.tool_calls[0].function.arguments), - ) - ) - ], - ) - return glm_content - elif isinstance(message, ToolPromptMessage): - glm_content = glm.Content( - role="function", - parts=[ - glm.Part( - function_response=glm.FunctionResponse( - name=message.name, response={"response": message.content} - ) - ) - ], - ) - return glm_content - else: - raise ValueError(f"Got unknown type {message}") - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the ermd = gml.GenerativeModel(model) error type thrown to the caller - The value is the md = gml.GenerativeModel(model) error type thrown by the model, - which needs to be converted into a unified error type for the caller. 
- - :return: Invoke emd = gml.GenerativeModel(model) error mapping - """ - return { - InvokeConnectionError: [exceptions.RetryError], - InvokeServerUnavailableError: [ - exceptions.ServiceUnavailable, - exceptions.InternalServerError, - exceptions.BadGateway, - exceptions.GatewayTimeout, - exceptions.DeadlineExceeded, - ], - InvokeRateLimitError: [exceptions.ResourceExhausted, exceptions.TooManyRequests], - InvokeAuthorizationError: [ - exceptions.Unauthenticated, - exceptions.PermissionDenied, - exceptions.Unauthenticated, - exceptions.Forbidden, - ], - InvokeBadRequestError: [ - exceptions.BadRequest, - exceptions.InvalidArgument, - exceptions.FailedPrecondition, - exceptions.OutOfRange, - exceptions.NotFound, - exceptions.MethodNotAllowed, - exceptions.Conflict, - exceptions.AlreadyExists, - exceptions.Aborted, - exceptions.LengthRequired, - exceptions.PreconditionFailed, - exceptions.RequestRangeNotSatisfiable, - exceptions.Cancelled, - ], - } diff --git a/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text_embedding.py deleted file mode 100644 index fce9544df0a414..00000000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text_embedding.py +++ /dev/null @@ -1,187 +0,0 @@ -import base64 -import json -import time -from decimal import Decimal -from typing import Optional - -import tiktoken -from google.cloud import aiplatform -from google.oauth2 import service_account -from vertexai.language_models import TextEmbeddingModel as VertexTextEmbeddingModel - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - ModelPropertyKey, - ModelType, - PriceConfig, - PriceType, -) -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult 
-from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.vertex_ai._common import _CommonVertexAi - - -class VertexAiTextEmbeddingModel(_CommonVertexAi, TextEmbeddingModel): - """ - Model class for Vertex AI text embedding model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - service_account_info = json.loads(base64.b64decode(credentials["vertex_service_account_key"])) - project_id = credentials["vertex_project_id"] - location = credentials["vertex_location"] - if service_account_info: - service_accountSA = service_account.Credentials.from_service_account_info(service_account_info) - aiplatform.init(credentials=service_accountSA, project=project_id, location=location) - else: - aiplatform.init(project=project_id, location=location) - - client = VertexTextEmbeddingModel.from_pretrained(model) - - embeddings_batch, embedding_used_tokens = self._embedding_invoke(client=client, texts=texts) - - # calc usage - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=embedding_used_tokens) - - return TextEmbeddingResult(embeddings=embeddings_batch, usage=usage, model=model) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - if len(texts) == 0: - return 0 - - try: - enc = 
tiktoken.encoding_for_model(model) - except KeyError: - enc = tiktoken.get_encoding("cl100k_base") - - total_num_tokens = 0 - for text in texts: - # calculate the number of tokens in the encoded text - tokenized_text = enc.encode(text) - total_num_tokens += len(tokenized_text) - - return total_num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - service_account_info = json.loads(base64.b64decode(credentials["vertex_service_account_key"])) - project_id = credentials["vertex_project_id"] - location = credentials["vertex_location"] - if service_account_info: - service_accountSA = service_account.Credentials.from_service_account_info(service_account_info) - aiplatform.init(credentials=service_accountSA, project=project_id, location=location) - else: - aiplatform.init(project=project_id, location=location) - - client = VertexTextEmbeddingModel.from_pretrained(model) - - # call embedding model - self._embedding_invoke(model=model, client=client, texts=["ping"]) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _embedding_invoke(self, client: VertexTextEmbeddingModel, texts: list[str]) -> [list[float], int]: # type: ignore - """ - Invoke embedding model - - :param model: model name - :param client: model client - :param texts: texts to embed - :return: embeddings and used tokens - """ - response = client.get_embeddings(texts) - - embeddings = [] - token_usage = 0 - - for i in range(len(response)): - embeddings.append(response[i].values) - token_usage += int(response[i].statistics.token_count) - - return embeddings, token_usage - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # 
get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.TEXT_EMBEDDING, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ - ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size")), - ModelPropertyKey.MAX_CHUNKS: 1, - }, - parameter_rules=[], - pricing=PriceConfig( - input=Decimal(credentials.get("input_price", 0)), - unit=Decimal(credentials.get("unit", 0)), - currency=credentials.get("currency", "USD"), - ), - ) - - return entity diff --git a/api/core/model_runtime/model_providers/volcengine_maas/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/volcengine_maas/text_embedding/text_embedding.py deleted file mode 100644 index 0dd4037c958567..00000000000000 --- a/api/core/model_runtime/model_providers/volcengine_maas/text_embedding/text_embedding.py +++ /dev/null @@ -1,198 +0,0 @@ -import time -from decimal import Decimal -from typing import Optional - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - ModelPropertyKey, - ModelType, - PriceConfig, - PriceType, -) -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke 
import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.volcengine_maas.client import ArkClientV3 -from core.model_runtime.model_providers.volcengine_maas.legacy.client import MaaSClient -from core.model_runtime.model_providers.volcengine_maas.legacy.errors import ( - AuthErrors, - BadRequestErrors, - ConnectionErrors, - MaasError, - RateLimitErrors, - ServerUnavailableErrors, -) -from core.model_runtime.model_providers.volcengine_maas.text_embedding.models import get_model_config - - -class VolcengineMaaSTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for VolcengineMaaS text embedding model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - if ArkClientV3.is_legacy(credentials): - return self._generate_v2(model, credentials, texts, user) - - return self._generate_v3(model, credentials, texts, user) - - def _generate_v2( - self, model: str, credentials: dict, texts: list[str], user: Optional[str] = None - ) -> TextEmbeddingResult: - client = MaaSClient.from_credential(credentials) - resp = MaaSClient.wrap_exception(lambda: client.embeddings(texts)) - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=resp["usage"]["total_tokens"]) - - result = TextEmbeddingResult(model=model, embeddings=[v["embedding"] for v in 
resp["data"]], usage=usage) - - return result - - def _generate_v3( - self, model: str, credentials: dict, texts: list[str], user: Optional[str] = None - ) -> TextEmbeddingResult: - client = ArkClientV3.from_credentials(credentials) - resp = client.embeddings(texts) - - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=resp.usage.total_tokens) - - result = TextEmbeddingResult(model=model, embeddings=[v.embedding for v in resp.data], usage=usage) - - return result - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - num_tokens = 0 - for text in texts: - # use GPT2Tokenizer to get num tokens - num_tokens += self._get_num_tokens_by_gpt2(text) - return num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - if ArkClientV3.is_legacy(credentials): - return self._validate_credentials_v2(model, credentials) - return self._validate_credentials_v3(model, credentials) - - def _validate_credentials_v2(self, model: str, credentials: dict) -> None: - try: - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except MaasError as e: - raise CredentialsValidateFailedError(e.message) - - def _validate_credentials_v3(self, model: str, credentials: dict) -> None: - try: - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except Exception as e: - raise CredentialsValidateFailedError(e) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a 
unified error type for the caller. - - :return: Invoke error mapping - """ - return { - InvokeConnectionError: ConnectionErrors.values(), - InvokeServerUnavailableError: ServerUnavailableErrors.values(), - InvokeRateLimitError: RateLimitErrors.values(), - InvokeAuthorizationError: AuthErrors.values(), - InvokeBadRequestError: BadRequestErrors.values(), - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - model_config = get_model_config(credentials) - model_properties = { - ModelPropertyKey.CONTEXT_SIZE: model_config.properties.context_size, - ModelPropertyKey.MAX_CHUNKS: model_config.properties.max_chunks, - } - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.TEXT_EMBEDDING, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties=model_properties, - parameter_rules=[], - pricing=PriceConfig( - input=Decimal(credentials.get("input_price", 0)), - unit=Decimal(credentials.get("unit", 0)), - currency=credentials.get("currency", "USD"), - ), - ) - - return entity - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/wenxin/text_embedding/text_embedding.py 
b/api/core/model_runtime/model_providers/wenxin/text_embedding/text_embedding.py deleted file mode 100644 index c21d0c055277f7..00000000000000 --- a/api/core/model_runtime/model_providers/wenxin/text_embedding/text_embedding.py +++ /dev/null @@ -1,187 +0,0 @@ -import time -from abc import abstractmethod -from collections.abc import Mapping -from json import dumps -from typing import Any, Optional - -import numpy as np -from requests import Response, post - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.model_entities import PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import InvokeError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.wenxin._common import BaiduAccessToken, _CommonWenxin -from core.model_runtime.model_providers.wenxin.wenxin_errors import ( - BadRequestError, - InternalServerError, - invoke_error_mapping, -) - - -class TextEmbedding: - @abstractmethod - def embed_documents(self, model: str, texts: list[str], user: str) -> (list[list[float]], int, int): - raise NotImplementedError - - -class WenxinTextEmbedding(_CommonWenxin, TextEmbedding): - def embed_documents(self, model: str, texts: list[str], user: str) -> (list[list[float]], int, int): - access_token = self._get_access_token() - url = f"{self.api_bases[model]}?access_token={access_token}" - body = self._build_embed_request_body(model, texts, user) - headers = { - "Content-Type": "application/json", - } - - resp = post(url, data=dumps(body), headers=headers) - if resp.status_code != 200: - raise InternalServerError(f"Failed to invoke ernie bot: {resp.text}") - return self._handle_embed_response(model, resp) - - def _build_embed_request_body(self, model: str, texts: list[str], 
user: str) -> dict[str, Any]: - if len(texts) == 0: - raise BadRequestError("The number of texts should not be zero.") - body = { - "input": texts, - "user_id": user, - } - return body - - def _handle_embed_response(self, model: str, response: Response) -> (list[list[float]], int, int): - data = response.json() - if "error_code" in data: - code = data["error_code"] - msg = data["error_msg"] - # raise error - self._handle_error(code, msg) - - embeddings = [v["embedding"] for v in data["data"]] - _usage = data["usage"] - tokens = _usage["prompt_tokens"] - total_tokens = _usage["total_tokens"] - - return embeddings, tokens, total_tokens - - -class WenxinTextEmbeddingModel(TextEmbeddingModel): - def _create_text_embedding(self, api_key: str, secret_key: str) -> TextEmbedding: - return WenxinTextEmbedding(api_key, secret_key) - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - - api_key = credentials["api_key"] - secret_key = credentials["secret_key"] - embedding: TextEmbedding = self._create_text_embedding(api_key, secret_key) - user = user or "ErnieBotDefault" - - context_size = self._get_context_size(model, credentials) - max_chunks = self._get_max_chunks(model, credentials) - inputs = [] - indices = [] - used_tokens = 0 - used_total_tokens = 0 - - for i, text in enumerate(texts): - # Here token count is only an approximation based on the GPT2 tokenizer - num_tokens = self._get_num_tokens_by_gpt2(text) - - if num_tokens >= context_size: - cutoff = int(np.floor(len(text) * (context_size / num_tokens))) - # if num tokens is larger than context length, only use the start - 
inputs.append(text[0:cutoff]) - else: - inputs.append(text) - indices += [i] - - batched_embeddings = [] - _iter = range(0, len(inputs), max_chunks) - for i in _iter: - embeddings_batch, _used_tokens, _total_used_tokens = embedding.embed_documents( - model, inputs[i : i + max_chunks], user - ) - used_tokens += _used_tokens - used_total_tokens += _total_used_tokens - batched_embeddings += embeddings_batch - - usage = self._calc_response_usage(model, credentials, used_tokens, used_total_tokens) - return TextEmbeddingResult( - model=model, - embeddings=batched_embeddings, - usage=usage, - ) - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - if len(texts) == 0: - return 0 - total_num_tokens = 0 - for text in texts: - total_num_tokens += self._get_num_tokens_by_gpt2(text) - - return total_num_tokens - - def validate_credentials(self, model: str, credentials: Mapping) -> None: - api_key = credentials["api_key"] - secret_key = credentials["secret_key"] - try: - BaiduAccessToken.get_access_token(api_key, secret_key) - except Exception as e: - raise CredentialsValidateFailedError(f"Credentials validation failed: {e}") - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return invoke_error_mapping() - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int, total_tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=total_tokens, - 
unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage diff --git a/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py deleted file mode 100644 index 16272391320d55..00000000000000 --- a/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py +++ /dev/null @@ -1,204 +0,0 @@ -import time -from typing import Optional - -from xinference_client.client.restful.restful_client import Client, RESTfulEmbeddingModelHandle - -from core.embedding.embedding_constant import EmbeddingInputType -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType, PriceType -from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.xinference.xinference_helper import XinferenceHelper - - -class XinferenceTextEmbeddingModel(TextEmbeddingModel): - """ - Model class for Xinference text embedding model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - """ - Invoke text embedding model - - credentials should be like: - { - 'server_url': 'server url', - 'model_uid': 'model uid', - } - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :param user: unique user id - :param input_type: input type - :return: embeddings result - """ - server_url = credentials["server_url"] - model_uid = credentials["model_uid"] - api_key = credentials.get("api_key") - server_url = server_url.removesuffix("/") - auth_headers = {"Authorization": f"Bearer {api_key}"} if api_key else {} - - try: - handle = RESTfulEmbeddingModelHandle(model_uid, server_url, auth_headers) - embeddings = handle.create_embedding(input=texts) - except RuntimeError as e: - raise InvokeServerUnavailableError(str(e)) - - """ - for convenience, the response json is like: - class Embedding(TypedDict): - object: Literal["list"] - model: str - data: List[EmbeddingData] - usage: EmbeddingUsage - class EmbeddingUsage(TypedDict): - prompt_tokens: int - total_tokens: int - class EmbeddingData(TypedDict): - index: int - object: str - embedding: List[float] - """ - - usage = embeddings["usage"] - usage = self._calc_response_usage(model=model, credentials=credentials, tokens=usage["total_tokens"]) - - result = TextEmbeddingResult( - model=model, embeddings=[embedding["embedding"] for embedding in embeddings["data"]], usage=usage - ) - - return result - - def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: - """ - Get number of tokens for given prompt messages - - :param model: model name - :param credentials: model credentials - :param texts: texts to embed - :return: - """ - num_tokens = 0 - for text in texts: - # use GPT2Tokenizer to get num tokens - num_tokens += 
self._get_num_tokens_by_gpt2(text) - return num_tokens - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - if "/" in credentials["model_uid"] or "?" in credentials["model_uid"] or "#" in credentials["model_uid"]: - raise CredentialsValidateFailedError("model_uid should not contain /, ?, or #") - - server_url = credentials["server_url"] - model_uid = credentials["model_uid"] - api_key = credentials.get("api_key") - extra_args = XinferenceHelper.get_xinference_extra_parameter( - server_url=server_url, - model_uid=model_uid, - api_key=api_key, - ) - - if extra_args.max_tokens: - credentials["max_tokens"] = extra_args.max_tokens - server_url = server_url.removesuffix("/") - - client = Client( - base_url=server_url, - api_key=api_key, - ) - - try: - handle = client.get_model(model_uid=model_uid) - except RuntimeError as e: - raise InvokeAuthorizationError(e) - - if not isinstance(handle, RESTfulEmbeddingModelHandle): - raise InvokeBadRequestError( - "please check model type, the model you want to invoke is not a text embedding model" - ) - - self._invoke(model=model, credentials=credentials, texts=["ping"]) - except InvokeAuthorizationError as e: - raise CredentialsValidateFailedError(f"Failed to validate credentials for model {model}: {e}") - except RuntimeError as e: - raise CredentialsValidateFailedError(e) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [InvokeConnectionError], - InvokeServerUnavailableError: [InvokeServerUnavailableError], - InvokeRateLimitError: [InvokeRateLimitError], - InvokeAuthorizationError: [InvokeAuthorizationError], - InvokeBadRequestError: [KeyError], - } - - def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: - """ - Calculate response usage - - :param model: 
model name - :param credentials: model credentials - :param tokens: input tokens - :return: usage - """ - # get input price info - input_price_info = self.get_price( - model=model, credentials=credentials, price_type=PriceType.INPUT, tokens=tokens - ) - - # transform usage - usage = EmbeddingUsage( - tokens=tokens, - total_tokens=tokens, - unit_price=input_price_info.unit_price, - price_unit=input_price_info.unit, - total_price=input_price_info.total_amount, - currency=input_price_info.currency, - latency=time.perf_counter() - self.started_at, - ) - - return usage - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: - """ - used to define customizable model schema - """ - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=ModelType.TEXT_EMBEDDING, - model_properties={ - ModelPropertyKey.MAX_CHUNKS: 1, - ModelPropertyKey.CONTEXT_SIZE: "max_tokens" in credentials and credentials["max_tokens"] or 512, - }, - parameter_rules=[], - ) - - return entity diff --git a/api/core/plugin/manager/model.py b/api/core/plugin/manager/model.py index 30bd9bc0a8e034..fb58c4bb8df5c1 100644 --- a/api/core/plugin/manager/model.py +++ b/api/core/plugin/manager/model.py @@ -235,6 +235,7 @@ def invoke_text_embedding( model: str, credentials: dict, texts: list[str], + input_type: str, ) -> TextEmbeddingResult: """ Invoke text embedding @@ -252,6 +253,7 @@ def invoke_text_embedding( "model": model, "credentials": credentials, "texts": texts, + "input_type": input_type, }, } ), @@ -272,7 +274,6 @@ def get_text_embedding_num_tokens( user_id: str, plugin_id: str, provider: str, - model_type: str, model: str, credentials: dict, texts: list[str], @@ -289,7 +290,7 @@ def get_text_embedding_num_tokens( "user_id": user_id, "data": { "provider": provider, - "model_type": model_type, + "model_type": "text-embedding", "model": model, "credentials": credentials, "texts": texts, @@ 
-313,7 +314,6 @@ def invoke_rerank( user_id: str, plugin_id: str, provider: str, - model_type: str, model: str, credentials: dict, query: str, @@ -333,7 +333,7 @@ def invoke_rerank( "user_id": user_id, "data": { "provider": provider, - "model_type": model_type, + "model_type": "rerank", "model": model, "credentials": credentials, "query": query, @@ -360,7 +360,6 @@ def invoke_tts( user_id: str, plugin_id: str, provider: str, - model_type: str, model: str, credentials: dict, content_text: str, @@ -378,7 +377,7 @@ def invoke_tts( "user_id": user_id, "data": { "provider": provider, - "model_type": model_type, + "model_type": "tts", "model": model, "credentials": credentials, "content_text": content_text, @@ -405,7 +404,6 @@ def get_tts_model_voices( user_id: str, plugin_id: str, provider: str, - model_type: str, model: str, credentials: dict, language: Optional[str] = None, @@ -422,7 +420,7 @@ def get_tts_model_voices( "user_id": user_id, "data": { "provider": provider, - "model_type": model_type, + "model_type": "tts", "model": model, "credentials": credentials, "language": language, @@ -447,7 +445,6 @@ def invoke_speech_to_text( user_id: str, plugin_id: str, provider: str, - model_type: str, model: str, credentials: dict, file: IO[bytes], @@ -464,7 +461,7 @@ def invoke_speech_to_text( "user_id": user_id, "data": { "provider": provider, - "model_type": model_type, + "model_type": "speech2text", "model": model, "credentials": credentials, "file": binascii.hexlify(file.read()).decode(), @@ -488,7 +485,6 @@ def invoke_moderation( user_id: str, plugin_id: str, provider: str, - model_type: str, model: str, credentials: dict, text: str, @@ -505,7 +501,7 @@ def invoke_moderation( "user_id": user_id, "data": { "provider": provider, - "model_type": model_type, + "model_type": "moderation", "model": model, "credentials": credentials, "text": text, diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py index cb49a6cf5619b7..ae3934327e1cba 100644 --- 
a/api/core/provider_manager.py +++ b/api/core/provider_manager.py @@ -244,12 +244,11 @@ def get_default_model(self, tenant_id: str, model_type: ModelType) -> Optional[D (model for model in available_models if model.model == "gpt-4"), available_models[0] ) - default_model = TenantDefaultModel( - tenant_id=tenant_id, - model_type=model_type.to_origin_model_type(), - provider_name=available_model.provider.provider, - model_name=available_model.model, - ) + default_model = TenantDefaultModel() + default_model.tenant_id = tenant_id + default_model.model_type = model_type.to_origin_model_type() + default_model.provider_name = available_model.provider.provider + default_model.model_name = available_model.model db.session.add(default_model) db.session.commit() @@ -489,15 +488,14 @@ def _init_trial_provider_records( # Init trial provider records if not exists if ProviderQuotaType.TRIAL not in provider_quota_to_provider_record_dict: try: - provider_record = Provider( - tenant_id=tenant_id, - provider_name=provider_name, - provider_type=ProviderType.SYSTEM.value, - quota_type=ProviderQuotaType.TRIAL.value, - quota_limit=quota.quota_limit, - quota_used=0, - is_valid=True, - ) + provider_record = Provider() + provider_record.tenant_id = tenant_id + provider_record.provider_name = provider_name + provider_record.provider_type = ProviderType.SYSTEM.value + provider_record.quota_type = ProviderQuotaType.TRIAL.value + provider_record.quota_limit = quota.quota_limit + provider_record.quota_used = 0 + provider_record.is_valid = True db.session.add(provider_record) db.session.commit() except IntegrityError: diff --git a/api/core/tools/builtin_tool/tool.py b/api/core/tools/builtin_tool/tool.py index fe77f9ac77bc74..abba542b8edb2a 100644 --- a/api/core/tools/builtin_tool/tool.py +++ b/api/core/tools/builtin_tool/tool.py @@ -3,7 +3,6 @@ from core.tools.__base.tool import Tool from core.tools.entities.tool_entities import ToolProviderType from core.tools.utils.model_invocation_utils 
import ModelInvocationUtils -from core.tools.utils.web_reader_tool import get_url _SUMMARY_PROMPT = """You are a professional language researcher, you are interested in the language and you can quickly aimed at the main point of an webpage and reproduce it in your own words but @@ -124,9 +123,3 @@ def summarize(content: str) -> str: return self.summary(user_id=user_id, content=result) return result - - def get_url(self, url: str, user_agent: str | None = None) -> str: - """ - get url - """ - return get_url(url, user_agent=user_agent) diff --git a/api/core/tools/utils/web_reader_tool.py b/api/core/tools/utils/web_reader_tool.py deleted file mode 100644 index dcbae9f5aa2260..00000000000000 --- a/api/core/tools/utils/web_reader_tool.py +++ /dev/null @@ -1,357 +0,0 @@ -import hashlib -import json -import mimetypes -import os -import re -import site -import subprocess -import tempfile -import unicodedata -from contextlib import contextmanager -from pathlib import Path -from urllib.parse import unquote - -import chardet -import cloudscraper -from bs4 import BeautifulSoup, CData, Comment, NavigableString -from regex import regex - -from core.helper import ssrf_proxy -from core.rag.extractor import extract_processor -from core.rag.extractor.extract_processor import ExtractProcessor - -FULL_TEMPLATE = """ -TITLE: {title} -AUTHORS: {authors} -PUBLISH DATE: {publish_date} -TOP_IMAGE_URL: {top_image} -TEXT: - -{text} -""" - - -def page_result(text: str, cursor: int, max_length: int) -> str: - """Page through `text` and return a substring of `max_length` characters starting from `cursor`.""" - return text[cursor : cursor + max_length] - - -def get_url(url: str, user_agent: str | None = None) -> str: - """Fetch URL and return the contents as a string.""" - headers = { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)" - " Chrome/91.0.4472.124 Safari/537.36" - } - if user_agent: - headers["User-Agent"] = user_agent - - 
main_content_type = None - supported_content_types = extract_processor.SUPPORT_URL_CONTENT_TYPES + ["text/html"] - response = ssrf_proxy.head(url, headers=headers, follow_redirects=True, timeout=(5, 10)) - - if response.status_code == 200: - # check content-type - content_type = response.headers.get("Content-Type") - if content_type: - main_content_type = response.headers.get("Content-Type").split(";")[0].strip() - else: - content_disposition = response.headers.get("Content-Disposition", "") - filename_match = re.search(r'filename="([^"]+)"', content_disposition) - if filename_match: - filename = unquote(filename_match.group(1)) - extension = re.search(r"\.(\w+)$", filename) - if extension: - main_content_type = mimetypes.guess_type(filename)[0] - - if main_content_type not in supported_content_types: - return "Unsupported content-type [{}] of URL.".format(main_content_type) - - if main_content_type in extract_processor.SUPPORT_URL_CONTENT_TYPES: - return ExtractProcessor.load_from_url(url, return_text=True) - - response = ssrf_proxy.get(url, headers=headers, follow_redirects=True, timeout=(120, 300)) - elif response.status_code == 403: - scraper = cloudscraper.create_scraper() - scraper.perform_request = ssrf_proxy.make_request - response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300)) - - if response.status_code != 200: - return "URL returned status code {}.".format(response.status_code) - - # Detect encoding using chardet - detected_encoding = chardet.detect(response.content) - encoding = detected_encoding["encoding"] - if encoding: - try: - content = response.content.decode(encoding) - except (UnicodeDecodeError, TypeError): - content = response.text - else: - content = response.text - - a = extract_using_readabilipy(content) - - if not a["plain_text"] or not a["plain_text"].strip(): - return "" - - res = FULL_TEMPLATE.format( - title=a["title"], - authors=a["byline"], - publish_date=a["date"], - top_image="", - 
text=a["plain_text"] or "", - ) - - return res - - -def extract_using_readabilipy(html): - with tempfile.NamedTemporaryFile(delete=False, mode="w+") as f_html: - f_html.write(html) - f_html.close() - html_path = f_html.name - - # Call Mozilla's Readability.js Readability.parse() function via node, writing output to a temporary file - article_json_path = html_path + ".json" - jsdir = os.path.join(find_module_path("readabilipy"), "javascript") - with chdir(jsdir): - subprocess.check_call(["node", "ExtractArticle.js", "-i", html_path, "-o", article_json_path]) - - # Read output of call to Readability.parse() from JSON file and return as Python dictionary - input_json = json.loads(Path(article_json_path).read_text(encoding="utf-8")) - - # Deleting files after processing - os.unlink(article_json_path) - os.unlink(html_path) - - article_json = { - "title": None, - "byline": None, - "date": None, - "content": None, - "plain_content": None, - "plain_text": None, - } - # Populate article fields from readability fields where present - if input_json: - if input_json.get("title"): - article_json["title"] = input_json["title"] - if input_json.get("byline"): - article_json["byline"] = input_json["byline"] - if input_json.get("date"): - article_json["date"] = input_json["date"] - if input_json.get("content"): - article_json["content"] = input_json["content"] - article_json["plain_content"] = plain_content(article_json["content"], False, False) - article_json["plain_text"] = extract_text_blocks_as_plain_text(article_json["plain_content"]) - if input_json.get("textContent"): - article_json["plain_text"] = input_json["textContent"] - article_json["plain_text"] = re.sub(r"\n\s*\n", "\n", article_json["plain_text"]) - - return article_json - - -def find_module_path(module_name): - for package_path in site.getsitepackages(): - potential_path = os.path.join(package_path, module_name) - if os.path.exists(potential_path): - return potential_path - - return None - - -@contextmanager -def 
chdir(path): - """Change directory in context and return to original on exit""" - # From https://stackoverflow.com/a/37996581, couldn't find a built-in - original_path = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(original_path) - - -def extract_text_blocks_as_plain_text(paragraph_html): - # Load article as DOM - soup = BeautifulSoup(paragraph_html, "html.parser") - # Select all lists - list_elements = soup.find_all(["ul", "ol"]) - # Prefix text in all list items with "* " and make lists paragraphs - for list_element in list_elements: - plain_items = "".join( - list(filter(None, [plain_text_leaf_node(li)["text"] for li in list_element.find_all("li")])) - ) - list_element.string = plain_items - list_element.name = "p" - # Select all text blocks - text_blocks = [s.parent for s in soup.find_all(string=True)] - text_blocks = [plain_text_leaf_node(block) for block in text_blocks] - # Drop empty paragraphs - text_blocks = list(filter(lambda p: p["text"] is not None, text_blocks)) - return text_blocks - - -def plain_text_leaf_node(element): - # Extract all text, stripped of any child HTML elements and normalize it - plain_text = normalize_text(element.get_text()) - if plain_text != "" and element.name == "li": - plain_text = "* {}, ".format(plain_text) - if plain_text == "": - plain_text = None - if "data-node-index" in element.attrs: - plain = {"node_index": element["data-node-index"], "text": plain_text} - else: - plain = {"text": plain_text} - return plain - - -def plain_content(readability_content, content_digests, node_indexes): - # Load article as DOM - soup = BeautifulSoup(readability_content, "html.parser") - # Make all elements plain - elements = plain_elements(soup.contents, content_digests, node_indexes) - if node_indexes: - # Add node index attributes to nodes - elements = [add_node_indexes(element) for element in elements] - # Replace article contents with plain elements - soup.contents = elements - return str(soup) - - -def 
plain_elements(elements, content_digests, node_indexes): - # Get plain content versions of all elements - elements = [plain_element(element, content_digests, node_indexes) for element in elements] - if content_digests: - # Add content digest attribute to nodes - elements = [add_content_digest(element) for element in elements] - return elements - - -def plain_element(element, content_digests, node_indexes): - # For lists, we make each item plain text - if is_leaf(element): - # For leaf node elements, extract the text content, discarding any HTML tags - # 1. Get element contents as text - plain_text = element.get_text() - # 2. Normalize the extracted text string to a canonical representation - plain_text = normalize_text(plain_text) - # 3. Update element content to be plain text - element.string = plain_text - elif is_text(element): - if is_non_printing(element): - # The simplified HTML may have come from Readability.js so might - # have non-printing text (e.g. Comment or CData). In this case, we - # keep the structure, but ensure that the string is empty. 
- element = type(element)("") - else: - plain_text = element.string - plain_text = normalize_text(plain_text) - element = type(element)(plain_text) - else: - # If not a leaf node or leaf type call recursively on child nodes, replacing - element.contents = plain_elements(element.contents, content_digests, node_indexes) - return element - - -def add_node_indexes(element, node_index="0"): - # Can't add attributes to string types - if is_text(element): - return element - # Add index to current element - element["data-node-index"] = node_index - # Add index to child elements - for local_idx, child in enumerate([c for c in element.contents if not is_text(c)], start=1): - # Can't add attributes to leaf string types - child_index = "{stem}.{local}".format(stem=node_index, local=local_idx) - add_node_indexes(child, node_index=child_index) - return element - - -def normalize_text(text): - """Normalize unicode and whitespace.""" - # Normalize unicode first to try and standardize whitespace characters as much as possible before normalizing them - text = strip_control_characters(text) - text = normalize_unicode(text) - text = normalize_whitespace(text) - return text - - -def strip_control_characters(text): - """Strip out unicode control characters which might break the parsing.""" - # Unicode control characters - # [Cc]: Other, Control [includes new lines] - # [Cf]: Other, Format - # [Cn]: Other, Not Assigned - # [Co]: Other, Private Use - # [Cs]: Other, Surrogate - control_chars = {"Cc", "Cf", "Cn", "Co", "Cs"} - retained_chars = ["\t", "\n", "\r", "\f"] - - # Remove non-printing control characters - return "".join( - [ - "" if (unicodedata.category(char) in control_chars) and (char not in retained_chars) else char - for char in text - ] - ) - - -def normalize_unicode(text): - """Normalize unicode such that things that are visually equivalent map to the same unicode string where possible.""" - normal_form = "NFKC" - text = unicodedata.normalize(normal_form, text) - return text 
- - -def normalize_whitespace(text): - """Replace runs of whitespace characters with a single space as this is what happens when HTML text is displayed.""" - text = regex.sub(r"\s+", " ", text) - # Remove leading and trailing whitespace - text = text.strip() - return text - - -def is_leaf(element): - return element.name in {"p", "li"} - - -def is_text(element): - return isinstance(element, NavigableString) - - -def is_non_printing(element): - return any(isinstance(element, _e) for _e in [Comment, CData]) - - -def add_content_digest(element): - if not is_text(element): - element["data-content-digest"] = content_digest(element) - return element - - -def content_digest(element): - if is_text(element): - # Hash - trimmed_string = element.string.strip() - if trimmed_string == "": - digest = "" - else: - digest = hashlib.sha256(trimmed_string.encode("utf-8")).hexdigest() - else: - contents = element.contents - num_contents = len(contents) - if num_contents == 0: - # No hash when no child elements exist - digest = "" - elif num_contents == 1: - # If single child, use digest of child - digest = content_digest(contents[0]) - else: - # Build content digest from the "non-empty" digests of child nodes - digest = hashlib.sha256() - child_digests = list(filter(lambda x: x != "", [content_digest(content) for content in contents])) - for child in child_digests: - digest.update(child.encode("utf-8")) - digest = digest.hexdigest() - return digest diff --git a/api/poetry.lock b/api/poetry.lock index 85c68cd75f1292..5c8485990cc67a 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "aiohappyeyeballs" -version = "2.4.0" +version = "2.4.2" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"}, - {file = "aiohappyeyeballs-2.4.0.tar.gz", hash = 
"sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"}, + {file = "aiohappyeyeballs-2.4.2-py3-none-any.whl", hash = "sha256:8522691d9a154ba1145b157d6d5c15e5c692527ce6a53c5e5f9876977f6dab2f"}, + {file = "aiohappyeyeballs-2.4.2.tar.gz", hash = "sha256:4ca893e6c5c1f5bf3888b04cb5a3bee24995398efef6e0b9f747b5e89d84fd74"}, ] [[package]] @@ -123,20 +123,6 @@ yarl = ">=1.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] -[[package]] -name = "aiohttp-retry" -version = "2.8.3" -description = "Simple retry client for aiohttp" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiohttp_retry-2.8.3-py3-none-any.whl", hash = "sha256:3aeeead8f6afe48272db93ced9440cf4eda8b6fd7ee2abb25357b7eb28525b45"}, - {file = "aiohttp_retry-2.8.3.tar.gz", hash = "sha256:9a8e637e31682ad36e1ff9f8bcba912fcfc7d7041722bc901a4b948da4d71ea9"}, -] - -[package.dependencies] -aiohttp = "*" - [[package]] name = "aiosignal" version = "1.3.1" @@ -153,13 +139,13 @@ frozenlist = ">=1.1.0" [[package]] name = "alembic" -version = "1.13.2" +version = "1.13.3" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.8" files = [ - {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, - {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, + {file = "alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e"}, + {file = "alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2"}, ] [package.dependencies] @@ -293,13 +279,13 @@ alibabacloud-tea = "*" [[package]] name = "alibabacloud-tea" -version = "0.3.9" +version = "0.3.10" description = "The tea module of alibabaCloud Python SDK." 
optional = false python-versions = ">=3.6" files = [ - {file = "alibabacloud-tea-0.3.9.tar.gz", hash = "sha256:a9689770003fa9313d1995812f9fe36a2be315e5cdfc8d58de0d96808219ced9"}, - {file = "alibabacloud_tea-0.3.9-py3-none-any.whl", hash = "sha256:402fd2a92e6729f228d8c0300b182f80019edce19d83afa497aeb15fd7947f9a"}, + {file = "alibabacloud-tea-0.3.10.tar.gz", hash = "sha256:bcf972416af5d8b5e671078c2ec20296dbc792e85e68acd685730a0a016afd2a"}, + {file = "alibabacloud_tea-0.3.10-py3-none-any.whl", hash = "sha256:9136f302a3baea8a1528f500bf5d47c3727b827a09b5c14b283ca53578e30082"}, ] [package.dependencies] @@ -321,17 +307,17 @@ alibabacloud-tea = ">=0.0.1" [[package]] name = "alibabacloud-tea-openapi" -version = "0.3.11" +version = "0.3.12" description = "Alibaba Cloud openapi SDK Library for Python" optional = false python-versions = ">=3.6" files = [ - {file = "alibabacloud_tea_openapi-0.3.11.tar.gz", hash = "sha256:3f5cace1b1aeb8a64587574097403cfd066b86ee4c3c9abde587f9abfcad38de"}, + {file = "alibabacloud_tea_openapi-0.3.12.tar.gz", hash = "sha256:2e14809f357438e62c1ef4976a7655110dd54a75bbfa7d905fa3798355cfd974"}, ] [package.dependencies] -alibabacloud_credentials = ">=0.3.1,<1.0.0" -alibabacloud_gateway_spi = ">=0.0.1,<1.0.0" +alibabacloud_credentials = ">=0.3.5,<1.0.0" +alibabacloud_gateway_spi = ">=0.0.2,<1.0.0" alibabacloud_openapi_util = ">=0.2.1,<1.0.0" alibabacloud_tea_util = ">=0.3.13,<1.0.0" alibabacloud_tea_xml = ">=0.0.2,<1.0.0" @@ -429,39 +415,15 @@ files = [ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] -[[package]] -name = "anthropic" -version = "0.23.1" -description = "The official Python library for the anthropic API" -optional = false -python-versions = ">=3.7" -files = [ - {file = "anthropic-0.23.1-py3-none-any.whl", hash = "sha256:6dc5779dae83a5834864f4a4af0166c972b70f4cb8fd2765e1558282cc6d6242"}, - {file = "anthropic-0.23.1.tar.gz", hash = 
"sha256:9325103702cbc96bb09d1b58c36bde75c726f6a01029fb4d85f41ebba07e9066"}, -] - -[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -tokenizers = ">=0.13.0" -typing-extensions = ">=4.7,<5" - -[package.extras] -bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] -vertex = ["google-auth (>=2,<3)"] - [[package]] name = "anyio" -version = "4.4.0" +version = "4.6.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, + {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, ] [package.dependencies] @@ -471,24 +433,9 @@ sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "arxiv" -version = "2.1.0" -description = "Python wrapper for the arXiv API: https://arxiv.org/help/api/" -optional = false -python-versions = ">=3.7" -files = [ - {file = "arxiv-2.1.0-py3-none-any.whl", hash = "sha256:d634a0a59c9f05baf524eaa65563bb0a4532d2b4727a1162a1a9ba7e1e6e48cc"}, - {file = "arxiv-2.1.0.tar.gz", hash = 
"sha256:eb4b1d5ab9dfd66027c344bb324c20be21d56fe15f6ce216ed5b209df747dea8"}, -] - -[package.dependencies] -feedparser = "6.0.10" -requests = "2.31.0" +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "asgiref" @@ -551,69 +498,6 @@ files = [ [package.dependencies] cryptography = "*" -[[package]] -name = "azure-ai-inference" -version = "1.0.0b4" -description = "Microsoft Azure Ai Inference Client Library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "azure-ai-inference-1.0.0b4.tar.gz", hash = "sha256:5464404bef337338d4af6eefde3af903400ddb8e5c9e6820f902303542fa0f72"}, - {file = "azure_ai_inference-1.0.0b4-py3-none-any.whl", hash = "sha256:e2c949f91845a8cd96cb9a61ffd432b5b0f4ce236b9be8c29d10f38e0a327412"}, -] - -[package.dependencies] -azure-core = ">=1.30.0" -isodate = ">=0.6.1" -typing-extensions = ">=4.6.0" - -[[package]] -name = "azure-ai-ml" -version = "1.20.0" -description = "Microsoft Azure Machine Learning Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-ai-ml-1.20.0.tar.gz", hash = "sha256:6432a0da1b7250cb0db5a1c33202e0419935e19ea32d4c2b3220705f8f1d4101"}, - {file = "azure_ai_ml-1.20.0-py3-none-any.whl", hash = "sha256:c7eb3c5ccf82a6ee94403c3e5060763decd38cf03ff2620a4a6577526e605104"}, -] - -[package.dependencies] -azure-common = ">=1.1" -azure-core = ">=1.23.0" -azure-mgmt-core = ">=1.3.0" -azure-storage-blob = ">=12.10.0" -azure-storage-file-datalake = ">=12.2.0" -azure-storage-file-share = "*" -colorama = "*" -isodate = "*" -jsonschema = ">=4.0.0" -marshmallow = ">=3.5" -msrest = ">=0.6.18" -opencensus-ext-azure = "*" -opencensus-ext-logging = "*" -pydash = ">=6.0.0" -pyjwt = "*" -pyyaml = 
">=5.1.0" -strictyaml = "*" -tqdm = "*" -typing-extensions = "*" - -[package.extras] -designer = ["mldesigner"] -mount = ["azureml-dataprep-rslex (>=2.22.0)"] - -[[package]] -name = "azure-common" -version = "1.1.28" -description = "Microsoft Azure Client Library for Python (Common)" -optional = false -python-versions = "*" -files = [ - {file = "azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3"}, - {file = "azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"}, -] - [[package]] name = "azure-core" version = "1.31.0" @@ -650,20 +534,6 @@ cryptography = ">=2.5" msal = ">=1.24.0" msal-extensions = ">=0.3.0" -[[package]] -name = "azure-mgmt-core" -version = "1.4.0" -description = "Microsoft Azure Management Core Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-mgmt-core-1.4.0.zip", hash = "sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae"}, - {file = "azure_mgmt_core-1.4.0-py3-none-any.whl", hash = "sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d"}, -] - -[package.dependencies] -azure-core = ">=1.26.2,<2.0.0" - [[package]] name = "azure-storage-blob" version = "12.13.0" @@ -680,42 +550,6 @@ azure-core = ">=1.23.1,<2.0.0" cryptography = ">=2.1.4" msrest = ">=0.6.21" -[[package]] -name = "azure-storage-file-datalake" -version = "12.8.0" -description = "Microsoft Azure File DataLake Storage Client Library for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "azure-storage-file-datalake-12.8.0.zip", hash = "sha256:12e6306e5efb5ca28e0ccd9fa79a2c61acd589866d6109fe5601b18509da92f4"}, - {file = "azure_storage_file_datalake-12.8.0-py3-none-any.whl", hash = "sha256:b6cf5733fe794bf3c866efbe3ce1941409e35b6b125028ac558b436bf90f2de7"}, -] - -[package.dependencies] -azure-core = ">=1.23.1,<2.0.0" -azure-storage-blob = ">=12.13.0,<13.0.0" 
-msrest = ">=0.6.21" - -[[package]] -name = "azure-storage-file-share" -version = "12.17.0" -description = "Microsoft Azure Azure File Share Storage Client Library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "azure-storage-file-share-12.17.0.tar.gz", hash = "sha256:f7b2c6cfc1b7cb80097a53b1ed2efa9e545b49a291430d369cdb49fafbc841d6"}, - {file = "azure_storage_file_share-12.17.0-py3-none-any.whl", hash = "sha256:c4652759a9d529bf08881bb53275bf38774bb643746b849d27c47118f9cf923d"}, -] - -[package.dependencies] -azure-core = ">=1.28.0" -cryptography = ">=2.1.4" -isodate = ">=0.6.1" -typing-extensions = ">=4.6.0" - -[package.extras] -aio = ["azure-core[aio] (>=1.28.0)"] - [[package]] name = "backoff" version = "2.2.1" @@ -787,13 +621,13 @@ lxml = ["lxml"] [[package]] name = "billiard" -version = "4.2.0" +version = "4.2.1" description = "Python multiprocessing fork with improvements and bugfixes" optional = false python-versions = ">=3.7" files = [ - {file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"}, - {file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"}, + {file = "billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb"}, + {file = "billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f"}, ] [[package]] @@ -828,13 +662,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.19" +version = "1.35.29" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.19-py3-none-any.whl", hash = "sha256:c83f7f0cacfe7c19b109b363ebfa8736e570d24922f16ed371681f58ebab44a9"}, - {file = "botocore-1.35.19.tar.gz", hash = "sha256:42d6d8db7250cbd7899f786f9861e02cab17dc238f64d6acb976098ed9809625"}, + {file = "botocore-1.35.29-py3-none-any.whl", hash = "sha256:f8e3ae0d84214eff3fb69cb4dc51cea6c43d3bde82027a94d00c52b941d6c3d5"}, + {file = "botocore-1.35.29.tar.gz", hash = "sha256:4ed28ab03675bb008a290c452c5ddd7aaa5d4e3fa1912aadbdf93057ee84362b"}, ] [package.dependencies] @@ -1597,128 +1431,6 @@ pandas = ["pandas"] sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] tzlocal = ["tzlocal (>=4.0)"] -[[package]] -name = "clickhouse-driver" -version = "0.2.9" -description = "Python driver with native interface for ClickHouse" -optional = false -python-versions = "<4,>=3.7" -files = [ - {file = "clickhouse-driver-0.2.9.tar.gz", hash = "sha256:050ea4870ead993910b39e7fae965dc1c347b2e8191dcd977cd4b385f9e19f87"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ce04e9d0d0f39561f312d1ac1a8147bc9206e4267e1a23e20e0423ebac95534"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7ae5c8931bf290b9d85582e7955b9aad7f19ff9954e48caa4f9a180ea4d01078"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e51792f3bd12c32cb15a907f12de3c9d264843f0bb33dce400e3966c9f09a3f"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42fc546c31e4a04c97b749769335a679c9044dc693fa7a93e38c97fd6727173d"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a383a403d185185c64e49edd6a19b2ec973c5adcb8ebff7ed2fc539a2cc65a5"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f05321a97e816afc75b3e4f9eda989848fecf14ecf1a91d0f22c04258123d1f7"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be47e793846aac28442b6b1c6554e0731b848a5a7759a54aa2489997354efe4a"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:780e42a215d1ae2f6d695d74dd6f087781fb2fa51c508b58f79e68c24c5364e0"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9e28f1fe850675e173db586e9f1ac790e8f7edd507a4227cd54cd7445f8e75b6"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:125aae7f1308d3083dadbb3c78f828ae492e060f13e4007a0cf53a8169ed7b39"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2f3c4fbb61e75c62a1ab93a1070d362de4cb5682f82833b2c12deccb3bae888d"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dc03196a84e32d23b88b665be69afae98f57426f5fdf203e16715b756757961"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-win32.whl", hash = "sha256:25695d78a1d7ad6e221e800612eac08559f6182bf6dee0a220d08de7b612d993"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:367acac95398d721a0a2a6cf87e93638c5588b79498a9848676ce7f182540a6c"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a7353a7a08eee3aa0001d8a5d771cb1f37e2acae1b48178002431f23892121a"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6af1c6cbc3481205503ab72a34aa76d6519249c904aa3f7a84b31e7b435555be"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48033803abd1100bfff6b9a1769d831b672cd3cda5147e0323b956fd1416d38d"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1f202a58a540c85e47c31dabc8f84b6fe79dca5315c866450a538d58d6fa0571"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4df50fd84bfa4aa1eb7b52d48136066bfb64fabb7ceb62d4c318b45a296200b"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:433a650571a0d7766eb6f402e8f5930222997686c2ee01ded22f1d8fd46af9d4"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:232ee260475611cbf7adb554b81db6b5790b36e634fe2164f4ffcd2ca3e63a71"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:09049f7e71f15c9c9a03f597f77fc1f7b61ababd155c06c0d9e64d1453d945d7"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:424153d1d5f5a807f596a48cc88119f9fb3213ca7e38f57b8d15dcc964dd91f7"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4f078fd1cf19c4ca63b8d1e0803df665310c8d5b644c5b02bf2465e8d6ef8f55"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f138d939e26e767537f891170b69a55a88038919f5c10d8865b67b8777fe4848"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9aafabc7e32942f85dcb46f007f447ab69024831575df97cae28c6ed127654d1"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-win32.whl", hash = "sha256:935e16ebf1a1998d8493979d858821a755503c9b8af572d9c450173d4b88868c"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-win_amd64.whl", hash = "sha256:306b3102cba278b5dfec6f5f7dc8b78416c403901510475c74913345b56c9e42"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fcb2fd00e58650ae206a6d5dbc83117240e622471aa5124733fbf2805eb8bda0"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:b7a3e6b0a1eb218e3d870a94c76daaf65da46dca8f6888ea6542f94905c24d88"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8d8e2888a857d8db3d98765a5ad23ab561241feaef68bbffc5a0bd9c142342"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85d50c011467f5ff6772c4059345968b854b72e07a0219030b7c3f68419eb7f7"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93b395c1370629ccce8fb3e14cd5be2646d227bd32018c21f753c543e9a7e96b"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dbcee870c60d9835e5dce1456ab6b9d807e6669246357f4b321ef747b90fa43"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fffa5a5f317b1ec92e406a30a008929054cf3164d2324a3c465d0a0330273bf8"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:476702740a279744badbd177ae1c4a2d089ec128bd676861219d1f92078e4530"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5cd6d95fab5ff80e9dc9baedc9a926f62f74072d42d5804388d63b63bec0bb63"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:05027d32d7cf3e46cb8d04f8c984745ae01bd1bc7b3579f9dadf9b3cca735697"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:3d11831842250b4c1b26503a6e9c511fc03db096608b7c6af743818c421a3032"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:81b4b671b785ebb0b8aeabf2432e47072413d81db959eb8cfd8b6ab58c5799c6"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-win32.whl", hash = "sha256:e893bd4e014877174a59e032b0e99809c95ec61328a0e6bd9352c74a2f6111a8"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-win_amd64.whl", hash = 
"sha256:de6624e28eeffd01668803d28ae89e3d4e359b1bff8b60e4933e1cb3c6f86f18"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:909205324089a9ee59bee7ecbfa94595435118cca310fd62efdf13f225aa2965"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f31d6e47dc2b0f367f598f5629147ed056d7216c1788e25190fcfbfa02e749"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed84179914b2b7bb434c2322a6e7fd83daa681c97a050450511b66d917a129bb"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67d1bf63efb4ba14ae6c6da99622e4a549e68fc3ee14d859bf611d8e6a61b3fa"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eed23ea41dd582d76f7a2ec7e09cbe5e9fec008f11a4799fa35ce44a3ebd283"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a654291132766efa2703058317749d7c69b69f02d89bac75703eaf7f775e20da"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c26c5ef16d0ef3cabc5bc03e827e01b0a4afb5b4eaf8850b7cf740cee04a1d4"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b57e83d7986d3cbda6096974a9510eb53cb33ad9072288c87c820ba5eee3370e"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:153cc03b36f22cbde55aa6a5bbe99072a025567a54c48b262eb0da15d8cd7c83"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:83a857d99192936091f495826ae97497cd1873af213b1e069d56369fb182ab8e"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb05a9bb22cbe9ad187ad268f86adf7e60df6083331fe59c01571b7b725212dd"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-win32.whl", hash = 
"sha256:3e282c5c25e32d96ed151e5460d2bf4ecb805ea64449197dd918e84e768016df"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-win_amd64.whl", hash = "sha256:c46dccfb04a9afd61a1b0e60bfefceff917f76da2c863f9b36b39248496d5c77"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:612ca9028c718f362c97f552e63d313cf1a70a616ef8532ddb0effdaf12ebef9"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471b884d318e012f68d858476052742048918854f7dfe87d78e819f87a848ffb"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58ee63c35e99da887eb035c8d6d9e64fd298a0efc1460395297dd5cc281a6912"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0819bb63d2c5025a1fb9589f57ef82602687cef11081d6dfa6f2ce44606a1772"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6680ee18870bca1fbab1736c8203a965efaec119ab4c37821ad99add248ee08"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:713c498741b54debd3a10a5529e70b6ed85ca33c3e8629e24ae5cd8160b5a5f2"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:730837b8f63941065c9c955c44286aef0987fb084ffb3f55bf1e4fe07df62269"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9f4e38b2ea09214c8e7848a19391009a18c56a3640e1ba1a606b9e57aeb63404"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:457f1d6639e0345b717ae603c79bd087a35361ce68c1c308d154b80b841e5e7d"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:49a55aeb8ea625a87965a96e361bbb1ad67d0931bfb2a575f899c1064e70c2da"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:9230058d8c9b1a04079afae4650fb67745f0f1c39db335728f64d48bd2c19246"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8798258bd556542dd9c6b8ebe62f9c5110c9dcdf97c57fb077e7b8b6d6da0826"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-win32.whl", hash = "sha256:ce8e3f4be46bcc63555863f70ab0035202b082b37e6f16876ef50e7bc4b47056"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-win_amd64.whl", hash = "sha256:2d982959ff628255808d895a67493f2dab0c3a9bfc65eeda0f00c8ae9962a1b3"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a46b227fab4420566ed24ee70d90076226d16fcf09c6ad4d428717efcf536446"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7eaa2ce5ea08cf5fddebb8c274c450e102f329f9e6966b6cd85aa671c48e5552"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97f0083194d6e23b5ef6156ed0d5388c37847b298118199d7937ba26412a9e2"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6cab5cdbb0f8ee51d879d977b78f07068b585225ac656f3c081896c362e8f83"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdb1b011a53ee71539e9dc655f268b111bac484db300da92829ed59e910a8fd0"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf51bb761b281d20910b4b689c699ef98027845467daa5bb5dfdb53bd6ee404"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8ea462e3cebb121ff55002e9c8a9a0a3fd9b5bbbf688b4960f0a83c0172fb31"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:70bee21c245226ad0d637bf470472e2d487b86911b6d673a862127b934336ff4"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:253a3c223b944d691bf0abbd599f592ea3b36f0a71d2526833b1718f37eca5c2"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a6549b53fc5c403dc556cb39b2ae94d73f9b113daa00438a660bb1dd5380ae4d"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1c685cd4abe61af1c26279ff04b9f567eb4d6c1ec7fb265af7481b1f153043aa"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7e25144219577491929d032a6c3ddd63c6cd7fa764af829a5637f798190d9b26"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-win32.whl", hash = "sha256:0b9925610d25405a8e6d83ff4f54fc2456a121adb0155999972f5edd6ba3efc8"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-win_amd64.whl", hash = "sha256:b243de483cfa02716053b0148d73558f4694f3c27b97fc1eaa97d7079563a14d"}, - {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45a3d5b1d06750fd6a18c29b871494a2635670099ec7693e756a5885a4a70dbf"}, - {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8415ffebd6ca9eef3024763abc450f8659f1716d015bd563c537d01c7fbc3569"}, - {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace48db993aa4bd31c42de0fa8d38c94ad47405916d6b61f7a7168a48fb52ac1"}, - {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b07123334fe143bfe6fa4e3d4b732d647d5fd2cfb9ec7f2f76104b46fe9d20c6"}, - {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2af3efa73d296420ce6362789f5b1febf75d4aa159a479393f01549115509d5"}, - {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:baf57eede88d07a1eb04352d26fc58a4d97991ca3d8840f7c5d48691dec9f251"}, - {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:275d0ccdab9c3571bdb3e9acfab4497930aa584ff2766b035bb2f854deaf8b82"}, - {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:293da77bfcac3168fb35b27c242f97c1a05502435c0686ecbb8e2e4abcb3de26"}, - {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d6c2e5830705e4eeef33070ca4d5a24dfa221f28f2f540e5e6842c26e70b10b"}, - {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:11934bd78d97dd7e1a23a6222b5edd1e1b4d34e1ead5c846dc2b5c56fdc35ff5"}, - {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b802b6f0fbdcc3ab81b87f09b694dde91ab049f44d1d2c08c3dc8ea9a5950cfa"}, - {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7af871c5315eb829ecf4533c790461ea8f73b3bfd5f533b0467e479fdf6ddcfd"}, - {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d577dd4867b9e26cf60590e1f500990c8701a6e3cfbb9e644f4d0c0fb607028"}, - {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ed3dea2d1eca85fef5b8564ddd76dedb15a610c77d55d555b49d9f7c896b64b"}, - {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:91ec96f2c48e5bdeac9eea43a9bc9cc19acb2d2c59df0a13d5520dfc32457605"}, - {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7667ab423452754f36ba8fb41e006a46baace9c94e2aca2a745689b9f2753dfb"}, - {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:653583b1f3b088d106f180d6f02c90917ecd669ec956b62903a05df4a7f44863"}, - {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7ef3dd0cbdf2f0171caab90389af0ede068ec802bf46c6a77f14e6edc86671bc"}, - {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11b1833ee8ff8d5df39a34a895e060b57bd81e05ea68822bc60476daff4ce1c8"}, - {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8a3195639e6393b9d4aafe736036881ff86b6be5855d4bf7d9f5c31637181ec3"}, -] - -[package.dependencies] -pytz = "*" -tzlocal = "*" - -[package.extras] -lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4", "lz4 (<=3.0.1)"] -numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"] -zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"] - [[package]] name = "cloudpickle" version = "2.2.1" @@ -1730,42 +1442,6 @@ files = [ {file = "cloudpickle-2.2.1.tar.gz", hash = "sha256:d89684b8de9e34a2a43b3460fbca07d09d6e25ce858df4d5a44240403b6178f5"}, ] -[[package]] -name = "cloudscraper" -version = "1.2.71" -description = "A Python module to bypass Cloudflare's anti-bot page." 
-optional = false -python-versions = "*" -files = [ - {file = "cloudscraper-1.2.71-py2.py3-none-any.whl", hash = "sha256:76f50ca529ed2279e220837befdec892626f9511708e200d48d5bb76ded679b0"}, - {file = "cloudscraper-1.2.71.tar.gz", hash = "sha256:429c6e8aa6916d5bad5c8a5eac50f3ea53c9ac22616f6cb21b18dcc71517d0d3"}, -] - -[package.dependencies] -pyparsing = ">=2.4.7" -requests = ">=2.9.2" -requests-toolbelt = ">=0.9.1" - -[[package]] -name = "cohere" -version = "5.2.6" -description = "" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "cohere-5.2.6-py3-none-any.whl", hash = "sha256:256b4ed00f47eb315401d7f28834655714f098382908e7d0ad5c98225aa6a57d"}, - {file = "cohere-5.2.6.tar.gz", hash = "sha256:15d13682706fbafc8cf700e195f628389a643eb7ebd6d7c5e9d6e1ebd3f942fb"}, -] - -[package.dependencies] -fastavro = ">=1.9.4,<2.0.0" -httpx = ">=0.21.2" -pydantic = ">=1.9.2" -requests = ">=2.0.0,<3.0.0" -tokenizers = ">=0.15.2,<0.16.0" -types-requests = ">=2.0.0,<3.0.0" -typing_extensions = ">=4.0.0" - [[package]] name = "colorama" version = "0.4.6" @@ -1794,90 +1470,6 @@ humanfriendly = ">=9.1" [package.extras] cron = ["capturer (>=2.4)"] -[[package]] -name = "contourpy" -version = "1.3.0" -description = "Python library for calculating contours of 2D quadrilateral grids" -optional = false -python-versions = ">=3.9" -files = [ - {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"}, - {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"}, - {file = 
"contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"}, - {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"}, - {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"}, - {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"}, - {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"}, - {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"}, - {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"}, - {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"}, - {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"}, - {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"}, - {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"}, - {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"}, - {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"}, - {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"}, - {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"}, - {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"}, - {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"}, - {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"}, - {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"}, - {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"}, - {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"}, - {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"}, - {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"}, - {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"}, - {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"}, - {file = 
"contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"}, - {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"}, - {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"}, - {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"}, - {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"}, - {file = 
"contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"}, - {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"}, - {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"}, - {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"}, - {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"}, -] - -[package.dependencies] -numpy = ">=1.23" - -[package.extras] -bokeh = ["bokeh", "selenium"] -docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] -mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pillow"] -test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] -test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", 
"pytest-xdist", "wurlitzer"] - [[package]] name = "cos-python-sdk-v5" version = "1.9.30" @@ -2031,77 +1623,6 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -[[package]] -name = "cssselect" -version = "1.2.0" -description = "cssselect parses CSS3 Selectors and translates them to XPath 1.0" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cssselect-1.2.0-py2.py3-none-any.whl", hash = "sha256:da1885f0c10b60c03ed5eccbb6b68d6eff248d91976fcde348f395d54c9fd35e"}, - {file = "cssselect-1.2.0.tar.gz", hash = "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc"}, -] - -[[package]] -name = "cycler" -version = "0.12.1" -description = "Composable style cycles" -optional = false -python-versions = ">=3.8" -files = [ - {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, - {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, -] - -[package.extras] -docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] -tests = ["pytest", "pytest-cov", "pytest-xdist"] - -[[package]] -name = "dashscope" -version = "1.17.1" -description = "dashscope client sdk library" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "dashscope-1.17.1-py3-none-any.whl", hash = "sha256:1e07e7ff4544684797f86ede646766b5ab8f5bd6eb43d2d01f0f757a2941efe1"}, -] - -[package.dependencies] -aiohttp = "*" -requests = "*" -tiktoken = {version = "*", optional = true, markers = "extra == \"tokenizer\""} - -[package.extras] -tokenizer = ["tiktoken"] - -[[package]] -name = "dataclass-wizard" -version = "0.22.3" -description = "Marshal dataclasses to/from JSON. Use field properties with initial values. Construct a dataclass schema with JSON input." 
-optional = false -python-versions = "*" -files = [ - {file = "dataclass-wizard-0.22.3.tar.gz", hash = "sha256:4c46591782265058f1148cfd1f54a3a91221e63986fdd04c9d59f4ced61f4424"}, - {file = "dataclass_wizard-0.22.3-py2.py3-none-any.whl", hash = "sha256:63751203e54b9b9349212cc185331da73c1adc99c51312575eb73bb5c00c1962"}, -] - -[package.extras] -dev = ["Sphinx (==5.3.0)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.12)", "dataclasses-json (==0.5.6)", "flake8 (>=3)", "jsons (==1.6.1)", "pip (>=21.3.1)", "pytest (==7.0.1)", "pytest-cov (==3.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==3.0.1)", "sphinx-issues (==4.0.0)", "tox (==3.24.5)", "twine (==3.8.0)", "watchdog[watchmedo] (==2.1.6)", "wheel (==0.37.1)", "wheel (==0.42.0)"] -timedelta = ["pytimeparse (>=1.1.7)"] -yaml = ["PyYAML (>=5.3)"] - -[[package]] -name = "dataclasses" -version = "0.6" -description = "A backport of the dataclasses module for Python 3.6" -optional = false -python-versions = "*" -files = [ - {file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"}, - {file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"}, -] - [[package]] name = "dataclasses-json" version = "0.6.7" @@ -2117,23 +1638,6 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" -[[package]] -name = "db-dtypes" -version = "1.3.0" -description = "Pandas Data Types for SQL systems (BigQuery, Spanner)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "db_dtypes-1.3.0-py2.py3-none-any.whl", hash = "sha256:7e65c59f849ccbe6f7bc4d0253edcc212a7907662906921caba3e4aadd0bc277"}, - {file = "db_dtypes-1.3.0.tar.gz", hash = "sha256:7bcbc8858b07474dc85b77bb2f3ae488978d1336f5ea73b58c39d9118bc3e91b"}, -] - -[package.dependencies] -numpy = ">=1.16.6" -packaging = ">=17.0" -pandas = ">=0.24.2" -pyarrow = ">=3.0.0" - [[package]] name = 
"defusedxml" version = "0.7.1" @@ -2164,13 +1668,13 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] [[package]] name = "dill" -version = "0.3.8" +version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, ] [package.extras] @@ -2240,79 +1744,15 @@ ply = ">=3.11,<4.0" typing_extensions = ">=4.0,<5.0" [[package]] -name = "duckdb" -version = "1.1.0" -description = "DuckDB in-process database" +name = "durationpy" +version = "0.7" +description = "Module for converting between datetime.timedelta and Go's Duration strings." 
optional = false -python-versions = ">=3.7.0" -files = [ - {file = "duckdb-1.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5e4cbc408e6e41146dea89b9044dae7356e353db0c96b183e5583ee02bc6ae5d"}, - {file = "duckdb-1.1.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:6370ae27ec8167ccfbefb94f58ad9fdc7bac142399960549d6d367f233189868"}, - {file = "duckdb-1.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:4e1c3414f7fd01f4810dc8b335deffc91933a159282d65fef11c1286bc0ded04"}, - {file = "duckdb-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6bc2a58689adf5520303c5f68b065b9f980bd31f1366c541b8c7490abaf55cd"}, - {file = "duckdb-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d02be208d2885ca085d4c852b911493b8cdac9d6eae893259da32bd72a437c25"}, - {file = "duckdb-1.1.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:655df442ceebfc6f3fd6c8766e04b60d44dddedfa90275d794f9fab2d3180879"}, - {file = "duckdb-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6e183729bb64be7798ccbfda6283ebf423c869268c25af2b56929e48f763be2f"}, - {file = "duckdb-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:61fb838da51e07ceb0222c4406b059b90e10efcc453c19a3650b73c0112138c4"}, - {file = "duckdb-1.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:7807e2f0d3344668e433f0dc1f54bfaddd410589611393e9a7ed56f8dec9514f"}, - {file = "duckdb-1.1.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:3da30b7b466f710d52caa1fdc3ef0bf4176ad7f115953cd9f8b0fbf0f723778f"}, - {file = "duckdb-1.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:b9b6a77ef0183f561b1fc2945fcc762a71570ffd33fea4e3a855d413ed596fe4"}, - {file = "duckdb-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16243e66a9fd0e64ee265f2634d137adc6593f54ddf3ef55cb8a29e1decf6e54"}, - {file = "duckdb-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:42b910a149e00f40a1766dc74fa309d4255b912a5d2fdcc387287658048650f6"}, - {file = "duckdb-1.1.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47849d546dc4238c0f20e95fe53b621aa5b08684e68fff91fd84a7092be91a17"}, - {file = "duckdb-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11ec967b67159361ceade34095796a8d19368ea5c30cad988f44896b082b0816"}, - {file = "duckdb-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:510b5885ed6c267b9c0e1e7c6138fdffc2dd6f934a5a95b76da85da127213338"}, - {file = "duckdb-1.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:657bc7ac64d5faf069a782ae73afac51ef30ae2e5d0e09ce6a09d03db84ab35e"}, - {file = "duckdb-1.1.0-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:89f3de8cba57d19b41cd3c47dd06d979bd2a2ffead115480e37afbe72b02896d"}, - {file = "duckdb-1.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:f6486323ab20656d22ffa8f3c6e109dde30d0b327b7c831f22ebcfe747f97fb0"}, - {file = "duckdb-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78a4510f82431ee3f14db689fe8727a4a9062c8f2fbb3bcfe3bfad3c1a198004"}, - {file = "duckdb-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64bf2a6e23840d662bd2ac09206a9bd4fa657418884d69e5c352d4456dc70b3c"}, - {file = "duckdb-1.1.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23fc9aa0af74e3803ed90c8d98280fd5bcac8c940592bf6288e8fd60fb051d00"}, - {file = "duckdb-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f3aea31341ce400640dd522e4399b941f66df17e39884f446638fe958d6117c"}, - {file = "duckdb-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:3db4ab31c20de4edaef152930836b38e7662cd71370748fdf2c38ba9cf854dc4"}, - {file = "duckdb-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3b6b4fe1edfe35f64f403a9f0ab75258cee35abd964356893ee37424174b7e4"}, - {file = 
"duckdb-1.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad02f50d5a2020822d1638fc1a9bcf082056f11d2e15ccfc1c1ed4d0f85a3be"}, - {file = "duckdb-1.1.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb66e9e7391801928ea134dcab12d2e4c97f2ce0391c603a3e480bbb15830bc8"}, - {file = "duckdb-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:069fb7bca459e31edb32a61f0eea95d7a8a766bef7b8318072563abf8e939593"}, - {file = "duckdb-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e39f9b7b62e64e10d421ff04480290a70129c38067d1a4f600e9212b10542c5a"}, - {file = "duckdb-1.1.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:55ef98bcc7ba745752607f1b926e8d9b7ce32c42c423bbad10c44820aefe23a7"}, - {file = "duckdb-1.1.0-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:e2a08175e43b865c1e9611efd18cacd29ddd69093de442b1ebdf312071df7719"}, - {file = "duckdb-1.1.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:0e3644b1f034012d82b9baa12a7ea306fe71dc6623731b28c753c4a617ff9499"}, - {file = "duckdb-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:211a33c1ddb5cc609f75eb43772b0b03b45d2fa89bec107e4715267ca907806a"}, - {file = "duckdb-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e74b6f8a5145abbf7e6c1a2a61f0adbcd493c19b358f524ec9a3cebdf362abb"}, - {file = "duckdb-1.1.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:58f1633dd2c5af5088ae2d119418e200855d0699d84f2fae9d46d30f404bcead"}, - {file = "duckdb-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d18caea926b1e301c29b140418fca697aad728129e269b4f82c2795a184549e1"}, - {file = "duckdb-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:cd9fb1408942411ad360f8414bc3fbf0091c396ca903d947a10f2e31324d5cbd"}, - {file = "duckdb-1.1.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bd11bc899cebf5ff936d1276a2dfb7b7db08aba3bcc42924afeafc2163bddb43"}, - {file = 
"duckdb-1.1.0-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:53825a63193c582a78c152ea53de8d145744ddbeea18f452625a82ebc33eb14a"}, - {file = "duckdb-1.1.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:29dc18087de47563b3859a6b98bbed96e1c96ce5db829646dc3b16a916997e7d"}, - {file = "duckdb-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb19319883564237a7a03a104dbe7f445e73519bb67108fcab3d19b6b91fe30"}, - {file = "duckdb-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aac2fcabe2d5072c252d0b3087365f431de812d8199705089fb073e4d039d19c"}, - {file = "duckdb-1.1.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d89eaaa5df8a57e7d2bc1f4c46493bb1fee319a00155f2015810ad2ace6570ae"}, - {file = "duckdb-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d86a6926313913cd2cc7e08816d3e7f72ba340adf2959279b1a80058be6526d9"}, - {file = "duckdb-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:d8333f3e85fa2a0f1c222b752c2bd42ea875235ff88492f7bcbb6867d0f644eb"}, - {file = "duckdb-1.1.0.tar.gz", hash = "sha256:b4d4c12b1f98732151bd31377753e0da1a20f6423016d2d097d2e31953ec7c23"}, -] - -[[package]] -name = "duckduckgo-search" -version = "6.2.12" -description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." 
-optional = false -python-versions = ">=3.8" +python-versions = "*" files = [ - {file = "duckduckgo_search-6.2.12-py3-none-any.whl", hash = "sha256:0d379c1f845b632a41553efb13d571788f19ad289229e641a27b5710d92097a6"}, - {file = "duckduckgo_search-6.2.12.tar.gz", hash = "sha256:04f9f1459763668d268344c7a32d943173d0e060dad53a5c2df4b4d3ca9a74cf"}, + {file = "durationpy-0.7.tar.gz", hash = "sha256:8447c43df4f1a0b434e70c15a38d77f5c9bd17284bfc1ff1d430f233d5083732"}, ] -[package.dependencies] -click = ">=8.1.7" -primp = ">=0.6.2" - -[package.extras] -dev = ["mypy (>=1.11.1)", "pytest (>=8.3.1)", "pytest-asyncio (>=0.23.8)", "ruff (>=0.6.1)"] -lxml = ["lxml (>=5.2.2)"] - [[package]] name = "elastic-transport" version = "8.15.0" @@ -2356,18 +1796,15 @@ vectorstore-mmr = ["numpy (>=1)", "simsimd (>=3)"] [[package]] name = "emoji" -version = "2.12.1" +version = "2.13.2" description = "Emoji for Python" optional = false python-versions = ">=3.7" files = [ - {file = "emoji-2.12.1-py3-none-any.whl", hash = "sha256:a00d62173bdadc2510967a381810101624a2f0986145b8da0cffa42e29430235"}, - {file = "emoji-2.12.1.tar.gz", hash = "sha256:4aa0488817691aa58d83764b6c209f8a27c0b3ab3f89d1b8dceca1a62e4973eb"}, + {file = "emoji-2.13.2-py3-none-any.whl", hash = "sha256:ef6f2ee63b245e934c763b1a9a0637713955aa3d9e322432e036bb60559de4d6"}, + {file = "emoji-2.13.2.tar.gz", hash = "sha256:f95d10d96c5f21299ed2c4b32511611ba890b8c07f5f2bf5b04d5d3eee91fd19"}, ] -[package.dependencies] -typing-extensions = ">=4.7.0" - [package.extras] dev = ["coverage", "pytest (>=7.4.4)"] @@ -2432,13 +1869,13 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.114.2" +version = "0.115.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.114.2-py3-none-any.whl", hash = "sha256:44474a22913057b1acb973ab90f4b671ba5200482e7622816d79105dcece1ac5"}, - {file = "fastapi-0.114.2.tar.gz", hash 
= "sha256:0adb148b62edb09e8c6eeefa3ea934e8f276dabc038c5a82989ea6346050c3da"}, + {file = "fastapi-0.115.0-py3-none-any.whl", hash = "sha256:17ea427674467486e997206a5ab25760f6b09e069f099b96f5b55a32fb6f1631"}, + {file = "fastapi-0.115.0.tar.gz", hash = "sha256:f93b4ca3529a8ebc6fc3fcf710e5efa8de3df9b41570958abf1d97d843138004"}, ] [package.dependencies] @@ -2450,95 +1887,20 @@ typing-extensions = ">=4.8.0" all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] -[[package]] -name = "fastavro" -version = "1.9.7" -description = "Fast read/write of AVRO files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastavro-1.9.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc811fb4f7b5ae95f969cda910241ceacf82e53014c7c7224df6f6e0ca97f52f"}, - {file = "fastavro-1.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb8749e419a85f251bf1ac87d463311874972554d25d4a0b19f6bdc56036d7cf"}, - {file = "fastavro-1.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b2f9bafa167cb4d1c3dd17565cb5bf3d8c0759e42620280d1760f1e778e07fc"}, - {file = "fastavro-1.9.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e87d04b235b29f7774d226b120da2ca4e60b9e6fdf6747daef7f13f218b3517a"}, - {file = "fastavro-1.9.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b525c363e267ed11810aaad8fbdbd1c3bd8837d05f7360977d72a65ab8c6e1fa"}, - {file = "fastavro-1.9.7-cp310-cp310-win_amd64.whl", hash = 
"sha256:6312fa99deecc319820216b5e1b1bd2d7ebb7d6f221373c74acfddaee64e8e60"}, - {file = "fastavro-1.9.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ec8499dc276c2d2ef0a68c0f1ad11782b2b956a921790a36bf4c18df2b8d4020"}, - {file = "fastavro-1.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d9d96f98052615ab465c63ba8b76ed59baf2e3341b7b169058db104cbe2aa0"}, - {file = "fastavro-1.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919f3549e07a8a8645a2146f23905955c35264ac809f6c2ac18142bc5b9b6022"}, - {file = "fastavro-1.9.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9de1fa832a4d9016724cd6facab8034dc90d820b71a5d57c7e9830ffe90f31e4"}, - {file = "fastavro-1.9.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1d09227d1f48f13281bd5ceac958650805aef9a4ef4f95810128c1f9be1df736"}, - {file = "fastavro-1.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:2db993ae6cdc63e25eadf9f93c9e8036f9b097a3e61d19dca42536dcc5c4d8b3"}, - {file = "fastavro-1.9.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4e1289b731214a7315884c74b2ec058b6e84380ce9b18b8af5d387e64b18fc44"}, - {file = "fastavro-1.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac69666270a76a3a1d0444f39752061195e79e146271a568777048ffbd91a27"}, - {file = "fastavro-1.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9be089be8c00f68e343bbc64ca6d9a13e5e5b0ba8aa52bcb231a762484fb270e"}, - {file = "fastavro-1.9.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d576eccfd60a18ffa028259500df67d338b93562c6700e10ef68bbd88e499731"}, - {file = "fastavro-1.9.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ee9bf23c157bd7dcc91ea2c700fa3bd924d9ec198bb428ff0b47fa37fe160659"}, - {file = "fastavro-1.9.7-cp312-cp312-win_amd64.whl", hash = "sha256:b6b2ccdc78f6afc18c52e403ee68c00478da12142815c1bd8a00973138a166d0"}, - {file = "fastavro-1.9.7-cp38-cp38-macosx_11_0_universal2.whl", hash = 
"sha256:7313def3aea3dacface0a8b83f6d66e49a311149aa925c89184a06c1ef99785d"}, - {file = "fastavro-1.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:536f5644737ad21d18af97d909dba099b9e7118c237be7e4bd087c7abde7e4f0"}, - {file = "fastavro-1.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2af559f30383b79cf7d020a6b644c42ffaed3595f775fe8f3d7f80b1c43dfdc5"}, - {file = "fastavro-1.9.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:edc28ab305e3c424de5ac5eb87b48d1e07eddb6aa08ef5948fcda33cc4d995ce"}, - {file = "fastavro-1.9.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ec2e96bdabd58427fe683329b3d79f42c7b4f4ff6b3644664a345a655ac2c0a1"}, - {file = "fastavro-1.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:3b683693c8a85ede496ebebe115be5d7870c150986e34a0442a20d88d7771224"}, - {file = "fastavro-1.9.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:58f76a5c9a312fbd37b84e49d08eb23094d36e10d43bc5df5187bc04af463feb"}, - {file = "fastavro-1.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56304401d2f4f69f5b498bdd1552c13ef9a644d522d5de0dc1d789cf82f47f73"}, - {file = "fastavro-1.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fcce036c6aa06269fc6a0428050fcb6255189997f5e1a728fc461e8b9d3e26b"}, - {file = "fastavro-1.9.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:17de68aae8c2525f5631d80f2b447a53395cdc49134f51b0329a5497277fc2d2"}, - {file = "fastavro-1.9.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7c911366c625d0a997eafe0aa83ffbc6fd00d8fd4543cb39a97c6f3b8120ea87"}, - {file = "fastavro-1.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:912283ed48578a103f523817fdf0c19b1755cea9b4a6387b73c79ecb8f8f84fc"}, - {file = "fastavro-1.9.7.tar.gz", hash = "sha256:13e11c6cb28626da85290933027cd419ce3f9ab8e45410ef24ce6b89d20a1f6c"}, -] - -[package.extras] -codecs = ["cramjam", "lz4", "zstandard"] -lz4 = ["lz4"] -snappy = ["cramjam"] -zstandard = 
["zstandard"] - -[[package]] -name = "feedfinder2" -version = "0.0.4" -description = "Find the feed URLs for a website." -optional = false -python-versions = "*" -files = [ - {file = "feedfinder2-0.0.4.tar.gz", hash = "sha256:3701ee01a6c85f8b865a049c30ba0b4608858c803fe8e30d1d289fdbe89d0efe"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -requests = "*" -six = "*" - -[[package]] -name = "feedparser" -version = "6.0.10" -description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds" -optional = false -python-versions = ">=3.6" -files = [ - {file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"}, - {file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"}, -] - -[package.dependencies] -sgmllib3k = "*" - [[package]] name = "filelock" -version = "3.16.0" +version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, - {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] @@ -2655,24 +2017,6 @@ six = ">=1.3.0" [package.extras] docs = ["sphinx"] -[[package]] -name = "flask-sock" -version = "0.7.0" -description = "WebSocket support for Flask" -optional = false -python-versions = ">=3.6" -files = [ - {file = "flask-sock-0.7.0.tar.gz", hash = "sha256:e023b578284195a443b8d8bdb4469e6a6acf694b89aeb51315b1a34fcf427b7d"}, - {file = "flask_sock-0.7.0-py3-none-any.whl", hash = "sha256:caac4d679392aaf010d02fabcf73d52019f5bdaf1c9c131ec5a428cb3491204a"}, -] - -[package.dependencies] -flask = ">=2" -simple-websocket = ">=0.5.1" - -[package.extras] -docs = ["sphinx"] - [[package]] name = "flask-sqlalchemy" version = "3.1.1" @@ -2699,113 +2043,6 @@ files = [ {file = "flatbuffers-24.3.25.tar.gz", hash = 
"sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"}, ] -[[package]] -name = "fonttools" -version = "4.53.1" -description = "Tools to manipulate font files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fonttools-4.53.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397"}, - {file = "fonttools-4.53.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3"}, - {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d"}, - {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0"}, - {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41"}, - {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f"}, - {file = "fonttools-4.53.1-cp310-cp310-win32.whl", hash = "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4"}, - {file = "fonttools-4.53.1-cp310-cp310-win_amd64.whl", hash = "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671"}, - {file = "fonttools-4.53.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1"}, - {file = "fonttools-4.53.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923"}, - {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719"}, - {file = 
"fonttools-4.53.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3"}, - {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb"}, - {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2"}, - {file = "fonttools-4.53.1-cp311-cp311-win32.whl", hash = "sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88"}, - {file = "fonttools-4.53.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02"}, - {file = "fonttools-4.53.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58"}, - {file = "fonttools-4.53.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8"}, - {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60"}, - {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f"}, - {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2"}, - {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f"}, - {file = "fonttools-4.53.1-cp312-cp312-win32.whl", hash = "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670"}, - {file = "fonttools-4.53.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab"}, - 
{file = "fonttools-4.53.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749"}, - {file = "fonttools-4.53.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2"}, - {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb"}, - {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f"}, - {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d"}, - {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169"}, - {file = "fonttools-4.53.1-cp38-cp38-win32.whl", hash = "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d"}, - {file = "fonttools-4.53.1-cp38-cp38-win_amd64.whl", hash = "sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8"}, - {file = "fonttools-4.53.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a"}, - {file = "fonttools-4.53.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31"}, - {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c"}, - {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407"}, - {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb"}, - {file = 
"fonttools-4.53.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122"}, - {file = "fonttools-4.53.1-cp39-cp39-win32.whl", hash = "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb"}, - {file = "fonttools-4.53.1-cp39-cp39-win_amd64.whl", hash = "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb"}, - {file = "fonttools-4.53.1-py3-none-any.whl", hash = "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d"}, - {file = "fonttools-4.53.1.tar.gz", hash = "sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4"}, -] - -[package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] -graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres", "pycairo", "scipy"] -lxml = ["lxml (>=4.0)"] -pathops = ["skia-pathops (>=0.5.0)"] -plot = ["matplotlib"] -repacker = ["uharfbuzz (>=0.23.0)"] -symfont = ["sympy"] -type1 = ["xattr"] -ufo = ["fs (>=2.2.0,<3)"] -unicode = ["unicodedata2 (>=15.1.0)"] -woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] - -[[package]] -name = "frozendict" -version = "2.4.4" -description = "A simple immutable dictionary" -optional = false -python-versions = ">=3.6" -files = [ - {file = "frozendict-2.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a59578d47b3949437519b5c39a016a6116b9e787bb19289e333faae81462e59"}, - {file = "frozendict-2.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12a342e439aef28ccec533f0253ea53d75fe9102bd6ea928ff530e76eac38906"}, - {file = "frozendict-2.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f79c26dff10ce11dad3b3627c89bb2e87b9dd5958c2b24325f16a23019b8b94"}, - {file = 
"frozendict-2.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2bd009cf4fc47972838a91e9b83654dc9a095dc4f2bb3a37c3f3124c8a364543"}, - {file = "frozendict-2.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:87ebcde21565a14fe039672c25550060d6f6d88cf1f339beac094c3b10004eb0"}, - {file = "frozendict-2.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:fefeb700bc7eb8b4c2dc48704e4221860d254c8989fb53488540bc44e44a1ac2"}, - {file = "frozendict-2.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:4297d694eb600efa429769125a6f910ec02b85606f22f178bafbee309e7d3ec7"}, - {file = "frozendict-2.4.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:812ab17522ba13637826e65454115a914c2da538356e85f43ecea069813e4b33"}, - {file = "frozendict-2.4.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fee9420475bb6ff357000092aa9990c2f6182b2bab15764330f4ad7de2eae49"}, - {file = "frozendict-2.4.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3148062675536724502c6344d7c485dd4667fdf7980ca9bd05e338ccc0c4471e"}, - {file = "frozendict-2.4.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:78c94991944dd33c5376f720228e5b252ee67faf3bac50ef381adc9e51e90d9d"}, - {file = "frozendict-2.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:1697793b5f62b416c0fc1d94638ec91ed3aa4ab277f6affa3a95216ecb3af170"}, - {file = "frozendict-2.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:199a4d32194f3afed6258de7e317054155bc9519252b568d9cfffde7e4d834e5"}, - {file = "frozendict-2.4.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85375ec6e979e6373bffb4f54576a68bf7497c350861d20686ccae38aab69c0a"}, - {file = "frozendict-2.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2d8536e068d6bf281f23fa835ac07747fb0f8851879dd189e9709f9567408b4d"}, - {file = "frozendict-2.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:259528ba6b56fa051bc996f1c4d8b57e30d6dd3bc2f27441891b04babc4b5e73"}, - 
{file = "frozendict-2.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:07c3a5dee8bbb84cba770e273cdbf2c87c8e035903af8f781292d72583416801"}, - {file = "frozendict-2.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6874fec816b37b6eb5795b00e0574cba261bf59723e2de607a195d5edaff0786"}, - {file = "frozendict-2.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f92425686323a950337da4b75b4c17a3327b831df8c881df24038d560640d4"}, - {file = "frozendict-2.4.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d58d9a8d9e49662c6dafbea5e641f97decdb3d6ccd76e55e79818415362ba25"}, - {file = "frozendict-2.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:93a7b19afb429cbf99d56faf436b45ef2fa8fe9aca89c49eb1610c3bd85f1760"}, - {file = "frozendict-2.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b70b431e3a72d410a2cdf1497b3aba2f553635e0c0f657ce311d841bf8273b6"}, - {file = "frozendict-2.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:e1b941132d79ce72d562a13341d38fc217bc1ee24d8c35a20d754e79ff99e038"}, - {file = "frozendict-2.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc2228874eacae390e63fd4f2bb513b3144066a977dc192163c9f6c7f6de6474"}, - {file = "frozendict-2.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63aa49f1919af7d45fb8fd5dec4c0859bc09f46880bd6297c79bb2db2969b63d"}, - {file = "frozendict-2.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6bf9260018d653f3cab9bd147bd8592bf98a5c6e338be0491ced3c196c034a3"}, - {file = "frozendict-2.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6eb716e6a6d693c03b1d53280a1947716129f5ef9bcdd061db5c17dea44b80fe"}, - {file = "frozendict-2.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d13b4310db337f4d2103867c5a05090b22bc4d50ca842093779ef541ea9c9eea"}, - {file = "frozendict-2.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:b3b967d5065872e27b06f785a80c0ed0a45d1f7c9b85223da05358e734d858ca"}, - {file = 
"frozendict-2.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:4ae8d05c8d0b6134bfb6bfb369d5fa0c4df21eabb5ca7f645af95fdc6689678e"}, - {file = "frozendict-2.4.4-py311-none-any.whl", hash = "sha256:705efca8d74d3facbb6ace80ab3afdd28eb8a237bfb4063ed89996b024bc443d"}, - {file = "frozendict-2.4.4-py312-none-any.whl", hash = "sha256:d9647563e76adb05b7cde2172403123380871360a114f546b4ae1704510801e5"}, - {file = "frozendict-2.4.4.tar.gz", hash = "sha256:3f7c031b26e4ee6a3f786ceb5e3abf1181c4ade92dce1f847da26ea2c96008c7"}, -] - [[package]] name = "frozenlist" version = "1.4.1" @@ -3211,30 +2448,30 @@ xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] [[package]] name = "google-cloud-bigquery" -version = "3.25.0" +version = "3.26.0" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509"}, - {file = "google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9"}, + {file = "google_cloud_bigquery-3.26.0-py2.py3-none-any.whl", hash = "sha256:e0e9ad28afa67a18696e624cbccab284bf2c0a3f6eeb9eeb0426c69b943793a8"}, + {file = "google_cloud_bigquery-3.26.0.tar.gz", hash = "sha256:edbdc788beea659e04c0af7fe4dcd6d9155344b98951a0d5055bd2f15da4ba23"}, ] [package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=2.11.1,<3.0.0dev", extras = ["grpc"]} google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-core = ">=1.6.0,<3.0.0dev" -google-resumable-media = ">=0.6.0,<3.0dev" +google-cloud-core = ">=2.4.1,<3.0.0dev" +google-resumable-media = ">=2.0.0,<3.0dev" packaging = ">=20.0.0" -python-dateutil = ">=2.7.2,<3.0dev" +python-dateutil = ">=2.7.3,<3.0dev" requests = ">=2.21.0,<3.0.0dev" [package.extras] -all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas 
(>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] -bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] +all = ["Shapely (>=1.8.4,<3.0.0dev)", "bigquery-magics (>=0.1.0)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] +bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)"] bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] -ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] +ipython = ["bigquery-magics (>=0.1.0)"] ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] pandas = ["db-dtypes 
(>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] @@ -3413,77 +2650,84 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "greenlet" -version = "3.1.0" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682"}, - {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1"}, - {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99"}, - {file = "greenlet-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54"}, - {file = "greenlet-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345"}, - {file = 
"greenlet-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19"}, - {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a"}, - {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b"}, - {file = "greenlet-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9"}, - {file = "greenlet-3.1.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc"}, - {file = 
"greenlet-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a"}, - {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665"}, - {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811"}, - {file = "greenlet-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b"}, - {file = "greenlet-3.1.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17"}, - {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5"}, - {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484"}, - {file = "greenlet-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637"}, - {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954"}, - {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3"}, - {file = "greenlet-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00"}, - {file = "greenlet-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6"}, - {file = "greenlet-3.1.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f"}, - {file = 
"greenlet-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b"}, - {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d"}, - {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0"}, - {file = "greenlet-3.1.0-cp38-cp38-win32.whl", hash = "sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2"}, - {file = "greenlet-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910"}, - {file = "greenlet-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28"}, - {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b"}, - {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8"}, - {file = "greenlet-3.1.0-cp39-cp39-win32.whl", hash = "sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc"}, - {file = "greenlet-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97"}, - {file = "greenlet-3.1.0.tar.gz", hash = "sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = 
"greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = 
"greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = 
"greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -3508,61 +2752,70 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [[package]] name = "grpcio" -version = "1.66.1" +version = "1.66.2" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492"}, - {file = "grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083"}, - {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a"}, - {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d"}, - {file = "grpcio-1.66.1-cp310-cp310-win32.whl", hash = "sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c"}, - {file = "grpcio-1.66.1-cp310-cp310-win_amd64.whl", hash = "sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858"}, - {file = "grpcio-1.66.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a"}, - {file = "grpcio-1.66.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1"}, - {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e"}, - {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd"}, - {file = "grpcio-1.66.1-cp311-cp311-win32.whl", hash = "sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791"}, - {file = "grpcio-1.66.1-cp311-cp311-win_amd64.whl", hash = "sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb"}, - {file = "grpcio-1.66.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a"}, - {file = "grpcio-1.66.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9"}, - {file = 
"grpcio-1.66.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761"}, - {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815"}, - {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524"}, - {file = "grpcio-1.66.1-cp312-cp312-win32.whl", hash = "sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759"}, - {file = "grpcio-1.66.1-cp312-cp312-win_amd64.whl", hash = "sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734"}, - {file = "grpcio-1.66.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2"}, - {file = "grpcio-1.66.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef"}, - {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb"}, - {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_x86_64.whl", 
hash = "sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d"}, - {file = "grpcio-1.66.1-cp38-cp38-win32.whl", hash = "sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3"}, - {file = "grpcio-1.66.1-cp38-cp38-win_amd64.whl", hash = "sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce"}, - {file = "grpcio-1.66.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503"}, - {file = "grpcio-1.66.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e"}, - {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb"}, - {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c"}, - {file = "grpcio-1.66.1-cp39-cp39-win32.whl", hash = "sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45"}, - {file = "grpcio-1.66.1-cp39-cp39-win_amd64.whl", hash = "sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8"}, - {file = "grpcio-1.66.1.tar.gz", hash = "sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.66.1)"] + {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, + {file = 
"grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, + {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, + {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, + {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, + {file = "grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, + {file = 
"grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, + {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, + {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, + {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, + {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, + {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, + {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, + {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = 
"sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, + {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, + {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, + {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, + {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, + {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, + {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, + {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, + {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, + {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, + {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, + {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, + {file = "grpcio-1.66.2.tar.gz", hash = 
"sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.66.2)"] [[package]] name = "grpcio-status" @@ -3803,27 +3056,6 @@ files = [ {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, ] -[[package]] -name = "html5lib" -version = "1.1" -description = "HTML parser based on the WHATWG HTML specification" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, - {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, -] - -[package.dependencies] -six = ">=1.9" -webencodings = "*" - -[package.extras] -all = ["chardet (>=2.2)", "genshi", "lxml"] -chardet = ["chardet (>=2.2)"] -genshi = ["genshi"] -lxml = ["lxml"] - [[package]] name = "httpcore" version = "1.0.5" @@ -3936,18 +3168,18 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "huggingface-hub" -version = "0.16.4" +version = "0.25.1" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.16.4-py3-none-any.whl", hash = "sha256:0d3df29932f334fead024afc7cb4cc5149d955238b8b5e42dcf9740d6995a349"}, - {file = "huggingface_hub-0.16.4.tar.gz", hash = "sha256:608c7d4f3d368b326d1747f91523dbd1f692871e8e2e7a4750314a2dd8b63e14"}, + {file = "huggingface_hub-0.25.1-py3-none-any.whl", hash = "sha256:a5158ded931b3188f54ea9028097312cb0acd50bffaaa2612014c3c526b44972"}, + {file = "huggingface_hub-0.25.1.tar.gz", hash = "sha256:9ff7cb327343211fbd06e2b149b8f362fd1e389454f3f14c6db75a4999ee20ff"}, ] [package.dependencies] filelock = "*" -fsspec = "*" +fsspec = ">=2023.5.0" packaging = ">=20.9" pyyaml = 
">=5.1" requests = "*" @@ -3955,16 +3187,18 @@ tqdm = ">=4.42.1" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "black (>=23.1,<24.0)", "gradio", "jedi", "mypy (==0.982)", "numpy", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "black (>=23.1,<24.0)", "gradio", "jedi", "mypy (==0.982)", "numpy", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -inference = ["aiohttp", "pydantic"] -quality = ["black (>=23.1,<24.0)", "mypy (==0.982)", "ruff (>=0.0.241)"] 
+hf-transfer = ["hf-transfer (>=0.1.4)"] +inference = ["aiohttp", "minijinja (>=1.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.5.0)"] tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["torch"] -typing = ["pydantic", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +tensorflow-testing = ["keras (<3.0)", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors[torch]", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] [[package]] name = "humanfriendly" @@ -3993,13 +3227,13 @@ files = [ [[package]] name = "idna" -version = "3.9" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ - {file = "idna-3.9-py3-none-any.whl", hash = "sha256:69297d5da0cc9281c77efffb4e730254dd45943f45bbfb461de5991713989b1e"}, - {file = "idna-3.9.tar.gz", hash = "sha256:e5c5dafde284f26e9e0f28f6ea2d6400abd5ca099864a67f576f3981c6476124"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [package.extras] @@ -4089,16 +3323,6 @@ files = [ {file = "jieba-0.42.1.tar.gz", hash = "sha256:055ca12f62674fafed09427f176506079bc135638a14e23e25be909131928db2"}, ] -[[package]] -name = 
"jieba3k" -version = "0.35.1" -description = "Chinese Words Segementation Utilities" -optional = false -python-versions = "*" -files = [ - {file = "jieba3k-0.35.1.zip", hash = "sha256:980a4f2636b778d312518066be90c7697d410dd5a472385f5afced71a2db1c10"}, -] - [[package]] name = "jinja2" version = "3.1.4" @@ -4152,20 +3376,6 @@ files = [ [package.dependencies] attrs = ">=19.2.0" -[[package]] -name = "jsonpath-ng" -version = "1.6.1" -description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming." -optional = false -python-versions = "*" -files = [ - {file = "jsonpath-ng-1.6.1.tar.gz", hash = "sha256:086c37ba4917304850bd837aeab806670224d3f038fe2833ff593a672ef0a5fa"}, - {file = "jsonpath_ng-1.6.1-py3-none-any.whl", hash = "sha256:8f22cd8273d7772eea9aaa84d922e0841aa36fdb8a2c6b7f6c3791a16a9bc0be"}, -] - -[package.dependencies] -ply = "*" - [[package]] name = "jsonschema" version = "4.23.0" @@ -4216,142 +3426,20 @@ files = [ {file = "kaleido-0.2.1-py2.py3-none-win_amd64.whl", hash = "sha256:4670985f28913c2d063c5734d125ecc28e40810141bdb0a46f15b76c1d45f23c"}, ] -[[package]] -name = "kiwisolver" -version = "1.4.7" -description = "A fast implementation of the Cassowary constraint solver" -optional = false -python-versions = ">=3.8" -files = [ - {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a9c83f75223d5e48b0bc9cb1bf2776cf01563e00ade8775ffe13b0b6e1af3a6"}, - {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58370b1ffbd35407444d57057b57da5d6549d2d854fa30249771775c63b5fe17"}, - {file = "kiwisolver-1.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa0abdf853e09aff551db11fce173e2177d00786c688203f52c87ad7fcd91ef9"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d53103597a252fb3ab8b5845af04c7a26d5e7ea8122303dd7a021176a87e8b9"}, - 
{file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f17c5ffa8e9462fb79f62746428dd57b46eb931698e42e990ad63103f35e6c"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a9ca9c710d598fd75ee5de59d5bda2684d9db36a9f50b6125eaea3969c2599"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4d742cb7af1c28303a51b7a27aaee540e71bb8e24f68c736f6f2ffc82f2bf05"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28c7fea2196bf4c2f8d46a0415c77a1c480cc0724722f23d7410ffe9842c407"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e968b84db54f9d42046cf154e02911e39c0435c9801681e3fc9ce8a3c4130278"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0c18ec74c0472de033e1bebb2911c3c310eef5649133dd0bedf2a169a1b269e5"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8f0ea6da6d393d8b2e187e6a5e3fb81f5862010a40c3945e2c6d12ae45cfb2ad"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f106407dda69ae456dd1227966bf445b157ccc80ba0dff3802bb63f30b74e895"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84ec80df401cfee1457063732d90022f93951944b5b58975d34ab56bb150dfb3"}, - {file = "kiwisolver-1.4.7-cp310-cp310-win32.whl", hash = "sha256:71bb308552200fb2c195e35ef05de12f0c878c07fc91c270eb3d6e41698c3bcc"}, - {file = "kiwisolver-1.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:44756f9fd339de0fb6ee4f8c1696cfd19b2422e0d70b4cefc1cc7f1f64045a8c"}, - {file = "kiwisolver-1.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:78a42513018c41c2ffd262eb676442315cbfe3c44eed82385c2ed043bc63210a"}, - {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:d2b0e12a42fb4e72d509fc994713d099cbb15ebf1103545e8a45f14da2dfca54"}, - {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a8781ac3edc42ea4b90bc23e7d37b665d89423818e26eb6df90698aa2287c95"}, - {file = "kiwisolver-1.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46707a10836894b559e04b0fd143e343945c97fd170d69a2d26d640b4e297935"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef97b8df011141c9b0f6caf23b29379f87dd13183c978a30a3c546d2c47314cb"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab58c12a2cd0fc769089e6d38466c46d7f76aced0a1f54c77652446733d2d02"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803b8e1459341c1bb56d1c5c010406d5edec8a0713a0945851290a7930679b51"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9a9e8a507420fe35992ee9ecb302dab68550dedc0da9e2880dd88071c5fb052"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18077b53dc3bb490e330669a99920c5e6a496889ae8c63b58fbc57c3d7f33a18"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6af936f79086a89b3680a280c47ea90b4df7047b5bdf3aa5c524bbedddb9e545"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3abc5b19d24af4b77d1598a585b8a719beb8569a71568b66f4ebe1fb0449460b"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:933d4de052939d90afbe6e9d5273ae05fb836cc86c15b686edd4b3560cc0ee36"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:65e720d2ab2b53f1f72fb5da5fb477455905ce2c88aaa671ff0a447c2c80e8e3"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:3bf1ed55088f214ba6427484c59553123fdd9b218a42bbc8c6496d6754b1e523"}, - {file = "kiwisolver-1.4.7-cp311-cp311-win32.whl", hash = "sha256:4c00336b9dd5ad96d0a558fd18a8b6f711b7449acce4c157e7343ba92dd0cf3d"}, - {file = "kiwisolver-1.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:929e294c1ac1e9f615c62a4e4313ca1823ba37326c164ec720a803287c4c499b"}, - {file = "kiwisolver-1.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:e33e8fbd440c917106b237ef1a2f1449dfbb9b6f6e1ce17c94cd6a1e0d438376"}, - {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2"}, - {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a"}, - {file = "kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107"}, - {file = 
"kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520"}, - {file = "kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b"}, - {file = "kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb"}, - {file = "kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a"}, - {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e"}, - {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6"}, - {file = "kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee"}, - {file = "kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07"}, - {file = "kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76"}, - {file = "kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650"}, - {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5d5abf8f8ec1f4e22882273c423e16cae834c36856cac348cfbfa68e01c40f3a"}, - {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aeb3531b196ef6f11776c21674dba836aeea9d5bd1cf630f869e3d90b16cfade"}, - {file = "kiwisolver-1.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:b7d755065e4e866a8086c9bdada157133ff466476a2ad7861828e17b6026e22c"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08471d4d86cbaec61f86b217dd938a83d85e03785f51121e791a6e6689a3be95"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7bbfcb7165ce3d54a3dfbe731e470f65739c4c1f85bb1018ee912bae139e263b"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d34eb8494bea691a1a450141ebb5385e4b69d38bb8403b5146ad279f4b30fa3"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9242795d174daa40105c1d86aba618e8eab7bf96ba8c3ee614da8302a9f95503"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a0f64a48bb81af7450e641e3fe0b0394d7381e342805479178b3d335d60ca7cf"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8e045731a5416357638d1700927529e2b8ab304811671f665b225f8bf8d8f933"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4322872d5772cae7369f8351da1edf255a604ea7087fe295411397d0cfd9655e"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e1631290ee9271dffe3062d2634c3ecac02c83890ada077d225e081aca8aab89"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:edcfc407e4eb17e037bca59be0e85a2031a2ac87e4fed26d3e9df88b4165f92d"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4d05d81ecb47d11e7f8932bd8b61b720bf0b41199358f3f5e36d38e28f0532c5"}, - {file = "kiwisolver-1.4.7-cp38-cp38-win32.whl", hash = "sha256:b38ac83d5f04b15e515fd86f312479d950d05ce2368d5413d46c088dda7de90a"}, - {file = "kiwisolver-1.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:d83db7cde68459fc803052a55ace60bea2bae361fc3b7a6d5da07e11954e4b09"}, - {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:3f9362ecfca44c863569d3d3c033dbe8ba452ff8eed6f6b5806382741a1334bd"}, - {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8df2eb9b2bac43ef8b082e06f750350fbbaf2887534a5be97f6cf07b19d9583"}, - {file = "kiwisolver-1.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f32d6edbc638cde7652bd690c3e728b25332acbadd7cad670cc4a02558d9c417"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e2e6c39bd7b9372b0be21456caab138e8e69cc0fc1190a9dfa92bd45a1e6e904"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dda56c24d869b1193fcc763f1284b9126550eaf84b88bbc7256e15028f19188a"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79849239c39b5e1fd906556c474d9b0439ea6792b637511f3fe3a41158d89ca8"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e3bc157fed2a4c02ec468de4ecd12a6e22818d4f09cde2c31ee3226ffbefab2"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3da53da805b71e41053dc670f9a820d1157aae77b6b944e08024d17bcd51ef88"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8705f17dfeb43139a692298cb6637ee2e59c0194538153e83e9ee0c75c2eddde"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82a5c2f4b87c26bb1a0ef3d16b5c4753434633b83d365cc0ddf2770c93829e3c"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce8be0466f4c0d585cdb6c1e2ed07232221df101a4c6f28821d2aa754ca2d9e2"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:409afdfe1e2e90e6ee7fc896f3df9a7fec8e793e58bfa0d052c8a82f99c37abb"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b9c3f4ee0b9a439d2415012bd1b1cc2df59e4d6a9939f4d669241d30b414327"}, - {file = 
"kiwisolver-1.4.7-cp39-cp39-win32.whl", hash = "sha256:a79ae34384df2b615eefca647a2873842ac3b596418032bef9a7283675962644"}, - {file = "kiwisolver-1.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:cf0438b42121a66a3a667de17e779330fc0f20b0d97d59d2f2121e182b0505e4"}, - {file = "kiwisolver-1.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:764202cc7e70f767dab49e8df52c7455e8de0df5d858fa801a11aa0d882ccf3f"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:94252291e3fe68001b1dd747b4c0b3be12582839b95ad4d1b641924d68fd4643"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b7dfa3b546da08a9f622bb6becdb14b3e24aaa30adba66749d38f3cc7ea9706"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3de6481f4ed8b734da5df134cd5a6a64fe32124fe83dde1e5b5f29fe30b1e6"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91b5f9f1205845d488c928e8570dcb62b893372f63b8b6e98b863ebd2368ff2"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fa14dbd66b8b8f470d5fc79c089a66185619d31645f9b0773b88b19f7223c4"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:eb542fe7933aa09d8d8f9d9097ef37532a7df6497819d16efe4359890a2f417a"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bfa1acfa0c54932d5607e19a2c24646fb4c1ae2694437789129cf099789a3b00"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:eee3ea935c3d227d49b4eb85660ff631556841f6e567f0f7bda972df6c2c9935"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f3160309af4396e0ed04db259c3ccbfdc3621b5559b5453075e5de555e1f3a1b"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", 
hash = "sha256:a17f6a29cf8935e587cc8a4dbfc8368c55edc645283db0ce9801016f83526c2d"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10849fb2c1ecbfae45a693c070e0320a91b35dd4bcf58172c023b994283a124d"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ac542bf38a8a4be2dc6b15248d36315ccc65f0743f7b1a76688ffb6b5129a5c2"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b01aac285f91ca889c800042c35ad3b239e704b150cfd3382adfc9dcc780e39"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48be928f59a1f5c8207154f935334d374e79f2b5d212826307d072595ad76a2e"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f37cfe618a117e50d8c240555331160d73d0411422b59b5ee217843d7b693608"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599b5c873c63a1f6ed7eead644a8a380cfbdf5db91dcb6f85707aaab213b1674"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801fa7802e5cfabe3ab0c81a34c323a319b097dfb5004be950482d882f3d7225"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c6c43471bc764fad4bc99c5c2d6d16a676b1abf844ca7c8702bdae92df01ee0"}, - {file = "kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60"}, -] - [[package]] name = "kombu" -version = "5.4.1" +version = "5.4.2" description = "Messaging library for Python." 
optional = false python-versions = ">=3.8" files = [ - {file = "kombu-5.4.1-py3-none-any.whl", hash = "sha256:621d365f234e4c089596f3a2510f1ade07026efc28caca426161d8f458786cab"}, - {file = "kombu-5.4.1.tar.gz", hash = "sha256:1c05178826dab811f8cab5b0a154d42a7a33d8bcdde9fa3d7b4582e43c3c03db"}, + {file = "kombu-5.4.2-py3-none-any.whl", hash = "sha256:14212f5ccf022fc0a70453bb025a1dcc32782a588c49ea866884047d66e14763"}, + {file = "kombu-5.4.2.tar.gz", hash = "sha256:eef572dd2fd9fc614b37580e3caeafdd5af46c1eff31e7fba89138cdb406f2cf"}, ] [package.dependencies] amqp = ">=5.1.1,<6.0.0" +tzdata = {version = "*", markers = "python_version >= \"3.9\""} vine = "5.1.0" [package.extras] @@ -4373,17 +3461,18 @@ zookeeper = ["kazoo (>=2.8.0)"] [[package]] name = "kubernetes" -version = "30.1.0" +version = "31.0.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" files = [ - {file = "kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d"}, - {file = "kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc"}, + {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, + {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, ] [package.dependencies] certifi = ">=14.05.14" +durationpy = ">=0.7" google-auth = ">=1.0.1" oauthlib = ">=3.2.2" python-dateutil = ">=2.5.3" @@ -4413,13 +3502,13 @@ six = "*" [[package]] name = "langfuse" -version = "2.48.1" +version = "2.51.2" description = "A client library for accessing langfuse" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langfuse-2.48.1-py3-none-any.whl", hash = "sha256:8661070b6d94ba1d7da92c054f3110b6ecf4489d6e8204a4080f934f3f49ebf2"}, - {file = "langfuse-2.48.1.tar.gz", hash = 
"sha256:b8117d90babec6be1bc3303b42e0b71848531eae44118e6e0123d03e7961d0fc"}, + {file = "langfuse-2.51.2-py3-none-any.whl", hash = "sha256:7aab94a9452cda4587a2cd4917e455da1afd7f8a2696688742130e2f2d23ca59"}, + {file = "langfuse-2.51.2.tar.gz", hash = "sha256:0982b108ab4c02947f682e442b0796b7a73825d31eeace1771575f6454b8f79a"}, ] [package.dependencies] @@ -4438,13 +3527,13 @@ openai = ["openai (>=0.27.8)"] [[package]] name = "langsmith" -version = "0.1.120" +version = "0.1.129" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.120-py3-none-any.whl", hash = "sha256:54d2785e301646c0988e0a69ebe4d976488c87b41928b358cb153b6ddd8db62b"}, - {file = "langsmith-0.1.120.tar.gz", hash = "sha256:25499ca187b41bd89d784b272b97a8d76f60e0e21bdf20336e8a2aa6a9b23ac9"}, + {file = "langsmith-0.1.129-py3-none-any.whl", hash = "sha256:31393fbbb17d6be5b99b9b22d530450094fab23c6c37281a6a6efb2143d05347"}, + {file = "langsmith-0.1.129.tar.gz", hash = "sha256:6c3ba66471bef41b9f87da247cc0b493268b3f54656f73648a256a205261b6a0"}, ] [package.dependencies] @@ -4871,54 +3960,6 @@ dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] -[[package]] -name = "matplotlib" -version = "3.8.4" -description = "Python plotting package" -optional = false -python-versions = ">=3.9" -files = [ - {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, - {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, - {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"}, - {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"}, - {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"}, - {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"}, - {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"}, - {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"}, - {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"}, - {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"}, - {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"}, - {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"}, - {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"}, - {file = "matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"}, - {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"}, - {file = 
"matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"}, - {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"}, - {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"}, - {file = "matplotlib-3.8.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6"}, - {file = "matplotlib-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67"}, - {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"}, - {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9"}, - {file = "matplotlib-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54"}, - {file = "matplotlib-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0"}, - {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"}, - {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"}, - {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"}, - {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"}, -] - -[package.dependencies] 
-contourpy = ">=1.0.1" -cycler = ">=0.10" -fonttools = ">=4.22.0" -kiwisolver = ">=1.3.1" -numpy = ">=1.21" -packaging = ">=20.0" -pillow = ">=8" -pyparsing = ">=2.3.1" -python-dateutil = ">=2.7" - [[package]] name = "mdurl" version = "0.1.2" @@ -4948,95 +3989,116 @@ tqdm = "*" [[package]] name = "mmh3" -version = "4.1.0" +version = "5.0.1" description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, 
- {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, - {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, - {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, - {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, - {file = 
"mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, - {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, - {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, - {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, - {file = 
"mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, - {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, - {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, - {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, - {file = 
"mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, - {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, - {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, - {file = 
"mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, - {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, - {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, - {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, - {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, -] - -[package.extras] -test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] + {file = "mmh3-5.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f0a4b4bf05778ed77d820d6e7d0e9bd6beb0c01af10e1ce9233f5d2f814fcafa"}, + {file = "mmh3-5.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac7a391039aeab95810c2d020b69a94eb6b4b37d4e2374831e92db3a0cdf71c6"}, + {file = "mmh3-5.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3a2583b5521ca49756d8d8bceba80627a9cc295f255dcab4e3df7ccc2f09679a"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:081a8423fe53c1ac94f87165f3e4c500125d343410c1a0c5f1703e898a3ef038"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b4d72713799755dc8954a7d36d5c20a6c8de7b233c82404d122c7c7c1707cc"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:389a6fd51efc76d3182d36ec306448559c1244f11227d2bb771bdd0e6cc91321"}, + {file = 
"mmh3-5.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39f4128edaa074bff721b1d31a72508cba4d2887ee7867f22082e1fe9d4edea0"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5d23a94d91aabba3386b3769048d5f4210fdfef80393fece2f34ba5a7b466c"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:16347d038361f8b8f24fd2b7ef378c9b68ddee9f7706e46269b6e0d322814713"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6e299408565af7d61f2d20a5ffdd77cf2ed902460fe4e6726839d59ba4b72316"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42050af21ddfc5445ee5a66e73a8fc758c71790305e3ee9e4a85a8e69e810f94"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2ae9b1f5ef27ec54659920f0404b7ceb39966e28867c461bfe83a05e8d18ddb0"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:50c2495a02045f3047d71d4ae9cdd7a15efc0bcbb7ff17a18346834a8e2d1d19"}, + {file = "mmh3-5.0.1-cp310-cp310-win32.whl", hash = "sha256:c028fa77cddf351ca13b4a56d43c1775652cde0764cadb39120b68f02a23ecf6"}, + {file = "mmh3-5.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c5e741e421ec14400c4aae30890515c201f518403bdef29ae1e00d375bb4bbb5"}, + {file = "mmh3-5.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:b17156d56fabc73dbf41bca677ceb6faed435cc8544f6566d72ea77d8a17e9d0"}, + {file = "mmh3-5.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a6d5a9b1b923f1643559ba1fc0bf7a5076c90cbb558878d3bf3641ce458f25d"}, + {file = "mmh3-5.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3349b968be555f7334bbcce839da98f50e1e80b1c615d8e2aa847ea4a964a012"}, + {file = "mmh3-5.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1bd3c94b110e55db02ab9b605029f48a2f7f677c6e58c09d44e42402d438b7e1"}, + {file = 
"mmh3-5.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ba84d48608f79adbb10bb09986b6dc33eeda5c2d1bd75d00820081b73bde9"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0217987a8b8525c8d9170f66d036dec4ab45cfbd53d47e8d76125791ceb155e"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2797063a34e78d1b61639a98b0edec1c856fa86ab80c7ec859f1796d10ba429"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8bba16340adcbd47853a2fbe5afdb397549e8f2e79324ff1dced69a3f8afe7c3"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:282797957c9f60b51b9d768a602c25f579420cc9af46feb77d457a27823d270a"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e4fb670c29e63f954f9e7a2cdcd57b36a854c2538f579ef62681ccbaa1de2b69"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ee7d85438dc6aff328e19ab052086a3c29e8a9b632998a49e5c4b0034e9e8d6"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b7fb5db231f3092444bc13901e6a8d299667126b00636ffbad4a7b45e1051e2f"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c100dd441703da5ec136b1d9003ed4a041d8a1136234c9acd887499796df6ad8"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71f3b765138260fd7a7a2dba0ea5727dabcd18c1f80323c9cfef97a7e86e01d0"}, + {file = "mmh3-5.0.1-cp311-cp311-win32.whl", hash = "sha256:9a76518336247fd17689ce3ae5b16883fd86a490947d46a0193d47fb913e26e3"}, + {file = "mmh3-5.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:336bc4df2e44271f1c302d289cc3d78bd52d3eed8d306c7e4bff8361a12bf148"}, + {file = "mmh3-5.0.1-cp311-cp311-win_arm64.whl", hash = 
"sha256:af6522722fbbc5999aa66f7244d0986767a46f1fb05accc5200f75b72428a508"}, + {file = "mmh3-5.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f2730bb263ed9c388e8860438b057a53e3cc701134a6ea140f90443c4c11aa40"}, + {file = "mmh3-5.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6246927bc293f6d56724536400b85fb85f5be26101fa77d5f97dd5e2a4c69bf2"}, + {file = "mmh3-5.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fbca322519a6e6e25b6abf43e940e1667cf8ea12510e07fb4919b48a0cd1c411"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae8c19903ed8a1724ad9e67e86f15d198a7a1271a4f9be83d47e38f312ed672"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09fd6cc72c07c0c07c3357714234b646d78052487c4a3bd5f7f6e08408cff60"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ff8551fee7ae3b11c5d986b6347ade0dccaadd4670ffdb2b944dee120ffcc84"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e39694c73a5a20c8bf36dfd8676ed351e5234d55751ba4f7562d85449b21ef3f"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eba6001989a92f72a89c7cf382fda831678bd780707a66b4f8ca90239fdf2123"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0771f90c9911811cc606a5c7b7b58f33501c9ee896ed68a6ac22c7d55878ecc0"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:09b31ed0c0c0920363e96641fac4efde65b1ab62b8df86293142f35a254e72b4"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5cf4a8deda0235312db12075331cb417c4ba163770edfe789bde71d08a24b692"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41f7090a95185ef20ac018581a99337f0cbc84a2135171ee3290a9c0d9519585"}, + {file = 
"mmh3-5.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b97b5b368fb7ff22194ec5854f5b12d8de9ab67a0f304728c7f16e5d12135b76"}, + {file = "mmh3-5.0.1-cp312-cp312-win32.whl", hash = "sha256:842516acf04da546f94fad52db125ee619ccbdcada179da51c326a22c4578cb9"}, + {file = "mmh3-5.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:d963be0dbfd9fca209c17172f6110787ebf78934af25e3694fe2ba40e55c1e2b"}, + {file = "mmh3-5.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:a5da292ceeed8ce8e32b68847261a462d30fd7b478c3f55daae841404f433c15"}, + {file = "mmh3-5.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:673e3f1c8d4231d6fb0271484ee34cb7146a6499fc0df80788adb56fd76842da"}, + {file = "mmh3-5.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f795a306bd16a52ad578b663462cc8e95500b3925d64118ae63453485d67282b"}, + {file = "mmh3-5.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5ed57a5e28e502a1d60436cc25c76c3a5ba57545f250f2969af231dc1221e0a5"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632c28e7612e909dbb6cbe2fe496201ada4695b7715584005689c5dc038e59ad"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53fd6bd525a5985e391c43384672d9d6b317fcb36726447347c7fc75bfed34ec"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dceacf6b0b961a0e499836af3aa62d60633265607aef551b2a3e3c48cdaa5edd"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f0738d478fdfb5d920f6aff5452c78f2c35b0eff72caa2a97dfe38e82f93da2"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e70285e7391ab88b872e5bef632bad16b9d99a6d3ca0590656a4753d55988af"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:27e5fc6360aa6b828546a4318da1a7da6bf6e5474ccb053c3a6aa8ef19ff97bd"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7989530c3c1e2c17bf5a0ec2bba09fd19819078ba90beedabb1c3885f5040b0d"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:cdad7bee649950da7ecd3cbbbd12fb81f1161072ecbdb5acfa0018338c5cb9cf"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e143b8f184c1bb58cecd85ab4a4fd6dc65a2d71aee74157392c3fddac2a4a331"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5eb12e886f3646dd636f16b76eb23fc0c27e8ff3c1ae73d4391e50ef60b40f6"}, + {file = "mmh3-5.0.1-cp313-cp313-win32.whl", hash = "sha256:16e6dddfa98e1c2d021268e72c78951234186deb4df6630e984ac82df63d0a5d"}, + {file = "mmh3-5.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d3ffb792d70b8c4a2382af3598dad6ae0c5bd9cee5b7ffcc99aa2f5fd2c1bf70"}, + {file = "mmh3-5.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:122fa9ec148383f9124292962bda745f192b47bfd470b2af5fe7bb3982b17896"}, + {file = "mmh3-5.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b12bad8c75e6ff5d67319794fb6a5e8c713826c818d47f850ad08b4aa06960c6"}, + {file = "mmh3-5.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e5bbb066538c1048d542246fc347bb7994bdda29a3aea61c22f9f8b57111ce69"}, + {file = "mmh3-5.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:eee6134273f64e2a106827cc8fd77e70cc7239a285006fc6ab4977d59b015af2"}, + {file = "mmh3-5.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d04d9aa19d48e4c7bbec9cabc2c4dccc6ff3b2402f856d5bf0de03e10f167b5b"}, + {file = "mmh3-5.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79f37da1eed034d06567a69a7988456345c7f29e49192831c3975b464493b16e"}, + {file = "mmh3-5.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:242f77666743337aa828a2bf2da71b6ba79623ee7f93edb11e009f69237c8561"}, + {file = 
"mmh3-5.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffd943fff690463945f6441a2465555b3146deaadf6a5e88f2590d14c655d71b"}, + {file = "mmh3-5.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565b15f8d7df43acb791ff5a360795c20bfa68bca8b352509e0fbabd06cc48cd"}, + {file = "mmh3-5.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc6aafb867c2030df98ac7760ff76b500359252867985f357bd387739f3d5287"}, + {file = "mmh3-5.0.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:32898170644d45aa27c974ab0d067809c066205110f5c6d09f47d9ece6978bfe"}, + {file = "mmh3-5.0.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:42865567838d2193eb64e0ef571f678bf361a254fcdef0c5c8e73243217829bd"}, + {file = "mmh3-5.0.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5ff5c1f301c4a8b6916498969c0fcc7e3dbc56b4bfce5cfe3fe31f3f4609e5ae"}, + {file = "mmh3-5.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:be74c2dda8a6f44a504450aa2c3507f8067a159201586fc01dd41ab80efc350f"}, + {file = "mmh3-5.0.1-cp38-cp38-win32.whl", hash = "sha256:5610a842621ff76c04b20b29cf5f809b131f241a19d4937971ba77dc99a7f330"}, + {file = "mmh3-5.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:de15739ac50776fe8aa1ef13f1be46a6ee1fbd45f6d0651084097eb2be0a5aa4"}, + {file = "mmh3-5.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:48e84cf3cc7e8c41bc07de72299a73b92d9e3cde51d97851420055b1484995f7"}, + {file = "mmh3-5.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd9dc28c2d168c49928195c2e29b96f9582a5d07bd690a28aede4cc07b0e696"}, + {file = "mmh3-5.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2771a1c56a3d4bdad990309cff5d0a8051f29c8ec752d001f97d6392194ae880"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5ff2a8322ba40951a84411550352fba1073ce1c1d1213bb7530f09aed7f8caf"}, + {file = 
"mmh3-5.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a16bd3ec90682c9e0a343e6bd4c778c09947c8c5395cdb9e5d9b82b2559efbca"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d45733a78d68b5b05ff4a823aea51fa664df1d3bf4929b152ff4fd6dea2dd69b"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:904285e83cedebc8873b0838ed54c20f7344120be26e2ca5a907ab007a18a7a0"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac4aeb1784e43df728034d0ed72e4b2648db1a69fef48fa58e810e13230ae5ff"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:cb3d4f751a0b8b4c8d06ef1c085216c8fddcc8b8c8d72445976b5167a40c6d1e"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8021851935600e60c42122ed1176399d7692df338d606195cd599d228a04c1c6"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6182d5924a5efc451900f864cbb021d7e8ad5d524816ca17304a0f663bc09bb5"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:5f30b834552a4f79c92e3d266336fb87fd92ce1d36dc6813d3e151035890abbd"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cd4383f35e915e06d077df27e04ffd3be7513ec6a9de2d31f430393f67e192a7"}, + {file = "mmh3-5.0.1-cp39-cp39-win32.whl", hash = "sha256:1455fb6b42665a97db8fc66e89a861e52b567bce27ed054c47877183f86ea6e3"}, + {file = "mmh3-5.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:9e26a0f4eb9855a143f5938a53592fa14c2d3b25801c2106886ab6c173982780"}, + {file = "mmh3-5.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:0d0a35a69abdad7549c4030a714bb4ad07902edb3bbe61e1bbc403ded5d678be"}, + {file = "mmh3-5.0.1.tar.gz", hash = "sha256:7dab080061aeb31a6069a181f27c473a1f67933854e36a3464931f2716508896"}, +] + +[package.extras] +benchmark = ["pymmh3 (==0.0.5)", "pyperf 
(==2.7.0)", "xxhash (==3.5.0)"] +docs = ["myst-parser (==4.0.0)", "shibuya (==2024.8.30)", "sphinx (==8.0.2)", "sphinx-copybutton (==0.5.2)"] +lint = ["black (==24.8.0)", "clang-format (==18.1.8)", "isort (==5.13.2)", "pylint (==3.2.7)"] +plot = ["matplotlib (==3.9.2)", "pandas (==2.2.2)"] +test = ["pytest (==8.3.3)", "pytest-sugar (==1.0.0)"] +type = ["mypy (==1.11.2)"] [[package]] name = "mock" @@ -5282,17 +4344,6 @@ files = [ [package.dependencies] dill = ">=0.3.8" -[[package]] -name = "multitasking" -version = "0.0.11" -description = "Non-blocking Python methods using decorators" -optional = false -python-versions = "*" -files = [ - {file = "multitasking-0.0.11-py3-none-any.whl", hash = "sha256:1e5b37a5f8fc1e6cfaafd1a82b6b1cc6d2ed20037d3b89c25a84f499bd7b3dd4"}, - {file = "multitasking-0.0.11.tar.gz", hash = "sha256:4d6bc3cc65f9b2dca72fb5a787850a88dae8f620c2b36ae9b55248e51bcd6026"}, -] - [[package]] name = "mypy-extensions" version = "1.0.0" @@ -5304,32 +4355,6 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "newspaper3k" -version = "0.2.8" -description = "Simplified python article discovery & extraction." 
-optional = false -python-versions = "*" -files = [ - {file = "newspaper3k-0.2.8-py3-none-any.whl", hash = "sha256:44a864222633d3081113d1030615991c3dbba87239f6bbf59d91240f71a22e3e"}, - {file = "newspaper3k-0.2.8.tar.gz", hash = "sha256:9f1bd3e1fb48f400c715abf875cc7b0a67b7ddcd87f50c9aeeb8fcbbbd9004fb"}, -] - -[package.dependencies] -beautifulsoup4 = ">=4.4.1" -cssselect = ">=0.9.2" -feedfinder2 = ">=0.0.4" -feedparser = ">=5.2.1" -jieba3k = ">=0.35.1" -lxml = ">=3.6.0" -nltk = ">=3.2.1" -Pillow = ">=3.3.0" -python-dateutil = ">=2.5.3" -PyYAML = ">=3.11" -requests = ">=2.10.0" -tinysegmenter = "0.3" -tldextract = ">=2.0.1" - [[package]] name = "nltk" version = "3.8.1" @@ -5385,22 +4410,6 @@ aws = ["boto3", "sagemaker"] dev = ["black (==24.3.0)", "cairosvg", "coverage", "isort", "mkautodoc", "mkdocs-jupyter", "mkdocs-material", "mkdocstrings[python]", "myst-parser", "nomic[all]", "pandas", "pillow", "pylint", "pyright", "pytest", "pytorch-lightning", "twine"] local = ["gpt4all (>=2.5.0,<3)"] -[[package]] -name = "novita-client" -version = "0.5.7" -description = "novita SDK for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "novita_client-0.5.7-py3-none-any.whl", hash = "sha256:844a4c09c98328c8d4f72e1d3f63f76285c2963dcc37ccb2de41cbfdbe7fa51d"}, - {file = "novita_client-0.5.7.tar.gz", hash = "sha256:65baf748757aafd8ab080a64f9ab069a40c0810fc1fa9be9c26596988a0aa4b4"}, -] - -[package.dependencies] -dataclass-wizard = ">=0.22.2" -pillow = ">=10.2.0" -requests = ">=2.27.1" - [[package]] name = "numba" version = "0.60.0" @@ -5437,44 +4446,44 @@ numpy = ">=1.22,<2.1" [[package]] name = "numexpr" -version = "2.9.0" +version = "2.10.1" description = "Fast numerical expression evaluator for NumPy" optional = false python-versions = ">=3.9" files = [ - {file = "numexpr-2.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c52b4ac54514f5d4d8ead66768810cd5f77aa198e6064213d9b5c7b2e1c97c35"}, - {file = "numexpr-2.9.0-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:50f57bc333f285e8c46b1ce61c6e94ec9bb74e4ea0d674d1c6c6f4a286f64fe4"}, - {file = "numexpr-2.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:943ba141f3884ffafa3fa1a3ebf3cdda9e9688a67a3c91986e6eae13dc073d43"}, - {file = "numexpr-2.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee48acd6339748a65c0e32403b802ebfadd9cb0e3b602ba5889896238eafdd61"}, - {file = "numexpr-2.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:972e29b5cecc21466c5b177e38568372ab66aab1f053ae04690a49cea09e747d"}, - {file = "numexpr-2.9.0-cp310-cp310-win32.whl", hash = "sha256:520e55d75bd99c76e376b6326e35ecf44c5ce2635a5caed72799a3885fc49173"}, - {file = "numexpr-2.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:5615497c3f34b637fda9b571f7774b6a82f2367cc1364b7a4573068dd1aabcaa"}, - {file = "numexpr-2.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bffcbc55dea5a5f5255e2586da08f00929998820e6592ee717273a08ad021eb3"}, - {file = "numexpr-2.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:374dc6ca54b2af813cb15c2b34e85092dfeac1f73d51ec358dd81876bd9adcec"}, - {file = "numexpr-2.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:549afc1622296cca3478a132c6e0fb5e55a19e08d32bc0d5a415434824a9c157"}, - {file = "numexpr-2.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c618a5895e34db0a364dcdb9960084c080f93f9d377c45b1ca9c394c24b4e77"}, - {file = "numexpr-2.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:37a7dd36fd79a2b69c3fd2bc2b51ac8270bebc69cc96e6d78f1148e147fcbfa8"}, - {file = "numexpr-2.9.0-cp311-cp311-win32.whl", hash = "sha256:00dab81d49239ea5423861ad627097b44d10d802df5f883d1b00f742139c3349"}, - {file = "numexpr-2.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:0e2574cafb18373774f351cac45ed23b5b360d9ecd1dbf3c12dac6d6eefefc87"}, - {file = "numexpr-2.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:9761195526a228e05eba400b8c484c94bbabfea853b9ea35ab8fa1bf415331b1"}, - {file = "numexpr-2.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0f619e91034b346ea85a4e1856ff06011dcb7dce10a60eda75e74db90120f880"}, - {file = "numexpr-2.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2749bce1c48706d58894992634a43b8458c4ba9411191471c4565fa41e9979ec"}, - {file = "numexpr-2.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c31f621a625c7be602f92b027d90f2d3d60dcbc19b106e77fb04a4362152af"}, - {file = "numexpr-2.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b937861d13de67d440d54c85a835faed7572be5a6fd10d4f3bd4e66e157f"}, - {file = "numexpr-2.9.0-cp312-cp312-win32.whl", hash = "sha256:aa6298fb46bd7ec69911b5b80927a00663d066e719b29f48eb952d559bdd8371"}, - {file = "numexpr-2.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:8efd879839572bde5a38a1aa3ac23fd4dd9b956fb969bc5e43d1c403419e1e8c"}, - {file = "numexpr-2.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b04f12a6130094a251e3a8fff40130589c1c83be6d4eb223873bea14d8c8b630"}, - {file = "numexpr-2.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:977537f2a1cc843f888fb5f0507626f956ada674e4b3847168214a3f3c7446fa"}, - {file = "numexpr-2.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eae6c0c2d5682c02e8ac9c4287c2232c2443c9148b239df22500eaa3c5d73b7"}, - {file = "numexpr-2.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fae6828042b70c2f52a132bfcb9139da704274ed11b982fbf537f91c075d2ef"}, - {file = "numexpr-2.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c77392aea53f0700d60eb270ad63174b4ff10b04f8de92861101ca2129fee51"}, - {file = "numexpr-2.9.0-cp39-cp39-win32.whl", hash = "sha256:3b03a6cf37a72f5b52f2b962d7ac7f565bea8eaba83c3c4e5fcf8fbb6a938153"}, - {file = "numexpr-2.9.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:d655b6eacc4e81006b662cba014e4615a9ddd96881b8b4db4ad0d7f6d38069af"}, - {file = "numexpr-2.9.0.tar.gz", hash = "sha256:f21d12f6c432ce349089eb95342babf6629aebb3fddf187a4492d3aadaadaaf0"}, -] - -[package.dependencies] -numpy = ">=1.13.3" + {file = "numexpr-2.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bbd35f17f6efc00ebd4a480192af1ee30996094a0d5343b131b0e90e61e8b554"}, + {file = "numexpr-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fecdf4bf3c1250e56583db0a4a80382a259ba4c2e1efa13e04ed43f0938071f5"}, + {file = "numexpr-2.10.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2efa499f460124538a5b4f1bf2e77b28eb443ee244cc5573ed0f6a069ebc635"}, + {file = "numexpr-2.10.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac23a72eff10f928f23b147bdeb0f1b774e862abe332fc9bf4837e9f1bc0bbf9"}, + {file = "numexpr-2.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b28eaf45f1cc1048aad9e90e3a8ada1aef58c5f8155a85267dc781b37998c046"}, + {file = "numexpr-2.10.1-cp310-cp310-win32.whl", hash = "sha256:4f0985bd1c493b23b5aad7d81fa174798f3812efb78d14844194834c9fee38b8"}, + {file = "numexpr-2.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:44f6d12a8c44be90199bbb10d3abf467f88951f48a3d1fbbd3c219d121f39c9d"}, + {file = "numexpr-2.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3c0b0bf165b2d886eb981afa4e77873ca076f5d51c491c4d7b8fc10f17c876f"}, + {file = "numexpr-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56648a04679063175681195670ad53e5c8ca19668166ed13875199b5600089c7"}, + {file = "numexpr-2.10.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce04ae6efe2a9d0be1a0e114115c3ae70c68b8b8fbc615c5c55c15704b01e6a4"}, + {file = "numexpr-2.10.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:45f598182b4f5c153222e47d5163c3bee8d5ebcaee7e56dd2a5898d4d97e4473"}, + {file = "numexpr-2.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash 
= "sha256:6a50370bea77ba94c3734a44781c716751354c6bfda2d369af3aed3d67d42871"}, + {file = "numexpr-2.10.1-cp311-cp311-win32.whl", hash = "sha256:fa4009d84a8e6e21790e718a80a22d57fe7f215283576ef2adc4183f7247f3c7"}, + {file = "numexpr-2.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:fcbf013bb8494e8ef1d11fa3457827c1571c6a3153982d709e5d17594999d4dd"}, + {file = "numexpr-2.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82fc95c301b15ff4823f98989ee363a2d5555d16a7cfd3710e98ddee726eaaaa"}, + {file = "numexpr-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cbf79fef834f88607f977ab9867061dcd9b40ccb08bb28547c6dc6c73e560895"}, + {file = "numexpr-2.10.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:552c8d4b2e3b87cdb2abb40a781b9a61a9090a9f66ac7357fc5a0b93aff76be3"}, + {file = "numexpr-2.10.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22cc65e9121aeb3187a2b50827715b2b087ea70e8ab21416ea52662322087b43"}, + {file = "numexpr-2.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:00204e5853713b5eba5f3d0bc586a5d8d07f76011b597c8b4087592cc2ec2928"}, + {file = "numexpr-2.10.1-cp312-cp312-win32.whl", hash = "sha256:82bf04a1495ac475de4ab49fbe0a3a2710ed3fd1a00bc03847316b5d7602402d"}, + {file = "numexpr-2.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:300e577b3c006dd7a8270f1bb2e8a00ee15bf235b1650fe2a6febec2954bc2c3"}, + {file = "numexpr-2.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fb704620657a1c99d64933e8a982148d8bfb2b738a1943e107a2bfdee887ce56"}, + {file = "numexpr-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:368a1972c3186355160f6ee330a7eea146d8443da75a38a30083289ae251ef5a"}, + {file = "numexpr-2.10.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ca8ae46481d0b0689ca0d00a8670bc464ce375e349599fe674a6d4957e7b7eb6"}, + {file = "numexpr-2.10.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:5a4db4456e0779d5e024220b7b6a7477ac900679bfa74836b06fa526aaed4e3c"}, + {file = "numexpr-2.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:926dd426c68f1d927412a2ad843831c1eb9a95871e7bb0bd8b20d547c12238d2"}, + {file = "numexpr-2.10.1-cp39-cp39-win32.whl", hash = "sha256:37598cca41f8f50dc889b0b72be1616a288758c16ab7d48c9ac8719e1a39d835"}, + {file = "numexpr-2.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:78b14c19c403df7498954468385768c86b0d2c52ad03dffb74e45d44ae5a9c77"}, + {file = "numexpr-2.10.1.tar.gz", hash = "sha256:9bba99d354a65f1a008ab8b87f07d84404c668e66bab624df5b6b5373403cf81"}, +] + +[package.dependencies] +numpy = ">=1.23.0" [[package]] name = "numpy" @@ -5539,13 +4548,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "oci" -version = "2.133.0" +version = "2.135.0" description = "Oracle Cloud Infrastructure Python SDK" optional = false python-versions = "*" files = [ - {file = "oci-2.133.0-py3-none-any.whl", hash = "sha256:9706365481ca538c89b3a15e6b5c246801eccb06be831a7f21c40f2a2ee310a7"}, - {file = "oci-2.133.0.tar.gz", hash = "sha256:800418025bb98f587c65bbf89c6b6d61ef0f2249e0698d73439baf3251640b7f"}, + {file = "oci-2.135.0-py3-none-any.whl", hash = "sha256:c01f1d103ed034fa7ca2bceb297bf00e6f6c456d14a46b35ee9007b25f3ea397"}, + {file = "oci-2.135.0.tar.gz", hash = "sha256:6e28e6595264705d8fd0719045ffc4b23170e7fd2cd76a1c3aa25e4cdaa5883a"}, ] [package.dependencies] @@ -5648,65 +4657,6 @@ typing-extensions = ">=4.7,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -[[package]] -name = "opencensus" -version = "0.11.4" -description = "A stats collection and distributed tracing framework" -optional = false -python-versions = "*" -files = [ - {file = "opencensus-0.11.4-py2.py3-none-any.whl", hash = "sha256:a18487ce68bc19900336e0ff4655c5a116daf10c1b3685ece8d971bddad6a864"}, - {file = "opencensus-0.11.4.tar.gz", hash = 
"sha256:cbef87d8b8773064ab60e5c2a1ced58bbaa38a6d052c41aec224958ce544eff2"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.0.0,<3.0.0", markers = "python_version >= \"3.6\""} -opencensus-context = ">=0.1.3" -six = ">=1.16,<2.0" - -[[package]] -name = "opencensus-context" -version = "0.1.3" -description = "OpenCensus Runtime Context" -optional = false -python-versions = "*" -files = [ - {file = "opencensus-context-0.1.3.tar.gz", hash = "sha256:a03108c3c10d8c80bb5ddf5c8a1f033161fa61972a9917f9b9b3a18517f0088c"}, - {file = "opencensus_context-0.1.3-py2.py3-none-any.whl", hash = "sha256:073bb0590007af276853009fac7e4bab1d523c3f03baf4cb4511ca38967c6039"}, -] - -[[package]] -name = "opencensus-ext-azure" -version = "1.1.13" -description = "OpenCensus Azure Monitor Exporter" -optional = false -python-versions = "*" -files = [ - {file = "opencensus-ext-azure-1.1.13.tar.gz", hash = "sha256:aec30472177005379ba56a702a097d618c5f57558e1bb6676ec75f948130692a"}, - {file = "opencensus_ext_azure-1.1.13-py2.py3-none-any.whl", hash = "sha256:06001fac6f8588ba00726a3a7c6c7f2fc88bc8ad12a65afdca657923085393dd"}, -] - -[package.dependencies] -azure-core = ">=1.12.0,<2.0.0" -azure-identity = ">=1.5.0,<2.0.0" -opencensus = ">=0.11.4,<1.0.0" -psutil = ">=5.6.3" -requests = ">=2.19.0" - -[[package]] -name = "opencensus-ext-logging" -version = "0.1.1" -description = "OpenCensus logging Integration" -optional = false -python-versions = "*" -files = [ - {file = "opencensus-ext-logging-0.1.1.tar.gz", hash = "sha256:c203b70f034151dada529f543af330ba17aaffec27d8a5267d03c713eb1de334"}, - {file = "opencensus_ext_logging-0.1.1-py2.py3-none-any.whl", hash = "sha256:cfdaf5da5d8b195ff3d1af87a4066a6621a28046173f6be4b0b6caec4a3ca89f"}, -] - -[package.dependencies] -opencensus = ">=0.8.0,<1.0.0" - [[package]] name = "openpyxl" version = "3.1.5" @@ -5721,30 +4671,6 @@ files = [ [package.dependencies] et-xmlfile = "*" -[[package]] -name = "opensearch-py" -version = "2.4.0" -description = "Python 
client for OpenSearch" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" -files = [ - {file = "opensearch-py-2.4.0.tar.gz", hash = "sha256:7eba2b6ed2ddcf33225bfebfba2aee026877838cc39f760ec80f27827308cc4b"}, - {file = "opensearch_py-2.4.0-py2.py3-none-any.whl", hash = "sha256:316077235437c8ceac970232261f3393c65fb92a80f33c5b106f50f1dab24fd9"}, -] - -[package.dependencies] -certifi = ">=2022.12.07" -python-dateutil = "*" -requests = ">=2.4.0,<3.0.0" -six = "*" -urllib3 = ">=1.26.18" - -[package.extras] -async = ["aiohttp (>=3,<4)"] -develop = ["black", "botocore", "coverage (<8.0.0)", "jinja2", "mock", "myst-parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] -docs = ["aiohttp (>=3,<4)", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] -kerberos = ["requests-kerberos"] - [[package]] name = "opentelemetry-api" version = "1.27.0" @@ -6059,40 +4985,53 @@ files = [ [[package]] name = "pandas" -version = "2.2.2" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = 
"pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = 
"pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = 
"pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = 
"sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] @@ -6156,16 +5095,6 @@ multiprocess = ">=0.70.16" pox = ">=0.3.4" ppft = ">=1.7.6.8" -[[package]] -name = "peewee" -version = "3.17.6" -description = "a little orm" -optional = false -python-versions = "*" -files = [ - {file = "peewee-3.17.6.tar.gz", hash = "sha256:cea5592c6f4da1592b7cff8eaf655be6648a1f5857469e30037bf920c03fb8fb"}, -] - [[package]] name = "pgvecto-rs" version = "0.2.1" @@ -6300,13 +5229,13 @@ xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.3.3" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.3.3-py3-none-any.whl", hash = "sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5"}, - {file = "platformdirs-4.3.3.tar.gz", hash = "sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] @@ -6314,21 +5243,6 @@ docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-a test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] type = ["mypy (>=1.11.2)"] -[[package]] -name = "plotly" -version = "5.24.1" -description = "An open-source, interactive data visualization library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "plotly-5.24.1-py3-none-any.whl", hash = "sha256:f67073a1e637eb0dc3e46324d9d51e2fe76e9727c892dde64ddf1e1b51f29089"}, - {file = "plotly-5.24.1.tar.gz", hash = "sha256:dbc8ac8339d248a4bcc36e08a5659bacfe1b079390b8953533f4eb22169b4bae"}, -] - 
-[package.dependencies] -packaging = "*" -tenacity = ">=6.2.0" - [[package]] name = "pluggy" version = "1.5.0" @@ -6376,13 +5290,13 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p [[package]] name = "posthog" -version = "3.6.5" +version = "3.6.6" description = "Integrate PostHog into any python application." optional = false python-versions = "*" files = [ - {file = "posthog-3.6.5-py2.py3-none-any.whl", hash = "sha256:f8b7c573826b061a1d22c9495169c38ebe83a1df2729f49c7129a9c23a02acf6"}, - {file = "posthog-3.6.5.tar.gz", hash = "sha256:7fd3ca809e15476c35f75d18cd6bba31395daf0a17b75242965c469fb6292510"}, + {file = "posthog-3.6.6-py2.py3-none-any.whl", hash = "sha256:38834fd7f0732582a20d4eb4674c8d5c088e464d14d1b3f8c176e389aecaa4ef"}, + {file = "posthog-3.6.6.tar.gz", hash = "sha256:1e04783293117109189ad7048f3eedbe21caff0e39bee5e2d47a93dd790fefac"}, ] [package.dependencies] @@ -6399,58 +5313,38 @@ test = ["coverage", "django", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)" [[package]] name = "pox" -version = "0.3.4" +version = "0.3.5" description = "utilities for filesystem exploration and automated builds" optional = false python-versions = ">=3.8" files = [ - {file = "pox-0.3.4-py3-none-any.whl", hash = "sha256:651b8ae8a7b341b7bfd267f67f63106daeb9805f1ac11f323d5280d2da93fdb6"}, - {file = "pox-0.3.4.tar.gz", hash = "sha256:16e6eca84f1bec3828210b06b052adf04cf2ab20c22fd6fbef5f78320c9a6fed"}, + {file = "pox-0.3.5-py3-none-any.whl", hash = "sha256:9e82bcc9e578b43e80a99cad80f0d8f44f4d424f0ee4ee8d4db27260a6aa365a"}, + {file = "pox-0.3.5.tar.gz", hash = "sha256:8120ee4c94e950e6e0483e050a4f0e56076e590ba0a9add19524c254bd23c2d1"}, ] [[package]] name = "ppft" -version = "1.7.6.8" +version = "1.7.6.9" description = "distributed and parallel Python" optional = false python-versions = ">=3.8" files = [ - {file = "ppft-1.7.6.8-py3-none-any.whl", hash = "sha256:de2dd4b1b080923dd9627fbdea52649fd741c752fce4f3cf37e26f785df23d9b"}, - {file = 
"ppft-1.7.6.8.tar.gz", hash = "sha256:76a429a7d7b74c4d743f6dba8351e58d62b6432ed65df9fe204790160dab996d"}, -] - -[package.extras] -dill = ["dill (>=0.3.8)"] - -[[package]] -name = "primp" -version = "0.6.2" -description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "primp-0.6.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:4a35d441462a55d9a9525bf170e2ffd2fcb3db6039b23e802859fa22c18cdd51"}, - {file = "primp-0.6.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:f67ccade95bdbca3cf9b96b93aa53f9617d85ddbf988da4e9c523aa785fd2d54"}, - {file = "primp-0.6.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8074b93befaf36567e4cf3d4a1a8cd6ab9cc6e4dd4ff710650678daa405aee71"}, - {file = "primp-0.6.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7d3e2a3f8c6262e9b883651b79c4ff2b7677a76f47293a139f541c9ea333ce3b"}, - {file = "primp-0.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a460ea389371c6d04839b4b50b5805d99da8ebe281a2e8b534d27377c6d44f0e"}, - {file = "primp-0.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b6b27e89d3c05c811aff0e4fde7a36d6957b15b3112f4ce28b6b99e8ca1e725"}, - {file = "primp-0.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:1006a40a85f88a4c5222094813a1ebc01f85a63e9a33d2c443288c0720bed321"}, - {file = "primp-0.6.2.tar.gz", hash = "sha256:5a96a6b65195a8a989157e67d23bd171c49be238654e02bdf1b1fda36cbcc068"}, + {file = "ppft-1.7.6.9-py3-none-any.whl", hash = "sha256:dab36548db5ca3055067fbe6b1a17db5fee29f3c366c579a9a27cebb52ed96f0"}, + {file = "ppft-1.7.6.9.tar.gz", hash = "sha256:73161c67474ea9d81d04bcdad166d399cff3f084d5d2dc21ebdd46c075bbc265"}, ] [package.extras] -dev = ["certifi", "pytest (>=8.1.1)"] +dill = ["dill (>=0.3.9)"] [[package]] name = "prompt-toolkit" -version = "3.0.47" +version = "3.0.48" description = "Library for building powerful interactive command lines in Python" 
optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, + {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, + {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, ] [package.dependencies] @@ -6475,22 +5369,22 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.4" +version = "4.25.5" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, + {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, + {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, + {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, + {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, + {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, + {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, + {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, + {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, + {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, ] [[package]] @@ -6904,23 +5798,6 @@ azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0 toml = ["tomli (>=2.0.1)"] yaml = ["pyyaml (>=6.0.1)"] 
-[[package]] -name = "pydash" -version = "8.0.3" -description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydash-8.0.3-py3-none-any.whl", hash = "sha256:c16871476822ee6b59b87e206dd27888240eff50a7b4cd72a4b80b43b6b994d7"}, - {file = "pydash-8.0.3.tar.gz", hash = "sha256:1b27cd3da05b72f0e5ff786c523afd82af796936462e631ffd1b228d91f8b9aa"}, -] - -[package.dependencies] -typing-extensions = ">3.10,<4.6.0 || >4.6.0" - -[package.extras] -dev = ["build", "coverage", "furo", "invoke", "mypy", "pytest", "pytest-cov", "pytest-mypy-testing", "ruff", "sphinx", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] - [[package]] name = "pygments" version = "2.18.0" @@ -6957,13 +5834,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pymilvus" -version = "2.4.6" +version = "2.4.7" description = "Python Sdk for Milvus" optional = false python-versions = ">=3.8" files = [ - {file = "pymilvus-2.4.6-py3-none-any.whl", hash = "sha256:b4c43472edc313b845d313be50610e19054e6954b2c5c3b515565c596c2d3d97"}, - {file = "pymilvus-2.4.6.tar.gz", hash = "sha256:6ac3eb91c92cc01bbe444fe83f895f02d7b2546d96ac67998630bf31ac074d66"}, + {file = "pymilvus-2.4.7-py3-none-any.whl", hash = "sha256:1e5d377bd40fa7eb459d3958dbd96201758f5cf997d41eb3d2d169d0b7fa462e"}, + {file = "pymilvus-2.4.7.tar.gz", hash = "sha256:9ef460b940782a42e1b7b8ae0da03d8cc02d9d80044d13f4b689a7c935ec7aa7"}, ] [package.dependencies] @@ -6980,21 +5857,6 @@ bulk-writer = ["azure-storage-blob", "minio (>=7.0.0)", "pyarrow (>=12.0.0)", "r dev = ["black", "grpcio (==1.62.2)", "grpcio-testing (==1.62.2)", "grpcio-tools (==1.62.2)", "pytest (>=5.3.4)", "pytest-cov (>=2.8.1)", "pytest-timeout (>=1.3.4)", "ruff (>0.4.0)"] model = ["milvus-model (>=0.1.0)"] -[[package]] -name = "pymysql" -version = "1.1.1" -description = "Pure Python MySQL Driver" -optional = 
false -python-versions = ">=3.7" -files = [ - {file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"}, - {file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"}, -] - -[package.extras] -ed25519 = ["PyNaCl (>=1.4.0)"] -rsa = ["cryptography"] - [[package]] name = "pyopenssl" version = "24.2.1" @@ -7082,28 +5944,28 @@ files = [ [[package]] name = "pyproject-hooks" -version = "1.1.0" +version = "1.2.0" description = "Wrappers to call pyproject.toml-based build backend hooks." optional = false python-versions = ">=3.7" files = [ - {file = "pyproject_hooks-1.1.0-py3-none-any.whl", hash = "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2"}, - {file = "pyproject_hooks-1.1.0.tar.gz", hash = "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965"}, + {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, + {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, ] [[package]] name = "pyreadline3" -version = "3.5.2" +version = "3.5.4" description = "A python implementation of GNU readline." 
optional = false python-versions = ">=3.8" files = [ - {file = "pyreadline3-3.5.2-py3-none-any.whl", hash = "sha256:a87d56791e2965b2b187e2ea33dcf664600842c997c0623c95cf8ef07db83de9"}, - {file = "pyreadline3-3.5.2.tar.gz", hash = "sha256:ba82292e52c5a3bb256b291af0c40b457c1e8699cac9a873abbcaac8aef3a1bb"}, + {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, + {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, ] [package.extras] -dev = ["build", "flake8", "pytest", "twine"] +dev = ["build", "flake8", "mypy", "pytest", "twine"] [[package]] name = "pytest" @@ -7149,21 +6011,21 @@ histogram = ["pygal", "pygaljs"] [[package]] name = "pytest-env" -version = "1.1.4" +version = "1.1.5" description = "pytest plugin that allows you to add environment variables." optional = false python-versions = ">=3.8" files = [ - {file = "pytest_env-1.1.4-py3-none-any.whl", hash = "sha256:a4212056d4d440febef311a98fdca56c31256d58fb453d103cba4e8a532b721d"}, - {file = "pytest_env-1.1.4.tar.gz", hash = "sha256:86653658da8f11c6844975db955746c458a9c09f1e64957603161e2ff93f5133"}, + {file = "pytest_env-1.1.5-py3-none-any.whl", hash = "sha256:ce90cf8772878515c24b31cd97c7fa1f4481cd68d588419fd45f10ecaee6bc30"}, + {file = "pytest_env-1.1.5.tar.gz", hash = "sha256:91209840aa0e43385073ac464a554ad2947cc2fd663a9debf88d03b01e0cc1cf"}, ] [package.dependencies] -pytest = ">=8.3.2" +pytest = ">=8.3.3" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] -test = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "pytest-mock (>=3.14)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "pytest-mock (>=3.14)"] [[package]] name = "pytest-mock" @@ -7550,145 +6412,103 @@ dev = ["pytest"] [[package]] name = "rapidfuzz" -version = "3.9.7" +version = "3.10.0" description = "rapid fuzzy string matching" optional = false -python-versions = 
">=3.8" -files = [ - {file = "rapidfuzz-3.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ccf68e30b80e903f2309f90a438dbd640dd98e878eeb5ad361a288051ee5b75c"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:696a79018ef989bf1c9abd9005841cee18005ccad4748bad8a4c274c47b6241a"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4eebf6c93af0ae866c22b403a84747580bb5c10f0d7b51c82a87f25405d4dcb"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e9125377fa3d21a8abd4fbdbcf1c27be73e8b1850f0b61b5b711364bf3b59db"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c12d180b17a22d107c8747de9c68d0b9c1d15dcda5445ff9bf9f4ccfb67c3e16"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1318d42610c26dcd68bd3279a1bf9e3605377260867c9a8ed22eafc1bd93a7c"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5fa6e3c6e0333051c1f3a49f0807b3366f4131c8d6ac8c3e05fd0d0ce3755c"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fcf79b686962d7bec458a0babc904cb4fa319808805e036b9d5a531ee6b9b835"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8b01153c7466d0bad48fba77a303d5a768e66f24b763853469f47220b3de4661"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:94baaeea0b4f8632a6da69348b1e741043eba18d4e3088d674d3f76586b6223d"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6c5b32875646cb7f60c193ade99b2e4b124f19583492115293cd00f6fb198b17"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:110b6294396bc0a447648627479c9320f095c2034c0537f687592e0f58622638"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-win32.whl", hash = 
"sha256:3445a35c4c8d288f2b2011eb61bce1227c633ce85a3154e727170f37c0266bb2"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:0d1415a732ee75e74a90af12020b77a0b396b36c60afae1bde3208a78cd2c9fc"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-win_arm64.whl", hash = "sha256:836f4d88b8bd0fff2ebe815dcaab8aa6c8d07d1d566a7e21dd137cf6fe11ed5b"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d098ce6162eb5e48fceb0745455bc950af059df6113eec83e916c129fca11408"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:048d55d36c02c6685a2b2741688503c3d15149694506655b6169dcfd3b6c2585"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c33211cfff9aec425bb1bfedaf94afcf337063aa273754f22779d6dadebef4c2"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6d9db2fa4e9be171e9bb31cf2d2575574774966b43f5b951062bb2e67885852"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4e049d5ad61448c9a020d1061eba20944c4887d720c4069724beb6ea1692507"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cfa74aac64c85898b93d9c80bb935a96bf64985e28d4ee0f1a3d1f3bf11a5106"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965693c2e9efd425b0f059f5be50ef830129f82892fa1858e220e424d9d0160f"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8501000a5eb8037c4b56857724797fe5a8b01853c363de91c8d0d0ad56bef319"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d92c552c6b7577402afdd547dcf5d31ea6c8ae31ad03f78226e055cfa37f3c6"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1ee2086f490cb501d86b7e386c1eb4e3a0ccbb0c99067089efaa8c79012c8952"}, - {file = 
"rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1de91e7fd7f525e10ea79a6e62c559d1b0278ec097ad83d9da378b6fab65a265"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4da514d13f4433e16960a17f05b67e0af30ac771719c9a9fb877e5004f74477"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-win32.whl", hash = "sha256:a40184c67db8252593ec518e17fb8a6e86d7259dc9f2d6c0bf4ff4db8cf1ad4b"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:c4f28f1930b09a2c300357d8465b388cecb7e8b2f454a5d5425561710b7fd07f"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-win_arm64.whl", hash = "sha256:675b75412a943bb83f1f53e2e54fd18c80ef15ed642dc6eb0382d1949419d904"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1ef6a1a8f0b12f8722f595f15c62950c9a02d5abc64742561299ffd49f6c6944"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32532af1d70c6ec02ea5ac7ee2766dfff7c8ae8c761abfe8da9e527314e634e8"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1a38bade755aa9dd95a81cda949e1bf9cd92b79341ccc5e2189c9e7bdfc5ec"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73ee2df41224c87336448d279b5b6a3a75f36e41dd3dcf538c0c9cce36360d8"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be3a1fc3e2ab3bdf93dc0c83c00acca8afd2a80602297d96cf4a0ba028333cdf"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603f48f621272a448ff58bb556feb4371252a02156593303391f5c3281dfaeac"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:268f8e1ca50fc61c0736f3fe9d47891424adf62d96ed30196f30f4bd8216b41f"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f8bf3f0d02935751d8660abda6044821a861f6229f7d359f98bcdcc7e66c39b"}, - 
{file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b997ff3b39d4cee9fb025d6c46b0a24bd67595ce5a5b652a97fb3a9d60beb651"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca66676c8ef6557f9b81c5b2b519097817a7c776a6599b8d6fcc3e16edd216fe"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:35d3044cb635ca6b1b2b7b67b3597bd19f34f1753b129eb6d2ae04cf98cd3945"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a93c9e60904cb76e7aefef67afffb8b37c4894f81415ed513db090f29d01101"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-win32.whl", hash = "sha256:579d107102c0725f7c79b4e79f16d3cf4d7c9208f29c66b064fa1fd4641d5155"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-win_amd64.whl", hash = "sha256:953b3780765c8846866faf891ee4290f6a41a6dacf4fbcd3926f78c9de412ca6"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-win_arm64.whl", hash = "sha256:7c20c1474b068c4bd45bf2fd0ad548df284f74e9a14a68b06746c56e3aa8eb70"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fde81b1da9a947f931711febe2e2bee694e891f6d3e6aa6bc02c1884702aea19"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47e92c155a14f44511ea8ebcc6bc1535a1fe8d0a7d67ad3cc47ba61606df7bcf"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8772b745668260c5c4d069c678bbaa68812e6c69830f3771eaad521af7bc17f8"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578302828dd97ee2ba507d2f71d62164e28d2fc7bc73aad0d2d1d2afc021a5d5"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc3e6081069eea61593f1d6839029da53d00c8c9b205c5534853eaa3f031085c"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b1c2d504eddf97bc0f2eba422c8915576dbf025062ceaca2d68aecd66324ad9"}, - {file = 
"rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb76e5a21034f0307c51c5a2fc08856f698c53a4c593b17d291f7d6e9d09ca3"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d4ba2318ef670ce505f42881a5d2af70f948124646947341a3c6ccb33cd70369"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:057bb03f39e285047d7e9412e01ecf31bb2d42b9466a5409d715d587460dd59b"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a8feac9006d5c9758438906f093befffc4290de75663dbb2098461df7c7d28dd"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95b8292383e717e10455f2c917df45032b611141e43d1adf70f71b1566136b11"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e9fbf659537d246086d0297628b3795dc3e4a384101ecc01e5791c827b8d7345"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-win32.whl", hash = "sha256:1dc516ac6d32027be2b0196bedf6d977ac26debd09ca182376322ad620460feb"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-win_amd64.whl", hash = "sha256:b4f86e09d3064dca0b014cd48688964036a904a2d28048f00c8f4640796d06a8"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-win_arm64.whl", hash = "sha256:19c64d8ddb2940b42a4567b23f1681af77f50a5ff6c9b8e85daba079c210716e"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbda3dd68d8b28ccb20ffb6f756fefd9b5ba570a772bedd7643ed441f5793308"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2379e0b2578ad3ac7004f223251550f08bca873ff76c169b09410ec562ad78d8"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d1eff95362f993b0276fd3839aee48625b09aac8938bb0c23b40d219cba5dc5"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd9360e30041690912525a210e48a897b49b230768cc8af1c702e5395690464f"}, - {file = 
"rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a93cd834b3c315ab437f0565ee3a2f42dd33768dc885ccbabf9710b131cf70d2"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff196996240db7075f62c7bc4506f40a3c80cd4ae3ab0e79ac6892283a90859"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948dcee7aaa1cd14358b2a7ef08bf0be42bf89049c3a906669874a715fc2c937"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d95751f505a301af1aaf086c19f34536056d6c8efa91b2240de532a3db57b543"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:90db86fa196eecf96cb6db09f1083912ea945c50c57188039392d810d0b784e1"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:3171653212218a162540a3c8eb8ae7d3dcc8548540b69eaecaf3b47c14d89c90"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:36dd6e820379c37a1ffefc8a52b648758e867cd9d78ee5b5dc0c9a6a10145378"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7b702de95666a1f7d5c6b47eacadfe2d2794af3742d63d2134767d13e5d1c713"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-win32.whl", hash = "sha256:9030e7238c0df51aed5c9c5ed8eee2bdd47a2ae788e562c1454af2851c3d1906"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:f847fb0fbfb72482b1c05c59cbb275c58a55b73708a7f77a83f8035ee3c86497"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:97f2ce529d2a70a60c290f6ab269a2bbf1d3b47b9724dccc84339b85f7afb044"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e2957fdad10bb83b1982b02deb3604a3f6911a5e545f518b59c741086f92d152"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d5262383634626eb45c536017204b8163a03bc43bda880cf1bdd7885db9a163"}, - {file = 
"rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:364587827d7cbd41afa0782adc2d2d19e3f07d355b0750a02a8e33ad27a9c368"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecc24af7f905f3d6efb371a01680116ffea8d64e266618fb9ad1602a9b4f7934"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dc86aa6b29d174713c5f4caac35ffb7f232e3e649113e8d13812b35ab078228"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3dcfbe7266e74a707173a12a7b355a531f2dcfbdb32f09468e664330da14874"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b23806fbdd6b510ba9ac93bb72d503066263b0fba44b71b835be9f063a84025f"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5551d68264c1bb6943f542da83a4dc8940ede52c5847ef158698799cc28d14f5"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:13d8675a1fa7e2b19650ca7ef9a6ec01391d4bb12ab9e0793e8eb024538b4a34"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9b6a5de507b9be6de688dae40143b656f7a93b10995fb8bd90deb555e7875c60"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:111a20a3c090cf244d9406e60500b6c34b2375ba3a5009e2b38fd806fe38e337"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-win32.whl", hash = "sha256:22589c0b8ccc6c391ce7f776c93a8c92c96ab8d34e1a19f1bd2b12a235332632"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:6f83221db5755b8f34222e40607d87f1176a8d5d4dbda4a55a0f0b67d588a69c"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-win_arm64.whl", hash = "sha256:3665b92e788578c3bb334bd5b5fa7ee1a84bafd68be438e3110861d1578c63a0"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d7df9c2194c7ec930b33c991c55dbd0c10951bd25800c0b7a7b571994ebbced5"}, - {file = 
"rapidfuzz-3.9.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:68bd888eafd07b09585dcc8bc2716c5ecdb7eed62827470664d25588982b2873"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1230e0f9026851a6a432beaa0ce575dda7b39fe689b576f99a0704fbb81fc9c"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3b36e1c61b796ae1777f3e9e11fd39898b09d351c9384baf6e3b7e6191d8ced"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dba13d86806fcf3fe9c9919f58575e0090eadfb89c058bde02bcc7ab24e4548"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1f1a33e84056b7892c721d84475d3bde49a145126bc4c6efe0d6d0d59cb31c29"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3492c7a42b7fa9f0051d7fcce9893e95ed91c97c9ec7fb64346f3e070dd318ed"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:ece45eb2af8b00f90d10f7419322e8804bd42fb1129026f9bfe712c37508b514"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcd14cf4876f04b488f6e54a7abd3e9b31db5f5a6aba0ce90659917aaa8c088"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:521c58c72ed8a612b25cda378ff10dee17e6deb4ee99a070b723519a345527b9"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18669bb6cdf7d40738526d37e550df09ba065b5a7560f3d802287988b6cb63cf"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7abe2dbae81120a64bb4f8d3fcafe9122f328c9f86d7f327f174187a5af4ed86"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a3c0783910911f4f24655826d007c9f4360f08107410952c01ee3df98c713eb2"}, - {file = 
"rapidfuzz-3.9.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:03126f9a040ff21d2a110610bfd6b93b79377ce8b4121edcb791d61b7df6eec5"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:591908240f4085e2ade5b685c6e8346e2ed44932cffeaac2fb32ddac95b55c7f"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9012d86c6397edbc9da4ac0132de7f8ee9d6ce857f4194d5684c4ddbcdd1c5c"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df596ddd3db38aa513d4c0995611267b3946e7cbe5a8761b50e9306dfec720ee"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3ed5adb752f4308fcc8f4fb6f8eb7aa4082f9d12676fda0a74fa5564242a8107"}, - {file = "rapidfuzz-3.9.7.tar.gz", hash = "sha256:f1c7296534c1afb6f495aa95871f14ccdc197c6db42965854e483100df313030"}, -] - -[package.extras] -full = ["numpy"] - -[[package]] -name = "readabilipy" -version = "0.2.0" -description = "Python wrapper for Mozilla's Readability.js" -optional = false -python-versions = ">=3.6.0" +python-versions = ">=3.9" files = [ - {file = "readabilipy-0.2.0-py3-none-any.whl", hash = "sha256:0050853cd6ab012ac75bb4d8f06427feb7dc32054da65060da44654d049802d0"}, - {file = "readabilipy-0.2.0.tar.gz", hash = "sha256:098bf347b19f362042fb6c08864ad776588bf844ac2261fb230f7f9c250fdae5"}, -] - -[package.dependencies] -beautifulsoup4 = ">=4.7.1" -html5lib = "*" -lxml = "*" -regex = "*" - -[package.extras] -dev = ["coveralls", "m2r", "pycodestyle", "pyflakes", "pylint", "pytest", "pytest-benchmark", "pytest-cov", "sphinx"] -docs = ["m2r", "sphinx"] -test = ["coveralls", "pycodestyle", "pyflakes", "pylint", "pytest", "pytest-benchmark", "pytest-cov"] + {file = "rapidfuzz-3.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:884453860de029380dded8f3c1918af2d8eb5adf8010261645c7e5c88c2b5428"}, + {file = 
"rapidfuzz-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718c9bd369288aca5fa929df6dbf66fdbe9768d90940a940c0b5cdc96ade4309"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a68e3724b7dab761c01816aaa64b0903734d999d5589daf97c14ef5cc0629a8e"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1af60988d47534246d9525f77288fdd9de652608a4842815d9018570b959acc6"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3084161fc3e963056232ef8d937449a2943852e07101f5a136c8f3cfa4119217"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6cd67d3d017296d98ff505529104299f78433e4b8af31b55003d901a62bbebe9"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b11a127ac590fc991e8a02c2d7e1ac86e8141c92f78546f18b5c904064a0552c"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aadce42147fc09dcef1afa892485311e824c050352e1aa6e47f56b9b27af4cf0"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b54853c2371bf0e38d67da379519deb6fbe70055efb32f6607081641af3dc752"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ce19887268e90ee81a3957eef5e46a70ecc000713796639f83828b950343f49e"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f39a2a5ded23b9b9194ec45740dce57177b80f86c6d8eba953d3ff1a25c97766"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0ec338d5f4ad8d9339a88a08db5c23e7f7a52c2b2a10510c48a0cef1fb3f0ddc"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-win32.whl", hash = "sha256:56fd15ea8f4c948864fa5ebd9261c67cf7b89a1c517a0caef4df75446a7af18c"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:43dfc5e733808962a822ff6d9c29f3039a3cfb3620706f5953e17cfe4496724c"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-win_arm64.whl", hash = "sha256:ae7966f205b5a7fde93b44ca8fed37c1c8539328d7f179b1197de34eceaceb5f"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb0013795b40db5cf361e6f21ee7cda09627cf294977149b50e217d7fe9a2f03"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:69ef5b363afff7150a1fbe788007e307b9802a2eb6ad92ed51ab94e6ad2674c6"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c582c46b1bb0b19f1a5f4c1312f1b640c21d78c371a6615c34025b16ee56369b"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:288f6f6e7410cacb115fb851f3f18bf0e4231eb3f6cb5bd1cec0e7b25c4d039d"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9e29a13d2fd9be3e7d8c26c7ef4ba60b5bc7efbc9dbdf24454c7e9ebba31768"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea2da0459b951ee461bd4e02b8904890bd1c4263999d291c5cd01e6620177ad4"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457827ba82261aa2ae6ac06a46d0043ab12ba7216b82d87ae1434ec0f29736d6"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5d350864269d56f51ab81ab750c9259ae5cad3152c0680baef143dcec92206a1"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a9b8f51e08c3f983d857c3889930af9ddecc768453822076683664772d87e374"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7f3a6aa6e70fc27e4ff5c479f13cc9fc26a56347610f5f8b50396a0d344c5f55"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:803f255f10d63420979b1909ef976e7d30dec42025c9b067fc1d2040cc365a7e"}, + {file = 
"rapidfuzz-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2026651761bf83a0f31495cc0f70840d5c0d54388f41316e3f9cb51bd85e49a5"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-win32.whl", hash = "sha256:4df75b3ebbb8cfdb9bf8b213b168620b88fd92d0c16a8bc9f9234630b282db59"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f9f0bbfb6787b97c51516f3ccf97737d504db5d239ad44527673b81f598b84ab"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-win_arm64.whl", hash = "sha256:10fdad800441b9c97d471a937ba7d42625f1b530db05e572f1cb7d401d95c893"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7dc87073ba3a40dd65591a2100aa71602107443bf10770579ff9c8a3242edb94"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a425a0a868cf8e9c6e93e1cda4b758cdfd314bb9a4fc916c5742c934e3613480"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d5d1d75e61df060c1e56596b6b0a4422a929dff19cc3dbfd5eee762c86b61"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34f213d59219a9c3ca14e94a825f585811a68ac56b4118b4dc388b5b14afc108"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96ad46f5f56f70fab2be9e5f3165a21be58d633b90bf6e67fc52a856695e4bcf"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9178277f72d144a6c7704d7ae7fa15b7b86f0f0796f0e1049c7b4ef748a662ef"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76a35e9e19a7c883c422ffa378e9a04bc98cb3b29648c5831596401298ee51e6"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a6405d34c394c65e4f73a1d300c001f304f08e529d2ed6413b46ee3037956eb"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:bd393683129f446a75d8634306aed7e377627098a1286ff3af2a4f1736742820"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b0445fa9880ead81f5a7d0efc0b9c977a947d8052c43519aceeaf56eabaf6843"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c50bc308fa29767ed8f53a8d33b7633a9e14718ced038ed89d41b886e301da32"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e89605afebbd2d4b045bccfdc12a14b16fe8ccbae05f64b4b4c64a97dad1c891"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-win32.whl", hash = "sha256:2db9187f3acf3cd33424ecdbaad75414c298ecd1513470df7bda885dcb68cc15"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:50e3d0c72ea15391ba9531ead7f2068a67c5b18a6a365fef3127583aaadd1725"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-win_arm64.whl", hash = "sha256:9eac95b4278bd53115903d89118a2c908398ee8bdfd977ae844f1bd2b02b917c"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fe5231e8afd069c742ac5b4f96344a0fe4aff52df8e53ef87faebf77f827822c"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:886882367dbc985f5736356105798f2ae6e794e671fc605476cbe2e73838a9bb"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b33e13e537e3afd1627d421a142a12bbbe601543558a391a6fae593356842f6e"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:094c26116d55bf9c53abd840d08422f20da78ec4c4723e5024322321caedca48"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:545fc04f2d592e4350f59deb0818886c1b444ffba3bec535b4fbb97191aaf769"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:916a6abf3632e592b937c3d04c00a6efadd8fd30539cdcd4e6e4d92be7ca5d90"}, + {file = 
"rapidfuzz-3.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6ec40cef63b1922083d33bfef2f91fc0b0bc07b5b09bfee0b0f1717d558292"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c77a7330dd15c7eb5fd3631dc646fc96327f98db8181138766bd14d3e905f0ba"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:949b5e9eeaa4ecb4c7e9c2a4689dddce60929dd1ff9c76a889cdbabe8bbf2171"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b5363932a5aab67010ae1a6205c567d1ef256fb333bc23c27582481606be480c"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5dd6eec15b13329abe66cc241b484002ecb0e17d694491c944a22410a6a9e5e2"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79e7f98525b60b3c14524e0a4e1fedf7654657b6e02eb25f1be897ab097706f3"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-win32.whl", hash = "sha256:d29d1b9857c65f8cb3a29270732e1591b9bacf89de9d13fa764f79f07d8f1fd2"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:fa9720e56663cc3649d62b4b5f3145e94b8f5611e8a8e1b46507777249d46aad"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-win_arm64.whl", hash = "sha256:eda4c661e68dddd56c8fbfe1ca35e40dd2afd973f7ebb1605f4d151edc63dff8"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cffbc50e0767396ed483900900dd58ce4351bc0d40e64bced8694bd41864cc71"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c038b9939da3035afb6cb2f465f18163e8f070aba0482923ecff9443def67178"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca366c2e2a54e2f663f4529b189fdeb6e14d419b1c78b754ec1744f3c01070d4"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c4c82b1689b23b1b5e6a603164ed2be41b6f6de292a698b98ba2381e889eb9d"}, + {file = 
"rapidfuzz-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98f6ebe28831a482981ecfeedc8237047878424ad0c1add2c7f366ba44a20452"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd1a7676ee2a4c8e2f7f2550bece994f9f89e58afb96088964145a83af7408b"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec9139baa3f85b65adc700eafa03ed04995ca8533dd56c924f0e458ffec044ab"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:26de93e6495078b6af4c4d93a42ca067b16cc0e95699526c82ab7d1025b4d3bf"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f3a0bda83c18195c361b5500377d0767749f128564ca95b42c8849fd475bb327"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:63e4c175cbce8c3adc22dca5e6154588ae673f6c55374d156f3dac732c88d7de"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4dd3d8443970eaa02ab5ae45ce584b061f2799cd9f7e875190e2617440c1f9d4"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e5ddb2388610799fc46abe389600625058f2a73867e63e20107c5ad5ffa57c47"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-win32.whl", hash = "sha256:2e9be5d05cd960914024412b5406fb75a82f8562f45912ff86255acbfdbfb78e"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:47aca565a39c9a6067927871973ca827023e8b65ba6c5747f4c228c8d7ddc04f"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-win_arm64.whl", hash = "sha256:b0732343cdc4273b5921268026dd7266f75466eb21873cb7635a200d9d9c3fac"}, + {file = "rapidfuzz-3.10.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f744b5eb1469bf92dd143d36570d2bdbbdc88fe5cb0b5405e53dd34f479cbd8a"}, + {file = "rapidfuzz-3.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b67cc21a14327a0eb0f47bc3d7e59ec08031c7c55220ece672f9476e7a8068d3"}, + {file = 
"rapidfuzz-3.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe5783676f0afba4a522c80b15e99dbf4e393c149ab610308a8ef1f04c6bcc8"}, + {file = "rapidfuzz-3.10.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4688862f957c8629d557d084f20b2d803f8738b6c4066802a0b1cc472e088d9"}, + {file = "rapidfuzz-3.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20bd153aacc244e4c907d772c703fea82754c4db14f8aa64d75ff81b7b8ab92d"}, + {file = "rapidfuzz-3.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:50484d563f8bfa723c74c944b0bb15b9e054db9c889348c8c307abcbee75ab92"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5897242d455461f2c5b82d7397b29341fd11e85bf3608a522177071044784ee8"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:116c71a81e046ba56551d8ab68067ca7034d94b617545316d460a452c5c3c289"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0a547e4350d1fa32624d3eab51eff8cf329f4cae110b4ea0402486b1da8be40"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:399b9b79ccfcf50ca3bad7692bc098bb8eade88d7d5e15773b7f866c91156d0c"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7947a425d1be3e744707ee58c6cb318b93a56e08f080722dcc0347e0b7a1bb9a"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:94c48b4a2a4b1d22246f48e2b11cae01ec7d23f0c9123f8bb822839ad79d0a88"}, + {file = "rapidfuzz-3.10.0.tar.gz", hash = "sha256:6b62af27e65bb39276a66533655a2fa3c60a487b03935721c45b7809527979be"}, +] + +[package.extras] +all = ["numpy"] [[package]] name = "redis" @@ -7827,26 +6647,6 @@ files = [ {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"}, ] 
-[[package]] -name = "replicate" -version = "0.22.0" -description = "Python client for Replicate" -optional = false -python-versions = ">=3.8" -files = [ - {file = "replicate-0.22.0-py3-none-any.whl", hash = "sha256:a11e20e9589981a96bee6f3817494b5cc29735a108c71aff4515a81863ad9996"}, - {file = "replicate-0.22.0.tar.gz", hash = "sha256:cab48c15ede619d5aa7d023a241626d504c70ea2b7db5792ebfb5ae9fa373cbc"}, -] - -[package.dependencies] -httpx = ">=0.21.0,<1" -packaging = "*" -pydantic = ">1" -typing-extensions = ">=4.5.0" - -[package.extras] -dev = ["pylint", "pyright", "pytest", "pytest-asyncio", "pytest-recording", "respx", "ruff (>=0.1.3)"] - [[package]] name = "requests" version = "2.31.0" @@ -7868,20 +6668,6 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "requests-file" -version = "2.1.0" -description = "File transport adapter for Requests" -optional = false -python-versions = "*" -files = [ - {file = "requests_file-2.1.0-py2.py3-none-any.whl", hash = "sha256:cf270de5a4c5874e84599fc5778303d496c10ae5e870bfa378818f35d21bda5c"}, - {file = "requests_file-2.1.0.tar.gz", hash = "sha256:0f549a3f3b0699415ac04d167e9cb39bccfb730cb832b4d20be3d9867356e658"}, -] - -[package.dependencies] -requests = ">=1.0.0" - [[package]] name = "requests-oauthlib" version = "2.0.0" @@ -7900,20 +6686,6 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] -[[package]] -name = "requests-toolbelt" -version = "1.0.0" -description = "A utility belt for advanced users of python-requests" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, - {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, -] - -[package.dependencies] -requests = 
">=2.0.1,<3.0.0" - [[package]] name = "resend" version = "0.7.2" @@ -8293,13 +7065,13 @@ test = ["accelerate (>=0.24.1,<=0.27.0)", "apache-airflow (==2.9.3)", "apache-ai [[package]] name = "sagemaker-core" -version = "1.0.4" +version = "1.0.9" description = "An python package for sagemaker core functionalities" optional = false python-versions = ">=3.8" files = [ - {file = "sagemaker_core-1.0.4-py3-none-any.whl", hash = "sha256:bf71d988dbda03a3cd1557524f2fab4f19d89e54bd38fc7f05bbbcf580715f95"}, - {file = "sagemaker_core-1.0.4.tar.gz", hash = "sha256:203f4eb9d0d2a0e6ba80d79ba8c28b8ea27c94d04f6d9ff01c2fd55b95615c78"}, + {file = "sagemaker_core-1.0.9-py3-none-any.whl", hash = "sha256:7a22c46cf93594f8d44e3523d4ba98407911f3530af68a8ffdde5082d3b26fa3"}, + {file = "sagemaker_core-1.0.9.tar.gz", hash = "sha256:664115faf797412553fb81b97a4777e78e51dfd4454c32edb2c8371bf203c535"}, ] [package.dependencies] @@ -8473,34 +7245,24 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "74.1.2" +version = "75.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, - {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +core = 
["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] -[[package]] -name = "sgmllib3k" -version = "1.0.0" -description = "Py3k port of sgmllib." 
-optional = false -python-versions = "*" -files = [ - {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"}, -] - [[package]] name = "shapely" version = "2.0.6" @@ -8570,23 +7332,6 @@ files = [ {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, ] -[[package]] -name = "simple-websocket" -version = "1.0.0" -description = "Simple WebSocket server and client for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "simple-websocket-1.0.0.tar.gz", hash = "sha256:17d2c72f4a2bd85174a97e3e4c88b01c40c3f81b7b648b0cc3ce1305968928c8"}, - {file = "simple_websocket-1.0.0-py3-none-any.whl", hash = "sha256:1d5bf585e415eaa2083e2bcf02a3ecf91f9712e7b3e6b9fa0b461ad04e0837bc"}, -] - -[package.dependencies] -wsproto = "*" - -[package.extras] -docs = ["sphinx"] - [[package]] name = "six" version = "1.16.0" @@ -8644,60 +7389,60 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.34" +version = "2.0.35" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d"}, - {file = 
"SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-win32.whl", hash = "sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-win_amd64.whl", hash = "sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7db3db284a0edaebe87f8f6642c2b2c27ed85c3e70064b84d1c9e4ec06d5d84"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:430093fce0efc7941d911d34f75a70084f12f6ca5c15d19595c18753edb7c33b"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79cb400c360c7c210097b147c16a9e4c14688a6402445ac848f296ade6283bbc"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fddde2368e777ea2a4891a3fb4341e910a056be0bb15303bf1b92f073b80c02"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-win32.whl", hash = "sha256:6daeb8382d0df526372abd9cb795c992e18eed25ef2c43afe518c73f8cccb721"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-win_amd64.whl", hash = "sha256:5bc08e75ed11693ecb648b7a0a4ed80da6d10845e44be0c98c03f2f880b68ff4"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53e68b091492c8ed2bd0141e00ad3089bcc6bf0e6ec4142ad6505b4afe64163e"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bcd18441a49499bf5528deaa9dee1f5c01ca491fc2791b13604e8f972877f812"}, - {file = 
"SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:165bbe0b376541092bf49542bd9827b048357f4623486096fc9aaa6d4e7c59a2"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97b850f73f8abbffb66ccbab6e55a195a0eb655e5dc74624d15cff4bfb35bd74"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-win32.whl", hash = "sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-win_amd64.whl", hash = "sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-win32.whl", hash = "sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-win_amd64.whl", hash = "sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2"}, - {file = 
"SQLAlchemy-2.0.34-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-win32.whl", hash = "sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-win_amd64.whl", hash = "sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8"}, - {file = 
"SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-win32.whl", hash = "sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-win_amd64.whl", hash = "sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8"}, - {file = "SQLAlchemy-2.0.34-py3-none-any.whl", hash = "sha256:7286c353ee6475613d8beff83167374006c6b3e3f0e6491bfe8ca610eb1dec0f"}, - {file = "sqlalchemy-2.0.34.tar.gz", hash = "sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, + {file = 
"SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, + {file = 
"SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, + {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = 
"sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, + {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, ] [package.dependencies] @@ -8729,30 +7474,15 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] -[[package]] -name = "sqlparse" -version = "0.5.1" -description = "A non-validating SQL parser." -optional = false -python-versions = ">=3.8" -files = [ - {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, - {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, -] - -[package.extras] -dev = ["build", "hatch"] -doc = ["sphinx"] - [[package]] name = "starlette" -version = "0.38.5" +version = "0.38.6" description = "The little ASGI library that shines." optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.38.5-py3-none-any.whl", hash = "sha256:632f420a9d13e3ee2a6f18f437b0a9f1faecb0bc42e1942aa2ea0e379a4c4206"}, - {file = "starlette-0.38.5.tar.gz", hash = "sha256:04a92830a9b6eb1442c766199d62260c3d4dc9c4f9188360626b1e0273cb7077"}, + {file = "starlette-0.38.6-py3-none-any.whl", hash = "sha256:4517a1409e2e73ee4951214ba012052b9e16f60e90d73cfb06192c19203bbb05"}, + {file = "starlette-0.38.6.tar.gz", hash = "sha256:863a1588f5574e70a821dadefb41e4881ea451a47a3cd1b4df359d4ffefe5ead"}, ] [package.dependencies] @@ -8761,29 +7491,15 @@ anyio = ">=3.4.0,<5" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] -[[package]] -name = "strictyaml" -version = "1.7.3" -description = "Strict, typed YAML parser" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "strictyaml-1.7.3-py3-none-any.whl", hash = "sha256:fb5c8a4edb43bebb765959e420f9b3978d7f1af88c80606c03fb420888f5d1c7"}, - {file = 
"strictyaml-1.7.3.tar.gz", hash = "sha256:22f854a5fcab42b5ddba8030a0e4be51ca89af0267961c8d6cfa86395586c407"}, -] - -[package.dependencies] -python-dateutil = ">=2.6.0" - [[package]] name = "sympy" -version = "1.13.2" +version = "1.13.3" description = "Computer algebra system (CAS) in Python" optional = false python-versions = ">=3.8" files = [ - {file = "sympy-1.13.2-py3-none-any.whl", hash = "sha256:c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9"}, - {file = "sympy-1.13.2.tar.gz", hash = "sha256:401449d84d07be9d0c7a46a64bd54fe097667d5e7181bfe67ec777be9e01cb13"}, + {file = "sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73"}, + {file = "sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9"}, ] [package.dependencies] @@ -8849,13 +7565,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "tencentcloud-sdk-python-common" -version = "3.0.1231" +version = "3.0.1242" description = "Tencent Cloud Common SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-common-3.0.1231.tar.gz", hash = "sha256:22aa281ca2eac511e1615b2953da7c4a0bec87cf93a05a7a15dbb61b23a215ee"}, - {file = "tencentcloud_sdk_python_common-3.0.1231-py2.py3-none-any.whl", hash = "sha256:bd0f7c4df4b156ec35c8731afa1f498043c7e1cd5d2feb595ee441fdb45a061e"}, + {file = "tencentcloud-sdk-python-common-3.0.1242.tar.gz", hash = "sha256:fb9cd993f5d1378932b495e85ba4f29b0b04b4c30d174a6ba98d8995096fe8f0"}, + {file = "tencentcloud_sdk_python_common-3.0.1242-py2.py3-none-any.whl", hash = "sha256:a106d926e772e9a89717b72595e595636ab09907f3c56a3ff99ba2d0444e0da4"}, ] [package.dependencies] @@ -8863,17 +7579,17 @@ requests = ">=2.16.0" [[package]] name = "tencentcloud-sdk-python-hunyuan" -version = "3.0.1231" +version = "3.0.1242" description = "Tencent Cloud Hunyuan SDK for Python" optional = false python-versions = "*" files = [ - 
{file = "tencentcloud-sdk-python-hunyuan-3.0.1231.tar.gz", hash = "sha256:6da12f418f14305b3a6b7bb29b6d95bf4038a6b66b81c0e03b8dafc4f6df99ca"}, - {file = "tencentcloud_sdk_python_hunyuan-3.0.1231-py2.py3-none-any.whl", hash = "sha256:21ba28f69c34c15e20900be3f2c06376fcaf7e58265f939833c55631f2348792"}, + {file = "tencentcloud-sdk-python-hunyuan-3.0.1242.tar.gz", hash = "sha256:4d0daf9349ac9274887d08514b988e30101ab2a659c034590ce5920458585840"}, + {file = "tencentcloud_sdk_python_hunyuan-3.0.1242-py2.py3-none-any.whl", hash = "sha256:1989f13b5f365ff1c6b3c3fb9c6d2fda71dc63cf62b98f26f1dc449d89547852"}, ] [package.dependencies] -tencentcloud-sdk-python-common = "3.0.1231" +tencentcloud-sdk-python-common = "3.0.1242" [[package]] name = "threadpoolctl" @@ -8955,37 +7671,6 @@ requests = ">=2.26.0" [package.extras] blobfile = ["blobfile (>=2)"] -[[package]] -name = "tinysegmenter" -version = "0.3" -description = "Very compact Japanese tokenizer" -optional = false -python-versions = "*" -files = [ - {file = "tinysegmenter-0.3.tar.gz", hash = "sha256:ed1f6d2e806a4758a73be589754384cbadadc7e1a414c81a166fc9adf2d40c6d"}, -] - -[[package]] -name = "tldextract" -version = "5.1.2" -description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "tldextract-5.1.2-py3-none-any.whl", hash = "sha256:4dfc4c277b6b97fa053899fcdb892d2dc27295851ab5fac4e07797b6a21b2e46"}, - {file = "tldextract-5.1.2.tar.gz", hash = "sha256:c9e17f756f05afb5abac04fe8f766e7e70f9fe387adb1859f0f52408ee060200"}, -] - -[package.dependencies] -filelock = ">=3.0.8" -idna = "*" -requests = ">=2.1.0" -requests-file = ">=1.4" - -[package.extras] -release = ["build", "twine"] -testing = ["black", "mypy", "pytest", "pytest-gitignore", "pytest-mock", "responses", "ruff", "syrupy", "tox", "types-filelock", "types-requests"] - [[package]] name = "tokenizers" version = "0.15.2" @@ -9172,91 +7857,6 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] -[[package]] -name = "transformers" -version = "4.35.2" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "transformers-4.35.2-py3-none-any.whl", hash = "sha256:9dfa76f8692379544ead84d98f537be01cd1070de75c74efb13abcbc938fbe2f"}, - {file = "transformers-4.35.2.tar.gz", hash = "sha256:2d125e197d77b0cdb6c9201df9fa7e2101493272e448b9fba9341c695bee2f52"}, -] - -[package.dependencies] -filelock = "*" -huggingface-hub = ">=0.16.4,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -safetensors = ">=0.3.1" -tokenizers = ">=0.14,<0.19" -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate (>=0.20.3)"] -agents = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.10,!=1.12.0)"] -all = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", 
"phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"] -audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.20.3)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", 
"tensorboard", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.19)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy 
(>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -docs = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"] -docs-specific = ["hf-doc-builder"] -flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] -flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune]", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.6)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "black (>=23.1,<24.0)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (>=0.0.241,<=0.0.259)", "urllib3 (<2.0.0)"] -ray = ["ray[tune]"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic (<2)", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = 
["scikit-learn"] -speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "tensorboard", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm"] -tokenizers = ["tokenizers (>=0.14,<0.19)"] -torch = ["accelerate (>=0.20.3)", "torch (>=1.10,!=1.12.0)"] -torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow (<10.0.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.16.4,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "tqdm (>=4.27)"] -video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow (<10.0.0)"] - -[[package]] -name = "twilio" -version = "9.0.5" -description = "Twilio API client and TwiML generator" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "twilio-9.0.5-py2.py3-none-any.whl", hash = "sha256:5e09e910b9368f50f23cb3c3dd5ba77164d80a81e9d97db955cbac322deb2a4e"}, - {file = "twilio-9.0.5.tar.gz", hash = "sha256:e9b5727943584d25d618fe502f0100fc5283215f31c863f80b5c64581b4702b0"}, -] - -[package.dependencies] -aiohttp = ">=3.8.4" -aiohttp-retry = ">=2.8.3" -PyJWT = 
">=2.0.0,<3.0.0" -requests = ">=2.0.0" - [[package]] name = "typer" version = "0.12.5" @@ -9274,20 +7874,6 @@ rich = ">=10.11.0" shellingham = ">=1.3.0" typing-extensions = ">=3.7.4.3" -[[package]] -name = "types-requests" -version = "2.32.0.20240914" -description = "Typing stubs for requests" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"}, - {file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"}, -] - -[package.dependencies] -urllib3 = ">=2" - [[package]] name = "typing-extensions" version = "4.12.2" @@ -9316,32 +7902,15 @@ typing-extensions = ">=3.7.4" [[package]] name = "tzdata" -version = "2024.1" +version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "tzlocal" -version = "5.2" -description = "tzinfo object for the local timezone" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, - {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] -[package.dependencies] -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["check-manifest", 
"pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] - [[package]] name = "ujson" version = "5.10.0" @@ -9542,13 +8111,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.30.6" +version = "0.31.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, - {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, + {file = "uvicorn-0.31.0-py3-none-any.whl", hash = "sha256:cac7be4dd4d891c363cd942160a7b02e69150dcbc7a36be04d5f4af4b17c8ced"}, + {file = "uvicorn-0.31.0.tar.gz", hash = "sha256:13bc21373d103859f68fe739608e2eb054a816dea79189bc3ca08ea89a275906"}, ] [package.dependencies] @@ -9621,57 +8190,6 @@ files = [ {file = "validators-0.21.0.tar.gz", hash = "sha256:245b98ab778ed9352a7269c6a8f6c2a839bed5b2a7e3e60273ce399d247dd4b3"}, ] -[[package]] -name = "vanna" -version = "0.5.5" -description = "Generate SQL queries from natural language" -optional = false -python-versions = ">=3.9" -files = [ - {file = "vanna-0.5.5-py3-none-any.whl", hash = "sha256:e1a308b7127b9e98c2579c0e4178fc1475d891c498e4a0667cffa10df8891e73"}, - {file = "vanna-0.5.5.tar.gz", hash = "sha256:7d9bf188a635bb75e4f8db15f0e6dbe72a426784779485f087b2df0ce175e664"}, -] - -[package.dependencies] -clickhouse_driver = {version = "*", optional = true, markers = "extra == \"clickhouse\""} -db-dtypes = {version = "*", optional = true, markers = "extra == \"postgres\""} -duckdb = {version = "*", optional = true, markers = "extra == \"duckdb\""} -flask = "*" -flask-sock = "*" -kaleido = "*" -pandas = "*" -plotly = "*" -psycopg2-binary = {version = "*", optional = true, markers = "extra == \"postgres\""} -PyMySQL = {version = "*", optional = true, markers = "extra == \"mysql\""} -requests = "*" -sqlalchemy = "*" -sqlparse = "*" -tabulate = 
"*" - -[package.extras] -all = ["PyMySQL", "anthropic", "chromadb", "db-dtypes", "duckdb", "fastembed", "google-cloud-aiplatform", "google-cloud-bigquery", "google-generativeai", "httpx", "marqo", "mistralai", "ollama", "openai", "opensearch-dsl", "opensearch-py", "pinecone-client", "psycopg2-binary", "qdrant-client", "snowflake-connector-python", "transformers", "zhipuai"] -anthropic = ["anthropic"] -bigquery = ["google-cloud-bigquery"] -chromadb = ["chromadb"] -clickhouse = ["clickhouse_driver"] -duckdb = ["duckdb"] -gemini = ["google-generativeai"] -google = ["google-cloud-aiplatform", "google-generativeai"] -hf = ["transformers"] -marqo = ["marqo"] -mistralai = ["mistralai"] -mysql = ["PyMySQL"] -ollama = ["httpx", "ollama"] -openai = ["openai"] -opensearch = ["opensearch-dsl", "opensearch-py"] -pinecone = ["fastembed", "pinecone-client"] -postgres = ["db-dtypes", "psycopg2-binary"] -qdrant = ["fastembed", "qdrant-client"] -snowflake = ["snowflake-connector-python"] -test = ["tox"] -vllm = ["vllm"] -zhipuai = ["zhipuai"] - [[package]] name = "vine" version = "5.1.0" @@ -9685,12 +8203,12 @@ files = [ [[package]] name = "volcengine-python-sdk" -version = "1.0.101" +version = "1.0.103" description = "Volcengine SDK for Python" optional = false python-versions = "*" files = [ - {file = "volcengine-python-sdk-1.0.101.tar.gz", hash = "sha256:1b76e71a6dcf3d5be1b2c058e7d281359e6cca2cc920ffe2567d3115beea1d02"}, + {file = "volcengine-python-sdk-1.0.103.tar.gz", hash = "sha256:49fa8572802724972e1cb47a7e692b184b055f41b09099358c1a0fad1d146af5"}, ] [package.dependencies] @@ -9831,126 +8349,115 @@ validators = ">=0.18.2,<=0.21.0" [package.extras] grpc = ["grpcio", "grpcio-tools"] -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = 
"sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - [[package]] name = "websocket-client" -version = "1.7.0" +version = "1.8.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" files = [ - {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, - {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, ] [package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] name = "websockets" -version = "13.0.1" +version = "13.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.8" files = [ - {file = "websockets-13.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1841c9082a3ba4a05ea824cf6d99570a6a2d8849ef0db16e9c826acb28089e8f"}, - {file = "websockets-13.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c5870b4a11b77e4caa3937142b650fbbc0914a3e07a0cf3131f35c0587489c1c"}, - {file = "websockets-13.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f1d3d1f2eb79fe7b0fb02e599b2bf76a7619c79300fc55f0b5e2d382881d4f7f"}, - {file = "websockets-13.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15c7d62ee071fa94a2fc52c2b472fed4af258d43f9030479d9c4a2de885fd543"}, - {file = 
"websockets-13.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6724b554b70d6195ba19650fef5759ef11346f946c07dbbe390e039bcaa7cc3d"}, - {file = "websockets-13.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a952fa2ae57a42ba7951e6b2605e08a24801a4931b5644dfc68939e041bc7f"}, - {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17118647c0ea14796364299e942c330d72acc4b248e07e639d34b75067b3cdd8"}, - {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64a11aae1de4c178fa653b07d90f2fb1a2ed31919a5ea2361a38760192e1858b"}, - {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0617fd0b1d14309c7eab6ba5deae8a7179959861846cbc5cb528a7531c249448"}, - {file = "websockets-13.0.1-cp310-cp310-win32.whl", hash = "sha256:11f9976ecbc530248cf162e359a92f37b7b282de88d1d194f2167b5e7ad80ce3"}, - {file = "websockets-13.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c3c493d0e5141ec055a7d6809a28ac2b88d5b878bb22df8c621ebe79a61123d0"}, - {file = "websockets-13.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:699ba9dd6a926f82a277063603fc8d586b89f4cb128efc353b749b641fcddda7"}, - {file = "websockets-13.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf2fae6d85e5dc384bf846f8243ddaa9197f3a1a70044f59399af001fd1f51d4"}, - {file = "websockets-13.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:52aed6ef21a0f1a2a5e310fb5c42d7555e9c5855476bbd7173c3aa3d8a0302f2"}, - {file = "websockets-13.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eb2b9a318542153674c6e377eb8cb9ca0fc011c04475110d3477862f15d29f0"}, - {file = "websockets-13.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5df891c86fe68b2c38da55b7aea7095beca105933c697d719f3f45f4220a5e0e"}, - {file = 
"websockets-13.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac2d146ff30d9dd2fcf917e5d147db037a5c573f0446c564f16f1f94cf87462"}, - {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b8ac5b46fd798bbbf2ac6620e0437c36a202b08e1f827832c4bf050da081b501"}, - {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:46af561eba6f9b0848b2c9d2427086cabadf14e0abdd9fde9d72d447df268418"}, - {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b5a06d7f60bc2fc378a333978470dfc4e1415ee52f5f0fce4f7853eb10c1e9df"}, - {file = "websockets-13.0.1-cp311-cp311-win32.whl", hash = "sha256:556e70e4f69be1082e6ef26dcb70efcd08d1850f5d6c5f4f2bcb4e397e68f01f"}, - {file = "websockets-13.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:67494e95d6565bf395476e9d040037ff69c8b3fa356a886b21d8422ad86ae075"}, - {file = "websockets-13.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f9c9e258e3d5efe199ec23903f5da0eeaad58cf6fccb3547b74fd4750e5ac47a"}, - {file = "websockets-13.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6b41a1b3b561f1cba8321fb32987552a024a8f67f0d05f06fcf29f0090a1b956"}, - {file = "websockets-13.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f73e676a46b0fe9426612ce8caeca54c9073191a77c3e9d5c94697aef99296af"}, - {file = "websockets-13.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f613289f4a94142f914aafad6c6c87903de78eae1e140fa769a7385fb232fdf"}, - {file = "websockets-13.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f52504023b1480d458adf496dc1c9e9811df4ba4752f0bc1f89ae92f4f07d0c"}, - {file = "websockets-13.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:139add0f98206cb74109faf3611b7783ceafc928529c62b389917a037d4cfdf4"}, - {file = 
"websockets-13.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:47236c13be337ef36546004ce8c5580f4b1150d9538b27bf8a5ad8edf23ccfab"}, - {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c44ca9ade59b2e376612df34e837013e2b273e6c92d7ed6636d0556b6f4db93d"}, - {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9bbc525f4be3e51b89b2a700f5746c2a6907d2e2ef4513a8daafc98198b92237"}, - {file = "websockets-13.0.1-cp312-cp312-win32.whl", hash = "sha256:3624fd8664f2577cf8de996db3250662e259bfbc870dd8ebdcf5d7c6ac0b5185"}, - {file = "websockets-13.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0513c727fb8adffa6d9bf4a4463b2bade0186cbd8c3604ae5540fae18a90cb99"}, - {file = "websockets-13.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1ee4cc030a4bdab482a37462dbf3ffb7e09334d01dd37d1063be1136a0d825fa"}, - {file = "websockets-13.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dbb0b697cc0655719522406c059eae233abaa3243821cfdfab1215d02ac10231"}, - {file = "websockets-13.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:acbebec8cb3d4df6e2488fbf34702cbc37fc39ac7abf9449392cefb3305562e9"}, - {file = "websockets-13.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63848cdb6fcc0bf09d4a155464c46c64ffdb5807ede4fb251da2c2692559ce75"}, - {file = "websockets-13.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:872afa52a9f4c414d6955c365b6588bc4401272c629ff8321a55f44e3f62b553"}, - {file = "websockets-13.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e70fec7c54aad4d71eae8e8cab50525e899791fc389ec6f77b95312e4e9920"}, - {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e82db3756ccb66266504f5a3de05ac6b32f287faacff72462612120074103329"}, - {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:4e85f46ce287f5c52438bb3703d86162263afccf034a5ef13dbe4318e98d86e7"}, - {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3fea72e4e6edb983908f0db373ae0732b275628901d909c382aae3b592589f2"}, - {file = "websockets-13.0.1-cp313-cp313-win32.whl", hash = "sha256:254ecf35572fca01a9f789a1d0f543898e222f7b69ecd7d5381d8d8047627bdb"}, - {file = "websockets-13.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:ca48914cdd9f2ccd94deab5bcb5ac98025a5ddce98881e5cce762854a5de330b"}, - {file = "websockets-13.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b74593e9acf18ea5469c3edaa6b27fa7ecf97b30e9dabd5a94c4c940637ab96e"}, - {file = "websockets-13.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:132511bfd42e77d152c919147078460c88a795af16b50e42a0bd14f0ad71ddd2"}, - {file = "websockets-13.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:165bedf13556f985a2aa064309baa01462aa79bf6112fbd068ae38993a0e1f1b"}, - {file = "websockets-13.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e801ca2f448850685417d723ec70298feff3ce4ff687c6f20922c7474b4746ae"}, - {file = "websockets-13.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30d3a1f041360f029765d8704eae606781e673e8918e6b2c792e0775de51352f"}, - {file = "websockets-13.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67648f5e50231b5a7f6d83b32f9c525e319f0ddc841be0de64f24928cd75a603"}, - {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4f0426d51c8f0926a4879390f53c7f5a855e42d68df95fff6032c82c888b5f36"}, - {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ef48e4137e8799998a343706531e656fdec6797b80efd029117edacb74b0a10a"}, - {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:249aab278810bee585cd0d4de2f08cfd67eed4fc75bde623be163798ed4db2eb"}, - {file = 
"websockets-13.0.1-cp38-cp38-win32.whl", hash = "sha256:06c0a667e466fcb56a0886d924b5f29a7f0886199102f0a0e1c60a02a3751cb4"}, - {file = "websockets-13.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1f3cf6d6ec1142412d4535adabc6bd72a63f5f148c43fe559f06298bc21953c9"}, - {file = "websockets-13.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1fa082ea38d5de51dd409434edc27c0dcbd5fed2b09b9be982deb6f0508d25bc"}, - {file = "websockets-13.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a365bcb7be554e6e1f9f3ed64016e67e2fa03d7b027a33e436aecf194febb63"}, - {file = "websockets-13.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:10a0dc7242215d794fb1918f69c6bb235f1f627aaf19e77f05336d147fce7c37"}, - {file = "websockets-13.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59197afd478545b1f73367620407b0083303569c5f2d043afe5363676f2697c9"}, - {file = "websockets-13.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d20516990d8ad557b5abeb48127b8b779b0b7e6771a265fa3e91767596d7d97"}, - {file = "websockets-13.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1a2e272d067030048e1fe41aa1ec8cfbbaabce733b3d634304fa2b19e5c897f"}, - {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ad327ac80ba7ee61da85383ca8822ff808ab5ada0e4a030d66703cc025b021c4"}, - {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:518f90e6dd089d34eaade01101fd8a990921c3ba18ebbe9b0165b46ebff947f0"}, - {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:68264802399aed6fe9652e89761031acc734fc4c653137a5911c2bfa995d6d6d"}, - {file = "websockets-13.0.1-cp39-cp39-win32.whl", hash = "sha256:a5dc0c42ded1557cc7c3f0240b24129aefbad88af4f09346164349391dea8e58"}, - {file = "websockets-13.0.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:b448a0690ef43db5ef31b3a0d9aea79043882b4632cfc3eaab20105edecf6097"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:faef9ec6354fe4f9a2c0bbb52fb1ff852effc897e2a4501e25eb3a47cb0a4f89"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:03d3f9ba172e0a53e37fa4e636b86cc60c3ab2cfee4935e66ed1d7acaa4625ad"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d450f5a7a35662a9b91a64aefa852f0c0308ee256122f5218a42f1d13577d71e"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f55b36d17ac50aa8a171b771e15fbe1561217510c8768af3d546f56c7576cdc"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14b9c006cac63772b31abbcd3e3abb6228233eec966bf062e89e7fa7ae0b7333"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b79915a1179a91f6c5f04ece1e592e2e8a6bd245a0e45d12fd56b2b59e559a32"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f40de079779acbcdbb6ed4c65af9f018f8b77c5ec4e17a4b737c05c2db554491"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80e4ba642fc87fa532bac07e5ed7e19d56940b6af6a8c61d4429be48718a380f"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a02b0161c43cc9e0232711eff846569fad6ec836a7acab16b3cf97b2344c060"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aa74a45d4cdc028561a7d6ab3272c8b3018e23723100b12e58be9dfa5a24491"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:00fd961943b6c10ee6f0b1130753e50ac5dcd906130dcd77b0003c3ab797d026"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d93572720d781331fb10d3da9ca1067817d84ad1e7c31466e9f5e59965618096"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:71e6e5a3a3728886caee9ab8752e8113670936a193284be9d6ad2176a137f376"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c4a6343e3b0714e80da0b0893543bf9a5b5fa71b846ae640e56e9abc6fbc4c83"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a678532018e435396e37422a95e3ab87f75028ac79570ad11f5bf23cd2a7d8c"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6716c087e4aa0b9260c4e579bb82e068f84faddb9bfba9906cb87726fa2e870"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e33505534f3f673270dd67f81e73550b11de5b538c56fe04435d63c02c3f26b5"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acab3539a027a85d568c2573291e864333ec9d912675107d6efceb7e2be5d980"}, - {file = "websockets-13.0.1-py3-none-any.whl", hash = "sha256:b80f0c51681c517604152eb6a572f5a9378f877763231fddb883ba2f968e8817"}, - {file = "websockets-13.0.1.tar.gz", hash = "sha256:4d6ece65099411cfd9a48d13701d7438d9c34f479046b34c50ff60bb8834e43e"}, + {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, + {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, + {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, + {file = 
"websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, + {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, + {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, + {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, + {file = 
"websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, + {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, + {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, + {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, + {file = 
"websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, + {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, + {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, + {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, + {file = 
"websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, + {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, + {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, + {file = "websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"}, + {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"}, + {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = 
"sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"}, + {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"}, + {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, + {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, + {file = "websockets-13.1-cp39-cp39-win32.whl", 
hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, + {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, + {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"}, + {file = 
"websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"}, + {file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, + {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, + {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, + {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, ] [[package]] @@ -9970,20 +8477,6 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog (>=2.3)"] -[[package]] -name = "wikipedia" -version = "1.4.0" -description = "Wikipedia API for Python" -optional = false -python-versions = "*" -files = [ - {file = "wikipedia-1.4.0.tar.gz", hash = 
"sha256:db0fad1829fdd441b1852306e9856398204dc0786d2996dd2e0c8bb8e26133b2"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -requests = ">=2.0.0,<3.0.0" - [[package]] name = "win32-setctime" version = "1.1.0" @@ -10077,39 +8570,6 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] -[[package]] -name = "wsproto" -version = "1.2.0" -description = "WebSockets state-machine based protocol implementation" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, - {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, -] - -[package.dependencies] -h11 = ">=0.9.0,<1" - -[[package]] -name = "xinference-client" -version = "0.15.2" -description = "Client for Xinference" -optional = false -python-versions = "*" -files = [ - {file = "xinference-client-0.15.2.tar.gz", hash = "sha256:5c2259bb133148d1cc9bd2b8ec6eb8b5bbeba7f11d6252959f4e6cd79baa53ed"}, - {file = "xinference_client-0.15.2-py3-none-any.whl", hash = "sha256:b6275adab695e75e75a33e21e0ad212488fc2d5a4d0f693d544c0e78469abbe3"}, -] - -[package.dependencies] -pydantic = "*" -requests = "*" -typing-extensions = "*" - -[package.extras] -dev = ["black", "cython (>=0.29)", "flake8 (>=3.8.0)", "ipython (>=6.5.0)", "pytest (>=3.5.0)", "pytest-asyncio (>=0.14.0)", "pytest-cov (>=2.5.0)", "pytest-forked (>=1.0)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=1.2.0)"] - [[package]] name = "xlrd" version = "2.0.1" @@ -10253,51 +8713,6 @@ files = [ idna = ">=2.0" multidict = ">=4.0" -[[package]] -name = "yfinance" -version = "0.2.43" -description = "Download market data from Yahoo! 
Finance API" -optional = false -python-versions = "*" -files = [ - {file = "yfinance-0.2.43-py2.py3-none-any.whl", hash = "sha256:11b4f5515b17450bd3bdcdc26b299aeeaea7ff9cb63d0fa0a865f460c0c7618f"}, - {file = "yfinance-0.2.43.tar.gz", hash = "sha256:32404597f325a2a2c2708aceb8d552088dd26891ac0e6018f6c5f3f2f61055f0"}, -] - -[package.dependencies] -beautifulsoup4 = ">=4.11.1" -frozendict = ">=2.3.4" -html5lib = ">=1.1" -lxml = ">=4.9.1" -multitasking = ">=0.0.7" -numpy = ">=1.16.5" -pandas = ">=1.3.0" -peewee = ">=3.16.2" -platformdirs = ">=2.0.0" -pytz = ">=2022.5" -requests = ">=2.31" - -[package.extras] -nospam = ["requests-cache (>=1.0)", "requests-ratelimiter (>=0.3.1)"] -repair = ["scipy (>=1.6.3)"] - -[[package]] -name = "zhipuai" -version = "1.0.7" -description = "A SDK library for accessing big model apis from ZhipuAI" -optional = false -python-versions = ">=3.6" -files = [ - {file = "zhipuai-1.0.7-py3-none-any.whl", hash = "sha256:360c01b8c2698f366061452e86d5a36a5ff68a576ea33940da98e4806f232530"}, - {file = "zhipuai-1.0.7.tar.gz", hash = "sha256:b80f699543d83cce8648acf1ce32bc2725d1c1c443baffa5882abc2cc704d581"}, -] - -[package.dependencies] -cachetools = "*" -dataclasses = "*" -PyJWT = "*" -requests = "*" - [[package]] name = "zipp" version = "3.20.2" @@ -10501,4 +8916,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "c4580c22e2b220c8c80dbc3f765060a09e14874ed29b690c13a533bf0365e789" +content-hash = "46120eb2caaf416a798cfe425674e3dcf83a9c7f1b1273e7703ca32ebea21ffd" diff --git a/api/pyproject.toml b/api/pyproject.toml index e737761f3b2c0b..b5501ff228d48e 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -134,7 +134,6 @@ package-mode = false ############################################################ [tool.poetry.dependencies] -anthropic = "~0.23.1" authlib = "1.3.1" azure-identity = "1.16.1" azure-storage-blob = "12.13.0" @@ -145,10 +144,8 @@ bs4 = "~0.0.1" cachetools = "~5.3.0" celery = 
"~5.3.6" chardet = "~5.1.0" -cohere = "~5.2.4" cos-python-sdk-v5 = "1.9.30" esdk-obs-python = "3.24.6.1" -dashscope = { version = "~1.17.0", extras = ["tokenizer"] } flask = "~3.0.1" flask-compress = "~1.14" flask-cors = "~4.0.0" @@ -169,13 +166,12 @@ google-generativeai = "0.8.1" googleapis-common-protos = "1.63.0" gunicorn = "~22.0.0" httpx = { version = "~0.27.0", extras = ["socks"] } -huggingface-hub = "~0.16.4" jieba = "0.42.1" langfuse = "^2.48.0" langsmith = "^0.1.77" mailchimp-transactional = "~1.0.50" markdown = "~3.5.1" -novita-client = "^0.5.7" +nltk = "3.8.1" numpy = "~1.26.4" openai = "~1.29.0" openpyxl = "~3.1.5" @@ -192,9 +188,7 @@ python = ">=3.10,<3.13" python-docx = "~1.1.0" python-dotenv = "1.0.0" pyyaml = "~6.0.1" -readabilipy = "0.2.0" redis = { version = "~5.0.3", extras = ["hiredis"] } -replicate = "~0.22.0" resend = "~0.7.0" scikit-learn = "^1.5.1" sentry-sdk = { version = "~1.44.1", extras = ["flask"] } @@ -202,21 +196,15 @@ sqlalchemy = "~2.0.29" tencentcloud-sdk-python-hunyuan = "~3.0.1158" tiktoken = "~0.7.0" tokenizers = "~0.15.0" -transformers = "~4.35.0" unstructured = { version = "~0.10.27", extras = ["docx", "epub", "md", "msg", "ppt", "pptx"] } -websocket-client = "~1.7.0" werkzeug = "~3.0.1" -xinference-client = "0.15.2" yarl = "~1.9.4" -zhipuai = "1.0.7" # Before adding new dependency, consider place it in alphabet order (a-z) and suitable group. 
############################################################ # Related transparent dependencies with pinned version # required by main implementations ############################################################ -azure-ai-ml = "^1.19.0" -azure-ai-inference = "^1.0.0b3" volcengine-python-sdk = {extras = ["ark"], version = "^1.0.98"} oci = "^2.133.0" tos = "^2.7.1" @@ -231,20 +219,7 @@ safetensors = "~0.4.3" ############################################################ [tool.poetry.group.tool.dependencies] -arxiv = "2.1.0" -cloudscraper = "1.2.71" -matplotlib = "~3.8.2" -newspaper3k = "0.2.8" -duckduckgo-search = "^6.2.6" -jsonpath-ng = "1.6.1" -numexpr = "~2.9.0" -opensearch-py = "2.4.0" qrcode = "~7.4.2" -twilio = "~9.0.4" -vanna = { version = "0.5.5", extras = ["postgres", "mysql", "clickhouse", "duckdb"] } -wikipedia = "1.4.0" -yfinance = "~0.2.40" -nltk = "3.8.1" ############################################################ # VDB dependencies required by vector store clients ############################################################ diff --git a/api/tests/integration_tests/model_runtime/__mock/anthropic.py b/api/tests/integration_tests/model_runtime/__mock/anthropic.py deleted file mode 100644 index 79a3dc03941c9f..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/anthropic.py +++ /dev/null @@ -1,98 +0,0 @@ -import os -from collections.abc import Iterable -from typing import Any, Literal, Union - -import anthropic -import pytest -from _pytest.monkeypatch import MonkeyPatch -from anthropic import Anthropic, Stream -from anthropic.resources import Messages -from anthropic.types import ( - ContentBlock, - ContentBlockDeltaEvent, - Message, - MessageDeltaEvent, - MessageDeltaUsage, - MessageParam, - MessageStartEvent, - MessageStopEvent, - MessageStreamEvent, - TextDelta, - Usage, -) -from anthropic.types.message_delta_event import Delta - -MOCK = os.getenv("MOCK_SWITCH", "false") == "true" - - -class MockAnthropicClass: - @staticmethod - def 
mocked_anthropic_chat_create_sync(model: str) -> Message: - return Message( - id="msg-123", - type="message", - role="assistant", - content=[ContentBlock(text="hello, I'm a chatbot from anthropic", type="text")], - model=model, - stop_reason="stop_sequence", - usage=Usage(input_tokens=1, output_tokens=1), - ) - - @staticmethod - def mocked_anthropic_chat_create_stream(model: str) -> Stream[MessageStreamEvent]: - full_response_text = "hello, I'm a chatbot from anthropic" - - yield MessageStartEvent( - type="message_start", - message=Message( - id="msg-123", - content=[], - role="assistant", - model=model, - stop_reason=None, - type="message", - usage=Usage(input_tokens=1, output_tokens=1), - ), - ) - - index = 0 - for i in range(0, len(full_response_text)): - yield ContentBlockDeltaEvent( - type="content_block_delta", delta=TextDelta(text=full_response_text[i], type="text_delta"), index=index - ) - - index += 1 - - yield MessageDeltaEvent( - type="message_delta", delta=Delta(stop_reason="stop_sequence"), usage=MessageDeltaUsage(output_tokens=1) - ) - - yield MessageStopEvent(type="message_stop") - - def mocked_anthropic( - self: Messages, - *, - max_tokens: int, - messages: Iterable[MessageParam], - model: str, - stream: Literal[True], - **kwargs: Any, - ) -> Union[Message, Stream[MessageStreamEvent]]: - if len(self._client.api_key) < 18: - raise anthropic.AuthenticationError("Invalid API key") - - if stream: - return MockAnthropicClass.mocked_anthropic_chat_create_stream(model=model) - else: - return MockAnthropicClass.mocked_anthropic_chat_create_sync(model=model) - - -@pytest.fixture -def setup_anthropic_mock(request, monkeypatch: MonkeyPatch): - if MOCK: - monkeypatch.setattr(Messages, "create", MockAnthropicClass.mocked_anthropic) - - yield - - if MOCK: - monkeypatch.undo() diff --git a/api/tests/integration_tests/model_runtime/__mock/fishaudio.py b/api/tests/integration_tests/model_runtime/__mock/fishaudio.py deleted file mode 100644 index 
bec3babeafddab..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/fishaudio.py +++ /dev/null @@ -1,82 +0,0 @@ -import os -from collections.abc import Callable -from typing import Literal - -import httpx -import pytest -from _pytest.monkeypatch import MonkeyPatch - - -def mock_get(*args, **kwargs): - if kwargs.get("headers", {}).get("Authorization") != "Bearer test": - raise httpx.HTTPStatusError( - "Invalid API key", - request=httpx.Request("GET", ""), - response=httpx.Response(401), - ) - - return httpx.Response( - 200, - json={ - "items": [ - {"title": "Model 1", "_id": "model1"}, - {"title": "Model 2", "_id": "model2"}, - ] - }, - request=httpx.Request("GET", ""), - ) - - -def mock_stream(*args, **kwargs): - class MockStreamResponse: - def __init__(self): - self.status_code = 200 - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - pass - - def iter_bytes(self): - yield b"Mocked audio data" - - return MockStreamResponse() - - -def mock_fishaudio( - monkeypatch: MonkeyPatch, - methods: list[Literal["list-models", "tts"]], -) -> Callable[[], None]: - """ - mock fishaudio module - - :param monkeypatch: pytest monkeypatch fixture - :return: unpatch function - """ - - def unpatch() -> None: - monkeypatch.undo() - - if "list-models" in methods: - monkeypatch.setattr(httpx, "get", mock_get) - - if "tts" in methods: - monkeypatch.setattr(httpx, "stream", mock_stream) - - return unpatch - - -MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" - - -@pytest.fixture -def setup_fishaudio_mock(request, monkeypatch): - methods = request.param if hasattr(request, "param") else [] - if MOCK: - unpatch = mock_fishaudio(monkeypatch, methods=methods) - - yield - - if MOCK: - unpatch() diff --git a/api/tests/integration_tests/model_runtime/__mock/google.py b/api/tests/integration_tests/model_runtime/__mock/google.py deleted file mode 100644 index 402bd9c2c21f69..00000000000000 --- 
a/api/tests/integration_tests/model_runtime/__mock/google.py +++ /dev/null @@ -1,116 +0,0 @@ -from collections.abc import Generator - -import google.generativeai.types.generation_types as generation_config_types -import pytest -from _pytest.monkeypatch import MonkeyPatch -from google.ai import generativelanguage as glm -from google.ai.generativelanguage_v1beta.types import content as gag_content -from google.generativeai import GenerativeModel -from google.generativeai.client import _ClientManager, configure -from google.generativeai.types import GenerateContentResponse, content_types, safety_types -from google.generativeai.types.generation_types import BaseGenerateContentResponse - -current_api_key = "" - - -class MockGoogleResponseClass: - _done = False - - def __iter__(self): - full_response_text = "it's google!" - - for i in range(0, len(full_response_text) + 1, 1): - if i == len(full_response_text): - self._done = True - yield GenerateContentResponse( - done=True, iterator=None, result=glm.GenerateContentResponse({}), chunks=[] - ) - else: - yield GenerateContentResponse( - done=False, iterator=None, result=glm.GenerateContentResponse({}), chunks=[] - ) - - -class MockGoogleResponseCandidateClass: - finish_reason = "stop" - - @property - def content(self) -> gag_content.Content: - return gag_content.Content(parts=[gag_content.Part(text="it's google!")]) - - -class MockGoogleClass: - @staticmethod - def generate_content_sync() -> GenerateContentResponse: - return GenerateContentResponse(done=True, iterator=None, result=glm.GenerateContentResponse({}), chunks=[]) - - @staticmethod - def generate_content_stream() -> Generator[GenerateContentResponse, None, None]: - return MockGoogleResponseClass() - - def generate_content( - self: GenerativeModel, - contents: content_types.ContentsType, - *, - generation_config: generation_config_types.GenerationConfigType | None = None, - safety_settings: safety_types.SafetySettingOptions | None = None, - stream: bool = False, - 
**kwargs, - ) -> GenerateContentResponse: - global current_api_key - - if len(current_api_key) < 16: - raise Exception("Invalid API key") - - if stream: - return MockGoogleClass.generate_content_stream() - - return MockGoogleClass.generate_content_sync() - - @property - def generative_response_text(self) -> str: - return "it's google!" - - @property - def generative_response_candidates(self) -> list[MockGoogleResponseCandidateClass]: - return [MockGoogleResponseCandidateClass()] - - def make_client(self: _ClientManager, name: str): - global current_api_key - - if name.endswith("_async"): - name = name.split("_")[0] - cls = getattr(glm, name.title() + "ServiceAsyncClient") - else: - cls = getattr(glm, name.title() + "ServiceClient") - - # Attempt to configure using defaults. - if not self.client_config: - configure() - - client_options = self.client_config.get("client_options", None) - if client_options: - current_api_key = client_options.api_key - - def nop(self, *args, **kwargs): - pass - - original_init = cls.__init__ - cls.__init__ = nop - client: glm.GenerativeServiceClient = cls(**self.client_config) - cls.__init__ = original_init - - if not self.default_metadata: - return client - - -@pytest.fixture -def setup_google_mock(request, monkeypatch: MonkeyPatch): - monkeypatch.setattr(BaseGenerateContentResponse, "text", MockGoogleClass.generative_response_text) - monkeypatch.setattr(BaseGenerateContentResponse, "candidates", MockGoogleClass.generative_response_candidates) - monkeypatch.setattr(GenerativeModel, "generate_content", MockGoogleClass.generate_content) - monkeypatch.setattr(_ClientManager, "make_client", MockGoogleClass.make_client) - - yield - - monkeypatch.undo() diff --git a/api/tests/integration_tests/model_runtime/__mock/huggingface.py b/api/tests/integration_tests/model_runtime/__mock/huggingface.py deleted file mode 100644 index 97038ef5963e87..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/huggingface.py +++ /dev/null @@ 
-1,20 +0,0 @@ -import os - -import pytest -from _pytest.monkeypatch import MonkeyPatch -from huggingface_hub import InferenceClient - -from tests.integration_tests.model_runtime.__mock.huggingface_chat import MockHuggingfaceChatClass - -MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" - - -@pytest.fixture -def setup_huggingface_mock(request, monkeypatch: MonkeyPatch): - if MOCK: - monkeypatch.setattr(InferenceClient, "text_generation", MockHuggingfaceChatClass.text_generation) - - yield - - if MOCK: - monkeypatch.undo() diff --git a/api/tests/integration_tests/model_runtime/__mock/huggingface_chat.py b/api/tests/integration_tests/model_runtime/__mock/huggingface_chat.py deleted file mode 100644 index 9ee76c935c9873..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/huggingface_chat.py +++ /dev/null @@ -1,56 +0,0 @@ -import re -from collections.abc import Generator -from typing import Any, Literal, Optional, Union - -from _pytest.monkeypatch import MonkeyPatch -from huggingface_hub import InferenceClient -from huggingface_hub.inference._text_generation import ( - Details, - StreamDetails, - TextGenerationResponse, - TextGenerationStreamResponse, - Token, -) -from huggingface_hub.utils import BadRequestError - - -class MockHuggingfaceChatClass: - @staticmethod - def generate_create_sync(model: str) -> TextGenerationResponse: - response = TextGenerationResponse( - generated_text="You can call me Miku Miku o~e~o~", - details=Details( - finish_reason="length", - generated_tokens=6, - tokens=[Token(id=0, text="You", logprob=0.0, special=False) for i in range(0, 6)], - ), - ) - - return response - - @staticmethod - def generate_create_stream(model: str) -> Generator[TextGenerationStreamResponse, None, None]: - full_text = "You can call me Miku Miku o~e~o~" - - for i in range(0, len(full_text)): - response = TextGenerationStreamResponse( - token=Token(id=i, text=full_text[i], logprob=0.0, special=False), - ) - response.generated_text = 
full_text[i] - response.details = StreamDetails(finish_reason="stop_sequence", generated_tokens=1) - - yield response - - def text_generation( - self: InferenceClient, prompt: str, *, stream: Literal[False] = ..., model: Optional[str] = None, **kwargs: Any - ) -> Union[TextGenerationResponse, Generator[TextGenerationStreamResponse, None, None]]: - # check if key is valid - if not re.match(r"Bearer\shf\-[a-zA-Z0-9]{16,}", self.headers["authorization"]): - raise BadRequestError("Invalid API key") - - if model is None: - raise BadRequestError("Invalid model") - - if stream: - return MockHuggingfaceChatClass.generate_create_stream(model) - return MockHuggingfaceChatClass.generate_create_sync(model) diff --git a/api/tests/integration_tests/model_runtime/__mock/huggingface_tei.py b/api/tests/integration_tests/model_runtime/__mock/huggingface_tei.py deleted file mode 100644 index b9a721c803fc52..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/huggingface_tei.py +++ /dev/null @@ -1,94 +0,0 @@ -from core.model_runtime.model_providers.huggingface_tei.tei_helper import TeiModelExtraParameter - - -class MockTEIClass: - @staticmethod - def get_tei_extra_parameter(server_url: str, model_name: str) -> TeiModelExtraParameter: - # During mock, we don't have a real server to query, so we just return a dummy value - if "rerank" in model_name: - model_type = "reranker" - else: - model_type = "embedding" - - return TeiModelExtraParameter(model_type=model_type, max_input_length=512, max_client_batch_size=1) - - @staticmethod - def invoke_tokenize(server_url: str, texts: list[str]) -> list[list[dict]]: - # Use space as token separator, and split the text into tokens - tokenized_texts = [] - for text in texts: - tokens = text.split(" ") - current_index = 0 - tokenized_text = [] - for idx, token in enumerate(tokens): - s_token = { - "id": idx, - "text": token, - "special": False, - "start": current_index, - "stop": current_index + len(token), - } - current_index += 
len(token) + 1 - tokenized_text.append(s_token) - tokenized_texts.append(tokenized_text) - return tokenized_texts - - @staticmethod - def invoke_embeddings(server_url: str, texts: list[str]) -> dict: - # { - # "object": "list", - # "data": [ - # { - # "object": "embedding", - # "embedding": [...], - # "index": 0 - # } - # ], - # "model": "MODEL_NAME", - # "usage": { - # "prompt_tokens": 3, - # "total_tokens": 3 - # } - # } - embeddings = [] - for idx in range(len(texts)): - embedding = [0.1] * 768 - embeddings.append( - { - "object": "embedding", - "embedding": embedding, - "index": idx, - } - ) - return { - "object": "list", - "data": embeddings, - "model": "MODEL_NAME", - "usage": { - "prompt_tokens": sum(len(text.split(" ")) for text in texts), - "total_tokens": sum(len(text.split(" ")) for text in texts), - }, - } - - @staticmethod - def invoke_rerank(server_url: str, query: str, texts: list[str]) -> list[dict]: - # Example response: - # [ - # { - # "index": 0, - # "text": "Deep Learning is ...", - # "score": 0.9950755 - # } - # ] - reranked_docs = [] - for idx, text in enumerate(texts): - reranked_docs.append( - { - "index": idx, - "text": text, - "score": 0.9, - } - ) - # For mock, only return the first document - break - return reranked_docs diff --git a/api/tests/integration_tests/model_runtime/__mock/nomic_embeddings.py b/api/tests/integration_tests/model_runtime/__mock/nomic_embeddings.py deleted file mode 100644 index 281e866e45c2e9..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/nomic_embeddings.py +++ /dev/null @@ -1,59 +0,0 @@ -import os -from collections.abc import Callable -from typing import Any, Literal, Union - -import pytest - -# import monkeypatch -from _pytest.monkeypatch import MonkeyPatch -from nomic import embed - - -def create_embedding(texts: list[str], model: str, **kwargs: Any) -> dict: - texts_len = len(texts) - - foo_embedding_sample = 0.123456 - - combined = { - "embeddings": [[foo_embedding_sample for _ in 
range(768)] for _ in range(texts_len)], - "usage": {"prompt_tokens": texts_len, "total_tokens": texts_len}, - "model": model, - "inference_mode": "remote", - } - - return combined - - -def mock_nomic( - monkeypatch: MonkeyPatch, - methods: list[Literal["text_embedding"]], -) -> Callable[[], None]: - """ - mock nomic module - - :param monkeypatch: pytest monkeypatch fixture - :return: unpatch function - """ - - def unpatch() -> None: - monkeypatch.undo() - - if "text_embedding" in methods: - monkeypatch.setattr(embed, "text", create_embedding) - - return unpatch - - -MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" - - -@pytest.fixture -def setup_nomic_mock(request, monkeypatch): - methods = request.param if hasattr(request, "param") else [] - if MOCK: - unpatch = mock_nomic(monkeypatch, methods=methods) - - yield - - if MOCK: - unpatch() diff --git a/api/tests/integration_tests/model_runtime/__mock/openai.py b/api/tests/integration_tests/model_runtime/__mock/openai.py index 6637f4f212a50e..22d099739adc31 100644 --- a/api/tests/integration_tests/model_runtime/__mock/openai.py +++ b/api/tests/integration_tests/model_runtime/__mock/openai.py @@ -6,19 +6,9 @@ # import monkeypatch from _pytest.monkeypatch import MonkeyPatch -from openai.resources.audio.transcriptions import Transcriptions -from openai.resources.chat import Completions as ChatCompletions -from openai.resources.completions import Completions -from openai.resources.embeddings import Embeddings -from openai.resources.models import Models from openai.resources.moderations import Moderations -from tests.integration_tests.model_runtime.__mock.openai_chat import MockChatClass -from tests.integration_tests.model_runtime.__mock.openai_completion import MockCompletionsClass -from tests.integration_tests.model_runtime.__mock.openai_embeddings import MockEmbeddingsClass from tests.integration_tests.model_runtime.__mock.openai_moderation import MockModerationClass -from 
tests.integration_tests.model_runtime.__mock.openai_remote import MockModelClass -from tests.integration_tests.model_runtime.__mock.openai_speech2text import MockSpeech2TextClass def mock_openai( @@ -35,24 +25,9 @@ def mock_openai( def unpatch() -> None: monkeypatch.undo() - if "completion" in methods: - monkeypatch.setattr(Completions, "create", MockCompletionsClass.completion_create) - - if "chat" in methods: - monkeypatch.setattr(ChatCompletions, "create", MockChatClass.chat_create) - - if "remote" in methods: - monkeypatch.setattr(Models, "list", MockModelClass.list) - if "moderation" in methods: monkeypatch.setattr(Moderations, "create", MockModerationClass.moderation_create) - if "speech2text" in methods: - monkeypatch.setattr(Transcriptions, "create", MockSpeech2TextClass.speech2text_create) - - if "text_embedding" in methods: - monkeypatch.setattr(Embeddings, "create", MockEmbeddingsClass.create_embeddings) - return unpatch diff --git a/api/tests/integration_tests/model_runtime/__mock/openai_chat.py b/api/tests/integration_tests/model_runtime/__mock/openai_chat.py deleted file mode 100644 index 439f7d56e9b5d5..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/openai_chat.py +++ /dev/null @@ -1,269 +0,0 @@ -import re -from collections.abc import Generator -from json import dumps, loads -from time import time - -# import monkeypatch -from typing import Any, Literal, Optional, Union - -from openai import AzureOpenAI, OpenAI -from openai._types import NOT_GIVEN, NotGiven -from openai.resources.chat.completions import Completions -from openai.types import Completion as CompletionMessage -from openai.types.chat import ( - ChatCompletion, - ChatCompletionChunk, - ChatCompletionMessageParam, - ChatCompletionMessageToolCall, - ChatCompletionToolChoiceOptionParam, - ChatCompletionToolParam, - completion_create_params, -) -from openai.types.chat.chat_completion import ChatCompletion as _ChatCompletion -from openai.types.chat.chat_completion import 
Choice as _ChatCompletionChoice -from openai.types.chat.chat_completion_chunk import ( - Choice, - ChoiceDelta, - ChoiceDeltaFunctionCall, - ChoiceDeltaToolCall, - ChoiceDeltaToolCallFunction, -) -from openai.types.chat.chat_completion_message import ChatCompletionMessage, FunctionCall -from openai.types.chat.chat_completion_message_tool_call import Function -from openai.types.completion_usage import CompletionUsage - -from core.model_runtime.errors.invoke import InvokeAuthorizationError - - -class MockChatClass: - @staticmethod - def generate_function_call( - functions: list[completion_create_params.Function] | NotGiven = NOT_GIVEN, - ) -> Optional[FunctionCall]: - if not functions or len(functions) == 0: - return None - function: completion_create_params.Function = functions[0] - function_name = function["name"] - function_description = function["description"] - function_parameters = function["parameters"] - function_parameters_type = function_parameters["type"] - if function_parameters_type != "object": - return None - function_parameters_properties = function_parameters["properties"] - function_parameters_required = function_parameters["required"] - parameters = {} - for parameter_name, parameter in function_parameters_properties.items(): - if parameter_name not in function_parameters_required: - continue - parameter_type = parameter["type"] - if parameter_type == "string": - if "enum" in parameter: - if len(parameter["enum"]) == 0: - continue - parameters[parameter_name] = parameter["enum"][0] - else: - parameters[parameter_name] = "kawaii" - elif parameter_type == "integer": - parameters[parameter_name] = 114514 - elif parameter_type == "number": - parameters[parameter_name] = 1919810.0 - elif parameter_type == "boolean": - parameters[parameter_name] = True - - return FunctionCall(name=function_name, arguments=dumps(parameters)) - - @staticmethod - def generate_tool_calls(tools=NOT_GIVEN) -> Optional[list[ChatCompletionMessageToolCall]]: - list_tool_calls = 
[] - if not tools or len(tools) == 0: - return None - tool = tools[0] - - if "type" in tools and tools["type"] != "function": - return None - - function = tool["function"] - - function_call = MockChatClass.generate_function_call(functions=[function]) - if function_call is None: - return None - - list_tool_calls.append( - ChatCompletionMessageToolCall( - id="sakurajima-mai", - function=Function( - name=function_call.name, - arguments=function_call.arguments, - ), - type="function", - ) - ) - - return list_tool_calls - - @staticmethod - def mocked_openai_chat_create_sync( - model: str, - functions: list[completion_create_params.Function] | NotGiven = NOT_GIVEN, - tools: list[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, - ) -> CompletionMessage: - tool_calls = [] - function_call = MockChatClass.generate_function_call(functions=functions) - if not function_call: - tool_calls = MockChatClass.generate_tool_calls(tools=tools) - - return _ChatCompletion( - id="cmpl-3QJQa5jXJ5Z5X", - choices=[ - _ChatCompletionChoice( - finish_reason="content_filter", - index=0, - message=ChatCompletionMessage( - content="elaina", role="assistant", function_call=function_call, tool_calls=tool_calls - ), - ) - ], - created=int(time()), - model=model, - object="chat.completion", - system_fingerprint="", - usage=CompletionUsage( - prompt_tokens=2, - completion_tokens=1, - total_tokens=3, - ), - ) - - @staticmethod - def mocked_openai_chat_create_stream( - model: str, - functions: list[completion_create_params.Function] | NotGiven = NOT_GIVEN, - tools: list[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, - ) -> Generator[ChatCompletionChunk, None, None]: - tool_calls = [] - function_call = MockChatClass.generate_function_call(functions=functions) - if not function_call: - tool_calls = MockChatClass.generate_tool_calls(tools=tools) - - full_text = "Hello, world!\n\n```python\nprint('Hello, world!')\n```" - for i in range(0, len(full_text) + 1): - if i == len(full_text): - yield 
ChatCompletionChunk( - id="cmpl-3QJQa5jXJ5Z5X", - choices=[ - Choice( - delta=ChoiceDelta( - content="", - function_call=ChoiceDeltaFunctionCall( - name=function_call.name, - arguments=function_call.arguments, - ) - if function_call - else None, - role="assistant", - tool_calls=[ - ChoiceDeltaToolCall( - index=0, - id="misaka-mikoto", - function=ChoiceDeltaToolCallFunction( - name=tool_calls[0].function.name, - arguments=tool_calls[0].function.arguments, - ), - type="function", - ) - ] - if tool_calls and len(tool_calls) > 0 - else None, - ), - finish_reason="function_call", - index=0, - ) - ], - created=int(time()), - model=model, - object="chat.completion.chunk", - system_fingerprint="", - usage=CompletionUsage( - prompt_tokens=2, - completion_tokens=17, - total_tokens=19, - ), - ) - else: - yield ChatCompletionChunk( - id="cmpl-3QJQa5jXJ5Z5X", - choices=[ - Choice( - delta=ChoiceDelta( - content=full_text[i], - role="assistant", - ), - finish_reason="content_filter", - index=0, - ) - ], - created=int(time()), - model=model, - object="chat.completion.chunk", - system_fingerprint="", - ) - - def chat_create( - self: Completions, - *, - messages: list[ChatCompletionMessageParam], - model: Union[ - str, - Literal[ - "gpt-4-1106-preview", - "gpt-4-vision-preview", - "gpt-4", - "gpt-4-0314", - "gpt-4-0613", - "gpt-4-32k", - "gpt-4-32k-0314", - "gpt-4-32k-0613", - "gpt-3.5-turbo-1106", - "gpt-3.5-turbo", - "gpt-3.5-turbo-16k", - "gpt-3.5-turbo-0301", - "gpt-3.5-turbo-0613", - "gpt-3.5-turbo-16k-0613", - ], - ], - functions: list[completion_create_params.Function] | NotGiven = NOT_GIVEN, - response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN, - stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, - tools: list[ChatCompletionToolParam] | NotGiven = NOT_GIVEN, - **kwargs: Any, - ): - openai_models = [ - "gpt-4-1106-preview", - "gpt-4-vision-preview", - "gpt-4", - "gpt-4-0314", - "gpt-4-0613", - "gpt-4-32k", - "gpt-4-32k-0314", - 
"gpt-4-32k-0613", - "gpt-3.5-turbo-1106", - "gpt-3.5-turbo", - "gpt-3.5-turbo-16k", - "gpt-3.5-turbo-0301", - "gpt-3.5-turbo-0613", - "gpt-3.5-turbo-16k-0613", - ] - azure_openai_models = ["gpt35", "gpt-4v", "gpt-35-turbo"] - if not re.match(r"^(https?):\/\/[^\s\/$.?#].[^\s]*$", str(self._client.base_url)): - raise InvokeAuthorizationError("Invalid base url") - if model in openai_models + azure_openai_models: - if not re.match(r"sk-[a-zA-Z0-9]{24,}$", self._client.api_key) and type(self._client) == OpenAI: - # sometime, provider use OpenAI compatible API will not have api key or have different api key format - # so we only check if model is in openai_models - raise InvokeAuthorizationError("Invalid api key") - if len(self._client.api_key) < 18 and type(self._client) == AzureOpenAI: - raise InvokeAuthorizationError("Invalid api key") - if stream: - return MockChatClass.mocked_openai_chat_create_stream(model=model, functions=functions, tools=tools) - - return MockChatClass.mocked_openai_chat_create_sync(model=model, functions=functions, tools=tools) diff --git a/api/tests/integration_tests/model_runtime/__mock/openai_completion.py b/api/tests/integration_tests/model_runtime/__mock/openai_completion.py deleted file mode 100644 index 14223668e036d9..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/openai_completion.py +++ /dev/null @@ -1,130 +0,0 @@ -import re -from collections.abc import Generator -from time import time - -# import monkeypatch -from typing import Any, Literal, Optional, Union - -from openai import AzureOpenAI, BadRequestError, OpenAI -from openai._types import NOT_GIVEN, NotGiven -from openai.resources.completions import Completions -from openai.types import Completion as CompletionMessage -from openai.types.completion import CompletionChoice -from openai.types.completion_usage import CompletionUsage - -from core.model_runtime.errors.invoke import InvokeAuthorizationError - - -class MockCompletionsClass: - @staticmethod - def 
mocked_openai_completion_create_sync(model: str) -> CompletionMessage: - return CompletionMessage( - id="cmpl-3QJQa5jXJ5Z5X", - object="text_completion", - created=int(time()), - model=model, - system_fingerprint="", - choices=[ - CompletionChoice( - text="mock", - index=0, - logprobs=None, - finish_reason="stop", - ) - ], - usage=CompletionUsage( - prompt_tokens=2, - completion_tokens=1, - total_tokens=3, - ), - ) - - @staticmethod - def mocked_openai_completion_create_stream(model: str) -> Generator[CompletionMessage, None, None]: - full_text = "Hello, world!\n\n```python\nprint('Hello, world!')\n```" - for i in range(0, len(full_text) + 1): - if i == len(full_text): - yield CompletionMessage( - id="cmpl-3QJQa5jXJ5Z5X", - object="text_completion", - created=int(time()), - model=model, - system_fingerprint="", - choices=[ - CompletionChoice( - text="", - index=0, - logprobs=None, - finish_reason="stop", - ) - ], - usage=CompletionUsage( - prompt_tokens=2, - completion_tokens=17, - total_tokens=19, - ), - ) - else: - yield CompletionMessage( - id="cmpl-3QJQa5jXJ5Z5X", - object="text_completion", - created=int(time()), - model=model, - system_fingerprint="", - choices=[ - CompletionChoice(text=full_text[i], index=0, logprobs=None, finish_reason="content_filter") - ], - ) - - def completion_create( - self: Completions, - *, - model: Union[ - str, - Literal[ - "babbage-002", - "davinci-002", - "gpt-3.5-turbo-instruct", - "text-davinci-003", - "text-davinci-002", - "text-davinci-001", - "code-davinci-002", - "text-curie-001", - "text-babbage-001", - "text-ada-001", - ], - ], - prompt: Union[str, list[str], list[int], list[list[int]], None], - stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, - **kwargs: Any, - ): - openai_models = [ - "babbage-002", - "davinci-002", - "gpt-3.5-turbo-instruct", - "text-davinci-003", - "text-davinci-002", - "text-davinci-001", - "code-davinci-002", - "text-curie-001", - "text-babbage-001", - "text-ada-001", - ] - 
azure_openai_models = ["gpt-35-turbo-instruct"] - - if not re.match(r"^(https?):\/\/[^\s\/$.?#].[^\s]*$", str(self._client.base_url)): - raise InvokeAuthorizationError("Invalid base url") - if model in openai_models + azure_openai_models: - if not re.match(r"sk-[a-zA-Z0-9]{24,}$", self._client.api_key) and type(self._client) == OpenAI: - # sometime, provider use OpenAI compatible API will not have api key or have different api key format - # so we only check if model is in openai_models - raise InvokeAuthorizationError("Invalid api key") - if len(self._client.api_key) < 18 and type(self._client) == AzureOpenAI: - raise InvokeAuthorizationError("Invalid api key") - - if not prompt: - raise BadRequestError("Invalid prompt") - if stream: - return MockCompletionsClass.mocked_openai_completion_create_stream(model=model) - - return MockCompletionsClass.mocked_openai_completion_create_sync(model=model) diff --git a/api/tests/integration_tests/model_runtime/__mock/openai_embeddings.py b/api/tests/integration_tests/model_runtime/__mock/openai_embeddings.py deleted file mode 100644 index e27b9891f5c8a8..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/openai_embeddings.py +++ /dev/null @@ -1,58 +0,0 @@ -import re -from typing import Any, Literal, Union - -from openai import OpenAI -from openai._types import NOT_GIVEN, NotGiven -from openai.resources.embeddings import Embeddings -from openai.types.create_embedding_response import CreateEmbeddingResponse, Usage -from openai.types.embedding import Embedding - -from core.model_runtime.errors.invoke import InvokeAuthorizationError - - -class MockEmbeddingsClass: - def create_embeddings( - self: Embeddings, - *, - input: Union[str, list[str], list[int], list[list[int]]], - model: Union[str, Literal["text-embedding-ada-002"]], - encoding_format: Literal["float", "base64"] | NotGiven = NOT_GIVEN, - **kwargs: Any, - ) -> CreateEmbeddingResponse: - if isinstance(input, str): - input = [input] - - if not 
re.match(r"^(https?):\/\/[^\s\/$.?#].[^\s]*$", str(self._client.base_url)): - raise InvokeAuthorizationError("Invalid base url") - - if len(self._client.api_key) < 18: - raise InvokeAuthorizationError("Invalid API key") - - if encoding_format == "float": - return CreateEmbeddingResponse( - data=[ - Embedding(embedding=[0.23333 for _ in range(233)], index=i, object="embedding") - for i in range(len(input)) - ], - model=model, - object="list", - # marked: usage of embeddings should equal the number of testcase - usage=Usage(prompt_tokens=2, total_tokens=2), - ) - - embeddings = "VEfNvMLUnrwFleO8hcj9vEE/yrzyjOA84E1MvNfoCrxjrI+8sZUKvNgrBT17uY07gJ/IvNvhHLrUemc8KXXGumalIT3YKwU7ZsnbPMhATrwTt6u8JEwRPNMmCjxGREW7TRKvu6/MG7zAyDU8wXLkuuMDZDsXsL28zHzaOw0IArzOiMO8LtASvPKM4Dul5l+80V0bPGVDZ7wYNrI89ucsvJZdYztzRm+8P8ysOyGbc7zrdgK9sdiEPKQ8sbulKdq7KIgdvKIMDj25dNc8k0AXPBn/oLzrdgK8IXe5uz0Dvrt50V68tTjLO4ZOcjoG9x29oGfZufiwmzwMDXy8EL6ZPHvdx7nKjzE8+LCbPG22hTs3EZq7TM+0POrRzTxVZo084wPkO8Nak7z8cpw8pDwxvA2T8LvBC7C72fltvC8Atjp3fYE8JHDLvEYgC7xAdls8YiabPPkEeTzPUbK8gOLCPEBSIbyt5Oy8CpreusNakzywUhA824vLPHRlr7zAhTs7IZtzvHd9AT2xY/O6ok8IvOihqrql5l88K4EvuknWorvYKwW9iXkbvGMTRLw5qPG7onPCPLgNIzwAbK67ftbZPMxYILvAyDW9TLB0vIid1buzCKi7u+d0u8iDSLxNVam8PZyJPNxnETvVANw8Oi5mu9nVszzl65I7DIKNvLGVirxsMJE7tPXQu2PvCT1zRm87p1l9uyRMkbsdfqe8U52ePHRlr7wt9Mw8/C8ivTu02rwJFGq8tpoFPWnC7blWumq7sfy+vG1zCzy9Nlg8iv+PuvxT3DuLU228kVhoOkmTqDrv1kg8ocmTu1WpBzsKml48DzglvI8ECzxwTd27I+pWvIWkQ7xUR007GqlPPBFEDrzGECu865q8PI7BkDwNxYc8tgG6ullMSLsIajs84lk1PNLjD70mv648ZmInO2tnIjzvb5Q8o5KCPLo9xrwKMyq9QqGEvI8ECzxO2508ATUdPRAlTry5kxc8KVGMPJyBHjxIUC476KGqvIU9DzwX87c88PUIParrWrzdlzS/G3K+uzEw2TxB2BU86AhfPAMiRj2dK808a85WPPCft7xU4Bg95Q9NPDxZjzwrpek7yNkZvHa0EjyQ0nM6Nq9fuyjvUbsRq8I7CAMHO3VSWLyuauE7U1qkvPkEeTxs7ZY7B6FMO48Eizy75/S7ieBPvB07rTxmyVu8onPCO5rc6Tu7XIa7oEMfPYngT7u24vk7/+W5PE8eGDxJ1iI9t4cuvBGHiLyH1GY7jfghu+oUSDwa7Mk7iXmbuut2grrq8I2563v8uyofdTxRTrs44lm1vMeWnzukf6s7r4khvEKhhDyhyZO8G5Z4Oy56wTz4sBs81Zknuz3fg7wnJuO74n1vvASEADu98128gUl3vBtyvrtZCU47yep8u5FY
aDx2G0e8a85WO5cmUjz3kds8qgqbPCUaerx50d67WKIZPI7BkDua3Om74vKAvL3zXbzXpRA9CI51vLo9xryKzXg7tXtFO9RWLTwnJuM854LqPEIs8zuO5cq8d8V1u9P0cjrQ++C8cGwdPDdUlLoOGeW8auEtu8Z337nlzFK8aRg/vFCkDD0nRSM879bIvKUFID1iStU8EL6ZvLufgLtKgNE7KVEMvJOnSzwahRU895HbvJiIjLvc8n88bmC0PPLP2rywM9C7jTscOoS3mjy/Znu7dhvHuu5Q1Dyq61o6CI71u09hkry0jhw8gb6IPI8EC7uoVAM8gs9rvGM3fjx2G8e81FYtu/ojubyYRRK72Riuu83elDtNNmk70/TyuzUFsbvgKZI7onNCvAehzLumr8679R6+urr6SztX2So8Bl5SOwSEgLv5NpA8LwC2PGPvibzJ6vw7H2tQvOtXwrzXpRC8j0z/uxwcbTy2vr+8VWYNu+t2ArwKmt68NKN2O3XrIzw9A747UU47vaavzjwU+qW8YBqyvE02aTyEt5o8cCmjOxtyPrxs7ZY775NOu+SJWLxMJQY8/bWWu6IMDrzSSsQ7GSPbPLlQnbpVzcE7Pka4PJ96sLycxJg8v/9GPO2HZTyeW3C8Vpawtx2iYTwWBg87/qI/OviwGzxyWcY7M9WNPIA4FD32C2e8tNGWPJ43trxCoYS8FGHavItTbbu7n4C80NemPLm30Ty1OMu7vG1pvG3aPztBP0o75Q/NPJhFEj2V9i683PL/O97+aLz6iu27cdPRum/mKLwvVgc89fqDu3LA+jvm2Ls8mVZ1PIuFBD3ZGK47Cpreut7+aLziWTU8XSEgPMvSKzzO73e5040+vBlmVTxS1K+8mQ4BPZZ8o7w8FpW6OR0DPSSPCz21Vwu99fqDOjMYiDy7XAY8oYaZO+aVwTyX49c84OaXOqdZfTunEQk7B8AMvMDs7zo/D6e8OP5CvN9gIzwNCII8FefOPE026TpzIjU8XsvOO+J9b7rkIiQ8is34O+e0AbxBpv67hcj9uiPq1jtCoQQ8JfY/u86nAz0Wkf28LnrBPJlW9Tt8P4K7BbSjO9grhbyAOJS8G3K+vJLe3LzXpZA7NQUxPJs+JDz6vAS8QHZbvYNVYDrj3yk88PWIPOJ97zuSIVc8ZUPnPMqPsbx2cZi7QfzPOxYGDz2hqtO6H2tQO543NjyFPY+7JRUAOt0wgDyJeZu8MpKTu6AApTtg1ze82JI5vKllZjvrV0I7HX6nu7vndDxg1ze8jwQLu1ZTNjuJvBU7BXGpvAP+C7xJk6g8j2u/vBABlLzlqBi8M9WNutRWLTx0zGM9sHbKPLoZDDtmyVu8tpqFOvPumjyuRqe87lBUvFU0drxs7Za8ejMZOzJPGbyC7qu863v8PDPVjTxJ1iI7Ca01PLuAQLuNHFy7At9LOwP+i7tYxlO80NemO9elkDx45LU8h9TmuzxZjzz/5bk8p84OurvndLwAkGi7XL9luCSzRTwMgg08vrxMPKIwyDwdomG8K6VpPGPvCTxkmTi7M/lHPGxUSzxwKSM8wQuwvOqtkzrLFSa8SbdivAMixjw2r9+7xWt2vAyCDT1NEi87B8CMvG1zi7xpwm27MrbNO9R6Z7xJt+K7jNnhu9ZiFrve/ug55CKkvCwHJLqsOr47+ortvPwvIr2v8NW8YmmVOE+FTLywUhA8MTBZvMiDyLtx8hG8OEE9vMDsbzroCF88DelBOobnPbx+b6U8sbnEOywr3ro93wO9dMzjup2xwbwnRaO7cRZMu8Z337vS44+7VpYwvFWphzxKgNE8L1aHPLPFLbunzo66zFggPN+jHbs7tFo8nW7HO9JKRLyoeD28Fm1DPGZip7u5dNe7KMsXvFnlkzxQpAw7MrZNPHpX0zwSyoK7ayQovPR0Dz3gClK8/juLPDjaCLvqrZO7a4vcO9HEzzvife88KKzXvDmocbwpMkw7t2huvaIM
jjznguo7Gy/EOzxZjzoLuZ48qi5VvCjLFzuDmNo654LquyrXgDy7XAa8e7mNvJ7QAb0Rq8K7ojBIvBN0MTuOfha8GoUVveb89bxMsHS8jV9WPPKM4LyAOJS8me9AvZv7qbsbcr47tuL5uaXmXzweKNa7rkYnPINV4Lxcv+W8tVcLvI8oxbzvbxS7oYaZu9+jHT0cHO08c7uAPCSzRTywUhA85xu2u+wBcTuJvJU8PBYVusTghzsnAim8acJtPFQE0zzFIwI9C7meO1DIRry7XAY8MKpkPJZd47suN0e5JTm6u6BDn7zfx1e8AJDoOr9CQbwaQps7x/1TPLTRFryqLtU8JybjPIXI/Tz6I7k6mVb1PMWKNryd1fs8Ok0mPHt2kzy9Ep48TTZpvPS3ibwGOpi8Ns4fPBqFlbr3Kqc8+QR5vHLA+rt7uY289YXyPI6iULxL4gu8Tv/XuycCKbwCnFG8C7kevVG1b7zIXw68GoWVO4rNeDnrM4i8MxgIPUNLs7zSoJW86ScfO+rRzbs6Cqw8NxGautP0cjw0wjY8CGq7vAkU6rxKgNG5+uA+vJXXbrwKM6o86vCNOu+yjjoQAZS8xATCOQVxKbynzo68wxcZvMhATjzS4488ArsRvNEaobwRh4i7t4euvAvd2DwnAik8UtQvvBFEDrz4sJs79gtnvOknnzy+vEy8D3sfPLH8vjzmLo28KVGMvOtXwjvpapm8HBxtPH3K8Lu753Q8/l9FvLvn9DomoG48fET8u9zy/7wMpke8zmQJu3oU2TzlD828KteAPAwNfLu+mBI5ldduPNZDVjq+vEy8eEvqvDHJpLwUPaC6qi7VPABsLjwFcSm72sJcu+bYO7v41NW8RiALvYB7DjzL0is7qLs3us1FSbzaf2K8MnNTuxABFDzF8Wo838fXvOBNzDzre3w8afQEvQE1nbulBaC78zEVvG5B9LzH/VM82Riuuwu5nrwsByQ8Y6yPvHXro7yQ0nM8nStNPJkyOzwnJmM80m7+O1VmjTzqrZM8dhvHOyAQBbz3baG8KTJMPOlqmbxsVEs8Pq3suy56QbzUVq08X3CDvAE1nTwUHuA7hue9vF8tCbvwOAO6F7A9ugd9kryqLtW7auEtu9ONPryPa7+8o9r2O570OzyFpEO8ntCBPOqtk7sykhO7lC1AOw2TcLswhiq6vx4HvP5fRbwuesG7Mk8ZvA4Z5TlfcAM9DrIwPL//xrzMm5q8JEwRPHBsnbxL4gu8jyjFu99gozrkZZ483GeRPLuAwDuYiIw8iv8PvK5Gpzx+b6W87Yflu3NGbzyE+hQ8a4tcPItT7bsoy5e8L1YHvWQyBDwrga86kPEzvBQ9oDxtl0W8lwKYvGpIYrxQ5wY8AJDovOLyALyw3f489JjJvMdTpTkKMyo8V9mqvH3K8LpyNYy8JHDLOixu2LpQ54Y8Q0uzu8LUnrs0wrY84vIAveihqjwfihA8DIKNvLDd/jywM1C7FB7gOxsLirxAUqE7sulnvH3K8DkAkGg8jsGQvO+TzrynWf287CCxvK4Drbwg8UQ8JRr6vFEqAbskjwu76q2TPNP0cjopDhK8dVJYvFIXKrxLn5G8AK8oPAb3HbxbOXE8Bvedun5Q5ThHyjk8QdiVvBXDlLw0o/Y7aLGKupkOgTxKPdc81kNWPtUAXLxUR827X1FDPf47izxsEVE8akhiPIhaWzxYX5+7hT0PPSrXgLxQC0E8i4WEvKUp2jtCLHM8DcWHO768zLxnK5a89R6+vH9czrorpem73h0pvAnwr7yKzXi8gDgUPf47Czq9zyO8728UOf34EDy6PUY76OSkvKZIGr2ZDgE8gzEmPG3av7v77Ce7/oP/O3MiNTtas/w8x1OlO/D1CDvDfs27ll1jO2Ufrbv1hXK8WINZuxN0sbuxlYq8OYS3uia/rjyiTwi9O7TaO+/WyDyiDA49E7erO3fF9bj6I7k7qHi9O3SoKbyBSfc7drSSvGPv
CT2pQay7t2huPGnC7byUCQY8CEaBu6rHoDhx8hE8/fgQvCjLl7zdeHS8x/3TO0Isc7tas3y8jwQLvUKhhDz+foU8fCDCPC+ZgTywD5Y7ZR8tOla66rtCCLm8gWg3vDoKrLxbWDE76SefPBkj2zrlqJi7pebfuv6Df7zWQ9a7lHA6PGDXtzzMv1Q8mtxpOwJ4lzxKGZ28mGnMPDw6z7yxY/O7m2Leu7juYjwvVge8zFigPGpIYjtWumo5xs2wOgyCjbxrZ6K8bbaFvKzTCbsks8W7C7mePIU9DzxQyEY8posUvAW0ozrHlh88CyBTPJRwursxySQ757SBuqcRCbwNCIK8EL6ZvIG+iLsIRgE8rF74vOJZtbuUcDq8r/DVPMpMt7sL3Vi8eWqquww/kzqj2vY5auGtu85kiTwMPxM66KGqvBIxNzuwUpA8v2b7u09C0rx7ms08NUirvFYQPLxKPdc68mimvP5fRTtoPPm7XuqOOgOJ+jxfLYm7u58AvXz8B72PR4W6ldfuuys+tbvYKwW7pkiaPLB2SjvKj7G875POvA6yML7qFEg9Eu68O6Up2rz77Kc84CmSPP6ivzz4sJu6/C+iOaUpWjwq14A84E3MOYB7Dr2d1Xu775NOvC6e+7spUYw8PzPhO5TGizt29ww9yNkZPY7lyrz020M7QRsQu3z8BzwkCZe79YXyO8jZmTzvGUM8HgQcO9kYrrzxBmy8hLeaPLYBOjz+oj88flBlO6GqUzuiMMi8fxlUvCr7ujz41NU8DA38PBeMAzx7uY28TTZpvFG1bzxtc4s89ucsPEereTwfipC82p4iPKtNFbzo5KQ7pcKlOW5gtDzO73c7B6FMOzRbgjxCXoo8v0JBOSl1RrwxDJ+7XWSaPD3Aw7sOsjA8tuJ5vKw6Pry5k5c8ZUNnvG/H6DyVTAA8Shkdvd7+aDvtpiW9qUGsPFTgmDwbcr68TTbpO1DnhryNX9a7mrivvIqpPjxsqhy81HrnOzv31Dvth+U6UtQvPBz4MrvtpqW84OYXvRz4sjxwkFe8zSGPuycCqbyFPY8818nKOw84JTy8bWk8USqBvBGHiLtosQo8BOs0u9skl7xQ54Y8uvrLPOknn7w705o8Jny0PAd9EjxhoKa8Iv2tu2M3/jtsVEs8DcUHPQSEADs3eE48GkKbupRR+rvdeHQ7Xy2JvO1jKz0xMFm8sWPzux07LbyrTZW7bdq/O6Pa9r0ahRW9CyDTOjSjdjyQ8bO8yaIIPfupLTz/CfQ7xndfvJs+JD0zPEK8KO/RvMpw8bwObzY7fm+lPJtiXrz5BHm8WmsIvKlBrLuDdKA7hWHJOgd9Ers0o/Y7nlvwu5NAl7u8BrW6utYRO2SZuDxyNYw8CppevAY6GDxVqQe9oGdZPFa6ary3RLS70NcmO2PQSb36ZrM86q2TPML42LwewaE8k2RRPDmocTsi/S29o/k2PHRlr7zjnC+8gHsOPUpcFzxtl8W6tuL5vHw/gry/2wy9yaIIvINV4Dx3fQG7ISFoPO7pnzwGXlK8HPiyPGAaMjzBC7A7MQyfu+eC6jyV1+67pDyxvBWkVLxrJKg754LqOScCKbwpUQy8KIgdOJDSc7zDfk08tLLWvNZDVjyh7c28ShmdvMnlgjs2NdS8ISHovP5+hbxGIIs8ayQouyKnXDzBcmS6zw44u86IQ7yl5l+7cngGvWvOVrsEhIC7yNkZPJODkbuAn0g8XN6lPOaVwbuTgxG8OR2DPAb3HTzlqJi8nUoNvCAVf73Mmxo9afSEu4FotzveHSk8c0ZvOMFOqjwP9Sq87iwavIEBg7xIUK68IbozuozZ4btg17c7vx4Hvarr2rtp9IQ8Rt0QO+1jqzyeNzY8kNLzO8sVpry98108OCL9uyisV7vhr4Y8FgaPvLFjczw42og8gWg3vPX6gzsNk/C83GeRPCUVgDy0jpw7yNkZu2VD5zvh93o81h+cuw3Fhzyl5t+86Y7TvHa0
EjyzCCi7WmsIPIy1Jzy00Ra6NUiru50rTTx50d47/HKcO2wwETw0f7y8sFIQvNxnkbzS4w855pVBu9FdGzx9yvC6TM80vFQjkzy/Zvs7BhtYPLjKKLqPa787A/6LOyiInbzooSq8728UPIFJ97wq+7q8R6v5u1tYMbwdomG6iSPKPAb3HTx3oTu7fGO8POqtk7ze/ug84wNkPMnq/DsB8iK9ogwOu6lBrDznguo8NQUxvHKcwDo28tm7yNmZPN1UurxCoYS80m7+Oy+9OzzGzTC836MdvCDNCrtaawi7dVLYPEfKuTxzRm88cCmjOyXSBbwGOpi879ZIO8dTJbtqnrO8NMI2vR1+J7xwTV087umfPFG17zsC30s8oYaZPKllZrzZGK47zss9vP21FryZywa9bbYFPVNapDt2G0e7E3SxPMUjgry5dNc895Hbu0H8z7ueN7a7OccxPFhfH7vC1B48n3owvEhQLrzu6Z+8HTutvEBSITw6Taa5g1XgPCzEqbxfLYk9OYQ3vBlm1bvPUTI8wIU7PIy1pzyFyP07gzGmO3NGb7yS3ty7O5CguyEhaLyWoF28pmxUOaZImrz+g/87mnU1vFbsgTxvo668PFmPO2KNTzy09VC8LG5YPHhL6rsvJPC7kTQuvEGCxDlhB9s6u58AvfCAd7z0t4k7kVjoOCkOkrxMjDq8iPOmPL0SnrxsMJG7OEG9vCUa+rvx4rE7cpxAPDCGqjukf6u8TEnAvNn57TweBBw7JdKFvIy1p7vIg8i7" # noqa: E501 - - data = [] - for i, text in enumerate(input): - obj = Embedding(embedding=[], index=i, object="embedding") - obj.embedding = embeddings - - data.append(obj) - - return CreateEmbeddingResponse( - data=data, - model=model, - object="list", - # marked: usage of embeddings should equal the number of testcase - usage=Usage(prompt_tokens=2, total_tokens=2), - ) diff --git a/api/tests/integration_tests/model_runtime/__mock/openai_remote.py b/api/tests/integration_tests/model_runtime/__mock/openai_remote.py deleted file mode 100644 index cb8f2495438783..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/openai_remote.py +++ /dev/null @@ -1,23 +0,0 @@ -from time import time - -from openai.resources.models import Models -from openai.types.model import Model - - -class MockModelClass: - """ - mock class for openai.models.Models - """ - - def list( - self, - **kwargs, - ) -> list[Model]: - return [ - Model( - id="ft:gpt-3.5-turbo-0613:personal::8GYJLPDQ", - created=int(time()), - object="model", - owned_by="organization:org-123", - ) - ] diff --git a/api/tests/integration_tests/model_runtime/__mock/openai_speech2text.py 
b/api/tests/integration_tests/model_runtime/__mock/openai_speech2text.py deleted file mode 100644 index a51dcab4be7467..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/openai_speech2text.py +++ /dev/null @@ -1,29 +0,0 @@ -import re -from typing import Any, Literal, Union - -from openai._types import NOT_GIVEN, FileTypes, NotGiven -from openai.resources.audio.transcriptions import Transcriptions -from openai.types.audio.transcription import Transcription - -from core.model_runtime.errors.invoke import InvokeAuthorizationError - - -class MockSpeech2TextClass: - def speech2text_create( - self: Transcriptions, - *, - file: FileTypes, - model: Union[str, Literal["whisper-1"]], - language: str | NotGiven = NOT_GIVEN, - prompt: str | NotGiven = NOT_GIVEN, - response_format: Literal["json", "text", "srt", "verbose_json", "vtt"] | NotGiven = NOT_GIVEN, - temperature: float | NotGiven = NOT_GIVEN, - **kwargs: Any, - ) -> Transcription: - if not re.match(r"^(https?):\/\/[^\s\/$.?#].[^\s]*$", str(self._client.base_url)): - raise InvokeAuthorizationError("Invalid base url") - - if len(self._client.api_key) < 18: - raise InvokeAuthorizationError("Invalid API key") - - return Transcription(text="1, 2, 3, 4, 5, 6, 7, 8, 9, 10") diff --git a/api/tests/integration_tests/model_runtime/__mock/xinference.py b/api/tests/integration_tests/model_runtime/__mock/xinference.py deleted file mode 100644 index 8deb50635f3d56..00000000000000 --- a/api/tests/integration_tests/model_runtime/__mock/xinference.py +++ /dev/null @@ -1,170 +0,0 @@ -import os -import re -from typing import Union - -import pytest -from _pytest.monkeypatch import MonkeyPatch -from requests import Response -from requests.exceptions import ConnectionError -from requests.sessions import Session -from xinference_client.client.restful.restful_client import ( - Client, - RESTfulChatModelHandle, - RESTfulEmbeddingModelHandle, - RESTfulGenerateModelHandle, - RESTfulRerankModelHandle, -) -from 
xinference_client.types import Embedding, EmbeddingData, EmbeddingUsage - - -class MockXinferenceClass: - def get_chat_model(self: Client, model_uid: str) -> Union[RESTfulGenerateModelHandle, RESTfulChatModelHandle]: - if not re.match(r"https?:\/\/[^\s\/$.?#].[^\s]*$", self.base_url): - raise RuntimeError("404 Not Found") - - if "generate" == model_uid: - return RESTfulGenerateModelHandle(model_uid, base_url=self.base_url, auth_headers={}) - if "chat" == model_uid: - return RESTfulChatModelHandle(model_uid, base_url=self.base_url, auth_headers={}) - if "embedding" == model_uid: - return RESTfulEmbeddingModelHandle(model_uid, base_url=self.base_url, auth_headers={}) - if "rerank" == model_uid: - return RESTfulRerankModelHandle(model_uid, base_url=self.base_url, auth_headers={}) - raise RuntimeError("404 Not Found") - - def get(self: Session, url: str, **kwargs): - response = Response() - if "v1/models/" in url: - # get model uid - model_uid = url.split("/")[-1] or "" - if not re.match( - r"[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}", model_uid - ) and model_uid not in {"generate", "chat", "embedding", "rerank"}: - response.status_code = 404 - response._content = b"{}" - return response - - # check if url is valid - if not re.match(r"^(https?):\/\/[^\s\/$.?#].[^\s]*$", url): - response.status_code = 404 - response._content = b"{}" - return response - - if model_uid in {"generate", "chat"}: - response.status_code = 200 - response._content = b"""{ - "model_type": "LLM", - "address": "127.0.0.1:43877", - "accelerators": [ - "0", - "1" - ], - "model_name": "chatglm3-6b", - "model_lang": [ - "en" - ], - "model_ability": [ - "generate", - "chat" - ], - "model_description": "latest chatglm3", - "model_format": "pytorch", - "model_size_in_billions": 7, - "quantization": "none", - "model_hub": "huggingface", - "revision": null, - "context_length": 2048, - "replica": 1 - }""" - return response - - elif model_uid == "embedding": - response.status_code = 200 - 
response._content = b"""{ - "model_type": "embedding", - "address": "127.0.0.1:43877", - "accelerators": [ - "0", - "1" - ], - "model_name": "bge", - "model_lang": [ - "en" - ], - "revision": null, - "max_tokens": 512 - }""" - return response - - elif "v1/cluster/auth" in url: - response.status_code = 200 - response._content = b"""{ - "auth": true - }""" - return response - - def _check_cluster_authenticated(self): - self._cluster_authed = True - - def rerank( - self: RESTfulRerankModelHandle, documents: list[str], query: str, top_n: int, return_documents: bool - ) -> dict: - # check if self._model_uid is a valid uuid - if ( - not re.match(r"[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}", self._model_uid) - and self._model_uid != "rerank" - ): - raise RuntimeError("404 Not Found") - - if not re.match(r"^(https?):\/\/[^\s\/$.?#].[^\s]*$", self._base_url): - raise RuntimeError("404 Not Found") - - if top_n is None: - top_n = 1 - - return { - "results": [ - {"index": i, "document": doc, "relevance_score": 0.9} for i, doc in enumerate(documents[:top_n]) - ] - } - - def create_embedding(self: RESTfulGenerateModelHandle, input: Union[str, list[str]], **kwargs) -> dict: - # check if self._model_uid is a valid uuid - if ( - not re.match(r"[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}", self._model_uid) - and self._model_uid != "embedding" - ): - raise RuntimeError("404 Not Found") - - if isinstance(input, str): - input = [input] - ipt_len = len(input) - - embedding = Embedding( - object="list", - model=self._model_uid, - data=[ - EmbeddingData(index=i, object="embedding", embedding=[1919.810 for _ in range(768)]) - for i in range(ipt_len) - ], - usage=EmbeddingUsage(prompt_tokens=ipt_len, total_tokens=ipt_len), - ) - - return embedding - - -MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" - - -@pytest.fixture -def setup_xinference_mock(request, monkeypatch: MonkeyPatch): - if MOCK: - monkeypatch.setattr(Client, "get_model", 
MockXinferenceClass.get_chat_model) - monkeypatch.setattr(Client, "_check_cluster_authenticated", MockXinferenceClass._check_cluster_authenticated) - monkeypatch.setattr(Session, "get", MockXinferenceClass.get) - monkeypatch.setattr(RESTfulEmbeddingModelHandle, "create_embedding", MockXinferenceClass.create_embedding) - monkeypatch.setattr(RESTfulRerankModelHandle, "rerank", MockXinferenceClass.rerank) - yield - - if MOCK: - monkeypatch.undo() diff --git a/api/tests/integration_tests/model_runtime/anthropic/__init__.py b/api/tests/integration_tests/model_runtime/anthropic/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/anthropic/test_llm.py b/api/tests/integration_tests/model_runtime/anthropic/test_llm.py deleted file mode 100644 index 8f7e9ec48743bf..00000000000000 --- a/api/tests/integration_tests/model_runtime/anthropic/test_llm.py +++ /dev/null @@ -1,92 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.anthropic.llm.llm import AnthropicLargeLanguageModel -from tests.integration_tests.model_runtime.__mock.anthropic import setup_anthropic_mock - - -@pytest.mark.parametrize("setup_anthropic_mock", [["none"]], indirect=True) -def test_validate_credentials(setup_anthropic_mock): - model = AnthropicLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="claude-instant-1.2", credentials={"anthropic_api_key": "invalid_key"}) - - model.validate_credentials( - model="claude-instant-1.2", credentials={"anthropic_api_key": os.environ.get("ANTHROPIC_API_KEY")} - ) - - 
-@pytest.mark.parametrize("setup_anthropic_mock", [["none"]], indirect=True) -def test_invoke_model(setup_anthropic_mock): - model = AnthropicLargeLanguageModel() - - response = model.invoke( - model="claude-instant-1.2", - credentials={ - "anthropic_api_key": os.environ.get("ANTHROPIC_API_KEY"), - "anthropic_api_url": os.environ.get("ANTHROPIC_API_URL"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "top_p": 1.0, "max_tokens": 10}, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -@pytest.mark.parametrize("setup_anthropic_mock", [["none"]], indirect=True) -def test_invoke_stream_model(setup_anthropic_mock): - model = AnthropicLargeLanguageModel() - - response = model.invoke( - model="claude-instant-1.2", - credentials={"anthropic_api_key": os.environ.get("ANTHROPIC_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = AnthropicLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="claude-instant-1.2", - credentials={"anthropic_api_key": os.environ.get("ANTHROPIC_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 18 diff --git 
a/api/tests/integration_tests/model_runtime/anthropic/test_provider.py b/api/tests/integration_tests/model_runtime/anthropic/test_provider.py deleted file mode 100644 index 6f1e50f431849f..00000000000000 --- a/api/tests/integration_tests/model_runtime/anthropic/test_provider.py +++ /dev/null @@ -1,17 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.anthropic.anthropic import AnthropicProvider -from tests.integration_tests.model_runtime.__mock.anthropic import setup_anthropic_mock - - -@pytest.mark.parametrize("setup_anthropic_mock", [["none"]], indirect=True) -def test_validate_provider_credentials(setup_anthropic_mock): - provider = AnthropicProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials(credentials={"anthropic_api_key": os.environ.get("ANTHROPIC_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/assets/audio.mp3 b/api/tests/integration_tests/model_runtime/assets/audio.mp3 deleted file mode 100644 index 7c86e02e160909223668c7b21a60b68afc74ef98..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 218880 zcmcG0cU%)$`|qR>0t85?2}MIdYQTghO-T?C6%_@#E?|uKe_w%`b+?x!iKJ%Q(e9v>vGv|B}LC1Iq0`6m9MKwl zQOLqq<%dJL>PQ|d8Lmeek1oD$l{)`(=EI!niFJ=puYcHLhj3{$8WrtKb_;(&JYUIZfDf`YPj9t!!tV+X^-In;Qw`dl@%@0fArnB(okKn+wjEjDj ztawKL&zVY^EbBl{V7?=pyFUHZPb})0FdW9ERrL}%K$<^&i;sgHl|tF^fBIMhiMsJ$ z1rhtp?N1GDq?AMx%npSD?`< z{Mgl;X8OEax*={%#2Zu63G*t(Kg(uK7$H`r!BDZ&UEb;TZ%=+OLcuNljqpey9HXwE z>(X5l8>D+uAGeke$2k~?HX`E^^aiK~raHFT)>Nq<-NmPt*=5NIH`USUO4nM!;+>;c zlTC6RS?DYsB@OS{W5heyQ#2cMrI>6|6rh2i5HTQxZ>PN~4h|eNSO@$MzO%Y`3Nepb zqpo%!W9VsUB61Vx>)W^&L2~*_iMSQvyT^_bK*%SFe3ewn-$$)5UWAYI&*#~ove-LU zPsQB~@SeQzTUF|1t;e7DTo2oGaL^{!=nC!YAMwZ0(z^RmQxRI-DB2H~Gk-<>8oA5+ zv%O4@pVc?|SK%v<-_<@m+iWYT|K;hSzt5N0mi%dOiH;|4Nz 
z%w4kZZr<%Ln1o$>?v=9=9TXU3H$vwQQt1SqshtTu5KH6)x3ACSo;se)lzDlt_fO+oR5COO zBjH!zo!%fc>>LoUX&2ozsW>YfWn{WRn3kg^)F+G)(`gcpKato^OB4E=Ssr5~dbIQX ztttmDDPcFH34|BI;HxwV>3TTsFDretjKB^V7ZZp&V=tm) z*yn2nqZ-gr!|Y8ZZVk-7Eed?!J6EeN1;n~M{jdG*e*3xh*YQ(tfilEY>F8j+Gt_pky9Q4p%#Ycl${$sz}8`F5@@tH-1 zE)O$%OT5x}Y3VOR0_v~qIcga^QDkEs5-?Fj)}52Cw+yJSf|Q%CjJ}VrDzuE)R0P#B zY$hZ8tGuLM()HHC;(*?+IqCMsQz4!ujIMy1qC)$$RLMJ<+c4rXtPpXTd92dd8dp!) z3jOUN>@f|MElBKdXcnX|snN2q20S>vp2|Actxa7xcz&pe!Ul?J*AfV-I<*&ovm%Eovod%yZd*iwRE=Z zUi$0i2XT}h=C!meGQN!AkoCZ!e{lI&PGmhe+1)}G$r))8!#2BxYEa~K8&Xg_ccZOOEq>SYkXwtp6!?B~ z*$#gO;hXXwe4&~oMYX~6TSeF8jNPr*mK%iHo@fhseWT^t;o0m~(^$l`---4wF}L`(BfhT$q2qdqK4rM5v945flZ-qY`GGWua)1(Hp3NCpV4{$r&q)u^O_I_6*)SLU13~E6p|8hC52Sek;lTu*aeSq{4ax@w z<$`2XS{4)pWCfvoQgE4q(4jC7(jd2KbivTI0ucqZHSKeLxs~_-oAH^4o;Mt9T6d=D z1KMe<^(4em5}NO?E^BD%`p)T^_)veC+8Rs!;}o^!hqKXl8$)OiYG@s-YUvtp z?fTTx^{Ew-=^AgTkF`%O>*>3=S39$(YwO-(mC3Y+%<_oQMx?9hjOw zd9VNZVX|TW%UFk`mVv2>HHMi3laUi^N`?5N?dz7rkX}e-V^dP;Kqo(5ab8kr%K$W1 zLMMY{!`^|e*fk}$zbWwj;5>((VkH#*7vHJFYl=#j#D{Ix5L#b`{2MXWES3(ujGT`T zZK+p8r2|tx*65Zl5$>W6b3Z=EL0{#&i4JiNaU1^qCa(DBxoL~$@!6c&wSP&z9Y)Ux z66mGUQgP&1 z|Kd9*)jQ|z4C@XVtoZz_>SCk2QM1nIjf^FOI_s*7cbO%6rnh&9w{hY=57ftGw?1y| zJyiVw5tr3_0D-A~67Wn~kCCfJ502NXJ{;+87OHlOyBmxLjJuobpJjH5>SOw2y2=LX zpY=jvZHy4HHS($B)GxkI5A6FTB5W(exoN`@Te9ixP`#o&7E|caEf%UFcQH1Ekscp+ zJidY5NVeHrmUc1#(=4%wYz#ZL9}M zrQj;O9A;fVgNpo;sVCoJIYJM%*r%cg7kpFs7PH|$6xtvye2XF1C2+wv1!{nA@e)@N zr>GiHNLKcI)=2e4ZId~?pVyu%by&2z(0trxL)eD!1cwEyChgbfprVV;RW;P=SkjHv zZb`8YvCi|<>g^M@S7r9kY#Up89lxJ*J6!5^pw#huspsobvc>%^HC+`JQ_vwl-<9B5 zBTyXBeA{{YbDuLbPXc|UerUGrv+q|V><4>?C$uKHx093& z{+Qu7*QEvMm&qJv!@uJyxPvv4ql>df-;*&FEW(n;Nl!u4@(n|6tL*uTQPeNbzpe*V zUqa*ap8t%`V?DF>$+(I!XzG21zFT?t+cDBdl)L1`gIQrKG{r(i_v5J06!`DlNaVbU zCT5~Zd<2~+{-0Qy4wa}Yo1!ac*%~N59f4&~mC%({Iy!jXazx^BD|6#AF(e)zn}VUU zbnpf$%6u%XLk~xn3W?Ax+{D*QR!qgS>_K#WWoIG22})=7&9b{mjpI6a9~6cT$w!-E zsYo1@O2+e-8^h7PEQxG1iH9Q2W~!kp-;I$6&;>s6lwbo|`mFWilM_+02O 
z^lR>J5O4J=1>V?-HN5@_c=*D2K#Y6-a1=7kbg|gyGCMiK*`hPTnVn+MW$`*W1PUh| z*(pJhP$ulV@sua`C1a&bk|0XZ96z0ypoeH_Z*>>KUdS05a}7&mbysFgR?MlgE>TSX zvs24P4u#KpJC-^Ys)b69466&$E%=JU(Ts%(J=N%rrERTHqdYP+)Ieq)s;6gW304_r zD+USSmYQe1&yM_2D;@@m4UXItU8`PurN;Su&3w$Wqp4d#$gTRA>H#@!;)F%W&kB9c zaL-;!^`BATD|h}KshEE|Rs`)?z@P_yRrq6}&qER_K5Z$gnX0In6fC`KUb^NvCp!rx zuX);R%Nz#D7EoGTkt&Fi^eo%tHnHmxWOLF2sBd8)la=(yF%`(D5$a1!g3NAu<)`PE z1bWUi^@Yq1a*XgF7kpkqz847cjivgjOUwO+^Wx*Q;UBN0Im?B|q3Q+d3pp-<_Ts?| z1^Tiv-sldtG_YsQx|qD;UB+! z{5Lo=O-Cl3xDg+@Ri>6EQ9{_)TmVQWLAQT%zwfviuFU~hd%1Mj_$gxi-UhXGZ?)+A++AvKanFSO zaHc=gt6$bn?kCIoy8v=Ol!KyPS-03x9=6p{f$uq|T_}b9cHomPTf))aFDALwHU>u; zYHSQrZ46@P6p7e5$#5}B1kEi2o3rb?M9(t&?Aw8m&{DWW53kkOy*w-pf_)fRu}{ga zWX85$MGxpy^oBWEO3e3@a>ll7&us&q+xm1Kvd{gesu={qe?>wnKnQRg)B|S2?qL#` zhu=bj2IQ(AetR5~kb+x{P?N|O2+8+wtGQ=|64|UGd5Etj)%uKS9YyI&`X3#Yw00{}`~ z|1iV;BSC)mY5x#;01+qtR#E?Q{rsOZBMzL^?E|{-9Df`UbNBfEhHRouD6h*nL%>|Y%52%sElBpE3Lj&u1eVI9 zvM2?<47(Sv-vFsM4;x&RKJG;l;iDB{@#6M)+3APb{jQf}@n^l_8RhK6GIm@UJN{T$ z-1Cq%qL*v%%Ln1}nxgE^$oP_}_|WrHQG`iFGAXn?TuPntm!yRc7OU{24wK#hQXx@bQ3=!t)Ltl5+*&_~AYBTS?8j1&ewZ3CA`iIj0Y9Pz%4jAypP9W`WW@l=g)}fFF^p5NQjfob)6SU8U!gOBlY4>&mM|<5zI082}nz zhAXMINGTWG%&1(fDak(VDyS6UX#!YUt(pQ~x@EaC8aVMwj^T-_a17e_IsUHF6``d_ zlqP(a?UyT}^Qr{c6(Tg(l}j@&1{f5z;J+WwJZ5o{P~2}TgLr58}O zBy}v!(k07i!`MS5e4qfH`aP>w;^>VhUN}Ixu*n_@FRr)8ecr}f*!O=)CX~!nBi`Ep*_68j|5$9dF#aCyGcORVin!@B|5wT)oR3@i){yv|K zF6+)+j4zkQEW`_7KIoY&rd9Vwk)B0+i>IifX|wiuj-|kv-XA`?ejlcY}&P#11yI3qktia zw@SYXMzJ*Fp1SWN!F}fn7NW<4^vC2ONZ^p^4>FbNaB0&ahmx*wT~hmcvL%b zy;vI(NhFYUkbgE7q%`vq3)f)g#6;=Ys_*2Za z7_J&QBKo7-A>#hX2cyDiuKjoC9nbIThlL+{d3SB2-IrJY z6g8#Z^m#t2e*pPsEO_(Rk-KVhqr2T^4wa7@+Jqk(D1!eYLfVCyuHqeaQuB&KcMK2s zTLFF@!_Nj@92?G72ZrjIU0!|Du)WTWM;hZT5$1c=u=r_rU3}WRwqi+supLzr*XBdOAIaSYfVAVj&0Yr~I7w@qKIuYeTH+P`$R zXQTDz@G84aJ+b#aSG~Dym0w|!;*~4<*}czm;;YyD(cmm(_Zje==+(&IPTM2h`dQvx zo^F@=f8}I{P@f*S+!S))D&eZ}j;z0ockoI=bLzwDpZ##c++X_J`7)uFX}Tlpz`zv; z<47mQ{ic9Ayh<%A80in!4b8F?4RlqKytfspAbj}EMRMN_H$Vcg%5X>UjzCi-+Cj>5 
zO;hCTUNSe_A4#Jb5vx@z6WKZ@zAGZ+F)i7?PzG{^2vX1e5@y?qY&GO{oXMQa8yvYwWB~>jvX6NwiMISGO z-SO!T>b`=i6VtE~nhA%%_nOjI6QCaM^{t#xOZUgqsA9CVau}WQIJ{$A8;w+b8TlM- zXq*!%PNzH$38qp$8~LM(FtvOK2F)mu`o48!PCHH3PufAr#Ei4UdOWfo^Bud(WF#gl zpf_#3ldk_(kwcKtK(y&$UCdMdnli!o(SoF_{Ui^n`>~E0)18`I_FYGOf1}rriXKv8 z0JSu%ivd>$r;C7lV4h(e-${nf5KIclc}MI*ZHv|REBXNlE=J#whuVU+$xZTI-xF5( zj+wn9!&Wf13N8|{c3f>Zz=wNHtdPxX3+C-A=FgRwKJ=cGRDhElGfCcbe_?R1bMy55 z=7C2o{Y;S*>jU(fKT$@QJc{mgoE@Z3ODZvm%Bew?uC;MA-=>hP#4CI~55iS^!zNLu zp#yC&*CDcA=loq7`C8H0k=$I9xO_hkKnwP@Z(4hG)yd7D%>ol{vhX*av9m-)Qzr4h z?_nd0GUg=t4N7TUEzt5b+q+o57+H)o_@2gf=}_P+anevH0SS9W+0++@bkNF_PcD58 zE**0pIzE?)_ln9b2*)f+jujNEHEk5UZWf965{~V~msvoEsH~tEId4~Dn)>MIM&ZTq zsU|3q+Sg>)*Jh)g*)b<6j~G23@$gv4vT(T~EP-?#-%B{YS5J#A+Zr)a7BN!J8nL0eqlwxnM{UQrjt{YJMxXlduaXA*H%>SV zuXI>b3+Vrp?|*RcH8GX*bGSzLP313>2tPt)x{=1g))M8v{wZ@lG8(sts#>42M>FK; zy0Ind6R@Z5cEXbbrk?Uq4$#8N)sPT%4o!ZgiO?8-Ed7{7r0?&-6Ue3y_V1)0L#jtG zbw)Rnm=qKTP^Hi`we|CV*GfCOf>uc(!190CJ?a)IaRNEr26&><=qa%Uz7;Lyi)k1JFYm`_ z{qcmG0oY(b4N&)|(5P3CU6-(0>IudEzK-_sF3;X*yI;yy{bWBq)W_xEYWF$Nod2b>7V=%`~Rr?#-oOwMO?Ic?;q`#QNB_(O$x z=X$#C1aMlWXm$h-J$76mk54kGRe`f|C4kQU#--mUpYN~42743sPJX5^A`;loD(`cy33RRt&XMInr~H z(wAT6PyV*z93h`z)%SxsIb6T>RMv%zhkc%B-(7yL>A0aNy?Z#wv_Jcs@Y{xF%tCR< ziE~@Rwna9&E*zx6 zZGQ9?U4Gb3ETDRcf&&n6!1F*C97ti3l&9Qf=_W9}-=FLt#^FQ&EQy1*z++Eu6y}~Y z=tn-}#-Wt~0tW!}03{}N3@sqC$GCS6M|rPAq%kF|6yY=s1pwVgDJ;oAUFBOTho&A{ zhXz)0;Frk&27pnM#4cV>+Md{iDDcgvz?Cc;P88%A6f}s&st?ex996}>QUX>9U!b@2O=t4?$PDaP5yf9~n$uSD-~ zWweM*sGpi;i20Sm>|gb%NTaDd=G4P8X#>GBn6?E6lMJJ%c=WxT7iwpba9A|b^mP-9 zL38n9VY%r@5>Dl8-Kj$>MFN_MzLSq18~2H`VQL?`B$;@yVIdyyf_@)LV9@LoU8hUO zo~nmcLeIu<6o4QhWQZ|@7XpBBI>TUN>U#`YoD%Eg*8GrE1@eRQrw11o<#yZTUdbsd zJNxan>j=0io35i@7Tem08zSpTyczV_@`de~;;Yx3saN0WJ@V4w>Mzggm;(Y6Ey znKEyQ3!-0x<01JV$24?@OA85Vz{*LcQ3$M)qe0O`T#)#Q`l<-rAwLn(?DBz2?r`csm)6-C%qR{OKHE3m2l+r($Tb$D<(lc@&=CD|-dL zYnavx(YN321aDiaUpdK1;&Z#)?2Q(B!z!WsR}7O!23cLlRJ$qb!KO5nQ$^Fg%+Fjt zcPhN1qRB2YAfCA*M1zv(Qg0Mi_3+Y$$lSiNZ6nOFq!MLccj%NZcjKg-)4mbLq7S`? 
zxue&i0_7rpmXRhU&f`AqwZx;|=wRo&UEX3%8)ftR#RWg%9NA+vTWZWtXSGeGO4VC( zt*o4(>sV5y9stJxmupb+?qdI^;kOP21qE+cwGWp}Ty9%reqJGSUb|GsSnCa1uV{ZV zm9%Rz-07qI?16a~(Pp9Q+G&5^sVqy&^`>4aiJNTBWh}7hDk|r~S8J-=D9G$d^KbyX z`UCNjt`FOrvU&|0Itn7;(J9tWmv?NF```1YUWtsVnpt8kBB9g)3=~(ci@^nBW34)jQCGegP%IrNWPAH-ny!xbY%q_}PiF@R z1~!Vx(s6t{vA-uCPt*-M^TkD>Bz=k?jNE@J(iC>U@#6k*nx(a-D3TzGRKh>sA<9y! zeN;wjp^1RkNJir@6kWgmZgS4m47{Ty=le-8f!J9K!?`xZ>Uj=lZ%_b^V7gE83x$EFkTW-~!9W!&*g<)y(`yn^J9?mtpbpMHu z>9fM-wyT|XE5Rm%v-5xL2f-()?|v&Bt^4rjRn3V5cdhGVuWsa&>W-=(nAJtfdhJ~(Bc;oTXTUySBiAIJOQdH-nKYlas=urc7Q;=^ zGJA1WJ!wtR8_)CMlq@!dW+sNV46KX%Yd<=S5PK{0#T$xvYCr{r9w612U(18lQsUCQ zTk+UH4ioy}zySrmC#_#Y_M1>x_8@QL&UA=mry zCR6IhExvEoSPOeQ_D`AOL|I*tR9tOg1Ruxr4Iw$;h5g&=P8I6Q&??@}F+74e++`06Q!#A0OhAG;G8 zh`0aw+m~3dsjhlkT|w~OnvKV|zq?~unD^h^r2iA?>i@oc!Ks&i+U&M`R~WKVWN)jy z|7-n~XElqw#Z@I?yidH@)nK*kP;p`NlnS#qoMCOQRJn_lqSOY;Sz^~N#`K|xOtmR% z&OL_GoS00pg%86eP^31GjaRlO!yk$7QGSKOV;~t8!)5Nr^Dz$#T59onuo#A3u~LPc zp;z)GCCbnByizq;Q2@$AM(pRq>;I64?3KbM^_Nr9`M*~2z>?%YCL`uopY2ncUww0H?LPZ2!F>by zH>FHStRzVsF(qY*8FhMUlMyrapOX7JCzeGI-P|G`b?!;5zZbHftS?lWSgoJg*BLQi zcXmL$ACeRAgJ%JVe`T7hpGpQONYaL!hbq%hOr-2-8ub`h7JPeW$k-ovqpN{}9ptV*j<5 zse@3pXOiG3I6z`*!B9#PEL)~Db&}N0^hQP^ncfT-HV%v6oas0B@#tJU1uF#pA%i5b;QjI>L|<0iNsxqbqv&<&-=z}4jRz$*^Iuz(skUS3l*<@o8)S+!N$kqVQ|1h z?|_HF!Pj)jes5d-Iu+}cHa?pk2Khkim5@SQ(96J}mum`qaN8AX=VX@3<^>3Sx>g*) zqYP&u*a+Pr6dDRZ{r%t)J(-Q!pKjIpa#K#?+l`F3S{KVUX}zi59Dd7a=!vY+0yelx zg}wLx2foWd^IyRN@OOL)>|z&l@?y?Z0&#E#K>X7REri9K{S@BKb;uuSXS>EG+#j5snt!J9Mt@kZ$QJ_9Am1HE|Rqt4$rMt68Pib%=5Kz!m2H zImtbSe*b9xAU*#KZD`Ab9cu7T!q?x5gj0uH?}xc|)d{;+mnyMAP}I5_-?~~?*vU=o z{B#2BgT%U|rSMOsq;Ez4+ON4fF(9G&zv~+cN%87pr3wAC31{8)JCx>~x9}R0>-T{+ z`mF=p#DRkF0e)2fKK(xUB6B}AdDJ0%bbg1=ytB`|{s7`cOU0eKz_+_+qK* z6`^dg6v~f4g4E=FYuq+DL~dT=GHRorwNJl)L=h_F%zK2rRl?VwnET0UFWHNY?1g*m z+1CFF7saT5hVbK3+-&+RbBsCxJt(Gw;HM+G0u!~|HfUlED^}AQl&PxKoFbxEv6UCoSE(>~Aw&^}DB=a+p^!t=O<%=teK64i@vk0yn|;-9 zRzCxhjXR^_&(FMrHs9pM8sMDtd5DW|nVBS9Viv)Iwt+n7(IzCS%*a%W+D;{*DQpgj 
z&PM~`x@bfBJ>~tWUS@#33KCz5&ZTv$0&7k{yRje95Z?)6J-I_%C}?-~`3(2&i=pLh zT8I=yuaJ07v}!Iu1SY6@SsvQGl|oxI3dvbv%d&q3uHL66Swkb9DTt?j@|HQ&kHng<~lZ)U!FWXSdUFVgMH#p|6}VQL)|FG8Dj@h(-|&q-p1KZ zk#!~jz7g{)H2*a>L(ezsc7>^FWfg9+T?0)h;ItmshxuCKB6g!BPK!06Cj_yELuIp= z3{qLcOJXuL`d0t0V-8n_>KP$q8$D6;`-5LTo!{AI>sB(fTh5lUuD@TE;-Gh32$wa) zg2ieWu~@t-WLHF5+tjXI5tlC~O!8jMNux?)V(#22Ian1M6B-4Uuvo=z#-CWluLv=$ zP*!m<6fwnpYN{_VH-`GuwhZ0sQ)-)3*l+);?`qb-(PLc(h+mGuvA$r$<`NjY zlfD9%1p36=8z6@jGNq%?!ouS4X$wzKShi>4+rp9^3!FAFSl=-C!l3AF>e9oXujT>7 zK;;pihbvM%=AZhm1G#kk;EnXM783{b@8R!Js$wp-jg6>6yPo$b*xW0xkr(Fc@7X=| zuEeF;%y83hIFpzISnsg=jqAoDik4oTYiPe8owR2gv&2eGJvc_D zj`jDGVI(>R{wf?DF;LEcU67=dN2(k8vqjEUMS@N)UKWR~M^gQNx~+z<$Fx75$fyU!N5&+=0#`4Fjcq778oXvHgtpC|#_hSns^ zLbf}up9mR|^3Mb=Xbh#PFC;HMLX1Y!pD1^p-5A6!gsgj6I(1fa;UlC#nzKauDA)@z z8;|1>Af98G0$-4t{lx&9f435a0|NN(b?g?BVn9&vHP#$$Rj?PwS59wOP+o#87_2<} zHqr35!kl5CCkM)`{Gy;pE*Ll&pbXSu7ZJ2W!AyMgCDe9x1cJsAk5JagOez&Sa;IjQ z2cSJUsL0!KO()zEA2$&fKyAyG(o#Y2&84GrjKcis`8g( zqGrQ9@Pp$n7cb>+G}FpV`R9uu^dFQ1*jVED*Wo97-~V#+_uL(Ke?pB!y@wcmU+PH@ zAVMBQ_47nS!@ImPE7(+5z^+IIWzJtToAM>=bd)pOoeU_444g$BHadL@xL=7>;Ucs) zerr@&Ws0a;y%=^u?5BgeXO7@^Ia*gI6rF+#CRv`fnH%6R=+%WVWl7}GN#=G5R>C_X z!FInnstr?vwNXZ*#Rm3ye!Z*iP_4g>$EUp zzN=CCkROzPu%Uh3N=!Er5m1Za(d<~^AQkxDwX;_v z!4isc@bIKWXn*i0pG*Y7-l0eyy{NKn0L6DS%6gM*e;@t3Z?qY%J2yh0UsTyU4JeyB8ekesZRo{V zNISO{ZK8$t=HZA^0X9P!T1*yL;%Hsd&kZYwtVwENV* zaZ;^j&Bw>-*G<2?2_;RRxs4{-(^%_|1wT9K+M3c`9m5LF^SGf6n`wSq`=%0gCVUcjo>6O_iQy`;R6;a_yMbfD#V~3WH^DtUK zrB3D)^i02FrSBa4Krsp6xo{~k2jq)`M##CTZL?Z&dcMKFdRdy@XqSG{0Qbl0iIrrOqXGZnKBF%3SmeV z5*um8_6?W0M5e=#A!f`Dtbz(98S6-FJ!N91)&^NEle5=4F)gSb#e8IbC69es#g zl>!l|(uoRr6vVX}Vn#dSsVEGjZ-^Fc2eGXnW&~7Bi(ZE}%0?>7#ua2LM?Dk|;%d<% z>_r4WXOtl{ueU440B=WVY(TD0^DP3y7Z(+%aifWE=@?5%We=jEs%gHu@Z8&9+F3Yr#*)((Nr5Xm{*Uu)N_(M%J7g%CX^BVMw;`={Qlz+)S(a3u{p(BAG9#YSKxb$ zd_*z-{?ctwI@t^qOqrdIbgvNoLp<2C$iE*E2E8$3<4*ayzE#tq>4GF-@pNOUWztJy zQT*t6<6=Z_d)C^*(e)2zT}IbuwuAj19uL|Fa$Uqmtnp6POV*#KO^8P%JFlVPy#AtQ1Z8Z-Mq^$x!WD5bJkQ|@ 
zBG1j0fN;-qxOmYm+@Yx{BEq3^UPF_tOblf-D#3&Smd*E+^lU8_V_KWj^(U)sR~MHk z)+z5d?1#Wtd^+QIj_q1fBf_4m0ih732)L<~rAB1HNAzfoN@b=)XCy=*-Ev_!;EPE> z9mc%GjciDjmdf0h@C*u93+abj4G}74BjvbM6olml=;_R~WdqANnb0N@tA8C{sk$gJHHDb13oZI5-L9&;p@oHTU%u=@5X z-!x{kivBnpdaKUQOlolv39Vr98j=_m{rgsvwOi}YgHziqid8xfHcH(U_{Q7$szLjM zg*k*fi-#cpYVVSQvK^kY1%x(;dOBXqro~&y+WqC*oE}Y!x6Mi2`;hZ{3kHtewU74@ zZ?&;lEa;iKYjN)V)(|L@#4KZ27`!{iu+S}NTM}=D&iONZ0%CfX|Eq&+2c@8MLfygs zvaKLlySJy4mjLa8g@1Wi(9^XIIxVYgE7b6x2ixp_n_h1I&+ykBqBng_Jz&n(J5Hl6 z_2nU3?USIk5gW49wOo9#24=LxhY;x(m2IT1MPaW-Ul{Ay-|I;JCn6yH#i5#z-*31n zP@bJrF2~@cyi|l15#uMK(7gR9eiRy>OOz7X-h8Bt1h*MudkehFX9ur@cdz&UoEBwa z;;Lq~?dO*m#*%bZX~JO2x=X$T_QLy9@u|V<2=a2DuCObZc`$3}!(m3~#k1Yk_T9&W zN}9hqCSToL`L-StWU$(1^W|HaDmz9rbkrCAsyS-beDc$eEw^jG$-e7~C+YP-x2Wsf zp5odk>p2)7k6~dJQXm0#xYfYb40hxiE;Ns^Uk_JHV}MiUZ1S#DkX8kt<21n=0b>q& z`db7epVe$ln0w*9?V0Uukae}U3t?R=maVIkGvTJdH;l|vw+3S4%JtTV9>SnE7`xfz zQ5Ll%@r~#8G_qVg-h}(RxIAbVDyM|3U}#Xj2D!OATx=A-7SBO{ubY}QG;N@-gLtmp z>hyU%ME~@M`oI6ROe5C)NTU@D&1LbMK^pEh;WqA&?P+RIW!9OLOFm z;WNAI5cFVZpDC`Mo32ijI7MG|51t?4Z`&pG5HPK(F={icdYeto3AvY>}=hXxJp9!WDPaEiL%*7 z51|jcyyYi(DoOn(dGx6Ivi(|>E1LnaTe=M|saFD(R>Yr{Vmk);&=GTKOy8ymA-5OS zP{Mv|d7F7VhBLB-uWiRrmDINwVhK!KMrfEF1KoiUH+Sox_HdcLR^kXDY%^C(U{CG2 z9g}qzLEa1JjCf#fJFEF|efNk7iP$@b^opD@x0xnK9u=R54{+m5nC9zOlhp+p!K#dr zUVG?0Nl71YTV1G44u$tOu!mvA)e>Ar4DE6@UuT4@Z$n1y+7%9UTw3$Umi$K$28+jL z%0Up??T4n~e$zk#D7gV2(&Hd1VG>|gZwxkM3kz2ZAOp&rCcecuVOE%e8okBG#YKW+ zJwxF$5M4YhP8iEKI__o1iNFq%-L*q|7Et6Cg#tpss`m&ZZCtTX9?~4_e*4;h?DvwnASL~ zR|4l;=+@(I6z__qT~eiqiH%CdBFw%>lsTEq(BD#!Dk9?v>9-q2-2DTVdpgqw_}Ybx z<@bL%Rw-&LmhtU-cStP$Mbc8=0^QEdo47HJT%XIfCTE$uw%JmP?U3wUUXP$|1XD|` z?ui*9i}#=z>REja@J-IE`kOCEk20YG2<$(cH!JmRDz((%`qUt7;l;LAkZ^qivjOTL zK-v&lRnpfx>v7?xxinpuJIQ8rM~o1Tj73!X0ugBuLuB-x=F1V1KGhNKSz=&JW?!+I zxlE~Y`=P~{f+z^V!L4uQ$#3O};NYas{-s+>R~Nn(r7mnh=VqX#Mu^{VFg(eN;)~{} zk7b~9v(U;2i;f?vyLX7GdS2$GDAJ47C{jNurm}s@EUN}(kUG|}8{$LWT%hVLk)Rhb znyw21W5Kb`o4DQk^<|fP`+G4yxoq-mrs7a^AerT@ah*^(&LZ?D${LKHc1bSWz8X}GcnBV>LVC3FTjtT<-E{S0j0FV$7 
z0LTA0Kmp<;ChfqbKIJfgqXuZZ>um-Q!0Am7bniNxR+ZopmzViLTWBDp|;?F1V$l3TJ)osu=4yS?4?#+}3Qfp*7#CmwOrT^)iRrJ9l5p2-@JA#5376bi zLxWmDFduLniZ@9h?0bguvt&962E2YBIj1*mcKYWwxIu}5!A&HlZ(CJZ}pW$-99b?(o#8aw@% zuRLGaJxe(Bm2m44;O2Y%Mw?a6f{r(aL|-s zrS&9iH^Ax6wNO!dD1;jlHlpbHF4lN!fXm$_P|vuY0BEBvqNpG7WQ7f)!UI;j4VpG!A){k zaZ$vL*RcnGo*P%5-uU=)N}a)%u0P^4v>5w-U4H(}gXUii;6!kav5|^2zwAwGQ92oN z!CS?q{eEZ2Dpa`trU|TBM+Q}`_fO+MCHAHcJ!BG-&ZIMV4JhI^QJxYT<5z%f{d2Vq zmZ8PoiO*XBalX=RuGN7<^&6Lk&nPZNiMx*Nt(&gUIYT9>{~EN%tX@G1LyC%^ zx0qMOU&pe&xfxj0syK?Dw?B!9H^b0!(>Y8L#TTOuLEF7i<;=1D!AJ?%BQm zyUUzP`oQkchhkS%FD!2|w*gu^Bky@VX%cSdsrfv0Wq06G+O?@wtYS=}L*fS7fZ#ah zIA#QLe+B!|cQCI|E+oh~9yf-bg2K2t&|9dG|0{S8W~85Q6dE9vUIfq z3y9tZW!DH_{Ll4pRW^Vb}{%L9i`Wxl?wb8cbIYW{vIn!LxR3%Ocw+6MIW znG6jx?;KMw1Dz+0!O6V+=m$EE`F%3Qz1g?@ueCa-mEFsqb_yH>J~xkaRbN@W!O(Ku zld+Bba@6!%Z^of_qM$UV=8gLz{)l|obvyjeD2fr*^hoRK%PUFhCb@2#>hA`rN~%i2 zH#N$?qUJ4nyc;|^ukifB=k(v4*E)jBXv~*Qy^j<1_0A7Ig@1qR6Ih6-)RBmE{V&4N z_ZhkvBt2M$_UaF>ML+aAFXh@Z6OmJE1(B?M%BR$6TcNdz2#z`BIt&2=aDgN-RvD%W zTM|PLeW}fnGDtF+|aY3lRbx|7$u3J`VLjb1lT! 
zWSj@OGF2(d{uKwp7X_TKEjLcq4aZ)qe|{dQqeNFSN`Pq;%^C)Q@i+n)>!3IBF9hZu zLn|wnVa@}b$Mm71N{y@`x-S+DVLR^wZw`)tPP}NTY(ob)9l=|?3!x_Ama+`Xk_(v` z256#@K?X~!`J^uvb8#Ic_C&U;AXLW(*acyA9e z;aAEpY_d=c$~8-laSs;`sMG76SeG16i66EH9Do)?)hQ3v1BPdZp+J3|0e_DZ%t3vh z0fKgb>!2R+L{fsvazwQpfRbE_dJ5psqj*`^#q>?|hCcKny*wbQXx#9(Qp8*mOJ@nQ z?G?A?4Tx^uFS7!F&fFROV&46@;0rw)+H+o@UXVO9^#Nyv(Ax&h+dyfyKIBSigK|uC z1>ayb^yu4_0iqoDgYrLS!+fCgp-H`hX9fL3n2INJ61YfPiN&R)(N6#)@221HE)C)Ch;^u~VMah@aKKhA4p6kVQlxj)B6-)LPy2pswFm z;L)^pGVexmJLJ&SaH~~RsAE!Btte7U85(ch8Oh?3RCXl`&6%~)EepPt9LGl+i)Zzs+IHoYJ^UFKp$X<#SJCE{cw@ZO@m)^d1cSTL9PG?Q}S_YFo7<`?zq zPW#HQ)D6IzQ}F1kBLz7=Qr~BGS=W7kH_K=xv_0cdGU>a`^jTNbiX~Qh`f9aLnC~4) z%#sG``-qc@Ma}Q}kZ8?UWpSsg##T4~=l0P97mSD(!oTwQ;mJ47x3=vlV*c#$Gdn;1 z_Fpf;zEkE!FWPQ*-r3D$-M3pl`)uw&1zjCBy{zwDfIeh9xzpJ%^3sk*)u7Az+&4p` zrwdx&o3<8@v4+C4L98$dNo~o*cTMw=_0X-Q<^D{$#ZxjT6#w5jp#htq^E zs!-Tw6cDqV>ZCPP8p`Hq185XuHn$kz;ra~xTLfPSFtDh25uMI!Fa-HJZn4O9V`Isg zcmd4J__$Y##|^|H4CvP29b|!W!<;D`CLKf5y+dSN9bQaCFcSh9XpSuS=1oO>Ftju* zl_WrPID|->y`H{+hF?iiQn@50N0;Tj00(mmQaYO^^HR2`h(szP;ObOL54N#Q>JFPq ziH(w<$Py^7C<;1g9YXs#jn1ZC_SwNF+rQ4<`0iwT!y@ku?f2Q|+6UQnf%v*WpS%9) zgNqI=k``r$`Ty=br%qA9%Wkkq-C(+fTjjNWUzV+KcXm#NdPzqAHFl$VN#|mA#mRR` zw0F)0w@#jaowCuLk%na3oL~R_`A%?Ekhs)-bU5F{hk?x8C26}!#6G_@|8{Yj4D;XU0ayIqry{4lo&zU;@dPiIve z-fzZUkFEI6%PUX+7?vt@9r5)OJLVaky&HWa{yS{U(S+Vh!Xb{Z@PnfM2SqU=uoQUl z5edjP6s8>jxg(X_ z%>$sPAS;)>kZnV!r{VEFuzpq|#3Y1`E8huHPD4~mOp9`Xsfxzc(S%rm(2A?T%V3g} zMJJMoBvMwo4<>64bq?|Sa)`0;!bBiYCZ~3{-Da&a5hQzew|1|*W5VYvaYqlI;^%q{ zpUkwUhR!MPu!_autU%~tHjU2Nb&5uN>PBkVqOJP2JCLnnwC$FOr-^N{Yxul*iEj-~ zdCbfcPaZzW@`h4Z%oD#|*=ypOoObK*iFS>Dy2iiJemYPQw#YlyCUs+WSfJI8e@Pwx zJ*Lj2j?5CU2z!tF7i@IGEP?3k&7{pG=u=UGE9pQWS_}gkrfhLh7#c;CdCM% zLHu=i9fDAem3ZxS?G=qz))wg;3AY_)sN0beTuq{((p3U$7(OB}Sc{N#C8pJ%k#Dn% znWr6VsiZkto5ZKGpG!lkQ>!7a%{m?g(_wbLW|dP$GdxLT-9XhnueIhk3=XNsylr!^JfAx$-5^|IR15D2#;&5wV;KDt;^ zAw9w;@#a7S7x#&e>C$Li&AJj5LE$;Ag|f0}=LpeXBDxo3k;K>M?phewyNT-;V!4(G zD2v4(FrWAouJQA*V*4zh5lvdTbec?+g~`Gw@jg~uUHVFuG7ZmW`?-n^TF)nS`|THj 
z$NE!Fy4Xq}!{JCuft)^~Ba+yl8X#91VuiG9FTo;qsRC2BHGX}1gtGJ-oL710k*$0*4^z27X=_@P?$qM{oGs2!9%kD`C|Y+3 zvU@T9LnUw`^R!z=kc6f^HAHZQG$*6SPT9c;hW3=kP8!lYMzfPGkb1Z7(bmx2YV+e- z*}*r)=)w$A+;Ntvc~Xb6rer~l;ixi`DU_~z8&uZQdNQd4Y*pmelHjwWaw>N#k1Qq0 zu`FzI1}a~@V?J@3_Qm?M^KckE)%DhH5)>)Uh|6;gB{5q4oLT~1SCSc;rTU_DL4|*7 zF_tZ^fhH_2#_He8tN?bP1gK&eWwxv`!=|zoc>XLx*i>eBaYhwuHqKBTqH3c77g$CV z=fQuHE}GPzM;i7^vlXT9G_VzkM@fAgK{PQq23KH>DKzenh5!u3^eZB-0&A}mXNqCj$sa$ld9ITyiK}!+-rs1`zuP{u&wBWYNe^l1R4l?bNz1tvW=rM+M%=AYkIJgu z{xE+_zhZeT6}0ntwhuDa{^7d5+I;;bH{W%>yY%ZXxlU8W5&CILBxMU? zp-P}25lRE3iBRq!+k}{?;^E6T1g(QA8gV6*AcqJAd{r!EKWl=oG|(;68ue$9CXD*? z>8)HVxrKGB4gK9JiF!b&A zq#r-q8+V`pCy8pz4sP0G`nci4y2_pT-7|+>n;VXO{nE?wmoLu$mFjeizW*W}`^kSw z#N}MT4m(XnWBDheg;|lL7-6=pwq)7+qu9Pef8HXMp>K_7{ZY7;1WHXyMr()snT~He zk4Gr6WyxjN2yE#R6afzl^9|A21qHAuWP|DE>M9(C5)=jCY*Fr)E@D-p;6`7enS)(M zCy7BcD=r~7C9qBzC7Sm;k2B&M2-5pfqdf_j2Iu?EjpA|}uI~}1E{AoBe8v3P;K^Sw z4S@w7zO{kr4+9IV#u=a;I%;uJ=wuOl23cjd_VwEWhX^0pJX(dyw~sGEWe#e6x z&x!nZj!B#wq&EN-k9_fGD@5ga=eXYyjC(WG$M05sMt!WG=G>T}{-l5B_<7ngu->Jl z!{-bgE~b#)yn}E*pM!zdQg>@M1)voB9X}uW;Gii93QU|5Dm&6}{!LC_ zahAe3HVL~5keP6&+wkbJ#X_PWBcT8fMF75V}&LHNQ*7E+KyBVj2V3Jh)#tEQkZaWH%AD&2?rknmw%o(jqHN;@7~9&5v_yxxU4xFj4(|A;yWz;kWjupE@$m1 z;LvgXpd{9f)`*+QXc?${S2JItYM7x)8D~X=400oYv0S5L?PuvOV;_NF6CK33M-=fU z_#(q}BZ(9QgjSAr*l3EUDjvi+n~;VY0Jz~JZ~=?}&4?w~Xv;T28U*tiwb=H7_!Ie< zors0f;OV>!Vv?us*+QRImmunRB&UBO=J>l+6*JT(S?^+E_!5KuYxDKQBo9u1`R$Au z>bUH723*#YraXyBwyP>+w+~Nj_a;rPfV|fI6GyyZsmkexbSW^1d_rkCZ8AegkKrF_ zYAJjrO>vK`T;HS%UY}sU;`8y&6)R{k1rap9^&<;^e?D)%=-C{d>I5y_ku41tH=WAB z9u3nR)H2R_1)rNo<`X`gs;oR^RVS{q6hn8jWX$x+H)HR*83gZ_|XrQ?`4NQh6u9DYy>K+$$cU>(>qpmxD?hxJA?GM+{0_*nv zX4ByLIil`nzH-B8tpmP4kRrMp*t_1@fwELTcV$9?QR6o*nf=4h<0tmyeeXi-e>n2< zT#HZB3O>c1$kXmMI@;{wcW3TO*{69G7dyM2$3psxo!xGk-4EwRB_yP@+Y5_MO)*w z`i~Dy`|h4CIAB-UhP9+iY3G_bZd*v9*EUU8SDe@Rj^yXO-FAOQ6PkM&v+N~y*BSMt zGRbDkaI>0*>z#WA6*YBSGl6d9wfB4M=cc$Xu_9oK=fCMXnysIS0sDulSXF!yOZ45X z@=lq=oWQWl!(S|k5ZlQ_gKdb5r-Pk5m7rQ{b5pJ=UZRSr#_`uTZ(QFTHj%1!D&vNX 
zgy>>`i;2X{l+D|C6q~bJ6;tJ47amQ9(Nk+aw0X}^c?1D0Icry=W+R#X%ccnL){zap z6wGty6Are_U4h4jR2nd$o%{tjnP6OssR~TJgm!TMi0FqnDNKY=`D*QhNHer}hi^H! zk=I!E;n!u0s&SEJi)zG`t-5%9?;23RO%X5-3*}(ZD{={yyM)F-YtVU=Eh}PnC=NWL zhXEOK*)o7NQZ@BZ{I&Ro-eED=E%hUtbd8AdA!ObJSm+SkF!BXrg4qFMEk;I>0h^8?!VfyZC#&a*?3f#=HvWByH*8d{`z{C6kmq!RAN`N1uP87kLHCX~nZQdkAQhr7`D&E@eq0cDG-99&JYLn;aO z8LC)!IS80ZWLyOCI{6AyF{rwLYgdVVbAs@Z+9pDgCL$D`4f9`$&3LGMOZW%Kv-qqQ(DCm_Oy z0*}&rbPRc^g>gEtkPIrl7ajv3^Z-G)2DrlTkW#>sd0dX|=S`!@)?9qq__C?Yb0#8e zIPk1$7xvCO7Vu`;VQ`+{gH+4;AmaVVei*bz9CVFv<~mH!*eqGN(qO_tCdGEMQWsjM z2Uw@?C%&*s%tjuQl=*bbT;%b{-2Pm8>Vp48Nc1OiSwyUR<-Xq!#!MQ^ca%%EvLoC} zUe%fJFyff|m45IuuS`pp?GrTb<1Ni~unb5@=q${kXS%pZy_^_0FE7t2oY+e=*xFW6 zd<{6s#jND=a(yw4+)wT|!r&jTr5IPoRHwFH_HG&k(y|;9SpHRUja3>|f328qQitj=lqpdjzFPP=+wi|(0kXdzM+Rh2&1RW0%%f# zRWn=5zT5&8oU&?$>YQ_s`wqAWXmPhN7TSx(Ji|#Zj@&L#51V&r72W0?$_{0`#GB^@ zm3ig33KQVRIe&%LFf80fbU^RG`G4j6-=U2EdxcM-N*&xYd|uJ^{!XEe@8espHCTCz zV!XqgcZivp>y*c{V}Ck#Y#*!iu5{ZmH)%=xvnW%;KBrJoLF_=as8!Gw<7h;@#SY=w zF*EyILioh%n%()IN$2gHEH)^%WU-Rv@~z&NGGH!bq!_$Z{iV^4wg~P0DL$X~DxPJ2 zFC(Moaz#Yjrn}na`x?D-4*9XE>?7z=nkz-QX+B$gY zjC4Gnb+V&6o_wFF;XCk?R($1uzi8^MwE2FLGaRDw9dxWA|Ay=h``14uIbyK2wRI>0 zSoGS<)9AalS$nyc;(wpTGQXVx|Ekfq_=6YXB7T~@^u46{f?OUK(Kg-;>9qGRyp3qK ziskqd6NUwktYWQV**oY?=$C$_>&D zx_~|`!d>uag2cZeHtr(wi4N%!91H#X``KB*tYDs^4wTEwv&$W~wPcoW%S+5Imp2e& zhkfo^?fARuUFm_~j_|+6X*VI66caKXwp#j(C(7ean|1En%IS~TN0+zUu1>2zZCsOU z$D(J>Rj|jq;|Nc~3%l{A!39R*auEjKy%5K>rI<`MVZ!mfoBbbyp_!1e^NeKEScEjD z;mpqbnsI4NM$Id!L@G^!aTq$L1K+(g=J#vv;Cmnk-vWy9nV5{s@mI071U0V0iy!>H zy!<2?Bggj#8>)F^kLU<>7JiyV?lp_h@krd~hd_Lpf{7y+(JN zHO<^cfwd*IrF*^^mnw+gZ@xIf-laXiTG(O@qGuCCW6?wjjE)pa4I|pF{UE9NG{3Gn4Xp{ z+iyVvPp(|Tf06HhA4xaxaq2F=n2{2Xc{__TTt)!ma*jq#i*Nlu|djv>Eva< z*Sn=}EtcQn>wH#98c6L8GG7T}4OqJPYYOsfENtvs<=1f1Y&ysH)Q=$>a*~Pt*5XKb?>J zt<{4%airxe?zfh+Bt7wSpc_wYSN(}UM1AL|I;;9K{U8UmspDKZ6Gzfqt)D>Rk=w-m z^AFKqTTdLp{Hgm(b&5KIUIIHi6pSYGux^=`=E{L|iH_PM_=E73n8eZDp70!>zkgK` zpG4$lEjC_O!)>d1Qb*Q`n68x-Su4g7lM$zNx>RPBh`=2WxMe81 
zo0AREKDd@dRHjN&W1VbLQzgnI+eNX9(s31Hey}+f6E)o*9Eia;iepKHSR18MlDbhW z_2P-07}yTO@_v?QPk&GbzMn>67$NgEv8XO#MD%&7oU65$Z)e^ zd2lfe8Sx@|rOR+!l`ajZLaY&Z&hb~UImW#Lw_M`bM*xW=YbiW_shy2fROM#q?VIzq$yiC=SMii%U z@iO!rw;SnZi-=SeN%s^ISXdiOTVv^|)HI@38n;z|6>z(0(Fl}HHKwxt(2~0axyDqb z0g*^gYZXB4Ed}VW5Wf?_iHTe!n(b$RQ{hN0L@!c{pry z8t?x659=?2r#~zx_9ESuQ}xa6pDygr{UlL5@Z#6s-v31>nQpP!CUf0foAR!0JpGfB zbx{ygORwkXcm3+}j4(i&|CSZK+$E$Xi_zYD0?Tqsp#E`@<-qJUXszsU#g5`Q(jm(#(Zp3&(7$ zRi-i6UVqi>{HkQhB=N(dUEyrbLME(#jjQ6J@`wnZVcZtLxN6m~L|arFcy{vAP*iV# zYBjLts)5c4urti32lXEkY#>x}PXg;SHfPOf&D1c!Tch!SdI4@0_E6k3$U56|KTv6< ze|Zso>1R|iD6ocd(6RjvL19n*ApYklGv%Y-N58^2zq_O8=V*}U-8klVL1A%B1|--u zBzFn6!}jx=>$O1oDm6<#Y?&JW^y60gqn*+lTT}8JGpqIX>-2Aa_uBcW;zrTZ^|h8u&$uo;WM!UbY0$R* zS8PDvhpv^|zf!Yf0T^lbduwgQr(0Um8~2>%aKbqQ>*rc>&V6g{cvZc;WZW=6pP65D z&RpbnPF+%0v8i!WqqJV83a9OeVT}ISNO$=*snHIlc6hqW0aZW~5n-^e%`bVf9w+$@kmsBR$efx7b~5 z#XEA_R{flp+u)e*m=8I$74neQj0}%^!P?rB=TDwzF?hjn&ij&QFF9Gh66=_l ziDL?JCqR|}Y2qO^xfX(vPvirG7NAZ&={JBz=lwcueGO34#}nIrqdtaH+uo!to8{Mz z%rb0;R{?g4rq%<1nzrl^{il|)cdGy&ojnH1mu3Gp6gy2@mY5IK0K0e90q7Cj@V-T5 zZ@F`AwK9SLCjb~UwR1@!my{U54NUt^dBWFc<78`*AbsAwA`uTC&h%bVOHMrJ?{w~v zTAiq_28`rMN3DYu@MoJ*`3CU^Ngt41<{q3=Od8%c|53e`D8=)E_{>&mRmDI~Wu11R z1kR{@WBMJiy6WB_{1UJXjmGrbYB%-sf%~kC8Ed}^x}=wdHRCBFWTFvknw_FuE1D49 zedu7H083t7uecXh#%G`(!D<=s_G-Qv+JqwLZ&*+Zu?)fzVD>hl28)Bu)v2RZfky(< z$*_o?94Zfw3ypCJ<~N_6kpT7$;5{Tqbqe6?N6;>*pQ4~Ke)N+=$#J1_v^d||sfUnV z{53#4DbUrq;c=$hAUv;Jv0go#*B{4Yr-TR`rs7U)nkTGP2V`v(IC@@Z`-b_Cy!g zCumW|l6z61M>yt<_TLt3$QOvrBX?g0rH)Z|L~-)X1k#`PW;zRlX6IJ{d=Ys@9`8Db zzP6OQ1@RRaC5NX)*81%&)od4?u77*=`23GT>{x)TGqX`>%3iw9-I1+LY7Yo*OnvZ& zyQ77$Gm7wA?Qyn4Qdelpk$^odg_CC~r9x_#BWGQ)aY@gY>uPMJuabpfAI^j}cZH6o z+5K?3{zdyA9wq0$6yE)HosHGmfUTjI+(U2Xz50yZ;T{m(HxoCS*;-nYJ8m2%*cZ6yp`mN+$et691MG({yq1ZxO~ChsDeor zKLw}Y0TCi$QE9R)Qn@Fi%E7Hs7E3B6JexzP4LsHY=+qLF)_&9_*jA&RYlt$)ugb0^ zaJQ-`H9Afr<7&8!H>XJ#(q(&vuH@`zXz8j(nUo*X%POR5HI18KsfmDBWfgdBt==occblz0Bqt zBwqJ`8{TlExX@^K#7Mj{2l_#I@EjPn4VwTsEhAKe55w_jFmUoCVE~Otad3;H**154 
z?y#&pTmc{!z^2CrnZwOD8)x&BtuQ!{z=FUo&Gi>YbBfEL+mAC;abTy40pL>)Q0UY! z^v<&|l)@jFD-XbA0LJDlhAO6pcew__vOsrD0dv=)DwrrhYaonGwszw#blA5ifVsq@ zIb&8mJPcGW&j|vvXW)^^A^3ea`j>tLTMrEBF*W+hLGa)B1Urk6oIfwF95mnfj@H=N zoid7%&c$|aS%0D@LhLf<`azR92`78Y*MF^w6W_-)VEM!=&AND)7)6|_8}?QwGHrX4 znFHa>UPAg3+SRyqgyXUY?WD$tjvD9tZ^NU)_Y8u{$noEpt!WJR3Mov`oI6-;PTJsi zG|V={zPP}IZ9d|$1;5z^d7xqirVgoCI9+^0yatRvaMv&zYy(V^$~P$QqbB4Up44bN z9Mm^~dV?v>DO5jS7Hk3e?h42MBVVI9m^(?J1=V~$K?)YDGD3XK%_#gsxR-Djz=}i( zfT>lv)chIXmCd8Iv#QOeHQ}B4;2_8YHDbZ+4Ye`4U^e zZ`K3SzDs-qG=I$<)HCM}csGC;ZMxtS^gC#BNCBX^Dwg@yGC#+{baiQ9@(M?*iI$r< zgHj`_e%H1b1GYzx?5$m?*O;+ou|bTgBP#F5%C?L1##ZT|X~6!h>ZipjxNPD#l<%{5~tGA zVi9$CZbc`oc44Kdef>C}zuY4&cmLaYDYdD73b&DE|xprV!MUZ z_0H=%u9`PISMBaIyU(PQek|=`o{{W6J%Gwr@AiQN_*MAMf8={6`Ao9xt?Oh<=jVPY zhqfn4cK`7ZJ$HH_sWr6eyfo z3s4eghtP*dYX@Fukeu8B>4XFx{VkcE`>WCN{lWJZMtyodAooX+o5k&xm1O&Xd?wGD zeI-(++MVh<3U7o?Lp(?~>LOwg;EM3^C1Nk>8rj0$RR4k8u&@qaYVgLMjsQGfgLEPO zUH-ZNJkli&SZ^_q5fHX0JnL?i^#ws5AvC7XWX^*a6tu86AP+PMI z)ZnmzjJ4S6*bY^Qu3k1P7bD>u# z6G~IGx~bJHg3-LHfsTOSFb%w@RWebA_%8-=)kuuL_afwZ5zaQ4S}ztIMdka!-J1yV zP5M{9stm6Tq^A&PA7&NC4x?%k?L#_k)bNIr`zdz3Q1k0B2v*vt5!5yj~5i} z1x34ZLJFWNsG<e!E?%^D~Z-Sp+J5=N6NA;uoz{`)A zLt90ghsyWua&N#l{y*|vxZm7E7JF3n%#WB&^JUL-3RZKSVwD5ua6Y+4=3E6A{i@1J zwXI;t+QqTIV=6m|c?=`qwsO_rjq3<`eA(s;6P;G4+Uf?ZRF!_jJoJoHqT1HnN!%W+ z_9R-R(dCRoRNo1;t8hNfCyNaw^VwN!HJa4IDu-xAw8r_VslKF2(m7S7F9|BoCuTFO z)Kq9bF>fB-XZ9e6h87DgQZJdU#22lcCY)`6ICcO>PosNfsj|x3BdxO7Sys91BWPu6 zbo09ZpU~xmFstvPbtwyXk!3^eJ~+^oXK`?hBkuFvHZN`)K`~l`t;P^^gB{h_5>*a` zVAmb2t@n2-1NzPN3yL9|o>MHc?=C(=^1g6Aj36ck#iz*Hd5BdSX(M4^I99 zqaSdw3W0C6HUK~n;AM2=!c;})+q-tO*28Ra(EJz7NMPz9t3-Rn3=gY|Vg3@BWnh

K z03ugV8Ht5QEs__aftLlWD^jE8Msb1mD#{vxAre4^-qkQ%kJiFfI0fV6XbpeqOReOx z{sTb|qv?T!1B4F=DdihKB*47T69!fu;1S==a>v%7G@>EEJirQ>=sXj#6~=m)C4C8( zO{02);QKO|7|~FK0~R;X6rhn6pXd#QwO^CUpy22%%=!wa9Ti zrKe+TDLz>m)~ebn5`?u_y*0y9BMo_G?^C_N4NH7Hk_(PnJoSV33b)SOjqDzV>+d;) zAdfvQ-c8l{9jO^H)GEc5pV{?=`LHLXcQu^w?>VKY32{@n71>MkC1KjIMmB8nXe$K- zgGD~2sGZ-~38x#}3Uha#D<+pppTziE?5PQ^@A^B5HAgxYR^MeW&6R{b2WBgU;NQdX z&WJhKQ>f?JqW$-o^EKjYWN+xd__Gw-mMOhtLV__8v zVUc|d84whPsCr%FRz8zw`{iFO@;g6ej~I53>^E*E1{)XRE{J~#-~{K%dJ^65T8m?l zU$)Te{K|XqJLq3er4#hNJZsIRmMf6e$Hx3JJtOw ze#)dezXG_HW=pLTUFSeKFN5>MzXZGYTL&0JA#ey)v^wLe5ja|4o*qw>P;1k8Ek(rj z|L;!vc2%AGi6}p&(~e`8_p}0<#G>cIVET$Y(1(t;0l~rd#Z*G{<6sNZg zy*{bSAOJzSf_f36D>h2GNY!R(Db!&hsx4!((OXv`17*FI-dC|sRK1o!tqCew2@QRx1h;h{VW*oD)NzY3Vki)nsnECwO@rf z&X!(hxx>JJ1VnfWb`OR9L_xacup_9qGwy=76KoI~U>#{K&P$Auo2%{@xDOimL!K&Y zcnEqhgW5c=qVyfqa?2g`DvsFw0qL%BA!LE)0yZ0Mpf^w}`9`BEbW_2<(q0eCuRry= zKD>9l^WDb24|T>&dG4+ieNT2e?tOh>YU5&v`0!xu#dX)5UOa0}uC28kOpZw2{IGWM z$$bwM;|;HWJD&fm=Pfr^&7)_5xsM(-&e=^TWF61W=6{(izw+bv>K|`?_ifLQy+&RC zd6M$!*b{i`Nb!5}Nb=;r-J5**^83q|%l7X6_>ndD&YRa4 zu4tcZoRU0B*}Cn?Potl9K7uTxpLQH+x^Z#mF8(ss+*{vW`SHg)zh3`wM*U;YOx}(0 zAHVN`>fmIr(HNxubu{zsuAP(LyxqC$?T1~HZONN<%dh|VV~$bhlfUQB5!gq_=VhXQ z^z%>9yiad+K6*U*>5Sx&z2p#Sy!Wkr{ZF05DEG&+RqRL+#KGXpOhD{RUEM5do zJ{TERt}`jdWgietuzn%OAixHglPbemTc@LP(f#8cxWMkHmnD9>)-lW?@T76 z`uPldnl#0rXH-e^VcTGT3q^Gdg|L=j?6ju|dL!r&_VZP!eh(6E6;%f&o<$uR%>U?6 z&6CcN(%@w}svyt02Co6cce2rQM#_6?e$aXgy;gN!We!@nfm$xQ0^9wTTgH>_8$`(V z)yyj9D$v~C&S^Z-T@Ko%)9i57+#cO8;uz4@VW8+k3?fv673Okt9FOz>bZ;0ZI7Zg< z&QBZTTT?ex!6Etm{C?mnP5rZ<$4TK&`uaYk%ik5VPG6}~Rh<6#qhCqd@hZoe7k}J* zlWsz}um2^TmUsH*U-}p8Kdo;?mW7^b-!9X9M}&1v$gHK8U_`2p0Wj4&20u z^2C*y7K>Kb+SbK~{1d@g<2=EOBsURM8i-~fo+xUN0*8*N7y^pZ*C>+-PCfu` z8*-mjV6>l^X#Y}6yo$|BKysK3L~tK7e}T>?SS(ybe1mH1Xzdhm@Qh;Vioy4qtMWv9 zFzBEYp-+I`Is>s2!zfsdN*sS(+l2Pt@A&7*Q%F3=oz2HEsPuJG6qp{Z0p#`_6D7#+ z=L6y^X^i^_JbM%h1~ejQmQj@fFl|RX01L_Tl*z#qm^B?o0icgCCrbI~;;BauXWza< z0TJ&Zur&daeGu4P02V`F5CB?6;`>qH3E01YBLgf&c32EmP#{XmtXaq%uqm4!6dM5x 
z6;z;yz(=Or_5Hk$sF{d$i9WSJey8Qyfg?Yc=Pp>YJUC{VzRp$m%ae-ovs3r*xL>c8 z^9Su$_Kntz?}najojJe`PU7yE`s=|t_aB7hha5MRvqj4sp`#O4v3xt4mHsb_e$@DCla{b!5~4c1 z5(@N(5IzvNTZ@mneCZOpy>|_8@U&&~P`kov766qnTaJZ0U}tz{K} zb5JC?O*fk34wfI7+bMlCrPvLUt& z(rW@{n{pRZg^t`lYzFwG>M36)xf)l%2i0Ky3ybAoi$aJcMzQ7-&Y9oaTtnsCDBMDx z0Q`B-Y#Ku)(}U{38G&ykNA0^lu+VrY);ed@;I@E31CM>HN>Hu(0`o{E{*1d`q1yxt zw(AYL&jG2W@r?(+1@W%+M;%~Jqs-2mfIZiOy$gOEtZe{K&IUFn0FRLAJJdj%G86^t zqvH_18$d`s3RK4Y?Lwo9$zh!U!|m#gNzec?Xh>M;IdCoOz{LqTm+GB=oCf(I0%IT7;a@(Iej@TuNo(-Lma1+D{ z+ae+oSd{W$bDa$V?00g-5dIKz|AKvPWQNrCiJf1o8JPv(wC*del%6zq37 zySW?7BN9{+w8~Ogde8b&jDn7VS||_*z7Qn?055zUs2O;6JdAcE)WCxl0gG@{HJ}RK z6dX0c{ld3qifkBmF_^h+!w(H9-`u#9S;b!8_^Mm~p9*#UR(e#*-MVKsc6&KVPIcSg z@}#R1sh_zfUHE9ZwWN2Kel$u{bpGaKdF_W2;tPe<_pT`$PyPh(EjUBu?)qqp-)+A^ zthEm>d1z6;t?l5GN*wm<#(m%ol<~jPeSrOm`owxG)q~S79{~VrM0^7Ba(+8i}U_5jOQaz=BI*6ADRy_Y-t% z)DsTnfQZ1>0Jj@j3=9w!m?Z!raS4?*KbZw8px>8og=<_?ASg0&05wB@f-P{sPB&+< zK>vpg!!Qm#Hg%A@70Q8x(8g)t@q?qx2Lh@zz>bA#dMN$?S{H-}gYg`7*`e7%#Qzi+ z$AQBFE>vJIss>vW3>twMd~6BV?zn_)y{4mi{(igE%{?%4Um@g!Vwx~;l1>Xl%ZYxUi$q;Hwbl$CSg|= z69CjVoo=UT7sx^yDS-R42s|1@W~f%6CICgws47L@)4?Z87VC=%Jcec+Os}W4ON-zF z1@$rwX{36Q+b1C_G^rRb^o5uZkWOo-URsnTSs=YJBV}|zfQT$RwVfI&zcH%v?169) z5E%lD3J?IoPI^YNbhhY__D*gFl*)o=6q>DQgbB|+FV9|VQI>)vHJye={jZ$pIrC)7 zjMTlci`{i0t#0AM)N^G4;alJ5RV~a5XG=|o2j1(Xx zi1tK33GLaKnlWWPaW*1~L(DAqV~R z!`vhdi!j6BCp^+aSN2eq-Bgu1{r}};@|NkhEIO*r&pq!PgCSfW(smVWJC^DW3TaeM zklW1JJeF@S{Y*5Gs#_DDEW0w$LGC~;L-}CX!Me&2LR5x~=ngAjJ{md3%GipS1<{(T zl}7{x#|;hn_+qR-MDSgxwMQ*NU=6Ct1sg?%DwM(GTeHh6B80NA4nja!nASdad@TRB zeM?4nj^*Ym^1Fg_6dWw1Vy16!gvRSEkhVffoY}I6k{R4 zwCW6Hi8^t>^>6zYF4S(3V+cxk2-5Ko9v+Wqu`=!oDMmn>55b4-uGVsmxuV@z9VR~? 
zh;_<*rjkAKRF-2X?uzAGaawr_wKWUJF6uS0-_R9)JfE1EUgG=&cYGlO6Y6s$+P zAsWmK!kOXO5V8zkk}<~h%BD#cYSBPwRsgimf#_(*>#l!X>Yf_l{p?Z)fbYk2$1aU= zS3&4A2rr>oh3<0scX=M|U&ASgf;6r1lC3Jro}r(z7DSKirMcyDxVve&-j1X`V{27k zVszSKVgN^wJ8}~~xHpNb{kXd^Y~m_XI|sv=h)%OIprF;N;QuY*a^;3+jk=04@Y3bZ&$?4wY zg~s$`nU$QDiYc5>Jf~W;Wl}9lx()a=kRqmvMy`xWT@<|*kEz0>hs8h$C<(-MgBWsf z&n&$lz}&q#@$DtJjO`w`F^)>Iu2g4(Kv<3K#+?2|N`j1vm|~N37LH1);ZReeHc6-C zAridisR}DIE=JgqBq|g39+7mnX!@s)kqul`lq&AUc(R8$d$C5zd2348sSti)3pJJZ`Ia z?oxN@@-gHoRD~2HMUW1PMPE>~0R=4_F`0za2ek(JKE#BF8&%FmDKzMTn9D3A1&wSt zM+fKiqcYEF4FhIoPhzQ=A`Uu8ZOheD2*-jl356@Yeb4t*=Xv`tdcRdTPC%(=WeEPK&UZW zl&WAScCuZiZikK8*ZJ(bEWSI>n5`AW9bOXin2?iEShCN?LI@Y#*0C7M5~Ui#9xX=5 z7?I#<8mnZ_nAGhApY6PkxR0`2@LQjDud!5H-fKpwrx@_4@?IOe!=Fhag^JT0+0)ZT zE6-6a_~QckE$WW2Q(fBy!cxZ8V>iovu`>E<8oh+h5#8>^v@Nf3KV5W!&!V#BX{TE| z4tu4wwhS@aGI`Y6ZRJC}!kRt?+*))EO@aNso2I3TPe6mCtmyl}kD zAhfE3ayazWXSH`<3L#dL&C>c&4;B@uYC_L80~?Vdqpe;a+LLnkwIELBt^l6g9$iuw z-yn2`EfkX5Y6P+o?YP1# zdAgF?U$FUYVAV5D`hx1j?-_5CwqO#G{@9kvX30TI|HWSTuO>5?t;iC+2sjU`S7#H)!Z zEk6O`rPq&HAe@H_V^jmDbBG9^g{!ed*m#Ieijm?qxG@aqjzzNhZy}}-s)2m+ZqZ!$ zRsjhvF25xGcy8~`%K-^lyq(OvY3(xMm7n@{W(l>Av@Vm4P46AwZE1OW?5MEKLJ||W zct;kU?r?Ew`y3l?6wX}g`4zp{zg@84sL33gdjv> z_o^qQRFh#E{TGs(*~V}X%=WCO>~p=n1F>GdBKikU z+!h4jFt&hkuw3XVtDOltY;vf61TTWev=h)sQzk4% z-sIM-JW*AXyz)fGuRO*y_m&nszJp$cOb+rP@!$Aj2r|vyO-jV%NA~fIuPgnIb-x~v z_GD%%GL2`!X<~>D%LJA?h>o@utA$7L+X?U7WQ&Z%!byJcNrPnz>c~WHrpz<9HozP| zxsV#MSz&INY*GLhUR1;;u~=b>X+9!i%p#T!i=^8{(+{T76q!x;=Z6PdiY1Gj?85l! 
zsV8XWC(8GoDE^?%%sg1$6Zv*uCeOGwz%t;do?GlYnPx*B#fWND#=3rmr7vNH6>x#G zWRmHyCo_|0g1ocU$B)J2P?~JM6U$v~VkWjm#qf6Kh4Jg7v2^yr>pDmo9IuF1{K}Ew zb@sg90z4CI4z6@qtv?O%Po{+k9aamMUY~5@?Uv#vBHJT`Xg~Q5<9-2&BHN%EC!!-l z)UrBIW;VadiI-|#TcC}7L9KbRPv_)#wQA+@7ss{1sS7K;fWbIXlAT+SS)+!J(w}w0 zulU`GEWkmPY2wUao_}$`A4oqXOiAJytN8H2fYS*$QU8hrGMtT%Ln`HDQjy$K=!az6 zR_xH{QE8-raMZF$!lE8g?CT&>gLx~{sQh^=Rp@QQIu@EnAXI1Yg7tYKA}Vzw5dOsD z+wXZ<$2fHBwo=0ZLO6#O^c0F?G`VjcpSeivdZ^QE-@cN_o+FuxBaFX8uSDgF1Fqj9 z+(+K*v$LmKk!I%*?UHS}-yCJxyTx>EknYRm%CaPK_)JBztV}3tKq3v*uUPGN)EDaI zs4SdVW)|>Az<$8b%?3azQbP@!%>5faYBaif!bWq4GHxI6NErz?Bhmn!7a{*4D$DdT zCYigNR7A+jJ$<^OdHwrm-7ZmQD$1#Rh96GYj3x2RV0TCjC2X=0{L}G2swb>iFBfNZ@Mf+*)gA~)7ZC4?!@VWegZBfTgjH`ED%+e$F$~eTdNO`_9cUA7xg3Lw5sw zJNjv7twF5u>a0#)Zu=g0>-^3-8@ri4A6^BKouG@n0kcL~DjM{(@%WZc9;9!_sP{w9V2|s&3l?H*fgkaqEWn7Xxq5ZU_ekVS62rderNb zI9cz6t#9ufSNsUx(@q_eorNSAw%%j1XHgKu?LR;FR>AzGAb^MDgy6HDD#d4je18?j z(-!8dT!3%`v!g2R!Z>UE2lvb#|98XD`->cu{)ms^aNJWSv2Af&+fQz z;6QWl^2>*Id+{DoCAZCYF$xd4xg8XGYX+xfzJIaVcW~0R_JiUzKVEOU`n>SFr)SZp1;GSibYIBcP=bXK&y_tG*<%PtvpWhc0v{$C@RlkdDzpw(d z3eh4{8+J>QG=sZ~uVJk0Zrj1gRu{f&suON>XB|W*y6FZw(puM+9SRH0@g5SnF@lu~ zJ%gTU{|0wfQjD*HUpiuAG88mXBbu*O{sMg2QZ4PQM({}qnP+Ve_l#D(2VQ1`dsCe| zkOQhMZ4YM$V`XZ0+twaVs+Q^wCa%_8+vfVfkX{*IU@DSHT#Afjpz}%Xq{%W>jCgYN ztoWO|?A8e8O9&xrj+f~gW(M8TdO zv5AKlrWPXm=Hc_pW1OQ5tiWHk9Ehe#WY)}GDqbyZ5O9LJ0s_@^chI!tst48a zrj$0*>mN{fHvw;(FmVU649`G zb6hocLXlyvLB|Brh9V80Ct~HX*(88l+OW8}ZPbblBx7;Bh#L?{%N%<)mvMzp=Nkwk z^{r4QEj^#IN2kcAF_N`Sag7tepv~yCS}IWbsCy?|_1t*$u=YC0ajR2CH*6e!-1+7v z96LlYtYw)3m)5Q{H7FBIV@%?!KlRr%M1_St`tz>`m4~g?_4g0)H>}?g7W<}mC*%6; z@5hq%Wru{=%_>(p9NM*L|MA4tt))lo_I{XfcyET=YX5}26C$=-x_@f@QMBxz*B(y7 z_glZ-8lCLhV71Yu8jgXEpAjOFWrWGx3S`k#qybw&NWqp9lCMmmgtzpPxwSeokOra# zBeWsOgfsB=x~s<2xpyz(+zC7L6wkXNbph5JZgdSp*iME72*Ct(WK&xv|vB z%yET?Cph7?V4ZG?Pm@%lO%af=T<%19iA4^hDC6qeV?P-`a#7uYHZmHF5K`Joa~bI4RXJkx{yDggd_*o*)@N zGMj#)Y;|I(zpa*J|N1+%>ID`U^6!4MS~x>G3w7*sgwu4#%TB(`rB0r;t&)?UCGh$T z!jMp>hc%u=zpuxK`DM5>gpn)^<>J2kbFyzLrED^38W6QB|P77~;;q 
z%(m}dolr{B{ni~jdV)M#+hdqQ9bH{!esJLmjq6LcJ*N2$9%3v^1qME9K$GDEx~{%7 z9rdRlbnzQnI%w#dy8DH8#N|8?qZ7>-p39OFt++`oi~jp%{y(H5|KEOiPz8ykZZkd1 zSx84V!GGs3Kx=b1Xq^7VEQ#(8%dSpHgNm>UzN(dl*|TJ$Y+I%#E+Z^$+e*O zru$$5M1=_mT`e@7%uM1!f&<|TVvF$aov`IPPn8MCYWWCh$d~}5ePk{|ip*4iSVbV~ zWmd>~y#A(6j5c5(JMO*yPkiNFWVynH@>W9Vm65VB5ZPk(ZN^=x6lsXIAT3qf$6jR} ze{Sm8J`W_YQkKKucjfqwV2>gGdgZbv>UCKYaaclcqh6!8$~@*Gt{fb!dKpcm54PII z$zYft1tr3eEQD&gWgOeiks1*A%c6&mF^PWk9{*D4lPx@U1VNQkhCI_bC;Ny|R{|Kz|ufz#qU7c15qGI0HPEFU0i|Cg!)Qxe*m6+6(axJYk~i_RP_I6i{I3TZP6Ob zPzD-0Q68eRsH6CbU4+i8TVI&ePM!t?!1hKj10+Unm`xE85dB1cy%X1rT$k3hC|#U< ziHv!?3Y>O`gQV~bfPm1fk#QnO@g%W_G+QrFX6{7s)e0>@O=^M1cctBOrWLaB{=EOh zmvEFJUMRGn@Kp;1$ZgppW)U;PdY9;T^8*%PC6c?`ySqg2CsnJJ*UUm0tR0f!9`$aEkm%!=xuajDS>F_|zC6M0^iJmF+#Mo1=IanskGS_IV zo3%YGT+i_D&q!c0SRaSw|TOu=ZL8cf{QL6MSDk7x3(78?o z82bS2G$n%~-x23=qUr&E$LbUIt%}$i@)VWpHi?zA>l(VA7cr|qe*GJDdgnNqX^}q- zEOn#~gm_Fyb``2NiS7x64KU?1UAj)IEvVLNdzE;6f3eqx_-_Aw>3`xoa`#iWcK^`Z zV?(OagF8x1s-Lg=z9XWv&}gvX#|ZZ8iPk*E@jZjV!fyqgn{xYITL3FdB6~2Vv7>*EO%E=X4MZRqIi! 
z2>mZQc{libCZz>*w7yg)*It&mZaa|Ewj!|vZSFDLL7ji^k=gWxXzXx^VmxN%3rj<# z%GqC;Ars58e22JaIZ}2UEytIZBNW98RGIa#4copeYdY0vD4-NSjik=c8i4J@_#Siz z0^n@&WvX^aSgOz+GV`^^Pb1-x-;}cl0z+Woe^$50&r#qeb!>N7>Z=WT$ zHh+5}{#TEW?Tc#EFD#X(_o&$PZtM>&E&Y@BN=@qLjj8a>j{4>o>y9X2ebZ()GnYDk zUd)u3Uj&0+!s>n@f+Zk&SNp#$)oS0`a>f1M8UqewSG2nw^7Y*sTf(zsQa#(AjkJlF zf}s-LmF@f9tr*DqR_u`AAebAPG%MDRS6nq5y)kIdd)6o3wVYpVcd(+?@trN0RNl+$ zPkF?!9m>x}TdU-TmrD9F(|gO;rjRmOehX(#@-ulFSvm3t>v;dN?tu4~*XuPexb%Gj z5@eN(>>%pgkD8N$&~_r$7-^BvC(bjT>lP3zAllI=8YDpFYLe5I_$`Aw6LdvR;LIT3i<)7WB@^wZ((|3PomSq$WS`ni5&BM?)FPEc;#Ekw904pb z*K>0zFYAX7bo6FiKakU!OQ=;*&qf|05hansQMX1^X9w$6r+jA9!N7BGD|lNs?Inn+ooqv!KMiI&m(zX@%S8)b@@SqY~GG z15F*RTCLmXl-j1aL3qWs<*tSF)-))Wao@|Ag+HNaom`8PoACI4b2>-||L^`h{%g@v z@TZhVEXTL|qc`na!GYY%E8KpdP^6)ZHc4DLbZpx=!tO0>OU8J4-Z<SFZ_C+^hU6D#YEVsT#!9dda#x9QHs zWzX4UlS%a&;VNkjfp~pJNv>Wj$IFk%=rU6#MKES%vItCac;lS#-8341ocPp*=B#N5 zEqC{qk6xSk!Sy}1!ACgO^Ftn#bu|YC(bA@x*#ES&=w}HG?Uox(klv|+eyK!1w`se< zkbZ8V+s#p33a1$5j{p9Z3CXdj;3nWn6j>8?fo-H?XeSB1#q;Hn9E!pEp zmOhN+?#vOs58soKJRoZ6R(Gp=z;}uOTQ|+KVwf}_hk8@EZ&39>Ug(sy zX;b}*33+me9ez$)rpDj};WuU{d8R@~eAR%ePa+}=8gjqlgcHB66d^J_pp=5Gr9n0&?85Xv>*SO<5BP!{VTEW0|8GvqFgwa2b4 z*ZE_}>OA0OCije!j`ITnFYP^myQ%BCuWFUc09Z3KDZUx*C)$+drFjp;tAJ;tK1fY4 zYldOVHvr^~>mi5S!PMIfCSF*E5zIC2fOB?-@y=r-o?+DUHhZ;N%#<|o%!U28CT9|a z{o@40bhJFJ?08zb-}}DDY5Ub}U#4ba<)>aL|hXm{dUYqM@+;JlvW=x zu4s!s*9(_uz-gIw_`y96FWU(m0nspdZy@uH#bl3pzur~ z1?Cvnjt%Guz1`{q1A9V4TPbr|gTq5rPQB6T@);(^0vn6Q@}oz&Cq&toxHaXMyh|F( z@%sB=(ptJXVe7^=tBu8Y{k?uv51z*7h{J?(N5_a=z0sE?CQinp@Mx?Y1`V;>Tj#Wf zJma=5%OO_5jOUl>(NzwAEe=h1S-k*;4#C>dX&Eqq-PmcSC~StY@g(UCn0FpF!z2hM zv->Tm3*AjPL`4`pIKkC$h2v>yY?_e3A}EEKL~8}Ypf30NHAVZzVB?#PYJ)GmUwR+E z(7tws4$;A^+BsLcK!3tLJs~=RTSK`;T}tH=-+*p~xY$VC`kU*bMS8Ypi23~08O77g zkGjlEv1{z83QINWp_;ity00 zL__^UH`6=vANA@z2ns#ckTqv+-3POut7T_qedu5CkNXdgclY(?EWMX@;OoSpuTKLv z{4!u`^m%c^iGOO2hi1*^>a&Y~opsO_@;_flvR5SRi+)bab}!PNFn8M-XOk3X^|4oG zK{L$QHgS3T^kIdTin@Gkz`KKFE!k3ZJXaZb$DGs!=pK{aYJdkD$~uFrEG6L16^!GU 
z%_a0odu5>HJ-5)?+Z%hOzsE`6V>YQNe=2pxTW=5X;Mr#3KmO|5k&vMW=pLcZ%5tUO z*!;3$7eiex@ayu7qkzjoBda2;q&+X-`H1nbOX;XoJsL5tSK2ma4OSdA06Q-iS{I*| z*UUZnHx=18;j%)lj|wgaSW3LL0!a6Rw)bWZq0bWbGa6wD{@=6p>c>0X@U8l9w&eX2 zaPyenKNU$f_4cF^GkZ-cx&(}QDPwH7Yt zZd=?W$Y0?jcMJ?5-`!678u-NLZpiFYl>BPNwdLYP(H&J`)gBMOw$amC(b*_XX!}W2 znTQ5J98T1em3-*~G3`SZM8;rHpCcDWt* z=jAl^4>A|^_umicw^jYKT25VhBCfSpRw|S!Xf!8XxGw3KRZ}|Vx2kX@!u4N>ueoSq zC}NNiSaYD}YMJRstxX4kYG^2(&?-}eyG&6j7U{bEJtO>AVZI_fT$sLak**LQR)*Si zz#FB=_R(~OX|HH5POgKPU*xvPZ3hk%Z(ENxYL>U(XZ#WRyyM2l@aF||f?drC3C*p0 zuL|CXFh5N-HZrYfmfrz#(JUCF24l-+uSa=|IRlw85iv=`_e&GMqtUWRtR`kszKBRj z^I?c(XzAe1sF8*!=ea)TB+eZZj-z$_-uf5XQGe|Qnfd{ ze(jy%G&=X|(~)aOEIRg#|5F(JMJ+L3r)`ZH`9*WA+a^TxhW4` z-rvz%@>kf}1zBIi1*8{!OuH%@MVkSs&qf}HKF*Gx*zna)^HEe)_IS@R>7L`|iU4W5 z?(yvCJ#ZH_7bchsi?W5RRU$4$fMgJ5RYv4C_-7-_Fp{OqShX-%RVZE|44pwCe<70v z%%ZLr+M+vqj_*2{fBSflW%Qn16Rlj|wlOVN@7cvXco6PdCOk$H@%A~_6*Be6*3Efl z?qRctwAO;Qwp>73x3#z32u^6rXom&JG*_Y-G`$Et30#DH{8$fZ8EMuk z;q73)1=WTpEqf%aVlY#O9p|LJu9`ctQ8)Ff+nnoRx2{WXPMGa4DV#9V*HRN+xc5q( z!`zsX39~nZ?w=6bur8YxI%9`}wK!#6+o#W$9qc=dygiLG13PM!8$T?2=+wQ(t#j(m zwzpICJr^#&6MtZy)rl8}M;_;y|498|IN#KJI#A1{nQ?^{)}thzaB%BL0Ur#z@$duc_JiB@Pr@K?DA7a5%$uL>7XW=np- zg-c3tKtUGd4+4sc^6IiO-xq00t~ajEJvfz6S11dxR9{Tkb3w{VP|hw=2F0+qWnPJk zc4yN%h;HiKEAg<@q@oFD*{Wy)dbXNMBNy=uB^HI$zN`T5v&MwHIy}BxgyUZ6A=?X< zTAW;ENo4QzTY6<+*Imtd{pcwI=8eY8skxZy*%ymlIi!si%+ADTUji!h+InN)P6sLV ziv_cJgv(b3gyNTXEYyl*VIn&~zUwx@SS(D`QLdF&ERn^ml38qSq-L!p?CRsG8_lf^ z*%k__n%yX3_ZW-0kJhp+#BD9f0qdxIr0o?dQaxg<^&kaIfd|n^vmWt1NEW-)3KSiC z*kYGl0jrKZln}6%VUd*iC;q}H`jlztL1k)eV4J67J78kB9WuGkjHt{sn{}VJvKE`B zzA}3G4lWU0WG&<#3YlRXoiJg&Q`VWRstL&lbwPG^#<1MUNODm3wAE6dfx(u}-(!D$ zhRWLB(NGiXCqbS6#6}dgNBnbk#`LDY!hT=(%iQ}rtnQcGJHGd|?(Qi~(-y}qvA=Ut z(owT0^_F(Qa>oM(Hur+>&l1x1j!x0ucQ{!(dR1Gy;+043`V;SoXAbX=&(7{XqrHA^ ze6A$qQlCZE1Of~!Oh;)b4ap#D_!1<1aoAa;Y`ieXWg}Ha5pC9MVRR=5KM59EUy+E| zM(+CEpM=@S!W@c+a$~~Gfts*%3VHnTtLZwq4gy{%=z8$@hB;;H7!tQ{`fq;2ZwE}; z2&@(#H8mx1hSP|}ZlczG<5R*Kj0y?NW5(C7?;6KFWvSL}hcWi)vV~-dFjOo`f?h$d 
z9H>}ft5UmMuY?Bj9rzVaY(G=FlI>@VX(%+MJbfO6#->pZ@ywITR%r;;dOBvpAfZ*h zOiGkj)2Jq%=4ctFLT3qGHP$;|MPnZRfSgq(^(Pr&nrafw5FX=(!XsEwfP>Huc1Is! zID}@xFB+PiuYX%W#;l9j5t?`WxIa%{k(mf6Q)*=W~lk&n#Z1S=2R6 zX&ZVr^laCswr}j@?96kfG=7?Z-;=b*gdJv@smjT02u$no3H(1_pBThACUTh97hNJL4t7hbirR%Pg%_8Z& zn-zEJq#5l&785yo@fJ+8S(Da$IezA5l{3fWFwD4s4svZ&bjxvxs?{CeM`ol$khORnGi`_U#@0V(``t`FCf?+Lf?%k1hFwcvd&wPHZ@O0F0R zND^-CF)3>;GH)&W%}ZX}52$ch1V(s%_N}4X&x=1xO!i1#)|rHbJB~!GkJR>Lq(9KS zPfdR~v@B8h*);PEJG61VDR@K2bfQuGJtkKo_(M64|IJVSZ!*nVfnj9!$z8?It?TxN zl@*=HxPHyO-J-qxLeuf*hXQZC?c}(X#!o0d6_ix;#xSy}cGUbHRgG@!Af7-kCr}e8 znb9?lRa0LblGe`-l;0AV6Y`Q1$9o_8W$)+v^xo{_a1-|u(qWWAJmmoih_P(N@L2W& zauJZSII#Q3a`3n?NOHhy>LT~_4C{4<^J%_)e!PNwpISw4EKyCamG_Bx#fXgIs+rI&P2n!5 z9l#foxhv!cN`zS{{N->rE1AEXYN}2j$Vz53t)Lw!Hyov|a3#+j5?*Gz5)O29xeX_C zmy=vQ{L=danyIGHVdeWNXucX-Ru+TrN6SYwR5s5a?vaLs3OLc6&gMxCbrdv$q0R`` znM(IdyCz(M|H04(u0WmEB}zEk!$yTsYz|!JjlLcC-}wHo7jguIRdUpe%P}xh)F_G+ z+4Ag6NL(7#?Phs#Zyb93<=z*|n@2Cy%&Iwfyh`_$`^}=yjbXdFiee67RiMGP=**N@pm9*>Ar|f6Uu7-|fjJmFlWpv8rwUw9S@^C-LotAR+Ys zZ{L*tpZs&jvf#RVr|xz(;H#+kU_oGl%>p-WfK{X1M79Rx9ZKNx%TIw>gIc!|*LHw9 z0m%8*K-|Z0HHP@XW)0{y{D96+9FuKGp>meR-yoEDuszt5*pnEONgj#ZHS&WV;7Tw% z8tNV-&rri}cZ)dwhB726>&#HVQ^w;a?i!6fEgADS0I?caNJwI<+sk-@@J5;3zlRcv4r05%r5el);U0WM>HH z)6W_vT@=~VkCMCReernanW>oKA+R_UJiDZ%_`-!~-i=+kf7-&$bKBGQI0~gC`^oN4 z{im9qT)y!~GtNS}y!&J0lM=wGJSk~91xwkZ9n9We`kub%%@}N1e;z<1yDoh=)pX{A z3OFM;aO8Q!bIEs)K0u0`|E*;Ew)0bP6bTMr`3Rf5TDQ(=9Y9sakRxNTmL*9ulV@&W zKW9H>KV`g7Od)TTe|pNs>#x@R9IYSXn=)?cR2G>P^ynFNEsiKAP4V|m^Df)|jl2~F zgAo>A z?Z+4sc_8mo&p7bS0YSI`1>4sf6#%lZP~NYAM(S<;%`YGbPSCS8ZOQ}bp7l65GT#X zj-Rb?H|ybs(|^6`_iNQ2U>)-QNit=f{hB#%yUdoxyHF`iv((VE0>oO@n9KFuKQSAP z=h|e-bIP6;>+_D+=tdVSb=J#;D~m4btWQTj7G<(OD?^_a8By1#dD;{iDQ4%Yii>O% zeiEWZYqI~lN!8OWNfE*@mdqM~3FxtOe3V~4;1wrI(&clFOftcVJzo?riU(J=pwA)0 zA%jf-9}jMfmJZiU`IcJ2)Hahi)%`vvA7&KnbCvOmazq0 zDw#^5r7Qi1gbo3`K+gbMSdC*!gVPj^BV3rf%}Lk5DN2PSzrvNh-bB+A?Z|IjI_0H+ zf5?Ru_|h5F+O)Eqm(KKbb{L19bg^f%s)h29ek%Q8`hyW+S4)e&X%rVtv@%_8x+KG# 
zn{-jVxo5NHea~hEJYl-jrCIY{fsVLb<|dt5>{eOkyX|qo#N}Jh45ya9TFBL3RdS(k zcK=q}xX7r7fA6T4`X5>B7CmW$Tgk1>ZgHEo*V=cc?=!xfdg!+wTi=(?Hq^PLA2xI0 z&i6Jwy$fwBdbcf@-p^>ZPkdUk`M1%-X9|_E<(=#n54S-qH^%Q!d~$9~$jy}XQBt>> zl^fnAp8X!5YPJ6Jk!UwwTr@xNtV`nW)HNq~QekjhLN01;9POzWBhEv4dXikJf1y!P zz-U}YS*V3MFmGW_J5X3@#uAsP1=tK4OOq}j3n-I%f_XFaN;(}D1{Z14ooG^jlPY0( zu)_?RF|Lor7S4me>GV;lKZj_JR!ZrFUXJbgF4G&&KK+y^)PI|EK^hrGoSlhVPMJLgQILXG}jZwBiiCUIjW9085 zMtC-->xGt6Til_t{bH0?EGCl7Rq%nB@FvOoyR_8 z=40bymK57#28MR{*WE!CgCN$7a6tZ;hV4?W8xzA?s=SF)7@pA4^>JxjwiW{0xGbq< z29th@AzX%D9Jtv1$cvjGZDGnE55x|+kx{pypdF0n9y5hK_JIZ<4dE^=bw2DC^zZ)e zLqY!TST9xLriRzqZ{`V2i89e9If&$7S*na@UuKtc8O@q0nkicNNGpfj{HF{Br4G5J zlJ;S}v=?|&GXS4K3m;*~ZP5Tv(q=bfg5k~B-YK62v^l~?iKZupn;Se*?EblcnD21c z{*`Zfu3(N^WIoN5#Ffc$5OBJtk3;9|*AoiJWFDJ_c=}rsS+cOj8poq?YbHB5)B`LK z_{5S6tB{XQ7naEm09rZk+u$YnpC$Hd2m(0@Sd3+|Pee)GhP$9A0O~R}7vLCkKFHCz zK>NUf9|FpSRpJl-;%n0J`dj7r4*Y`~nR(3blv!5D%+M9pt6<&CG5N0*j4}=>{i-*@=UH@H^jMO%((>~N zO>D$M1uijL8-LFLWl|m+lylEPLwA|7dkFlYo}BI|@RDV6x+CRlk#QKQkt1#ZZ7gA> z9Q<_>kW5$u#%7Lzf8_om%j z2mXizYmm9UBDGO~LX$2#2`&oR1A-|)5GGVkeh7S!j{kv@glq;O>tU$1Q42Kv@bxar zcVMmp&1a~f0cabb_60IhYb6d;_GF`z0lu?u1>%SShXZz83EU1`MHslIumqG6vsIE6 z;F!P{fQtZt9u}aBfYkw%3*cHT1XTw31IPReJ%VW3DELG3EEHcqAQN|%0m;dOK3ATI zc#mm35q)s9<~xIq7d)WXIAs#~YGgd|9teB~kk|NQS7kb$$aZ)N48Hl!h>olto(I

Xxpk zdo%p{#s_J-<@aOF$KA(kM~}62-uN+j(V}>-u%gj3TdL3BiMe;~e(JlnU*^6$&=T`X zlrgP9T!Gcq_mvDrO7lAxfSUv*&viH3XKLzBLMF;sFclwkQa}gtqHt9z76ue;0p0Ez zx3K}22#PD}Ht7ZWt5h{nhsg8_mH7B7`YduJI0B-?{82!Sv>p?<0D-#|P@W(ySpoKu zET|)zUJxWCnw%y}01E=O?t5YtX0mY!3um5)q&QWE{skO@5b#(%VB)-f3{`3G9UM5Z zSbiEvD5NL=R3tgVWk_(y789|DpoIqT=H5F%(70X?_#*M5F4y*l&UpQOBG~({f6#(- z4E&NhVqYjAvtUCxI3jVXR%yo=?+Tbi0pN57{x%Rofozdjw&HuD`0OauwE<^&JS7*( zLl-Duxlk#=8+i>}k@=vFL`X6T#5oiK&x-=H$qF~UIjwbs6CSVm=W zyd(tK1X&@1!g$l?!!gxZ5h5jg)xnC0(w>+MR?&<%n2EA>!f(^zMkYHQNmoflxbX5$ zI)y|D;VUJt6hR3)(`9m*Iawm4@}k*92O+KODy`g@>zB*-%M|q((Nxil=?vZ~rC3A~ z=TM<#o)xH;nX5AcF@TJEpJcMAf=rnEiWDX`wQprJcCyr6KCD7bf1{f`IhSB zW!CjB`kE-mwC}PP_rn&68{9-}5+$fkrATnqV1%;>ZBUszO)N_Ake1y>C&l0ichF6m zJ1r2^oe^+M3Jd8e9pk_>g5N+_6_jzOGjLrUc;E2eYFuST)VsBnm1&xm<6}-r5KV%Z zvaoo~@yJydy$W#cxQF&pq%5|{^3i#=91nAL1gci)Zt~7TT%-i_ih-yrdaM z8vM$;^2O3ibamrCZ=uP)x-BBkQF|W|333pZ4x8P+jUEo@(qppS?<7h4J>~f1OPCYn zm^X%g#|h#ab##PDF;r^s464q;dev?xxbi1}Il&BDTfEi|KxOBOQG+7?-+E9hyuznI zoiq?2>N}TMp$g${gE?-;>s{4w=&b<}z6G4iyqn0!l?VUZ^h2lQ$WtYtacN zIK72tqd@lKE{!qH>Fv&Rboy5n2}x1kMdrc?_ggZ7PP-No5rh z`Vei=3|z&R4h>QS3DMI;5V5(0K`fXjK=4Kck?6vEqXjBhQmQqJuVA5Uf|5@bqila$ z26~07V8v)h+5Qe(LQMcol|v;MDQH5{ED=?h$slBM{bYd}0X)Z*mjzNih4N!eVHQo5 zjkgFyTc*D~l_jN>VSMsD2D!HH-98Um%2c29)oV0|FnjH=ZvxY%_EyBm;MIoYkGf04 zzu(r5D(kCKtz7fIZz{LhyE$}OCf2a44!*g@6mV?;adBx=Qd3mi|WB1plHTLmdUZ_pg7DO@*u6Cl?1o@Q- zau(3hX%2f(nAmHc%(dcLMr&H-NsV4Hc8<|iF#BEJLh{F2WegSr31-c~235f)1IZ+Fhwr(CFv8skA#F0m?C9wz7b)Tu$RtQDY5iSLnk(!Y5|=t?Lkm&esF;&BiGF@hkWlG4#49*j>Q8wmY` zegXs~N{~}`u<2+cs^~>IFfecw1S8%k`c>bIj!kkR2(0tyl`EWLxlJ$<}D zsd|=F(W@Z1l(kSyprCA%*wLm(=*kH2cLH8?c-x=|r52YY^HnjRKZ zVui_}<*ZXTO#8lH8c}{V`tHwgksaa%g?ca2p3k`=A2(gNb=J}PT=wMkiVNPqCEpg= z#f5GNy;PwW8h9^TU$}MQ$fsq~X4hPBup1wK=FrDITPzI*N?yDT-KNxgH(#5(a(HIe zZlm$lr(pt0Bp`L}6827Y(Wpp-txl0s)@#nztBeqDcNI}(q^BpW+sEZHT!O_aPw6%p zoHQ}WNms>mghq8@!=&^fPZ>~$jSwTUAx5RaFMMCre#%ZJHV1P;RCraXf-kEiuRw-+ zp|S{_LMS|A-xt(F9(?J%5*b{PM+{pk*K(lpIBinnRgSW@&~geM-y?z}2p-=&%Z`r^ 
z!ZZ1@r2^twPYJp1w)8HUh*_bHl75W*6t+_7!o&tUcmouagE*0EjmX6FK5Ft)GQ0@` zcvT`?!hm{te$KNm&_Iw+y>KFRvVx(FVIT}sAK0F-B!fmlP2{w)Xa;hHFUFXDN~ZX( zQo<#%CJ%|ArgdN8tMk~>u>i|uaB7yxmYOP9nG7+;NDfd!Br^j^!F<9oJ}kgxajHLnG*d$%7m^CN zEcHI+O40!u2;u}bgaadHJJ?M&ypDl5tyYN!L5jtmx(;w z>qcyRHIvMyvr9+JtSzWZ%~D6%r5ZPOe(o3OR57rrGWJN{iYBH@{KR;HAo^|4{z9o= zJSYBWpnM~T6H*Y;N90=)!Q@C5I{Ow9a|p|`vnOS@R)Ix`7q&{Tbp%vVU{sWYeC>Cj z|G7{b^#D!zU>kIlfeE_dF5m(Irltib@FC{46oW3 z3%&?=BS<3%5P($(J$2~d^_zAFLc%R)P82#B^h%WUyDR_gY6I-o3P7OZGg|1L0W)&N zFwjNut~iJl;4&X$$^c|0N4bfRm83j?Tvgh8DpxoF3CXng8C;hpA@*pJOM`GA(L(cH ztx8(9j;hwuYfUA>+a~O!U6d9$*wSjU(^~=-z}4G2({#<1@W6oOruzJsu9;<3C++#s zi-uIb>A#i?t*S^~S$^b{wy!R=rnIDd_q!?D2YWu>p3y&HYHRSX_m()uG5zN(`TB5l z|G2B^U3H>a4;ubFy0Ch|ZPmqtnYE=))tfZ&j{4e>^3-h$_<3`DK zDH1lIZ6u=!46%a5E$tMDoahx7&h%9ljGT(w%{fGmZoT=9_z1epL`egrpeL*eQ&s$7 z!koKsfB0m9s?t1UpGzb#V}Xg<9ft)?sB~*>e^k6JXjxWd(!>}Si3;)?VD+S!MfX;c zT%80|3B?6xD5DETBd`t*PUDmqpO+s1I_-*%SrJzl7d#fm>Pg0~A62+UL;JkZ+>7(& zZ-h*n>C>7>p9=}AxN2ul5T9K@FRYSPg61onu5|{N;GqVYKNg2Gn=45I=xW2O0)7*nvMe^7Xw)qShqA&(3J&NYXKJwIhrY2Ty|x53%IO_ zV{phS;89X=l;mRA)4mwIe8hoItXT_NEr$f!__t>v>q9dJR_TIzNUx3W=R}2r8Bknb z-x}BD3dL(5p0^nftFIXSmtj2iN`N(me*3Y<=EYg}sDI^qMgF|(K3v@Ck(hpO_(w^) zLtpA?%a;?n-)r}nwRXDCy`FAdWtKvHC`{!W&%EtGIXT{zTo_l;EIQD>8cka`>l16` zhOLR(KU&X6Z9Fo^sz`#3c%Ra`kHprt$Ilqv+gT;#n(e<9A9~@ZbUwfC`PJ=1i{dK} zK3JcbWHMFeaH5)e<~SC3ura9}laeCziHR_P9zKYFlTSn08tf#8BXFVM@NV>9Y+dY`HJ2}3ZBHE z;`O&s;DyK6%+V}Gmw|Az8yZygi5wWT&ylD0;x@?qi5}ty5DEZuVkZebKxl=K;SoSp z&1DOK#)@Ns!RdGmXw1001c1CHz#Bb+fA@Ru0>!(A13Xa>zVUEis1~+t*4~W)0(jR% zV#nNmlehH%g{9!IT3l>ass%q}TqS^i$Dq=XVmXEzAb|)zaTOHr%b-gc#ks>!M8>ev zIA0nrxc=)%0}8mdutW(?r~YG1dZn`?ZPyud-?wup`C6iHiFH=dhJs3YWvArIolZuiKow09hRED? 
zSIIJX0gzdyi0sFxn%12YvqYKJ+1BuNn!hhBI+f(8g*t5w>|V}zl7y;iz!vR_!*ZZ1 z9R~-Sak5~I>*yLmgbLWs^3zaQ0TM2D!~$iX3-{9?RQpfGJ|enEt~|tZYj8D+VCqx{ z!ajXSTjTL9cBYb{f8@E`vVj8qUkB_3lz((9A#v-4=a^J#!27PAeZHt>d{5BQ~J;IVi8xrM#vd8gKA!QCRr|?iG z1#7M179m!?0LZP<96Go^TW%Z}KnJ>Y_gtVB-wx%sZ#e3=h#YQzTm|%RXKVQWqGEo( z5hpuo-XX7@tQzp4q-Q^bT&KV0ZGFTAAVK4hcWT2K79U$9&Ka()JAGkQ&O98Y9JmIL zZx21e%3&O9q*a%DBtRXFl)!uRsrKxM=miKzKnasBUI z4uJ5+n{lDZIGSBTmV5+#;RuZ%ZsseOq5L9*jrOD&v*HJ;rGuZsYjUSgD z1+Md(ZTZsn#=-02P2p~KhF2$=$hv-&SQ9D_UcVH5+sR?s`1ZmV?WVEyVwG)0{hL3E zOMMc*$q!gi?OLqc+o>1@WhYhK*x}6Mv~HRF(`5I1z2hcfVL`VgPJ_QILmhrMU#N^A z)xn3u{(^7)507~d!Gh_df8~*2DLv*7>@QMNLvLk>%vshOT(?2J5nak?fb%huE6IiA z0;NILfpug*k{^I^aM&XRkFV0XjW&d=F`sVk-2nM#GFF-W2(CGSCwCqI=U)g+;V`>j z&#e=w%n6PFp(r#0&O8F?h8yoB<7_=fYBGNLcY`3{jrb*dsywtGtz&!9?c~KZ6oJe%p|9Ka!>XI{5|o)-0ua9zc&?}u3Sg-ABx@l6)|1@YY|{IB2YDRQcXY(yqe?vhEHTQ)&L1iDFf zq>ML#zj2Y_w6~4A|Gd5UHjQu*dc_nCW{1G<`|5}Bq>TRQ5pSUfbUfBD0G}S1n6`TRSZ@NJV2#c!OBPr zP0r#gA)Q4Oa1{t-xiPMMr*anbeN9O$NL9tCWdStkAEOx4muksR7SWnu^~pVkY?xCF zX%2!65+1OPOho0ikR$=H54DmZ&SAI50?�{d-h~+9o%GJ7k~@z> z9I(z{I+eP=kSZma7C{F?ualv6U|>-JdTy<_{#uU49S7d?(z;dR4mvpNi5w__ptE+P z6dqzU>8-dS-=s$ry0!ja#JverQ|H${dNLA{00~0~gBpSZLVyGiQK^I(5j6~ApeQE@ zh)5ODDp-|-fHEp5U=Wl+WfC=@wrWw52#A1+Q>)e?sNhgr5p4?=$=!#3zpnqf_gnY7 z|F!N~e+XyJIU)N!@7eF(&wd{05%bEOAlL2`8Pf-us5X@k_&YZ1my%R{(3r#S;*=SE zPATH5CSd?4;c1E(qI1SzIY!%41VE2+H1qM-5d1UCr!6O4N>XKEn(GAMq7ZoMY- zyaus~ttw&gHIM}D3M~SONfHfzMGSP(KCQl(dMJ-t4rO4}E-L7cicYby-Fx;G4as~1 zOdYVwtbiuB9S0k+ShMN2uYOn?*a(!=0Z^s|QlUI5yKcd;V=PigQ@9<-o;lbhtYuL% z{sg5svumKVbfmu}u5C{wDBDH(?zetnws>f{z_*C4T-DzcK9}9nROT?0h1vJ5`}aiP z=MVpvA2=uLXacpu2h%^>x5~Pup{(p9D5*7p44$aB%;m&#w=H(z6TP5Ywo?g0PD+qh z$}^7h%g8ep#{g4s3Fzq*YF0>SpOS47vz-$pYn;1qe^_fcuwlO6b!yX1XTW(DE8C6?rj;Eb%{W|HFbz34x z^nC&R+Q-U3j3s+8+&n9|YnVbTE9t}esI65Hkb8sTf$qWMFdaC%l}zFMvaAqeP#0u# z)}X~gj{vE^Nobh`IU84tSJvi$T9$1Y@LYoq7F$#(wgrOh_&5P-ClO(qS*BDLK1UKx zC570V@*5+7LEH9lja__d?Ga5*PGIH6-SJ71Bd>;kskW>QTbCpeN3`q%rfiYDBur2O 
z0&rf6h(e&-&Y^Tk;YueYVJaZo=GR)o$EKDP?rY*VS^pvlwx`5`ATdDB7~+6AWkmfu9Gw+M)!`w>2Oq=v}IMBZR5O zjdEAxhS7DqRLdPSZt611ohdqQZ7*>HEH;4FpmX5= znL>Z06IY9LfoKDT$>`?MdZgCu^Q+C3}RCTA=t;RqfTfDj8vyUg+>M&)CPtF&& z)9K!f#?%ip>vLA<6ff`oEncx{dH;;>-~2jV5#{y^uYXx#=|g(%&lhrCt)ds7bNl+2 zKmp~-r0cnDxf>t+{62I4<-e|6`xDH)x3o&@O^K?174zxYb1vIIeZ?KZSE_5vbQ2zy z?ZNLSZqBXq8cTI6T6b^XhGmX z%G^vQVz!dvFyMx!4!W^ zvD0bt1hV14_w3^aY&>IWm*^q>g?$;F;RKZ}y>+9l87Q>gB~#kR?3u=5B73F@h-fM} zj7SO(AHs0v;pq&0ICsoU$Fi&axn7QTu9dm1p0tK07FP&KA6X?N;^`5}$xS77($)uBDslKHKR-)}9X^9v^PGdF!YH}b_|TDWp_@72NQ zb&Rsb*5UFuu@~+=8B=^({1EZY_=p!~sSm{UY+JYfk>vKw$a(A?6n02P#mrrY7pGLZ zd|Nq&L$ zW!bzfR4n&aG@Ixf%yufMCm9F$6a{Rc?U@716d=<))%+x~T)U=tXU`%akw8wTah>GUW@%@QQO5>XH>uP1> zB^9%mw9Lt2+qE|yTeMqrs4F28+(G#rM#ML;QS?Y6mYvGpPJppAaeJnG zK2$IVLZJGIe7@}W$1vE=2GlVOuAj-?KO3C(z!mb%y&S?I4wc)9Soy=zjtaZQk`KkA zeQ1p+Z2J0d$FIt}1Lhy5FOJV>t=W@!XQ`t7s5suzZD$-qzp^+WIyTz^ucs!9vbuR6 z#AjLQw~kVr;%;I6bC^TDKG|l=vfbvM25xjp*E{Y-5O@)bK8TfB!**iPc)n+TZTM3& z(*~-0eoteAeQEZtg!8 z?f>AClmG3ltDnF6klA%R%_(l|vgWpV&e$cVg27~3PzNn$jd|DWXn*zA)djUl=A_@9 z3bH;qa)+Jbw9(HeooKZih#`)+^)%51=aKWII;XA=X--F*CzJ*)UtY%QzF(6$(YIe^ zU|A}wz(*bHTODLFw^GC%vt6-w)7Ozt=J}i@OeTxF9yWct@ax1Azct_XpPjkgdCBg( zueJ~idyUWkz|E$52l@S4-GF}=SwD2uaKL6%)kzI@9p`CYF!=4=xU{>noZ$o)i@B5Q zr59TY8(9$StmkP1B-NU(2G^0!>%PV+F~qoObVQ?3iO=OJqf`0kG-@m_fSiDl>dkoyy&#HrWjSsH;i zh55rqs>i9ID?R%<1UvBqbEhPoA)*OV&_E{N!-`II>4ZSkLu?39f>-qwEE`II10nL| z1^%$NH5au~Gnj)jz{XmuK`YgK8)kND(MZvlpTwH$K^_Yn$)yw*We1T*kS+LUfCAe} zW3y^-?`UEc0K3|e1&9%fd(9I50Qj#>fca7Z*3S`_2%t?NK^r~7Lvj5y9BW{0X7<;( zo7ICBYKuv_9n*nwi*i$^amuLc4LjjJ;LwaYeoHRaYw;uIABJ#-CE zU>WH365o#`PGHP!+3R#8@ffHdU?R`RE7S+!m?MK01F$6%i-p}6)^IMi&9aP9cbt$F zEG5sXcV!9fMeK9uhRaJyAg)u%jcW6foj|g>aL{}-r2?9uD zadg}rh-5wmTVbIJ{2_?#l|CRiLIALGD!(7+6oBP_m+iAyRLe27yCnr&ATLtf$09gG zJP}4Uv>@`D=v13jcjG&itl2SbBr6t@fDLaF9`AOJNmLuuKTRYHLIm8IZa73ap9bKtOXTHuQ@EA#?Tm zoe;1lYe>+#5>$O&5GB)!!7eC3^rU)V87V+7kv4$H;*tQ2sg0Ix7;(hK(=vhUFVzvC zo~KDi;TVAQam3grWCXKujU#|K8v*@?0Ep-hm(ys3eQXRv|{al!9&+# 
zEuf85MdL)tlHr{Mhv3>uTg$n%!2v%;XjVKqU$J9xwEVKwv(CrQY<^u;OK}}&JTTwt zlA%FH;HKV=&-v9&X7_Hn9ozk*sQkx!+f8&gpDkN9|Mv3Go(Cn}-|unw;d0QgE0%ly zVftC>9=r|ajrGPWlUq5}pEUkT6 z0tC%+?xLqQVSy+>?_x6WE`a}pku5YPEYzxAZxRpVXOL2ZY$M#&SY{FQ*g;E8r`<4d z4wI0?cG#2P$Z&`vSL&Y~Y9M?K;m0Kqd)5biNk|}hVFAn~t&ot2o%(Bqw}HT1kU0=M zubCSBLcIrs$Y9`%fjd8Ay}>_u$^?AJ1a|pZ^>7JjXyfalA2V6^1)}CxWV^|&5b1(i zg1I1P_Xz^3a0^s}JZQaT`^x+te-GC&*FiIlmpYf+G&p^5FQDt)_|HJkQlD~2s~HQ7 zRqvZ*?xt_2Tq3`cm}i0>B~ZOgSm+4w1fH;X5Qt56@`d!@neWM|t~$AwPNzNsx|Sor zX?}$%bXzgzk9WvkIjVYdvX@34;9ie_>pl2Nt72y_$52|?Yan7puHvFsk7O za0(5NO<>w=*tlRkb)w_oq~x!0{l52_AMSfq48IlrHeWZO)9%;#e74Z(BFB!O%=~gN z!C)kic&g-6*kt`LKj>;s1(}{A?KY;IA9H(d{}E>`xO-N-$YIff#rG|ElyakG@u@Au zYk>?ZSVchr@gr5BX$VLi*os8N$jMvnkllIG|(MLNEXz z_`HW<=T3dFQMrfYO*Uy4aGc>)A=7w7C4mCKJpF8vl^B821cUg0AFl^Y38 z3?c?Z6#)spI)pmF8W>ui?n z;PL>#hlq{JvXw@oD$kbbC{7$aK8dv~X$I^YM;D9XI*~JUAE8B!s=WGb9AKYB$Cpn4 zKv@USUn$#HHyO$N)b!oAFQc8#ujRei);eGV@7*J! zL{_!rp;?7v=zua!sYVCorlYusB>|>E?EFG`-F$>^J%8GKAs}?#NWuOX&S`H5LT6Cb z4ul&&FT1uB;=+6(-y7gF&$;=vcXFL?}CPe2T)&zcov#1Ldi&{!A<~GvE@c&21@td zn=M*Po}pE7s+PwFuG3|iSX}t#NxFqj@5qo|=dRwu$&QWRtnbtw z?Xi7U{n;fk^8;VvxMSmuGmonOus1pTWzq0g&QFdnJ*jK%3=XC*{%Lh)nbp_pj)?+| z`^*dqJmPnhGzx@gJUdhm3<}Sdiu*wATiQo`P*%=^*z#SRm_;jB^sxki1^9a}mAsa) zBBD8uCh)l(wVb(nd1&bJU06BMiFya{JC4sQhhXi3^{Nk?P!NS4y^A}IcYbXu24oFL zJcBpY)d1?F=YAGbluW?cU=Le38aY`Gd+Hs;r{c~RgkZb%G5Z!fTq1*g9gj{od19x$ zuqi8VJ@WJHYvt7w1`-iE@0wDZu09q`1Ber-zrQS}of0c!w`J+F}!jdFp& zFYWGU(ijtfILdj?h8i55LAiAS2z9!fK*TsWdJ{a>4rs6kHwkk5tYT0Joz;Lvi6M)V z&O**>qedLDQlc?%d;6hA1*=@ajj^QXu)-vq$`Q(t&}b-n8R3Bgd=K`i5H0HhOG;2- zPLwbT+b@Rw$oC{A@+av63!G-Dp7aOVda0DIB}iR!PJKhUyqauHpj09BHG9d<-WrF2 z32v(1plM;&TsASxMKcFc-Nbq5$qjLn^fmI4xKB1_uXnO98(dCk(~YdXe-t)Cmjc`2 z0SbitJ7&Oxm3!GpH)!_SlYLna2D5NLc9<*WxI}L3;>ZVA$%Vvs_r-tjYc9YmEXAz~ z8tz14OiWjQaF;6Ux)KwuggP;jw62@yl;0&uWSPBnZzAH!2p&}C0py8|Fa$7?spKRY z-<^G}?D21zfMg(?qov)2EESAk{jw zeX3ps*85gyw~d3_1aAQh2{t+cw+cQ2OYt6(4uB@9aEAd_^(3ZG+?xXaJ$#g#Hw@d?qDvo?Gt)vGME+@X5Ncw%3hhzWJUN-lgo^3`is zr=5ocPc&$_r?oayt@gsfj 
zxSKkLoQg-R?-JkPma%b=yI}!xZi5qetQ1pd4dA{;hn=5FsyYLLLj(XifM+v^u2e`rt$R2wQIQK2py-ht9y!mZveoqS)TcM^lP+E>9 z&wqXTSn!s~279*#ueWab&F%+VyyW&yO?TcjH!o|l;vBf0(pH|Zytbv_)x1yWjq2Mr zGOu6nocMm_*S06OoUdrJC#(v{IebNb)b?xrbw@W?mPdU!yF0<~`i{>2pM|PdWzA;~ z-dJ()uL~}77u-8qlkx=rRgd|q!lKspA1dbFtDJn|{_DhV_B^T}RHnDzu{3f~sB8&c zFbHxT9&;BQ+!!yX%`!9OjlDR96#IX>AiM-U;(lO>Sz(}a|6Qwl{zqn}eRIg>$YOoV zb-l|IgN|C`C_bjwEQmtRf-OH?$j0%B@Gzv~XoXG)zD6XX@o)h~MZ@0mw_8_pmEL6S zy-pmiPXvtqI4L85E+}2Z_cY{tF7nL$)+51q;d;#<4df!+(}waXe!^P!Xmc`hV=H3m zklRVvD?)O$&SlaQ=o|$FZ-pGp;%0u*yF~FRlTcRLSfl*q z*zxUuFZwerlEh|_>Mm8)wH4Vta>&NN1?#yipGhL(DM{SgJ>DgI zgn#{W)c|q9A}I*x0&fCcF?%QD9P();z^yZcz@8|UZ1a+b|F5# z5ETtx&lDV=UEEZgCYb4#i}e$e={?cx;+&o~`vVJT)Mj&=9r;;}ZDsf~rq*9Py$51x zBiA?=x&j-2)}AYNGoHra^%=juHP`yLZKB8i5CT;O1#c4jm1MXmrX6zm>5mVQul)Fy zn{U6}^4fLd7G`k9tk;#>!?#tlvHn@ab2oCt|NdY4gDM31_O(xUB?g{tEp|rg&>a4dBukW#3)V){h1*iaBxYi=ByVkp$-jFqPW2y4u+O4mj zu)+uBcy+%=d>4%ze(oB5x|rVJ4cdU94Y_V9RL;ZC zr+d}XVe#&-g_EKMqm*5DQ%V|lEZy_XQP~}ublD$UENY5E2L`^FnQqLxv$ZklVc$ug zv4!`X%dC!%*PLv5z3@Zj2vHv=Or_KQdcW)Q*XN3N&GXdX)v>8YOf74Rm1;c}e=Z!z z6<$yxC!}-uf#d^9qQF&T;)Le&`TBCQkk2>AZ9#;yd3qW7HM#6dCFA8Bf3}HquG9{% zFu@C?9Q+Ok_WZqqY=#jRpp!x>Noc1^r}xvP02ROw#2Kb(2N3J4cw1I(e$P`eK0`(v zo{cYqEMS0xiJ2^fGKbG$sA_yoO8P19-BTH=x-GW){nUKHf|}cLYfVlrGYxcc^}I1N zO;x3;TZ_-@FI?~HZT;NDwK)^B??v}u12*s@{fpl)c-i-g< zJ+-Q{_b#5>UpU8w&$z+ywPp4f27WY=(GXvJULlnv@L+U4UyIl8r;zv@Q@D-^^Pj~| zm9L46P9gDdR8>9Ewj9|*vV{*_7~r06+VgUY8}~+4fs4SpQ|s#wpNuG2VBNAD4HckN z6r|m-e+w@QR-Hg$=83KRb1j7`d##Cn;c}v^emS{XXq}4DWJy(0RgJ1zjwkxa@wgl+ zokFL93zRuF>9ctvrf$egk*vM{3>t-5VI(2)YNT!=(qGhU5N&xb(m&FH+Zbt|sjjia zo;~AIu&BPA~MGw)JjZoOmd4il!*5P`xcQ$Iymd0>E zAkpW3A$v|8BYWt0<#YR%Q<#9FRUk`?xWrM5$0|~)% zB8Cyf;@l$+LP2KZC~PSg#ckeyeRvJU@?E?2E(StBip&%FbqxX+D;=?MNo~Ks7GxJg zI1EFES#}5qaovDEfBI2d?OV`|P;&jaCX(Ex8k527l>xh};tlGi^t*4n|3qUU-&OvL z-!N7oQp6)$iBHhtI(-C~-E2ixM4URKlgi*$85T@XRYx zbFvf}-?qypB@9>h*6$f@!zif*=utfK=dgj52N|=pqIsg{7(#r&^s;hNm z@8azjQ{&k$FK+bzpHVqe;G7XYs~k5$r_UxMJ*X3zBVrMZ20I4Wid|U(>4u6zNA%ldyT!o7uV9lZ#54e; 
z0G1~~YR-=*`?}DVC1yGbK-k@h2cg&N-f60sZeoo<<{qrkN>EcL@hn#H!;^fMn3lNp zr7E^6zBZS^ixjwo7T*Pgy>THbz|-lx1b*wpxnVwgI&qRLz>%l$byo!TG*jfe{G4@u4ExAouUU(@!Uzq!>vSJN0&<@k=uD~p)`4U06M@e? ziaODa%+B*!0D`{ zfevt}e8fw^Cd@?-7lw;Iwb&QhtI}2J_%31k#YDU?5=E`|Skx&JAI%unm-IsPfX`x8 zDUogj%I{Jvf#$9Ez!D`Hulw|X&!X}93K%7Jyk?Q~UeE{P`4z-tY^E#!EN&|l1Srjg zXK|>2hg_6%Xo^{KJ2tzYBvsK#IVg{W+oGpU;4dW*a43sFpr)`8F%Zm)2?WrAcKzMX zHp8`v5+&e6u`tK6`#~?UCzLYXdW!JbhQ0 zJY(;`RTkm3`hry-@>uf8r+zzaASsvMZ7}#J`qZEcv+v&?Pxa}X?@I6e$G*~yf(s!3 zyVeT5bt+46HuV%^?dA?p$^lMMzxgYuPuyQW0}6f^FG`9&ZG`Gqb^FMzv%4yLZww{A zA60ksKi?B*nK{MB?JnAmQ=LL5vdukXZl}GN z$SyeGQ+!PK!QpuX`xrfp-H4HY6I+(~4J<=q+(;m1Z6X3LIk2y*lYr0vZ|+d~R{@%u z#b_4bI}s@=rFaeP1%Yki7JTY#J_!j^T7h~DO76owl{|jspVo{XcO0HkxB=TnyOL{n6WHqXqJWTDal@)JcJ0c?CbhE4y5ENn|Ny=&6UTux3 z`@HrrWuGINS5P--7DPS~zlDef@^6`isZ8+FQdJ94CJZ^S+YT8RrJmev28)EA_qDnIDCrdZl|6^a5UmVXm*8QT>@mROZpT$&sQ~ogOn@pYJZqb6u zuQMt$=2sN)@4|SqXu=N0> zvwxEf$JoD5Q!}3s+=i>zk5o4V4I_E=vcrk@$m?%-@bB*Au7_>)%>|k>qaj^SHIt*(7wM8+8zAWKd5BD@npezEcQJWg(#Bq-;LT z7jh`Lev&Xk&9)CA;U2dz3lO_V{0XTU-xMAS&?OT#i+8cH{y81$Wpw4F3rfdly2-f5 zu{pAh4x!axD3kGnp9?Ju8?S-z(ZTUXP2rI_2S1>&2kd*!d6WtEy$7y3AqasV-Gp*Q zH>Q$3vErGI(x6@F8X$aq>l|Uln+b0973ML{`p<>9+ofLy-6qM^2S+S-gF|7hR;&F1jG9-eaYBA#rHBS|@$ zkqgR@a?=-&mlUHv#WiwJDOZeNiY!6(1s<2?j~@_tE(dj7yWTvz3RKpGA#&BIF9 zb*rUBRo$~$z8SWWzG;7#hxD^kPAWT=m9%A?iHTcfc4oAd%n2+vuNuF$Fe5HgHy)P} zxB1pl@lnja8FmK@ERpSz|MEYR{ev1PB_fF#ftww{_D6bZ&9w2NeBAM^`(MhQl9G;>PX8NypOLzJT;*Pe`YzfsxIRd>i4 z&LXRfGNHzf&aKfnz^O%Se@^^LTU<%l-0Ywl3sY`!o+)-kdOmXvlPlHOk^RFw;Ix!8 zxd9J5~%@dmRb3s91Of)!QXE%M!S2AFyA(&|B>oqF&Br-&-c zocY)Ii9-+V8AhY)KE5nC?-w?2W>nw8zphu={ruU%OcvB)iZ2~Bqt6D#M?9N~&s0!N z6d{oC#{)Zn^EfK!F=BN?@yz1*DgDshbM0vyWLxXM{WEMY-pDLoGNa=0cC#a0^7flA zE{yCQx%ui-%kSV#J&xRN@pwDS@Z+L=WByMxPTKif+PS~Wr#-N`{FJ_3K21GCD|pJB z1W`HdEv?`&`Th3n)oSxp|0nWkBWv3Yo=lJzsh->%6Fehq$28|g3fkH4$&0jUZd3Ag za&Ju5u9IP@PX;MFNCS2PID=WU4HyQ!Fw+*@(scpG^GVU5x6hzvkSGEkZp|QmFl!Jh zyo)bvHqHH>3#H#Xf##AZ_J8|-$i8yMHyc<;%(;3pf{IBmtY5mjbU~0r>&!rMQ<-H_ 
zDUk?0bo{l#h>&T>xt3{5k?A0%)`iX(CJH+dyo*vV#E&xf6DK+mI2YY0r=#zt)!g^e z16-76!xeg3O;>aF@-UJPk`7R9TkPmgLqOB{1p%m7Jg?Sxwiv;)q+1Vii8swjrw@C*8gp1tw0K3Iz77QI;8k`vv3kyM^&H(KNY`z> zvs1j{#$xORUHg=t-6-hUWkh3}z3Ojxt=Mkff9hMR_~#m`PaVxqv!O$e?S^KSPUb<@qM-H(QmySL~B}KuVQLzotqkOYEOb=P^jq35wg*q5Xy=jL6 ztrGcZ$CFzX?gr8PaAUEgv?)1Idy^-y<-Lgr-gQWVu%rF;NfSJR521i(?pnw@gMP8eI63z zLBXl~q(YRUpdk6Uh@dMsiaQcevtQd=@WnHaakREPB&(1h>!w#8Zg;6-02Q@Y~CA zp76QaTTJ?pYTb}*2!@~2S2#=$pYHn#7Dm-|9L+0e$Hm@5h|qMDfri{NIaA= zb5I@+=}>7#V%4&pTyu@hcc2eBB`3L?K1Zd2$%xC&71HY;7K7^bgaL~?5XU3~gj zR5#!zE)c#rw~gRm^1ukamim=!n(DsWH9dCDzi%sBT!IfY~{C2{=+mEr;IG)_}0Uy&%hl&0tP*T^r>|P07R&V~vHvr3WYJ%m`Tu28jRW2R?JAxJJn*Uc193h6s36Hd3I0(36z(~;k_~^{ za|X#kFkcKL@&oIH`KtVZ#V7F&!p}TP%cx3Ep@(47^3w=5a=WdH8#&`qu|UW)o?p{! zu|Hf;ogP(|Q<1B1QRvBT%O2<{Vo4uzJ!09hIUmF<1w*zOK=n}7fs;Y}9w7iL%+jDp z{4>2%&hql=rawZO9<@q2)Dy{1o4&#*V`#xxuH5HNne5pRx?N$dr-Q$7@pO!G`-Aw3 z*u1xERCOiNxvXF)?k}vHz2clv?q5+Jf-6s`P#WhcXJquU$FbEgnQ~T1RANF!VhRnP zP>GL>V$odM`XAtL-H`G2iFo-BOi=a=cbtsJpOsLeB$dEvlyrUcERU6tg6pO0N5zYI z{n-7hbDM5th&_L&m+HIWk;2PuXmFp4I$n&1wDAf^6;bh#QC4`BMPgJXqJjl@!9qnM zw#6b*WmcZ3D)-M5wzTp1v!i;EbZd%#qMi(-U4KG1%qj*USaKT#*JB{O(NZokJIajG zjR?>m{<$MZu40c=GTTZIxNI)L>19x`1q}@u7fWCA3o=zj6+7xS~7Q)oBoGh&* zRq1X(JN-w-vzb+&e>`!!3_xew2i}hxjr_jX>+d<@sE2>*zWi2u`k3dF*qZ~p$G)=5 zEq#4Bz~<1IiFn4%AD+f%P6qDc#~U{Yfb=GMBq0y@<%aS3pgq8&c;iF;;HB)rlu_c5)zlZ-^M; zlZ!%|n^y)~Bjc{s%f3$?JLLFbq$awkN#l_D*@100M`1S?Wwe<0AxFg|9^Nc#B%dZ{ zc$=CUkh3T?vRn_Vi;tP$ylOYT+G_OqJz-KO)Sdpv+ok-Nuc<}hlba)JFMzj9*uF@6E#0bP96W6|OJI>BbF%C>`4o1RtmZPp5&w)JNoJuEE9jl)BP&w{&tk-j^H{UfRA#Y9a1A~v> zE@;1PzZ?B?IB;8Z;i~fu4e96UcaJ}QTU#-we0;L+X)X zQFYr6YGd3Vg&TJ?V)yO1_)4Pc3J8)eH0(Db6fPnZ^R6PR-NPmBD?OwfK7va=dcUfy zeh3mmZVXOH9j47)pTq8n&enpz}bWCurkMJYW&~TzXp0y7ZhKRVRvh zeWVhJ6qX{R%E|+xSi2J{)(vI#D5EM8p|7#!^>NkEGgPdz8;?qW?JaFCH)QU}Sr>5K zS^io5eeT4**$DIsEdPW<elowY`FZ0?YESa*NJ!%n zR#E#S1>&(5!1DM<=t&J2knvTa+{St7u+ZRfy_dhIW!@geyupT^k^6%+mOJN>YwnJ@ zNsbwK?Wvi~NR|f}Fp|_QmL6_|K%G3uO}^@C#5~`?!3JcFZX|i+?w;KB2C4T)em0Pt 
z)a}XLa5Zo&Nv93z*i$`eX;b62r&_nC`s#|2+k5u!>6o-^*88eiLF7u}qEjlyCnP3s z4rP;_85xJ(p4g*4vZ}j-Rb?HGj%4h+mKZ;cO++LJPAa5UX>3?6O^44$^7%m#h6i@v zj8ZA*0g~tYN*~qB;^^rL<+`S!MbSs^?^NQFJAPZ;44gwSMuK|(c;Kp_etLj?J)`QK znBsIrI%eN2`#%jBoGs_?SlH%7&?y0zvG!1j$9m&R1g6-6bxl-Ntz9{d+t#mC!M>@b zmbQ<)0`67(D!Yn!6=p&uIf@0-*-?B_LJ2%ju*V1X6gbOkS4R0b@Z_~}O zu|KGD!=`zMZ;^Rw(DS~LNRL7{%)|O~W>j{hu&c@-zKx*bWx?BSGNZf;ye8>-a@;fU zog=+K!&}t+VP$@6ltHw8-N|bo?2TK;cAI-lI&Ds)Rpk^^oAW1qiVrYixHWf4THEAko@`s5?5qmMI@ljQ_ zUY6$Uf_3XTW5!jIw<0^PqmP zg?W5E#6FnEAuA-nL73%zF!{f-NzhfC`tVn@I$iYyMgVH?!j zhR4Wz#1l$s88IlX2Z&;n*cklA!B-*438Q<qhhNs8oWaPw|=eoSW zmFDOTR&2?kmW5SQ?>j%YHnrm)tG4KSS-2>SZXbN@WnN51gz)lFnop=iCjGPZ!K@QX=Zu2qe?eFM z!o2If1Lnk`vGBYQ?7At~72%o~jM?|NYXJDq+qaJX$A5N>>lUQP&R@86k;N_7_=m3X zv99rJT+s)v@ec$$VL5ibM=XzaZ5FL-ug48p`h9;<*HFacK^VbSv`r0SVNh!|JH={N zqFNH+U|y6N-`W;=d2&Nv_m%d@4&6}B$Jp3;L!%nq;m^9VGv($tGSZdp8~UC`b`0f= zo`11s&Ag%K=U>Fe@|-iyIGdd*J2PD~{_c$B=y>$_8g=w|w65HI0(-+GeM(A+Sx-(G z@EhPy#XZLObL99zMvbZ#{*<{wzHBk5a-0KQ4$wNIozgyV1L0UuXNlo!6Z*inBS#}A z)H#}2nqQkgB0?gMumqh(!y6_tIBKNdlysIn5+G>zU+bS>=JLo)%L!r6lH15RAr>Bn z7TMAug&ywq6v|3EcTYNg%#KiP`ileZuO1Z4Ac%6T)M7fG+JJ;uQ>PB$0>1&`9BU06vAO;Q!=-27Fn;Qv6jG zXu5-(dpYT7!YIhFgLpe=mHnwcRH=6}rDAvUs)&V6>8G2;DbpewzbRKs+QiKj-%Z7z z@45Y>Pj{4c)@CN1;=fw_YQ}=LZzI)*-d+(m&+xeNZPO9SM0o!Fsz=ETB}5_ zM1qEz#m+0B#ul}+rDHfYB@;y9K@I*nh_MaFI-4ER_4a|nT1FJq%?>?RW78MThYAej z&gS`jPydlp8^x5|5}tprKJ?41Z}vMlXP_OEk!yJ7`CT;|B8ibc4sg@L2sks_N5W9% z2KmhaSso%Kn@mz^vauN@M=eX`*WztIqb1@(~ZqttyER(T5TJ zOff!Jmdfy$*>ULCSd~6fvU{TtFi-p$9FmIkW!%H#R$B1Vur*7GCH$g0-r2k0m5Z;AOwgILzA`J+lk!yW4EqULew{(i z!75Lc2ch3t_^#^|rEm{1NL@NJ1a@isX3}R8LsVl(2u+>t0SpVm_7;k-6lo#CBBDZw z1Zl1hVjyanHoAoPXx)l~krDX^ADxU${%z&S=77^p(H-X#g6@|stWUlY3FU%_!KyDO z|6UbQ6dJapUI$=jAaw+C!16;AEuJkNSN9dZ^SCOX8Xr{)?id(yzRTEcZuPAno)T#N z_S4n(%MR7td}vTseDi7nJ!f6wwqA+4+j^TgVS&l9*9-Z>zKe_&uzZV+_`|WT-}^2q zc38yIQ*6F{1f-oo?OACpc@_A~e_dv|Ady9m zoVe-P^1|~yZ+hhBqZw<@dVE~erhdBX$Z)3rx{7Pq!7L*}@6>ra9LlH?%=P2aXbv{f=-om9fv5S5O8CrxSPtP}RE 
z^Z7~gICs;A2WZB?TFy2!b!(i+NlYp05|D4IMqfu?PAKY#A@$!uW1#xIm z20HDZ>MlKbyIT8P<*C+w)HCewF>;t@>7SnHm)E&Babb|+%?_~(3)h|CVkR%_StJ93b!FA@?wZyQ_5al8_smC4LV|wxetK$|NtM zv>V)_Tw-oDxJB5adWz*5-m$gwy~v*2k@=pm>jHPAAj^sHo%E@8tqT6s`^Qci#5mwj z+qa6%yidZU`2K_B4^d|O?B$W0R~p2cl|-4vDJ_*NyW&?;Ywov3IN;B8MLOV4`iolp zMc9!gk@h)}_FZCdvXgJa!EFxs);%q_R%&ac<%LLLsxV0qWfmIMWY)ZL!op`hMYX`w zf+UG#wu3km8K{K=Qsd=L%qm+SH&AfGyOPDzIz|#e7YWp#)a6Vi@DSU2=>{!;VtA6? zF=30J4;;hFgh4j_sNNhM@Cv6ELgn{_g%^lHh1h0}0e>;9Yg__zglFl}w$=+>#~=I( z3})If6>j9}$k^36W9k=GcU;81O5Rl)wo19Pw@%-zkcuxrqV@>KW$0+tjufaLDV{sB zj{at5)GyB)eyS`#2Z>g>-xR)XrG2+Pg|Ypcqt=B?9@GyGlB8zA9GoF8S*H4Cwb_?| zdnrn@x2gGE((@jlRsx~zrAfY--~p`?@-Nh3K;bx6Zz<73JBn@^hqZs!;KG zj%`J8)F~sY^spkuNEF&{AHP8&!RN?621dK39t#L`*PeCxdeQjWlUe-Qk(v+UsGJJ3 z9B!kq8O{jJ&z?2Ki`<)<|GGIpx0&ITpPyz*^B2kdBE!q9LY178BsNZXZ2Gm=UvEf%@<8`z&TiMF zG?&!D?F+6?RD6FtAxp5p!aZu-EV4gr;iiZ;rqRs~ww~E3-<3SxS7!WpK}oqt?y)iZ z$KHDR8}SQ!WTJ8V*yHx&LszeKK6Y>)KEAqSU-G@C62ldV4-(%TWF?QPVl=N_(CS03 zzjjZk>h^xI)VY7(^!d&n&O_csUJ-KG#j@FRYsTNRNo|GnoH6&M{>K{$_VB4{ED{+ZQ+n zX&`15+y?`!f*_LPBrvn(VYOyZMU5|qfi~0>(Ocv0;K~6-$D-9lp%)K1CG=UusS@&( zDPoS;VmnmQcVx<{dQQM>)deg4Lrx?VPS^#Mr|L9yCLUOfF4EA?9^$*W((r5=pfY{hhW8ZlZ01+oPbxI(5uWj$2zZ$gZwB$aS+q#B+MD*1U( z)Q>VP8PGdN#T4n3tjY`Kc@}Ep#dSFt3;lY@hHJ>sTsF@Yr z{5i+I)q7>u4-Za2N8YsaMA(;%;K-qhFTGt3PB)xR__|KyINOk3Ui$gmH`ELoG|I#Kr1+Gq@*T(9AqJVr-n4{_|#w4!rI-t^9iuKQ1 z9CqvDG5b>cN@AuX{&8=V2?gaxuwy2G%F@w}(E(3wYL`1LB%u##lPc1*30Hu;Lu8W) z6@p7iQ0KIyt#bAnF^Ih69V(Rx6aL{DbuKTZu6C!9q$dV=K-z%xi5#Jz$PP~V=>fbE z8jKSV+W{?THr0^>j2Wp=1%^U0NVX9SAWKB1{i9jdPFn*U3Bq%vGf;H)81a&AngHP> z$R+{P!cC|ogV{BxJ`*aD8CW6-stj+b2>%yp?*Y|R`uF?p^gu#LBnhDhP(%ol03sr4 zXo`x8q8Jc$ho&e>R8&w#5(q__SYUz`8#dIas8~jlP(+HNj3bN_A8%nJMsmYhX2{Ihl#sIRRt;61|iAHAlPN;#yj90 zaJN~wak#s@&2w>+XTkWrX!ZNA?ExoOPrNW%@R>TA@r`a&`}yR|*MEQ8^F8CA@BjR~ z6_UObs)wkgE2)Jp4gX~6v(~20d{SfNAD9(8K3|`#jmVF!n_iRdb4S+G2~V81s0ndPC>3A zkdeiw2yj3x?zvJCZmQ?LWVI+GV1wdrUJLDT}iqKpetY3wVJTLjUNPwFO$jO%p8 zkSXmRKX(B-gC7Ny-=6>v2%*pbBrhWfzI5Wv;S(^FmEt?%j922@T|3r5rQQZhS{bNg 
zjpeCfgzhmFI{a%4X#!b(_YhmC7(VU=4P7CpAU8pvE{?VevDH*V-QHKA7#0Px3DKyr zf>#(41vJ!f-EJ~0$uJD=QW2R2u=XG5yccEpvK?77=>xkcEC=V=P%>XpFUhCp9ix76 zH}u%%P};xQDR5n zdrkp`D>I6*PGVZ-3+gkr5qLEwJyQmfQy@ztOi>dCu!MNE*}%MSKj0vF-AFkB705IM z3eCo~C+l=EB;3EtIxTgD9DDThK&U+=*kkp3#@QG2>DDugc`c8bcvcH)Mx`N;=}u2z7YO~`jFY_A{ELUHN5Bv(^ zvr5ZE6>U_}>5{Zv1p}*ZJapv7|Lo)ulVkGL*RVKKH$jOzn{~n!d4Xt&8bq4{cvs@Z zI`A6OF}<6yRQu0}ohv*7dKTmADj_^~0*Yy7NB~Z!HZ8Ljt`uVG7UR^R$UF_?;``3L z1F7|pBoF!TkYx{-)5=~DU;bB-eBMhC(19ZjLV&Wk61BvaM761kfoYwL`4`&U*AanV z=UL3%sT>hdiHUqnm4lNBQXMdGgPsP22#=-mV^S;_5CFtbfayU1z-*I{Da;`w3OQd2 z=s01nR!+@i5gm9zK09N0tX}yuw#g0JDLh*VjY8wXcc5Ui^_wjX`VHYnTP*2 z^5Aft)Z0-0c~{;AL!bYmL**+BE*a4GK5`414jkVo~8ONuy7{u!$mA zcoG>n0O8q;lQSh^8^o8H5)aiSLke^4uBKBEjLw1${a z#8sxoNjqd_Ab2!}K|r`RAaDd)CTf$1ZSC_hdIBQEO^t@55%?7gtufewW^Dd^)%^w) z^ff#;{t4lsKkXhX^TT3W4gcAXBJH9^HY?v&$loDMLZb zJe-1zI56QXGMNnD3T2=|iD}rV!6U;8jrJRS^BN2m}gFo`okf zvv68DnaIPQpMXfQFAh}-&&CwOlPkq6{J;MF|8va7=c&}JP%|?VRd_XZ|KO1=p@c0a zZFN(_&3FgH=aL=L~NAo_Sqjhb*g7z`kE^&@|Ree&P< ztJoOrl|xn*x2{Cq9FBx7u1S6ycKxD4cx9b!MYXN3#`i9GIR15OXJ=LCl^O{sm6w^o z6SlP#cESBaCBvW0d4r*^j)>v)p-tb&;Gr7C5-TM!noBa7yGW_bj+iIaC zHhIV@p;J_wWu;t^Wks+`6;(TQI=nEswpLUFzcQ4O&^;~_ExUw;ljhI z!D^9Obl2qCM%D)E4eAZW%?*U47Y?1gPWbF3~f$1Ts7if%M3UQx7l#HA=;c;BPJfAQ517E*&a9CI)B*c>8|H!J$NRKcmOO+|6V}{znXaV#U6n}F*gL50rMDY?z zDtjWR+umL{YurVPp<7NMe^?T`hoCd`3^IsCJj z*t>TC#tDNCJEdi1Er6ln-H;M6>u#}S`XV6iw^+jl-|pd!Scsdl&qrr!#3R^ z6K;+Eul4)?ttkr-Ubj*4HtUj}2gTN^0inv0ANQMFYv$zSv57#Th%lj8SEh)Cpt~_W z*fJzkmNHp@>a>b;g{bCTzJwfJOE;kr_^jFN1xOyzHRvMNJh27~_bGUoEE#_jil_QR z3#&o5p`aS}Lk&j0m%}h!gIiJA4df!)>5b4nF9Ej*;6}hN;;UxB2y8PIHx)PY@Kl(r zLF1&fj*Y)nlmEi_g#U}rzo6X@l*Ufy&eMXzAjrakA|0sP$pJd$D$@|h(D^XQlKZLO zVeL*c0j67Ha%R9dVjQZ#I7*-f-~{6*HWe5K{W;#`H2K(64H3qWK|>a6mV995pS4tq z^XcMxP?g+GV313sx`|l~mQYBQ*E{rZ>Eb7J@qJ7z63-99C}jK~XxT9L<`JMy^MW)Q z$YTix!AZ@}Jdi7+^Xna;;cD99R;v6D(BD7*-}?POkH6;y6i)7;NspUS${%+ffhf4N zBbd17%6C_XpWUO_Db$K5C!AV!vC)_f?-7V)LC|s8NBjJvMs{rrK#vmYwDop^R@xC&wYl#bnr^Q-=<=CDz#r#nxq%H0$ZP| 
zO%gceN`s;gUbUg$2P=o-`=+0*r#Ig(qhN2Re_(qV1+tgWG&Dm$Q$KUW^bR?7PUcLO ze%8R85l1N6lQj!UD&SR_4w(*F40*~8`dQtx20SzU=-5X09%ZWOh+~f(0ptDEcxKL| z7|60YK>Rag&17}237EZ&(lQZR8c_0wE zqp-}ZeB1fG*<*o>dl*)ztP-NV-(^DDHOVeiviI;N_E*^b{l95uqjzC>>e2iMigR6c7Fdg9HfLf@WAQccbX22FF zAhQgud;t+#9ZRrHNOw%HvV~`(L5ERAD&_@9Aea!?hqcpGYzG3;M~pALFpL5PzrVeGirJj z#x}&xRGVTJ10hmnY>xsG|J>d~3Cp&Z51ftZJ&w9;I590-nsagnm(=fWUWOx2c2U)= zibK!seb_kLxU`@!+WoV6W@yTo#7PAK$pw~zFywA?<{NWBm;>|t&WM)3Pwk)S?=;z* zH-1%&elB3_9W5mHdB+bF{bnn=Uo3HspUx@u$e#D+aO$i0*t7#KplrbQ=?gu#%KGs6g7w^~19=w{J znX_aXdc4u)hy3z@qi}V{GDnLgr{n48KyRUWj>l;liCz_pX(*uQA{Y5I!0x$40uDg< zvHCT5=NRjmTf&dpcC@}Vw#aP-GC!dBv*$GrqsLKDUl4Ex>tpJZu@bmy<~;6*iN|5o3xB!lD2#GFGmz%yI^u)ZfG6 z@m|y?^s_eWf#@&x4bO6g>;A&3@Th2}G*WwKAB{FzHBq@}vs;^=PxbcCR9wcWS+}Nb zrg?{9aNESUz2hcqA3{$2w)Gb0Ps5tfQvwaHg8uXPBQI-zJ7ckj%zyXtr;(%o40jvh z&iuGD&Cnm;MY)qy`)0~dH-D=XOW!sJW)CaFM8w~6t#OPRf9rtHGtIv~_9bt1NKyp1 zO^&K`twSd4<5csuS@#HnS$;-k1E1-gPCx$;0C2M@}L z!Fxe35ctW0m0%Cll!3TW!#4a1P!!DU0F9>-qc~%|oBT(@E$^^NAS!8;(giv0X&^2X z6`)O_;nl8_iGnC}8jxBKR6hVwDG(v@XlNm|%)?CguNH2%g+wuJb}HYU+| zPdyRs@loknKEpxDZ`hjv- z5OAKm+#JAsIe&qn?+iEv04D1WopEA`E8w#LXa-T35wfeOQuNlo%mz7V>)a2PZ^}~iGhlY3k)}zs-n4~eE>AK!_CA-wDR$i^ zv5mdYDHgqHYagt;wrv2$=|5rYOdY(^r)c!qgr@D;w(;emGYK=gq^}Ph?~A=#wW$~z zfX~dH_PE$~to{BdI#2w9Y*|ulne{;PCB2eqE3uZ0jo2_)zI(i9Tg0r^Ehp+u+QK+{ z$5yfp8_Qw%-eW6i>7Vu(hEcfU#$45>zS!pbrNvVN^Y3$RPlW-zg;78#AYf&#|MeOR zcV2ddi^tv{hSz|dq3HL6Ne4D=qu9s|qr0u28s-{P7sBgAGs-AR2$=jqlc8TjOn1~g zB8-BcNB+-$hT%QLoiN&dG-wR!Jp>=e?nl~V+QXC=bQpiZs@$E?zDKr);OD4$NY&eI zqj&E?A1O1)sh*iL$lDcvcR!+jvC!I*x5Fji;u%!EXLd=9k^Gw3l1C7vIpR<=N4=-V zMkYnf!UO1jIsr zz!z4Z(8hjurq^`-NQnK?Ii*Yh%=Hee{jPI)g4rkCZZo%NXOvM31{4v5RSI-H4YZO> zWM)hk&H6nYO$}E#rOXuom+KXm2vG`ga7Wxl)IqbXfCjir`7+Hfl@F@amfcgp1G3Uv zd;#<+vrmCo;HfUqq>VoXJh~%z%`?Lg2QV$F>e_c#K)tpET6{9TDx!u3I(rUDW4dG} zo0GmJftv#8hqDZrgxVp1&cvUAE7d~MQJ6q{O%Vg`Bou&!NCuD#nV73V90~5g7YMMz zxqJz}9yVSBr%+DN1KYa-rxD29Q1D8inA1rtQ6zz9shumo1~~6uNOA?~P?}q9`K~m> z-+jY&!-hu&^<~31gJFMp^nw-NX7wc-ex7&JVBE3$X>G(OXPGIU5Wk 
zS-o~UN||b$kouvn>J5Vn(H)*wLmT)pSLWu=%~?8wS2HfA2Mntx`8^i9b^La@Pe24{ zbjj)}%n*4%_ZC8NA%i0Hu@+D$RDaz#btVpX99b(?13ErS$#35+2@cXK@ETd2qJu$6 z*VsQ%_Y~(Q*f3NBY80=PQ)c^KQ4M^EJv$fmiB%iP#U#0@DrH*)7(~FQXd~<%)7Loc zb)NbyY>V(0=KLT0MyZU#IypSMv4#wqXfj+sj!_<`Mwz%=N&=3-li?)@?*jt+psYr9 z8X)~(Rq-G5gdf<`Q@{XYB*(#G2+S4D+A)JS*u`Pth6z-U13L%kx8K3&v|!%GenI0~ zgH(aBUEPDNw9qljH-Kv~%RGoiV`t&j#@fM~AiAaiyNQ|tpk$c4jiLky3Pn?c3}l{A z=N#0-cF^%fS&#;PjH+PraB^5s@jwu@v~Nwv z`zhk2arXT*zNvE>_%$SvTyBtU*yn(zx;9f`+RuRAn!A2DJ^5M3WrN}5t{?6`GOYRj zd~nB7LMj<&*yeer)pM8msxkrwdOzzbF!}79L+94A3m8ZjhSzghR~r83EDj zL30P!j2D*6o1`?3U|td_jT19f;xpFv_o<2ddE(RZO4TIP9<^64l!mLqa#Z$}RB6EB zlvfgCDV~$ zg~yF`X_6)CY$*+g{rWLpcQ@1Jn<2SIHCt*4QL_*slg@~?5Eg0ocW;&zl0$41R+>M^ z3t95biY9u=H1XRkXPb&9R>fuly5Dq9~i>a0?BOSb8z6%1Dks_ju(( zRe+3gFr#PG@yCXe>M_J8)?<3Pga9fv8>!R}g&(n-2k>AM1@F;)4%Dk)<#p z0wIlsq}wEJ8Mue_0y(C!0HGOiAg%ze{9b?8S6A^JkGMspMY8Xi>(Kpx>NSMLU~ z7;@xq=$ie&IgDVe*;de1kRt;SE`Z`XA>adZ`s3DMz4cYNXet*FwCxySRt-4WG2rk_ z#BPM<|FjX&_S9&o7S)W4UPjaB4ehpnUhVut;PWqwC#`FK@+Q#6O{`kGvZnB$r^=50K$Ix+woE41%Zg*%Rr=>`Gfib%k(DDg!XrpfV=ln9wdK7Hc6 zhdjh71qSaieT`%x1LFgSZ!{_YiFboaOb0)aD|EvVDGwC_7}l{BI!7*?T-ZHMO+x2W zLJ_kR;6vsic|PDx6zizqecHt`rRGv|$pr9U$#F~qLN^8Pl|3BI%)>4S&xvEAngDyu zs|Y?utVD6>qy>t`6< zx!L7u?dtDthULNNx{nQ;H$}X;Ui_Og|*)-_fg}&kYwLg<`;fq(peqt7xT=Rex&sWf5*{4Uqh*u7)L@2 z14Wa`N%L_nu+qw7!vY1wQ!t`4y+$2PSJOduask9>PD_NCj+0$uLwb z2deP5q{{o4p6d?K`uLAJ2CSE0opflzDd-vsfbs`=-g=KG9q{hL-0<*8WrN1#q(l%G z#sq^;bbNya9`jaW$C{DqCJqF3t;QmTVM4cqgmH^O9)59R0SKD|ta!@W;`3i~fBu1^ zbiTQ~+is-Z-Ta%~sQovt*{J;&#G>pYarNLytyKlt-0-#4X=ka`5fQN{#q$}}vZm_C z=#<;Fw>cC3He?LF^F3fy9^E)=?w>`;I!X+>V&UKaRE*|}K@z7R^6;&MRf}87$P!62 zu?$_cHgh`?vBZ^g_soRM0uq-dfJuEo(NZzKa%qE{{q~h}v$MS{d+7%6Fj$pV0xsK) z53p#!tlzzeIw8TDXNu)`0SJ8GN;HYb^p3NO;$bxz8yPdkM@pXT0sQCWNW2a0dML!_ zh;f$|WMbHM?CWxH=%`QG0RXv!$WF;G(I%Tf1?eI>CHs^c-;4-hK~J2tQJ^Ni_3o9- zq_V>oQ)pA3;RP^*rqKZ4OvCC7>ybBu>`heP`u}`2f-%kaTI`btnP7lbH=F{zJVpb* zUNG;!h|?gm0^gCSrCrdqPsCzBvG*mu46#}uZDKHd0fk@~IcPAa26p*>XyIcta{hV% 
zMu$YLhW>vCCLo#t;a+3)xfE2P;S4Mc4W{Qmf>=rj2(ql@jDqGUz{tCRGH!r4Ft}_d zTv@Cx42A$7op-23T#bi=wrn0H?!!^~)o6?M+M(GGv}KF31jqF8`yhtL4jyD(U9B(L zub)0K&HxWVh>z4ZTk&OPHY}j7raQ)Uw@id$gMLro2%Hb$LfDeVYx|D6O2`?qmR-SUlytTmv|eu~p@ zplG7tz*W+)`z`cFbJzSYcUE{1;ppRL{0VL4&r-|y6WYheA6Om#0_Uub`g3I-)4gWT zgumR+?NY8wYRE2Iu-t)On9++*{@ySx0Rl~*Th;`aO>KnFZJQo z^{yTFEM%5ggfJvv9x7qHiDnY8&l(?irWVcI>(Pjfj>%4n5oa#}}%1!h_MSAGgFOU8t$0 zU3<23&DYL5w6A|0*>1Qr_)mrZ_8ZH7y#r_c)!J2{zth6MW91s}UKhV|RN?W@P|=1X z+iwIkfeO*BIArO{3qNf?_{;jpUEjA~`ub}1)qgJaw{|U!_<%mhl3x}IS!3nI$Zk;w;aiErLJ`{B9KYy?(gh+f?GDka} zwY}amE}bXdongqLjndtF#!1KZj57=gkLjg2rE)GD=1%5(wG!4@TI)oT7ui<9TbWzS zpD)X>z`OXDtgG3&a{KIs=QtC;4Ta8`T`YX3ttC&}7@$ty8~z|}^nqQMz=EPJ#_Cr* zDTc*F;)j2+3FW9jzYGm8dF%fsfJ8UWVfr2(u_8}nCZE4h(x;@6NqEl%o`IH0I$`%& ze(-W(U4&Vj&1uY~qUv!j z4yP>pEDCI&TQ-^)OAm5P7NXB4o5K7Tlm@|M;RqAytf{huA=^KO=Io-srhdex#xPax zZtL{SASc7r7;GS8qJkq9cZTL<4B38%xiqFf=znO$@izS>^-ti4w1LW*a=UVS02~Xm zbjKRUJ3Tfq>Bf}L$Qza0WeFd%rk*E^zvvXQFBt9B=0r5FT}#;{+wNX@H6mI)H?p## z%$mVZZTltkMLH3U!IOpjxZv!pE%E-GaUy=qTN6sQqNNh8!zoVhUnw8s1J9fE%9PFD zQ1iu#Xfz(;+ij#g)N5^ne+xfG-bC3Ix!)t<(aFK@(_}Fn;VqHP;iqRR%*Ig`oZ7c$ zQC9EJjyV~pw)762O4}T%-Mz;`qF1Nww#wYSB&c3{jJx@Sk<;+CJ?XE_+m2K|@B5h5 z3x1Zj4`&>{w`kuS&jZEjcHY~DcjVm*{nMRxVw8GimwN+<2H@i6_Ssbh zy?;%noft3;7j6)=d(+((XA~SAGTd|j(Qwapc*mS;%Drn$ z56(WdBX9hc-eJ#EMRU2EAv-?(wbSO|%sAS#*2XQg=})GQjcEx_$J{^{l61&ZGq@O`J*Cc=S(P!qQM#jNUqd zZQm{JH9`5A!ccsB$V}CtqQ?ggEjh?8dLXai;N?GSLz(x*&f`pG(+N4T8pZ56+Eb_w z|L0-_lf$!DL@JmdECKB|%5{*&M0Rc{w)nu|37qFd8%Xx82BaKbf$y;M(`A88>2SG@QkyzgqA7C_@B|0mWwgKabPfov)JF!dOQq z506;;1g=Ny_cn1ePe$gH;wv3_O|8C$p|(-qp|(#yClnuEqmLrW_Od$NW{$@tvUEWq+Qi-;8$Xp*^Hib2P?1dhb6-!;tpNi6U6 z?ooT{e5TT#Dq?1jDD7#6kwp{t#@D;sQv;Y+QWw8a8a`s1N)9{-_dgDHm6wL)8@W3f*U?w5ZrwnO$^> zIK&5sLx_BwBUZmF-HMHX51jk2|CdmaFFb^t^EeZFZn|UW8$?KmS;UU8A1CWgHXAYg z^R3CQOE{kVVD@)Ar#*^~qB;aQp+N$zXig<4O zRf6%TJu9y+!D>}GV{L+ZuTnSn=;-y7N5akz&GYzSoFEH`uB!GDmV`2086H=+&3l=c z+^|1l0_WBb-_M*medg4u%Ae*g*tV~4){CO z*PfXhLjfG0_8E+lD0LML&YaGFuP7cJnHFjA8t9H+U4 
z$Bm*H$m9t8V)+3$vxz;tyPLFEK7v!AIGK>tcLr9Yu$t9~@J$*6PT;f4m1ub_f+?Q> zwDkayALKBM_&}za$>IxT2)^%H&Zc+O56iny3W=D{A960}hQ85Vh4tnHQXV@hKr>k( z7T0UzTK^yk-=qa~U2WB(8Z8=KT@ldwAT!SRSH+??0!1u6kU(k-9H+cSr+kjwyWawb z3Ri)*2d2Ha%np)5CUTDDqlZZ7%?b(6O(+q$+G(^Ic@-YIo=sesTV@QJ-5>iKrkVTP zy5DoF=+j+i)=vhjH?oaIkMCls+(k_<@=c3A-3~Bq|L&+ComH^ke$KFQ06_UemT&`H z{&n#G8^BTGM<+0O_zuK}9H60BBB|z;xON*SB(;CGN7I3R0^hvSbgd1Uz^9*!gbNtzzqvq z6zKuF4MB~@6L9^}jENSqX`xp%okQ=)`@A#LTF2nYh=Czdc^?U2I(H#O*N&; zyYu!^v*5}MjV3jYKtX5HaeAROLC5#u5#XgwCs4$&RcnAFQHH_Lux8X%v^>8xA{fQe z0FN1zVm{9ak)v!gMW$>a83*7nyja60Qsp5$5>l(rBbrED0L77i1;cM#k>uZV^;xHl z{NJnh-+KOkZ!rASWG6B*8J$e8q|0NZ9%QnSI3^a4|1I`}O4{UC4{ z;>vlu$W%>=n^?5IVAYc#ex4Fp*S7e4E)u{b)X7CukYeZIktxeUp8k48f|n4%PIUe$ z4WUJ>JB`wo|8>DI3gjis=-`Gobi`3<++*LpGc*;qa_sZi^I?St41EHr;k2-RzR3Bi zP{SqAXJhr->9yI2&fZzQ>%Z%lmcw7{{2&v(|M|f4@B_cdG>rkRbvWnxD}p*^bahT- zj#Hs{4rAqz!;)b@;^A^=QIsjJdR2mbfN57#4$^#Yqtd}D)8joa-^aK9xGItAv6^zYeKoB z3aJR@S@c?|M%MHklJu1F=5z=1sNE4zUan%HYj`F|gvzEnkC#(d%bO*?t)go1JVXxH z?ZywI_*lH!%#qY?S4XHJmvmE9mZ#7?6&jq0JYCEr@#=#&1}n2Bsa*90W+EnY^p~HQP@vLhFMa(w`i7)9I7&|smecl;eT4cMiLX1 zN#v~N^C;iU=q7V1@8g>WGp`RnO6+Sn7Q+M z;eWT^sQ7P;*yFq4(5*IfHyirarKUzlioyyQ_X-%>e_psfp@_27WU-A2#l6tYW|0wX zQJ*d}B~|`ehKi)^bcq{bwT-wTv)!{oO_vDBv;c>8^cuB5h+fkbxX6a9LohCTPki8wU_kP0djpN+EfkBg1Nvkn-CUV5YB#{mCT5*&E1zCEtP1fxIGywT4yEid=_^d@+_9q4E ztN1D*->k5FCI7(}S2e(jVoB&L$Zm=%3Hkb3Q35Lp?kt}k=TtP*1UXyEDl`?+004;- zs7$(ZNRWWXr1Qgs$Hh9*NOJ0np_f)eFN%8ZSiLUle8D!==Y>M!W%9V>aX!2-Ih=}D zC~4GeIGJZ|XrQo}!0l)AUcK4bsDCl^T2AC56R}DY;&4o%kp`MXF^ZRmqI>Z{amQFQ zWBB;lsK0_3L9~!CJPhB2{A^h)%MqUUfm`tDnASWpcy>7?#rs%vAlzv zEOXVY<4V;m`Sw!9OX!H$WQsm8XF|o)N@KlUKMA3dP`-+9pP6YP68*CO@Up}xk;HzF zsRNb)??{#vXH%RK`H?q0;r=|Y*Cg^I|WH*9&*^gj99$_l`0_$ z@H}UT$K_-W?}vC?fCI3I5aZ8wnBJ{@1&RoDL*(hI5Q~vna?nNqQG{>gj5(=eyLxQ= zHF#CxsHZZv<=HzHg&_X9eWQwF9Yb)8jQ6LMpOUv-aqRIca>k+o3&-)|MCD#Zo(L|l z z*A4!9-B_#hYVt`sV9?2GQiDiYh{TwnWwZRvaeQYIQd5w3mxx^9vZ8}Ov00nUSqmNZ zvU^?Y!+V__hVoMjGDf}sDYMZXjWea^S!}uFZh3aRsW>Pa>3QsAjKEO7yrLn_ifm%%JcudAdVJ# 
zXwC1s4|9CVw|3{J&73ou5qg+kZ?pT+=$(2-J>N(7P<3_suKJ9MQPz&B4`%qEPm?6O> zFYQ6p!tevD>FL2c%NR)1$>r;?`mJ!g zM`MI#T;FIxIG)F*&VG6HoaI>=5nvvOjI7A$I}O5~V0`bw7%fXk{KXP?#g7F&lppYWFNgMT`~|zGrc>NOCQVFN4fo!?dPV^GtR9CHDeKPV_g# zMFd9VT;j-|>MmK{1kMJ`k~y1}!$!mEp$nYAp<5v#-|ZcT40czU(Wm&|n-G#Ic@fIoa0t% zvNCvOThy){7F*9vdnP#dX2H9%e-2-X4|HQxY?*lJfX5%V_iWny-tGRb)(y9R`jD{a z%NH7~Z|4au5QdPTF%>f8@xrBnRN)cLR#*IpW)w%%9EX2@9HQyuRy@sHn9}mzc4=#z z2~o3(v{chcKsBo=K6PTt)aF#)fjTk8PrH=ashdwgt4uOAR!)r6wG#@`POi>g`$QX3 zC%tBJppEM!v_CVVsR+=RwLm}~w;Z}!d`BzJ6ys-B6W;^ILRN#NKU7=|4RpxIgvJ?)Df?#p>F;TQ^q8 zYUwVPdjQmXs>5osIeY=&yI>;rCp+WE+tT^K@GkgdJ`#Mw+2L64q`Zpg@sq~^WK!qK zlA5#78Y!C<5-kW0T8bUeSpdwl69RLiyc-U^?!A*8I`cC?awD}M#+fZmVoR!UE!6fp z-S3(|x|_^b@npBPb`TR^n>9-}W5i;V@}2q@1jP?Q_?>>why@m6PaT1nJwtZ}X#6TT zBRut<3@X@&Glo3j9OX{^njuf!4BZTfsnZ@aXphnjx>*K@sJCcegFZAUQyn4B?){(! zo2Y4;xS4V`8b@fHnj?$9Vb@6IXGTEMFK$FCcI{u|O^vzx^^8dPOM)yg$FBFTG+;%- z6O$K->)7Nq6RJ`98ggLfJ?SW$9GG1l_r%Lami<3NRi=Eu~@gn<;2EA|pFfc3HWei^lV24}ZZzT=`25tHLmrY0RvUhvJGVfATOzH`lNmGL4ZUKM;S?p z7l{L5COF~%bD;uhcb-q3kWUbcz1U_I6>#G}O1(&UuQoj+v3#n|Z=X_-YWY;YK7zp2 zPq`reC50>?rw}54Hi>DUawNbNR{-vXp>>Zw0r@7u1r|`9=kr^qq=Sy2@!eqM5(k|# z5MCTUaiPYYqkRgfH{g4ql|ANS--Ptwp@Lp_)+RPZaK{GJF*3B zc(8qMam3^`3oNG{2#9I5X#R(zlU1j`E2*DT)*qGIm%1gxvDEltS5x|%y1^5SBVANuUxC;0uC^TXSUH7{CnEx1X^U*-MW z@h8?>a%Xe>Bp?Yhf9St*<`oD+&lWl&(l{Ymh>P3)tWgpr zar1ccDWD?$BAVdn}zs%=P*@q>dVO&XJehVQI z4RG}l#gVT?61e^xm1vAFiLMtQ!EyA}iX*`lF^|^phb(St(2Aj;6$G2COn$de z=0mq;kO^A(D2tMX)8|b$d|D22km`uQ<(mL<1OO!Hp(W=ziU?Q44-X8aY;;*ev=t@$ zS&kL03NXX9aEm7K%vaDZ(*1y8mt;jDTTfQ6L~p+k_T>*FrC)Da^tk?OHle^m#zh~$Z zAO_3v66GfYxPe9-FO9jDbklXd;7=kaL%7w>*9PkE5IqYavlkU-z@p!e7%ht}6K zB({T=6P4HNpxKSCZ!cNrbM{sinQ%_dt4pw)R7RL>Wx`AHim>`@a-aowRsF1d z?od4cvD8vB3Abk(`9mu2la{5q4{7CEeV$<`Xgk@Ix`)glSo=9ZhdJtMXW*nyk*#a0 zYCxg6J6x+@0`ePq6$Q|S9Ry%((bxEMvg=?5x5{C!d4uFU*jO?+ipZQxU=D)}utt}L z$W?Ez9d4~P9KJkjOYtrdOeS7P-wBJf8 zoM|hM8D%9ecXwD$UmIqddXIB#+fP&TXM7#R>|ewOc|N|Sg^#fnh;P>MJ-kmUt|==| 
zc$Np9P1ad%xw7zao-j+LJf676Qpu(Bf1YrptzdWOqmtA@T}bu)yn;|0O5y!H|H7r@ zqz9cK(Ba?Sm_(x|G_I|;KGJ$wX|1W&(QN`o-rG7{!P@U;;Y=D4*}1pa{BvR$kveCk zv|wTOn()=M^oA2pu3$o~VUrY~>Wb$<)j$nFWeUh#s8t0XVzSLEn|M)>OfU&H)4*bi zFIWKIPeWhKYgcY*0c0UbfS@=GGr_!Rm)}oZ%q?#pNW1ql_Af&P^~>`r9Wq_>?go#521_dFqX|Yv&0K?? zN(So}IXVp4>}Ap5ACiuW-LQl*Q1`dWC5-?J-zmz3;+AGcPru-^r@E7sdH zfsxgSM)CU@@+dS44(X0U2eA5GG1-=6Y_xO3VVi5(ROGBn1>-dOUYc&+PpWv2z9*fQ zrpKIMow|HU-SOML7VD2kO3PkbT^zn$ zW|1h3Oe_PmX4!D0G%|S@Rae~{sj5~>BBC}HSHd_0W&oNob!PY;L-+weo=r0>K5Yez zvvB3o#kKl{C)Ahoa(uKBM}__k=|1g`L&?QOZ;GVcsb`p#YBgivo4w2Iiy9L zhDF^+jE`}8D~;OEyYyC@DWaF@iz5_B<2k8(jeNlJgE>^yjcgo?RK4y(F zy`<!!~8BYB0+TVHgiyJZ*aCfWkiuNK3Q$4lz-+8#TB<6B5ru*%SuVbM5nkYU6k zdBkyJk5#(k#!>oBebGJoO}G#69`JZYze(Ki8v6M78pmYi1_<nULc#o!eo+WXAGwd85Vp>@eUze=p!!@zqQ%UYeC^|sQtGZ=}9f4I%qS#5veds^DgFVhXb?)b(l53RhtrgFbg@u7Fci7CHs z5ULi89{TR>d*R!_`E_)c^>14LdGRS;I%wFRA;?d5{zGErIQ!4Q=~p*cFR(5?r~2|J zo&Buh?@GH1xbs%o_vqx0lBaFX5989S|Jb}eFzW01CmDZhJ*&0`o{5h<>+XFcx$^0u zkTcIx7z4AhTMF%2$c>}A<4GhkOMn2o=^3=-9Y;wi{GXK$> zRyJh=+^$G?wS~-Qlf$Uw%dY(y2b{vFP8oUR3@SN{?UZ5HpJCU?r%s5$9UvKpBdQyZS2t%Q=6rETI$ z(W@k<`{2x5IlxB6_8=IcsU(7754!JC7+{GaA+uhy$is;E^Z0RBjY?Mc8^VUqhB%2K zYUtk9L0k!&?9aC#1#Imzijp_E;C0F-q8-+s4{E?zvqWu=!Vn<9g@zJ^&TLIZ_+r;6-{c+HHu!iPP^+m`_s@B>eH3+B< z$C((55TaqMYRDg{J!U<2ia2CO_X^ETMFMFL{!#ae?iKmHcOy6sHJ4n1WeXdmVG+a+ z%b=YXyEaHBiWhHu-Ti1HyuyEsGQOl3UPU!XB+;_#Q$*jB&iwrP_SgJunlk92wP0Ok z;)++T57yYO?>v1TOa&^?3C@){s>*ZHloGtk1Yce}^ig#%x2L2gMl~B>u0(gKcI`fw z!#{Y?iFrkd-sF8O63HJs^A7G)L|(TvDweKKFI=orXo;wR-F= zz{b|MB0}p8SKm3Mb^}Ug(2$s5IOZTPzf?@DS-|6Vlb z^Tg-%uU9y@EZULlhWlL{AH6in`}IFBcG9|kJ9Fac#hfp;JGR2i1Tv4MqpIDe?o&Oc z?%lfF<8qFzx4WDpcV9@oE1v*Ao^Dg*lbmBpEm_vsT4n2PiaRGe9+T~!Mv^+;?bcn# z>D*G{t!hin!)118$c*{6)P=e#uC~-%U22}GcerZzLR;^D`O|G>XJTOf+Ze4!-sFy4 z^StGq2S;(;i`=^xs%FD>o@(Z+X7_u?7AoU(y&jnom#TI(t7hPENevjzQg|qfL!@Lr z#zlht*D{Ba2)tpy6ov;&&t=ltp+*UUy${1Odc(wr!r$8Om^CldA|AFwnem}U`+|Gp z1u>n$4~zkw%Tv-84TRpsFB8aQ>V514c7l82SUN|rnpkMT;GCt2Xc7TOz+sErjF{3o 
z9rfi|>E}F>`5y8?x54Bmg`zzytQ=(dC0nRds^oAE#`7Pt{L(20u?Jmmu`DVorfh6k;ko-Zn6Vi zt$^e1sqce`RM+shETOCQn=PP5NBRNXo%wu@e{}v;6-f{)uDdDd8;a~64hhvf>4hxC zVyI+1n^{;(;^CoL16X(0>*4TkHI5X6q1&-nDI=gI^a9_dpN)Ms+S~NGOVVrK2{MsUTfaUv8I3wD2&} zQUK1qRwYUNaWIX#1grsJkS&d#t+O#n(0ilz#^k1i;2xIb{F%4s|GMaTFZ^N0_(F-} zzUKLzKPOI}Ui<6QqL-Kb8yYYDwzclv-`~l%u4XRki@qM@GW};!`<5%gr=J^y4qL6_ z9SdB4eU3%oy(-Nowki2{D?_cwyDF*7U5ii8Pvst2u~Z&n*}8)Hnf5k6Jz4p~cRlvl zK@+NJ2H_O7$%vhPYu&k+Qg1C6Hva(AB6z?z*TYj&jgIw5)+Bn*(-!)&Epl60ivHMs zE~cu`VzNb)-B)e$m?IOlVjj-ZFtwoGE1Fp!TtAmi(5i{szFEKgJ*-rC{#G|83C_Ts zVDd|3X<7My2CS{wUG+q-PKea3mEsmfnsr7h8~-pZtXhzfFIMTUHIQ{?S0C@^aknU& z^Ofud_Vk=OcjopIx+uolcq&K-`-sBXU>6^#)9SN2L{ei#9+J%Q|atl4vxQ~BRuRC)j>0EqEbc9ap=rxy3>$2?$gky4f%yzgIS=1hbH zDx5jmWnK6XX<_?NC{{Up^%k}fqeUJ!uOYiP%W)U02Ol@Dth;xP6}EuXwhY1uWWm}6 zdFjJqyMS{oLm(aRLe^`n%QUB+P%~cAe7~KQTp|+PIo| z*(CD?dFNU_f8#l1lL=OMD?FRyCY@VeUZC70TahXA*3y!Y!lWtU5ZMATwwPnoSboxO z!MjF~K#Cjb4%w$IKB{M*GWdh*U7{t!cx%x*WE2F|JeGoeQ7y!CiA*~|i>L-uHqZif z$|od=W-Q!tR*O_+hsmU_R_A02Iw@(CWQ?PZGKOwE1+8q%HO{qIy1LJ`U~SBLN($tJ zq^SGqq*rA^kU1ILHuxS!F*DgAGB80hp7Mb3)FlQ;u7j@yLk(pI-$ol^BE#mY4j%$p zU_c3dv!iOW8s#FyxDqKB$FA|SQsn89zc* zi(cFas&#$2+v9ha+i@eW*MB|jr0lPkn82SqeAGMhVshO1yzM78`P8g-W2yISH?m24 zTeI#K`TE-Evl{L;7u(8qK3v9Z^*vC0XIVq;RmbFYL7UBobE>|n7_2#SXW(+d_#+Xv z&5ogKaXj|8EUj6FiK%9^;b=y4v106P0mL0bf_M2w36p4c$?`WivurAsw)LV92T`RJ zlyrL>6Hxgfa=0x!LMMC#(r6SEV<7P#<{>Ct#PT59+sWfjP0bs@94uZ}QXTF3@fb z=KHtC@~lEiFBxC>(s2?z9$~)f-E?QlO~H;|Up>|AaBO;O;9Ls(mw*1s%PMK@)xOpx zKIbKomZtA5H)5XL7EkAE?y&9U9lSTbG3-gyeD9{YC$lxY=|SEd3oR49o617IO}}Je zScnr(rB`#0;L}Y9Az7+_0r|pwJkZi+;8gt(bKJX~=1_V-dV|fx5$QN{Xa0gzDjzH0 zFldCNlt{quNYz-N`-?>As)aN&0af|o?;dFpqspS)fYw3Gk@q(i@?d?+K}f|lVE1$n zU?qUtLCg0yOCA~J#}4Ub4NO3OyLG@r3A}#r5%LLOJ*Cw5a@iy2Pg0RXrD_M@dVYIw z6T1UaF;hW+s3FyK#@`dMk(DoXAAy1};Gt9#!As(%q(YVf{D(kf=-Q28S4WI0pD$*ZZ{G`WR`C>8K_Y$7dFo!Lx;7X)h=<2hN$oDix& z%D@LQa3n_Za=S6~N{1Rw*NHT~#{aXca=T*aW@+ln>g^}iskQW|B4zzwokRb2y>UHn z!8^t6JI`Ku?&#$ze>W|-UUX8!SZ6e>vFA$VcmKpU4o=qoQ~8ufuI<%9^Ub&VUgy?$ 
z@XsqW=ly;?O8u^c@Y~}^*B@jcj~>x$6&*oz``!y@(N()Y#YhqEeKQeGU7d}2C>`X# z%NG`oz83)4jL=Nzz1EQ#s>;{3JFNUT*!}VHi`tqTi}?gyCATupsOUi9lBr~CxYW@vAz7{JbQc@9uppcml)}m0pN!E*}r-3iJS?ioK z>nV*Q5PBotG$7N?Vmb3bj}O2!vmJ&Upo?RvB*s4VQN{ugU@}EQQ|iVfO`r%-CMj7>1h`OC*a_N9A)5_&LIIox z7a>v+wh;}u0Maow2tJcAL?XN&hKQSVcQG~u1?OxkD)mG~pj;s-i6o>lrE-Irs}d1o z(to`cc%=KN2!jLXGJ_F-*Wv+Z>%y3=t^}V9-bO%FN7&A627|*CozOosd2^df>j=Jx)|4zZ%|G`dVEjS$~xIr$4Bs-`Gwn!f-aMM3w_N)8P}U-HtIq1{j=bdBWC7 zf5D!DLP|V^g*Zph<_9BQbU=F-fjX0Seu0O-vxiTO*alW`K{<)p!M`5TTDeNxD*Oy9 zI7go1Brv_-urvU$IX1*wSTJ=aKf?urJkn|tnnmkl(b&4JLJD%yx%C^|$?%d@zje*5 z59V22LZJTYE;0C|EbYuS6Ttd#me=sMQs3HZdSmlD8b-AU8&NJe2vr-;pXZc4EL|^J z4;KKk4cMZA47CW*8X)WbG1JDvV(~*zahl5s`IiWRlDZA;I>XyQ0@@J;I@Mm=N1>2J zw-~GFS~aQzKiZ1-4e#qlb*4A$T$Qsbxeu0{oeECO5OC%~Qw~ek(4?d29hx=v4rm$y zx-(r&(grE2E1-5tgDUP<+(+LRRDcR=ryo(!8zh2}3h;9tg(|vM7hBy1;og+mZ7NDm zQ@|qc7=Tu$m0l?QN?Me(2o`gRd&Y8PXJ9EoM>%Oz zy0E6J`#K3^Lki*{K(Zw7pQcVaD1hp63h3b?Sp6NiKbC_7;LD⪻)P2+Z%Fm%9<)# zMN!o2aX;^Wm|xr2nuU3>jYW_?3nX8Ua-j%?#3}=fID?NxIiWr@gpaAQ_ye_$m@7Y= zvJy0)j{@TR=Sy&Wd>|Apfv>Wapx0Nxp8pg|)N3U^2>eRnTY+!&nBZdEruaTBU*p6SVWZXt!qrjt7swDGM&j!A^sw|h^+;I|EgR|kjn}HoAR&g zf6TuKvRmc4VKhY@)))$obuGtlLUBpybjVwW5`w(!CR8CT@aS4G-~mO{C z1bCSS^X=@)2*!d`)kxsBLq}EuTssiVV~~_Eit_QWc6INi)LE;Oh~^8@sFf?SKEzi> zr3W|TmKR~o!hBR!-OEWtVg(B**i;e@NO!1~9_S^6q*VMDNX0=F^)pO7*x68lVE9-m zs=TKl-UB!UkEtP7Ys`pJI$`e!l8n`r&IZN4h&^Yew2(%K zTq)HrHOH&{I0d+6`HU_e64G!aL<+imqG~dSh`23An@cqjFrh`Dn@0pOpsh#Jxk4fW zV*CjZ#N!zVn@FWXv&|Ke#L#xK*+MEbz(fRC%>qcuLIfbrNkXWq>S}VPf`pR7li@y)z-bbL$71|{)XCyJ~k`RIV3|2OSBmxSls(LVu0Sr=R3xj(EIPe}MNtURB zDrli%gr%5k(@^JT{*^19I{2RfuRi}bnuJ#zEig)f z@IU;93>^jPP7NCAegzD1`$1(7y7y=xkVO}q0oerP(L>EiIlt-*%Ldj8mxz|@pwuyC zsOb&E=Qwv@pp3S?(6X;3fLW7sz9^T^9TX(U@Afdd@5;q$(5tNrb3=>}8u+KD3pp(uJPa&o1IU0Tf?<&?xg=7u zM-8O9UYH;5UEM66ZMIT9PLcUoeXYD_8>zmes&CB$zp2tpF#IvDyXcYV;lAD@d9LU2DDC5et9|`Fj}LoxPGz3;nDk$M zNj(g80-Esu?o7H|Eib9=v?{A90ei~o`>=eMLN59CM|RIK zwf*_Pu)aaGis<*rKa?!s(Wj5wcKGb?ZTH^@qH)DZXTHL#+ZSBx*AEqFD5~y1&Zi%q 
zUOw>lHhwDpS&pLeXK+~T!;#i-eXM#e~98&vxCgFcgEZRQC$NQ%;I#h0W5# z_<1u#8!Xv7a3hq8H>r?RMMxvR7;jG50Z{B4Jj`Dhq4Euk09e;j^8q3yWd}nQ>=gjW zEm6LqELMXdF~U#} zrlS2l%wc#KF`R`OE(jRV-U|?c7;PhQd=}1C=E!8ZyV7SehbRW2zM9WGd zu}PA2q5%WYzeFZXPM~3Rvv4O#=8;5@j1Wu`X##_a$`!g3UEtt6A|f@$0G`{3u8zp5 zSaYgCPBbK4GjLBo%K%ASTGB$sva{f9lC+TKa27mF4(RsE7po0)nUVULhmV2}3JN}$ zRv-1rag&`QtB)pt&B@?wcp03Y2#1LA;C^z5Itq3vz)KiS1oITQYVtlEG#k!Ew-Fx+pIS5H@Oo z#W>*K%c7*is24^%W)*Olva*=zk!v;8cpDm7G;t1Pmq5}xrj~OVKpklNEMpm?{tE$k z{R=&ypP9>9b%xW}hvTEY20BJVp6!iVj6D_!!y?*TM=$SUGqkk^tRxI^V4S2}rvQ#5 zWMu_SixN?ug{xu|_$&3$PR|x?hMamh*B+AW!LQmC#=X*T(jl zXQYaBvR<)_e9ms6i^8`-d2cWha-gOODSj|0o~!*}{R6&9>jK;c{>DxyGyskC&V`uI zzj|-&S}5AmAQbPH1*8$^uUgMTxoN_!r^y7lCThh-NfHe~OlG6Ki^=e6e&&9)n6# zp~Gz?gK`Kh61WjyXajF}t`A1fFrB#zKpZed0-q!>NNPlT-gq)Vv*-ZjLAf?=57FKk z#ws)T70_1!VFfMJGcd^b5j6!RuLI-hdg$L_q6G6QEqcY;(ec97IsV^wI*s3)RTZS@ zbS< zPq*nW%QvV$*>Pdpn!Vh)CFPIzy#D|nm~{x(VgCs^4>(ruU4?o-+F-~hin{mrs-yMU zP2%(B?RsP8X6HP@^{rSEAK!6V_|CF=E2y{y#~wv?dE*1?!~{{TwXTPQTcNt410)IYcgR}a^)SB{s%=I+Apk49pj``<2*=+5^rJ@!vyu6w z+q{-<;25`7xbWr|-~#b{Nqsa-n}u2EU@ITSU639Hdb5FTUaR?ma0~-=mkeJx9WCFx z)?)Bq*rvocP;-f3oDqKLV1qTiw)6vO?~lPnBM@!=sesqFv^1Q)rM$h5PD-+LBpU4k4Sha z+!Pi-vax_W3C3bj-REuq=#3US@lhz?B1^z47v?{wu(!}P{WGX)&W}R#KR6V5+*I)` zXsXq`E^#_F&WAMQ)5@y-;&{fxQmaANaH=+9@0p+EwRLwU#^jmbT#J&_PB`rKvhY1! 
z=F>*C0CPg0uBFh}T>N>wZEdOMshvCj4`N{uk%H=aA=@TLqR~((cwgzwHxAR(f855& z%ue<3UPm>M0;ijbX=a4AQB;PLeaPj=W4|!g!Vv8vmBF!{ZES#TLB|OlofrfDLg>%n z4S2S(Vw8w1+2Opt9|hba#ib0@nXVWG?n8&numJ}Q+z51y+F~B`xB_rPqETsUpo0Y) z4CEzPAk`9r*<+Va^Oc3>0LWh7tfY3!N;$NG1_2|CaE`PPIWRcGk8QHUlA(N^H~(8- z2AFNEBtKb-y-Nmux6m9I-@%wMV0@ zedl4Bvyj_mo7FThzua@lWn0b5$@b-Bb~)MM!<-RXA{0KPQM(CHM=k{$vgLr)4VSDUthQC=OIrk&%ov^(L=?oSdIc`^&n z7I`z*u+OfP7k-^ap}kH^J6|#e+g$bnh&Mj;lzwG|%U;3WjSahaCxci_j_7yKSpfxG; zpK;l6q;?4$*?T%xpg`-d!g(Re=MUIn&@6e2ShOy7I5OVQdEd(A{wE2%=17)OfttfT z*6c`w$XU~T*Y0oK-?G0=Nn(S-le*tD-Cxvs-yuhVTKK}{tk3`t90Pe;5}f#Es`lHW z2A84R_b=VQo&BauN%tlCwr`xjungdGXQp|v;QBj$d+HzO+M$M~`!15i{L_&jPLI^R zyEpWX!woCcANZTg5PWFE$o!JX-3Ln|0ajo8#mc$%jq|?_wZg~9yl3^LOKHY#I5uFK zznJ8f)o!Q~u=+zvWc=3aFE+m%_dWG)1hIR!HOiw%rgYZuM6ZM8mbAs>e?&z)!V;5_ z^}hx%3(AbQtY7oW_)ZBY`G+=c-r%C+q#)(qM$_bwdC0PMH~TNw@mBScJ#*f1lBZ)F zrajAp@4x!C1b6G=UA5IqSQL!M;eV#OHL?Tw7pIJ0SZIX~uBa!5VzclLWfJZ{jNW;g zaMGt;E`|w^UzFzqU+pZls)ts|mgT`~yTEMPFEj4~gJ}tXyUWlthg`{gIv;*;ExbCI zC<9Gw4MCcP;msdgC7gf${0y2GMpNb;F!dJI&aZP>&tegqo&rC5Wr7VM@EKns(UTgZ z<(uzdNrmzA!b9|iy#`cukGJn7d_F{=h&Q3_jFJ$WND{rredAYhA@`L6Q&)%}$xwp0 zAK99%z3ZX&CEoSmOf>#T?xGh0&MpAN#{dR@_(P3K>2RV+vS%S{Oa4b%UTAfw+yd)Jbf>d9kaQsAM70*Ns{o z5N9J*5q5Q93s&XM71$2}0o{6m;0An05NgFXNo0=RjWA9~M&DP-e|FNgm3FU+ES|KN zDbA|r|5~>5v^eODLt>=d?$KG_D>`!fJxPbk241UGHe9Vr*!bYp8}C!xS4`t3L7BZ) z{wdytW|ip++GUrAPe^=b;mW2c9k1ngg{^Nj*mN?~*^@`C6Ed=ugCB?J~9 z0@mO<34@p)prHnuT(RN%dLGzbb_u(B(ejOE6V(u8o8IhMUUgTFpL;K{Xq39ON$WOh zG~-xOh_0ZlL5T0-tXY7KmxTawz=}m+ZE&l951l&nu?+)jhk>8YDK!!p?F6b^8N?Bz z0RRm;$C-{Z4~;j&=zASXkmy=9;0E0~O7?(na$T;t)3MUxQf6k{hxw>%(rNTk`O0~f zq^k0jant3S%RSCPgRtrhB;Y|8tN=gWD zEvSufA5HW4T_?tY?ARz=J0eBxZnMCub8U3_d=(!ISCh|vrS3%3m!%4O^ah$^f@>K& ziTcT_xT$w!sqj2ZEs2@LoHBvjC7hfcldU$EJzHHwl1-wBtmNOOOfDxm0VjM9%*BG_ zi>81B{!CU3gL&!ScZl0i%B-9XtDUZ6*(7@si@vOXho~UA8@YK5t9vH}?ng)+r}Y4^e0Z|sas2s` zobvAHzwf@WaN)^VFvb{qOxKM&QhK2>?AX?nWYqzczr|YinKoQOsIFa<=Jqk$Kk_&x z{xqRLT)w2XQ(~paH78e8{E>cfnT=}F)04D|u!6*g=5E(RCJ4C}kbGAk 
z)Rt>QD!4}wryi}p0jv-eJe2QSgREeU2$DKJWj7uuno^;}?MD-Aat@`vsqYcWpDdbtdVATj} zEmXp}Xyg-`@Z~_G1Wf6QdURTxI588%?1(xB6LM4(iDuQq_?_uC5DpD_{f3>^b|+Z8 zz^(#Ev&_Dd+VkhIbQT@_8HDz4LM<)@1njCGh>qZ2kK&{>H44LT^I6e+)KC{AfvI=u zCecO>Xh6|y+u_RNS3`a7He9wD9?9=CYwF>Jj8#taihcr!s7@rgH$EVDwKjSy>}S~b5xxvD--6?+b{-199f{c4W8(JxguCqK!Q zKiP28-|s`wx*A^1lV9tuB>MIEuH9GheR*}_6Dz;1ud1KepW9kND|9nbPPX=|wLEp< z${IgtoTr}vtiR?T-qxDDnkybFc^Z-{p4ESDx?mZ;{E4>J`mKjIf*VfCx^yHqJ?05J zc@C{+msAyDCrdR+IN681wS{ig$%x;dzH5~*HCH@qpZrl8)0|w5ezwfw7KBE)VCnBQ zEZF+zrkef!ekp>jooD`N@MkGKG2!{O2LDy_MB5})*Z0KEZF%|M<9ZK&c^i8DUEFnx z?{tHIE>w?tK7Qx=d9l^~o%-3KyOHoW2XoEA)m&mZJ-T|3#%S=d+cEI~dHMIeu`f3# zJ|s>rdv<8fy}ab*?>4>6Q@weuX(;Wk^|Z^lZR4T6#cy#-77t<`kaz2_c8tuHQEN4> z;mdwzoIZF&SHR!ZLCSKAJE~1$a?NPwL7ttpL7%;j@@Ls;{kb~xuJt?O~RbN11WgN;fy{Qu#X?rtlQ|nOx)nO^u?rbl9z9`JLJS_TR$?enq z&*A_8TC_*!&jv?cXaD+;PtjSwcTL=UJ~xcB^3v1xo6mNAo*-S_8QpcI;lwa-Jo|2* z*m*UgyI=9E{?hC3fAlY|Khf7ew*k&LeKS$<984_~imO3?-Rm!26!!fOk>OwWKVGT7 zG~Cz!d{N~1Psflcgo4ofDdR!sUk?AU@3gHuh(BR9x8Cu$7dML+MMht^Rqq(D47|My zR~oKHblyC;C-KtAy&xW-N`K9K!eVINdrs{>(Z8p3)gH&)_ow?e+Wm3K(@VejpP=!e z$vsn=dpCVisDAVn?EIs1@vk$VejSVx-BQH7_dWQd@)urj*r6o!**tDXx}(LHo2DOk zs%0)(b46R(>)pji{%u+N)oyIxNMQBAulK8DBHz`<@j>$+i zHae_kH&pEVD6OvW{PnbE30|8@r<#zmbB(e$qU9UL8X}O9J=P@#P1-S_Ka!HVb%{%{ z_U{UrHS55`(hITt&1OFUBi%KQsgVoBD%7c3(#d??{SR)(KQYPq|X zk6CLOBDMCvaK+!-fR;>H{^Vrkm58e9B+=H>)hDkdt7Da@bI6uOYilB)7S&V&=j)I1 z^;U!nN?0bsq4;l25qfL+F9H(1?>Zn4Jju5|2bhry^Mkft(q!got z96^>MnATVyUZ%XsU)y!EdZ^JaQ(UQS=Qop9C0c-BJUFXMOESgRE?J>!Br!;iT9?l; zZq&>R^w*ZG7A?{qs)Za)f9;{FIm}ZB&|J;++J`JNOZ*2JwW@Qq2kzSvJr1dle_tqW zNMgKVPBHVY4!0$I5N4cLosN`T-JpbCNW$6*{aAfgr-FiE)Qf< z_cSxuHq8hmMXJgNr~-S8r;dk3w4YOj3UglYYKA8w)SHL%#3MB!ud*Q0G%E0>6S2}) z!8_DEahS;vM~4|clE3AMvPR7QWfyh5b@^D*vc(?h%usMoQ04Fh5%{H23m1GFhD!=z zENrt9T@rXx#2ia3W>*J6dQR44+Kk=4CST|}DNs7`U^lc0`#x_<$>p55PTM2P4XNRd z?LRTLKTkot9<%F$?B3FXVeZ8<&$i;9FAD$us{i?%tNt%;;zPPV!Kk&$+x@PUDudm8 zf@-MvuxErJ326b7RY|xDoweg}e%d!`U~PTK+&8MT?2hR_{_^;}o$DjrHPv6dJ=T*{ 
zl#Fj!a!069Vqti3vYW@6ByYxpa`Ii^%gXL3B|`8yX5BRT5VLt`_H3i{rOC{B3un)s zZ7A$hX`u};3}#`(v`Z9nEWxUBK}*4iVIiL9s(Z55<>-YfF|mil12j!u$=g{CyBhqRWDqZ}n+#(anj zH6$-J{VwA!o@3#jO&0d?wL7INOuBuK8nm$Xd;5vzNo$p|G13>(1UssV@I~i~;_M=1 zxojRr#;G}Kj!3BjQ>^FW@_hod{)XDDrO;9Sq+Z;0XXt;eqe^rru`?p%N~qrDOWi#iZT+Vm^geag z1o;QtD(*Xe~ z@ZM&-Y<1;M86NjDqljHuxKEQwW00zjqUGyr{{8}!zbF3GzvGkHB>CZ&&V;@I!mIvb zhe&osol(eC>9Xg>pV;*gok0;(w{!O=oLV`ri~eL$L{h+S4td1NjkdxJH{L%dl$<=< zZvr3ad-R{Db~{G2M5g2C4v|6g3GuXrP@3ZY)Yn~I`zH#%?CLZHgO#n6 z%*GqNZSmEq`Ts#<{ZH%3nXO6obz!a&n}{(z%)-T{780uE(R5@JHp^Qem|L+=*e+-! zoIlFRh~kv8@Jt7_uM8XpBBzCBI3qrVVTN4KVoOiO>j(3D7!;96BxGPX(#BMVZkmNb z>aL>mf@`!ewv>(2sIXUSYbdI46%PcbK%N6thrs=2mSlnzHu{6))mI|)la1}&eKovq zdWGo-QTXS=l@G(F354u$jchgnSPHX?mxTBf1gG1;S_CMXSb}S;@S?ELvEw`nEY}QI zvRlH~X!&;A|N6xM{n3B;qWlKC<3JGwfuMvH;u{e<#)?)_jT!1DdQ%S(fOi49`_@lsc}vc!QjQ;4 zS;n;6WoXMOWpChl!-^YBQ<(v&#(dqAFqwsEt?+C{#!{mKw0z}u6$CwGf8~Gpgc0C` z0n1SqEIc9G&h;?E!G%m=uqcHrGm&xBvLs=*x2?r0@sjKwO+^^YNvA!v+g12k2zS9@rF7nU?<65nCT5kC0%BWPmbUyJMDYvCtD>cHfxq|a=7 zwHA3IBcqOew+;WsF7J$HtaG8a+5vBS2hINne|8HD=kOGRjS+KL{avO!jQJcvAP-|* zD8X6hN-*YX?tOwPtq9COwm<_vf)y?u!7ypT?h8}hks1LFLl7!|BXGX3h}>ga8%tD5 zMQI5-g*phAuTzM}#0oa=n65r8BM6f4lx&)^6`s))xYgopK4DmJ43AChLUJ4)X4h^~ zObhb%iZM*1qJwvyTjHe{@h`{SJ;Oq7R%R;dcB;Y0Yl{y^l?8!g`+2UTT*Z~Kq^r~XdtXJM%{l3jwSjIh*~a>TOS^~Gps z=`nm(7a|5@R9z*lu)^z}JkmEj)_e2*`TC}HQ(LkNy?;ISdk_$qeL4j!>1h<;KZ{A8PRSFIoD!n9IhIedRj`vt z4jSwvJ4}!jjohLk74vCcSWLQ^@^P$Ee&!)T?>U|donSACu3bAWbpriw1^1kXnAC1h z>B8&ghQUMqoV#u=`Y==!5%W&Eb0HK+Cjq3wl|@6#ci7Pw;*dY*Z@_&iEt#Z(FR zM*xw>O)MZWooU=$*MW9Jh2rZCC*tT!{Su$p8&0W{ufN{ZLSE@NA96AlAD(>KHu*A> zv|}hq|G_m2QmF?)fAX~!>7{<&H7C!B9f^luA6B#ty!yYF?|-+E{-5Wm$&PG1C;l)h zjg4>ZWw3CJg^;ym22HqW(OKkufT^I=B7~hC!nWX%iyBP|Ew|G#YbAXW{ruDs{Xgam zzfF~!n-}I0^;6s2)rs!EuUh_Q(RRc#q^!d7 z$QF5prDce?Vj-4?2oQo0D^(WYYY_=nz%*Zo4NXc3t#HQj!K2CwGcLi+LKJqs>$J7) zxJy0Z*E%hd>ZC}v~!{8seu^3}4^LeJ7-kqgTfWRevHDw#dmFZz0VGJCp+ZPC%Z z=!lVb>KgnEhtSZBbe6v8@B?H^>Zn1>4ywMW%(FNB1vT0cr(cFO5o8*x>nD3kLxZpB 
zF?SH1^zpDAG3wz%C5uv)4}=M-*p@^sGA0FE8thZbwxp7iuxDviYzr#c&4}*~&y(Gn z?1Es%7 z_<{t3;J$V~uE)>OyHBSJ`!%4RXIt3xF2AqxRaRScF(JjTCrR-3BGUTN;8QqkhxOzo zKv=%@>wy>F!X}hlT0hb*bLac@J1xF%rPJN-PwqKm1=sn(j}Pu=hNZ{zL7T0b4GQgr zs$G}|B57>N-hj5)*TYGfu!_rzElDh3Sv6jIR4>1&ik7d$b{{Z}_sp;Vclkai|7rDm z=ch%{<2PNCD1Gj?fHwz}x`E(BR5~V$AdHDCb>|aV3q@HTLRlCJjHOdJtzv0jr;F=ElJzPQfL7>Nm@{W9<9O-tZcZ&flhA zu3vi3zS1&IztWARy&&IiUD3M34+J~9tD9XuM0|Q)?)+hEN%uFG54%U-<-RD7Q#oYz2~st21~asvS=i-s4Y?#?yg0h z-QOIt-#By+H8{_lfqVH`%o=cvb2h$5Y7j3f4P8WZP$UP8Wi1jF_Y6e5NpOFSF2xN5 zH!*e&+{aOreGP z&9-l3bd5E1kRk&1r6EVIP~xO8xRqm7YPSDsTheyMyFZczdkE`+=B2zfX)Trd55S0N%Uat0)Wyb z$1q!S^*SP8QoZM0h4mGPsW>xI=86+1{UbG<}SH~JiSP| z2)tlG2hYHQTT5F-+PU*jAx~L~9Qj)v=JnQlu3;9})0Zpm4oGrI3+iEj*i8lp|?h{KV6jr$Zk19Z*^P_8Md$0X&$qy56p@y{+Hf#UulUN(tmP4WE&!VyxP*Gw zc+LO=kE5KlSwXhp#!*UATDx5*r(M+^YPizQ8IjTQg?@8_%)`@O+Zx6tvl*lYJvRUHp4tEwX(j(aT(rtR7 ztzl)nmFDzNKVlNFSQ0BFc)Z@yW zHax3a;MNU0yIJCP_%qdI-+obK-wsWJr?enK$~Fr>kza6#&+6G~kTj-0XN%1nI^5r`Brc zKKaU537EmVYlk#smOep69YDo-I_aoFxug>W_>5@%u^S3d?8`s8eSGr2*WU?07Ou66 zeKCh!8^-Q7WGCFV=ac8NHqN)$!Dek_k!FX?-X-`!3y}&*m@bv~&$oU}H&*5g!!$hk z%+7}}=`dlY8`mKc=E>X4La|Q)9xD}xAi{8rTL=Oh;&AjK^;eAM%mMtBi|;K2+S(oU$b7Q z5R7qLX6Y7FhMqE>C?3SoxLOQ76+AJCi)ruWY8e=RA>AC*iLrYvx~V9=8*ls>x96tt z<>vvXxND3r#6@DXeg5LKfj~ueKau>qd|fQ`REX+aY437orW%f0TDoN(%|l64n#+L= zJ3N&O`0h+qb#5@cmofoP5L-CgDUnL0y<6BJREBsg17UDUxkef!_aq!Mmzzw>z#DT& zut9PcQ<<1t5)!5XiID2dG20I1*Ei$76*)HdKPJ)DPqu20#aN51KYEvoU_4 zlP<{`nhS()pyPbwUZFFq@_;f|eGMNE>Z4TMRn-{kr`C;M+LM+Yu+I^;hxIOOQ<%dfOwM zTAOTcdt*mmM7AGx3Qll=ij&!fKs}}}#cu0*wSN&IW$2bz{#799qNwyrBpU#%my!aH zFY|vT^#@t?eGA&W_*Bk#Yf*oH0W0&Qzv z_-hU?sc3ricIti27I@UHI(gs>JPI_wu;vU${rw}@_1l_bt^PCp1PlgXtRQZ$g>J^X0c|ayn#1V|H~`%`+AkCkzOX2-heN*d z?+lCfcL&W(5+nfd3He$dGIks7glzM(VDRuNWb%$;xMG4b#@>JmM#K76otdC0AS(n_ z6NASnC?Y@@3<^&L6-NyCNMZ}To&St4r#6yPG2XIwr< zQ_@n$#wuZ-)$Yci`Scnui>TsQayPw!-rHJKI^ z{bpvB3a8ZR2lD!@rq34@Hw9}aRaiP7H}x2f=Te1Sdu62ERMRKdOYIFa*Ncp8o~#va z)c{(YnOJ$_R6Abd3MQDt_tCzSEiSdkO-i_nhY$2_S@Sa)EnmgLS-+rtF5dC)@*VY2 
z6(PMot3~*WNH0xQq|}1}k~8cOCriXU@s@eB<1L6uob{Y77KC`hmN{IvEu=XdF(0x! z=8-kI92RqLDKo$HBo?zlc>`vH5)^>~c+i+&d>y=mC>Hb2e&bHh)53A!)RRhIgYgm} zpQG41f*1`-LulGwf3E+vF7Y3p!teHzlXtEJ`Q4eW-f-ibUuWEhxDQSrVlqF(P6Mj; z&UE?KUF9q1PgjyIOjnX)Kg24gD=p{4AI0>{Zp)Y%rY+1w#=)J&M7%(pt5O)ET%A$n zS?D8HDpEDRp56DgFe&ASfI_|6J<#!KeYb_jf&W3jspE3-OfbrJ;4F}2jR$tkC@BDGGlpqXQ)X@jRaX=Q_T%CyeOj>>(u z=lA`-?mzeSy7!M8LYvK=VC}v3TF-hu?~nAoma^1AR;b#8dCsd*b{A*^wb$_SP=}wS zgCptiYi@CDgs8jfsBVoPl0Qy4q?}r&T9V43 z78<7=d^C#ch7Z(H4bC>xn{0lRGo5DpUP-4CY7$`~1E%S3AI zF|zLCneVy8J=Q%9sUH_XKmjKvxpXE~GwMs2#BF<(;sdvce!ev_!zm9MKB6}km5t3Y+@-A1YZ_rITg#5HYP&z! zr0dX;$JCE{LCst4ku%)bZY^uP;So1{(suKG>Irr~u#ub0@H0;<8r#0Tt!q=E-2bU< zeU#n3ZcxR@=WGu9cXqc=pi25ogKb>@Kw(Kuj>ieBEPnZLm8(a98EwToTNQ1#ZE*Y2 zSGH|2MrM@z+JQ`uQXhM+N~!1BxW@)n{^_%k+P1ba)&)rUep|j~7KJ(x7N?s%t$-qX z*cE2Q@LSjeK5fp8l7aNLh&jIGNilY_jtVs$Autbc6B_LK$R{zWrRe}^_9%(7jbRU@ zXQhk@V8KINm7?@p5*_x_tv%>VOnB34t1X{W!S~wgFjbOy_QBe6_R}~?AbimV+xZRs z593)IvS0x{u|EOS{3)O>rX7QX+I%LY*?~D5ALJ=Ogz%D{;c=GCBcN6ezWX)tK$lEYky^2sK#ghgWJ36$G$DQwf6bjq6cfI(&kQVf5-lW@0}qnRu_*i z2G1wDvTaeuTU(ROdza#Eb4}c-H5je8e~ztIEpX=<2r>&wUoFcuv_^LbMiO=TYy#hb z5NuQ=9*O++6suVyr8g^Ef`A8fIanGD%!DJ#uj)w1b5`c!rW9~&zd!{OHpdl*V_}NX zUNWDOQ(c)eT{1_ceD_=TsVI>5CU0MGOvw@SM~h@@awsJ-XA+q#LlYI+1%W1utrJ}p zR=9Q-t;H5^{ zCs=u&lm)CiLL?#BEUaBf#Lz2?vgd*gcrY*6jOTLMtDQ-{tJ&*J#LyJ1aIAG>ktL}m zmBk8jmNI-ENDjE{*bu=|8sGK)T$X%1v1&^qILuUeCDjrds++JD^g>n01?jDq8*%UYS3cbuZ`NFn^j?p360C`I#%P zd$u9yk4Nj5Kz`99>QYDm%=JGnm!E^$BlI`<&zdc*luPn6@-Jg^kG#B z<-?bVXSMR!f)M)p7dM=SRV|AReu-s1wWcRroL$X|jNAF`F^1sQwUxVm^_44~`v^Ht ze|QeXOrcPu-7Bm+6D7-64m@uYHA+9V)kdKjNSF}zNMc8fXvL9U)GsVWBDM-8pnT<7 z)(%ZLxK%Mmf!iQ(*FdyDS{{@&9)5&MFB5^g*7pUR#iC08B^*O^mh4}V@&Dvs{iTFK z?P8zNTZAh?)x?!o3zTJ33L1MJN{x9my`_4f1$GEV^p0qr2Ku37Z3hWO(j=-DT? z;)41JG{#CiH8zCDrHb8fOzdzbB2qpmQ7i^g7J~XA=mYX+fh1_HM~bX}e~yUkUqqMt zi?JXw3W{;-Ckfma_=_?7Gm{01a=Zb>(8bhBybd(TczwJilOQA;ipaJE0T00>iBN2U z;4Xbcs*=bT*13T`S%15J;?%xm{#!McW5qidwsPs@kEtyo8mY0}Tg0)vE}7O3P$?{? 
ze&#;?>zVB2sNNrlJ@Y}Wo1-94-8-t+zj-1@E?SITELz-ZBRg542M~VHmO{q5c<;oK zPg*w-7iT1Io$zIj^b$0vUHq^bWtglC|-vH)g;MYZQ@!Fr1$QXK7`xbkuKOZiScnRW&WdK?fVN5on zKluy|RrDVlqCsD(4B8hUEF5f}mj`}(I@z8Nj2=Kf`#>hL{z>C(7;~-xWDY{hA+AMe z4@7}MB7k=xkXGGtdT_`;2f8mpnU$q{0-_(&@ILwS zBr67a&6yHm1Zb7va3bXHep&UcOvB$m)|vA26i^gQALjmK)_3O`HZp(aR^BkLds5e* zyu-Vlq8}TQvzbboNgWMoN*&C2e&i2xUi2@;{;e-sUAK2-6245fy;RrB)3x0*{N>=I z+aaOhw5#F6ch3y5WwZX@H|b-I!WR7H5CGzdA@%-eI=a^3n5QzeN#8XODks**0IktW7E;a{Tl+?HSTH z1^Vjhg1wfIx9a{!2o9^b*MIA%%JZZOKiqGM8@B)PJ#P&N9CGo8R|9@(zDF{7ZDJBWUf!$u=FV7t_=O_O+JYwg9SQeO zH`(ujMBO9>;^fUIkqRdlUk&bSn3y}4_Bx<+Nz|6WznlU#wauO!i}(7L%P_8|o-oVW9j(Lap*@SR>f@jwjnT&8Ib!k*Yss&CDX9+RDH+KQ=s94Yq#=xXu-x=4X2z zS;_{L@%H6BL9o(|4}uT;2a|0zC=--1k9v)l8aygc0`)MOyZEUI3jZ;kfV@@IfxZhR2#mzZFs)DK*JE=q>n4@oE=G7*)`!~CW!TdK?44czarr5w1aDl}-|1`{>j zFJz2Hwqc>742Vfylr zBs3;1EvAOch0Tn^;?}KgtUW7UJz&vjOL|mc3o&VDw*U6_JvVrqyT`iqp$xY4Vn?>- zTW)Afby`ey%o$|;>X@2wIFPjMHKFj|nV5?;??YpbRL4}OwpmzMBjr20Ok3eIYS-@n z`b#34DqgM?pP<#RvPHV;ybF`__?w8P5bT_d`q_ZxHXBOl%|?%5EXX}hyS;*%pCyg# zbdN(G+K+_#75YMI`BoZ)rz53yurfQV*YGMSl*CmJAzYYKhfyXMrUCU9U7`{x&FatU zbdLrPtE@lNL+6K=+~cg~HwzJLs|0!z;R0cPkIir-cIVOA&ag7_L_M|;!|{OW5j-)l zkC)g`I4;9x!zOVsVu!U4HWW?2JlRws8(-|VoQ6xkBP#W@1#6x3vUFTX}a_iAB$eXjP2(p;62rvto%z9YS#9NI*^@4?WEKYC9eUfN?l zbanaAz^z)Y#f<-oo~(x{Eq;7)F_@0rpwaXkL9@G? 
zxo5@Sflz84E;2q3`9&h7kx#WBmA98~4HnVF@RUAoK!W}l_>Fi_GUZ75%I&R{C!l<{ z>mHk!AkjTe!%&@ID-Aop0u<|hF?fn81Pd>ODh`%gMH@EL;i0mn-|oC~Hs>b``4me_ z%<8m)!p?_1!h{viY6o&cq!2F1CnBH2?|v05L*04qywwdVa4ztm=zc>AP6DTj1hy5~ zANdqk;#t8$@L3~rGKgp*S_+;;XuwNrnq?}5GOF3OEWG{5{01XzN^f_d9foaT$+yo*Tlj>ulg+Z+J(4_!If9&k-CO= zk-7=tn7JuW4pJ}p290_Y1R2WQi(X)|J-L^|p*0Dy*qE~`pkoXAy+1)aTgKulMGgv| z{&FLMwq&dZbz9LJi9v4}4KAU(KmMfv@8A#=;5sC(`o(Iq^jz!bb=9gXXpzaxg=HJP1CfR{*9X zJ^wFWWey?b;LZYe1!UilC#(tTxt+`qW=cd2+ms$J4j{~`4?&2Kw*oPU+_Zg!=TuQP+exaa%n zjL8-KinVhs_wJa^-FEQ%#2H`mcg?j(YwBQEbo2ov6{J>9T2tJ#5!^p&V=c~q^->(3lsrak(EHp zZ%+f?N6WQkSWTcG`vWfq&QVP?5dbSdG>UoomSP}Q=DW~2>C|PAEE?!j(qINNFeehL zRAI8g>QA)I}OdptR!H znfKW)&0~TzSR$PQa%|-Ky=2vg2mOA~5uFu(-p(%^V~Ks@Ul(Ie?QR#3@x9T?ttEcx zWV2f3U`hk5kw)MW_%75mWUK3@2;^2VTj-5G*qX-l4WLi z?&H1hX+RNuPouERr$apJva)QmI?q+b)n%QbT3`fEYPv%2-Dp1xXeG!N-UQuLAq^WU z5Z9Gt&?I{{a~ zQrV}zD+G|F2JwE(SztPZuA2&v9tIjy(nDbbrzrySkXDL75W5$c zsUA(lGz4Fd@9_b;4-e?GV`mWtkN7F(qA&vbsK9iBj%&>WIH&!V01oFvBW`1lHyoWZ zaFh^M4DsI=H|OL5(N>_wpt}1;f|?Ay0sqLLE}>S5v<>opOCol#{dDSnV?9`VyY*`q)b zVhuT+>g>^0SuZ4!q>!zMO^kNtAuao8tm7 zN4i_0Si(5{rgV{(X7Lhh!qlBFQz(oC8qx63xdA|&ezWw zER6C@^H%m5tt!bk4B>Dg3U`=@I(dYKW}}v(nTj(K0Q+!FuxZ-9W1i@-Wf?y(u0pobbEEwd?P_`1bR+`(~t~Wtn?_-F@6*T)uhhQ}dfw z9-aQQ9=}LQocXRUJwK=GTTFMmLA~Xd_?E692HxvjZh_A7hw4c!Ki`nmf}39Jj_hNn zO-gq*e$#p`?6juu9B4nLWo=beMNiy$*qWw#3#m<2*#XUODQ1@~g&Yq81u9%lHB_i+ z;!YZV(@7LGsISLkQinzw0&;)2j-;mLq(xP>vD@7QsKrDoNZ$82n?(qGY| z+TBrfgy|&j{y7i8IXxG=w|KdnwQwx9?APQo$-LLd^_ye$=iFbw&snAIs6s%QK5%n< z)s(H6e0n&l3Z{BL=X}n2od-Z2)-SQ&-zL9JhCIv%Hb>yynP7x=b0$_lIGH!)063c! 
zRUeyHRLbrpzk9O!rMsv{FZmQac=E}n-q+p#05azK&~YYZ zFUMk2O5Y)%A7s-keXdiFV(B}1u^Mv)LdKE9KY;l-_5quxyAKFGGZJKRpaJE)zSZ!> zCJ)d)3;$=$6@ch@S5)@zVYWPIngJvaFol3BBnRRS9LNm9>4Ckzc6cQl6W!`VwhK5O zn+LG3Dfo{1!70URld`_&9EW9>%V*Yp@gKGN&RF0@V592ag-ROF?vDNa!>@4>b#m~emPfcSYC_Yj0>vYgP*5JmwHQr1>H1|nrO@sn z2ut?pE3+=W*BtSNXdGCpasBx;|7I9fTOV*P#wU1z^jCX7ro6{XF)0E4hax{5YH&}e zCV@LUIDh?aV!gs3{AXX?W+fjKiiMKJYs5ekX9I5q(DINP^(;0l`0^&>v}_`T{L20L z(Fo%PMld>Qa3=ujLu+e$EjARn0SL^c`@HGU^qOg|28fC>RH{ARV`nX_0DjAPLVXpw zs{=DwbP*UZ#hO4zSt>8qT$=}VC*be^>D&SBI-oH?wc7|j5x{PNO(NLmWE;m}Tm##vyj^fA=U za9e;zC=p!q0QdlPH?kbuv;ejT6m@`#LoC%v4na(4=%&bFKDIH~d!)y>)k>q)-b?-c zCQ{=eQbvA`8{w{}haq2Q@sHINzy!bMVVIcXPPiKZH)sH4CdRtJz4x>#>3sy4KUbt6 zsLL6(irp-&eqdX|@^Hg`p8|}`G+0UvED7glM8LM>tt(aBql0W-^%qMHqWoXn_-Jd!TW*(Yb z`CHx7plTieYHei-`FgQm&=m*PDhjLH%okH_c+R}O1L9fK!YaP9VQt-0Aoj)8ktHs* z1zYU1gaJ8?xs!!szCpl#+D3KJwEjb>l$zC}>{yF-Y1JfzT^LF852p^rQ&X$ul`WOE zKXX$c1bnH!GpM{C*3a5Go|tu}R@uJF^IWadzlxS&}+Kr(%IRJ5S8ZAdKo7 zy0BsBU^Uopc;Y8kBc2>T@bBGFJ6s&#O&;7_UZ9=}wjVSa)kfby-%*z?E!Vkah_N*i zBdhQ-rAsj;>g*hKaxOu(0DTh-Cf#fz_aQ3iwMBh!zBMzD@QRAYUuQzzN+}Ae!{#VKnPyS=zwD+@0$$*+%n>ZI&@?|3b z`xGXz@8$~C`!Ay!Q7`nCZa>nAMZNEXQ6ZIz;yyHmcL6jcDkJ&6BEHBj^8gIg{7 z5Plu`j`}}i3ifFCm91D5QIGk7hvP9vxoSeTxB(}iVS+Io>u#?SH<*AZL-tTof`OXB z2Q~zoA);5oewGcMm~2Wwsa`yL9krD zW}|#AJ>tH_uiTbQqM%_BXX>}Qf@gB3)@ON2#f{`bg^ZuiK1XJ*mUk1@tGtiVc>9rT zQp7?Pqe!29I%q0<-iyaxT|F2RA4V+41heApFD$w~U7KXWad60>8&>iLP&+YazO@rY zCd`@iwY1`X80rOrz^OT(E!7J32jM6R*GM-#fn zvdOk|g7IM=O>ZbNOC17ny7Ux5kEAP}QO0WuMV|duzHO#!X#U@P%KylUjGnPkp{J3+@f-8=1$gI+F4ccel0?sMgl_ELeQa3a z#F}sZHwaH=7EWv}z{@V2ZUidR$18iU?(X>RSi9y(t=F%2{0G=_K2BPlr^**&qPluMIYayAVpPaXy+EtmsK$uZWw83ugTwn z8Oguf;-0ULkt)1)Inn!gF)CBK2rGTs|8ZrUN9rl<>ZIIIqsW}WiXbC$HqR$3JBfpZr z$k4qz$=|<6bgFp+L3PZze}C@vEgg#2$-A}3u6tf=NIZ~FQHVk_u?7DvPe^G0TV{l* za8^tcG}&!)*y~40PR|vk_mAje;-_Nm&n%}uj6U-SE1)`D&sd{GQG9b}ndpS~!C)LO zn0;e39jq*+zewy=Ktz)cMwa~1^X{9>Te2R=!MaHfH*O6cG*u#BwVwq!()9~zB>`4d zrA2vZnC_0=gJ#qoqL)3FI8h#wXHi<>=P_D^(;g`CkCPJbE;8se<{4rpUSU^7ICS*C 
zvU&f(iaM2mw#H1YI%zwC^Nk(GJKJ+}*n~kRUu&JFO}+O`1TA#*Mp%>M$u3T42z(i!JKO| z4Jau``&uGO;3i=}rUjQs$YEDZtxmOJON6eYls}>!_bz6FQoIUoc zV5EG2cQ=oMk2U6v2zZ{$GDSo@ae<8lB>d(mLqp1vN(B_h1BJP3q=+`l>TjfTQC$MU zVt}Fs6VQ|KkbRvLB1Fj&R9ef;c?89KXn`^>0d1nh`$ZT3C;wEy@l(=# zR4*RtQ$@7Yw|?Pf%>9v}l+9l+g}UDl^-v_0^t;)TM31jYbJ$nus#g4rK0%avp1>IE zxll@qTBz=0?PV+Mceqv-g|f=und~eyQc}oa;4M?5mK_>9?s)xU_`3S4*{R6v@V-suV@Nozd)l4iC++dZnc97~52i>}{Z)C`VV7 zz0avz*3N@%Ht zNQ0L)nx0o*O-n@S5EtCkv)$7zAVG9|>+6ba-aEwzNEn5=M`Ul+tV=#2hbTI{i8qlf z624VcEE4tVs<9J4Nmj71w_x8>ilQ8S*iVs2-m9w$2@Dk~_zgx5zpTr7pRWts=+ zpA09KuEM_)-tLbQ<_T|0J4Q7{x}<&XGst|>Ui^yIC4}depiv`UlFKAJ@Un_MT~ErU zt7g)pth!P#+w84KIjc^6xcK+z_XmspqX!edmfdNSzxwO*1CvLtgWVT= zkMb=HHQu|9T)I%{!H@90{m)4u^23Mo%53Bp*8jEpDB4e-y-;Cy5DT4|75}%80|I>y z%m_;X%`+v^omA?N`IVlSyjgmT5K)3deT@eR|6Lgim(Pk*bWMFuN2?clZyC7af*}tO z(1u72tgv1XpsO4;XTH?&SinMsh3kuL^FRLOgo0%9au7-%m)0H^>ELmdMpjAEz23zT9w<;~b1f=^JbjLDyaoy~gf*JKr zbxXygTr3Nf)W0mj#aN)l3*Q%>sK5f?=F{VdMzKxphrOyCo+Jlw2bQ;fbe8OWwM3nB zjjyThPgCd9)IG!O4KLH=@{GFdSC(Y)w+gn`a<$={7$IlZ5<9b{95-6A|XhR(v6;@ECvyZkMR%(n#bnBX)j z?HJpX2p*jdU5{;oXsZ-#jDR*SEh`c2>jwEbPfE|7RaXvd8GY8 zwq$2FnNST6zL=?29~ws2_V4`DBWSJL5KngO@z29f( zpT*&G>U*(zPaetVu9*F@&*Rriv)+3+<@ z#E`ss1J^Pgxqki4`-q09y@$_eMV0O*fxqM%9Li%W!>}qQA-8mIh6ulSP049x5HuA# zV~DpTX;#&Vzfaqcjw|FxvO`kum7`=l+9IC?do>3d?OlGBb7aoK1(Fs>cR3KdbiiEkP zpc$U7407YmM3PcUw=72BmoN2DhIp*cVk>Jwe`=D{0)uhh+_#90$|v2hvqSaY1U!Db>t;&K&y#XV24-#mGEbY=P4snvHRYuJD_tVKb!(D$8{#h zx;@h5X7aIYh)ejzF{KK06})rxxpc1q;w!t9?CF-p({oGy2$_WeV=E(4NKDsZ+ks|( zjRlncyCvp{oloq~4X)q(@^Cu!AmNI<^tW?b_Ge>GojSax*x=&5#M@qa27Unzo4W&^ zmBfuZpGaPR=z0D0uFbuERjXaZnWhg5E0jbA#@`I)Qj0NnvyEx?sCxhf)35JnBp4RJLvsMj_>18)N zkt|$ML&I{8#MBZgUmi0ODc{&mTIIlQ3Y0HD^F(wfFPK;!A%uIzT+E3R+YiL?GdqF6 z%;wUtB)oM_P1NR;w*-)fjMZB0BHM>l(}75Q1_?Jc{x)KIjSMs)Pn@O=I?AiW-q&wg zW;UI6s=b#}gYV=;PT>o?@jR^6G%u{z$LDTXdzqldSQt47Bak#)Zy0wN*WKQ~MB3hd z)&`4OIx4S5yGwiBYcND~wsN~5Z!Gg@GYnXW*fdOumIy3M36Xd{*h3(Jud|pE7^E}- zBa#ApPCCOsT6y08L9Ri9qWqf23hbtgS-)H|7aE99+>Y`K(n6^!rwXJPJl5F{(vyNH 
zv@H#SCi&E%l&MQo^3SUkwjZ$EHlicATW(cVP#MSdE{buA_#x8tvbiIV1l#iV_y{9amTt7Fd5AOXxL*udj@mz2PZ;R0L8Mmk|M4G4`;+)L@Ib#!{VU(J06@v|7!2)(CIt?T6nzFPJ zK1dMpnu4CO3Hz=TY0uav`X>1+2}I=O^87oNYn1YjB7$Q7pH6Sgi1}p(xNet7P@hJ; zYElVh8l0~89jSGsb{_A`s~oi0@;#sR;v{!I*`yF zW<-r>wf{NNlQNMLD3=v*@(NNa=BV3wJ%`kP`}5D=uUY@m0-QqmqfwodRxtZGKO3gC zgYXP0jbgbxVqA{jcom?zEpzHwBE#~|1}%yJtu|v$JZf8bfu#<9&rFS`)HfKHn8;o3 zD|(4R%Osr=Ynvp(^mK1ZN$Xr+$gSQEV8WAF2TdKx`-4|S$(#d?k@9slTduO`@~+*R z^n7(zWKI)+wKLa@Yml0nd<;{mk6$Y_^2_WODD6AAM= zlzzXmNu~aNJrz(L@e=N(XWi?*hdl3lCftLGnS1xLvF=fc(c~PuE`jYp#;{Q%&SD;x zA?EQCdFVkqtbH75pw8_j)_yDDB;f?Sr6SkP zCK2#tQ#Tk)7Vu#VStusLLWUF)gOkBU=1MX+>ZBvPeTd4ahSPI%LOO z-yS->C3HHfIE6*aKIEPpcQusdSs$HM5NRHwYxI>?>{-7pBw)*Ger{h;P7*)Y%c!@_ zsl-KdRUE0dcs&z;K52K(3iC^!wyt3rb$PWdo7vW6vjF>U;+%>%uYSXD|E?jKTJ#po z1+BELUft0fM$awf`s?Mool9}>(efM#s!|Uin{}%NRjIl^?BB1PzNG&zpCf;)d~kA$ z^1X6Rr+wRsqK{J44KNj=Jpst&cWu9 zn6#Q@F)-wnLuU18TDL>~@+@N06b69wPKfKnpzSbXmJ^DDOb=Qq__UC&gS$>04IxaQ zLM&n|L)U>L&r&(~SHe=l9TOJ9Sd^+TniwG;ODRgxzaBBx*l-}`)PdC(gV(efSeRm@_F~{iQ|u!gtt7|a(Uk# zep+lTE-c9Xe-D}bUts|LpBECWdUO{>poMa*;(1)sT^I^kdsKRzyYsN3Zsu^Kr`FS* zje13gO9#zPD{A#8Qdx*IVBW-tvG@4Nktddn@$jPE}6zn2S;^7S%oBPw(5>dfz*?iNLbyGPt^0 z13e#X8GON&DjCN45Oa;3*S)Xvs&b}sUc-Q5A9!!{c!Fgq(HDk&LZhLGfej80uRjxh zXulfVkC+mcLz<%9ywd{WLfDE7_{ie3*UIOZS+6cKvkD@pKQ6)bXKpc;)LOr3BdeI2 z7xB8k`BL$8Y+gwGj-t6xZqWLoqK@nFn?pm)^(P-bZ2$jX82_izle0}rddpwy9qGYV zDVCD-q37SD=mIyaP%cCid{s_34i7!~_?njOvk@ly`>tSV*(&>0Z3Pe{U?(~@=mx7X zLB`Q89NBoB7$Rg2f)CuL83_a|Xt*=6 z>%V6E*7_smn`VwzBtU+h^grb*f&|jwJ67@i?X`id7V8dIBxNrGQUn_z544yz&1;6# z&T2?;gWN~n!cBs$%dyMF`_Yhp_lO7ica4w-8TXX^)U98uiPPrCzeO6Zb9*8@X@}Q= zTsaou0-p8nD>hW3b`G*4QI8>o4if0-SJgMp=qR?SrR%b#PaQ0;@yiKemXdH@o2v@R z2?|!7ckQ6N!UpwCh69w0W_N{9(0O0Wb_+FZwM1m!aoLe(>Ty~7vNC=nrhla(pP8${ zF9h+NPm&lGw%6B$%{V0$xM~QMJKF_jhh@%aSEJ$_`vB|wxVCj|%P$?NL=q_*-Umld zUY`G1r-*4;tt0$63M<$O>KtUK5wVwVV1*`{avLn6MF(?>Pf65R*lg64BD~0{3rxv$N% z4UTIV;0=dV%Bs#LTr3EwY}&LrH>G5B494~+rLXlJBrpUn@9rp8oaTOkV8qV^P>Ddf 
z3wmxfkDY~q*2PGcTmX{DMm8nxqoL;cIdU@(BA&c!?LEWSo!=BV_jM&KG>_;&6JVbc zQ5%I(@`(&cRyt_th)p4ks*Bf+gkU-_4`xs+Fi(Wrab<{qy~gEWrQTqw&|&0M)~mA` zm%}ztt+*Q38kjk^|G5)+;Q5}JVx5AA@3e0H?zeITeV4o`bFE{=o_t~1tkbGR=DHiJ zKXKDZGW$tx#*-gU};s9j(O+)$^nHgbTw7ip9%_muj{3ZbotGj#Ox#P)=0xvYR@m*)?m>^& z<~No>I4&4Uum^kgDV9jGq(No1|5T-f6DCm(EL_jTQH2Mk&`6<36j*3w>78KhNy#wJ zmJeZ5+hA=Lex}X2N+rF!%_GnZ6qvXrwvo0Mw1Wd|`95ti){-Yhh!8Ybo87+ybe8j? z%PI#X8{w4+%oxCBsD7%6nnd}$AsP(EW=vN z^MF5vXw^<@7q%8V8t&BT%JPeT?Vj3|GE|H7|ck#y2J+~}>n`ldWLUI!ECfa`RCt^mm6`=()(({T5tL)1H zZtrEyyPFIvO${p&fwS;Z-MO*NDV5>L5mVe{EY!`2eT-W@2_F`I$leoox#7wE1K((? zpJ09Wg*;z@R5E zVBS|9zRPa*kZo*v=Io5E&x2ogKYy}!;guGv6p^U%>GbWxF`54Dv6GGHx*(G|h8B7z zxSX{lXZ#LOy4nxBLKVr~ZT(N|eaR;4??KOuUngkx-g;`^b+D&>vT+*1u)AqRkkQGq zDVIF08w8zeO|I=0+Xm3NvamV&5O#HJ*v-`|E!-Vdp3F^*<5{g$E@G+UF8;^yqX z-)2$pDHL4nS00B&FkzG-L6>eoXLK?Wc_WED zSw3gPxs}aJWMCO023W?}Y?j_XiM8KOvd72uvtLR-XcOS=4v~sT<%io#IpO34U&3z@ zua(|EK{$cTMJ9FdI>M8->bC0kq@}GUsV1`YAMD5K_UiCPm=4>M%hD$@ZyD~8rB`V9 zWtL}N60bGT3@~wat!On_Wc`?AWuk8UK)l*Dj2`K|c%(I~yluhmN2BWf@YjN0@P=gk z;3L=VZqfaKsgw)4hLIlE{4+IzhDr2DkLPsn(-(f8GCgj1IzsH@`R5Eha-AGcKiYgT zRrAk?AMCqj@*VKLK$gBJWj5k_ikwiAFDGw`A|1ut%W!6Mf~4w?vx~#ZJ*Ahj-m#QE zuI&^R{xkOF#F|y!8=enzWW>vV%dH{TH$D{<2;YtmT zrIr{hlq4{mXJr&I<;?9E9dJ-d3oIx2m9E8a4M#eD0tn&%P_2OEY}qoWK8!sP)0f4>nKOY=zIk+j^$EmOmU2 zc@VS&?Y)+fkCP_D12rN4ZA5zD{&kAsGfw40xk zHws;kl-u7Ub617$*d6qy(|)B{9N%>l!;dQ}_^Vq;336X;cGTJgXi!UiKlCrpy8Fhw zN^X4AeD$QaanM)~OL&MlF}&eG%$^rZ!-rx?ud>nxb39CqR{a1liPO(AxGgi4cnjZx z9sGM7#}JYN>~R@&59*NXccX=`l0N7|Un#QHoAKoem=4JJn9tV$V=ckI$sD6RV?J99 zQD7ei6D?RC47r*;D3B!;qmqRV!9cJ@Jl{_?r9mDAPQXy#9J8#3f;waD;8~~8`k7VA z<0|tC?-B&rTE`4cYEH>Yt=_U^hq5OlcIuNCZSDS?kXzCca}1E+jVN|Rp1N6+z5B5I z&$4GuS3{WcMaK+o&edn#opZ_?O*(PY#5Iuqdt>!c+M>C}Ap7Wbum2fsceUqq4J+tV zw78E1=61`HbW3bzyOUg0e_3$Ou-wnpNNa2HIiUi5fuz3mvW|sf4k7MHDATO%XnNF> zpz@|Xy#i{+zOEB<6(u{VE9B#6yp509#ZJ#{nRNG@TNC{{Zz3uMe!%&Ev$pKXH%rgEM?CK(_C?@-nXb!zGi`HRcKXhe<99GsS)_t`W00ZOAj!Z>dWb?R zPI($)XZL!>z@>3$%wy>F&})zd*sqO|W)~N#Y=H(AqJgcK0G$TO`)90gfsU`A1>y^D 
zT>XuRl<%1(dU84{He`anKin3>@wg4069#ud$Z=kRH~P2^-ne>MfV50HiRxK<`1NrX zf%{Z~k+9gS_?>1*e4tfagJw=pub0XaSx?dgI_d9}UTF^M*rng zK$Ni)#0<)ffK#)Ruvl9!a>JT4XrV#FfRRSL^5Tg^?T`9Q=t-Av8a*3 z@B>VG;vOe>olmi7@R3gruRGl7dMs-!Y$z^mYg>BahXdXZoL;Y=tqq(TGTO9%{p5<) zURS-OgA+MhGh5AX2b|2_qO(1E5BJsUZ3z`AUe_PrVqABBxc&Q-fAJs8rzh*dW9tOa01>{j6)+-rJid$E${?t=4I zc93xa@}}RqPB&d4$L=`!>PJ)Y?!Gd`uG5RMYJo_kfCi%>uh#m3^}dvNU-gIq#KMa5 zZfJ}u2gP?Qt7>y~{ez#mbuYoS9lkHX+pD2J3__Bwc%*!fSf0R9P*M9QsIF995~2Og zZ4AUJ%J-iB^WA}x#?S;IF6S<{7YDiM?cydcF93btGzdj%=JssNy~vq_bjB_l8V%#5 z&Aps$Tyi}_E54W|mm>A518yh=mTprj`TYn-WKEs&fSPRH^#1vzrI90`AB#BCe@n}du`=sgIJ1H))e$VuS1Keeb<(|r^vr+2bQ_VEc35?m{Pt$UR*XGNa_ZyEh4j(;exh-tkwfmtc7tBGmjQ| zcPmsgMPe3iQc_c5FwaNIv;K@D`)%Dn@t^)Lh+O3JVMuCfQB_fD(XkS){VZ0 zXmDVqW65GfHv^CxS3=34hwXvr2y$@0Tmb|za2rcU- zC07o*lVaJ6yG#hn2a^1Fjh3Mg51l$bJQ+(G{i`xZ*62|-ck@)n3*x7L{0HvU-BHW& zDF53m-uZkmDT((oY2=Xo|{RUJXNE3{OfUEQg&%%2h;30D}fG4!1Mmt7W4X}R_h}~xPCW&`%n4C z_u%e}n^@3{ss@b|_v9+*#)D#{Y(80-2rx3UNC;TM^(Jt+7b4J#6%x_JaxjuEfD<;` zE0kxp`>FMqwE7sYd8`Jz#NK~bj5j8mc8Xw$UxLYoeZ$rGp)oXy#%6k9z37r_^yXgo z-tc@$)QDRZtpr0X&A3Jm>ka4)yL*AvTt|zK-^>H^T`U`&om&{z-s_FIS6sZ~Q~73_ zLo^tn()7l9>9+c}^a`^D>5Rx%mw8r630kTC+{@M3)A5iNRo^J|lG~X&;8BX10p>2( zGBEYn)JoQe(-xX$gy%(PYm2iT{z~k7NjVz3F8kH=fn$mmo>rZqeh1cX887QW1>IkN z?wq^tBW;OTB|PxT9$(1MJ^#J|vW}>sxu*Sz+BeSrbz!`OgN7DJebI8n&>OMGXbyr@ zX2>KCEYfk`;eW*eOpi7qQb?xrlrw`Ty2_Nj1B0a{v|sfi#JSXeHCI9n5u3rxvfxXzQUO7NB8tECjl;Tyu26L6hjBjs>p z)5trZq#@tPSWGz-rXiP&_~-1`nL`=TVC*NZ&LDdXd(;O_M{S$00||89;%dpp-NMVfGCKd5CgV^F1m_hUsjUPs|bQ% z0}&~T8bH^!D@iC)6%D=!= zL2s?%PF?xS;h|*L+t5!f1?Lr;+B;(3iC&d-Zatp5{A%dD!<&8I?^>L0{`KbWw{cca z8Y0sEO8Mu~qxJf${;UCW((H=m>$cYR)SOsdR~mn0bzPy^BY3rOaCKqnich0w z2Q;6=)D0}FaC*446|_%=$urZr!OU>#S#O6-Os20n(yNIp)JNZ*_kOvb^e=G*?g-N) zlo@Uk4pTD|7S+1~5H*1uKNB;^e&M4Jw0`TX9F%ks!KWPt!NzQ?PWD7Gzc{BD5+z_g zCW~o7SEtiSWpZ<4^=i3LUMvVU31@QYmQ-u14XKQ%nNF6QqnpQMa$wO~x+UEzos3_V zEC?0Ap7_A7@#DxDt3=1Fsn&G(L^?U0jDrz^4}pEQ0)xqL#e=FRzaIJCa%Rh-+9Q8Xmpqh-hWcYnA{*}SyqeW@ 
zrB~_RFG{pmYlCl__r*FmeyX?287(*x&(Haq=H(io5|_Af$ri8bd#Uub4iDx`>T6iu zvEj3>(Pw(LJX#3u)_EE?eee|VxccvhUs0680^U5=wYL8G>Zj}P*ZpFkVt`K)XFsiQ zP))z1lCFRJ^Fo^0%u2JP-_xE?IaMx{wzaJPc(kXsW&g+TvG>9aUo|}X@96ZlwmFuw+2VP{C^fk)8-{Cc$hW$rt+~;lKg0{H3~?WaU$zk`+JvyIU;ZDlw6m$RdLR%pUn)Nl01gqz@r>?&!f{A3iv9 z(|BeQAAU)Tr(|XE8pRx6+>+MY;}EE@S=2A~ge6rrtfo>kv5X*2n{PT|W7Z^=EsH%m zAtv`I*z`#suLT~f2?8msDp*tkYo$UtD@)K4RcC_mI!hSJ4$2CLl~7y>1)o-lFIV>H zwlV8&>%(i6PQ>b%rNK5!T0*VWygYhmWoJUD*QX1@RsY%{U$!OVr^ohHxCF;}wVqdB z1OEEz*pu!lDQwI5X)x|KxW)Z2iAyLQH(K;H;3@rj8i+w&flh5j517fNdkXKH=}cs# zR)I;}kNIPLhpE@oh#PJMJcolSJF`WRk$O!*UO}O8-V_&Nw*|MvmF^xf!E>8*{Enzy zeD&3=^wqULZsGrEG8}z%$$p{4t77}Tp(i!6Z*1<4 z_0e_LfFlxhJ3MJ1T#8BR1vZHj;7HCTUalvn07=Ap*AIisq`b0=&TGOK8q2&c?96;L zo(&N~PdkZVeD5@WI%k2ZPccqFaVU*JQnB8GS6~E>1~$*8{i5)x?$Q zslo}(@s38X-!`jd6e^Nb`GygTVN+@NiDZrl^tBu|)zUjJ*-7SW%t41zSpw`=&LSwq z*HmsgCYz@oc*8lsNoJ0Z)5m1bMMn(AVzSef#_5np z!GF#k6IY2buiiDwGK<}1nHgP9fbJHpL$l*l({e_1Gn^l5o74KR72?UG@Cdx0JIAUJ_{-he0x%>9`i<1`rx>9ff+Lx8~n@cJBFB3VG zc2R}W`13*%?eEU}!6~Zxatxy7n%seL6-w2my0^ns>3sIMX^HdcT5-Uk^E6yP3*1T} zun(g-B*sbQ7JM`R4A1IV)><9q2{D5lXT?7sYhxTq0X~~%)A=}i|CweAwk#()kEhwR zh8@h)e0{3fKF2uMKF^B3wmAr(Zfr77v$Lu{4A5`lIsO`W3xQ@hFh7`Y%$kZCSVehZ zy*=+vbbm@7kJhxDhEm^Fbpj2J#H;8y)k$kYF>!9M0Tib>32!@q>iR=pHfdXKT>o82UpfF$mbx0{G zoqM8Z+rJ7)Bc=MWmZMK?!TrFT!s$=XrrNsLy!|5e8XblD_w^|yONW5aF!s)!^zVlC zGxF%)E&dteAG`&Q(iLX~LGi;<(xDNOB`ws_D#VvkPG}hA^L+f>P^q%SVbI~htGXvE z9P9R#kl}ibZ!$)}^p*fUwa#tOH=Wx$-!u!V%z0G8i0U-4X%*swIaa*Plpl3WYx)3- zWs?U%968&2Z>AL#laBWlHg8~kg~AbZn>HTv_g$NoYyM#Vp=S#zI)|sx?-PWrs6m;3 zT6riC5Coff_hw|z(Chb(#f&oO%N)*V{r%DMSNgy^9y)e0xeMfz5)gMf9zhkDJWt8N z$ma_wIdBGUfwN1aw_}#J1@7;fYi7_Bkf2~IE5Dzm6LULfd@qXSCdx$FRL-X4TVFRFW~R{Uo|aCj&fycEPql5i z{{7F_KAJnHzUwL87_V5UpZmOrIAgG7DM?qc;du|KV zFU`xx5z8`+hsn%z-QE@;3tF0ToyGBt|(y_gP)*m8#9>GtU=gX4W+ewybf!Nd8vV z#I|+Lr%=d$Fc?}*HXA0d2`)u(uU^_&j=qiBc1U+5t9k1sD1pH^(Rl}!Y>jD*dd_A$ z=dX@#k7^x=Zs$oZZ53>dx<4a-AeUc*k_B6vc<6Sw_+HD*JfE3`&JpaKyMz`lfUf|P>UgBn 
zf%yl3zc>DxIv(S?8DTg&K1^WnjQZDD2V^{bkb6`Q4CXDK&RYz41t9+c^KYzU3(<6} zBiSi%I(jh?l-|`H0?o8>%`NFm6moPZHGRo+(FYU{sK7nHJvJa60J5M$mNS-kl;05= z&B^2BS?W3^N4xrKndFw^sVDf-6%^IUeR~pOGGj9N#E>yzETBJCg!2Q zMyE7oe;EdWQ*KORD5ZX@rGg?98W>@Je{^2uO6$lemkRf~_ z4+70vK;7aI0k#D~GF-0|bZJuLDNZpjgVLZZtQ1|s#=PBEkz2si=ZtuA-OsE8FX&Rb zy$0R>yZy1Rf~u+9g^IWP;aJM4NV_bHj>xG@9U8}2o$l&)!0!O)@%;cBsgPwb`7-tD zPFBD9E*p3@NwPSbR}*d_PBf2BGT&w0uBjZp8sFrn`@#MeE-6>Tf6dTCn>P#3zoeD< z4JqnBr70SYX?-W8K5;KQKe;D7Y~A(AgeTJY*7;;bOTS`1aUjP_@y~aBYT}*~g;;T+ zxshTzf|P7$St;y(HLI`hcpK-= zUG!%iLfFAT0O$!gF(HOWq|{L|0cx;*R0s!n%;Im9X=d?;kR1ff1txNkN7zwer}c?{ zi!bKi0H`M@_QaSQP^)@HdY54hRp689+u-_XL8{QxVO5|H)gP!cH4erm`6+_d??c-= zN(kr&&uITW{RvOXe)rcKs5nG2#YerJ%Y-{zxsV0H#`48G+yNm9=g5EWWFTmhCO6>_ zbvdhpMixSTy0ebAJ7y`WRrR~1#q>4-QEI`VT$a)BN+gG{Kus1*#{eTiZpO^;jv)!R zk&5}m-%RkqCiBeXXkv92l_doMp?`)qAJl;CBIx}om1dt*Gh=7VyVD0KCYip1qMyPp<{)@&l679rWdQMMYJ&0@9fRs^H#_lDO-*5=G&IT@ zuNhWLnUMinQx4>ygJ9!Gu!H7TTwghp6{?e{BPd5>sl2Pz0ag-1xsu&>UiqoV$8j;Z z#SWbXx78n3M;>Prqqv)}14-c8YsY(B^yj{%t_WoZ2l+FHZ-{dC#Z>(CC(rddc{0F7+-2KHemH(_~VBFQ|gq!`jtK}F#CHM67 zpSX(WM8@s&Q4vFYB@~mXE^^YSF`+tU@RVnsvHpPAsl; zwGxZx#H9>OuqN=9;YYnk7Obh=hwl{n%ItZ@r6{07jIFS=D%D_MsC0c<6jk$-%tu8> zzej`)^@;%IEb{@T^Xcj)S`%=agEciFt+jTNOpfj73MR!$v&dw=5NOOi0>|4|vK$XI z=XC)VFmfz&L+iKGc>xF4@39#jO*$3J^!tAGoQ}Jj_?(V|+f@yNZ7U)F#m%ksT%{X8 zo@G81q>cGXBMI*OAIFDm( z*;EuQus>Qz+=@N9jI}?^?RIxpqDDnUQFLr1KZI!il^G@4EQ+f~!8$!#Auyj0#M*i^ z-|4<6^Z8UObh$KSA_1+9h#wPRGkkrmX~{m0KFOl|cJBNHog zhtkov4Le4bU3BmHMekb3@ANYZ_aFIMTU_(I({$L?YX6%Y^;gg$Ux2uIurZ_U3&nWNqtylLeQme$Wvo00V9U*D$xgspS~e?4;Nm5p#G(r8fR=)4K2gN9VTk3Yy$?a z-xWTeAn4j>$lbfZ-N!W)yvr2Mw8L0q)4S1Fs;fu;yLb2Q^?RaQF^meek0z9(EMjT& zgNNU?19xo|FrdN#vw>`?JWyp#E>SM4qzm@8$s~LH@tYw}55Iv9iFws|BuRcwoykWAWFe8N^D++o zx$=Q!sD&_Ovehhd$KS^v|FhkW?b5z2&N5*+^`CX`nqJMEs3Gs2vemaVNv^w4Z0Rpu zp15(=+7GY4j^ckl-{A3)6;pSP-W$f}OpEL6-13T|Gx6~@o|*-;jgfQNswGys_xd?0 zEWlpZhW``l?Q@|_l=Qapg7v(entSA}*D*EzJx!V)j=G`DwVf|GS&Sw5NUpQ4%C6jrs2Hg7zBS2N+ClP*4ilf6TC z89yJLjMiVdjqH!i}ALe(GRIgg^tUlc4<(vT0w8~f!kT$Sh 
zB$**J=a3!VOHg7*5na3poinX6U7$R8!CK~;_oAW6ChueD`j1=0`>fFyTEenI!MAIx zincsBRuq3}?b!+a%8|DBbL`J&wu97fE_7;*|Bi{8>*7lVt!r;Ro7<2qu>2cOUFCRL zv$~pR6m7e=?={(|^JrJnhK;`}G3ysDMX%jvrT$hB=TgCcs8O$z7NU3&xwrNw=qw|6U9o6{f~O* zCZ)Z!5n0^P7tJ)`yWh#6J6?|?u{oy}75 zLF)1hsh{jMZm#3)CeXw&=t_RVluC_CEsg0Uqyy8y(ijfDqzM-B4aXZ~0s_;k~8$sc^rEBW$2P|CqGB3(3N>X*W(stqa-@~(w%d@<`oDFaN_mXb-?`_sB_5)hZ z`uS<$MDI?u*;|&R$Zn4ppC_aiW!@6w7~8LpCF~&2%!1!q9-jJ@*gYM(wh#kj)shW; zgo5T2hNU&TDU!b~JX|EzVkKiG*2kKZMFV<8LJIkOgPyTHA*U33|kNu>#cY*c1O z_CB{MmV)w;+@iWQiB})-fLjTpjF7jn#fC>}20)(V=2l_;0SzqbT$I}i6nMzjU;7ZU z%!H2=U~^P#g@vLPP8y1~0-0S)A-yhbR!CbAeQ1OsQDc~SNZl1GNq+@hG#@j_o`zN$ zc8~$>w`#U1njm2-U|}n^n*nR&Fp_PdK^G-Tm}axplQ~C{6UOe7KIx3zgE5!`D{ici z92qxMklxcDk=o>Tz{w}Q)48G$eMpAR_PptILb?a6m-f}M4Nyi2__T~P2x}|g^Uvth z32E~>Kq#Eoc|Ps+VeGyl<7c_o^brN_gM1nNgRE+S{$|C9cZ>99%cvf9Z1-emZsdxi z3FQG_$3!Kr5^OCE;YQi$9MpTsd}&r}?b}@9X8OQ2jFmaGDa2QEIuls!yPd)Ul5nFTon_+W|265Ut-| zY*neiSBXS)f`(X{q@rO+)X40kSjZvWg(x96Zv}wzPRPX&t%tHhohNE>Q6~+#*Q>*z zrOr5w1vC|CNG{uYi7wfipm`d%KzG0>1NBP-id3A)gh;3Gkwv%en8}zwxY+zKs45k> z1O-RMqDeT=m4HSSs!tIeMwy`tFy6cprX+1}sv_`9(%P&@y(3}a zQ?K@#>aT;RcQyPm_35qW%tU?I$Y=dU3!&Rr&Nm)E^i~!AdYr zZ+Z_+u$7?Ivut`aWT50zdwU@hO?|9AHPPRHAXTEtm%iw&Tc#Q#_pTm~^ILB}J}vT# zz%f{%%+|hDc>C2t*7fpYiCxql6xN0ISvv0|=&yfN`5j`5HkuNvd&bebn}fqt1rVVW z8zoe});hz5P~+smXMmnV>-UgN9q}u&)8UMI(oH=0r+DvMKN>laP<9GplDn!#`7?L= zU*;QBLVERjH(s{qgeW+&PXpr0@JK;}ookyzb_HRz@^~l-?uD!~K@0eC&leI56Z8}I zPXJ^N6e_d%N@b_GyHI4FD~ha}NC13Zq~Zq*@7^rTc*gVS4-585?S$~Bjb^hOug)@y zl^M+xGsP8RtsJf~?+o`xTDL;_-yJGY!j9lvbq1G>9r39meWAaD_n3e2siq5bMY;zu zdBwQV*nMKE+HJWtH8gsGzEMS!d56l=W$Ci~ee&qu(SDI1Chznt^lHo~W}_B!74x^_ zKgTTNmbIO2`&RjwG-B;^X~ngv=e=38I*M<{$-LYdUn|enNu1B0#;kYFq0BETmR}+s z^m*!DvbBOlf_}r;p0<{gUvK6!MHPj`Jc(Vra)2E_9&R8}DN%*1$lqPOKY09wEwFdZ z01lKcSUWs9MPNYdIIlAk2>5x!bpC}tZ8q=n8%;PZqxOv2x*D1R9ZrxE2}x-{9CqIh z=03+H&07oO@&ivI{-uzHCWyXP3@S2-M2io&yWT$lFUpSoAEyg8o4}K_Y_CS^_l)f~ z;9u?3S@nPU8-u0{fQ+*D2(QL&--V{|;Q}(!Onnj6BG{ 
z172__B*uS7Q-BGzfCL8jBf_`s(3J+_-2Jl4qLmW3yW1{-ggr1pf>b@YU8A&4#kc@E{GLW-3Q_$8%*0hX-|zJj$mp{22*=Dg77jEIPVAP+^E zwaZD4!D(a!n=Il8Py6-WjdxdN;$5SseZ)Xj!86F`MXO?%8>!m3I#(W5kbEnq9GNQO z;TQ-JgH+%VK251CI~a?x0f!lRvs?nbO#49%3`}q&Wwv&O&%un0iolf_*5%k>vm5fD zUb&~-H&sFtV9bSL>BGRm<*7(;nl5`TuW-+I@%4V^RvJJ_G#KxCn(ZY=ldGJYQ;7Z=g zj34<|bI#>({idXxG3qyah3Kp(KzCB_J=1557f9ijeaCdX3E!{v!b zZocWu9|&iSSQfrx$)TG9dn`M6Jfp%J_O~4C1z)8Jb~y-RE`YsSCct=22+W05Fv_$d zokkC!`>BxRmS6o@9XNGjhUh#-6Q@q~wr6!rMgQWrre5D ztD-Eo8ta(mBVVaZeI_j@nUkL3li>@1G(@0ECnS5Tsud+>w&yBtK4gi>CTqNM)(>9& zD(<}V;;+tQ^mBeXc8|8|7o;Vmyl(Ds`|`eE*d&|zVcpDW`Q2Nb-TL|;mrGKvsLL0T zI4Spkak4W#dHZ%pz2bc+vb>|7R!G0UCR0A{@$hLR3&B8c^xzrVu1_!4JFQS9rrmr* zG6_?@kb^J@lYW@u&D$2Pb{!K8pImGH&`!WZ#t1^Sl0b1uL;e`q@aX;0$*;$a%{p;r z$6K>|P8~X*G%h_+Q>%*SBT6{v|B9q!sS+k!jKE;Aj#Af+ToJAF&Q-f=OJUKHilH{zBPEl1NHHB9>IpY{OjxjFB8~@-bh8 zhTwz*J_hmj#@O$2?Yv)FdNN4qVSub~ud_qy#-5>=#U#9Hx(gEgcr!;R9PB(tXA1q+ zN||^Tm8iYc{*RiNOlcPpC(H<7;(VkuSN_tF>PNP{=^6}bI{&^R>B7;AIX-Y4l#oQR zG2Yz~qp|RjjHML4qH0Fi3vm%ibtH)Kf=yv>u`404u^*u#TBhMiXIPlE&T%0lBa1Gi zeGVMay&)t7LhL}xlpFr`^hN76;r~Qjyb#|sqI+?D_=J#xT-$eLq4CK@F!5MFvgq0t z#pj@fkrVaTwn!&mr~f+SY(oJ$JxvDf`?pT7U}s^NZs65c6e zZEm*CrxlVj{QiKnPt(~w(_WvyJPq+Epig_=y;tNxJ|gXR`4AVEwrHmd|KJD5;zgf> zN7k>0`qSz2PE%$^@1usJwieZw zIj&cwjBt_#vTN}d8b^$I7DDpri`VuBH~xMBrXyO85$FN4-n)81pAwOT-k)Rab`VsN zgrC0X1#6ihb6JE5{xvWFJaKz{c&2daTBzj46X&=|7s@V>kR<4W=hcV5fWx#t{O!pR zI9Ll@QkI4{@p7Vg1dous;nY%upOpr%u(1e6YT=7S+uU_Jl@d4UeTVnUlKaJD8rv7! 
zAohfDWlWd0;i@?yiJ>vziMgNykS)U6YXSsYXM>%j)NM#M`Y!@352LSWbtWrI zLVP^(UQ}OtaXs&gOOST?xeVbV&6EEePclC*+4FbP9?6I0o8#6zJ1Rf8Q+1t-@6vOr zUYmA%y;eL`EX@79Ua|Ne-zn|?V%X{+<+TTSQdZSVc;+k5zr{+V~SxsMB8{a04x zPH+49zq|LXc3$Cb37gqB_$1VK*5^FQPUB*6bJrm5#MR`wLPFB*onfz)7c#7m7rkyO z4E|7=S}@bR(sSni;>s7ZQ_n-rCoXSuSzY$!$T!8Cgunl8AIv&?$<}iBjNYT;$B!4I z^}E_KOPMA0ik#NpW5vO80!|M*&aQMGFg)R;kt(Pg;0$A1XNFrb8&`w!{d@%(u@qKE zDOZy%*iqJ?F3o)5aiXD4 zIqH(AixEi+jZ4>E3xG@Rc96OgemCRSh^Je8-fHh}D|Dl1p2$A_1~1PpZ^BD+WY)pE zoz~McwdckDJ6k@u z$A~#1(*Af7Uk6}x)UbVvqaB79v`e4fIC%V_Y42X=@woc9RRPA>%3EUhZSTugJ4er7 zDO-OIOA`wuXoC2 zbk%@O&(Rr^*vk5|7Y7VfCQ7Db&t_De%_vkg%kMaP{ui~aU)@#GS&wX#A<;Kweznm7whnf6#hq$nKJ9Ge*-`>RT>X^YWzaud@i4dGH2!bj0Z%qI1bd2J zdNR!Z7sR(3sk?`DSX}eZVi`YH-sKl#;yY$G2JXDEce;WZ*sb2VnS|pbt-l7`8$D^0 zz)nw~Ya*5_eZLDMF??<)((KsLzxXi)+x=^d?NJTF++z8=#eE5jCCa7tO6tUN53-7n?E9xZc^Bz^*Y=+>%zpfy zflK{rP%k#TbEh_9CuN{4#FG1mWBty_vfz(1xBhwDx3f~Q_Vkq~!~W0CZs(tcz>RJG zv5U&}U!pE?ZB9HXYr73|TyA_;TZ3M|YpfP1ESgv3%>T8YT~tJG=efy_N(c5Y*!c7J zE2RAfFCxsona%Cq5?6LCX zJ<_enp%R&fNrl8)KQkbdtP(8b3pBAOV&7WCYLpc_eNgcjVLEjz1&>7J&N(tfupETp zSln4}w?8_AUK7BC1IFX zSeg1~jj)oN`J`%^uFzBAZH@UI1}51os=r@8iNzfB_Ztp0pt)rVzDy+8cpK>Eg0Eq*>X!zdT6zXDM)gIUm-AY2FHhmDDDCPU4t(TVfRb{6 zw{q8#>Pb;vohgwZbd^rOYf=ML{!6v=mlQEFhI{U^@7{x@r(16r(T;c^MS7qE@QxCH59hsx^xQNMCMi7t+S4?H^q` zOs>xC4(#=I`u67An=4l~jp?h!`Mj}mIaK(^I$fEbEAXI zah%@RpZ(IjAehepU0F~to>E!nu(gDzQw&69>_ok-ab{1vJw<1ZlkkG4Q#w({rh4a_ zRtBhWDjmObM)MuL0R3l5p;GOF*hYoj{mw@%yNvgcr!6;LMX%r2&YM+$kL1bIZ(Zqx zV2QR}90wgsK9RxP2%r-MTf6_x7EmNhHp4kG)1sI=p5G~+`;`KdNKin(kPV|m2`2b2 zF)9b&B=`W6;C%E0>D4Bf0RT(X!yG1QKTa@|+B`2&gHsTFUjdLyr3zOgc)0-DdDI)D zSKiZtrX)hJa?2+&YSa#$@nDE(L6j1#aiX^T@ZdnraQ-Kn1rbz`sE^)2++3%ngTm46W5-;e|dLKUGD6nv@6OKzfUVtW~Ux4TNLc^`8&07`poIW*@h-L z&k*0q$&lC9>vFby`nGT&Z(Mm)d0a`G^l5-H{;*7u6J^@5KSO_`>BFH-q>H^%!E$YV z%*hWYJNOM539pCli4^+t&*_8Ik=paSD1lLaqik#!(>ov@+ogt+Vg7Bwujg=-9lxK% z3F|7clSW7v4w15?5?p3SjD3lep2)$;h5|zFVXL91lqw{z;4a>v0EHOV&M_c34~3IRdu7DBULFL?k!HbXVkdnP3(X25$^; 
z9?7|Xq}1g|o|KH%?=gF^Ht@{~&(ISwISi8Djk1du){#!G-uH|1;EkS_AHvV@X*t5* z5@gDyLs>u3nDfx{s_;XiF_@E^%nLC>lh+m}M44S_Oo&Z#mRKO)DW+*v1;DGfNZt z*$jK3w22R%P2N`&ir4p!s-E&po_ja-vc71|nbY@PhZM9o6sVr#{9OQN6#UKO4rAJo zr^s`Z=?GSSXimv6yQ{rpLBf0 zF+8bL=-2V_fP75pI|N~|k#*iTn9-Mo((I7ry@IyO*%Gn{=D4~>z$Gmyu0^JH` zMK6Vc)^8!_iMAQC|I!(qZ9nb7N>(fdXdN4DtN1tAF&`cHH+rK(?O^ylP?KQP9~VIG zIW~$FcY-9ag6+WUZZ+_%JJBCm!KYN%IcSNyHMvZf$)K0A3m#H&IQ4WIhZn`SwBs@v z71UMCXo{A2s)az1%65r6da{rk%*8lDwIJhaX8r2);66un*-jgRv|VRwKZ8&Z8(i>e zUGS^m{=)?6**taA8!$gORL)q@2KSz}zM#Y04Rbr^BG&`WZnI6E{00U=2>4fA{40&KJPpyt zj<5DVxh3BaJ86Bpa;fyzjtf>lJ-B>uaRA|7oLT=*oH@?Wily9?0Nq7wM%TPyBvR2m zM8~;0rd;-my~C~C^T%?vv#ArbLgiC4ySwe5tbE$|CY99qPRH-%l}{Jv^1rWr`un`Y z?-A!e?T@&dNJ zHULGdjPL%*O=?@<;i=p03XBo25yyyGJQimyWPtm|!bu{tvW}T$&c8(fmkbdmhPq}p z8?E1M)`wLr;QH;>{`S`aEG=Mu>og}Sw)>bMhc6J?MeSr0`j00#Nu>~woOyV#Ai!cG zSceVhn7U77EHzl6<9u{pJ`UbU6cC>pW2#1{qLeH^J`KY34>iYrzdYjBkf}EG?&VNah`k7vF zx1rq8W=0-P(al$P@3@9-4?HXM{#Gb!EUF*=$is3Iw2G#{b7=U#`0bMfVipgvU@HY6 z!eteCzt~eFciiu1uae0nO2bq3K^UKk4n*Mme!W6A8J=V6y=b0!F7$x(4PT<0_Z-ZS zf-pnngj}9GS5;9av_KFGUYA9UWtKsDovP466|q2YSuhhpi27*#iaF|Z%d`8lyO}ka zeh8D8sCuH4&!>Ue_5>toBEg8h(@}oY`6@L@u*d^8GSfq}3s z5_3_fd_{o1Dg!N9OGtIa%(4v%um(1e2ZN- zEM3M0du^n>K4o9iY72cw$sP%;7KGpsAr-+mbmJukoH6*}!b%Z|K8J*FFKYKND0q+I zkoHI+fm5m9)txi-^bbNQ0--3Z1T_x$v2W&l^o~dgjn~U; zW8g6e3Q^+M)wG_&A!SR->C_k+UVP;pdKjiAzS zKHTHGu3At}q2PJ;D#Mhzn)vNWO=-udd-(EWob%-1ZHkdj-IwvN+)x^h~jid8P-~EbJscNc@OM(~Sm#%HdC@aCPXw~efyTHSG zDf)j7>0gW`OW}-$`c1N1rDllyd@c3=+Rw8jx#MWBiL^_=4B*nsi6&A%9%-O<>Yh2N zKblz(P0Evd*^uUY9dde2kI7jDO{Oza|B3YCUgLr;R!)-%1eqs<1-ulUs$1>Y`<(ABx4&p1iDCd~BOp_X5D(#Q9qto$z-qH306EeJi4_ka?rG!9x;N#(S#U`wS_Q3V-`E63h*nN5^v}u#Ql7HdqG1HMwBr{kPcBmK6nXZDV|z+sn<^EGAAFTN_U z*Wn4zF@H}U? 
z@YY-faumjQnpnR{aPefpx0pK~lRx=<h%DB;?S@3xo9YW>~k(%$OVd zeNAsLJ>;gpnV=Jst1$v)G|VSG_|FJfnIV;PEI1#NsmZ`DqS$CS^PEsz(zCwX04oGOC^<;v$ho_?Nh1KxQo z8;-SRF6}+O)wjLy`^JGMb{3z%*zalO;@kc^P%}7eYEWKhl2Mg@knp}J6oW+P?6Xmv zOU*M}y~u6weMW#HJm#$G!tOPS09Rub{`=(_^ya@dp2~}N^}*!a7a5HH_BF1cB_Lx3 z=J$Ws#I0)Vm1+>uagM~b51jd9$`{MA4`ivkwspl-@T2yCPi&nmH#+RJsg+2IDkMEP zZy*U$@w8!?ijE1pg5zBc^V&cdN#@R*jMe$a1tJI;e?F!wDt zauS}Rc(zM?AINrAZiU9bH*XPSAYu2X6zM+{gd(IYXEvKL2gbrN z;5}VlUMUdVdal0E1j7?3(6cuSvdoC3g%QT#AU)+Ej!+E{GQxRu+?O#RHiq(DlRSjO2_ECOO92TBoH8dDUo}a>7vpw#5c47xyMjN_V}kA zA8Ywh)+RTFV{Sqz$CAnkG;+RCXdkF&Lu~7Xz>2I!>cUllngyZYi)e|J2sy)3(aP#~ z(qj`90J3vX#t)M<(kxTE%`{lWD!ylJ(oP74u>?#*Akl}#&|I{crB$5P+il48nPt&} z@ER;y+I%-W6;s9DAnK)xeb|;i{H@Mg*HIw8ajeL&Nu?SYutd3SI(d~K&=-#F|i-OosvH@&SG>v^iGmWkb+i3{8vJQtfK zjt5FQe4<$|R_7DDcI@CSas1_Bu8V7Fn$42gB}-f@N?K#IG}+|2JTkMPksm6tnGmgQ zUaqM*b2aS;pP_ou6AE+f{yjT%|I`V^(*`D%IHv5D-Q+(W80dfe_{Hjir%wk4ertL@ zFWEVb3i7c(HR;XV>s(j9bg5h+ItH^9Ws$#Q0ZgJ3W(tiehr|%uSF>X z!PMFVehq%Fs3atZ#)om~G{u5%{z(WkWETIHk{b!t_}lTG-Qg+9TS{Klo16W*O8tTM zkvXz&amM1}b6*ukHLttZ(l(Z8#a;Ox<(a!$d!(#oeV;}D@ASyVmcv<%tGGiB$7{BQ zl#E<;-4Is|{#6mW4JVCAQq&Ic3o>R%3FKJ)(9*Isu-l6Mc)^by(MwF>$@|3rWE{w%eD z_Ep&aJt2A&Wiu>uMFY5`>vmRmf=4^1DN1Pw)^u#}-#OMP?3CmU=1D;_IEZI!^YoMu zt7HK@wi^I@vOSBbM1Zq{PA)1<> zcKej7AI+%X4)8`Yjy|9!iB>i!r;@{iVy4-8#4(#kT3wERg#EsG|`A_x$9y&(xggCf@ow5oC%4nP(Hq2``#cJ3k)i98Z-juDEld;&kHE=%xmdf8Fe4l86hCAbcsB8V zpoX*9Tl95Ox$K$d1o%n$0XZ%w01H#0~WA^Q0 zQ?Z@&E~+movL(G*alXVZa%OZVJMdRtxw+UnirwemGNgrWTcK!I1J9n_(|9}L#v8cB zM42p$C>wk1t{vI9USa9P!Uq~Hs41jp(%g+Vr9J8%{hs=M%~X>5aqp07s@U0MBxFzQ ztIxm8d0cz2zbf_h-)~mg&z_oNzP|T*UnJ>@X~=AK>niHD4c{zGQa(fWM!mHbeY81me0JUG)I*o<;Y5TQ!35O@B( z_#xZYhRt?Ad9m@<7UlsEk^r3>oSB-MymPKau&b zw+xUCn4&#YeWL4rsMX~68;$QS>U-|GZ;MCY#LU#o*~}M4u7@IR=dZC9Gxu$S4g&ls zuRCdMq^8OF0NSU=b1^~!??T?$Wru$$`B-yoS~XO%toM-7(%y3QiY`TLMrOTHnxm4s z+1%NSTdpkRtUX&+_OIL7&GA?Cd!{kIu0AsQ>yFzuX(o^SzW&|zAdnlG#r^%5EiLQk zZ&mvi=_N8h7pl&>wblg|`iNaS)p<+M;!<34T+^fBH~VR`dVJSdV)+~9Uo@(oES+zE 
zk1DaSU;O8uO$v?XyYku1e;5zU}wBx9|1Gb^Be{?RVX7*VkJ!W4sTO*Zc5#zn+iB{n2XAdn;aS zXl6*@+oyZsc51}J3-lA>Gqsp$FXk9>SFM?ff#(ddrB}qlsX{e~5C@~DqSaF|Q_}Y3 z#CwFzgzy>r8RM!s@-4DLI!CS|cKvytPjpH};e5`VWaB8=TP#x~O3iBGYzO){5#8I4 zk8E$BdBYkeeXs1#96pfiL9nHiqfm|BO1x0GCyfVreMUFxj|l#<&bKoxJQS&YxU!*- zD~7v-9(_nn7GXyHn&1HCa7g4(xS)>=)O4&g@$QhfIwnZDZNi3LX3!NI(1gU9WFi?R+r~w3s;x)P+MANshAQT5`R&jCZr?8J=OhLy zb<<0emzu8N4}GnkZhrBu5-hUQPmeh6N8PqavsEowgV3!`$~L7 z#K$+miiaGhA2vnazwLhIT*+?!3)8o^SK7UhraW)zikiKBbh@caG!A~Z{M#928l^?B z$?2xa&@DFO&21cOb?Nk-_#Gj=`Mob5H(cVBmC^c^90I4V3@OvFdkoSC^9=)>+5LXcaa*$;s^!KHhZKf zqv9up+ECT!KjFiR(n4Bc8KIA8ktqz$1doRAY9~5=dnV@%Z0NGo(%0fk}2jqmQ^|A`@Ya;q=_xR^kxQ8LUt< z6d-KCL!$xbOye}5)pSb>k%?A#vUrFOpQo^pPUwU1l%38_<54kDUe3<^g&er|gX21F zg?J5t*}nqSV%QF&(WWYXH*2Ufr!9O!&7!VfPJDihu@KMQg`zkMch>G;U9VIa96z8@ziN!gRxUd^ zM@D)qzByZxDh@X64Xsvn)u+)72PcDXv8`w&H|5T8%AvE=da1Ud55V1@m0>q+4|Lk= zjg0AQ^NI9LlE)xphuTls87e(R(8998(oIlZHOh!|#uMx^15wsgp&$8HIQIMlx58ZR zP3kf+Nwr|g;}&(9WaDG*Pe=|>*J$1$)(-5l6RNB!`}gVd`DNFaT~}jJ-<7%=d>L~^ z^2@@BS5c4{1i#l->>xO!U@3r^lW*?8CY?}zx?AkrCom7fCONy?lp7vakeBiLy56bV zmrUgyV(cKE7+dg>6%JdF#XO&7z^0HGBG~8?F=;gB&mNC90`L@(mBY-Ejj9EI5e%vq z%ramJgSM#~YJ|E%Fk%RRx02G~{#(`~1i~`oK^j$BdT7qFa8m+3q6jVI3G`+e_^d|= zprw+l%7!Gt4jo3iGdb%Kgf|RAXP$F^&+<@vUx&GS>VXi*7iyMPC} z34i^3{7&1`3x`cCjPssf-!>Rb=PWkg^KmBRc+-KCv}Je30&zwAY<(SGe%J0Dy}7i$ zvVY=Lo&A?`^KZu{;vOEbSzcC7WR#)SQuBwmtbi(JsDQg8lqaL-&(Sb|feo#x59&P@P=-RmFxQpf z24m=IpTCmv0gvPI*5dFS3@x(^t!-R>XQ%)-q2h+aDxpS=0M5fRZ$m%4i zzyZet2|NYakR!v&?zkRquE+gz}66ng4}K`pJ3S58RRq-pj=vRX~wq_ zfJZ%;7~MHR^ojmA!FT@GI5^T({7&;8c#A zu-1Bh(Rz2#)-Hek7T5Pj1Ii>O8=r{p_G=F(ByVlu=Ra0=m?o0CDqCx|Fs2 z+R#R0ufW4!tWH}!+5FvHV9S^D8`H)=`^*2h&M0YkR0G*rH|o0+VtrcIeM7A1)j8+A zGo*g!n#&Fu-}=pRcug+t=mJ?NyRjGXw=~{Um*ML~5&Ua?knDOI$#^}j^{9I<9d3ONoUK-! 
z$BQD6_ARsd=@Sn1UCA!S3pW?ww||Aifj0U9zCryq zGXpt+^wZ?Yy)G+8267%R4ENEPo?vHTyJM!K2&g&j7Q(xy0HR|=Jc(d*Mt9!DYNJm&lbx{3 zeaKH>kSnikssma#n{3I+LePHY`aStk4{JefY=V+ynRTaW9osdK>JV z+sB`*PddE(#KBOL*$1B<=TF}kWt)>VUf>7T7CyPWr~``^Md5Q^7&09!$iqIW6iZHo zuq;~HgLm-hHzeDUedwTd5a=7&aCz)Edjx(U#h@FbR*-^&KyUs962pHFJ-uS}+3+_h zln~#bF(9gi?tu_j!*u;N5yNoLZMG%d&se74)Wpe9qkBasn0)=F6C5xwAp~v7Jm34M zm5wGEwSUu*h3#V$493fb185GoN$V@al-b|U@2gi*S{E9=w2kSjSC!fB=@T(5l!m+e zdV)L0z3r!!<0O3lWs1ELrs?~oWr#Eek&Y0HrBq5Tadcrdg^C#@UQ2gA*?tp_fqJZXJqWW3tJ@KDsu-Cm#m+ix>mYBtW})fB#w}bayY%AIbjJ8G^YT@*R*?7fiQGAKWm?Ie(8|mb z8Q(sf-nrZOcFfJ*Oj)nUxHs$O&lEqU;H^qfs}wLgcRwCf87o#Xn8b1>NOSZWR4{&r z1KyFTfVj_`Zm$1i8&~Roc-YW4b8MLAJM#JTCn1QSf^iCse$)cRq6xq1QmgUIy38}d ze!V3wUF7OOY0!&5UB-3n0xg4@j9<_=`u25Ny>zWIEm{fNGB`*9^L?-k>P3#<8a4)x zL+uYdLW^F)AQRUerg30?wsmk&FO6+A9^)#n7F3rY3kAKP*&pk#-t5%w!#iq+*@)5V zq~l6+*$aY~_F+gESNUeO#Ghb6UGK@wN9Tdj$cH7&kI)x%N&*Oyj$XQlY1{#9uc=@< zr`%QaDyPy_@c}0wAI&K<&B!^Gb82iH{zdOFT_1uvuy(-IubB)pbi0y z@MA}h>E^8Bu~)hrSJikzXrE0hc4SJX>P@hPwK)E(Yi*gX$7Zy zRu~y_WCiO@Mt=(0o2U+)_AGzwLK#JowW63Dc}dwu3>WQ<7ci#KC}Y&p;Y%pW&C{8v zB_3Z;dY?_|rj;;mA!vtVZGotGNExE6PGR-m>vkVL^(p$$R8y8TubE&c8fEhzZl`6| z8G5EmFiMQR(T`gw7-K`eHs1xKvAS=I*ZA0_lf+&b4A?l07 z-Uf%I`~CVmQ46ljJ_YzHtug%R9e_gD>}AwYMwpXZG-O z%g;2#HQD9mPT(+QD484FclriH`UKAf^djJenn0s+h+0bY*8b*9gTSMn02KZC@)oWe z*DWY3GtZilnOuaMX+{jeYdY`tcJwdDw2 zl1=H-o9zTzpxsiARwOYJ47tFpbibb8_4kjF0IEdtkmgyQKB)y0%v+IKk{PQGVEN{y9z7C5KKV06S@r2m`ss6&@@Ni2A4dzF!L1H#m| zAL5SXkX^aR3>`WK+uDE2Hf3c(_?WGME;YR%_Y!q|e(t&V`h;YOH<`0g1OE`ag?N$t zJDKm$Y8OG1d^bmaNBoPZZeKQ4q$sL3QBUn`BfcR&c1U-~m^HsNW-?WDi~3AGYu@T0 zxSP?wvVGN<$sxl_Vw0%?+3g5}jH#&6+gO-Mt)41sUqyf(+TNLU8N^A$n}&6~8l^3t ztko&whJjBfVumL`xkskliYezr@*@ePjVM0P8sgA#_+f$EfP&Hva=|2I)pIc8br?)n z?FK!xa$g!iVp+%TomzS5zGvm-lE-XYJO?>00q6GLGuf?c>tyJtORChk8POx&3G`|gV;C=^os<3sGk|It4_ex zm2G=!N#M9-u*WV)JzB`~&>T|s< z3kTgA$rpxM;BE|5YV@kFm|OT@LC1PAHEpUBVo=< z80DLDg!~sjw!QPeWp#DkyUb;YVFPjN%?eWI$_D5^zETgBs(*jEM{=&>;K8}H6&E)I 
z^;zuLaNm8W)i<*0Z3nBgjB)YDp6PV2`{h(XL5t7QLH{icJdZu^etF=*xLY&udcVGI z<)bB*{KEZtnmSqSdQ2XFVJ zM6uF=>C&lmha*D&_S(xDV(C5|ZiL;&~`h%!?n zS$Hyt3WSx|yXY&Ha+wsRFc71O5JNw>BGYpf5>Uo54c;Af+2#4=`~i+}EST@B9+jR! zy!9d72gb#12q}~eKSkB3o=xlqV|>W3vBtp%Dt7}K6qeJ^+`_%HKgEqy}9_&0f?0q4(bO<^B)( z10S^jn9zJ5pbBMGfJ4w!KzFX}6r5@{G>eX$S_8;~MUp_Zs7BB^mG71h=6Nxl%3s$u zwn*$^s4|^#XmwD;s!jv;V#X7l4;N}?=Pcr^w{x*eiM(oNZ)PgVK$sW~vVk#E@hVBW z`BQUXU>rT7@zdyCx09mZcz0E&Bzwn9j*tf(GT>+>s{wT}YyNENsJkohFpiMlslmQq zy$G%gQ?YJUlJC@neblL==?-eb5kME~ewhW2|Kz&Sdvjy}L8}S-r&f=DK%?y*MSZ`D ztymt>enpHzu~Gi!R4Rd3&RroE)A6EC3m>1z#oaC|1=uX%0wv0kr(gv$&*CGs76ZjB zKM(p#JnlDk`fs~bBX5CrLqGsGxkN^8CK>|l>wQ^#!~;(4SQTP7h*6Hoi^@Q?h*Pd3 zr=%z~$Z6Pq9Bx49s?^wsBA`$LriBagfWZjNvt2N;0*jUV_$#^S`^wh>+-(gq4mD6z zfU;N#nni*ODi{?dvv6?P?*@*n5a4dUZ_XhEaRlL>&@$P{cm`E_Au-tMzh$0H(5KGhm$Q!b|r6;kHC8p$3wN7gU z7*JQIOgj~vT>5ei*>Yq}xZcRk`z7JI167yYv_8e0m}=P!?Q0Qp^NNKS-9SZ!oORoE z)rC`&^z1z28NVQIz%l324~f4~lLIadZvT19|4GKEKU0u*Ty?UV%jNw%;lru)(hrYv zrP@cJ;_djbGh`F6ICQ5sSD@??yxBW07|RlgdJk-_UsG>=Itv<~vofmQN+9Uih_$ds zqX)4R4KKbM;>~hH2`7uvHsABp5=LV!%U4nk)JYC8^7Bqw(qzKD3)s91sK_M}x53gH z-EUCBk)RD8clFUj^rimHG<=?@fk}aREl!F4_c=4EMBQal(Yk{u8bvhBGP95tLJJ##DI!0VHTI-jfZFbS9 zy4Fd=)pl#p%+=#QG!pyiF%m`}YBTSnTD4J+P+vFxF2sTGnpsd$nt z_A;_!yxk`l-CcBhr;Y~0#WbpEsOWZ#=!sY~4#O3{^*aV|A18Hq+QZBpF%dOT3TS~? 
z(!CrL?L*La98}oYMb0YH>>>uCWDA1^=Lq5RULM22H<7Ajb-Cj8L>ds2LWB20o~fb@ z3cO$fT3LFU%!gHw`5fep$;hE($RllvmxmsvS8ch4R05{2)MaHQx*{b3-SZu9=KV*% zI<3^2LZt>h{7-%CLmQ_bTp!Av3bd2g@>?z-DB;lHYrPL?7Z8N;rsQy>_+Zop1%~j= z9FCgJ2Y7MU89qPw+TF4S3;Vl+GHxU;ZKo3333 zQ|T$hU&eMC-#PoxW{$i~Oi%YrmINbHJ39qo4@dSx)$y8`E(vxkKs~e((?#-a1e&OqKhq@C~>(IbGE0W=A%cCdW9QI`U+#e?}zSy%p-?fYS2pi7te*8rFPz<{qD zZR3QB=h~&ujf#f~3q|GL{bW9Wph)U63AjE8IUbO@xONmt-@3@x^l8LIbeoz2a_{!e zv_<0)FgnM^ar78wD>C&MOfx}}9J0%$ad9IMSMF6?n}CeBJ32;1d7WbH;<~f%u1~z{153WpFI*ot^1^!raqi73oD+7}WtkgE!3o%UUpZ9R zSQ+=Eu+ixIV}iKu?!pc0yR!$De|)bm-R1Z`%IDklFZ88xN!=rTT3)q|!OjWZo}y;t z_?>XJ#e761r~cy~yy5^H>*^Zi69L!)V?@o$M`ikb*qyb`QER;%;lWc>(}esMHX==m zlPAgJx<~pGxUl+opZB|w4cX~%<3>jAQP;zNcBDTk8N%72z96bm4uu{ogt|j%A=nc? z6+%%&jg%qa2^55hGY;W|q@nts&y z3G1lwlh{e@OnvN-QY5>JT`ozZK7~HBn+bNg*l7l4P1&hBIScDZ_@{kAuk*k0^}*PL z!!a%&JFUaO+&L|zYmi8&Aw7yKo65n`@$Jgd_EaiGgDNxV_uw&D3?l;#Zc5B5vhMcr zV-OM8_h+!dxDYSS5-t(f13p-{Oou@+4Mr)2T-$tQ8H%n$JI!6HHL&nVDeS$W16A^{xP|}S68v~Wda3c_?WClv$&QysA8l%cFO1l|ROw6Y05Kt=7 zlz^#hXA*OWQ)CnYM@ADcRQT8eBA1)X1d0lZ;Z8<*peYhJG4w=KLJ5yb^lLY}nY z=t%dia9n|XvWn~ndY#|ioFv3AUlKv7zZDD^qa4(0=0pS&# z?N)vYNj|Nm87aJgy^F`*MdJo7NsviI5J6ZF$8UZ;Wt2^5lp-_DaY;Uw=f-whsjvH? 
z8VY#}+I7WDoIs=$NdzKQCRr>Ipp`-m3=aMh()s$)yXBF%-yy$y*MFX$y@>WGyEp{4 zZuCHSn4S{Zubt3i^gxyvtO9O(x*fb-ZNy8vE4uGUNP0?sqHypMzP?X9iFHmOJ0iia zPg%*5O`{!xjtu0@M9C(%Y(<_7f?s70UaFG$OjyY#$)+*A2n5wG)*kKPd+s7!*-2;u zygO*f<`rZ(I7^Ezg#OKXkMjg@{9_P|2|zfO1Vnhp|Ck^#mUv2Z(viBHU?kau)mCN0 zR`QTfbQh9mi6=SuTT!+Am`{hIYHEV%hDVTZBl5~}zPOxEFU_hG?4DO1gKm{W&DPG} z2_|O$`r5h)A8D`_-@;xH&e1Oa`B-CN8FJ?A2FG&&oQK*t+HfN0KLMdJpAW;iv>Tw5 z(aGkA#$3#P6B<*L{e~Y?m;Gik`1Qn_P(Fy~p8Y)!x#jZR2ZK$Af+u&?)x(S@y5E3y zIp$*BE@X+K`cOU}+PWgKR`>ki>eFuU$40J7a^fG@$rc#CkM&#q1Mi_68+w&+Sq9gn z0Z2|4)3J2;D}ltWNictrN`xOnMDkGTYe_KfAaeYwohs&jN9|ABVfLK@_(+>xiar92 zA0g<+*@3ps6IoH0EDx4~liC|q9^`Hx{on^*evcqS9&vy3Log(!wwB~>Fb_tkzP>%)5L z4*kWvl5wm~o>~7kYt{m$n~uh#XdOWcn(A3?@e0I&L%5|>KN;`&^NPFEO+3u$!8~6{ zPyhbZo3E-D$BEXTdnrZTzV z68HJY7(xnaW6^zgX(j=+Xbe*y%Sk$JF&mjA%~_bc`>dB^{mz{cZZCth8Xlev&CJ(o zc=U41$1R#~tnoqAl$w&!mb_+QiqWh3d;zc*`Qz!|fzWs`35G-9_LF|h*PW9vwI#us z=KnQ9-*fK@&*&}0?OsD+*8T$7*$MxM0O_@(OEZtMnnKoJJnNmJZ$r~hvi2|G`@gW1 zRqvpjw`R4vBkg;_wuY<*zS(N(+AG|L6cTg7f4%&iwIkn(rXTk#U9M={L8&P$Ey+Lc zNI75G@FWE6>G{B^m7m>l4(;Ba1=T86Gn>_1SaYo+MqOG)J4`G2)=OGb67SX9r&V&+ z+c7xQ`k?hecFm2#gVuO0&%Fyhw?IV79}7J*0<%{5Us>Vb7FeZL`?smp5S(%p-Uuv( zTsN1%sy~eUYqx@v`4STF29a;X?X+V^c;?8M7d(6S21>U2UorCZ2>Js|V}V%<{p(<% zKuK_5Nf5jbFaB5vYlV$?Cc~o)hls_38(g=GrI;8m^mun{_+5 zeCE;XxqWlX!oRldIH}Gt$(ZTxdXn+w^o{4(w%ScEdS}F2Wy)&}CP#jV4_aMe3W)8^ zp>aLqk|-}PD4!NwuuqE|K4sZzn9H29UL7~K(=-k4TcA9Qrte>0M~ zqtB7lW+>}wBFW4&NOZ*+hrv&{Y)C0aSD15(!LFB3}H@{tbN8< zuPF_6;fEf51PLe6!aQ!;^J7`b#qrurC0c5p!UocQmei`jd3k3$41ifR1JjXy9O!mg z^)^g8B!Ajin%rp)-P3gwBu2PqBoH)LKQ!tmfXL~=XZHyWdJHPy;S5_!fFm<27uedt~I>%T(vmxV7Y@1sxIPY;V2v@(tXJ2Du_5x6P8UM5H)0knV5 zXp&c`e-q~vDh9?~0!9PokoG-q8;SI5K6YPG91Y4Z86Th6tbqj&#KUOrVOS=rZ}WKVM+C3P z9A3LHE3s1EIN1BEjsNx~AJf7o>xah7{=!e?zWY@8psl;;{PIuXGuMDKW4Q5;x1#6$ zg<)5?>%U{tp8WPIY#k@y>pJ8^`T# z27#?@WM>rP7;$>*Z30pNzeT6()N|cHJFrZt8w}fIqE# zv2XHr5qlqh?|4)nw*R}!rY?%N@&ntNyI88`8=hj%oxOKUPa>?4v$aWgPdV(o`TC^8 z&iuP3<~tA-T>X{!(0P$u)fTu<4ZVLQt0*hc`{AdEWDv^@Aqu(rCJ5O1yM1pFrs4i~ 
zm%(qck7S0&PZuB!ClOM4R{umIX-7gzjzaNyuT#MxxF!BUJK-7B>RAfbRk&F4$#1B9 zc-%(3FqMz=zy^xps(KI0Z(&>}aEKS+S_+AoaIq(Ys$O)|m}*KjhA=b6*p%H1kyr)O zsOl+el}e06pedtnOJKmzNR>SWJ*10NC+HrXA}mzig^EAY8-re2LAqz0T`~ytZB)?j z;HZfgW?7e^8?mX~qFOJE0Z}_tXR0H4xnxsHge*eNN+3JaPHj&xCYfFc-PXBmY&#zB zYj(i(+0+oX+9hjj<&l7&5#@mXPz&u5hhkuh1*d zva;Jjy*X6NVJ>Rto=MysBcS#CvO6zAwGLexMd-gxbqHCsZjGU^m1fx`-DIiM{;Xl| zSe?PGezC}utB^JLXC0-VhERK@Tl7{>fov+s#&@lli>71>P=MRpxE`3BaAKo97kOqK zkA{<w&Sn=f3C?hZEdNOAQe?oe}Xo3rHvXfLTEDMymXw_>(gtIaKhVB4{O7 z7(Ks_Ho%4ftN9GOzQ=%KJsS26(!NzT58?Q|@DhdepV2%N3NuSOTHj;TJ`CB}Q1U>a zEfWq`>uAy)m_r23JotG0>Qp)XGq@8 zEf-@6)cQLWgfSbhstcYHpZ39)cCKgQjOG(NsSR3t*M`=$Ryk2tQrT{S&XIB#;v!DP z!I4&XG-D;M+=9xW*f5w_e5Sc>b_ZeY2b`Ojz)MlC*5xuN5O42h2>W#wdQgIKa9m@j zjIf&|va!;Yi+ygcBu9JPO;}07TIIP3862np(T_Q!WY;rQc?iowfW-?13YCE0?O&px zf6A(k&k`kefJaYlzfy(IV!2{rAgC9RoUQAPvPdcKNtm zSd6kh6j#Aa08myr{Srg9SuQUz~FT=DqjokKA+~z6ZKdhi%*Ktc@K3F|| zAtQ9^yAy<5ps@;3W4q7ipuY9v5gG99PCOq4`wfn#hwZqG6*o6e$#vM+S%p1gpGmo~ z$pOJi4tl7yqj=We-^(&CD#*X>tTbJyv+nVr1S?3w=oEHaPFkMzKfj2JGt)sCU6-Kp z>nWM5AB0|mOXZhRjqAP;lBu4BM63V#Ybsf#NLo0RdYo`w$HQbQ_PT%~A`-9|FzmgX zZQf?U>|#}^JJ_fe^(+rkRf)=c{f;d?r|0 zz}xD+-5s{t?+4w8BIm8kxsLj+wQJyASP4pB!BmyOd zS*ZJalpZTfWq%=7Z5ophMgS>y6Dc^e8$y z7KVPzs7{px*B#`>_`{xWZ5Z@0s>9E*A%>SHamPmr;sB5)%+LFr2Z2dCOB;G1kyTHa zH~#&yJ4gDaLhO~%IEQ6Vvez=1=ien=sMwz?@|@W&Gpjg4VLeeYFKcG^FV@20`cPJu zOp9!uY{i}j=IX^1R4J=AI;rC8_w|EQwBj!oe@$2XX#eFz^5JE_wCzV)@4znsfxJealh<=nF`85!zp z$yXM|+$jFDa#HF|J)r+uXOEMi=9i{{wBk;4-Y6C3p7@C4R$m{ z_$ZYrt#1_sJ08DXm8$K<5@HD^9q4)j3EmE%JESx2P7U*JVDa6S7Uf~Kk;3cw6pNKfC#5=! 
zw7@H_3>^}Fn=Hik1DUy6Tek4od!BeXm{j5OqsJw+1 zdM_hS|2jnY_-n^&?6Ll-qMq4bI-t|a<$gWZ zDnH9X%zu^|7E~;pYzTQ5wMiSU>($8_0b|w6EadW___EEP_wePPeStyM^&x%0 zMVo4mA5uSCw!Q<4tLS(+5WWU(%u#yteBdosztcGF;9oH8w~59O*O}n9m~TxuUva(+ zKOVWDi4+u6L&@Vylql@dm7AsxzbCo{5P24;rAGe_ey~x1e&HARQab{G@o*sVd&G3cI6>8R0!V}VYAC{&NFBaWOk-E8{6 zNG8-2p_r^V;+63Cnv~Ih(i)cXeTD>uA@=zc*(ICp0V;%5~0twSyy8gL}E* z%i|RPDXxacxsfS>>*J4@rZ>1_{I(5yxY`a4ztBovJJY&kMq^3#&&jIU{F z!9OgyVR$QQ;m739MI@61&JS6M`j$2I1r3pzt&YK;X|w6htn9n*!)U`^pc?&`C#`MP z(_P)IH@EDWT5T3S^SnLK8Y#BPUM%>4@eK4;9{q@sQIy4t+h6(p zd+KHf*K9hiB({I4h0nBRe;P{mhsz!>kyxkKfs!d3-Tw9a4aP#5!Y#4vrK-p~4~rvV zEb;zF)c|UDJ8%LzrwAt=2c-9q-TZZ*P8iTmAk_C0^L(?t%XVrk7VJ;^r@lbC*_Wvu zkUMR8&+KhqA3~Mi;(PSwEDwIN{k8*m=B8uI}-A<7lcKi z3FpHimUQn}5WIl@o$L26&IUmYa*!tX`{&Is00p1ZtXww_cxSlnK7lKhRHSSWEJld( z7CRQevO6dUSe_;fiZ zv39!9j)gABWT85V#9S<8F@wok%wodCLi87+Tq<_KVkDZLVZl_~U}R24LVLs4%yr^s zH6`32@^?EAy1`n?GP5)TA%B!VlX+I(Z8zS7M&U=E|EDzZzfpJkFSC4ky=IPyBIBt2 z7c`}Kbbu=b`$?o4%GrACLabXRQ_5(WB`z_H_9+*v5#~PIQ+DHY#nh1q)KT>n9?!7y zR`T&z?=m4vr0m3O%V7d+ePi>irh(VMi*xJ*^K$dQg>}t&z#o|wQl_(`=a&^BgK|3 zKk%5!lo!xN4lbKHv82+_=tz0JHXYrpNK$B*J6aj$krsJq8_rmj zTM~qrB`iLnKX4Pd%bYAF3~l$yl!b_&Kgk`PVn!W`h=1mI6enf#ExY%>eOIM)SZa$HbN$e30hPl}NO5Q(akjDC5vVB(j={v%zYJ(IbZGwim8$fV) zagG!tZM;ZUEQni7v4e$ieuuOPVggeLB%CpQiXD#ZCZ_OU#qeT&Hb;ghj_M}y^UK$w zy777zQ|vWSa6?s$>RF76Kmj9!K;iY^7GasiXbCC;tIq1ccM#j=8*~nI&SMlyE=j2r z@^bYEXr5BYXHc{V0P8~u1^!$Q5*CAI3j8<>{67cb|GifG|Gg&J;o%WEAUMcqy>?Xe zUza3mXJTgwF~ze21?K@7nnH+$J4!>;2OaTQtZ-Vfh(iRpJt}44RyI0eJQ9@E z=s+j03lL?O0o4c~tj;Tzb7|`FIvA7SPCW;TFi7yAeG@GnYMMg*=%4!Fs(#m1vjZJc zSJ#dX_>0aeZYKWLl{m-W;q=Elb~q>LK*XU7>F{RXcw$%Lc%ph79=ZyJu{@`rc%T0u z*ucQm{3HpUa)KI{0IEJpkYe=A6CrkK3`H|qSPICAKFZ5~48fp(6`@h)2Me>%F`|gm_#6%K zqN9$9FW@9tzs|g(mKYhi0wh8@CZ%E!c!Jf8gDGcP2`CDEeG-vIq(OEcWu!upW6G#%n2Woa9Z{q41BoaI{sRPo!+0Y(XEO0W>`;RS*Xc2Z$J2u0)hVr>fx z8LX5;m6Ff|Rb{9vga02I7$h`MCUh!*;vbAL2G0*;V)@rZaz3&POtM_bDKL&Xs#3w= zB{F=OGsAd4Mpde2JW?PJdVis+ZDG}XGXr~;mZ2tPlO$Arfk)8aa%@p1XtE^%{|(#s 
zf8M@H5UD*F-uS3nuKyfPgydn+k`O0=x3b&<23FO$& zbHxedz`21%i@%RuJ<}b2?P1}aJ;r(Vzh^AEm+>#Q`b|YZ4kqZX|M?8{7v%WevBVm0 zWR<$I`l~X=89@YO{ zjZ$s*BL6n6$nN*TVTD6&BHwjO{C&5#aN72Vuv2ylm*T5u5xd{rxHS$@9g6*TMruI>AWuYifK!*a`8 zuT=~lIBin4?CzlA-7h+yM*2*AnnvVGEd+vwwSziTO_xCH#LtQpDE9Oo(G|;{ot66g z5BXhBi7Fm{Q^282q7uLW5hH1|Q0yY4g1Yra`0ti@*Uj zt^5GB5EsR&S%u5(qs!Mq^Mb42^|mfY*RG>K<+<{?a=HmD{m{%RQ{tS)v#HfHi_#Yz z()~;ZdV%N01y3(?$~GPlC_-7aVOKqgjd54`Hf5wGjY(NYXC*IY)Mgr=NJpQ)GFW4_ zXMt@1MRI%c>!PflHI%b6U(N8nwmWP?NC4!yo^{u3z4w2+_JumG$h{kfsW84D9Ooc| zE@&{vK?V-Y!0=`Hk0#Q@Wd;*c?J}&_LMw(oF?cBlO~Ryz-V{Rdt>%0k8#0S7J~(;l zJjBOtaoC!W?|9BSC4Y<;PFeS-ijN5#^W$T;q!`Z$Om@=43C2TD|G1y>MZp9B!@}xjFvGj7crz#4S zZ`bTgRMY~2;B9cpyL=yLvz zX{EG3(xExTr~9k}BUdr*M3r{#3iRKw$)m=>xVSUH{QK+LUXdJ{55?9l<)(ZG-+)%o zPqDQhg?u<}@o^-(4er(@Cum60oZx0V#wRGK3$N8h_6t(5CyK23ksnwr4z)thFob&S zn4zIzh~Y768RKrQRN)mJ?d1gzNBA(qCqb^lD;Op|%ii80(wt*QCrWb*dLQ>bmP&Kw zxpHYwZhtPkP2HTy> z$~BZ47EwLOUStnK9>vI!7k3OxMb~(W&Uz6F8Fonfs_mNzACdl*$#)|k^$iO;jTur_ zuJ!`1BVRaVPGC(%T6)5gZu)|_A&I}lUzLS=K@*i)68z+h0HD~xL}sh9+?PZws3MjU z^ob>6Ehv9wnJo}4xXB1&1+n}XevDwU6+vI(J-XK7tIg;==I*PGktMGkdvV0?hAFK*ci-ofyWM%>*zZqNt8g3s}&ChemlHoh0@IplUH8#>22ywpY+{6t}fVN%E695aLf<7 ztGKWAekxu;U#ok}Iq)fsxXa>mfeg?O z{U7=?Dqhibz|iC>pS~=8Q2@>)j`4gXsG2=_&qe;iRSkwGSl~d^W!HCXmx1{S<^_9{ zi^56}s2|V?*`sb+U2M1)2S;Q;(?$#(#|H1?w6bb0c-bd`dlR}q3`w6&`97g zMEXQIOQb)QuN*=>K%XIx#!3!CvTh}La?u?1cQP1@)+ts`t$s)R2sR}(k{XItoAh!9 zWUEd$)t^~6@_RLEB)sRv>MH*7ux(}w6}ReV@6J+wX$x8%)@~g9fpTrf)0DVxPdu6% z`9U4N=-OwJ{*B7Uky|d$wsvO!6&~i-z7ne}PQS9_>%yu7aM|QnMBAPJJ&UNIvCRz` zMf;|zBaLErJ~_XBqQTtoX;CoEeueSBV)rPjPY-mjGcPTY?lb11QWQkH^qVTy2V5oz zb=&|$UJzuL&Qt-;JBI`%0FV(Ib0<+svkuB)MQbOK9TvHjku{#!Uz=*_*Gsd{((0}7 zo$xycXnB5DW_G~x3ioqHD?-*p=4IfAnqiekKJ-LTv$TDN_c z_m|RAmvM6STDEa-$+XHHnVPdQvx6F8IdFpa>Z>{WYQ!S`0zLz~h;JFA1)l;e=EjkA zF+PoJt^MTHCDt)o6#;dbS(&$Dyer(ZV#w8eFIg|vnk+Zx#bBI>Bk;ugjaTUINE|fx zi*a`pTa#t>RfMgol3DXGohy<=LIAbaoNq7e#U&E_%>7mdlj}9Sr(#^qcTnXzr361i zzWq(Z^(OJ6`@-II6DwHFdizSlFhjqUBZj;fbP2W&T>_g4GY9I6WWT%3O~V^-I=MGS 
zpNdNNRM;OfTr=afU%WI$`d0hL4DrGl`~72v;7J7!o3A8Rb$tGp>M7~y-+>(f9v!_i z7(HvaX7tXCR|Y~Qo$*RDUrjzkuB280opjZ#`A-_NL`W$>?F%%p!!G04*>@?klv3X? z+0+0-OG#l0!i*_?wLx0+{7h8nbpe`45V9yZh<R4}d;`cnw0C(HG{&uXfoyB+vRKhb%C3_=SveDWEi8Q9eXp za#Gls#eW5F`0?lqk;5W7ggd9W#%6ZaacmPggqxReTAPH{6IV4Mpf#&teWC!l{`0Bu z5Sk>~3i+?Rn)V)je)u2VhW}uO>az+(EJacan`XXrfcx~+MAo~N|-lh`d zgBP>G%w6COa?y-=fP@x^L+Djt5pNq@HfZIX@zt4vM7~BLnmrJML3|GqnDhO85N&0E zEDKVj*8{&3agM_WfCO{cLM?nF_VG*ezVS71iv?0Lh`EKJ=Yoyn^$;ctakWVNH3Zm# z1Pne~S6=hZ&34-(voNF`MXt5fX5+F$4a_prpk-mjC)(UN9jRM-zftWY5IZ0G2QT-X%y z!xeub8I_QO%5^uU`3f+VESYB?DNuNRXuC7-Er0&E5go5d?a} z=$m@J=}6G7YFW4%)^IQhZuj5?9@IEVi1H5{%=Zu-F(iE-M37Pz@E#MkguDrR{5Q^r zF~_4|hXH-!>!$H@uooRn0xc-A-vC%3`UxOJ049L!5wQD|khB6@MjZ7f4hK(6U)c60 zSlu7fSkkcTO1yNRd(+O(CGv#1D_T)(*Lw#-FU;2QST)Thb!_-R)%6ai!ylj;O3@H1 z(4a}Myc<-VjoF!S{@g3~<91$T>+`ca*L*Gi{TbHv;=0Z)JQ?$PWY7KYg-@1b2V4#G zj<(zX;qYkI-Dzck=mI;;#bE|D8uYC%m2VBlW-#m@yelqj%RWo9n1z2Bv2DCczV!`C zGE25$>S2aUFXF?ruE!(i=Rgii3-psNw*Ps5B)n>4w;lZBDXWxMq2FHm^rCXV=qbM$ zpH6>`#3^I8V}_$O{|;AInv8=Vz3X^1=vRB+LJCIKeh%;=_R-6m_H+sV%?U&1gZ5kz z&$|k(VzUyT0ZjmEDkRBy-}Eq)NiN=L);O?TJ;-T_x>8`EhT#n*!0B6Z=^7cIK}Q?E z29C618_0X_(bq_ZJ?|ETupc=^1n%hT%P


hV~u=CwSA|L>-w)5AcB0G03@gt9|O z79v!$)Q7VeG(89~;ZHss+vg+Namw`68ugqgUV+%#A(hIOh88pStq2t-g&8DU6rvopy4?`0rv1K zOQq~XiVE}ddmd~&<1UC&LLXzvIJ4C_tsK;NzR=K5i(EqdtjTnv3Mk%_GFWg0<8tgc zW|tmpOch4PUPSNT1oHnu1sp>_W$dcKOL^)GSzpCW#N>m~$ zQN3 z+*yZO!2!gnuEwtvt+g*1j&uw=p;3o^5GmhZnO<@WO7V>UtY7Ve6LWa%*hc03j}M3i z^1X5*25LsI8C8N}ING3XSa4D_XW8}5sA3*lN-)k93k|2%`pVuw^FH1}b1*_2$e`ti z;eNq5l!B()v!#Iky-W!I1k@%t6%AejC>*`BmQ#X8&MPTFOK2#>^kkVJMU;;#*LTdN zO1U}X1%bpwXZ#H&pUTAPcHwlRuy{xwVx)U8ae5RIBM>LxkkF}+Js>2;TJb?$%;Mls zLVZy>l_S(oXB`)>qme;71vnz1I4>VVjPN#(L=Xe%pRCi~N&?h1q+i)ffANhKhYfm03;{>vY_JHZ z<$)4p6{NAka`YGoNd2BnH3u7NZH-t)YA~x>g`@0;4GrYH(qYiaQgFH9QG?;p0-K3{(2_2oC54aw zQMQ71G+Lm^9q-Z9_C3fIO=<#mm@Y%Xb~LUVbokr_+PifK(ik!Om`GhR#e60@1M1N4 zh%(gi>~ttlQZ~c_MuoQ;`+Dp?-%6%w?3ZEqOzI$M7^I-0GwBI3ze*hd5$TlL4efTI ztn5;&%;(wTKN~c{lgj&I`vtYi(6ReZC99lFtu*Nt3v}BS8Jc7Ez^voIEYq<@1>7Yw z3{)~K4IEpA)|`jxoDnOwN)G0HnYui+PQC=)%!u})%el&9T~eb3#-&zJiY}ZtB@oCB zZ&ZiRXt@4v16*3GKp>z{{gIy%8LuUzGdFx{3G7TjM`UE#ofdKP`3)I04H;Ej_DArc z$N=vPQ;TQbbcoc5TOWxc{9uDhqP1xN>W4pj-|6mh(%l^<^yd&JJkD416yIudh?bif&ndf1FG38Xc53*1FEWlTchz7Sw=vav(<{b#p2tqXKR)1dKA^7Qm>Z&i~^?NCwhIz`cqfh_n<- zLaIA777!&09MN$}UD96{K)C3#V-85hkU9)pdB}T*RCQZKl${O=vXpU%_akxt%B9CN za;eKcMD;C3RA%WA`FEl%wFl#n7*Me4GN&8T=>s91{u;?oOvAv&B?|&ygPIM}=@G4X zC}iLJ@hSf!6N9E;c~iMV!B*gfD1gdS_aAF5J>-~^gZ6V5?rJJ{0BZ{l?R92mX`y&L zP!sFBQ@vNi_j3N&IRu+seowSnY7)Pd4XX{_iU7$5O%X7%S0m~o#U+W6J!7_7@Xp@e zn)PlqLgsFzU?I>7Cw>6RaDFw!WF6pJFY4w_Kv4IcBAmMT7&SS~Z}ga!+6V+1O5^p; zwHAZ=H1~HnroSr(4Lgb+B1;-2Sh%+b{6V{-uKfN73tjwNhtC2{Urk@AIgGJc+gXQq z4)1JdZQqI}le)vyev-?5k@Ee+#z_wJ-OQG{QkdZL_PbF!&Bz{fDxmHE>ak_b7H8)=HiD}ql*mSsbh&uOxgf` z*e*7AR?cLr8qxAL7R|P-JynfoWUN1oxBBH&!v?0OcfBiJjzYR0_x4ss+KM!suA~3k zt34}{vXp?=M(DpwO*6dVczhR|G%5A*9Exqv#<1-sQl(+SUK>Je@P-8j6*_FT5BuMd zVL4zc)2X%2i&dA0bW=2B5MySOY6@PyIXkBr~HEZclj>eHL^6~ z-r#jN@NV4QAQO-NL-Y+68N@25)=XW;<4Q|QENRkdYkfNe2K{%Fxcjq|Cv`U-Ul!alV2nz{}dGC#LJ#41R2 z(AvwQAiRs)W}KOWu5Lxx8|w09ZdPmqOrFk_bzU$FuKv&*NcuT znjpK=lgMIecOxV5u^i|TU0@4Is{tOuRSO|do69*58<5Ver-$4C{DL!pbj~}2Fd=}Y 
zT#EQLs|?7wAV3FSYu`HPOQGMhirf^E)~4=7^QL*N(yJB&oS`vXQ0Hp}TODwgU7(JIUFSI8Z*LS z?LSGw+E;N=xNrkOe(GNx2K5_Qhex!(kNq;Tfped~N_19&i$Z!>tTptlT&>tOUvQDc z*QDVMT&=V(v7x{sizE)iu4!N3@4qz#lzbL`mU(Hw=$1iq%Gf0%z7=M$up`8^RJ2$Oyn1Nov{ z>pV-{Dbd(0G9q4$@KGF_8KX!6wsW~uwsCP$f0s+@!(&<$Fhg(M#Y801fP8U5B=5S&-VS~yEKG-41MAw zcmTtyozRJvP}I2p?#T)32B0%V%J-+8HtsVjKIuQ@yK{1BkcN%80+8)mKxV8u~FpShiUWr)@`Kq($M=)Z$I8Ubh6Ore<0be~^DUcP{1CG)#G z7uoGs9(C8|D7;9%@`l#d|NBgLM$Tr+{BGDSBCzXP*R>(q^#SCy1bIW;opG@E`N0?e zi4XqwtSR}q8PRZyhdpm@_JA_QXRAU2l1hkpQ#U^V;a?J@$e0Ec=MdRO$_GBuS%RT1;?jt2CxJr*QsnpE2_v&{OX zoW}JqmtrH(97t~j)GiM`BFL<_&w(_=mIyz1*Qu?61L=-nqT*}1MAU(>@Usu=;j1TT zifCd-`4%^=y}a$B*BwL>SoQ7G5sHor=m)tG-{G?}Pcx#n1U~i~{*6xi#QyXT>c$^f zI^Sm5?Y|-A`^owW{&Q?Rk}3aB{v@4lx;AIqB}6#b4i|tRKLXy#yvp@|%x+tN+>IvM z&P6uxaY`ua-L)ANpjUian~{-m>~Zs%*BKeLN92A3%ne`!NC;h_1>(fWi0KCC?VFa& zc_he)$pM+)0F(iM3rG&uSQLN;(2ahGd>;-*22&hlIwb|eu2>jFz0`+k4{R_$TACUC zgBAcJK`BBR_+Q`#$jpT29LWD^#a)dN0U971DPKs4nYD);aF~_~x(nh167TB=1C~zo z5i7rL3pE!3a8!3ua)_S}Sd_Wj~tq2_B=5N7}HNp?6zz6{X zp*TRTh5V>T!+}fG1aLD#{;#n1NRAVmH1$uOD)N7Ul<%Bv93JZLf92O9nX26dCp=bN z1|$*I?+Idy0+U%}I*W!0?Ex{2Vj(526<7_HmFf>Ffdc`<-zAiSl&|oIDN+nLQ`nZW zVv(uqQn?_-4Mt7)rbW_DNm>(P%LHW)D5DV==z_#_sc3-!0Yx4S*>|gQY?q2HnA-La z9|@hqLI_DtG<1$0S&$9nBrtnI^7NKSNUbnzMwF_J$X5g1v~<4d?B{fX`&6fWX7s}5 z*?=7R6rBC>;TD-y-&xQ8H5(x6@6ObaFRme4ta(cw>u21uE87m&F_2~7BF zk9mp98swo*gUM~?f&dyfXe9xbG4)Ri361Ij2`zz!G|kXjK6ARh9+~;T1JW|Xgl5)s zsqKaKj$8she}3GOTgWf4{yW>iEdldBn3PQY3!2PB6PZ(BLF%JwMsPZDJ!pEXhJs1UF{he+c|Bfs#Lwfnc&=diI;C zga+S3w!d%N0yGE6(A+X z^3y-Kl&zC|L(?iIPfznqvFbO^{;2qcB0`?|!GtlpZ$b z7X4U%zhwH0JwY{7^I+Ja%zjSbr1S;4axssp$IK~Bb19$;G}(%lmG2Z*L)=)X=f|wU zs4!Fp{CKc8>DIxZUh3CLholoalT%6D z(R^)R_$V+>OEXTI;@*gthZl5b}_U?YpIJBT^r(iB&ZL;uN|6RL5@?fN= z>PxPfrs8$UHuZIq1^#u3rpdA6aT(3m@7Smx(J$>UABhoX*%01B!^N?ybaMjP@Gpfb!Y0rIFQw5<_E+EIY02^0M>_>h@*$N zb;<2&1l2d!ny|^&&)WTFJlzj-iX-r|mK%V~Y&3yFC<}azV?zYW!$4{TLJq~&tc(JP z5%{z85=J4=2f+zQysKS`@<>C#Ve^)cEl6kq`2$dC<2DJznU1CffI$3X+Z1_91^f#9 
zxBa6$pHsOdtsC$I=nZhI!?b}dpbWnN3h+_LbJO)gxV-?PxEG-u1hK_6)*i-~UxECv z>8#Gg#rM^X)R+1%@PFX1p1H<%cG>1q*nm+_<{s{J0&<9QJx?uMuLYACFzGGd`Qk0n~qc7i1NmIhT-6s z`ftX1AmW^pQv78)&D-CS#%J8Hw6L(^v=QX!id1ymMFj&n>G}~?14r7sGz-P5^e{3a zk5x{_79LkGtR|5+Jq63P?OPkx^Zu;YLkXP?zT0`Ww(?+_yEK0Gq;Fq@_>pqM zbbR(VqsFUtK&r`6)UV>ys(aRIc&Gcg`0Q|DWnePWjk3oH8Jk@awtE-t!21ZiXc=)X z+cjK*=s+fS>Jm8>`fbHz03;{2Au`t2!>QkjOdF7{e?&_+Le%PmKhTupe> z)D@*mc_|J3wCYQsVWy=Uf3i|B-b(V^ic!IDC%adRYo$y-Su4cG=M}ap9JR*vN~Yst z>jb{tf2Y!eMy->|cNu@Jq#-8ysRg@#UE z)ivw6k$1tG!VfYGJI8)&^q@7mUx5{$*CJ4WozVAt`t0WHT>GBG>7w%r;_~h&c(yBW zuUW5Kh=vPg-cz($c9v25DRHCvySuc^_**FjZzDH-*_860EA{YGP}|csRmq}yKp*dX z`5#91RW2*xE%E93is5II$_IBrT30PxxPA~8b>XBIrA7%cCDNynNkxO7dWJU z+sEg308n3uypzTXsql|C^Rs3>E570%!)m=c;O6gibzrL$yD9K0Qh#^RT=8?LDANi} z2U2ZOUAQj5;+;pAXf5qiDp7QQj{X~jV*&wpjG6CE|A`B$PGcJV0v@ zItv68T1e;k3$F{?0J;-vI!8&pjv1k#uA>yvLIJkJY6mQJTBLL(>Mu+$N-@@y;|Hv1 zK-6IGpmg8}6t;k#f>KD8K?zXWfT~V4WuZKM+$!(Cq`uxk3beQ}t`fi}jol5vZ55s6 zt-i4#LGAW}!Md^gBCXdz5(lDq=vXNH5^23bYU|j2iRdgmq#xqjjNPBp{y7{9&r^WJ zbBJ^A4btF9aQmPRCmVF)|5&QyxP2@@c$B)6ORxi2I>s#ON)$&+DIM!fR;R0Ei@^Pd zf_4cMITF&3o^;}4PHa^2yr*3#?zy8|bJB&ZGC_VAAXahldW3$rs$|Q_4gx^o>!$S~ z2h$5c`!Y-AOMedK8{>!4%9T$>HJj*W;cqs!N~ignGRTnsQU2R*1STG|J7{+>Vatix z-Ld*Tx5lE9+sC5hLry`)#MZ>6+)W6giy}JY!%4BLKDJE&J^X48m<7~*8=YSDQ2-%x z3_UHPHH{VZ6O%(g8d=CQ(4VL?HAkZFqKH!nW@*a_h!3M1k$eiEhCn+Fk=*00K#~>Pt2(>$xql{ zjVz`wyln}fW%Kn*UCN1JvAJHPusO>CzS^8iIVmjN@1?;$z$m@pn@aF6I{Bs}u5acn zUP;8(e~_+A#s&^a0Ijt28!6eHoj_Zf4(S8|qpLB_MUQ%glnu}3T!H13=t^58X2%A5 z&si7~#Bv!fng=&T&j*fXI|L3DS6Dj@U~<_4UM+u;?oXpg*8`?fPghg^vcAB>WBoT@ zyooLfmQTTwgC%{*xWBwXOw+F}Cubw&yUC;u|HC6n{s}c%Rtdw%-$j>0+T_&w_8oKr zG|isv+i7ckjdWY{l5!~7m*Q%m;Wd8};rf&8&KA^PfOeaA$!1*IiEx(5bp#R2R6Qv+ zTWa@KCnK_-|C7CBr)+#nb5FCB=T^_{u1_Yw~=V=Y-f0 zeTM|@EKWA~6ANI@;);5e8k5d~55>!|iP!`KDcIdzpR0*Y00$RF7G9_^PStl>ZV(H; zEm`>C6X_|aH}DKOYb-o&5bKllG~f;Jb~$4Hg6;06K-{N3#8WK2YUF#j^G~g-l}{JE zc1~=jyQUZ|HhsO$=-g(sE#_NH`ijeC@kOpz|G4#gc~fii^Ix4qd?Ozhm=8z(dA(U8 
zxUfIWvNYhtx^8{r$X=Zp-tTgYK9=VobwmTV(!_7EKV&40P&{Cl1UV%Wyor$PfwBm$ zi~da*I+-%GDi1+QQwbdsnXtcN@}Dh@9)LDg-f+*qe}U}}3^et(S04u}q5+Hsc^k?2 zn|rj={+a*oG1Zs*6BVWAreQgXM>%ez=D4T1T!1s6eNAoEg{$gSa5v;R>cb|JxB65d z-gi+3e1~EB^~pNr4S>{xRn_Hok?M0^$d#@kOJoO8tq8GmmvWxiJVgr{pvs;&qW0 zeu}(!R<=blrLvT*0iGOS!GnN%mXdNn?V52BQh<1mQs%ix>kk7b%Bf*<$kYg@0ICb^ z3mGJkUUT~)>g)yaI8aXEWTPs z`gMH|-^)j*XxYlgmfEHZUOMxcr{4XdxpSN6k@%v`gY%x|k;#=`0z!Z8{<$H?e^OR< z$IisG8Bq|OeZ_Nt9!))KIj%T3U1=9m>eTY$c7;v~ehgg3G&7mcpW zTUNifye|5RH~nOlJrOwQQA06;p{)CD9at`7)eajGk^1XRH(cs}?`SDMjYZYdIJbov z!*pW1+-}58$qOh+g4Srr#jEZB01}06#`eSXKSRpbKTZtY;EIb#v8pNqtO>oJb{Deg zaP7~aNhhe!rR~7b!Mq8e2(E?u+r^j_eNFo-Dsr!F}9IF$iHkEA3 zv?)3UdT$2;Y9~fWLAA_L81!-K1%ru21cEmmc6wY#*WxwlI%fdTc+)3N@f~hh7>f!6 zfS)J9B{9NIi7EoXFo12@N1B-qD>48e9&q&(2i~X6J9MbM7gcOqPB*gZhfAJ<%EYcd3jy2HEZoA5I~X6vEPNiOV59!?LFnxM zT0UgomY`j=rKQ(Tq)x}mduO2xQ7AGGnmaiW7yCY63f;~6JhA*dQoe2`*Kj{j@yqhH zlI2TLzlEb#*t%@D1BBx$Q&(Q6cZTis#iHn@NGrA4H13i?iSjCkfwA;awm%@S@{At(F9@z`;BT22_qqJs>ST6N zIzzu2T-IRdY$8p$C>M+9Xf9b_G>Ko#eq9q@kNh01al4W|dDZAq-Va*+qjJ#KB<5sv zp|VLHd3Zr~)*XGG`CVWAhpKmXl6JX#rGQ=#aW1>-V@7qk>!nImmA$)1f6SziLh@84 zIg+PuAb+_O5?7UQj1<1hc9(5-mLMB1*tHUIZ4u<)@90+>A$DhlWOzebjPJ~kSd687Kv|=!4=Stc0V9@m0#7#xI5k@z-9vTUbh4xC>Ao7f2lBNHlk5U+-#LVtW^QI+VW zt+G@F^4Y>Q)ZAaF&pXew-#V9^SK6vm7A2*@*;!g*io)>GcS)RbkseA6><0z!3q9ub zc!l+6KJPxB)m{(+T)y|g8%*r67}O#83hKN4w%{-MvT28@^&7*42cs`mCokJ#eO_j0 zv}<)(OtxFL*7QcJLQ0TbGNEW!=7BG~EX(%%Q=y#Hzv&m}WjTg>N z&acH>H1oDMTfh3R?((|Fwr*dxTrr!Pd=hsjds zEKhfT{WEqh@s3&G*srQCUt@RY%-t}{ss14TjZ&4cTeVxbp78K6@_{KS))XD7Dm|XK zJLm0`zx3QKbHc;;oZ+h(SIi0HQL5b{@JRcbygP?C?T>kwBOdeb9#1sg=x{CKTT$U!ZN+m|CLB5An5&c9cU4EuOi-~X+f=E| zC=}P%t4B-s2j_=$kF;=(Q>uRLdtg+y9SuU-pfxcv@`!_jaF&R4IHZ3hx=2U0{p5@^ri9_Q(nj zN*64p1pZZECW_|aWA{8QUcFrsx%l*`Le_8g`7PyPTDF6hx$o+8iSH^u8SEbpRqJ&e zcGb70y%jWN^RCh43ir^mzj}cU-u7&9yU*M$GXhT{eqv6jUNncI-lh6AagXZQ-9*;} z{gHpxIrth#Ci__KlcJ1<)^t&9hnT0jg#RNR>4m9EHRt#;aRSG*g*M^kuMx!4D3M$5>LUSk63Par>U4z3JVoD34RzO<+ycY6IXV1UzU8@{|N--N3+ws#}%L2ref-p 
zQW;WP-Z&`r$6F!pM9Lm1+>jvfP5y-sgoe)>#|ItC zXdEh$?PFvF|7}C^(>FLWnVA`jWI3_8!zxexO4#0GikG^LEe60(u-tJA-tr)?;QlF7 zYSxw8+T@xB*V^Atxq7?jTb(Zow2v$7S-xnX_d;aBF!DP;zs~1ORM&B3OY>o?XOmIt z-|{pfe{o#DV#Y|{-{|)$X_N3}?4b;kYCFP?xE1>*JXgQp{`itAaMR`a11OH*;Wo!{ zo(eUNP#X1xLuluc)UtTMkSgHxUBaHW-P&ryRAQQV-~*XzCA}YlelO3h=BB0v52w=A zERvL-;41gCgsgNuT`8)ALom0Yq!%e)f75e0+TicpsMfm08T_4pj>lRAW$GW5~l8Zkynn@L7rk9mlnq_~U&zYzX zGOY7=4bpvwx|bh>G4y8#2jY@*t(|Ec+8i+g)+U#5GmpvRe|uWMj+mw+#|TlUOy9BZ)C+A2P9Edg6O za!(0=w{KuQv8RyFB+hF_)Zg{Iuc^sM6|loTd>}rL)JLa>vKhfnIA`b3%AVwFoUxu{ zwyGM6s8%(f)kn?h6EcHc`Qo>R3PcK3eS>@0ime5@7{6mF?8PzZM(>GLmm=e*U`u&(}A?Do|gp zZ+m@iO%WWgI^I+`LeO1UNmoP(i16PPntH#R5?B&pStXO0^;vH z94q6(=|938kng!w8rR@D#cGu6{e$JhFriO8atUnubc;wJ5+a6_6Px<28KyYN%}rOb zYgn0PxQk|+E;T$4QgixvvK=fE`o1q>b zTOx-U`*dVmO2f4Hv8oJB#yz;#dpxdSfWO+95qn^*FA>mIEb%*I8KQ!FkSfmm>S|_C zrNBOjYJ01K5z7(WP(Pl^bf#V)GQt_Q1Q{#@2wPLBY-;6}z({H((|Jo^gfhE$A-gc} z8H`;94~wr*%M>&QEKTJqhv@X*;0QX@upRqOKjN=DDGp44+zAQ#44=|(%fjT_l8 zdBXjMYv$*3@tZdoWF&t0$*rhDt!Uf0kEH5Mc>RcgUF(juJn7c=l(}5eV0+^4`zCXD zLlTVF=T_swr8l-eQej@uZ%7Mco^i6>Mf9CJR^7lKFTDIN5pTK8%}UH7al%}BCceupXt8jk2kG8)P6q^eRz-*b^oic+<{$+u*Non8{ zMmB00d`v}M`IM?@bZu`}e{6PNqYARXJ?D9%tT^LGUUfa|5-r^loP{Ona#Z%$nwqi# zuX0#EHc>mnysoByvAViE!+kKN^iyGOqVZNIX{WT%;A?RMuGjJ}>S4xR+H^L^J(V`e z+p9(H7Y`cIdwX+_guf8yCMNgt3b${?Nc)VF^P-QCiFA7Jpt!y#KRCQ=e8psLaE+r! 
z8$sJrZ`M+W(H>->JtOp|shxRvy@vW@*k@3eEf(|YHta}_iggXag>qdf6f(gjw$h76 z_4on6`08pJIiq3dN1k!7L~;x|M2WHF^l6EiL;va1{gesjxKqYv740IkwPwB^N&Wp; zwZF}X*#S7qaZmSV+vatGU~hcexy_H+u143=t8=d74E0p{yh6uS2B||h%PPttoTWrm zxFzKhDZ73p`7+FMuK#v|uMbO=eHRzsdU2c^XZ%`m<)GT6E6UgH*OCxWkpQ-2C_AgV$o{%2#>@M^7gm z8RR^E}ZnSQ0?GW0pT!49kFGauro^58>}mUbwMumHEro0B=i}Qm z(+d7|w@chc=jutd-cM0F*Owr+so3h7{735@&bsu${KFTw3JpX0-0pN2Qx6)S8I>zN zmB0N#;5Dx;#fJ>{4r^#;3{FSn8>u!FlCu3C(l(}Pf0Qi^yR}o}(r@kq&vo8=NCYSB zb_O=BSJzrrcADFMp`}c?@vpCism@c&P?$jjIh(R3RG&19VY^`xuX=u}e(EMZ-m{p> zNc}dv3%fs)@X3IP$_x36;W*X(rsmMr{{A{y)nXoTk2!;NV`xKL7ulizVw-ACjGPYD{ z$2YcAN(tH!FO|fTt3}%9GK*z0+Nk)Ve2QF^Gx)=bI46a=jY__<4A%1Qw=;J?e{k_? zNy^FS*{+IQC2c=%o9lKHMCOi%Jw&LJ0>@(X3V7QQC#y<9OLI<1VTj*+EGlpGSMmfK z*}C0hd{shjy$>_lXA}h=ZUz-BAqT}1IM$yC;~XkozAJWflqwOnYn96#lH z(f9P`%-fogqY2Wg*?nJnMadtl=r&}>#JNVTA72AvB&&5k8nnbGOF#eNNR{)m_<>4W zPZ09iEIx)+5U!uPec3FAb?WF~L8_X|L$^l!jPTNa{2q7JS6ex5i5!+3$L&(g=2Xi^W&ge5fBKqGze$xYLw-sy@3bGx9-t^g$BRM1*?B@G`IWc|zB8JK zl<#5M@3#^D)un&uU&&ci?@5gl9={}|UcX@;;Tf7x$eYCL2GA=~Pnnz3WNEUXH{ko+ zWAJ7KCun%!1+Lm;upLjs16#%MG;&liPzD{@fFB&F2dd#yOtL_EQfXO;XyxlY+!c}u z?ut}C-rU||7QcPfjv84X#_1Y!JY7}eQPm}rQm2M=DGFvY+E7U;q`PO&o)D4YG9@lr znv2_|#8q^O%tYQ7k@QHYKxcH;9?~-6GMp}JsZgDPM5s?x#>q+^Bz<|Hxq9Kt?5Wqk z99pwx&1|pYqHf|t$H3pIb{|!V{HOn)iFf}!3Ge^)T(Kt|rQu6Q=?P_zg>5{yJdLfb zi*iQE@=&rId&Zh|bW>L;zJadOniAA3R`^yh^#L;-O!Ai=V$8{)r0W87duL2=0U4Q; zj9PA3sXt}D!!x2SsEWOFkrS&OWUL?ZIU8g$oJ1gQmukw8)4=F1|B+#VR&jo|NHVFY zRr>j0P$eXf^ZP}UiZW^iV2^i5GSBg6Q^+~%fLq9nc=A9H2)TNY97id8@+mgHYKd>w zZvI1F9et?i^1sX1PqG;J5a5NG2Pi4?klEK&)jZa7fRcn{ozV&MM?7qq!(;z?D-<;P zA@o8mf{Zs%n*0F+1F9;3A<8G;S@m`J+1tlRj^NNNl0HZWgFgWso)R#R?aysF3b@0v zVSmUOWDAIg1wjIveWH9}uOe340XSzsIh&$>6_JLsKI^ZROq?(cIX_P^E$L|WFCo~G z=cC*G2a=+*CG^d96uMiB3caN7Cb`6|A^Iwv($}I&FZ8;nqCat5Wx$1ZZ}0zkG5*hT z{q|f6bsBsXjnizEDfoo0IVA#$$JvUy<15vj>d25-o?6F|FubrR6fTuT72u^)tC+t^ zDO>Wi7#iqmp?q=bLtd#d)%e)>DLGwrT?I9^V+lra04>E`p!ICqzzDyBH?*lt6BF6mDdgSx_~{RVUrySYutWtP5PKoa>@4xqb_)il&?7C)Fo~^)TAhXLwh> 
zICG&Z#L~EEu+EDHF9y(%9+=sc0&p|O5~Wb2{&q55WtX827yqaJ@*RsSJDMPKk*=GE zZ1w{+Ca!5b9n=1Cja>EPdoo6kB{ z!e^F_m2ebY_dbqqr%rqyZdvFybWCiwjCjR0+)DL~JTA^kt*^i(UCo~Wdt2Pi8uqei zS1mt|+o4a_9m?e6+*03%>%yUcIRv1gqYBkUS|aKBOCr(WAJ%v?_gY2iRo+}>;+vjt z`+B~-JN)&5CJYZ#()*?Z+tkq`F}`L{wG-M|K=6nCp?%)LwBJbEAaSQ z1&_C-#|b<{!dRh_ln^!5#d-Pm!riX8<)T=sIF;RK#2B%$rldF&yrPqIBop~=pCZrq z4@&52J+FLvj!e`Y$(uaF=uW%Q7{k1o>Ci@ZxSDCZgKp~+Gb4N*_R?gB$64Z$(vj%! zw3~T_hBnS;R@@2-9j_*>n+OUNjtFe5SR(@K8RQvVKD(zo?YgAw#>>#zq_cSwe58CE z%uW)Pp$`35`KCz9I$p-q?BDri@MR3mrbNEy?;{nOFWEK~YJY~lOFDahB6QYfZ4Pcm z)H#6rLbuEweY5C0L<>8+I^l5BD7QR%GHIu6)47T0nJ$N>a^InuF5A(;cVTSO>coeSm#6=o?XzQ!#a?c1jFbs=53u)f`=@+;FbFJ-y zkWW>o2fBIlTmd^;ExE&Y&y4u%XLI$Lzvi2pi(#WCJv??S=@v*`4-UP zWVLJJ_cs2!eeMwWO@Kw-|B{O>WpLIW^ENe=(9O^M_}bh|Za(v4o}2jW+Lj}U&77u0 z*pO)lfGYic*}jkiPq!!E0-q}Y97gzsl@Qk5Pl7!b1b3naCObNO@A!5u!o#nV9luUQ zy^b1q?Ry7AYCDrb@iql=wOWwYL7r&<4Pm7%{h4W|nM_n{FD4gDO57fq8x(fV*ccGk z753d!$#PYdp6kGe`#TMr<-CO@<&1h${r7uvMB%C~FL$eEJpb?Io4Srw-EYlSQEB(p z^wZBWdYD5AJfWG;qokK17ja56xL4b**FEAC#Xvn_B74K8va?p9_bKvcIzlCP_gPmC8kqDFfJ}zE*6IeNb?S`&~#{8>m^TK&c7- zokif^`ObBr=Sen*P6=3-v^HeGIO}5JFdqyQBYv_xdeNma`lw<8fMVVr&)cYDZb;Ln19H3=S8C-Dn z6B`NQElL#p{7CI-i6{!Hwg zc{3y8TsJMrkv{pRWb;Okh4&}ow47_Ea_ZfP%17wLil!Rl_c4Ee8T?djNbKn8QNUM_ zPE{OD?tJFZ7Vj>U3Oy1#T)#hQ-^Sat990Vi^RwcW77AwitJ89vk7{dcCp!#nK;?8j z3O}0E^z$fnb=JbrV)L|vNaxMM_ta%4S2PexY_e5w zV2hKmd7k6ZP3mZT+4xcr9XdI|kD0Zxrdfl5Zdu2M5{-_g8!s07Uu3_4L;GLk{d%Jb z-VwZ7^o^MaUG&|%$hMYP#>Yr#HnY|?G#k1G`Rsh4!9wGU!I6bj8zhhuYns2;=yR&cCEEoKE~1*%^K zYve(^5X<#AzKeJvoKZ(ILAY06Z8MwmlZ-ZPOrp&MNb+MOB?Eqe{w1~5Jv$45n>90- zU9x$(c}{7HPRqj4?m;t8iB6xJq~L-cUSAm$X=?488ZOq5KCR>A%zVn)^b*a$uCF=1 zU-L@9blJ-%clSG%S(F|#spggPmA*IBDgPSsIGy~&%M+9CqM6He(j<=7o+>(U?@R_0 zTOh!?qE(jPRnS|=*oUshM)vLRdy)`~ewdzLryK@iorQ$Hdib|9xDme1N^VOdjuVkF1B*X_wZMH;%2~cUyR>bbtRTIQU!cYTM z#>^vK&tu1^iAn7Jt>;RSUlBR((i8x7umsHg99s}=>MU}{yHh2Q2fZH}&C=MUW5*~8 zh@Ws*b{EK=--Ch)c7u~9XtP0K3M4EV$V=b04ak5B6OwKl--1fAC&h(7AX8is_NVXu 
zbEAHfNxct2K>2{KeWaXi3+CwdNGe1GWF*mnU4c+2{31HqYco=FC`GczId9t*hLG}o zZupw8935lPt4&AWMWJ+2Ao6Vd0&FXgV}_j%D^Zlph=4Mh)RP3b$mtEK65g7$ztWQx z=hVg?;1_{GHD|eK79oo?rSl)>hs=j6W)~I{JCf(cACqszj^7i1?np7)s%=(ocFrO8 zX8ImtO1eiD=_t|h1WvbCl6C^8sh`N0J}%1D$D)Gx+C+4&zSxL?MRC(TjxQ3yr+r*S zf04*I4upm=p?)HBTzOn|&SmT_H5Sf70!~8;zSJ3~l(R_Ey-OWeo<1(tWsa-J#|Gj` z8DevZ*nF%PtH>u4d?I;h*?iIDYM5~h$YQlF5c^lpUd`_pjd|ScJY~4qLanGT?0cDC zL8CU?LWkH7?%cVU?qkNem>wnp?Y_iGhvENy^IY};aNq9cJ!;|R0MYyn%^qyG+2$pL z*C8{16`#%?=ovAm5GMDS&-*0{_E_Ib9Qd7gZKw=-%n_P-*%85B-u%muY)uuD%NhKsv$s=4f@KCqy2iE5!3bK?^N{sDa+rj>CkBjCXrRS53@_ z=l$pXzjBiW9qc`DZ~*k4N&e6iQ&e*Zi+~b@^U=+b^8L<;!z!WU_x9<%N84f;uG}*- zUz^Q~o6juX`{xSH$Q#JDK1Y*3lbcIa`=50@d$#cG^n#!h*v}a=J)-93q-Hq%S1xqO zOBdX;rWkIT4yQ@-hcT~2!i%M4SC+eYde7CHBsOGMm`JkEljknKt#2kXeRj+>LeB}j z{AqvQu-SMN!Ie@9QQU{I0jkmunFekJa?ms7IyI<*2UTP*W*jF$|-G;Z0LpGq4L_OpKNiq$U_ir(=5O zD|N|mVJ&GmZ!ug=H_0EaOHqRH6Mr(57mhdNrA?$OJ387+=^&SbXI3ik07XoP15zaE z9IP&~IWdq81Dj(3YD|b1;v8{~>6|$W9EqQKh0ML&9qb!;ne4fpoUeX+VS9FG&tD5M zuHRx@Kkt(tcXa)*XvaUZDSzMQlNUPTEq0w=@$US|uF}mXYtC$v-I4L~PN}|v9iGR& zxXF3a@b&JSwZD{|insFR?lox19t{5e*TdcW_jxS*9&f*nIDhWS;akl&%NCzWP8E&3 zev~9bd4Fl&-k&cQ%U;g*b#&IHbe^3RT)9seKewD3*!4wC`PZk{KHbx7T)Q{5`8ZqQ zB#uhVTvxNA7L$o1V8%j&Kg728dJZ?z=`s*JcHMK*kODl01p(!(RR9$r4pZPhOew*( znX}$02;X5--wF>RBkQ!D)iTu2n!XB*SX)t7Cu4t+NcgtpP3dRNAf=lbygmw!g3fhG zy%~{O6)%Gw)I|1!mPV_r=)k0cMXBu}uw9sE3m}*EAs1AM)c}ZjHh`3Gm8B~|$rJn+ zsK0DS)+cLcVYPytVA=3&&>!@QkNcm&atDHr$zq<9t~dT&Q9=0zv7EL;QlecHzxYV^H z~q z?^TS>yLzbmw^PbTiq&?X*nBeFqx?ac!(2&x@vB!j1+y=49~qK~k{1^`cQ{N*@2+`# zZ;`ZC@FHez;hCiQN8HB0|Mk}w>c1{s_*jVRgT z%1^q58Jv%rb9Xjsf%{+IC9>Q%UpeUvOb)8T7r(@3h&3;fhxmS2g~s0#qLW1vR)k5s z99$u_4vz=Uga>djdAuYxu;E4MP51gmL?smwns|=dq$yH&CshizvLooq>P)-8l-2B+ z-8`z1Ubk|oKJ!!!)tFh+4N5BRg?sk~RUMxDZ_zA$czm$5{N5R2Y3aS={r3hd=px5; z{UUc{rr;ow9V{;uZ3IW$dui@xj6{y^DsV}H)Z{RpEe*U2Dd`yqvv8Cp1E>tn9k$dA zfIOuoF!2Czr}Bb;un0GuK9LId%hqG-@xWeJQfjnfW}M#Ol`$daRykHB#XW3^9n+Xt zkxfy`;xJfRSyK8$2YYR;x7xajN?lcAqJyoiV;1(VuC>XU9^Tp<( 
z6Zl)iL(}=4a~mABr^8Yt(_u%6NUGJiw&)xTU?5j31+wZ6OU53E&My)t3Q&gV>+`9n z3Cqgbj`tfT`9%aS3|^~Az`{6zB~U5yEyHr@Oot3%Ywm57#gh?JreEA+a>`yBfq)T|$4xsYuf~*6kgd3BRZhx))|Qy{ z{vaVnw%;0QQ-~!g5W}y7f*3@FT>majTY-b%^Q&7RHtVIxP`~yTj{*qXwl{z2?0}GC zHJh6MK!}0j=wGCZOCXQ(muTG`bwd)#z2KpR!iAIz63Fj*^Wn$ll_>#VQiAWV`FUz% z@S`2yYd5}M99`ThNk0AN&B={#KGM%gPHjB8Nz(afTgv;^jjJUqP96Sx#oW3vi6pMF zZ}p-)pYtV7x6hh5XCXglE5vNvds_lIvfuqa9X_zebhPAZ=tdJC&xi!yXL7i7=RrZx zhhGmmUOtf9bY|v3gw}Pb%I9u3ZHygv)OMd)=2;6jUZdvftx#_13={_-^5m@&xGz+1 z^-vk~#^tIT5Pveayk-);aJgP81B9f~^oUa4`FVa|SvG5w&v^z`D`}YqM06~}|EwW_ z*L^W^Q4i&R@#?`#{v0DS$N&Wf$XzzRVE;SVt%k@`5bfY>Q8^U?WwgNyDn74QtxO=G#{xl$jzU51Bk^vc)u#nU*2z4e2LE0p-mo}JvU~{Kd_jdtdrg8NEx<^w7pi7du zrnk*xm*nka30c2Ux!fQ&pTuK0k1MCg9>=H33n7`^IW;C$UYM8eY>>`sBD*B6fD3AT=vXLoFmck$8kT{MpBDfoZNd-JfS&aUlyXC#mS36PK=Xc$Bw0g?a; zNF^X3A}Ru6pj5*QN)?oWtWB(F6&tC9Mml7=Y`DhWm1?|x1c7|=tTkZ4umX*5 zklk&v4zg|UKm0nQp%q3Opp+K02UqDm$hnY1t9`$51^aiMJ)kd;BQ$}T9 z7iROLUA;>9CD_1WpN=;sp>Vg>=6X?c4;EaK+D|`UCl4Iuau~&Es)0+^X|J`1+(tyg z;(BpJf)`wga}rka6I&kM3}Kk*QS4pbo3`)qZiM7U4rDIm8A^K&BVA6|Z4U^wS7ERn zxRi%+0hrM)yE6{H5a>TCwhNq@?oFZ^7qkE$g&;yvwW8%ZRA*~9f~rt6aau#?e@Q2# ziVQ17mfL+=*%p})?(h@>L6}{ImPG3Vkx_s zd49~@h)~pE4Dd~+19+K3#iBxm5Ke=U0w_X#!Z>8#(6Di(Qu>YSsur-m)q%QirA-q7 zv^}7D1WgXeX%Ya!%0>C_(O?do4AteiGws*z8JP%tvw!#-M$xUq@q}t%IyzUk=`M>0Tt zhliMP5HjM?0IrE*gf8VEGl(IpHWHV_$C9x+ZS-?o2QG;l%GE}H_}D=I{o|@Es#Tjd z-(ndypp98Xv;s&`0-`{OX&8yy2%;^)irmFQrr~bFKp84QqDJExczT{oPp|($swEoV z8~l5eY4)~|f8yKyP*LPL!uAq-iT>I)R(JEeNUMjrqOz;)-X5&a02^5&lrY%wRl`$pJp1_I^AzE3W&pj+8`ItOX1^f}WWv|LI zp!xI7jnuh=s#aJpikiJR+bav9Av_E#g(PHSK9H--x% zC&qthUi)cI1|wOOCuVgADHz zkyi1(?%v&cS_+Re5j>co%(Bv7(vRIeo(gob8GhIwnWet~i0l{o@o!BNjwhrg92Ws+ z-Lu)_`RKEoaX>gLLF0Rc_dDoQwg~>=*DZ9qGv3_$9A&iA%f1ChJFw11Qh&KSoIpRG zV2Tcwt=k2ITQ{G==$YBf9zIXP;2G@xbLlT7`c8M8_H$g%!CqW7+_bP%pqQzL6IeIt z<>V<^}$@?m5NFaRpmWZ1qrj z+`Rm5>u5t}2_yXsKyQvbDIW%78nCE|Zatp^Y>if6U?iP+71rEC2GE8G{D@Ex20$}r 
z*c9AFa-APc9stWGCuhSlascRtfZ4HO`6lmcyMfv<)9Ec?cz@xA#{?T3kW%t^g=+epp>0txZ#C6;TcG2LpVT0Q9dcDVJxa5>H@1-#A;57y1?t>_4zy; zyb%PMl#@R9{>SYXbqw1!Co{FB-E33pK|>RFw-!T_gkhgkos}CL`N)nQq-7KXphZTXP{mSHIl{itOeo_QdvKNm)C0(LZY@3xbVZP z47Av+%#qa6+_H7T6s_63%4L{rqD8FA+JE%O}k)P2t-1+ z6gPI4^btjBLXmXg4p1ap$J_u?xLuHC1ONU0C++vY7I6IASNK?A%K&zIz8mGT;S{jX zv4?SouzXz9@`DUpzt*Z-?lCKj`m;F>aXqD*LC^*Q=)=M$^=1{sx4)Jc!mnx+~-$EN5C92Cl z1_XN&aQPJ`zcLQ@13SqWv1s2QZEz`ZDM7WDiieRu%fr_c&qkv^V$_;W+>zf+%=FJvc5e^dj{i+s11s5YGM)&^IAe*bj!IQu`)!!DO=VOYB35mhd}50~v&VnAc<>W`ZHa4StDz zLXaR+ z(7Dn3+T%=$3mPgiE+Qhu?xj*h1=CJyL$InwN)C+39KR=Rz^#SMPU`#A@F#F3gA~p2 zRPj5oDmj5dKU0SxS+nRe*K8W*9T$1rc@5?z?Fl6_J|$&9{S5!*q3^eFNrcPjo!5idoHI@E8;W+BG;_(a)0i}h)X4@g7l@7Tn4XkjROCFX6v}xW^S8|LRyc@~b5VP{7c&{|PK`E%yVd-KGY-B6+8c_adI~+WuvZh6z z-FG+(r{G;ohne4YEY1}f!`+!emJS9i8L&mqsfKm$jMnjeH9)@tzL}E)pU3-m?U1WS zmmKN3`)H|IR%3&nLVG|obXkvnjEP{5?NgA3_DSdx1`?nOfOAauRi!h+yJXd14Py{o~ z5T0R^N?U>`*i86QumRK#_MzgfqSs2yC2iSD8SvOgt;w~6*1zugX*V)=kFHAR6A$6K z1J50=d~>vd31D@xB<$svyAkVOYb0c(1j7@e)byWI8gO{5YVIDL#~l36NP|8a-zP5f zb>RH$^iTh40fvj+hycSK%pJtYMwtjv;m-jM-Z;>Bxk|rcnI3T9D;NkApkjEY=!R8@ za!5oZv7->?Vj`YN&Ba2mR@kZs{a7Xs5s&hyylf_k$5x3|YN=Y9?M1{>*hCx-VXLw* zL@gr1L~=3GEDR|d?P6!Z7R16<1lG|bQqc~wD~~G1NO6ca8&=hVZZTCXR%L;)3=>1z zL&U>M80aipAh~RnG#gRjz+IV%cFNge>_726bqai55~keK%N)e_o@_|X7uis}JPW4R z6jzi`mt(V)64)_pUY3Z)_x*wq;MZ+S`{(>@GdFQ= zHnzo}9TSCDW@-Mc8MLCS zqDSAVuQ{Tlj~+Z6z)TVgj+sGSM&iZl4|clxX^j<7W03*`eyOg!6!dAqY>JE`C8@H- zgDGsh7%!HpWHgBu5%NkTH7*Ma&#`ztJX3)9A%1!^X)7e4FooL%dRkbLDU%n=6c8y= z_!^;GcrdQAiwoQs=WiFc*gMYubyP}Z^hs*e_Qq&@ zBp9jrHbo?!X<*E>(^2fxDb5|y!S1#sb>OAe#)J{{F=tr9Rux+IAoVgyT?u}(%1%TG ztPd#&?JgGN1XeD#2~viBDt@;wND1~u9GleV3>}s?SO;|g-M2&s41#3nPmZiM*bRmL z##y2hkFNpRu%JjzyX}TL8*M{by8sZ5VdzYD*ug?NbkyI-h)WkV!V-3{3uS0GUBiKG zqQ*nZ2+Ig7_#t+oXZbfd!*O7l6g)%IMdPb+t%CmZwvgjsLQuk_3l!j4gtH}vS_=1W7Zk>so2bC&2>gi7c)Jk8XtlHk zmT)iD1$PGQiGHAgr;+g26|@S$6>vc^H^C=r1q`NDTMFI#&|j9&JxK37?hNu0-Gb`n zN^m&>Z=>SbiBz`_?bFYC9}g{mg!w#xk(O(;|77GOUAz*=CT!WFP5X-My1$RFx;e_a 
zo6zR1@o6P>^e20cka9(Bdz>E&Xv`ztcad+ZNV`%U0Fg6vPhuKwmt+J?YhF$ z__4Bk78Y|EJIgenKhwVDdWzWUIrM&ErI%@$mc0#gt*Es4K*4mclZ~qNhJmWBHw(8w zt9OwOzp&R8I(`TZOF&25PUR2FTKD6*)bC4lLS*-fp96FTu%-#00TELH3?bD+Oe)H) zsIh0$qx&&@7bRA_{aM7~_RX!s)mX5t>8GCsfJW3g8G%tWKjVkUmQhB;Vs&&s{n_>z zKJXsL^>BXP;=Y#IrVIXM`ByeHfV~moA^~F@KpxB|9E_#3P%h}S^&G|>7Ak=`CTt6B3!EF3xc(Udr0SF$kEt;225orTh25O)p>t~}TId`D9cRXSpjKBebiE&udv*PF? z=~5W2Y=)0%0hp=O-a(CZ)}VGeV7L+zh0XH{`j%n)d@Z}qcr+8=t<$vnfs643){6R5 zUvCa!WFvwJIK6k}`(q~;uG_FP~6x%Gu@|nYt&&$8qaJ z{rvSZ&)3zEpm^o^tU~kl#CyYuz|0-1#{~WMQzI1J;t5J?^h>c%^?)A!E(mBPvuP4- znRcai1jf5{5!a}0ZY|ewq^Z4HkWZSVhYqj!mQGvkSF{SqG<2ZHT}s9Pw*vaGm@oj& zt+D6P#2$02?aj|Nq9l>)@GB3555(RaV7_Sk-Qo~IoI*CFV@}?}dt6?}a2mEtd0ZSC$1ig0#C$c3EMvKAvir9i4Y1n)640RDWUTY-y2Zywf4 zmo}m-CLPUqKjfS~CNs(*vbHeg5e(}>=1HAIwS_MqO7VyTj!P!mk-q{~PcSzVz7>F4 zafGiGk95eIB8*OnR_6^Fwb}yEVS?{rq2PNG(^!vLAlA;_&eDJ*USGn*k}ELQGQ=VZ z_u{_Ska{gMHLBl;!UENcu{hFGVo4^c_8N^Q%Xr9$ z=r~PTOIew{%HJqy1@^oBTb%YMb4)GN9xhWBH$RkL+qLTSE9VDoG-}Rih4+I%DkoxZ zeb;CI1~EC5Dh**`>%s8|P#23%#H|l0%#qPUBsq6i`4vSrRMh*Q6xrBCl%eqrw_k*v zW^Z84W*pLGQw)|3*0N|s!l>sUF?XuqQyL$kr$F+;td$Mo!ut|tay?*|Q_0D0z9E&aoKlW%BfNV!JiyXk|;s_XD9q#S@DtFC{T1z^7FH6Pr3<4iuxf+bg- zpB1v-cU?&N`!&9+O*9`|VOmJ}su~mDl^;w#xQ6Tp4%DjZFqj93M;+r=^Ye300hT6z z`o#Jz8bH!?FL<@N&hyp$U8for)jOVE?lkpepZxv9B~#ZwxwCeC+>L$Z&x^?9+|^89 zlEwNSFU)yb8#mvxG3fO45%PIGgB?3b-%p*r@qX5)%~$WP(k$`r`|WeuqCBt9(cV8# z^LB07l$(DFMn-&lzbvG5;4`susXV_|SRe1;5ZyY;h*3vjJB*C#2w7}t+^khCp&Xs% zqSN60V0vU4uL*2=)3nDc_83$L?E?QxwO||xzMODCC_GBt7EYJjIy_I zDh|L$>P9Cs%39iUEsZIOG7$yT=YrmNvlY96b%g zsdGO7#WGrkYvHaL*>Es!zVI}|7_7=l4v{4QNo%hjKM#Z>-&$A-@Je-HF>5T{kUV2YIAvwEZ?uM}S>gHef4JOiu*Zt)XY*)?{Efp}FIVHE{n8 zuUZK^2yj;)hXu}MUK7C0g8M7h!8HSH&UV20y%iEr3;^pnLIPwaHIvbj*EoIDIxf(H z7WhJ=|4zTM;OlFf?A<+IF81J^a$Rz2?puIPOXhi>MvLL~pa{097$d2!)PqJinUT3~GKItC*t8S|BrI_<#ap(gBET$ay z9dYn$XUjyqdPDb3T5vJpG;&Ip&#CUVc6FLA?)LL9Zi?@I;gW`%cjAxXdA-M~oe!N% z?h;ksJpSNbx;pQ0QfafzJr9I<9F6aC?j>U#aql3oIdJ|M34)ya3GY=m*DkoR?(F#e 
zxjXWydkxB3Cd6?c-`5BG7TPC1$2aM2Sn54&QGMT|Nf7I*=VE7Y-VSqTvE<;9EfW$S zSDWbF&L!QNvv-8K+P~S?I=`r2<-+=^+b>{`k*h=dPxBm`W0TK(flS{k)TumU-fR=& z|L0`Wj)V3An7+oTB+ajlHP;<);3_sBiaN@(j?p?<{#}^j-jR&sE2g_*&$!eq4L;N4 zob%J%gBb@_T4h?D;cJo(#CKND`J;Q_#jRpZ=F^8C{Rd+A9&4&tT36%!`&WC5YpnUt-^h;q9wr^%biX9!wEA4!LlrX}zgbi0W8rvUk8WA0Kh}7{X1W!8U zpE-Co;wx2q`AWyQjR6r&+2Ib$rmHMfe!kWmjP;Loq2s~Br?xg!ajssj#C>D`+lMCu zvw$Bgo!@E2cldJN@|FdeFM9W_DCJxLuyBw~lWw6ps*v_Y6C#@y(G)ZJ!6w)F#zrZ< zDgVURC)#OF2|nJE&njpW(KO#}{(ShgznxeqB00neL*kOBtJX##GE5-*3qDVAQ;IXp zyMomF-4+|f2o*N><6oQ$$*3?`BWx)!a^GZPJBi0sX$6<*rkoylLv zOq%d8oVepgXdnKo)>qJO~i&HDp^^>gloy}ub4=W+c(>!)w-`52Lm zQfzpMDwVooU&JY|Q#Z7x%!qCQnfG7vLtn16wu`Rs6OT8IFx`&_dMk&r`t)K+xH=XV zd4b%s(GpmBb>3f!oWu)V-B{s&O*HtgxDZGJw=s2;XMvlSVCgcky8RmERJpFBdre7Ibp}sJ02!% z$j5{(NAd-dpv^7@9Cp}}KAoh_5)y-CbD&_tjTTNY6xv~^daxdYRElN42(XewtLe-I zm!$pq{aNc-Mk9(`XK7KO-Vu>SEUCC~)j{)cPFiSwQc!#>Ih_6zYnYhf;5fH@SXR5awEqadqS*W{7B98bR%OK6hT_|QYM zBQnem=uyu{L*2NzlgKy{CrB#1An|^%48u5^fATC`ti{R*s#o_mqT3?1X}*a~o0%8! zjL0qNB6)@09}N$V00L-l_$j+p7z|vwd; z=+>?#X`+!dsTmvgPZ&Ms#FERcyX12*ir&b!0fMSNUK5{O zAJ6TM{=x6iq397LDTKk`$2Moi@?#8nyP!zlV%3blWWUohZ4=# zOmILsvX0Q}BrgXTv*6i1=`v}Fw6TOa%RrS*6kRNFNG4;*7-m`1pdZNyc+V+mTpyjKoPX|;VII~e_r5j)?{M+YmfRm&CDT{ZHf@6{uMS?}J~26rJml@hBiRktBk@Hrv39{`!;#|llaiDKaoAT7ES)xGBq>ri2PLvf{E%>Ay0z$nQ4?VnD?YL@N_tp~=f=sU z%VrsL8sQzJzA{zZaj@l;?!t2;Qf#B|F`#mUA#&8ba+$94M0-Xj0#bllTBo!5#9EXR zZ3n&A1sdo(&4HOP`zWE-p2tc`ItcwC4uE8#hX}nb>ze?!gf37V3VWRbTTv3);X-+_ z064TrhJKZ%BH}-dPyTOeY@pL2+wq2j%~x$!viMh2NLst}agPbRA^cC<)pteeJ{W>c zYcEXcs448Iw)e?b8+C+q;E)b-UagUl-$!-bvML#p+%PNi>F{b228(z}vel_bYqJ&G zJ6V1eE2MJ$2+ZRyir*J1*1IsVFkFpQ0qvCx%5YOb;6eD$!`~M8AB3?`1POH4lVb+N z2tRhAz~77?yKTY!I~=P51i7gdIP)+%`tz}Zm*Sg@<*`Tu92o_VhoSJ4(H|JP;*^Gz zGmmc~Ffe`-p`@ju@f~nJu4PW!me%oi`-Qbf@hZK&F|KD-KmRO8i0;@JiS--nZ?D$H z98dp!1bMwdZv?B1+2boQL@-=LLDG)s3LiM8#~Qf1Pp?>_csa1Ifpzz;#Edx#!|32E zj|aMP7&bvnbVWoMexHGAsDaCRTq?py%wRb&>`d|+AbPKL2l^I-Au1_JigU*^@fKR@ 
zJ%|cJl4c9Xcrf~mWeZi3o8#17=Kia;+-b?{q|=D=Xb?ccJF?ETzrMxK~`OfY9hCbufvwcRmrTmvdP_@P(7KU z&M+=8hGa8&Sh9|{PB%+edfU0#s(GRM4j+y0$bA0KMnO9t?EgEyp@et3lC|hBZ$S4T z9JAnN#KH2`n@(Oq&ck5eAqY*wlkspM+2`Q(ImkNr&&9K{ zP1vP2_@wN_XaCL60QVuMvK(!TxpUVCz4*XX8Y2q^oFpcM3X-tT(Pd50G+Sjfu? z=6{A);LF3nU7k{!jd-5;m}uu&>sbk|03&8YPokc{ixZ!~%|u#<#`h|p^??zzqksJG z`10#aHmmxN90r>+*M9n5&l2L%?vAos^~?5NIojD#{&t1EpQ0hDAZd5`6#55dHT>9{ zwD)8K3@UbaC_2iLdVlOt=$<#(&gIm`2mnp2;uT!Gxh$bm{Q#j8(W0dJRs`=DbokO&7F!jX;8}KH590y6<_@O_WN(+`F}e@ z6Z+&qfC@L&+Ih2pg|yFKw)9mIlsJ$BLrKaue^ww);@N|kO^XcQ8NQqJurz#!qaEeN zX+6hR7;|t2_bOJw#X{jIOW&;`(IvSlR4s&*f`M~_$`x0DyHS0U;!cTcn?TPHs$b!R za!q_~mbGZ83I5ELJmL`E5ysYX38guTaC%NrI2zx*9M-$p$d3NbzvF9D7Hg1qKoZWf z$tlVy3@^?p5`}N~3CAdKSR%Lm@L+UjIHm>@OFZOT=o7x(mm#gXA3^h#v_(*SuMy=m zvFX-*s#{q&-;>{~?yk4T?R)!JZ=uB_J|iWci}TG@TF8ZWqwPsmea<5Jb`yKwip2Ab zA)_zHoWC?v-HnDtPAYX7Rkvv~Go)3IUc)w%E#zoy(SKt%vc!Xz`YsLaA)le}qSVU7 zncvE9hQ3iN#Zj{PpZHL$*NDDNx_9S| zPKJ=-$MBxLJv}9tx~Gtd#j-6YeDM4K&}4Rdu`Dxh2d_l5QY=}Hnt_L%v^UYS|L}0V z8W*tZk?icF-F_R|c2-a3hQIbQgycN7tJ+wYJA0)U&r-WVsy(mzy-aUKWvAO0e64vt zqq6X5$akW z-NnUxJJkDlS8I9uPnAD5#5CyADht20-c+~NH zv+8LN@%e&Tn(kXqL<{Igube@d7~;oao4B*`Z?*3kxOVH8JqwP${9{wyz|r!!tW7NE6R;d>6c-(_V}^*!7n~Z{`?*H6pinjxx>FW1cAQH;!|ZBmel+61^um~OSV!T z)1GKu-ah;DklEnQKXD6i3#7k4fl1f5YF$p5{Toa}U&A^7U;n0Z-hT8fyym&fdje2r zsxJKl2WaumZcAgB(%8t~<2;lV5KX+&aXe*)3MGGxEA8U^epMfLjNQ0w}xHerFbY~ z;i187L{E$9(Jg*^_n2>uG@%5oxd)5F8EQ~ZJJ1gujpe#ojsVYC3mlD}TQg4|&2JC- z$^M|@ce+iF1n**%16m@TG)W#ZN^XbS&oPIHm#lC6WjA4#L@WnsYl%T0X-uFpfYZc8;7x-5>#zmG7EZK z&;bHXF35o4e~Iyoeo!;-n;~xHmT$%^+uXKOPTn#l0~BL8=Z%K*Xa=|umGSVfoZOx> ze*W22S?dJrTP1O=BLuaxC7*%D_Lu9wOpR>0y~6B8!=V?AItSB2Tyt=*TS8WQdD>lG zwWZ<6p<`cH9qxH|?CJ1tPL@Y+F|g*+EdTsiyZKw=mM3?;R)k(DwQVi#UGTMK^!E$D zHGOj8h?mKZ$6|iO>cl4Q_1k}hRHZxj^$-4Fb2CA_tiHc@=kkax7N6=2%UoQ-C;nVK z?|$L*o$~FE4kmnj{3)Yrs`bw;YrnsK=iJ%dCGGb#>l>3RB>oc@{Tn$Qx##AUI(fBQ z<{2G+Y3HGwzkYp!i8IJ=Yr%>&!K5}OV96|#?WYnZYE!e~o#%%+i?gReWv8=zL?IpoaCtSS5n&-2*DOzPNHc~IWRa1T6@pL>I4M{ds=(P*(f3d@8HGQa-AB#n3)00PXYhTA 
zP^8i5y+$Y!a@of~<_pXZz1`VokiJA+0-}^^rV~kv!l3d(f4o&2)$0Ny1ga;rdPq>! zkw_v+7sD(L=yI`W`_;^ITg62-9aUOezAX}-eZ_5=DZfCs7SOl6nyOyagVYv)VsST# zJfZ{QLR8Hcw30&rNf0&@0z7q8^U>k=%1?zY^;o7YK9NrtzaY3^QBxG%d$zZcxJK(2 zEv@0)MKm}cMj07rhppat@JywVDYJQ+=IE94VaVW@Eb7{xb7Rr^MD_*nA`Bmk?t*J) z!!=6yV0$^KVX*zMW-Pk<`ZZek*^|EUl<-}Y@XWD%Q0VK?ywtglMD~XXUo9fL)y8vz zuLawpcn!AA$qw4i*V0@EGs|^E?kTtq>uc2KntAr1$8WF6GOcmvXt9df@j>YaWp+iu zfV3fa!h_TgT9I52WUz{s(HKO@Y$NieL?VN1t=^}8IxK#!1vWO#T3IvI=nU}IPR&wx z=xZrPO~V%Q6Tk0nuSM7Q-EHp0E({k+waTnzY!nuppZ8=mr#EDLgw>Wu-x+W12Pq)9{o%|CDJf&(!38mroBMFHT`d*?(n$k~K8@zb zSJ&#kUccr<<~NSVj%%hSbl0>77EtEsyZPVBkNm-=>9_IOqnWIwOV50|Hk{`e3tY7EojL|; zXsw2s)IeDUm%7|rsNrlodhyA|Kev^Cc)an=(B+>;Jf%9WjIwP5EJ-N(O6FKZ;34^!s6eF7_Ge>EnDiLl;~ab}iTpZpSSo7cY48UIj? zosO0fvgLttwtUhDPYwM1v+C#D6a2@-H{0-EsW$$Z#{8Y6!R&(#z>~}BeS`8V`jelt z%pUQ7CS8W{g^EQBGB(0p=K|vNf-9RevmS5E+^d=O=*dLIl}*nxXWhNDVEwVF>kj~I z!9O0p<9dFEcdzH=Zff4$idMgUD{M*C+c#Nnh~{ORD`J0(UK+CM-tUjEJX^{giCDej zx2NM)%dHOL43>SH@O7!*@*n43KOcHKsbAkw!dD5n4D)jjqvNT+(pI4!8tT8M(S?I$GCx&^k@LqIazU4M# z2`E|4`@{lLmO$5%OO5w}O%=uurCArb`V|cli>l+2SQUlwGB0|?EuzfXK>rp#YvvJ% za&nTOmE1$J^kR4Sq3yRO84C_Et9CAv{fbo^*V*+Ef8R1;k-oL7aUFA*;Bv*6qFWO~8#l#Sb; zJl~d&56ajywQ29$s~xX{EVuHt6GseQoP^&CBYLSew%T?92Gy0eb8qstEEWB7;Mr*K z;v>se{}bQ8`p@GkBW0pM8#&`0OTe7;Cswd$O~}?8zdcEsyWTJe8eipt@$AmGhTBY* z;bWe7-V%aaGyc~$LofC%yv)r65PnWY@IHLA;Vo^pVnu+XF{o6;&SA5-#DZ!QL18jD z_StF_$@Amb1G}^QLTnj0_yk)ijg}lQ6K%pY`rZrUWTZ1U;QMgTl6}A_<$#mQ-;!Ns z$-d{L074*qv>X%=*h8{tVMS1s5Iyw4gan_v3FQoY*bHi1_Do1TVd@KiPtHFvNOXha zg3r4Xg$$NZ{bu-PV+-PS^^h%Blz-p<79G(gjAVIDMo@rA*h)h80Ng=AtNv|28M>{R z!1t{ULz#syApA2&0f(>S69!@3iy*h)&n7+%y*WChnVz_|_VyfO^>{|=P2=LB7c`CRQJgCTR~a84eY>ybZO)0xeU(v%pI7e7nas(mJbdFE@Isjp?)02mcd)=E z2ekH+y>FO#-ynP6d1vO`s(KYAeic&zdxklVdqT{DV#wro(0g{gx!9}NFjOmKd(U&T zC5y>RLcD(2Vn)ZAt;VV|;Gm-W>yNGJ0Ln z19jgki>FTYeeHV|1D@j*N(fSNT_#!{_PkqnEO0c9YqosXb*Lv7po{_JnXP{XP(~$U5)Vjpb2(&yiK|8RDT~Oj;iSXT{D;JZ3$Ge?q zih?!^7gjDj<)6ImUjCdD1Fw74VQoTbQK8_%bO7qoc` 
zzPIhNs(k@Y3mu!6+D@kJK0zzaE%3A8+7$R%@~QiK{i3;~f({ElwV=UI#HTBIuy6=H zSPPY0)9V9E%mkdV>ROsS-lLDBehGTU?Q=e7pQU@eBv!xV!tp8Cn`LTo(msHyalZU?`H`st`*Uk1-}1k%TRLo&8xJ!Lz;R` z$Wyqc*R6voX^QU5&wieo&wiQrSHhFe$aK}Ns+8m#*&L` zj=W9JbtI9Nkw=aT%`sGoORx$_p^3C561z|~IoH@*UntW(vxQCLAUrDa%pA!R3su5p zD?~gTqf`Rg>z3ha9Nc!(CrF^6-;oT4dDh?}22i!`cBrTv;fm2Dc0>^SA|$B;mq9XA#ZRx1 z&oa+b)XcIkstNyRb&W;=;DyXd^cXT{NSO^N>`mRCur^BFVG5Sl8jW+yi8uy7bxUk4 zF2GE2aX>K3BK&&Othae1o=3iXD<3%ep;SJAS=SrR9iZrbcKG`|&HIne)H_5H=x!=- zd;HVa&0n7iEZFLFWui5Ei_YQ|PmFl=j~-w`LJ2w#h@o~RKtqSRkU#?xmz=Mj8?};4 z>n))3Dot;fIqe~9QAYJSSSGK*J*HN0tkzA;7i)_#Vr?msu!5zUE6z|_cDE^ttgS#z zZ7{C~o6&ZbkJbHlF_qnx%Zc33#*`9r+M_A{vKcM44?UMd8)2`6^uvM1xzgET?1rV{ zl_hwXY!HoS7R`Zq#ku3E1Mst-4;v%#Ey!5UUjK4F=&k9wnsL|+(Q3)0cxE*;zAs#U zC+Hx*|402saW0pn;ag<1<#7Sz-MA>)hn~lwiVUa2;E+ISZb9+SG63@7k`nx+sC-;i z{;$Arp;|vIMAJj?=%$Z19T!1yK3aJX<@>?4VQ%42B;Q>ORr4oIH+BT`iwojnN*0!) zh5gZPhwS2Xii<%&qK49XsN>JkLu>IHO}n9FUywto46zI9MhoswlzLxF?WPwCih5}I z&rAKM<8;t1R7SfM7o>KlhkL6-Y-jaWuH)Fn1!R`Mev3i#7^Z!PZRHEE5jJQeZS@W#%#VYWz0UQNa5AxG!Rx|&B+mk6w_+D#%Z6e)zh zIwT|npIR3>4-B8fAS`5n<`tI1 zJ48TjJ!mDN6dvAyDsIqwST>@9Vi1BDiUP>=!&qZ`+Y*PPv!Nv5FFQggTc;m}a!WqQ zYUk4%j2`qEuuT1zTs-{MgvRq~-|2GGH&up3A;(817na{&`Eg>_Sd~L=Y2w<|zrGly zo;#}8a*m1|BP|@wtK7JQ?(=b{YQeTXuhd1OM{Jn0%FZGItxvZTzxjjYM@5*;d2c?Y z)`NW4vo5WZ&!Zf z&T)^ny?2b&Tetq(m8HFPNd@t4;%S9?f8c8OkXIRe^}Y~gF$kdeGvdd|%*Czra*L|90d^>o{=ID(hj9-o% zdpH@9@$TXt%d%(ddQKkht4WI4wdB~KIww2nX~AkRVP&>WC5*4iDeuUf%T$%5D%1y_ z6BY5Si4tOfLgWyGvn|z*tKo{Z_WBix?83$8{U0orauBsv51~Yd^d;0C!Sy~MViT83 z0M~mtJ7~7SJ?R=;1VN;`eKs33s#5U=$wR149!6y%T&XyW#7Lf7a){FMhyle;Xnds( zTd{wDJ|)o1aQN!xL$#5pt9BXXJQUyPhZ31PYjkYvC+$9o=i`e5iY_5uOuP>u5CZ$)Gp*2$yr?mRHbM zSuUFFbz}1DtqvN1ooc2jnW*z*)_mv85rasPw)>*zRXd#%w)oMxmh}810yl9`t9M4L zHzg>veN{ndNH3e+VAa6BDa?Rc94(!j6g;AI6=8S?&J2~I(&?Nf96#r6n;S#ueq2lD z$;!+5#V42*_4)aVih4A@v35E0 zl}p2Ci>zQVnaY|(1PLr?RS_;W3?s585jK_}9DIX;64kGctB6bI49BL2&yF<+TN+OE znj6a?X$1ZptIjKDpI?Bjb#dltG~KWt(Yzkuxrk*=p6IKY#R;*+2g1C<=Sr|#X66Ks 
z$W~e4{e6b0l4L9X!pqXf`TLM6jV!YCTdkNW&RI+!(seapYpLI7e7AaF z%Cj!3aPuqI;GQax@=oqhCg$$9^V~we3_j|9T~M}q&h6js#&yuD>=P?$o9EOF^D3c^64O7 z5)1{}Wd)1wU!uHb1nvC(NYyJ+bO>u~$!dgzDR)g!(GjL$PS9KgjN6RD5&TK~LZ&cp zxxd5W?`HJ-sTS+8uXk+FJvH0#x5vWZ2gIs6vT3(0$2q35_YdTC*QL5jiBN>eW+Ub9 z@>)cJ%Q6XRQY*$~C8Ua<$wm9lGtN|ZOp&iD7x$=-w49zVzw*`a28Z#2!`mJlF76`* zT2}GOQ z#+=X6)MCQ@3CpiBiz*KNS@Ctq*s|YrrB5p`6P9?Xr$#Hqk5nNa!BrNnN4`^g4j$HM zf_{i9d}O9(&V9l>zu1QGgrHOs?w|kI&8e1aUZmE&E{L>Bp ze^hyREoFbOJgdWGImc0B2z!Y-9#|hQrd1QeM7=6{SW8w{l;&+NV%ZRPuXJI)ILnvW zSDEv&)3`}GjBJZg|6KX*(8XadLb}%XOHPiKoVPHhxtKgy8e()g*!n`{@*}?0R#oPF znoH%a@qw>D#l@enP4?ya+Sf1J$nQR%YB;{CD8?%U|C8pABL2OILnq1`0z;ihm5b5# zTf$m|AN$0z-B|PdZ7(9wEk59r_riXO6kIljWthgGj62p#W`1kJ|>_Pc?5Mk__^~8PU(<_W9F2oaF`q zsRz<_cC}qK^>19=P51jFiGDs}f?;C6<3PZ|!Dm-4KO0$FP;BozG3#+^E^` z90-BV;`5znPBsLP7hrF+FSn8~j2Rn>R|`Xh-=CC&zuTHmg4SVZyZzV3P zNbaT;bs{78uFIZd&z-x!;0;yZuyi+e&M&^PWr2&^H4n=ZzQwN1`xmbZaB)hnzoB>P zjKHTZYw5+F1xDZAICOu=ZOtOfZ*MMlamuxH7#-rN_dTD@Y008CS>){wtw-C`ShrCV4CHDpy_PeCBX?!QFkp9c3|rvCrt#s2~pxdCAS diff --git a/api/tests/integration_tests/model_runtime/azure_ai_studio/__init__.py b/api/tests/integration_tests/model_runtime/azure_ai_studio/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/azure_ai_studio/test_llm.py b/api/tests/integration_tests/model_runtime/azure_ai_studio/test_llm.py deleted file mode 100644 index 8655b43d8f0b3d..00000000000000 --- a/api/tests/integration_tests/model_runtime/azure_ai_studio/test_llm.py +++ /dev/null @@ -1,113 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.errors.validate import 
CredentialsValidateFailedError -from core.model_runtime.model_providers.azure_ai_studio.llm.llm import AzureAIStudioLargeLanguageModel -from tests.integration_tests.model_runtime.__mock.azure_ai_studio import setup_azure_ai_studio_mock - - -@pytest.mark.parametrize("setup_azure_ai_studio_mock", [["chat"]], indirect=True) -def test_validate_credentials(setup_azure_ai_studio_mock): - model = AzureAIStudioLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="gpt-35-turbo", - credentials={"api_key": "invalid_key", "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE")}, - ) - - model.validate_credentials( - model="gpt-35-turbo", - credentials={ - "api_key": os.getenv("AZURE_AI_STUDIO_API_KEY"), - "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE"), - }, - ) - - -@pytest.mark.parametrize("setup_azure_ai_studio_mock", [["chat"]], indirect=True) -def test_invoke_model(setup_azure_ai_studio_mock): - model = AzureAIStudioLargeLanguageModel() - - result = model.invoke( - model="gpt-35-turbo", - credentials={ - "api_key": os.getenv("AZURE_AI_STUDIO_API_KEY"), - "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -@pytest.mark.parametrize("setup_azure_ai_studio_mock", [["chat"]], indirect=True) -def test_invoke_stream_model(setup_azure_ai_studio_mock): - model = AzureAIStudioLargeLanguageModel() - - result = model.invoke( - model="gpt-35-turbo", - credentials={ - "api_key": os.getenv("AZURE_AI_STUDIO_API_KEY"), - "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello 
World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(result, Generator) - - for chunk in result: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - if chunk.delta.finish_reason is not None: - assert chunk.delta.usage is not None - assert chunk.delta.usage.completion_tokens > 0 - - -def test_get_num_tokens(): - model = AzureAIStudioLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="gpt-35-turbo", - credentials={ - "api_key": os.getenv("AZURE_AI_STUDIO_API_KEY"), - "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 21 diff --git a/api/tests/integration_tests/model_runtime/azure_ai_studio/test_provider.py b/api/tests/integration_tests/model_runtime/azure_ai_studio/test_provider.py deleted file mode 100644 index 8afe38b09b9f02..00000000000000 --- a/api/tests/integration_tests/model_runtime/azure_ai_studio/test_provider.py +++ /dev/null @@ -1,17 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.azure_ai_studio.azure_ai_studio import AzureAIStudioProvider - - -def test_validate_provider_credentials(): - provider = AzureAIStudioProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials( - credentials={"api_key": os.getenv("AZURE_AI_STUDIO_API_KEY"), "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE")} - ) diff --git a/api/tests/integration_tests/model_runtime/azure_ai_studio/test_rerank.py b/api/tests/integration_tests/model_runtime/azure_ai_studio/test_rerank.py deleted file mode 
100644 index 466facc5fffcf6..00000000000000 --- a/api/tests/integration_tests/model_runtime/azure_ai_studio/test_rerank.py +++ /dev/null @@ -1,50 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.rerank_entities import RerankResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.azure_ai_studio.rerank.rerank import AzureAIStudioRerankModel - - -def test_validate_credentials(): - model = AzureAIStudioRerankModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="azure-ai-studio-rerank-v1", - credentials={"api_key": "invalid_key", "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE")}, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. Its capital is Saipan.", - ], - score_threshold=0.8, - ) - - -def test_invoke_model(): - model = AzureAIStudioRerankModel() - - result = model.invoke( - model="azure-ai-studio-rerank-v1", - credentials={ - "api_key": os.getenv("AZURE_AI_STUDIO_JWT_TOKEN"), - "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE"), - }, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. 
Its capital is Saipan.", - ], - score_threshold=0.8, - ) - - assert isinstance(result, RerankResult) - assert len(result.docs) == 1 - assert result.docs[0].index == 1 - assert result.docs[0].score >= 0.8 diff --git a/api/tests/integration_tests/model_runtime/azure_openai/__init__.py b/api/tests/integration_tests/model_runtime/azure_openai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/azure_openai/test_llm.py b/api/tests/integration_tests/model_runtime/azure_openai/test_llm.py deleted file mode 100644 index 8f50ebf7a6d03f..00000000000000 --- a/api/tests/integration_tests/model_runtime/azure_openai/test_llm.py +++ /dev/null @@ -1,290 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.azure_openai.llm.llm import AzureOpenAILargeLanguageModel -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_validate_credentials_for_chat_model(setup_openai_mock): - model = AzureOpenAILargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="gpt35", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": "invalid_key", - "base_model_name": "gpt-35-turbo", - }, - ) - - model.validate_credentials( - model="gpt35", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": 
os.environ.get("AZURE_OPENAI_API_KEY"), - "base_model_name": "gpt-35-turbo", - }, - ) - - -@pytest.mark.parametrize("setup_openai_mock", [["completion"]], indirect=True) -def test_validate_credentials_for_completion_model(setup_openai_mock): - model = AzureOpenAILargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="gpt-35-turbo-instruct", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": "invalid_key", - "base_model_name": "gpt-35-turbo-instruct", - }, - ) - - model.validate_credentials( - model="gpt-35-turbo-instruct", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": os.environ.get("AZURE_OPENAI_API_KEY"), - "base_model_name": "gpt-35-turbo-instruct", - }, - ) - - -@pytest.mark.parametrize("setup_openai_mock", [["completion"]], indirect=True) -def test_invoke_completion_model(setup_openai_mock): - model = AzureOpenAILargeLanguageModel() - - result = model.invoke( - model="gpt-35-turbo-instruct", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": os.environ.get("AZURE_OPENAI_API_KEY"), - "base_model_name": "gpt-35-turbo-instruct", - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.0, "max_tokens": 1}, - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["completion"]], indirect=True) -def test_invoke_stream_completion_model(setup_openai_mock): - model = AzureOpenAILargeLanguageModel() - - result = model.invoke( - model="gpt-35-turbo-instruct", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": os.environ.get("AZURE_OPENAI_API_KEY"), - "base_model_name": "gpt-35-turbo-instruct", - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - 
model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(result, Generator) - - for chunk in result: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_chat_model(setup_openai_mock): - model = AzureOpenAILargeLanguageModel() - - result = model.invoke( - model="gpt35", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": os.environ.get("AZURE_OPENAI_API_KEY"), - "base_model_name": "gpt-35-turbo", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={ - "temperature": 0.0, - "top_p": 1.0, - "presence_penalty": 0.0, - "frequency_penalty": 0.0, - "max_tokens": 10, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_stream_chat_model(setup_openai_mock): - model = AzureOpenAILargeLanguageModel() - - result = model.invoke( - model="gpt35", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": os.environ.get("AZURE_OPENAI_API_KEY"), - "base_model_name": "gpt-35-turbo", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(result, Generator) - - for chunk in result: - assert isinstance(chunk, LLMResultChunk) - assert 
isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - if chunk.delta.finish_reason is not None: - assert chunk.delta.usage is not None - assert chunk.delta.usage.completion_tokens > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_chat_model_with_vision(setup_openai_mock): - model = AzureOpenAILargeLanguageModel() - - result = model.invoke( - model="gpt-4v", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": os.environ.get("AZURE_OPENAI_API_KEY"), - "base_model_name": "gpt-4-vision-preview", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage( - content=[ - TextPromptMessageContent( - data="Hello World!", - ), - ImagePromptMessageContent( - data="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE4AAABMCAYAAADDYoEWAAAMQGlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnluSkEBoAQSkhN4EkRpASggt9I4gKiEJEEqMgaBiRxcVXLuIgA1dFVGwAmJBETuLYu+LBRVlXSzYlTcpoOu+8r35vrnz33/O/OfMmbllAFA7zhGJclF1APKEBeLYYH/6uOQUOukpIAEdoAy0gA2Hmy9iRkeHA1iG2r+Xd9cBIm2v2Eu1/tn/X4sGj5/PBQCJhjidl8/Ng/gAAHg1VyQuAIAo5c2mFoikGFagJYYBQrxIijPluFqK0+V4j8wmPpYFcTsASiocjjgTANVLkKcXcjOhhmo/xI5CnkAIgBodYp+8vMk8iNMgtoY2Ioil+oz0H3Qy/6aZPqzJ4WQOY/lcZEUpQJAvyuVM/z/T8b9LXq5kyIclrCpZ4pBY6Zxh3m7mTA6TYhWI+4TpkVEQa0L8QcCT2UOMUrIkIQlye9SAm8+COYMrDVBHHicgDGIDiIOEuZHhCj49QxDEhhjuEHSaoIAdD7EuxIv4+YFxCptN4smxCl9oY4aYxVTwZzlimV+pr/uSnASmQv91Fp+t0MdUi7LikyCmQGxeKEiMhFgVYof8nLgwhc3YoixW5JCNWBIrjd8c4li+MNhfro8VZoiDYhX2pXn5Q/PFNmUJ2JEKvK8gKz5Enh+sncuRxQ/ngl3iC5kJQzr8/HHhQ3Ph8QMC5XPHnvGFCXEKnQ+iAv9Y+VicIsqNVtjjpvzcYClvCrFLfmGcYiyeWAA3pFwfzxAVRMfL48SLsjmh0fJ48OUgHLBAAKADCazpYDLIBoLOvqY+eCfvCQIcIAaZgA/sFczQiCRZjxBe40AR+BMiPsgfHucv6+WDQsh/HWblV3uQIestlI3IAU8gzgNhIBfeS2SjhMPeEsFjyAj+4Z0DKxfGmwurtP/f80Psd4YJmXAFIxnySFcbsiQGEgOIIcQgog2uj/vgXng4vPrB6oQzcI+heXy3JzwhdBEeEq4Rugm3JgmKxT9FGQG6oX6QIhfpP+YCt4Sarrg/7g3VoTKug+sDe9wF+mHivtCzK2RZiril
WaH/pP23GfywGgo7siMZJY8g+5Gtfx6paqvqOqwizfWP+ZHHmj6cb9Zwz8/+WT9knwfbsJ8tsUXYfuwMdgI7hx3BmgAda8WasQ7sqBQP767Hst015C1WFk8O1BH8w9/Qykozme9Y59jr+EXeV8CfJn1HA9Zk0XSxIDOrgM6EXwQ+nS3kOoyiOzk6OQMg/b7IX19vYmTfDUSn4zs3/w8AvFsHBwcPf+dCWwHY6w4f/0PfOWsG/HQoA3D2EFciLpRzuPRCgG8JNfik6QEjYAas4XycgBvwAn4gEISCKBAPksFEGH0W3OdiMBXMBPNACSgDy8EaUAk2gi1gB9gN9oEmcAScAKfBBXAJXAN34O7pAS9AP3gHPiMIQkKoCA3RQ4wRC8QOcUIYiA8SiIQjsUgykoZkIkJEgsxE5iNlyEqkEtmM1CJ7kUPICeQc0oXcQh4gvchr5BOKoSqoFmqIWqKjUQbKRMPQeHQCmolOQYvQBehStAKtQXehjegJ9AJ6De1GX6ADGMCUMR3MBLPHGBgLi8JSsAxMjM3GSrFyrAarx1rgOl/BurE+7CNOxGk4HbeHOzgET8C5+BR8Nr4Er8R34I14O34Ff4D3498IVIIBwY7gSWATxhEyCVMJJYRywjbCQcIp+Cz1EN4RiUQdohXRHT6LycRs4gziEuJ6YgPxOLGL+Ig4QCKR9Eh2JG9SFIlDKiCVkNaRdpFaSZdJPaQPSspKxkpOSkFKKUpCpWKlcqWdSseULis9VfpMVidbkD3JUWQeeTp5GXkruYV8kdxD/kzRoFhRvCnxlGzKPEoFpZ5yinKX8kZZWdlU2UM5RlmgPFe5QnmP8lnlB8ofVTRVbFVYKqkqEpWlKttVjqvcUnlDpVItqX7UFGoBdSm1lnqSep/6QZWm6qDKVuWpzlGtUm1Uvaz6Uo2sZqHGVJuoVqRWrrZf7aJanzpZ3VKdpc5Rn61epX5I/Yb6gAZNY4xGlEaexhKNnRrnNJ5pkjQtNQM1eZoLNLdontR8RMNoZjQWjUubT9tKO0Xr0SJqWWmxtbK1yrR2a3Vq9WtrartoJ2pP067SPqrdrYPpWOqwdXJ1luns07mu82mE4QjmCP6IxSPqR1we8V53pK6fLl+3VLdB95ruJz26XqBejt4KvSa9e/q4vq1+jP5U/Q36p/T7RmqN9BrJHVk6ct/I2waoga1BrMEMgy0GHQYDhkaGwYYiw3WGJw37jHSM/IyyjVYbHTPqNaYZ+xgLjFcbtxo/p2vTmfRcegW9nd5vYmASYiIx2WzSafLZ1Mo0wbTYtMH0nhnFjGGWYbbarM2s39zYPMJ8pnmd+W0LsgXDIstircUZi/eWVpZJlgstmyyfWelasa2KrOqs7lpTrX2tp1jXWF+1IdowbHJs1ttcskVtXW2zbKtsL9qhdm52Arv1dl2jCKM8RglH1Yy6Ya9iz7QvtK+zf+Cg4xDuUOzQ5PBytPnolNErRp8Z/c3R1THXcavjnTGaY0LHFI9pGfPaydaJ61TldNWZ6hzkPMe52fmVi50L32WDy01XmmuE60LXNtevbu5uYrd6t153c/c092r3GwwtRjRjCeOsB8HD32OOxxGPj55ungWe+zz/8rL3yvHa6fVsrNVY/titYx95m3pzvDd7d/vQfdJ8Nvl0+5r4cnxrfB/6mfnx/Lb5PWXaMLOZu5gv/R39xf4H/d+zPFmzWMcDsIDggNKAzkDNwITAysD7QaZBmUF1Qf3BrsEzgo+HEELCQlaE3GAbsrnsWnZ/qHvorND2MJWwuLDKsIfhtuHi8JYINCI0YlXE3UiLSGFkUxSIYketiroXbRU9JfpwDDEmOqYq5knsmNiZsWfiaHGT4nbGvYv3j18WfyfBOkGS0JaolpiaWJv4PikgaWVS97jR42aNu5CsnyxIbk4hpSSmbEsZGB84fs34nlTX1JLU6xOsJkybcG6i/sTciUcnqU3iTNqfRkhLStuZ9oUTxanhDKSz06vT+7ks7lru
C54fbzWvl+/NX8l/muGdsTLjWaZ35qrM3izfrPKsPgFLUCl4lR2SvTH7fU5Uzvacwdyk3IY8pby0vENCTWGOsH2y0eRpk7tEdqISUfcUzylrpvSLw8Tb8pH8CfnNBVrwR75DYi35RfKg0KewqvDD1MSp+6dpTBNO65huO33x9KdFQUW/zcBncGe0zTSZOW/mg1nMWZtnI7PTZ7fNMZuzYE7P3OC5O+ZR5uXM+73YsXhl8dv5SfNbFhgumLvg0S/Bv9SVqJaIS24s9Fq4cRG+SLCoc7Hz4nWLv5XySs+XOZaVl31Zwl1y/tcxv1b8Org0Y2nnMrdlG5YTlwuXX1/hu2LHSo2VRSsfrYpY1biavrp09ds1k9acK3cp37iWslaytrsivKJ5nfm65eu+VGZVXqvyr2qoNqheXP1+PW/95Q1+G+o3Gm4s2/hpk2DTzc3BmxtrLGvKtxC3FG55sjVx65nfGL/VbtPfVrbt63bh9u4dsTvaa91ra3ca7FxWh9ZJ6np3pe66tDtgd3O9ff3mBp2Gsj1gj2TP871pe6/vC9vXtp+xv/6AxYHqg7SDpY1I4/TG/qaspu7m5OauQ6GH2lq8Wg4edji8/YjJkaqj2keXHaMcW3BssLWodeC46HjficwTj9omtd05Oe7k1faY9s5TYafOng46ffIM80zrWe+zR855njt0nnG+6YLbhcYO146Dv7v+frDTrbPxovvF5ksel1q6xnYdu+x7+cSVgCunr7KvXrgWea3resL1mzdSb3Tf5N18div31qvbhbc/35l7l3C39J76vfL7Bvdr/rD5o6Hbrfvog4AHHQ/jHt55xH304nH+4y89C55Qn5Q/NX5a+8zp2ZHeoN5Lz8c/73khevG5r+RPjT+rX1q/PPCX318d/eP6e16JXw2+XvJG7832ty5v2waiB+6/y3v3+X3pB70POz4yPp75lPTp6eepX0hfKr7afG35Fvbt7mDe4KCII+bIfgUwWNGMDABebweAmgwADZ7PKOPl5z9ZQeRnVhkC/wnLz4iy4gZAPfx/j+mDfzc3ANizFR6/oL5aKgDRVADiPQDq7Dxch85qsnOltBDhOWBT5Nf0vHTwb4r8zPlD3D+3QKrqAn5u/wWdZ3xtG7qP3QAAADhlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAAqACAAQAAAABAAAATqADAAQAAAABAAAATAAAAADhTXUdAAARnUlEQVR4Ae2c245bR3aGi4fulizFHgUzQAYIggBB5klymfeaZ8hDBYjvAiRxkMAGkowRWx7JktjcZL7vX1Uku62Burkl5YbV5q7Tqqq1/v3XqgMpL95tbvftEh6NwPLRLS4NgsAFuDOJcAHuAtyZCJzZ7MK4C3BnInBmswvjLsCdicCZzS6MOxO49Znt0uz3//CPbbv6srXFrq0W9Q6Wi0VbLPn4R8x/jSLiu3nrl8s9dcartlwtKdmTbm21XranN6v27Mm6XV8t25fP1+3Pn1+1r4if3Czbk+t9u1rR6f9jmAXc1P6sbaevQGbfdgGJeA8ke0AQsCYYgiYgPR1QyVO+3wvcMm2WO0G2PeWkX79btp839AG4//UjYC62gDsB2rI9f7pov3q2bX/9F1ftBWAufTufOcwCrnTtR90dOdHoNgCJeAbUkuM5TsWAW5W9gfkE83ZkUHg0oAyAwbm927a2ebVoP/xx2f7jD1uYuG9/89tF+/VXK1hq+88TZgG32O1g2r7tpRdBM8fUTM7pyR8SYddgxkJErUszHti7U44CpzyEo16syNtx+qgy+1og7RMetpev9+3rb3bt+c2u/ebFsv3uL1ftiqn+qcMs4HY7jNQpEfadNU5VqeHUTJkgUbaPDxRADdZ8jU9LHoJYnwLUtgWN4ObDC7Kdr8Hp7d9qMTW8gt23V1zyvPrD1H56e9t+99vr9uJLprBDfaIw69U4dQRCIw2JdVIjbUzecj+7qYyPpZHiAbDaJwsXyMhQ
EQ0pq6sAp7hMS2XGqykdA2iy4EUtF6v206ur9k/fbNo//+frtt2OaW/rjxtmAaeNGqihBY5xfVQzQEZfoSH0KHgkrbD/CX6vPIqlSTU61vVCovRSbEwbIS851vj23Q+tff3vu/bzu5I7tvs4qVnADTa5FCbNC86qCLN2E1MxKKroYB2pgSz2RLbbVcVkSJhOKxIDjGxn+nSuqes2JlKuG8fA/IzPXazbj68X7et/27UfX7GifORwOuSju47h/c3beKfRFO74CNA04YP0ZT2/YzERFGojc9pmDG47/wyDZwJjiX4wwJNer1dZPJbs5/xzK5Ppzp7SQZBszNy22U7tX7/dtFdvJrv8aGE2cDJLoPycBgHSgICJUQLo8nmUo6y7oH0S5Lu/FGhDQULCfIooATw3yyOQQ46eYVpYiaBMTFtAFPR307r9y3fbdvsRfd5Rg6HJI2Lt1qaAF6TEqoxWdVdYSHawezCvAHLjW7Jh2QGcUkDDT4Og2OfSFRVkxipcAJUZARC5FVRbeRpB1hVY6r25XQHexIZ96Hfa++PTs4Dbi8rQg7imWQG27/uEgCTCssk/WWg7GwJWwDQ36PceGzQ+x7jOtgNogkIIpsZiFMdXoEfOPUlh3l5ulu2/X6bJ7Mc84Bw+xgOKzJqM0VKm8WYlVMqt61gFKNtQKeZ6o7Ls/aqEeYooJXDIZ9uiT0uZ5UxPUJNlYdoAK62qHfM7unz3/bb9/Ha+v3u/tn3AD0XOrnxAZdpNYZILgoxyGk4BqMCbssq66dXv6RdFkiB6Rj2u3N1npiMw1dQjF4oJW/kzy6VdMRFA9Xd8VvhCLxCyYUYkvhHZb7+fotvdUR6XmwXcYI1DangAA6yspgBj/dRjp6L+RbmSPaaxuuMnGEeVAhBF4pSapAFG5gUo60rAHmpVtcz0sR2aBZW8NAB9+W7dXr9N0dmPmUcu10pWrq7kQQvBQXn1dUsgoM4ej12TtyBknG51PEMGOV2TLLVZ/GLvLMBYHsYJhg7fuMBx6tq3LFu7aBxxD9jKFiO7Thbwcv7n5dS+/ML0eWEWcBqoptk+mEQp2aTG+rbmBYA+D6MyMwMAdepKsX5QpnglFZyZ5k4tDYsI/Y1pF7CRq22HoHXgGEOwgodvgH79INnW3tlFIVVQvkBXg1dvF3z27fkTGzw+zALOPZluVoVkV4yLHoBB3VBJUNyo6uEWXAyIkruC2OQjbVeppxkm8+iti2mySsM1EPYGKBcEyul3LKTW1+pr+wLRstwP0J8a2K95Txf/+6q1ZzeUDEXt/oFhHnA4fJYCBtawYlWmlsrJBEHhP43bi9Rq1Z0ymlK3Z/QCRqA5YfaNLZJWEACn929eluXlUGO8CgMrHWYi441S2tsFebLRL5RWL0e0nL64SEEf2sjMR4ZZwA0Ddfziclz1eN8yDn1qAaHSq3G0FEQXjABDo51sJVNyGnA0QlAPL4LOApzMo0mY1sUFbQBj8xTzYhKrROYF5VGIftR1uW3+3uiWU8XnBw7l3HIYVG/P/djYgMZoyrTJrci0n2qPZVnNFV913viW6btGzsXBT6aW3VKmsauVTFOc2DxpP5YJYLBBeCUixE71IlGBR2EF+6OugHbP12Ddoj29HgIPj+cxDiPDFGINzB8sKhLh0Ui4gOgDI8deb8FiwYxlteWhLHWTlmOzhkxLAObPIkFqS8+bbG5BdgWiAmJTwXdqZ7oysktzdKC/BWMWiAJNpyP0ZPTMItRy7fTi2RB4eDwLuIkpCma1gob/Dsw7zcKAMf3txiCot8c42ZCDPu3WAqRMJAGEk4cACaLzSZsFRhAE9QoAtXcwTX92XDT0sxTQXJYHdDJin0KfVN8PmzNvnOYBx5XNlik4giumihb7tJ60ezgNhgXuXgRNttxunZYAj7uzbL3nUA67rm5KJWrJCyTfIVwBMh3bTkD8TqFYp6uv8RwrgJpAZmHHScqv0qWeKT48NujhAuELekyYBdz9gXJQ53Dv
Dh3tU62xTtN8bQhzzE9OccAK8wA2ez2k3cNtN7wM/RZs9M5NkNZoee0H2rmhLr8miPV9roAZtN1RHV/gDb7EoUtXKeXjYXUBN0oeFs8CbrtlhZRGPZSSZNyI9gA+TBFkelFNWxgEgCtG3wDiFqEr5Jz6y/U1DAM4QLxi2l7DNhl3w/epNTUFWGbXC7HrMQMz7WUbf8AaDQ46DYXuxLoJX6CFRzvuiPyJzCzgZIoKyqgKAx1yAGPQUWfa+GoDsqwDJNnHLF9juSz0i5VrpvqSwmsQul5dtyfrfX1zL3i0WdHHSjaKVjf0T5k7ABtxlEHbwxusgjydAY8N84BjvAx5GLfMqBW0VJEZ+pwKskQnbpnFHPzpwWo/bzkGvX51296+bu1v/+qL9usXT9rTJ07Bzh9k9HEPsxNhwhh6xLXKo3fXWf3iMkrBBz9nAbflbHm6ONxhXp8/NW26lkSleIEV9FBVI+o6ihjmffPDt+3v/+5Z+82vnsZw/fyercweB2d7wzA8mfuPEknpXTnHvQsoPd1v/aD8LODw+AxbAw/QjnEfv69u5kz6dtOiW2R6YmW7vd0C3qK94wcjf/zxZ1bRXfvqGT6U3f2G/Z6AesqotgJX477PNVmTmxfiwTSS5irqz2ybEHD6PzbMAk7lS/0BxgkTqPAUYBiAkQpTLLdKxe1D4Lbsp968uW1vXk+ZrnpsN7yL1TbmbvCl4GcPPPStZWyNcM9s++9y92ruZu2CT21q7lZ9KDcLuC3WbmGG42uA30EISOVkFynt1BBialOliF/wZHqGTa1tOfq8fbMHPL6N2iBPW2d7HfxZdWnreiN49UL0dfhLR6tBSVVwNo+TQ1U5IsHvQU4Dcry7bGNOix+SngVcwAhYpZjTQxaNMABLLLtUFEAMEwi4kk63fGDbLTcVm82ubd7hNylzEXCa6SPdz2Vf5iUobe0jAFIq8+JHT8CjGeUjHFOj5E7MIO4THxvOaHIcwu2IOKiznyg89BTEXi6WssO8B36vkLa33Pv7/QRbEtm21c/BtIm9Yb4ho19PDg4g09aeucySdpzq3BfVx6WQqh7MkLOSkHLf2olEKni4n7xznh0VH4jnAYdy6hfVSZTvUmF54f2cU9d9XmlhvUyTlbkxIT0BWtgH4wRRgPMy7EFbAwi8ojzbNyqtH/7coWxnUHyE+rmYjbs3NCnqdwIbbM/GZ4RZwDleVskO3viSBhWjSu2Pxj7JU4bsqrzTU5YZQ7xKu73Bb8bAbo+s28NStxEyb8e+K1UAKXhOVivK7x0RUANf3zEw/smJpsr37cad9RlhFnCbzQYwfN36I+5qwxgVwRA/vOHxlneeMiaux9lymN5tTTttkZN5mbZwCYsLM550taA+zJM5gsdHsGSdQTbngN7ZlC/JrRhXIcorRJvVcp2pnjzdy+0nnErOCbOAE5x8d4oVCy4xMSFGetjfgWJ3MQFHdomxZbUwwC4B84YlzBNojUEmxmqO1tVC4VcVopUzKuXK+XArUeDVTyq85wv7xKqHsel1dfIUkl8zUXcFm8eUH7IPjWcBp8J5mYxWcWmbclhlyEIAMJm2HbSwDCHZGD9IuR1UH4MhaZ4HOAIQIJOrIxfjxOFRUMNQq8wI9EH5WNVJdcEje22ofxs3K6PlQ+OZwA2ghrFSKhiEVSqh/5JJcfodKBnntLac7wb5CKLpAs+0RguYuAhoNh2CRV1dTVFhqWhRn/u+tOsMtTph6JhOkAWsQDz1K3NHeHyYBZyK70BG5oy3SyqGumoaAhr1Aiggnm8FzXr3cQWSq++p8seM10v6LW9Elgh5kyGINXMdi1xspw2LRHwqMjJTV2KdU9c2eQ1SkXDDHL2aYf2MprVp1dFrtcBlAWB/sNuxMoJIzEfRqhMk04qXfM0n8yVDaa/DRLp1GuGSKhNz65ZEOQUSdyD0Y/adRSojsxjoz2jnNFdN3l/S+sUvnqbDsx+zgCvQMJzhPaCrlouCLBvbA43x68Dh
sAc7DxpTr0y39VAMBCfpSlpSUMggzRe8X4bIAWRYJqVJj6t7feMV/9Bkfeb+bYw2Czg78S3GwWtEQEPRWFMMEDAZhVTiMaWLnZZRxSexfaStPR9DAXbMj5Qs479Dm8PqqYCNEpUTVAe/GpLC3vH16hI64zkLuB1XQVsdFkED8ps40oLjj2sMAdbFwGlKRjbW6UHAFZaRJVegIpeWVafZhQ4yHahUm+5VyfOwXYFHTX8DKUNSn+fCcsN3qOd8AT3GGPEs4EYnxho9YlOnU1WTUj98GbLKWCawI5wk71DiBMoh+qjYfgXUc+nNlW+rXuqjOrknPAs4sRoHcvvNguDZNEChYOoBUUZ175z9nMBZnQ6cnncgS7uDnt3BJ49Y8axqPYLZ0gVEb2DaICyHtOUM5t2eP7AJexWaGWYBVzcdsqneoAAViyzzo3ZsC1Jeq2qBKVhlkIxDsuSRrSY6/6S6eaaFjD+B4BGmMo9X9M06kcAdMq0qU5eT+lBBc8+GqaVmCc989iHP6yVvOcr4qE8ZLijVZ8VleC/5xWDWFmN6ow6aIKX75EfdL5rfKxBJgAcwwV/zeXrFjyqqo3uy52dnMa5oU4O7svo7YMNgWrFKdsk6WBXmmS82HuKsuADjHZFGi5iBIv+9qnn/qt+qSh3JTFNjPvWDiqpnA0SexYB/ijm6q5qP85wFnIZrXQHgillpVesHh9QVaAWWAJccfo/VNrOcbmrbYn/vCR9gy2m1aUH2WOa/rv4UoKnhPODowC2Gx6jQo4Nox4ZinDL392ssIHFSZWa1rTZJD/wSy0Kn34eDpwZvP1w96+dmH25zrsQs4KSLP4GAawWSjhnFZZQFmUZxOZSTj/ne2yUhIHCjRIlFKcIU0x852RjZTGGlDdaQrkxk7MPrJr/gzg17r4vgJ3rMAk4/wmQDE7wJhg+fFV1xaMGiMqnXaFc5jd4FjCCIRAEmAO5aPE7lzsw0ZelHYJB0PCWscErqOJcsrbllGmhmzE/7mAXcPof544Wlqg6wTuORtvKQzjV2gVC+shaNMhc24v8iIloGmS3ogc7bD9sS884Oi0kEP89jFnDX++/hCtPVtT7kwaxOkZpmxQ/L9vgdj1r+NCtAwQ6/A9DXMXnBqZgoHDdXP7Wna/Id6PRCum7DiREqcg1UPw9Yp6MsLv/HwlM4Hp7WQ1/CGQhcgDsDNJtcgLsAdyYCZza7MO4C3JkInNnswrgLcGcicGazC+POBO7/AH5zPa/ivytzAAAAAElFTkSuQmCC" - ), - ] - ), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_chat_model_with_tools(setup_openai_mock): - model = AzureOpenAILargeLanguageModel() - - result = model.invoke( - model="gpt-35-turbo", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": os.environ.get("AZURE_OPENAI_API_KEY"), - "base_model_name": "gpt-35-turbo", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage( - 
content="what's the weather today in London?", - ), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - tools=[ - PromptMessageTool( - name="get_weather", - description="Determine weather in my location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ), - PromptMessageTool( - name="get_stock_price", - description="Get the current stock price", - parameters={ - "type": "object", - "properties": {"symbol": {"type": "string", "description": "The stock symbol"}}, - "required": ["symbol"], - }, - ), - ], - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert isinstance(result.message, AssistantPromptMessage) - assert len(result.message.tool_calls) > 0 - - -def test_get_num_tokens(): - model = AzureOpenAILargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="gpt-35-turbo-instruct", - credentials={"base_model_name": "gpt-35-turbo-instruct"}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - ) - - assert num_tokens == 3 - - num_tokens = model.get_num_tokens( - model="gpt35", - credentials={"base_model_name": "gpt-35-turbo"}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 21 diff --git a/api/tests/integration_tests/model_runtime/azure_openai/test_text_embedding.py b/api/tests/integration_tests/model_runtime/azure_openai/test_text_embedding.py deleted file mode 100644 index a1ae2b2e5b740c..00000000000000 --- a/api/tests/integration_tests/model_runtime/azure_openai/test_text_embedding.py +++ /dev/null @@ -1,62 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import 
CredentialsValidateFailedError -from core.model_runtime.model_providers.azure_openai.text_embedding.text_embedding import AzureOpenAITextEmbeddingModel -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -@pytest.mark.parametrize("setup_openai_mock", [["text_embedding"]], indirect=True) -def test_validate_credentials(setup_openai_mock): - model = AzureOpenAITextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="embedding", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": "invalid_key", - "base_model_name": "text-embedding-ada-002", - }, - ) - - model.validate_credentials( - model="embedding", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": os.environ.get("AZURE_OPENAI_API_KEY"), - "base_model_name": "text-embedding-ada-002", - }, - ) - - -@pytest.mark.parametrize("setup_openai_mock", [["text_embedding"]], indirect=True) -def test_invoke_model(setup_openai_mock): - model = AzureOpenAITextEmbeddingModel() - - result = model.invoke( - model="embedding", - credentials={ - "openai_api_base": os.environ.get("AZURE_OPENAI_API_BASE"), - "openai_api_key": os.environ.get("AZURE_OPENAI_API_KEY"), - "base_model_name": "text-embedding-ada-002", - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 2 - - -def test_get_num_tokens(): - model = AzureOpenAITextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="embedding", credentials={"base_model_name": "text-embedding-ada-002"}, texts=["hello", "world"] - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/baichuan/__init__.py b/api/tests/integration_tests/model_runtime/baichuan/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git 
a/api/tests/integration_tests/model_runtime/baichuan/test_llm.py b/api/tests/integration_tests/model_runtime/baichuan/test_llm.py deleted file mode 100644 index fe7fe968911439..00000000000000 --- a/api/tests/integration_tests/model_runtime/baichuan/test_llm.py +++ /dev/null @@ -1,172 +0,0 @@ -import os -from collections.abc import Generator -from time import sleep - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.entities.model_entities import AIModelEntity -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.baichuan.llm.llm import BaichuanLanguageModel - - -def test_predefined_models(): - model = BaichuanLanguageModel() - model_schemas = model.predefined_models() - assert len(model_schemas) >= 1 - assert isinstance(model_schemas[0], AIModelEntity) - - -def test_validate_credentials_for_chat_model(): - sleep(3) - model = BaichuanLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="baichuan2-turbo", credentials={"api_key": "invalid_key", "secret_key": "invalid_key"} - ) - - model.validate_credentials( - model="baichuan2-turbo", - credentials={ - "api_key": os.environ.get("BAICHUAN_API_KEY"), - "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"), - }, - ) - - -def test_invoke_model(): - sleep(3) - model = BaichuanLanguageModel() - - response = model.invoke( - model="baichuan2-turbo", - credentials={ - "api_key": os.environ.get("BAICHUAN_API_KEY"), - "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, 
LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_model_with_system_message(): - sleep(3) - model = BaichuanLanguageModel() - - response = model.invoke( - model="baichuan2-turbo", - credentials={ - "api_key": os.environ.get("BAICHUAN_API_KEY"), - "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"), - }, - prompt_messages=[ - SystemPromptMessage(content="请记住你是Kasumi。"), - UserPromptMessage(content="现在告诉我你是谁?"), - ], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_stream_model(): - sleep(3) - model = BaichuanLanguageModel() - - response = model.invoke( - model="baichuan2-turbo", - credentials={ - "api_key": os.environ.get("BAICHUAN_API_KEY"), - "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - }, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_invoke_with_search(): - sleep(3) - model = BaichuanLanguageModel() - - response = model.invoke( - model="baichuan2-turbo", - credentials={ - "api_key": os.environ.get("BAICHUAN_API_KEY"), - "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"), - }, - prompt_messages=[UserPromptMessage(content="北京今天的天气怎么样")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - "with_search_enhance": True, - }, - stop=["you"], - stream=True, - 
user="abc-123", - ) - - assert isinstance(response, Generator) - total_message = "" - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if not chunk.delta.finish_reason else True - total_message += chunk.delta.message.content - - assert "不" not in total_message - - -def test_get_num_tokens(): - sleep(3) - model = BaichuanLanguageModel() - - response = model.get_num_tokens( - model="baichuan2-turbo", - credentials={ - "api_key": os.environ.get("BAICHUAN_API_KEY"), - "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - tools=[], - ) - - assert isinstance(response, int) - assert response == 9 diff --git a/api/tests/integration_tests/model_runtime/baichuan/test_provider.py b/api/tests/integration_tests/model_runtime/baichuan/test_provider.py deleted file mode 100644 index 4036edfb7a7062..00000000000000 --- a/api/tests/integration_tests/model_runtime/baichuan/test_provider.py +++ /dev/null @@ -1,15 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.baichuan.baichuan import BaichuanProvider - - -def test_validate_provider_credentials(): - provider = BaichuanProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={"api_key": "hahahaha"}) - - provider.validate_provider_credentials(credentials={"api_key": os.environ.get("BAICHUAN_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/baichuan/test_text_embedding.py b/api/tests/integration_tests/model_runtime/baichuan/test_text_embedding.py deleted file mode 100644 index cbc63f3978fb99..00000000000000 --- a/api/tests/integration_tests/model_runtime/baichuan/test_text_embedding.py +++ /dev/null @@ 
-1,87 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.baichuan.text_embedding.text_embedding import BaichuanTextEmbeddingModel - - -def test_validate_credentials(): - model = BaichuanTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="baichuan-text-embedding", credentials={"api_key": "invalid_key"}) - - model.validate_credentials( - model="baichuan-text-embedding", credentials={"api_key": os.environ.get("BAICHUAN_API_KEY")} - ) - - -def test_invoke_model(): - model = BaichuanTextEmbeddingModel() - - result = model.invoke( - model="baichuan-text-embedding", - credentials={ - "api_key": os.environ.get("BAICHUAN_API_KEY"), - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 6 - - -def test_get_num_tokens(): - model = BaichuanTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="baichuan-text-embedding", - credentials={ - "api_key": os.environ.get("BAICHUAN_API_KEY"), - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 - - -def test_max_chunks(): - model = BaichuanTextEmbeddingModel() - - result = model.invoke( - model="baichuan-text-embedding", - credentials={ - "api_key": os.environ.get("BAICHUAN_API_KEY"), - }, - texts=[ - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - ], - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 22 diff --git a/api/tests/integration_tests/model_runtime/bedrock/__init__.py 
b/api/tests/integration_tests/model_runtime/bedrock/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/bedrock/test_llm.py b/api/tests/integration_tests/model_runtime/bedrock/test_llm.py deleted file mode 100644 index c19ec35a6e45fc..00000000000000 --- a/api/tests/integration_tests/model_runtime/bedrock/test_llm.py +++ /dev/null @@ -1,103 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.bedrock.llm.llm import BedrockLargeLanguageModel - - -def test_validate_credentials(): - model = BedrockLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="meta.llama2-13b-chat-v1", credentials={"anthropic_api_key": "invalid_key"}) - - model.validate_credentials( - model="meta.llama2-13b-chat-v1", - credentials={ - "aws_region": os.getenv("AWS_REGION"), - "aws_access_key": os.getenv("AWS_ACCESS_KEY"), - "aws_secret_access_key": os.getenv("AWS_SECRET_ACCESS_KEY"), - }, - ) - - -def test_invoke_model(): - model = BedrockLargeLanguageModel() - - response = model.invoke( - model="meta.llama2-13b-chat-v1", - credentials={ - "aws_region": os.getenv("AWS_REGION"), - "aws_access_key": os.getenv("AWS_ACCESS_KEY"), - "aws_secret_access_key": os.getenv("AWS_SECRET_ACCESS_KEY"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "top_p": 1.0, "max_tokens_to_sample": 10}, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, 
LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = BedrockLargeLanguageModel() - - response = model.invoke( - model="meta.llama2-13b-chat-v1", - credentials={ - "aws_region": os.getenv("AWS_REGION"), - "aws_access_key": os.getenv("AWS_ACCESS_KEY"), - "aws_secret_access_key": os.getenv("AWS_SECRET_ACCESS_KEY"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens_to_sample": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - print(chunk) - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = BedrockLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="meta.llama2-13b-chat-v1", - credentials={ - "aws_region": os.getenv("AWS_REGION"), - "aws_access_key": os.getenv("AWS_ACCESS_KEY"), - "aws_secret_access_key": os.getenv("AWS_SECRET_ACCESS_KEY"), - }, - messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 18 diff --git a/api/tests/integration_tests/model_runtime/bedrock/test_provider.py b/api/tests/integration_tests/model_runtime/bedrock/test_provider.py deleted file mode 100644 index 080727829e9e2f..00000000000000 --- a/api/tests/integration_tests/model_runtime/bedrock/test_provider.py +++ /dev/null @@ -1,21 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.bedrock.bedrock import BedrockProvider - - -def test_validate_provider_credentials(): - 
provider = BedrockProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials( - credentials={ - "aws_region": os.getenv("AWS_REGION"), - "aws_access_key": os.getenv("AWS_ACCESS_KEY"), - "aws_secret_access_key": os.getenv("AWS_SECRET_ACCESS_KEY"), - } - ) diff --git a/api/tests/integration_tests/model_runtime/chatglm/__init__.py b/api/tests/integration_tests/model_runtime/chatglm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/chatglm/test_llm.py b/api/tests/integration_tests/model_runtime/chatglm/test_llm.py deleted file mode 100644 index 418e88874d1572..00000000000000 --- a/api/tests/integration_tests/model_runtime/chatglm/test_llm.py +++ /dev/null @@ -1,230 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import AIModelEntity -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.chatglm.llm.llm import ChatGLMLargeLanguageModel -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -def test_predefined_models(): - model = ChatGLMLargeLanguageModel() - model_schemas = model.predefined_models() - assert len(model_schemas) >= 1 - assert isinstance(model_schemas[0], AIModelEntity) - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_validate_credentials_for_chat_model(setup_openai_mock): - model = ChatGLMLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - 
model.validate_credentials(model="chatglm2-6b", credentials={"api_base": "invalid_key"}) - - model.validate_credentials(model="chatglm2-6b", credentials={"api_base": os.environ.get("CHATGLM_API_BASE")}) - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_model(setup_openai_mock): - model = ChatGLMLargeLanguageModel() - - response = model.invoke( - model="chatglm2-6b", - credentials={"api_base": os.environ.get("CHATGLM_API_BASE")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_stream_model(setup_openai_mock): - model = ChatGLMLargeLanguageModel() - - response = model.invoke( - model="chatglm2-6b", - credentials={"api_base": os.environ.get("CHATGLM_API_BASE")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_stream_model_with_functions(setup_openai_mock): - model = ChatGLMLargeLanguageModel() - - response = model.invoke( - model="chatglm3-6b", - credentials={"api_base": 
os.environ.get("CHATGLM_API_BASE")}, - prompt_messages=[ - SystemPromptMessage( - content="你是一个天气机器人,你不知道今天的天气怎么样,你需要通过调用一个函数来获取天气信息。" - ), - UserPromptMessage(content="波士顿天气如何?"), - ], - model_parameters={ - "temperature": 0, - "top_p": 1.0, - }, - stop=["you"], - user="abc-123", - stream=True, - tools=[ - PromptMessageTool( - name="get_current_weather", - description="Get the current weather in a given location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. San Francisco, CA"}, - "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, - }, - "required": ["location"], - }, - ) - ], - ) - - assert isinstance(response, Generator) - - call: LLMResultChunk = None - chunks = [] - - for chunk in response: - chunks.append(chunk) - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - if chunk.delta.message.tool_calls and len(chunk.delta.message.tool_calls) > 0: - call = chunk - break - - assert call is not None - assert call.delta.message.tool_calls[0].function.name == "get_current_weather" - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_model_with_functions(setup_openai_mock): - model = ChatGLMLargeLanguageModel() - - response = model.invoke( - model="chatglm3-6b", - credentials={"api_base": os.environ.get("CHATGLM_API_BASE")}, - prompt_messages=[UserPromptMessage(content="What is the weather like in San Francisco?")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - user="abc-123", - stream=False, - tools=[ - PromptMessageTool( - name="get_current_weather", - description="Get the current weather in a given location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", 
"description": "The city and state e.g. San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ) - ], - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - assert response.message.tool_calls[0].function.name == "get_current_weather" - - -def test_get_num_tokens(): - model = ChatGLMLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="chatglm2-6b", - credentials={"api_base": os.environ.get("CHATGLM_API_BASE")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - tools=[ - PromptMessageTool( - name="get_current_weather", - description="Get the current weather in a given location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ) - ], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 77 - - num_tokens = model.get_num_tokens( - model="chatglm2-6b", - credentials={"api_base": os.environ.get("CHATGLM_API_BASE")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 21 diff --git a/api/tests/integration_tests/model_runtime/chatglm/test_provider.py b/api/tests/integration_tests/model_runtime/chatglm/test_provider.py deleted file mode 100644 index 7907805d072772..00000000000000 --- a/api/tests/integration_tests/model_runtime/chatglm/test_provider.py +++ /dev/null @@ -1,17 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.chatglm.chatglm import ChatGLMProvider -from 
tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_validate_provider_credentials(setup_openai_mock): - provider = ChatGLMProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={"api_base": "hahahaha"}) - - provider.validate_provider_credentials(credentials={"api_base": os.environ.get("CHATGLM_API_BASE")}) diff --git a/api/tests/integration_tests/model_runtime/cohere/__init__.py b/api/tests/integration_tests/model_runtime/cohere/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/cohere/test_llm.py b/api/tests/integration_tests/model_runtime/cohere/test_llm.py deleted file mode 100644 index b7f707e935dbea..00000000000000 --- a/api/tests/integration_tests/model_runtime/cohere/test_llm.py +++ /dev/null @@ -1,191 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.cohere.llm.llm import CohereLargeLanguageModel - - -def test_validate_credentials_for_chat_model(): - model = CohereLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="command-light-chat", credentials={"api_key": "invalid_key"}) - - model.validate_credentials(model="command-light-chat", credentials={"api_key": os.environ.get("COHERE_API_KEY")}) - - -def test_validate_credentials_for_completion_model(): - model = CohereLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - 
model.validate_credentials(model="command-light", credentials={"api_key": "invalid_key"}) - - model.validate_credentials(model="command-light", credentials={"api_key": os.environ.get("COHERE_API_KEY")}) - - -def test_invoke_completion_model(): - model = CohereLargeLanguageModel() - - credentials = {"api_key": os.environ.get("COHERE_API_KEY")} - - result = model.invoke( - model="command-light", - credentials=credentials, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.0, "max_tokens": 1}, - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - assert model._num_tokens_from_string("command-light", credentials, result.message.content) == 1 - - -def test_invoke_stream_completion_model(): - model = CohereLargeLanguageModel() - - result = model.invoke( - model="command-light", - credentials={"api_key": os.environ.get("COHERE_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(result, Generator) - - for chunk in result: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_invoke_chat_model(): - model = CohereLargeLanguageModel() - - result = model.invoke( - model="command-light-chat", - credentials={"api_key": os.environ.get("COHERE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={ - "temperature": 0.0, - "p": 0.99, - "presence_penalty": 0.0, - "frequency_penalty": 0.0, - "max_tokens": 10, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(result, 
LLMResult) - assert len(result.message.content) > 0 - - -def test_invoke_stream_chat_model(): - model = CohereLargeLanguageModel() - - result = model.invoke( - model="command-light-chat", - credentials={"api_key": os.environ.get("COHERE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(result, Generator) - - for chunk in result: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - if chunk.delta.finish_reason is not None: - assert chunk.delta.usage is not None - assert chunk.delta.usage.completion_tokens > 0 - - -def test_get_num_tokens(): - model = CohereLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="command-light", - credentials={"api_key": os.environ.get("COHERE_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - ) - - assert num_tokens == 3 - - num_tokens = model.get_num_tokens( - model="command-light-chat", - credentials={"api_key": os.environ.get("COHERE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 15 - - -def test_fine_tuned_model(): - model = CohereLargeLanguageModel() - - # test invoke - result = model.invoke( - model="85ec47be-6139-4f75-a4be-0f0ec1ef115c-ft", - credentials={"api_key": os.environ.get("COHERE_API_KEY"), "mode": "completion"}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - 
stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - - -def test_fine_tuned_chat_model(): - model = CohereLargeLanguageModel() - - # test invoke - result = model.invoke( - model="94f2d55a-4c79-4c00-bde4-23962e74b170-ft", - credentials={"api_key": os.environ.get("COHERE_API_KEY"), "mode": "chat"}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) diff --git a/api/tests/integration_tests/model_runtime/cohere/test_provider.py b/api/tests/integration_tests/model_runtime/cohere/test_provider.py deleted file mode 100644 index fb7e6d34984a61..00000000000000 --- a/api/tests/integration_tests/model_runtime/cohere/test_provider.py +++ /dev/null @@ -1,15 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.cohere.cohere import CohereProvider - - -def test_validate_provider_credentials(): - provider = CohereProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials(credentials={"api_key": os.environ.get("COHERE_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/cohere/test_rerank.py b/api/tests/integration_tests/model_runtime/cohere/test_rerank.py deleted file mode 100644 index a1b6922128570e..00000000000000 --- a/api/tests/integration_tests/model_runtime/cohere/test_rerank.py +++ /dev/null @@ -1,40 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.rerank_entities import RerankResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.cohere.rerank.rerank import CohereRerankModel - - -def 
test_validate_credentials(): - model = CohereRerankModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="rerank-english-v2.0", credentials={"api_key": "invalid_key"}) - - model.validate_credentials(model="rerank-english-v2.0", credentials={"api_key": os.environ.get("COHERE_API_KEY")}) - - -def test_invoke_model(): - model = CohereRerankModel() - - result = model.invoke( - model="rerank-english-v2.0", - credentials={"api_key": os.environ.get("COHERE_API_KEY")}, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "Washington, D.C. (also known as simply Washington or D.C., and officially as the District of Columbia) " - "is the capital of the United States. It is a federal district. The President of the USA and many major " - "national government offices are in the territory. This makes it the political center of the United " - "States of America.", - ], - score_threshold=0.8, - ) - - assert isinstance(result, RerankResult) - assert len(result.docs) == 1 - assert result.docs[0].index == 1 - assert result.docs[0].score >= 0.8 diff --git a/api/tests/integration_tests/model_runtime/cohere/test_text_embedding.py b/api/tests/integration_tests/model_runtime/cohere/test_text_embedding.py deleted file mode 100644 index ae26d36635d1b5..00000000000000 --- a/api/tests/integration_tests/model_runtime/cohere/test_text_embedding.py +++ /dev/null @@ -1,45 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.cohere.text_embedding.text_embedding import CohereTextEmbeddingModel - - -def test_validate_credentials(): - model = CohereTextEmbeddingModel() - - with 
pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="embed-multilingual-v3.0", credentials={"api_key": "invalid_key"}) - - model.validate_credentials( - model="embed-multilingual-v3.0", credentials={"api_key": os.environ.get("COHERE_API_KEY")} - ) - - -def test_invoke_model(): - model = CohereTextEmbeddingModel() - - result = model.invoke( - model="embed-multilingual-v3.0", - credentials={"api_key": os.environ.get("COHERE_API_KEY")}, - texts=["hello", "world", " ".join(["long_text"] * 100), " ".join(["another_long_text"] * 100)], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 4 - assert result.usage.total_tokens == 811 - - -def test_get_num_tokens(): - model = CohereTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="embed-multilingual-v3.0", - credentials={"api_key": os.environ.get("COHERE_API_KEY")}, - texts=["hello", "world"], - ) - - assert num_tokens == 3 diff --git a/api/tests/integration_tests/model_runtime/fireworks/__init__.py b/api/tests/integration_tests/model_runtime/fireworks/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/fireworks/test_llm.py b/api/tests/integration_tests/model_runtime/fireworks/test_llm.py deleted file mode 100644 index 699ca293a2fca8..00000000000000 --- a/api/tests/integration_tests/model_runtime/fireworks/test_llm.py +++ /dev/null @@ -1,186 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import AIModelEntity -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from 
core.model_runtime.model_providers.fireworks.llm.llm import FireworksLargeLanguageModel - -"""FOR MOCK FIXTURES, DO NOT REMOVE""" -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -def test_predefined_models(): - model = FireworksLargeLanguageModel() - model_schemas = model.predefined_models() - - assert len(model_schemas) >= 1 - assert isinstance(model_schemas[0], AIModelEntity) - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_validate_credentials_for_chat_model(setup_openai_mock): - model = FireworksLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - # model name to gpt-3.5-turbo because of mocking - model.validate_credentials(model="gpt-3.5-turbo", credentials={"fireworks_api_key": "invalid_key"}) - - model.validate_credentials( - model="accounts/fireworks/models/llama-v3p1-8b-instruct", - credentials={"fireworks_api_key": os.environ.get("FIREWORKS_API_KEY")}, - ) - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_chat_model(setup_openai_mock): - model = FireworksLargeLanguageModel() - - result = model.invoke( - model="accounts/fireworks/models/llama-v3p1-8b-instruct", - credentials={"fireworks_api_key": os.environ.get("FIREWORKS_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={ - "temperature": 0.0, - "top_p": 1.0, - "presence_penalty": 0.0, - "frequency_penalty": 0.0, - "max_tokens": 10, - }, - stop=["How"], - stream=False, - user="foo", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_chat_model_with_tools(setup_openai_mock): - model = FireworksLargeLanguageModel() - - result = model.invoke( - model="accounts/fireworks/models/llama-v3p1-8b-instruct", - 
credentials={"fireworks_api_key": os.environ.get("FIREWORKS_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage( - content="what's the weather today in London?", - ), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - tools=[ - PromptMessageTool( - name="get_weather", - description="Determine weather in my location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ), - PromptMessageTool( - name="get_stock_price", - description="Get the current stock price", - parameters={ - "type": "object", - "properties": {"symbol": {"type": "string", "description": "The stock symbol"}}, - "required": ["symbol"], - }, - ), - ], - stream=False, - user="foo", - ) - - assert isinstance(result, LLMResult) - assert isinstance(result.message, AssistantPromptMessage) - assert len(result.message.tool_calls) > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_stream_chat_model(setup_openai_mock): - model = FireworksLargeLanguageModel() - - result = model.invoke( - model="accounts/fireworks/models/llama-v3p1-8b-instruct", - credentials={"fireworks_api_key": os.environ.get("FIREWORKS_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=True, - user="foo", - ) - - assert isinstance(result, Generator) - - for chunk in result: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - if chunk.delta.finish_reason 
is not None: - assert chunk.delta.usage is not None - assert chunk.delta.usage.completion_tokens > 0 - - -def test_get_num_tokens(): - model = FireworksLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="accounts/fireworks/models/llama-v3p1-8b-instruct", - credentials={"fireworks_api_key": os.environ.get("FIREWORKS_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - ) - - assert num_tokens == 10 - - num_tokens = model.get_num_tokens( - model="accounts/fireworks/models/llama-v3p1-8b-instruct", - credentials={"fireworks_api_key": os.environ.get("FIREWORKS_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - tools=[ - PromptMessageTool( - name="get_weather", - description="Determine weather in my location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ), - ], - ) - - assert num_tokens == 77 diff --git a/api/tests/integration_tests/model_runtime/fireworks/test_provider.py b/api/tests/integration_tests/model_runtime/fireworks/test_provider.py deleted file mode 100644 index a68cf1a1a8fbda..00000000000000 --- a/api/tests/integration_tests/model_runtime/fireworks/test_provider.py +++ /dev/null @@ -1,17 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.fireworks.fireworks import FireworksProvider -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_validate_provider_credentials(setup_openai_mock): - provider = FireworksProvider() - - with pytest.raises(CredentialsValidateFailedError): - 
provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials(credentials={"fireworks_api_key": os.environ.get("FIREWORKS_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/fireworks/test_text_embedding.py b/api/tests/integration_tests/model_runtime/fireworks/test_text_embedding.py deleted file mode 100644 index 7bf723b3a93742..00000000000000 --- a/api/tests/integration_tests/model_runtime/fireworks/test_text_embedding.py +++ /dev/null @@ -1,54 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.fireworks.text_embedding.text_embedding import FireworksTextEmbeddingModel -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -@pytest.mark.parametrize("setup_openai_mock", [["text_embedding"]], indirect=True) -def test_validate_credentials(setup_openai_mock): - model = FireworksTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="nomic-ai/nomic-embed-text-v1.5", credentials={"fireworks_api_key": "invalid_key"} - ) - - model.validate_credentials( - model="nomic-ai/nomic-embed-text-v1.5", credentials={"fireworks_api_key": os.environ.get("FIREWORKS_API_KEY")} - ) - - -@pytest.mark.parametrize("setup_openai_mock", [["text_embedding"]], indirect=True) -def test_invoke_model(setup_openai_mock): - model = FireworksTextEmbeddingModel() - - result = model.invoke( - model="nomic-ai/nomic-embed-text-v1.5", - credentials={ - "fireworks_api_key": os.environ.get("FIREWORKS_API_KEY"), - }, - texts=["hello", "world", " ".join(["long_text"] * 100), " ".join(["another_long_text"] * 100)], - user="foo", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 4 - assert result.usage.total_tokens == 2 - - -def 
test_get_num_tokens(): - model = FireworksTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="nomic-ai/nomic-embed-text-v1.5", - credentials={ - "fireworks_api_key": os.environ.get("FIREWORKS_API_KEY"), - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/fishaudio/__init__.py b/api/tests/integration_tests/model_runtime/fishaudio/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/fishaudio/test_provider.py b/api/tests/integration_tests/model_runtime/fishaudio/test_provider.py deleted file mode 100644 index 3526574b613238..00000000000000 --- a/api/tests/integration_tests/model_runtime/fishaudio/test_provider.py +++ /dev/null @@ -1,33 +0,0 @@ -import os - -import httpx -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.fishaudio.fishaudio import FishAudioProvider -from tests.integration_tests.model_runtime.__mock.fishaudio import setup_fishaudio_mock - - -@pytest.mark.parametrize("setup_fishaudio_mock", [["list-models"]], indirect=True) -def test_validate_provider_credentials(setup_fishaudio_mock): - print("-----", httpx.get) - provider = FishAudioProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials( - credentials={ - "api_key": "bad_api_key", - "api_base": os.environ.get("FISH_AUDIO_API_BASE", "https://api.fish.audio"), - "use_public_models": "false", - "latency": "normal", - } - ) - - provider.validate_provider_credentials( - credentials={ - "api_key": os.environ.get("FISH_AUDIO_API_KEY", "test"), - "api_base": os.environ.get("FISH_AUDIO_API_BASE", "https://api.fish.audio"), - "use_public_models": "false", - "latency": "normal", - } - ) diff --git a/api/tests/integration_tests/model_runtime/fishaudio/test_tts.py 
b/api/tests/integration_tests/model_runtime/fishaudio/test_tts.py deleted file mode 100644 index f61fee28b98e30..00000000000000 --- a/api/tests/integration_tests/model_runtime/fishaudio/test_tts.py +++ /dev/null @@ -1,32 +0,0 @@ -import os - -import pytest - -from core.model_runtime.model_providers.fishaudio.tts.tts import ( - FishAudioText2SpeechModel, -) -from tests.integration_tests.model_runtime.__mock.fishaudio import setup_fishaudio_mock - - -@pytest.mark.parametrize("setup_fishaudio_mock", [["tts"]], indirect=True) -def test_invoke_model(setup_fishaudio_mock): - model = FishAudioText2SpeechModel() - - result = model.invoke( - model="tts-default", - tenant_id="test", - credentials={ - "api_key": os.environ.get("FISH_AUDIO_API_KEY", "test"), - "api_base": os.environ.get("FISH_AUDIO_API_BASE", "https://api.fish.audio"), - "use_public_models": "false", - "latency": "normal", - }, - content_text="Hello, world!", - voice="03397b4c4be74759b72533b663fbd001", - ) - - content = b"" - for chunk in result: - content += chunk - - assert content != b"" diff --git a/api/tests/integration_tests/model_runtime/google/__init__.py b/api/tests/integration_tests/model_runtime/google/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/google/test_llm.py b/api/tests/integration_tests/model_runtime/google/test_llm.py deleted file mode 100644 index 34d08f270afe7e..00000000000000 --- a/api/tests/integration_tests/model_runtime/google/test_llm.py +++ /dev/null @@ -1,177 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError 
-from core.model_runtime.model_providers.google.llm.llm import GoogleLargeLanguageModel -from tests.integration_tests.model_runtime.__mock.google import setup_google_mock - - -@pytest.mark.parametrize("setup_google_mock", [["none"]], indirect=True) -def test_validate_credentials(setup_google_mock): - model = GoogleLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="gemini-pro", credentials={"google_api_key": "invalid_key"}) - - model.validate_credentials(model="gemini-pro", credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")}) - - -@pytest.mark.parametrize("setup_google_mock", [["none"]], indirect=True) -def test_invoke_model(setup_google_mock): - model = GoogleLargeLanguageModel() - - response = model.invoke( - model="gemini-pro", - credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Give me your worst dad joke or i will unplug you"), - AssistantPromptMessage( - content="Why did the scarecrow win an award? Because he was outstanding in his field!" 
- ), - UserPromptMessage( - content=[ - TextPromptMessageContent(data="ok something snarkier pls"), - TextPromptMessageContent(data="i may still unplug you"), - ] - ), - ], - model_parameters={"temperature": 0.5, "top_p": 1.0, "max_tokens_to_sample": 2048}, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -@pytest.mark.parametrize("setup_google_mock", [["none"]], indirect=True) -def test_invoke_stream_model(setup_google_mock): - model = GoogleLargeLanguageModel() - - response = model.invoke( - model="gemini-pro", - credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Give me your worst dad joke or i will unplug you"), - AssistantPromptMessage( - content="Why did the scarecrow win an award? Because he was outstanding in his field!" - ), - UserPromptMessage( - content=[ - TextPromptMessageContent(data="ok something snarkier pls"), - TextPromptMessageContent(data="i may still unplug you"), - ] - ), - ], - model_parameters={"temperature": 0.2, "top_k": 5, "max_tokens_to_sample": 2048}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -@pytest.mark.parametrize("setup_google_mock", [["none"]], indirect=True) -def test_invoke_chat_model_with_vision(setup_google_mock): - model = GoogleLargeLanguageModel() - - result = model.invoke( - model="gemini-pro-vision", - credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - 
UserPromptMessage( - content=[ - TextPromptMessageContent(data="what do you see?"), - ImagePromptMessageContent( - data="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE4AAABMCAYAAADDYoEWAAAMQGlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnluSkEBoAQSkhN4EkRpASggt9I4gKiEJEEqMgaBiRxcVXLuIgA1dFVGwAmJBETuLYu+LBRVlXSzYlTcpoOu+8r35vrnz33/O/OfMmbllAFA7zhGJclF1APKEBeLYYH/6uOQUOukpIAEdoAy0gA2Hmy9iRkeHA1iG2r+Xd9cBIm2v2Eu1/tn/X4sGj5/PBQCJhjidl8/Ng/gAAHg1VyQuAIAo5c2mFoikGFagJYYBQrxIijPluFqK0+V4j8wmPpYFcTsASiocjjgTANVLkKcXcjOhhmo/xI5CnkAIgBodYp+8vMk8iNMgtoY2Ioil+oz0H3Qy/6aZPqzJ4WQOY/lcZEUpQJAvyuVM/z/T8b9LXq5kyIclrCpZ4pBY6Zxh3m7mTA6TYhWI+4TpkVEQa0L8QcCT2UOMUrIkIQlye9SAm8+COYMrDVBHHicgDGIDiIOEuZHhCj49QxDEhhjuEHSaoIAdD7EuxIv4+YFxCptN4smxCl9oY4aYxVTwZzlimV+pr/uSnASmQv91Fp+t0MdUi7LikyCmQGxeKEiMhFgVYof8nLgwhc3YoixW5JCNWBIrjd8c4li+MNhfro8VZoiDYhX2pXn5Q/PFNmUJ2JEKvK8gKz5Enh+sncuRxQ/ngl3iC5kJQzr8/HHhQ3Ph8QMC5XPHnvGFCXEKnQ+iAv9Y+VicIsqNVtjjpvzcYClvCrFLfmGcYiyeWAA3pFwfzxAVRMfL48SLsjmh0fJ48OUgHLBAAKADCazpYDLIBoLOvqY+eCfvCQIcIAaZgA/sFczQiCRZjxBe40AR+BMiPsgfHucv6+WDQsh/HWblV3uQIestlI3IAU8gzgNhIBfeS2SjhMPeEsFjyAj+4Z0DKxfGmwurtP/f80Psd4YJmXAFIxnySFcbsiQGEgOIIcQgog2uj/vgXng4vPrB6oQzcI+heXy3JzwhdBEeEq4Rugm3JgmKxT9FGQG6oX6QIhfpP+YCt4Sarrg/7g3VoTKug+sDe9wF+mHivtCzK2RZirilWaH/pP23GfywGgo7siMZJY8g+5Gtfx6paqvqOqwizfWP+ZHHmj6cb9Zwz8/+WT9knwfbsJ8tsUXYfuwMdgI7hx3BmgAda8WasQ7sqBQP767Hst015C1WFk8O1BH8w9/Qykozme9Y59jr+EXeV8CfJn1HA9Zk0XSxIDOrgM6EXwQ+nS3kOoyiOzk6OQMg/b7IX19vYmTfDUSn4zs3/w8AvFsHBwcPf+dCWwHY6w4f/0PfOWsG/HQoA3D2EFciLpRzuPRCgG8JNfik6QEjYAas4XycgBvwAn4gEISCKBAPksFEGH0W3OdiMBXMBPNACSgDy8EaUAk2gi1gB9gN9oEmcAScAKfBBXAJXAN34O7pAS9AP3gHPiMIQkKoCA3RQ4wRC8QOcUIYiA8SiIQjsUgykoZkIkJEgsxE5iNlyEqkEtmM1CJ7kUPICeQc0oXcQh4gvchr5BOKoSqoFmqIWqKjUQbKRMPQeHQCmolOQYvQBehStAKtQXehjegJ9AJ6De1GX6ADGMCUMR3MBLPHGBgLi8JSsAxMjM3GSrFyrAarx1rgOl/BurE+7CNOxGk4HbeHOzgET8C5+BR8Nr4Er8R34I14O34Ff4D3498IVIIBwY7gSWATxhEyCVMJJYRywjbCQcIp+Cz1EN4RiUQdohXRHT6LycRs4gziEuJ6YgPxOLGL+Ig4QCKR9Eh2JG9SFIlDKiCVkNaRdpFaSZdJPaQPSspKxkpOSkFKKUpCpWKlcqWdSseUL
is9VfpMVidbkD3JUWQeeTp5GXkruYV8kdxD/kzRoFhRvCnxlGzKPEoFpZ5yinKX8kZZWdlU2UM5RlmgPFe5QnmP8lnlB8ofVTRVbFVYKqkqEpWlKttVjqvcUnlDpVItqX7UFGoBdSm1lnqSep/6QZWm6qDKVuWpzlGtUm1Uvaz6Uo2sZqHGVJuoVqRWrrZf7aJanzpZ3VKdpc5Rn61epX5I/Yb6gAZNY4xGlEaexhKNnRrnNJ5pkjQtNQM1eZoLNLdontR8RMNoZjQWjUubT9tKO0Xr0SJqWWmxtbK1yrR2a3Vq9WtrartoJ2pP067SPqrdrYPpWOqwdXJ1luns07mu82mE4QjmCP6IxSPqR1we8V53pK6fLl+3VLdB95ruJz26XqBejt4KvSa9e/q4vq1+jP5U/Q36p/T7RmqN9BrJHVk6ct/I2waoga1BrMEMgy0GHQYDhkaGwYYiw3WGJw37jHSM/IyyjVYbHTPqNaYZ+xgLjFcbtxo/p2vTmfRcegW9nd5vYmASYiIx2WzSafLZ1Mo0wbTYtMH0nhnFjGGWYbbarM2s39zYPMJ8pnmd+W0LsgXDIstircUZi/eWVpZJlgstmyyfWelasa2KrOqs7lpTrX2tp1jXWF+1IdowbHJs1ttcskVtXW2zbKtsL9qhdm52Arv1dl2jCKM8RglH1Yy6Ya9iz7QvtK+zf+Cg4xDuUOzQ5PBytPnolNErRp8Z/c3R1THXcavjnTGaY0LHFI9pGfPaydaJ61TldNWZ6hzkPMe52fmVi50L32WDy01XmmuE60LXNtevbu5uYrd6t153c/c092r3GwwtRjRjCeOsB8HD32OOxxGPj55ungWe+zz/8rL3yvHa6fVsrNVY/titYx95m3pzvDd7d/vQfdJ8Nvl0+5r4cnxrfB/6mfnx/Lb5PWXaMLOZu5gv/R39xf4H/d+zPFmzWMcDsIDggNKAzkDNwITAysD7QaZBmUF1Qf3BrsEzgo+HEELCQlaE3GAbsrnsWnZ/qHvorND2MJWwuLDKsIfhtuHi8JYINCI0YlXE3UiLSGFkUxSIYketiroXbRU9JfpwDDEmOqYq5knsmNiZsWfiaHGT4nbGvYv3j18WfyfBOkGS0JaolpiaWJv4PikgaWVS97jR42aNu5CsnyxIbk4hpSSmbEsZGB84fs34nlTX1JLU6xOsJkybcG6i/sTciUcnqU3iTNqfRkhLStuZ9oUTxanhDKSz06vT+7ks7lruC54fbzWvl+/NX8l/muGdsTLjWaZ35qrM3izfrPKsPgFLUCl4lR2SvTH7fU5Uzvacwdyk3IY8pby0vENCTWGOsH2y0eRpk7tEdqISUfcUzylrpvSLw8Tb8pH8CfnNBVrwR75DYi35RfKg0KewqvDD1MSp+6dpTBNO65huO33x9KdFQUW/zcBncGe0zTSZOW/mg1nMWZtnI7PTZ7fNMZuzYE7P3OC5O+ZR5uXM+73YsXhl8dv5SfNbFhgumLvg0S/Bv9SVqJaIS24s9Fq4cRG+SLCoc7Hz4nWLv5XySs+XOZaVl31Zwl1y/tcxv1b8Org0Y2nnMrdlG5YTlwuXX1/hu2LHSo2VRSsfrYpY1biavrp09ds1k9acK3cp37iWslaytrsivKJ5nfm65eu+VGZVXqvyr2qoNqheXP1+PW/95Q1+G+o3Gm4s2/hpk2DTzc3BmxtrLGvKtxC3FG55sjVx65nfGL/VbtPfVrbt63bh9u4dsTvaa91ra3ca7FxWh9ZJ6np3pe66tDtgd3O9ff3mBp2Gsj1gj2TP871pe6/vC9vXtp+xv/6AxYHqg7SDpY1I4/TG/qaspu7m5OauQ6GH2lq8Wg4edji8/YjJkaqj2keXHaMcW3BssLWodeC46HjficwTj9omtd05Oe7k1faY9s5TYafOng46ffIM80zrWe+zR855njt0nnG+6YLbhcYO146Dv7v+frDTrbPxovvF5ksel1q6xnYdu+x7+cSVgCunr7KvX
rgWea3resL1mzdSb3Tf5N18div31qvbhbc/35l7l3C39J76vfL7Bvdr/rD5o6Hbrfvog4AHHQ/jHt55xH304nH+4y89C55Qn5Q/NX5a+8zp2ZHeoN5Lz8c/73khevG5r+RPjT+rX1q/PPCX318d/eP6e16JXw2+XvJG7832ty5v2waiB+6/y3v3+X3pB70POz4yPp75lPTp6eepX0hfKr7afG35Fvbt7mDe4KCII+bIfgUwWNGMDABebweAmgwADZ7PKOPl5z9ZQeRnVhkC/wnLz4iy4gZAPfx/j+mDfzc3ANizFR6/oL5aKgDRVADiPQDq7Dxch85qsnOltBDhOWBT5Nf0vHTwb4r8zPlD3D+3QKrqAn5u/wWdZ3xtG7qP3QAAADhlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAAqACAAQAAAABAAAATqADAAQAAAABAAAATAAAAADhTXUdAAARnUlEQVR4Ae2c245bR3aGi4fulizFHgUzQAYIggBB5klymfeaZ8hDBYjvAiRxkMAGkowRWx7JktjcZL7vX1Uku62Burkl5YbV5q7Tqqq1/v3XqgMpL95tbvftEh6NwPLRLS4NgsAFuDOJcAHuAtyZCJzZ7MK4C3BnInBmswvjLsCdicCZzS6MOxO49Znt0uz3//CPbbv6srXFrq0W9Q6Wi0VbLPn4R8x/jSLiu3nrl8s9dcartlwtKdmTbm21XranN6v27Mm6XV8t25fP1+3Pn1+1r4if3Czbk+t9u1rR6f9jmAXc1P6sbaevQGbfdgGJeA8ke0AQsCYYgiYgPR1QyVO+3wvcMm2WO0G2PeWkX79btp839AG4//UjYC62gDsB2rI9f7pov3q2bX/9F1ftBWAufTufOcwCrnTtR90dOdHoNgCJeAbUkuM5TsWAW5W9gfkE83ZkUHg0oAyAwbm927a2ebVoP/xx2f7jD1uYuG9/89tF+/VXK1hq+88TZgG32O1g2r7tpRdBM8fUTM7pyR8SYddgxkJErUszHti7U44CpzyEo16syNtx+qgy+1og7RMetpev9+3rb3bt+c2u/ebFsv3uL1ftiqn+qcMs4HY7jNQpEfadNU5VqeHUTJkgUbaPDxRADdZ8jU9LHoJYnwLUtgWN4ObDC7Kdr8Hp7d9qMTW8gt23V1zyvPrD1H56e9t+99vr9uJLprBDfaIw69U4dQRCIw2JdVIjbUzecj+7qYyPpZHiAbDaJwsXyMhQEQ0pq6sAp7hMS2XGqykdA2iy4EUtF6v206ur9k/fbNo//+frtt2OaW/rjxtmAaeNGqihBY5xfVQzQEZfoSH0KHgkrbD/CX6vPIqlSTU61vVCovRSbEwbIS851vj23Q+tff3vu/bzu5I7tvs4qVnADTa5FCbNC86qCLN2E1MxKKroYB2pgSz2RLbbVcVkSJhOKxIDjGxn+nSuqes2JlKuG8fA/IzPXazbj68X7et/27UfX7GifORwOuSju47h/c3beKfRFO74CNA04YP0ZT2/YzERFGojc9pmDG47/wyDZwJjiX4wwJNer1dZPJbs5/xzK5Ppzp7SQZBszNy22U7tX7/dtFdvJrv8aGE2cDJLoPycBgHSgICJUQLo8nmUo6y7oH0S5Lu/FGhDQULCfIooATw3yyOQQ46eYVpYiaBMTFtAFPR307r9y3fbdvsRfd5Rg6HJI2Lt1qaAF6TEqoxWdVdYSHawezCvAHLjW7Jh2QGcUkDDT4Og2OfSFRVkxipcAJUZARC5FVRbeRpB1hVY6r25XQHexIZ96Hfa++PTs4Dbi8rQg7imWQG27/uEgCTCssk/WWg7GwJWwDQ36PceGzQ+x7jOtgNogkIIpsZiFMdXoEfOPUlh3l5ulu2/X6bJ7Mc84Bw+xgOKzJqM0VKm8WYlVMqt61gFKNtQKeZ6o7Ls/aqEeYooJXDIZ9uiT0uZ5UxPUJNlYdoAK62qHfM7unz3/bb9/Ha+v3u/tn3AD0XOrnxAZ
dpNYZILgoxyGk4BqMCbssq66dXv6RdFkiB6Rj2u3N1npiMw1dQjF4oJW/kzy6VdMRFA9Xd8VvhCLxCyYUYkvhHZb7+fotvdUR6XmwXcYI1DangAA6yspgBj/dRjp6L+RbmSPaaxuuMnGEeVAhBF4pSapAFG5gUo60rAHmpVtcz0sR2aBZW8NAB9+W7dXr9N0dmPmUcu10pWrq7kQQvBQXn1dUsgoM4ej12TtyBknG51PEMGOV2TLLVZ/GLvLMBYHsYJhg7fuMBx6tq3LFu7aBxxD9jKFiO7Thbwcv7n5dS+/ML0eWEWcBqoptk+mEQp2aTG+rbmBYA+D6MyMwMAdepKsX5QpnglFZyZ5k4tDYsI/Y1pF7CRq22HoHXgGEOwgodvgH79INnW3tlFIVVQvkBXg1dvF3z27fkTGzw+zALOPZluVoVkV4yLHoBB3VBJUNyo6uEWXAyIkruC2OQjbVeppxkm8+iti2mySsM1EPYGKBcEyul3LKTW1+pr+wLRstwP0J8a2K95Txf/+6q1ZzeUDEXt/oFhHnA4fJYCBtawYlWmlsrJBEHhP43bi9Rq1Z0ymlK3Z/QCRqA5YfaNLZJWEACn929eluXlUGO8CgMrHWYi441S2tsFebLRL5RWL0e0nL64SEEf2sjMR4ZZwA0Ddfziclz1eN8yDn1qAaHSq3G0FEQXjABDo51sJVNyGnA0QlAPL4LOApzMo0mY1sUFbQBj8xTzYhKrROYF5VGIftR1uW3+3uiWU8XnBw7l3HIYVG/P/djYgMZoyrTJrci0n2qPZVnNFV913viW6btGzsXBT6aW3VKmsauVTFOc2DxpP5YJYLBBeCUixE71IlGBR2EF+6OugHbP12Ddoj29HgIPj+cxDiPDFGINzB8sKhLh0Ui4gOgDI8deb8FiwYxlteWhLHWTlmOzhkxLAObPIkFqS8+bbG5BdgWiAmJTwXdqZ7oysktzdKC/BWMWiAJNpyP0ZPTMItRy7fTi2RB4eDwLuIkpCma1gob/Dsw7zcKAMf3txiCot8c42ZCDPu3WAqRMJAGEk4cACaLzSZsFRhAE9QoAtXcwTX92XDT0sxTQXJYHdDJin0KfVN8PmzNvnOYBx5XNlik4giumihb7tJ60ezgNhgXuXgRNttxunZYAj7uzbL3nUA67rm5KJWrJCyTfIVwBMh3bTkD8TqFYp6uv8RwrgJpAZmHHScqv0qWeKT48NujhAuELekyYBdz9gXJQ53DvDh3tU62xTtN8bQhzzE9OccAK8wA2ez2k3cNtN7wM/RZs9M5NkNZoee0H2rmhLr8miPV9roAZtN1RHV/gDb7EoUtXKeXjYXUBN0oeFs8CbrtlhZRGPZSSZNyI9gA+TBFkelFNWxgEgCtG3wDiFqEr5Jz6y/U1DAM4QLxi2l7DNhl3w/epNTUFWGbXC7HrMQMz7WUbf8AaDQ46DYXuxLoJX6CFRzvuiPyJzCzgZIoKyqgKAx1yAGPQUWfa+GoDsqwDJNnHLF9juSz0i5VrpvqSwmsQul5dtyfrfX1zL3i0WdHHSjaKVjf0T5k7ABtxlEHbwxusgjydAY8N84BjvAx5GLfMqBW0VJEZ+pwKskQnbpnFHPzpwWo/bzkGvX51296+bu1v/+qL9usXT9rTJ07Bzh9k9HEPsxNhwhh6xLXKo3fXWf3iMkrBBz9nAbflbHm6ONxhXp8/NW26lkSleIEV9FBVI+o6ihjmffPDt+3v/+5Z+82vnsZw/fyercweB2d7wzA8mfuPEknpXTnHvQsoPd1v/aD8LODw+AxbAw/QjnEfv69u5kz6dtOiW2R6YmW7vd0C3qK94wcjf/zxZ1bRXfvqGT6U3f2G/Z6AesqotgJX477PNVmTmxfiwTSS5irqz2ybEHD6PzbMAk7lS/0BxgkTqPAUYBiAkQpTLLdKxe1D4Lbsp968uW1vXk+ZrnpsN7yL1TbmbvCl4GcPPPStZWyNcM9s++9y92ruZu2CT21q7lZ9K
DcLuC3WbmGG42uA30EISOVkFynt1BBialOliF/wZHqGTa1tOfq8fbMHPL6N2iBPW2d7HfxZdWnreiN49UL0dfhLR6tBSVVwNo+TQ1U5IsHvQU4Dcry7bGNOix+SngVcwAhYpZjTQxaNMABLLLtUFEAMEwi4kk63fGDbLTcVm82ubd7hNylzEXCa6SPdz2Vf5iUobe0jAFIq8+JHT8CjGeUjHFOj5E7MIO4THxvOaHIcwu2IOKiznyg89BTEXi6WssO8B36vkLa33Pv7/QRbEtm21c/BtIm9Yb4ho19PDg4g09aeucySdpzq3BfVx6WQqh7MkLOSkHLf2olEKni4n7xznh0VH4jnAYdy6hfVSZTvUmF54f2cU9d9XmlhvUyTlbkxIT0BWtgH4wRRgPMy7EFbAwi8ojzbNyqtH/7coWxnUHyE+rmYjbs3NCnqdwIbbM/GZ4RZwDleVskO3viSBhWjSu2Pxj7JU4bsqrzTU5YZQ7xKu73Bb8bAbo+s28NStxEyb8e+K1UAKXhOVivK7x0RUANf3zEw/smJpsr37cad9RlhFnCbzQYwfN36I+5qwxgVwRA/vOHxlneeMiaux9lymN5tTTttkZN5mbZwCYsLM550taA+zJM5gsdHsGSdQTbngN7ZlC/JrRhXIcorRJvVcp2pnjzdy+0nnErOCbOAE5x8d4oVCy4xMSFGetjfgWJ3MQFHdomxZbUwwC4B84YlzBNojUEmxmqO1tVC4VcVopUzKuXK+XArUeDVTyq85wv7xKqHsel1dfIUkl8zUXcFm8eUH7IPjWcBp8J5mYxWcWmbclhlyEIAMJm2HbSwDCHZGD9IuR1UH4MhaZ4HOAIQIJOrIxfjxOFRUMNQq8wI9EH5WNVJdcEje22ofxs3K6PlQ+OZwA2ghrFSKhiEVSqh/5JJcfodKBnntLac7wb5CKLpAs+0RguYuAhoNh2CRV1dTVFhqWhRn/u+tOsMtTph6JhOkAWsQDz1K3NHeHyYBZyK70BG5oy3SyqGumoaAhr1Aiggnm8FzXr3cQWSq++p8seM10v6LW9Elgh5kyGINXMdi1xspw2LRHwqMjJTV2KdU9c2eQ1SkXDDHL2aYf2MprVp1dFrtcBlAWB/sNuxMoJIzEfRqhMk04qXfM0n8yVDaa/DRLp1GuGSKhNz65ZEOQUSdyD0Y/adRSojsxjoz2jnNFdN3l/S+sUvnqbDsx+zgCvQMJzhPaCrlouCLBvbA43x68DhsAc7DxpTr0y39VAMBCfpSlpSUMggzRe8X4bIAWRYJqVJj6t7feMV/9Bkfeb+bYw2Czg78S3GwWtEQEPRWFMMEDAZhVTiMaWLnZZRxSexfaStPR9DAXbMj5Qs479Dm8PqqYCNEpUTVAe/GpLC3vH16hI64zkLuB1XQVsdFkED8ps40oLjj2sMAdbFwGlKRjbW6UHAFZaRJVegIpeWVafZhQ4yHahUm+5VyfOwXYFHTX8DKUNSn+fCcsN3qOd8AT3GGPEs4EYnxho9YlOnU1WTUj98GbLKWCawI5wk71DiBMoh+qjYfgXUc+nNlW+rXuqjOrknPAs4sRoHcvvNguDZNEChYOoBUUZ175z9nMBZnQ6cnncgS7uDnt3BJ49Y8axqPYLZ0gVEb2DaICyHtOUM5t2eP7AJexWaGWYBVzcdsqneoAAViyzzo3ZsC1Jeq2qBKVhlkIxDsuSRrSY6/6S6eaaFjD+B4BGmMo9X9M06kcAdMq0qU5eT+lBBc8+GqaVmCc989iHP6yVvOcr4qE8ZLijVZ8VleC/5xWDWFmN6ow6aIKX75EfdL5rfKxBJgAcwwV/zeXrFjyqqo3uy52dnMa5oU4O7svo7YMNgWrFKdsk6WBXmmS82HuKsuADjHZFGi5iBIv+9qnn/qt+qSh3JTFNjPvWDiqpnA0SexYB/ijm6q5qP85wFnIZrXQHgillpVesHh9QVaAWWAJccfo/VNrOcbmrbYn/vCR9gy2m1aUH2WOa/r
v4UoKnhPODowC2Gx6jQo4Nox4ZinDL392ssIHFSZWa1rTZJD/wSy0Kn34eDpwZvP1w96+dmH25zrsQs4KSLP4GAawWSjhnFZZQFmUZxOZSTj/ne2yUhIHCjRIlFKcIU0x852RjZTGGlDdaQrkxk7MPrJr/gzg17r4vgJ3rMAk4/wmQDE7wJhg+fFV1xaMGiMqnXaFc5jd4FjCCIRAEmAO5aPE7lzsw0ZelHYJB0PCWscErqOJcsrbllGmhmzE/7mAXcPof544Wlqg6wTuORtvKQzjV2gVC+shaNMhc24v8iIloGmS3ogc7bD9sS884Oi0kEP89jFnDX++/hCtPVtT7kwaxOkZpmxQ/L9vgdj1r+NCtAwQ6/A9DXMXnBqZgoHDdXP7Wna/Id6PRCum7DiREqcg1UPw9Yp6MsLv/HwlM4Hp7WQ1/CGQhcgDsDNJtcgLsAdyYCZza7MO4C3JkInNnswrgLcGcicGazC+POBO7/AH5zPa/ivytzAAAAAElFTkSuQmCC" - ), - ] - ), - ], - model_parameters={"temperature": 0.3, "top_p": 0.2, "top_k": 3, "max_tokens": 100}, - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -@pytest.mark.parametrize("setup_google_mock", [["none"]], indirect=True) -def test_invoke_chat_model_with_vision_multi_pics(setup_google_mock): - model = GoogleLargeLanguageModel() - - result = model.invoke( - model="gemini-pro-vision", - credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage(content="You are a helpful AI assistant."), - UserPromptMessage( - content=[ - TextPromptMessageContent(data="what do you see?"), - ImagePromptMessageContent( - 
data="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE4AAABMCAYAAADDYoEWAAAMQGlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnluSkEBoAQSkhN4EkRpASggt9I4gKiEJEEqMgaBiRxcVXLuIgA1dFVGwAmJBETuLYu+LBRVlXSzYlTcpoOu+8r35vrnz33/O/OfMmbllAFA7zhGJclF1APKEBeLYYH/6uOQUOukpIAEdoAy0gA2Hmy9iRkeHA1iG2r+Xd9cBIm2v2Eu1/tn/X4sGj5/PBQCJhjidl8/Ng/gAAHg1VyQuAIAo5c2mFoikGFagJYYBQrxIijPluFqK0+V4j8wmPpYFcTsASiocjjgTANVLkKcXcjOhhmo/xI5CnkAIgBodYp+8vMk8iNMgtoY2Ioil+oz0H3Qy/6aZPqzJ4WQOY/lcZEUpQJAvyuVM/z/T8b9LXq5kyIclrCpZ4pBY6Zxh3m7mTA6TYhWI+4TpkVEQa0L8QcCT2UOMUrIkIQlye9SAm8+COYMrDVBHHicgDGIDiIOEuZHhCj49QxDEhhjuEHSaoIAdD7EuxIv4+YFxCptN4smxCl9oY4aYxVTwZzlimV+pr/uSnASmQv91Fp+t0MdUi7LikyCmQGxeKEiMhFgVYof8nLgwhc3YoixW5JCNWBIrjd8c4li+MNhfro8VZoiDYhX2pXn5Q/PFNmUJ2JEKvK8gKz5Enh+sncuRxQ/ngl3iC5kJQzr8/HHhQ3Ph8QMC5XPHnvGFCXEKnQ+iAv9Y+VicIsqNVtjjpvzcYClvCrFLfmGcYiyeWAA3pFwfzxAVRMfL48SLsjmh0fJ48OUgHLBAAKADCazpYDLIBoLOvqY+eCfvCQIcIAaZgA/sFczQiCRZjxBe40AR+BMiPsgfHucv6+WDQsh/HWblV3uQIestlI3IAU8gzgNhIBfeS2SjhMPeEsFjyAj+4Z0DKxfGmwurtP/f80Psd4YJmXAFIxnySFcbsiQGEgOIIcQgog2uj/vgXng4vPrB6oQzcI+heXy3JzwhdBEeEq4Rugm3JgmKxT9FGQG6oX6QIhfpP+YCt4Sarrg/7g3VoTKug+sDe9wF+mHivtCzK2RZirilWaH/pP23GfywGgo7siMZJY8g+5Gtfx6paqvqOqwizfWP+ZHHmj6cb9Zwz8/+WT9knwfbsJ8tsUXYfuwMdgI7hx3BmgAda8WasQ7sqBQP767Hst015C1WFk8O1BH8w9/Qykozme9Y59jr+EXeV8CfJn1HA9Zk0XSxIDOrgM6EXwQ+nS3kOoyiOzk6OQMg/b7IX19vYmTfDUSn4zs3/w8AvFsHBwcPf+dCWwHY6w4f/0PfOWsG/HQoA3D2EFciLpRzuPRCgG8JNfik6QEjYAas4XycgBvwAn4gEISCKBAPksFEGH0W3OdiMBXMBPNACSgDy8EaUAk2gi1gB9gN9oEmcAScAKfBBXAJXAN34O7pAS9AP3gHPiMIQkKoCA3RQ4wRC8QOcUIYiA8SiIQjsUgykoZkIkJEgsxE5iNlyEqkEtmM1CJ7kUPICeQc0oXcQh4gvchr5BOKoSqoFmqIWqKjUQbKRMPQeHQCmolOQYvQBehStAKtQXehjegJ9AJ6De1GX6ADGMCUMR3MBLPHGBgLi8JSsAxMjM3GSrFyrAarx1rgOl/BurE+7CNOxGk4HbeHOzgET8C5+BR8Nr4Er8R34I14O34Ff4D3498IVIIBwY7gSWATxhEyCVMJJYRywjbCQcIp+Cz1EN4RiUQdohXRHT6LycRs4gziEuJ6YgPxOLGL+Ig4QCKR9Eh2JG9SFIlDKiCVkNaRdpFaSZdJPaQPSspKxkpOSkFKKUpCpWKlcqWdSseULis9VfpMVidbkD3JUWQeeTp5GXkruYV8kdxD/kzRoFhRvCnxlGzKPEoFpZ5yinKX8kZZWdlU2UM5RlmgPFe5QnmP8lnlB8ofVTRVbFVYKqkqEpWlKttV
jqvcUnlDpVItqX7UFGoBdSm1lnqSep/6QZWm6qDKVuWpzlGtUm1Uvaz6Uo2sZqHGVJuoVqRWrrZf7aJanzpZ3VKdpc5Rn61epX5I/Yb6gAZNY4xGlEaexhKNnRrnNJ5pkjQtNQM1eZoLNLdontR8RMNoZjQWjUubT9tKO0Xr0SJqWWmxtbK1yrR2a3Vq9WtrartoJ2pP067SPqrdrYPpWOqwdXJ1luns07mu82mE4QjmCP6IxSPqR1we8V53pK6fLl+3VLdB95ruJz26XqBejt4KvSa9e/q4vq1+jP5U/Q36p/T7RmqN9BrJHVk6ct/I2waoga1BrMEMgy0GHQYDhkaGwYYiw3WGJw37jHSM/IyyjVYbHTPqNaYZ+xgLjFcbtxo/p2vTmfRcegW9nd5vYmASYiIx2WzSafLZ1Mo0wbTYtMH0nhnFjGGWYbbarM2s39zYPMJ8pnmd+W0LsgXDIstircUZi/eWVpZJlgstmyyfWelasa2KrOqs7lpTrX2tp1jXWF+1IdowbHJs1ttcskVtXW2zbKtsL9qhdm52Arv1dl2jCKM8RglH1Yy6Ya9iz7QvtK+zf+Cg4xDuUOzQ5PBytPnolNErRp8Z/c3R1THXcavjnTGaY0LHFI9pGfPaydaJ61TldNWZ6hzkPMe52fmVi50L32WDy01XmmuE60LXNtevbu5uYrd6t153c/c092r3GwwtRjRjCeOsB8HD32OOxxGPj55ungWe+zz/8rL3yvHa6fVsrNVY/titYx95m3pzvDd7d/vQfdJ8Nvl0+5r4cnxrfB/6mfnx/Lb5PWXaMLOZu5gv/R39xf4H/d+zPFmzWMcDsIDggNKAzkDNwITAysD7QaZBmUF1Qf3BrsEzgo+HEELCQlaE3GAbsrnsWnZ/qHvorND2MJWwuLDKsIfhtuHi8JYINCI0YlXE3UiLSGFkUxSIYketiroXbRU9JfpwDDEmOqYq5knsmNiZsWfiaHGT4nbGvYv3j18WfyfBOkGS0JaolpiaWJv4PikgaWVS97jR42aNu5CsnyxIbk4hpSSmbEsZGB84fs34nlTX1JLU6xOsJkybcG6i/sTciUcnqU3iTNqfRkhLStuZ9oUTxanhDKSz06vT+7ks7lruC54fbzWvl+/NX8l/muGdsTLjWaZ35qrM3izfrPKsPgFLUCl4lR2SvTH7fU5Uzvacwdyk3IY8pby0vENCTWGOsH2y0eRpk7tEdqISUfcUzylrpvSLw8Tb8pH8CfnNBVrwR75DYi35RfKg0KewqvDD1MSp+6dpTBNO65huO33x9KdFQUW/zcBncGe0zTSZOW/mg1nMWZtnI7PTZ7fNMZuzYE7P3OC5O+ZR5uXM+73YsXhl8dv5SfNbFhgumLvg0S/Bv9SVqJaIS24s9Fq4cRG+SLCoc7Hz4nWLv5XySs+XOZaVl31Zwl1y/tcxv1b8Org0Y2nnMrdlG5YTlwuXX1/hu2LHSo2VRSsfrYpY1biavrp09ds1k9acK3cp37iWslaytrsivKJ5nfm65eu+VGZVXqvyr2qoNqheXP1+PW/95Q1+G+o3Gm4s2/hpk2DTzc3BmxtrLGvKtxC3FG55sjVx65nfGL/VbtPfVrbt63bh9u4dsTvaa91ra3ca7FxWh9ZJ6np3pe66tDtgd3O9ff3mBp2Gsj1gj2TP871pe6/vC9vXtp+xv/6AxYHqg7SDpY1I4/TG/qaspu7m5OauQ6GH2lq8Wg4edji8/YjJkaqj2keXHaMcW3BssLWodeC46HjficwTj9omtd05Oe7k1faY9s5TYafOng46ffIM80zrWe+zR855njt0nnG+6YLbhcYO146Dv7v+frDTrbPxovvF5ksel1q6xnYdu+x7+cSVgCunr7KvXrgWea3resL1mzdSb3Tf5N18div31qvbhbc/35l7l3C39J76vfL7Bvdr/rD5o6Hbrfvog4AHHQ/jHt55xH304nH+4y89C55Qn5Q/NX5a+8zp2ZHeoN5L
z8c/73khevG5r+RPjT+rX1q/PPCX318d/eP6e16JXw2+XvJG7832ty5v2waiB+6/y3v3+X3pB70POz4yPp75lPTp6eepX0hfKr7afG35Fvbt7mDe4KCII+bIfgUwWNGMDABebweAmgwADZ7PKOPl5z9ZQeRnVhkC/wnLz4iy4gZAPfx/j+mDfzc3ANizFR6/oL5aKgDRVADiPQDq7Dxch85qsnOltBDhOWBT5Nf0vHTwb4r8zPlD3D+3QKrqAn5u/wWdZ3xtG7qP3QAAADhlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAAqACAAQAAAABAAAATqADAAQAAAABAAAATAAAAADhTXUdAAARnUlEQVR4Ae2c245bR3aGi4fulizFHgUzQAYIggBB5klymfeaZ8hDBYjvAiRxkMAGkowRWx7JktjcZL7vX1Uku62Burkl5YbV5q7Tqqq1/v3XqgMpL95tbvftEh6NwPLRLS4NgsAFuDOJcAHuAtyZCJzZ7MK4C3BnInBmswvjLsCdicCZzS6MOxO49Znt0uz3//CPbbv6srXFrq0W9Q6Wi0VbLPn4R8x/jSLiu3nrl8s9dcartlwtKdmTbm21XranN6v27Mm6XV8t25fP1+3Pn1+1r4if3Czbk+t9u1rR6f9jmAXc1P6sbaevQGbfdgGJeA8ke0AQsCYYgiYgPR1QyVO+3wvcMm2WO0G2PeWkX79btp839AG4//UjYC62gDsB2rI9f7pov3q2bX/9F1ftBWAufTufOcwCrnTtR90dOdHoNgCJeAbUkuM5TsWAW5W9gfkE83ZkUHg0oAyAwbm927a2ebVoP/xx2f7jD1uYuG9/89tF+/VXK1hq+88TZgG32O1g2r7tpRdBM8fUTM7pyR8SYddgxkJErUszHti7U44CpzyEo16syNtx+qgy+1og7RMetpev9+3rb3bt+c2u/ebFsv3uL1ftiqn+qcMs4HY7jNQpEfadNU5VqeHUTJkgUbaPDxRADdZ8jU9LHoJYnwLUtgWN4ObDC7Kdr8Hp7d9qMTW8gt23V1zyvPrD1H56e9t+99vr9uJLprBDfaIw69U4dQRCIw2JdVIjbUzecj+7qYyPpZHiAbDaJwsXyMhQEQ0pq6sAp7hMS2XGqykdA2iy4EUtF6v206ur9k/fbNo//+frtt2OaW/rjxtmAaeNGqihBY5xfVQzQEZfoSH0KHgkrbD/CX6vPIqlSTU61vVCovRSbEwbIS851vj23Q+tff3vu/bzu5I7tvs4qVnADTa5FCbNC86qCLN2E1MxKKroYB2pgSz2RLbbVcVkSJhOKxIDjGxn+nSuqes2JlKuG8fA/IzPXazbj68X7et/27UfX7GifORwOuSju47h/c3beKfRFO74CNA04YP0ZT2/YzERFGojc9pmDG47/wyDZwJjiX4wwJNer1dZPJbs5/xzK5Ppzp7SQZBszNy22U7tX7/dtFdvJrv8aGE2cDJLoPycBgHSgICJUQLo8nmUo6y7oH0S5Lu/FGhDQULCfIooATw3yyOQQ46eYVpYiaBMTFtAFPR307r9y3fbdvsRfd5Rg6HJI2Lt1qaAF6TEqoxWdVdYSHawezCvAHLjW7Jh2QGcUkDDT4Og2OfSFRVkxipcAJUZARC5FVRbeRpB1hVY6r25XQHexIZ96Hfa++PTs4Dbi8rQg7imWQG27/uEgCTCssk/WWg7GwJWwDQ36PceGzQ+x7jOtgNogkIIpsZiFMdXoEfOPUlh3l5ulu2/X6bJ7Mc84Bw+xgOKzJqM0VKm8WYlVMqt61gFKNtQKeZ6o7Ls/aqEeYooJXDIZ9uiT0uZ5UxPUJNlYdoAK62qHfM7unz3/bb9/Ha+v3u/tn3AD0XOrnxAZdpNYZILgoxyGk4BqMCbssq66dXv6RdFkiB6Rj2u3N1npiMw1dQjF4oJW/kzy6VdMRFA9Xd8VvhCLxCyYUYkvhHZb7+fotvdUR6XmwXcYI1DangAA6ys
pgBj/dRjp6L+RbmSPaaxuuMnGEeVAhBF4pSapAFG5gUo60rAHmpVtcz0sR2aBZW8NAB9+W7dXr9N0dmPmUcu10pWrq7kQQvBQXn1dUsgoM4ej12TtyBknG51PEMGOV2TLLVZ/GLvLMBYHsYJhg7fuMBx6tq3LFu7aBxxD9jKFiO7Thbwcv7n5dS+/ML0eWEWcBqoptk+mEQp2aTG+rbmBYA+D6MyMwMAdepKsX5QpnglFZyZ5k4tDYsI/Y1pF7CRq22HoHXgGEOwgodvgH79INnW3tlFIVVQvkBXg1dvF3z27fkTGzw+zALOPZluVoVkV4yLHoBB3VBJUNyo6uEWXAyIkruC2OQjbVeppxkm8+iti2mySsM1EPYGKBcEyul3LKTW1+pr+wLRstwP0J8a2K95Txf/+6q1ZzeUDEXt/oFhHnA4fJYCBtawYlWmlsrJBEHhP43bi9Rq1Z0ymlK3Z/QCRqA5YfaNLZJWEACn929eluXlUGO8CgMrHWYi441S2tsFebLRL5RWL0e0nL64SEEf2sjMR4ZZwA0Ddfziclz1eN8yDn1qAaHSq3G0FEQXjABDo51sJVNyGnA0QlAPL4LOApzMo0mY1sUFbQBj8xTzYhKrROYF5VGIftR1uW3+3uiWU8XnBw7l3HIYVG/P/djYgMZoyrTJrci0n2qPZVnNFV913viW6btGzsXBT6aW3VKmsauVTFOc2DxpP5YJYLBBeCUixE71IlGBR2EF+6OugHbP12Ddoj29HgIPj+cxDiPDFGINzB8sKhLh0Ui4gOgDI8deb8FiwYxlteWhLHWTlmOzhkxLAObPIkFqS8+bbG5BdgWiAmJTwXdqZ7oysktzdKC/BWMWiAJNpyP0ZPTMItRy7fTi2RB4eDwLuIkpCma1gob/Dsw7zcKAMf3txiCot8c42ZCDPu3WAqRMJAGEk4cACaLzSZsFRhAE9QoAtXcwTX92XDT0sxTQXJYHdDJin0KfVN8PmzNvnOYBx5XNlik4giumihb7tJ60ezgNhgXuXgRNttxunZYAj7uzbL3nUA67rm5KJWrJCyTfIVwBMh3bTkD8TqFYp6uv8RwrgJpAZmHHScqv0qWeKT48NujhAuELekyYBdz9gXJQ53DvDh3tU62xTtN8bQhzzE9OccAK8wA2ez2k3cNtN7wM/RZs9M5NkNZoee0H2rmhLr8miPV9roAZtN1RHV/gDb7EoUtXKeXjYXUBN0oeFs8CbrtlhZRGPZSSZNyI9gA+TBFkelFNWxgEgCtG3wDiFqEr5Jz6y/U1DAM4QLxi2l7DNhl3w/epNTUFWGbXC7HrMQMz7WUbf8AaDQ46DYXuxLoJX6CFRzvuiPyJzCzgZIoKyqgKAx1yAGPQUWfa+GoDsqwDJNnHLF9juSz0i5VrpvqSwmsQul5dtyfrfX1zL3i0WdHHSjaKVjf0T5k7ABtxlEHbwxusgjydAY8N84BjvAx5GLfMqBW0VJEZ+pwKskQnbpnFHPzpwWo/bzkGvX51296+bu1v/+qL9usXT9rTJ07Bzh9k9HEPsxNhwhh6xLXKo3fXWf3iMkrBBz9nAbflbHm6ONxhXp8/NW26lkSleIEV9FBVI+o6ihjmffPDt+3v/+5Z+82vnsZw/fyercweB2d7wzA8mfuPEknpXTnHvQsoPd1v/aD8LODw+AxbAw/QjnEfv69u5kz6dtOiW2R6YmW7vd0C3qK94wcjf/zxZ1bRXfvqGT6U3f2G/Z6AesqotgJX477PNVmTmxfiwTSS5irqz2ybEHD6PzbMAk7lS/0BxgkTqPAUYBiAkQpTLLdKxe1D4Lbsp968uW1vXk+ZrnpsN7yL1TbmbvCl4GcPPPStZWyNcM9s++9y92ruZu2CT21q7lZ9KDcLuC3WbmGG42uA30EISOVkFynt1BBialOliF/wZHqGTa1tOfq8fbMHPL6N2iBPW2d7HfxZdWnreiN49UL0dfhLR6tBSVVwNo+TQ1U5IsHvQU4Dcry7
bGNOix+SngVcwAhYpZjTQxaNMABLLLtUFEAMEwi4kk63fGDbLTcVm82ubd7hNylzEXCa6SPdz2Vf5iUobe0jAFIq8+JHT8CjGeUjHFOj5E7MIO4THxvOaHIcwu2IOKiznyg89BTEXi6WssO8B36vkLa33Pv7/QRbEtm21c/BtIm9Yb4ho19PDg4g09aeucySdpzq3BfVx6WQqh7MkLOSkHLf2olEKni4n7xznh0VH4jnAYdy6hfVSZTvUmF54f2cU9d9XmlhvUyTlbkxIT0BWtgH4wRRgPMy7EFbAwi8ojzbNyqtH/7coWxnUHyE+rmYjbs3NCnqdwIbbM/GZ4RZwDleVskO3viSBhWjSu2Pxj7JU4bsqrzTU5YZQ7xKu73Bb8bAbo+s28NStxEyb8e+K1UAKXhOVivK7x0RUANf3zEw/smJpsr37cad9RlhFnCbzQYwfN36I+5qwxgVwRA/vOHxlneeMiaux9lymN5tTTttkZN5mbZwCYsLM550taA+zJM5gsdHsGSdQTbngN7ZlC/JrRhXIcorRJvVcp2pnjzdy+0nnErOCbOAE5x8d4oVCy4xMSFGetjfgWJ3MQFHdomxZbUwwC4B84YlzBNojUEmxmqO1tVC4VcVopUzKuXK+XArUeDVTyq85wv7xKqHsel1dfIUkl8zUXcFm8eUH7IPjWcBp8J5mYxWcWmbclhlyEIAMJm2HbSwDCHZGD9IuR1UH4MhaZ4HOAIQIJOrIxfjxOFRUMNQq8wI9EH5WNVJdcEje22ofxs3K6PlQ+OZwA2ghrFSKhiEVSqh/5JJcfodKBnntLac7wb5CKLpAs+0RguYuAhoNh2CRV1dTVFhqWhRn/u+tOsMtTph6JhOkAWsQDz1K3NHeHyYBZyK70BG5oy3SyqGumoaAhr1Aiggnm8FzXr3cQWSq++p8seM10v6LW9Elgh5kyGINXMdi1xspw2LRHwqMjJTV2KdU9c2eQ1SkXDDHL2aYf2MprVp1dFrtcBlAWB/sNuxMoJIzEfRqhMk04qXfM0n8yVDaa/DRLp1GuGSKhNz65ZEOQUSdyD0Y/adRSojsxjoz2jnNFdN3l/S+sUvnqbDsx+zgCvQMJzhPaCrlouCLBvbA43x68DhsAc7DxpTr0y39VAMBCfpSlpSUMggzRe8X4bIAWRYJqVJj6t7feMV/9Bkfeb+bYw2Czg78S3GwWtEQEPRWFMMEDAZhVTiMaWLnZZRxSexfaStPR9DAXbMj5Qs479Dm8PqqYCNEpUTVAe/GpLC3vH16hI64zkLuB1XQVsdFkED8ps40oLjj2sMAdbFwGlKRjbW6UHAFZaRJVegIpeWVafZhQ4yHahUm+5VyfOwXYFHTX8DKUNSn+fCcsN3qOd8AT3GGPEs4EYnxho9YlOnU1WTUj98GbLKWCawI5wk71DiBMoh+qjYfgXUc+nNlW+rXuqjOrknPAs4sRoHcvvNguDZNEChYOoBUUZ175z9nMBZnQ6cnncgS7uDnt3BJ49Y8axqPYLZ0gVEb2DaICyHtOUM5t2eP7AJexWaGWYBVzcdsqneoAAViyzzo3ZsC1Jeq2qBKVhlkIxDsuSRrSY6/6S6eaaFjD+B4BGmMo9X9M06kcAdMq0qU5eT+lBBc8+GqaVmCc989iHP6yVvOcr4qE8ZLijVZ8VleC/5xWDWFmN6ow6aIKX75EfdL5rfKxBJgAcwwV/zeXrFjyqqo3uy52dnMa5oU4O7svo7YMNgWrFKdsk6WBXmmS82HuKsuADjHZFGi5iBIv+9qnn/qt+qSh3JTFNjPvWDiqpnA0SexYB/ijm6q5qP85wFnIZrXQHgillpVesHh9QVaAWWAJccfo/VNrOcbmrbYn/vCR9gy2m1aUH2WOa/rv4UoKnhPODowC2Gx6jQo4Nox4ZinDL392ssIHFSZWa1rTZJD/wSy0Kn34eDpwZvP1w96+dmH25zrsQs4KSLP4GAawWSjhnFZZQFmUZxOZSTj/ne2yUh
IHCjRIlFKcIU0x852RjZTGGlDdaQrkxk7MPrJr/gzg17r4vgJ3rMAk4/wmQDE7wJhg+fFV1xaMGiMqnXaFc5jd4FjCCIRAEmAO5aPE7lzsw0ZelHYJB0PCWscErqOJcsrbllGmhmzE/7mAXcPof544Wlqg6wTuORtvKQzjV2gVC+shaNMhc24v8iIloGmS3ogc7bD9sS884Oi0kEP89jFnDX++/hCtPVtT7kwaxOkZpmxQ/L9vgdj1r+NCtAwQ6/A9DXMXnBqZgoHDdXP7Wna/Id6PRCum7DiREqcg1UPw9Yp6MsLv/HwlM4Hp7WQ1/CGQhcgDsDNJtcgLsAdyYCZza7MO4C3JkInNnswrgLcGcicGazC+POBO7/AH5zPa/ivytzAAAAAElFTkSuQmCC" - ), - ] - ), - AssistantPromptMessage(content="I see a blue letter 'D' with a gradient from light blue to dark blue."), - UserPromptMessage( - content=[ - TextPromptMessageContent(data="what about now?"), - ImagePromptMessageContent( - data="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAABAAAAAQBPJcTWAAADl0lEQVR4nC3Uf0zUdRjA8S9W6w//bGs1DUd5RT+gIY0oYeEqY0QCy5EbAnF4IEgyAnGuCBANWOjih6YOlK0BbtLAX+iAENFgUBLMkzs8uDuO+wEcxx3cgdx9v3fvvn/0x+v5PM+z56/n2T6CIAgIQUEECVsICnqOoC0v8PyLW3n5lW28GhLG9hAFwYowdoRsJ+Tzv3hdEcpOxVvsfDscheI1BIXKy5t7OwiPiCI8IZaIL+OISPKxK/IDdiU6ifwqjqj4WKISP5VN8mHSFNHJA7KnfJQYh7A7+g1i9hXw2dcX2JuSxhcJnxCfnEJ8ygESqtfYl3qA5O/1pKaX8E2Rn7R0JWnKXFkRaX0OhIOqUtJVRWQoj5ChyiOjb4XMQ0fIVB0lM6eEzMO5ZN5x8W1xD1nZh1Fm55OtzOdQTgEqZR6CSi5UjSI5hTnk3bWSX/gj+ccaKCgspaDkNIWlpygc3OTYtZc4fqKcE5Vn+eFkDWUp8ZS1ryOUn66lvGmCyt/8nLwxTlXZcapqL1Nd10B1Uy01FbnUnFVS+2sLvzTWUXfRRMOAgcb6KhovdSA0XnHRdL6Zcy1/0lyTS3NfgJbWNq6cu0nrPyu0FSlpu9pF21037ZFhXLtYT+eNIbp61+jq70bofv8drvf0c2vQz+3O3+nRrNI78JD+/psMfLefe0MG7p+a5v6tP3g48ojhC7mMXP2Y0YoZRitnEcbkMPaglzEnPAoNZrw4hXH1LBOtOiYfa3gcugO1+gnqZwGeaHRMTcyhaduKRjOBxiJfQSsnWq0W7YwVrd3PtH6BaeMST40adJ3V6OwBZlR7mNUvMWswYsiKxTA1gWHOgsGiRzCmRGOcW8QoD855JObWJUxmHSb5nfd4Mc+ZMFv1MjtmuWepSMNiMmAxz2LN2o1gbdmDdV6NdVnE1p6EzajHZp7BtjCLbSnAgsMtE1k8H8OiwyuTWPL4sLduwz5vRLA7XCzbLCw7PTiswzgWJnBsijhNwzhtw6xmRLLmdLC27sU9dBC324un/iieSyF4rPIS1/8eZOOego0NL898Epv14Wz2nMHrsOB12/Glh+Mrfg/fqgufKCHmxSC21SE6JxFdKwjihhFxw4O4aUf0bSKVRyN1pyKNXEcaDUbS3EZan5Sp/zeFtLGO5LUiSRKCJAXwZ0bg73oXv+kBfrsOv8uOXxIJ/JRG4N/9sjME1B3QXAjzd8CqhqWfkT8C4T8Z5+ciRtwo8gAAAABJRU5ErkJgg
g==" - ), - ] - ), - ], - model_parameters={"temperature": 0.3, "top_p": 0.2, "top_k": 3, "max_tokens": 100}, - stream=False, - user="abc-123", - ) - - print(f"result: {result.message.content}") - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -def test_get_num_tokens(): - model = GoogleLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="gemini-pro", - credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens > 0 # The exact number of tokens may vary based on the model's tokenization diff --git a/api/tests/integration_tests/model_runtime/google/test_provider.py b/api/tests/integration_tests/model_runtime/google/test_provider.py deleted file mode 100644 index c217e4fe058870..00000000000000 --- a/api/tests/integration_tests/model_runtime/google/test_provider.py +++ /dev/null @@ -1,17 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.google.google import GoogleProvider -from tests.integration_tests.model_runtime.__mock.google import setup_google_mock - - -@pytest.mark.parametrize("setup_google_mock", [["none"]], indirect=True) -def test_validate_provider_credentials(setup_google_mock): - provider = GoogleProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials(credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/huggingface_hub/__init__.py b/api/tests/integration_tests/model_runtime/huggingface_hub/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/huggingface_hub/test_llm.py 
b/api/tests/integration_tests/model_runtime/huggingface_hub/test_llm.py deleted file mode 100644 index 6a6cc874fa2f30..00000000000000 --- a/api/tests/integration_tests/model_runtime/huggingface_hub/test_llm.py +++ /dev/null @@ -1,277 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.huggingface_hub.llm.llm import HuggingfaceHubLargeLanguageModel -from tests.integration_tests.model_runtime.__mock.huggingface import setup_huggingface_mock - - -@pytest.mark.parametrize("setup_huggingface_mock", [["none"]], indirect=True) -def test_hosted_inference_api_validate_credentials(setup_huggingface_mock): - model = HuggingfaceHubLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="HuggingFaceH4/zephyr-7b-beta", - credentials={"huggingfacehub_api_type": "hosted_inference_api", "huggingfacehub_api_token": "invalid_key"}, - ) - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="fake-model", - credentials={"huggingfacehub_api_type": "hosted_inference_api", "huggingfacehub_api_token": "invalid_key"}, - ) - - model.validate_credentials( - model="HuggingFaceH4/zephyr-7b-beta", - credentials={ - "huggingfacehub_api_type": "hosted_inference_api", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - }, - ) - - -@pytest.mark.parametrize("setup_huggingface_mock", [["none"]], indirect=True) -def test_hosted_inference_api_invoke_model(setup_huggingface_mock): - model = HuggingfaceHubLargeLanguageModel() - - response = model.invoke( - model="HuggingFaceH4/zephyr-7b-beta", - credentials={ - "huggingfacehub_api_type": 
"hosted_inference_api", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - }, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -@pytest.mark.parametrize("setup_huggingface_mock", [["none"]], indirect=True) -def test_hosted_inference_api_invoke_stream_model(setup_huggingface_mock): - model = HuggingfaceHubLargeLanguageModel() - - response = model.invoke( - model="HuggingFaceH4/zephyr-7b-beta", - credentials={ - "huggingfacehub_api_type": "hosted_inference_api", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - }, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -@pytest.mark.parametrize("setup_huggingface_mock", [["none"]], indirect=True) -def test_inference_endpoints_text_generation_validate_credentials(setup_huggingface_mock): - model = HuggingfaceHubLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="openchat/openchat_3.5", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": "invalid_key", - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_TEXT_GEN_ENDPOINT_URL"), - "task_type": "text-generation", - }, - ) - - model.validate_credentials( - model="openchat/openchat_3.5", - credentials={ - 
"huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_TEXT_GEN_ENDPOINT_URL"), - "task_type": "text-generation", - }, - ) - - -@pytest.mark.parametrize("setup_huggingface_mock", [["none"]], indirect=True) -def test_inference_endpoints_text_generation_invoke_model(setup_huggingface_mock): - model = HuggingfaceHubLargeLanguageModel() - - response = model.invoke( - model="openchat/openchat_3.5", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_TEXT_GEN_ENDPOINT_URL"), - "task_type": "text-generation", - }, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -@pytest.mark.parametrize("setup_huggingface_mock", [["none"]], indirect=True) -def test_inference_endpoints_text_generation_invoke_stream_model(setup_huggingface_mock): - model = HuggingfaceHubLargeLanguageModel() - - response = model.invoke( - model="openchat/openchat_3.5", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_TEXT_GEN_ENDPOINT_URL"), - "task_type": "text-generation", - }, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert 
isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -@pytest.mark.parametrize("setup_huggingface_mock", [["none"]], indirect=True) -def test_inference_endpoints_text2text_generation_validate_credentials(setup_huggingface_mock): - model = HuggingfaceHubLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="google/mt5-base", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": "invalid_key", - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_TEXT2TEXT_GEN_ENDPOINT_URL"), - "task_type": "text2text-generation", - }, - ) - - model.validate_credentials( - model="google/mt5-base", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_TEXT2TEXT_GEN_ENDPOINT_URL"), - "task_type": "text2text-generation", - }, - ) - - -@pytest.mark.parametrize("setup_huggingface_mock", [["none"]], indirect=True) -def test_inference_endpoints_text2text_generation_invoke_model(setup_huggingface_mock): - model = HuggingfaceHubLargeLanguageModel() - - response = model.invoke( - model="google/mt5-base", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_TEXT2TEXT_GEN_ENDPOINT_URL"), - "task_type": "text2text-generation", - }, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -@pytest.mark.parametrize("setup_huggingface_mock", [["none"]], indirect=True) -def 
test_inference_endpoints_text2text_generation_invoke_stream_model(setup_huggingface_mock): - model = HuggingfaceHubLargeLanguageModel() - - response = model.invoke( - model="google/mt5-base", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_TEXT2TEXT_GEN_ENDPOINT_URL"), - "task_type": "text2text-generation", - }, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = HuggingfaceHubLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="google/mt5-base", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_TEXT2TEXT_GEN_ENDPOINT_URL"), - "task_type": "text2text-generation", - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - ) - - assert num_tokens == 7 diff --git a/api/tests/integration_tests/model_runtime/huggingface_hub/test_text_embedding.py b/api/tests/integration_tests/model_runtime/huggingface_hub/test_text_embedding.py deleted file mode 100644 index 0ee593f38a494a..00000000000000 --- a/api/tests/integration_tests/model_runtime/huggingface_hub/test_text_embedding.py +++ /dev/null @@ -1,112 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from 
core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.huggingface_hub.text_embedding.text_embedding import ( - HuggingfaceHubTextEmbeddingModel, -) - - -def test_hosted_inference_api_validate_credentials(): - model = HuggingfaceHubTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="facebook/bart-base", - credentials={ - "huggingfacehub_api_type": "hosted_inference_api", - "huggingfacehub_api_token": "invalid_key", - }, - ) - - model.validate_credentials( - model="facebook/bart-base", - credentials={ - "huggingfacehub_api_type": "hosted_inference_api", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - }, - ) - - -def test_hosted_inference_api_invoke_model(): - model = HuggingfaceHubTextEmbeddingModel() - - result = model.invoke( - model="facebook/bart-base", - credentials={ - "huggingfacehub_api_type": "hosted_inference_api", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - }, - texts=["hello", "world"], - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 2 - - -def test_inference_endpoints_validate_credentials(): - model = HuggingfaceHubTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="all-MiniLM-L6-v2", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": "invalid_key", - "huggingface_namespace": "Dify-AI", - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_EMBEDDINGS_ENDPOINT_URL"), - "task_type": "feature-extraction", - }, - ) - - model.validate_credentials( - model="all-MiniLM-L6-v2", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - "huggingface_namespace": "Dify-AI", - "huggingfacehub_endpoint_url": 
os.environ.get("HUGGINGFACE_EMBEDDINGS_ENDPOINT_URL"), - "task_type": "feature-extraction", - }, - ) - - -def test_inference_endpoints_invoke_model(): - model = HuggingfaceHubTextEmbeddingModel() - - result = model.invoke( - model="all-MiniLM-L6-v2", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - "huggingface_namespace": "Dify-AI", - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_EMBEDDINGS_ENDPOINT_URL"), - "task_type": "feature-extraction", - }, - texts=["hello", "world"], - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 0 - - -def test_get_num_tokens(): - model = HuggingfaceHubTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="all-MiniLM-L6-v2", - credentials={ - "huggingfacehub_api_type": "inference_endpoints", - "huggingfacehub_api_token": os.environ.get("HUGGINGFACE_API_KEY"), - "huggingface_namespace": "Dify-AI", - "huggingfacehub_endpoint_url": os.environ.get("HUGGINGFACE_EMBEDDINGS_ENDPOINT_URL"), - "task_type": "feature-extraction", - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/huggingface_tei/__init__.py b/api/tests/integration_tests/model_runtime/huggingface_tei/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/huggingface_tei/test_embeddings.py b/api/tests/integration_tests/model_runtime/huggingface_tei/test_embeddings.py deleted file mode 100644 index b1fa9d5ca5097f..00000000000000 --- a/api/tests/integration_tests/model_runtime/huggingface_tei/test_embeddings.py +++ /dev/null @@ -1,70 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from 
core.model_runtime.model_providers.huggingface_tei.text_embedding.text_embedding import ( - HuggingfaceTeiTextEmbeddingModel, - TeiHelper, -) -from tests.integration_tests.model_runtime.__mock.huggingface_tei import MockTEIClass - -MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" - - -@pytest.fixture -def setup_tei_mock(request, monkeypatch: pytest.MonkeyPatch): - if MOCK: - monkeypatch.setattr(TeiHelper, "get_tei_extra_parameter", MockTEIClass.get_tei_extra_parameter) - monkeypatch.setattr(TeiHelper, "invoke_tokenize", MockTEIClass.invoke_tokenize) - monkeypatch.setattr(TeiHelper, "invoke_embeddings", MockTEIClass.invoke_embeddings) - monkeypatch.setattr(TeiHelper, "invoke_rerank", MockTEIClass.invoke_rerank) - yield - - if MOCK: - monkeypatch.undo() - - -@pytest.mark.parametrize("setup_tei_mock", [["none"]], indirect=True) -def test_validate_credentials(setup_tei_mock): - model = HuggingfaceTeiTextEmbeddingModel() - # model name is only used in mock - model_name = "embedding" - - if MOCK: - # TEI Provider will check model type by API endpoint, at real server, the model type is correct. - # So we dont need to check model type here. 
Only check in mock - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="reranker", - credentials={ - "server_url": os.environ.get("TEI_EMBEDDING_SERVER_URL", ""), - }, - ) - - model.validate_credentials( - model=model_name, - credentials={ - "server_url": os.environ.get("TEI_EMBEDDING_SERVER_URL", ""), - }, - ) - - -@pytest.mark.parametrize("setup_tei_mock", [["none"]], indirect=True) -def test_invoke_model(setup_tei_mock): - model = HuggingfaceTeiTextEmbeddingModel() - model_name = "embedding" - - result = model.invoke( - model=model_name, - credentials={ - "server_url": os.environ.get("TEI_EMBEDDING_SERVER_URL", ""), - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens > 0 diff --git a/api/tests/integration_tests/model_runtime/huggingface_tei/test_rerank.py b/api/tests/integration_tests/model_runtime/huggingface_tei/test_rerank.py deleted file mode 100644 index 45370d9fba41b0..00000000000000 --- a/api/tests/integration_tests/model_runtime/huggingface_tei/test_rerank.py +++ /dev/null @@ -1,78 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.huggingface_tei.rerank.rerank import ( - HuggingfaceTeiRerankModel, -) -from core.model_runtime.model_providers.huggingface_tei.text_embedding.text_embedding import TeiHelper -from tests.integration_tests.model_runtime.__mock.huggingface_tei import MockTEIClass - -MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" - - -@pytest.fixture -def setup_tei_mock(request, monkeypatch: pytest.MonkeyPatch): - if MOCK: - monkeypatch.setattr(TeiHelper, "get_tei_extra_parameter", 
MockTEIClass.get_tei_extra_parameter) - monkeypatch.setattr(TeiHelper, "invoke_tokenize", MockTEIClass.invoke_tokenize) - monkeypatch.setattr(TeiHelper, "invoke_embeddings", MockTEIClass.invoke_embeddings) - monkeypatch.setattr(TeiHelper, "invoke_rerank", MockTEIClass.invoke_rerank) - yield - - if MOCK: - monkeypatch.undo() - - -@pytest.mark.parametrize("setup_tei_mock", [["none"]], indirect=True) -def test_validate_credentials(setup_tei_mock): - model = HuggingfaceTeiRerankModel() - # model name is only used in mock - model_name = "reranker" - - if MOCK: - # TEI Provider will check model type by API endpoint, at real server, the model type is correct. - # So we dont need to check model type here. Only check in mock - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="embedding", - credentials={ - "server_url": os.environ.get("TEI_RERANK_SERVER_URL"), - }, - ) - - model.validate_credentials( - model=model_name, - credentials={ - "server_url": os.environ.get("TEI_RERANK_SERVER_URL"), - }, - ) - - -@pytest.mark.parametrize("setup_tei_mock", [["none"]], indirect=True) -def test_invoke_model(setup_tei_mock): - model = HuggingfaceTeiRerankModel() - # model name is only used in mock - model_name = "reranker" - - result = model.invoke( - model=model_name, - credentials={ - "server_url": os.environ.get("TEI_RERANK_SERVER_URL"), - }, - query="Who is Kasumi?", - docs=[ - 'Kasumi is a girl\'s name of Japanese origin meaning "mist".', - "Her music is a kawaii bass, a mix of future bass, pop, and kawaii music ", - "and she leads a team named PopiParty.", - ], - score_threshold=0.8, - ) - - assert isinstance(result, RerankResult) - assert len(result.docs) == 1 - assert result.docs[0].index == 0 - assert result.docs[0].score >= 0.8 diff --git a/api/tests/integration_tests/model_runtime/hunyuan/__init__.py b/api/tests/integration_tests/model_runtime/hunyuan/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git 
a/api/tests/integration_tests/model_runtime/hunyuan/test_llm.py b/api/tests/integration_tests/model_runtime/hunyuan/test_llm.py deleted file mode 100644 index b3049a06d9b98a..00000000000000 --- a/api/tests/integration_tests/model_runtime/hunyuan/test_llm.py +++ /dev/null @@ -1,90 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.hunyuan.llm.llm import HunyuanLargeLanguageModel - - -def test_validate_credentials(): - model = HunyuanLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="hunyuan-standard", credentials={"secret_id": "invalid_key", "secret_key": "invalid_key"} - ) - - model.validate_credentials( - model="hunyuan-standard", - credentials={ - "secret_id": os.environ.get("HUNYUAN_SECRET_ID"), - "secret_key": os.environ.get("HUNYUAN_SECRET_KEY"), - }, - ) - - -def test_invoke_model(): - model = HunyuanLargeLanguageModel() - - response = model.invoke( - model="hunyuan-standard", - credentials={ - "secret_id": os.environ.get("HUNYUAN_SECRET_ID"), - "secret_key": os.environ.get("HUNYUAN_SECRET_KEY"), - }, - prompt_messages=[UserPromptMessage(content="Hi")], - model_parameters={"temperature": 0.5, "max_tokens": 10}, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = HunyuanLargeLanguageModel() - - response = model.invoke( - model="hunyuan-standard", - credentials={ - "secret_id": os.environ.get("HUNYUAN_SECRET_ID"), - "secret_key": os.environ.get("HUNYUAN_SECRET_KEY"), - }, - 
prompt_messages=[UserPromptMessage(content="Hi")], - model_parameters={"temperature": 0.5, "max_tokens": 100, "seed": 1234}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = HunyuanLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="hunyuan-standard", - credentials={ - "secret_id": os.environ.get("HUNYUAN_SECRET_ID"), - "secret_key": os.environ.get("HUNYUAN_SECRET_KEY"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 14 diff --git a/api/tests/integration_tests/model_runtime/hunyuan/test_provider.py b/api/tests/integration_tests/model_runtime/hunyuan/test_provider.py deleted file mode 100644 index e3748c2ce713d4..00000000000000 --- a/api/tests/integration_tests/model_runtime/hunyuan/test_provider.py +++ /dev/null @@ -1,20 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.hunyuan.hunyuan import HunyuanProvider - - -def test_validate_provider_credentials(): - provider = HunyuanProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={"secret_id": "invalid_key", "secret_key": "invalid_key"}) - - provider.validate_provider_credentials( - credentials={ - "secret_id": os.environ.get("HUNYUAN_SECRET_ID"), - "secret_key": os.environ.get("HUNYUAN_SECRET_KEY"), - } - ) diff --git a/api/tests/integration_tests/model_runtime/hunyuan/test_text_embedding.py 
b/api/tests/integration_tests/model_runtime/hunyuan/test_text_embedding.py deleted file mode 100644 index 69d14dffeebf35..00000000000000 --- a/api/tests/integration_tests/model_runtime/hunyuan/test_text_embedding.py +++ /dev/null @@ -1,96 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.hunyuan.text_embedding.text_embedding import HunyuanTextEmbeddingModel - - -def test_validate_credentials(): - model = HunyuanTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="hunyuan-embedding", credentials={"secret_id": "invalid_key", "secret_key": "invalid_key"} - ) - - model.validate_credentials( - model="hunyuan-embedding", - credentials={ - "secret_id": os.environ.get("HUNYUAN_SECRET_ID"), - "secret_key": os.environ.get("HUNYUAN_SECRET_KEY"), - }, - ) - - -def test_invoke_model(): - model = HunyuanTextEmbeddingModel() - - result = model.invoke( - model="hunyuan-embedding", - credentials={ - "secret_id": os.environ.get("HUNYUAN_SECRET_ID"), - "secret_key": os.environ.get("HUNYUAN_SECRET_KEY"), - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 6 - - -def test_get_num_tokens(): - model = HunyuanTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="hunyuan-embedding", - credentials={ - "secret_id": os.environ.get("HUNYUAN_SECRET_ID"), - "secret_key": os.environ.get("HUNYUAN_SECRET_KEY"), - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 - - -def test_max_chunks(): - model = HunyuanTextEmbeddingModel() - - result = model.invoke( - model="hunyuan-embedding", - credentials={ - "secret_id": os.environ.get("HUNYUAN_SECRET_ID"), - "secret_key": 
os.environ.get("HUNYUAN_SECRET_KEY"), - }, - texts=[ - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - "hello", - "world", - ], - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 22 diff --git a/api/tests/integration_tests/model_runtime/jina/__init__.py b/api/tests/integration_tests/model_runtime/jina/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/jina/test_provider.py b/api/tests/integration_tests/model_runtime/jina/test_provider.py deleted file mode 100644 index e3b6128c59d8df..00000000000000 --- a/api/tests/integration_tests/model_runtime/jina/test_provider.py +++ /dev/null @@ -1,15 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.jina.jina import JinaProvider - - -def test_validate_provider_credentials(): - provider = JinaProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={"api_key": "hahahaha"}) - - provider.validate_provider_credentials(credentials={"api_key": os.environ.get("JINA_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/jina/test_text_embedding.py b/api/tests/integration_tests/model_runtime/jina/test_text_embedding.py deleted file mode 100644 index 290735ec49e625..00000000000000 --- a/api/tests/integration_tests/model_runtime/jina/test_text_embedding.py +++ /dev/null @@ -1,49 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.jina.text_embedding.text_embedding import 
JinaTextEmbeddingModel - - -def test_validate_credentials(): - model = JinaTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="jina-embeddings-v2-base-en", credentials={"api_key": "invalid_key"}) - - model.validate_credentials( - model="jina-embeddings-v2-base-en", credentials={"api_key": os.environ.get("JINA_API_KEY")} - ) - - -def test_invoke_model(): - model = JinaTextEmbeddingModel() - - result = model.invoke( - model="jina-embeddings-v2-base-en", - credentials={ - "api_key": os.environ.get("JINA_API_KEY"), - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 6 - - -def test_get_num_tokens(): - model = JinaTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="jina-embeddings-v2-base-en", - credentials={ - "api_key": os.environ.get("JINA_API_KEY"), - }, - texts=["hello", "world"], - ) - - assert num_tokens == 6 diff --git a/api/tests/integration_tests/model_runtime/localai/__init__.py b/api/tests/integration_tests/model_runtime/localai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/localai/test_embedding.py b/api/tests/integration_tests/model_runtime/localai/test_embedding.py deleted file mode 100644 index 7fd9f2b3000a31..00000000000000 --- a/api/tests/integration_tests/model_runtime/localai/test_embedding.py +++ /dev/null @@ -1,4 +0,0 @@ -""" -LocalAI Embedding Interface is temporarily unavailable due to -we could not find a way to test it for now. 
-""" diff --git a/api/tests/integration_tests/model_runtime/localai/test_llm.py b/api/tests/integration_tests/model_runtime/localai/test_llm.py deleted file mode 100644 index aa5436c34fc7dd..00000000000000 --- a/api/tests/integration_tests/model_runtime/localai/test_llm.py +++ /dev/null @@ -1,174 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import ParameterRule -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.localai.llm.llm import LocalAILanguageModel - - -def test_validate_credentials_for_chat_model(): - model = LocalAILanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="chinese-llama-2-7b", - credentials={ - "server_url": "hahahaha", - "completion_type": "completion", - }, - ) - - model.validate_credentials( - model="chinese-llama-2-7b", - credentials={ - "server_url": os.environ.get("LOCALAI_SERVER_URL"), - "completion_type": "completion", - }, - ) - - -def test_invoke_completion_model(): - model = LocalAILanguageModel() - - response = model.invoke( - model="chinese-llama-2-7b", - credentials={ - "server_url": os.environ.get("LOCALAI_SERVER_URL"), - "completion_type": "completion", - }, - prompt_messages=[UserPromptMessage(content="ping")], - model_parameters={"temperature": 0.7, "top_p": 1.0, "max_tokens": 10}, - stop=[], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_chat_model(): - model = LocalAILanguageModel() - - response = 
model.invoke( - model="chinese-llama-2-7b", - credentials={ - "server_url": os.environ.get("LOCALAI_SERVER_URL"), - "completion_type": "chat_completion", - }, - prompt_messages=[UserPromptMessage(content="ping")], - model_parameters={"temperature": 0.7, "top_p": 1.0, "max_tokens": 10}, - stop=[], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_stream_completion_model(): - model = LocalAILanguageModel() - - response = model.invoke( - model="chinese-llama-2-7b", - credentials={ - "server_url": os.environ.get("LOCALAI_SERVER_URL"), - "completion_type": "completion", - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.7, "top_p": 1.0, "max_tokens": 10}, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_invoke_stream_chat_model(): - model = LocalAILanguageModel() - - response = model.invoke( - model="chinese-llama-2-7b", - credentials={ - "server_url": os.environ.get("LOCALAI_SERVER_URL"), - "completion_type": "chat_completion", - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.7, "top_p": 1.0, "max_tokens": 10}, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def 
test_get_num_tokens(): - model = LocalAILanguageModel() - - num_tokens = model.get_num_tokens( - model="????", - credentials={ - "server_url": os.environ.get("LOCALAI_SERVER_URL"), - "completion_type": "chat_completion", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - tools=[ - PromptMessageTool( - name="get_current_weather", - description="Get the current weather in a given location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ) - ], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 77 - - num_tokens = model.get_num_tokens( - model="????", - credentials={ - "server_url": os.environ.get("LOCALAI_SERVER_URL"), - "completion_type": "chat_completion", - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 10 diff --git a/api/tests/integration_tests/model_runtime/localai/test_rerank.py b/api/tests/integration_tests/model_runtime/localai/test_rerank.py deleted file mode 100644 index 13c7df6d1473b0..00000000000000 --- a/api/tests/integration_tests/model_runtime/localai/test_rerank.py +++ /dev/null @@ -1,96 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.localai.rerank.rerank import LocalaiRerankModel - - -def test_validate_credentials_for_chat_model(): - model = LocalaiRerankModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="bge-reranker-v2-m3", - credentials={ - "server_url": "hahahaha", - "completion_type": "completion", - }, - ) - - 
model.validate_credentials( - model="bge-reranker-base", - credentials={ - "server_url": os.environ.get("LOCALAI_SERVER_URL"), - "completion_type": "completion", - }, - ) - - -def test_invoke_rerank_model(): - model = LocalaiRerankModel() - - response = model.invoke( - model="bge-reranker-base", - credentials={"server_url": os.environ.get("LOCALAI_SERVER_URL")}, - query="Organic skincare products for sensitive skin", - docs=[ - "Eco-friendly kitchenware for modern homes", - "Biodegradable cleaning supplies for eco-conscious consumers", - "Organic cotton baby clothes for sensitive skin", - "Natural organic skincare range for sensitive skin", - "Tech gadgets for smart homes: 2024 edition", - "Sustainable gardening tools and compost solutions", - "Sensitive skin-friendly facial cleansers and toners", - "Organic food wraps and storage solutions", - "Yoga mats made from recycled materials", - ], - top_n=3, - score_threshold=0.75, - user="abc-123", - ) - - assert isinstance(response, RerankResult) - assert len(response.docs) == 3 - - -def test__invoke(): - model = LocalaiRerankModel() - - # Test case 1: Empty docs - result = model._invoke( - model="bge-reranker-base", - credentials={"server_url": "https://example.com", "api_key": "1234567890"}, - query="Organic skincare products for sensitive skin", - docs=[], - top_n=3, - score_threshold=0.75, - user="abc-123", - ) - assert isinstance(result, RerankResult) - assert len(result.docs) == 0 - - # Test case 2: Valid invocation - result = model._invoke( - model="bge-reranker-base", - credentials={"server_url": "https://example.com", "api_key": "1234567890"}, - query="Organic skincare products for sensitive skin", - docs=[ - "Eco-friendly kitchenware for modern homes", - "Biodegradable cleaning supplies for eco-conscious consumers", - "Organic cotton baby clothes for sensitive skin", - "Natural organic skincare range for sensitive skin", - "Tech gadgets for smart homes: 2024 edition", - "Sustainable gardening tools and compost 
solutions", - "Sensitive skin-friendly facial cleansers and toners", - "Organic food wraps and storage solutions", - "Yoga mats made from recycled materials", - ], - top_n=3, - score_threshold=0.75, - user="abc-123", - ) - assert isinstance(result, RerankResult) - assert len(result.docs) == 3 - assert all(isinstance(doc, RerankDocument) for doc in result.docs) diff --git a/api/tests/integration_tests/model_runtime/localai/test_speech2text.py b/api/tests/integration_tests/model_runtime/localai/test_speech2text.py deleted file mode 100644 index 91b7a5752ce973..00000000000000 --- a/api/tests/integration_tests/model_runtime/localai/test_speech2text.py +++ /dev/null @@ -1,42 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.localai.speech2text.speech2text import LocalAISpeech2text - - -def test_validate_credentials(): - model = LocalAISpeech2text() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="whisper-1", credentials={"server_url": "invalid_url"}) - - model.validate_credentials(model="whisper-1", credentials={"server_url": os.environ.get("LOCALAI_SERVER_URL")}) - - -def test_invoke_model(): - model = LocalAISpeech2text() - - # Get the directory of the current file - current_dir = os.path.dirname(os.path.abspath(__file__)) - - # Get assets directory - assets_dir = os.path.join(os.path.dirname(current_dir), "assets") - - # Construct the path to the audio file - audio_file_path = os.path.join(assets_dir, "audio.mp3") - - # Open the file and get the file object - with open(audio_file_path, "rb") as audio_file: - file = audio_file - - result = model.invoke( - model="whisper-1", - credentials={"server_url": os.environ.get("LOCALAI_SERVER_URL")}, - file=file, - user="abc-123", - ) - - assert isinstance(result, str) - assert result == "1, 2, 3, 4, 5, 6, 7, 8, 9, 10" diff --git 
a/api/tests/integration_tests/model_runtime/minimax/__init__.py b/api/tests/integration_tests/model_runtime/minimax/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/minimax/test_embedding.py b/api/tests/integration_tests/model_runtime/minimax/test_embedding.py deleted file mode 100644 index cf2a28eb9eb2fe..00000000000000 --- a/api/tests/integration_tests/model_runtime/minimax/test_embedding.py +++ /dev/null @@ -1,58 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.minimax.text_embedding.text_embedding import MinimaxTextEmbeddingModel - - -def test_validate_credentials(): - model = MinimaxTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="embo-01", - credentials={"minimax_api_key": "invalid_key", "minimax_group_id": os.environ.get("MINIMAX_GROUP_ID")}, - ) - - model.validate_credentials( - model="embo-01", - credentials={ - "minimax_api_key": os.environ.get("MINIMAX_API_KEY"), - "minimax_group_id": os.environ.get("MINIMAX_GROUP_ID"), - }, - ) - - -def test_invoke_model(): - model = MinimaxTextEmbeddingModel() - - result = model.invoke( - model="embo-01", - credentials={ - "minimax_api_key": os.environ.get("MINIMAX_API_KEY"), - "minimax_group_id": os.environ.get("MINIMAX_GROUP_ID"), - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 16 - - -def test_get_num_tokens(): - model = MinimaxTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="embo-01", - credentials={ - "minimax_api_key": os.environ.get("MINIMAX_API_KEY"), - "minimax_group_id": os.environ.get("MINIMAX_GROUP_ID"), - }, - 
texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/minimax/test_llm.py b/api/tests/integration_tests/model_runtime/minimax/test_llm.py deleted file mode 100644 index aacde04d326caf..00000000000000 --- a/api/tests/integration_tests/model_runtime/minimax/test_llm.py +++ /dev/null @@ -1,143 +0,0 @@ -import os -from collections.abc import Generator -from time import sleep - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, UserPromptMessage -from core.model_runtime.entities.model_entities import AIModelEntity -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.minimax.llm.llm import MinimaxLargeLanguageModel - - -def test_predefined_models(): - model = MinimaxLargeLanguageModel() - model_schemas = model.predefined_models() - assert len(model_schemas) >= 1 - assert isinstance(model_schemas[0], AIModelEntity) - - -def test_validate_credentials_for_chat_model(): - sleep(3) - model = MinimaxLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="abab5.5-chat", credentials={"minimax_api_key": "invalid_key", "minimax_group_id": "invalid_key"} - ) - - model.validate_credentials( - model="abab5.5-chat", - credentials={ - "minimax_api_key": os.environ.get("MINIMAX_API_KEY"), - "minimax_group_id": os.environ.get("MINIMAX_GROUP_ID"), - }, - ) - - -def test_invoke_model(): - sleep(3) - model = MinimaxLargeLanguageModel() - - response = model.invoke( - model="abab5-chat", - credentials={ - "minimax_api_key": os.environ.get("MINIMAX_API_KEY"), - "minimax_group_id": os.environ.get("MINIMAX_GROUP_ID"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - }, - 
stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_stream_model(): - sleep(3) - model = MinimaxLargeLanguageModel() - - response = model.invoke( - model="abab5.5-chat", - credentials={ - "minimax_api_key": os.environ.get("MINIMAX_API_KEY"), - "minimax_group_id": os.environ.get("MINIMAX_GROUP_ID"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - }, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_invoke_with_search(): - sleep(3) - model = MinimaxLargeLanguageModel() - - response = model.invoke( - model="abab5.5-chat", - credentials={ - "minimax_api_key": os.environ.get("MINIMAX_API_KEY"), - "minimax_group_id": os.environ.get("MINIMAX_GROUP_ID"), - }, - prompt_messages=[UserPromptMessage(content="北京今天的天气怎么样")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - "plugin_web_search": True, - }, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - total_message = "" - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - total_message += chunk.delta.message.content - assert len(chunk.delta.message.content) > 0 if not chunk.delta.finish_reason else True - - assert "参考资料" in total_message - - -def test_get_num_tokens(): - sleep(3) - model = MinimaxLargeLanguageModel() - - response = 
model.get_num_tokens( - model="abab5.5-chat", - credentials={ - "minimax_api_key": os.environ.get("MINIMAX_API_KEY"), - "minimax_group_id": os.environ.get("MINIMAX_GROUP_ID"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - tools=[], - ) - - assert isinstance(response, int) - assert response == 30 diff --git a/api/tests/integration_tests/model_runtime/minimax/test_provider.py b/api/tests/integration_tests/model_runtime/minimax/test_provider.py deleted file mode 100644 index 575ed13eef124a..00000000000000 --- a/api/tests/integration_tests/model_runtime/minimax/test_provider.py +++ /dev/null @@ -1,25 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.minimax.minimax import MinimaxProvider - - -def test_validate_provider_credentials(): - provider = MinimaxProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials( - credentials={ - "minimax_api_key": "hahahaha", - "minimax_group_id": "123", - } - ) - - provider.validate_provider_credentials( - credentials={ - "minimax_api_key": os.environ.get("MINIMAX_API_KEY"), - "minimax_group_id": os.environ.get("MINIMAX_GROUP_ID"), - } - ) diff --git a/api/tests/integration_tests/model_runtime/mixedbread/__init__.py b/api/tests/integration_tests/model_runtime/mixedbread/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/mixedbread/test_provider.py b/api/tests/integration_tests/model_runtime/mixedbread/test_provider.py deleted file mode 100644 index 25c9f3ce8dffa9..00000000000000 --- a/api/tests/integration_tests/model_runtime/mixedbread/test_provider.py +++ /dev/null @@ -1,28 +0,0 @@ -import os -from unittest.mock import Mock, patch - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from 
core.model_runtime.model_providers.mixedbread.mixedbread import MixedBreadProvider - - -def test_validate_provider_credentials(): - provider = MixedBreadProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={"api_key": "hahahaha"}) - with patch("requests.post") as mock_post: - mock_response = Mock() - mock_response.json.return_value = { - "usage": {"prompt_tokens": 3, "total_tokens": 3}, - "model": "mixedbread-ai/mxbai-embed-large-v1", - "data": [{"embedding": [0.23333 for _ in range(1024)], "index": 0, "object": "embedding"}], - "object": "list", - "normalized": "true", - "encoding_format": "float", - "dimensions": 1024, - } - mock_response.status_code = 200 - mock_post.return_value = mock_response - provider.validate_provider_credentials(credentials={"api_key": os.environ.get("MIXEDBREAD_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/mixedbread/test_rerank.py b/api/tests/integration_tests/model_runtime/mixedbread/test_rerank.py deleted file mode 100644 index b65aab74aa96d3..00000000000000 --- a/api/tests/integration_tests/model_runtime/mixedbread/test_rerank.py +++ /dev/null @@ -1,100 +0,0 @@ -import os -from unittest.mock import Mock, patch - -import pytest - -from core.model_runtime.entities.rerank_entities import RerankResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.mixedbread.rerank.rerank import MixedBreadRerankModel - - -def test_validate_credentials(): - model = MixedBreadRerankModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="mxbai-rerank-large-v1", - credentials={"api_key": "invalid_key"}, - ) - with patch("httpx.post") as mock_post: - mock_response = Mock() - mock_response.json.return_value = { - "usage": {"prompt_tokens": 86, "total_tokens": 86}, - "model": "mixedbread-ai/mxbai-rerank-large-v1", - "data": [ - { - "index": 0, - "score": 
0.06762695, - "input": "Carson City is the capital city of the American state of Nevada. At the 2010 United " - "States Census, Carson City had a population of 55,274.", - "object": "text_document", - }, - { - "index": 1, - "score": 0.057403564, - "input": "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific " - "Ocean that are a political division controlled by the United States. Its capital is " - "Saipan.", - "object": "text_document", - }, - ], - "object": "list", - "top_k": 2, - "return_input": True, - } - mock_response.status_code = 200 - mock_post.return_value = mock_response - model.validate_credentials( - model="mxbai-rerank-large-v1", - credentials={ - "api_key": os.environ.get("MIXEDBREAD_API_KEY"), - }, - ) - - -def test_invoke_model(): - model = MixedBreadRerankModel() - with patch("httpx.post") as mock_post: - mock_response = Mock() - mock_response.json.return_value = { - "usage": {"prompt_tokens": 56, "total_tokens": 56}, - "model": "mixedbread-ai/mxbai-rerank-large-v1", - "data": [ - { - "index": 0, - "score": 0.6044922, - "input": "Kasumi is a girl name of Japanese origin meaning mist.", - "object": "text_document", - }, - { - "index": 1, - "score": 0.0703125, - "input": "Her music is a kawaii bass, a mix of future bass, pop, and kawaii music and she leads a " - "team named PopiParty.", - "object": "text_document", - }, - ], - "object": "list", - "top_k": 2, - "return_input": "true", - } - mock_response.status_code = 200 - mock_post.return_value = mock_response - result = model.invoke( - model="mxbai-rerank-large-v1", - credentials={ - "api_key": os.environ.get("MIXEDBREAD_API_KEY"), - }, - query="Who is Kasumi?", - docs=[ - "Kasumi is a girl name of Japanese origin meaning mist.", - "Her music is a kawaii bass, a mix of future bass, pop, and kawaii music and she leads a team named " - "PopiParty.", - ], - score_threshold=0.5, - ) - - assert isinstance(result, RerankResult) - assert len(result.docs) == 1 - assert 
result.docs[0].index == 0 - assert result.docs[0].score >= 0.5 diff --git a/api/tests/integration_tests/model_runtime/mixedbread/test_text_embedding.py b/api/tests/integration_tests/model_runtime/mixedbread/test_text_embedding.py deleted file mode 100644 index ca97a1895113f0..00000000000000 --- a/api/tests/integration_tests/model_runtime/mixedbread/test_text_embedding.py +++ /dev/null @@ -1,78 +0,0 @@ -import os -from unittest.mock import Mock, patch - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.mixedbread.text_embedding.text_embedding import MixedBreadTextEmbeddingModel - - -def test_validate_credentials(): - model = MixedBreadTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="mxbai-embed-large-v1", credentials={"api_key": "invalid_key"}) - with patch("requests.post") as mock_post: - mock_response = Mock() - mock_response.json.return_value = { - "usage": {"prompt_tokens": 3, "total_tokens": 3}, - "model": "mixedbread-ai/mxbai-embed-large-v1", - "data": [{"embedding": [0.23333 for _ in range(1024)], "index": 0, "object": "embedding"}], - "object": "list", - "normalized": "true", - "encoding_format": "float", - "dimensions": 1024, - } - mock_response.status_code = 200 - mock_post.return_value = mock_response - model.validate_credentials( - model="mxbai-embed-large-v1", credentials={"api_key": os.environ.get("MIXEDBREAD_API_KEY")} - ) - - -def test_invoke_model(): - model = MixedBreadTextEmbeddingModel() - - with patch("requests.post") as mock_post: - mock_response = Mock() - mock_response.json.return_value = { - "usage": {"prompt_tokens": 6, "total_tokens": 6}, - "model": "mixedbread-ai/mxbai-embed-large-v1", - "data": [ - {"embedding": [0.23333 for _ in range(1024)], "index": 0, "object": "embedding"}, - {"embedding": [0.23333 
for _ in range(1024)], "index": 1, "object": "embedding"}, - ], - "object": "list", - "normalized": "true", - "encoding_format": "float", - "dimensions": 1024, - } - mock_response.status_code = 200 - mock_post.return_value = mock_response - result = model.invoke( - model="mxbai-embed-large-v1", - credentials={ - "api_key": os.environ.get("MIXEDBREAD_API_KEY"), - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 6 - - -def test_get_num_tokens(): - model = MixedBreadTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="mxbai-embed-large-v1", - credentials={ - "api_key": os.environ.get("MIXEDBREAD_API_KEY"), - }, - texts=["ping"], - ) - - assert num_tokens == 1 diff --git a/api/tests/integration_tests/model_runtime/nomic/__init__.py b/api/tests/integration_tests/model_runtime/nomic/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/nomic/test_embeddings.py b/api/tests/integration_tests/model_runtime/nomic/test_embeddings.py deleted file mode 100644 index 52dc96ee95c1bc..00000000000000 --- a/api/tests/integration_tests/model_runtime/nomic/test_embeddings.py +++ /dev/null @@ -1,62 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.nomic.text_embedding.text_embedding import NomicTextEmbeddingModel -from tests.integration_tests.model_runtime.__mock.nomic_embeddings import setup_nomic_mock - - -@pytest.mark.parametrize("setup_nomic_mock", [["text_embedding"]], indirect=True) -def test_validate_credentials(setup_nomic_mock): - model = NomicTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - 
model="nomic-embed-text-v1.5", - credentials={ - "nomic_api_key": "invalid_key", - }, - ) - - model.validate_credentials( - model="nomic-embed-text-v1.5", - credentials={ - "nomic_api_key": os.environ.get("NOMIC_API_KEY"), - }, - ) - - -@pytest.mark.parametrize("setup_nomic_mock", [["text_embedding"]], indirect=True) -def test_invoke_model(setup_nomic_mock): - model = NomicTextEmbeddingModel() - - result = model.invoke( - model="nomic-embed-text-v1.5", - credentials={ - "nomic_api_key": os.environ.get("NOMIC_API_KEY"), - }, - texts=["hello", "world"], - user="foo", - ) - - assert isinstance(result, TextEmbeddingResult) - assert result.model == "nomic-embed-text-v1.5" - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 2 - - -@pytest.mark.parametrize("setup_nomic_mock", [["text_embedding"]], indirect=True) -def test_get_num_tokens(setup_nomic_mock): - model = NomicTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="nomic-embed-text-v1.5", - credentials={ - "nomic_api_key": os.environ.get("NOMIC_API_KEY"), - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/nomic/test_provider.py b/api/tests/integration_tests/model_runtime/nomic/test_provider.py deleted file mode 100644 index 6cad400c069555..00000000000000 --- a/api/tests/integration_tests/model_runtime/nomic/test_provider.py +++ /dev/null @@ -1,22 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.nomic.nomic import NomicAtlasProvider -from core.model_runtime.model_providers.nomic.text_embedding.text_embedding import NomicTextEmbeddingModel -from tests.integration_tests.model_runtime.__mock.nomic_embeddings import setup_nomic_mock - - -@pytest.mark.parametrize("setup_nomic_mock", [["text_embedding"]], indirect=True) -def test_validate_provider_credentials(setup_nomic_mock): - provider = 
NomicAtlasProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials( - credentials={ - "nomic_api_key": os.environ.get("NOMIC_API_KEY"), - }, - ) diff --git a/api/tests/integration_tests/model_runtime/novita/__init__.py b/api/tests/integration_tests/model_runtime/novita/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/novita/test_llm.py b/api/tests/integration_tests/model_runtime/novita/test_llm.py deleted file mode 100644 index 35fa0dc1904f7b..00000000000000 --- a/api/tests/integration_tests/model_runtime/novita/test_llm.py +++ /dev/null @@ -1,99 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.novita.llm.llm import NovitaLargeLanguageModel - - -def test_validate_credentials(): - model = NovitaLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="meta-llama/llama-3-8b-instruct", credentials={"api_key": "invalid_key", "mode": "chat"} - ) - - model.validate_credentials( - model="meta-llama/llama-3-8b-instruct", - credentials={"api_key": os.environ.get("NOVITA_API_KEY"), "mode": "chat"}, - ) - - -def test_invoke_model(): - model = NovitaLargeLanguageModel() - - response = model.invoke( - model="meta-llama/llama-3-8b-instruct", - credentials={"api_key": os.environ.get("NOVITA_API_KEY"), "mode": "completion"}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - 
UserPromptMessage(content="Who are you?"), - ], - model_parameters={ - "temperature": 1.0, - "top_p": 0.5, - "max_tokens": 10, - }, - stop=["How"], - stream=False, - user="novita", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = NovitaLargeLanguageModel() - - response = model.invoke( - model="meta-llama/llama-3-8b-instruct", - credentials={"api_key": os.environ.get("NOVITA_API_KEY"), "mode": "chat"}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={"temperature": 1.0, "top_k": 2, "top_p": 0.5, "max_tokens": 100}, - stream=True, - user="novita", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - - -def test_get_num_tokens(): - model = NovitaLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="meta-llama/llama-3-8b-instruct", - credentials={ - "api_key": os.environ.get("NOVITA_API_KEY"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 21 diff --git a/api/tests/integration_tests/model_runtime/novita/test_provider.py b/api/tests/integration_tests/model_runtime/novita/test_provider.py deleted file mode 100644 index 191af99db20bd9..00000000000000 --- a/api/tests/integration_tests/model_runtime/novita/test_provider.py +++ /dev/null @@ -1,19 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.novita.novita import NovitaProvider - - -def test_validate_provider_credentials(): - provider = NovitaProvider() - - 
with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials( - credentials={ - "api_key": os.environ.get("NOVITA_API_KEY"), - } - ) diff --git a/api/tests/integration_tests/model_runtime/oci/__init__.py b/api/tests/integration_tests/model_runtime/oci/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/oci/test_llm.py b/api/tests/integration_tests/model_runtime/oci/test_llm.py deleted file mode 100644 index 531f26a32e657c..00000000000000 --- a/api/tests/integration_tests/model_runtime/oci/test_llm.py +++ /dev/null @@ -1,130 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.oci.llm.llm import OCILargeLanguageModel - - -def test_validate_credentials(): - model = OCILargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="cohere.command-r-plus", - credentials={"oci_config_content": "invalid_key", "oci_key_content": "invalid_key"}, - ) - - model.validate_credentials( - model="cohere.command-r-plus", - credentials={ - "oci_config_content": os.environ.get("OCI_CONFIG_CONTENT"), - "oci_key_content": os.environ.get("OCI_KEY_CONTENT"), - }, - ) - - -def test_invoke_model(): - model = OCILargeLanguageModel() - - response = model.invoke( - model="cohere.command-r-plus", - credentials={ - "oci_config_content": os.environ.get("OCI_CONFIG_CONTENT"), - "oci_key_content": os.environ.get("OCI_KEY_CONTENT"), - }, - 
prompt_messages=[UserPromptMessage(content="Hi")], - model_parameters={"temperature": 0.5, "max_tokens": 10}, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = OCILargeLanguageModel() - - response = model.invoke( - model="meta.llama-3-70b-instruct", - credentials={ - "oci_config_content": os.environ.get("OCI_CONFIG_CONTENT"), - "oci_key_content": os.environ.get("OCI_KEY_CONTENT"), - }, - prompt_messages=[UserPromptMessage(content="Hi")], - model_parameters={"temperature": 0.5, "max_tokens": 100, "seed": 1234}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_invoke_model_with_function(): - model = OCILargeLanguageModel() - - response = model.invoke( - model="cohere.command-r-plus", - credentials={ - "oci_config_content": os.environ.get("OCI_CONFIG_CONTENT"), - "oci_key_content": os.environ.get("OCI_KEY_CONTENT"), - }, - prompt_messages=[UserPromptMessage(content="Hi")], - model_parameters={"temperature": 0.5, "max_tokens": 100, "seed": 1234}, - stream=False, - user="abc-123", - tools=[ - PromptMessageTool( - name="get_current_weather", - description="Get the current weather in a given location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. 
San Francisco, CA"}, - "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, - }, - "required": ["location"], - }, - ) - ], - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_get_num_tokens(): - model = OCILargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="cohere.command-r-plus", - credentials={ - "oci_config_content": os.environ.get("OCI_CONFIG_CONTENT"), - "oci_key_content": os.environ.get("OCI_KEY_CONTENT"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 18 diff --git a/api/tests/integration_tests/model_runtime/oci/test_provider.py b/api/tests/integration_tests/model_runtime/oci/test_provider.py deleted file mode 100644 index 2c7107c7ccfe45..00000000000000 --- a/api/tests/integration_tests/model_runtime/oci/test_provider.py +++ /dev/null @@ -1,20 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.oci.oci import OCIGENAIProvider - - -def test_validate_provider_credentials(): - provider = OCIGENAIProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials( - credentials={ - "oci_config_content": os.environ.get("OCI_CONFIG_CONTENT"), - "oci_key_content": os.environ.get("OCI_KEY_CONTENT"), - } - ) diff --git a/api/tests/integration_tests/model_runtime/oci/test_text_embedding.py b/api/tests/integration_tests/model_runtime/oci/test_text_embedding.py deleted file mode 100644 index 032c5c681a7aeb..00000000000000 --- a/api/tests/integration_tests/model_runtime/oci/test_text_embedding.py +++ /dev/null @@ -1,58 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from 
core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.oci.text_embedding.text_embedding import OCITextEmbeddingModel - - -def test_validate_credentials(): - model = OCITextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="cohere.embed-multilingual-v3.0", - credentials={"oci_config_content": "invalid_key", "oci_key_content": "invalid_key"}, - ) - - model.validate_credentials( - model="cohere.embed-multilingual-v3.0", - credentials={ - "oci_config_content": os.environ.get("OCI_CONFIG_CONTENT"), - "oci_key_content": os.environ.get("OCI_KEY_CONTENT"), - }, - ) - - -def test_invoke_model(): - model = OCITextEmbeddingModel() - - result = model.invoke( - model="cohere.embed-multilingual-v3.0", - credentials={ - "oci_config_content": os.environ.get("OCI_CONFIG_CONTENT"), - "oci_key_content": os.environ.get("OCI_KEY_CONTENT"), - }, - texts=["hello", "world", " ".join(["long_text"] * 100), " ".join(["another_long_text"] * 100)], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 4 - # assert result.usage.total_tokens == 811 - - -def test_get_num_tokens(): - model = OCITextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="cohere.embed-multilingual-v3.0", - credentials={ - "oci_config_content": os.environ.get("OCI_CONFIG_CONTENT"), - "oci_key_content": os.environ.get("OCI_KEY_CONTENT"), - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/ollama/__init__.py b/api/tests/integration_tests/model_runtime/ollama/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/ollama/test_llm.py b/api/tests/integration_tests/model_runtime/ollama/test_llm.py deleted file mode 100644 index 58a1339f506458..00000000000000 --- 
a/api/tests/integration_tests/model_runtime/ollama/test_llm.py +++ /dev/null @@ -1,222 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.ollama.llm.llm import OllamaLargeLanguageModel - - -def test_validate_credentials(): - model = OllamaLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="mistral:text", - credentials={ - "base_url": "http://localhost:21434", - "mode": "chat", - "context_size": 2048, - "max_tokens": 2048, - }, - ) - - model.validate_credentials( - model="mistral:text", - credentials={ - "base_url": os.environ.get("OLLAMA_BASE_URL"), - "mode": "chat", - "context_size": 2048, - "max_tokens": 2048, - }, - ) - - -def test_invoke_model(): - model = OllamaLargeLanguageModel() - - response = model.invoke( - model="mistral:text", - credentials={ - "base_url": os.environ.get("OLLAMA_BASE_URL"), - "mode": "chat", - "context_size": 2048, - "max_tokens": 2048, - }, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={"temperature": 1.0, "top_k": 2, "top_p": 0.5, "num_predict": 10}, - stop=["How"], - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = OllamaLargeLanguageModel() - - response = model.invoke( - model="mistral:text", - credentials={ - "base_url": os.environ.get("OLLAMA_BASE_URL"), - "mode": "chat", - "context_size": 2048, - "max_tokens": 2048, - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI 
assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={"temperature": 1.0, "top_k": 2, "top_p": 0.5, "num_predict": 10}, - stop=["How"], - stream=True, - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - - -def test_invoke_completion_model(): - model = OllamaLargeLanguageModel() - - response = model.invoke( - model="mistral:text", - credentials={ - "base_url": os.environ.get("OLLAMA_BASE_URL"), - "mode": "completion", - "context_size": 2048, - "max_tokens": 2048, - }, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={"temperature": 1.0, "top_k": 2, "top_p": 0.5, "num_predict": 10}, - stop=["How"], - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_completion_model(): - model = OllamaLargeLanguageModel() - - response = model.invoke( - model="mistral:text", - credentials={ - "base_url": os.environ.get("OLLAMA_BASE_URL"), - "mode": "completion", - "context_size": 2048, - "max_tokens": 2048, - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={"temperature": 1.0, "top_k": 2, "top_p": 0.5, "num_predict": 10}, - stop=["How"], - stream=True, - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - - -def test_invoke_completion_model_with_vision(): - model = OllamaLargeLanguageModel() - - result = model.invoke( - model="llava", - credentials={ - "base_url": os.environ.get("OLLAMA_BASE_URL"), - "mode": "completion", - "context_size": 2048, - "max_tokens": 
2048, - }, - prompt_messages=[ - UserPromptMessage( - content=[ - TextPromptMessageContent( - data="What is this in this picture?", - ), - ImagePromptMessageContent( - data="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE4AAABMCAYAAADDYoEWAAAMQGlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnluSkEBoAQSkhN4EkRpASggt9I4gKiEJEEqMgaBiRxcVXLuIgA1dFVGwAmJBETuLYu+LBRVlXSzYlTcpoOu+8r35vrnz33/O/OfMmbllAFA7zhGJclF1APKEBeLYYH/6uOQUOukpIAEdoAy0gA2Hmy9iRkeHA1iG2r+Xd9cBIm2v2Eu1/tn/X4sGj5/PBQCJhjidl8/Ng/gAAHg1VyQuAIAo5c2mFoikGFagJYYBQrxIijPluFqK0+V4j8wmPpYFcTsASiocjjgTANVLkKcXcjOhhmo/xI5CnkAIgBodYp+8vMk8iNMgtoY2Ioil+oz0H3Qy/6aZPqzJ4WQOY/lcZEUpQJAvyuVM/z/T8b9LXq5kyIclrCpZ4pBY6Zxh3m7mTA6TYhWI+4TpkVEQa0L8QcCT2UOMUrIkIQlye9SAm8+COYMrDVBHHicgDGIDiIOEuZHhCj49QxDEhhjuEHSaoIAdD7EuxIv4+YFxCptN4smxCl9oY4aYxVTwZzlimV+pr/uSnASmQv91Fp+t0MdUi7LikyCmQGxeKEiMhFgVYof8nLgwhc3YoixW5JCNWBIrjd8c4li+MNhfro8VZoiDYhX2pXn5Q/PFNmUJ2JEKvK8gKz5Enh+sncuRxQ/ngl3iC5kJQzr8/HHhQ3Ph8QMC5XPHnvGFCXEKnQ+iAv9Y+VicIsqNVtjjpvzcYClvCrFLfmGcYiyeWAA3pFwfzxAVRMfL48SLsjmh0fJ48OUgHLBAAKADCazpYDLIBoLOvqY+eCfvCQIcIAaZgA/sFczQiCRZjxBe40AR+BMiPsgfHucv6+WDQsh/HWblV3uQIestlI3IAU8gzgNhIBfeS2SjhMPeEsFjyAj+4Z0DKxfGmwurtP/f80Psd4YJmXAFIxnySFcbsiQGEgOIIcQgog2uj/vgXng4vPrB6oQzcI+heXy3JzwhdBEeEq4Rugm3JgmKxT9FGQG6oX6QIhfpP+YCt4Sarrg/7g3VoTKug+sDe9wF+mHivtCzK2RZirilWaH/pP23GfywGgo7siMZJY8g+5Gtfx6paqvqOqwizfWP+ZHHmj6cb9Zwz8/+WT9knwfbsJ8tsUXYfuwMdgI7hx3BmgAda8WasQ7sqBQP767Hst015C1WFk8O1BH8w9/Qykozme9Y59jr+EXeV8CfJn1HA9Zk0XSxIDOrgM6EXwQ+nS3kOoyiOzk6OQMg/b7IX19vYmTfDUSn4zs3/w8AvFsHBwcPf+dCWwHY6w4f/0PfOWsG/HQoA3D2EFciLpRzuPRCgG8JNfik6QEjYAas4XycgBvwAn4gEISCKBAPksFEGH0W3OdiMBXMBPNACSgDy8EaUAk2gi1gB9gN9oEmcAScAKfBBXAJXAN34O7pAS9AP3gHPiMIQkKoCA3RQ4wRC8QOcUIYiA8SiIQjsUgykoZkIkJEgsxE5iNlyEqkEtmM1CJ7kUPICeQc0oXcQh4gvchr5BOKoSqoFmqIWqKjUQbKRMPQeHQCmolOQYvQBehStAKtQXehjegJ9AJ6De1GX6ADGMCUMR3MBLPHGBgLi8JSsAxMjM3GSrFyrAarx1rgOl/BurE+7CNOxGk4HbeHOzgET8C5+BR8Nr4Er8R34I14O34Ff4D3498IVIIBwY7gSWATxhEyCVMJJYRywjbCQcIp+Cz1EN4RiUQdohXRHT6LycRs4gziEuJ6YgPxOLGL+Ig4QCKR9Eh2JG9S
FIlDKiCVkNaRdpFaSZdJPaQPSspKxkpOSkFKKUpCpWKlcqWdSseULis9VfpMVidbkD3JUWQeeTp5GXkruYV8kdxD/kzRoFhRvCnxlGzKPEoFpZ5yinKX8kZZWdlU2UM5RlmgPFe5QnmP8lnlB8ofVTRVbFVYKqkqEpWlKttVjqvcUnlDpVItqX7UFGoBdSm1lnqSep/6QZWm6qDKVuWpzlGtUm1Uvaz6Uo2sZqHGVJuoVqRWrrZf7aJanzpZ3VKdpc5Rn61epX5I/Yb6gAZNY4xGlEaexhKNnRrnNJ5pkjQtNQM1eZoLNLdontR8RMNoZjQWjUubT9tKO0Xr0SJqWWmxtbK1yrR2a3Vq9WtrartoJ2pP067SPqrdrYPpWOqwdXJ1luns07mu82mE4QjmCP6IxSPqR1we8V53pK6fLl+3VLdB95ruJz26XqBejt4KvSa9e/q4vq1+jP5U/Q36p/T7RmqN9BrJHVk6ct/I2waoga1BrMEMgy0GHQYDhkaGwYYiw3WGJw37jHSM/IyyjVYbHTPqNaYZ+xgLjFcbtxo/p2vTmfRcegW9nd5vYmASYiIx2WzSafLZ1Mo0wbTYtMH0nhnFjGGWYbbarM2s39zYPMJ8pnmd+W0LsgXDIstircUZi/eWVpZJlgstmyyfWelasa2KrOqs7lpTrX2tp1jXWF+1IdowbHJs1ttcskVtXW2zbKtsL9qhdm52Arv1dl2jCKM8RglH1Yy6Ya9iz7QvtK+zf+Cg4xDuUOzQ5PBytPnolNErRp8Z/c3R1THXcavjnTGaY0LHFI9pGfPaydaJ61TldNWZ6hzkPMe52fmVi50L32WDy01XmmuE60LXNtevbu5uYrd6t153c/c092r3GwwtRjRjCeOsB8HD32OOxxGPj55ungWe+zz/8rL3yvHa6fVsrNVY/titYx95m3pzvDd7d/vQfdJ8Nvl0+5r4cnxrfB/6mfnx/Lb5PWXaMLOZu5gv/R39xf4H/d+zPFmzWMcDsIDggNKAzkDNwITAysD7QaZBmUF1Qf3BrsEzgo+HEELCQlaE3GAbsrnsWnZ/qHvorND2MJWwuLDKsIfhtuHi8JYINCI0YlXE3UiLSGFkUxSIYketiroXbRU9JfpwDDEmOqYq5knsmNiZsWfiaHGT4nbGvYv3j18WfyfBOkGS0JaolpiaWJv4PikgaWVS97jR42aNu5CsnyxIbk4hpSSmbEsZGB84fs34nlTX1JLU6xOsJkybcG6i/sTciUcnqU3iTNqfRkhLStuZ9oUTxanhDKSz06vT+7ks7lruC54fbzWvl+/NX8l/muGdsTLjWaZ35qrM3izfrPKsPgFLUCl4lR2SvTH7fU5Uzvacwdyk3IY8pby0vENCTWGOsH2y0eRpk7tEdqISUfcUzylrpvSLw8Tb8pH8CfnNBVrwR75DYi35RfKg0KewqvDD1MSp+6dpTBNO65huO33x9KdFQUW/zcBncGe0zTSZOW/mg1nMWZtnI7PTZ7fNMZuzYE7P3OC5O+ZR5uXM+73YsXhl8dv5SfNbFhgumLvg0S/Bv9SVqJaIS24s9Fq4cRG+SLCoc7Hz4nWLv5XySs+XOZaVl31Zwl1y/tcxv1b8Org0Y2nnMrdlG5YTlwuXX1/hu2LHSo2VRSsfrYpY1biavrp09ds1k9acK3cp37iWslaytrsivKJ5nfm65eu+VGZVXqvyr2qoNqheXP1+PW/95Q1+G+o3Gm4s2/hpk2DTzc3BmxtrLGvKtxC3FG55sjVx65nfGL/VbtPfVrbt63bh9u4dsTvaa91ra3ca7FxWh9ZJ6np3pe66tDtgd3O9ff3mBp2Gsj1gj2TP871pe6/vC9vXtp+xv/6AxYHqg7SDpY1I4/TG/qaspu7m5OauQ6GH2lq8Wg4edji8/YjJkaqj2keXHaMcW3BssLWodeC46HjficwTj9omtd05Oe7k1faY9s5TYafOng46ffIM80zrWe+zR855njt0nnG+6YLb
hcYO146Dv7v+frDTrbPxovvF5ksel1q6xnYdu+x7+cSVgCunr7KvXrgWea3resL1mzdSb3Tf5N18div31qvbhbc/35l7l3C39J76vfL7Bvdr/rD5o6Hbrfvog4AHHQ/jHt55xH304nH+4y89C55Qn5Q/NX5a+8zp2ZHeoN5Lz8c/73khevG5r+RPjT+rX1q/PPCX318d/eP6e16JXw2+XvJG7832ty5v2waiB+6/y3v3+X3pB70POz4yPp75lPTp6eepX0hfKr7afG35Fvbt7mDe4KCII+bIfgUwWNGMDABebweAmgwADZ7PKOPl5z9ZQeRnVhkC/wnLz4iy4gZAPfx/j+mDfzc3ANizFR6/oL5aKgDRVADiPQDq7Dxch85qsnOltBDhOWBT5Nf0vHTwb4r8zPlD3D+3QKrqAn5u/wWdZ3xtG7qP3QAAADhlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAAqACAAQAAAABAAAATqADAAQAAAABAAAATAAAAADhTXUdAAARnUlEQVR4Ae2c245bR3aGi4fulizFHgUzQAYIggBB5klymfeaZ8hDBYjvAiRxkMAGkowRWx7JktjcZL7vX1Uku62Burkl5YbV5q7Tqqq1/v3XqgMpL95tbvftEh6NwPLRLS4NgsAFuDOJcAHuAtyZCJzZ7MK4C3BnInBmswvjLsCdicCZzS6MOxO49Znt0uz3//CPbbv6srXFrq0W9Q6Wi0VbLPn4R8x/jSLiu3nrl8s9dcartlwtKdmTbm21XranN6v27Mm6XV8t25fP1+3Pn1+1r4if3Czbk+t9u1rR6f9jmAXc1P6sbaevQGbfdgGJeA8ke0AQsCYYgiYgPR1QyVO+3wvcMm2WO0G2PeWkX79btp839AG4//UjYC62gDsB2rI9f7pov3q2bX/9F1ftBWAufTufOcwCrnTtR90dOdHoNgCJeAbUkuM5TsWAW5W9gfkE83ZkUHg0oAyAwbm927a2ebVoP/xx2f7jD1uYuG9/89tF+/VXK1hq+88TZgG32O1g2r7tpRdBM8fUTM7pyR8SYddgxkJErUszHti7U44CpzyEo16syNtx+qgy+1og7RMetpev9+3rb3bt+c2u/ebFsv3uL1ftiqn+qcMs4HY7jNQpEfadNU5VqeHUTJkgUbaPDxRADdZ8jU9LHoJYnwLUtgWN4ObDC7Kdr8Hp7d9qMTW8gt23V1zyvPrD1H56e9t+99vr9uJLprBDfaIw69U4dQRCIw2JdVIjbUzecj+7qYyPpZHiAbDaJwsXyMhQEQ0pq6sAp7hMS2XGqykdA2iy4EUtF6v206ur9k/fbNo//+frtt2OaW/rjxtmAaeNGqihBY5xfVQzQEZfoSH0KHgkrbD/CX6vPIqlSTU61vVCovRSbEwbIS851vj23Q+tff3vu/bzu5I7tvs4qVnADTa5FCbNC86qCLN2E1MxKKroYB2pgSz2RLbbVcVkSJhOKxIDjGxn+nSuqes2JlKuG8fA/IzPXazbj68X7et/27UfX7GifORwOuSju47h/c3beKfRFO74CNA04YP0ZT2/YzERFGojc9pmDG47/wyDZwJjiX4wwJNer1dZPJbs5/xzK5Ppzp7SQZBszNy22U7tX7/dtFdvJrv8aGE2cDJLoPycBgHSgICJUQLo8nmUo6y7oH0S5Lu/FGhDQULCfIooATw3yyOQQ46eYVpYiaBMTFtAFPR307r9y3fbdvsRfd5Rg6HJI2Lt1qaAF6TEqoxWdVdYSHawezCvAHLjW7Jh2QGcUkDDT4Og2OfSFRVkxipcAJUZARC5FVRbeRpB1hVY6r25XQHexIZ96Hfa++PTs4Dbi8rQg7imWQG27/uEgCTCssk/WWg7GwJWwDQ36PceGzQ+x7jOtgNogkIIpsZiFMdXoEfOPUlh3l5ulu2/X6bJ7Mc84Bw+xgOKzJqM0VKm8WYlVMqt61gFKNtQKeZ6o7Ls/aqEeYooJXDIZ9ui
T0uZ5UxPUJNlYdoAK62qHfM7unz3/bb9/Ha+v3u/tn3AD0XOrnxAZdpNYZILgoxyGk4BqMCbssq66dXv6RdFkiB6Rj2u3N1npiMw1dQjF4oJW/kzy6VdMRFA9Xd8VvhCLxCyYUYkvhHZb7+fotvdUR6XmwXcYI1DangAA6yspgBj/dRjp6L+RbmSPaaxuuMnGEeVAhBF4pSapAFG5gUo60rAHmpVtcz0sR2aBZW8NAB9+W7dXr9N0dmPmUcu10pWrq7kQQvBQXn1dUsgoM4ej12TtyBknG51PEMGOV2TLLVZ/GLvLMBYHsYJhg7fuMBx6tq3LFu7aBxxD9jKFiO7Thbwcv7n5dS+/ML0eWEWcBqoptk+mEQp2aTG+rbmBYA+D6MyMwMAdepKsX5QpnglFZyZ5k4tDYsI/Y1pF7CRq22HoHXgGEOwgodvgH79INnW3tlFIVVQvkBXg1dvF3z27fkTGzw+zALOPZluVoVkV4yLHoBB3VBJUNyo6uEWXAyIkruC2OQjbVeppxkm8+iti2mySsM1EPYGKBcEyul3LKTW1+pr+wLRstwP0J8a2K95Txf/+6q1ZzeUDEXt/oFhHnA4fJYCBtawYlWmlsrJBEHhP43bi9Rq1Z0ymlK3Z/QCRqA5YfaNLZJWEACn929eluXlUGO8CgMrHWYi441S2tsFebLRL5RWL0e0nL64SEEf2sjMR4ZZwA0Ddfziclz1eN8yDn1qAaHSq3G0FEQXjABDo51sJVNyGnA0QlAPL4LOApzMo0mY1sUFbQBj8xTzYhKrROYF5VGIftR1uW3+3uiWU8XnBw7l3HIYVG/P/djYgMZoyrTJrci0n2qPZVnNFV913viW6btGzsXBT6aW3VKmsauVTFOc2DxpP5YJYLBBeCUixE71IlGBR2EF+6OugHbP12Ddoj29HgIPj+cxDiPDFGINzB8sKhLh0Ui4gOgDI8deb8FiwYxlteWhLHWTlmOzhkxLAObPIkFqS8+bbG5BdgWiAmJTwXdqZ7oysktzdKC/BWMWiAJNpyP0ZPTMItRy7fTi2RB4eDwLuIkpCma1gob/Dsw7zcKAMf3txiCot8c42ZCDPu3WAqRMJAGEk4cACaLzSZsFRhAE9QoAtXcwTX92XDT0sxTQXJYHdDJin0KfVN8PmzNvnOYBx5XNlik4giumihb7tJ60ezgNhgXuXgRNttxunZYAj7uzbL3nUA67rm5KJWrJCyTfIVwBMh3bTkD8TqFYp6uv8RwrgJpAZmHHScqv0qWeKT48NujhAuELekyYBdz9gXJQ53DvDh3tU62xTtN8bQhzzE9OccAK8wA2ez2k3cNtN7wM/RZs9M5NkNZoee0H2rmhLr8miPV9roAZtN1RHV/gDb7EoUtXKeXjYXUBN0oeFs8CbrtlhZRGPZSSZNyI9gA+TBFkelFNWxgEgCtG3wDiFqEr5Jz6y/U1DAM4QLxi2l7DNhl3w/epNTUFWGbXC7HrMQMz7WUbf8AaDQ46DYXuxLoJX6CFRzvuiPyJzCzgZIoKyqgKAx1yAGPQUWfa+GoDsqwDJNnHLF9juSz0i5VrpvqSwmsQul5dtyfrfX1zL3i0WdHHSjaKVjf0T5k7ABtxlEHbwxusgjydAY8N84BjvAx5GLfMqBW0VJEZ+pwKskQnbpnFHPzpwWo/bzkGvX51296+bu1v/+qL9usXT9rTJ07Bzh9k9HEPsxNhwhh6xLXKo3fXWf3iMkrBBz9nAbflbHm6ONxhXp8/NW26lkSleIEV9FBVI+o6ihjmffPDt+3v/+5Z+82vnsZw/fyercweB2d7wzA8mfuPEknpXTnHvQsoPd1v/aD8LODw+AxbAw/QjnEfv69u5kz6dtOiW2R6YmW7vd0C3qK94wcjf/zxZ1bRXfvqGT6U3f2G/Z6AesqotgJX477PNVmTmxfiwTSS5irqz2ybEHD6PzbMAk7lS/0BxgkTqPAUYBiAkQpTLLdKxe1D4Lbsp968uW1vXk+Z
rnpsN7yL1TbmbvCl4GcPPPStZWyNcM9s++9y92ruZu2CT21q7lZ9KDcLuC3WbmGG42uA30EISOVkFynt1BBialOliF/wZHqGTa1tOfq8fbMHPL6N2iBPW2d7HfxZdWnreiN49UL0dfhLR6tBSVVwNo+TQ1U5IsHvQU4Dcry7bGNOix+SngVcwAhYpZjTQxaNMABLLLtUFEAMEwi4kk63fGDbLTcVm82ubd7hNylzEXCa6SPdz2Vf5iUobe0jAFIq8+JHT8CjGeUjHFOj5E7MIO4THxvOaHIcwu2IOKiznyg89BTEXi6WssO8B36vkLa33Pv7/QRbEtm21c/BtIm9Yb4ho19PDg4g09aeucySdpzq3BfVx6WQqh7MkLOSkHLf2olEKni4n7xznh0VH4jnAYdy6hfVSZTvUmF54f2cU9d9XmlhvUyTlbkxIT0BWtgH4wRRgPMy7EFbAwi8ojzbNyqtH/7coWxnUHyE+rmYjbs3NCnqdwIbbM/GZ4RZwDleVskO3viSBhWjSu2Pxj7JU4bsqrzTU5YZQ7xKu73Bb8bAbo+s28NStxEyb8e+K1UAKXhOVivK7x0RUANf3zEw/smJpsr37cad9RlhFnCbzQYwfN36I+5qwxgVwRA/vOHxlneeMiaux9lymN5tTTttkZN5mbZwCYsLM550taA+zJM5gsdHsGSdQTbngN7ZlC/JrRhXIcorRJvVcp2pnjzdy+0nnErOCbOAE5x8d4oVCy4xMSFGetjfgWJ3MQFHdomxZbUwwC4B84YlzBNojUEmxmqO1tVC4VcVopUzKuXK+XArUeDVTyq85wv7xKqHsel1dfIUkl8zUXcFm8eUH7IPjWcBp8J5mYxWcWmbclhlyEIAMJm2HbSwDCHZGD9IuR1UH4MhaZ4HOAIQIJOrIxfjxOFRUMNQq8wI9EH5WNVJdcEje22ofxs3K6PlQ+OZwA2ghrFSKhiEVSqh/5JJcfodKBnntLac7wb5CKLpAs+0RguYuAhoNh2CRV1dTVFhqWhRn/u+tOsMtTph6JhOkAWsQDz1K3NHeHyYBZyK70BG5oy3SyqGumoaAhr1Aiggnm8FzXr3cQWSq++p8seM10v6LW9Elgh5kyGINXMdi1xspw2LRHwqMjJTV2KdU9c2eQ1SkXDDHL2aYf2MprVp1dFrtcBlAWB/sNuxMoJIzEfRqhMk04qXfM0n8yVDaa/DRLp1GuGSKhNz65ZEOQUSdyD0Y/adRSojsxjoz2jnNFdN3l/S+sUvnqbDsx+zgCvQMJzhPaCrlouCLBvbA43x68DhsAc7DxpTr0y39VAMBCfpSlpSUMggzRe8X4bIAWRYJqVJj6t7feMV/9Bkfeb+bYw2Czg78S3GwWtEQEPRWFMMEDAZhVTiMaWLnZZRxSexfaStPR9DAXbMj5Qs479Dm8PqqYCNEpUTVAe/GpLC3vH16hI64zkLuB1XQVsdFkED8ps40oLjj2sMAdbFwGlKRjbW6UHAFZaRJVegIpeWVafZhQ4yHahUm+5VyfOwXYFHTX8DKUNSn+fCcsN3qOd8AT3GGPEs4EYnxho9YlOnU1WTUj98GbLKWCawI5wk71DiBMoh+qjYfgXUc+nNlW+rXuqjOrknPAs4sRoHcvvNguDZNEChYOoBUUZ175z9nMBZnQ6cnncgS7uDnt3BJ49Y8axqPYLZ0gVEb2DaICyHtOUM5t2eP7AJexWaGWYBVzcdsqneoAAViyzzo3ZsC1Jeq2qBKVhlkIxDsuSRrSY6/6S6eaaFjD+B4BGmMo9X9M06kcAdMq0qU5eT+lBBc8+GqaVmCc989iHP6yVvOcr4qE8ZLijVZ8VleC/5xWDWFmN6ow6aIKX75EfdL5rfKxBJgAcwwV/zeXrFjyqqo3uy52dnMa5oU4O7svo7YMNgWrFKdsk6WBXmmS82HuKsuADjHZFGi5iBIv+9qnn/qt+qSh3JTFNjPvWDiqpnA0SexYB/ijm6q5qP85wFnIZrXQHg
illpVesHh9QVaAWWAJccfo/VNrOcbmrbYn/vCR9gy2m1aUH2WOa/rv4UoKnhPODowC2Gx6jQo4Nox4ZinDL392ssIHFSZWa1rTZJD/wSy0Kn34eDpwZvP1w96+dmH25zrsQs4KSLP4GAawWSjhnFZZQFmUZxOZSTj/ne2yUhIHCjRIlFKcIU0x852RjZTGGlDdaQrkxk7MPrJr/gzg17r4vgJ3rMAk4/wmQDE7wJhg+fFV1xaMGiMqnXaFc5jd4FjCCIRAEmAO5aPE7lzsw0ZelHYJB0PCWscErqOJcsrbllGmhmzE/7mAXcPof544Wlqg6wTuORtvKQzjV2gVC+shaNMhc24v8iIloGmS3ogc7bD9sS884Oi0kEP89jFnDX++/hCtPVtT7kwaxOkZpmxQ/L9vgdj1r+NCtAwQ6/A9DXMXnBqZgoHDdXP7Wna/Id6PRCum7DiREqcg1UPw9Yp6MsLv/HwlM4Hp7WQ1/CGQhcgDsDNJtcgLsAdyYCZza7MO4C3JkInNnswrgLcGcicGazC+POBO7/AH5zPa/ivytzAAAAAElFTkSuQmCC" - ), - ] - ) - ], - model_parameters={"temperature": 0.1, "num_predict": 100}, - stream=False, - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -def test_invoke_chat_model_with_vision(): - model = OllamaLargeLanguageModel() - - result = model.invoke( - model="llava", - credentials={ - "base_url": os.environ.get("OLLAMA_BASE_URL"), - "mode": "chat", - "context_size": 2048, - "max_tokens": 2048, - }, - prompt_messages=[ - UserPromptMessage( - content=[ - TextPromptMessageContent( - data="What is this in this picture?", - ), - ImagePromptMessageContent( - 
data="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE4AAABMCAYAAADDYoEWAAAMQGlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnluSkEBoAQSkhN4EkRpASggt9I4gKiEJEEqMgaBiRxcVXLuIgA1dFVGwAmJBETuLYu+LBRVlXSzYlTcpoOu+8r35vrnz33/O/OfMmbllAFA7zhGJclF1APKEBeLYYH/6uOQUOukpIAEdoAy0gA2Hmy9iRkeHA1iG2r+Xd9cBIm2v2Eu1/tn/X4sGj5/PBQCJhjidl8/Ng/gAAHg1VyQuAIAo5c2mFoikGFagJYYBQrxIijPluFqK0+V4j8wmPpYFcTsASiocjjgTANVLkKcXcjOhhmo/xI5CnkAIgBodYp+8vMk8iNMgtoY2Ioil+oz0H3Qy/6aZPqzJ4WQOY/lcZEUpQJAvyuVM/z/T8b9LXq5kyIclrCpZ4pBY6Zxh3m7mTA6TYhWI+4TpkVEQa0L8QcCT2UOMUrIkIQlye9SAm8+COYMrDVBHHicgDGIDiIOEuZHhCj49QxDEhhjuEHSaoIAdD7EuxIv4+YFxCptN4smxCl9oY4aYxVTwZzlimV+pr/uSnASmQv91Fp+t0MdUi7LikyCmQGxeKEiMhFgVYof8nLgwhc3YoixW5JCNWBIrjd8c4li+MNhfro8VZoiDYhX2pXn5Q/PFNmUJ2JEKvK8gKz5Enh+sncuRxQ/ngl3iC5kJQzr8/HHhQ3Ph8QMC5XPHnvGFCXEKnQ+iAv9Y+VicIsqNVtjjpvzcYClvCrFLfmGcYiyeWAA3pFwfzxAVRMfL48SLsjmh0fJ48OUgHLBAAKADCazpYDLIBoLOvqY+eCfvCQIcIAaZgA/sFczQiCRZjxBe40AR+BMiPsgfHucv6+WDQsh/HWblV3uQIestlI3IAU8gzgNhIBfeS2SjhMPeEsFjyAj+4Z0DKxfGmwurtP/f80Psd4YJmXAFIxnySFcbsiQGEgOIIcQgog2uj/vgXng4vPrB6oQzcI+heXy3JzwhdBEeEq4Rugm3JgmKxT9FGQG6oX6QIhfpP+YCt4Sarrg/7g3VoTKug+sDe9wF+mHivtCzK2RZirilWaH/pP23GfywGgo7siMZJY8g+5Gtfx6paqvqOqwizfWP+ZHHmj6cb9Zwz8/+WT9knwfbsJ8tsUXYfuwMdgI7hx3BmgAda8WasQ7sqBQP767Hst015C1WFk8O1BH8w9/Qykozme9Y59jr+EXeV8CfJn1HA9Zk0XSxIDOrgM6EXwQ+nS3kOoyiOzk6OQMg/b7IX19vYmTfDUSn4zs3/w8AvFsHBwcPf+dCWwHY6w4f/0PfOWsG/HQoA3D2EFciLpRzuPRCgG8JNfik6QEjYAas4XycgBvwAn4gEISCKBAPksFEGH0W3OdiMBXMBPNACSgDy8EaUAk2gi1gB9gN9oEmcAScAKfBBXAJXAN34O7pAS9AP3gHPiMIQkKoCA3RQ4wRC8QOcUIYiA8SiIQjsUgykoZkIkJEgsxE5iNlyEqkEtmM1CJ7kUPICeQc0oXcQh4gvchr5BOKoSqoFmqIWqKjUQbKRMPQeHQCmolOQYvQBehStAKtQXehjegJ9AJ6De1GX6ADGMCUMR3MBLPHGBgLi8JSsAxMjM3GSrFyrAarx1rgOl/BurE+7CNOxGk4HbeHOzgET8C5+BR8Nr4Er8R34I14O34Ff4D3498IVIIBwY7gSWATxhEyCVMJJYRywjbCQcIp+Cz1EN4RiUQdohXRHT6LycRs4gziEuJ6YgPxOLGL+Ig4QCKR9Eh2JG9SFIlDKiCVkNaRdpFaSZdJPaQPSspKxkpOSkFKKUpCpWKlcqWdSseULis9VfpMVidbkD3JUWQeeTp5GXkruYV8kdxD/kzRoFhRvCnxlGzKPEoFpZ5yinKX8kZZWdlU2UM5RlmgPFe5QnmP8lnlB8ofVTRVbFVYKqkqEpWlKttV
jqvcUnlDpVItqX7UFGoBdSm1lnqSep/6QZWm6qDKVuWpzlGtUm1Uvaz6Uo2sZqHGVJuoVqRWrrZf7aJanzpZ3VKdpc5Rn61epX5I/Yb6gAZNY4xGlEaexhKNnRrnNJ5pkjQtNQM1eZoLNLdontR8RMNoZjQWjUubT9tKO0Xr0SJqWWmxtbK1yrR2a3Vq9WtrartoJ2pP067SPqrdrYPpWOqwdXJ1luns07mu82mE4QjmCP6IxSPqR1we8V53pK6fLl+3VLdB95ruJz26XqBejt4KvSa9e/q4vq1+jP5U/Q36p/T7RmqN9BrJHVk6ct/I2waoga1BrMEMgy0GHQYDhkaGwYYiw3WGJw37jHSM/IyyjVYbHTPqNaYZ+xgLjFcbtxo/p2vTmfRcegW9nd5vYmASYiIx2WzSafLZ1Mo0wbTYtMH0nhnFjGGWYbbarM2s39zYPMJ8pnmd+W0LsgXDIstircUZi/eWVpZJlgstmyyfWelasa2KrOqs7lpTrX2tp1jXWF+1IdowbHJs1ttcskVtXW2zbKtsL9qhdm52Arv1dl2jCKM8RglH1Yy6Ya9iz7QvtK+zf+Cg4xDuUOzQ5PBytPnolNErRp8Z/c3R1THXcavjnTGaY0LHFI9pGfPaydaJ61TldNWZ6hzkPMe52fmVi50L32WDy01XmmuE60LXNtevbu5uYrd6t153c/c092r3GwwtRjRjCeOsB8HD32OOxxGPj55ungWe+zz/8rL3yvHa6fVsrNVY/titYx95m3pzvDd7d/vQfdJ8Nvl0+5r4cnxrfB/6mfnx/Lb5PWXaMLOZu5gv/R39xf4H/d+zPFmzWMcDsIDggNKAzkDNwITAysD7QaZBmUF1Qf3BrsEzgo+HEELCQlaE3GAbsrnsWnZ/qHvorND2MJWwuLDKsIfhtuHi8JYINCI0YlXE3UiLSGFkUxSIYketiroXbRU9JfpwDDEmOqYq5knsmNiZsWfiaHGT4nbGvYv3j18WfyfBOkGS0JaolpiaWJv4PikgaWVS97jR42aNu5CsnyxIbk4hpSSmbEsZGB84fs34nlTX1JLU6xOsJkybcG6i/sTciUcnqU3iTNqfRkhLStuZ9oUTxanhDKSz06vT+7ks7lruC54fbzWvl+/NX8l/muGdsTLjWaZ35qrM3izfrPKsPgFLUCl4lR2SvTH7fU5Uzvacwdyk3IY8pby0vENCTWGOsH2y0eRpk7tEdqISUfcUzylrpvSLw8Tb8pH8CfnNBVrwR75DYi35RfKg0KewqvDD1MSp+6dpTBNO65huO33x9KdFQUW/zcBncGe0zTSZOW/mg1nMWZtnI7PTZ7fNMZuzYE7P3OC5O+ZR5uXM+73YsXhl8dv5SfNbFhgumLvg0S/Bv9SVqJaIS24s9Fq4cRG+SLCoc7Hz4nWLv5XySs+XOZaVl31Zwl1y/tcxv1b8Org0Y2nnMrdlG5YTlwuXX1/hu2LHSo2VRSsfrYpY1biavrp09ds1k9acK3cp37iWslaytrsivKJ5nfm65eu+VGZVXqvyr2qoNqheXP1+PW/95Q1+G+o3Gm4s2/hpk2DTzc3BmxtrLGvKtxC3FG55sjVx65nfGL/VbtPfVrbt63bh9u4dsTvaa91ra3ca7FxWh9ZJ6np3pe66tDtgd3O9ff3mBp2Gsj1gj2TP871pe6/vC9vXtp+xv/6AxYHqg7SDpY1I4/TG/qaspu7m5OauQ6GH2lq8Wg4edji8/YjJkaqj2keXHaMcW3BssLWodeC46HjficwTj9omtd05Oe7k1faY9s5TYafOng46ffIM80zrWe+zR855njt0nnG+6YLbhcYO146Dv7v+frDTrbPxovvF5ksel1q6xnYdu+x7+cSVgCunr7KvXrgWea3resL1mzdSb3Tf5N18div31qvbhbc/35l7l3C39J76vfL7Bvdr/rD5o6Hbrfvog4AHHQ/jHt55xH304nH+4y89C55Qn5Q/NX5a+8zp2ZHeoN5L
z8c/73khevG5r+RPjT+rX1q/PPCX318d/eP6e16JXw2+XvJG7832ty5v2waiB+6/y3v3+X3pB70POz4yPp75lPTp6eepX0hfKr7afG35Fvbt7mDe4KCII+bIfgUwWNGMDABebweAmgwADZ7PKOPl5z9ZQeRnVhkC/wnLz4iy4gZAPfx/j+mDfzc3ANizFR6/oL5aKgDRVADiPQDq7Dxch85qsnOltBDhOWBT5Nf0vHTwb4r8zPlD3D+3QKrqAn5u/wWdZ3xtG7qP3QAAADhlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAAqACAAQAAAABAAAATqADAAQAAAABAAAATAAAAADhTXUdAAARnUlEQVR4Ae2c245bR3aGi4fulizFHgUzQAYIggBB5klymfeaZ8hDBYjvAiRxkMAGkowRWx7JktjcZL7vX1Uku62Burkl5YbV5q7Tqqq1/v3XqgMpL95tbvftEh6NwPLRLS4NgsAFuDOJcAHuAtyZCJzZ7MK4C3BnInBmswvjLsCdicCZzS6MOxO49Znt0uz3//CPbbv6srXFrq0W9Q6Wi0VbLPn4R8x/jSLiu3nrl8s9dcartlwtKdmTbm21XranN6v27Mm6XV8t25fP1+3Pn1+1r4if3Czbk+t9u1rR6f9jmAXc1P6sbaevQGbfdgGJeA8ke0AQsCYYgiYgPR1QyVO+3wvcMm2WO0G2PeWkX79btp839AG4//UjYC62gDsB2rI9f7pov3q2bX/9F1ftBWAufTufOcwCrnTtR90dOdHoNgCJeAbUkuM5TsWAW5W9gfkE83ZkUHg0oAyAwbm927a2ebVoP/xx2f7jD1uYuG9/89tF+/VXK1hq+88TZgG32O1g2r7tpRdBM8fUTM7pyR8SYddgxkJErUszHti7U44CpzyEo16syNtx+qgy+1og7RMetpev9+3rb3bt+c2u/ebFsv3uL1ftiqn+qcMs4HY7jNQpEfadNU5VqeHUTJkgUbaPDxRADdZ8jU9LHoJYnwLUtgWN4ObDC7Kdr8Hp7d9qMTW8gt23V1zyvPrD1H56e9t+99vr9uJLprBDfaIw69U4dQRCIw2JdVIjbUzecj+7qYyPpZHiAbDaJwsXyMhQEQ0pq6sAp7hMS2XGqykdA2iy4EUtF6v206ur9k/fbNo//+frtt2OaW/rjxtmAaeNGqihBY5xfVQzQEZfoSH0KHgkrbD/CX6vPIqlSTU61vVCovRSbEwbIS851vj23Q+tff3vu/bzu5I7tvs4qVnADTa5FCbNC86qCLN2E1MxKKroYB2pgSz2RLbbVcVkSJhOKxIDjGxn+nSuqes2JlKuG8fA/IzPXazbj68X7et/27UfX7GifORwOuSju47h/c3beKfRFO74CNA04YP0ZT2/YzERFGojc9pmDG47/wyDZwJjiX4wwJNer1dZPJbs5/xzK5Ppzp7SQZBszNy22U7tX7/dtFdvJrv8aGE2cDJLoPycBgHSgICJUQLo8nmUo6y7oH0S5Lu/FGhDQULCfIooATw3yyOQQ46eYVpYiaBMTFtAFPR307r9y3fbdvsRfd5Rg6HJI2Lt1qaAF6TEqoxWdVdYSHawezCvAHLjW7Jh2QGcUkDDT4Og2OfSFRVkxipcAJUZARC5FVRbeRpB1hVY6r25XQHexIZ96Hfa++PTs4Dbi8rQg7imWQG27/uEgCTCssk/WWg7GwJWwDQ36PceGzQ+x7jOtgNogkIIpsZiFMdXoEfOPUlh3l5ulu2/X6bJ7Mc84Bw+xgOKzJqM0VKm8WYlVMqt61gFKNtQKeZ6o7Ls/aqEeYooJXDIZ9uiT0uZ5UxPUJNlYdoAK62qHfM7unz3/bb9/Ha+v3u/tn3AD0XOrnxAZdpNYZILgoxyGk4BqMCbssq66dXv6RdFkiB6Rj2u3N1npiMw1dQjF4oJW/kzy6VdMRFA9Xd8VvhCLxCyYUYkvhHZb7+fotvdUR6XmwXcYI1DangAA6ys
pgBj/dRjp6L+RbmSPaaxuuMnGEeVAhBF4pSapAFG5gUo60rAHmpVtcz0sR2aBZW8NAB9+W7dXr9N0dmPmUcu10pWrq7kQQvBQXn1dUsgoM4ej12TtyBknG51PEMGOV2TLLVZ/GLvLMBYHsYJhg7fuMBx6tq3LFu7aBxxD9jKFiO7Thbwcv7n5dS+/ML0eWEWcBqoptk+mEQp2aTG+rbmBYA+D6MyMwMAdepKsX5QpnglFZyZ5k4tDYsI/Y1pF7CRq22HoHXgGEOwgodvgH79INnW3tlFIVVQvkBXg1dvF3z27fkTGzw+zALOPZluVoVkV4yLHoBB3VBJUNyo6uEWXAyIkruC2OQjbVeppxkm8+iti2mySsM1EPYGKBcEyul3LKTW1+pr+wLRstwP0J8a2K95Txf/+6q1ZzeUDEXt/oFhHnA4fJYCBtawYlWmlsrJBEHhP43bi9Rq1Z0ymlK3Z/QCRqA5YfaNLZJWEACn929eluXlUGO8CgMrHWYi441S2tsFebLRL5RWL0e0nL64SEEf2sjMR4ZZwA0Ddfziclz1eN8yDn1qAaHSq3G0FEQXjABDo51sJVNyGnA0QlAPL4LOApzMo0mY1sUFbQBj8xTzYhKrROYF5VGIftR1uW3+3uiWU8XnBw7l3HIYVG/P/djYgMZoyrTJrci0n2qPZVnNFV913viW6btGzsXBT6aW3VKmsauVTFOc2DxpP5YJYLBBeCUixE71IlGBR2EF+6OugHbP12Ddoj29HgIPj+cxDiPDFGINzB8sKhLh0Ui4gOgDI8deb8FiwYxlteWhLHWTlmOzhkxLAObPIkFqS8+bbG5BdgWiAmJTwXdqZ7oysktzdKC/BWMWiAJNpyP0ZPTMItRy7fTi2RB4eDwLuIkpCma1gob/Dsw7zcKAMf3txiCot8c42ZCDPu3WAqRMJAGEk4cACaLzSZsFRhAE9QoAtXcwTX92XDT0sxTQXJYHdDJin0KfVN8PmzNvnOYBx5XNlik4giumihb7tJ60ezgNhgXuXgRNttxunZYAj7uzbL3nUA67rm5KJWrJCyTfIVwBMh3bTkD8TqFYp6uv8RwrgJpAZmHHScqv0qWeKT48NujhAuELekyYBdz9gXJQ53DvDh3tU62xTtN8bQhzzE9OccAK8wA2ez2k3cNtN7wM/RZs9M5NkNZoee0H2rmhLr8miPV9roAZtN1RHV/gDb7EoUtXKeXjYXUBN0oeFs8CbrtlhZRGPZSSZNyI9gA+TBFkelFNWxgEgCtG3wDiFqEr5Jz6y/U1DAM4QLxi2l7DNhl3w/epNTUFWGbXC7HrMQMz7WUbf8AaDQ46DYXuxLoJX6CFRzvuiPyJzCzgZIoKyqgKAx1yAGPQUWfa+GoDsqwDJNnHLF9juSz0i5VrpvqSwmsQul5dtyfrfX1zL3i0WdHHSjaKVjf0T5k7ABtxlEHbwxusgjydAY8N84BjvAx5GLfMqBW0VJEZ+pwKskQnbpnFHPzpwWo/bzkGvX51296+bu1v/+qL9usXT9rTJ07Bzh9k9HEPsxNhwhh6xLXKo3fXWf3iMkrBBz9nAbflbHm6ONxhXp8/NW26lkSleIEV9FBVI+o6ihjmffPDt+3v/+5Z+82vnsZw/fyercweB2d7wzA8mfuPEknpXTnHvQsoPd1v/aD8LODw+AxbAw/QjnEfv69u5kz6dtOiW2R6YmW7vd0C3qK94wcjf/zxZ1bRXfvqGT6U3f2G/Z6AesqotgJX477PNVmTmxfiwTSS5irqz2ybEHD6PzbMAk7lS/0BxgkTqPAUYBiAkQpTLLdKxe1D4Lbsp968uW1vXk+ZrnpsN7yL1TbmbvCl4GcPPPStZWyNcM9s++9y92ruZu2CT21q7lZ9KDcLuC3WbmGG42uA30EISOVkFynt1BBialOliF/wZHqGTa1tOfq8fbMHPL6N2iBPW2d7HfxZdWnreiN49UL0dfhLR6tBSVVwNo+TQ1U5IsHvQU4Dcry7
bGNOix+SngVcwAhYpZjTQxaNMABLLLtUFEAMEwi4kk63fGDbLTcVm82ubd7hNylzEXCa6SPdz2Vf5iUobe0jAFIq8+JHT8CjGeUjHFOj5E7MIO4THxvOaHIcwu2IOKiznyg89BTEXi6WssO8B36vkLa33Pv7/QRbEtm21c/BtIm9Yb4ho19PDg4g09aeucySdpzq3BfVx6WQqh7MkLOSkHLf2olEKni4n7xznh0VH4jnAYdy6hfVSZTvUmF54f2cU9d9XmlhvUyTlbkxIT0BWtgH4wRRgPMy7EFbAwi8ojzbNyqtH/7coWxnUHyE+rmYjbs3NCnqdwIbbM/GZ4RZwDleVskO3viSBhWjSu2Pxj7JU4bsqrzTU5YZQ7xKu73Bb8bAbo+s28NStxEyb8e+K1UAKXhOVivK7x0RUANf3zEw/smJpsr37cad9RlhFnCbzQYwfN36I+5qwxgVwRA/vOHxlneeMiaux9lymN5tTTttkZN5mbZwCYsLM550taA+zJM5gsdHsGSdQTbngN7ZlC/JrRhXIcorRJvVcp2pnjzdy+0nnErOCbOAE5x8d4oVCy4xMSFGetjfgWJ3MQFHdomxZbUwwC4B84YlzBNojUEmxmqO1tVC4VcVopUzKuXK+XArUeDVTyq85wv7xKqHsel1dfIUkl8zUXcFm8eUH7IPjWcBp8J5mYxWcWmbclhlyEIAMJm2HbSwDCHZGD9IuR1UH4MhaZ4HOAIQIJOrIxfjxOFRUMNQq8wI9EH5WNVJdcEje22ofxs3K6PlQ+OZwA2ghrFSKhiEVSqh/5JJcfodKBnntLac7wb5CKLpAs+0RguYuAhoNh2CRV1dTVFhqWhRn/u+tOsMtTph6JhOkAWsQDz1K3NHeHyYBZyK70BG5oy3SyqGumoaAhr1Aiggnm8FzXr3cQWSq++p8seM10v6LW9Elgh5kyGINXMdi1xspw2LRHwqMjJTV2KdU9c2eQ1SkXDDHL2aYf2MprVp1dFrtcBlAWB/sNuxMoJIzEfRqhMk04qXfM0n8yVDaa/DRLp1GuGSKhNz65ZEOQUSdyD0Y/adRSojsxjoz2jnNFdN3l/S+sUvnqbDsx+zgCvQMJzhPaCrlouCLBvbA43x68DhsAc7DxpTr0y39VAMBCfpSlpSUMggzRe8X4bIAWRYJqVJj6t7feMV/9Bkfeb+bYw2Czg78S3GwWtEQEPRWFMMEDAZhVTiMaWLnZZRxSexfaStPR9DAXbMj5Qs479Dm8PqqYCNEpUTVAe/GpLC3vH16hI64zkLuB1XQVsdFkED8ps40oLjj2sMAdbFwGlKRjbW6UHAFZaRJVegIpeWVafZhQ4yHahUm+5VyfOwXYFHTX8DKUNSn+fCcsN3qOd8AT3GGPEs4EYnxho9YlOnU1WTUj98GbLKWCawI5wk71DiBMoh+qjYfgXUc+nNlW+rXuqjOrknPAs4sRoHcvvNguDZNEChYOoBUUZ175z9nMBZnQ6cnncgS7uDnt3BJ49Y8axqPYLZ0gVEb2DaICyHtOUM5t2eP7AJexWaGWYBVzcdsqneoAAViyzzo3ZsC1Jeq2qBKVhlkIxDsuSRrSY6/6S6eaaFjD+B4BGmMo9X9M06kcAdMq0qU5eT+lBBc8+GqaVmCc989iHP6yVvOcr4qE8ZLijVZ8VleC/5xWDWFmN6ow6aIKX75EfdL5rfKxBJgAcwwV/zeXrFjyqqo3uy52dnMa5oU4O7svo7YMNgWrFKdsk6WBXmmS82HuKsuADjHZFGi5iBIv+9qnn/qt+qSh3JTFNjPvWDiqpnA0SexYB/ijm6q5qP85wFnIZrXQHgillpVesHh9QVaAWWAJccfo/VNrOcbmrbYn/vCR9gy2m1aUH2WOa/rv4UoKnhPODowC2Gx6jQo4Nox4ZinDL392ssIHFSZWa1rTZJD/wSy0Kn34eDpwZvP1w96+dmH25zrsQs4KSLP4GAawWSjhnFZZQFmUZxOZSTj/ne2yUh
IHCjRIlFKcIU0x852RjZTGGlDdaQrkxk7MPrJr/gzg17r4vgJ3rMAk4/wmQDE7wJhg+fFV1xaMGiMqnXaFc5jd4FjCCIRAEmAO5aPE7lzsw0ZelHYJB0PCWscErqOJcsrbllGmhmzE/7mAXcPof544Wlqg6wTuORtvKQzjV2gVC+shaNMhc24v8iIloGmS3ogc7bD9sS884Oi0kEP89jFnDX++/hCtPVtT7kwaxOkZpmxQ/L9vgdj1r+NCtAwQ6/A9DXMXnBqZgoHDdXP7Wna/Id6PRCum7DiREqcg1UPw9Yp6MsLv/HwlM4Hp7WQ1/CGQhcgDsDNJtcgLsAdyYCZza7MO4C3JkInNnswrgLcGcicGazC+POBO7/AH5zPa/ivytzAAAAAElFTkSuQmCC" - ), - ] - ) - ], - model_parameters={"temperature": 0.1, "num_predict": 100}, - stream=False, - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -def test_get_num_tokens(): - model = OllamaLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="mistral:text", - credentials={ - "base_url": os.environ.get("OLLAMA_BASE_URL"), - "mode": "chat", - "context_size": 2048, - "max_tokens": 2048, - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 6 diff --git a/api/tests/integration_tests/model_runtime/ollama/test_text_embedding.py b/api/tests/integration_tests/model_runtime/ollama/test_text_embedding.py deleted file mode 100644 index 3c4f740a4fd09c..00000000000000 --- a/api/tests/integration_tests/model_runtime/ollama/test_text_embedding.py +++ /dev/null @@ -1,65 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.ollama.text_embedding.text_embedding import OllamaEmbeddingModel - - -def test_validate_credentials(): - model = OllamaEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="mistral:text", - credentials={ - "base_url": "http://localhost:21434", - "mode": "chat", - "context_size": 4096, - }, - ) - - model.validate_credentials( - model="mistral:text", - credentials={ - "base_url": 
os.environ.get("OLLAMA_BASE_URL"), - "mode": "chat", - "context_size": 4096, - }, - ) - - -def test_invoke_model(): - model = OllamaEmbeddingModel() - - result = model.invoke( - model="mistral:text", - credentials={ - "base_url": os.environ.get("OLLAMA_BASE_URL"), - "mode": "chat", - "context_size": 4096, - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 2 - - -def test_get_num_tokens(): - model = OllamaEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="mistral:text", - credentials={ - "base_url": os.environ.get("OLLAMA_BASE_URL"), - "mode": "chat", - "context_size": 4096, - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/openai/test_llm.py b/api/tests/integration_tests/model_runtime/openai/test_llm.py deleted file mode 100644 index 3b3ea9ec800cbb..00000000000000 --- a/api/tests/integration_tests/model_runtime/openai/test_llm.py +++ /dev/null @@ -1,313 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import AIModelEntity, ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.openai.llm.llm import OpenAILargeLanguageModel - -"""FOR MOCK FIXTURES, DO NOT REMOVE""" -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -def test_predefined_models(): - model = 
OpenAILargeLanguageModel() - model_schemas = model.predefined_models() - - assert len(model_schemas) >= 1 - assert isinstance(model_schemas[0], AIModelEntity) - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_validate_credentials_for_chat_model(setup_openai_mock): - model = OpenAILargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="gpt-3.5-turbo", credentials={"openai_api_key": "invalid_key"}) - - model.validate_credentials(model="gpt-3.5-turbo", credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}) - - -@pytest.mark.parametrize("setup_openai_mock", [["completion"]], indirect=True) -def test_validate_credentials_for_completion_model(setup_openai_mock): - model = OpenAILargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="text-davinci-003", credentials={"openai_api_key": "invalid_key"}) - - model.validate_credentials( - model="text-davinci-003", credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")} - ) - - -@pytest.mark.parametrize("setup_openai_mock", [["completion"]], indirect=True) -def test_invoke_completion_model(setup_openai_mock): - model = OpenAILargeLanguageModel() - - result = model.invoke( - model="gpt-3.5-turbo-instruct", - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY"), "openai_api_base": "https://api.openai.com"}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.0, "max_tokens": 1}, - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - assert model._num_tokens_from_string("gpt-3.5-turbo-instruct", result.message.content) == 1 - - -@pytest.mark.parametrize("setup_openai_mock", [["completion"]], indirect=True) -def test_invoke_stream_completion_model(setup_openai_mock): - model = OpenAILargeLanguageModel() - - result = model.invoke( - 
model="gpt-3.5-turbo-instruct", - credentials={ - "openai_api_key": os.environ.get("OPENAI_API_KEY"), - "openai_organization": os.environ.get("OPENAI_ORGANIZATION"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(result, Generator) - - for chunk in result: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_chat_model(setup_openai_mock): - model = OpenAILargeLanguageModel() - - result = model.invoke( - model="gpt-3.5-turbo", - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={ - "temperature": 0.0, - "top_p": 1.0, - "presence_penalty": 0.0, - "frequency_penalty": 0.0, - "max_tokens": 10, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_chat_model_with_vision(setup_openai_mock): - model = OpenAILargeLanguageModel() - - result = model.invoke( - model="gpt-4-vision-preview", - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage( - content=[ - TextPromptMessageContent( - data="Hello World!", - ), - ImagePromptMessageContent( - 
data="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE4AAABMCAYAAADDYoEWAAAMQGlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnluSkEBoAQSkhN4EkRpASggt9I4gKiEJEEqMgaBiRxcVXLuIgA1dFVGwAmJBETuLYu+LBRVlXSzYlTcpoOu+8r35vrnz33/O/OfMmbllAFA7zhGJclF1APKEBeLYYH/6uOQUOukpIAEdoAy0gA2Hmy9iRkeHA1iG2r+Xd9cBIm2v2Eu1/tn/X4sGj5/PBQCJhjidl8/Ng/gAAHg1VyQuAIAo5c2mFoikGFagJYYBQrxIijPluFqK0+V4j8wmPpYFcTsASiocjjgTANVLkKcXcjOhhmo/xI5CnkAIgBodYp+8vMk8iNMgtoY2Ioil+oz0H3Qy/6aZPqzJ4WQOY/lcZEUpQJAvyuVM/z/T8b9LXq5kyIclrCpZ4pBY6Zxh3m7mTA6TYhWI+4TpkVEQa0L8QcCT2UOMUrIkIQlye9SAm8+COYMrDVBHHicgDGIDiIOEuZHhCj49QxDEhhjuEHSaoIAdD7EuxIv4+YFxCptN4smxCl9oY4aYxVTwZzlimV+pr/uSnASmQv91Fp+t0MdUi7LikyCmQGxeKEiMhFgVYof8nLgwhc3YoixW5JCNWBIrjd8c4li+MNhfro8VZoiDYhX2pXn5Q/PFNmUJ2JEKvK8gKz5Enh+sncuRxQ/ngl3iC5kJQzr8/HHhQ3Ph8QMC5XPHnvGFCXEKnQ+iAv9Y+VicIsqNVtjjpvzcYClvCrFLfmGcYiyeWAA3pFwfzxAVRMfL48SLsjmh0fJ48OUgHLBAAKADCazpYDLIBoLOvqY+eCfvCQIcIAaZgA/sFczQiCRZjxBe40AR+BMiPsgfHucv6+WDQsh/HWblV3uQIestlI3IAU8gzgNhIBfeS2SjhMPeEsFjyAj+4Z0DKxfGmwurtP/f80Psd4YJmXAFIxnySFcbsiQGEgOIIcQgog2uj/vgXng4vPrB6oQzcI+heXy3JzwhdBEeEq4Rugm3JgmKxT9FGQG6oX6QIhfpP+YCt4Sarrg/7g3VoTKug+sDe9wF+mHivtCzK2RZirilWaH/pP23GfywGgo7siMZJY8g+5Gtfx6paqvqOqwizfWP+ZHHmj6cb9Zwz8/+WT9knwfbsJ8tsUXYfuwMdgI7hx3BmgAda8WasQ7sqBQP767Hst015C1WFk8O1BH8w9/Qykozme9Y59jr+EXeV8CfJn1HA9Zk0XSxIDOrgM6EXwQ+nS3kOoyiOzk6OQMg/b7IX19vYmTfDUSn4zs3/w8AvFsHBwcPf+dCWwHY6w4f/0PfOWsG/HQoA3D2EFciLpRzuPRCgG8JNfik6QEjYAas4XycgBvwAn4gEISCKBAPksFEGH0W3OdiMBXMBPNACSgDy8EaUAk2gi1gB9gN9oEmcAScAKfBBXAJXAN34O7pAS9AP3gHPiMIQkKoCA3RQ4wRC8QOcUIYiA8SiIQjsUgykoZkIkJEgsxE5iNlyEqkEtmM1CJ7kUPICeQc0oXcQh4gvchr5BOKoSqoFmqIWqKjUQbKRMPQeHQCmolOQYvQBehStAKtQXehjegJ9AJ6De1GX6ADGMCUMR3MBLPHGBgLi8JSsAxMjM3GSrFyrAarx1rgOl/BurE+7CNOxGk4HbeHOzgET8C5+BR8Nr4Er8R34I14O34Ff4D3498IVIIBwY7gSWATxhEyCVMJJYRywjbCQcIp+Cz1EN4RiUQdohXRHT6LycRs4gziEuJ6YgPxOLGL+Ig4QCKR9Eh2JG9SFIlDKiCVkNaRdpFaSZdJPaQPSspKxkpOSkFKKUpCpWKlcqWdSseULis9VfpMVidbkD3JUWQeeTp5GXkruYV8kdxD/kzRoFhRvCnxlGzKPEoFpZ5yinKX8kZZWdlU2UM5RlmgPFe5QnmP8lnlB8ofVTRVbFVYKqkqEpWlKttV
jqvcUnlDpVItqX7UFGoBdSm1lnqSep/6QZWm6qDKVuWpzlGtUm1Uvaz6Uo2sZqHGVJuoVqRWrrZf7aJanzpZ3VKdpc5Rn61epX5I/Yb6gAZNY4xGlEaexhKNnRrnNJ5pkjQtNQM1eZoLNLdontR8RMNoZjQWjUubT9tKO0Xr0SJqWWmxtbK1yrR2a3Vq9WtrartoJ2pP067SPqrdrYPpWOqwdXJ1luns07mu82mE4QjmCP6IxSPqR1we8V53pK6fLl+3VLdB95ruJz26XqBejt4KvSa9e/q4vq1+jP5U/Q36p/T7RmqN9BrJHVk6ct/I2waoga1BrMEMgy0GHQYDhkaGwYYiw3WGJw37jHSM/IyyjVYbHTPqNaYZ+xgLjFcbtxo/p2vTmfRcegW9nd5vYmASYiIx2WzSafLZ1Mo0wbTYtMH0nhnFjGGWYbbarM2s39zYPMJ8pnmd+W0LsgXDIstircUZi/eWVpZJlgstmyyfWelasa2KrOqs7lpTrX2tp1jXWF+1IdowbHJs1ttcskVtXW2zbKtsL9qhdm52Arv1dl2jCKM8RglH1Yy6Ya9iz7QvtK+zf+Cg4xDuUOzQ5PBytPnolNErRp8Z/c3R1THXcavjnTGaY0LHFI9pGfPaydaJ61TldNWZ6hzkPMe52fmVi50L32WDy01XmmuE60LXNtevbu5uYrd6t153c/c092r3GwwtRjRjCeOsB8HD32OOxxGPj55ungWe+zz/8rL3yvHa6fVsrNVY/titYx95m3pzvDd7d/vQfdJ8Nvl0+5r4cnxrfB/6mfnx/Lb5PWXaMLOZu5gv/R39xf4H/d+zPFmzWMcDsIDggNKAzkDNwITAysD7QaZBmUF1Qf3BrsEzgo+HEELCQlaE3GAbsrnsWnZ/qHvorND2MJWwuLDKsIfhtuHi8JYINCI0YlXE3UiLSGFkUxSIYketiroXbRU9JfpwDDEmOqYq5knsmNiZsWfiaHGT4nbGvYv3j18WfyfBOkGS0JaolpiaWJv4PikgaWVS97jR42aNu5CsnyxIbk4hpSSmbEsZGB84fs34nlTX1JLU6xOsJkybcG6i/sTciUcnqU3iTNqfRkhLStuZ9oUTxanhDKSz06vT+7ks7lruC54fbzWvl+/NX8l/muGdsTLjWaZ35qrM3izfrPKsPgFLUCl4lR2SvTH7fU5Uzvacwdyk3IY8pby0vENCTWGOsH2y0eRpk7tEdqISUfcUzylrpvSLw8Tb8pH8CfnNBVrwR75DYi35RfKg0KewqvDD1MSp+6dpTBNO65huO33x9KdFQUW/zcBncGe0zTSZOW/mg1nMWZtnI7PTZ7fNMZuzYE7P3OC5O+ZR5uXM+73YsXhl8dv5SfNbFhgumLvg0S/Bv9SVqJaIS24s9Fq4cRG+SLCoc7Hz4nWLv5XySs+XOZaVl31Zwl1y/tcxv1b8Org0Y2nnMrdlG5YTlwuXX1/hu2LHSo2VRSsfrYpY1biavrp09ds1k9acK3cp37iWslaytrsivKJ5nfm65eu+VGZVXqvyr2qoNqheXP1+PW/95Q1+G+o3Gm4s2/hpk2DTzc3BmxtrLGvKtxC3FG55sjVx65nfGL/VbtPfVrbt63bh9u4dsTvaa91ra3ca7FxWh9ZJ6np3pe66tDtgd3O9ff3mBp2Gsj1gj2TP871pe6/vC9vXtp+xv/6AxYHqg7SDpY1I4/TG/qaspu7m5OauQ6GH2lq8Wg4edji8/YjJkaqj2keXHaMcW3BssLWodeC46HjficwTj9omtd05Oe7k1faY9s5TYafOng46ffIM80zrWe+zR855njt0nnG+6YLbhcYO146Dv7v+frDTrbPxovvF5ksel1q6xnYdu+x7+cSVgCunr7KvXrgWea3resL1mzdSb3Tf5N18div31qvbhbc/35l7l3C39J76vfL7Bvdr/rD5o6Hbrfvog4AHHQ/jHt55xH304nH+4y89C55Qn5Q/NX5a+8zp2ZHeoN5L
z8c/73khevG5r+RPjT+rX1q/PPCX318d/eP6e16JXw2+XvJG7832ty5v2waiB+6/y3v3+X3pB70POz4yPp75lPTp6eepX0hfKr7afG35Fvbt7mDe4KCII+bIfgUwWNGMDABebweAmgwADZ7PKOPl5z9ZQeRnVhkC/wnLz4iy4gZAPfx/j+mDfzc3ANizFR6/oL5aKgDRVADiPQDq7Dxch85qsnOltBDhOWBT5Nf0vHTwb4r8zPlD3D+3QKrqAn5u/wWdZ3xtG7qP3QAAADhlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAAqACAAQAAAABAAAATqADAAQAAAABAAAATAAAAADhTXUdAAARnUlEQVR4Ae2c245bR3aGi4fulizFHgUzQAYIggBB5klymfeaZ8hDBYjvAiRxkMAGkowRWx7JktjcZL7vX1Uku62Burkl5YbV5q7Tqqq1/v3XqgMpL95tbvftEh6NwPLRLS4NgsAFuDOJcAHuAtyZCJzZ7MK4C3BnInBmswvjLsCdicCZzS6MOxO49Znt0uz3//CPbbv6srXFrq0W9Q6Wi0VbLPn4R8x/jSLiu3nrl8s9dcartlwtKdmTbm21XranN6v27Mm6XV8t25fP1+3Pn1+1r4if3Czbk+t9u1rR6f9jmAXc1P6sbaevQGbfdgGJeA8ke0AQsCYYgiYgPR1QyVO+3wvcMm2WO0G2PeWkX79btp839AG4//UjYC62gDsB2rI9f7pov3q2bX/9F1ftBWAufTufOcwCrnTtR90dOdHoNgCJeAbUkuM5TsWAW5W9gfkE83ZkUHg0oAyAwbm927a2ebVoP/xx2f7jD1uYuG9/89tF+/VXK1hq+88TZgG32O1g2r7tpRdBM8fUTM7pyR8SYddgxkJErUszHti7U44CpzyEo16syNtx+qgy+1og7RMetpev9+3rb3bt+c2u/ebFsv3uL1ftiqn+qcMs4HY7jNQpEfadNU5VqeHUTJkgUbaPDxRADdZ8jU9LHoJYnwLUtgWN4ObDC7Kdr8Hp7d9qMTW8gt23V1zyvPrD1H56e9t+99vr9uJLprBDfaIw69U4dQRCIw2JdVIjbUzecj+7qYyPpZHiAbDaJwsXyMhQEQ0pq6sAp7hMS2XGqykdA2iy4EUtF6v206ur9k/fbNo//+frtt2OaW/rjxtmAaeNGqihBY5xfVQzQEZfoSH0KHgkrbD/CX6vPIqlSTU61vVCovRSbEwbIS851vj23Q+tff3vu/bzu5I7tvs4qVnADTa5FCbNC86qCLN2E1MxKKroYB2pgSz2RLbbVcVkSJhOKxIDjGxn+nSuqes2JlKuG8fA/IzPXazbj68X7et/27UfX7GifORwOuSju47h/c3beKfRFO74CNA04YP0ZT2/YzERFGojc9pmDG47/wyDZwJjiX4wwJNer1dZPJbs5/xzK5Ppzp7SQZBszNy22U7tX7/dtFdvJrv8aGE2cDJLoPycBgHSgICJUQLo8nmUo6y7oH0S5Lu/FGhDQULCfIooATw3yyOQQ46eYVpYiaBMTFtAFPR307r9y3fbdvsRfd5Rg6HJI2Lt1qaAF6TEqoxWdVdYSHawezCvAHLjW7Jh2QGcUkDDT4Og2OfSFRVkxipcAJUZARC5FVRbeRpB1hVY6r25XQHexIZ96Hfa++PTs4Dbi8rQg7imWQG27/uEgCTCssk/WWg7GwJWwDQ36PceGzQ+x7jOtgNogkIIpsZiFMdXoEfOPUlh3l5ulu2/X6bJ7Mc84Bw+xgOKzJqM0VKm8WYlVMqt61gFKNtQKeZ6o7Ls/aqEeYooJXDIZ9uiT0uZ5UxPUJNlYdoAK62qHfM7unz3/bb9/Ha+v3u/tn3AD0XOrnxAZdpNYZILgoxyGk4BqMCbssq66dXv6RdFkiB6Rj2u3N1npiMw1dQjF4oJW/kzy6VdMRFA9Xd8VvhCLxCyYUYkvhHZb7+fotvdUR6XmwXcYI1DangAA6ys
pgBj/dRjp6L+RbmSPaaxuuMnGEeVAhBF4pSapAFG5gUo60rAHmpVtcz0sR2aBZW8NAB9+W7dXr9N0dmPmUcu10pWrq7kQQvBQXn1dUsgoM4ej12TtyBknG51PEMGOV2TLLVZ/GLvLMBYHsYJhg7fuMBx6tq3LFu7aBxxD9jKFiO7Thbwcv7n5dS+/ML0eWEWcBqoptk+mEQp2aTG+rbmBYA+D6MyMwMAdepKsX5QpnglFZyZ5k4tDYsI/Y1pF7CRq22HoHXgGEOwgodvgH79INnW3tlFIVVQvkBXg1dvF3z27fkTGzw+zALOPZluVoVkV4yLHoBB3VBJUNyo6uEWXAyIkruC2OQjbVeppxkm8+iti2mySsM1EPYGKBcEyul3LKTW1+pr+wLRstwP0J8a2K95Txf/+6q1ZzeUDEXt/oFhHnA4fJYCBtawYlWmlsrJBEHhP43bi9Rq1Z0ymlK3Z/QCRqA5YfaNLZJWEACn929eluXlUGO8CgMrHWYi441S2tsFebLRL5RWL0e0nL64SEEf2sjMR4ZZwA0Ddfziclz1eN8yDn1qAaHSq3G0FEQXjABDo51sJVNyGnA0QlAPL4LOApzMo0mY1sUFbQBj8xTzYhKrROYF5VGIftR1uW3+3uiWU8XnBw7l3HIYVG/P/djYgMZoyrTJrci0n2qPZVnNFV913viW6btGzsXBT6aW3VKmsauVTFOc2DxpP5YJYLBBeCUixE71IlGBR2EF+6OugHbP12Ddoj29HgIPj+cxDiPDFGINzB8sKhLh0Ui4gOgDI8deb8FiwYxlteWhLHWTlmOzhkxLAObPIkFqS8+bbG5BdgWiAmJTwXdqZ7oysktzdKC/BWMWiAJNpyP0ZPTMItRy7fTi2RB4eDwLuIkpCma1gob/Dsw7zcKAMf3txiCot8c42ZCDPu3WAqRMJAGEk4cACaLzSZsFRhAE9QoAtXcwTX92XDT0sxTQXJYHdDJin0KfVN8PmzNvnOYBx5XNlik4giumihb7tJ60ezgNhgXuXgRNttxunZYAj7uzbL3nUA67rm5KJWrJCyTfIVwBMh3bTkD8TqFYp6uv8RwrgJpAZmHHScqv0qWeKT48NujhAuELekyYBdz9gXJQ53DvDh3tU62xTtN8bQhzzE9OccAK8wA2ez2k3cNtN7wM/RZs9M5NkNZoee0H2rmhLr8miPV9roAZtN1RHV/gDb7EoUtXKeXjYXUBN0oeFs8CbrtlhZRGPZSSZNyI9gA+TBFkelFNWxgEgCtG3wDiFqEr5Jz6y/U1DAM4QLxi2l7DNhl3w/epNTUFWGbXC7HrMQMz7WUbf8AaDQ46DYXuxLoJX6CFRzvuiPyJzCzgZIoKyqgKAx1yAGPQUWfa+GoDsqwDJNnHLF9juSz0i5VrpvqSwmsQul5dtyfrfX1zL3i0WdHHSjaKVjf0T5k7ABtxlEHbwxusgjydAY8N84BjvAx5GLfMqBW0VJEZ+pwKskQnbpnFHPzpwWo/bzkGvX51296+bu1v/+qL9usXT9rTJ07Bzh9k9HEPsxNhwhh6xLXKo3fXWf3iMkrBBz9nAbflbHm6ONxhXp8/NW26lkSleIEV9FBVI+o6ihjmffPDt+3v/+5Z+82vnsZw/fyercweB2d7wzA8mfuPEknpXTnHvQsoPd1v/aD8LODw+AxbAw/QjnEfv69u5kz6dtOiW2R6YmW7vd0C3qK94wcjf/zxZ1bRXfvqGT6U3f2G/Z6AesqotgJX477PNVmTmxfiwTSS5irqz2ybEHD6PzbMAk7lS/0BxgkTqPAUYBiAkQpTLLdKxe1D4Lbsp968uW1vXk+ZrnpsN7yL1TbmbvCl4GcPPPStZWyNcM9s++9y92ruZu2CT21q7lZ9KDcLuC3WbmGG42uA30EISOVkFynt1BBialOliF/wZHqGTa1tOfq8fbMHPL6N2iBPW2d7HfxZdWnreiN49UL0dfhLR6tBSVVwNo+TQ1U5IsHvQU4Dcry7
bGNOix+SngVcwAhYpZjTQxaNMABLLLtUFEAMEwi4kk63fGDbLTcVm82ubd7hNylzEXCa6SPdz2Vf5iUobe0jAFIq8+JHT8CjGeUjHFOj5E7MIO4THxvOaHIcwu2IOKiznyg89BTEXi6WssO8B36vkLa33Pv7/QRbEtm21c/BtIm9Yb4ho19PDg4g09aeucySdpzq3BfVx6WQqh7MkLOSkHLf2olEKni4n7xznh0VH4jnAYdy6hfVSZTvUmF54f2cU9d9XmlhvUyTlbkxIT0BWtgH4wRRgPMy7EFbAwi8ojzbNyqtH/7coWxnUHyE+rmYjbs3NCnqdwIbbM/GZ4RZwDleVskO3viSBhWjSu2Pxj7JU4bsqrzTU5YZQ7xKu73Bb8bAbo+s28NStxEyb8e+K1UAKXhOVivK7x0RUANf3zEw/smJpsr37cad9RlhFnCbzQYwfN36I+5qwxgVwRA/vOHxlneeMiaux9lymN5tTTttkZN5mbZwCYsLM550taA+zJM5gsdHsGSdQTbngN7ZlC/JrRhXIcorRJvVcp2pnjzdy+0nnErOCbOAE5x8d4oVCy4xMSFGetjfgWJ3MQFHdomxZbUwwC4B84YlzBNojUEmxmqO1tVC4VcVopUzKuXK+XArUeDVTyq85wv7xKqHsel1dfIUkl8zUXcFm8eUH7IPjWcBp8J5mYxWcWmbclhlyEIAMJm2HbSwDCHZGD9IuR1UH4MhaZ4HOAIQIJOrIxfjxOFRUMNQq8wI9EH5WNVJdcEje22ofxs3K6PlQ+OZwA2ghrFSKhiEVSqh/5JJcfodKBnntLac7wb5CKLpAs+0RguYuAhoNh2CRV1dTVFhqWhRn/u+tOsMtTph6JhOkAWsQDz1K3NHeHyYBZyK70BG5oy3SyqGumoaAhr1Aiggnm8FzXr3cQWSq++p8seM10v6LW9Elgh5kyGINXMdi1xspw2LRHwqMjJTV2KdU9c2eQ1SkXDDHL2aYf2MprVp1dFrtcBlAWB/sNuxMoJIzEfRqhMk04qXfM0n8yVDaa/DRLp1GuGSKhNz65ZEOQUSdyD0Y/adRSojsxjoz2jnNFdN3l/S+sUvnqbDsx+zgCvQMJzhPaCrlouCLBvbA43x68DhsAc7DxpTr0y39VAMBCfpSlpSUMggzRe8X4bIAWRYJqVJj6t7feMV/9Bkfeb+bYw2Czg78S3GwWtEQEPRWFMMEDAZhVTiMaWLnZZRxSexfaStPR9DAXbMj5Qs479Dm8PqqYCNEpUTVAe/GpLC3vH16hI64zkLuB1XQVsdFkED8ps40oLjj2sMAdbFwGlKRjbW6UHAFZaRJVegIpeWVafZhQ4yHahUm+5VyfOwXYFHTX8DKUNSn+fCcsN3qOd8AT3GGPEs4EYnxho9YlOnU1WTUj98GbLKWCawI5wk71DiBMoh+qjYfgXUc+nNlW+rXuqjOrknPAs4sRoHcvvNguDZNEChYOoBUUZ175z9nMBZnQ6cnncgS7uDnt3BJ49Y8axqPYLZ0gVEb2DaICyHtOUM5t2eP7AJexWaGWYBVzcdsqneoAAViyzzo3ZsC1Jeq2qBKVhlkIxDsuSRrSY6/6S6eaaFjD+B4BGmMo9X9M06kcAdMq0qU5eT+lBBc8+GqaVmCc989iHP6yVvOcr4qE8ZLijVZ8VleC/5xWDWFmN6ow6aIKX75EfdL5rfKxBJgAcwwV/zeXrFjyqqo3uy52dnMa5oU4O7svo7YMNgWrFKdsk6WBXmmS82HuKsuADjHZFGi5iBIv+9qnn/qt+qSh3JTFNjPvWDiqpnA0SexYB/ijm6q5qP85wFnIZrXQHgillpVesHh9QVaAWWAJccfo/VNrOcbmrbYn/vCR9gy2m1aUH2WOa/rv4UoKnhPODowC2Gx6jQo4Nox4ZinDL392ssIHFSZWa1rTZJD/wSy0Kn34eDpwZvP1w96+dmH25zrsQs4KSLP4GAawWSjhnFZZQFmUZxOZSTj/ne2yUh
IHCjRIlFKcIU0x852RjZTGGlDdaQrkxk7MPrJr/gzg17r4vgJ3rMAk4/wmQDE7wJhg+fFV1xaMGiMqnXaFc5jd4FjCCIRAEmAO5aPE7lzsw0ZelHYJB0PCWscErqOJcsrbllGmhmzE/7mAXcPof544Wlqg6wTuORtvKQzjV2gVC+shaNMhc24v8iIloGmS3ogc7bD9sS884Oi0kEP89jFnDX++/hCtPVtT7kwaxOkZpmxQ/L9vgdj1r+NCtAwQ6/A9DXMXnBqZgoHDdXP7Wna/Id6PRCum7DiREqcg1UPw9Yp6MsLv/HwlM4Hp7WQ1/CGQhcgDsDNJtcgLsAdyYCZza7MO4C3JkInNnswrgLcGcicGazC+POBO7/AH5zPa/ivytzAAAAAElFTkSuQmCC" - ), - ] - ), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_chat_model_with_tools(setup_openai_mock): - model = OpenAILargeLanguageModel() - - result = model.invoke( - model="gpt-3.5-turbo", - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage( - content="what's the weather today in London?", - ), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - tools=[ - PromptMessageTool( - name="get_weather", - description="Determine weather in my location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. 
San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ), - PromptMessageTool( - name="get_stock_price", - description="Get the current stock price", - parameters={ - "type": "object", - "properties": {"symbol": {"type": "string", "description": "The stock symbol"}}, - "required": ["symbol"], - }, - ), - ], - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert isinstance(result.message, AssistantPromptMessage) - assert len(result.message.tool_calls) > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_stream_chat_model(setup_openai_mock): - model = OpenAILargeLanguageModel() - - result = model.invoke( - model="gpt-3.5-turbo", - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(result, Generator) - - for chunk in result: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - if chunk.delta.finish_reason is not None: - assert chunk.delta.usage is not None - assert chunk.delta.usage.completion_tokens > 0 - - -def test_get_num_tokens(): - model = OpenAILargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="gpt-3.5-turbo-instruct", - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - ) - - assert num_tokens == 3 - - num_tokens = model.get_num_tokens( - model="gpt-3.5-turbo", - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, - prompt_messages=[ - 
SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - tools=[ - PromptMessageTool( - name="get_weather", - description="Determine weather in my location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ), - ], - ) - - assert num_tokens == 72 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat", "remote"]], indirect=True) -def test_fine_tuned_models(setup_openai_mock): - model = OpenAILargeLanguageModel() - - remote_models = model.remote_models(credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}) - - if not remote_models: - assert isinstance(remote_models, list) - else: - assert isinstance(remote_models[0], AIModelEntity) - - for llm_model in remote_models: - if llm_model.model_type == ModelType.LLM: - break - - assert isinstance(llm_model, AIModelEntity) - - # test invoke - result = model.invoke( - model=llm_model.model, - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - - -def test__get_num_tokens_by_gpt2(): - model = OpenAILargeLanguageModel() - num_tokens = model._get_num_tokens_by_gpt2("Hello World!") - - assert num_tokens == 3 diff --git a/api/tests/integration_tests/model_runtime/openai/test_provider.py b/api/tests/integration_tests/model_runtime/openai/test_provider.py deleted file mode 100644 index 4d56cfcf3c25f0..00000000000000 --- a/api/tests/integration_tests/model_runtime/openai/test_provider.py +++ /dev/null @@ -1,17 +0,0 @@ -import os - -import pytest - -from 
core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.openai.openai import OpenAIProvider -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_validate_provider_credentials(setup_openai_mock): - provider = OpenAIProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials(credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/openai/test_speech2text.py b/api/tests/integration_tests/model_runtime/openai/test_speech2text.py deleted file mode 100644 index aa92c8b61fb684..00000000000000 --- a/api/tests/integration_tests/model_runtime/openai/test_speech2text.py +++ /dev/null @@ -1,45 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.openai.speech2text.speech2text import OpenAISpeech2TextModel -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -@pytest.mark.parametrize("setup_openai_mock", [["speech2text"]], indirect=True) -def test_validate_credentials(setup_openai_mock): - model = OpenAISpeech2TextModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="whisper-1", credentials={"openai_api_key": "invalid_key"}) - - model.validate_credentials(model="whisper-1", credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}) - - -@pytest.mark.parametrize("setup_openai_mock", [["speech2text"]], indirect=True) -def test_invoke_model(setup_openai_mock): - model = OpenAISpeech2TextModel() - - # Get the directory of the current file - current_dir = os.path.dirname(os.path.abspath(__file__)) - - # Get assets directory - assets_dir = 
os.path.join(os.path.dirname(current_dir), "assets") - - # Construct the path to the audio file - audio_file_path = os.path.join(assets_dir, "audio.mp3") - - # Open the file and get the file object - with open(audio_file_path, "rb") as audio_file: - file = audio_file - - result = model.invoke( - model="whisper-1", - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, - file=file, - user="abc-123", - ) - - assert isinstance(result, str) - assert result == "1, 2, 3, 4, 5, 6, 7, 8, 9, 10" diff --git a/api/tests/integration_tests/model_runtime/openai/test_text_embedding.py b/api/tests/integration_tests/model_runtime/openai/test_text_embedding.py deleted file mode 100644 index f5dd73f2d4cd60..00000000000000 --- a/api/tests/integration_tests/model_runtime/openai/test_text_embedding.py +++ /dev/null @@ -1,48 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.openai.text_embedding.text_embedding import OpenAITextEmbeddingModel -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -@pytest.mark.parametrize("setup_openai_mock", [["text_embedding"]], indirect=True) -def test_validate_credentials(setup_openai_mock): - model = OpenAITextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="text-embedding-ada-002", credentials={"openai_api_key": "invalid_key"}) - - model.validate_credentials( - model="text-embedding-ada-002", credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")} - ) - - -@pytest.mark.parametrize("setup_openai_mock", [["text_embedding"]], indirect=True) -def test_invoke_model(setup_openai_mock): - model = OpenAITextEmbeddingModel() - - result = model.invoke( - model="text-embedding-ada-002", - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY"), 
"openai_api_base": "https://api.openai.com"}, - texts=["hello", "world", " ".join(["long_text"] * 100), " ".join(["another_long_text"] * 100)], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 4 - assert result.usage.total_tokens == 2 - - -def test_get_num_tokens(): - model = OpenAITextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="text-embedding-ada-002", - credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY"), "openai_api_base": "https://api.openai.com"}, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/openai_api_compatible/__init__.py b/api/tests/integration_tests/model_runtime/openai_api_compatible/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/openai_api_compatible/test_llm.py b/api/tests/integration_tests/model_runtime/openai_api_compatible/test_llm.py deleted file mode 100644 index f2302ef05e06de..00000000000000 --- a/api/tests/integration_tests/model_runtime/openai_api_compatible/test_llm.py +++ /dev/null @@ -1,197 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - -""" -Using Together.ai's OpenAI-compatible API as testing endpoint -""" - - -def test_validate_credentials(): - model = OAIAPICompatLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", - 
credentials={"api_key": "invalid_key", "endpoint_url": "https://api.together.xyz/v1/", "mode": "chat"}, - ) - - model.validate_credentials( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", - credentials={ - "api_key": os.environ.get("TOGETHER_API_KEY"), - "endpoint_url": "https://api.together.xyz/v1/", - "mode": "chat", - }, - ) - - -def test_invoke_model(): - model = OAIAPICompatLargeLanguageModel() - - response = model.invoke( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", - credentials={ - "api_key": os.environ.get("TOGETHER_API_KEY"), - "endpoint_url": "https://api.together.xyz/v1/", - "mode": "completion", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = OAIAPICompatLargeLanguageModel() - - response = model.invoke( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", - credentials={ - "api_key": os.environ.get("TOGETHER_API_KEY"), - "endpoint_url": "https://api.together.xyz/v1/", - "mode": "chat", - "stream_mode_delimiter": "\\n\\n", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - - -def test_invoke_stream_model_without_delimiter(): - model = OAIAPICompatLargeLanguageModel() - - response = model.invoke( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", - 
credentials={ - "api_key": os.environ.get("TOGETHER_API_KEY"), - "endpoint_url": "https://api.together.xyz/v1/", - "mode": "chat", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - - -# using OpenAI's ChatGPT-3.5 as testing endpoint -def test_invoke_chat_model_with_tools(): - model = OAIAPICompatLargeLanguageModel() - - result = model.invoke( - model="gpt-3.5-turbo", - credentials={ - "api_key": os.environ.get("OPENAI_API_KEY"), - "endpoint_url": "https://api.openai.com/v1/", - "mode": "chat", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage( - content="what's the weather today in London?", - ), - ], - tools=[ - PromptMessageTool( - name="get_weather", - description="Determine weather in my location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. 
San Francisco, CA"}, - "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, - }, - "required": ["location"], - }, - ), - ], - model_parameters={"temperature": 0.0, "max_tokens": 1024}, - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert isinstance(result.message, AssistantPromptMessage) - assert len(result.message.tool_calls) > 0 - - -def test_get_num_tokens(): - model = OAIAPICompatLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", - credentials={"api_key": os.environ.get("OPENAI_API_KEY"), "endpoint_url": "https://api.openai.com/v1/"}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 21 diff --git a/api/tests/integration_tests/model_runtime/openai_api_compatible/test_speech2text.py b/api/tests/integration_tests/model_runtime/openai_api_compatible/test_speech2text.py deleted file mode 100644 index cf805eafff4968..00000000000000 --- a/api/tests/integration_tests/model_runtime/openai_api_compatible/test_speech2text.py +++ /dev/null @@ -1,50 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.openai_api_compatible.speech2text.speech2text import ( - OAICompatSpeech2TextModel, -) - - -def test_validate_credentials(): - model = OAICompatSpeech2TextModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="whisper-1", - credentials={"api_key": "invalid_key", "endpoint_url": "https://api.openai.com/v1/"}, - ) - - model.validate_credentials( - model="whisper-1", - credentials={"api_key": os.environ.get("OPENAI_API_KEY"), "endpoint_url": "https://api.openai.com/v1/"}, - ) - - -def test_invoke_model(): - model = OAICompatSpeech2TextModel() - - # Get the directory of the 
current file - current_dir = os.path.dirname(os.path.abspath(__file__)) - - # Get assets directory - assets_dir = os.path.join(os.path.dirname(current_dir), "assets") - - # Construct the path to the audio file - audio_file_path = os.path.join(assets_dir, "audio.mp3") - - # Open the file and get the file object - with open(audio_file_path, "rb") as audio_file: - file = audio_file - - result = model.invoke( - model="whisper-1", - credentials={"api_key": os.environ.get("OPENAI_API_KEY"), "endpoint_url": "https://api.openai.com/v1/"}, - file=file, - user="abc-123", - ) - - assert isinstance(result, str) - assert result == "1, 2, 3, 4, 5, 6, 7, 8, 9, 10" diff --git a/api/tests/integration_tests/model_runtime/openai_api_compatible/test_text_embedding.py b/api/tests/integration_tests/model_runtime/openai_api_compatible/test_text_embedding.py deleted file mode 100644 index 052b41605f6da2..00000000000000 --- a/api/tests/integration_tests/model_runtime/openai_api_compatible/test_text_embedding.py +++ /dev/null @@ -1,67 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.openai_api_compatible.text_embedding.text_embedding import ( - OAICompatEmbeddingModel, -) - -""" -Using OpenAI's API as testing endpoint -""" - - -def test_validate_credentials(): - model = OAICompatEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="text-embedding-ada-002", - credentials={"api_key": "invalid_key", "endpoint_url": "https://api.openai.com/v1/", "context_size": 8184}, - ) - - model.validate_credentials( - model="text-embedding-ada-002", - credentials={ - "api_key": os.environ.get("OPENAI_API_KEY"), - "endpoint_url": "https://api.openai.com/v1/", - "context_size": 8184, - }, - ) - - -def test_invoke_model(): - model = OAICompatEmbeddingModel() - - 
result = model.invoke( - model="text-embedding-ada-002", - credentials={ - "api_key": os.environ.get("OPENAI_API_KEY"), - "endpoint_url": "https://api.openai.com/v1/", - "context_size": 8184, - }, - texts=["hello", "world", " ".join(["long_text"] * 100), " ".join(["another_long_text"] * 100)], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 4 - assert result.usage.total_tokens == 502 - - -def test_get_num_tokens(): - model = OAICompatEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="text-embedding-ada-002", - credentials={ - "api_key": os.environ.get("OPENAI_API_KEY"), - "endpoint_url": "https://api.openai.com/v1/embeddings", - "context_size": 8184, - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/openllm/__init__.py b/api/tests/integration_tests/model_runtime/openllm/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/openllm/test_embedding.py b/api/tests/integration_tests/model_runtime/openllm/test_embedding.py deleted file mode 100644 index 14d47217af62c8..00000000000000 --- a/api/tests/integration_tests/model_runtime/openllm/test_embedding.py +++ /dev/null @@ -1,57 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.openllm.text_embedding.text_embedding import OpenLLMTextEmbeddingModel - - -def test_validate_credentials(): - model = OpenLLMTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="NOT IMPORTANT", - credentials={ - "server_url": "ww" + os.environ.get("OPENLLM_SERVER_URL"), - }, - ) - - model.validate_credentials( - model="NOT IMPORTANT", - credentials={ - "server_url": 
os.environ.get("OPENLLM_SERVER_URL"), - }, - ) - - -def test_invoke_model(): - model = OpenLLMTextEmbeddingModel() - - result = model.invoke( - model="NOT IMPORTANT", - credentials={ - "server_url": os.environ.get("OPENLLM_SERVER_URL"), - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens > 0 - - -def test_get_num_tokens(): - model = OpenLLMTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="NOT IMPORTANT", - credentials={ - "server_url": os.environ.get("OPENLLM_SERVER_URL"), - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/openllm/test_llm.py b/api/tests/integration_tests/model_runtime/openllm/test_llm.py deleted file mode 100644 index 35939e3cfe8bfd..00000000000000 --- a/api/tests/integration_tests/model_runtime/openllm/test_llm.py +++ /dev/null @@ -1,95 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.openllm.llm.llm import OpenLLMLargeLanguageModel - - -def test_validate_credentials_for_chat_model(): - model = OpenLLMLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="NOT IMPORTANT", - credentials={ - "server_url": "invalid_key", - }, - ) - - model.validate_credentials( - model="NOT IMPORTANT", - credentials={ - "server_url": os.environ.get("OPENLLM_SERVER_URL"), - }, - ) - - -def test_invoke_model(): - model = OpenLLMLargeLanguageModel() - - response = model.invoke( - model="NOT IMPORTANT", - credentials={ - "server_url": 
os.environ.get("OPENLLM_SERVER_URL"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_stream_model(): - model = OpenLLMLargeLanguageModel() - - response = model.invoke( - model="NOT IMPORTANT", - credentials={ - "server_url": os.environ.get("OPENLLM_SERVER_URL"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - }, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = OpenLLMLargeLanguageModel() - - response = model.get_num_tokens( - model="NOT IMPORTANT", - credentials={ - "server_url": os.environ.get("OPENLLM_SERVER_URL"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - tools=[], - ) - - assert isinstance(response, int) - assert response == 3 diff --git a/api/tests/integration_tests/model_runtime/openrouter/__init__.py b/api/tests/integration_tests/model_runtime/openrouter/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/openrouter/test_llm.py b/api/tests/integration_tests/model_runtime/openrouter/test_llm.py deleted file mode 100644 index ce4876a73a740e..00000000000000 --- a/api/tests/integration_tests/model_runtime/openrouter/test_llm.py +++ /dev/null @@ -1,104 +0,0 @@ -import os -from collections.abc import Generator 
- -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.openrouter.llm.llm import OpenRouterLargeLanguageModel - - -def test_validate_credentials(): - model = OpenRouterLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="mistralai/mixtral-8x7b-instruct", credentials={"api_key": "invalid_key", "mode": "chat"} - ) - - model.validate_credentials( - model="mistralai/mixtral-8x7b-instruct", - credentials={"api_key": os.environ.get("TOGETHER_API_KEY"), "mode": "chat"}, - ) - - -def test_invoke_model(): - model = OpenRouterLargeLanguageModel() - - response = model.invoke( - model="mistralai/mixtral-8x7b-instruct", - credentials={"api_key": os.environ.get("TOGETHER_API_KEY"), "mode": "completion"}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = OpenRouterLargeLanguageModel() - - response = model.invoke( - model="mistralai/mixtral-8x7b-instruct", - credentials={"api_key": os.environ.get("TOGETHER_API_KEY"), "mode": "chat"}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, 
Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - - -def test_get_num_tokens(): - model = OpenRouterLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="mistralai/mixtral-8x7b-instruct", - credentials={ - "api_key": os.environ.get("TOGETHER_API_KEY"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 21 diff --git a/api/tests/integration_tests/model_runtime/replicate/__init__.py b/api/tests/integration_tests/model_runtime/replicate/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/replicate/test_llm.py b/api/tests/integration_tests/model_runtime/replicate/test_llm.py deleted file mode 100644 index b940005b715760..00000000000000 --- a/api/tests/integration_tests/model_runtime/replicate/test_llm.py +++ /dev/null @@ -1,112 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.replicate.llm.llm import ReplicateLargeLanguageModel - - -def test_validate_credentials(): - model = ReplicateLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="meta/llama-2-13b-chat", - credentials={ - "replicate_api_token": "invalid_key", - "model_version": "f4e2de70d66816a838a89eeeb621910adffb0dd0baba3976c96980970978018d", - }, - ) - - model.validate_credentials( - 
model="meta/llama-2-13b-chat", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": "f4e2de70d66816a838a89eeeb621910adffb0dd0baba3976c96980970978018d", - }, - ) - - -def test_invoke_model(): - model = ReplicateLargeLanguageModel() - - response = model.invoke( - model="meta/llama-2-13b-chat", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": "f4e2de70d66816a838a89eeeb621910adffb0dd0baba3976c96980970978018d", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = ReplicateLargeLanguageModel() - - response = model.invoke( - model="mistralai/mixtral-8x7b-instruct-v0.1", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": "2b56576fcfbe32fa0526897d8385dd3fb3d36ba6fd0dbe033c72886b81ade93e", - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - - -def test_get_num_tokens(): - model = ReplicateLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": "2b56576fcfbe32fa0526897d8385dd3fb3d36ba6fd0dbe033c72886b81ade93e", - }, - 
prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 14 diff --git a/api/tests/integration_tests/model_runtime/replicate/test_text_embedding.py b/api/tests/integration_tests/model_runtime/replicate/test_text_embedding.py deleted file mode 100644 index 397715f2252083..00000000000000 --- a/api/tests/integration_tests/model_runtime/replicate/test_text_embedding.py +++ /dev/null @@ -1,136 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.replicate.text_embedding.text_embedding import ReplicateEmbeddingModel - - -def test_validate_credentials_one(): - model = ReplicateEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="replicate/all-mpnet-base-v2", - credentials={ - "replicate_api_token": "invalid_key", - "model_version": "b6b7585c9640cd7a9572c6e129c9549d79c9c31f0d3fdce7baac7c67ca38f305", - }, - ) - - model.validate_credentials( - model="replicate/all-mpnet-base-v2", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": "b6b7585c9640cd7a9572c6e129c9549d79c9c31f0d3fdce7baac7c67ca38f305", - }, - ) - - -def test_validate_credentials_two(): - model = ReplicateEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="nateraw/bge-large-en-v1.5", - credentials={ - "replicate_api_token": "invalid_key", - "model_version": "9cf9f015a9cb9c61d1a2610659cdac4a4ca222f2d3707a68517b18c198a9add1", - }, - ) - - model.validate_credentials( - model="nateraw/bge-large-en-v1.5", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": 
"9cf9f015a9cb9c61d1a2610659cdac4a4ca222f2d3707a68517b18c198a9add1", - }, - ) - - -def test_invoke_model_one(): - model = ReplicateEmbeddingModel() - - result = model.invoke( - model="nateraw/bge-large-en-v1.5", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": "9cf9f015a9cb9c61d1a2610659cdac4a4ca222f2d3707a68517b18c198a9add1", - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 2 - - -def test_invoke_model_two(): - model = ReplicateEmbeddingModel() - - result = model.invoke( - model="andreasjansson/clip-features", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": "75b33f253f7714a281ad3e9b28f63e3232d583716ef6718f2e46641077ea040a", - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 2 - - -def test_invoke_model_three(): - model = ReplicateEmbeddingModel() - - result = model.invoke( - model="replicate/all-mpnet-base-v2", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": "b6b7585c9640cd7a9572c6e129c9549d79c9c31f0d3fdce7baac7c67ca38f305", - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 2 - - -def test_invoke_model_four(): - model = ReplicateEmbeddingModel() - - result = model.invoke( - model="nateraw/jina-embeddings-v2-base-en", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": "f8367a1c072ba2bc28af549d1faeacfe9b88b3f0e475add7a75091dac507f79e", - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert 
result.usage.total_tokens == 2 - - -def test_get_num_tokens(): - model = ReplicateEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="nateraw/jina-embeddings-v2-base-en", - credentials={ - "replicate_api_token": os.environ.get("REPLICATE_API_KEY"), - "model_version": "f8367a1c072ba2bc28af549d1faeacfe9b88b3f0e475add7a75091dac507f79e", - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/sagemaker/__init__.py b/api/tests/integration_tests/model_runtime/sagemaker/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/sagemaker/test_provider.py b/api/tests/integration_tests/model_runtime/sagemaker/test_provider.py deleted file mode 100644 index 9f0b439d6c32a1..00000000000000 --- a/api/tests/integration_tests/model_runtime/sagemaker/test_provider.py +++ /dev/null @@ -1,15 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.sagemaker.sagemaker import SageMakerProvider - - -def test_validate_provider_credentials(): - provider = SageMakerProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials(credentials={}) diff --git a/api/tests/integration_tests/model_runtime/sagemaker/test_rerank.py b/api/tests/integration_tests/model_runtime/sagemaker/test_rerank.py deleted file mode 100644 index d5a6798a1ef735..00000000000000 --- a/api/tests/integration_tests/model_runtime/sagemaker/test_rerank.py +++ /dev/null @@ -1,55 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.rerank_entities import RerankResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.sagemaker.rerank.rerank import SageMakerRerankModel - - -def 
test_validate_credentials(): - model = SageMakerRerankModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="bge-m3-rerank-v2", - credentials={ - "aws_region": os.getenv("AWS_REGION"), - "aws_access_key": os.getenv("AWS_ACCESS_KEY"), - "aws_secret_access_key": os.getenv("AWS_SECRET_ACCESS_KEY"), - }, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. Its capital is Saipan.", - ], - score_threshold=0.8, - ) - - -def test_invoke_model(): - model = SageMakerRerankModel() - - result = model.invoke( - model="bge-m3-rerank-v2", - credentials={ - "aws_region": os.getenv("AWS_REGION"), - "aws_access_key": os.getenv("AWS_ACCESS_KEY"), - "aws_secret_access_key": os.getenv("AWS_SECRET_ACCESS_KEY"), - }, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. 
Its capital is Saipan.", - ], - score_threshold=0.8, - ) - - assert isinstance(result, RerankResult) - assert len(result.docs) == 1 - assert result.docs[0].index == 1 - assert result.docs[0].score >= 0.8 diff --git a/api/tests/integration_tests/model_runtime/sagemaker/test_text_embedding.py b/api/tests/integration_tests/model_runtime/sagemaker/test_text_embedding.py deleted file mode 100644 index e4e404c7a86ae6..00000000000000 --- a/api/tests/integration_tests/model_runtime/sagemaker/test_text_embedding.py +++ /dev/null @@ -1,33 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.sagemaker.text_embedding.text_embedding import SageMakerEmbeddingModel - - -def test_validate_credentials(): - model = SageMakerEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="bge-m3", credentials={}) - - model.validate_credentials(model="bge-m3-embedding", credentials={}) - - -def test_invoke_model(): - model = SageMakerEmbeddingModel() - - result = model.invoke(model="bge-m3-embedding", credentials={}, texts=["hello", "world"], user="abc-123") - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - - -def test_get_num_tokens(): - model = SageMakerEmbeddingModel() - - num_tokens = model.get_num_tokens(model="bge-m3-embedding", credentials={}, texts=[]) - - assert num_tokens == 0 diff --git a/api/tests/integration_tests/model_runtime/siliconflow/__init__.py b/api/tests/integration_tests/model_runtime/siliconflow/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/siliconflow/test_llm.py b/api/tests/integration_tests/model_runtime/siliconflow/test_llm.py deleted file mode 100644 index f47c9c558808af..00000000000000 --- 
a/api/tests/integration_tests/model_runtime/siliconflow/test_llm.py +++ /dev/null @@ -1,73 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.siliconflow.llm.llm import SiliconflowLargeLanguageModel - - -def test_validate_credentials(): - model = SiliconflowLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="deepseek-ai/DeepSeek-V2-Chat", credentials={"api_key": "invalid_key"}) - - model.validate_credentials(model="deepseek-ai/DeepSeek-V2-Chat", credentials={"api_key": os.environ.get("API_KEY")}) - - -def test_invoke_model(): - model = SiliconflowLargeLanguageModel() - - response = model.invoke( - model="deepseek-ai/DeepSeek-V2-Chat", - credentials={"api_key": os.environ.get("API_KEY")}, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={"temperature": 0.5, "max_tokens": 10}, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = SiliconflowLargeLanguageModel() - - response = model.invoke( - model="deepseek-ai/DeepSeek-V2-Chat", - credentials={"api_key": os.environ.get("API_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.5, "max_tokens": 100, "seed": 1234}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - 
assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = SiliconflowLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="deepseek-ai/DeepSeek-V2-Chat", - credentials={"api_key": os.environ.get("API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 12 diff --git a/api/tests/integration_tests/model_runtime/siliconflow/test_provider.py b/api/tests/integration_tests/model_runtime/siliconflow/test_provider.py deleted file mode 100644 index 8f70210b7a2ace..00000000000000 --- a/api/tests/integration_tests/model_runtime/siliconflow/test_provider.py +++ /dev/null @@ -1,15 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.siliconflow.siliconflow import SiliconflowProvider - - -def test_validate_provider_credentials(): - provider = SiliconflowProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials(credentials={"api_key": os.environ.get("API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/siliconflow/test_rerank.py b/api/tests/integration_tests/model_runtime/siliconflow/test_rerank.py deleted file mode 100644 index ad794613f91013..00000000000000 --- a/api/tests/integration_tests/model_runtime/siliconflow/test_rerank.py +++ /dev/null @@ -1,47 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.rerank_entities import RerankResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.siliconflow.rerank.rerank import SiliconflowRerankModel - - -def test_validate_credentials(): - model = SiliconflowRerankModel() - - with 
pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="BAAI/bge-reranker-v2-m3", - credentials={"api_key": "invalid_key"}, - ) - - model.validate_credentials( - model="BAAI/bge-reranker-v2-m3", - credentials={ - "api_key": os.environ.get("API_KEY"), - }, - ) - - -def test_invoke_model(): - model = SiliconflowRerankModel() - - result = model.invoke( - model="BAAI/bge-reranker-v2-m3", - credentials={ - "api_key": os.environ.get("API_KEY"), - }, - query="Who is Kasumi?", - docs=[ - 'Kasumi is a girl\'s name of Japanese origin meaning "mist".', - "Her music is a kawaii bass, a mix of future bass, pop, and kawaii music ", - "and she leads a team named PopiParty.", - ], - score_threshold=0.8, - ) - - assert isinstance(result, RerankResult) - assert len(result.docs) == 1 - assert result.docs[0].index == 0 - assert result.docs[0].score >= 0.8 diff --git a/api/tests/integration_tests/model_runtime/siliconflow/test_speech2text.py b/api/tests/integration_tests/model_runtime/siliconflow/test_speech2text.py deleted file mode 100644 index 0502ba5ab404bc..00000000000000 --- a/api/tests/integration_tests/model_runtime/siliconflow/test_speech2text.py +++ /dev/null @@ -1,45 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.siliconflow.speech2text.speech2text import SiliconflowSpeech2TextModel - - -def test_validate_credentials(): - model = SiliconflowSpeech2TextModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="iic/SenseVoiceSmall", - credentials={"api_key": "invalid_key"}, - ) - - model.validate_credentials( - model="iic/SenseVoiceSmall", - credentials={"api_key": os.environ.get("API_KEY")}, - ) - - -def test_invoke_model(): - model = SiliconflowSpeech2TextModel() - - # Get the directory of the current file - current_dir = os.path.dirname(os.path.abspath(__file__)) - - # Get assets directory - 
assets_dir = os.path.join(os.path.dirname(current_dir), "assets") - - # Construct the path to the audio file - audio_file_path = os.path.join(assets_dir, "audio.mp3") - - # Open the file and get the file object - with open(audio_file_path, "rb") as audio_file: - file = audio_file - - result = model.invoke( - model="iic/SenseVoiceSmall", credentials={"api_key": os.environ.get("API_KEY")}, file=file - ) - - assert isinstance(result, str) - assert result == "1,2,3,4,5,6,7,8,9,10." diff --git a/api/tests/integration_tests/model_runtime/siliconflow/test_text_embedding.py b/api/tests/integration_tests/model_runtime/siliconflow/test_text_embedding.py deleted file mode 100644 index ab143c10613a88..00000000000000 --- a/api/tests/integration_tests/model_runtime/siliconflow/test_text_embedding.py +++ /dev/null @@ -1,60 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.siliconflow.text_embedding.text_embedding import ( - SiliconflowTextEmbeddingModel, -) - - -def test_validate_credentials(): - model = SiliconflowTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="BAAI/bge-large-zh-v1.5", - credentials={"api_key": "invalid_key"}, - ) - - model.validate_credentials( - model="BAAI/bge-large-zh-v1.5", - credentials={ - "api_key": os.environ.get("API_KEY"), - }, - ) - - -def test_invoke_model(): - model = SiliconflowTextEmbeddingModel() - - result = model.invoke( - model="BAAI/bge-large-zh-v1.5", - credentials={ - "api_key": os.environ.get("API_KEY"), - }, - texts=[ - "hello", - "world", - ], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens == 6 - - -def test_get_num_tokens(): - model = SiliconflowTextEmbeddingModel() - - num_tokens = 
model.get_num_tokens( - model="BAAI/bge-large-zh-v1.5", - credentials={ - "api_key": os.environ.get("API_KEY"), - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/spark/__init__.py b/api/tests/integration_tests/model_runtime/spark/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/spark/test_llm.py b/api/tests/integration_tests/model_runtime/spark/test_llm.py deleted file mode 100644 index 4fe2fd8c0a3eac..00000000000000 --- a/api/tests/integration_tests/model_runtime/spark/test_llm.py +++ /dev/null @@ -1,92 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.spark.llm.llm import SparkLargeLanguageModel - - -def test_validate_credentials(): - model = SparkLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="spark-1.5", credentials={"app_id": "invalid_key"}) - - model.validate_credentials( - model="spark-1.5", - credentials={ - "app_id": os.environ.get("SPARK_APP_ID"), - "api_secret": os.environ.get("SPARK_API_SECRET"), - "api_key": os.environ.get("SPARK_API_KEY"), - }, - ) - - -def test_invoke_model(): - model = SparkLargeLanguageModel() - - response = model.invoke( - model="spark-1.5", - credentials={ - "app_id": os.environ.get("SPARK_APP_ID"), - "api_secret": os.environ.get("SPARK_API_SECRET"), - "api_key": os.environ.get("SPARK_API_KEY"), - }, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={"temperature": 0.5, "max_tokens": 10}, - stop=["How"], - stream=False, - 
user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = SparkLargeLanguageModel() - - response = model.invoke( - model="spark-1.5", - credentials={ - "app_id": os.environ.get("SPARK_APP_ID"), - "api_secret": os.environ.get("SPARK_API_SECRET"), - "api_key": os.environ.get("SPARK_API_KEY"), - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.5, "max_tokens": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = SparkLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="spark-1.5", - credentials={ - "app_id": os.environ.get("SPARK_APP_ID"), - "api_secret": os.environ.get("SPARK_API_SECRET"), - "api_key": os.environ.get("SPARK_API_KEY"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 14 diff --git a/api/tests/integration_tests/model_runtime/spark/test_provider.py b/api/tests/integration_tests/model_runtime/spark/test_provider.py deleted file mode 100644 index 9da0df6bb3d556..00000000000000 --- a/api/tests/integration_tests/model_runtime/spark/test_provider.py +++ /dev/null @@ -1,21 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.spark.spark import SparkProvider - - -def test_validate_provider_credentials(): - provider = SparkProvider() - - with pytest.raises(CredentialsValidateFailedError): - 
provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials( - credentials={ - "app_id": os.environ.get("SPARK_APP_ID"), - "api_secret": os.environ.get("SPARK_API_SECRET"), - "api_key": os.environ.get("SPARK_API_KEY"), - } - ) diff --git a/api/tests/integration_tests/model_runtime/stepfun/__init__.py b/api/tests/integration_tests/model_runtime/stepfun/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/stepfun/test_llm.py b/api/tests/integration_tests/model_runtime/stepfun/test_llm.py deleted file mode 100644 index c03b1bae1f1574..00000000000000 --- a/api/tests/integration_tests/model_runtime/stepfun/test_llm.py +++ /dev/null @@ -1,125 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import AIModelEntity, ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.stepfun.llm.llm import StepfunLargeLanguageModel - - -def test_validate_credentials(): - model = StepfunLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="step-1-8k", credentials={"api_key": "invalid_key"}) - - model.validate_credentials(model="step-1-8k", credentials={"api_key": os.environ.get("STEPFUN_API_KEY")}) - - -def test_invoke_model(): - model = StepfunLargeLanguageModel() - - response = model.invoke( - model="step-1-8k", - credentials={"api_key": os.environ.get("STEPFUN_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - 
model_parameters={"temperature": 0.9, "top_p": 0.7}, - stop=["Hi"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = StepfunLargeLanguageModel() - - response = model.invoke( - model="step-1-8k", - credentials={"api_key": os.environ.get("STEPFUN_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.9, "top_p": 0.7}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_customizable_model_schema(): - model = StepfunLargeLanguageModel() - - schema = model.get_customizable_model_schema( - model="step-1-8k", credentials={"api_key": os.environ.get("STEPFUN_API_KEY")} - ) - assert isinstance(schema, AIModelEntity) - - -def test_invoke_chat_model_with_tools(): - model = StepfunLargeLanguageModel() - - result = model.invoke( - model="step-1-8k", - credentials={"api_key": os.environ.get("STEPFUN_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage( - content="what's the weather today in Shanghai?", - ), - ], - model_parameters={"temperature": 0.9, "max_tokens": 100}, - tools=[ - PromptMessageTool( - name="get_weather", - description="Determine weather in my location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. 
San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ), - PromptMessageTool( - name="get_stock_price", - description="Get the current stock price", - parameters={ - "type": "object", - "properties": {"symbol": {"type": "string", "description": "The stock symbol"}}, - "required": ["symbol"], - }, - ), - ], - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert isinstance(result.message, AssistantPromptMessage) - assert len(result.message.tool_calls) > 0 diff --git a/api/tests/integration_tests/model_runtime/test_tiktoken.py b/api/tests/integration_tests/model_runtime/test_tiktoken.py new file mode 100644 index 00000000000000..f92d9dc6037fd9 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/test_tiktoken.py @@ -0,0 +1,11 @@ +import os + +import tiktoken + +from core.model_runtime.model_providers.__base.tokenizers.gpt2_tokenzier import GPT2Tokenizer + + +def test_tiktoken(): + os.environ["TIKTOKEN_CACHE_DIR"] = "/tmp/.tiktoken_cache" + GPT2Tokenizer.get_num_tokens("Hello, world!") + assert tiktoken.registry.ENCODING_CONSTRUCTORS is not None diff --git a/api/tests/integration_tests/model_runtime/togetherai/__init__.py b/api/tests/integration_tests/model_runtime/togetherai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/togetherai/test_llm.py b/api/tests/integration_tests/model_runtime/togetherai/test_llm.py deleted file mode 100644 index 06ebc2a82dc754..00000000000000 --- a/api/tests/integration_tests/model_runtime/togetherai/test_llm.py +++ /dev/null @@ -1,104 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) 
-from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.togetherai.llm.llm import TogetherAILargeLanguageModel - - -def test_validate_credentials(): - model = TogetherAILargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", credentials={"api_key": "invalid_key", "mode": "chat"} - ) - - model.validate_credentials( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", - credentials={"api_key": os.environ.get("TOGETHER_API_KEY"), "mode": "chat"}, - ) - - -def test_invoke_model(): - model = TogetherAILargeLanguageModel() - - response = model.invoke( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", - credentials={"api_key": os.environ.get("TOGETHER_API_KEY"), "mode": "completion"}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = TogetherAILargeLanguageModel() - - response = model.invoke( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", - credentials={"api_key": os.environ.get("TOGETHER_API_KEY"), "mode": "chat"}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Who are you?"), - ], - model_parameters={ - "temperature": 1.0, - "top_k": 2, - "top_p": 0.5, - }, - stop=["How"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - - -def test_get_num_tokens(): - model = 
TogetherAILargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="mistralai/Mixtral-8x7B-Instruct-v0.1", - credentials={ - "api_key": os.environ.get("TOGETHER_API_KEY"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 21 diff --git a/api/tests/integration_tests/model_runtime/tongyi/__init__.py b/api/tests/integration_tests/model_runtime/tongyi/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/tongyi/test_llm.py b/api/tests/integration_tests/model_runtime/tongyi/test_llm.py deleted file mode 100644 index 61650735f2ad3f..00000000000000 --- a/api/tests/integration_tests/model_runtime/tongyi/test_llm.py +++ /dev/null @@ -1,75 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.tongyi.llm.llm import TongyiLargeLanguageModel - - -def test_validate_credentials(): - model = TongyiLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="qwen-turbo", credentials={"dashscope_api_key": "invalid_key"}) - - model.validate_credentials( - model="qwen-turbo", credentials={"dashscope_api_key": os.environ.get("TONGYI_DASHSCOPE_API_KEY")} - ) - - -def test_invoke_model(): - model = TongyiLargeLanguageModel() - - response = model.invoke( - model="qwen-turbo", - credentials={"dashscope_api_key": os.environ.get("TONGYI_DASHSCOPE_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Who are you?")], - 
model_parameters={"temperature": 0.5, "max_tokens": 10}, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = TongyiLargeLanguageModel() - - response = model.invoke( - model="qwen-turbo", - credentials={"dashscope_api_key": os.environ.get("TONGYI_DASHSCOPE_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.5, "max_tokens": 100, "seed": 1234}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = TongyiLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="qwen-turbo", - credentials={"dashscope_api_key": os.environ.get("TONGYI_DASHSCOPE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 12 diff --git a/api/tests/integration_tests/model_runtime/tongyi/test_provider.py b/api/tests/integration_tests/model_runtime/tongyi/test_provider.py deleted file mode 100644 index 0bc96c84e73195..00000000000000 --- a/api/tests/integration_tests/model_runtime/tongyi/test_provider.py +++ /dev/null @@ -1,17 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.tongyi.tongyi import TongyiProvider - - -def test_validate_provider_credentials(): - provider = TongyiProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - 
provider.validate_provider_credentials( - credentials={"dashscope_api_key": os.environ.get("TONGYI_DASHSCOPE_API_KEY")} - ) diff --git a/api/tests/integration_tests/model_runtime/tongyi/test_response_format.py b/api/tests/integration_tests/model_runtime/tongyi/test_response_format.py deleted file mode 100644 index 905e7907fde5a8..00000000000000 --- a/api/tests/integration_tests/model_runtime/tongyi/test_response_format.py +++ /dev/null @@ -1,80 +0,0 @@ -import json -import os -from collections.abc import Generator - -from core.model_runtime.entities.llm_entities import LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, UserPromptMessage -from core.model_runtime.model_providers.tongyi.llm.llm import TongyiLargeLanguageModel - - -def test_invoke_model_with_json_response(): - """ - Test the invocation of a model with JSON response. - """ - model_list = [ - "qwen-max-0403", - "qwen-max-1201", - "qwen-max-longcontext", - "qwen-max", - "qwen-plus-chat", - "qwen-plus", - "qwen-turbo-chat", - "qwen-turbo", - ] - for model_name in model_list: - print("testing model: ", model_name) - invoke_model_with_json_response(model_name) - - -def invoke_model_with_json_response(model_name="qwen-max-0403"): - """ - Method to invoke the model with JSON response format. - Args: - model_name (str): The name of the model to invoke. Defaults to "qwen-max-0403". 
- - Returns: - None - """ - model = TongyiLargeLanguageModel() - - response = model.invoke( - model=model_name, - credentials={"dashscope_api_key": os.environ.get("TONGYI_DASHSCOPE_API_KEY")}, - prompt_messages=[ - UserPromptMessage(content='output json data with format `{"data": "test", "code": 200, "msg": "success"}') - ], - model_parameters={ - "temperature": 0.5, - "max_tokens": 50, - "response_format": "JSON", - }, - stream=True, - user="abc-123", - ) - print("=====================================") - print(response) - assert isinstance(response, Generator) - output = "" - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - output += chunk.delta.message.content - assert is_json(output) - - -def is_json(s): - """ - Check if a string is a valid JSON. - - Args: - s (str): The string to check. - - Returns: - bool: True if the string is a valid JSON, False otherwise. 
- """ - try: - json.loads(s) - except ValueError: - return False - return True diff --git a/api/tests/integration_tests/model_runtime/upstage/__init__.py b/api/tests/integration_tests/model_runtime/upstage/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/upstage/test_llm.py b/api/tests/integration_tests/model_runtime/upstage/test_llm.py deleted file mode 100644 index bc7517acbe2601..00000000000000 --- a/api/tests/integration_tests/model_runtime/upstage/test_llm.py +++ /dev/null @@ -1,186 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import AIModelEntity, ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.model_providers.upstage.llm.llm import UpstageLargeLanguageModel - -"""FOR MOCK FIXTURES, DO NOT REMOVE""" -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -def test_predefined_models(): - model = UpstageLargeLanguageModel() - model_schemas = model.predefined_models() - - assert len(model_schemas) >= 1 - assert isinstance(model_schemas[0], AIModelEntity) - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_validate_credentials_for_chat_model(setup_openai_mock): - model = UpstageLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - # model name to gpt-3.5-turbo because of mocking - model.validate_credentials(model="gpt-3.5-turbo", credentials={"upstage_api_key": "invalid_key"}) - - 
model.validate_credentials( - model="solar-1-mini-chat", credentials={"upstage_api_key": os.environ.get("UPSTAGE_API_KEY")} - ) - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_chat_model(setup_openai_mock): - model = UpstageLargeLanguageModel() - - result = model.invoke( - model="solar-1-mini-chat", - credentials={"upstage_api_key": os.environ.get("UPSTAGE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={ - "temperature": 0.0, - "top_p": 1.0, - "presence_penalty": 0.0, - "frequency_penalty": 0.0, - "max_tokens": 10, - }, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert len(result.message.content) > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_chat_model_with_tools(setup_openai_mock): - model = UpstageLargeLanguageModel() - - result = model.invoke( - model="solar-1-mini-chat", - credentials={"upstage_api_key": os.environ.get("UPSTAGE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage( - content="what's the weather today in London?", - ), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - tools=[ - PromptMessageTool( - name="get_weather", - description="Determine weather in my location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. 
San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ), - PromptMessageTool( - name="get_stock_price", - description="Get the current stock price", - parameters={ - "type": "object", - "properties": {"symbol": {"type": "string", "description": "The stock symbol"}}, - "required": ["symbol"], - }, - ), - ], - stream=False, - user="abc-123", - ) - - assert isinstance(result, LLMResult) - assert isinstance(result.message, AssistantPromptMessage) - assert len(result.message.tool_calls) > 0 - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_invoke_stream_chat_model(setup_openai_mock): - model = UpstageLargeLanguageModel() - - result = model.invoke( - model="solar-1-mini-chat", - credentials={"upstage_api_key": os.environ.get("UPSTAGE_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={"temperature": 0.0, "max_tokens": 100}, - stream=True, - user="abc-123", - ) - - assert isinstance(result, Generator) - - for chunk in result: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - if chunk.delta.finish_reason is not None: - assert chunk.delta.usage is not None - assert chunk.delta.usage.completion_tokens > 0 - - -def test_get_num_tokens(): - model = UpstageLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="solar-1-mini-chat", - credentials={"upstage_api_key": os.environ.get("UPSTAGE_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - ) - - assert num_tokens == 13 - - num_tokens = model.get_num_tokens( - model="solar-1-mini-chat", - credentials={"upstage_api_key": os.environ.get("UPSTAGE_API_KEY")}, - prompt_messages=[ 
- SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - tools=[ - PromptMessageTool( - name="get_weather", - description="Determine weather in my location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ), - ], - ) - - assert num_tokens == 106 diff --git a/api/tests/integration_tests/model_runtime/upstage/test_provider.py b/api/tests/integration_tests/model_runtime/upstage/test_provider.py deleted file mode 100644 index 9d83779aa00a49..00000000000000 --- a/api/tests/integration_tests/model_runtime/upstage/test_provider.py +++ /dev/null @@ -1,17 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.upstage.upstage import UpstageProvider -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_validate_provider_credentials(setup_openai_mock): - provider = UpstageProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials(credentials={"upstage_api_key": os.environ.get("UPSTAGE_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/upstage/test_text_embedding.py b/api/tests/integration_tests/model_runtime/upstage/test_text_embedding.py deleted file mode 100644 index 8c83172fa3ff7e..00000000000000 --- a/api/tests/integration_tests/model_runtime/upstage/test_text_embedding.py +++ /dev/null @@ -1,54 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import 
CredentialsValidateFailedError -from core.model_runtime.model_providers.upstage.text_embedding.text_embedding import UpstageTextEmbeddingModel -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -@pytest.mark.parametrize("setup_openai_mock", [["text_embedding"]], indirect=True) -def test_validate_credentials(setup_openai_mock): - model = UpstageTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="solar-embedding-1-large-passage", credentials={"upstage_api_key": "invalid_key"} - ) - - model.validate_credentials( - model="solar-embedding-1-large-passage", credentials={"upstage_api_key": os.environ.get("UPSTAGE_API_KEY")} - ) - - -@pytest.mark.parametrize("setup_openai_mock", [["text_embedding"]], indirect=True) -def test_invoke_model(setup_openai_mock): - model = UpstageTextEmbeddingModel() - - result = model.invoke( - model="solar-embedding-1-large-passage", - credentials={ - "upstage_api_key": os.environ.get("UPSTAGE_API_KEY"), - }, - texts=["hello", "world", " ".join(["long_text"] * 100), " ".join(["another_long_text"] * 100)], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 4 - assert result.usage.total_tokens == 2 - - -def test_get_num_tokens(): - model = UpstageTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="solar-embedding-1-large-passage", - credentials={ - "upstage_api_key": os.environ.get("UPSTAGE_API_KEY"), - }, - texts=["hello", "world"], - ) - - assert num_tokens == 5 diff --git a/api/tests/integration_tests/model_runtime/volcengine_maas/__init__.py b/api/tests/integration_tests/model_runtime/volcengine_maas/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/volcengine_maas/test_embedding.py b/api/tests/integration_tests/model_runtime/volcengine_maas/test_embedding.py deleted file mode 100644 index 
f831c063a42630..00000000000000 --- a/api/tests/integration_tests/model_runtime/volcengine_maas/test_embedding.py +++ /dev/null @@ -1,79 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.volcengine_maas.text_embedding.text_embedding import ( - VolcengineMaaSTextEmbeddingModel, -) - - -def test_validate_credentials(): - model = VolcengineMaaSTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="NOT IMPORTANT", - credentials={ - "api_endpoint_host": "maas-api.ml-platform-cn-beijing.volces.com", - "volc_region": "cn-beijing", - "volc_access_key_id": "INVALID", - "volc_secret_access_key": "INVALID", - "endpoint_id": "INVALID", - "base_model_name": "Doubao-embedding", - }, - ) - - model.validate_credentials( - model="NOT IMPORTANT", - credentials={ - "api_endpoint_host": "maas-api.ml-platform-cn-beijing.volces.com", - "volc_region": "cn-beijing", - "volc_access_key_id": os.environ.get("VOLC_API_KEY"), - "volc_secret_access_key": os.environ.get("VOLC_SECRET_KEY"), - "endpoint_id": os.environ.get("VOLC_EMBEDDING_ENDPOINT_ID"), - "base_model_name": "Doubao-embedding", - }, - ) - - -def test_invoke_model(): - model = VolcengineMaaSTextEmbeddingModel() - - result = model.invoke( - model="NOT IMPORTANT", - credentials={ - "api_endpoint_host": "maas-api.ml-platform-cn-beijing.volces.com", - "volc_region": "cn-beijing", - "volc_access_key_id": os.environ.get("VOLC_API_KEY"), - "volc_secret_access_key": os.environ.get("VOLC_SECRET_KEY"), - "endpoint_id": os.environ.get("VOLC_EMBEDDING_ENDPOINT_ID"), - "base_model_name": "Doubao-embedding", - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens > 0 - - -def 
test_get_num_tokens(): - model = VolcengineMaaSTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="NOT IMPORTANT", - credentials={ - "api_endpoint_host": "maas-api.ml-platform-cn-beijing.volces.com", - "volc_region": "cn-beijing", - "volc_access_key_id": os.environ.get("VOLC_API_KEY"), - "volc_secret_access_key": os.environ.get("VOLC_SECRET_KEY"), - "endpoint_id": os.environ.get("VOLC_EMBEDDING_ENDPOINT_ID"), - "base_model_name": "Doubao-embedding", - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/volcengine_maas/test_llm.py b/api/tests/integration_tests/model_runtime/volcengine_maas/test_llm.py deleted file mode 100644 index 8ff9c414046e7d..00000000000000 --- a/api/tests/integration_tests/model_runtime/volcengine_maas/test_llm.py +++ /dev/null @@ -1,118 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.volcengine_maas.llm.llm import VolcengineMaaSLargeLanguageModel - - -def test_validate_credentials_for_chat_model(): - model = VolcengineMaaSLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="NOT IMPORTANT", - credentials={ - "api_endpoint_host": "maas-api.ml-platform-cn-beijing.volces.com", - "volc_region": "cn-beijing", - "volc_access_key_id": "INVALID", - "volc_secret_access_key": "INVALID", - "endpoint_id": "INVALID", - }, - ) - - model.validate_credentials( - model="NOT IMPORTANT", - credentials={ - "api_endpoint_host": "maas-api.ml-platform-cn-beijing.volces.com", - "volc_region": "cn-beijing", - "volc_access_key_id": os.environ.get("VOLC_API_KEY"), - 
"volc_secret_access_key": os.environ.get("VOLC_SECRET_KEY"), - "endpoint_id": os.environ.get("VOLC_MODEL_ENDPOINT_ID"), - }, - ) - - -def test_invoke_model(): - model = VolcengineMaaSLargeLanguageModel() - - response = model.invoke( - model="NOT IMPORTANT", - credentials={ - "api_endpoint_host": "maas-api.ml-platform-cn-beijing.volces.com", - "volc_region": "cn-beijing", - "volc_access_key_id": os.environ.get("VOLC_API_KEY"), - "volc_secret_access_key": os.environ.get("VOLC_SECRET_KEY"), - "endpoint_id": os.environ.get("VOLC_MODEL_ENDPOINT_ID"), - "base_model_name": "Skylark2-pro-4k", - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_stream_model(): - model = VolcengineMaaSLargeLanguageModel() - - response = model.invoke( - model="NOT IMPORTANT", - credentials={ - "api_endpoint_host": "maas-api.ml-platform-cn-beijing.volces.com", - "volc_region": "cn-beijing", - "volc_access_key_id": os.environ.get("VOLC_API_KEY"), - "volc_secret_access_key": os.environ.get("VOLC_SECRET_KEY"), - "endpoint_id": os.environ.get("VOLC_MODEL_ENDPOINT_ID"), - "base_model_name": "Skylark2-pro-4k", - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "top_k": 1, - }, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = VolcengineMaaSLargeLanguageModel() - - 
response = model.get_num_tokens( - model="NOT IMPORTANT", - credentials={ - "api_endpoint_host": "maas-api.ml-platform-cn-beijing.volces.com", - "volc_region": "cn-beijing", - "volc_access_key_id": os.environ.get("VOLC_API_KEY"), - "volc_secret_access_key": os.environ.get("VOLC_SECRET_KEY"), - "endpoint_id": os.environ.get("VOLC_MODEL_ENDPOINT_ID"), - "base_model_name": "Skylark2-pro-4k", - }, - prompt_messages=[UserPromptMessage(content="Hello World!")], - tools=[], - ) - - assert isinstance(response, int) - assert response == 6 diff --git a/api/tests/integration_tests/model_runtime/wenxin/__init__.py b/api/tests/integration_tests/model_runtime/wenxin/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/wenxin/test_embedding.py b/api/tests/integration_tests/model_runtime/wenxin/test_embedding.py deleted file mode 100644 index ac38340aecf7d2..00000000000000 --- a/api/tests/integration_tests/model_runtime/wenxin/test_embedding.py +++ /dev/null @@ -1,69 +0,0 @@ -import os -from time import sleep - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.model_providers.wenxin.text_embedding.text_embedding import WenxinTextEmbeddingModel - - -def test_invoke_embedding_v1(): - sleep(3) - model = WenxinTextEmbeddingModel() - - response = model.invoke( - model="embedding-v1", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - texts=["hello", "你好", "xxxxx"], - user="abc-123", - ) - - assert isinstance(response, TextEmbeddingResult) - assert len(response.embeddings) == 3 - assert isinstance(response.embeddings[0], list) - - -def test_invoke_embedding_bge_large_en(): - sleep(3) - model = WenxinTextEmbeddingModel() - - response = model.invoke( - model="bge-large-en", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - 
texts=["hello", "你好", "xxxxx"], - user="abc-123", - ) - - assert isinstance(response, TextEmbeddingResult) - assert len(response.embeddings) == 3 - assert isinstance(response.embeddings[0], list) - - -def test_invoke_embedding_bge_large_zh(): - sleep(3) - model = WenxinTextEmbeddingModel() - - response = model.invoke( - model="bge-large-zh", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - texts=["hello", "你好", "xxxxx"], - user="abc-123", - ) - - assert isinstance(response, TextEmbeddingResult) - assert len(response.embeddings) == 3 - assert isinstance(response.embeddings[0], list) - - -def test_invoke_embedding_tao_8k(): - sleep(3) - model = WenxinTextEmbeddingModel() - - response = model.invoke( - model="tao-8k", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - texts=["hello", "你好", "xxxxx"], - user="abc-123", - ) - - assert isinstance(response, TextEmbeddingResult) - assert len(response.embeddings) == 3 - assert isinstance(response.embeddings[0], list) diff --git a/api/tests/integration_tests/model_runtime/wenxin/test_llm.py b/api/tests/integration_tests/model_runtime/wenxin/test_llm.py deleted file mode 100644 index e2e58f15e025d8..00000000000000 --- a/api/tests/integration_tests/model_runtime/wenxin/test_llm.py +++ /dev/null @@ -1,214 +0,0 @@ -import os -from collections.abc import Generator -from time import sleep - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.entities.model_entities import AIModelEntity -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.wenxin.llm.llm import ErnieBotLargeLanguageModel - - -def test_predefined_models(): - model = 
ErnieBotLargeLanguageModel() - model_schemas = model.predefined_models() - assert len(model_schemas) >= 1 - assert isinstance(model_schemas[0], AIModelEntity) - - -def test_validate_credentials_for_chat_model(): - sleep(3) - model = ErnieBotLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="ernie-bot", credentials={"api_key": "invalid_key", "secret_key": "invalid_key"} - ) - - model.validate_credentials( - model="ernie-bot", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - ) - - -def test_invoke_model_ernie_bot(): - sleep(3) - model = ErnieBotLargeLanguageModel() - - response = model.invoke( - model="ernie-bot", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_model_ernie_bot_turbo(): - sleep(3) - model = ErnieBotLargeLanguageModel() - - response = model.invoke( - model="ernie-bot-turbo", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_model_ernie_8k(): - sleep(3) - model = ErnieBotLargeLanguageModel() - - response = model.invoke( - model="ernie-bot-8k", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - 
prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_model_ernie_bot_4(): - sleep(3) - model = ErnieBotLargeLanguageModel() - - response = model.invoke( - model="ernie-bot-4", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -def test_invoke_stream_model(): - sleep(3) - model = ErnieBotLargeLanguageModel() - - response = model.invoke( - model="ernie-3.5-8k", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_invoke_model_with_system(): - sleep(3) - model = ErnieBotLargeLanguageModel() - - response = model.invoke( - model="ernie-bot", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - prompt_messages=[SystemPromptMessage(content="你是Kasumi"), UserPromptMessage(content="你是谁?")], - model_parameters={ - 
"temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert "kasumi" in response.message.content.lower() - - -def test_invoke_with_search(): - sleep(3) - model = ErnieBotLargeLanguageModel() - - response = model.invoke( - model="ernie-bot", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - prompt_messages=[UserPromptMessage(content="北京今天的天气怎么样")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - "disable_search": True, - }, - stop=[], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - total_message = "" - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - total_message += chunk.delta.message.content - print(chunk.delta.message.content) - assert len(chunk.delta.message.content) > 0 if not chunk.delta.finish_reason else True - - # there should be 对不起、我不能、不支持…… - assert "不" in total_message or "抱歉" in total_message or "无法" in total_message - - -def test_get_num_tokens(): - sleep(3) - model = ErnieBotLargeLanguageModel() - - response = model.get_num_tokens( - model="ernie-bot", - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - tools=[], - ) - - assert isinstance(response, int) - assert response == 10 diff --git a/api/tests/integration_tests/model_runtime/wenxin/test_provider.py b/api/tests/integration_tests/model_runtime/wenxin/test_provider.py deleted file mode 100644 index 337c3d2a8010dd..00000000000000 --- a/api/tests/integration_tests/model_runtime/wenxin/test_provider.py +++ /dev/null @@ -1,17 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from 
core.model_runtime.model_providers.wenxin.wenxin import WenxinProvider - - -def test_validate_provider_credentials(): - provider = WenxinProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={"api_key": "hahahaha", "secret_key": "hahahaha"}) - - provider.validate_provider_credentials( - credentials={"api_key": os.environ.get("WENXIN_API_KEY"), "secret_key": os.environ.get("WENXIN_SECRET_KEY")} - ) diff --git a/api/tests/integration_tests/model_runtime/xinference/__init__.py b/api/tests/integration_tests/model_runtime/xinference/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/xinference/test_embeddings.py b/api/tests/integration_tests/model_runtime/xinference/test_embeddings.py deleted file mode 100644 index 8e778d005a4bc3..00000000000000 --- a/api/tests/integration_tests/model_runtime/xinference/test_embeddings.py +++ /dev/null @@ -1,64 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.xinference.text_embedding.text_embedding import XinferenceTextEmbeddingModel -from tests.integration_tests.model_runtime.__mock.xinference import MOCK, setup_xinference_mock - - -@pytest.mark.parametrize("setup_xinference_mock", [["none"]], indirect=True) -def test_validate_credentials(setup_xinference_mock): - model = XinferenceTextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="bge-base-en", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": "www " + os.environ.get("XINFERENCE_EMBEDDINGS_MODEL_UID"), - }, - ) - - model.validate_credentials( - model="bge-base-en", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": 
os.environ.get("XINFERENCE_EMBEDDINGS_MODEL_UID"), - }, - ) - - -@pytest.mark.parametrize("setup_xinference_mock", [["none"]], indirect=True) -def test_invoke_model(setup_xinference_mock): - model = XinferenceTextEmbeddingModel() - - result = model.invoke( - model="bge-base-en", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_EMBEDDINGS_MODEL_UID"), - }, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens > 0 - - -def test_get_num_tokens(): - model = XinferenceTextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="bge-base-en", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_EMBEDDINGS_MODEL_UID"), - }, - texts=["hello", "world"], - ) - - assert num_tokens == 2 diff --git a/api/tests/integration_tests/model_runtime/xinference/test_llm.py b/api/tests/integration_tests/model_runtime/xinference/test_llm.py deleted file mode 100644 index fb5e03855d2120..00000000000000 --- a/api/tests/integration_tests/model_runtime/xinference/test_llm.py +++ /dev/null @@ -1,366 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - TextPromptMessageContent, - UserPromptMessage, -) -from core.model_runtime.entities.model_entities import AIModelEntity -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.xinference.llm.llm import XinferenceAILargeLanguageModel - -"""FOR MOCK FIXTURES, DO NOT REMOVE""" -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock -from 
tests.integration_tests.model_runtime.__mock.xinference import setup_xinference_mock - - -@pytest.mark.parametrize(("setup_openai_mock", "setup_xinference_mock"), [("chat", "none")], indirect=True) -def test_validate_credentials_for_chat_model(setup_openai_mock, setup_xinference_mock): - model = XinferenceAILargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="ChatGLM3", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": "www " + os.environ.get("XINFERENCE_CHAT_MODEL_UID"), - }, - ) - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="aaaaa", credentials={"server_url": "", "model_uid": ""}) - - model.validate_credentials( - model="ChatGLM3", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_CHAT_MODEL_UID"), - }, - ) - - -@pytest.mark.parametrize(("setup_openai_mock", "setup_xinference_mock"), [("chat", "none")], indirect=True) -def test_invoke_chat_model(setup_openai_mock, setup_xinference_mock): - model = XinferenceAILargeLanguageModel() - - response = model.invoke( - model="ChatGLM3", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_CHAT_MODEL_UID"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - -@pytest.mark.parametrize(("setup_openai_mock", "setup_xinference_mock"), [("chat", "none")], indirect=True) -def test_invoke_stream_chat_model(setup_openai_mock, setup_xinference_mock): - model = XinferenceAILargeLanguageModel() - - response = model.invoke( - 
model="ChatGLM3", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_CHAT_MODEL_UID"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -""" - Function calling of xinference does not support stream mode currently -""" -# def test_invoke_stream_chat_model_with_functions(): -# model = XinferenceAILargeLanguageModel() - -# response = model.invoke( -# model='ChatGLM3-6b', -# credentials={ -# 'server_url': os.environ.get('XINFERENCE_SERVER_URL'), -# 'model_type': 'text-generation', -# 'model_name': 'ChatGLM3', -# 'model_uid': os.environ.get('XINFERENCE_CHAT_MODEL_UID') -# }, -# prompt_messages=[ -# SystemPromptMessage( -# content='你是一个天气机器人,可以通过调用函数来获取天气信息', -# ), -# UserPromptMessage( -# content='波士顿天气如何?' -# ) -# ], -# model_parameters={ -# 'temperature': 0, -# 'top_p': 1.0, -# }, -# stop=['you'], -# user='abc-123', -# stream=True, -# tools=[ -# PromptMessageTool( -# name='get_current_weather', -# description='Get the current weather in a given location', -# parameters={ -# "type": "object", -# "properties": { -# "location": { -# "type": "string", -# "description": "The city and state e.g. 
San Francisco, CA" -# }, -# "unit": { -# "type": "string", -# "enum": ["celsius", "fahrenheit"] -# } -# }, -# "required": [ -# "location" -# ] -# } -# ) -# ] -# ) - -# assert isinstance(response, Generator) - -# call: LLMResultChunk = None -# chunks = [] - -# for chunk in response: -# chunks.append(chunk) -# assert isinstance(chunk, LLMResultChunk) -# assert isinstance(chunk.delta, LLMResultChunkDelta) -# assert isinstance(chunk.delta.message, AssistantPromptMessage) -# assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - -# if chunk.delta.message.tool_calls and len(chunk.delta.message.tool_calls) > 0: -# call = chunk -# break - -# assert call is not None -# assert call.delta.message.tool_calls[0].function.name == 'get_current_weather' - -# def test_invoke_chat_model_with_functions(): -# model = XinferenceAILargeLanguageModel() - -# response = model.invoke( -# model='ChatGLM3-6b', -# credentials={ -# 'server_url': os.environ.get('XINFERENCE_SERVER_URL'), -# 'model_type': 'text-generation', -# 'model_name': 'ChatGLM3', -# 'model_uid': os.environ.get('XINFERENCE_CHAT_MODEL_UID') -# }, -# prompt_messages=[ -# UserPromptMessage( -# content='What is the weather like in San Francisco?' -# ) -# ], -# model_parameters={ -# 'temperature': 0.7, -# 'top_p': 1.0, -# }, -# stop=['you'], -# user='abc-123', -# stream=False, -# tools=[ -# PromptMessageTool( -# name='get_current_weather', -# description='Get the current weather in a given location', -# parameters={ -# "type": "object", -# "properties": { -# "location": { -# "type": "string", -# "description": "The city and state e.g. 
San Francisco, CA" -# }, -# "unit": { -# "type": "string", -# "enum": [ -# "c", -# "f" -# ] -# } -# }, -# "required": [ -# "location" -# ] -# } -# ) -# ] -# ) - -# assert isinstance(response, LLMResult) -# assert len(response.message.content) > 0 -# assert response.usage.total_tokens > 0 -# assert response.message.tool_calls[0].function.name == 'get_current_weather' - - -@pytest.mark.parametrize(("setup_openai_mock", "setup_xinference_mock"), [("completion", "none")], indirect=True) -def test_validate_credentials_for_generation_model(setup_openai_mock, setup_xinference_mock): - model = XinferenceAILargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="alapaca", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": "www " + os.environ.get("XINFERENCE_GENERATION_MODEL_UID"), - }, - ) - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="alapaca", credentials={"server_url": "", "model_uid": ""}) - - model.validate_credentials( - model="alapaca", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_GENERATION_MODEL_UID"), - }, - ) - - -@pytest.mark.parametrize(("setup_openai_mock", "setup_xinference_mock"), [("completion", "none")], indirect=True) -def test_invoke_generation_model(setup_openai_mock, setup_xinference_mock): - model = XinferenceAILargeLanguageModel() - - response = model.invoke( - model="alapaca", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_GENERATION_MODEL_UID"), - }, - prompt_messages=[UserPromptMessage(content="the United States is")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - user="abc-123", - stream=False, - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - assert response.usage.total_tokens > 0 - - 
-@pytest.mark.parametrize(("setup_openai_mock", "setup_xinference_mock"), [("completion", "none")], indirect=True) -def test_invoke_stream_generation_model(setup_openai_mock, setup_xinference_mock): - model = XinferenceAILargeLanguageModel() - - response = model.invoke( - model="alapaca", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_GENERATION_MODEL_UID"), - }, - prompt_messages=[UserPromptMessage(content="the United States is")], - model_parameters={ - "temperature": 0.7, - "top_p": 1.0, - }, - stop=["you"], - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = XinferenceAILargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="ChatGLM3", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_GENERATION_MODEL_UID"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - tools=[ - PromptMessageTool( - name="get_current_weather", - description="Get the current weather in a given location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. 
San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ) - ], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 77 - - num_tokens = model.get_num_tokens( - model="ChatGLM3", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_GENERATION_MODEL_UID"), - }, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert isinstance(num_tokens, int) - assert num_tokens == 21 diff --git a/api/tests/integration_tests/model_runtime/xinference/test_rerank.py b/api/tests/integration_tests/model_runtime/xinference/test_rerank.py deleted file mode 100644 index 71ac4eef7c22be..00000000000000 --- a/api/tests/integration_tests/model_runtime/xinference/test_rerank.py +++ /dev/null @@ -1,52 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.rerank_entities import RerankResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.xinference.rerank.rerank import XinferenceRerankModel -from tests.integration_tests.model_runtime.__mock.xinference import MOCK, setup_xinference_mock - - -@pytest.mark.parametrize("setup_xinference_mock", [["none"]], indirect=True) -def test_validate_credentials(setup_xinference_mock): - model = XinferenceRerankModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="bge-reranker-base", - credentials={"server_url": "awdawdaw", "model_uid": os.environ.get("XINFERENCE_RERANK_MODEL_UID")}, - ) - - model.validate_credentials( - model="bge-reranker-base", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_RERANK_MODEL_UID"), - }, - ) - - -@pytest.mark.parametrize("setup_xinference_mock", [["none"]], indirect=True) -def 
test_invoke_model(setup_xinference_mock): - model = XinferenceRerankModel() - - result = model.invoke( - model="bge-reranker-base", - credentials={ - "server_url": os.environ.get("XINFERENCE_SERVER_URL"), - "model_uid": os.environ.get("XINFERENCE_RERANK_MODEL_UID"), - }, - query="Who is Kasumi?", - docs=[ - 'Kasumi is a girl\'s name of Japanese origin meaning "mist".', - "Her music is a kawaii bass, a mix of future bass, pop, and kawaii music ", - "and she leads a team named PopiParty.", - ], - score_threshold=0.8, - ) - - assert isinstance(result, RerankResult) - assert len(result.docs) == 1 - assert result.docs[0].index == 0 - assert result.docs[0].score >= 0.8 diff --git a/api/tests/integration_tests/model_runtime/zhinao/__init__.py b/api/tests/integration_tests/model_runtime/zhinao/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/zhinao/test_llm.py b/api/tests/integration_tests/model_runtime/zhinao/test_llm.py deleted file mode 100644 index 4ca1b864764818..00000000000000 --- a/api/tests/integration_tests/model_runtime/zhinao/test_llm.py +++ /dev/null @@ -1,73 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.zhinao.llm.llm import ZhinaoLargeLanguageModel - - -def test_validate_credentials(): - model = ZhinaoLargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="360gpt2-pro", credentials={"api_key": "invalid_key"}) - - model.validate_credentials(model="360gpt2-pro", credentials={"api_key": os.environ.get("ZHINAO_API_KEY")}) - - -def test_invoke_model(): - model = 
ZhinaoLargeLanguageModel() - - response = model.invoke( - model="360gpt2-pro", - credentials={"api_key": os.environ.get("ZHINAO_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={"temperature": 0.5, "max_tokens": 10}, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = ZhinaoLargeLanguageModel() - - response = model.invoke( - model="360gpt2-pro", - credentials={"api_key": os.environ.get("ZHINAO_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.5, "max_tokens": 100, "seed": 1234}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = ZhinaoLargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="360gpt2-pro", - credentials={"api_key": os.environ.get("ZHINAO_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 21 diff --git a/api/tests/integration_tests/model_runtime/zhinao/test_provider.py b/api/tests/integration_tests/model_runtime/zhinao/test_provider.py deleted file mode 100644 index c22f797919597c..00000000000000 --- a/api/tests/integration_tests/model_runtime/zhinao/test_provider.py +++ /dev/null @@ -1,15 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.zhinao.zhinao import ZhinaoProvider - - -def test_validate_provider_credentials(): 
- provider = ZhinaoProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials(credentials={"api_key": os.environ.get("ZHINAO_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/zhipuai/__init__.py b/api/tests/integration_tests/model_runtime/zhipuai/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/integration_tests/model_runtime/zhipuai/test_llm.py b/api/tests/integration_tests/model_runtime/zhipuai/test_llm.py deleted file mode 100644 index 20380513eaa789..00000000000000 --- a/api/tests/integration_tests/model_runtime/zhipuai/test_llm.py +++ /dev/null @@ -1,109 +0,0 @@ -import os -from collections.abc import Generator - -import pytest - -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, - PromptMessageTool, - SystemPromptMessage, - UserPromptMessage, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.zhipuai.llm.llm import ZhipuAILargeLanguageModel - - -def test_validate_credentials(): - model = ZhipuAILargeLanguageModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="chatglm_turbo", credentials={"api_key": "invalid_key"}) - - model.validate_credentials(model="chatglm_turbo", credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")}) - - -def test_invoke_model(): - model = ZhipuAILargeLanguageModel() - - response = model.invoke( - model="chatglm_turbo", - credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Who are you?")], - model_parameters={"temperature": 0.9, "top_p": 0.7}, - stop=["How"], - stream=False, - user="abc-123", - ) - - assert isinstance(response, LLMResult) - assert 
len(response.message.content) > 0 - - -def test_invoke_stream_model(): - model = ZhipuAILargeLanguageModel() - - response = model.invoke( - model="chatglm_turbo", - credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")}, - prompt_messages=[UserPromptMessage(content="Hello World!")], - model_parameters={"temperature": 0.9, "top_p": 0.7}, - stream=True, - user="abc-123", - ) - - assert isinstance(response, Generator) - - for chunk in response: - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - - -def test_get_num_tokens(): - model = ZhipuAILargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="chatglm_turbo", - credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")}, - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 14 - - -def test_get_tools_num_tokens(): - model = ZhipuAILargeLanguageModel() - - num_tokens = model.get_num_tokens( - model="tools", - credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")}, - tools=[ - PromptMessageTool( - name="get_current_weather", - description="Get the current weather in a given location", - parameters={ - "type": "object", - "properties": { - "location": {"type": "string", "description": "The city and state e.g. 
San Francisco, CA"}, - "unit": {"type": "string", "enum": ["c", "f"]}, - }, - "required": ["location"], - }, - ) - ], - prompt_messages=[ - SystemPromptMessage( - content="You are a helpful AI assistant.", - ), - UserPromptMessage(content="Hello World!"), - ], - ) - - assert num_tokens == 88 diff --git a/api/tests/integration_tests/model_runtime/zhipuai/test_provider.py b/api/tests/integration_tests/model_runtime/zhipuai/test_provider.py deleted file mode 100644 index cb5bc0b20aafc1..00000000000000 --- a/api/tests/integration_tests/model_runtime/zhipuai/test_provider.py +++ /dev/null @@ -1,15 +0,0 @@ -import os - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.zhipuai.zhipuai import ZhipuaiProvider - - -def test_validate_provider_credentials(): - provider = ZhipuaiProvider() - - with pytest.raises(CredentialsValidateFailedError): - provider.validate_provider_credentials(credentials={}) - - provider.validate_provider_credentials(credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/zhipuai/test_text_embedding.py b/api/tests/integration_tests/model_runtime/zhipuai/test_text_embedding.py deleted file mode 100644 index 9c97c91ecbdd94..00000000000000 --- a/api/tests/integration_tests/model_runtime/zhipuai/test_text_embedding.py +++ /dev/null @@ -1,41 +0,0 @@ -import os - -import pytest - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.zhipuai.text_embedding.text_embedding import ZhipuAITextEmbeddingModel - - -def test_validate_credentials(): - model = ZhipuAITextEmbeddingModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials(model="text_embedding", credentials={"api_key": "invalid_key"}) - - 
model.validate_credentials(model="text_embedding", credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")}) - - -def test_invoke_model(): - model = ZhipuAITextEmbeddingModel() - - result = model.invoke( - model="text_embedding", - credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")}, - texts=["hello", "world"], - user="abc-123", - ) - - assert isinstance(result, TextEmbeddingResult) - assert len(result.embeddings) == 2 - assert result.usage.total_tokens > 0 - - -def test_get_num_tokens(): - model = ZhipuAITextEmbeddingModel() - - num_tokens = model.get_num_tokens( - model="text_embedding", credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")}, texts=["hello", "world"] - ) - - assert num_tokens == 2 diff --git a/api/tests/unit_tests/core/model_runtime/model_providers/__init__.py b/api/tests/unit_tests/core/model_runtime/model_providers/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/unit_tests/core/model_runtime/model_providers/wenxin/__init__.py b/api/tests/unit_tests/core/model_runtime/model_providers/wenxin/__init__.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/api/tests/unit_tests/core/model_runtime/model_providers/wenxin/test_text_embedding.py b/api/tests/unit_tests/core/model_runtime/model_providers/wenxin/test_text_embedding.py deleted file mode 100644 index 5b159b49b61f37..00000000000000 --- a/api/tests/unit_tests/core/model_runtime/model_providers/wenxin/test_text_embedding.py +++ /dev/null @@ -1,75 +0,0 @@ -import numpy as np - -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.model_providers.__base.tokenizers.gpt2_tokenzier import GPT2Tokenizer -from core.model_runtime.model_providers.wenxin.text_embedding.text_embedding import ( - TextEmbedding, - WenxinTextEmbeddingModel, -) - - -def test_max_chunks(): - class _MockTextEmbedding(TextEmbedding): - def embed_documents(self, model: str, texts: list[str], 
user: str) -> (list[list[float]], int, int): - embeddings = [[1.0, 2.0, 3.0] for i in range(len(texts))] - tokens = 0 - for text in texts: - tokens += len(text) - - return embeddings, tokens, tokens - - def _create_text_embedding(api_key: str, secret_key: str) -> TextEmbedding: - return _MockTextEmbedding() - - model = "embedding-v1" - credentials = { - "api_key": "xxxx", - "secret_key": "yyyy", - } - embedding_model = WenxinTextEmbeddingModel() - context_size = embedding_model._get_context_size(model, credentials) - max_chunks = embedding_model._get_max_chunks(model, credentials) - embedding_model._create_text_embedding = _create_text_embedding - - texts = ["0123456789" for i in range(0, max_chunks * 2)] - result: TextEmbeddingResult = embedding_model.invoke(model, credentials, texts, "test") - assert len(result.embeddings) == max_chunks * 2 - - -def test_context_size(): - def get_num_tokens_by_gpt2(text: str) -> int: - return GPT2Tokenizer.get_num_tokens(text) - - def mock_text(token_size: int) -> str: - _text = "".join(["0" for i in range(token_size)]) - num_tokens = get_num_tokens_by_gpt2(_text) - ratio = int(np.floor(len(_text) / num_tokens)) - m_text = "".join([_text for i in range(ratio)]) - return m_text - - model = "embedding-v1" - credentials = { - "api_key": "xxxx", - "secret_key": "yyyy", - } - embedding_model = WenxinTextEmbeddingModel() - context_size = embedding_model._get_context_size(model, credentials) - - class _MockTextEmbedding(TextEmbedding): - def embed_documents(self, model: str, texts: list[str], user: str) -> (list[list[float]], int, int): - embeddings = [[1.0, 2.0, 3.0] for i in range(len(texts))] - tokens = 0 - for text in texts: - tokens += get_num_tokens_by_gpt2(text) - return embeddings, tokens, tokens - - def _create_text_embedding(api_key: str, secret_key: str) -> TextEmbedding: - return _MockTextEmbedding() - - embedding_model._create_text_embedding = _create_text_embedding - text = mock_text(context_size * 2) - assert 
get_num_tokens_by_gpt2(text) == context_size * 2 - - texts = [text] - result: TextEmbeddingResult = embedding_model.invoke(model, credentials, texts, "test") - assert result.usage.tokens == context_size From 8563155d1bd5db5bbc95c2785294c8b710908d0a Mon Sep 17 00:00:00 2001 From: takatost Date: Sun, 29 Sep 2024 18:18:01 +0800 Subject: [PATCH 069/325] feat: remove unused codes --- api/poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/poetry.lock b/api/poetry.lock index 5c8485990cc67a..92eca926d0d088 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -8916,4 +8916,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "46120eb2caaf416a798cfe425674e3dcf83a9c7f1b1273e7703ca32ebea21ffd" +content-hash = "13b250c6d52b6ce0f72cc019106b62687070d1eeb93586477399ecdc6d314d4b" From d4e007f9db82fb3d12c274be4da71a1f4f106b3c Mon Sep 17 00:00:00 2001 From: Yeuoly Date: Sun, 29 Sep 2024 18:18:45 +0800 Subject: [PATCH 070/325] feat: support get tool runtime parameters --- api/core/plugin/manager/tool.py | 42 +++++++++++++++++++++++- api/core/tools/entities/tool_entities.py | 2 ++ api/core/tools/plugin_tool/tool.py | 18 +++++++++- api/core/tools/tool_manager.py | 6 +++- 4 files changed, 65 insertions(+), 3 deletions(-) diff --git a/api/core/plugin/manager/tool.py b/api/core/plugin/manager/tool.py index 50970243a11955..874b3ff596bc47 100644 --- a/api/core/plugin/manager/tool.py +++ b/api/core/plugin/manager/tool.py @@ -1,9 +1,11 @@ from collections.abc import Generator from typing import Any +from pydantic import BaseModel + from core.plugin.entities.plugin_daemon import PluginBasicBooleanResponse, PluginToolProviderEntity from core.plugin.manager.base import BasePluginManager -from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter class PluginToolManager(BasePluginManager): @@ -144,3 +146,41 @@ def 
validate_provider_credentials( return resp.result return False + + def get_runtime_parameters( + self, + tenant_id: str, + user_id: str, + provider: str, + credentials: dict[str, Any], + tool: str, + ) -> list[ToolParameter]: + """ + get the runtime parameters of the tool + """ + plugin_id, provider_name = self._split_provider(provider) + + class RuntimeParametersResponse(BaseModel): + parameters: list[ToolParameter] + + response = self._request_with_plugin_daemon_response_stream( + "GET", + f"plugin/{tenant_id}/dispatch/tool/get_runtime_parameters", + RuntimeParametersResponse, + params={ + "user_id": user_id, + "data": { + "provider": provider_name, + "tool": tool, + "credentials": credentials, + }, + }, + headers={ + "X-Plugin-ID": plugin_id, + }, + ) + + for resp in response: + return resp.parameters + + return [] diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 0808ff721d16d2..d037b00d3379ac 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -298,6 +298,8 @@ class ToolEntity(BaseModel): identity: ToolIdentity parameters: list[ToolParameter] = Field(default_factory=list) description: Optional[ToolDescription] = None + # TODO: output schema + has_runtime_parameters: bool = Field(default=False, description="Whether the tool has runtime parameters") # pydantic configs model_config = ConfigDict(protected_namespaces=()) diff --git a/api/core/tools/plugin_tool/tool.py b/api/core/tools/plugin_tool/tool.py index 7c6c4de3e00828..afbd2e40bfd41e 100644 --- a/api/core/tools/plugin_tool/tool.py +++ b/api/core/tools/plugin_tool/tool.py @@ -4,7 +4,7 @@ from core.plugin.manager.tool import PluginToolManager from core.tools.__base.tool import Tool from core.tools.__base.tool_runtime import ToolRuntime -from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolProviderType +from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, 
ToolParameter, ToolProviderType class PluginTool(Tool): @@ -35,3 +35,19 @@ def fork_tool_runtime(self, runtime: ToolRuntime) -> "PluginTool": runtime=runtime, tenant_id=self.tenant_id, ) + + def get_runtime_parameters(self) -> list[ToolParameter]: + """ + get the runtime parameters + """ + if not self.entity.has_runtime_parameters: + return self.entity.parameters + + manager = PluginToolManager() + return manager.get_runtime_parameters( + tenant_id=self.tenant_id, + user_id="", + provider=self.entity.identity.provider, + tool=self.entity.identity.name, + credentials=self.runtime.credentials, + ) diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 2f747a317e0e43..d780d2866fcaf8 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -255,7 +255,11 @@ def _init_runtime_parameter(cls, parameter_rule: ToolParameter, parameters: dict @classmethod def get_agent_tool_runtime( - cls, tenant_id: str, app_id: str, agent_tool: AgentToolEntity, invoke_from: InvokeFrom = InvokeFrom.DEBUGGER + cls, + tenant_id: str, + app_id: str, + agent_tool: AgentToolEntity, + invoke_from: InvokeFrom = InvokeFrom.DEBUGGER, ) -> Tool: """ get the agent tool runtime From bcdb407be8f85a03aee8a5eca9e0c79c618a42d3 Mon Sep 17 00:00:00 2001 From: takatost Date: Sun, 29 Sep 2024 18:24:20 +0800 Subject: [PATCH 071/325] feat: remove unused codes --- .../__base/tokenizers/gpt2/merges.txt | 50001 --------------- .../tokenizers/gpt2/special_tokens_map.json | 23 - .../tokenizers/gpt2/tokenizer_config.json | 33 - .../__base/tokenizers/gpt2/vocab.json | 50259 ---------------- api/poetry.lock | 1011 +- api/pyproject.toml | 8 - 6 files changed, 122 insertions(+), 101213 deletions(-) delete mode 100644 api/core/model_runtime/model_providers/__base/tokenizers/gpt2/merges.txt delete mode 100644 api/core/model_runtime/model_providers/__base/tokenizers/gpt2/special_tokens_map.json delete mode 100644 
api/core/model_runtime/model_providers/__base/tokenizers/gpt2/tokenizer_config.json delete mode 100644 api/core/model_runtime/model_providers/__base/tokenizers/gpt2/vocab.json diff --git a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/merges.txt b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/merges.txt deleted file mode 100644 index 226b0752cac778..00000000000000 --- a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/merges.txt +++ /dev/null @@ -1,50001 +0,0 @@ -#version: 0.2 -Ġ t -Ġ a -h e -i n -r e -o n -Ġt he -e r -Ġ s -a t -Ġ w -Ġ o -e n -Ġ c -i t -i s -a n -o r -e s -Ġ b -e d -Ġ f -in g -Ġ p -o u -Ġa n -a l -a r -Ġt o -Ġ m -Ġo f -Ġ in -Ġ d -Ġ h -Ġan d -i c -a s -l e -Ġt h -i on -o m -l l -en t -Ġ n -Ġ l -s t -Ġ re -v e -Ġ e -r o -l y -Ġb e -Ġ g -Ġ T -c t -Ġ S -i d -o t -Ġ I -u t -e t -Ġ A -Ġ is -Ġ on -i m -a m -o w -a y -a d -s e -Ġth at -Ġ C -i g -Ġf or -a c -Ġ y -v er -u r -Ġ u -l d -Ġs t -Ġ M -' s -Ġ he -Ġ it -at ion -it h -i r -c e -Ġy ou -i l -Ġ B -Ġw h -o l -Ġ P -Ġw ith -Ġ 1 -t er -c h -Ġa s -Ġw e -Ġ ( -n d -i ll -Ġ D -i f -Ġ 2 -a g -er s -k e -Ġ " -Ġ H -e m -Ġc on -Ġ W -Ġ R -he r -Ġw as -Ġ r -o d -Ġ F -u l -at e -Ġa t -r i -p p -o re -ĠT he -Ġs e -u s -Ġp ro -Ġh a -u m -Ġa re -Ġd e -a in -an d -Ġo r -ig h -es t -is t -a b -r om -Ġ N -t h -Ġc om -Ġ G -u n -o p -0 0 -Ġ L -Ġn ot -es s -Ġe x -Ġ v -re s -Ġ E -e w -it y -an t -Ġb y -e l -o s -or t -o c -q u -Ġf rom -Ġha ve -Ġs u -i ve -ou ld -Ġs h -Ġth is -n t -r a -p e -igh t -ar t -m ent -Ġa l -u st -en d -- - -al l -Ġ O -ac k -Ġc h -Ġ le -i es -re d -ar d -â Ģ -ou t -Ġ J -Ġa b -e ar -i v -al ly -ou r -o st -g h -p t -Ġp l -as t -Ġc an -a k -om e -u d -T he -Ġh is -Ġd o -Ġg o -Ġh as -g e -' t -Ġ U -r ou -Ġs a -Ġ j -Ġb ut -Ġw or -Ġa ll -e ct -Ġ k -am e -Ġw ill -o k -Ġw he -Ġthe y -id e -0 1 -f f -ic h -p l -t her -Ġt r -. . 
-Ġin t -i e -u re -ag e -Ġn e -i al -a p -in e -ic e -Ġm e -Ġo ut -an s -on e -on g -ion s -Ġwh o -Ġ K -Ġu p -Ġthe ir -Ġa d -Ġ 3 -Ġu s -at ed -ou s -Ġm ore -u e -o g -ĠS t -in d -i ke -Ġs o -im e -p er -. " -b er -i z -a ct -Ġon e -Ġsa id -Ġ - -a re -Ġyou r -c c -ĠT h -Ġc l -e p -a ke -ab le -i p -Ġcon t -Ġwh ich -i a -Ġ im -Ġab out -Ġwe re -ver y -u b -Ġh ad -Ġ en -Ġcom p -, " -ĠI n -Ġu n -Ġa g -i re -ac e -a u -ar y -Ġw ould -as s -r y -Ġ âĢ -c l -o ok -e re -s o -Ġ V -ig n -i b -Ġof f -Ġt e -v en -Ġ Y -i le -o se -it e -or m -Ġ2 01 -Ġre s -Ġm an -Ġp er -Ġo ther -or d -ul t -Ġbe en -Ġl ike -as e -an ce -k s -ay s -ow n -en ce -Ġd is -ct ion -Ġan y -Ġa pp -Ġs p -in t -res s -ation s -a il -Ġ 4 -ic al -Ġthe m -Ġhe r -ou nt -ĠC h -Ġa r -Ġ if -Ġthe re -Ġp e -Ġy ear -a v -Ġm y -Ġs ome -Ġwhe n -ou gh -ac h -Ġth an -r u -on d -ic k -Ġo ver -ve l -Ġ qu -Ċ Ċ -Ġs c -re at -re e -ĠI t -ou nd -p ort -Ġal so -Ġp art -f ter -Ġk n -Ġbe c -Ġt ime -en s -Ġ 5 -op le -Ġwh at -Ġn o -d u -m er -an g -Ġn ew --- -- -Ġg et -or y -it ion -ing s -Ġj ust -Ġint o -Ġ 0 -ent s -o ve -t e -Ġpe ople -Ġp re -Ġit s -Ġre c -Ġt w -i an -ir st -ar k -or s -Ġwor k -ad e -o b -Ġs he -Ġo ur -w n -in k -l ic -Ġ1 9 -ĠH e -is h -nd er -au se -Ġh im -on s -Ġ [ -Ġ ro -f orm -i ld -at es -ver s -Ġon ly -o ll -Ġs pe -c k -e ll -am p -Ġa cc -Ġb l -i ous -ur n -f t -o od -Ġh ow -he d -Ġ ' -Ġa fter -a w -Ġat t -o v -n e -Ġpl ay -er v -ic t -Ġc ould -it t -Ġa m -Ġf irst -Ġ 6 -Ġa ct -Ġ $ -e c -h ing -u al -u ll -Ġcom m -o y -o ld -c es -at er -Ġf e -Ġbe t -w e -if f -Ġtw o -oc k -Ġb ack -) . -id ent -Ġu nder -rou gh -se l -x t -Ġm ay -rou nd -Ġp o -p h -is s -Ġd es -Ġm ost -Ġd id -Ġad d -j ect -Ġin c -f ore -Ġp ol -on t -Ġag ain -cl ud -ter n -Ġkn ow -Ġne ed -Ġcon s -Ġc o -Ġ . 
-Ġw ant -Ġse e -Ġ 7 -n ing -i ew -ĠTh is -c ed -Ġe ven -Ġin d -t y -ĠW e -at h -Ġthe se -Ġp r -Ġu se -Ġbec ause -Ġf l -n g -Ġn ow -ĠâĢ ĵ -c om -is e -Ġm ake -Ġthe n -ow er -Ġe very -ĠU n -Ġse c -os s -u ch -Ġe m -Ġ = -ĠR e -i ed -r it -Ġin v -le ct -Ġsu pp -at ing -Ġl ook -m an -pe ct -Ġ 8 -ro w -Ġb u -Ġwhe re -if ic -Ġyear s -i ly -Ġd iff -Ġsh ould -Ġre m -T h -I n -Ġe v -d ay -' re -ri b -Ġre l -s s -Ġde f -Ġr ight -Ġs y -) , -l es -00 0 -he n -Ġth rough -ĠT r -_ _ -Ġw ay -Ġd on -Ġ , -Ġ1 0 -as ed -Ġas s -ub lic -Ġre g -ĠA nd -i x -Ġ very -Ġin clud -ot her -Ġim p -ot h -Ġsu b -ĠâĢ Ķ -Ġbe ing -ar g -ĠW h -= = -ib le -Ġdo es -an ge -r am -Ġ 9 -er t -p s -it ed -ation al -Ġb r -Ġd own -Ġman y -ak ing -Ġc all -ur ing -it ies -Ġp h -ic s -al s -Ġde c -at ive -en er -Ġbe fore -il ity -Ġwe ll -Ġm uch -ers on -Ġth ose -Ġsu ch -Ġ ke -Ġ end -ĠB ut -as on -t ing -Ġl ong -e f -Ġth ink -y s -Ġbe l -Ġs m -it s -a x -Ġo wn -Ġpro v -Ġs et -if e -ment s -b le -w ard -Ġsh ow -Ġp res -m s -om et -Ġo b -Ġs ay -ĠS h -t s -f ul -Ġe ff -Ġg u -Ġin st -u nd -re n -c ess -Ġ ent -ĠY ou -Ġgo od -Ġst art -in ce -Ġm ade -t t -st em -ol og -u p -Ġ | -um p -Ġhe l -ver n -ul ar -u ally -Ġa c -Ġm on -Ġl ast -Ġ2 00 -1 0 -Ġst ud -u res -ĠA r -sel f -ar s -mer ic -u es -c y -Ġm in -oll ow -Ġc ol -i o -Ġm od -Ġc ount -ĠC om -he s -Ġf in -a ir -i er -âĢ Ķ -re ad -an k -at ch -e ver -Ġst r -Ġpo int -or k -ĠN ew -Ġs ur -o ol -al k -em ent -Ġus ed -ra ct -we en -Ġs ame -ou n -ĠA l -c i -Ġdiff ere -Ġwh ile ----- ---- -Ġg ame -ce pt -Ġs im -.. . 
-Ġin ter -e k -Ġre port -Ġpro du -Ġst ill -l ed -a h -Ġhe re -Ġwor ld -Ġth ough -Ġn um -ar ch -im es -al e -ĠS e -ĠI f -/ / -ĠL e -Ġre t -Ġre f -Ġtr ans -n er -ut ion -ter s -Ġt ake -ĠC l -Ġcon f -w ay -a ve -Ġgo ing -Ġs l -u g -ĠA meric -Ġspe c -Ġh and -Ġbet ween -ist s -ĠD e -o ot -I t -Ġe ar -Ġagain st -Ġh igh -g an -a z -at her -Ġex p -Ġo p -Ġin s -Ġg r -Ġhel p -Ġre qu -et s -in s -ĠP ro -is m -Ġf ound -l and -at a -us s -am es -Ġp erson -Ġg reat -p r -Ġs ign -ĠA n -' ve -Ġs omet -Ġs er -h ip -Ġr un -Ġ : -Ġt er -ire ct -Ġf ollow -Ġd et -ic es -Ġf ind -1 2 -Ġm em -Ġc r -e red -e x -Ġex t -ut h -en se -c o -Ġte am -v ing -ou se -as h -at t -v ed -Ġsy stem -ĠA s -d er -iv es -m in -Ġle ad -ĠB l -c ent -Ġa round -Ġgo vern -Ġc ur -vel op -an y -Ġc our -al th -ag es -iz e -Ġc ar -od e -Ġl aw -Ġre ad -' m -c on -Ġre al -Ġsupp ort -Ġ1 2 -.. .. -Ġre ally -n ess -Ġf act -Ġd ay -Ġb oth -y ing -Ġs erv -ĠF or -Ġth ree -Ġw om -Ġm ed -od y -ĠThe y -5 0 -Ġex per -t on -Ġe ach -ak es -Ġc he -Ġc re -in es -Ġre p -1 9 -g g -ill ion -Ġg rou -ut e -i k -W e -g et -E R -Ġm et -Ġs ays -o x -Ġd uring -er n -iz ed -a red -Ġf am -ic ally -Ġha pp -ĠI s -Ġch ar -m ed -v ent -Ġg ener -i ent -p le -i et -re nt -1 1 -v es -pt ion -Ġ2 0 -form ation -Ġc or -Ġoff ic -ie ld -Ġto o -is ion -Ġin f -Ġ Z -t he -o ad -Ġp ublic -Ġpro g -r ic -* * -Ġw ar -Ġp ower -v iew -Ġf ew -Ġl oc -Ġdiffere nt -Ġst ate -Ġhe ad -' ll -Ġp oss -Ġst at -re t -ant s -Ġv al -Ġis s -Ġc le -i vers -an c -Ġex pl -Ġan other -Ġ Q -Ġa v -th ing -n ce -W h -Ġch ild -Ġs ince -i red -l ess -Ġl ife -Ġde velop -itt le -Ġde p -Ġp ass -ã ĥ -Ġt urn -or n -Th is -b ers -ro ss -ĠA d -Ġf r -Ġres p -Ġsec ond -o h -Ġ / -Ġdis c -Ġ & -Ġsomet hing -Ġcomp le -Ġ ed -Ġf il -Ġmon th -a j -u c -Ġgovern ment -Ġwith out -Ġle g -Ġd ist -Ġp ut -Ġqu est -an n -Ġpro t -2 0 -Ġne ver -i ence -Ġle vel -Ġar t -Ġth ings -Ġm ight -Ġeff ect -Ġcont ro -Ġc ent -Ġ1 8 -Ġall ow -Ġbel ie -ch ool -ot t -Ġinc re -Ġfe el -Ġres ult -Ġl ot -Ġf un -ot e -Ġt y -ere st 
-Ġcont in -Ġus ing -Ġb ig -2 01 -Ġas k -Ġb est -Ġ ) -I N -Ġo pp -3 0 -Ġnum ber -in ess -S t -le ase -Ġc a -Ġm ust -Ġd irect -Ġg l -Ġ < -Ġop en -Ġp ost -Ġcom e -Ġse em -ord ing -Ġwe ek -ate ly -it al -Ġe l -ri end -Ġf ar -Ġt ra -in al -Ġp ri -ĠU S -Ġpl ace -Ġfor m -Ġto ld -" : -ain s -at ure -ĠTr ump -Ġst and -Ġ # -id er -ĠF r -Ġne xt -Ġs oc -Ġp ur -Ġle t -Ġl ittle -Ġh um -Ġ i -r on -1 5 -Ġ1 5 -Ġcomm un -Ġm ark -ĠThe re -Ġw r -ĠTh at -Ġin formation -w ays -Ġb us -a pp -Ġinv est -m e -Ġh ard -ain ed -e ad -Ġim port -Ġapp ro -Ġt est -Ġt ri -Ġre st -os ed -Ġf ull -Ġc are -ĠS p -Ġc ase -O N -Ġs k -Ġl ess -Ġ + -Ġpart ic -ĠP l -ab ly -u ck -is hed -ch n -b e -Ġl ist -at or -Ġto p -Ġad v -ĠB e -ru ct -Ġd em -r ation -l ing -g y -re en -g er -Ġh ome -Ġle ft -Ġbet ter -Ġd ata -Ġ1 1 -Ġatt ack -Ġpro ble -l ine -ard s -Ġbe h -r al -ĠH ow -ĠS he -ar ge -Ġ -- -: // -Ġb ro -ĠP h -at s -Ġbu ild -w w -id ed -a im -as es -en cy -Ġm ain -in ed -Ġinclud ing -Ġ { -Ġg ot -Ġint erest -Ġke ep -Ġ X -Ġe as -ain ing -Ġcl ass -âĢ ¦ -ĠN o -Ġv ar -Ġsm all -amp le -A T -Ġ ide -ĠS o -Ġre ce -Ġpol it -Ġm ov -Ġpl an -Ġper cent -iv ing -Ġc amp -Ġp ay -1 4 -s c -is ed -Ġu nt -one y -pl oy -== == -Ġdid n -ĠI nd -el s -ert ain -Ġp os -__ __ -i ver -Ġpro cess -Ġprog ram -if ied -ĠR ep -1 6 -u ro -olog y -at ter -in a -Ġn ame -ĠA ll -Ġf our -Ġret urn -v ious -b s -Ġcall ed -Ġm ove -ĠS c -ir d -Ġgrou p -Ġb re -Ġm en -Ġc ap -t en -e e -Ġd ri -le g -he re -uth or -Ġp at -Ġcur rent -id es -Ġp op -t o -ent ion -Ġal ways -Ġm il -Ġwom en -Ġ1 6 -Ġo ld -iv en -ra ph -ĠO r -r or -ent ly -Ġn ear -ĠE x -re am -s h -Ġ1 4 -Ġf ree -iss ion -st and -ĠC on -al ity -us ed -1 3 -Ġdes ign -Ġch ange -Ġch ang -Ġb o -Ġv is -em ber -Ġb ook -read y -Ġk ill -2 5 -pp ed -Ġa way -Ġab le -Ġcount ry -Ġcon st -ar n -Ġor der -A R -i or -i um -or th -1 8 -ail able -Ġs w -Ġm illion -Ġ1 3 -at ic -t ed -ĠG o -Ġo per -en g -Ġth ing -aj or -con om -ĠCom m -Ġwh y -u red -ur al -Ġs chool -b y -ĠM ar -Ġa ff -Ġd ays -Ġan n -us h -an e -I f -e g 
-Ġpro f -Ġhe alth -ou th -B ut -ion al -. , -Ġs ol -Ġal ready -Ġ3 0 -Ġchar act -H e -Ġf riend -E S -i ans -ic le -' d -ĠO n -Ġle ast -Ġp rom -Ġd r -Ġh ist -it her -Ġ est -i qu -1 7 -s on -Ġte ll -Ġt alk -oh n -o int -le ction -A N -Ġunt il -au gh -Ġl ater -Ġ ve -Ġv iew -end ing -iv ed -Ġwor d -w are -Ġc ost -Ġen ough -Ġg ive -ĠUn ited -Ġte chn -are nt -O R -Ġp ar -ĠD r -Ġ201 6 -r ist -er ing -Ġ  -Ġl arge -s ide -ac y -cc ess -Ġw in -Ġimport ant -Ġ19 9 -Ġdoes n -Ġ1 7 -Ġbus iness -Ġcle ar -Ġre se -" , -ur y -Ġe qu -as ter -al f -ĠAmeric an -n ect -Ġex pect -ivers ity -Ġo cc -ĠF l -Ġk ind -Ġme an -Ġp ast -Ġde v -Ġb as -le t -ra ft -Ġor gan -Ġde l -Ġper form -Ġst ory -Ġse ason -ĠC ol -Ġcl aim -Ġc ame -Ġwith in -Ġl ine -Ġpro ject -ĠA t -Ġcontro l -end ed -ĠS y -Ġa ir -iz ation -Ġ * -le y -Ġm oney -id d -Y ou -f or -Ġfam ily -Ġm aking -Ġb it -Ġpol ice -Ġhapp en -Ġ vers -on y -u ff -ĠW hen -Ġs it -ide o -l f -is on -Ġsu re -g in -Ġapp ear -Ġl ight -Ġ es -o f -Ġw ater -Ġt imes -n ot -Ġg row -Ġcomp any -ĠT e -ow s -Ġm ar -our ce -i ol -ar m -b r -Ġex ample -Ġcon c -Ġf ore -ĠT o -p ro -E N -ri es -Ġ2 5 -ĠC an -ne y -Ġact ually -Ġe ver -ur ity -ak en -ap s -Ġt ax -Ġm ajor -am a -Ġof ten -er al -Ġhum an -Ġj ob -is ter -Ġav ailable -oc r -en n -a id -iv id -Ġrec ord -? 
" -Ġs ing -ĠA m -id ence -Ġnew s -st er -Ġe conom -Ġfollow ing -ĠB r -is ing -Ġh our -m ost -um ent -Ġse x -Ġdes c -Ġbec ome -ĠE d -Ġto ok -Ġha ving -Ġprodu ct -a ult -A s -ar ing -Ġme ans -Ġh op -un e -Ġch o -Ġc ertain -Ġn on -Ġde al -2 4 -le ment -oc i -en e -Ġs ide -ĠP r -ĠM ay -Ġre ason -u ed -c hed -ul ation -Ġe lect -Ġoffic ial -Ġposs ible -Ġh old -and s -ot s -Ġc ity -or ies -Ġse ver -Ġchild ren -Ġon ce -Ġact iv -l er -Ġn ight -it ions -ĠJ ohn -a pe -pl ay -Ġd one -Ġl im -Ġwork ing -ĠP res -or ld -e b -ĠC o -Ġb ody -ail s -ut es -ĠM r -Ġwhe ther -Ġa uthor -ro p -Ġpro per -Ġse en -) ; -Ġf ac -ĠS u -Ġcon d -it ing -Ġcour se -Ġ } --------- -------- -a ign -Ġev ent -Ġen g -Ġp ot -Ġin tern -i am -Ġsh ort -em pt -ã Ĥ -ĠG od -il ar -8 0 -Ġor ig -I S -our n -ab ility -it ive -Ġd am -Ġ1 00 -Ġp ress -Ġdo ing -Ġprot ect -r ing -Ġthough t -Ġquest ion -re w -ĠW ar -Ġsever al -ĠSt ate -Ġg iven -Ġf und -ĠT w -Ġw ent -an ces -w ork -p or -m y -4 0 -Ġar g -art ment -ust om -Ġpol ic -Ġme et -Ġc reat -2 2 -ĠSt ates -Ġg ames -ra w -ut ure -Ġunder stand -ur s -ĠO b -l ish -s y -Ġm akes -Ġw on -ag on -Ġh tt -Ġl ove -ent ial -Ġcomple te -p ar -ĠI m -A L -Ġacc ount - ł -ore d -ver t -Ġ ident -Ġ201 5 -Ġother s -ĠM in -i ber -ver age -The re -ition al -d d -Ġpro b -Ġyou ng -Ġal ong -Ġacc ording -Ġy et -Ġmem bers -ĠWh at -o id -ĠM an -A nd -Ġam ong -a i -Ġem ploy -ĠR es -Ġ > -Ġinv ol -Ġl ow -a f -ĠC ar -Ġh ig -ĠO ne -ĠS ec -in ation -Ġlike ly -Ġan t -ag ed -ĠR uss -Ġb en -Ġre le -F or -b ack -ĠN ot -Ġpres ident -b all -Ġacc ess -ivid ual -ĠD em -ĠE uro -6 0 -Ġkn own -ir l -ĠG r -Ġear ly -u se -iet y -âĢ ĵ -Ġf ight -Ġs ent -Ġto day -Ġmark et -" . 
-Ġb ased -Ġstr ong -ur ther -Ġde b -m ber -Ġproble m -Ġde ath -Ġsoc ial -im ate -A S -ort un -Ġcamp aign -er y -C h -Ġe y -i ally -Ġm us -w h -p os -Ġ er -Ġsa f -Ġmonth s -ir on -Ġv iol -Ġf ive -Ġst re -Ġplay ers -in c -al d -y ear -a un -Ġsu ccess -Ġpres ent -ere nce -Ġ201 4 -Ġsu gg -Ġpartic ular -Ġtr y -Ġsugg est -ĠCh rist -on es -Ġpri v -2 3 -Ġc rit -Ġl and -Ġloc al -if y -2 9 -Ġa ut -E D -ĠG u -Ġm ult -Ġpolit ical -Ġask ed -Ġfor mer -it ter -ri pt -Ġcl ose -Ġp ract -ĠY ork -Ġget ting -Ġac ross -Ġcom b -Ġbelie ve -Ġ z -Ġto get -Ġtoget her -ĠC ent -ir c -Ġind ividual -ĠM c -2 7 -is k -ĠE ng -Ġf ace -Ġ2 4 -Ġval ue -Ġare a -e v -Ġw rit -ĠPres ident -Ġv ot -Ġke y -Ġm om -p ut -Ġany thing -Ġexper ience -att le -Ġm ind -a ff -om m -Ġf uture -g ed -Ġc ut -Ġto t -it ch -Ġv ideo -Ġinvest ig -Ġn et -ĠM y -r ict -i en -. ) -Ġimp ro -th ough -ward s -Ġcon nect -ĠM ed -sel ves -ens ive -m b -o ber -at ors -A n -Ġ5 0 -Ġre du -res ent -Ġab ove -Ġf re -ĠEuro pe -s w -Ġam ount -ĠA pp -Ġe ither -Ġmil it -Ġan al -Ġf ail -ĠE n -al es -Ġspec ial -Ġbl ack -I T -c her -Ġlook ing -Ġf ire -y n -Ġal most -o on -Ġstud y -Ġm iss -c hes -ro wn -Ġt re -Ġcommun ity -Ġmed ia -Ġf ood -Ġcom es -ĠUn iversity -Ġsing le -Wh at -u ly -Ġh alf -ag ue -h od -ĠRep ublic -Ġstart ed -Ġqu ick -ot o -b ook -Ġiss ue -it or -Ġel se -Ġcons ider -2 6 -ro du -Ġt aken -2 8 -9 9 -ĠW ith -Ġtr ue -Ġw a -Ġtr ad -Ġag o -Ġm ess -ie f -Ġadd ed -o ke -Ġb ad -Ġf av -3 3 -Ġsim ilar -as k -ĠD on -Ġcharact er -ort s -ĠH ouse -Ġreport ed -Ġty pe -v al -i od -ĠHow ever -Ġt arg -Ġent ire -pp ing -Ġhist ory -Ġl ive -ff ic -.... .... 
-ed eral -Ġtr ying -Ġdisc uss -ĠH ar -ac es -l ished -Ġse lf -os p -re st -Ġro om -el t -Ġf all -ol ution -Ġe t -Ġ x -Ġis n -Ġide a -b o -Ġs ound -ĠD ep -Ġsome one -ci ally -ull y -Ġf oc -Ġob ject -if t -ap er -Ġplay er -Ġr ather -Ġserv ice -as hing -ĠD o -ĠP art -ru g -m on -p ly -Ġm or -Ġnot hing -Ġprov ide -I C -un g -Ġpart y -Ġex ist -Ġm ag -7 0 -Ġr ul -Ġh ouse -Ġbeh ind -Ġhow ever -ĠW orld -Ġs um -Ġapp lic -Ġ ; -Ġfun ction -g r -ĠP ol -Ġfr ont -2 00 -Ġser ies -Ġt em -Ġty p -ill s -Ġo pt -Ġpoint s -Ġbel ow -itt ed -Ġspec ific -Ġ201 7 -um b -Ġr a -Ġpre vious -Ġpre t -re me -Ġc ustom -Ġcour t -ĠM e -Ġre pl -Ġwho le -g o -c er -Ġt reat -ĠA ct -Ġprob ably -Ġle arn -end er -ĠA ss -Ġvers ion -n ow -Ġche ck -ĠC al -R E -min ist -O n -our ces -Ġben ef -Ġd oc -Ġdet er -Ġen c -Ġsu per -Ġadd ress -Ġv ict -Ġ201 3 -Ġme as -t r -Ġf ield -W hen -Ġsign ific -u ge -Ġfe at -Ġcomm on -l oad -Ġbe gin -Ġbr ing -Ġa ction -er man -Ġdesc rib -Ġind ust -Ġwant ed -ri ed -m ing -Ġatt empt -4 5 -f er -Ġd ue -ress ion -# # -Ġsh all -Ġs ix -o o -Ġst ep -Ġp ub -Ġhim self -Ġ2 3 -Ġc op -Ġd est -Ġst op -A C -ib ility -Ġl ab -ic ult -Ġhour s -Ġcre ate -Ġf urther -ĠAmeric a -ĠC ity -Ġd ou -he ad -S T -ĠN orth -c ing -Ġn ational -u le -ĠIn st -Ġt aking -ĠQ u -ir t -Ġre d -Ġrese arch -v iron -ĠG e -Ġbre ak -an a -Ġsp ace -ater ial -Ġrec ent -ĠA b -Ġgener al -Ġh it -Ġper iod -Ġevery thing -ive ly -Ġph ys -Ġsay ing -an ks -Ġc ou -Ġc ult -ac ed -e al -u ation -Ġc oun -l u -Ġinclud e -Ġpos ition -ĠA fter -ĠCan ad -ĠE m -Ġim m -ĠR ed -Ġp ick -Ġcom pl -Ġm atter -re g -e xt -ang u -is c -o le -a ut -Ġcomp et -e ed -f ect -Ġ2 1 -ĠS en -ĠThe se -as ing -Ġcan not -Ġin it -Ġrel ations -ac hed -Ġb ar -Ġ4 0 -ĠT H -Ġ201 2 -Ġv ol -Ġg round -Ġsec urity -Ġup d -il t -3 5 -Ġconc ern -ĠJ ust -Ġwh ite -Ġseem s -ĠH er -pe cially -i ents -Ġann oun -Ġf ig -ight s -Ġst ri -l ike -id s -Ġs us -Ġw atch -Ġ â -Ġw ind -ĠC ont -Ġit self -Ġm ass -A l -y le -iqu e -ĠN ational -Ġab s -Ġp ack -Ġout side -Ġan im -Ġp ain -et er -Ġman 
ag -du ct -og n -Ġ ] -ĠSe pt -se c -o ff -ĠJ an -Ġf oot -ad es -Ġth ird -Ġm ot -Ġev idence -int on -Ġth reat -a pt -pl es -c le -Ġl o -Ġde cl -Ġit em -med i -Ġrep resent -om b -am er -Ġsignific ant -og raph -s u -Ġc al -i res -00 00 -I D -A M -Ġsim ply -Ġlong er -Ġf ile -O T -c he -S o -ate g -or g -ĠH is -Ġen er -Ġd om -Ġup on -il i -": " -Ġthem selves -Ġcom ing -Ġqu ite -Ġdiff icult -ĠB ar -il ities -re l -end s -c ial -6 4 -Ġwom an -ra p -y r -Ġne cess -ip s -Ġte xt -Ġrequ ire -Ġmilit ary -Ġre view -Ġresp ons -7 5 -Ġsub ject -Ġinst ead -Ġiss ues -Ġg en -" ," -Ġmin utes -Ġwe ap -r ay -am ed -t ime -b l -H ow -Ġc ode -ĠS m -Ġhig her -ĠSt e -r is -Ġp age -Ġstud ents -ĠIn tern -Ġmet hod -ĠA ug -ĠP er -ĠA g -Ġpolic y -ĠS w -Ġex ec -Ġac cept -um e -rib ut -Ġword s -Ġfin al -Ġchang es -ĠDem ocr -Ġfriend s -Ġres pect -Ġe p -Ġcomp an -iv il -Ġdam age -** ** -og le -viron ment -Ġne g -ent al -Ġa p -Ġtot al -iv al -! " -l im -Ġneed s -Ġag re -Ġdevelop ment -Ġa ge -ip le -2 1 -Ġresult s -ĠA f -S h -Ġg un -ĠOb ama -ro ll -Ġ @ -Ġright s -ĠB rit -Ġrun ning -Ġwas n -Ġp ort -Ġr ate -Ġpret ty -Ġtarg et -Ġsa w -Ġc irc -Ġwor ks -ic ro -al t -o ver -ww w -Th at -l ier -Ġevery one -ud e -Ġp ie -idd le -ra el -Ġr ad -Ġbl ock -Ġw alk -T o -ã ģ -n es -ĠA ust -a ul -ro te -ĠS outh -ess ion -op h -Ġshow s -Ġs ite -Ġj o -Ġr isk -cl us -l t -Ġin j -id ing -ĠS pe -Ġch all -ir m -Ġ2 2 -itt ing -st r -Ġh y -L E -ke y -Ġbe gan -at ur -ashing ton -l am -ĠD av -b it -Ġs ize -ĠP ar -3 8 -ourn al -f ace -Ġdec ision -Ġl arg -Ġj ud -re ct -Ġcontin ue -ĠO ct -ove red -ĠI nt -==== ==== -Ġp arent -ĠW ill -Ġeas y -Ġd rug -ang er -Ġs ense -Ġd i -id ay -Ġener gy -ist ic -Ġass oci -ar ter -ob al -e ks -ĠE l -ur ch -Ġg irl -o e -it le -Ġ2 8 -ĠC he -Ġrequ est -Ġso on -Ġh ost -k y -Ġst ates -om es -Ġm aterial -le x -Ġmom ent -Ġan sw -on se -Ġes pecially -Ġn orm -Ġserv ices -p ite -r an -Ġro le -4 4 -) : -Ġc red -C l -____ ____ -Ġm at -Ġl og -ĠCl inton -O U -Ġoff ice -Ġ2 6 -Ġch arg -Ġtr ack -m a -Ġhe art -Ġb 
all -Ġperson al -Ġbuild ing -n a -s et -b ody -ĠBl ack -Ġincre ase -itt en -Ġneed ed -3 6 -3 2 -= " -Ġl ost -Ġbec ame -Ġgrou ps -ĠM us -Ġw rote -ĠP e -Ġpro p -j oy -à © -ĠWh ite -Ġde ad -. ' -Ġhtt p -Ġwe bs -O S -Ġins ide -Ġwr ong -Ġstat ement -Ġ ... -y l -Ġfil m -Ġmus ic -Ġsh are -ific ation -Ġre lease -Ġfor ward -Ġst ay -Ġcomp ut -it te -s er -Ġorig inal -Ġc ard -Ġc and -Ġd iv -at ural -Ġfav or -O M -Ġc ases -us es -Ġse ction -Ġle ave -g ing -ov ed -ĠW ashington -3 9 -ĠG l -Ġrequ ired -act ion -ap an -o or -it er -ĠK ing -Ġcount ries -ĠG erman -ll ing -Ġ2 7 -3 4 -Ġquest ions -Ġpr im -Ġc ell -Ġsh oot -Ġany one -ĠW est -Ġaff ect -ep end -Ġon line -ĠIs rael -ĠSept ember -Ġab ility -Ġcont ent -is es -Ġre ve -Ġl aun -Ġind ic -Ġfor ce -c ast -Ġso ld -av ing -f l -Ġso ft -Ġcompan ies -ce ed -Ġart icle -Ġa ud -Ġre v -Ġed uc -Ġplay ing -0 5 -Ġhe ld -ct or -Ġrele ased -Ġf ederal -3 7 -Ġad minist -Ġinter view -Ġinst all -Ġrece ived -Ġs ource -u k -P h -Ġser ious -Ġcre ated -Ġc ause -Ġim medi -Ġdef in -u el -ĠDep artment -ct ions -ĠC our -ĠN ow -z e -it es -it ution -Ġl ate -Ġspe ak -n ers -Ġleg al -ar i -ĠC or -Ġwe eks -Ġmod el -Ġp red -Ġex act -B C -ĠB y -IN G -os ing -Ġt akes -Ġreg ard -Ġopp ortun -Ġpr ice -Ġ19 8 -ĠA pr -f ully -Ġor d -Ġproble ms -ru ction -h am -ĠC ount -le ge -Ġlead ers -E T -le v -Ġde ep -olog ical -es e -h aps -ĠS ome -Ġp ers -Ġcont ract -Ġrelations hip -s p -ou d -Ġb ase -4 8 -m it -A d -anc ial -Ġcons um -Ġpot ential -Ġl angu -re m -et h -Ġrel ig -ress ed -6 6 -Ġl ink -Ġl ower -ay er -ĠJ une -Ġf em -un t -er c -ur d -Ġcont act -Ġ ill -Ġm other -Ġest ab -h tt -ĠM arch -ĠB ro -ĠCh ina -Ġ2 9 -Ġs qu -Ġprov ided -Ġa verage -as ons -Ġ201 1 -Ġex am -l in -5 5 -n ed -Ġper fect -Ġt ou -al se -u x -Ġbu y -Ġsh ot -Ġcol lect -Ġph ot -Ġplay ed -Ġsur pr -Ġofficial s -Ġsim ple -av y -Ġindust ry -Ġhand s -g round -Ġp ull -Ġr ound -Ġus er -Ġr ange -u ary -Ġpriv ate -op s -e es -Ġw ays -ĠM ich -Ġve h -Ġex cept -Ġter ms -im um -pp er -I ON -ore s -ĠDr agon -ou l -Ġd 
en -Ġperform ance -Ġb ill -c il -4 7 -Ġen vironment -Ġex c -ad d -Ġwor th -Ġp ict -Ġch ance -Ġ201 8 -b or -Ġspe ed -ict ion -Ġal leg -ĠJ apan -at ory -re et -Ġm atch -ĠI I -Ġst ru -ord er -Ġst e -Ġl iving -Ġst ruct -in o -Ġse par -her n -Ġresp onse -Ġen joy -Ġv ia -A D -um ents -ace book -Ġmem ber -ib r -iz ing -Ġto ol -ĠM on -ĠWh ile -h ood -ĠA ng -ĠD ef -Ġoff er -T r -a ur -Ġturn ed -ĠJ uly -d own -an ced -Ġrec ently -ĠE ar -Ġc e -ĠSt ar -ĠC ong -rough t -Ġbl ood -Ġhop e -Ġcom ment -ain t -Ġar ri -il es -Ġpartic ip -ough t -ri ption -0 8 -4 9 -Ġg ave -Ġse lect -Ġkill ed -sy ch -Ġgo es -i j -Ġc oll -Ġimp act -at ives -ĠS er -0 9 -ĠAug ust -Ġb oy -d e -ĠD es -Ġf elt -U S -Ġexpect ed -Ġim age -ĠM ark -cc ording -o ice -E C -ĠM ag -en ed -h old -ĠP ost -Ġpre vent -N o -Ġinvol ved -Ġey es -Ġquick ly -A t -un k -Ġbeh av -Ġ ur -Ġl ed -c ome -e y -Ġcand id -Ġear lier -Ġfoc us -et y -P ro -led ge -ix ed -ill ed -Ġpop ular -A P -Ġset t -l ight -Ġvar ious -in ks -Ġlevel s -Ġro ad -ell ig -ab les -he l -itte e -ĠG ener -y pe -Ġhe ard -ic les -Ġm is -Ġus ers -ĠS an -Ġimpro ve -Ġf ather -Ġse arch -The y -v il -Ġprof ess -Ġkn ew -Ġl oss -Ġev ents -6 5 -Ġb illion -0 7 -0 2 -ĠNew s -ĠA M -Ġco ver -w here -ens ion -Ġb ott -Ġare as -en ces -op e -ĠTw itter -a el -Ġget s -ĠGo ogle -Ġs n -i ant -Ġv ote -Ġnear ly -Ġinclud ed -Ġrec ogn -z z -m m -al ed -Ġhappen ed -0 4 -Ġh ot -Ġwho se -Ġc ivil -Ġsu ff -o es -it iz -ĠSy ri -Ġresp ond -Ġh on -Ġfeat ures -Ġeconom ic -ĠApr il -r im -Ġtechn ology -Ġo ption -ag ing -Ġpur ch -R e -Ġl at -ch ie -is l -Ġrec omm -u f -Ġtr aining -Ġeffect s -Ġf ast -Ġ201 0 -Ġocc ur -Ġwebs ite -Ġem ail -Ġs ens -e ch -Ġo il -Ġinf lu -Ġcurrent ly -ĠS ch -ĠAd d -Ġgo al -Ġsc ient -Ġcon v -1 00 -em y -Ġdec ided -Ġtra vel -Ġm ention -L L -0 3 -Ġe lection -Ġph one -Ġlook s -Ġsit uation -Ġc y -Ġh or -b ed -ĠCour t -a ily -av es -Ġqu ality -ĠCom p -w ise -Ġt able -Ġst aff -ĠW ind -et t -Ġtri ed -ide red -Ġadd ition -Ġb ox -Ġl ack -ar ily -Ġw ide -Ġm id -Ġbo ard -ys is 
-Ġant i -h a -Ġd ig -en ing -Ġd ro -C on -6 8 -Ġsl ow -b ased -se qu -Ġp ath -E x -ak er -Ġwork ed -Ġp en -Ġeng ine -Ġlook ed -ĠSu per -ĠS erv -Ġvict im -U n -Ġproper ty -Ġint rodu -Ġexec ut -ĠP M -L e -Ġcol or -ĠM ore -Ġ6 0 -Ġnet work -Ġd ate -c ul -id ge -Ġext ra -3 1 -Ġs le -6 7 -Ġw ond -Ġreport s -j ust -ĠAust ral -Ġcap ital -Ġen s -Ġcomm and -Ġallow ed -Ġpre p -Ġca pt -h ib -Ġnum bers -ch an -Ġf air -m p -om s -Ġre ach -W ith -t ain -Ġbro ad -Ġcou ple -ec ause -ly ing -ĠF eb -Ġsc reen -Ġl ives -Ġpri or -ĠCong ress -A r -Ġappro ach -Ġe mer -ar ies -ĠD is -s erv -ĠN e -Ġbu ilt -c ies -Ġre pe -Ġrul es -for ce -ĠP al -Ġfin ancial -Ġcons idered -ĠCh ar -n ces -ĠI S -Ġb rought -Ġb i -i ers -ĠS im -O P -Ġproduct s -Ġvis it -Ġdoc ument -Ġcon duct -Ġcomplete ly -in ing -ĠCal if -ib ly -Ġwr itten -ĠT V -em ents -Ġd raw -O ne -Ġpub lished -Ġsec ret -r ain -he t -ĠF acebook -ond ay -ĠU p -Ġsex ual -Ġth ous -ĠP at -Ġ ess -Ġstand ard -Ġar m -g es -ect ion -Ġf ell -Ġfore ign -an i -ĠFr iday -Ġreg ular -in ary -Ġincre ased -Ġus ually -Ġdem on -Ġd ark -Ġadd itional -ro l -ĠO f -Ġprodu ction -! ! 
-und red -Ġintern ational -id ents -ĠF ree -rou p -Ġr ace -Ġm ach -Ġh uge -A ll -le ar -ove mber -Ġto wn -Ġatt ention -ĠO ff -y ond -ĠThe n -f ield -Ġter ror -ra z -ĠB o -Ġmeet ing -ĠP ark -Ġar rest -Ġf ear -Ġa w -ĠV al -or ing -' , -Ġext reme -ar r -Ġwork ers -A fter -Ġ3 1 -n et -am ent -Ġdirect ly -Ġpop ulation -ub e -ĠOct ober -ĠI N -ĠJan uary -5 9 -ĠDav id -Ġc ross -ce mber -ĠF irst -Ġmess age -ir it -Ġn ation -Ġp oll -is ions -Ġansw er -n y -is ode -Ġcar ry -ĠRuss ia -Ġhe ar -eng th -ro y -Ġn atural -in ally -Ġdo g -m itted -Ġtr ade -Ġsub st -Ġmult iple -ĠAf ric -Ġf ans -Ġs ort -Ġgl obal -ic ation -ĠW ed -ar a -Ġa chie -Ġlangu age -ve y -Ġt al -Ġnecess ary -Ġdet ails -Ġs en -ĠS und -ĠRe g -ĠR ec -0 6 -Ġs il -ress ive -Ġmed ical -un ch -orn ia -Ġu nd -f ort -oc ks -ĠM onday -ues day -c raft -7 7 -ur t -Ġ ver -ĠH ill -Ġrece ive -Ġmor ning -es tern -Ġb ank -Ġs at -ir th -ĠH igh -Ġdev ice -ĠTH E -ĠCent er -Ġsaf e -Ġp le -ĠCanad a -Ġsystem s -Ġass ist -Ġsur v -Ġb attle -ĠS oc -vert is -S he -Ġp aper -Ġgrow th -Ġc ast -S c -Ġpl ans -ll ed -Ġpart s -Ġw all -Ġmove ment -Ġpract ice -im ately -Ġdis play -Ġsomet imes -om p -ĠP aul -ĠY es -k ing -5 8 -o ly -Ġs on -Ġav oid -ok es -ĠJ ew -Ġto wards -as c -Ġ // -ĠK ore -Ġtalk ing -Ġcor rect -Ġsp ent -ic ks -i able -e ared -Ġter m -Ġwant s -om ing -Ġ ut -Ġdou b -Ġfor ces -Ġp lease -6 9 -ĠN ovember -at form -ond on -Ġon es -Ġimmedi ately -ĠRuss ian -ĠM et -Ġde g -Ġparent s -C H -ĠAmeric ans -al y -ĠM od -Ġsh own -Ġcond itions -Ġst uff -Ġre b -ĠY our -Ġinclud es -n own -ĠS am -Ġexper ien -m ission -ĠE ven -augh t -Ġannoun ced -ĠRepublic an -Ġdeter min -Ġdescrib ed -ĠCount y -( ) -Ġdo or -Ġchang ed -Ġne igh -ĠH ere -Ġcle an -Ġp an -ĠDe cember -ĠEurope an -ir ing -ap ter -Ġcl ub -ĠT uesday -Ġp aid -ĠN et -Ġattack s -Ġcharact ers -Ġal one -Ġdirect or -d om -Ġ3 5 -Ġl oad -Ġr out -ĠCalif ornia -Ġfin ally -Ġr ac -Ġcont r -Ġexact ly -res h -p ri -ĠIs lam -Ġn ature -Ġcare er -Ġlat est -Ġcon vers -ĠS l -p ose -ci ent -ĠIn c -iv ity -8 8 
-ĠA tt -ĠM or -nes day -Ġwe ight -k en -Ġnot e -Ġteam s -Ġ \ -air s -ĠG reen -Ġh undred -on ent -Ġstre ng -Ġcons ist -ic ated -Ġreg ul -Ġl ic -ast ic -Ġt en -urs day -ellig ence -ous ly -ĠU K -B I -Ġcost s -Ġind epend -ĠA P -Ġnorm al -Ġh om -Ġob vious -Ġs we -Ġst ar -Ġread y -ac her -Ġimp lement -g est -Ġs ong -ĠG et -ĠL ab -Ġinterest ing -us ing -Ġg iving -ĠSund ay -Ġet c -Ġm iddle -Ġrem ember -r ight -os ition -ut ions -Ġm ax -4 6 -Ġyour self -Ġdem and -Ġtreat ment -Ġd anger -ĠC ons -Ġgu y -ĠBrit ish -Ġphys ical -Ġrel ated -Ġrem ain -Ġcould n -Ġref er -Ġc itiz -b ox -EN T -bo ard -Ġin n -I G -er o -ĠSt reet -osp ital -ren ch -cher s -Ġst ra -O L -ag er -ĠA N -Ġeas ily -I A -en ge -in y -Ġcl os -ock ed -Ġus es -ĠC oun -I m -u ild -? ? -m ore -Ġan g -Ġwr ite -ol ute -5 7 -Ġlead er -Ġread ing -< / -Ġaut om -est s -4 3 -Ġleg isl -ĠG old -Ġdesign ed -ĠS T -ĠLe g -a res -Ġbe aut -ĠT ex -Ġappear s -Ġstru gg -ĠR om -Ġ 00 -Ġcho ice -Ġparticular ly -ĠF rom -op er -ĠL ondon -ann ed -Ġallow s -ob ile -Ġdiffere nce -âĢ ¢ -ĠV iew -ĠWed nesday -Ġal though -Ġrel ative -Ġapplic ation -ate ver -Ġare n -Ġmy self -Ġim ag -Ġdis e -Ġsoc iety -Ġfre qu -ĠEng lish -Ġpo or -ĠD ay -Ġwrit ing -Ġse ven -Ġstart ing -Ġb ud -Ġpr int -ĠTr ans -uf act -ĠSt ud -n ew -Ġcr im -Ġg ives -Ġco ol -a e -i ance -ĠGener al -Ġthink ing -Ġsa ve -Ġlim ited -ĠPart y -Ġmean ing -p en -ow ers -ĠJ ack -E M -Ġn ice -ru pt -Ġg as -Ġe ight -Ġfe et -Ġeff ort -Ġ ign -ic it -B l -co in -Ġop in -Ġbr ain -Wh ile -he st -ĠTh ursday -Ġwould n -augh ter -Ġtou ch -le ments -Ġstud ies -Ġcent er -c ont -or ge -Ġcomput er -Ġinvestig ation -P l -or ks -Ġ200 8 -Ġincre asing -Ġst ore -Ġcom ments -Ġb al -m en -Ġdo ll -Ġl iber -Ġw ife -Ġlaw s -atur day -it ness -Ġmod ern -ĠS k -Ġadminist ration -Ġopportun ity -Ġs al -Ġpower ful -M y -Ġclaim s -ĠEar th -ord s -Ġt itle -Ġes c -n ame -N ot -om en -Ġbe yond -Ġc amer -Ġse ll -it ute -ear ch -Ġapp l -im ent -4 2 -ĠAr t -Ġun f -Ġviol ence -ur g -ĠE ast -Ġcomp ared -Ġopt ions -Ġthrough out 
-Ġv s -ig r -. [ -ac hes -7 8 -Ġfil es -F L -E L -ar ian -ĠJ ames -ĠA ir -an ch -Ġdet ail -Ġpie ce -P S -Ġn amed -Ġeduc ation -Ġdri ve -Ġitem s -Ġstud ent -ic ed -: : -ic o -Ġth row -Ġsc ene -Ġcomple x -Ġ200 9 -Ġpre c -ĠB re -7 9 -Ġcon cept -Ġstat us -am ing -Ġd ied -Ġknow ledge -Ġbegin ning -O D -ru ary -Ġcertain ly -Ġgu ys -Ġsl ight -in n -ound s -Ġf ine -Ġf at -ic ations -Ġper haps -ĠA nt -Ġinc ome -Ġhtt ps -Ġmajor ity -port s -st on -Ġgreat er -Ġfe ed -ent ially -Ġsaf ety -Ġun ique -and om -Ġg one -Ġshow ed -Ġhist or -Ġcoun ter -i us -id a -Ġlead ing -i pe -Ġs end -ĠDon ald -er ve -Ġdef ense -ines e -Ġy es -ĠF ire -ĠMus lim -ra q -Ġcontin ued -os h -Ġprov ides -Ġpr ison -ĠP re -Ġhapp y -Ġeconom y -Ġtr ust -ag s -ĠG ame -Ġweap ons -um an -ĠC le -it ation -Ġanal ysis -ĠT imes -Ġsc ience -- > -Ġfig ure -Ġdis app -ent y -Ġsoft ware -Ġu lt -Ġoffic ers -N ew -I s -Ġrem ains -ĠInd ia -Ġp sych -ri ef -Ġc at -es c -Ġob serv -Ġst age -ĠD ark -Ġent er -ch ange -Ġpass ed -Ġdes pite -ĠO ut -Ġmov ie -r s -Ġv oice -m ine -ĠPl ay -Ġto ward -ĠT er -Ġreg ion -Ġval ues -or ters -Ġm ount -Ġoffic er -ĠO ther -b an -Ġh ous -w ood -ro om -I V -ĠS un -se e -ĠO ver -ro g -9 0 -Ġl ay -ĠT ur -a wn -Ġpress ure -ĠS ub -Ġbook s -ed om -ĠS and -A A -ag o -Ġre asons -f ord -Ġactiv ity -U T -N ow -ĠSen ate -ce ll -n ight -Ġcall s -in ter -Ġlet ter -ĠR ob -ĠJ e -Ġcho ose -ĠL aw -G et -B e -Ġro b -Ġtyp es -Ġpl atform -Ġqu arter -R A -ĠT ime -Ġmay be -ĠC r -9 5 -p re -Ġmov ing -Ġl if -Ġgo ld -Ġs om -Ġpat ients -Ġtr uth -ĠK e -ur ance -ant ly -m ar -Ġchar ge -ĠG reat -Ġce le ----------------- ---------------- -Ġro ck -ro id -an cy -Ġcred it -a ud -B y -ĠE very -Ġmov ed -ing er -rib ution -Ġn ames -Ġstra ight -ĠHe alth -ĠW ell -Ġfe ature -Ġr ule -Ġsc he -in ated -ĠMich ael -ber g -4 1 -il ed -b and -Ġcl ick -ĠAng el -on ents -Â Ń -ĠI raq -ĠS aturday -Ġa ware -p art -Ġpat tern -O W -ĠL et -Ġgr ad -ign ed -Ġassoci ated -Ġst yle -n o -i ation -a ith -il ies -Ġst ories -ur ation -Ġindividual s -ĠâĢ ¦ 
-m iss -ĠAss oci -ish ing -ab y -Ġsum mer -ĠB en -Ġ3 2 -Ġar ch -ut y -ĠTex as -h ol -Ġfull y -Ġm ill -Ġfollow ed -ĠB ill -ĠInd ian -ĠSec ret -ĠB el -ĠFeb ruary -Ġjob s -Ġseem ed -ĠGo vern -i pped -Ġreal ity -Ġl ines -Ġp ark -Ġmeas ure -ĠO ur -I M -Ġbro ther -Ġgrow ing -Ġb an -Ġest im -Ġc ry -ĠS chool -Ġme chan -ĠO F -ĠWind ows -Ġr ates -ĠO h -Ġpos itive -Ġcult ure -ist ics -ic a -Ġh ar -y a -ite ly -i pp -Ġm ap -en cies -ĠWill iam -I I -ak ers -5 6 -ĠM art -ĠR em -Ġal tern -it ude -Ġco ach -row d -D on -Ġk ids -Ġj ournal -Ġcor por -Ġf alse -Ġwe b -Ġsle ep -Ġcont ain -Ġst o -Ġb ed -iver se -ĠR ich -ĠCh inese -Ġp un -Ġme ant -k nown -Ġnot ice -Ġfavor ite -a ven -Ġcond ition -Ġpur pose -) ) -Ġorgan ization -Ġchall eng -Ġman ufact -Ġsus p -ĠA c -Ġcrit ic -un es -uc lear -Ġm er -vent ion -Ġ8 0 -Ġm ist -ĠU s -ĠT or -htt p -ol f -Ġlarg er -Ġadv ant -Ġrese ar -Ġact ions -m l -Ġke pt -Ġa im -, ' -c ol -Ġbenef its -if ying -Ġact ual -ĠIntern ational -Ġveh icle -Ġch ief -Ġeff orts -ĠLe ague -ĠM ost -Ġwa it -Ġad ult -Ġover all -Ġspe ech -Ġhigh ly -Ġfem ale -Ġer ror -Ġeffect ive -5 4 -Ġenc our -w ell -Ġfail ed -Ġcons erv -Ġprogram s -Ġt rou -Ġa head -5 00 -vertis ement -I P -ĠF ound -p ir -Ġ % -Ġcr ime -and er -Ġloc ation -ĠI ran -Ġbehav ior -az ing -Ġr are -Ġem b -Ġca used -Ġsh ip -Ġact ive -Ġcont ribut -Ġg reen -Ġac qu -Ġref lect -ven ue -Ġf irm -Ġb irth -] . 
-Ġclear ly -Ġem ot -Ġag ency -ri age -Ġmem ory -9 8 -S A -ĠSe e -ac ing -C C -Ġbig gest -Ġr ap -Ġbas ic -Ġb and -e at -Ġsus pect -ĠM ac -Ġ9 0 -m ark -ist an -Ġsp read -am s -k i -as y -ra v -ĠR ober -Ġdemon str -r ated -Ġabs olute -Ġpl aces -Ġim pl -ibr ary -Ġc ards -Ġdest roy -Ġv irt -ve re -Ġapp eared -y an -p oint -Ġbe g -Ġtem per -s pe -ant ed -ear s -ĠD irect -Ġl ength -Ġbl og -am b -Ġint eg -Ġres ources -ac c -if ul -Ġsp ot -Ġfor ced -Ġthous ands -ĠMin ister -Ġqu al -ĠF rench -at ically -Ġgener ally -Ġdr ink -Ġth us -I L -od es -Ġappro pri -ĠRe ad -Ġwh om -Ġey e -Ġcol lege -Ġ4 5 -ire ction -Ġens ure -Ġapp arent -id ers -Ġrelig ious -Ġmin or -ol ic -Ġt ro -ĠWh y -rib ute -m et -Ġprim ary -Ġdevelop ed -Ġpe ace -Ġsk in -st e -av a -Ġbl ue -Ġfam ilies -Ġ ir -Ġapp ly -Ġin form -ĠSm ith -C T -i i -Ġlim it -Ġres ist -........ ........ -um n -Ġconf lic -Ġtw e -ud d -ĠT om -Ġl iter -qu e -b on -Ġha ir -Ġevent ually -Ġp us -Ġhelp ed -Ġag g -or ney -ĠApp le -Ġf it -ĠS ur -Ġpre m -Ġs ales -Ġsecond s -Ġstreng th -Ġfeel ing -¿ ½ -Ġt our -Ġknow s -o om -Ġex erc -Ġsom ew -ï ¿½ -> > -Ġsp okes -Ġide as -Ġreg ist -so ft -ĠD el -ĠP C -Ġpro pos -Ġlaun ch -Ġbott om -T H -ĠP lease -v est -it z -ĠIn ter -Ġsc ript -Ġr at -ar ning -Ġ il -ĠJ er -ĠA re -Ġwh atever -ok en -ci ence -Ġmod e -Ġag ree -Ġs ources -Ġinit ial -Ġrest rict -Ġwond er -us ion -## ## -ĠS il -vil le -Ġb urn -t w -as ion -Ġ £ -Ġn or -u ing -Ġre ached -Ġs un -Ġc ateg -ig ration -Ġc ook -Ġprom ot -Ġm ale -Ġcl imate -Ġf ix -Ġalleg ed -U R -all ed -Ġim ages -C ont -ot a -Ġschool s -i os -Ġd rop -Ġst ream -ĠM o -Ġprevious ly -al ing -Ġp et -Ġdou ble -Ġ( @ -ann el -Ġdef ault -t ies -Ġr ank -ĠD ec -ĠCoun cil -Ġweap on -Ġst ock -Ġanal y -ĠSt r -Ġpict ure -ĠPol ice -f erence -Ġcent ury -Ġcitiz ens -Ġon to -Ġexp and -Ġhe ro -ĠS ol -Ġw ild -Ġupd ate -Ġcustom ers -r ont -d ef -Ġl ik -Ġcrim inal -ĠChrist ian -S P -7 6 -Ġle aving -Ġother wise -ĠD ist -Ġbas is -5 2 -5 3 -ic ip -ĠB er -Ġrecomm end -Ġfl oor -Ġc rowd -ol es -Ġ7 0 
-Ġcent ral -ĠE v -Ġd ream -Ġdown load -Ġconf ir -ĠTh om -Ġwind ow -Ġhapp ens -Ġun it -Ġt end -Ġs pl -Ġbec omes -Ġfight ing -Ġpred ict -ĠP ress -ĠP ower -Ġhe avy -ak ed -Ġf an -or ter -ate gy -B A -iz es -Ġsp end -H ere -Ġ200 7 -Ġad op -ĠH am -Ġfoot ball -ĠP ort -od ay -5 1 -amp ions -Ġtrans fer -h t -Ġ3 8 -ter m -ac ity -Ġb ur -] , -tern al -r ig -b ut -Ġthere fore -ĠB ecause -res p -re y -Ġm ission -S ome -Ġnot ed -Ġass um -Ġdise ase -Ġed it -Ġprog ress -r d -ĠB rown -oc al -Ġadd ing -Ġra ised -ĠAn y -Ġt ick -Ġsee ing -ĠPe ople -Ġagre ement -Ġser ver -Ġw at -Ġdeb ate -Ġsupp osed -il ing -Ġlarg est -Ġsuccess ful -ĠP ri -ĠDemocr atic -Ġj ump -ĠSyri a -Ġown ers -Ġoff ers -Ġshoot ing -Ġeff ic -se y -Ġha ven -ver se -te red -ĠL ight -im al -ĠB ig -Ġdef end -Ġbe at -Ġrecord s -% ) -Ġsc en -Ġemploy ees -Ġdev ices -he m -Ġcom mer -ĠM ex -Ġbenef it -ĠPro f -Ġil leg -Ġsur face -ĠAl so -Ġh arm -ing ly -w ide -ĠA lex -Ġsh ut -ĠC ur -Ġl ose -p m -Ġchall enge -se mb -Ġst ation -Ġint elligence -Ġacc ur -ĠFl or -Ġrequ ires -ĠM al -b um -Ġh ospital -Ġsp irit -Ġoff ered -Ġprodu ce -ĠComm un -Ġcreat ing -Ġcr is -s pect -Ġend ed -Ġd aily -Ġvot ers -land s -i as -i h -on a -Ġsm art -ĠOff ice -ĠL ord -ri al -ĠIntern et -Ġcirc um -Ġextreme ly -' . 
-Ġopin ion -ĠM il -Ġg ain -B S -ĠF in -y p -Ġuse ful -Ġbud get -Ġcom fort -is f -Ġback ground -el ine -Ġep isode -Ġen emy -Ġtri al -Ġestab lish -d ate -ĠC ap -Ġcontin ues -Ġshow ing -ĠUn ion -w ith -Ġpost ed -ĠSy stem -Ġe at -ri an -Ġr ise -ĠGerman y -il s -Ġsign ed -Ġv ill -Ġgr and -m or -ĠEng land -Ġproject s -um ber -Ġconf erence -z a -Ġrespons ible -ĠAr ab -Ġlearn ed -âĢĶ âĢĶ -i pping -ĠGe orge -O C -Ġreturn ed -ĠAustral ia -Ġb rief -Q u -Ġbr and -ill ing -ab led -Ġhig hest -Ġtr ain -ĠComm ission -wh ile -Ġn om -cept ion -Ġm ut -ĠBl ue -Ġinc ident -v ant -8 6 -ĠI D -Ġn uclear -7 4 -ĠL ike -ĠR E -ĠM icro -l i -m ail -Ġcharg es -8 9 -Ġad just -ad o -Ġear th -N A -Ġpr ices -P A -Ġd raft -Ġrun s -Ġcandid ate -ens es -Ġmanag ement -ĠPh il -ĠM iss -Ġte ach -g ram -Ġunderstand ing -a it -ic ago -A dd -ĠE p -sec ut -Ġsepar ate -Ġinst ance -Ġe th -Ġun less -**** **** -ĠF ore -in ate -Ġoper ations -S p -Ġf aith -g ar -ĠCh urch -ron ic -Ġconf ig -os ure -Ġactiv ities -Ġtrad itional -Ġ3 6 -Ġd irection -Ġmach ine -Ġsur round -Ġp ush -un ction -ĠE U -Ġeas ier -Ġarg ument -G B -Ġm icro -Ġsp ending -iz ations -Ġthe ory -ad ow -Ġcall ing -ĠL ast -Ġd er -Ġinflu ence -Ġcomm it -Ġph oto -Ġun c -ist ry -g n -ast e -ack s -Ġdis p -ad y -d o -ĠG ood -Ġ ` -Ġw ish -Ġreve aled -Âł Âł -l ig -Ġen force -ĠComm ittee -Ġche m -Ġmil es -Ġinterest ed -Ġsol ution -ic y -in ct -Ġ- > -ĠD et -Ġrem oved -Ġcomp ar -e ah -Ġpl ant -ĠS ince -Ġachie ve -Ġadvant age -Ġslight ly -b ing -Ġpl aced -u nder -201 5 -ĠM ad -Ġt im -os es -Ġc ru -ĠR ock -Ġmost ly -Ġneg ative -Ġset ting -Ġprodu ced -Ġm ur -Ġconnect ion -ĠM er -Ġdri ver -Ġexecut ive -Ġass ault -Ġb orn -ĠV er -t ained -Ġstruct ure -Ġredu ce -Ġdec ades -Ġd ed -u ke -ĠM any -idd en -Ġle ague -S e -Ġjo in -Ġdis co -Ġd ie -c ks -act ions -Ġass ess -ag n -Ġgo als -our s -I R -Ġsen ior -ill er -m od -ip ment -oc ol -u y -ĠQ ue -Ġpart ies -ir gin -Ġle arning -it able -Ġstre et -Ġcamer a -A pp -Ġsk ills -b re -c ious -Ġcele br -ĠFr anc -Ġexist ing -Ġwill 
ing -l or -Ġ id -ĠSp ace -Ġcrit ical -ĠL a -ortun ately -Ġser ve -Ġc old -Ġspec ies -T S -Ġanim als -ĠB ay -Ġold er -ĠU nder -est ic -ĠT re -Ġte acher -Ġpre fer -v is -Ġth read -ĠM att -Ġmanag er -ãĥ » -Ġprofess ional -ĠV ol -Ġnot es -The se -ul a -Ġf resh -ent ed -u zz -ed y -clus ion -ĠR el -Ġdoub t -E O -Ġopen ed -ĠB it -Ad vertisement -Ġgu ess -ĠU N -Ġse qu -Ġexpl ain -ott en -Ġatt ract -ak s -Ġstr ing -Ġcont ext -oss ible -ĠRepublic ans -Ġsol id -Ġc ities -Ġask ing -Ġr andom -u ps -ur ies -ar ant -dd en -g l -ĠFlor ida -Ġdep end -ĠSc ott -Ġ3 3 -Ġi T -ic on -Ġmention ed -Ġ2 000 -Ġclaim ed -Ġdefin itely -ul f -Ġc ore -Ġopen ing -ĠCon st -wh ich -ĠT ra -A G -7 2 -Ġbelie ved -ad a -Ġ4 8 -ĠSec urity -yr ight -ĠP et -ĠL ou -Ġhold ing -======== ======== -Ġ ice -Ġb row -Ġauthor ities -h ost -w ord -Ġsc ore -ĠD iv -Ġcell s -Ġtrans l -Ġneigh bor -Ġrem ove -u ct -Ġdist rict -ĠA ccording -Ġwor se -Ġconcern s -Ġpresident ial -Ġpolic ies -ĠH all -7 3 -Ġh us -A Y -Ġ200 6 -ĠJ ud -Ġindepend ent -ĠJust ice -ili ar -pr int -igh ter -Ġprotect ion -z en -Ġsu dden -h ouse -ĠJ es -P R -ĠIn f -Ġb ul -Ġ _ -ĠServ ice -ĠP R -Ġstr ategy -ff ect -Ġgirl s -Ġmiss ing -oy al -ĠTe am -ul ated -Ġd at -Ġpolit ics -ab or -A ccording -Ġspe ll -Ġg raph -ort hern -T C -A b -Ġlab or -is her -Ġk ick -ĠiT unes -Ġstep s -pos es -Ġsmall er -E n -ber t -Ġro ll -Ġresear chers -Ġcl osed -Ġtrans port -Ġlaw y -________ ________ -ĠCh icago -Ġas pect -Ġn one -Ġmar riage -9 6 -Ġe lements -ĠF re -ĠS al -Ġd ram -F C -t op -e qu -Ġhe aring -Ġsupport ed -Ġtest ing -co hol -Ġmass ive -Ġst ick -Ġgu ard -is co -ph one -F rom -How ever -Ġb order -Ġcop y -ograph y -l ist -7 1 -Ġown er -cl ass -ru it -r ate -ĠO nce -Ġdig ital -Ġt ask -ER S -Ġinc red -t es -+ + -ĠFr ance -Ġb reat -ow l -Ġiss ued -ĠW estern -Ġdet ect -Ġpart ners -Ġsh ared -ĠC all -Ġcan cer -ac he -rib e -Ġexpl ained -Ġhe at -{ " -Ġinvest ment -ĠB ook -Ġw ood -Ġtool s -ĠAl though -Ġbelie f -Ġcris is -Ġg e -ĠM P -Ġoper ation -ty pe -~ ~ -g a -Ġcont ains -ant 
a -Ġexp ress -ĠG roup -ĠJ ournal -k a -Ġam b -ĠUS A -Ġfind ing -Ġfund ing -h ow -Ġestab lished -ide os -Ġdeg ree -Ġdanger ous -ang ing -Ġfre edom -pp ort -out hern -Ġch urch -Ġc atch -ĠTw o -Ġpres ence -ĠGu ard -U p -Ġauthor ity -ĠPro ject -Ġbut ton -Ġcon sequ -Ġval id -Ġwe ak -Ġstart s -Ġref erence -ĠM em -" ) -U N -or age -ĠO pen -Ġcol lection -y m -g ency -Ġbeaut iful -ro s -Ġtell s -Ġwa iting -n el -Ġprov iding -ĠDemocr ats -Ġd aughter -Ġm aster -Ġpur poses -ĠJapan ese -Ġequ al -Ġturn s -Ġdoc uments -Ġwatch ing -R es -Ġr an -201 4 -Ġre ject -ĠKore a -Ġvictim s -Le vel -ere nces -Ġw itness -Ġ3 4 -Ġre form -com ing -Ġocc up -Ġc aught -Ġtra ffic -ad ing -Ġmod els -ar io -Ġserv ed -Ġb atter -u ate -ĠSecret ary -Ġagre ed -Ġtr uly -yn am -ĠR et -Ġun its -ĠRes earch -h and -az ine -ĠM ike -Ġvar iety -ot al -Ġam azing -Ġconfir med -Ġentire ly -Ġpurch ase -Ġe lement -Ġc ash -Ġdeter mine -D e -Ġc ars -ĠW all -â ĸ -Ġview s -Ġdrug s -Ġdep artment -ĠSt ep -u it -Ġ3 9 -as ure -ĠCl ass -Ġc overed -ĠB ank -Ġme re -u ana -Ġmult i -Ġm ix -Ġun like -lev ision -Ġsto pped -Ġs em -ĠG al -ul es -Ġwe l -ĠJohn son -l a -Ġsk ill -Ġbec oming -ri e -Ġappropri ate -f e -ell ow -ĠPro t -ul ate -oc ation -Ġweek end -od ies -Ġsit es -Ġanim al -ĠT im -Ġsc ale -Ġcharg ed -Ġinst ruct -ill a -Ġmethod s -Ġc ert -Ġjud ge -ĠH el -Ġdoll ars -Ġstand ing -ĠS qu -Ġdeb t -l iam -Ġdri ving -ĠS um -ĠEd ition -Ġal bum -and on -I F -ĠU k -6 3 -ad er -Ġcommer cial -es h -ĠGovern ment -Ġdisc overed -Ġout put -ĠHill ary -ĠCar ol -Ġ200 5 -Ġab use -anc ing -Ġsw itch -Ġann ual -T w -Ġst ated -ag ement -in ner -Ġdem ocr -Ġres idents -Ġallow ing -Ġfact ors -od d -Ġf uck -em ies -Ġoccur red -ot i -Ġn orth -ĠP ublic -Ġinj ury -Ġins urance -C L -oll y -ã Ģ -Ġrepe ated -Ġar ms -ang ed -Ġconst ruction -Ġf le -P U -ic ians -Ġfor ms -ĠMc C -ant ic -Ġm ental -p ire -Ġequ ipment -Ġf ant -Ġdiscuss ion -Ġregard ing -k in -ar p -Ġch air -og ue -Ġpro ceed -ĠI d -O ur -Ġmur der -M an -Ġ4 9 -as p -Ġsupp ly -Ġin put -Ġwe alth -liam 
ent -Ġpro ced -or ial -ĠSt at -ĠN FL -hen s -ĠInst itute -Ġput ting -ourn ament -et ic -Ġloc ated -Ġk id -er ia -r un -Ġpr inc -Ġ ! -go ing -ĠB et -Ġcl ot -Ġtell ing -Ġprop osed -i ot -or ry -Ġfund s -g ment -ĠL ife -Ġb aby -ĠB ack -Ġsp oke -Im age -Ġear n -ĠA T -g u -Ġex change -ĠL in -ov ing -Ġp air -M ore -az on -Ġarrest ed -Ġkill ing -c an -ĠC ard -y d -Ġident ified -Ġm obile -Ġthan ks -ony m -ĠF orm -Ġhundred s -ĠCh ris -ĠC at -Ġtre nd -h at -ĠA v -om an -Ġelect ric -ĠW il -S E -O f -Ġrest aur -ot ed -Ġtr ig -Ġn ine -Ġb omb -Wh y - ¯ -Ġco verage -Ġapp eal -ĠRober t -ĠS up -Ġfin ished -Ġfl ow -Ġdel iver -Ġcal cul -Ġphot os -Ġph il -Ġpie ces -Ġapp re -k es -Ġr ough -D o -Ġpart ner -Ġconcern ed -Ġ3 7 -ĠG en -C ol -ct ors -Ġ= > -st ate -Ġsuggest ed -ĠFor ce -C E -Ġher self -ĠPl an -w orks -o oth -ren cy -Ġcor ner -Ġhus band -Ġintern et -ĠA ut -em s -os en -ĠAt l -g en -Ġbal ance -6 2 -Ġsound s -te xt -Ġar r -ov es -Ġmill ions -Ġrad io -Ġsat isf -ĠD am -M r -G o -S pe -Ġcomb at -r ant -ĠG ree -Ġf uel -Ġdist ance -Ġtest s -Ġdec re -ĠE r -Ġman aged -D S -Ġt it -Ġmeas ures -ĠL iber -Ġatt end -as hed -ĠJ ose -ĠN ight -d it -ĠN ov -ĠE nd -out s -Ġgener ation -Ġadv oc -y th -Ġconvers ation -ĠS ky -act ive -ce l -ri er -ĠFr ank -Ġg ender -Ġcon cent -Ġcar ried -and a -ĠV irgin -Ġarri ved -ic ide -ad ed -Ġfail ure -Ġmin imum -le ts -Ġwor st -Ġkeep ing -Ġint ended -Ġilleg al -Ġsub sc -Ġdetermin ed -Ġtri p -Y es -Ġra ise -Ġ ~ -Ġfeel s -Ġpack age -ĠJ o -h i -201 6 -re al -Ġf ra -Ġsy mb -M e -uck y -p ret -ĠK h -ĠEd it -ĠWe b -em ic -ĠCol or -Ġjust ice -I nt -Ġfar m -ck now -" > -el ess -Ġredu ced -Ġ5 00 -x x -ĠR ad -ĠW ood -Ġcl in -Ġhy p -il er -ur a -k ins -8 5 -6 1 -ĠThe ir -ĠM ary -Ġs an -Ġno vel -ĠWh o -Ġcap acity -Ġimp ossible -Ġpl ays -Ġmin ister -ij uana -ic ate -ĠS et -Ġf ram -Ġ ing -Ġcommun ities -ĠF BI -it a -Ġb on -Ġstr ateg -Ġinterest s -l ock -g ers -m as -ĠAN D -Ġconflic t -Ġrequire ments -Ġs ac -Ġoper ating -in i -rel ated -Ġcomm itted -Ġrelative ly -Ġs outh -¯ 
¯ -Ġaff ord -Ġident ity -Ġdec isions -Ġacc used -pl ace -Ġvict ory -o ch -i at -N ame -C om -t ion -ed s -Ġsee k -Ġt ight -ĠIm ages -Ġinit i -Ġhum ans -Ġfam iliar -Ġaud ience -Ġintern al -vent ure -Ġs ides -ĠT O -Ġd im -Ġcon clud -Ġapp oint -Ġenforce ment -ĠJ im -ĠAssoci ation -Ġcircum st -ĠCanad ian -Ġjo ined -Ġdiffere nces -ĠL os -Ġprot est -Ġtw ice -w in -Ġgl ass -ars h -ĠAr my -Ġexp ression -Ġdec ide -Ġplan ning -an ia -Ġhand le -ĠMicro soft -ĠN or -Ġmax imum -ĠRe v -Ġse a -Ġev al -Ġhel ps -re f -Ġb ound -Ġm outh -Ġstand ards -Ġcl im -ĠC amp -ĠF ox -cl es -Ġar my -ĠTe chn -ack ing -x y -S S -Ġ4 2 -Ġbu g -ĠUk rain -ĠM ax -ĠJ ones -ĠSh ow -l o -Ġplan et -Ġ7 5 -Ġwin ning -Ġf aster -Ġspe ct -Ġbro ken -T R -Ġdef ined -Ġhealth y -Ġcompet ition -htt ps -ĠIs land -ĠF e -Ġannoun ce -ĠC up -ĠInst ead -Ġcl ient -Ġposs ibly -se ction -ock et -l ook -Ġfin ish -Ġcre w -Ġres erv -Ġed itor -Ġh ate -Ġs ale -Ġcontro vers -Ġp ages -w ing -Ġnum er -Ġopp osition -Ġ200 4 -Ġref uge -Ġfl ight -Ġap art -ĠL at -A meric -ĠAfric a -Ġapplic ations -ĠPal est -ĠB ur -Ġg ar -ĠSoc ial -Ġup gr -Ġsh ape -Ġspe aking -ans ion -a o -ĠS n -Ġwor ry -ĠBrit ain -P lease -rou d -Ġh un -Ġintrodu ced -Ġd iet -I nd -ĠSec ond -Ġfun ctions -ut s -ĠE ach -ĠJe ff -Ġst ress -Ġaccount s -Ġgu arant -ĠAn n -ed ia -Ġhon est -Ġt ree -ĠAfric an -ĠB ush -} , -Ġs ch -ĠOn ly -Ġf if -ig an -Ġexerc ise -ĠEx p -Ġscient ists -Ġlegisl ation -ĠW ork -ĠS pr -à Ĥ -ĠH uman -Ġ è -Ġsur vey -Ġr ich -ri p -Ġmain tain -Ġfl o -Ġleaders hip -st ream -ĠIslam ic -Ġ 01 -ĠCol lege -Ġmag ic -ĠPr ime -Ġfig ures -201 7 -ind er -x ual -ĠDe ad -Ġabsolute ly -Ġfour th -Ġpresent ed -resp ond -rib le -Ġal cohol -at o -ĠD E -por ary -Ġgr ab -Ġvar i -Ġqu ant -ĠPh oto -Ġpl us -r ick -ar ks -Ġaltern ative -Ġp il -Ġappro x -th at -Ġobject s -ĠR o -ĠAnd roid -Ġsignificant ly -ĠR oad -k ay -R ead -av or -Ġa cknow -ĠH D -ĠS ing -O r -ĠM ont -Ġun s -pro f -Ġneg oti -ĠAr ch -ik i -Ġte levision -ĠJew ish -Ġcomm ittee -Ġmot or -Ġappear ance -Ġs itting -Ġstri 
ke -ĠD own -com p -ĠH ist -Ġf old -ac ement -ĠLou is -Ġbel ong -ĠâĢ ¢ -Ġm ort -Ġprep ared -Ġ6 4 -ĠM aster -Ġind eed -ĠD en -Ġre nt -T A -our ney -ar c -S u -9 7 -Ġadv ice -Ġchang ing -Ġlist ed -Ġlaun ched -is ation -ĠP eter -is hes -Ġl ived -ĠM el -ĠSup reme -ĠF ederal -Ġ) ; -ruct ure -Ġset s -Ġphil os -u ous -Ġ ł -Ġappl ied -ĠN OT -Ġhous ing -ĠM ount -Ġo dd -Ġsu st -D A -ffic ient -Ġ ? -ol ved -Ġp owers -Ġth r -Ġrem aining -ĠW ater -L C -Ġca uses -ãģ ® -Ġman ner -ad s -Ġsuggest s -Ġend s -stand ing -f ig -ĠD un -id th -Ġg ay -Ġter min -ĠAngel es -M S -Ġscient ific -Ġco al -ap ers -b ar -ĠThom as -Ġsy m -ĠR un -th is -P C -igr ants -Ġmin ute -ĠDist rict -cell ent -Ġle aves -Ġcomple ted -am in -Ġfoc used -Ġmon itor -Ġveh icles -M A -ĠM ass -ĠGr and -Ġaffect ed -itution al -Ġconst ruct -Ġfollow s -Ġt on -re ens -Ġh omes -ĠE xt -ĠLe vel -r ast -ĠI r -Ġel im -Ġlarge ly -ĠJ oe -Ġvot es -all s -Ġbusiness es -ĠFound ation -ĠCent ral -Ġy ards -Ġmaterial s -ul ner -Ġgu ide -Ġclos er -um s -Ġsp orts -ed er -J ust -Ġtax es -8 4 -ĠO ld -Ġdec ade -ol a -Ġv ir -Ġdro pped -Ġdel ay -it ect -Ġsec ure -ste in -le vel -Ġtre ated -Ġfil ed -ain e -Ġv an -Ġm ir -Ġcol umn -ict ed -e per -Ġro t -Ġcons ult -Ġent ry -Ġmar ijuana -ĠD ou -Ġapparent ly -ok ing -clus ive -Ġincre ases -an o -Ġspecific ally -Ġte le -ens ions -Ġrelig ion -ab ilities -Ġfr ame -ĠN ote -ĠLe e -Ġhelp ing -Ġed ge -ost on -Ġorgan izations -à ĥ -ĠB oth -hip s -Ġbig ger -Ġbo ost -ĠSt and -Ġro w -ul s -ab ase -Ġr id -L et -are n -ra ve -Ġst ret -P D -Ġv ision -Ġwe aring -Ġappre ci -Ġa ward -ĠU se -Ġfact or -w ar -ul ations -) ( -Ġg od -Ġter rit -Ġpar am -ast s -8 7 -Ġen emies -ĠG ames -F F -Ġacc ident -W ell -ĠMart in -T ER -Ġat h -ĠHe ll -Ġfor g -Ġve ter -ĠMed ic -f ree -Ġst ars -Ġexp ensive -Ġac ad -ra wn -ĠW he -Ġl ock -Ġform at -Ġsold iers -s m -Ġag ent -Ġrespons ibility -or a -ĠS cience -Ġrap id -Ġt ough -ĠJes us -Ġbelie ves -M L -Ġwe ar -le te -Ãĥ ÃĤ -ĠD ri -Ġcomm ission -ĠB ob -O h -ap ed -Ġwar m -ÃĥÃĤ ÃĥÃĤ -Ġ200 3 
-ort ion -Ġhas n -ust er -Ġun ivers -ĠI ll -Ġk ing -olog ies -9 4 -ĠT em -ĠM os -Ġpat ient -ĠMex ico -ce an -ĠDe ath -ĠSand ers -y ou -ĠC ast -ĠComp any -pt y -Ġhappen ing -F P -ĠB attle -Ġb ought -A m -M od -U s -ut ers -ĠC re -ĠTh ose -Ġ4 4 -is er -Ġs oul -ĠT op -ĠHar ry -ĠA w -Ġse at -ff ee -Ġrev olution -Ġ( " -ĠD uring -et te -Ġr ing -Ġoff ensive -Ġreturn s -Ġv ideos -Ġdis cl -Ġfam ous -en ced -ĠS ign -ĠR iver -Ġ3 00 -P M -ĠB us -ĠC H -Ġcandid ates -ard en -Ġpercent age -Ġvis ual -Ġthan k -Ġtrou ble -ner gy -Ġ200 1 -Ġpro ve -ash ion -Ġen h -ĠL ong -U M -Ġconnect ed -Ġposs ibility -O ver -Ġexper t -Ġl ibrary -art s -ĠDirect or -Ġfell ow -9 2 -ir ty -Ġd ry -Ġsign s -ĠL ove -Ġqu iet -f oot -Ġp ure -ĠH un -Ġf illed -ph as -ĠE lect -end ment -ĠEx pl -Ġun able -n s -m o -Ġv ast -ob e -Ġident ify -app ing -ĠCarol ina -g ress -Ġpro te -Ġf ish -Ġcircumst ances -raz y -ĠPh ot -Ġb odies -ĠM ur -Ġdevelop ing -ĠA R -Ġexperien ced -Ġsubst ant -ĠBo ard -es ome -Ġdom estic -Ġcomb ined -ĠP ut -Ġchem ical -ĠCh ild -Ġpo ol -ĠC y -Ġe gg -c ons -st ers -Ġh urt -Ġmark ets -Ġconserv ative -Ġsupp orters -Ġag encies -id el -O b -ur b -Ġ4 3 -ĠDef ense -y e -ĠA p -du le -Ġtemper ature -Ġconduct ed -ĠCh ief -Ġpull ed -Ġf ol -L ast -ont o -os is -V ER -D es -ĠP an -F irst -Ġadv ance -Ġlic ense -r ors -ĠJ on -Ġimag ine -Ġhe ll -Ġf ixed -Ġinc or -os ite -ĠL og -ick en -] : -Ġsurpr ise -h ab -Ġc raft -ol t -ĠJ ul -Ġd ial -Ġrele vant -Ġent ered -Ġlead s -ĠA D -ĠCle an -Ġpict ures -ess or -Ġal t -Ġpay ing -P er -ĠMark et -Ġupd ates -am ily -ĠT ype -ĠH ome -Ġ5 5 -semb ly -rom e -8 3 -Ġgreat est -Ġhe ight -Ġhe av -ain ts -Ġlist en -as er -ĠS H -Ġcap able -ac le -Ġpers pect -in ating -Ġoff ering -ry pt -ĠDe velop -ab in -r c -Ġbr ight -al ty -ar row -Ġsupp l -ind ing -ack ed -gy pt -ĠAn other -p g -ĠVirgin ia -ĠL u -Ġpl anned -Ġp it -Ġswe et -T ype -ĠD i -Ġtyp ically -ĠFranc isco -Ġpro spect -ĠD an -Ġte en -re es -Ġsc hed -Ġh ol -Ġsc r -Ġlot s -l ife -Ġnews p -Ġfor get -ĠN one -ĠM iddle -ĠR yan 
-ed d -Ġse vere -Ġsu it -ll er -9 3 -Ġcor respond -Ġexpl os -u ations -Ġfl ag -g ame -r id -Ġpr in -ĠD ata -Ġde ploy -ĠEn ter -su it -gh an -ĠM en -Ġthough ts -Ġmat ters -Ġad apt -ĠA ri -Ġf ill -Ġfor th -Ġs am -Ġ4 1 -Ġpay ment -ĠH or -Ġsp ring -du c -Ġl osing -Ġbring ing -F O -al a -Ġdist ribution -he red -b our -ĠIsrael i -om a -Ġcomb ination -Ġpl enty -V E -C an -ĠH aw -Ġper man -ĠSpe cial -Ġto w -Ġsee king -Ġexam ples -Ġclass es -c r -Ġbe er -Ġmov es -ĠI P -ĠK n -Ġpan el -E ven -Ġproper ly -Ġr is -Ġpl ug -Ġestim ated -E very -Ġdef ensive -ag raph -Ġpre gn -Ġinst it -ĠV ict -Ġvol ume -Ġpos itions -Ġl inks -ĠPro gram -ĠWe ek -ag ues -Ġtrans form -k er -ĠC EO -Ġc as -Ġopp onent -Ġtwe et -ĠC ode -Ġsh op -Ġf ly -Ġtal ks -Ġb ag -Ph one -Ġa id -Ġpl ants -Ġ6 5 -Ġatt orney -ar ters -qu est -ĠMag ic -Ġbeg ins -Ġmy ster -Ġenvironment al -Ġst orage -N N -Ġm arg -Ġs ke -Ġmet al -ell y -Ġord ered -Ġrem ained -Ġl oved -Ġprom pt -Ġupd ated -Ġexper ts -Ġwalk ing -Ġan cient -Ġperform ed -AT E -Ġne ither -i ency -Ġmanufact ure -ĠP ak -Ġselect ed -Ġm ine -Ġult imately -Ġexpl an -Ġlab el -ĠServ ices -ribut ed -Tr ump -Ġsy n -ĠU lt -S C -Ġme at -Ġg iant -ĠW ars -ĠO N -Ġad m -Ġinter pret -Ġeven ing -Ġev il -ĠB oston -ĠW ild -Ġ à -ĠBit coin -ĠAm azon -D r -ĠIn formation -Ġobvious ly -Ġadv anced -Ph oto -ol ar -Ġwe ather -Ġsymb ol -Ġso le -Ġpot entially -ost er -Ġorig inally -m un -3 00 -az e -ess ions -Ġde ck -Ġst ood -Ġyou th -ĠB ern -R ep -ĠT est -Ġbas ically -ot ic -Ġinvol ve -ol it -ly n -S ee -Ġair craft -Ġconf irm -E W -Ġmess ages -ĠRich ard -Ġk it -Ġpro hib -Ġv ulner -is ters -Ġexist ence -Ġturn ing -ĠS P -Ġdes ire -Ġfl at -Ġm ent -se ason -ang es -Ġneighbor hood -ĠL ake -AT ION -Ġpoint ed -b ur -Ġinn ov -uc ks -U L -Ġprofess or -Ġexp ressed -A B -ic ious -Ġ200 2 -ĠDe v -Ġs ession -Ġb are -s en -Ġdis s -ĠC ath -ĠP ass -ĠP oint -Ġdo ctor -or row -ail ed -ĠR ub -ĠD C -ĠChar l -p erson -Ġwrit er -igh ters -ure au -Ġob lig -Ġrecord ed -Ġbro ke -Ġord ers -il ty -Ġmot ion -in ity -l 
aw -ad ium -Ġimm igration -Ġcontr ast -Ġb att -Ġex cellent -Ġtechn ical -am i -Ġt un -Ġcl oud -ĠY ear -ge on -Ġcre ation -Ġstr ange -Ġa uth -Ġfor t -b orn -Ġext ent -ĠT oday -ĠCl ub -Ġr ain -Ġs ample -Ġaccept ed -Ġt act -Ġf ired -ĠS on -Ġstand s -Ġb oot -Ġ4 7 -Ġstat ements -Ġvers ions -Ġse lling -ound ed -Ġ199 0 -Ġwere n -ĠW atch -Ġexper iment -P ost -Ġret ail -ul ed -In st -un te -ãĥ ¼ -Ġdep art -Ġb ond -i very -om pl -Ġre action -ĠSyri an -ĠP ac -app ed -ani el -D P -Ġres olution -Ġre act -Ġappro ved -on om -m ond -ĠO ffic --- - -Ġrepl ace -Ġt ack -Ġsp ort -Ġch ain -Ġemer gency -r ad -ĠPalest in -Ġ4 6 -Ġautom atically -Ġrout e -Ġp al -Ġb anks -ĠPar is -ĠMed ia -ro ad -ic ing -i xt -ist ed -Ġg rew -Ġco ord -ĠW here -om in -Ġsub s -� � -Ġ ± -Ġcorpor ate -Ġse lection -n oon -ĠRep ort -c s -clud ing -ord ers -anc he -ĠIt s -Ġslow ly -ĠE gypt -ĠA cc -Ġcol le -iqu es -E X -Ġattempt s -ur l -ĠC ross -Ġfind ings -ĠS C -ĠO R -Ġind ex -ens ity -ĠW ay -ĠL and -Ġsh ock -d is -Ġd ynam -Ġc art -m osp -S ince -i est -ĠB oy -Ġst orm -ĠCont in -201 3 -he w -il it -Ġess ential -iqu id -O ther -ive red -Ġreason able -A ct -Ġsub sequ -ĠP ack -ĠF ort -Ġconsider ing -Ġun iversity -l og -Ġmar ried -Ġill ust -ĠTr ue -£ ı -Ġnumer ous -rast ructure -Ġserious ly -Ġrefer red -u a -Ġconsist ent -on na -ĠRe al -ru ption -ci ples -Ġfact s -9 1 -ot es -er g -The n -Ġacc ompl -N ote -Ġre venue -Ġpass ing -Ġm al -e en -ĠY et -Ġg ather -ter day -ew ork -ĠA uthor -P e -Ġopt im -Ġr ub -Ġè £ı -Ġun known -st one -Ġun ion -ol ve -Ġopportun ities -Ġbrow ser -ĠW al -ĠC ost -Ġreport ing -st s -p et -Ġs and -Ġsudden ly -Ġsurpr ising -ĠV R -Ġsomew hat -ĠB as -ult ure -iz z -ĠC D -Ġchalleng es -Ġsett ings -Ġexperien ces -ĠF ull -Ġcan n -Ġrece iving -ES T -Ġj oint -Ġcult ural -Ġa st -8 2 -as tern -ce ived -ĠC ru -Ġb ull -p ired -am m -Ġfac ing -p ower -Ġb oss -ĠH ol -Ġinst r -Ġincreasing ly -Ġsh ift -Ġstre ets -ĠWilliam s -ab b -Ġl ie -Ġl augh -ĠC a -P L -Ġadult s -Ġcustom er -Ġob tained -Ġsupport ing -ht ml 
-f ire -Ġdetail ed -Ġpick ed -ĠR ight -ld er -E E -st ood -ĠK im -Ġw ire -Ġs ight -Ġdevelop ers -Ġpers ons -Ġs ad -Ġc up -Ġwar ning -Ġboy s -l ong -Ġb ird -f o -Ġw al -Ġobserv ed -Ġz one -iven ess -Ġch annel -c ript -Ġref used -ĠAg ain -Ġsu c -Ġspokes man -ĠRe f -r ite -ou ston -ãĥ ³ -ĠS her -Ġact s -ĠN ame -Ġstrugg le -ar ry -omet imes -Ġdisc rim -H T -Ġcateg ory -Ġreal ize -Ġemploy ee -ĠAf ghan -en ger -Ġgun s -ĠSte ve -ĠM ot -ĠO l -ok ed -Ġth ick -Ġfair ly -ill y -Ġsur ve -ĠM at -we ight -â Ķ -Ġtro ops -Ġag ents -Ġbatter y -Ġmot iv -à ¡ -S ec -d en -o very -L S -Ġfl u -Ġconf ident -ĠO per -Ġem pty -Ġp hen -Ġse ctor -Ġexc ited -Ġrem ote -ap h -o en -Ġdestroy ed -Ġmor al -ĠH P -ĠR on -Ġd ress -ĠB at -Ġl it -ĠM S -Ġa f -H L -r um -is ms -Ġshould n -Ġsym pt -ĠTor onto -het ic -Ġcar bon -Ġinstall ed -Ġviol ent -Ġsol ar -j a -Ġpract ices -Ġr ide -ĠP enn -Ġimpro ved -Ġaud io -Ġbehav i -ĠP S -Ġe ating -D ata -ĠRe view -p ass -cl aim -u ated -ang ers -c hen -Ġproper ties -Ġany where -An other -Ġbl ow -ĠJack son -Ġp roud -Ġplan e -l ines -Ġsqu are -Ġpro of -ans as -Ġtalk ed -m akers -Ġs ister -Ġhold s -Ġres ident -Ġ= = -Ġresist ance -Ġspl it -Ġpro secut -Ġconf idence -res ents -Ġcut s -Ġexcept ion -Ġz ero -Get ty -Ġcop yright -Ġtot ally -orm al -ific ations -ĠAustral ian -Ġs ick -Ġ1 50 -Ġhouse hold -Ġfe es -Ġdri vers -og en -ĠN Y -Ġnecess arily -Ġregul ations -ear ing -s l -Ġperspect ive -c are -ic ial -H is -Ġesc ape -Ġsurpr ised -ĠV an -ur rent -Ġv ac -8 1 -ĠTh us -Ġem phas -ĠCh ampions -ĠI ce -Ġn arr -Ġhead s -Ġca using -b el -f ortunately -ĠM a -Ġtarg ets -ci pl -Ġafter noon -Ġadd s -ĠMay be -ĠF our -ess ed -ple te -Ġus ual -ch o -ing u -Ġwith d -ĠE nergy -ĠE conom -O O -Ġart icles -Ġinj ured -Ġman age -Ġexpl ains -Ġdi agn -R ec -at ures -Ġlink ed -Ġdiscuss ed -Ġexpl o -Ġocc asion -ath an -Ġopp osite -Ġfac es -Ġden ied -ĠK night -Ġn ut -Ġapprox imately -Ġdisapp oint -onym ous -ĠB est -ĠL o -ĠH y -ĠA ff -Ġvot ing -an while -ĠII I -Ġinstit utions -ag ram -ĠD aily -Ġdr 
ag -Ġnear by -Ġgu ilty -Ġcon ver -P re -s hip -Ġre ward -Ġphilos oph -ĠS S -u gh -Ġapp s -f riend -Ġu pper -Ġad vert -Ġs now -Ġfr ust -Ġour selves -F r -ĠD ie -amp ion -Ġdis miss -Ġc ere -Ġsign al -f rom -Ġ ). -Ġ5 2 -Ġcr imes -it ors -est ival -use um -Ġcoun cil -ĠS aud -M ay -ĠG un -ic ian -et her -Ġsu fficient -ĠH en -so le -Ġhistor ical -ĠF ar -ĠT urn -Ġp in -Ġsuc ceed -m at -ly mp -Ġtrad ition -ĠO k -Ġc ro -Ġdesc ription -al le -Ġsk y -T e -Ġwide ly -Ġw ave -Ġdefin ition -ĠJew s -Ġcy cle -Ġref ere -Ġbr ings -us al -Ġal ive -Ġfrequ ently -Ġint ention -ĠCont rol -l v -y stem -Ġpriv acy -g ent -ren ce -ĠQu est -ĠChrist mas -Ġr ail -Ġco oper -Ġtest ed -ĠC apt -as ks -Ġcomfort able -Ġdel ivered -sc ape -Ġdep th -ĠG OP -Ġwrit es -Ġass ets -Ġsa v -im ents -Ġtrans ition -Ġart ist -ĠL ook -Ġl ob -Ġcomp onents -ar ity -Ġwalk ed -Ġro ot -Ġparticip ants -Ġnot iced -Ġres c -Ġn av -ĠAd minist -d a -ut ral -pl ate -Ġimport ance -Ġass ert -ious ly -c ription -Ġinj uries -ĠChe ck -Ġregist ered -Ġint ent -Ġmiss ed -ograph ic -Ġsent ence -oun ter -Ġassist ance -ev in -Ġdat abase -Ġbuild ings -Ġclass ic -Ġth inks -ĠOh io -P r -ug g -Ġfe e -p an -Ġeffect ively -Ġfac ility -Ġbe ar -Ġch apter -Ġdog s -ĠCol umb -Ġl atter -it ial -Ġad mitted -T V -ĠGe org -Ġpost s -\ \ -Ġlawy er -Ġequ ival -Ġm and -Ġcontro lled -ĠW alk -ĠAnd rew -Ġmen u -am ental -Ġprotect ed -v a -Ġadminist r -or al -Ġre in -ĠS ar -Ġamount s -Ġn ative -ĠM oon -Ġrep resents -Ġab andon -Ġcarry ing -Ġt ank -m ary -Ġdecl ared -T ube -Ġh at -Ġpun ish -el lect -m es -Ġun iverse -ĠR od -ph y -Ġinf rastructure -Ġ5 1 -Ġopp osed -ow nt -c a -ĠM ake -Ġhard ware -Ġco ffee -R el -b al -w orld -ĠS af -ĠSe a -in als -Ġown ed -Ġh all -ers ion -Ġdescrib e -ĠP ot -Ġport ion -Ġat mosp -Ġgovern ments -Ġdep ending -Ġoff ense -Ġtr ick -aw a -ĠL ine -ĠV is -ĠH ard -ĠOr ig -ĠCl ick -Ġdes k -ĠVal ley -ĠS ov -Ġmov ies -Ġrem ark -Ġm ail -Ġcons cious -Ġrul ing -ĠR ights -Ġmed ic -he nt -ĠW omen -> < -Ġrepl aced -ĠP rem -ĠTh anks -Ġre new -ĠB all 
-if orm -Ġsh ots -C omm -Ġar med -Ġconst ant -Ġt aste -Ġreal ized -Ġbu ff -Ġm o -Ġeffic ient -M ost -or ation -if ies -Ġcommun ication -Ġfl ood -Ġconsequ ences -Ġany way -ig g -ĠG M -ĠTh ank -Ġ iron -Ġev olution -ĠC op -tw itter -Ġ9 5 -Ġrelationship s -ad el -ĠYou ng -Ġpropos al -ay ers -uild ing -ĠH ot -OR E -c os -Ġcoll abor -P G -ax y -Ġknow ing -Ġsupport s -ow ed -Ġcontrol s -Ġmere ly -um er -Ġath let -Ġf ashion -p ath -Ġg ift -Ġer a -AN D -Ġkind s -ĠKore an -Ġleg it -ul ous -Ġess entially -Ġthe rap -n ic -Ġsuff ered -Ġh ur -Ġprom ise -Ġex cess -Ġover w -Ġpr ime -ĠH ouston -er ry -ĠM s -R S -201 2 -Ġst ores -ĠO lymp -Ġj ourney -Al though -S ub -ĠE duc -ĠCh apter -Ġrequest s -Ġconsum ers -Ġt iny -Ġis ol -ĠF air -b a -ĠY OU -Ġcr ash -ce ler -Ġemot ional -Ġgood s -Ġelect ed -Ġmod er -ĠLin ux -Ġbl ocks -Ġis land -ĠSoc iety -Ġelect ions -Ġbroad cast -Ġche ap -Ġn ations -Ġse asons -4 00 -Ġwas te -ĠS at -Ġfield s -em ploy -Ġprof ile -Ġauth ors -AL L -ĠG ra -w est -ĠT y -Ġdeath s -Ġv acc -Ġfor med -Ġd u -Ġon going -ĠMuslim s -el f -ig ure -Ġass ume -ĠUkrain e -w ater -Ġco ast -Ġvot ed -g or -ĠA S -ĠMich igan -az a -ĠAr m -i ro -Ġf lex -as ters -' ' -Ġwel come -ar l -Ġloc ations -ig ation -ĠF il -Ġbu ying -Ġarch itect -Ġhard er -ĠC ub -Ġinter face -Ġrestaur ant -Ġdisco ver -Ġex ceed -Ġfav our -ger y -Ġd uty -Ġp itch -ad or -ĠM ach -b oy -Ġrespond ed -Ġext ended -her s -M any -ra id -if er -ĠIn s -S er -Ġmed ium -s he -ĠS ports -Ġmag azine -ut ation -Ġlim its -ĠG all -Ġex ternal -raz il -Ġyoung er -t le -Ġrem ind -ĠC ON -Ġimmedi ate -Ġh idden -Ġvol unte -Ġsim pl -od cast -Ġph ase -d r -Ġpl ot -Ġexp osure -R I -og rap -v in -an ish -ĠAc ad -ĠEng ine -Ġexp ansion -ĠP ay -Y our -Ġpus hed -ĠE ll -ĠHe ad -Ġmarket ing -ĠA C -k et -Ġh its -Ġg ro -ĠA ge -ĠSc ot -] [ -Ġst im -Ġi Phone -Ī Ĵ -Ġn arrow -ĠGet ty -ĠTur key -Ġperfect ly -Ġen able -ut ch -Ġprec ise -Ġreg ime -Ġsh if -Ġcomp ens -g un -d iv -Ġch osen -ĠK en -An y -Ġtre es -Ġrecomm ended -ĠR en -u able -ĠH T -F ollow -E G 
-ĠH and -ĠK enn -Ġarg uments -Ġex ists -Ġb ike -ĠCons erv -Ġbre aking -ĠG ar -Ġc razy -Ġvirt ual -ay lor -ix el -Ġ19 80 -Ġper mission -ĠSer ies -Ġconsum er -Ġclose ly -c alled -Ġ5 4 -Ġhop es -Ġar ray -ĠW in -ĠLab our -Ġsp ons -ĠI re -Ġp ow -Ġread ers -Ġemploy ment -Ġcreat ure -Ġresult ing -Ġaccur ate -Ġmom ents -Ġarg ued -Ġp ed -D uring -Ġ5 3 -ĠT al -Ġs ought -Ġsuff ering -Ġ icon -le e -Ġ( $ -al ian - ° -Ġp ra -Ġbon us -( " -k o -Ġact ing -D E -f all -Ġcompar ison -Ġsm ooth -ĠN AS -u pp -ĠJose ph -ep ing -ĠT ake -ĠM id -Ġs ending -f ast -ĠF all -Ġdeal ing -us er -ĠOr gan -C o -Ġatt ached -Ġse es -% . -Ġtyp ical -AR T -Ġfind s -ĠAs ia -um in -ĠC ore -ĠE nt -in ent -u ce -ĠBl ood -ĠN ever -Ġem ails -Ġhigh light -Ġconf ront -at us -ut ed -Ġun us -Ġtop ic -ĠAd am -Ġb le -at i -Ġunder stood -S et -st ruct -T P -Ġm ob -a a -ĠSt art -pect ed -se ll -Ġded icated -ĠC A -u an -Ġsong s -esc ription -Ġte ch -Ġr ape -Ġas ide -Ġgr ant -Ġ5 6 -s ub -Ġarg ue -Ġcont aining -Ġsche dule -Ġliber al -Ġpublic ly -Ġheav ily -ĠU t -in er -ĠS ection -ĠC are -we et -l s -D is -âĶ Ģ -ĠF ollow -B ack -ĠI T -Ġb es -j i -ĠH it -est ed -Ġevery body -ĠSw ed -Ġfem in -Ġfac ilities -Ġcon ven -C omp -ĠO S -c ore -Ġan x -Ġdiv ision -ĠC am -ĠSt an -m ates -Ġexpl ore -pl om -Ġsh ares -pl oad -an es -Ġide al -et ers -ĠB ase -Ġpl astic -Ġdist inct -ĠNet work -ĠSe attle -Ġtrad ing -ens us -int end -Ġex hib -Ġinit ially -ĠF ood -Ġthous and -ĠBus iness -act er -Ġpar agraph -Ġrough ly -Ġw ww -Ġcreat ive -ĠCon f -Ġconsum ption -Ġfil ms -ag an -Ġob tain -Ġt all -Ġt or -Ġacknow led -Ġg rown -al o -K E -Ġ4 00 -end ers -t aining -U G -Ġsu icide -Ġwat ched -ĠL ist -al i -re hens -Ġsurround ing -Ġp ip -Ġf lying -ĠJ ava -ord an -Ġserv ing -in ations -p ost -Ġsh o -A v -Ġj ail -z y -Ġ199 9 -Ġ< / -Ġliter ally -ĠS ir -Ġexp osed -Ġl ies -st ar -Ġb at -Ġear ned -ĠD ig -Ġspec ified -ĠSe ason -Ġdeg rees -Don ald -Ġcent re -Ġsh aring -Ġwin ter -ĠC O -C he -Ġ Î -M P -Ġun w -Ġfew er -ĠM ir -Ġsomew here -ĠK ey -Ġattack ed -ĠK 
ir -Ġdom ain -Ġstrong er -Ġ9 9 -Ġpen alty -I d -Sc ript -Ġdecl ined -Ġne ck -Ġfra ud -Ġcur rency -Ġr ising -R C -âĢ¦ âĢ¦ -H z -Ġt ab -Ġtal ent -n am -ĠN BA -Ġvill age -Ġleg s -ĠN ext -E d -Ġac id -Ġhy d -8 00 -Ġinvol ving -ĠIm age -ĠBe fore -F l -Ġyes terday -S ource -Ġterror ist -Ġsu p -Ġsy nt -ĠSaud i -Ġw est -Ġr u -b urg -Ġvis ible -Ġstru ck -r ison -Ġaw esome -Ġd rawn -Ġansw ers -ĠG irl -ĠR am -Ġthreat s -Ġdef eat -os it -Ġv ent -atur ally -Americ an -end a -ĠH oly -Ġr um -% , -c ase -ĠHist ory -ĠYou Tube -Ġsit uations -ĠD NA -S te -Ġsa ved -It em -Ġrec ip -olog ist -Ġfac ed -Ġel ig -O nce -ĠL i -u h -Ġmist ake -ĠDiv ision -ĠB ell -Ġsympt oms - ® -Ġdom in -Ġfall ing -Ġend ing -as hes -Ġmat ches -ĠOn line -Ġexplan ation -D ef -red it -Ġany more -ĠT otal -ĠF OR -us hed -Ġlet ters -Ġris ks -ĠO K -Ġreported ly -: \ -Ġpl ate -Ġsubject s -Ġattempt ed -if ier -ian a -Ġunlike ly -ĠTh ough -um a -ĠIn vest -ĠPr in -ic an -ĠD ar -ĠColor ado -au g -Ġve get -a os -ri a -Ġshe l -Ġmark ed -Ġ( ) -Ġsp r -p o -ĠL ink -Ġdef e -ĠJ r -Ġthem e -Ġpass ion -ĠP en -Ġinf o -iz er -Ġsh it -ĠC ivil -ap se -c re -Ġpo ly -Ġcomp onent -ĠChar les -ĠIre land -ĠPro v -Ġdo ctors -Ġgr anted -Ġpain t -Ġhon or -Ġsm oke -Ġpay ments -Ġprim arily -ĠKing dom -r ich -ate ll -Ġde als -Ġsched uled -Ġfund amental -Ġprote in -Ġnewsp aper -Ġcl ients -yth on -ĠD ate -h us -Ġfeed back -Ġstret ch -Ġc ock -Ġhot el -ĠQue en -Ġsu gar -Ġj u -Ġmil k -Ġappro val -ĠL ive -Ġequival ent -ef ully -Ġins ert -z ona -Ġext ension -d ri -J ohn -Ġacc omp -S m -ĠF und -Ġconst antly -Ġ` ` -Ġgener ated -ĠA ction -ĠP sych -ĠT ri -Ġrecogn ize -Ġv ary -ph a -ĠR a -d f -et ch -ĠSov iet -Tw o -Ġpattern s -Ġprof ession -an ing -T ime -ĠL im -Ġcol ors -ĠA z -ĠT R -Ġinf ect -Ġphen omen -Ġshe ll -Al so -Ġput s -Ġdel ivery -Ġbro wn -Ġprocess ing -Ġlight s -ess age -ĠBro ok -ĠA ud -l ation -Ġindust rial -L ike -ĠB razil -rou s -ES S -ĠL uc -Ġsome how -Ġ8 5 -Ġpro port -Ġpolit icians -Ġindic ate -Ġh ole -Ġtechn iques -Ġcompet itive -Ġph r -Ġv 
o -ist ent -ĠD ream -Ġcamp us -Ġaspect s -Ġhelp ful -Ġsh ield -or se -Ġtrig ger -m al -Ġ5 8 -Ġt ort -Ġperson ally -Ġt ag -Ġkeep s -ĠV ideo -Ġben ch -Ġg ap -a ire -Ġe ast -Ġrec overy -per ial -Ġprof it -ĠM ic -Ġ5 7 -Ġcol on -Ġstrong ly -st yle -Ġalleg ations -h an -Ġrep orters -j o -r ine -arg et -and al -Ġ0 3 -Ġfl ash -tr ans -Ġstr ict -Ġpark ing -ĠPak istan -Ġl i -Ġwe ird -ĠE ric -Ġreg ions -ĠJ un -Ġint ellect -ĠW H -od ing -rib utes -up id -ĠT it -Ġf inger -or ia -Ġe lev -ĠF ield -Ġcon clusion -; ; -Ġfeel ings -Ġext ensive -Ġm ixed -Ġne uro -v y -Ġhar ass -ĠC irc -ou ch -Ġterrit ory -Ġsuccess fully -M ar -Ġing red -Ġoverw hel -Ġl ayer -V iew -Ġall ies -ill ance -ĠTh ree -Ġb unch -Ġnorm ally -Ġnet works -Ġsac r -ĠC IA -b les -Ġch ose -Ġopp onents -Ġregard less -Ġfr anch -Ġpre f -ĠP o -Ġbr idge -ann a -ĠSil ver -Ġw age -p age -ri or -Ġrad ical -ĠL ittle -Ġman ip -Ġsecret ary -Ġg ang -D R -F A -Ġdec ent -ĠSp irit -Ġun cle -ĠDevelop ment -Ġinvest ors -Ġwall s -Ġpub lish -Ġgener ate -iss ions -c ar -Ġprom ote -Ġcut ting -Ġche st -Ġdrink ing -Ġcollect ed -Ġ7 2 -Ġhop ing -Ġem br -gor ith -Ġwar ned -Ġinstruct ions -O G -ĠD id -ĠAg ency -Ġg ear -Ġcritic ism -ĠF urther -Ġut il -ann y -R ed -Ġcoun sel -ĠAs ian -Ġredu ction -p ool -Ġteach ing -Ġdeep ly -i y -Ġestim ates -Ġcho ices -Ġperman ent -in em -ke l -Ġf asc -p se -f ile -ĠL ow -ĠP erson -Ġt ournament -st al -Ġm el -U ST -ĠR ay -az i -V al -Ġcont ained -ĠH olly -Ġw ake -Ġreve al -Ġprocess es -ĠIS IS -Ġ0 9 -Ġbl ind -Ġste el -ĠB ad -Ġcare fully -app y -ro it -Ġg aming -Ġhous es -ĠC oll -Ġtr uck -er m -Ġsc ored -Ġocc as -ret urn -b ound -v ar -Ġsh arp -Ġaf raid -ĠE X -am ber -c ific -Ġsche me -N C -ĠPol it -Ġdecl ine -Ġ199 8 -Ġpus hing -Ġposs ession -Ġpriv ile -Ġteacher s -Ġy ield -H A -ĠDav is -it led -#### #### -Ġr ig -ĠD aniel -ac on -Ġh ide -ut en -Ġcolle agues -Ġprin ciples -Ġl oud -Ġs in -ĠDem on -Ġst one -Ġ0 2 -Ġt aught -Ġter rible -Ġst uck -ĠPol icy -te en -Ġimplement ation -ĠB BC -ĠAP I -Ġwhe el -all as -Ġch 
ampions -ol ars -play er -Ġrepeated ly -ĠSt ill -Ġlik es -ast y -es ter -ĠCath olic -R L -Ġb ath -Ġno ise -t itle -Ġn orthern -P art -Ġmag n -Ġf ab -ĠAs h -Ġdis pl -Ġtick et -Ġm urd -Ġalong side -ĠMus ic -Ġr iver -ĠSte el -ĠC L -ĠPl ayer -ĠM ult -ow ing -re p -s ize -Ġt ur -ĠGeorg ia -isc al -ra ction -Ġc able -Ġ5 9 -Ġw ins -Ġup coming -Ġsurv ive -Ġins pired -ĠEduc ation -Ġstat istics -ĠF oot -iam i -Ġy ellow -ĠP age -. - -ĠH as -Ġur ban -Ġa x -es sel -\ " -Ġquarter back -Ġreg ister -ĠLab or -Ġab ilities -ĠF amily -Ġvar iable -ĠPr ice -Ġcont em -Ġth in -ĠE qu -d ata -Ġg otten -Ġconst it -Ġas ks -Ġt ail -Ġexc iting -ĠE ffect -ĠSp anish -Ġencour age -ins on -ĠA h -Ġcommit ment -C S -Ġr ally -Ġ: : -Ġsubs id -Ġsp in -Ġcapt ured -201 8 -Ġinn oc -Ġalleged ly -ĠC ome -Ġart ists -ĠN umber -Ġelect ronic -Ġreg ional -ap es -Ġw ra -Ġmy th -pr ise -ĠM iller -ĠC reat -ĠEp isode -b ell -Ġdirect ed -Ġext ract -Ġs orry -Ġv ice -ag ger -ĠSu pport -Ġ6 6 -ĠI ron -Ġwonder ful -Ġg ra -N et -ion e -E ng -Ġsh ips -ik es -ĠK evin -it ar -Ġactiv ists -tr ue -ĠAri zona -ent h -ĠDes pite -ĠS E -Ġha bit -ern el -Ġin qu -Ġab ortion -Ġv oid -Ġexpl icit -Ġeng aged -Ġang ry -Ġr ating -Ġfr ag -b ro -ick ing -d ev -Ġwor ried -Ġob ser -Ġap artment -ĠG T -Ġest ate -ĠConst itution -em on -ĠS now -Ġcount y -Ġdis ag -ĠStep hen -Ġimm igrants -w ind -ĠN ations -Ġfol ks -O ut -Ġg all -Ġtarget ed -Ġst ead -ĠB on -ĠL ib -Ġinform ed -Ġ12 0 -ch ain -idel ines -or ough -Ġdri ven -Ġregular ly -Ġbas ket -Ġprinc iple -oc ument -Ġst un -ib ilities -ĠRom an -ĠAb out -Ġal ert -Ġdemocr acy -Ġrepresent ed -H S -c ers -p arent -Ar t -p ack -Ġdi plom -re ts -ĠN O -Ġcapt ure -ĠAd v -Ħ ¢ -Ġannounce ment -ĠL ear -Ġh ook -Ġpur s -ĠS uch -ĠC amer -Ġrefuge es -ĠV e -P ol -Ġrecogn ized -l ib -Ġhad n -A ss -Ġpil ot -us hing -Ġreturn ing -Ġtra il -ĠSt one -Ġrout ine -Ġcour ts -Ġdes per -Ġfriend ly -ĠIt aly -Ġpl ed -Ġbreat h -Ġstud io -N S -Ġimp ressive -ĠAfghan istan -Ġf ing -Ġd ownt -ink ing -ĠR og -i ary -col or -se x -ar on 
-Ġf ault -ĠN ick -D own -ĠR ose -ĠS outhern -X X -is odes -L ist -6 00 -Ġout come -er r -Ġelse where -Ġret ire -Ġp ounds -ĠGl obal -Pe ople -Ġcommun ications -Ġlo an -Ġrat io -ĠEm pire -Ġg onna -Ġinv ent -D F -Ġ19 70 -ĠComm on -p at -Ġprom ised -Ġd inner -ĠH om -Ġcreat es -Ġoper ate -ver ty -ĠJ ordan -et ime -Ġsust ain -R eg -Ġincred ible -im a -Ġwar rant -Ġm m -A tt -Ġlaw suit -Ġreview s -it ure -ĠS ource -l ights -ĠF ord -Ġ6 3 -g roup -st ore -Ġfeat ured -Ġfore ver -Ġpo verty -ĠP op -ĠC NN -az z -ab is -ach ing -Ġl aid -ĠSu pp -Ġfil ter -en a -ĠCommun ity -Ġcreat ures -u ction -ĠR oyal -Ġassoci ation -ĠCon nect -ĠBr ad -âĸ Ī -l ers -the re -ĠG i -Ġval uable -AC K -ĠT aylor -Ġl iquid -ĠAtt orney -ĠCar l -ĠF inal -ag a -ĠWil son -B ecause -ĠProf essor -ak a -Ġincred ibly -r ance -! ) -R ef -s k -Ġsol utions -Ġatmosp here -Ġbl ame -um es -ĠN ob -C A -um ps -r ical -ĠPut in -ĠD est -or ic -ĠP A -Ġrespect ively -w an -Ġfif th -â Ħ¢ -ĠC ry -Ġgovern or -res ident -Ġpurch ased -Ġh ack -Ġint ense -ob s -Ġorig in -Ġdef ine -Ġcare ful -** * -Ġshould er -Cl ick -Ġt ied -Ġdest ruction -ou red -Ġno body -Ġh o -ĠEx per -Ġt ip -" ; -Ġtechn ique -Ġj ur -ĠP ok -b ow -Ġleg end -Ġacc ord -Ġbus y -ĠInt el -Ġh ang -ak i -. 
] -âĢĶâĢĶ âĢĶâĢĶ -Ġsur gery -Ġrep rodu -Ġun iform -Ġscen es -c ode -Ġ6 2 -l isher -ĠH ave -ph ia -Ġcry pt -Ġrec on -Ġsc ream -Ġadop ted -Ġsc ores -N e -ĠIt alian -in cluding -B O -Ġindic ated -Ġent ertain -G u -T ext -i el -Ġtw enty -Ġeng age -off s -ĠPac ific -Ġsm ile -Ġperson nel -Ġto ler -Ġdo ors -Ġt one -Ġmach ines -Ġent ering -ten ance -C O -ĠJer sey -Ġfore st -Ġhor se -Ġcompl aint -ĠSpr ing -y o -ĠPl us -ed ing -ĠRet urn -qu arters -ial s -c ow -Ġacad emic -Ġf ruit -Ġ199 6 -og ether -Ġw ine -Ġpur su -ĠSte ven -Ġlic ens -Wh o -Ġclot hes -re ction -Ġsqu ad -Ġst able -Ġr aw -z ens -St ar -ut ies -anc er -Ġke ys -ĠM u -Ġcompl icated -ig er -ĠTe xt -Ġabs or -Ġ6 8 -Ġfun ny -Ġrel ief -ĠL ew -ĠC ook -Ġch art -Ġdraw ing -G E -Ġmod ule -ĠB ull -I LL -Ġs alt -0000 0000 -il le -Ġres ource -aw ay -adel phia -ĠB ru -Ġ6 7 -Ġsome body -Ġparticip ate -Ġro se -we red -Ġmus cle -Ġcons ent -Ġcontin uing -ĠGuard ian -ĠOr der -reg on -Ġre ar -Ġprov ision -Ġlik ed -ri ent -Ġb ra -Tr ans -Ġmeet ings -Ġto x -Ġcon vent -Ġaut o -Ġrec ording -ĠSo ft -00 1 -ĠR oll -Ġprogram ming -Ġp ic -Ġprov ed -Ġst ab -ĠA st -Ġca ption -ul ating -ĠAtt ack -Ġnew ly -Ġ199 7 -f r -Ġdis cipl -ĠGree k -Ġed ition -ĠDo es -ĠB ox -if le -ack et -Ġpass es -Ġgu est -Ġac celer -it als -U D -Ġaut hent -ĠR est -ov al -t a -u ine -Ġarm or -ĠT own -Ġcomp at -Ġinc hes -Des pite -Ġass ign -he rent -Ġprep are -ĠM eg -oc key -Ġdep ends -Ġtrack s -w atch -Ġl ists -ĠN orthern -Ġal ter -re c -ĠE astern -Ġcond em -Ġevery where -? 
' -Ġaff ili -Ġf ought -": {" -Ġm ac -it arian -Ġsc ope -ĠA L -aw s -ar ms -Ġqu e -Ġenjoy ed -nes ota -Ġagg ressive -ĠSt ory -ĠI V -Ġrec ipe -Ġrare ly -ĠMed ical -val ue -ang el -ay ing -omet hing -Ġsub section -Ġs outhern -Ġfrequ ency -re te -roll ed -ult s -ĠN ic -Ġbeh alf -Ġsequ ence -ab et -Ġcontrovers ial -Ġcomp rom -Ġwork er -Ġmain ly -Ġal gorith -ĠM ajor -or ce -g ender -Ġorgan ized -Ġf ake -Ġconclud ed -ĠE D -ĠEx ec -r age -Ġch ances -ber ry -ĠTr ad -Ġconfig uration -Ġwithd raw -Ġf ro -ud es -ĠBro ther -ĠB rian -Ġtri es -Ġsam ples -Ġb id -ĠGold en -Ġphot ograph -if est -ĠD O -ĠPar liament -******** ******** -R em -Ġcont est -Ġsign ing -p x -ĠZ eal -âĶĢ âĶĢ -E ar -Ġex it -Be fore -ĠCor por -n ull -mon th -Ġrac ial -ott ed -ĠV eg -ĠRe uters -Ġsw ord -ps on -ĠRom ney -a ed -Ġt rib -Ġin ner -Ġprot ocol -ĠB i -ĠM iami -ever al -p ress -Ġsh ipping -ĠAm endment -ĠHow ard -con nect -ĠD isc -ĠJ ac -iam ond -ĠThere fore -s es -ĠPrin cess -ĠUS B -ĠAn th -Ġsurve illance -Ġap olog -Ġ6 1 -ow a -Ġf ulf -j s -Ġl uck -ust ed -Ġ § -n i -Ġant icip -em an -Ġwin ner -Ġsil ver -ll a -ic ity -Ġunus ual -Ġcr ack -Ġt ies -e z -Ġpract ical -Ġprov ince -ĠPl ace -Ġprior ity -IC E -Ġdescrib es -Ġbr anch -F orm -ask a -miss ions -b i -Ġp orn -ĠTur k -Ġent hus -Ġf ighters -Ġ0 8 -ĠDet roit -Ġfound ation -av id -A re -Ġjud gment -cl ing -Ġsol ve -ĠDes ign -W here -hes is -ĠT ro -a fter -Ġne utral -ĠPalestin ian -ĠHolly wood -Ġadv is -ĠN on -y es -ol is -Ġrep utation -Ġsm ell -Ġb read -ĠB ul -ĠBe ach -Ġclaim ing -Ġgen etic -Ġtechn ologies -Ġupgr ade -row s -Ġdevelop er -ĠJ osh -ĠDis ney -erv ed -ip al -Ġun ex -Ġbare ly -t hen -ĠP ub -Ġill ness -et ary -ĠB al -Ġp atch -Ġbut t -Ġst upid -ĠD og -ĠD allas -f ront -ie ce -Ġprot ests -Ġch at -oen ix -Ġw ing -Ġpar liament -Ġ7 7 -ose xual -Ġre nder -pt ions -ĠCo ast -os a -ĠG reg -h op -ĠMan agement -Ġbit coin -Ġrec over -Ġincor por -or ne -ĠUs ing -Ġpre ced -Ġthreat ened -Ġspirit ual -ĠE vent -ĠF red -Ġadvert ising -Ġimprove ments -ĠC ustom -Ġer 
rors -Ġsens itive -ĠN avy -Ġcre am -L ook -Ġex clusive -Ġcomp rehens -Ġde leg -Ġcon ce -Ġrem em -Ġstruct ures -Ġst ored -N D -Ġ1 000 -U P -ĠB udd -A F -w oman -ĠAcad emy -ð Ł -se a -Ġtem porary -Ab out -es ters -Ġtick ets -Ġposs ess -in ch -o z -Ġl a -Ġcontract s -Ġun p -Ġc ig -ĠK at -ult ural -as m -Ġmount ain -ĠCapt ain -St ep -m aking -ĠSp ain -Ġequ ally -Ġl ands -at ers -Ġreject ed -er a -im m -ri x -C D -Ġtrans action -g ener -less ly -Ġ| | -Ġc os -ĠHen ry -Ġprov isions -Ġg ained -Ġdirect ory -Ġra ising -ĠS ep -ol en -ond er -Ġcon sole -in st -Ġb om -Ġunc ertain -1 50 -ock ing -Ġmeas ured -Ġpl ain -Ġse ats -Ġd ict -S L -af e -Ġest imate -iz on -at hered -Ġcontribut ed -Ġep isodes -omm od -G r -AN T -Ġ6 9 -G ener -Ġ2 50 -vious ly -rog en -Ġterror ism -Ġmove ments -ent le -oun ce -ĠS oul -Ġpre v -ĠT able -act s -ri ors -t ab -Ġsuff er -Ġn erv -Ġmain stream -ĠW olf -Ġfranch ise -b at -Ġdem ands -Ġag enda -Ġdo zen -Ġclin ical -iz ard -ĠO p -t d -Ġvis ited -ĠPer haps -Ġact or -Ġde lic -Ġcont ribute -Ġin ject -ĠE s -ac co -Ġlist ening -Ġcon gress -epend ent -Ġprem ium -Ġ7 6 -ĠIr ish -Ġass igned -ĠPh ys -Ġworld wide -Ġnarr ative -ot ype -m ont -b ase -ĠB owl -ĠAdminist ration -Ġrel ation -ĠE V -C P -Ġco vers -Ġ7 8 -Ġcert ific -Ġgr ass -Ġ0 4 -pir acy -ir a -Ġengine ering -ĠM ars -Ġun employ -ĠFore ign -st ract -Ġv en -Ġst eal -Ġrepl ied -Ġult imate -Ġtit les -d ated -Ġj oy -a us -Ġhy per -ak u -Ġoffic ially -ĠPro duct -Ġdifficult y -per or -Ġresult ed -rib ed -l ink -wh o -~~ ~~ -ĠSpe ed -ĠV iet -W ind -ĠBar ack -Ġrestrict ions -ĠSh are -Ġ199 5 -ition ally -Ġbeaut y -op t -Ġm aps -ĠC R -ĠN ation -ĠCru z -W ill -Ġelectric ity -Ġor g -Ġb urd -Ġviol ation -Ġus age -Ġper mit -ĠCh ron -ĠF ant -Ġn aturally -Ġ0 7 -Ġth rown -ĠAw oken -Ġal ien -ĠHer o -ĠK ent -ĠR ick -ri ke -Ġp ace -}, {" -G L -Ġpo ison -ĠT ower -Ġform al -al ysis -Ġgen uine -Ġk il -a ver -Ġproced ure -ĠPro p -intend o -ĠM ain -as ant -Ġtr ained -G ame -ĠL oad -ĠM A -Ġcru cial -Ġle ts -ĠF R -Ġch ampion -1 01 
-ĠCon ference -Ġwrit ers -Ġconnect ions -Ġo kay -ir ms -ĠR and -Ġenc ounter -ĠB uff -Ġachie ved -Ġche cks -isc ons -Ġassist ant -Ġwhen ever -ĠA ccess -ĠU r -b in -Ġcl ock -is p -op her -Ġb orrow -Ġm ad -Ġperson ality -on ly -IS T -ab ama -Ġg ains -Ġcommon ly -Ġter r -Ġhyp ot -Ġre ly -Ġt iss -iscons in -Ġrid ic -f unction -ĠO regon -Ġun com -r ating -el and -ĠN C -Ġm oon -ann on -Ġvulner able -ut ive -³³ ³³ -ĠRad io -Ġw estern -se ct -ĠT ony -Ġocc urs -ĠO s -ĠH on -Ã Ń -Ġv essel -ĠScot land -Ġdiscrim ination -Ġsubsequ ent -st ring -Ġfant asy -ĠSh adow -Ġtest im -W E -it i -r as -Ġbo at -Ġmar ks -Ġord inary -Ġre n -Ġrepresent ative -Ġpet ition -Ġ7 3 -Ġad venture -Ġign ore -ĠPhil adelphia -ĠS av -V P -Ġfact ory -Ġt asks -Ġdep ression -z ed -................ ................ -ĠSt orm -Ġc ogn -Ġelig ible -Ġredu cing -v ia -Ġ0 5 -Ġstri king -Ġdoll ar -h o -O V -Ġinstr ument -Ġphilosoph y -ĠMo ore -ĠA venue -Ġrul ed -ĠFr ont -IN E -ĠM ah -Ġscen ario -ĠNAS A -Ġen orm -Ġdeb ut -Ġte a -T oday -Ġabs ence -S im -Ġh am -le ep -Ġt ables -ĠHe art -M I -K e -re qu -V D -m ap -Ġchair man -Ġp ump -Ġrapid ly -v i -Ġsubstant ial -E P -d es -ch ant -ili pp -ĠS anta -ri ers -anche ster -L oad -ĠC ase -Ġsa ving -Ġ7 4 -ĠA FP -er ning -oun ced -ĠMin nesota -ĠW as -Ġrec ru -Ġassess ment -ĠB ron -U E -Ġdynam ic -Ġf urn -ul ator -Ġprop ag -h igh -Ġacc ommod -Ġst ack -ĠS us -w rit -Ġre ven -ĠGod d -ĠZeal and -ab s -Ġbr ut -Ġper pet -h ot -Ġhard ly -ĠB urn -ãĤ ¹ -Ġst y -Ġtrans actions -Ġg ate -Ġsc reens -Ġsub mitted -Ġ1 01 -Ġlangu ages -ugh t -em en -Ġfall s -Ġc oc -Ĥ ¬ -Ġstri kes -p a -Ġdel iber -ĠI M -Ġrel ax -ann els -ĠSen ator -Ġext rem -Ġ} , -ĠDe b -Ġbe ll -Ġdis order -c ut -Ġi OS -Ġl ocked -Ġem issions -Ġshort ly -" ] -ĠJud ge -ĠS ometimes -Ġr ival -Ġd ust -Ġreach ing -F ile -¯¯ ¯¯ -ino is -ĠJ ason -Ġs atell -are t -Ġst ations -Ġag ric -ĠTechn ology -com es -ĠUn fortunately -ĠChild ren -Ġappl ies -ast ed -Ġan ger -ail ability -ĠDam age -Ġcomp are -ĠStand ard -Ġaim ed -ĠB a -angu age -Ġreg 
ulation -Ġj ury -Ġair port -Ġse ctions -ĠPr ince -em ed -Ġmedic ine -Ġh itting -Ġsp ark -ol ves -Ġad s -St ate -Ġfood s -Ġrepl acement -Ġch icken -Ġlow est -Ġmind s -Ġinvol ves -u i -Ġarr ang -Ġproced ures -ĠWh ich -ivers ary -Ġb ills -Ġimprove ment -Ġin ev -Ġexpect ations -Ġintellect ual -Ġsp aces -Ġmechan ism -2 50 -bre ak -ĠZ e -ĠT enn -ĠB alt -Ġbar rel -Ġstat ic -man n -Pol ice -Ġt ips -Ġhand ling -c us -od ed -il ton -ir y -Ġjournal ists -our se -Ġcom ic -Ġnom ine -IT Y -Ġvers us -Ġlo op -Ġsur f -ĠInd ust -ĠHun ter -Ġbelief s -is an -Ġset up -Ġbre w -im age -Ġcomput ers -f ol -} ," -ĠMed al -Ġtax p -Ġdisplay ed -Ġg rav -Ġf iscal -M on -ĠMos cow -ĠK ong -ĠCent re -Ġcamer as -ĠMr s -ĠH ay -Ġa ver -ĠK elly -p y -Ġrequire ment -Ġent itled -omb ie -Ġsh adow -ag ic -ĠA k -Ġel ite -Ġdiv ided -Ġhead ing -Ġcop ies -Ġloss es -Ġv it -k ed -ĠB ry -Ġan s -ĠSte am -Ġrep orter -he im -ĠIt em -Ġsuper ior -d on -ere nt -à ¶ -Ġtherap y -Ġpe ak -ĠMod el -Ġl ying -Ġg am -z er -r itten -Ġrespons es -Ġconsider ation -ĠB ible -Ġl oyal -Ġinst ant -Ġp m -ĠFore st -à ¼ -Ġext end -Ġconv icted -Ġfound er -Ġconv in -ĠO ak -che ck -Ġsch olars -p ed -Ġover se -T op -c ount -ĠAr k - · -Ġ0 6 -ĠL A -m d -ĠLat in -im ental -ĠC PU -Ġsubst ance -Ġminor ity -Ġmanufact uring -E r -ocol ate -Ġatt ended -ĠMan ager -r ations -Ġappreci ate -om y -GB T -id ency -B L -Ġguarant ee -pos ition -Ġo cean -clud e -Ġhead ed -Ġt ape -Ġlo ose -Ġlog ic -Ġpro ven -Ġsp ir -Ġad mit -is a -Ġinvestig ate -Ġ199 4 -sy lv -ĠL ost -c est -Ġ7 1 -Ġrequest ed -Ġwind ows -ĠPok é -ĠWith out -M et -Ġbehavi our -Ġread er -Ġh ung -ĠKe ep -Ġro les -Ġimplement ed -Ġbl ank -Ġserv es -ĠJ ay -Ġc ited -ĠF riend -prof it -ap on -Ġrep air -it em -arr ass -Ġcrit ics -ad i -ĠF ather -Ġsh out -Ġf ool -Ġ8 8 -Ġprodu cing -Ġl ib -Ġround s -Ġcirc le -Ġpre par -Ġsub mit -Ġn ic -mor row -ãĥ « -U nder -Ġv ital -ater n -Ġpass word -Ġpublic ation -Ġprom inent -Ġspeak s -Ġb ars -Ġde eper -ĠM ill -port ed -Ġw id -Ġbut ter -Ġsm oking -Ġindic ates -K ey 
-rop ri -ĠF ile -all ing -ast ing -ĠR us -Ġad j -Ġ7 9 -av al -Ġpres um -bur gh -on ic -Ġf ur -Ġpoll s -ik a -Ġsecond ary -Ġmon ster -ig s -ĠCur rent -E vent -Ġowners hip -end ar -Ġarri ve -ĠT ax -Ġn ull -ĠPri v -Ġth ro -Ġk iss -c at -Ġup set -ang le -it ches -ect or -olog ists -ĠGal axy -Ġcor ruption -Ġh int -ent er -ĠH ospital -Ġgreat ly -Ġbeg un -es y -Ġso il -ĠAnt on -Ġmain tenance -ãĥ © -Ġdo zens -Ġhuman ity -ĠAl abama -Ġr om -w orth -ap ing -sylv ania -l ah -Ġg athered -G A -Ġattack ing -f ound -ĠSqu are -Ġar bit -ict ions -ĠW isconsin -Ġd ance -ĠS aint -arch y -Ġbase ball -Ġcontribut ions -Ġliter ature -Ġex ha -per ty -t est -Ġb ab -Ġcontain er -let ter -Ġfall en -Ġwebs ites -Ġbott le -ĠS ac -Ġbre ast -ĠP L -Ġveter an -Ġinterview s -ĠA le -Ġb anned -eng ers -ĠRev olution -in th -Ġconc erning -IV E -Ġexp enses -ĠMatt hew -ĠColumb ia -d s -ist ance -Ġent ity -.. ." -Ġrel iable -Ġpar alle -ĠChrist ians -Ġopin ions -Ġin du -l ow -Ġcompet e -Ġth orough -Ġemploy ed -Ġestablish ment -ig en -ĠC ro -Ġlawy ers -ĠSt ation -T E -ĠL ind -ĠP ur -it ary -Ġeffic iency -âĢ IJ -ĠL y -Ġm ask -Ġdis aster -Ġag es -ER E -es is -ĠH old -Ġcas ual -b led -Ġen abled -ĠEn vironment -ĠInt elligence -i per -ĠM ap -ĠB E -Ġemer ged -is dom -Ġc abin -Ġregist ration -Ġfing ers -Ġro ster -Ġfram ework -ĠDo ctor -et ts -Ġtransport ation -Ġaware ness -H er -Ġattempt ing -O ff -ĠSt ore -ÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤ -ĠK now -Ġdef ence -Ġsc an -ĠT en -ĠCh air -ĠP H -ĠAtl anta -Ġfuck ing -Ġans wered -b n -ĠK ar -Ġcateg ories -Ġr ational -Ġc ust -Ġrob ot -Ġcorrect ly -Ġg if -Ġgraph ics -m ic -Ġground s -ĠO pp -i ate -Ġdist ributed -Ġsan ctions -Ġchalleng ing -ut o -Ġingred ients -Ġinv ited -Ġfound ed -ĠRe qu -d ed -Ġb owl -Ġbrother s -ĠH a -I O -Ġw ages -im ore -oc ial -Ġse ed -ative ly -Ġaddress es -ĠI owa -ab eth -Ġatt itude -is d -ch ild -Ġm ole -Ġdisco very -y ard -B r -Ġ8 2 -Ġsuppl ies -ell ing -Ġdist ingu -C R -Ġre cept -Ġ vert -Ġsw im -b ec -d oor -ĠY eah -Ġg al -Ġinter act -ĠE SP -ĠC S -amp s -Ġconvin 
ced -Ġobject ive -Ġdis h -ĠPhot os -l ad -Ġdownt own -o il -in ction -Ġto morrow -ĠC OM -Ġsurv ival -sh ot -Ġsett lement -C ons -ĠX box -int erest -ĠS M -arg o -en ess -Ġeth nic -b ered -M in -ĠT ok -Ġinc ent -ĠComm and -Ġmain tained -Ġbreak s -br idge -at ar -ag g -ĠF inally -un icip -ĠO nt -le ft -Ġrecogn ition -Ġ* / -ĠP ers -Ġwe lf -Ġaddress ed -ĠK ansas -Ġvir us -Ġwhere as -Ġp apers -ram s -ĠMin istry -Ġple asure -Ġacqu ired -Ġd uration -j pg -Ġcal m -ĠN HL -Ġburn ing -Ġfold er -ick ed -ĠP y -ĠIll inois -Cl ass -ĠGodd ess -Ġperform ing -Ġwelf are -j ar -In ter -Ġl in -Ġenh ance -Ġnot ion -f are -yp es -ĠAre a -Ġcann abis -ĠDie go -f s -ĠM anchester -com m -in ite -Ġcover ing -ĠS ound -Ġ19 60 -Ġ8 4 -e lect -z ing -Ġcitiz en -Ġph ones -Ġr aid -Ġign ored -ĠOb ject -Ġu pload -c ard -Ġmod ified -Ġroom s -ia h -r ange -he ast -ach us -Ġsuggest ing -âĢ ĭ -gr ade -E l -Ġclot hing -Ġr h -ĠH an -un ity -en cing -ĠAust in -sec ution -t ra -d em -ĠQ ual -Ġhe aven -Ġst ages -Ġw edd -pl us -ific ial -ĠIm m -ĠH o -iet ies -Ġphr ase -Ġbr ill -act ory -Ġprov iders -Ġsil ence -Ġa er -ĠA I -ĠAd venture -Ġplatform s -Ġdemonstr ated -Ġinter f -ing ton -Ġr aces -Ġgr ade -ult ane -ĠTh rough -f alse -Ġb ow -ĠA B -Ġfl avor -Ġhistor ic -g ov -Ġcol our -Ġview ed -ĠEm ail -el come -Ġinter vention -Ġd iversity -Ġperiod s -Ġre verse -ĠV ery -Ġqu ote -ĠLe ft -th rough -Ġsc rew -Ġland ing -Ġp ill -Ġw et -Ġprot esters -Ġrepe at -av ed -er k -Ġsal ary -ĠPenn sylvania -St ill -Ġmay or -Ġkit chen -Ġfeat uring -ĠM useum -ĠT ournament -ĠF al -Ġser vers -U C -Ġany body -im g -ĠTr ade -ixt ure -the less -Ġfin ance -Ġcl osing -ĠPat ri -i ac -ab el -Ġ> > -or ous -Ġf irms -sc reen -un a -Ġemb arrass -ul se -Ġlet ting -Ġth rew -ile y -Ġch annels -l an -ĠVeg as -Ġse ar -Ġfant astic -ar re -uzz le -ĠD er -Th ose -Ġsw ing -Ġshe et -ind ex -co ver -og an -Ġvari ables -ĠTe ch -Ġsp oken -ac hel -ĠD a -ĠMount ain -Ġload ed -Ġfoot age -vers ion -Ġun l -ĠPh oenix -Ġthrow ing -Ġf iring -Ġtrack ing -Ġw idth 
-Ġstrugg ling -ro oms -ot ion -Ġmonth ly -ĠSer ver -Ġegg s -op en -M C -Ġ199 3 -Ġh ired -Ġstay ed -ĠAll en -Ġst ro -Ġ9 8 -st ep -ĠTurk ish -Ġfab ric -ist ing -ĠD om -Ġd ates -Ġpr on -Ġbasket ball -Ġl ucky -ĠArab ia -Ġassum ed -est y -Ġaff airs -Ġgl ad -ĠInd eed -ĠF A -ĠW ord -Ġjo ining -if ice -p read -ir ts -ĠSe lect -Ġpop ulations -aw are -Ġn ose -Ġcompl aints -st art -Ġsc oring -Th anks -Ġmin ing -Ġvisit ors -S H -Ġdam aged -Ġcharacter istics -ĠP ent -D C -Ġ8 3 -ĠS ix -r ates -Ġfl ags -ĠB rew -d og -M ark -// // -Ġexec ution -Ġj oke -ph ones -Ġtestim ony -Ġob st -Q L -ĠC ut -Ġstud ied -ĠN intendo -ick et -ĠN BC -Ġl ad -ĠB ra -ĠM oh -Ġk ernel -Ġoverwhel ming -Ġag ed -Ġapplic able -ĠC ond -Ġroad s -ĠBl ock -m ade -od ge -Ġcomm ands -Ġoff ices -vel and -Ġt ut -Ġrece iver -ĠF ro -Ġsho pping -Ġi P -ĠSt re -ĠA BC -Ġentertain ment -ĠB ow -ort ed -M c -Ġread s -gr ad -ĠCol lect -Ġâ ĪĴ -ĠCap ital -eder ation -Ġemploy er -Ġinvolve ment -Ġanx iety -al ia -Ġro of -ĠAm ong -ĠDemocr at -Ġstat s -ĠV ill -Ġconst itutional -Ġrefer ring -itt y -Ġtack le -out ube -Ġback ed -ĠH ong -ĠBro ad -Ġe le -ĠO tt -Ġ199 2 -h our -achus etts -C al -Ġdefe ated -Ġ8 1 -es p -Ġseem ingly -w as -ĠJ enn -ĠK urd -Ġg ene -Ġdisc ount -R et -EC T -( ); -Ġclub s -Ġs id -ĠM arsh -Che ck -Ġp p -ĠE ag -ides pread -Ġbe ings -F T -Ġintrodu ction -ĠCh ange -AR D -Ġ1 10 -ad ows -ier ce -Ġme al -a uthor -ĠB ang -lah oma -Ġr anks -201 1 -?? ?? -m ax -Ġcoll apse -Ġop ens -Ġe cho -Ġs oph -Ġrac ist -Ġenorm ous -Ġw aves -Ġt ap -Ġcomprehens ive -. 
-- -ĠR oy -Ġfarm ers -Rel ated -a ired -ron es -ĠC rim -Ġproport ion -Ġdesign s -Ġnegoti ations -Ġvirt ually -ĠBat man -Ġwar n -Ġlegit imate -m ate -Ġcon vention -, , -net ic -ĠS D -Ġconsist ently -Ġcompens ation -Ġpunish ment -Ġy e -Ġt ie -ĠB ureau -ir lf -ĠB u -ĠA ren -ĠPh ilipp -Ġkn ife -Ġmem ories -ĠR oss -Ġang le -Ġ8 6 -ĠTh under -Ġre nd -ĠT our -Ġcount s -s ung -ĠIm p -Ġeduc ational -Ġaccess ible -C OM -Ġd rew -y er -G l -am ine -OR T -O B -I B -m aster -Ġtri als -og y -h ar -ĠTr ust -Ġprefer red -irlf riend -ĠN ev -Ġb in -Ġc ow -P age -Ġsign ature -ĠB L -7 00 -Ġret ired -Ġby tes -Ġneigh b -ĠLeg end -Ġdev ast -Ġsuspect ed -is ons -ĠPoké mon -sc ale -Ġcap abilities -Ġre vel -Ġche ese -d y -igr ant -Ġfail ing -b its -ĠHer oes -ĠG host -ĠS cient -Ġappoint ed -ur i -Ġinst itution -Ġexpand ed -g reg -Ġmonitor ing -Ġp odcast -Ġcoal ition -Ġ9 6 -J o -Ġst olen -ĠS ab -Ġstop s -Ġhol iday -Ġint r -C ar -Bl ack -ĠL GBT -Ġwar ming -ĠAnd erson -Ġ8 9 -Ġprodu cer -M ed -Ġaccur acy -ĠMar vel -iz abeth -ĠPat rick -m ony -Ġmin i -ac les -Ġover t -the y -Ġmembers hip -ĠV en -Ġex ch -Ġrem oval -ĠD ave -T Y -m ad -ĠF ind -Ġad equ -Ġe c -Ġte eth -Ġemot ion -Ġper m -Ġsole ly -d b -Ġextra ord -IG HT -c al -Ġgu idelines -Ġd ying -Ġsusp ended -ĠPrem ier -ĠAnth ony -el ve -Ġd ad -ĠE th -ĠFoot ball -Ġabandon ed -Ġ< < -Ġm arch -Ġhor ror -âĢ¦ " -Ġchild hood -Ġcampaign s -Ġl unch -ĠAl bert -bl ock -âĸĪ âĸĪ -ound ing -Ġb one -or gan -ad ers -ĠFl ash -ĠDri ve -Ġton ight -Ġw ars -ĠF L -Ġform ation -con st -New s -Ġcom pe -or ious -ĠSt aff -Ġdiscuss ions -ĠProt ection -ĠJ am -Ġcrit eria -Ġinstall ation -Ġaccompl ish -iz za -Ġpub lisher -Ġresc ue -ĠT ry -U LL -ĠS om -ĠH op -ore t -th s -ord on -Ġp ocket -ĠIn v -Down load -ĠCr ime -Ġb ene -ĠGu ide -ĠAs sembly -Ġparam eters -I E -ĠAlex ander -Ġconc ert -ĠSc he -Ġsh oes -Ġvis iting -Ġrec all -Ġb ub -Ġr ural -Ġconc rete -ĠR os -N ext -R uss -Ġlo ans -ĠSh ield -Ġtre m -hem at -k g -ĠHar ris -is ition -ĠM ove -ĠF C -Ġf ate -ĠCh o -Ġt ired -Ġprinc 
ipal -h ist -ien ces -ath y -Ġse vent -Ġm ood -Ġstrateg ic -Ġdise ases -Ġfor um -Ġtem por -Ġhead quarters -P ar -ig e -fl ix -Ġgu itar -Ġ9 4 -On ly -Ġrele ases -ro ph -================ ================ -Ġ6 00 -ĠContin ue -ig ate -ĠC rit -sy stem -Ġdis abled -Ġunex pected -ith ub -Ġuncle ar -ĠE st -Ġcontr ad -Ġstrateg ies -vent ures -Ġpass age -AM E -Ġimpro ving -Ġreve als -Ġdecre ase -ov a -Ġann oy -ĠSh ort -ĠL ibrary -Ġcy ber -n ell -ĠH ur -ĠC B -Ġphot ograp -U I -Ġs ed -G e -Ġ8 7 -Ġd iverse -Ġencour aged -Ġcons piracy -Ġbird s -Ġoper ator -Ġhand ful -Ġclass ified -? ) -Ġdram atic -Ġinvestig ators -it o -Ġw idespread -ĠR oom --------------------------------- -------------------------------- -Ġcollect ive -Ġjournal ist -St ring -Ġtemper atures -il a -Ġgu id -Ġins pect -Ġmiss ile -ĠMay or -Ġman ual -Ġsim ultane -Ġrat ings -Ġsu ck -Ġ9 7 -Ġunivers al -Ġph arm -Ġdis rupt -ian o -A V -Ġf t -Ġstat ist -old s -ĠWalk er -ph p -Ġunder t -ĠL as -ish op -nt il -res hold -ĠWhe ther -M s -Ġden y -ĠCl oud -Ġprov ider -Ġsurv iv -ĠUp date -h as -Ġmist akes -ch arge -pl ed -r ity -Ġn ode -ĠMass achusetts -ool s -lic ation -Ġf ails -em ale -or i -back s -Ġsh irt -Ġ' ' -ĠN AT -Ġwat ers -els on -Ġe ase -Ġsc ar -Ġcont ents -m ind -Ġcont ribution -Ġsh r -Ġhand ed -Ġst ability -Ġtra ve -E m -Ġmir ror -12 3 -Ġwe igh -Ġf iction -ou ver -ist ant -r ition -ĠF ed -Ġphys ically -Ġst ake -ĠArt icle -ĠAr c -ĠLew is -ĠM ind -Ġdemonstr ate -Ġprof its -v ision -om ic -ol id -Ġbatt les -Ġdri ves -Ġeas tern -ĠS ony -!! ! 
-ar ation -v ard -ĠG L -port ation -Ġ9 2 -Ġlaw makers -Ġprotect ing -ĠE PA -Ġy eah -Ġsh ame -ol ph -e ven -x it -Ġatt ach -Ġrepresent ing -Ġob s -ĠUt ah -iff s -ĠFre edom -à ³ -A K -Ġinc idents -it age -Ġview ers -c d -Ġm ouse -Ġcl ar -Ġaccord ance -Ġb ot -c or -ĠSum mer -he ld -Ġinnoc ent -Ġiniti ative -ol s -________________ ________________ -Ġsp ots -p ace -Ġconvent ional -Ġcorpor ations -Ġblock ed -H D -at tered -Ġref ers -Ġbu ck -ĠDig ital -12 0 -Ġtop ics -T F -Ä ģ -br id -re ement -Ġunder lying -ĠM ember -Ġinvestig ating -Ġpregn ancy -Ġtouch down -ĠB and -ĠCall er -Ġinst ances -P P -w a -G ood -Ġ199 1 -ĠC old -Ġfear s -Ġrem arks -Ĩ Ĵ -at al -Ġm it -Ġexper iments -i pt -Col or -ind u -Up date -Ġ9 3 -A g -Ġ å -anc ouver -B oth -Ġjud ges -Ob ject -Ġst ere -umb n -Ġparticip ation -ĠSt ars -ĠJ ere -Ġweek ly -ĠB an -Ġconvers ations -ĠP itt -u z -ĠIndian a -ĠK ick -Ġinf ection -Ġhero es -Ġsett led -Ġstri p -Ġh al -Ġd ump -ĠS ci -Ġl es -Ġref erences -ĠU RL -ĠBr idge -Ġwant ing -For ce -Ġex clus -Me anwhile -m n -Ġg entle -m aker -sen al -ĠG ro -ou ri -ĠR ain -ĠAll iance -Ġl ift -el a -S D -ĠCle veland -Ġrank ed -Ġst adium -Ġdead ly -ä ¸ -Ġr iding -ar ia -ĠAr mor -Ġdocument ation -ĠGree ce -ree k -Ġl ens -ĠS a -Ġg ross -ĠE mer -ag ers -ĠD ub -ĠR h -ĠAM D -Ġarri val -Ġdes ert -Ġsupp lement -ĠRes p -Ġkn ee -Ġmarg in -f ont -og g -201 0 -ĠP ir -ĠP rom -iv als -Ġint ake -Ġdifferent ly -ug s -Ġb its -clud ed -Ġsearch ing -ĠD u -um ble -Ġfunction al -ĠBalt imore -ĠC ould -Ġdes ired -Ġcirc uit -ĠL yn -ĠG O -ĠF alse -re pre -' : -alt ies -Ġmin im -Ġdro ve -ĠSh ould -Ġh ip -Ġpro s -Ġut ility -ĠN ature -ĠM ode -P resident -o pp -r at -form ance -Ġconcent ration -Ġf ont -ĠB ud -Ġam id -Ġre vers -ĠM L -B ar -Ġinter action -Ġjur isd -Ġspell s -d ep -f il -Ġcivil ians -ut ter -ĠCo oper -ĠBel ow -Ġent rance -Ġcon vert -Ġcontrovers y -ow ered -Ġcontr ary -Ġar c -ĠExec utive -ĠOffic er -Ġpack ages -Ġprog ressive -w idth -Ġreserv ed -v ol -ĠSam sung -Ġprint ed -Ġcent ers -Ġintrodu ce 
-ĠKenn edy -Ġodd s -Ġsure ly -Ġindepend ence -Ġpass engers -repre ne -ĠBe h -Ġl oves -ĠESP N -Ġfac ilit -Ġident ical -Ġdo ct -Ġpartners hip -con f -ĠH ide -Ġconf used -ĠC ow -M en -Ġw rest -ĠIraq i -Ġh oles -ĠStud ies -Ġpregn ant -h ard -Ġsign als -I X -Ġpull ing -Ġgrad uate -Ġnomine e -D ate -Ġper mitted -Ġâ Ĥ¬ -ĠOk lahoma -St art -Ġauthor ized -Ġal arm -ĠC os -v an -Ġgener ations -c ular -Ġdr agon -ĠSoft ware -ĠEd ward -Ġcontro ller -S en -ge red -ĠV ik -Ġappro ached -Th ank -Ġcan ce -Ġform ula -ĠSm all -Ġweak ness -Ġr amp -it udes -j ud -Ġbrill iant -Ġacc us -s ource -Ġ8 00 -ĠE vil -S w -Ġhom eless -we ek -i ens -r ics -ĠTh ird -T O -Ġorgan ic -Ġpresent ation -ag h -ĠDown load -v ation -Ġas sembly -or able -hold ers -ĠBern ie -ĠHel p -Ġt ong -ĠF ight -Ġbe ach -B ook -ĠL ic -Ġr ush -ĠR ound -ou p -ĠMar x -Ġcalcul ated -ĠDe vil -ĠSar ah -Ġoccasion ally -Ġbul let -Av ailable -g ate -Ġ9 1 -Ġh osp -Ġprom ises -ĠH IV -ĠSt adium -ĠSt ock -ĠCorpor ation -g age -N G -ĠC redit -Ġs ne -ib l -Ġacc um -s uch -Ġterror ists -Ġconscious ness -ĠZ h -Ġdram a -ool a -pir ation -Ġlab our -ĠN in -Ġut ter -Ġdemocr atic -Ġass ass -il ation -Ġg est -Ġab road -Ġmet ab -Ġs orts -Ġfl av -U B -Ġm g -ĠNot hing -ĠO d -Ġmus ical -200 9 -Ġdro ps -oc ated -ater al -0000 00 -Ġg re -Ġequ ality -Ġburd en -Ġv ig -ĠLe ader --------- ---- -Ġcere mony -Ġf ighter -Ġact ors -Ġ æ -am an -F i -Ġal ign -put er -Ġe lder -ĠN SA -Ġrepresent ation -ĠOnt ario -IT H -usal em -Ġharass ment -itz er -Ġsy mp -Ġbox es -ĠD R -Ġman ifest -at re -Ġ ^ -Ġd ies -le ton -Ġmiss ions -et he -Ġres olve -Ġfollow ers -Ġas c -Ġk m -l ord -am med -Ġsil ent -ĠAssoci ated -Ġtim ing -Ġprison ers -ĠK ings -ĠF ive -Ġtow er -Ġappro aches -Ġprecise ly -Ġb ureau -ĠM other -ĠI ss -Ġkey board -it ual -Ġfund ed -Ġstay ing -Ġpsych ological -Ġm ile -ĠLe on -ĠBar b -w ill -Ġw ider -ĠAtl antic -Ġt ill -ĠR ome -ro t -Ġaccomp an -Ġfl our -ac o -W orld -ĠExp ress -ĠY u -C or -Ġple ased -part y -Ġpoint ing -Ġinf lation -Ġro y -Ġ ), -ain er -Ġwedd 
ing -orm on -Ġrequ iring -Ġqual ified -Ġse gment -EN D -Ġs izes -e als -Ġcor rupt -ass ador -Ġcele b -Ġdream s -ĠM ess -Ġcheck ing -ĠV ersion -Ġprep aring -Ġact ively -ĠD iff -Ġl ux -ĠW inter -act eria -ĠN E -Ġdep uty -Ġtrans gender -Ġsum mary -Ġin her -er ies -ch ar -ĠY an -Ġkn ock -ĠP ath -Ġl ip -roll er -Ġimp ression -Ġcelebr ate -Ġsl ide -Ġgu ests -Ġcl ip -F S -Ġsav ings -Ġcapt ain -Ġleg acy -ĠDen ver -Ġw ounded -tab oola -AC T -Ġpurs ue -Ġo xy -Ġ q -Ġsem i -ĠN eed -ĠAff airs -Ġob sc -Ġcheck ed -Ġd ual -C ode -ĠM D -le m -ult y -Ġ © -ĠEl izabeth -Ġcent uries -ard ed -s rc -Ġev ident -enn is -at in -Ġunemploy ment -ĠMar io -Ġint im -Ch rist -Ġbi ological -Ġsold ier -ĠAdd ed -Ġm ath -ĠG il -Ġbi as -Ġd ating -ĠO cean -Ġm ice -M us -h ire -ĠT es -Ser ver -lim ited -S ize -Ġmet ers -Ġrock et -es see -Ġcertific ate -ĠIran ian -AS S -Ġgr id -D ec -Ġro lling -com mun -ĠSwed en -b ury -Ġtiss ue -Ġrac ism -ĠL ocal -Ġmyster y -Ġexam ine -Ġst em -Ġs its -Ġhop ed -ot ing -Ġdial ogue -Ġpers u -W atch -l ay -M AN -Ġch ronic -ĠPort land -mark et -ĠS EC -Ġparalle l -Ġsc andal -Ġcar ries -Ġphenomen on -h uman -ack er -ĠO x -Ġretire ment -tain ment -ov ie -ĠG ear -Ġd uties -Ġdo se -Ġsc roll -M B -in f -Ġsa uce -Ġland scape -red dit -ĠChampions hip -ĠRed dit -al id -Ġco in -Ġover s -Ġpost ing -ab out -Ġf el -and y -Ġb old -Ġfocus ing -e ffect -G R -Ġde emed -Ġrecommend ations -Ġste pped -Ġvot er -ĠDe ep -ĠInst agram -Ġmoder ate -ĠMary land -Ġrestrict ed -ĠM B -ĠCh all -Ġto b -Ġc ir -ĠO cc -ĠE ver -Ġcoll aps -IN FO -= - -ĠP ict -ĠAcc ount -n c -Ġo ught -Ġex port -Ġdr unk -( ' -Ġw ise -ĠM ort -ne cess -Ġan cest -ĠInc re -Ġfrequ ent -m ir -Ġinterpret ation -Ġdepend ent -Ġco ins -ĠB ol -V ideo -ĠJust in -Ġfat al -Ġcook ing -Ġconf usion -ip her -Ġcust ody -ĠMor gan -om ach -ĠGovern or -Ġrestaur ants -el ing -Ġacknowled ged -Ġthe r -Ġgen es -ch ing -He y -Ġtact ics -ĠMex ican -Ġv end -Ġhe s -qu er -Ġnot ing -ĠCamer on -Ġtarget ing -ro ck -Ġcred its -Ġemot ions -Ġrepresent atives -new s 
-Ġlegisl ative -Ġrem oving -Ġtweet ed -ĠCar ter -ĠF ixed -Ġfor cing -Ġspeak er -Ġm ales -ĠViet nam -l ined -Ġconcept s -Ġvo ices -o ir -ĠT rib -W he -ĠJer usalem -ĠS ant -Ġc ul -Ġl ady -ĠHaw ai -Ġar ts -ĠIn n -ĠMach ine -ĠEm peror -Ġsl ot -g ly -ĠPro cess -II I -Ġathlet es -ĠTem ple -ĠRep resent -Ġpres c -Ġt ons -Ġgold en -Ġp unch -ĠG R -iver pool -Ġen act -Ġlob by -Ġm os -Ġpick ing -Ġlif etime -Ġcogn itive -E ach -z o -Ġd ub -Ġcons ists -ol n -Ġf estival -am ous -Ġint ellig -w ords -ĠSm art -Ġde le -Ġl apt -Ġmag ical -ĠS in -b us -ur ities -igh th -ĠRub y -ĠS ure -ol ving -Ġj un -O ST -Ġimp osed -Ġast ron -Ġcor rel -ĠN S -ĠK it -ĠF uture -b urn -Ġimm une -oc us -Ġcour ses -ĠSt ring -Ġle an -Ġg host -Ġout comes -Ġexp ense -Ġevery day -Ġaccept able -A h -Ġequ ipped -Ġor ange -F R -ĠD utch -Th ough -ĠR ank -Q U -ĠRober ts -wh at -re nd -Ġdisapp ear -Ġsp awn -ĠL am -o is -Ġdes erve -Ġmin imal -Ġnerv ous -ĠW ould -Ġro ok -ĠV ancouver -Ġres ign -sh ire -ĠW orks -ĠB uild -Ġafford able -ĠG ary -ĠAren a -Ġh anging -Ġimpl ications -ĠS ong -Ġmain taining -Ġgu ards -C ON -Ġder ived -Ġexecut ed -Ġthe ories -Ġqu oted -ĠAnd re -og a -sel ess -in fo -ĠBel g -Ġt ears -ĠSur v -Ġbirth day -ig ious -im mer -Ġspect rum -Ġarchitect ure -Ġrec ruit -arm a -T able -Ġmon sters -ĠG ov -Ġdest ination -Ġattract ive -Ġf oss -ĠMore over -Ġpres ents -TH E -Ġrep ly -pt on -Ġc um -Ġdel ight -Ġaffect s -Ġdon ations -ĠT oy -ĠH im -M ENT -Ġover come -it ched -ĠFant asy -ĠH at -ĠBe ast -b ott -Ġinvestig ations -R un -Ġhun ting -d i -f und -Ġs essions -est yle -Ġport ray -oid s -Y eah -Ġcommun icate -Ġcom edy -ĠY ang -Ġbel t -ĠMar ine -Ġpredict ed -Pl ay -Ġimportant ly -Ġremark able -Ġelim inate -D avid -Ġb ind -V ID -Ġadvoc ates -ĠG aza -im p -D B -ĠN a -ĠSim ilar -I ES -Ġchar ity -v as -m ath -Ġâ ĸ -ok er -nd um -Ġcap s -ĠH al -2 000 -e an -Ġfle et -Ġrec re -R ight -Ġsleep ing -ij ing -k ind -Ġdesign ated -à ¤ -Ġanim ation -ke e -ĠInt rodu -Ġ/ > -Ġdelay ed -Ġtrem end -Ġcur ious -U se -Ġle ct -d am 
-Ġinnov ation -ĠPoint s -Ġload ing -Ġdisp ute -ct ic -ird s -ĠB Y -Ġn urs -ĠVal ue -ION S -ĠH um -Ġtem plate -m ers -Ġappear ances -ĠEnter tainment -Ġtransl ation -Ġsa ke -Ġbene ath -Ġin hib -Ġe uro -abet es -Ġstud ying -ĠM as -Ġper ceived -Ġexam ined -Ġe ager -Ġco aches -Ġim per -ch i -Ġprodu ces -" ). -ĠEvery one -Ġm unicip -Ġg irlfriend -Ġh ire -ĠV ice -Ġsu itable -op y -Ġin equ -ĠD uke -f ish -f irst -ĠO bs -Ġinter ior -ĠBru ce -ĠR y -Ġanal ys -Ġconsider able -Ġfore cast -Ġf ert -ors hip -ĠD rug -ĠA LL -: " -th ur -ĠM ail -Ġball ot -Ġinst antly -ĠCh annel -Ġp icks -Ġ198 9 -Ġt ent -ol i -Ġcivil ian -b ling -ell o -b u -Ġin ch -Ġlog o -Ġcooper ation -Ġwal ks -Ġinvest ments -Ġimp rison -ĠF estival -ĠK y -Ġleg ally -Ġg ri -ch arg -S l -Ġthreat ening -du ction -fl ow -Ġdismiss ed -ibr aries -c ap -e le -ĠMc G -ĠHar vard -ĠConserv ative -ĠC BS -p ng -Ġro ots -ĠH aving -umb led -ĠF un -\ / -ĠS earch -ple x -Ġdiscuss ing -Ġcontin u -ĠT ai -ĠW ik -F ree -f it -Ġref use -Ġmanag ing -Ġsy nd -ip edia -w alk -Ġprofession als -Ġguid ance -Ġunivers ities -Ġas semb -unt u -F inally -AS E -ĠAut o -ĠH ad -Ġann iversary -L D -ĠD ur -ĠUlt imate -ih ad -pro duct -Ġtrans it -Ġrest ore -Ġexpl aining -Ġass et -Ġtransfer red -Ġbur st -ap olis -ĠMag azine -ĠC ra -ĠB R -gg ed -ĠH E -M ich -b et -ĠL ady -yl um -erv es -Ġme ets -wh ite -L og -Ġcorrespond ing -Ġins isted -G G -Ġsurround ed -Ġt ens -Ġl ane -Ġco inc -h ome -Ġexist ed -ect ed -ĠDou ble -lam m -Ġske pt -ex p -Ġper ception -ie v -ĠBe ing -o ft -Ġadop t -. 
: -] ; -Wind ows -Ġsatell ite -AS H -Ġinf ant -d escription -ĠMe anwhile -c m -oc a -ĠT reat -act or -Ġtob acco -ĠN orm -em ption -Ġfl esh -Ġj e -o op -ĠHe aven -Ġbe ating -an im -Ġgather ing -Ġcult iv -G O -ab e -ĠJon athan -ĠSaf ety -Ġbad ly -pro t -Ġcho osing -Ġcontact ed -Ġqu it -Ġdist ur -Ġst ir -Ġto ken -D et -ĠP a -Ġfunction ality -00 3 -s ome -Ġlimit ations -Ġmet h -b uild -con fig -N T -re ll -ble m -ĠM om -Ġveter ans -ĠH u -Ġtrend s -are r -ĠG iven -ĠCa ption -m ay -AS T -Ġwond ering -ĠCl ark -n ormal -Ġsepar ated -Ġdes p -st ic -b rew -Ġrel ating -ĠN ik -ĠF arm -Ġenthus i -g ood -d eb -Ġactiv ist -Ġm art -Ġexplos ion -ĠEconom ic -L ink -Ġins ight -Ġconven ient -Ġcounter part -su pport -ĠV irt -ag en -ĠTenn essee -ĠSim on -ĠA ward -OC K -ĠF igure -Ġoverse as -Ġpr ide -ĠC as -n ote -m g -C urrent -Ġdispl ays -cont ent -Ġtravel ing -Ġhosp itals -ĠFin ancial -ĠP ast -Ġdefend ant -Ġstream ing -m ble -ĠBer lin -uk i -Ġdist ribut -Ġant ib -Ġch ocolate -ĠCast le -Ġinter rupt -ĠR ow -Ġconvers ion -Ġbug s -ĠR ather -li est -L Y -ĠJe an -com mon -ak h -Ġ1 30 -ot ton -ĠDe an -Ġam endment -Ġgame play -ĠWar ren -od a -Ġhigh lights -Ġir re -ĠNAT O -Ġball s -Ġdemand ing -U RE -ĠL uke -F igure -st op -on ia -z one -iz ers -ĠW R -Ġaward ed -Ġregul atory -ĠH art -ĠS N -pl ing -Ġs our -ĠP ixel -us ive -Ġf et -ĠS ent -Ġautom atic -Ġf er -vern ment -ĠKh an -T ON -f ather -Ġextraord inary -th rop -ĠP ython -ĠG PU -Ġsex ually -Ġdesk top -it ivity -ĠAnton io -Ġo rient -Ġe ars -ob by -ous es -vertis ements -Ġmanufacture rs -ic ient -min ute -Ġconv iction -Ġg arden -p ublic -Ġsatisf ied -f old -O K -Ġin hab -ĠTh ink -Ġprogram me -Ġst omach -Ġcoord in -Ġh oly -Ġth reshold -Ġr het -Ġser ial -Ġemploy ers -ĠEvery thing -ra h -Ġb other -Ġbr ands -Val ue -ĠT ed -ĠPlan et -Ġp ink -ĠFurther more -s a -P E -re ck -ĠUS D -ot te -Ġ& & -Ġland ed -g ets -Ġprodu cers -Ġhealth care -Ġdomin ant -Ġdest ro -Ġam ended -ch ron -Ġf its -ĠSy d -ĠAuthor ity -AT CH -Ġfight s -ĠL LC -Ġ-- - -ĠCor p -Ġtox 
ic -spe cific -ĠC orn -ĠChe l -Ġtele phone -ĠP ant -Ġmyster ious -aun ch -od ox -med ia -Ġwitness es -ag u -Ġquestion ed -ĠBre xit -ĠRem ember -ene z -Ġend orse -iat ric -ĠId ent -Ġridic ulous -1 10 -Ġpr ayer -Ġscient ist -Ġ19 50 -ĠA qu -Ġunder ground -ĠU FC -m are -ĠL ater -w ich -Ġsubsc rib -Ġhost s -Ġer r -Ġgr ants -ant om -Ġsum mon -ear ly -ĠC lear -ĠPr im -Ġsusp ension -Ġguarant eed -app er -Ġr ice -ĠSe an -ĠSh in -Ġrefere ndum -Ġfl ed -r ust -Ġ3 60 -ter y -Ġsh ocked -B R -ĠO il -ĠAll ah -Ġpart ly -Ġign or -Ġtrans mission -Ġhom osexual -ivers al -Ġhop efully -ãĤ ¤ -Ġless on -L eg -Ġ .. -Y et -t able -app ropri -re tt -Ġbo ards -Ġincor rect -Ġb acteria -ar u -am ac -Ġsn ap -.' " -Ġpar ad -t em -he art -Ġav ailability -Ġw isdom -Ġ( + -Ġpri est -ĠÂł ĠÂł -O pen -Ġsp an -Ġparam eter -Ġconv ince -Ġ( %) -r ac -Ġf o -Ġsafe ly -Ġconver ted -ĠOlymp ic -Ġres erve -Ġhe aling -ĠM ine -M ax -Ġin herent -ĠGra ham -Ġinteg rated -D em -Ġpip eline -Ġapp lying -Ġem bed -ĠCharl ie -Ġc ave -200 8 -Ġcons ensus -Ġre wards -P al -ĠHT ML -Ġpopular ity -look ing -ĠSw ord -ĠAr ts -' ) -Ġelect ron -clus ions -Ġinteg rity -Ġexclus ively -Ġgr ace -Ġtort ure -Ġburn ed -tw o -Ġ18 0 -P rodu -Ġent reprene -raph ics -Ġg ym -ric ane -ĠT am -Ġadministr ative -Ġmanufacture r -Ġ vel -ĠN i -Ġisol ated -ĠMedic ine -Ġback up -Ġpromot ing -Ġcommand er -Ġfle e -ĠRus sell -Ġforg otten -ĠMiss ouri -Ġres idence -m ons -Ġrese mb -Ġw and -Ġmeaning ful -P T -Ġb ol -Ġhe lic -Ġwealth y -Ġr ifle -str ong -row ing -pl an -as ury -âĢ¦ . 
-Ġexpand ing -ĠHam ilton -Ġrece ives -S I -eat ures -ĠAn im -RE E -P ut -Ġbrief ly -ri ve -Ġstim ul -Ġ`` ( -Ġ __ -Ġch ip -Ġha z -Ġpri ze -ĠTh ings -AC E -ul in -d ict -ok u -Ġassoci ate -ock ets -y outube -St ory -ateg ory -Ġm ild -ail ing -ĠY e -O rig -ĠK a -or ig -Ġpropag anda -Ġan onymous -Ġstrugg led -Ġout rage -AT ED -ĠBe ijing -r ary -Ġle ather -Ġworld s -Ġbroad er -12 5 -id al -ĠBet ter -Ġt ear -E xt -Ġpropos als -Ġit er -ĠSqu ad -Ġvol unt -m i -D id -ĠP u -p in -Ġspeak ers -Ġb orders -Ġfig ured -= ' -Ġsimultane ously -aed a -Ġcharg ing -Ġur ged -Ġcon j -25 6 -ĠG ordon -mer ce -Ġdocument ary -Sh are -it ol -ON E -ĠG arden -h att -ĠThom pson -ane ous -ap ore -Ġt anks -Ġless ons -tr ack -Ġout standing -Ġvolunte ers -Ġsp ray -Ġmanag ers -l arge -Ġcamp s -Ġart ificial -ĠR u -Ġb ags -th al -Ġcompat ible -ĠBl ade -Ġf ed -Ġarg ues -F I -Ġunf air -Ġcor n -Ġoff set -Ġdirect ions -Ġdisappoint ed -ĠCon vention -Ġview ing -M E -oc ity -Ġtown s -Ġlay ers -Ġro lled -Ġjump ed -Ġatt ribute -Ġun necess -inc oln -Ġsupp ose -ĠNet her -ch a -Ġbur ied -Ġsix th -B en -ress ing -OU R -Ġw ound -Ġcy cl -Ġmechan isms -Ġcongress ional -ĠE lement -Ġagre ements -Ġdec or -Ġclos est -ĠM it -Go ogle -} } -Ġm ixture -Ġflu id -S ign -ĠSch olar -Ġp ist -ask et -ab ling -Ġrac ing -he ro -ri el -ass y -Ġche aper -b en -Ġvert ical -amac are -ĠRead ing -g ments -Ġhelic op -Ġsacr ifice -ay a -p aren -V A -ĠL es -ĠStud io -Ġviol ations -ĠAn na -ac er -é ¾ -ĠR at -ĠBe ck -ĠD ick -ĠA CT -Ġcomp osition -Ġtext ure -ĠO wn -Ġsmart phone -ĠN A -Ġfor b -im port -Ġdef ending -il st -re r -Ġo h -ĠJere my -Ġbank ing -cept ions -Ġrespect ive -/ . 
-Ġdr inks -ĠW i -Ġb ands -ĠL iverpool -Ġg rip -ĠB uy -Ġopen ly -Ġreview ed -per t -Ġver ify -ĠCo le -ĠW ales -M O -Ġun pre -Ġshel ter -ĠIm perial -Ġgu i -ĠD ak -Ġsuggest ions -Ġexplicit ly -Ġsl ave -Ġblock chain -Ġcompet ing -Ġprom ising -S ON -Ġsoc cer -Ġconst itution -4 29 -Ġdist ract -ĠU ser -es ides -ĠMet hod -ĠTok yo -Ġaccompan ied -Cl ient -s ur -al og -Ġident ification -Ġinv asion -as ma -Ġindust ries -pp ers -Ġsub tle -ĠUn it -n atural -Ġsurv ived -Ġfl aw -ĺ ħ -ĠH oll -Ġdef icit -Ġtut orial -ĠCh ance -Ġarg uing -Ġcontem porary -Ġinteg ration -for ward -Ġt um -it is -Ġh iding -ĠD omin -ĠT an -ĠB uilding -ĠV in -Ġspokes person -ĠNot es -Ġemer ging -Ġprepar ation -Ġpro st -Ġsuspect s -Ġaut onom -D escription -Ġdeal t -ĠP ear -Ġstead y -Ġdecre ased -Ġso vere -ĠCl in -Ġgrad ually -ors es -ĠW AR -S erv -ãĤ ¢ -h r -Ġd irty -ĠB arn -ĠB C -Ġd il -Ġcal endar -Ġcompl iance -Ġch amber -b b -Ġpass enger -ate ful -ĠT itle -ĠSyd ney -ĠG ot -Ġdark ness -Ġdef ect -Ġpack ed -ass ion -Ġgod s -Ġh arsh -IC K -le ans -Ġalgorith m -Ġoxy gen -Ġvis its -Ġbl ade -Ġkil omet -ĠKent ucky -Ġkill er -P ack -enn y -Ġdiv ine -Ġnom ination -be ing -Ġeng ines -Ġc ats -Ġbuff er -ĠPh ill -Ġtra ff -AG E -Ġtong ue -Ġrad iation -ere r -m em -ĠExpl icit -é¾ į -Ġcou ples -Ġphys ics -ĠMc K -Ġpolit ically -aw ks -ĠBl oom -Ġwor ship -e ger -ut er -ĠF O -Ġmat hemat -Ġsent enced -Ġdis k -ĠM arg -Ġ/ * -P I -Ġoption al -Ġbab ies -Ġse eds -ĠScott ish -Ġth y -] ] -ĠHit ler -P H -ng th -Ġrec overed -ing e -Ġpow der -Ġl ips -Ġdesign er -Ġdis orders -Ġcour age -Ġch aos -" },{" -Ġcar rier -b ably -H igh -ĠR T -es ity -l en -Ġrout es -u ating -F il -N OT -w all -s burgh -Ġeng aging -ĠJava Script -ore r -li hood -Ġun ions -ĠF ederation -ĠTes la -Ġcomple tion -ĠT a -Ġprivile ge -ĠOr ange -Ġne ur -paren cy -Ġb ones -Ġtit led -Ġprosecut ors -ĠM E -Ġengine er -ĠUn iverse -ĠH ig -n ie -o ard -Ġheart s -ĠG re -uss ion -Ġmin istry -Ġpen et -ĠN ut -ĠO w -ĠX P -in stein -Ġbul k -S ystem -ic ism -ĠMarket able -Ġpre val 
-Ġpost er -Ġatt ending -ur able -Ġlicens ed -ĠG h -et ry -ĠTrad able -Ġbl ast -à ¤ -ĠTit an -ell ed -d ie -H ave -ĠFl ame -Ġprof ound -Ġparticip ating -Ġan ime -ĠE ss -Ġspec ify -Ġregard ed -ĠSpe ll -Ġs ons -own ed -Ġm erc -Ġexper imental -land o -h s -ĠDun geon -in os -Ġcomp ly -ĠSystem s -ar th -Ġse ized -l ocal -ĠGirl s -ud o -on ed -ĠF le -Ġconstruct ed -Ġhost ed -Ġsc ared -act ic -ĠIs lands -ĠM ORE -Ġbl ess -Ġblock ing -Ġch ips -Ġev ac -P s -Ġcorpor ation -Ġo x -Ġlight ing -Ġneighb ors -ĠU b -ar o -Ġbe ef -ĠU ber -F acebook -ar med -it ate -ĠR ating -ĠQu ick -Ġoccup ied -Ġaim s -ĠAdd itionally -ĠInt erest -Ġdram atically -Ġhe al -Ġpain ting -Ġengine ers -M M -ĠM ust -Ġquant ity -P aul -Ġearn ings -ĠPost s -st ra -ãĥ¼ ãĥ -Ġst ance -Ġdro pping -sc ript -Ġd ressed -M ake -Ġjust ify -ĠL td -Ġprompt ed -Ġscr ut -Ġspeed s -ĠGi ants -om er -ĠEd itor -Ġdescrib ing -ĠL ie -ment ed -Ġnow here -oc aly -Ġinst ruction -fort able -Ġent ities -Ġc m -ĠN atural -Ġinqu iry -Ġpress ed -iz ont -for ced -Ġra ises -ĠNet flix -ĠS ide -Ġout er -Ġamong st -im s -ows ki -Ġclim b -ne ver -Ġcomb ine -d ing -Ġcomp r -Ġsignific ance -Ġremem bered -ĠNev ada -ĠT el -ĠSc ar -ĠWar riors -ĠJ ane -Ġcou p -b as -Ġtermin al -, - -O H -Ġt ension -Ġw ings -ĠMy ster -�� �� -ĠUn like -val id -viron ments -ĠAl i -Ġn aked -book s -ĠM un -ĠG ulf -Ġd ensity -Ġdim in -Ġdesper ate -Ġpres idency -Ġ198 6 -h y -IN D -Ġun lock -im ens -Ġhand led -ĠE b -Ġdisapp eared -Ġgen re -Ġ198 8 -Ġdetermin ation -St ream -ik o -ap ters -Ġacknow ledge -J an -Ġcapital ism -P at -Ġ20 20 -Ġpain ful -Ġcur ve -Ġbom bs -st orm -ĠMet al -en cer -ĠF ig -ĠA aron -anc hes -Ġins piration -Ġexha ust -t ains -ash i -Ġdesc ript -Ġr itual -ĠChel sea -Ġpromot ion -ĠH ung -ĠW ard -iv a -ĠE T -Ġto ss -all ow -ĠFranc is -D ep -Ġhapp iness -ĠGl ass -Ġbet a -Ġstreng then -N E -o a -Ġbutt ons -ĠMur ray -Ġkick ed -Qu est -ĠT alk -ĠS everal -ĠZ ero -Ġdr one -ul k -Ġc am -ĠM obile -Ġprevent ing -Ġret ro -ĠA x -Ġcru el -Ġflo at -. 
), -Ġfil ing -ĠGr ant -ĠB or -Ġr ib -Ġchampions hip -ĠM erc -Ġsty les -Ġc ake -Ġbuild s -ĠS elf -io x -Ġep ic -oy d -B el -ĠSt ew -. ( -ah u -ĠBe yond -Ġout s -Ġsol o -ĠT ree -Ġpres erve -Ġt ub -AR E -ro c -ĠIm pro -ĠW right -Ġbu nd -Ġtr aged -Ġoccas ional -b ian -Sec ond -r ons -Ġinter actions -form ed -s ing -Ġown s -Ġh ockey -Gener al -Ġlog ical -Ġexp end -Ġesc al -ĠGr iff -ĠC rown -ĠRes erve -Ġsto pping -Ġexc use -sec ond -Ġoper ated -Ġre aches -ĠMal ays -Ġpoll ution -ĠBrook lyn -Ġde lete -Ġhas h -Bl ock -ah a -âĢ ³ -Ġsh orter -p iece -> >> -ĠM ormon -t or -Ġpartic les -ĠB art -ry ption -Ġad min -Ġsqu ee -VID IA -Ġcreat or -iam eter -ic ular -N BC -Ġgrab bed -Ġn odd -Ġr ated -Ġrot ation -Ġgr asp -Ġexcess ive -ĠE C -ĠWh it -Ġinvent ory -ault s -ĠF B -Ġe cosystem -Ġbill ions -Ġvent ure -n amed -Ġdef ender -out e -Inst ead -ir able -W ar -Ġassum ption -Ġb ite -Ġearth qu -t ail -sp ace -Ġgif ts -boy s -Ġinev itable -Ġstruct ural -Ġbenef icial -Ġcompe lling -h ole -erv ation -Ġco at -o j -inc arn -ĠY ears -Ġdetermin ing -Ġrhet oric -Ġbound aries -Ġwh ites -A nt -add y -) - -ra ham -eter min -Ġhar vest -ĠCon c -Ġlapt op -ĠM atch -Ġenjoy ing -cc a -oll ar -Ġtri ps -Ġadd iction -ĠS ak -Ġpow ered -Ġc ous -ĠRuss ians -ie re -Ġret rie -qu ality -Ġdiff er -Ġking dom -ĠL aur -ĠCap itol -Ġcon clusions -ĠAl tern -ĠN av -Ġtrans parent -B ER -G roup -ĠCom plete -Ġinf er -Ġint rig -Ġins ane -R O -oph ob -is en -qu al -Mich ael -Ġm useum -ĠP ope -Ġres et -r ative -f ive -Ġagg reg -itte es -osit ory -Ġcar b -ĠRec ord -Ġdec ides -ĠF ix -Ġexcept ions -ĠCommission er -un s -ĠEnvironment al -Ġlegend ary -ist ence -Ġtun nel -k m -Ġins ult -Ġt roll -Ġsh ake -Ġdet ention -qu es -ĠCh rome -ĠF iles -Ġsub t -Ġprospect s -Ġpro l -re nder -pro of -Ġperform ances -St r -Ġh ref -ern ame -Ġachieve ment -Ġf ut -F ull -ĠLe ban -go ogle -ãĥ Ī -amp a -May be -Ġproject ed -ĠE mb -Ġcol leg -Ġa wards -Ġâ Ķ -G old -ĠBl ake -ĠR aj -if ting -Ġp ending -Ġinst inct -Ġdevelop ments -Con nect -ĠM and -ĠW ITH 
-ĠPhilipp ines -prof ile -Ġalt ogether -ĠB und -ĠT D -oo oo -amp ed -ip h -Ġste am -Ġold est -Ġdet ection -ul pt -Ġ ç -ĠWay ne -200 6 -f a -Ġcir cles -ĠF u -Ġdon ors -appropri ate -ĠDak ota -j amin -Ġmotiv ated -Ġpurch ases -ĠLouis iana -ĠS pl -Ġgl obe -Ġ10 5 -z ip -c all -Ġdepart ments -Ġsustain able -10 5 -ĠO P -if iers -Ġprevent ed -Ġinc omp -ĠComm ander -Ġdom inated -Ġ » -Ġinvest ed -Ġcomplex ity -Ġin cl -Ġens uring -Ġreal m -yn c -ĠInd ependent -r ained -ĠJ en -ĠFl ight -Ġat he -Ġspec ulation -ĠT E -oc ate -t ic -Ġpl aint -her ry -Ġto y -Ġ1 11 -Ġpl ates -st atus -ĠIs a -Ġdev oted -C op -ĠE S -25 5 -ur rency -M ain -Ġsl aves -Ġpe pper -Ġqu otes -Ġce iling -ĠF ish -Ġtrans formation -Ġfra ction -Ġadvant ages -Ġto ile -Ġstun ning -Ġmo ist -bre aking -s i -ĠL ocation -ĠMed ium -Ġtext s -Ġu gly -Ġb io -. âĢĶ -ĠB ased -Ġtr ains -ĠW ing -ĠAn cient -ĠRec ords -ĠH ope -Spe cial -ades h -ob i -[ / -Ġtempor arily -V er -h u -os er -Ġover night -Ġm amm -ĠTre asury -ĠV enezuel -ĠMeg a -Ġt ar -Ġexpect s -bl ack -or ph -\\ \\ -Ġaccept ance -Ġrad ar -s is -Ġjun ior -Ġfram es -Ġobserv ation -ac ies -P ower -ĠAdv anced -M ag -olog ically -ĠMe chan -Ġsent ences -Ġanaly sts -augh ters -force ment -Ġv ague -Ġcl ause -Ġdirect ors -Ġeval uate -Ġcabin et -M att -ĠClass ic -A ng -Ġcl er -ĠB uck -Ġresear cher -Ġ16 0 -Ġpoor ly -Ġexperien cing -ĠP ed -ĠMan hattan -Ġfre ed -Ġthem es -ad vant -Ġn in -Ġpra ise -10 4 -ĠLib ya -b est -Ġtrust ed -Ġce ase -Ġd ign -D irect -Ġbomb ing -Ġm igration -ĠSci ences -Ġmunicip al -ĠA verage -Ġgl ory -Ġreve aling -Ġare na -Ġuncertain ty -Ġbattle field -ia o -G od -Ġc inem -ra pe -el le -ap ons -Ġlist ing -Ġwa ited -Ġsp otted -ke ley -ĠAud io -e or -ard ing -idd ing -ig ma -ĠN eg -Ġl one -Ġ ---- -ex e -d eg -Ġtrans f -Ġwas h -Ġsl avery -Ġexpl oring -ĠW W -ats on -Ġen cl -l ies -ĠC reek -Ġwood en -Man ager -ĠBr and -um my -ĠAr thur -Ġbureau cr -Ġbl end -ar ians -F urther -Ġsupposed ly -Ġwind s -Ġ19 79 -Ġgrav ity -Ġanalys es -ĠTra vel -ĠV eter -Ġd umb 
-Ġaltern ate -g al -Ġconsum ed -Ġeffect iveness -.' ' -Ġpath s -ond a -L A -ĠStr ong -Ġen ables -Ġesc aped -Ġ" " -Ġ1 12 -Ġ198 3 -Ġsm iled -Ġtend ency -F ire -Ġp ars -ĠR oc -Ġl ake -Ġf itness -ĠA th -ĠH orn -Ġh ier -Ġimp ose -m other -Ġp ension -ic ut -bor ne -ic iary -. _ -ĠS U -Ġpol ar -is y -eng u -itial ized -AT A -w rite -Ġexerc ises -ĠD iamond -ot ypes -Ġharm ful -on z -Ġprint ing -st ory -Ġexpert ise -ĠG er -Ġtraged y -ĠF ly -Ġd ivid -amp ire -st ock -M em -Ġre ign -Ġun ve -Ġam end -ĠProp het -Ġmut ual -ĠF ac -Ġrepl acing -H ar -ĠCirc uit -Ġthro at -ĠSh ot -Ġbatter ies -Ġto ll -Ġaddress ing -ĠMedic aid -Ġp upp -ĠN ar -ol k -Ġequ ity -M R -ĠHis pan -ĠL arge -m id -D ev -Ġexp ed -Ġdem o -ĠMarsh all -erg us -Ġf iber -Ġdiv orce -ĠCre ate -Ġsl ower -ĠPark er -ĠStud ent -ĠTr aining -Ret urn -ĠT ru -Ġc ub -ĠRe ached -Ġpan ic -Ġqu arters -Ġre ct -Ġtreat ing -Ġr ats -ĠChristian ity -ol er -Ġsac red -Ġdecl are -ul ative -et ing -Ġdeliver ing -est one -Ġt el -ĠL arry -Ġmet a -ac cept -art z -ĠRog er -hand ed -Ġhead er -Ġtra pped -ĠCent ury -Ġkn ocked -ĠOx ford -Ġsurviv ors -b ot -Ġdemon stration -Ġd irt -Ġass ists -OM E -ĠD raft -ortun ate -fol io -pe red -ust ers -g t -ĠL ock -Ġjud icial -ver ted -Ġsec ured -out ing -ĠBook s -Ġhost ing -Ġlif ted -l ength -Ġj er -Ġwhe els -ĠR ange -umbn ails -Ġdiagn osis -te ch -ĠStew art -ĠP ract -Ġnation wide -Ġde ar -Ġoblig ations -Ġgrow s -Ġmand atory -Ġsusp icious -! 
' -A pr -G reat -Ġmort gage -Ġprosecut or -Ġeditor ial -ĠK r -Ġprocess ed -ung le -Ġflex ibility -Ear lier -ĠC art -ĠS ug -Ġfoc uses -Ġstart up -Ġbre ach -ĠT ob -cy cle -ãĢ Į -ro se -Ġb izarre -ãĢ į -Ġveget ables -$ $ -Ġret reat -osh i -ĠSh op -ĠG round -ĠSt op -ĠHawai i -ĠA y -Per haps -ĠBe aut -uff er -enn a -Ġproduct ivity -F ixed -cont rol -Ġabs ent -ĠCamp aign -G reen -Ġident ifying -Ġreg ret -Ġpromot ed -ĠSe ven -Ġer u -ne ath -aug hed -ĠP in -ĠL iving -C ost -om atic -me ga -ĠN ig -oc y -Ġin box -Ġem pire -Ġhor izont -Ġbr anches -Ġmet aph -Act ive -ed i -ĠFil m -ĠS omething -Ġmod s -inc ial -ĠOrig inal -G en -Ġspir its -Ġear ning -H ist -Ġr iders -Ġsacr ific -M T -ĠV A -ĠS alt -Ġoccup ation -ĠM i -Ġdis g -lic t -Ġn it -Ġn odes -e em -ĠP ier -Ġhat red -ps y -ãĥ ī -Ġthe ater -Ġsophistic ated -Ġdef ended -Ġbes ides -Ġthorough ly -ĠMedic are -Ġbl amed -arent ly -Ġcry ing -F OR -pri v -Ġsing ing -ĠI l -Ġc ute -o ided -olit ical -ĠNe uro -å ¤ -Ġdon ation -ĠEag les -ĠG ive -T om -Ġsubstant ially -ĠLic ense -ĠJ a -Ġg rey -ĠAn imal -ĠE R -ĠU nd -Ġke en -Ġconclud e -ĠMississ ippi -Eng ine -ĠStud ios -P ress -o vers -ll ers -Ġ3 50 -ĠR angers -Ġr ou -ert o -E p -iss a -iv an -Ġse al -ĠReg ist -dis play -Ġwe aken -u um -ĠComm ons -ĠS ay -Ġcult ures -Ġl aughed -Ġsl ip -Ġtreat ments -iz able -m art -ĠR ice -Ġbe ast -Ġob esity -ĠLa ure -ig a -Wh ich -hold er -Ġelder ly -Ġp ays -Ġcompl ained -Ġc rop -Ġpro c -Ġexplos ive -ĠF an -ĠAr senal -A uthor -ef ul -Ġme als -Ġ( - -id ays -Ġimag ination -Ġann ually -Ġm s -as ures -H ead -ik h -m atic -Ġboy friend -ĠCom puter -Ġb ump -Ġsur ge -ĠCra ig -ĠKir k -D el -medi ate -Ġscen arios -ĠM ut -ĠSt ream -Ġcompet itors -Ù Ħ -ĠStan ford -ĠRes ources -az ed -b age -Ġorgan is -ĠRe lease -Ġsepar ately -Ġha bits -Ġmeasure ments -ĠCl ose -Ġaccomp any -Ġg ly -Ġt ang -ĠR ou -Ġplug in -Ġcon vey -ĠChall enge -oot s -j an -Ġcur s -ĠRel ations -ke eper -Ġapproach ing -p ing -Spe aking -Ġarrang ement -ĠV I -are ttes -Ġaffect ing -Ġperm its -b ecause 
-Ġu seless -ĠH us -!! !! -Ġdestro ying -Un fortunately -Ġfasc inating -S em -Ġelect oral -Ġtrans parency -ĠCh aos -Ġvolunte er -Ġstatist ical -Ġactiv ated -ro x -We b -H E -ĠHamp shire -is ive -M ap -Ġtr ash -ĠLaw rence -st ick -C r -Ġr ings -EX T -Ġoper ational -op es -D oes -ĠEv ans -Ġwitness ed -P ort -Ġlaunch ing -ec onom -w ear -ĠPart icip -um m -cul es -ĠR AM -ĠT un -Ġass ured -Ġb inary -Ġbet ray -Ġexpl oration -ĠF el -Ġad mission -it ated -S y -Ġav oided -ĠSim ulator -Ġcelebr ated -ĠElect ric -¥ ŀ -Ġcl uster -itzer land -he alth -L ine -ĠN ash -at on -Ġsp are -Ġenter prise -ĠD IS -clud es -Ġfl ights -Ġreg ards -ĠÃ Ĺ -h alf -Ġtr ucks -Ġcontact s -Ġunc ons -ĠCl imate -Ġimm ense -N EW -oc c -ect ive -Ġemb od -Ġpat rol -Ġbes ide -Ġv iable -Ġcre ep -Ġtrig gered -ver ning -Ġcompar able -q l -Ġg aining -ass es -Ġ( ); -ĠG rey -ĠM LS -s ized -Ġpros per -" ? -Ġpoll ing -Ġsh ar -ĠR C -Ġfire arm -or ient -Ġf ence -Ġvari ations -g iving -ĠP i -osp el -Ġpled ge -Ġc ure -Ġsp y -Ġviol ated -Ġr ushed -Ġstro ke -ĠBl og -sel s -ĠE c -,' ' -Ġp ale -ĠColl ins -ter ror -ĠCanad ians -Ġt une -Ġlabor atory -Ġn ons -t arian -Ġdis ability -ĠG am -Ġsing er -al g -ĠSen ior -Ġtrad ed -ĠWar rior -Ġinf ring -ĠFrank lin -Ġstr ain -ĠSwed ish -Ġsevent h -ĠB enn -ĠT ell -Ġsynd rome -Ġwond ered -id en -++ ++ -ig o -Ġpur ple -Ġjournal ism -Ġreb el -Ġf u -bl og -Ġinv ite -ren cies -ĠCont act -Is rael -ĠCont ent -Ġche er -Ġbed room -ĠEngine ering -ĠQue ens -Ġd well -ĠPlay Station -ĠD im -ĠCol on -l r -Ġoper ates -Ġmotiv ation -US A -ast ered -C ore -ĠTr uth -ol o -OS E -ĠMem ory -Ġpred ec -Ġan arch -Ġ19 20 -ĠY am -à ¨ -b id -Ġgr ateful -Ġexc itement -Ġtre asure -Ġlong est -ct ive -Ġdes erves -Ġreserv es -Ġcop s -ĠOtt awa -ĠEgypt ian -ank ed -Ġart if -Ġhypot hesis -: / -Ġpurch asing -Ġlove ly -H P -Ġdiv ide -Ġstrict ly -Ġquestion ing -Ġtaxp ayers -ĠJ oy -Ġroll s -ĠHe avy -Ġp orts -Ġmag netic -Ġinf lamm -Ġbr ush -t ics -â ĪĴ -Ġbott les -pp y -Ġp add -ãĤ ¯ -m illion -Ġdevast ating -Ġcomp iled -Ġmed 
ication -Ġtw elve -ĠPer ry -Sp ace -im b -y our -Ġle aked -ĠT ar -Ġun ity -Ġinfect ed -Ġtravel ed -ID E -ĠMc Donald -t xt -ĠPr inc -Ġinter ven -ĠTai wan -ĠP ow -Ġbe aring -ĠTh read -Ġz ones -iz ards -un ks -Ch apter -ll or -Ġ · -Ġw ounds -Ġdisc retion -Ġsucceed ed -ik ing -Ġicon ic -C all -Ġscreen ing -ĠM is -ict s -Ġmin isters -Ġsepar ation -Pl ayer -Ġb ip -Ġbel oved -Ġcount ing -ĠE ye -ar ound -ing ing -Ġtable t -Ġoff ence -in ance -h ave -ĠInf o -ĠNin ja -Ġprotect ive -ĠC ass -M ac -ĠQual ity -N orth -Ġ ic -ĠCub a -ĠChron icle -ĠPro perty -Ġfast est -ot os -ĠG erm -OW N -Ġbo om -ĠStan ley -ergus on -Ġcle ver -Ġent ers -m ode -ter ior -ĠS ens -Ġlin ear -AR K -Ġcomp aring -Ġpure ly -Ġsaf er -ĠPot ter -Ġc ups -R T -Ġgl uc -Ġatt ributed -Ġdu pl -ĠP ap -Ġprec ious -Ġp a -iction ary -ĠT ig -ĠTo o -ol utions -st an -Ġrob ots -Ġlob b -Ġstat ute -Ġprevent ion -w estern -16 0 -ĠAct ive -ĠMar ia -h al -N one -ell ar -ĠK B -ĠPart ners -ĠSing le -ĠFollow ing -ang o -ac ious -Ġth ou -Ġk g -Ġinflu ential -ĠFriend s -S ur -ain ted -Ġfor ums -Ġst arter -Ġcitizens hip -ĠE lection -on ge -ot ation -os ph -;; ;; -ut ical -p ur -ere n -Ġaccus ations -bit ious -ab bit -ĠOr d -Post ed -ir k -Ġsens itivity -ic he -ĠAm y -ĠF ab -Ġsum mit -Ġped est -Ġrub ber -Ġagric ultural -Ġcan cel -A E -Ġin aug -Ġcont am -Ġfirm ly -i w -st age -ĠK an -Ġt ier -Ġinv ention -Ġtransl ated -ĠR ules -B ox -Tw itter -ID S -Ġp izza -Ġdeb ug -ĠD rop -v s -Ġh orses -b ig -Ġb oring -Ġh ood -ĠMcC ain -at ched -ĠBro s -Ġsk ip -Ġess ay -st at -ĠLeg ends -Ġam munition -au c -Ġshoot er -Ġun h -Ġsuppl ied -Ġgener ic -ĠS K -ib an -yr ics -Ġ25 5 -Ġclim bing -Form er -Ġfl ip -Ġjump ing -Ġfrust ration -ĠTer ry -Ġneighborhood s -Ġmed ian -be an -Ġbr ains -Follow ing -Ġsh aped -Ġdraw s -Ġal tered -J ack -Ġrecip es -Ġsk illed -we alth -ach i -e lection -Ġbehavi ors -de als -ĠU ntil -F e -Ġdecl aration -mar ks -ĠBet ween -cel ona -Ġres on -Ġbub ble -Am ong -Ġim perial -G S -Ġfemin ist -200 5 -ĠK yle -Ġaccount ing -ĠTe le -ĠT 
yr -Ġconnect ing -Ġre hab -ĠP red -s im -Ġmeant ime -Ġphys ician -M W -ĠCamp bell -ĠBr andon -Ġcontribut ing -ĠR ule -ĠWe ight -ĠN ap -Ġinter active -Ġv ag -Ġhel met -ĠCom b -f our -Ġsh ipped -Ġcomple ting -ĠP D -PD ATE -Ġspread ing -Ġsc ary -erv ing -ĠG as -Ġfr ank -s chool -Ġrom antic -Ġstab il -R ob -Ġaccur ately -Ġac ute -ĠH ann -Ġsymbol s -Ġcivil ization -ĠA W -Ġlight ning -Ġcons iders -Ġven ue -Ġ × -Ġo ven -ĠS F -h is -Ġn u -ĠLear n -Ġpe oples -Ġst d -Ġsle e -Ġs lic -ĠStat istics -Ġcor ners -ĠB aker -Ġ: ) -ment ation -ol ver -Ġlaugh ing -ĠT odd -ond e -ĠH ills -Ġn uts -ĠW oman -pl ane -Ġl iver -ĠIn side -S orry -Ġagre es -Ġfund ament -ĠF isher -Ġa uction -Ġthread s -gl as -ĠBas ic -ĠN at -Ġlack ing -Ġceleb ration -j u -Ġs illy -E uro -Ġt att -ight y -cont rolled -T est -ĠSing h -Ġr age -Ġrh yth -o ffic -ĠPh antom -Ġhead lines -Ġrespond ing -ĠMor ning -Ġvit amin -Ġboot s -ĠS ite -al in -p i -Ġvir al -ĠU C -D ER -ĠSe x -Ġst ocks -c urrent -Ġch urches -ĠR are -ĠMur phy -Ġden ial -ĠG aming -Ġtou g -Ġn ick -Ġm akers -ĠRon ald -Ġgener ous -ĠD oc -ĠMor ris -Ġtransform ed -ĠN ormal -Ġ10 4 -ĠKick starter -ĠUp on -On line -ĠI RS -Ġw rap -Ġl oving -Ġarri ves -ĠD ue -Ġhe ter -ĠM ade -Ġrent al -Ġbelong s -Ġatt orneys -Ġcro ps -Ġmat ched -ul um -ol ine -10 9 -Ġdis par -Ġbuy ers -ĠCam bridge -Ġeth ics -rou ps -Ġjust ified -Ġmarg inal -Ġrespect ed -win ning -Ġnodd ed -ĠSer ge -ĠForm er -C raft -######## ######## -ĠWar ner -Ġd ash -et e -Ġent ert -ĠE scape -out heast -Ġkn ees -ĠB omb -Ġr ug -P ass -Ġatt itudes -go vernment -ĠPri or -Ġqual ities -Ġnot ification -ĠPh one -l ie -Ġanticip ated -ĠCom bat -ĠBar ry -Ġ198 2 -Us ers -on er -Ġcomput ing -ĠConnect icut -Ġless er -Ġpe ers -ĠC u -Ġtechn ically -Ġsub mission -ĠUn iversal -Ġman ually -our ge -Ġrespond ents -ĠB TC -ĠH ost -Ġf are -ĠB ird -Ġrece ipt -al so -Ġj ack -Ġagric ulture -Ġsk ull -Ġ! 
= -Ġpass ive -ĠC I -Ġsoc ieties -Ġremind ed -Ġinter ference -B uy -Ġâ ľ -g on -Ġscrut iny -ĠW itch -Ġconduct ing -Ġ ãĥ -Ġexch anges -ĠMit chell -Ġinhab it -Ġtw ist -B D -Ġwhere ver -group on -Ġj okes -ĠBen jamin -ĠR andom -fr ame -ĠL ions -Ġhighlight ed -ĠArk ansas -E nt -Ġp ile -Ġpre lim -g s -mind ed -Ġfel ony -ĠG A -ĠL uck -Ġpract ically -ĠB os -Ġact ress -D am -ĠB ou -Ġvis a -Ġembed ded -Ġhy brid -Ġear liest -Ġsoon er -s ocial -ĠH A -Ġste ep -Ġdis advant -Ġexplo it -ĠE gg -ĠUlt ra -Ġnecess ity -L ocal -ie ge -Ġd ated -Ġmass es -Ġsubsc ription -pl ess -Ġan onym -Ġpresum ably -Bl ue -The ir -asket ball -ĠPhil ip -Ġcom ed -load ed -r ane -Ġref lection -Ch ina -Ġext ends -Ġform ing -Ġund ers -200 1 -Ġgr at -Ġconcent rations -Ġins ulin -Ġsec ular -Ġwh ilst -Ġwin ners -Ad vertisements -Ġdeliber ately -ĠWork ing -Ġs ink -et ics -d ale -Ġmand ate -Ġg ram -Ġvac ation -Ġwarn ings -ri pp -ĠTH AT -Ġcomment ary -Ġint u -Ġa est -Ġreason ing -Ġbreak down -ĠZ ombie -Ġ-- > -ĠPolit ical -c ott -Ġthr ust -Ġtechn ological -Ġdec iding -Ġtraff icking -L ong -W elcome -pr ising -ĠCommun ications -Ġend ors -Ġsw ift -Ġmetab ol -co ins -res a -ĠHT TP -Ġen roll -ĠH appy -us r -int age -Ġ[ " -u ably -ĠM aterial -Ġrepe al -Se pt -k h -ĠMod i -Ġunder neath -ĠI L -sh ore -Ġdiagn osed -ace utical -Ġsh ower -au x -ĠSw itch -ĠStre ngth -Ġj ihad -n ational -Ġtra uma -uss y -on i -Ġcons olid -Ġcal ories -ĠF lynn -ag ged -16 8 -ĠP ink -Ġfulf ill -Ġch ains -Ġnot ably -ĠA V -L ife -ĠCh uck -m us -ĠUr ban -ĠH end -Ġdep osit -ĠS ad -Ġaff air -OR K -ie val -ĠF DA -Ġt rop -ĠOver all -Ġvirt ue -Ġsatisf action -au nd -Ġl un -ĠSw itzerland -ĠOper ation -pro cess -Ġsh ook -Ġcount ies -le ased -ĠCharl otte -1 12 -Ġtrans cript -Ġre dd -p ush -ĠHe y -ĠAn alysis -[ " -Ġaltern atives -ard less -Ġele ph -Ġpre jud -ĠLe af -H aving -ĠH ub -Ġexpress ions -ĠVol ume -Ġshock ing -ĠRed s -Ġread ily -Ġplan ets -ad ata -Ġcollaps ed -ĠMad rid -Ġir rit -i pper -ĠEn c -ĠW ire -Ġbu zz -ĠG P -ash a -Ġaccident ally -ur u 
-Ġfrust rated -ĠS A -Ġhung ry -ĠH uff -Ġlab els -ant o -ĠE P -Ġbar riers -) | -ĠBer keley -ĠJ ets -Ġp airs -ĠL an -J ames -ĠB ear -Ġhum or -ĠLiber ty -Ġmagn itude -Ġag ing -ĠM ason -Ġfriends hip -umb ling -Ġemer ge -Ġnewsp apers -Ġam bitious -ĠRich ards -atern al -Ġ198 1 -Ġcook ies -Ġsc ulpt -Ġpur suit -L ocation -Ġscript s -p c -Ġarrang ements -Ġd iameter -Ġl oses -am ation -Ġl iqu -ĠJ ake -aret te -Ġunderstand s -ĠZ en -v m -Ġappro ve -Ġw ip -Ġult ra -Ġint end -ĠD I -asc ular -Ġst ays -ĠK or -ĠK l -Ġinvest ing -L a -Ġbelie ving -b ad -m outh -Ġtaxp ayer -ãĥ ĥ -ĠQue bec -Ġl ap -ĠSw iss -d rop -Ġdr ain -ir i -et c -ft en -ĠN ex -Ġst raw -Ġscream ing -Ġcount ed -Ġdam aging -Ġamb assador -cent ury -Ġpro x -Ġarrest s -u v -il ateral -ĠCh arg -Ġpresc ribed -Ġindepend ently -Ġf ierce -ĠB aby -Ġb rave -Ġsu its -= > -Ġbas eline -ĠR ate -Ġis lands -Ġ( ( -g reen -ix els -Ġname ly -ĠVill age -th an -am y -V ersion -g mail -ential s -ĠS ud -ĠMel bourne -Ġarri ving -Ġquant um -e ff -rop olitan -T ri -Ġfun eral -ĠI R -ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ -ĠC ob -it ably -Ġt urb -Ġcomb o -Re view -Ġdeploy ment -u ity -ĠB ott -Ġinv isible -Ġrender ing -Ġunl ocked -Ġa qu -ĠVlad imir -Ġp ad -ĠBr ain -ĠLeg acy -dr agon -ĠKurd ish -Ġsound ed -Ġdet ained -ĠD M -g ary -Ġd aughters -Ġdistur bing -uk a -ĠPar ad -Ġt ast -Ġunf ortunate -Ġu l -em in -Ġattend ance -tr l -Ġpar ks -ĠMem orial -ĠAl ice -oth y -gu ard -ĠD ise -ĠSh an -ĠFor um -R ich -Ġshif ted -ue z -Ġl ighter -ĠMag n -Ġc od -S ch -ham mad -P ub -3 50 -ĠP okemon -Ġprot otype -Ġun re -B ase -ĠStud ents -ĠRep ly -ĠCommun ist -Ġg au -ĠTy ler -I Z -Ġparticip ated -Ġsup rem -ĠDet ails -Ġvessel s -ro d -Ġt ribe -ke ep -Ġassum ptions -Ġp ound -Ġcr ude -ĠAv ailable -Ġswim ming -Ġin clusion -Ġadv ances -c ulation -Ġconserv ation -Ġover d -ĠBuff alo -Art icle -ed ge -Ġaw a -ĠMad ison -Ġsid ew -Ġcat ast -ĠK rist -uc le -ĠHigh way -ĠTer ror -Ġactiv ation -Ġuncons cious -ĠSat an -ĠSus an -ill ery -Ġarr anged -i op -Ġrum ors -ur ring -th ink 
-ĠKe ith -ĠK ind -Ġavoid ing -by n -n ut -ĠSpe aker -r us -n ames -Ġgu ilt -ĠOlymp ics -Ġsa il -ĠM es -lev ant -ĠColumb us -a ft -C ity -S outh -ĠHar vey -ĠP un -S everal -Ġment ally -Ġimp ress -m ount -ĠUb untu -âĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶ -ĠSuper man -ĠMP s -Ġintent ions -ĠR acing -Ġlike lihood -Ġ2 40 -T otal -Ġto ys -ĠW atson -Ġur ge -L ear -ĠP aper -Ġoccur ring -ĠB eng -ĠC ert -Ġst ones -T im -ĠTw in -z b -ĠD ynam -Ġpolit ician -k ens -ĠEnter prise -UT ERS -Ġab ol -Ġref resh -Ġarbit rary -pe ction -Ġtrou bles -Ġ} ); -t v -Ġpil ots -Ġdist ribute -Ġaud it -Ġp ause -orig inal -Ġr ivals - £ -F ig -T L -ab il -ry ing -L in -ion ed -l on -Ġf ancy -Ġcr ashed -Ġt ract -Ġshe d -Ġcons ume -B ased -down load -in it -Ġvolt age -Int rodu -Ġcondem ned -ĠFin ance -res pect -Ġex cluded -Ġestablish ing -her ic -Ġher itage -Ġspect acular -Ġun st -ĠSnow den -ĠL ane -S an -Ġprotect ions -st ruction -inc inn -Ġmac ro -C ustom -ios ity -Ġes p -Ġfunction ing -Ġm ush -Ġp uzzle -Ġeth ical -M al -Ġgo verning -ĠF erguson -Ġrest ored -Ġst ressed -ĠCoun ter -ĠK as -cl ip -AN S -Ġse iz -U K -by ss -old own -ap i -Ġperman ently -oun ters -W est -Th rough -L ight -at oes -Ġne at -Ġc ord -ure r -Ġsevere ly -ĠA ven -Ġinter rog -Ġtri ple -G iven -N umber -Ġar ise -Ġs her -pl ant -Ġfl ower -ĠC ou -Ġat e -Ġnew er -b ul -Ġmean while -ĠL air -Ġadjust ment -ĠCop yright -Ġd ivers -i ological -Ġgam ers -o at -Ġhistor ically -Ġanal og -Ġlong time -Ġpres cription -ĠM ist -ĠHy per -ĠM aine -ĠDe ity -Ġmulti pl -ĠRe incarn -ĠH yd -ĠP ic -S il -r ants -ĠC ris -. 
; -( { -epend ence -Ġrec y -ate ur -Ġqu ad -Ġgl ob -Ġcon ced -te am -Ġcapital ist -ĠL ot -Ġroy al -ĠCy ber -Ġblack s -met ic -ri v -ĠD anny -Ġsp o -ĠR O -Ġanim ated -rypt ed -ĠDep uty -Ġrend ered -F E -Ġstre ak -Ġcloud s -ĠDou g -~~~~ ~~~~ -Ġdisc our -ĠVe h -Ġpsych ology -ĠJ ourney -Ġcry stal -ĠFro st -Ġsuspic ion -Ġrel ate -or us -ĠC rypt -ĠN VIDIA -com ed -ut ing -incinn ati -Ġvulner ability -ost ic -Ġisol ation -Ġcool ing -ĠCoal ition -Ġ1 19 -F our -ĠDe al -Ġâ ī -se mble -ram ent -ĠBar celona -Ġ10 2 -Ġcoc aine -ocaly pse -F eb -ogen ic -Ġmut ation -Ġcrypt oc -ĠK el -ĠG it -a is -Ġs isters -AN K -Ġactiv ate -T er -Ġd read -yl on -Ġprop ri -A ust -ĠDef ault -Ġout door -Ġshe er -ce ive -Ġg ently -Ð ¾ -Pro gram -Ġâ ĨĴ -Ġve gan -ĠCr us -Ġrespons ibilities -ĠH R -OL D -Ġprev ents -Ġst iff -ĠW ere -Ġathlet ic -ĠSc ore -Ġ) : -Ġcolumn s -ĠL oc -av ailable -ĠF ram -ĠS essions -Ġcompan ion -Ġpack s -14 0 -ĠKn ights -Ġf art -Ġstream s -Ġsh ore -Ġapp eals -ĠPer formance -h aul -ĠSt ra -ĠN ag -10 3 -ĠTrans portation -B B -E v -z an -P ublic -Ġtw in -uls ion -M ult -Ġelect ro -Ġstat ue -ation ally -ĠN ort -Ġins pection -/ * -ig ue -Ġcomp assion -ĠT ales -ĠSte in -ĠSc reen -ĠB ug -ĠL ion -g irl -Ġwithdraw al -Ġobject ives -Ġblood y -Ġprelim inary -Ġj acket -Ġdim ensions -ĠC ool -ĠOcc up -Ġw reck -Ġdoub led -ank ing -Ġ19 75 -Ġglass es -ĠW ang -pro v -P ath -connect ed -ĠMult i -ĠNor way -agon ist -Ġfe ared -Ġtouch ing -Ġarg uably -¯¯¯¯ ¯¯¯¯ -ĠNC AA -che m -Ġsp at -ĠW WE -ĠC el -ig ger -Ġattack er -ĠJo in -ob ject -ett a -Ġelim inated -d et -Ġdest ruct -ĠLuc as -ct uary -18 0 -ĠBr ady -ĠBl ues -B ay -au kee -Ġtim eline -Ġdeleg ates -w ritten -uff icient -Ġsh apes -Cop yright -ou ble -serv ice -Ġp ione -Ġcolleg es -Ġrow s -Ġsp ite -Ġassess ed -3 60 -Ġle ase -Ġconfident ial -ck er -ĠMan ning -ĠV oice -Ġse aled -Ġcalcul ate -N O -ĠAss istant -Ġteen ager -ul ent -ather ine -Ġm ock -Ġd iamond -Ġf est -Ġsw itched -Ġres ume -ĠPu erto -Ġl anes -ir ation -ĠSimilar ly -Ġro d -ĠS el -ĠPal 
ace -ĠLim ited -e ous -Ġvar iant -Ġw ard -Ġ) ) -Sh ow -OO K -A lex -ĠN ep -br is -ĠWik ipedia -Ġexcept ional -Ġman ages -ĠD raw -Ag ain -Ġco pper -ut t -Ġex ports -Ġport folio -Ġelev ated -R ated -ĠOther wise -ĠT act -ĠShe l -ĠT X -" âĢĶ -Ġres ur -ĠW a -ven ant -Ġmon etary -pe ople -E mail -Ġfif ty -ĠS weet -ĠMalays ia -Ġconf using -ĠR io -ud a -uten ant -" ); -Ġpra ised -Ġvol umes -t urn -Ġm ature -Ġnon profit -Ġpassion ate -ĠPriv ate -Ġ10 3 -Ġdesc end -ç ¥ŀ -uff y -head ed -Whe ther -ri en -ze ch -be it -Ġch rom -ĠMc M -Ġd ancing -Ġe leg -ĠNot iced -11 5 -Ġadvoc acy -ENT S -amb ling -ĠMin or -ĠF inn -Ġprior ities -Ġthere of -ĠSt age -ĠRog ers -Ġsubst itute -ĠJ ar -ĠJeff erson -Ġlight ly -10 2 -ĠL isa -u its -ys ical -Ġshif ts -Ġd rones -Ġwork place -Ġres id -ens ed -ah n -Ġpref erences -ser ver -Ġdeb ates -d oc -ĠGod s -Ġhelicop ter -Ġhon our -Ġconsider ably -ed ed -ĠF emale -ĠAn ne -Ġre un -ĠF ace -ĠHall ow -ĠBud get -Ġcondem n -Ġt ender -Pro f -ocr atic -ĠTurn er -ĠAg ric -Ġ19 76 -Ġa pt -d isc -ĠF ighter -ĠA ur -Ġgar bage -in put -ĠK arl -ĠOl iver -ĠL anguage -k n -N on -ĠCl ar -Ġtrad itions -Ġad vertisement -ĠS or -Ġarch ive -Ġvill ages -7 50 -Ġimplement ing -w aukee -Ġdiet ary -Ġswitch ing -Rep ublic -Ġvel ocity -Ġc it -ĠA wards -Ġfin ancing -Ġlast ed -) ] -Ġrem inder -P erson -Ġprec ision -Ġdesign ers -ĠF ried -ĠB order -Ġtr agic -Ġw ield -Ġiniti atives -ĠT ank -w er -Ġjo ins -R o -in ery -Ġar row -Ġgener ating -found er -Ġsear ches -Ġrandom ly -A ccess -Ġb atch -Ġp osed -l at -Ġpursu ing -as a -Ġtest ified -form ing -ĠSh ar -w iki -ĠE ither -S ometimes -Ġsen ators -ĠJohn ny -ĠTal iban -ĠG PS -":" / -ãģ® å -Ġanaly zed -ĠRub io -ĠMove ment -op ard -ii i -St and -f ight -Ġign oring -i ang -ĠG N -so ever -ĠST AT -Ġref using -Ġswe at -Ġb ay -P ORT -ir med -ak y -Ġdis pro -Ġlabel ed -Ġ10 8 -H ello -Ġple asant -ab a -Ġtri umph -Ġab oard -Ġinc om -ĠC row -le tt -Ġfol k -Ġch ase -` ` -ĠBr us -Ġte ens -c ue -Ġter rain -h yd -il ight -OR Y -Su pport -ew s -ll i -rain 
ts -ĠC and -Ġab used -ach ment -l arg -B as -ĠC ancer -Ġ19 78 -Ġsupp orter -ac cess -ĠTer min -ĠT ampa -ĠAN Y -Ġnew est -ĠCrim inal -ed u -Ġ19 30 -Ġadm its -Ġend e -Ġfail ures -ur ate -ful ness -cy cl -ĠSub ject -Ġinf inite -th ree -W A -p it -ĠInst all -R ad -ili ation -G M -Ġcontin ent -Ġaccommod ate -ĠCl ay -Ġp up -ĠF unction -Ġham mer -ĠAlbert a -Ġrev ised -Ġminor ities -Ġmeasure ment -Con nell -Ġdis able -ĠM ix -In cre -Ġfor k -ĠR osen -Ġimpl ies -umb lr -AN G -Ġprote ins -Ġagg ression -Ġfacilit ate -S N -Ġilleg ally -u er -Ġacad em -Ġp uzz -ĠSh ift -p ay -oll o -Ġaud iences -B uild -Ġno ble -Ġsynt ax -â ĺħ -Ġbe am -ĠB ed -ĠA ld -Ġorig ins -v ideo -Ġ19 77 -ĠAss ault -Ġgar age -Te am -Ġver dict -Ġd war -ĠVirt ual -e vent -Ke ep -Ġsent iment -Ġwild life -sh irt -Ġb urg -Ġrecommend ation -rep resent -Ġgall ery -own ers -Ġsch olar -Ġconven ience -ĠSw ift -Ġconv inc -C ap -Ġwar fare -ĠVis ual -Ġconst itute -Ġab ort -ĠWe ather -ĠLook ing -ĠH em -Ġmart ial -Ġinc oming -et ition -Ġtoler ance -ĠCre ated -Ġfl ows -ĠE lder -Ġsoul s -Ġf oul -ĠP ain -ĠC AN -Ġ2 20 -b c -he nd -Ġgen ius -R eal -ĠW r -omet er -p ad -Ġlim iting -ĠS i -ĠL ore -ĠAd ventures -Ġvar ied -D isc -f in -ĠPerson al -Ch ris -Ġinv ented -Ġd ive -ĠR ise -Ġo z -ĠCom ics -Ġexp ose -ĠRe b -let ters -s ite -im ated -Ġh acking -Ġeduc ated -ĠNob ody -Ġdep ri -Ġincent ive -ãĤ · -Ġovers ight -Ġtrib es -ĠBelg ium -Ġlicens ing -our t -Produ ct -ah l -ĠG em -Ġspecial ist -Ġc ra -ann ers -ĠCor byn -Ġ19 73 -RE AD -Ġsum mar -Ġover look -ĠApp lication -Ġin appropriate -Ġdownload ed -Q ue -ĠB ears -Ġth umb -ĠChar acter -ĠReincarn ated -ĠS id -Ġdemonstr ates -s ky -ĠBloom berg -ĠAr ray -ĠRes ults -ĠFour th -ĠED T -ĠO scar -c end -Ġ10 6 -ĠN ULL -ĠH ERE -m atch -ĠBr un -Ġgluc ose -ie g -eg u -Ġcert ified -Ġrel ie -Ġhuman itarian -Ġpr ayers -K ing -Ġn an -h ou -10 8 -ul u -Ġrenew able -Ġdistingu ish -Ġd ense -ĠV ent -ĠPack age -ĠB oss -Ġedit ors -Ġm igr -T ra -ĠPet ers -ĠAr ctic -200 4 -ĠC ape -Ġloc ally -Ġlast ing -Ġhand y 
-. ). -P an -ĠR ES -Ind ex -Ġt ensions -Ġformer ly -Ġide ological -Ġsens ors -Ġdeal ers -Ġdef ines -S k -Ġproceed s -Ġpro xy -az ines -ĠB ash -ĠP ad -ĠC raft -eal ous -Ġshe ets -omet ry -J une -cl ock -T T -ĠThe atre -ĠB uzz -Ġch apters -Ġmill enn -Ġd ough -ĠCongress ional -Ġimag ined -av ior -Ġclin ic -Ġ19 45 -Ġhold er -ro ot -oles ter -Ġrest art -B N -ĠHam as -ĠJ ob -Ġor b -Ġr am -Ġdiscl ose -Ġtransl ate -Ġimm igrant -Ġannoy ing -Ġtreat y -an ium -ĠTe a -ĠLeg ion -Ġcrowd s -ĠB ec -ĠA er -oh yd -B ro -Look ing -Ġl bs -Ġagg ress -Ġse am -Ġinter cept -ĠM I -mer cial -act iv -ĠC it -Ġdim ension -Ġconsist ency -Ġr ushing -ĠDou glas -Ġtr im -Inst all -ick er -Ġsh y -10 6 -Ġment ions -pe lled -ĠT ak -c ost -Ġclass room -Ġfort une -dri ven -Ġun le -ĠWhe el -Ġinvest or -ĠM asters -k it -Ġassoci ations -ĠEv olution -op ing -us cript -Ġprov incial -ĠWal ter -av i -S O -Ġun limited -Eng lish -ĠC ards -ĠEb ola -ne red -Ġreven ge -Ġout right -um per -Ġf itting -ĠSol id -Ġform ally -Ġproblem atic -Ġhaz ard -Ġenc ryption -Ġstraight forward -ĠA K -Ġp se -ĠOr b -ĠCh amber -ĠM ak -Cont ents -Ġloyal ty -Ġl yrics -ĠSy m -Ġwel comed -Ġcook ed -Ġmon op -Ġn urse -Ġmis leading -Ġe ternal -Ġshif ting -Ġ+ = -V is -Ġinst itutional -ill ary -Ġp ant -VER T -ĠA CC -ĠEn h -Ġinc on -ĠRE UTERS -Ġdon ated -âĢ¦âĢ¦ âĢ¦âĢ¦ -In tern -Ġexhib it -Ġt ire -ĠR ic -ĠCh ampion -ĠMu hammad -N ING -ĠSoc cer -Ġmob ility -Ġvary ing -ĠM ovie -Ġl ord -o ak -F ield -Ġve ctor -us ions -Ġsc rap -Ġen abling -m ake -T or -. 
* -| | -ĠWe bsite -ĠN PC -Ġsocial ist -ĠBill y -ĠAdd itional -Ġc argo -Ġfar ms -ĠSo on -ĠPri ze -Ġmid night -Ġ9 00 -se en -ĠSp ot -Ġshe ep -Ġspons ored -ĠH i -ĠJ ump -Ġ19 67 -Micro soft -ĠAg ent -Ġch arts -d ir -Ġadj acent -Ġtr icks -Ġman ga -Ġex agger -/ > -foot ball -ĠF CC -G C -ĠT ier -and ra -OU ND -% ), -Ġfru its -V C -ĠA A -R ober -Ġmid st -â Ĺ -ank a -Ġlegisl ature -ĠNe il -Ġtour ists -" " -ĠWar ning -ĠNever theless -ĠOffic ial -ĠWh atever -Ġm old -Ġdraft ed -Ġsubst ances -Ġbre ed -Ġt ags -ĠT ask -Ġver b -Ġmanufact ured -com ments -ĠPol ish -Pro v -Ġdetermin es -Ob ama -k ers -Ġutter ly -Ġse ct -sc he -ĠG ates -ĠCh ap -Ġal uminum -Ġz ombie -ĠT ouch -ĠU P -Ġsatisf y -Ġpred omin -asc ript -Ġelabor ate -Ġ19 68 -Ġmeas uring -ĠV ari -any ahu -Ġs ir -ul ates -id ges -ick ets -ĠSp encer -T M -oub ted -Ġpre y -Ġinstall ing -ĠC ab -re ed -re ated -Su pp -Ġwr ist -ĠK erry -10 7 -ĠK le -ĠR achel -Ġc otton -ĠA RE -ĠE le -Cont rol -Ġload s -ĠD od -an as -b one -Ġclass ical -ĠReg ional -ĠInt eg -V M -Ġdes ires -Ġaut ism -support ed -ĠM essage -Ġcomp act -writ er -Ġ10 9 -ĠHur ricane -c ision -Ġcy cles -Ġdr ill -Ġcolle ague -Ġm aker -G erman -Ġmist aken -S un -ĠG ay -Ġwhat soever -Ġsell s -ĠA irl -l iv -ĠO ption -Ġsol ved -Ġse ctors -Ġhorizont al -Ġequ ation -ĠSk ill -ĠB io -g ement -ĠSn ap -ĠLeg al -Ġtradem ark -Ġmake up -Ġassemb led -Ġsa ves -ĠHallow een -ĠVer mont -ĠFR OM -Ġfar ming -ĠP odcast -accept able -ĠHig her -Ġas leep -ull ivan -Ġrefere n -ĠLe v -Ġbul lets -ok o -H C -Ġst airs -Ġmain tains -ĠL ower -ĠV i -Ġmar ine -Ġac res -Ġcoordin ator -ĠJ oh -Ġcounterpart s -ĠBrother s -Ġind ict -b ra -Ġch unk -Ġc ents -H ome -ĠMon th -Ġaccording ly -if les -ĠGerm ans -ĠSy n -H ub -Ġey eb -âĶĢâĶĢ âĶĢâĶĢ -Ġr anges -ĠHoll and -ĠRob ot -f c -M ike -Ġpl asma -Ġsw ap -Ġath lete -ĠR ams -,' " -Ġinfect ions -Ġcor rid -Ġv ib -Ġpat ches -Ġtradition ally -Ġrevel ation -Ġswe ep -Ġgl ance -Ġin ex -200 3 -ĠR aw -work ing -os ures -ĠD at -ĠLyn ch -Ġle verage -ĠRe id -Ġcorrel ation -ian ces 
-av ascript -Ġrep ository -ret ty -Ġ19 72 -24 0 -Ġo un -p ol -ĠRe ed -Ġtact ical -is ite -App le -ĠQu inn -Ġrap ed -ill o -Euro pe -Ġalgorith ms -ĠRod rig -i u -Ġill um -Ġf ame -Ġintrodu cing -Ġdel ays -ĠRaid ers -Ġwh istle -Ġnovel s -ĠRe ally -Ġder iv -Ġpublic ations -ĠNe ither -ĠCom merce -Ġa ston -l anguage -Not es -ĠR oth -ĠF ear -Ġm ate -Ġpar ade -ĠQ B -Ġman eu -ĠC incinnati -m itting -Ġwa ist -ĠR ew -Ġdisc ont -Ð ° -Ġst aring -Ġal ias -Ġsec urities -Ġtoile t -ĠJ edi -Ġun law -v ised -//// //// -] ( -ĠWe iss -Ġpre st -ĠComp an -Ġmem o -ĠGr ace -J uly -ĠEl ite -cent er -ĠSt ay -Ġgal axy -Ġto oth -ĠS ettings -Ġsubject ed -ãĤ ¦ -Ġline back -Ġretail ers -ĠW ant -Ġd angers -A ir -Ġvolunt ary -ew ay -Ġinterpret ed -ot ine -à § -Ġp el -Serv ice -ĠEvent ually -Ġcare ers -Ġthreat en -Ġmem or -ĠBrad ley -anc ies -s n -ĠUn known -N ational -Ġsh adows -ail and -ĠD ash -Every one -izz ard -M arch -= ( -Ġpull s -Ġstr anger -Ġback wards -ĠBern ard -imens ional -Ġch ron -Ġtheoret ical -k top -Ġw are -ĠInvest ig -ĠIn iti -ĠOper ations -o ven -oc ide -* / -Ġfl ames -ĠC ash -sh it -Ġc ab -ĠAn aly -ĠSe ah -Ġdefin ing -Ġorder ing -Ġimm un -Ġpers istent -AC H -Russ ian -m ans -Ġh ind -Ġphot ography - © -Ġh ug -Ġ10 7 -ĠH ence -i ots -ude au -Ġsubsid ies -Ġroutine ly -ĠDev ice -it ic -Ġdisg ust -land er -Ġ19 40 -Ġassign ment -ĠB esides -w ick -ĠD ust -us c -struct ed -11 1 -de velop -Ġf ond -Ġinter section -Ġdign ity -Ġcommission er -With out -re ach -Ġcart oon -Ġsc ales -ãĥ Ń -F IG -Ġsurve ys -ĠIndones ia -Ġart work -Ġun ch -Ġcy cling -un ct -au er -or ate -ĠOb viously -Ġcharacter ized -fe ld -Ġaff irm -Ġinn ings -Ġ é -Ġal iens -Ġcl oth -et ooth -ĠC ertain - § -Ġdig est -k now -ĠX L -Ġpredict ions -Ġd in -W AR -Ġafter math -Ex ample -ĠSu ccess -ĠTh r -IG N -Ġmin er -B us -Ġcl arity -heim er -ĠO UT -ĠS end -ĠCirc le -ĠD iet -Ġpron ounced -Ġcreat ors -Ġearthqu ake -atter y -ge ons -Ġo d -Ġlay ing -or p -U lt -pro ject -Ġunder min -Ġsequ el -S am -ĠDark ness -Ġre ception -b ull -Y S 
-ĠV ir -Ġsequ ences -ĠCo in -Ġout fit -ĠW ait -1 19 -Ġdel ivers -.... .. -Ġbl own -ĠE sc -ĠM ath -per m -ĠU l -Ġgl im -Ġfac ial -Ġgreen house -Ġto kens -/ - -ĠAnn ual -ĠON E -Ġteen age -ĠPhys ical -ĠL ang -ĠC elt -Ġsu ed -ivid ually -Ġpat ience -ch air -reg ular -Ġa ug -in v -ex cept -ĠL il -Ġn est -f d -s um -ĠCh ase -Russ ia -ĠJenn ifer -Ġoff season -Over all -F ore -Ġr iot -A ud -form er -Ġdefend ers -ĠC T -iot ic -rib ly -Ġautom ated -Ġpen is -Ġins ist -Ġdi agram -ĠS QL -ĠG arc -Ġw itch -cl ient -ier ra -am bers -Ġrec ount -f ar -V ery -oster one -Ġappreci ated -ĠPer fect -S ection -Ġd oses -oca ust -Ġcost ly -Ġg rams -ĠSh i -Ġwrest ling -Ġ19 71 -Ġtro phy -Ġn erve -ĠK az -ĠExper ience -Ġpled ged -Ġplay back -Ġcreat ivity -by e -Ġattack ers -Ġhold ers -ĠCo ach -ĠPh D -Ġtransf ers -Ġcol ored -ĠH indu -Ġd rown -Ġlist ened -ĠW A -ias m -P O -Ġappeal ing -Ġdiscl osed -ĠCh icken -ag ging -Ġple aded -Ġnav igation -ĠReturn s -Ġ[ [ -R OR -E A -Ġphotograp her -ĠR ider -ipp ers -Ġsl ice -Ġe rect -Ġhe d -iss ance -ĠVik ings -ur ious -Ġapp et -oubted ly -Ch ild -Ġauthent ic -o os -ĠM aking -Ġannoun cing -Ġb od -Ġmet er -ĠN ine -ĠR ogue -Ġwork force -Ġrenew ed -Ġorganis ations -ac s -P LE -Sh ort -Ġcomp ounds -ĠVis it -Ġen velop -ear th -Ġsupport ive -gg le -ĠBrus sels -ĠGu ild -Cre ate -RE L -Ġaver aged -Ġ19 69 -ri ages -Ġlength y -Ġforg ot -O kay -ĠE rd -Ġdeal er -Ġrec ession -D D -Ġdesper ately -Ġhun ger -Ġst icks -Ġm ph -ĠF aith -Ġintention ally -Ġdem ol -ue ller -ĠS ale -Ġde bris -s pring -Ġle ap ->> >> -Ġcontain ers -se lling -rane an -atter ing -Ġcomment ed -ĠC M -on ut -Ġwood s -es pecially -Ġorgan ize -iv ic -ĠWood s -ang a -s qu -Ġm aj -am on -Ġax is -Ġ19 74 -ĠDen mark -Ġwar rior -ĠP and -Ġout lined -ĠB O -ins ula -z illa -eb ook -Ġd are -Ġsear ched -Ġnav igate -S n -writ ing -Ġun ited -J apan -ĠHe brew -Ġfl ame -Ġrel ies -Ġcatch ing -ĠSh o -Ġimprison ment -Ġp ockets -Ġclos ure -ĠF am -t im -ade qu -Act ivity -Ġrecru iting -ĠW ATCH -ĠArgent ina -d est -Ġapolog ize 
-or o -Ġlack s -Ġtun ed -ĠGriff in -Ġinf amous -Ġcelebr ity -ss on -Ġ ---------------------------------------------------------------- -ĠIs is -ĠDis play -Ġcred ibility -Ġeconom ies -Ġhead line -ĠCow boys -Ġind ef -Ġl ately -Ġincent ives -but ton -ĠM ob -A ut -Ġres igned -ĠO m -c amp -Ġprof iles -Ġsche mes -olph ins -ay ed -Cl inton -en h -ĠY ahoo -Ġab st -Ġan k -su its -Ġw ished -ĠMar co -udd en -Ġsp here -ĠB ishop -Ġincorpor ated -ĠPl ant -11 4 -Ġh ated -p ic -Ġdon ate -Ġl ined -Ġbe ans -Ġsteal ing -Ġcost ume -Ġsher iff -Ġfor ty -Ġint act -Ġadapt ed -Ġtrave lling -b art -Ġnice ly -Ġdri ed -Ġsc al -os ity -NOT E -ĠB h -ĠBron cos -ĠI gn -Ġint imate -Ġchem istry -Ġopt imal -D eb -ĠGener ation -Ġ] , -ich i -ĠW ii -ĠYOU R -vent ions -W rite -Ġpop ul -un ning -ĠW or -V ol -Ġqu een -head s -K K -Ġanaly ze -op ic -ear chers -Ġd ot -leg raph -ast ically -Ġupgr ades -Ġca res -Ġext ending -Ġfree ze -Ġin ability -Ġorg ans -Ġpret end -Ġout let -11 3 -ol an -ĠM all -ul ing -t alk -Ġexpress ing -ĠAl ways -ĠBe gin -f iles -Ġlic enses -% % -ĠM itt -Ġfil ters -ĠMil waukee -G N -Ġunf old -M o -Ġnut rition -pp o -B o -Ġfound ing -Ġunder mine -Ġeas iest -ĠC zech -ĠM ack -Ġsexual ity -ĠN ixon -W in -ĠAr n -ĠK in -ãĤ £ -ic er -Ġfort un -Ġsurf aces -agh d -Ġcar riers -ĠP ART -ĠT ib -Ġinter val -Ġfrust rating -ĠSh ip -ĠAr med -ff e -Ġbo ats -ĠAb raham -in is -Ġsu ited -th read -i ov -ab ul -ĠVenezuel a -Ġto m -su per -Ġcast le -alth ough -iox ide -ec hes -Ġevolution ary -Ġnegoti ate -Ġconfront ed -Rem ember -Ġ17 0 -S uch -Ġ9 11 -m ult -ĠA byss -ur ry -ke es -spe c -ĠBarb ara -Ġbelong ing -Ġvill ain -ist ani -Ġaccount able -Ġport ions -ĠDe cl -U r -ĠK ate -g re -Ġmag azines -UC K -Ġregul ate -om on -ĠAl most -Ġover view -Ġsc ram -Ġl oot -ĠF itz -Ġcharacter istic -ĠSn ake -s ay -ĠR ico -Ġtra it -ĠJo ined -au cus -Ġadapt ation -ĠAirl ines -Ġarch ae -ĠI de -Ġb ikes -Ġliter ary -Ġinflu ences -ĠUs ed -C reat -Ġple a -ĠDef ence -ĠAss ass -Ġp ond -UL T -) " -Ġeval uated -Ġob taining -Ġdem 
ographic -Ġvig il -ale y -Ġsp ouse -ĠSeah awks -resp ons -ĠB elt -um atic -Ġr ises -run ner -ĠMichel le -Ġpot ent -r ace -ĠP AC -F ind -olester ol -IS S -ĠIntrodu ced -ress es -ign ment -O s -ĠT u -ĠDe x -ic ides -Ġspark ed -ĠLaur a -ĠBry ant -Ġsm iling -ĠNex us -Ġdefend ants -ĠCat al -Ġdis hes -sh aped -Ġpro long -m t -( $ -ãĢ Ĥ -Ġcalcul ations -ĠS ame -Ġp iv -H H -Ġcance lled -Ġgr in -Ġterrit ories -ist ically -C ome -ĠP arent -Pro ject -Ġneg lig -ĠPriv acy -Ġam mo -LE CT -olute ly -ĠEp ic -Ġmis under -w al -Apr il -m os -path y -ĠC arson -Ġalbum s -ĠE asy -Ġpist ol -< < -Ġ\ ( -t arget -hel p -Ġinter pre -cons cious -ĠH ousing -ĠJ oint -12 7 -Ġbe ers -s cience -ĠFire fox -effect ive -ĠC abin -ĠO kay -ĠApp lic -Ġspace craft -ĠS R -ve t -ĠStr ange -S B -Ġcor ps -iber al -e fficient -Ġpreval ence -Ġeconom ists -11 8 -Th read -ord able -OD E -ĠC ant -=- =- -if iable -ĠA round -Ġpo le -Ġwilling ness -CL A -ĠK id -Ġcomple ment -Ġsc attered -Ġin mates -Ġble eding -e very -Ġque ue -ĠTr ain -Ġh ij -Ġme lee -ple ted -Ġdig it -Ġg em -offic ial -Ġlif ting -Ð µ -Re qu -it utes -Ġpack aging -ĠWork ers -h ran -ĠLeban on -ol esc -Ġpun ished -ĠJ uan -Ġj am -ĠD ocument -Ġm apping -ic ates -Ġinev itably -Ġvan illa -ĠT on -Ġwat ches -Ġle agues -Ġiniti ated -deg ree -port ion -Ġrec alls -Ġru in -Ġm elt -I AN -Ġhe m -Ex p -Ġb aking -ĠCol omb -at ible -Ġrad ius -pl ug -ĠI F -et ically -Ġf ict -H ER -ĠT ap -atin um -Ġin k -Ġco h -ĠW izard -b oth -te x -Ġsp ends -ĠCurrent ly -ĠP it -Ġneur ons -ig nt -Ġr all -Ġbus es -b uilding -Ġadjust ments -Ġc ried -ibl ical -att ed -ĠZ ion -ĠM atter -Ġmed itation -ĠD ennis -Ġour s -ĠT ab -Ġrank ings -ort al -Ġad vers -Ġsur render -ĠG ob -ci um -om as -im eter -Ġmulti player -Ġhero in -Ġoptim istic -Ġindic ator -ĠBr ig -Ġgro cery -Ġapplic ant -ĠRock et -v id -Ex ception -p ent -Ġorgan izing -Ġenc ounters -ĠT OD -Ġjew el -S ave -ĠChrist ie -Ġhe ating -Ġl azy -ĠC P -Ġcous in -Con fig -Ġreg ener -Ġne arest -Ġachie ving -EN S -th row -ĠRich mond -ant le 
-200 2 -Ġan ten -b ird -13 3 -Ġn arc -r aint -un ny -ĠHispan ic -ourn aments -Ġprop he -ĠTh ailand -ĠT i -Ġinject ion -Ġinher it -rav is -Ġmed i -Ġwho ever -ĠDE BUG -G P -ĠH ud -C ard -p rom -Ġp or -Ġover head -L aw -Ġviol ate -Ġhe ated -Ġdescript ions -Ġachieve ments -ĠBe er -ĠQu ant -W as -Ġe ighth -ĠI v -Ġspecial ized -U PDATE -ĠD elta -P op -J ul -ĠAs k -oph y -Ġnews letters -ĠT ool -Ġg ard -ĠConf eder -ĠGM T -ĠAb bott -Ġimm unity -ĠV M -Is lam -Ġimpl icit -w d -Ġ19 44 -rav ity -omet ric -Ġsurv iving -ur ai -ĠPr ison -Ġr ust -ĠSk etch -Ġbe es -ĠThe ory -Ġmer it -T ex -ch at -Ġm im -Ġpast e -ĠK och -Ġignor ance -ĠSh oot -Ġbas ement -Un ited -ĠAd vis -he ight -Ġf oster -Ġdet ain -in formation -Ġne ural -' ; -Ġprov es -all ery -Ġinv itation -um bers -Ġc attle -Ġbicy cle -z i -Ġconsult ant -Ġap ology -ĠT iger -Ġ12 3 -99 9 -Ġind ividually -r t -ig ion -ĠBrazil ian -Ġdist urb -Ġentreprene urs -Ġfore sts -cer pt -pl ates -p her -clip se -Ġtw itter -Ġac ids -ograph ical -h um -ĠB ald -if ully -Ġcomp iler -ĠD A -Ġdon or -as i -Ġtrib al -l ash -ĠCon fig -Ġapplic ants -Ġsal aries -13 5 -Put in -ĠF ocus -ir s -Ġmisc onduct -ĠH az -Ġeat en -M obile -Mus lim -ĠMar cus -v iol -Ġfavor able -Ġst ub -ad in -ĠH ob -Ġfaith ful -Ġelectron ics -Ġvac uum -w ait -back ed -econom ic -d ist -Ġten ure -Ġsince re -ĠT ogether -ĠW ave -Ġprog ression -Ġden ying -Ġdist ress -br aska -th ird -Ġmix ing -Ġcolon ial -Ġpriv ately -Ġun rest -atern ity -Ġprem ises -ant i -greg ation -Ġlic ence -ĠH ind -ĠSam uel -Ġconvinc ing -ĠA ce -ĠR ust -ĠNet anyahu -Ġhand les -ĠP atch -orient ed -ah o -ĠG onz -Ġhack ers -claim er -Ġcustom s -ĠGr an -f ighters -Ġl uc -Ġman uscript -aren thood -Ġdev il -Ġwar riors -Ġoff enders -Will iam -Ġhol idays -Ġnight mare -Ġle ver -iff erent -St at -Ġexhib ition -put ed -ĠP ure -Ġal pha -Ġenthus iasm -ĠRepresent atives -E AR -ĠT yp -Ġwhe at -ĠAl f -Ġcor rection -Ġev angel -AT T -M iss -Ġs oup -Ġimpl ied -par am -Ġsex y -ĠL ux -Ġrep ublic -p atch -ab lish -Ġic ons -Ġfather s 
-ĠG ET -ĠCar ib -Ġregul ated -ĠCo hen -ĠBob by -Ġn er -Ġb ent -vent ory -ĠAl ong -ĠE ST -ĠWall ace -Ġmurd ers -r ise -ke ll -ĠCommon wealth -Ġn asty -et a -ĠM IT -Ġadminist ered -Ġgenuine ly -Ed itor -n ick -Ġhyd ro -**************** **************** -ĠB le -Ġfin es -Ġg orge -aus ible -r h -Ġapp le -ment ioned -Ġro pe -ot yp -H R -Ġdisappoint ing -Ġc age -n ik -Ġdoub ts -ĠF REE -print s -ĠM UST -Ġvend ors -ĠIn qu -Ġliber als -Ġcontract or -Ġup side -child ren -Ġtrick y -Ġregul ators -charg ed -l iter -Ġ *** -Ġreb ell -l ang -Ġloc als -Ġphys icians -Ġhe y -ar se -t m -ĠLe x -Ġbehavior al -success ful -F X -Ġbr ick -ov ic -Ġcon form -Ġreview ing -Ġins ights -Ġbi ology -ĠRem ove -ĠExt ra -Ġcomm itting -indu ced -ignt y -ig m -Ġat omic -Comm on -ĠE M -ĠP ere -ĠIt ems -e h -Ġpres erved -ĠH ood -Ġprison er -Ġbankrupt cy -Ġg ren -us hes -Ġexplo itation -Ġsign atures -Ġfin an -] ," -ĠM R -Ġme g -rem lin -Ġmusic ians -Ġselect ing -Ġexam ining -IN K -l ated -H i -Ġart ic -Ġp ets -Ġimp air -ĠM AN -Ġtable ts -in clude -R ange -Ġca ut -Ġlog s -Ġmount ing -Ġun aware -Ġdynam ics -ĠPalest ine -ĠQu arter -ĠPur ple -Ġm a -ĠIm port -Ġcollect ions -ci ation -Ġsuccess or -Ġcl one -Ġaim ing -Ġposs essed -Ġstick ing -Ġsh aking -Ġloc ate -ĠH ockey -T urn -17 0 -Ġfif teen -ĠHar rison -Ġcontinu ously -ĠT C -ĠVal ent -ĠRes cue -Ġby pass -am ount -Ġm ast -Ġprotect s -Ġart istic -Ġsomet ime -Ġsh oe -Ġshout ed -ific ant -et itive -ĠReg ister -ĠJ in -Ġconcent rated -ling ton -on ies -Ġgener ator -yr im -ĠAr men -Ġclear ing -id o -ĠT W -al ph -Ġlad ies -H ard -Ġdial og -Ġinput s -æ ľ -Ġpos es -Ġsl ots -ĠPrem ium -Ġle aks -Ġboss es -Ġ11 3 -c ourse -A cc -ĠNew ton -ĠAust ria -ĠM age -Ġte aches -ab ad -Ġwe ars -Ġc yl -Ġcur se -ĠS ales -ĠW ings -Ġp sy -Ġg aps -ĠIce land -ĠP interest -Ġland lord -Ġdefin itions -ĠK er -Ġsufficient ly -ĠP ence -ĠArch itect -Ġsur pass -Ġ11 4 -Ġsuper hero -ĠDise ase -Ġpri ests -ĠC ulture -Ġdefin itive -Ġsecret ly -ĠD ance -inst all -ch ief -ĠJess ica -W ould -Up dated 
-Ġlock er -ĠK ay -Ġmem orial -è ¦ -f at -Ġdis gu -Ġflav ors -ĠBase ball -ĠRes istance -Ġk icks -Ġen v -Ġteen agers -D ark -ĠC AR -Ġh alt -ĠL G -ĠGab riel -Ġfe ver -Ġs atur -Ġm all -Ġaffili ate -ĠS leep -ĠSpe cific -ĠV el -Ġj ar -ĠSac red -ĠEd wards -ĠA CL -Ġret ained -ĠG iant -Ġlim itation -in ces -Ġref usal -ĠT ale -ĠBut ler -Ġacc idents -ĠC SS -Ġimport ed -ĠCop y -Î ± -ER T -z el -Ġdiv isions -h ots -ĠAl b -ĠD S -Load er -W ashington -at isf -ĠCreat ive -\ . -ĠAut om -red ict -Ġrecept or -ĠCarl os -Met hod -ok a -Ġmal icious -Ġste pping -, [ -ĠD ad -Ġatt raction -ĠEffect s -ĠPir ate -ĠC er -ĠIndust ry -ĠR ud -Ġchar ter -Ġd ining -Ġins ists -Ġconfig ure -Ġ( # -ĠSim ple -ĠSc roll -UT C -17 5 -ĠK on -Ġmarket place -Ġ ãĤ -Ġref res -Ġg ates -er red -ĠP od -Ġbeh ave -Fr ank -n ode -Ġendors ed -he tt -as ive -ĠHom eland -Ġr ides -ĠLe ave -er ness -Ġflood ing -A FP -Ġris en -Ġcontin ually -Ġun anim -ĠCont ract -ĠP as -Ġgu ided -ĠCh ile -b d -Ġsu cc -pt ic -Ġcomm ittees -ĠL uther -ĠAny one -Ġs ab -12 4 -Ġp ixel -ĠB ak -ĠT ag -ĠBenn ett -En ter -sm all -ĠPresident ial -Ġp ul -Ġcontr ace -arch ive -Ġcoast al -ĠK ids -19 2 -âĢ ² -ick y -ING TON -Ġw olf -ĠSt alin -T ur -id get -am as -ĠUn less -Ġspons or -Ġmor ph -ĠCho ose -Ġrun ner -Ġun bel -Ġm ud -ĠMan a -Ġdub bed -Ġg odd -ure rs -wind ow -Ġrel ied -Ġcelebr ating -os c -Ġ13 5 -Ġlobb ying -Ġincom plete -Ġrestrict ion -Ġinc ap -it us -Ġexpect ation -ĠAp ollo -Ġint ens -Ġsyn c -G H -Ġmanip ulation -B Y -Ġspe ar -Ġbre asts -Ġvol can -il ia -M aterial -Ġform ats -ĠB ast -Ġparliament ary -Ġsn ake -Ġserv ants -ĠTr udeau -ĠGr im -ĠArab ic -ĠSC P -ĠBoy s -st ation -Ġprospect ive -ord e -in itialized -Ġb ored -AB LE -Ġaccess ed -Ġtax i -ĠShe ll -aid en -urs ed -in ates -ĠIns urance -ĠPet e -Sept ember -6 50 -Ġad ventures -ĠCo ver -Ġt ribute -Ġsk etch -Ġem power -Ġ Ø -ĠGl enn -ĠD aw -= \" -ĠPolit ics -Ġgu ides -Ġd ioxide -ĠG ore -ĠBr ight -ĠS ierra -Ġval ued -c ond -Ġpo inter -Se lect -Ġrisk y -Ġabsor b -im ages -Ġref uses -Ġbon uses 
-__ _ -Ġh ilar -ĠF eatures -2 20 -ĠCollect or -F oot -Ġ19 64 -cul us -Ġd awn -Ġwork out -ĠL O -Ġphilosoph ical -ĠSand y -ĠYou th -Ġl iable -A f -bl ue -Ġovert urn -less ness -ĠTrib une -ĠIn g -Ġfact ories -Ġcat ches -Ġpr one -Ġmat rix -Ġlog in -Ġin acc -Ġex ert -s ys -Ġneed le -ĠQ ur -Ġnot ified -ould er -t x -Ġremind s -Ġpublisher s -Ġn ort -Ġg it -Ġfl ies -ĠEm ily -Ġflow ing -ĠAl ien -ĠStr ateg -Ġhard est -Ġmod ification -AP I -ĠM Y -Ġcr ashes -st airs -n umber -Ġur ging -ch annel -ĠFal con -Ġinhabit ants -Ġterr ifying -Ġutil ize -Ġban ner -Ġcig arettes -Ġsens es -ĠHol mes -Ġpract ition -ĠPhill ips -ott o -Ġcomp ile -Mod el -ĠK o -Ġ[ ] -Americ ans -ĠTer ms -Ġmed ications -ĠAn a -Ġfundament ally -ĠNot ice -Ġwe aker -Ġ 0000 -Ġgar lic -Ġout break -Ġeconom ist -ĠB irth -Ġobst acles -ar cer -ĠOr thodox -Ġplace bo -ĠC rew -asp berry -ĠAng els -Ġdis charge -Ġdestruct ive -11 7 -ĠR ising -Ġd airy -l ate -Ġcoll ision -ĠTig ers -ean or -ocument ed -ĠIn valid -Ġd ont -ĠL iter -ĠV a -Ġhyd rogen -Ġvari ants -ĠBrown s -Ġ19 65 -Ġind igenous -Ġtrad es -Ġremain der -Ġswe pt -ĠImp act -Ġred ist -Ġun int -grad uate -ãĥ ķ -ĠW ILL -ãģ® ç -ĠCrit ical -Ġf isher -Ġv icious -Ġrevers ed -Y ear -ĠS ox -Ġshoot ings -Ġfil ming -Ġtouchdown s -ai res -m el -Ġgrand father -Ġaffect ion -ing le -Ġover ly -Add itional -Ġsup reme -ĠGr ad -Ġsport ing -Ġmer cy -ĠBrook s -ount y -Ġperform s -Ġtight ly -Ġdem ons -Ġkill ings -Ġfact ion -ĠNov a -aut s -Ġund oubtedly -ar in -Ġunder way -ra k -Ġl iv -ĠReg ion -Ġbrief ing -s ers -cl oud -ĠM ik -us p -Ġpred iction -az or -Ġport able -ĠG and -Ġpresent ing -Ġ10 80 - » -ush i -ĠSp ark -there um -Ġjust ification -ĠN y -Ġcontract ors -ming ham -ĠSt yle -å ħ -ĠChron icles -ĠPict ure -Ġprov ing -Ġw ives -set t -Ġmole cules -ĠFair y -Ġconsist ing -Ġp ier -al one -in ition -Ġn ucle -j son -Ġg otta -Ġmob il -Ġver bal -ar ium -Ġmon ument -uck ed -Ġ25 6 -T ech -mine craft -ĠTr ack -Ġt ile -Ġcompat ibility -as is -Ġs add -Ġinstruct ed -ĠM ueller -Ġle thal -Ġhorm one -Ġor 
che -el se -Ġske let -Ġentert aining -Ġminim ize -ag ain -Ġunder go -Ġconst raints -Ġcig arette -ĠIslam ist -Ġtravel s -ĠPant hers -l ings -C are -Ġlaw suits -ur as -Ġcry st -Ġlow ered -Ġaer ial -Ġcomb inations -Ġha un -Ġch a -Ġv ine -Ġquant ities -Ġlink ing -b ank -Ġso y -B ill -ĠAngel a -Ġrecip ient -ĠProt est -Ġs ocket -Ġsolid arity -Ġâ Ĩ -m ill -Ġvar ies -ĠPak istani -Dr agon -Ġun e -Ġhor izon -³³³³ ³³³³ -Ġprov inces -Ġfrank ly -Ġenact ed -not es -[ ' -Ġ19 2 -ocr acy -Ġendorse ment -Ġover time -Tr ue -L ab -lic ted -ĠD NC -Ġbe ats -ĠJam ie -15 2 -ĠIN T -Cont act -Ġaccount ed -h ash -ĠPack ers -p ires -Ġles bian -Ġamend ments -Ġhop eful -ĠFin land -Ġspot light -Ġconfig ured -Ġtrou bled -Ġg aze -ĠCal gary -Ġrel iability -Ġins urg -sw er -b uy -ĠSk in -Ġp ixels -Ġhand gun -Ġpar as -Ġcateg or -ĠE L -ĠRe x -Ind eed -Ġkind a -Ġconj unction -ĠBry an -ĠMan ufact -y ang -Pl us -S QL -ish ment -Ġdom inate -Ġn ail -Ġo ath -Ġeru pt -ĠF ine -it bart -ĠCh ip -ĠAb d -ĠN am -Ġbuy er -Ġdiss ent -Le aks -Cont in -Ġr ider -ĠSome one -Ġill usion -c in -ĠBoe ing -Ġin adequ -ov ation -i ants -Ġreb uild -4 50 -ĠDest iny -S W -ĠT ill -H it -ia z -ĠBang l -acher s -ĠRe form -Ġse gments -Ġsystem atic -d c -ĠConserv atives -Ġport al -h or -ĠDragon bound -Ġdrag ged -om o -Ġthe e -ad vert -ĠRep orts -ĠE t -Ġbarrel s -Aug ust -Ġcompar isons -Ġhe x -Ġan throp -" [ -bor ough -ab i -Ġpict ured -play ing -ĠAdd ress -ĠMir ror -Sm ith -Ġt ires -ĠN PR -AA AA -Ġclass ification -ĠTh an -ĠH arm -ĠR A -Ġreject ion -min ation -Ġr anged -ĠF alls -D I -H ost -ãĤ ´ -ĠEx ample -list ed -th irds -Ġsaf egu -br and -Ġprob able -Can ada -IT ION -ĠQ aeda -Ġch ick -Ġimport s -h it -l oc -W W -Ġble w -Ġany time -Ġwh oles -ik ed -Ġcal culation -cre ate -ĠO ri -Ġupgr aded -Ġapp ar -ut ory -ĠM ol -B rit -ĠJ ong -IN AL -ĠStart ing -Ġd ice -urt le -Ġre lying -cl osure -Ġprof itable -Ġsl aughter -ĠMan ual -c aster -Ġ" $ -Ġfe ather -ĠSim ply -ie ves -Ġdeter ior -ĠPC I -Ġst amp -Ġfl aws -Ġsh ade -ham mer -Ġpass port 
-Ġcont ing -am el -Ġobser vers -Ġneg lect -ĠR B -ĠBrother hood -Ġskept ical -f amily -us k -Ġemotion ally -â Ļ -ĠBet a -ason able -id ity -ĠM ul -Ġkick ing -ĠC arm -oll ah -VERT IS -ĠAt hen -Ġlad der -ĠBul let -å £ -00 01 -ĠWild life -ĠM ask -ĠN an -R ev -Ġun acceptable -leg al -Ġcrowd ed -ag i -ĠC ox -j e -Ġmor ality -Ġfu els -Ġc ables -Ġman kind -ĠCarib bean -Ġanch or -Ġby te -ĠO ften -ĠO z -Ġcraft ed -Ġhistor ian -ĠW u -Ġtow ers -ĠCitiz ens -Ġhel m -Ġcred entials -Ġsing ular -ĠJes se -Ġtack les -Ġcont empt -Ġa fore -ĠSh adows -Ġn il -Ġur gent -app le -bl ood -Ġv on -Ġoff line -Ġbreat he -Ġj umps -Ġirre levant -ox ic -om al -import ant -J im -Ġgl oves -arm ing -dep th -Ġtal ents -ook ie -ĠS B -Ġpal m -uff s -est a -IG H -Ġcan on -ĠVer izon -ĠP le -Ġcou pled -vel t -Ġfundra ising -ĠGet ting -ĠD LC -Ġmathemat ical -ĠH S -ĠCard inals -te lling -Ġspons ors -Ġ Ï -ĠBull s -op tion -Ġprop ose -Ġmem orable -Ġembr aced -Ġdecl ining -He alth -ed a -Ġ} ; -Ġsp am -m ile -Ġpit cher -ĠE ight -Ġcar ing -ut ic -ro le -Ġair line -ernand ez -ĠAth let -Ġcert ification -ux e -rig er -Ġem pir -Ġsens ation -Ġdis m -Ġb olt -Ġev olve -H ouse -Ġconsult ation -ĠD uty -Ġtou ches -ĠN athan -Ġf aint -h ad -" ( -ĠCons umer -ĠExt reme -Ġ12 7 -ĠHer m -ĠSac rament -iz oph -Ġanx ious -ul ously -Ġsoc ially -ĠU TC -Ġsol ving -ĠLet ter -Hist ory -ed uc -Pr ice -) ); -Ġrel oad -am ic -Ġp ork -Ġdisc ourse -Ġt ournaments -ai ro -ĠK ur -ĠCost a -Ġviol ating -Ġinterf ere -Ġrecre ational -uff le -Ġspe eches -Ġneed ing -Ġremem bers -Ġcred ited -n ia -f ocused -amer a -Ġb ru -um bs -ĠCub an -Ġpreced ing -Ġnons ense -ac ial -Ġsmart phones -ĠSt ories -S ports -ĠEmer gency -oun cing -ef ined -Ġb er -Ġconsult ing -Ġm asters -he astern -." 
[ -ĠRun ning -Ġsus cept -ĠF eng -Americ a -pr ises -st itial -ĠWeek ly -ĠGreat er -mod ules -if ter -G raphics -ul er -Ġwho lly -Ġsupp ress -Ġconce aled -Ġhapp ily -Ġaccept s -ĠEn joy -Ġr ivers -ĠEx cept -2 25 -ĠN HS -ĠMc Connell -Ġp ussy -fer red -ut able -Ġatt ain -Ġ> = -Ġdepos its -roph ic -Ġnot orious -ĠSh aw -il itation -Ġepid emic -all ic -Ġsmall est -ov ich -Ġaccess ories -per ties -Ġsur plus -ĠMe ch -Ġamb ig -ĠImm igration -Ġch im -ev al -Ġpract icing -ĠMyster y -Ġdom ains -ĠSil icon -app s -Ġkilomet ers -e a -ĠSm ash -Ġwarrant y -Ġn ost -s il -re v -J on -ĠDub lin -Ġtast es -Ġb out -g reat -er ror -Ġsw itches -ĠB apt -D O -ok i -Ġsour ced -pro du -Ġattach ment -ĠIss ue -ĠQuest ion -Jo in -Ġf itted -Ġunlaw ful -^ ^ -ere k -Ġauthent ication -Ġst ole -Ġaccount ability -l abel -S earch -Ġal beit -atic an -fund ed -ĠAdd ing -ĠI Q -Ġsub mar -l it -a que -ĠLear ning -Ġint eger -M aster -ĠCh rom -Ġprem ier -O p -ĠLi u -Ġbl essed -ĠGl obe -ĠResp onse -Ġlegit im -ĠMer kel -Ġdispos al - ´ -Ġgau ge -pe at -Ġindu ced -Ġquestion able -arth y -ĠV it -ĠF eed -U ntil -U t -worth y -R Y -ĠH erald -ĠHam mer -Ġmed al -ĠR ivers -ĠH ack -Ġclar ify -Ġtrack ed -Ġautonom ous -Ġten ant -ĠQ atar -er ie -Ġgr im -ĠMon itor -Ġresist ant -ĠSpe c -ĠWell s -N AS -14 8 -Ġmin ers -iot ics -Ġmiss es -11 6 -g ian -g it -ĠE yes -p res -Ġgrad uated -Ġang el -Ġsyn chron -Ġefficient ly -Ġtrans mitted -H arry -Ġglob ally -EN CE -ĠMont ana -r aged -ĠPre vention -Ġp iss -ĠL l -Ġshe lf -ĠB JP -ĠTest ament -ĠL ate -ik er -ĠH app -ĠJul ian -h all -Ġsp ont -Ġshut down -Ġincons istent -Ġsubscrib ers -Ġske leton -ĠNe braska -Ġins pire -ĠV oid -F eed -Ġang les -ĠSpr ings -Ġbench mark -Ġvacc ines -izoph ren -se xual -uff ed -Ġsh ine -ĠK ath -Ġgest ure -ine a -Ġr ip -Ġopp ression -Ġcons cience -b t -ĠL um -Ġinc idence -ĠF a -w r -Ġmin eral -ĠSp urs -alk y -Ġth under -Ġop io -Be ing -ĠPal m -Ġwas ted -Ġl b -i aries -ĠIniti ative -Ġcur ric -Ġmark er -ĠMc L -Ġext ensions -ĠP v -ĠAr ms -Ġoffer ings -Ġdef enses 
-Ġvend or -Ġcontrad ict -ĠCol in -Ġredd it -Ġper ipher -12 2 -Ġs ins -E dit -IC T -So ft -ĠSh ah -Ġadministr ator -ĠT rip -Ġporn ography -Ġtu ition -in ence -ĠPro gress -Ġcat alog -Ġsu ite -Ġh ike -Ġreprodu ctive -eng ine -Ġd rought -ĠNo ah -Ġ2 30 -Ġd ude -Ġrelax ed -Ġpart ition -Ġparticip ant -Ġtel esc -Ġfe as -ĠF F -own er -Ġswe eping -Ġl enses -Ġmatch up -ĠRe pl -ourn als -Ġcred ible -Ġgrand mother -Ġther mal -Ġsubscrib ing -Ġident ities -col m -U CT -Ġreluct ant -us ers -ĠC ort -Ġassist ed -OS S -ATION S -IS H -Ġpharm aceutical -ic able -ad ian -ĠSon ic -ĠF ury -ĠM ong -A H -ĠPsych ology -Ġph osph -Ġtreat s -Ń Ķ -Ġstead ily -ĠHell o -Ġrel ates -Ġcl ue -Ex pl -a uth -Ġrev ision -Ġe ld -os ion -Ġbr on -14 4 -ri kes -Ġmin es -Ġblank et -ĠF ail -el ed -ĠIm agine -ĠPl anned -a ic -Re quest -M ad -ĠHor se -ĠEag le -Ġcap ac -15 7 -Ġl ing -ĠN ice -ĠP arenthood -min ster -og s -ens itive -Not hing -Ġcar n -F in -ĠP E -Ġr ifles -ĠL P -S and -Ġgui Active -Ġtour ist -C NN -Ġunve iled -Ġpredec essor -} { -u ber -Ġoff shore -Ġopt ical -ĠR ot -ĠPear l -et on -Ġst ared -Ġfart her -at ility -cont in -ĠG y -ĠF oster -ĠC oc -ri ents -Ġdesign ing -ĠEconom y -ON G -W omen -ĠN ancy -er ver -Ġmas cul -Ġcasual ties -Ġ2 25 -ĠS ullivan -ĠCh oice -Ġa ster -w s -Ġhot els -Ġconsider ations -Ġcou ch -ĠSt rip -ĠG n -Ġmanip ulate -l ied -Ġsynt hetic -Ġassault ed -Ġoff enses -ĠDra ke -Ġim pe -Oct ober -ĠHer itage -h l -ĠBl air -Un like -Ġg rief -Ġ4 50 -Ġopt ed -Ġresign ation -il o -Ġver se -ĠT omb -Ġu pt -Ġa ired -ĠH ook -ĠML B -Ġassum es -out ed -ĠV ers -Ġinfer ior -Ġbund le -ĠD NS -ograp her -Ġmult ip -ĠSoul s -Ġillust rated -Ġtact ic -Ġdress ing -Ġdu o -Con f -Ġrel ent -Ġc ant -Ġscar ce -Ġcand y -ĠC F -Ġaffili ated -Ġspr int -yl an -ĠGarc ia -Ġj unk -Pr int -ex ec -C rit -Ġport rait -ir ies -ĠOF F -Ġdisp utes -W R -L ove -ãģ Ħ -ĠRe yn -Ġh ipp -op ath -Ġflo ors -ĠFe el -Ġwor ries -Ġsett lements -ĠP os -Ġmos que -Ġfin als -Ġcr ushed -ĠPro bably -ĠB ot -ĠM ans -ĠPer iod -Ġsovere ignty -Ġsell 
er -Ġap ost -Ġam ateur -Ġd orm -Ġconsum ing -Ġarm our -ĠRo ose -Ġint ensive -Ġelim inating -ĠSun ni -ĠAle ppo -j in -Ġadv ise -p al -ĠH alo -Ġdes cent -Ġsimpl er -Ġbo oth -ST R -L ater -ĠC ave -== = -Ġm ol -Ġf ist -Ġshot gun -su pp -Ġrob bery -E ffect -Ġobsc ure -ĠProf essional -Ġemb assy -Ġmilit ant -Ġinc arcer -Ġgener ates -Ġlaun ches -Ġadministr ators -Ġsh aft -Ġcirc ular -Ġfresh man -ĠW es -ĠJo el -ĠD rew -ĠDun can -ĠApp arently -s ight -ĠIntern al -ĠInd ividual -ĠF E -Ġb ore -ĠM t -Ġbroad ly -ĠO ptions -ount ain -ip es -ĠV ideos -20 4 -Ġh ills -Ġsim ulation -Ġdisappoint ment -it an -ĠLabor atory -Ġup ward -Ġbound ary -Ġdark er -h art -Ġdomin ance -C ong -ĠOr acle -ĠL ords -Ġscholars hip -ĠVin cent -ed e -ĠR ah -Ġencour ages -ro v -Ġqu o -Ġprem ise -ĠCris is -ĠHol ocaust -Ġrhyth m -Ġmet ric -cl ub -Ġtransport ed -Ġn od -ĠP ist -Ġancest ors -ĠFred er -th umbnails -ĠC E -ON D -Ph il -ven ge -ĠProduct s -cast le -Ġqual ifying -ĠK aren -VERTIS EMENT -Ġmight y -Ġexplan ations -Ġfix ing -D i -Ġdecl aring -Ġanonym ity -Ġju ven -ĠN ord -ĠDo om -ĠAct ually -O k -ph is -ĠDes ert -Ġ11 6 -I K -ĠF M -Ġinc omes -V EL -ok ers -Ġpe cul -Ġlight weight -g ue -Ġacc ent -Ġincre ment -ĠCh an -Ġcompl aining -ĠB aghd -Ġmidfield er -Ġover haul -Pro cess -ĠH ollow -ĠTit ans -Sm all -man uel -ĠUn ity -ĠEv ents -S ty -Ġdispro portion -n esty -en es -ĠC od -Ġdemonstr ations -ĠCrim son -ĠO H -Ġen rolled -Ġc el -ĠBre tt -Ġa ide -Ġhe els -Ġbroad band -Ġmark ing -Ġw izard -ĠN J -ĠChief s -Ġingred ient -Ġd ug -ĠSh ut -urch ase -end or -Ġfar mer -ĠGold man -12 9 -15 5 -Or der -Ġl ion -i ably -Ġst ain -ar ray -ilit ary -ĠFA Q -Ġexpl oded -ĠMcC arthy -ĠT weet -ĠG reens -ek ing -l n -ens en -Ġmotor cycle -Ġpartic le -Ġch olesterol -B ron -Ġst air -Ġox id -Ġdes irable -ib les -Ġthe or -for cing -Ġpromot ional -ov o -b oot -ĠBon us -raw ling -Ġshort age -ĠP sy -Ġrecru ited -Ġinf ants -Ġtest osterone -Ġded uct -Ġdistinct ive -Ġfirm ware -bu ilt -14 5 -Ġexpl ored -Ġfact ions -Ġv ide -Ġtatt oo -Ġfinan 
cially -Ġfat igue -Ġproceed ing -const itutional -Ġmis er -Ġch airs -gg ing -ipp le -Ġd ent -Ġdis reg -ç Ķ -st ant -ll o -b ps -aken ing -Ġab normal -ĠE RA -å£ « -ĠH BO -ĠM AR -Ġcon cess -Ġserv ant -Ġas pir -l av -ĠPan el -am o -Ġprec ip -Ġrecord ings -Ġproceed ed -Ġcol ony -ĠT ang -ab lo -Ġstri pped -Le ft -to o -Ġpot atoes -Ġfin est -% ). -Ġc rap -ĠZ ach -ab ases -ĠG oth -Ġbillion aire -w olf -Ġsan ction -S K -Ġlog ged -P o -ey ed -un al -Ġcr icket -Ġarm ies -Ġunc overed -Cl oud -ó n -Ġreb ounds -Ġm es -O per -P ac -Ġnation ally -Ġinsert ed -p ict -Ġgovern ance -Ð ¸ -Ġprivile ges -G ET -Ġfavor ites -im ity -Ġlo ver -the m -em pl -Ġgorge ous -An n -Ġsl ipped -Ġve to -B ob -Ġsl im -u cc -ĠF ame -udden ly -Ġden ies -ĠM aur -Ġdist ances -Ġw anna -t ar -ĠS ER -Ġâ Ī -Ġle mon -at hetic -Ġlit eral -Ġdistingu ished -Ġansw ering -G I -Ġrelig ions -ĠPhil os -ĠL ay -Ġcomp os -ire ments -ĠK os -ine z -roll ing -Ġyoung est -and ise -ĠB orn -Ġalt ar -am ina -ĠB oot -v oc -Ġdig ging -Ġpress ures -Ġl en -26 4 -Ġassass ination -ĠBir mingham -ĠMy th -Ġsovere ign -ĠArt ist -ĠPhot ograph -Ġdep icted -Ġdisp ens -orth y -Ġamb ul -int eg -ĠC ele -ĠTib et -Ġhier archy -Ġc u -Ġpre season -ĠPet erson -Ġcol ours -Ġworry ing -Ġback ers -ĠPal mer -ĠÎ ¼ -Ġcontribut or -Ġhear ings -Ġur ine -Ġ Ù -ourge ois -Sim ilar -ĠZ immer -s omething -ĠUS C -Ġstrength s -ĠF I -Ġlog ging -As ked -ĠTh ai -in qu -ĠW alt -Ġcrew s -it ism -3 01 -Ġshar ply -um ed -Ġred irect -r ators -In f -ĠWe apons -Ġte asp -19 99 -L ive -ĠEs pecially -ĠS ter -ĠVeter ans -Ġint ro -other apy -Ġmal ware -Ġbre eding -Ġmole cular -ĠR oute -ĠCom ment -oc hem -Ġa in -Se ason -Ġlineback er -Ä « -ĠEconom ics -es ar -ĠL ives -ĠEm ma -Ġk in -ĠTer rit -Ġpl anted -ot on -ĠBut ter -ĠSp ons -P ER -Ġdun geon -Ġsymb olic -Ġfil med -Ġdi ets -Ġconclud es -Ġcertain ty -ĠForm at -Ġstr angers -form at -ĠPh ase -Ġcop ied -Ġmet res -ld a -ĠUs ers -Ġdeliber ate -Ġwas hed -ĠL ance -im ation -Ġimpro per -ĠGen esis -ick r -ĠK ush -Ġreal ise -Ġembarrass 
ing -alk ing -b ucks -Ġver ified -Ġout line -year s -ĠIn come -20 2 -Ġz ombies -F inal -ĠMill enn -Ġmod ifications -ĠV ision -ĠM oses -ver b -iter ranean -ĠJ et -Ġnav al -ĠA gg -Ġur l -Ġvict ories -Ġnon etheless -Ġinj ust -ĠF act -ç ļ -Ġins ufficient -re view -face book -Ġnegoti ating -Ġguarant ees -im en -uten berg -Ġg ambling -Ġcon gr -Load ing -Ġnever theless -Ġpres idents -ĠIndust rial -Ġ11 8 -Ġp oured -ĠT ory -Ġ17 5 -Ġ: = -Sc ott -ange red -T ok -Ġorgan izers -M at -ĠG rowth -Ġad ul -Ġens ures -Ġ11 7 -é¾į å -Ġmass acre -Ġgr ades -be fore -AD VERTISEMENT -ĠSl ow -ĠM MA -âĢĶ " -ĠV atican -Q aeda -Ġo we -66 66 -ĠS orry -ĠGr ass -Ġbackground s -Ġexha usted -Ġcl an -Ġcomprom ised -ĠE lf -ĠIsa ac -ens on -In vest -IF A -Ġinterrupt ed -ãĥī ãĥ© -Ġtw isted -ĠDrag ons -M ode -ĠK remlin -Ġfert il -he res -ph an -ĠN ode -f ed -ĠOr c -Ġunw illing -C ent -Ġprior it -Ġgrad uates -Ġsubject ive -Ġiss uing -ĠL t -Ġview er -Ġw oke -Th us -bro ok -Ġdep ressed -Ġbr acket -ĠG or -ĠFight ing -Ġstri ker -Rep ort -ĠPortug al -Ġne o -w ed -19 9 -Ġflee ing -sh adow -ident ified -US E -Ste am -Ġstret ched -Ġrevel ations -art ed -ĠD w -Ġalign ment -est on -ĠJ ared -S ep -Ġblog s -up date -g om -r isk -Ġcl ash -ĠH our -Ġrun time -Ġunw anted -Ġsc am -Ġr ack -Ġen light -on est -ĠF err -Ġconv ictions -Ġp iano -Ġcirc ulation -ĠW elcome -Ġback lash -ĠW ade -Ġrece ivers -ot ive -J eff -Ġnetwork ing -ĠPre p -ĠExpl orer -Ġlect ure -Ġupload ed -ĠMe at -B LE -ĠNaz is -ĠSy nd -st ud -ro ots -ri ans -Ġportray ed -Ġ ?? 
-ĠBudd ha -s un -Rober t -ĠCom plex -Ġover see -Ġste alth -T itle -ĠJ obs -ĠK um -Ġappreci ation -ĠM OD -Ġbas ics -Ġcl ips -Ġnurs ing -Ġpropos ition -Ġreal ised -ĠNY C -Ġall ocated -ri um -ar an -ĠPro duction -ĠV ote -Ġsm ugg -Ġhun ter -az er -ĠCh anges -Ġfl uct -y on -Ar ray -Ġk its -W ater -Ġuncom mon -Ġrest ing -ell s -w ould -Ġpurs ued -Ġassert ion -omet own -ĠMos ul -ĠPl atform -io let -Ġshare holders -Ġtra ils -P ay -ĠEn forcement -ty pes -ĠAn onymous -Ġsatisf ying -il ogy -Ġ( ' -w ave -c ity -Ste ve -Ġconfront ation -ĠE ld -C apt -ah an -ht m -ĠC trl -ON S -2 30 -if a -hold ing -Ġdelic ate -Ġj aw -ĠGo ing -or um -S al -Ġd ull -ĠB eth -Ġpr isons -Ġe go -ĠEl sa -avor ite -ĠG ang -ĠN uclear -Ġsp ider -ats u -Ġsam pling -Ġabsor bed -ĠPh arm -iet h -Ġbuck et -ĠRec omm -O F -ĠF actory -AN CE -Ġb acter -H as -ĠObs erv -12 1 -Ġprem iere -De velop -Ġcur rencies -C ast -Ġaccompany ing -ĠNash ville -Ġfat ty -ĠBre nd -Ġloc ks -Ġcent ered -ĠU T -augh s -or ie -ĠAff ordable -v ance -D L -em et -Ġthr one -ĠBlu etooth -Ġn aming -if ts -AD E -Ġcorrect ed -Ġprompt ly -ĠST R -Ġgen ome -Ġcop e -Ġval ley -Ġround ed -ĠK end -al ion -p ers -Ġtour ism -Ġst ark -v l -Ġblow ing -ĠSche dule -st d -Ġunh appy -Ġlit igation -ced es -Ġand roid -Ġinteg ral -ere rs -ud ed -t ax -Ġre iter -ĠMot ors -oci ated -Ġwond ers -ĠAp ost -uck ing -ĠRoose velt -f ram -Ġyield s -Ġconstit utes -aw k -Int erest -Ġinter im -Ġbreak through -ĠC her -Ġpro sec -ĠD j -ĠM T -Res p -ĠP T -Ġs perm -ed it -B T -Lin ux -count ry -le ague -Ġd ick -Ġo ct -Ġinsert ing -Ġsc ra -ĠBrew ing -Ġ19 66 -Ġrun ners -Ġpl un -id y -ĠD ian -Ġdys function -Ġex clusion -Ġdis gr -Ġincorpor ate -Ġrecon c -Ġnom inated -ĠAr cher -d raw -achel or -Ġwrit ings -Ġshall ow -Ġh ast -ĠB MW -ĠR S -Ġth igh -Ġ19 63 -Ġl amb -Ġfav ored -ag le -Ġcool er -ĠH ours -ĠG U -ĠOrig in -Ġglim pse ----------------- ---- -L im -Ġche ek -Ġj ealous -- ' -Ġhar ness -ĠPo ison -Ġdis abilities -ne apolis -Ġout look -Ġnot ify -ĠIndian apolis -Ġab rupt -ns ic -Ġenc 
rypted -Ġfor fe -reat h -Ġr abb -Ġfound ations -Ġcompl iment -ĠInter view -ĠS we -Ġad olesc -Ġmon itors -ĠSacrament o -Ġtime ly -Ġcontem pl -Ġposition ed -Ġpost ers -ph ies -iov ascular -v oid -ĠFif th -Ġinvestig ative -OU N -Ġinteg rate -ĠIN C -ish a -ibl ings -ĠRe quest -ĠRodrig uez -Ġsl ides -ĠD X -Ġfemin ism -Ġdat as -Ġb end -ir us -ĠNig eria -F ox -Ch ange -Ġair plane -ĠLad en -Ġpublic ity -ixt y -Ġcommit ments -Ġaggreg ate -Ġdisplay ing -ĠAr row -Ġ12 2 -Ġrespect s -and roid -s ix -ĠSh a -Ġrest oration -) \ -W S -oy s -Ġillust rate -with out -12 6 -ĠâĶ Ĥ -Ġpick up -n els -Ġ .... -f ood -ĠF en -) ? -Ġphenomen a -Ġcompan ions -ĠW rite -Ġsp ill -Ġbr idges -ĠUp dated -ĠF o -Ġinsect s -ASH INGTON -Ġsc are -il tr -ĠZh ang -Ġsever ity -Ġind ul -14 9 -ĠCo ffee -Ġnorm s -Ġp ulse -ĠF T -Ġhorr ific -ĠDest roy -ĠJ SON -Ġo live -Ġdiscuss es -R est -E lect -ĠW inn -ĠSurv iv -ĠH ait -S ure -op ed -Ġro oted -ĠS ke -ĠBron ze -Ġl ol -Def ault -Ġcommod ity -red ited -Ġliber tarian -Ġforb idden -Ġgr an -à ¨ -Ġl ag -en z -dri ve -Ġmathemat ics -Ġw ires -Ġcrit ically -Ġcarb ohyd -ĠChance llor -ĠEd die -Ġban ning -ĠF ri -Ġcompl ications -et ric -ĠBangl adesh -Ġband width -St op -ĠOrig inally -Ġhalf way -yn asty -sh ine -Ġt ales -rit ies -av ier -Ġspin ning -ĠWH O -Ġneighbour hood -b ach -Ġcommer ce -ĠS le -B U -Ġentreprene ur -Ġpecul iar -ĠCom ments -f re -3 20 -IC S -Ġimag ery -ĠCan on -ĠElect ronic -sh ort -( ( -D ig -Ġcomm em -u ced -Ġincl ined -ĠSum mon -Ġcl iff -ĠMed iterranean -Ġpo etry -Ġprosper ity -ĠRe ce -Ġp ills -m ember -Ġfin ale -un c -ĠG ig -ä ½ -Ġl od -Ġback ward -- + -ĠFor ward -Ġth ri -s ure -Ġso ap -ĠF X -R ES -ĠSe xual -oul os -Ġfool ish -Ġright eous -Ġco ff -terror ism -ust ain -ot er -Ġab uses -ne xt -Ġab usive -Ġthere after -Ġprohib ition -ĠS UP -Ġd ip -Ġr ipped -Ġinher ited -Ġb ats -st ru -G T -Ġflaw ed -ph abet -Ġf og -do ors -Ġim aging -Ġdig its -ĠHung ary -Ġar rog -Ġteach ings -Ġprotocol s -ĠB anks -à ¸ -p ound -ĠC urt -." ) -. 
/ -Ġex emption -end ix -ĠM ull -Ġimpro ves -ĠG amer -d imensional -I con -ĠMarg aret -St atus -d ates -Ġint ends -Ġdep ict -Ġpark ed -J oe -ĠMar ines -chn ology -! ). -Ġjud ged -Ġwe ights -R ay -Ġapart ments -he ster -Ġrein force -Ġoff ender -occ up -Ġs ore -e pt -ĠPH P -ĠB row -Ġauthor ization -ĠR isk -ĠDel aware -ĠQ U -Ġnot ifications -Ġsun light -Ġex clude -d at -Ġm esh -ĠSud an -Ġbelong ed -Ġsub way -Ġno on -ĠInter ior -ol ics -ĠL akers -Ġc oding -Dis claimer -Cal if -O ld -Ġdis l -???? ? -Ġconfir ms -Ġrecruit ment -Ġhom icide -Cons ider -ĠJeff rey -ft y -} ; -Ġobject ion -do ing -ĠLe o -W ant -Ġgl ow -ĠClar ke -ĠNorm an -Ġver ification -Ġpack et -ĠForm ula -Ġpl ag -es ville -Ġshout ing -Ġo v -ĠR EC -ĠB ub -Ġn inth -Ġener g -Ġvalid ity -Ġup s -j ack -Ġneighbor ing -ĠN ec -ew orks -ĠH ab -are z -Ġsp ine -Ġevent ual -ĠLe aders -ĠC arn -Ġprob ation -Ġrom ance -ms g -ĠMechan ical -ER Y -R ock -Ġpart isan -N ode -ass ets -min ent -Ġforeign ers -Ġtest ify -ĠUs ually -l ords -ĠG ren -ĠPow ell -BI L -Ġs r -Ġadd ict -Ġshell s -Ġs igh -ĠY ale -tern ity -Ġ7 50 -E U -ĠR ifle -Ġpat ron -em a -ĠB annon -an ity -Ġtrop ical -ĠV II -c ross -Every thing -ĠIS O -Ġhum ble -ass ing -ĠF IG -Ġupd ating -ys on -Ġcal cium -Ġcompet ent -Ġste ering -Pro t -ĠS Y -ĠFin als -ĠR ug -15 9 -13 7 -ĠG olf -Ġ12 6 -Ġaccommod ation -ĠHug hes -Ġaest hetic -art isan -ĠTw ilight -Ġpr ince -ĠAgric ulture -ĠDis co -Ġpreced ent -Ġtyp ing -author ized -O ption -ĠA ub -l ishes -ach t -m ag -P eter -ĠU FO -mont on -ĠL ith -Ġa rom -Ġsec uring -Ġconf ined -priv ate -Ġsw ords -Ġmark ers -Ġmetab olic -se lect -ĠCur se -ĠO t -g ressive -Ġinc umb -ĠS aga -Ġpr iced -Ġclear ance -Cont ent -Ġdr illing -Ġnot ices -Ġb ourgeois -Ġv est -Ġcook ie -ĠGuard ians -ry s -in yl -Ġ12 4 -Ġpl ausible -on gh -ĠOd in -Ġconcept ion -ĠY uk -ĠBaghd ad -ĠFl ag -Aust ral -ĠI BM -Ġintern ationally -ĠWiki Leaks -I ED -Ġc yn -Ġcho oses -ĠP ill -Ġcomb ining -Ġrad i -ĠMoh ammed -def ense -atch ing -Sub ject -ic iency -Fr ame -Ġ{ " -Ġche ss 
-Ġtim er -19 0 -Ġt in -Ġord inance -emet ery -Ġacc using -Ġnotice able -Ġcent res -Ġl id -ĠM ills -img ur -Ġz oom -erg ic -Ġcomp ression -pr im -f ind -Ġsur g -Ġp and -ĠK ee -ĠCh ad -cell ence -oy le -Ġsocial ism -ĠT ravis -ĠM Hz -Ġgu ild -ALL Y -ĠSub scribe -ĠRel ated -Ġoccur rence -itch ing -Ġfict ional -Ġcr ush -ĠE A -c od -m ix -ĠTri ple -Ġretrie ve -Ġstimul us -Ġpsych iat -ĠDo or -Ġhomosexual ity -Ġelement ary -Ġcell ular -id ian -ĠL aun -Ġintrig uing -Ġfo am -ĠB ass -id i -its u -Ġass ure -Ġcongr at -Ġbusiness man -ĠBo ost -cl ose -Ġl ied -Ġsc iences -ĠO mega -ĠG raphics -Ġ< = -sp oken -Ġconnect ivity -S aturday -ĠAven gers -Ġto ggle -Ġank le -Ġnational ist -mod el -ĠP ool -ophob ia -V ar -ĠM ons -ator ies -Ġaggress ively -C lear -For ge -act ers -Ġhed ge -Ġpip es -Ġbl unt -Ġs q -Ġremote ly -W ed -as ers -Ġref riger -Ġt iles -Ġresc ued -Ġcompr ised -ins ky -Ġman if -avan augh -Ġprol ifer -Ġal igned -x ml -Ġtri v -Ġcoord ination -ĠP ER -ĠQu ote -13 4 -b f -ĠS aw -Ġtermin ation -Ġ19 0 -Ġadd itions -Ġtri o -Ġproject ions -Ġpositive ly -Ġin clusive -Ġmem br -19 90 -old er -Ġpract iced -ink le -Ar ch -Ġstar ters -ari us -Ġinter mediate -ĠBen ef -ĠK iller -Ġinter ventions -ĠK il -ĠF lying -In v -Ġprem ature -Ġpsych iatric -Ġind ie -Ġcoll ar -ĠRain bow -af i -Ġdis ruption -ĠFO X -cast ing -Ġmis dem -c ro -Ġw ipe -ard on -Ġb ast -ĠTom my -ĠRepresent ative -Ġbell y -ĠP O -ĠBre itbart -13 2 -Ġmess aging -Sh ould -Ref erences -ĠG RE -ist ical -L P -ĠC av -ĠC razy -Ġintu itive -ke eping -ĠM oss -Ġdiscont in -ĠMod ule -Ġun related -ĠPract ice -ĠTrans port -Ġstatist ically -orn s -Ġs ized -p u -Ġca f -ĠWorld s -ĠRod gers -ĠL un -ĠCom ic -l iving -Ġc ared -Ġclim bed -) { -Ġconsist ed -Ġmed ieval -fol k -Ġh acked -Ġd ire -ĠHerm ione -Ġt ended -ce ans -D aniel -w ent -Ġlegisl ators -Ġred es -g ames -Ġg n -am iliar -Ġ+ + -gg y -th reat -Ġmag net -Ġper ceive -Ġz ip -Ġindict ment -Ġcrit ique -g ard -ĠSaf e -ĠC ream -Ġad vent -ob a -Ġv owed -ous ands -Ġsk i -Ġabort ions -u art 
-Ġstun ned -Ġadv ancing -Ġlack ed -Ġ\ " -Ġsch izophren -Ġeleg ant -Ġconf erences -Ġcance led -ĠHud son -ĠHop efully -Ġtr ump -Ġfrequ encies -Ġmet eor -ĠJun ior -ĠFle et -ĠMal colm -ĠT ools -Ġ ........ -Ġh obby -ĠEurope ans -Ġ15 00 -ĠInt o -Ġs way -ĠApp ro -ĠCom pl -Comm unity -Ġt ide -ĠSum mit -ä » -Ġinter vals -ĠE ther -Ġhabit at -ĠSteven s -lish ing -ĠDom ain -Ġtrig gers -Ġch asing -Ġchar m -ĠFl ower -it ored -Ġbless ing -Ġtext ures -F ive -Ġliqu or -R P -F IN -Ġ19 62 -C AR -Un known -Ġres il -ĠL ily -Ġabund ance -Ġpredict able -r ar -Ġbull shit -le en -che t -M or -M uch -ä ¹ -Ġemphas ized -Ġcr ust -Ġprim itive -Ġenjoy able -ĠPict ures -Ġteam mate -pl er -ĠT ol -ĠK ane -Ġsummon ed -th y -ram a -ĠH onda -Ġreal izing -Ġquick er -Ġconcent rate -cle ar -Ġ2 10 -ĠErd ogan -ar is -Ġrespond s -ĠB I -Ġelig ibility -Ġpus hes -ĠId aho -Ġagg rav -Ġru ins -ur ations -Ġb ans -Ġan at -sh are -Ġgr ind -h in -um en -Ġut ilities -ĠYan kees -Ġdat abases -ĠD D -Ġdispl aced -Ġdepend encies -Ġstim ulation -h un -h ouses -ĠP retty -ĠRaven s -ĠTOD AY -Ġassoci ates -Ġthe rape -cl ed -Ġde er -Ġrep airs -rent ice -Ġrecept ors -Ġrem ed -ĠC e -Ġmar riages -Ġball ots -ĠSold ier -Ġhilar ious -op l -13 8 -Ġinherent ly -Ġignor ant -Ġb ounce -ĠE aster -REL ATED -ĠCur rency -E V -ãĥ ŀ -ĠLe ad -Ġdece ased -B rien -ĠMus k -J S -Ġmer ge -heart ed -c reat -m itt -m und -ĠâĢ ĭ -ĠB ag -Ġproject ion -Ġj ava -ĠStand ards -ĠLeon ard -Ġcoc onut -ĠPop ulation -Ġtra ject -Ġimp ly -Ġcur iosity -ĠD B -ĠF resh -ĠP or -Ġheav ier -ne ys -gom ery -Ġdes erved -Ġphr ases -ĠG C -Ġye ast -d esc -De ath -Ġreb oot -Ġmet adata -IC AL -Ġrep ay -ĠInd ependence -Ġsubur ban -ical s -Ġat op -Ġall ocation -gener ation -ĠG ram -Ġmoist ure -Ġp ine -ĠLiber als -Ġa ides -Ġund erest -ĠBer ry -Ġcere mon -3 70 -ast rous -ĠPir ates -Ġt ense -ĠIndust ries -ĠApp eals -ĠN ear -Ġè£ı ç -Ġlo vers -ĠC AP -ĠC raw -Ġg iants -Ġeffic acy -E lement -ĠBeh avior -ĠToy ota -Ġint est -P riv -A I -Ġmaneu ver -Ġperfect ion -Ġb ang -p aper -r ill -Ge 
orge -b order -in ters -ĠS eth -Ġcl ues -ĠLe vi -ĠRe venue -14 7 -Ġv apor -Ġfortun ate -Ġthreat ens -Ġve t -Ġdepend ency -ers ed -art icle -ĠBl izzard -Ġch lor -Ġmin us -ĠB ills -Ġcryptoc urrency -Ġmetabol ism -ter ing -Ġp estic -step s -ĠTre asure -ract ed -ĠConst ant -Ġtem p -13 9 -ĠDet ective -ur ally -Ġrecover ing -Ġcort ex -Ġ14 4 -cl osed -Ġprejud ice -aun ted -Ġstorm s -ĠN OW -Ġmach inery -Add ress -Ġcompe lled -27 0 -Ġdesp air -b ane -Ġveget able -Ġbed s -Lear n -Ġcolor ful -Ġsp ike -Ġmarg ins -Ġsymp athy -Ġworks hop -ĠC BC -S at -Ġburn s -ĠG ender -Ġ12 9 -ĠC able -Ġdeb ts -ĠThe resa -Ġreflect ing -Ġa irst -Ġr im -ram id -Ġweakness es -W rit -ogg le -t i -ĠCh arge -Ġwe ighed -Ġ( . -Ġl aughter -Ġrou ter -ĠDemocr acy -D ear -Ġhas ht -Ġd y -Ġhint s -run ning -Ġfin ishes -ar us -M ass -res ult -asc us -Ġv intage -Ġcon qu -Ġwild ly -ac ist -Ġl ingu -Ġprot agonist -st rom -te enth -ĠSol o -m ac -f illed -Ġre nown -it ives -Ġmot ive -ĠAnt ar -ĠM ann -ĠAd just -Ġrock ets -Ġtrou bling -e i -Ġorgan isms -ass is -Christ ian -Ġ14 5 -ĠH ass -Ġsw all -Ġw ax -ĠSurv ival -V S -ĠM urd -v d -stand ard -Ġdrag ons -Ġacceler ation -r ational -f inal -Ġp aired -ĠE thereum -Ġinterf aces -Ġres ent -Ġartif acts -Å « -are l -Ġcompet itor -ĠNich olas -ĠSur face -c pp -ĠT ot -Ġeconom ically -Ġorgan ised -Ġen forced -in ho -Ġvar ieties -Ġab dom -ĠBa iley -id av -ĠSal v -p aid -Ġalt itude -ess ert -ĠG utenberg -are a -op oulos -Ġprofess ors -igg s -ĠF ate -he y -Ġ3 000 -D ist -Ġtw ins -c ill -ĠM aps -Ġtra ps -Ġwe ed -ĠK iss -Ġy oga -Ġrecip ients -ĠWest minster -Ġpool s -ĠWal mart -18 8 -ĠSchool s -att ack -ĠAR M -par agraph -W arning -j l -Ġself ish -anche z -ĠHe ights -F re -ĠS oph -Ġ -------------------------------- -t ml -33 3 -Ġraid s -Ġsatell ites -KE Y -Ġlast s -Ñ Ĥ -In s -ĠD ame -Ġunp redict -// / -gh ai -Ġart illery -Ġcru ise -Ġg el -ĠCabin et -Ġbl ows -ĠE sp -Ġprox imity -ot he -ĠSk ills -ĠU pper -ob o -ĠN DP -Ġenjoy s -Ġrepe ating -ĠConst ruction -ĠQuest ions -H illary -Ġu int 
-Ġprocess ors -ĠGib son -ĠMult iple -q a -ĠB om -ĠM iles -vent ional -Ġhur ts -s kin -ĠA IDS -Ġadvis ers -ĠR oot -Ġmethod ology -ĠD ale -Ġdet on -ĠKnow ledge -sequ ently -Ġ12 1 -Ġconnect s -C y -ĠD anger -Ġcontribut ors -ĠB ent -Ġbr ass -ĠGun s -int o -ĠFort une -Ġbro ker -bal ance -Ġlength s -Ġv ic -Ġaver aging -Ġappropri ately -ĠCamer a -Ġsand wich -ĠCD C -Ġcoord inate -Ġnav ig -Ġgood ness -l aim -Ġbra ke -Ġextrem ist -ĠW ake -ĠM end -ĠT iny -ĠC OL -ĠR F -ĠD ual -ĠW ine -C ase -Ġref ined -Ġl amp -L ead -Ġb apt -ĠCar b -ĠS add -ĠMin neapolis -PD F -Ear ly -ĠH idden -I ts -ĠT IME -Ġp ap -Ġcommission ed -ĠF ew -ĠCol ts -ĠB ren -Ġbot hered -Ġlike wise -Ex per -ĠSch w -c ry -n n -ĠM itch -im on -M G -b m -UM P -r ays -Ġregist ry -Ġ2 70 -ach ine -re lla -ant ing -00 000 -Ġru ined -sp ot -Ġt a -Ġmaxim ize -Ġincon ven -D ead -H uman -En abled -ĠMar ie -Ġch ill -ĠParad ise -Ġstar ring -ĠLat ino -ĠProt ocol -ĠE VER -Ġsuppl iers -m essage -ĠBro ck -Ġser um -âĸĪâĸĪ âĸĪâĸĪ -Ġen comp -Ġamb ition -ues e -Ġar rows -And rew -Ġanten na -Ġ19 61 -ĠB ark -Ġb ool -ãĤ ª -ĠSt orage -Ġrail way -Ġtoug her -ĠC ad -Ġwas hing -P y -' ] -em bed -ĠMem phis -ack le -Ġfam ously -ĠF ortunately -ov ies -Ġmind set -Ġsne ak -ĠD h -RA W -ĠSim pson -Ġliv est -Ġland mark -Ġc ement -L ow -Ġthr illed -ĠCour se -in el -Ġch uck -id ate -gl obal -Ġwh it -Ġ � -ad ays -s ki -ĠS V -Ġvir uses -30 6 -ĠResp ons -Ġthe aters -ĠBr anch -ĠGene va -ĠM K -Ġunbel iev -Ġcommun ist -Orig inal -ĠRe ceived -ĠTrans fer -ĠAr g -In put -ĠStr ategy -Ġpal ace -the ning -D ri -Ġsent encing -umbn ail -Ġp ins -re cy -Ġs iblings -Get ting -ĠB U -ĠNorth west -Ġprolong ed -ĠSak ura -C omb -ĠB our -Ġinadequ ate -ĠK ash -Ġus ername -ĠImpro ve -Ġbatt ling -ĠM AC -Ġcurric ulum -Ġs oda -ĠC annon -Ġsens ible -sp ons -De cember -Ġw icked -ĠP engu -Ġdict ators -ĠHe arts -og yn -Ġsimilar ities -ĠSt ats -Ġh ollow -it ations -": [ -Ġh over -ĠList en -s ch -S und -Ġc ad -ĠPar ks -Ġl ur -Ġhy pe -ĠL em -N AME -is ure -Fr iday -Ġshoot s -Ġclos es 
-Ġd b -ĠR idge -ĠDiff erent -Ġrepl ies -ĠBroad way -op ers -Ġint oler -ĠZe us -akes pe -Ġpropri etary -Ġrequest ing -Ġcontro llers -ĠM IN -im edia -be cca -Ġexp ans -Ġoil s -B ot -ĠCh and -Ġpr inter -Ġto pped -ĠP OL -ĠEar lier -S ocial -av in -Ġdecre ases -ĠSe b -Ġspecific ations -ĠBl ast -ĠK urt -Ġfre el -B rown -Ġdil ig -ro e -ĠPro blem -ĠQu ad -Ġdecent ral -ĠV ector -an ut -Ġplug ins -ĠGreg ory -Ġfuck ed -el ines -ĠAmb assador -t ake -Ġcle ans -ong yang -An onymous -st ro -" } -al ine -ĠO dd -ĠE ug -2 16 -Ġbo il -ĠP owers -Ġnurs es -Ob viously -ĠTechn ical -Ġexceed ed -OR S -Ġextrem ists -Ġtr aces -ex pl -Ġcom r -ĠS ach -) / -Ġm asks -Ġsc i -B on -Ġreg ression -we gian -Ġadvis or -it ures -ĠV o -ex ample -ĠInst ruct -Ġs iege -Ġredu ctions -pt r -Ġstat utory -Ġrem oves -Ġp uck -red its -Ġbe e -Ġsal ad -Ġpromot ions -ĠJosh ua -with standing -ET H -ĠCh a -im us -Ġexpend iture -aun ting -Ġdelight ed -Ġ15 5 -be h -Ġcar pet -ĠSp art -Ġj ungle -l ists -Ġbull ying -ĠNob el -ĠGl en -Ġreferen ced -Ġintrodu ces -se in -Ġcho pped -gl ass -ĠW rest -Ġneutral ity -Ġâ Ļ -Ġinvestig ator -Ġshel ves -Ġun constitutional -Ġreprodu ction -Ġmer chant -m ia -Ġmet rics -Ġexplos ives -ĠSon ia -Ġbod ily -Ġthick ness -Ġpredomin antly -ĠAb ility -Ġmon itored -IC H -Ġ] . 
-ĠMart inez -Ġvis ibility -Ġqu eries -Ġgen ocide -ĠWar fare -Qu ery -Ġstud ios -Ġemb ry -Ġcorrid or -Ġclean ed -com plete -ĠM H -Ġenroll ment -ING S -Ġimpact ed -Ġdis astrous -ĠY un -ĠCl aire -ĠBas ically -y t -uster ity -Ġindirect ly -w ik -Ġd od -ĠCar r -Ġam p -Ġprohib it -ĠIn itial -ĠR d -ij i -Ġeduc ate -c orn -i ott -ĠBeaut y -Ġdetect ive -ĠCon n -s ince -Ġst agger -Ġob ese -Ġb ree -olog ic -is se -walk er -Ġbl ades -Ġlaw ful -fun c -ĠBeh ind -Ġappet ite -Ġ( * -Ġt ennis -Ġoff spring -Ġj ets -Ġstruct ured -Ġafore mentioned -N ov -Ġsc aling -f ill -Ġst ew -Ġcur b -ĠStep han -ed In -S F -ob ic -é ŃĶ -ou g -ĠM M -Ġgen etically -ope z -13 6 -Ġu mb -anc ers -Ġcoh ort -Ġmerch andise -Ġimp osing -ĠLegisl ature -ĠArch ive -iv ia -ĠN aval -Ġoff ences -Ġmir acle -Ġsn apped -Ġf oes -Ġextensive ly -ĠR af -Ġc ater -ed ience -K it -ĠB in -Ġrecomm ends -ĠC ities -Ġrig id -ĠRE AD -ĠNob le -ĠT ian -Ġcertific ates -ant is -o iler -ĠBudd hist -d id -Ġsurvey ed -Ġdown ward -Ġprint s -ĠMot ion -ron ics -ĠS ans -oss ibly -u ctions -Ġcolon ies -ĠDan ish -un it -Ġsp oil -Ġadvis ory -ber ries -Pl an -Ġspecific ation -op hers -ĠRes ource -Ġsh irts -prising ly -commun ications -Ġtriv ial -Ġmention ing -ise xual -Ġsupp lements -Ġsuper vision -B P -v or -Ġw it -Ġco oldown -Ġplaint iff -ĠReview s -ĠS ri -ĠM int -ĠSug ar -Ġafter ward -ĠPri est -ĠInvest ment -og ene -ĠT aking -Ġstretch ing -Ġinflamm ation -ĠTe hran -Ġl ining -Ġfree zing -ĠEnt ity -Ġins piring -spe cial -pr ice -Ġsu e -ĠP orter -oun ge -ET A -ĠD erek -ĠLu is -u o -ym ph -Ġex terior -ih il -ĠAsh ley -in ator -Ġnut rients -ĠTh rones -Ġfin ances -ĠIn spect -Ġspe cially -ĠRequ ired -ĠP TS -ĠViol ence -oint ed -sh ots -Ġex cerpt -co on -IN S -ĠG ri -Ġrecogn ised -We ek -You ng -Ġv om -is le -ĠCur ry -ĠBudd h -Ġnot ebook -Ġd urable -/ ? 
-ĠG ad -ĠP upp -Ġforg ive -p ark -Ġpersonal ities -an alysis -cl amation -Ġelev ator -Ġware house -ĠR ole -un n -Ġillust ration -ĠSc an -Ġatmosp heric -Im port -AN C -rict ed -f u -01 0 -Ġar che -Ġreward ed -akespe are -Ġintern ally -ĠR BI -alk er -Ġeleph ant -ow itz -ĠP izza -Ġbip artisan -é s -Ġslow ed -ĠSt ark -Ġover ride -OU S -Ġ3 20 -undred s -ĠDe ck -ĠC ensus -be e -14 6 -ot or -Ġ ip -Ġu b -oc ations -ĠBut ton -r ice -Ġc ripp -ff f -Ġorig inated -Ġoverwhel med -app a -Ġfore most -âĢ ij -ĠL EG -re lease -eat ured -at ches -Ġre ps -Ġl ending -ĠRe ference -ĠCl ient -16 5 -vent h -Com plete -ĠPat rol -Ġsw orn -c am -Ġshut tle -ĠR alph -Ġh ometown -- , -on al -ĠB P -å ı -Ġpersu ade -ĠAlex and -Ġcomb ines -Ġv ivid -ĠL ag -Ġenc oding -Ġsal vation -w en -ĠRec overy -i ya -Un iversity -ĠB iden -Ġbud gets -ĠTex ans -f its -Ġhon ored -Ġp ython -T D -## # -cl one -Ġbl ink -ĠL iquid -Ġunemploy ed -Ġcl ashes -ĠCoun sel -Ġdirect ing -Ġpun ct -ĠFal cons -Ġsh ark -ĠDam ascus -Ġje ans -Ġemb ark -Ġse ize -Ġup wards -2 80 -ĠE z -ĠAny thing -Ġex otic -l ower -ĠCreat or -ĠU m -Ġsubur bs -ber ger -ĠW end -Ġm int -ĠX X -ĠD ro -Ġsuff ers -Ġher b -t ree -Ġfrag ile -Ġflood ed -ĠAl cohol -ole an -ny der -ĠK O -F ram -Ġ13 6 -Ġow ed -ĠMe lee -ĠH ash -Ġwh isk -Ġsu do -r r -Qu ick -app ro -Ġi i -ĠEx amples -he e -Ġpromot es -per ature -k ar -ĠHon or -Ġs odium -ĠL if -ros so -intend ent -Ġcorrespond ent -F ound -sec ret -Ġident ifies -ag ne -Ġl ou -ĠP P -Ġcoinc idence -m ove -Ġmilit ia -Ġinf iltr -ĠPrim ary -Ġpitch ing -ĠI b -ĠGO OD -ãĤ ¸ -ĠW izards -ir al -ĠVen us -R R -ĠâĢ ķ -ĠCase y -Ġsad ly -Ġadm ire -Ġembarrass ed -c b -M el -Ġtub es -Ġbeaut ifully -ĠQueens land -Bel ow -re z -qu et -ple asant -Ġ « -C amp -Ġdec isive -19 98 -ĠL amb -ut ton -h n -ĠJ agu -au nder -ĠC ord -Ġcl erk -Ġca ffe -Ġwip ed -Ġre im -ĠMount ains -Ġimprison ed -Ġdevelop s -ĠP ra -Ġmodel ing -Any one -ance l -ĠS it -Ġshield s -Ġl awn -Ġcard iovascular -Ġdemonstr ating -Ġpar se -ĠIsrael is -Ġeuro s -14 3 -Ġgl orious 
-ins ki -ec d -Ġcondition ing -Ġhel pless -Ġmicro sc -ĠHar bor -Ġst akes -Ġ2 60 -Ġun equ -ĠFl oyd -Ġd amp -Ġappar atus -ĠLaw s -Ġcoun ters -Ġindu ce -at able -ĠAh med -Ġsl am -N ovember -Ġpers ist -Ġim minent -á n -Ġsh red -Ġph ases -ĠEd monton -ĠArm strong -ĠMe et -ĠK itty -Ñ Ģ -c irc -ĠAd ult -Ġa rose -ĠX en -D an -g ow -Ġsuper f -ĠAd mir -Ġend ure -Ġkey word -yr us -Ġy arn -Ġpath way -ĠHop kins -mid t -Ġcens orship -d ependent -Ġinstruct or -S ources -Ġto e -Ġball oon -N ob -Ġsw ear -ĠCast ro -Ġgl oss -ĠK avanaugh -Ġremark ably -Ph otos -ĠN om -ĠS outheast -y ers -Ġvalid ation -Ġcann on -ĠVict ory -ĠPier re -Ġcaut ious -Aud io -Ġf etch -ĠG ift -ĠH yp -Ġrem edy -Z E -Ġsc ent -Ġbe ard -ĠR ut -- " -Ġpat ents -H y -Ġun just -Ġpot ato -Ġforth coming -Ġche f -ĠR ift -aff e -ĠR OM -ĠL aunch -Ġp ads -ĠNe o -Ġon set -Ġsquee ze -s afe -Ġpref ix -ĠT M -ĠN early -ĠClin ical -ĠM ental -ot iation -ĠUn ic -ant ry -ĠC ir -Ġep it -à ¦ -Ġextract ed -verse ly -ri ad -Ġstr ains -Ġto ps -Ġpo em -ĠRand y -ĠMap le -TH ER -up iter -ĠSS D -ļ é -Ġun con -per ing -Ġsle pt -in ers -Ġunder water -ĠEv idence -g one -20 5 -Ġhistor ians -Ġsynt hesis -Ġf rog -b asketball -Ġvibr ant -Ġsub ord -Ġ3 65 -ĠD ial -Ġcooper ate -HA HA -Ġgreet ed -15 8 -Ġj azz -Ġinto x -ĠWalk ing -Ġsuper visor -ĠF usion -ĠMer cedes -s end -H am -s d -n l -Ġtour s -ĠF IFA -Ġcul p -g d -30 4 -Ġple as -Ġillust rates -ĠColomb ia -Ġhighlight ing -ĠSum mary -Ġexp osing -ĠD ru -Ġir ony -r itional -ĠCar roll -ĠEll is -P ict -ĠR apt -Ġad apter -Ġun m -Ġcor pse -Ġceleb rities -D en -at um -ĠAp ocalypse -ĠW ag -lin ing -Ġhorm ones -R ub -ĠX i -ĠV aults -20 8 -alky rie -inos aur -Ġfeed s -v ity -Ġdefe ating -W ait -Ġemphas ize -ĠSteel ers -yr inth -le ys -ĠWhe never -Current ly -ĠCl ock -Ġcollect ively -any on -ĠJ P -Ġment ality -Ġdownload s -Ġsurround ings -ĠBarn es -Ġflags hip -Ġindic ators -Ġgra pp -Jan uary -ĠElement al -ĠAthen a -ib al -Ġs ights -Ġcap ita -ĠTreat y -Ġvo iced -ĠG az -let te -Ġy a -Ġexp ired -Leg end -H ot -n 
ature -Ġunst able -Ġ2 80 -à º -Com ment -AL E -Ġquest s -Ġhand ler -n is -Ġvers atile -Ġconce al -enge ance -ĠInter active -Ġobs essed -ĠDog s -Ġcr acked -S ound -s v -ĠD ylan -ro ads -f x -ĠCath olics -ĠH ag -Ġsl ammed -Ġgl owing -s ale -Ġtiss ues -ĠCh i -ne e -Ġc her -s ic -ur rection -Ġb acon -ul atory -) ." -Ġir regular -FOR M -ass ed -Ġintention al -Ġcompens ate -ĠSpe aking -ĠS ets -15 3 -Ġconvent ions -b ands -em ade -Ġe cc -ĠWin ston -ĠAssass in -ĠBelg ian -Ġdepend ence -Ġnic he -Ġb ark -ĠJ azz -Ġdisadvant age -Ġgas oline -Ġ16 5 -çļ Ħ -ess a -mod ule -ang ular -O Y -ĠTreat ment -it as -ol ation -ĠArn old -Ġfe ud -ĠN est -Ġthe atre -ew ater -Ġmin ors -olic y -ĠH aven -div ision -Ġtr unk -F ar -ĠP ull -Ġcapt uring -Ġ18 00 -ĠTe en -Ġex empl -Ġclin ics -ĠB urg -Ġsubst it -Ġpay load -ĠL av -ĠT roy -ĠW itness -Ġfrag ments -Ġpass words -Ġg ospel -ĠG in -Ġten ants -ol ith -S ix -Pre vious -ĠAg es -ĠDar win -Ġbl at -Ġem pathy -sm ith -b ag -ĠE cho -ĠC amb -ĠM add -ĠB oo -Ġred e -ĠBurn ing -Ġsmooth ly -ĠAd rian -ĠV ampire -ĠMon sters -ste am -Sty le -M a -re a -ĠD war -aly st -urs or -Ġelim ination -Ġcrypt o -ch t -ĠE ternal -âĢ¦ ] -ĠS orce -I ll -N ER -Ġu h -Con clusion -w age -Ġresp ir -Ġrem inis -het ical -Ġg y -Ġutil ized -ic idal -Ġ19 00 -Ġhun ters -ĠSw an -ĠRe act -Ġvis itor -ĠThanks giving -30 8 -Post s -Ġh ips -19 97 -om ers -Ġkn ocking -ĠVeh icle -Ġt il -Ġ13 8 -Ġm i -ĠInvest igation -ĠKen ya -Ġcas ino -Ġmot ives -Ġreg ain -re x -Ġweek ends -Ġstab bed -bor o -Ġexplo ited -ĠHA VE -ĠTe levision -c ock -Ġprepar ations -Ġende av -ĠRem ote -ĠM aker -ĠPro du -ĠEv an -Ġinform ational -ĠLouis ville -15 4 -ĠDream s -Ġpl ots -ĠRun ner -Ġhur ting -Ġacad emy -ĠMont gomery -n m -ĠL anc -ĠAl z -2 10 -el ong -Ġretail er -Ġar ising -Ġrebell ion -Ġbl onde -play ed -Ġinstrument al -C ross -Ġret ention -Ġtherape utic -Ġse as -Ġinfant ry -ĠCl int -Ġprompt ing -Ġbit ch -Ġst ems -ĠK ra -Ġthe sis -ĠB og -ru ed -Ġk ings -Ġcl ay -ific ent -ĠY ES -ĠTh ing -ĠCub s -vey ard -els h -in 
arily -ĠE y -ĠRoll ing -Ġev olving -Ind ia -Ġrecogn izes -Ġgrad uation -is ers -Ġfert ility -ĠMil an -Comm and -Ġbox ing -Ġ19 43 -Ġgl uten -ĠEm ir -Ġid ol -Ġcon ceived -ĠCre ation -Mer it -udd y -uss ions -ĠLie utenant -iet al -Ġunch anged -ĠSc ale -ĠCrime a -ball s -ator ial -Ġdepth s -Ġempir ical -Ġtrans m -Ġuns afe -miss ible -com fort -15 6 -Ġmechan ic -00 2 -l ins -Ġsm oked -P os -Ġslow ing -Ġl av -Tex as -Ġche ating -ĠMet ropolitan -eth yl -Ġdiscover ing -as se -Ġpen cil -ĠPy ongyang -Ġclos et -ĠShe et -ĠEnt ry -ou stic -Ġmy st -er ate -ari at -Ġminer als -Ġmusic ian -ĠP ul -ĠM az -24 9 -Ġper missions -Ġ iv -en ary -ick ers -ĠB ing -he a -en able -Ġgri ev -Ġassert ed -ĠColon el -Ġaff idav -w o -Ġse ated -ĠR ide -Ġpaint ings -ĠP ix -Ġ13 7 -ish i -umb ai -g otten -ĠEar l -Ġin ning -Ġc ensus -Ġtrave lled -ĠCons ult -18 5 -b ind -Ġsimpl icity -Ġoverlook ed -ĠHelp ful -Ġmon key -Ġoverwhelming ly -Bl ood -ĠFl int -ĠJ ama -ĠPres ent -ĠR age -ĠT A -pt ive -Ġturn out -w ald -ĠD olphins -ĠV PN -Ġon ion -Ġcraft ing -m ma -ĠMerc ury -Ġarr ange -Ġalert s -ĠO T -zb ollah -Ġg ases -ĠRichards on -s al -l ar -Ġfro st -Ġlower ing -Ġacc laim -Ġstart ups -ĠG ain -ess ment -Ġguard ian -äº º -ĠP ie -ĠL inks -Ġmer its -Ġaw ake -Ġparent al -Ġexceed s -Ġid le -ĠPil ot -Ġe Bay -ĠAc cept -ipe g -C am -ĠK ot -Ġtrad ers -olit ics -unk er -ĠP ale -os i -an mar -Ġ19 47 -ĠF ell -est ial -it ating -G F -ĠS r -if ted -Ġconnect or -ĠB one -ill es -2 60 -h ma -Ġoverl ap -ĠGit Hub -Ġclean er -ĠBapt ist -ĠW AS -Ġlung s -Ñ ģ -ĠB UT -Ġc ite -Ġpit ched -reat ment -Ġtro phies -ĠN u -38 6 -ĠPr ide -Ġattend ees -[ ] -17 9 -Ġspat ial -Ġpri zes -ĠRel igion -Ġshow case -ĠC ategory -vid ia -T arget -Pro perty -? 
, -Ġf usion -p ie -ĠU CLA -Ġsound track -Ġprin cess -ĠC aval -sh ould -Ġlim bs -Back ground -Ġlone ly -Ġc ores -ĠT ail -she et -Ġ13 2 -R a -ãĤ « -ĠB olt -Ġbook ed -Ġadmin ister -Ġequ als -w y -Ġobserv ing -ĠBar on -ĠAd obe -Ġv irgin -ĠSocial ist -M ove -gh azi -ĠLind a -2 12 -Ġbre wing -Ġmerch ants -bur se -Ġdiv or -Ġmet als -ĠN er -Ġsum s -ĠEn emy -Ġen vision -Ġgrant ing -ĠH oney -ĠSk yrim -Ġsoc io -gr aded -Ġselect ive -W ASHINGTON -Ġ19 48 -ĠSir ius -ĠG ross -act ivity -ĠI van -Ġfur ious -BS D -ĠPre vious -Ġrespons ive -Ġchar itable -Ġle aning -ĠP ew -Ġviol ates -\\\\ \\\\ -ĠCom ing -w ire -Ġpo et -Ġres olutions -comm and -ĠPortug uese -Ġnick name -Ġde af -Feb ruary -Ġrecogn ise -Ġentire ty -Ġseason al -pl aced -ĠTe legraph -Ġmicro phone -our ing -Ġgr ains -Ġgovern ed -Ġpost p -ĠW aters -in ement -Ġund ocumented -ĠCom cast -Ġf ox -Ġassault s -re on -man y -ĠJen kins -ĠAny way -Ġassess ments -Ġdown s -ĠM ouse -Ġsuper b -k t -ĠD ow -Ġtax ation -4 01 -Ġsm iles -Ġundert aken -Ġex h -Ġenthusi astic -Ġtw ent -Ġgovernment al -Ġautonom y -ĠTechn ologies -ĠCh ain -Ġpreval ent -f b -Ġnic otine -og ram -j ob -Ġawa iting -ĠMen u -Ġdep uties -k ov -ish ops -But ton -ĠShan ghai -Ġdies el -ĠD uck -R yan -ĠPC s -N F -j ury -ent e -Ġinacc urate -edd y -Wh atever -Ġshow c -ĠN ad -od us -et r -Ġplaint iffs -ĠW OR -ĠAss ange -Ġpriv at -Ġpremium s -Ġt am -UR L -Ġel ites -ĠR anger -otten ham -ĠH off -ĠAt hens -Ġdefin ite -Ġs ighed -Ġeven ly -2 11 -ĠAm ber -ak ia -Ġmail ing -Ġcr ashing -ĠConfeder ate -ru gged -W al -ĠDep ths -Ġjuven ile -Ġreact or -Introdu ction -ĠDel uxe -19 95 -ĠS anchez -ĠM ead -iv able -: - -ĠPlan ning -ĠT rap -qu in -ĠProt ect -ve red -In formation -Ġkid ney -inn amon -l as -Ġpolic ing -Ġtoler ate -ĠQ i -Ġbi ased -F ort -ĠK i -s ave -Ġprivile ged -Ġbe asts -ĠGl as -ĠC inem -Ġcome back -Sund ay -Ġext inction -h ops -Ġtrans mit -Ġdoub les -ĠFl at -16 7 -Ġdis puted -Ġinjust ice -f oo -V ict -role um -ĠJul ie -Con text -ĠR arity -iss ue -Comp onent -Ġcounsel ing -an 
ne -d ark -Ġobject ions -u ilt -Ġg ast -Ġpl ac -Ġun used -ãĥ ĩ -ĠT rial -ĠJ as -hed ral -ob b -Ġtempor al -ĠPR O -ĠN W -ĠAnn iversary -L arge -Ġther m -Ġd avid -Ġsystem ic -ĠSh ir -m ut -ĠNe pt -add ress -Ġscan ning -Ġunderstand able -Ġcan vas -C at -ĠZ oo -Ġang els -L O -ĠStat ement -ĠS ig -ov able -ĠA way -sh aring -ocr ats -st ated -Ġweigh ing -N or -w ild -B ey -Ġaston ishing -ĠReyn olds -Ġop ener -Ġtrain er -Ġsurg ical -p n -Ġadjust ing -whe el -Ġf rown -erv ative -Ġsusp end -With in -te in -Ġobst acle -Ġliber ties -ym es -Ġur anium -ans om -an ol -ub a -ĠL oss -Ġa rous -ĠHend erson -W ow -s pl -c ur -ĠÂ Ń -Ġtheir s -Dam age -Ġdownload ing -Ġdisc ern -ĠSt o -ĠFl a -Ġh ath -ĠA j -Ġun pleasant -Europe an -exp ensive -Ġscreens hot -ĠU V -Ġall ied -ĠPers ian -Ġmonop oly -Ġat om -ĠReds kins -"> < -Ġcan cell -Ġcinem a -13 1 -f air -ĠAlf red -Ġd uck -arg s -22 3 -ĠIS I -Ġsign aling -in ar -Ġlaugh s -Ġfor wards -Ġreck less -Ġlisten ers -at ivity -Ġvast ly -n ant -L ess -ĠHun ting -ĠScient ific -IT ED -Ġkn ight -ĠH TC -us a -t mp -Ġr ude -ĠLegend ary -Ġar ises -B ad -ĠCl aim -pe g -Ġreal ities -Th ink -Ġ ° -Ġro de -Ġstri ve -Ġan ecd -Ġshort s -Ġhypot hes -Ġcoord inated -ĠGand hi -ĠF PS -R ED -Ġsuscept ible -Ġshr ink -ĠCh art -Hel p -Ġ ion -de ep -rib es -ĠK ai -ĠCustom er -Sum mary -Ġc ough -w ife -Ġl end -Ġposition ing -Ġlot tery -ĠC anyon -Ġf ade -Ġbron ze -ĠKenn y -Ġbo asts -ĠEnh anced -rec ord -Ġemer gence -Ġa kin -ĠB ert -it ous -âĸ ij -Ġst ip -Ġexch anged -om ore -als h -Ġreserv oir -Ġstand point -W M -Ġiniti ate -Ġdec ay -Ġbrew ery -Ġter ribly -Ġmort al -lev ard -Ġrev is -N I -el o -Ġconf ess -ĠMS NBC -Ġsub missions -Cont roller -Ġ20 2 -ĠR uth -} ); -ĠAz ure -Ġ ." 
-20 6 -ĠMarket ing -Ġl aund -ien cies -Ġrenown ed -ĠT rou -ĠN GO -ble ms -Ġterr ified -Ġwar ns -Ġper t -Ġuns ure -4 80 -ale z -ult z -ĠOut side -Ġst yl -ĠUnder ground -Ġp anc -Ġd ictionary -Ġf oe -rim inal -ĠNor wegian -Ġj ailed -Ġm aternal -é e -ĠLu cy -c op -Ch o -Ġuns igned -ĠZe lda -ĠIns ider -ĠContin ued -Ġ13 3 -ĠNar uto -ĠMajor ity -16 9 -ĠW o -ãĤ ĵ -Ġpast or -Ġinform al -Ð ½ -an throp -jo in -ãģ Ĺ -it ational -N P -ĠWrit ing -f n -ĠB ever -19 5 -Ġy elling -Ġdr astically -Ġe ject -Ġne ut -Ġth rive -ĠFre qu -ou x -Ġpossess es -ĠSen ators -ĠD ES -ĠSh akespeare -ĠFran co -ĠL B -uch i -Ġinc arn -Ġfound ers -F unction -Ġbright ness -ĠB T -Ġwh ale -ĠThe ater -m ass -ĠD oll -S omething -Ġecho ed -ĠHe x -c rit -af ia -Ġgodd ess -Ġele ven -ĠPre view -ĠAur ora -Ġ4 01 -uls ive -ĠLog an -in burgh -ĠCent ers -ĠON LY -ĠA id -Ġparad ox -Ġh urd -ĠL C -D ue -c ourt -Ġoff ended -Ġeval uating -ĠMatthew s -Ġto mb -Ġpay roll -Ġextra ction -ĠH ands -if i -Ġsuper natural -ĠCOM M -] = -dog s -Ġ5 12 -ĠMe eting -Rich ard -ĠMax imum -Ġide als -Th ings -m and -ĠReg ardless -Ġhum ili -b uffer -L ittle -ĠD ani -ĠN ak -Ġliber ation -ĠA be -ĠO L -Ġstuff ed -ac a -ind a -raph ic -Ġmos qu -Ġcampaign ing -Ġoccup y -S qu -r ina -ĠW el -ĠV S -Ġphys ic -Ġp uls -r int -oad ed -ET F -ĠArch ives -Ġven ues -h ner -ĠTur bo -Ġl ust -Ġappeal ed -que z -il ib -ĠTim othy -Ġo mn -d ro -Ġobs ession -ĠSav age -19 96 -Gl obal -J es -2 14 -Ġsl iding -Ġdisapp ro -ĠMag ical -Ġvolunt arily -g b -ane y -Ġprop het -ĠRe in -ĠJul ia -ĠW orth -aur us -Ġb ounds -ie u -)) ) -Ġcro re -ĠCitiz en -S ky -Ġcolumn ist -Ġseek ers -ond o -IS A -ĠL ength -Ġnost alg -Ġnew com -Ġdet rim -ent ric -3 75 -ĠG E -Ġaut op -Ġacadem ics -App Data -ĠS hen -Ġid iot -ĠTrans it -Ġteasp oon -W il -K O -ĠCom edy -> , -Ġpop ulated -W D -Ġp igs -ĠO culus -Ġsymp athetic -Ġmar athon -19 8 -Ġseiz ure -s ided -Ġd op -irt ual -L and -ĠFl oor -osa urs -... 
] -Ġl os -Ġsubsid iary -E Y -ĠPart s -ĠSt ef -ĠJud iciary -Ġ13 4 -Ġmir rors -Ġk et -t imes -Ġneuro log -Ġc av -ĠGu est -Ġtum or -sc ill -ĠLl oyd -E st -Ġcle arer -Ġstere otypes -Ġd ur -not hing -Red dit -Ġnegoti ated ----------------- -------- -23 5 -Ġfl own -ĠSe oul -ĠRes ident -ĠS CH -Ġdisappear ance -ĠV ince -g rown -Ġgrab s -r il -ĠInf inite -ĠTw enty -Ġpedest rian -Ġjer sey -ĠF ur -ĠInf inity -ĠEll iott -Ġment or -Ġmor ally -Ġob ey -sec ure -iff e -Ġantib iotics -ang led -ĠFre eman -ĠIntrodu ction -J un -Ġm arsh -ic ans -ĠEV ENTS -och ond -W all -icult y -Ġmisdem eanor -Ġl y -Th omas -ĠRes olution -Ġanim ations -ĠD ry -Ġinter course -ĠNew castle -ĠH og -ĠEqu ipment -17 7 -Ġterrit orial -Ġarch ives -20 3 -Fil ter -ĠMun ich -Ġcommand ed -ĠW and -Ġpit ches -ĠCro at -Ġrat ios -ĠM its -Ġaccum ulated -ĠSpecific ally -Ġgentle man -acer b -Ġp enn -Ġa ka -ĠF uk -Ġinterven e -ĠRef uge -ĠAlz heimer -Ġsuccess ion -oh an -d oes -L ord -Ġsepar at -Ġcorrespond ence -Ġsh iny -P rior -Ġs ulf -Ġmiser able -Ġded ication -( ). 
-Ġspecial ists -Ġdefect s -ĠC ult -ĠX ia -Ġje opard -ĠO re -Ab ility -Ġle ar -Ġamb itions -ĠB MI -ĠArab s -Ġ19 42 -Ġpres ervation -ific ate -Ġash amed -l oss -ĠRest aur -Ġrese mble -Ġen rich -ĠK N -ĠCl an -fl oat -Ġplay able -IT T -Ġharm ony -arr ison -ĠWe instein -w ere -Ġpoison ing -ĠCom put -ĠWord Press -m ajor -ĠVal ve -F an -ĠTh row -ĠRom ans -ĠDep ression -ad os -Ġtort ured -Ġbal ancing -bott om -Ġacqu iring -ĠMon te -ard i -Ġa ura -Ġ# # -ĠStand ing -ĠAtl as -C F -Ġintr ins -ĠBen ghazi -Ġcamp ing -Ġt apped -bl ade -st rous -ĠR abb -ĠW ritten -t ip -ĠNe igh -ster dam -ĠAll ow -ĠHe aling -ĠR hod -n um -Ġcaffe ine -ĠPer cent -Ġbo o -Ġapp les -30 5 -Ġwel coming -Ġappl aud -Ġa usterity - ± -ĠRe ality -ef e -å ® -Ġsu cks -Ġtab s -ĠPay Pal -Ġback pack -Ġgif ted -abul ary -ĠSc out -ir teen -Ġch in -Ġo mitted -Ġnegative ly -Ġaccess ing -ĠE arn -Ġambul ance -Ġhead phones -Ġ20 5 -ĠRef resh -p resident -ĠKit chen -ĠEnt ered -ĠS nyder -00 5 -om ical -Ġborrow ed -ĠN em -Ġav iation -Ġst all -rim ination -Ġuniform s -it ime -ĠSim mons -ener gy -ab lished -y y -qual ified -Ġrall ies -ĠSt uart -fl ight -Ġgang s -r ag -Ġv ault -lu x -ĠCom par -Ġdesign ation -20 9 -ĠJ os -d ollar -z ero -Ġwell s -30 3 -Ġconstitu ents -Ġhe ck -Ġc ows -Ġcommand ers -Ġdifferent ial -ĠC atherine -29 9 -Ġval ve -Ġbr ace -Ġperspect ives -c ert -f act -icular ly -ĠMc N -pl anes -Ġint ric -Ġpe as -ov an -Ġtoss ed -ret ch -ĠL opez -Ġunf amiliar -de ath -ĠA part -ĠCh ang -Ġrelie ved -rop he -Ġair ports -Ġfre ak -ut il -M ill -ĠCh in -ĠOw en -m ale -ĠBro ken -ĠWind s -ro b -r ising -Ġfire fighters -Ġauthor itarian -Ġ14 8 -Bit coin -ex ternal -Ġbrow sers -iche ver -or ian -Ġun b -Ġpo ke -ĠZ ot -M id -ĠPop ular -Ġco vert -Ġcont ributes -Ġ6 50 -Ġcont ention -G ate -Ġcons oles -Ġchrom os -ĠI X -Ġvis ually -ĠE isen -Ġjewel ry -Ġdeleg ation -Ġacceler ate -ĠR iley -Ġsl ope -Ġind oor -it ially -Ġhuge ly -Ġtun nels -Ġfin ed -Ġdirect ive -Ġfore head -ustom ed -Ġsk ate -Mus ic -g as -Ġrecogn izing -am bo -Ġover 
weight -ĠGr ade -Ù Ĭ -Ġsound ing -Ġlock ing -ĠR EM -St ore -Ġexc av -ĠLike wise -ĠL ights -Ġel bow -ĠSupp ly -w ic -Ġhands ome -19 94 -C oll -Ġadequ ately -ĠAssoci ate -Ġstri ps -Ġcrack down -Ġmar vel -ĠK un -Ġpass ages -@@ @@ -ĠT all -Ġthought ful -names e -Ġprost itution -bus iness -Ġball istic -person al -c ig -iz ational -R ound -ĠÂłĠÂł ĠÂłĠÂł -ĠCole man -Ġadm itting -ĠPl ug -Ġbit coins -ĠSu z -Ġfair ness -Ġsupp lier -Ġcatast rophic -ĠHel en -o qu -M arc -ĠArt icles -g ie -Ġend angered -Ġdest iny -ĠVol t -ol ia -ax is -Ġche at -Ġun ified -IC O -qu ote -30 2 -ĠS ed -Ġsupp ression -Ġanaly zing -Ġsqu at -Ġfig uring -Ġcoordin ates -Ġch unks -Ġ19 46 -Ġsub p -Ġw iki -ĠFor bes -ĠJ upiter -ĠE rik -im er -ĠCom mercial -\ ) -Ġlegitim acy -Ġd ental -ĠMe an -Ġdefic its -5 50 -Orig inally -ĠHor ror -Ġcontam ination -ll ah -Ġconf isc -ĠCl are -T B -ĠF ailed -an ed -Ġrul er -ĠCont roller -Ġfemin ists -F ix -g ay -20 7 -Ġr abbit -Th ird -ownt own -Ġgl ue -Ġvol atile -Ġsh ining -Ġf oll -Ġimp aired -Ġsup ers -æ Ī -Ġcl utch -ļé ĨĴ -Ġpro let -Ġ( ! -Ġy elled -ĠK iev -ĠEr n -ĠSh ock -K B -Ġsit uated -qu ery -ĠN as -Ġan nex -char acter -ĠHol iday -Ġautom ation -ĠJ ill -ĠRem astered -Ġl inem -Ġwild erness -ĠHor izon -ĠGu inea -A Z -Ġmain land -Ġsec recy -LE ASE -Ġp unk -ĠProv ince -( ), -Spe ed -Ġhand ing -ĠSeb ast -S ir -r ase -Ġj ournals -Ġcon gest -ĠT ut -ir rel -Ġschizophren ia -Ġmis ogyn -health y -I ron -Ġreact ed -- $ -25 2 -Ġpl ural -Ġpl um -Ġbarg ain -Ġground ed -f inder -Ġdis se -ĠL az -O OD -Ġat roc -F actory -Ġmin ions -Ġo ri -ĠB rave -ĠP RE -ĠMy anmar -ĠH od -Ġexped ition -Ġexpl ode -ĠCo ord -Ġext r -ĠB rief -ĠAD HD -Ġhard core -feed ing -Ġd ile -ĠF ruit -Ġvacc ination -ĠM ao -osp here -Ġcont ests -- | -Ġf ren -isp here -R om -ĠSh arp -ĠTre nd -Ġdis connect -âĢ¢ âĢ¢ -Ġper secution -Ear th -Ġhealth ier -38 4 -Ġc ob -ĠTr inity -OW S -AN N -Ġspecial ty -Ġg ru -Ġcooper ative -wh y -Start ing -ĠIss ues -st re -ens or -Ġ18 5 -Ad v -! ? 
-ĠRe vel -em ia -ĠH ulk -Ġcelebr ations -ĠS ou -ra ud -ĠKle in -Ġun real -con text -Ġpartners hips -Ġadop ting -t ical -Ġspl ash -ĠHe zbollah -c ategory -cycl op -xt on -ĠD ot -urd y -t z -Ġenvelop e -ĠN L -â ķ -Ġwhere in -Spe c -18 4 -Ġte lev -al iation -Ġmyth s -å ° -Ġrig orous -Ġcommun icating -Ġobser ver -Ġre he -ĠW ash -Ġapolog ized -ĠT in -Ġexpend itures -work ers -d ocument -Ġhes itate -ĠLen in -Ġunpredict able -Ġrenew al -cl er -ok ia -ĠCON T -Ġpost season -Tok ens -Ġex acerb -Ġbet ting -Ġ14 7 -Ġelev ation -W ood -ĠSol omon -19 4 -00 4 -out put -Ġredu nd -ĠM umbai -Ġp H -Ġreprodu ce -ĠD uration -MA X -Ġb og -C BS -ĠBal ance -ĠS gt -ĠRec ent -Ġc d -Ġpo pped -Ġincomp et -pro p -ay an -g uy -Pac ific -Ġty r -Ġ{ { -ĠMy stic -ĠD ana -Ġmast urb -Ġge ometry -à ¢ -ĠCor rect -Ġtraject ory -Ġdistract ed -Ġf oo -ĠW elsh -L uc -m ith -Ġrug by -Ġrespir atory -Ġtri angle -Ġ2 15 -Ġunder graduate -ĠSuper ior -ch anging -_ - -Ġright ly -Ġrefere e -Ġluc rative -Ġun authorized -Ġresemb les -ĠGN U -ĠDer by -Ġpath ways -ĠL ed -Ġend urance -Ġst int -Ġcollect or -F ast -Ġd ots -Ġnational s -ĠSec urities -Ġwh ip -Par am -Ġlearn s -M agic -Ġdetail ing -m oon -Ġbroadcast ing -Ġb aked -26 5 -hol m -ĠS ah -ĠHus sein -ĠCourt esy -17 4 -Ġ14 6 -Ġge ographic -pe ace -Ġjud ging -ĠS tern -B ur -Ġstory line -G un -ĠSt ick -24 5 -30 7 -ãĤ´ ãĥ³ -ĠAdminist rator -Ġbur nt -Ġp ave -ch oes -Ex ec -Ġcamp uses -Res ult -Ġmut ations -ĠCh arter -Ġcapt ures -Ġcomp ares -Ġbad ge -S cient -Ġer ad -ier y -o i -ett es -ĠE state -Ġst rap -Ġproud ly -Ġf ried -Ġwithd rawn -ĠV oy -ph ony -It ems -ĠP ierce -b ard -Ġann otation -ant on -ill on -Im pro -... 
) -Ġhapp ier ----- -- -ad just -Ġstaff ers -Ġactiv ism -Ġper f -Ġal right -N eed -Ġcomm ence -Ġopio id -ĠAm anda -E s -ĠP ars -ĠK aw -W orks -24 8 -Ġind o -t c -end ant -ĠM oto -Ġlegal ization -OT E -Ġtask ed -Ġt sp -ĠACT IONS -16 6 -Ġrefres hing -ĠN R -ĠPere z -Ġinfring ement -S Y -List en -in ning -k u -Ġrot ate -pro gram -ar ah -Des ign -Ġ( £ -Ġst oring -Ġwar rants -Ġjud gement -ĠB rist -us ually -ph oto -ĠR an -ĠP ine -Ġoutrage ous -ĠValent ine -lu ence -ĠEvery body -Al tern -Ġrele vance -Ġtermin ated -Ġd essert -Ġfulf illed -Ġprosecut ed -ĠW ords -Ġm igrant -Ġcultiv ation -ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ -idel ity -ĠV ern -ĠLog in -Ġmetaph or -ĠT ip -Ġrecru its -ĠP ig -rib ing -Ġenthusi asts -ex per -Ġfright ening -ĠH air -ans on -str ate -Ġh i -He ight -Ġown ing -n one -Ġdis like -Ġkn ives -pher d -Ġloud ly -ĠAP Is -Dis play -ĠL ac -ĠUS S -ab l -ver ages -J ew -Ġ17 2 -ĠHist orical -at oon -ĠPhys ics -in tern -Ġwarm th -Ġto pp -D M -Ġgun man -Ġem peror -od i -ãĥ £ -in atory -ĠR ib -Ġ13 1 -ĠSat urn -ĠSh ining -Ġw aking -Qu otes -Ġcomed ian -en berg - ½ -Ġbelie vers -Ġpaper work -c ustom -Ġle v -Ġl ament -Ġpour ing -22 2 -p olitical -ĠSupp lement -m aid -Ġcruel ty -Ġt read -ys ics -A w -rit es -Ġmod ifier -ĠP osition -Ad am -l b -ub s -Ġimper fect -Ġcl usters -ĠEngine er -ĠC herry -Ġinaug uration -ĠS au -Ġembod iment -ĠUn cle -Ġover r -Ġexplos ions -c ule -ĠPrinc eton -ĠAndre a -Ġincorrect ly -Ġearn est -Ġpil gr -ĠS print -Ġslee ve -Ġhe ars -ĠAm azing -Ġbrow sing -ag in -Ġhom eland -Ġha w -Ġd iving -ist ered -17 8 -Ġbarg aining -ĠArc ade -Ġdeleg ate -ters on -................................ ................................ 
-ĠJackson ville -27 5 -Ġst agn -Ġad am -ĠSher man -C B -Ġsub urb -ĠFood s -Ġconver ting -ĠAr ist -Ġch ambers -l ove -Ġam ino -ĠG an -Ġmad ness -m c -ĠUS E -def ined -Ġul tr -ind ust -Ġw olves -l ance -Add itionally -Ġcr acks -as ia -ĠRe ason -ĠP ump -Ġaccident al -ĠL aser -ĠR id -Ġinitial ized -ell i -Ġun named -Ġn oun -ĠPass ed -Ġhost age -ĠEth iop -sh irts -Ġun rel -ĠEmb assy -Ġ19 41 -Ġat oms -Ġpur ported -16 4 -ĠF i -Ġgall ons -ĠMon ica -Ġp g -en ment -Ġsort ed -ĠG ospel -Ġhe ights -Ġtr aced -Ġunder going -She ll -Ġs acks -Ġproport ions -Ġhall uc -F ont -ac et -Ġwar mer -ĠIN TER -Ġgrab bing -Pl ug -Ġreal ization -ĠBur ke -Ġen chant -AT ER -ĠSe ed -Ġabund ant -F M -Ġc ivic -V s -is i -Ġv ow -Ġre per -ĠPartners hip -Ġpenet ration -Ġax e -Ġsh attered -ĠZ ombies -Ġv inyl -ĠAl ert -e on -Ġoblig ed -ĠIll ust -ĠPl aza -ĠFront ier -Ġdavid jl -ĠSer ial -ĠH av -ĠNut rition -B i -Ġâĸ Ī -ĠJ ays -lin ux -Ġhur ry -Ġv oy -Ġhop eless -ĠSte alth -Ġ ãģ -ess ors -tt le -b org -ĠSaf ari -f ell -Ġw ary -d ue -ĠAb ove -H a -E LL -Ġnot or -ĠW on -T oo -Ġoccup ations -Ġposs essions -Ġinv iting -Ġpred ators -Ġacceler ated -Ġ15 7 -uter te -ĠC ube -e ast -acc ount -G ive -Ġtrans plant -red ients -id able -Ġscreens hots -ĠG und -ĠF S -Ġtravel ers -Ġsens ory -ĠF iat -ĠRock ets -İ ĭ -_ { -F riend -Ġchar ming -AL S -Ġenjoy ment -m ph -Ġ5 000 -ĠRE G -Ù Ĩ -b ia -Ġcomp ilation -ro st -ĠV P -ĠSch ne -201 9 -Ġcop ying -M ORE -ĠFl ore -f alls -2 15 -t otal -Ġdis ciples -d ouble -Ġexceed ing -Ġsm ashed -Ġconcept ual -ĠRom ania -ĠB rent -ĠI CE -ĠT ou -Ġg rap -Ġn ails -18 9 -ãĥ ĺ -Ġproc ure -e ur -Ġconfir ming -ĠC ec -aw i -ĠEd en -Ġn g -Ġengine ered -at ics -Ġhook ed -Ġdisgust ing -ĠMur der -ãĤ ¿ -L ibrary -Ġ16 8 -Al most -hem atic -Men u -ĠNot re -ĠJ ur -Ġkidn apped -Ġhack er -ĠJ ade -Ġcreep y -Ġdraw ings -ĠSpons or -Ġcycl ists -ĠGob lin -Ġoptim ized -Ġst aged -ĠMc D -bet ween -A ge -en o -S ex -ĠW ide -n ings -av is -Ġincap able -ĠK ob -Ġreward ing -ĠL one -oles cent -Ġcontract ed -Ġstick y -J ose 
-B all -f est -ĠIn put -ĠRec ently -Ġto mat -squ are -App lication -Ġnit rogen -Ġdupl icate -ĠRec on -ĠD ear -L ondon -Ġint ra -Ġd ock -Ġout reach -ĠM illion -Ġmamm als -am pton -V AL -Ġsn aps -Ġd os -ĠWh ole -ĠRead y -T ry -ĠWinn ipeg -ear ance -Ġinc urred -ren ched -ĠNS W -il ot -rain e -Ġc ube -g ot -Ġrun way -etermin ed -ĠHaw ks -Ġsurviv or -ĠW ish -ĠD in -ĠDE F -ĠV ault -18 7 -Ġmush rooms -Ġcris p -be y -ĠDisco very -Ġdevelopment al -Ġparad igm -Ġcha otic -ĠT su -Ġ3 33 -b ons -Ġbacter ial -Ġcomm its -Ġcos mic -Ġme ga -oc ative -ĠP aint -ophob ic -Ġv ain -Ġcar ved -ĠTh ief -ĠG ul -ows hip -Ġc ites -ĠEd inburgh -Ġdimin ished -Ġacknowled ges -ĠK ills -Ġmic row -ĠHer a -Ġsen iors -Ġwhere by -H op -at ron -Ġun available -ĠN ate -Ġ4 80 -Ġsl ated -ĠRe becca -ĠB attery -Ġgram mar -Ġhead set -Ġcurs or -Ġex cluding -any e -aunder ing -eb in -Ġfeas ible -ĠPub lishing -ĠLab s -ĠCl iff -ĠFerr ari -Ġp ac -vis ible -mark ed -pe ll -Ġpol ite -Ġstagger ing -ĠGal actic -Ġsuper st -Ġpar an -ĠOffic ers -ãĢ ģ -Ġspecific s -ul us -23 9 -ĠP aste -AM P -ĠPan ama -ĠDe lete -angu ard -rest rial -Ġhero ic -ĠD y -ا ÙĦ -Ġincumb ent -Ġcr unch -t ro -Ġsc oop -Ġblog ger -Ġsell ers -ure n -Ġmedic ines -ĠC aps -ĠAnim ation -ox y -Ġout ward -Ġinqu iries -22 9 -Ġpsych ologist -ĠS ask -ev il -Ġcontam inated -ãĤ ¨ -he rence -Ġbrand ed -ĠAbd ul -z h -Ġparagraph s -Ġmin s -Ġcor related -er b -Ġimp art -Ġmil estone -ĠSol utions -ot le -Ġunder cover -Ġmar ched -ĠCharg ers -f ax -ĠSec rets -Ġr uth -we ather -Ġfemin ine -Ġsh am -Ġprest igious -igg ins -Ġs ung -hist ory -ett le -gg ie -Ġout dated -ol and -Ġper ceptions -ĠS ession -ĠDod gers -u j -ĠE ND -D oc -Ġdefic iency -Gr and -ĠJ oker -Ġretro spect -Ġdiagn ostic -Ġharm less -Ġro gue -ĠA val -E qu -Ġtrans c -ĠRoberts on -ĠDep ending -ĠBurn s -iv o -Ġhost ility -F eatures -ĵ ĺ -Ġdis comfort -ĠL CD -spec ified -ĠEx pect -3 40 -Ġimper ative -ĠReg ular -Ch inese -Ġstate wide -Ġsy mm -Ġlo ops -Ġaut umn -N ick -Ġsh aping -Ġqu ot -Ġc herry -ĠCross ref -è¦ 
ļéĨĴ -Stand ard -he ed -ĠD ell -ĠViet namese -Ġo st -ĠV alkyrie -O A -Ass ad -Ġreb ound -ĠTra ffic -pl aces -æ ĺ -ĠB uc -17 2 -Ġshel ters -Ġins isting -ĠCertain ly -ĠKenn eth -ĠT CP -Ġpen al -ĠRe play -he ard -Ġdial ect -iz a -ĠF Y -it cher -ĠD L -Ġspir al -Ġquarterback s -Ġh ull -Ġgo ogle -Ġto dd -ĠSter ling -ĠPl ate -Ġsp ying -mb ol -ĠReal m -ĠPro ced -ĠCr ash -Ġtermin ate -Ġprotest ing -C enter -gu ided -Ġun cover -Ġboy cott -Ġreal izes -s ound -Ġpret ending -ĠV as -19 80 -Ġfram ed -Ġ13 9 -Ġdesc ended -Ġrehab ilitation -Ġborrow ing -ĠB uch -Ġbl ur -R on -ĠFro zen -en za -Ch ief -ĠP oor -Ġtransl ates -M IN -Ġ2 12 -J ECT -Ġerupt ed -Ġsuccess es -S EC -Ġpl ague -Ġg ems -d oms -Ġstret ches -ĠSp y -Ġstory telling -C redit -ĠP ush -Ġtra ction -Ġin effective -ĠL una -Ġt apes -Ġanaly tics -erc ise -Ġprogram mes -ĠCar bon -Ġbeh old -he avy -ĠConserv ation -ĠF IR -Ġs ack -ter min -ric ks -Ġhous ed -Ġunus ually -I ce -Ġexecut ing -ĠMor oc -ed ay -Ġed itions -Ġsm arter -ĠB A -Ġout law -Ġvan ished -ib a -AL SE -ĠSil va -23 8 -C ould -Ġphilos opher -Ġevac uated -Sec ret -14 2 -Ġvis as -ãĤ ¬ -ĠM alt -ĠClear ly -ĠN iger -ĠC airo -ĠF ist -3 80 -ĠX ML -aut o -it ant -Ġrein forced -Rec ord -ĠSurviv or -G Hz -Ġscrew s -parent s -Ġo ceans -ma res -Ġbra kes -vas ive -Ġhell o -ĠS IM -rim p -Ġo re -ĠArm our -24 7 -Ġterr ific -Ġt ones -14 1 -ĠMin utes -Ep isode -Ġcur ves -Ġinflamm atory -Ġbat ting -ĠBeaut iful -L ay -Ġunp op -v able -Ġr iots -ĠTact ics -b augh -ĠC ock -Ġorg asm -ĠS as -Ġconstruct or -et z -G ov -Ġant agon -Ġthe at -Ġde eds -ha o -c uts -ĠMc Cl -Ġu m -ĠScient ists -Ġgrass roots -ys sey -"] => -Ġsurf aced -Ġsh ades -Ġneighb ours -Ġad vertis -oy a -Ġmer ged -Up on -Ġg ad -Ġanticip ate -Any way -Ġsl ogan -Ġdis respect -I ran -ĠT B -act ed -Ġsubp oen -medi ately -OO OO -Ġwa iver -Ġvulner abilities -ott esville -ĠHuff ington -J osh -ĠD H -M onday -ĠEll en -K now -x on -it ems -22 8 -Ġf ills -ĠN ike -Ġcum ulative -and als -I r -Ġ ì -Ġfr iction -ig ator -Ġsc ans -ĠVi enna -ld 
om -Ġperform ers -P rim -Ġb idding -M ur -Ġlean ed -ĠPri x -al ks -Ġ[ âĢ¦] -ĠTw itch -ĠDevelop er -ĠG ir -Ġcall back -Ab stract -Ġacc ustomed -Ġfreed oms -ĠP G -ur acy -Ġl ump -is man -,, ,, -19 92 -ĠR ED -Ġwor m -M atch -ĠPl atinum -I J -ĠOwn er -Tri via -com pl -Ġnew born -Ġfant as -O wn -Ġ19 59 -Ġsymp ath -Ġub iqu -Ġoutput s -Ġal lev -Ġpr ag -K evin -Ġfav ors -Ġbur ial -Ġn urt -so lete -c ache -Ġ15 6 -Ġunl ocks -te chn -M aking -Ġcon quer -ad ic -æ ĸ -Ġel f -Ġelect orate -ĠKurd s -ĠSt ack -ĠSam urai -Ġâ ĺħ -Ġ{ } -ĠS aid -ĠFall out -Ġkind ness -ĠCustom s -ĠBou levard -Ġhelicop ters -ot ics -ĠVe get -com ment -Ġcritic ised -Ġpol ished -ĠRem ix -ĠC ultural -Ġrec ons -Ġdo i -at em -Sc reen -Ġbar red -Com ments -ĠGener ally -Ġsl ap -7 20 -V ari -p ine -Ġem pt -Ġh ats -ĠPlay ing -l ab -a verage -form s -ĠC otton -Ġcan s -ĠD ON -ĠSom alia -C rypt -ĠIncre ases -E ver -mod ern -Ġsur geon -3 000 -Ġrandom ized -================================ ================================ -B ern -im pl -ĠC OR -Ġpro claim -th ouse -Ġto es -Ġam ple -Ġpres erving -Ġdis bel -gr and -B esides -Ġsil k -ĠPat tern -h m -Ġenter prises -Ġaffidav it -ĠAdvis ory -Ġadvert ised -ĠRel igious -se ctions -psy ch -ĠField s -aw ays -Ġhasht ag -ĠNight mare -Ġv ampire -Ġfore nsic -rosso ver -n ar -Ġn avy -Ġvac ant -ĠD uel -Ġhall way -Ġface book -ident ally -ĠN RA -Ġm att -Ġhur ricane -ĠKir by -ĠP uzzle -Ġsk irt -ou st -du llah -Ġanal ogy -in ion -Ġtomat oes -ĠN V -ĠPe ak -ĠMe yer -Ġappoint ments -Ġm asc -Ġal ley -re hend -Ġchar ities -Ġund o -Ġdest inations -ĠTest ing -"> " -c ats -* . 
-Ġgest ures -gener al -Le ague -Ġpack ets -ĠInspect or -ĠBer g -Ġfraud ulent -Ġcritic ize -F un -Ġbl aming -nd ra -Ġsl ash -ĠE ston -Ġpropos ing -Ġwh ales -Ġtherap ist -Ġsub set -Ġle isure -EL D -ĠC VE -ĠAct ivity -Ġcul min -sh op -ĠD AY -is cher -ĠAdmir al -ĠAtt acks -Ġ19 58 -Ġmem oir -Ġfold ed -Ġsex ist -Ġ15 3 -ĠL I -Ġread ings -Ġembarrass ment -ĠEmploy ment -w art -ch in -Ġcontin uation -l ia -Rec ently -Ġd uel -Ġevac uation -ĠKash mir -Ġdis position -ĠR ig -Ġbol ts -Ġins urers -4 67 -M ex -Ġret aliation -Ġmis ery -Ġunre asonable -r aining -I mm -ĠP U -em er -Ġgen ital -ãĤ ³ -ĠC andy -Ġon ions -ĠP att -lin er -Ġconced ed -Ġf a -Ġfor c -ĠH ernandez -ĠGe off -deb ian -ĠTe ams -Ġc ries -Ġhome owners -23 7 -A BC -Ġst itch -Ġstat istic -Ġhead ers -ĠBi ology -Ġmot ors -ĠG EN -ĠL ip -Ġh ates -Ġhe el -S elf -i pl -ED IT -ort ing -Ġann ot -ĠSpe ech -old emort -ĠJ avascript -ĠLe Bron -Ġfoot print -Ġf n -Ġseiz ures -n as -h ide -Ġ19 54 -ĠBe e -ĠDecl aration -ĠKat ie -Ġreserv ations -N R -f emale -Ġsatur ated -Ġb iblical -Ġtroll s -Dev ice -ph otos -Ġdr ums -ãĥīãĥ© ãĤ´ãĥ³ -N ight -f ighter -ĠH ak -ri ber -Ġc ush -Ġdiscipl inary -ba um -ĠG H -ĠSch midt -ilib rium -Ġs ixty -ĠKush ner -ro ts -Ġp und -ĠR ac -Ġspr ings -Ġcon ve -Bus iness -F all -Ġqual ifications -Ġvers es -Ġnarc iss -ĠK oh -ĠW ow -ĠCharl ottesville -ed o -Ġinterrog ation -ĠW ool -36 5 -B rian -Ġâľ ĵ -Ġalleg es -ond s -id ation -ĠJack ie -y u -Ġl akes -Ġworth while -Ġcryst als -ĠJud a -Ġcomp rehend -Ġfl ush -Ġabsor ption -ĠO C -Ġfright ened -ĠCh ocolate -Mart in -Ġbu ys -Ġbu cks -Ġapp ell -ĠChampions hips -Ġlist ener -ĠDef ensive -Ġc z -ud s -ĠM ate -Ġre play -Ġdecor ated -Ġs unk -ĠV IP -ĠAn k -Ġ19 5 -aa aa -Nob ody -ĠMil k -ĠG ur -ĠM k -ĠS ara -Ġse ating -ĠW id -Tr ack -Ġemploy s -Ġgig antic -AP P -ãĤ § -in ventory -Ġtow el -at che -l asting -ĠT L -Ġlat ency -Ġkn e -B er -me aning -Ġup held -Ġplay ground -Ġm ant -S ide -Ġstere o -Ġnorth west -Ġexception ally -Ġr ays -Ġrec urring -D rive -Ġup right -Ġab duct 
-ĠMar athon -Ġgood bye -Ġal phabet -h p -Ġcourt room -ring ton -ot hing -T ag -Ġdiplom ats -Ġbar bar -ĠAqu a -18 3 -33 33 -Ġmat urity -Ġinst ability -ĠAp ache -Ġ= == -Ġfast ing -ĠGr id -Mod Loader -Ġ15 2 -A bs -ĠOper ating -ett i -Ġacqu aint -Don nell -ĠK em -ĠFor ge -Ġarm ored -M il -Ġphilos ophers -in vest -Pl ayers -â Ī -Ġmy riad -Ġcomr ades -R ot -Ġremember ing -Ġcorrespond s -Ġprogram mers -ĠLyn n -Ġo lig -Ġco herent -yn chron -ĠChem ical -Ġj ugg -p air -post s -E ye -ĠIn ner -Ġsem ester -ott est -ĠEmir ates -ric anes -or ously -m its -ĠW is -Ġd odge -l ocation -Ġf aded -Am azon -ĠPro ceed -ĠIN FO -j ournal -ĠTru ck -T en -Ġ2 17 -Ġstat utes -m obile -ĠT ypes -Rec omm -b uster -pe x -Ġleg ends -Ġhead ache -f aced -ĠWi Fi -if ty -ĠH ER -Ġcirc uits -ER ROR -22 6 -ol in -Ġcyl inder -osp ace -ik ers -P rem -Qu ant -Ġconflic ting -Ġslight est -Ġfor ged -ion age -Step hen -ĠK ub -ĠOpp ortun -ĠHe al -Ġbl o -Ġrul ers -Ġh uh -Ġsubmar ine -f y -ass er -Ġallow ance -ĠKas ich -ĠT as -ĠAustral ians -Forge ModLoader -ĠâĨ ij -ĠMat rix -am ins -Ġ12 00 -ĠAc qu -23 6 -D ocument -ĠBre aking -19 3 -ĠSub st -ĠRoll er -ĠPro perties -ĠN I -t ier -Ġcr ushing -Ġadvoc ating -Further more -keep ers -Ġsex ism -x d -Ġcall er -ĠS ense -chie ve -ĠT F -Ġfuel ed -Ġreminis cent -Ġobs ess -ur st -Ġup hold -ĠF ans -het ics -Ġâ Ĺ -ĠB ath -Ġbe verage -Ġo scill -25 4 -Ġpol es -Ġgrad ual -Ġex ting -ĠS uff -ĠS uddenly -Ġlik ing -Ġ19 49 -un ciation -am ination -ĠO mar -ĠL V -ĠCon sequently -Ġsynt hes -ĠG IF -Ġp ains -Ġinteract ing -u ously -inc re -Ġrum or -ĠScient ology -19 7 -ĠZ ig -Ġspe lling -ĠA SS -Ġexting u -ms on -Ġg h -Ġremark ed -ĠStrateg ic -ĠM ON -å ¥ -g ae -ĠWH AT -E ric -ĠCamp us -Ġmeth ane -Ġimag in -J UST -ĠAl m -X T -i q -ĠR SS -Ġwrong doing -att a -Ġbig ot -Ġdemonstr ators -ĠCal vin -ĠV illa -Ġmembr ane -ĠAw esome -Ġbenef ic -26 8 -Ġmagn ificent -ĠL ots -G reg -ĠBor is -Ġdetain ees -ĠH erman -Ġwhis pered -Ġa we -Prof essor -fund ing -Ġphys iological -ĠDest ruction -Ġlim b -Ġmanip 
ulated -Ġbub bles -Ġpse ud -Ġhyd ra -ĠBrist ol -Ġst ellar -ĠExp ansion -ĠK ell -ĠInterest ingly -Ġm ans -Ġdrag ging -Ġec ological -ĠF it -Ġg ent -Ġbenef ited -ĠHait i -Ġpoly g -ãĥ İ -Ġ20 30 -Ġpro w -Ġrecon struction -Ġwas t -Ġpsych ic -ĠGree ks -Hand ler -16 2 -ĠP ulse -Ġsol icit -Ġsy s -Ġinflu x -ĠG entle -per cent -Ġprolifer ation -Ġtax able -Ġdisreg ard -Ġesc aping -Ġg inger -Ġwith stand -Ġdevast ated -ĠD ew -ser ies -Ġinject ed -ela ide -Ġturn over -he at -Ļ Ĥ -H appy -ĠSil ent -ãĤ Ń -iv ism -Ġir rational -AM A -Ġre ef -r ub -Ġ16 2 -Ġbank ers -ĠEth ics -v v -Ġcritic isms -K n -18 6 -M ovie -ĠT ories -Ġno od -Ġdist ortion -F alse -od ore -Ġt asty -Res earch -ĠU ID -- ) -Ġdivor ced -ĠM U -ĠHay es -ĠIs n -ian i -ĠH Q -Ġ" # -ign ant -Ġtra umatic -ĠL ing -H un -Ġsab ot -on line -r andom -Ġren amed -ra red -K A -d ead -é t -ĠAss istance -Ġse af -++++ ++++ -Ġse ldom -ĠWeb b -Ġbo olean -u let -Ġref rain -ĠDI Y -ru le -Ġshut ting -Ġutil izing -load ing -ĠPar am -co al -oot er -Ġattract ing -ĠD ol -Ġher s -ag netic -ĠRe ach -im o -Ġdisc arded -ĠP ip -01 5 -ü r -Ġm ug -Im agine -C OL -Ġcurs ed -ĠSh ows -ĠCurt is -ĠSach s -spe aking -ĠV ista -ĠFram ework -ong o -Ġsub reddit -Ġcr us -ĠO val -R ow -g rowing -Ġinstall ment -Ġgl ac -ĠAdv ance -EC K -ĠLGBT Q -LE Y -Ġac et -Ġsuccess ive -ĠNic ole -Ġ19 57 -Qu ote -Ġcircumst ance -ack ets -Ġ14 2 -ort ium -Ġguess ed -ĠFr ame -Ġperpet rators -ĠAv iation -ĠBen ch -Ġhand c -A p -Ġ19 56 -25 9 -r and -Net Message -d in -urt les -h ig -ĠV III -ff iti -ĠSw ords -b ial -Ġkidn apping -dev ice -Ġb arn -ĠEl i -auc as -S end -Con structed -Ġ ½ -Ġneed les -Ġad vertisements -Ġv ou -Ġexhib ited -ĠFort ress -As k -B erry -TY PE -Ġcan cers -ump ing -ĠTerrit ory -Ġpr ud -Ġn as -Ġathe ist -Ġbal ances -ãģ Ł -ĠSh awn -& & -Ġland sc -ĠR GB -Ġpet ty -Ġex cellence -Ġtransl ations -Ġpar cel -ĠChe v -E ast -ĠOut put -im i -Ġamb ient -ĠTh reat -Ġvill ains -Ġ5 50 -IC A -Ġtall er -Ġle aking -c up -Ġpol ish -Ġinfect ious -ĠK C -Ġ@ @ -back ground -Ġbureaucr acy 
-ĠS ai -un less -it ious -ĠSky pe -At l -ID ENT -00 8 -Ġhyp ocr -Ġpit chers -Ġguess ing -ĠF INAL -Bet ween -Ġvill agers -Ġ25 2 -f ashion -ĠTun is -Be h -ĠEx c -ĠM ID -28 8 -ĠHas kell -19 6 -ĠN OR -Ġspec s -Ġinv ari -Ġgl ut -ĠC ars -Ġimp ulse -Ġhon ors -g el -Ġjurisd ictions -ĠBund le -ul as -Calif ornia -ĠIncre ase -Ġp ear -Ġsing les -Ġc ues -Ġunder went -ĠW S -Ġexagger ated -Ġdub ious -Ġfl ashing -L OG -) ]. -J ournal -t g -V an -ĠI stanbul -ĠIn sp -ĠFrank en -D raw -Ġsad ness -Ġiron ic -ĠF ry -x c -Ġ16 4 -is ch -W ay -ĠProtest ant -h orn -Ġun aff -ĠV iv -ill as -ĠProduct ions -ĠH ogan -Ġper imeter -ĠS isters -Ġspont aneous -Ġdown side -Ġdescend ants -Ġor n -w orm -Japan ese -Ġ19 55 -Ġ15 1 -ĠDo ing -els en -umb les -Ġrad ically -ĠDr um -ĠB ach -Ġli abilities -ĠO B -ĠElement ary -Ġmem e -yn es -Ġfinger print -ĠGr ab -Ġundert ake -Mem bers -ĠRead er -ĠSim s -g od -Ġhypot hetical -s cient -ĠA J -Ġchar ism -Ġad missions -ĠMiss ile -tr ade -Ġexerc ising -ĠBack ground -W ritten -Ġvoc als -whe ther -Ġv i -ĠW inner -Ġl itter -ĠSh ooting -ST EM -ãĤ ¡ -ĠA FL -Ġvari ability -Ġe ats -ĠD PS -b row -Ġeleph ants -Ġstr at -Ġ Å -Ġsett lers -Matt hew -Ġin advert -H I -ĠIM F -ĠGo al -Ġnerv es -John son -ey e -ablish ment -Th ursday -BIL ITY -H ad -am oto -het amine -ep s -Ġmit ochond -Ġcomp ressed -ĠTre vor -ĠAnim als -T ool -L ock -Ġtwe ak -Ġpin ch -Ġcancell ation -P ot -Ġfoc al -ĠAst ron -17 3 -ĠA SC -ĠO THER -umn i -Ġdem ise -d l -Ù ħ -Sem itism -Ġcr acking -Ġcollabor ative -Ġexpl ores -s ql -Ġher bs -Ġconfig urations -m is -ĠRes ult -ace y -ĠSm oke -Ġsan ct -el ia -Ġdeg ener -Ġdeep est -Ġscream ed -Ġn ap -Soft ware -ĠST AR -E F -ĠX in -spons ored -mans hip -23 3 -Ġprim aries -Ġfilter ing -Ġas semble -m il -ĠMy ers -b ows -Ġpun ched -M ic -Ġinnov ations -Ġfun c -and o -Ġfr acking -ĠV ul -о Ð -osh op -ĠIm mun -Ġsett ling -Ġadolesc ents -Ġreb uilding -Ġtransform ing -Ġpar ole -Ġhar bor -Ġbook ing -ot ional -onge vity -ĠY o -b ug -Ġemer ges -ĠMethod s -ĠCh u -P res -ĠDun geons -Ġtra 
iling -ĠR um -ĠH ugh -å¤ © -ĠE ra -ĠBatt les -Res ults -ĠTr ading -Ġvers a -c ss -ax ies -he et -Ġgre ed -19 89 -Ġgard ens -Ġconting ent -P ark -ĠLeaf s -h ook -ro be -Ġdiplom acy -ĠF uel -ĠInv asion -Ġupgr ading -M ale -Ġe lic -Ġrelent less -ĠCo venant -ap esh -ĠT rop -T y -pro duction -art y -Ġpun ches -ak o -cyclop edia -ĠR abbit -ĠHD MI -Ġ14 1 -Ġf oil -Item Image -ĠF G -Ġimplement ations -ĠP om -ixt ures -Ġaw ait -Ġ3 30 -am us -Ġumb rella -Ġfore see -se par -Ġcircum cision -Ġperipher al -S ay -ĠExper t -In c -Ġwithd rew -ĠAnd ers -f ried -Ġradio active -ĠOp ening -Ġboard ing -ĠN D -Ġover throw -Act iv -W P -ĠAct s -× Ļ -Ġmot ions -v ic -ĠM ighty -ĠDef ender -a er -Ġthank ful -ĠK illing -ĠBr is -mo il -Ġpredict ing -26 6 -ch oice -Ġkill ers -Ġinc ub -ĠChe st -ather ing -Ġpro claimed -fl ower -oss om -umbled ore -ĠCy cling -ĠOccup y -AG ES -P en -ĠY ug -Ġpack aged -Ġheight ened -c ot -st ack -C ond -Ġst amps -m age -Ġpersu aded -Ġens l -ĠCard inal -Ġsol itary -Ġpossess ing -ĠC ork -Ġev id -ĠT ay -Ġbl ues -Ġextrem ism -Ġlun ar -Ġcl own -Te chn -Ġfest ivals -ĠPv P -ĠL ar -Ġconsequ ently -p resent -Ġsom eday -ç İĭ -ĠMet eor -Ġtour ing -c ulture -Ġbe aches -S hip -c ause -ĠFl ood -ãĥ ¯ -Ġpur ity -th ose -Ġem ission -b olt -Ġch ord -ĠScript ure -L u -Ġ$ { -cre ated -Other s -25 8 -Ġelement al -Ġannoy ed -ĠA E -d an -ĠS ag -Res earchers -Ġfair y -âĢĵ âĢĵ -======== ==== -Sm art -GG GG -Ġskelet ons -Ġpup ils -link ed -Ġur gency -en abled -ĠF uck -Ġcoun cill -r ab -U AL -T I -Ġlif es -Ġconf essed -B ug -Ġharm on -ĠCON FIG -ĠNe utral -D ouble -Ġst aple -ĠSH A -Brit ish -ĠSN P -AT OR -oc o -Ġswing ing -ge x -ole on -pl ain -ĠMiss ing -ĠTro phy -v ari -ran ch -Ġ3 01 -4 40 -00000000 00000000 -Ġrest oring -Ġha ul -uc ing -ner g -Ġfut ures -Ġstrateg ist -quest ion -Ġlater al -ĠB ard -Ġs or -ĠRhod es -ĠD owntown -????? 
- -ĠL it -ĠB ened -Ġco il -st reet -ĠPort al -FI LE -ĠG ru -* , -23 1 -ne um -Ġsuck ed -Ġr apper -Ġtend encies -ĠLaure n -cell aneous -26 7 -Ġbrow se -Ġover c -head er -o ise -Ġbe et -ĠG le -St ay -Ġm um -Ġtyp ed -Ġdiscount s -T alk -ĠO g -ex isting -ĠS ell -u ph -C I -ĠAust rian -ĠW arm -Ġdismiss al -Ġaver ages -c amera -Ġalleg iance -L AN -=" # -Ġcomment ators -ĠSet ting -ĠMid west -Ġpharm ac -ĠEX P -Ġstain less -Ch icago -Ġt an -24 4 -Ġcountry side -ĠV ac -29 5 -Ġpin ned -Ġcr ises -Ġstandard ized -T ask -ĠJ ail -ĠD ocker -col ored -f orth -" }, -Ġpat rons -Ġsp ice -Ġm ourn -ĠM ood -Ġlaund ry -Ġequ ip -ĠM ole -y ll -ĠTH C -n ation -ĠSher lock -Ġiss u -ĠK re -ĠAmeric as -ĠA AA -Ġsystem atically -Ġcont ra -ĠS ally -Ġrational e -Ġcar riage -Ġpe aks -Ġcontrad iction -ens ation -ĠFail ure -Ġpro ps -Ġnames pace -Ġc ove -field s -ãĤ ĭ -Ġw ool -ĠC atch -Ġpresum ed -ĠD iana -r agon -ig i -Ġh amm -Ġst unt -ĠG UI -ĠObserv atory -ĠSh ore -Ġsmell s -ann ah -Ġcock pit -ĠD uterte -8 50 -Ġopp ressed -bre aker -ĠCont ribut -ĠPer u -ĠMons anto -ĠAtt empt -Ġcommand ing -Ġfr idge -ĠR in -ĠChe ss -ual ity -Ġo l -Republic an -ĠGl ory -ĠW IN -.... ... 
-ag ent -read ing -Ġin h -J ones -Ġcl icks -al an -Ġ[ ]; -ĠMaj esty -ĠC ed -op us -ate l -à ª -AR C -ĠEc uador -ãĥ ł -ĠK uro -Ġritual s -Ġcapt ive -Ġoun ce -Ġdisag reement -Ġsl og -f uel -P et -M ail -Ġexerc ised -Ġsol ic -Ġrain fall -Ġdev otion -ĠAss essment -Ġrob otic -opt ions -ĠR P -ĠFam ilies -ĠFl ames -Ġassign ments -00 7 -aked own -Ġvoc abulary -Re illy -Ġc aval -g ars -Ġsupp ressed -ĠS ET -ĠJohn s -Ġwar p -bro ken -Ġstat ues -Ġadvoc ated -Ġ2 75 -Ġper il -om orph -ĠF emin -per fect -Ġh atch -L ib -5 12 -Ġlif elong -3 13 -Ġche eks -Ġnum bered -ĠM ug -B ody -ra vel -We ight -ĠJ ak -ĠHe ath -Ġkiss ing -ĠJ UST -Ġw aving -u pload -Ġins ider -ĠPro gressive -ĠFil ter -tt a -ĠBe am -Ġviol ently -ip ation -Ġskept icism -Ġ19 18 -ĠAnn ie -ĠS I -Ġgen etics -Ġon board -at l -ĠFried man -ĠB ri -cept ive -Ġpir ate -ĠRep orter -27 8 -Ġmyth ology -Ġe clipse -Ġsk ins -Ġgly ph -ing ham -F iles -C our -w omen -Ġreg imes -Ġphotograp hed -K at -ĠMA X -Offic ials -Ġunexpected ly -Ġimpress ions -F ront -;;;; ;;;; -Ġsuprem acy -Ġs ang -Ġaggrav ated -Ġabrupt ly -ĠS ector -Ġexc uses -Ġcost ing -ide press -St ack -ĠR NA -ob il -Ġghost s -ld on -at ibility -Top ics -Ġreim burse -ĠH M -ĠDe g -Ġth ief -y et -ogen esis -le aning -ĠK ol -ĠB asketball -Ġf i -ĠSee ing -Ġrecy cling -Ġ[ - -Cong ress -Ġlect ures -P sy -Ġne p -Ġm aid -Ġori ented -A X -Ġrespect ful -re ne -fl ush -ĠUn loaded -re quest -gr id -ĠAltern atively -ĠHug o -Ġdec ree -ĠBuddh ism -and um -And roid -ĠCong o -ĠJoy ce -Ġacknowled ging -hes ive -ĠTom orrow -ĠH iro -th ren -ĠM aced -Ġho ax -ĠIncre ased -ĠPr adesh -W ild -____ __ -16 1 -Ġa unt -Ġdistribut ing -ĠT ucker -ĠSS L -ĠW olves -B uilding -ou lt -ĠLu o -ĠY as -ĠSp ir -ĠSh ape -ĠCamb od -ĠIP v -Ġm l -Ġext rad -39 0 -ĠPenn y -d ream -Ġstation ed -opt ional -ew orthy -. 
-ĠWorks hop -ĠRet ail -ĠAv atar -6 25 -N a -ĠV C -ĠSec ure -M Y -19 88 -oss ip -Ġpro state -Ġund en -Ġg amer -ĠCont ents -ĠWar hammer -ĠSent inel -3 10 -Ġse gregation -ĠF lex -ĠM AY -Ġdr ills -ĠDrug s -Islam ic -Ġsp ur -Ġca fe -Ġimag inary -Ġgu iding -Ġsw ings -ĠThe me -ob y -Ġn ud -Ġbe gging -Ġstr ongh -Ġreject ing -Ġpedest rians -ĠPro spect -R are -s le -Ġconcess ions -ĠConst itutional -Ġbe ams -Ġfib ers -p oon -Ġinstinct s -pro perty -ĠB IG -Sand ers -im ates -Ġco ating -Ġcorps es -ĠTR UE -check ed -Ġ16 6 -A sh -ĠJ S -ĠF iction -Ġcommun al -Ġener getic -oooo oooo -Ġnow adays -IL D -ib o -ĠSU V -R en -Ġdwell ing -Sil ver -Ġt ally -ĠM oving -Ġcow ard -Ġgener als -Ġhorn s -Ġcirc ulated -Ġrob bed -ĠUn limited -Ġharass ed -Ġinhib it -Ġcomp oser -ĠSpot ify -Ġspread s -3 64 -Ġsu icidal -Ġno ises -ĠSt ur -Ġs aga -ĠK ag -is o -Ġtheoret ically -M oney -Ġsimilar ity -Ġslic ed -ut ils -ing es -" - -Ġan th -Ġimp ed -Mod ule -Through out -Ġmen us -comm ittee -and i -ob j -in av -f ired -ĠAb dullah -Ġund ead -Ġfont s -H old -EN G -Ġsustain ability -Ġfl ick -Ġr azor -ĠF est -ĠChar acters -Ġword ing -Ġpopul ist -Ġcritic izing -Ġm use -v ine -Ġcard board -Ġkind ly -Ġfr inge -ĠThe ft -icult ural -Ġgovern ors -Ġ ���� -Ġ16 3 -Ġtime out -ĠA uth -Child ren -A U -Ġred emption -ĠAl ger -Ġ19 14 -Ġw aved -Ġastron auts -og rams -Ġsw amp -ĠFinn ish -Ġcand le -Ġton nes -ut m -Ġr ay -Ġsp un -Ġfear ful -art icles -Ġca us -or ically -ĠRequ ires -ĠG ol -Ġpop e -Ġinaug ural -Ġg le -AD A -ĠIS IL -ĠOff ensive -Ġwatch dog -Ġbal con -ent ity -ĠH oo -Ġgall on -AC C -Ġdoub ling -Ġimpl ication -ĠS ight -Ġdoct r ----- --- -Ġ\ \ -Ġm alt -R oll -Ġâī ¥ -Ġrec ap -add ing -u ces -ĠB end -fig ure -Ġtur key -Ġsoc ietal -ĠT ickets -Ġcommer cially -Ġsp icy -Ġ2 16 -ĠR amp -Ġsuperior ity -à ¯ -ĠTr acker -C arl -ĠC oy -ĠPatri ot -Ġconsult ed -Ġlist ings -Ġsle w -reens hot -ĠG one -Ġ[ ...] 
-30 9 -Ġh ottest -Ø ± -Ġrock y -ĠD iaz -Ġmass age -Ġpar aly -Ġp ony -A z -Ġcart ridge -ĠN Z -Ġsn ack -ĠLam ar -ple ment -ĠLes lie -Ġm ater -Ġsn ipp -24 6 -Ġjoint ly -ĠBris bane -ĠiP od -Ġpump ing -Ġgo at -ĠSh aron -eal ing -Ġcor on -Ġan omal -rah im -ĠConnect ion -Ġsculpt ure -Ġsched uling -ĠD addy -at hing -Ġeyeb rows -Ġcur ved -Ġsent iments -Ġdraft ing -D rop -( [ -Ġnom inal -ĠLeaders hip -ĠG row -Ġ17 6 -Ġconstruct ive -iv ation -Ġcorrupt ed -ger ald -ĠC ros -ĠChe ster -ĠL ap -ãģ ª -OT H -D ATA -Ġal mond -pro bably -I mp -Ġfe ast -ĠWar craft -F lor -Ġcheck point -Ġtrans cription -Ġ20 4 -Ġtwe aks -Ġrel ieve -S cience -Ġperform er -Z one -Ġtur moil -ig ated -hib it -ĠC afe -the med -Ġflu or -ben ch -Ġde com -ĠU nt -ĠBar rett -ĠF acts -Ġt asting -ĠPTS D -ĠSe al -ĠJuda ism -ĠDynam ic -ĠC ors -V e -ĠM ing -ĠTrans form -v on -ĠDef enders -ĠTact ical -ĠV on -ĠUn ivers -Ġdist orted -ĠB reath -?' " -Ġag on -ĠDead ly -Ġl an -ĠCy cle -orn ed -Ġrel iably -Ġgl or -ĠMon key -ãĥ ¡ -Ġad ren -Ġmicrow ave -ĠAl ban -irc raft -dig it -sm art -ĠD read -¯¯¯¯¯¯¯¯ ¯¯¯¯¯¯¯¯ -{ { -ĠRoc hester -Ġsimpl ified -Ġinf licted -Ġtake over -Ġyour selves -ad itional -Ġmus cular -K S -Ġing en -T ax -ĠFe ature -27 7 -Ġcru c -Ġcr ate -Ġun identified -Ġacclaim ed -ĠM anga -ĠFr ances -ĠNep al -ĠG erald -ĠKu wait -Ġsl ain -ĠHe b -ĠG oku -ãģ® æ -28 6 -M rs -ĠC ody -ĠSan ctuary -01 6 -Ġdism ant -Ġdatas et -ĠH ond -b uck -ĠPat terson -Ġpal ette -ĠG D -ic ol -ĠL odge -Ġplanet ary -ak in -ĠRegist ered -ab we -ĠPeters burg -Ġha iled -ĠP iece -S che -ĠDO J -Ġen umer -18 1 -ĠObs erver -ĠB old -f ounded -com merce -Ġexplo its -ĠF inding -UR N -ĠS ne -ĠAc id -ay ette -ĠVal ues -Ġdr astic -Ġarchitect ural -Ġ" . 
-× ķ -ump ed -Ġwra pping -Ġwid ow -ĠSl ayer -l ace -on ce -German y -av oid -Ġtem ples -P AR -à ´ -ĠLuc ifer -ĠFl ickr -l ov -for ces -Ġsc outing -Ġlou der -tes y -Ġbefore hand -Ä ĵ -ĠNe on -ĠW ol -ĠTyp ically -ĠPolit ico --+ -+ -Ġbuild er -Ġder ive -K ill -Ġp oker -Ġambig uous -Ġlif ts -Ġcy t -Ġrib s -ood le -ĠS ounds -h air -ĠSynd rome -t f -Ġproport ional -u id -Ġper taining -ĠKind le -ĠNeg ro -Ġreiter ated -ĠTon ight -oth s -ĠCorn ell -Ġo wing -Ġ20 8 -elf are -oc ating -ĠB irds -Sub scribe -Ġess ays -Ġburd ens -Ġillust rations -ar ious -ER AL -ĠCal cul -Ġx en -ĠLink edIn -ĠJ ung -Ġredes ign -Con nor -29 6 -Ġrevers al -ĠAd elaide -ĠL L -Ġs inking -Ġg um -US H -c apt -ĠGr imm -Ġfoot steps -ĠCB D -isp ers -Ġpro se -Wed nesday -ĠM ovies -ed in -Ġoverturn ed -Ġcontent ious -US B -~~~~~~~~ ~~~~~~~~ -ĠCo pper -Ġpoint less -N V -val ues -olph in -d ain -Ġdepos ited -ĠG W -Ġpreced ed -ĠCl a -ĠGo lem -ĠN im -ĠÎ ² -ĠEngine ers -m iddle -Ġfl att -oper ative -Ġcouncil s -imb abwe -el in -Ġstress ful -ĠL D -Ġres h -l ake -Ġwheel chair -ĠAltern ative -Ġoptim ize -oper ation -Ġpe ek -Ġones elf -ig il -Ġtrans itions -op athy -bl ank -Ġ16 9 -17 1 -________________________________ ________________________________ -Ġl aundering -En c -ĠD EC -Ġwork outs -Ġsp ikes -Ġdin osaurs -Ġdiscrim inatory -P ool -R ather -38 5 -R NA -tes ters -et o -ĠIdent ity -Ġve in -ĠBur ton -Ġarc ade -4 20 -Ult imately -ĠSad ly -à ° -p ill -Ġcub ic -ĠSpect rum -the se -st ates -Ġun official -h awks -ĠEVER Y -Ġrain bow -Ġincarcer ation -and ing -Ġsy ll -ĠEver ton -Ġ17 9 -ĠSer bia -Ġ18 9 -m eter -ĠMic key -Ġant iqu -Ġfact ual -ne ck -ĠN are -n orm -m ust -Ġhigh ways -Ġgl am -Ġdivid ing -ĠSquad ron -ĠMar tha -Ġbirth s -C over -//////// //////// -ĠW ong -Ph ot -ĠA LS -ri o -ĠNon etheless -ĠL emon -Ġ20 6 -ĠE E -Ġderiv ative -ĠWW II -v ote -Ġthere in -Ġsepar ating -44 6 -sy nc -ĠStre ets -Ġr att -Ġmunicip ality -ĠShort ly -Ġmon k -) ," -Ġscr ub -Ġoper atives -Ne ither -Pl ace -ĠLim it -F emale -ĠAct or -Char 
acter -Ġconstit uted -35 7 -Ġprotest ed -ĠSt raw -ĠHe ight -ild a -ĠTy ph -Ġflood s -Ġcos metic -W AY -pert ure -up on -t ons -ess ing -ĠP ocket -Ġro oft -ĠC aucas -Ġant idepress -Ġincomp atible -EC D -Ġoper a -ĠCont est -Ġgener ators -l ime -Def ense -19 87 -for um -Ġsav age -ĠHung arian -n z -Ġmet allic -Ġex pelled -Ġres idency -Ġdress es -66 6 -ĠC lement -f ires -C ategory -Ġge ek -al is -Ġc emetery -educ ated -Ġc rawl -ĠUn able -ĠT yson -ak is -Ġp ardon -ĠW ra -Ġstrengthen ed -ĠF ors -33 5 -ĠH C -ĠM ond -Ġvisual s -ĠBeat les -ett lement -Ġ ï -g ro -Ġb ash -Ġpo orest -Ġex cel -Ġaspir ations -ĠM unicip -ens ible -Ġceremon ies -Ġintimid ation -ĠCON TR -be ck -ĠK ap -as u -Ġtradem arks -ĠS ew -ĠComp etition -net work -ĠAr ri -ĠT et -Ro aming -W C -D at -Ġso b -Ġpair ing -Ġoverd ose -SA Y -ab er -Ġrev olt -ĠF ah -act ing -e q -est ation -F ight -ĠMar ks -27 3 -Ġ17 8 -R aw -ãģ ĭ -34 9 -bl ocks -Ġver ge -est ine -ĠPod esta -Ġinv asive -Ġprofound ly -ĠA o -e ach -Ġl est -inter pret -Ġshr inking -Ġerr one -Ġche es -ly s -ĠI vy -ĠDirect ory -Ġhint ed -V ICE -Ġcontact ing -ĠG ent -he i -Ġlabel ing -Ġmerc ury -ĠL ite -Ġexp ires -Ġdest abil -rit is -c u -Ġfeather s -Ġste er -Ġprogram med -ĠV ader -Go ing -ĠE lim -Ġy o -ĠMic he -Ġ20 3 -Ġslee ves -Ġb ully -ĠHum ans -36 8 -Ġcomp ress -ĠBan ner -AR S -Ġa while -Ġcal ib -Ġspons orship -ĠDiff iculty -ĠP apers -Ġident ifier -} . 
-Ġy og -ĠSh ia -Ġclean up -Ġvib e -int rodu -im ming -Austral ia -Ġout lines -ĠY outube -tr ain -ĠM akes -Ġde ported -Ġcent r -ĠD ug -ĠB oulder -ĠBuff y -Ġinj unction -ĠHar ley -ĠG roups -ĠD umbledore -ĠCl ara -Ġ" - -Ġsacrific ed -ep h -Sh adow -ib ling -Ġfreel ance -Ġevident ly -ph al -Ġret ains -M ir -Ġfin ite -d ar -ĠC ous -Ġrep aired -Ġperiod ic -Ġchampions hips -Ġaster oid -bl ind -Ġexpress ly -ĠAst ros -Ġsc aled -Ġge ographical -ĠRap ids -En joy -Ġel astic -ĠMoh amed -Mark et -be gin -Ġdisco vers -Ġtele communications -Ġscan ner -Ġen large -Ġsh arks -Ġpsy chedel -ĠRou ge -Ġsnap shot -is ine -X P -Ġpestic ides -ĠL SD -ĠDist ribution -re ally -Ġde gradation -Ġdisgu ise -Ġbi om -ĠEX T -Ġequ ations -Ġhaz ards -ĠComp ared -) * -Ġvirt ues -Ġeld ers -Ġenh ancing -ĠAc ross -er os -ang ling -Ġcomb ust -ucc i -Ġconc ussion -Ġcontrace ption -ĠK ang -Ġexpress es -Ġa ux -ĠP ione -Ġexhib its -Deb ug -OT AL -ĠAl ready -ĠWheel er -Ġexp ands -? : -Ġreconc iliation -Ġpir ates -Ġpur se -Ġdiscour age -Ġspect acle -R ank -Ġwra ps -ĠTh ought -Ġimp ending -O pp -ĠAng lo -ĠE UR -Ġscrew ed -ret ched -Ġencour agement -mod els -Ġconf use -mm m -ĠVit amin -âĸij âĸij -C ru -Ġkn ights -Ġdisc ard -Ġb ishops -ĠW ear -ĠGar rett -k an -ãĥ Ł -Ġmascul ine -cap ital -ĠA us -Ġfat ally -th anks -ĠA U -ĠG ut -12 00 -Ġ 00000000 -Ġsur rog -ĠBI OS -ra its -ĠWat ts -Ġresur rection -ĠElect oral -ĠT ips -4 000 -Ġnut rient -Ġdepict ing -Ġspr ink -Ġm uff -ĠL IM -ĠS ample -ps c -ib i -gener ated -Ġspec imens -Ġdiss atisf -Ġtail ored -Ġhold ings -ĠMonth ly -ĠE at -po ons -Ġne c -ĠC age -ĠLot us -ĠLan tern -Ġfront ier -Ġp ensions -Ġj oked -ĠHard y -=-=- =-=- -r ade -U ID -Ġr ails -Ġem it -Ġsl ate -Ġsm ug -Ġsp it -ĠCall s -ĠJac obs -f eat -ĠU E -Ġrest ruct -Ġregener ation -Ġenerg ies -ĠCon nor -OH N -ĠChe ese -Ġg er -Ġresur rect -man agement -N W -Ġpres ently -ĠBru ins -M ember -ĠM ang -id an -Ġboost ing -w yn -+ . 
-requ isite -ĠNY PD -ĠMe gan -ĠCond itions -Ġp ics -nes ium -ĠR ash -Ġ17 4 -ĠD ucks -Ġemb ro -z u -on ian -rel igious -Ġc raz -ĠAC A -ĠZ ucker -EM A -ĠPro s -We apon -ĠKn ox -ĠAr duino -Ġst ove -Ġheaven s -ĠP urchase -Ġher d -Ġfundra iser -Dig ital -5 000 -Ġprop onents -/ âĢĭ -Ġj elly -ĠVis a -Ġmon ks -Ġadvance ment -ĠW er -Ġ18 7 -e us -ert ility -Ġfet al -Ġ19 36 -L o -Ġout fits -Ġstair case -b omb -Ġcustom ized -cl air -T ree -Ġm apped -ĠConsider ing -ĠTor res -Ġmeth yl -Ġapprox imate -Ġdo om -ĠHans en -Ġc rossover -Ġstand alone -ä ¼ -Ġinv ites -Ġgra veyard -Ġh p -Donald Trump -Ġesc ort -G ar -Ġpredec essors -Ġh ay -Ġen zyme -ĠStra ight -vis ors -I ng -ane ously -ĠApp lied -Ġf ec -ĠDur ant -Ġout spoken -or b -Ġz eal -Ġdisgr ace -' ). -ĠChe ng -28 9 -ĠRen a -ĠSu icide -29 4 -Ġout raged -ĠNew man -ĠN vidia -ĠA ber -ĠB ers -Ġrecre ation -Wind ow -ĠD P -x e -Ġped oph -Ġfall out -ambo o -Ġpresent ations -ĠApp s -Ġh tml -3 45 -ĠX XX -Ġrub bing -ĠLe ather -Ġhum idity -se ys -est ablished -ĠUn its -64 6 -Ġrespect able -A uto -Ġthri ving -ĠInn ovation -ang s -Ext ra -reg ulation -29 8 -p ick -Ex amples -ĠC J -Att ack -Ġdr acon -L T -Ġstick er -re rs -Ġsun ny -I ss -reg ulated -d im -ĠAb stract -Ġhus bands -Off ice -om ination -it ars -AN GE -asc al -ĠK ris -ĠInf antry -Ġm alf -ĠA the -ĠR ally -bal anced -................ ........ 
-OU P -Ġmole cule -met ics -ĠSpl it -ĠInstruct ions -ĠN ights -c ards -Ġt ug -Ġcon e -å Ń -Ġt x -ĠDisc ussion -Ġcatast rophe -pp e -g io -Ġcommun ism -Ġhal ted -ĠGu ant -cle an -ĠSc hed -ĠK anye -Ġw ander -ĠSer iously -Ġ18 8 -enn ial -f ollow -product ive -ĠFl ow -ĠS ail -Ġc raw -Ġsim ulations -or u -ang les -ĠN olan -Ġmen stru -4 70 -Ġ20 7 -aj a -Ġcas ually -board ing -Ġ2 22 -ov y -ĠN umbers -um at -O E -28 7 -ĠCle mson -Ġcert s -Ġsl id -ĠT ribe -Ġto ast -Ġfort unes -Ġf als -ĠComm ittees -Ġg p -Ġf iery -ĠN ets -ĠAn ime -Pack age -ĠComp are -l aughter -in fect -Ġatroc ities -Ġjust ices -Ġins ults -ĠVern on -Ġsh aken -Ġperson a -est amp -36 7 -br ain -Ġexperiment ing -K en -ĠElect ronics -Ġ16 1 -dom ain -Ġgraph ical -b ishop -Ġwho pping -ĠEv angel -Ġadvertis ers -ĠSpe ar -Ġb ids -Ġdestro ys -ut z -Ġunders c -ĠAD D -Ġan ts -ĠC um -ipp les -ĠF ill -Ġgl anced -Ġind icted -ĠE ff -Ġmis con -ĠDes ktop -Ġab ide -ãĥ Ģ -ĠI o -ĠC oul -Ġcaps ule -ĠCh rys -M ON -Ġund es -ĠI RA -Ġc itation -Ġdict ate -ĠNet works -ĠConf lict -ĠSt uff -x a -is ec -ĠChem istry -Ġquarter ly -William s -an an -O pt -ĠAlexand ria -out heastern -ĠSpring field -ĠBlack s -Ġge ography -24 2 -Ġut most -ĠEx xon -ab outs -E VA -ĠEn able -ĠBar r -Ġdisag reed -ĠCy prus -Ġdement ia -Ġlab s -Ġubiqu itous -ĠLO VE -Ġconsolid ated -s r -Ġcream y -ĠTim ber -Reg ardless -ĠCert ificate -Ġ" ... 
-ogen ous -Capt ain -Ġinsult ing -ĠSor os -ĠInst r -ĠBulgar ia -bet ter -Ġsuck ing -ĠDavid son -at z -Ġcoll ateral -g if -Ġplag ued -ĠC ancel -ĠGard ner -R B -Ġsix teen -Rem ove -ur istic -c ook -R od -Ġcompr ising -f le -) âĢĶ -ĠVik ing -g rowth -agon al -Ġsr f -af ety -m ot -N early -st own -ĠF actor -Ġautom obile -Ġproced ural -m ask -amp ires -Ġdisapp ears -j ab -3 15 -Ġ19 51 -ne eded -Ġd aring -le ader -Ġp odium -Ġun healthy -Ġm und -Ġpy ramid -oc re -Ġkiss ed -Ġdream ed -ĠFant astic -ĠG ly -å Ĭ -Ġgreat ness -Ġsp ices -Ġmet ropolitan -Ġcomp uls -i ets -101 6 -ĠSh am -ĠP yr -fl ies -ĠMid night -Ġswall owed -Ġgen res -ĠL ucky -ĠRew ards -Ġdisp atch -ĠI PA -ĠApp ly -Ġa ven -al ities -3 12 -th ings -Ġ( ). -Ġm ates -ĠS z -ĠC OP -ol ate -O FF -Ġre charge -c aps -ĠYork er -ic one -Ġgal axies -ile aks -D ave -ĠP uzz -ĠCelt ic -ĠA FC -27 6 -ĠS ons -Ġaffirm ative -H or -Ġtutorial s -ĠC ITY -ĠR osa -ĠExt ension -Ser ies -Ġf ats -Ġr ab -l is -Ġun ic -Ġe ve -ĠSp in -Ġadul thood -ty p -Ġsect arian -Ġcheck out -ĠCy cl -S ingle -Ġmart yr -Ġch illing -88 8 -ou fl -Ġ] ; -Ġcongest ion -m k -ĠWhere as -Ġ19 38 -ur rencies -er ion -Ġbo ast -ĠPat ients -Ġch ap -ĠB D -real DonaldTrump -Ġexam ines -h ov -Ġstart ling -ĠBab ylon -w id -om ew -br ance -ĠOd yssey -w ig -Ġtor ch -ĠV ox -ĠMo z -ĠT roll -ĠAn s -Similar ly -ĠF ul -00 6 -Un less -ĠAl one -st ead -ĠPub lisher -r ights -t u -ĠDoes n -Ġprofession ally -Ġcl o -ic z -Ġste als -Ġ á -19 86 -Ġst urdy -ĠJoh ann -Ġmed als -Ġfil ings -ĠFr aser -d one -Ġmult inational -Ġf eder -Ġworth less -Ġp est -Yes terday -ank ind -Ġg ays -Ġb orne -ĠP OS -Pict ure -Ġpercent ages -25 1 -r ame -Ġpot ions -AM D -ĠLeban ese -Ġr ang -ĠL SU -ong s -Ġpen insula -ĠCl ause -AL K -oh a -ĠMac Book -Ġunanim ous -Ġl enders -Ġhang s -Ġfranch ises -ore rs -ĠUp dates -Ġisol ate -and ro -S oon -Ġdisrupt ive -ĠSur ve -Ġst itches -ĠSc orp -ĠDomin ion -Ġsupp lying -Ar g -Ġtur ret -ĠL uk -Ġbr ackets -* ) -ĠRevolution ary -ĠHon est -Ġnot icing -ĠSh annon -Ġafford ed -Ġth a 
-ĠJan et -! -- -ĠNare ndra -ĠPl ot -H ol -se ver -e enth -Ġobst ruction -Ġ10 24 -st aff -j as -or get -sc enes -l aughs -ĠF argo -cr ime -Ġorche str -Ġde let -ili ary -rie ved -Ġmilit ar -ĠGreen e -âĹ ı -ãģ ¦ -ĠGu ards -Ġunle ashed -ĠWe ber -Ġadjust able -Ġcal iber -Ġmotiv ations -Ġà ł -m Ah -ĠL anka -hand le -Ġp ent -ĠR av -ĠAng ular -ĠK au -umb ing -Ġphil anthrop -Ġde hyd -Ġtox icity -e er -ĠY ORK -w itz -å ¼ -ĠI E -commun ity -ĠA H -Ġret ali -Ġmass ively -ĠDani els -ĠD EL -Ġcar cin -Ur l -Ġrout ing -ĠNPC s -ĠR AF -ry ce -Ġwa ived -ĠGu atem -Every body -Ġco venant -Ġ17 3 -Ġrelax ing -Ġqu art -al most -Ġguard ed -ĠSold iers -ĠPL AY -Ġout going -L AND -Ġre write -ĠM OV -ĠIm per -ĠS olution -Ġphenomen al -Ġl ongevity -Ġimp at -ĠN issan -ir ie -Ġod or -ĠZ ar -ok s -Ġmilit ias -ĠSP EC -Ġtoler ated -ars er -ĠBrad ford -+ , -Ġsur real -s f -Can adian -Ġresemb lance -Ġcarbohyd rate -VI EW -Ġaccess ory -me al -larg est -ieg el -Some one -Ġtoug hest -os o -Ġfun nel -Ġcondemn ation -lu ent -Ġw ired -ĠSun set -Jes us -ĠP ST -ĠP ages -ĠTy coon -ĠP F -Ġselect ions -Ġ ठ-part isan -Ġhigh s -ĠR une -Ġcraft s -le ad -ĠParent s -Ġre claim -ek er -ĠAll ied -ae per -Ġlo oming -Ġbenefic iaries -ĠH ull -Stud ents -Jew ish -d j -Ġp act -tem plate -ĠOffic ials -ĠBay lor -Ġhe mp -Ġyouth s -ĠLevel s -ĠX iao -ĠC hes -Ġende avor -ĠRem oved -Ġhipp ocamp -H ell -ãĤ Ĭ -80 5 -Ġd inosaur -ĠWr ath -ĠIndones ian -Ġcalcul ator -ĠD ictionary -Ġ4 20 -ĠM AG -( _ -! 
, -t arians -Ġrestrict ing -rac use -Ġweek day -OU NT -Ġsh rugged -leg round -Ġb ald -ĠDo ctors -Ġt outed -ĠMax well -Ġ2 14 -Ġdiplom at -Ġrep ression -Ġconstitu ency -v ice -r anked -ĠNap oleon -g ang -ĠFore ver -t un -Ġbul b -ĠPD T -ĠC isco -V EN -Ġres umed -Ste ven -ĠManit oba -Ġfab ulous -ĠAg ents -19 84 -Ġam using -ĠMyster ies -Ġor thodox -fl oor -Ġquestion naire -Ġpenet rate -Ġfilm makers -ĠUn c -Ġst amped -Ġth irteen -Ġout field -Ġforward ed -Ġapp ra -Ġa ided -t ry -Ġunf ocused -ĠL iz -ĠWend y -ĠSc ene -Ch arg -Ġreject s -Ġleft ist -ĠProv idence -ĠBr id -reg n -Ġprophe cy -ĠL IVE -4 99 -Ġfor ge -ĠF ML -Ġintrins ic -ĠF rog -Ġw ont -ĠH olt -Ġfam ed -CL US -aeper nick -ĠH ate -ĠC ay -Ġregister ing -ort ality -rop y -ocaly ptic -a an -n av -Ġfasc ist -IF IED -Ġimpl icated -ĠRes ort -ĠChand ler -ĠBr ick -P in -ys c -Us age -ĠHel m -us ra -âĺħ âĺħ -ĠAb bas -Ġunanim ously -Ġke eper -Ġadd icted -?? ? -Ġhelm ets -Ġant ioxid -aps ed -80 8 -gi ene -Ġwa its -Ġmin ion -ra ved -ĠP orsche -Ġdream ing -Ġ17 1 -ĠC ain -Ġun for -ass o -ĠConfig uration -k un -hard t -Ġn ested -ĠL DS -L ES -Ġt ying -en os -Ġc ue -ĠMar qu -sk irts -Ġclick ed -Ġexp iration -ĠAccording ly -ĠW C -Ġbless ings -Ġaddict ive -ĠN arr -y x -ĠJagu ars -Ġrent s -ĠS iber -Ġt ipped -ous se -ĠFitz gerald -Ġhier arch -out ine -Ġwa velength -> . 
-ch id -ĠProcess ing -/ + -r anking -E asy -ĠConst ruct -Ġt et -ins ured -H UD -Ġqu oting -Ġcommun icated -in x -Ġin mate -Ġerect ed -ĠAbs olutely -ĠSure ly -Ġun im -ĠThr one -he id -Ġcl aws -Ġsuper star -ĠL enn -ĠWh is -U k -ab ol -Ġsk et -ĠN iet -Ġper ks -Ġaff inity -Ġopen ings -phas is -Ġdiscrim inate -T ip -v c -Ġgr inding -ĠJenn y -Ġast hma -hol es -ĠHom er -Ġreg isters -ĠGl ad -Ġcre ations -Ġlith ium -Ġappl ause -unt il -Just ice -ĠTur ks -Ġsc andals -Ġb ake -t ank -M ech -ĠMe ans -ĠM aid -Republic ans -is al -wind ows -ĠSant os -Ġveget ation -33 8 -t ri -Ġfl ux -ins ert -Ġclar ified -Ġmort g -ĠCh im -ĠT ort -Ġdiscl aim -met al -ĠAs ide -Ġindu ction -Ġinf l -Ġathe ists -amp h -Ġe ther -ĠV ital -ĠBu ilt -M ind -Ġweapon ry -S ET -Ġ18 6 -ad min -g am -cont ract -af a -Ġderiv atives -Ġsn acks -Ġch urn -E conom -Ġca pped -ĠUnder standing -ĠH ers -ĠI z -Ġd uct -I ENT -augh ty -Ġâľ Ķ -ĠN P -Ġsa iling -In itialized -Ġt ed -Ġreact ors -ĠL omb -Ġcho ke -ĠW orm -Ġadm iration -Ġsw ung -ens ibly -Ġr ash -ĠGo als -ĠImport ant -Sh ot -ĠR as -Ġtrain ers -ĠB un -Work ing -Ġhar med -ĠPand ora -ĠL TE -Ġmush room -ĠCH AR -ĠF ee -ĠM oy -B orn -ol iberal -ĠMart ial -Ġgentle men -Ġling ering -Offic ial -Ġgra ffiti -ĠN ames -D er -Ġqu int -ist rate -aze era -ĠNOT ICE -ĠFlore nce -Ġpay able -Ġdep icts -ĠSpe cies -He art -âĶĢâĶĢâĶĢâĶĢ âĶĢâĶĢâĶĢâĶĢ -Ġencl osed -Incre ases -D aily -ĠL is -Ġenact ment -ĠB acon -ĠSt eele -dem and -Ġ18 3 -Ġmouth s -Ġstr anded -Ġenhance ment -01 1 -ĠWh ats -Ġhe aled -en y -ĠR ab -Ġ3 40 -ĠLab yrinth -ro ach -ĠY osh -ĠCl ippers -Ġconcert s -Intern et -35 5 -Ġstick ers -Ġter med -ĠAx e -Ġgrand parents -Fr ance -ĠCl im -ĠU h -ul ic -Ġthr ill -cent ric -ĠOver view -ĠCond uct -Ġsubstant ive -Ġ18 2 -m ur -Ġstr ay -ĠCo ff -Ġrep etitive -ĠFor gotten -Ġqual ification -ew itness -ĠZ imbabwe -Ġsim ulated -ĠJ D -25 3 -ĠW are -Ġun sc -T imes -Ġsum mons -Ġdis connected -Ġ18 4 -ci us -ĠGu jar -od ka -Ġer ase -ĠTob acco -elect ed -Ġun cont -ĠShe pard -ĠL amp -Ġalert ed 
-Ġoper ative -arn a -u int -Ġneglig ence -ac ements -Ġsup ra -Ġprev ail -ĠSh ark -Ġbel ts -ãģ « -Ġt ighter -Engine ers -Ġin active -Ġexp onent -ĠWill ie -a ples -Ġhe ir -ĠH its -ian n -ĠS ays -Ġcurrent s -ĠBeng al -Ġar ist -B uffer -Ġbree ze -ĠWes ley -Col a -Ġpron oun -Ġde ed -ĠK ling -Ġof t -Ġinf lict -Ġpun ishing -Ġn m -ik u -OD UCT -01 4 -Ġsubsid y -ĠDE A -ĠHer bert -ĠJ al -B ank -Ġdef erred -Ġship ment -B ott -Ġal le -b earing -HT ML -Off line -Ġ2 13 -Ġscroll ing -Ġsc anned -ĠLib yan -ĠT OP -ch rom -d t -col umn -Psy NetMessage -Z ero -Ġtor so -0 50 -âķ IJ -Ġimp erson -ĠSchw artz -ud ic -Ġpiss ed -ĠS app -25 7 -ĠIS Ps -og l -Ġsuper vised -Ġad olescent -Ġatt ained -ĠDel ivery -ĠB unny -Ġ19 37 -Ġmini ature -Ġo s -Ġ3 70 -60 8 -ĠMour inho -Ġinn ate -Ġtem po -ĠN M -ĠFall en -00 9 -Ġprov ocative -Stream er -ĠBened ict -ĠBol she -Ġt urtle -ĠPC B -ĠEqu al -Direct or -ĠR end -Ġflu ids -Author ities -Ġcous ins -requ ency -ĠNeigh bor -s ets -sh ared -Char les -pass word -Ġg ears -Ġ2 11 -ĠHard ware -ri ka -Ġup stream -H om -Ġdisproportion ately -iv ities -Ġund efined -Ġelect rons -Ġcommem or -Event ually -Ġ> < -Ġir responsible -2 18 -ĠRe leased -ĠO VER -ĠI GN -ĠB read -st ellar -ĠS age -tt ed -dam age -ed ition -ĠPre c -Ġl ime -Ġconf inement -Ġcal orie -we apon -Ġdiff ering -ĠS ina -m ys -am d -Ġintric ate -k k -ĠP AT -ã o -st ones -lin ks -Ġr anch -Sem itic -Ġdifferent iate -ĠS inger -occup ied -Ġfort ress -c md -Ġinter ception -ĠAnk ara -Ġre pt -ĠSol itaire -Ġrem ake -p red -Ġd ared -aut ions -ĠB ACK -Run ning -Ġdebug ging -Ġgraph s -3 99 -ĠNig el -Ġb un -Ġpill ow -Ġprog ressed -fashion ed -Ġob edience -ER N -Ġrehe ars -C ell -t l -S her -Ġher ald -ĠPay ment -ĠC ory -ĠDe pt -Ġrep ent -ĠWe ak -uck land -Ġple asing -Ġshort ages -Ġjur ors -ĠK ab -q qa -Ant i -Ġw ow -ĠRC MP -Ġt sun -ĠS ic -Ġcomp rises -Ġsp ies -Ġprec inct -n u -Ġur ges -Ġtim ed -Ġstrip es -ĠB oots -Ġy en -Adv anced -Ġdisc rete -ĠArch angel -employ ment -D iff -Ġmon uments -Ġ20 9 -work er -Ġ19 6 -ĠI g -utter 
stock -T PS -J ac -Ġhomeless ness -Ġcomment ator -Ġrac ially -f ing -se ed -E le -ell ation -Ġeth anol -Ġpar ish -ĠD ong -ĠAw akening -Ġdev iation -ĠB earing -ĠTsu k -Ġrec ess -Ġl ymph -ĠCann abis -å ľ -ĠNEW S -Ġd ra -ĠStef an -ĠWr ong -ĠS AM -Ġloose ly -Ġinterpre ter -ĠPl ain -Go vernment -Ġbigot ry -Ġgren ades -ave z -pict ured -Ġmand ated -ĠMon k -ĠPed ro -Ġl ava -27 4 -Ġcyn ical -ĠScroll s -l ocks -M p -Ġcon gregation -orn ings -ph il -ĠI bid -Ġf erv -Ġdisapp earing -Ġarrog ant -sy n -ĠMa ver -ĠSu it -24 1 -Ġab bre -ack ers -P a -ĠY el -Whe never -Ġ23 5 -ĠV ine -ĠAn at -Ġext inct -LE T -Ġexecut able -V ERS -ox ide -D NA -ĠP rel -Ġresent ment -Ġcompr ise -ĠAv iv -Ġinter ceptions -Ġprol ific -IN A -ĠEr in -though t -2 19 -ĠPsychiat ry -un ky -chem ist -H o -ĠMcC oy -Ġbr icks -L os -ri ly -ĠUS SR -Ġr ud -Ġl aud -ĠW ise -ĠEmer ald -Ġrev ived -Ġdam ned -ĠRep air -id em -ct ica -Ġpatri arch -ĠN urs -me g -Ġcheap est -re ements -empt y -ĠCele br -Ġdepri vation -ch anted -ĠTh umbnails -E nergy -ĠEth an -ĠQ ing -Ġopp oses -W IND -v ik -ĠM au -ĠS UB -66 7 -G RE -ĠVol unte -nt on -C ook -å IJ -es que -Ġplum met -Ġsu ing -Ġpron ounce -Ġresist ing -ĠF ishing -ĠTri als -Ġy ell -Ġ3 10 -Ġin duct -Ġpersonal ized -oft en -R eb -EM BER -Ġview point -Ġexist ential -() ) -rem ove -MENT S -l asses -Ġev apor -Ġa isle -met a -Ġreflect ive -Ġentit lement -Ġdev ised -mus ic -asc ade -Ġwind ing -off set -Ġaccess ibility -ke red -Bet ter -ĠJohn ston -th inking -S now -ĠCroat ia -ĠAt omic -27 1 -34 8 -Ġtext book -ĠSix th -Ġ اÙĦ -Ġsl ider -ĠBur ger -b ol -S ync -Ġgrand children -Ġc erv -+ ) -Ġe ternity -Ġtweet ing -Ġspec ulative -Ġpiv otal -ĠW P -ĠT ER -ynam ic -Ġu pl -ĠC ats -per haps -Ġclass mates -Ġblat ant -' - -Ġl akh -ant ine -ĠB org -i om -/ ( -ĠAthlet ic -Ġs ar -OT A -ĠHoff man -Never theless -Ġad orable -Ġspawn ed -Ass ociated -ĠDom estic -Ġimpl ant -ĠLux em -ĠK ens -Ġp umps -ĠS AT -Att ributes -50 9 -av our -Ġcentral ized -ĠT N -Ġfresh ly -ĠA chieve -Ġouts iders -her ty -ĠRe e -ĠT 
owers -ĠD art -ak able -Ġm p -ĠHeaven ly -Ġr ipe -ĠCarol ine -ry an -Ġclass ics -Ġret iring -Ġ2 28 -Ġa h -Ġdeal ings -Ġpunch ing -ĠChap man -O ptions -max well -vol ume -Ġst al -Ġex ported -ĠQu ite -Ġnumer ical -B urn -F act -ĠKey stone -Ġtrend ing -Ġalter ing -ĠAfric ans -47 8 -ĠM N -ĠKn ock -Ġtempt ation -Ġprest ige -Over view -ĠTrad itional -ĠBah rain -Priv ate -ĠH OU -Ġbar r -ĠT at -C ube -US D -ĠGrand e -ĠG at -ĠFl o -Ġres ides -Ġind ec -vol ent -Ġperpet ual -ub es -Ġworld view -ĠQuant um -Ġfil tered -Ġen su -orget own -ERS ON -ĠM ild -37 9 -OT T -à ¥ -Ġvit amins -Ġrib bon -Ġsincere ly -ĠH in -Ġeight een -Ġcontradict ory -Ġgl aring -Ġexpect ancy -Ġcons pir -Ġmon strous -Ġ3 80 -re ci -Ġhand ic -Ġpump ed -Ġindic ative -Ġr app -Ġav ail -ĠLEG O -ĠMar ijuana -19 85 -ert on -Ġtwent ieth -################ ################ -ĠSw amp -Ġval uation -Ġaffili ates -adjust ed -ĠFac ility -26 2 -Ġenz ymes -itud inal -Ġimp rint -S ite -Ġinstall er -ĠT RA -m ology -lin ear -ĠCollect ive -ig ating -ĠT oken -Ġspec ulated -K N -ĠC ly -or ity -Ġdef er -Ġinspect ors -appro ved -R M -ĠSun s -Ġinform ing -ĠSy racuse -ib li -7 65 -Ġgl ove -Ġauthor ize -âĢ¦âĢ¦âĢ¦âĢ¦ âĢ¦âĢ¦âĢ¦âĢ¦ -ĠCru ise -Ġcontract ing -she ll -IF E -ĠJew el -p ract -ĠPhot oshop -ĠKnow ing -h arm -Ġattract ions -ad an -et us -01 8 -w agen -Al t -Ġmultip ly -Ġequ ilibrium -: { -ĠF ighters -ĠEd gar -Ġfour teen -Go vern -Ġmis use -Ġab using -Ġancest ry -ram er -64 4 -Ġwor ms -Ġthick er -ĠComb ine -Ġpeas ants -Ġv ind -Ġcon quest -Ġm ocked -Ġc innamon -ĠC ald -ĠGall up -Ġavoid ance -Ġincarn ation -ĠStr at -Ġt asted -ent a -ĠN eal -p ared -Ġtermin ology -ject ion -Scient ists -ĠIN S -ĠDe e -Ġdirect ories -R oad -ĠSh ap -br ight -ĠDirect ors -ĠCol umn -Ġb ob -Ġprefer ably -Ġgl itch -f urt -Ġe g -id is -C BC -Ġsur rendered -Ġtest ament -33 6 -ug gest -ĠN il -an other -Ġpat hetic -ĠDon na -Ġ2 18 -ĠA very -Ġwhis key -Ġf ixture -ĠCon quest -Ġbet s -O cc -ĠLe icester -] ." 
-Ġ) ); -Ġfl ashes -45 6 -Ġmask ed -ge bra -Ġcomput ed -che l -aud er -Ġdefe ats -ĠLiber ation -ĠOs ama -ĠV ive -Ch anges -Ch annel -Ġtar iffs -Ġm age -ĠS ax -Ġinadvert ently -ĠC RE -ĠRe aper -ink y -gr ading -Ġstere otyp -Ġcur l -ĠF ANT -Ġfram eworks -M om -ĠAn ch -Ġflav our -car bon -Ġperm itting -let cher -ĠMo zilla -ĠPark ing -ĠCh amp -Sc roll -Ġmurd erer -Ġrest ed -Ġow es -ĠP oss -AD D -IF F -res olution -ĠMin ing -Ġcompar ative -D im -Ġneighbour ing -ĠA ST -ĠT oxic -Ġbi ases -Ġgun fire -ur ous -ĠMom ent -19 83 -Ġper vasive -tt p -ĠNorm ally -r ir -S arah -ĠAlb any -Ġun sett -ĠS MS -ip ers -l ayer -ĠWh ites -up le -Ġtur bo -ĠLe eds -Ġthat s -ĠMin er -M ER -ĠRe ign -Ġper me -ĠBl itz -Ġ19 34 -Ġintimid ating -t ube -Ġecc entric -ab olic -box es -ĠAssoci ates -v otes -Ġsim ulate -um bo -aster y -Ġship ments -FF FF -an th -Ġseason ed -Ġexperiment ation -âĸ ł -law s -Me et -idd les -ant ics -R ating -IS IS -h ift -Ġfront s -b uf -01 7 -Ġun att -ĠD il -le ases -ĠGard ens -77 7 -t ouch -ve ll -45 8 -Ġ= ==== -s aving -Ġer osion -ĠQu in -Ġearn s -Ġaccomplish ment -ĠWe i -Ġ< [ -____ _ -Ġir rig -ĠT eddy -Ġconqu ered -ĠArm ored -Ġassert s -Ġmanip ulating -r é -Ġtranscript s -G allery -Ġplot ting -Ne il -Ġbetray al -load er -ĠS ul -Ġdispl acement -Ġroy alty -ĠW I -he it -ĠDev ices -alle l -Ġmunicipal ities -Ġcan al -St ars -ĠU AE -Ġ" âĢ¦ -ĠC U -ab ove -Ġreson ance -ĠguiActive Un -add ed -ĠBra ves -ĠI bn -Ġhere by -ĠB RE -Ġshare holder -ĠH ir -ĠJ i -Ġstrange ly -Ġadm ired -Ġpl ight -Ġb achelor -ĠP ole -cipl inary -T ony -ĠArmen ian -Ġun man -ĠZion ist -St age -isco ver -Ġautom otive -Ġs idelines -Ġsl ick -ĠRena issance -ĠF UN -Im ages -ĠH aj -Ġp ing -Ġshort cut -ĠBl vd -ĠLook s -Ġbur sts -Ġcl amp -Ġm ish -Ġsort ing -Ġpatri ot -Ġcorrect ness -ĠScand inav -ĠCaval iers -p ython -az ar -Ġ3 75 -ĠJa une -40 9 -Ġdetrim ental -Ġstab bing -Ġpoison ed -Ġf ountain -oc ent -or st -ĠMar i -Ġr ains -ĠO vers -ĠInst itution -ud get -AM Y -t ale -ĠK R -ĠPr ices -Ġhead aches -Ġlands l -ĠA ura 
-Bon us -ĠZ hao -ĠH ip -Ġhop s -ĠKurd istan -Ġexplo iting -ry n -Ġhypocr isy -op ening -Ġgun shot -Ġw ed -inter stitial -Inter stitial -Ġam en -Bre aking -Ġmarket ed -W ire -ĠC rowd -Contin ue -ĠK nown -ĠEffect ive -ore an -iz ons -Jose ph -Ġescal ation -us ername -Ġcur tain -AT ES -ĠP AR -ĠM iy -Ġcounter fe -l ene -Ġcont enders -d aily -ĠAs c -ĠPhill ip -most ly -Ġfil ename -he ne -Ġresemb ling -Ġst aging -ĠCh loe -Ġw iring -H on -ĠRen ew -ott age -ĠHy brid -m uch -Ġstro kes -Ġpolicy makers -AP TER -ĠArk ham -pl ot -Ġassist ants -Ġde port -ĠSe ga -Ġinflu enza -ĠC ursed -ĠK obe -Ġskin ny -Prov ider -ĠR ip -Ġincrement al -product s -B F -Ġd ome -ĠC redits -Ġlos ers -int s -ĠBet ty -ĠTal ent -ĠD AM -L v -E ss -Ġd ens -tem p -J udge -od ic -Ġ' ( -UR ES -ets k -V O -Ġretrie ved -Ġarchitect s -Ù ĩ -Ġeth ic -ĠSecond ary -st ocks -ad ia -Ġ3 25 -ĠOp inion -Ġsimultane ous -Ġd izz -ul p -Ġsmugg ling -ipp ery -R andom -f acing -ĠD as -Ġstock p -Ġdiscl osures -po inter -Ġcor al -ĠSe lection -ĠP ike -ival ent -Ġruth less -ĠR im -Ġensu ing -ĠExper iment -Ġcongress man -Ġbelie ver -Ġun specified -ĠM ord -Ġknowledge able -ĠV ERY -T X -Ġstra ps -Ġtur f -apesh ifter -Ġmar ital -Ġfl ock -ãģ Ĩ -26 3 -AM ES -ĠOpp osition -Ġtre asures -ĠG OD -Ġmodel ed -ĠWOR LD -Ġ( [ -ĠUs age -H F -Ġ$ ( -uss ed -Ġpione er -E ight -par se -b read -rit z -ĠMir anda -ĠK ant -++ ) -ore n -Ġprov oked -Ġbre eds -ĠIn cludes -ĠPast ebin -ĠFl ip -J ava -Ġbr ink -Ġrum ored -Ġun seen -Ġgar nered -ĠDef in -al ted -Ġtatt oos -Ġhes itation -is itions -ĠWe aver -ĠReport ing -Ġtherap ies -Ġconsult ants -Ġresid ual -ĠMal i -ĠRom a -i ago -ĠRes idents -ub i -Ġremed ies -Ġadapt ive -ĠAl ive -ĠBar cl -Ġwal lets -c rypt -etermin ation -ĠPel osi -Ġsl ipping -oton in -Ġall iances -pat rick -ir is -Ġor th -ĠPer kins -ĠDe V -ĠG ets -Ġdry ing -ge e -fore st -ĠFor get -ore m -33 9 -Ġvague ly -ĠD ion -ĠP orn -ĠH OW -Ġp neum -Ġrub ble -ĠT aste -enc ia -ĠG el -Ġd st -Ġ24 5 -ĠMoroc co -inf lamm -ĠTw ins -Ġb ots -d aughter -ĠB alk 
-Ġbre thren -Ġlog os -Ġgo bl -f ps -Ġsub division -Ġp awn -Ġsquee zed -Ġmor ale -ĠD W -' " -Ġkn ot -ook y -Ġdiv isive -Ġboost ed -ch y -ãĥ IJ -if act -Ġnewcom ers -ĠWrest ling -Ġsc outs -w olves -R at -Ġnin eteenth -ĠOs borne -St ats -Ġem powered -Ġpsych opath -ĠO EM -ugg age -ĠP K -ĠMoh ammad -P ak -Ġanarch ists -ĠExt ract -est hes -ĠStock holm -l oo -ĠG raph -Ġdeploy ing -ĠStr anger -ĠM old -Ġstaff er -Ġdiscount ed -uck le -ple ase -ĠLand ing -ÃŃ a -Ġ19 3 -Ġan te -Ġrep etition -Ġ+ /- -Ġpar ody -Ġlive ly -AA A -ĠHor us -Ġp its -ind ers -L OC -ĠVen ice -40 6 -ĠDis cover -â Ĩ -ellect ual -Ġp ens -Ġey el -ig uous -Im pl -Ġj oking -Ġinv al -ĠBel fast -Ġcredit ors -ĠSky walker -ov sky -Ġcease fire -Ġse als -is oft -) ). -ĠFel ix -IT S -Ġt resp -ĠBlock chain -ew are -ĠSch war -en ne -mount ed -ĠBe acon -les h -Ġimmense ly -Ġche ering -Em ploy -sc ene -ish ly -atche wan -ĠNic olas -Ġdr ained -ĠEx it -ĠAz erb -j un -Ġflo ated -u ania -De ep -Ġsuper v -Ġmyst ical -ĠD ollar -ĠApost le -ĠR EL -ĠProv ided -ĠB ucks -ãĥ ´ -cut ting -Ġenhance ments -ĠPengu ins -ĠIsa iah -Ġj erk -ĠW yn -Ġst alled -Ġcryptoc urrencies -ĠR oland -sing le -Ġl umin -ĠF ellow -ĠCap acity -ĠKaz akh -W N -Ġfin anced -38 9 -Ġt id -Ġcoll usion -ĠMy r -î Ģ -Sen ator -Ġped iatric -Ġneat ly -Ġsandwic hes -ĠArchitect ure -Ġt ucked -Ġbalcon y -Ġearthqu akes -qu ire -F uture -Ġhe fty -é Ĺ -Ġspecial izes -Ġstress es -Ġs ender -Ġmisunder standing -Ġep ile -Ġprov oke -ĠCol ors -Ġdis may -uk o -[ _ -58 6 -ne utral -Ġdon ating -ĠRand all -Mult i -Ġconvenient ly -ĠS ung -ĠC oca -Ġt ents -ĠAc celer -Ġpart nered -27 2 -ir ming -ĠB AS -s ometimes -Ġobject ed -ub ric -p osed -LC S -gr ass -Ġattribut able -V IS -Israel i -Ġrepe ats -ĠR M -v ag -ut a -in ous -Ġin ert -ĠMig uel -æ Ń -ĠHawai ian -B oard -Ġart ific -ĠAzerb ai -as io -ĠR ent -A IN -Ġappl iances -Ġnational ity -Ġass hole -ĠN eb -Ġnot ch -h ani -ĠBr ide -Av ailability -Ġintercept ed -Ġcontin ental -Ġsw elling -ĠPers pect -b ies -. 
< -ith metic -ĠL ara -Ġtempt ing -add r -Ġoversee ing -cl ad -ĠD V -ĠGing rich -Ġm un -ĠApp ropri -Ġalter ations -ĠPat reon -Ġha voc -Ġdiscipl ines -Ġnotor iously -aku ya -ier i -? ). -ĠW ent -Ġsil icon -Ġtre mb -Cont ainer -K nown -Ġmort ar -est e -ick a -Ar thur -ĠPre viously -ĠMart y -Ġsp arse -g ins -Ġin ward -ĠParticip ant -C opy -ĠM isc -Ġantib iotic -ĠRet ro -Ġel usive -Ġass ail -ĠBatt alion -ĠB ought -Ġdimin ish -ĠEuro pa -s ession -ĠDanger ous -ies el -Ġdisbel ief -Ġbl asts -ext reme -ĠBoy d -ĠProject s -ĠGu ys -Ġunder gone -Ġgr ill -ĠDw ight -Ġ19 7 -US ER -Ġfiles ystem -Ġcl ocks -T aylor -Ġwra pper -Ġfold ing -ous and -ĠPhilipp ine -ATION AL -ĠPer th -Ġas hes -Ġaccum ulate -ĠGate way -Sh op -orks hire -H an -ĠBar rel -ĠLe h -ĠX V -Ġwh im -Ġrep o -ĠC G -ĠM am -Ġincorpor ating -Ġbail out -Ġlingu istic -Ġdis integ -C LE -Ġcinem atic -ĠF iber -S yn -il ion -ĠCom pos -c hens -Ġne oc -Ġbo iled -F INE -on o -un cle -ik en -ĠB M -Î ¹ -Ġreceipt s -Ġdisp osed -ĠTh irty -ĠR ough -ĠA BS -Ġnot withstanding -oll en -# $ -Ġunrel iable -Ġbl oom -Ġmedi ocre -Ġtr am -ĠTas man -Ġsh akes -Ġmanifest o -ĠM W -Ġsatisf actory -Ġsh ores -Ġcomput ation -Ġassert ions -orm ons -ar ag -ab it -Dem ocrats -ĠL oot -ĠVol ks -ha ired -Ġgrav itational -S ing -ĠM iz -Ġthro ttle -Ġtyr anny -ĠView s -Ġrob ber -ĠMinor ity -Ġsh rine -sc ope -pur pose -Ġnucle us -our cing -ĠUS DA -ĠD HS -w ra -ĠBow ie -Sc ale -ĠB EL -x i -I ter -Ġ( ), -w right -Ġsail ors -ous ed -NAS A -ĠPro of -ĠMin eral -t oken -ĠF D -R ew -Ġe ll -6 30 -Ġchance llor -ĠG os -Ġamount ed -ĠRec re -ome z -ĠOpt im -ĠOl ive -Ġtrack er -ow ler -ĠUn ique -R oot -Ġmar itime -ĠQur an -ĠAd apt -Ġecosystem s -ĠRe peat -ĠS oy -ĠI MP -Ġgrad uating -and em -P ur -ĠRes et -ĠTr ick -ĠPh illy -ĠT ue -ĠMalays ian -Ġclim ax -Ġb ury -Ġcons pic -ĠSouth ampton -ĠFl owers -Ġesc orted -ĠEduc ational -ĠI RC -Ġbrut ally -e ating -Ġpill ar -ĠS ang -ĠJ ude -ar ling -ĠAm nesty -Ġrem inding -ĠAdminist rative -hes da -Ġfl ashed -ĠP BS -per ate -fe ature -Ġsw 
ipe -Ġgra ves -oult ry -26 1 -bre aks -ĠGu er -Ġsh rimp -ĠV oting -qu ist -Ġanaly tical -Ġtables poons -ĠS OU -Ġresear ched -Ġdisrupt ed -Ġj our -Ġrepl ica -Ġcart oons -b ians -} ) -c opy -G ot -ou ched -P UT -Ġsw arm -not ations -s aid -Ġreb uilt -Ġcollabor ate -Ġr aging -Ġn ar -Ġdem ographics -ĠD DR -Ġdist rust -oss ier -ĠK ro -Ġpump kin -Ġreg rets -Ġfatal ities -ĠL ens -ĠO le -p d -Ġpupp et -ĠOut look -ĠSt am -O l -F air -U U -Ġre written -Ä ± -Ġfasc inated -Ġve ctors -Ġtrib unal -u ay -ĠM ats -ĠCo ins -[ [ -Ġ18 1 -Ġrend ers -ĠK aepernick -Ġesp ionage -Ġsum m -Ġd itch -Acc ount -Ġspread sheet -Ġmut ant -p ast -40 7 -Ġd ye -Ġinit iation -Ġ4 000 -Ġpunish able -Ġth inner -ĠKh al -Ġinter medi -D un -ĠGoth am -Ġeager ly -Ġvag inal -p owers -V W -ĠWATCH ED -Ġpred ator -ams ung -Ġdispar ity -Ġ[ * -Ġam ph -Ġout skirts -ĠSpir its -Ġskelet al -Ð » -ĠR ear -Ġissu ance -ĠLog ic -re leased -Z Z -ĠB ound -Ent ry -Ġex its -is ol -ĠFound er -Ġw re -ĠGreen land -ĠM MO -t aker -IN C -ãģ ¾ -Ġhour ly -hen ko -Ġfantas ies -Ġdis ob -Ġdemol ition -ãĥ ĭ -Ġen listed -rat ulations -Ġmis guided -Ġens ured -Ġdiscour aged -m ort -Ġfl ank -Ġc ess -Ġreact s -ĠS ere -s ensitive -ĠSer pent -ass ad -Ġ24 7 -Ġcalm ly -b usters -Ġble ed -ĠSt ro -Ġamuse ment -ĠAntar ctica -Ġs cept -ĠG aw -a q -ason ic -Ġsp rawling -n ative -atur ated -ĠBattle field -IV ERS -E B -ĠG ems -ĠNorth western -ĠFil ms -ĠAut omatic -Ġappre hend -ãģ ¨ -Ġgui Name -Ġback end -Ġevid enced -ge ant -01 2 -ĠS iege -Ġexternal To -Ġunfocused Range -ĠguiActiveUn focused -Ġgui Icon -ĠexternalTo EVA -ĠexternalToEVA Only -F ri -ch ard -en aries -Ġchief s -Ġc f -ĠH UD -Ġcorro bor -Ġd B -ĠT aken -ĠPat ricia -ra il -ĠCh arm -ĠLiber tarian -rie ve -Person al -ĠO UR -ger ies -Ġdump ing -Ġneurolog ical -it imate -ĠClint ons -raft ed -ĠM olly -Ġtermin als -reg ister -Ġfl are -Ġenc oded -Ġautop sy -p el -m achine -Ġexempt ions -ĠRoy als -d istance -Ġdraft s -Ġl ame -ĠC unning -Ġsp ouses -ĠMark ets -ĠCar rier -Ġimp lying -ĠY ak -s id -Ġl oser 
-Ġvigil ant -Ġimpe achment -Ġaug mented -ĠEmploy ees -Ġunint ended -tern ally -ĠW att -Ġrecogn izable -ess im -æ Ŀ -Ġco ated -r ha -Ġlie utenant -ĠLegisl ation -pub lished -44 4 -01 3 -Ġide ally -ĠPass word -Ġsimpl ify -ĠMet a -ĠM RI -Ġple ading -organ ized -hand ler -Ġun ravel -cor rect -Ġ icy -Ġparan oid -Ġpass er -Ġinspect ions -of er -ĠHealth care -28 3 -ĠBr ut -iol a -for ge -ĠMed ieval -MS N -ie vers -ĠProgram ming -å ī -Ġ2 23 -m u -ĠC LE -ug a -Ġsho ppers -Ġinform ative -ĠPl ans -Ġsupplement ation -ĠT ests -ty ard -ocy tes -ĠVeg a -ĠGujar at -erman ent -Ex cept -ĠL OT -all a -ĠC umm -ĠO sw -Ġven om -ĠDeb t -ĠD OWN -Ġreun ion -Ġm uc -ĠRel ief -Ġge op -ĠðŁ ĺ -al ogue -An th -ech o -Ġcor ros -Ġrepl ication -ĠBl azing -ĠD aughter -Ġinf lic -ĠLind sey -Ù Ī -28 4 -Ex it -Ġgl oom -TA IN -Ġundermin ing -Ġadv ising -h idden -Ġover flow -Ġg or -urd ue -Ġe choes -enh agen -Ġimp uls -d rug -c ash -Ġas ync -Ġmir ac -at ts -p unk -Ġpiv ot -ĠLegisl ative -Ġblog gers -ĠCl aw -s burg -d yl -ĠRecomm end -Ġver te -Ġprohib iting -ĠPant her -Jon athan -Ġo min -Ġhate ful -28 1 -ĠOr che -ĠMurd och -down s -Ġas ymm -G ER -Al ways -Ġinform s -ĠW M -ĠP ony -ĠApp endix -ĠAr lington -J am -Ġmedic inal -ĠS lam -IT IES -Ġre aff -ĠR i -F G -S pring -b ool -Ġthigh s -Ġmark ings -ĠRa qqa -ĠL ak -p oll -ts ky -ĠMort y -ĠDef inition -Ġdeb unk -end ered -ĠLe one -a vers -Ġmortg ages -App arently -N ic -ha us -ĠTh ousands -au ld -Ġm ash -sh oot -Ġdi arr -Ġconscious ly -H ero -e as -ĠN aturally -ĠDestroy er -Ġdash board -serv ices -R og -Ġmillenn ials -Ġinv ade -- ( -Ġcomm issions -ĠA uckland -Ġbroadcast s -Ġfront al -Ġcr ank -ĠHist oric -Ġrum ours -CT V -Ġster il -Ġboost er -rock et -ãĤ ¼ -ut sche -ĠP I -Ġ2 33 -ĠProdu cer -ĠAnaly tics -Ġinval uable -Ġunint ention -ĠC Y -Ġscrut in -Ġg igg -Ġeng ulf -Ġprolet ariat -Ġh acks -ĠH ew -ar ak -ĠSl ime -ield ing -ag her -ĠEll iot -Ġtele com -Ġ2 19 -ult an -ĠAr bor -ĠSc outs -B an -Ġlifes pan -Ġbl asp -38 8 -Ġjud iciary -ĠContin ental -ask ing -Mc C -L 
ED -Ġbag gage -ĠSorce rer -Ġrem nants -ĠGriff ith -ets u -ĠSub aru -ĠPerson ality -des igned -ush ima -agn ar -Ġrec oil -Ġpass ions -\ ": -Ġte e -Ġabol ition -ĠCreat ing -j ac -Ġ19 4 -01 9 -Ġpill ars -ric hed -/ " -t k -Ġlive lihood -Ġro asted -ah on -ĠH utch -ass ert -Ġdivid end -Ġkn it -Ġd aunting -Ġdisturb ance -Ġsh ale -Ġcultiv ated -Ġrefriger ator -L B -ĠN ET -Ġcommercial s -Ġthink ers -45 5 -Ġch op -B road -Ġsuspic ions -Ġtag ged -l ifting -Ġsty lish -ĠShield s -Short ly -Ġt ails -A uth -ST E -ĠG AME -Ġse ism -ĠK is -olog ne -Ġcow ork -Ġforc ibly -Ġthy roid -ĠP B -AN E -mar ried -h orse -Ġpoly mer -ĠCh al -od or -DE BUG -ĠCon text -Ġbl iss -Ġpin point -ĠMat hemat -leg ram -ĠWeek end -Ġlab elled -Ġb art -it les -Ġest rogen -âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ -" ' -Ġvis ibly -Ġouts ider -aid a -Are a -Ġdisse min -Ġdish onest -ĠCl osed -ĠBullet in -ĠRam sey -sw ord -ĠX I -our ced -S ame -34 6 -ĠRe pe -ĠK ou -c ake -em is -C ache -ĠMe aning -ĠEn light -onom y -Ġmanifest ation -sw orth -J ay -Ġch ore -ö r -D ream -Ġsanction ed -Ġcult urally -ĠA ra -N av -Ġthe ological -Ġstr ut -ĠV O -ĠHand book -Ġconstruct ing -Ġ ¶ -ĠBenef its -ĠPsych ological -s ac -å ¸ -p olicy -ĠMat ters -ĠReport ed -ĠBy te -Ġvit ro -ĠM aiden -Ġl am -ĠJenn ings -Ġgar ment -ĠRut gers -ĠStaff ord -ĠWell ington -Ġinter mitt -Ġn pm -Ġord eal -Ġplug ged -o oming -in ished -fram ework -Ġtim ber -Ġc ass -Ġ8 50 -il ess -ĠRed ux -7 68 -St re -Ġsurpass ed -w hel -Ġparalle ls -Ġve il -ĠG I -ĠR EST -Ġread iness -s ort -Ġmod ifying -ĠSl ate -ru ff -Ġmar ble -Ġinf rared -Ġaud itor -ĠFANT ASY -ĠP overty -ĠS PD -Ġ" ( -K y -RA Y -Ġexecut ions -ĠBever ly -ĠMarx ism -ĠBur st -ĠK ali -est ones -Clear ly -E ll -ãģ § -ĠProceed ings -T oken -IF IC -ñ a -Cent ral -ĠH aley -ĠD rama -Ġform ations -OR N -Book s -Ġdom inating -ĠFly ers -ĠCompan ion -Ġdiscipl ined -ĠYug oslav -ĠSpell s -Ġv engeance -Ġland lords -L en -ĠO gre -ano ia -Ġpier cing -Ġcon greg -Ġscore r -ob ia -Ġnic kel -ĠLear ns -Ġre jo -Ġmaster 
piece -Fl ash -Ġinhab ited -ĠOpen GL -ĠD ud -ĠI CO -Ġar ter -Ġpl ur -Ġmaster y -Ġlong standing -st ed -Ġw ines -Ġtelev ised -ĠSh rine -ĠBay ern -Ġâ ĵĺ -Ġencl osure -j ohn -Ġprophe ts -ĠRes urrection -ĠOrd ers -Ġun even -r als -Ġd wind -ĠL ah -ĠSl oven -37 8 -Ġins istence -aff le -ĠCl one -Ġhard ship -ĠCongress man -Ġple ad -Ġreview ers -Ġc ured -Ġ19 35 -as ley -f ake -ĠTh inking -yd ia -P ART -ĠD ota -o it -Ġwh ipped -Ġb ouncing -ĠHispan ics -com ings -Ġcann abin -ĠCh ambers -ĠZ ack -Option al -Ġco ats -Ġprow ess -ĠNort on -Ġplain ly -Ġfre ight -Ġinhib ition -Ġcl am -Ġ30 3 -ke f -ale igh -L uke -Ġpsych o -ator ium -M ED -Ġtreat ies -Ġind isc -Ġd c -OP S -Ġresil ient -ĠInter state -Ġsl ack -Ġmund ane -Ġestab lishes -35 9 -Ġstr ained -Ġn ond -S us -Ġcast e -ar ate -ie ving -Ġunfair ly -Ġpars er -on ial -urs ive -V ia -ĠOtt o -ĠAuthor ities -stro ke -K R -ĠMer cy -Ġfurn ished -Ġout set -Ġmet ic -19 82 -olith ic -ĠT ent -og ical -ĠA ircraft -Ġh ides -ĠBec ame -Ġeduc ators -re aching -Ġvol atility -Ġtodd ler -ĠNAS CAR -ĠTw elve -ĠHigh lights -Ġgra pe -Ġspl its -Ġpe asant -Ġre neg -ĠMS I -Tem p -st ars -Ġtre k -ĠHy de -b inding -Ġreal ism -Ġox ide -ĠH os -Ġmount s -Ġbit ing -Ġcollaps ing -Ġpost al -Ġmuse ums -Ġdet ached -Ġrespect ing -Ġmonop ol -Ġwork flow -ĠC ake -Tem plate -ĠOrgan isation -Ġpers istence -36 9 -C oming -B rad -Ġredund ant -ĠG TA -Ġb ending -Ġrev oked -Ġoff ending -Ġfram ing -Ġprint f -Comm un -mem bers -Out side -Ġconst rued -Ġc oded -F ORE -Ġch ast -Ch at -Ind ian -ĠY ard -? !" -ĠP orts -ĠX avier -ĠR ET -' ." -ĠBo at -iv ated -ich t -umer able -D s -ĠDun n -Ġcoff in -Ġsecure ly -ĠRapt ors -ĠB es -Install ation -Ġin ception -ĠHealth y -end ants -Ġpsych ologists -ĠShe ikh -c ultural -ĠBlack Berry -sh ift -F red -oc he -Ġc akes -ĠS EO -ĠG ian -ĠAs ians -og ging -e lement -Ġpund its -ĠV augh -ĠG avin -Ġh itter -Ġdrown ed -Ġch alk -ĠZ ika -Ġmeas les -80 2 -âĢ¦ .. 
-ĠAW S -] " -Ġdist ort -ĠM ast -Ġantib odies -ĠM ash -Mem ory -ĠUg anda -ĠPro b -Ġvom iting -ĠTurn s -Ġoccup ying -Ġev asion -ĠTher apy -Ġprom o -Ġelect r -Ġblue print -ĠD re -pr iced -ĠDep ot -Ġallev iate -ĠSom ali -m arg -n ine -Ġnostalg ia -ĠShe pherd -Ġcaval ry -Ġtor ped -ĠBlood y -x b -Ġs ank -Ġgo alt -report print -embed reportprint -clone embedreportprint -ĠIn itially -ĠF ischer -Ġnot eworthy -c ern -Ġin efficient -raw download -rawdownload cloneembedreportprint -c ation -ĠD ynasty -l ag -D ES -Ġdistinct ly -ĠEston ia -Ġopen ness -Ġg ossip -ru ck -W idth -ĠIb rahim -Ġpet roleum -Ġav atar -ĠH ed -ath a -ĠHog warts -Ġc aves -67 8 -Ġsafegu ard -ĠM og -iss on -ĠDur ham -sl aught -ĠGrad uate -Ġsub conscious -ĠEx cellent -ĠD um ----- - -Ġp iles -ĠW ORK -ĠG arn -ĠF ol -ĠAT M -Ġavoid s -ĠT ul -Ġble ak -EL Y -iv ist -light ly -P ers -ĠD ob -ĠL S -Ġins anity -Î µ -atal ie -En large -Ġtw ists -Ġfault y -Ġpir acy -Ġimp over -Ġrug ged -ĠF ashion -Ġs ands -' ? -sw ick -Ġn atives -Ġhe n -ĠNo ise -ãĥ Ĺ -Ġg reens -Ġfree zer -Ġd ynasty -ĠFather s -ĠNew ark -Ġarchae ological -Ġo t -ob ar -Ġblock ade -Ġall erg -L V -Ġdeb it -ĠR FC -ĠMil ton -ĠPress ure -Ġwill ingly -Ġdisproportion ate -Ġopp ressive -Ġdiamond s -Ġbelong ings -19 70 -Ġbell s -Ġimperial ism -Ġ2 27 -Ġexpl oding -ĠE clipse -Ġ19 19 -Ġr ant -Ġnom inations -34 7 -Ġpeace fully -ric a -ĠF UCK -Ġvib ration -mal ink -Ġro pes -ĠIv anka -ĠBrew ery -ĠBook er -ĠOw ens -go ers -Serv ices -ĠSn ape -Ġ19 1 -39 5 -Ġ2 99 -just ice -Ġb ri -Ġdisc s -Ġprom inently -Ġvul gar -Ġsk ipping -l ves -Ġtsun ami -37 4 -ĠU rug -ĠE id -rec ated -p hen -Ġfault s -ĠStart ed -9 50 -Ġp i -Ġdetect or -Ġbast ard -Ġvalid ated -Space Engineers -OUR CE -Ġ( ~ -Ġuns ur -Ġaff irmed -Ġfasc ism -Ġres olving -ĠCh avez -ĠC yn -Ġdet ract -L ost -Ġrig ged -Ġhom age -ĠBrun o -55 5 -ec a -Ġpress es -Ġhum our -Ġsp acing -Ġ' / -olk ien -C oun -OP ER -T re -S on -ĠCambod ia -ier re -m ong -o zy -Ġliquid ity -ĠSov iets -ĠFernand o -Ġ2 29 -Ġsl ug -ĠCatal an -elect ric 
-Ġsc enery -ĠH earth -Ġconst rained -Ġgoal ie -ĠGu idelines -ĠAm mo -ĠPear son -Ġtax ed -Ġfet us -Resp onse -ĠAlex is -th ia -G uy -Ġrecon struct -Ġextrem es -Ġconclud ing -ĠP eg -ook s -Ġded uctions -R ose -Ġground breaking -ĠT arg -ãĥ ģ -ĠRe ve -res ource -Ġmo ons -Ġelectrom agnetic -Ġamid st -ĠVik tor -N ESS -B ACK -Ġcomm ute -ĠAna heim -Ġfluct uations -6 40 -Ġnood les -ĠCop enhagen -ĠT ide -ĠGri zz -ĠS EE -Ġpip elines -Ġsc ars -end o -ag us -ĠE TF -/ # -ĠBec ome -44 8 -Ġvis c -ĠRecomm ended -Ġj umper -Ġcogn ition -Ġassass in -Ġwitness ing -ĠSet up -Ġl ac -v im -IS M -p ages -SS L -35 8 -Ġad ject -indust rial -l ore -cher y -Ġgl itter -Ġc alf -Flor ida -Ġspoil ers -Ġsucceed s -Ġch anting -Ġslog ans -ĠTr acy -Vis it -rol ogy -Ġm ornings -Ġline age -Ġs ip -Ġintense ly -Ġflour ish -ĠSle eping -ĠF em -or por -ĠK lan -ĠDar th -h ack -ĠNi elsen -Ġtum ors -Ġprocure ment -ĠY orkshire -Ġra ided -K Y -An na -Ġ// [ -ĠDis order -ĠMust ang -ĠW en -ĠTry ing -s q -Ġdeliver ies -Ġshut ter -Ġcere bral -Ġbip olar -ĠC N -l ass -j et -Ġdeb ating -> : -Ġe agle -gr ades -ĠD ixon -UG C -M AS -ĠDr aco -ĠMach ines -aff er -Ġem an - ² -pr on -ĠG ym -Ġcompar atively -ĠTrib unal -PR O -Ġle x -Ġfert ile -Ġdep ressing -Ġsuperf icial -ess ential -ĠHun ters -g p -Ġprom inence -L iber -ĠAn cest -ote chnology -Ġm ocking -ĠTra ff -ĸ ļ -Med ium -I raq -Ġpsychiat rist -Quant ity -ĠL ect -Ġno isy -5 20 -G Y -Ġsl apped -ĠM TV -Ġpar a -p ull -Mult iple -as her -Ġn our -ĠSe g -Spe ll -v ous -ord ial -Sen ior -ĠGold berg -ĠPl asma -ne ed -Ġmess enger -ere t -Ġteam ed -Ġliter acy -ĠLe ah -ĠD oyle -Ġem itted -U X -Ġev ade -Ġm aze -Ġwrong ly -ĠL ars -Ġstere otype -Ġpled ges -Ġarom a -ĠM ET -Ġac re -ĠO D -Ġf f -Ġbrew eries -ĠH ilton -und le -ĠK ak -ĠThank fully -ĠCan ucks -in ctions -ĠApp ears -Ġco er -Ġundermin ed -ro vers -And re -Ġbl aze -um ers -Ġfam ine -amp hetamine -ulk an -Am ount -Ġdesper ation -wik ipedia -develop ment -ĠCor inth -uss ia -Jack son -L I -N ative -R s -Oh io -ĠKath leen -F 
ortunately -Ġattend ant -ĠPre ferred -ĠDid n -ĠV s -M is -Ġrespond ent -Ġb oun -st able -Ġp aved -Ġunex pl -ĠChe ney -L M -ĠC ull -bl own -Ġconfront ing -oc ese -serv ing -W i -ĠLith uania -ann i -Ġst alk -h d -Ġv ener -AP H -ynchron ous -UR R -um ably -hist oric -H alf -H ay -Ġresil ience -spe ction -Ġabandon ing -O bs -ĠDeb bie -Ġgrad ient -ĠPl aint -ĠCan al -AR CH -Ġexpans ive -Ġfun g -Ġb ounced -U nd -Ġprec autions -Ġclar ification -Ġd agger -Ġgri ps -Ġ µ -ĠRiver a -ĠUnd ead -is ites -ĠFIR ST -ñ o -aud i -Ġhost ages -Ġcompl iant -Ġal umni -Se ven -Ġcyber security -e ither -Col lect -Ġinvari ably -ĠS oci -Ġlaw maker -Ġa le -ĠPerson ally -N azi -Ġcustom ization -ĠPro c -ĠSask atchewan -eat uring -Ġsp ared -Ġdiscontin ued -Ġcomput ational -ĠMotor ola -Ġsuprem acist -government al -Ġparad ise -ĠDown ing -ĠNik on -Ġcat alyst -ber ra -Tor onto -8 75 -bet a -ĠMac ron -Ġunreal istic -ve ctor -ĠVeh icles -it iveness -ĠR V -ĠCol bert -s in -o ji -ent in -ĠKr ish -hell o -ff ield -ok y -ĠT ate -Ġmap le -Ġa ids -chem ical -33 4 -n uts -ĠWar p -Ġx x -ĠRob b -umer ous -_- _ -ft ime -ĠV W -Ġw inger -ĠD ome -t ools -ĠP V -ĠGe orgetown -Ġg eared -Ġjihad ists -Ġc p -Ġster oids -M other -cler osis -ĠDR M -nes ia -Ġl inger -Ġimm ersive -ĠC OUN -Ġoutwe igh -ens ual -B and -Ġtransform s -mat ched -ps ons -ĠJud icial -f actor -Ġrefer ral -Ġodd ly -ĠW enger -B ring -ĠB ows -60 2 -IC LE -Ġl ions -ĠAcad emic -ĠTh orn -ĠRa ider -kef eller -St orage -L ower -ĠOr t -ĠEqu ality -AL T -ĠS OC -T ypes -Ġl yn -ĠAss et -co at -TP P -C VE -ĠPione er -app lication -Mod ern -ĠH K -En vironment -Al right -R ain -IP P -ĠShi ite -Ġm ound -ĠAb ilities -cond ition -St aff -Ġcompet ence -ĠM oor -ĠDi ablo -Ġwith held -Ġost ensibly -ĠB rom -Ġms g -Ġden omin -ĠRef erences -ĠF P -Ġplun ged -Ġp amph -m oving -cent ral -Ġdown right -Ġf ading -T al -T yp -ĠTh y -uk es -it he -Ġo ve -Ġbatt led -Ġseaf ood -Ġfig ur -ĠR D -c rop -Ġsqu ads -{ \ -à ¹ -ĠE h -Ġinterview ing -ĠQ in -Ġas piring -PL IC -Ġcla uses -ĠG ast 
-ĠN ir -Ġl uggage -Ġh ose -Ġsystem d -Ġdesc ending -ĠRev ised -ĠR ails -al ign -70 9 -33 7 -Ġf ug -charg ing -t ags -Ġut er -k ish -WAR NING -49 0 -prof its -Ġvoy age -Ġa ce -ĠV anguard -ĠT anks -ĠM uk -Ġ2 26 -S afe -Ar mor -Ġvolcan ic -Ġwom b -ĠM IL -Ġbegin ner -ĠRec ogn -ĠA AP -PL AY -) ! -Ġdetect ing -c n -Ġbre aches -Bas ically -ĠP ag -ĠMunicip al -ĠInd ie -ĠL af -ĠDis able -ĠOl son -Ġrest rained -Ġrul ings -Ġhum ane -ev ents -ĠCinem a -display Text -ĠH atch -action Date -onna issance -Ġassault ing -ĠL ug -CH AT -Ġvig orous -ĠPer se -Ġintoler ance -ĠSnap chat -ĠSh arks -Ġd ummy -ĠDi agn -ĠGu itar -im eters -40 3 -RE G -A x -Ġsepar ates -ĠMah m -Ġt v -j ah -O OL -C irc -ĠWinds or -uss ian -Ġintu ition -Ġdis dain -ĠDon ovan -Ġ2 21 -E mb -Ġcondem ning -Ġgener osity -zz y -Ġpant ies -ĠPre vent -Action Code -AN A -34 2 -external ActionCode -Ġspec ifying -Ġcryst all -J ere -Ġru pt -ĠApp rentice -Ġprof iling -Ð º -St rike -Ġsid eline -Ġoblig ated -Ġocc ult -Ġbureaucr atic -ant ically -rupt ed -neg ative -ĠEthiop ia -ĠC ivic -Ġins iders -el igible -ĠTV s -ĠB AR -ĠT I -i ologist -ĠA IR -Ġsubstit uted -Ar ab -ĠS aul -ĠY og -p rem -Ġbuild ers -Ġstation ary -Ġdoubt ful -Ġvig orously -Ġthr illing -Ph ysical -ĠCare y -ĠHyd ra -geon ing -ĠS ly -y ton -Ġborrow ers -ĠPark inson -Ġ ë -ĠJama ica -Ġsat ir -Ġinsurg ents -ĠF irm -Ġis ot -ĠK arn -our ning -ak ens -doc s -l ittle -ĠMon aco -CL ASS -Tur key -L y -ĠCon an -ass ic -Ġstar red -ĠPac ers -et ies -Ġt ipping -M oon -ĠR w -s ame -Ġcav ity -Ġgo of -ĠZ o -Sh ock -um mer -Ġemphas izes -Ġreg rett -Ġnovel ty -Ġen vy -ĠPass ive -r w -50 5 -Ġind ifferent -ĠR ica -ĠHim self -ĠFred die -Ġad ip -ä¸ Ģ -Ġbreak out -Ġhur ried -ĠHu ang -ĠD isk -Ġro aming -?????- ?????- -U V -ĠRick y -ĠS igma -Ġmarginal ized -Ġed its -Ġ30 4 -mem ory -Ġspec imen -29 3 -ãģ ¯ -Ġvert ically -Ġaud ition -ĠHe ck -Ġc aster -ĠHold ings -ad al -ĠC ron -ĠL iam -Ġdef lect -P ick -ĠDeb ug -RE F -Ġvers atility -ot hes -class ified -ĠMah ar -ĠH ort -C ounter -st asy -not 
iced -33 1 -ĠSh im -f uck -ĠB ie -Ġair ing -ĠPro tein -ĠHold ing -Ġspect ators -ili ated -ĠThat cher -n osis -ãĥ¼ ãĥ³ -Te le -B oston -ĠTem pl -st ay -Ġdecl arations -47 9 -Vol ume -ĠDesign er -ĠOver watch -id ae -Ġon wards -Ġn ets -ĠMan ila -part icularly -Ġpolit ic -o other -Ġport raits -Ġpave ment -c ffff -Ġs aints -Ġbegin ners -ES PN -Ġshort comings -âķIJ âķIJ -Ġcom et -ĠOrgan ic -qu el -Ġhospital ized -Bre ak -Ġpe el -dyl ib -asp x -ur ances -ĠT IM -P g -Ġread able -ĠMal ik -Ġm uzzle -Ġbench marks -d al -ĠV acc -ĠH icks -60 9 -ĠB iblical -he ng -Ġover load -ĠCivil ization -Ġimm oral -Ġf ries -ãĤ Ĵ -Ġreprodu ced -Ġform ulation -j ug -ire z -g ear -Ġco ached -Mp Server -ĠS J -ĠK w -In it -d eal -ĠO ro -ĠL oki -ĠSong s -Ġ23 2 -ĠLou ise -asion ally -Ġunc ond -olly wood -Ġprogress ives -ĠEn ough -ĠDo e -Ġwreck age -Ġbr ushed -ĠBase Type -Ġz oning -ish able -het ically -ĠC aucus -ĠH ue -Ġk arma -ĠSport ing -Ġtrad er -Ġseem ing -ĠCapt ure -4 30 -b ish -Ġt unes -Ġindo ors -ĠSp here -ĠD ancing -TER N -Ġno b -ĠG ST -m aps -Ġpe ppers -F it -Ġoverse es -ĠRabb i -ĠR uler -vert ising -off ice -xx x -Ġra ft -Ch anged -Ġtext books -L inks -ĠO mn -ãĢ ij -Ġinconven ience -ĠDon etsk -= ~ -Ġimplicit ly -Ġboost s -ĠB ones -ĠBo om -Cour tesy -Ġsens ational -AN Y -Ġgre edy -ed en -Ġinex per -ĠL er -ĠV ale -Ġtight en -ĠE AR -ĠN um -Ġancest or -S ent -ĠH orde -urg ical -all ah -Ġsa p -amb a -ĠSp read -tw itch -Ġgrand son -Ġfract ure -Ġmoder ator -ĠSe venth -ĠRe verse -Ġestim ation -Cho ose -Ġpar ach -Ġbar ric -ãĢ IJ -Ġcomp ass -Ġall ergic -âĢ ķ -OT HER -err illa -Ġw agon -Ġz inc -Ġrub bed -ĠFull er -ĠLuxem bourg -ĠHoo ver -Ġli ar -ĠEven ing -ĠCob b -est eem -Ġselect or -ĠB rawl -is ance -ĠE k -Ġtro op -Ġg uts -ĠApp eal -ĠTibet an -Ġrout ines -ĠM ent -Ġsummar ized -steam apps -Ġtr anqu -Ġ19 29 -or an -ĠAut hent -Ġg maxwell -Ġappre hens -Ġpo ems -Ġsa usage -ĠWeb ster -ur us -Ġthem ed -Ġl ounge -Ġcharg er -Sp oiler -Ġsp illed -h og -ĠSu nder -ĠA in -ĠAng ry -Ġdis qual -ĠFrequ ency 
-ĠEther net -Ġhel per -Per cent -Ġhorr ifying -Ġa il -ĠAll an -EE E -ĠCross ing -44 9 -Ġh olog -ĠPuzz les -ĠGo es -eren n -60 4 -ãģ ı -ĠRaf ael -Ġatt en -ĠE manuel -Ġup ro -ĠSus p -P sych -ĠTr ainer -ĠN ES -ĠHun ts -bec ue -Ġcounsel or -R ule -Ġtox ins -Ġb anners -r ifice -Ġgreet ing -Ġfren zy -Ġall ocate -Ġ* ) -ex pr -50 3 -ĠCh ick -ĠT orn -Ġconsolid ation -ĠF letcher -sw itch -fr ac -cl ips -ĠMcK in -ĠLun ar -Mon th -IT CH -Ġscholar ly -rap ed -39 8 -Ġ19 10 -Ġe greg -Ġin secure -Ġvict orious -cffff cc -Ġsing led -Ġel ves -ĠW ond -bur st -Ġcam oufl -ĠBL ACK -Ġcondition ed -ç ī -ans wered -Ġcompuls ory -asc ist -Ġpodcast s -ĠFrank furt -bn b -Ġne oliberal -ĠKey board -ĠBel le -w arm -Ġtrust s -Ġins ured -ĠBu cc -us able -60 7 -ĠPl ains -Ġ18 90 -Ġsabot age -Ġlod ged -f elt -Ġg a -ĠN arc -ĠSal em -Ġsevent y -ĠBl ank -p ocket -Ġwhis per -Ġm ating -om ics -ĠSal man -ĠK ad -Ġan gered -Ġcoll isions -Ġextraord inarily -Ġcoerc ion -G host -b irds -è Ģ -k ok -Ġper missible -avor able -Ġpo inters -Ġdiss ip -ac i -Ġtheat rical -ĠCos mic -Ġforget ting -Ġfinal ized -å¤ § -y out -l ibrary -Ġbo oming -ĠBel ieve -ĠTe acher -ĠL iv -ĠGOOD MAN -ĠDomin ican -OR ED -ĠPart ies -Ġprecip itation -ĠSl ot -R oy -ĠComb ined -Ġinteg rating -Ġch rome -Ġintest inal -ĠRe bell -Ġmatch ups -Ġblock buster -ĠLore n -ĠLe vy -Ġpre aching -ĠS ending -ĠPur pose -ra x -f if -Ġauthor itative -ĠP ET -ast ical -Ġdish on -Ġchat ting -Ġ"$ :/ -Connect ion -Ġrecre ate -Ġdel inqu -Ġbro th -ĠD irty -ĠAd min -z man -Ġscholars hips -Ġ25 3 -cont act -als a -7 67 -c reen -abb age -Ġ19 15 -Ġbl ended -Ġal armed -L anguage -35 6 -Ġbl ends -ĠCh anged -W olf -Ġhe pat -Creat ing -Ġper secut -Ġsweet ness -art e -Ġforfe iture -ĠRober to -im pro -N FL -ĠMag net -Det ailed -Ġinsign ificant -ĠPOL IT -ĠBB Q -ĠC PS -Ġse aw -amin er -m L -end if -f inals -Ġ26 5 -u ish -Ġ} ) -ĠPro blems -Ġem blem -Ġserious ness -Ġpars ing -Ġsubst itution -Ġpress ured -Ġrecy cled -ale b -Rub y -Ġprof iciency -Dri ver -ĠW ester -: ' -AF TA -Ġm antle 
-ĠClay ton -fl ag -Ġpractition er -c overed -ĠSt ruct -add afi -4 25 -ĠTown ship -ĠHyd ro -Lou is -34 3 -Ġcond o -ĠT ao -Ġutil ization -Ġnause a -ĠDem s -rid ges -p ause -Ġform ulas -Ġchall enger -37 6 -Ġdefect ive -ĠRail way -ĠPub Med -Ġyog urt -l bs -ĠNor folk -OP E -ĠMood y -Ġdistribut or -Ġscroll s -Ġextract s -St an -Ġv iability -Ġexp oses -Ġstar vation -ĠStep s -ĠD odd -f ew -ST D -33 2 -Ġclos ures -Ġcomplement ary -ĠS asha -ump y -Ġmon et -Ġartic ulate -ĠDo ct -k iller -Ġsc rim -Ġ2 64 -Ġprost itutes -Ġse vered -Ġattach ments -Ġcool ed -L ev -ĠF alk -f ail -Ġpolic eman -ĠD ag -Ġpray ed -ĠK ernel -Ġcl ut -Ġc ath -Ġan omaly -St orm -em aker -ĠBreak fast -ul i -o ire -J J -h z -Oper ation -ĠS ick -35 4 -ĠGuatem ala -R ate -Ġexp osures -f aces -ĠArch ae -ra f -ĠM ia -Ġ20 25 -Ġop aque -Ġdisgu ised -ĠHead quarters -S ah -Ġp ots -9 78 -ĠM alf -Ġfrown ed -Ġpoison ous -ĠCon vers -ee ks -Ġcr ab -." " -Ġtre ason -Ġr anc -Ġescal ating -Ġwar r -Ġmob s -Ġl amps -ĠSun shine -ĠBrun swick -Ph ones -Ġspe lled -ĠSk ip -Ġ20 50 -Ġ19 11 -ĠPl uto -ĠAm end -Ġme ats -38 7 -Ġst omp -ĠZh ou -ĠLevi athan -ĠHaz ard -ad v -ĠOr well -Ġal oud -Ġb umper -ĠAn arch -ub untu -ĠSer ious -f itting -ĠOption al -ĠCec il -RE AM -Ġser otonin -Ġcultiv ate -ag ogue -} \ -Ġmos ques -ĠSun ny -Ġre active -rev olution -ĠL up -ĠFed ora -Ġdefense man -ĠV ID -ist ine -Ġdrown ing -ĠBroad casting -Ġthr iller -ĠS cy -Ġacceler ating -Ġdirect s -od ied -b ike -d uration -Ġpain fully -R edd -Ġproduct ions -Ġg ag -Ġwh ist -Ġs ock -Ġinf initely -ĠConc ern -ĠCit adel -Ġlie u -Ġcand les -ogene ous -arg er -Ġheaven ly -inflamm atory -Per formance -C s -ruct ose -az aki -Ġp essim -Ġinf erence -Ġpow d -ĠZ oe -Ġpain ts -Ġd azz -pt a --------- --- -Ġins pir -ĠExper imental -ĠKn ife -reg or -b ors -Ġshow ers -rom eda -Ġs aint -Ġben ign -ĠJ iang -Ġenvision ed -Ġsh roud -IF T -H O -Ġsh uff -ĠI CC -Ġse greg -Ġrevis it -ighth ouse -L i -Ġsub strate -ĠSe as -ĠRew ard -ĠH ep -ĠBr ass -s bm -Ġelim inates -Ġst amina -ĠV AT -ĠLo an 
-Ġconst raint -Ġappropri ated -Ġp es -ĠA LE -r anging -Ġ40 4 -39 2 -Ġintellectual s -ach u -Ġrestruct uring -ĠLe vin -Ġrun es -Ġdelight ful -Ġcarbohyd rates -ĠMod els -ĠExp o -Ġtransport ing -all oc -Ġring ing -S amsung -Ġscarce ly -ĠURL s -ĠM AS -Ġprot otypes -Ġnarr ator -ĠCPU s -cd n -ĠBart on -Ġdecided ly -ĠSh u -ix ir -oc ious -ĠMy st -N intendo -Ġre use -Ġforg iven -F ew -in ical -n at -Ġseam less -ĠEv a -ĠE VE -ĠJ O -land ers -Ġso fter -neg ie -Ġtrans ient -Ġorb ital -Ġfulf il -ĠK om -Hop efully -Ġdynam ically -ĠHun ger -å Ľ -ĠArmen ia -el man -ber to -Ġp ige -ĠID s -lim it -Ġve ins -Ġso aring -p acks -Gold en -ĠCr ab -ist or -ĠR PM -Ġ$ $ -g ression -Ġjihad ist -Ġgam ble -Ġcare g -Ġinf lated -F ace -ĠFire arms -ĠEm manuel -â Ŀ -Ġsh ocks -gr ab -Ġspl end -ĠHP V -ab ortion -Ab ove -Ent ity -play ers -Ġcomm enced -ul ence -Ġfulfill ment -Ġembod iments -ĠW elfare -Ġha il -Ġ< @ -tt en -Ġcat cher -ĠJ azeera -Ġvolcan o -Ġstabil ize -ĠHand ler -Ġintens ified -ĠAb rams -Ġhum iliation -p aced -60 5 -ĠCent OS -Spe cific -Ġhe ed -ĠC AM -ĠGal ile -D ie -Ġabol ished -ĠThom son -ĠTe achers -ĠW ass -j ong -ĠIS BN -ĠAll ies -sh ake -å · -v ict -How ard -Ġde em -Ġexceed ingly -ĠSmart stocks -ib e -Ġdoor way -Ġcompet ed -ig mat -Ġnational ists -Ġg room -ĠKe en -Ġdispos able -de cl -ĠT olkien -ĠSche me -Ġb iod -Ġav id -ĠEl on -ag ar -ĠT SA -R oman -Ġartific ially -Ġadvis ors -X L -ĠInf erno -36 6 -Ġted ious -ĠPhot ography -ĠCar rie -Ġtro pe -ĠSand ra -Ġdec imal -Que en -ĠGund am -ĠO M -ote ch -N BA -Ġ19 32 -Ġent renched -ĠMar ion -Ġfr aternity -Lab our -Hen ry -Ġlat itude -E ither -Ġenh ances -ĠPot ential -Ġsh ines -id ad -Ġbread th -Ġcapac ities -ĠðŁ ĻĤ -ĠBron x -Ġsex es -Ġdifferent iation -Ġheavy weight -ĠT aj -d ra -Ġmigr ate -Ġexhaust ion -ĠR UN -els ius -ĠCu omo -Ġgu itars -Ġcl ones -ĠSom ew -ĠP ry ------------- - -Ġwarr anted -cy cles -Ġsalv age -Ġdis ks -R ANT -ĠNGO s -ĠMart ian -":[ {" -Ġadd icts -oj ure -il let -Ġamazing ly -art ments -p ixel -ĠGPU s -Lay out -è £ -ĠTam 
il -ĠBas il -Ġimpart ial -ĠSt ructure -f ork -b ryce -Ġr idge -ĠHamb urg -ri ous -Ġbl itz -cig arettes -Ġcan ned -40 2 -Ġiron ically -Ġcompassion ate -ĠHaw kins -. # -ĠCat hedral -Ġrall ied -in ternal -Ġqu ota -st akes -T EXT -m om -Ġcomple tes -Ġ23 8 -Ġsh rug -ãĥ ij -ĠN inth -Ġrev ise -ĠProv ider -Ġtre acher -Ġqu asi -ĠPR ES -Ġdep osition -Ġconfidential ity -iss ors -Ġim balance -Ġspan ning -Ġang ular -ĠC ul -commun ication -ĠNor a -ĠGen ius -op ter -Ġs acked -Sp ot -Ġfine ly -ĠCH R -28 2 -w aves -Pal est -ĠRo hing -N L -è ¿ -Ġsh itty -ĠSc alia -4 75 -Pro gress -Ġreferen cing -Ġclass rooms -ab ee -Ġs od -hes ion -70 8 -ĠZucker berg -ĠFin ish -ĠScot ia -ĠSav ior -ĠInstall ation -an tha -( - -Ġ30 2 -ĠP unk -Ġcr ater -yout u -Ġro ast -Ġinflu encing -Ġd up -ĠJ R -ĠG rav -Ġstat ure -Ġbath rooms -A side -W iki -me an -ĠZ ak -ĠOn es -ĠN ath -Ġhyper t -Ġcommence ment -C ivil -Ġmoder ately -Ġdistribut ors -Ġbreast feeding -Ġ9 80 -ĠS ik -ĠC ig -ĠAM ER -R IP -ĠCare er -ust ing -Ġmess ed -Ġe h -ĠJ ensen -/ $ -Ġblack mail -Ġconvers ions -Ġscientific ally -Ġmant ra -p aying -Ġiv ory -ĠCour ts -OU GH -aunt let -Ser ial -B row -ĠH undreds -3 23 -Ġpe e -Ġlin ux -Ġsub mer -ĠPrinc ipal -48 5 -ĠD SL -ĠCous ins -Ġdoctr ines -ĠAthlet ics -Ġ3 15 -ĠK arma -Ġatt ent -ur ger -Ġpresc ribe -Ġenc aps -ĠC ame -Ġsecret ive -ĠCr imes -d n -C lean -ĠEgypt ians -ĠCar penter -Ġ ll -H um -ĠMil o -Ġcapital ists -Ġbrief ed -T we -ĠBas in -elve t -M os -Ġplun ge -ĠKa iser -ĠFu j -ill in -Ġsafegu ards -Ġo ste -ĠOpportun ity -ĠM afia -ĠCall ing -ap a -ur ban -br ush -ill ard -c é -int elligence -ĠL ob -ĠDru id -Ġsm oother -Ġfoot ing -Ġmotor ists -arc ity -Ġmascul inity -Ġm ism -Ġabdom inal -ĠTa vern -ĠR oh -Ġesc apes -s igned -Anth ony -Ġsacrific ing -Ġintim acy -Ġan terior -ĠK od -Ġmot if -Ġg raz -Ġvisual ization -Ġguitar ist -ĠTro tsky -m agic -D ar -ĠMor i -Ġw ards -Ġtoile ts -l est -Ġtele port -ĠSund ays -ĠPl at -ET S -Ġe Sports -Pat rick -ĠK atherine -en ko -Ġhas sle -ĠM ick -gg les -Ġh ob -aint ain 
-Ġair borne -Ġsp ans -Ġch ili -Ġa perture -Ġvolunte ered -ĠInc ident -ĠF res -ĠVeter an -augh tered -ing o -Ġun insured -CL OSE -Ġf use -Ġer otic -Ġadvert ise -ra ising -Text ure -Ġatt ends -ĠRE AL -udd led -Ġsm oot -Ġ30 5 -ĠWill is -Ġbl ond -An alysis -ĠV T -on ica -Ġstrongh old -R F -N M -. >> -Ġprosper ous -Ġbo asted -29 2 -ĠManufact uring -PR ESS -g ren -Ġpharm acy -ĠRoc kefeller -k ai -Ġth umbs -ĠH ut -Ġmother board -Ġguard ians -ĠAl ter -ll ular -Ġsh ack -Ġwise ly -Ġback bone -erv a -Ġsu icides -ĠMcG regor -ij ah -E mer -ĠB rav -Ġdesign ate -P OST -produ ced -Ġcleans ing -irl wind -ex istent -ĠHum ph -ĠPay ne -Ġv ested -Å ¡ -Ġstring ent -ion a -Ġuns ub -Ġsum med -ĠHer cules -sub ject -ĠR agnar -ĠN os -Ġcharacter ization -Ġsav vy -ĠDaw son -ĠCas ino -Ġf ri -ĠBar rier -Ġmis information -Ġins ulation -Ġcorrid ors -Ġair planes -ĠNo ct -ah i -Ġ19 16 -k b -arm ac -Ġsh un -Ġsche ma -Ġhorr ified -Ġ23 9 -aund ers -N B -i ates -er ity -ĠSh ard -Ġr arity -Ġgroup ed -ĠGh ana -again st -ĠBi ological -ĠA ware -ow ell -Ï Ħ -ĠBe au -sh aw -H ack -ĠJul ius -US S -ol son -aun a -c ru -ĠMaur ice -ĠI k -Ġsequ encing -Ġradical s -Ġ( ?, -v irtual -Ġany ways -Ġreper c -Ġhand lers -Ġhes itant -é ĥ -ĠM F -ple mentation -ass ociated -Ġcampaign ed -ĠY ue -ut ations -ĠY oga -Ġsim mer -Ġro ds -Ġmel ody -Ġconv oy -v ideos -Ġscreen ed -N eg -ochem ical -Ġ( )) -Ġultr as -Ġant ip -ĠIsland ers -70 4 -Ġfet ish -Ġridic ulously -ĠK art -Ġmitochond rial -Ġinterf ering -Build er -Ġover fl -Ġac ne -ĠM ud -ĠK err -f lex -ĠPost al -ĠBalt ic -47 7 -ĠPers ons -our age -H B -ĠM use -ĠImm ortal -ĠDri ving -Ġpet itions -Ġsubsc ript -Ġs orce -ĠProcess or -ut on -S ony -Ġph on -Ġr aced -ĠAnth rop -Ġday time -ĠEx ercise -Add ing -Ġeng ages -ĠQual comm -Ġmir acles -Ġmem es -ĠDr ink -ĠOri oles -Ġhair s -ĠPol ar -ath om -Ġsl ippery -ĠR emy -Ġcar amel -ĠY EAR -Ġal k -I gn -a ution -ĠMer lin -ĠC ran -Ġap ologies -Ġ4 10 -Ġout ing -ĠMem ories -app ointed -Ġcount ered -u ld -pos ing -Ġfire wall -ĠW ast -ĠW et -work 
ed -se ller -Ġrepe aled -ere o -ass uming -BL IC -m ite -ĠCEO s -ĠChap el -ellig ent -________________ ________ -D og -Ġw art -Ġsubsc riber -s ports -Ġbe gged -ĠM V -Ġsem if -eth ical -Ġpre ach -Ġrev ital -Ġpun itive -Ġshort cuts -Ġinstit uted -ĠWars aw -Ġabdom en -ĠK ING -Ġsuper intendent -Ġf ry -ĠGe o -T OR -Ġcontrad ictions -apt ic -Ġlandsc apes -b ugs -Ġcl ust -Ġvol ley -c ribed -Ġt andem -Ġrob es -WH AT -Ġpromot er -Ġel oqu -review ed -ĠD K -ĠPl ato -Ġf ps -T ank -ĠDer rick -Ġpriorit ize -as per -ĠHond uras -ĠCom pleted -ne c -Ġm og -n ir -ĠMay o -DE F -st all -in ness -ĠVolks wagen -Ġprec aution -ĠM ell -i ak -ist ries -Ġ24 8 -Ġoverl apping -Sen ate -ĠEnh ance -res y -rac ial -OR TS -ĠM ormons -Str ong -ĠCo ch -Mex ico -ĠMad uro -Ġj ars -Ġcan e -W ik -oll a -iff erence -Ġphysic ist -ĠMag gie -Ġ28 5 -Ġdep iction -ĠMcL aren -J u -Ġsl ows -Ġcommission ers -ĠWill ow -ĠExpl os -hov ah -Ġtechn ician -Ġhom icides -ĠFl av -ĠTr uman -Ġ100 00 -u ctor -Ġsh ader -News letter -45 7 -Ġre ver -Ġhard ened -Ġwhere abouts -Ġrede velop -Ġcar bs -Ġtra vers -Ġsqu irrel -Ġfoll ower -Ġs ings -50 8 -Ġrabb its -emon ium -Ġdocument ing -Ġmisunder stood -) ' -R ick -gg ies -Ġprem ie -Ġsk ating -Ġpass ports -Ġf ists -aged don -H aw -AC P -0 80 -ĠThough ts -ĠCarl son -Ġpriest hood -h ua -Ġdun geons -ĠLo ans -Ġant is -Ġfamiliar ity -ĠS abb -op al -ĠIn k -st rike -Ġc ram -Ġlegal ized -Ġcu isine -Ġfib re -Tra vel -ĠMon ument -OD Y -eth y -Ġinter state -ĠP UR -em porary -ĠArab ian -develop ed -Ġsadd le -Ġg ithub -ĠOff er -ĠIS P -ro let -ĠSUP ER -ĠDen is -Ġmultipl ier -Ġstir red -Interest ingly -Ġcustom ary -Ġbill ed -he x -Ġmultipl ied -Ġfl ipping -ĠCros by -Ġfundament als -ia e -ĠPlay ed -ĠAt om -am azon -ĠFl am -ee z -activ ated -Ġtables poon -Ġliberal ism -ĠPal in -ĠP atel -N um -ĠT AM -Ġs urn -ĠRel oaded -Ġco ined -" ], -ĠCl ash -ĠAg u -Ġprag matic -ĠActiv ate -Ġ8 02 -Ġtrail ers -Ġsil hou -Ġprob es -Ġcirc us -ĠB ain -ĠLind say -ĠAb bey -Del ivery -Ġconcess ion -Ġgast ro -ĠSpr ite -Ä Ł 
-and el -Ġg imm -Ġaut obi -ĠT urtle -Ġwonder fully -ĠHar am -ĠWorld wide -ĠHand le -Ġtheor ists -Ġsle ek -ĠZh u -ograph ically -EG A -ĠOwn ers -ath s -ĠAntar ctic -n atal -=" " -fl ags -`` `` -Ġs ul -K h -Ġpot assium -Ġlinem an -Ġcere al -ĠSe asons -Ġ20 22 -Ġmat hematic -Ġastron omers -prof essional -Ġf ares -cknow led -Ġch i -Ġyoung sters -Ġmistaken ly -Ġhem isphere -ĠDiv inity -r one -Ġ" , -r ings -Ġattract s -v ana -å ¹ -C AP -Ġplay list -Ġpor ch -ãģ £ -Ġincorpor ates -Ġso ak -Ġassert ing -ĠTerror ism -ĠP ablo -J a -ces ter -Ġfear ing -ĠPr ayer -Ġescal ated -G W -Ġro be -ĠBright on -ac ists -ĠSym phony -ĠDwar f -ĠPar ade -ĠLe go -Ġinex pl -Ġl ords -le af -RA G -l iber -Ġcig ars -ĠJe hovah -60 6 -WIND OWS -ĠLiber ia -eb us -He avy -Ġl ubric -ĠR W -angu ages -Ġnarrow ed -com puter -ĠE mber -Ġmurder ing -Ġdown stream -ĠT uls -ĠT ables -Top ic -ĠAcc uracy -= / -l ost -ĠRe i -Ġprogress es -b ear -Ġestablish ments -Just in -ĠPe ach -ĠG omez -å ¿ -ĠTri angle -Id ent -ĠH ive -Res ources -Ġmix es -ĠAss uming -M u -Ġhyp oc -Ġs ane -ĠW an -id ious -Su ccess -Ġ io -Ang el -Ġdanger ously -ĠCreat ure -W ORK -: [ -ĠKat rina -List ener -M iller -ĠId lib -h ang -Ġcircum vent -h ref -Ġcel estial -ĠWe eks -ĠP ug -ĠDal ton -Ġsubpoen a -uk u -Ġpers isted -pe i -old ing -ĠDoc uments -ĠH ast -ĠC ENT -Ġprim er -Ġsyn onymous -Ġn ib -om bs -Ġnot ation -ĠD ish -ĠAt mosp -Ġforb id -ĠAN G -pat tern -l os -Ġproject iles -b rown -." 
, -ĠVen om -Ġfierce ly -ub lished -ĠU ran -ĠNic arag -4 10 -ĠC AL -OT OS -ĠMir acle -ĠEn chant -Ġguard ing -app end -Att ach -Ġlevel ed -Ġcond oms -ih ilation -64 9 -Ġnight mares -ĠTHE Y -ĠST ART -ĠK inn -Ġroomm ate -Ġhy giene -o pping -J ob -Ġl vl -ĠV ER -ĠKe eping -ab etic -Ġformat ting -eral a -Ġrev isions -Ġres urg -T el -ĠGood man -35 3 -p od -Ġind isp -ĠTrans lation -Ġg own -ĠM und -Ġc is -Ġby stand -col lect -ĠPun jab -act ively -ĠG amb -te ll -Ġimport ing -g encies -Ġloc om -ĠBr ill -H oly -ĠBer ger -Ġshow down -Ġrespond ers -IL Y -Ġt akedown -le ted -Ġmat tered -Ġpredict ive -Ġover lay -G PU -ĠV ick -Ġconvey ed -T ab -pe er -Sc an -Ġdefensive ly -v ae -Ġappro ving -Ġt iers -ĠV ia -quer ade -ĠSaud is -Ġdemol ished -ĠProp he -Ġmon o -Ġhospital ity -H AM -ĠAri el -M OD -ĠTor ah -Ġbl ah -ĠBel arus -erent ial -ĠT uc -Ġbank er -39 7 -Ġmosqu it -ĠScient ist -ĠMus ical -Ġh ust -Sh ift -Ġtor ment -Ġstand off -E duc -ĠF og -Ġampl ifier -Sh ape -Inst ance -ĠCrit ics -Ġda emon -H ouston -Ġmatt ress -ĠID F -Ġobsc ene -ĠA mer -hett i -Ġcomp iling -35 2 -vere tt -ĠRed uction -ist ration -ĠBl essed -ĠB achelor -3 16 -Ġpr ank -ĠVul can -dd ing -Ġm ourning -ĠQu int -ĠBl aster -test ing -Ġsed iment ->> > -ĠE ternity -ĠWH ERE -ĠM aze -Ġreact ing -ĠAl v -oms day -ĠC RA -Ġtransl ator -Ġbog us -at u -We bsite -oll s -Ġbapt ism -Ġs ibling -ĠAut umn -ve z -ãģ® é -gu ards -Ge org -assad ors -ĠFre ud -Ġcontin ents -ĠReg istry -Bern ie -ĸļ 士 -Ġtoler ant -ĠU W -Ġhor ribly -99 5 -ĠMID I -Ġimpat ient -oc ado -er i -ĠWor st -ĠNor ris -ĠTalk ing -Ġdef ends -ens able -Ġ20 21 -Ġanat omy -L ew -Ġdraw er -ĠCan berra -Ġpatri otic -é¾įå ĸļ士 -ĠAv g -AR M -Ġundis closed -Ġfare well -45 9 -b able -ĠAll ison -OL OG -Ġcon co -t ight -ĠAC PI -ĠM ines -l ich -ĠâĶ ľ -represent ed -200 000 -Ġenthusi ast -OT S -b il -ĠIng redients -Ġinvent or -ĠMy SQL -³³ Âł -ĠAB OUT -with in -Ġm k -B ul -ĠF ake -Ġdracon ian -W a -hel m -ĠTer ran -erv ille -Ġcommon place -SI ZE -Ġ" < -re place -ograph s -ĠSE LECT -inc 
ible -ĠMost ly -ĠShe ffield -ĠID E -ugg le -Ġcit ations -h urst -ĠUn ix -Ġunle ash -ĠP iper -ĠN ano -Ġsucc umb -Ġreluct ance -Ġ25 00 -ĠMer chant -Ġwire t -Ġcomb os -ĠBirth day -Ġchar coal -ĠU PS -ĠFair fax -Ġdrive way -ĠT ek -ĠP itch -ove re -Ġtechn icians -ĠAct ual -fl ation -ĠF iscal -ĠEm pty -an amo -Ġmag nesium -Ġsl ut -Ġgrow ers -Invest igators -( ): -ĠS atellite -ĠKe ynes -miss ive -l ane -Ġb orough -3 44 -ĠTE AM -ĠBet hesda -C V -h ower -ĠR AD -Ġch ant -ĠR iy -Ġcompos itions -Ġmild ly -Ġmedd ling -Ġag ility -ane ers -5 01 -Ġsyn th -ling er -29 1 -Ġex claimed -Part y -Ġcont amin -ĠMan or -ĠResp ond -Ġpra ising -Ġman ners -fle et -Sum mer -ĠLy nd -ĠDef initely -gr im -Ġbow ling -st ri -ç Ľ -y nt -Ġmand ates -D IV -Ġreconc ile -view s -ĠDam on -vet te -F lo -ĠGreat est -il on -ic ia -Ġportray al -Ġcush ion -50 4 -19 79 -oss al -App lic -sc ription -Ġmit igation -AT S -p ac -Ġer ased -Ġdefic iencies -ĠHolland e -ĠX u -Ġb red -Ġpregn ancies -f emin -Ġem ph -Ġpl anners -Ġout per -utter ing -Ġperpet rator -Ġm otto -ĠEll ison -ĠNE VER -Ġadmitted ly -AR I -ĠAzerbai jan -Ġmill isec -Ġcombust ion -ĠBott le -ĠL und -ĠP s -ĠD ress -Ġfabric ated -Ġbat tered -Ġs idel -ĠNot ting -Fore ign -ĠJer ome -0 20 -ĠAr bit -Ġkn ots -ĠR IGHT -M oving -ãģ Ļ -Ġsur geries -Ġcour thouse -Ġm astered -Ġhover ing -ĠBr an -ĠAl ison -Ġsaf est -m ilitary -Ġbull ied -Ġbar rage -Read er -ES E -ĠGe ographic -T ools -3 14 -ĠGe ek -ro th -gl ers -ĠF IN -Ï ģ -ĠA ston -al tern -48 8 -Ġveter in -G amer -Ġint el -ren ches -Sh ield -Ġam nesty -ĠB har -Ġp iled -Ġhonor able -ĠInst itutes -Ġso aked -Ġcom a -ĠE FF -34 1 -by tes -ĠG mail -le in -ĠCanad iens -m aterial -I l -Ġinstruct ors -ĠK Y -Ġconce ive -ub b -ĠP ossible -Ġeas ing -ĠChrist ina -Ġcar ic -ĠHD R -R OM -Ġsho vel -de lete -Ġp uff -ĠCh anging -Ġseam lessly -Att ribute -Ġacqu isitions -ak ery -ĠE F -Ġaut istic -ĠT akes -ĠPow der -ĠSt ir -5 10 -ĠBub ble -sett ings -ĠF owler -Ġmust ard -Ġmore over -Ġcopyright ed -ĠLED s -15 00 -æ ī -ĠH IS -en f 
-Ġcust od -ĠH uck -G i -Ġim g -An swer -C t -j ay -ĠInf rastructure -Ġfeder ally -L oc -Ġmicro bes -Ġover run -dd s -ot ent -adi ator ->>>> >>>> -Ġtorn ado -Ġadj ud -Ġintrig ued -Ġs i -ĠRevel ation -pro gress -Ġburgl ary -ĠSai yan -ĠK athy -Ġser pent -ĠAndre as -Ġcomp el -ess ler -ĠPl astic -ĠAd vent -ĠPos itive -ĠQ t -ĠHind us -reg istered -ular ity -Ġrighteous ness -Ġdemon ic -u itive -ĠB DS -ĠGre gg -c ia -ĠCrus ade -ĠSina i -W ARE -+ ( -Ġme ll -Ġder ail -y ards -A st -Ġnotice ably -ĠO ber -R am -Ġun noticed -Ġse q -av age -T s -Ġ6 40 -Ġconced e -Ġ] ) -F ill -Ġcapt ivity -ĠImprove ment -ĠCrus ader -ara oh -M AP -æ Ĺ -Ġstr ide -al ways -F ly -N it -Ġal gae -ĠCook ing -ĠDo ors -Mal ley -Ġpolic emen -ãģ į -Ġastron aut -access ible -49 5 -ĠR AW -cl iffe -udic rous -Ġdep ended -al ach -Ġvent ures -ra ke -Ġt its -ĠH ou -Ġcond om -ormon al -Ġind ent -Ġupload ing -Foot note -Import ant -Ġ27 1 -Ġmind ful -Ġcont ends -C ra -Ġcal ibr -ĠO ECD -plug in -F at -ĠIS S -ĠDynam ics -ans en -68 6 -' ), -Ġsp rite -Ġhand held -ĠH ipp -=~ =~ -Tr ust -Ġsem antics -ĠBund es -ĠRen o -ĠLiter ature -s ense -G ary -ĠA eg -ĠTr in -EE K -Ġcler ic -ĠSS H -Ġch rist -Ġinv ading -ib u -Ġen um -aur a -Ġal lege -ĠInc redible -B BC -Ġth ru -Ġsa iled -Ġem ulate -Ġin security -Ġc rou -Ġaccommod ations -Ġincompet ent -Ġsl ips -ĠEarth qu -s ama -IL LE -Ġi Phones -as aki -Ġby e -Ġar d -Ġext ras -Ġsl aughtered -Ġcrowd funding -res so -Ġfil ib -ĠER ROR -ĠT LS -e gg -ĠIt al -Ġen list -ĠCatal onia -ĠSc ots -Ġser geant -Ġdiss olve -N H -Ġstand ings -ri que -I Q -Ġbenef iciary -Ġaqu arium -You Tube -ĠPower Shell -Ġbright est -ĠWar rant -S old -Writ ing -Ġbegin nings -ĠRes erved -ĠLatin os -head ing -Ġ4 40 -Ġrooft op -AT ING -Ġ3 90 -VP N -G s -k ernel -turn ed -Ġprefer able -Ġturn overs -ĠH els -S a -ĠShin ji -ve h -ĠMOD ULE -V iol -Ġex iting -Ġj ab -ĠVan illa -Ġac ron -ĠG ap -ber n -A k -ĠMc Gu -Ġend lessly -ĠFar age -ĠNo el -V a -M K -Ġbr ute -ĠK ru -ĠES V -ĠOl ivia -âĢ ł -ĠK af -Ġtrust ing -Ġh ots -3 24 
-Ġmal aria -Ġj son -Ġp ounding -ort ment -Count ry -Ġpostp oned -Ġunequ iv -? ), -ĠRo oney -udd ing -ĠLe ap -ur rence -sh apeshifter -ĠH AS -os ate -Ġca vern -Ġconserv atism -ĠB AD -Ġmile age -Ġarrest ing -V aults -Ġmix er -Dem ocratic -ĠB enson -Ġauth ored -8 000 -Ġpro active -ĠSpirit ual -t re -Ġincarcer ated -ĠS ort -Ġpe aked -Ġwield ing -re ciation -×Ļ × -P atch -ĠEm my -Ġex qu -tt o -ĠRat io -ĠP icks -ĠG ry -ph ant -Ġf ret -Ġeth n -Ġarch ived -% - -c ases -ĠBl aze -Ġim b -c v -y ss -im ony -Ġcount down -Ġaw akening -ĠTunis ia -ĠRe fer -ĠM J -Ġun natural -ĠCar negie -iz en -ĠN uggets -he ss -Ġev ils -64 7 -Ġintrodu ctory -l oving -ĠMcM ahon -Ġambig uity -L abel -ĠAlm ighty -Ġcolor ing -ĠCl aus -set ting -N ULL -ĠF avorite -ĠS IG -> ( -ĠSh iva -ĠMay er -Ġstorm ed -ĠCo verage -we apons -igh am -Ġun answered -Ġle ve -Ġc oy -c as -b ags -as ured -Se attle -ĠSant orum -ser ious -Ġcourage ous -ĠS oup -Ġconfisc ated -Ġ// / -Ġuncon ventional -Ġmom s -ĠRohing ya -ĠOrche stra -ĠPot ion -Ġdisc redit -ĠF IL -f ixed -ĠDe er -do i -ĠDim ension -Ġbureaucr ats -et een -Ġaction Group -oh m -Ġb umps -ĠUt ility -Ġsubmar ines -ren heit -re search -ĠShap iro -Ġsket ches -Ġde ceptive -ĠV il -es ame -ĠEss entially -Ġramp age -isk y -Ġmut tered -th ritis -Ġ23 6 -f et -b ars -Ġpup il -ĠTh ou -o S -s ong -Ġfract ured -Ġre vert -pict ure -Ġcrit erion -us her -Ġreperc ussions -ĠV intage -ĠSuper intendent -Offic ers -Ġflag ged -Ġbl ames -Ġin verse -ograp hers -Ġmakes hift -Ġdev oid -Ġfoss ils -ĠArist otle -ĠFund s -Ġde pleted -ĠFl u -ĠY uan -Ġw oes -Ġlip id -Ġsit u -requ isites -Ġfurn ish -ĠSam ar -Ġshame ful -Ġadverse ly -Ġad ept -Ġrem orse -Ġmurder ous -uck les -ĠE SL -Ġ3 14 -s ent -Ġred ef -ĠC ache -ĠP urs -ig ans -Ġ4 60 -Ġpres criptions -Ġf res -F uck -ocr ates -Tw enty -ĠWe ird -ĠT oggle -ĠC alled -itiz ens -Ġp oultry -Ġharvest ing -ãĤ¦ ãĤ¹ -Bott om -Ġcaution ed -t n -39 6 -ĠNik ki -Ġeval uations -Ġharass ing -Ġbind ings -ĠMon etary -Ġhit ters -Ġadvers ary -un ts -Ġset back -Ġenc rypt 
-ĠC ait -Ġl ows -eng es -ĠN orn -Ġbul bs -Ġbott led -ĠVoy ager -3 17 -Ġsp heres -p olitics -Ġsubt ract -Ġsens ations -Ġapp alling -Ġ3 16 -Ġenvironment ally -ĠST EM -Ġpub lishes -5 60 -Ġdilig ence -48 4 -Ġadv ises -Ġpet rol -Ġimag ining -Ġpatrol s -ĠInt eger -ĠAs hes -act us -ĠRad iant -ĠL T -it ability -ht aking -Set ting -Ġnu anced -ĠRe ef -ĠDevelop ers -N i -pie ces -99 0 -Lic ense -Ġlow ers -ĠOtt oman -3 27 -oo o -Ġqu itting -mark ets -Beh ind -Ġbas in -Ġdoc s -an ie -fl ash -ct l -Ġcivil ized -ĠFuk ushima -"] ," -ĠK S -ĠHonest ly -ar at -Ġconstruct s -ĠL ans -ĠD ire -ĠLI KE -ĠTrou ble -Ġwith holding -ĠOb livion -Ġsan ity -any a -Con st -Ġgro cer -ĠC elsius -Ġrecount ed -ĠW ife -B order -ate red -h appy -Ġspo iler -Ġlog ically -H all -Ġsucceed ing -Ġpoly morph -Ġax es -ĠShot gun -ĠS lim -ĠPrin ciples -ĠL eth -art a -Ġsc or -Sc reenshot -Ġrelax ation -#$ #$ -Ġdeter rent -idd y -Ġpower less -Ġles bians -Ġch ords -ĠEd ited -se lected -Ġseparat ists -000 2 -Ġair space -Ġturn around -Ġc unning -P ATH -P oly -Ġbomb ed -Ġt ion -x s -Ġwith hold -Ġw aged -ĠLiber ties -Fl ag -Ġcomfort ing -45 4 -ĠI ris -are rs -Ġr ag -Ġrel ocated -ĠGu arant -Ġstrateg ically -Ġgam ma -uber ty -ĠLock heed -g res -Ġgr illed -ĠLow e -st ats -ĠR ocks -Ġsens ing -Ġrent ing -ĠGe ological -ا Ø -ot rop -Ġse w -Ġimproper ly -48 6 -Ġâĸ ł -Ġstar ving -ĠB j -Disc ussion -3 28 -ĠCom bo -ĠFix es -N AT -Ġstri ving -th ora -Ġharvest ed -ĠP ing -Ġplay ful -Ġaven ues -Ġoccup ational -Ġw akes -ĠCou rier -Ġdrum mer -ĠBrow ser -ĠH outh -it u -Ġapp arel -p aste -Ġhun ted -ĠSecond ly -l ain -X Y -ĠP IN -ic ons -Ġcock tails -Ġs izable -Ġhurd les -est inal -ĠRecre ation -Ġe co -64 8 -ĠD ied -m int -Ġfinger prints -Ġdis pose -ĠBos nia -ts y -22 00 -Ġins pected -ĠF ou -Ġf uss -Ġamb ush -ĠR ak -Ġmanif ested -Pro secut -Ġsuff ice -ren ces -Ġcompens ated -ĠC yrus -Ġgen us -ĠWolver ine -ĠTrend s -Ġh ikes -ĠSe en -Ġen rol -C old -Ġpol itely -ĠSl av -ĠRu pert -Ġey ewitness -ĠAl to -Ġun comp -Ġposter ior -M ust -ĠHer z 
-Ġprogress ively -Ġ23 4 -Ġind ifference -ĠCunning ham -Ġacadem ia -Ġse wer -Ġast ounding -ĠA ES -r ather -Ġeld est -Ġclim bs -ĠAdd s -Ġout cry -Ġcont ag -ĠH ouses -Ġpe pt -ĠMel ania -interest ed -ĠU CH -ĠR oots -ĠHub bard -ĠT BD -ĠRoman ian -fil ename -St one -ĠIm pl -Ġchromos ome -C le -d x -Ġscram bled -ĠP t -Ġ24 2 -OP LE -Ġtremend ously -St reet -Ġcra ving -Ġbund led -ĠR G -p ipe -Ġinj uring -Ġarc ane -Part icip -ĠHero ic -st y -Ġto pping -ĠTemp est -rent ices -b h -Ġpar anoia -ĠUnic ode -Ġegreg ious -Ġ\ ' -ĠOsw ald -Ġgra vel -ĠSim psons -Ġbl and -ĠGuant anamo -Writ er -lin ers -ĠD ice -J C -Ġpar ity -Ġs ided -Ġ23 7 -ĠPyr rha -at ters -d k -F ine -comp an -Ġform ulated -ĠId ol -il ers -hem oth -ĠF av -Ġintr usion -Ġcar rots -ĠL ayer -ĠH acker -Ġ ---------------- -Ġmoder ation -é ģ -oc oc -Ġcharacter ize -ĠTe resa -Ġsocio economic -Ġper k -ĠParticip ation -tr aining -ĠPaul o -ph ys -Ġtrust worthy -Ġembod ied -ĠMer ch -c urrency -ĠPrior ity -Ġte asing -Ġabsor bing -Ġunf inished -ĠCompar ison -Ġdis ple -writ ers -Ġprofess ions -ĠPengu in -Ġang rily -ĠL INK -68 8 -ĠCor respond -Ġprev ailed -Ġcart el -l p -as ms -ĠRed emption -ĠIslam ists -effect s -d ose -ĠL atter -ĠHal ifax -Ġv as -ĠTop ics -ĠN amed -advert ising -zz a -IC ES -Ġret arded -ach able -ĠPupp et -ĠItem Level -Ġret ract -Ġident ifiable -A aron -ĠB uster -s ol -hel le -as semb -H ope -r anged -B a -ĠP urch -é Ģ -ĠSir i -Ġarri vals -Ġ19 12 -Ġshort ened -Ġ3 12 -Ġdiscrep ancy -ĠTem perature -ĠWal ton -Ġkind erg -p olit -Ġrem ix -Ġconnect ors -ãĥĺ ãĥ© -ĠKazakh stan -dom inated -Ġsu gars -im ble -ĠPan ic -ĠDem and -ĠCol ony -on en -ĠM ER -7 75 -ur ia -aza ar -ĠDeg ree -P ri -Ġsun shine -Ġ25 1 -Ġpsychedel ic -Ġdigit ally -ĠBra un -Ġsh immer -Ġsh ave -ĠTel esc -ĠAst ral -ĠVenezuel an -ĠO G -Ġc rawling -Int eg -ĠFe ather -Ġunfold ing -Ġappropri ation -Ġè£ı è -ĠMob ility -ĠN ey -- . 
-b ilt -L IN -ĠT ube -ĠCon versely -Ġkey boards -ĠC ao -Ġover th -Ġla ure ->> \ -ĠV iper -ach a -Off set -ĠR aleigh -ĠJ ae -J ordan -j p -Ġtotal itarian -Connect or -Ġobserv es -ĠSpart an -ĠIm mediately -ĠSc al -C ool -Ġt aps -Ġro ar -P ast -Ġch ars -ĠB ender -ĠShe ldon -Ġpain ter -Ġbe acon -ĠCreat ures -Ġdownt urn -Ġh inder -ĠAnd romeda -à Ľ -cc oli -ĠF itness -et rical -Ġutil izes -Ġsen ate -Ġen semble -Ġche ers -T W -Ġaff luent -k il -ry lic -ord ering -Com puter -Ġgru esome -ost ics -ĠUb isoft -ĠKel ley -Ġw rench -Ġbourgeois ie -IB LE -ĠPrest on -w orn -ar ist -reat ing -Ġst ained -ar ine -Ġsl ime -EN N -Ġche sts -Ġground water -ann ot -ĠTr ay -ĠLoc ke -ĠC TR -Ġd udes -ĠEx ternal -ĠDec oder -Ġpar amed -ĠMed line -80 9 -ĠD inner -rup al -g z -ĠG um -ĠDem o -j ee -Ġd h -ber man -arch s -Ġen qu -ĠEp stein -Ġdevast ation -Ġfriends hips -ĠAr d -Ġ23 1 -ĠRub in -ĠDist ance -Ġsp urred -Ġd ossier -Ġover looking -\\\\\\\\ \\\\\\\\ -Fore st -ĠCom es -\ ", -ĠIran ians -Ġf ixtures -L aughs -Ġcur ry -ĠKing ston -Ġsqu ash -Ġcat alogue -Ġabnormal ities -Ġdigest ive -.... ..... -Ġsubord inate -og ly -Ġ24 9 -M iddle -Ġmass ac -Ġburg ers -Ġdown stairs -Ġ19 31 -39 4 -ĠV G -Ġl asers -ĠS ikh -ĠAlex a -der ived -Ġcycl ist -ãģ® éŃĶ -onel iness -!!!! !!!! 
-Ġbuff s -leg ate -Ġrap ing -Ġrecomm ending -ro red -Ġmult icultural -un ique -Ġbusiness men -Ġune asy -ĠM AP -Ġdisp ersed -cipl ine -J ess -ĠK erala -å § -Ġabst raction -Sur v -U h -Ġprin ters -ij a -ow der -Ġanalog ous -ĠA SP -af er -Ġunfold ed -Ġlevel ing -Ġbre ached -ĠH earing -Ġn at -Ġtransl ating -crit ical -Ġant agonist -ĠYes terday -Ġfuzz y -w ash -m ere -Ġbe wild -ĠM ae -V irgin -ph rase -Ġsign aled -ĠH IGH -Ġprot ester -Ġgar ner -unk nown -Ġk ay -Ġabduct ed -Ġst alking -am n -Ġdes erving -ĠR iv -ĠJ orge -Ġscratch ing -ĠS aving -ip ing -Ġte ase -Ġmission ary -ĠMor row -T IME -P resent -Ġchem otherapy -tern ess -ĠH omes -ĠP urdue -Ġst aunch -ĠWhit ney -ĠTH ERE -Î ¼ -iat us -ĠErn est -ĠDe ploy -Ġcove ted -F ML -ĠDial ogue -Ġex ited -f ruit -Ġner d -":" "," -Ġv ivo -ru ly -4 60 -ĠAm en -rehens ible -Ġâ ĺ -D IR -Ġad herence -Ġche w -ĠCo ke -ĠSerge i -dig ital -ĠNe ck -g ently -enth al -/ ) -Ġwe ary -Ġgu ise -ĠConc ord -ĠOn ion -at cher -Ġb inge -ĠDirect ive -Ġman ned -ans k -Ġill usions -Ġbillion aires -38 3 -oly n -odynam ic -ĠWhe at -ĠA lic -Ġcol oured -ĠN AFTA -ab o -Ġmac ros -ind ependent -s weet -Ġsp ac -ĠK abul -Ġ Ä -em e -Ġdict ated -Ġsh outs -= { -Ġr ipping -ĠSh ay -ĠCr icket -direct ed -Ġanalys ed -ĠWAR RANT -ag ons -ĠBlaz ers -Ġche ered -Ġar ithmetic -ĠTan z -37 3 -ĠFl ags -Ġ29 5 -Ġw itches -ĠIn cluded -ĠG ained -ĠBl ades -G am -ĠSam antha -ĠAtl antis -ĠPr att -Ġspo iled -ĠI B -ĠRam irez -Pro bably -re ro -ĠN g -ĠWar lock -t p -Ġover he -Ġadministr ations -Ġt int -Ġreg iment -Ġpist ols -Ġblank ets -Ġep ist -Ġbowl s -Ġhydra ulic -Ġde an -Ġj ung -Ġasc end -70 5 -ĠSant iago -à ® -Ġun avoid -ĠSh aman -re b -Ġstem ming -99 8 -ĠM G -st icks -esthes ia -ER O -Ġmor bid -ĠGr ill -ĠP oe -any l -Ġdele ting -ĠSurve illance -Ġdirect ives -Ġiter ations -ĠR ox -ĠMil ky -F ather -Ġpat ented -44 7 -Ġprec ursor -Ġm aiden -ĠP hen -ĠVe gan -ĠPat ent -K elly -Redd itor -Ġn ods -Ġvent ilation -ĠSchwar z -Ġw izards -Ġomin ous -ĠHe ads -ĠB G -Ġl umber -ĠSp iel -Ġis Enabled 
-Ġancest ral -ĠSh ips -Ġwrest ler -ph i -Ġy uan -ĠRebell ion -Ġice berg -Ġmag ically -Ġdivers ion -ar ro -yth m -ĠR iders -ĠRob bie -ĠK ara -ĠMain tenance -ĠHer b -Ġhar ms -p acked -ĠFe instein -Ġmarry ing -Ġbl ending -ĠR ates -Ġ18 80 -Ġwr ink -ĠUn ch -ĠTor ch -desc ribed -Ġhuman oid -ilit ating -ĠCon v -ĠFe ld -IGH TS -Ġwhistlebl ower -ort mund -ets y -arre tt -ĠMon o -ĠI ke -ĠC NBC -ĠW AY -ĠMD MA -ĠIndividual s -Ġsupplement al -Ġpower house -ĠSt ru -F ocus -aph ael -ĠCol leg -att i -Z A -Ġp erenn -ĠSign ature -ĠRod ney -Ġcub es -idd led -ĠD ante -ĠIN V -iling ual -ĠC th -Ġso fa -Ġintimid ate -ĠR oe -ĠDi plom -ĠCount ries -ays on -Ġextrad ition -Ġdis abling -ĠCard iff -Ġmemor andum -ĠTr ace -Ġ?? ? -se ctor -ĠRou hani -ĠY ates -ĠFree ze -Ġbl adder -M otor -ĠProm ise -ant asy -Ġforesee able -ĠC ologne -cont ainer -ĠTre es -ĠG ors -ĠSin clair -Ġbar ring -key e -Ġsl ashed -ĠStat istical -é ĩ -Ġâĸ º -All ows -Ġhum ility -Ġdr illed -ĠF urn -44 3 -Ġse wage -Ġhome page -Ġcour tyard -Ġv ile -Ġsubsid iaries -aj o -direct ory -Ġam mon -V ers -charg es -Ġ} } -ĠCh ains -Ġ24 6 -n ob -Ġper cept -Ġg rit -Ġfisher men -ĠIraq is -ĠDIS TR -ĠF ULL -ĠEval uation -g raph -at ial -Ġcooper ating -Ġmel an -Ġenlight ened -Ġal i -t ailed -Ġsal ute -Ġweak est -ĠBull dogs -U A -ĠAll oy -Ġsem en -oc ene -ĠWilliam son -s pr -, âĢĶ -ĠG F -itt ens -Be at -ĠJ unk -iph ate -ĠFarm ers -ĠBit coins -ig ers -d h -ĠL oyal -p ayer -Ġentert ained -Ġpenn ed -Ġcoup on -Que ue -Ġweaken ing -c arry -Ġunderest imate -Ġshoot out -Ġcharism atic -ĠProced ure -Ġprud ent -in ances -Ġric hes -Ġcort ical -Ġstr ides -Ġd rib -ĠOil ers -5 40 -ĠPer form -ĠBang kok -Ġe uth -S ER -Ġsimpl istic -t ops -camp aign -Q uality -Ġimpover ished -ĠEisen hower -Ġaug ment -ĠH arden -Ġinterven ed -Ġlist ens -ĠK ok -Ġs age -Ġrub bish -ĠD ed -Ġm ull -pe lling -Ġvide ot -Produ ction -D J -m iah -Ġadapt ations -Ġmed ically -Ġboard ed -Ġarrog ance -Ġscra pped -Ġopp ress -FORM ATION -Ġj unction -4 15 -EE EE -S kill -Ġsub du -ĠSug gest -ĠP 
ett -Ġle tt -ĠMan ip -ĠC af -ĠCooper ation -T her -Ġreg ained -¶ æ -ref lect -Ġth ugs -ĠShel by -Ġdict ates -ĠWe iner -ĠH ale -Ġbatt leground -s child -Ġcond ol -h unt -osit ories -Ġacc uses -Fil ename -Ġsh ri -Ġmotiv ate -Ġreflect ions -N ull -ĠL obby -¥ µ -ĠS ATA -ĠBack up -Ñ ĥ -n in -ĠCor rection -Ġju icy -ut ra -ĠP ric -Ġrest raining -ĠAir bnb -ĠAr rest -Ġappropri ations -Ġsl opes -Ġmans laughter -Ġwork ings -ĠH uss -ĠF rey -Le ave -ĠHarm ony -ĠF eder -Ġ4 30 -Ġt rench -Ġglad ly -Ġbull pen -ĠG au -b ones -Ġgro ove -Ġpre text -ã ħĭ -Ġtransm itter -ĠComp onent -Ġunder age -ĠEm pires -T ile -Ġo y -ĠMar vin -ĠC AS -Ġbl oss -Ġrepl icated -ĠMar iners -Marc us -ĠBl ocks -Ġliber ated -Ġbutter fly -Fe el -Ġfer mentation -Ġyou tube -Ġoff end -ĠTer m -res ist -Ġcess ation -Ġinsurg ency -Ġb ir -ĠRa ise -59 5 -Ġhypothes es -50 2 -Ġpl aque -ocr at -Ġjack ets -ĠHuff Post -am ong -Ġconf er -48 7 -ĠL illy -Ġadapt ing -ĠF ay -Ġsh oved -ve c -Ġref ine -Ġg on -Ġgun men -z ai -ĠShut tle -ĠI zan -Ġ19 13 -Ġple thora -· · -Ġ5 10 -Ġp uberty -Ġ24 1 -ĠWe alth -ĠAl ma -ĠM EM -ĠAd ults -C as -pr ison -R ace -Ġwater proof -Ġathlet icism -Ġcapital ize -ĠJu ice -Ġillum inated -ĠP ascal -Ġirrit ation -ĠWitness es -ad le -ĠAst ro -Ġf ax -ĠEl vis -Prim ary -ĠL ich -ĠEl ves -Ġres iding -Ġst umble -3 19 -ĠP KK -Ġadvers aries -D OS -ĠR itual -Ġsm ear -Ġar son -ident al -Ġsc ant -Ġmon archy -Ġhal ftime -Ġresid ue -Ġind ign -ĠSh aun -ĠEl m -aur i -A ff -W ATCH -ĠLy on -hel ps -36 1 -Ġlobby ist -Ġdimin ishing -Ġout breaks -Ġgo ats -f avorite -ĠN ah -son ian -ĠBo oster -Ġsand box -ĠF are -ĠMalt a -Ġatt Rot -ĠM OR -ld e -Ġnavig ating -T ouch -Ġunt rue -ĠDis aster -Ġl udicrous -Pass word -ĠJ FK -blog spot -4 16 -ĠUN DER -ern al -Ġdelay ing -T OP -Ġimpl ants -ĠAV G -ĠH uge -att r -Ġjournal istic -ĠPe yton -ĠI A -R ap -go al -ĠProgram me -Ġsm ashing -w ives -print ln -ĠPl ague -in us -EE P -Ġcru iser -ĠPar ish -umin ium -Ġoccup ants -ĠJ ihad -m op -Ġp int -Ġhe ct -ĠMe cca -direct or -ĠFund ing -ĠM ixed -Ġst 
ag -T ier -Ġg ust -Ġbright ly -ors i -Ġup hill -R D -Ġles ions -ĠBund y -liv ious -Ġbi ologist -ĠFac ulty -ĠAuthor ization -Ġ24 4 -All ow -ï ¸ -ĠGi ul -Ġpert inent -ot aur -es se -ĠRo of -Ġunman ned -35 1 -ĠSh ak -ĠO rient -Ġend anger -D ir -Ġrepl en -ed ient -Ġtail or -Ġgad gets -Ġaud ible -âĺ Ĩ -N ice -Ġbomb ard -ĠR ape -Ġdef iance -ĠTW O -ĠFilip ino -Ġunaff ected -erv atives -Ġso ared -ĠBol ton -Ġcomprom ising -ĠBrew ers -R AL -ĠA HL -icy cle -Ġv ampires -Ġdi pped -oy er -ĠX III -Ġsidew ays -ĠW aste -ĠD iss -ĠâĶľ âĶĢâĶĢ -$ . -Ġhabit ats -ĠBe ef -tr uth -tr ained -spl it -R us -And y -ĠB ram -RE P -p id -è£ ħ -ĠMut ant -An im -ĠMar ina -Ġfut ile -hig hest -f requency -Ġepile psy -Ġcop ing -Ġconc ise -Ġtr acing -ĠS UN -pan el -ĠSoph ie -ĠCrow ley -ĠAd olf -ĠShoot er -Ġsh aky -ĠI G -ĠL ies -ĠBar ber -p kg -Ġupt ake -Ġpred atory -UL TS -/ ** -Ġintox icated -ĠWest brook -od der -he ment -Ġbas eman -AP D -st orage -ĠFif ty -ed itor -G EN -UT ION -ir ting -Ġse wing -r ift -Ġag ony -ĠS ands -Ġ25 4 -C ash -Ġl odge -Ġp unt -N atural -ĠIde as -Ġerrone ous -ĠSens or -ĠHann ity -Ġ19 21 -Ġm ould -ĠG on -kay a -Ġanonym ously -ĠK EY -Ġsim ulator -W inter -Ġstream ed -50 7 -? ", -Ġte ased -Ġco efficient -Ġwart ime -ĠTH R -' '. 
-ĠBank ing -mp ire -Ġf andom -Ġl ia -G a -Ġdown hill -Ġinterpre ting -Ind ividual -N orm -Ġjealous y -bit coin -Ġple asures -ĠToy s -ĠChev rolet -ĠAd visor -IZ E -Ġrecept ions -70 6 -C ro -Ġ26 2 -Ġcit rus -ir u -Review er -ject ed -U ES -an z -19 81 -ĠWork er -Ġcompl ied -ores cent -contin ental -T on -ĠPr ism -ĠShe ep -Ġ28 8 -n ox -ĠV og -O rd -Ġreal ms -te k -Ġirrig ation -Ġbicy cles -Ġelectron ically -p oly -t all -() ); -Ġaest hetics -ĠInteg rated -Expl ore -Ġd unk -47 6 -p ain -ĠJac ques -ĠD mit -Fram es -Ġreun ited -Ġhum id -D ro -P olitical -Ġyouth ful -Ġent ails -Ġmosqu ito -36 3 -spe cies -Ġcoord inating -ĠMay hem -ĠMagn us -M ount -Impro ved -ĠST ATE -ATT LE -Ġflow ed -Ġtack led -Ġfashion ed -Ġre organ -iv ari -f inger -Ġreluct antly -et ting -ĠV and -you ng -ĠGar land -Ġpresum ption -Ġamen ities -ĠPle asant -on ential -ĠO xy -Ġmor als -ĠY ah -Read y -Sim on -En h -D emon -Ġcl ich -Mon itor -ĠD U -Ġwel comes -Ġstand out -Ġdread ful -Ġban anas -Ġball oons -h ooting -bas ic -Ġsuff ix -Ġd uly -can o -Ch ain -at os -Ġgeop olitical -Ġ( & -ĠGem ini -ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ -Ġacqu itted -L uck -prot ect -10 24 -Ġsc arcity -Ġmind fulness -ec ided -D N -pr ime -ĠPres idents -ĠVID EO -Ġ( âĪĴ -add ock -N OR -ĠP ru -p un -ĠL OL -)) )) -ĠL iqu -ĠS AS -Ġsty ling -Ġpunish ments -Ġnum b -Ġasc ertain -ĠRock ies -f lu -Th umbnail -Ġperpet rated -ĠSem i -Ġdis arm -ĠOld er -ĠEx ception -Ġexponent ially -ĠCommun ities -Ġabol ish -ĠPart ner -pt oms -Ġ7 77 -ĠFo ley -ĠC ases -Ġgre ase -ĠReb irth -G round -Ġ; ) -ĠDoct rine -ik ini -Y e -ĠBl ossom -Ġpers ists -b ill -Ġinf usion -Ġbud dies -9 11 -ĠPat ient -Ġdem os -Ġacquaint ance -ĠP aw -at ari -Ġx ml -Ġfasc ination -ĠSer ve -Ï Ĥ -br anded -Ġa z -Return s -Ġover shadow -Ġro am -Ġspeed y -n umbered -hel ial -Ġdisc iple -Ġass urances -g iven -pect ing -ĠN atalie -çĶ ° -Ġmosquit oes -rote in -Ġnumer ic -Ġindepend ents -Ġtrans 
itional -Ġreaction ary -ĠMech dragon -do ctor -Ġshort est -Ġsequ ential -ĠB ac -ĠAccount s -ãģ Į -ach y -ract ive -ĠReg iment -Ġbreat htaking -ffic iency -ĠB ates -Ġ3 11 -Ġward robe -ft s -ĠBer k -Sim ply -ĠRivers ide -iver ing -ident ial -lu cent -Ġen riched -ĠCon ver -ĠG iving -ãĥ Ļ -Ġlegal ize -ĠF TC -Ġfre aking -M ix -Ġter restrial -es ian -ci ents -W ing -LO AD -Ġled ge -ĠViol ent -ĠMet all -Ġ30 8 -Ġs outheastern -hett o -M eat -Ġslow down -Ġret reated -Jere my -end as -**** * -er ic -Ġre ins -opp able -ĠHuman ity -ear ances -rig an -C amera -Ġwa ivers -s oc -Ġalter ation -trans form -ĠC emetery -50 6 -Ġindef inite -Ġstim ulating -y g -60 3 -ĠS op -Ġdescript ive -Ph ase -ĠEd mund -Ġpneum onia -vent us -A mb -Ġlabor atories -ĠEx clusive -ug ar -W ere -Ġmalf unction -Ġhomosexual s -Ġ---- --- -un i -Ġturb ines -ĠEqu ity -D u -Ġmind ed -ĠR H -ĠBlack hawks -Ġfe ats -Ġ17 00 -re pl -36 2 -lad en -Ġindisp ensable -ly ss -tt i -Ġre el -Ġdiver ted -Ġlik eness -Ġsubscript ions -Ġfing ert -Ġfil thy -dest ruct -d raft -ĠBernard ino -l aunch -Ġper plex -ĠS UM -car b -Ġswe ater -ĠVent ure -ĠJ ag -ĠCele b -ĠV oters -Ġstead fast -Ġathlet ics -ĠHans on -ĠDr ac -Tr acker -Ġcomm end -ĠPres idency -ĠD ID -in formed -Ġweb page -P retty -Ġforce fully -ãĥĥ ãĤ¯ -Ġrel ocation -Ġsat ire -â ī -ĠSunder land -æ Ħ -V oice -???? ???? -Ġinform ant -Ġbow el -ĠUn iform -Ġ ..." 
-Ġpur ge -Ġpic nic -ĠU mb -ĠU PDATE -ĠSapp hire -ĠSt all -le arn -Ġobject ively -Ġob liter -Ġlooph ole -Ġjour neys -Ġo mission -Pro s -ĠSid ney -pl oma -Ġspray ed -Ġg uru -Ġtra itor -Ġtim et -Ġsn apping -ĠSe vent -urn al -ĠUk ip -Ġb owed -por al -l iberal -R os -Quest ions -i OS -Ġsummar ize -ST AT -Ġ18 50 -ap est -Ġl ender -ĠVari able -br inging -ĠL ORD -, ) -Ġcollaps es -x iety -ĠN ed -Y D -ĠSch a -Ġantib ody -Ġdis band -y re -ill usion -Ġro ver -s hed -ĠHiro sh -cc i -Ġcal am -ĠMort on -P interest -Ġ19 28 -ĠE uras -ord es -Ġf ences -ĠIn ventory -ĠVal encia -ĠU d -ĠT iff -Ġsqu e -Ġqu otation -Ġtroubles ome -er ker -QU EST -ĠKing doms -s outh -Ġle vy -Pr ince -ĠSt ing -Ġnick named -Ġapp e -Ġphot ographic -Ġcorp us -re ference -ĠT rog -U nt -) =( -ĠLat via -Ġactiv ating -Ġlicense e -Ġdispar ities -ĠNews letter -ãĥĥ ãĥĪ -Ġfree ing -ĠJe ep -ĠPer ception -ins k -Ġsil icone -ĠHay den -Le an -ĠSuz uki -ibr arian -66 8 -Ġsp or -Ġcorrel ations -ag hetti -Ġtu ber -ĠIP CC -il us -ĠV u -Ġwealth iest -ĠCarb uncle -an za -Ġfool ed -ĠZ ur -Ġd addy -ran o -il ian -Ġknock out -f man -requ ired -ĠWik ileaks -ĠD uffy -ON T -Ġins ol -ĠObject s -Ġb ou -ĠNord ic -ĠIns ert -sc an -Ġd ancers -Ġid iots -major ity -ĠNev ille -ĠFree BSD -Ġt art -pan ic -69 0 -Ġcoc oa -Ġsam pled -Ġlook up -Ind ust -Ġinject ions -gen re -Ġa u -Ġroad way -Ġgen itals -K ind -ĠEx aminer -ĠY az -F resh -Ġpar alysis -ĠAl uminum -Ġre ap -ok é -Ġsl oppy -ĠTun nel -pos ium -ner y -en ic -Ġher bal -ĠOut er -ĠBuild er -Ġinc ur -Ġide ologies -Ġback ups -cons uming -ĠDet ect -de ck -ĠKN OW -ĠG ret -ĠM IC -Ġtough ness -ĠEx hibit -Ġh ive -L es -ĠSCH OOL -ĠAt ari -ald e -ĠN ull -and estine -m ouse -Ġbrig ade -48 9 -Ġrev ol -ĠLaw son -ĠW ah -op oly -eb ted -ĠS aunders -Ġ3 13 -ĠW inc -Ġtab oo -ĠHel met -Ġw edge -ch ip -ĠT ina -b g -Ġinf uri -r n -Ġanomal ies -ĠSy nc -ĠEx am -ĠComm it -ĠDi ary -ĠALS O -ĠDe bor -omed ical -Ġcomprehens ion -6 55 -Ġempower ing -Ġ ire -Ġju ices -ĠE TH -ĠBox ing -=" / -Ġfacilit ated -p oke -ĠPars 
ons -ĠMod er -tra vel -Ġcivil izations -Ġliber tarians -Ġrun e -ĠCl arks -at hed -Ġcampaign ers -ĠDis patch -ĠFah renheit -ĠCap com --------- -- -Ġl ace -Ġdr aining -Ġl iner -ĠArt ificial -é n -t ask -] ). -ĠGM O -ĠOper ator -ord inary -ĠInf luence -ĠU ps -Ġpot ency -uss en -osp ons -ĠSw im -ĠDead line -Un ity -Ġcul inary -Ġenlight enment -Ġwe arer -Ġmin ed -Ġp ly -Ġinc est -ĠDVD s -W alk -B TC -Tr ade -Ġdev al -ib and -ĠOvers ight -Palest inian -Ġd art -Ġm ul -L R -Ġrem ovable -ĠReal ms -ì Ŀ -Ġmisc ar -ĠV ulkan -68 5 -è re -ĠS ap -Ġmer ging -ĠCar ly -che ster -Ġbr isk -Ġlux urious -ĠGener ator -Ġbit terness -Ġed ible -Ġ24 3 -T G -Ġrect angle -With No -bel ow -J enn -Ġdark est -Ġh itch -Ġdos age -Ġsc aven -ĠK eller -ĠIllust rated -Certain ly -ĠMaver icks -Marg inal -Ġdiarr hea -Ġenorm ously -Ġ9 99 -sh r -qu art -Ġadam ant -ĠM ew -Ġren ovation -Ġcerv ical -ĠPercent age -en ers -ĠKim ber -Ġflo ats -Ġde x -ĠW itcher -ĠSwan sea -d m -Ġsal ty -y ellow -Ġca pe -ĠDr ain -ĠPaul a -ĠTol edo -les i -Mag azine -ĠW ick -ĠM n -ĠA ck -ĠR iding -AS ON -Ġhom ophobic -AR P -Ġwand ered -C PU -ood oo -ĠP ipe -Ġtight ening -ĠBut t -3 18 -Ġdesert ed -S ession -Ġfacilit ating -J ump -Ġemer gencies -OW ER -Ġexhaust ive -ĠAF TER -Ġheart beat -ĠLab el -ack y -ĠCert ified -ilt ration -Z e -ĠU tt -Ġ13 00 -Ġpres ume -ĠDis p -Ġsur ged -Ġdoll s -Col umb -Ġchim pan -ĠR azor -Ġt icks -Ġcouncill or -Ġpilgr image -ĠReb els -ĠQ C -ĠA uction -x ia -ik k -b red -Ġinsert ion -Ġco arse -d B -SE E -ĠZ ap -ĠF oo -Ġcontem por -ĠQuarter ly -ot ions -ĠAl chemist -ĠT rey -ĠDu o -S weet -80 4 -ĠGi ov -Ġfun n -N in -h off -Ġram ifications -Ġ19 22 -ĠExper ts -az es -Ġgar ments -ar ial -ĠN ab -Ġ25 7 -ĠV ed -Ġhum orous -ĠPom pe -Ġn ylon -Ġlur king -ĠSerge y -ĠMatt is -Ġmisogyn y -ĠComp onents -ĠWatch ing -ĠF olk -ract ical -B ush -Ġt aped -Ġgroup ing -Ġbe ads -Ġ20 48 -Ġcon du -quer que -Read ing -Ġgriev ances -Ult ra -Ġend point -H ig -ĠSt atic -ĠScar borough -L ua -ĠMess i -a qu -ĠPsy Net -ĠR udd -Ġa venue -v p 
-J er -Ġsh ady -ĠRes ist -ĠArt emis -Ġcare less -Ġbro kers -Ġtemper ament -Ġ5 20 -T ags -ĠTurn ing -Ġut tered -Ġp edd -Ġimpro vised -Ġ: ( -Ġtab l -Ġpl ains -16 00 -press ure -ĠEss ence -marg in -friend s -ĠRest oration -Ġpoll ut -ĠPok er -ĠAugust ine -ĠC IS -ĠSE AL -or ama -Ġth wart -se ek -Ġp agan - º -cp u -Ġg arn -Ġass ortment -ĠI LCS -t ower -Recomm ended -Ġun born -ĠRandom Redditor -ĠRandomRedditor WithNo -Ġparaly zed -Ġeru ption -Ġinter sect -ĠSt oke -ĠS co -B ind -å ¾ -ĠP NG -ĠNeg ative -ĠNO AA -Le on -Ġall oy -ĠL ama -ĠD iversity -5 75 -Ġunderest imated -ĠSc or -Ġm ural -Ġb usted -so on -l if -Ġnone x -Ġall ergy -ĠUnder world -ĠR ays -ĠBl asio -Ġh rs -ĠD ir -Ġ3 27 -by ter -Ġrepl acements -Ġactiv ates -ri ved -M H -Ġp ans -ĠH I -Ġlong itudinal -Ġnu isance -al er -Ġsw ell -ĠS igned -s ci -ĠIs les -ĠA GA -Ġdef iant -Ġson ic -oc on -K C -ĠA im -t ie -ah ah -Ġm L -D X -Ġb isc -ĠBill board -ĠSY STEM -NE Y -ga ard -Ġdist ressed -former ly -Al an -Ġche fs -Ġopt ics -ĠC omet -ĠAM C -Ġredes igned -irm ation -Ġsight ings -38 2 -3 11 -ĠW B -Ġcont raction -ĠT OTAL -D ual -Ġstart led -Ġunderstand ably -Ġsung lasses -ETH OD -Ġd ocker -Ġsurf ing -ĠH EL -ĠSl ack -ton es -Ġsh alt -Vis ual -49 8 -Dep artment -c ussion -Ġunrest ricted -Ġt ad -Ġre name -employ ed -Ġeduc ating -Ġgrin ned -bed room -ĠActiv ities -ĠV elvet -ĠSW AT -Ġsh uffle -ig or -Ġsatur ation -F inding -c ream -ic ter -Ġv odka -tr acking -te c -Ġfore ground -iest a -Ġve hement -ĠEC B -ĠT ie -E y -Ġt urtles -ĠRail road -ĠKat z -ĠFram es -Ġmen ace -ĠFell owship -ĠEss ential -ugg ish -Ġdri p -ch witz -ĠKy oto -s b -ĠN ina -Param eter -Ġal arms -ĠCl aud -Ġpione ering -Ġchief ly -ĠSc ream -Col lection -Ġthank fully -ĠRonald o -åŃ IJ -st rip -ĠDisney land -com mercial -See ing -S oul -Ġevac uate -Ġc iv -ĠAs he -Ġdiv ides -ĠD agger -rehens ive -Ġber ries -ĠD F -Ġs ushi -Ġplur ality -W I -Ġdisadvant aged -Ġbatt alion -ob iles -45 1 -Ġcl ing -Ġunden iable -ĠL ounge -Ġha unt -p he -Ġquant ify -Ġdiff ered -Ġ[* ] -ĠV iz -c 
um -sl ave -Ġvide og -Ġqu ar -Ġbund les -ĠAl onso -t ackle -Ġneur onal -Ġlandsl ide -conf irmed -ĠDep th -Ġrenew ables -B ear -ĠMaced onia -Ġjer seys -Ġb unk -ĠSp awn -ĠControl s -ĠBuch anan -Ġrobot ics -Ġemphas izing -ĠTut orial -h yp -ist on -Ġmonument al -æ ° -ĠCar ry -Ġt bsp -en ance -H ill -art hed -Ġro tten -De an -Ġtw isting -Ġgood will -Ġimm ersion -L iving -Ġbr ushes -ĠC GI -ĠAt k -tr aditional -Ġph antom -ĠSt amina -Ġexpans ions -ĠMar in -Ġembark ed -ĠE g -int estinal -ĠPE OPLE -ĠBo oth -ĠApp alach -Ġreleg ated -V T -M IT -Ġmust er -Ġwithdraw ing -Ġmicrosc ope -ĠG athering -ĠC rescent -ĠArgent ine -ĠDec re -ĠDomin ic -Ġbud s -ant age -ĠI on -Ġwid ened -ONS ORED -ĠGl oves -iann opoulos -raz en -fe el -Ġrepay ment -Ġhind sight -ĠRE ALLY -ĠPist ol -ĠBra h -Ġwat ts -Ġsurv ives -Ġfl urry -iss y -Al ert -ĠUrug uay -Ph oenix -S low -ĠG rave -ĠF ir -Ġmanage able -Ġtar iff -ĠU DP -ĠPist ons -ĠNiger ian -Ġstrike outs -Ġcos metics -whel ming -f ab -c ape -pro xy -Ġre think -Ġover coming -sim ple -Ġw oo -Ġdistract ing -ĠSt anton -ĠTuls a -ĠD ock -65 9 -Ġdisc ord -ĠEm acs -ĠV es -ĠR OB -Ġreass uring -Ġcons ortium -Muslim s -3 21 -Ġprompt s -se i -ĠH itch -imp osed -ĠF ool -Ġindisc rim -wr ong -bu querque -D avis -! 
] -Ġtim eless -ĠNE ED -Ġpestic ide -Ġrally ing -ĠCal der -Ġå ¤ -Ġx p -ĠUn le -ĠEx port -lu aj -B uff -) [ -Ġsq or -S audi -Ġis tg -Ġindul ge -pro c -Ġdisg usted -Ġcomp ounded -Ġn em -Ġschool ing -ĠC ure -process ing -S ol -Ġpro verb -it ized -ĠAlv arez -Ġscar f -Ġrect angular -re ve -Ġh ormonal -ĠSt ress -itiz en -Ġ4 25 -girl s -ĠNo ir -ĠR app -Ġmar ches -ch urch -ĠUs es -Ġ40 5 -ĠBer m -Ġord inances -ĠJud gment -Charg es -ĠZ in -Ġdust y -Ġstraw berries -Ġper ce -ĠTh ur -ĠDebor ah -net flix -ĠLam bert -Ġam used -ĠGu ang -Y OU -R GB -ĠC CTV -Ġf iat -r ang -Ġf ederation -ĠM ant -ĠB ust -ĠM are -respect ive -ĠM igration -ĠB IT -59 0 -Ġpatriot ism -Ġout lining -reg ion -ĠJos é -Ġbl asting -ĠEz ra -B s -Ġundermin es -ĠSm ooth -Ġcl ashed -rad io -Ġtransition ing -ĠBucc aneers -ĠOw l -Ġplug s -Ġh iatus -ĠPin ball -Ġm ig -ĠNut r -ĠWolf e -Ġinteg ers -Ġor bits -ĠEd win -ĠDirect X -b ite -Ġbl azing -v r -Ed ge -ĠP ID -ex it -ĠCom ed -ĠPath finder -ĠGu id -ĠSign s -ĠZ er -ĠAg enda -Ġreimburse ment -M esh -i Phone -ĠMar cos -ĠS ites -h ate -en burg -Ġs ockets -p end -Bat man -v ir -ĠSH OW -Ġprovision al -con n -ĠDeath s -AT IVE -Pro file -sy m -J A -Ġnin ja -inst alled -id ates -eb ra -ĠOm aha -Ġse izing -ĠBe asts -Ġsal ts -M ission -Gener ally -ĠTr ilogy -he on -leg ates -Ġd ime -Ġf aire -par able -G raph -Ġtotal ing -Ġdiagram s -ĠYan uk -ple t -ĠMe h -Ġmyth ical -ĠStep hens -aut ical -ochem istry -Ġkil ograms -Ġel bows -anc ock -ĠB CE -ĠPr ague -Ġimpro v -ĠDev in -Ġ" \ -par alle -Ġsuprem acists -ĠB illion -Ġreg imen -inn acle -Ġrequ isite -ang an -ĠBur lington -ain ment -ĠObject ive -oms ky -G V -Ġun ilateral -Ġt c -Ġh ires -ment al -Ġinvol untary -Ġtrans pl -ĠASC II - ¨ -Ev ents -Ġdoub ted -ĠKa plan -ĠCour age -ig on -ĠMan aging -ĠT art -Ġfalse hood -ĠV iolet -Ġair s -Ġfertil izer -Brit ain -Ġaqu atic -ou f -W ords -ĠHart ford -Ġeven ings -ĠV engeance -qu ite -G all -ĠP ret -Ġp df -ĠL M -ĠSo chi -ĠInter cept -9 20 -Ġprofit ability -ĠId le -ĠMac Donald -ĠEst ablishment -um sy 
-Ġgather ings -ĠN aj -Charl ie -Ġas cent -ĠProt ector -Ġal gebra -Ġbi os -for ums -EL S -Introdu ced -Ġ3 35 -Ġastron omy -Cont ribut -ĠPol ic -Pl atform -Ġcontain ment -w rap -Ġcoron ary -ĠJ elly -man ager -Ġheart breaking -c air -ĠChe ro -c gi -Med ical -ĠAccount ability -! !" -oph ile -Ġpsych otic -ĠRest rict -Ġequ itable -iss ues -Ġ19 05 -ĠN ek -c ised -ĠTr acking -Ġo zone -Ġcook er -ros is -Ġre open -Ġinf inity -ĠPharm aceutical -ens ional -Att empt -ĠR ory -Mar co -Ġawa its -H OW -t reated -Ġbol st -Ġreve red -Ġp ods -opp ers -00 10 -Ġampl itude -ric an -SP ONSORED -Ġtrou sers -Ġhal ves -ĠK aine -ĠCut ler -ĠA UTH -Ġsplend id -Ġprevent ive -ĠDud ley -if acts -umin ati -ĠY in -Ġad mon -ĠV ag -Ġin verted -Ġhast ily -ĠH ague -L yn -Ġled ger -Ġastron omical -get ting -Ġcirc a -ĠC ic -ĠTenn is -Lim ited -Ġd ru -ĠBY U -Ġtrave llers -Ġp ane -ĠInt ro -Ġpatient ly -Ġa iding -Ġlo os -ĠT ough -Ġ29 3 -Ġconsum es -Source File -Ġ"" " -Ġbond ing -Ġtil ted -Ġmenstru al -ĠCel estial -UL AR -Plug in -Ġrisk ing -N az -ĠRiy adh -Ġacc redited -Ġsk irm -é Ľ -Ġexam iner -Ġmess ing -Ġnear ing -ĠC hern -ĠBeck ham -Ġsw apped -Ġgo ose -K ay -Ġlo fty -ĠWal let -Ġ[ ' -Ġap ocalypse -Ġb amboo -ĠSP ACE -ĠEl ena -Ġ30 6 -ac ons -Ġtight ened -Ġadolesc ence -Ġrain y -Ġvandal ism -ĠNew town -Ġcon ject -c akes -Ġche ated -Ġmoder ators -par ams -E FF -Ġdece it -ĠST L -ĠTanz ania -ĠR I -Ġ19 23 -ĠEx ile -the l -Ġthe olog -Ġquir ky -ĠIr vine -Ġneed y -or is -U m -K a -Ġmail box -3 22 -Ġb os -ĠPet ra -K ING -Ġenlarg ed -O ften -Ġbad ass -Ġ3 43 -ĠPl aces -ĠC AD -Ġpr istine -Ġinterven ing -d irection -Ġl az -ĠD SM -Ġproject ing -ĠF unk -ag og -pay ment -n ov -Ġch atter -AR B -Ġexam inations -ĠHouse hold -ĠG us -F ord -4 14 -B oss -Ġmy stic -Ġle aps -ĠB av -ul z -b udget -Foot ball -Ġsubsid ized -Ġfirst hand -Ġcoinc ide -oc ular -Con n -ĠColl abor -Ġfool s -am ura -ah ar -r ists -Ġsw ollen -Ġexp ended -ĠP au -s up -Ġsp ar -Ġkey note -s uff -Ġunequ al -Ġprogress ing -str ings -ĠGamer gate -Dis ney -ĠEle ven 
-om nia -Ġscript ed -Ġear ners -bro ther -ĠEn abled -æ ³ -Ġlar vae -ĠL OC -m ess -Wil son -ĠTem plate -success fully -Ġparam ount -Ġcamoufl age -Ġbind s -ĠQu iet -ĠSh utterstock -r ush -Ġmasc ot -fort une -ĠCol t -ĠBe yon -hab i -Ġha irc -Ġ26 7 -ĠDe us -Ġtw itch -Ġconcent rating -Ġn ipples -c ible -Ġg ir -N Z -M ath -n ih -Requ ired -Ġp onder -ĠS AN -Ġwedd ings -Ġl oneliness -N ES -ĠMah jong -69 5 -add le -ĠGar ner -ĠC OUR -Br idge -Ġsp ree -ĠCald well -Ġbri bery -Ġ���� ���� -plug ins -Ġr acket -Ġchamp agne -vers ible -V ote -Ġmod ifiers -May or -6 80 -Ġassemb lies -ĠS ultan -ĠN ing -ĠLad ies -Ġsulf ur -Ġor bs -Ġ---- - -____ ___ -ĠJournal ism -Ġes ports -Ġl ush -Ġh ue -Ġspect ral -H onest -ãĥ ı -Ġbus hes -Ġrein forcement -Ġre opened -ĠWhe els -ĠM org -rie ving -Ġaux iliary -Ġj Query -ĠB AT -tes que -Ġver tex -p ure -f rey -ãĤ º -d os -Ġty ph -Ġc ull -Ġe q -Ġdec on -Ġtoss ing -Ġdispar ate -ĠBr igham -print f -led ged -Ġsu nd -Ġco zy -Ġhepat itis -per forming -Ġav al -ĠG G -f uture -Ġpet ertodd -ĠKos ovo -Ġmagn ets -Al ready -ĠEd ison -ĠCe res -ĠRA ID -Ġbrill iance -57 6 -Ġder ives -Ġhypert ension -ĠÎ Ķ -Ġlamb da -Ġfl air -Ġmission aries -Ġrap es -ĠSt arter -ĠMon ths -Ġdef y -Ġseism ic -ĠR aphael -Ġeuro zone -65 6 -z sche -Ġscr atched -Ġb ows -ĠLenn on -ĠGa ia -Ġdri pping -f acts -A le -Ġfrog s -ĠBre ast -ogene ity -ĠProsecut or -Ġampl ified -ĠHod g -ĠF n -Th ousands -ĠNI H -ĠMonitor ing -FT WARE -ĠPri ebus -ĠG rowing -hun ter -Ġdiagn ose -ĠM ald -ĠL R -Ġcrown ed -Ġburst ing -Ġdiss olution -j avascript -Ġuseful ness -ĠExec ution -: ( -ĠIv ory -a ah -Ġpersecut ed -viol ence -ist as -ĠCr ate -Ġimpuls es -ĠSp ani -ed es -Hand le -ĠZ erg -think able -Last ly -Ġspont aneously -Ġinconven ient -Ġdismiss ing -Ġpl otted -Ġeight y -Ġ7 37 -r ish -ĠThor nton -ath am -Ġsit com -V en -Rec ipe -t el -l und -Ġcle ars -ĠSas uke -Ġ25 8 -Ġopt ing -Ġen raged -est hetic -ĠA e -uch s -Pre p -Fl ow -Ġrun off -ĠE ating -ĠG iles -ĠAct ing -res ources -ib aba -Ġr pm -Ġske wed -ĠBl anc -ĠS 
akuya -Ġhot ter -Ġ19 24 -op ian -ck o -Ġcr umbling -Ġcapt ains -ĠAppropri ations -le aders -dro pping -an uts -Ġrevers ing -ĠP ose -ĠS ek -Sc ot -ĠIde a -c ise -ĠSloven ia -Ġ3 17 -Do ctor -Ġcro cod -ald i -Se a -ĠFar rell -Ġmerc enaries -ĠR NC -ĠGu ess -Ġp acing -M achine -Streamer Bot -ĠChar ity -Ġ29 8 -Ġcann ons -ĠTob y -TPP StreamerBot -ĠPass ion -cf g -Th om -Ġbad ges -ĠBern stein -. âĢĵ -ĠP OP -ĠCon j -Ġinitial ization -Ġbiod iversity -D ub -Ġfeud al -Ġdisclaim er -Ġc row -Ġign ition -ar f -S HA -Ġk Hz -h azard -ĠArt ists -oe uv -67 9 -ĠRud y -N ine -ĠRam adan -å ½ -itt o -Ġadren aline -C ert -Ġsmell ed -Ġimp unity -Ġag endas -ĠRe born -ĠCon cent -ĠSe ems -Ġo mega -ĠDust in -Ġback er -ĠSau ce -ĠBoy le -W IN -Ġsp ins -Ġpa uses -u pt -Ġshred ded -Ġstra pped -ĠCor ruption -Ġscr atches -Ġn i -Ġatt ire -ĠS AF -Factory Reloaded -ĠI PS -Ġ( % -Ġsem inar -f ocus -c ivil -Ġ18 60 -int osh -Ġcontin ual -Ġabbre vi -ĠS ok -oc obo -X M -Ġfr antic -Ġunavoid able -Ġar tery -Ġannot ations -b ath -Cl imate -Ġd ors -ĠSl ide -co ord -ĠRel oad -ĠL DL -ĠLove craft -Ġunim agin -Ġresemb led -Ġbarr acks -n p -Ġsurrog ate -Ġcategor ized -ãĤ © -Ġvacc inated -Ġdrain age -Ġind ist -ĠWhats App -Ġ18 70 -oler ance -inv oke -am orph -Ġrecon nect -Ġem anc -Ġblind ness -Ġ12 80 -intern et -c ollar -Ġalt ru -Ġab yss -ĠT RI -65 7 -Ġinf used -HE AD -Ġforest ry -ĠWood y -ĠC i -w i -s am -78 4 -hol iday -Ġmog ul -ĠF ees -ĠD EN -In ternal -ur bed -f usc -at om -ĠIll usion -Ġpoll ed -Ġfl ap -Ġco ax -L GBT -An aly -ĠSect ions -ĠCalif orn -em n -Ġh ither -ĠN IGHT -Ġn ailed -ĠPip eline -39 1 -o of -ĠPr imal -vere nd -Ġsl ashing -Ġret ri -avi our -Ġdepart ing -g il -IS C -Ġmid way -Ġultras ound -Ġbeh aving -ĠT ara -class es -V irtual -ĠColon ial -Ġstri pping -Ġorchestr ated -ĠGra ves -45 2 -ĠIron ically -ĠWrit ers -Ġl ends -ĠMan z -Ġra ven -Ġoxid ative -Ġ26 6 -EL F -act ually -asc ar -D raft -Ġfavour able -Ġhumili ating -Ġf idelity -ĠH of -ĠX uan -49 6 -Ġlay ered -at is -79 0 -Ġpay check -it on -K ar -ĠVM 
ware -ĠFar mer -Ġserv ic -gl omer -Ġsl ump -ĠFab ric -ĠD OC -est ing -Ġreass ure -Ġph yl -v olt -it ory -R ules -Ġoxid ation -Ġpri zed -Ġmist ress -ĠDj ango -WAR N -å ij -Ġenc ode -ĠFeed back -Ġstupid ity -I an -ĠYugoslav ia -× ¨ -ac l -UT E -19 77 -Ġqual ifies -Ġpuls es -pret ty -Ġfro ze -Ġs s -Iter ator -Ġur gently -Ġm ailed -ĠCh am -Ġsust aining -Ġbas il -Ġpupp ies -il ant -ĠP LEASE -l ap -ace ous -F ear -ĠMaster y -aut omatic -ĠT AG -Ġant im -ag les -47 3 -fram es -Ġwh ispers -ĠWho ever -Ġbra very -ĠUK IP -ract ions -"" " -Ġt ame -Ġpart ed -every thing -CON T -Ġind ebted -Ġadd r -re k -IR ED -Ġem inent -cl inton -Ġo usted -Ġreview er -Ġmelt down -Ġre arr -ĠY ao -the real -aby te -Ġst umbling -Ġbat ches -Ġ25 9 -Ġcontrace ptive -Ġprost itute -ens is -De cl -ĠSt rikes -M ilitary -ĠO ath -v acc -pp ings -05 2 -Ġpart Name -amp ing -Rep orts -K I -CH R -Ġsubt ly -sw ers -Bl ake -us ual -Ġcontest ants -Ġcart ridges -ĠGRE AT -Ġbl ush -ĠâĢ º -47 2 -Ġreason ed -ãĥ ¤ -paralle led -Ġd yn -ag ate -Ġnight ly -å Ĩ -55 6 -Ġsem antic -ĠAdv oc -Ġ !! 
-Ġdisag rees -ĠB W -V eh -Ġharm ing -Ġembr aces -Ġstri ves -Ġin land -ĠK ard -Ġhe ats -ĠGin ny -ut an -ern aut -yl ene -ĠE lev -J D -Ġh ars -ĠStar r -Ġsk ysc -Ġcollabor ators -Us ually -Ġrev olutions -ĠSTAT S -Ġdism antle -Ġconfident ly -Ġkin etic -Al i -Ġpercent ile -Ġextract ing -ill ian -est ead -Ġphysic ists -ĠMarsh al -Ġfell owship -Ġd ashed -ĠU R -ĠSi oux -ĠComp act -am ide -P ython -ĠLe igh -ĠPharm ac -ist rates -her ical -Ġf ue -ĠE min -Ġ( { -ĠNeighbor hood -Ġdisrupt ing -ĠD up -Ġg land -ĠSe v -ĠMar ian -arg on -ĠD und -Ġ< !-- -Ġstr and -Ġstadium s -z os -Ġpsych osis -ĠR ack -Ġbrilliant ly -ï¸ ı -Ġsubmer ged -ĠInst it -ĠCh ow -Ġc ages -ĠH ats -ĠU rs -Ġdil uted -us at -ien ne -ĠMembers hip -ĠBur k -Ġ ie -Ġarche type -D rug -ult on -ĠSp ock -ĠMcK ay -ĠDep end -F eatured -S oc -19 78 -ĠB ere -Ġrelent lessly -Ġcripp ling -Ġar thritis -çĶ Ł -ĠTrop ical -ĠBul g -ĠCher yl -Ġadm irable -Ġsub title -Over ride -Ġorig inating -ĠC CP -Ġsw ore -ĠSo le -ĠDis orders -3 29 -Ġprocess ion -Ġref urb -Ġimm ersed -requ ently -Ġskept ics -Ġcer amic -m itter -en stein -b elt -ĠT IT -b idden -Ġf ir -m ist -> ] -Ġwe ave -ĠParad ox -Ġentr usted -ĠBarcl ays -Ġnovel ist -og ie -80 6 -Ġnin ety -Ġdisag reements -@@@@ @@@@ -ĠAus chwitz -c ars -ĠL ET -t ub -arant ine -P OS -Ġback story -Ġcheer ful -ĠR ag -ek a -bi ased -Ġinexper ienced -ak ra -ĠW itt -t an -Ġrap ist -Ġplate au -ch al -ĠInqu is -exp ression -Ġc ipher -Ġsh aving -add en -re ly -( \ -ism a -ĠReg ulatory -CH AR -ily n -N VIDIA -G U -Ġmur m -la us -Christ opher -Ġcontract ual -ĠPro xy -ĠJa ime -ĠMethod ist -Ġstew ards -st a -per ia -Ġphys iology -Ġbump ed -Ġf ructose -Austral ian -ĠMet allic -ĠMas querade -ar b -Ġprom ul -Ġdown fall -Ġbut cher -Ġb our -ĠIN FORMATION -ĠB is -pect s -ad ena -Ġcontempl ating -ar oo -cent ered -ĠPe aks -Us ed -Ġmod em -Ġg enders -Ġ8 000 -37 1 -Ġm aternity -ĠR az -Ġrock ing -Ġhandgun s -ĠD ACA -Aut om -ĠN ile -Ġtum ult -ĠBenef it -ĠAppro ach -works hop -ĠLe aving -G er -inst ead -Ġvibr ations -Ġrep 
ositories -49 7 -ĠA unt -ĠJ ub -ĠExp edition -Al pha -Ġs ans -Ġoverd ue -Ġoverc rowd -Ġlegisl atures -Ġp aternal -ĠLeon ardo -Ġexp ressive -Ġdistract ions -Ġsil enced -tr ust -Ġb iking -Ġ5 60 -Ġpropri et -Ġimp osition -Ġcon glomer -Ġ= ================================================================ -ĠTe aching -ĠY ose -int ensive -T own -Ġtroll ing -ĠGr ac -ĠAS US -Y o -Ġspecial s -ĠNep h -ĠGod zilla -Dat abase -ĠHe gel -Ġ27 2 -19 76 -ĠGl oria -Ġdis emb -ĠInvestig ations -ĠB ane -ag ements -St range -Ġtre asury -ĠPl ays -Ġundes irable -Ġwid ening -Ġverb ally -Ġinf ancy -Ġcut ter -f ml -Ġ21 00 -prot otype -f ine -Ġdec riminal -Ġdysfunction al -Ġbes ie -ĠErn st -z eb -Ġnort heastern -Ġa ust -por ate -ĠMar lins -Ġsegreg ated -ew orld -ĠMa her -Ġtra verse -Ġmon astery -ur gy -G ear -s and -Com pl -ĠE MP -Ġpl ent -ĠMer cer -Ġ27 6 -TA BLE -Config uration -H undreds -Ġpr ic -Ġcollabor ating -ĠPar amount -ĠCumm ings -Ġ( < -Ġrecord er -Ġfl ats -Ġ4 16 -wh ose -Font Size -ĠOr bit -Y R -Ġwr ists -Ġb akery -) } -ĠB ounty -ĠLanc aster -Ġend ings -acc ording -ĠSal am -e asy -75 5 -ĠBur r -ĠBarn ett -onom ous -Un ion -Ġpreced ence -ĠScholars hip -ĠU X -Ġroll out -Ġbo on -al m -ĠCan ter -æ µ -Ġround ing -Ġcl ad -Ġv ap -ĠF eatured -is ations -Ġ5 40 -pol ice -Ġunsett ling -Ġdr ifting -ĠLum ia -ĠObama Care -ĠF avor -Hy per -ĠRoth schild -ĠMil iband -an aly -ĠJul iet -H u -Ġrec alling -a head -69 6 -Ġunf avorable -Ġd ances -O x -Ġleg ality -Ġ40 3 -rom ancer -Ġinqu ire -ĠM oves -\ "> -ĠVari ant -ĠMess iah -ĠL CS -ĠBah á -75 6 -Ġeyeb row -Ġ ¥ -ĠMc F -ĠFort y -M as -Ġpan icked -Ġtransform ations -q q -Ġrev olves -ring e -ĠA i -ax e -Ġon ward -ĠC FR -ĠB are -log in -Ġliqu ids -Ġde comp -second ary -il an -ĠCon vert -ami ya -Ġprosecut ing -Ġâī ¡ -ĠYork ers -ĠByr ne -sl ow -aw ei -J ean -Ġ26 9 -ĠSky dragon -Ġ é -ĠNicarag ua -ĠHuck abee -ĠHigh ly -Ġamph ib -ĠPast or -ĠL ets -Ġbl urred -Ġvisc eral -ĠC BO -Ġcollabor ated -z ig -Leg al -Ġapart heid -Ġbr id -Ġpres et -ĠD ET -ĠAM A -× Ķ -arch ing 
-auc uses -build er -Ġpo etic -Ġem ulator -ĠMole cular -Ġhon oring -ise um -Ġtract or -ĠCl uster -ĠCal m -ared evil -Ġsidew alks -Ġviol in -Ġgeneral ized -ĠAle c -Ġemb argo -Ġfast ball -ĠHT TPS -ĠL ack -ĠCh ill -ri ver -C hel -ĠSw arm -ĠLev ine -ro ying -L aunch -Ġkick er -Ġadd itive -ĠDe als -W idget -cont aining -Ġescal ate -ĠOP EN -Ġtwe aked -Ġst ash -Ġsp arks -ĠEs sex -ĠE cc -Ġconv ict -Ġblog ging -I ER -ĠH L -Ġmurd erers -75 9 -ĠH ib -Ġde pl -ĠJ ord -S ac -Ġdis sect -ĠHow e -os her -Ġcustom izable -ĠFran z -Ġat ro -Ä ĩ -Ġ000 4 -Ġout post -R oss -Ġglyph osate -ĠHast ings -ĠBE FORE -Ġsh ove -o pped -ĠSc ala -Ġam ulet -an ian -Ġexacerb ated -Ġe ater -47 1 -UM E -Ġpul p -izont al -ĠZ am -ĠAT I -imm une -aby tes -Ġunnecess arily -ĠC AT -ĠAx is -Ġvisual ize -à ī -ĠRad ical -f m -Doc uments -ĠFor rest -Ġcontext ual -ĠSy mbol -Ġtent ative -ĠDO ES -ĠGood s -Ġintermitt ent -} : -medi ated -Ġridic ule -Ġathe ism -Ġpath ogens -ĠM um -Ġre introdu -Ġ30 7 -i HUD -Ġflash light -Ġsw earing -Ġp engu -B u -Ġrot ated -ĠCr ane -Ġ() ); -Ġfashion able -Ġendors ing -46 3 -) [ -Ġingest ion -Ġcook s -Ġ9 50 -ot omy -ĠIm am -Ġk a -Ġte aser -ĠGhost s -ĠãĤ µ -19 69 -Ï ĥ -ub by -Ġconver ter -zan ne -end e -ĠPre par -ĠNic kel -ĠChim era -h im -ĠTyr ann -ĠSabb ath -ĠNich ols -Ġra pt -ih ar -Ġshe lling -Ġillum inate -Ġdent ist -ut or -ĠInteg ration -Ġwh ims -ĠLiter ary -Be aut -Ġp archment -ag ara -Br and -Ġder og -âĢ¦ ) -ĠNor se -Ġunw itting -Ġc uc -Ġborder line -Ġupset ting -Ġrec ourse -Ġd raped -ĠRad ar -Ġcold er -ĠPep si -im inary -], [ -65 8 -V i -ĠF rem -ĠP es -Ġveter inary -ĠT ED -ĠEp idem -n ova -k id -Ġdev out -o ct -j ad -M oh -ĠP AY -Ġge ometric -Ġ3 23 -Ġcircum ference -ich ick -19 75 -ĠY uri -ĠSh all -ĠH over -un in -S pr -Ġg raft -ĠHapp iness -Ġdisadvant ages -att acks -Ġhub s -ĠStar Craft -é ĸ -Ġgall eries -ĠKor ra -Ġgrocer ies -ĠGors uch -Ġrap ists -Ġfun gi -ĠTyph oon -V ector -ĠEm press -b attle -4 68 -Ġparas ite -ĠBom ber -S G -ex ist -ĠP f -Ġun se -Ġsurge ons -B irth -ĠUn 
sure -ĠPrint ed -ĠBehavior al -ĠA ster -Pak istan -Ġun ethical -Ġs v -ĠIo T -Ġlay outs -P ain -Ġconst ants -ĠL W -ĠB ake -Ġtow els -Ġdeterior ation -ĠBol ivia -Ġblind ed -ĠW arden -ĠMist ress -Ġon stage -Ġcl ans -ĠB EST -19 60 -Ġant ique -Ġrhet orical -ĠPer cy -ĠRw anda -, . -B ruce -Ġtra umat -ĠParliament ary -Ġfoot note -id ia -ĠLear ned -se eking -gen ic -Ġdim ensional -H ide -èĢ ħ -Ġintrig ue -in se -Ġle ases -Ġapp rentices -w ashing -Ġ19 26 -V ILLE -Ġsw oop -s cl -Ġbed rooms -on ics -ĠCr unch -comp atible -Ġincap ac -ĠYemen i -ash tra -z hou -d anger -Ġmanifest ations -ĠDem ons -AA F -Secret ary -ACT ED -L OD -Ġam y -ra per -eth nic -4 17 -Ġpos itives -Ġ27 3 -ĠRefuge es -Ġus b -ĠV ald -odd y -ĠMahm oud -As ia -Ġskull s -ĠEx odus -ĠComp et -ĠL IC -ĠM ansion -ĠA me -Ġconsolid ate -storm s -ont ent -99 6 -Ġcl en -Ġm ummy -fl at -75 8 -ĠV OL -oter ic -n en -ĠMin ute -S ov -Ġfin er -R h -ly cer -Ġreinforce ments -ĠJohann es -ĠGall agher -Ġgym n -S uddenly -Ġext ortion -k r -i ator -T a -Ġhippocamp us -N PR -ĠComput ing -Ġsquare ly -Ġmod elling -ĠFor ums -ĠL isp -ĠKrish na -Ġ3 24 -Ġr ushes -Ġens ued -Ġcre eping -on te -n ai -il ater -ĠHorn ets -Ġob livious -IN ST -55 9 -Ġjeopard y -Ġdistingu ishing -j ured -Ġbeg s -sim ilar -ph ot -5 30 -ĠPark way -Ġs inks -ĠHearth stone -ib ur -ĠBat on -Av oid -Ġd ancer -Ġmag istrate -ary n -Ġdisturb ances -ĠRom ero -Ġpar aph -Ġmis chief -âĸ ĵ -ĠSh aria -Ġur inary -r oute -iv as -f itted -Ġeject ed -ĠAl buquerque -Ġ4 70 -Ġirrit ated -ĠZ ip -ĠB iol -à į -Ġden ounce -Ġbin aries -ĠVer se -Ġopp os -ĠKend rick -ĠG PL -Ġsp ew -ĠEl ijah -ĠE as -Ġdr ifted -so far -Ġannoy ance -ĠB ET -47 4 -ĠSt rongh -it ates -ĠCogn itive -oph one -ĠIdent ification -ocr ine -connect ion -Ġbox er -ĠAS D -ĠAre as -Y ang -t ch -ull ah -Ġdece ive -Comb at -ep isode -cre te -W itness -Ġcondol ences -ht ar -Ġhe als -Ġbuck ets -ĠLA W -B lu -Ġsl ab -ĠOR DER -oc l -att on -ĠSteven son -ĠG inger -ĠFriend ly -ĠVander bilt -sp irit -ig l -ĠReg arding -ĠPR OG -Ġse aling 
-start ing -Ġcard inal -ĠV ec -ĠBe ir -Ġmillisec onds -we ak -per se -Ġster ile -ĠCont emporary -ĠPh ant -ĠCl o -Ġout p -Ġex iled -Ġ27 7 -Ġself ie -Ġman ic -Ġn ano -ter ms -Alex ander -Ġres olves -Ġmillenn ia -Ġexpl odes -Ġconst ellation -Ġadul tery -m otion -D OC -Ġbroad casters -Ġkinderg arten -ĠMay weather -ĠE co -ich o -Ġ28 7 -l aun -Ġm ute -Ġdisc reet -Ġpres chool -Ġpre empt -De lete -ĠFre ed -P i -H K -Ġblock er -ĠC umber -Ġw rought -d ating -Ġins urer -Ġquot as -Ġpre ached -Ġev iction -ĠReg ina -ĠP ens -Ġsevent een -ĠN ass -D ick -Ġfold s -Ġd otted -ĠA ad -Un iversal -Ġp izz -ĠG uru -Ġso ils -Ġno vice -ĠNe ander -Ġst ool -Ġdeton ated -ĠPik achu -ĠMass ive -IV ER -ĠAb del -Ġsubdu ed -Ġtall est -Ġprec arious -Ġa y -r ification -ĠOb j -c ale -Ġun question -cul osis -ad as -igr ated -D ays -Ġque ens -ĠGaz ette -ĠCol our -ĠBow man -ĠJ J -ï ve -Ġdomin ates -Stud ent -Ġm u -Ġback log -ĠElect ro -Tr uth -48 3 -Ġcond ensed -r ules -ĠCons piracy -Ġacron ym -hand led -ĠMat te -j ri -ĠImp ossible -l ude -cre ation -Ġwar med -ĠSl ave -Ġmis led -Ġfer ment -ĠK ah -ink i -ke leton -cy l -ĠKar in -Hun ter -Reg ister -ĠSur rey -Ġst ares -ĠW idth -ĠN ay -ĠSk i -Ġblack list -uck et -Ġexp ulsion -im et -Ġret weet -vant age -Fe ature -Ġtro opers -Ġhom ers -9 69 -Ġconting ency -ĠW TC -ĠBrew er -fore ign -W are -S olar -Ġund ue -RE C -ulner able -path ic -ĠBo ise -Ġ3 22 -Ġarous ed -ĠY ing -ä¸ į -uel ess -Ġp as -Ġmor p -Ġfl oral -Ex press -ud ging -k B -ĠGr anted -Ø ¯ -ĠMich a -ĠGoth ic -ĠSPEC IAL -ĠRic ardo -F ran -Ġadminister ing -6 20 -por a -Ġ ® -Ġcomprom ises -Ġb itten -Ac cept -Th irty -Ð ² -Ġmater ially -ĠTer r -ig matic -ch ains -Ġdo ve -stad t -Mar vel -FA ULT -Ġwind shield -Ġ3 36 -ad ier -Ġsw apping -Ġflaw less -ĠPred ator -ĠMiche le -Ġprop ulsion -ĠPsych ic -Ġassign ing -Ġfabric ation -Ġbar ley -l ust -Ġtow ering -Ġalter cation -ĠBent ley -Sp here -Ġtun a -ĠClass es -Fre edom -un er -L ady -v oice -Ġcool est -or r -Ġpal p -$ { -Ġhyster ia -ĠMet atron -p ants -Ġspawn ing 
-Exper ts -ĠInvest ors -ĠAn archy -Ġshr unk -ĠVict im -Ġ28 9 -Ġec stasy -ĠB inding -58 5 -ĠMel ody -57 8 -ot ally -ĠE tsy -lig a -Ġapplaud ed -Ġswe ating -Ġredist ributed -Ġpop corn -Ġsem inal -f ur -ĠNeuro science -R and -ĠO st -ĠMadd en -ĠIncre asing -ĠDaw kins -ĠSub way -Ġar sen -cons erv -B UR -Ġsp iked -ĠLy ft -ĠImper ium -ĠDrop box -Ġfav oured -Ġencomp asses -gh ost -Ġins pires -Ġbur geoning -ĠY oshi -ĠVert ical -ĠAud itor -Ġint ending -Ġfilib uster -Bl oom -f ac -ĠCav s -ign ing -Ġcowork ers -ĠBarb arian -rem ember -FL AG -Ġaudit ory -ason ry -Col lege -Ġmut ed -gem ony -ob in -ĠPsych o -9 68 -Ġlav ish -Ġhierarch ical -ĠDr one -ou k -Ġcripp led -ĠMax im -Sl ot -Ġqu iz -ĠV id -if ling -Ġarchae ologists -Ġabandon ment -d ial -le on -ĠF as -T ed -Ġr aspberry -Ġmaneu vers -Ġbehavi ours -Ġins ure -Ġrem od -Sw itch -h oe -Ġsp aced -Ġafford ability -ĠF ern -not ation -ĠBal anced -Ġoccup ies -en vironment -Ġneck lace -Ġsed an -F U -ĠBrav o -Ġab users -ĠAn ita -met adata -ĠG ithub -ait o -ĠF aster -ĠWass erman -ĠF lesh -Ġth orn -r arily -ĠMer ry -w ine -Ġpopul ace -ĠL ann -Ġrepair ing -Ġpsy che -Ġmod ulation -aw aru -âĢĭ âĢĭ -ari j -Ġdecor ations -Ġapolog ise -ĠG arg -app ly -Ġgive away -ĠFl an -ĠWy att -U ber -Ġauthor ised -ĠMor al -HAHA HAHA -activ ate -Ġtorped o -ĠF AR -Ġam assed -ĠA ram -ark in -ĠVict ims -st ab -Ġo m -ĠE CO -Ġopio ids -Ġpurpose ly -ĠV est -Ġer g -at an -ĠSur gery -Ġcorrect ing -ĠOrt iz -ĠBe et -Ġrev oke -Ġfre eway -ĠH iggins -F ail -ĠFar ms -ĠAT P -h ound -Ġp oking -ĠCommun ists -mon ster -iment ary -Ġunlock ing -Ġunf it -we ed -en ario -at ical -ĠEnlight enment -ĠN G -ĠComp ensation -de en -ĠWid ow -ĠCind y -ĠAfter wards -Ġ6 000 -ikh ail -ag ically -Ġrat ified -Ġcasual ty -H OME -p sey -f ee -Ġspark ling -Ġd é -Ġconcert ed -C atal -Ġcomp lying -ĠA res -ĠD ent -Sh ut -Ġsk im -ad minist -Ġhost ilities -ĠG ins -Ġ6 08 -Ġm uddy -ĠMc Int -ĠDec ay -5 25 -Ġconspic uous -ĠEx posure -Ġresc ind -Ġwear able -Ġ3 28 -our met -ah s -ĠRob ots -Ġe clips -inst 
ance -ĠRE PORT -ĠApp l -0 30 -ĠSk ies -01 00 -Ġfall acy -S ocket -ĠRece iver -Ġsol ves -ĠButter fly -ĠSho pping -ĠFI RE -65 4 -Med ic -Ġsing ers -ĠNeed less -'' '' -isher s -ĠD ive -58 8 -Ġselect ively -Ġcl umsy -88 9 -Ġpurch aser -ear ned -ard y -Ġbenef iting -eng lish -Ġyield ing -ĠP our -Ġspin ach -Ġdel ve -ĠC rom -6 10 -Ġexport ing -ĠMA KE -Ġ26 3 -Ġg rop -Ġenv oy -ĠInqu iry -ĠLu igi -d ry -ĠT uring -Thumbnail Image -ĠVar iety -Ġfac et -Ġfl uffy -Ġexcerpt s -Ġsh orth -ĠOl sen -CL UD -Ġrel iant -ĠUN C -T our -Ġbat hing -Comp any -Ġglobal ization -P red -ĠMalf oy -Ġh oc -j am -craft ed -ĠBond s -ĠKiss inger -Eng land -Ġorder ly -cat entry -Ġ26 1 -Ġexch anging -ĠInt ent -ĠAmend ments -D OM -Ġst out -³³³³³³³³ ³³³³³³³³ -ĠAir bus -Ġ27 8 -hy de -P oll -Item ThumbnailImage -Ġlooph oles -ĠPill ar -Ġexpl or -St retch -A part -Ġun married -Lim it -ĠTransform ers -Ġintellect ually -unct ure -18 00 -Ġd arn -B razil -Ġleft over -ber us -f red -Mine craft -3 26 -ĠForm s -Ġproof s -ĠDes igned -Ġindex es -ĠSupp ose -EM S -ĠL oving -ĠBon nie -im ating -OT US -Ġconduct or -Ġbehav ed -ĠF ren -Ġsy nerg -Ġmillenn ium -Ġcater ing -ĠL auder -W r -ĠY iannopoulos -ĠAT F -Ġensl aved -Ġawaken ed -D VD -ĠED ITION -ĠConc ert -ĠChall enger -ĠH aku -umer ic -Ġdep recated -ĠSH AR -4 12 -Ġdy stop -Ġtremb ling -Ġdread ed -ĠSp ac -p adding -Re pl -ĠG arrison -M ini -Ġun paralleled -am ar -URR ENT -w reck -c ertain -t al -ĠC LS -app ings -Ġsens ed -Ġf encing -ĠPas o -ĠDes k -Ġsc off -Ġcontem plate -ĠL iga -l iquid -75 7 -Ġapp rentice -ĠUCH IJ -5 70 -ĠTh ousand -ĠIll um -Ġchampion ed -ãĤ Į -Ġelect ors -Ġ3 98 -ĠH ancock -round ed -ĠJ OHN -Ġuns atisf -Ġqual ifier -ĠGad get -EN E -Ġdead liest -ĠPl ants -Ġ ions -Ġacc ents -Ġtwe aking -Ġsh aved -F REE -ĠCh aser -Again st -9 60 -Ġmeth amphetamine -Ġnormal ized -Ġ$ \ -ĠPre cision -ĠGu am -Ġch oked -ĠX II -ĠCast ing -Tor rent -Ġscal p -ĠJagu ar -w it -Ġsem ic -ix ie -ĠG ould -Ġconf ines -N usra -ĠL on -ĠJ ugg -y cle -ĠCod ec -E gypt -Ġrest rain -ĠAl iens 
-Ġch oking -ĠD unk -ĠBell a -ab c -Ġsl ang -Ġneuro trans -s av -Ġempower ment -â ĨĴ -Ġclim bers -ĠM im -ĠF ra -ros se -Cap ital -ĠCth ulhu -Inter face -Ġprof icient -ĠIN TO -Ġ3 18 -ront al -5 80 -ĠDes pair -K enn -Ġscrim mage -ĠCo at -as ions -Ġwall paper -ĠJ ol -Ġresurg ence -Ġant iv -ĠB alls -² ¾ -Ġbuff ers -Ġsub system -ĠSt ellar -ĠL ung -A IDS -Ġerad icate -Ġblat antly -Ġbehav es -ĠN un -Ġant ics -ex port -DE V -w b -Ġph p -ĠInteg rity -Ġexplore r -Ġrev olving -auth ored -g ans -Ġbas k -Ġas ynchronous -å į -TH ING -69 8 -G ene -ĠR acer -ĠN ico -iss ued -Ġser mon -p ossibly -Ġsize of -Ġentrepreneur ial -ox in -ĠMin erva -Ġpl atoon -n os -ri ks -A UT -ĠAval anche -ĠDes c -ij 士 -ĠP oc -Ġconf erred -Î » -Ġpat ched -F BI -66 2 -Ġfract ures -Ġdetect s -Ġded icate -Ġconstitu ent -Ġcos mos -W T -Ġswe ats -Ġspr ung -b ara -s olid -Ġuns us -Ġbul ky -ĠPhilipp e -ĠFen rir -Ġtherap ists -ore al -^^ ^^ -Ġtotal ed -Ġboo ze -ĠR PC -Prosecut ors -Ġdis eng -ĠSh ared -Ġmotor cycles -Ġinvent ions -Ġlett uce -ĠMer ge -ĠJ C -Ġspiritual ity -ĠWAR NING -Ġunl ucky -ĠT ess -Ġtong ues -ĠD UI -T umblr -Ġle ans -Ġinv aders -Ġcan opy -ĠHur ricanes -ĠB ret -ĠAP PLIC -id ine -ick le -Reg arding -Ġve ggies -Ġe jac -ju ven -F ish -D EM -ĠD ino -Th row -ĠCheck ing -be ard -( & -Ġj ails -Ġh r -trans fer -iv ating -Ġfle ets -ĠIm ag -ĠMc Donnell -Ġsnipp et -Is a -ĠCh att -ĠSt ain -ĠSet FontSize -ĠO y -ĠMathemat ics -49 4 -Ġelectro ly -ĠG ott -ĠBr as -B OOK -ĠF inger -d ump -Ġmut ants -Ġrent als -Ġinter tw -Ġc reek -ail a -Bro ther -ĠDisc ord -pe e -raw ler -Ġcar p -Ġ27 9 -ãĤ· ãĥ£ -rel ations -Ġcontr asts -Col umn -Ġrec onnaissance -Ġun know -Ġl ooting -Ġregul ates -Ġopt imum -ĠChero kee -ĠA ry -Lat est -Ġroad side -Ġd anced -ĠUnic orn -A cknowled -Ġuncont roll -ĠM US -at io -ch ance -ha ven -VAL UE -Ġfavour ites -Ġceremon ial -b inary -pe ed -wood s -EM P -Ġv ascular -Ġcontempl ated -Ġbar ren -ĠL IST -Y ellow -ospons ors -Ġwhisk y -ĠM amm -ĠDeV os -min imum -H ung -44 2 -P ic -ĠSnap dragon -77 6 
-Ġcar ving -Ġund ecided -Ġadvantage ous -Ġpal ms -ĠA Q -Ġst arch -L oop -Ġpadd le -Ġfl aming -ĠHor izons -An imation -bo ost -Ġprob abilities -ĠM ish -Ġex odus -ĠEditor ial -Ġfung us -Ġdissent ing -ĠDel icious -rog ram -ĠD yn -d isk -t om -Ġfab rics -ĠC ove -ĠB ans -Ġsoft en -ĠCON S -Ġin eligible -Ġestim ating -ĠLex ington -pract ice -of i -Ġshe dding -ĠN ope -Ġbreat hed -ĠCorinth ians -y ne -ek i -B ull -Ġatt aching -reens hots -Ġanaly se -ĠK appa -Ġuns ustainable -Ġinter pol -ank y -he mer -Ġprot agonists -Ġform atted -ĠBry ce -ĠAch illes -ĠAb edin -sh ock -Ġb um -b os -qu a -ĠW arn -q t -ĠDi abetes -8 64 -ĠIn visible -Ġvan ish -Ġtrans mitting -Ġmur ky -ĠFe i -Ġawa ited -ĠJur assic -umm ies -Ġmen acing -g all -C ath -B uilt -ild o -ĠV otes -Ġon t -Ġmun itions -ĠFre em -ÃŃ n -Ġdec ency -lo pp -ie ved -ĠG ord -Ġun thinkable -ĠNews week -Ġ3 21 -He at -Ġpresent er -ji ang -Ġpl ank -ĠAval on -Ġben z -ĠR out -Ġslam ming -ĠD ai -ou ter -ĠCook ie -ĠAlic ia -ge y -Ġvan ity -Ġow l -á µ -t ested -ĠAw akens -Ġcan v -Ġblind ly -ĠRid ley -ĠEm ails -Requ ires -ĠSer bian -ograp hed -if rame -eter ia -Ġaltern ating -qu iet -Ġsoc iology -ĠUn lock -ĠCommun ism -Ġo ps -Ġatt ribution -Ġab duction -ĠAb ram -Ġsidel ined -ĠB OOK -Ġref ining -ĠFe eling -ĠOs lo -ĠPru itt -r ack -ang ible -Ġcaut iously -ĠM ARK -eed s -M ouse -ĠStep h -ĠP air -S ab -99 7 -ĠBa al -B ec -Ġcomm a -ĠP all -ĠG ael -Ġmisunder stand -ĠP esh -Order able -Ġdis mal -ĠSh iny -% " -Ġreal istically -Ġpat io -ĠG w -ĠVirt ue -Ġexhaust ing -wh atever -oph ys -y ip -4 18 -Ad just -ĠWa iting -ess on -ĠMaz da -ĠDo zens -Ġstream lined -Ġincompet ence -ĠM eth -Ġeth os -ON ES -Ġincent iv -Ġgr itty -ĠBut cher -Head er -Ġexp onential -à Ł -Ġcorrel ate -Ġcons ensual -s ounding -R ing -Orig in -Ġcon clusive -fe et -ac ly -ĠF ernandez -Buy able -Ġd ucks -aunt lets -Ġel ong -Ġ28 6 -Ġsim ul -G as -ĠK irst -Ġprot r -ĠRob o -ĠAo E -op ol -Ġpsych ologically -sp in -ilater ally -ĠCon rad -W ave -44 1 -ĠAd vertisement -ĠHarm on -ĠOri ental 
-is Special -Ġpresum ptive -Ġw il -ĠK ier -ne a -Ġp pm -Ġhar bour -ĠW ired -comp any -Ġcor oner -atur days -ĠP roud -ĠN EXT -ĠFl ake -val ued -ce iver -Ġfra ught -Ġc asing -Ġrun away -Ġg in -ĠLaure nt -ĠHar lem -ĠCur iosity -qu ished -Ġneuro science -ĠH ulu -Ġborrow er -Ġpetition er -ĠCo oldown -W ARD -Ġinv oking -conf idence -For ward -Ġst s -pop ulation -Delivery Date -Fil m -ĠC ov -quick Ship -quickShip Available -prim ary -isSpecial Orderable -inventory Quantity -channel Availability -BO X -ĠMulti player -ĠJen ner -77 8 -ĠM d -Ġ~ /. -M N -Ġchild ish -Ġantioxid ant -ĠChrom ebook -Ġ27 4 -Ġscreen play -Ġadvent urous -ĠRelations hip -respons ive -ming ton -Ġcorner stone -ĠF ey -F IR -Ġrook ies -ĠF eaturing -Ġorig inate -Ġelectro des -ant es -Ġscript ures -Ġgl ued -Ġdiscont ent -Ġaff licted -lay out -B rave -Ġm osa -ĠQuant ity -ĠH ik -w inner -H ours -Ġent ail -ĠCell s -olog ue -Ġv il -Ġpre acher -Ġdecor ative -d ifferent -Ġprejud ices -ĠSm oking -ĠNotting ham -so Type -Ġrhyth ms -ĠAl ph -bl ast -Ste el -ĠDaniel le -Ġstr ife -Ġrem atch -so DeliveryDate -ĠF ork -t rip -ol ulu -hes es -C G -ĠPOLIT ICO -ost a -ĠDr ift -é¾įå ¥ -é¾įå¥ ij士 -Ġvet ting -ĠJin ping -ĠRec ession -Min or -ĠF raud -enf ranch -Ġconven ed -ĠNA ACP -ĠMill ions -ĠFarm ing -ĠW oo -ĠFl are -rit o -imm igrant -Ġvac ancy -ĠHE AD -ĠV aj -eg al -ĠV igil -Stud y -Ġru ining -Ġr acks -Ġhe ater -ĠRand olph -ĠBr ush -ĠT ir -Ø ¨ -Ġc ov -% ] -Ġrecount s -ĠO PT -ĠM elt -Ġtr uce -Ġcas inos -Ġcrus ade -Ġcarn age -Ġstri pe -ĠK yl -Text ures -Ġ6 98 -Ġpro clamation -Ġgood ies -Ġ........ .. 
-pro claimed -P olit -Ġtop ical -Ġspecial ize -ĠA min -g m -Ġanch ored -Ġbear ings -s ample -ĠHigh land -ĠAut ism -Ġmerc enary -Ġinterview er -L ER -ĠSom ers -Ġembry o -ĠAss y -Ġ28 1 -ĠEd iting -ĠCh osen -6 60 -Ġp ci -ĠThunder bolt -BI LL -Ġchuck led -jri wal -h of -Ġearth ly -() { -ind ependence -Ġdisp ers -ĠV endor -ĠG areth -Ġp als -P enn -ĠSub mit -ic um -Th u -Ġcl andestine -Ġcann ibal -ĠCl erk -E Stream -gal itarian -âĻ ¥ -g ew -Ġhor rend -ĠL ov -ĠRe action -ocr in -Class ic -Ġecho ing -Ġdiscl osing -ĠIns ight -og un -ĠInc arn -upload s -pp erc -guy en -Ġ19 01 -ĠB ars -68 7 -Ġb ribes -ĠFres no -ur at -ĠRe ese -Ġintr usive -Ġgri pping -ĠBlue print -ĠR asm -un ia -man aged -ĠHeb do -Ġ3 45 -Ġdec oding -Ġpo ets -Ġj aws -ĠF IGHT -am eless -ĠMead ows -ĠHar baugh -Inter view -ĠH osp -ĠB RA -Ġdelet ion -m ob -W alker -ĠMoon light -ĠJ ed -ĠSoph ia -Ġus ur -Ġfortun ately -ĠPut ting -ĠF old -Ġsan itation -Ġpart isans -IS ON -B ow -ĠCON C -ĠRed uced -ĠS utton -Ġtouch screen -Ġembry os -âĢ¢âĢ¢ âĢ¢âĢ¢ -ĠK rug -com bat -ĠPet roleum -Ġam d -ĠCos mos -Ġpresc ribing -Ġconform ity -ours es -Ġplent iful -Ġdis illusion -ĠEc ology -itt al -Ġf anc -Ġassass inated -regn ancy -Ġperenn ial -ĠBul lets -Ġst ale -Ġc ached -ĠJud ith -ĠDise ases -All en -Ġl as -Ġsh ards -ĠSu arez -ĠFriend ship -inter face -ĠSupp orters -add ons -46 2 -ĠIm ran -ĠW im -Ġnew found -ĠM b -An imal -Ġd arling -and e -Ġrh y -ĠTw isted -pos al -yn ski -Var ious -× ľ -ĠK iw -uy omi -Ġwell being -ĠL au -an os -Ġunm ist -Ġmac OS -Ġrest room -ĠOl iv -ĠAir ways -Ġtimet able -9 80 -Ġrad ios -v oy -ias co -Ġcloud y -ĠDraw ing -Any thing -Sy ria -ĠH ert -st aking -Ġun checked -Ġb razen -ĠN RS -69 7 -onom ic -est ablish -Ġl eng -Ġdi agonal -ĠF ior -L air -ĠSt ard -Ġdef icient -jo ining -be am -Ġomn ip -Ġbl ender -Ġsun rise -Mo ore -ĠF ault -ĠCost ume -ĠM ub -Fl ags -an se -Ġpay out -ĠGovern ors -ĠD illon -ĠBan ana -N ar -Ġtra iled -Ġimperial ist -um ann -ats uki -4 35 -ĠRoad s -Ġsl ur -ĠIde ally -Ġt renches -C trl -Ġmir 
rored -ĠZ el -ĠC rest -Comp at -ĠRoll s -sc rib -ĠTra ils -omet ers -w inter -Ġimm ortality -il ated -Ġcontrad icts -un iversal -ill ions -ĠM ama -opt im -AT URE -Ġge o -et ter -ĠCar lo -4 24 -Ġcanon ical -ĠStrongh old -n ear -Ġperf ume -Ġorche stra -od iac -Ġup he -Ġreign ing -vers ive -Ġc aucuses -ĠD EM -Ġinsult ed -Ġ---- -- -ĠCr ush -Ġroot ing -ĠWra ith -Ġwh ore -Ġto fu -C md -ĠB ree -Ġ$ _ -Ġr ive -ĠAd vertising -Ġw att -ĠH O -Ġpersu asive -ĠParam eters -Ġobserv ational -ĠN CT -ĠMo j -ĠSal on -Ġtr unc -Ġexqu isite -ĠMar a -Ġpo op -ĠAN N -Ex c -ĠWonder ful -ĠT aco -Ġhome owner -ĠSmith sonian -orpor ated -mm mm -Ġlo af -ĠYam ato -ĠInd o -Ġcl inging -á s -Ġimm utable -h ub -Or ange -Ġfingert ips -ĠWood en -ĠK idd -ĠJ PM -ĠDam n -C ow -c odes -48 2 -Ġiniti ating -ĠEl k -ĠCut ting -Ġabsent ee -ĠV ance -ĠLil ith -G UI -Ġobsc ured -Ġdwar ves -ĠCh op -ĠB oko -Val ues -Ġmult imedia -Ġbrew ed -Reg ular -CRIP TION -ĠMort al -Ġa pex -Ġtravel er -Ġbo ils -Ġspray ing -Rep resent -ĠStars hip -4 28 -Ġdisappro val -Ġshadow y -Ġlament ed -ĠRe place -ĠFran ç -67 7 -d or -Ġunst oppable -Ġcoh orts -gy n -ĠClass ics -ĠAm ph -Ġsl uggish -ĠAdd iction -ĠPad res -Ġins cription -Ġin human -min us -ĠJere miah -at ars -Ter ror -ĠT os -ĠSh arma -ast a -c atch -Ġpl umbing -ĠTim bers -Sh ar -H al -ĠO sc -Ġcou pling -hum ans -Ġsp onge -Ġid ols -ĠSp a -ĠAdv ocate -ĠBe ats -lu a -Ġtick ing -Ġload er -ĠG ron -8 10 -Ġstim ulated -Ġside bar -ĠManufact urer -ore And -19 73 -Ġpra ises -ĠFl ores -dis able -ĠElect rical -ra ise -E th -Ġmigr ated -Ġlect urer -K ids -ĠCa vern -Ġk ettle -Ġgly c -ĠMand ela -ĠF ully -å§ « -FIN EST -Ġsquee zing -ĠRy der -amp oo -oreAnd Online -Inst oreAndOnline -Buyable InstoreAndOnline -Ġcommem orate -ĠRamp age -Aust in -ĠSh roud -ĠRu ins -9 15 -ĠK H -Ġwater front -ĠE SC -b aby -ĠC out -ĠEm blem -Ġequival ents -49 2 -Un ique -ĠNiet zsche -brow ser -Ġim itation -ĠWere wolf -ĠKir in -ac as -' ," -Ġà ¾ -Review ed -Ġc unt -Ġvo ic -ĠLen ovo -Ġbond ed -48 1 -Ġinhib itors -Ġendeav 
ors -ĠHav ana -ĠSt out -ĠJ olly -A ctor -*/ ( -Ġoccur rences -ĠT ens -Incre ased -ĠACT ION -Ġ ãĢĮ -ĠRank ings -ĠB reat -Ġ30 9 -D ou -Ġimpact ing -ĠDuc hess -pre fix -Q B -Ġsummon ing -Ġbest owed -ĠKe pler -ĠPOW ER -c ube -ĠK its -ĠG rip -Ġop ium -Ġrep utable -t oc -ich ael -ĠR ipple -Ġcaf é -ĠZ oom -ĠBur ma -Ġwa ive -Ġst alls -Ġdem eanor -inc erity -Ġfluor ide -ĠSH OULD -Par is -Ġlong ing -Ġpl at -Ġgross ly -Ġbull s -Ġshowc asing -ex pected -ĠG addafi -engine ering -Re peat -ĠK ut -Ġconce ivable -Ġtrim med -osc ope -ĠCand idate -ĠT ears -rol og -Lew is -S UP -Ġroad map -Ġsal iva -Ġtrump et -Jim my -Ġmirac ulous -Ġcolon ization -Ġam put -ĠGN OME -ate ch -D ifferent -ĠE LE -ĠGovern ments -ĠA head -ãħĭ ãħĭ -word press -L IB -ĠIn clude -ĠDor othy -0 45 -ĠColomb ian -Ġle ased -88 4 -Ġde grading -ĠDa isy -i ations -Ġbapt ized -Ġsurn ame -co x -Ġblink ed -ãĥ ¢ -Ġpoll en -Ġder mat -Ġre gex -ĠNich olson -ĠE ater -ç ľ -rad or -Ġnarrow er -Ġhur ricanes -Ġhalluc inations -r idden -ISS ION -ĠFire fly -Ġattain ment -Ġnom inate -Ġav ocado -ĠM eredith -Ġt s -Ġreve rence -Ġe uph -Ġcr ates -ĠT EXT -Ġ4 43 -Ġ3 19 -J SON -iqu ette -Ġshort stop -ic key -Ġpro pelled -Ġap i -ĠTh ieves -77 9 -Ġovers aw -Ġcol i -ĠNic ola -Ġover cl -ik awa -ĠC yr -Ġ38 4 -78 9 -ĠAll ows -10 27 -Det roit -TR Y -set up -ĠSocial ism -Sov iet -s usp -ĠAP R -ĠShut down -Ġal uminium -zb ek -ĠL over -GGGG GGGG -Ġdemocr acies -Ġ19 08 -ĠMer rill -ĠFranco is -gd ala -Ġtraff ickers -ĠT il -ĠGo at -Ġsp ed -ĠRes erv -Ġpro d -55 2 -Ġc ac -ĠUn iv -ĠSch we -Ġsw irling -ĠWild erness -ĠEgg s -Ġsadd ened -Ġarch aic -H yd -Ġexcess ively -B RE -Ġaer ospace -ĠVo ices -Cra ig -Ġign ited -In itially -ĠMc A -Ġhand set -Ġreform ing -Ġfrust rations -ĠDead pool -ĠBel ichick -ract or -ĠRagnar ok -ĠD rupal -ĠApp roximately -19 20 -ĠHub ble -arm or -ĠSar as -ĠJon as -Ġnostalg ic -Ġfeas ibility -Sah aran -Ġorb iting -Ġ9 70 -R u -Ġsh in -ĠInvestig ators -Ġinconsist encies -ĠP AN -B G -Ġgraz ing -Ġdetect ors -ĠStart up -ĠFun ny -ĠNa omi 
-Consider ing -Ġh og -ut f -ce mic -Ġfort ified -ĠFun ctions -Ġcod ec -nut rition -H at -" ! -micro soft -55 8 -ĠTh in -ĠA CE -Al ias -ĠO PS -p apers -P K -ãĢ İ -Ġimpro bable -N orthern -equ al -Ġlook out -Ġty res -ĠMod ified -ĠK op -Abs olutely -Ġbuild up -sil ver -Ġaud i -Ġgro tesque -ĠSab er -ĠPres byter -ON Y -Ġglac iers -ĠSho als -ĠK ass -ĠH RC -ĠNic ol -ĠL unch -ĠF oss -âĸ Ĵ -AD RA -ĠOne Plus -o ing -ground s -Ġincident al -Ġdatas ets -68 9 -ĠClarks on -Ġassemb ling -ĠCorrect ions -Ġdrink ers -Ġqual ifiers -Ġle ash -Ġunf ounded -ĠH undred -Ġkick off -T i -Ġrecon cil -ĠGr ants -ĠCompl iance -ĠDexter ity -Ġ19 06 -w arn -D allas -Max imum -n ard -av ia -be aut -ens itivity -tr ace -Ġpione ers -ĠF ract -ãĢ ı -Ġpre cept -Ġgloss y -ĠI EEE -Ac ross -Ġ6 80 -S leep -che on -Ġsatir ical -ĠMin otaur -ĠCla ude -Ġr é -ape go -Ġcar rot -ĠSem in -ino a -Ġz o -Ind ependent -Ġdiagn oses -ĠC ue -M AR -Ġrend ition -ĠK ik -Ġpath ology -Ġselect s -Link edIn -Ġass ay -ĠD res -Ġtext ual -post ed -IT AL -ĠM aul -N eal -Ġinter connected -Ġerr atic -ĠVir us -Ġ5 30 -Ġenvironmental ists -ĠP helps -Ġeng agements -ĠIN ST -Ġeconom ical -nox ious -Ġg earing -izz y -Ġfavor ably -ĠMcG ill -T erm -Ġh anged -Ġball park -ĠRe yes -Ġbe ware -ĠP sal -ĠMass acre -q i -Ġin accessible -acly sm -Ġfr ay -ill ac -Ġbitter ly -ĠCert ification -Mich igan -Ġir respective -al ore -Em pty -Ġendorse ments -Ġund et -f g -equ ipped -Ġmerc iless -ĠC ust -Ġimm ature -Ġvou cher -ĠBlack well -Ñ ı -h awk -dis ciplinary -ile e -ĠMak oto -ĠD ude -ãĥĩ ãĤ£ -Y ears -Ġin ver -Ġsh aman -ĠY ong -ip el -ell en -ĠCath y -br ids -Ġs arc -65 1 -N ear -Ġground work -Ġam az -Ġ4 15 -ĠHunting ton -hew s -ĠB ung -Ġarbit rarily -ĠW it -ĠAl berto -Ġdis qualified -best os -46 1 -Ġp c -Ġ28 4 -ro bat -Rob in -Ġh ugs -ĠTrans ition -ĠOcc asionally -Ġ3 26 -ĠWh ilst -ĠLe y -Ġspaces hip -cs v -Ġun successfully -ĠA u -le ck -ĠWing ed -ĠGrizz lies -. 
� -Ġne arer -ĠSorce ress -ĠInd igo -El se -8 40 -let es -Co ach -Ġup bringing -ĠK es -Ġseparat ist -Ġrac ists -Ġch ained -Ġabst inence -lear ning -Ġrein stated -Ġsymm etry -Ġremind ers -ĠChe vy -Ġm ont -Ġexempl ary -ĠT OR -Z X -Ġqual itative -ĠSt amp -ĠSav annah -ĠRoss i -Ġp aed -Ġdispens aries -ĠWall s -ĠCh ronic -Ġcompliment ary -ĠBeir ut -Ġ+ --- -igs list -Ġcrypt ographic -mas ters -ĠCap itals -Ġmax imal -Ġent ropy -Point s -Ġcombat ants -l ip -ĠGl ob -ĠB MC -ph ase -th ank -HT TP -Ġcomm uter -Ġ\( \ -.. / -ĠReg ener -ĠDO I -ĠActiv ision -Ġsl it -os al -RE M -Ġch ants -Y u -Ke ys -Bre xit -ĠFor ced -Ari zona -Ġsquad ron -IS O -ĠMal one -Ġ3 38 -Ġcontrast ing -Ġt idal -Ġlib el -Ġimpl anted -Ġupro ar -ĠC ater -Ġpropos itions -M anchester -ĠEuro s -it amin -G il -ĠEl ven -ĠSe ek -ĠB ai -Ġredevelop ment -ĠTown s -ĠL ub -! ", -al on -K rist -Ġmeas urable -Ġimagin able -Ġapost les -Y N -7 60 -Ġster oid -Ġspecific ity -ĠL ocated -ĠBeck er -ĠE du -ĠDiet ary -uts ch -ĠMar ilyn -Ġbl ister -ĠM EP -ĠK oz -ĠC MS -y ahoo -ĠCar ney -Ġbo asting -ĠC aleb -By te -read s -ad en -Pro blem -ĠWood ward -S we -S up -ĠK GB -Set up -Ġtac it -Ġret ribution -Ġd ues -ĠM ü -. ? 
-ä¸ Ń -p ots -Ġcame o -ĠP AL -educ ation -A my -like ly -g ling -Ġconstitution ally -ĠHam m -ĠSpe ak -Ġwid gets -br ate -Ġcra ppy -ĠI ter -Ġanticip ating -ĠB out -P ixel -ĠY ep -ĠLaur ie -Ġh ut -Ġbullet in -ĠSal vation -Ġch ats -ear able -Honest ly -AL TH -onse qu -c ult -isco very -ovy ch -Ġse lves -ĠSat oshi -S ounds -Ġconver gence -ĠRosen berg -19 74 -Ġnas al -Ġfull est -Ġfer ocious -x us -ist e -AM S -Ġlobb ied -Ġso othing -ĠGun n -t oday -0 24 -Ġinspir ational -ĠN BN -p b -g ewater -or ah -all owed -ĠCol iseum -Ġspecial izing -Ġinsane ly -ĠT ape -del ay -Ġt arn -ĠP ound -Ġmel anch -Ġdeploy ments -il and -Ġless en -Ġfur ry -ĠUE FA -Ġblood shed -ĠMe ier -ither ing -Ġhe irs -ĠJ aw -ax ter -ĠPublic ations -Ġal ters -int ention -ĠWinc hester -d etermination -ĠLif etime -th in -Mon ster -7 80 -Ġapprox imation -Ġsuper markets -ĠSecond s -or os -h uge -Ġb ribe -ĠLIM ITED -un ed -Ġmis interpret -ĠIn jury -Ġ3 67 -Ġthreshold s -ĠCarn ival -Ġgastro intestinal -Ġguid eline -Ġde ceived -f eatures -Ġpurported ly -ĠRon nie -ĠNew t -Ġsp acious -as us -Ġsuperhero es -ĠCyn thia -le gged -k amp -ch io -Ġth umbnail -ĠShir ley -ill ation -Ġshe ds -ĠZ y -E PA -Ġdam s -Ġy awn -n ah -ĠPe ggy -ĠE rie -ĠJu ventus -ĠF ountain -r x -don ald -al bum -ĠComp rehensive -Ġc aching -ĠU z -ulner ability -ĠPrinc iple -ĠJ ian -ing ers -cast s -ĠOs iris -ch art -t ile -ĠTiff any -ĠPatt on -ĠWh ip -Ġovers ized -J e -ĠCind erella -ĠB orders -ĠDa esh -M ah -Ġdog ma -Ġcommun ists -v u -Coun cil -Ġfresh water -Ġw ounding -Ġdeb acle -Ġyoung ster -Ġthread ed -ĠB ots -ĠSav ings -ãģ Ĥ -ol ing -oh o -Ġillum ination -M RI -Ġlo osen -tr ump -ag ency -ur ion -Ġmoment arily -ĠCh un -ĠBud apest -ĠAl ley -D isk -Ġaston ished -ĠCon quer -ĠAccount ing -h aving -ĠWe in -ĠAl right -Ġrev olver -Ġdel usion -Ġrelic s -Ġad herent -qu ant -Ġhand made -or io -Ġcomb ating -c oded -Ġquad ru -re th -N ik -ĠTrib al -ĠMyster ious -Ġin hal -ĠWin ning -ĠClass ification -ch anged -Ġun ab -Ġsc orn -icip ated -w l -ond uctor -Ġrein 
forcing -ĠChild hood -an ova -Ġadventure r -Ġdoctor al -ĠStrateg ies -Ġengulf ed -ĠEnc ounter -Ġl ashes -Crit ical -ric ular -ĠU TF -oci ation -check ing -ĠConsult ing -Run time -per iod -ĠAs gard -Ġdist illed -ĠPas adena -ĠD ying -ĠCOUN TY -Ġgran ite -Ġsm ack -Ġparach ute -ĠS UR -Virgin ia -ĠF urious -78 7 -ĠO kin -Ġcam el -ĠM bps -19 72 -ĠCh ao -ĠC yan -j oice -ef er -ĠW rap -ĠDeb ate -S eg -Ġfore arm -ĠIgn ore -Ġtim estamp -Ġprob ing -ĠNo on -ĠGra il -f en -Ġdorm ant -ĠFirst ly -ĠE ighth -ĠH UN -ĠDes ire -or as -Girl s -ĠDes mond -z ar -am ines -O AD -exec ute -Ġbo obs -ĠAT L -_ ( -Chel sea -Ġmasturb ation -ĠCo C -Ġdestroy er -ĠCh omsky -Ġsc atter -ĠAss ets -79 6 -ĠC argo -Ġrecept ive -ĠSc ope -Ġmarket ers -Ġlaun chers -Ġax le -ĠSE A -se q -ĠM off -f inding -ĠGib bs -Georg ia -extreme ly -N J -Ġlab orers -st als -Ġmed iation -ĠH edge -at own -Ġi od -des pite -v ill -J ane -ex istence -Ġcoinc ided -ĠUt ilities -ĠChe ap -Ġlog istical -Ġcul mination -ĠNic otine -p ak -F older -Ġrod ents -st uff -Ġlaw fully -Ġreper to -io ch -j j -Dial ogue -HH HH -lic tion -Look s -Ġ29 7 -Ġtur rets -ĠAb andon -Ġinc ess -ĠTraff ord -Ġcur led -Ġprefer ring -Ġprivat ization -Ġir resist -ĠP anda -ĠSh ake -ĠMc Gr -ãĥ Ħ -und ers -Ġdiscrim inated -Ġbart ender -I LE -Atl antic -Ġprop ensity -ĠW iz -ĠG im -con ference -Ġrein forces -G h -w agon -Ġe erie -F al -Ġhug ged -rac ist -R IC -F u -Ġf iller -ĠSt ub -Ġeng raved -ĠWrest le -Ġimagin ative -ĠPe er -ĠFact ors -an us -ĠDrac ula -mon itor -Ġrou ters -ib ia -ĠBoo lean -end ale -ĠSl aughter -ĠSh ack -R FC -ĠSpiel berg -S ax -ĠPH OTO -ĠCl over -ĠR ae -Dep ending -ĠMem or -ar am -Ġpier ced -Ġcur tains -v ale -ĠInqu isition -ĠP oke -Ġforecast ing -Ġcompl ains -S ense -ĠHer mes -isc overed -Ġb ible -ĠMor ph -Ġg erm -78 5 -D ON -Ġcon gen -Ġcr ane -ĠD PR -Ġrespect fully -R oom -ĠN aw -ĠDal ai -re ason -ĠAng us -Educ ation -ĠTitan ic -Ë ľ -Ġo val -un ited -Ġthird s -Ġmoist ur -ĠC PC -M iami -Ġtent acles -ĠPol aris -ex c -ex clusive -ĠPra irie -Ġcol 
ossal -ĠBl end -sur prisingly -ÃŃ s -Ġindo ctr -Ġbas al -ĠMP EG -und o -Spl it -Develop ment -Ġlan tern -19 71 -Ġprov ocation -Ġang uish -ĠB ind -ĠLe ia -duc ers -ipp y -conserv ancy -Ġinitial ize -ĠTw ice -ĠSu k -Ġpred ic -Ġdi ploma -Ġsoc iop -Ing redients -Ġhamm ered -ĠIr ma -Q aida -Ġglim ps -ĠB ian -Ġst acking -Ġf end -gov track -Ġun n -dem ocratic -ig ree -Ġ5 80 -Ġ29 4 -Ġstraw berry -ID ER -Ġcher ished -ĠH ots -Ġinfer red -Ġ8 08 -ĠS ocrates -O regon -ĠR oses -ĠFO IA -Ġins ensitive -Ġ40 8 -Recomm end -ĠSh ine -Ġpain staking -UG E -ĠHell er -ĠEnter prises -I OR -ad j -N RS -L G -Ġalien ated -Ġacknowled gement -ĠA UD -ĠRen eg -Ġvou chers -Ġ9 60 -Ġm oot -ĠDim ensions -Ġc abbage -B right -g at -ĠK lu -Ġlat ent -Ġz e -ĠM eng -Ġdis perse -Ġpand emonium -H Q -Ġvirt uous -ĠLoc ations -ee per -prov ided -Ġse ams -ĠW T -iz o -PR OV -Ġtit anium -Ġrecol lection -Ġcr an -Ġ7 80 -ĠN F -49 1 -64 2 -p acking -59 8 -text ure -Sp ider -fre edom -cipl ed -ĠTAM ADRA -âĻ ¦ -aut hent -ĠW ANT -r ified -Ġr ites -Ġuter us -k iss -Ġâī ¤ -Ġsk illet -Ġdis enfranch -ĠGa al -Comp an -Ġage ing -gu ide -B alt -Ġiter ator -Ġdiscretion ary -t ips -Ġprim ates -ĠTechn ique -ĠPay ments -az el -ĠR OCK -stant ial -0 60 -Ġd mg -ĠJack ets -ĠPlay off -Ġnurs ery -ĠSy mb -art on -Ġannex ation -Color ado -Ġco ils -ĠSh oes -âĦ¢ : -ĠRo z -COM PLE -ĠEve rest -ĠTri umph -J oy -G rid -à ¼ -process or -ĠPros per -ĠSever us -ĠSelect ed -r g -ĠTay yip -St ra -Ġski ing -Ġ? 
) -Ġpe g -Tes la -Ġtime frame -Ġmaster mind -ĠN B -scient ific -ĠSh it -gener ic -IN TER -N UM -Ġst roll -ĠEn ix -ĠM MR -ĠE MS -m ovie -Ĥ ª -Ġminim izing -idd ling -Ġilleg itimate -Ġprot otyp -Ġpremature ly -Ġmanual s -obb ies -ĠCass idy -D EC -des ktop -Ġaer os -Ġscreen ings -Ġdeb ilitating -ĠGr ind -nature conservancy -Ġf ades -ter mination -assets adobe -F actor -Ġdefinitive ly -P oké -ap ult -ĠLaf ayette -C orn -ĠCor al -Ġstagn ant -T ue -Ġdissatisf action -G ender -Ġkid neys -ĠG ow -ĠDef eat -ĠAsh ton -Ġcart els -Ġfore closure -ĠExpl ore -stre ngth -ot in -Ġveterin arian -Ġf umble -Ġpar ap -ĠSt rait -r ils -Ġpr ick -ĠBerm uda -ĠAm munition -skin ned -Ġab ound -ĠB raz -Ġshar per -ĠAsc ension -Ġ9 78 -Ġpreview s -Ġcommun ion -ĠX Y -Ġph ony -Ġnewcom er -Ġ3 32 -." ," -Ġredist ribution -Prot ect -ĠSo f -K al -Ġlip stick -w orst -Ġtang led -Ġretrospect ive -int eger -Ġvolunte ering -Ġ19 07 -Ġ -------------------- -ic hen -Ġunve iling -Ġsen seless -Ġfisher ies -\ - -Ġh inges -Ġcalcul us -My th -Ġund efeated -Ġoptim izations -Ġdep ress -Ġbill board -ĠY ad -ĠPy ramid -Is n -I de -Ġleg ion -ĠK ramer -ent anyl -Ġpenet rating -ĠHaw th -ĠPR ODUCT -ĠGer ard -ĠP act -ĠIn cluding -ĠEl ias -ĠEl aine -vis ual -Ġhum ming -Ġcond esc -ĠF asc -ä¸ Ĭ -Ġe galitarian -Ġdev s -ĠD ahl -O ps -D H -ĠB ounce -id ated -ald o -Ġrepublic an -Ġh amb -ĠS ett -ograph ies -CH APTER -Ġtrans sexual -Ġsky rocket -ans wer -Ġmark up -Ø ª -Ġhero ine -Comp are -ĠT av -Be ast -Ġsuccess ors -Ġna ïve -ĠBuck ley -st ress -me at -Ġdownload able -Ġindex ed -Ġsc aff -ĠL ump -ĠHom o -Stud io -In sp -Ġr acked -far ious -ĠPet ty -Ex ternal -Ġ19 09 -W ars -com mit -put ers -Ġun ob -ĠEr r -ĠE G -ĠAl am -ĠSiber ia -ĠAtmosp heric -IS TER -ĠSatan ic -trans lation -ĠL oud -tra umatic -l ique -Ġreson ate -ĠWel ch -Ġspark ing -ĠT OM -t one -Ġout l -Ġhandc uffed -ĠSer ie -8 01 -Ġland marks -ĠRee ves -Ġsoft ened -Ġdazz ling -ĠW anted -month s -Mag ikarp -Ġunt reated -ĠBed ford -M i -ĠDynam o -O re -79 5 -Ġwrong ful -Ġl ured 
-Ġcort isol -Ġve x -d rawn -ile t -Download ha -ĠF action -Ġlab yrinth -Ġhij acked -w aters -er ick -Ġsuper iors -ĠRow ling -ĠGu inness -Ġt d -99 2 -Ġune arthed -Ġcentr if -Ġsham eless -P od -ĠF ib -Ġ icing -Ġpredict or -Ġ29 2 -fore station -con struct -C and -@ # -Ġag itated -Ġre pr -OV A -Ġkn itting -ĠLim a -Ġf odder -68 4 -ĠPerson a -k l -7 01 -Ġbreak up -á ¸ -Ġapp alled -Ġantidepress ants -ĠSus sex -Har ris -ĠTher mal -ee ee -U pload -Ġg ulf -Ġdoor step -ĠSh ank -L U -ĠM EN -ĠP ond -s orry -Ġmis fortune -n ance -Ġb ona -M ut -Ġde graded -ĠL OG -ĠN ess -an imal -Ġa version -und own -Ġsupplement ed -ĠC ups -Ġ50 4 -Ġdep rive -ĠSpark le -Å Ĥ -ĠMed itation -auth ors -ĠSab an -ĠN aked -air d -ĠMand arin -ĠScript ures -ĠPerson nel -ĠMahar ashtra -Ġ19 03 -ĠP ai -ĠMir age -omb at -Access ory -Ġfrag mented -T ogether -Ġbelie vable -ĠGl adiator -al igned -ĠSl ug -M AT -Ġconvert ible -ĠBour bon -amer on -ĠRe hab -nt ax -Ġpowd ered -pill ar -Ġsm oker -ĠMans on -ĠB F -5 11 -ĠGood ell -ĠD AR -m ud -g art -Ġob edient -ĠTrans mission -ĠDon ation -8 80 -Ġbother ing -Material s -ãĤ ± -dest roy -Ġfore going -Ġanarch ism -ĠK ry -ice ps -Ġl ittered -ĠSch iff -Ġanecd otal -un its -Ġf ian -ĠSt im -ĠS OME -ĠInv aders -Ġbehaviour al -ĠVent ures -Ġsub lime -Ġfru ition -ĠPen alty -Ġcorros ion -¶ ħ -Ġlik ened -Ġbesie ged -ween ey -ĠCre ep -Ġlinem en -mult i -ic ably -ud der -Ġvital ity -Ġshort fall -ĠP ants -ap ist -H idden -ĠDro ps -med ical -Ġpron unciation -ĠN RL -Ġinsight ful -J V -ĠBe ard -ĠCh ou -Ġchar ms -Ġb ins -Ġamb assadors -ĠS aturdays -Ġinhib itor -ĠFr anch -6 01 -', ' -ĠCon or -art ney -ĠX peria -g rave -be es -ĠProtest ants -Ġso aking -ĠM andal -Ġph ased -Ġ6 60 -Ġsc ams -Ġbuzz ing -ĠItal ians -ĠLoren zo -ĠJ A -Ġhes itated -Ġcl iffs -ĠG OT -ingu ishable -Ġk o -Ġinter ruption -Z ip -Lear ning -Ġundersc ores -ĠBl ink -K u -57 9 -ĠAut ob -I RE -Ġwater ing -Ġpast ry -8 20 -Ġvision ary -ĠTempl ar -awa ited -Ġpist on -Ġant id -current ly -Ġp ard -Ġw aging -Ġnob ility -ĠY us -Ġinject 
ing -f aith -ĠP ASS -å º -Ġret ake -ĠPR OC -Ġcat hedral -b ash -Ġwrest lers -Ġpartner ing -Ġn oses -Ġ3 58 -Trans form -am en -Ġb outs -ĠId eal -ĠConstant in -Ġse p -ĠMon arch -att en -ĠPe oples -mod ified -Ġmor atorium -Ġpen chant -Ġoffensive ly -Ġprox ies -ok ane -ĠTaiwan ese -ĠP oo -ĠH OME -us ional -Ġver bs -ĠO man -vis ory -Ġpersu asion -Ġmult it -Ġsc issors -G ay -ow ay -oph ysical -l us -gn u -Ġap ocalyptic -Ġabsurd ity -Ġplay book -Ġautobi ography -I UM -Ġsne aking -ĠSim ulation -pp s -ell ery -Plan et -Ġright fully -Ġn iece -ĠN EC -ĠIP O -ĠDis closure -lean or -ous y -ST ER -Ġ28 2 -Cru z -Ch all -64 3 -ĠSurv ive -ĠF atal -ĠAm id -ap o -We apons -D EN -7 70 -ĠGreen wald -Ġlin en -al os -Ġpollut ants -ĠPCI e -k at -Ġp aw -ĠK raft -C hem -ĠTermin ator -Ġre incarn -Ġ] [ -ĠSe eds -Ġsilhou ette -ĠSt ores -Ġgro oming -ĠD irection -ĠIs abel -ĠBr idges -ðŁ ij -E ED -ĠM orsi -Ġval ves -ĠRank ed -ĠPh arma -ĠOrgan izations -Ġpenet rated -ĠRod ham -ĠProt oss -Ġove rest -Ġex asper -ĠT J -Ġ 000000 -Ġtrick le -Ġbour bon -WH O -Ġw retched -Ġmicrosc opic -Ġcheck list -Ġad orned -R oyal -Ad minist -ĠRet irement -ĠHig hest -We ather -ile ge -Ġincre ments -ĠC osponsors -Ġmas se -ĠS inn -r f -Ġh ordes -as sembly -75 4 -ĠNat asha -ĠTY PE -ĠGEN ERAL -Ġarr anging -Ġ40 7 -l ator -Ġg lean -Ġdisc redited -Ġclin icians -UN E -Ġachie ves -ĠEm erson -com plex -= [ -Ġprincip ally -Ġfra il -p icked -Ġthan king -Ġre cl -ĠL AST -Ġsupp ressing -il ic -Ġantidepress ant -ĠLis bon -Ġth or -Ġsp a -Ġking doms -ĠPear ce -em o -Ġpl ung -Ġdiv est -Ġ ******************************** -b is -osp els -ad r -Sp irit -hall a -P ink -end ez -Ġresurrect ed -esc ape -ĠRosen stein -Ġge ological -Ġnecess ities -Ġcarn iv -ĠE lys -ĠBar ney -Ġ29 6 -dig y -ST ON -D OWN -Ġmil estones -Ġk er -Ġdismant ling -Ġre prim -Ġcross ings -19 45 -Ġpatri archy -Ġblasp hemy -Ġ3 59 -met ry -ĠOb esity -ĠDiff erences -bl ocking -ãĥķ ãĤ¡ -ich ita -ĠSab ha -ph alt -ĠCol o -ual a -effic ients -ĠMed ina -con sole -55 7 -ĠHann ibal 
-ĠHab it -ĠF ever -Ġthen ce -Ġsyn agogue -Ġessential s -Ġw ink -ĠTr ader -ID A -ĠSp oiler -ĠIceland ic -ĠHay ward -Ġpe ac -Ġmal ice -Ġflash back -Ġth w -Ġlay offs -L iquid -Ġtro oper -Ġh inge -ĠRead ers -Ph ill -ĠB auer -Cre ated -Ġaud its -ac compan -Ġunsus pecting -ier a -6666 6666 -Ġbro ch -Ġapprehend ed -ĠM alk -cer ning -ĠCod ex -O VER -M arsh -ĠD eng -ĠExp ression -Ġdisrespect ful -Ġasc ending -t ests -ĠPlaint iff -ster y -ĠAl ibaba -din and -ĠDem psey -Applic ations -mor al -Ġthrough put -Ġquar rel -Ġm ills -Ġhe mor -ĠC ASE -terror ist -st im -ifest yle -ro zen -CE PT -Ar k -u ci -lect ic -Ġirrit ating -she ets -A y -Ġrede emed -Ġhorn y -ĠTe ach -ĠS ear -dem ocracy -4 65 -ĠRest ore -Ġstand by -ĠP is -iff in -Ġsleep y -Ġextr ater -Ġcompl iments -Fram eworks -Ġinstall s -Ġb anging -sur face -found land -Ġmetaph ysical -Ġ28 3 -oul s -dev ices -Ar gs -ĠSac rifice -ĠMcC orm -es on -Cons ervative -ĠM ikhail -see ing -is ively -ĠRo oms -ĠGener ic -Ġenthusi astically -Ġgri pped -Ġcomed ic -ĠElectric ity -Ġgu errilla -Ġdec oration -ĠPerspect ive -Ġconsult ations -Ġun amb -Ġplag iar -Ġmagic ian -Ġe rection -ĠTour ism -or ied -ro xy -11 00 -T am -Ī è -Î ³ -× ª -ĠPred ators -Nit rome -Ġtelesc opes -project s -Ġun protected -Ġst ocked -ĠEnt reprene -nex pected -Ġwast ewater -V ill -Ġint imately -Ġi Cloud -ĠConst able -Ġspo of -Ġne farious -Ġfin s -Ġcens or -ĠMod es -ĠEs per -ar bon -Ġinter sections -Ġlaud ed -Ġphys i -Ġgener ously -ĠThe Nitrome -ĠTheNitrome Fan -Ġar isen -ĠÙ Ī -Ġg lands -ĠPav ilion -ĠGu pta -Ġuniform ly -Ġr amps -ri et -ĠWH EN -ĠVan essa -Ġrout ed -Ġlim p -ĠC PI -p ter -int uitive -Ġv aping -Ġexperiment ed -ĠOlymp us -ĠAm on -Ġsight ing -Ġinfiltr ate -ĠGentle man -Ġsign ings -ĠMe ow -ĠNav igation -che cks -4 33 -Ġel apsed -ĠBulg arian -esp ie -ĠS OM -d uring -Ġsp ills -anc a -ĠPly mouth -M AL -Ġdomest ically -ĠWater gate -ĠF AM -k illed -ed ited -ĠYour self -Ġsynchron ization -ĠPract ices -ST EP -Ġgen omes -ĠQ R -not ice -Ġloc ating -z in -Ġ3 29 -al 
cohol -Ġk itten -V o -Ġr inse -Ġgrapp le -ĠSc rew -ĠD ul -A IR -Ġle asing -ĠCaf é -Ġro ses -ĠRes pect -Ġmis lead -Ġperfect ed -Ġnud ity -Ġnon partisan -ĠCons umption -Report ing -Ġnu ances -Ġdeduct ible -ĠSh ots -Ġ3 77 -Ġæ ľ -ano oga -Ben ef -ĠB am -ĠS amp -if ix -Ġgal van -ĠMed als -rad ius -Ġno bles -Ġe aves -igr ate -K T -ĠHar bour -u ers -Ġrisk ed -re q -Ġneuro t -get table -ain a -Rom ney -Ġunder pin -Ġlo ft -ĠSub committee -ĠMong ol -b iz -Ġmanif ests -ass isted -ĠG aga -Ġsy nergy -Ġreligious ly -ĠPre f -ĠG erry -T AG -ĠCho i -4 66 -beh ind -ĠO u -Gold Magikarp -Ġhemor rh -R iver -Ġtend on -Ġinj ure -ĠF iona -Ġp ag -Ġag itation -|| || -ur an -ĠE SA -Ġest eem -Ġdod ging -Ġ4 12 -r ss -Ġce ases -ex cluding -Ġint akes -Ġinsert s -Ġemb old -ĠO ral -up uncture -4 11 -ĠUn ified -ĠDe le -Ġfurn ace -ĠCoy otes -ĠBr ach -L abor -Ġhand shake -Ġbru ises -Gr ade -éĹ ĺ -ĠGram my -ile en -St ates -ĠScandinav ian -ĠKard ash -8 66 -Ġeffort lessly -ĠDI RECT -ĠTH EN -ĠMe i -ert ation -19 68 -Ġgro in -w itch -Requ irements -98 5 -Ġroof s -Ġest ates -ĠH F -Ġha ha -Ġdense ly -ĠO CT -Ġpl astics -Ġincident ally -ĠTr acks -ĠTax es -Ġch anted -Ġforce ful -ĠBie ber -ĠK ahn -K ent -ĠC ot -lic ts -F ed -Ġhide ous -ĠVer d -ĠSynd icate -ĠIl legal -J et -ĠD AV -re asonable -c rew -Ġfundamental ist -Ġtruth ful -ĠJ ing -Ġl il -Ġdown ed -Ġen chanted -ĠPolic ies -ĠMcM aster -ĠH are -ides how -Ġpar ams -en cers -gorith m -Ġallow ances -Ġturb ulent -Ġcomplex ities -ĠK T -Ġ3 37 -ĠGen etic -F UN -D oug -t ick -Ġg igs -ument hal -Ġpatriarch al -Ġcal c -, ... 
-Ġc out -ĠGu an -Ġpath ological -ĠR ivals -Ġunder rated -Ġflu orescent -ĠJ iu -arna ev -ĠQu an -Ġ4 29 -Ġ ਠ-M ario -Con struct -ĠC itation -ĠR acial -ĠR SA -ĠF idel -Ġ3 95 -Person ally -C ause -à » -rad ical -in en -Ġvehement ly -ĠPap a -Ġintern ship -Ġfl akes -ĠRe ck -Luck ily -B ra -20 20 -rav ings -R N -W onder -Ser iously -Ġre usable -Ġpoll uted -ĠP eng -le igh -ind le -Ġcircuit ry -ĠMad onna -ĠB ART -Res idents -att ribute -Phil adelphia -Cl ub -Ġplan ner -Ġfr antically -Ġfaith fully -ĠTerrit ories -ĠL AT -ĠAnders en -an u -ĠP ARK -ĠS ora -i age -ĠPlay offs -ĠG CC -4 27 -Ġab norm -ĠL ever -Ġdisob edience -As ync -ĠShe a -V ert -Ġsk irts -ĠSaw yer -x p -Ġwors ening -Ġsc apego -ĠAng le -oth al -Ġtro ve -ĠSt y -ĠN guyen -mar ine -ide on -Dep ths -Bl og -ĠIll uminati -Ġtract s -Ġorgan ise -Ġo str -F s -Ġlever aging -ĠD aredevil -as ar -Ġl ang -Ġex termin -urs ions -ĠRom o -ãĤ¤ ãĥĪ -Ġcont ended -Ġencounter ing -ĠTable t -ĠAltern ate -sk ill -Ġswe ets -Ġco hesive -cap acity -Ġrep ud -Ġl izard -ro o -Ġpilgr ims -ĠR uff -ĠInstr ument -ĠLog o -uit ous -E H -Ġsales man -Ġank les -L ed -ĠPat ty -ud os -Own er -Ġdiscrep ancies -k j -M U -Ġuncond itional -Dragon Magazine -i ard -O ak -ĠConvers ation -be er -ĠOs aka -D elta -us ky -Ġsecret ion -Ġpl aza -Ġm ing -Ġde pletion -ĠM ous -ĠI TS -ĠH imal -ĠFle ming -Ġcyt ok -ĠH ick -Ġbat ters -ĠInt ellectual -6 75 -é r -IS ION -ĠQu entin -ĠCh apters -ih adi -Ġco aster -WAY S -ĠL izard -ĠY or -and ering -S kin -ha ust -ab by -Ġportray ing -Ġwield ed -d ash -Ġprop onent -Ġr ipple -Ġgrap hene -Ġfly er -Ġrec urrent -Ġdev ils -Ġwater fall -æĺ ¯ -go o -Text Color -Ġtam pering -IV ES -TR UMP -ĠAb el -ĠS AL -ĠHend ricks -ĠLu cius -b ots -Ġ40 96 -IST ORY -Gu est -ĠN X -in ant -Ben z -ĠLoad ed -ĠCle ver -t reatment -Ġta vern -Ġ3 39 -ĠT NT -ific antly -Tem perature -F el -Ġunder world -ĠJud ges -Ġ< + -Ġst ump -Ġoccup ancy -Ġab er -ĠF inder -) ", -ĠN unes -res et -in et -ect omy -Ġwell ness -ĠP eb -quart ered -and an -Ġneg atives -ĠTh iel -ĠCl 
ip -ĠL TD -Ġbl ight -Ġreperto ire -K yle -Ġqu er -ĠC es -Ġha pl -98 9 -ĠTh ames -isc opal -Des k -ivari ate -ĠEx cellence -found ation -Ġâ ĩ -X i -Ġmyster iously -esty les -Ġper ish -ĠEng els -ĠDE AD -09 0 -}} } -ĠUn real -Ġrest less -ID ES -orth odox -ĠInter mediate -Ġdin ners -ĠTr out -ĠSe ym -ĠHall s -og ged -Ġtraged ies -Ġdid nt -67 6 -Ġail ments -Ġobserv able -ĠV ide -ad apt -ĠD usk -Ġprofessional ism -ĠPres cott -ĠInd ies -p ox -ĠMe hran -W ide -Ġend emic -ĠPar an -B ird -Ġped als -ĠI U -ĠAdam ant -ĠH urt -Ġcorrel ates -urd en -Ġspons oring -cl imate -ĠUnivers ities -ĠK not -enn es -ĠDam ian -ĠAx el -S port -Ġbar b -ĠS no -sh own -ste en -ud ence -Ġnon violent -Ġhom ophobia -Ġbiom ass -ĠDet ail -Ġsrf N -ĠT une -accompan ied -I ENCE -Al bert -ĠMong o -z x -ĠCer berus -or bit -c ens -Ġsl ay -SH ARE -H Y -Ġb rawl -ĠPro be -Ġnonex istent -ĠClare nce -ĠBlack burn -Ġport als -ĠR ita -ĠRem ain -ĠLe vant -Ġtrick ed -ĠF erry -aver ing -ĠStraw berry -ĠAn swers -Ġhorrend ous -ĠA man -Supp lement -ĠT oad -Ġpe eled -Ġman oeuv -ĠU zbek -mond s -ĠH ector -Ġ40 2 -pe es -fix es -Ġd j -Ġres umes -Ġaccount ant -Ġadvers ity -Ġham pered -ĠL arson -Ġd oping -part s -H ur -Ġbe arded -Ġy r -ĠPlug in -å¥ ³ -Ġ/ ** -rol ley -Ġwaters hed -ĠSub mission -if lower -AS C -Ġcho ir -Ġsculpt ures -m A -incre asing -ai i -Ġsne akers -Ġconfront s -ĠEle phant -ĠEl ixir -Ġrec al -ĠT TL -w idget -ĠW ax -ĠGr ayson -Ġha irst -Ġhumili ated -ĠWAR N -app iness -ĠT TC -F uel -Ġpol io -Ġcomplex es -Ġbab e -ĠX IV -P F -). [ -P arts -Ġ4 35 -M eg -ĠY ards -ĠAL P -Ġy ells -Ġprin ces -Ġbull ies -ĠCapital ism -ex empt -FA Q -ĠSp onge -ĠAl a -Ġpleas antly -Ġbu f -Ġden ote -Ġunp ublished -Ġkne eling -asc a -Ġl apse -al ien -99 4 -Ġrefere es -ĠLaw yers -S anta -Ġpuzz ling -ĠProm etheus -ĠPh araoh -ĠDel ay -Ġfacilit ates -ĠC ES -Ġjew els -Ġbook let -ond ing -Ġpolar ization -ĠMor an -ĠSal ad -ĠS OS -ĠAdv ice -PH OTOS -IC AN -iat ures -ex press -ĠWonder land -ĠC ODE -ĠCL ASS -9 75 -Ġg rep -ĠD iesel -ĠGl ac -! ?" 
-Ġr m -o ine -disc rimination -ĠN urse -m allow -Ġv ortex -ĠCons ortium -Ġlarge Download -stra ight -augh lin -G rad -Ġpublic ized -ĠW aves -ĠRed d -Ġfest ivities -ĠM ane -ar ov -Ġfleet ing -ĠDr unk -ug en -C ele -Ġchromos omes -ĠD OT --+-+ -+-+ -Ġbus iest -ĠBe aver -Sy rian -ĠK yr -k as -ĠCross Ref -19 50 -76 01 -Ġrepe aling -ĠWin ners -ĠMac ro -ĠD OD -bl ance -S ort -64 1 -Ġmet re -ĠD irk -Ġgo ggles -Ġdraw backs -Ġcomplain ant -Ġauthor izing -Ġantit rust -oper ated -Ġm ah -Ġexagger ation -Am azing -ĠSer aph -Ġha ze -w ow -Ġextingu ished -Ġcan yon -ĠB osh -Ġv ents -Ġsc rape -Cor rect -4 26 -Ġav g -Dem and -ĠâĪ ¼ -Ġmicrobi ota -"} ]," -ĠSt ev -B io -ĠPlan es -Ġsuggest ive -Ġdec ipher -ĠRefuge e -ĠKe jriwal -ĠGreen peace -Ġdecl ass -ĠSound ers -Ġth o -Ġdec rypt -Ġbr ushing -ĠJane iro -ip op -S i -8 77 -ĠGeoff rey -Ġc pu -ĠHaz el -Ġview points -Ġcris py -ĠNot ification -Ġsold er -ĠMod est -ĠHem isphere -Ġcass ette -in cludes -Ġident ifiers -ĠC ALL -in cent -T odd -ĠSwe ep -Ġ3 34 -b oss -Ġsm ir -gin x -Ġtown ship -Ġg rieving -ĠMos que -Net flix -AS ED -ĠMillenn ials -oc om -19 67 -Ġbold ly -s leep -Ġes che -arij uana -Ġsw irl -ĠPen al -Ġneglig ent -ĠStephen son -K ER -ĠZ oro -ris is -Ġlocal ization -ĠSeym our -ĠAng lic -red itation -prot ection -ĠPa ige -Ġo mit -ĠR ousse -ĠT ub -Ġinv itations -t ty -Ġm oss -ph ysical -C redits -Ġan archy -Ġchild care -Ġl ull -ĠM ek -ĠL anguages -lat est -ĠSan ford -Ġus ability -Ġdiff use -ĠD ATA -Ġsp rites -ĠVeget a -ĠProm otion -ãĥ¼ ãĤ¯ -rict ing -z ee -Tur kish -ĠTD s -pro ven -57 1 -Ġsmug glers -707 10 -Ġreform ed -ĠLo is -Ġun fl -ĠWITH OUT -ĠReturn ing -ann ie -ĠTom as -Fr anc -ĠProf it -ĠSER V -ĠR umble -ik uman -es an -Ġt esters -Ġgad get -Ġbrace let -ĠF SA -comp onent -Ġparamed ics -Ġj an -ĠRem em -ĠSk inner -Ġl ov -ĠQu ake -rom a -Ġfl ask -Pr inc -Ġover power -Ġlod ging -ĠK KK -ret te -Ġabsor bs -w rote -Ġ ," -K ings -ĠH ail -ĠFall ing -xt ap -ĠHel ena -ire ns -L arry -Ġpamph let -ĠC PR -G ro -ĠHirosh ima -Ġhol istic -". 
[ -Ġdet achment -Ġas pire -Ġcompl icit -ĠGreen wood -Ġresp awn -ĠSt upid -ĠFin ished -f al -b ass -Ġab hor -Ġmock ery -ĠFe ast -VID EO -Ġcon sec -ĠHung ry -P ull -ĠH ust -it ance -? ãĢį -) -- -ĠPar allel -con v -4 69 -ha ar -w ant -P aper -m ins -ĠTor o -ĠTR UMP -ĠR ai -D W -ĠW icked -ĠL ep -Ġfun ky -Ġdetrim ent -ios is -ache v -Ġde grade -im ilation -Ġret ard -Ġfrag mentation -Ġcow boy -ĠY PG -ĠH AL -Parent s -ĠS ieg -ĠStra uss -ĠRub ber -× IJ -Fr ag -Ġp t -Ġoption ally -ĠZ IP -ĠTrans cript -ĠD well -88 2 -M erc -ĠM OT -ãĥ¯ ãĥ³ -Ġhun ts -Ġexec utes -In cludes -Ġacid ic -ĠRespons ibility -ĠD umb -we i -And erson -ĠJas per -ight on -abs olutely -Ad ult -Ġpl under -Mor ning -ĠT ours -ĠD ane -Î º -ĠT EST -ĠG ina -Ġcan ine -aw an -Ġsocial ists -ĠS oda -Ġimp etus -ĠSupplement ary -oli ath -ĠKinn ikuman -mitted ly -second s -Ġorganis ers -Ġdocument aries -Vari able -GRE EN -Ġres orts -Ġbr agging -Ġ3 68 -Art ist -w k -bl ers -Un common -ĠRet rieved -Ġhect ares -Ġtox in -r ank -Ġfaith s -ĠG raphic -Ġve c -ĠL IA -Af rican -Ġard ent -end iary -L ake -ĠD OS -cient ious -ĠOk awaru -ĠAll y -ĠTim eline -D ash -ĠI c -contin ue -Ġt idy -Ġinstinct ively -ĠP ossibly -ĠOut door -ĠWould n -Ġl ich -ĠBr ay -ĠA X -Ġà ī -Ġ+ # -\ ' -Direct ory -ab iding -Ġf eral -ic ative -but t -Ġper verse -S alt -Ġwar ped -Ġnin eteen -Ġcabin ets -Ġsrf Attach -ĠSl oan -Ġpower ing -reg ation -F light -se vere -Ġst ren -Ġc og -ap ache -Ġâ Ŀ -Ġcaf eteria -p aces -ĠGrim oire -uton ium -Ġr aining -Ġcir cling -Ġlineback ers -c redit -Ġrep atri -ĠCam den -lic ense -Ġly ric -Ġdescript or -Ġval leys -Ġre q -Ġback stage -ĠPro hibition -ĠK et -Op ening -S ym -æĸ ¹ -Ġserv ings -Ġoverse en -Ġaster oids -ĠMod s -ĠSpr inger -ĠCont ainer -è » -ĠM ens -Ġmult im -Ġfire fighter -pe c -Ġchlor ine -Ð ¼ -end i -Ġsp aring -Ġpolyg amy -ĠR N -ĠP ell -Ġt igers -Ġflash y -ĠMad ame -S word -Ġpref rontal -Ġpre requisite -uc a -Ġw ifi -Ġmiscon ception -Ġharsh ly -ĠStream ing -ot om -ĠGiul iani -foot ed -Ġtub ing -ind ividual -z ek -n 
uclear -m ol -Ġright ful -49 3 -Ġspecial ization -Ġpassion ately -ĠVel ocity -ĠAv ailability -T enn -Ġl atch -ĠSome body -Ġhel ium -cl aw -Ġdi pping -XX X -Ġinter personal -7 10 -Ġsub ter -Ġbi ologists -ĠLight ing -Ġopt ic -Ġden im -end on -ĠC orm -Ġ3 41 -ĠC oup -Ġfear less -Ġal ot -ĠCliff ord -ĠRun time -ĠProv ision -up dated -lene ck -Ġneur on -Ġgrad ing -ĠC t -sequ ence -in ia -con cept -Ġro aring -ri val -ĠCaucas ian -Ġmon og -key es -Ġappell ate -Ġlia ison -EStream Frame -ĠPl um -! . -Ġsp herical -Ġper ished -Ġbl ot -Ġben ches -Ġ4 11 -Ġpione ered -Ġhur led -Jenn ifer -ĠYose mite -Ch air -Ġreef s -Ġelect or -ĠAnt hem -65 2 -Ġun install -Ġimp ede -Ġbl inking -Ġgot o -Dec re -A ren -Ġstabil ization -ĠDis abled -ĠYanuk ovych -Ġoutlaw ed -ĠVent ura -ten ess -Ġplant ation -Ġy acht -ĠHu awei -Ġsol vent -Ġgr acious -Ġcur iously -Ġcapac itor -Ġc x -ĠRef lex -Ph ys -ĠC f -pt in -cons ervative -Ġinv ocation -c our -F N -ĠNew ly -H our -As ian -ĠLe ading -ĠAer ospace -An ne -Ġpre natal -Ġdeterior ating -H CR -ĠNorm andy -ol ini -ĠAm bro -9 10 -Ġset backs -ĠT RE -Ġs ig -ĠSc ourge -59 7 -79 8 -Game play -Ġm sec -M X -Ġprice y -ĠL LP -aker u -Ġover arching -ĠB ale -Ġworld ly -Cl ark -Ġscen ic -Ġdisl iked -ĠCont rolled -T ickets -ĠE W -ab ies -ĠPl enty -Non etheless -Ġart isan -Trans fer -ĠF amous -Ġinf ield -ble y -Ġunres olved -ĠML A -ãĤ Ĥ -Cor rection -Ġdemocr at -ĠMore no -ro cal -il ings -Ġsail or -Ġr ife -h ung -Ġtrop es -Ġsn atched -ĠL IN -ĠB ib -ES A -ĠPre v -ĠCam el -run time -Ġob noxious -4 37 -Ġsum mers -Ġunexpl ained -ĠWal ters -cal iber -Ġg ull -ĠEnd urance -ä½ ľ -Ġ3 47 -Ir ish -Ġaer obic -Ġcr amped -ĠHon olulu -à © -us erc -ec ast -AC Y -ĠQu ery -ãĤ¹ ãĥĪ -Bet a -Ġsuscept ibility -ĠSh iv -ĠLim baugh -Ġà ĸ -ĠN XT -ĠM uss -ĠBrit ons -ES CO -EG IN -Ġ% % -Ġsec ession -ĠPat ron -ĠLu a -n aires -ĠJPM organ -us b -ocy te -Ġcouncill ors -ĠLi ang -f arm -Ġnerv ously -Ġattract iveness -ĠK ov -j ump -Pl ot -Ġst ains -ĠStat ue -ĠApost les -he ter -ĠSUP PORT -Ġoverwhel m -Y 
ES -Ġ29 1 -d ensity -Ġtra pping -M it -Ġf ide -ĠPam ela -atl antic -Dam n -Ġp ts -OP A -Ġserv icing -Ġoverfl owing -ul o -ĠE rit -t icket -light ing -ĠH mm -ãĥ¼ ãĥ« -im oto -Ġchuck le -4 23 -ãģ ķ -sh ape -Ġque ues -Ġanch ors -ãĤ¼ ãĤ¦ãĤ¹ -F er -Ġaw oke -Ġ6 66 -h ands -Ġdiver gence -Ġ50 5 -T ips -Ġdep ot -Ġske w -ĠDel iver -op ot -Ġdiv ul -ĠE B -uns igned -ĠUn i -X box -Ġfor ks -Ġ7 02 -å ¯ -Ġpromot ers -ĠV apor -Ġlev ied -sl ot -Ġpig ment -Ġcyl inders -C RE -Ġsn atch -Ġperpet ually -Ġl icking -ĠFe et -ĠKra ken -ĠHold en -ĠCLS ID -m r -Ġproject or -Ġden otes -Ġchap el -ĠTor rent -b ler -R oute -ĠDef endant -ĠPublisher s -ĠM ales -ĠInn ov -ĠAg ility -rit er -ty mology -st ores -L ind -Ġf olly -ĠZur ich -B le -Ġnurt ure -Ġcoast line -uch in -D omin -Ġfri vol -ĠCons olid -res ults -M J -Ġphyl ogen -Ġha uled -ĠW iley -ĠJess ie -ĠPrep are -ĠE ps -Ġtreasure r -I AS -Ġcolon ists -Ġin und -ĠWW F -ĠCon verted -6 000 -out side -ĠApp earance -ĠRel ic -ĠM ister -s aw -Ġresult ant -Ġadject ive -ĠLaure l -ĠHind i -b da -Pe ace -Ġreb irth -Ġmembr anes -Ġforward ing -Ġcoll ided -ĠCar olyn -K ansas -5 99 -ĠSolid GoldMagikarp -Be ck -Ġstress ing -ĠGo o -ĠCooper ative -Ġf s -ĠAr chie -L iter -ĠK lopp -J erry -Ġfoot wear -War ren -Ġsc ree -h are -Under standing -P ed -Ġanth ology -ĠAnn ounce -M ega -Ġflu ent -Ġbond age -ĠDisc ount -il ial -C art -ĠNight mares -Sh am -ĠB oll -uss ie -H ttp -Atl anta -Ġun recogn -ĠB id -Ġunder grad -Ġforg iving -ĠGl over -AAAA AAAA -4 45 -V G -pa io -kill ers -Ġrespons ibly -Ġmobil ize -Ġeffect ed -ĠL umin -Ġk ale -Ġinfring ing -ann ounced -Ġf itt -b atch -ĠT ackle -ĠL ime -ĠAP P -uke mia -Ġrub y -Ġex oner -ĠCas ual -0 70 -Ġpel vic -Ġautom ate -ĠK ear -ĠCoast al -Ġcre ed -Ġbored om -ĠSt un -ri ott -Ĥ İ -Ġregener ate -Ġcomed ians -ĠOP ER -Sp ons -id ium -on is -L ocated -05 7 -Ġsusp ense -ĠD ating -C ass -Ġneoc ons -ĠShin zo -Ġaw oken -ch rist -ĠMess ages -att led -ĠSpr ay -ĠSp ice -C W -Ġshield ing -ĠG aul -Am id -Ġparam ilitary -Ġmult if -ĠTan ner -il k 
-Ġgodd amn -g ements -Ġbe friend -m obi -Ġ3 88 -fold er -acc a -Ġins in -g ap -N ev -fif th -Ġpsychiat ry -b anks -TH IS -Ġhar b -ac qu -Ġfac ade -ĠPower Point -80 3 -Ġbl uff -Sh ares -Ġfavor ing -El izabeth -Ãį Ãį -Ġr anger -77 2 -ĠAr che -h ak -ĠGen etics -ĠF EMA -Ġev olves -Ġest e -ĠP ets -ĠM é -ĠInterest ing -ĠCanter bury -ch apter -ĠStar fleet -Sp anish -Ġdraw back -ĠNor wich -9 70 -n orth -ag anda -Ġtransform ative -ram ids -bi ology -ad ay -Ġpropag ation -ĠGam ma -ĠDen ise -ĠCalcul ator -ent imes -ĠB ett -Ġapp endix -ĠHD D -AK ING -Ġst igmat -Ġhol ster -Ġord inarily -Ch ance -ĠCont rary -Ġad hesive -Ġgather s -6 12 -re au -ony ms -ew ays -Ġindu ces -Ġinterchange able -se m -Wh it -Ġtr ance -Ġincorpor ation -ĠExt ras -Fin ancial -Ġawkward ly -ĠStur geon -ĠH Y -Norm ally -ĠEnd ing -ĠAss ist -enc rypted -Ġsub jug -Ġn os -Ġfan atic -C ub -C U -?" . -Ġirre versible -å Ĥ -03 1 -ĠH AR -sp read -ul ia -= $ -Sc ope -L ots -Ġlif estyles -ol on -Ġf eds -Ġcongrat ulate -web kit -Ġindist inguishable -ĠSw ing -Ġcommand ments -qu ila -ab ella -m ethyl -ann abin -Ġo vere -Ġlob ster -ĠQU EST -ĠCONT IN -bern atorial -:::: :::: -ĠTra ve -ĠSam oa -AN I -75 2 -Ð ´ -userc ontent -ĠMod erate -y eah -ĠK itt -Ġwe e -Ġstuff ing -ĠInter vention -ĠD ign -Ġware houses -ĠF iji -Ġpel lets -Ġtake away -ĠT ABLE -ĠClass ical -col lection -Ġland fall -ĠMus cle -Ġsett les -ĠAD V -Ġ3 44 -L aura -Ġf ared -ĠPart ial -4 36 -oss ibility -ĠD aly -ĠT arant -ĠFu ji -am l -c ence -55 1 -ĠProced ures -ĠO CD -ĠU D -t in -Q UI -ach o -4 38 -Ġgl itches -Ġenchant ment -Ġcalcul ates -IR O -ĠH ua -alys es -ĠL ift -um o -Ġle apt -Ġhypothes ized -ĠGust av -it ans -VERS ION -æ ł -Rog er -Ġr and -ĠAd apter -Ġ3 31 -ĠPet ition -k ies -M ars -Ġunder cut -ze es -ĠLy ons -ĠDH CP -Miss ing -Ġretire es -Ġins idious -el i -> ) -. 
ãĢį -Ġfinal ists -ĠA ure -Ġacc user -Ġwas tes -ĠY s -ĠL ori -Ġconstitu encies -Ġsupp er -Ġmay hem -or ange -Ġmis placed -Ġmanager ial -Ġex ce -ĠCL I -Ġprim al -ĠL ent -Cry stal -h over -ĠN TS -end um -Ġd w -ĠAl c -n ostic -Ġpres erves -ĠTs arnaev -Ġtri pled -rel ative -Arc ade -k illing -ĠW EEK -ĠH anna -D ust -Com pleted -ģ « -Ġappro ves -ĠSur f -ĠLuther an -ven ants -Ġrobber ies -we ights -soft ware -at ana -ug al -Ġgrav y -ĠC ance -OLOG Y -ly ak -Ton ight -Ġunve il -Ġ19 04 -ĠMin ion -ent ious -st ice -pack ages -ĠG EAR -Ġg ol -ĠHutch inson -ĠProf ession -ĠG UN -ĠDiff erence -ĠTsuk uyomi -ĠLes bian -6 70 -Ġfug itive -ĠPlan etary --------------------------------- ------------------------ -Ġacc rued -Ġch icks -Ġsto pp -Ġblock ers -C od -Ġcomment ers -ĠSomew here -ĠPhot ographer -the me -Ġmay oral -w u -Ġanten nas -Ġrev amped -ĠSubject s -it é -im ura -Ġentr ances -liter ally -Ġten ets -ĠO MG -ĠMP H -ĠDon key -ĠOff ense -Ġ" + -Sn ap -ĠAF B -Ġan imate -ĠS od -His panic -Ġinconsist ency -D b -F Y -Ex port -Ġa pe -Ġpear l -ib el -ĠPAC s -Ġ{ \ -Ġact u -ĠHS BC -camp us -Ġpay off -Ġde ities -ĠN ato -ou ple -Ġcens ored -ĠCl ojure -Ġconf ounding -en i -Ġreck on -op he -Ġspot ting -Ġsign ifies -Ġprop el -Ġfest ive -S uggest -Ġpled ging -ĠB erman -Ġrebell ious -Ġovershadow ed -Ġinfiltr ated -j obs -67 2 -Ġscal able -Ġdomin ion -ĠNew foundland -ĠMead ow -Ġpart itions -AM I -Ġsupplement ary -str ument -Ġhair y -Ġperpet uate -Ġnuts hell -ĠPot ato -ĠHob bit -Ġcur ses -Flo at -Ġquiet er -Ġfuel ing -Ġcaps ules -ĠL ust -ĠH aunted -Exec utive -Ġchild birth -G re -Ġrad iant -å İ -Ġm alls -Ġin ept -ĠWarrant y -Ġspect ator -E h -t hens -Ġculmin ating -æ © -ary a -ãĤ ® -ilit arian -ĠOR IG -ĠSp ending -pt ives -ĠS iren -ĠRec ording -ay ne -Ġv im -Ġspr ang -T ang -ĠM FT -mor ning -ĠWe ed -m peg -cess ion -ĠCh ung -7 30 -w arning -56 2 -handed ly -P oor -P olitics -: # -Ġp ian -Ġfec es -ĠDocument ation -Ġban ished -Ġ3 99 -ĠAR C -Ġhe inous -J ake -ĠAm ir -way ne -v re -os henko -Ġnotebook s 
-Ġfound ational -Ġmarvel ous -ixt ape -Ġwithdraw als -Ġh orde -ĠD habi -is able -ĠK D -Ġcontag ious -ĠD ip -ĠAr rows -Ġpronoun s -Ġmorph ine -ĠB US -68 2 -Ġk osher -fin ished -ĠInstr uments -Ġf used -yd en -ĠSal mon -F ab -aff ected -K EN -C ENT -Dom ain -Ġpoke mon -ĠDr inking -G rowing -ĠInvestig ative -ĠA ether -em i -Ġtabl oid -Ġrep ro -ĠNot withstanding -ĠBers erker -Ġdram as -Ġclich é -Ġb ung -ĠU RI -ĠD os -0 44 -Ġpast ors -Ġl s -Ġac rylic -aun ts -Ed ward -Ġmajor ities -B ang -Ġfield ing -ĠRepl acement -ĠAl chemy -pp ard -ĠRome o -ĠSan ct -ĠLav rov -ib ble -Inst ruct -Ġimp ractical -ĠPlay boy -ce phal -Ġsw aps -Ġk an -ĠThe o -Ġillust rating -Ġdismant led -ĠTrans gender -ĠG uth -UG H -Ġtriumph ant -Ġencomp ass -Ġbook mark -udd in -j er -Ġpred icate -ES H -Ġwhen ce -ĠAB E -Ġnon profits -Se qu -Ġdi abetic -Ġp end -Ġheart felt -sh i -Ġinter acts -ĠTele com -Ġbombard ment -dep ending -ĠLow ry -ĠAd mission -ĠBl ooming -ust ration -ene gger -B rew -Ġmol ten -ĠNer d -P IN -âĸ Ģ -ave ment -Ġtou red -Ġco efficients -ĠTray von -ans son -Ġsand y -t old -fl ows -Ġpop ulous -ĠT inder -ĠBl iss -R achel -Min imum -Ġcontest ant -ĠRed uce -ĠMor se -ĠGrass ley -ĠClick er -Ġexp r -Ġs incerity -Ġmar qu -Ġelic it -ĠPro position -ĠDemon ic -Ġtac os -G reek -Ġpost war -Ġin sofar -ĠP ork -Ġ35 2 -doctor al -walk ing -Ġmid term -ĠSam my -sight ed -ĠTR ANS -ic i -AL D -ĠUS L -ĠF ISA -ĠAm pl -ĠAlex andra -ine lli -Tr ain -Ġsign ify -ĠVers us -Ġob fusc -Ġk h -Ġagg ro -ĠRen ault -Ġ3 48 -5 18 -ox icity -0 22 -ĠTw ist -Ġgoof y -D ynamic -Ġbrief ings -m ight -8 99 -Ġderog atory -T ro -Ġfor ging -ĠKor an -ĠMar ried -ĠBuc s -Ġpal ate -ĠCon version -m able -4 13 -Ġ( _ -Ġs iph -ĠN EO -col lege -Ġmarg inally -Ġfl irt -ĠTra ps -ĠP ace -é »Ĵ -Ġgoalt ender -Ġforb ids -Ġcler ks -ĠT ant -ĠRobb ins -ĠPrint ing -Ġpremie red -Ġmagn ification -ĠT G -ĠR ouse -ĠM ock -odynam ics -Ġpre clude -ism o -ĠPul itzer -Ġaval anche -ĠK odi -rib une -ĠL ena -Elect ric -Ġref inery -Ġend owed -Ġcounsel ors -Ġd olphin -ĠM 
ith -Ġarm oured -hib ited -Beg in -ĠP W -O il -ĠV or -ĠShar if -ĠFraz ier -est ate -Ġj ams -Pro xy -Ġband its -ĠPresbyter ian -ĠPrem iere -t iny -ĠCru el -Test ing -Ġhom er -ĠV ERS -ĠPro l -ĠDep osit -ĠCoff in -Ġsemin ars -Ġs ql -ĠDef endants -Altern atively -ĠR ats -ç « -ethy st -' > -Ġiss uer -58 9 -Ġch aired -ĠAccess ories -man ent -Ġmar row -ĠPrim ordial -C N -Ġlimit less -ĠCarn age -Ġund rafted -q v -IN ESS -on ew -Ġco hesion -98 7 -Ġne cks -Ġfootball er -ĠG ER -Ġdetect able -ĠSupport ing -ĠCS V -oc ally -k Hz -Ġund e -Ġsh one -Ġbud ding -tra k -Stand ing -ĠStar craft -ĠKem p -Ben ch -Ġthw arted -ĠGround s -ath i -L isa -Dial og -ĠS X -V ision -Ġingen ious -Ù IJ -Ġfost ering -ĠZ a -ĠIn gram -Ġ" @ -N aturally -6 16 -0 35 -ĠF AC -H mm -55 4 -Ġacceler ator -ĠV end -Ġsun screen -Ġtuber culosis -rav iolet -ĠFunction al -ĠEr rors -ed ar -19 66 -ĠSpect re -ĠRec ipes -88 5 -ĠM ankind -L iverpool -Ġ| -- -Ġsubst itutes -ĠX T -w ired -Ġinc o -ĠAf gh -E va -ic c -S ong -K night -Ġdilig ently -ĠBroad cast -A id -Ġaf ar -ĠH MS -aton in -ĠGr ateful -Ġfire place -ĠOm ni -e uro -ĠF RE -ĠSh ib -ĠDig est -t oggle -Ġheads ets -Ġdiff usion -ĠSqu irrel -ĠF N -Ġdark ened -out her -Ġsleep s -ĠX er -gun s -Ġset ups -Ġpars ed -Ġmamm oth -ĠCur ious -g ob -ĠFitz patrick -ĠEm il -im ov -........ ..... 
-ĠB enny -Second ly -Ġheart y -Ġcons on -st ained -Ġgal actic -cl ave -Ġplummet ed -Ġp ests -Ġsw at -Ġrefer rals -ĠLion el -h oly -Ġunder dog -ĠSl ater -ĠProv ide -ĠAm ar -ress or -å Į -ong a -Ġtim id -Ġp iety -ĠD ek -Ġsur ging -az o -Ġ6 10 -Ġdes ks -ĠSp okane -ĠAn field -Ġwars hips -ĠCob ra -Ġar ming -clus ively -ĠBad ge -ag ascar -ĠPR ESS -ĠMcK enzie -ĠFer dinand -burn ing -Af ee -Ġtyr ann -ĠI w -ĠBo one -100 7 -ĠRe pt -Ċ Âł -Ġcar avan -ĠD ill -ĠBundes liga -Ch uck -Ġheal er -ãĥ¼ãĥ Ĩ -ĠH obby -Ġneg ate -Ġcrit iques -section al -mop olitan -Ġd x -Ġouts ourcing -ĠC ipher -t ap -Sh arp -Ġup beat -Ġhang ar -Ġcru ising -ĠNi agara -Ġ3 42 -ill us -ĠS v -Ġsubt itles -Ġsqu ared -Ġbook store -Ġrevolution aries -ĠCarl ton -ab al -Ut ah -Ġdesp ise -ĠU M -cons ider -aid o -Ġc arts -ĠT urtles -Tr aining -Ġhonor ary - ¢ -Ġtri angles -4 22 -Ġreprint ed -Ġgrace ful -ĠMong olia -Ġdisrupt ions -ĠB oh -Ġ3 49 -Ġdr ains -Ġcons ulate -Ġb ends -Ġm afia -ur on -ĠF ulton -m isc -Ġren al -Ġin action -ck ing -Ġphot ons -Ġbru ised -ĠC odes -og i -Ġn ests -ĠLove ly -ĠLib re -ĠD aryl -Ġ# ## -S ys -. 
," -Ġfree zes -est ablishment -and owski -Ġcum bers -ĠSt arg -ĠBom bs -Ġleg ions -Ġhand writing -Ġgr un -ĠC ah -sequ ent -Ġm oth -ĠMS M -Ins ert -F if -Ġmot el -Ġdex ter -ĠB ild -hearted ly -Ġpro pe -ĠText ure -ĠJ unction -ynt hesis -oc ard -ĠVer a -ĠBar th -Ġμ g -Ġl ashed -Ġ35 1 -ĠZ amb -ĠSt aples -ĠCort ex -ĠCork er -Ġcontinu um -ĠWR ITE -unt a -rid or -Ġde ems -0 33 -ĠG OLD -p as -Ġrep ressive -ãĥĨ ãĤ£ -Ġbaff led -Sc ar -Ġc rave -Ġ ______ -Ġentrepreneurs hip -ĠDirector ate -Ġ' [ -Ġv ines -Ġasc ended -ĠGR OUP -ĠGood bye -Ġdo gged -ãĥ´ ãĤ¡ -Man ufact -Ġunimagin able -ri ots -ier rez -Ġrel ativity -ĠCraft ing -ra ught -ud en -c ookie -Ġassass ins -Ġdissatisf ied -ac ci -Ġcondu it -Sp read -ĠR ican -n ice -izz le -Ġsc ares -ĠWH Y -ph ans -5 35 -Ġprot racted -ĠKrist en -5 36 -ĠSc rib -ĠNe h -Ġtwent ies -Ġpredic ament -Ġhandc uffs -Ġfruit ful -ĠU L -ĠLud wig -Ġatt est -ĠBre aker -Ġbi ologically -ĠDeal er -Ġrenov ations -f w -ess en -Al ice -ĠHen ri -Ġun ilaterally -ĠS idd -h ai -ĠSt retch -S ales -Ġcumbers ome -ĠJ avier -Ġtrend y -Ġrot ting -ĠChall enges -Ġscra ps -Ġfac ets -ĠVer onica -ĠVer ge -ĠS ana -Al ien -ĠR ih -Ġrad ial -ect ar -Ġ6 30 -cl i -Mar ie -Ġwild fire -ĠCat o -h ander -Ġwait ress -Ġch ops -ĠS ECTION -Ġblunt ly -ĠCat alog -n ian -stud y -Ġpat rolling -ĠT enth -nex us -ĠN ON -op sy -Ġsc athing -s ie -Ġdeterior ated -V B -Naz is -Ġdep ictions -Ġauthent icated -ĠCon ce -k rit -Ġpromul g -ĠL ONG -U FC -ĠVis itors -ĠRec all -Ġrehab ilit -ĠSL I -Ġglac ier -ĠB ite -Ġ50 3 -Ġvom it -Ġfer mented -ĠKh alid -Ġgrad ed -ĠMag icka -ĠIch igo -power ful -ic ators -75 3 -Ġsh rew -Ġ35 6 -Ġlegal izing -Ġall otted -ĠArch demon -ith ing -igg urat -V OL -Le od -Ġo ily -Ġindu cing -Ġamy gdala -Ġadm ins -ĠAcqu isition -C AN -Ġsche matic -Ġmo an -ĠCamer oon -Ġt ink -Ġmer ry -Ġbutter flies -ĠGo ff -Ġworks pace -ĠCor ona -Ġj avascript -ĠD olphin -ĠCant or -4 64 -to e -AP S -ĠAg ing -Ġpadd ed -ĠZ heng -ĠHe ld -Ġest ranged -Ġ7 70 -. 
} -ĠDun ham -Ġsm okes -Ġcap itals -und ai -Sh in -ĠFound ing -Ġent itle -Ġcenter piece -D iscover -Ġthere to -al ert -ĠN ou -ĠAnaly st -l c -F H -FI ELD -ĠP OV -gr ay -Ġar cs -ĠH OT -Ġr s -Ġoblig atory -ĠArchitect s -ĠS ven -ĠF EC -0 200 -Christ mas -ĠAlban ia -rat om -58 7 -Ġhard ships -Ġaut os -ĠCharg es -Ġap es -Ġ3 76 -wal let -Ġintox ication -Ġgobl in -Ġ5 70 -++++++++ ++++++++ -ĠYel p -ĠMag netic -ĠBr iggs -R ail -Ġspawn s -ĠW iggins -Ġshowc ased -Ġres orted -ub en -Ġwh ipping -Ġim itate -Ġdigest ion -ĠUS PS -ĠG est -Ġye a -ĠT ight -ind al -ic as -` . -C AST -'' ; -ĠF et -opath ic -In valid -Ġregrett ed -Ġbro ccoli -ĠSc ores -e ve -Ġpost ings -Ġaccum ulating -Ġneed less -elf th -Ġmay ors -Ġsc rib -Ġanecd otes -Ġbot ched -ĠRib bon -ĠConstant ine -i uses -ess es -Ġdev ise -Comp ared -Ġp udding -Ġg arg -Ġev oke -79 7 -Ġdet ox -9 09 -ĠPie ces -ĠMcC artney -Ġmet ast -ĠK rypt -P OR -Ġt ending -ĠMerch ants -Pro of -ĠV arg -ĠPort able -ãĥ¼ãĥĨ ãĤ£ -B rain -25 00 -Ġfol iage -Ø ¹ -Ġment ors -ĠA ires -Ġminimal ist -Ġing ested -ĠTro jan -ĠQ ian -inv olved -0 27 -Ġer oded -RA FT -Ġbl urry -M ob -Ġbuff et -ĠFn atic -ae a -KN OWN -ĠIn it -s afety -en um -ACT ION -ĠCrus her -ĠD ates -Ġ ................ 
-c alling -ak ov -Ġvent ured -Ġ5 55 -au ga -H art -ĠA ero -M AC -Ġthin ly -Ġar ra -ST ATE -ild e -ĠJac qu -ĠFem ales -Ġthe orem -Ġ3 46 -Ġsmart est -ĠPU BLIC -ĠK ron -ĠB its -ĠV essel -ĠTele phone -Ġdec ap -Ġadj unct -ĠS EN -mer ga -Ġred acted -Ġpre historic -Ġexplan atory -ĠRun s -ĠUtt ar -ĠM anny -ĠAUTH OR -ĠUnle ashed -ĠBow ling -be ans -79 3 -Ġunivers es -Ġsens it -ĠK ung -re peat -ctr l -Ġp aced -Ġfull er -Cl ock -Ġrec omb -ĠF aul -ĠB unker -Ġpool ed -Ġan a -ĠM outh -LL OW -hum ane -Ġbull do -ĠMicha els -f am -Ġwreck ed -Ġport rays -ĠWh ale -ĠH es -Ġguess es -ĠBrow se -ĠL APD -Ġconsequ ential -ĠInn ocent -ĠD RAG -Ġtrans gress -ĠO aks -Ġtri via -ĠRes on -ĠA DS --- + -ĠT oll -Ġgrasp ing -ĠTHE M -ĠT ags -ĠCon clusion -Ġpract icable -Ġho op -Ġunintention ally -Ġign ite -ĠM ov -ur ized -le hem -Ter min -Ġcolour ful -ĠLin ear -ĠEll ie -G y -Ġman power -Ġj s -Ġem oji -ĠSHAR ES -_ . -0000 7 -Ġsophistic ation -Ġunders core -Ġpract ise -Ġbl ob -op ens -Uk raine -Ke eping -Y C -J R -ult imate -Cl aim -Ġautom obiles -99 3 -ste el -Ġpart ing -ĠL ank -... ? -Ġ38 5 -Ġremem brance -Ġe ased -Ġcov ari -ĠS ind -Effect ive -Ġdisse mination -ĠMo ose -ĠCl apper -br ates -App ly -Ġinv is -Ġwors ened -âĢĶ - -Ġlegisl ator -ĠL ol -ĠRow e -Ġdealers hip -um ar -id ences -Ġinvestig ates -Ġc ascade -Ġbid der -ĠB EN -Iron ically -Ġpres iding -Ġd ing -Ġcontrad icted -Ġshut s -ĠF IX -Ġ3 66 -Dist rict -Ġsin ful -ĠChar isma -o ops -Ġtot ality -Ġrest itution -ĠOpt imus -ĠD ah -Ġcl ueless -urn ed -Ġnut rit -Ġland owners -Ġfl ushed -Ġbroad en -m ie -Ġprint ln -Ġn ig -ĠCorp us -J en -Ġprot o -ĠWik imedia -ĠPal o -C OR -Ġstory lines -Ġevangel icals -ĠDar rell -Ġrot or -ĠH W -sk illed -ery l -Ġbe gg -ĠBl umenthal -Ġwe aving -Ġdown wards -ĠJack et -ĠANG EL -Te chnology -Ġes oteric -alde hyde -Ġfur iously -Ġforeign er -We ak -CH O -ĠH ound -Exper ience -ĠPlay station -ĠM IA -ĠU ng -cl oth -ag all -Ġcal ming -iz ens -St ruct -ĠW itches -ĠCeleb ration -Ġ........ ...... 
-pt roller -ĠTC U -Ġb unny -ãĥ į -ut orial -Ġup scale -ĠSt a -ĠCol ossus -Ġchlor ide -ĠZ ac -ĠRe asons -ĠBrook ings -ĠWH ITE -][ / -ĠL ose -9 05 -Ġunders ide -ern els -Ġv ape -do zen -upp et -ĠST OP -mat ical -ĠStat ements -hed dar -P AC -Custom er -Ġmem os -ĠP J -end ars -ĠLim its -l augh -Ġstabil ized -ĠALE C -Y A -Up grade -al am -Ġtechn o -Ġan ew -fore seen -Ġcolleg iate -ĠPy ro -ĠD ism -Ġfront line -Ġammon ia -I U -Qu ite -John ny -ass in -G OP -ĠSt yles -ĠSovere ign -acter ial -5 49 -ĠR IP -ĠL ists -Ġ3 64 -ĠRece p -s ocket -ĠByr d -ĠCand le -An cient -Ġappell ant -en forcement -ace a -ans ki -Ġold s -88 6 -Ġsl urs -Ġem pires -Ġbuck le -Ġalien ation -ĠAber deen -Ġunic orn -Ġoverr iding -ĠL X -pp a -Ġdesp ised -ĠB ugs -ĠB ST -S outhern -5 33 -Ġhall mark -ĠPost er -Ġstem med -Ġprincip als -ĠT ECH -ĠSand wich -It aly -Ġche esy -ĠSet TextColor -ĠProt ective -ĠC ohn -J O -apt op -Re ason -Lead er -ĠUnder stand -ĠFr idays -ĠContin uous -Ġcl ipping -ĠR ye -Ġber th -tim er -ann is -re act -Ġbuff alo -ĠPar as -Ġ6 55 -Ġpres ided -ĠSun rise -Ġve ts -Ġcl oves -ĠMcC ull -Stre ngth -G AN -Ġill iter -ĠPric ing -l é -Ġresist or -Ġbr un -ĠSuff olk -Ñ ĭ -ĠL iver -Re leased -Ġwhat s -8 60 -ĠMe asures -Ġden ouncing -ĠRy zen -Ġsou ven -Ġcareg ivers -ch ini -ĠScar lett -Ġt rough -Cong ratulations -Ġtax is -ĠTrad ition -j it -Ġtable top -Ġhither to -Ġdis information -off ensive -h ra -ĠDISTR ICT -Ġcompl icate -chen ko -ĠRecon struction -Ġpalp able -Ġa usp -Ġ4 28 -Ġshowc ases -ĠPublic ation -know ledge -inn on -4 19 -Ġretri eval -and ers -Ġref ute -Ġinqu ired -g ur -Ġneg ativity -Ġcons erve -Ġafter life -Ġpres upp -ĠGill espie -Ġm t -ĠD N -T ap -Ġper pend -ĠS my -does n -Ġsp illing -Ġhyp ers -K ate -® , -ke pt -ĠP owered -Ġj a -ĠK lux -ard e -ab an -Ġ4 44 -Ġflatt ened -ĠImprove ments -urg a -ĠK und -Ġins cribed -Ġfac ult -Ġunpre pared -ĠCons umers -Ġsatisf ies -Ġpul monary -Ġinf iltration -Ġex ternally -Ġcongrat ulations -ag han -Ġair liner -Ġfl ung -Ġfly ers -G D -Ġsnipp ets -Ġrec 
ursive -Ġmaster ing -L ex -Ġovert ly -v g -Ġluck ily -Ġenc ro -ĠLanc et -ĠAbyss al -function al -Ġs ow -Ġsqu id -Ġnar ration -Ġn aughty -ĠHon our -ĠSpart ans -Ġsh atter -ĠTac oma -ĠCal ories -ĠR aces -Sub mit -Ġpurpose fully -w av -ĠY ok -F est -ĠG err -Met ro -Ġit iner -f amous -Ġ" { -in line -was her -Iss ue -ĠCL IENT -oz o -Vers ions -7 25 -ĠGl ock -Ġshield ed -ĠPC R -ENC Y -ĠWe ld -ĠSim pl -Ġredirect ed -ĠK ham -Ġ( > -Ġlab ou -Ġdi apers -ss l -Ġcell ar -organ isms -ore sc -ĠBer ks -did n -Sh ipping -C hest -Ġund one -Ġmillion aire -Ġc ords -ĠYoung er -appropri ately -Ġsequ els -u ve -ant icipated -Ġle wd -ĠSh irt -ĠDmit ry -V eter -Ġsl aying -ĠY ar -Ġcompl ication -I owa -ĠEric a -ĠBL M -g irlfriend -b odied -6 26 -19 63 -Ġintermedi ary -Ġcons olation -M ask -ĠSi em -ow an -Beg inning -Ġfix me -Ġculmin ated -Ġcon duc -ĠVolunte er -Ġpos itional -Ġgre ets -ĠDefin itions -Ġthink er -Ġingen uity -Ġfresh men -ĠMom ents -Ġ35 7 -ate urs -ĠFed Ex -s g -69 4 -Ġdwind ling -ĠBO X -sel age -Ġt mp -Ġst en -ĠS ut -Ġneighbourhood s -Ġclass mate -f ledged -Ġleft ists -Ġclim ates -ATH ER -ĠScy the -ul iffe -Ġs ag -Ġho pped -ĠF t -ĠE ck -ĠC K -ĠDo omsday -k ids -Ġgas ped -Ġmon iker -ĠL od -ĠC FL -t ions -r ums -fol ios -Ġm d -Ġunc anny -Ġtrans ports -ĠLab rador -Ġrail ways -Ġappl iance -ĠCTR L -æ Ģ -Pop ulation -ĠConfeder acy -Ġunb earable -Ġdors al -ĠIn form -op ted -ĠK ILL -Mar x -Ġhypoc ritical -q us -ĠN umerous -ĠGeorg ian -ĠAmbro se -ĠL och -Ġgu bernatorial -ĠX eon -ĠSupp orts -ens er -ee ly -ĠAven ger -19 65 -Ar my -Ġju xtap -Ġcho pping -ĠSpl ash -ĠS ustainable -ĠFin ch -Ġ18 61 -ict ive -at meal -ĠG ohan -Ġlights aber -ĠG PA -ug u -ĠRE PL -vari able -Ġher pes -Ġdesert s -ac iously -Ġsitu ational -week ly -ob l -Ġtext ile -ĠCorn wall -Ġcontrace ptives -ĠA ke -] - -ä¹ ĭ -: , -ĠW em -ĠB ihar -Ġ' . 
-Ġbe re -Ġanal ogue -ĠCook ies -Ġtake off -Whe el -Ġmaj estic -Ġcomm uting -0 23 -ĠCor pse -ass ment -min i -Ġgor illa -ĠAl as -ere e -Ġacquaint ances -ĠAd vantage -Ġspirit ually -Ġey ed -pm wiki -ĠE nder -Ġtrans lucent -Ġnight time -ĠIM AGES -5 45 -ĠK amp -ĠFre ak -Ġ ig -Port land -4 32 -ĠM ata -Ġmar ines -Ġh ors -ater asu -ĠAtt ribution -Ġ-------- - -Ġk ins -ĠBEL OW -++ + -Ġre eling -ol ed -Ġcl utter -ĠRel ative -Ġ4 27 -B US -Ġa vert -ĠChe ong -ĠA ble -ĠPry or -Develop er -Ġen cyclopedia -ĠUSA F -ĠG arry -Sp ain -Bl ocks -Ġexp osition -ĠGamer Gate -W OR -Ġstockp ile -Ġclot hed -ĠT one -ĠR ue -t umblr -Ġtreacher ous -Ġf rying -Ñ Į -ĠS ph -Ġrest raints -Ġemb odies -ĠG es -S afety -Ġnegoti ators -min ing -ĠAppalach ian -L OS -ĠJenn a -Ġpass ers -ç ĭ -sn ap -Ġshort en -creat or -Ġinn umerable -uther land -67 4 -ĠW OM -ĠAs cend -ĠArm ory -ĠTrans action -K ick -Ġsuit case -day Name -Ġwaste ful -mar riage -ĠMcC abe -ite ch -ĠO ss -Cl osure -ĠTreasure r -Ġindec ent -ĠD ull -Ġresid ences -19 59 -ĠS ettlement -Ham ilton -Ġself ies -ĠRank ing -ĠBark ley -ĠB ore -ĠW CS -ĠMar itime -ĠH uh -ĠForest ry -Ġcultiv ating -ĠBall ard -Ġg arrison -ĠSD L -9 30 -Ġnas cent -Ġirresist ible -Ġaw fully -\/ \/ -Ġequ ate -Ġanthrop ology -ĠSylv ia -Ġintest ine -Ġinnoc uous -cess ive -ag ra -ĠMet roid -G rant -8 55 -ģ ĸ -Ġ" _ -ãĥĥ ãĥī -Ġappra isal -ĠFred dy -04 6 -Ġ40 6 -Ġ18 30 -Ġd ocking -St atic -Ġp ont -ĠVolt age -ĠSt ead -ĠMort gage -ĠJon ah -Y L -CLASS IFIED -Ġas bestos -nik ov -Ġcoll agen -ĠOrb ital -P ocket -7 99 -Ġhy brids -inc hes -Ġinv oice -und y -Ġinequ alities -T rend -w ashed -B ALL -Ġluc id -ĠComment ary -Ġw itty -Br andon -Ġbru ising -Ġ6 20 -es cent -box ing -P OL -Ġ3 78 -R ect -Ġlic ences -ĠMcG ee -p ressed -D anny -Ġj ammed -ord inate -Ġle th -Ġdistingu ishes -ĠYam aha -IL S -ĠH ume -ĠC ategories -Rober ts -Ch art -Ġbeet le -ĠGra veyard -Ġ($ ) -o ÄŁ -Ġtw ilight -are lla -á ½ -Ġbooth s -ĠH HS -ĠFeld man -Ġexcav ation -Ġphilosoph ies -at ography -ĠGar age -te chnology -Ġunfor 
gettable -Ġver ifying -Ġsubord inates -E ls -Ġne b -G aming -EN A -ĠAchieve ment -it ters -ĠG abe -Ġd umps -for cer -Ġpo ignant -ĠM BA -ĠHe idi -ime i -Ġm ages -Ġliber ate -Ġcircum cised -ĠMer maid -ĠMat th -t ogether -ĠW ichita -Ġstore front -ĠAd in -V II -Four th -Ġexplore rs -W ER -Not able -Bro ok -m ens -F aith --------- - -ĠJ ou -¬ ¼ -Ġpine apple -Ġam alg -el n -ark able -ĠãĤµ ãĥ¼ãĥĨãĤ£ -ĠãĤµãĥ¼ãĥĨãĤ£ ãĥ¯ãĥ³ -Ġov arian -ĠE choes -Ġhairc ut -Ġp av -Ġch illed -anas ia -Ġsty led -Ġd ab -ni per -Ġminister ial -ĠD UP -T an -Ġsul ph -ĠD eter -ĠBo hem -od an -Ġeduc ator -â ĵĺ -sp ir -Ch icken -ĠE leanor -Ġqu i -Ġheav iest -Ġgrasp ed -U RA -Ġcro oked -Jess ica -pro blem -Ġpred etermined -Ġman iac -Ġbreath s -ĠLauder dale -Ġh obbies -y z -Cr ime -Ġcharism a -d L -Ġle aping -Ġk ittens -Ang elo -ĠJ ACK -ĠSu zanne -Ġhal ting -ENT ION -Ġswall owing -ĠEarthqu ake -Ġeight eenth -ĠN IC -ĠIN F -ĠCons cious -Ġparticular s -circ le -7 40 -Ġbene volent -Ġ7 47 -Ġ4 90 -Ġr undown -ĠVal erie -ĠB UR -Ġcivil isation -ĠS chn -W B -ot ide -intern ational -Ġj ohn -Ġ19 02 -Ġpe anuts -Ġflav ored -k us -Ġro ared -Ġcut off -é £ -Ġorn ament -Ġarchitect ures -Ġ3 69 -ol or -ĠWild e -ĠC RC -ĠAdjust ed -Ġprov oking -land ish -Ġrational ity -Ġjust ifies -Ġdisp el -Ġa meric -ĠPol es -Ø © -Ġen vis -ĠD oodle -ä½ ¿ -igs aw -auld ron -Techn ical -T een -up hem -ĠX iang -Ġdetract ors -ĠZ i -ĠJournal ists -Ġconduc ive -ĠVolunte ers -Ġs d -Know ing -Ġtrans missions -ĠPL AN -ĠL IB -Ġall uded -Ġob e -Ġd ope -ĠGold stein -Ġwavelength s -ĠDest ination -nd a -ug i -Ġattent ive -ĠLe an -ral tar -Ġman g -mb uds -ak ings -b ender -Ġacc ol -Ġcraw led -N OW -Min nesota -Ġflour ished -ĠZ up -ĠSuper visor -ĠOliv ier -Ex cellent -Ġwid en -D one -Ġw ig -Ġmiscon ceptions -Cor p -W an -Ġvener able -ĠNot ably -ĠKling on -an imate -Bo ost -ĠS AY -miss ing -ibli ography -mel on -Ġpay day -Ø ³ -bo le -Ġve iled -ĠAl phabet -It alian -Ġever lasting -ĠR IS -ĠC ree -rom pt -Ġh ating -Ġgrin ning -Ġge ographically -OS H -Ġwe eping 
-ĠÂłĠÂłĠÂłĠÂł ĠÂłĠÂłĠÂłĠÂł -Ġimpe cc -Let ter -Ġblo ated -PL A -ĠFe in -Ġper sever -Th under -Ġa ur -ĠR L -Ġpit falls -âĸ º -Ġpredomin ant -Ġ5 25 -7 18 -AP E -7 14 -Ġfarm land -ĠQ iao -Ġv iolet -ĠBah amas -Ġinflic ting -ĠE fficiency -Ġhome brew -Ġundert ook -Ġcur ly -ĠHard ing -man ia -59 6 -Ġtem pered -Ġhar rowing -ĠP ledge -ĠFranken stein -è ª -M otion -Ġpredict ably -ĠExpl osion -oc using -er d -col o -FF ER -Ġback field -ĠV IDE -ue bl -N arr -ĠArg ument -Ġgen omic -Ġbout ique -Ġbatt ed -ĠB inary -Ġg amb -ĠRh ythm -67 3 -Ġa float -ĠOlymp ia -Y ING -Ġend if -is in -Ġwin ters -Ġsc attering -I v -D istance -Ġtr u -ĠCom fort -Ġne xus -Ġair flow -ĠByz antine -p ayers -con i -ĠB etsy -D eal -ĠN ug -ĠContin ent -red ibly -Ġoptim izing -al beit -Ġec static -ĠPro to -ç · -iv ot -âĸ Ħ -em p -rou nder -Ġcl out -ĠI ST -66 3 -ĠDoll ars -ĠD AC -Ġsubsc ribed -Ġrehears al -Ġam ps -ĠSh ang -es m -Ġspr inkle -Ġassail ant -ĠO o -ĠCoin base -T act -Ġret ina -Ġn uns -R ON -att o -Ġj ug -ĠSV G -Ġb ikini -ĠFI LE -ĠFound ers -ep ort -ĠK P -Ġrest ores -ĠTh ick -Ġash ore -Ġappro vals -R ender -M AG -G raham -ĠCort ana -ãĥ³ ãĤ¸ -ss h -or ians -ars ity -ĠInsp ired -u pper -Ġsign alling -Ġreb uke -Ġfl ares -Ġdownt ime -Stud ies -Ġstagn ation -ĠSequ ence -Ġgr unt -Ġass ures -ĠPL A -59 2 -Ġintra ven -d epend -Sus an -ĠManz iel -Man ia -Cont ract -Ġsl ams -Ġcult ured -Ġcred itor -L IST -ĠH UM -ĠChatt anooga -serv ed -Ġclo aked -ĠF TP -p owder -ĠSt ella -uct ive -Ġcheap ly -ĠMU CH -ĠGalile o -Ġsu ites -spe ech -Ġdeliber ations -ĠCh ips -« ĺ -Bal ance -ĠWyn ne -ĠAk ron -Ass et -Ġhon oured -Ġed ged -Like wise -anim ous -ĠW age -ĠEz ek -ad vertisement -ĠRT X -ĠM AD -Ġmigr ating -ĠS QU -Ġ4 75 -Ed ited -Ġshorth and -ĠBas ics -Ġcro tch -ĠEV EN -Ġv m -effic iency -Ġcal ves -ĠF rie -ĠBrill iant -Ġstri kers -Ġrepent ance -Ġarter ies -r l -B ed -h ap -Ġcrypt ography -ĠSab res -Ġ4 14 -vi ks -ih ara -aps es -T alking -Ġintertw ined -Ġdoc ks -Ġalle le -ĠArt ifact -ĠH IM -t orn -ç ķ -Ġop acity -ĠE ly -os uke 
-Ġn ipple -Ġhand written -ĠV K -ĠChamber lain -ĠLa os -ig raph -g row -Ġtr illions -Ġdescend ant -ĠSail or -as uring -Ġce ilings -ĠWare house -f lying -ĠGl ow -Ġn ont -Ġmiscar riage -Ġrig s -Ġmin istries -Ġelabor ated -Ġdel usional -ĠHum ane -Ġ3 79 -n ets -Ġblack out -add ers -Ġn p -ĠT ire -ro sc -Ġsub div -Ġlink age -Ġchron ological -ĠHER O -Ġres ettlement -ĠVin yl -Ġpast oral -ĠMob il -ĠBar bar -Co oldown -ĠF ritz -c riminal -re pe -Ġbell ig -ĠBre ed -Ġ4 18 -Ġsem blance -ij k -Ġcur tail -Ġclin ch -cont ained -ĠProm pt -ast on -Ġw i -Ġpursu its -5 15 -ĠGl oss -Ġfl ips -Ġcoup ons -Ġcl oning -ĠLike ly -Rem oved -ĠQu artz -r ices -ĠSpe ars -Ġp ious -Ġdep reciation -ĠD are -oun ces -am az -O nt -Ġp innacle -d ocker -0 26 -ĠW yr -ĠPro per -Ë Ī -n il -By tes -Ġseek er -t rial -Ġunf olds -ĠMar se -Ġextravag ant -ĠSurviv ors -RED ACTED -ĠSpeed way -ĠCra igslist -sub mit -ĠGener ations -Ġup holding -Ġblood stream -ĠMiss ions -ĠL awn -Ġlim bo -ene i -H uh -ĠWild cats -pre p -ĠMark us -ĠFor bidden -rit ic -IN O -Ġexhib iting -requ ent -ch uk -Ġhabit ual -ĠComp atibility -Dr ag -RIP T -uj ah -GR OUND -Ġdelinqu ent -Ġburn er -Ġcontempor aries -Ġgimm ick -load s -Ġno zzle -p odcast -ĠW ak -ĠStat en -ĠK uh -ãģ ĵ -inter rupted -Ġinv incible -ĠBurn ett -cig arette -ĠPeb ble -ĠTem porary -ĠMar ino -58 2 -Ġwast eland -ident ly -T x -Ġr ite -ĠPan asonic -ĠM iddles -ĠHort on -ae us -Ġc uring -Ġm ats -Ġadj ourn -Ġfears ome -pe z -bo ats -Ġpro pell -Ġconflic ted -ĠAng er -Ġinsurg ent -K arl -Ġco ales -Ġsouth western -Ġdis su -ĠO vert -******** **** -Ġbox ed -ĠBr une -aa a -Ġgard ening -ĠEng el -tr acks -Ġpur ified -Ġplace holder -ĠL ikes -Ġd an -G ab -Ġe ct -ĠF aw -ĠEl iot -Ġ' , -otrop ic -ĠRu in -hed on -Ġca ul -Ġa ft -ĠCad illac -gh a -ass ian -ud eb -ĠT ick -Ġadjust s -AR GET -5 37 -isc he -ant y -ĠFried rich -ĠBl izz -ĠA OL -Camp aign -Ġmamm al -ĠVe il -ĠK ev -ĠMaur it -ĠDam ien -N ation -E astern -Ġ{ : -Ġ= ================================ -Ġstereotyp ical -Ġatt ic -ĠCy borg -requ 
ire -Ġaward ing -ĠPap ua -bt n -b ent -B oo -Ġ( = -ĠX ander -ĠSomers et -Ġcatch y -Ġcert ify -STR UCT -Ġit al -Ġt ides -ĠBr ands -G ray -comp etitive -Ġcur ator -ĠD G -omin ium -ĠGM Os -ci ating -ĠCarm en -ow ard -Balt imore -Ġr gb -C u -Ġwip es -spe ll -IT NESS -Ġsummar izes -ĠRe vis -Ġwhistlebl owers -ĠBre ach -Ġcro chet -k os -ews ki -Ġrep et -Ġcrim son -ĠKar achi -read able -dim ension -ĠI gor -ild ed -ĠZ ed -ĠKe ane -ĠCos metic -DE P -Ġretreat ing -ĠU A -ens ical -Ġd usk -ĠDick ens -Ġaren as -ĠPass age -level s -Ġcur v -P ope -Ġch ores -ĠEl ise -ĠComp ass -b ub -Ġmamm alian -ĠSans krit -ĠAN C -ĠCr ack -Q ual -L aun -amp unk -Ġlearn ers -Ġglam orous -Ġfur the -erm ott -c and -Gener ic -Ġnarr ated -Ġdisorder ly -ĠTrans actions -ĠDet ention -ĠR oku -Ä į -Ġunder statement -ĠS aur -ĠRodrig o -ĠAS AP -S in -Ġre joice -Method s -Ġelectro de -Ġworsh ipped -Ġid i -ĠPhys icians -Ġpop up -Ġde ft -ĠRem oval -ĠBu enos -ver bs -Ġfun k -ush a -rict ion -ore a -ĠBang alore -ĠKen obi -zz i -Ġnorm ative -Ġgobl ins -Ġcaf es -ĠUN CLASSIFIED -ĠF ired -S IGN -Ġs clerosis -ĠV oter -ĠSon ny -ĠExt end -ĠEV s -Ar senal -Ġp si -Ġwid est -ĠT us -Ġlo oms -Ġjust ifying -ĠGr anger -è ¯ -Ref er -58 3 -Ġflour ishing -ab re -Ġr ave -ĠCont ra -Ġ18 98 -Add s -Ġf ul -ĠCo oke -some one -= # -67 1 -Ġy ak -Ġar te -ĠMis cellaneous -ĠDet ection -ĠCl ancy -â ģ -ass ies -Ġval iant -ĠFemin ist -cor ruption -V el -P ear -Ġsucc inct -Ġquick est -k w -Ġsp itting -ĠL ibraries -åħ ī -ant z -D ad -ĠSpec ifications -rup ulous -and r -RES ULTS -Ġsnow ball -Ġpred is -ĠB axter -ĠNurs ing -ĠCh aff -s we -Ġout age -Ġnest ing -Ġnotor iety -tr igger -on ite -j on -Ġf ou -ook ed -ĠCelebr ity -re ality -Ġfat ig -Ġhug ging -Ġbother s -ĠPan zer -ĠCh andra -fig ured -Ġvol ts -ĠCloud s -Ġfee ble -ĠCur ve -ĠAs us -78 6 -abs or -ĠV ICE -ĠH ess -Ġmanufact ures -Ġgri zz -ĠPower ful -ac id -Ġsub sections -ĠKrug man -ĠAl ps -is u -Ġsequ est -ĠUlt ron -ĠT inker -ĠGo ose -Ġmism atch -Att orney -Ġmorph ology -ĠSix ers -ut tered -ĠE 
LECT -gr an -Rus sell -ĠG SL -Ġfort night -Ġ. ) -Ġapost le -pr one -el ist -Unt itled -ĠIm plementation -ist ors -Ġtank er -Ġpl ush -Ġattend ants -ĠT ik -ĠGreen wich -ĠY on -ĠSP L -cell s -unt led -S olution -ĠQu é -Ġvac ated -Ġupt ick -ĠMer idian -æ ĥ -ĠDr ill -9 25 -58 4 -Ġrenov ated -ĠKub rick -zy k -Ġl ousy -pp el -ohyd rate -ĠI zzy -lesi astical -CC C -ĠAj ax -Ġad apters -ĠPetra eus -Ġaffirm ation -ĠST OR -le ms -ad oes -ĠConstantin ople -Ġp onies -Ġl ighthouse -Ġadherent s -ĠBre es -omorph ic -Fight ing -Ġpl aster -ĠP VC -ĠOb st -Ġdear ly -ĠTo oth -icks on -Ġsh aming -P lex -A gg -ĠâĢ¦ " -Ġsub reddits -Ġpige on -ĠResident ial -ĠPass ing -Ġl um -ĠP ension -Ġpessim istic -Ġ4 32 -z inski -c ade -0 75 -Ġapolog ised -iy ah -Put ting -Ġgloom y -ĠLy me -=-=-=-=- =-=-=-=- -ĠT ome -ĠPsych iatric -ĠH IT -c ms -ap olog -Ġbreak er -Ġdeep en -Ġtheor ist -ĠHigh lands -Ġb aker -Ġst aples -Ġinterf ered -ĠAb ortion -jo ined -ch u -Ġform ulate -Ġvacc inations -Ġban ter -phe us -Ġoutfield er -ĠM eter -Ġ# #### -Ġ18 95 -Ġnarrow ing -ĠST ORY -f p -ĠC ST -ign ore -Ġproclaim ing -ĠR U -ĠB ALL -yn a -65 3 -Ġpos it -P RE -59 4 -ĠRegist rar -ĠPil grim -ic io -Ġpre tt -Ġlif eless -Ġ__ _ -Ne igh -ĠCh urches -orn o -Ġor cs -Ġkind red -ĠAud it -Ġmillenn ial -ĠPers ia -g ravity -ĠDis ability -ĠD ARK -W s -od on -Ġgrand daughter -ĠBro oke -ĠA DA -ER A -Ġpick ups -ĠWil kinson -ĠSh ards -ĠN K -Ġexp el -ĠKis lyak -Ġj argon -Ġpolar ized -ian e -Pub lisher -Ġreb utt -Ġapprehens ion -ĠK essler -Ġpr ism -F UL -19 64 -ĠL oll -ä ¿ -le thal -Å Ł -Ġg hetto -Ġb oulder -ĠSlow ly -ĠOsc ars -ĠInst ruction -ĠUl tr -ĠM oe -N ich -ĠP ATH -( * -ĠRE LEASE -un ing -rou se -en eg -Ġre imb -ĠDet ected -Do S -Ġster ling -Ġaggreg ation -ĠLone ly -ĠAtt end -hig her -Ġairst rike -ks on -SE LECT -Ġdef lation -ĠHer rera -C ole -rit ch -Ġadvis able -F ax -Ġwork around -Ġp id -mort em -ers en -Ġtyp o -Ġal um -78 2 -ĠJam al -script s -Ġcapt ives -ĠPres ence -ĠLie berman -angel o -Ġalcohol ism -ass i -Ġrec ite -Ġgap ing 
-Ġbask ets -ĠG ou -Brow ser -ne au -Ġcorrect ive -und a -sc oring -ĠX D -Ġfil ament -Ġdeep ening -ĠStain less -Int eger -Ġbu ggy -Ġten ancy -ĠMub arak -Ġt uple -ĠD roid -ĠS itting -Ġforfe it -ĠRasm ussen -ixt ies -es i -ĠKim mel -Ġmetic ulously -Ġap opt -ĠS eller -08 8 -ec ake -hem atically -T N -Ġmind less -Ġdig s -ĠAcc ord -ons ense -em ing -br ace -Ġe Book -ĠDist ribut -ĠInvest ments -w t -] ), -beh avior -56 3 -Ġbl inding -ĠPro testers -top ia -Ġreb orn -ĠKel vin -ĠDo ver -ĠD airy -ĠOut s -Ġ[ / -Ï Ģ -b p -ĠVan ity -ĠRec ap -ĠHOU SE -ĠF ACE -Ġ4 22 -69 2 -ĠAnt ioch -cook ed -Ġcoll ide -Ġa pr -Ġsle eper -ĠJar vis -Ġalternative ly -ĠLe aves -ĠM aw -Ġantiqu ity -ĠAdin ida -Ġab user -Poké mon -Ġass orted -ĠRev ision -ĠP iano -ĠG ideon -O cean -Ġsal on -Ġbust ling -ogn itive -ĠRah man -Ġwa iter -Ġpres ets -ĠO sh -ĠG HC -oper ator -Ġrept iles -Ġ4 13 -ĠG arr -ĠCh ak -Ġhas hes -Ġfail ings -Ġfolk lore -Ġab l -ĠC ena -ĠMac Arthur -ĠCOUR T -Ġperipher y -app ers -Ġreck oned -ĠInf lu -ĠC ET -Ġ3 72 -ĠDefin itive -ass ault -4 21 -Ġreservoir s -Ġd ives -ĠCo il -DA Q -Ġvivid ly -ĠR J -ĠBel lev -Ġec lectic -ĠShow down -ĠK M -ip ed -reet ings -ĠAs uka -L iberal -ĠÏ Ħ -Ġbystand ers -ĠGood win -uk ong -S it -ĠT rem -Ġcrim inally -ĠCirc us -ch rome -88 7 -Ġnan op -ĠOb i -ĠL OW -o gh -ĠAuth ors -ob yl -Ur ban -Ġt i -ĠWe ir -t rap -ag y -Ġparent heses -Ġout numbered -Ġcounter productive -ĠTob ias -ub is -P arser -ST AR -Ġsyn aptic -ĠG ears -Ġh iber -Ġdebunk ed -Ġex alted -aw atts -H OU -Ch urch -ĠPix ie -ĠU ri -ĠForm ation -ĠPred iction -C EO -Ġthro tt -ĠBrit ann -ĠMad agascar -ë ĭ -Ġbill boards -ĠRPG s -ĠBe es -complete ly -F IL -Ġdoes nt -ĠGreen berg -re ys -Ġsl ing -Ġempt ied -ĠPix ar -ĠDh arma -l uck -ingu ished -Ġend ot -Ġbab ys -05 9 -che st -r ats -Ġr idden -Ġbeet les -Ġillum inating -Ġfict itious -ĠProv incial -Ġ7 68 -Ġshe pherd -ĠR ender -Ġ18 96 -C rew -Ġmold ed -ĠXia omi -ĠSp iral -Ġdel im -Ġorgan ising -Ġho ops -ĠBe i -z hen -Ġfuck in -Ġdec ad -Ġun biased -am my -sw ing 
-Ġsmugg led -Ġk ios -ĠP ERSON -ĠInquis itor -Ġsnow y -Ġscrap ing -ĠBurg ess -P tr -ag ame -R W -Ġdro id -ĠL ys -ĠCass andra -Jac ob -Ġ35 4 -Ġpast ure -Ġfr anc -ĠScot ch -ĠEnd s -ĠI GF -def inition -Ġhyster ical -ĠBrown e -77 1 -Ġmobil ization -æ ķ -iqu eness -Th or -Ġspear headed -Ġembro iled -Ġconject ure -jud icial -Ch oice -Ġpaper back -P ir -Ġrec overs -ĠSur ge -ĠSh ogun -ĠPed iatrics -ãģ ł -Ġsweep s -ĠLabor atories -ĠP acks -al us -add in -Ġhead lights -g ra -Ev idence -COL OR -Ad min -Ĭ ± -Ġconco ct -s ufficient -Ġun marked -Ġrich ness -Ġdiss ertation -Ġseason ing -Ġg ib -ĠM ages -un ctions -ĠN id -che at -ĠTM Z -c itizens -ĠCatholic ism -n b -Ġdisemb ark -ĠPROG RAM -a ques -Ty ler -Or g -ĠSl ay -ĠN ero -ĠTown send -IN TON -te le -Ġmes mer -9 01 -Ġfire ball -ev idence -aff iliated -ĠFrench man -ĠAugust a -0 21 -Ġs led -Ġre used -ĠImmun ity -Ġwrest le -assemb led -Mar ia -Ġgun shots -ĠBarb ie -Ġcannabin oids -ĠTo ast -ĠK inder -IR D -Ġre juven -Ġg ore -Ġrupt ure -Ġbre aching -ĠCart oon -Ġ4 55 -ĠPale o -6 14 -Ġspe ars -ĠAm es -ab us -Mad ison -GR OUP -Ġab orted -y ah -Ġfel on -Ġcaus ation -Ġprep aid -Ġp itted -op lan -ĠShel ley -ĠRus so -ĠP agan -Ġwill fully -ĠCan aver -und rum -ĠSal ary -ĠAr paio -read er -ĠR ational -ĠOver se -ĠCa uses -Ġ* . 
-Ġw ob -Ke ith -ĠCons ent -man ac -77 3 -6 23 -Ġfate ful -et imes -Ġspir ited -ĠD ys -Ġhe gemony -Ġboy cot -ĠEn rique -em outh -Ġtim elines -ĠSah ara -ĠRel ax -ĠQuin cy -ĠLess ons -ĠE QU -SE A -N K -ĠCost co -Incre ase -Ġmotiv ating -ĠCh ong -am aru -ĠDiv ide -Ġped igree -ĠTasman ia -ĠPrel ude -L as -9 40 -57 4 -Ġch au -ĠSp iegel -un ic --- > -ĠPhil ips -ĠKaf ka -Ġuphe aval -Ġsent imental -Ġsa x -ĠAk ira -ser ial -Mat rix -Ġelect ing -Ġcomment er -ĠNeb ula -ple ts -ĠNad u -ĠAd ren -Ġen shr -ĠR AND -fin ancial -ĠCly de -uther ford -Ġsign age -Ġde line -Ġphosph ate -rovers ial -f ascist -ĠV all -ĠBeth lehem -Ġfor s -Ġeng lish -S olid -N ature -Ġv a -ĠGu ests -Ġtant al -Ġauto immune -;;;;;;;; ;;;; -ĠTot ally -ĠO v -Ġdef ences -ĠCoc onut -Ġtranqu il -Ġpl oy -Ġflav ours -ĠFl ask -ãĤ¨ ãĥ« -ĠWest on -ĠVol vo -8 70 -Ġmicro phones -ver bal -R PG -Ġi ii -; } -0 28 -Ġhead lined -Ġprim ed -Ġho ard -ĠSh ad -ĠEN TER -Ġtri angular -Ġcap it -l ik -ĠAn cients -Ġl ash -Ġconv ol -Ġcolon el -en emy -G ra -Ġpub s -ut ters -Ġassign s -ĠPen et -ĠMon strous -ĠBow en -il ver -H aunted -ĠD ing -start ed -pl in -Ġcontamin ants -ĠDO E -ff en -ĠTechn ician -R y -Ġrob bers -Ġhot line -ĠGuard iola -ĠKau fman -row er -ĠDres den -ĠAl pine -E lf -Ġf mt -ĠS ard -urs es -g pu -Un ix -Ġunequiv ocally -ĠCitizens hip -qu ad -m ire -ĠS weeney -B attery -6 15 -Ġpanc akes -Ġo ats -M aps -ĠCont rast -mbuds man -ĠE PS -Ġsub committee -Ġsour cing -Ġs izing -ĠBuff er -ĠMand atory -Ġmoder ates -ĠPattern s -ĠCh ocobo -ĠZ an -ĠSTAT ES -ĠJud ging -ĠIn her -* : -Ġb il -ĠY en -Ġexh ilar -oll ower -z ers -Ġsn ug -max imum -Ġdesp icable -ĠP ACK -ĠAn nex -Ġsarcast ic -Ġlate x -Ġt amp -ĠS ao -b ah -ĠRe verend -ĠChin atown -ĠA UT -d ocumented -ĠGA BA -ĠCan aan -ĠÙ ħ -Ġgovern s -pre v -E sc -ĠEst imates -OS P -Ġendeav our -ĠCl osing -omet ime -every one -Ġwor sen -Ġsc anners -Ġdev iations -ĠRobot ics -ĠCom pton -Ġsorce rer -Ġend ogenous -Ġem ulation -ĠPier cing -ĠA ph -ĠS ocket -Ġb ould -ĠO U -ĠBorder lands -Ġ18 63 -G 
ordon -ĠW TO -Ġrestrict s -Ġmosa ic -Ġmel odies -ç Ħ -T ar -Ġdis son -ĠProv ides -Ġ ...... -b ek -F IX -Ġbro om -ans hip -Do ctors -Ġner ds -ĠReg ions -na issance -Ġmet e -Ġcre pt -pl ings -Ġgirlfriend s -kn it -ig ent -ow e -Ġus hered -ĠB az -M obil -4 34 -ĠPres ents -orig in -Ġins omnia -ĠA ux -4 39 -ĠCh ili -irs ch -G AME -Ġgest ation -alg ia -rom ising -$ , -c row -ĠIn spection -at omic -Rel ations -J OHN -rom an -ĠClock work -ĠBak r -m one -M ET -Ġthirst y -Ġb c -Ġfacult ies -R um -Ġnu ance -ĠD arius -ple ting -fter s -etch up -Reg istration -ĠK E -R ah -Ġpref erential -ĠL ash -ĠH H -Val id -ĠN AV -Ġstar ve -ĠG ong -z ynski -ĠAct ress -Ġw ik -Ġun accompanied -lv l -Br ide -AD S -ĠCommand o -ĠVaugh n -Wal let -Ġho pping -ĠV ie -Ġcave ats -Ġal as -if led -ab use -66 1 -Ġib n -Ġg ul -Ġrob bing -t il -IL A -Ġmit igating -Ġapt ly -Ġty rant -Ġmid day -ĠGil more -ĠDe cker -Ġ§ § -part ial -Ex actly -Ġphen otype -Ġ[+ ] -ĠP lex -ĠI ps -vers ions -Ġe book -Ġch ic -g ross -":" "},{" -ĠSur prisingly -M organ -Ġresid ues -ĠConf ederation -in feld -Ġl yr -mod erate -Ġperpend icular -V K -Ġsynchron ized -Ġrefres hed -Ġad ore -ĠTor ment -ol ina -Ġ26 00 -Item Tracker -Ġp ies -ĠF AT -ĠR HP -0 48 -ĠRES P -ĠB J -all ows -P and -Ġunw elcome -ĠV oc -ĠBast ard -ĠO W -ĠL AR -ĠHeal er -Environment al -ĠKen yan -ĠTr ance -ĠP ats -Ġali ases -ĠGar field -Ġcampaign er -Ġadvance ments -ĠOkin awa -ĠC oh -ows ky -Ġstar ved -Ġsize able -Ġ: -) -Ġm RNA -Ġsusp ensions -ist ar -Scot land -Pr in --------------------------------- ---------------- -Ġ50 2 -Ġteasp oons -Ġ10 50 -Ġcoerc ive -ĠMason ic -edd ed -ĠPass enger -Ġl att -Ġbr aces -ĠSt eal -ĠNY T -ĠK ats -ĠCel est -ae z -T u -ĠCoul ter -ðŁ ĺ -Fl ickr -ĠWil mington -ith s -++ ; -Ġv ending -Ġneg ro -ĠPh i -ĠYellow stone -Call back -Ġsh ampoo -ĠSh ades -w at -Ġsuper human -Ġridic uled -Ġhol iest -om bo -Ġintern s -Ġh one -ĠPar agu -UR I -Ġd angling -ãĤ » -so v -ict ional -av ailability -Ġrev ocation -Ġd ow -in ic -ĠTHE IR -Ġis o -Ġout ings -ĠLeth 
al -Ġ) )) -Ġinacc ur -Ġout landish -Ġan us -let ico -id on -l ol -Ġun regulated -Ġsuccumb ed -Ġc uff -ĠWast eland -let al -Ġsub str -Ġcoff ers -Ġautom akers -ov i -ĠX ue -ĠDayton a -Ġjar ring -Ġf umes -Ġdisband ed -z ik -itt on -Ġstriking ly -Ġsp ores -Ad apter -.) : -ĠLynd on -ival ry -Ġor ally -Ġtumult uous -Ġdisple asure -Ġcon es -or rect -Ġappe ase -Ġder by -ĠTrip oli -ĠAl ess -Ġp oked -ĠGu ilty -v P -En ough -Ġorig inals -6 99 -Ġrabb i -Ġproverb ial -Ġpostp one -el ope -ĠMist y -Ġstaff ed -ĠUn employment -redit ary -Ġdilig ent -re comm -me asures -as in -8 25 -Ġpond s -Ġmm ol -ĠS AR -ĠC ARE -Ġ3 71 -Ġclen ched -ĠCors air -Ġcaric ature -z n -att ach -ĠSch ro -spe ak -p ainted -ĠS uc -ĠE NT -Ġcell ul -ĠP aid -di agn -WH ERE -Ġtext ed -B arn -Ġret racted -ĠRe ferred -S av -Ġup keep -Ġwork places -ĠTok ens -Ġampl ify -cl inical -Ġmult ic -mber g -Ġconvol uted -Reg ion -5 65 -ĠTop ic -Ġsn ail -Ġsal ine -Ġins urrection -ĠPet r -f orts -B AT -ĠNav ajo -Ġrud imentary -ĠLak sh -OND ON -Me asure -Ġtransform er -ĠGodd ard -Ġcoinc ides -ir in -R ex -ĠB ok -qu it -Ġshotgun s -Ġprolet arian -Ġsc orp -ĠAd a -5 14 -Ġsl ander -record ed -Ġemb ell -ris ome -Ġapolog izing -ĠMul cair -ĠGib raltar -Cl a -Ġall ot -ĠAtt ention -Ġ4 33 -le ave -Ġwh ine -ĠIss a -ĠFa ust -ĠBar ron -hen y -Ġvictim ized -J ews -Ġnurt uring -ett el -W inged -ĠSub tle -Ġflavor ful -ĠRep s -eng ed -call back -Ġdirection al -Ġcl asp -ĠDirect ions -plan et -icult ure -Hel per -ic ion -ac ia -Ġç ¥ŀ -Ġsur ges -Ġcan oe -ĠPrem iership -be en -Ġdef ied -ĠTro oper -Ġtrip od -Ġgas p -ĠE uph -ĠAd s -vern ight -high ly -R ole -Ġent angled -ĠZe it -6 18 -ĠRust y -Ġhaven s -ĠVaugh an -HA EL -ĠSER VICE -/ , -Ġstr icken -Ġdel usions -Ġb is -ĠH af -Ġgrat ification -Ġent icing -UN CH -Ad ams -ĠOL ED -ĠBeet le -Ġ18 99 -ĠSO FTWARE -ateg or -V L -ĠTot em -ĠG ators -AT URES -Ġimped ance -Reg istered -ĠC ary -ĠAer ial -on ne -en ium -Ġd red -ĠBe g -Ġconcurrent ly -Ġsuper power -ĠX an -j ew -imes ter -ĠDick inson -âĶ ģ -F la -Ġp 
ree -ĠRoll ins -© ¶æ -Ġden omination -ĠL ana -5 16 -Ġinc iting -sc ribed -j uries -ĠWond ers -app roximately -Ġsusp ending -Ġmountain ous -ĠL augh -oid al -N s -Det ect -) = -ĠL uthor -ĠSchwarz enegger -ĠMull er -ĠDev i -ec ycle -J ar -6 13 -ĠL ongh -B ah -ĠSP ORTS -n w -Ġref inement -Ġwater ways -Ġd iner -Bl ade -68 3 -F ac -Ġinitial s -Ġro g -Ġparan ormal -B UT -Ġ[ ( -ĠSw anson -ĠM esh -âĸ ¬ -Impro ve -ĠRad iation -ĠEst her -ĠE sk -ĠA ly -ik y -Ġir rad -ĠBuck ingham -Ġref ill -Ġ. _ -Re pe -CON CLUS -Ġdifferent iated -Ġchi rop -ĠAt kins -Pat tern -Ġexc ise -Ġcab al -N SA -ĠST A -ĠS IL -ĠPar aly -Ġr ye -ĠHow ell -ĠCount down -ness es -alys ed -Ġres ize -ãĤ ½ -Ġbudget ary -ĠStr as -w ang -Ġap iece -Ġprecinct s -Ġpe ach -Ġsky line -Ġ35 3 -pop ular -App earances -ĠMechan ics -ĠDev Online -S ullivan -Z en -Ġp u -op olis -5 44 -Ġde form -Ġcounter act -ĠL ange -Ġ4 17 -Con sole -77 4 -Ġnodd ing -Ġpopul ism -Ġhe p -Ġcoun selling -compl iance -U FF -Ġunden iably -Ġrail ing -ĠHor owitz -ĠSim one -ĠBung ie -Ġa k -ĠTal ks -x ff -fl ake -Cr ash -Ġsweat y -Ġban quet -ĠOFF IC -Ġinvent ive -Ġastron omer -ĠStam ford -ĠSc are -ĠGRE EN -olic ited -Ġr usher -Ġcent rist -ight ing -Ġsub class -Ġdis av -Ġdef und -ĠN anto -oci ate -m ast -Ġpac if -Ġm end -e ers -imm igration -ESS ION -Ġnumber ing -Ġlaugh able -ĠEnd ed -v iation -em ark -P itt -Ġmetic ulous -ĠL F -Ġcongrat ulated -ĠBir ch -Ġsway ed -Ġsemif inals -Ġhum ankind -m atter -ĠEqu ip -opa usal -S aid -ĠLay out -Ġvo icing -Ġth ug -Ġporn ographic -I PS -Ġmo aning -Ġgriev ance -Ġconf essions -esc al -TEXT URE -Aut hent -os aurus -P urchase -Ġreleg ation -al ter -ĠÂł Âł -Ġr iddled -Ġo gre -ĠLow ell -Occ up -E at -ĠHy der -ĠAdvis er -Com merce -H unt -ĠOr th -ĠComp etitive -ĠCL A -CD C -Ġsal ads -F le -Ġindustrial ized -` , -ĠO WN -Ġbec k -ĠPart icularly -oub t -Ġm M -ĠHuss ain -ĠChen nai -Ġ9 20 -Ġappoint ing -ĠCull en -,,,, ,,,, -Ġp ores -ver ified -Ġbi ochemical -em ate -Ġcoward ly -ĠHels inki -ĠEthiop ian -S OURCE -ER C -est ro -Ġbi 
otech -ĠS our -Ġbrew er -Bloom berg -Ġintens ify -Gl ass -an co -ĠF DR -gre SQL -ĠF ires -©¶æ ¥µ -ec o -100 1 -ĠHom eless -Ġinstant aneous -ĠH aste -ig el -D iamond -Ġp aving -Ġland fill -Ġd ads -h oun -: ] -Ġinc endiary -ĠLiving ston -ĠHil bert -ĠChe cks -st yles -in ators -ĠCl ive -ph rine -Ġchimpan zees -Ġp all -ĠJ M -ĠAad haar -ð Ŀ -Ġachie vable -dis abled -P ET -OOOO OOOO -M ot -Ġint angible -Ġbal let -ĠWe bs -ĠEst imated -Effect s -Ġb ailed -Josh ua -Ġturb ulence -Ġoccup ant -ĠDay light -Ġ36 1 -me et -Ġstat ically -Ġon look -Ġk i -il legal -Ġvel vet -Ġdehyd ration -Ġacqu ies -ĠRe z -ak ura -ĠU pton -at ro -Ġincomp rehensible -Ġback door -ĠRh ino -7 27 -Ġmath s -) + -Ġhe resy -Ġd f -ĠRoc he -ĠL ydia -Ġpanc reat -re ply -arre ll -Ġsolicit ation -Ġcirc adian -BI P -Ġfor ay -Ġcrypt ic -iz u -ime o -ĠTom ato -ĠH oms -ex amination -Ġqu arry -ĠVal iant -ĠJer icho -ĠIN CLUD -Ġ18 40 -5 19 -Ġres ists -Ġsnap shots -ĠSp ur -ĠAnt iqu -Log in -Ġbest selling -Ġant ic -ĠS utherland -ãĤ¢ ãĥ« -Ġ~ / -ĠP arm -è ĥ -P ages -int ensity -Ġimm obil -Ġ18 65 -zz o -Ġn ifty -Ġf entanyl -ĠPres ervation -op hen -Ġd arts -ĠD inosaur -po inters -ĠR ite -s uggest -aware ness -ĠSher idan -Ġst ances -Ġsor cery -Ġper jury -ĠNik ola -ie ver -Ġf iance -ĠJordan ian -ĠBall oon -Ġn ab -Ġk b -Ġhuman ities -ĠTan aka -hill ary -Ġconsult ancy -ĠZ ub -Ġrem ission -Ġconf id -CH Q -ĠF ug -Ġimpro vis -Y ep -/ _ -Ġunwilling ness -Ġport folios -05 5 -ĠInstruct or -aim an -Ġclaim ants -M bps -ĠBy e -re ceived -T weet -Ġind emn -ri z -am ara -N at -Ġeval uates -ĠL ur -ep ad -FO X -ĠTh ro -Ġrust y -Ġbed rock -ĠOp rah -J B -Ġmanip ulative -Ġwill ful -Ġrel apse -Ġext ant -The me -S ensor -ĠSt ability -go vern -Ġpo ppy -Ġkn ack -Ġins ulated -ĠT ile -ĠExt rem -Ġunt old -Ġconver ge -Ġref uel -ig roup -Ġdistort ions -Ġrav aged -Ġmechan ically -ĠRe illy -ĠN ose -ĠIncarn ation -ĠBeck y -abb ling -Ġt aco -Ġr ake -Ġmelanch oly -Ġillust rious -ĠDart mouth -Gu ide -ĠR azer -ĠBen z -Ult imate -ĠSur prise -Ġpage ant -off er 
-Who ever -Ġw iser -Ġchem ist -ĠHE LL -ĠBul k -Ġpl utonium -ĠCO VER -Ö ¼ -f ailed -Ġtire lessly -Ġinf ertility -ĠTr ident -ĠShow time -ĠC iv -V ice -requ ires -itt ance -Ġun controlled -interest ing -56 1 -Ġinnov ate -ateg ic -L ie -ĠS elling -U l -Ġsav ior -ĠT osh -Ġsw ast -P ASS -Ġr ink -Ġcard io -ĠI ro -ud i -Ġv antage -Ġv ans -ĠNi ño -+ = -Ġpropag ate -< ? -Ġmethod ological -204 39 -Ġtrig lycer -Ġing rained -ĠAn notations -arr anted -6 17 -ĠS odium -ĠA AC -techn ical -mult ipl -Ġ3 73 -å ĭ -Ġdec isively -Ġboost ers -Ġdessert s -ĠGren ade -Ġtest ifying -ĠSc ully -ID s -Ġlock down -ĠSc her -ĠR é -ĠWhit man -ĠRams ay -rem ote -Ġh ikers -ĠHy undai -Ġcons cientious -Ġcler ics -ĠSiber ian -ut i -is bury -Ġrel ayed -Ġqu artz -ĠC BI -seek ers -ull a -Ġweld ing -ĠSh al -ble acher -T ai -ĠSam son -Ġt umble -ĠInvest or -Ġsub contract -ĠShin ra -ow icz -j andro -d ad -Ġtermin ating -ĠNe ural -ä» £ -Ġleak age -ĠMid lands -ĠCaucas us -í ķ -c it -ll an -iv ably -ĠAlb ion -Ġ4 57 -Ġregist rations -Ġcomr ade -Ġclip board -0 47 -Ġdiscour aging -ĠO ops -Ad apt -Ġem path -n v -ĠPR OT -ĠDon n -ĠP ax -ĠB ayer -t is -Squ are -Ġfoot prints -part icip -ĠChile an -B rend -ind ucing -M agn -Ġclub house -ĠMagn um -Ġenc amp -ĠEth nic -uch a -ere y -Ġw atered -ĠCal ais -Ġcomplex ion -Ġsect s -Ġren ters -Ġbr as -oÄŁ an -Time out -Man agement -Ġinf ographic -P okemon -Cl ar -Ġloc ality -Ġfl ora -as el -P ont -Ġpop ulate -ĠO ng -Ġsubs istence -Ġa uctions -ĠMcA uliffe -ĠL OOK -br inger -Ġtit an -Ġmanif old -ĠâĹ ı -Ġcalibr ated -Ġcal iphate -ĠSH E -ĠCommission ers -ce ivable -j c -W inner -5 24 -Ġcond one -Other wise -Ġp iling -Ġem body -ĠCrime an -ut ics -ĠEx hibition -Ġ4 26 -e ering -Ġv ying -ĠH UGE -* =- -Ġprin cipled -à ¦ -Ġquir ks -ĠEdit ors -put ing -G ES -ĠF TA -ठ¾ -add on -ĠH AM -ĠFrie za -W oman -. 
$ -Ġc rib -ĠHer od -Ġtim ers -ĠSp aces -ĠMac intosh -at aka -Ġgl ide -Ġsmell ing -ĠB AL -Ġun su -Ġcond os -Ġbicy cl -ĠRev ival -55 3 -Ġjugg ling -H ug -ĠKardash ian -ĠBalk ans -mult iple -Ġnutrit ious -oc ry -19 00 -Ġinteg rates -Ġad joining -ĠF older -roll ment -ven ient -Ġu ber -y i -Ġwh iff -ĠJu ven -ĠB orough -net te -Ġb ilingual -ĠSp arks -ph thal -man ufact -Ġt outing -ĠPH I -Ke efe -Rew ard -Ġinf all -ĠTem per -typ ically -ĠNik ol -Ġregular s -Ġpseud onym -Ġexhib itions -Ġbl aster -Ġ40 9 -w arming -Ġrever ber -Ġrecip rocal -Ġ6 70 -ip ient -b ett -ĠBe gins -Ġit ching -ĠPh ar -Ass uming -Ġem itting -ĠML G -Ġbirth place -Ġt aunt -ĠL uffy -ĠAm it -Ġcir cled -ĠN ost -enn ett -Ġde forestation -ĠHist orically -ĠEvery day -Ġovert ake -79 2 -Ġn un -ĠLuc ia -Ġaccompan ies -ĠSe eking -ĠTr ash -an ism -R ogue -Ġnorth western -ĠSupplement al -ĠNY U -ĠF RI -ĠSat isf -x es -5 17 -Ġreass ured -Ġspor adic -Ġ7 01 -Ġmed ial -Ġcannabin oid -Ġbarbar ic -Ġep is -ĠExplos ive -ĠD ough -Ġuns olved -Support ed -Ġacknowled gment -sp awn -Ġkit chens -Ġ- = -talk ing -ic ist -ĠPeg asus -ĠPS U -Ġphot on -ĠAuthent ication -R G -@# & -76 2 -ĠCl air -Ġdi aper -Ġbr ist -ĠProsecut ors -ĠJ em -6 28 -ĠEvery where -ĠJean ne -equ ality -ãĥ© ãĥ³ -object s -ĠPel icans -Ġ39 2 -Ġbl u -b ys -ĠA go -Ġinstruction al -Ġdiscrim inating -ĠTR AN -ĠCorn el -ag os -Ġty re -Ġas piration -ĠBrid gewater -": - -! ". 
-ĠEn s -ĠCoc o -P ie -Ġdet ach -ĠC ouch -Ġphys ique -ĠOccup ations -osc opic -en ough -B uzz -App earance -Y P -Ġrac er -Ġcompl icity -r pm -T oy -Ġinterrupt s -ĠCat alyst -Ġut ilitarian -imp act -Ġsp aghetti -Ġp orous -Ġeste emed -Ġinc iner -ĠI OC -7 48 -Ġesp resso -ĠSm ile -abil ia -6 35 -Ġmathematic ian -Ġ4 24 -ĠK L -ĠH IP -Ġover heard -ĠT ud -ĠT ec -Ġqu izz -Ġfl attering -Ġcon n -âĢ İ -Ġatt aches -ĠR OS -ĠAC S -Ġt cp -ĠSh ame -sk ip -res pected -ĠTrin idad -gr ain -Ġfooth old -ĠUnch arted -ĠJul io -z l -av ored -ĠAn xiety -er rors -ĠCent auri -its ch -D addy -Ġclutch ing -ĠIm plement -ĠGut ierrez -Ġ7 60 -Ġtele portation -end ra -Ġrevers ible -st ros -Ad venture -08 3 -Ġliber ating -Ġas phalt -ĠSp end -AR DS -im sy -PR ES -ĠEmer ging -Ġwild fires -Ġtechn ologically -Ġem its -ĠART ICLE -Ġirregular ities -Ġcher ish -çī Ī -Ġst ink -ĠR ost -Econom ic -Ġcough ing -ĠMcC ann -pro perties -ilant ro -Ġreneg oti -Trans lation -Ġin quest -ĠGra pe -oot ers -gu i -ĠSwords man -ace ae -h itting -Ġr c -Ġexert ed -ĠS AP -it ent -Ġperil ous -Ġobsc urity -Ġassass inate -Ġab original -Ġresc uing -ĠSh attered -lock ing -all ion -Ch anging -ĠHar rington -ĠB ord -ĠAfgh ans -Jam ie -aret z -ĠAugust us -Ġ38 6 -8 30 -Ġj og -ok ingly -Tr igger -ĠH OR -Stat istics -Ġviewers hip -Ġadd itives -h ur -Ġmaxim izing -ĠR ove -ĠLou ie -ĠBuck et -ĠCHR IST -ou sel -Ġstre aks -ir ted -Ġt ert -Ġcolonial ism -Ġbur ying -y k -Cond ition -ĠDPR K -By Id -75 1 -âĹ ¼ -Ġwor risome -Ġvoc ational -sl ice -Ġsa ils -ĠCorrection al -95 4 -Ġt ul -K id -l uster -Ġfam ilial -ĠSp it -ĠEp iscopal -Specific ally -ĠVol cano -run s -q s -Ġve tted -Ġcram med -t rop -here r -Thank fully -Ġper cussion -Ġor anges -Ġround up -Ġ4 99 -x ious -Char acters -ĠZion ism -ĠR ao -ÃĽ ÃĽ -W F -Ġunintention al -ONE Y -Gr ab -Com mercial -Ġglut amate -ĠMcK enna -ru ciating -ning ton -ih u -Ch an -ĠSw ap -Ġleaf lets -Ġfunction ally -er ous -F arm -Ġcal oric -ĠLiter ally -con cert -Ġshe nan -Ġrep aid -ey es -Ġbas hing -ĠG orge -Ġcollabor 
ations -Ġun account -itch ie -Ġteam work -pp elin -Ġpip ing -Ġmin ced -Ġd iam -ri eg -Ġmasc ara -Ġsuck er -ĠMo ons -App s -ĠPe ck -Ġper v -ĠFl oat -o ley -ĠN ish -im ize -Ġarom atic -u in -end ish -! / -ĠB icycle -ĠAS IC -ile ged -ĠQuad ro -ios yn -Ġlock out -ĠW ink -SP EC -Attempt s -Ġseed ed -red o -ias is -Ġsn ag -ãĥķ ãĤ© -ãĤ ¶ -Ġground ing -Ġrelie ver -Ġfrivol ous -ĠG ifts -ĠF aces -Es pecially -Ġmicrobi ome -im ag -ĠSch l -ĠP les -ĠBle ach -ĠIr win -ĠE aton -ĠDisc iple -Ġmultipl ication -Ġcoer ced -Ġ4 19 -st h -E vil -B omb -Ġex orc -Ġstag gered -L ESS -Ġinert ia -ĠED IT -Ġgo b -Tr aditional -Ġclass y -Lear y -ĠP AGE -yr s -Ġtrans porter -Ġmat ured -Ġhij ab -Ġbi ome -Where as -Ġex termination -ĠT ues -ĠT akeru -ĠAud rey -er ial -ĠAd en -aff les -Ġnarciss istic -ĠB aird -UT F -I re -ĠCon nie -Ch amp -Ġwhis pering -ĠH att -D K -Ġdis infect -Ġdeduct ed -Ġpart ake -Ġdown grade -ĠEs ports -ĠContin uing -Ġdemocr atically -icro bial -itt a -Ġlim estone -Ġexempt ed -ĠFren zy -H erm -7 28 -Ġfled gling -Met a -765 61 -69 3 -% : -w ake -5 26 -ĠDis cipline -Ġvirgin ity -ĠLeg ions -ĠFrank ie -int ent -Ġrest rooms -ĠRou ter -da q -Ġobjection able -âĨ ij -w ark -ĠRah ul -g ain -activ ation -abs olute -ĠAccess ed -Ġ24 00 -ogg les -Ġsecond ly -ĠDEF ENSE -Ġpost age -wra pper -sh arp -7 29 -Ġcommun icates -Ġadd on -ĠMil itia -H ong -Ġsl umped -ĠJP EG -ĠI car -ad ish -68 1 -Ġmaj esty -ĠWolf gang -ĠEl astic -u per -Ġv iz -Ġunconscious ly -ĠST D -ĠS ass -Ġflower ing -ĠHel ic -ĠDra per -ĠAm ateur -Ġman ure -Ġdis ingen -ĠLe i -br ing -9 49 -Ġinhib ited -Ġhead quartered -Ġen igmatic -�� � -Ġred ress -R H -Ġratt led -Ġd iction -l io -ĠT BA -ĠSN AP -C alling -Ġfasc ists -ĠD ove -iew icz -0 36 -Ġco asts -ĠR ect -Ġ) ] -L ot -6 29 -ĠS EM -ĠPeters en -ĠExpl ain -ĠBo ards -ĠBe zos -ĠJ ournals -Ġ20 24 -p arser -Ġmist rust -Ġgr ate -ĠL ocked -bo a -S aint -g aming -Ġvow el -in ately -bl ow -All ah -Ġun matched -Ġb ordering -ĠExp end -n r -Or acle -rou ch -Ġcont iguous -ac us -Ġdist raught -58 
1 -Ġanat omical -O X -ap ixel -8 33 -ĠPL US -Ġres usc -Ġab iding -57 3 -Ġvac ancies -Em ily -Ġhyp othal -ĠWer ner -ĠWe e -ĠDJ s -5 13 -Ġwitch craft -Ġac upuncture -ent ary -benef it -Product s -ĠP SP -ĠMP G -ĠJ inn -ĠJ arrett -Ġ4 45 -ĠIm aging -ĠP yth -Fin ish -Ġte x -Ġjuven iles -Ġhero ism -Ġdoubt less -ĠA ki -ĠT end -ĠPatri arch -Ġbit ters -ĠTele communications -it atively -ag na -Ġr g -ĠS OLD -Ġcomp ulsion -ĠN asa -ĠKath ryn -Ġmillion aires -Ġintrins ically -Ġbolst ered -time out -fl o -Ġtut or -p our -Stat ement -Ġ{ * -ĠRud olph -ĠKimber ly -rog ens -adi q -] + -Ġindign ation -Ġfract uring -ĠRe leases -ĠGr ain -pro tein -L ago -Ġvac ations -Ġboot ed -ĠTH REE -ĠH G -oresc ence -Ġt f -Ġso ar -iosyn cr -Ġgl ances -ĠSp oon -ĠJ ury -ĠCow boy -Ġcreat ively -Hig her -Ġsolic itor -Ġhaw k -ac io -89 6 -Ġsuperf lu -Ġbombs hell -ct ure -Ġbroker age -Ġraid ing -Ġf rench -Ġang led -Trans action -ĠGen ocide -u pe -ĠHait ian -57 2 -! : -Ġunwitting ly -iter ator -sc roll -Ġtall ied -Ġbi omedical -ĠC ARD -Ġe uphem -Ġbrain storm -a quin -K o -Mic helle -ĠR unes -ĠBall istic -ud ers -Ġmod esty -ĠiP ads -ĠEzek iel -Y E -Ġstars hip -Ġpower fully -Ġper l -ĠSh ade -ĠQu art -ĠE EG -Ġfisher man -OS ED -ĠTyp ical -df x -Ġmes hes -Ġet ched -worth iness -Ġtopp led -Ġ3 96 -or ius -We iss -Ġmy sql -ĠVal halla -Ù Ĵ -le asing -Ġrec omp -rap nel -S el -04 3 -Ġder ailed -ĠGu ides -IR T -Ġde human -ĠBritt any -" )) -Ġex claim -Ġb alk -Ġ8 40 -CLA IM -int el -L AB -Ġpe gged -Ġast roph -sm oking -Ġrig ging -Ġfix ation -Ġcat apult -ins ide -ĠC ascade -ĠBolshe vik -G aza -Dep th -Ġloud spe -Ġalmond s -me yer -l eness -j en -f resh -Ġunbeat en -ĠSqu id -ĠPres umably -Tim er -B W -Ġro sters -Ġell ipt -ĠHar riet -dat abase -ĠMut ual -ĠComm odore -uk ed -kn ife -ĠCOMM UN -h ya -Ġmel ts -arch ives -Ġrat ification -Ġmultip lying -Ġinter oper -Ġasc ert -w ings -ver ting -ĠScorp ion -ay e -ĠPorts mouth -ĠM TA -n it -iaz ep -Ġqu arantine -Ġslides how -Ġcent imeters -Ġsyn opsis -Ġsp ate -th irst -Ġnom inating 
-ĠMel vin -Pre view -Ġthro b -Ġgener ational -ĠRad ius -rest ling -put able -aw ar -N ECT -Ġunlaw fully -ĠRevel ations -Wik ipedia -sur v -Ġeye ing -ij n -ĠF W -Ġbr unt -Ġinter stellar -Ġcl itor -ĠCroat ian -ĠCh ic -ev a -ĠDis app -ĠA kin -iner ies -d ust -Interest ed -Ġgen esis -ĠE ucl -ö n -p icking -Ġmut ated -Ġdisappro ve -ĠHD L -Ġ6 25 -Ì ¶ -c ancer -Ġsqu ats -Ġle vers -Disc uss -= ] -D ex -ĠVIDE OS -A UD -Ġtrans act -ĠKin ect -ĠK uala -ĠC yp -7 47 -Ġsh attering -Ġarsen ic -ĠInt ake -ĠAngel o -ĠQu it -ĠK he -Ġ18 93 -M aker -0 29 -ĠPain ting -Dis able -9 16 -Ġanal ges -Ġtact ile -Ġprop hes -Ġd iced -ĠTravel s -ĠHe ader -ĠClub s -Ass istant -Ġinc rim -Ġd ips -Ġcruc ifix -ĠShan ahan -ĠInter pret -Ġ40 90 -al ogy -abb a -Ġsimul ac -hus band -S IM -Ġrecy cle -uc er -ed ged -Ġre naissance -ĠBomb ay -Cath olic -ĠL INE -ĠCl othing -re ports -Ġpl aus -Ġd ag -ĠM ace -Z I -Ġintr uder -ĠVeter inary -g ru -Ġsne aky -ĠS ie -ĠC innamon -P OSE -Ġcou rier -ĠC NS -Ġemanc ipation -s it -Ġplay through -ĠFac ilities -v irt -ĠG auntlet -Thom pson -Ġunbeliev ably -Param eters -Ġst itching -ign e -ĠTH ESE -Priv acy -Ġshenan igans -Ġvit ri -ĠVal id -59 1 -Ń · -ĠProt otype -ink a -SC P -ĠT id -è Ī -old ed -Ġindividual ity -Ġbark ing -Ġm ars -ĠW D -Ġ8 20 -Ġt ir -Ġsl apping -Ġdisgr untled -ĠAng ola -ri us -ĠTorn ado -ĠTh urs -Ġcapt cha -Ġang st -ĠP og -ĠAssass ins -ĠAd idas -Ġjoy ful -Ġwh ining -Emer gency -Ġphosph orus -Ġatt rition -oph on -ĠTimber wolves -ĠJ ah -ĠBr inging -ĠW ad -ĠEn sure -oh l -ĠX ie -omm el -c mp -Ġz ipper -Ġrel at -ĠCor ridor -m ilo -T ING -Av g -Ġcro pped -] } -Ġr aged -ĠLump ur -ĠGuer rero -our ke -N ut -Ġoff sets -og lu -dr m -Ġmort als -lat able -Ġdismiss ive -ä¸ ī -Ġthro ats -Ġchips et -ĠSpot light -Catal og -art ist -G b -Ġch illy -Ġst oked -Ġ3 74 -W ard -L atin -Ġf iasco -Ġble ach -Ġb rav -Enh anced -Ġin oc -ĠFior ina -_ > -Ġle ukemia -Ġel uc -Ġannoun cer -ĠLith uan -ĠArm ageddon -å ĩ -Len in -ĠR uk -Ġpe pp -ĠRom antic -ĠP IT -ĠInter stellar -ĠAt kinson -R aid 
-J s -Go al -C ourse -Ġvan ishing -es ley -ĠR ounds -Els a -59 3 -Ġredund ancy -ĠST AND -Ġprop hetic -Ġhabit able -ry u -Ġfaint ly -M ODE -Ġfl anked -IR C -Aw esome -Ġsp urious -ĠZ ah -ĠMS G -Ġsh ading -Ġmotiv ational -ĠSant ana -ĠS PR -Ġexc ruciating -om ial -ĠM iko -ĠLe opard -A byss -Ġ[ | -d irty -Ġbath s -Ġdem oral -and re -P B -Ġun ification -Ġsac rament -Ġ[ & -Ġpric eless -Ġgel atin -Ġeman ating -ĠAll aah -98 6 -Ġout burst -Ġer as -ĠX VI -ĠSP I -O tt -ĠLaz arus -PL IED -F lying -blog s -W isconsin -R aven -Ġreb ate -Ġcreep s -ĠSp an -ĠPain ter -ĠKir a -ĠAm os -ĠCor vette -Cons umer -ĠRec over -ck i -Ġpes ky -ĠIn vention -Compan ies -Ġchalleng ers -ad emic -ĠUkrain ians -ĠNeuro log -ĠFors aken -Ġent rants -Ġemb attled -Ġdef unct -ĠGlac ier -Ġpo isons -ĠH orses -m akes -ĠD irt -Ġ4 23 -hh h -ĠTrans formation -QUI RE -................ .. -Ġtrave ller -ĠSe xy -ĠK ern -ip olar -Ġransom ware -oooooooo oooooooo -E c -rub y -Prof essional -ĠOut break -arg ument -G rey -ĠFif a -ĠCH O -ĠFOR M -ĠAm trak -- [ -Ġcr adle -Ġantioxid ants -ãģ®å ® -7 36 -ĠNAS L -ĠContribut ions -Ind iana -ĠST EP -C SS -Ġsal ient -Ġall ocations -yr ights -Ġm ashed -ĠCut ter -Sex ual -Ġp ounded -Ġfan base -Ġc asc -ĠTrans parency -Ġanaly tic -ĠSummon er -× ŀ -ĠAD C -det ail -Ġvan quished -Ġcr abs -ar ie -Dest roy -ĠS ack -Ġtrans istor -Al abama -ĠK oen -ĠFisher ies -c one -Ġannex ed -ĠM GM -es a -Ġf aked -ĠCong ratulations -Ġhind ered -Ġcorrection al -ĠI TV -lee ve -Ġin appropriately -lic ks -Ġtresp ass -Ġp aws -Ġnegoti ator -ĠChrist ensen -lim its -ĠDian ne -Ġeleg ance -ĠContract s -an ke -Ob j -Ġvigil ance -Ġcast les -ĠN AD -ĠHol o -Ġemph atically -ĠTit us -ĠServ ing -ĠRich ie -ĠP igs -5 68 -Ġanim osity -ĠAtt ributes -ĠU riel -M Q -my ra -ĠApplic ant -Ġpsychiat rists -ĠV ij -ĠAb by -ag ree -P ush -Ġk Wh -hib a -Ġinc ite -ĠWe asley -ĠTax i -minist ic -hy per -ĠF arn -Ġ6 01 -ĠNation wide -F ake -95 2 -Ġma ize -Ġinteract ed -Ġtransition ed -Ġparas itic -Ġharm onic -Ġdec aying -Ġbas eless -ns ics 
-Ġtrans pired -Ġabund antly -ĠFore nsic -Ġtread mill -ĠJ av -ab and -Ġssh d -Ġfront man -ĠJak arta -oll er -dro ps -ĠSERV ICES -rompt u -oph ical -h ospital -bled on -6 45 -Ġmid range -ĠEV ENT -cul ated -raw led -Ġper ched -Ġover board -ĠPe el -ĠP wr -ĠCar th -ĠCOM PLE -co e -sh all -Ġdeter rence -M ETHOD -ĠAbs ent -M EN -Ġs ill -ĠLE VEL -Y ork -Ġsin ners -ĠOP EC -ĠN ur -ĠDesign s -se lection -Ġunw orthy -CH A -Ġstreng thens -88 3 -ed ly -Ġslic ing -Ġmal nutrition -Ġfilm making -ĠPol k -ur ated -Ġ4 21 -bre akers -!' " -Ġwet lands -ĠDisc rimination -Ġallow able -Ġste ered -ĠSic ily -S AM -Ġmust ache -Ġm ids -Ġcl ipped -Ġcirc ulate -Ġbr ittle -ĠBuild ings -ra ised -ĠRound up -Ġwealth ier -Ġoverw rite -Ġover powered -ĠGerr ard -s ites -PD ATED -Ġacute ly -ĠGam ble -Ġp im -ĠK us -Typ ically -De ploy -ĠMoroc can -p otion -com be -Ġvigil ante -Ġ36 3 -St ew -ĠB agg -Ġres ided -ĠSp o -Ġrem nant -Ġempt iness -br ainer -Ġout patient -pri ority -Ġle ptin -ĠPay ton -ĠGle aming -ĠS hed -ĠPol o -ĠMormon ism -rest ricted -arl ane -w x -Ġcreat ine -ĠAn on -ĠST UD -ĠJ UL -ĠT ee -5 28 -08 9 -Ġhat ched -Dis patch -ĠCompos ite -Ġ45 1 -p uff -ĠX COM -ĠOr n -ĠTH ANK -END ED -ĠAshe ville -Ġà ľ -Ġman go -ĠS lightly -world ly -ĠW ander -ĠExp and -ĠCh r -M ist -Ġorthodox y -ĠUN ESCO -reg ate -Else where -k ie -ir led -Ġtopp le -Ġadopt ive -ĠLeg s -d ress -ĠS agan -b are -ĠGl ou -Cr unch -Ġhelp ers -Ġchron ically -ĠH uma -1 0000 -Ġaccommod ating -äº Ķ -Ġwrink les -Ġdod ged -four th -Ġpre con -Ġcompress or -ĠK are -Ġev ict -ĠWar wick -im ar -Ġmodern ization -Ġband wagon -Ġref uted -Ġnet ted -ĠNa ples -ĠGen ie -per ors -Ġfield ed -Ġde re -ĠPar ables -le es -Ġtr out -asp ers -Ġn ihil -Ġhapp iest -Ġflo ppy -ĠLo ft -ĠHe ard -Ġun ison -Ġl ug -ĠRed mond -class ic -Supp orters -SH IP -G MT -Ġfue lled -ç IJ -Ġd d -ĠEmin em -Ġ18 97 -NY SE -Ġsecret aries -ĠF IA -ĠCanaver al -F avorite -Ġp omp -Ġdetain ee -ers hip -aim on -i our -ĠA pex -Ġplant ations -am ia -ac ion -R ust -Ġtow ed -ĠTru ly -5 77 -Ġshel 
tered -r ider -W o -Ġl air -ĠInt elligent -impro ve -m atically -Ġet iquette -ad ra -all o -ĠJun o -any thing -ĠStru ggle -ĠPred ict -ĠGr imes -ĠAMER ICA -ct x -ĠSit uation -W OOD -Ġsol uble -me ier -Ġintoler able -ang ering -Ġun interrupted -Ġtool tip -Ġinterrog ated -Ġgun ned -ĠSne ak -æŃ ¦ -Ġt ether -Ġcr umble -L ens -Ġclust ered -ĠSy l -ĠHas an -Ġdystop ian -w ana -Ġjoy stick -ĠTh ib -amm u -Tom orrow -5 46 -Ġoverc ame -Ġminim ized -cept or -Run ner -ENG TH -ĠBrend a -ĠAchieve ments -Ġtor ches -Ġrapp ort -ĠInvestig ator -ĠHand ling -rel ation -g rey -8 15 -Ġk cal -ĠComm ands -d q -Ġcur ls -Ġbe arer -Ġcyn icism -it ri -ĠUse ful -B ee -D CS -Ġab ras -P ract -BIL ITIES -7 12 -Ġdebug ger -Ġdebt or -ĠL ia -ĠK ers -Ġexacerb ate -ĠSt acy -ĠB land -ĠSc enes -Ġbranch ing -âĸĪâĸĪâĸĪâĸĪ âĸĪâĸĪâĸĪâĸĪ -ape ake -Ġs alsa -Ġmish and -ĠKon ami -ĠN ib -Ġanecd ote -Ġagree able -Ï ī -ĠNath aniel -ĠHe isman -ĠB eware -Ġ18 86 -spect ive -69 1 -5 22 -Ġinhib its -Ġhas hing -Ġ18 89 -å° Ĩ -v ich -P ure -Ġsolid ly -Ġaspir in -im aru -Ġstreet car -ĠU CS -ĠJ udd -Ġflash backs -p ins -Ġ14 40 -ĠUN HCR -ĠSym ptoms -T IT -5 38 -F ra -% ); -Ġo oz -Ġcur few -Ġcal med -Ġparticip ates -Te X -Ġnons ensical -Ġfull back -ĠDe L -mon key -h ari -Ġmetabol ites -Ġloot ed -ĠAL WAYS -ĠB CC -L t -oc het -B one -Ġveto ed -Ġg cc -ĠCL ICK -Ġ18 88 -s af -Ġstiff ness -Ġlow ly -ĠGe h -vers on -ors et -Ġun foreseen -Ġan esthesia -ĠOpt ical -Ġrecon structed -ĠT up -sh ows -NEW S -ĠNewsp aper -ĠA SA -ter a -N umbers -Ġinexpl icable -× ij -Ġhard ness -unt arily -ĠA cer -grad ient -ARD IS -Ġwood land -Ġmetaph ors -ĠWem bley -ĠPa vel -phil is -Ġre writing -Ġpercept ual -Ġ10 70 -worm s -ĠDown s -Ġunsur prisingly -Ġtag ging -fl ame -Ġlit res -Ġboun ces -ĠB abe -sh ut -Ġoverd oses -ĠShe ila -ĠCh au -ĠBl ess -Capt ure -ĠSign ificant -ĠSc ion -Ġ38 9 -ĠMc H -ĠTitan ium -ĠMe al -amed a -ag ents -agg ressive -B illy -76 3 -ĠS aying -DER R -it one -Coll ins -B ound -Ġbol ted -ĠDM CA -95 3 -Ġun iqueness -Ġep igen -un ci -ant am 
-Ġreck oning -ch airs -OG R -ĠSen egal -Ġ18 62 -re levant -Ġ ¯ -Ġpharm acies -ĠG eral -v ier -Y an -OR PG -Ġrab id -b ending -ĠUN ITED -Ġ4 65 -As sembly -Ġwe ep -Ġbe hest -ĠMother s -ĠJ ace -h id -Ġwh irlwind -ĠUN IVERS -Ġut opian -Ġkidn ap -Ph ilipp -K in -89 3 -Ġlivest ream -ĠM ISS -Ġsub versive -ĠTechn iques -ĠJUST ICE -ĠB ASE -Ġ38 7 -Ġassail ants -ĠHard core -Ġsprink led -ĠP se -é ļ -print ed -ĠH au -OR GE -ĠT OUR -Ġl aced -Ġit ch -G iving -Ġport ed -78 1 -//////////////// //////////////// -bre eding -Ġlog ger -ĠH OL -inn ie -First ly -Ġembry onic -Ġdeleg ated -p ai -O IL -Ġcentr ally -ĠR x -ĠSc outing -D utch -Ġhe reditary -ĠCru iser -s at -5 29 -ĠMar riott -other mal -Ġprohib itions -E arn -ĠSt ab -ĠColleg es -ĠBel ief -st retched -ĠL H -ĠEntity Item -C IA -Ġun rem -Ġlaure ate -Ġdenomin ations -sum mary -h ler -S pect -ĠK laus -ĠBe ans -Ġins ur -ĠPA X -Ġfield er -ĠV et -ĠSp arrow -z ie -ĠS Q -ĠMond ays -ĠOff line -ĠLer ner -ĠExt ensions -Ire land -Ġpatron age -Ġcontrast ed -ĠMan ia -h irt -Mos cow -Ġcondem ns -ĠAn ge -Ġcomp osing -ĠPe pe -ĠP addock -Ġheter ogeneity -Ġide ologically -Ġf ishes -Ġcur sing -ĠR utherford -ĠFlo ating -ĠAm elia -Te a -Syn opsis -Ġstun ts -Ġbe ad -Ġstock ing -ĠM ILL -ob ook -mass ive -\ < -Ġh ump -ĠPref erences -Engine Debug -ge ist -ĠNiet o -ome ver -ish y -eval uate -col onial -Altern ative -ĠGo Pro -ĠV ortex -ĠNET WORK -ans ky -Sec ure -ĠTh rust -Sn ake -Ġparcel s -Ġsam urai -Ġactress es -N ap -M F -ifer ation -Be er -5 23 -ĠI ly -oint ment -P ing -Ġstri ped -ĠMell on -oss ession -Ġneut ron -end ium -Ġa ph -ĠFlav oring -Ġ38 3 -Ġrespons iveness -ĠJ indal -ĠHitch cock -Den ver -ĠDRAG ON -sm anship -ĠDu pl -Ġs ly -Ġweb cam -ĠTw ain -ĠDar ling -ili ate -cons umer -D IT -Ġnames ake -Ġun orthodox -Ġfun er -ĠPL oS -ĠCONTR OL -ozy g -ogl obin -F ACE -ER G -ĠD ia -ĠF iesta -ce le -0 34 -Ġencl ave -âĸ¬ âĸ¬ -on ement -al ist -M and -Ġhome grown -ĠF ancy -Ġconcept ions -ĠCont ains -ure en -Ġreiter ate -Ġme ager -Ġinstall ments -Sp awn -6 27 
-Ġphot oc -ĠCab rera -ĠRos enthal -ĠLans ing -is ner -Ġinvest s -ĠUFO s -EX P -Hard ware -Ġtr agically -Ġconced es -ie ft -ch am -bor gh -ĠSch r -ĠMel anie -ĠH oy -Ġvisit ation -Ġid iosyncr -Ġfract ions -Ġfore skin -ob os -Ġpo aching -ĠVI EW -Ġstimul ates -ĠG ork -can on -M IC -ĠNem esis -ĠInd ra -ĠDM V -Ġ5 29 -Ġinspect ing -Ġgrand ma -ĠW hedon -ĠSh ant -ĠP urg -ik an -ĠT eg -ĠCL R -z ac -Vict oria -ĠVer ify -ion ics -Ġpart ying -ĠM ou -col our -Ġtestim onies -l ations -Ġpress uring -hi ro -ac ers -Ġf id -ang ler -ĠCS I -Ġhere after -Ġdiss idents -report ing -iph any -che v -Ġsol itude -Ġl obe -Ġind is -Ġcred ential -re cent -ad ult -ĠNir vana -ĠFranch ise -L ayer -H yp -ĠBerks hire -Ġwill s -t if -Ġtot em -ĠJud ah -rep air -Inst ant -5 48 -Ġemb assies -Ġbott leneck -Ġb ount -Ġtyp ew -ĠAl vin -j ing -im ilar -R ush -Ġbr im -ĠHEL P -A im -] ' -Ġpass ively -Ġbound ed -ĠR ated -Ġcriminal ity -Ġbiom ark -Ġdisp atcher -ĠTow ards -Ġ+ ++ -right eous -f rog -ĠP anc -C arter -0 32 -æ© Ł -Ġult raviolet -ĠLic ensed -ĠT ata -ĠBl essing -ĠG AM -Ġchem ically -ĠSe af -ĠRE LE -ĠMerc enary -capital ist -Ġform ulations -Ġann ihilation -ĠVer b -ĠAr gon -Ġun loaded -Ġmorp hed -Ġconqu ering -back er -I ELD -Ġtheft s -Ġfront runner -ĠRoy ale -ĠFund amental -el ight -C hip -necess ary -ay n -ĠSl ip -Ġ4 48 -cern ed -P ause -Ġshock ingly -ĠAB V -Ġcomp osure -7 33 -ĠMotors port -ah ime -Mur ray -M ach -Ġgr ids -Ġdeb ian -Ġfurther more -Ġdexter ity -ĠCollect ions -os lov -il age -b j -ĠMont eneg -Ġstrut Connector -Ġmassac res -Ġbrief s -fet ched -uv ian -ol ition -Fail ure -emon ic -Ġfl ared -Ġclaim ant -Ġc ures -Ġgive aways -ĠSubst ance -al ions -Ġcr inge -ĠK ul -Ġarist ocracy -ĠUl ster -ol ated -h ousing -ĠM IS -Ġgl ared -ĠWil helm -ne eds -lam bda -build ers -ĠV IS -Ġradi ator -ĠGhost busters -Ġ4 36 -act ual -Ġher ds -ç a -watch ing -Ġcounter ing -Ch arge -Ġchar red -Ġwar heads -Ġiod ine -ĠM acy -04 1 -Ġdepart ures -ĠS ins -Ġdy ed -ĠConcept s -g ado -7 13 -Ġquot ations -Ġg ist -ĠChrist y 
-Ġant igen -ĠHem p -ĠD rawn -ĠB arg -ez vous -Ġp aternity -Ġar du -ĠAnch orage -ĠR ik -Ġover loaded -ĠUs ername -ĠTam my -ĠN au -ĠCell ular -Ġw aning -Ġrod ent -ĠWor cester -il ts -ĠT ad -Ġdwell ings -Ġbull ish -4 31 -Ġretali ate -Ġmig raine -ĠChev ron -CH ECK -Ġdon key -c rim -SP A -ĠAn alog -Ġmarqu ee -ĠHa as -B ir -ĠGD DR -ĠDownload s -Ġwill power -ĠFor th -ĠRecord ed -Ġimp ossibility -ĠLog ged -ĠFr anks -ĠR att -in itions -Ġclean ers -Ġsore ly -Ġflick ering -ĠEx amination -c atching -allow een -Ms g -Ġdun no -F a -Ġdys ph -c razy -.' '. -Ġmain line -Ġc s -Ġp tr -ĠW ally -ig un -95 1 -ĠBig foot -f ights -Ġretrie ving -J r -Ġdupl ication -ĠExpl an -Ġrel ational -Ġqu aint -Ġbisc uits -Ġad o -Ġsh udder -Ġantid ote -blood ed -ks h -Ġsa uces -Ġrein vest -Ġdispens ary -ĠD iver -Ġ9 000 -stud ent -Ġin separ -esc ap -Ġtodd lers -ĠGP IO -ĠAss ignment -head ers -Ġlack luster -Ġab ack -95 6 -Ġtool bar -7 45 -Ġo ust -Ġcontempl ation -ĠPRES IDENT -Ġ4 58 -==== == -Ġguarantee ing -ĠHe ist -ĠCann es -Ļ ½ -Ġcollabor ator -ĠAm p -Ġg ou -ĠSH ALL -st ories -78 3 -Ġmobil ized -Ġbro od -ĠL U -ĠðŁ ij -Ġref in -ĠAnthrop ology -v ind -ill i -Ġwarrant ies -ĠB abel -Ġsw ath -Ġc aches -Ġantagon ists -art ifacts -Ġhot ly -ĠSt arts -ĠG ö -z ag -!! !!! -Ġsc ourge -Ġcons piring -ru its -re verse -ĠShe en -ĠJes uit -ĠGiov anni -ad ies -Ġbutt ocks -ear cher -ac an -Ġvolley ball -Ġshroud ed -Ġscore board -b ats -ĠI PM -Ġass es -Ġde regulation -ĠTe legram -ĠReb oot -Ġ7 000 -ĠCan ary -Ġk ernels -ĠFranç ois -ĠD uff -ĠP on -ĠLe ica -ĠGar min -Ġor phans -ĠClaud ia -Ġcal endars -ĠLe ilan -ent o -R ocket -Ġbr unch -ĠHaw king -ain ers -Ġsens ibilities -Ġk W -ĠK and -Ġre claimed -Ġinteresting ly -× © -rom y -J M -ĠEnhance ment -b ush -Sk ip -Ġrapp ers -Ġg azing -p edia -ath lon -Rev olution -Ġsn ipers -Ġre verted -Ġconglomer ate -T erry -79 4 -Ġhars her -Ġdes olate -ĠHit man -Comm ission -Ġ( / -âĢ¦ ." 
-Com par -Ġampl ification -om inated -Ġreg ress -ĠColl ider -Ġinform ants -Ġg azed diff --git a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/special_tokens_map.json b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/special_tokens_map.json deleted file mode 100644 index 773bd68cf09004..00000000000000 --- a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/special_tokens_map.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "bos_token": { - "content": "<|endoftext|>", - "lstrip": false, - "normalized": true, - "rstrip": false, - "single_word": false - }, - "eos_token": { - "content": "<|endoftext|>", - "lstrip": false, - "normalized": true, - "rstrip": false, - "single_word": false - }, - "unk_token": { - "content": "<|endoftext|>", - "lstrip": false, - "normalized": true, - "rstrip": false, - "single_word": false - } -} diff --git a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/tokenizer_config.json b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/tokenizer_config.json deleted file mode 100644 index 48314a4d098cbc..00000000000000 --- a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/tokenizer_config.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "add_bos_token": false, - "add_prefix_space": false, - "bos_token": { - "__type": "AddedToken", - "content": "<|endoftext|>", - "lstrip": false, - "normalized": true, - "rstrip": false, - "single_word": false - }, - "clean_up_tokenization_spaces": true, - "eos_token": { - "__type": "AddedToken", - "content": "<|endoftext|>", - "lstrip": false, - "normalized": true, - "rstrip": false, - "single_word": false - }, - "errors": "replace", - "model_max_length": 1024, - "pad_token": null, - "tokenizer_class": "GPT2Tokenizer", - "unk_token": { - "__type": "AddedToken", - "content": "<|endoftext|>", - "lstrip": false, - "normalized": true, - "rstrip": false, - "single_word": false - } -} diff --git 
a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/vocab.json b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/vocab.json deleted file mode 100644 index a15dd0028acd1d..00000000000000 --- a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2/vocab.json +++ /dev/null @@ -1,50259 +0,0 @@ -{ - "!": 0, - "!!": 3228, - "!!!": 10185, - "!!!!": 13896, - "!!!!!": 50184, - "!!!!!!!!": 34635, - "!!\"": 37160, - "!\"": 2474, - "!\",": 40754, - "!\".": 48220, - "!'": 13679, - "!'\"": 49296, - "!)": 8133, - "!),": 26290, - "!).": 19588, - "!,": 28265, - "!--": 28112, - "!.": 43179, - "!/": 48443, - "!:": 48725, - "!?": 22857, - "!?\"": 42720, - "!]": 36463, - "\"": 1, - "\"!": 40484, - "\"\"": 15931, - "\"\"\"": 37811, - "\"'": 30543, - "\"(": 18109, - "\")": 4943, - "\"))": 48774, - "\"),": 12340, - "\").": 11074, - "\");": 15341, - "\",": 1600, - "\",\"": 2430, - "\"-": 26793, - "\".": 1911, - "\"...": 26214, - "\".[": 42924, - "\"/>": 26700, - "\":": 1298, - "\":\"": 2404, - "\":\"\",\"": 34713, - "\":\"\"},{\"": 47182, - "\":\"/": 15473, - "\":-": 48219, - "\":[": 20598, - "\":[\"": 26358, - "\":[{\"": 32509, - "\":{\"": 8351, - "\";": 8172, - "\">": 5320, - "\"><": 22039, - "\">": 23785, - "\"}": 20662, - "\"},": 25719, - "\"},\"": 13018, - "\"},{\"": 11919, - "\"}],\"": 42785, - "\"âĢ¦": 24426, - "\"âĢĶ": 15327, - "#": 2, - "##": 2235, - "###": 21017, - "####": 4242, - "########": 7804, - "################": 14468, - "################################": 29113, - "#$": 29953, - "#$#$": 34206, - "$": 3, - "$$": 13702, - "$$$$": 36737, - "$,": 47113, - "$.": 35307, - "${": 38892, - "%": 4, - "%\"": 39658, - "%%": 16626, - "%%%%": 36917, - "%)": 4407, - "%),": 15920, - "%).": 18823, - "%);": 49563, - "%,": 7441, - "%-": 33963, - "%.": 7225, - "%:": 48529, - "%;": 26525, - "%]": 39850, - "&": 5, - "&&": 25226, - "'": 6, - "'\"": 29653, - "''": 7061, - "''''": 39115, - "''.": 35384, - "'';": 44648, - "')": 11537, - "'),": 33809, - "').": 
27691, - "');": 24036, - "',": 3256, - "',\"": 40264, - "','": 41707, - "'-": 29001, - "'.": 4458, - "'.\"": 30827, - "'/": 26488, - "':": 10354, - "';": 17020, - "'>": 44167, - "'?": 30960, - "']": 20520, - "'d": 1549, - "'ll": 1183, - "'m": 1101, - "'re": 821, - "'s": 338, - "'t": 470, - "'ve": 1053, - "(": 7, - "(\"": 7203, - "($": 16763, - "(&": 39434, - "('": 10786, - "((": 19510, - "()": 3419, - "())": 28955, - "());": 35430, - "(),": 22784, - "().": 22446, - "():": 33529, - "();": 9783, - "(){": 39893, - "(*": 46491, - "(-": 32590, - "([": 26933, - "(\\": 38016, - "(_": 28264, - "({": 15090, - ")": 8, - ")!": 31520, - ")\"": 16725, - ")\",": 42501, - ")'": 33047, - ")(": 5769, - "))": 4008, - ")))": 22305, - "))))": 35514, - ")),": 36911, - ")).": 29720, - "));": 18125, - ")*": 27493, - ")+": 47762, - "),": 828, - "),\"": 27267, - ")-": 13219, - ")--": 42944, - ").": 737, - ").\"": 21387, - ")...": 26513, - ").[": 42669, - ")/": 20679, - "):": 2599, - ");": 1776, - ")": 46904, - "-.": 34507, - "->": 3784, - "-[": 49146, - "-|": 22831, - ".": 13, - ".\"": 526, - ".\"\"": 32203, - ".\")": 19570, - ".\",": 33283, - ".\",\"": 41424, - ".\"[": 18161, - ".#": 32535, - ".$": 48082, - ".'": 2637, - ".'\"": 11496, - ".''": 13531, - ".''.": 50113, - ".(": 12195, - ".)": 2014, - ".),": 12179, - ".).": 15729, - ".):": 47308, - ".*": 15885, - ".,": 1539, - ".,\"": 44388, - ".-": 7874, - ".--": 9816, - "..": 492, - "...": 986, - "...\"": 9313, - "...)": 23029, - "....": 1106, - ".....": 12359, - "......": 16317, - ".......": 25780, - "........": 2109, - ".........": 34617, - ".............": 44274, - "................": 4181, - "..................": 49129, - "........................": 27754, - "................................": 8864, - "................................................................": 23193, - "...?": 44825, - "...]": 22345, - "../": 40720, - "./": 19571, - ".:": 11207, - ".;": 15089, - ".<": 29847, - ".>": 32756, - ".?": 40791, - ".[": 3693, - ".]": 
8183, - "._": 13557, - ".}": 44587, - ".âĢĵ": 37585, - ".âĢĶ": 13402, - ".ãĢį": 43735, - ".�": 40670, - "/": 14, - "/\"": 30487, - "/#": 31113, - "/$": 32624, - "/(": 29006, - "/)": 34729, - "/*": 15211, - "/**": 35343, - "/+": 28404, - "/,": 47454, - "/-": 16327, - "/.": 11757, - "//": 1003, - "///": 20379, - "////": 9705, - "////////": 16150, - "////////////////": 27246, - "////////////////////////////////": 49704, - "/>": 15913, - "/?": 20924, - "/_": 47835, - "/âĢĭ": 27643, - "0": 15, - "00": 405, - "000": 830, - "0000": 2388, - "00000": 20483, - "000000": 10535, - "0000000": 24598, - "00000000": 8269, - "0000000000000000": 25645, - "00007": 44808, - "0001": 18005, - "0002": 34215, - "001": 8298, - "0010": 37187, - "002": 21601, - "00200000": 36490, - "003": 11245, - "004": 22914, - "005": 22544, - "006": 28041, - "007": 25816, - "008": 25257, - "009": 28694, - "01": 486, - "010": 20943, - "0100": 39103, - "011": 28555, - "012": 30206, - "013": 30273, - "014": 28645, - "015": 25150, - "016": 27037, - "017": 29326, - "018": 29159, - "019": 30484, - "02": 2999, - "020": 33618, - "0200": 44613, - "021": 46821, - "022": 44087, - "023": 45310, - "024": 40839, - "025": 36629, - "026": 45987, - "027": 44698, - "028": 46957, - "029": 48891, - "03": 3070, - "030": 39101, - "031": 43637, - "032": 49959, - "033": 44427, - "034": 49841, - "035": 44215, - "036": 48597, - "04": 3023, - "040": 36676, - "041": 50049, - "043": 48768, - "044": 43977, - "045": 40350, - "046": 45438, - "047": 48000, - "048": 47202, - "05": 2713, - "050": 28669, - "052": 37841, - "055": 47838, - "057": 43526, - "059": 46712, - "06": 3312, - "060": 41322, - "07": 2998, - "070": 43509, - "075": 46396, - "08": 2919, - "080": 33057, - "083": 48290, - "088": 46556, - "089": 49352, - "09": 2931, - "090": 42534, - "1": 16, - "10": 940, - "100": 3064, - "1000": 12825, - "10000": 49388, - "1001": 47705, - "1007": 44318, - "101": 8784, - "1016": 27956, - "102": 15377, - "1024": 35500, - "1027": 40403, - 
"103": 15197, - "104": 13464, - "105": 13348, - "106": 15801, - "107": 15982, - "108": 15711, - "1080": 24045, - "109": 14454, - "11": 1157, - "110": 11442, - "1100": 42060, - "111": 16243, - "1111": 26259, - "112": 14686, - "113": 16616, - "114": 16562, - "115": 15363, - "116": 18298, - "117": 17657, - "118": 16817, - "119": 16315, - "12": 1065, - "120": 10232, - "1200": 27550, - "121": 19244, - "122": 18376, - "123": 10163, - "124": 17464, - "125": 11623, - "126": 19420, - "127": 16799, - "128": 12762, - "129": 18741, - "13": 1485, - "130": 12952, - "131": 22042, - "132": 19924, - "133": 16945, - "134": 19880, - "135": 17059, - "136": 20809, - "137": 19708, - "138": 20107, - "139": 20219, - "14": 1415, - "140": 15187, - "141": 23756, - "142": 23726, - "143": 21139, - "144": 18444, - "145": 18781, - "146": 20964, - "147": 20198, - "148": 18294, - "149": 19442, - "15": 1314, - "150": 8628, - "1500": 33698, - "151": 24309, - "152": 17827, - "153": 21395, - "154": 21526, - "155": 18742, - "156": 21599, - "157": 18458, - "158": 21273, - "159": 19707, - "16": 1433, - "160": 14198, - "1600": 36150, - "161": 25948, - "162": 25061, - "163": 24136, - "164": 23237, - "165": 20986, - "166": 23055, - "167": 21940, - "168": 14656, - "169": 22172, - "17": 1558, - "170": 17279, - "171": 27192, - "172": 23628, - "173": 25399, - "174": 22985, - "175": 17430, - "176": 24096, - "177": 22413, - "178": 23188, - "179": 21738, - "18": 1507, - "180": 15259, - "1800": 39188, - "181": 27057, - "182": 24294, - "183": 24839, - "184": 22883, - "185": 21652, - "186": 25096, - "187": 23451, - "188": 20356, - "189": 23362, - "19": 1129, - "190": 19782, - "1900": 48104, - "191": 26492, - "192": 17477, - "1920": 40454, - "193": 24943, - "194": 22913, - "1945": 41931, - "195": 22186, - "1950": 42751, - "1959": 45403, - "196": 25272, - "1960": 38503, - "1963": 45192, - "1964": 46477, - "1965": 45271, - "1966": 44227, - "1967": 42830, - "1968": 42246, - "1969": 38391, - "197": 24991, - "1970": 30986, 
- "1971": 41208, - "1972": 41023, - "1973": 40220, - "1974": 40828, - "1975": 38449, - "1976": 38108, - "1977": 37781, - "1978": 37950, - "1979": 33581, - "198": 22337, - "1980": 23664, - "1981": 35411, - "1982": 30763, - "1983": 29279, - "1984": 28296, - "1985": 29110, - "1986": 28054, - "1987": 27301, - "1988": 26709, - "1989": 25475, - "199": 19104, - "1990": 19891, - "1991": 24529, - "1992": 23847, - "1993": 24465, - "1994": 22666, - "1995": 21908, - "1996": 22288, - "1997": 21498, - "1998": 21113, - "1999": 18946, - "2": 17, - "20": 1238, - "200": 2167, - "2000": 11024, - "200000": 33470, - "2001": 14585, - "2002": 16942, - "2003": 16088, - "2004": 15724, - "2005": 14315, - "2006": 13330, - "2007": 12726, - "2008": 11528, - "2009": 10531, - "201": 1264, - "2010": 10333, - "2011": 9804, - "2012": 6999, - "2013": 6390, - "2014": 4967, - "2015": 4626, - "2016": 5304, - "2017": 5539, - "2018": 7908, - "2019": 23344, - "202": 19004, - "2020": 42334, - "203": 22416, - "204": 18638, - "20439": 47936, - "205": 21261, - "206": 22136, - "207": 22745, - "208": 21315, - "209": 22567, - "21": 2481, - "210": 21536, - "211": 21895, - "212": 21777, - "213": 26427, - "214": 22291, - "215": 23349, - "216": 20666, - "217": 24591, - "218": 28727, - "219": 28896, - "22": 1828, - "220": 17572, - "2200": 34294, - "221": 26115, - "222": 23148, - "223": 22047, - "224": 24137, - "225": 18182, - "226": 24909, - "227": 24403, - "228": 23815, - "229": 23539, - "23": 1954, - "230": 19214, - "231": 25667, - "232": 24339, - "233": 25429, - "234": 24409, - "235": 22370, - "236": 24940, - "237": 24693, - "238": 23721, - "239": 23516, - "24": 1731, - "240": 16102, - "241": 28872, - "242": 27877, - "243": 26660, - "244": 25707, - "245": 22995, - "246": 26912, - "247": 23753, - "248": 23045, - "249": 21626, - "25": 1495, - "250": 9031, - "2500": 44688, - "251": 28072, - "252": 22800, - "253": 28592, - "254": 24970, - "255": 13381, - "256": 11645, - "257": 28676, - "258": 25600, - "259": 25191, - 
"26": 2075, - "260": 21719, - "261": 30057, - "262": 29119, - "263": 29558, - "264": 18897, - "265": 22980, - "266": 25540, - "267": 25674, - "268": 25022, - "269": 26276, - "27": 1983, - "270": 20233, - "271": 28977, - "272": 29807, - "273": 27367, - "274": 28857, - "275": 23195, - "276": 27988, - "277": 27019, - "278": 25870, - "279": 26050, - "28": 2078, - "280": 21033, - "281": 30368, - "282": 32568, - "283": 30290, - "284": 30336, - "285": 26279, - "286": 27033, - "287": 27800, - "288": 25270, - "289": 27693, - "29": 1959, - "290": 24369, - "291": 33551, - "292": 32759, - "293": 31675, - "294": 27696, - "295": 25710, - "296": 27137, - "297": 26561, - "298": 27728, - "299": 22579, - "3": 18, - "30": 1270, - "300": 6200, - "3000": 23924, - "301": 18938, - "302": 22709, - "303": 22572, - "304": 21288, - "305": 22515, - "306": 20548, - "307": 22996, - "308": 21495, - "309": 26895, - "31": 3132, - "310": 26717, - "311": 36244, - "312": 27970, - "313": 25838, - "314": 33638, - "315": 27936, - "316": 33400, - "317": 34125, - "318": 36042, - "319": 35175, - "32": 2624, - "320": 19504, - "321": 36453, - "322": 37283, - "323": 32637, - "324": 33916, - "325": 26582, - "326": 39195, - "327": 34159, - "328": 34256, - "329": 37967, - "33": 2091, - "330": 26073, - "331": 31697, - "332": 32148, - "333": 20370, - "3333": 24840, - "334": 31380, - "335": 27326, - "336": 29211, - "337": 31496, - "338": 28460, - "339": 29626, - "34": 2682, - "340": 23601, - "341": 33660, - "342": 31575, - "343": 32118, - "344": 33535, - "345": 27712, - "346": 30557, - "347": 30995, - "348": 28978, - "349": 27371, - "35": 2327, - "350": 14877, - "351": 35273, - "352": 33394, - "353": 33319, - "354": 32182, - "355": 28567, - "356": 32066, - "357": 27277, - "358": 31128, - "359": 30743, - "36": 2623, - "360": 15277, - "361": 35195, - "362": 35667, - "363": 35447, - "364": 26780, - "365": 24760, - "366": 32459, - "367": 27824, - "368": 27412, - "369": 30803, - "37": 2718, - "370": 20167, - "371": 
38056, - "372": 36720, - "373": 34770, - "374": 31020, - "375": 22318, - "376": 32128, - "377": 26514, - "378": 30695, - "379": 29088, - "38": 2548, - "380": 23734, - "381": 36626, - "382": 36243, - "383": 34741, - "384": 22842, - "385": 27203, - "386": 21734, - "387": 32220, - "388": 30460, - "389": 29769, - "39": 2670, - "390": 25964, - "391": 37710, - "392": 32321, - "393": 26007, - "394": 34626, - "395": 31010, - "396": 34107, - "397": 33372, - "398": 31952, - "399": 28771, - "4": 19, - "40": 1821, - "400": 7029, - "4000": 27559, - "401": 21844, - "402": 32531, - "403": 31552, - "404": 26429, - "405": 26598, - "406": 29703, - "407": 30120, - "408": 26200, - "409": 29416, - "41": 3901, - "410": 33289, - "411": 42224, - "412": 39226, - "413": 44103, - "414": 37309, - "415": 35038, - "416": 35218, - "417": 38547, - "418": 39667, - "419": 45068, - "42": 3682, - "420": 27211, - "421": 46636, - "422": 44361, - "423": 43356, - "424": 40090, - "425": 32114, - "426": 42780, - "427": 42363, - "428": 40173, - "429": 11785, - "43": 3559, - "430": 31794, - "431": 50080, - "432": 45331, - "433": 42117, - "434": 47101, - "435": 40064, - "436": 43690, - "437": 43284, - "438": 43704, - "439": 47106, - "44": 2598, - "440": 25644, - "441": 39710, - "442": 39506, - "443": 34938, - "444": 30272, - "445": 43489, - "446": 27260, - "447": 34825, - "448": 31115, - "449": 31911, - "45": 2231, - "450": 17885, - "451": 36330, - "452": 37730, - "453": 36625, - "454": 34229, - "455": 30505, - "456": 29228, - "457": 33032, - "458": 29334, - "459": 33459, - "46": 3510, - "460": 34716, - "461": 40652, - "462": 39997, - "463": 38380, - "464": 44578, - "465": 42018, - "466": 42199, - "467": 24669, - "468": 38472, - "469": 42947, - "47": 2857, - "470": 27790, - "471": 38339, - "472": 37856, - "473": 37804, - "474": 38652, - "475": 32576, - "476": 35435, - "477": 32883, - "478": 29059, - "479": 31714, - "48": 2780, - "480": 22148, - "481": 40271, - "482": 40149, - "483": 38783, - "484": 34137, - 
"485": 32642, - "486": 34251, - "487": 35133, - "488": 33646, - "489": 35890, - "49": 2920, - "490": 31503, - "491": 41289, - "492": 40256, - "493": 43134, - "494": 39449, - "495": 33781, - "496": 37747, - "497": 38073, - "498": 36260, - "499": 28324, - "5": 20, - "50": 1120, - "500": 4059, - "5000": 27641, - "501": 33548, - "502": 35126, - "503": 31938, - "504": 33580, - "505": 31654, - "506": 35638, - "507": 35378, - "508": 33042, - "509": 29022, - "51": 4349, - "510": 33690, - "511": 41647, - "512": 25836, - "513": 48645, - "514": 47396, - "515": 45969, - "516": 47493, - "517": 48170, - "518": 44085, - "519": 47785, - "52": 4309, - "520": 31211, - "522": 49542, - "523": 49803, - "524": 48057, - "525": 39088, - "526": 48531, - "528": 49351, - "529": 49721, - "53": 4310, - "530": 38612, - "533": 44994, - "535": 44465, - "536": 44468, - "537": 46096, - "538": 49561, - "54": 4051, - "540": 35005, - "544": 47576, - "545": 45326, - "546": 49489, - "548": 49934, - "549": 44966, - "55": 2816, - "550": 22730, - "551": 43697, - "552": 40427, - "553": 48096, - "554": 44218, - "555": 31046, - "556": 37864, - "557": 41948, - "558": 40486, - "559": 38605, - "56": 3980, - "560": 34135, - "561": 47915, - "562": 43918, - "563": 46572, - "565": 47372, - "568": 49211, - "57": 3553, - "570": 39254, - "571": 42875, - "572": 48724, - "573": 48638, - "574": 46900, - "575": 36189, - "576": 37452, - "577": 49447, - "578": 38907, - "579": 41734, - "58": 3365, - "580": 39322, - "581": 48630, - "582": 46044, - "583": 46239, - "584": 46352, - "585": 38905, - "586": 29796, - "587": 44617, - "588": 39118, - "589": 44169, - "59": 3270, - "590": 36993, - "591": 48952, - "592": 45839, - "593": 49051, - "594": 46438, - "595": 35124, - "596": 45734, - "597": 43239, - "598": 41292, - "599": 43452, - "6": 21, - "60": 1899, - "600": 8054, - "6000": 43434, - "601": 41706, - "602": 31418, - "603": 35642, - "604": 31916, - "605": 32417, - "606": 33206, - "607": 31980, - "608": 28688, - "609": 31751, - 
"61": 5333, - "610": 39132, - "612": 43610, - "613": 47512, - "614": 46841, - "615": 47007, - "616": 44214, - "617": 47941, - "618": 47448, - "62": 5237, - "620": 38850, - "623": 46872, - "625": 26704, - "626": 45191, - "627": 49856, - "628": 48200, - "629": 48602, - "63": 5066, - "630": 30005, - "635": 48250, - "64": 2414, - "640": 31102, - "641": 42759, - "642": 41290, - "643": 41813, - "644": 29173, - "645": 49259, - "646": 27720, - "647": 33981, - "648": 34287, - "649": 33300, - "65": 2996, - "650": 17544, - "651": 40639, - "652": 43193, - "653": 46435, - "654": 39111, - "655": 35916, - "656": 37466, - "657": 37680, - "658": 38431, - "659": 36445, - "66": 2791, - "660": 39885, - "661": 47159, - "662": 39380, - "663": 45791, - "665": 36879, - "666": 27310, - "6666": 19060, - "66666666": 41977, - "667": 28933, - "668": 35809, - "669": 36657, - "67": 3134, - "670": 43798, - "671": 46250, - "672": 43864, - "673": 45758, - "674": 45385, - "675": 42444, - "676": 42548, - "677": 40179, - "678": 30924, - "679": 37601, - "68": 3104, - "680": 37397, - "681": 48564, - "682": 43950, - "683": 47521, - "684": 41580, - "685": 35978, - "686": 33808, - "687": 39925, - "688": 34427, - "689": 40523, - "69": 3388, - "690": 35844, - "691": 49541, - "692": 46589, - "693": 48528, - "694": 45214, - "695": 37381, - "696": 38205, - "697": 40035, - "698": 39357, - "699": 47325, - "7": 22, - "70": 2154, - "700": 9879, - "701": 41583, - "702": 36680, - "703": 36809, - "704": 32869, - "705": 34801, - "706": 35402, - "707": 24038, - "70710": 42877, - "708": 32583, - "709": 31495, - "71": 4869, - "710": 43147, - "712": 49517, - "713": 50055, - "714": 45722, - "718": 45720, - "72": 4761, - "720": 23906, - "725": 45151, - "727": 47760, - "728": 48524, - "729": 48555, - "73": 4790, - "730": 43916, - "733": 49995, - "736": 49150, - "74": 4524, - "740": 45598, - "745": 50150, - "747": 48882, - "748": 48246, - "75": 2425, - "750": 15426, - "751": 48365, - "752": 43665, - "753": 44550, - "754": 
41874, - "755": 38172, - "756": 38219, - "757": 39251, - "758": 38569, - "759": 38314, - "76": 4304, - "760": 40761, - "7601": 42752, - "762": 48194, - "763": 49641, - "765": 29143, - "76561": 48527, - "767": 32059, - "768": 30610, - "77": 3324, - "770": 41820, - "771": 46761, - "772": 43571, - "773": 46871, - "774": 47582, - "775": 34483, - "776": 39509, - "777": 29331, - "778": 39761, - "779": 40393, - "78": 3695, - "780": 40873, - "781": 49703, - "782": 46519, - "783": 50165, - "784": 37688, - "785": 41172, - "786": 46302, - "787": 41019, - "789": 40401, - "79": 3720, - "790": 37750, - "792": 48156, - "793": 44750, - "794": 50242, - "795": 41544, - "796": 41060, - "797": 44673, - "798": 43240, - "799": 45455, - "8": 23, - "80": 1795, - "800": 7410, - "8000": 33942, - "801": 41531, - "802": 30863, - "803": 43564, - "804": 36088, - "805": 28256, - "806": 37988, - "807": 36928, - "808": 28362, - "809": 34583, - "81": 6659, - "810": 40215, - "815": 49503, - "82": 6469, - "820": 41739, - "825": 47338, - "83": 5999, - "830": 48341, - "833": 48634, - "84": 5705, - "840": 40675, - "85": 5332, - "850": 25764, - "855": 45432, - "86": 4521, - "860": 45039, - "864": 39570, - "866": 42240, - "87": 5774, - "870": 46951, - "875": 31360, - "877": 42802, - "88": 3459, - "880": 41655, - "882": 42980, - "883": 49287, - "884": 40353, - "885": 44230, - "886": 44980, - "887": 46660, - "888": 28011, - "889": 39121, - "89": 4531, - "893": 49682, - "896": 48712, - "899": 44093, - "9": 24, - "90": 3829, - "900": 12865, - "901": 46815, - "905": 44928, - "909": 44675, - "91": 6420, - "910": 43234, - "911": 35549, - "915": 40248, - "916": 48894, - "92": 5892, - "920": 37128, - "925": 46351, - "93": 6052, - "930": 45418, - "94": 5824, - "940": 46899, - "949": 48581, - "95": 3865, - "950": 31027, - "951": 50119, - "952": 49234, - "953": 49649, - "954": 48372, - "956": 50148, - "96": 4846, - "960": 39277, - "968": 38956, - "969": 38819, - "97": 5607, - "970": 43587, - "975": 42716, - "978": 
32196, - "98": 4089, - "980": 40022, - "985": 42250, - "986": 49087, - "987": 44183, - "989": 42520, - "99": 2079, - "990": 34155, - "992": 41561, - "993": 44821, - "994": 42691, - "995": 33438, - "996": 38565, - "997": 39647, - "998": 34808, - "999": 17032, - "9999": 24214, - ":": 25, - ":\"": 11097, - ":#": 43922, - ":'": 32105, - ":(": 37498, - ":,": 45299, - ":-": 21912, - ":/": 14079, - "://": 1378, - "::": 3712, - "::::": 24022, - "::::::::": 43661, - ":[": 33250, - ":\\": 7479, - ":]": 47715, - ":{": 29164, - ";": 26, - ";\"": 26033, - ";;": 7665, - ";;;;": 14223, - ";;;;;;;;": 25887, - ";;;;;;;;;;;;": 46939, - ";}": 46956, - "<": 27, - "": 50256, - "=": 28, - "=\"": 2625, - "=\"\"": 33151, - "=\"#": 25698, - "=\"/": 35922, - "=#": 46249, - "=$": 43641, - "='": 11639, - "=(": 16193, - "=-": 10779, - "=-=-": 16822, - "=-=-=-=-": 27584, - "=-=-=-=-=-=-=-=-": 46402, - "=/": 33223, - "==": 855, - "===": 18604, - "====": 1421, - "======": 50155, - "========": 2559, - "============": 25609, - "================": 4770, - "================================": 10052, - "================================================================": 23926, - "=>": 14804, - "=[": 41888, - "=\\\"": 17553, - "=]": 48874, - "={": 34758, - "=~": 31820, - "=~=~": 33813, - ">": 29, - ">\"": 24618, - ">(": 33994, - ">)": 43734, - ">,": 22330, - ">.": 28401, - ">:": 31175, - "><": 6927, - ">>": 4211, - ">>>": 33409, - ">>>>": 16471, - ">>>>>>>>": 33717, - ">>\\": 34516, - ">[": 36937, - ">]": 37981, - "?": 30, - "?!": 12248, - "?!\"": 30823, - "?\"": 1701, - "?\",": 35379, - "?\".": 43634, - "?'": 8348, - "?'\"": 26989, - "?)": 10091, - "?),": 33924, - "?).": 29865, - "?,": 21747, - "?:": 27514, - "??": 3548, - "???": 28358, - "????": 9805, - "?????": 19622, - "?????-": 25658, - "?????-?????-": 31666, - "????????": 35709, - "?]": 26398, - "?ãĢį": 42943, - "@": 31, - "@#": 41573, - "@#&": 48193, - "@@": 12404, - "@@@@": 22675, - "@@@@@@@@": 37991, - "A": 32, - "AA": 3838, - "AAA": 29697, - 
"AAAA": 17922, - "AAAAAAAA": 43488, - "AAF": 38540, - "AB": 6242, - "ABC": 24694, - "ABLE": 17534, - "AC": 2246, - "ACA": 26576, - "ACC": 26861, - "ACE": 11598, - "ACH": 16219, - "ACK": 8120, - "ACP": 33056, - "ACT": 10659, - "ACTED": 38542, - "ACTION": 44710, - "ACY": 43300, - "AD": 2885, - "ADA": 26853, - "ADD": 29266, - "ADE": 19266, - "ADRA": 40517, - "ADS": 47149, - "ADVERTISEMENT": 19053, - "AE": 14242, - "AF": 8579, - "AFP": 17449, - "AFTA": 32106, - "AG": 4760, - "AGE": 11879, - "AGES": 25552, - "AH": 18429, - "AI": 20185, - "AIDS": 39338, - "AIN": 29833, - "AIR": 42149, - "AK": 10206, - "AKING": 43602, - "AL": 1847, - "ALD": 44071, - "ALE": 21358, - "ALK": 28082, - "ALL": 7036, - "ALLY": 19807, - "ALS": 23333, - "ALSE": 23719, - "ALT": 31429, - "ALTH": 40818, - "AM": 2390, - "AMA": 25087, - "AMD": 28075, - "AME": 10067, - "AMES": 29559, - "AMI": 43870, - "AMP": 23518, - "AMS": 40834, - "AMY": 29428, - "AN": 1565, - "ANA": 31574, - "ANC": 20940, - "ANCE": 19240, - "AND": 6981, - "ANE": 30525, - "ANG": 15567, - "ANGE": 27746, - "ANI": 43664, - "ANK": 15154, - "ANN": 22846, - "ANS": 15037, - "ANT": 8643, - "ANY": 31827, - "AP": 2969, - "APD": 35349, - "APE": 45721, - "APH": 31300, - "API": 17614, - "APP": 24805, - "APS": 44580, - "APTER": 29485, - "AR": 1503, - "ARA": 24401, - "ARB": 37304, - "ARC": 25793, - "ARCH": 31315, - "ARD": 9795, - "ARDIS": 49608, - "ARDS": 48294, - "ARE": 12203, - "ARGET": 46095, - "ARI": 33604, - "ARK": 14175, - "ARM": 33456, - "ARP": 36035, - "ARR": 26465, - "ARS": 27415, - "ART": 7227, - "ARY": 13153, - "AS": 1921, - "ASC": 42643, - "ASE": 11159, - "ASED": 42827, - "ASH": 11211, - "ASHINGTON": 19436, - "ASON": 36033, - "ASS": 10705, - "AST": 11262, - "ASY": 26483, - "AT": 1404, - "ATA": 13563, - "ATCH": 11417, - "ATE": 6158, - "ATED": 11617, - "ATER": 23261, - "ATES": 29462, - "ATH": 12599, - "ATHER": 45226, - "ATING": 33881, - "ATION": 6234, - "ATIONAL": 29912, - "ATIONS": 18421, - "ATIVE": 37045, - "ATOR": 25633, - "ATS": 33586, 
- "ATT": 17139, - "ATTLE": 35455, - "ATURE": 40086, - "ATURES": 47471, - "AU": 26830, - "AUD": 48877, - "AUT": 39371, - "AV": 10116, - "AW": 12298, - "AX": 25922, - "AY": 4792, - "AZ": 22778, - "Aaron": 34451, - "Ab": 4826, - "Ability": 22453, - "About": 8585, - "Above": 32397, - "Abs": 24849, - "Absolutely": 40501, - "Abstract": 23839, - "Abyss": 49073, - "Ac": 12832, - "Acc": 17320, - "Accept": 38855, - "Access": 15457, - "Accessory": 41629, - "According": 4821, - "Account": 30116, - "Acknowled": 39482, - "Across": 40553, - "Act": 6398, - "Action": 12502, - "ActionCode": 31573, - "Activ": 25526, - "Active": 13739, - "Activity": 16516, - "Actor": 40277, - "Actually": 26417, - "Ad": 2782, - "Adam": 23159, - "Adams": 47462, - "Adapt": 48003, - "Adapter": 47307, - "Add": 4550, - "Added": 13003, - "Adding": 32901, - "Additional": 17699, - "Additionally": 23216, - "Address": 20231, - "Adds": 46245, - "Adjust": 39668, - "Admin": 46787, - "Administ": 41862, - "Adult": 42995, - "Adv": 22856, - "Advanced": 28809, - "Adventure": 48289, - "Advertisement": 4723, - "Advertisements": 14592, - "Af": 17584, - "Afee": 44314, - "Aff": 35191, - "African": 43032, - "After": 3260, - "Ag": 10262, - "Again": 15316, - "Against": 39276, - "Age": 23396, - "Agent": 36772, - "Agg": 46384, - "Ah": 10910, - "Aid": 44245, - "Aim": 49945, - "Air": 16170, - "Ak": 33901, - "Al": 2348, - "Alabama": 49177, - "Alan": 36235, - "Albert": 42590, - "Ale": 37474, - "Alert": 36420, - "Alex": 15309, - "Alexander": 38708, - "Ali": 37893, - "Alias": 40489, - "Alice": 44484, - "Alien": 44501, - "All": 3237, - "Allah": 48620, - "Allen": 39989, - "Allow": 35265, - "Allows": 34934, - "Almost": 23379, - "Along": 24035, - "Alpha": 38077, - "Already": 37447, - "Alright": 31442, - "Also": 7583, - "Alt": 29161, - "Altern": 23081, - "Alternative": 49788, - "Alternatively": 44163, - "Although": 7003, - "Always": 30374, - "Am": 5840, - "Amazing": 42770, - "Amazon": 24888, - "Amb": 35649, - "Americ": 5477, - "America": 
18165, - "American": 7437, - "Americans": 17636, - "Amid": 43541, - "Among": 14311, - "Amount": 31264, - "Amy": 40797, - "An": 2025, - "Analy": 37702, - "Analysis": 32750, - "Ancient": 44974, - "And": 1870, - "Anderson": 42991, - "Andre": 31258, - "Andrew": 20508, - "Android": 25934, - "Andy": 35314, - "Ang": 13450, - "Angel": 33246, - "Angelo": 45585, - "Anim": 35320, - "Animal": 40002, - "Animation": 39520, - "Ann": 18858, - "Anna": 31160, - "Anne": 43227, - "Anonymous": 20660, - "Another": 6610, - "Answer": 33706, - "Ant": 13217, - "Anth": 30327, - "Anthony": 32697, - "Anti": 28795, - "Any": 7149, - "Anyone": 21129, - "Anything": 40028, - "Anyway": 23795, - "Ap": 25189, - "Apart": 39182, - "App": 4677, - "AppData": 22322, - "Apparently": 30402, - "Appearance": 48231, - "Appearances": 47569, - "Apple": 16108, - "Applic": 33583, - "Application": 23416, - "Applications": 41995, - "Apply": 44836, - "Apps": 48433, - "Apr": 13680, - "April": 16784, - "Ar": 3163, - "Arab": 31602, - "Arc": 24021, - "Arcade": 43763, - "Arch": 19895, - "Are": 8491, - "Area": 30547, - "Aren": 43199, - "Arg": 28100, - "Args": 42035, - "Ari": 26529, - "Arizona": 40732, - "Ark": 42007, - "Arm": 26560, - "Armor": 31512, - "Army": 45272, - "Around": 24472, - "Array": 19182, - "Arsenal": 46230, - "Art": 8001, - "Arthur": 29874, - "Article": 14906, - "Artist": 43020, - "As": 1722, - "Ash": 26754, - "Asia": 38555, - "Asian": 43224, - "Aside": 32602, - "Ask": 25214, - "Asked": 18932, - "Ass": 8021, - "Assad": 23622, - "Assembly": 49670, - "Asset": 45869, - "Assistant": 48902, - "Associated": 29014, - "Assuming": 48142, - "Ast": 33751, - "Async": 42367, - "At": 2953, - "Atl": 25255, - "Atlanta": 43482, - "Atlantic": 41120, - "Att": 8086, - "Attach": 33296, - "Attack": 27732, - "Attempt": 37177, - "Attempts": 48452, - "Attorney": 46319, - "Attribute": 33682, - "Attributes": 29021, - "Aud": 16353, - "Audio": 21206, - "Aug": 12512, - "August": 17908, - "Aust": 15160, - "Austin": 40245, - "Austral": 
19763, - "Australia": 27429, - "Australian": 38036, - "Aut": 16541, - "Auth": 30515, - "Authent": 47649, - "Author": 13838, - "Authorities": 28705, - "Auto": 27722, - "Autom": 38062, - "Av": 7355, - "Availability": 29841, - "Available": 10493, - "Average": 26287, - "Avg": 48997, - "Avoid": 38618, - "Aw": 23155, - "Awesome": 49061, - "Ax": 31554, - "Ay": 42012, - "Az": 26903, - "B": 33, - "BA": 4339, - "BACK": 31098, - "BALL": 45463, - "BAT": 47379, - "BB": 15199, - "BBC": 33833, - "BC": 2749, - "BD": 14529, - "BE": 12473, - "BER": 13246, - "BF": 29499, - "BG": 40469, - "BI": 3483, - "BIL": 19676, - "BILITIES": 49516, - "BILITY": 25382, - "BILL": 39888, - "BIP": 47772, - "BIT": 26094, - "BL": 9148, - "BLE": 19146, - "BLIC": 32936, - "BM": 12261, - "BN": 15766, - "BO": 8202, - "BOOK": 39453, - "BOX": 39758, - "BP": 20866, - "BR": 11473, - "BRE": 40438, - "BS": 4462, - "BSD": 21800, - "BT": 19313, - "BTC": 35964, - "BU": 19499, - "BUG": 12953, - "BUR": 38926, - "BUS": 45346, - "BUT": 47526, - "BW": 48802, - "BY": 17513, - "Ba": 34458, - "Baby": 36534, - "Back": 7282, - "Background": 21756, - "Bad": 22069, - "Bah": 47514, - "Bal": 24597, - "Balance": 45866, - "Ball": 23410, - "Balt": 41312, - "Baltimore": 46139, - "Ban": 30457, - "Band": 31407, - "Bang": 43984, - "Bank": 28650, - "Bar": 10374, - "Barn": 47359, - "Bas": 15522, - "Base": 14881, - "Based": 15001, - "Basic": 26416, - "Basically": 31524, - "Bat": 24541, - "Batman": 37039, - "Battery": 47006, - "Battle": 24064, - "Bay": 15262, - "Be": 3856, - "Bear": 36352, - "Beast": 41490, - "Beat": 34979, - "Beaut": 38413, - "Bec": 39649, - "Because": 8128, - "Beck": 43454, - "Bed": 45896, - "Bee": 49512, - "Beer": 49802, - "Before": 8421, - "Beg": 24586, - "Begin": 44140, - "Beginning": 45198, - "Beh": 25267, - "Behind": 34163, - "Being": 18357, - "Bel": 12193, - "Bell": 36488, - "Below": 21106, - "Ben": 11696, - "Bench": 44199, - "Benef": 42166, - "Benz": 42484, - "Ber": 24814, - "Bern": 23927, - "Bernie": 33433, - 
"Berry": 25215, - "Besides": 23937, - "Best": 13014, - "Bet": 13056, - "Beta": 43303, - "Better": 28971, - "Between": 25262, - "Bey": 21993, - "Beyond": 24102, - "Bi": 23286, - "Big": 12804, - "Bill": 17798, - "Billy": 49640, - "Bind": 36180, - "Bio": 42787, - "Bir": 50091, - "Bird": 42562, - "Birth": 38480, - "Bit": 13128, - "Bitcoin": 22614, - "Bl": 3629, - "Black": 9915, - "Blade": 47520, - "Blake": 37849, - "Ble": 43413, - "Block": 12235, - "Blocks": 45356, - "Blog": 42383, - "Blood": 21659, - "Bloom": 38941, - "Bloomberg": 47696, - "Blu": 38676, - "Blue": 14573, - "Bo": 16635, - "Board": 29828, - "Bob": 18861, - "Body": 25842, - "Bomb": 48478, - "Bon": 20682, - "Bone": 49580, - "Bonus": 29435, - "Boo": 46120, - "Book": 10482, - "Books": 30650, - "Boost": 45686, - "Boot": 36476, - "Border": 34189, - "Born": 28524, - "Boss": 37310, - "Boston": 31710, - "Bot": 20630, - "Both": 10265, - "Bott": 28653, - "Bottom": 34104, - "Bound": 49646, - "Bow": 39961, - "Box": 14253, - "Boy": 26554, - "Br": 9414, - "Bra": 42333, - "Brad": 30805, - "Brain": 44687, - "Brand": 38416, - "Brandon": 45467, - "Brave": 39787, - "Brazil": 39190, - "Bre": 12679, - "Break": 31737, - "Breaking": 29449, - "Brend": 48015, - "Brew": 44029, - "Brexit": 40730, - "Brian": 24761, - "Bride": 47148, - "Bridge": 37385, - "Brien": 20118, - "Bright": 41267, - "Bring": 31416, - "Brit": 17959, - "Britain": 37114, - "British": 25631, - "Bro": 15783, - "Broad": 30507, - "Bron": 18760, - "Brook": 45534, - "Brother": 39461, - "Brow": 32635, - "Brown": 20644, - "Browser": 46532, - "Bruce": 38509, - "Bs": 37000, - "Bu": 38374, - "Buff": 36474, - "Buffer": 28632, - "Bug": 25624, - "Build": 15580, - "Builder": 32875, - "Building": 25954, - "Built": 39582, - "Bul": 33481, - "Bull": 39549, - "Bur": 22991, - "Burn": 29053, - "Bus": 16286, - "Bush": 36113, - "Business": 24749, - "But": 1537, - "Button": 21864, - "Buy": 14518, - "Buyable": 39693, - "BuyableInstoreAndOnline": 40242, - "Buzz": 48230, - "By": 3886, - 
"ById": 48364, - "Byte": 40778, - "Bytes": 45992, - "C": 34, - "CA": 8141, - "CAN": 44565, - "CAP": 33177, - "CAR": 20034, - "CAST": 44647, - "CB": 23199, - "CBC": 29208, - "CBS": 22923, - "CC": 4093, - "CCC": 46361, - "CD": 8610, - "CDC": 47667, - "CE": 5222, - "CENT": 43960, - "CEO": 46691, - "CEPT": 42006, - "CF": 22495, - "CG": 39816, - "CH": 3398, - "CHA": 49285, - "CHAPTER": 41481, - "CHAR": 38019, - "CHAT": 31542, - "CHECK": 50084, - "CHO": 44899, - "CHQ": 47831, - "CHR": 37846, - "CI": 25690, - "CIA": 49732, - "CL": 5097, - "CLA": 16827, - "CLAIM": 48778, - "CLASS": 31631, - "CLASSIFIED": 45449, - "CLE": 29931, - "CLOSE": 32737, - "CLUD": 39149, - "CLUS": 28332, - "CM": 24187, - "CN": 44175, - "CNN": 18474, - "CO": 8220, - "COL": 25154, - "COLOR": 46786, - "COM": 9858, - "COMPLE": 41335, - "CON": 10943, - "CONCLUS": 47542, - "CONT": 37815, - "COR": 44879, - "CP": 8697, - "CPU": 36037, - "CR": 9419, - "CRE": 43387, - "CRIP": 36584, - "CRIPTION": 40165, - "CS": 7902, - "CSS": 49155, - "CT": 4177, - "CTV": 30428, - "CU": 43633, - "CV": 33538, - "CVE": 31436, - "CW": 43538, - "Ca": 24334, - "Cache": 30562, - "Cal": 9771, - "Calif": 19619, - "California": 25284, - "Call": 14134, - "Callback": 47258, - "Calling": 48593, - "Cam": 21701, - "Camera": 35632, - "Camp": 21111, - "Campaign": 46102, - "Can": 6090, - "Canada": 17940, - "Canadian": 28203, - "Cand": 41572, - "Cap": 15610, - "Capital": 39315, - "Capt": 19209, - "Captain": 27898, - "Capture": 49630, - "Car": 9914, - "Card": 16962, - "Care": 17784, - "Carl": 26886, - "Cart": 43476, - "Carter": 49958, - "Cas": 35155, - "Case": 20448, - "Cash": 35361, - "Cass": 43529, - "Cast": 19248, - "Cat": 21979, - "Catal": 39075, - "Catalog": 49015, - "Category": 27313, - "Cath": 39581, - "Catholic": 48919, - "Cause": 42323, - "Cele": 42741, - "Cell": 28780, - "Cent": 19085, - "Center": 23656, - "Central": 30645, - "Cert": 37608, - "Certain": 26469, - "Certainly": 36001, - "Ch": 1925, - "Chain": 35491, - "Chair": 43189, - 
"Chall": 41812, - "Champ": 48507, - "Chan": 48407, - "Chance": 43606, - "Change": 19400, - "Changed": 31813, - "Changes": 29238, - "Changing": 48333, - "Channel": 29239, - "Chapter": 14126, - "Char": 12441, - "Character": 27275, - "Characters": 48393, - "Charg": 28316, - "Charge": 50044, - "Charges": 36970, - "Charl": 24453, - "Charles": 28711, - "Charlie": 37136, - "Chart": 45488, - "Chat": 30820, - "Che": 7376, - "Check": 9787, - "Chel": 38292, - "Chelsea": 41053, - "Chem": 41829, - "Chest": 45170, - "Chicago": 25705, - "Chicken": 45565, - "Chief": 23675, - "Child": 16424, - "Children": 26829, - "China": 14581, - "Chinese": 23604, - "Chip": 49985, - "Cho": 22164, - "Choice": 46770, - "Choose": 31851, - "Chris": 15645, - "Christ": 10684, - "Christian": 20298, - "Christmas": 44614, - "Christopher": 38025, - "Chuck": 44324, - "Church": 46686, - "Circ": 31560, - "City": 14941, - "Civil": 32610, - "Cl": 2601, - "Cla": 47404, - "Claim": 44819, - "Clar": 48035, - "Clark": 43250, - "Class": 9487, - "Classic": 39914, - "Cle": 34349, - "Clean": 32657, - "Clear": 19856, - "Clearly": 30638, - "Click": 8164, - "Client": 11792, - "Climate": 37649, - "Clinton": 16549, - "Clock": 44758, - "Close": 26125, - "Closure": 45398, - "Cloud": 18839, - "Club": 42350, - "Cmd": 40109, - "Co": 7222, - "Coach": 40677, - "Cod": 43806, - "Code": 10669, - "Coin": 24387, - "Col": 5216, - "Cola": 28635, - "Cold": 34312, - "Cole": 46509, - "Coll": 22667, - "Collect": 31337, - "Collection": 36307, - "College": 38951, - "Collins": 49645, - "Color": 10258, - "Colorado": 41330, - "Columb": 36063, - "Column": 39470, - "Com": 5377, - "Comb": 20575, - "Combat": 38667, - "Come": 16773, - "Coming": 30804, - "Comm": 6935, - "Command": 21575, - "Comment": 21357, - "Comments": 23903, - "Commerce": 47662, - "Commercial": 48401, - "Commission": 50246, - "Common": 17227, - "Commun": 30813, - "Community": 20012, - "Comp": 7293, - "Compan": 41309, - "Companies": 49111, - "Company": 39154, - "Compar": 50249, - 
"Compare": 41488, - "Compared": 44669, - "Compat": 40073, - "Compl": 38143, - "Complete": 20988, - "Completed": 43768, - "Component": 21950, - "Computer": 34556, - "Con": 3103, - "Conclusion": 21481, - "Cond": 25559, - "Condition": 48362, - "Conf": 18546, - "Config": 16934, - "Configuration": 38149, - "Cong": 18649, - "Congratulations": 45048, - "Congress": 25916, - "Conn": 37321, - "Connect": 13313, - "Connection": 32048, - "Connector": 34525, - "Connell": 15559, - "Connor": 27136, - "Cons": 9444, - "Conservative": 42039, - "Consider": 19626, - "Considering": 40475, - "Console": 47581, - "Const": 34184, - "Construct": 42316, - "Constructed": 25207, - "Construction": 36687, - "Consumer": 49106, - "Cont": 4264, - "Contact": 17829, - "Container": 29869, - "Content": 19746, - "Contents": 15842, - "Context": 21947, - "Contin": 17875, - "Continue": 29453, - "Contract": 45845, - "Contribut": 37146, - "Control": 15988, - "Controller": 22130, - "Cook": 28937, - "Cool": 34530, - "Cooldown": 45953, - "Cop": 13379, - "Copy": 29881, - "Copyright": 15269, - "Cor": 10606, - "Core": 14055, - "Corn": 41389, - "Corp": 45680, - "Correct": 42779, - "Correction": 43267, - "Cos": 36734, - "Cost": 13729, - "Could": 23722, - "Coun": 31053, - "Council": 40940, - "Count": 12332, - "Counter": 31694, - "Country": 33921, - "Cour": 25877, - "Course": 49046, - "Court": 36699, - "Courtesy": 31825, - "Cover": 27245, - "Cow": 40147, - "Cr": 13916, - "Cra": 33800, - "Craft": 14467, - "Craig": 40441, - "Crash": 47598, - "Cre": 12443, - "Creat": 16719, - "Create": 16447, - "Created": 41972, - "Creating": 32071, - "Credit": 23690, - "Credits": 42855, - "Crew": 46724, - "Crime": 45580, - "Crit": 18559, - "Critical": 41000, - "Critics": 36623, - "Cro": 35403, - "Cross": 21544, - "Cru": 27535, - "Crunch": 49384, - "Cruz": 41811, - "Cry": 26677, - "Crypt": 23919, - "Crystal": 43752, - "Cs": 32274, - "Ct": 33707, - "Ctrl": 40069, - "Cu": 46141, - "Cub": 43632, - "Cube": 29071, - "Cur": 26628, - "Current": 
11297, - "Currently": 21327, - "Custom": 15022, - "Customer": 44939, - "Cut": 26254, - "Cy": 20418, - "D": 35, - "DA": 5631, - "DAQ": 46640, - "DATA": 26947, - "DAY": 26442, - "DB": 11012, - "DC": 9697, - "DCS": 49513, - "DD": 16458, - "DE": 7206, - "DEBUG": 30531, - "DEC": 41374, - "DEF": 32988, - "DEM": 39429, - "DEN": 41819, - "DEP": 46162, - "DER": 14418, - "DERR": 49643, - "DES": 30910, - "DEV": 39345, - "DF": 8068, - "DH": 41473, - "DI": 17931, - "DIR": 34720, - "DIS": 26288, - "DIT": 49828, - "DIV": 33569, - "DJ": 35028, - "DK": 48510, - "DL": 19260, - "DM": 23127, - "DN": 35504, - "DNA": 28886, - "DO": 18227, - "DOC": 38715, - "DOM": 39170, - "DON": 41173, - "DOS": 35178, - "DOWN": 41925, - "DP": 6322, - "DR": 7707, - "DS": 5258, - "DT": 24544, - "DVD": 39218, - "DW": 42955, - "DX": 36227, - "Da": 26531, - "Dad": 46270, - "Daddy": 48280, - "Daily": 28545, - "Dallas": 40540, - "Dam": 14550, - "Damage": 22022, - "Damn": 43343, - "Dan": 21174, - "Daniel": 19962, - "Danny": 45478, - "Dar": 32708, - "Dark": 17367, - "Dash": 43041, - "Dat": 27354, - "Data": 6601, - "Database": 38105, - "Date": 10430, - "Dave": 27984, - "David": 11006, - "Davis": 36462, - "Day": 12393, - "Days": 38770, - "Db": 43832, - "De": 5005, - "Dead": 20489, - "Deal": 45776, - "Dean": 36372, - "Dear": 20266, - "Death": 20148, - "Deb": 16587, - "Debug": 27509, - "Dec": 10707, - "December": 20588, - "Decl": 37835, - "Decre": 43198, - "Deep": 29744, - "Def": 7469, - "Default": 19463, - "Defense": 27300, - "Definition": 36621, - "Del": 13856, - "Delete": 38727, - "Delivery": 33129, - "DeliveryDate": 39749, - "Delta": 42430, - "Dem": 11522, - "Demand": 42782, - "Democratic": 33939, - "Democrats": 29969, - "Demon": 35477, - "Den": 21306, - "Denver": 49818, - "Dep": 12156, - "Department": 36261, - "Depending": 41156, - "Deploy": 49322, - "Depth": 48791, - "Depths": 42382, - "Der": 28532, - "Des": 5960, - "Desc": 24564, - "Description": 11828, - "Design": 23067, - "Desk": 42523, - "Desktop": 36881, 
- "Despite": 8332, - "Dest": 24159, - "Destroy": 49174, - "Det": 11242, - "Detailed": 32080, - "Details": 24259, - "Detect": 47504, - "Detroit": 40404, - "Dev": 13603, - "Develop": 19246, - "Developer": 45351, - "Development": 41206, - "Device": 24728, - "Dex": 48875, - "Di": 18683, - "Dial": 24400, - "Dialog": 44204, - "Dialogue": 41099, - "Diamond": 47710, - "Dick": 38743, - "Did": 11633, - "Die": 32423, - "Diff": 28813, - "Different": 40341, - "Dig": 19511, - "Digital": 27640, - "Dim": 29271, - "Dir": 35277, - "Direct": 13470, - "Director": 28702, - "Directory": 43055, - "Dis": 7279, - "Disable": 48893, - "Disc": 15642, - "Disclaimer": 19618, - "Discover": 44596, - "Discuss": 48873, - "Discussion": 34255, - "Disk": 40961, - "Disney": 37338, - "Dispatch": 49354, - "Display": 23114, - "Dist": 20344, - "Distance": 45767, - "District": 44857, - "Div": 24095, - "Do": 5211, - "DoS": 46498, - "Doc": 23579, - "Doctor": 37564, - "Doctors": 47087, - "Document": 24941, - "Documents": 38354, - "Does": 13921, - "Dog": 32942, - "Dom": 24510, - "Domain": 43961, - "Domin": 43417, - "Don": 3987, - "Donald": 7371, - "DonaldTrump": 27674, - "Done": 45677, - "Donnell": 24853, - "Dou": 40287, - "Double": 25628, - "Doug": 42297, - "Down": 8048, - "Download": 10002, - "Downloadha": 41551, - "Dr": 6187, - "Draft": 37741, - "Drag": 46022, - "Dragon": 17808, - "DragonMagazine": 42424, - "Draw": 25302, - "Dream": 30571, - "Dri": 20564, - "Drive": 24825, - "Driver": 32103, - "Dro": 35442, - "Drop": 26932, - "Drug": 37943, - "Ds": 30832, - "Du": 35660, - "Dual": 36248, - "Dub": 37590, - "Due": 22229, - "Dun": 30128, - "Dur": 36927, - "Duration": 26054, - "During": 7191, - "Dust": 43767, - "Dutch": 49717, - "Dynamic": 44090, - "E": 36, - "EA": 16412, - "EAR": 17133, - "EB": 30195, - "EC": 2943, - "ECA": 36600, - "ECD": 27295, - "ECH": 25994, - "ECK": 25171, - "ECT": 9782, - "ECTION": 24565, - "ED": 1961, - "EDIT": 24706, - "EE": 6500, - "EED": 41841, - "EEE": 31909, - "EEEE": 35039, - "EEK": 
33823, - "EEP": 35238, - "EF": 25425, - "EFF": 37267, - "EG": 7156, - "EGA": 33146, - "EGIN": 43312, - "EH": 42413, - "EL": 3698, - "ELD": 24639, - "ELF": 37738, - "ELL": 23304, - "ELS": 37142, - "ELY": 30943, - "EM": 3620, - "EMA": 27630, - "EMBER": 28952, - "EMENT": 12529, - "EMOTE": 36862, - "EMP": 39494, - "EMS": 39201, - "EN": 1677, - "ENA": 45510, - "ENC": 24181, - "ENCE": 18310, - "ENCY": 45155, - "END": 10619, - "ENDED": 49361, - "ENE": 39267, - "ENG": 26808, - "ENGTH": 49494, - "ENN": 34571, - "ENS": 16938, - "ENSE": 24290, - "ENT": 3525, - "ENTION": 45589, - "ENTS": 15365, - "EO": 4720, - "EP": 8905, - "EPA": 40906, - "ER": 1137, - "ERA": 46461, - "ERAL": 27130, - "ERC": 47691, - "ERE": 9338, - "ERG": 49837, - "ERN": 28778, - "ERO": 34812, - "ERROR": 24908, - "ERS": 4877, - "ERSON": 29086, - "ERT": 17395, - "ERY": 19664, - "ES": 1546, - "ESA": 43279, - "ESCO": 43311, - "ESE": 33635, - "ESH": 44011, - "ESPN": 31730, - "ESS": 7597, - "ESSION": 47621, - "EST": 6465, - "EStream": 39906, - "EStreamFrame": 43177, - "ET": 2767, - "ETA": 20892, - "ETF": 22274, - "ETH": 20702, - "ETHOD": 36252, - "ETS": 32716, - "EU": 19684, - "EV": 20114, - "EVA": 27881, - "EW": 6217, - "EX": 6369, - "EXP": 49864, - "EXT": 13918, - "EY": 22348, - "Each": 10871, - "Ear": 8419, - "Earlier": 13689, - "Early": 20457, - "Earn": 49725, - "Earth": 22840, - "East": 25234, - "Eastern": 46109, - "Easy": 28406, - "Eat": 47659, - "Ec": 49136, - "Econom": 28489, - "Economic": 48307, - "Ed": 7407, - "Edge": 37021, - "Edit": 18378, - "Edited": 45882, - "Editor": 17171, - "Educ": 33380, - "Education": 41183, - "Edward": 43982, - "Effect": 18610, - "Effective": 44831, - "Effects": 47738, - "Egypt": 39299, - "Eh": 43894, - "Eight": 29571, - "Either": 32478, - "El": 9527, - "Ele": 28827, - "Elect": 19453, - "Electric": 44132, - "Element": 20180, - "Elf": 46995, - "Elizabeth": 43568, - "Ell": 30639, - "Els": 45507, - "Elsa": 49050, - "Else": 40674, - "Elsewhere": 49374, - "Em": 10161, - "Email": 
15333, - "Emb": 31567, - "Emer": 32779, - "Emergency": 48979, - "Emily": 48640, - "Employ": 29733, - "Empty": 40613, - "En": 4834, - "Enable": 36695, - "Enabled": 20491, - "Enc": 27195, - "End": 12915, - "Energy": 28925, - "Eng": 7936, - "Engine": 13798, - "EngineDebug": 49781, - "Engineers": 28620, - "England": 39163, - "English": 15823, - "Enh": 35476, - "Enhanced": 49026, - "Enjoy": 27467, - "Enlarge": 30952, - "Enough": 47323, - "Ent": 14539, - "Enter": 17469, - "Entity": 32398, - "Entry": 30150, - "Environment": 31441, - "Environmental": 47213, - "Ep": 13807, - "Episode": 23758, - "Equ": 23588, - "Er": 9139, - "Eric": 25004, - "Error": 12331, - "Es": 23041, - "Esc": 47051, - "Especially": 48464, - "Ess": 29508, - "Est": 22362, - "Eth": 40226, - "Euro": 14398, - "Europe": 16112, - "European": 22030, - "Ev": 15200, - "Eva": 44239, - "Even": 6104, - "Event": 9237, - "Events": 37103, - "Eventually": 28724, - "Ever": 23921, - "Every": 6109, - "Everybody": 28172, - "Everyone": 16190, - "Everything": 19693, - "Evidence": 46785, - "Evil": 48477, - "Ex": 3109, - "Exactly": 47173, - "Example": 16281, - "Examples": 27730, - "Exc": 40127, - "Excellent": 45675, - "Except": 30313, - "Exception": 16922, - "Exec": 23002, - "Executive": 43885, - "Exit": 30337, - "Exp": 16870, - "Exper": 20468, - "Experience": 44901, - "Experts": 38897, - "Expl": 18438, - "Explore": 35433, - "Export": 43834, - "Express": 38839, - "Ext": 11627, - "External": 41506, - "Extra": 27726, - "Extreme": 36716, - "Ey": 36287, - "Eye": 24876, - "F": 37, - "FA": 7708, - "FACE": 49836, - "FAQ": 42680, - "FAULT": 38865, - "FB": 26001, - "FBI": 39379, - "FC": 4851, - "FD": 26009, - "FE": 15112, - "FER": 24302, - "FF": 5777, - "FFER": 45746, - "FFFF": 29312, - "FG": 30386, - "FH": 44602, - "FI": 11674, - "FIELD": 44603, - "FIG": 16254, - "FIL": 46700, - "FILE": 25664, - "FIN": 20032, - "FINE": 29940, - "FINEST": 40236, - "FIR": 39776, - "FIX": 47084, - "FK": 26236, - "FL": 3697, - "FLAG": 38948, - "FM": 23264, 
- "FML": 34708, - "FN": 43221, - "FO": 6080, - "FOR": 13775, - "FORE": 30818, - "FORM": 21389, - "FORMATION": 35036, - "FOX": 47853, - "FP": 5837, - "FR": 10913, - "FREE": 39274, - "FS": 10652, - "FT": 9792, - "FTWARE": 37485, - "FU": 38989, - "FUL": 46476, - "FUN": 42296, - "FW": 24160, - "FX": 17213, - "FY": 43833, - "Fa": 50110, - "Fab": 43957, - "Fac": 47522, - "Face": 32388, - "Facebook": 12025, - "Fact": 29054, - "Factor": 41384, - "Factory": 22810, - "FactoryReloaded": 37631, - "Fail": 39044, - "Failure": 50015, - "Fair": 30099, - "Faith": 45536, - "Fake": 49233, - "Fal": 41129, - "Fall": 24750, - "False": 25101, - "Family": 24094, - "Fan": 22480, - "Fans": 36570, - "Far": 21428, - "Farm": 48412, - "Fast": 22968, - "Fat": 33804, - "Father": 34823, - "Favorite": 49434, - "Fax": 46512, - "Fe": 14304, - "Fear": 37798, - "Feature": 38816, - "Featured": 37948, - "Features": 23595, - "Feb": 15146, - "February": 21816, - "Fed": 42268, - "Federal": 24099, - "Feed": 18332, - "Feel": 35114, - "Fel": 42493, - "Female": 27273, - "Fer": 43362, - "Fest": 45139, - "Few": 32351, - "Fi": 10547, - "Field": 15878, - "Fif": 44403, - "Fig": 14989, - "Fight": 27365, - "Fighting": 46375, - "Figure": 11337, - "Fil": 11928, - "File": 8979, - "Filename": 35063, - "Files": 25876, - "Fill": 33762, - "Film": 39750, - "Filter": 22417, - "Fin": 18467, - "Final": 19006, - "Finally": 11158, - "Financial": 43621, - "Find": 16742, - "Finding": 36276, - "Fine": 34389, - "Finish": 48658, - "Fire": 13543, - "First": 5962, - "Firstly": 49709, - "Fish": 39428, - "Fit": 31805, - "Five": 20029, - "Fix": 22743, - "Fixed": 13715, - "Fl": 7414, - "Fla": 47487, - "Flag": 34227, - "Flags": 40053, - "Flash": 30670, - "Fle": 47669, - "Flickr": 47250, - "Flight": 43069, - "Flo": 33574, - "Float": 43879, - "Flor": 26953, - "Florida": 31135, - "Flow": 37535, - "Fly": 33771, - "Flying": 49095, - "Focus": 34888, - "Folder": 41092, - "Follow": 7155, - "Following": 14291, - "Font": 23252, - "FontSize": 38160, - 
"Food": 24602, - "Foot": 17574, - "Football": 37316, - "Footnote": 33795, - "For": 1890, - "Force": 10292, - "Ford": 37308, - "Fore": 16351, - "Foreign": 33616, - "Forest": 34605, - "Forge": 19857, - "ForgeModLoader": 24934, - "Form": 8479, - "Format": 26227, - "Former": 14282, - "Fort": 21926, - "Fortunately": 31276, - "Forward": 39746, - "Found": 21077, - "Four": 15137, - "Fourth": 45530, - "Fox": 19399, - "Fr": 6732, - "Fra": 49562, - "Frag": 42974, - "Fram": 21055, - "Frame": 19778, - "Frames": 35439, - "Frameworks": 42026, - "Fran": 38848, - "Franc": 42885, - "France": 28572, - "Frank": 17439, - "Fre": 20366, - "Fred": 30847, - "Free": 11146, - "Freedom": 38885, - "French": 24111, - "Fresh": 35857, - "Fri": 30214, - "Friday": 20610, - "Friend": 23331, - "Friends": 36705, - "From": 4863, - "Front": 25886, - "Fs": 42388, - "Fu": 41133, - "Fuck": 34094, - "Fuel": 42663, - "Full": 13295, - "Fun": 24629, - "Function": 22203, - "Fund": 24553, - "Further": 13518, - "Furthermore": 24951, - "Future": 29783, - "G": 38, - "GA": 9273, - "GAME": 47109, - "GAN": 45028, - "GB": 4579, - "GBT": 9146, - "GC": 15916, - "GD": 45113, - "GE": 8264, - "GEN": 35353, - "GER": 30373, - "GES": 48075, - "GET": 18851, - "GF": 21713, - "GG": 11190, - "GGGG": 25611, - "GGGGGGGG": 40415, - "GH": 17511, - "GHz": 23741, - "GI": 18878, - "GL": 8763, - "GM": 15548, - "GMT": 49424, - "GN": 16630, - "GO": 11230, - "GOP": 44962, - "GP": 16960, - "GPU": 33346, - "GR": 10761, - "GRE": 28934, - "GREEN": 43016, - "GROUND": 46025, - "GROUP": 46846, - "GS": 14313, - "GT": 19555, - "GU": 38022, - "GUI": 40156, - "GV": 37094, - "GW": 33191, - "GY": 31212, - "Ga": 35389, - "Gab": 46079, - "Gal": 26552, - "Gall": 37122, - "Gallery": 29352, - "Gam": 34777, - "Game": 8777, - "Gameplay": 43241, - "Gamer": 33648, - "Games": 24474, - "Gaming": 45509, - "Gar": 27676, - "Gary": 33820, - "Gas": 39699, - "Gate": 22628, - "Gay": 41787, - "Gaza": 48790, - "Gb": 49017, - "Ge": 10082, - "Gear": 38141, - "Gen": 13746, - 
"Gender": 41394, - "Gene": 39358, - "Gener": 8645, - "General": 12218, - "Generally": 37058, - "Generic": 46189, - "Georg": 33428, - "George": 20191, - "Georgia": 41072, - "Ger": 38069, - "German": 16010, - "Germany": 27079, - "Get": 3855, - "Getting": 20570, - "Getty": 6633, - "Gh": 41126, - "Ghost": 32001, - "Gi": 33704, - "Gil": 40747, - "Girl": 24151, - "Girls": 41044, - "Give": 23318, - "Given": 15056, - "Giving": 49701, - "Gl": 9861, - "Glass": 47698, - "Global": 22289, - "Go": 5247, - "Goal": 49045, - "God": 13482, - "Going": 27404, - "Gold": 13306, - "GoldMagikarp": 42202, - "Golden": 32378, - "Good": 10248, - "Google": 11708, - "Gordon": 47073, - "Got": 30074, - "Gov": 23774, - "Govern": 29168, - "Government": 28848, - "Gr": 8642, - "Gra": 46971, - "Grab": 48400, - "Grad": 42731, - "Grade": 42233, - "Graham": 45821, - "Grand": 23581, - "Grant": 45431, - "Graph": 37065, - "Graphics": 18172, - "Gray": 46130, - "Gre": 43887, - "Great": 13681, - "Greek": 44059, - "Green": 13719, - "Greg": 25025, - "Grey": 49141, - "Grid": 41339, - "Gro": 42921, - "Ground": 35539, - "Group": 13247, - "Growing": 43964, - "Gs": 33884, - "Gu": 8205, - "Guard": 24502, - "Guest": 42481, - "Guide": 47889, - "Gun": 22993, - "Guy": 31080, - "Gy": 44802, - "H": 39, - "HA": 7801, - "HAEL": 47452, - "HAHA": 21271, - "HAHAHAHA": 39021, - "HAM": 33363, - "HB": 32886, - "HC": 16045, - "HCR": 43230, - "HD": 10227, - "HE": 13909, - "HEAD": 37682, - "HER": 16879, - "HF": 29567, - "HH": 16768, - "HHHH": 41100, - "HI": 25374, - "HK": 38730, - "HL": 6581, - "HM": 36905, - "HO": 32298, - "HOME": 39069, - "HOU": 46685, - "HOW": 37181, - "HP": 14082, - "HQ": 41275, - "HR": 17184, - "HS": 7998, - "HT": 6535, - "HTML": 28656, - "HTTP": 40717, - "HUD": 28410, - "HY": 42598, - "Ha": 23303, - "Hack": 32833, - "Had": 25383, - "Hal": 40202, - "Half": 31305, - "Hall": 34194, - "Ham": 21281, - "Hamilton": 45405, - "Han": 29919, - "Hand": 12885, - "Handle": 37508, - "Handler": 25060, - "Happy": 25082, - "Har": 
13587, - "Hard": 17309, - "Hardware": 49865, - "Harris": 41589, - "Harry": 18308, - "Hart": 44719, - "Has": 19242, - "Hash": 26257, - "Hat": 40483, - "Haunted": 46979, - "Have": 11980, - "Having": 14698, - "Haw": 33055, - "Hay": 31306, - "He": 1544, - "Head": 13847, - "Header": 39681, - "Health": 18081, - "Heart": 28541, - "Heat": 39596, - "Heavy": 33210, - "Height": 23106, - "Hel": 12621, - "Hell": 28254, - "Hello": 15496, - "Help": 22087, - "Helper": 47429, - "Hen": 26055, - "Henry": 32476, - "Her": 9360, - "Here": 4342, - "Herm": 48523, - "Hero": 30411, - "Hey": 10814, - "Hi": 17250, - "Hidden": 41691, - "Hide": 38518, - "Hig": 36124, - "High": 11922, - "Higher": 48708, - "Hill": 36369, - "Hillary": 20397, - "His": 6653, - "Hispanic": 43830, - "Hist": 13749, - "History": 18122, - "Hit": 17889, - "Hmm": 44217, - "Ho": 28900, - "Hol": 28115, - "Hold": 26807, - "Holy": 33336, - "Hom": 28718, - "Home": 16060, - "Hon": 29478, - "Honest": 37411, - "Honestly": 40817, - "Hong": 48559, - "Hop": 23483, - "Hope": 34456, - "Hopefully": 32365, - "Hor": 27991, - "Host": 17932, - "Hot": 21352, - "Hour": 43223, - "Hours": 39792, - "House": 18102, - "Houston": 33387, - "How": 2437, - "Howard": 32434, - "However": 4864, - "Http": 43481, - "Hu": 38202, - "Hub": 16066, - "Hug": 48098, - "Huh": 46010, - "Hum": 32661, - "Human": 20490, - "Hun": 25117, - "Hundreds": 38150, - "Hung": 39505, - "Hunt": 47663, - "Hunter": 38803, - "Hur": 42633, - "Hy": 21217, - "Hyd": 40436, - "Hyp": 49926, - "Hyper": 38197, - "Hz": 7399, - "I": 40, - "IA": 3539, - "IAL": 12576, - "IAN": 16868, - "IAS": 43429, - "IB": 9865, - "IBLE": 34563, - "IC": 2149, - "ICA": 25241, - "ICAL": 20151, - "ICAN": 42710, - "ICE": 8476, - "ICES": 34444, - "ICH": 20739, - "ICK": 11860, - "ICLE": 31419, - "ICO": 22707, - "ICS": 19505, - "ICT": 18379, - "ID": 2389, - "IDA": 41957, - "IDE": 14114, - "IDENT": 25256, - "IDER": 41237, - "IDES": 42538, - "IDS": 14255, - "IDs": 47954, - "IE": 10008, - "IED": 19767, - "IELD": 49979, 
- "IENCE": 42589, - "IENT": 28495, - "IER": 38311, - "IES": 11015, - "IF": 5064, - "IFA": 19071, - "IFE": 29150, - "IFF": 29267, - "IFIC": 30643, - "IFIED": 28343, - "IFT": 32297, - "IG": 3528, - "IGH": 18060, - "IGHT": 9947, - "IGHTS": 34874, - "IGN": 16284, - "II": 3978, - "III": 10855, - "IJ": 23852, - "IK": 18694, - "IL": 4146, - "ILA": 47164, - "ILD": 26761, - "ILE": 41119, - "ILL": 8267, - "ILLE": 33844, - "ILS": 45484, - "ILY": 33340, - "IM": 3955, - "IME": 12789, - "IN": 1268, - "INA": 28893, - "INAL": 17961, - "INC": 30158, - "IND": 12115, - "INE": 8881, - "INESS": 44180, - "INFO": 10778, - "ING": 2751, - "INGS": 20754, - "INGTON": 17480, - "INK": 17248, - "INO": 46016, - "INS": 20913, - "INST": 38604, - "INT": 12394, - "INTER": 41358, - "INTON": 46812, - "IO": 9399, - "ION": 2849, - "IONS": 11053, - "IOR": 41254, - "IP": 4061, - "IPP": 31444, - "IPS": 47643, - "IQ": 33866, - "IR": 4663, - "IRC": 49060, - "IRD": 46833, - "IRE": 41736, - "IRED": 37819, - "IRO": 43708, - "IRT": 48771, - "IS": 1797, - "ISA": 22312, - "ISC": 37719, - "ISE": 24352, - "ISH": 18422, - "ISION": 42446, - "ISIS": 29322, - "ISM": 31125, - "ISO": 40734, - "ISON": 39960, - "ISS": 16744, - "ISSION": 40373, - "IST": 8808, - "ISTER": 41517, - "ISTORY": 42480, - "IT": 2043, - "ITAL": 40579, - "ITCH": 31949, - "ITE": 12709, - "ITED": 22061, - "ITH": 10554, - "ITIES": 30383, - "ITION": 17941, - "ITNESS": 46144, - "ITS": 29722, - "ITT": 22470, - "ITY": 9050, - "IU": 44958, - "IUM": 41796, - "IV": 3824, - "IVE": 9306, - "IVER": 38757, - "IVERS": 30194, - "IVES": 42472, - "IX": 10426, - "IZ": 14887, - "IZE": 35400, - "Ian": 37776, - "Ice": 23709, - "Icon": 19578, - "Id": 7390, - "Ide": 41452, - "Ident": 33234, - "If": 1532, - "Ign": 32916, - "Il": 33666, - "Ill": 21478, - "Im": 3546, - "Image": 5159, - "Images": 29398, - "Imagine": 25153, - "Imm": 24675, - "Imp": 26950, - "Impl": 29710, - "Import": 20939, - "Important": 33796, - "Impro": 23028, - "Improve": 47531, - "Improved": 35453, - "In": 
818, - "Inc": 25517, - "Includes": 42986, - "Incre": 15562, - "Increase": 46890, - "Increased": 40281, - "Increases": 28544, - "Ind": 5497, - "Indeed": 17854, - "Independent": 40566, - "Index": 15732, - "India": 21569, - "Indian": 30821, - "Indiana": 49153, - "Individual": 35392, - "Indust": 35848, - "Inf": 18943, - "Info": 12360, - "Information": 21918, - "Ing": 27682, - "Ingredients": 41222, - "Init": 31768, - "Initial": 24243, - "Initialized": 28500, - "Initially": 40443, - "Input": 20560, - "Ins": 20376, - "Insert": 44402, - "Inside": 24441, - "Insp": 41502, - "Inst": 6310, - "Install": 15798, - "Installation": 30838, - "Instance": 33384, - "Instant": 49933, - "Instead": 13193, - "InstoreAndOnline": 40241, - "Instruct": 43993, - "Int": 5317, - "Integ": 34500, - "Integer": 46541, - "Intel": 24123, - "Inter": 9492, - "Interest": 19302, - "Interested": 48860, - "Interestingly": 33092, - "Interface": 39317, - "Intern": 15865, - "Internal": 37693, - "International": 24274, - "Internet": 28566, - "Interstitial": 29447, - "Interview": 39945, - "Introdu": 15005, - "Introduced": 37143, - "Introduction": 21906, - "Inv": 19904, - "Invalid": 44651, - "Invest": 19070, - "Investigators": 33528, - "Iowa": 45186, - "Ir": 23820, - "Iran": 23798, - "Iraq": 31206, - "Ire": 48505, - "Ireland": 49752, - "Irish": 43293, - "Iron": 22797, - "Ironically": 44850, - "Is": 3792, - "Isa": 39443, - "Islam": 16991, - "Islamic": 26723, - "Isn": 41451, - "Israel": 14040, - "Israeli": 29818, - "Iss": 27738, - "Issue": 45147, - "It": 1026, - "Italian": 45696, - "Italy": 45001, - "Item": 7449, - "ItemImage": 25502, - "ItemThumbnailImage": 39177, - "ItemTracker": 47198, - "Items": 23022, - "Iter": 29993, - "Iterator": 37787, - "Its": 20459, - "Iv": 45766, - "J": 41, - "JA": 37048, - "JB": 47858, - "JC": 34382, - "JD": 37882, - "JECT": 23680, - "JJ": 32178, - "JM": 50229, - "JO": 45006, - "JOHN": 47118, - "JP": 12889, - "JR": 44817, - "JS": 20120, - "JSON": 40386, - "JUST": 25008, - "JV": 41697, - 
"Ja": 33186, - "Jac": 28821, - "Jack": 14295, - "Jackson": 31270, - "Jacob": 46751, - "Jake": 43930, - "Jam": 30380, - "James": 14731, - "Jamie": 48337, - "Jan": 12128, - "Jane": 41083, - "January": 21339, - "Japan": 16504, - "Japanese": 25324, - "Jar": 47511, - "Jason": 26497, - "Java": 29584, - "Jay": 30568, - "Je": 40932, - "Jean": 38248, - "Jeff": 19139, - "Jen": 44875, - "Jenn": 35994, - "Jennifer": 43187, - "Jer": 36134, - "Jere": 31579, - "Jeremy": 35623, - "Jerry": 43462, - "Jes": 22290, - "Jess": 34648, - "Jessica": 45572, - "Jesus": 28219, - "Jet": 42273, - "Jew": 23119, - "Jewish": 28240, - "Jews": 47415, - "Jim": 18050, - "Jimmy": 40335, - "Jo": 9908, - "Job": 33308, - "Joe": 19585, - "John": 7554, - "Johnny": 44960, - "Johnson": 25378, - "Join": 18234, - "Joined": 24363, - "Jon": 18219, - "Jonathan": 30365, - "Jones": 25784, - "Jordan": 34522, - "Jose": 23409, - "Joseph": 29458, - "Josh": 23808, - "Joshua": 47740, - "Journal": 25296, - "Joy": 41338, - "Jr": 50123, - "Js": 49044, - "Ju": 33018, - "Jud": 26141, - "Judge": 29511, - "Jul": 16980, - "July": 16157, - "Jump": 36046, - "Jun": 22396, - "June": 15749, - "Just": 5703, - "Justice": 28447, - "Justin": 33229, - "K": 42, - "KA": 25123, - "KB": 22764, - "KC": 36222, - "KE": 7336, - "KEN": 43959, - "KER": 42839, - "KEY": 20373, - "KI": 37845, - "KING": 37286, - "KK": 16601, - "KN": 29132, - "KNOWN": 44706, - "KO": 22328, - "KR": 30758, - "KS": 27015, - "KT": 42176, - "KY": 31159, - "Ka": 37281, - "Kal": 41428, - "Kansas": 43451, - "Kar": 37753, - "Karl": 46063, - "Kat": 25881, - "Kate": 45087, - "Kay": 37247, - "Ke": 8896, - "Keefe": 48122, - "Keep": 15597, - "Keeping": 44815, - "Keith": 46868, - "Kelly": 34831, - "Ken": 27827, - "Kenn": 39324, - "Kent": 42265, - "Kevin": 23865, - "Key": 9218, - "Keys": 40729, - "Kh": 33155, - "Kick": 45390, - "Kid": 48374, - "Kids": 40229, - "Kill": 27100, - "Kim": 26374, - "Kin": 49681, - "Kind": 35854, - "King": 15708, - "Kings": 42912, - "Kit": 20827, - "Kn": 
25095, - "Knight": 44242, - "Know": 23812, - "Knowing": 45648, - "Known": 29870, - "Ko": 48735, - "Krist": 40756, - "Ku": 41733, - "Ky": 30630, - "Kyle": 42516, - "L": 43, - "LA": 13534, - "LAB": 48780, - "LAN": 25697, - "LAND": 28182, - "LB": 30501, - "LC": 5639, - "LCS": 29814, - "LD": 11163, - "LE": 2538, - "LEASE": 22781, - "LECT": 16779, - "LED": 30465, - "LER": 39878, - "LES": 28378, - "LESS": 48481, - "LET": 28882, - "LEY": 25173, - "LG": 41257, - "LGBT": 37701, - "LI": 31271, - "LIB": 40347, - "LIN": 34509, - "LINE": 24027, - "LIST": 45849, - "LL": 3069, - "LLOW": 44765, - "LM": 31288, - "LO": 21982, - "LOAD": 35613, - "LOC": 29701, - "LOCK": 36840, - "LOD": 38543, - "LOG": 25294, - "LOS": 45376, - "LP": 19930, - "LR": 35972, - "LS": 6561, - "LT": 27734, - "LU": 41596, - "LV": 30976, - "LY": 11319, - "La": 14772, - "Lab": 17822, - "Label": 33986, - "Labor": 42230, - "Labour": 32475, - "Lady": 38887, - "Lago": 48694, - "Lair": 40041, - "Lake": 43035, - "Land": 22342, - "Language": 32065, - "Large": 21968, - "Larry": 42918, - "Las": 46898, - "Last": 5956, - "Lastly": 37511, - "Lat": 24220, - "Late": 26302, - "Later": 18602, - "Latest": 39478, - "Latin": 49022, - "Laughs": 34610, - "Laun": 46182, - "Launch": 38296, - "Laura": 43687, - "Law": 16966, - "Lay": 23763, - "Layer": 49925, - "Layout": 32517, - "Le": 3123, - "Lead": 20451, - "Leader": 45009, - "League": 24623, - "Leaks": 17874, - "Lean": 35806, - "Lear": 14961, - "Learn": 20238, - "Learning": 41730, - "Leary": 48487, - "Leave": 35087, - "Led": 42416, - "Lee": 24338, - "Left": 18819, - "Leg": 11484, - "Legal": 38263, - "Legend": 21351, - "Legendary": 24524, - "Len": 30659, - "Length": 24539, - "Lenin": 49036, - "Lens": 49479, - "Leod": 44559, - "Leon": 36185, - "Les": 35882, - "Less": 22058, - "Let": 5756, - "Letter": 45708, - "Lev": 32163, - "Level": 4971, - "Lew": 33450, - "Lewis": 40330, - "Lex": 45117, - "Li": 32304, - "Lib": 25835, - "Liber": 31199, - "Liberal": 46650, - "Library": 23377, - "Lic": 
26656, - "License": 34156, - "Lie": 47918, - "Life": 14662, - "Light": 15047, - "Like": 7594, - "Likewise": 45872, - "Lim": 19352, - "Limit": 39184, - "Limited": 37214, - "Lin": 14993, - "Lind": 43410, - "Line": 13949, - "Link": 11280, - "LinkedIn": 40574, - "Links": 31815, - "Linux": 19314, - "Liquid": 41966, - "Lisa": 44203, - "List": 8053, - "Listen": 23061, - "Listener": 33252, - "Liter": 43460, - "Little": 22253, - "Live": 18947, - "Liverpool": 44232, - "Living": 36376, - "Lo": 27654, - "Load": 8912, - "Loader": 17401, - "Loading": 19031, - "Loc": 33711, - "Local": 14565, - "Located": 43525, - "Location": 14749, - "Lock": 25392, - "Log": 11187, - "Login": 47790, - "London": 23421, - "Long": 14617, - "Look": 8567, - "Looking": 15784, - "Looks": 41102, - "Loop": 39516, - "Lord": 22438, - "Los": 28903, - "Lost": 31042, - "Lot": 48601, - "Lots": 43643, - "Lou": 24016, - "Louis": 32117, - "Love": 18565, - "Low": 20535, - "Lower": 31426, - "Lt": 49578, - "Lu": 25596, - "Lua": 36127, - "Luc": 22946, - "Luck": 35498, - "Luckily": 42332, - "Luke": 30730, - "Lv": 29507, - "Ly": 31633, - "Lyn": 37207, - "M": 44, - "MA": 5673, - "MAC": 44721, - "MAG": 45820, - "MAL": 42126, - "MAN": 10725, - "MAP": 33767, - "MAR": 40569, - "MAS": 31180, - "MAT": 41636, - "MAX": 22921, - "MB": 10744, - "MC": 9655, - "MD": 12740, - "ME": 11682, - "MED": 30733, - "MEN": 49275, - "MENT": 10979, - "MENTS": 28957, - "MER": 29296, - "MET": 47123, - "METHOD": 49273, - "MF": 49800, - "MG": 20474, - "MH": 36208, - "MHz": 25983, - "MI": 8895, - "MIC": 49884, - "MIN": 23678, - "MIT": 36393, - "MJ": 43421, - "MK": 33907, - "ML": 5805, - "MM": 12038, - "MN": 39764, - "MO": 11770, - "MOD": 33365, - "MODE": 49058, - "MON": 27857, - "MORE": 23346, - "MP": 7378, - "MQ": 49215, - "MR": 13599, - "MRI": 40952, - "MS": 5653, - "MSN": 30295, - "MT": 13752, - "MU": 42422, - "MW": 14326, - "MX": 43243, - "MY": 26708, - "Ma": 21467, - "Mac": 14155, - "Mach": 49999, - "Machine": 37573, - "Mad": 18454, - "Made": 
24616, - "Madison": 46845, - "Mag": 13436, - "Magazine": 36028, - "Magic": 22975, - "Magikarp": 41538, - "Magn": 48017, - "Mah": 40936, - "Mail": 25804, - "Main": 13383, - "Major": 24206, - "Make": 12050, - "Maker": 48890, - "Making": 23874, - "Mal": 15029, - "Male": 25486, - "Malley": 33776, - "Man": 5124, - "Management": 48032, - "Manager": 13511, - "Manchester": 40744, - "Mand": 49846, - "Mania": 45844, - "Manufact": 44445, - "Many": 7085, - "Map": 13912, - "Maps": 47010, - "Mar": 7676, - "Marc": 22697, - "March": 16192, - "Marco": 37179, - "Marcus": 35110, - "Marg": 24428, - "Marginal": 36003, - "Maria": 46827, - "Marie": 44507, - "Mario": 42315, - "Mark": 9704, - "Market": 27470, - "Mars": 43725, - "Marsh": 41984, - "Mart": 13143, - "Martin": 24778, - "Marvel": 38864, - "Marx": 45258, - "Mary": 24119, - "Mas": 38224, - "Mask": 45195, - "Mass": 20273, - "Master": 18254, - "Mat": 19044, - "Match": 23850, - "Material": 17518, - "Materials": 41657, - "Math": 37372, - "Matrix": 46912, - "Matt": 13448, - "Matthew": 25372, - "Max": 11518, - "Maximum": 40541, - "May": 6747, - "Maybe": 13300, - "Mayor": 37396, - "Mbps": 47842, - "Mc": 9742, - "McC": 30464, - "Me": 5308, - "Meanwhile": 10294, - "Measure": 47384, - "Meat": 35620, - "Mech": 28452, - "Med": 9921, - "Media": 13152, - "Medic": 39112, - "Medical": 37158, - "Medium": 31205, - "Meet": 29318, - "Meg": 42672, - "Mega": 43471, - "Mel": 21102, - "Mem": 13579, - "Member": 27608, - "Members": 25341, - "Memory": 30871, - "Men": 10418, - "Menu": 23381, - "Mer": 13102, - "Merc": 42981, - "Merit": 21583, - "Mesh": 37031, - "Mess": 36479, - "Message": 12837, - "Met": 9171, - "Meta": 48526, - "Metal": 36790, - "Method": 17410, - "Methods": 46202, - "Metro": 45141, - "Mex": 24670, - "Mexico": 33006, - "Mi": 41541, - "Miami": 41191, - "Mic": 25437, - "Mich": 11180, - "Michael": 13256, - "Michelle": 48736, - "Michigan": 40610, - "Micro": 13031, - "Microsoft": 15905, - "Mid": 22622, - "Middle": 34621, - "Mike": 16073, - "Mil": 
24857, - "Military": 37837, - "Mill": 22603, - "Miller": 33253, - "Min": 9452, - "Mind": 28478, - "Mine": 24461, - "Minecraft": 39194, - "Mini": 39234, - "Minimum": 44046, - "Minnesota": 45670, - "Minor": 39825, - "Mir": 27453, - "Mis": 31281, - "Miss": 17140, - "Missing": 43730, - "Mission": 37057, - "Mist": 49370, - "Mit": 43339, - "Mix": 35608, - "Mo": 16632, - "Mob": 44702, - "Mobil": 47100, - "Mobile": 17066, - "Mod": 5841, - "ModLoader": 24847, - "Mode": 19076, - "Model": 17633, - "Modern": 31439, - "Mods": 24239, - "Module": 26796, - "Moh": 38443, - "Mom": 29252, - "Mon": 9069, - "Monday": 23810, - "Money": 26788, - "Monitor": 35479, - "Monster": 40872, - "Mont": 26031, - "Month": 31948, - "Moon": 31640, - "Moore": 40049, - "Mor": 20044, - "More": 5167, - "Moreover": 24606, - "Morgan": 47184, - "Morning": 42997, - "Mos": 32668, - "Moscow": 49757, - "Most": 6943, - "Mot": 47733, - "Mother": 31398, - "Motion": 45740, - "Motor": 34919, - "Mount": 35452, - "Mouse": 39643, - "Move": 21774, - "Movie": 25097, - "Moving": 33622, - "Mp": 28861, - "MpServer": 31765, - "Mr": 5246, - "Mrs": 27034, - "Ms": 10128, - "Msg": 50108, - "Mu": 33239, - "Much": 20045, - "Mult": 15205, - "Multi": 29800, - "Multiple": 31217, - "Mur": 23830, - "Murray": 49998, - "Mus": 10694, - "Music": 22648, - "Muslim": 17067, - "Muslims": 36452, - "Must": 34320, - "Mut": 41603, - "My": 3666, - "Myth": 41444, - "N": 45, - "NA": 4535, - "NAME": 20608, - "NAS": 18293, - "NASA": 29998, - "NAT": 34259, - "NB": 32819, - "NBA": 32470, - "NBC": 13175, - "NC": 7792, - "ND": 8575, - "NE": 12161, - "NECT": 48842, - "NER": 21479, - "NES": 37379, - "NESS": 31097, - "NET": 12884, - "NEW": 13965, - "NEWS": 49597, - "NEY": 36231, - "NF": 21870, - "NFL": 32078, - "NG": 10503, - "NH": 33863, - "NI": 22125, - "NING": 15871, - "NJ": 41074, - "NK": 46888, - "NL": 32572, - "NM": 32755, - "NN": 6144, - "NO": 15285, - "NOR": 35510, - "NOT": 11929, - "NOTE": 16580, - "NOW": 45669, - "NP": 22182, - "NPR": 38588, - "NR": 
24723, - "NRS": 41256, - "NS": 8035, - "NSA": 47549, - "NT": 11251, - "NULL": 33991, - "NUM": 41359, - "NV": 27159, - "NVIDIA": 38021, - "NW": 27605, - "NY": 12805, - "NYSE": 49430, - "NZ": 37371, - "Na": 26705, - "Name": 5376, - "Names": 36690, - "Nap": 49799, - "Nar": 40059, - "Narr": 45750, - "Nat": 47849, - "Nation": 46108, - "National": 16186, - "Native": 31272, - "Natural": 35364, - "Naturally": 44213, - "Nature": 46934, - "Nav": 30575, - "Naz": 37235, - "Nazi": 31343, - "Nazis": 44527, - "Ne": 8199, - "Neal": 40581, - "Near": 40640, - "Nearly": 27927, - "Need": 23037, - "Neg": 32863, - "Neigh": 46445, - "Neil": 29354, - "Neill": 26538, - "Neither": 27270, - "Net": 7934, - "NetMessage": 25193, - "Netflix": 42826, - "Network": 26245, - "Nev": 43555, - "Never": 12295, - "Nevertheless": 29011, - "New": 3791, - "News": 9980, - "Newsletter": 33031, - "Next": 10019, - "Ni": 34153, - "Nic": 30403, - "Nice": 35284, - "Nich": 46489, - "Nick": 23609, - "Night": 24732, - "Nik": 40979, - "Nin": 36091, - "Nine": 37603, - "Nintendo": 32348, - "Nit": 33772, - "Nitrome": 42066, - "No": 2949, - "Nob": 21191, - "Nobody": 24795, - "Node": 19667, - "Non": 15419, - "None": 14202, - "Nonetheless": 43258, - "Nor": 21991, - "Norm": 35393, - "Normal": 26447, - "Normally": 43625, - "North": 14157, - "Northern": 40495, - "Not": 3673, - "Notable": 45533, - "Note": 6425, - "Notes": 16130, - "Nothing": 18465, - "Notice": 26396, - "Nov": 20795, - "November": 21159, - "Now": 3844, - "Ns": 47503, - "Null": 35067, - "Num": 33111, - "Number": 15057, - "Numbers": 49601, - "Nusra": 39294, - "Nut": 49004, - "O": 46, - "OA": 23621, - "OAD": 41048, - "OB": 9864, - "OC": 4503, - "OCK": 11290, - "OD": 3727, - "ODE": 16820, - "ODUCT": 28644, - "ODY": 33076, - "OE": 27799, - "OF": 19238, - "OFF": 27977, - "OG": 7730, - "OGR": 49656, - "OH": 12096, - "OHN": 27600, - "OIL": 49713, - "OK": 11380, - "OL": 3535, - "OLD": 15173, - "OLOG": 33462, - "OLOGY": 43781, - "OM": 2662, - "OME": 13649, - "ON": 1340, - 
"OND": 18672, - "ONDON": 47383, - "ONE": 11651, - "ONES": 39677, - "ONEY": 48399, - "ONG": 18494, - "ONS": 19213, - "ONSORED": 36406, - "ONT": 35830, - "ONY": 40508, - "OO": 6684, - "OOD": 22808, - "OOK": 15308, - "OOL": 31559, - "OOOO": 23803, - "OOOOOOOO": 47732, - "OP": 3185, - "OPA": 43345, - "OPE": 32135, - "OPER": 31054, - "OPLE": 34354, - "OPS": 30737, - "OR": 1581, - "ORD": 12532, - "ORE": 6965, - "ORED": 32023, - "ORGE": 49697, - "ORK": 14670, - "ORN": 30649, - "ORPG": 49665, - "ORS": 20673, - "ORT": 9863, - "ORTS": 33002, - "ORY": 15513, - "OS": 2640, - "OSE": 14058, - "OSED": 48751, - "OSH": 45704, - "OSP": 47053, - "OSS": 18420, - "OST": 10892, - "OT": 2394, - "OTA": 29009, - "OTAL": 27510, - "OTE": 23051, - "OTH": 26946, - "OTHER": 31858, - "OTO": 26631, - "OTOS": 33291, - "OTS": 33472, - "OTT": 29089, - "OTUS": 39205, - "OU": 2606, - "OUGH": 32632, - "OULD": 24010, - "OUN": 19385, - "OUND": 15919, - "OUNT": 28270, - "OUP": 27755, - "OUR": 11698, - "OURCE": 31033, - "OUS": 20958, - "OUT": 12425, - "OV": 8874, - "OVA": 41576, - "OVER": 41983, - "OW": 3913, - "OWER": 36048, - "OWN": 14165, - "OWS": 22845, - "OX": 48632, - "OY": 21414, - "Oak": 42426, - "Ob": 5944, - "Obama": 15948, - "Obj": 49201, - "Object": 10267, - "Obs": 31310, - "Obviously": 20670, - "Occ": 29223, - "Occup": 47658, - "Ocean": 46607, - "Oct": 12349, - "October": 18517, - "Of": 5189, - "Off": 9362, - "Offic": 12710, - "Office": 27743, - "Officers": 34059, - "Official": 28529, - "Officials": 25883, - "Offline": 28657, - "Offset": 34519, - "Often": 37288, - "Oh": 5812, - "Ohio": 31274, - "Oil": 44142, - "Ok": 18690, - "Okay": 16454, - "Ol": 30098, - "Old": 19620, - "On": 2202, - "Once": 7454, - "One": 3198, - "Online": 14439, - "Only": 10049, - "Ont": 45984, - "Op": 18257, - "Open": 11505, - "Opening": 43093, - "Oper": 18843, - "Operation": 32180, - "Opp": 27524, - "Ops": 41472, - "Opt": 27871, - "Option": 19722, - "Optional": 30719, - "Options": 29046, - "Or": 5574, - "Oracle": 48625, 
- "Orange": 40141, - "Ord": 35422, - "Order": 18743, - "Orderable": 39655, - "Ore": 41543, - "Oregon": 41243, - "Org": 46808, - "Organ": 26121, - "Orig": 11610, - "Origin": 39688, - "Original": 20556, - "Originally": 22731, - "Os": 16748, - "Other": 6395, - "Others": 25599, - "Otherwise": 48059, - "Ott": 49092, - "Our": 5122, - "Out": 7975, - "Output": 26410, - "Outside": 30815, - "Over": 5886, - "Overall": 16350, - "Override": 37961, - "Overview": 29064, - "Own": 23858, - "Owner": 42419, - "Ox": 38208, - "P": 47, - "PA": 4537, - "PAC": 44938, - "PAR": 27082, - "PART": 30709, - "PASS": 47924, - "PATH": 34219, - "PB": 49079, - "PC": 5662, - "PD": 5760, - "PDATE": 14341, - "PDATED": 49316, - "PDF": 20456, - "PE": 11401, - "PER": 18973, - "PET": 47731, - "PF": 42668, - "PG": 6968, - "PH": 11909, - "PHOTOS": 42709, - "PI": 11901, - "PIN": 44032, - "PK": 40492, - "PL": 6489, - "PLA": 45710, - "PLAY": 31519, - "PLE": 16437, - "PLIC": 31484, - "PLIED": 49094, - "PM": 5868, - "PN": 13137, - "PO": 16402, - "POL": 45472, - "POR": 44680, - "PORT": 15490, - "POS": 37997, - "POSE": 48933, - "POST": 32782, - "PP": 10246, - "PR": 4805, - "PRE": 46437, - "PRES": 48296, - "PRESS": 32761, - "PRO": 31190, - "PROV": 41283, - "PS": 3705, - "PT": 11571, - "PU": 5105, - "PUT": 30076, - "Pa": 28875, - "Pac": 18844, - "Pacific": 22933, - "Pack": 11869, - "Package": 27813, - "Pad": 26114, - "Page": 9876, - "Pages": 47798, - "Pain": 38490, - "Pak": 29675, - "Pakistan": 38485, - "Pal": 11531, - "Palest": 32570, - "Palestinian": 35969, - "Pan": 15730, - "Pand": 47206, - "Panel": 26639, - "Paper": 42950, - "Par": 10044, - "Param": 22973, - "Parameter": 36301, - "Parameters": 48944, - "Parent": 24546, - "Parents": 42969, - "Paris": 40313, - "Park": 25478, - "Parser": 46677, - "Part": 7841, - "Particip": 34363, - "Parts": 42670, - "Party": 33553, - "Pass": 14478, - "Password": 35215, - "Past": 34533, - "Pat": 12130, - "Patch": 33952, - "Path": 15235, - "Patrick": 32718, - "Pattern": 47546, - 
"Paul": 12041, - "Pause": 49991, - "Pay": 19197, - "Pe": 6435, - "Peace": 43445, - "Pear": 46262, - "Ped": 43468, - "Pen": 25553, - "Penn": 39899, - "People": 8061, - "Per": 5990, - "Percent": 31905, - "Perfect": 36635, - "Performance": 32273, - "Perhaps": 13710, - "Pers": 30946, - "Person": 15439, - "Personal": 30228, - "Personally": 42322, - "Pet": 25803, - "Peter": 19727, - "Pg": 31743, - "Ph": 2725, - "Phase": 35645, - "Phil": 18673, - "Philadelphia": 42349, - "Philipp": 49680, - "Phill": 41970, - "Phoenix": 36422, - "Phone": 6132, - "Phones": 32212, - "Phot": 27248, - "Photo": 6191, - "Photos": 21197, - "Phys": 43215, - "Physical": 31611, - "Pi": 38729, - "Pic": 39507, - "Pick": 31686, - "Pict": 21300, - "Picture": 28070, - "Pie": 48223, - "Pierre": 36910, - "Pin": 28348, - "Ping": 49806, - "Pink": 41912, - "Pinterest": 35767, - "Pir": 46772, - "Pitt": 47627, - "Pixel": 40809, - "Pl": 3646, - "Place": 27271, - "Plan": 20854, - "Planet": 41801, - "Platform": 37148, - "Play": 11002, - "Player": 14140, - "Players": 24860, - "Playing": 36530, - "Please": 5492, - "Plex": 46383, - "Plot": 43328, - "Plug": 23257, - "Plugin": 37233, - "Plus": 17860, - "Po": 18833, - "Pocket": 45454, - "Pod": 41565, - "Point": 12727, - "Points": 40710, - "Pokemon": 48034, - "Poké": 41386, - "Pokémon": 46602, - "Pol": 8017, - "Police": 9039, - "Policy": 36727, - "Polit": 39866, - "Political": 35443, - "Politics": 43921, - "Poll": 39176, - "Poly": 34220, - "Pont": 48039, - "Pool": 27201, - "Poor": 43920, - "Pop": 16979, - "Pope": 46172, - "Population": 45251, - "Port": 13924, - "Portland": 45330, - "Pos": 21604, - "Position": 26545, - "Post": 6307, - "Posted": 14231, - "Posts": 21496, - "Pot": 25396, - "Power": 13434, - "Pr": 6836, - "Pract": 49515, - "Pre": 6719, - "Pred": 39156, - "Pref": 36698, - "Prem": 24914, - "Premium": 36787, - "Prep": 37534, - "Pres": 25460, - "Present": 34695, - "President": 10364, - "Press": 13800, - "Pretty": 35700, - "Prev": 36854, - "Preview": 48835, - 
"Previous": 21448, - "Previously": 36837, - "Pri": 34487, - "Price": 18124, - "Prim": 23828, - "Primary": 35170, - "Prime": 26405, - "Prin": 47231, - "Princ": 42904, - "Prince": 35784, - "Print": 18557, - "Prior": 22442, - "Priv": 20184, - "Privacy": 48948, - "Private": 29067, - "Pro": 2964, - "Probably": 34784, - "Problem": 40781, - "Process": 18709, - "Produ": 11547, - "Product": 15667, - "Production": 35027, - "Products": 48650, - "Prof": 15404, - "Professional": 49138, - "Professor": 25031, - "Profile": 37046, - "Program": 15167, - "Progress": 32577, - "Project": 16775, - "Prom": 24129, - "Proof": 44683, - "Prop": 24331, - "Property": 21746, - "Pros": 35726, - "Prosecut": 34301, - "Prosecutors": 39401, - "Prot": 19703, - "Protect": 41426, - "Prov": 15946, - "Provider": 29495, - "Proxy": 44148, - "Ps": 12016, - "Psy": 25918, - "PsyNetMessage": 28666, - "Psych": 31923, - "Ptr": 46745, - "Pub": 14876, - "Public": 15202, - "Published": 24492, - "Publisher": 46471, - "Pull": 42940, - "Pur": 30026, - "Purchase": 47651, - "Pure": 49548, - "Push": 49222, - "Put": 11588, - "Putin": 17060, - "Putting": 46399, - "Py": 20519, - "Python": 37906, - "Q": 48, - "QB": 40291, - "QL": 9711, - "QU": 10917, - "QUEST": 35780, - "QUI": 43702, - "QUIRE": 49128, - "Qaeda": 19058, - "Qaida": 41225, - "Qu": 4507, - "Qual": 46181, - "Quality": 35013, - "Quant": 24915, - "Quantity": 31208, - "Que": 15681, - "Queen": 32466, - "Query": 20746, - "Quest": 12166, - "Question": 24361, - "Questions": 35741, - "Queue": 34991, - "Quick": 21063, - "Quite": 44959, - "Quote": 25178, - "Quotes": 23138, - "R": 49, - "RA": 3861, - "RAFT": 44700, - "RAG": 33202, - "RAL": 35296, - "RAM": 24115, - "RANT": 32506, - "RAW": 20530, - "RAY": 30631, - "RB": 27912, - "RC": 7397, - "RD": 35257, - "RE": 2200, - "READ": 15675, - "REAM": 32235, - "REC": 38827, - "RECT": 23988, - "RED": 22083, - "REDACTED": 45999, - "REE": 11587, - "REF": 31688, - "REG": 31553, - "REL": 16448, - "RELATED": 20112, - "REM": 40726, - 
"REP": 35316, - "RES": 19535, - "RESULTS": 46274, - "RET": 26087, - "RF": 32754, - "RFC": 41150, - "RG": 48192, - "RGB": 36982, - "RH": 48587, - "RI": 7112, - "RIC": 41132, - "RIP": 32618, - "RIPT": 46023, - "RL": 7836, - "RM": 29138, - "RN": 42336, - "RNA": 27204, - "RO": 13252, - "ROM": 33676, - "RON": 45806, - "ROR": 16411, - "RP": 20031, - "RPG": 46954, - "RR": 21095, - "RS": 6998, - "RT": 14181, - "RW": 46747, - "RY": 18276, - "Ra": 21762, - "Race": 35157, - "Rachel": 44045, - "Rad": 15546, - "Radio": 26093, - "Rah": 47135, - "Raid": 49043, - "Rail": 44631, - "Rain": 31443, - "Ram": 33754, - "Rand": 38918, - "Random": 29531, - "Range": 17257, - "Rank": 27520, - "Ranked": 36713, - "Rap": 35230, - "Rare": 26737, - "Rat": 29665, - "Rate": 32184, - "Rated": 15322, - "Rather": 27202, - "Rating": 29321, - "Raven": 49098, - "Raw": 27369, - "Ray": 19591, - "Re": 3041, - "Read": 5569, - "Reader": 33634, - "Reading": 36120, - "Ready": 35474, - "Real": 15633, - "Really": 26392, - "Reason": 45008, - "Reb": 28951, - "Rec": 6690, - "Recent": 26446, - "Recently": 24661, - "Recipe": 37523, - "Recomm": 24898, - "Recommend": 41248, - "Recommended": 36171, - "Record": 23739, - "Rect": 45474, - "Red": 7738, - "Redd": 32259, - "Reddit": 22367, - "Redditor": 34832, - "Ref": 8134, - "Refer": 46238, - "Reference": 26687, - "References": 19927, - "Reg": 8081, - "Regarding": 39424, - "Regardless": 27894, - "Region": 47371, - "Register": 38804, - "Registered": 47473, - "Registration": 47133, - "Regular": 40164, - "Reilly": 25819, - "Rel": 6892, - "Related": 9819, - "Relations": 47117, - "Release": 26362, - "Released": 45037, - "Reloaded": 36726, - "Rem": 8413, - "Remember": 16676, - "Remote": 36510, - "Remove": 27914, - "Removed": 45975, - "Ren": 26764, - "Render": 45819, - "Rep": 6207, - "Repe": 47541, - "Repeat": 40322, - "Repl": 39232, - "Reply": 36875, - "Report": 19100, - "Reporting": 42159, - "Reports": 37844, - "Represent": 40171, - "Republic": 15431, - "Republican": 25777, - 
"Republicans": 28455, - "Requ": 16844, - "Request": 18453, - "Required": 37374, - "Requirements": 42249, - "Requires": 39618, - "Res": 4965, - "Research": 25104, - "Researchers": 25606, - "Residents": 42347, - "Resource": 26198, - "Resources": 33236, - "Resp": 19309, - "Response": 31077, - "Rest": 19452, - "Result": 23004, - "Results": 25468, - "Ret": 9781, - "Return": 13615, - "Returns": 35561, - "Reuters": 12637, - "Rev": 18009, - "Review": 14832, - "Reviewed": 40266, - "Reviewer": 35407, - "Revolution": 50237, - "Rew": 30003, - "Reward": 48123, - "Rex": 47389, - "Rh": 38576, - "Rich": 14868, - "Richard": 22245, - "Rick": 33048, - "Right": 11028, - "Ring": 39687, - "River": 42204, - "Ro": 15450, - "Road": 29197, - "Roaming": 27352, - "Rob": 14350, - "Rober": 15924, - "Robert": 19156, - "Roberts": 45487, - "Robin": 40656, - "Rock": 19665, - "Rocket": 50218, - "Rod": 27917, - "Rog": 30417, - "Roger": 43719, - "Rogue": 48163, - "Role": 47445, - "Roll": 26869, - "Rom": 22834, - "Roman": 32454, - "Romney": 42184, - "Ron": 23672, - "Room": 41178, - "Root": 30016, - "Ros": 35740, - "Rose": 31087, - "Ross": 38328, - "Rot": 24864, - "Round": 22685, - "Route": 43401, - "Row": 25166, - "Roy": 32027, - "Royal": 41861, - "Rs": 31273, - "Ru": 40464, - "Rub": 21312, - "Ruby": 32101, - "Rule": 31929, - "Rules": 37766, - "Rum": 47127, - "Run": 10987, - "Runner": 49493, - "Running": 28768, - "Runtime": 41006, - "Rus": 35313, - "Rush": 49942, - "Russ": 10020, - "Russell": 46325, - "Russia": 16347, - "Russian": 16220, - "Rust": 49444, - "Ry": 46987, - "Ryan": 21868, - "S": 50, - "SA": 4090, - "SAM": 49302, - "SAN": 36753, - "SAY": 27358, - "SB": 16811, - "SC": 6173, - "SCP": 48956, - "SD": 10305, - "SE": 5188, - "SEA": 46887, - "SEC": 23683, - "SEE": 36078, - "SELECT": 46506, - "SER": 35009, - "SET": 28480, - "SF": 20802, - "SG": 38475, - "SH": 9693, - "SHA": 37596, - "SHARE": 42597, - "SHIP": 49423, - "SI": 11584, - "SIGN": 46224, - "SIM": 48913, - "SIZE": 33489, - "SK": 18831, - 
"SL": 8634, - "SM": 12310, - "SN": 15571, - "SO": 15821, - "SON": 11782, - "SOURCE": 47690, - "SP": 4303, - "SPA": 50087, - "SPEC": 48451, - "SPONSORED": 37190, - "SQL": 17861, - "SR": 12562, - "SS": 5432, - "SSL": 31127, - "ST": 2257, - "STAR": 46678, - "STAT": 35744, - "STATE": 44724, - "STD": 32147, - "STDOUT": 36886, - "STE": 30516, - "STEM": 25361, - "STEP": 42135, - "STER": 41809, - "STON": 41924, - "STR": 18601, - "STRUCT": 46126, - "SU": 12564, - "SUP": 40331, - "SW": 17887, - "SY": 23060, - "Sa": 33890, - "Sab": 39646, - "Sac": 38318, - "Sad": 26699, - "Sadly": 36725, - "Safe": 31511, - "Safety": 45372, - "Sah": 32194, - "Saharan": 40461, - "Said": 47638, - "Saint": 48615, - "Sal": 19221, - "Sales": 44490, - "Salt": 43061, - "Sam": 16305, - "Same": 30556, - "Sample": 36674, - "Samsung": 32334, - "San": 15017, - "Sand": 18471, - "Sanders": 26747, - "Santa": 42694, - "Sarah": 29284, - "Sat": 20245, - "Saturday": 19844, - "Saudi": 36939, - "Sav": 47362, - "Save": 16928, - "Sax": 41152, - "Say": 25515, - "Sc": 3351, - "Scale": 29990, - "Scan": 33351, - "Scar": 44433, - "Scene": 36542, - "Sch": 14874, - "Sche": 27054, - "School": 26130, - "Science": 26959, - "Scient": 23010, - "Scientists": 29193, - "Scope": 43642, - "Score": 26595, - "Scot": 37559, - "Scotland": 47230, - "Scott": 19040, - "Screen": 23901, - "Screenshot": 34204, - "Script": 7391, - "Scroll": 29261, - "Se": 4653, - "Sea": 37567, - "Sean": 26408, - "Search": 18243, - "Season": 18960, - "Seattle": 34007, - "Sec": 6558, - "Second": 12211, - "Secondly": 44276, - "Secret": 23725, - "Secretary": 38541, - "Section": 16375, - "Secure": 49793, - "Security": 24074, - "See": 6214, - "Seeing": 36314, - "Seg": 41030, - "Sel": 48767, - "Select": 17563, - "Self": 24704, - "Sem": 13900, - "Semitic": 28753, - "Semitism": 25406, - "Sen": 10445, - "Senate": 32998, - "Senator": 29774, - "Send": 25206, - "Senior": 31224, - "Sense": 41166, - "Sensor": 47864, - "Sent": 31837, - "Sep": 19117, - "Sept": 14635, - 
"September": 17543, - "Sequ": 44015, - "Ser": 7089, - "Serial": 32634, - "Series": 27996, - "Seriously": 42338, - "Serv": 11838, - "Server": 10697, - "Service": 16177, - "Services": 31007, - "Session": 36044, - "Set": 7248, - "Setting": 34149, - "Settings": 26232, - "Setup": 40786, - "Seven": 31334, - "Several": 14945, - "Sex": 23398, - "Sexual": 49161, - "Sh": 2484, - "Shadow": 27447, - "Sham": 43478, - "Shape": 33383, - "Shar": 40201, - "Share": 11649, - "Shares": 43566, - "Sharp": 44336, - "She": 3347, - "Shell": 23248, - "Sher": 28782, - "Shield": 33651, - "Shift": 33377, - "Shin": 44592, - "Ship": 25586, - "Shipping": 45169, - "Shock": 31646, - "Shop": 29917, - "Short": 16438, - "Shortly": 30513, - "Shot": 28512, - "Should": 19926, - "Show": 15307, - "Shut": 39079, - "Si": 42801, - "Side": 24819, - "Sign": 11712, - "Sil": 15086, - "Silver": 26766, - "Sim": 8890, - "Similar": 18925, - "Similarly": 28039, - "Simon": 35475, - "Simple": 26437, - "Simply": 35596, - "Sin": 46200, - "Since": 6385, - "Sing": 29974, - "Single": 28008, - "Sir": 22788, - "Sit": 46655, - "Site": 29123, - "Six": 21447, - "Size": 10699, - "Sk": 15739, - "Skill": 35040, - "Skin": 42455, - "Skip": 50232, - "Sky": 22308, - "Sl": 11122, - "Sleep": 40555, - "Slot": 38963, - "Slow": 36423, - "Sm": 7556, - "Small": 18712, - "Smart": 25610, - "Smith": 17919, - "Sn": 16501, - "Snake": 49795, - "Snap": 43826, - "Snow": 28974, - "So": 2396, - "Soc": 37949, - "Social": 20636, - "Socket": 39105, - "Soft": 18380, - "Software": 25423, - "Sol": 36949, - "Solar": 38825, - "Sold": 33873, - "Solid": 46933, - "Solution": 46344, - "Some": 4366, - "Someone": 28211, - "Something": 22210, - "Sometimes": 15468, - "Son": 31056, - "Song": 44241, - "Sony": 32895, - "Soon": 28093, - "Sorry": 14385, - "Sort": 42758, - "Soul": 36315, - "Sound": 21369, - "Sounds": 40825, - "Source": 7416, - "SourceFile": 37226, - "Sources": 21188, - "South": 14942, - "Southern": 44993, - "Sov": 38574, - "Soviet": 40408, - "Sp": 4561, - 
"Space": 14106, - "SpaceEngineers": 31032, - "Spain": 45355, - "Spanish": 43584, - "Spawn": 49855, - "Spe": 5248, - "Speaking": 13887, - "Spec": 22882, - "Special": 13409, - "Specific": 32419, - "Specifically": 48379, - "Spect": 49738, - "Speed": 22785, - "Spell": 31221, - "Sphere": 38882, - "Spider": 41294, - "Spirit": 41910, - "Spl": 26568, - "Split": 41205, - "Spoiler": 31895, - "Spons": 43522, - "Sport": 42576, - "Sports": 18153, - "Spot": 32565, - "Spr": 38454, - "Spread": 44458, - "Spring": 30387, - "Squ": 22266, - "Square": 48011, - "St": 1273, - "Stack": 25896, - "Staff": 31449, - "Stage": 29391, - "Stan": 32140, - "Stand": 15480, - "Standard": 23615, - "Standing": 44196, - "Star": 8248, - "Stars": 29366, - "Start": 10434, - "Starting": 22851, - "Stat": 17126, - "State": 9012, - "Statement": 48682, - "States": 42237, - "Static": 45442, - "Station": 12367, - "Statistics": 48346, - "Stats": 29668, - "Status": 19580, - "Stay": 25681, - "Ste": 7447, - "Steam": 19109, - "Steel": 39807, - "Step": 8600, - "Stephen": 24920, - "Steve": 19206, - "Steven": 28292, - "Stew": 49328, - "Still": 9590, - "Stock": 26207, - "Stone": 34346, - "Stop": 19485, - "Storage": 31425, - "Store": 22658, - "Storm": 32173, - "Story": 11605, - "Str": 13290, - "Stra": 41347, - "Strange": 38114, - "Stre": 30611, - "Stream": 12124, - "Streamer": 28696, - "StreamerBot": 37574, - "Street": 34356, - "Strength": 45027, - "Stretch": 39181, - "Strike": 31584, - "String": 10100, - "Strong": 33004, - "Struct": 44909, - "Stud": 13007, - "Student": 38778, - "Students": 28239, - "Studies": 45833, - "Studio": 41501, - "Study": 39841, - "Sty": 18716, - "Style": 21466, - "Su": 5606, - "Sub": 7004, - "Subject": 19776, - "Submit": 45135, - "Subscribe": 27125, - "Success": 33244, - "Such": 16678, - "Suddenly": 38582, - "Suggest": 43857, - "Sullivan": 47572, - "Sum": 13065, - "Summary": 22093, - "Summer": 33560, - "Sun": 16012, - "Sund": 20602, - "Sunday": 21934, - "Sup": 40784, - "Super": 12442, - "Supp": 
15979, - "Supplement": 42615, - "Support": 15514, - "Supported": 48181, - "Supporters": 49422, - "Sur": 14214, - "Sure": 19457, - "Surv": 34652, - "Sus": 30746, - "Susan": 45842, - "Sw": 10462, - "Swe": 40783, - "Sweet": 36087, - "Switch": 38978, - "Sword": 43117, - "Sy": 13940, - "Sym": 43094, - "Syn": 29934, - "Sync": 28985, - "Synopsis": 49771, - "Syria": 40029, - "Syrian": 42747, - "Sys": 44387, - "System": 11964, - "T": 51, - "TA": 5603, - "TABLE": 38148, - "TAG": 42197, - "TAIN": 30339, - "TB": 22737, - "TC": 4825, - "TD": 21016, - "TE": 9328, - "TED": 36493, - "TER": 5781, - "TERN": 31800, - "TEXT": 32541, - "TEXTURE": 47648, - "TF": 10234, - "TG": 35990, - "TH": 4221, - "THE": 10970, - "THER": 21250, - "THING": 39356, - "THIS": 43559, - "TI": 25621, - "TIME": 34694, - "TING": 48996, - "TION": 24131, - "TIT": 49560, - "TL": 14990, - "TM": 15972, - "TN": 46559, - "TO": 10468, - "TON": 11357, - "TOP": 35222, - "TOR": 32961, - "TP": 7250, - "TPP": 31435, - "TPPStreamerBot": 37579, - "TPS": 28820, - "TR": 5446, - "TRUMP": 42473, - "TRY": 40405, - "TS": 4694, - "TT": 15751, - "TV": 6849, - "TW": 34551, - "TX": 29551, - "TY": 9936, - "TYPE": 25216, - "Ta": 38586, - "Tab": 33349, - "Table": 10962, - "Tact": 45803, - "Tag": 24835, - "Tags": 36142, - "Tai": 47976, - "Take": 12322, - "Taking": 26556, - "Tal": 31466, - "Talk": 25685, - "Talking": 45904, - "Tam": 42061, - "Tan": 45557, - "Tang": 43909, - "Tank": 32978, - "Tap": 45081, - "Tar": 47079, - "Target": 21745, - "Task": 25714, - "Tax": 27017, - "Taylor": 29907, - "Te": 6767, - "TeX": 49568, - "Tea": 49770, - "Team": 15592, - "Tech": 17760, - "Techn": 25574, - "Technical": 45638, - "Technology": 44893, - "Ted": 38972, - "Teen": 45639, - "Tel": 33317, - "Tele": 31709, - "Tell": 24446, - "Tem": 12966, - "Temp": 30782, - "Temperature": 42492, - "Template": 30800, - "Ten": 24893, - "Tenn": 43139, - "Ter": 15156, - "Term": 40596, - "Termin": 44798, - "Terror": 40194, - "Terry": 50241, - "Tes": 36504, - "Tesla": 
41351, - "Test": 14402, - "Testing": 44154, - "Tex": 17005, - "Texas": 21607, - "Text": 8206, - "TextColor": 42470, - "Texture": 32742, - "Textures": 39860, - "Th": 817, - "Thank": 10449, - "Thankfully": 48387, - "Thanks": 9690, - "That": 2504, - "The": 464, - "Their": 14574, - "Theme": 47863, - "Then": 6423, - "Ther": 35048, - "There": 1858, - "Therefore": 26583, - "These": 4711, - "They": 2990, - "Things": 22248, - "Think": 22073, - "Third": 22747, - "Thirty": 38856, - "This": 1212, - "Thom": 37582, - "Thomas": 22405, - "Thompson": 48942, - "Thor": 46765, - "Those": 9627, - "Though": 10915, - "Thousands": 37482, - "Thread": 16818, - "Three": 12510, - "Through": 15046, - "Throughout": 26797, - "Throw": 39431, - "Thu": 39902, - "Thumbnail": 35523, - "ThumbnailImage": 39142, - "Thunder": 45713, - "Thursday": 25381, - "Thus": 19093, - "Ti": 40533, - "Tickets": 43254, - "Tier": 35252, - "Tile": 35103, - "Tim": 14967, - "Time": 7575, - "Timeout": 48031, - "Timer": 48801, - "Times": 28595, - "Tip": 28434, - "Tips": 43368, - "Title": 19160, - "To": 2514, - "Today": 8888, - "Todd": 42817, - "Together": 41631, - "Tok": 19042, - "Token": 30642, - "Tokens": 22906, - "Tom": 13787, - "Tomorrow": 49488, - "Ton": 35416, - "Tonight": 43783, - "Tony": 29387, - "Too": 23307, - "Tool": 25391, - "Tools": 33637, - "Top": 9126, - "Topic": 33221, - "Topics": 25902, - "Tor": 15884, - "Toronto": 31359, - "Torrent": 39286, - "Total": 14957, - "Touch": 35211, - "Tour": 39152, - "Town": 38097, - "Toy": 48236, - "Tr": 2898, - "Tra": 15721, - "Track": 24802, - "Tracker": 35694, - "Trade": 35965, - "Traditional": 48485, - "Train": 44077, - "Training": 44357, - "Trans": 8291, - "Transaction": 48720, - "Transfer": 43260, - "Transform": 41762, - "Translation": 48313, - "Travel": 33074, - "Tre": 31055, - "Tree": 27660, - "Trend": 45461, - "Tri": 14824, - "Trigger": 48344, - "Trivia": 23854, - "Tro": 44095, - "True": 17821, - "Trump": 6170, - "Trust": 33814, - "Truth": 38782, - "Try": 23433, - "Ts": 
33758, - "Tu": 47247, - "Tube": 6876, - "Tue": 41392, - "Tuesday": 26133, - "Tumblr": 39415, - "Tur": 17483, - "Turkey": 31632, - "Turkish": 42872, - "Turn": 17278, - "Tw": 5080, - "Twe": 32665, - "Tweet": 47845, - "Twenty": 34096, - "Twitter": 14254, - "Two": 7571, - "Tx": 46047, - "Ty": 25492, - "Tyler": 46807, - "Typ": 31467, - "Type": 6030, - "Types": 31431, - "Typically": 49321, - "U": 52, - "UA": 34970, - "UAL": 25620, - "UB": 10526, - "UC": 9598, - "UCK": 16696, - "UCT": 18415, - "UD": 8322, - "UE": 8924, - "UES": 35409, - "UF": 36820, - "UFC": 44534, - "UFF": 47588, - "UG": 7340, - "UGC": 31179, - "UGE": 41251, - "UGH": 44004, - "UI": 10080, - "UID": 27586, - "UK": 15039, - "UL": 6239, - "ULAR": 37232, - "ULE": 24212, - "ULL": 9994, - "ULT": 16724, - "ULTS": 35342, - "UM": 5883, - "UME": 38340, - "UMP": 20476, - "UN": 4944, - "UNCH": 47461, - "UNE": 41884, - "UP": 8577, - "UPDATE": 16977, - "UR": 4261, - "URA": 45570, - "URE": 11335, - "URES": 29514, - "URI": 47269, - "URL": 21886, - "URN": 27064, - "URR": 31302, - "URRENT": 39237, - "US": 2937, - "USA": 14053, - "USB": 27155, - "USD": 29072, - "USE": 19108, - "USER": 29904, - "USH": 27143, - "USS": 32835, - "UST": 7759, - "UT": 3843, - "UTC": 17429, - "UTE": 37780, - "UTERS": 14974, - "UTF": 48504, - "UTH": 24318, - "UTION": 35354, - "UU": 30100, - "UV": 31667, - "UX": 31235, - "Ub": 36609, - "Uber": 39018, - "Uh": 34653, - "Uk": 28425, - "Ukraine": 44814, - "Ul": 47920, - "Ult": 16301, - "Ultimate": 47892, - "Ultimately": 27212, - "Ultra": 36122, - "Um": 37280, - "Un": 3118, - "Uncommon": 43023, - "Und": 31319, - "Under": 9203, - "Understanding": 43467, - "Unfortunately": 13898, - "Union": 38176, - "Unique": 40257, - "Unit": 26453, - "United": 17013, - "Unity": 35955, - "Universal": 38747, - "University": 21009, - "Unix": 47000, - "Unknown": 20035, - "Unless": 28042, - "Unlike": 18521, - "Unt": 35792, - "Until": 18273, - "Untitled": 46332, - "Up": 4933, - "Update": 10260, - "Updated": 17354, - "Upgrade": 
44948, - "Upload": 41592, - "Upon": 23792, - "Ur": 16692, - "Urban": 46667, - "Url": 28165, - "Us": 5842, - "Usage": 28350, - "Use": 11041, - "Used": 38052, - "User": 12982, - "Users": 14490, - "Using": 12814, - "Usually": 37887, - "Ut": 18274, - "Utah": 44350, - "V": 53, - "VA": 11731, - "VAL": 23428, - "VALUE": 39488, - "VB": 44526, - "VC": 15922, - "VD": 8898, - "VE": 6089, - "VEL": 18697, - "VEN": 28290, - "VER": 5959, - "VERS": 28884, - "VERSION": 43717, - "VERT": 15858, - "VERTIS": 18000, - "VERTISEMENT": 18679, - "VG": 43490, - "VI": 12861, - "VICE": 27389, - "VID": 11008, - "VIDEO": 42937, - "VIDIA": 13171, - "VIEW": 28206, - "VII": 45529, - "VILLE": 38526, - "VIS": 29817, - "VK": 47191, - "VL": 47468, - "VM": 15996, - "VO": 29516, - "VOL": 44558, - "VP": 8859, - "VPN": 33883, - "VR": 13024, - "VS": 20304, - "VT": 36392, - "VW": 30133, - "Va": 33906, - "Val": 7762, - "Valid": 47139, - "Value": 11395, - "Values": 40161, - "Van": 25298, - "Var": 19852, - "Vari": 23907, - "Variable": 43015, - "Various": 40009, - "Vaults": 33937, - "Ve": 26979, - "Vector": 38469, - "Veh": 37870, - "Vel": 46261, - "Ven": 37522, - "Ver": 13414, - "Vers": 34947, - "Version": 14815, - "Versions": 45150, - "Vert": 42369, - "Very": 16371, - "Veter": 45182, - "Vi": 38432, - "Via": 30754, - "Vice": 47910, - "Vict": 21944, - "Victoria": 49898, - "Video": 10798, - "View": 7680, - "Vill": 42074, - "Viol": 33894, - "Virgin": 34674, - "Virginia": 41017, - "Virtual": 37725, - "Vis": 15854, - "Vision": 44206, - "Visit": 31141, - "Visual": 36259, - "Vo": 42144, - "Voice": 35708, - "Vol": 16598, - "Volume": 31715, - "Vote": 37394, - "Vs": 23266, - "W": 54, - "WA": 15543, - "WAR": 16279, - "WARD": 39743, - "WARE": 33746, - "WARN": 37771, - "WARNING": 31502, - "WASHINGTON": 21793, - "WATCH": 35192, - "WAY": 27285, - "WAYS": 42451, - "WB": 45607, - "WC": 27353, - "WD": 22332, - "WE": 8845, - "WER": 45532, - "WF": 48397, - "WH": 12418, - "WHAT": 32971, - "WHERE": 47357, - "WHO": 41856, - "WI": 
36326, - "WIN": 37620, - "WIND": 28929, - "WINDOWS": 33207, - "WM": 22117, - "WN": 29767, - "WOOD": 49466, - "WOR": 45359, - "WORK": 33249, - "WP": 25527, - "WR": 18564, - "WS": 19416, - "WT": 39386, - "WW": 17947, - "Wa": 33484, - "Wait": 21321, - "Wal": 21902, - "Walk": 35963, - "Walker": 39950, - "Wall": 22401, - "Wallet": 47152, - "Wan": 45681, - "Want": 19633, - "War": 13195, - "Ward": 49021, - "Ware": 38824, - "Warning": 20361, - "Warren": 43464, - "Wars": 41508, - "Was": 16973, - "Washington": 17402, - "Watch": 10723, - "Water": 19184, - "Wave": 39709, - "Way": 25309, - "We": 1135, - "Weak": 44898, - "Weapon": 27632, - "Weapons": 41818, - "Weather": 41865, - "Web": 13908, - "Website": 33420, - "Wed": 19864, - "Wednesday": 27150, - "Week": 20916, - "Weight": 25844, - "Weiss": 48760, - "Welcome": 14618, - "Well": 5779, - "Were": 35653, - "West": 15045, - "Western": 24227, - "Wh": 1199, - "What": 2061, - "Whatever": 21875, - "Whe": 10842, - "Wheel": 45307, - "When": 2215, - "Whenever": 28877, - "Where": 8496, - "Whereas": 48494, - "Whether": 15354, - "Which": 13828, - "While": 3633, - "Whit": 43617, - "White": 12256, - "Who": 8241, - "Whoever": 47896, - "Why": 5195, - "Wi": 31294, - "Wide": 42559, - "Widget": 38300, - "Width": 30916, - "Wik": 33010, - "Wiki": 32603, - "Wikipedia": 48845, - "Wil": 22327, - "Wild": 25946, - "Will": 8743, - "William": 17121, - "Williams": 27869, - "Wilson": 37349, - "Win": 16643, - "Wind": 8731, - "Window": 27703, - "Windows": 11209, - "Wing": 35612, - "Winged": 47418, - "Winner": 48056, - "Winter": 35376, - "Wire": 29451, - "Wisconsin": 49097, - "With": 3152, - "WithNo": 35992, - "Within": 22005, - "Without": 16249, - "Witness": 38670, - "Wo": 49450, - "Wolf": 32069, - "Woman": 48081, - "Women": 18495, - "Wonder": 42337, - "Wood": 22911, - "Word": 26449, - "Words": 37117, - "Work": 12468, - "Working": 28516, - "Works": 23044, - "World": 10603, - "Would": 17353, - "Wow": 22017, - "Wr": 39213, - "Wra": 36918, - "Writ": 20257, - 
"Write": 16594, - "Writer": 34379, - "Writing": 33874, - "Written": 25354, - "Ws": 46456, - "X": 55, - "XL": 32457, - "XM": 37643, - "XP": 27481, - "XT": 25010, - "XX": 8051, - "XXX": 43145, - "XXXX": 24376, - "XY": 34278, - "Xbox": 43377, - "Xi": 42528, - "Y": 56, - "YA": 44947, - "YC": 44816, - "YD": 35755, - "YE": 48743, - "YES": 43335, - "YING": 45761, - "YL": 45448, - "YN": 40760, - "YOU": 36981, - "YP": 48232, - "YR": 38162, - "YS": 16309, - "YY": 26314, - "Yan": 49664, - "Yang": 38663, - "Ye": 35543, - "Yeah": 10995, - "Year": 17688, - "Years": 40630, - "Yellow": 39499, - "Yep": 47834, - "Yes": 5297, - "Yesterday": 28065, - "Yet": 11486, - "Yo": 38101, - "York": 49278, - "You": 1639, - "YouTube": 33869, - "Young": 20917, - "Your": 7120, - "Yu": 40728, - "Z": 57, - "ZA": 34892, - "ZE": 21211, - "ZI": 48926, - "ZX": 40692, - "ZZ": 30148, - "Ze": 36056, - "Zen": 47573, - "Zero": 28667, - "Zip": 41729, - "Zone": 26961, - "[": 58, - "[\"": 14692, - "['": 17816, - "[/": 13412, - "[[": 30109, - "[]": 21737, - "[_": 29795, - "\\": 59, - "\\\"": 7879, - "\\\",": 34607, - "\\\":": 30478, - "\\\">": 38214, - "\\'": 43054, - "\\)": 22725, - "\\-": 41441, - "\\.": 17405, - "\\/": 11139, - "\\/\\/": 45422, - "\\<": 49778, - "\\\\": 6852, - "\\\\\\\\": 13426, - "\\\\\\\\\\\\\\\\": 21807, - "\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\": 34604, - "]": 60, - "]\"": 30866, - "]'": 49946, - "](": 16151, - "])": 12962, - "]),": 46570, - "]).": 35944, - "]);": 36563, - "]+": 48688, - "],": 4357, - "],\"": 17241, - "],[": 38430, - "]-": 45297, - "].": 4083, - "].\"": 29225, - "]:": 5974, - "];": 11208, - "]=": 22241, - "][": 7131, - "][/": 44926, - "]]": 11907, - "]}": 48999, - "^": 61, - "^^": 18237, - "^^^^": 39397, - "^{": 36796, - "_": 62, - "_(": 41052, - "_-": 22955, - "_-_": 31386, - "_.": 44807, - "_>": 49029, - "__": 834, - "___": 17569, - "____": 1427, - "_____": 29343, - "______": 25947, - "_______": 37405, - "________": 2602, - "________________": 4841, - 
"________________________": 32941, - "________________________________": 10221, - "________________________________________________________________": 27193, - "_{": 23330, - "`": 63, - "`,": 47671, - "`.": 44646, - "``": 15506, - "````": 33153, - "a": 64, - "aa": 7252, - "aaa": 46071, - "aaaa": 24794, - "aah": 37500, - "aan": 28340, - "ab": 397, - "aba": 15498, - "abad": 17325, - "abal": 44349, - "abama": 8809, - "aban": 45094, - "aband": 49248, - "abase": 5754, - "abases": 18826, - "abb": 6485, - "abba": 48910, - "abbage": 32061, - "abbit": 14229, - "abbling": 47883, - "abby": 42457, - "abc": 39305, - "abe": 11231, - "abee": 32580, - "abel": 9608, - "abella": 43653, - "aber": 27359, - "abet": 8380, - "abetes": 11064, - "abeth": 9407, - "abetic": 33312, - "abi": 17914, - "abiding": 43056, - "abies": 43256, - "abil": 14991, - "abilia": 48249, - "abilities": 5738, - "ability": 1799, - "abin": 6014, - "abis": 8102, - "abit": 29968, - "abl": 23117, - "able": 540, - "abled": 4510, - "ables": 2977, - "abling": 11716, - "ablish": 17148, - "ablished": 22555, - "ablishment": 25380, - "ablo": 18817, - "ably": 1346, - "abo": 34748, - "abol": 28426, - "abolic": 29304, - "abor": 4820, - "abortion": 32396, - "about": 10755, - "abouts": 27880, - "above": 29370, - "abre": 46241, - "abs": 8937, - "absolute": 48546, - "absolutely": 42994, - "absor": 46303, - "abul": 16665, - "abulary": 22528, - "abus": 46844, - "abuse": 47158, - "abwe": 27050, - "aby": 3930, - "abyte": 37828, - "abytes": 38346, - "ac": 330, - "aca": 22260, - "acan": 50195, - "acas": 40263, - "acc": 4134, - "acca": 43552, - "accept": 13635, - "acceptable": 16037, - "access": 15526, - "accessible": 33780, - "acci": 44456, - "acco": 8679, - "accompan": 41974, - "accompanied": 42588, - "according": 38169, - "account": 23317, - "ace": 558, - "acea": 44977, - "aceae": 48319, - "acebook": 2887, - "aced": 2286, - "acement": 5592, - "acements": 28613, - "acent": 12643, - "aceous": 37797, - "acer": 11736, - "acerb": 22428, - 
"acers": 49908, - "aces": 2114, - "acet": 23253, - "aceutical": 14642, - "acey": 25415, - "ach": 620, - "acha": 34518, - "achable": 34446, - "ache": 4891, - "ached": 2317, - "achel": 9636, - "achelor": 19335, - "acher": 3493, - "achers": 17892, - "aches": 3694, - "achev": 42961, - "achi": 14299, - "achine": 20480, - "aching": 8103, - "achment": 15520, - "acho": 43703, - "acht": 19725, - "achu": 32323, - "achus": 9523, - "achusetts": 9770, - "achy": 35586, - "aci": 32009, - "acia": 47431, - "acial": 18150, - "acid": 46309, - "acies": 13433, - "acing": 4092, - "acio": 48711, - "acion": 49443, - "acious": 14209, - "aciously": 45289, - "acist": 20279, - "acists": 33194, - "acity": 4355, - "ack": 441, - "acked": 6021, - "acker": 10735, - "ackers": 28874, - "acket": 8317, - "ackets": 25180, - "acking": 5430, - "ackle": 20523, - "acks": 4595, - "acky": 36053, - "acl": 37779, - "acle": 6008, - "acles": 9928, - "acly": 39691, - "aclysm": 40605, - "aco": 10602, - "acon": 7807, - "acons": 37256, - "acqu": 43561, - "acre": 12345, - "acs": 16436, - "act": 529, - "acted": 23800, - "acter": 7321, - "acteria": 10634, - "acterial": 44965, - "acters": 19858, - "actic": 12009, - "acting": 27362, - "action": 2673, - "actionDate": 31538, - "actions": 4658, - "activ": 15791, - "activate": 39022, - "activated": 33106, - "activation": 48545, - "active": 5275, - "actively": 33329, - "activity": 21797, - "actly": 24342, - "actor": 11218, - "actory": 9548, - "acts": 8656, - "actual": 50039, - "actually": 37739, - "actus": 34144, - "acular": 12754, - "acus": 48628, - "acy": 1590, - "ad": 324, - "ada": 4763, - "adal": 31682, - "adan": 29157, - "adapt": 42552, - "adas": 38768, - "adata": 14706, - "aday": 43593, - "adays": 20544, - "add": 2860, - "addafi": 32113, - "added": 29373, - "adden": 38014, - "adder": 26676, - "adders": 45940, - "addin": 46782, - "adding": 26872, - "addle": 37382, - "addock": 35509, - "addon": 48078, - "addons": 39996, - "addr": 29851, - "address": 21975, - "addy": 
13218, - "ade": 671, - "aded": 5286, - "adel": 6959, - "adelphia": 8273, - "adem": 36920, - "ademic": 49113, - "aden": 40780, - "adena": 38047, - "adeon": 12424, - "adequ": 16515, - "ader": 5067, - "aders": 9972, - "ades": 2367, - "adesh": 13410, - "adh": 24411, - "adi": 9189, - "adia": 29523, - "adian": 18425, - "adiator": 33716, - "adic": 23876, - "adier": 38868, - "adies": 50192, - "adin": 17072, - "ading": 4980, - "adiq": 48687, - "adish": 48563, - "aditional": 27013, - "adium": 6271, - "adj": 41255, - "adjust": 23032, - "adjusted": 29117, - "adle": 35166, - "admin": 28482, - "administ": 39081, - "ado": 4533, - "adobe": 36752, - "adoes": 46368, - "ador": 7079, - "ados": 22484, - "adow": 4584, - "adows": 9797, - "adr": 41909, - "adra": 49456, - "ads": 5643, - "adult": 49922, - "adv": 32225, - "advant": 13461, - "advert": 17904, - "advertisement": 45876, - "advertising": 34442, - "ady": 4597, - "ae": 3609, - "aea": 44705, - "aed": 8432, - "aeda": 11641, - "ael": 3010, - "aeper": 28235, - "aepernick": 28333, - "aer": 25534, - "aeus": 46052, - "aez": 47246, - "af": 1878, - "afa": 28485, - "afe": 8635, - "afer": 34659, - "afety": 27925, - "aff": 2001, - "affe": 21223, - "affected": 43958, - "affer": 31183, - "affiliated": 46818, - "affle": 30697, - "affles": 48501, - "afi": 19910, - "afia": 22214, - "afort": 24515, - "aft": 14940, - "after": 8499, - "ag": 363, - "aga": 8126, - "again": 17776, - "against": 32826, - "agall": 44906, - "agame": 46746, - "agan": 7329, - "aganda": 43589, - "agar": 32452, - "agara": 38415, - "agascar": 44309, - "agate": 37861, - "age": 496, - "aged": 1886, - "ageddon": 33054, - "agement": 5082, - "agements": 38113, - "agen": 11286, - "agency": 40955, - "agent": 25781, - "agents": 49638, - "ager": 3536, - "agers": 10321, - "ages": 1095, - "agg": 9460, - "agged": 14655, - "agger": 7928, - "agging": 16406, - "aggressive": 49639, - "agh": 10471, - "aghan": 45109, - "aghd": 16650, - "agher": 30450, - "aghetti": 35812, - "agi": 18013, - "agic": 
9083, - "agically": 39066, - "agin": 23183, - "agine": 12756, - "aging": 3039, - "agle": 19345, - "agles": 37803, - "agn": 4660, - "agna": 48669, - "agnar": 30475, - "agne": 21080, - "agnetic": 25145, - "ago": 3839, - "agog": 37300, - "agogue": 32238, - "agon": 1840, - "agonal": 27923, - "agonist": 15239, - "agonists": 36764, - "agons": 34765, - "agos": 48215, - "agra": 45429, - "agram": 6713, - "agraph": 6111, - "agree": 49221, - "ags": 3775, - "agu": 11433, - "ague": 2064, - "agues": 6120, - "agus": 31111, - "agy": 46671, - "ah": 993, - "aha": 12236, - "ahah": 36225, - "ahan": 19210, - "ahar": 37325, - "ahead": 38204, - "ahi": 32810, - "ahime": 49997, - "ahl": 15668, - "ahn": 15386, - "aho": 17108, - "ahon": 30491, - "ahoo": 12992, - "ahs": 39095, - "ahu": 12196, - "ai": 1872, - "aic": 18452, - "aid": 1698, - "aida": 30546, - "aiden": 17538, - "aido": 44354, - "aign": 1784, - "aii": 42648, - "ail": 603, - "aila": 39460, - "ailability": 8994, - "ailable": 1508, - "ailand": 16188, - "ailed": 6255, - "ailing": 11608, - "ails": 1768, - "aily": 3079, - "aim": 1385, - "aiman": 47840, - "aimon": 49438, - "ain": 391, - "aina": 42183, - "aine": 5718, - "ained": 1328, - "ainer": 10613, - "ainers": 50221, - "aining": 1397, - "ainment": 37091, - "ains": 1299, - "aint": 2913, - "aintain": 32725, - "ainted": 14215, - "aints": 6003, - "air": 958, - "aird": 41620, - "aire": 7626, - "aired": 9820, - "aires": 17693, - "airo": 18131, - "airs": 3468, - "airy": 13021, - "ais": 15152, - "ait": 4548, - "aith": 3921, - "aito": 38995, - "aj": 1228, - "aja": 27792, - "aji": 26436, - "ajo": 34944, - "ajor": 1518, - "ak": 461, - "aka": 8130, - "akable": 29033, - "ake": 539, - "aked": 4335, - "akedown": 25817, - "aken": 1685, - "akening": 18800, - "akens": 31627, - "aker": 3110, - "akers": 3979, - "akeru": 43246, - "akery": 33684, - "akes": 1124, - "akespe": 20621, - "akespeare": 20946, - "akh": 11322, - "aki": 8182, - "akia": 21897, - "akin": 27048, - "aking": 868, - "akings": 45665, - 
"akis": 27321, - "ako": 25496, - "akov": 44715, - "akra": 38004, - "aks": 4730, - "aku": 8719, - "akura": 47754, - "akuya": 29863, - "aky": 15492, - "al": 282, - "ala": 6081, - "alach": 33786, - "alam": 44949, - "alan": 25786, - "albeit": 45781, - "album": 40916, - "alcohol": 42142, - "ald": 1940, - "alde": 35885, - "aldehyde": 44895, - "aldi": 37566, - "aldo": 41476, - "ale": 1000, - "aleb": 32100, - "aled": 3021, - "aleigh": 30729, - "aler": 36213, - "alert": 44598, - "ales": 2040, - "aley": 16730, - "alez": 22149, - "alf": 1604, - "alg": 14016, - "algia": 47111, - "ali": 7344, - "alia": 9752, - "alian": 7199, - "alias": 26011, - "aliation": 22885, - "alid": 10751, - "alien": 42690, - "align": 31494, - "aligned": 41634, - "alin": 14414, - "aline": 20663, - "aling": 4272, - "alion": 19275, - "alions": 50022, - "alis": 27315, - "alist": 49845, - "alities": 27969, - "ality": 1483, - "alk": 971, - "alker": 20949, - "alking": 18998, - "alks": 23833, - "alky": 18354, - "alkyrie": 21316, - "all": 439, - "alla": 30315, - "allah": 31840, - "allas": 7826, - "alle": 6765, - "alled": 4262, - "allel": 29363, - "allery": 17022, - "alli": 36546, - "allic": 18196, - "alling": 9221, - "allion": 48332, - "allo": 49457, - "alloc": 32332, - "allow": 12154, - "allowed": 40845, - "alloween": 50107, - "allows": 47205, - "alls": 5691, - "ally": 453, - "alm": 38182, - "almost": 28177, - "alo": 7335, - "alog": 11794, - "alogue": 30326, - "alogy": 48909, - "alon": 40755, - "alone": 17749, - "along": 24176, - "alore": 40612, - "alos": 41823, - "alph": 17307, - "alpha": 26591, - "als": 874, - "alsa": 32058, - "alse": 2820, - "alsh": 22114, - "also": 14508, - "alt": 2501, - "alted": 29590, - "alter": 47653, - "altern": 33645, - "alth": 1094, - "although": 16670, - "alties": 10355, - "alty": 6017, - "alus": 46781, - "always": 33770, - "aly": 3400, - "alys": 26266, - "alysed": 47557, - "alyses": 43710, - "alysis": 8767, - "alyst": 21470, - "am": 321, - "ama": 1689, - "amac": 11494, - "amacare": 
11724, - "aman": 10546, - "amar": 39236, - "amara": 47848, - "amaru": 46893, - "amas": 17485, - "amate": 36754, - "amation": 14755, - "amaz": 45983, - "amazon": 33103, - "amb": 4131, - "amba": 31842, - "amber": 7789, - "ambers": 16368, - "ambling": 15366, - "ambo": 22651, - "amboo": 27708, - "amd": 28745, - "ame": 480, - "amed": 2434, - "ameda": 49637, - "amel": 17983, - "ameless": 39942, - "amen": 41763, - "ament": 3263, - "amental": 6860, - "aments": 12604, - "amer": 2382, - "amera": 18144, - "ameron": 41639, - "ames": 1047, - "ami": 6277, - "amia": 49442, - "amic": 18127, - "amide": 37905, - "amiliar": 19968, - "amily": 5993, - "amin": 5669, - "amina": 18891, - "amination": 24979, - "amine": 9862, - "aminer": 32086, - "amines": 41047, - "aming": 3723, - "amins": 24937, - "amiya": 38241, - "aml": 43695, - "amm": 6475, - "ammad": 26035, - "ammed": 10573, - "ammers": 36846, - "ammu": 49487, - "ammy": 46736, - "amn": 34684, - "amo": 18811, - "amon": 16487, - "among": 35131, - "amorph": 37670, - "amoto": 25384, - "amount": 17287, - "amous": 10877, - "amp": 696, - "ampa": 13299, - "amped": 13322, - "amph": 28474, - "amphetamine": 31262, - "amping": 37843, - "ampion": 6734, - "ampions": 4350, - "ampire": 13577, - "ampires": 27933, - "ample": 1403, - "amples": 12629, - "ampoo": 40239, - "amps": 9430, - "ampton": 23427, - "ampunk": 46183, - "ams": 4105, - "amsung": 30136, - "amura": 37324, - "amus": 25509, - "amy": 14814, - "an": 272, - "ana": 2271, - "analy": 38200, - "analysis": 20930, - "anamo": 33524, - "anan": 27870, - "anas": 15991, - "anasia": 45551, - "anc": 1192, - "anca": 42124, - "ance": 590, - "anced": 2903, - "ancel": 21130, - "ancer": 8250, - "ancers": 20811, - "ances": 1817, - "anch": 3702, - "anche": 6362, - "anches": 12140, - "anchester": 8911, - "anchez": 20364, - "ancial": 2783, - "ancies": 16183, - "ancing": 5077, - "anco": 47699, - "ancock": 37077, - "ancouver": 10264, - "ancy": 3883, - "and": 392, - "anda": 5282, - "andal": 7642, - "andals": 23819, 
- "andan": 42509, - "ande": 40004, - "anded": 12249, - "andel": 33134, - "andem": 30025, - "ander": 4066, - "andering": 42454, - "anders": 45070, - "andestine": 35887, - "andi": 26800, - "anding": 27225, - "andise": 18888, - "ando": 25440, - "andom": 3749, - "andon": 5063, - "andowski": 44391, - "andr": 46273, - "andra": 15918, - "andre": 49078, - "andro": 28092, - "android": 19411, - "ands": 1746, - "andum": 25933, - "andy": 10757, - "ane": 1531, - "aned": 22739, - "aneers": 33547, - "aneous": 11655, - "aneously": 27683, - "anes": 7305, - "aney": 22297, - "ang": 648, - "anga": 16484, - "angan": 37089, - "ange": 858, - "anged": 5102, - "angel": 8368, - "angelo": 46525, - "anger": 2564, - "angered": 19041, - "angering": 49470, - "angers": 6606, - "anges": 6231, - "angible": 39639, - "anging": 4924, - "angle": 9248, - "angled": 22393, - "angler": 49910, - "angles": 27787, - "angling": 27499, - "ango": 14208, - "angs": 27725, - "angu": 2303, - "anguage": 9000, - "anguages": 33213, - "anguard": 23521, - "angular": 21413, - "ani": 3216, - "ania": 5411, - "anian": 38336, - "anic": 26277, - "anical": 36684, - "anie": 34166, - "aniel": 6321, - "anim": 11227, - "animal": 41607, - "animate": 45685, - "animous": 45873, - "aning": 7574, - "anish": 7115, - "anism": 48162, - "anity": 19689, - "anium": 15776, - "ank": 962, - "anka": 15927, - "anke": 49200, - "anked": 14076, - "ankind": 28066, - "anking": 15230, - "anks": 2283, - "anky": 39556, - "anmar": 21708, - "ann": 1236, - "anna": 7697, - "annabin": 43655, - "annah": 25761, - "anne": 21952, - "anned": 3577, - "annel": 4276, - "annels": 8961, - "anners": 15672, - "anni": 31296, - "annie": 42883, - "annis": 45017, - "annon": 8825, - "annot": 34574, - "announced": 43499, - "anny": 7737, - "ano": 5733, - "anoia": 30661, - "anol": 22012, - "anon": 36902, - "anooga": 42165, - "anos": 40015, - "another": 29214, - "anova": 40993, - "anqu": 26184, - "ans": 504, - "ansas": 6618, - "anse": 40054, - "ansen": 33807, - "anship": 47086, - 
"ansion": 5487, - "ansk": 34738, - "anski": 44978, - "ansky": 49792, - "ansom": 22011, - "anson": 23103, - "ansson": 44038, - "answer": 41484, - "answered": 31966, - "ant": 415, - "anta": 4910, - "antage": 36403, - "antam": 49653, - "antasy": 34921, - "ante": 12427, - "anted": 4126, - "antes": 39781, - "anth": 29313, - "antha": 32589, - "anthrop": 22178, - "anti": 17096, - "antic": 5109, - "antically": 31589, - "anticipated": 45178, - "antics": 29320, - "antine": 29003, - "anting": 20482, - "antis": 20836, - "antle": 16941, - "antly": 3875, - "anto": 14723, - "antom": 11456, - "anton": 23026, - "antry": 21238, - "ants": 1187, - "anty": 46098, - "antz": 46269, - "anu": 42357, - "anus": 41141, - "anut": 20651, - "anuts": 37555, - "anwhile": 6710, - "any": 1092, - "anya": 34183, - "anyahu": 15966, - "anye": 23495, - "anyl": 34816, - "anyon": 21330, - "anything": 49459, - "anz": 35410, - "anza": 35819, - "ao": 5488, - "aos": 7495, - "ap": 499, - "apa": 32678, - "apache": 43073, - "apan": 2674, - "ape": 1758, - "apeake": 49528, - "aped": 5813, - "apego": 40561, - "aper": 2136, - "apers": 5656, - "apes": 7916, - "apesh": 25490, - "apeshifter": 29554, - "apest": 35746, - "aph": 6570, - "aphael": 34889, - "api": 15042, - "aping": 9269, - "apist": 41690, - "apixel": 48633, - "aple": 24052, - "aples": 28624, - "apo": 41817, - "apolis": 11174, - "apolog": 46407, - "apon": 9184, - "apons": 13486, - "apor": 12687, - "apore": 11656, - "app": 1324, - "appa": 20975, - "apped": 6320, - "append": 33295, - "apper": 11463, - "appers": 46629, - "appiness": 42661, - "apping": 5912, - "appings": 39242, - "apple": 18040, - "application": 31438, - "apply": 39014, - "appointed": 32924, - "appro": 21064, - "appropri": 11488, - "appropriate": 13335, - "appropriately": 45175, - "approved": 29137, - "approximately": 47498, - "apps": 18211, - "appy": 7774, - "aps": 1686, - "apse": 7512, - "apsed": 28361, - "apses": 45903, - "apt": 2373, - "apter": 3429, - "apters": 12126, - "aptic": 32963, - 
"aptop": 45007, - "apult": 41387, - "apy": 12826, - "aq": 30188, - "aqu": 36129, - "aque": 18251, - "aques": 46806, - "aquin": 48734, - "ar": 283, - "ara": 3301, - "arag": 29967, - "arah": 23066, - "arak": 30447, - "aram": 41158, - "aran": 19173, - "arant": 4741, - "arantine": 37996, - "araoh": 33766, - "arat": 34174, - "arate": 30748, - "aration": 10186, - "arations": 24355, - "arb": 38039, - "arbon": 42084, - "arc": 5605, - "arcer": 17649, - "arch": 998, - "arching": 38270, - "archive": 17474, - "archives": 48814, - "archment": 36767, - "archs": 34592, - "archy": 9282, - "arcity": 32689, - "ard": 446, - "arde": 45093, - "arded": 10676, - "arden": 5872, - "ardi": 22490, - "arding": 13493, - "ardless": 14694, - "ardo": 13109, - "ardon": 19917, - "ards": 1371, - "ardy": 39124, - "are": 533, - "area": 20337, - "ared": 1144, - "aredevil": 38281, - "arel": 20318, - "arella": 45494, - "aren": 5757, - "arent": 1580, - "arenthood": 17117, - "arently": 13773, - "arer": 11258, - "arers": 34231, - "ares": 3565, - "arest": 12423, - "aret": 8984, - "areth": 26659, - "arette": 14758, - "arettes": 13890, - "aretz": 48338, - "arez": 19655, - "arf": 37595, - "arg": 853, - "arge": 1376, - "arger": 32270, - "arget": 7641, - "argo": 9448, - "argon": 37920, - "args": 22046, - "argument": 49140, - "ari": 2743, - "aria": 10312, - "arial": 36098, - "arian": 3699, - "arians": 13517, - "ariat": 21621, - "arie": 49173, - "aries": 3166, - "arij": 39010, - "arijuana": 42834, - "arily": 3093, - "arin": 17714, - "arine": 34569, - "aring": 1723, - "ario": 4982, - "arios": 13010, - "arious": 27129, - "aris": 20066, - "arist": 34566, - "arity": 6806, - "arium": 17756, - "arius": 19897, - "ark": 668, - "arkable": 45543, - "arkin": 39027, - "arks": 5558, - "arl": 7063, - "arlane": 49344, - "arling": 30045, - "arm": 1670, - "arma": 10961, - "armac": 32813, - "armed": 12026, - "arming": 18052, - "armor": 40456, - "arms": 8357, - "arn": 1501, - "arna": 28610, - "arnaev": 42311, - "arning": 4228, - 
"aro": 12022, - "aron": 8045, - "aroo": 38049, - "around": 14145, - "arov": 42737, - "arp": 5117, - "arr": 3258, - "arranted": 47940, - "arrass": 9187, - "array": 18747, - "arre": 9624, - "arrell": 47769, - "arrett": 34878, - "arrison": 22472, - "arro": 34852, - "arrow": 6018, - "arry": 6532, - "ars": 945, - "arse": 17208, - "arser": 28198, - "arsh": 5406, - "arsity": 45826, - "arson": 12613, - "art": 433, - "arta": 34202, - "arte": 32074, - "arted": 19112, - "arten": 23996, - "arter": 2571, - "arters": 6137, - "arth": 11999, - "arthed": 36370, - "arthy": 18270, - "article": 20205, - "articles": 26845, - "artifacts": 50179, - "artisan": 19714, - "artist": 49016, - "artment": 1823, - "artments": 32514, - "artney": 41709, - "arton": 41328, - "arts": 5889, - "arty": 25494, - "artz": 13636, - "aru": 11493, - "arus": 20272, - "ary": 560, - "arya": 43898, - "aryl": 36822, - "aryn": 38621, - "as": 292, - "asa": 15462, - "asaki": 33846, - "asant": 8775, - "asar": 42391, - "asc": 3372, - "asca": 42688, - "ascade": 28966, - "ascal": 27747, - "ascar": 37740, - "ascist": 31968, - "ascript": 15961, - "ascular": 14767, - "ascus": 20275, - "ase": 589, - "ased": 839, - "asel": 48038, - "aser": 6005, - "asers": 19865, - "ases": 1386, - "ash": 1077, - "asha": 14715, - "ashed": 5263, - "asher": 31218, - "ashes": 7465, - "ashi": 12144, - "ashing": 2140, - "ashington": 2542, - "ashion": 5880, - "ashtra": 38535, - "asi": 17053, - "asia": 23218, - "asin": 47337, - "asing": 2313, - "asio": 29831, - "asion": 4247, - "asionally": 31775, - "asions": 39327, - "asis": 17765, - "asive": 17443, - "ask": 2093, - "aska": 8480, - "asket": 11715, - "asketball": 14575, - "asking": 30463, - "asks": 6791, - "asley": 30705, - "asm": 8597, - "asma": 11797, - "asms": 34432, - "ason": 888, - "asonable": 17994, - "asonic": 30189, - "asonry": 38950, - "asons": 2812, - "asp": 5126, - "aspberry": 17653, - "asper": 32981, - "aspers": 49412, - "aspx": 31740, - "ass": 562, - "assad": 30178, - "assador": 10623, - 
"assadors": 33429, - "assault": 46635, - "asse": 21612, - "assed": 21390, - "assemb": 34455, - "assembled": 46826, - "assembly": 41873, - "asser": 24929, - "assert": 30493, - "asses": 13978, - "assets": 19668, - "assetsadobe": 41383, - "assi": 46527, - "assian": 46091, - "assic": 31635, - "assies": 46257, - "assin": 44961, - "assing": 19696, - "assion": 11857, - "assis": 20297, - "assisted": 42191, - "assium": 26663, - "assment": 45312, - "asso": 28372, - "associated": 32852, - "assuming": 32935, - "assy": 11720, - "ast": 459, - "asta": 40197, - "aste": 4594, - "asted": 8992, - "aster": 1603, - "astered": 14054, - "astern": 6470, - "asters": 7060, - "astery": 29310, - "astic": 3477, - "astical": 32044, - "astically": 16607, - "astics": 24232, - "asting": 9222, - "aston": 45966, - "astrous": 20168, - "asts": 5773, - "asty": 7833, - "asu": 27345, - "asure": 5015, - "asured": 34006, - "asures": 13846, - "asuring": 45925, - "asury": 11579, - "asus": 40895, - "asy": 4107, - "at": 265, - "ata": 1045, - "atable": 21156, - "ataka": 48088, - "atal": 10254, - "atalie": 30951, - "atan": 39036, - "atana": 43777, - "atar": 9459, - "atari": 35554, - "atars": 40193, - "atch": 963, - "atche": 24809, - "atched": 14265, - "atcher": 34734, - "atches": 20981, - "atchewan": 29736, - "atching": 19775, - "ate": 378, - "atech": 40340, - "ated": 515, - "ateful": 11850, - "ateg": 2397, - "ategic": 47917, - "ategor": 47467, - "ategories": 26129, - "ategory": 11606, - "ategy": 4338, - "atel": 25791, - "atell": 7528, - "atellite": 26493, - "ately": 1286, - "atem": 23900, - "aten": 36686, - "ater": 729, - "ateral": 10534, - "aterasu": 45335, - "atered": 34190, - "aterial": 2273, - "atern": 9205, - "aternal": 14744, - "aternity": 17094, - "aters": 8605, - "ates": 689, - "ateur": 15093, - "ateurs": 45211, - "atever": 3587, - "atform": 3390, - "ath": 776, - "atha": 30921, - "atham": 37520, - "athan": 6696, - "athe": 26221, - "athed": 35932, - "ather": 1032, - "athered": 8638, - "atherine": 15289, 
- "athering": 25545, - "athetic": 18874, - "athi": 44202, - "athing": 26927, - "athlon": 50236, - "athom": 32910, - "athon": 12938, - "aths": 33148, - "athy": 10036, - "ati": 7246, - "atial": 34961, - "atibility": 25901, - "atible": 16873, - "atic": 1512, - "atical": 39056, - "atically": 4142, - "atican": 18245, - "atics": 23372, - "atile": 12610, - "atility": 18486, - "atin": 10680, - "ating": 803, - "atinum": 16881, - "atio": 39485, - "ation": 341, - "ational": 864, - "ationally": 15208, - "ations": 602, - "atis": 37749, - "atisf": 17403, - "atism": 26185, - "ative": 876, - "atively": 9404, - "atives": 2929, - "ativity": 22055, - "atl": 25864, - "atlantic": 43342, - "atmeal": 45280, - "ato": 5549, - "atoes": 15048, - "atography": 45501, - "atom": 37696, - "atomic": 47116, - "aton": 13951, - "atonin": 44248, - "atoon": 23122, - "ator": 1352, - "atorial": 21592, - "atories": 19854, - "atorium": 30732, - "ators": 2024, - "atory": 2870, - "atos": 35492, - "atown": 41079, - "atra": 26066, - "atre": 10562, - "atri": 26646, - "atro": 47756, - "atron": 23484, - "ats": 1381, - "atson": 13506, - "atsu": 19231, - "atsuki": 40063, - "att": 1078, - "atta": 25014, - "attach": 47348, - "attack": 20358, - "attacks": 38458, - "atted": 16898, - "atten": 41769, - "atter": 1436, - "attered": 10228, - "attering": 16475, - "atters": 34387, - "attery": 16296, - "atti": 34891, - "attle": 1999, - "attled": 43535, - "atto": 45807, - "atton": 38680, - "attr": 35226, - "attribute": 42348, - "atts": 30353, - "atu": 33419, - "atum": 21307, - "atur": 2541, - "atural": 2660, - "aturally": 7436, - "aturated": 30192, - "aturation": 36921, - "aturday": 3658, - "aturdays": 39724, - "ature": 1300, - "atures": 6691, - "atus": 7240, - "atz": 27906, - "au": 559, - "auc": 14272, - "aucas": 25205, - "aucus": 16710, - "aucuses": 38271, - "aud": 3885, - "auder": 29233, - "audi": 31330, - "audio": 24051, - "auer": 16261, - "aug": 7493, - "auga": 44718, - "augh": 1567, - "aughed": 13726, - "aughlin": 42730, 
- "aughs": 19256, - "aught": 3413, - "aughter": 3637, - "aughtered": 32734, - "aughters": 13441, - "aughty": 28496, - "aukee": 15263, - "aul": 2518, - "auld": 30406, - "auldron": 45637, - "ault": 1721, - "aults": 13185, - "aum": 26043, - "aun": 1942, - "auna": 32837, - "aunch": 11429, - "aund": 14677, - "aunder": 21118, - "aundering": 23496, - "aunders": 32818, - "aunt": 12968, - "aunted": 20227, - "aunting": 20706, - "auntlet": 32633, - "auntlets": 39695, - "aunts": 43981, - "aur": 2899, - "aura": 33830, - "auri": 35190, - "aurus": 22302, - "aus": 8717, - "ause": 682, - "ausible": 17178, - "aut": 2306, - "auth": 18439, - "authent": 41299, - "author": 9800, - "authored": 39351, - "authorized": 19721, - "authors": 41617, - "autical": 37073, - "aution": 32917, - "autions": 28766, - "auto": 23736, - "automatic": 37800, - "auts": 17712, - "aux": 14644, - "av": 615, - "ava": 4170, - "avage": 33757, - "availability": 47274, - "available": 15182, - "aval": 9226, - "avan": 12421, - "avanaugh": 19872, - "avascript": 16098, - "ave": 1015, - "aved": 9586, - "avement": 44034, - "aven": 4005, - "aver": 8770, - "average": 23913, - "avering": 42610, - "avers": 30400, - "avery": 12447, - "aves": 3080, - "avez": 28851, - "avi": 15820, - "avia": 40543, - "avid": 8490, - "avier": 19492, - "avin": 20637, - "aving": 2703, - "avior": 15759, - "aviour": 37716, - "avis": 23401, - "avoid": 27080, - "avor": 5570, - "avorable": 32006, - "avored": 48275, - "avorite": 19227, - "avour": 29023, - "avy": 2830, - "aw": 707, - "awa": 6909, - "awaited": 41742, - "awan": 43004, - "awar": 48841, - "aware": 9685, - "awareness": 47812, - "awaru": 39008, - "awatts": 46684, - "away": 8272, - "aways": 23949, - "awed": 36825, - "awei": 38247, - "awi": 23368, - "awk": 19301, - "awks": 11890, - "awn": 3832, - "aws": 8356, - "ax": 897, - "axe": 38231, - "axies": 25472, - "axis": 22704, - "axter": 40864, - "axy": 6969, - "ay": 323, - "aya": 11729, - "ayan": 22931, - "aye": 48822, - "ayed": 16548, - "ayer": 
2794, - "ayers": 6962, - "ayette": 27067, - "aying": 8369, - "aylor": 7167, - "ayn": 49987, - "ayne": 43906, - "ays": 592, - "ayson": 34907, - "az": 1031, - "aza": 7056, - "azaar": 34485, - "azaki": 32276, - "azar": 29413, - "azard": 26267, - "aze": 6201, - "azed": 13865, - "azeera": 28535, - "azel": 41319, - "azer": 19178, - "azes": 36096, - "azi": 7761, - "azine": 4994, - "azines": 15742, - "azing": 4070, - "azo": 44299, - "azon": 5168, - "azor": 17725, - "azy": 12582, - "azz": 8101, - "b": 65, - "ba": 7012, - "bable": 33460, - "bably": 11921, - "baby": 40252, - "bach": 19496, - "back": 1891, - "backed": 17078, - "backer": 49978, - "background": 25249, - "backs": 10146, - "bad": 14774, - "bag": 21454, - "bage": 13866, - "bags": 34005, - "bah": 47041, - "bal": 6893, - "balance": 20427, - "balanced": 27753, - "ball": 1894, - "balls": 21591, - "ban": 3820, - "band": 3903, - "bands": 21397, - "bane": 20235, - "bang": 36668, - "bank": 17796, - "banks": 43558, - "bar": 5657, - "bara": 39389, - "bard": 23024, - "bare": 49382, - "bars": 34046, - "bart": 16575, - "bas": 12093, - "base": 8692, - "based": 3106, - "bash": 41757, - "basic": 35487, - "basketball": 21265, - "bass": 42933, - "bat": 8664, - "batch": 43501, - "bath": 37648, - "bats": 50199, - "battle": 38471, - "baugh": 23768, - "baum": 24738, - "bay": 24406, - "bb": 11848, - "bc": 15630, - "bd": 17457, - "bda": 43444, - "be": 1350, - "beam": 40045, - "bean": 14289, - "beans": 44749, - "bear": 33227, - "beard": 39433, - "bearing": 28655, - "beat": 12945, - "beaut": 40544, - "bec": 9423, - "because": 13893, - "becca": 20627, - "beck": 27343, - "becue": 31927, - "bed": 3077, - "bedroom": 36269, - "bee": 20963, - "been": 47436, - "beer": 42428, - "bees": 41712, - "before": 19052, - "begin": 27471, - "beh": 20709, - "behavior": 46571, - "behind": 42200, - "being": 11873, - "beit": 15357, - "bek": 47083, - "bel": 6667, - "bell": 7923, - "below": 35993, - "belt": 37976, - "ben": 11722, - "bench": 26968, - "bender": 
45666, - "bending": 49667, - "benef": 36934, - "benefit": 48649, - "bent": 46119, - "ber": 527, - "bered": 9451, - "berg": 3900, - "berger": 21041, - "berman": 34591, - "bern": 33900, - "bernatorial": 43660, - "berra": 31358, - "berries": 20853, - "berry": 8396, - "bers": 1213, - "bert": 4835, - "berto": 32371, - "berus": 39192, - "bery": 13001, - "bes": 12636, - "best": 13466, - "bestos": 40651, - "bet": 11181, - "beta": 31361, - "bett": 48138, - "better": 27903, - "between": 23395, - "bey": 23454, - "bf": 19881, - "bg": 35904, - "bh": 34369, - "bi": 8482, - "bia": 23339, - "bial": 25200, - "bian": 12210, - "bians": 30071, - "biased": 38002, - "bid": 14065, - "bidden": 37978, - "bie": 12590, - "bies": 29846, - "big": 14261, - "bike": 32256, - "bil": 33473, - "bill": 35546, - "billion": 24540, - "bilt": 34508, - "bin": 8800, - "binary": 39491, - "bind": 21653, - "binding": 30786, - "bing": 4623, - "biology": 43592, - "bird": 16944, - "birds": 32002, - "birth": 24280, - "bis": 41907, - "bish": 31795, - "bishop": 27832, - "bit": 2545, - "bitcoin": 35395, - "bite": 37018, - "bitious": 14228, - "bits": 9895, - "biz": 42189, - "bj": 50007, - "bl": 2436, - "black": 13424, - "blade": 22500, - "blance": 42757, - "blank": 27190, - "blast": 39806, - "ble": 903, - "bleacher": 47975, - "bled": 9342, - "bledon": 49258, - "blem": 11253, - "blems": 22143, - "bler": 43400, - "blers": 43022, - "bles": 7689, - "bley": 43263, - "blind": 27461, - "bling": 11108, - "block": 9967, - "blocking": 41938, - "blocks": 27372, - "blog": 14036, - "blogs": 49096, - "blogspot": 35217, - "blood": 18041, - "blooded": 50132, - "blow": 48619, - "blown": 31290, - "blue": 17585, - "bly": 36874, - "bm": 20475, - "bn": 9374, - "bnb": 31971, - "bo": 2127, - "boa": 48614, - "board": 3526, - "boarding": 27794, - "boards": 12821, - "boat": 24482, - "boats": 46058, - "bodied": 45190, - "body": 2618, - "bol": 28984, - "bold": 36575, - "bole": 45693, - "bolt": 25593, - "bomb": 27657, - "bon": 4189, - "bone": 
15992, - "bones": 35095, - "bons": 23461, - "book": 2070, - "books": 12106, - "bool": 30388, - "boost": 39521, - "boot": 18769, - "bor": 2865, - "border": 20192, - "borg": 23297, - "borgh": 49870, - "born": 6286, - "borne": 13555, - "boro": 21513, - "borough": 17913, - "bors": 32289, - "bos": 39565, - "boss": 42820, - "bot": 13645, - "both": 16885, - "bots": 42478, - "bott": 10985, - "bottom": 22487, - "bound": 7784, - "bour": 6084, - "bourg": 24256, - "bourne": 12544, - "bow": 8176, - "bowl": 36859, - "bows": 25435, - "box": 3524, - "boxes": 29305, - "boxing": 45471, - "boy": 7081, - "boys": 13202, - "bp": 46583, - "bps": 18799, - "br": 1671, - "bra": 16057, - "brace": 46565, - "brain": 27825, - "brainer": 49334, - "bral": 24427, - "brance": 28031, - "brand": 17938, - "branded": 35559, - "braska": 17088, - "brate": 40804, - "brates": 44835, - "bre": 4679, - "bread": 29573, - "break": 9032, - "breaker": 25766, - "breakers": 49295, - "breaking": 13395, - "breaks": 30058, - "bred": 36074, - "breeding": 49705, - "brew": 11269, - "brid": 10236, - "bridge": 9458, - "brids": 40637, - "bright": 29199, - "bring": 48580, - "bringer": 48046, - "bringing": 35749, - "bris": 15311, - "bro": 7957, - "broad": 36654, - "broken": 25826, - "brook": 19094, - "brother": 37343, - "brow": 25367, - "brown": 33282, - "browser": 40259, - "brush": 32680, - "bryce": 32524, - "bs": 1443, - "bsite": 12485, - "bsp": 24145, - "bt": 18347, - "btn": 46118, - "bu": 11110, - "bub": 46176, - "buck": 27041, - "bucks": 18999, - "budget": 37315, - "buf": 29325, - "buff": 36873, - "buffer": 22252, - "bug": 25456, - "bugs": 32965, - "build": 11249, - "builder": 38272, - "builders": 50034, - "building": 16894, - "built": 18780, - "bul": 15065, - "bull": 16308, - "bum": 4435, - "buquerque": 36461, - "bur": 6236, - "burg": 7423, - "burgh": 9228, - "burn": 10899, - "burning": 44313, - "burse": 21780, - "burst": 31961, - "bury": 10711, - "bus": 10885, - "bush": 50231, - "business": 22680, - "buster": 24899, - 
"busters": 30181, - "but": 4360, - "butt": 43059, - "button": 16539, - "buy": 17846, - "by": 1525, - "bye": 16390, - "byn": 14929, - "bys": 48209, - "byss": 15040, - "byte": 26327, - "byter": 36204, - "bytes": 33661, - "c": 66, - "ca": 6888, - "cache": 23870, - "cade": 46395, - "cair": 37155, - "cake": 30560, - "cakes": 37263, - "cal": 9948, - "cale": 38765, - "caliber": 43288, - "call": 13345, - "callback": 47423, - "called": 7174, - "calling": 44714, - "cam": 20991, - "camera": 25695, - "camp": 16544, - "campaign": 35012, - "campus": 43842, - "can": 5171, - "cancer": 48870, - "cand": 46188, - "cano": 35490, - "canon": 49883, - "cap": 11128, - "capacity": 42404, - "cape": 36435, - "capital": 27544, - "capitalist": 49970, - "caps": 27979, - "capt": 27144, - "car": 7718, - "carb": 35684, - "carbon": 29255, - "card": 9517, - "cards": 27761, - "care": 6651, - "carry": 34993, - "cars": 37993, - "cart": 26674, - "cas": 34004, - "case": 7442, - "cases": 33964, - "cash": 30350, - "cast": 2701, - "caster": 17970, - "casters": 26248, - "casting": 19913, - "castle": 18676, - "casts": 40924, - "cat": 9246, - "catch": 40198, - "catching": 50106, - "category": 22872, - "catentry": 39165, - "cation": 30907, - "cats": 24619, - "cause": 25587, - "cb": 21101, - "cc": 535, - "cca": 13227, - "ccess": 1591, - "cci": 35764, - "ccoli": 34544, - "ccording": 2941, - "cd": 10210, - "cdn": 32341, - "ce": 344, - "cean": 5829, - "ceans": 19961, - "ced": 771, - "cedented": 12292, - "cedes": 19285, - "ceed": 2707, - "ceivable": 48054, - "ceive": 15164, - "ceived": 6471, - "ceiver": 39729, - "cel": 5276, - "cele": 49840, - "celer": 7015, - "cell": 3846, - "cellaneous": 25673, - "cellence": 19801, - "cellent": 5666, - "cells": 46342, - "celona": 14308, - "cember": 3273, - "cemic": 40478, - "cence": 43696, - "cend": 15695, - "cens": 42595, - "cent": 1087, - "center": 16159, - "centered": 38050, - "central": 31463, - "centric": 28577, - "century": 14792, - "cephal": 43996, - "cept": 984, - 
"ception": 4516, - "ceptions": 11755, - "ceptive": 25867, - "ceptor": 49492, - "cer": 2189, - "cern": 30903, - "cerned": 49990, - "cerning": 41981, - "cerpt": 17040, - "cers": 7999, - "cert": 22583, - "certain": 39239, - "cery": 12757, - "ces": 728, - "cess": 919, - "cession": 43914, - "cessive": 45428, - "cest": 9165, - "cester": 33187, - "cf": 12993, - "cffff": 31727, - "cffffcc": 31957, - "cfg": 37581, - "cgi": 37157, - "ch": 354, - "cha": 11693, - "chain": 7983, - "chains": 38861, - "chair": 16337, - "chairs": 49655, - "chal": 38009, - "chall": 36747, - "cham": 49869, - "chan": 3147, - "chance": 39486, - "change": 3803, - "changed": 40985, - "changes": 36653, - "changing": 22954, - "channel": 17620, - "channelAvailability": 39757, - "chant": 8907, - "chanted": 28923, - "chapter": 43582, - "char": 10641, - "character": 22769, - "chard": 30215, - "charg": 11121, - "charge": 10136, - "charged": 17200, - "charges": 34948, - "charging": 31498, - "chart": 40926, - "chat": 17006, - "che": 2395, - "cheat": 46799, - "check": 9122, - "checked": 26752, - "checking": 41004, - "checks": 42116, - "ched": 1740, - "chedel": 24015, - "chel": 29232, - "chell": 12398, - "chem": 15245, - "chemical": 31379, - "chemist": 28899, - "chemy": 26599, - "chen": 6607, - "chenko": 45059, - "chens": 29937, - "cheon": 40556, - "cher": 2044, - "chers": 3533, - "chery": 31132, - "ches": 2052, - "chest": 46713, - "chester": 35983, - "chet": 20043, - "chev": 49916, - "chi": 11072, - "chid": 28402, - "chie": 3043, - "chief": 17351, - "chieve": 24957, - "child": 9410, - "children": 17197, - "chin": 24658, - "ching": 10813, - "chini": 45045, - "chio": 40900, - "chip": 35902, - "chlor": 36813, - "chn": 1349, - "chnology": 19587, - "cho": 6679, - "choes": 23001, - "choice": 25541, - "chool": 1251, - "christ": 43533, - "chrom": 28663, - "chrome": 46659, - "chron": 11413, - "cht": 21474, - "chu": 46417, - "chuk": 46019, - "church": 36964, - "chwitz": 36297, - "chy": 29658, - "ci": 979, - "cia": 33743, - 
"cial": 2413, - "cially": 2131, - "ciating": 46136, - "ciation": 17269, - "cible": 37369, - "cience": 4234, - "cient": 3456, - "cientious": 43037, - "cients": 35611, - "cies": 3171, - "cific": 7790, - "cig": 22683, - "cigarette": 46040, - "cigarettes": 32529, - "cil": 2856, - "cill": 20346, - "cin": 17879, - "cing": 2259, - "cious": 4680, - "cipl": 6671, - "cipled": 41296, - "ciples": 6418, - "ciplinary": 29386, - "cipline": 34647, - "circ": 21170, - "circle": 45597, - "cise": 37561, - "cised": 37168, - "cision": 16005, - "cit": 47992, - "citizens": 46801, - "city": 19205, - "cium": 16910, - "cius": 28599, - "civil": 37636, - "ck": 694, - "cker": 15280, - "cki": 49108, - "cking": 44377, - "cknow": 5319, - "cknowled": 33165, - "cko": 37549, - "cks": 4657, - "cl": 565, - "clad": 29853, - "claim": 6604, - "claimed": 12795, - "claimer": 17111, - "clair": 27659, - "clamation": 20931, - "class": 4871, - "classes": 37724, - "classic": 49421, - "classified": 31691, - "clave": 44281, - "claw": 43143, - "cle": 2375, - "clean": 27773, - "clear": 20063, - "cled": 20095, - "cler": 22902, - "clerosis": 31399, - "cles": 5427, - "cli": 44506, - "click": 12976, - "client": 16366, - "cliffe": 33783, - "climate": 42570, - "cling": 8493, - "clinical": 47367, - "clinton": 37821, - "clip": 15036, - "clips": 31945, - "clipse": 17043, - "clock": 15750, - "clone": 21018, - "cloneembedreportprint": 30899, - "close": 19836, - "closed": 20225, - "closure": 17966, - "cloth": 44905, - "cloud": 17721, - "club": 18664, - "clud": 758, - "clude": 9152, - "cluded": 10341, - "cludes": 13955, - "cluding": 6360, - "clus": 2527, - "clusion": 4717, - "clusions": 11539, - "clusive": 5731, - "clusively": 44307, - "cm": 11215, - "cmd": 28758, - "cmp": 48991, - "cms": 46406, - "cn": 31522, - "co": 1073, - "coal": 25140, - "coat": 31434, - "cock": 21517, - "cod": 19815, - "code": 8189, - "coded": 40976, - "codes": 40148, - "coe": 49270, - "cohol": 4857, - "coin": 3630, - "coins": 14624, - "col": 4033, - 
"cold": 36673, - "coll": 26000, - "collar": 37676, - "collect": 33327, - "collection": 43681, - "college": 44107, - "colm": 18414, - "colo": 45745, - "colonial": 49787, - "color": 8043, - "colored": 25717, - "colour": 49903, - "column": 28665, - "com": 785, - "comb": 24011, - "combat": 39969, - "combe": 49325, - "come": 2958, - "comed": 15128, - "comes": 8988, - "comfort": 21598, - "coming": 4976, - "comings": 30715, - "comm": 9503, - "command": 21812, - "comment": 23893, - "comments": 15944, - "commerce": 27061, - "commercial": 36313, - "commit": 41509, - "committee": 26799, - "common": 11321, - "commun": 10709, - "communication": 32560, - "communications": 20860, - "community": 28158, - "comp": 5589, - "compan": 34390, - "company": 39722, - "compatible": 38532, - "competitive": 46131, - "compl": 23855, - "complete": 20751, - "completely": 46699, - "complex": 41887, - "compliance": 47587, - "component": 42895, - "computer": 33215, - "con": 1102, - "concept": 43169, - "concert": 48415, - "cond": 17561, - "condition": 31448, - "conduct": 36495, - "cone": 49180, - "conf": 10414, - "conference": 41124, - "confidence": 39745, - "config": 11250, - "confirmed": 36349, - "cong": 36801, - "coni": 45774, - "conn": 37043, - "connect": 8443, - "connected": 15236, - "connection": 38659, - "conom": 1519, - "cons": 5936, - "conscious": 16796, - "conserv": 38925, - "conservancy": 41215, - "conservative": 43218, - "consider": 44353, - "console": 41947, - "const": 9979, - "constitutional": 18789, - "construct": 41571, - "consumer": 49827, - "consuming": 35873, - "cont": 3642, - "contact": 32057, - "contained": 45964, - "container": 34924, - "containing": 38301, - "content": 11299, - "context": 22866, - "contin": 18487, - "continental": 35415, - "continue": 43043, - "contract": 28484, - "control": 13716, - "controlled": 14401, - "controller": 36500, - "conv": 42946, - "cook": 27916, - "cooked": 46591, - "cookie": 44453, - "cool": 24494, - "coon": 20912, - "coord": 37652, - "cop": 
22163, - "copy": 30073, - "cor": 10215, - "core": 7295, - "corn": 20772, - "correct": 30283, - "corruption": 46260, - "cos": 6966, - "cost": 15805, - "cosystem": 12541, - "cot": 25557, - "cott": 14612, - "could": 24089, - "count": 9127, - "counter": 24588, - "country": 19315, - "cour": 43220, - "course": 17319, - "court": 22230, - "cover": 9631, - "covered": 32111, - "cow": 8232, - "cox": 40359, - "cp": 13155, - "cpp": 20322, - "cpu": 36166, - "cr": 6098, - "craft": 3323, - "crafted": 39160, - "crazy": 50112, - "cre": 7513, - "cream": 36277, - "creat": 20123, - "create": 17953, - "created": 25598, - "creation": 38793, - "creator": 45382, - "credit": 43082, - "creen": 32060, - "crete": 38669, - "crew": 42276, - "cribed": 32968, - "crim": 50086, - "crime": 28126, - "criminal": 45955, - "cript": 6519, - "cription": 6820, - "criptions": 24370, - "crit": 22213, - "critical": 34666, - "cro": 19915, - "croft": 36714, - "crop": 31476, - "cross": 19692, - "crow": 47114, - "cru": 32838, - "cry": 20470, - "crypt": 29609, - "cs": 6359, - "css": 25471, - "csv": 40664, - "ct": 310, - "ctic": 11048, - "ctica": 28914, - "ction": 596, - "ctions": 2733, - "ctive": 14070, - "ctl": 34168, - "ctor": 2715, - "ctors": 5217, - "ctory": 25977, - "ctr": 24087, - "ctrl": 44755, - "ctuary": 15258, - "cture": 48715, - "ctx": 49464, - "cu": 27399, - "cube": 40296, - "cue": 15509, - "cul": 3129, - "cular": 10440, - "culated": 49262, - "culation": 14902, - "cule": 23172, - "cules": 13930, - "culosis": 38767, - "cult": 40820, - "cultural": 30844, - "culture": 25584, - "culus": 17576, - "cum": 36340, - "cup": 25244, - "cur": 22019, - "currency": 34415, - "current": 14421, - "currently": 41745, - "cus": 9042, - "cussion": 36262, - "custom": 23144, - "cut": 8968, - "cuts": 23779, - "cutting": 29753, - "cv": 33967, - "cy": 948, - "cycl": 15539, - "cycle": 13696, - "cycles": 32503, - "cyclop": 22873, - "cyclopedia": 25497, - "cyl": 38801, - "cz": 26691, - "cé": 32682, - "d": 67, - "dB": 36077, - "dL": 
45582, - "da": 6814, - "dad": 47984, - "daily": 29468, - "dain": 27162, - "dal": 31748, - "dale": 14597, - "dam": 11043, - "damage": 28735, - "dan": 25604, - "danger": 38537, - "daq": 48539, - "dar": 27455, - "dark": 21953, - "dash": 42460, - "dat": 19608, - "data": 7890, - "database": 48806, - "date": 4475, - "dated": 8715, - "dates": 19581, - "dating": 38734, - "daughter": 29642, - "day": 820, - "dayName": 45392, - "days": 12545, - "db": 9945, - "dc": 17896, - "dd": 1860, - "dden": 4742, - "dding": 33403, - "dds": 33714, - "de": 2934, - "dead": 25124, - "deal": 31769, - "deals": 14302, - "death": 22595, - "deb": 11275, - "debian": 24689, - "debug": 24442, - "dec": 12501, - "deck": 35875, - "decl": 32446, - "ded": 9395, - "deen": 39060, - "deep": 22089, - "def": 4299, - "default": 12286, - "defense": 19774, - "define": 13086, - "defined": 23211, - "definition": 46758, - "deg": 13500, - "degree": 16863, - "del": 12381, - "delay": 40850, - "delete": 33678, - "dem": 9536, - "demand": 28550, - "democracy": 42017, - "democratic": 41232, - "demon": 26567, - "den": 6559, - "density": 43337, - "dep": 10378, - "depend": 45841, - "dependent": 21186, - "depending": 44023, - "depth": 18053, - "der": 1082, - "derived": 34631, - "des": 8906, - "desc": 20147, - "described": 34869, - "description": 11213, - "design": 26124, - "designed": 30473, - "desktop": 41375, - "despite": 41081, - "dest": 16520, - "destroy": 41659, - "destruct": 35678, - "det": 15255, - "detail": 49170, - "details": 36604, - "determination": 40869, - "dev": 7959, - "develop": 16244, - "developed": 33082, - "development": 31267, - "device": 25202, - "devices": 42034, - "df": 7568, - "dfx": 48753, - "dh": 34985, - "di": 10989, - "diagn": 47356, - "dial": 38969, - "dict": 11600, - "did": 20839, - "didn": 45168, - "die": 11979, - "dies": 25990, - "diff": 26069, - "different": 39799, - "dig": 12894, - "digit": 27003, - "digital": 34725, - "digy": 41923, - "dim": 27740, - "dimension": 46156, - "dimensional": 
19577, - "din": 25194, - "dinand": 41993, - "ding": 12083, - "dir": 15908, - "direct": 12942, - "directed": 34762, - "direction": 37295, - "director": 35248, - "directory": 34945, - "dirty": 49075, - "dis": 6381, - "disable": 40223, - "disabled": 47730, - "disc": 15410, - "disciplinary": 40625, - "discrimination": 42723, - "disk": 39531, - "display": 13812, - "displayText": 31536, - "dist": 17080, - "distance": 30246, - "dit": 5266, - "div": 7146, - "division": 21426, - "dj": 28241, - "dk": 34388, - "dl": 25404, - "dll": 12736, - "dm": 36020, - "dn": 32656, - "do": 4598, - "doc": 15390, - "docker": 45986, - "docs": 31628, - "doctor": 35580, - "doctoral": 44064, - "document": 22897, - "documented": 47045, - "does": 22437, - "doesn": 45084, - "dog": 9703, - "dogs": 22242, - "doi": 34023, - "doing": 19631, - "dollar": 22569, - "dom": 3438, - "domain": 27830, - "dominated": 34475, - "doms": 23686, - "don": 9099, - "donald": 40915, - "done": 28060, - "door": 9424, - "doors": 19559, - "dor": 40180, - "dos": 37427, - "dose": 34436, - "dot": 26518, - "double": 23352, - "down": 2902, - "download": 15002, - "downs": 30371, - "dozen": 44932, - "dp": 26059, - "dq": 49506, - "dr": 7109, - "dra": 32491, - "draft": 35679, - "dragon": 14844, - "draw": 19334, - "drawn": 41549, - "dream": 25966, - "dress": 49380, - "dri": 7553, - "drive": 19472, - "driven": 15808, - "driver": 26230, - "drivers": 36702, - "driving": 24255, - "drm": 49007, - "dro": 22285, - "drop": 14781, - "dropping": 37554, - "drops": 49253, - "drug": 30349, - "dry": 39140, - "ds": 9310, - "dt": 28664, - "du": 646, - "duc": 6077, - "ducers": 41213, - "duct": 2359, - "duction": 11124, - "due": 23301, - "duino": 24493, - "dule": 5950, - "dullah": 23969, - "dump": 39455, - "duration": 32257, - "during": 42122, - "dust": 48859, - "duty": 26278, - "dx": 34350, - "dy": 9892, - "dyl": 30360, - "dylib": 31739, - "e": 68, - "ea": 18213, - "each": 27379, - "ead": 1329, - "eah": 4617, - "eal": 2287, - "ealing": 26919, - 
"ealous": 15746, - "eals": 10621, - "ean": 11025, - "eanor": 17663, - "ear": 451, - "earable": 40816, - "earance": 23435, - "earances": 35630, - "earch": 3679, - "earcher": 50194, - "earchers": 16604, - "eared": 3380, - "earing": 6648, - "early": 11458, - "earned": 39123, - "ears": 4127, - "earth": 16442, - "eas": 30412, - "east": 23316, - "easy": 38171, - "eat": 4098, - "eating": 30041, - "eatured": 20980, - "eatures": 11585, - "eaturing": 31347, - "eb": 1765, - "ebin": 23497, - "ebook": 16497, - "ebra": 37052, - "ebted": 35895, - "ebus": 33209, - "ec": 721, - "eca": 31047, - "ecake": 46557, - "ecast": 43299, - "ecause": 3156, - "ecd": 21142, - "ech": 3055, - "eches": 16672, - "echo": 30328, - "ecided": 35503, - "eco": 47704, - "econom": 13926, - "economic": 17079, - "ect": 478, - "ectar": 44504, - "ected": 11197, - "ection": 3213, - "ective": 13967, - "ectomy": 42505, - "ector": 9250, - "ecycle": 47510, - "ed": 276, - "edIn": 20801, - "eda": 18082, - "edar": 44226, - "eday": 23712, - "edd": 6048, - "edded": 47238, - "eddy": 21874, - "ede": 18654, - "eded": 15395, - "eden": 31829, - "eder": 5702, - "ederal": 2110, - "ederation": 9748, - "edes": 37507, - "edge": 14907, - "edged": 48916, - "edi": 13740, - "edia": 5507, - "edience": 20826, - "edient": 35279, - "edin": 27152, - "eding": 8228, - "edit": 19312, - "edited": 42131, - "edition": 28736, - "editor": 35352, - "edly": 49288, - "edo": 24757, - "edom": 3836, - "eds": 5379, - "edu": 15532, - "educ": 18123, - "educated": 27317, - "education": 40796, - "edy": 4716, - "ee": 1453, - "eed": 2308, - "eeds": 39642, - "eeee": 41591, - "eeks": 32201, - "eele": 26213, - "eely": 45269, - "eem": 13761, - "een": 6429, - "eenth": 28117, - "eeper": 41278, - "eer": 28153, - "eering": 48066, - "eers": 47619, - "ees": 2841, - "eez": 33105, - "ef": 891, - "efe": 22521, - "efeated": 36807, - "efer": 41027, - "eff": 14822, - "effect": 10760, - "effective": 16803, - "effects": 34435, - "effic": 24531, - "efficiency": 45888, - 
"efficient": 16814, - "efficients": 41945, - "efined": 18156, - "eful": 13839, - "efully": 7549, - "eg": 1533, - "ega": 26470, - "egal": 39839, - "eger": 11893, - "egg": 33856, - "egu": 15703, - "eh": 17231, - "ei": 20295, - "eight": 26022, - "either": 31336, - "ek": 988, - "eka": 38001, - "eker": 28233, - "eki": 39548, - "eking": 18754, - "eks": 2573, - "el": 417, - "ela": 10304, - "elaide": 25078, - "eland": 8822, - "elcome": 9571, - "ele": 11129, - "elect": 9509, - "elected": 28604, - "election": 14300, - "electric": 31067, - "eled": 18449, - "element": 30854, - "eless": 5321, - "elf": 7046, - "elfare": 27122, - "elfth": 44659, - "eli": 43733, - "elia": 25418, - "elight": 49984, - "eligible": 31595, - "elin": 27176, - "eline": 4470, - "elines": 20655, - "eling": 10809, - "elist": 46331, - "ell": 695, - "ella": 12627, - "ellar": 14203, - "ellation": 28828, - "elle": 13485, - "ellect": 6879, - "ellectual": 29706, - "elled": 11978, - "ellen": 40635, - "eller": 12368, - "ellery": 41800, - "elli": 23225, - "ellig": 2976, - "elligence": 3480, - "elligent": 32940, - "elling": 9417, - "ello": 11109, - "ellow": 5037, - "ells": 19187, - "elly": 6148, - "elman": 32370, - "eln": 45542, - "elo": 22126, - "elong": 21537, - "elope": 47329, - "els": 1424, - "else": 17772, - "elsen": 25328, - "elsh": 21564, - "elsius": 32495, - "elson": 10151, - "elt": 2120, - "elta": 12514, - "elve": 9954, - "elvet": 32667, - "em": 368, - "ema": 19687, - "emade": 21398, - "email": 12888, - "emaker": 32174, - "emale": 10144, - "eman": 8463, - "emark": 47626, - "emate": 47686, - "emb": 24419, - "embed": 20521, - "embedreportprint": 30898, - "ember": 1491, - "eme": 34755, - "emed": 9006, - "emen": 8952, - "ement": 972, - "ements": 3196, - "emer": 24677, - "emet": 19261, - "emetery": 19785, - "emi": 43967, - "emia": 22859, - "emic": 5314, - "emies": 5090, - "emin": 14857, - "eming": 46564, - "emis": 30561, - "emn": 37705, - "emo": 41903, - "emon": 7966, - "emonic": 50016, - "emonium": 33044, - 
"emort": 24466, - "emouth": 46880, - "emp": 45787, - "emphasis": 36663, - "empl": 18856, - "employ": 7033, - "employed": 36266, - "employment": 28812, - "emporary": 33080, - "empt": 1791, - "emption": 11221, - "empty": 28920, - "ems": 5232, - "emy": 3065, - "en": 268, - "ena": 8107, - "enable": 21633, - "enabled": 25616, - "ename": 12453, - "enance": 36368, - "enaries": 30216, - "enario": 39055, - "enary": 21629, - "enberg": 23140, - "enburg": 37036, - "enc": 12685, - "ence": 594, - "enced": 5864, - "encer": 12137, - "encers": 42288, - "ences": 3007, - "ench": 24421, - "encia": 29634, - "encies": 3976, - "encing": 9532, - "encrypted": 43628, - "ency": 1387, - "end": 437, - "enda": 7438, - "endale": 41147, - "endant": 23048, - "endants": 30841, - "endar": 9239, - "endars": 44942, - "endas": 35624, - "ende": 38396, - "ended": 1631, - "ender": 2194, - "endered": 30398, - "enders": 7338, - "endez": 41913, - "endi": 43109, - "endiary": 43034, - "endif": 32088, - "ending": 1571, - "endish": 48442, - "endium": 49811, - "endix": 19573, - "endment": 5904, - "endo": 31110, - "endon": 43153, - "endor": 18738, - "endra": 48286, - "ends": 2412, - "endum": 43755, - "ene": 1734, - "ened": 2945, - "eneg": 46495, - "enegger": 44028, - "enei": 46009, - "enemy": 46970, - "ener": 877, - "energy": 22554, - "eners": 36014, - "enery": 24156, - "enes": 18719, - "eness": 9449, - "enez": 11437, - "enezuel": 12596, - "enf": 33701, - "enforcement": 44976, - "enfranch": 39827, - "eng": 1516, - "enge": 3540, - "engeance": 21364, - "enged": 47422, - "enger": 6540, - "engers": 9302, - "enges": 34120, - "engine": 18392, - "engineering": 40321, - "english": 39126, - "ength": 3286, - "engu": 13561, - "enh": 16550, - "enhagen": 30347, - "eni": 43850, - "enic": 35866, - "ening": 3101, - "enium": 47477, - "enko": 32720, - "enment": 23242, - "enn": 1697, - "enna": 13713, - "enne": 29727, - "ennes": 42573, - "ennett": 48151, - "ennial": 27779, - "ennis": 10679, - "enny": 11870, - "eno": 23397, - "enos": 
28380, - "enough": 48229, - "ens": 641, - "ensable": 33447, - "ensation": 25742, - "ense": 1072, - "ensed": 15385, - "ensen": 18756, - "enser": 45268, - "enses": 4541, - "ensible": 27339, - "ensibly": 28508, - "ensical": 46165, - "ensing": 26426, - "ension": 3004, - "ensional": 37176, - "ensions": 5736, - "ensis": 37834, - "ensitive": 18464, - "ensitivity": 40545, - "ensity": 6377, - "ensive": 2021, - "enson": 19069, - "ensor": 22854, - "enstein": 37975, - "ensual": 31406, - "ensus": 7314, - "ent": 298, - "enta": 29188, - "ental": 2470, - "entanyl": 41455, - "entary": 48648, - "ente": 21872, - "ented": 4714, - "enter": 9255, - "enth": 7944, - "enthal": 34728, - "ential": 1843, - "entially": 3746, - "entials": 14817, - "entimes": 43598, - "entin": 31371, - "enting": 36589, - "ention": 1463, - "entious": 43787, - "entity": 26858, - "entle": 8651, - "ently": 1473, - "ento": 50217, - "enton": 26673, - "entric": 22317, - "entry": 13000, - "ents": 658, - "enture": 36697, - "enty": 3787, - "enum": 44709, - "env": 24330, - "environment": 38986, - "eny": 28558, - "enz": 19471, - "enza": 23674, - "enzie": 26389, - "eon": 23277, - "eor": 13492, - "eous": 15303, - "ep": 538, - "epad": 47852, - "epend": 2690, - "ependence": 15091, - "ependent": 8682, - "eper": 5723, - "eph": 27446, - "eping": 7213, - "episode": 38668, - "eport": 45813, - "eps": 25386, - "ept": 19598, - "eq": 27363, - "equ": 4853, - "equal": 40496, - "equality": 48203, - "equipped": 40617, - "er": 263, - "era": 8607, - "eral": 1691, - "erala": 33314, - "erald": 12573, - "erate": 21620, - "erb": 23552, - "erc": 2798, - "ercise": 23697, - "erd": 45744, - "ere": 567, - "ered": 1068, - "eredith": 36897, - "eree": 45316, - "erek": 18238, - "erella": 36648, - "eren": 14226, - "erence": 1945, - "erences": 4972, - "erenn": 31915, - "erent": 9100, - "erential": 33369, - "ereo": 32934, - "erer": 11882, - "erers": 19288, - "erest": 1260, - "eret": 31229, - "erey": 48023, - "erg": 6422, - "ergic": 19793, - "ergus": 13607, - 
"erguson": 14168, - "ergy": 26079, - "eri": 33442, - "eria": 5142, - "erial": 48499, - "eric": 35626, - "erick": 41556, - "erie": 18287, - "eries": 10640, - "ering": 1586, - "erion": 28019, - "erity": 32821, - "erk": 9587, - "erker": 35779, - "erm": 7780, - "erman": 2224, - "ermanent": 30312, - "ermott": 46187, - "ern": 1142, - "ernal": 35220, - "ername": 13292, - "ernand": 13023, - "ernandez": 18092, - "ernaut": 37879, - "ernel": 7948, - "ernels": 44930, - "erness": 17447, - "erning": 8917, - "erno": 24100, - "ero": 3529, - "eros": 27498, - "erous": 48411, - "err": 8056, - "erred": 17436, - "errilla": 31859, - "error": 18224, - "errors": 48277, - "erry": 6996, - "ers": 364, - "ersed": 20204, - "ersen": 46516, - "ership": 49437, - "ersion": 6900, - "ersive": 24469, - "erson": 882, - "ert": 861, - "ertain": 1425, - "ertation": 42245, - "ertility": 27651, - "erto": 13806, - "ertodd": 36481, - "erton": 29111, - "erv": 712, - "erva": 32775, - "ervation": 13208, - "ervative": 22003, - "ervatives": 35291, - "erve": 3760, - "erved": 8520, - "erver": 18497, - "erves": 11184, - "erville": 33487, - "erving": 14344, - "ery": 1924, - "eryl": 44886, - "es": 274, - "esa": 49183, - "esame": 34038, - "esan": 42890, - "esar": 18964, - "esc": 3798, - "escal": 47647, - "escap": 50141, - "escape": 41915, - "escent": 45470, - "escription": 7260, - "ese": 2771, - "esh": 5069, - "esi": 46551, - "esian": 35610, - "esides": 11788, - "esis": 9339, - "esity": 11924, - "esley": 49048, - "esm": 45798, - "esome": 5927, - "eson": 42038, - "esp": 9774, - "especially": 16480, - "espie": 42120, - "esque": 28939, - "ess": 408, - "essa": 21411, - "essage": 7589, - "esse": 35270, - "essed": 6676, - "essee": 10702, - "essel": 7878, - "essen": 44483, - "essential": 31195, - "essert": 20335, - "esses": 44667, - "essim": 30265, - "essing": 27289, - "ession": 2521, - "essional": 12743, - "essions": 6202, - "essler": 33730, - "essment": 21687, - "esson": 39670, - "essor": 5987, - "essors": 23295, - "est": 
395, - "esta": 18059, - "establish": 40037, - "established": 27718, - "establishment": 44390, - "estamp": 27823, - "estate": 44146, - "estation": 27364, - "este": 29872, - "estead": 37897, - "ested": 7287, - "esteem": 31869, - "ester": 7834, - "estern": 3330, - "esters": 8586, - "esthes": 29678, - "esthesia": 34811, - "esthetic": 37531, - "estial": 21711, - "estic": 4699, - "estinal": 34284, - "estine": 27374, - "esting": 37761, - "estival": 6743, - "eston": 19115, - "estone": 13631, - "estones": 30637, - "estro": 47692, - "ests": 3558, - "esty": 9673, - "estyle": 10992, - "estyles": 42530, - "esville": 19641, - "esy": 9259, - "et": 316, - "eta": 17167, - "etary": 8527, - "etc": 14784, - "etch": 7569, - "etchup": 47132, - "ete": 14471, - "eteen": 34026, - "eteenth": 26425, - "eter": 2357, - "eteria": 39622, - "etermin": 13221, - "etermination": 29610, - "etermined": 23444, - "eters": 7307, - "eth": 2788, - "ethe": 10567, - "etheless": 12845, - "ether": 6750, - "etheus": 36916, - "ethical": 32949, - "ethnic": 38546, - "ethy": 33077, - "ethyl": 21610, - "ethyst": 44166, - "etic": 5139, - "etically": 16877, - "etics": 14596, - "eties": 31638, - "etime": 8079, - "etimes": 46874, - "eting": 13629, - "etition": 15620, - "etitive": 17295, - "eto": 27206, - "eton": 18483, - "etooth": 16271, - "etr": 21879, - "etric": 19482, - "etrical": 34546, - "etry": 11973, - "ets": 1039, - "etsk": 29515, - "etsu": 30470, - "etsy": 34877, - "ett": 3087, - "etta": 15253, - "ette": 5857, - "ettel": 47417, - "etter": 40088, - "ettes": 23014, - "etti": 24851, - "etting": 35463, - "ettings": 12374, - "ettle": 23570, - "ettlement": 27331, - "etts": 9357, - "etus": 29158, - "ety": 2963, - "etz": 23773, - "eu": 12496, - "eur": 23365, - "euro": 44252, - "eus": 27650, - "ev": 1990, - "eva": 48855, - "eval": 18206, - "evaluate": 49786, - "eve": 44655, - "even": 10197, - "event": 15596, - "events": 31534, - "ever": 964, - "everal": 8438, - "every": 16833, - "everyone": 47057, - "everything": 37814, 
- "evidence": 46817, - "evil": 23542, - "evin": 6830, - "ew": 413, - "eware": 29725, - "ewater": 21422, - "eway": 16172, - "eways": 43613, - "ewitness": 28588, - "ework": 6433, - "eworks": 19653, - "eworld": 38136, - "eworthy": 25969, - "ews": 15515, - "ewski": 46151, - "ex": 1069, - "examination": 47779, - "example": 20688, - "exc": 41194, - "except": 16341, - "excluding": 42218, - "exclusive": 41195, - "exe": 13499, - "exec": 18558, - "execute": 41049, - "exempt": 42679, - "exist": 38476, - "existence": 41084, - "existent": 32786, - "existing": 25687, - "exit": 37023, - "exp": 11201, - "expected": 40319, - "expensive": 22031, - "exper": 23100, - "expl": 20676, - "export": 39344, - "expr": 31937, - "express": 42712, - "expression": 38011, - "ext": 2302, - "external": 22615, - "externalActionCode": 31576, - "extra": 26086, - "extreme": 29896, - "extremely": 41073, - "ey": 2959, - "eye": 25379, - "eyed": 18834, - "eyes": 48418, - "ez": 8471, - "ezvous": 50063, - "f": 69, - "fa": 13331, - "fab": 36434, - "fac": 38942, - "face": 2550, - "facebook": 19024, - "faced": 24903, - "faces": 32186, - "facing": 29532, - "fact": 22584, - "factor": 31412, - "facts": 37473, - "fail": 32165, - "failed": 47904, - "fair": 22043, - "faith": 41751, - "fake": 30706, - "fal": 42932, - "fall": 7207, - "falls": 23348, - "false": 9562, - "fam": 44769, - "family": 17989, - "famous": 45143, - "fan": 24408, - "far": 16370, - "fare": 9496, - "farious": 41504, - "farm": 43323, - "fascist": 46928, - "fashion": 25265, - "fashioned": 28776, - "fast": 7217, - "fat": 17359, - "father": 11358, - "favorite": 35200, - "fax": 23560, - "fb": 21855, - "fc": 16072, - "fd": 16344, - "fe": 5036, - "feat": 27594, - "feature": 30053, - "features": 40890, - "fect": 2309, - "fecture": 36637, - "fed": 19082, - "fee": 39071, - "feed": 12363, - "feeding": 22824, - "feel": 36410, - "feet": 39690, - "feld": 16265, - "fell": 23299, - "felt": 31985, - "female": 24724, - "femin": 33594, - "fen": 41037, - "fer": 2232, - 
"ference": 4288, - "ferred": 18186, - "fest": 23411, - "fet": 34045, - "fetched": 50012, - "few": 32146, - "ff": 487, - "ffe": 16658, - "ffect": 4812, - "ffee": 5853, - "ffen": 46985, - "ffer": 36761, - "fff": 20972, - "ffff": 12927, - "ffic": 2108, - "fficiency": 35590, - "fficient": 5632, - "ffield": 31374, - "ffiti": 25198, - "fg": 40616, - "fi": 12463, - "fiction": 24046, - "field": 3245, - "fields": 25747, - "fif": 32041, - "fifth": 43556, - "fig": 5647, - "fight": 15481, - "fighter": 24733, - "fighters": 17114, - "fighting": 26594, - "fights": 50121, - "figure": 26875, - "figured": 46296, - "fil": 10379, - "file": 7753, - "filename": 34345, - "files": 16624, - "fill": 20797, - "filled": 20286, - "film": 26240, - "filter": 24455, - "fin": 15643, - "final": 20311, - "finals": 32089, - "financial": 46921, - "find": 19796, - "finder": 22805, - "finding": 41070, - "fine": 38125, - "fing": 28825, - "finger": 35461, - "finished": 43952, - "fire": 6495, - "fired": 26803, - "fires": 27312, - "first": 11085, - "fish": 11084, - "fit": 11147, - "fits": 21013, - "fitted": 38631, - "fitting": 32232, - "five": 13261, - "fix": 13049, - "fixed": 34021, - "fixes": 42624, - "fl": 2704, - "flag": 32109, - "flags": 33152, - "flake": 47597, - "flame": 49621, - "flash": 34167, - "flat": 38568, - "flation": 33521, - "fle": 27919, - "fledged": 45223, - "fleet": 33559, - "flex": 32880, - "flies": 27959, - "flight": 22560, - "flix": 10046, - "flo": 48679, - "float": 22468, - "floor": 28300, - "flow": 11125, - "flower": 25547, - "flows": 44041, - "flu": 35522, - "flush": 25925, - "fly": 12254, - "flying": 45928, - "fm": 38353, - "fman": 35826, - "fml": 38122, - "fn": 22184, - "fo": 6513, - "focus": 37635, - "focused": 18143, - "fol": 9062, - "fold": 11379, - "folder": 43551, - "folio": 13652, - "folios": 45242, - "folk": 19956, - "follow": 27780, - "font": 10331, - "foo": 21943, - "food": 19425, - "foot": 5898, - "football": 15914, - "footed": 43127, - "for": 1640, - "force": 3174, - 
"forced": 12072, - "forcement": 13442, - "forcer": 45515, - "forces": 27087, - "forcing": 18766, - "ford": 3841, - "fore": 754, - "foreign": 38823, - "foreseen": 44952, - "forest": 29623, - "forestation": 41570, - "forge": 30293, - "fork": 32523, - "form": 687, - "formance": 10367, - "format": 18982, - "formation": 1161, - "formed": 12214, - "former": 16354, - "formerly": 36234, - "forming": 15464, - "forms": 23914, - "fort": 3319, - "fortable": 12065, - "forth": 25718, - "forts": 47378, - "fortunately": 6668, - "fortune": 37359, - "forum": 27302, - "forums": 37141, - "forward": 11813, - "found": 9275, - "foundation": 42526, - "founded": 27060, - "founder": 15454, - "foundland": 42030, - "four": 14337, - "fourth": 49393, - "fox": 12792, - "fp": 46428, - "fps": 29647, - "fr": 8310, - "frac": 31944, - "fram": 19298, - "frame": 14535, - "frames": 37805, - "framework": 30604, - "fre": 19503, - "fred": 39193, - "free": 5787, - "freedom": 41295, - "frequency": 35324, - "fresh": 48797, - "frey": 37425, - "fried": 25520, - "friend": 6726, - "friendly": 13120, - "friends": 36154, - "frog": 49956, - "from": 6738, - "front": 8534, - "fruit": 34711, - "fs": 9501, - "ft": 701, - "ften": 14785, - "fter": 637, - "fters": 47131, - "ftime": 31387, - "fts": 35594, - "fty": 19628, - "fu": 20942, - "fuck": 31699, - "fuel": 25802, - "ful": 913, - "full": 12853, - "fully": 2759, - "fulness": 15538, - "fun": 12543, - "func": 20786, - "function": 8818, - "functional": 45124, - "fund": 10990, - "funded": 18246, - "funding": 25032, - "fur": 38916, - "furt": 29205, - "fusc": 37695, - "future": 37443, - "fw": 44482, - "fx": 21373, - "fy": 24928, - "g": 70, - "ga": 4908, - "gaard": 36232, - "gado": 50054, - "gae": 25002, - "gage": 10502, - "gain": 48544, - "gal": 13528, - "galitarian": 39907, - "gall": 39580, - "gallery": 24460, - "gam": 28483, - "game": 6057, - "gamer": 36515, - "games": 19966, - "gaming": 48616, - "gan": 1030, - "gang": 28284, - "gans": 39352, - "gap": 43554, - "gar": 4563, 
- "gard": 19977, - "gars": 25821, - "gart": 41651, - "gary": 14849, - "gas": 22649, - "gat": 41268, - "gate": 10494, - "gay": 22744, - "gb": 22296, - "gc": 36484, - "gd": 21287, - "gdala": 40420, - "ge": 469, - "geant": 30205, - "gear": 31763, - "gebra": 29230, - "ged": 2004, - "gee": 29622, - "geist": 49782, - "gel": 25280, - "gem": 24090, - "gement": 16025, - "gements": 43547, - "gemony": 38953, - "gen": 5235, - "gence": 12745, - "gencies": 33333, - "gency": 4949, - "gender": 8388, - "gener": 8612, - "general": 24622, - "generated": 27568, - "generation": 20158, - "generic": 41357, - "genic": 38516, - "genre": 35850, - "gent": 6783, - "gently": 34727, - "geon": 6281, - "geoning": 31614, - "geons": 16297, - "ger": 1362, - "gerald": 26941, - "gered": 10446, - "geries": 30230, - "gers": 5355, - "gery": 7076, - "ges": 3212, - "gest": 3495, - "get": 1136, - "getic": 24321, - "gets": 11407, - "gettable": 42182, - "getting": 37210, - "gew": 39909, - "gewater": 40843, - "gex": 25636, - "gey": 39608, - "gg": 1130, - "gged": 11178, - "gger": 26679, - "ggie": 23571, - "ggies": 33049, - "gging": 18792, - "ggle": 16444, - "ggles": 32723, - "ggy": 19970, - "gh": 456, - "gha": 46090, - "ghai": 20380, - "ghan": 6064, - "ghazi": 21775, - "ghost": 38933, - "gi": 12397, - "gian": 18299, - "gie": 22699, - "giene": 28363, - "gif": 27908, - "gil": 37718, - "gin": 1655, - "ging": 2667, - "gins": 29878, - "ginx": 42822, - "gio": 27769, - "girl": 15219, - "girlfriend": 45189, - "girls": 36960, - "git": 18300, - "github": 12567, - "give": 26535, - "given": 35569, - "giving": 13992, - "gl": 4743, - "glas": 14391, - "glass": 20721, - "glers": 33641, - "gling": 40799, - "global": 20541, - "glomer": 37757, - "gly": 10853, - "gm": 39870, - "gmail": 14816, - "gment": 5154, - "gments": 11726, - "gn": 4593, - "gnu": 41791, - "go": 2188, - "goal": 35231, - "gob": 44270, - "god": 25344, - "goers": 31006, - "going": 5146, - "gold": 24267, - "gom": 19120, - "gomery": 20142, - "gon": 14520, - "gone": 
21260, - "goo": 42469, - "good": 11274, - "google": 13297, - "gor": 7053, - "gorith": 7727, - "gorithm": 42289, - "got": 23442, - "gotten": 21646, - "gov": 9567, - "govern": 47866, - "government": 14480, - "governmental": 31353, - "govtrack": 41230, - "gow": 21175, - "gp": 31197, - "gpu": 46999, - "gr": 2164, - "gra": 46784, - "grab": 32393, - "grad": 9744, - "gradation": 26317, - "grade": 9526, - "graded": 21791, - "grades": 31177, - "gradient": 49607, - "grading": 29247, - "graduate": 17680, - "grain": 48270, - "gram": 4546, - "gran": 46324, - "grand": 23936, - "graph": 34960, - "grass": 29815, - "grave": 41711, - "gravity": 46453, - "gray": 44605, - "gre": 16694, - "greSQL": 47701, - "great": 18223, - "green": 14809, - "greg": 9903, - "gregation": 17097, - "gren": 32762, - "gres": 34239, - "gress": 5914, - "gression": 32383, - "gressive": 19741, - "grey": 49502, - "grid": 25928, - "grim": 33563, - "gro": 27333, - "gross": 47181, - "ground": 2833, - "grounds": 40520, - "group": 8094, - "groupon": 14531, - "groups": 24432, - "grow": 45921, - "growing": 25167, - "grown": 22377, - "growth": 27922, - "gru": 48929, - "gs": 14542, - "gt": 13655, - "gu": 5162, - "guard": 14864, - "guards": 33427, - "gue": 18701, - "gui": 48317, - "guide": 41311, - "guided": 23657, - "gun": 7145, - "guns": 44265, - "gur": 45073, - "guy": 22932, - "guyen": 39922, - "gy": 1360, - "gyn": 40183, - "gypt": 6022, - "gz": 34586, - "h": 71, - "ha": 3099, - "haar": 42948, - "hab": 5976, - "habi": 37362, - "hack": 31153, - "had": 18108, - "hai": 44488, - "hair": 27108, - "haired": 29972, - "hak": 43573, - "hal": 14201, - "half": 13959, - "hall": 18323, - "halla": 41911, - "ham": 2763, - "hammad": 14875, - "hammer": 17980, - "han": 7637, - "hand": 4993, - "handed": 13638, - "handedly": 43919, - "hander": 44510, - "handle": 28144, - "handled": 38788, - "handler": 30281, - "hands": 43365, - "hang": 33255, - "hani": 29839, - "hao": 23778, - "hap": 45897, - "happy": 34191, - "haps": 2772, - "har": 
9869, - "hard": 10424, - "hardt": 28375, - "hare": 43466, - "hari": 49573, - "harm": 29155, - "hart": 18647, - "has": 10134, - "hash": 17831, - "hat": 5183, - "hate": 37035, - "hatt": 11653, - "hattan": 12904, - "haul": 15194, - "haus": 30404, - "haust": 42456, - "have": 14150, - "haven": 39487, - "having": 40965, - "haw": 26615, - "hawk": 40624, - "hawks": 27221, - "hazard": 37598, - "hd": 31298, - "he": 258, - "hea": 21632, - "head": 2256, - "headed": 15353, - "header": 25677, - "headers": 50145, - "heading": 33878, - "heads": 16600, - "health": 13948, - "healthy": 22796, - "heard": 23636, - "heart": 11499, - "hearted": 20122, - "heartedly": 44407, - "heast": 9522, - "heastern": 18160, - "heat": 25080, - "heavy": 23701, - "hed": 704, - "heddar": 44937, - "hedon": 46086, - "hedral": 21962, - "hee": 21067, - "heed": 23616, - "heet": 25473, - "hei": 27392, - "heid": 28420, - "height": 17015, - "heim": 9096, - "heimer": 16288, - "heit": 29361, - "hel": 2978, - "held": 10217, - "helial": 35566, - "hell": 12758, - "helle": 34454, - "hello": 31373, - "helm": 33485, - "help": 16794, - "helps": 35194, - "hem": 4411, - "hemat": 10024, - "hematic": 23380, - "hematically": 46558, - "hement": 35347, - "hemer": 39557, - "hemoth": 34394, - "hemy": 36598, - "hen": 831, - "hend": 15631, - "hene": 29473, - "heng": 31753, - "henko": 30161, - "hens": 5135, - "hent": 6925, - "heny": 47413, - "heon": 37060, - "her": 372, - "here": 1456, - "hered": 6083, - "herence": 23545, - "herent": 8334, - "herer": 48386, - "heres": 19079, - "heric": 15011, - "herical": 37910, - "hern": 2881, - "hero": 11718, - "herry": 13372, - "hers": 7084, - "herty": 29029, - "hes": 956, - "hesda": 30049, - "heses": 39815, - "hesion": 32582, - "hesis": 8497, - "hesive": 25938, - "hess": 33979, - "hest": 3634, - "hester": 19593, - "het": 3202, - "hetamine": 25385, - "heter": 43332, - "hetic": 6587, - "hetical": 21485, - "hetically": 31786, - "hetics": 24965, - "hett": 17442, - "hetti": 33392, - "hetto": 35619, - 
"hew": 6391, - "hews": 40645, - "hex": 33095, - "hey": 20342, - "hh": 12337, - "hhh": 49126, - "hhhh": 36607, - "hi": 5303, - "hib": 3145, - "hiba": 49224, - "hibit": 26964, - "hibited": 44139, - "hibition": 24108, - "hid": 49675, - "hidden": 30342, - "hide": 24717, - "hift": 29323, - "hig": 25196, - "high": 8929, - "higher": 46503, - "highest": 35323, - "highly": 47444, - "hill": 12639, - "hillary": 47826, - "him": 38400, - "hin": 20079, - "hing": 722, - "hip": 1056, - "hips": 5748, - "hire": 10695, - "hiro": 49907, - "hirt": 49756, - "his": 14363, - "hist": 10034, - "historic": 31304, - "history": 23569, - "hit": 17945, - "hitting": 48320, - "hl": 18519, - "hler": 49737, - "hm": 23940, - "hma": 21720, - "hn": 21116, - "hner": 22277, - "ho": 8873, - "hod": 2065, - "hoe": 38979, - "hof": 39891, - "hoff": 36092, - "hog": 31897, - "hol": 3937, - "hold": 2946, - "holder": 13829, - "holders": 10476, - "holding": 19216, - "hole": 13207, - "holes": 28439, - "holiday": 37689, - "holm": 22981, - "holy": 44287, - "hom": 26452, - "home": 11195, - "hon": 24130, - "hood": 2894, - "hook": 25480, - "hooting": 35486, - "hop": 8548, - "hops": 21936, - "hor": 17899, - "horn": 25311, - "horse": 30527, - "hospital": 49257, - "host": 4774, - "hot": 8940, - "hots": 17398, - "hou": 15710, - "houn": 47714, - "hound": 39047, - "hour": 9769, - "hours": 24425, - "house": 4803, - "houses": 20089, - "housing": 50028, - "hov": 28026, - "hovah": 33023, - "hover": 43753, - "how": 4919, - "hower": 33539, - "hp": 24831, - "hr": 11840, - "hra": 45056, - "hran": 16848, - "href": 33257, - "hs": 11994, - "ht": 4352, - "htaking": 34148, - "htar": 38672, - "htm": 19211, - "html": 6494, - "htt": 2804, - "http": 4023, - "https": 5450, - "hu": 13415, - "hua": 33061, - "hub": 40140, - "huge": 40878, - "hum": 17047, - "human": 10734, - "humane": 44766, - "humans": 40205, - "hun": 20088, - "hung": 43274, - "hunt": 35060, - "hunter": 37488, - "hur": 48349, - "hurst": 33500, - "hus": 7537, - "husband": 48912, - 
"hw": 36599, - "hy": 12114, - "hya": 48812, - "hyd": 15511, - "hyde": 39175, - "hyp": 36362, - "hyper": 49229, - "hz": 32179, - "i": 72, - "iHUD": 38370, - "iOS": 35742, - "iPhone": 37032, - "ia": 544, - "iability": 12455, - "iable": 3379, - "iably": 18745, - "iac": 9607, - "iae": 33100, - "iage": 42360, - "iago": 29601, - "iah": 9520, - "iak": 32994, - "ial": 498, - "ially": 1927, - "ials": 8231, - "iam": 1789, - "iameter": 13173, - "iami": 7871, - "iamond": 8446, - "ian": 666, - "iana": 7484, - "iance": 3610, - "iances": 16097, - "iane": 46470, - "iang": 15483, - "iani": 25111, - "iann": 28627, - "iannopoulos": 36408, - "iano": 10115, - "ians": 1547, - "iant": 3014, - "iants": 17883, - "iao": 13481, - "iar": 12571, - "iard": 42425, - "iaries": 18361, - "iary": 8042, - "ias": 4448, - "iasco": 40025, - "iasis": 48455, - "iasm": 16401, - "iat": 5375, - "iate": 9386, - "iated": 12931, - "iates": 32820, - "iating": 26336, - "iation": 3920, - "iations": 40356, - "iator": 38585, - "iatric": 11439, - "iatrics": 36549, - "iatures": 42711, - "iatus": 34704, - "iaz": 17890, - "iazep": 48826, - "ib": 571, - "iba": 23718, - "ibaba": 37541, - "ibal": 21342, - "iban": 14278, - "iband": 35967, - "ibble": 43992, - "ibe": 32438, - "ibel": 43837, - "iber": 1856, - "iberal": 16813, - "ibi": 27567, - "ibia": 41145, - "ibilities": 7992, - "ibility": 2247, - "ibl": 10506, - "ible": 856, - "ibles": 18764, - "ibli": 29142, - "iblical": 16897, - "ibling": 27448, - "iblings": 19389, - "ibliography": 45689, - "ibly": 3193, - "ibo": 26762, - "ibr": 2889, - "ibrarian": 35808, - "ibraries": 11127, - "ibrary": 4115, - "ibu": 33828, - "ibur": 38616, - "ibus": 26333, - "ic": 291, - "ica": 3970, - "icable": 18424, - "icably": 41685, - "icago": 4549, - "ical": 605, - "ically": 1146, - "icals": 20155, - "ican": 7490, - "icans": 22398, - "icas": 44645, - "icate": 5344, - "icated": 3474, - "icates": 16856, - "icating": 12364, - "ication": 3299, - "ications": 3736, - "icative": 43058, - "icator": 
26407, - "icators": 44549, - "icc": 44240, - "ice": 501, - "iced": 3711, - "icent": 36712, - "iceps": 41663, - "icer": 16647, - "ices": 1063, - "icester": 26382, - "ich": 488, - "ichael": 40302, - "iche": 14234, - "ichen": 41437, - "ichever": 22617, - "ichi": 16590, - "ichick": 38448, - "ichita": 41940, - "icho": 38720, - "icht": 30830, - "ici": 44070, - "icia": 33577, - "icial": 6652, - "ician": 6749, - "icians": 5106, - "iciary": 13556, - "icidal": 21488, - "icide": 5285, - "icides": 16751, - "iciency": 19777, - "icient": 11373, - "icing": 6345, - "icio": 46441, - "icion": 47430, - "icious": 6243, - "icip": 4311, - "icipated": 40988, - "icism": 11965, - "icist": 48187, - "icit": 3628, - "icity": 8467, - "ick": 624, - "icka": 29873, - "icked": 9484, - "icken": 5973, - "icker": 15799, - "ickers": 21630, - "icket": 9715, - "ickets": 15970, - "ickey": 40389, - "icking": 7958, - "ickle": 39423, - "ickr": 18994, - "icks": 3378, - "ickson": 46381, - "icky": 17479, - "icle": 1548, - "icles": 2983, - "ico": 3713, - "icol": 27045, - "icon": 4749, - "icone": 27981, - "icons": 34280, - "icro": 2500, - "icrobial": 48518, - "ics": 873, - "ict": 713, - "icted": 5722, - "icter": 36278, - "iction": 2867, - "ictional": 47273, - "ictionary": 14188, - "ictions": 9278, - "ictive": 45279, - "icts": 14137, - "icular": 13174, - "icularly": 22585, - "icult": 2249, - "icultural": 26823, - "iculture": 47428, - "iculty": 22402, - "icum": 39901, - "icus": 24552, - "icut": 13554, - "icy": 4611, - "icycle": 35298, - "icz": 28051, - "id": 312, - "ida": 3755, - "idable": 23321, - "idad": 32482, - "idae": 31718, - "idal": 11624, - "idan": 27610, - "idas": 24496, - "idate": 20540, - "idated": 41475, - "idates": 37051, - "idation": 24765, - "idav": 20331, - "iday": 2567, - "idays": 13842, - "idd": 1638, - "idden": 4651, - "idding": 13494, - "iddle": 2509, - "iddled": 34897, - "iddler": 26458, - "iddles": 29319, - "iddling": 41367, - "iddy": 34208, - "ide": 485, - "ided": 1384, - "idel": 5943, - 
"idelines": 7984, - "idelity": 23091, - "idem": 28913, - "iden": 14029, - "idence": 1704, - "idences": 44845, - "idency": 9147, - "ident": 738, - "idental": 35182, - "identally": 23961, - "idential": 35599, - "identified": 19107, - "idently": 46046, - "idents": 3231, - "ideo": 1651, - "ideon": 42381, - "ideos": 4921, - "idepress": 25895, - "ider": 1304, - "idered": 3089, - "iders": 4157, - "ides": 1460, - "ideshow": 42286, - "idespread": 9790, - "idge": 3130, - "idges": 15969, - "idget": 17484, - "idi": 19830, - "idia": 38513, - "idian": 19825, - "idine": 39422, - "iding": 2530, - "idious": 33243, - "idis": 29207, - "idity": 17995, - "idium": 43523, - "ido": 17305, - "idon": 47287, - "ids": 2340, - "idth": 5649, - "idy": 19325, - "ie": 494, - "iece": 8535, - "ied": 798, - "ief": 2086, - "ieft": 49868, - "ieg": 15702, - "iege": 14566, - "iegel": 28210, - "iel": 8207, - "ield": 1164, - "ielding": 30449, - "iem": 26597, - "ien": 2013, - "ience": 1240, - "ienced": 26343, - "iences": 10035, - "iencies": 22139, - "iency": 6160, - "ienne": 37938, - "iens": 10465, - "ient": 1153, - "ients": 2334, - "ier": 959, - "iera": 41976, - "ierce": 9798, - "iere": 13235, - "ieri": 29864, - "ierra": 16367, - "ierre": 31058, - "ierrez": 44448, - "iers": 3183, - "iership": 36689, - "iery": 23012, - "ies": 444, - "iesel": 29893, - "iest": 6386, - "iesta": 36283, - "iet": 1155, - "ietal": 21587, - "ieth": 19235, - "ieties": 9545, - "iets": 27955, - "iety": 1905, - "ieu": 22304, - "iev": 11203, - "ieval": 14671, - "ieve": 12311, - "ieved": 39591, - "iever": 47818, - "ievers": 30296, - "ieves": 17974, - "ieving": 30749, - "iew": 769, - "iewicz": 48596, - "if": 361, - "ifa": 19215, - "ifact": 29660, - "ifacts": 37199, - "ifax": 26590, - "ife": 901, - "ifer": 7087, - "iferation": 49801, - "ifest": 8409, - "ifestyle": 42004, - "iff": 733, - "iffe": 22391, - "ifference": 33012, - "ifferent": 17125, - "iffin": 42022, - "iffs": 10203, - "ifi": 22238, - "ifiable": 16823, - "ific": 811, - 
"ificant": 17294, - "ificantly": 42491, - "ificate": 22460, - "ification": 2649, - "ifications": 6637, - "ifice": 9680, - "ificent": 21559, - "ificial": 9542, - "ified": 1431, - "ifier": 7483, - "ifiers": 13350, - "ifies": 6945, - "ifix": 42169, - "ifle": 8316, - "ifled": 47157, - "ifles": 16063, - "ifling": 38966, - "iflower": 42642, - "iform": 6933, - "iframe": 39621, - "ift": 2135, - "ifted": 21715, - "ifter": 18171, - "ifting": 13309, - "ifts": 19265, - "ifty": 24905, - "iful": 4135, - "ifully": 17049, - "ify": 1958, - "ifying": 4035, - "ig": 328, - "iga": 13827, - "igan": 5516, - "igans": 34090, - "igate": 10055, - "igated": 26963, - "igating": 29129, - "igation": 7065, - "igator": 23823, - "igators": 25975, - "ige": 10045, - "igel": 47709, - "igen": 9324, - "igenous": 12357, - "igent": 47096, - "iger": 8254, - "igers": 34984, - "igg": 6950, - "igger": 15249, - "iggins": 23567, - "iggle": 24082, - "iggs": 20340, - "iggurat": 44557, - "igh": 394, - "igham": 34000, - "ighed": 12570, - "ight": 432, - "ighter": 4799, - "ighters": 6261, - "ighth": 10887, - "ighthouse": 32303, - "ighting": 47610, - "ighton": 42993, - "ights": 2337, - "ighty": 14400, - "igi": 25754, - "igible": 26032, - "igil": 27187, - "igion": 17035, - "igious": 10956, - "igl": 38686, - "igm": 17225, - "igma": 13495, - "igmat": 32441, - "igmatic": 38860, - "ign": 570, - "ignant": 25114, - "igne": 48946, - "igned": 3916, - "igning": 38944, - "ignment": 16747, - "ignore": 46430, - "ignt": 16891, - "ignty": 17224, - "igo": 14031, - "igon": 37107, - "igor": 36274, - "igr": 3692, - "igrant": 9893, - "igrants": 5663, - "igraph": 45920, - "igrate": 42175, - "igrated": 38769, - "igration": 4254, - "igree": 41233, - "igroup": 47875, - "igs": 9235, - "igsaw": 45636, - "igslist": 40704, - "igue": 15212, - "igun": 50118, - "iguous": 29709, - "igure": 7047, - "ih": 4449, - "ihad": 11166, - "ihadi": 42449, - "ihar": 38405, - "ihara": 45902, - "ihil": 20898, - "ihilation": 33299, - "ihu": 48406, - "ii": 4178, - 
"iii": 15479, - "ij": 2926, - "ija": 34655, - "ijah": 32778, - "iji": 20770, - "ijing": 11030, - "ijk": 45961, - "ijn": 48848, - "ijuana": 5343, - "ik": 1134, - "ika": 9232, - "ikan": 49894, - "ikarp": 36850, - "ikawa": 40398, - "ike": 522, - "iked": 17951, - "iken": 29943, - "iker": 18320, - "ikers": 24913, - "ikes": 7938, - "ikh": 13848, - "ikhail": 39065, - "iki": 5580, - "iking": 14132, - "ikini": 35542, - "ikk": 36073, - "iko": 12125, - "iku": 28643, - "ikuman": 42889, - "iky": 47536, - "il": 346, - "ila": 10102, - "ilage": 50006, - "ilan": 38239, - "iland": 40855, - "ilant": 37794, - "ilantro": 48311, - "ilar": 1794, - "ilated": 40080, - "ilater": 38601, - "ilateral": 14796, - "ilaterally": 39707, - "ilation": 10520, - "ild": 688, - "ilda": 27281, - "ilde": 44725, - "ilded": 46158, - "ildo": 39583, - "ile": 576, - "ileaks": 27983, - "iled": 3902, - "ilee": 40626, - "ileen": 42236, - "ilege": 41866, - "ileged": 48446, - "iler": 5329, - "ilers": 34393, - "iles": 2915, - "iless": 30608, - "ilet": 41550, - "iley": 9618, - "ili": 2403, - "ilia": 17517, - "ilial": 43475, - "ilian": 35824, - "iliar": 4797, - "iliary": 28129, - "iliate": 49826, - "iliated": 31705, - "iliation": 15547, - "ilib": 22282, - "ilibrium": 24741, - "ilic": 41896, - "ilies": 3922, - "ilight": 15512, - "iling": 4386, - "ilings": 43271, - "ilingual": 34900, - "ilion": 29935, - "ilipp": 8908, - "ilit": 6392, - "ilitarian": 43900, - "ilitary": 18748, - "ilitating": 34871, - "ilitation": 18194, - "ilities": 2410, - "ility": 879, - "ilk": 43545, - "ill": 359, - "illa": 5049, - "illac": 40607, - "illance": 7682, - "illard": 32681, - "illary": 15856, - "illas": 25314, - "illation": 40903, - "ille": 8270, - "illed": 2967, - "illegal": 47749, - "iller": 4665, - "illery": 14920, - "illes": 21718, - "illet": 32512, - "illi": 50173, - "illian": 37896, - "illin": 32672, - "illing": 4509, - "illion": 1131, - "illions": 40083, - "illo": 16111, - "illon": 23027, - "ills": 2171, - "illus": 44342, - "illusion": 
35760, - "illy": 6548, - "ilo": 18526, - "ilogy": 19202, - "ilon": 33576, - "ilot": 23439, - "ils": 4487, - "ilst": 11750, - "ilt": 2326, - "ilton": 9044, - "iltr": 19438, - "iltration": 36055, - "ilts": 50076, - "ilty": 6267, - "ilus": 35815, - "ilver": 46978, - "ily": 813, - "ilyn": 38020, - "im": 320, - "ima": 8083, - "imag": 48466, - "image": 9060, - "images": 17566, - "imal": 4402, - "iman": 24086, - "imar": 49399, - "imaru": 49551, - "imate": 1920, - "imated": 15655, - "imately": 3358, - "imates": 26748, - "imating": 39204, - "imation": 18991, - "imb": 14107, - "imbabwe": 27175, - "imble": 34477, - "ime": 524, - "imedia": 20626, - "imei": 45519, - "imen": 19027, - "imens": 12117, - "imensional": 16198, - "iment": 3681, - "imental": 9134, - "imentary": 39051, - "iments": 6800, - "imeo": 47776, - "imer": 22723, - "imes": 999, - "imester": 47484, - "imet": 38813, - "imeter": 16912, - "imeters": 31551, - "img": 9600, - "imgur": 19791, - "imi": 25236, - "imil": 26641, - "imilar": 49941, - "imilation": 42963, - "iminary": 38429, - "imir": 13057, - "imity": 18853, - "imize": 48439, - "imm": 8608, - "immer": 10957, - "immers": 36904, - "immigrant": 39835, - "immigration": 47620, - "imming": 27428, - "immune": 38345, - "imo": 25147, - "imon": 20473, - "imony": 33969, - "imore": 9401, - "imoto": 43354, - "imov": 44273, - "imp": 11011, - "impact": 48240, - "impl": 23928, - "import": 11748, - "important": 18049, - "imposed": 36457, - "impro": 32077, - "improve": 49453, - "ims": 12078, - "imsy": 48295, - "imum": 2847, - "imura": 43817, - "imus": 20704, - "in": 259, - "ina": 1437, - "inal": 1292, - "inally": 3289, - "inals": 6897, - "inance": 14149, - "inances": 34999, - "inant": 42483, - "inar": 22050, - "inarily": 21565, - "inary": 3219, - "inas": 24252, - "inate": 4559, - "inated": 3898, - "inately": 48618, - "inates": 17540, - "inating": 6010, - "ination": 1883, - "inational": 26201, - "inations": 7352, - "inator": 20900, - "inators": 47721, - "inatory": 23132, - 
"inav": 26802, - "inburgh": 22222, - "inc": 1939, - "incarn": 13211, - "ince": 924, - "incent": 42816, - "incerity": 40310, - "inces": 17386, - "inch": 8589, - "inches": 45457, - "incial": 13744, - "incible": 33494, - "incinn": 15020, - "incinnati": 15130, - "include": 17256, - "includes": 42813, - "including": 8201, - "incoln": 11690, - "income": 12519, - "incre": 24988, - "increasing": 42647, - "inct": 4612, - "inction": 9438, - "inctions": 31253, - "ind": 521, - "inda": 22261, - "indal": 44644, - "independence": 39894, - "independent": 34750, - "inder": 5540, - "inders": 29700, - "index": 9630, - "inding": 6020, - "individual": 43129, - "indle": 42343, - "indu": 10259, - "induced": 17223, - "inducing": 48016, - "indust": 23213, - "industrial": 31130, - "ine": 500, - "inea": 18343, - "ined": 1389, - "inel": 20538, - "inelli": 44076, - "inem": 7749, - "inement": 21828, - "inen": 42326, - "inence": 18386, - "inent": 7233, - "inently": 26528, - "iner": 7274, - "ineries": 48858, - "iners": 21257, - "inery": 15451, - "ines": 1127, - "inese": 3762, - "iness": 1272, - "inet": 42504, - "inez": 18885, - "inf": 10745, - "infect": 27816, - "infeld": 47187, - "inflamm": 29639, - "inflammatory": 32272, - "info": 10951, - "information": 17018, - "informed": 35698, - "ing": 278, - "inge": 11912, - "inged": 24431, - "ingen": 36795, - "inger": 3889, - "ingers": 40923, - "inges": 26792, - "ingham": 25875, - "inging": 14146, - "ingle": 17697, - "ingly": 4420, - "ingo": 32735, - "ings": 654, - "ington": 9557, - "ingu": 6680, - "inguishable": 41726, - "inguished": 46709, - "inho": 20327, - "ini": 5362, - "inia": 43168, - "inian": 24605, - "inic": 47277, - "inical": 32352, - "ining": 3191, - "inion": 23971, - "inis": 16661, - "inished": 30603, - "init": 15003, - "inite": 9504, - "initely": 12998, - "initial": 36733, - "initialized": 17532, - "inition": 17750, - "initions": 50101, - "inity": 6269, - "ink": 676, - "inka": 48955, - "inker": 24275, - "inki": 38799, - "inking": 8040, - 
"inkle": 19894, - "inks": 2973, - "inky": 29246, - "inline": 45145, - "inn": 3732, - "innacle": 37087, - "innamon": 21920, - "inner": 5083, - "inness": 32990, - "innie": 49708, - "inning": 23062, - "innon": 45067, - "ino": 2879, - "inoa": 40564, - "inois": 8981, - "inos": 11996, - "inosaur": 21317, - "inous": 29823, - "input": 15414, - "inqu": 18934, - "ins": 1040, - "inse": 38521, - "insert": 28463, - "inside": 48787, - "insk": 35803, - "inski": 21141, - "insky": 19870, - "inson": 7899, - "inspired": 24194, - "inst": 8625, - "install": 17350, - "installed": 37050, - "instance": 39098, - "instead": 38070, - "instein": 11962, - "insula": 16495, - "insured": 28409, - "int": 600, - "intage": 14630, - "integ": 18908, - "integer": 41433, - "intel": 48779, - "intelligence": 32683, - "intend": 7315, - "intendent": 21075, - "intendo": 8773, - "intensity": 47799, - "intensive": 38096, - "intent": 48536, - "intention": 40867, - "inter": 3849, - "interest": 9446, - "interested": 34339, - "interesting": 47914, - "interface": 39994, - "intern": 23124, - "internal": 32538, - "international": 45609, - "internet": 37675, - "interpret": 27381, - "interrupted": 46037, - "inters": 20193, - "interstitial": 29446, - "intestinal": 36387, - "inth": 9304, - "into": 20424, - "inton": 2371, - "intosh": 37638, - "introdu": 27427, - "ints": 29503, - "intuitive": 42105, - "inus": 35237, - "inv": 16340, - "inventory": 24807, - "inventoryQuantity": 39756, - "invest": 24859, - "invoke": 37669, - "involved": 44697, - "inx": 28413, - "iny": 3541, - "inyl": 19754, - "io": 952, - "ioch": 41097, - "iod": 2101, - "iol": 1669, - "iola": 30292, - "iolet": 19194, - "iological": 15071, - "iologist": 31599, - "iology": 12371, - "iom": 29005, - "ion": 295, - "iona": 32792, - "ionage": 24919, - "ional": 1538, - "ione": 7935, - "ioned": 14994, - "ionic": 26523, - "ionics": 49900, - "ions": 507, - "iop": 14922, - "ior": 1504, - "iors": 12706, - "ios": 4267, - "iosis": 42960, - "iosity": 15023, - "iosyn": 48448, 
- "iosyncr": 48702, - "iot": 5151, - "iotic": 16357, - "iotics": 18296, - "iots": 16228, - "iott": 20773, - "iour": 49439, - "ious": 699, - "iously": 6819, - "iov": 16664, - "iovascular": 19381, - "iox": 12190, - "ioxid": 26294, - "ioxide": 16671, - "ip": 541, - "ipal": 8521, - "ipation": 25857, - "ipe": 3757, - "iped": 46647, - "ipedia": 11151, - "ipeg": 21700, - "ipel": 40634, - "iper": 9346, - "ipers": 29288, - "ipes": 18636, - "iph": 13323, - "iphany": 49915, - "iphate": 34981, - "ipher": 10803, - "ipient": 48137, - "iping": 34690, - "ipl": 24705, - "iple": 2480, - "ipment": 4667, - "ipolar": 49133, - "ipop": 42800, - "ipp": 3974, - "ipped": 3949, - "ipper": 14710, - "ippers": 16415, - "ippery": 29530, - "ippi": 12715, - "ipping": 4501, - "ipple": 18793, - "ipples": 27844, - "ippy": 41214, - "ips": 2419, - "ipt": 10257, - "iq": 25011, - "iqu": 1557, - "ique": 2350, - "iqueness": 46764, - "iques": 6368, - "iquette": 40387, - "iquid": 6394, - "ir": 343, - "ira": 8704, - "irable": 13194, - "iral": 21093, - "iration": 15297, - "irc": 1980, - "ircraft": 27002, - "ird": 1447, - "irds": 11049, - "ire": 557, - "irect": 1060, - "irection": 4154, - "ired": 1202, - "irement": 24615, - "irements": 18883, - "iren": 24080, - "irens": 42917, - "ires": 2387, - "irez": 31762, - "irgin": 4672, - "iri": 14783, - "irie": 28191, - "iries": 18561, - "irin": 47388, - "iring": 3428, - "iris": 29616, - "irit": 3276, - "irk": 14232, - "irl": 1901, - "irled": 49376, - "irlf": 9841, - "irlfriend": 9872, - "irling": 24297, - "irlwind": 32785, - "irm": 2533, - "irmation": 36241, - "irmed": 15491, - "irming": 29808, - "irms": 8789, - "iro": 7058, - "iron": 1934, - "irrel": 22793, - "irs": 17062, - "irsch": 47108, - "irst": 667, - "irt": 2265, - "irted": 48357, - "irteen": 22530, - "irth": 3333, - "irting": 35355, - "irts": 9682, - "irtual": 22341, - "irty": 5893, - "iru": 35406, - "irus": 19397, - "iry": 9045, - "is": 271, - "isSpecial": 39714, - "isSpecialOrderable": 39755, - "isa": 9160, - 
"isable": 43942, - "isal": 28456, - "isan": 9057, - "isance": 31872, - "isans": 26100, - "isation": 5612, - "isations": 38189, - "isbury": 47967, - "isc": 2304, - "iscal": 7860, - "isch": 25308, - "ische": 46097, - "ischer": 24645, - "isco": 4861, - "iscons": 8795, - "isconsin": 8816, - "iscopal": 42522, - "iscover": 29392, - "iscovered": 41168, - "iscovery": 40821, - "isd": 9409, - "isdom": 9350, - "ise": 786, - "isec": 27866, - "ised": 1417, - "isel": 36811, - "isen": 13254, - "iser": 5847, - "isers": 21572, - "ises": 2696, - "iseum": 38277, - "isexual": 20863, - "isf": 4468, - "ish": 680, - "isha": 19388, - "ishable": 31785, - "ished": 1348, - "isher": 4828, - "ishers": 39116, - "ishes": 5614, - "ishi": 21644, - "ishing": 3929, - "ishly": 29735, - "ishment": 17862, - "ishop": 10124, - "ishops": 21863, - "ishy": 49785, - "isi": 23267, - "isible": 12843, - "isin": 45763, - "isine": 27480, - "ising": 1710, - "ision": 1166, - "isions": 3279, - "isite": 16107, - "isites": 31327, - "isition": 10027, - "isitions": 29593, - "isive": 13911, - "isively": 42042, - "isk": 1984, - "isks": 36730, - "isky": 34041, - "isl": 3044, - "isle": 20919, - "ism": 1042, - "isma": 38017, - "isman": 23845, - "ismo": 44126, - "isms": 6583, - "isner": 49861, - "iso": 26786, - "isode": 3282, - "isodes": 8052, - "isoft": 29719, - "isol": 30152, - "ison": 1653, - "isons": 9886, - "isp": 8802, - "ispers": 27148, - "isphere": 22833, - "iss": 747, - "issa": 13808, - "issan": 24112, - "issance": 16419, - "isse": 20782, - "ission": 1480, - "issions": 7717, - "isson": 30927, - "issors": 32555, - "issue": 21949, - "issued": 39361, - "issues": 37165, - "issy": 36419, - "ist": 396, - "ista": 12523, - "istan": 4103, - "istance": 9311, - "istani": 16688, - "istant": 10167, - "istar": 47229, - "istas": 37503, - "iste": 40833, - "isted": 6347, - "istence": 13274, - "istent": 7609, - "ister": 1694, - "istered": 23187, - "isters": 6223, - "istic": 2569, - "istical": 19929, - "istically": 16772, - "istics": 
3969, - "istine": 32248, - "isting": 9665, - "istle": 12535, - "iston": 36363, - "istor": 32380, - "istors": 46334, - "istrate": 28534, - "istrates": 37909, - "istration": 33397, - "istries": 32995, - "istry": 4592, - "ists": 1023, - "isu": 46313, - "isure": 20609, - "isy": 13560, - "it": 270, - "ita": 5350, - "itability": 34147, - "itable": 4674, - "itably": 14829, - "itage": 10208, - "itaire": 26627, - "ital": 1287, - "itals": 8321, - "itamin": 40746, - "itan": 18642, - "itance": 42942, - "itans": 43716, - "itant": 23737, - "itar": 7940, - "itarian": 8353, - "itars": 27745, - "itary": 9331, - "itas": 21416, - "itate": 12027, - "itated": 13939, - "itates": 38654, - "itating": 21712, - "itation": 3780, - "itational": 22181, - "itations": 20597, - "itative": 12464, - "itatively": 48668, - "itbart": 17868, - "itch": 2007, - "itched": 10981, - "itcher": 23640, - "itches": 9249, - "itchie": 48423, - "itching": 19811, - "ite": 578, - "itech": 45396, - "itect": 5712, - "ited": 863, - "itely": 3973, - "item": 9186, - "itement": 12559, - "items": 23814, - "itent": 48324, - "iter": 2676, - "iterator": 48727, - "iterranean": 19012, - "ites": 2737, - "ith": 342, - "ithe": 31470, - "ither": 1555, - "ithering": 40861, - "ithing": 44556, - "ithmetic": 29848, - "iths": 47252, - "ithub": 10060, - "iti": 8846, - "itia": 36723, - "itial": 6847, - "itialized": 13562, - "itially": 22640, - "itic": 16233, - "ities": 871, - "itimate": 30233, - "itime": 22552, - "iting": 1780, - "ition": 653, - "itional": 1859, - "itionally": 8736, - "itions": 1756, - "itious": 25253, - "itis": 11815, - "itism": 18937, - "itive": 1800, - "itiveness": 31366, - "itives": 20288, - "itivity": 11365, - "itiz": 3029, - "itized": 36951, - "itizen": 36958, - "itizens": 34100, - "itle": 2578, - "itled": 7803, - "itles": 30540, - "itness": 3659, - "ito": 10094, - "itol": 11650, - "iton": 37752, - "itone": 49644, - "itor": 2072, - "itored": 20026, - "itors": 6742, - "itory": 37765, - "itous": 22109, - "itri": 
49510, - "its": 896, - "itsch": 48279, - "itsu": 19831, - "itt": 715, - "itta": 48519, - "ittal": 39979, - "ittance": 47912, - "itte": 2654, - "itted": 2175, - "ittee": 2979, - "ittees": 13263, - "itten": 2621, - "ittens": 34978, - "itter": 1967, - "ittered": 36613, - "itters": 45512, - "itting": 2535, - "ittle": 1206, - "itto": 37606, - "itton": 47304, - "itty": 9760, - "itu": 34272, - "itual": 10587, - "itud": 26331, - "itude": 3984, - "itudes": 10455, - "itudinal": 29121, - "iture": 8089, - "itures": 20686, - "itus": 17506, - "itute": 3678, - "itutes": 16845, - "itution": 2738, - "itutional": 5677, - "ity": 414, - "itz": 4224, - "itzer": 10557, - "itzerland": 13947, - "ité": 43816, - "iu": 16115, - "ium": 1505, - "ius": 3754, - "iuses": 44666, - "iv": 452, - "iva": 12151, - "ivable": 21911, - "ivably": 47994, - "ival": 2473, - "ivalent": 29540, - "ivalry": 47310, - "ivals": 10336, - "ivan": 13809, - "ivari": 35460, - "ivariate": 42524, - "ivas": 38630, - "ivated": 30829, - "ivating": 39438, - "ivation": 26939, - "ive": 425, - "ived": 1572, - "ively": 2280, - "iven": 1469, - "iveness": 6517, - "iver": 1428, - "ivered": 6396, - "ivering": 35598, - "iverpool": 10864, - "ivers": 1191, - "iversal": 11480, - "iversary": 9023, - "iverse": 3997, - "iversity": 1608, - "ivery": 6315, - "ives": 1083, - "ivia": 20817, - "ivic": 16482, - "ivid": 1699, - "ividual": 1896, - "ividually": 16335, - "ivil": 2464, - "iving": 1412, - "ivism": 25085, - "ivist": 30944, - "ivities": 28720, - "ivity": 3458, - "ivo": 23593, - "ivot": 45785, - "iw": 14246, - "ix": 844, - "ixed": 2966, - "ixel": 7168, - "ixels": 14810, - "ixie": 39291, - "ixir": 32345, - "ixon": 12305, - "ixt": 6346, - "ixtape": 43938, - "ixties": 46550, - "ixture": 9602, - "ixtures": 25506, - "ixty": 19404, - "iy": 7745, - "iya": 21008, - "iyah": 46398, - "iz": 528, - "iza": 23638, - "izabeth": 9924, - "izable": 13821, - "izard": 8669, - "izards": 14124, - "izarre": 12474, - "ization": 1634, - "izational": 22684, - 
"izations": 4582, - "ize": 1096, - "ized": 1143, - "izen": 33977, - "izens": 44908, - "izer": 7509, - "izers": 11341, - "izes": 4340, - "izing": 2890, - "izo": 41282, - "izon": 8637, - "izons": 29457, - "izont": 12071, - "izontal": 38342, - "izoph": 18115, - "izophren": 18337, - "izu": 47775, - "izz": 6457, - "izza": 9990, - "izzard": 16191, - "izzle": 44461, - "izzy": 40593, - "j": 73, - "ja": 6592, - "jab": 27935, - "jac": 30482, - "jack": 19650, - "jad": 38442, - "jah": 31558, - "jam": 39159, - "jamin": 13337, - "jan": 13881, - "jandro": 47983, - "jar": 9491, - "jas": 28121, - "java": 12355, - "javascript": 37495, - "jay": 33708, - "jc": 48055, - "je": 18015, - "ject": 752, - "jected": 35408, - "jection": 29192, - "jee": 34589, - "jen": 48796, - "jer": 44009, - "jet": 31173, - "jew": 47483, - "ji": 7285, - "jiang": 39598, - "jin": 18594, - "jing": 49940, - "jit": 45051, - "jj": 41098, - "jl": 20362, - "jo": 7639, - "job": 21858, - "jobs": 43863, - "john": 30686, - "joice": 41026, - "join": 22179, - "joined": 46416, - "joining": 40044, - "jon": 46286, - "jong": 32428, - "journal": 24891, - "joy": 2633, - "jp": 34523, - "jpg": 9479, - "jri": 38790, - "jriwal": 39890, - "js": 8457, - "json": 17752, - "ju": 14396, - "jud": 10456, - "judicial": 46769, - "jug": 31761, - "jump": 43327, - "jun": 29741, - "jured": 38608, - "juries": 47496, - "jury": 21871, - "just": 3137, - "justice": 31012, - "juven": 39427, - "k": 74, - "kB": 38841, - "kHz": 44191, - "ka": 4914, - "kai": 32765, - "kamp": 40899, - "kan": 27541, - "kar": 21070, - "kas": 42749, - "kat": 41826, - "kay": 5568, - "kaya": 35372, - "kb": 32812, - "ke": 365, - "ked": 9091, - "kee": 11035, - "keep": 14894, - "keeper": 13884, - "keepers": 24952, - "keeping": 19934, - "kees": 16683, - "kef": 30728, - "kefeller": 31424, - "kel": 7750, - "keleton": 38800, - "keley": 13490, - "kell": 17164, - "ken": 3464, - "kens": 14972, - "kept": 45089, - "ker": 6122, - "kered": 28970, - "kernel": 33885, - "kers": 15949, - "kes": 
5209, - "ket": 7126, - "key": 2539, - "keye": 34929, - "keyes": 43174, - "keys": 13083, - "kg": 10025, - "kh": 14636, - "ki": 4106, - "kick": 24585, - "kid": 38439, - "kids": 45235, - "kie": 49375, - "kies": 43724, - "kil": 34553, - "kill": 12728, - "killed": 42130, - "killer": 32156, - "killers": 43492, - "killing": 43764, - "kin": 5116, - "kind": 11031, - "king": 3364, - "kins": 5331, - "kinson": 26030, - "kish": 31501, - "kiss": 41304, - "kit": 15813, - "kj": 42421, - "kk": 28747, - "kl": 41582, - "km": 13276, - "kn": 15418, - "knife": 48810, - "knit": 47095, - "know": 16275, - "knowledge": 45066, - "known": 4002, - "ko": 7204, - "kok": 32004, - "kos": 46150, - "kov": 21862, - "kowski": 26216, - "kr": 38584, - "krit": 44531, - "ks": 591, - "ksh": 50133, - "kson": 46505, - "kt": 21841, - "ktop": 16201, - "ku": 23063, - "kun": 28374, - "kus": 45614, - "kw": 46265, - "kward": 12378, - "ky": 2584, - "l": 75, - "la": 5031, - "lab": 23912, - "label": 18242, - "lace": 27077, - "lad": 9435, - "laden": 35668, - "lag": 30909, - "lah": 9271, - "lahoma": 9802, - "laim": 20438, - "lain": 34277, - "lake": 27180, - "lam": 2543, - "lambda": 50033, - "lamm": 11199, - "lan": 9620, - "lance": 23215, - "land": 1044, - "lander": 16235, - "landers": 32358, - "landish": 45626, - "lando": 11993, - "lands": 4447, - "lane": 33533, - "lang": 17204, - "language": 16129, - "lap": 37796, - "lar": 21681, - "larg": 15521, - "large": 11664, - "largest": 28209, - "las": 21921, - "lash": 17055, - "lass": 31172, - "lasses": 28958, - "last": 12957, - "lasting": 24810, - "lat": 15460, - "latable": 49009, - "late": 17660, - "lated": 17249, - "later": 36760, - "latest": 42861, - "lation": 7592, - "lations": 49905, - "lator": 41880, - "laugh": 44944, - "laughs": 28124, - "laughter": 27815, - "laun": 38722, - "launch": 35681, - "laus": 38024, - "lav": 18809, - "law": 6270, - "laws": 29317, - "lay": 10724, - "layer": 29289, - "layout": 39786, - "lb": 23160, - "lbs": 32133, - "lc": 44601, - "ld": 335, - 
"lda": 18986, - "lde": 35209, - "lder": 6499, - "ldom": 23826, - "ldon": 25900, - "le": 293, - "lead": 28230, - "leader": 27940, - "leaders": 37553, - "leading": 12294, - "leaf": 33201, - "league": 19316, - "lean": 13087, - "leaning": 25909, - "leanor": 41807, - "leans": 11861, - "lear": 3238, - "learn": 35720, - "learning": 40684, - "lease": 1274, - "leased": 14684, - "leases": 29329, - "leasing": 48764, - "leave": 47408, - "leck": 40667, - "lect": 801, - "lected": 12609, - "lectic": 42009, - "lection": 1564, - "lections": 26448, - "led": 992, - "ledge": 2965, - "ledged": 37436, - "lee": 7197, - "leen": 20042, - "leep": 8892, - "lees": 49410, - "leeve": 49189, - "left": 9464, - "leg": 1455, - "legal": 18011, - "legate": 34637, - "legates": 37061, - "lege": 2765, - "legged": 40898, - "legram": 30536, - "legraph": 16606, - "leground": 28272, - "lehem": 44797, - "leigh": 42342, - "lein": 33663, - "lem": 10671, - "lement": 1732, - "lements": 3639, - "lems": 46367, - "len": 11925, - "lene": 29466, - "leneck": 43163, - "leness": 48795, - "length": 13664, - "leon": 38970, - "ler": 1754, - "lers": 8116, - "les": 829, - "lesh": 29730, - "lesi": 36027, - "lesiastical": 46360, - "less": 1203, - "lessly": 8613, - "lessness": 17587, - "lest": 32712, - "let": 1616, - "letal": 47293, - "letcher": 29257, - "lete": 5807, - "leted": 33342, - "letes": 40676, - "lethal": 46480, - "letico": 47286, - "leton": 10565, - "lets": 5289, - "lett": 15503, - "lette": 21348, - "letter": 9291, - "letters": 15653, - "lev": 2768, - "levant": 14938, - "levard": 22123, - "level": 5715, - "levels": 46170, - "levision": 5024, - "lex": 2588, - "ley": 1636, - "leys": 21325, - "lez": 36858, - "lf": 1652, - "li": 4528, - "lia": 24660, - "liam": 5058, - "liament": 5130, - "lib": 8019, - "liber": 33203, - "liberal": 35739, - "library": 32016, - "lic": 677, - "lication": 10142, - "license": 43085, - "licensed": 36612, - "lich": 33467, - "licks": 49191, - "lict": 13758, - "licted": 17823, - "liction": 41101, 
- "licts": 42267, - "lie": 14485, - "lied": 18511, - "lier": 2505, - "lies": 13508, - "liest": 11318, - "lif": 36195, - "life": 6042, - "lift": 26282, - "lifting": 30510, - "lig": 4604, - "liga": 38910, - "light": 2971, - "lighting": 43351, - "lightly": 30945, - "lights": 8091, - "lihood": 11935, - "lik": 46965, - "like": 2339, - "likely": 40798, - "lim": 2475, - "lime": 27299, - "limit": 32374, - "limited": 10698, - "limits": 49196, - "lin": 2815, - "line": 1370, - "linear": 29127, - "lined": 10837, - "liner": 24683, - "liners": 34380, - "lines": 6615, - "liness": 26061, - "ling": 1359, - "linger": 33550, - "lings": 17783, - "lington": 17299, - "lining": 21310, - "link": 8726, - "linked": 25614, - "links": 28751, - "lins": 21602, - "linux": 23289, - "lio": 48590, - "lip": 40712, - "lique": 41522, - "liquid": 39250, - "lis": 27999, - "lish": 1836, - "lished": 2115, - "lisher": 8191, - "lishes": 19724, - "lishing": 20020, - "list": 4868, - "listed": 17935, - "lists": 20713, - "lit": 18250, - "lite": 36890, - "liter": 17201, - "literally": 43819, - "little": 31629, - "liv": 16017, - "live": 12583, - "lived": 24489, - "living": 19950, - "livion": 26018, - "livious": 35260, - "ll": 297, - "lla": 8466, - "llah": 22734, - "llan": 47993, - "lled": 3353, - "ller": 6051, - "llers": 13802, - "lli": 15516, - "lling": 2680, - "llo": 18798, - "llor": 14127, - "llular": 32771, - "lly": 12810, - "ln": 18755, - "lo": 5439, - "load": 2220, - "loaded": 14578, - "loader": 29356, - "loading": 25138, - "loads": 46030, - "loc": 17946, - "local": 12001, - "localhost": 36750, - "location": 24886, - "lock": 5354, - "locked": 24162, - "locking": 48331, - "locks": 28860, - "loe": 24617, - "log": 6404, - "login": 38235, - "lol": 47288, - "lon": 14995, - "long": 6511, - "loo": 29680, - "look": 5460, - "looking": 11534, - "loop": 26268, - "lopp": 39590, - "lor": 4685, - "lord": 10572, - "lords": 19673, - "lore": 31131, - "los": 33280, - "loss": 22462, - "lost": 33224, - "lot": 26487, - "lov": 
27086, - "love": 23205, - "loving": 33983, - "low": 9319, - "lower": 21037, - "lp": 34431, - "lr": 14050, - "ls": 7278, - "lt": 2528, - "lu": 2290, - "lua": 40211, - "luaj": 36473, - "lucent": 35600, - "luck": 46708, - "lude": 38792, - "luence": 23079, - "luent": 28216, - "lund": 37525, - "lus": 41790, - "lust": 38878, - "luster": 48375, - "lux": 22564, - "lv": 6780, - "lves": 31018, - "lvl": 47147, - "ly": 306, - "lyak": 43782, - "lycer": 38577, - "lying": 3157, - "lymp": 6760, - "lyn": 6213, - "lynn": 12935, - "lys": 27385, - "lyss": 35670, - "lé": 45031, - "m": 76, - "mA": 42646, - "mAh": 28142, - "mL": 32087, - "ma": 2611, - "mable": 44102, - "mac": 20285, - "machine": 30243, - "mad": 9937, - "made": 9727, - "mag": 19726, - "mage": 25561, - "magic": 32707, - "maid": 23151, - "mail": 4529, - "mails": 26165, - "main": 12417, - "major": 22478, - "majority": 35839, - "make": 15883, - "maker": 10297, - "makers": 6620, - "makes": 49123, - "making": 8601, - "mal": 7617, - "male": 22606, - "malink": 31000, - "mallow": 42725, - "man": 805, - "manac": 46870, - "managed": 39935, - "management": 27604, - "manager": 37153, - "mand": 22249, - "manent": 44172, - "mania": 45733, - "mann": 9038, - "mans": 16221, - "manship": 25428, - "manuel": 18713, - "manufact": 48119, - "many": 21834, - "map": 8899, - "maps": 31803, - "mar": 3876, - "mare": 11449, - "mares": 23745, - "marg": 30887, - "margin": 36153, - "marine": 42380, - "mark": 4102, - "marked": 23505, - "market": 10728, - "markets": 34162, - "marks": 14306, - "marriage": 45394, - "married": 30526, - "mart": 13822, - "mary": 6874, - "mas": 5356, - "mask": 27932, - "mass": 22208, - "massive": 49777, - "mast": 47616, - "master": 9866, - "masters": 40706, - "mat": 6759, - "match": 15699, - "matched": 31409, - "mate": 9830, - "material": 33665, - "mates": 7300, - "math": 11018, - "matic": 13849, - "matical": 44935, - "matically": 49454, - "matter": 47635, - "max": 9806, - "maximum": 47033, - "maxwell": 29047, - "may": 11261, - 
"maybe": 25991, - "mb": 2022, - "mber": 1916, - "mberg": 47369, - "mble": 11306, - "mbol": 23650, - "mbuds": 45664, - "mbudsman": 47012, - "mc": 23209, - "md": 9132, - "me": 1326, - "meal": 28208, - "mean": 32604, - "meaning": 24815, - "measures": 47336, - "meat": 41495, - "med": 1150, - "medi": 2379, - "media": 11431, - "mediate": 13857, - "mediated": 38363, - "mediately": 23802, - "medical": 41693, - "medium": 24132, - "meet": 47745, - "meg": 28917, - "mega": 13731, - "meier": 49468, - "mel": 17694, - "melon": 45690, - "mem": 11883, - "member": 19522, - "members": 30814, - "memory": 31673, - "men": 3653, - "mens": 45535, - "ment": 434, - "mental": 37098, - "mentation": 14374, - "mented": 12061, - "mentioned": 17181, - "ments": 902, - "menu": 26272, - "mer": 647, - "merce": 11647, - "mercial": 15790, - "mere": 34671, - "merga": 44739, - "meric": 946, - "mers": 11056, - "mes": 6880, - "mess": 37348, - "message": 20500, - "met": 4164, - "meta": 28961, - "metadata": 38993, - "metal": 28469, - "meter": 27231, - "method": 24396, - "methyl": 43654, - "metic": 15103, - "metics": 27757, - "metry": 41935, - "meyer": 48794, - "mg": 11296, - "mi": 11632, - "mia": 20730, - "miah": 35029, - "mic": 9383, - "micro": 24055, - "microsoft": 40485, - "mid": 13602, - "middle": 27171, - "midt": 21184, - "mie": 44871, - "might": 44092, - "mil": 25433, - "mile": 18085, - "military": 33631, - "mill": 17805, - "million": 14100, - "milo": 48995, - "min": 1084, - "mination": 17928, - "mind": 10155, - "minded": 14543, - "mine": 3810, - "minecraft": 17761, - "minent": 19669, - "ming": 2229, - "mingham": 17737, - "mington": 39773, - "mini": 45313, - "minimum": 39504, - "mining": 45374, - "minist": 2201, - "ministic": 49228, - "mins": 42951, - "minster": 18462, - "mint": 34289, - "minus": 40191, - "minute": 11374, - "mir": 10793, - "mire": 47004, - "mis": 25413, - "misc": 44374, - "miss": 3927, - "missible": 21597, - "missing": 45688, - "mission": 3411, - "missions": 8481, - "missive": 33532, - 
"mist": 37980, - "mit": 2781, - "mite": 32937, - "mith": 22947, - "mits": 24883, - "mitt": 20124, - "mitted": 3291, - "mittedly": 43011, - "mitter": 37974, - "mitting": 16138, - "mix": 19816, - "mk": 28015, - "ml": 4029, - "mm": 3020, - "mma": 21672, - "mmm": 27532, - "mmmm": 40133, - "mn": 10295, - "mo": 5908, - "mob": 39949, - "mobi": 43549, - "mobile": 24896, - "mod": 4666, - "mode": 14171, - "model": 19849, - "models": 27530, - "moderate": 47189, - "modern": 23922, - "modified": 41771, - "mods": 24122, - "module": 21412, - "modules": 18170, - "moil": 25538, - "mol": 43132, - "mology": 29126, - "mom": 32542, - "mon": 2144, - "monary": 36639, - "mond": 6327, - "monds": 42620, - "mone": 47122, - "money": 26316, - "mong": 31059, - "monitor": 41143, - "monkey": 49572, - "mons": 11567, - "monster": 39050, - "mont": 8691, - "month": 8424, - "months": 41537, - "monton": 19729, - "mony": 9926, - "moon": 22977, - "mop": 35244, - "mopolitan": 44331, - "mor": 4491, - "moral": 41996, - "more": 3549, - "morning": 43911, - "morph": 24503, - "morrow": 9201, - "mort": 30171, - "mortem": 46515, - "mos": 16785, - "mosp": 6384, - "most": 1712, - "mostly": 29471, - "mot": 27926, - "mother": 13552, - "motion": 38714, - "mount": 14948, - "mounted": 29728, - "mouse": 35888, - "mouth": 14775, - "move": 21084, - "movie": 41364, - "moving": 31462, - "mp": 3149, - "mpeg": 43913, - "mph": 23335, - "mpire": 35386, - "mr": 43395, - "ms": 907, - "msg": 19662, - "mson": 24996, - "mt": 16762, - "mu": 30300, - "much": 29482, - "mud": 41650, - "mult": 16680, - "multi": 41684, - "multipl": 47945, - "multiple": 48101, - "mun": 6199, - "mund": 20125, - "munition": 12640, - "mur": 28582, - "mus": 14664, - "music": 28965, - "must": 27238, - "mut": 21973, - "mx": 36802, - "my": 1820, - "myra": 49216, - "mys": 28744, - "n": 77, - "na": 2616, - "nah": 40909, - "nai": 38600, - "naire": 24042, - "naires": 43317, - "naissance": 47090, - "nam": 7402, - "name": 3672, - "named": 13190, - "names": 14933, - 
"namese": 22678, - "nan": 12647, - "nance": 41601, - "nant": 22057, - "nants": 26501, - "nar": 23955, - "nard": 40542, - "nas": 24716, - "nat": 32353, - "natal": 33150, - "nation": 25729, - "national": 14648, - "native": 30191, - "natural": 11802, - "nature": 21353, - "natureconservancy": 41380, - "nav": 28341, - "nb": 46803, - "nc": 10782, - "nce": 1198, - "nces": 3179, - "nd": 358, - "nda": 45658, - "nder": 681, - "ndra": 24631, - "ndum": 11021, - "ne": 710, - "nea": 39718, - "neapolis": 19359, - "near": 40093, - "neath": 13725, - "neau": 46533, - "nec": 32984, - "necess": 10789, - "necessary": 49986, - "neck": 27235, - "nect": 1606, - "ned": 2817, - "nee": 21381, - "need": 31227, - "needed": 27938, - "needs": 50032, - "neg": 12480, - "negative": 31591, - "negie": 32360, - "nel": 4954, - "nell": 10076, - "nels": 19423, - "nen": 38572, - "ner": 1008, - "nered": 15826, - "nerg": 25649, - "nergy": 5877, - "ners": 2741, - "nery": 35865, - "nes": 2516, - "nesday": 3462, - "nesia": 31401, - "nesium": 27619, - "nesota": 8360, - "ness": 1108, - "nesses": 47556, - "nesty": 18718, - "net": 3262, - "netflix": 36977, - "netic": 9833, - "nets": 45938, - "nette": 48115, - "network": 27349, - "neum": 25668, - "neutral": 29797, - "never": 12081, - "new": 3605, - "news": 10827, - "nex": 12413, - "nexpected": 42072, - "next": 19545, - "nexus": 44520, - "ney": 1681, - "neys": 20141, - "ng": 782, - "ngth": 11910, - "ni": 8461, - "nia": 18142, - "nian": 44516, - "nic": 6988, - "nice": 44460, - "nick": 17172, - "nie": 11952, - "night": 3847, - "nih": 37373, - "nik": 17187, - "nikov": 45451, - "nil": 45991, - "nin": 35073, - "nine": 30888, - "ning": 768, - "nings": 23400, - "nington": 48405, - "niper": 45554, - "nir": 32986, - "nis": 21361, - "nit": 48825, - "nl": 21283, - "nm": 21533, - "nn": 20471, - "no": 3919, - "nob": 34952, - "node": 17440, - "nom": 26601, - "non": 13159, - "none": 23108, - "noon": 6357, - "nor": 13099, - "norm": 27237, - "normal": 11265, - "north": 43588, - 
"nos": 39369, - "nosis": 31707, - "nostic": 43758, - "not": 1662, - "notation": 38983, - "notations": 30078, - "note": 11295, - "notes": 17815, - "nothing": 22366, - "notice": 42138, - "noticed": 31696, - "nov": 37302, - "nova": 38438, - "now": 2197, - "nown": 3408, - "nox": 35420, - "noxious": 40591, - "np": 37659, - "nr": 48624, - "ns": 5907, - "nsic": 19364, - "nsics": 49242, - "nt": 429, - "ntax": 41641, - "ntil": 10125, - "nton": 28936, - "nu": 28803, - "nuclear": 43131, - "null": 8423, - "num": 22510, - "number": 17618, - "numbered": 35565, - "nut": 14930, - "nutrition": 40482, - "nuts": 31381, - "nv": 48005, - "nw": 47516, - "ny": 3281, - "nyder": 21053, - "nz": 27305, - "o": 78, - "oS": 34049, - "oa": 12162, - "oad": 1170, - "oaded": 22273, - "oak": 15877, - "oan": 24611, - "oard": 11953, - "oat": 15073, - "ob": 672, - "oba": 19981, - "obal": 2572, - "obar": 30973, - "obb": 21963, - "obbies": 41372, - "obby": 11369, - "obe": 5910, - "ober": 2023, - "obi": 13411, - "obia": 30665, - "obic": 20803, - "obil": 25898, - "obile": 3579, - "obiles": 36329, - "obin": 38954, - "obj": 26801, - "object": 15252, - "objects": 48205, - "obl": 45292, - "obo": 20391, - "obook": 49776, - "obos": 49878, - "obs": 8158, - "oby": 26730, - "obyl": 46666, - "oc": 420, - "oca": 11216, - "ocado": 33441, - "ocal": 4374, - "ocally": 44190, - "ocaly": 12063, - "ocalypse": 15145, - "ocalyptic": 28339, - "ocamp": 26047, - "ocard": 44412, - "ocate": 13369, - "ocated": 10533, - "ocating": 27123, - "ocation": 5040, - "ocations": 20968, - "ocative": 23466, - "ocaust": 16377, - "occ": 13966, - "occup": 19596, - "occupied": 28756, - "ocene": 34973, - "ocent": 29421, - "ocese": 31292, - "och": 5374, - "oche": 30848, - "ochem": 18958, - "ochemical": 32864, - "ochemistry": 37074, - "ochet": 49579, - "ochond": 22400, - "oci": 1733, - "ocial": 9402, - "ociate": 47615, - "ociated": 19293, - "ociation": 41003, - "ocide": 16207, - "ocious": 32346, - "ocity": 11683, - "ock": 735, - "ocked": 3543, - 
"ocker": 12721, - "ocket": 5459, - "ockets": 11603, - "ockey": 8337, - "ocking": 8629, - "ocks": 3320, - "ocl": 38679, - "oco": 25634, - "ocobo": 37642, - "ococ": 34403, - "ocol": 4668, - "ocolate": 9140, - "ocom": 42829, - "ocon": 36221, - "ocr": 1696, - "ocracy": 17818, - "ocrat": 35128, - "ocrates": 34095, - "ocratic": 15405, - "ocrats": 21988, - "ocre": 27945, - "ocrin": 39913, - "ocrine": 38658, - "ocry": 48103, - "oct": 38441, - "ocular": 37320, - "ocument": 7990, - "ocumented": 17664, - "ocus": 10901, - "ocused": 13073, - "ocusing": 45743, - "ocy": 13733, - "ocyte": 43320, - "ocytes": 30309, - "od": 375, - "oda": 11329, - "odan": 45561, - "oday": 4348, - "odcast": 7107, - "odd": 5088, - "odder": 35346, - "oddy": 38553, - "ode": 1098, - "oded": 9043, - "oder": 12342, - "odes": 4147, - "odge": 9728, - "odi": 23130, - "odiac": 40096, - "odic": 29512, - "odied": 32255, - "odies": 5042, - "oding": 7656, - "odium": 12664, - "odka": 28601, - "odo": 24313, - "odon": 46457, - "odor": 30530, - "odore": 25102, - "odox": 11430, - "ods": 12978, - "odus": 21878, - "ody": 1118, - "odynam": 24319, - "odynamic": 34743, - "odynamics": 44124, - "oe": 2577, - "oen": 6571, - "oenix": 8538, - "oes": 3028, - "oeuv": 37600, - "of": 1659, - "ofer": 30288, - "off": 2364, - "offensive": 45055, - "offer": 47895, - "offic": 14406, - "office": 31810, - "official": 16841, - "offs": 8210, - "offset": 28968, - "ofi": 39542, - "oft": 11205, - "often": 28950, - "og": 519, - "oga": 10949, - "ogan": 9632, - "ogen": 6644, - "ogene": 20878, - "ogeneity": 37477, - "ogeneous": 32269, - "ogenesis": 25908, - "ogenic": 15147, - "ogenous": 27897, - "ogens": 26612, - "ogether": 8236, - "ogg": 10332, - "ogged": 42545, - "ogging": 30853, - "oggle": 20258, - "oggles": 48549, - "ogh": 46664, - "ogi": 44381, - "ogical": 30766, - "ogie": 37987, - "ogl": 28678, - "ogle": 2467, - "oglobin": 49835, - "oglu": 49006, - "ogly": 34619, - "ogn": 2360, - "ognitive": 46610, - "ogo": 24076, - "ogram": 21857, - "ograms": 
26836, - "ograp": 7113, - "ograph": 2384, - "ographed": 39620, - "ographer": 18539, - "ographers": 34063, - "ographic": 6826, - "ographical": 17046, - "ographically": 33145, - "ographics": 24188, - "ographies": 41480, - "ographs": 33492, - "ography": 4867, - "ogs": 18463, - "ogue": 5119, - "ogun": 39918, - "ogy": 9868, - "ogyn": 20593, - "oh": 1219, - "oha": 28083, - "ohan": 22436, - "ohl": 48988, - "ohm": 34028, - "ohn": 1562, - "oho": 40950, - "ohyd": 15782, - "ohydrate": 46358, - "oi": 23013, - "oice": 2942, - "oid": 1868, - "oidal": 47502, - "oided": 13780, - "oids": 10994, - "oil": 9437, - "oiler": 20837, - "oin": 36743, - "oine": 42722, - "oing": 40519, - "oint": 1563, - "ointed": 20909, - "ointment": 49805, - "oir": 10840, - "oire": 32177, - "ois": 10924, - "oise": 25678, - "oit": 30711, - "oj": 13210, - "oji": 31370, - "ojure": 32511, - "ok": 482, - "oka": 17411, - "okane": 41776, - "oke": 2088, - "oked": 6545, - "okemon": 12717, - "oken": 4233, - "oker": 11020, - "okers": 18698, - "okes": 3369, - "oki": 18228, - "okia": 22903, - "okin": 36749, - "oking": 5730, - "okingly": 48343, - "oko": 16044, - "oks": 28194, - "oku": 11601, - "oky": 31375, - "oké": 35861, - "ol": 349, - "ola": 5708, - "olan": 16617, - "oland": 23573, - "olar": 6192, - "olars": 7828, - "olas": 12456, - "olate": 27976, - "olated": 50027, - "olation": 21417, - "old": 727, - "olded": 48959, - "oldemort": 24710, - "older": 19892, - "olding": 33266, - "oldown": 15041, - "olds": 10119, - "ole": 2305, - "olean": 21052, - "oled": 45342, - "olen": 8622, - "oleon": 25637, - "oler": 13625, - "olerance": 37668, - "oles": 4316, - "olesc": 16850, - "olescent": 23406, - "olester": 15764, - "olesterol": 16743, - "oley": 48437, - "olf": 4024, - "oli": 11106, - "olia": 22703, - "oliath": 43009, - "oliberal": 28525, - "olic": 4160, - "olicited": 47607, - "olics": 19615, - "olicy": 21424, - "olid": 10180, - "olin": 24910, - "olina": 47196, - "oline": 14453, - "oling": 40949, - "olini": 43232, - "olis": 
8506, - "olit": 6212, - "olitan": 12977, - "olith": 21446, - "olithic": 30764, - "olitical": 13781, - "olitics": 21704, - "olition": 50014, - "olk": 13597, - "olkien": 31052, - "oll": 692, - "olla": 33011, - "ollah": 17999, - "ollar": 13228, - "ollen": 29952, - "oller": 49252, - "ollo": 15578, - "ollow": 950, - "ollower": 47030, - "olls": 33421, - "olly": 5098, - "ollywood": 31777, - "oln": 10875, - "olo": 14057, - "olog": 928, - "ologic": 20781, - "ological": 2770, - "ologically": 13437, - "ologies": 5823, - "ologist": 7451, - "ologists": 9251, - "ologne": 30520, - "ologue": 39795, - "ology": 1435, - "olon": 43645, - "olor": 45621, - "olph": 10196, - "olphin": 27161, - "olphins": 16547, - "ols": 10220, - "olson": 32836, - "olt": 5978, - "olulu": 39814, - "olute": 3552, - "olutely": 16780, - "olution": 2122, - "olutions": 14191, - "olve": 6442, - "olved": 5634, - "olver": 14375, - "olves": 9010, - "olving": 10890, - "oly": 3366, - "olyn": 34742, - "om": 296, - "oma": 6086, - "omach": 10806, - "omal": 18048, - "omaly": 24335, - "oman": 5185, - "omas": 16911, - "omatic": 13730, - "omb": 2381, - "ombat": 41628, - "ombie": 9081, - "ombies": 12676, - "ombo": 47265, - "ombs": 33273, - "ome": 462, - "omed": 12657, - "omedical": 35914, - "omen": 3674, - "omer": 12057, - "omers": 21499, - "omes": 2586, - "omet": 908, - "ometer": 15635, - "ometers": 40077, - "omething": 8370, - "ometime": 47056, - "ometimes": 6533, - "ometown": 19191, - "ometric": 16996, - "ometry": 15748, - "omever": 49784, - "omew": 28030, - "omez": 30010, - "omi": 12753, - "omial": 49070, - "omic": 10179, - "omical": 22545, - "omics": 31994, - "omin": 6351, - "ominated": 50251, - "omination": 27744, - "oming": 3383, - "ominium": 46134, - "omm": 2002, - "ommel": 48990, - "ommod": 8641, - "omnia": 37340, - "omo": 17902, - "omon": 16698, - "omore": 22113, - "omorph": 25831, - "omorphic": 46374, - "omp": 3361, - "ompl": 6316, - "oms": 3150, - "omsday": 33415, - "omsky": 37093, - "omy": 9145, - "on": 261, - 
"ona": 4450, - "onal": 20996, - "once": 27078, - "ond": 623, - "onda": 13533, - "onday": 3204, - "onde": 14378, - "onder": 8623, - "onding": 42703, - "ondo": 22311, - "ondon": 3391, - "onds": 24764, - "onduct": 12920, - "onductor": 40990, - "one": 505, - "oned": 12004, - "onel": 26261, - "oneliness": 34634, - "onement": 49844, - "onen": 34481, - "onent": 3471, - "onential": 35470, - "onents": 3906, - "oner": 14491, - "ones": 1952, - "onest": 19129, - "onet": 36823, - "onew": 44181, - "oney": 1419, - "ong": 506, - "onga": 44294, - "onge": 14220, - "ongevity": 25454, - "ongh": 19757, - "ongo": 25162, - "ongs": 28079, - "ongyang": 20659, - "oni": 14651, - "onia": 11339, - "onial": 30752, - "onian": 27625, - "onic": 9229, - "onica": 32752, - "onics": 38530, - "onies": 17300, - "oning": 12484, - "onis": 43524, - "onite": 46285, - "online": 25119, - "only": 8807, - "onna": 6415, - "onnaissance": 31539, - "onne": 47476, - "ono": 29941, - "onom": 6326, - "onomic": 40036, - "onomous": 38175, - "onomy": 30565, - "ons": 684, - "onse": 2591, - "onsense": 46563, - "onsequ": 40819, - "onso": 26666, - "onson": 36742, - "ont": 756, - "onte": 38599, - "ontent": 38564, - "onto": 5957, - "onut": 16478, - "ony": 1647, - "onym": 5177, - "onymous": 6704, - "onyms": 43612, - "onz": 13569, - "oo": 2238, - "ood": 702, - "oodle": 27106, - "oodoo": 36038, - "oof": 37711, - "ook": 566, - "ooked": 46288, - "ookie": 18055, - "ooks": 31085, - "ooky": 29655, - "ool": 970, - "oola": 10513, - "ools": 10141, - "oom": 4207, - "ooming": 30602, - "oon": 2049, - "oons": 13022, - "ooo": 34160, - "oooo": 13321, - "oooooooo": 26759, - "oooooooooooooooo": 49135, - "oop": 11224, - "oops": 44860, - "oor": 2675, - "oos": 16426, - "oot": 1025, - "ooter": 25141, - "ooters": 48316, - "ooth": 5226, - "oother": 31724, - "ooting": 12494, - "oots": 13880, - "op": 404, - "opa": 26186, - "opal": 33067, - "opard": 15478, - "opath": 18569, - "opathic": 44650, - "opathy": 27189, - "opausal": 47637, - "ope": 3008, - 
"oped": 19458, - "open": 9654, - "opened": 26350, - "opening": 29443, - "opens": 44813, - "oper": 3575, - "operated": 42767, - "operation": 27184, - "operative": 27173, - "operator": 46616, - "opers": 20618, - "opes": 13920, - "opez": 20808, - "oph": 2522, - "ophe": 43852, - "ophen": 47806, - "opher": 8803, - "ophers": 20856, - "ophical": 49256, - "ophile": 37161, - "ophob": 13253, - "ophobia": 19851, - "ophobic": 23468, - "ophon": 48982, - "ophone": 38656, - "ophy": 16982, - "ophys": 39665, - "ophysical": 41789, - "opia": 24464, - "opian": 37548, - "opic": 16603, - "oping": 15816, - "opl": 20106, - "oplan": 46853, - "ople": 643, - "oples": 12614, - "opol": 39704, - "opolis": 47575, - "opoly": 35894, - "opot": 43372, - "opoulos": 20338, - "opp": 10365, - "oppable": 35628, - "opped": 38333, - "oppers": 37186, - "opping": 33307, - "oppy": 26696, - "ops": 2840, - "opsis": 24608, - "opsy": 44522, - "opt": 8738, - "opted": 45256, - "opter": 32563, - "optim": 40085, - "option": 18076, - "optional": 25968, - "options": 25811, - "opus": 25790, - "opy": 11081, - "oqu": 22696, - "or": 273, - "ora": 5799, - "orable": 10475, - "orage": 4945, - "orah": 40844, - "oral": 6864, - "orama": 36161, - "oran": 31884, - "orange": 43745, - "oras": 41043, - "orate": 16262, - "oration": 6944, - "orative": 36478, - "orb": 27688, - "orbit": 42594, - "orc": 24449, - "orce": 8387, - "ord": 585, - "ordable": 16819, - "ordan": 7350, - "orde": 17531, - "order": 2875, - "ordered": 24071, - "ordering": 34555, - "orders": 6361, - "ordes": 35770, - "ordial": 31223, - "ordinary": 35947, - "ordinate": 45480, - "ording": 1284, - "ordon": 9999, - "ords": 3669, - "ore": 382, - "oreAnd": 40219, - "oreAndOnline": 40240, - "orea": 46215, - "oreal": 39396, - "orean": 29456, - "ored": 1850, - "orem": 29625, - "oren": 29578, - "orer": 11934, - "orers": 28089, - "ores": 2850, - "oresc": 45166, - "orescence": 48699, - "orescent": 35414, - "orest": 26522, - "oret": 9997, - "orf": 24263, - "org": 2398, - "organ": 
9971, - "organic": 36617, - "organisms": 45165, - "organized": 30280, - "orge": 3643, - "orget": 28122, - "orgetown": 29085, - "ori": 10145, - "oria": 7661, - "orial": 5132, - "orian": 22618, - "orians": 45825, - "oric": 8146, - "orical": 12409, - "orically": 26847, - "orie": 19257, - "oried": 42058, - "orient": 13989, - "oriented": 17107, - "ories": 1749, - "orig": 11612, - "origin": 47103, - "original": 14986, - "oring": 3255, - "orio": 40974, - "orious": 9982, - "oris": 37279, - "ority": 29134, - "orius": 48759, - "ork": 967, - "orks": 3647, - "orkshire": 29918, - "orld": 1764, - "orm": 579, - "ormal": 6636, - "orman": 26183, - "ormon": 10615, - "ormonal": 33792, - "ormons": 29966, - "orn": 1211, - "orne": 8553, - "orned": 26994, - "orney": 4195, - "orneys": 13060, - "ornia": 3317, - "ornings": 28863, - "orno": 46447, - "orns": 19942, - "oro": 16522, - "oros": 40877, - "orough": 7985, - "orous": 9610, - "orously": 24882, - "orp": 16300, - "orph": 13425, - "orpor": 31150, - "orporated": 40132, - "orr": 38890, - "orrect": 47315, - "orrow": 6254, - "orry": 5152, - "ors": 669, - "orsche": 26164, - "orse": 7615, - "orses": 11836, - "orset": 49590, - "orship": 11094, - "orsi": 35255, - "orst": 29422, - "ort": 419, - "ortal": 16906, - "ortality": 28337, - "orted": 9741, - "orter": 4337, - "orters": 3816, - "ortex": 26158, - "orth": 1506, - "orthern": 4824, - "orthodox": 42539, - "orthy": 18906, - "orting": 24707, - "ortion": 5817, - "ortium": 25182, - "ortment": 33920, - "ortmund": 34876, - "orts": 2096, - "ortun": 1922, - "ortunate": 13651, - "ortunately": 4690, - "oru": 27786, - "orum": 19220, - "orus": 15125, - "ory": 652, - "os": 418, - "osa": 8546, - "osal": 40725, - "osate": 33931, - "osaurs": 22344, - "osaurus": 47650, - "osc": 17500, - "oscope": 40326, - "oscopic": 48228, - "ose": 577, - "osed": 1335, - "osen": 5233, - "oser": 13416, - "oses": 4629, - "osexual": 8542, - "osh": 3768, - "oshenko": 43934, - "osher": 38321, - "oshi": 13704, - "oshop": 25444, - 
"osi": 21707, - "osing": 2752, - "osion": 18442, - "osis": 5958, - "osit": 7434, - "osite": 5971, - "osition": 3507, - "ositories": 35061, - "ository": 13264, - "osity": 16579, - "oslav": 26388, - "oslov": 50005, - "oso": 28213, - "osp": 2117, - "ospace": 24912, - "ospel": 13994, - "ospels": 41908, - "osph": 14222, - "osphere": 22829, - "ospital": 3531, - "ospons": 35952, - "osponsors": 39500, - "oss": 793, - "ossal": 33582, - "ossession": 49809, - "ossibility": 43691, - "ossible": 4733, - "ossibly": 20846, - "ossier": 30087, - "ossip": 26710, - "ossom": 25548, - "ossus": 36533, - "ost": 455, - "osta": 39818, - "oster": 6197, - "osterone": 16372, - "ostic": 15132, - "ostics": 34558, - "oston": 5744, - "osuke": 45914, - "osure": 4567, - "osures": 16091, - "ot": 313, - "ota": 4265, - "otal": 4997, - "otally": 38908, - "otation": 14221, - "otaur": 35269, - "ote": 1258, - "otech": 32469, - "otechnology": 31201, - "oted": 5191, - "otent": 33715, - "oter": 19543, - "oteric": 38571, - "oters": 26008, - "otes": 6421, - "oth": 849, - "othal": 42376, - "othe": 20388, - "other": 847, - "otherapy": 18952, - "othermal": 49723, - "othes": 31690, - "othing": 24834, - "oths": 27118, - "othy": 14863, - "oti": 5092, - "otiation": 21236, - "otic": 6210, - "otics": 23891, - "otide": 45608, - "otin": 41403, - "otine": 16174, - "oting": 10720, - "otion": 9650, - "otional": 25453, - "otions": 36083, - "otive": 19138, - "otle": 23556, - "oto": 2069, - "otom": 43125, - "otomy": 38385, - "oton": 18970, - "otonin": 29613, - "otor": 20965, - "otos": 14163, - "otrop": 34248, - "otropic": 46084, - "ots": 1747, - "ott": 1252, - "otta": 12375, - "ottage": 29480, - "otte": 11404, - "otted": 8426, - "otten": 4728, - "ottenham": 21889, - "ottest": 24879, - "ottesville": 23806, - "otti": 26380, - "otto": 17631, - "otton": 11324, - "otyp": 17183, - "otype": 8690, - "otypes": 13567, - "ou": 280, - "oub": 12944, - "ouble": 15270, - "oubt": 47675, - "oubted": 15973, - "oubtedly": 16423, - "ouch": 7673, - 
"ouched": 30075, - "oud": 2778, - "ouf": 37116, - "oufl": 28012, - "oug": 20805, - "ough": 619, - "ought": 2917, - "ouk": 38960, - "oul": 2852, - "ould": 426, - "oulder": 17601, - "oulos": 19537, - "ouls": 42033, - "oult": 25955, - "oultry": 30056, - "oun": 977, - "ounce": 8652, - "ounced": 8918, - "ounces": 45982, - "ouncing": 18155, - "ound": 633, - "ounded": 6302, - "ounding": 9969, - "ounds": 3733, - "ounge": 20891, - "ount": 608, - "ountain": 18635, - "ounter": 6828, - "ounters": 15044, - "ounty": 17705, - "oup": 10486, - "ouple": 43846, - "our": 454, - "ourage": 32885, - "ource": 1668, - "ourced": 30555, - "ources": 2203, - "ourcing": 29985, - "oured": 8167, - "ourge": 14501, - "ourgeois": 18924, - "ouri": 10300, - "ouring": 21823, - "ourke": 49003, - "ourmet": 39094, - "ourn": 1798, - "ournal": 2549, - "ournals": 18408, - "ournament": 5138, - "ournaments": 16950, - "ourney": 5604, - "ourning": 31626, - "ours": 4662, - "ourse": 9047, - "ourses": 39975, - "ourt": 15666, - "ous": 516, - "ousand": 29910, - "ousands": 19983, - "ouse": 1076, - "oused": 29997, - "ousel": 48355, - "ouses": 11370, - "ousing": 12752, - "ously": 3481, - "ousse": 28396, - "oust": 23968, - "oustic": 21618, - "ouston": 6526, - "ousy": 41808, - "out": 448, - "oute": 13192, - "outed": 18534, - "outer": 39605, - "outh": 1536, - "outheast": 14474, - "outheastern": 27873, - "outher": 44262, - "outhern": 4927, - "outine": 28399, - "outing": 13660, - "output": 22915, - "outs": 5269, - "outside": 43435, - "outube": 9762, - "ouver": 10166, - "oux": 22193, - "ov": 709, - "ova": 10071, - "ovable": 21985, - "oval": 8325, - "ovan": 22590, - "ovation": 17882, - "ove": 659, - "oved": 2668, - "ovember": 3239, - "oven": 16206, - "over": 2502, - "overe": 33518, - "overed": 2557, - "overs": 13801, - "overty": 24085, - "overy": 6560, - "oves": 5241, - "ovi": 47297, - "ovic": 17215, - "ovich": 18198, - "ovie": 10739, - "ovies": 20526, - "oving": 5165, - "ovo": 18768, - "ovsky": 29716, - "ovy": 27796, - 
"ovych": 40822, - "ow": 322, - "owa": 8455, - "owan": 45197, - "oward": 46138, - "oway": 41788, - "owder": 34656, - "owe": 47097, - "owed": 6972, - "owell": 32829, - "ower": 789, - "owered": 10387, - "owers": 3618, - "owicz": 47982, - "owing": 7855, - "owitz": 20951, - "owl": 4883, - "owler": 30014, - "owment": 36569, - "own": 593, - "owned": 11990, - "owner": 18403, - "owners": 15605, - "ownt": 6887, - "owntown": 22748, - "ows": 1666, - "owship": 23473, - "owski": 12079, - "owsky": 47223, - "ox": 1140, - "oxic": 18047, - "oxicity": 44086, - "oxide": 28885, - "oxin": 39366, - "oxy": 23536, - "oy": 726, - "oya": 23790, - "oyal": 4815, - "oyd": 12192, - "oyer": 35301, - "oyle": 19802, - "oys": 19417, - "oz": 8590, - "ozo": 45149, - "ozy": 31060, - "ozyg": 49834, - "oÄŁ": 45492, - "oÄŁan": 48030, - "p": 79, - "pa": 8957, - "pac": 33587, - "pace": 10223, - "paced": 32416, - "paces": 43076, - "pack": 8002, - "package": 26495, - "packages": 43789, - "packed": 34860, - "packing": 41291, - "packs": 32377, - "pad": 15636, - "padding": 39231, - "page": 7700, - "pages": 31126, - "pai": 49712, - "paid": 20333, - "pain": 35436, - "painted": 47351, - "paio": 43491, - "pair": 24874, - "pak": 41091, - "pal": 18596, - "pan": 6839, - "panel": 35330, - "panic": 35843, - "pants": 38895, - "paper": 20189, - "papers": 40491, - "par": 1845, - "parable": 37064, - "paragraph": 20360, - "paralle": 37083, - "paralleled": 37859, - "param": 17143, - "params": 37266, - "pard": 26037, - "pared": 29190, - "paren": 11730, - "parency": 11944, - "parent": 8000, - "parents": 23743, - "park": 20928, - "parse": 29572, - "parser": 48610, - "part": 3911, - "partial": 47172, - "particip": 48013, - "particularly": 31722, - "partisan": 28226, - "parts": 42632, - "party": 10608, - "pas": 44429, - "pass": 6603, - "password": 28712, - "past": 30119, - "paste": 34274, - "pat": 8071, - "patch": 17147, - "path": 6978, - "pathic": 38829, - "pathy": 16786, - "patient": 26029, - "patrick": 29615, - "pattern": 33279, 
- "pause": 32125, - "pay": 15577, - "payer": 34987, - "payers": 45773, - "paying": 32629, - "payment": 37301, - "pb": 40842, - "pc": 14751, - "pd": 30094, - "pdf": 12315, - "pe": 431, - "peace": 22988, - "peak": 36729, - "peat": 18267, - "pec": 43106, - "pecially": 2333, - "pect": 806, - "pected": 7254, - "pecting": 35570, - "pection": 14978, - "pects": 38046, - "ped": 9124, - "pedia": 50235, - "pee": 39463, - "peed": 39492, - "peer": 33350, - "pees": 42623, - "peg": 22071, - "pei": 33265, - "pel": 30242, - "pell": 23506, - "pelled": 15803, - "pelling": 35025, - "pen": 3617, - "pend": 37038, - "pent": 16923, - "penter": 26419, - "people": 15332, - "per": 525, - "perate": 30052, - "perature": 21069, - "percent": 25067, - "pered": 13653, - "perfect": 25833, - "performance": 26585, - "performing": 37440, - "perhaps": 28998, - "peria": 38032, - "perial": 7629, - "pering": 21255, - "period": 41007, - "perm": 16321, - "peror": 8723, - "perors": 49406, - "pers": 19276, - "perse": 38696, - "person": 6259, - "personal": 22682, - "pert": 11766, - "perties": 18200, - "perture": 27286, - "perty": 9287, - "pes": 12272, - "pet": 6449, - "pex": 24900, - "pez": 46057, - "pg": 6024, - "ph": 746, - "pha": 7566, - "phabet": 19557, - "phal": 27451, - "phalt": 41942, - "phan": 19080, - "phans": 44464, - "phant": 33959, - "phas": 5902, - "phase": 40715, - "phasis": 28432, - "phe": 36335, - "phen": 31024, - "pher": 17042, - "pherd": 23111, - "pheus": 46421, - "phi": 34846, - "phia": 8193, - "phies": 19380, - "phil": 28864, - "philis": 49613, - "phis": 18691, - "phone": 4862, - "phones": 9708, - "phony": 23021, - "phot": 38611, - "photo": 23074, - "photos": 24729, - "php": 10121, - "phrase": 34675, - "phrine": 47723, - "phthal": 48118, - "phy": 6883, - "phys": 34411, - "physical": 42854, - "pi": 14415, - "pic": 16564, - "pick": 27729, - "picked": 41891, - "picking": 48864, - "pict": 18847, - "picture": 34053, - "pictured": 28852, - "pid": 35317, - "pie": 21749, - "piece": 12239, - 
"pieces": 34154, - "pill": 27215, - "pillar": 41643, - "pin": 11635, - "pine": 23908, - "ping": 13886, - "pins": 49556, - "pipe": 34360, - "pir": 4063, - "piracy": 8703, - "piration": 10514, - "pire": 5111, - "pired": 6474, - "pires": 17833, - "piring": 12987, - "pit": 15544, - "pite": 2595, - "pixel": 32515, - "pkg": 35339, - "pl": 489, - "place": 5372, - "placed": 21820, - "places": 23625, - "plain": 25638, - "plan": 11578, - "plane": 14382, - "planes": 22587, - "planet": 47427, - "planned": 36800, - "plant": 15060, - "plate": 6816, - "plates": 17041, - "platform": 24254, - "play": 1759, - "played": 21542, - "player": 7829, - "players": 32399, - "playing": 17916, - "plays": 26024, - "ple": 1154, - "pleasant": 21109, - "please": 29688, - "pled": 10137, - "plement": 26908, - "plementation": 32851, - "pler": 20053, - "ples": 2374, - "pless": 14570, - "plet": 37069, - "plete": 6677, - "pleted": 16838, - "pleting": 47130, - "pletion": 24547, - "plets": 46916, - "plex": 11141, - "plin": 46982, - "pling": 11347, - "plings": 47093, - "pload": 7304, - "plom": 7302, - "ploma": 35728, - "plot": 29487, - "ploy": 1420, - "plug": 16875, - "plugin": 33803, - "plugins": 37390, - "plus": 9541, - "ply": 2145, - "pm": 4426, - "pmwiki": 45321, - "pn": 21999, - "png": 11134, - "po": 7501, - "pocket": 31991, - "pod": 33320, - "podcast": 46032, - "point": 4122, - "pointer": 29536, - "pointers": 47809, - "points": 13033, - "poke": 35924, - "pol": 16104, - "pole": 36869, - "police": 38191, - "policy": 30586, - "polit": 34470, - "political": 23149, - "politics": 34127, - "poll": 30393, - "poly": 35428, - "pool": 7742, - "poon": 26743, - "poons": 27575, - "poor": 36672, - "pop": 12924, - "popular": 47568, - "population": 39748, - "por": 1819, - "pora": 38851, - "poral": 35738, - "porary": 5551, - "porate": 38133, - "port": 634, - "portation": 10189, - "ported": 9213, - "porter": 26634, - "porting": 26527, - "portion": 16864, - "ports": 3742, - "pos": 1930, - "posal": 40007, - "pose": 3455, 
- "posed": 29813, - "poses": 4832, - "posing": 32927, - "position": 9150, - "positive": 24561, - "posium": 35864, - "possibly": 39363, - "post": 7353, - "posted": 40578, - "posts": 24875, - "posure": 26205, - "pot": 13059, - "potion": 49324, - "pots": 40793, - "pound": 19568, - "pour": 48681, - "powder": 45855, - "power": 6477, - "powered": 12293, - "powerful": 44548, - "powers": 30132, - "pox": 42557, - "pp": 381, - "ppa": 44989, - "ppard": 43988, - "ppe": 27768, - "pped": 1496, - "ppel": 46357, - "ppelin": 48425, - "pper": 2848, - "pperc": 39921, - "ppers": 11799, - "pping": 2105, - "ppings": 37840, - "ppo": 16634, - "pport": 4926, - "pps": 41799, - "ppy": 14097, - "pr": 1050, - "pract": 29152, - "practice": 39541, - "pre": 3866, - "pread": 9681, - "pred": 28764, - "prefix": 40290, - "prem": 31605, - "prep": 46012, - "pres": 18302, - "present": 25579, - "president": 22540, - "press": 8439, - "pressed": 45477, - "pressure": 36151, - "pret": 5310, - "pretty": 37784, - "prev": 47050, - "pri": 3448, - "price": 20888, - "priced": 30883, - "prim": 19795, - "primary": 39754, - "prime": 35505, - "pring": 12667, - "print": 4798, - "printed": 49695, - "printf": 37435, - "println": 35235, - "prints": 17190, - "priority": 49336, - "prise": 7919, - "prises": 18166, - "prising": 14619, - "prisingly": 20859, - "prison": 35156, - "priv": 13776, - "private": 19734, - "pro": 1676, - "probably": 26949, - "problem": 45573, - "proc": 36942, - "process": 14681, - "processing": 36948, - "processor": 41341, - "proclaimed": 39865, - "produ": 18230, - "produced": 32783, - "producing": 36866, - "product": 11167, - "production": 25493, - "productive": 27781, - "products": 29498, - "prof": 5577, - "professional": 33163, - "profile": 13317, - "profit": 9183, - "profits": 31504, - "program": 23065, - "progress": 33723, - "project": 16302, - "projects": 42068, - "prom": 16963, - "pron": 31186, - "prone": 46330, - "proof": 13288, - "prop": 22930, - "properties": 48310, - "property": 26745, - 
"prot": 11235, - "protect": 35499, - "protected": 24326, - "protection": 42846, - "protein": 48693, - "prototype": 38124, - "prov": 15234, - "proven": 42874, - "provided": 41279, - "proxy": 36436, - "prus": 26440, - "ps": 862, - "psc": 27566, - "pse": 7752, - "psey": 39070, - "pson": 8430, - "psons": 31410, - "psy": 13764, - "psych": 23947, - "pt": 457, - "pta": 32283, - "pter": 42104, - "ptic": 17459, - "ptin": 43217, - "ption": 1159, - "ptions": 8544, - "ptive": 21665, - "ptives": 43903, - "ptoms": 35533, - "pton": 10972, - "ptr": 20692, - "ptroller": 44913, - "pty": 5835, - "pu": 19944, - "pub": 12984, - "public": 11377, - "published": 30271, - "puff": 49357, - "pull": 31216, - "pun": 35512, - "punk": 30354, - "pur": 14225, - "pure": 37424, - "purpose": 29983, - "push": 14689, - "put": 1996, - "putable": 48840, - "puted": 17128, - "puter": 10549, - "puters": 41510, - "puting": 48074, - "px": 8416, - "py": 9078, - "python": 29412, - "q": 80, - "qa": 20402, - "qi": 40603, - "ql": 13976, - "qq": 38227, - "qqa": 28794, - "qs": 48382, - "qt": 39568, - "qu": 421, - "qua": 39566, - "quad": 47003, - "qual": 13255, - "qualified": 22557, - "quality": 13237, - "quant": 40972, - "quart": 36008, - "quarter": 24385, - "quartered": 42508, - "quarters": 8230, - "que": 4188, - "quel": 31735, - "quer": 10819, - "querade": 33357, - "querque": 36119, - "query": 22766, - "ques": 13281, - "quest": 6138, - "question": 25652, - "quet": 21108, - "queue": 36560, - "quez": 22281, - "quick": 24209, - "quickShip": 39752, - "quickShipAvailable": 39753, - "quiet": 39624, - "quila": 43652, - "quin": 21915, - "quire": 29782, - "quished": 39737, - "quist": 30062, - "quit": 47391, - "quite": 37121, - "quote": 22708, - "qus": 45260, - "qv": 44179, - "r": 81, - "ra": 430, - "rab": 25619, - "rac": 11510, - "race": 16740, - "racial": 33001, - "racist": 41131, - "rack": 39638, - "ract": 974, - "racted": 20216, - "ractical": 36112, - "raction": 7861, - "ractions": 37810, - "ractive": 35587, - "ractor": 
40450, - "racuse": 28268, - "rad": 6335, - "rade": 27585, - "radical": 42325, - "radio": 37004, - "radius": 42172, - "rador": 40368, - "rael": 2510, - "raf": 32188, - "raft": 1617, - "rafted": 30235, - "rag": 22562, - "rage": 8394, - "raged": 18312, - "ragon": 25753, - "rah": 11392, - "raham": 13220, - "rahim": 26922, - "raid": 7086, - "rail": 30224, - "rain": 3201, - "raine": 23440, - "rained": 13363, - "raining": 24674, - "raint": 16947, - "raints": 15517, - "raise": 40225, - "raised": 49309, - "raising": 32741, - "rait": 12907, - "raits": 27554, - "rak": 17716, - "rake": 33788, - "ral": 1373, - "rals": 30691, - "raltar": 45662, - "ram": 859, - "rama": 20058, - "rame": 28073, - "rament": 15141, - "ramer": 29172, - "ramid": 20255, - "ramids": 43591, - "rams": 9474, - "ran": 2596, - "rance": 8132, - "ranch": 25642, - "rand": 25192, - "random": 25120, - "rane": 14579, - "ranean": 16474, - "rang": 36985, - "range": 9521, - "ranged": 34457, - "ranging": 32319, - "rank": 43027, - "ranked": 28282, - "ranking": 28405, - "rano": 35823, - "rans": 26084, - "rant": 5250, - "rants": 15087, - "rap": 2416, - "rape": 13484, - "raped": 31951, - "raper": 38545, - "raph": 1470, - "raphic": 22262, - "raphics": 11549, - "rapnel": 48766, - "raq": 3766, - "rar": 20040, - "rared": 25122, - "rarily": 39000, - "rary": 11619, - "ras": 8847, - "rase": 22789, - "rast": 5685, - "rastructure": 6410, - "rat": 10366, - "ratch": 36722, - "rate": 4873, - "rated": 4111, - "rates": 9700, - "rather": 34330, - "rating": 8821, - "ration": 1358, - "rational": 20310, - "rations": 9143, - "rative": 13260, - "ratom": 44616, - "rator": 12392, - "rators": 18942, - "rats": 46714, - "ratulations": 30167, - "raud": 22863, - "raught": 44451, - "rav": 4108, - "rave": 5758, - "raved": 28366, - "ravel": 25843, - "ravings": 42335, - "raviolet": 44223, - "ravis": 16956, - "ravity": 16995, - "raw": 1831, - "rawdownload": 30905, - "rawdownloadcloneembedreportprint": 30906, - "rawl": 13132, - "rawled": 49263, - 
"rawler": 39464, - "rawling": 18771, - "rawn": 5791, - "rax": 32040, - "ray": 2433, - "rays": 20477, - "raz": 3247, - "razen": 36409, - "razil": 7098, - "razy": 5918, - "rb": 26145, - "rc": 6015, - "rd": 4372, - "re": 260, - "rea": 21468, - "reach": 16250, - "reaching": 30771, - "react": 45018, - "read": 961, - "readable": 46155, - "reader": 46862, - "reading": 25782, - "reads": 40779, - "ready": 1493, - "real": 5305, - "realDonaldTrump": 28024, - "reality": 46290, - "really": 27485, - "ream": 1476, - "reason": 41181, - "reasonable": 42275, - "reat": 630, - "reated": 15978, - "reath": 19367, - "reating": 34567, - "reatment": 21731, - "reau": 43611, - "reb": 34806, - "rec": 8344, - "recated": 31023, - "received": 47844, - "recent": 49921, - "reci": 29102, - "reciation": 33950, - "reck": 11402, - "recogn": 26243, - "recomm": 47335, - "record": 22105, - "recorded": 47398, - "rect": 2554, - "rection": 8243, - "recy": 20568, - "red": 445, - "redd": 26504, - "reddit": 10748, - "reddits": 36581, - "redible": 26260, - "redibly": 45779, - "redict": 17407, - "redients": 23320, - "redit": 7470, - "reditary": 47333, - "reditation": 42845, - "redited": 19465, - "redits": 20696, - "redo": 48454, - "ree": 631, - "reed": 15977, - "reek": 10316, - "reement": 10237, - "reements": 28919, - "reen": 1361, - "reens": 5681, - "reenshot": 26892, - "reenshots": 39551, - "rees": 6037, - "reet": 2871, - "reetings": 46648, - "ref": 5420, - "reference": 35790, - "reflect": 35051, - "reg": 2301, - "regate": 49373, - "regation": 43068, - "region": 36996, - "register": 30238, - "registered": 33736, - "regn": 28321, - "regnancy": 39982, - "regon": 8285, - "regor": 32288, - "regular": 16338, - "regulated": 27739, - "regulation": 27727, - "rehend": 23979, - "rehens": 7345, - "rehensible": 34718, - "rehensive": 36321, - "rek": 37818, - "rel": 2411, - "related": 5363, - "relation": 49501, - "relations": 39468, - "relative": 43762, - "release": 20979, - "released": 30147, - "relevant": 49659, - 
"religious": 27626, - "rell": 11252, - "rella": 20481, - "rely": 38015, - "rem": 2787, - "reme": 2182, - "remember": 38947, - "remlin": 17244, - "remote": 47960, - "remove": 28956, - "ren": 918, - "rence": 6784, - "rences": 34303, - "rench": 3532, - "renched": 23437, - "renches": 33650, - "rencies": 14038, - "rency": 5227, - "rend": 10920, - "render": 13287, - "rendered": 26238, - "rene": 25924, - "renheit": 34032, - "rent": 1156, - "rentice": 20098, - "rentices": 34368, - "reon": 21833, - "rep": 7856, - "repair": 49932, - "repe": 45956, - "repeat": 44754, - "repl": 35666, - "replace": 33491, - "reply": 47768, - "report": 13116, - "reported": 26263, - "reporting": 49914, - "reportprint": 30897, - "reports": 48922, - "repre": 10353, - "reprene": 10406, - "represent": 15603, - "represented": 33469, - "req": 42180, - "requ": 8897, - "requency": 28707, - "requent": 46018, - "requently": 37971, - "request": 25927, - "require": 46115, - "required": 35827, - "requires": 47911, - "requisite": 27614, - "requisites": 34075, - "rer": 11751, - "rera": 24420, - "rero": 34785, - "rers": 27736, - "res": 411, - "resa": 14625, - "rescent": 26505, - "research": 34033, - "resent": 2028, - "resents": 6629, - "reset": 42503, - "resh": 3447, - "reshold": 10126, - "resident": 8154, - "resist": 35119, - "resistant": 26128, - "resolution": 29268, - "resource": 31092, - "resources": 37540, - "resp": 4363, - "respect": 15008, - "respected": 48268, - "respective": 36990, - "respond": 5546, - "respons": 16733, - "response": 26209, - "responsible": 24358, - "responsive": 39772, - "ress": 601, - "ressed": 2790, - "resses": 16746, - "ressing": 11697, - "ression": 2234, - "ressive": 3314, - "resso": 33852, - "ressor": 44292, - "rest": 2118, - "restling": 48839, - "restrial": 23522, - "restricted": 49343, - "result": 20274, - "results": 43420, - "resy": 33000, - "ret": 1186, - "retch": 22592, - "retched": 27528, - "rete": 8374, - "reth": 40978, - "retion": 12307, - "rets": 8004, - "rett": 11489, - 
"rette": 42908, - "retty": 16100, - "return": 7783, - "rev": 18218, - "reve": 36955, - "reverse": 50188, - "review": 19023, - "reviewed": 32974, - "revolution": 32243, - "rew": 1809, - "rex": 21510, - "rey": 4364, - "reys": 46703, - "rez": 21107, - "rf": 41871, - "rg": 41345, - "rh": 17179, - "rha": 30268, - "ri": 380, - "ria": 7496, - "riad": 21244, - "riage": 4087, - "riages": 16451, - "rial": 4454, - "rian": 4484, - "rians": 19151, - "rib": 822, - "ribe": 4892, - "ribed": 8725, - "riber": 24735, - "ribes": 22090, - "ribing": 23098, - "rible": 5547, - "ribly": 16358, - "ribune": 44130, - "ribut": 2455, - "ribute": 4163, - "ributed": 6169, - "ributes": 7657, - "ribution": 3890, - "ric": 1173, - "rica": 30997, - "rical": 8143, - "rican": 37189, - "ricane": 11551, - "ricanes": 24881, - "rice": 20970, - "rices": 45977, - "rich": 7527, - "riched": 30486, - "ricia": 26654, - "rick": 5557, - "ricks": 23706, - "rics": 10466, - "rict": 2012, - "ricted": 20941, - "ricting": 42870, - "riction": 46214, - "ricular": 41001, - "rid": 6058, - "ridden": 40372, - "ride": 13154, - "rider": 49449, - "ridge": 12818, - "ridges": 32124, - "ridor": 44425, - "rie": 5034, - "ried": 2228, - "rief": 3796, - "rieg": 48429, - "riel": 11719, - "rien": 15355, - "riend": 1289, - "rient": 8289, - "rients": 18491, - "rier": 5277, - "riers": 8910, - "ries": 1678, - "riet": 42098, - "rieve": 30227, - "rieved": 28130, - "rieving": 37418, - "rification": 38763, - "rifice": 31932, - "rified": 41301, - "rift": 35357, - "rig": 4359, - "rigan": 35631, - "riger": 18096, - "right": 3506, - "righteous": 49955, - "rights": 28046, - "rik": 12602, - "rika": 28716, - "rike": 8760, - "rikes": 18445, - "riks": 39370, - "ril": 22379, - "rill": 20190, - "rils": 41408, - "rily": 28904, - "rim": 3036, - "riminal": 22157, - "rimination": 22550, - "rimp": 23750, - "rin": 12769, - "rina": 22267, - "rine": 7640, - "ring": 1806, - "ringe": 38229, - "rings": 33173, - "rington": 24833, - "rint": 22272, - "rio": 27250, - 
"rior": 7701, - "riors": 8657, - "riot": 36671, - "riots": 44447, - "riott": 43517, - "rious": 32527, - "rip": 5528, - "ripp": 14602, - "ript": 1968, - "ription": 2918, - "rique": 33865, - "rir": 29283, - "ris": 2442, - "rise": 17163, - "rises": 26064, - "rish": 37518, - "rising": 22610, - "risis": 42841, - "risk": 19121, - "risome": 47400, - "rison": 7426, - "rist": 1585, - "rists": 37326, - "rit": 799, - "ritch": 46510, - "rite": 6525, - "riter": 43407, - "rites": 23156, - "ritic": 46015, - "ritical": 36487, - "rities": 19491, - "rition": 10168, - "ritional": 21297, - "ritis": 27398, - "rito": 39834, - "ritten": 9108, - "rity": 10138, - "ritz": 29574, - "rium": 19172, - "rius": 48969, - "riv": 15104, - "rival": 43171, - "rive": 11590, - "rived": 36207, - "river": 38291, - "rix": 8609, - "riz": 47847, - "rl": 45895, - "rm": 26224, - "rn": 35906, - "ro": 305, - "roach": 28562, - "road": 6344, - "roads": 21372, - "rob": 22609, - "robat": 40655, - "robe": 25481, - "roc": 12204, - "rocal": 43270, - "rock": 10823, - "rocket": 30431, - "rod": 14892, - "rodu": 2076, - "roe": 20646, - "rog": 3828, - "rogen": 8648, - "rogens": 48686, - "rogram": 39529, - "roid": 3882, - "roit": 7775, - "rol": 3225, - "role": 18090, - "rolet": 33087, - "roleum": 21945, - "roll": 2487, - "rolled": 8375, - "roller": 10646, - "rollers": 36667, - "rolley": 42639, - "rolling": 18886, - "rollment": 48108, - "rolog": 40329, - "rology": 31142, - "rom": 398, - "roma": 42902, - "roman": 47119, - "romancer": 38211, - "rome": 5998, - "romeda": 32291, - "romising": 47112, - "rompt": 45700, - "romptu": 49255, - "romy": 50228, - "ron": 1313, - "rone": 33171, - "rones": 9821, - "rongh": 36670, - "ronic": 4565, - "ronics": 20844, - "rons": 12212, - "ront": 4298, - "rontal": 39321, - "roo": 42407, - "room": 3823, - "rooms": 9649, - "root": 15763, - "roots": 19150, - "rop": 1773, - "roph": 10051, - "rophe": 22599, - "rophic": 18191, - "ropolis": 25986, - "ropolitan": 14823, - "ropri": 9219, - "ropy": 28338, - 
"ror": 1472, - "rored": 34640, - "rors": 5965, - "ros": 4951, - "rosc": 45943, - "rose": 13698, - "rosis": 37172, - "ross": 1214, - "rosse": 39314, - "rosso": 21074, - "rossover": 23954, - "rost": 23341, - "rot": 10599, - "rote": 2519, - "rotein": 35574, - "roth": 33640, - "rots": 24744, - "rou": 472, - "rouch": 48626, - "roud": 5493, - "rough": 740, - "rought": 2909, - "round": 744, - "rounded": 39262, - "rounder": 45788, - "roup": 3233, - "roups": 14459, - "rous": 7596, - "rouse": 46494, - "route": 38629, - "rov": 18657, - "rovers": 31257, - "roversial": 46927, - "row": 808, - "rowd": 3986, - "rower": 46992, - "rowing": 11577, - "rown": 2053, - "rows": 8516, - "rowth": 13046, - "rox": 13907, - "roximately": 24378, - "roxy": 42059, - "roy": 3287, - "roying": 38295, - "rozen": 42005, - "rpm": 48235, - "rr": 21062, - "rs": 3808, - "rss": 42216, - "rt": 17034, - "ru": 622, - "ruary": 3728, - "rub": 25089, - "ruby": 49137, - "ruce": 26524, - "ruciating": 48404, - "ruck": 30915, - "ruct": 1356, - "ruction": 2762, - "ructose": 32275, - "ructure": 5620, - "rue": 24508, - "rued": 21556, - "ruff": 30622, - "rug": 2143, - "rugged": 21901, - "ruit": 4872, - "ruits": 50187, - "rule": 25135, - "rules": 38785, - "ruly": 34715, - "rum": 6582, - "rums": 45241, - "run": 5143, - "runner": 16737, - "runners": 36740, - "running": 20270, - "runs": 48381, - "runtime": 43282, - "rup": 12618, - "rupal": 34585, - "rupt": 3622, - "rupted": 31590, - "ruption": 6417, - "rupulous": 46272, - "rus": 14932, - "rush": 37357, - "rust": 11469, - "rw": 31653, - "rx": 40914, - "ry": 563, - "ryan": 29038, - "ryce": 28169, - "rying": 14992, - "rylic": 34554, - "ryn": 29441, - "rypt": 6012, - "rypted": 15109, - "ryption": 13168, - "rys": 19753, - "ryu": 49056, - "ré": 29350, - "s": 82, - "sa": 11400, - "sac": 30584, - "saf": 49585, - "safe": 21230, - "safety": 44708, - "said": 30079, - "sal": 21680, - "sale": 21378, - "sam": 37687, - "sama": 33843, - "same": 31642, - "sample": 39873, - "san": 12807, - 
"sand": 38142, - "sat": 49720, - "sav": 39308, - "save": 21928, - "saving": 29336, - "saw": 43439, - "say": 16706, - "sb": 36299, - "sbm": 32310, - "sburg": 30359, - "sburgh": 11931, - "sc": 1416, - "scale": 9888, - "scan": 35836, - "scape": 6794, - "scar": 13034, - "scene": 29734, - "scenes": 28123, - "sch": 20601, - "sche": 15952, - "schild": 35058, - "school": 14347, - "sci": 36216, - "science": 16801, - "scient": 25346, - "scientific": 41355, - "scill": 22360, - "scl": 38528, - "scope": 29982, - "score": 26675, - "scoring": 46536, - "screen": 9612, - "scrib": 40075, - "scribe": 12522, - "scribed": 47495, - "script": 12048, - "scription": 33584, - "scripts": 46521, - "scroll": 48728, - "sd": 21282, - "se": 325, - "sea": 8583, - "search": 12947, - "season": 6230, - "seat": 24073, - "sec": 2363, - "second": 12227, - "secondary": 38238, - "seconds": 43012, - "secret": 21078, - "sect": 8831, - "section": 5458, - "sectional": 44330, - "sections": 23946, - "sector": 34914, - "secure": 22390, - "security": 12961, - "secut": 4552, - "secution": 9534, - "sed": 36622, - "see": 3826, - "seed": 28826, - "seeing": 42041, - "seek": 36163, - "seekers": 47971, - "seeking": 38515, - "seen": 15898, - "sei": 36455, - "sein": 20719, - "sel": 741, - "selage": 45217, - "select": 19738, - "selected": 34213, - "selection": 49283, - "seless": 10950, - "self": 944, - "sell": 7255, - "seller": 32932, - "selling": 16473, - "sels": 14002, - "selves": 2020, - "sem": 43616, - "semb": 4428, - "semble": 15140, - "sembly": 5997, - "sen": 6248, - "senal": 10298, - "send": 21280, - "sense": 33819, - "sensitive": 30176, - "sent": 34086, - "separ": 25512, - "seq": 41068, - "sequ": 3107, - "sequence": 43167, - "sequent": 44399, - "sequently": 20415, - "ser": 2655, - "serial": 46911, - "series": 25076, - "serious": 34009, - "sers": 17720, - "serv": 3168, - "served": 45852, - "server": 15388, - "service": 15271, - "services": 30416, - "serving": 31293, - "ses": 8448, - "session": 29891, - "set": 2617, 
- "sets": 28709, - "sett": 17744, - "setting": 33990, - "settings": 33692, - "setup": 40406, - "seven": 26548, - "sever": 28116, - "severe": 43070, - "sex": 8044, - "sexual": 18338, - "sey": 4397, - "seys": 27717, - "sf": 28202, - "sg": 45213, - "sh": 1477, - "sha": 26270, - "shadow": 19106, - "shake": 32431, - "shall": 49271, - "shape": 43358, - "shaped": 16760, - "shapeshifter": 33929, - "share": 20077, - "shared": 28710, - "sharing": 21987, - "sharp": 48554, - "shaw": 32832, - "she": 7091, - "shed": 35762, - "sheet": 21760, - "sheets": 42011, - "shell": 29149, - "shi": 44019, - "shield": 26662, - "shift": 30846, - "shine": 19489, - "ship": 6720, - "ships": 26313, - "shire": 10932, - "shirt": 15600, - "shirts": 23231, - "shit": 16211, - "shock": 39563, - "shoot": 30408, - "shop": 24643, - "shore": 14640, - "short": 19509, - "shot": 9442, - "shots": 20910, - "should": 21754, - "show": 12860, - "shown": 42579, - "shows": 49596, - "shr": 36007, - "shut": 49625, - "si": 13396, - "sic": 21383, - "sid": 30255, - "side": 1589, - "sided": 22339, - "sie": 44524, - "sight": 18627, - "sighted": 44068, - "sign": 12683, - "signed": 32696, - "significant": 36591, - "sil": 18217, - "silver": 40503, - "sim": 14323, - "similar": 38610, - "simple": 36439, - "sin": 31369, - "since": 20777, - "sing": 12215, - "single": 29762, - "sis": 13429, - "sit": 48937, - "site": 15654, - "sites": 49315, - "six": 19412, - "size": 7857, - "sized": 13982, - "sk": 8135, - "ski": 20545, - "skill": 42401, - "skilled": 44885, - "skin": 20407, - "skinned": 41412, - "skip": 48267, - "skirts": 28383, - "sky": 15688, - "sl": 6649, - "slaught": 30929, - "slave": 36341, - "sle": 26738, - "sleep": 42832, - "slice": 48369, - "slot": 43384, - "slow": 38246, - "sm": 5796, - "small": 17470, - "smanship": 49820, - "smart": 27004, - "smith": 21453, - "smoking": 48783, - "sn": 16184, - "snap": 45380, - "so": 568, - "soDeliveryDate": 39811, - "soType": 39803, - "soc": 35634, - "social": 14557, - "socket": 44971, - 
"soever": 15485, - "sofar": 38649, - "soft": 4215, - "software": 43776, - "sol": 34453, - "sold": 24120, - "sole": 6753, - "solete": 23869, - "solid": 39390, - "some": 11246, - "someone": 46248, - "something": 18927, - "sometimes": 29810, - "son": 1559, - "song": 34050, - "sonian": 35202, - "soon": 36194, - "sorry": 41599, - "sort": 30619, - "sound": 23661, - "sounding": 39686, - "source": 10459, - "south": 35782, - "sov": 47272, - "sp": 2777, - "space": 13200, - "span": 12626, - "spawn": 48183, - "spe": 4125, - "speak": 47350, - "speaking": 25159, - "spec": 16684, - "special": 20887, - "species": 35448, - "specific": 11423, - "specified": 23599, - "spect": 4443, - "spection": 31308, - "spective": 49540, - "speech": 45862, - "speed": 12287, - "spell": 46143, - "spin": 39706, - "spir": 45564, - "spirit": 38685, - "spl": 22018, - "split": 35312, - "spoken": 19842, - "spons": 20587, - "sponsored": 25427, - "sports": 32945, - "spot": 20485, - "spr": 34975, - "spread": 43639, - "spring": 16469, - "sq": 31166, - "sql": 25410, - "squ": 16485, - "square": 23415, - "sr": 27891, - "src": 10677, - "ss": 824, - "ssh": 45824, - "ssl": 45163, - "sson": 16528, - "st": 301, - "sta": 38031, - "stab": 39029, - "stable": 31284, - "stack": 25558, - "stad": 24107, - "stadt": 38863, - "staff": 28120, - "stage": 14247, - "stained": 44279, - "stairs": 17617, - "stakes": 32540, - "staking": 40031, - "stal": 7757, - "stall": 32989, - "stals": 41076, - "stan": 14192, - "stanbul": 24179, - "stand": 1481, - "standard": 20307, - "standing": 5646, - "stant": 18797, - "stantial": 41321, - "star": 7364, - "stars": 30783, - "start": 9688, - "started": 46981, - "starter": 12339, - "starting": 38690, - "stasy": 31695, - "stat": 14269, - "state": 5219, - "stated": 21989, - "statement": 26090, - "states": 27219, - "static": 12708, - "station": 17529, - "stats": 34242, - "status": 13376, - "stay": 31712, - "std": 19282, - "ste": 4169, - "stead": 28044, - "steam": 21465, - "steamapps": 31881, - "sted": 
30679, - "steel": 44822, - "steen": 42580, - "stein": 5714, - "stellar": 28732, - "stem": 927, - "sten": 26400, - "step": 9662, - "steps": 20214, - "ster": 1706, - "sterdam": 22506, - "sters": 5937, - "stery": 41991, - "sth": 48476, - "stic": 11268, - "stice": 43788, - "stick": 13915, - "sticks": 34810, - "still": 24219, - "stim": 42003, - "stitial": 18167, - "stock": 13578, - "stocks": 29522, - "ston": 3743, - "stone": 6440, - "stones": 28750, - "stood": 6501, - "stop": 11338, - "storage": 35350, - "store": 8095, - "stores": 43409, - "stories": 50164, - "storm": 12135, - "storms": 38563, - "story": 13571, - "stown": 27928, - "str": 2536, - "stra": 12044, - "stract": 8709, - "straight": 42729, - "strap": 26418, - "strate": 23104, - "stration": 12401, - "stre": 22853, - "stream": 5532, - "street": 25662, - "strength": 41402, - "stress": 41494, - "stretched": 49729, - "stri": 33565, - "strike": 33069, - "string": 8841, - "strings": 37336, - "strip": 36311, - "stro": 20661, - "stroke": 30757, - "strom": 20282, - "strong": 11576, - "stros": 48288, - "strous": 22501, - "stru": 19554, - "struct": 7249, - "structed": 16242, - "struction": 15019, - "strument": 43872, - "sts": 6448, - "stud": 19149, - "student": 50139, - "study": 44517, - "stuff": 41094, - "sty": 34365, - "style": 7635, - "styles": 47720, - "su": 2385, - "sub": 7266, - "subject": 32796, - "submit": 46002, - "success": 13138, - "successful": 17212, - "successfully": 37351, - "such": 10508, - "sudo": 24032, - "suff": 37333, - "sufficient": 46790, - "suggest": 47811, - "suit": 6063, - "suits": 16554, - "sum": 16345, - "summary": 49736, - "sun": 19155, - "sung": 9854, - "sup": 37330, - "super": 16668, - "supp": 18608, - "support": 11284, - "supported": 15999, - "sur": 11793, - "sure": 19532, - "surface": 42029, - "surprisingly": 41199, - "surv": 48846, - "susp": 40409, - "sv": 21370, - "sw": 2032, - "swe": 46280, - "sweet": 34751, - "swer": 17845, - "swers": 37848, - "swick": 30961, - "swing": 46737, - 
"switch": 31943, - "sword": 30553, - "sworth": 30567, - "sy": 1837, - "sych": 2924, - "sylv": 9163, - "sylvania": 9270, - "sym": 37047, - "syn": 28869, - "sync": 27261, - "sys": 17597, - "system": 10057, - "t": 83, - "ta": 8326, - "tab": 8658, - "table": 11487, - "taboola": 10658, - "tackle": 36346, - "tag": 12985, - "tags": 31499, - "tail": 13199, - "tailed": 34966, - "tails": 26404, - "tain": 3153, - "tained": 4644, - "taining": 7339, - "tainment": 10738, - "tains": 12143, - "take": 20657, - "taker": 30157, - "taking": 26103, - "tal": 39240, - "tale": 29429, - "talk": 16620, - "talking": 48186, - "tall": 35429, - "tan": 38006, - "tank": 28451, - "tap": 44335, - "tar": 18870, - "target": 16793, - "tarian": 14012, - "tarians": 28266, - "task": 35943, - "tax": 19290, - "tc": 23047, - "tch": 38664, - "td": 8671, - "te": 660, - "team": 15097, - "tec": 36281, - "tech": 13670, - "techn": 23873, - "technical": 47944, - "technology": 45503, - "ted": 1513, - "teen": 7821, - "teenth": 20283, - "tein": 22006, - "tek": 35424, - "tel": 37524, - "tele": 46813, - "tell": 33331, - "telling": 18072, - "tem": 11498, - "temp": 29510, - "template": 28243, - "ten": 1452, - "tenance": 8219, - "teness": 43205, - "ter": 353, - "tera": 49600, - "terday": 6432, - "tered": 4400, - "tering": 20212, - "terior": 14172, - "term": 4354, - "termin": 23705, - "termination": 41382, - "terms": 38707, - "tern": 759, - "ternal": 4358, - "ternally": 30262, - "terness": 34697, - "ternity": 19682, - "terror": 14007, - "terrorism": 19541, - "terrorist": 42002, - "ters": 1010, - "terson": 23192, - "tery": 11471, - "tes": 4879, - "tesque": 37422, - "test": 9288, - "tested": 39612, - "testers": 27205, - "testing": 33407, - "tests": 41989, - "tesy": 27090, - "tex": 16886, - "text": 5239, - "texture": 41293, - "tf": 27110, - "tg": 25297, - "th": 400, - "tha": 12898, - "thal": 11669, - "than": 14813, - "thank": 40716, - "thanks": 27547, - "that": 5562, - "the": 1169, - "their": 24571, - "thel": 37274, - 
"theless": 9603, - "them": 18855, - "theme": 43810, - "themed": 26966, - "then": 8524, - "thening": 20563, - "thens": 43895, - "ther": 490, - "there": 8117, - "thereal": 37827, - "thereum": 17733, - "these": 27218, - "they": 9930, - "thia": 31079, - "thin": 40871, - "thing": 1197, - "things": 27971, - "think": 14925, - "thinkable": 37510, - "thinking": 28973, - "third": 17089, - "thirds": 17936, - "thirst": 48832, - "this": 5661, - "thodox": 12836, - "thood": 12951, - "thora": 34261, - "those": 25591, - "though": 2016, - "thought": 28895, - "thouse": 23931, - "thread": 16663, - "threat": 19971, - "threatening": 26159, - "three": 15542, - "thren": 25941, - "thritis": 34043, - "thro": 26110, - "throp": 11360, - "through": 9579, - "throw": 16939, - "ths": 9998, - "thumbnails": 18670, - "thur": 11098, - "thus": 26239, - "thy": 20057, - "ti": 20259, - "tic": 13370, - "tical": 22869, - "tick": 42298, - "ticket": 43350, - "tics": 14094, - "tie": 36224, - "tier": 24948, - "ties": 4278, - "tif": 49929, - "tight": 33464, - "til": 47163, - "tile": 40927, - "tim": 16514, - "time": 2435, - "timeout": 48678, - "timer": 45016, - "times": 22355, - "tin": 43701, - "ting": 889, - "tiny": 44152, - "tion": 5378, - "tions": 45240, - "tip": 22504, - "tips": 41315, - "tis": 48010, - "title": 7839, - "tk": 30488, - "tl": 28781, - "tle": 7100, - "tm": 17209, - "tml": 20369, - "tmp": 22065, - "tn": 34106, - "tnc": 26642, - "to": 1462, - "toc": 40301, - "today": 40838, - "toe": 44579, - "together": 45525, - "toggle": 44256, - "token": 30001, - "told": 44040, - "tom": 39532, - "ton": 1122, - "tone": 41527, - "tones": 36257, - "tons": 27288, - "too": 18820, - "tool": 25981, - "tools": 31391, - "top": 4852, - "topia": 46575, - "topic": 26652, - "tops": 35011, - "tor": 13165, - "torn": 45910, - "total": 23350, - "touch": 29332, - "tower": 36170, - "town": 12735, - "tp": 34788, - "tr": 2213, - "tra": 9535, - "trace": 40546, - "track": 11659, - "tracking": 36280, - "tracks": 46074, - "trade": 
25351, - "traditional": 36380, - "train": 27432, - "trained": 35311, - "training": 34409, - "trak": 44195, - "trans": 7645, - "transfer": 39437, - "transform": 35636, - "translation": 41519, - "trap": 46670, - "traumatic": 41521, - "travel": 35927, - "tre": 33945, - "treated": 37182, - "treatment": 42487, - "tree": 21048, - "tri": 28461, - "trial": 45994, - "trigger": 46284, - "trip": 39813, - "trl": 14859, - "tro": 23528, - "trop": 48385, - "true": 7942, - "trump": 40954, - "trust": 38087, - "truth": 35310, - "try": 28311, - "ts": 912, - "tsky": 30394, - "tsy": 34293, - "tt": 926, - "tta": 25854, - "tted": 28734, - "tten": 32407, - "ttes": 13036, - "tti": 35671, - "ttle": 23296, - "tto": 33955, - "ttp": 29281, - "tty": 42852, - "tu": 28047, - "tub": 37995, - "tube": 29302, - "tumblr": 45364, - "tun": 28286, - "tur": 36590, - "turn": 15344, - "turned": 33886, - "tv": 14981, - "tw": 4246, - "twitch": 31844, - "twitter": 6956, - "two": 11545, - "tx": 17602, - "txt": 14116, - "ty": 774, - "tyard": 30308, - "tymology": 43408, - "typ": 28004, - "type": 4906, - "types": 19199, - "typically": 48126, - "tz": 22877, - "u": 84, - "ua": 6413, - "uable": 7153, - "uably": 14632, - "uador": 24201, - "ual": 723, - "uala": 41944, - "uality": 25775, - "ually": 935, - "uan": 7258, - "uana": 5020, - "uania": 29743, - "uart": 19986, - "uary": 2838, - "uate": 4985, - "uated": 6605, - "uates": 12632, - "uating": 11927, - "uation": 2288, - "uations": 6055, - "uay": 30106, - "ub": 549, - "uba": 22013, - "ubb": 33670, - "ubby": 38393, - "ube": 3266, - "uben": 44636, - "uber": 18478, - "uberty": 34237, - "ubes": 29080, - "ubi": 29603, - "ubis": 46676, - "uble": 26664, - "ublic": 841, - "ublished": 33286, - "ubric": 29812, - "ubs": 23161, - "ubuntu": 32230, - "uc": 1229, - "uca": 43120, - "ucc": 18863, - "ucci": 27501, - "uce": 7234, - "uced": 19513, - "ucer": 48915, - "uces": 26873, - "uch": 794, - "ucha": 48022, - "uchi": 22200, - "uchin": 43416, - "uchs": 37533, - "uci": 42008, - "ucing": 
25648, - "uck": 1347, - "ucked": 17758, - "ucker": 12603, - "ucket": 38811, - "ucking": 19296, - "uckland": 28789, - "uckle": 29687, - "uckles": 34083, - "ucks": 6238, - "ucky": 5309, - "ucl": 36616, - "ucle": 14913, - "uclear": 4016, - "uct": 4782, - "uction": 8110, - "uctions": 20847, - "uctive": 45857, - "uctor": 33029, - "ud": 463, - "uda": 15339, - "udd": 4185, - "udden": 16557, - "uddenly": 18865, - "udder": 41686, - "uddin": 44008, - "udding": 33926, - "uddle": 24500, - "uddled": 32745, - "uddy": 21584, - "ude": 2507, - "udeau": 16229, - "udeb": 46092, - "uded": 19289, - "uden": 44452, - "udence": 42581, - "uder": 26651, - "uders": 48739, - "udes": 8401, - "udge": 12587, - "udget": 29427, - "udging": 38840, - "udi": 47928, - "udic": 28673, - "udicrous": 33784, - "uding": 26570, - "udo": 12003, - "udos": 42418, - "uds": 24786, - "ue": 518, - "uebl": 45749, - "ued": 1739, - "uel": 2731, - "ueless": 38835, - "ueller": 16466, - "uer": 15573, - "uers": 42178, - "ues": 947, - "uesday": 3322, - "uese": 20506, - "uez": 14870, - "uf": 3046, - "ufact": 3603, - "uff": 1648, - "uffed": 18339, - "uffer": 13712, - "ufficient": 15267, - "uffle": 18137, - "uffs": 18058, - "uffy": 15352, - "ug": 1018, - "uga": 30302, - "ugal": 43778, - "ugar": 35652, - "uge": 2217, - "ugen": 42740, - "ugg": 6837, - "uggage": 29672, - "uggest": 29212, - "uggets": 26550, - "uggish": 36295, - "uggle": 33498, - "ugh": 6724, - "ught": 8951, - "ugi": 45659, - "ugs": 10339, - "ugu": 45284, - "uh": 7456, - "ui": 9019, - "uid": 27112, - "uild": 3547, - "uilding": 6963, - "uilt": 21955, - "uin": 48441, - "uine": 8327, - "uing": 4250, - "uint": 28611, - "uish": 32091, - "uit": 5013, - "uitive": 33740, - "uitous": 42412, - "uits": 15379, - "uity": 14834, - "uj": 23577, - "ujah": 46024, - "uk": 2724, - "uka": 14852, - "uke": 4649, - "uked": 48809, - "ukemia": 43505, - "ukes": 31469, - "uki": 11308, - "uko": 29794, - "ukong": 46654, - "uku": 33263, - "ul": 377, - "ula": 4712, - "ular": 934, - "ularity": 
33737, - "ulas": 25283, - "ulate": 5039, - "ulated": 4817, - "ulates": 15968, - "ulating": 8306, - "ulation": 1741, - "ulations": 5768, - "ulative": 13628, - "ulator": 8927, - "ulators": 24325, - "ulatory": 21386, - "uld": 32926, - "ule": 2261, - "uled": 6309, - "ulence": 32401, - "ulent": 15288, - "uler": 18173, - "ules": 5028, - "ulet": 25132, - "ulf": 4754, - "ulhu": 36828, - "uli": 32176, - "ulia": 43640, - "ulic": 28575, - "uliffe": 45228, - "ulin": 11599, - "uling": 16619, - "ulk": 12171, - "ulkan": 31263, - "ull": 724, - "ulla": 47972, - "ullah": 38665, - "ullivan": 16040, - "ully": 2132, - "ulner": 5697, - "ulnerability": 40920, - "ulnerable": 38828, - "ulo": 43348, - "ulous": 6985, - "ulously": 18117, - "ulp": 29528, - "ulpt": 13327, - "uls": 5753, - "ulse": 9615, - "ulsion": 15204, - "ulsive": 22220, - "ult": 586, - "ultan": 30454, - "ultane": 9560, - "ultimate": 44818, - "ulton": 37944, - "ults": 8376, - "ultural": 8596, - "ulture": 6456, - "ulty": 10672, - "ultz": 22150, - "ulu": 15712, - "ulum": 14452, - "ulus": 23515, - "uly": 2062, - "ulz": 37314, - "um": 388, - "uma": 7487, - "umably": 31303, - "uman": 3778, - "umann": 40062, - "umar": 44844, - "umat": 27798, - "umatic": 16735, - "umb": 2178, - "umbai": 21645, - "umber": 4494, - "umbered": 26584, - "umbers": 17024, - "umbing": 28149, - "umble": 10344, - "umbled": 11137, - "umbledore": 25549, - "umbles": 25329, - "umbling": 14739, - "umblr": 15566, - "umbn": 10269, - "umbnail": 20566, - "umbnails": 13668, - "umbo": 29309, - "umbs": 18146, - "ume": 2454, - "umed": 18940, - "umen": 20080, - "ument": 1713, - "umenthal": 42300, - "uments": 2886, - "umer": 6975, - "umerable": 30831, - "umeric": 39223, - "umerous": 31385, - "umers": 31260, - "umes": 8139, - "umi": 12994, - "umin": 7230, - "uminati": 37200, - "uming": 12595, - "uminium": 35241, - "uminum": 13074, - "umm": 13929, - "ummer": 31647, - "ummies": 39578, - "ummy": 13513, - "umn": 4182, - "umni": 25402, - "umo": 43712, - "ump": 931, - "umped": 
27073, - "umper": 15829, - "umph": 12875, - "umping": 25218, - "umps": 8142, - "umption": 24098, - "umpy": 32152, - "ums": 5700, - "umsy": 37133, - "un": 403, - "una": 9613, - "unal": 18835, - "unc": 19524, - "unch": 3316, - "unci": 49652, - "unciation": 24978, - "uncle": 29942, - "unct": 16260, - "unction": 4575, - "unctions": 46797, - "uncture": 39187, - "und": 917, - "unda": 46535, - "undai": 44591, - "under": 4625, - "unders": 41116, - "undle": 31249, - "undo": 41204, - "undown": 41609, - "undred": 3229, - "undreds": 20960, - "undrum": 46859, - "undy": 45459, - "une": 1726, - "uned": 40881, - "uner": 38886, - "unes": 4015, - "ung": 2150, - "ungle": 13687, - "uni": 35657, - "unia": 39934, - "unic": 46903, - "unicip": 9462, - "unin": 38453, - "uning": 46493, - "union": 24592, - "unique": 34642, - "unit": 20850, - "united": 41187, - "units": 41667, - "unity": 9531, - "universal": 40082, - "unk": 2954, - "unker": 21705, - "unknown": 34680, - "unks": 14125, - "unky": 28898, - "unless": 25252, - "unn": 20935, - "unning": 16596, - "unny": 16948, - "uno": 36909, - "uns": 13271, - "unsigned": 43375, - "unt": 2797, - "unta": 44424, - "untarily": 49605, - "untary": 26468, - "unte": 6311, - "until": 28446, - "untled": 46343, - "unts": 34115, - "untu": 11157, - "uo": 20895, - "uous": 5623, - "uously": 24987, - "up": 929, - "update": 19119, - "updated": 43162, - "upe": 48722, - "uper": 48568, - "uph": 25689, - "uphem": 45640, - "upid": 7658, - "upiter": 21251, - "uple": 29291, - "upload": 25850, - "uploads": 39920, - "upon": 27287, - "upp": 7211, - "upper": 45828, - "uppet": 44933, - "ups": 4739, - "upt": 37623, - "upuncture": 42223, - "ur": 333, - "ura": 5330, - "urable": 11970, - "uracy": 23843, - "urai": 16998, - "ural": 1523, - "urally": 20221, - "uran": 42211, - "urance": 3874, - "urances": 31741, - "uras": 17786, - "urat": 39928, - "urate": 15537, - "urated": 49293, - "uration": 3924, - "urations": 20074, - "urb": 5945, - "urban": 32679, - "urbed": 37694, - "urch": 
2575, - "urchase": 18737, - "urches": 12730, - "urd": 2799, - "urden": 42568, - "urdue": 30345, - "urdy": 22876, - "ure": 495, - "ureau": 6262, - "ured": 1522, - "ureen": 49851, - "uren": 23532, - "urer": 15051, - "urers": 17496, - "ures": 942, - "urg": 3686, - "urga": 45098, - "urger": 32650, - "urgical": 31839, - "urgy": 38140, - "uri": 9900, - "uria": 34484, - "uries": 4740, - "uring": 870, - "urion": 40956, - "urious": 16421, - "uristic": 27915, - "urities": 10886, - "urity": 1684, - "urized": 44796, - "url": 6371, - "urn": 700, - "urnal": 35735, - "urned": 44866, - "uro": 1434, - "uron": 44372, - "urous": 29277, - "urrection": 21384, - "urred": 12808, - "urrence": 33928, - "urrencies": 28018, - "urrency": 13382, - "urrent": 6657, - "urring": 14924, - "urry": 16682, - "urs": 1834, - "ursday": 3479, - "urse": 12321, - "ursed": 17539, - "urses": 46998, - "ursion": 24197, - "ursions": 42394, - "ursive": 30753, - "ursor": 21471, - "urst": 24962, - "urt": 3325, - "urther": 1914, - "urtle": 17964, - "urtles": 25195, - "uru": 14717, - "urus": 31891, - "ury": 1601, - "us": 385, - "usa": 22064, - "usable": 31979, - "usage": 26060, - "usal": 6775, - "usalem": 10555, - "usat": 37937, - "usb": 43319, - "usc": 16241, - "uscript": 15817, - "use": 1904, - "used": 1484, - "user": 7220, - "userc": 43298, - "usercontent": 43667, - "username": 29460, - "users": 18417, - "uses": 2664, - "useum": 6744, - "ush": 1530, - "usha": 46213, - "ushed": 7474, - "usher": 34055, - "ushes": 17237, - "ushi": 17731, - "ushima": 30474, - "ushing": 8023, - "using": 3500, - "usion": 4241, - "usional": 41780, - "usions": 15880, - "usive": 11350, - "usk": 17990, - "usky": 42431, - "usp": 17723, - "usr": 14629, - "usra": 28352, - "uss": 1046, - "ussed": 29569, - "ussen": 35951, - "ussia": 31269, - "ussian": 31562, - "ussie": 43480, - "ussion": 11956, - "ussions": 21585, - "ussy": 14650, - "ust": 436, - "ustain": 19542, - "ustainable": 24196, - "usted": 8459, - "uster": 5819, - "usterity": 20761, - 
"usters": 13654, - "usting": 32620, - "ustom": 1824, - "ustomed": 22646, - "ustration": 44027, - "usual": 37850, - "usually": 23073, - "ut": 315, - "uta": 29822, - "utable": 18187, - "utan": 37878, - "utation": 7094, - "utations": 32855, - "utch": 7140, - "ute": 1133, - "uted": 7241, - "uten": 7809, - "utenant": 15340, - "utenberg": 19028, - "uter": 11894, - "uters": 5843, - "uterte": 23314, - "utes": 1769, - "utf": 40477, - "uth": 1071, - "uther": 12866, - "utherford": 46923, - "utherland": 45384, - "uthor": 1457, - "uti": 47966, - "utic": 18089, - "utical": 14224, - "utics": 48063, - "uties": 8249, - "util": 22602, - "utils": 26791, - "uting": 15129, - "ution": 1009, - "utions": 3508, - "utive": 8827, - "utm": 26841, - "uto": 9390, - "uton": 32894, - "utonium": 43078, - "utor": 38409, - "utorial": 44917, - "utory": 17957, - "utra": 35076, - "utral": 6815, - "uts": 5500, - "utsch": 40768, - "utsche": 30433, - "utsu": 36567, - "utt": 15318, - "utter": 10381, - "uttered": 46322, - "uttering": 33598, - "utters": 46973, - "utterstock": 28819, - "utton": 21115, - "uture": 1832, - "uty": 3935, - "utz": 27839, - "uu": 12303, - "uum": 13814, - "uv": 14795, - "uve": 45177, - "uvian": 50013, - "ux": 2821, - "uxe": 18095, - "uy": 4669, - "uyomi": 40012, - "uz": 10277, - "uzz": 4715, - "uzzle": 9625, - "v": 85, - "vP": 47322, - "va": 6862, - "vable": 23765, - "vacc": 37839, - "vae": 33353, - "vag": 29821, - "val": 2100, - "vale": 41161, - "valid": 12102, - "vals": 12786, - "value": 8367, - "valued": 39728, - "values": 27160, - "van": 10438, - "vana": 33175, - "vance": 19259, - "vant": 4520, - "vantage": 38815, - "var": 7785, - "vard": 10187, - "vari": 25641, - "variable": 45286, - "vas": 11017, - "vasive": 23747, - "vati": 36868, - "vation": 10473, - "vc": 28435, - "vd": 20306, - "ve": 303, - "vec": 35138, - "vector": 31364, - "ved": 1079, - "veh": 33892, - "vel": 626, - "veland": 9731, - "velength": 26623, - "vell": 29333, - "velop": 1091, - "velt": 18065, - "ven": 574, - 
"venant": 15330, - "venants": 43773, - "venge": 18674, - "venient": 48109, - "vent": 1151, - "venth": 20987, - "vention": 4018, - "ventional": 20405, - "ventions": 16593, - "ventory": 17158, - "venture": 5388, - "ventures": 10065, - "ventus": 35648, - "venue": 4080, - "ver": 332, - "verage": 1857, - "verages": 23118, - "verb": 19011, - "verbal": 46953, - "verbs": 46211, - "vere": 4119, - "vered": 21917, - "verend": 37713, - "verett": 33395, - "verified": 47684, - "vern": 933, - "vernight": 47443, - "verning": 13974, - "vernment": 11355, - "vers": 690, - "verse": 4399, - "versely": 21243, - "versible": 37393, - "version": 9641, - "versions": 47178, - "versive": 40099, - "verson": 49589, - "vert": 1851, - "verted": 13658, - "verting": 48820, - "vertis": 3346, - "vertisement": 4060, - "vertisements": 11371, - "vertising": 31809, - "verts": 24040, - "verty": 8077, - "very": 548, - "ves": 1158, - "vest": 4223, - "vet": 16809, - "vette": 33573, - "vey": 3304, - "veyard": 21563, - "vez": 33425, - "vg": 45119, - "vi": 8903, - "via": 8869, - "viation": 47625, - "vic": 25531, - "vice": 28281, - "vich": 49547, - "vict": 32433, - "vid": 16921, - "video": 15588, - "videos": 32861, - "vidia": 21744, - "vier": 49663, - "view": 1177, - "views": 33571, - "vik": 28930, - "viks": 45901, - "vil": 2991, - "vill": 41082, - "ville": 4244, - "vim": 31124, - "vin": 7114, - "vind": 50172, - "vine": 26818, - "ving": 1075, - "viol": 17069, - "violence": 37502, - "violent": 24498, - "vious": 1442, - "viously": 8647, - "vir": 37040, - "viron": 2268, - "vironment": 2468, - "vironments": 12103, - "virt": 48940, - "virtual": 32844, - "vis": 4703, - "vised": 16149, - "visible": 23504, - "vision": 10178, - "visor": 13131, - "visors": 27681, - "visory": 41783, - "visual": 41464, - "vity": 21319, - "vl": 19279, - "vm": 14761, - "vo": 13038, - "voc": 18893, - "voice": 38888, - "void": 19382, - "vol": 10396, - "volent": 29078, - "volt": 37764, - "volume": 29048, - "von": 26982, - "vor": 20867, - "vote": 
27257, - "votes": 29307, - "vous": 31222, - "voy": 40024, - "vp": 36133, - "vr": 37020, - "vre": 43933, - "vs": 14259, - "vt": 36540, - "vu": 40939, - "vv": 25093, - "vy": 7670, - "w": 86, - "wa": 10247, - "wage": 21482, - "wagen": 29160, - "wagon": 41127, - "wait": 17077, - "wake": 48530, - "wal": 16783, - "wald": 21667, - "walk": 11152, - "walker": 20783, - "walking": 44065, - "wall": 11930, - "wallet": 44623, - "wan": 8149, - "wana": 49484, - "wang": 47562, - "want": 42949, - "war": 5767, - "ward": 904, - "wards": 2017, - "ware": 1574, - "wark": 48542, - "warm": 31975, - "warming": 48133, - "warn": 40539, - "warning": 43917, - "wart": 24657, - "warts": 26586, - "was": 9776, - "wash": 34670, - "washed": 45462, - "washer": 45146, - "washing": 38524, - "wat": 47261, - "watch": 8340, - "watching": 50042, - "water": 7050, - "waters": 41555, - "waukee": 15428, - "wav": 45137, - "wave": 19204, - "waves": 32569, - "way": 1014, - "wayne": 43932, - "ways": 1322, - "wb": 39346, - "wcs": 12712, - "wcsstore": 12781, - "wd": 16993, - "we": 732, - "weak": 38695, - "wealth": 14298, - "weapon": 28741, - "weapons": 33999, - "wear": 13927, - "weather": 23563, - "web": 12384, - "webkit": 43648, - "wed": 19103, - "weed": 39054, - "week": 10464, - "weekly": 45291, - "ween": 975, - "weeney": 41681, - "weet": 7277, - "wegian": 20684, - "wei": 42990, - "weight": 6551, - "weights": 43775, - "well": 4053, - "wen": 21006, - "went": 19963, - "wer": 15448, - "were": 22474, - "wered": 8279, - "west": 7038, - "western": 14197, - "wh": 1929, - "what": 10919, - "whatever": 39664, - "whe": 12491, - "wheel": 22001, - "whel": 30613, - "whelming": 36433, - "when": 12518, - "where": 3003, - "whether": 25356, - "which": 4758, - "while": 4514, - "white": 11186, - "who": 8727, - "whose": 38159, - "why": 22850, - "wi": 37686, - "wic": 22664, - "wich": 11451, - "wick": 16239, - "wid": 28029, - "wide": 4421, - "widget": 42655, - "width": 10394, - "wife": 22095, - "wig": 28033, - "wik": 20763, - "wiki": 
15466, - "wikipedia": 31266, - "wild": 21992, - "will": 10594, - "win": 5404, - "wind": 7972, - "window": 17497, - "windows": 28457, - "wine": 39002, - "wing": 5469, - "wings": 48819, - "winner": 39791, - "winning": 14463, - "winter": 40078, - "wire": 21809, - "wired": 44236, - "wise": 3083, - "wit": 39289, - "witch": 42248, - "with": 4480, - "within": 33479, - "without": 19419, - "withstanding": 20701, - "witz": 28155, - "wives": 35234, - "wk": 43021, - "wl": 40989, - "wm": 26377, - "wn": 675, - "wo": 21638, - "wolf": 18829, - "wolves": 29664, - "woman": 8580, - "women": 25878, - "won": 26502, - "wood": 3822, - "woods": 39493, - "word": 4775, - "wordpress": 40346, - "words": 10879, - "work": 1818, - "worked": 32931, - "worker": 28816, - "workers": 22896, - "working": 16090, - "works": 5225, - "workshop": 38067, - "world": 6894, - "worldly": 49366, - "worm": 25323, - "worms": 49617, - "worn": 34565, - "worst": 41430, - "worth": 9268, - "worthiness": 48756, - "worthy": 18275, - "would": 19188, - "wow": 42773, - "wp": 24142, - "wr": 18351, - "wra": 29988, - "wrap": 37150, - "wrapper": 48553, - "wreck": 39238, - "wright": 29995, - "writ": 8933, - "write": 13564, - "writer": 16002, - "writers": 34422, - "writing": 16502, - "written": 15266, - "wrong": 36460, - "wrote": 42910, - "ws": 18504, - "wt": 46569, - "wu": 43812, - "ww": 1383, - "www": 2503, - "wx": 49345, - "wy": 21768, - "wyn": 27612, - "x": 87, - "xa": 27865, - "xb": 30894, - "xc": 25306, - "xd": 24954, - "xe": 27705, - "xes": 48169, - "xf": 26152, - "xff": 47596, - "xi": 29992, - "xia": 36072, - "xiety": 35753, - "xious": 48392, - "xit": 10198, - "xml": 19875, - "xon": 23813, - "xp": 42372, - "xs": 34223, - "xt": 742, - "xtap": 42915, - "xton": 22874, - "xual": 5541, - "xus": 40832, - "xx": 5324, - "xxx": 31811, - "xxxx": 12343, - "xxxxxxxx": 24223, - "xy": 5431, - "y": 88, - "ya": 3972, - "yah": 46848, - "yahoo": 40774, - "yan": 4121, - "yang": 17859, - "yard": 9413, - "yards": 33750, - "ycle": 39297, - 
"yd": 5173, - "yden": 43955, - "ydia": 30708, - "ye": 5948, - "yeah": 43669, - "year": 1941, - "years": 19002, - "yellow": 36022, - "yer": 9860, - "yers": 21200, - "yes": 8505, - "yet": 25907, - "yg": 35641, - "yi": 48111, - "ying": 1112, - "yip": 39666, - "yk": 48361, - "yl": 2645, - "ylan": 18554, - "yle": 2349, - "ylene": 37880, - "yles": 24327, - "yll": 25727, - "ylon": 15158, - "ylum": 11183, - "ym": 4948, - "ymes": 22009, - "ymm": 26621, - "ymph": 20896, - "yn": 2047, - "yna": 46434, - "ynam": 4989, - "ynamic": 28995, - "ynasty": 19488, - "ync": 13361, - "ynchron": 24871, - "ynchronous": 31301, - "yne": 39547, - "ynes": 25337, - "ynski": 40008, - "ynt": 33567, - "ynthesis": 44411, - "yo": 8226, - "yon": 19181, - "yond": 3243, - "you": 5832, - "young": 35465, - "your": 14108, - "yout": 32015, - "youtu": 32594, - "youtube": 11604, - "yp": 4464, - "ype": 2981, - "ypes": 9497, - "yr": 2417, - "yre": 35759, - "yrics": 14279, - "yright": 4766, - "yrights": 49158, - "yrim": 17302, - "yrinth": 21324, - "yrs": 48489, - "yrus": 21180, - "ys": 893, - "ysc": 28349, - "ysical": 15380, - "ysics": 23154, - "ysis": 3097, - "yson": 19699, - "yss": 33968, - "yssey": 23784, - "ystem": 6781, - "yt": 20760, - "yth": 5272, - "ythm": 34853, - "ython": 7535, - "yton": 31616, - "yu": 24767, - "yx": 28391, - "yy": 22556, - "yz": 45579, - "z": 89, - "za": 4496, - "zac": 49897, - "zag": 50183, - "zai": 35142, - "zan": 15201, - "zanne": 38395, - "zar": 41046, - "zb": 14969, - "zbek": 40413, - "zbollah": 21677, - "ze": 2736, - "zeb": 38130, - "zech": 15356, - "zed": 8863, - "zee": 42871, - "zees": 43727, - "zek": 43130, - "zel": 17396, - "zen": 4801, - "zens": 8247, - "zer": 9107, - "zero": 22570, - "zers": 47031, - "zes": 12271, - "zh": 23548, - "zhen": 46732, - "zhou": 38536, - "zi": 17027, - "zie": 49746, - "zig": 38262, - "zik": 47303, - "zilla": 16496, - "zin": 42140, - "zing": 9510, - "zinski": 46394, - "zip": 13344, - "zl": 48274, - "zman": 32054, - "zn": 47347, - "zo": 10872, - 
"zon": 26361, - "zona": 7551, - "zone": 11340, - "zos": 37925, - "zsche": 37467, - "zu": 27624, - "zx": 42592, - "zy": 7357, - "zyk": 46355, - "zyme": 24266, - "zynski": 47143, - "zz": 3019, - "zza": 34443, - "zzi": 46218, - "zzle": 26413, - "zzo": 47802, - "zzy": 31570, - "{": 90, - "{\"": 4895, - "{\\": 31478, - "{{": 27007, - "|": 91, - "||": 15886, - "||||": 42210, - "}": 92, - "}\"": 36786, - "})": 30072, - "});": 22133, - "},": 5512, - "},\"": 9063, - "},{\"": 8762, - "}.": 27422, - "}:": 38362, - "};": 19629, - "}\\": 32239, - "}{": 18477, - "}}": 11709, - "}}}": 42535, - "~": 93, - "~~": 4907, - "~~~~": 8728, - "~~~~~~~~": 15116, - "~~~~~~~~~~~~~~~~": 27156, - "¡": 94, - "¢": 95, - "£": 96, - "£ı": 6408, - "¤": 97, - "¥": 98, - "¥µ": 35069, - "¥ŀ": 13945, - "¦": 99, - "§": 100, - "¨": 101, - "©": 102, - "©¶æ": 47490, - "©¶æ¥µ": 47703, - "ª": 103, - "«": 104, - "«ĺ": 45865, - "¬": 105, - "¬¼": 45539, - "®": 106, - "¯": 107, - "°": 108, - "±": 109, - "²": 110, - "²¾": 39333, - "³": 111, - "´": 112, - "µ": 113, - "¶": 114, - "¶æ": 35050, - "¶ħ": 41678, - "·": 115, - "¸": 116, - "¹": 117, - "º": 118, - "»": 119, - "»Ĵ": 36596, - "¼": 120, - "½": 121, - "¾": 122, - "¿": 123, - "¿½": 4204, - "À": 124, - "Á": 125, - "Â": 126, - "¢": 44359, - "£": 14988, - "§": 16273, - "¨": 37102, - "©": 16224, - "«": 24328, - "®": 7461, - "®,": 45088, - "¯": 5196, - "¯¯": 5367, - "¯¯¯¯": 8980, - "¯¯¯¯¯¯¯¯": 15243, - "¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯": 27006, - "°": 7200, - "±": 22519, - "²": 31185, - "´": 18265, - "¶": 26604, - "·": 9129, - "··": 35147, - "º": 36165, - "»": 17730, - "½": 23141, - "Âł": 1849, - "³³": 4603, - "³³³": 33477, - "³³³³": 8828, - "³³³³³³³³": 17811, - "³³³³³³³³³³³³³³³³": 39172, - "ÂŃ": 3907, - "Ã": 127, - "á": 6557, - "án": 21162, - "ás": 40138, - "â": 22940, - "ã": 26102, - "ão": 28749, - "ä": 11033, - "Ã¥": 29090, - "æ": 21241, - "ç": 16175, - "ça": 50041, - "è": 14064, - "ère": 35979, - "é": 2634, - "ée": 22161, - "én": 35942, - "ér": 42445, - "és": 20954, - "ét": 
25125, - "ê": 25792, - "ë": 26689, - "î": 34803, - "ï": 26884, - "ïve": 38776, - "ð": 27214, - "ñ": 12654, - "ña": 30644, - "ño": 31329, - "ó": 10205, - "ón": 18840, - "ô": 27083, - "ö": 9101, - "ön": 48863, - "ör": 30570, - "ø": 24172, - "ú": 21356, - "û": 42324, - "ü": 9116, - "ür": 25151, - "ÃĤ": 5523, - "Ãĥ": 5746, - "ÃĥÃĤ": 5808, - "ÃĥÃĤÃĥÃĤ": 5815, - "ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ": 9364, - "ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ": 14827, - "ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ": 23090, - "ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ": 35496, - "Ãī": 38351, - "Ãį": 38638, - "ÃįÃį": 43569, - "ÃĹ": 12906, - "ÃĽ": 34543, - "ÃĽÃĽ": 48396, - "ÃŁ": 39683, - "Ãł": 24247, - "ÃŃ": 8836, - "ÃŃa": 29690, - "ÃŃn": 39588, - "ÃŃs": 41200, - "Ä": 128, - "Ä«": 18962, - "ı": 30102, - "Äģ": 10235, - "Äĩ": 38325, - "Äį": 46195, - "Äĵ": 27092, - "ÄŁ": 33133, - "Å": 129, - "Å¡": 32790, - "Å«": 20317, - "ÅĤ": 41615, - "Åį": 13090, - "ÅŁ": 46481, - "Æ": 130, - "Ç": 131, - "È": 132, - "É": 133, - "Ê": 134, - "Ë": 135, - "ËĪ": 45990, - "Ëľ": 41185, - "Ì": 136, - "̶": 48869, - "Í": 137, - "Î": 138, - "α": 17394, - "β": 26638, - "γ": 42063, - "ε": 30950, - "ι": 29945, - "κ": 43000, - "λ": 39377, - "μ": 34703, - "ν": 26180, - "ο": 26517, - "Ï": 139, - "ÏĢ": 46582, - "Ïģ": 33643, - "ÏĤ": 35558, - "Ïĥ": 38392, - "ÏĦ": 32830, - "Ïī": 49535, - "Ð": 140, - "а": 16142, - "в": 38857, - "д": 43666, - "е": 16843, - "и": 18849, - "к": 31583, - "л": 30143, - "м": 43108, - "н": 22177, - "о": 15166, - "оÐ": 25443, - "Ñ": 141, - "ÑĢ": 21169, - "Ñģ": 21727, - "ÑĤ": 20375, - "Ñĥ": 35072, - "Ñĭ": 45035, - "ÑĮ": 45367, - "Ñı": 40623, - "Ò": 142, - "Ó": 143, - "Ô": 144, - "Õ": 145, - "Ö": 146, - "Ö¼": 47903, - "×": 147, - "ר": 37778, - "ש": 50227, - "ת": 42064, - "×IJ": 42973, - "×ij": 49603, - "×Ķ": 38269, - "×ķ": 27072, - "×Ļ": 25529, - "×Ļ×": 33951, - "׾": 40010, - "×ŀ": 49168, - "Ø": 148, - "ا": 12919, - "اØ": 
34247, - "اÙĦ": 23525, - "ب": 39848, - "Ø©": 45632, - "ت": 41486, - "د": 38843, - "ر": 26897, - "س": 45692, - "ع": 44690, - "Ù": 149, - "ÙĦ": 13862, - "Ùħ": 25405, - "ÙĨ": 23338, - "Ùĩ": 29519, - "ÙĪ": 30335, - "ÙĬ": 22654, - "Ùİ": 24333, - "ÙIJ": 44208, - "ÙĴ": 48763, - "Ú": 150, - "Û": 151, - "Ü": 152, - "Ý": 153, - "Þ": 154, - "ß": 155, - "à": 156, - "à¤": 11976, - "ा": 48077, - "à¥": 24231, - "à¦": 48071, - "à¨": 19469, - "à©": 43297, - "à¸": 19567, - "à¹": 31479, - "à¼": 41340, - "á": 157, - "áµ": 39611, - "á¸": 41585, - "á¹": 26292, - "á½": 45495, - "â": 158, - "âĢ": 447, - "âĢ¢": 3581, - "âĢ¢âĢ¢": 22838, - "âĢ¢âĢ¢âĢ¢âĢ¢": 39967, - "âĢ¦": 1399, - "âĢ¦\"": 9962, - "âĢ¦)": 38418, - "âĢ¦.": 11580, - "âĢ¦.\"": 50248, - "âĢ¦..": 30864, - "âĢ¦]": 21476, - "âĢ¦âĢ¦": 7398, - "âĢ¦âĢ¦âĢ¦âĢ¦": 15864, - "âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦": 29146, - "âĢ²": 17478, - "âĢ³": 12237, - "âĢĭ": 9525, - "âĢĭâĢĭ": 39009, - "âĢİ": 48261, - "âĢIJ": 9333, - "âĢij": 20977, - "âĢĵ": 1906, - "âĢĵâĢĵ": 25608, - "âĢĶ": 960, - "âĢĶ\"": 19056, - "âĢĶ-": 44839, - "âĢĶâĢĶ": 4500, - "âĢĶâĢĶâĢĶâĢĶ": 8184, - "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 14950, - "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 30542, - "âĢķ": 31857, - "âĢł": 33912, - "âģ": 46256, - "âĤ¬": 26391, - "âĦ¢": 8151, - "âĦ¢:": 41333, - "âĨ": 29705, - "âĨij": 48541, - "âĨĴ": 39310, - "âĪ": 24861, - "âĪĴ": 14095, - "âī": 35705, - "âĵĺ": 45563, - "âĶ": 6552, - "âĶĢ": 7280, - "âĶĢâĶĢ": 8418, - "âĶĢâĶĢâĶĢâĶĢ": 16068, - "âĶĢâĶĢâĶĢâĶĢâĶĢâĶĢâĶĢâĶĢ": 28542, - "âĶģ": 47486, - "âķ": 22880, - "âķIJ": 28670, - "âķIJâķIJ": 31732, - "âĸ": 5008, - "âĸ¬": 47530, - "âĸ¬âĸ¬": 49843, - "âĸº": 45717, - "âĸĢ": 44033, - "âĸĦ": 45786, - "âĸĪ": 8115, - "âĸĪâĸĪ": 9968, - "âĸĪâĸĪâĸĪâĸĪ": 20503, - "âĸĪâĸĪâĸĪâĸĪâĸĪâĸĪâĸĪâĸĪ": 49527, - "âĸij": 22110, - "âĸijâĸij": 27534, - "âĸĴ": 40516, - "âĸĵ": 38626, - "âĸł": 29316, - "âĹ": 15926, - "âĹ¼": 48366, - "âĹı": 28133, - "âĺ": 24583, - "âĺħ": 15583, - "âĺħâĺħ": 28353, - "âĺĨ": 35283, - "âĻ": 17992, - "âĻ¥": 39908, - "âĻ¦": 
41298, - "âľ": 26486, - "âĿ": 32391, - "ã": 159, - "ãĢ": 5099, - "ãĢģ": 23513, - "ãĢĤ": 16764, - "ãĢĮ": 13697, - "ãĢį": 13700, - "ãĢİ": 40493, - "ãĢı": 40549, - "ãĢIJ": 31854, - "ãĢij": 31817, - "ãģ": 2515, - "ãģ£": 33180, - "ãģ¦": 28134, - "ãģ§": 30640, - "ãģ¨": 30201, - "ãģª": 26945, - "ãģ«": 28618, - "ãģ®": 5641, - "ãģ®å": 15474, - "ãģ®å®": 49149, - "ãģ®æ": 27032, - "ãģ®ç": 17683, - "ãģ®é": 33426, - "ãģ®éŃĶ": 34633, - "ãģ¯": 31676, - "ãģ¾": 30159, - "ãģĤ": 40948, - "ãģĦ": 18566, - "ãģĨ": 29557, - "ãģĭ": 27370, - "ãģĮ": 35585, - "ãģį": 33778, - "ãģı": 31917, - "ãģĵ": 46036, - "ãģķ": 43357, - "ãģĹ": 22180, - "ãģĻ": 33623, - "ãģŁ": 25224, - "ãģł": 46777, - "ãĤ": 1792, - "ãĤ¡": 25362, - "ãĤ¢": 11839, - "ãĤ¢ãĥ«": 47794, - "ãĤ£": 16646, - "ãĤ¤": 11482, - "ãĤ¤ãĥĪ": 42396, - "ãĤ¦": 16165, - "ãĤ¦ãĤ¹": 34103, - "ãĤ§": 24806, - "ãĤ¨": 23544, - "ãĤ¨ãĥ«": 46948, - "ãĤ©": 37662, - "ãĤª": 20513, - "ãĤ«": 21763, - "ãĤ¬": 23728, - "ãĤ®": 43899, - "ãĤ¯": 14099, - "ãĤ°": 26095, - "ãĤ±": 41658, - "ãĤ³": 24679, - "ãĤ´": 17933, - "ãĤ´ãĥ³": 22997, - "ãĤµ": 26503, - "ãĤ¶": 48458, - "ãĤ·": 15661, - "ãĤ·ãĥ£": 39467, - "ãĤ¸": 21091, - "ãĤ¹": 8943, - "ãĤ¹ãĥĪ": 43302, - "ãĤº": 37426, - "ãĤ»": 47271, - "ãĤ¼": 30432, - "ãĤ¼ãĤ¦ãĤ¹": 43361, - "ãĤ½": 47559, - "ãĤ¿": 23376, - "ãĤĤ": 43266, - "ãĤī": 36853, - "ãĤĬ": 28255, - "ãĤĭ": 25748, - "ãĤĮ": 39258, - "ãĤĴ": 31758, - "ãĤĵ": 22174, - "ãĤŃ": 25084, - "ãĥ": 1209, - "ãĥ¡": 26998, - "ãĥ¢": 40361, - "ãĥ£": 23131, - "ãĥ¤": 37858, - "ãĥ¥": 24440, - "ãĥ©": 9263, - "ãĥ©ãĥ³": 48204, - "ãĥª": 12675, - "ãĥ«": 9202, - "ãĥ¬": 24186, - "ãĥ¯": 25589, - "ãĥ¯ãĥ³": 42983, - "ãĥ³": 6527, - "ãĥ³ãĤ¸": 45823, - "ãĥ´": 29752, - "ãĥ´ãĤ¡": 44444, - "ãĥ»": 4707, - "ãĥ¼": 6312, - "ãĥ¼ãĤ¯": 42869, - "ãĥ¼ãĥ": 12045, - "ãĥ¼ãĥ«": 43353, - "ãĥ¼ãĥ³": 31708, - "ãĥ¼ãĥĨ": 44326, - "ãĥ¼ãĥĨãĤ£": 44686, - "ãĥĢ": 27852, - "ãĥģ": 31090, - "ãĥĥ": 14777, - "ãĥĥãĤ¯": 35702, - "ãĥĥãĥĪ": 35799, - "ãĥĥãĥī": 45435, - "ãĥĦ": 41115, - "ãĥĨ": 24336, - "ãĥĨãĤ£": 44431, - "ãĥĩ": 21959, - "ãĥĩãĤ£": 
40629, - "ãĥĪ": 13298, - "ãĥī": 13765, - "ãĥīãĥ©": 19073, - "ãĥīãĥ©ãĤ´ãĥ³": 24731, - "ãĥĬ": 26229, - "ãĥĭ": 30165, - "ãĥį": 44916, - "ãĥİ": 25053, - "ãĥı": 37412, - "ãĥIJ": 29659, - "ãĥij": 32546, - "ãĥĵ": 36922, - "ãĥķ": 17681, - "ãĥķãĤ¡": 41939, - "ãĥķãĤ©": 48457, - "ãĥĸ": 24001, - "ãĥĹ": 30965, - "ãĥĺ": 23363, - "ãĥĺãĥ©": 34473, - "ãĥĻ": 35604, - "ãĥŀ": 20115, - "ãĥŁ": 27542, - "ãĥł": 25795, - "ãĥŃ": 16253, - "ãħĭ": 35098, - "ãħĭãħĭ": 40345, - "ä": 160, - "ä¸": 10310, - "ä¸Ģ": 31660, - "ä¸ī": 49011, - "ä¸Ĭ": 41468, - "ä¸į": 38834, - "ä¸Ń": 40792, - "ä¹": 20046, - "ä¹ĭ": 45298, - "äº": 12859, - "人": 21689, - "äºĶ": 49390, - "ä»": 20015, - "代": 47987, - "ä¼": 27670, - "ä½": 19526, - "使": 45635, - "ä½ľ": 43291, - "ä¿": 46479, - "å": 161, - "å£": 18004, - "士": 18803, - "å¤": 13783, - "大": 32014, - "天": 25465, - "å¥": 25001, - "女": 42637, - "å¦": 36685, - "å§": 34650, - "姫": 40235, - "å®": 22522, - "å¯": 43380, - "å°": 22887, - "å°Ĩ": 49546, - "å·": 32432, - "å¸": 30585, - "å¹": 33176, - "åº": 41753, - "å¼": 28156, - "å½": 37605, - "å¾": 36181, - "å¿": 33232, - "åĤ": 43636, - "åħ": 17739, - "åħī": 46268, - "åĨ": 37863, - "åĩ": 49035, - "åĪ": 26344, - "åī": 30298, - "åĬ": 27950, - "åĭ": 47947, - "åĮ": 44293, - "åį": 39355, - "åİ": 43889, - "åı": 20998, - "åIJ": 28938, - "åij": 37772, - "åĽ": 32368, - "åľ": 28839, - "åŃ": 27764, - "åŃIJ": 36310, - "æ": 162, - "æ©": 43897, - "æ©Ł": 49960, - "æ°": 36365, - "æ³": 37345, - "æµ": 38184, - "æĢ": 45250, - "æĥ": 46349, - "æĦ": 35707, - "æĪ": 22755, - "æĪ¦": 36704, - "æī": 33699, - "æķ": 46763, - "æĸ": 23877, - "æĸ¹": 43095, - "æĹ": 33768, - "æĺ": 23626, - "æĺ¯": 42468, - "æľ": 17312, - "æĿ": 30266, - "æł": 43718, - "æŃ": 29826, - "æѦ": 49476, - "ç": 163, - "ç¥ŀ": 15351, - "ç«": 44165, - "ç·": 45784, - "çĦ": 47078, - "çī": 31965, - "çīĪ": 48304, - "çĭ": 45379, - "çİĭ": 25581, - "çIJ": 49426, - "çĶ": 18796, - "çĶ°": 35572, - "çĶŁ": 37955, - "çķ": 45911, - "çļ": 19021, - "çļĦ": 21410, - "çĽ": 33566, - "çľ": 40367, - "è": 164, - 
"è¡": 26193, - "è£": 32518, - "è£ħ": 35318, - "è¦": 17358, - "è¦ļéĨĴ": 23614, - "èª": 45739, - "è¯": 46237, - "è»": 43102, - "è¿": 32573, - "èĢ": 32003, - "èĢħ": 38519, - "èĥ": 47797, - "èĪ": 48958, - "é": 165, - "é£": 45617, - "é»Ĵ": 44112, - "é¾": 11737, - "é¾į": 11885, - "é¾įå": 19049, - "é¾įå¥": 39820, - "é¾įå¥ij士": 39821, - "é¾įåĸļ士": 33454, - "éĢ": 34460, - "éģ": 34402, - "éĥ": 32849, - "éĩ": 34932, - "éĸ": 38461, - "éĹ": 29785, - "éĹĺ": 42234, - "éļ": 49694, - "éĽ": 37239, - "éŃĶ": 20804, - "ê": 166, - "ë": 167, - "ëĭ": 46695, - "ì": 168, - "ìĿ": 35975, - "í": 169, - "íķ": 47991, - "î": 170, - "îĢ": 29773, - "ï": 171, - "ï¸": 35266, - "ï¸ı": 37929, - "�": 4210, - "��": 6353, - "���": 48585, - "����": 12100, - "ð": 172, - "ðĿ": 47728, - "ðŁ": 8582, - "ðŁij": 41840, - "ðŁĺ": 47249, - "ñ": 173, - "ò": 174, - "ó": 175, - "ô": 176, - "õ": 177, - "ö": 178, - "÷": 179, - "ø": 180, - "ù": 181, - "ú": 182, - "û": 183, - "ü": 184, - "ý": 185, - "þ": 186, - "ÿ": 187, - "Ā": 188, - "ā": 189, - "Ă": 190, - "ă": 191, - "Ą": 192, - "ą": 193, - "Ć": 194, - "ć": 195, - "Ĉ": 196, - "ĉ": 197, - "Ċ": 198, - "ĊÂł": 44320, - "ĊĊ": 628, - "ċ": 199, - "Č": 200, - "č": 201, - "Ď": 202, - "ď": 203, - "Đ": 204, - "đ": 205, - "Ē": 206, - "ē": 207, - "Ĕ": 208, - "ĕ": 209, - "Ė": 210, - "ė": 211, - "Ę": 212, - "ę": 213, - "Ě": 214, - "ě": 215, - "Ĝ": 216, - "ĝ": 217, - "Ğ": 218, - "ğ": 219, - "Ġ": 220, - "Ġ!": 5145, - "Ġ!!": 37867, - "Ġ!=": 14512, - "Ġ\"": 366, - "Ġ\"\"": 13538, - "Ġ\"\"\"": 37227, - "Ġ\"#": 25113, - "Ġ\"$": 17971, - "Ġ\"$:/": 32047, - "Ġ\"%": 36521, - "Ġ\"'": 24018, - "Ġ\"(": 30629, - "Ġ\"+": 43825, - "Ġ\",": 33172, - "Ġ\"-": 27444, - "Ġ\".": 27071, - "Ġ\"...": 27896, - "Ġ\"/": 12813, - "Ġ\"<": 33490, - "Ġ\"@": 44212, - "Ġ\"[": 12878, - "Ġ\"\\": 37082, - "Ġ\"_": 45434, - "Ġ\"{": 45144, - "Ġ\"âĢ¦": 29368, - "Ġ#": 1303, - "Ġ##": 22492, - "Ġ###": 44386, - "Ġ#####": 46424, - "Ġ$": 720, - "Ġ$$": 32382, - "Ġ$(": 29568, - "Ġ$\\": 39280, - "Ġ$_": 40111, - "Ġ${": 25597, - "Ġ%": 
4064, - "Ġ%%": 43313, - "Ġ&": 1222, - "Ġ&&": 11405, - "Ġ'": 705, - "Ġ''": 10148, - "Ġ'(": 29513, - "Ġ',": 46083, - "Ġ'.": 45302, - "Ġ'/": 31051, - "Ġ'[": 44438, - "Ġ(": 357, - "Ġ(!": 22759, - "Ġ(\"": 5855, - "Ġ(#": 17426, - "Ġ($": 7198, - "Ġ($)": 45491, - "Ġ(%": 37633, - "Ġ(%)": 11509, - "Ġ(&": 35494, - "Ġ('": 19203, - "Ġ((": 14808, - "Ġ()": 7499, - "Ġ())": 32865, - "Ġ());": 38377, - "Ġ(),": 29994, - "Ġ().": 27972, - "Ġ();": 13979, - "Ġ(*": 20789, - "Ġ(+": 11502, - "Ġ(-": 13841, - "Ġ(.": 20262, - "Ġ(/": 50247, - "Ġ(<": 38155, - "Ġ(=": 46121, - "Ġ(>": 45160, - "Ġ(?,": 32843, - "Ġ(@": 4275, - "Ġ([": 29565, - "Ġ(_": 44104, - "Ġ({": 37913, - "Ġ(~": 31034, - "Ġ(£": 23068, - "Ġ(âĪĴ": 35508, - "Ġ)": 1267, - "Ġ))": 15306, - "Ġ)))": 47282, - "Ġ));": 29226, - "Ġ),": 10612, - "Ġ).": 6739, - "Ġ):": 15179, - "Ġ);": 5619, - "Ġ)]": 48600, - "Ġ*": 1635, - "Ġ*)": 31936, - "Ġ**": 12429, - "Ġ***": 17202, - "Ġ****": 25998, - "Ġ********************************": 41906, - "Ġ*.": 46866, - "Ġ*/": 9466, - "Ġ+": 1343, - "Ġ+#": 43053, - "Ġ++": 19969, - "Ġ+++": 49954, - "Ġ+---": 40703, - "Ġ+/-": 29694, - "Ġ+=": 15853, - "Ġ,": 837, - "Ġ,\"": 42911, - "Ġ-": 532, - "Ġ--": 1377, - "Ġ---": 11420, - "Ġ----": 13498, - "Ġ-----": 37404, - "Ġ------": 40103, - "Ġ-------": 35656, - "Ġ--------": 24200, - "Ġ---------": 45337, - "Ġ----------------": 34400, - "Ġ--------------------": 41436, - "Ġ--------------------------------": 20368, - "Ġ----------------------------------------------------------------": 16529, - "Ġ-->": 14610, - "Ġ-=": 48185, - "Ġ->": 4613, - "Ġ.": 764, - "Ġ.\"": 22135, - "Ġ.)": 46328, - "Ġ..": 11485, - "Ġ...": 2644, - "Ġ...\"": 35713, - "Ġ....": 19424, - "Ġ......": 47082, - "Ġ........": 20004, - "Ġ..........": 39864, - "Ġ..............": 44912, - "Ġ................": 44713, - "Ġ./": 24457, - "Ġ._": 47540, - "Ġ/": 1220, - "Ġ/*": 11900, - "Ġ/**": 42638, - "Ġ//": 3373, - "Ġ///": 34013, - "Ġ//[": 31161, - "Ġ/>": 11037, - "Ġ0": 657, - "Ġ00": 3571, - "Ġ000": 12877, - "Ġ0000": 17643, - 
"Ġ000000": 41853, - "Ġ00000000": 27551, - "Ġ0004": 38326, - "Ġ01": 5534, - "Ġ02": 7816, - "Ġ03": 7643, - "Ġ04": 8702, - "Ġ05": 8870, - "Ġ06": 9130, - "Ġ07": 8753, - "Ġ08": 8487, - "Ġ09": 7769, - "Ġ1": 352, - "Ġ10": 838, - "Ġ100": 1802, - "Ġ1000": 8576, - "Ġ10000": 33028, - "Ġ101": 8949, - "Ġ102": 15143, - "Ġ1024": 28119, - "Ġ103": 15349, - "Ġ104": 14436, - "Ġ105": 13343, - "Ġ1050": 47235, - "Ġ106": 15696, - "Ġ107": 16226, - "Ġ1070": 49616, - "Ġ108": 15495, - "Ġ1080": 17729, - "Ġ109": 16003, - "Ġ11": 1367, - "Ġ110": 9796, - "Ġ1100": 36566, - "Ġ111": 13374, - "Ġ112": 13539, - "Ġ113": 17318, - "Ġ114": 17342, - "Ġ115": 12279, - "Ġ116": 18693, - "Ġ117": 19048, - "Ġ118": 19035, - "Ġ119": 15136, - "Ġ12": 1105, - "Ġ120": 7982, - "Ġ1200": 24938, - "Ġ121": 20416, - "Ġ122": 19409, - "Ġ123": 17031, - "Ġ124": 19755, - "Ġ125": 13151, - "Ġ126": 19710, - "Ġ127": 18112, - "Ġ128": 13108, - "Ġ1280": 37674, - "Ġ129": 20248, - "Ġ13": 1511, - "Ġ130": 11323, - "Ġ1300": 36058, - "Ġ131": 23134, - "Ġ132": 21761, - "Ġ133": 22169, - "Ġ134": 22352, - "Ġ135": 17501, - "Ġ136": 21056, - "Ġ137": 21643, - "Ġ138": 21503, - "Ġ139": 23666, - "Ġ14": 1478, - "Ġ140": 12713, - "Ġ1400": 36641, - "Ġ141": 25500, - "Ġ142": 25181, - "Ġ143": 24356, - "Ġ144": 20224, - "Ġ1440": 49557, - "Ġ145": 20299, - "Ġ146": 22986, - "Ġ147": 22909, - "Ġ148": 22613, - "Ġ149": 24041, - "Ġ15": 1315, - "Ġ150": 6640, - "Ġ1500": 20007, - "Ġ151": 25326, - "Ġ152": 24848, - "Ġ153": 24652, - "Ġ154": 24235, - "Ġ155": 20708, - "Ġ156": 23871, - "Ġ157": 23313, - "Ġ158": 24063, - "Ġ159": 26422, - "Ġ16": 1467, - "Ġ160": 13454, - "Ġ1600": 26143, - "Ġ161": 27829, - "Ġ162": 25090, - "Ġ163": 26826, - "Ġ164": 25307, - "Ġ165": 21409, - "Ġ166": 26753, - "Ġ167": 26118, - "Ġ168": 23378, - "Ġ169": 27191, - "Ġ17": 1596, - "Ġ170": 16677, - "Ġ1700": 35665, - "Ġ171": 28369, - "Ġ172": 23120, - "Ġ173": 28174, - "Ġ174": 27621, - "Ġ175": 19038, - "Ġ176": 26937, - "Ġ177": 26607, - "Ġ178": 27368, - "Ġ179": 27228, - "Ġ18": 1248, - "Ġ180": 11546, - "Ġ1800": 21431, 
- "Ġ181": 30110, - "Ġ182": 28581, - "Ġ183": 28551, - "Ġ1830": 45440, - "Ġ184": 28598, - "Ġ1840": 47784, - "Ġ185": 22855, - "Ġ1850": 35745, - "Ġ186": 28481, - "Ġ1860": 37637, - "Ġ1861": 45278, - "Ġ1862": 49658, - "Ġ1863": 47072, - "Ġ1865": 47801, - "Ġ187": 27649, - "Ġ1870": 37667, - "Ġ188": 27778, - "Ġ1880": 34865, - "Ġ1886": 49539, - "Ġ1888": 49584, - "Ġ1889": 49545, - "Ġ189": 27230, - "Ġ1890": 31982, - "Ġ1893": 48889, - "Ġ1895": 46425, - "Ġ1896": 46723, - "Ġ1897": 49429, - "Ġ1898": 46244, - "Ġ1899": 47465, - "Ġ19": 678, - "Ġ190": 19884, - "Ġ1900": 21489, - "Ġ1901": 39923, - "Ġ1902": 45611, - "Ġ1903": 41625, - "Ġ1904": 43785, - "Ġ1905": 37166, - "Ġ1906": 40538, - "Ġ1907": 41435, - "Ġ1908": 40417, - "Ġ1909": 41507, - "Ġ191": 31009, - "Ġ1910": 31953, - "Ġ1911": 32216, - "Ġ1912": 34463, - "Ġ1913": 35145, - "Ġ1914": 26833, - "Ġ1915": 32062, - "Ġ1916": 32811, - "Ġ1917": 24168, - "Ġ1918": 25859, - "Ġ1919": 30992, - "Ġ192": 17817, - "Ġ1920": 14062, - "Ġ1921": 35369, - "Ġ1922": 36094, - "Ġ1923": 37272, - "Ġ1924": 37547, - "Ġ1925": 36864, - "Ġ1926": 38525, - "Ġ1927": 36565, - "Ġ1928": 35768, - "Ġ1929": 31883, - "Ġ193": 29691, - "Ġ1930": 15533, - "Ġ1931": 34625, - "Ġ1932": 32471, - "Ġ1933": 26539, - "Ġ1934": 29300, - "Ġ1935": 30704, - "Ġ1936": 27653, - "Ġ1937": 28684, - "Ġ1938": 28017, - "Ġ1939": 24414, - "Ġ194": 30483, - "Ġ1940": 16236, - "Ġ1941": 23234, - "Ġ1942": 22458, - "Ġ1943": 21577, - "Ġ1944": 16994, - "Ġ1945": 15761, - "Ġ1946": 22717, - "Ġ1947": 21709, - "Ġ1948": 21794, - "Ġ1949": 24977, - "Ġ195": 24793, - "Ġ1950": 11445, - "Ġ1951": 27937, - "Ġ1952": 26352, - "Ġ1953": 24217, - "Ġ1954": 24718, - "Ġ1955": 25325, - "Ġ1956": 25190, - "Ġ1957": 25177, - "Ġ1958": 24648, - "Ġ1959": 23859, - "Ġ196": 28817, - "Ġ1960": 9507, - "Ġ1961": 20510, - "Ġ1962": 20033, - "Ġ1963": 19342, - "Ġ1964": 17575, - "Ġ1965": 17672, - "Ġ1966": 19322, - "Ġ1967": 15904, - "Ġ1968": 15963, - "Ġ1969": 16450, - "Ġ197": 29903, - "Ġ1970": 8069, - "Ġ1971": 16382, - "Ġ1972": 16101, - "Ġ1973": 15674, - 
"Ġ1974": 16489, - "Ġ1975": 15231, - "Ġ1976": 15408, - "Ġ1977": 15589, - "Ġ1978": 15524, - "Ġ1979": 13521, - "Ġ198": 2757, - "Ġ1980": 7169, - "Ġ1981": 14745, - "Ġ1982": 14489, - "Ġ1983": 13540, - "Ġ1984": 12844, - "Ġ1985": 12863, - "Ġ1986": 12113, - "Ġ1987": 12923, - "Ġ1988": 12122, - "Ġ1989": 11104, - "Ġ199": 1594, - "Ġ1990": 6303, - "Ġ1991": 10249, - "Ġ1992": 9768, - "Ġ1993": 9656, - "Ġ1994": 9162, - "Ġ1995": 8735, - "Ġ1996": 8235, - "Ġ1997": 8309, - "Ġ1998": 7795, - "Ġ1999": 7358, - "Ġ2": 362, - "Ġ20": 1160, - "Ġ200": 939, - "Ġ2000": 4751, - "Ġ2001": 5878, - "Ġ2002": 6244, - "Ġ2003": 5816, - "Ġ2004": 5472, - "Ġ2005": 5075, - "Ġ2006": 4793, - "Ġ2007": 4343, - "Ġ2008": 3648, - "Ġ2009": 3717, - "Ġ201": 580, - "Ġ2010": 3050, - "Ġ2011": 2813, - "Ġ2012": 2321, - "Ġ2013": 2211, - "Ġ2014": 1946, - "Ġ2015": 1853, - "Ġ2016": 1584, - "Ġ2017": 2177, - "Ġ2018": 2864, - "Ġ2019": 13130, - "Ġ202": 22131, - "Ġ2020": 12131, - "Ġ2021": 33448, - "Ġ2022": 33160, - "Ġ2024": 48609, - "Ġ2025": 32190, - "Ġ203": 27408, - "Ġ2030": 25054, - "Ġ204": 26956, - "Ġ2048": 36117, - "Ġ205": 22538, - "Ġ2050": 32215, - "Ġ206": 27253, - "Ġ207": 27791, - "Ġ208": 27121, - "Ġ209": 28815, - "Ġ21": 2310, - "Ġ210": 20064, - "Ġ2100": 38123, - "Ġ211": 28714, - "Ġ212": 23679, - "Ġ213": 28658, - "Ġ214": 28277, - "Ġ215": 22951, - "Ġ216": 26881, - "Ġ217": 24894, - "Ġ218": 29217, - "Ġ219": 30453, - "Ġ22": 2534, - "Ġ220": 15629, - "Ġ221": 31566, - "Ġ222": 27795, - "Ġ223": 30299, - "Ġ224": 26063, - "Ġ225": 18500, - "Ġ226": 31510, - "Ġ227": 30989, - "Ġ228": 29041, - "Ġ229": 31064, - "Ġ23": 2242, - "Ġ230": 18395, - "Ġ231": 34598, - "Ġ232": 31773, - "Ġ233": 30435, - "Ġ234": 34323, - "Ġ235": 28878, - "Ġ236": 34044, - "Ġ237": 34385, - "Ġ238": 32544, - "Ġ239": 32817, - "Ġ24": 1987, - "Ġ240": 14956, - "Ġ2400": 48548, - "Ġ241": 35150, - "Ġ242": 34353, - "Ġ243": 35989, - "Ġ244": 35264, - "Ġ245": 29637, - "Ġ246": 34951, - "Ġ247": 30179, - "Ġ248": 32996, - "Ġ249": 34620, - "Ġ25": 1679, - "Ġ250": 8646, - "Ġ2500": 33507, - 
"Ġ251": 34489, - "Ġ252": 25264, - "Ġ253": 32056, - "Ġ254": 35360, - "Ġ255": 14280, - "Ġ256": 17759, - "Ġ257": 36100, - "Ġ258": 37528, - "Ġ259": 37831, - "Ġ26": 2608, - "Ġ260": 21148, - "Ġ2600": 47197, - "Ġ261": 39166, - "Ġ262": 35404, - "Ġ263": 39135, - "Ġ264": 32158, - "Ġ265": 32090, - "Ġ266": 37737, - "Ġ267": 37364, - "Ġ268": 36678, - "Ġ269": 38249, - "Ġ27": 2681, - "Ġ270": 20479, - "Ġ271": 33797, - "Ġ272": 38107, - "Ġ273": 38549, - "Ġ274": 39768, - "Ġ275": 25829, - "Ġ276": 38147, - "Ġ277": 38703, - "Ġ278": 39174, - "Ġ279": 39466, - "Ġ28": 2579, - "Ġ280": 21355, - "Ġ281": 39882, - "Ġ282": 41810, - "Ġ283": 42032, - "Ġ284": 40654, - "Ġ285": 33015, - "Ġ286": 39697, - "Ġ287": 38721, - "Ġ288": 35419, - "Ġ289": 38902, - "Ġ29": 2808, - "Ġ290": 26481, - "Ġ291": 43336, - "Ġ292": 41569, - "Ġ293": 37224, - "Ġ294": 41235, - "Ġ295": 34772, - "Ġ296": 41922, - "Ġ297": 41103, - "Ġ298": 37576, - "Ġ299": 31011, - "Ġ3": 513, - "Ġ30": 1542, - "Ġ300": 5867, - "Ġ3000": 20343, - "Ġ301": 25643, - "Ġ302": 32591, - "Ġ303": 30727, - "Ġ304": 31672, - "Ġ305": 32747, - "Ġ306": 37255, - "Ġ307": 38369, - "Ġ308": 35617, - "Ġ309": 40286, - "Ġ31": 3261, - "Ġ310": 28947, - "Ġ311": 35592, - "Ġ312": 34465, - "Ġ313": 35897, - "Ġ314": 34085, - "Ġ315": 32647, - "Ġ316": 34131, - "Ġ317": 37563, - "Ġ318": 39320, - "Ġ319": 40385, - "Ġ32": 3933, - "Ġ320": 20959, - "Ġ321": 39595, - "Ġ322": 38831, - "Ġ323": 38446, - "Ġ324": 38595, - "Ġ325": 29524, - "Ġ326": 40660, - "Ġ327": 36203, - "Ġ328": 39093, - "Ġ329": 42141, - "Ġ33": 4747, - "Ġ330": 25508, - "Ġ331": 43722, - "Ġ332": 41423, - "Ġ333": 23460, - "Ġ334": 42819, - "Ġ335": 37144, - "Ġ336": 38867, - "Ġ337": 42294, - "Ġ338": 40736, - "Ġ339": 42489, - "Ġ34": 4974, - "Ġ340": 28560, - "Ġ341": 43155, - "Ġ342": 44341, - "Ġ343": 37290, - "Ġ344": 43686, - "Ġ345": 39937, - "Ġ346": 44729, - "Ġ347": 43292, - "Ġ348": 44084, - "Ġ349": 44367, - "Ġ35": 3439, - "Ġ350": 13803, - "Ġ351": 44417, - "Ġ352": 44063, - "Ġ353": 47567, - "Ġ354": 46752, - "Ġ355": 36561, - "Ġ356": 44552, - 
"Ġ357": 45210, - "Ġ358": 41761, - "Ġ359": 41934, - "Ġ36": 4570, - "Ġ360": 11470, - "Ġ361": 47744, - "Ġ363": 49327, - "Ġ364": 44969, - "Ġ365": 21268, - "Ġ366": 44856, - "Ġ367": 40884, - "Ġ368": 43019, - "Ġ369": 45620, - "Ġ37": 5214, - "Ġ370": 28687, - "Ġ371": 47343, - "Ġ372": 46633, - "Ġ373": 47946, - "Ġ374": 49020, - "Ġ375": 29414, - "Ġ376": 44622, - "Ġ377": 42163, - "Ġ378": 45473, - "Ġ379": 45937, - "Ġ38": 4353, - "Ġ380": 29101, - "Ġ383": 49814, - "Ġ384": 40400, - "Ġ385": 44826, - "Ġ386": 48340, - "Ġ387": 49689, - "Ġ388": 43550, - "Ġ389": 49633, - "Ġ39": 5014, - "Ġ390": 33882, - "Ġ392": 48207, - "Ġ395": 42321, - "Ġ396": 48758, - "Ġ398": 39260, - "Ġ399": 43927, - "Ġ4": 604, - "Ġ40": 2319, - "Ġ400": 7337, - "Ġ4000": 30123, - "Ġ401": 22219, - "Ġ402": 42622, - "Ġ403": 38210, - "Ġ404": 32320, - "Ġ405": 36966, - "Ġ406": 45439, - "Ġ407": 41879, - "Ġ408": 41247, - "Ġ409": 48132, - "Ġ4090": 48908, - "Ġ4096": 42479, - "Ġ41": 6073, - "Ġ410": 32921, - "Ġ411": 43184, - "Ġ412": 42215, - "Ġ413": 46618, - "Ġ414": 45900, - "Ġ415": 40643, - "Ġ416": 38158, - "Ġ417": 47580, - "Ġ418": 45959, - "Ġ419": 48475, - "Ġ42": 5433, - "Ġ420": 28262, - "Ġ421": 49294, - "Ġ422": 46588, - "Ġ423": 49125, - "Ġ424": 48252, - "Ġ425": 36959, - "Ġ426": 48065, - "Ġ427": 45345, - "Ġ428": 45063, - "Ġ429": 42313, - "Ġ43": 5946, - "Ġ430": 35090, - "Ġ432": 46393, - "Ġ433": 47407, - "Ġ435": 42671, - "Ġ436": 50038, - "Ġ44": 5846, - "Ġ440": 33879, - "Ġ443": 40384, - "Ġ444": 45095, - "Ġ445": 48655, - "Ġ448": 49989, - "Ġ45": 4153, - "Ġ450": 18523, - "Ġ451": 49356, - "Ġ455": 46839, - "Ġ457": 47996, - "Ġ458": 50154, - "Ġ46": 6337, - "Ġ460": 34091, - "Ġ465": 49669, - "Ġ47": 6298, - "Ġ470": 38634, - "Ġ475": 45881, - "Ġ48": 4764, - "Ġ480": 23487, - "Ġ49": 5125, - "Ġ490": 45601, - "Ġ499": 48391, - "Ġ5": 642, - "Ġ50": 2026, - "Ġ500": 5323, - "Ġ5000": 23336, - "Ġ501": 24555, - "Ġ502": 47233, - "Ġ503": 44541, - "Ġ504": 41612, - "Ġ505": 43367, - "Ġ51": 6885, - "Ġ510": 35148, - "Ġ512": 22243, - "Ġ52": 6740, - "Ġ520": 36141, - 
"Ġ525": 45719, - "Ġ529": 49888, - "Ġ53": 7192, - "Ġ530": 40585, - "Ġ54": 7175, - "Ġ540": 38190, - "Ġ55": 5996, - "Ġ550": 25240, - "Ġ555": 44717, - "Ġ56": 7265, - "Ġ560": 38089, - "Ġ57": 7632, - "Ġ570": 44626, - "Ġ58": 7618, - "Ġ580": 41234, - "Ġ59": 7863, - "Ġ6": 718, - "Ġ60": 3126, - "Ġ600": 10053, - "Ġ6000": 39064, - "Ġ601": 49231, - "Ġ608": 39084, - "Ġ61": 8454, - "Ġ610": 44300, - "Ġ62": 8190, - "Ġ620": 45469, - "Ġ625": 48868, - "Ġ63": 8093, - "Ġ630": 44505, - "Ġ64": 5598, - "Ġ640": 33759, - "Ġ65": 6135, - "Ġ650": 22626, - "Ġ655": 45021, - "Ġ66": 7930, - "Ġ660": 41717, - "Ġ666": 43364, - "Ġ67": 8275, - "Ġ670": 48136, - "Ġ68": 8257, - "Ġ680": 40554, - "Ġ69": 8644, - "Ġ698": 39861, - "Ġ7": 767, - "Ġ70": 4317, - "Ġ700": 13037, - "Ġ7000": 50205, - "Ġ701": 48173, - "Ġ702": 43379, - "Ġ71": 9166, - "Ġ72": 7724, - "Ġ720": 26250, - "Ġ73": 8854, - "Ġ737": 37517, - "Ġ74": 8915, - "Ġ747": 45600, - "Ġ75": 5441, - "Ġ750": 19683, - "Ġ76": 8684, - "Ġ760": 48284, - "Ġ768": 46720, - "Ġ77": 8541, - "Ġ770": 44586, - "Ġ777": 35534, - "Ġ78": 8699, - "Ġ780": 41287, - "Ġ79": 9225, - "Ġ8": 807, - "Ġ80": 4019, - "Ġ800": 10460, - "Ġ8000": 38055, - "Ġ802": 33121, - "Ġ808": 41241, - "Ġ81": 9773, - "Ġ82": 9415, - "Ġ820": 48964, - "Ġ83": 9698, - "Ġ84": 9508, - "Ġ840": 48777, - "Ġ85": 7600, - "Ġ850": 30607, - "Ġ86": 9849, - "Ġ87": 10083, - "Ġ88": 9193, - "Ġ89": 9919, - "Ġ9": 860, - "Ġ90": 4101, - "Ġ900": 15897, - "Ġ9000": 50138, - "Ġ91": 10495, - "Ġ911": 16679, - "Ġ92": 10190, - "Ġ920": 47679, - "Ġ93": 10261, - "Ġ94": 10048, - "Ġ95": 6957, - "Ġ950": 38384, - "Ġ96": 9907, - "Ġ960": 41263, - "Ġ97": 10111, - "Ġ970": 40463, - "Ġ978": 41417, - "Ġ98": 9661, - "Ġ980": 32614, - "Ġ99": 7388, - "Ġ999": 36006, - "Ġ:": 1058, - "Ġ:(": 36147, - "Ġ:)": 14373, - "Ġ:-)": 47226, - "Ġ::": 7904, - "Ġ:=": 19039, - "Ġ;": 2162, - "Ġ;)": 35540, - "Ġ;;": 36792, - "Ġ<": 1279, - "Ġ

k$`}#af5N!&==qbe$T1#D1%1VU@C> zw{3{L5>K28Rc@&QVwN;&bu5!Rk|`3X!Jkj2M3!SLC9U_cJ< zu5{$>mBW2|0LZ303%b`GL{Pabw8ozmhCK4cN(7KlaC|EUZUuwn<=~XhE0u8r;r;?A zU=~Rxyjvghv_8`+jA!gYsns5SYurDrP>fqf&L=@1Jr%kV6yiUm`@R?WX(3b>7~dd+ z&V%^6xi~#NgCi5_M)DZ|-K$MNr!Z9kZv|9At{ZtAR*n|ASECvJiq@r_nbTK!yDc(1`Gy^^<8t# z`B^Sn5gChrGd^D9zrybbFA5Q7jf*d#f1t=U(KDJ=XrJNEzYb)bK>9<&Sv~C! z<54tEo@HUjr9mtiQ4vRuxElR$x0$x<@WfF`uouceAs2huX@@Xl%*(q_mT5&g9hnU<<`GMN9h(`>{UWrl@;vwO{+`7i=r712Zrc-CSfn!dMQ&Oz} zfjY>}%+!X&pVZ%P{f+zPa^y@6p|~tWoR;TJ{o^XQf4|i_Wx%&C_shibYsuj#iWt=D zpG6+1wcBlN$*monm3Gt7v|(#>(WsCf%dZ^h;q@X`!`l@`P}!#+j;&Cb5I_sDz7vW8 z(I*gR7&Mu-9I&)F;>^%;We||ad7EFjH8}_t2Z}qjB4pIme~J*<0U9kznUthre4cO#(x8P138i1rU#(tjerktkYFB2Ge5`#V{G|WN ztq+S08Hck|Uw|(tQx)-76b}Vtlm&wJjc;gmR$Ijbf$%yx7+xF?IMX$+UJkIzl(@w3 zDl*mKiqLLq4ZA&TU{BzIKB^C_y`2Ja_2vECU%7#or1S)Bp?$;r)(RF12l+f>(^3FN zUH=4)r-csk9tPdj?Rkz>fcBOn!w8b5)LsSuIjQ3`cdE>6I4;#T=xmrh?4LY!n$ep7 zvL{Tp>KSHG`zwm~kp#m|mzCj23t-RhS9Q!hDjiE^l)Gy@&*8&|dR(S-nPg9hW|07S zmpd8mnpjt?SAoZ&kVWqD!~r>?qY|y z=i}8c(*yKFqQX)Xq2I6Tu-k}Z71H3MDJip%3j@;ZE!W&z5O|^PGC>8mZ`su7I;{H?$8|a+Jvug%B)+Z3OHtCp(V$ zJs8|}Q0fe_4umLxe+7`_rXh(~K~h`r-d>nvaH|C1CQ_18GH&5Ma~fdb(TVpbomoXz zZI7s5$|Y-zMB}m#pn^CqJT63>Z5tJ9MD#GwR|?NO=%wvF@9*zl0$LdM)l)bG8r1#s zsx&6XM##{4l<1-Z^AZ0}TU^7)#XpPyMaW=Yg>qUB2T&m#)A%N2+0StBK-;p)%&I#d zU+$!|&{*z2a7?Y1FP_D-B|ef{Ey4f(&P^fWA$%Q#rbgzZ6mVNQuOGUB##f7>2*uZB7h&NixkvAgqJ;NO2B~~1 zIb@dT4o)${Wlr(VE9v~#yk0znsZOo>_wEh2Jm;@+tHfl_{q9pE8s7%HwVj*$S`{r~ z5N*81g~vl1v=y5TP8fcA#wF=O6@{az)fc?xp7sSy`KPF&lS&1$|8$b*CajH5XnR>^ zmtE#018=0ZsGWLduk_E*Dz;_Fnh%Gn3S0es6aGi0;&)EiL~-`}@KE|Wup3NyleW>v zq38T8w7K{|LSi9L4;?52<;-Pcb-iKBsASS~(*;D2jU!NS zgKUGfEca>QU&!V7wmI9bzH!^_Vo@QjT}9AEomz3$LXvSsU^R-4Fw@B z-aGOnvsBB=4MCA1O`R%~?C6=muh!VItHkZSg?jCo)&o5xo|e&+TQVR~dteJ}yjT66 z3499>#nSixmXM(Rss`Qk{kd({`~8!n$JNQ9L#V?a9W2)@sQOlAMazU#p z)2seW8WcIF_b1o2S2PQicxYw#U(ocqy;cU1@o(DpJ6R^?7Em$4eN2nfH()dI;O6=} zFxu+pquyW9aoG$fMh3a@Q}p71cE*kVZPMfnV%p*~E?%VU*l4KB`s$~Xwd?Ll&-6*R 
z(PkPb3|%OW5duN`*G$)7%SgjV6p-Y>3LlYfs@;yfAxYsi&76@d$6zjx(b#4*I#pUn zz4OYEgp6WrsA>nS;s+<*9{ezt! zEGq@eRuzk;k>gY;W&5 zc)3oAE~bPUM=#i<3upN}@(t_lXZ>Yia{i|CxJUu|)jhU<;?lizZCR7}++Z$ngLonN zHjV}faOd$fV|6=M@sI!iAFN_(tKumNKO_>-qn6j!t^D%w^=UokZDi89hpgre)<};{ z6hB3-m6_=J1UP>ZvG)>P(iORh_0o?FDv?)iXLx(?$?CO>`d3#Eq44Lnd3<p=Y-Gc1cV3&QlW|f#evfkE6mJ#d*K+%7%>28d{rMGtWS+zh5d$TNZw$Zc`+1b$~{avILawz1A}=~e=p1L z7!BFuK_(TzGRpmTSeohWTM&4ot&6WlcL3=K8yb}w{`(X)mK|vd^|D%*1FYG4DEezg zzO0g9;HkU<k%_*tSb3DlvW=)f%FSVBntY=rZ3SuDgKYd%)TigkNG@7y z&zFf{V6j&A(T=md+=e>gC-3;BOk(+BK?6p?q|a4B_#pXO9zJr!NJm?|(tG-Vyd-?~ zchz~>%Ie9O$i;UYe3263TOU;VC(j@Mb^o?pW6j=pM}<|-e8`Poojkt-vx6CD(7LaK z+rSHOLY(p;V4#j_N0Oq{M05X46rF)ZEabJqEw3=~ zW~87UH3*r0;_ejd(^mO5IP>fJ)^?qlq9WCc*@b3)jb*!+htB}jEwn01bTS4cOQv2A zV-v6wG&fa*L~R%JgK*;pKF3)`2r_|y?&|IsIAOU6QE2?tjx zfrOn=VSI(yYP=HSzcix(5j~nU=4TVm>^n;;X5tQe^`qKLCi%~?#*o1pKD>gg1Cz`j;U_cn-w=i9 zm-LUm|Gw%r{A%L#)Qj@nDLu~?Si{LpMjE)VT24-xR;VPc@%n6gU&I>s&%04*Q-~6H z&}wOEr62IXX2D-$XMcYGN)^4ZvF|sP1e~}q+$|~`$lUB3shOp zJ{1#FG!dk#JrlM(yLz^!H%5o}0*0o$D+MFyJW!V>d0JL&;GDj6pYSsjF$UbgA>@RL zz)ExW^o!wIAvVq$1AlaVVfyLAVAjV)#&f;0z#>{guBO8S7Fx)hN|&ypn2G|lMo)Ll z@MFBwjw0@OLJTQw%3d_+lcqQ!1(b{Lz8(Fvm_MKVG%!ShDw^>tw^NOnom}VU!nDvk zTKW-eK0>|B$&3xPVgmfBFZh~R5ev0S9Sjz}GcA6`+IyFo8s!GG5Di>D7NFpC!z{H2 zg_Nlwgz%8Ko%g88q}Z}FD$0+p7~QmN-KLkqp{+p{+?_v4RJ6p+)A z{oRMOj~>M7La5+LPKU^uU5^wmM&9l{882)#ySXvI_gcD7!Mk7C*}<*Pc-C-L2h zrQ)iSKdz!s8jotc5zt{0QAnBJm(1NdI{)jYV#zAGGdb9-3{eRnJR+6z9X>4o3)+uW z`}y_qzEG5penDk&mk)X<97#3zg9yDs{iMZz)fyM?$J;ZC2qPI8P~~uWxq!;d>ULTa zJyfZn@%v%@g9H^Fj?lng?sxli5PL6Vt7jX=fh$2(((K6?U;0OGT|}-a-sG0PtpG>6 z>c;L%60tkb=K!WyS|-R8DILp_iJ7e;7z!)qEE~f47qW4RAGC1yzE6%tWwc^_f^*d_ zh9`c*N8lFnPK775eRrUGlB!zI+d`VSb7qU*MAz_J<)Hj(-xC>Wn%NV(qz$wZE9f^x z(9p^)Hnz`HyAid5wxh7F{dh)%Z2VO{S1|Jb3D5rDPnYJLLksFE9bX;tyk0bjy-$FQ zWh{DEcG^bnZC+XFMm1HHcA2es*F&I=hW(ALZJjhaP^NX(yHT%Gx3loaLdXt->p-&QjQ%PH?V$dLbI!>oHBX) zfxGQ$6|92%$7iCQ&TD7bP&y|WFpBVoG^4LLM~{oxmdjN73EW(2xuIGyri)0Gp`cjS 
zWGS)u8Sye4Rs+uueNlL!n~E`bYWH*BxeUM&Pbf$a2-m+mBU!ND{IYgeR8kSoW*dk_ zL#Qs`6*jDj=04aI$5O?p z!So$ICk=AlvXBcEdA}74n6tu~V9`*#D^fHR@s8Vr!=E`Vb|>h0S@X=EE-oc)c?TzI zjCEO6UBZ~ZMepAoyvS9m$|~y5nFV1-GV3R3lb}uie8rRdqhkK*O7IpkL#CHE@ml5n z-tB~E_(L=yZ83pwH1JJ)=_@1mZ>!LmQw%uK9(HKX>5X(kL_+=YgE<;U)PM?^Z+R{49Pu-G^M zm>^)MkYR79wD~4$FNw;cy^!@x&YGq81eg-8b>Trxx{B`|eO7LPAn%|eR1}qC_|LN# zpBu1BA{o8=7gIt<&wfymVp_>#Y-Xg!uj8c%ZIYb2CJLZcs z?vlYP6t6Q~eAV`O3(=xR9!6$OMvP}i1A!Iix zm>nm*DqMrMZT{co1n|;9t)?KYQ^fVM&{ecnJi_dDBR>0z zSS~pR4|2LfGc^rytJZtAv!7i910EPeC&R!3lsMj<``7*JCt4hICle=rAj_Z{mo|cf zp0QY4xAdLP|M=5wJDd9(!@X|qK4LB$ApH4d$2@p3L-x6XM`i9n(#fJ`hQ-CNn{)Gt zondD&>a~_iRTH;{OIB0By7WDSSqBCUi^F>tZ%XbpBazkvI8zd|X(bB3 z{HR=d4TfyhToa&?>@-J2gC@Qx#H8Yz!EH{6c$rX&XJ=pcMsRR?hbib)yneC2#7;K# z-1@#ajt}Ro+ zv{ec-kf-{?MKO40|2p+c`d>DwN?4EiV^zC_j zd)2~RpRTMTLhp@PTaU)mIrzL;xW2;|G7^&rmp-3M1q`pl`2I~3l+LDIwDgsXDKNMm z#9k;pDu~ka+cyCdToR6*qQ~de_a?t+C$DuBTWNar>?3~J*)Dg}K;`v3-dhrXXUiUS zH1U|!mB}wy9K`Uj=r$sEa*oDV@?bX^>%TJ|#S}LLe_k#U+5o}~jLwu1ke!r5{x>{p@%C<{$9y;pYAG z8E!%2Fr$uy>^rE*vRh`%YYx=@=C-c3=9LgP1efeJ>Y+P&!T^Vvg+ zwSGXL6c=AgLBjmDgmWs~(S?hB-odN=&8DDSd zFxyE02ngDj1#RPn+gt3pl?9*A3L^;0cz4(C*&wiAz(;$o0~fBZE}REWznn<}jt`^XJKq(5#l4qQHe(F$VBn-lH8jhK9N< z@v3C5G=2!OeH!o^CJFDAFgqd*{ca}12}xcnw%_PndLt8&F`MD<-=eZJhENEQ8P2nF zPFslVQeXWI)Qv6v(f>s7=NHhGVWgQ3oiHuklyhplVG{UfGYb|jA$#UhF1B6DwDPhU zp8#MQD$}E7{eJDDtVtx8f-8&Om6xrhmQ2h2CEU}>O%*VvzD}HTivit~IUfrQWg3$A z87FqtOIFN=7W!DW9)bBC-t!;kJ+9K|v`hX}{c20A zN`szVNO##U|D~1-o27}}c^9unqzp62L0HTF)?Qe{3?#ieMeBxvSP0}*lJEaWkUMlG zr?D88z%R<`kh~IN$IR}D*yN=;6)Eb)<~22x7jpC*K;B(PAl@$pC}96O;4A*jZ?|N0 z$u{@P6JyH{KFy}0%eqQJKM_P!R3r;yQbAOJE4lBtPNXxWm(#c{-R@++5PiQN`)rfL zBJ2CIN%iDXWa!2;P~=e8G) zhy>kpd;GLCsF3|=Idi)F;q>+4I={k$8Z#}#?z454bB<12edatm`wzd$78iO-YUK6l z{AP}d+Dn6l!`CqcnyJeJ#4cyYIXQW_t9JiVrQx8@W9?QLg7bz?MH-Q_*X_kb0iC=p zgk?@evUL)>c3iX6jPwuUH8p=<`bKP9_eOlYl7Ne0(mpn{vBFg66{;$u_@_Uy$r zoV5%|c0@VvNgSRskssRV6h`XbxPchaGKkg-{#rZVxal1xe3e7++71&()<|m`2rIab 
zxL}4tw*%O*9`5*Oss=Zk`>>67ER8jUhQ7kf{_GLENNHFEaEgVB+Vl=Q{R=)ay%)e@ z_{)a!N~K^18sq_RX?xfmnL57}hUJxZsjcqmpYw?If9!tViPmJXj1~KQi?5ZdEOR(N zwtw4r%sQ#j`!KTps3dPeWhq&m0v>C5Fn7bJUk%ZA9L^4q6VSN-Y($>KziHx3AyJP=A_ia@qdJ zCnj+ML4CSE0uEcsTnPQ;0t}A=<`2J5fA`$gy@==%_$}rXo+m13JlORxHt&iw#g;It zx2b-{kRfLTZkK;YMHZh)V9!Kyg5+YdeCyx!t1^djzwNZ(&F^qbSfcoHOvv|Kn2RDD zHd`|J$aP>EHzUN*Gn5;0X~YRRcIN>^jQB~~$av9>#*BBsrXmi*S#StQME`f=3Ib&d z(?GD0Bj?-W6`*jCG#S>fn}YEAXS3B#vKWZPA(VKd2h^u_yZdnU7Q2u2-GvX$1m@}= zk6c3|-Z!5y*BJ6SGF|hZXl3FGzV#*<11A!;0+~S$7l5Xi2Ae63YYi$PlX8{C zf;9IMm-c4Y3Gp=i?Mo)(s0J6a0xFK~_v=44BX=PKMADbazAYX*=d0Bwa|tL-4AcBa z`T4Ws?X5#Z4^o?+qFyDW#fd+qiO;^LK(pqBHH94#Rfqxw2qn;ENy2USNx%}a9z(|qUSnd z`1LcpOOD)ZtgmO`)cdBuEc0SVfv;Fb$HnRP&Qe7uP0{9M{K!9b_9ucWN-ZF6Iy-I^ zL-54YiH{boT!84BL7>)g<1+?GVUeN?$?ztcy%kjiO~2knoMVX%eKNUjzYEKsiUsWB z+7ILwIO0VMvi9mc-x{cS(QRRG z(t|scowoF7EsS1<$4WJFYkd@$fCsft!8 z7;L1>uSiHRFmlD7$MerH=q5z0sqfzSn^O*7jSJ$XB_R42e>5LBl7e=Cs5jH!{7%iB zW;1?FWReZS@i1IiEBB%>=s9xtF%|fU4&^`()QqxHk@G~VMA>n-JfhyhT=*vR29qge zJrSKTWjQ7X&K6kz_ny>VuT_BbKp`{M#$L}I*TJ~NBJ#MJN~J;;mERO)hne%_>njr1 z^IOz0>1E73&6wBe(NMr;>^>uAkFsH971>W4P9A>kFzlNV=ybOQ(rp2cbp#Fn5;gY6- z<_$ktS}~DGUrfyV2VISsp-!jqvZ|`JogGctSGGeJyS*j&zrlso@l1GA=~1 zQuyZ;U6A+kSPB14_t&rIA3D_k__pYa+H!Kd!Ucn96pOqvH6%xg4N^#x@Yk+n<%45? 
z+R)*d&*lr#E2wm^wHJM`Z5Ypc{@vFa7w-r472&le&*#{VUBA39s_;opK|g;dUW7!B zg2cgGp3^>M!3q^#vd(j3!(eeXs8%66fHVC1P!mAo75N`TLO!T8rCEZ|zCFwxIn z=MRBBOW$4X=C#kH9vX-uSWFoYO0l_7M3vx~)PeTkZaVKAv{q8!WinvF%e+4UlV(qt zAC~2!o0g=@LDrarjP@^jK%CRKUXNO#RHky1a2xSRP%Tq9t6n|07h5&gbMYB^F)do9csF5Knw{6m z!;@Sbh8BDx09o|508`MoDRQ@%uTaQupv})U1Ey~bv+Xa8U%@XGZeFc@)#2h9?pI?R zM{s{^#F(aIo{wH-1~3NP{B}xvs|PvPfG{_)fHAO8KirUW*HTQ6WmG5m6OXO8PIBB~ zu6O0BDkC%fLHlaD2RBX)L0MjUtP?M6!HOi_l=O1D4lt89d1sfzG3R=bU5j+IIJqPa zw^yIEe@Gaaa;scg%=;Z4k7XB;2^r~=NpqHBN<=$z-A}+KC)7JfM#Zo_&gMkh5XE$P z57vKp4Tx|F2lfFf)(P+Dy`+yzP44C6L13Mfc1Ozi9iOG)PW>5PwGX31*l)T-H4mejwVVfT(zBn@;6XoJb3>>@(l2j@BB{>BcVbo z%kwg$N`nW~1Ad#4d}?IruKMs+4_;;TtZ5ud+M>L7F?P^^Lqr77nK?0`d{p&!Hs|HM zqwF_I4Y)<>eHqGkh?ump;L_txrbJj8JQ{>7G0FnKZ9H}>=?e?#=I@B{N z2&p9(4+CHDHtN%Bk+kovDNKat{Ok+AIv~BL3NUn(GdVKxqBIoFci|R}eS+THlqd-b zX?QZTI0UPlXJ!(^uvr7U&hd&JyO%(u;_sx85I4dvU`==9g(GbMoK~r9b-2?|^`nuL zN<_U}_m4juq_r#bAYy%aU`Nwzc;Qt(??R&MNMiwYl18^!IW6NGnmt^8dzbSb5EwMb z-=co%E|mBt0XN@4{GthuA3w`idZ!@8>TN?sSW-5#C+O5?RaP(qHwz?v>vG=LH{83} z6`SUB-UUn3hr+^m$^q<=`0HXDYzSqzrH+Uaa`fgfyNpa_eJchEa)nH6h!zJFDp`-W2Xb|R6rGeZCq%VFG3SkDc5~Y@#oyD`Iv6|bvc;7E> zA*vOcFn1GrZifR!XvRLj>$V%EE=%`p?`T+pVIqEp^uqPYUl3Rbr!*b}ON&U<0|N4C z9Y3AHadqQALxC{6N@VOn%;sWCnNt^x;AnHpii6hfBXKHxz({hm9T{rf(;~#YkZ|Q$ zY=T0oe;gA?@;!I#fPo!>XhkmXmXlTUr|DXgQ zqk!za>#-yZcYUYM$9Dp2*({y7&EP)7(KcR*4ot@_)C@>>*qXNQ-WvMcxyD82O6@a{ni_qR5UQr(XubT%mvfH=dK{mTOYM zKiz2y#h)fN>^QuU9=~FG$43B)ZpwXt#nNZ|e!81REACJqmsv@LGc$7zoEHxHOov41 zfbB*bqF)BU)}NIK%Sd#QO0UguXparZ)8G}gk-}DZ5cFXb(nhR)L<7$ z+%L@1mpZyKHQG4``o_gcE6$_9jt1ymbDKMC4`OP`(*^}B3`X|qKc7#L_2oqQh^!?& z%q;V2epw~nUEn|O_Zn!5iR}q351{;$N~q(cWuyBoiWku)vMrzIAS2?Dp1MfGU)gw? 
z%yBaDRvNQjSf?|afH27)mevdh5yn=C%h-kWngxKGYO?k?`v7;VO|KD(d=sKt+A$im zsHB;UOl%Ad>OiXIHg%(51q?WGsltU>^kk140gnPZ!@Fda-=Q%X3}BJQvs-?024DXD0hPgAfyC-c>RX`hbWnIEa)I_FsM;|Ol5{7 zCHW6yV#+LQKv7GWdc~UeWmO@hFSbpQ)U<#_UdO7w{T5I^5*48oVKA)?0)$Ahx0d*% z3mn%s<2W#K>Z!Sqn<9)a;*v7_u2Tk>@#3b3xI8 z%EhL{!+yH&YCh!Qdf@JTJ`UUw3V?iSGiVMLCcjdq8dOL`cR2lhsA|>$?k8Wo#$ICo z=QK4`6}Z9+wAc*`rAK_#$XATC>usgv1OxDl)&^j;&p zjtrC#NXJy|(84w#;KH)f%15WbTXW=@90%oY2M)Uei@=*tc-KiB-QSaznQBhl8!InF zD%$&`I2FMiNBouPX~ktZoVjhC5+Cvib};kero==w6n*)?U!e+d-qT(+MER2f8&mjp z2#HDrGpoBoU@m)wptW5gH%lwiA3WZ=BnA% zsURPS<`5%42+J$tn^+?zMa?(uJw86-Ia$afb7*-UOBs6OF2C)7XK(Rp92-Ff(=0{r zmrB=CiN(|6X5kFX=?A_O#>$&<74%+yLxLkz7PX+k=cL}d!K+MJ`pPjFEX^ZuEQ|&V ziQ9HrT|^_AWV^<%R4-S!Rm}U?Xv|=$5J$6jHRk)e-=yf5-ZO2#?W^lhsuV-lw~Tn6 zskVLzSC4KTYm_1FiK)!&djhl8Mx!_@++uW{?OpWJXo^tn%$<|=k)&Z}@k6@G*pIMT zSeIJ4l0tUjz%k1SQl&Z8X8gz5v}@kLe@jJ;&!plmJ4_iZny_h-4Xe=~*cz~~Pme~) ze$%D0M>5%hb#hjKpXcq2E~b5HSFBdKRS`iG9yU#TXI=oMQTilj?Hj+WC=7}>P_ehe zeg$T4^6BR4;q6&VD!<1Bnt&gMPI7eINg>yz87Zze*rJ~kRu(m+cG+$lAm^K3qJJR( z1mc7_`5j*+d(EUw=9Gb_RGjjfMLab{97On(3{1cJt_Vd+M)`XhGin9(N&ReH*ot&E zzQl{^xKQcf$|MWSid)d%)QXkTPkx>4|GCNFBz+zaOx!D!wLO~cp4?)WbiT1cwb9)s z66;ozcd(0bkt2Q&z4j

WLW($?@^$MT z2mWs`qCE#BwXF>^8`UARf19Ge<~%v?PYTAwd}yptThZ<9bD%GC9L456g{UaU;*#_Y z>F85!qri3tlyB-wtA_~Ul12Dir3d-Sr9R+W^gY6d&?J-4_kGUAeL4B=jaRrPg!^4G zkUreB(9X*$EA)Gb(Y7q_L?5rAv8ueo$$B>}3)h+Ckh>C|rCALTc`K$OO&=}?3{?rG z>2JXXSlxDW1dA&4?m8aF1AVufPkxsrZF|{%qwBpgaqSgsI^Li2{v#WAe{spQrRr=o zcA+pQiW_8h@A=xp99PuLRY?+i0^;v^G!!xMbZq{(jHa}arg;g4F5{H&bCz@4DCV>` z6c^FX-=g{lK@#hA8=(HLF<1#PBBmk_yIMyks<@9cu`n+-f^bG@^O)gk?2^xQU*@cO z_#BQZ?}Gd9zC*#kI@z5#?_Sd$I6Vw8n84R9p~1uoN-*KX_LK`^8{N{mJaIK3X5Fl0 z%$gxBa-7Zim4_>q&Nt8X=krcW4^Cc!{v(Y4BKW^tgy?_qu;@6So^{nEb6sjF8Z^t% z9{*P$NYseix#WwFKyYsct)w~06>n7AiH!ID2QxeM1M2p ziMKV(W!X1-iIQ>J^~?1ly_A^Se6s|@^)vge+VJal{_fNiKn8mteXQkc`M!G$S6Ff8 zb`Dd7La31hYaJ#U!ZC3K=}>#gH9uv|BhbB63Z>H;Oo!+;mue%%&`rOx5 z1yh$nLN-UN`=%rc7d9OJ_Mv5tU0t-;Ruah436}H^tb(-RfH0OF zbB{n_lgM_esR4tb$$H6fh~25Ryk2N%8gR|oYG!yg17Vgu|nI82n;>72D`xPZ!H5Gyww+8-IjYt2~so%v3`-J&is;aW02( zQ3&wm)Qu0RyPtkG&rE%6FNXwti{PnPJc1At?4bJeq0@90ug#MEO_E!7p2XhEF2g=g zP1lmQr)SnYFk<>a?Zt{LE8+tcS*DW}d@69?rTpG1won&o|0Gg>y}EWNy&sXBKrMHF zvj-M>mg*ZN`j+`nTPN5wsI*$p{}%ASIf^)9s=4i=2P%>Nn|sHT28S^uOKrH{Wc}YQ zmV&8AV*3YU^T2*(p75Rwv*5H)yoBBGb|$QHlR)X815jzHdfMN~m{a4z!Ut(iIllZ` znC_Hc>Hk4E%3A%o)C|*z-%yY?wI;-GwA5tNl5mLa>pSKr=Q@fBP(Ijo2@{ZI2(t*@ zElSqrk^ImL7Bj#+vh2wM?&}Y@rdGo&0#335lzi`RUQa6_m={kaaX!N?Hq+Z3((h2} zKD3#?+8Uw_>1++6T`h+FKeith7o;Se>>f3Fs}_iEXu3dmk*1P`3gE;f6_vT(E~*?SGl zE;ao4utv=wZT=nH^`miPfZT1Zzjfn5#I9q<^?cng4Nm5x-rKr%g46R}s^bz`&L~aC zTl$z=2&OcxUIiN-WjaY6supTOeQCu&ICxoTTDwGoMr#hmAGtmQ{`yg?ymU@dBg>!2 zxX{V=Iw!{9C@#Wl9(xf8V`v?}V5(E@{ZX9lSwbQ6Psu{Q5*x|wjP2#x;N3-h71*j07Wd?PP>fG~HR1D7o2edE57B zuX8OJOp0 z9qH6Pw)`i>DAW&~1z%5<+A=;u^HVvUu#5{}%L9mMCO}z>HGkmg18X0m>j8ex^KO?; zl^$>}m=rCIB0X=g;8-<|_VAL9Ybl}QPaVmtBmoY|!z2;JTnc^?qDH*&8^TS0Gp{&` z?xV_oe5v=l)bxe$K{E?Se;Yyy>aDI^{^JuN#&}DT&ih$tz(*+L!0#yW#Kp3hB27KL z(|MTN=yhr&;88RR}+y3U;9U zJ~HpXlU8?L1lhE6aTmoGob)e>jF?#Ma{qMUcJU z$j;M^=qtG=J6l^dqAGN`I`oy+3E=ngu->Eex@|Lnvk`<*tJ`pyl-l7h^VaoH3_@sU5_EXb!wFu*@dgvnC(UC z*k<{|k})x{!J3}Hc)u@l9TDdxM**|L&$r=<_^5Q>KZaLuh%8Q;w!+bS>G?$;aShGx 
z!H|5yXFl$fG=HK}>CS{s6 zNE4Dd*DkMv%K-8pzDN}AsUFC_GCg@gi#_Al>aDS<(j~pMjq(q-*SADFuPMR{jT3J7>Y}WHOy3>m09Jw^EBlJJhr7d(hy`G!@2Tli*NTBsY<)Qjcy;H8@KTKT?@vce zen9#^6)Bx5ysvwSxy?B;A}0t8&^#<<+vWPi-+sFU-m?$FaTJ;qTE-@1aGZTDNb}`G zfP6>7R!fNyfIk+1%N)&2=vVlmgUS{4fasKLnZ$W8n`$lJ=;GmrYN{{Ni&JSkrlSCb z({oi&0T>WWYs-80G@lBX=SUv!C@#TX=Ls`YmABG^i;=@;$uFfRhv%{jDWw)i#!Z`! z9PhFtvuq0dV<#-L`RC$M*Y~vZjWjHlJ?Bx%W|}(x9fMXG;!OzsCJGPr-4UL8Df;D> zig-%3d|v$MDLVFoA8wL<=MEu3E^J_C>n2N%8%|;c?w~L5I|b}yLTz14--P~AZ-*i4 zsW@EC=Jwq9LP{3vec)i4$V=he?MsR^fjWpOCPvs?5dRxsFe|fo25tc7ZKO0^oSjgn z7`}rvEIQZbF==H2hSbD8MGjO>liFPDT1YKa?4#%>!oet z9Ze+kl4HHM^X16U$YV+lqY((M!xW41BKXVm72I8z-g=x~an_Rd?;js=vMr_QW4>tq z>#AWd%KTep+con#G**E7#9wPie?X7|DhzLExZLD&0YaF)t`#_28f}jA*l%~OK;oWg zx<)m46!lBF;Sfp;T6{E#TUMqfBH1FIPw2PnAyLSW~{+oy`XrWROZ~KScA=vQnlqKKLU@T!udqCBMj1>#I)Lm za(3)FS7b4&F+J~p{~+4dVIvj`Mi+}%4#(2CH8rzw$T~w+93}fe@T}H&8wVY$yj~WC z)HbkAZ3j=i6YDvFtmc$QU*eH4q?J)1`-`xaIj>$&nu??nw8hWl$PmH-c6bcFcRTN$ z*voimkBI@PbImxZ_56JxgTN&oIbp1({y2Vqp6W|>m8bdPbCt5vbogb%qmu061B~v( z;<(#ukKg{ay(byPEWTE3hFMTZ0-mPnxEwi1$1Cs=Mbu~{yc}7@#dgkY#MpVAN-c(VgSK4;vi|Pmdfe7Tz2xxnP)_=CTz}FmSuS5~B1v z{sJj;f!Fbt`Y-#5sKbttqq#$4g*g)5iyCCYZPv@By;LHM;=veHRrjf#lI<5!40V%F zPchzCG=7s4&C&mmebOb(Mt+re=`-<*4~~{>tvdjsiyJx&g*4#YA6`VqY7HQ!3~y;0 zvWND|eT|UTk9)(F-t0GB*)1-c-%WNig ztLw{<-QsuxQImdzm|9Ye;i@Fb3sWncZ1JKf4{S?u{&>vP>{cID%0`=|mYk&njH=2< zi^iWXd~1HT<9|&0(T%2xhKB4>RRo~h)DhB3I;;0sqtsVP z!O1{(8Bk;{1oM5MgV1Zk$A8~Mb5;C%Dh2L$oWVDXC}w~TTd&Q9nD9H=UEY0D&#JNF zsGOncJv@7-*JaKh(G*P%YZP(b5GRj)rOu35PhrA`1o1!U{fO9n zIk{GSrm&x91z`)=rH-Wcf0KW)5u4JpHvy%p^UH}$3_!OGE4S2rG)}IGp+ce;Thz3cv zm)K8|5ey$>><dQLl_b7{cKmIZFKIq=8qu$am=(#&(vgRy(tPOD zbC~)h;=+01c^^F({tZI5gA1VgC1?97ac!f7V|9+G@qX!6P{<(H`XIht>zV6B%QJ}K zOqSbiqVKVrmb}K3XJ_D=Yj!(ZA^HP3NNpsDc@Ea2?mR%uOp{u-y^3XW@lZjni=kGJiuWB+9R+8P zo(5p1Tyjd@L@1E^p1%a(L7iJZbgds$vjyMltP~G_lggwQvo^{au~E3kQXWK&7(q;` zb5!F<-{}TYLFrF5|R zE({|4;+FfRz6!l)CL%$HCSOqnsv~JFQ?1JVlMJzVeBbPqd4#7c1N=GPhsG8oxYhpud?$)yCF5X$cpDnqP 
zuUiz>-%c<(aL`N0L+C#_RKNiw=49DoZnluS#;VK&63`;Ec(N5fvd*yh9@TdaPQDI& zShfoYWBgAiqfo8*AnN*UGYqr)kcFjwMGVqt5uZAwWTsKkqM$4U6ow-zX`)d9o$G%Y z-V(pvRpNA9cH;@I^W{EUQq=zlHCDF{&X?Gt=Am}Zfc)Wty9_|l2qB)$bEiS_9Yr(j z-+aMDjYLO@un{RPD9v=Ln)Rflb04Zu_r`VYi z#GA_KtP6qgNL;wJ!{d}DB>v%>YWG77;Or~S>aw@b$?_IrOXtD*=~XmDo?5bm588*oSOG z!(+`QhW;22NB(+Ex~Joh?4dDJ7Gg}oWGy3%dW)|KIqk%~H*j11Y~X#>?|s^CFCM_X zytvfzmoa?I0+juo9SS!GqpWeTVTOc6;#?&L|kD z(3_x+kYbSkt=5jt-G~*f#K41&!~Lxiy#IF`-M>&L00_$uXHuq0_|;q` z1og|9cUJ&jk)~5PJ*}wMGQ1~!A~H-tkPho!Zde9};$ntn*1j>CMbg(Ft{kRYdBsq} z1l|r{JzPzPaMdtOT7F8BaoRqB6VlFv2h0fX1XIi_iDa1p=t)BEFvZlIWuL;7YZb^3 zWVU*!O>>5$119y}De2`r)#=dc6ZkSe@V_oIpZ1#(sD0|^pv`i@@d|@2grPL!QpTek z%|~xamUW}6(qt08I<%72N07jNl|4eBGt}Q6%Jjq4jN^E{De;3?3Y~xtW?|+FYRHnX z;NtWI)amV23w}Gt(v7-03>IQ{VQeW?(o2%KnOIWNh zl9!4k)D`eRATp~BrA2HwLsa06G{r4pFI*yihc({k66a1v1FG0TITm*^jw*L_sM}X*^bHm%NQq|=n1Gf%w zf(BQJ44n&th-9Ff`xPfy0bgH$Dp1D7nWR$($5UT7?;G4&`GEe7Yl`SSCa>?_v!7qD z(8sqvBv38OE-Z`2uw!(E)MVD=q5*QY1<4P>OC;3+ZUoDqQr3U-y&{<(ez!+p=0a(p z0gZ2WNU^Of;8>!!W9a@={-?*rhgY^R`_7=Mw%`FT&JRB`riyji%yvAqB$l_b|0S`= z;MQ}oOOY4eEk$9Q^eK=?BRFqG2`Og*{CPe;SQ)%NFE~7>h{$loitUdbVrpn3C%olb zfGl=Zl(CK#SVv0G#U$g4i*wbVPr1n5aI^^f?W&an=K>TEvVL|q^zd*f{{nVOs%9G` z!ckUmpdAYC2nVbd!c7tTHJ%Uf_pUp@9Y=iPSzq{-X0#FthKNVwWr*nR<>AIFS938F zJw>BsnWo4h8ME|QzQ<4^`tq{4V2vbH`k!tZh%e|3CGr!^?0_bp-t`<>p=2a#(Dh{q z!oMgOidvTN&-w9_N|1SsFku21?B zUW&&XnzVkO!X4K=HnFWR==^&vZZs5`Na$&}?a~iQ{b~Q13~(I2Mx+QtC99y8InFQ^ zM8ZrY!Gz_uJ6*WC;HMoLfmLYEJK9r9j0}*ahzYCd{nX%+l#w!)A3)WD0|}fT1Ep1#(BdYyl&e&x)&QkOB?FV)Wy$+CClpTrtOZ! 
zg*wVn3V+%)er;|;oleBCE-#`YKPW@`Jx9Z6t<;Ih`gRqEm-_JW!RK|^#y9aJhkmdtP+ z^+NH^nsAGMLiGbf9nCAL*4daiBYg*0(ygj^Q+HrugQc?FHY+S1w+@5rHH7Mv4 z1{Avd+96Nw;;z#ASCN!Jmo#D-F4%^;O|pJt>JOX zQU&f;Mf*z~+huPmnFhn*QPc2BL$oCJY2QSbV!`Sgb%dc@2oBp?S}EcK)L+2f&MT~P zk|;-M64C4tgdmgBrC)qREzw_VqCvKO*V3Cj0vjVcxlh!Z3(H@Eu=wjRyFQbt;@iFj zd^F$h=h$i>)lzY^U)^S&=R`L{uAdu=v9{h2A4M%tji8B^Zjw9d&#F&0CzfjQNNsXR zQYpKYjy<2@mVw^NOnW{OY(zQ5`n?`tU8T)h;uvxDtk-wR@tW501?l@JrG`h%>q5m` z0GcNmy+LBP2nI$rGDF!#;Rst}GJY(L8$RA)vi#>p`wWJIk-1z`L`Bom7| z4);u;)&MnsQetwUvCfXO_O=W1i|xy`m(qF7X<)K<&P))ISYu8c-Wc*6-?oCIrp7=~ zKyiw^E$^dv;c}K?jKgEpgN=n)*3Jl#Xs#vX0Xpywi~6`vYV%d)ri#%_t?%n7?odPyUC z&+~d^*#7dng`bUL@%`s(80V|DWz9)_XRpJHGJz{(U?vy~&bnA^o+~6;W2|(sK;s&1u93l6CJp3`hKO0-ez5^ad^}oC7 z8`p-RUbDK*TZW>e>zVfSDYJ@cloJ8WEqfAKLBtGfc30>uapKW*KPfh`7gPU(O6&3-N`QD`9B0mx@wqx5ccL~z?7IV0b3 z-FDr1q(!WYJ*Py6Y4EKfq!fie9x9_qmX%#4A)9Mgvw2dSk$>Bz2y;Ol1AOeg$o&uTuH25lqVQwu4z2 zUH&-#@r%&OKjAJHRE3Wcifa2zj{sl99<~cJf*Q%#rwfg7hfRbkir&Z@yv-bJD@&lG zUsa$dZ;lazLZ1Fkxln3O?SBOVW*ZKCuhE!|s#P_AM^6ze92{E2)K!>Lg6#bct_qnV z62{|v5jE^nR?|fZZnEsQuG(R}2PEiL-SHu3hvAcCB-i~34tWQ}9`26=9tM{RaR)1g z*Lpek!jq=1qptU>&0*cw+@@ek9AdVvV{tQgcnc-*&yrb#AgpD^6FQl^?>x5N91KU? 
zlD|3;=40GbOd)GtQ_4;&G9nX58`|nLf$oCC&OJiW`E-s~(2jq~4IOi3hK+iiB*kh8{^NVyO_^S|0gky}SMw z`8WK;MokFx(X?`#=HnA>WC3XktjV_C<`%8~&5h*ir;n!FVP zWej2K&nPkN?YS)2Z%B2hk8NU~C2O-3{ZiT%N_5DvHN%=J@<@{(H>ps!mLCdb&V|ms zT3@H*_Mav_<*bKs=jF_eweb4n050LTPLo+ziMCJjzYlm1Jl{AT*r++-2@w-iE`j#{ zRv4$o;Y;M$V*?mFp<(vI5KO7X>qmRjsg0p2aR+}Qb?W8^bhN=}WNikxg)P}+@ND^Y zB^nJTMOjm;svX>Mtm%qgDuaOw6Nh_0n)~{wL3c;%#eP=+!brFE$i%P6t7T&|2LK|H zv8z{k%}|rC@R6TDLz`fCb1{CQtUcNKg-8Ow#`xITJ}`QPw=cNAOG~ zy-Cp*!FJ1iGF7GBX_u^SQ;!K;KET8Jsy&eHGOk)G7M$aR9-tkZ>Ni=t)a*g?z98k- z?8a^q^#cC~?h;!9JlwnV*3h{-U+Y%ef`=ye&I1ajlF~e%}$edEH89FK#HXkWXmg4u2(QFb9^{6wo`&L;hh+1i?%3~Ma z&OcK4m6RWb{=h#l$gyboJo$-t6j_7@FMrH9Ye&T2d|nspUrR@rEXAg}jm_?`xm(xJ7u1KPxIP`V zWA`tD*^^_fxZ$)00ANbYBfAFMULSuTspHv(k3Ux16USjLSnwd%i?FtfJFqXK1GwLA z$==8%<`SJGVq)sW02k)Xy==;$Mvn)`ypS^u&R!nW#9~p@s!*t4G|7s9l?la>i22S# zRjWArL_K>Y%qxUGia7M>n@dP--SDFWPobgv6`v|H79o6o&rFqxMvU6K_zmrmbtl~E zG=gtb(z+|kCv?!z7p2m7FUtKU_l@9pQ03Ug_p2^RSHI2x!Uv4~&XbTa$eiEyJgF9n ze+entu>jtE#X|>f=8#1ki$;Rl0m#K*s?u1SpnOOy02}!qayeiQUzeNz$9WalCaj_{ z05aXB-rcx~{L0wm^W`+k_?Q8uFE&>Q!MkQOR!A^l&=0t{nT|BQV zi3of*_zn6y_}jynok>soJR$Gk#-AZs9gj*V0R{M}zoIFt*X7q`j|I|) zmLApMc5{WYgi_4uvGo$(`kU~tv~r+g1qbhX_!>Yx4~5vJ1F&nFnX@q9-Kz7+$rix9 zt;Osv80KOtWpmy5Ha`U64nwK16j_wPL7IjY%JWKL;!h`5RN|8e7V`Z9*(vF+B8gypyGWH@PV4HkVjyHV^q@c1-9M(j6p zU$yR8;}^x&Lj%GKRLUGIvfxowSm20fN2LobmKKNp;;{9BaYkrvEx29nj;lQs(^ndj zZ3pt{$XaY-Rz$1ALNYbC5*bqHS!L*i^R8q6<*Ul%MfF!Z z@4WO(23(O5s7*;0gi=o3KbkqeV@d)&d$|rCc8!lFTHR+GJJZfm4f?)95Bn%-Y=J&u$fC ziL0Y4ehvi&$UYDld*rg3%&Bs%Mw-4gpkc-BK8xFp(f(PbEe%`Qs^g2JQi0?VM6T6J z#C||>iPH?h3jvZ}-uz*&dc6karc3ngU*@|T<&CZKwK^9wHYl?0$$S6*!*S4S|e+ln2eF$;U>yr?1RYUgOXnpWEl z^6U!uH*~f@zS-r1#(IubrM1QTS@d4>cKLzq;SF+I3FQ`j3AC(I?cQc|v|@8~hvym% zS1glaWHt|jr)c0QOifb*GY5WGEyWz5(aWqm6b8X6)c!LYy;E7JHpC>c=M?Vi57w45 zvM)N-t|%LpkL1^MKcJF!9rLnC>@h|HzPvAuLlbts(%5?*KTJQxepA8JjF1Y&2T@HT zdno37YHL*e%nxUdjxUwxDDej~CeSQa{s*kyl-ReV5hkw>Clcct9oyS>12p{3OMEAb zXGVAqdFmp!5F7U$8{e=`_RYy!Brm#p^IGD!NoY6;zkYw5eWg(-2>$8Y0_O0!nbkR- 
zTRy=znWd|_?04S2SSvajV-Sx0_6o7DYG2qPG$N)>uYG7HP2SVdDl^ug2J&IRn91?V z4(h{Z$krBUii68o3J7Q5yMJLH2A7vlwOC{#TTblS-sG%KOtp}}C@`21TQ;$#5@^|X z_0Ey6IHSsMOdi>AQ@BgcFcC*lpv;lV&oq$~ol;u;_S^qF+2 z$osDd1@UnV#bSG~eWV4dw*Fz`_@`+nRn?r6P;7^j9P{%wkFKpuO{1a)*wWp{bap{j zaK|He#=!|!t#Jw!Co_$U^1E3~1%;G~8Apk!rz2qoq;p-R5c0$%PgpCT?@B1j$NhOx2K7l@P zZzdpJe_1WKqh=G}#)inA@!Mm!&C$5CmoDUT#!@8bK=kIy+tWCG;qBVy>A(#t{O{V# zDEhROuum7mV?`?5O?H+;Aof-5hh7ZJ6i^8>H771_oh(d&sGvHuP{7e%5T?=RkQB0l zptL#7;Y&67Rs)`Az9ku@OS26ohj$=$$+~S+8 z+Liz?7bp8b44I51c5$1$^*V62?W?KhSr7d8f@k{WTwv+F+Cp|Vz1q|z9;ux~c0)}% zi~>nF*#SU3A9fgkEFaGS~`Zi#b{*J6A#fk@Q)p|}mI$r%}!%aLW z|EHwnOyul42{!KPb+nHjy+!x*b{7p{2vlB8Pm-HlbdsA@0nBQZov9c@Q2)l7QQ^*Dfj3?fbFcpBIY3B~dNYipS(6j$9_!AbrUNO7r zG`}J%@rVn6)FH@J8O-a1xziVB(QTHK%1=WHRE?F4jVTV4PSK9Q&V{Y8&b*K#9#GLk|SjiI`$-dAs)}t3k1DMUy z@Q5>P!?a|&7h~|L-rLxei#hB#a*r70eLLDzK3Mfk$kR;Ud4u@*3)-fqu>TPVHZeg$ zTK1pW+~*9!qdpv|f$x4Z2NB#jQrrg;r(6i&&2~#E?)!fU<`|_;&XQ>ezNdQ}n++@H z+dK;jSSi+5VxLa{LgDe*UKFhx*A~*LS(9tOFg*Pu%`V@xJRe({tv=E1xdio+%!)Ahx}<2mq<6uXV`Vm z-Y~wl`+9rY;GF~MXG=r5ho6JhLd?VMU%0-(PJD`5&f3gY_8Q^|b4o!FPyXvxAr&>0 zt=7i7%Rv*qrE2K)+*&EIMNT!@q(+`wN!G}&aSGx#lA>`DFpUZ;5xvGWxGyIHL-tZ{ zLToAz4ijj2*WfBBg8#uCj=8?M{$IoRjoA8DM25Gc5R4tCT9T4-VoVaD3 z#cAeQo|jCEUC$mc_b1S&jSHIRp)mV~-On79o}pI78r#YYlvUW`;kF%r%CQs*shtD{ zxph1|236{;idumHSh%*{_cw=cGfK7g1a*Jkx3lSF_8TgwHmBU%8*n8T^b2_=BXc=) zBW^`&PsYkK`Ukr^zoT*kLICRdRfSr8sm{YAB6vLYA`QVt8p41F{Xsic0YvUsd+ z#1nb@Hu#3F~)S)n6?j z^W91P^LOsJ!(us-$kdc0fC3(Pw*iIkA2sax6P0G*wXYqa%p0Xz<60z^F9<>5ql2B0 z9yp(XO&#}AJX$)zlw52jYkKEYd{k#y`3o zK*X{x50 zo;_sq+v9D5x$ld>dRObl6Gi!3SimO~kw*N-*f8C4&=E=A;#dHEefBcam(;IqjqI=& zxXtX;x*Wm8XKts|N7IQMqTZKL0++gR?#u_md)7J7m(GD^snk}oieRyGp_}xnac<4u zLyo1|7<^o{n9wV8n-&2pt=GhaUQPb04`b(iCnjZCn{8E^ER9Hm`Sn8$5{oK8iw{2o zc&CMS>YzZKl3{fDN0@OVsug;v8#&VFq#T>`hpU%#_34Z=;eB{BqHs=f>F0W3VK`W^W zzw5Dik`!V-sy>TW|7^$eW@O@_Qw~fzJqNPqJm_^S@noWfQLnlJubV%~yr-(qNaK>L zmw=vHZ9t4MEM-5MUVFx=hf%slMJ%WwpE^89^-AWP`uu4OEE+^53lqEhb*a7iR(gFVL2!d3hAmwi0g 
zjgUmD+L`eSh%Hj9ypapIT?$?WA?r0-CKqa8Y7@mY9EQ%(7=A*A377Dj*l{e14!~Hy zI`4d9fLy^r9bTP&x-7jeTK2})UV6m3ftq>fx|hw-`)9#9tDrWby32b=wmC``GkY?^ zwN-H*%Rr%3w6Cl!L?=lvT8~)ja$7m(wqK=uMuX~xB6p9h>t;B?;bA&bIEYF>ZrnNn z{H{ISMDMQt4*<46Nxyvmt6#>3Q@3)>KYxT+4j@KY-4AOAaT`xXGc5I-7@Dd9GLIw& z1#wAq$ao#h@hnz>#X3+U zNE#>U0Euc^3}UtUAd_yEu{e;BOuW69h6ES{RD?pgDWJIeuUB6P(Q1-1zM_$yw>CqDm0ZfMDY4;Zu#z&9vl^A^r|)1R^L zipw~%636X*0TTJG5u#Cx@8@_-q!JN+=`LrrR*z>EnwI5{2lY$UK@mR=psZ_2w7p zK9FdJrFJg$IYd;r{Ow)oNf^}GA)*@GkIhFI?vF3$aGS3HAD{fm{ zw5aAXIRR-_f>9F^nJ`J#3U=O5 zaLEC1GP$O>WZXvgjspM|o!|NKf8)cy`^U_jatfDTd>->oSXSdV^r*1CqMo9d+mC)IB~QNz&9ZBhn>rLZ}}4rY}wB0YAVK!Bh<7}BKi50 zy6ZY4)`c>HI36+QdH8&XQ@WP!zhf1yJ5ceNy_JckO9o!ZGNn+cL+V40YNL2#Bo|MJ zkvu{@s*pOM(F|;sWl&aV8If{Cxc2X?Oz;&DL*TrtpTKG7ZfB*0QGm5kU{D5xoT%oE zjN6@>h0+Vr7aUw%<+i&I;QKwc^m95i415Kx5mmD2@aioMnIb3D-ZPq7yc$-wr1~t- zbylOjL*WYthg^5}8ml#^>)^bR4udALVbpOMqKt~~I=VA1v95(~=J?g0{hy46$Cpo) zjb~7AxsrOJqCB$3@W?8N!F3(63$?&3k&>F+K^~J%Kla{p0Y5ek_8Gmn z@ogi{)ZY{#7ooCKLB!e?D`&m_0ZVUtf#@C1c{WIg!_M)6LB;;H$l*{SJ z4o$uX_X4!8x#90V$jAQTZ`l2qD>!GzHiE!F%s1;hVN>SFof2c96qBs>DGLz1U86=$ zSrpWHCTbFlBW`f_o0dY6CRjB{j-+%i^Pabn(VNDfFdFQ7q8WiCmM zsvUN$3plwSu&x1nzUkVCiM(iC1c<{YCoPJm44Nq%suLERaG)CD@kHmSfR}vJ6F7bQ z4*uTDM`NX~fhnH)}I^goOUvQPfg-D$il3cAOUZ)`_8Uwx##0Z(o za3SJCpr|8dh}#F69*8O3AB`PP%9Qw^_y}Hw+{4Jh zZ3>@Xs`$d4N4fFNB}%$3JLFz15IVaR#EMq}Hd{d4q=iru znQbasZSHl~+k)$Fw~2(^%`jY;+UVq=Tw3N6fBKhv`h6ebqNhHQZF4gqu&9xU((#`8 z+|zfK_OGItrUen_UCLlgvQ)m|1YjK*&G4UwdC%k{62&8UN_8w%fx{XInZqFzSP8I> zky_O{Ett}}NT(qt2SFQtjJP!Cf$<3F#<_Ci^J@-6?i_XFc)Nzs*pv_4xJ^)zRHw*T zRAKLE1m4k09X(II>_WchdEde7-}I+e_Gx(m_Y2xCfNvnoJ?}f2|GA&zuA`7wfQwY# z>e@xL{9CDD)M;BW3I|YA2Iaz8&&8V^J3Sl*Zd?d_{BQ-s^yTv&BqIliHC3fBfNls> z0=WvqIvA@vs&yIE0m?w84y_~A;wp>+nYX`L8IcV~#P_|1YR~c0eskNl^Sht;I}RLG zvMA&Z_U#+8T7h>i#ith2G>Ybk$H#!DBt~UvFyiRJWqPsX%pE;0Ib$p5?(DNYEus~Z z2*;wF#E%f`?`{ees~E%ouA?J9zi)xX5p-gKh^L$9baQYnBaqqb_{?Zl&IwuvqDHzi z8U1-jD4>gSb|Jj(fBY_g`TK7??zs@j$uE5Y)mcwBVVo2_suKz_wD#07qFIF59M{iq 
zr_C{c_D1HmXVlKd2V1M5rD$_(a8DSox9RKel+63)SE49Rv6{0J%CaVR#)rs(p%)5g znSrL2uqKe<8HrFi*f*-!QV+RWYM#7fhRbFOE&zJK7l$L>^^Sk$H9!CJR0DGi#pm{J zc3e7Up!t-+38^XF-u6TXPm@BO!&>>8TDK%)tSPsy3pXT|+sR+2xV7Z2)WniGmNG48733O80Jdh|ViG|6Sjh+`e*NHx`i-()5d?R+RTuqHFqRUo2*i4 zL{uo7_Cn{VLS$xb$P@ZK9^r-UQMtqk*B@TwqAQ*Hh^Z;R!Ubg}c!q!V;a2y_vy{CpHt{ zHGlU$iZi!S4J`G3=*WnJ3njiN@X=&R6L$(w@?-=;>Sl;lMXiyA#U+-GEaL0P?wuPs zb7#iR4LLjK=je6|x?+YalT^=)uu>`qh9wJw5qB>I?phd74Mup)n46tL^PEn%gQq5o zHIfySZv(Iu?4&Qm{Q&>B?aGbLM(%nMu>A%DpJ%rlq zSJTodIZpaiQHeUjd*fYbj4<0}Zs!bsM~7_pM!H+g_?HpZ0(8cT26n3y(X{=^tZqFJ zOlCQ%4fYW=?SHj0l5b@FoZ{7-G3SNaMO=_E`#p>16dZ{qn^7*z;mKX$%K0AWIM@i> zG*o`=H(t+Q{ps5%S4>?{oF5a@*!l1Y!#$c+bIt|{sYX*BOcAVBfmqw)-dpXyk?Yl{ zuyCEl)hDRH_|-t1;}I|WUY`Enev(-Sdx7P$rk8m-&N44+wGE0|!!gQ?Y+0!BPa=v} zi~PYzr@r~f>9OFzX~d^%7sXR6NFZ?uN;Vgc(~6uK8cj=Ok}fgFqN)9>TB~PM7;4I0 zjpARON{Ee^k(GlqDGJ(lA)U^VXd)V0mW>DlXGg5XLo_A9d>D8{E<7=JoI>DXc$QWx z&bj<*hQs5FVzoHAXdQqDIK*e1`fI<%>Jy&8!T@p&RAP&6Axb&!>lSPrTk~fCsW@)~ zTsYfdz9S5Q>yHFJvv(D19Pg5+)wKfh#?_CJDh9GTS}R#?`T$;mGC-|lqKs{X` z5K7LN>2T|Hckol+{2eEB?Xzu{(7XC4S*`&MW6~Q=5XpcXFG7t)T;4ls=OG3pl*Y85 z@3Udo7B-yKMRw#cCxl=vb$L`mFfkz~;M0tN3Zd(*WH1U=W)f%7Xb6#^cl0#iz0fqE z1)ulIUdkPs<=%p2kINC}66vZzq2QaGWLa_@mgDN@OlaY=9F?fvK4WH_uF1)v`5Cq!bX z>17$)JZ#TAbIFK1RiXr)2!#f{d36ivvhq2x9sH zqYzB@!8z)vRH{U8Wjo56NBNA$XO3;aS++|qf5g)`aOn86Fbp3FeJ-3^LKua z!PSrE@G9g1`@%XvlrYxpmy{cIsW?}Ij<|eV&L#65Hs->ik&;NMGXQ zxqD^ItaCxA>*jC!*HNVk#-xv8vaP6*%|eWHP^gs{qNVhzfR9Gn8f7`$^br^?ANOXRoOH~t%Mc(ap=@earjH>o5 z4kBx%S}KxtDY`Sv&doD7Gl#egbs1zCE_3)Sr;N&|s!`_< z?+I0jQ-KhP-mwyD{Dy*jCK16LY|kA3{d<3aj1Cj&aOnFWm zO<_dEk)@yB7evXCH5e_7SlYA5(*9Md!%8>Ln4kC5-cgI-M2+K-8GpQ+d_J&2kR!7> zR4ADUITIDhIwhPejPTA{vs=VO3TKVjtPr%aM@<2Zxa5edva5h&0OjeymzYjG&rE_oi$BGRY!HQWR_qlmqE( zh0$Rqg`krItt#&L_$RsM|GbrK%MLEPvG&OlSgHam!H9^?S=Pj-b76%Z3+bmneI$;JeiX)> zkS4}xq>Ss7JwUCEHmXe!RVO%&mKBIGtyQLWSZf%!E5^)m$FO2eg|5J=3iAR#{zEV2 z?SJ{V99>*GY1gC0S4QgqJV4>{oKxTMd+fjR5iBi~^i>!+8SBc=B+^b0N1>^vD-lA; z#$35_ONX=PGN_eXmjfTZd%(gVGeP=%DpSz>qUj5-P+|T3E=219z#_Ubr4tK;N`y`T 
zAB}Lcl0 zFKVgwN|g~1eNLI`qg7~B&ph52sQ2VqMz<*Fc00`Wd(8EFY?zs4w%;f37R=7gGBY)Mj((`mT7N*Z1l?^@5oj2Xh zyME($PU!keG0Xhp{yzq_B*SIVuK9VAq;mauHUVmKz$6}jbVe+5&YA8($l!2Ay?2?l z{X<3vR#>caHq7O8dNvQBQl)T?s5V=m28%M)YW*W1mozOdGqRZ`{edhhAzDXNFa^G06$on1BF<(;h`o8DCN4z!XZ`q$HTtb+ zHcpadz->5y&du)v+33be>v-g;;SkR?KA` z2C581_>u4acK+pKpJC6Pcb&BB(BhuaIsgwaxGkG`*#G@=_HW$E%9=eEQF1k^j(18W zSS>i&@j@Iy0+-A<&YSNsQ^5W;_{6~xhpQa&0=y?nbx=#+n)49TVmxO2Mx_%1WmNFC zP#Sw!jTDwhUYYHRjzSic(O{KIp&Kpr?(1k#TAikqYfX2{>C}6^h^`%<02f9h7M22! z{N}4zT}wktoPf(&SqlU*z^0f*nzlqsH^wxsEIL~@>trq!D|bX^s()k|WX{G1XAa4{ zsRnw+>Ss8|C{~DbR8@s!9+!C=`5FUNUE{q&BZMk2B#_PJr?i^E* ztCD#~m1W!=Be$%MxP4Ty&*hAA;mpkQ@@GDkZ++JZ zk7f6cP2hwjtfe&SqJ+UmHN%^KyAEwXHXlhv1aKZ(slS2V1 zQvk^^uxk7RF;Ywa#Sp9{5d~L6RaX#=JXD7oRi&;4Svi0nxcP+6 z#~q*kG>`e7mou{|qa2xm>vV*&gyEpZWkPUD&;XT1sS-gYEsow3O)M3>qtb|KB!ob4 zn);d5GAJoR2_cSk+7*X*sAHgxMut;SGaw;36{^%R;3Pvus5*g7r}V*el<0PZ9`MW0 zcs^lteBmDe$!5q-dnEPjd8p5f0GF~hM*ad3PfVNuaaIEkShqnV*V@dacaEe{;Bc8B z-ma?`!Mhx-J>_CxZO$az(^C3$n!4_s&!288@io^DB{sHDxG5T?ps)3SRkL-^!Dp|9qBKSGoSS zThO~cjT;<+l>@|??TELuvuZNvG={U8l&iU5woeHxrmthJfm!dF>v|&%#zA+wnqo+$@1Wsg*)j4=rf zS!YQ|DT+GnqX{&EG}9Zw4cMMDT+~E;lT&XSU$5Ut; z`U2zfW**Jx&!`EwRHF1%AqkdrNaJJ$GNO$Oj#eae0xm|fXp2iXL~1ogdDMuOfvDD6 zM^scJuC9?9V)V>Z1M2(3wPVyD7r0 zBe<=sSr+K$j*I3xeCN}y;;P48#{1s)AqLBfX7D$hM&TmSMYCL~PCho^DFZP*p4Su~ z8znn$Sv68QXJ$LDyA`_I(wu>l>j&H@v>{n$Z!feXwX-fAu z!?Li19&2i>fILO^mNhYFZtP$PgqSibnv@7#Y8i}itiORmnXb9bFR+6wA}mBjG*Eb9 z1;_KB`4sltv6t(=c>PH||1Auy1MmPqarsr8^7=QiFL;K7NMD3C=f)BGbi&k&SRFrB z%;d^dn><@L2uFwT>3t=8%Ro^S1a%aN)aO&$zt9wkr8?peGyZwfBex+qia29)cMxJa zw8IvmjyPgeDv1y+60V~y4n6P4f@zC871Il-YI5(X>i|`tnA?t9JxG1{_7gfcs|WWp zSn2Snr#^z!%2JFjgJL#gSSf>*5ng&ASt`(9Jyq4|k3Nw+|wFgJZ3LjuWofzJ*u(;7izi z=GlDW6Q8E6m2mgxVCfLLbO1JPw@z3j8TJ&`HjcoA1)H432GJqa_-}==Ct`fnRu45?f}(DH}`DHJcUGpmaJ8Q=yD{T zM|F%PpHb5jn4C))ms-YzzjSp%W?7o0T3Ly~2zufu#72NLEq@}GacEi-)3TEGm7Gv9 z5~&K*={{_lR_bWFhT^QRz`11jgW4>L#xcxt4!g+SFe1;oc!a|WKlF@m;~btxU84~lF>+81)EZ3@E+rBpN*y8{=kRs3 
zQUMleWohV8<}!3p(eW9+j>J((tr0?m5QwqH)oaAXeJ3Uj4BYmqkFs&wC0umXZdNN8 z26F^9&gO`ER+ocCInL6|Ax1KsrG(S5oj7Zi%4{@k)R4%KY1FTCIG+Z~I2)VmQo27x zOO;l2K%=9oBH}$h^9-sH&O0`4?y_Zv=>voSo8}z#!IIy6_IEOEY)%Lo(Q=h~Wgqpz zt;9n&Qr-Gt$~_-P_kD(Lb(k&N&qT8xl`oR&qY$eI;>kqEf-Qv@yN2vFWxyI8 z`G~cc77A1~vFMP~p!c&c(V6M1VZ!fb)D|5I8i5j51{{cl3cLUw95L|G}4V_8B|* z#}9pkdbkD}>1^C>e2tWr7S~MNv(N32i2-X0>?wnxNhVoShQN8tEYaJF^tR#pTcNWN zhDXvF{Q8}NrM>(3=$qcmji0)f^Pl(_&Y78IAfBOf9 zHc3J&UyU0PD_NHbr+C7yLZZ2R9t5`NFcv zZIUH%_Gn+iW7nkuDAlqERU#KOZG|x=qG4(`^Q1mxg<2djNq+ezwOoZUH=w7sSY%Nd zjB1ML7=}O`jchMfjSDsnEjXxWwv(+MLXYe@p^gB6&wk)N?7ra9oO$t1mPZrelDRog zRyY<{R*j>OWjOUo3qL1lwA80qXLVhbL=E7?hWG^I1gI$DGlD9ky23lps2mZzIXD_4 zl6is}ry#_dS}XF7XMR(M&D%Wcj5xPp*5L>6(^o!$SbyaKNGAl1XuXE69YPoHpx*mA z)^7d~_0ikO>IFJ_lw$KKNo>YbMaZlkKtmezPN-=#<}EW2h$*^D|A_-hlL9jk7f^|0 zF0)}S%O$IOR#@7%jIIqSI(_`62 zeb$KaP-?LWh6p$>baKx@bzHMDWM5ekI|b@sEAZ%Zck{wmzl>q+xc1uXDOVxJKsM7S z7%_lJ%qodTkw)Ayt_YZ6bjBTM>h3D3$9~*xkP7yl%}94EjE;Pn3-{Gy@%B6T$Q$3x zwIBU7XI%X_&Y0~pBaYSdy(`4ADf4X3Z7Soaj%cl9F{D<$G!9xOZl_?`D`-A*^PH6n zqH~lY1m_rPV6`kMgWXGoGv}@%)$r3f(&XHw16XH3;?&gXgholKoefthIR;%Pj8w;B zLb>saQMF*B=R9l9b9Y%WD;eq{1B6#U|2cf*6Q5@99c_!?$wKP@+)v2PJfAb)@JHOP z4ys6%DP4q-*wnUBWlM-?Mov38(339V#_)T1#)^EmpdR54PBuvS-=pC8q@x*}7EAtH4R zE;FrtRbpM!Ipti!U7tmVCocN&@xOT&XI}C|&boLTE2H#LBg|}Y%*EGLU1*; zcWglTp2uIwb6)jgW;f1qd5dO>2E^^CwCn2(w%$w+RoIXTn+iwfggPj- zb>uSgGQRFY%J8J_fF{GDq~xL|a$!^}1*t>HSdOp~Dk^U(lTC}DpKymKg-J0Uj>!Y- zIKz~dAx1>1lpWB-Sr}?WRB_HxqQ(dCNk<^sy_bpS*1D#XWeCdM2+#YjXYsLr{0K+( zwV8mEfmRCmetUx|Bt(zuvQ0<0GT7lQH|%&8WC^}NV7ND)pwk}O^^V$9**o! 
zQ`kxmGtq|E;LPw>sWw}xdG3WkuFA9!Z=UjlRfy!O=`}{uBG#-)%I#G(J@HhkWTV>V z1(Y>lHUFxr?7%S+SBJ<-g%1Ka>cwT#nhzEYFe-6%AXXLG=iWtq-9H?6Oz^RI>F>Oj zE57}S99V8zinPgN1ym$za zEX#12XRx+P<~_O3h(U2aQqM$YHfC&`%hRx>$S6d1&SXe{|N5A3V{PBvU)MeIwLu`C zA=|W@&dv*P+b*O${c(iKQ%40VT+<^KAxb`pY7;xFqm?N`rqQh=V`l-RwKkez4wi$~ z5yd8D^J#NzK4*^Zt}bg1hLvT6#04PYyf;fuDi;9hO(bI+eji)AMG?IdnBoQDN?fX+pOeI1`WCWVA!9o=y(c63yr!Z7S6AwUD)7%kQ0gB}C z8Nb#_onC|ml+9JmC3(h~PB;rV7kKgGpUwU|?mVgI_@Rh418^V1ZQI4!zx$`$n0KgZ zp{F1&n&y2|_fKJbu3|m^(Xp#9?A{>^2XNhjvJj!jg;JFw>Uf|Z$~aPpsXpjM#*FL6 z)Xt|~T`xivYPtg8s1P7B5=ZVFUZ#eB#AriR#9MS2qxGm`YGuY8Ry33pKDR1l6r9V+ z9Y|F|9jtvV3U2$uVDT<=ao<;U4nOmr_u^%aE5GS7;{!w?L|ZUs`WZ8G8AZ1MafEur z+RA{4(CKvWE~Bm@F@VbpQ?-kxGzg|ut^(a6XBaB*jgtwGagAF=Ka%Qr6)kW#qP{RdC>nj1^#aH|JSbT)@-5 z^|5&8_|(Tg3iSv?@LP9VmZC}Lr+*?g!dKnoZz~I=iu#zxU?n80f7^#>qlPE4P&}0gE2ENOh-7(2RO8lUsXSR@t+OEpRGp2;HouWftotBy z4i#q-;pto}YU>7*%!aien=sJ`=sDqV2pkE4nJgnqXYysQd?A1NrneG?Lr!+9=0gds z18^V0&28lD|MM5zK0i;iA?(TXwFPY?ksOClecdCm#d%u~PTedlk0LiNMTTGtPp~?w z$*$TW(4_fFvA>5RbVXRj^x1QTUP=K2tVZak_h)1sXLVRrx|j~UYdQf^8RB3k5+46)w#U!U$0@|u2mlS zEsrDXLtRIcB94x{Bh1b_<~L=`%z7lZ7OA>~!Jst9C(DQ-q7ft$Y8{&5U1r>eGK?r= zNvG$TnRjg3TCnZZ0@q86X0-F%?+K@6!oU8_HT?Q>zK41^I4Sqa$$*9-dh~YcJ3dZ* z;0AjA9^K8ms9ZtNiMpH;5Ut4s#MyiRm824hrj)NsWdajl60z)4KyXD)EDC(gSUps7 z^yX!TYZbE_y7apqhezXNCPGv9Zy4$oW+*fnOGg?il#GNIOUq|YQt!BJ(@0c%2XhD` zpK;63bJL*aphT+7oV0W1W_j-8FXxIUJ%MXJ{yCNw4x=&)Sr_qrycFgD2}w#gi5mgq z`2nfzO%%NIAc{t7dBrDfKIbiK;63!Mb3nS=knR?wx81#`6qFvm?YhNrV(ch^bpv0)%AG6LD2V$((1Pk$qYr z3Yl|k0ABe0FXioTd^0hGFF8*wP7G}c;68#B9nO2hJGf={E~=7LQ;*c*$izme=8hD3 zC4V>y8)x9Ojj*^npBxCnU&^ufvv z(US|T#Yh%nnm(9OtB{RKY72+(qteKRKqqKY%8P`>Wx8u4>Po0fg)-u!P}CJ61j@mP zysGheM!7N|E-&M6dN1MjkHP5plLC)Lchgp0_)EXarQd!91K{vcOEEhisZ?>!o6#9T zJyMpIMvMj~P*heGRjBcq5S>z2Bj)BdFw^T1;)ty0>CfbJJISdU*)Y_IvSCIz9XPV4 zy#7Vs&uyRh)Yp1z+; zs&z{4YAOxji^KsbL58r|v2n)?yD!~HzAa;=Rzjup3L}+iU ziGCYlUd;i>)fRv%2?O<_QYX4^S=H15GF7r*lL3PY-543x6)p(b@`!RUBr7A7P}IVx zOcEe55QZhL3@{uLhb68*!`4}0>AkcM_@ujN~-uXX&hY$VUA6Z)S 
z8w!G(+lfE@5p>VE9CzxYSfmT;%~Kj{ySd3AA8bJmX+h7$%0*79lp<=<+&NJ^PHSQ{ z1XsiMj599S#D;U`C}v@;Qev=(KBn8A%+kS8nzlqEmKCa$GI0RF%49BmZ7P`(A4G9h zFQCeVqm|-D6<6ee%V!H7USwPfoQHj1{{Exa^SVEOCm;CGr@-~dH*cXD42>))Vw@J$ zwEv_Z5fwr}vJCH(7;0iP_PRjgXl~fAO~NW`lV*D z8Q9=GnYR){RGZ3a*#;v4tGD~6R2wxtCUj2ybIOFHv!17}r!{TBf_hG^tJr%PUX?=yo}E z+g9p*H*nxRzXx|;XQZsJ1L8Bzef$%6=1=`6wq3Gw9BmBNl1ND+b>&{hi@9@67?koZ zYOhhX6y{79W(0a^U;Fp%AQ+3qWGXFs0o>~8`ht92mF zoCYS*wUGggKp7>odsRacRkBUI^Hvs!(KOf-%InW)FQvA zMig{n#MLNJXe1j)hsp4#`_u@7rqfVk(Pri%>XkZ1qBQD)!m660;U!t*>H;3y>u@%X zU0506&p&Z3|LwQ_gquHm11#Qwy z2u)QDi2>)7c=u;5vv4m&t-jymvgdsVPygAUU~4z00v3QG)MQ?$ajZr-Qr8?-WjVqi zS{curFY}aL#8efD_~4OXZGXX;(GCbPvusN*D)V_xr?7buKHVj@Y}gp!O3c}-#Yzn! z#U5!=h-K@^T$V()%L}kGTf*# zY2v|%ymm0cl=y1JSAkI&!LWofu<@+Z*>T!DOMm@;+5h(cMXXLl7Wk_}cgq%@`tp}^ z(bJ#A_6trMkF%|S#Q{i+;}o)Yj*$jbg(7$Ka!VC&01gE>_=Vf}|Ji%*xXZ4qzWclO z-sjx%)agYU&5SgvV@bALWLuVG<0eZk7!x{y&;o=vKne*Z3E>S1Y47`zkYFGNn`&?Y zH(W5b+%3ztWc4!ANF$A=m#5!*&e?mtf2@7(ea1it_A_$R{(MGy%6)D*Wv{h<>$iTt zyWajzKJ|`wv2g4wNbozWOYpNhn7s6*=!Ms@G_jqyoV^S3)I{N$JiG026nIU>D{-Eh zM1fG}L=Q2N9?qNYyClejt##+F=bTGA6z7$sE-Xjmh;$L0I2z{|xCALts>7*~6rJWO z(PgiOl)q2oq`=69Y1H0w0#Z7LF(Jd6S!uW+1g@X#aCv~8z*gY#YRDV@_`UqwU;Guc z8X_*x-TgdPngPaO#rOabj5%Ql0lBx1Ge*4%SvK3qg@PcOOpGYIMv6v6Bk9OJh8V0d z8UV)Sxah^#^SWRDS@g zY*TGVuyV>=00fY3{`;n3E(_R9V6wfHD5VOOO8%Jz_SJ~MU%xD%;_cAdwz%P-|wuX+XNUUU)Nj=}t`T3I6N zBrL*_{f}_-o8QV4ciqc@d+#N#eDx6fo{S~4yXangJ>A_`v$XzVswP6JC}^lfLCuq( z#9-ejHqEjkAWjYXm6ph7NR-DmajgP%J%sKuvwJ4keDN&3EnQpzRh?T2PIJ}mkQfOr zP(?Fi3VCwC0=>0zoXDtc5b|x+pdOePd1CoB<~ly7?@z=J zQHmVg8%d*+>OPbOuDOFXX=*bSTuyz4jrgm?bm|KY40$FIlrR0438R_F3(vF<+%{B8{5lR@UN$75>{un269sJ|MTFXWx`T?9sv-;fz%GRAz zY`A!u?)HMFfHY8=1Wv6wb}A(AMnQ(GPS+$O4YsQggOZb&p;aw5C4e(&>k0DwK8^(k zL#^0at#Ey>Pb_08`cF(6DKg@r7<2!lt`#y#qpNI7u@x4B=83%MxQ&Mcfe`C*J zr~nz^*Ooz%!=7SV(O@ko0m-IRXd)e7Taw`F)!)2{=l#g{aozWQJKMW~dEf|5o**($ zj9OtHIFVqkN}Os;A0RqM$Ue@M8q(4D7J{*=y;Pepuyd5?cZ9;bkvBv%P-K+mjDW|? 
z9jG;pbO2Ux)G3m@!+W8rtuyKrs@xVTNi3v_^Aw)h?J((t^}rwf%)jH_KYH6)Ifh?v z>!~rov#{K{jhy=*f0sjbL97}YHP!7_VGOLr8k{3}C3&H6!mxm$gPtlrk0bdU;9KT( zJ0ab&b6d+^pOXYHW0H+M&MKd%N-q-t%T@oph>-_XZA3_6sL%}|O9nRvpOzgOA4qjg z2S_30)ZL^+?aozFqqsUz*C1Lmd&yRIzjzzdO~akv^_?tz;Ju?{<14i^s}ZKUp&1TI zbxj&p=u_iFXDqE3NKf2Bv;P+S;&Hkg&&5w{CPssFlea5Uvt_i3@k^q}NE5!jF54OO z!UvlaP=JDN6IeJ}bL7z#q-vO$?9!bQn#N?6OYczck=SxnTExso@4V!Bc2!FlT z1o%D>aH3dvY&8HR8*MSt4``)#N!BWGPSD_U+aT)(xG|qW`Ww;SMx?WTmDj-9ta@pg z{U7}#AN`Ft@z{fpGV}Dy*fBLtPaMl+TVI8~Q)V5^1V`5il~Wdotke;S8G-TEh8aj6 z)6ActzT*W-E$0a)WDWtVOL`FA*Z&Ls@0iZ04P3%qkg zp9o^ETR8LkZh}%cq3l2q>(95<`2>1~APX2|U;ZsWYy19Ht~iQSD8lN;j$S8e<^m*Y zF*Bkfw(KjVjz$Wb7SA#*X)vLcvA71#;p>PFYC@f;1}pe=CA+WM%;s5m^q2n^2Y>7J z*RkC30y;XCqRV-lr|Gd1~{5Z}kK%e{P z6$u6lgmsQBmu_Oyo<438Voc`9=mKQn7(!e5D?X(x;$(Ktl4tduz>rASB}59^KDN(% zfx;_8QHI_dNpDy&y|T=1ZP+_K#pS({-6C9y1@sTx_Xux$=gs`~`#wr@WD%w|)1B&4 zH3OWsfvqiRQ=X$Y;%Td-WZwg=2Sa<$CWWRUB&A7>B`HbKsBr4uvxW!&%Tluq!fqmgD?%0fyIJe?Goa4;2w3&aSXzkcEVLg=qtnu3PPktF*bR#DhYoM`BFm(XI`xqZt1nHT3hYTg3=j)14-= zQYqae3`qFc5VeA?FnR72SMDfDCx_hnE#F1+(8FixXg{^S63fr-q;vTVgxyzA&+KNU zw*J1)8WwGj){ALSSXCbJVoZw)M^B(~b}dIQ)`r316)ImbIqTVa@hr0!^^p>)6$Ph6 zXAJvN?0zt|eTLaHL~X4yQ?8U?#6t6cfHEfl9wjltEl!&Ry%wLqLuiM0n#CYWmy z3r)j7VZjTv^OQIU#%1!2wF*kr5s9<(5Q2;c8BXwpkTVRl1<)8@)!4nEdUTH7WGDD{W%pk~oo>8|EM=OlG2A zn;49in*C=VW5b&VyIIFNH5j+RQ6MaeP{f8Li8`wF_{HXeD;f&tfs%+r;}8uEa{eTj z?w#W58PC2qyqP<{;~$VtojUsm{;Bn(E*;EKANU;A{@W>4nci_BwX!6w?JUi1o^IFn5^Tu#%bR8BP}< zWb+=EiHHficg&S8kJ0D?e$zp@CC zApOm_{wB1$F1H2FL;%<1oW{tpdmrFqzwx_#=FdOO?C#xMxN|!b;u$(&IYxpwx&j+T z+2|d!&e8XR5?N^~j#mweW;aF2AI;wDO8!@?AX#+Ciy;n9v&2bmA4J9y%T=ZsG|v9D zUz>uewIO0M;%P{*ST`i^SZX30yMf&|zMT8s^haxcyuM!6QwhKtt%(=hz|>FvBBvJy zbXNR5)j5nO?nf{rQ!U53q$ z%@_8$Zc~Y?;S;a z=9$mGiW5sKJp9mO@c8G@<)i4r6X@)AW6rxslxTbaj4G&---{2cc>fmo#WIBKMUcl; zak=;H(cXGwN9B=WiaJ9ERLm%* z1RH6^Bw|rZ209{$n5TS5O2?N}6bz0oapH+ZeB+s%>EL>fLdW*Oj4H~^#?PbsBWAv2 z{VgnrsI&xj$x4nw$mT^JPbcQDOJP_#9v9)>#Q~?9L=&vtFbOv~>u!}HjC*&aUd 
zkxx)BoQ4$9b?1>HhBWwg3{vtuyjdFOoSu;x5za)FVgugHmPiS49>JO1uoLKSgx&^N zInLQY1KJe_1NMFL7C!O%H}U9w53}i#3%PLpI?7xikXiqem>0vkPGD;h*ySCas^;|a zfE5>5@WP5%izR46y#;VhjC@VVJS3;4B@xvoK~hc%g>lO=WXQ%wDjKV`8qL#*?SEVZek5s1=cH$KO} ztBo&}j(*P}E^mUORJmL!)hv4_z&6HJYc-LtU8_lgGpfwxB%*y>EB#xn`nvu75@P}& zd9PZT)dmVfv!IQi!vI2(ufsr9wA#Np4q zf^}EFj&Q*XI9Ur-WBqNfH9iO3C_+yX6=@`N<#W$0=F;42Cii)e5YPsyVFjBLd-nF( zvS%9Cfx*!9?}HPfvzA$r(S7TFI0XEs*Z%?kCaB+m2! zR6#tFqCx+TktH$F(i+i3sw7`?vGq3r|vE7?pv?^d|DP(Wrgx z^Z5d6A+_yNyKWN0lsM&u^<=4|(^epCA zqWRscXEK@C&!QK=SkgEldZkwiP0t&#DH?MXO!xv?gCCn`E=2xH0AIb%nB^B4k^A7x z#9-o!{$_%4a$v#VOoEo3&~J>!Unqgc)F8Q}G9H`4GSPmXGknT@S$eX^M%doM?cJ4T>c?&Jk0Broji31dSv}Sv<`7 ztP`M?q@;*SB;bg3g=;J!k4NtvAN;dO7HpSujy=~tkC*@AKV$ozU3g$Lw3x`2%0?PI z9+fAmA&13r62Ys`A=W>!}3b%UF}&`;)kD`+nxWs+g7bILpP5FAf=;)jE7nFSR{gDmvB(LGya$@3EH5pyw6w&j z)AJmB;vo0mb`J*+9OUtb9_7fvLoA&<^&}<X`_Rp$wsl%%){_ci@V0g3G<* zJYWOx@Jix$-u`}m=dJJN;3LOjVwTS2EJ+iM4sqg$sWfLkZAjXf$3aZyiy>GPZLbrJ zMtY2f0DvNOO-v2MG2bVpgtn6dPr@qKt>Xp%;Cs04AN>IR(vJ0UAPyrPZ}!pyXT;?B zs2p4w@_6IG2fTNbCe%RG2A`)RoCph=RsWo!LqT9k?Yd|(f!WI#5r`hUA@kn5(wHQ) zl#Zb&HzC~lpMRbG|Lr%Qv}5ztzMe_|zQk2r`8+oL;{W7{1q`dpefpStdfwR$8UcJm zD4?0>Kw*7((2)IU5Cux=a9$R*7=b2YOhv$6Li#X@9 z3)!%3mT7E)!ZY>@Sjb;D35F{fidCqZiYkGs$9W}WwzjH?Ifag+Tbi0&2RjE7SmI!K zP#&dP9B}lpqulX{TlkB&zk~e`?WbB^J`10^y`a9bx4HD!(R=1A>0Wj%#il)+T8>l= zl%cTqmd2`bMjy`!HP-KKtH5@ZdA7bvHO;VrUSj5g4(DCGiK&f3ie>;6LME8SoB#mM zD@jejr@dW7B%=rIJ&@myGl)7x6T0yT$(ci*cb2FOh*b3oJAL3vUvg#P*x@Wmo(Arm zU*Lbf@tyqPdq2tiz9-<^b15b|)J=_~E@kLYRV#QBjmA7OXk{s!Fotg_$Zf-5fPs+V5R^{;*56l6>Xva4=MV&IhbF5Arsgz^2u#gfl1WE_KutWf$O17s~=*VB9 zOv=g`+nJ#RKL4!BijYH~t|L+kb3wQiVcYyNAA0)rU(?9tTCJxNfWPAsacupMf5^&t z7qhSufk^aj1it|pqQZ8K5Q$R(CLBd3*L`u&uI0=g{}Q?Xuga2GBCv!oqhkY~{BF$D zN0p@$&lV{vH?Y+|Df4n>+v7<95y(breWQCfOK|5zA+9prY*qeD$j4p}Z-ldv zH-H0NqKp=F)`@U&5xA&u>~X@yz%1~Y#}4z(n{Ve0Z~b!)?|Z@~Z?>O{dZ!K6r`ZWL^R?7(MEe`h2BD)dq2q$W4 zB?x2F5>p+sDkV5E$2oyeI5W3*0?E;cfz2HpLsh#LTBdhOR+7319S3fSt(_s4Zk}b& z_JZwOC)l+ma0#$3f4-v${`_t4<4@l3CidU=;Qt00SPl0JY`n15T(w{^ssce~E(Ec_ 
z<&*tBp`=s$R&{~S_SPV{t(P$KjF;27;#v;-bxdw2lgVj2-<*Mi;RMCyNTC%QyY)$>EEd7-!3En zvC8pp?EcsOe!Sf1{nTnBfVD<&H4`OtraUw23-$sJzvm;|^P@ktb_a3|*81(+c^m5_?}5|Y|G+Z)PdMV-vNiHMJ?QlCMTZ!Nlw}y9G0{7sBn$9a zeGM7^-|{=_J$f;PxdzSZGB$ADQDvBuE`UyM2Sq7q)PqDqZVNTUTzQ5(Kc%1zG!;zs z*s(Ej+4>=Sx6iWooDSPdBb99i?jBV9{{Q<1-uJdYkT*Spt0c6kfnXe;*;Uaw;F`9Cq*NFFO zaPaC@o3k;&ph}~To+wc(Dj@hC!=|Efi|o93nr#OWWbs)#z0OE?jFjWPKL&eRL>ipyl)=#7!yR^&0ktR&8-;%UKgNw;JNa1QV&@aB)+ z%J2Qbd%62_4;d+F{S>~}g{H{}1BL*oB1uQ*Fsb5v0Yp-Yi0=RijSZsFxCAjH04YP> zDUq5?GCY3Is#E;roa?DATX_CY{V31)wr^%#35&pS5<;Yd5iJ)~I3ATpW8_$sIHgM0 z3DJ8C@?uD9%K-=qO-40A={fs;8RJsn>~&%UiiCHb7XqKS`bGv{rxQ&7C+n#M;Qw$* z*=6Iudn3ory@*a#(P(Rc(L%y`R7Dg>NLP{RlCm%v-}wuUYfLhF#pb9H=*L65|%5x=f*YCfT z`+xSA&+^gzKU>AbG|zd(jeO@%{zEREXG$*I;@P{?arw5AE6TuD;BnxNTkqw6 z{Ja0egSX#-u5J;mN(}wFi-Ce94eHG8j@Fz2?0FtjH~a|0^}ARe8VXS2S#XND3Uv*kF7X<`!EhB<1_Uq3dh!Z=@rK?wWvQB@ znDy+wXg!;E_UW&4tW@A65=kTps0!W0DBz0N|AT-}j9fMoxpK|Np%Fne;+@#Flk8kJ zxy9fSYuzub2CF#$ zL19i9wG--`bhLk4mt84;8S;jtzk;&*^i~?mky;@SG291nymo{LX*kEud!802X6K8*oLg3GCQg^`3}@u;OiEq^yM5fpWg9*wypw=>#sq z+CDY#{!iV@oBs3zeB{HoSlholiJw|$!a*^DCM1+VgjD0i6O$st6++RW@gA*eh!v8# zEV(u@1b|hDfC#>6@S>ZAAaWD`U;?!Amx{@g$1;_I#&shuuT(^b=dSj02eBo5V) zBaQNy0M5~KeiZN4#2UHAf?Dlx}NmtK|b_?S3YT<^{Z(;l>q!7 ztn%WknEs_Vu&~t7$QK_j+w5&~j}$)9Oavynj^*5*v+Cd2_G9rKtK{1r|4a_}ZA$=F zBLZztf1EVbtNQ-^{JmsNfz=LyL1lC^)M`mUy9!CvlBj*6?0Bm6jy-+h65tEJ{CXaH z{co)KvHZKNFgeLhKlM|*@*lpA0Ni^dvG4u`zVN_l9(rH^ry~>|*9)Z16fT-&EUqSr zQiKAM5{p2#9kEJZ6DjZRBSkT1NnPbVh#QX`IRQ=J18Bn2iqeb|f^jgk{5tcJ2o$ zB7=rK!SS4~;~ekUDZ&Lqo1ZZx&)KcgpKP_O~hD(4>%v_sADi3 z5Jae(XaYhaS*R*k`$;ppAyCy;=?4!n;ncDNpGOT<6R94erye@b zT=T=+moQVJg+NKOP2m6B0XMhQSG5>&Nq)&xV1 zBx+vK?Q!=n{eK+ztv8*u&-K;1o=O1zmgPEqw*SXJ;n2)EM557iRlu7?r9CM7iMoVz z1I?6YB8*k{T84h1mf7|dmUD4$HowYDR{sM=eXJ4Yh$R4h#*be{&Oif7&rdIzsD5t zx#RE`H$TB0U#Q?@ZGH1HK-ncYPx6kGTQW`*C&^UByu+y{xnw)L=02^6A@NSkBS9br z@UFERb)x_>ZUeLt#epipdzxsI5=OF{n6iW8#Gu7hJi0if5O?I!tHaPv875-t?=#$+5>DKl9*< zAmWhCmqR9(rKU0BSsrcFn6kdU1&rjZ+SQn$aB_c{WQf37T|#mGGnskG_mXx$hvgyo 
zx@wOijT(Z}*OW(Pb)@+ZikBiQ2oACtfXi)|ia}K){ett)?{Ln=vvj%`gq%;e^TzjU zCjdN>6$1ehzIF9$rvmc(wZAuxfht62njej-nHJ^NLhbGETrPP#O~p2!*jox0hLX#? zunn6E`S`;J`P27(gb#k~UJl-I7fepV%q(S5QZ>1qmE!pQG-SgdP&Fc80wgv#Olw~v z*a~vq}%0XIR5h4H&7iua#larSKE3j0r*?i zh9CGjhA;kmEH5@lR@8|qqh7ylHjbfdpkGi-1d0Ndo#tTuS4Z<~3H>y#?2G*WXIhYq}ueVw_2uw||h>WU!3;yl*35YZk!scy(&B07P?)ko-V(vp9 zIV;EVZ(F_f8+h$M`6*uVeXnE*`2771pZ(Zj?tEY#<_1u1z;&kR_(Y5iDMBiQE zWXDBB(Pjvm(9`!nDI_v+fpF2)Y<$u8lFok)b4wM2YQTgDLzCO013<)!B{M@bs|U8p zlw;@;T_TMnf_NGghRu-fl(PGxX}0X1r05tYvz@hY`P`@|4wnsmaxWgNErg7?19tqz z?|lrqwIhc*O0XcIA~b4K1uoUNx@Ln9?CKO;=3#dbc3}m;XBR5o`=L+rmOuL-58wYN ztklq-BFwBKHW7`DaR%ySh+t|_2NOgxhn<*3hK#!~8ZmX4h)@1}E(Ikbde zs7gm+s5QyKnS(D|II2deimDP=wEVVr9oYd6B`N3B7cae zjZTr!5{l^#bq7jO>oH>lpbjv=ph?s=3y(SQ)hY}#(>!6Db`r8Iuh7leu05Z@kX|5( z($9M4%lY?xY-Gp*10=r|d2=_%E_3~};MbpsY&tiv(OZ&o_xJoH^EZF=Nj=5@t|)lP zKll;;#UKASc0K(fKE5yUmUleLU;g=H9DN``oTA&GHpX|Va~yKu0xD{dCD=XgJV|^e zyBYT)iq$_JYwnu6q9Zx8U~Zg4y~pRx4)G?Y;>>Rx5nSF#i)iCk&KrrZ-RT2DBMz6f z=Mk&w6GD;gXHzB=c0}?Nfwct?8IiFRW0X6<`+$pz27{C%Py}x#HyV*35W2W>l2eQD zz;Wf{k1cR|spg5k!*xB^@7cwxzU#F-Mn3~Lx$2q(+JKG#pkp33vZNN=zM7tYdaDp)^wT4R*>G)$T-ToKUCk}GX_VbvU zoM9n08QKT%*-swC3AH4GL`ou!nBY$olWZt(sCN`T5Q<5b7aNWpp6BH0CHkSux^pP3CgOYAelR`DoGIRn)1<=pBUh8@b@J zJ=*^FF*Bwj7sO(K3WygqfDd7Yv z?-5ND&e32&R-88ymN4qcChtKN=M)n)QoM&c355&x*o+c*jf@7r$wnhq_Te$h+E#U} zq@DH<;|K@@XL{$Y!<%JZ`P_{F9my>M$?rR64uKn0r%k>FM$oNybX9H4G-P664nFGb zDJ2Pow;tz8QdAmX1)O{7j7^>sgORvQc9Haw3_i|dYUxBKlAWdH$_XZ}ehm}PeKmgLxtt!m+~etS2mtbmhgF;Lze=0KQ_^IR#sHO|W@$7wN%rY)_`?1hhb& zwel0;i|*C-x!dl5O52aF2|kc=GT?yZbToq42w99sGU-r>O(b~FcCEQk8!qYhIR{ve zCI270?LOZ2&X4i_58evPwY|^&3?0`+y;9}tI7TD8ibPaAMj%Xl%miTk{L7Fr{Dknvv3su z=C$G69sG;8yqR;(?Q`1+_{3iv;8UMG38zb#n1;eSeO?$zv(~xi>Q%?edw`ehCX##j zpm^^YiUS((i9(d95nPEEp~iZj6n@m;EX8HWAFR2rOp+x?RY$FXGw|J_FlDkFXnaho zOlz`fNlpNoTo#I8f-b@5_b;X=kuhbTQ3>%bKaWL<5Iif{G)KJ4?SWBwiF%z-zh3g!RHUxnn zF1HA>r#<;xrG)nd!50jxWtQq?v`}_!pWxi{`*b%IlqJ;lnViZt5m;>!wL0kA`823H 
z5&^|KpW8ylFDNkLpg7HX1CmLG_8c*{4$^R$X-%w?l3m@vZU?)aNe#~dw>^H8cfI{D zdEbX_<=Dd~Y@^z~$+A`)A_=V{gzS5e43$zHkAo^Pi&hzVP+~+Sn=~amw~3T*0Ye3R zQqs|T?VJlw+Vp^U&ojQ|)x7NA|2&(zp2q-UqVbM-;AkD$S2gSlj-D&11ZFi4)~LCk zwj2kXvZ58|1=ul{_~cbDM4S3a`5a%#^;8SsuUUS_g-reUFLQjkw#w2eBu8IuUR!|V zphF~egwAw_!rO*Yh!7D5jA8xu?}jqAD@`(n`T-`f&9=1xt~0h+=PJSr`DgiWfv(uk zhxzN215KfA^^rI5Tmh(8nwfy#;0Qsu0=V}_e~$V0zVFF9ww(^I{l$OFkN?`g;bd9x zw)dUn5C3os8ZGeP>eE_34k#rH2>V?_e8td#mK1cSH-(LX_fzloKQ5Xxg}sd z{gzekSk-GLtMOTcrHi2qisFK&Y9e??A@;bO8jJvjz6g?_Ndhk5iVms;Pn=Y4d3?YV zryCyg&`fzQo}S?EfBUPs;d{TEhaNn@!AJHR`3uFZ-$79ns3eketMrAkri7RRorpp0 zntz6K;B7iyyo1gLq_+|2ZiL|pd!v#wpfzd}=x~YUhi+r}&X2KSa+Xc!?PjS6Bo!K( zoIG0lrV*hdo@G(;fR0%OXA-BNh$IpbUj!n;=~F8lI<`byR!ZNc*E4dU&j&vwBTPCs z{%D%7(f0gg&M*N8#umtW{bW}{YYE~6Nv1^Uo#c^2`#YBtza}_N6$OXu$dNj-pm56D zxwvS0f|p%=IWK+9OWC<=GgSePKlA`BE@9daCK)U$q@;OFlzdJN5wKjiMGEYBao!Tv z7(pEBi}8m{;9MZ})+7B*NPi=&9A9;w&*D<$&^`C@$=CljrxyoYcFonyc%jCQ>jv+b zDFdY>4mJ(R2g`+288~66*18)i^fXe7sj~;v_qfl^%Mf|Mavakw*Bm{ zbMU;qMtX79Zzl;(40laLkO+m+-_)b-$cPZsD)_A)=WE&e)(ZHWVSN+)8B>buEYwr1 z3jP57{1nTK9si~kR`Sos(!Lo{ki>FGbY?t$gHQ@w0o?sFzs!k0dh#6oF1+@e_^vnp zR~7<%_9G|x!iNqrx8LB(Zl}jkBTf_1wL0fAV#hZa)IRb7SFGRfWfU;YQ``)~@8EOLcBXXnQ zG6%-a)rb?KKoX6EMjR;$p-Ps6tl*Fp<*K>C?f`EbNNn(7yj%|aqSO&2M&0Q20@5;9wBl2u;xTcIPr9R zIeI@sO_b4EB^_^57v~lQ$8P*yhIiiezkA8A`Snx+@I|Y0!}l@yoj=8?<)u6pWgA5h zsPgwtBgrS24)kY3?$?jU?AqMF4c^Q7mN$6co^LhmZ@)_{2mJj9VZ7h%{zX1mbs`u)lErkA%YAc!tl#f|6mlgkmHq}Z3NJnNDvtXKrDcJ@3Zx24&E8oER*K>f3fkv+-hi(fZ!cXuHa}##PTEn zShX>dE)^pu`kWis8+AlYlqk^$O9X<+yo!UydxDIUO!5AVR)Zyq?YWOsp{8^WC}^|| zNYPXVUB&M8aNYR@&)weVMb1QM_MIH?2XB5CzxijMpgC{=*3CdsnwGr{%qY$Ysfl?EGomN&<7|iuxnhFJ>%N1nH~tu>>cpvf!R(&Jk6{0f z%N`G-B+4N!n+>U&a8Qc$6zalM9(~oYNR9NB^S1?d@0nm?eaG^(TmZyO{EZk2H&zwQ zwYD${j-w57i10eH0Ooh)7 zBvnk&l4Px#xjQx_*TLs| zt5l_*gj3R@vx%KWpf|tF{a0R(HebK4flsvnRxQ8rTsHmV|HSFWZiJM3T#B=8P9(d2 z32w4Lr#%H^F96f?myDMtu^?1nzq5|^IO4bP|BzN`Oh$}-kFW??-qM18t3sQABE!U^ ziGd8QdQp}$+*pc|y#1`{JF3mF5)v;#c=!!(<Jt(rCKL`P6@25)nS$#d{(;zr_(Ngu>WSk4LRK$D`*naE8{ 
zc-1_2E%-bwpd!Y2Cz_j@fJ@f;5FbWf@!GaVNG1j)LJUGAQaVqQ zxd(*`jZ<7wBot`qa(V^6aA1+U4^`|-iRJYL+j<4Bd-h&l_R{CmbV}~r|0o=vhsNPL zB{*w61RPEbff+&Hc>^_2LvTLNq_j}5xU6U3k>JsC9lo~_9jq;*TvN?(|81Q9%sc4^ zW%h!r7Wa^e4o014KwM;(Xg?G92HDUB8mae|E7uBTY)aUd8qgM?1M=VofO}6iZGu-xEPy zo`A77jCfD&3+7cgR3#2;;wX`MghGV#dL=Ku^n7l7&9z)`@j0Aa8FJ{618{l)yiyjy zs(THV^!nTeP(e@%@9kbxk2o>33IXw`54fyVp&~e!rTLuT$$~`A(aySzCq>Rqiy#h$ z-1F9VaMuSv!831q8Rzu6tO#^eSXTt5gXd^fQ9Gpm7uPpI_SO3a?)%lye3_H*R3%b*>K$(bxJlMwMaXy<-t+fSD9 zXk7cx=Rn!0TFcT2k|HViEX}O8`Dh}w!B8~&`dH6Dh_j#%Aet*3rSy^uFFV&BOE5II z`;iRyt8M0O-`-0eAW*VpEs&F$mL=Bu$J=Mi{0y-`lvD}Mn<0@Zg_@3i@TOxgD0ojH zCfAJ1v-#c_11+mCwl=bk+@rUEmYhvPwBj@BEP1le7nDUV-+7cw!mC)EKU_% zKH1?#&wCozz4Thtd+vGkQS`)dWMTrJJp`ODY-B8duD0(jVkAM$g8D9t45=F1-nsF( zp(xPZhzw4Q$qZ-w`_h*V7FoLQ(=0vs1vYHh$fk34uo9I)GbB12d2ubGo4v!yQ;mFYi;&LQ(x^(&`t*xmAmUN^5>EH|~T*b~s z`zh@blo9GBYVMN~R<*Un5nZ0)7i)`!lzBhccq2vNloO65<#^pNU)Ky>pmxGmf$J_f zmlwX``8@0Cm!n$q&|`;b4jqB+1ij7#g5spf*N($uBZ5LfRLQ&=L&{Q1_;l71%4`N) zDC`>a-hSs@PJrxuGqC~R-C)VzS>g&T9X-mYUjIf;FE8`#>z;=SEGL+Dj&+6Sa2&Ft ziLOAC5iCP_1*gK11eZN$FGv3H&qzy4XXQP9rPos}fV{%Fm$KpC{vpQ}mqsTA=T8 zJsDs*2$d$f>m7p$$E6P3U5|0kjjzd&-Pv2yJ9hIEfBwfTxq^59-hLkX%t??5C`wb? z%TT^kB|0}smR5VPiyEt)EJg!k8Qmy~1cxT8@KaHYnDEIdt{ukdjSnI&b%ABkUlsGDB zz$+aaLSvX}s82Dw4sO`h<7GRhc|LYMe)Pdd_>JFr4hNigacq=bS*icw8@RM!XU2Do9eK0Yr&2^9s1J& zZE_;7ElsqA^|8txh)E4b&k2IA3jQFZoHev%sH1DGPLU{yZj;!M z9Ot;e&LHe8VJr4efDhk$khkA_8*hKl$2tA*Bhc@`WQml0G?mcQcrS=LV$-0(5VCd* zRNL1TQ_~dHj9e+igs9mzxo2G_S4=|=ovS#Oe^_so#0>0~QZ(zf^8BSm- zttU|KXoehUDtri(Qc!82R8t8oRhTJ+$@?B--%DS;rtjygWj%!e02Ey|{^-Bs=;UVW zS>=AP%D6>o@K$7^@2F;q5#zqEaz-y6H5RI&05YonhZq{rg^_qqu?iYaj7>*UR<3QI zXQ-JAkpI;v-P~UAWR*KH3EM`5QX|7up);Ol)^oXlJJE30OI~4Qva`4@zTwsU@Y`R{ z?VqptyzZCct=du%?{o(Oby;Siewv?&HJ}n@RAi7A)X66wT^_PR)!+oWZ_bjl^V(?x>AH zw33CjCeE-ItZ~-Fj!XZdVI)3$7>E3f=&fqxM7x? 
zUi=&`z4jR#oIB0o{RfGs=V5AUl!%~;I-jBKT*G@yxU?k$c&o6)Wg!C>%&dllI6rFl z;62)12ZQ5lR3R$$u}7HuQ#wo?sX%Mt_VJvA3}kd583@@ zw$afgI^NDp848wb<=BZ8=8g=gDxu#i=uLtz1)`Fo;)1nAnv@VEBS4ZT1# z&kR^HFhUSA+7cL#$hzDfaBZR>36+`coTX>NM-b>yS74Hi>MOjG7?vIxIYUrDY%kR#O zT3<0G8-?v|L?$-jCN>*7FgURW=ljexIC+X&fBlUNE9I&eJevX&MckI#R7aXfp$@OY ziW)N16XAryMVn_>df*WT4?gs_zRuU$dMW_`W^VWac)_b#87@ndZz7fS*ReQDpjYb!deo!Vf8MpKd0(eotd zaB7U}R=lV{Xq*S{a4FF!L}$HKNsWomIH8G+1%7GyB|=w(VgAmO%+M!8$KCcgXV|$T zp3*rxAEG4J;G9r~F0nJs(P7}$M~-rEX~@y8W42fDoXzWbTw;RG#$G2h#yz89^PZyU5-Z2S^ckjZdITbtLzsgGyTD!V#dL~W;{Eb z?ZVIC1=QFqKUfmc27Oljzn%Fv5}@^|QKb;;(-k5NH4>+k?&c0XFU$h>-t;}hhaWj> z$55Q-`+w^X*nIU1c++p}WB;d)Krw}s#;jHqQZzL&d%X27LC~Z$xg`J?Os#y0j$WcnX<0AVjFo|#rXXH60H9# zHmYY2m(laPw%^^BXITECK#5hgmh9N5q1o^nbX%xWJ27D?ta3_5&CavoZp0@@nows6 zOd^l1wR{9;ag(CK>pUKN{17MiKMr+;@Ah*mAWuK!q^petg5Zl_$!`t@ zD%Nrn=Q0Pw0+z|sD$-vM%WGT}HWe!mevbLievoePY(M`}I%SWQI^vU&OQVA3;2iV$ zu~9-JizYE-$8mdq8Ydu$72zIUYE>?pv=5oASx; z$pzGTd!NpajHe{uJFd3@x*H4;Si>Zso}1@0zxlg#&fUsodoQ5@D(BH6Fjq&W8==gJ zb%-@J%H~cdl{Or|bSl+7pq|kAmm@IIT_3V?x zOjxrd#ib0mdNCNg9qrPTCkPr8(+Y4YQWU{j2MO?Dd|n(7Ya%ksJww2b#)vPmx0HXhf@Nx&J`|ChgX zWuBFLKFPvuA7^@UJ?qcEgq6XNh*B0tA{~lNJxD63DG-EGlTB72RAR+%QuI7=wBq!!6{=++bW8e^LRr|TYW7tewH>7m;6iR~L+>B|cji_q+{pDKSDHZSl*%ctqoj3R=zE*B29PmXukGiz1izX5 z^3|81=wLtzHp$TTJCjn!2)!mcJjFwDoSgEAyL z61r?$6>PrY`-pp=%V~t= zW(7%(ZYuD}rbwb|@M7m|6oB(Fm6o(8*)>#+EGn);6Dt;~Ii~x<&U5-~Ij7J1&2Cg3 zH%5XnMM=#NyJbkGuLQ4;-u$2ZoX4 zttHV>7>cl?gx^=cOXGMr)iyLz<=z!|E2`o*Ju3vXlDJD9$$>cG+03$U-kBx%GOag3W zxJ&OC#)wyg<;Oi^pRIDoPI#n5Srn$rkB!gapdnc=R;=>1r6gwtAqB?nZWFsCVoZ6L zFo_{Y!30C^aK0fFHD#&HQgQ0Ok5kW`L{1;1T0V&^95#*c!4g`pjDppu8q@SAqhonB98>rQ&RuHw`%(bO=MKu8wRd`@gYXo5#H!>aAa%J6-w_E&q5 z1R=}VHG(Btq*3p~~5T%Mrt3h01Vn)t;Ys+Y3!G9!fB$g0386jObC2D1&_`1Rc z>moO9>vG+;S)T3;xw`GxN#63Nckze6_G=Iu@CCB#xj9H>O?osl$MduRd4vryX1oK9 zl_3CXiDXO(7o$xIU>Lo`L^^fvntIer=rHljmoatiYv`VT8OH}Os2v>|f;2>DypHGu z6B5dn!vaIpM21?SrqI#I$%adCb*So+eLQ##YKDr)0)^M%5ERnS$H;! 
za30q3yl;Mq4}SVy{`B1+hiQhw21)7guqtE!tU#KDgX@C zgNW9ZB|0gN#gY=}sr&LRw|0A*>vXyCjc;J%3ohdh;9wIuHW*NP8?G$XG2e&h0{8#u z&vEKK?_IOk{z|TI*aTqW#vj4I^2eC37C}vS^vqztTGqEvrH8QcMH^a+GT3w3-CcX7(1w-1;?5~K*9jYr zu5kMcUP4+qV?JTc*2OP;Eid{P|C*csa*q2x{ty(CNa-O))Fu2V2piGP3~QW0+`*B& zBW0poyJ<%SNhueJV>sRP^?jD|^>|IxnE>VU-)oB2PH{rzWwdEZCTx)*@iY<{N?BW7 zO(8HiW90oqU8icLYsqYYO7~vxJT36E3`l?iDYgp4vj?5{))A zRWlm@v+BS7s|@`+hlZ9MnNfn^v%6g$#1l-ZaMXu&IpNBKWX@!fd(J^htb`27iqcVK zD1vucCO2ys;GmI)C`0fiT`5dqtyzE=dD?oSKlN{T?B07| z{a$2t3K37r)xP;5Ski43X`T{6z(r^hTGy5qWIlr_%0*HWk=B~odD784QPSE5g>r)F zt6#&G8-IwU-YhFlSgx0WNIzwRsGJ1YGB>}FO_{1>@V8e>25eN-$c|2-YHAj%0Yw+i z*;H`Oc?Fxd_bCEM%m`OfB%1r9Xx6l7S&8Uh>5YpRTH_c`6}2KgQEaLuo$>fAVJskO z&XmbPY#K`M@gl6`k-r%cwmaCFBD;g*Lh)<_P6O}%%)Pw-Q+M&6zxWiVAAJPYZG>JA zS9I)r#b{MO84<|G%T8HD>STTcInhDQkVr&vO*AJ%GO;$AXF)od<)qi}oIm>)|1}r? z;A`0j?29!ghc#vN;5rNng;KaG!flsbgD#(){q@OQ-;fD_+jiU#$oCl8w$i54 z`Ra5H6b?2`QS|I`T^;yW&-3?JeZLy2UrrWq`nebW#VA0V1hmf$xyo0~k2R85`~H@l zB?o;cX6x%g0%+3^w+1#(3mbv^Uid9E5A9#WBQ{>o`r&`go||6FyZ-o5mi8UR^=AkK zYK^GW;B6k=H9`z-)U)-ty#M7&**PQMWu1J3LGxU(w~@h+KX($q*~rw$ezs_p)hz*N zAE$mS7u=AHji1{BjgT4F4M`J4@Jvt5Fnsjmq5t4RX~5lck9T`TH@p7#u}Nr(2h)ZXDjpbIRE1%D?( zawgMWZf!8yJna>nIg2BzP{8$JO4#tIs|oN zq|#(TEX4*%5j}nnYb`G<-6>|C`C>M{_#ZIbd>+e9B38?05(W&M&?VbOdw`geQ%6)w z+)7|s9Q~*)i?wuGZ-OBWO&n6k0q3mmuxrl@=WKU`($OTSD;+5WI#{IvFy2twjzH3s zg`Q;8=`jSrS^({KGUD8}&zYf5BV?K^cW{=N*WxX)P{lVjQ`)dWU_FkV&X9q<2s6NK z$L9FZ=O5tBfA)Spd&dLjqdYlj|59$_jEN4J^sAr=nlwt@T}s4Qlf}HvpD3D=r*-$D zO%$YLh`<`Q2R6UxIB9W~_&tvP%Kx(_ulJQ)-;fEw zy6^kFm{6|Dz@H6|6!7^sn~%3xVZ)w-s0) zybCO>N&;4k0R=S^3hV7#knCZ8)mHh-==J(2bCM{$AVH}opu5p?m4|zN{O39Ku0LP< zgC@e|-~CI>Uj0fw_LmR9fm67NNefa~a88oY&_anWlkKeM7%)|^lE25%u2m159_-_!y+|??zCyoiHy}`CEIy9MAfpVq)QLmL9vE!Q=Om zPV9$zVeOCE-*x#8{@lGxKkfO1-A^N)b1Cyx))G)Lu0)m|R1)GM;OG))vb8WV(&Pz) z+9^1Xla@S~muMc-VpP2rq6~w}m6s)GUM%pp7Rv;OukG55rGiAdIcN_wSu))$0V`a) zA@G77lRSH>%SBkA`PeP@@|Jgen491KDTspKuo>q}eNUwogc4Mv^)-@EI&V;Mj3jN) z7>&&49cfq@(M${#Xx$9u$UUq01#6fH^k?Zm{iSSu`S(z--^Nl*R3vB`8ZjfLAP!At 
zHWal@Y9djg7vGQsTnT=_Q8)Cn(RVJZ7~ z(Gr0Cdaa|R-FGbq0HUNb#*+j~|51TOf3xsep_KWHbnAED6YnQ*5f7Fl6zr zWR*WL+5}6!4y=V>oDfn%=O0{?b6#Gsi|ha5Z43*?XMw?Tq>PTLBXm4migNq&ZzMf& z_{)B+ujKlMO909Xu4d{-|2^}m1If`yfI?7j7h8eSNQ3kpoo$meJ)w@aLAHiJJ;uHG z)d|3ARA7u2o=eYq`yppl z*@jnCvpS%R;BTj`U2ER-M-kpOs8XQLdx(8+DsIU;3RjpdY!FBd{cb^+dw_cXhpF~` zh`83HWox*kKTEjqnM}RlwZt8lGi(9_&7?DLZj=RyMui{&TuI{$`|Y9(oI)CpU0LPt zOh85iW7$WlwxqN{Y&woHfza7?H>7QyxJ+(iMBw(=y)n>+uJ>j}6cYp2fGlwRrohX0 zujBF{T!QIXeCYG{@ZVnlZa)9IfFrOyCr(iWO-U^I}E^)XZii*RM^D z7bVse-aBX-Bh?+b`>b$aTyL7G=e>r>7u-a#V-L&A6$?!ycq34DC}ApnN17WYm0zOYU8xtjX4JSIikqOH8JMPrbcm1!!$KJUBQLEV7I^? z%wX!agQxl3_uk6yzUMDFdGs_?70gaS*|&a0N=T}#6`-ahsG5U6DT{j3Pz@j-Kc0BCqK>X}|-1FM+{5wDI zSJV228UqyP+3-WZ#Qf|wn#KrXNn9TLvQ0--(NfTLU7D`9aX)8cdv(5PN1FXuWz%Oo zzSmx7t%PTOW&7Ex>c7?c?_o4Fk1BX44f^SS%x&*BOAPC-zi@y66wObyM1L2_?iTe43}O0tnkO$lidt_*~j z5EK`JvFXVg_nO((v}uk>jM{derGZl0$rRL?B3~hrnb%f~CTFHNolc-LI6!#doh*Ou zO;nHFg4Qc*eyrAX)dSLz{R}?)KH|e)U}pVh{PrD&c*JO;D^Q={k=f#3_8N_gUU!?8m(gJnnI5}mF$ zo`A=YA$6z(Lr+oyvE+5Rpf#iUUa*IM3ecE;n9v0XN)mHCv~<99dc5_~8>USb% z&S{CKWF<)_ZL82;^9)HIRYS){=h=>@D>tCkmzL;jUGvx*|B%V;=kv6uUBG=_=?u*9 z`H33N-L;X^AN?%Np+jHN^M5t0Z>R*If64RVh2P84z)Wmx=&1<-d92D`4YSp1kld6N##zkABz{L~f1^!GN3DTY3!oP0X8re+ zZ^qh#P71SIg$+xQdtdY_bU0)!mxMm&eDg1}=yvnSZTA5lDb2~wrEH3mHw48t4^c+C z<*i9bN(t|SS;;0v%0P;Vl+<9yRzWWJ_`Ne1Jdcg-wKuwAq`oFQt&BDaY)-pYC22I1 z5*J88nCK?bJ#VJE<+m9e_&BNhDuDX`PD@Fr4zYaO2jJm5*uH5U6PtEY6@BWMva6t) zNeyFjsh9(^k*W;Jmpmm=B=b0MWJg8st%c(qMM^|WR4Ha?-UWw?LTNdG3FKsm*i3FL zh*6Xybf=4%)HFp&N}l^qEOXnj6?C%8V%IU@;5kn_pBKF3c{D^Gxc`3A!f9|#4s6k; z0~)Ik5p$qR`7?QOC=Tih3BiJfn4nTSPp%1cYBMsq1=rsI-StRsBMeTmCJ8}PF?{51 zmOuSox(g@Sylp#fW<7NgScnasI^w*i29}+UfEI_aBuXb)n`=pO@|qH5a12FJZNNE- z&|#wNvQ)$T=>d;DvCQ$~4RxjTrwW`Gk{4oX2$;@-#(Y2JHi1{Q$DvtrI1@^BOeSqd z1^qZ97>QZ6Np~Z8Y6J4Pu8cSds+Kr9@zf&BY2;Yb@OZ5}BCzNk=XN{Xc;!XB<~1*3 z@6PS4G?9lNcmN#?X*DQl&AK#?e74yKS!hUP%pq`@-?OUQ;{-GpAtQP4>eQEYz7PM! 
z2U)2i&%f?!?h_$JK^!c5xZp)E;mB|O_LuzJU+MJ?lK{BR1na*0XE|AHqL47E{)?3R zUuuv*~wJ2btqafb=NncxaoFTdpiv}+^*oc}+r-p8z4kBoZ$wOYP0%*a6+J$|*r zUyB@A|G#~nVMwH2V*3`yWgc$-mVZF~=z+C7TuA7%`KEuzaAGgV_C0DNI=-;}vm-O; zty)1%JjW`CTxF>sR{Imw|<`hE*c@QlU>V1jSn6*<*_FvYXI5zerzwQ zkR?cMQ;I5d3Z*>wVODPa9hwKvU1;N=!U?$UFTAjp4b2` zP~>KA6PuC#dXZsU(_d&c88Ue=Uz;9-m7A8WE=I1BU~Ki!C@DZKCYaR0u1CZ?14bR)l-fOO8ap^P%_V0s} zC-5_~n3=4{c~6QBE(AgdSs*H}jNF;xoDn8zJq6le1v#*IL#28f?cd0j_RBmo)e0*Q zeSy=T`w+S`$HsGZF*Ci6rDOs((JMs`zBO(4MDV%z@XisOB?G996l!jznO{S~2agLK zy3*s=V#A@M%gmpsSy*cDJ}@~EaKW@LBv&6U2WXQO&hD5wo(kYqvO0ezhvxlfG+rpJ zg)t-D0A(C+Q%+{cds7hOGFcr}Kv{Cs3HzJG!3K^b!AAblD@fHKTkck zz;j>n0v>LFCQ%`5nk^XKyN}_%{a^BPeoEDuAElms4)X^O*eqX|Af#jzBd^)tyM4bI zlGSmWLev=f>YbqkSOsTDfo*V_qPfL+hdPs=O<5QuIcqD}d~-HR$!I_ln+FcI6Iiv7 zEqs%JZ%!chzm>rkeh*!EjI|_!eZ`k1bnY<2J3mbQ!0k-WY^L0PJ_8A;izJP7)r@wM zGg)0T;_(#TQ%QSnJcTM9XUD9e-Bhi>QfCs#wK$?THomI0V(L78=5sNo*%DTrRj(mg z6l)|AG*M+zYY2hVlpJ0ZZaZ1?(8(nh`;M|3xO~GjFZiabxp41=JbrqPBbPyO-rk>@T(e7w)&g4EVB!S&o1nWMR@R0KqQhl|`@X>Z zr{0U4K1px)C3L1|XkrDb)Odp0Iq4vygrLZ>*yR1eVYcHQK_%d|AdYM@6FtlHSR6P` zE(phtEwQ{{RZMpxP!>WFj6b1y5T|L+m1JH;J~px%d~10ZjEn5Hz|Nh6F&dW>eSxgW zVF{(nXhMT=3Y>F@_bdj-p_Dif8x8=65c*Dd-uCr;%k@|B;_IFZy^=3Ha)fmFkoE97 zrpw=s{Az|j4RMrlYe&Qgz~t6oetg0b8$qDl0M(b5@Xg(JCyNg}#Fejo8AmI@twdt! 
zIOoOJbL2PQn1!mow$?X90^mB6Z269#XRg?2fkS=nImLWaIPeiS6YyIKBu|*tnuUHK ztGL+>P~^&eI{!b3{j4ivR{s#_VybnhW!DXIM)lGn31$Ffm1~Iw|4LyJBTX#|^pcGd z4Ld^YCoW&_IQik*dHi4gt2I4V;tD3N{&Do&m$G>5m=ShD2^eG?+mSs42xw~iiq?Bq z$qr_@YWFQS$GH)i$3+`evUzt0F+x&e(rl6w-$*D9M@bfF%7MdWm&_6qWoo%Gquk0le1HjH=mJrdu7j8y9SQ9V8;P?F zP;{7EjNE>-;<4q#)E+?H;+DXkkgMoN~)G|y}m)#l7pj5I0Lou zn%e=sZH(tICf0`15G+xY91WeribG2nSUI_-JOQl+tUP=N%O80=@xXm-J?C7)mh)Jm zFl%ac_++HKLP2muJT*4a5J`qUW>KCfrtqguDMWKJ9q5LDNMO(irxq#>9U3xsv|?#q zkx<}FfHK=E7;(-gG1Wz}VkWr^fg;&oKvt#8iufG(N0U;LtNwhC3fWAnMNA#7o}c7V z#8w4dgCmB(>6AE_;8BGm0s|4QnCbCt&wUy%e&I7ICc4~p;2`nXX{aiEr)M4(8X)GD zmSkOk>^C9F+$5c}s)=nuk}rA`>1_D2QJg z@BNaW_bau&ArgSz3%;Fn`3(#+tIUhoXcUP8>&>M;xQ!ijG86RL{(QbxR-5{?)qaOD z`Oiz%{LjBDAXo7AJT2L8HCoWN7yzn#*Twv=Z4-Efp@UMO=)iDFIB$z%_Sho#zWQ5@ z^s{D5=me~i5D+r(aNUlJ$dfR(qFBnp0Du(kwJve7;sdK)XZ|2}E)i8VYXU+qgtb4OYH z{9h7}KFap(J1D2uvur|aXggNrBCYygYhsfer0f!C`~chBTlYk;^U~2|1T(+_fKrny zEn4FwC^Q+d%o~$^prGPu5GL9^*Fl-=y>z-A8oC^whdYk1aD1s^X)3TQIIi8fo#(vj zI=WL`?tbDB&B4PkIYZ$KoB4Mw|HPXGaelLswdUJv&O6Ed}9+mo$& zpwFm5%GGsl9OOgSKF9dEY{#(u&JcaavUi*kcp^2-YhpeL%T9Rube|VLXD>It`bGEz zU$}1{@z7!D^q}Y%4}gS78Ygu58q_>Wtoe<$MoQ|&3W?I)Xa+;WlV9?AU3ugY=01N9 zd;i`mIMqZ>Xk_oT&*!n<{sXAbuHE`tudn|Epf|SUAy zxa?*ptAd8gSQ8+{m~U7>vvQ!eTgKQKu&q3jw7tw4bqyi&3!+#h7v{e}eea)8KlomV zYfFUunpv8NPwZpymiJPkY(4Kn%83c8WR0Bi2F~h$AJT2#!jHCM5_$C_O23d15i~siVsr z8ZNWcb(~uky!7&mxb~(Op_7x`d-sFHqeq}9@SO=1tM--Lc-9<*TtdVdligW+$@!3z z%zP920w)1Q3=veIzY!+ZBO^p$jaE}pJ#se-AA2W*#~xzKx#uyxVFNA{oD`^?Qn^H5 z%seSMD8#WMd5>w^8l!%Du5xq#yMt0S4IS?-*mp`aB^DPW^C#fop#k+uL+A#|zD+$P zFUI0mvGym%L9pbzRb$L8IcdE(#)qZlMURApU_77L^7iuAZ4Ev`>=jCd;7rN%c!CG( zhT|$MGzlLZ7jz1~{kc!$B`>*#iA`I%{oebDN1uQr_aKYMVE#BVvjt$}erXr4S#rBo zcGNEWBan*~K`iMTeCe&9>d^zt-~1V#_WiHr&`{VgQGhxYK6%TR{G4Bj^>v>B^sjj> zdc`ZKq97VaEOmq81rbHMk+7{tx9g0|mtp;SRX}b>{93rwvq}J{4ea@`e$`{z$0f$B z{-{u5oPYsjH!xTNI<~Fhv5lbgFboaM2+an^`2imO!JlDx*IjFRoVvSTjN9=_s`(lh z8=Rk@cHPX5^kW4v=SDNZZ56<(exSk4GLrhWo`2h0G)vo70v2Z^L6bLic-MNBwU)d< 
z4pb4VJj?|kqEQHR=`B7$ecxM%hdw(#5#PW|W6j`!&$9UG_uzHN?6aQ7a;G3QCaoI5 zh<^F&K6&(XR1Icv6(PgHMyT`RDK$|7E5S^Hd`iTYi7n110LaJ>hBnlgwnHuEO|M>Q z+ABrv+OcaW(d(8psl)!`6*r$;=15aBKM}aFTk?u$T*y_|Jd@?YJp1=OMml{0H`T*= z2SjM184o$*hd|0Gp7pw0g*@N-&$sG>nIF+22VNYhw;s-bi`NJdNDb8!`JndoTO_yC3Mp%9%VWdSR0oGR)Aolr7tB2}A(b&d%_;&wLK82DV46+iu57v-nv1 z!qOHaEh1@zVkh=9lNwf=2>CMjANAqGEZue=mw)>!co?{7@8vx9hTntcYX=4Rx=sLO zdNUKR{U^*zkG5-V1{is?OoVP-(b?3YySXG`K|hHF?ylH)UyA~?75gZl=f@6N)hfup zOIVM;W%T!QCcIilU_ci3QPceoYUGubyg4=w2#Hig;?Z+25se+p>xOCkFFL~j!x$^0kak{Q~?C!hJp(zG>R~q`H zWOEyPUR@PXUG{=dkt~=mnI$JUUsyZAAS??I6B`MWo6+uUwxeC6L?G4-4?WDOPraA= zfje10yP4Uo=RvPvF;6vE1@1l2r=1MU$-KRTmVaYy(oqX22l^3Nu>D}GA%fA*#(6>e|IvuF zTCxvh)k#O~h2tp4o5-mcIqdAZx^$|~i=TNJ-|~Im%NOq3$HD!NLM+U6)2a?K7L>UR ztB8#4YvuiVl-!WMY)-)Nkw-cGh5I@GwKpK8LzW}+w|@3Ze!eGreO)I2-DkfBuDFpp zRRmR3UET;Rm`SXOUB#W#N4r9uGCZIA^aV!1Yy1C+v4PKO_}{O(e!PtTwTZzHt6*!a zJ)mme7izYlOZjix#H1s}$n{>SLqlgKaM`qD<%yH*|Lz~i+V^X-+`9AWUihsH(+-G3 zBTNBN69thZ$ynAhqV^!TGg!8oyNGSpEm>HGTus_Yk+b|Da{|;mTvAg6M68WKD|71c zjexg}+^KDB$r(woQ}ij1euDWg{4dhd<7;*-z8;s(9cOU+M_75_ZZ=%{ET-2@u@ECE z2}rQxDp_wo&wpExwxHSM3NQe5CJatGPt2_E4yOLtIzMV2-}EAM#FAVULw}l#5cl$i zo3z!~u-dQ{y9ypA5PYY@QViUGc*wqmn#IDQJ>B-KbxI<_j2;+VU9iYD4L*{ zn6i!D8M23@(MX&ntl5Rox5P2Q?1?SyQAsw=>Kp`bB1E~GcHK?rh$FB@D>cMpk1~JD zds*6dH)T1&#;seK=yq5U&r&lqEd*(Uni~}cmPDtl%9lqmGh@#ZE(_w`DZ`KmM6__s zXqO;eR#p;^&(%C~a=>u0#(PI7JmQT@>2gJ{qp+X3IPKn)?TQ%%R6Fjiq(=)Aw5k9uL);bz9#A=LQ@1zI>)_r&4Ji(+F9FbSI6^XuYEZ${npp85(@5m;1M`I z2O;3fj(Gz(Xt9jZ?vy!4_F-0?#Qr9HKcCw#@#I$fA0h2Kz{ZDX2w)rq9m%Z^hvK48B#15%sPNjDIE_R^BmU?oSia*8=$Ap{`9@oM>;Nl&`NyOa zhnRf&vsmf$Vd$)q1U!l4%uJ}0$nJ?2NIR&FzKSW@l?qi(;A#Z#g^ofM6F6^TGFdg# z(dt~-luJTPepsPrAXiFx6{BNeAkMV3|?x?P9wIJ(XfG|gy&$T+By zTX-)2zeQLJ8H<%UR#pG2kLc(zV&}lQ5y~OzNn*xfwFr362^8IfG@DBy-f&KFQ4J;ZvMVy{kH_bc7)KxBU|Tr8Sx5|8Y!zbB@P=Ij8J(cO$tmq z*jVr!5BL0|pJRCI=hyTYO>VscS^sPp%+XClN*5`7pmbdVU6PcjI-F+FP?IsUqtsUP zZIz!hY#}EQxpyg<51=h^Os)EtL?&q&yf2wxmegj6>(HVxj)d6(OMv3oM;PAu`)GA~ 
zO^?Ml#L`6l=zXkw=pAgHT+g~a7qb|ksU00Wh|*}Xfb9GTLap+Qe^-?xFp#Wi5gd&O zE=2^TX<7Gb&~X&#gbsGtc3PR(RSjcnKeD1?zMH{)upETJekacYIzjxDnU z%ufaSB0TS+UEJ`>7t&kTU6Y;)n{BK&eWN5u81iTt#erH(**PF@TG9f0jPbm6E>evRpgnPcC!pWaRkC zhGFHP>+prpbr_-|iqn`qIm{bEQVXhRR)=InBTJp@nOgDjoEEuBE8H_@JYt5Y;5 zCUzaP*M*|+WuO26^S9l>g7o;NpZIP*|CV>5^I!d{qhFV1K&~^vx*z?YoLGMm)X~PE z#KNx^Pb?g&OLTTjFx=dsm&}Q;T|qDz;Is78X=W}H;PdxAz9 zDY z5TS?zio!D=fu<&O2E1xZk1MwJxKdyixcB5~KJbo@@Qy$EAUtsdHf%#&*UV)w(VbRI ziWE^*ir~$WFebbcQmjW4E2<{2<5K+p+57J}ORnp_7yYiitLlW#-P4nE9^?#yU<4Q? zQHhd8Wm6PoS+Z>FI^XN?-jiiNIo|7cW$Rkkm&&qDQUr6(nP4VJ0w73aOrGwZ4(FVz z+Izh})~?eHkRk@a3}yzx!l%(a9nPs!r)uvNej$~uprJ;!f~05(wh6|QUmrT?ovo1W z6rJmDVe;me(LQ)NE9zM64=CagdvC>wxA)4-gi^TGWTq$3Nyip*PhLlsoYED~APSjp z*c=eTfcaQKA#50j*@=veQyEi}K2f0~Au2BM7ck2Bu~=@s5|g;z>Wo(-qIi4nXUQTXEO@glX8Qq3oOe zJV&UtL`EUsID+H`T&l5+-=*rmk>~BoDu8b-wsS28V8ALpu+i`@RT(6pRd=XUuS1@7 zNt%I1rQHJBLKs-}PrHWcjH_BgePW64{MgT(BbBKLTBnZ+bO7}dTAoB#?kAkSo9f6X zspjt_gaz8;d&yktZ-f#gPxK^f-D~rh-VbSGsG)S7voQ?B1SpC~jCkh>F(mJDC6D$q zm6|M;<_zdP#oX=mzxs#hg<#wJ42f{^VU|DoMnZp{v3>i8F%GM(Tm#WV6|EvQZg#}0Q9NsDo`c2|(v$=-6P-F$YONUP zIbq<1j`P$~5UUnoYGUj1=tZw0X(Z2f5_B11tG|eM9h030UJ$t$VOo*f^^nf6=`MV*?E(yKGpr-Wa+aX zqIds&bVsI`+_VAhcBq`EiqU9}>FFwr0;gi8L%|!6uq};}sdWfQw9h=@b36_gVc;Xa z5Q?@VW*Lih!Q5%(=+Qola}|9JNIR#MIr>h>oQ+mg)f5faa_*Z_zZnC_nvsA!`UCkM z-^12q#F#5Fbuz>`f^(dTa3n-dA}oq9A@K5j+xYcg`$fKf_!Qs1^IOD4z-72Rvu=WO z#28R%m;?H#;{-7isop7`Y!dL`U%ZXy{Nn3);Ip5mfAYe09EE3K0w6A<^@G38Qg=N? zh}6z#Kq^X>wkgd5Zu2PZQNQ{gueo2Fi?|Dz9|HAB=wrks=P|23B*s+`4o1a9Ob0zCbvfT8N6r&4|8MmfL2|rn(>nDz zdSCxzba3|Y56^~$#z1|~*XVuxos5hWj2*a~8c$g!FrT-Iu8!8G&+?3*kZI!qAXXuh z^a0e``s9v4unIY8^F!+TILD~MN~W}dI<@7qG>dC2;!Cl|jH=|B8Rig`GPQr57uv0i zQcbL`PlMt;wB;txe4^SV){Q{n1R+wPWJw0v130Dj;CjYC`F=sHK? 
zJ-G-cdx3iv;PhO@d@rq$F5Yy+Ozwh7 z=CA<=lI)TNXiYyQis0A&{Li`RH~u#FedNPW`Z1o4c*Z3F*@nG@TYnW-hUBp4$dZku zsuJUXV!D7$9dJr7RUr*vsrkAA^)-S&B(UBP;HQRcC4r_I1;4~frlmu`J9&Z?p(dwW ziXow!g|9}asz{uI@l8T+)N!?khko}@S$NA^pXM|8eL^c@?-+6ZTj=pGlGVp)&FrI& z85#xk$x#u9MDz4^-dp!228P2uNX8wF8k#LcHj@P>o=6S|+1#h7?*0pO`Qh{Ty8kLf z*{ApAPtp6rN0=EOXWgEC%tuLj?go@nF~0?5wGAAcPbmKc+EOKEVqG&ad0QV-BCQei zE`f0)8f;!%yM_gjXo4bSYTwII`xFZweF;O{_aE~~gIBKVabYBLs4KSDV^K@?xh~!?VUB*b>Cd8Zvmur?!^*A{n7*xWb ztcgWNk*Xq>o=clszR3*)3X=7A8VxkeD@ki~lMOUU!I0J{_Ss296mlHFLFt9l2y?2; zg}@?_zH{tt7yQ^up2HRdn9 zRcYS8Eb^2PoEGTB#?E&&YBltKEopu^%raM>i)fEgwV>`mYYfI_VUveyewn+jev!?t zF4Pc*8{LiDcq3uU%k1W@1ww|nh)P8x9h`UsL+uriWW~(TBLIh)lxU7hMyu7Q`oeD$ zPhNZ)+ZTHj*WALoSN|G*=N^vrA_D^6=O*Q>Fe3I=4-{Hn8KfTcs79((!VR1;UDBu; zfY4kHV(sYAl#5PjW3^mq7zs@ zUa)S=Q)H_mUTA{_h^Pb{IijTXSy!ce#h44j5dw)xwMH%?t~rG&_8842rV4povAM1| zlocGzGxh-YobK}zul^DGW?FPN7W90|ELoL69a8*A7wz{6kNJ3ac(g5W;}_s$ z)##_h@B?+4|3iaEP%0+|c;O z-o&OY>p8O6XYT$7h*gF93`slBswGY`50S=ez&?p{5W)G(TK}0JCb9&xQI1|3OZz03 z>yXY2zB5bgpE%2JRz>~b-K>1-J)Hj3`)P%mY|9RqXwec!udGmk9P4bXNThIP?Hp8T zId5A1Nm{!Z%hj61lEieAa-Y!h3T-h}!YD#3&*>I9OMT(!5-cv%9A2zh8Ysaz^1?7A zL(;H#asoLq?ef7kbxA^0sS+R7RH-QtS#2VwmE=(b4EvsvN#Img@JJO|Q0RNdrLBzL z{DoIDbKr8`_wM&u0_QxkbPQQJ0^J$bTv`SYMUc+SS#yiL5aSt@0OU9RG=pu|5M#3? zr_CP--ULR*wv14XNNVRtN-;W3)6TC+S)>|hsl-fwqL4sOt8^vDs-xd92~bUcXH`Q? 
zi(ZW_k`c4_tsH0pwM%V)NvNlU%Ynmx_!j2>{C_@8XYDM626Xu_aq(U|Td~uE)SKq|3 z!kHsmCKOT87*OYpik5n)rf-o{0aIn%at#;T^d)L#Xd+ahbeyM(0ke8Zn|ovO19;~s ziMTXe&M^t&C`wDLg*%E6G@8JUcw+LTKe-aP>r|f;W#D8hqf>Zp-oJwvz3K%_ty{X6$F)**tagHC}znvfY*}uly z-up3@=T1SACTIB=jLtxMjG^!mimp6!^+ujC34j|}$Jk3=&q`~OES5wZnQGdwZMSYs z733R>p~*%mq!#09i>Jv_%e272Y89WURrqVSq^6>85`dDX@|U4*K$$uP5lL3U3SN+0 z@VU|*^^{Y>p$zh+KHvG7*C*Gji!ACD!s&Z(gX4@&A7bEI$(gI9Y1uYapQsMcyJBea z=I$=$KSQ|p?aw0qFO;ZD2KRiGg-^eivDwY6-?4)T45EMrtGqF0Kqhvb+bSK&qB0OD zFreTHNtZq8+IQ4=>?;XW*ixnTp0=0@U(=o|lZj7D%pA(6Yb81$25NBx6}qW6T)G7C zsv?M|n|HZyDe(EZ9`jL|Z)c2q;pT%oxaH-~WnyNQM@}4L`QH0suneQ4NiNo;ioCd_ zFi4v7J2SuvHrmR3fi{kVsf^5AVL&q!l{^8=63=~f7Dm^bDNTDC`pqVDmcRmKpTT{1 zGWVJHvvSX!jL)oRZ2b(aZi^5cWrUDr)~RWfAjP~8LehPxG?2SA!1ImJkXm9Ok`np^ z`b=g;5fG!IcNz18jFT(EsfCh$spOfU-hsEms7@7tQ*#G(YVVy}v(ZBWD>l{s z+S4Sf^5RjfbJVD@2JblGJr9>Py%^|w&;F4%f8&>aim!d=5x#xb-H=o-W%(qGu1g(B zH#8g6m1jEu7ksp?csco%FQs3VHZ#JI_6@y<0@_v)Q+JRaH8aDUArKWNF;Yc6o>aY(Pca|fZI2&cy5W)_dk$cOoq5@RjCe$&aPL7!+8Ti4i!CIlQcvSHQa>SNr_3KR-HiRjTS3n zmfP(aq%}>}Sx0;dE_L7irUj}a_cH&ncX0Z1A47eYk!`yfX?dcDo)|jEy98cTCHIbj zN}@#@1H`AXK=WRDtkbC?6yi(EqGZmc_b#U`#-1ouIN5{4i^@_N=!b}Jd0M_P z{)%`tPr}ApB+`s(Q{j*xBaQ2G1KJs^ZmrSvcFw93Cky7WR_-7`D_Akp-0|j^oRa2^L{iTjC0cyN!e?R0k*U#H?$a} z1*-WN!q3x3lQd+SP|2(UPg?qDx!+}0FVHgmT?go;?m;VEz_}#uzW}}_x?q?D4bTnL zGoI_b@W7kj&)o0*w{voerY6VPw`(Vx*3Z&yx2#?19rZctG{pWfw0DxM-Ga_~G)9{U zN6Yf9*%$FBs-{_{QjiqcY3!ql{oD-hD}qL6uyMDu)fNe91(woyi)o#xUlwT%W*a)eTpjwqEv z;WBWdai~nhr%PXkqwiM*+Jxv zt6qpMEj{J)F*QEHbFR9ATb}U{Hlj$ix)cf;h&+#ItIz zhUQJJY8-T;42_AhOd!U@wt-q2rA_-DB6Ph_X(adF5(EXjA-$;r$TE9oJl9RP*;Zs+ z4r~NYg__TQ@C&^CwhwXlXTA)bHcW5C1&^9Q-yC9P&UT2;$@X2vrdxi9^|!o|K{w}Ezhof_oy=3< zC{cX!04XwXA=+rh8Q8P!-I}hbt$F5nYKx7zhpv8YMHiIb=zLY|6SSn$5hh1+raPYL zu@)m(0^kyevhj*&9=Bl%07DE>vkVWY4}ALb zPdOQIoelVHFQi`gVl>JSX{*tvd8v~5NcP~{=->ZNp2@VEXKJ(#-N@9>{syw|QjXP; zP?;o_Phc{z7QgpOZ~>21g#89hJ4AviS)Ne`65TJFEOBP{W}P@-9p~uL_y(lTTQf9P z9VP}v;_$Dy<{q?bD_3LS7-2ntE+eCk0~n?$PiQYQ@3C!A13Z2T(VKLNZx4=HWsK*qA2a;rdY)!q62^oWu(_8 
z03me_8q4UKKoq5uIqInNwWi2~k(M&s^-Pa=##@rQx&{yx^+P&fdb4?Kmn5u0M;=8N zl;l$Y(WHy3lKnB`D(|==&)5n4&L6#lfAUZN1*+y7gE(~NV47>yaQsf5N$b-^p8&Yo zLyWxk_gJYML2E2;%)yo5N@UzKvTurh*A{@DtSabAjWYKuiK@+iIk zN_F^qYcI!UfvH7?9Ri~LZszUD>%4{;4UOH}Tv5>GzmleDd81*Iq}q<@wYb zUyf=_RevKg<0(#lg!+MZ5eLuQdD}BRB+KdEa4XY)^Pe%_?XtKSX(0@p(27c(ROX;T zoy+jwWF8-y%2L4@KBpdn%)pWXp+D3HV3oe(T#^+A1TrziddES}*>g1)e~yXWtfapi z$za(5p<>-gVEx868G#LTWVg?`G=nRF1>h5({3`Ey(+9cZv)=$$;%8^jAjC?+wQOvm zW?`*~B>_N;cohPQq;IL1>L67Obq%>kl61CKwSn%`IsqCYoVX(;0&vbYA)nFRe>EF^ z=rwencMD}3PWA#p%z&ssz?nfY&{2R}xF2F^f0JLuS8Mx(dQ};>&HJ*%uf|4E- zC4SF1aZ}r>{-!P8vIT|8MiuP$@*Jca z31h-w95#*$>rSn3#|^ibYR40Zb?c`2?|=IH+LER5=ZD5$XZ6>mBQ6O!TM| zV6z5xc*kz<*^{3AlOO*wZ@caN-1WJyK+%QPI6faS)1erWpj9M>v`(b5R7B8_49XCO zLb*{Wy*PdeZc|XH)K8@n=DtL0E8O4$??wHy+MnBhdAz0<$T*Kwp zA=M;T9!2|W(En>8a*e%jg`td%i!4o&ORlLkMw(TL(U)REIhS6jib$9c7!!60-1%d# zB|bsHpTG5Mzr@@BxDPNpMh(vsUt3uM28Z4$0dxp09nS$N=YutR5t@J924c?s65oK zIjKU(9M|mF$n`J3nVp9YFu%CSiF@xymzL4ah%o~a2oc{f0y5JDNQ#&0?K!h?4MO0n}`*bwL@;NDUNOG`C_ z${a@X%v=v+qsQ==bI@$=&3b6e5nBU`N7Y5g0KD zt$BL`8~NXAt$PKl6u;Gf>k#Z$rLjO0O;uu@1Iq@?>NL&&RjI^L=guB?M!9~o@bH`7 z%Ok(}kI(q^Zdf-+_>5L` z2@4Xbq$2nN-`rC!0Rw@ygMQOJZgixj!PPjMKL@L#YfqsAP~+64`?rEvK^>70QMG$Nl%)X- zLk*|Dbxz+)KR(ISOMZk6Klkh8Q#tdc9cGkB?wnEk#B7o)l(wXiq_`nQAPQ-uVd39E2 z-sXKU@3n z?-ys1)5aK$)&G0&5YknMgIVd|x{m%c#%?ms48Afi849^A)k ze&iLr;+C6Pzix(3tH66t2!XP!I6gPe=e~Fc@BhT7dH=^h%gIyo=lhT^ig0;{(O0~h ziP!vH4v%{3CGatl`#_(7mu4Bal0H|VPsEEQ=>4>kH*B0t7_tMBF0YLN($_dk2EfL3 z9fZoKdpQ9Jt3)YmTn2LwD^io=4GgRUHf=1Job=?uo^Bu{WPi=(=-J^j_T_K|un2tm zb9eL3H@}bXeEBXYYbZu>oenWzJ728vsjsN1qhVIxwuXq>2rs6to`l+n6hVp3R1OJ> z#PoM{jcO!wn51zqzXF}VC!P6ov%HJ*j9hU8J74p9vTLqpr2~rtSgr=NqB7MD{|m4TRYU&KCE z>3$}HVV0TJ%jzSvG2xS|abnvci+`!D-AlhCL`RByYL^Bb@KMgHtfZ)f3&&=+Hc4i*_1 zo1i{@Cv{_zavsBHo*S;Xlwberui-~uc?926lanz}*Aeep812nG>^-*mx6PlA ze)1a_L8x)2{qOAlYSiC~2(=SB&LwP5GWBwf%sEO)_la{XC2(C#wtYU0q6Q9d6&|52 zj!HaB%Yl(hD7r!(Y{PP12;Otbd5#99H>jBtqr_ddWrpitdIP)n>}2kAkCXR4i1t>X 
z+r??HHgYqLAT>3-iQfR=T=GCtL!c+c8&_o3>Q-mt1o3I4?r^y$)D=GSsP~35XiX!X zSs0un8CzA#BM))x18?Qj$36}#k&T--Fg-cWVkz{n^PXYA;~;`35)nKOHE6$eFhz#) z!?u2zL#rgJp(N&3p7KL+HU^45XTE}!p1DqBg$(JZF*zG*`p;wIg~tK|Lt6ZtTGz>8 z3W*ORtn>q8-XQ|7dBF{w)`HJ|=5uSt02hB;v5YQ>z?V4szyHTGdYwQ1gRkICzyCW7NN!#U*w<|SKH(3Vx!RN2 z)SpN+o4-Y*$UJ}d^*_f4Kl~XUIPwJe`{e?}KK0z4XnlU|GM#hm-?^P%dd-jWx}W|r zCMUi>jj~k~Y}vGd8?U~c1G{%HK01O3tn>y{ArLM`8Rw@dbmcTFpL#EgU;Z592M#j5 zX(P+Ei3g21PgFIGjW{X^#wmA#Cckc%67@_mIbpRqa4kYdjFWENWx;!)PIBNa5lWh> z-;ny_z`#3PT_esBsmV2h<_yfQjwrCO+^5sZXt#tI!DW_ww48l?GE9wfOm+u3ONdhcX$z8R+w60XFarM$=d4@!{+RF1j52`mcQ_EPQ3Fr2H$>=je9O( ze0G#}VFgzZC3DIEV+}~NF(-jE#47ZL#Sv90Qm)e^2(DRdU83?uArEHV?9{U~h|E=y z=%JN41V_M}7`+P7nYfgi+tYAuY-EOG10qa?n|H!d9LLIpnX4Ebv0ZD@7JmFHB;jHjWKl7u!>-`_+@X=#uoE?Xiqvw6|2bdTg z;iWHl4zGR9k1#ntJ{)nJNz6=7a`k10xc_3BJX(}^TzOOmP;r4I=_=*XhtP&3+cLUcy&Gs+>9NzS%T_#QL& zabyV5I|e`&)vRGrqBz<(24Y(DUcfsgx{P|MVo(U99S788Vj|Wx*2>SsvnVyk>WW87 zHacz-L7oCC79tnM$TL8be|@n37@)sa$5& zRLNyoQuC~k%##(yT)+v=7pYK{B%Ld0@Ax^bT(v6c-Fqh|-uXYNzIq2!qqB_d+k)#j z7ArgcJmDQe+J${ujMLU^?~O}p{4OaEruQ|*#8h(DVXehR3+e=y33U(_`;kE%$-Kw= zW~~n?=~x{!G$2H$_S~RD;Gh*!LiBI~;drmdtQWe@@x#|$MrU?CANb(M?3blk124|F zC=&oTxs&|)zezvX#Sn4E{meX?m*l%U=!BycA&<#5ELG$k8izj}+Vq0ZN{C!U=p}GX zE8XDsVe`346S_71UGvzfK!Nmi=^@V?bp~166Q&%OwBhjo=RdRf(T_atYuveG2OoLQ z|D^EHua(RjKHnp_n2@q-GTC!h@_(OrG;C6M{m;Fc|M}MUaN^Y5Mb#p^^x$4z`J$V7 z*$Z#NH}b4!7~VNrt%8l~*RgZkW-dLrm%ZD!;+$~w#3`z}e*6dds~W15_uj?A2j0S@ z1~y!NB_Z>al`Z@s*2%TbE~hft1xF`1B4rHJ-dVdpjRd?XgH-uv=>m1$QAgvzbVO*Y z{SMJKojQTgNUWErE+qgFRPo~Jt5Ej(6z!Z=;gUL?UE?*@i4*Y*vYgX6j%eg`gi|7P zMYv+;244Kq8#r)aJ1f8=j~qtlmjH*)Gn`YDIw5u&9D}#6rP^kX2ofVPbqFHSHg(28EWlj)(~XNkaJoCIZ!O}QCLDVA^sFO> z->vAb7ol-r7q{~`k6CR_Dq&}rmKP&^+CU9Q)3O&_R6I6UY7q2Zl}sEVL~?1g{*@L` zCTgE?U5v_7D-;MBlEwk6i-E(mv&%ezT!1((%+`_ne&ugjmi~QWYHE^Ce)>I3<%tiQ zHhPc4snMk#y-n$(0RT@^$ngKG&-?4IeGUKi_x^~wEH9cQ-~~5c>=(jstskKBdQ* z7#n5p&TU+A*#S0hSkL6>2qK13Uw(o>$-h!jm8^XAb{0SUE;?iDSif@*!3m{SII;U$ z5Yy6+fCik{hLFbhX~u7}-WVg%iFt8Il93g_n>f%QIoeSNVkSUMR{`%a{X*N6Jb_9L 
ztmws2MPXs3qSMML3V9R@r`eQR2rcjEd(XXII2?szm9kt%io$Wl)^*(U!mBuN&85V= z#l!dBOPD)h$ntiZTs$abSw<^*YL`PY%4&=TRh#?W(g2_32}l?cm&YUCD$|($(0Om3 z1kPEW)15`Svq)#!oCwcRG7wH2<;2I{$IP|E1+KqJ%mCM3sck#hSVsJ zE_GlA8fiHwyp0KBN_2uUq(lxoJIP!^GFzut27%>%P4td-?(q_cYMq;art_!CF2D`P z5^GO|7jSvbG3Pls=rNgRID{X(>JVXMl23f>lV|;$J)LpUB><9jn7;O>S?+F049=9< zc|lzTJKALHa>{zXA zGl;H2M?D9pgv0;xKhgWb7a#XEe*EL_W$SpWamh|vCo*jN507X#>|e79OG8gtOH+Az zHsXdd_=`XF6a0tY|D&hnInPGie9e_yesCXqc5Xc@kJoM$?BBhE=Ujgk2X^nE@SYQ= z=2>_ON{W9KLYGg||I7zi`O0URzWN&a8)ndfCU%RWPH8u;fWsXzRr`s{)pq7g7n3Ah z4RpOXns117BuRlQbiJbrkqpxb@EVB8fCyr8AoN7_2|R3m2B(%PT18H~1wyq3+LjdR zQs=>FZOQ`515)#dRvb}b)H$x59p~zs4syxOSK~&6qbHZC?t7RR9n$R@_ziDhMUAL8 zaj(Yj-o_INM?kPDRs`<~#Aj%hp;66TD5>&^IMiE$A>QK?8sBwhpfihf)*0w&rR=2%pY4=N^d>&E=#b(it2 zlPlcul?#Ky{AA+dO8_SK(Yfw*mYJ{d-i|VWF=2Ga7=3T@-YsXKy^_?XPZfQRBqo5+ zf?+j1NOZmC-+9{X!8U%Uu!3O@n$3Sz5fGZIwiXxx$_yA$CboJu&ez=ccmDxeuQe3@ z?)WGF_#g4gTdpOPig%=J*gi5CX2##8^E*62kKOdo-E3g(2w->?L>M1wvwP=mZhOxM z&dKwh?YQc){T$lAhrK(uoz=%}w+c4Q&T#pmeO!KMAL}M37z|2|o;XLy>}N>ClSf#3 z&ztFWHTwt>Vp`xBJ}LMjoB{dc+7Z_f#>aPJs!zMCTyZ zbbLktYVYW(GDwbuU1{vS?Iz2`;X+F9eMYQ=-qL`g6B#KA)AC1#tH4l^4kg3Hk1C%r zC!XV>=0qKNzzIDOc2AA)!`ENNb8o$g*{z#7xw63gk%wWWPmDE^dqd27GtV-Cu{1Y# zf_QJ(0iS_d3tJOL!mI%Y`9T`G7~72)^x`15qR+b`yW}k{R`jswe8-yi#Pn2 z-+~A&0;r(g*?pXjt(jx}rSV+}z?lQuo@lInrhpw|^Q>1M+QXl`>0Qj7etfXw`HoGq z>)5|*2bUe(d)6Pj$aAJ9#yPZa7rVA^Vb}JpOihdtLu6^CM^!zG5%``FBjr6`=hO$@ zM&27Rd*~W8^Q0k`6~u`5 z#zcsztCp*+5izX+6_7hiDtb#xc<<H{;qv+qm(iH?VF0E(YZaN9K>CCmw;Kz!jY#huu3HWjODR7AHP!`m8PvLt2m^ zE+cX0OmIIM#zB3;7GxO?hZCPR!w4hOuyU3yiPuCmcY-4yeGkV!^EuY-zLc5G(|8e9 zV%p#n@H4p8msCsYunIV}Ld%KufW4C>&SvkOR|u{FYC6PclvPcyG#AKro;G^5H!hi# z53yZCiXcf1R@2XN$7yx+>Y8rmXo>L4Kky!ho5Em{pNc$nk;lvWslHN9hy$` zrP197aMTDRK+A~~;NHLWza(X^zdZi@kN=om8^;aVs`f03wWPB2avqfc z$YVZWO#-ks&TR+av4_yakH6|w{KxTybb0 zS6_CJsmXCfgq2>Os;UVwt}%LfHipq#X7IJovGB?FFmdQICbn&0A-J?jJDjuD`-mEt z59e@VKSPvQ*F&^?N!p-e7>>@m&RN$$mFPWQ8oICT4wX164QBM`Adj`ptHqJmo|B6M z9K!fWmhMkQRfhNKs<>f9rKZBu6|+8Gl$wVSz8PvBQkWAM%{d}y^Jv4 
z;r>UCqDN2K=IlLqk1q<-{=ZVQ75to4zQyo+T z*hN5!!cg*KvoJCP-Dy}kbxx9j>i7|kz2i-s{>FEhx#2~O&vKgUp?}xVn26RRWOD?5BISD@bJx&9!{wic@9R5sY_|YGJ1~9P^hcF<4`P4!BJ*PUv4v>BhW^Upv6{j!8{jVj!gy*s; z;qG7m`&8fh_V;~lH{Ngq|NQU#EX#c$M51$4b)ayT-C9Ml%Ju)A@n~QG-+Nm=8)2EY z+SJVEX;|B|Nsa^HiQozo-$V5HMw->z+3f90j@ z-nNypDmi-WB>nQqnJoQP2whxY`Te(1-tiSSUVT07bz>}-W~u9((pN+8yHxSHp)03V z@2#|60LxUMiY!ts9Zsg=k#)qGD2&_lci2w za|K1~Dig$X{w*nx#7(!EyozT|;gJa6R>v_+3GkA2liYg!WnBBbtC`rkp1FlN79Tl+ zE-xVY2uPbu3lN`VZ7WoTEaukAcQyhLpIi0nQo`h|>i6Pp^YaB{8PN-f`42>&+YalE zYfpC_jLssG>lb&8Bgb2Y|A(Tf=qD@dG2|uUJ zx)_t%pS_pXC~8Ili8`w0VSosAr40IkqA&_+^LiS6m1d0No3GR3Wht|ZBAloLJz%|e zw1C(C;$PzpZ}{&F`cKT*?R>^Xl>lVBZ-s5wQ-<1RIBBk>xl-&Lp_ugeX!O1z7%xSU zzRuF;KAI5xNz6xVF4R!|I;r9>Nj&2wtJE}mqDd2N0%#ONqbSu*p#?j)JBn`|=8^x` zZ$Iv9d*hq`kde_2F+{{U`ay^>Qe=+It+xN0&GkYEd-ZlM6icpxB(seKN_yF?FX2D^ zw?90u$N6|s))l+9ZsvxoE@NzTGNOAQ6G7TrT4xCU0h=GHP^D771V08UlKALA}vYS9yPh$+&Q8%%uWP4Vv2zp z>7Zl3boHTT$kW##$3h>@S{KVGXik3m>$H(4FKUb?)m$7HZ7Dl=qbCOdSl-0AUg5M*wuBg88WtKnqPNsJq zU}onQ6c~g6t|ke3nEt++=nshzG^G8ed%a9uvmsqjS9ReL&}bvLJb}v^3tLl+vyHv0 z4q{GKnrmI|An(}NE;Ms89X={5P4BSzxBXJ4Fovyb4|&J8MR-J&1yu$P+D^D>-*%q= z@|)Rl#Q`FH9(?F-;@ljR0oNTzvQ{dKlQwBoG)6TToU-229LJXrvnFp}XN9Ujl=GSaJSMv9EgB@Bc}dY%{)JtRa%e9pmci_4yK1Gl~DZD;3Poa4B75`f8_==FaS9hgjHUj)q@;w#uZLcTd4 zYW6pqb6rg?1FP`NdISb3aJZ zq*)}6I(F@F^xykwj{VUeKkjS$cmMX^uy@~926ezWSSf232Ni|O7|A`}u_obIeNE>( z>`1Kr@5GKfs*-0OM;tyybi!?)QzE$q6nyu$TS2caZ1VW=!BsH54cSL>Oqa(EyP)VunEgDn>mNN2pp67OwfM;n zCl?m!-G4t?_R!qnj8BIo43Rjq3Xf@%Hzhomk`Ki>k9co3!_FbjTo5HIa9Ilxhsz3r zg3F6x0wH*GY=dk%(32Pj04M7~TS0w|iqkQz3U zTLmSj1i-Ocw76sy7`&-CmV?M3*7)3$IX83@Y?8EwUETb<5r4u9RhDt07wCv*G!t&v zwuw{f`O;@@KP%_qT*gI{0JL^Km$>Qq#M;CyM4W*#Mud@_Bb1{C@@WZ441-5)lm*a7 zC`8hRos54HCZHA=S%dy45!$J9Fhqx@hpSaB3&vVYmDpGyFC029#BtAMBXGx0|1!G# z{oA>n?g+p0@Bg`V6*UrK&2m}O4=_^XjO5M|fb{$FnAdeaqXEw}XKkgbscOhGZ|yp< z3S!H)4gBl>8VjwWV8iS>cJJ8A6$kgTee*^# zFC0F8>e+6jDKEJcJowkK)T-dS^25Yy%Kdf?JQ>`Gd=UB{cody3nu}v&lXE{O&OHTWl6f&QkC4ndq47N9yf9P1iCuI&xl*#W}~s*eH8;ZsoH5d)T&RGwY@%2qDrhD|-F&@1JnN 
zMX2fDeFuvld?%xmlg#el&w?ju=%)lA3BWtCuAO0jY^QCBP-eEFYh9cAXXbH1sYTP~ zZ+76BS{3SPD*s43i3oWC0z@*jAL$RkwH)m%RgE}mNvU(f6|HL0ljixPIde#QBaT9# zUJC7YC)0_w*Ivy z5TI4U!~di~#H-N;T?S$TU(Q=sz-K;D!J$nw=jz5<5VIN7V1Zh2ZFyy_=j#UK3M9}>fbOZB}d5&$>88+YB` zMKOP9X>OWaU{8m^w6`}by|FGf4t*)ZTQmP_GQNf#P>bEneatPaV6FbYwThe=#V->{ z6eGK`GK5-`m<8}k-4a@3!j=x){rcZP&wvB}oB!Z%am9gM4EiPH1uM0(JOGIvQO9Uk z7)u3;sRAZsYf~Pq`2?#iD%O1dUbkLc^E>)Dzx&;>I!A2&S`e1YniaK$!ueGIjv)B4 zjlA`p?>wVNKOYf7V0LDb7d+=$HmsXD--k1dk&!M3_Uz!MYp-P6=8bpwD10YC<_R%aQs5E+&u1jnGW$KBqiQ1&MQ|hQ@gp0E zE5~?Bi9igT{>tr~_~a)z_@h6Lc7%npCU=3_8`#s6@KVWgxsEnsfnf7}HIQ~@FU9DE zP7r5;LeY7|XY@j3xnI$0d2%Pzu_mZm=NTn5;|5tB38*)#>0YgLa>pSDxBkSB@n`?{ zg{z8sQ6vD_mg|sxFQcpm)}B(6cy#M?Mt66>Gc@ODZYBp53F!+$8$*Rpl<22+{5fTGnZ*t}tuYc4;;6^HgS z+G%rgZl3wmi|6~$F31Q+9%1RWKc{u#C>x*uT&g@{P^I!MiFXBH)H!+y2b8N)hhW`B z%rm5&d8)LY)K2KqZ1P5XZS1*mLY0b+JXM9>MSLbyCCn{UjJ6!Dwrw^N6^C^kA`H6# zPHZF@Ah{u0MQ(nD23l@JYK++?^hBADHHS6uK#Y7xg;P%0HQD9GSMKL|&%2WK+ct2# zT;}x2d01FLGfUJn@kxUMqSN7~CffA(>=>OhU4^Fj8UhNP`Knm*Vj@FXmXZ0~JdKJh zF*5{hPa~c6))9CLngaF7Jbb%bbM5t_6OaBEuj&JBf~ z@-*IYYGVSQMk5MjxhK?tP*t?rIYb?fQFIk$j!R&`jKn(0JSVCeA0pk%b8xIn-Rg4t zCqDC}UhBDziy#3I{Qg_1*IkOn8bru(giOhHb?~zpD{0142#EriF5T4TZ>W7>oBP@> zKq;%tfcBc|KTDf^{g`nAG_t}e84zpP#yle9o=t7|)-U}k@o{GV0F%?x{L{bvi_D$w z(<*XGV198xP)CU7ayHiW$urwFePIncb_ttMXOaXJvqUFJv-U4 zZ4!-m?LKDuZv`$^3iY&cw(Bvxg3_M5NYeS{sSbQXUeLCNQ&vWXi_^crMz!c`WMl)&!JB~bu1fd%d9yi06> zbXc0sb0!V2F`_Zj&OK$Q=?!XpE2o|Lp>s10E=w&D9D)P|$Q&noJw^~FGtUp)a3ybi z%llZIpMO%X^<2gUp8&Yl6rC6RCWE@wygO*Dkx|I@k5jf*XFX*~^BP{hRqj^7TOuH7 z46s`N7Gg7h;8E3ot*eZJ$Fd4+EMT4hct@Q_D3qNWJS*?{I16uj(;2_tV z8Y04m*%_|6{18`NdVs0PaZqKs*C(nzYeD>&(B&nTKmB2rKKBWxw(n%!wr!}(7(}J6 zt+ksYlp1I$#-~peYK%&rD1$vUOptZ8Y;+jX!rxBc&&3jhw#YE4h=Tdqi(_S32}=Rm z8ChY}v_@)f?I^AhDl)^v)$XH73!5*1q`z@$<~6x^IyiC_dd~BZDEG#|J$22JgN}Hv z-8RE5&%cU;H(W+{!)6|tJ5D)w9PL$TyM<)AK@*(dokyZc4%fj(tmeoVK+Uq)yS#Bx z#%CGMnUr#<18ELu;zJ@xV^Gwf^E;tmUZF^y z4l&x^;M4hR1b+r1jb0qU8d+(mV>Bm4!Nx&ZK^29%u4uI~8;PX#)mR;XM1rJUB`7L3 
zo>?qwHs_ux5nlcCKgI9=AOF{rd!6SnF8BmMrVrBE_bLW;mM%K;&33Z|epic_=q|O8 zHbzaGc9QH}Qzfsu_pM%Vr5M^Du?@cw3W}-vci3Y$z?>)$HD%%|6dvjcMP_og<#6BY zehpnZv$pI@U-=4dx%DRM+NNX(uLg6hMHY?EtP0B zzW2=)AC(xGKDs^3nWq5FvJ`>igUCwd2$I<&xkyn6-dhWGMBwY+euz(h{_))q&sP+A z&aUm7xaRUh7ZeWQ`^8AN%fY=nx$eqK*|}viF$Nwwa*VS2{-#rZ6(gQJPVYnSA$;c^ z+Bdz3YAnaqmh9IW>8PL~n4Sk}%Acy#(ZIu5vKvNBCnQrPb4f8UZ8TN-xfg5KR}D^} z`OiTWBa6#58ik^8=5en&l<$wJ{MWSZs5CS|+t@;CX%Ah2fwtP{z)*S$-j1~dPSh0- z))n8O;+Q%r2bWHca>M1@x%x#{(w&)PVWnd6_#(PEpAsN%%6eGBsm?G^0z*k*Yknx1 z_e7ULFw-St2N<0WZvdtSf01RyaNgmZgOPQxa)KwY?0i=Y9=eyqpZElOfAB|$qn>&o zI4?-Vd=e8gph548Bp(5+gCb@gW9%;PjNy^WzktJMhA^+nigxDhvD5nLW2A^`_<%J6 zQ4wJ!)SxhudG>hNeDD%J@Q(L9`4@T);(|{AvVA`U>#wtPP6HZ&5l`p91ifib5u27g zWN4!JtyaiRlKig-{p1ngQa8Y*kBFw3zx|Ef?4|+G$C84Wz*cQBPI2a~GN@fmHtE>4 zQCNKQ`&l>xBSPO30GrDb&#x$8O>AKu6%n$@sW2cwce)Tofqb&yNsH++cO4~VduasIE@VDs(DtOeuVn7$A zpEi=N+Bk4P5(rWySEKZ;-c_lApE>g2s7}}PYo*;5@>aqFs8Wi7k(6)EL>NS?vfE8nk5I6R{I?xchbZA; zl@ElVWZooyOR;R*`=t9n=7?j$mJGi0>wgzrIP*96mw)wFnO!$&3yc#^50rkT#2BqA z5rr6-?6k=In$#xQe#)>ySVf~Yp=&?nF(0f}?rY!zK*OHkyFOU+eKgH2u>@eDjy$xa zRAht*Xr!*KLKKLmm6x!6 z^G0T-CJ}@|Suz-u#26p{LH?>lRWkVcmzaCs+wmydF1wtT _Nojbe+^3>KZ#nHjU zVY~=!D_~g`lLpJNqPs~3Vj2clm^^r`P!aK}|OdSLs^9MGlwcvvBrzTnI662#G0-^v^5bgIC1oX604f^MMfKV*$@06fAaf( z@Z_%9a}XDF0^mp2Gji?g>8o8xwgpvWJI3%^a!QTFD6}!xzP^K61)na`ruKCS@X;Dx zQ~D@;fK~ZQl|+34*2=4~8@;9AGE)o8)I@@6tj)%_uyZ|>zyD_D-uaFv^!i@^E5FR- zM3+!UYLtb3q*rNrr6$-B>x#ArV_g$5bt$8^PSc}rlBP~yyTM0{->Z-L07Dq_*ze2g z$51}BIhSd^pa?;PI%#7btKitOnIfB`mJMLM1iG1NjE_0^{5QYNZSQ*D6TAwZzED;A zWyN!^y@DIBx#D~u&XW%BJ=->Kf$))>uQ{*{IODmk3KTUWx33ycK$}-jGKh5cn zeUOoj8yP*cm-(RNb)Z5hlbBVWx{O|A_%RS2cF#~_1+}oMKUWhSix>+AlsqBDV)8BM z00XD^O6V^J;y`G%JR`Yn-j$Q#*iu<-$2W?0F43E#u9A9vk{J%eyF9hnyGNoW~0;w zuK+$;vLFcqAX!GN)jGIXrX=3k0-O2t03N!t(3wFxGpQr+#1a7w6-VCvb~aq{9LD!= zAXYF)eg}E#NH|#qg2!|SQ82E5!VXyOq6uGNM-kGvpe9m9>lnI}9Qd@(qLS>4!Pnrj zNUx4~gmLepdR^x#F6acHwdrbf*Nds5$El)P!=y01ZxSt(QZ1`) zMMs@(hD=P(*I-cg)@h97BB^y3sl<_C|5m4rs4zh3B81ckcWT*uNOL}-njlb<61oUo 
zfpKB`7~K83UxO#;0`>F1`14Foj1ZzyE6nw3$^_^ME6J!St8`73!CJsd(A5N?vAG=nKC8c{u!@j{;cVbhKNowBxd`{3s#&Q$wbg)Ypo^x^l=`|{`6v~MrnEnAom0)vVy8dk`qj$039fYHuC zsiWOJjWb;p8M@XrH&%<)Ie0}(XmZ=!Ix!e}EU= zaswli<2-o$5&B1uqWub26p5AOa1l%Ry|=NTnsAUfv)|P;I`GaI6=qswyQC0TiwsGi z!PI`IJp;WbajnL{;dj1;Y>=__Mb|P2GAxdq2o-9025Hu@RqREU4o@|m)1=iP&c*}Q zSy7@oB&g{Kcp1_GgIX6=HMz$+r7kk&2Ff~d6dqpwvKR7(-~E3sh>qL^od9GzpGTa% zk`U7yr~!7461TQkk;*dbm{9{&sz#f6{wfYyX^C-k(N1yQP`}tq|ImX9E@G6w*IJFWO)J7kwaYT(o z?--Pkj%18?T#}`$o!>Wsv3bKR8`n)!&K5)~22$ef(nfCkKq%d7@@<>;Une+UI=7d+w|wr%L6N zD$5yc>lQrsie23Jq8pi-=ZS0Nr0B8afHtdGx zxV$kN0J9^`Y&?KMXBv8^Qnz#Mkn{0l{)@NM?yhI^^@o_Nu&zV2E>1}85n+&03f_PgJ3b0a1IOu_m>F3Q0SWK=6*rIm+6Shp7xU3+%h@I^O%H zw?5(P^K8cjn*d1GA-nm1AxM{qL+~gyoxNk!>pY^#qfZUWrfY1QjLrv$QT2==;7r70 zjo)Jp5F;(;@G2~+Wpt``frhjjjw0kvh`uoqiR4{RYa2|jR6O*ne+PXMsOw+&<<~Mk z)`1|bM40PUltBqGB_T5#4bP6;!e`gOq#2k2-ui>6!J^C&uI=lFT8ek^GNbVx-c1BcTEEhiY#c%N8 z4}RncpQoogW+o>&uxA$s_wGF3hjLb9Y^2L&2lsOG^;fZV^G49f;?fFLT@zw>R#mWx zx~970%PfE3T}*76WoF+&oO-G?UJOR-TRmSjGQpbkF)!={MbpAc@*6 ziA_KxSH%Yt$mu$pUmtOf&uhhBX54z&9-jZon_0JQJ@fN(oO<8^v{Yi2 z4@VO*r5%moPP6XBWVU3D5bGA;t@mzz0})3Cs;I`>k%93Lyu+#u8lyZU7u}n3>*wlqx>O|DI*(&brW#n z|Mv!ZpZMgHdX2Am)ep0N(=>HILR3yH4H)1FK24Nrp$?9|%a~uOsY76PG6xkz1s4skclhSz@_V-A8HYi}vr3x&sYHVz~LQ`M|Ad`FlmhHi$|EGOJ|1ajfFlV2w|L zc+*c1k7}foL($IZ2VuN`k9_3!sbl%M|$GnZXPwrQ3{b;M9oOTguJy*tb+ph`X?&Enz3 zHdn(A00hvGyaJSN+MJE9Y<9d+-B6u0Mi}%f76!todh#x04cjhtC7dei3^W*y#=g=6T$pv~R zcrr2nxlb{^ekbd$+sEO0Ks&Pq&?AHdWiVQz6@|I8c+oBV#UK6IlX^{OD=ydsz)$ZZ+w%j|8j;|M zT2t)kl5HiS)pq1<&p@GFz+b%a-F)MYJD$|JdOD-l$o^eBc>ayo z7+3ij79!?4I6XPR-d)?+y<;nTc5G#Iw2Q{biBt3E`w%avsE;3I{yn$hjvQv&_0J_A zX|o({Gwme({0gI%MQ8NDrVZW*&g4Wqk(MSek2J1acEWfX4RqAHWqAnnF~z}_%jlOv ze>oUEzi@c!&X? 
zm`$1rupzlVhUCT=3=`lS8Wb7Z04q<Ie)Q$LVXa^Ct>*X4s05FaRDKGc<`75gQXA)E-u1q~jdpB3!m^ocF)`Bb++% zaaP$l=@|U+JDGpi6GQ&5jcvPk^86Ryz+&Hc|BIEf zP(}tQ6`&G_?X(y(`T-s~HOIQ~7VF!dDw^t4z<`6zrauJiu?p~Oz_*Pqz)-$(&AED1 z_9Sb+OvWC`0ORhHC0~7bh1lv+qf{YMM}C6(B*L3pS_j)@BPk` zK3`8)lx4||EgN~^bFQO%0ZIa&&B(HBs494H?=EJhCdo6$(n^mQBUSaRsvuzc0r%X= z!u#G%F*eQYzC)}y#~@mpH>$H;!38P>FOD9j92f-#VxmAkF^%%nnaf3!f~liUXxk{Z zUZKzuN0xc2x@K;v&s@Kv8x(0}kSF zOs#@dmy&sf@r0dP5#a%?xIaXWM8`>lwucw(+r|rD@m$t#+Q0)3KEmk-9ztWJHQFUK zI=L~VwMT~N`9K6`i2y@TT-uf+NQ`JRY7vKPO+qmNgL$41LqH>k-}`PRwj5;rRXbTo z`n?T+($0r1j*>R>d^lpU^}q7=zDdG+;2=xJO{fq;Oj-zn3u2xD2$ge5bx>FdLa1uS zGRHb6-1@2?;7|X{|MR3T%cmL_W&+>~T6kx;^927AXKx7JKWwCoU%-dUE`Nv0pydF+DNC6_@Pi zx+^bbBea||5$_$S!(@>h{>8KK{=@XjI54aX$0X?U@`2~(Y{K!+dDxZ8@kO_daCKCFM7|TqhSR&6`Zr@YXPw> z)MLuTOh*3hk2C*AfBKYOXTR6u7k>S|3>Awj z13ca|>Yd?&8rtuW!X{SP4&^*xX#5jWS^&n8528p?B9tV1Y=x9Ny2XA{j`+od2xak&y%3f=u2 zy#P(!;Ks301k*zi1Bz=j?3rZFx&+Z4H;5Hpxtb2d+@NMkgpPMS{}nIejsN+-o%w4x zi*aEl08@L&cmDu`I>T!PMWnrJhW=Ex%C`h-38jm&xlIy?)p(rgdz#w%jz=c|uu2J8 z+x;F2Z8XDipc2K8)WnI5?OizbkNzdq;is0hDe%hIzK((OglMz8M8Z{E)b|HuD$ zO6Tk8jL~j~1ABI{XZu#V-Ol+woToiTN4jj^x{0eVKggDi8RgZbp_o$(^MvgXSBAY-p~@L(wovy_cCpaIZT;Ki`LQ-Xim! 
zUE8;C)3sM}*@4}(iWUnCreZ#0iderf93PczwaH4ZJ1^3zyanokf~AYyaaM_ z3`8hVBswx2UW8?VJoW#hSSKI{dP&+m$2OLMq@7wFdGu78(ba;e9#?r*dNr|9v}J}v zA+G8G7(JFQl|6eq9P*Mj(BNPTAtdr-1 zh>Z$bVw-(b6FmxE6VoD{M`sf9;yv>Hy_PdUAd=QwJ$XMUWn8+k)Ra;NcCv~onT7RXQhj}CSn|l~9EPH*Z zg3ll8@$Ro3<@WpLS#}-TBV**P7J*0|A`z&A2@-{lt_tZB8T7q zC%;AS3HT(O@5sDkW_p5~ufK}*>!#25;hgs<@|<=2x}=9E;%P|$SKEfJjxI_|44+e|(pRO2iHzn5 zErik~bU%jvZ(%+rOCpSD(%+~8%Y%{_z!#Rlqsaj^ZDvh1+W?*$B~hQ&z-Am6bd@#W z(#F^bkomN(dK#S+E0&tVs_kqoS=7kUP;(**M{z_4&)c?<7rpdmim_4d`1&`|<0p{r zNTPFV(lXE?1}TdSwM;Z7sI#}i&)5uB5qf7mTWwE2p>aTY>d z>~@YHetPZFFSrCi#FOuO5%uI=V^g{To$XWTY|bFAqSu?YOvNhArH=^!HK=~yh-8SQ zk$~MqsToeC;GB@FwQ-z~zSg0t)<7f92XxZIB<$`h_y2>x|D=R}zE>POc8r^T?ynOI zVNlyevNQ;sUM@jAu`-)UaT#?&rB<~f-q9}y1RPm+f)l0Zs}Ig``&~ykvQ#k;Pt__I zX$##<@VVeKlNj}xBhT%_WrAjKBEZ*n=Oysz>r;3oZ9H$0>cm2zMcUI2V(-k2`a-Z>n8?U`+oB_{u z6g!91OUSP9H&3{9;&x%vB8Jr~#t~HNAd?DilRV%w%Y2 z6Gh3_Q2&&qi?23MUz-Fp(&0mVl$v~QP^Cz~xJ*|)Q?j(C=4z@SO9%t;oN&Uis4Vv? z2Dva(}Ba@Aw+U)F!q+=OD+*NUo7QSEr$a$Il;fGfrryZ%?{Yl}=vXbXP# zG%F(xpH!KQr{83O7PYKy{%smcOG474N!_lwuE)R07fp=yb-m zDLDNHZ{+kx&YqlY91Pg7`yiXH*vU!>RSiYgvDj1U+9iu^K*u;GDI}Xa9PG6 zL^|CzopDbPr4(UfTX@UA{x3Z6#fzu?4;Te51fGBW)m(aD&tDlPz?sI>#5kAi-@^@8 zUB>#EX<9|j(sGaTSyjPd)DPXq{M+70HMhW)Yp$a+T5vkl5HdPsEK5yIpp(R6iiF0` zMOYE*uX{0{l8DeIB?l-27_tXCDAC)bDr4M12G@+e96+t>mQ>ny4Yd2~b^ZRaQeLgemRI9x`SoUr7T%hyft z@>f2WGJ|h?=?+*pjqi@(qqk{!+U(7HC3!12gGC}TRDKnW=H3_0D@Ih)IN&5_Iu1DT zna{A}r+%9Dc#GwpIlr2JL9>t8{JuY$jMN+Z)u zqkBXR^vaUWt%BVnEk5>vkF#*>>GSHi;1U4o&fxa_09EOU^$NOvjMm<9DpQCYW`pGlCaA0Nt(V3`)^8Ayl`7fX8jj-Q-kY5B>CV?EPCW1zb8Yb?C& z&2)W-$t$lScR9-g6PalxBe3BtUJO{5r?o7mQ9ur?kOYU)2WmP3O$*(LCyK{Mp{0&k zD~n5goI3K6%v9>q#-vzh5sY;ZREAmrBCEz>A&n0`4B2=dw`vro4VT`LsnAcjDyxFc zlnC+U-g5%S*XzijvTnkp7jC-x5Ze##=Jq?jM)lys$jFp2yVMv7+LX~r5hbAmfrtd- zG1$JKrX!#V>Y+1}Fac)}b47%xmZujw%Y7LU0WuYBhOk@R%_sQzvJA06~pz zEop4)ll}=w49ck~Hwqw1<&5Q_;s|xkItgsdayDOaB_H|IH=d14?~LMtO91leefV9s zP*oXdU}Wz&{Y`CLwKkti6@8j*wXqRE(~eR#^leC7*PsSc$LT)B~p`o60kwylU z25@zkoU^eeJfhdM-C9I_0=(~A`&<1n=`qm0H)p6tNK 
z{`BqK^X~Va)${dqMU2XohxT##p?!>xjh^qrxgf(iq17sw93NxX_ATt$zLkS}cQHLN z#?o?+xr+$*aV|s4K1-kf1WO-y4-+HftUGjozW1C?Ww=Dei6e^PX{_=m=?5k`;q(z% zZPiCecmnHORtfCqMObo%9hx5mT$vHQBhQ84lq!kyWWkt2kpzN+p|x*~68{v zV+mGduZ!f1kdl}LF*aAd23?)@@Qycn+yepnWyx~k2_iga>jqx%lAHL}efM$f&IgFC z4(ctj$&?{ZNyHl&AJYv$#gqmcIb&1dNk`_ArSO?00byZ*-m%kM@&h+>RKX0{6FVR} z>qbPQg%72au_9DLvQ~G3%R!<^rhAv9hHH~+u9C}Ooy?egOe&1Q5rQ(+$~drLn)ki& zZS+q+J+j;vTmqmQuO)7{8i@tk$*A{Ek+2UjvroP-TiF!!(iExo(%I^);k{TQN+`0)3S2k-ngFZ#(> zaHo74s%1Eb8R^;F#fndNI1)mj&a|BiTbwpf_dPiN=_*Ox1P{l~z zf{B@&td%m~wslti@aO*)+I#$JP@K0LeCTa{{zFySwq+xi?cc+ejqA_%;au>M=NVfz zuIJh-E@9v99gL55De{aMmEK@LU5CeikY{-4;sT4G`Vfns`xIODUB;$u>#2w=)-`RH zCDScf7C_x_vv1>)`H&{1!PSwJt!q}Awnd1}z|9TcDdN$Avao2gbm=yYY$f7W&2U`f z(zhxO=F%SCj7?oy${r;zPGTMkv=U$;f$2?3JsoPDFjj>Z?(%ToV895DUgp@-?Qrue zU&ung<~z54Hu<_|IQ2G*Z`u#T9u5^CiDk_RaI4nr0n8&HWoiy%Q^a9X)!@)%*tNzxmV>903p*Xe ziD|z2-VdJfaX*!~uo3`qxb4p+jPHRO+*BLcJxZOea{CJ`>k5vcf|VE3zHh*QRl4Zb zqWghi5|F{8{qED7qUlEP>Tu!+qO`}strMmc9{CslZ#ZN7REI03Q=9DZ+uw8d-Or^z z{2JPu+pJVd>8uaft0UD)Ad3Y7H4d&is5PriYPE3A(H$KzFY7F~Y}-2qgNn@Mc*z)40iWgcs|ufcdi@ILlctz+hy-F#s@P*< z$}>4rP*ye3!DJzPNXTI0nkTn{*Mi_AYK(WYl|EHeuzr1Uy@74SlgLc5aw{|)A|RbF+l?y98Oqi zn22jr^hPWw7wbMaQEHK4;*g1f8zW6u!2Yf)a7y6eUQLL?Aa~3N-1?kLSRNbY&d+}Z zs(|-h(;^UkbRm@p`FX;5X)uL2OA8t>15ivoAyDAaFMor}fBHw66QS0SM1mZpHx`x0 z@IjfFM?s0<5i)0m88LMVL=2mN{j9Dc&VhG=OVWag$!`8_PH=+(otER8OZM~D-+hC< zrgIz@Rs!I2^5^~?LTKURfcDNQip?!bOwR~KB=PSLW_NxzDS2!law>R(9#%X^RWP}UXQG0&z=ZF@{r z#=D-=N0<4ZultoV8<&WKv2DaHx8eqKXtj93BmoLsb;$v)zv|NSeJB@II42ZE&iZxJ z?AW%2%Ma{h_qNRxdB%|wrzp=7i{@gF@{xy_f7fjk^Yd)H@n%*Ev-{I%>YZy_{yDlj z22e*&l`hsIuT+ihOfwd+%{FSJ6O{}oz0KBpy^=v0ae0o<8lfQjyrS49PQqFx#D7!a zDu!8zkW>eS5Uo0Q*1s@8qa^NwrlLzBOX__wng?R;JX!EO*soYZ2)Sca;Dwj(WqjXm zKL44o5*HT9x?@C*NL}MxwrXfb!aq1c)DMyS(s&OfdL{ic2h~-_kK&iw9J=Ky=1Rqh zP&orig0=UpBjJf^=Xl`lSiCbC=2*wou0Z-7JcQb4ltS9Wv~jdGU8g}^k&EL%=6Lv9 z5A)z%XWngovT;Er0OAYe&|gDBM#w7s&MDk9WAd~uF46M`2)Au!|F-GyWS?6(;_^uU?FmVBJp*-zF!fcEYL 
zef$Nlck%?%ndPdRFJ)n1FEVS{AZ%q&QI-WUbWCqr19{P=3dv3rA%tKqf@wtJGS8qa z5riVo%|9HY5PEUo9GTB7IS7$j99izEw4|<~3~H6WQ~Fg!-p5K?<1X_-kgWr4Q&rr^tnNNZgqjdKFH1z;Y9;96M`y}&N&&ap@{cqkN_GsvCCz}SiAI^Q0Be(`l#Iz5U?6H$zgJ6+R3fGG zw1D-QWB=7R@V?(a_rgFIR04o+Q|$k1<~iRDgx!;rU6Z3M5C*B;;x!pxh)G;JX)1Vo z80!!~KaC9|?5=2M^^_GR!V|M0hX=*B*dgv%RHip)gf~dEHmBbj(L{Sv4rrbw!XlY>4 zo`&8TCIK;W^ecCA%}>3O(~{It7a(CG@-!3iJ}If{Dx(8kGpKo2IXIGs%wSVCo{_AfsURb!QYv^j@x=H}t z@Fx8J@1u<^?C8NQa}X@Qa~WOQ&9+nw4-k{A8vf+=zp7>xKill>b72>9XO`3btJJb) z!(`ZFDhI(u231SB&9n5L-)8X&XW0M}X7O8p2#rdTgzMi5qo=N@3xFyQ{mO6hwm<*l z_`a}Q3$063m7_m!EG>;0FGsv&jqFV8X;TX%ilPLBl-UA^osXCj&UwT;@TT?MiGZ|C zB%uuZ6{FFJvM4Aj&*I9Ms3^=@aY6^&Fk!BI%m0~Vly4bUGJ8yjLz0A!{6a4e7qi)32MhJoJTQ_t6 z-M90)d+uV>>@*kW7dUhN`sn&>bm-|5jDF`=aTm@q_xjgyp;r-B8axGsb2QnjqbJ#Y zaI!H&wU#TLXQK0>3^EpAtU@pI*&Ko!N78bH!VyZLR*iQi20Zq-&RY_?|>Kyx6D%P8W6MiPm<0?y z0Vsub=-5&UC;#C;yejU$D|Zm9yAeqs5u!ud<7ix3v#PHhakRqce*JyC>o5E$sV9tT zA=Xei3KhkP^;0+7t<)$XfkI4RZN{OA5&a zBsgN5D64>Yd=m~z>bMvqi%mn{7gVK8Kz~83iyG<12zKu+^Qge6Y_{C) z-t2T0@HZARwOUtE@}ju|V2}4U#Lu*Cr6*8p%TbM#&M}4K?#+D;Ja9K3eejFK`HS=i zQ?w~F83clMMn?%r$=dL(*_t_LB5FoaxC(6m;P|H=;&p%VPqXOFjy+o6bE1H%q|Bgj z88n$U6HK5m3mfKfbtmU$36vK*S9NzD^Ae*ZR${}>qTs%J@8-Qf_tK65uB!wfY`v3q z^8?^LZr>b!TR|JUWOmh@p+ojh?0wg4;F(O|b9iz@$pmEW@@s~2(Qq3%PN zbL>2GmNP&5&(Y53^Bac8Rol_(R%{{^(WtocR-`$KrYjRjsV~j*rQd&$xBu7=pdvJ_ zjV!`Y7|wcH6-Fx~QU=DfZ9+r|UKovQy!XVUhz}%$wowp20rub><2HgSp)j09Owk<6 zio(#sy^=ac+9 zTvMne1EW0C1|_x1xJgX;fRwgLnaiEsTfKI^Dp{N;-g5i|W6;NCT(@C_Q1-6L;EY1aZVC~$)h`*z$(zM`GbPDa z==>8#4cx#8A#n4qn|R9`?q_N^*wD4Sl4H>8ar?o&yy>;~a{KJ1RjvFU z3fbY$C(pnWYvC${h z+!f;i&OP!)?)b4k%V1hqOpzKJZ*;l@k=2&JicC5n0t9DxDa3T;BxF;kw5m&^F5wVD z#Pn61V>t*#o7faQr=C2*(Jy{!U9O=Qit8o;5Eqzw&0nCFS-5Gy^ueK-V`P56&og=i zN;Gp$ncxB%SrxIUSWO6AuKsP5V6zt$7-HLST+>l#tQ&<-BXriWeTV17|NHNgE?oI+ zQpz1j?{-p*Ac|K@In*Vjw-48zM&p%lXv=3$^V$FKYrOeS{xCDM1(!6@L~F(OdXCxc zJt!MSbWGAi&uAiR>-l^B?#$|M`c0>liI8W{t(y2xDaIr~ zm6C)?Vj$3@mc=F#LWwU8WDU@&jSRrQsf|3jmAYZlAuRiBZ&GN}h-23jgb37P=Zvynw)vj^MV9C^tP4in4#kw#t?_-+f 
zfV(8Y@BxB)bE(xV5xh)f7y<`&-o%6d>6fqaO1bj5ZV~{(kYeBW(MW;2eFx?CFgd}I z%%>Exovj)5q`Y}|Nx)=ksH+uzknRo3ACUZSod53xgK!x?5KYBy>#maWOwGA}@lUVf zARMmRiI%q$QGA9kn&^Xp^rX5IsSlx7$O-^xR+f11-~JNYZ@-JX@7%{yaEx06lsE^| zn@Z+3RRrfq-my}TO%TWlk_^pT79Pb$2`MUN-{$_#dD=G7w2q=z(nw^S#x%4{Z7rGG z8rZ(4&tS$OtrC;a@4+n&-uLtWg%A9-zjYPoCr2?w*mMA$+6&@HP6#d_E+CRspS*p- z1~(s|dGERmSPO&9|J1<4#x zfhFf~jWVti!Fz_4Ly}pen^2mfsWalW&H!Yc{FK(ftLapLA=3j%t3p5yrPWnjmtpB# z?&1ol-94MAEIOq?nGK#PfjeG%D_?s2Ip!aI5;r|-oyQc(FM$#b5Qw7A=L+4b|ENZb zmi`vx3h_l3pLvSgf9%KT4GYd^D|FOEkCK=VK*x;etP5<^YLvm)={W0Z*V)trnOF#G z&Vn@m&5}Uj9d~Zu%pd;jzrBhP^wq_6lK@C>CvM-nsY|80dmAe~M?;rAu3jiktgX@| z1tnOfmCw3?0c~thVCT#2hD7x`B}G9{^ySq3o?hr*uij+LMN1r{Fp?3EvbA5iW5 z5mYNsH+f@K>`f)v%-yxefXJ071a_e*@#t^8k3$cCnb-W$cQaE7qh!Y>D)f57?DoLa zY)P*#c(1t?2X&)n*rSP76OGm=Es>Tao`{gVpq0?uv4we{zmoL9zW_^yWWe*?$O#4><3^J6do01cs<@K-O{=08uc4~^zsOId2i?3>YKstAl3m<$R z+g|@>s+~8nqLHPfgyiv=uP?w7wqaG6olZY%gqXvip_FQbf&?lI)r^GVxQ21WR~0T8 zuRl85?4t>g(u&fHc28}k)9uf^_S};142DRO@6mSSHCbFs6*}D>)GBqK3_Bpnx(p;` zK^iVdL_)z<;LbPR!zVxfd76`F@nwmspe}>1v6+JNZner6XDG64G9(B(+zMBc1jLq$ zPafy3Klq&-c8SI4P_g-b%4)Renu0V}{jIa!LnDe)CFP<_bKz!5ktIi+z+0kYT{sqE z%T3<#_-}lW(fKRaF}RwzeiDGeF5K?#AkOuexqXi1AYEEGG5zVP^)94X;AB^3^n5w4uIUt(-v;MqJa9RK@&kF<21JgH^Ol_^` z%>~?)5PJ>7&4DOO6jLp3DiLNJ#U}7`j_Dna>1{B(&9iM=NpBd?WcI`aWvUc*dU)y& z9_HuY{li>(`su4$cNc#9>qvXQpV%fU?{U7c%1$78Yn=)9Aqr(+dP~L9qaWVz1R#XK z1NYv^ZToM&0R;Rehh*Wdt(!KnfA4Nyf8X8g-F*{n8(-ClfYu`}eCT)Cy6+xl4jkmd zc!k12%r`kNjsm5{<*|Tvc=d_HVE2C#bLi`K%t9RVv1K%F@V%0@-+|W*2@lYSQYD+) zch0M;i(4bZp1ZeQxl@RVh~u2#$%raUM^K_{*f>0doz!0(#)Wk>|Me|Nf)woau3))yzW6-DPFs zI1AvCjYXVOQtE(-b{>-0JJ2aMcFS~K^$!yX=JPSKzwlH~pW^W6Um830>m~sRv$vAA zyn(QLGwr@%m&Dly-6=lhHdLjV`jY<#CQ`tq+Kj0~`P;a_bg*6id#85ccL_jp2_FP4 zA*B)QtQa18mJ7f13s-RjjxgLu+q)g7Cd6TF9kMWm(P2zv16*}8sXdOQ5xl_M?CXWb z$TJ`L7?1q=`?=-5`#G?88=-_nXUChFHE0Q8AWUy^%xo?hZt_g;=rOme&)l{?(_2c0 zn*xJbp&CNyD@9Nujub7Kr~=$9!osl&{L-KKD}4I@{8#8TTKl@`oAI~(7_s#@A8@_^ z=V-(s-V!|_qhm^sD?D4a_PF@@4?x`5n|xUm+_LK??!5H?+qP`l(6zi;!#T(7^c1)3 
zzKJ^y?qm1P9qit*jj{+Vt&CV&zVfl!D<)#g#gBb}-sWv=zV|gO*GlV*lRm}-gZg>) z?hsb6ioAshR$zrZam!tS0=qBz1UeogB7{Lna8N}+U?f5n!K=aSq9{S#>R7!~C(R%~ zB+IKOd;ShH)Hw(?d9Mz_B$04gxfDR1$4kX>6PXGybdJ~T*~;m<Svgt@mBsb#aAp-6Q~h=62dWZ>N9T7Tl)5 zIO>GcU#MLeQEm1=rt`{HqO(eyWDJ4Uif?P@ivb3^3sJ=`in60*0F4Oh1@#Fj6Rt1Z zyghL4U;RAIqgRf;6hB3O&yUkY#d%}oMm2P<(8XXl2lVjRqlq@v3<}RCaoqK~+xf^R9-%pP3N1<`8Q{_3NaAsLR7_1)HRj2zW`3*@VZF=WcD_6-?t<8%%WwU)pr09mr?$7APhsrP+=&;P(~NuGSquQNJxScizU_)D)-Bp69~+rHx+NH$9d<_X&ENHZgtYT{LYLp2;O? zp~{$){RDtqgxEdROmzye0!up3w3=2xi!uUwMLA(wMF%cfSF;0DccMda%|DM!iZMqg z7rg3LsMzxhd4wRVO?oFV7NM8B1Fb3*iAw>RtY&&|@bu=U`TU1I0ig#tRI*7>-ef@x zL?^jIPsy-Ycs!iY6HLz`1q0c?v`dTiE&_>Dgel8n-2!?Oyj$bv4)@A>h1ay=E z!%`Scy=(m^CJ-`O4`EnwQDdic2Lw!WF#A`sNzYkgd1LEiiS0oqwH6o42Jo>)(@#y>Bhj>qa#}2mLbvJw8 z@CJ6?ejA(j?jg)hW_eAZ`Cv zP=|OSdCR<2@xi0Tl*BfYz~CY8v9x3$Eh;PTzIeD zL>mUld<>i#*ljJ1uG^TG#bw7pCWV|=mxXi^P3F^&fTHm{=3&Z5cJ>|bc<1YQ=v&{+ z7eDwQer7YGe!>WIIGif2$)9Ge60_>n7Er-EPjXjKU2Edl@BaZ8|EC{k>(DVykybn@ zS`y_Z!b&+I>h>?J$c)_{hO!ccQXuA?KIX*Cla1{(1KLF>f!<9!Xii<_o^%CqeIx)9 za7CXm6xsq@1BG_x7Mjtac2XtJ_%mN(g7R$F7SGwTHh+W9n1FGt>Dp|?)hGydin>DlfMq*mCwD8Jz9@G>$@z4Gtd$w((*mo-n3e%IS+7UElDPY_9yBL!wI-nIW zr7)f}qa9OCqFxx|`#nMs#zC0|x=DHzWsFimwr-b6yT3tjsx+yia%cJW+@S?!kRO{+ z%B;>7rPFw^K1B(%Vj^VqC~;yKcp5kWeEW}mKVSUtgT%H*e2;DlpD@i7)I(|~<5;`( zTld&`kGkw2_zk+S&9TEA{`gbe_uaR1zJk#>8k<4gg!N#mvX%gKuBn|dP?NG`y;tfa z*`3juTO)}Y+mE~owG&2JE^u(itt@=uk*m8Ht{|?95rFs#f8U>@*|P(;Yd{-U5v4|p zu-=V;YRzs_c{yRhsU%m5OREQDbhkF}$0!;e{bq0$!mo|(Sm^#A@ZNLOfO z3*f>uGkgCO<0hLAk)`qJO}L}8!dnPqCy;Q z`+fB9{88N0F01%`z==@`5ub%Q6b;5*HnF0Dt92!SKUYeuH%@V8gkcd`+4*0?&Id2J80XMlV{H}8gIP0)Jq-Oj9Gm6GwlAOKSV77 z6$ABp0tEpt#;X9tKs�vr?WDGZ*SSQ%FigsWLC$tHU{ASpXG=Wnu1j0tIU0*$mp2 zy!l6fq}ljJCV;!B)-o57$-P=MWh!Ad5bAIM0_r-Jf{i7dSmm5h7Q`%ir-I|o9Wy-n z)N#%}dI)-hJm0tbzvPNqQq22`$rTg4Rpw1j3|6jy5~zujvv+Xt4R=yijx(bMoSy*I z+YV=ii6?btL0*+Qwx(X}tt&CsWa@+ho`gC(zDDi5E#?{c&LE6Vp5y$7KC*rnNsj9y z0VsyJgWm}^@1ogW*_)NKr!FaT`TI%npA&#i+0WV~tENC;ZI==gz!>Muua#^Jq-vbg 
z0`Smi&896C({;nyfBDZ$5$FoTRl8y44VG1EGVuvrbs}^b1}oytX)Aw_Tv_4=!-;cY z5RdluBF!0l$W@31T`%FPX=M9dgo8goOo7IE)ETJHIcHx7kN3v^*W7)PszRJ6l?Avl zd+xi1-pNO~@R8r$=tH_J_UzihYwy0D?OV5GqmdhNJ%)2a2!TzrGwj{Hll$+wjobI# zLRFPqoL^*d`N}*wUap}_3#5}L+4e2p&Bd|ONT6`ah=gdIpzDpPZZtfnY6^eSxtL_Z zHM;h+Fm78yP+Z}%=$_e%Pk{INi;hXqW-uXG`+gOdkVW29Z3KuFYKA@Nz9-8FYaR>O zM1RFWk?;MBIMh3CbuhDKGoSf`PeY@K^9UZ*4vVFeZn0!}SxNbRO`gy9Hbb@f8)5&w)aWDUPkSz{HP z|2N;ZRc62>pU8$0_S)73AVjv@5}5nSGn{zO)d}4A>3g8M4I$BLXF;JxqcLo(Q4YoC zPF_9g1V8Mfg(s1S&g6H!dQ$&ZR2S@7+Lwv^sW{y9PS|_^J^1}39LWXL6`+C?9`8LA zruY}dz&hS}C_`3!boPc!Ic|CD9h`a3zhU&;GaG$mm&M%lG`Anv$L$CA-mnIKnZ%&q zW7m%DJaFG#+_GyYH*MWauPUkAmc^yztCq2Txy1O{rx@OHfM(x8NQqhu`sXK;>aK%j zoq#w21+Mz-2?U1I*oMV@%@5OcsxFl~bdnfS}Bx&|89(J1M#td?X< z_O5R8cb#uc_Og*|)8fF5p)SGAtp7f6U337XzZ>rVF4DfOL}%@N&GZSILkU$@pz6e8 zO3fO}DGM;H0Shv*nao%LEaP)I`=YyGMQ{Z)B$x|qnT6y3%m0clUU}6mt_S^_p|=;2 zH0dl@_LnmEJ|7Pb5l>1GdVTzGXve@>$x1OG972?YPTh*66(kIhxbmV{0_ztrIkIi~N@pIFCL;pP-T^rG4e;;VG+Lo035k|Up&B?&{O0>fFOrh|{J1U>0G+y- z@aFHO+BHK>riJB(y%qchis)p2JHBLx+N^TsRkA9lsH&zUB{>;b14QYvK(7!3P_;1Y znM=yWfA+s!)!JP#OMlOgpxuW_GQ~vAVau8pV*J+5m_nhPnI;aZd?5;cjpVU&S);(& zyh^KsxS2bU_B76ynNGNIz&Pz(MncO!GRBT|^^6%O?QElk_rdq)fo?>)%} zJdPOb962$UEH4f%f~CG&Jomoi4xai)KS?@wWo8}U%&41&?OQkV?QeS{bFBfxYazc@H;j-9qrf(rC=e%8O^>TrV*?`W!pI>)k9A1qGfES9$Ul zCc0K+;GBpl({=oQp9`2)k{PUNz>E2fR8?T)S2rzGGDR>0v-AC~&G&Gy`L`1_6uu~} z1e}&c90fJbIaVat_yDuoX~h#o2;vyy=;!BJ%I)7vZOwEqaR2mx3r80?@~O|jU;y6J z-T6fX#5Q^-xf_wX{URY6Ld1V={Vz)=F|jZVVn6tVMQd%?pktCX49aq3ov=TP8`TVsfltLX0|&PKk_NY zzx&%)wRTtSpqP8384h*MPDxSkkOYWQtOVh(&U}1x2%HVl0ekdsq)e|)5dQ#Fm?_+ ztaw;-!ay92iG+~yXRME~C>@U{VR4bdfLp!Kb0@=L7({g3?XC z(++EhXifl9%B%>1-sZ231yDs}OWJ!6Cl=